From 7f303b064864b893cb167640f59b4e4b7863cac8 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Mon, 9 Feb 2026 13:15:32 +0000 Subject: [PATCH 001/210] feat(lsp): add initial jrsonnet language server and analysis stack This adds the first full LSP implementation for jrsonnet. It introduces a new `jrsonnet-lsp` binary and a dedicated crate stack for document management, scope resolution, import graphing, type inference, diagnostics/checking, request handlers, and server orchestration. Implemented features include incremental sync, definition, hover, completion, references, rename, inlay hints, code lens, semantic tokens, formatting, workspace symbols, async request execution, and async diagnostics with dependency-aware cache invalidation. Type inference now includes flow typing driven by stdlib predicates. Flow typing means we narrow a variable's type inside control-flow branches based on checks such as `std.isString(x)` and `std.isNumber(x)`. Example: ```jsonnet if std.isString(x) then x + "!" else x ``` In the `then` branch, `x` is narrowed to `string`. Example: ```jsonnet if std.isNumber(x) then x + 1 else x ``` In the `then` branch, `x` is narrowed to `number`. Predicates marked partial (for example `std.isInteger`) only narrow the `true` branch; the `false` branch is not treated as the strict complement. There were some changes to other crates required: - The AST now models field/index/slice/call as explicit expression nodes (`ExprField`, `ExprIndex`, `ExprSlice`, `ExprCall`), giving stable node boundaries and ranges used by hover, go-to-definition, completion, and semantic token classification. - The Rowan parser now exposes the Rowan green-tree types and richer syntax error types, allowing parse results/errors to be reused safely across async diagnostics and structural tests. 
- `jrsonnet-fmt` is refactored into context/macros/printable modules, with explicit style options and snapshot coverage, so `textDocument/formatting`, which calls this, can be deterministic and regression-tested. - Evaluator/Tanka/import resolution plumbing is updated so eval diagnostics and execute-command evaluation follow the same path-resolution behavior as the language server. - Workspace dependency and compatibility updates (`lsp-types` bump, lockfile refresh, and required clippy/FFI cleanups in touched crates) keep the expanded workspace compiling cleanly under the stricter lint profile used for this work. LSP documentation is added under `docs/lsp/`. --- .markdownlint.json | 6 + Cargo.lock | 1056 ++++++-- Cargo.toml | 5 +- bindings/jsonnet/src/import.rs | 4 +- bindings/jsonnet/src/native.rs | 2 +- cmds/jrsonnet-fmt/src/comments.rs | 79 +- cmds/jrsonnet-fmt/src/context.rs | 84 + cmds/jrsonnet-fmt/src/macros.rs | 128 + cmds/jrsonnet-fmt/src/main.rs | 833 +----- cmds/jrsonnet-fmt/src/printable.rs | 614 +++++ ...onnet_fmt__tests__array_with_comments.snap | 10 + ...onnet_fmt__tests__array_with_elements.snap | 9 + ...t__tests__comment_style_hash_to_slash.snap | 8 + ...onnet_fmt__tests__comment_style_leave.snap | 10 + ...t__tests__comment_style_slash_to_hash.snap | 8 + ...et_fmt__tests__empty_array_no_padding.snap | 5 + ..._fmt__tests__empty_array_with_padding.snap | 5 + ...__tests__empty_object_default_padding.snap | 5 + ...t_fmt__tests__empty_object_no_padding.snap | 5 + ..._tests__string_style_double_to_single.snap | 8 + ...sonnet_fmt__tests__string_style_leave.snap | 8 + ..._tests__string_style_single_to_double.snap | 8 + ...fmt__tests__string_style_with_escapes.snap | 8 + cmds/jrsonnet-fmt/src/tests.rs | 156 +- cmds/jrsonnet-lsp/Cargo.toml | 21 + cmds/jrsonnet-lsp/src/main.rs | 47 + cmds/rtk/src/tanka.rs | 18 +- crates/jrsonnet-evaluator/src/manifest.rs | 26 +- crates/jrsonnet-lsp-check/Cargo.toml | 26 + crates/jrsonnet-lsp-check/src/diagnostic.rs | 397 +++ 
crates/jrsonnet-lsp-check/src/format_check.rs | 727 +++++ crates/jrsonnet-lsp-check/src/lib.rs | 17 + crates/jrsonnet-lsp-check/src/lint.rs | 1510 +++++++++++ crates/jrsonnet-lsp-check/src/type_check.rs | 2395 +++++++++++++++++ crates/jrsonnet-lsp-document/Cargo.toml | 18 + crates/jrsonnet-lsp-document/src/ast_utils.rs | 129 + crates/jrsonnet-lsp-document/src/config.rs | 12 + crates/jrsonnet-lsp-document/src/document.rs | 512 ++++ crates/jrsonnet-lsp-document/src/error.rs | 185 ++ crates/jrsonnet-lsp-document/src/lib.rs | 32 + crates/jrsonnet-lsp-document/src/position.rs | 375 +++ crates/jrsonnet-lsp-document/src/types.rs | 290 ++ crates/jrsonnet-lsp-handlers/Cargo.toml | 32 + .../jrsonnet-lsp-handlers/src/code_action.rs | 199 ++ crates/jrsonnet-lsp-handlers/src/code_lens.rs | 702 +++++ .../src/completion/fields.rs | 254 ++ .../src/completion/helpers.rs | 62 + .../src/completion/imports.rs | 227 ++ .../src/completion/locals.rs | 48 + .../src/completion/mod.rs | 639 +++++ .../src/completion/stdlib.rs | 83 + .../jrsonnet-lsp-handlers/src/definition.rs | 715 +++++ .../src/document_highlight.rs | 101 + .../jrsonnet-lsp-handlers/src/formatting.rs | 260 ++ crates/jrsonnet-lsp-handlers/src/hover.rs | 377 +++ .../jrsonnet-lsp-handlers/src/inlay_hint.rs | 228 ++ crates/jrsonnet-lsp-handlers/src/lib.rs | 31 + .../jrsonnet-lsp-handlers/src/references.rs | 561 ++++ crates/jrsonnet-lsp-handlers/src/rename.rs | 859 ++++++ .../src/semantic_tokens.rs | 570 ++++ .../src/signature_help.rs | 513 ++++ crates/jrsonnet-lsp-handlers/src/symbols.rs | 562 ++++ crates/jrsonnet-lsp-import/Cargo.toml | 17 + crates/jrsonnet-lsp-import/src/graph.rs | 865 ++++++ crates/jrsonnet-lsp-import/src/lib.rs | 17 + crates/jrsonnet-lsp-import/src/parse.rs | 190 ++ crates/jrsonnet-lsp-import/src/work_queue.rs | 381 +++ crates/jrsonnet-lsp-inference/Cargo.toml | 35 + crates/jrsonnet-lsp-inference/src/analysis.rs | 787 ++++++ .../jrsonnet-lsp-inference/src/const_eval.rs | 611 +++++ 
crates/jrsonnet-lsp-inference/src/env.rs | 332 +++ crates/jrsonnet-lsp-inference/src/expr.rs | 1977 ++++++++++++++ crates/jrsonnet-lsp-inference/src/flow.rs | 1698 ++++++++++++ crates/jrsonnet-lsp-inference/src/helpers.rs | 204 ++ crates/jrsonnet-lsp-inference/src/lib.rs | 45 + crates/jrsonnet-lsp-inference/src/manager.rs | 425 +++ crates/jrsonnet-lsp-inference/src/object.rs | 249 ++ crates/jrsonnet-lsp-inference/src/poly.rs | 406 +++ crates/jrsonnet-lsp-inference/src/provider.rs | 221 ++ .../jrsonnet-lsp-inference/src/suggestions.rs | 115 + .../jrsonnet-lsp-inference/src/type_cache.rs | 438 +++ crates/jrsonnet-lsp-scope/Cargo.toml | 17 + crates/jrsonnet-lsp-scope/src/bindings.rs | 165 ++ crates/jrsonnet-lsp-scope/src/lib.rs | 18 + crates/jrsonnet-lsp-scope/src/resolver.rs | 1164 ++++++++ crates/jrsonnet-lsp-stdlib/Cargo.toml | 18 + crates/jrsonnet-lsp-stdlib/src/docs.rs | 188 ++ crates/jrsonnet-lsp-stdlib/src/lib.rs | 14 + crates/jrsonnet-lsp-stdlib/src/signatures.rs | 461 ++++ crates/jrsonnet-lsp-types/Cargo.toml | 19 + crates/jrsonnet-lsp-types/src/display.rs | 422 +++ crates/jrsonnet-lsp-types/src/global_store.rs | 200 ++ crates/jrsonnet-lsp-types/src/lib.rs | 44 + crates/jrsonnet-lsp-types/src/local_store.rs | 141 + crates/jrsonnet-lsp-types/src/mut_store.rs | 887 ++++++ crates/jrsonnet-lsp-types/src/operations.rs | 1502 +++++++++++ crates/jrsonnet-lsp-types/src/store.rs | 2344 ++++++++++++++++ crates/jrsonnet-lsp-types/src/subst.rs | 506 ++++ crates/jrsonnet-lsp-types/src/unification.rs | 874 ++++++ crates/jrsonnet-lsp/Cargo.toml | 52 + crates/jrsonnet-lsp/benches/type_cache.rs | 208 ++ crates/jrsonnet-lsp/benches/type_inference.rs | 188 ++ crates/jrsonnet-lsp/benches/unification.rs | 339 +++ crates/jrsonnet-lsp/src/analysis/eval.rs | 283 ++ crates/jrsonnet-lsp/src/analysis/mod.rs | 9 + crates/jrsonnet-lsp/src/analysis/tanka.rs | 185 ++ crates/jrsonnet-lsp/src/async_diagnostics.rs | 374 +++ crates/jrsonnet-lsp/src/config.rs | 377 +++ 
.../jrsonnet-lsp/src/handlers/diagnostics.rs | 436 +++ crates/jrsonnet-lsp/src/handlers/mod.rs | 8 + crates/jrsonnet-lsp/src/lib.rs | 30 + crates/jrsonnet-lsp/src/server.rs | 975 +++++++ .../jrsonnet-lsp/src/server/async_requests.rs | 492 ++++ crates/jrsonnet-lsp/tests/cross_file_tests.rs | 687 +++++ .../jrsonnet-lsp/tests/e2e_annotated_tests.rs | 202 ++ .../tests/framework/assertions.rs | 467 ++++ crates/jrsonnet-lsp/tests/framework/mod.rs | 74 + crates/jrsonnet-lsp/tests/framework/parser.rs | 249 ++ crates/jrsonnet-lsp/tests/integration_test.rs | 1106 ++++++++ crates/jrsonnet-lsp/tests/stress_tests.rs | 827 ++++++ crates/jrsonnet-rowan-parser/jsonnet.ungram | 31 +- .../src/generated/nodes.rs | 157 +- .../src/generated/syntax_kinds.rs | 15 +- crates/jrsonnet-rowan-parser/src/lib.rs | 172 +- crates/jrsonnet-rowan-parser/src/parser.rs | 184 +- crates/jrsonnet-rowan-parser/src/tests.rs | 2 +- crates/jrsonnet-rowan-parser/src/token_set.rs | 2 +- crates/jrsonnet-std-sig/Cargo.toml | 13 + crates/jrsonnet-std-sig/src/lib.rs | 1705 ++++++++++++ docs/lsp/ARCHITECTURE.md | 281 ++ docs/lsp/HANDLERS.md | 253 ++ docs/lsp/TYPE_SYSTEM.md | 211 ++ xtask/src/sourcegen/mod.rs | 32 +- 133 files changed, 44268 insertions(+), 1179 deletions(-) create mode 100644 .markdownlint.json create mode 100644 cmds/jrsonnet-fmt/src/context.rs create mode 100644 cmds/jrsonnet-fmt/src/macros.rs create mode 100644 cmds/jrsonnet-fmt/src/printable.rs create mode 100644 cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__array_with_comments.snap create mode 100644 cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__array_with_elements.snap create mode 100644 cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__comment_style_hash_to_slash.snap create mode 100644 cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__comment_style_leave.snap create mode 100644 cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__comment_style_slash_to_hash.snap create mode 100644 
cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__empty_array_no_padding.snap create mode 100644 cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__empty_array_with_padding.snap create mode 100644 cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__empty_object_default_padding.snap create mode 100644 cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__empty_object_no_padding.snap create mode 100644 cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__string_style_double_to_single.snap create mode 100644 cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__string_style_leave.snap create mode 100644 cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__string_style_single_to_double.snap create mode 100644 cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__string_style_with_escapes.snap create mode 100644 cmds/jrsonnet-lsp/Cargo.toml create mode 100644 cmds/jrsonnet-lsp/src/main.rs create mode 100644 crates/jrsonnet-lsp-check/Cargo.toml create mode 100644 crates/jrsonnet-lsp-check/src/diagnostic.rs create mode 100644 crates/jrsonnet-lsp-check/src/format_check.rs create mode 100644 crates/jrsonnet-lsp-check/src/lib.rs create mode 100644 crates/jrsonnet-lsp-check/src/lint.rs create mode 100644 crates/jrsonnet-lsp-check/src/type_check.rs create mode 100644 crates/jrsonnet-lsp-document/Cargo.toml create mode 100644 crates/jrsonnet-lsp-document/src/ast_utils.rs create mode 100644 crates/jrsonnet-lsp-document/src/config.rs create mode 100644 crates/jrsonnet-lsp-document/src/document.rs create mode 100644 crates/jrsonnet-lsp-document/src/error.rs create mode 100644 crates/jrsonnet-lsp-document/src/lib.rs create mode 100644 crates/jrsonnet-lsp-document/src/position.rs create mode 100644 crates/jrsonnet-lsp-document/src/types.rs create mode 100644 crates/jrsonnet-lsp-handlers/Cargo.toml create mode 100644 crates/jrsonnet-lsp-handlers/src/code_action.rs create mode 100644 crates/jrsonnet-lsp-handlers/src/code_lens.rs create mode 100644 
crates/jrsonnet-lsp-handlers/src/completion/fields.rs create mode 100644 crates/jrsonnet-lsp-handlers/src/completion/helpers.rs create mode 100644 crates/jrsonnet-lsp-handlers/src/completion/imports.rs create mode 100644 crates/jrsonnet-lsp-handlers/src/completion/locals.rs create mode 100644 crates/jrsonnet-lsp-handlers/src/completion/mod.rs create mode 100644 crates/jrsonnet-lsp-handlers/src/completion/stdlib.rs create mode 100644 crates/jrsonnet-lsp-handlers/src/definition.rs create mode 100644 crates/jrsonnet-lsp-handlers/src/document_highlight.rs create mode 100644 crates/jrsonnet-lsp-handlers/src/formatting.rs create mode 100644 crates/jrsonnet-lsp-handlers/src/hover.rs create mode 100644 crates/jrsonnet-lsp-handlers/src/inlay_hint.rs create mode 100644 crates/jrsonnet-lsp-handlers/src/lib.rs create mode 100644 crates/jrsonnet-lsp-handlers/src/references.rs create mode 100644 crates/jrsonnet-lsp-handlers/src/rename.rs create mode 100644 crates/jrsonnet-lsp-handlers/src/semantic_tokens.rs create mode 100644 crates/jrsonnet-lsp-handlers/src/signature_help.rs create mode 100644 crates/jrsonnet-lsp-handlers/src/symbols.rs create mode 100644 crates/jrsonnet-lsp-import/Cargo.toml create mode 100644 crates/jrsonnet-lsp-import/src/graph.rs create mode 100644 crates/jrsonnet-lsp-import/src/lib.rs create mode 100644 crates/jrsonnet-lsp-import/src/parse.rs create mode 100644 crates/jrsonnet-lsp-import/src/work_queue.rs create mode 100644 crates/jrsonnet-lsp-inference/Cargo.toml create mode 100644 crates/jrsonnet-lsp-inference/src/analysis.rs create mode 100644 crates/jrsonnet-lsp-inference/src/const_eval.rs create mode 100644 crates/jrsonnet-lsp-inference/src/env.rs create mode 100644 crates/jrsonnet-lsp-inference/src/expr.rs create mode 100644 crates/jrsonnet-lsp-inference/src/flow.rs create mode 100644 crates/jrsonnet-lsp-inference/src/helpers.rs create mode 100644 crates/jrsonnet-lsp-inference/src/lib.rs create mode 100644 crates/jrsonnet-lsp-inference/src/manager.rs 
create mode 100644 crates/jrsonnet-lsp-inference/src/object.rs create mode 100644 crates/jrsonnet-lsp-inference/src/poly.rs create mode 100644 crates/jrsonnet-lsp-inference/src/provider.rs create mode 100644 crates/jrsonnet-lsp-inference/src/suggestions.rs create mode 100644 crates/jrsonnet-lsp-inference/src/type_cache.rs create mode 100644 crates/jrsonnet-lsp-scope/Cargo.toml create mode 100644 crates/jrsonnet-lsp-scope/src/bindings.rs create mode 100644 crates/jrsonnet-lsp-scope/src/lib.rs create mode 100644 crates/jrsonnet-lsp-scope/src/resolver.rs create mode 100644 crates/jrsonnet-lsp-stdlib/Cargo.toml create mode 100644 crates/jrsonnet-lsp-stdlib/src/docs.rs create mode 100644 crates/jrsonnet-lsp-stdlib/src/lib.rs create mode 100644 crates/jrsonnet-lsp-stdlib/src/signatures.rs create mode 100644 crates/jrsonnet-lsp-types/Cargo.toml create mode 100644 crates/jrsonnet-lsp-types/src/display.rs create mode 100644 crates/jrsonnet-lsp-types/src/global_store.rs create mode 100644 crates/jrsonnet-lsp-types/src/lib.rs create mode 100644 crates/jrsonnet-lsp-types/src/local_store.rs create mode 100644 crates/jrsonnet-lsp-types/src/mut_store.rs create mode 100644 crates/jrsonnet-lsp-types/src/operations.rs create mode 100644 crates/jrsonnet-lsp-types/src/store.rs create mode 100644 crates/jrsonnet-lsp-types/src/subst.rs create mode 100644 crates/jrsonnet-lsp-types/src/unification.rs create mode 100644 crates/jrsonnet-lsp/Cargo.toml create mode 100644 crates/jrsonnet-lsp/benches/type_cache.rs create mode 100644 crates/jrsonnet-lsp/benches/type_inference.rs create mode 100644 crates/jrsonnet-lsp/benches/unification.rs create mode 100644 crates/jrsonnet-lsp/src/analysis/eval.rs create mode 100644 crates/jrsonnet-lsp/src/analysis/mod.rs create mode 100644 crates/jrsonnet-lsp/src/analysis/tanka.rs create mode 100644 crates/jrsonnet-lsp/src/async_diagnostics.rs create mode 100644 crates/jrsonnet-lsp/src/config.rs create mode 100644 
crates/jrsonnet-lsp/src/handlers/diagnostics.rs create mode 100644 crates/jrsonnet-lsp/src/handlers/mod.rs create mode 100644 crates/jrsonnet-lsp/src/lib.rs create mode 100644 crates/jrsonnet-lsp/src/server.rs create mode 100644 crates/jrsonnet-lsp/src/server/async_requests.rs create mode 100644 crates/jrsonnet-lsp/tests/cross_file_tests.rs create mode 100644 crates/jrsonnet-lsp/tests/e2e_annotated_tests.rs create mode 100644 crates/jrsonnet-lsp/tests/framework/assertions.rs create mode 100644 crates/jrsonnet-lsp/tests/framework/mod.rs create mode 100644 crates/jrsonnet-lsp/tests/framework/parser.rs create mode 100644 crates/jrsonnet-lsp/tests/integration_test.rs create mode 100644 crates/jrsonnet-lsp/tests/stress_tests.rs create mode 100644 crates/jrsonnet-std-sig/Cargo.toml create mode 100644 crates/jrsonnet-std-sig/src/lib.rs create mode 100644 docs/lsp/ARCHITECTURE.md create mode 100644 docs/lsp/HANDLERS.md create mode 100644 docs/lsp/TYPE_SYSTEM.md diff --git a/.markdownlint.json b/.markdownlint.json new file mode 100644 index 00000000..ffe10de6 --- /dev/null +++ b/.markdownlint.json @@ -0,0 +1,6 @@ +{ + "MD013": { + "code_blocks": false, + "tables": false + } +} diff --git a/Cargo.lock b/Cargo.lock index b389f80c..efc25ec0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -45,6 +45,12 @@ dependencies = [ "libc", ] +[[package]] +name = "anes" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299" + [[package]] name = "annotate-snippets" version = "0.10.2" @@ -107,15 +113,15 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.100" +version = "1.0.102" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61" +checksum = "7f202df86484c868dbad7eaa557ef785d5c66295e41b460ef922eca0723b842c" [[package]] name = "ar_archive_writer" -version = "0.2.0" +version = "0.5.1" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0c269894b6fe5e9d7ada0cf69b5bf847ff35bc25fc271f08e1d080fce80339a" +checksum = "7eb93bbb63b9c227414f6eb3a0adfddca591a8ce1e9b60661bb08969b87e340b" dependencies = [ "object", ] @@ -254,9 +260,15 @@ checksum = "5e764a1d40d510daf35e07be9eb06e75770908c27d411ee6c92109c9840eaaf7" [[package]] name = "bitflags" -version = "2.10.0" +version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "bitflags" +version = "2.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "843867be96c8daad0d758b57df9392b6d8d271134fce549de6ce169ff98a92af" [[package]] name = "block-buffer" @@ -269,9 +281,9 @@ dependencies = [ [[package]] name = "bon" -version = "3.8.2" +version = "3.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "234655ec178edd82b891e262ea7cf71f6584bcd09eff94db786be23f1821825c" +checksum = "2d13a61f2963b88eef9c1be03df65d42f6996dfeac1054870d950fcf66686f83" dependencies = [ "bon-macros", "rustversion", @@ -279,9 +291,9 @@ dependencies = [ [[package]] name = "bon-macros" -version = "3.8.2" +version = "3.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89ec27229c38ed0eb3c0feee3d2c1d6a4379ae44f418a29a658890e062d8f365" +checksum = "d314cc62af2b6b0c65780555abb4d02a03dd3b799cd42419044f0c38d99738c0" dependencies = [ "darling", "ident_case", @@ -304,9 +316,9 @@ dependencies = [ [[package]] name = "bumpalo" -version = "3.19.1" +version = "3.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5dd9dc738b7a8311c7ade152424974d8115f2cdad61e8dab8dac9f2362298510" +checksum = "5d20789868f4b01b2f2caec9f5c4e0213b41e3e5702a50157d699ae31ced2fcb" [[package]] name = "bytecount" @@ -322,15 +334,21 @@ 
checksum = "0e4cec68f03f32e44924783795810fa50a7035d8c8ebe78580ad7e6c703fba38" [[package]] name = "bytes" -version = "1.11.0" +version = "1.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e748733b7cbc798e1434b6ac524f0c1ff2ab456fe201501e6497c8417a4fc33" + +[[package]] +name = "cast" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b35204fbdc0b3f4446b89fc1ac2cf84a8a68971995d0bf2e925ec7cd960f9cb3" +checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" [[package]] name = "cc" -version = "1.2.53" +version = "1.2.56" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "755d2fce177175ffca841e9a06afdb2c4ab0f593d53b4dee48147dfaade85932" +checksum = "aebf35691d1bfb0ac386a69bac2fde4dd276fb618cf8bf4f5318fe285e821bb2" dependencies = [ "find-msvc-tools", "shlex", @@ -350,9 +368,9 @@ checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" [[package]] name = "chrono" -version = "0.4.43" +version = "0.4.44" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fac4744fb15ae8337dc853fee7fb3f4e48c0fbaa23d0afe49c447b4fab126118" +checksum = "c673075a2e0e5f4a1dde27ce9dee1ea4558c7ffe648f576438a20ca1d2acc4b0" dependencies = [ "iana-time-zone", "js-sys", @@ -361,11 +379,38 @@ dependencies = [ "windows-link", ] +[[package]] +name = "ciborium" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42e69ffd6f0917f5c029256a24d0161db17cea3997d185db0d35926308770f0e" +dependencies = [ + "ciborium-io", + "ciborium-ll", + "serde", +] + +[[package]] +name = "ciborium-io" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05afea1e0a06c9be33d539b876f1ce3692f4afea2cb41f740e7743225ed1c757" + +[[package]] +name = "ciborium-ll" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"57663b653d948a338bfb3eeba9bb2fd5fcfaecb9e199e87e1eda4d9e8b240fd9" +dependencies = [ + "ciborium-io", + "half", +] + [[package]] name = "clap" -version = "4.5.54" +version = "4.5.60" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c6e6ff9dcd79cff5cd969a17a545d79e84ab086e444102a591e288a8aa3ce394" +checksum = "2797f34da339ce31042b27d23607e051786132987f595b02ba4f6a6dffb7030a" dependencies = [ "clap_builder", "clap_derive", @@ -373,9 +418,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.54" +version = "4.5.60" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa42cf4d2b7a41bc8f663a7cab4031ebafa1bf3875705bfaf8466dc60ab52c00" +checksum = "24a241312cea5059b13574bb9b3861cabf758b879c15190b37b6d6fd63ab6876" dependencies = [ "anstream", "anstyle", @@ -385,18 +430,18 @@ dependencies = [ [[package]] name = "clap_complete" -version = "4.5.65" +version = "4.5.66" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "430b4dc2b5e3861848de79627b2bedc9f3342c7da5173a14eaa5d0f8dc18ae5d" +checksum = "c757a3b7e39161a4e56f9365141ada2a6c915a8622c408ab6bb4b5d047371031" dependencies = [ "clap", ] [[package]] name = "clap_derive" -version = "4.5.49" +version = "4.5.55" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a0b5487afeab2deb2ff4e03a807ad1a03ac532ff5a2cee5d86884440c7f7671" +checksum = "a92793da1a46a5f2a02a6f4c46c6496b28c43638adea8306fcb0caa1634f24e5" dependencies = [ "heck", "proc-macro2", @@ -406,9 +451,9 @@ dependencies = [ [[package]] name = "clap_lex" -version = "0.7.7" +version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3e64b0cc0439b12df2fa678eae89a1c56a529fd067a9115f7827f1fffd22b32" +checksum = "3a822ea5bc7590f9d40f1ba12c0dc3c2760f3482c6984db1573ad11031420831" [[package]] name = "colorchoice" @@ -448,6 +493,15 @@ dependencies = [ "windows-sys 0.59.0", ] +[[package]] +name = "convert_case" +version = 
"0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec182b0ca2f35d8fc196cf3404988fd8b8c739a4d270ff118a398feb0cbec1ca" +dependencies = [ + "unicode-segmentation", +] + [[package]] name = "core-foundation" version = "0.10.1" @@ -488,6 +542,51 @@ dependencies = [ "cfg-if", ] +[[package]] +name = "criterion" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2b12d017a929603d80db1831cd3a24082f8137ce19c69e6447f54f5fc8d692f" +dependencies = [ + "anes", + "cast", + "ciborium", + "clap", + "criterion-plot", + "is-terminal", + "itertools 0.10.5", + "num-traits", + "once_cell", + "oorandom", + "plotters", + "rayon", + "regex", + "serde", + "serde_derive", + "serde_json", + "tinytemplate", + "walkdir", +] + +[[package]] +name = "criterion-plot" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b50826342786a51a89e2da3a28f1c32b06e387201bc2d19791f622c673706b1" +dependencies = [ + "cast", + "itertools 0.10.5", +] + +[[package]] +name = "crossbeam-channel" +version = "0.5.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "82b8f8f868b36967f9606790d1903570de9ceaf870a7bf9fbbd3016d636a2cb2" +dependencies = [ + "crossbeam-utils", +] + [[package]] name = "crossbeam-deque" version = "0.8.6" @@ -513,6 +612,12 @@ version = "0.8.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" +[[package]] +name = "crunchy" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "460fbee9c2c2f33933d720630a6a0bac33ba7053db5344fac858d4b8952d77d5" + [[package]] name = "crypto-common" version = "0.1.7" @@ -557,6 +662,20 @@ dependencies = [ "syn", ] +[[package]] +name = "dashmap" +version = "6.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"5041cc499144891f3790297212f32a74fb938e5136a14943f338ef9e0ae276cf" +dependencies = [ + "cfg-if", + "crossbeam-utils", + "hashbrown 0.14.5", + "lock_api", + "once_cell", + "parking_lot_core", +] + [[package]] name = "deadpool" version = "0.12.3" @@ -575,13 +694,35 @@ version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "092966b41edc516079bdf31ec78a2e0588d1d0c08f78b91d8307215928642b2b" +[[package]] +name = "derive_more" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a9b99b9cbbe49445b21764dc0625032a89b145a2642e67603e1c936f5458d05" +dependencies = [ + "derive_more-impl 1.0.0", +] + [[package]] name = "derive_more" version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d751e9e49156b02b44f9c1815bcb94b984cdcc4396ecc32521c739452808b134" dependencies = [ - "derive_more-impl", + "derive_more-impl 2.1.1", +] + +[[package]] +name = "derive_more-impl" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb7330aeadfbe296029522e6c40f315320aba36fc43a5b3632f3795348f3bd22" +dependencies = [ + "convert_case", + "proc-macro2", + "quote", + "syn", + "unicode-xid", ] [[package]] @@ -794,26 +935,41 @@ checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" [[package]] name = "find-msvc-tools" -version = "0.1.8" +version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8591b0bcc8a98a64310a2fae1bb3e9b8564dd10e381e6e28010fde8e8e8568db" +checksum = "5baebc0774151f905a1a2cc41989300b1e6fbb29aff0ceffa1064fdd3088d582" [[package]] name = "flate2" -version = "1.1.8" +version = "1.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b375d6465b98090a5f25b1c7703f3859783755aa9a80433b36e0379a3ec2f369" +checksum = "843fba2746e448b37e26a819579957415c8cef339bf08564fe8b7ddbd959573c" dependencies = [ "crc32fast", "miniz_oxide", ] +[[package]] 
+name = "fluent-uri" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "17c704e9dbe1ddd863da1e6ff3567795087b1eb201ce80d8fa81162e1516500d" +dependencies = [ + "bitflags 1.3.2", +] + [[package]] name = "fnv" version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" +[[package]] +name = "foldhash" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" + [[package]] name = "foldhash" version = "0.2.0" @@ -831,9 +987,9 @@ dependencies = [ [[package]] name = "futures" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876" +checksum = "8b147ee9d1f6d097cef9ce628cd2ee62288d963e16fb287bd9286455b241382d" dependencies = [ "futures-channel", "futures-core", @@ -846,9 +1002,9 @@ dependencies = [ [[package]] name = "futures-channel" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" +checksum = "07bbe89c50d7a535e539b8c17bc0b49bdb77747034daa8087407d655f3f7cc1d" dependencies = [ "futures-core", "futures-sink", @@ -856,15 +1012,15 @@ dependencies = [ [[package]] name = "futures-core" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" +checksum = "7e3450815272ef58cec6d564423f6e755e25379b217b0bc688e295ba24df6b1d" [[package]] name = "futures-executor" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f" +checksum = 
"baf29c38818342a3b26b5b923639e7b1f4a61fc5e76102d4b1981c6dc7a7579d" dependencies = [ "futures-core", "futures-task", @@ -873,15 +1029,15 @@ dependencies = [ [[package]] name = "futures-io" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" +checksum = "cecba35d7ad927e23624b22ad55235f2239cfa44fd10428eecbeba6d6a717718" [[package]] name = "futures-macro" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" +checksum = "e835b70203e41293343137df5c0664546da5745f82ec9b84d40be8336958447b" dependencies = [ "proc-macro2", "quote", @@ -890,15 +1046,15 @@ dependencies = [ [[package]] name = "futures-sink" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7" +checksum = "c39754e157331b013978ec91992bde1ac089843443c49cbc7f46150b0fad0893" [[package]] name = "futures-task" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" +checksum = "037711b3d59c33004d3856fbdc83b99d4ff37a24768fa1be9ce3538a1cde4393" [[package]] name = "futures-timer" @@ -908,9 +1064,9 @@ checksum = "f288b0a4f20f9a56b5d1da57e2227c661b7b16168e2f72365f57b63326e29b24" [[package]] name = "futures-util" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" +checksum = "389ca41296e6190b48053de0321d02a77f32f8a5d2461dd38762c0593805c6d6" dependencies = [ "futures-channel", "futures-core", @@ -920,7 +1076,6 @@ dependencies = [ "futures-task", "memchr", "pin-project-lite", - 
"pin-utils", "slab", ] @@ -957,6 +1112,19 @@ dependencies = [ "wasip2", ] +[[package]] +name = "getrandom" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "139ef39800118c7683f2fd3c98c1b23c09ae076556b435f8e9064ae108aaeeec" +dependencies = [ + "cfg-if", + "libc", + "r-efi", + "wasip2", + "wasip3", +] + [[package]] name = "glob" version = "0.3.3" @@ -1018,7 +1186,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2f44da3a8150a6703ed5d34e164b875fd14c2cdab9af1252a9a1020bde2bdc54" dependencies = [ "atomic-waker", - "bytes 1.11.0", + "bytes 1.11.1", "fnv", "futures-core", "futures-sink", @@ -1030,6 +1198,17 @@ dependencies = [ "tracing", ] +[[package]] +name = "half" +version = "2.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ea2d84b969582b4b1864a92dc5d27cd2b77b622a8d79306834f1be5ba20d84b" +dependencies = [ + "cfg-if", + "crunchy", + "zerocopy", +] + [[package]] name = "hashbrown" version = "0.12.3" @@ -1046,6 +1225,15 @@ dependencies = [ "allocator-api2", ] +[[package]] +name = "hashbrown" +version = "0.15.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1" +dependencies = [ + "foldhash 0.1.5", +] + [[package]] name = "hashbrown" version = "0.16.1" @@ -1054,7 +1242,7 @@ checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100" dependencies = [ "allocator-api2", "equivalent", - "foldhash", + "foldhash 0.2.0", ] [[package]] @@ -1099,7 +1287,7 @@ version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1" dependencies = [ - "bytes 1.11.0", + "bytes 1.11.1", "fnv", "itoa", ] @@ -1110,7 +1298,7 @@ version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"e3ba2a386d7f85a81f119ad7498ebe444d2e22c2af0b86b069416ace48b3311a" dependencies = [ - "bytes 1.11.0", + "bytes 1.11.1", "itoa", ] @@ -1120,7 +1308,7 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" dependencies = [ - "bytes 1.11.0", + "bytes 1.11.1", "http 1.4.0", ] @@ -1130,7 +1318,7 @@ version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b021d93e26becf5dc7e1b75b1bed1fd93124b374ceb73f43d4d4eafec896a64a" dependencies = [ - "bytes 1.11.0", + "bytes 1.11.1", "futures-core", "http 1.4.0", "http-body", @@ -1156,7 +1344,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2ab2d4f250c3d7b1c9fcdff1cece94ea4e2dfbec68614f7b87cb205f24ca9d11" dependencies = [ "atomic-waker", - "bytes 1.11.0", + "bytes 1.11.1", "futures-channel", "futures-core", "h2", @@ -1206,14 +1394,13 @@ dependencies = [ [[package]] name = "hyper-util" -version = "0.1.19" +version = "0.1.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "727805d60e7938b76b826a6ef209eb70eaa1812794f9424d4a4e2d740662df5f" +checksum = "96547c2556ec9d12fb1578c4eaf448b04993e7fb79cbaad930a656880a6bdfa0" dependencies = [ "base64 0.22.1", - "bytes 1.11.0", + "bytes 1.11.1", "futures-channel", - "futures-core", "futures-util", "http 1.4.0", "http-body", @@ -1230,9 +1417,9 @@ dependencies = [ [[package]] name = "iana-time-zone" -version = "0.1.64" +version = "0.1.65" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33e57f83510bb73707521ebaffa789ec8caf86f9657cad665b092b581d40e9fb" +checksum = "e31bc9ad994ba00e440a8aa5c9ef0ec67d5cb5e5cb0cc7f8b744a35b389cc470" dependencies = [ "android_system_properties", "core-foundation-sys", @@ -1333,6 +1520,12 @@ dependencies = [ "zerovec", ] +[[package]] +name = "id-arena" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "3d3067d79b975e8844ca9eb072e16b31c3c1c36928edf9c6789548c524d0d954" + [[package]] name = "ident_case" version = "1.0.1" @@ -1406,9 +1599,9 @@ dependencies = [ [[package]] name = "insta" -version = "1.46.1" +version = "1.46.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "248b42847813a1550dafd15296fd9748c651d0c32194559dbc05d804d54b21e8" +checksum = "e82db8c87c7f1ccecb34ce0c24399b8a73081427f3c7c50a5d597925356115e4" dependencies = [ "console", "once_cell", @@ -1432,12 +1625,32 @@ dependencies = [ "serde", ] +[[package]] +name = "is-terminal" +version = "0.4.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3640c1c38b8e4e43584d8df18be5fc6b0aa314ce6ebf51b53313d4306cca8e46" +dependencies = [ + "hermit-abi", + "libc", + "windows-sys 0.61.2", +] + [[package]] name = "is_terminal_polyfill" version = "1.70.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a6cb138bb79a146c1bd460005623e142ef0181e3d0219cb493e02f7d08a35695" +[[package]] +name = "itertools" +version = "0.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" +dependencies = [ + "either", +] + [[package]] name = "itertools" version = "0.13.0" @@ -1464,9 +1677,9 @@ checksum = "92ecc6618181def0457392ccd0ee51198e065e016d1d527a7ac1b6dc7c1f09d2" [[package]] name = "jiff" -version = "0.2.18" +version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e67e8da4c49d6d9909fe03361f9b620f58898859f5c7aded68351e85e71ecf50" +checksum = "b3e3d65f018c6ae946ab16e80944b97096ed73c35b221d1c478a6c81d8f57940" dependencies = [ "jiff-static", "log", @@ -1477,9 +1690,9 @@ dependencies = [ [[package]] name = "jiff-static" -version = "0.2.18" +version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0c84ee7f197eca9a86c6fd6cb771e55eb991632f15f2bc3ca6ec838929e6e78" 
+checksum = "a17c2b211d863c7fde02cbea8a3c1a439b98e109286554f2860bdded7ff83818" dependencies = [ "proc-macro2", "quote", @@ -1590,6 +1803,161 @@ dependencies = [ "thiserror 1.0.69", ] +[[package]] +name = "jrsonnet-lsp" +version = "0.5.0-pre97" +dependencies = [ + "anyhow", + "assert_matches", + "criterion", + "crossbeam-channel", + "jrsonnet-evaluator", + "jrsonnet-lsp-check", + "jrsonnet-lsp-document", + "jrsonnet-lsp-handlers", + "jrsonnet-lsp-import", + "jrsonnet-lsp-inference", + "jrsonnet-lsp-scope", + "jrsonnet-lsp-types", + "jrsonnet-parser", + "jrsonnet-rowan-parser", + "jrsonnet-stdlib", + "lsp-server", + "lsp-types", + "parking_lot", + "rayon", + "rstest 0.23.0", + "rustc-hash", + "serde", + "serde_json", + "tempfile", + "tracing", +] + +[[package]] +name = "jrsonnet-lsp-bin" +version = "0.5.0-pre97" +dependencies = [ + "anyhow", + "clap", + "jrsonnet-lsp", + "tracing", + "tracing-subscriber", +] + +[[package]] +name = "jrsonnet-lsp-check" +version = "0.5.0-pre97" +dependencies = [ + "assert_matches", + "jrsonnet-lsp-document", + "jrsonnet-lsp-inference", + "jrsonnet-lsp-stdlib", + "jrsonnet-lsp-types", + "jrsonnet-rowan-parser", + "lsp-types", + "rowan", + "rstest 0.23.0", + "rustc-hash", +] + +[[package]] +name = "jrsonnet-lsp-document" +version = "0.5.0-pre97" +dependencies = [ + "derive_more 1.0.0", + "jrsonnet-rowan-parser", + "lsp-types", + "rowan", + "thiserror 1.0.69", +] + +[[package]] +name = "jrsonnet-lsp-handlers" +version = "0.5.0-pre97" +dependencies = [ + "assert_matches", + "jrsonnet-lsp-document", + "jrsonnet-lsp-import", + "jrsonnet-lsp-inference", + "jrsonnet-lsp-scope", + "jrsonnet-lsp-stdlib", + "jrsonnet-lsp-types", + "jrsonnet-rowan-parser", + "lsp-types", + "rayon", + "rowan", + "rstest 0.23.0", + "serde", + "serde_json", + "tempfile", + "tracing", +] + +[[package]] +name = "jrsonnet-lsp-import" +version = "0.5.0-pre97" +dependencies = [ + "jrsonnet-lsp-document", + "jrsonnet-rowan-parser", + "rayon", + "rowan", +] + +[[package]] 
+name = "jrsonnet-lsp-inference" +version = "0.5.0-pre97" +dependencies = [ + "assert_matches", + "dashmap", + "jrsonnet-lsp-document", + "jrsonnet-lsp-import", + "jrsonnet-lsp-scope", + "jrsonnet-lsp-stdlib", + "jrsonnet-lsp-types", + "jrsonnet-rowan-parser", + "jrsonnet-std-sig", + "lru", + "lsp-types", + "moka", + "parking_lot", + "rayon", + "rowan", + "rstest 0.23.0", + "rustc-hash", + "strsim", + "tracing", +] + +[[package]] +name = "jrsonnet-lsp-scope" +version = "0.5.0-pre97" +dependencies = [ + "jrsonnet-lsp-document", + "jrsonnet-rowan-parser", + "rowan", + "rustc-hash", +] + +[[package]] +name = "jrsonnet-lsp-stdlib" +version = "0.5.0-pre97" +dependencies = [ + "indoc", + "jrsonnet-lsp-types", + "jrsonnet-std-sig", +] + +[[package]] +name = "jrsonnet-lsp-types" +version = "0.5.0-pre97" +dependencies = [ + "assert_matches", + "jrsonnet-rowan-parser", + "rstest 0.23.0", + "rustc-hash", +] + [[package]] name = "jrsonnet-macros" version = "0.5.0-pre97" @@ -1623,6 +1991,10 @@ dependencies = [ "thiserror 1.0.69", ] +[[package]] +name = "jrsonnet-std-sig" +version = "0.5.0-pre97" + [[package]] name = "jrsonnet-stdlib" version = "0.5.0-pre97" @@ -1656,9 +2028,9 @@ dependencies = [ [[package]] name = "js-sys" -version = "0.3.85" +version = "0.3.90" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c942ebf8e95485ca0d52d97da7c5a2c387d0e7f0ba4c35e93bfcaee045955b3" +checksum = "14dc6f6450b3f6d4ed5b16327f38fed626d375a886159ca555bd7822c0c3a5a6" dependencies = [ "once_cell", "wasm-bindgen", @@ -1715,7 +2087,7 @@ name = "k8s" version = "0.1.0" dependencies = [ "phf", - "rstest", + "rstest 0.26.1", "serde", "serde_json", "thiserror 1.0.69", @@ -1750,18 +2122,18 @@ dependencies = [ [[package]] name = "keccak" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ecc2af9a1119c51f12a14607e783cb977bde58bc069ff0c3da1095e635d70654" +checksum = 
"cb26cec98cce3a3d96cbb7bced3c4b16e3d13f27ec56dbd62cbc8f39cfb9d653" dependencies = [ "cpufeatures", ] [[package]] name = "kube" -version = "3.0.0" +version = "3.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0dae7229247e4215781e5c5104a056e1e2163943e577f9084cf8bba7b5248f7a" +checksum = "f96b537b4c4f61fc183594edbecbbefa3037e403feac0701bb24e6eff78e0034" dependencies = [ "k8s-openapi", "kube-client", @@ -1772,12 +2144,12 @@ dependencies = [ [[package]] name = "kube-client" -version = "3.0.0" +version = "3.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "010875e291a9c0a4e076f4f9c35b97d82fd2372cb3bc713252c3d08b7e73ce5b" +checksum = "af97b8b696eb737e5694f087c498ca725b172c2a5bc3a6916328d160225537ee" dependencies = [ "base64 0.22.1", - "bytes 1.11.0", + "bytes 1.11.1", "either", "futures", "http 1.4.0", @@ -1807,11 +2179,11 @@ dependencies = [ [[package]] name = "kube-core" -version = "3.0.0" +version = "3.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ac76281aa698dd34111e25b21f5f6561932a30feabab5357152be273f8a81bb" +checksum = "e7aeade7d2e9f165f96b3c1749ff01a8e2dc7ea954bd333bcfcecc37d5226bdd" dependencies = [ - "derive_more", + "derive_more 2.1.1", "form_urlencoded", "http 1.4.0", "jiff", @@ -1826,9 +2198,9 @@ dependencies = [ [[package]] name = "kube-derive" -version = "3.0.0" +version = "3.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "599c09721efcccc0e6a26e93df28c587da60ff5e099c657626fff2af0ae4cbb8" +checksum = "c98f59f4e68864624a0b993a1cc2424439ab7238eaede5c299e89943e2a093ff" dependencies = [ "darling", "proc-macro2", @@ -1840,9 +2212,9 @@ dependencies = [ [[package]] name = "kube-runtime" -version = "3.0.0" +version = "3.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6db43d26700f564baf850f681f3cb0f1195d2699bd379bfa70750ecec4dcb209" +checksum = 
"fc158473d6d86ec22692874bd5ddccf07474eab5c6bb41f226c522e945da5244" dependencies = [ "ahash", "async-broadcast", @@ -1871,11 +2243,17 @@ version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" +[[package]] +name = "leb128fmt" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09edd9e8b54e49e587e4f6295a7d29c3ea94d469cb40ab8ca70b288248a81db2" + [[package]] name = "libc" -version = "0.2.180" +version = "0.2.182" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bcc35a38544a891a5f7c865aca548a982ccb3b8650a5b06d0fd33a10283c56fc" +checksum = "6800badb6cb2082ffd7b6a67e6125bb39f18782f793520caee8cb8846be06112" [[package]] name = "libjsonnet" @@ -1894,7 +2272,7 @@ version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3d0b95e02c851351f877147b7deea7b1afb1df71b63aa5f8270716e0c5720616" dependencies = [ - "bitflags", + "bitflags 2.11.0", "libc", ] @@ -1906,9 +2284,9 @@ checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" [[package]] name = "linux-raw-sys" -version = "0.11.0" +version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df1d3c3b53da64cf5760482273a98e575c651a67eec7f77df96b5b642de8f039" +checksum = "32a66949e030da00e8c7d4434b251670a91556f4144941d37452769c25d58a53" [[package]] name = "litemap" @@ -1973,6 +2351,32 @@ dependencies = [ "hashbrown 0.16.1", ] +[[package]] +name = "lsp-server" +version = "0.7.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d6ada348dbc2703cbe7637b2dda05cff84d3da2819c24abcb305dd613e0ba2e" +dependencies = [ + "crossbeam-channel", + "log", + "serde", + "serde_derive", + "serde_json", +] + +[[package]] +name = "lsp-types" +version = "0.97.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"53353550a17c04ac46c585feb189c2db82154fc84b79c7a66c96c2c644f66071" +dependencies = [ + "bitflags 1.3.2", + "fluent-uri", + "serde", + "serde_json", + "serde_repr", +] + [[package]] name = "matchers" version = "0.2.0" @@ -1990,9 +2394,9 @@ checksum = "490cc448043f947bae3cbee9c203358d62dbee0db12107a74be5c30ccfd09771" [[package]] name = "memchr" -version = "2.7.6" +version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f52b00d39961fc5b2736ea853c9cc86238e165017a493d1d5c8eac6bdc4cc273" +checksum = "f8ca58f447f06ed17d5fc4043ce1b10dd205e060fb3ce5b979b8ed8e59ff3f79" [[package]] name = "memoffset" @@ -2073,13 +2477,30 @@ dependencies = [ "tracing-subscriber", ] +[[package]] +name = "moka" +version = "0.12.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b4ac832c50ced444ef6be0767a008b02c106a909ba79d1d830501e94b96f6b7e" +dependencies = [ + "crossbeam-channel", + "crossbeam-epoch", + "crossbeam-utils", + "equivalent", + "parking_lot", + "portable-atomic", + "smallvec 1.15.1", + "tagptr", + "uuid", +] + [[package]] name = "nix" version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "71e2746dc3a24dd78b3cfcb7be93368c6de9963d30f43a6a73998a9cf4b17b46" dependencies = [ - "bitflags", + "bitflags 2.11.0", "cfg-if", "cfg_aliases", "libc", @@ -2168,9 +2589,9 @@ checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3" [[package]] name = "object" -version = "0.32.2" +version = "0.37.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a6a622008b6e321afc04970976f62ee297fdbaa6f95318ca343e3eebb9648441" +checksum = "ff76201f031d8863c38aa7f905eca4f53abbfa15f609db4277d44cd8938f33fe" dependencies = [ "memchr", ] @@ -2187,6 +2608,12 @@ version = "1.70.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "384b8ab6d37215f3c5301a95a4accb5d64aa607f1fcb26a11b5303878451b4fe" +[[package]] +name = "oorandom" +version 
= "11.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6790f58c7ff633d8771f42965289203411a5e5c68388703c06e14f24770b41e" + [[package]] name = "openssl-probe" version = "0.2.1" @@ -2214,7 +2641,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d7a6d09a73194e6b66df7c8f1b680f156d916a1a942abf2de06823dd02b7855d" dependencies = [ "async-trait", - "bytes 1.11.0", + "bytes 1.11.1", "http 1.4.0", "opentelemetry", "reqwest", @@ -2381,9 +2808,9 @@ checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" [[package]] name = "pest" -version = "2.8.5" +version = "2.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c9eb05c21a464ea704b53158d358a31e6425db2f63a1a7312268b05fe2b75f7" +checksum = "e0848c601009d37dfa3430c4666e147e49cdcf1b92ecd3e63657d8a5f19da662" dependencies = [ "memchr", "ucd-trie", @@ -2391,9 +2818,9 @@ dependencies = [ [[package]] name = "pest_derive" -version = "2.8.5" +version = "2.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68f9dbced329c441fa79d80472764b1a2c7e57123553b8519b36663a2fb234ed" +checksum = "11f486f1ea21e6c10ed15d5a7c77165d0ee443402f0780849d1768e7d9d6fe77" dependencies = [ "pest", "pest_generator", @@ -2401,9 +2828,9 @@ dependencies = [ [[package]] name = "pest_generator" -version = "2.8.5" +version = "2.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3bb96d5051a78f44f43c8f712d8e810adb0ebf923fc9ed2655a7f66f63ba8ee5" +checksum = "8040c4647b13b210a963c1ed407c1ff4fdfa01c31d6d2a098218702e6664f94f" dependencies = [ "pest", "pest_meta", @@ -2414,9 +2841,9 @@ dependencies = [ [[package]] name = "pest_meta" -version = "2.8.5" +version = "2.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "602113b5b5e8621770cfd490cfd90b9f84ab29bd2b0e49ad83eb6d186cef2365" +checksum = "89815c69d36021a140146f26659a81d6c2afa33d216d736dd4be5381a7362220" 
dependencies = [ "pest", "sha2", @@ -2467,18 +2894,18 @@ dependencies = [ [[package]] name = "pin-project" -version = "1.1.10" +version = "1.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "677f1add503faace112b9f1373e43e9e054bfdd22ff1a63c1bc485eaec6a6a8a" +checksum = "f1749c7ed4bcaf4c3d0a3efc28538844fb29bcdd7d2b67b2be7e20ba861ff517" dependencies = [ "pin-project-internal", ] [[package]] name = "pin-project-internal" -version = "1.1.10" +version = "1.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e918e4ff8c4549eb882f14b3a4bc8c8bc93de829416eacf579f1207a8fbf861" +checksum = "d9b20ed30f105399776b9c883e68e536ef602a16ae6f596d2c473591d6ad64c6" dependencies = [ "proc-macro2", "quote", @@ -2487,9 +2914,9 @@ dependencies = [ [[package]] name = "pin-project-lite" -version = "0.2.16" +version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" +checksum = "a89322df9ebe1c1578d689c92318e070967d1042b512afbe49518723f4e6d5cd" [[package]] name = "pin-utils" @@ -2497,17 +2924,45 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" +[[package]] +name = "plotters" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5aeb6f403d7a4911efb1e33402027fc44f29b5bf6def3effcc22d7bb75f2b747" +dependencies = [ + "num-traits", + "plotters-backend", + "plotters-svg", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "plotters-backend" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df42e13c12958a16b3f7f4386b9ab1f3e7933914ecea48da7139435263a4172a" + +[[package]] +name = "plotters-svg" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"51bae2ac328883f7acdfea3d66a7c35751187f870bc81f94563733a154d7a670" +dependencies = [ + "plotters-backend", +] + [[package]] name = "portable-atomic" -version = "1.13.0" +version = "1.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f89776e4d69bb58bc6993e99ffa1d11f228b839984854c7daeb5d37f87cbe950" +checksum = "c33a9471896f1c69cecef8d20cbe2f7accd12527ce60845ff44c153bb2a21b49" [[package]] name = "portable-atomic-util" -version = "0.2.4" +version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8a2f0d8d040d7848a709caf78912debcc3f33ee4b3cac47d73d1e1069e83507" +checksum = "7a9db96d7fa8782dd8c15ce32ffe8680bbd1e978a43bf51a34d39483540495f5" dependencies = [ "portable-atomic", ] @@ -2551,9 +3006,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.105" +version = "1.0.106" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "535d180e0ecab6268a3e718bb9fd44db66bbbc256257165fc699dadf70d16fe7" +checksum = "8fd00f0bb2e90d81d1044c2b32617f68fcb9fa3bb7640c23e9c748e53fb30934" dependencies = [ "unicode-ident", ] @@ -2564,7 +3019,7 @@ version = "0.14.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d2ea70524a2f82d518bce41317d0fae74151505651af45faf1ffbd6fd33f0568" dependencies = [ - "bytes 1.11.0", + "bytes 1.11.1", "prost-derive", ] @@ -2583,9 +3038,9 @@ dependencies = [ [[package]] name = "psm" -version = "0.1.28" +version = "0.1.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d11f2fedc3b7dafdc2851bc52f277377c5473d378859be234bc7ebb593144d01" +checksum = "3852766467df634d74f0b2d7819bf8dc483a0eb2e3b0f50f756f9cfe8b0d18d8" dependencies = [ "ar_archive_writer", "cc", @@ -2593,9 +3048,9 @@ dependencies = [ [[package]] name = "quote" -version = "1.0.43" +version = "1.0.44" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"dc74d9a594b72ae6656596548f56f667211f8a97b3d4c3d467150794690dc40a" +checksum = "21b2ebcf727b7760c461f091f9f0f539b77b8e87f2fd88131e7f1b433b3cece4" dependencies = [ "proc-macro2", ] @@ -2709,7 +3164,7 @@ version = "0.5.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ed2bf2547551a7053d6fdfafda3f938979645c44812fbfcda098faae3f1a362d" dependencies = [ - "bitflags", + "bitflags 2.11.0", ] [[package]] @@ -2745,9 +3200,9 @@ dependencies = [ [[package]] name = "regex" -version = "1.12.2" +version = "1.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "843bc0191f75f3e22651ae5f1e72939ab2f72a4bc30fa80a066bd66edefc24d4" +checksum = "e10754a14b9137dd7b1e3e5b0493cc9171fdd105e0ab477f51b72e7f3ac0e276" dependencies = [ "aho-corasick", "memchr", @@ -2757,9 +3212,9 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.4.13" +version = "0.4.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5276caf25ac86c8d810222b3dbb938e512c55c6831a10f3e6ed1c93b84041f1c" +checksum = "6e1dd4122fc1595e8162618945476892eefca7b88c52820e74af6262213cae8f" dependencies = [ "aho-corasick", "memchr", @@ -2768,9 +3223,9 @@ dependencies = [ [[package]] name = "regex-syntax" -version = "0.8.8" +version = "0.8.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a2d987857b319362043e95f5353c0535c1f58eec5336fdfcf626430af7def58" +checksum = "dc897dd8d9e8bd1ed8cdad82b5966c3e0ecae09fb1907d58efaa013543185d0a" [[package]] name = "relative-path" @@ -2785,7 +3240,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "eddd3ca559203180a307f12d114c268abf583f59b03cb906fd0b3ff8646c1147" dependencies = [ "base64 0.22.1", - "bytes 1.11.0", + "bytes 1.11.1", "futures-channel", "futures-core", "futures-util", @@ -2839,6 +3294,18 @@ dependencies = [ "text-size", ] +[[package]] +name = "rstest" +version = "0.23.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "0a2c585be59b6b5dd66a9d2084aa1d8bd52fbdb806eafdeffb52791147862035" +dependencies = [ + "futures", + "futures-timer", + "rstest_macros 0.23.0", + "rustc_version", +] + [[package]] name = "rstest" version = "0.26.1" @@ -2847,7 +3314,25 @@ checksum = "f5a3193c063baaa2a95a33f03035c8a72b83d97a54916055ba22d35ed3839d49" dependencies = [ "futures-timer", "futures-util", - "rstest_macros", + "rstest_macros 0.26.1", +] + +[[package]] +name = "rstest_macros" +version = "0.23.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "825ea780781b15345a146be27eaefb05085e337e869bff01b4306a4fd4a9ad5a" +dependencies = [ + "cfg-if", + "glob", + "proc-macro-crate", + "proc-macro2", + "quote", + "regex", + "relative-path", + "rustc_version", + "syn", + "unicode-ident", ] [[package]] @@ -2875,7 +3360,7 @@ dependencies = [ "anyhow", "assert_matches", "bon", - "bytes 1.11.0", + "bytes 1.11.1", "clap", "dirs", "gtmpl", @@ -2901,7 +3386,7 @@ dependencies = [ "patch", "rayon", "regex", - "rstest", + "rstest 0.26.1", "rtk-diff", "serde", "serde-saphyr", @@ -2929,7 +3414,7 @@ dependencies = [ "anyhow", "nu-ansi-term", "patch", - "rstest", + "rstest 0.26.1", "serde_json", "similar", "syntect", @@ -2954,11 +3439,11 @@ dependencies = [ [[package]] name = "rustix" -version = "1.1.3" +version = "1.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "146c9e247ccc180c1f61615433868c99f3de3ae256a30a43b49f67c2d9171f34" +checksum = "b6fe4565b9518b83ef4f91bb47ce29620ca828bd32cb7e408f0062e9930ba190" dependencies = [ - "bitflags", + "bitflags 2.11.0", "errno", "libc", "linux-raw-sys", @@ -2967,9 +3452,9 @@ dependencies = [ [[package]] name = "rustls" -version = "0.23.36" +version = "0.23.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c665f33d38cea657d9614f766881e4d510e0eda4239891eea56b4cadcf01801b" +checksum = 
"758025cb5fccfd3bc2fd74708fd4682be41d99e5dff73c377c0646c6012c73a4" dependencies = [ "log", "once_cell", @@ -3020,9 +3505,9 @@ checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" [[package]] name = "ryu" -version = "1.0.22" +version = "1.0.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a50f4cf475b65d88e057964e0e9bb1f0aa9bbb2036dc65c64596b42932536984" +checksum = "9774ba4a74de5f7b1c1451ed6cd5285a32eddb5cccb8cc655a4e50009e06477f" [[package]] name = "same-file" @@ -3053,9 +3538,9 @@ dependencies = [ [[package]] name = "schemars" -version = "1.2.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54e910108742c57a770f492731f99be216a52fadd361b06c8fb59d74ccc267d2" +checksum = "a2b42f36aa1cd011945615b92222f6bf73c599a102a300334cd7f8dbeec726cc" dependencies = [ "dyn-clone", "ref-cast", @@ -3066,9 +3551,9 @@ dependencies = [ [[package]] name = "schemars_derive" -version = "1.2.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4908ad288c5035a8eb12cfdf0d49270def0a268ee162b75eeee0f85d155a7c45" +checksum = "7d115b50f4aaeea07e79c1912f645c7513d81715d0420f8bc77a18c6260b307f" dependencies = [ "proc-macro2", "quote", @@ -3093,11 +3578,11 @@ dependencies = [ [[package]] name = "security-framework" -version = "3.5.1" +version = "3.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b3297343eaf830f66ede390ea39da1d462b6b0c1b000f420d0a83f898bbbe6ef" +checksum = "b7f4bc775c73d9a02cde8bf7b2ec4c9d12743edf609006c7facc23998404cd1d" dependencies = [ - "bitflags", + "bitflags 2.11.0", "core-foundation", "core-foundation-sys", "libc", @@ -3106,9 +3591,9 @@ dependencies = [ [[package]] name = "security-framework-sys" -version = "2.15.0" +version = "2.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc1f0cbffaac4852523ce30d8bd3c5cdc873501d96ff467ca09b6767bb8cd5c0" +checksum = 
"6ce2691df843ecc5d231c0b14ece2acc3efb62c0a398c7e1d875f3983ce020e3" dependencies = [ "core-foundation-sys", "libc", @@ -3202,6 +3687,17 @@ dependencies = [ "zmij", ] +[[package]] +name = "serde_repr" +version = "0.1.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "175ee3e80ae9982737ca543e96133087cbd9a485eecc3bc4de9c1a37b47ea59c" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "serde_spanned" version = "0.6.9" @@ -3325,9 +3821,9 @@ checksum = "b2aa850e253778c88a04c3d7323b043aeda9d3e30d5971937c1855769763678e" [[package]] name = "slab" -version = "0.4.11" +version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a2ae44ef20feb57a68b23d846850f861394c2e02dc425a50098ae8c90267589" +checksum = "0c790de23124f9ab44544d7ac05d60440adc586479ce501c1d6d7da3cd8c9cf5" [[package]] name = "smallvec" @@ -3343,9 +3839,9 @@ checksum = "ef784004ca8777809dcdad6ac37629f0a97caee4c685fcea805278d81dd8b857" [[package]] name = "socket2" -version = "0.6.1" +version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17129e116933cf371d018bb80ae557e889637989d8638274fb25622827b03881" +checksum = "86f4aa3ad99f2088c990dfa82d367e19cb29268ed67c574d10d0a4bfe71f07e0" dependencies = [ "libc", "windows-sys 0.60.2", @@ -3359,9 +3855,9 @@ checksum = "6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596" [[package]] name = "stacker" -version = "0.1.22" +version = "0.1.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e1f8b29fb42aafcea4edeeb6b2f2d7ecd0d969c48b4cf0d2e64aafc471dd6e59" +checksum = "08d74a23609d509411d10e2176dc2a4346e3b4aea2e7b1869f19fdedbc71c013" dependencies = [ "cc", "cfg-if", @@ -3390,9 +3886,9 @@ checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" [[package]] name = "syn" -version = "2.0.114" +version = "2.0.117" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum 
= "d4d107df263a3013ef9b1879b0df87d706ff80f65a86ea879bd9c31f9b307c2a" +checksum = "e665b8803e7b1d2a727f4023456bbbbe74da67099c585258af0ad9c5013b9b99" dependencies = [ "proc-macro2", "quote", @@ -3457,14 +3953,20 @@ dependencies = [ "unicode-width 0.2.2", ] +[[package]] +name = "tagptr" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b2093cf4c8eb1e67749a6762251bc9cd836b6fc171623bd0a9d324d37af2417" + [[package]] name = "tempfile" -version = "3.24.0" +version = "3.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "655da9c7eb6305c55742045d5a8d2037996d61d8de95806335c7c86ce0f82e9c" +checksum = "82a72c767771b47409d2345987fda8628641887d5466101319899796367354a0" dependencies = [ "fastrand", - "getrandom 0.3.4", + "getrandom 0.4.1", "once_cell", "rustix", "windows-sys 0.61.2", @@ -3547,6 +4049,16 @@ dependencies = [ "zerovec", ] +[[package]] +name = "tinytemplate" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be4d6b5f19ff7664e8c98d03e2139cb510db9b0a60b55f8e8709b689d939b6bc" +dependencies = [ + "serde", + "serde_json", +] + [[package]] name = "tk-compare" version = "0.1.0" @@ -3561,7 +4073,7 @@ dependencies = [ "k8s-mock", "k8s-openapi", "regex", - "rstest", + "rstest 0.26.1", "rtk-diff", "serde", "serde_json", @@ -3580,7 +4092,7 @@ version = "1.49.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72a2903cd7736441aac9df9d7688bd0ce48edccaadf181c3b90be801e81d3d86" dependencies = [ - "bytes 1.11.0", + "bytes 1.11.1", "libc", "mio", "pin-project-lite", @@ -3639,7 +4151,7 @@ version = "0.7.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9ae9cec805b01e8fc3fd2fe289f89149a9b66dd16786abd8b19cfa7b48cb0098" dependencies = [ - "bytes 1.11.0", + "bytes 1.11.1", "futures-core", "futures-sink", "pin-project-lite", @@ -3705,9 +4217,9 @@ dependencies = [ [[package]] name = "toml_parser" -version = 
"1.0.6+spec-1.1.0" +version = "1.0.9+spec-1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3198b4b0a8e11f09dd03e133c0280504d0801269e9afa46362ffde1cbeebf44" +checksum = "702d4415e08923e7e1ef96cd5727c0dfed80b4d2fa25db9647fe5eb6f7c5a4c4" dependencies = [ "winnow", ] @@ -3720,13 +4232,13 @@ checksum = "5d99f8c9a7727884afe522e9bd5edbfc91a3312b36a77b5fb8926e4c31a41801" [[package]] name = "tonic" -version = "0.14.2" +version = "0.14.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb7613188ce9f7df5bfe185db26c5814347d110db17920415cf2fbcad85e7203" +checksum = "fec7c61a0695dc1887c1b53952990f3ad2e3a31453e1f49f10e75424943a93ec" dependencies = [ "async-trait", "base64 0.22.1", - "bytes 1.11.0", + "bytes 1.11.1", "http 1.4.0", "http-body", "http-body-util", @@ -3746,11 +4258,11 @@ dependencies = [ [[package]] name = "tonic-prost" -version = "0.14.2" +version = "0.14.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "66bd50ad6ce1252d87ef024b3d64fe4c3cf54a86fb9ef4c631fdd0ded7aeaa67" +checksum = "a55376a0bbaa4975a3f10d009ad763d8f4108f067c7c2e74f3001fb49778d309" dependencies = [ - "bytes 1.11.0", + "bytes 1.11.1", "prost", "tonic", ] @@ -3781,8 +4293,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d4e6559d53cc268e5031cd8429d05415bc4cb4aefc4aa5d6cc35fbf5b924a1f8" dependencies = [ "base64 0.22.1", - "bitflags", - "bytes 1.11.0", + "bitflags 2.11.0", + "bytes 1.11.1", "futures-util", "http 1.4.0", "http-body", @@ -3951,9 +4463,15 @@ checksum = "a3e5df347f0bf3ec1d670aad6ca5c6a1859cd9ea61d2113125794654ccced68f" [[package]] name = "unicode-ident" -version = "1.0.22" +version = "1.0.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9312f7c4f6ff9069b165498234ce8be658059c6728633667c526e27dc2cf1df5" +checksum = "e6e4313cd5fcd3dad5cafa179702e2b244f760991f45397d14d4ebf38247da75" + +[[package]] +name = "unicode-segmentation" +version 
= "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" [[package]] name = "unicode-width" @@ -3967,6 +4485,12 @@ version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b4ac048d71ede7ee76d585517add45da530660ef4390e49b098733c6e897f254" +[[package]] +name = "unicode-xid" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" + [[package]] name = "unsafe-libyaml" version = "0.2.11" @@ -4009,6 +4533,17 @@ version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" +[[package]] +name = "uuid" +version = "1.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b672338555252d43fd2240c714dc444b8c6fb0a5c5335e65a07bba7742735ddb" +dependencies = [ + "getrandom 0.4.1", + "js-sys", + "wasm-bindgen", +] + [[package]] name = "valuable" version = "0.1.1" @@ -4055,11 +4590,20 @@ dependencies = [ "wit-bindgen", ] +[[package]] +name = "wasip3" +version = "0.4.0+wasi-0.3.0-rc-2026-01-06" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5428f8bf88ea5ddc08faddef2ac4a67e390b88186c703ce6dbd955e1c145aca5" +dependencies = [ + "wit-bindgen", +] + [[package]] name = "wasm-bindgen" -version = "0.2.108" +version = "0.2.113" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64024a30ec1e37399cf85a7ffefebdb72205ca1c972291c51512360d90bd8566" +checksum = "60722a937f594b7fde9adb894d7c092fc1bb6612897c46368d18e7a20208eff2" dependencies = [ "cfg-if", "once_cell", @@ -4070,9 +4614,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-futures" -version = "0.4.58" +version = "0.4.63" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"70a6e77fd0ae8029c9ea0063f87c46fde723e7d887703d74ad2616d792e51e6f" +checksum = "8a89f4650b770e4521aa6573724e2aed4704372151bd0de9d16a3bbabb87441a" dependencies = [ "cfg-if", "futures-util", @@ -4084,9 +4628,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.108" +version = "0.2.113" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "008b239d9c740232e71bd39e8ef6429d27097518b6b30bdf9086833bd5b6d608" +checksum = "0fac8c6395094b6b91c4af293f4c79371c163f9a6f56184d2c9a85f5a95f3950" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -4094,9 +4638,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.108" +version = "0.2.113" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5256bae2d58f54820e6490f9839c49780dff84c65aeab9e772f15d5f0e913a55" +checksum = "ab3fabce6159dc20728033842636887e4877688ae94382766e00b180abac9d60" dependencies = [ "bumpalo", "proc-macro2", @@ -4107,18 +4651,52 @@ dependencies = [ [[package]] name = "wasm-bindgen-shared" -version = "0.2.108" +version = "0.2.113" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f01b580c9ac74c8d8f0c0e4afb04eeef2acf145458e52c03845ee9cd23e3d12" +checksum = "de0e091bdb824da87dc01d967388880d017a0a9bc4f3bdc0d86ee9f9336e3bb5" dependencies = [ "unicode-ident", ] +[[package]] +name = "wasm-encoder" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "990065f2fe63003fe337b932cfb5e3b80e0b4d0f5ff650e6985b1048f62c8319" +dependencies = [ + "leb128fmt", + "wasmparser", +] + +[[package]] +name = "wasm-metadata" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb0e353e6a2fbdc176932bbaab493762eb1255a7900fe0fea1a2f96c296cc909" +dependencies = [ + "anyhow", + "indexmap 2.13.0", + "wasm-encoder", + "wasmparser", +] + +[[package]] +name = "wasmparser" +version = "0.244.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "47b807c72e1bac69382b3a6fb3dbe8ea4c0ed87ff5629b8685ae6b9a611028fe" +dependencies = [ + "bitflags 2.11.0", + "hashbrown 0.15.5", + "indexmap 2.13.0", + "semver", +] + [[package]] name = "web-sys" -version = "0.3.85" +version = "0.3.90" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "312e32e551d92129218ea9a2452120f4aabc03529ef03e4d0d82fb2780608598" +checksum = "705eceb4ce901230f8625bd1d665128056ccbe4b7408faa625eec1ba80f59a97" dependencies = [ "js-sys", "wasm-bindgen", @@ -4136,9 +4714,9 @@ dependencies = [ [[package]] name = "webpki-roots" -version = "1.0.5" +version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12bed680863276c63889429bfd6cab3b99943659923822de1c8a39c49e4d722c" +checksum = "22cfaf3c063993ff62e73cb4311efde4db1efb31ab78a3e5c457939ad5cc0bed" dependencies = [ "rustls-pki-types", ] @@ -4430,6 +5008,88 @@ name = "wit-bindgen" version = "0.51.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d7249219f66ced02969388cf2bb044a09756a083d0fab1e566056b04d9fbcaa5" +dependencies = [ + "wit-bindgen-rust-macro", +] + +[[package]] +name = "wit-bindgen-core" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea61de684c3ea68cb082b7a88508a8b27fcc8b797d738bfc99a82facf1d752dc" +dependencies = [ + "anyhow", + "heck", + "wit-parser", +] + +[[package]] +name = "wit-bindgen-rust" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7c566e0f4b284dd6561c786d9cb0142da491f46a9fbed79ea69cdad5db17f21" +dependencies = [ + "anyhow", + "heck", + "indexmap 2.13.0", + "prettyplease", + "syn", + "wasm-metadata", + "wit-bindgen-core", + "wit-component", +] + +[[package]] +name = "wit-bindgen-rust-macro" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"0c0f9bfd77e6a48eccf51359e3ae77140a7f50b1e2ebfe62422d8afdaffab17a" +dependencies = [ + "anyhow", + "prettyplease", + "proc-macro2", + "quote", + "syn", + "wit-bindgen-core", + "wit-bindgen-rust", +] + +[[package]] +name = "wit-component" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d66ea20e9553b30172b5e831994e35fbde2d165325bec84fc43dbf6f4eb9cb2" +dependencies = [ + "anyhow", + "bitflags 2.11.0", + "indexmap 2.13.0", + "log", + "serde", + "serde_derive", + "serde_json", + "wasm-encoder", + "wasm-metadata", + "wasmparser", + "wit-parser", +] + +[[package]] +name = "wit-parser" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ecc8ac4bc1dc3381b7f59c34f00b67e18f910c2c0f50015669dde7def656a736" +dependencies = [ + "anyhow", + "id-arena", + "indexmap 2.13.0", + "log", + "semver", + "serde", + "serde_derive", + "serde_json", + "unicode-xid", + "wasmparser", +] [[package]] name = "writeable" @@ -4500,18 +5160,18 @@ dependencies = [ [[package]] name = "zerocopy" -version = "0.8.33" +version = "0.8.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "668f5168d10b9ee831de31933dc111a459c97ec93225beb307aed970d1372dfd" +checksum = "a789c6e490b576db9f7e6b6d661bcc9799f7c0ac8352f56ea20193b2681532e5" dependencies = [ "zerocopy-derive", ] [[package]] name = "zerocopy-derive" -version = "0.8.33" +version = "0.8.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c7962b26b0a8685668b671ee4b54d007a67d4eaf05fda79ac0ecf41e32270f1" +checksum = "f65c489a7071a749c849713807783f70672b28094011623e200cb86dcb835953" dependencies = [ "proc-macro2", "quote", @@ -4580,6 +5240,6 @@ dependencies = [ [[package]] name = "zmij" -version = "1.0.16" +version = "1.0.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dfcd145825aace48cff44a8844de64bf75feec3080e0aa5cdbde72961ae51a65" +checksum = 
"b8848ee67ecc8aedbaf3e4122217aff892639231befc6a1b58d29fff4c2cabaa" diff --git a/Cargo.toml b/Cargo.toml index e2828cb2..32a9628b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -83,6 +83,9 @@ indexmap = "2.2.3" itertools = "0.13.0" xshell = "0.2.6" +lsp-server = "0.7.6" +lsp-types = "0.97.0" + regex = "1.10" lru = "0.16.0" @@ -152,7 +155,7 @@ similar_names = "allow" # Pre-existing issues in jrsonnet crates unnecessary_semicolon = "allow" too_long_first_doc_paragraph = "allow" -map_or_unwrap = "allow" +map_unwrap_or = "allow" option_if_let_else = "allow" manual_repeat_n = "allow" mem_replace_option_with_some = "allow" diff --git a/bindings/jsonnet/src/import.rs b/bindings/jsonnet/src/import.rs index 10473336..7c328a8d 100644 --- a/bindings/jsonnet/src/import.rs +++ b/bindings/jsonnet/src/import.rs @@ -65,8 +65,8 @@ impl ImportResolver for CallbackImportResolver { base.as_ptr(), rel.as_ptr(), &mut found_here.cast_const(), - &mut buf, - &mut buf_len, + &raw mut buf, + &raw mut buf_len, ) }; let buf_slice: &[u8] = unsafe { std::slice::from_raw_parts(buf.cast(), buf_len) }; diff --git a/bindings/jsonnet/src/native.rs b/bindings/jsonnet/src/native.rs index 3f56df87..0b462653 100644 --- a/bindings/jsonnet/src/native.rs +++ b/bindings/jsonnet/src/native.rs @@ -43,7 +43,7 @@ impl NativeCallbackHandler for JsonnetNativeCallbackHandler { } n_args.push(None); let mut success = 1; - let v = unsafe { (self.cb)(self.ctx, n_args.as_ptr().cast(), &mut success) }; + let v = unsafe { (self.cb)(self.ctx, n_args.as_ptr().cast(), &raw mut success) }; let v = unsafe { *Box::from_raw(v) }; if success == 1 { Ok(v) diff --git a/cmds/jrsonnet-fmt/src/comments.rs b/cmds/jrsonnet-fmt/src/comments.rs index dba094ea..4fd0c70d 100644 --- a/cmds/jrsonnet-fmt/src/comments.rs +++ b/cmds/jrsonnet-fmt/src/comments.rs @@ -3,7 +3,11 @@ use std::string::String; use dprint_core::formatting::PrintItems; use jrsonnet_rowan_parser::{nodes::TriviaKind, AstToken}; -use crate::{children::ChildTrivia, p, pi}; +use 
crate::{ + children::ChildTrivia, + context::{CommentStyle, FormatContext}, + macros::{p, pi}, +}; pub enum CommentLocation { /// Above local, field, other things @@ -15,19 +19,24 @@ pub enum CommentLocation { } #[allow(clippy::too_many_lines, clippy::cognitive_complexity)] -pub fn format_comments(comments: &ChildTrivia, loc: CommentLocation, out: &mut PrintItems) { +pub fn format_comments( + comments: &ChildTrivia, + loc: CommentLocation, + out: &mut PrintItems, + ctx: &FormatContext, +) { for c in comments { let Ok(c) = c else { let mut text = c.as_ref().unwrap_err() as &str; while !text.is_empty() { let pos = text.find(['\n', '\t']).unwrap_or(text.len()); let sliced = &text[..pos]; - p!(out, string(sliced.to_string())); + p!(out, ctx, string(sliced.to_string())); text = &text[pos..]; if !text.is_empty() { match text.as_bytes()[0] { - b'\n' => p!(out, nl), - b'\t' => p!(out, tab), + b'\n' => p!(out, ctx, nl), + b'\t' => p!(out, ctx, tab), _ => unreachable!(), } text = &text[1..]; @@ -70,9 +79,9 @@ pub fn format_comments(comments: &ChildTrivia, loc: CommentLocation, out: &mut P } if lines.len() == 1 && !doc { if matches!(loc, CommentLocation::ItemInline) { - p!(out, str(" ")); + p!(out, ctx, str(" ")); } - p!(out, str("/* ") string(lines[0].trim().to_string()) str(" */") nl); + p!(out, ctx, str("/* ") string(lines[0].trim().to_string()) str(" */") nl); } else if !lines.is_empty() { fn common_ws_prefix<'a>(a: &'a str, b: &str) -> &'a str { let offset = a @@ -107,36 +116,36 @@ pub fn format_comments(comments: &ChildTrivia, loc: CommentLocation, out: &mut P .to_string(); } - p!(out, str("/*")); + p!(out, ctx, str("/*")); if doc { - p!(out, str("*")); + p!(out, ctx, str("*")); } - p!(out, nl); + p!(out, ctx, nl); for mut line in lines { if doc { - p!(out, str(" *")); + p!(out, ctx, str(" *")); } if line.is_empty() { - p!(out, nl); + p!(out, ctx, nl); } else { if doc { - p!(out, str(" ")); + p!(out, ctx, str(" ")); } while let Some(new_line) = line.strip_prefix('\t') { 
if doc { - p!(out, str(" ")); + p!(out, ctx, str(" ")); } else { - p!(out, tab); + p!(out, ctx, tab); } line = new_line.to_string(); } - p!(out, string(line.to_string()) nl); + p!(out, ctx, string(line.to_string()) nl); } } if doc { - p!(out, str(" ")); + p!(out, ctx, str(" ")); } - p!(out, str("*/") nl); + p!(out, ctx, str("*/") nl); } } // TODO: Keep common padding for multiple continous lines of single-line comments @@ -156,26 +165,44 @@ pub fn format_comments(comments: &ChildTrivia, loc: CommentLocation, out: &mut P // # Line2 // ``` TriviaKind::SingleLineHashComment => { + let text = c + .text() + .strip_prefix('#') + .expect("hash comment starts with #") + .trim(); + let prefix = match ctx.opts.comment_style { + CommentStyle::Slash => "// ", + CommentStyle::Hash | CommentStyle::Leave => "# ", + }; if matches!(loc, CommentLocation::ItemInline) { - p!(out, str(" ")); + p!(out, ctx, str(" ")); } - p!(out, str("# ") string(c.text().strip_prefix('#').expect("hash comment starts with #").trim().to_string())); + p!(out, ctx, str(prefix) string(text.to_string())); if !matches!(loc, CommentLocation::ItemInline) { - p!(out, nl); + p!(out, ctx, nl); } } TriviaKind::SingleLineSlashComment => { + let text = c + .text() + .strip_prefix("//") + .expect("comment starts with //") + .trim(); + let prefix = match ctx.opts.comment_style { + CommentStyle::Hash => "# ", + CommentStyle::Slash | CommentStyle::Leave => "// ", + }; if matches!(loc, CommentLocation::ItemInline) { - p!(out, str(" ")); + p!(out, ctx, str(" ")); } - p!(out, str("// ") string(c.text().strip_prefix("//").expect("comment starts with //").trim().to_string())); + p!(out, ctx, str(prefix) string(text.to_string())); if !matches!(loc, CommentLocation::ItemInline) { - p!(out, nl); + p!(out, ctx, nl); } } // Garbage in - garbage out - TriviaKind::ErrorCommentTooShort => p!(out, str("/*/")), - TriviaKind::ErrorCommentUnterminated => p!(out, string(c.text().to_string())), + TriviaKind::ErrorCommentTooShort => p!(out, 
ctx, str("/*/")), + TriviaKind::ErrorCommentUnterminated => p!(out, ctx, string(c.text().to_string())), } } } diff --git a/cmds/jrsonnet-fmt/src/context.rs b/cmds/jrsonnet-fmt/src/context.rs new file mode 100644 index 00000000..e5c93b43 --- /dev/null +++ b/cmds/jrsonnet-fmt/src/context.rs @@ -0,0 +1,84 @@ +//! Formatting context and options. + +use dprint_core::formatting::PrintItems; + +/// Comment style for formatting. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)] +pub enum CommentStyle { + /// Convert all comments to hash-style (#). + Hash, + /// Convert all comments to slash-style (//). + Slash, + /// Leave comments as-is. + #[default] + Leave, +} + +/// String literal style for formatting. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)] +pub enum StringStyle { + /// Convert all strings to double quotes. + Double, + /// Convert all strings to single quotes. + Single, + /// Leave strings as-is. + #[default] + Leave, +} + +/// Formatting options that control output style. +#[derive(Debug, Clone)] +pub struct FormatOptions { + /// 0 for hard tabs, otherwise number of spaces. + pub indent: u8, + /// Maximum consecutive blank lines (default: 2). + pub max_blank_lines: u8, + /// Comment style conversion. + pub comment_style: CommentStyle, + /// String literal style conversion. + pub string_style: StringStyle, + /// Add padding inside arrays: [ x, y ] vs [x, y]. + pub pad_arrays: bool, + /// Add padding inside objects: { x: 1 } vs {x: 1}. + pub pad_objects: bool, + /// Use pretty field names (unquoted when possible). + pub pretty_field_names: bool, +} + +impl Default for FormatOptions { + fn default() -> Self { + Self { + indent: 0, + max_blank_lines: 2, + comment_style: CommentStyle::default(), + string_style: StringStyle::default(), + pad_arrays: false, + pad_objects: true, + pretty_field_names: true, + } + } +} + +/// Context passed through the formatting process. 
+#[derive(Debug, Clone)] +pub struct FormatContext { + pub opts: FormatOptions, +} + +impl FormatContext { + pub fn new(opts: FormatOptions) -> Self { + Self { opts } + } + + /// Emit a blank line if condition is true and max_blank_lines allows it. + /// This is used to preserve source blank lines between items. + pub fn emit_blank_line_if(&self, condition: bool, out: &mut PrintItems) { + if !condition { + return; + } + if self.opts.max_blank_lines == 0 { + return; + } + out.push_signal(dprint_core::formatting::Signal::NewLine); + } +} diff --git a/cmds/jrsonnet-fmt/src/macros.rs b/cmds/jrsonnet-fmt/src/macros.rs new file mode 100644 index 00000000..a769f25e --- /dev/null +++ b/cmds/jrsonnet-fmt/src/macros.rs @@ -0,0 +1,128 @@ +//! Formatting macros for building PrintItems. + +/// Create PrintItems with formatting DSL. +/// +/// Usage: `pi!(@i; ctx; str("text") nl {node} ...)` +macro_rules! pi { + (@i; $ctx:expr; $($t:tt)*) => {{ + #[allow(unused_mut)] + let mut o = dprint_core::formatting::PrintItems::new(); + let __ctx = $ctx; + pi!(@s; o, __ctx: $($t)*); + o + }}; + (@s; $o:ident, $ctx:ident: str($e:expr $(,)?) $($t:tt)*) => {{ + $o.push_str($e); + pi!(@s; $o, $ctx: $($t)*); + }}; + (@s; $o:ident, $ctx:ident: string($e:expr $(,)?) 
$($t:tt)*) => {{ + $o.push_string($e); + pi!(@s; $o, $ctx: $($t)*); + }}; + (@s; $o:ident, $ctx:ident: nl $($t:tt)*) => {{ + $o.push_signal(dprint_core::formatting::Signal::NewLine); + pi!(@s; $o, $ctx: $($t)*); + }}; + (@s; $o:ident, $ctx:ident: tab $($t:tt)*) => {{ + $o.push_signal(dprint_core::formatting::Signal::Tab); + pi!(@s; $o, $ctx: $($t)*); + }}; + (@s; $o:ident, $ctx:ident: >i $($t:tt)*) => {{ + $o.push_signal(dprint_core::formatting::Signal::StartIndent); + pi!(@s; $o, $ctx: $($t)*); + }}; + (@s; $o:ident, $ctx:ident: {{ + $o.push_signal(dprint_core::formatting::Signal::FinishIndent); + pi!(@s; $o, $ctx: $($t)*); + }}; + (@s; $o:ident, $ctx:ident: info($v:expr) $($t:tt)*) => {{ + $o.push_info($v); + pi!(@s; $o, $ctx: $($t)*); + }}; + (@s; $o:ident, $ctx:ident: if($s:literal, $cond:expr, $($i:tt)*) $($t:tt)*) => {{ + use dprint_core::formatting::PrintItems; + let __ctx = $ctx; + $o.push_condition(dprint_core::formatting::conditions::if_true( + $s, + $cond.clone(), + { + let mut o = PrintItems::new(); + p!(o, __ctx, $($i)*); + o + }, + )); + pi!(@s; $o, __ctx: $($t)*); + }}; + (@s; $o:ident, $ctx:ident: if_else($s:literal, $cond:expr, $($i:tt)*)($($e:tt)+) $($t:tt)*) => {{ + use dprint_core::formatting::PrintItems; + let __ctx = $ctx; + $o.push_condition(dprint_core::formatting::conditions::if_true_or( + $s, + $cond.clone(), + { + let mut o = PrintItems::new(); + p!(o, __ctx, $($i)*); + o + }, + { + let mut o = PrintItems::new(); + p!(o, __ctx, $($e)*); + o + }, + )); + pi!(@s; $o, __ctx: $($t)*); + }}; + (@s; $o:ident, $ctx:ident: if_not($s:literal, $cond:expr, $($e:tt)*) $($t:tt)*) => {{ + use dprint_core::formatting::PrintItems; + let __ctx = $ctx; + $o.push_condition(dprint_core::formatting::conditions::if_true_or( + $s, + $cond.clone(), + { + let o = PrintItems::new(); + o + }, + { + let mut o = PrintItems::new(); + p!(o, __ctx, $($e)*); + o + }, + )); + pi!(@s; $o, __ctx: $($t)*); + }}; + (@s; $o:ident, $ctx:ident: {$expr:expr} $($t:tt)*) => {{ + 
$expr.print($o, $ctx); + pi!(@s; $o, $ctx: $($t)*); + }}; + (@s; $o:ident, $ctx:ident: items($expr:expr) $($t:tt)*) => {{ + $o.extend($expr); + pi!(@s; $o, $ctx: $($t)*); + }}; + (@s; $o:ident, $ctx:ident: if ($e:expr)($($then:tt)*) $($t:tt)*) => {{ + if $e { + pi!(@s; $o, $ctx: $($then)*); + } + pi!(@s; $o, $ctx: $($t)*); + }}; + (@s; $o:ident, $ctx:ident: ifelse ($e:expr)($($then:tt)*)($($else:tt)*) $($t:tt)*) => {{ + if $e { + pi!(@s; $o, $ctx: $($then)*); + } else { + pi!(@s; $o, $ctx: $($else)*); + } + pi!(@s; $o, $ctx: $($t)*); + }}; + (@s; $i:ident, $ctx:ident:) => {} +} + +/// Push to existing PrintItems with formatting DSL. +/// +/// Usage: `p!(out, ctx, str("text") nl {node} ...)` +macro_rules! p { + ($o:ident, $ctx:ident, $($t:tt)*) => { + pi!(@s; $o, $ctx: $($t)*) + }; +} + +pub(crate) use p; +pub(crate) use pi; diff --git a/cmds/jrsonnet-fmt/src/main.rs b/cmds/jrsonnet-fmt/src/main.rs index f6099f70..1bd6f9d0 100644 --- a/cmds/jrsonnet-fmt/src/main.rs +++ b/cmds/jrsonnet-fmt/src/main.rs @@ -1,710 +1,121 @@ +//! jrsonnet-fmt: Jsonnet code formatter +//! +//! A formatter for Jsonnet code using dprint-core as the formatting engine. 
+ use std::{ - any::type_name, fs, io::{self, Write}, path::PathBuf, process, - rc::Rc, }; -use children::{children_between, trivia_before}; use clap::Parser; -use dprint_core::formatting::{ - condition_helpers::is_multiple_lines, condition_resolvers::true_resolver, - ConditionResolverContext, LineNumber, PrintItems, PrintOptions, -}; +use dprint_core::formatting::{PrintItems, PrintOptions}; use hi_doc::Formatting; -use jrsonnet_rowan_parser::{ - nodes::{ - Arg, ArgsDesc, Assertion, BinaryOperator, Bind, CompSpec, Destruct, DestructArrayPart, - DestructRest, Expr, ExprBase, FieldName, ForSpec, IfSpec, ImportKind, Literal, Member, - Name, Number, ObjBody, ObjLocal, ParamsDesc, SliceDesc, SourceFile, Stmt, Suffix, Text, - UnaryOperator, Visibility, - }, - AstNode, AstToken as _, SyntaxToken, -}; - -use crate::{ - children::trivia_after, - comments::{format_comments, CommentLocation}, -}; mod children; mod comments; +mod context; +mod macros; +mod printable; #[cfg(test)] mod tests; -pub trait Printable { - fn print(&self, out: &mut PrintItems); -} - -macro_rules! pi { - (@i; $($t:tt)*) => {{ - #[allow(unused_mut)] - let mut o = dprint_core::formatting::PrintItems::new(); - pi!(@s; o: $($t)*); - o - }}; - (@s; $o:ident: str($e:expr $(,)?) $($t:tt)*) => {{ - $o.push_str($e); - pi!(@s; $o: $($t)*); - }}; - (@s; $o:ident: string($e:expr $(,)?) 
$($t:tt)*) => {{ - $o.push_string($e); - pi!(@s; $o: $($t)*); - }}; - (@s; $o:ident: nl $($t:tt)*) => {{ - $o.push_signal(dprint_core::formatting::Signal::NewLine); - pi!(@s; $o: $($t)*); - }}; - (@s; $o:ident: tab $($t:tt)*) => {{ - $o.push_signal(dprint_core::formatting::Signal::Tab); - pi!(@s; $o: $($t)*); - }}; - (@s; $o:ident: >i $($t:tt)*) => {{ - $o.push_signal(dprint_core::formatting::Signal::StartIndent); - pi!(@s; $o: $($t)*); - }}; - (@s; $o:ident: {{ - $o.push_signal(dprint_core::formatting::Signal::FinishIndent); - pi!(@s; $o: $($t)*); - }}; - (@s; $o:ident: info($v:expr) $($t:tt)*) => {{ - $o.push_info($v); - pi!(@s; $o: $($t)*); - }}; - (@s; $o:ident: if($s:literal, $cond:expr, $($i:tt)*) $($t:tt)*) => {{ - $o.push_condition(dprint_core::formatting::conditions::if_true( - $s, - $cond.clone(), - { - let mut o = PrintItems::new(); - p!(o, $($i)*); - o - }, - )); - pi!(@s; $o: $($t)*); - }}; - (@s; $o:ident: if_else($s:literal, $cond:expr, $($i:tt)*)($($e:tt)+) $($t:tt)*) => {{ - $o.push_condition(dprint_core::formatting::conditions::if_true_or( - $s, - $cond.clone(), - { - let mut o = PrintItems::new(); - p!(o, $($i)*); - o - }, - { - let mut o = PrintItems::new(); - p!(o, $($e)*); - o - }, - )); - pi!(@s; $o: $($t)*); - }}; - (@s; $o:ident: if_not($s:literal, $cond:expr, $($e:tt)*) $($t:tt)*) => {{ - $o.push_condition(dprint_core::formatting::conditions::if_true_or( - $s, - $cond.clone(), - { - let o = PrintItems::new(); - o - }, - { - let mut o = PrintItems::new(); - p!(o, $($e)*); - o - }, - )); - pi!(@s; $o: $($t)*); - }}; - (@s; $o:ident: {$expr:expr} $($t:tt)*) => {{ - $expr.print($o); - pi!(@s; $o: $($t)*); - }}; - (@s; $o:ident: items($expr:expr) $($t:tt)*) => {{ - $o.extend($expr); - pi!(@s; $o: $($t)*); - }}; - (@s; $o:ident: if ($e:expr)($($then:tt)*) $($t:tt)*) => {{ - if $e { - pi!(@s; $o: $($then)*); - } - pi!(@s; $o: $($t)*); - }}; - (@s; $o:ident: ifelse ($e:expr)($($then:tt)*)($($else:tt)*) $($t:tt)*) => {{ - if $e { - pi!(@s; $o: 
$($then)*); - } else { - pi!(@s; $o: $($else)*); - } - pi!(@s; $o: $($t)*); - }}; - (@s; $i:ident:) => {} -} -macro_rules! p { - ($o:ident, $($t:tt)*) => { - pi!(@s; $o: $($t)*) - }; -} -pub(crate) use p; -pub(crate) use pi; - -impl

Printable for Option

-where - P: Printable, -{ - fn print(&self, out: &mut PrintItems) { - if let Some(v) = self { - v.print(out); - } else { - p!( - out, - string(format!( - "/*missing {}*/", - type_name::

().replace("jrsonnet_rowan_parser::generated::nodes::", "") - ),) - ); - } - } -} - -impl Printable for SyntaxToken { - fn print(&self, out: &mut PrintItems) { - p!(out, string(self.to_string())); - } -} - -impl Printable for Text { - fn print(&self, out: &mut PrintItems) { - p!(out, string(format!("{}", self))); - } -} -impl Printable for Number { - fn print(&self, out: &mut PrintItems) { - p!(out, string(format!("{}", self))); - } -} - -impl Printable for Name { - fn print(&self, out: &mut PrintItems) { - p!(out, { self.ident_lit() }); - } -} - -impl Printable for DestructRest { - fn print(&self, out: &mut PrintItems) { - p!(out, str("...")); - if let Some(name) = self.into() { - p!(out, { name }); - } - } -} - -impl Printable for Destruct { - fn print(&self, out: &mut PrintItems) { - match self { - Self::DestructFull(f) => { - p!(out, { f.name() }); - } - Self::DestructSkip(_) => p!(out, str("?")), - Self::DestructArray(a) => { - p!(out, str("[") >i nl); - for el in a.destruct_array_parts() { - match el { - DestructArrayPart::DestructArrayElement(e) => { - p!(out, {e.destruct()} str(",") nl); - } - DestructArrayPart::DestructRest(d) => { - p!(out, {d} str(",") nl); - } - } - } - p!(out, { - p!(out, str("{") >i nl); - for item in o.destruct_object_fields() { - p!(out, { item.field() }); - if let Some(des) = item.destruct() { - p!(out, str(": ") {des}); - } - if let Some(def) = item.expr() { - p!(out, str(" = ") {def}); - } - p!(out, str(",") nl); - } - if let Some(rest) = o.destruct_rest() { - p!(out, {rest} nl); - } - p!(out, { - if let Some(id) = f.id() { - p!(out, { id }); - } else if let Some(str) = f.text() { - p!(out, { str }); - } else { - p!(out, str("/*missing FieldName*/")); - } - } - Self::FieldNameDynamic(d) => { - p!(out, str("[") {d.expr()} str("]")); - } - } +fn parse_string_style(s: &str) -> Result { + match s.to_lowercase().as_str() { + "d" | "double" => Ok(StringStyle::Double), + "s" | "single" => Ok(StringStyle::Single), + "l" | "leave" => 
Ok(StringStyle::Leave), + _ => Err(format!( + "Invalid string style '{}'. Use: d/double, s/single, l/leave", + s + )), } } -impl Printable for Visibility { - fn print(&self, out: &mut PrintItems) { - p!(out, string(self.to_string())); +fn parse_comment_style(s: &str) -> Result { + match s.to_lowercase().as_str() { + "h" | "hash" => Ok(CommentStyle::Hash), + "s" | "slash" => Ok(CommentStyle::Slash), + "l" | "leave" => Ok(CommentStyle::Leave), + _ => Err(format!( + "Invalid comment style '{}'. Use: h/hash, s/slash, l/leave", + s + )), } } -impl Printable for ObjLocal { - fn print(&self, out: &mut PrintItems) { - p!(out, str("local ") {self.bind()}); - } -} +#[derive(Parser)] +#[allow(clippy::struct_excessive_bools)] +struct Opts { + /// Treat input as code, reformat it instead of reading file. + #[clap(long, short = 'e')] + exec: bool, + /// Path to be reformatted if `--exec` if unset, otherwise code itself. + input: String, + /// Replace code with formatted in-place, instead of printing it to stdout. + /// Only applicable if `--exec` is unset. + #[clap(long, short = 'i')] + in_place: bool, -impl Printable for Assertion { - fn print(&self, out: &mut PrintItems) { - p!(out, str("assert ") {self.condition()}); - if self.colon_token().is_some() || self.message().is_some() { - p!(out, str(": ") {self.message()}); - } - } -} + /// Exit with error if formatted does not match input + #[arg(long)] + test: bool, + /// Number of spaces to indent with + /// + /// 0 for guess from input (default), and use hard tabs if unable to guess. 
+ #[arg(long, default_value = "0")] + indent: u8, + /// Force hard tab for indentation + #[arg(long)] + hard_tabs: bool, -impl Printable for ParamsDesc { - fn print(&self, out: &mut PrintItems) { - p!(out, str("(") >i nl); - for param in self.params() { - p!(out, { param.destruct() }); - if param.assign_token().is_some() || param.expr().is_some() { - p!(out, str(" = ") {param.expr()}); - } - p!(out, str(",") nl); - } - p!(out, i nl)); - let (children, end_comments) = children_between::( - self.syntax().clone(), - self.l_paren_token().map(Into::into).as_ref(), - self.r_paren_token().map(Into::into).as_ref(), - None, - ); - let mut args = children.into_iter().peekable(); - while let Some(ele) = args.next() { - if ele.should_start_with_newline { - p!(out, nl); - } - format_comments(&ele.before_trivia, CommentLocation::AboveItem, out); - let arg = ele.value; - if arg.name().is_some() || arg.assign_token().is_some() { - p!(out, {arg.name()} str(" = ")); - } - let comma_between = if args.peek().is_some() { - true_resolver() - } else { - multi_line.clone() - }; - p!(out, {arg.expr()} if("arg comma", comma_between, str(",") if_not("between args", multi_line, str(" ")))); - format_comments(&ele.inline_trivia, CommentLocation::ItemInline, out); - p!(out, if("between args", multi_line, nl)); - } - if end_comments.should_start_with_newline { - p!(out, nl); - } - format_comments(&end_comments.trivia, CommentLocation::EndOfItems, out); - p!(out, if("end args", multi_line, { - p!(out, { b.obj_local() }); - } - Self::MemberAssertStmt(ass) => { - p!(out, { ass.assertion() }); - } - Self::MemberFieldNormal(n) => { - p!(out, {n.field_name()} if(n.plus_token().is_some())({n.plus_token()}) {n.visibility()} str(" ") {n.expr()}); - } - Self::MemberFieldMethod(m) => { - p!(out, {m.field_name()} {m.params_desc()} {m.visibility()} str(" ") {m.expr()}); - } - } - } -} + /// String quote style: d/double, s/single, l/leave (default: l) + #[arg(long, value_parser = parse_string_style, 
default_value = "l")] + string_style: StringStyle, -impl Printable for ObjBody { - fn print(&self, out: &mut PrintItems) { - match self { - Self::ObjBodyComp(l) => { - let (children, mut end_comments) = children_between::( - l.syntax().clone(), - l.l_brace_token().map(Into::into).as_ref(), - Some( - &(l.comp_specs() - .next() - .expect("at least one spec is defined") - .syntax() - .clone()) - .into(), - ), - None, - ); - let trailing_for_comp = end_comments.extract_trailing(); - p!(out, str("{") >i nl); - for mem in children { - if mem.should_start_with_newline { - p!(out, nl); - } - format_comments(&mem.before_trivia, CommentLocation::AboveItem, out); - p!(out, {mem.value} str(",")); - format_comments(&mem.inline_trivia, CommentLocation::ItemInline, out); - p!(out, nl); - } + /// Comment style: h/hash, s/slash, l/leave (default: l) + #[arg(long, value_parser = parse_comment_style, default_value = "l")] + comment_style: CommentStyle, - if end_comments.should_start_with_newline { - p!(out, nl); - } - format_comments(&end_comments.trivia, CommentLocation::EndOfItems, out); + /// Add padding inside arrays: [ x, y ] instead of [x, y] + #[arg(long)] + pad_arrays: bool, - let (compspecs, end_comments) = children_between::( - l.syntax().clone(), - l.member_comps() - .last() - .map(|m| m.syntax().clone()) - .map(Into::into) - .or_else(|| l.l_brace_token().map(Into::into)) - .as_ref(), - l.r_brace_token().map(Into::into).as_ref(), - Some(trailing_for_comp), - ); - for mem in compspecs { - if mem.should_start_with_newline { - p!(out, nl); - } - format_comments(&mem.before_trivia, CommentLocation::AboveItem, out); - p!(out, { mem.value }); - format_comments(&mem.inline_trivia, CommentLocation::ItemInline, out); - } - if end_comments.should_start_with_newline { - p!(out, nl); - } - format_comments(&end_comments.trivia, CommentLocation::EndOfItems, out); + /// Remove padding inside objects: {x: 1} instead of { x: 1 } + #[arg(long)] + no_pad_objects: bool, - p!(out, nl { - let 
(children, end_comments) = children_between::( - l.syntax().clone(), - l.l_brace_token().map(Into::into).as_ref(), - l.r_brace_token().map(Into::into).as_ref(), - None, - ); - if children.is_empty() && end_comments.is_empty() { - p!(out, str("{ }")); - return; - } - p!(out, str("{") >i nl); - for (i, mem) in children.into_iter().enumerate() { - if mem.should_start_with_newline && i != 0 { - p!(out, nl); - } - format_comments(&mem.before_trivia, CommentLocation::AboveItem, out); - p!(out, {mem.value} str(",")); - format_comments(&mem.inline_trivia, CommentLocation::ItemInline, out); - p!(out, nl); - } + /// Don't use pretty (unquoted) field names + #[arg(long)] + no_pretty_field_names: bool, - if end_comments.should_start_with_newline { - p!(out, nl); - } - format_comments(&end_comments.trivia, CommentLocation::EndOfItems, out); - p!(out, { - p!(out, {d.into()} str(" = ") {d.value()}); - } - Self::BindFunction(f) => { - p!(out, {f.name()} {f.params()} str(" = ") {f.value()}); - } - } - } -} -impl Printable for Literal { - fn print(&self, out: &mut PrintItems) { - p!(out, string(self.syntax().to_string())); - } -} -impl Printable for ImportKind { - fn print(&self, out: &mut PrintItems) { - p!(out, string(self.syntax().to_string())); - } -} -impl Printable for ForSpec { - fn print(&self, out: &mut PrintItems) { - p!(out, str("for ") {self.bind()} str(" in ") {self.expr()}); - } -} -impl Printable for IfSpec { - fn print(&self, out: &mut PrintItems) { - p!(out, str("if ") {self.expr()}); - } -} -impl Printable for CompSpec { - fn print(&self, out: &mut PrintItems) { - match self { - Self::ForSpec(f) => f.print(out), - Self::IfSpec(i) => i.print(out), - } - } -} -impl Printable for Expr { - fn print(&self, out: &mut PrintItems) { - let (stmts, _ending) = children_between::( - self.syntax().clone(), - None, - self.expr_base() - .as_ref() - .map(ExprBase::syntax) - .cloned() - .map(Into::into) - .as_ref(), - None, - ); - for stmt in stmts { - p!(out, { stmt.value }); - } 
- p!(out, { self.expr_base() }); - let (suffixes, _ending) = children_between::( - self.syntax().clone(), - self.expr_base() - .as_ref() - .map(ExprBase::syntax) - .cloned() - .map(Into::into) - .as_ref(), - None, - None, - ); - for suffix in suffixes { - p!(out, { suffix.value }); - } - } -} -impl Printable for Suffix { - fn print(&self, out: &mut PrintItems) { - match self { - Self::SuffixIndex(i) => { - if i.question_mark_token().is_some() { - p!(out, str("?")); - } - p!(out, str(".") {i.index()}); - } - Self::SuffixIndexExpr(e) => { - if e.question_mark_token().is_some() { - p!(out, str(".?")); - } - p!(out, str("[") {e.index()} str("]")); - } - Self::SuffixSlice(d) => { - p!(out, { d.slice_desc() }); - } - Self::SuffixApply(a) => { - p!(out, { a.args_desc() }); - } - } - } -} -impl Printable for Stmt { - fn print(&self, out: &mut PrintItems) { - match self { - Self::StmtLocal(l) => { - let (binds, end_comments) = children_between::( - l.syntax().clone(), - l.local_kw_token().map(Into::into).as_ref(), - l.semi_token().map(Into::into).as_ref(), - None, - ); - if binds.len() == 1 { - let bind = &binds[0]; - format_comments(&bind.before_trivia, CommentLocation::AboveItem, out); - p!(out, str("local ") {bind.value}); - // TODO: keep end_comments, child.inline_trivia somehow, force multiple locals formatting in case of presence? 
- } else { - p!(out,str("local") >i nl); - for bind in binds { - if bind.should_start_with_newline { - p!(out, nl); - } - format_comments(&bind.before_trivia, CommentLocation::AboveItem, out); - p!(out, {bind.value} str(",")); - format_comments(&bind.inline_trivia, CommentLocation::ItemInline, out); - p!(out, nl); - } - if end_comments.should_start_with_newline { - p!(out, nl); - } - format_comments(&end_comments.trivia, CommentLocation::EndOfItems, out); - p!(out, { - p!(out, {a.assertion()} str(";") nl); - } - } - } -} -impl Printable for ExprBase { - fn print(&self, out: &mut PrintItems) { - match self { - Self::ExprBinary(b) => { - p!(out, {b.lhs_work()} str(" ") {b.binary_operator()} str(" ") {b.rhs_work()}); - } - Self::ExprUnary(u) => p!(out, {u.unary_operator()} {u.rhs()}), - // Self::ExprSlice(s) => { - // p!(new: {s.expr()} {s.slice_desc()}) - // } - // Self::ExprIndex(i) => { - // p!(new: {i.expr()} str(".") {i.index()}) - // } - // Self::ExprIndexExpr(i) => p!(new: {i.base()} str("[") {i.index()} str("]")), - // Self::ExprApply(a) => { - // let mut pi = p!(new: {a.expr()} {a.args_desc()}); - // if a.tailstrict_kw_token().is_some() { - // p!(out,str(" tailstrict")); - // } - // pi - // } - Self::ExprObjExtend(ex) => { - p!(out, {ex.lhs_work()} str(" ") {ex.rhs_work()}); - } - Self::ExprParened(p) => { - p!(out, str("(") {p.expr()} str(")")); - } - Self::ExprString(s) => p!(out, { s.text() }), - Self::ExprNumber(n) => p!(out, { n.number() }), - Self::ExprArray(a) => { - p!(out, str("[") >i nl); - for el in a.exprs() { - p!(out, {el} str(",") nl); - } - p!(out, { - p!(out, { obj.obj_body() }); - } - Self::ExprArrayComp(arr) => { - p!(out, str("[") {arr.expr()}); - for spec in arr.comp_specs() { - p!(out, str(" ") {spec}); - } - p!(out, str("]")); - } - Self::ExprImport(v) => { - p!(out, {v.import_kind()} str(" ") {v.text()}); - } - Self::ExprVar(n) => p!(out, { n.name() }), - // Self::ExprLocal(l) => { - // } - Self::ExprIfThenElse(ite) => { - p!(out, 
str("if ") {ite.cond()} str(" then ") {ite.then().map(|t| t.expr())}); - if ite.else_kw_token().is_some() || ite.else_().is_some() { - p!(out, str(" else ") {ite.else_().map(|t| t.expr())}); - } - } - Self::ExprFunction(f) => p!(out, str("function") {f.params_desc()} nl {f.expr()}), - // Self::ExprAssert(a) => p!(new: {a.assertion()} str("; ") {a.expr()}), - Self::ExprError(e) => p!(out, str("error ") {e.expr()}), - Self::ExprLiteral(l) => { - p!(out, { l.literal() }); - } - } - } + /// Debug option: how many times to call reformatting in case of unstable dprint output resolution. + /// + /// 0 for not retrying to reformat. + #[arg(long, default_value = "0")] + conv_limit: usize, } -impl Printable for SourceFile { - fn print(&self, out: &mut PrintItems) { - let before = trivia_before( - self.syntax().clone(), - self.expr() - .map(|e| e.syntax().clone()) - .map(Into::into) - .as_ref(), - ); - let after = trivia_after( - self.syntax().clone(), - self.expr() - .map(|e| e.syntax().clone()) - .map(Into::into) - .as_ref(), - ); - format_comments(&before, CommentLocation::AboveItem, out); - p!(out, {self.expr()} nl); - format_comments(&after, CommentLocation::EndOfItems, out); - } +#[derive(thiserror::Error, Debug)] +enum Error { + #[error("--in-place is incompatible with --exec")] + InPlaceExec, + #[error("io: {0}")] + Io(#[from] io::Error), + #[error("persist: {0}")] + Persist(#[from] tempfile::PersistError), + #[error("parsing failed, refusing to reformat corrupted input")] + Parse, } -struct FormatOptions { - // 0 for hard tabs - indent: u8, -} fn format(input: &str, opts: &FormatOptions) -> Option { let (parsed, errors) = jrsonnet_rowan_parser::parse(input); if !errors.is_empty() { @@ -731,10 +142,13 @@ fn format(input: &str, opts: &FormatOptions) -> Option { // TODO: Verify how formatter interacts in cases of missing positional values, i.e `if cond then /*missing Expr*/ else residual`. 
return None; } + + let ctx = FormatContext::new(opts.clone()); + Some(dprint_core::formatting::format( || { let mut out = PrintItems::new(); - parsed.print(&mut out); + parsed.print(&mut out, &ctx); out }, PrintOptions { @@ -751,53 +165,7 @@ fn format(input: &str, opts: &FormatOptions) -> Option { )) } -#[derive(Parser)] -#[allow(clippy::struct_excessive_bools)] -struct Opts { - /// Treat input as code, reformat it instead of reading file. - #[clap(long, short = 'e')] - exec: bool, - /// Path to be reformatted if `--exec` if unset, otherwise code itself. - input: String, - /// Replace code with formatted in-place, instead of printing it to stdout. - /// Only applicable if `--exec` is unset. - #[clap(long, short = 'i')] - in_place: bool, - - /// Exit with error if formatted does not match input - #[arg(long)] - test: bool, - /// Number of spaces to indent with - /// - /// 0 for guess from input (default), and use hard tabs if unable to guess. - #[arg(long, default_value = "0")] - indent: u8, - /// Force hard tab for indentation - #[arg(long)] - hard_tabs: bool, - - /// Debug option: how many times to call reformatting in case of unstable dprint output resolution. - /// - /// 0 for not retrying to reformat. 
- #[arg(long, default_value = "0")] - conv_limit: usize, -} - -#[derive(thiserror::Error, Debug)] -enum Error { - #[error("--in-place is incompatible with --exec")] - InPlaceExec, - #[error("io: {0}")] - Io(#[from] io::Error), - #[error("persist: {0}")] - Persist(#[from] tempfile::PersistError), - #[error("parsing failed, refusing to reformat corrupted input")] - Parse, -} - fn main_result() -> Result<(), Error> { - eprintln!("jrsonnet-fmt is a prototype of a jsonnet code formatter, do not expect it to produce meaningful results right now."); - eprintln!("It is not expected for its output to match other implementations, it will be completly separate implementation with maybe different name."); let mut opts = Opts::parse(); let input = if opts.exec { if opts.in_place { @@ -814,21 +182,26 @@ fn main_result() -> Result<(), Error> { opts.hard_tabs = true; } + let format_opts = FormatOptions { + indent: if opts.indent == 0 || opts.hard_tabs { + 0 + } else { + opts.indent + }, + max_blank_lines: opts.max_blank_lines, + comment_style: opts.comment_style, + string_style: opts.string_style, + pad_arrays: opts.pad_arrays, + pad_objects: !opts.no_pad_objects, + pretty_field_names: !opts.no_pretty_field_names, + }; + let mut iteration = 0; let mut formatted = input.clone(); let mut convergence_tmp; // https://github.com/dprint/dprint/pull/423 loop { - let Some(reformatted) = format( - &formatted, - &FormatOptions { - indent: if opts.indent == 0 || opts.hard_tabs { - 0 - } else { - opts.indent - }, - }, - ) else { + let Some(reformatted) = format(&formatted, &format_opts) else { return Err(Error::Parse); }; convergence_tmp = reformatted.trim().to_owned(); diff --git a/cmds/jrsonnet-fmt/src/printable.rs b/cmds/jrsonnet-fmt/src/printable.rs new file mode 100644 index 00000000..9ca077f1 --- /dev/null +++ b/cmds/jrsonnet-fmt/src/printable.rs @@ -0,0 +1,614 @@ +//! Printable trait and implementations for AST nodes. 
+ +use std::{any::type_name, rc::Rc}; + +use dprint_core::formatting::{ + condition_helpers::is_multiple_lines, condition_resolvers::true_resolver, + ConditionResolverContext, LineNumber, PrintItems, +}; +use jrsonnet_rowan_parser::{ + nodes::{ + Arg, ArgsDesc, Assertion, BinaryOperator, Bind, CompSpec, Destruct, DestructArrayPart, + DestructRest, Expr, ExprBase, FieldName, ForSpec, IfSpec, ImportKind, Literal, Member, + Name, Number, ObjBody, ObjLocal, ParamsDesc, SliceDesc, SourceFile, Stmt, Text, TextKind, + UnaryOperator, Visibility, + }, + AstNode, AstToken as _, SyntaxToken, +}; + +use crate::{ + children::{children_between, trivia_after, trivia_before}, + comments::{format_comments, CommentLocation}, + context::{FormatContext, StringStyle}, + macros::{p, pi}, +}; + +/// Trait for AST nodes that can be formatted. +pub trait Printable { + fn print(&self, out: &mut PrintItems, ctx: &FormatContext); +} + +impl

<P> Printable for Option<P>

+where + P: Printable, +{ + fn print(&self, out: &mut PrintItems, ctx: &FormatContext) { + if let Some(v) = self { + v.print(out, ctx); + } else { + p!( + out, + ctx, + string(format!( + "/*missing {}*/", + type_name::
<P>
().replace("jrsonnet_rowan_parser::generated::nodes::", "") + ),) + ); + } + } +} + +impl Printable for SyntaxToken { + fn print(&self, out: &mut PrintItems, ctx: &FormatContext) { + p!(out, ctx, string(self.to_string())); + } +} + +impl Printable for Text { + fn print(&self, out: &mut PrintItems, ctx: &FormatContext) { + let raw = format!("{}", self); + + // Only convert simple single/double quoted strings + let converted = match (self.kind(), ctx.opts.string_style) { + // Leave as-is + (_, StringStyle::Leave) => raw, + // Already in target style + (TextKind::StringDouble, StringStyle::Double) + | (TextKind::StringSingle, StringStyle::Single) => raw, + // Convert single to double + (TextKind::StringSingle, StringStyle::Double) => convert_string_quotes(&raw, '\'', '"'), + // Convert double to single + (TextKind::StringDouble, StringStyle::Single) => convert_string_quotes(&raw, '"', '\''), + // Don't convert verbatim, block, or error strings + _ => raw, + }; + + p!(out, ctx, string(converted)); + } +} + +/// Convert a string from one quote style to another. +/// Handles escape sequences properly. 
+fn convert_string_quotes(s: &str, from_quote: char, to_quote: char) -> String { + // Strip the outer quotes + let inner = &s[1..s.len() - 1]; + + let mut result = String::with_capacity(s.len() + 2); + result.push(to_quote); + + let mut chars = inner.chars().peekable(); + while let Some(c) = chars.next() { + if c == '\\' { + if let Some(&next) = chars.peek() { + if next == from_quote { + // \' or \" - unescape since we're changing quote style + chars.next(); + result.push(from_quote); + } else { + // Keep other escapes as-is + result.push('\\'); + } + } else { + result.push('\\'); + } + } else if c == to_quote { + // Escape the new quote character + result.push('\\'); + result.push(c); + } else { + result.push(c); + } + } + + result.push(to_quote); + result +} +impl Printable for Number { + fn print(&self, out: &mut PrintItems, ctx: &FormatContext) { + p!(out, ctx, string(format!("{}", self))); + } +} + +impl Printable for Name { + fn print(&self, out: &mut PrintItems, ctx: &FormatContext) { + p!(out, ctx, { self.ident_lit() }); + } +} + +impl Printable for DestructRest { + fn print(&self, out: &mut PrintItems, ctx: &FormatContext) { + p!(out, ctx, str("...")); + if let Some(name) = self.into() { + p!(out, ctx, { name }); + } + } +} + +impl Printable for Destruct { + fn print(&self, out: &mut PrintItems, ctx: &FormatContext) { + match self { + Self::DestructFull(f) => { + p!(out, ctx, { f.name() }); + } + Self::DestructSkip(_) => p!(out, ctx, str("?")), + Self::DestructArray(a) => { + p!(out, ctx, str("[") >i nl); + for el in a.destruct_array_parts() { + match el { + DestructArrayPart::DestructArrayElement(e) => { + p!(out, ctx, {e.destruct()} str(",") nl); + } + DestructArrayPart::DestructRest(d) => { + p!(out, ctx, {d} str(",") nl); + } + } + } + p!(out, ctx, { + p!(out, ctx, str("{") >i nl); + for item in o.destruct_object_fields() { + p!(out, ctx, { item.field() }); + if let Some(des) = item.destruct() { + p!(out, ctx, str(": ") {des}); + } + if let Some(def) = 
item.expr() { + p!(out, ctx, str(" = ") {def}); + } + p!(out, ctx, str(",") nl); + } + if let Some(rest) = o.destruct_rest() { + p!(out, ctx, {rest} nl); + } + p!(out, ctx, { + if let Some(id) = f.id() { + p!(out, ctx, { id }); + } else if let Some(str) = f.text() { + p!(out, ctx, { str }); + } else { + p!(out, ctx, str("/*missing FieldName*/")); + } + } + Self::FieldNameDynamic(d) => { + p!(out, ctx, str("[") {d.expr()} str("]")); + } + } + } +} + +impl Printable for Visibility { + fn print(&self, out: &mut PrintItems, ctx: &FormatContext) { + p!(out, ctx, string(self.to_string())); + } +} + +impl Printable for ObjLocal { + fn print(&self, out: &mut PrintItems, ctx: &FormatContext) { + p!(out, ctx, str("local ") {self.bind()}); + } +} + +impl Printable for Assertion { + fn print(&self, out: &mut PrintItems, ctx: &FormatContext) { + p!(out, ctx, str("assert ") {self.condition()}); + if self.colon_token().is_some() || self.message().is_some() { + p!(out, ctx, str(": ") {self.message()}); + } + } +} + +impl Printable for ParamsDesc { + fn print(&self, out: &mut PrintItems, ctx: &FormatContext) { + p!(out, ctx, str("(") >i nl); + for param in self.params() { + p!(out, ctx, { param.destruct() }); + if param.assign_token().is_some() || param.expr().is_some() { + p!(out, ctx, str(" = ") {param.expr()}); + } + p!(out, ctx, str(",") nl); + } + p!(out, ctx, i nl)); + let (children, end_comments) = children_between::( + self.syntax().clone(), + self.l_paren_token().map(Into::into).as_ref(), + self.r_paren_token().map(Into::into).as_ref(), + None, + ); + let mut args = children.into_iter().peekable(); + while let Some(ele) = args.next() { + ctx.emit_blank_line_if(ele.should_start_with_newline, out); + format_comments(&ele.before_trivia, CommentLocation::AboveItem, out, ctx); + let arg = ele.value; + if arg.name().is_some() || arg.assign_token().is_some() { + p!(out, ctx, {arg.name()} str(" = ")); + } + let comma_between = if args.peek().is_some() { + true_resolver() + } else 
{ + multi_line.clone() + }; + p!(out, ctx, {arg.expr()} if("arg comma", comma_between, str(",") if_not("between args", multi_line, str(" ")))); + format_comments(&ele.inline_trivia, CommentLocation::ItemInline, out, ctx); + p!(out, ctx, if("between args", multi_line, nl)); + } + ctx.emit_blank_line_if(end_comments.should_start_with_newline, out); + format_comments(&end_comments.trivia, CommentLocation::EndOfItems, out, ctx); + p!(out, ctx, if("end args", multi_line, { + p!(out, ctx, { b.obj_local() }); + } + Self::MemberAssertStmt(ass) => { + p!(out, ctx, { ass.assertion() }); + } + Self::MemberFieldNormal(n) => { + p!(out, ctx, {n.field_name()} if(n.plus_token().is_some())({n.plus_token()}) {n.visibility()} str(" ") {n.expr()}); + } + Self::MemberFieldMethod(m) => { + p!(out, ctx, {m.field_name()} {m.params_desc()} {m.visibility()} str(" ") {m.expr()}); + } + } + } +} + +impl Printable for ObjBody { + fn print(&self, out: &mut PrintItems, ctx: &FormatContext) { + match self { + Self::ObjBodyComp(l) => { + let (children, mut end_comments) = children_between::( + l.syntax().clone(), + l.l_brace_token().map(Into::into).as_ref(), + Some( + &(l.comp_specs() + .next() + .expect("at least one spec is defined") + .syntax() + .clone()) + .into(), + ), + None, + ); + let trailing_for_comp = end_comments.extract_trailing(); + p!(out, ctx, str("{") >i nl); + for mem in children { + ctx.emit_blank_line_if(mem.should_start_with_newline, out); + format_comments(&mem.before_trivia, CommentLocation::AboveItem, out, ctx); + p!(out, ctx, {mem.value} str(",")); + format_comments(&mem.inline_trivia, CommentLocation::ItemInline, out, ctx); + p!(out, ctx, nl); + } + + ctx.emit_blank_line_if(end_comments.should_start_with_newline, out); + format_comments(&end_comments.trivia, CommentLocation::EndOfItems, out, ctx); + + let (compspecs, end_comments) = children_between::( + l.syntax().clone(), + l.member_comps() + .last() + .map(|m| m.syntax().clone()) + .map(Into::into) + .or_else(|| 
l.l_brace_token().map(Into::into)) + .as_ref(), + l.r_brace_token().map(Into::into).as_ref(), + Some(trailing_for_comp), + ); + for mem in compspecs { + ctx.emit_blank_line_if(mem.should_start_with_newline, out); + format_comments(&mem.before_trivia, CommentLocation::AboveItem, out, ctx); + p!(out, ctx, { mem.value }); + format_comments(&mem.inline_trivia, CommentLocation::ItemInline, out, ctx); + } + ctx.emit_blank_line_if(end_comments.should_start_with_newline, out); + format_comments(&end_comments.trivia, CommentLocation::EndOfItems, out, ctx); + + p!(out, ctx, nl { + let (children, end_comments) = children_between::( + l.syntax().clone(), + l.l_brace_token().map(Into::into).as_ref(), + l.r_brace_token().map(Into::into).as_ref(), + None, + ); + if children.is_empty() && end_comments.is_empty() { + if ctx.opts.pad_objects { + p!(out, ctx, str("{ }")); + } else { + p!(out, ctx, str("{}")); + } + return; + } + p!(out, ctx, str("{") >i nl); + for (i, mem) in children.into_iter().enumerate() { + // Don't emit blank line before first item + ctx.emit_blank_line_if(mem.should_start_with_newline && i != 0, out); + format_comments(&mem.before_trivia, CommentLocation::AboveItem, out, ctx); + p!(out, ctx, {mem.value} str(",")); + format_comments(&mem.inline_trivia, CommentLocation::ItemInline, out, ctx); + p!(out, ctx, nl); + } + + ctx.emit_blank_line_if(end_comments.should_start_with_newline, out); + format_comments(&end_comments.trivia, CommentLocation::EndOfItems, out, ctx); + p!(out, ctx, { + p!(out, ctx, {d.into()} str(" = ") {d.value()}); + } + Self::BindFunction(f) => { + p!(out, ctx, {f.name()} {f.params()} str(" = ") {f.value()}); + } + } + } +} +impl Printable for Literal { + fn print(&self, out: &mut PrintItems, ctx: &FormatContext) { + p!(out, ctx, string(self.syntax().to_string())); + } +} +impl Printable for ImportKind { + fn print(&self, out: &mut PrintItems, ctx: &FormatContext) { + p!(out, ctx, string(self.syntax().to_string())); + } +} +impl Printable for 
ForSpec { + fn print(&self, out: &mut PrintItems, ctx: &FormatContext) { + p!(out, ctx, str("for ") {self.bind()} str(" in ") {self.expr()}); + } +} +impl Printable for IfSpec { + fn print(&self, out: &mut PrintItems, ctx: &FormatContext) { + p!(out, ctx, str("if ") {self.expr()}); + } +} +impl Printable for CompSpec { + fn print(&self, out: &mut PrintItems, ctx: &FormatContext) { + match self { + Self::ForSpec(f) => f.print(out, ctx), + Self::IfSpec(i) => i.print(out, ctx), + } + } +} +impl Printable for Expr { + fn print(&self, out: &mut PrintItems, ctx: &FormatContext) { + let (stmts, _ending) = children_between::( + self.syntax().clone(), + None, + self.expr_base() + .as_ref() + .map(ExprBase::syntax) + .cloned() + .map(Into::into) + .as_ref(), + None, + ); + for stmt in stmts { + p!(out, ctx, { stmt.value }); + } + p!(out, ctx, { self.expr_base() }); + } +} +impl Printable for Stmt { + fn print(&self, out: &mut PrintItems, ctx: &FormatContext) { + match self { + Self::StmtLocal(l) => { + let (binds, end_comments) = children_between::( + l.syntax().clone(), + l.local_kw_token().map(Into::into).as_ref(), + l.semi_token().map(Into::into).as_ref(), + None, + ); + if binds.len() == 1 { + let bind = &binds[0]; + format_comments(&bind.before_trivia, CommentLocation::AboveItem, out, ctx); + p!(out, ctx, str("local ") {bind.value}); + // TODO: keep end_comments, child.inline_trivia somehow, force multiple locals formatting in case of presence? 
+ } else { + p!(out, ctx, str("local") >i nl); + for bind in binds { + ctx.emit_blank_line_if(bind.should_start_with_newline, out); + format_comments(&bind.before_trivia, CommentLocation::AboveItem, out, ctx); + p!(out, ctx, {bind.value} str(",")); + format_comments(&bind.inline_trivia, CommentLocation::ItemInline, out, ctx); + p!(out, ctx, nl); + } + ctx.emit_blank_line_if(end_comments.should_start_with_newline, out); + format_comments(&end_comments.trivia, CommentLocation::EndOfItems, out, ctx); + p!(out, ctx, { + p!(out, ctx, {a.assertion()} str(";") nl); + } + } + } +} +impl Printable for ExprBase { + fn print(&self, out: &mut PrintItems, ctx: &FormatContext) { + match self { + Self::ExprBinary(b) => { + p!(out, ctx, {b.lhs_work()} str(" ") {b.binary_operator()} str(" ") {b.rhs_work()}); + } + Self::ExprUnary(u) => p!(out, ctx, {u.unary_operator()} {u.rhs()}), + Self::ExprField(f) => { + p!(out, ctx, { f.base() }); + if f.question_mark_token().is_some() { + p!(out, ctx, str("?")); + } + p!(out, ctx, str(".") {f.field()}); + } + Self::ExprIndex(i) => { + p!(out, ctx, { i.base() }); + if i.question_mark_token().is_some() { + p!(out, ctx, str("?")); + } + if i.dot_token().is_some() { + p!(out, ctx, str(".")); + } + p!(out, ctx, str("[") {i.index()} str("]")); + } + Self::ExprSlice(s) => { + p!(out, ctx, {s.base()} {s.slice_desc()}); + } + Self::ExprCall(c) => { + p!(out, ctx, {c.callee()} {c.args_desc()}); + if c.tailstrict_kw_token().is_some() { + p!(out, ctx, str(" tailstrict")); + } + } + Self::ExprObjExtend(ex) => { + p!(out, ctx, {ex.lhs_work()} str(" ") {ex.rhs_work()}); + } + Self::ExprParened(par) => { + p!(out, ctx, str("(") {par.expr()} str(")")); + } + Self::ExprString(s) => p!(out, ctx, { s.text() }), + Self::ExprNumber(n) => p!(out, ctx, { n.number() }), + Self::ExprArray(a) => { + let (children, end_comments) = children_between::( + a.syntax().clone(), + a.l_brack_token().map(Into::into).as_ref(), + a.r_brack_token().map(Into::into).as_ref(), + None, 
+ ); + if children.is_empty() && end_comments.is_empty() { + if ctx.opts.pad_arrays { + p!(out, ctx, str("[ ]")); + } else { + p!(out, ctx, str("[]")); + } + return; + } + p!(out, ctx, str("[") >i nl); + for (i, el) in children.into_iter().enumerate() { + ctx.emit_blank_line_if(el.should_start_with_newline && i != 0, out); + format_comments(&el.before_trivia, CommentLocation::AboveItem, out, ctx); + p!(out, ctx, {el.value} str(",")); + format_comments(&el.inline_trivia, CommentLocation::ItemInline, out, ctx); + p!(out, ctx, nl); + } + ctx.emit_blank_line_if(end_comments.should_start_with_newline, out); + format_comments(&end_comments.trivia, CommentLocation::EndOfItems, out, ctx); + p!(out, ctx, { + p!(out, ctx, { obj.obj_body() }); + } + Self::ExprArrayComp(arr) => { + p!(out, ctx, str("[") {arr.expr()}); + for spec in arr.comp_specs() { + p!(out, ctx, str(" ") {spec}); + } + p!(out, ctx, str("]")); + } + Self::ExprImport(v) => { + p!(out, ctx, {v.import_kind()} str(" ") {v.text()}); + } + Self::ExprVar(n) => p!(out, ctx, { n.name() }), + // Self::ExprLocal(l) => { + // } + Self::ExprIfThenElse(ite) => { + p!(out, ctx, str("if ") {ite.cond()} str(" then ") {ite.then().map(|t| t.expr())}); + if ite.else_kw_token().is_some() || ite.else_().is_some() { + p!(out, ctx, str(" else ") {ite.else_().map(|t| t.expr())}); + } + } + Self::ExprFunction(f) => p!(out, ctx, str("function") {f.params_desc()} nl {f.expr()}), + // Self::ExprAssert(a) => p!(new: {a.assertion()} str("; ") {a.expr()}), + Self::ExprError(e) => p!(out, ctx, str("error ") {e.expr()}), + Self::ExprLiteral(l) => { + p!(out, ctx, { l.literal() }); + } + } + } +} + +impl Printable for SourceFile { + fn print(&self, out: &mut PrintItems, ctx: &FormatContext) { + let before = trivia_before( + self.syntax().clone(), + self.expr() + .map(|e| e.syntax().clone()) + .map(Into::into) + .as_ref(), + ); + let after = trivia_after( + self.syntax().clone(), + self.expr() + .map(|e| e.syntax().clone()) + .map(Into::into) 
+ .as_ref(), + ); + format_comments(&before, CommentLocation::AboveItem, out, ctx); + p!(out, ctx, {self.expr()} nl); + format_comments(&after, CommentLocation::EndOfItems, out, ctx); + } +} diff --git a/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__array_with_comments.snap b/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__array_with_comments.snap new file mode 100644 index 00000000..40885a7f --- /dev/null +++ b/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__array_with_comments.snap @@ -0,0 +1,10 @@ +--- +source: cmds/jrsonnet-fmt/src/tests.rs +expression: "reformat(indoc!(\"[\n\t\t\t// comment before\n\t\t\t1,\n\t\t\t2, // inline comment\n\t\t\t// trailing comment\n\t\t]\"))" +--- +[ + // comment before + 1, + 2, // inline comment + // trailing comment +] diff --git a/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__array_with_elements.snap b/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__array_with_elements.snap new file mode 100644 index 00000000..d5f0a812 --- /dev/null +++ b/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__array_with_elements.snap @@ -0,0 +1,9 @@ +--- +source: cmds/jrsonnet-fmt/src/tests.rs +expression: "reformat(\"[1, 2, 3]\")" +--- +[ + 1, + 2, + 3, +] diff --git a/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__comment_style_hash_to_slash.snap b/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__comment_style_hash_to_slash.snap new file mode 100644 index 00000000..66489bba --- /dev/null +++ b/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__comment_style_hash_to_slash.snap @@ -0,0 +1,8 @@ +--- +source: cmds/jrsonnet-fmt/src/tests.rs +expression: "reformat_with_opts(indoc!(\"{\n\t\t\t\t# hash comment\n\t\t\t\ta: 1,\n\t\t\t}\"),\nFormatOptions { comment_style: CommentStyle::Slash, ..Default::default() })" +--- +{ + // hash comment + a: 1, +} diff --git a/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__comment_style_leave.snap 
b/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__comment_style_leave.snap new file mode 100644 index 00000000..296c74aa --- /dev/null +++ b/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__comment_style_leave.snap @@ -0,0 +1,10 @@ +--- +source: cmds/jrsonnet-fmt/src/tests.rs +expression: "reformat_with_opts(indoc!(\"{\n\t\t\t\t# hash stays hash\n\t\t\t\ta: 1,\n\t\t\t\t// slash stays slash\n\t\t\t\tb: 2,\n\t\t\t}\"),\nFormatOptions { comment_style: CommentStyle::Leave, ..Default::default() })" +--- +{ + # hash stays hash + a: 1, + // slash stays slash + b: 2, +} diff --git a/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__comment_style_slash_to_hash.snap b/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__comment_style_slash_to_hash.snap new file mode 100644 index 00000000..716b258b --- /dev/null +++ b/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__comment_style_slash_to_hash.snap @@ -0,0 +1,8 @@ +--- +source: cmds/jrsonnet-fmt/src/tests.rs +expression: "reformat_with_opts(indoc!(\"{\n\t\t\t\t// slash comment\n\t\t\t\ta: 1,\n\t\t\t}\"),\nFormatOptions { comment_style: CommentStyle::Hash, ..Default::default() })" +--- +{ + # slash comment + a: 1, +} diff --git a/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__empty_array_no_padding.snap b/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__empty_array_no_padding.snap new file mode 100644 index 00000000..b0164a50 --- /dev/null +++ b/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__empty_array_no_padding.snap @@ -0,0 +1,5 @@ +--- +source: cmds/jrsonnet-fmt/src/tests.rs +expression: "reformat(\"[]\")" +--- +[] diff --git a/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__empty_array_with_padding.snap b/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__empty_array_with_padding.snap new file mode 100644 index 00000000..8b40d00a --- /dev/null +++ b/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__empty_array_with_padding.snap @@ -0,0 +1,5 @@ +--- +source: 
cmds/jrsonnet-fmt/src/tests.rs +expression: "reformat_with_opts(\"[]\", FormatOptions\n{ pad_arrays: true, ..Default::default() })" +--- +[ ] diff --git a/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__empty_object_default_padding.snap b/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__empty_object_default_padding.snap new file mode 100644 index 00000000..a7a3fb7a --- /dev/null +++ b/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__empty_object_default_padding.snap @@ -0,0 +1,5 @@ +--- +source: cmds/jrsonnet-fmt/src/tests.rs +expression: "reformat(\"{}\")" +--- +{ } diff --git a/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__empty_object_no_padding.snap b/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__empty_object_no_padding.snap new file mode 100644 index 00000000..49a952c4 --- /dev/null +++ b/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__empty_object_no_padding.snap @@ -0,0 +1,5 @@ +--- +source: cmds/jrsonnet-fmt/src/tests.rs +expression: "reformat_with_opts(\"{}\", FormatOptions\n{ pad_objects: false, ..Default::default() })" +--- +{} diff --git a/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__string_style_double_to_single.snap b/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__string_style_double_to_single.snap new file mode 100644 index 00000000..c541040e --- /dev/null +++ b/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__string_style_double_to_single.snap @@ -0,0 +1,8 @@ +--- +source: cmds/jrsonnet-fmt/src/tests.rs +expression: "reformat_with_opts(r#\"{ a: \"hello\", b: \"world\" }\"#, FormatOptions\n{ string_style: StringStyle::Single, ..Default::default() })" +--- +{ + a: 'hello', + b: 'world', +} diff --git a/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__string_style_leave.snap b/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__string_style_leave.snap new file mode 100644 index 00000000..7c1a6bf4 --- /dev/null +++ b/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__string_style_leave.snap @@ 
-0,0 +1,8 @@ +--- +source: cmds/jrsonnet-fmt/src/tests.rs +expression: "reformat_with_opts(r#\"{ a: \"double\", b: 'single' }\"#, FormatOptions\n{ string_style: StringStyle::Leave, ..Default::default() })" +--- +{ + a: "double", + b: 'single', +} diff --git a/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__string_style_single_to_double.snap b/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__string_style_single_to_double.snap new file mode 100644 index 00000000..ea466526 --- /dev/null +++ b/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__string_style_single_to_double.snap @@ -0,0 +1,8 @@ +--- +source: cmds/jrsonnet-fmt/src/tests.rs +expression: "reformat_with_opts(\"{ a: 'hello', b: 'world' }\", FormatOptions\n{ string_style: StringStyle::Double, ..Default::default() })" +--- +{ + a: "hello", + b: "world", +} diff --git a/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__string_style_with_escapes.snap b/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__string_style_with_escapes.snap new file mode 100644 index 00000000..e141d89a --- /dev/null +++ b/cmds/jrsonnet-fmt/src/snapshots/jrsonnet_fmt__tests__string_style_with_escapes.snap @@ -0,0 +1,8 @@ +--- +source: cmds/jrsonnet-fmt/src/tests.rs +expression: "reformat_with_opts(r#\"{ a: \"it's a test\", b: 'say \"hello\"' }\"#, FormatOptions\n{ string_style: StringStyle::Single, ..Default::default() })" +--- +{ + a: 'it\'s a test', + b: 'say "hello"', +} diff --git a/cmds/jrsonnet-fmt/src/tests.rs b/cmds/jrsonnet-fmt/src/tests.rs index eb19a3f0..15370342 100644 --- a/cmds/jrsonnet-fmt/src/tests.rs +++ b/cmds/jrsonnet-fmt/src/tests.rs @@ -1,15 +1,23 @@ use dprint_core::formatting::{PrintItems, PrintOptions}; use indoc::indoc; -use crate::Printable; +use crate::{ + context::{CommentStyle, FormatContext, FormatOptions, StringStyle}, + printable::Printable, +}; fn reformat(input: &str) -> String { + reformat_with_opts(input, FormatOptions::default()) +} + +fn reformat_with_opts(input: &str, opts: 
FormatOptions) -> String { let (source, _) = jrsonnet_rowan_parser::parse(input); + let ctx = FormatContext::new(opts); dprint_core::formatting::format( || { let mut out = PrintItems::new(); - source.print(&mut out); + source.print(&mut out, &ctx); out }, PrintOptions { @@ -77,3 +85,147 @@ fn complex_comments_snapshot() { }" ))); } + +#[test] +fn empty_array_no_padding() { + insta::assert_snapshot!(reformat("[]")); +} + +#[test] +fn empty_array_with_padding() { + insta::assert_snapshot!(reformat_with_opts( + "[]", + FormatOptions { + pad_arrays: true, + ..Default::default() + } + )); +} + +#[test] +fn empty_object_default_padding() { + insta::assert_snapshot!(reformat("{}")); +} + +#[test] +fn empty_object_no_padding() { + insta::assert_snapshot!(reformat_with_opts( + "{}", + FormatOptions { + pad_objects: false, + ..Default::default() + } + )); +} + +#[test] +fn array_with_elements() { + insta::assert_snapshot!(reformat("[1, 2, 3]")); +} + +#[test] +fn array_with_comments() { + insta::assert_snapshot!(reformat(indoc!( + "[ + // comment before + 1, + 2, // inline comment + // trailing comment + ]" + ))); +} + +#[test] +fn comment_style_hash_to_slash() { + insta::assert_snapshot!(reformat_with_opts( + indoc!( + "{ + # hash comment + a: 1, + }" + ), + FormatOptions { + comment_style: CommentStyle::Slash, + ..Default::default() + } + )); +} + +#[test] +fn comment_style_slash_to_hash() { + insta::assert_snapshot!(reformat_with_opts( + indoc!( + "{ + // slash comment + a: 1, + }" + ), + FormatOptions { + comment_style: CommentStyle::Hash, + ..Default::default() + } + )); +} + +#[test] +fn comment_style_leave() { + insta::assert_snapshot!(reformat_with_opts( + indoc!( + "{ + # hash stays hash + a: 1, + // slash stays slash + b: 2, + }" + ), + FormatOptions { + comment_style: CommentStyle::Leave, + ..Default::default() + } + )); +} + +#[test] +fn string_style_single_to_double() { + insta::assert_snapshot!(reformat_with_opts( + "{ a: 'hello', b: 'world' }", + FormatOptions 
{ + string_style: StringStyle::Double, + ..Default::default() + } + )); +} + +#[test] +fn string_style_double_to_single() { + insta::assert_snapshot!(reformat_with_opts( + r#"{ a: "hello", b: "world" }"#, + FormatOptions { + string_style: StringStyle::Single, + ..Default::default() + } + )); +} + +#[test] +fn string_style_with_escapes() { + // String with quotes that need escaping when converted + insta::assert_snapshot!(reformat_with_opts( + r#"{ a: "it's a test", b: 'say "hello"' }"#, + FormatOptions { + string_style: StringStyle::Single, + ..Default::default() + } + )); +} + +#[test] +fn string_style_leave() { + insta::assert_snapshot!(reformat_with_opts( + r#"{ a: "double", b: 'single' }"#, + FormatOptions { + string_style: StringStyle::Leave, + ..Default::default() + } + )); +} diff --git a/cmds/jrsonnet-lsp/Cargo.toml b/cmds/jrsonnet-lsp/Cargo.toml new file mode 100644 index 00000000..dbfa9a29 --- /dev/null +++ b/cmds/jrsonnet-lsp/Cargo.toml @@ -0,0 +1,21 @@ +[package] +name = "jrsonnet-lsp-bin" +authors.workspace = true +edition.workspace = true +license.workspace = true +repository.workspace = true +version.workspace = true + +[[bin]] +name = "jrsonnet-lsp" +path = "src/main.rs" + +[dependencies] +jrsonnet-lsp = { path = "../../crates/jrsonnet-lsp" } +clap = { version = "4.5", features = ["derive"] } +anyhow = "1.0" +tracing = "0.1" +tracing-subscriber = { version = "0.3", features = ["env-filter"] } + +[lints] +workspace = true diff --git a/cmds/jrsonnet-lsp/src/main.rs b/cmds/jrsonnet-lsp/src/main.rs new file mode 100644 index 00000000..c2775b59 --- /dev/null +++ b/cmds/jrsonnet-lsp/src/main.rs @@ -0,0 +1,47 @@ +//! Jsonnet Language Server binary. +//! +//! This is the CLI entry point for the jrsonnet language server. +//! The server communicates over stdio using the Language Server Protocol. 
+ +use anyhow::Result; +use clap::Parser; +use tracing::Level; +use tracing_subscriber::EnvFilter; + +#[derive(Parser, Debug)] +#[command(name = "jrsonnet-lsp")] +#[command(about = "Jsonnet Language Server")] +#[command(version)] +struct Args { + /// Log level (trace, debug, info, warn, error) + #[arg(long, default_value = "info")] + log_level: Level, + + /// Log file path (logs to stderr if not specified) + #[arg(long)] + log_file: Option, +} + +fn main() -> Result<()> { + let args = Args::parse(); + + // Set up logging + let filter = EnvFilter::from_default_env().add_directive(args.log_level.into()); + + if let Some(log_file) = args.log_file { + let file = std::fs::File::create(&log_file)?; + tracing_subscriber::fmt() + .with_env_filter(filter) + .with_writer(file) + .with_ansi(false) + .init(); + } else { + tracing_subscriber::fmt() + .with_env_filter(filter) + .with_writer(std::io::stderr) + .init(); + } + + // Run the LSP server + jrsonnet_lsp::run_stdio() +} diff --git a/cmds/rtk/src/tanka.rs b/cmds/rtk/src/tanka.rs index edb0e0c8..5b207904 100644 --- a/cmds/rtk/src/tanka.rs +++ b/cmds/rtk/src/tanka.rs @@ -95,9 +95,8 @@ fn generate_manifest_key_from_val(val: &Val, name_format: Option<&str>) -> Resul .unwrap_or_else(|| "cluster".to_string()); return Ok(format!("{}_{}_{}", namespace, kind, name)); - } else { - return Ok(format!("{}_{}", kind, name)); } + return Ok(format!("{}_{}", kind, name)); } } @@ -213,7 +212,7 @@ fn to_snake_case(s: &str) -> String { .iter() .skip_while(|c| c.is_ascii_digit()) .next() - .map(|c| c.is_ascii_alphabetic()) + .map(char::is_ascii_alphabetic) .unwrap_or(false); if has_letter_after_digits { result.push('_'); @@ -330,14 +329,13 @@ fn yaml_v3_key_compare(a: &str, b: &str) -> std::cmp::Ordering { } else { std::cmp::Ordering::Greater }; - } else { - // Not after digits: non-letters come first - return if bl { - std::cmp::Ordering::Less - } else { - std::cmp::Ordering::Greater - }; } + // Not after digits: non-letters come first + 
return if bl { + std::cmp::Ordering::Less + } else { + std::cmp::Ordering::Greater + }; } // Both are non-letters - check for numeric sequences diff --git a/crates/jrsonnet-evaluator/src/manifest.rs b/crates/jrsonnet-evaluator/src/manifest.rs index e210276c..f2a1935b 100644 --- a/crates/jrsonnet-evaluator/src/manifest.rs +++ b/crates/jrsonnet-evaluator/src/manifest.rs @@ -37,7 +37,16 @@ pub(crate) fn format_float_go_g17(v: f64) -> String { v.abs().log10().floor() as i32 }; - if exp < -4 || exp >= 17 { + if (-4..17).contains(&exp) { + // Use decimal notation like %f but with 17 significant digits + // Calculate digits after decimal point needed for 17 sig figs + let digits_after_decimal = (16 - exp).max(0) as usize; + let formatted = format!("{:.prec$}", v, prec = digits_after_decimal); + // Trim trailing zeros but keep at least one digit after decimal if there was one + let trimmed = formatted.trim_end_matches('0'); + let trimmed = trimmed.trim_end_matches('.'); + trimmed.to_string() + } else { // Use scientific notation like %e let formatted = format!("{:.16e}", v); // Parse and clean up: "3.1415926535897930e0" -> "3.141592653589793e0" @@ -53,15 +62,6 @@ pub(crate) fn format_float_go_g17(v: f64) -> String { } else { formatted } - } else { - // Use decimal notation like %f but with 17 significant digits - // Calculate digits after decimal point needed for 17 sig figs - let digits_after_decimal = (16 - exp).max(0) as usize; - let formatted = format!("{:.prec$}", v, prec = digits_after_decimal); - // Trim trailing zeros but keep at least one digit after decimal if there was one - let trimmed = formatted.trim_end_matches('0'); - let trimmed = trimmed.trim_end_matches('.'); - trimmed.to_string() } } @@ -528,10 +528,8 @@ impl ManifestFormat for YamlStreamFormat { } // For jrsonnet empty mode: always add trailing newline // For go-jsonnet mode: only add trailing newline if c_document_end is true - if self.jrsonnet_empty || self.c_document_end { - if self.end_newline { - 
out.push('\n'); - } + if (self.jrsonnet_empty || self.c_document_end) && self.end_newline { + out.push('\n'); } Ok(()) } diff --git a/crates/jrsonnet-lsp-check/Cargo.toml b/crates/jrsonnet-lsp-check/Cargo.toml new file mode 100644 index 00000000..ff088556 --- /dev/null +++ b/crates/jrsonnet-lsp-check/Cargo.toml @@ -0,0 +1,26 @@ +[package] +name = "jrsonnet-lsp-check" +authors.workspace = true +edition.workspace = true +license.workspace = true +repository.workspace = true +version.workspace = true +description = "Type checking and linting for jrsonnet LSP" + +[dependencies] +jrsonnet-lsp-document = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-document" } +jrsonnet-lsp-inference = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-inference" } +jrsonnet-lsp-stdlib = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-stdlib" } +jrsonnet-lsp-types = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-types" } +jrsonnet-rowan-parser = { version = "0.5.0-pre97", path = "../jrsonnet-rowan-parser" } +lsp-types.workspace = true +rowan.workspace = true +rustc-hash.workspace = true + +[dev-dependencies] +assert_matches = "1.5.0" +jrsonnet-lsp-document = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-document" } +rstest = "0.23" + +[lints] +workspace = true diff --git a/crates/jrsonnet-lsp-check/src/diagnostic.rs b/crates/jrsonnet-lsp-check/src/diagnostic.rs new file mode 100644 index 00000000..27a00b9a --- /dev/null +++ b/crates/jrsonnet-lsp-check/src/diagnostic.rs @@ -0,0 +1,397 @@ +//! Unified diagnostic handling for Jsonnet LSP. +//! +//! Provides a consistent error code system and diagnostic builder for +//! type checking, linting, and format checking. + +use jrsonnet_lsp_document::LspRange; +use lsp_types::{DiagnosticRelatedInformation, DiagnosticSeverity, Location, Uri}; + +/// Severity level for diagnostics. 
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
+pub enum Severity {
+	Error,
+	Warning,
+	Information,
+	Hint,
+}
+
+impl From<Severity> for DiagnosticSeverity {
+	fn from(severity: Severity) -> Self {
+		match severity {
+			Severity::Error => DiagnosticSeverity::ERROR,
+			Severity::Warning => DiagnosticSeverity::WARNING,
+			Severity::Information => DiagnosticSeverity::INFORMATION,
+			Severity::Hint => DiagnosticSeverity::HINT,
+		}
+	}
+}
+
+/// Diagnostic error code for programmatic handling.
+///
+/// Error codes are grouped by category:
+/// - `E0xx`: Type errors
+/// - `E1xx`: Scope errors
+/// - `E2xx`: Format string errors
+/// - `W0xx`: Lint warnings
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum ErrorCode {
+	// Type errors (E0xx)
+	/// Binary operator type mismatch
+	BinaryOpMismatch,
+	/// Unary operator type mismatch
+	UnaryOpMismatch,
+	/// Field access on non-object
+	FieldAccessOnNonObject,
+	/// Index access on non-indexable type
+	IndexOnNonIndexable,
+	/// Function call on non-callable
+	CallOnNonFunction,
+	/// Wrong argument count
+	WrongArgCount,
+	/// Missing required arguments
+	TooFewArguments,
+	/// Too many arguments
+	TooManyArguments,
+	/// No such field on object
+	NoSuchField,
+	/// Tuple index out of bounds
+	TupleIndexOutOfBounds,
+	/// Argument type mismatch
+	ArgumentTypeMismatch,
+	/// Callback parameter type mismatch
+	CallbackTypeMismatch,
+
+	// Scope errors (E1xx)
+	/// Undefined variable
+	UndefinedVariable,
+	/// Duplicate variable binding
+	DuplicateVariable,
+	/// Duplicate object field
+	DuplicateField,
+	/// Duplicate function parameter
+	DuplicateParameter,
+
+	// Format string errors (E2xx)
+	/// Invalid format specifier
+	FormatInvalidSpecifier,
+	/// Wrong number of format arguments
+	FormatArgCountMismatch,
+	/// Format argument type mismatch
+	FormatTypeMismatch,
+
+	// Lint warnings (W0xx)
+	/// Variable declared but never used
+	UnusedVariable,
+	/// Parameter declared but never used
UnusedParameter, + /// Variable shadows another variable + ShadowedVariable, + /// Code after error expression is unreachable + UnreachableCode, +} + +impl ErrorCode { + /// Get the default severity for this error code. + pub fn default_severity(&self) -> Severity { + match self { + Self::BinaryOpMismatch + | Self::UnaryOpMismatch + | Self::FieldAccessOnNonObject + | Self::IndexOnNonIndexable + | Self::CallOnNonFunction + | Self::WrongArgCount + | Self::TooFewArguments + | Self::TooManyArguments + | Self::NoSuchField + | Self::TupleIndexOutOfBounds + | Self::ArgumentTypeMismatch + | Self::CallbackTypeMismatch + | Self::UndefinedVariable + | Self::DuplicateVariable + | Self::DuplicateField + | Self::DuplicateParameter + | Self::FormatInvalidSpecifier + | Self::FormatArgCountMismatch + | Self::FormatTypeMismatch => Severity::Error, + + // Lint warnings + Self::UnusedVariable | Self::UnusedParameter | Self::ShadowedVariable => { + Severity::Warning + } + + // Hints + Self::UnreachableCode => Severity::Hint, + } + } + + /// Get the string code for LSP diagnostic. + /// + /// Format: `E0xx` for errors, `W0xx` for warnings. 
+	pub fn as_str(&self) -> &'static str {
+		match self {
+			// Type errors
+			Self::BinaryOpMismatch => "E001",
+			Self::UnaryOpMismatch => "E002",
+			Self::FieldAccessOnNonObject => "E003",
+			Self::IndexOnNonIndexable => "E004",
+			Self::CallOnNonFunction => "E005",
+			Self::WrongArgCount => "E006",
+			Self::TooFewArguments => "E007",
+			Self::TooManyArguments => "E008",
+			Self::NoSuchField => "E009",
+			Self::TupleIndexOutOfBounds => "E010",
+			Self::ArgumentTypeMismatch => "E011",
+			Self::CallbackTypeMismatch => "E012",
+
+			// Scope errors
+			Self::UndefinedVariable => "E101",
+			Self::DuplicateVariable => "E102",
+			Self::DuplicateField => "E103",
+			Self::DuplicateParameter => "E104",
+
+			// Format errors
+			Self::FormatInvalidSpecifier => "E201",
+			Self::FormatArgCountMismatch => "E202",
+			Self::FormatTypeMismatch => "E203",
+
+			// Warnings
+			Self::UnusedVariable => "W001",
+			Self::UnusedParameter => "W002",
+			Self::ShadowedVariable => "W003",
+			Self::UnreachableCode => "W004",
+		}
+	}
+}
+
+/// A diagnostic message with location and metadata.
+#[derive(Debug, Clone)]
+pub struct Diagnostic {
+	/// The error code.
+	pub code: ErrorCode,
+	/// Severity level.
+	pub severity: Severity,
+	/// Human-readable message.
+	pub message: String,
+	/// Source location (LSP range).
+	pub range: LspRange,
+	/// Optional suggestion for fixing the error.
+	pub suggestion: Option<String>,
+	/// Related information (e.g., "defined here").
+	pub related: Vec<RelatedInfo>,
+}
+
+/// Related diagnostic information.
+#[derive(Debug, Clone)]
+pub struct RelatedInfo {
+	/// Description of the relation.
+	pub message: String,
+	/// Location of the related item.
+	pub uri: Uri,
+	/// Range within the file.
+	pub range: LspRange,
+}
+
+impl Diagnostic {
+	/// Create a new diagnostic with the given code and message.
+	pub fn new(code: ErrorCode, message: impl Into<String>, range: LspRange) -> Self {
+		Self {
+			severity: code.default_severity(),
+			code,
+			message: message.into(),
+			range,
+			suggestion: None,
+			related: Vec::new(),
+		}
+	}
+
+	/// Add a suggestion to this diagnostic.
+	#[must_use]
+	pub fn with_suggestion(mut self, message: impl Into<String>) -> Self {
+		self.suggestion = Some(message.into());
+		self
+	}
+
+	/// Add related information.
+	#[must_use]
+	pub fn with_related(mut self, message: impl Into<String>, uri: Uri, range: LspRange) -> Self {
+		self.related.push(RelatedInfo {
+			message: message.into(),
+			uri,
+			range,
+		});
+		self
+	}
+
+	/// Convert to LSP Diagnostic.
+	pub fn to_lsp(&self) -> lsp_types::Diagnostic {
+		let related_information = if self.related.is_empty() {
+			None
+		} else {
+			Some(
+				self.related
+					.iter()
+					.map(|r| DiagnosticRelatedInformation {
+						location: Location {
+							uri: r.uri.clone(),
+							range: r.range.into(),
+						},
+						message: r.message.clone(),
+					})
+					.collect(),
+			)
+		};
+
+		let mut message = self.message.clone();
+		if let Some(suggestion) = &self.suggestion {
+			message.push_str("\n\nSuggestion: ");
+			message.push_str(suggestion);
+		}
+
+		lsp_types::Diagnostic {
+			range: self.range.into(),
+			severity: Some(self.severity.into()),
+			code: Some(lsp_types::NumberOrString::String(
+				self.code.as_str().to_string(),
+			)),
+			code_description: None,
+			source: Some("jrsonnet".to_string()),
+			message,
+			related_information,
+			tags: None,
+			data: None,
+		}
+	}
+}
+
+/// Builder for collecting diagnostics during analysis.
+#[derive(Debug, Default)]
+pub struct DiagnosticCollector {
+	diagnostics: Vec<Diagnostic>,
+}
+
+impl DiagnosticCollector {
+	/// Create a new empty collector.
+	pub fn new() -> Self {
+		Self::default()
+	}
+
+	/// Push a diagnostic.
+	pub fn push(&mut self, diagnostic: Diagnostic) {
+		self.diagnostics.push(diagnostic);
+	}
+
+	/// Add an error diagnostic.
+	pub fn error(&mut self, code: ErrorCode, message: impl Into<String>, range: LspRange) {
+		self.push(Diagnostic::new(code, message, range));
+	}
+
+	/// Add a warning diagnostic.
+	pub fn warning(&mut self, code: ErrorCode, message: impl Into<String>, range: LspRange) {
+		let mut diag = Diagnostic::new(code, message, range);
+		diag.severity = Severity::Warning;
+		self.push(diag);
+	}
+
+	/// Convert all diagnostics to LSP format.
+	pub fn into_lsp_diagnostics(self) -> Vec<lsp_types::Diagnostic> {
+		self.diagnostics.into_iter().map(|d| d.to_lsp()).collect()
+	}
+
+	/// Get the collected diagnostics.
+	pub fn into_diagnostics(self) -> Vec<Diagnostic> {
+		self.diagnostics
+	}
+
+	/// Check if there are any diagnostics.
+	pub fn is_empty(&self) -> bool {
+		self.diagnostics.is_empty()
+	}
+
+	/// Check if there are any errors.
+	pub fn has_errors(&self) -> bool {
+		self.diagnostics
+			.iter()
+			.any(|d| d.severity == Severity::Error)
+	}
+
+	/// Get the number of diagnostics.
+	pub fn len(&self) -> usize {
+		self.diagnostics.len()
+	}
+}
+
+#[cfg(test)]
+mod tests {
+	use jrsonnet_lsp_document::{CharOffset, Line, LspPosition};
+
+	use super::*;
+
+	fn test_range() -> LspRange {
+		LspRange {
+			start: LspPosition {
+				line: Line(0),
+				character: CharOffset(0),
+			},
+			end: LspPosition {
+				line: Line(0),
+				character: CharOffset(5),
+			},
+		}
+	}
+
+	#[test]
+	fn test_error_codes() {
+		assert_eq!(ErrorCode::BinaryOpMismatch.as_str(), "E001");
+		assert_eq!(ErrorCode::UnusedVariable.as_str(), "W001");
+		assert_eq!(
+			ErrorCode::BinaryOpMismatch.default_severity(),
+			Severity::Error
+		);
+		assert_eq!(
+			ErrorCode::UnusedVariable.default_severity(),
+			Severity::Warning
+		);
+	}
+
+	#[test]
+	fn test_diagnostic_creation() {
+		let diag = Diagnostic::new(
+			ErrorCode::NoSuchField,
+			"field 'foo' not found",
+			test_range(),
+		);
+		assert_eq!(diag.code, ErrorCode::NoSuchField);
+		assert_eq!(diag.severity, Severity::Error);
+		assert!(diag.message.contains("foo"));
+	}
+
+	#[test]
+	fn test_diagnostic_with_suggestion() {
+		let diag = Diagnostic::new(
+			ErrorCode::NoSuchField,
+			"field 'foo' not found",
+			test_range(),
+		)
+		.with_suggestion("did you mean 'food'?");
+		assert!(diag.suggestion.is_some());
+
+		let lsp = diag.to_lsp();
+		assert!(lsp.message.contains("Suggestion:"));
+		assert!(lsp.message.contains("food"));
+	}
+
+	#[test]
+	fn test_collector() {
+		let mut collector = DiagnosticCollector::new();
+		assert!(collector.is_empty());
+
+		collector.error(ErrorCode::NoSuchField, "error 1", test_range());
+		collector.warning(ErrorCode::UnusedVariable, "warning 1", test_range());
+
+		assert_eq!(collector.len(), 2);
+		assert!(collector.has_errors());
+
+		let lsp_diagnostics = collector.into_lsp_diagnostics();
+		assert_eq!(lsp_diagnostics.len(), 2);
+	}
+}
diff --git a/crates/jrsonnet-lsp-check/src/format_check.rs b/crates/jrsonnet-lsp-check/src/format_check.rs
new file mode 100644
index 00000000..ed6fb66e
--- /dev/null
+++ b/crates/jrsonnet-lsp-check/src/format_check.rs
@@ -0,0 +1,727 @@
+//! Format string validation for std.format and the % operator.
+//!
+//! Jsonnet uses Python-style format strings:
+//! - `%s` - string (any value)
+//! - `%d`, `%i`, `%u` - integer (number)
+//! - `%o`, `%x`, `%X` - integer in octal/hex (number)
+//! - `%e`, `%E`, `%f`, `%F`, `%g`, `%G` - floating point (number)
+//! - `%c` - character (number or single-char string)
+//! - `%%` - literal %
+//! - `%(name)s` - named argument from object
+//!
+//! Format modifiers:
+//! - Flags: `-` (left-justify), `+` (show sign), ` ` (space for positive), `#` (alternate form), `0` (zero-pad)
+//! - Width: minimum field width (digits or `*` for dynamic)
+//! - Precision: `.` followed by digits or `*` (for floats: decimal places; for strings: max chars)
+
+use jrsonnet_lsp_types::{Ty, TyData, TypeStoreOps};
+
+/// Expected type category for a format placeholder.
+///
+/// This is a lightweight representation that avoids needing a TyStore during parsing.
+/// Convert to `Ty` when needed for type checking.
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub enum FormatTypeKind {
+	/// Any type is accepted (%s - toString is called)
+	Any,
+	/// Number type required (%d, %f, etc.)
+	Number,
+	/// Number or single-character string (%c)
+	NumberOrString,
+}
+
+impl FormatTypeKind {
+	/// Convert to the corresponding Ty.
+	///
+	/// For `NumberOrString`, creates a union type in the provided store.
+	pub fn to_ty<S: TypeStoreOps>(self, store: &mut S) -> Ty {
+		match self {
+			FormatTypeKind::Any => Ty::ANY,
+			FormatTypeKind::Number => Ty::NUMBER,
+			FormatTypeKind::NumberOrString => store.union(vec![Ty::NUMBER, Ty::STRING]),
+		}
+	}
+
+	/// Check if the given Ty is compatible with this format type kind.
+	pub fn is_compatible_with<S: TypeStoreOps>(self, ty: Ty, store: &S) -> bool {
+		// Any is always compatible
+		if ty.is_any() {
+			return true;
+		}
+
+		match self {
+			FormatTypeKind::Any => true,
+			FormatTypeKind::Number => {
+				ty == Ty::NUMBER || matches!(store.get_data(ty), TyData::BoundedNumber(_))
+			}
+			FormatTypeKind::NumberOrString => {
+				ty == Ty::NUMBER
+					|| ty == Ty::STRING
+					|| ty == Ty::CHAR
+					|| matches!(
+						store.get_data(ty),
+						TyData::BoundedNumber(_) | TyData::LiteralString(_)
+					)
+			}
+		}
+	}
+}
+
+/// Format flags that modify output.
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub enum FormatFlag {
+	LeftJustify,
+	ShowSign,
+	SpaceSign,
+	Alternate,
+	ZeroPad,
+}
+
+impl FormatFlag {
+	const fn bit(self) -> u8 {
+		match self {
+			Self::LeftJustify => 1 << 0,
+			Self::ShowSign => 1 << 1,
+			Self::SpaceSign => 1 << 2,
+			Self::Alternate => 1 << 3,
+			Self::ZeroPad => 1 << 4,
+		}
+	}
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
+pub struct FormatFlags {
+	bits: u8,
+}
+
+impl FormatFlags {
+	pub fn insert(&mut self, flag: FormatFlag) {
+		self.bits |= flag.bit();
+	}
+}
+
+impl FromIterator<FormatFlag> for FormatFlags {
+	fn from_iter<T: IntoIterator<Item = FormatFlag>>(iter: T) -> Self {
+		let mut flags = Self::default();
+		for flag in iter {
+			flags.insert(flag);
+		}
+		flags
+	}
+}
+
+/// Width specification for format placeholder.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
+pub enum WidthSpec {
+	/// No width specified.
+	#[default]
+	None,
+	/// Fixed width from format string.
+	Fixed(usize),
+	/// Dynamic width from next argument (`*`).
+	Dynamic,
+}
+
+/// Precision specification for format placeholder.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
+pub enum PrecisionSpec {
+	/// No precision specified.
+	#[default]
+	None,
+	/// Fixed precision from format string.
+	Fixed(usize),
+	/// Dynamic precision from next argument (`.*`).
+	Dynamic,
+}
+
+/// Parsed format modifiers.
+#[derive(Debug, Clone, PartialEq, Eq, Default)]
+pub struct FormatModifiers {
+	/// Format flags.
+	pub flags: FormatFlags,
+	/// Width specification.
+	pub width: WidthSpec,
+	/// Precision specification.
+	pub precision: PrecisionSpec,
+}
+
+impl FormatModifiers {
+	/// Count how many extra arguments are consumed by dynamic width/precision.
+	pub fn dynamic_arg_count(&self) -> usize {
+		let width_args = usize::from(self.width == WidthSpec::Dynamic);
+		let prec_args = usize::from(self.precision == PrecisionSpec::Dynamic);
+		width_args + prec_args
+	}
+}
+
+/// A placeholder in a format string.
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum FormatPlaceholder {
+	/// Positional placeholder like `%s` or `%d`.
+	Positional {
+		/// Expected type category for this placeholder.
+		expected_type: FormatTypeKind,
+		/// The format specifier character (s, d, f, etc.).
+		specifier: char,
+		/// Parsed format modifiers.
+		modifiers: FormatModifiers,
+	},
+	/// Named placeholder like `%(name)s`.
+	Named {
+		/// The name in the placeholder.
+		name: String,
+		/// Expected type category for this placeholder.
+		expected_type: FormatTypeKind,
+		/// The format specifier character.
+		specifier: char,
+		/// Parsed format modifiers.
+		modifiers: FormatModifiers,
+	},
+}
+
+/// Result of parsing a format string.
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct FormatSpec {
+	/// List of placeholders in order of appearance.
+	pub placeholders: Vec<FormatPlaceholder>,
+	/// Whether all placeholders are named (requires object argument).
+	pub uses_named: bool,
+	/// Whether any placeholders are positional (requires array argument).
+	pub uses_positional: bool,
+}
+
+impl FormatSpec {
+	/// Count the total number of positional arguments required.
+	///
+	/// This includes:
+	/// - One for each positional placeholder
+	/// - One for each dynamic width (`*`)
+	/// - One for each dynamic precision (`.*`)
+	pub fn positional_arg_count(&self) -> usize {
+		self.placeholders
+			.iter()
+			.filter_map(|p| match p {
+				FormatPlaceholder::Positional { modifiers, .. } => {
+					Some(1 + modifiers.dynamic_arg_count())
+				}
+				FormatPlaceholder::Named { .. } => None,
+			})
+			.sum()
+	}
+
+	/// Get all named field names required.
+	pub fn named_fields(&self) -> Vec<&str> {
+		self.placeholders
+			.iter()
+			.filter_map(|p| match p {
+				FormatPlaceholder::Named { name, .. } => Some(name.as_str()),
+				FormatPlaceholder::Positional { .. } => None,
+			})
+			.collect()
+	}
+}
+
+/// Error encountered while parsing a format string.
+#[derive(Debug, Clone, PartialEq, Eq)] +pub enum FormatParseError { + /// Incomplete format specifier at end of string. + IncompleteSpecifier, + /// Unknown format specifier character. + UnknownSpecifier(char), + /// Missing closing paren in named placeholder. + UnclosedNamedPlaceholder, + /// Empty name in named placeholder. + EmptyName, + /// Mixed positional and named placeholders. + MixedPositionalAndNamed, +} + +/// Parse a format string and extract placeholder information. +pub fn parse_format_string(fmt: &str) -> Result { + let mut placeholders = Vec::new(); + let mut uses_named = false; + let mut uses_positional = false; + + let chars: Vec = fmt.chars().collect(); + let mut i = 0; + + while i < chars.len() { + if chars[i] != '%' { + i += 1; + continue; + } + + // Found a % + i += 1; + if i >= chars.len() { + return Err(FormatParseError::IncompleteSpecifier); + } + + // Check for %% + if chars[i] == '%' { + i += 1; + continue; + } + + // Check for named placeholder %(name) + if chars[i] == '(' { + i += 1; + let name_start = i; + + // Find closing paren + while i < chars.len() && chars[i] != ')' { + i += 1; + } + if i >= chars.len() { + return Err(FormatParseError::UnclosedNamedPlaceholder); + } + + let name: String = chars[name_start..i].iter().collect(); + if name.is_empty() { + return Err(FormatParseError::EmptyName); + } + + i += 1; // Skip ) + + // Parse flags, width, precision + let (modifiers, new_i) = parse_format_modifiers(&chars, i); + i = new_i; + + if i >= chars.len() { + return Err(FormatParseError::IncompleteSpecifier); + } + + let specifier = chars[i]; + let expected_type = specifier_to_type_kind(specifier)?; + + placeholders.push(FormatPlaceholder::Named { + name, + expected_type, + specifier, + modifiers, + }); + uses_named = true; + i += 1; + continue; + } + + // Positional placeholder - parse flags, width, precision + let (modifiers, new_i) = parse_format_modifiers(&chars, i); + i = new_i; + + if i >= chars.len() { + return 
Err(FormatParseError::IncompleteSpecifier); + } + + let specifier = chars[i]; + let expected_type = specifier_to_type_kind(specifier)?; + + placeholders.push(FormatPlaceholder::Positional { + expected_type, + specifier, + modifiers, + }); + uses_positional = true; + i += 1; + } + + // Check for mixed usage + if uses_named && uses_positional { + return Err(FormatParseError::MixedPositionalAndNamed); + } + + Ok(FormatSpec { + placeholders, + uses_named, + uses_positional, + }) +} + +/// Parse format modifiers (flags, width, precision) and return them with new index. +fn parse_format_modifiers(chars: &[char], mut i: usize) -> (FormatModifiers, usize) { + let mut modifiers = FormatModifiers::default(); + + // Parse flags: -, +, space, #, 0 + while i < chars.len() { + match chars[i] { + '-' => modifiers.flags.insert(FormatFlag::LeftJustify), + '+' => modifiers.flags.insert(FormatFlag::ShowSign), + ' ' => modifiers.flags.insert(FormatFlag::SpaceSign), + '#' => modifiers.flags.insert(FormatFlag::Alternate), + '0' => modifiers.flags.insert(FormatFlag::ZeroPad), + _ => break, + } + i += 1; + } + + // Parse width (digits or *) + if i < chars.len() && chars[i] == '*' { + modifiers.width = WidthSpec::Dynamic; + i += 1; + } else { + let width_start = i; + while i < chars.len() && chars[i].is_ascii_digit() { + i += 1; + } + if i > width_start { + let width_str: String = chars[width_start..i].iter().collect(); + if let Ok(width) = width_str.parse::() { + modifiers.width = WidthSpec::Fixed(width); + } + } + } + + // Parse precision (.digits or .*) + if i < chars.len() && chars[i] == '.' 
{ + i += 1; + if i < chars.len() && chars[i] == '*' { + modifiers.precision = PrecisionSpec::Dynamic; + i += 1; + } else { + let prec_start = i; + while i < chars.len() && chars[i].is_ascii_digit() { + i += 1; + } + if i > prec_start { + let prec_str: String = chars[prec_start..i].iter().collect(); + if let Ok(prec) = prec_str.parse::() { + modifiers.precision = PrecisionSpec::Fixed(prec); + } + } else { + // Just "." with no digits means precision 0 + modifiers.precision = PrecisionSpec::Fixed(0); + } + } + } + + (modifiers, i) +} + +/// Convert a format specifier character to its expected type kind. +fn specifier_to_type_kind(specifier: char) -> Result { + match specifier { + 's' => Ok(FormatTypeKind::Any), // toString is called + 'd' | 'i' | 'u' | 'o' | 'x' | 'X' | 'e' | 'E' | 'f' | 'F' | 'g' | 'G' => { + Ok(FormatTypeKind::Number) + } + 'c' => Ok(FormatTypeKind::NumberOrString), + _ => Err(FormatParseError::UnknownSpecifier(specifier)), + } +} + +#[cfg(test)] +mod tests { + use super::*; + + fn flags(values: &[FormatFlag]) -> FormatFlags { + values.iter().copied().collect() + } + + fn positional(specifier: char, expected_type: FormatTypeKind) -> FormatPlaceholder { + FormatPlaceholder::Positional { + expected_type, + specifier, + modifiers: FormatModifiers::default(), + } + } + + fn positional_with_mods( + specifier: char, + expected_type: FormatTypeKind, + modifiers: FormatModifiers, + ) -> FormatPlaceholder { + FormatPlaceholder::Positional { + expected_type, + specifier, + modifiers, + } + } + + fn named(name: &str, specifier: char, expected_type: FormatTypeKind) -> FormatPlaceholder { + FormatPlaceholder::Named { + name: name.to_string(), + expected_type, + specifier, + modifiers: FormatModifiers::default(), + } + } + + #[test] + fn test_simple_format_string() { + assert_eq!( + parse_format_string("Hello %s!"), + Ok(FormatSpec { + placeholders: vec![positional('s', FormatTypeKind::Any)], + uses_positional: true, + uses_named: false, + }) + ); + } + + 
#[test] + fn test_multiple_placeholders() { + assert_eq!( + parse_format_string("%s has %d apples"), + Ok(FormatSpec { + placeholders: vec![ + positional('s', FormatTypeKind::Any), + positional('d', FormatTypeKind::Number), + ], + uses_positional: true, + uses_named: false, + }) + ); + } + + #[test] + fn test_escaped_percent() { + assert_eq!( + parse_format_string("100%% complete"), + Ok(FormatSpec { + placeholders: vec![], + uses_positional: false, + uses_named: false, + }) + ); + } + + #[test] + fn test_named_placeholder() { + assert_eq!( + parse_format_string("Hello %(name)s!"), + Ok(FormatSpec { + placeholders: vec![named("name", 's', FormatTypeKind::Any)], + uses_positional: false, + uses_named: true, + }) + ); + } + + #[test] + fn test_format_with_width_precision() { + let mods = FormatModifiers { + width: WidthSpec::Fixed(10), + precision: PrecisionSpec::Fixed(2), + ..Default::default() + }; + assert_eq!( + parse_format_string("%10.2f"), + Ok(FormatSpec { + placeholders: vec![positional_with_mods('f', FormatTypeKind::Number, mods)], + uses_positional: true, + uses_named: false, + }) + ); + } + + #[test] + fn test_format_with_flags() { + let mods = FormatModifiers { + flags: flags(&[FormatFlag::LeftJustify, FormatFlag::ShowSign]), + width: WidthSpec::Fixed(10), + ..Default::default() + }; + assert_eq!( + parse_format_string("%-+10d"), + Ok(FormatSpec { + placeholders: vec![positional_with_mods('d', FormatTypeKind::Number, mods)], + uses_positional: true, + uses_named: false, + }) + ); + } + + #[test] + fn test_format_with_all_flags() { + let mods = FormatModifiers { + flags: flags(&[ + FormatFlag::LeftJustify, + FormatFlag::ShowSign, + FormatFlag::SpaceSign, + FormatFlag::Alternate, + FormatFlag::ZeroPad, + ]), + ..Default::default() + }; + assert_eq!( + parse_format_string("%-+ #0d"), + Ok(FormatSpec { + placeholders: vec![positional_with_mods('d', FormatTypeKind::Number, mods)], + uses_positional: true, + uses_named: false, + }) + ); + } + + #[test] + fn 
test_dynamic_width() { + let mods = FormatModifiers { + width: WidthSpec::Dynamic, + ..Default::default() + }; + let spec = parse_format_string("%*s").unwrap(); + assert_eq!( + spec, + FormatSpec { + placeholders: vec![positional_with_mods('s', FormatTypeKind::Any, mods)], + uses_positional: true, + uses_named: false, + } + ); + // Dynamic width consumes one extra arg + assert_eq!(spec.positional_arg_count(), 2); + } + + #[test] + fn test_dynamic_precision() { + let mods = FormatModifiers { + precision: PrecisionSpec::Dynamic, + ..Default::default() + }; + let spec = parse_format_string("%.*f").unwrap(); + assert_eq!( + spec, + FormatSpec { + placeholders: vec![positional_with_mods('f', FormatTypeKind::Number, mods)], + uses_positional: true, + uses_named: false, + } + ); + // Dynamic precision consumes one extra arg + assert_eq!(spec.positional_arg_count(), 2); + } + + #[test] + fn test_dynamic_width_and_precision() { + let mods = FormatModifiers { + width: WidthSpec::Dynamic, + precision: PrecisionSpec::Dynamic, + ..Default::default() + }; + let spec = parse_format_string("%*.*f").unwrap(); + assert_eq!( + spec, + FormatSpec { + placeholders: vec![positional_with_mods('f', FormatTypeKind::Number, mods)], + uses_positional: true, + uses_named: false, + } + ); + // Both dynamic width and precision consume args + assert_eq!(spec.positional_arg_count(), 3); + } + + #[test] + fn test_precision_only() { + let mods = FormatModifiers { + precision: PrecisionSpec::Fixed(5), + ..Default::default() + }; + assert_eq!( + parse_format_string("%.5f"), + Ok(FormatSpec { + placeholders: vec![positional_with_mods('f', FormatTypeKind::Number, mods)], + uses_positional: true, + uses_named: false, + }) + ); + } + + #[test] + fn test_precision_zero() { + let mods = FormatModifiers { + precision: PrecisionSpec::Fixed(0), + ..Default::default() + }; + assert_eq!( + parse_format_string("%.f"), + Ok(FormatSpec { + placeholders: vec![positional_with_mods('f', FormatTypeKind::Number, 
mods)], + uses_positional: true, + uses_named: false, + }) + ); + } + + #[test] + fn test_incomplete_specifier() { + assert_eq!( + parse_format_string("Hello %"), + Err(FormatParseError::IncompleteSpecifier) + ); + } + + #[test] + fn test_unknown_specifier() { + assert_eq!( + parse_format_string("%z"), + Err(FormatParseError::UnknownSpecifier('z')) + ); + } + + #[test] + fn test_unclosed_named() { + assert_eq!( + parse_format_string("%(name"), + Err(FormatParseError::UnclosedNamedPlaceholder) + ); + } + + #[test] + fn test_empty_name() { + assert_eq!( + parse_format_string("%()s"), + Err(FormatParseError::EmptyName) + ); + } + + #[test] + fn test_mixed_positional_and_named() { + assert_eq!( + parse_format_string("%s %(name)s"), + Err(FormatParseError::MixedPositionalAndNamed) + ); + } + + #[test] + fn test_all_numeric_specifiers() { + for spec_char in ['d', 'i', 'u', 'o', 'x', 'X', 'e', 'E', 'f', 'F', 'g', 'G'] { + let fmt = format!("%{spec_char}"); + assert_eq!( + parse_format_string(&fmt), + Ok(FormatSpec { + placeholders: vec![positional(spec_char, FormatTypeKind::Number)], + uses_positional: true, + uses_named: false, + }), + "specifier {spec_char}" + ); + } + } + + #[test] + fn test_complex_format_string() { + // Real-world example: "%-20s: %+10.2f%%" + let mods1 = FormatModifiers { + flags: flags(&[FormatFlag::LeftJustify]), + width: WidthSpec::Fixed(20), + ..Default::default() + }; + let mods2 = FormatModifiers { + flags: flags(&[FormatFlag::ShowSign]), + width: WidthSpec::Fixed(10), + precision: PrecisionSpec::Fixed(2), + }; + assert_eq!( + parse_format_string("%-20s: %+10.2f%%"), + Ok(FormatSpec { + placeholders: vec![ + positional_with_mods('s', FormatTypeKind::Any, mods1), + positional_with_mods('f', FormatTypeKind::Number, mods2), + ], + uses_positional: true, + uses_named: false, + }) + ); + } +} diff --git a/crates/jrsonnet-lsp-check/src/lib.rs b/crates/jrsonnet-lsp-check/src/lib.rs new file mode 100644 index 00000000..b37a1da5 --- /dev/null +++ 
b/crates/jrsonnet-lsp-check/src/lib.rs @@ -0,0 +1,17 @@ +//! Type checking and linting for Jsonnet LSP. +//! +//! Provides static analysis that runs after type inference: +//! - Type checking: detect type mismatches before evaluation +//! - Linting: detect unused variables, shadowing, unreachable code +//! - Format checking: validate format strings in std.format and % +//! - Unified diagnostics: consistent error codes and formatting + +pub mod diagnostic; +pub mod format_check; +pub mod lint; +pub mod type_check; + +pub use diagnostic::{Diagnostic, DiagnosticCollector, ErrorCode, RelatedInfo, Severity}; +pub use format_check::{parse_format_string, FormatParseError, FormatPlaceholder, FormatTypeKind}; +pub use lint::{lint, LintConfig}; +pub use type_check::{check_types, TypeCheckConfig, TypeError, TypeErrorKind}; diff --git a/crates/jrsonnet-lsp-check/src/lint.rs b/crates/jrsonnet-lsp-check/src/lint.rs new file mode 100644 index 00000000..d899ce32 --- /dev/null +++ b/crates/jrsonnet-lsp-check/src/lint.rs @@ -0,0 +1,1510 @@ +//! Lint diagnostics for Jsonnet code. +//! +//! Provides static analysis warnings that don't require evaluation: +//! - Unused variables +//! - Shadowed variables +//! - Unreachable code (via type inference) + +use jrsonnet_lsp_document::{to_lsp_range, Document, LineIndex}; +use jrsonnet_lsp_inference::{infer_expr_ty, TypeAnalysis, TypeEnv}; +use jrsonnet_lsp_types::Ty; +use jrsonnet_rowan_parser::{ + nodes::{ + Bind, Destruct, DestructArrayPart, Expr, ExprBase, ExprFunction, ExprObject, ExprVar, + FieldName, ForSpec, Member, ObjBody, Param, Stmt, StmtLocal, + }, + AstNode, SyntaxKind, SyntaxNode, +}; +use lsp_types::{ + Diagnostic, DiagnosticRelatedInformation, DiagnosticSeverity, Location, NumberOrString, Range, +}; +use rowan::TextRange; +use rustc_hash::{FxHashMap, FxHashSet}; + +/// Information about a variable in scope, used for related diagnostics. 
+#[derive(Debug, Clone)] +struct ScopeVar { + range: TextRange, +} + +/// A variable definition with its location and name. +#[derive(Debug, Clone)] +struct VarDef { + name: String, + range: TextRange, + /// Whether the variable name starts with underscore (intentionally unused). + is_underscore_prefixed: bool, +} + +/// Lint configuration options. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum LintRule { + UnusedVariables, + UnreachableCode, + ShadowedVariables, + TypeErrors, + DuplicateFields, + DuplicateParams, +} + +impl LintRule { + const fn bit(self) -> u8 { + match self { + Self::UnusedVariables => 1 << 0, + Self::UnreachableCode => 1 << 1, + Self::ShadowedVariables => 1 << 2, + Self::TypeErrors => 1 << 3, + Self::DuplicateFields => 1 << 4, + Self::DuplicateParams => 1 << 5, + } + } +} + +#[derive(Debug, Clone, Copy, Default, PartialEq, Eq)] +pub struct LintConfig { + enabled: u8, +} + +impl LintConfig { + /// Enable a single lint rule in this config. + #[must_use] + pub fn with_enabled(mut self, rule: LintRule) -> Self { + self.enable(rule); + self + } + + /// Enable a single lint rule in this config. + pub fn enable(&mut self, rule: LintRule) { + self.enabled |= rule.bit(); + } + + /// Check whether a lint rule is enabled. + #[must_use] + pub fn is_enabled(self, rule: LintRule) -> bool { + self.enabled & rule.bit() != 0 + } + + /// Create a config with all lints enabled. + pub fn all() -> Self { + Self { + enabled: LintRule::UnusedVariables.bit() + | LintRule::UnreachableCode.bit() + | LintRule::ShadowedVariables.bit() + | LintRule::TypeErrors.bit() + | LintRule::DuplicateFields.bit() + | LintRule::DuplicateParams.bit(), + } + } +} + +/// Run lint checks on a document. +/// +/// The `uri` parameter is used to create related location information in diagnostics. 
+pub fn lint( + document: &Document, + analysis: &TypeAnalysis, + config: &LintConfig, + uri: &lsp_types::Uri, +) -> Vec { + let mut diagnostics = Vec::new(); + let text = document.text(); + let line_index = document.line_index(); + let ast = document.ast(); + + if let Some(expr) = ast.expr() { + let ctx = LintContext::new(text, line_index, uri); + + if config.is_enabled(LintRule::UnusedVariables) { + check_unused_variables(&expr, &ctx, &mut diagnostics); + } + + if config.is_enabled(LintRule::ShadowedVariables) { + let mut scope_stack: Vec> = vec![FxHashMap::default()]; + check_shadowed_variables(expr.syntax(), &ctx, &mut scope_stack, &mut diagnostics); + } + + if config.is_enabled(LintRule::UnreachableCode) { + let mut env = TypeEnv::new_default(); + check_unreachable_code(&expr, &ctx, &mut env, &mut diagnostics); + } + + if config.is_enabled(LintRule::TypeErrors) { + let type_check_config = super::type_check::TypeCheckConfig::all(); + let type_errors = + super::type_check::check_types(document, analysis, &type_check_config); + diagnostics.extend( + type_errors + .into_iter() + .map(|e| e.to_diagnostic(line_index, text, analysis)), + ); + } + + if config.is_enabled(LintRule::DuplicateFields) { + check_duplicate_fields(expr.syntax(), &ctx, &mut diagnostics); + } + + if config.is_enabled(LintRule::DuplicateParams) { + check_duplicate_params(expr.syntax(), &ctx, &mut diagnostics); + } + } + + diagnostics +} + +/// Context for lint checking. 
+struct LintContext<'a> { + text: &'a str, + line_index: &'a LineIndex, + uri: &'a lsp_types::Uri, +} + +impl<'a> LintContext<'a> { + fn new(text: &'a str, line_index: &'a LineIndex, uri: &'a lsp_types::Uri) -> Self { + Self { + text, + line_index, + uri, + } + } + + fn to_lsp_range(&self, range: TextRange) -> Range { + to_lsp_range(range, self.line_index, self.text) + } + + fn make_diagnostic( + &self, + range: TextRange, + message: String, + severity: DiagnosticSeverity, + code: &str, + ) -> Diagnostic { + Diagnostic { + range: self.to_lsp_range(range), + severity: Some(severity), + code: Some(NumberOrString::String(code.to_string())), + code_description: None, + source: Some("jrsonnet-lint".to_string()), + message, + related_information: None, + tags: None, + data: None, + } + } + + fn make_diagnostic_with_related( + &self, + range: TextRange, + message: String, + severity: DiagnosticSeverity, + code: &str, + related_range: TextRange, + related_message: String, + ) -> Diagnostic { + Diagnostic { + range: self.to_lsp_range(range), + severity: Some(severity), + code: Some(NumberOrString::String(code.to_string())), + code_description: None, + source: Some("jrsonnet-lint".to_string()), + message, + related_information: Some(vec![DiagnosticRelatedInformation { + location: Location { + uri: self.uri.clone(), + range: self.to_lsp_range(related_range), + }, + message: related_message, + }]), + tags: None, + data: None, + } + } +} + +/// Check for unused variables in an expression. 
+fn check_unused_variables(expr: &Expr, ctx: &LintContext, diagnostics: &mut Vec) { + let mut definitions: FxHashMap> = FxHashMap::default(); + let mut references: FxHashSet = FxHashSet::default(); + + // Collect all definitions and references + collect_definitions_and_references(expr.syntax(), &mut definitions, &mut references); + + // Report unused variables + for (name, defs) in definitions { + // Skip if any reference exists for this name + if references.contains(&name) { + continue; + } + + for def in defs { + // Skip underscore-prefixed variables (intentionally unused) + if def.is_underscore_prefixed { + continue; + } + + diagnostics.push(ctx.make_diagnostic( + def.range, + format!( + "unused variable: `{}`; prefix with `_` to silence this warning", + def.name + ), + DiagnosticSeverity::WARNING, + "unused-variable", + )); + } + } +} + +/// Check for shadowed variables in nested scopes. +fn check_shadowed_variables( + node: &SyntaxNode, + ctx: &LintContext, + scope_stack: &mut Vec>, + diagnostics: &mut Vec, +) { + // Check if this node introduces a new scope + // - Functions and for specs create new scopes for their parameters + // - Nested EXPR nodes (sub-expressions) create new scopes for their locals + let introduces_scope = match node.kind() { + SyntaxKind::EXPR_FUNCTION | SyntaxKind::BIND_FUNCTION | SyntaxKind::FOR_SPEC => true, + SyntaxKind::EXPR => { + // Only create a new scope if this EXPR is not the root (has a parent EXPR) + // This handles cases like `local y = (local x = 2; x)` where the inner + // expression creates a new scope + node.parent() + .map(|p| p.kind() != SyntaxKind::SOURCE_FILE) + .unwrap_or(false) + } + _ => false, + }; + + if introduces_scope { + scope_stack.push(FxHashMap::default()); + } + + // Process definitions at this node + match node.kind() { + SyntaxKind::STMT_LOCAL => { + if let Some(stmt_local) = StmtLocal::cast(node.clone()) { + for bind in stmt_local.binds() { + check_bind_for_shadow(&bind, ctx, scope_stack, 
diagnostics); + } + } + } + SyntaxKind::EXPR_FUNCTION => { + if let Some(func) = jrsonnet_rowan_parser::nodes::ExprFunction::cast(node.clone()) { + if let Some(params) = func.params_desc() { + for param in params.params() { + check_param_for_shadow(¶m, ctx, scope_stack, diagnostics); + } + } + } + } + SyntaxKind::BIND_FUNCTION => { + if let Some(func) = jrsonnet_rowan_parser::nodes::BindFunction::cast(node.clone()) { + if let Some(params) = func.params() { + for param in params.params() { + check_param_for_shadow(¶m, ctx, scope_stack, diagnostics); + } + } + } + } + SyntaxKind::FOR_SPEC => { + if let Some(for_spec) = ForSpec::cast(node.clone()) { + if let Some(destruct) = for_spec.bind() { + check_destruct_for_shadow(&destruct, ctx, scope_stack, diagnostics); + } + } + } + _ => {} + } + + // Recurse into children + for child in node.children() { + check_shadowed_variables(&child, ctx, scope_stack, diagnostics); + } + + if introduces_scope { + scope_stack.pop(); + } +} + +/// Check if a name shadows a variable in an outer scope. 
+fn check_for_shadow( + name: &str, + range: TextRange, + ctx: &LintContext, + scope_stack: &mut Vec>, + diagnostics: &mut Vec, +) { + // Skip underscore-prefixed variables + if name.starts_with('_') { + if let Some(current_scope) = scope_stack.last_mut() { + current_scope.insert(name.to_string(), ScopeVar { range }); + } + return; + } + + // Check outer scopes for shadowing (skip the current scope) + let num_scopes = scope_stack.len(); + for scope in scope_stack.iter().take(num_scopes.saturating_sub(1)) { + if let Some(original) = scope.get(name) { + diagnostics.push(ctx.make_diagnostic_with_related( + range, + format!("variable `{}` shadows a variable from an outer scope", name), + DiagnosticSeverity::WARNING, + "shadowed-variable", + original.range, + format!("`{}` originally defined here", name), + )); + break; + } + } + + // Add to current scope + if let Some(current_scope) = scope_stack.last_mut() { + current_scope.insert(name.to_string(), ScopeVar { range }); + } +} + +/// Check a Bind node for shadowing. +fn check_bind_for_shadow( + bind: &Bind, + ctx: &LintContext, + scope_stack: &mut Vec>, + diagnostics: &mut Vec, +) { + match bind { + Bind::BindDestruct(bd) => { + if let Some(destruct) = bd.into() { + check_destruct_for_shadow(&destruct, ctx, scope_stack, diagnostics); + } + } + Bind::BindFunction(bf) => { + if let Some(name_node) = bf.name() { + if let Some(ident) = name_node.ident_lit() { + let name = ident.text(); + check_for_shadow( + name, + name_node.syntax().text_range(), + ctx, + scope_stack, + diagnostics, + ); + } + } + } + } +} + +/// Check a Destruct node for shadowing. 
+fn check_destruct_for_shadow( + destruct: &Destruct, + ctx: &LintContext, + scope_stack: &mut Vec>, + diagnostics: &mut Vec, +) { + match destruct { + Destruct::DestructFull(full) => { + if let Some(name_node) = full.name() { + if let Some(ident) = name_node.ident_lit() { + let name = ident.text(); + check_for_shadow( + name, + name_node.syntax().text_range(), + ctx, + scope_stack, + diagnostics, + ); + } + } + } + Destruct::DestructArray(arr) => { + for elem in arr.destruct_array_parts() { + if let DestructArrayPart::DestructArrayElement(array_elem) = elem { + if let Some(destruct) = array_elem.destruct() { + check_destruct_for_shadow(&destruct, ctx, scope_stack, diagnostics); + } + } + } + } + Destruct::DestructObject(obj) => { + for field in obj.destruct_object_fields() { + if let Some(destruct) = field.destruct() { + check_destruct_for_shadow(&destruct, ctx, scope_stack, diagnostics); + } + } + } + Destruct::DestructSkip(_) => {} + } +} + +/// Check a Param node for shadowing. +fn check_param_for_shadow( + param: &Param, + ctx: &LintContext, + scope_stack: &mut Vec>, + diagnostics: &mut Vec, +) { + if let Some(destruct) = param.destruct() { + check_destruct_for_shadow(&destruct, ctx, scope_stack, diagnostics); + } +} + +/// Extract variable name from a simple BindDestruct (not array/object destructuring). +/// +/// Returns the variable name and a reference to the BindDestruct for value access. +fn extract_simple_bind_name( + bind: &Bind, +) -> Option<(String, &jrsonnet_rowan_parser::nodes::BindDestruct)> { + let Bind::BindDestruct(bd) = bind else { + return None; + }; + let destruct: Destruct = bd.into()?; + let Destruct::DestructFull(full) = destruct else { + return None; + }; + let name = full.name()?.ident_lit()?.text().to_string(); + Some((name, bd)) +} + +/// Check for unreachable code in an expression. 
+/// +/// Unreachable code patterns detected: +/// - Code after an `assert` whose condition is a divergent expression (type Never) +/// - Code after an `assert false` +fn check_unreachable_code( + expr: &Expr, + ctx: &LintContext, + env: &mut TypeEnv, + diagnostics: &mut Vec, +) { + // Check statements (locals and asserts) that precede the body + for stmt in expr.stmts() { + match &stmt { + Stmt::StmtAssert(assert_stmt) => { + // Check if the assert condition diverges + if let Some(assertion) = assert_stmt.assertion() { + if let Some(cond) = assertion.condition() { + let cond_ty = infer_expr_ty(&cond, env); + if cond_ty.is_never() { + // The assert condition itself diverges, so everything after is unreachable + // Find the range of everything after this assert + if let Some(body_range) = find_code_after_stmt(&stmt, expr) { + diagnostics.push( + ctx.make_diagnostic_with_related( + body_range, + "unreachable code: assert condition always diverges" + .to_string(), + DiagnosticSeverity::WARNING, + "unreachable-code", + cond.syntax().text_range(), + "divergent expression here".to_string(), + ), + ); + return; // Don't report more unreachable code + } + } + // Also check if condition is statically `false` + if is_statically_false(&cond) { + if let Some(body_range) = find_code_after_stmt(&stmt, expr) { + diagnostics.push( + ctx.make_diagnostic_with_related( + body_range, + "unreachable code: assert condition is always false" + .to_string(), + DiagnosticSeverity::WARNING, + "unreachable-code", + cond.syntax().text_range(), + "condition is `false`".to_string(), + ), + ); + return; + } + } + } + } + } + Stmt::StmtLocal(local_stmt) => { + // Process local bindings for the type environment + for bind in local_stmt.binds() { + if let Some((name, bd)) = extract_simple_bind_name(&bind) { + let ty = bd + .value() + .map(|v| infer_expr_ty(&v, env)) + .unwrap_or(Ty::ANY); + env.define_ty(name, ty); + } + } + } + } + } + + // Recursively check the body expression and nested 
expressions + if let Some(base) = expr.expr_base() { + check_unreachable_in_base(&base, ctx, env, diagnostics); + } +} + +/// Check if an expression is statically `false`. +fn is_statically_false(expr: &Expr) -> bool { + if let Some(base) = expr.expr_base() { + if let ExprBase::ExprLiteral(lit) = base { + if let Some(literal) = lit.literal() { + return matches!( + literal.kind(), + jrsonnet_rowan_parser::nodes::LiteralKind::FalseKw + ); + } + } + } + false +} + +/// Find the range of code after a statement in an expression. +fn find_code_after_stmt(stmt: &Stmt, expr: &Expr) -> Option { + let stmt_end = stmt.syntax().text_range().end(); + let expr_end = expr.syntax().text_range().end(); + + // Check if there's anything after this statement + if stmt_end < expr_end { + // Find the start of the next significant content + let next_start = stmt_end; + Some(TextRange::new(next_start, expr_end)) + } else { + None + } +} + +/// Check for unreachable code in a base expression. +fn check_unreachable_in_base( + base: &ExprBase, + ctx: &LintContext, + env: &mut TypeEnv, + diagnostics: &mut Vec, +) { + match base { + ExprBase::ExprBinary(binary) => { + // Check if left operand is divergent + if let Some(lhs) = binary.lhs() { + let lhs_ty = infer_expr_ty(&lhs, env); + if lhs_ty.is_never() { + // Right operand is unreachable + if let Some(rhs) = binary.rhs() { + diagnostics.push(ctx.make_diagnostic_with_related( + rhs.syntax().text_range(), + "unreachable code: left operand always diverges".to_string(), + DiagnosticSeverity::WARNING, + "unreachable-code", + lhs.syntax().text_range(), + "divergent expression here".to_string(), + )); + return; + } + } + // Recursively check left operand + check_unreachable_code(&lhs, ctx, env, diagnostics); + } + // Recursively check right operand + if let Some(rhs) = binary.rhs() { + check_unreachable_code(&rhs, ctx, env, diagnostics); + } + } + ExprBase::ExprIfThenElse(if_expr) => { + // Check condition for divergence + if let Some(cond) = 
if_expr.cond() { + let cond_ty = infer_expr_ty(&cond, env); + if cond_ty.is_never() { + // Both branches are unreachable + if let Some(then_clause) = if_expr.then() { + if let Some(then_expr) = then_clause.expr() { + diagnostics.push(ctx.make_diagnostic_with_related( + then_expr.syntax().text_range(), + "unreachable code: condition always diverges".to_string(), + DiagnosticSeverity::WARNING, + "unreachable-code", + cond.syntax().text_range(), + "divergent expression here".to_string(), + )); + } + } + return; + } + // Recursively check condition + check_unreachable_code(&cond, ctx, env, diagnostics); + } + // Recursively check branches + if let Some(then_clause) = if_expr.then() { + if let Some(then_expr) = then_clause.expr() { + check_unreachable_code(&then_expr, ctx, env, diagnostics); + } + } + if let Some(else_clause) = if_expr.else_() { + if let Some(else_expr) = else_clause.expr() { + check_unreachable_code(&else_expr, ctx, env, diagnostics); + } + } + } + ExprBase::ExprParened(parens) => { + if let Some(inner) = parens.expr() { + check_unreachable_code(&inner, ctx, env, diagnostics); + } + } + ExprBase::ExprFunction(func) => { + // Check function body + if let Some(body) = func.expr() { + env.push_scope(); + check_unreachable_code(&body, ctx, env, diagnostics); + env.pop_scope(); + } + } + ExprBase::ExprArray(arr) => { + for elem in arr.exprs() { + check_unreachable_code(&elem, ctx, env, diagnostics); + } + } + ExprBase::ExprObject(obj) => { + if let Some(body) = obj.obj_body() { + check_unreachable_in_obj_body(&body, ctx, env, diagnostics); + } + } + _ => {} + } +} + +/// Check for unreachable code in an object body. 
+fn check_unreachable_in_obj_body( + body: &jrsonnet_rowan_parser::nodes::ObjBody, + ctx: &LintContext, + env: &mut TypeEnv, + diagnostics: &mut Vec, +) { + use jrsonnet_rowan_parser::nodes::{Member, ObjBody}; + if let ObjBody::ObjBodyMemberList(members) = body { + for member in members.members() { + match member { + Member::MemberFieldNormal(field) => { + if let Some(expr) = field.expr() { + check_unreachable_code(&expr, ctx, env, diagnostics); + } + } + Member::MemberFieldMethod(method) => { + if let Some(expr) = method.expr() { + env.push_scope(); + check_unreachable_code(&expr, ctx, env, diagnostics); + env.pop_scope(); + } + } + Member::MemberAssertStmt(assert_member) => { + if let Some(assertion) = assert_member.assertion() { + if let Some(cond) = assertion.condition() { + check_unreachable_code(&cond, ctx, env, diagnostics); + } + } + } + Member::MemberBindStmt(_) => {} + } + } + } +} + +/// Collect variable definitions and references from the AST. +fn collect_definitions_and_references( + node: &SyntaxNode, + definitions: &mut FxHashMap>, + references: &mut FxHashSet, +) { + // Process this node + match node.kind() { + SyntaxKind::STMT_LOCAL => { + if let Some(stmt_local) = StmtLocal::cast(node.clone()) { + for bind in stmt_local.binds() { + collect_bind_definition(&bind, definitions); + } + } + } + SyntaxKind::EXPR_FUNCTION => { + // Function parameters + if let Some(func) = jrsonnet_rowan_parser::nodes::ExprFunction::cast(node.clone()) { + if let Some(params) = func.params_desc() { + for param in params.params() { + collect_param_definition(¶m, definitions); + } + } + } + } + SyntaxKind::BIND_FUNCTION => { + // Function binding parameters + if let Some(func) = jrsonnet_rowan_parser::nodes::BindFunction::cast(node.clone()) { + if let Some(params) = func.params() { + for param in params.params() { + collect_param_definition(¶m, definitions); + } + } + } + } + SyntaxKind::FOR_SPEC => { + // For comprehension variable + if let Some(for_spec) = 
ForSpec::cast(node.clone()) { + if let Some(destruct) = for_spec.bind() { + collect_destruct_definition(&destruct, definitions); + } + } + } + SyntaxKind::EXPR_VAR => { + if let Some(var) = ExprVar::cast(node.clone()) { + if let Some(name) = var.name().and_then(|n| n.ident_lit()) { + let text = name.text().to_string(); + if text != "std" { + references.insert(text); + } + } + } + } + _ => {} + } + + // Recurse into children + for child in node.children() { + collect_definitions_and_references(&child, definitions, references); + } +} + +/// Collect definition from a Bind node. +fn collect_bind_definition(bind: &Bind, definitions: &mut FxHashMap>) { + match bind { + Bind::BindDestruct(bd) => { + if let Some(destruct) = bd.into() { + collect_destruct_definition(&destruct, definitions); + } + } + Bind::BindFunction(bf) => { + if let Some(name_node) = bf.name() { + if let Some(ident) = name_node.ident_lit() { + let name = ident.text().to_string(); + let is_underscore_prefixed = name.starts_with('_'); + definitions.entry(name.clone()).or_default().push(VarDef { + name, + range: name_node.syntax().text_range(), + is_underscore_prefixed, + }); + } + } + } + } +} + +/// Collect definition from a Destruct node. 
+fn collect_destruct_definition( + destruct: &Destruct, + definitions: &mut FxHashMap>, +) { + match destruct { + Destruct::DestructFull(full) => { + if let Some(name_node) = full.name() { + if let Some(ident) = name_node.ident_lit() { + let name = ident.text().to_string(); + let is_underscore_prefixed = name.starts_with('_'); + definitions.entry(name.clone()).or_default().push(VarDef { + name, + range: name_node.syntax().text_range(), + is_underscore_prefixed, + }); + } + } + } + Destruct::DestructArray(arr) => { + // Array destructuring: [a, b, c] + for elem in arr.destruct_array_parts() { + if let DestructArrayPart::DestructArrayElement(array_elem) = elem { + if let Some(destruct) = array_elem.destruct() { + collect_destruct_definition(&destruct, definitions); + } + } + } + } + Destruct::DestructObject(obj) => { + // Object destructuring: {a, b, c} + for field in obj.destruct_object_fields() { + if let Some(destruct) = field.destruct() { + collect_destruct_definition(&destruct, definitions); + } + } + } + Destruct::DestructSkip(_) => { + // Skip patterns don't define variables + } + } +} + +/// Collect definition from a Param node. +fn collect_param_definition(param: &Param, definitions: &mut FxHashMap>) { + if let Some(destruct) = param.destruct() { + collect_destruct_definition(&destruct, definitions); + } +} + +/// Check for duplicate object fields in the entire AST. +fn check_duplicate_fields(node: &SyntaxNode, ctx: &LintContext, diagnostics: &mut Vec) { + // Find all object expressions + for child in node.descendants() { + if child.kind() == SyntaxKind::EXPR_OBJECT { + if let Some(obj) = ExprObject::cast(child) { + check_object_for_duplicate_fields(&obj, ctx, diagnostics); + } + } + } +} + +/// Check a single object expression for duplicate fields. 
+fn check_object_for_duplicate_fields( + obj: &ExprObject, + ctx: &LintContext, + diagnostics: &mut Vec, +) { + let mut seen: FxHashMap = FxHashMap::default(); + + let Some(obj_body) = obj.obj_body() else { + return; + }; + + let ObjBody::ObjBodyMemberList(members) = obj_body else { + // Object comprehension - can't have static duplicate fields + return; + }; + + for member in members.members() { + let field_name = match &member { + Member::MemberBindStmt(bind_stmt) => { + // { local x = value } - object-local binding + extract_bind_name(bind_stmt.obj_local().and_then(|ol| ol.bind())) + } + Member::MemberFieldNormal(field) => { + // { field: value } or { field:: value } + field.field_name().and_then(extract_static_field_name) + } + Member::MemberFieldMethod(method) => { + // { method(...): value } + method.field_name().and_then(extract_static_field_name) + } + Member::MemberAssertStmt(_) => None, // assert doesn't define a field + }; + + if let Some(name) = field_name { + let range = member.syntax().text_range(); + if let Some(first_range) = seen.get(&name) { + diagnostics.push(Diagnostic { + range: ctx.to_lsp_range(range), + severity: Some(DiagnosticSeverity::WARNING), + code: Some(NumberOrString::String("duplicate-field".to_string())), + source: Some("jrsonnet-lsp".to_string()), + message: format!("duplicate field `{}`", name), + related_information: Some(vec![DiagnosticRelatedInformation { + location: Location { + uri: ctx.uri.clone(), + range: ctx.to_lsp_range(*first_range), + }, + message: "first definition here".to_string(), + }]), + ..Default::default() + }); + } else { + seen.insert(name, range); + } + } + } +} + +/// Extract a name from an optional Bind node. 
+fn extract_bind_name(bind: Option) -> Option { + let bind = bind?; + match bind { + Bind::BindDestruct(bd) => { + // Use BindDestruct::into to get Option + // (note: calling bd.into() directly can be ambiguous with Into trait) + let destruct = jrsonnet_rowan_parser::nodes::BindDestruct::into(&bd)?; + if let Destruct::DestructFull(full) = destruct { + full.name() + .and_then(|n| n.ident_lit()) + .map(|t| t.text().to_string()) + } else { + None + } + } + Bind::BindFunction(bf) => bf + .name() + .and_then(|n| n.ident_lit()) + .map(|t| t.text().to_string()), + } +} + +/// Extract a static field name from a FieldName node. +fn extract_static_field_name(field_name: FieldName) -> Option { + match field_name { + FieldName::FieldNameFixed(fixed) => { + // FieldNameFixed has id() for identifier and text() for string literals + fixed + .id() + .and_then(|n| n.ident_lit()) + .map(|t| t.text().to_string()) + } + FieldName::FieldNameDynamic(_) => None, // Dynamic field names can't be statically checked + } +} + +/// Check for duplicate function parameters in the entire AST. +fn check_duplicate_params(node: &SyntaxNode, ctx: &LintContext, diagnostics: &mut Vec) { + // Find all function expressions + for child in node.descendants() { + if child.kind() == SyntaxKind::EXPR_FUNCTION { + if let Some(func) = ExprFunction::cast(child) { + check_function_for_duplicate_params(&func, ctx, diagnostics); + } + } + } +} + +/// Check a single function expression for duplicate parameters. 
+fn check_function_for_duplicate_params( + func: &ExprFunction, + ctx: &LintContext, + diagnostics: &mut Vec, +) { + let Some(params_desc) = func.params_desc() else { + return; + }; + + let mut seen: FxHashMap = FxHashMap::default(); + + for param in params_desc.params() { + // Extract parameter name from destruct + let param_name = param.destruct().and_then(|d| match d { + Destruct::DestructFull(full) => full + .name() + .and_then(|n| n.ident_lit()) + .map(|t| t.text().to_string()), + _ => None, // Array/object destructuring is more complex + }); + + if let Some(name) = param_name { + let range = param.syntax().text_range(); + if let Some(first_range) = seen.get(&name) { + diagnostics.push(Diagnostic { + range: ctx.to_lsp_range(range), + severity: Some(DiagnosticSeverity::ERROR), + code: Some(NumberOrString::String("duplicate-param".to_string())), + source: Some("jrsonnet-lsp".to_string()), + message: format!("duplicate parameter `{}`", name), + related_information: Some(vec![DiagnosticRelatedInformation { + location: Location { + uri: ctx.uri.clone(), + range: ctx.to_lsp_range(*first_range), + }, + message: "first definition here".to_string(), + }]), + ..Default::default() + }); + } else { + seen.insert(name, range); + } + } + } +} + +#[cfg(test)] +mod tests { + use jrsonnet_lsp_document::DocVersion; + use lsp_types::{Position, Range, Uri}; + + use super::*; + + fn test_uri() -> Uri { + "file:///test.jsonnet".parse().unwrap() + } + + #[derive(Debug, Clone, Copy, PartialEq, Eq)] + struct Span { + start_line: u32, + start_char: u32, + end_line: u32, + end_char: u32, + } + + impl Span { + const fn new(start_line: u32, start_char: u32, end_line: u32, end_char: u32) -> Self { + Self { + start_line, + start_char, + end_line, + end_char, + } + } + + fn to_range(self) -> Range { + Range { + start: Position { + line: self.start_line, + character: self.start_char, + }, + end: Position { + line: self.end_line, + character: self.end_char, + }, + } + } + } + + const fn 
span(start_line: u32, start_char: u32, end_line: u32, end_char: u32) -> Span { + Span::new(start_line, start_char, end_line, end_char) + } + + fn make_unused_var_diagnostic( + start_line: u32, + start_char: u32, + end_line: u32, + end_char: u32, + name: &str, + ) -> Diagnostic { + Diagnostic { + range: Range { + start: Position { + line: start_line, + character: start_char, + }, + end: Position { + line: end_line, + character: end_char, + }, + }, + severity: Some(DiagnosticSeverity::WARNING), + code: Some(NumberOrString::String("unused-variable".to_string())), + code_description: None, + source: Some("jrsonnet-lint".to_string()), + message: format!( + "unused variable: `{}`; prefix with `_` to silence this warning", + name + ), + related_information: None, + tags: None, + data: None, + } + } + + #[test] + fn test_unused_variable_detected() { + let code = "local x = 1; local y = 2; y"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let config = LintConfig::default().with_enabled(LintRule::UnusedVariables); + + assert_eq!( + lint(&doc, &TypeAnalysis::analyze(&doc), &config, &test_uri()), + vec![make_unused_var_diagnostic(0, 6, 0, 7, "x")] + ); + } + + #[test] + fn test_used_variable_not_flagged() { + let code = "local x = 1; x + 1"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let config = LintConfig::default().with_enabled(LintRule::UnusedVariables); + + assert_eq!( + lint(&doc, &TypeAnalysis::analyze(&doc), &config, &test_uri()), + vec![] + ); + } + + #[test] + fn test_underscore_prefixed_variable_not_flagged() { + let code = "local _unused = 1; local used = 2; used"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let config = LintConfig::default().with_enabled(LintRule::UnusedVariables); + + // _unused should not be flagged (intentionally unused) + assert_eq!( + lint(&doc, &TypeAnalysis::analyze(&doc), &config, &test_uri()), + vec![] + ); + } + + #[test] + fn test_unused_function_parameter() { + let 
code = "local f(x, y) = y; f(1, 2)"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let config = LintConfig::default().with_enabled(LintRule::UnusedVariables); + + assert_eq!( + lint(&doc, &TypeAnalysis::analyze(&doc), &config, &test_uri()), + vec![make_unused_var_diagnostic(0, 8, 0, 9, "x")] + ); + } + + #[test] + fn test_for_comprehension_variable_used() { + let code = "[x * 2 for x in [1, 2, 3]]"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let config = LintConfig::default().with_enabled(LintRule::UnusedVariables); + + assert_eq!( + lint(&doc, &TypeAnalysis::analyze(&doc), &config, &test_uri()), + vec![] + ); + } + + #[test] + fn test_no_lints_when_disabled() { + let code = "local unused = 1; 42"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let config = LintConfig::default(); // All disabled + + assert_eq!( + lint(&doc, &TypeAnalysis::analyze(&doc), &config, &test_uri()), + vec![] + ); + } + + fn make_shadowed_var_diagnostic(range: Span, name: &str, original_range: Span) -> Diagnostic { + Diagnostic { + range: range.to_range(), + severity: Some(DiagnosticSeverity::WARNING), + code: Some(NumberOrString::String("shadowed-variable".to_string())), + code_description: None, + source: Some("jrsonnet-lint".to_string()), + message: format!("variable `{}` shadows a variable from an outer scope", name), + related_information: Some(vec![DiagnosticRelatedInformation { + location: Location { + uri: test_uri(), + range: original_range.to_range(), + }, + message: format!("`{}` originally defined here", name), + }]), + tags: None, + data: None, + } + } + + #[test] + fn test_shadowed_variable_in_function() { + // x in function parameter shadows outer x + let code = "local x = 1; local f(x) = x; f(2)"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let config = LintConfig::default().with_enabled(LintRule::ShadowedVariables); + + assert_eq!( + lint(&doc, &TypeAnalysis::analyze(&doc), &config, 
&test_uri()), + vec![make_shadowed_var_diagnostic( + span(0, 21, 0, 22), + "x", + span(0, 6, 0, 7) + )] + ); + } + + #[test] + fn test_shadowed_variable_in_nested_local() { + // Inner x shadows outer x + let code = "local x = 1; local y = (local x = 2; x); x + y"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let config = LintConfig::default().with_enabled(LintRule::ShadowedVariables); + + assert_eq!( + lint(&doc, &TypeAnalysis::analyze(&doc), &config, &test_uri()), + vec![make_shadowed_var_diagnostic( + span(0, 30, 0, 31), + "x", + span(0, 6, 0, 7) + )] + ); + } + + #[test] + fn test_no_shadow_for_different_names() { + let code = "local x = 1; local f(y) = y; f(x)"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let config = LintConfig::default().with_enabled(LintRule::ShadowedVariables); + + assert_eq!( + lint(&doc, &TypeAnalysis::analyze(&doc), &config, &test_uri()), + vec![] + ); + } + + #[test] + fn test_underscore_prefixed_shadow_not_flagged() { + // _x shadows outer x but underscore prefix silences the warning + let code = "local x = 1; local f(_x) = _x; f(2)"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let config = LintConfig::default().with_enabled(LintRule::ShadowedVariables); + + assert_eq!( + lint(&doc, &TypeAnalysis::analyze(&doc), &config, &test_uri()), + vec![] + ); + } + + #[test] + fn test_shadowed_variable_in_for_comprehension() { + // x in for comprehension shadows outer x + let code = "local x = 1; [x for x in [1, 2, 3]]"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let config = LintConfig::default().with_enabled(LintRule::ShadowedVariables); + + assert_eq!( + lint(&doc, &TypeAnalysis::analyze(&doc), &config, &test_uri()), + vec![make_shadowed_var_diagnostic( + span(0, 20, 0, 21), + "x", + span(0, 6, 0, 7) + )] + ); + } + + fn make_unreachable_diagnostic( + range: Span, + message: &str, + related_range: Span, + related_message: &str, + ) -> Diagnostic { + 
Diagnostic { + range: range.to_range(), + severity: Some(DiagnosticSeverity::WARNING), + code: Some(NumberOrString::String("unreachable-code".to_string())), + code_description: None, + source: Some("jrsonnet-lint".to_string()), + message: message.to_string(), + related_information: Some(vec![DiagnosticRelatedInformation { + location: Location { + uri: test_uri(), + range: related_range.to_range(), + }, + message: related_message.to_string(), + }]), + tags: None, + data: None, + } + } + + #[test] + fn test_unreachable_after_assert_false() { + // Code after `assert false` is unreachable + // "assert false; 42" - char 13 is space after semicolon + let code = "assert false; 42"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let config = LintConfig::default().with_enabled(LintRule::UnreachableCode); + + assert_eq!( + lint(&doc, &TypeAnalysis::analyze(&doc), &config, &test_uri()), + vec![make_unreachable_diagnostic( + span(0, 13, 0, 16), + "unreachable code: assert condition is always false", + span(0, 7, 0, 12), + "condition is `false`" + )] + ); + } + + #[test] + fn test_unreachable_after_assert_error() { + // Code after `assert error "msg"` is unreachable because condition diverges + // "assert error "fail"; 42" - char 20 is space after semicolon + let code = r#"assert error "fail"; 42"#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let config = LintConfig::default().with_enabled(LintRule::UnreachableCode); + + assert_eq!( + lint(&doc, &TypeAnalysis::analyze(&doc), &config, &test_uri()), + vec![make_unreachable_diagnostic( + span(0, 20, 0, 23), + "unreachable code: assert condition always diverges", + span(0, 7, 0, 19), + "divergent expression here" + )] + ); + } + + #[test] + fn test_unreachable_rhs_of_binary_with_parens() { + // `(error "fail") + 1` - right operand is unreachable because left diverges + // Note: without parens, `error "fail" + 1` is parsed as `error ("fail" + 1)` + let code = r#"(error "fail") + 1"#; + let 
doc = Document::new(code.to_string(), DocVersion::new(1)); + let config = LintConfig::default().with_enabled(LintRule::UnreachableCode); + + assert_eq!( + lint(&doc, &TypeAnalysis::analyze(&doc), &config, &test_uri()), + vec![make_unreachable_diagnostic( + span(0, 17, 0, 18), + "unreachable code: left operand always diverges", + span(0, 0, 0, 14), + "divergent expression here" + )] + ); + } + + #[test] + fn test_unreachable_branches_when_condition_diverges() { + // Both branches are unreachable if condition is error + let code = r#"if error "fail" then 1 else 2"#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let config = LintConfig::default().with_enabled(LintRule::UnreachableCode); + + assert_eq!( + lint(&doc, &TypeAnalysis::analyze(&doc), &config, &test_uri()), + vec![make_unreachable_diagnostic( + span(0, 21, 0, 22), + "unreachable code: condition always diverges", + span(0, 3, 0, 15), + "divergent expression here" + )] + ); + } + + #[test] + fn test_no_unreachable_with_valid_code() { + // Normal code should not trigger unreachable warnings + let code = "assert true; 42"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let config = LintConfig::default().with_enabled(LintRule::UnreachableCode); + + assert_eq!( + lint(&doc, &TypeAnalysis::analyze(&doc), &config, &test_uri()), + vec![] + ); + } + + #[test] + fn test_no_unreachable_when_disabled() { + // Unreachable code warnings should not be reported when disabled + let code = "assert false; 42"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let config = LintConfig::default(); // All disabled + + assert_eq!( + lint(&doc, &TypeAnalysis::analyze(&doc), &config, &test_uri()), + vec![] + ); + } + + fn make_duplicate_field_diagnostic(range: Span, name: &str, first_range: Span) -> Diagnostic { + Diagnostic { + range: range.to_range(), + severity: Some(DiagnosticSeverity::WARNING), + code: Some(NumberOrString::String("duplicate-field".to_string())), + 
code_description: None, + source: Some("jrsonnet-lsp".to_string()), + message: format!("duplicate field `{}`", name), + related_information: Some(vec![DiagnosticRelatedInformation { + location: Location { + uri: test_uri(), + range: first_range.to_range(), + }, + message: "first definition here".to_string(), + }]), + tags: None, + data: None, + } + } + + fn make_duplicate_param_diagnostic(range: Span, name: &str, first_range: Span) -> Diagnostic { + Diagnostic { + range: range.to_range(), + severity: Some(DiagnosticSeverity::ERROR), + code: Some(NumberOrString::String("duplicate-param".to_string())), + code_description: None, + source: Some("jrsonnet-lsp".to_string()), + message: format!("duplicate parameter `{}`", name), + related_information: Some(vec![DiagnosticRelatedInformation { + location: Location { + uri: test_uri(), + range: first_range.to_range(), + }, + message: "first definition here".to_string(), + }]), + tags: None, + data: None, + } + } + + #[test] + fn test_duplicate_field_detected() { + // { a: 1, b: 2, a: 3 } + // First 'a' is at chars 2-6 (a: 1), second 'a' is at chars 14-18 (a: 3) + let code = "{ a: 1, b: 2, a: 3 }"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let config = LintConfig::default().with_enabled(LintRule::DuplicateFields); + + assert_eq!( + lint(&doc, &TypeAnalysis::analyze(&doc), &config, &test_uri()), + vec![make_duplicate_field_diagnostic( + span(0, 14, 0, 18), + "a", + span(0, 2, 0, 6) + )] + ); + } + + #[test] + fn test_no_duplicate_field_for_different_names() { + let code = "{ a: 1, b: 2, c: 3 }"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let config = LintConfig::default().with_enabled(LintRule::DuplicateFields); + + assert_eq!( + lint(&doc, &TypeAnalysis::analyze(&doc), &config, &test_uri()), + vec![] + ); + } + + #[test] + fn test_duplicate_param_detected() { + // function(a, b, a) a + b + // First 'a' is at char 9, second 'a' is at char 15 + let code = "function(a, b, a) a + 
b"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let config = LintConfig::default().with_enabled(LintRule::DuplicateParams); + + assert_eq!( + lint(&doc, &TypeAnalysis::analyze(&doc), &config, &test_uri()), + vec![make_duplicate_param_diagnostic( + span(0, 15, 0, 16), + "a", + span(0, 9, 0, 10) + )] + ); + } + + #[test] + fn test_no_duplicate_param_for_different_names() { + let code = "function(a, b, c) a + b + c"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let config = LintConfig::default().with_enabled(LintRule::DuplicateParams); + + assert_eq!( + lint(&doc, &TypeAnalysis::analyze(&doc), &config, &test_uri()), + vec![] + ); + } + + #[test] + fn test_duplicate_field_disabled() { + let code = "{ a: 1, a: 2 }"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let config = LintConfig::default(); + + assert_eq!( + lint(&doc, &TypeAnalysis::analyze(&doc), &config, &test_uri()), + vec![] + ); + } + + #[test] + fn test_duplicate_param_disabled() { + let code = "function(a, a) a"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let config = LintConfig::default(); + + assert_eq!( + lint(&doc, &TypeAnalysis::analyze(&doc), &config, &test_uri()), + vec![] + ); + } +} diff --git a/crates/jrsonnet-lsp-check/src/type_check.rs b/crates/jrsonnet-lsp-check/src/type_check.rs new file mode 100644 index 00000000..ddc5c623 --- /dev/null +++ b/crates/jrsonnet-lsp-check/src/type_check.rs @@ -0,0 +1,2395 @@ +//! Static type checking for Jsonnet expressions. +//! +//! Provides type error diagnostics that detect type mismatches before evaluation: +//! - Binary operator type mismatches +//! - Unary operator type mismatches +//! - Field access on non-objects +//! - Index access on non-indexable types +//! - Function calls on non-callables +//! 
- Wrong argument counts for function calls + +use jrsonnet_lsp_document::{to_lsp_range, Document, LineIndex}; +use jrsonnet_lsp_inference::{find_best_match, TypeAnalysis, TypeEnv}; +use jrsonnet_lsp_stdlib::get_stdlib_signature; +use jrsonnet_lsp_types::{binary_op_result_ty, unary_op_result_ty, FunctionData, Ty, TyData}; +use jrsonnet_rowan_parser::{ + nodes::{ + BinaryOperatorKind, Expr, ExprArray, ExprArrayComp, ExprBase, ExprBinary, ExprCall, + ExprField, ExprFunction, ExprIfThenElse, ExprIndex, ExprObjExtend, ExprObject, ExprParened, + ExprSlice, ExprUnary, Member, ObjBody, UnaryOperatorKind, + }, + AstNode, +}; +use lsp_types::{Diagnostic, DiagnosticSeverity, NumberOrString}; +use rowan::TextRange; + +use crate::format_check::{ + parse_format_string, FormatParseError, FormatPlaceholder, FormatTypeKind, +}; + +/// A type error detected during static analysis. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct TypeError { + /// The kind of type error. + pub kind: TypeErrorKind, + /// The source location of the error. + pub range: TextRange, +} + +/// The kind of type error. +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum TypeErrorKind { + /// Binary operator applied to incompatible types. + BinaryOpMismatch { + lhs: Ty, + rhs: Ty, + op: &'static str, + message: String, + }, + /// Unary operator applied to incompatible type. + UnaryOpMismatch { + operand: Ty, + op: &'static str, + message: String, + }, + /// Field access (`.field`) on a non-object type. + FieldAccessOnNonObject { actual: Ty }, + /// Index access (`[i]`) on a non-indexable type. + IndexOnNonIndexable { actual: Ty }, + /// Function call on a non-callable type. + CallOnNonFunction { actual: Ty }, + /// Wrong number of arguments to function. + WrongArgCount { expected: usize, actual: usize }, + /// Too few arguments to function. + TooFewArguments { + function_name: String, + required: usize, + provided: usize, + }, + /// Too many arguments to function. 
+ TooManyArguments { + function_name: String, + max_allowed: usize, + provided: usize, + }, + /// Access to non-existent field on object with known structure. + NoSuchField { + field: String, + available: Vec, + suggestion: Option, + }, + /// Index out of bounds on a tuple with known length. + TupleIndexOutOfBounds { tuple_len: usize, index: usize }, + /// Format string parse error. + FormatStringError { message: String }, + /// Wrong number of format arguments. + FormatArgCount { expected: usize, provided: usize }, + /// Format argument type mismatch. + FormatArgTypeMismatch { + index: usize, + expected: Ty, + actual: Ty, + specifier: char, + }, + /// Function argument type mismatch. + ArgumentTypeMismatch { + function_name: String, + param_name: String, + param_index: usize, + expected: Ty, + actual: Ty, + }, + /// Callback function parameter type mismatch with collection element type. + CallbackTypeMismatch { + function_name: String, + callback_param: String, + element_type: Ty, + callback_param_type: Ty, + }, +} + +impl TypeErrorKind { + /// Apply a type substitution to all `Ty` references in this error kind. + /// + /// This is used when merging local types into the global store - the substitution + /// maps local `Ty` values to their global equivalents. 
+ #[must_use] + pub fn apply_substitution(&self, subst: &jrsonnet_lsp_types::TySubst) -> Self { + match self { + TypeErrorKind::BinaryOpMismatch { + lhs, + rhs, + op, + message, + } => TypeErrorKind::BinaryOpMismatch { + lhs: subst.apply(*lhs), + rhs: subst.apply(*rhs), + op, + message: message.clone(), + }, + TypeErrorKind::UnaryOpMismatch { + operand, + op, + message, + } => TypeErrorKind::UnaryOpMismatch { + operand: subst.apply(*operand), + op, + message: message.clone(), + }, + TypeErrorKind::FieldAccessOnNonObject { actual } => { + TypeErrorKind::FieldAccessOnNonObject { + actual: subst.apply(*actual), + } + } + TypeErrorKind::IndexOnNonIndexable { actual } => TypeErrorKind::IndexOnNonIndexable { + actual: subst.apply(*actual), + }, + TypeErrorKind::CallOnNonFunction { actual } => TypeErrorKind::CallOnNonFunction { + actual: subst.apply(*actual), + }, + // These variants have no Ty references + TypeErrorKind::WrongArgCount { .. } + | TypeErrorKind::TooFewArguments { .. } + | TypeErrorKind::TooManyArguments { .. } + | TypeErrorKind::NoSuchField { .. } + | TypeErrorKind::TupleIndexOutOfBounds { .. } + | TypeErrorKind::FormatStringError { .. } + | TypeErrorKind::FormatArgCount { .. 
} => self.clone(), + TypeErrorKind::FormatArgTypeMismatch { + index, + expected, + actual, + specifier, + } => TypeErrorKind::FormatArgTypeMismatch { + index: *index, + expected: subst.apply(*expected), + actual: subst.apply(*actual), + specifier: *specifier, + }, + TypeErrorKind::ArgumentTypeMismatch { + function_name, + param_name, + param_index, + expected, + actual, + } => TypeErrorKind::ArgumentTypeMismatch { + function_name: function_name.clone(), + param_name: param_name.clone(), + param_index: *param_index, + expected: subst.apply(*expected), + actual: subst.apply(*actual), + }, + TypeErrorKind::CallbackTypeMismatch { + function_name, + callback_param, + element_type, + callback_param_type, + } => TypeErrorKind::CallbackTypeMismatch { + function_name: function_name.clone(), + callback_param: callback_param.clone(), + element_type: subst.apply(*element_type), + callback_param_type: subst.apply(*callback_param_type), + }, + } + } +} + +impl TypeError { + /// Apply a type substitution to all `Ty` references in this error. + /// + /// This is used when merging local types into the global store - the substitution + /// maps local `Ty` values to their global equivalents. + #[must_use] + pub fn apply_substitution(&self, subst: &jrsonnet_lsp_types::TySubst) -> Self { + Self { + kind: self.kind.apply_substitution(subst), + range: self.range, + } + } + + /// Convert the type error to an LSP diagnostic. + pub fn to_diagnostic( + &self, + line_index: &LineIndex, + text: &str, + analysis: &TypeAnalysis, + ) -> Diagnostic { + let message = match &self.kind { + TypeErrorKind::BinaryOpMismatch { message, .. } + | TypeErrorKind::UnaryOpMismatch { message, .. 
} => message.clone(), + TypeErrorKind::FieldAccessOnNonObject { actual } => { + format!( + "field access on non-object type `{}`", + analysis.display(*actual) + ) + } + TypeErrorKind::IndexOnNonIndexable { actual } => { + format!( + "index access on non-indexable type `{}`", + analysis.display(*actual) + ) + } + TypeErrorKind::CallOnNonFunction { actual } => { + format!( + "cannot call non-function type `{}`", + analysis.display(*actual) + ) + } + TypeErrorKind::WrongArgCount { expected, actual } => { + format!( + "function expects {} argument(s), but {} provided", + expected, actual + ) + } + TypeErrorKind::TooFewArguments { + function_name, + required, + provided, + } => { + format!( + "`{}` requires at least {} argument(s), but {} provided", + function_name, required, provided + ) + } + TypeErrorKind::TooManyArguments { + function_name, + max_allowed, + provided, + } => { + format!( + "`{}` accepts at most {} argument(s), but {} provided", + function_name, max_allowed, provided + ) + } + TypeErrorKind::NoSuchField { + field, + available, + suggestion, + } => { + let mut msg = format!("no such field `{}`", field); + if let Some(suggested) = suggestion { + msg.push_str("; did you mean `"); + msg.push_str(suggested); + msg.push_str("`?"); + } else if !available.is_empty() { + let available_str = available.join(", "); + msg.push_str("; available fields: "); + msg.push_str(&available_str); + } + msg + } + TypeErrorKind::TupleIndexOutOfBounds { tuple_len, index } => { + format!( + "index {} is out of bounds for tuple of length {}", + index, tuple_len + ) + } + TypeErrorKind::FormatStringError { message } => { + format!("invalid format string: {}", message) + } + TypeErrorKind::FormatArgCount { expected, provided } => { + format!( + "format string expects {} argument(s), but {} provided", + expected, provided + ) + } + TypeErrorKind::FormatArgTypeMismatch { + index, + expected, + actual, + specifier, + } => { + format!( + "format argument {} (specifier %{}) expects 
`{}`, got `{}`", + index + 1, + specifier, + analysis.display(*expected), + analysis.display(*actual) + ) + } + TypeErrorKind::ArgumentTypeMismatch { + function_name, + param_name, + param_index, + expected, + actual, + } => { + format!( + "`{}` argument {} (`{}`) expects `{}`, got `{}`", + function_name, + param_index + 1, + param_name, + analysis.display(*expected), + analysis.display(*actual) + ) + } + TypeErrorKind::CallbackTypeMismatch { + function_name, + callback_param, + element_type, + callback_param_type, + } => { + format!( + "`{}` callback parameter `{}` has type `{}`, but array elements have type `{}`", + function_name, + callback_param, + analysis.display(*callback_param_type), + analysis.display(*element_type) + ) + } + }; + + Diagnostic { + range: to_lsp_range(self.range, line_index, text), + severity: Some(DiagnosticSeverity::WARNING), + code: Some(NumberOrString::String("type-error".to_string())), + code_description: None, + source: Some("jrsonnet-lint".to_string()), + message, + related_information: None, + tags: None, + data: None, + } + } +} + +/// Configuration for type checking. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum TypeCheckRule { + BinaryOps, + UnaryOps, + FieldAccess, + IndexAccess, + CallChecks, +} + +impl TypeCheckRule { + const fn bit(self) -> u8 { + match self { + Self::BinaryOps => 1 << 0, + Self::UnaryOps => 1 << 1, + Self::FieldAccess => 1 << 2, + Self::IndexAccess => 1 << 3, + Self::CallChecks => 1 << 4, + } + } +} + +#[derive(Debug, Clone, Default, PartialEq, Eq)] +pub struct TypeCheckConfig { + enabled: u8, +} + +impl TypeCheckConfig { + /// Enable a single type-check rule in this config. + #[must_use] + pub fn with_enabled(mut self, rule: TypeCheckRule) -> Self { + self.enable(rule); + self + } + + /// Enable a single type-check rule in this config. + pub fn enable(&mut self, rule: TypeCheckRule) { + self.enabled |= rule.bit(); + } + + /// Check whether a type-check rule is enabled. 
+ #[must_use] + pub fn is_enabled(&self, rule: TypeCheckRule) -> bool { + self.enabled & rule.bit() != 0 + } + + /// Create a config with all checks enabled. + pub fn all() -> Self { + Self { + enabled: TypeCheckRule::BinaryOps.bit() + | TypeCheckRule::UnaryOps.bit() + | TypeCheckRule::FieldAccess.bit() + | TypeCheckRule::IndexAccess.bit() + | TypeCheckRule::CallChecks.bit(), + } + } +} + +/// Check types in a document and return any type errors. +pub fn check_types( + document: &Document, + analysis: &TypeAnalysis, + config: &TypeCheckConfig, +) -> Vec { + let mut errors = Vec::new(); + let ast = document.ast(); + let mut env = TypeEnv::new_default(); + + if let Some(expr) = ast.expr() { + check_expr(&expr, analysis, config, &mut env, &mut errors); + } + + errors +} + +/// Check an expression for type errors. +fn check_expr( + expr: &jrsonnet_rowan_parser::nodes::Expr, + analysis: &TypeAnalysis, + config: &TypeCheckConfig, + env: &mut TypeEnv, + errors: &mut Vec, +) { + // Process local bindings + for stmt in expr.stmts() { + if let Some(stmt_local) = + jrsonnet_rowan_parser::nodes::StmtLocal::cast(stmt.syntax().clone()) + { + for bind in stmt_local.binds() { + check_bind(&bind, analysis, config, env, errors); + } + } + } + + // Check base expression (this includes ExprCall, ExprField, ExprIndex, etc.) + if let Some(base) = expr.expr_base() { + check_base(&base, analysis, config, env, errors); + } +} + +/// Check a binding for type errors. 
+fn check_bind( + bind: &jrsonnet_rowan_parser::nodes::Bind, + analysis: &TypeAnalysis, + config: &TypeCheckConfig, + env: &mut TypeEnv, + errors: &mut Vec, +) { + use jrsonnet_rowan_parser::nodes::Bind; + match bind { + Bind::BindDestruct(bd) => { + if let Some(value) = bd.value() { + check_expr(&value, analysis, config, env, errors); + } + } + Bind::BindFunction(bf) => { + if let Some(body) = bf.value() { + env.push_scope(); + check_expr(&body, analysis, config, env, errors); + env.pop_scope(); + } + } + } +} + +/// Check a base expression for type errors. +fn check_base( + base: &ExprBase, + analysis: &TypeAnalysis, + config: &TypeCheckConfig, + env: &mut TypeEnv, + errors: &mut Vec, +) { + match base { + ExprBase::ExprBinary(binary) => { + check_binary_base(base, binary, analysis, config, env, errors); + } + ExprBase::ExprUnary(unary) => check_unary_base(base, unary, analysis, config, env, errors), + ExprBase::ExprIfThenElse(if_expr) => { + check_if_then_else_base(if_expr, analysis, config, env, errors); + } + ExprBase::ExprParened(parens) => check_parened_base(parens, analysis, config, env, errors), + ExprBase::ExprFunction(func) => check_function_base(func, analysis, config, env, errors), + ExprBase::ExprArray(arr) => check_array_base(arr, analysis, config, env, errors), + ExprBase::ExprObject(obj) => check_object_base(obj, analysis, config, env, errors), + ExprBase::ExprObjExtend(extend) => { + check_obj_extend_base(extend, analysis, config, env, errors); + } + ExprBase::ExprArrayComp(comp) => { + check_array_comp_base(comp, analysis, config, env, errors); + } + ExprBase::ExprField(field) => check_field_base(base, field, analysis, config, env, errors), + ExprBase::ExprIndex(idx) => check_index_base(base, idx, analysis, config, env, errors), + ExprBase::ExprSlice(slice) => check_slice_base(base, slice, analysis, config, env, errors), + ExprBase::ExprCall(call) => check_call_base(base, call, analysis, config, env, errors), + _ => {} + } +} + +fn 
check_binary_base( + base: &ExprBase, + binary: &ExprBinary, + analysis: &TypeAnalysis, + config: &TypeCheckConfig, + env: &mut TypeEnv, + errors: &mut Vec, +) { + if let Some(lhs) = binary.lhs() { + check_expr(&lhs, analysis, config, env, errors); + } + if let Some(rhs) = binary.rhs() { + check_expr(&rhs, analysis, config, env, errors); + } + if !config.is_enabled(TypeCheckRule::BinaryOps) { + return; + } + let (Some(lhs), Some(rhs), Some(op)) = (binary.lhs(), binary.rhs(), binary.binary_operator()) + else { + return; + }; + let lhs_ty = analysis + .type_for_range(lhs.syntax().text_range()) + .unwrap_or(Ty::ANY); + let rhs_ty = analysis + .type_for_range(rhs.syntax().text_range()) + .unwrap_or(Ty::ANY); + if lhs_ty.is_any() || rhs_ty.is_any() || lhs_ty.is_never() || rhs_ty.is_never() { + return; + } + if let Err(message) = + analysis.with_store_mut(|store| binary_op_result_ty(op.kind(), lhs_ty, rhs_ty, store)) + { + errors.push(TypeError { + kind: TypeErrorKind::BinaryOpMismatch { + lhs: lhs_ty, + rhs: rhs_ty, + op: binary_op_str(op.kind()), + message, + }, + range: base.syntax().text_range(), + }); + } +} + +fn check_unary_base( + base: &ExprBase, + unary: &ExprUnary, + analysis: &TypeAnalysis, + config: &TypeCheckConfig, + env: &mut TypeEnv, + errors: &mut Vec, +) { + let Some(rhs) = unary.rhs() else { + return; + }; + check_expr(&rhs, analysis, config, env, errors); + if !config.is_enabled(TypeCheckRule::UnaryOps) { + return; + } + let Some(op) = unary.unary_operator() else { + return; + }; + let rhs_ty = analysis + .type_for_range(rhs.syntax().text_range()) + .unwrap_or(Ty::ANY); + if rhs_ty.is_any() || rhs_ty.is_never() { + return; + } + if let Err(message) = + analysis.with_store_mut(|store| unary_op_result_ty(op.kind(), rhs_ty, store)) + { + errors.push(TypeError { + kind: TypeErrorKind::UnaryOpMismatch { + operand: rhs_ty, + op: unary_op_str(op.kind()), + message, + }, + range: base.syntax().text_range(), + }); + } +} + +fn check_if_then_else_base( + 
if_expr: &ExprIfThenElse, + analysis: &TypeAnalysis, + config: &TypeCheckConfig, + env: &mut TypeEnv, + errors: &mut Vec, +) { + if let Some(cond) = if_expr.cond() { + check_expr(&cond, analysis, config, env, errors); + } + if let Some(then_expr) = if_expr.then().and_then(|then_clause| then_clause.expr()) { + check_expr(&then_expr, analysis, config, env, errors); + } + if let Some(else_expr) = if_expr.else_().and_then(|else_clause| else_clause.expr()) { + check_expr(&else_expr, analysis, config, env, errors); + } +} + +fn check_parened_base( + parens: &ExprParened, + analysis: &TypeAnalysis, + config: &TypeCheckConfig, + env: &mut TypeEnv, + errors: &mut Vec, +) { + if let Some(inner) = parens.expr() { + check_expr(&inner, analysis, config, env, errors); + } +} + +fn check_function_base( + func: &ExprFunction, + analysis: &TypeAnalysis, + config: &TypeCheckConfig, + env: &mut TypeEnv, + errors: &mut Vec, +) { + let Some(body) = func.expr() else { + return; + }; + env.push_scope(); + check_expr(&body, analysis, config, env, errors); + env.pop_scope(); +} + +fn check_array_base( + arr: &ExprArray, + analysis: &TypeAnalysis, + config: &TypeCheckConfig, + env: &mut TypeEnv, + errors: &mut Vec, +) { + for elem in arr.exprs() { + check_expr(&elem, analysis, config, env, errors); + } +} + +fn check_object_base( + obj: &ExprObject, + analysis: &TypeAnalysis, + config: &TypeCheckConfig, + env: &mut TypeEnv, + errors: &mut Vec, +) { + if let Some(body) = obj.obj_body() { + check_obj_body(&body, analysis, config, env, errors); + } +} + +fn check_obj_extend_base( + extend: &ExprObjExtend, + analysis: &TypeAnalysis, + config: &TypeCheckConfig, + env: &mut TypeEnv, + errors: &mut Vec, +) { + if let Some(inner) = extend.expr() { + check_expr(&inner, analysis, config, env, errors); + } +} + +fn check_array_comp_base( + comp: &ExprArrayComp, + analysis: &TypeAnalysis, + config: &TypeCheckConfig, + env: &mut TypeEnv, + errors: &mut Vec, +) { + if let Some(expr) = comp.expr() { + 
check_expr(&expr, analysis, config, env, errors);
    }
}

/// Check a field access (`base.field`) expression.
///
/// Always recurses into the base sub-expression; then, when the
/// `FieldAccess` rule is enabled, reports:
/// - `FieldAccessOnNonObject` when the base type does not support field
///   access;
/// - `NoSuchField` (with a sorted candidate list and a "did you mean"
///   suggestion from `find_best_match`) when the base is a closed object and
///   the field is known to be absent.
///
/// Bases typed `Any`/`Never`, open objects, and dynamic field names produce
/// no diagnostics.
fn check_field_base(
    base: &ExprBase,
    field: &ExprField,
    analysis: &TypeAnalysis,
    config: &TypeCheckConfig,
    env: &mut TypeEnv,
    errors: &mut Vec<TypeError>,
) {
    // Recurse first so sub-expression diagnostics are emitted even when the
    // field-access rule itself is disabled.
    if let Some(base_expr) = field.base() {
        check_expr(&base_expr, analysis, config, env, errors);
    }
    if !config.is_enabled(TypeCheckRule::FieldAccess) {
        return;
    }
    let Some(base_expr) = field.base() else {
        return;
    };
    let base_ty = analysis
        .type_for_range(base_expr.syntax().text_range())
        .unwrap_or(Ty::ANY);
    // Unknown (`Any`) or unreachable (`Never`) bases are never reported.
    if base_ty.is_any() || base_ty.is_never() {
        return;
    }
    if !analysis.supports_field_access(base_ty) {
        errors.push(TypeError {
            kind: TypeErrorKind::FieldAccessOnNonObject { actual: base_ty },
            range: base.syntax().text_range(),
        });
        return;
    }
    // Only a closed object (all fields statically known) can prove absence.
    if !analysis.is_closed_object(base_ty) {
        return;
    }
    let Some(field_name) = field
        .field()
        .and_then(|n| n.ident_lit())
        .map(|t| t.text().to_string())
    else {
        return;
    };
    // Only a definite `Some(false)` (field known absent) is reported;
    // `Some(true)` and `None` both fall through without a diagnostic.
    if analysis.object_has_field(base_ty, &field_name) != Some(false) {
        return;
    }
    let mut available = analysis.object_field_names(base_ty).unwrap_or_default();
    available.sort();
    let suggestion = find_best_match(
        &field_name,
        available.iter().map(std::string::String::as_str),
    )
    .map(std::string::ToString::to_string);
    errors.push(TypeError {
        kind: TypeErrorKind::NoSuchField {
            field: field_name,
            available,
            suggestion,
        },
        range: base.syntax().text_range(),
    });
}

/// Check an index (`base[index]`) expression.
///
/// Recurses into the base and index sub-expressions; then, when the
/// `IndexAccess` rule is enabled, reports:
/// - `IndexOnNonIndexable` when the base type is not indexable;
/// - `TupleIndexOutOfBounds` when the base is a tuple of known length and
///   the index is a constant non-negative integer outside its bounds.
fn check_index_base(
    base: &ExprBase,
    idx: &ExprIndex,
    analysis: &TypeAnalysis,
    config: &TypeCheckConfig,
    env: &mut TypeEnv,
    errors: &mut Vec<TypeError>,
) {
    // Recurse into sub-expressions regardless of whether the rule is enabled.
    if let Some(base_expr) = idx.base() {
        check_expr(&base_expr, analysis, config, env, errors);
    }
    if let Some(index_expr) = idx.index() {
        check_expr(&index_expr, analysis, config, env, errors);
    }
    if !config.is_enabled(TypeCheckRule::IndexAccess) {
        return;
    }
    let Some(base_expr) = idx.base() else {
        return;
    };
    let base_ty = analysis
        .type_for_range(base_expr.syntax().text_range())
        .unwrap_or(Ty::ANY);
    if base_ty.is_any() || base_ty.is_never() {
        return;
    }
    if !analysis.is_indexable(base_ty) {
        errors.push(TypeError {
            kind: TypeErrorKind::IndexOnNonIndexable { actual: base_ty },
            range: base.syntax().text_range(),
        });
        return;
    }
    // Bounds checking is only possible for tuple types with a known length.
    let Some(tuple_len) = analysis.tuple_len(base_ty) else {
        return;
    };
    // Only constant, non-negative integer literal indices are checked.
    let Some(index) = get_constant_index(idx.index().as_ref()) else {
        return;
    };
    if index >= tuple_len {
        errors.push(TypeError {
            kind: TypeErrorKind::TupleIndexOutOfBounds { tuple_len, index },
            range: base.syntax().text_range(),
        });
    }
}

/// Check a slice (`base[a:b:c]`) expression.
///
/// Recurses into the base sub-expression; then, when the `IndexAccess` rule
/// is enabled, reports `IndexOnNonIndexable` if the base type is neither
/// `Any`/`Never` nor sliceable.
fn check_slice_base(
    base: &ExprBase,
    slice: &ExprSlice,
    analysis: &TypeAnalysis,
    config: &TypeCheckConfig,
    env: &mut TypeEnv,
    errors: &mut Vec<TypeError>,
) {
    if let Some(base_expr) = slice.base() {
        check_expr(&base_expr, analysis, config, env, errors);
    }
    if !config.is_enabled(TypeCheckRule::IndexAccess) {
        return;
    }
    let Some(base_expr) = slice.base() else {
        return;
    };
    let base_ty = analysis
        .type_for_range(base_expr.syntax().text_range())
        .unwrap_or(Ty::ANY);
    if !base_ty.is_any() && !base_ty.is_never() && !analysis.is_sliceable(base_ty) {
        errors.push(TypeError {
            kind: TypeErrorKind::IndexOnNonIndexable { actual: base_ty },
            range: base.syntax().text_range(),
        });
    }
}

/// Check a call (`callee(args...)`) expression.
///
/// Recurses into the callee and every positional/named argument expression;
/// then, when the `CallChecks` rule is enabled, reports `CallOnNonFunction`
/// for non-callable callees and delegates detailed validation to
/// `check_stdlib_call_expr` and `check_user_function_call_expr`.
fn check_call_base(
    base: &ExprBase,
    call: &ExprCall,
    analysis: &TypeAnalysis,
    config: &TypeCheckConfig,
    env: &mut TypeEnv,
    errors: &mut Vec<TypeError>,
) {
    if let Some(callee_expr) = call.callee() {
        check_expr(&callee_expr, analysis, config, env, errors);
    }
    if let Some(args_desc) = call.args_desc() {
        for arg in args_desc.args() {
            if let Some(arg_expr) = arg.expr() {
                check_expr(&arg_expr, analysis, config, env, errors);
            }
        }
    }
    if !config.is_enabled(TypeCheckRule::CallChecks) {
        return;
    }
    if let Some(callee_expr) = call.callee() {
        let callee_ty = analysis
            .type_for_range(callee_expr.syntax().text_range())
            .unwrap_or(Ty::ANY);
        if !callee_ty.is_any() && !callee_ty.is_never() && !analysis.is_callable(callee_ty) {
            errors.push(TypeError {
                kind: TypeErrorKind::CallOnNonFunction { actual: callee_ty },
                range: base.syntax().text_range(),
            });
        }
    }
    check_stdlib_call_expr(call, analysis, errors);
    check_user_function_call_expr(call, analysis, errors);
}

/// Check an object body for type errors.
///
/// Only member-list bodies are walked: normal field values are checked in
/// the current scope, method bodies in a freshly pushed scope, and `assert`
/// member conditions are checked. Object-local binds (`MemberBindStmt`) are
/// intentionally not checked here.
fn check_obj_body(
    body: &ObjBody,
    analysis: &TypeAnalysis,
    config: &TypeCheckConfig,
    env: &mut TypeEnv,
    errors: &mut Vec<TypeError>,
) {
    if let ObjBody::ObjBodyMemberList(members) = body {
        for member in members.members() {
            match member {
                Member::MemberFieldNormal(field) => {
                    if let Some(expr) = field.expr() {
                        check_expr(&expr, analysis, config, env, errors);
                    }
                }
                Member::MemberFieldMethod(method) => {
                    if let Some(expr) = method.expr() {
                        // Method bodies get their own scope for parameters.
                        env.push_scope();
                        check_expr(&expr, analysis, config, env, errors);
                        env.pop_scope();
                    }
                }
                Member::MemberAssertStmt(assert_member) => {
                    if let Some(assertion) = assert_member.assertion() {
                        if let Some(cond) = assertion.condition() {
                            check_expr(&cond, analysis, config, env, errors);
                        }
                    }
                }
                Member::MemberBindStmt(_) => {}
            }
        }
    }
}

/// Validate a function call using FunctionData (Ty-native version).
///
/// Returns `TooFewArguments` when fewer than the required (non-defaulted)
/// parameters are supplied, `TooManyArguments` when the count exceeds the
/// declared parameters and the function is not variadic, and `None`
/// otherwise.
fn validate_function_call_ty(
    func_data: &FunctionData,
    function_name: String,
    arg_count: usize,
    range: TextRange,
) -> Option<TypeError> {
    // Count required parameters (those without defaults).
    let required = func_data.params.iter().filter(|p| !p.has_default).count();
    let total = func_data.params.len();

    if arg_count < required {
        Some(TypeError {
            kind: TypeErrorKind::TooFewArguments {
                function_name,
                required,
                provided: arg_count,
            },
            range,
        })
    } else if arg_count > total && !func_data.variadic {
        Some(TypeError {
            kind: TypeErrorKind::TooManyArguments {
                function_name,
                max_allowed: total,
                provided: arg_count,
            },
            range,
        })
    } else {
        None
    }
}

/// Check if an ExprCall is a stdlib function call and validate argument count and types.
///
/// Matches the pattern: `std.functionName(args...)`
/// - Callee must be ExprField with base being ExprVar "std"
///
/// Emits `TooFewArguments`/`TooManyArguments` for bad arity and
/// `ArgumentTypeMismatch` for arguments whose inferred type is not a subtype
/// of the stdlib signature's parameter type. Also dispatches the dedicated
/// `std.format` and higher-order-function validators.
fn check_stdlib_call_expr(call: &ExprCall, analysis: &TypeAnalysis, errors: &mut Vec<TypeError>) {
    // Get the callee - should be std.functionName (ExprField)
    let Some(callee_expr) = call.callee() else {
        return;
    };
    let Some(ExprBase::ExprField(field)) = callee_expr.expr_base() else {
        return;
    };

    // Check if base is `std`
    let Some(base_expr) = field.base() else {
        return;
    };
    let Some(ExprBase::ExprVar(var)) = base_expr.expr_base() else {
        return;
    };
    let is_std = var
        .name()
        .and_then(|n| n.ident_lit())
        .is_some_and(|t| t.text() == "std");
    if !is_std {
        return;
    }

    // Extract function name from the field
    let Some(fn_name) = field
        .field()
        .and_then(|n| n.ident_lit())
        .map(|t| t.text().to_string())
    else {
        return;
    };

    // Look up signature; unknown stdlib names are silently skipped.
    let Some(sig) = get_stdlib_signature(&fn_name) else {
        return;
    };

    // Count arguments
    let arg_count = call
        .args_desc()
        .map(|args| args.args().count())
        .unwrap_or(0);

    // Validate arity using the unified function
    let qualified_name = format!("std.{}", fn_name);
    let Some(func_data) = sig.func_data() else {
        return;
    };
    if let Some(error) = validate_function_call_ty(
        &func_data,
        qualified_name.clone(),
        arg_count,
        call.syntax().text_range(),
    ) {
        errors.push(error);
    }

    // Check argument types.
    // NOTE(review): arguments are matched to parameters strictly by position;
    // named arguments are not remapped to their parameter slots here — confirm
    // whether named-argument calls should be resolved before type checking.
    if let Some(args_desc) = call.args_desc() {
        for (i, arg) in args_desc.args().enumerate() {
            if i >= func_data.params.len() {
                break; // Variadic or too many args - handled elsewhere
            }

            let param = &func_data.params[i];
            let stdlib_expected_ty = param.ty;

            // Skip if expected type is Any (no constraint)
            if stdlib_expected_ty.is_any() {
                continue;
            }

            // Get the argument's inferred type
            let Some(arg_expr) = arg.expr() else {
                continue;
            };
            let Some(actual_ty) = analysis.type_for_range(arg_expr.syntax().text_range()) else {
                continue;
            };

            // Skip if actual type is Any or Never
            if actual_ty.is_any() || actual_ty.is_never() {
                continue;
            }

            // Import expected type from stdlib store into analysis store
            let expected_ty = analysis.import_from_stdlib(stdlib_expected_ty);

            // Check if actual type is subtype of expected type
            // Special case: if expected is function_any() (no params), accept any function
            // This handles higher-order functions like std.map where we accept any callable
            let is_function_wildcard = {
                let stdlib_store = jrsonnet_lsp_stdlib::stdlib_store();
                match *stdlib_store.get(stdlib_expected_ty) {
                    TyData::Function(ref f) => f.params.is_empty(),
                    _ => false,
                }
            };
            let type_matches = if is_function_wildcard {
                analysis.is_function(actual_ty)
            } else {
                analysis.is_subtype(actual_ty, expected_ty)
            };

            if !type_matches {
                errors.push(TypeError {
                    kind: TypeErrorKind::ArgumentTypeMismatch {
                        function_name: qualified_name.clone(),
                        param_name: param.name.clone(),
                        param_index: i,
                        expected: expected_ty,
                        actual: actual_ty,
                    },
                    range: arg_expr.syntax().text_range(),
                });
            }
        }
    }

    // Special validation for std.format
    if fn_name == "format" {
        check_format_call(call, analysis, errors);
    }

    // Higher-order function validation
    check_higher_order_call(&fn_name, call, analysis, errors);
}

/// Configuration for higher-order function validation.
struct HigherOrderConfig {
    /// Name of the callback parameter.
    callback_param_name: &'static str,
    /// Index of the callback argument (0-based).
    callback_arg_index: usize,
    /// Index of the array argument (0-based).
    array_arg_index: usize,
}

/// Validate higher-order function calls.
///
/// Checks that callback function parameters are compatible with array element types.
/// For example, in `std.map(func, arr)`, we verify that `func` can accept elements of `arr`.
fn check_higher_order_call(
    fn_name: &str,
    call: &ExprCall,
    analysis: &TypeAnalysis,
    errors: &mut Vec<TypeError>,
) {
    // Per-function argument layout for the supported higher-order stdlib calls.
    // NOTE(review): `find`/`findIndex` are configured with the callback in the
    // second slot; upstream Jsonnet's `std.find(value, arr)` takes a plain
    // value, not a callback — confirm this matches `get_stdlib_signature`.
    let config: Option<HigherOrderConfig> = match fn_name {
        "map" | "filter" | "flatMap" => Some(HigherOrderConfig {
            callback_param_name: "func",
            callback_arg_index: 0,
            array_arg_index: 1,
        }),
        "find" | "findIndex" => Some(HigherOrderConfig {
            callback_param_name: "func",
            callback_arg_index: 1,
            array_arg_index: 0,
        }),
        "sort" | "uniq" => Some(HigherOrderConfig {
            callback_param_name: "keyF",
            callback_arg_index: 1,
            array_arg_index: 0,
        }),
        // foldl/foldr are more complex (accumulator + element), handle separately if needed
        _ => None,
    };

    let Some(config) = config else {
        return;
    };

    let Some(args_desc) = call.args_desc() else {
        return;
    };
    let args: Vec<_> = args_desc.args().collect();

    // Get the callback and array arguments
    let Some(callback_arg) = args.get(config.callback_arg_index) else {
        return;
    };
    let Some(array_arg) = args.get(config.array_arg_index) else {
        return;
    };

    // Get the callback's type
    let Some(callback_expr) = callback_arg.expr() else {
        return;
    };
    let Some(callback_ty) = analysis.type_for_range(callback_expr.syntax().text_range()) else {
        return;
    };

    // Get the array's type
    let Some(array_expr) = array_arg.expr() else {
        return;
    };
    let Some(array_ty) = analysis.type_for_range(array_expr.syntax().text_range()) else {
        return;
    };

    // Skip if types are Any or Never
    if array_ty.is_any() || array_ty.is_never() || callback_ty.is_any() || callback_ty.is_never() {
        return;
    }

    // Extract element type from array.
    // First get the data, then create union outside the borrow — `union` needs
    // mutable store access, which must not overlap with `with_data`'s borrow.
    let element_info = analysis.with_data(array_ty, |data| match data {
        TyData::Array { elem, .. } => Some(Ok(*elem)),
        TyData::Tuple { elems } => {
            if elems.is_empty() {
                None
            } else {
                Some(Err(elems.clone())) // Need to create union outside borrow
            }
        }
        _ => None,
    });
    let element_ty = match element_info {
        Some(Ok(ty)) => ty,
        Some(Err(elems)) => analysis.union(elems),
        None => return,
    };

    // Skip if element type is Any
    if element_ty.is_any() {
        return;
    }

    // Extract the callback's first parameter type
    let callback_param_ty = analysis.with_data(callback_ty, |data| match data {
        TyData::Function(ft) if !ft.params.is_empty() => Some(ft.params[0].ty),
        _ => None,
    });
    let Some(callback_param_ty) = callback_param_ty else {
        return;
    };

    // Skip if callback param type is Any
    if callback_param_ty.is_any() {
        return;
    }

    // Check if element type is compatible with callback param type
    if !analysis.is_subtype(element_ty, callback_param_ty) {
        errors.push(TypeError {
            kind: TypeErrorKind::CallbackTypeMismatch {
                function_name: format!("std.{}", fn_name),
                callback_param: config.callback_param_name.to_string(),
                element_type: element_ty,
                callback_param_type: callback_param_ty,
            },
            range: callback_expr.syntax().text_range(),
        });
    }
}

/// Validate a std.format() call.
+/// +/// Checks: +/// - Format string is valid +/// - Argument count matches placeholders +/// - Argument types match expected types (when inferrable) +fn check_format_call(call: &ExprCall, analysis: &TypeAnalysis, errors: &mut Vec) { + let Some(args_desc) = call.args_desc() else { + return; + }; + let args: Vec<_> = args_desc.args().collect(); + if args.is_empty() { + return; + } + + // Get the format string from the first argument + let Some(fmt_arg) = args.first() else { + return; + }; + let Some(fmt_expr) = fmt_arg.expr() else { + return; + }; + + // Try to extract a literal string value + let Some(fmt_string) = get_string_literal(&fmt_expr) else { + return; // Can't validate non-literal format strings + }; + + // Parse the format string + let format_spec = match parse_format_string(&fmt_string) { + Ok(spec) => spec, + Err(e) => { + let message = match e { + FormatParseError::IncompleteSpecifier => "incomplete format specifier".to_string(), + FormatParseError::UnknownSpecifier(c) => format!("unknown specifier '%{}'", c), + FormatParseError::UnclosedNamedPlaceholder => { + "unclosed named placeholder".to_string() + } + FormatParseError::EmptyName => "empty name in named placeholder".to_string(), + FormatParseError::MixedPositionalAndNamed => { + "cannot mix positional and named placeholders".to_string() + } + }; + errors.push(TypeError { + kind: TypeErrorKind::FormatStringError { message }, + range: fmt_expr.syntax().text_range(), + }); + return; + } + }; + + // For positional placeholders, check argument count + // This includes extra args consumed by dynamic width (*) and precision (.*) + if format_spec.uses_positional { + let positional_count = format_spec.positional_arg_count(); + let provided = args.len() - 1; // Exclude format string itself + + if provided != positional_count { + errors.push(TypeError { + kind: TypeErrorKind::FormatArgCount { + expected: positional_count, + provided, + }, + range: call.syntax().text_range(), + }); + return; // Skip type 
checking if count is wrong + } + + // Check argument types (skip format string, check remaining args) + for (i, placeholder) in format_spec.placeholders.iter().enumerate() { + if let FormatPlaceholder::Positional { + expected_type, + specifier, + .. + } = placeholder + { + // Get the corresponding argument (offset by 1 for format string) + if let Some(arg) = args.get(i + 1) { + if let Some(arg_expr) = arg.expr() { + let Some(actual_ty) = + analysis.type_for_range(arg_expr.syntax().text_range()) + else { + continue; + }; + + // Skip Any types (unknown) + if actual_ty.is_any() || *expected_type == FormatTypeKind::Any { + continue; + } + + // Check type compatibility using FormatTypeKind method + let is_compatible = analysis + .with_store(|store| expected_type.is_compatible_with(actual_ty, store)); + if !is_compatible { + let expected_ty = + analysis.with_store_mut(|store| expected_type.to_ty(store)); + errors.push(TypeError { + kind: TypeErrorKind::FormatArgTypeMismatch { + index: i, + expected: expected_ty, + actual: actual_ty, + specifier: *specifier, + }, + range: arg_expr.syntax().text_range(), + }); + } + } + } + } + } + } +} + +/// Extract a string literal value from an expression. +fn get_string_literal(expr: &Expr) -> Option { + let base = expr.expr_base()?; + match base { + ExprBase::ExprString(s) => { + // Get the text and strip quotes + let text = s.syntax().text().to_string(); + // Handle different string formats: "...", '...', @"...", @'...', |||...||| + if text.starts_with("|||") { + // Text block - complex to parse, skip for now + None + } else if text.starts_with('@') { + // Verbatim string + let inner = text.get(2..text.len() - 1)?; + Some(inner.to_string()) + } else if text.starts_with('"') || text.starts_with('\'') { + // Regular string - need to handle escape sequences + let inner = text.get(1..text.len() - 1)?; + Some(unescape_string(inner)) + } else { + None + } + } + _ => None, + } +} + +/// Unescape a string literal (simplified version). 
+fn unescape_string(s: &str) -> String { + let mut result = String::with_capacity(s.len()); + let mut chars = s.chars().peekable(); + + while let Some(c) = chars.next() { + if c == '\\' { + match chars.next() { + Some('n') => result.push('\n'), + Some('t') => result.push('\t'), + Some('r') => result.push('\r'), + Some('\\') | None => result.push('\\'), + Some('"') => result.push('"'), + Some('\'') => result.push('\''), + Some(other) => { + result.push('\\'); + result.push(other); + } + } + } else { + result.push(c); + } + } + + result +} + +/// Check if an ExprCall is a user function call and validate argument count. +/// +/// Matches the pattern: `varName(args...)` where varName is a known function. +fn check_user_function_call_expr( + call: &ExprCall, + analysis: &TypeAnalysis, + errors: &mut Vec, +) { + // Get the callee - should be a variable (ExprVar) + let Some(callee_expr) = call.callee() else { + return; + }; + let Some(ExprBase::ExprVar(var)) = callee_expr.expr_base() else { + return; + }; + + // Get the variable name + let Some(var_name) = var + .name() + .and_then(|n| n.ident_lit()) + .map(|t| t.text().to_string()) + else { + return; + }; + + // Skip `std` - handled by check_stdlib_call_expr + if var_name == "std" { + return; + } + + // Look up the type of the variable + let Some(var_ty) = analysis.type_for_range(var.syntax().text_range()) else { + return; + }; + + // Get function data using Ty-native method + let Some(func_data) = analysis.get_function(var_ty) else { + return; + }; + + // Count arguments + let arg_count = call + .args_desc() + .map(|args| args.args().count()) + .unwrap_or(0); + + // Validate using the Ty-native function + if let Some(error) = + validate_function_call_ty(&func_data, var_name, arg_count, call.syntax().text_range()) + { + errors.push(error); + } +} + +/// Get the string representation of a binary operator. 
///
/// `ErrorNoOperator` maps to the empty string.
fn binary_op_str(op: BinaryOperatorKind) -> &'static str {
    match op {
        BinaryOperatorKind::Plus => "+",
        BinaryOperatorKind::Minus => "-",
        BinaryOperatorKind::Mul => "*",
        BinaryOperatorKind::Div => "/",
        BinaryOperatorKind::Modulo => "%",
        BinaryOperatorKind::And => "&&",
        BinaryOperatorKind::Or => "||",
        BinaryOperatorKind::BitAnd => "&",
        BinaryOperatorKind::BitOr => "|",
        BinaryOperatorKind::BitXor => "^",
        BinaryOperatorKind::Lhs => "<<",
        BinaryOperatorKind::Rhs => ">>",
        BinaryOperatorKind::Eq => "==",
        BinaryOperatorKind::Ne => "!=",
        BinaryOperatorKind::Lt => "<",
        BinaryOperatorKind::Gt => ">",
        BinaryOperatorKind::Le => "<=",
        BinaryOperatorKind::Ge => ">=",
        BinaryOperatorKind::InKw => "in",
        BinaryOperatorKind::NullCoaelse => "??",
        BinaryOperatorKind::MetaObjectApply => "+:",
        BinaryOperatorKind::ErrorNoOperator => "",
    }
}

/// Get the string representation of a unary operator.
fn unary_op_str(op: UnaryOperatorKind) -> &'static str {
    match op {
        UnaryOperatorKind::Not => "!",
        UnaryOperatorKind::Minus => "-",
        UnaryOperatorKind::BitNot => "~",
    }
}

/// Extract a constant index value from an expression.
///
/// Returns `Some(index)` if the expression is a non-negative integer literal.
fn get_constant_index(expr: Option<&Expr>) -> Option<usize> {
    let expr = expr?;
    let ExprBase::ExprNumber(num) = expr.expr_base()?
else { + return None; + }; + let text = num.syntax().text().to_string(); + let value: f64 = text.parse().ok()?; + + // Only accept non-negative integers + if value >= 0.0 && value.fract() == 0.0 { + Some(value as usize) + } else { + None + } +} + +#[cfg(test)] +mod tests { + use assert_matches::assert_matches; + use jrsonnet_lsp_document::DocVersion; + + use super::*; + + fn check_code(code: &str) -> Vec { + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let analysis = TypeAnalysis::analyze(&doc); + let config = TypeCheckConfig::all(); + check_types(&doc, &analysis, &config) + } + + #[test] + fn test_string_plus_object_error() { + let errors = check_code(r#""str" + {}"#); + assert!( + matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::BinaryOpMismatch { .. }, + .. + }] + ), + "expected single BinaryOpMismatch, got: {errors:?}" + ); + } + + #[test] + fn test_number_field_access_error() { + // Use parentheses to ensure parser treats this as field access on number + let errors = check_code("(42).foo"); + assert!( + matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::FieldAccessOnNonObject { .. }, + .. + }] + ), + "expected single FieldAccessOnNonObject, got: {errors:?}" + ); + } + + #[test] + fn test_number_index_error() { + let errors = check_code("42[0]"); + assert!( + matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::IndexOnNonIndexable { .. }, + .. + }] + ), + "expected single IndexOnNonIndexable, got: {errors:?}" + ); + } + + #[test] + fn test_string_call_error() { + let errors = check_code(r#""hello"()"#); + assert!( + matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::CallOnNonFunction { .. }, + .. 
+ }] + ), + "expected single CallOnNonFunction, got: {errors:?}" + ); + } + + #[test] + fn test_unary_not_on_object_error() { + // Use parentheses to ensure parser parses correctly + let errors = check_code("!({})"); + assert!( + matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::UnaryOpMismatch { .. }, + .. + }] + ), + "expected single UnaryOpMismatch, got: {errors:?}" + ); + } + + #[test] + fn test_unary_minus_on_string_error() { + // Use parentheses to ensure parser parses correctly + let errors = check_code(r#"-("hello")"#); + assert!( + matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::UnaryOpMismatch { .. }, + .. + }] + ), + "expected single UnaryOpMismatch, got: {errors:?}" + ); + } + + #[test] + fn test_valid_number_addition() { + let errors = check_code("1 + 2"); + assert!(errors.is_empty(), "expected no errors, got: {errors:?}"); + } + + #[test] + fn test_valid_string_concatenation() { + let errors = check_code(r#""hello" + "world""#); + assert!(errors.is_empty(), "expected no errors, got: {errors:?}"); + } + + #[test] + fn test_valid_array_concatenation() { + let errors = check_code("[1, 2] + [3, 4]"); + assert!(errors.is_empty(), "expected no errors, got: {errors:?}"); + } + + #[test] + fn test_valid_object_merge() { + let errors = check_code("{a: 1} + {b: 2}"); + assert!(errors.is_empty(), "expected no errors, got: {errors:?}"); + } + + #[test] + fn test_valid_field_access() { + let errors = check_code("{a: 1}.a"); + assert!(errors.is_empty(), "expected no errors, got: {errors:?}"); + } + + #[test] + fn test_valid_array_index() { + let errors = check_code("[1, 2, 3][0]"); + assert!(errors.is_empty(), "expected no errors, got: {errors:?}"); + } + + #[test] + fn test_valid_function_call() { + let errors = check_code("(function(x) x)(1)"); + assert!(errors.is_empty(), "expected no errors, got: {errors:?}"); + } + + #[test] + fn test_any_type_no_error() { + // Calling an unknown stdlib function returns Any, so no type 
error + let errors = check_code("std.foo() + 1"); + assert!(errors.is_empty(), "expected no errors, got: {errors:?}"); + } + + #[test] + fn test_bitwise_on_strings_error() { + let errors = check_code(r#""a" | "b""#); + assert!( + matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::BinaryOpMismatch { .. }, + .. + }] + ), + "expected single BinaryOpMismatch, got: {errors:?}" + ); + } + + #[test] + fn test_in_operator_valid() { + let errors = check_code(r#""foo" in {foo: 1}"#); + assert!(errors.is_empty(), "expected no errors, got: {errors:?}"); + } + + #[test] + fn test_in_operator_invalid_lhs() { + let errors = check_code("42 in {foo: 1}"); + assert!( + matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::BinaryOpMismatch { .. }, + .. + }] + ), + "expected single BinaryOpMismatch, got: {errors:?}" + ); + } + + #[test] + fn test_comparison_always_valid() { + // Comparison operators work on any types + let errors = check_code(r#""a" == 1"#); + assert!(errors.is_empty(), "expected no errors, got: {errors:?}"); + } + + #[test] + fn test_null_coalesce_always_valid() { + let errors = check_code("null ?? 1"); + assert!(errors.is_empty(), "expected no errors, got: {errors:?}"); + } + + #[test] + fn test_stdlib_too_few_args() { + // std.map requires 2 arguments + let errors = check_code("std.map(function(x) x)"); + assert!( + matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::TooFewArguments { + function_name, + required: 2, + provided: 1, + }, + .. + }] if function_name == "std.map" + ), + "expected TooFewArguments for std.map, got: {errors:?}" + ); + } + + #[test] + fn test_stdlib_too_many_args() { + // std.length takes 1 argument + let errors = check_code("std.length([1, 2], 3)"); + assert!( + matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::TooManyArguments { + function_name, + max_allowed: 1, + provided: 2, + }, + .. 
+ }] if function_name == "std.length" + ), + "expected TooManyArguments for std.length, got: {errors:?}" + ); + } + + #[test] + fn test_stdlib_correct_arg_count() { + // std.map with correct 2 arguments + let errors = check_code("std.map(function(x) x, [1, 2, 3])"); + assert!(errors.is_empty(), "expected no errors, got: {errors:?}"); + } + + #[test] + fn test_stdlib_optional_args() { + // std.sort has 1 required and 1 optional argument + let errors = check_code("std.sort([3, 1, 2])"); + assert!( + errors.is_empty(), + "expected no errors for sort with 1 arg, got: {errors:?}" + ); + + // With optional argument + let errors = check_code("std.sort([3, 1, 2], function(x) x)"); + assert!( + errors.is_empty(), + "expected no errors for sort with 2 args, got: {errors:?}" + ); + } + + #[test] + fn test_stdlib_variadic() { + // std.format is variadic + let errors = check_code(r#"std.format("%s %d %s", "a", 1, "b")"#); + assert!( + errors.is_empty(), + "expected no errors for variadic format, got: {errors:?}" + ); + } + + #[test] + fn test_user_function_too_few_args() { + // User function with 2 required parameters called with 1 arg + let errors = check_code("local add(a, b) = a + b; add(1)"); + assert!( + matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::TooFewArguments { + function_name, + required: 2, + provided: 1, + }, + .. + }] if function_name == "add" + ), + "expected TooFewArguments for add, got: {errors:?}" + ); + } + + #[test] + fn test_user_function_too_many_args() { + // User function with 1 parameter called with 2 args + let errors = check_code("local double(x) = x * 2; double(1, 2)"); + assert!( + matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::TooManyArguments { + function_name, + max_allowed: 1, + provided: 2, + }, + .. 
+ }] if function_name == "double" + ), + "expected TooManyArguments for double, got: {errors:?}" + ); + } + + #[test] + fn test_user_function_correct_arg_count() { + // User function with correct argument count + let errors = check_code("local add(a, b) = a + b; add(1, 2)"); + assert!(errors.is_empty(), "expected no errors, got: {errors:?}"); + } + + #[test] + fn test_user_function_with_defaults() { + // User function with default parameter + let errors = check_code( + "local greet(name, greeting='Hello') = greeting + ' ' + name; greet('world')", + ); + assert!( + errors.is_empty(), + "expected no errors for function with default param, got: {errors:?}" + ); + + // With both args + let errors = check_code( + "local greet(name, greeting='Hello') = greeting + ' ' + name; greet('world', 'Hi')", + ); + assert!( + errors.is_empty(), + "expected no errors for function with both args, got: {errors:?}" + ); + } + + #[test] + fn test_user_function_all_defaults() { + // Function with only default parameters - can be called with 0 args + let errors = check_code("local f(a=1, b=2) = a + b; f()"); + assert!( + errors.is_empty(), + "expected no errors for function with all defaults, got: {errors:?}" + ); + } + + #[test] + fn test_no_such_field_error() { + // Access non-existent field on object with known structure + let errors = check_code("{a: 1, b: 2}.c"); + assert!( + matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::NoSuchField { field, available, .. }, + .. 
+ }] if field == "c" && available == &["a", "b"] + ), + "expected NoSuchField error for field 'c', got: {errors:?}" + ); + } + + #[test] + fn test_valid_field_access_known_object() { + // Access existing field on object with known structure + let errors = check_code("{a: 1, b: 2}.a"); + assert!(errors.is_empty(), "expected no errors, got: {errors:?}"); + } + + #[test] + fn test_field_access_on_object_with_unknown_fields() { + // Object comprehension has unknown fields - no error + let errors = check_code("{ [x]: x for x in ['a', 'b'] }.c"); + assert!( + errors.is_empty(), + "expected no errors for object comprehension, got: {errors:?}" + ); + } + + #[test] + fn test_field_access_on_merged_objects() { + // Merged objects with known fields - we track the combined fields + let errors = check_code("({a: 1} + {b: 2}).c"); + // Field access on merged object with known fields should error + assert!( + matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::NoSuchField { field, available, .. }, + .. + }] if field == "c" && available == &["a", "b"] + ), + "expected NoSuchField error for field 'c', got: {errors:?}" + ); + } + + #[test] + fn test_valid_field_access_on_merged_objects() { + // Merged objects - accessing known fields should succeed + let errors = check_code("({a: 1} + {b: 2}).a"); + assert!( + errors.is_empty(), + "expected no errors for valid field access on merged object, got: {errors:?}" + ); + + let errors = check_code("({a: 1} + {b: 2}).b"); + assert!( + errors.is_empty(), + "expected no errors for valid field access on merged object, got: {errors:?}" + ); + } + + #[test] + fn test_merged_object_field_override() { + // When merging, right operand fields override left + // Both have field 'a', result should have 'a' and 'b' + let errors = check_code("({a: 1} + {a: 'str', b: 2}).c"); + assert!( + matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::NoSuchField { field, available, .. }, + .. 
+ }] if field == "c" && available == &["a", "b"] + ), + "expected NoSuchField error for field 'c', got: {errors:?}" + ); + } + + #[test] + fn test_no_such_field_on_local_object() { + // Access non-existent field on local object binding + let errors = check_code("local obj = {x: 1}; obj.y"); + assert!( + matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::NoSuchField { field, available, .. }, + .. + }] if field == "y" && available == &["x"] + ), + "expected NoSuchField error for field 'y', got: {errors:?}" + ); + } + + #[test] + fn test_tuple_index_in_bounds() { + // Valid tuple index access + assert_eq!(check_code("[1, 2, 3][0]").as_slice(), &[]); + assert_eq!(check_code("[1, 2, 3][2]").as_slice(), &[]); + } + + #[test] + fn test_tuple_index_out_of_bounds() { + // Index out of bounds on tuple + let errors = check_code("[1, 2, 3][5]"); + assert!( + matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::TupleIndexOutOfBounds { + tuple_len: 3, + index: 5 + }, + .. 
+ }] + ), + "expected TupleIndexOutOfBounds error, got: {errors:?}" + ); + } + + #[test] + fn test_tuple_negative_index_ignored() { + // Negative indices are not statically checked + assert_eq!(check_code("[1, 2, 3][-1]").as_slice(), &[]); + } + + #[test] + fn test_tuple_non_constant_index_ignored() { + // Non-constant indices can't be checked statically + assert_eq!(check_code("local i = 5; [1, 2, 3][i]").as_slice(), &[]); + } + + // Format string validation tests + + #[test] + fn test_format_valid_string() { + // Valid format string with correct arguments + let errors = check_code(r#"std.format("Hello %s!", "world")"#); + assert_eq!( + errors.as_slice(), + &[], + "expected no errors, got: {errors:?}" + ); + } + + #[test] + fn test_format_valid_multiple_args() { + // Valid format with multiple arguments + let errors = check_code(r#"std.format("%s has %d apples", "Alice", 5)"#); + assert_eq!( + errors.as_slice(), + &[], + "expected no errors, got: {errors:?}" + ); + } + + #[test] + fn test_format_invalid_specifier() { + // Unknown format specifier + let errors = check_code(r#"std.format("%z", 1)"#); + assert!( + matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::FormatStringError { message }, + .. + }] if message.contains("%z") + ), + "expected FormatStringError for unknown specifier, got: {errors:?}" + ); + } + + #[test] + fn test_format_too_few_args() { + // Not enough arguments for placeholders + let errors = check_code(r#"std.format("%s %s", "one")"#); + assert!( + matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::FormatArgCount { + expected: 2, + provided: 1 + }, + .. 
+ }] + ), + "expected FormatArgCount error, got: {errors:?}" + ); + } + + #[test] + fn test_format_too_many_args() { + // Too many arguments for placeholders + let errors = check_code(r#"std.format("%s", "one", "two", "three")"#); + assert!( + matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::FormatArgCount { + expected: 1, + provided: 3 + }, + .. + }] + ), + "expected FormatArgCount error, got: {errors:?}" + ); + } + + #[test] + fn test_format_type_mismatch_number() { + // %d expects number, got string + let errors = check_code(r#"std.format("%d", "not a number")"#); + assert!( + matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::FormatArgTypeMismatch { + index: 0, + specifier: 'd', + .. + }, + .. + }] + ), + "expected FormatArgTypeMismatch for %d with string, got: {errors:?}" + ); + } + + #[test] + fn test_format_string_accepts_any() { + // %s accepts any type - number is fine + let errors = check_code(r#"std.format("%s", 42)"#); + assert_eq!( + errors.as_slice(), + &[], + "expected no errors for %s with number, got: {errors:?}" + ); + } + + #[test] + fn test_format_escaped_percent() { + // %% doesn't count as a placeholder + let errors = check_code(r#"std.format("100%% complete")"#); + assert_eq!( + errors.as_slice(), + &[], + "expected no errors for escaped percent, got: {errors:?}" + ); + } + + #[test] + fn test_format_incomplete_specifier() { + // Incomplete format specifier at end + let errors = check_code(r#"std.format("Hello %")"#); + assert!( + matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::FormatStringError { message }, + .. 
+ }] if message.contains("incomplete") + ), + "expected FormatStringError for incomplete specifier, got: {errors:?}" + ); + } + + #[test] + fn test_format_with_width_precision() { + // Format with width and precision modifiers + let errors = check_code(r#"std.format("%10.2f", 3.14159)"#); + assert_eq!( + errors.as_slice(), + &[], + "expected no errors for format with width/precision, got: {errors:?}" + ); + } + + #[test] + fn test_format_non_literal_string() { + // Can't validate non-literal format strings + let errors = check_code(r#"local fmt = "%s"; std.format(fmt, "hello")"#); + assert_eq!( + errors.as_slice(), + &[], + "expected no errors for non-literal format string, got: {errors:?}" + ); + } + + #[test] + fn test_no_such_field_with_suggestion() { + // Typo should trigger "did you mean" suggestion + let errors = check_code("{length: 1, width: 2}.lenght"); + assert_matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::NoSuchField { field, suggestion: Some(suggestion), .. }, + .. + }] if field == "lenght" && suggestion == "length" + ); + } + + #[test] + fn test_no_such_field_no_suggestion_for_unrelated() { + // Completely different field name should not have suggestion + let errors = check_code("{a: 1, b: 2}.xyz"); + assert_matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::NoSuchField { field, suggestion: None, .. }, + .. + }] if field == "xyz" + ); + } + + // Argument type validation tests + + #[test] + fn test_stdlib_arg_type_mismatch() { + // std.length expects an array, string, or object, not a number + let errors = check_code("std.length(42)"); + assert_matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::ArgumentTypeMismatch { function_name, param_name, .. }, + .. 
+ }] if function_name == "std.length" && param_name == "x" + ); + } + + #[test] + fn test_stdlib_arg_type_valid() { + // std.length with valid array argument should produce no errors + let errors = check_code("std.length([1, 2, 3])"); + assert_eq!( + errors.as_slice(), + &[], + "expected no errors, got: {errors:?}" + ); + } + + #[test] + fn test_stdlib_higher_order_accepts_any_function() { + // std.map should accept any function, not just function() + let errors = check_code("std.map(function(x) x + 1, [1, 2, 3])"); + assert_eq!( + errors.as_slice(), + &[], + "expected no errors for std.map with function(x), got: {errors:?}" + ); + } + + #[test] + fn test_stdlib_higher_order_rejects_non_function() { + // std.map's first arg must be a function, not a number + let errors = check_code("std.map(42, [1, 2, 3])"); + assert_matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::ArgumentTypeMismatch { function_name, param_name, .. }, + .. + }] if function_name == "std.map" && param_name == "func" + ); + } + + // Higher-order callback type validation tests + + #[test] + fn test_callback_type_valid_map_with_any_param() { + // Callback with untyped param (Any) should accept anything + // User-defined functions don't have type annotations in Jsonnet + let errors = check_code("std.map(function(x) x + 1, [1, 2, 3])"); + assert_eq!( + errors.as_slice(), + &[], + "expected no errors for std.map with untyped callback, got: {errors:?}" + ); + } + + #[test] + fn test_callback_type_valid_map_std_length_with_arrays() { + // std.length accepts arrays, so passing array elements to it is valid + let errors = check_code("std.map(std.length, [[1, 2], [3, 4, 5]])"); + assert_eq!( + errors.as_slice(), + &[], + "expected no errors for std.map(std.length, [[...], ...]), got: {errors:?}" + ); + } + + #[test] + fn test_callback_type_valid_map_std_length_with_strings() { + // std.length accepts strings + let errors = check_code(r#"std.map(std.length, ["hello", "world"])"#); + 
assert_eq!( + errors.as_slice(), + &[], + "expected no errors for std.map(std.length, [...strings...]), got: {errors:?}" + ); + } + + #[test] + fn test_callback_type_mismatch_map_std_length_with_numbers() { + // std.length does NOT accept numbers, so passing number array should error + let errors = check_code("std.map(std.length, [1, 2, 3])"); + assert_matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::CallbackTypeMismatch { function_name, callback_param, .. }, + .. + }] if function_name == "std.map" && callback_param == "func" + ); + } + + #[test] + fn test_callback_type_mismatch_map_std_length_with_booleans() { + // std.length does NOT accept booleans + let errors = check_code("std.map(std.length, [true, false])"); + assert_matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::CallbackTypeMismatch { function_name, callback_param, .. }, + .. + }] if function_name == "std.map" && callback_param == "func" + ); + } + + #[test] + fn test_apply_substitution() { + use assert_matches::assert_matches; + use jrsonnet_lsp_types::{GlobalTyStore, LocalTyStore, Ty, TyData, TySubst}; + use rowan::TextRange; + + // Create a local type and a substitution mapping it to a global type + let global = GlobalTyStore::new(); + let mut local = LocalTyStore::new(); + let local_arr = local.intern(TyData::Array { + elem: Ty::NUMBER, + is_set: false, + }); + assert!(local_arr.is_local()); + + let subst = TySubst::merge(&global, local); + let global_arr = subst.apply(local_arr); + assert!(global_arr.is_global()); + + // Create an error with the local type + let error = TypeError { + kind: TypeErrorKind::FieldAccessOnNonObject { actual: local_arr }, + range: TextRange::new(0.into(), 10.into()), + }; + + // Apply substitution + let substituted = error.apply_substitution(&subst); + + // Verify the type was substituted and range is preserved + assert_matches!( + substituted, + TypeError { + kind: TypeErrorKind::FieldAccessOnNonObject { actual }, + range, + } if 
actual == global_arr && actual.is_global() && range == error.range + ); + } + + #[test] + fn test_apply_substitution_argument_mismatch() { + use assert_matches::assert_matches; + use jrsonnet_lsp_types::{GlobalTyStore, LocalTyStore, Ty, TyData, TySubst}; + use rowan::TextRange; + + let global = GlobalTyStore::new(); + let mut local = LocalTyStore::new(); + + // Create two local types + let expected_local = local.intern(TyData::Array { + elem: Ty::STRING, + is_set: false, + }); + let actual_local = local.intern(TyData::Array { + elem: Ty::NUMBER, + is_set: false, + }); + + let subst = TySubst::merge(&global, local); + let expected_global = subst.apply(expected_local); + let actual_global = subst.apply(actual_local); + + // Create an error with both local types + let error = TypeError { + kind: TypeErrorKind::ArgumentTypeMismatch { + function_name: "myFunc".to_string(), + param_name: "arr".to_string(), + param_index: 0, + expected: expected_local, + actual: actual_local, + }, + range: TextRange::new(5.into(), 15.into()), + }; + + // Apply substitution + let substituted = error.apply_substitution(&subst); + + // Verify both types were substituted + assert_matches!( + substituted, + TypeError { + kind: TypeErrorKind::ArgumentTypeMismatch { + function_name, + param_name, + param_index: 0, + expected, + actual, + }, + .. 
+ } if function_name == "myFunc" + && param_name == "arr" + && expected == expected_global + && actual == actual_global + && expected.is_global() + && actual.is_global() + ); + } + + #[test] + fn test_apply_substitution_no_ty_fields() { + use assert_matches::assert_matches; + use jrsonnet_lsp_types::TySubst; + use rowan::TextRange; + + // Error kinds without Ty fields should be unchanged + let subst = TySubst::new(); + + let error = TypeError { + kind: TypeErrorKind::NoSuchField { + field: "foo".to_string(), + available: vec!["bar".to_string(), "baz".to_string()], + suggestion: Some("bar".to_string()), + }, + range: TextRange::new(0.into(), 5.into()), + }; + + let substituted = error.apply_substitution(&subst); + + assert_matches!( + substituted, + TypeError { + kind: TypeErrorKind::NoSuchField { + field, + available, + suggestion: Some(suggested), + }, + .. + } if field == "foo" + && available == vec!["bar".to_string(), "baz".to_string()] + && suggested == "bar" + ); + } +} diff --git a/crates/jrsonnet-lsp-document/Cargo.toml b/crates/jrsonnet-lsp-document/Cargo.toml new file mode 100644 index 00000000..7e460d29 --- /dev/null +++ b/crates/jrsonnet-lsp-document/Cargo.toml @@ -0,0 +1,18 @@ +[package] +name = "jrsonnet-lsp-document" +authors.workspace = true +edition.workspace = true +license.workspace = true +repository.workspace = true +version.workspace = true +description = "Document parsing, position conversion, and AST utilities for jrsonnet LSP" + +[dependencies] +derive_more = { version = "1", features = ["full"] } +jrsonnet-rowan-parser = { version = "0.5.0-pre97", path = "../jrsonnet-rowan-parser" } +lsp-types.workspace = true +rowan.workspace = true +thiserror.workspace = true + +[lints] +workspace = true diff --git a/crates/jrsonnet-lsp-document/src/ast_utils.rs b/crates/jrsonnet-lsp-document/src/ast_utils.rs new file mode 100644 index 00000000..ea3af298 --- /dev/null +++ b/crates/jrsonnet-lsp-document/src/ast_utils.rs @@ -0,0 +1,129 @@ +//! 
Shared AST utility functions. +//! +//! This module provides common utility functions for working with the Jsonnet AST +//! that are used across multiple handlers. + +use jrsonnet_rowan_parser::{SyntaxNode, SyntaxToken}; +use lsp_types::Range; +use rowan::TextRange; + +use crate::{ByteOffset, LineIndex, LspPosition}; + +/// Strip quotes from a string literal, handling all Jsonnet string formats. +/// NOTE(review): `trim_start_matches`/`trim_end_matches` strip ALL consecutive matching characters, so content that itself begins or ends with a quote character is over-stripped (e.g. `"'foo'"` becomes `foo`, not `'foo'`) — confirm this is acceptable for nested/escaped quotes. +/// Handles: +/// - Double-quoted: `"foo"` → `foo` +/// - Single-quoted: `'foo'` → `foo` +/// - Verbatim double: `@"foo"` → `foo` +/// - Verbatim single: `@'foo'` → `foo` +pub fn strip_string_quotes(s: &str) -> String { + s.trim_start_matches('@') + .trim_start_matches('"') + .trim_start_matches('\'') + .trim_end_matches('"') + .trim_end_matches('\'') + .to_string() +} + +/// Find the token at the given byte offset, preferring the rightmost token +/// when the offset is between two tokens. +pub fn token_at_offset(root: &SyntaxNode, offset: ByteOffset) -> Option { + root.token_at_offset(rowan::TextSize::from(u32::from(offset))) + .right_biased() +} + +/// Convert a rowan `TextRange` to an LSP `Range`. +/// +/// This handles the conversion from byte offsets to LSP positions +/// (line number and UTF-16 character offset). +pub fn to_lsp_range(range: TextRange, line_index: &LineIndex, text: &str) -> Range { + let start = line_index + .position(range.start().into(), text) + .unwrap_or_default(); + let end = line_index + .position(range.end().into(), text) + .unwrap_or_default(); + + Range { + start: start.into(), + end: end.into(), + } +} + +/// Find the deepest node containing the given offset. +/// +/// This is useful when the cursor is at whitespace or between tokens, +/// where `token_at_offset` would return `None`. 
+pub fn find_node_at_offset(root: &SyntaxNode, offset: ByteOffset) -> Option { + let text_size = rowan::TextSize::from(u32::from(offset)); + + // Find the deepest node that contains this offset + let mut result = None; + for node in root.descendants() { + if node.text_range().contains_inclusive(text_size) { + result = Some(node); + } + } + result +} + +/// Convert an LSP position to a byte offset in the document. +/// +/// Returns `None` if the position is invalid. +pub fn position_to_offset( + line_index: &LineIndex, + position: LspPosition, + text: &str, +) -> Option { + line_index.offset(position, text) +} + +#[cfg(test)] +mod tests { + use jrsonnet_rowan_parser::AstNode; + + use super::*; + use crate::{DocVersion, Document}; + + #[test] + fn test_token_at_offset() { + let code = "local x = 1;"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let ast = doc.ast(); + + // Token at 'x' (offset 6) + let token = token_at_offset(ast.syntax(), ByteOffset::from(6u32)) + .expect("should find token at offset 6"); + assert_eq!(token.text(), "x"); + } + + #[test] + fn test_to_lsp_range() { + let code = "local x = 1;"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let line_index = doc.line_index(); + + // Range for "local" + let range = TextRange::new(0.into(), 5.into()); + let lsp_range = to_lsp_range(range, line_index, code); + + assert_eq!(lsp_range.start.line, 0); + assert_eq!(lsp_range.start.character, 0); + assert_eq!(lsp_range.end.line, 0); + assert_eq!(lsp_range.end.character, 5); + } + + #[test] + fn test_strip_string_quotes() { + // Double-quoted strings + assert_eq!(strip_string_quotes(r#""foo.jsonnet""#), "foo.jsonnet"); + // Single-quoted strings + assert_eq!(strip_string_quotes("'bar.jsonnet'"), "bar.jsonnet"); + // Verbatim double-quoted strings + assert_eq!(strip_string_quotes(r#"@"baz.jsonnet""#), "baz.jsonnet"); + // Verbatim single-quoted strings + assert_eq!(strip_string_quotes("@'qux.jsonnet'"), "qux.jsonnet"); + 
// No quotes (edge case) + assert_eq!(strip_string_quotes("raw"), "raw"); + } +} diff --git a/crates/jrsonnet-lsp-document/src/config.rs b/crates/jrsonnet-lsp-document/src/config.rs new file mode 100644 index 00000000..a00a17e3 --- /dev/null +++ b/crates/jrsonnet-lsp-document/src/config.rs @@ -0,0 +1,12 @@ +//! Configuration constants for LSP analysis. +//! +//! Centralizes cache capacities and other tunable parameters. + +/// Default capacity for the closed document cache in DocumentManager. +pub const DEFAULT_CLOSED_CACHE_CAPACITY: usize = 100; + +/// Default capacity for the type analysis cache in DocumentManager. +pub const DEFAULT_ANALYSIS_CACHE_CAPACITY: usize = 100; + +/// Default capacity for the shared type cache. +pub const DEFAULT_TYPE_CACHE_CAPACITY: usize = 500; diff --git a/crates/jrsonnet-lsp-document/src/document.rs b/crates/jrsonnet-lsp-document/src/document.rs new file mode 100644 index 00000000..02968701 --- /dev/null +++ b/crates/jrsonnet-lsp-document/src/document.rs @@ -0,0 +1,512 @@ +//! Document representation and parsing. +//! +//! Wraps the jrsonnet-rowan-parser to provide error-tolerant AST +//! and associated metadata for IDE features. + +use std::{collections::HashSet, ops::Deref, sync::Arc}; + +/// Re-export the parser's syntax error type for use in LSP. +pub use jrsonnet_rowan_parser::LocatedSyntaxError as SyntaxError; +use jrsonnet_rowan_parser::{nodes::SourceFile, GreenNode}; + +use crate::{position::LineIndex, types::DocVersion}; + +/// A parsed Jsonnet document containing the AST and any syntax errors. +/// +/// Uses error-tolerant parsing to produce a partial AST even when the +/// document contains syntax errors. This enables IDE features to work +/// on incomplete or invalid code. +/// +/// Stores a `GreenNode` (thread-safe) and creates `SourceFile` on demand. +/// This allows `ParsedDocument` to be `Send + Sync` for parallel processing. 
+#[derive(Debug, Clone)] +pub struct ParsedDocument { + green: GreenNode, + errors: Arc<[SyntaxError]>, +} + +impl ParsedDocument { + /// Parse a document from source text. + pub fn parse(text: &str) -> Self { + let (green, errors) = jrsonnet_rowan_parser::parse_green(text); + Self { + green, + errors: errors.into(), + } + } + + /// Get the AST. Creates a fresh `SourceFile` cursor on each call. + pub fn ast(&self) -> SourceFile { + jrsonnet_rowan_parser::source_file_from_green(&self.green) + } + + /// Get syntax errors. + pub fn errors(&self) -> &[SyntaxError] { + &self.errors + } + + /// Check if the document has any syntax errors. + pub fn has_errors(&self) -> bool { + !self.errors.is_empty() + } +} + +/// A document with all computed artifacts. +/// +/// This is the cached representation of an open document, +/// containing the source text and computed metadata. +/// Cloning is cheap and shares the underlying data. +#[derive(Debug, Clone)] +pub struct Document { + /// The source text of the document. + text: Arc, + /// Document version from the editor. + version: DocVersion, + /// Parsed AST and errors. + parsed: ParsedDocument, + /// Line index for position conversion. + line_index: Arc, + /// Last successful parse for graceful degradation. + /// Used when current parse has errors. + last_good_parse: Option, + /// Last good line index (corresponding to last_good_parse). + last_good_line_index: Option>, + /// Lines that have changed since last_good_parse. + /// If None, no tracking is active (current parse is good). + dirty_lines: Option>, +} + +impl Document { + /// Create a new document from source text. + pub fn new(text: String, version: DocVersion) -> Self { + let line_index = Arc::new(LineIndex::new(&text)); + let parsed = ParsedDocument::parse(&text); + Self { + text: text.into(), + version, + parsed, + line_index, + last_good_parse: None, + last_good_line_index: None, + dirty_lines: None, + } + } + + /// Get the source text. 
+ pub fn text(&self) -> &str { + &self.text + } + + /// Get the document version. + pub fn version(&self) -> DocVersion { + self.version + } + + /// Get the line index. + pub fn line_index(&self) -> &LineIndex { + &self.line_index + } + + /// Update the document with new text (full sync). + /// + /// This replaces the document content and re-parses. + /// Due to Arc, any clones will continue to reference the old data. + pub fn update(&mut self, text: String, version: DocVersion) { + // Save the current state if it's error-free (for graceful degradation) + if !self.parsed.has_errors() { + self.last_good_parse = Some(self.parsed.clone()); + self.last_good_line_index = Some(self.line_index.clone()); + } + + self.line_index = Arc::new(LineIndex::new(&text)); + self.parsed = ParsedDocument::parse(&text); + self.text = text.into(); + self.version = version; + + // Update dirty line tracking + if self.parsed.has_errors() { + // Full replacement means all lines are potentially dirty + let line_count = self.line_index.line_count(); + self.dirty_lines = Some((0..line_count).collect()); + } else { + // Parse succeeded, clear dirty tracking + self.dirty_lines = None; + self.last_good_parse = None; + self.last_good_line_index = None; + } + } + + /// Apply an incremental change to the document. + /// + /// Takes an LSP range and new text, applies the change, and re-parses. + /// Returns true if the change was applied successfully. 
+ pub fn apply_incremental_change( + &mut self, + range: lsp_types::Range, + new_text: &str, + version: DocVersion, + ) -> bool { + use crate::types::LspPosition; + + // Save the current state if it's error-free (for graceful degradation) + if !self.parsed.has_errors() { + self.last_good_parse = Some(self.parsed.clone()); + self.last_good_line_index = Some(self.line_index.clone()); + self.dirty_lines = Some(HashSet::new()); + } + + // Convert LSP range to byte offsets + let start_pos = LspPosition::from(range.start); + let end_pos = LspPosition::from(range.end); + + let start_offset = match self.line_index.offset(start_pos, &self.text) { + Some(o) => usize::from(o), + None => return false, + }; + let end_offset = match self.line_index.offset(end_pos, &self.text) { + Some(o) => usize::from(o), + None => return false, + }; + + // Validate offsets + if start_offset > end_offset || end_offset > self.text.len() { + return false; + } + + // Track which lines are affected by this change + let start_line = range.start.line; + let end_line = range.end.line; + let new_line_count = new_text.matches('\n').count() as u32; + let affected_lines = end_line.saturating_sub(start_line) + new_line_count + 1; + + // Apply the text change + let mut text = self.text.to_string(); + text.replace_range(start_offset..end_offset, new_text); + + // Rebuild + self.line_index = Arc::new(LineIndex::new(&text)); + self.parsed = ParsedDocument::parse(&text); + self.text = text.into(); + self.version = version; + + // Update dirty line tracking + if self.parsed.has_errors() { + // Mark affected lines as dirty + if let Some(ref mut dirty) = self.dirty_lines { + for line in start_line..start_line.saturating_add(affected_lines) { + dirty.insert(line); + } + } + } else { + // Parse succeeded, clear dirty tracking + self.dirty_lines = None; + self.last_good_parse = None; + self.last_good_line_index = None; + } + + true + } + + /// Get the AST for navigation purposes. 
+ /// + /// Returns the current parse if successful, otherwise falls back to + /// the last good parse for graceful degradation on broken files. + pub fn navigation_ast(&self) -> SourceFile { + if self.parsed.has_errors() { + if let Some(ref last_good) = self.last_good_parse { + return last_good.ast(); + } + } + self.parsed.ast() + } + + /// Get the line index for navigation purposes. + /// + /// Returns the current line index if parse is successful, otherwise + /// falls back to the last good line index. + pub fn navigation_line_index(&self) -> &LineIndex { + if self.parsed.has_errors() { + if let Some(ref last_good) = self.last_good_line_index { + return last_good; + } + } + &self.line_index + } + + /// Check if a position is in a dirty (recently changed) region. + /// + /// Returns true if the line at the given position has been modified + /// since the last successful parse. + pub fn is_position_dirty(&self, line: u32) -> bool { + self.dirty_lines + .as_ref() + .map(|d| d.contains(&line)) + .unwrap_or(false) + } + + /// Check if there are any dirty lines (broken state with pending changes). + pub fn has_dirty_lines(&self) -> bool { + self.dirty_lines + .as_ref() + .map(|d| !d.is_empty()) + .unwrap_or(false) + } + + /// Get the set of dirty line numbers. + pub fn dirty_lines(&self) -> Option<&HashSet> { + self.dirty_lines.as_ref() + } +} + +impl Deref for Document { + type Target = ParsedDocument; + + fn deref(&self) -> &Self::Target { + &self.parsed + } +} + +/// Thread-safe document wrapper using Arc. 
+pub type SharedDocument = std::sync::Arc; + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_parse_valid_document() { + let text = r#"{ hello: "world" }"#; + let doc = Document::new(text.to_string(), DocVersion::new(1)); + + // Methods from ParsedDocument are accessed via Deref + assert_eq!(doc.errors(), &[], "valid document should have no errors"); + assert_eq!(doc.version(), DocVersion::new(1)); + } + + #[test] + fn test_parse_invalid_document() { + use jrsonnet_rowan_parser::{ExpectedSyntax, SyntaxError as ParserSyntaxError}; + use rowan::TextRange; + + let text = r"{ hello: }"; // Missing value after colon + let doc = Document::new(text.to_string(), DocVersion::new(1)); + + // Methods from ParsedDocument are accessed via Deref + assert_eq!( + doc.errors(), + &[SyntaxError { + error: ParserSyntaxError::Missing { + expected: ExpectedSyntax::Named("expression") + }, + range: TextRange::new(9.into(), 9.into()), + }] + ); + } + + #[test] + fn test_document_update() { + let mut doc = Document::new("{ a: 1 }".to_string(), DocVersion::new(1)); + assert_eq!(doc.version(), DocVersion::new(1)); + + doc.update("{ a: 2 }".to_string(), DocVersion::new(2)); + assert_eq!(doc.version(), DocVersion::new(2)); + } + + #[test] + fn test_incremental_change_insert() { + let mut doc = Document::new("{ a: 1 }".to_string(), DocVersion::new(1)); + + // Insert text at position (0, 7) - before the closing brace + let range = lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 6, + }, + end: lsp_types::Position { + line: 0, + character: 6, + }, + }; + let success = doc.apply_incremental_change(range, ", b: 2", DocVersion::new(2)); + + assert!(success); + assert_eq!(doc.text(), "{ a: 1, b: 2 }"); + assert_eq!(doc.version(), DocVersion::new(2)); + } + + #[test] + fn test_incremental_change_replace() { + let mut doc = Document::new("{ hello: 1 }".to_string(), DocVersion::new(1)); + + // Replace "hello" with "world" + let range = lsp_types::Range { + 
start: lsp_types::Position { + line: 0, + character: 2, + }, + end: lsp_types::Position { + line: 0, + character: 7, + }, + }; + let success = doc.apply_incremental_change(range, "world", DocVersion::new(2)); + + assert!(success); + assert_eq!(doc.text(), "{ world: 1 }"); + } + + #[test] + fn test_incremental_change_delete() { + let mut doc = Document::new("{ a: 1, b: 2 }".to_string(), DocVersion::new(1)); + + // Delete ", b: 2" + let range = lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 6, + }, + end: lsp_types::Position { + line: 0, + character: 12, + }, + }; + let success = doc.apply_incremental_change(range, "", DocVersion::new(2)); + + assert!(success); + assert_eq!(doc.text(), "{ a: 1 }"); + } + + #[test] + fn test_incremental_change_multiline() { + let mut doc = Document::new("{\n a: 1\n}".to_string(), DocVersion::new(1)); + + // Insert a new field on line 2 + let range = lsp_types::Range { + start: lsp_types::Position { + line: 1, + character: 6, + }, + end: lsp_types::Position { + line: 1, + character: 6, + }, + }; + let success = doc.apply_incremental_change(range, ",\n b: 2", DocVersion::new(2)); + + assert!(success); + assert_eq!(doc.text(), "{\n a: 1,\n b: 2\n}"); + } + + #[test] + fn test_incremental_change_invalid_range() { + let mut doc = Document::new("{ a: 1 }".to_string(), DocVersion::new(1)); + + // Invalid range: start after end + let range = lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 5, + }, + end: lsp_types::Position { + line: 0, + character: 2, + }, + }; + let success = doc.apply_incremental_change(range, "test", DocVersion::new(2)); + + assert!(!success); + // Document should be unchanged + assert_eq!(doc.text(), "{ a: 1 }"); + assert_eq!(doc.version(), DocVersion::new(1)); + } + + #[test] + fn test_incremental_change_out_of_bounds() { + let mut doc = Document::new("{ a: 1 }".to_string(), DocVersion::new(1)); + + // Out of bounds line + let range = lsp_types::Range { + start: 
lsp_types::Position { + line: 10, + character: 0, + }, + end: lsp_types::Position { + line: 10, + character: 5, + }, + }; + let success = doc.apply_incremental_change(range, "test", DocVersion::new(2)); + + assert!(!success); + assert_eq!(doc.text(), "{ a: 1 }"); + } + + #[test] + fn test_graceful_degradation_on_syntax_error() { + use jrsonnet_rowan_parser::AstNode; + + let mut doc = Document::new("{ a: 1 }".to_string(), DocVersion::new(1)); + + // Initially should have no errors + assert!(!doc.has_errors()); + assert!(!doc.has_dirty_lines()); + + // Introduce a syntax error + doc.update("{ a: }".to_string(), DocVersion::new(2)); + + // Should have errors now + assert!(doc.has_errors()); + assert!(doc.has_dirty_lines()); + + // Should have a fallback AST + let nav_ast = doc.navigation_ast(); + // The fallback AST should still be usable (from the original good parse) + // and be different from the current broken AST + assert!(!doc.ast().syntax().text().to_string().is_empty()); + assert!(!nav_ast.syntax().text().to_string().is_empty()); + } + + #[test] + fn test_graceful_degradation_recovery() { + let mut doc = Document::new("{ a: 1 }".to_string(), DocVersion::new(1)); + + // Introduce a syntax error + doc.update("{ a: }".to_string(), DocVersion::new(2)); + assert!(doc.has_errors()); + assert!(doc.has_dirty_lines()); + + // Fix the error + doc.update("{ a: 2 }".to_string(), DocVersion::new(3)); + + // Should no longer have errors or dirty lines + assert!(!doc.has_errors()); + assert!(!doc.has_dirty_lines()); + } + + #[test] + fn test_dirty_line_tracking() { + let mut doc = Document::new("{\n a: 1\n}".to_string(), DocVersion::new(1)); + + // Make an incremental change that causes an error + let range = lsp_types::Range { + start: lsp_types::Position { + line: 1, + character: 5, + }, + end: lsp_types::Position { + line: 1, + character: 6, + }, + }; + // Delete the "1" leaving "{ a: }" + doc.apply_incremental_change(range, "", DocVersion::new(2)); + + // Should have 
errors (incomplete expression) + assert!(doc.has_errors()); + + // Line 1 should be dirty + assert!(doc.is_position_dirty(1)); + // Line 0 should not be dirty + assert!(!doc.is_position_dirty(0)); + } +} diff --git a/crates/jrsonnet-lsp-document/src/error.rs b/crates/jrsonnet-lsp-document/src/error.rs new file mode 100644 index 00000000..04baecf7 --- /dev/null +++ b/crates/jrsonnet-lsp-document/src/error.rs @@ -0,0 +1,185 @@ +//! Error types for the LSP server. +//! +//! Provides structured error types for better error handling and diagnostics. + +use thiserror::Error; + +/// Errors that can occur during LSP operations. +#[derive(Error, Debug)] +pub enum LspError { + /// Document was not found in the document manager. + #[error("document not found: {0}")] + DocumentNotFound(String), + + /// Position is invalid (e.g., line or character out of bounds). + #[error("invalid position: line {line}, character {character}")] + InvalidPosition { line: u32, character: u32 }, + + /// Byte offset is out of bounds for the document. + #[error("position out of bounds: offset {0}")] + PositionOutOfBounds(u32), + + /// No token found at the given position. + #[error("no token at position")] + NoTokenAtPosition, + + /// URI could not be parsed or converted to a path. + #[error("invalid URI: {0}")] + InvalidUri(String), + + /// Identifier is not valid for Jsonnet. + #[error("invalid identifier: {0}")] + InvalidIdentifier(String), + + /// IO error occurred. + #[error("IO error: {0}")] + Io(#[from] std::io::Error), +} + +/// Result type for LSP operations. +pub type LspResult = Result; + +/// Result type for handler functions that may return no result. +/// +/// This is used for handlers where `None` is a valid response (e.g., no hover info available) +/// but we also want to distinguish from actual errors. +pub type HandlerResult = Result, LspError>; + +/// Jsonnet language keywords that cannot be used as identifiers. 
+const JSONNET_KEYWORDS: &[&str] = &[ + "assert", + "else", + "error", + "false", + "for", + "function", + "if", + "import", + "importbin", + "importstr", + "in", + "local", + "null", + "self", + "super", + "tailstrict", + "then", + "true", +]; + +/// Check if a string is a valid Jsonnet identifier. +/// +/// Valid identifiers: +/// - Start with a letter (a-z, A-Z) or underscore +/// - Contain only letters, digits, and underscores +/// - Are not Jsonnet keywords +pub fn is_valid_jsonnet_identifier(name: &str) -> bool { + if name.is_empty() { + return false; + } + + // Check first character + let mut chars = name.chars(); + let first = chars.next().unwrap(); + if !first.is_ascii_alphabetic() && first != '_' { + return false; + } + + // Check remaining characters + for c in chars { + if !c.is_ascii_alphanumeric() && c != '_' { + return false; + } + } + + // Check not a keyword + !JSONNET_KEYWORDS.contains(&name) +} + +/// Validate an identifier for renaming operations. +/// +/// Returns `Ok(())` if valid, or an error describing why it's invalid. 
+pub fn validate_identifier(name: &str) -> LspResult<()> { + if name.is_empty() { + return Err(LspError::InvalidIdentifier( + "identifier cannot be empty".to_string(), + )); + } + + let mut chars = name.chars(); + let first = chars.next().unwrap(); + if !first.is_ascii_alphabetic() && first != '_' { + return Err(LspError::InvalidIdentifier(format!( + "identifier must start with a letter or underscore, got '{first}'" + ))); + } + + for c in chars { + if !c.is_ascii_alphanumeric() && c != '_' { + return Err(LspError::InvalidIdentifier(format!( + "identifier contains invalid character '{c}'" + ))); + } + } + + if JSONNET_KEYWORDS.contains(&name) { + return Err(LspError::InvalidIdentifier(format!( + "'{name}' is a reserved keyword" + ))); + } + + Ok(()) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_valid_identifiers() { + assert!(is_valid_jsonnet_identifier("foo")); + assert!(is_valid_jsonnet_identifier("_foo")); + assert!(is_valid_jsonnet_identifier("foo123")); + assert!(is_valid_jsonnet_identifier("_")); + assert!(is_valid_jsonnet_identifier("camelCase")); + assert!(is_valid_jsonnet_identifier("snake_case")); + assert!(is_valid_jsonnet_identifier("SCREAMING_SNAKE")); + } + + #[test] + fn test_invalid_identifiers() { + assert!(!is_valid_jsonnet_identifier("")); + assert!(!is_valid_jsonnet_identifier("123foo")); + assert!(!is_valid_jsonnet_identifier("foo-bar")); + assert!(!is_valid_jsonnet_identifier("foo.bar")); + assert!(!is_valid_jsonnet_identifier("foo bar")); + } + + #[test] + fn test_keywords_are_invalid() { + assert!(!is_valid_jsonnet_identifier("local")); + assert!(!is_valid_jsonnet_identifier("function")); + assert!(!is_valid_jsonnet_identifier("if")); + assert!(!is_valid_jsonnet_identifier("then")); + assert!(!is_valid_jsonnet_identifier("else")); + assert!(!is_valid_jsonnet_identifier("true")); + assert!(!is_valid_jsonnet_identifier("false")); + assert!(!is_valid_jsonnet_identifier("null")); + 
assert!(!is_valid_jsonnet_identifier("self")); + assert!(!is_valid_jsonnet_identifier("super")); + } + + #[test] + fn test_validate_identifier_errors() { + validate_identifier("foo").expect("foo should be valid"); + + let err = validate_identifier("").unwrap_err(); + assert!(matches!(err, LspError::InvalidIdentifier(_))); + + let err = validate_identifier("123foo").unwrap_err(); + assert!(matches!(err, LspError::InvalidIdentifier(_))); + + let err = validate_identifier("local").unwrap_err(); + assert!(matches!(err, LspError::InvalidIdentifier(_))); + } +} diff --git a/crates/jrsonnet-lsp-document/src/lib.rs b/crates/jrsonnet-lsp-document/src/lib.rs new file mode 100644 index 00000000..52f703d5 --- /dev/null +++ b/crates/jrsonnet-lsp-document/src/lib.rs @@ -0,0 +1,32 @@ +//! Document parsing, position conversion, and AST utilities for Jsonnet LSP. +//! +//! This crate provides the foundational types and utilities for working with +//! Jsonnet documents in an LSP context: +//! +//! - [`Document`] and [`ParsedDocument`] for parsed Jsonnet files +//! - [`LineIndex`] for efficient position conversion +//! - Position types ([`ByteOffset`], [`LspPosition`], [`LspRange`]) +//! 
- AST utilities for common operations
+
+pub mod ast_utils;
+pub mod config;
+pub mod document;
+pub mod error;
+pub mod position;
+pub mod types;
+
+pub use ast_utils::{
+	find_node_at_offset, position_to_offset, strip_string_quotes, to_lsp_range, token_at_offset,
+};
+pub use config::{
+	DEFAULT_ANALYSIS_CACHE_CAPACITY, DEFAULT_CLOSED_CACHE_CAPACITY, DEFAULT_TYPE_CACHE_CAPACITY,
+};
+pub use document::{Document, ParsedDocument, SharedDocument, SyntaxError};
+pub use error::{
+	is_valid_jsonnet_identifier, validate_identifier, HandlerResult, LspError, LspResult,
+};
+pub use position::LineIndex;
+pub use types::{
+	ByteOffset, CanonicalPath, CharOffset, DocVersion, Line, LspPosition, LspRange, SymbolName,
+	Utf16Offset,
+};
diff --git a/crates/jrsonnet-lsp-document/src/position.rs b/crates/jrsonnet-lsp-document/src/position.rs
new file mode 100644
index 00000000..3adbee8e
--- /dev/null
+++ b/crates/jrsonnet-lsp-document/src/position.rs
@@ -0,0 +1,375 @@
+//! Line index for efficient position conversion.
+//!
+//! LSP uses UTF-16 code units for character positions, while Rust strings
+//! and rowan use byte offsets. This module provides efficient conversion
+//! between the two coordinate systems.
+
+use crate::types::{ByteOffset, CharOffset, Line, LspPosition, LspRange};
+
+/// Line index - stores offsets only, no string copies.
+///
+/// This allows O(1) line lookup and O(line_length) character offset conversion.
+#[derive(Debug, Clone)]
+pub struct LineIndex {
+	/// Byte offset of each line start (including line 0 at offset 0).
+	line_starts: Vec<ByteOffset>,
+}
+
+impl LineIndex {
+	/// Build from source text - O(n) single pass, one allocation.
+	pub fn new(text: &str) -> Self {
+		let mut line_starts = vec![ByteOffset(0)];
+
+		for (i, ch) in text.char_indices() {
+			if ch == '\n' {
+				line_starts.push(ByteOffset((i + 1) as u32));
+			}
+		}
+
+		Self { line_starts }
+	}
+
+	/// Get the number of lines in the document.
+	pub fn line_count(&self) -> u32 {
+		self.line_starts.len() as u32
+	}
+
+	/// Get the byte offset of a line start.
+	pub fn line_start(&self, line: Line) -> Option<ByteOffset> {
+		self.line_starts.get(line.0 as usize).copied()
+	}
+
+	/// Get the line number for a byte offset.
+	pub fn line_of_offset(&self, offset: ByteOffset) -> Line {
+		// Binary search for the line containing this offset
+		match self.line_starts.binary_search(&offset) {
+			Ok(line) => Line(line as u32),
+			Err(line) => Line(line.saturating_sub(1) as u32),
+		}
+	}
+
+	/// Convert LSP position to byte offset.
+	///
+	/// Returns None if the position is out of bounds.
+	pub fn offset(&self, pos: LspPosition, text: &str) -> Option<ByteOffset> {
+		let line_start = self.line_start(pos.line)?;
+		let line_start_usize: usize = line_start.into();
+
+		// Find the end of this line (exclude newline)
+		let next_line = Line(pos.line.0 + 1);
+		let line_end = self
+			.line_start(next_line)
+			.map_or(text.len(), |o| usize::from(o).saturating_sub(1));
+
+		let line_text = text.get(line_start_usize..line_end)?;
+
+		// Walk the line, counting UTF-16 code units
+		let mut utf16_count = 0u32;
+		for (byte_idx, ch) in line_text.char_indices() {
+			if utf16_count >= pos.character.0 {
+				return Some(ByteOffset((line_start_usize + byte_idx) as u32));
+			}
+			utf16_count += ch.len_utf16() as u32;
+		}
+
+		// Position is at or past end of line
+		Some(ByteOffset((line_start_usize + line_text.len()) as u32))
+	}
+
+	/// Convert byte offset to LSP position.
+	///
+	/// Returns None if the offset is out of bounds.
+	pub fn position(&self, offset: ByteOffset, text: &str) -> Option<LspPosition> {
+		let offset_usize: usize = offset.into();
+		if offset_usize > text.len() {
+			return None;
+		}
+
+		let line = self.line_of_offset(offset);
+		let line_start: usize = self.line_start(line)?.into();
+
+		// Count UTF-16 code units from line start to offset
+		let line_prefix = text.get(line_start..offset_usize)?;
+		let character: u32 = line_prefix.chars().map(|ch| ch.len_utf16() as u32).sum();
+
+		Some(LspPosition {
+			line,
+			character: CharOffset(character),
+		})
+	}
+
+	/// Convert a rowan TextRange to an LSP Range.
+	pub fn range(&self, range: rowan::TextRange, text: &str) -> Option<LspRange> {
+		let start = self.position(range.start().into(), text)?;
+		let end = self.position(range.end().into(), text)?;
+		Some(LspRange { start, end })
+	}
+
+	/// Convert an LSP Range to a rowan TextRange.
+	pub fn text_range(&self, range: LspRange, text: &str) -> Option<rowan::TextRange> {
+		let start = self.offset(range.start, text)?;
+		let end = self.offset(range.end, text)?;
+		Some(rowan::TextRange::new(start.into(), end.into()))
+	}
+
+	/// Get the text of a specific line (without trailing newline).
+ pub fn line_text<'a>(&self, line: Line, text: &'a str) -> Option<&'a str> { + let start: usize = self.line_start(line)?.into(); + let next_line = Line(line.0 + 1); + let end = self + .line_start(next_line) + .map_or(text.len(), |o| usize::from(o).saturating_sub(1)); + text.get(start..end) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_line_index_basic() { + let text = "hello\nworld\n"; + let index = LineIndex::new(text); + + assert_eq!(index.line_count(), 3); // "hello", "world", "" + assert_eq!(index.line_start(Line(0)), Some(ByteOffset(0))); + assert_eq!(index.line_start(Line(1)), Some(ByteOffset(6))); + assert_eq!(index.line_start(Line(2)), Some(ByteOffset(12))); + } + + #[test] + fn test_line_of_offset() { + let text = "hello\nworld\n"; + let index = LineIndex::new(text); + + assert_eq!(index.line_of_offset(ByteOffset(0)), Line(0)); + assert_eq!(index.line_of_offset(ByteOffset(3)), Line(0)); + assert_eq!(index.line_of_offset(ByteOffset(5)), Line(0)); // 'o' in hello + assert_eq!(index.line_of_offset(ByteOffset(6)), Line(1)); // 'w' in world + assert_eq!(index.line_of_offset(ByteOffset(11)), Line(1)); // 'd' in world + } + + #[test] + fn test_lsp_position_to_offset() { + let text = "hello\nworld"; + let index = LineIndex::new(text); + + // Start of file + assert_eq!( + index.offset( + LspPosition { + line: Line(0), + character: CharOffset(0) + }, + text + ), + Some(ByteOffset(0)) + ); + + // Middle of first line + assert_eq!( + index.offset( + LspPosition { + line: Line(0), + character: CharOffset(3) + }, + text + ), + Some(ByteOffset(3)) + ); + + // Start of second line + assert_eq!( + index.offset( + LspPosition { + line: Line(1), + character: CharOffset(0) + }, + text + ), + Some(ByteOffset(6)) + ); + + // End of second line + assert_eq!( + index.offset( + LspPosition { + line: Line(1), + character: CharOffset(5) + }, + text + ), + Some(ByteOffset(11)) + ); + } + + #[test] + fn test_offset_to_lsp_position() { + let text = 
"hello\nworld"; + let index = LineIndex::new(text); + + assert_eq!( + index.position(ByteOffset(0), text), + Some(LspPosition { + line: Line(0), + character: CharOffset(0) + }) + ); + + assert_eq!( + index.position(ByteOffset(3), text), + Some(LspPosition { + line: Line(0), + character: CharOffset(3) + }) + ); + + assert_eq!( + index.position(ByteOffset(6), text), + Some(LspPosition { + line: Line(1), + character: CharOffset(0) + }) + ); + + assert_eq!( + index.position(ByteOffset(11), text), + Some(LspPosition { + line: Line(1), + character: CharOffset(5) + }) + ); + } + + #[test] + fn test_utf16_handling() { + // '🦀' is 4 bytes in UTF-8 but 2 UTF-16 code units + let text = "a🦀b"; + let index = LineIndex::new(text); + + // 'a' is at character 0 + assert_eq!( + index.offset( + LspPosition { + line: Line(0), + character: CharOffset(0) + }, + text + ), + Some(ByteOffset(0)) + ); + + // '🦀' is at character 1 (UTF-16), byte offset 1 + assert_eq!( + index.offset( + LspPosition { + line: Line(0), + character: CharOffset(1) + }, + text + ), + Some(ByteOffset(1)) + ); + + // 'b' is at character 3 (UTF-16: 1 for 'a' + 2 for '🦀'), byte offset 5 + assert_eq!( + index.offset( + LspPosition { + line: Line(0), + character: CharOffset(3) + }, + text + ), + Some(ByteOffset(5)) + ); + + // Reverse: byte offset 5 -> character 3 + assert_eq!( + index.position(ByteOffset(5), text), + Some(LspPosition { + line: Line(0), + character: CharOffset(3) + }) + ); + } + + #[test] + fn test_line_text() { + let text = "hello\nworld\n"; + let index = LineIndex::new(text); + + assert_eq!(index.line_text(Line(0), text), Some("hello")); + assert_eq!(index.line_text(Line(1), text), Some("world")); + assert_eq!(index.line_text(Line(2), text), Some("")); + } + + #[test] + fn test_empty_file() { + let text = ""; + let index = LineIndex::new(text); + + assert_eq!(index.line_count(), 1); + assert_eq!(index.line_start(Line(0)), Some(ByteOffset(0))); + assert_eq!( + index.offset( + LspPosition { + line: 
Line(0), + character: CharOffset(0) + }, + text + ), + Some(ByteOffset(0)) + ); + } + + #[test] + fn test_crlf_line_endings() { + // Windows-style CRLF line endings + let text = "hello\r\nworld\r\n"; + let index = LineIndex::new(text); + + // Line text includes \r (we only split on \n) + let expected = LineIndex { + line_starts: vec![ByteOffset(0), ByteOffset(7), ByteOffset(14)], + }; + assert_eq!(index.line_starts, expected.line_starts); + } + + #[test] + fn test_cr_only_line_endings() { + // Old Mac-style CR-only line endings (rare) + // CR alone is NOT treated as a line ending + let text = "hello\rworld\r"; + let index = LineIndex::new(text); + + let expected = LineIndex { + line_starts: vec![ByteOffset(0)], + }; + assert_eq!(index.line_starts, expected.line_starts); + } + + #[test] + fn test_mixed_line_endings() { + // Mix of LF and CRLF: "line1\n" (6) + "line2\r\n" (7) + "line3\n" (6) + let text = "line1\nline2\r\nline3\n"; + let index = LineIndex::new(text); + + let expected = LineIndex { + line_starts: vec![ByteOffset(0), ByteOffset(6), ByteOffset(13), ByteOffset(19)], + }; + assert_eq!(index.line_starts, expected.line_starts); + } + + #[test] + fn test_lf_only_line_endings() { + // Unix-style LF-only (most common) + let text = "hello\nworld\n"; + let index = LineIndex::new(text); + + let expected = LineIndex { + line_starts: vec![ByteOffset(0), ByteOffset(6), ByteOffset(12)], + }; + assert_eq!(index.line_starts, expected.line_starts); + } +} diff --git a/crates/jrsonnet-lsp-document/src/types.rs b/crates/jrsonnet-lsp-document/src/types.rs new file mode 100644 index 00000000..3fb72f3f --- /dev/null +++ b/crates/jrsonnet-lsp-document/src/types.rs @@ -0,0 +1,290 @@ +//! Domain-specific types for the LSP. +//! +//! These provide semantic clarity and type safety for common operations +//! like position conversion between byte offsets and LSP UTF-16 positions. 
+
+use std::path::PathBuf;
+
+use derive_more::{AsRef, Deref, Display, From, Into};
+
+use crate::error::{validate_identifier, LspResult};
+
+/// Byte offset within a document (rowan uses byte offsets).
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, From, Into, Default)]
+pub struct ByteOffset(pub u32);
+
+impl ByteOffset {
+	pub fn new(offset: u32) -> Self {
+		Self(offset)
+	}
+}
+
+impl From<rowan::TextSize> for ByteOffset {
+	fn from(ts: rowan::TextSize) -> Self {
+		Self(ts.into())
+	}
+}
+
+impl From<ByteOffset> for rowan::TextSize {
+	fn from(offset: ByteOffset) -> Self {
+		rowan::TextSize::from(offset.0)
+	}
+}
+
+impl From<usize> for ByteOffset {
+	fn from(offset: usize) -> Self {
+		Self(offset as u32)
+	}
+}
+
+impl From<ByteOffset> for usize {
+	fn from(offset: ByteOffset) -> Self {
+		offset.0 as usize
+	}
+}
+
+/// UTF-16 offset (LSP uses UTF-16 code units).
+#[derive(Debug, Clone, Copy, PartialEq, Eq, From, Into, Default)]
+pub struct Utf16Offset(pub u32);
+
+/// Line number (0-indexed).
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, From, Into, Default)]
+pub struct Line(pub u32);
+
+/// Character offset within a line (0-indexed, UTF-16 code units).
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, From, Into, Default)]
+pub struct CharOffset(pub u32);
+
+/// LSP position (0-indexed line, UTF-16 character).
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
+pub struct LspPosition {
+	pub line: Line,
+	pub character: CharOffset,
+}
+
+impl From<(u32, u32)> for LspPosition {
+	fn from((line, character): (u32, u32)) -> Self {
+		Self {
+			line: Line(line),
+			character: CharOffset(character),
+		}
+	}
+}
+
+impl From<lsp_types::Position> for LspPosition {
+	fn from(p: lsp_types::Position) -> Self {
+		Self {
+			line: Line(p.line),
+			character: CharOffset(p.character),
+		}
+	}
+}
+
+impl From<LspPosition> for lsp_types::Position {
+	fn from(p: LspPosition) -> Self {
+		Self {
+			line: p.line.0,
+			character: p.character.0,
+		}
+	}
+}
+
+/// LSP range (start and end positions).
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
+pub struct LspRange {
+	pub start: LspPosition,
+	pub end: LspPosition,
+}
+
+impl From<lsp_types::Range> for LspRange {
+	fn from(r: lsp_types::Range) -> Self {
+		Self {
+			start: r.start.into(),
+			end: r.end.into(),
+		}
+	}
+}
+
+impl From<LspRange> for lsp_types::Range {
+	fn from(r: LspRange) -> Self {
+		Self {
+			start: r.start.into(),
+			end: r.end.into(),
+		}
+	}
+}
+
+/// Normalized canonical path as cache key.
+#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Display, Deref)]
+#[display("{}", _0.display())]
+pub struct CanonicalPath(PathBuf);
+
+impl CanonicalPath {
+	/// Create a new canonical path from an already-canonicalized path.
+	pub fn new(path: PathBuf) -> Self {
+		Self(path)
+	}
+
+	/// Try to create a canonical path, canonicalizing if needed.
+	pub fn try_from_path(path: &std::path::Path) -> std::io::Result<Self> {
+		Ok(Self(path.canonicalize()?))
+	}
+
+	/// Create from a URI, returning None if the URI is not a file URI.
+	pub fn from_uri(uri: &lsp_types::Uri) -> Option<Self> {
+		let uri_str = uri.as_str();
+		if !uri_str.starts_with("file://") {
+			return None;
+		}
+		// Parse file:// URI to path
+		let path_str = uri_str.strip_prefix("file://")?;
+		// Simple percent decoding for common cases
+		let decoded = path_str.replace("%20", " ");
+		let path = PathBuf::from(decoded);
+		// Try to canonicalize, but if it fails (file doesn't exist yet), use as-is
+		match path.canonicalize() {
+			Ok(canonical) => Some(Self(canonical)),
+			Err(_) => Some(Self(path)),
+		}
+	}
+
+	/// Convert to a file URI.
+	pub fn to_uri(&self) -> lsp_types::Uri {
+		let path_str = self.0.to_string_lossy();
+		// Create file:// URI
+		let uri_string = format!("file://{path_str}");
+		uri_string
+			.parse()
+			.expect("canonical path should produce valid URI")
+	}
+
+	/// Get the inner path.
+	pub fn as_path(&self) -> &std::path::Path {
+		&self.0
+	}
+}
+
+/// Document version (monotonically increasing).
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, From, Into, Default)]
+pub struct DocVersion(pub i32);
+
+impl DocVersion {
+	pub fn new(version: i32) -> Self {
+		Self(version)
+	}
+}
+
+/// A validated Jsonnet identifier (symbol name).
+///
+/// This type ensures that any identifier used in rename operations or
+/// symbol lookups is a valid Jsonnet identifier at the type level.
+#[derive(Debug, Clone, PartialEq, Eq, Hash, Deref, AsRef, Display)]
+#[display("{}", _0)]
+pub struct SymbolName(String);
+
+impl SymbolName {
+	/// Create a new symbol name, validating that it's a valid Jsonnet identifier.
+	///
+	/// Returns an error if the name is empty, starts with a digit,
+	/// contains invalid characters, or is a reserved keyword.
+	pub fn new(s: &str) -> LspResult<Self> {
+		validate_identifier(s)?;
+		Ok(Self(s.to_string()))
+	}
+
+	/// Create a symbol name from an already-validated string.
+	///
+	/// This is useful when extracting identifiers from parsed AST tokens,
+	/// which are guaranteed to be syntactically valid identifiers.
+	///
+	/// # Safety
+	/// The caller must ensure the string is a valid Jsonnet identifier.
+ pub fn from_token(s: &str) -> Self { + debug_assert!( + crate::error::is_valid_jsonnet_identifier(s), + "SymbolName::from_token called with invalid identifier: {s}" + ); + Self(s.to_string()) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_byte_offset_conversions() { + let offset = ByteOffset::new(42); + assert_eq!(offset.0, 42); + + let ts: rowan::TextSize = offset.into(); + assert_eq!(u32::from(ts), 42); + + let offset2: ByteOffset = ts.into(); + assert_eq!(offset2, offset); + } + + #[test] + fn test_lsp_position_conversion() { + let lsp_pos = lsp_types::Position { + line: 10, + character: 5, + }; + let pos: LspPosition = lsp_pos.into(); + assert_eq!(pos.line, Line(10)); + assert_eq!(pos.character, CharOffset(5)); + + let back: lsp_types::Position = pos.into(); + assert_eq!(back, lsp_pos); + } + + #[test] + fn test_line_char_offset_ordering() { + assert!(Line(0) < Line(1)); + assert!(CharOffset(0) < CharOffset(10)); + } + + #[test] + fn test_symbol_name_valid() { + let name = SymbolName::new("foo").unwrap(); + assert_eq!(&*name, "foo"); + assert_eq!(name.as_ref(), "foo"); + + // from_token for already-validated identifiers + let name2 = SymbolName::from_token("bar"); + assert_eq!(&*name2, "bar"); + } + + #[test] + fn test_symbol_name_invalid() { + use crate::error::LspError; + + // Empty + let err = SymbolName::new("").unwrap_err(); + assert!(matches!(err, LspError::InvalidIdentifier(_))); + + // Starts with digit + let err = SymbolName::new("123foo").unwrap_err(); + assert!(matches!(err, LspError::InvalidIdentifier(_))); + + // Invalid characters + let err = SymbolName::new("foo-bar").unwrap_err(); + assert!(matches!(err, LspError::InvalidIdentifier(_))); + let err = SymbolName::new("foo.bar").unwrap_err(); + assert!(matches!(err, LspError::InvalidIdentifier(_))); + + // Keywords + let err = SymbolName::new("local").unwrap_err(); + assert!(matches!(err, LspError::InvalidIdentifier(_))); + let err = 
SymbolName::new("function").unwrap_err(); + assert!(matches!(err, LspError::InvalidIdentifier(_))); + let err = SymbolName::new("if").unwrap_err(); + assert!(matches!(err, LspError::InvalidIdentifier(_))); + } + + #[test] + fn test_symbol_name_display() { + let name = SymbolName::new("myVar").unwrap(); + assert_eq!(format!("{name}"), "myVar"); + } +} diff --git a/crates/jrsonnet-lsp-handlers/Cargo.toml b/crates/jrsonnet-lsp-handlers/Cargo.toml new file mode 100644 index 00000000..e9b6f01d --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/Cargo.toml @@ -0,0 +1,32 @@ +[package] +name = "jrsonnet-lsp-handlers" +authors.workspace = true +edition.workspace = true +license.workspace = true +repository.workspace = true +version.workspace = true +description = "LSP request handlers for jrsonnet LSP" + +[dependencies] +jrsonnet-lsp-document = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-document" } +jrsonnet-lsp-import = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-import" } +jrsonnet-lsp-inference = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-inference" } +jrsonnet-lsp-scope = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-scope" } +jrsonnet-lsp-stdlib = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-stdlib" } +jrsonnet-lsp-types = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-types" } +jrsonnet-rowan-parser = { version = "0.5.0-pre97", path = "../jrsonnet-rowan-parser" } +lsp-types.workspace = true +rayon = "1.11.0" +serde = { workspace = true, features = ["derive"] } +rowan.workspace = true +serde_json.workspace = true +tracing = "0.1.44" + +[lints] +workspace = true + +[dev-dependencies] +assert_matches = "1.5.0" +jrsonnet-lsp-document = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-document" } +rstest = "0.23" +tempfile.workspace = true diff --git a/crates/jrsonnet-lsp-handlers/src/code_action.rs b/crates/jrsonnet-lsp-handlers/src/code_action.rs new file mode 100644 index 00000000..4f31e2a6 --- /dev/null +++ 
b/crates/jrsonnet-lsp-handlers/src/code_action.rs
@@ -0,0 +1,199 @@
+//! Code action handler.
+//!
+//! Provides quick fixes for diagnostics.
+
+use std::collections::HashMap;
+
+use jrsonnet_lsp_document::{to_lsp_range, token_at_offset, Document};
+use jrsonnet_rowan_parser::{AstNode, SyntaxKind};
+use lsp_types::{
+	CodeAction, CodeActionContext, CodeActionKind, CodeActionOrCommand, Diagnostic, NumberOrString,
+	Range, TextEdit, Uri, WorkspaceEdit,
+};
+
+fn range_overlaps(a: Range, b: Range) -> bool {
+	(a.start.line, a.start.character) <= (b.end.line, b.end.character)
+		&& (b.start.line, b.start.character) <= (a.end.line, a.end.character)
+}
+
+fn wants_quickfix(context: &CodeActionContext) -> bool {
+	match &context.only {
+		None => true,
+		Some(kinds) => kinds
+			.iter()
+			.any(|kind| kind.as_str().starts_with(CodeActionKind::QUICKFIX.as_str())),
+	}
+}
+
+fn unused_variable_action(
+	document: &Document,
+	uri: &Uri,
+	diagnostic: &Diagnostic,
+) -> Option<CodeActionOrCommand> {
+	let NumberOrString::String(code) = diagnostic.code.as_ref()?
else { + return None; + }; + if code != "unused-variable" { + return None; + } + + let text = document.text(); + let line_index = document.line_index(); + let offset = line_index.offset(diagnostic.range.start.into(), text)?; + let ast = document.ast(); + let token = token_at_offset(ast.syntax(), offset)?; + + if token.kind() != SyntaxKind::IDENT { + return None; + } + + let name = token.text(); + if name.starts_with('_') { + return None; + } + + let mut changes = HashMap::new(); + changes.insert( + uri.clone(), + vec![TextEdit { + range: to_lsp_range(token.text_range(), line_index, text), + new_text: format!("_{name}"), + }], + ); + + Some( + CodeAction { + title: format!("Prefix `{name}` with `_`"), + kind: Some(CodeActionKind::QUICKFIX), + diagnostics: Some(vec![diagnostic.clone()]), + edit: Some(WorkspaceEdit { + changes: Some(changes), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(true), + disabled: None, + data: None, + } + .into(), + ) +} + +/// Build code actions for a given range and context. 
+pub fn code_actions(
+	document: &Document,
+	uri: &Uri,
+	range: Range,
+	context: &CodeActionContext,
+) -> Vec<CodeActionOrCommand> {
+	if !wants_quickfix(context) {
+		return Vec::new();
+	}
+
+	context
+		.diagnostics
+		.iter()
+		.filter(|diagnostic| range_overlaps(diagnostic.range, range))
+		.filter_map(|diagnostic| unused_variable_action(document, uri, diagnostic))
+		.collect()
+}
+
+#[cfg(test)]
+mod tests {
+	use jrsonnet_lsp_document::DocVersion;
+	use lsp_types::{CodeActionContext, CodeActionKind, DiagnosticSeverity, Position};
+
+	use super::*;
+
+	fn diag_unused(range: Range) -> Diagnostic {
+		Diagnostic {
+			range,
+			severity: Some(DiagnosticSeverity::WARNING),
+			code: Some(NumberOrString::String("unused-variable".to_string())),
+			code_description: None,
+			source: Some("jrsonnet-lint".to_string()),
+			message: "unused variable".to_string(),
+			related_information: None,
+			tags: None,
+			data: None,
+		}
+	}
+
+	fn uri() -> Uri {
+		"file:///test/code_action.jsonnet".parse().unwrap()
+	}
+
+	fn range(start: u32, end: u32) -> Range {
+		Range {
+			start: Position {
+				line: 0,
+				character: start,
+			},
+			end: Position {
+				line: 0,
+				character: end,
+			},
+		}
+	}
+
+	#[test]
+	fn test_unused_variable_quickfix() {
+		let document = Document::new("local x = 1; 42".to_string(), DocVersion::new(1));
+		let context = CodeActionContext {
+			diagnostics: vec![diag_unused(range(6, 7))],
+			only: None,
+			trigger_kind: None,
+		};
+
+		let actions = code_actions(&document, &uri(), range(0, 20), &context);
+		assert_eq!(actions.len(), 1);
+
+		let CodeActionOrCommand::CodeAction(action) = &actions[0] else {
+			panic!("Expected code action")
+		};
+		assert_eq!(action.kind, Some(CodeActionKind::QUICKFIX));
+		assert_eq!(action.is_preferred, Some(true));
+
+		let edit = action.edit.as_ref().expect("action should include edit");
+		let uri = uri();
+		let edits = edit
+			.changes
+			.as_ref()
+			.and_then(|c| c.get(&uri))
+			.expect("edits for document should exist");
+		assert_eq!(
+			edits,
+			&vec![TextEdit {
+				range:
range(6, 7), + new_text: "_x".to_string(), + }] + ); + } + + #[test] + fn test_code_action_respects_only_filter() { + let document = Document::new("local x = 1; 42".to_string(), DocVersion::new(1)); + let context = CodeActionContext { + diagnostics: vec![diag_unused(range(6, 7))], + only: Some(vec![CodeActionKind::SOURCE_FIX_ALL]), + trigger_kind: None, + }; + + let actions = code_actions(&document, &uri(), range(0, 20), &context); + assert!(actions.is_empty()); + } + + #[test] + fn test_code_action_skips_non_identifier_range() { + let document = Document::new("local x = 1; 42".to_string(), DocVersion::new(1)); + let context = CodeActionContext { + diagnostics: vec![diag_unused(range(8, 9))], // '=' character + only: None, + trigger_kind: None, + }; + + let actions = code_actions(&document, &uri(), range(0, 20), &context); + assert!(actions.is_empty()); + } +} diff --git a/crates/jrsonnet-lsp-handlers/src/code_lens.rs b/crates/jrsonnet-lsp-handlers/src/code_lens.rs new file mode 100644 index 00000000..de6e7a5e --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/code_lens.rs @@ -0,0 +1,702 @@ +//! Code lens handler for showing reference counts and actions. +//! +//! Provides: +//! - Reference counts for definitions (functions, variables) +//! - "Evaluate" action for executable Jsonnet files +//! - Type annotations for function definitions +//! - Error status indicator for the file + +use jrsonnet_lsp_document::{to_lsp_range, Document}; +use jrsonnet_lsp_inference::TypeAnalysis; +use jrsonnet_lsp_scope::{is_definition_site, ScopeResolver}; +use jrsonnet_rowan_parser::{ + nodes::{Bind, BindFunction, StmtLocal}, + AstNode, SyntaxKind, +}; +use lsp_types::{CodeLens, Command, Range, Uri}; + +/// Configuration for code lens generation. +#[derive(Debug, Clone, Default)] +pub struct CodeLensConfig { + /// Show reference counts for definitions. + pub show_references: bool, + /// Show "Evaluate" action for executable files. 
+	pub show_evaluate: bool,
+	/// Show inferred types for function definitions.
+	pub show_types: bool,
+	/// Show error status at top of file.
+	pub show_errors: ErrorLensVisibility,
+}
+
+/// Whether to include the file-level error status lens.
+#[derive(Debug, Clone, Copy, Default, PartialEq, Eq)]
+pub enum ErrorLensVisibility {
+	#[default]
+	Hidden,
+	Visible,
+}
+
+impl ErrorLensVisibility {
+	const fn is_visible(self) -> bool {
+		matches!(self, Self::Visible)
+	}
+}
+
+impl CodeLensConfig {
+	/// Create a config that shows all code lenses.
+	pub fn all() -> Self {
+		Self {
+			show_references: true,
+			show_evaluate: true,
+			show_types: true,
+			show_errors: ErrorLensVisibility::Visible,
+		}
+	}
+}
+
+/// Generate code lenses for a document.
+///
+/// The `analysis` parameter should be pre-computed using `TypeProvider` to ensure
+/// that import types are properly resolved. Pass `None` to skip type lenses.
+pub fn code_lens(
+	document: &Document,
+	uri: &Uri,
+	config: &CodeLensConfig,
+	analysis: Option<&TypeAnalysis>,
+) -> Vec<CodeLens> {
+	let mut lenses = Vec::new();
+
+	if config.show_references {
+		lenses.extend(reference_count_lenses(document, uri));
+	}
+
+	if config.show_evaluate {
+		if let Some(lens) = evaluate_lens(document, uri) {
+			lenses.push(lens);
+		}
+	}
+
+	if config.show_types {
+		if let Some(analysis) = analysis {
+			lenses.extend(type_lenses(document, analysis));
+		}
+	}
+
+	if config.show_errors.is_visible() {
+		if let Some(lens) = error_status_lens(document, uri) {
+			lenses.push(lens);
+		}
+	}
+
+	lenses
+}
+
+/// Generate reference count code lenses for all definitions.
+fn reference_count_lenses(document: &Document, uri: &Uri) -> Vec<CodeLens> {
+	let mut lenses = Vec::new();
+	let ast = document.ast();
+	let text = document.text();
+	let line_index = document.line_index();
+
+	// Build scope resolver for reference counting
+	let resolver = ScopeResolver::new(ast.syntax());
+
+	// Find all definitions and count their references
+	for token in ast
+		.syntax()
+		.descendants_with_tokens()
+		.filter_map(rowan::NodeOrToken::into_token)
+	{
+		if token.kind() != SyntaxKind::IDENT {
+			continue;
+		}
+
+		if !is_definition_site(&token) {
+			continue;
+		}
+
+		let Some(parent) = token.parent() else {
+			continue;
+		};
+
+		let def_range = parent.text_range();
+		let name = token.text();
+
+		// Count references (excluding the definition itself)
+		let references = resolver.find_references(ast.syntax(), name, def_range);
+		let ref_count = references.len().saturating_sub(1); // Exclude definition
+
+		// Skip if no references (to avoid clutter)
+		if ref_count == 0 {
+			continue;
+		}
+
+		let range = to_lsp_range(def_range, line_index, text);
+
+		let title = if ref_count == 1 {
+			"1 reference".to_string()
+		} else {
+			format!("{} references", ref_count)
+		};
+
+		lenses.push(CodeLens {
+			range,
+			command: Some(Command {
+				title,
+				command: "jrsonnet.findReferences".to_string(),
+				arguments: Some(vec![
+					serde_json::to_value(uri.to_string()).unwrap(),
+					serde_json::to_value(range.start.line).unwrap(),
+					serde_json::to_value(range.start.character).unwrap(),
+				]),
+			}),
+			data: None,
+		});
+	}
+
+	lenses
+}
+
+/// Generate "Evaluate" code lens for the document root.
+fn evaluate_lens(document: &Document, uri: &Uri) -> Option<CodeLens> {
+	let ast = document.ast();
+
+	// Only show evaluate lens if document has a root expression
+	ast.expr()?;
+
+	// Place the lens at line 0
+	let range = Range {
+		start: lsp_types::Position {
+			line: 0,
+			character: 0,
+		},
+		end: lsp_types::Position {
+			line: 0,
+			character: 0,
+		},
+	};
+
+	Some(CodeLens {
+		range,
+		command: Some(Command {
+			title: "Evaluate".to_string(),
+			command: "jrsonnet.evalFile".to_string(),
+			arguments: Some(vec![serde_json::to_value(uri.to_string()).unwrap()]),
+		}),
+		data: None,
+	})
+}
+
+/// Generate error status code lens for the document.
+///
+/// Shows the number of syntax errors at the top of the file.
+/// Only shown when there are errors (no lens for clean files to reduce clutter).
+fn error_status_lens(document: &Document, uri: &Uri) -> Option<CodeLens> {
+	let errors = document.errors();
+
+	// Only show lens if there are errors
+	if errors.is_empty() {
+		return None;
+	}
+
+	let error_count = errors.len();
+
+	// Place the lens at line 0
+	let range = Range {
+		start: lsp_types::Position {
+			line: 0,
+			character: 0,
+		},
+		end: lsp_types::Position {
+			line: 0,
+			character: 0,
+		},
+	};
+
+	let title = if error_count == 1 {
+		"1 syntax error".to_string()
+	} else {
+		format!("{} syntax errors", error_count)
+	};
+
+	Some(CodeLens {
+		range,
+		command: Some(Command {
+			title,
+			command: "jrsonnet.showErrors".to_string(),
+			arguments: Some(vec![serde_json::to_value(uri.to_string()).unwrap()]),
+		}),
+		data: None,
+	})
+}
+
+/// Generate type annotation code lenses for function definitions.
+fn type_lenses(document: &Document, analysis: &TypeAnalysis) -> Vec<CodeLens> {
+	let mut lenses = Vec::new();
+	let ast = document.ast();
+	let text = document.text();
+	let line_index = document.line_index();
+
+	// Find all function definitions (local f(x) = ...)
+ for node in ast.syntax().descendants() { + // Look for BindFunction nodes (function definitions) + if let Some(bind_func) = BindFunction::cast(node.clone()) { + let Some(name_node) = bind_func.name() else { + continue; + }; + + // Get the type for the function body (the expression) + let Some(body) = bind_func.value() else { + continue; + }; + let body_range = body.syntax().text_range(); + let Some(ty) = analysis.type_for_range(body_range) else { + continue; + }; + + // Format the type + let type_str = analysis.display(ty); + + // Skip if it's just "any" or "function" - not informative + if type_str == "any" || type_str == "function" { + continue; + } + + let range = to_lsp_range(name_node.syntax().text_range(), line_index, text); + + lenses.push(CodeLens { + range, + command: Some(Command { + title: format!(":: {}", type_str), + command: String::new(), // No action, just informational + arguments: None, + }), + data: None, + }); + } + + // Also show types for complex local bindings (local x = { ... 
}) + if let Some(stmt_local) = StmtLocal::cast(node.clone()) { + for bind in stmt_local.binds() { + if let Bind::BindDestruct(bd) = bind { + // Skip simple literals - only show for complex expressions + let Some(value) = bd.value() else { + continue; + }; + + // Check if it's a "complex" expression (object, array, function call) + let is_complex = value.expr_base().map_or(false, |base| { + matches!( + base.syntax().kind(), + SyntaxKind::EXPR_OBJECT + | SyntaxKind::EXPR_ARRAY | SyntaxKind::EXPR_CALL + | SyntaxKind::EXPR_IF_THEN_ELSE + ) + }); + + if !is_complex { + continue; + } + + // Get type for the binding value + let value_range = value.syntax().text_range(); + let Some(ty) = analysis.type_for_range(value_range) else { + continue; + }; + + let type_str = analysis.display(ty); + + // Skip uninformative types + if type_str == "any" { + continue; + } + + let range = to_lsp_range(bd.syntax().text_range(), line_index, text); + + lenses.push(CodeLens { + range, + command: Some(Command { + title: format!(":: {}", type_str), + command: String::new(), + arguments: None, + }), + data: None, + }); + } + } + } + } + + lenses +} + +/// Resolve a code lens (add command if not present). +/// +/// This is called when the client requests resolution of a code lens +/// that was returned without a command. 
pub fn resolve_code_lens(lens: CodeLens) -> CodeLens {
	// Our code lenses always include commands, so no resolution needed
	lens
}

#[cfg(test)]
mod tests {
	use std::sync::Arc;

	use jrsonnet_lsp_document::DocVersion;
	use jrsonnet_lsp_types::GlobalTyStore;
	use rowan::NodeOrToken;

	use super::*;

	/// Build a synthetic `file://` URI for test documents.
	fn make_uri(name: &str) -> Uri {
		format!("file:///test/{}.jsonnet", name).parse().unwrap()
	}

	/// Run type analysis over `doc` with a fresh global type store.
	fn test_analysis(doc: &Document) -> TypeAnalysis {
		let global_types = Arc::new(GlobalTyStore::new());
		TypeAnalysis::analyze_with_global(doc, global_types)
	}

	/// Reconstruct the reference-count lens the handler should emit for the
	/// definition of `name`, so tests can compare whole `CodeLens` values
	/// instead of fragments.
	fn expected_reference_lens(doc: &Document, uri: &Uri, name: &str) -> CodeLens {
		let ast = doc.ast();
		let text = doc.text();
		let line_index = doc.line_index();
		let token = ast
			.syntax()
			.descendants_with_tokens()
			.filter_map(NodeOrToken::into_token)
			.find(|token| {
				token.kind() == SyntaxKind::IDENT
					&& token.text() == name
					&& is_definition_site(token)
			})
			.expect("definition token should exist");
		let def_range = token
			.parent()
			.expect("definition token should have parent")
			.text_range();
		let range = to_lsp_range(def_range, line_index, text);
		let resolver = ScopeResolver::new(ast.syntax());
		let references = resolver.find_references(ast.syntax(), name, def_range);
		// `find_references` includes the definition itself; the lens counts
		// only the uses.
		let ref_count = references.len().saturating_sub(1);
		let title = if ref_count == 1 {
			"1 reference".to_string()
		} else {
			format!("{ref_count} references")
		};
		CodeLens {
			range,
			command: Some(Command {
				title,
				command: "jrsonnet.findReferences".to_string(),
				arguments: Some(vec![
					serde_json::to_value(uri.to_string()).unwrap(),
					serde_json::to_value(range.start.line).unwrap(),
					serde_json::to_value(range.start.character).unwrap(),
				]),
			}),
			data: None,
		}
	}

	/// The "Evaluate" lens is always anchored at the start of the file.
	fn expected_evaluate_lens(uri: &Uri) -> CodeLens {
		CodeLens {
			range: Range {
				start: lsp_types::Position {
					line: 0,
					character: 0,
				},
				end: lsp_types::Position {
					line: 0,
					character: 0,
				},
			},
			command: Some(Command {
				title: "Evaluate".to_string(),
				command: "jrsonnet.evalFile".to_string(),
				arguments: Some(vec![serde_json::to_value(uri.to_string()).unwrap()]),
			}),
			data: None,
		}
	}

	/// Reconstruct the type lens shown over the function binding `name`.
	fn expected_function_type_lens(
		doc: &Document,
		analysis: &TypeAnalysis,
		name: &str,
	) -> CodeLens {
		let ast = doc.ast();
		let text = doc.text();
		let line_index = doc.line_index();
		let bind_func = ast
			.syntax()
			.descendants()
			.filter_map(BindFunction::cast)
			.find(|bind_func| {
				bind_func
					.name()
					.map(|n| n.syntax().text() == name)
					.unwrap_or(false)
			})
			.expect("function binding should exist");
		let name_node = bind_func.name().expect("function should have name");
		let body = bind_func.value().expect("function should have body");
		let ty = analysis
			.type_for_range(body.syntax().text_range())
			.expect("function body should have inferred type");
		let type_str = analysis.display(ty);
		// The handler suppresses "any"/"function" lenses, so an expected lens
		// must never carry them.
		assert_ne!(type_str, "any", "function type lens should be informative");
		assert_ne!(
			type_str, "function",
			"function type lens should not be generic"
		);
		CodeLens {
			range: to_lsp_range(name_node.syntax().text_range(), line_index, text),
			command: Some(Command {
				title: format!(":: {type_str}"),
				command: String::new(),
				arguments: None,
			}),
			data: None,
		}
	}

	/// Reconstruct the type lens for the first "complex" local binding
	/// (object/array/call/if value) in the document.
	fn expected_complex_binding_type_lens(doc: &Document, analysis: &TypeAnalysis) -> CodeLens {
		let ast = doc.ast();
		let text = doc.text();
		let line_index = doc.line_index();
		for node in ast.syntax().descendants() {
			let Some(stmt_local) = StmtLocal::cast(node) else {
				continue;
			};
			for bind in stmt_local.binds() {
				let Bind::BindDestruct(bd) = bind else {
					continue;
				};
				let Some(value) = bd.value() else {
					continue;
				};
				let is_complex = value.expr_base().is_some_and(|base| {
					matches!(
						base.syntax().kind(),
						SyntaxKind::EXPR_OBJECT
							| SyntaxKind::EXPR_ARRAY
							| SyntaxKind::EXPR_CALL
							| SyntaxKind::EXPR_IF_THEN_ELSE
					)
				});
				if !is_complex {
					continue;
				}
				let ty = analysis
					.type_for_range(value.syntax().text_range())
					.expect("complex binding should have inferred type");
				let type_str = analysis.display(ty);
				assert_ne!(
					type_str, "any",
					"complex binding type should be informative"
				);
				return CodeLens {
					range: to_lsp_range(bd.syntax().text_range(), line_index, text),
					command: Some(Command {
						title: format!(":: {type_str}"),
						command: String::new(),
						arguments: None,
					}),
					data: None,
				};
			}
		}
		panic!("complex binding should produce a type lens");
	}

	#[test]
	fn test_reference_count_lens() {
		let code = "local x = 1; x + x + x";
		let doc = Document::new(code.to_string(), DocVersion::new(1));
		let uri = make_uri("test");

		let config = CodeLensConfig {
			show_references: true,
			show_evaluate: false,
			show_types: false,
			show_errors: ErrorLensVisibility::Hidden,
		};

		let lenses = code_lens(&doc, &uri, &config, None);
		let expected = vec![expected_reference_lens(&doc, &uri, "x")];

		assert_eq!(lenses, expected);
	}

	#[test]
	fn test_no_lens_for_unused() {
		let code = "local unused = 1; 42";
		let doc = Document::new(code.to_string(), DocVersion::new(1));
		let uri = make_uri("test");

		let config = CodeLensConfig {
			show_references: true,
			show_evaluate: false,
			show_types: false,
			show_errors: ErrorLensVisibility::Hidden,
		};

		let lenses = code_lens(&doc, &uri, &config, None);

		// No lens because 'unused' has 0 references
		assert!(lenses.is_empty());
	}

	#[test]
	fn test_evaluate_lens() {
		let code = "{ a: 1 }";
		let doc = Document::new(code.to_string(), DocVersion::new(1));
		let uri = make_uri("test");

		let config = CodeLensConfig {
			show_references: false,
			show_evaluate: true,
			show_types: false,
			show_errors: ErrorLensVisibility::Hidden,
		};

		let lenses = code_lens(&doc, &uri, &config, None);
		let expected = vec![expected_evaluate_lens(&uri)];
		assert_eq!(lenses, expected);
	}

	#[test]
	fn test_type_lens_for_function() {
		let code = "local add(a, b) = a + b; add(1, 2)";
		let doc = Document::new(code.to_string(), DocVersion::new(1));
		let uri = make_uri("test");
		let analysis = test_analysis(&doc);

		let config = CodeLensConfig {
			show_references: false,
			show_evaluate: false,
			show_types: true,
			show_errors: ErrorLensVisibility::Hidden,
		};

		let lenses = code_lens(&doc, &uri, &config, Some(&analysis));
		let expected = vec![expected_function_type_lens(&doc, &analysis, "add")];
		assert_eq!(lenses, expected);
	}

	#[test]
	fn test_type_lens_for_complex_binding() {
		let code = "local config = { name: 'test', count: 42 }; config";
		let doc = Document::new(code.to_string(), DocVersion::new(1));
		let uri = make_uri("test");
		let analysis = test_analysis(&doc);

		let config = CodeLensConfig {
			show_references: false,
			show_evaluate: false,
			show_types: true,
			show_errors: ErrorLensVisibility::Hidden,
		};

		let lenses = code_lens(&doc, &uri, &config, Some(&analysis));
		let expected = vec![expected_complex_binding_type_lens(&doc, &analysis)];
		assert_eq!(lenses, expected);
	}

	#[test]
	fn test_all_lenses_with_types() {
		let code = "local f(x) = x * 2; f(21)";
		let doc = Document::new(code.to_string(), DocVersion::new(1));
		let uri = make_uri("test");
		let analysis = test_analysis(&doc);

		let config = CodeLensConfig::all();

		let lenses = code_lens(&doc, &uri, &config, Some(&analysis));
		let expected = vec![
			expected_reference_lens(&doc, &uri, "f"),
			expected_reference_lens(&doc, &uri, "x"),
			expected_evaluate_lens(&uri),
			expected_function_type_lens(&doc, &analysis, "f"),
		];
		assert_eq!(lenses, expected);
	}

	#[test]
	fn test_singular_reference() {
		let code = "local x = 1; x";
		let doc = Document::new(code.to_string(), DocVersion::new(1));
		let uri = make_uri("test");

		let config = CodeLensConfig {
			show_references: true,
			show_evaluate: false,
			show_types: false,
			show_errors: ErrorLensVisibility::Hidden,
		};

		let lenses = code_lens(&doc, &uri, &config, None);
		let expected = vec![expected_reference_lens(&doc, &uri, "x")];
		assert_eq!(lenses, expected);
	}

	#[test]
	fn test_error_status_lens_with_errors() {
		// Invalid syntax - missing expression after +
		let code = "1 +";
		let doc = Document::new(code.to_string(), DocVersion::new(1));
		let uri = make_uri("test");

		let config = CodeLensConfig {
			show_references: false,
			show_evaluate: false,
			show_types: false,
			show_errors: ErrorLensVisibility::Visible,
		};

		let lenses = code_lens(&doc, &uri, &config, None);

		let expected = vec![CodeLens {
			range: Range {
				start: lsp_types::Position {
					line: 0,
					character: 0,
				},
				end: lsp_types::Position {
					line: 0,
					character: 0,
				},
			},
			command: Some(Command {
				title: "1 syntax error".to_string(),
				command: "jrsonnet.showErrors".to_string(),
				arguments: Some(vec![serde_json::to_value(uri.to_string()).unwrap()]),
			}),
			data: None,
		}];

		assert_eq!(lenses, expected);
	}

	#[test]
	fn test_error_status_lens_no_errors() {
		let code = "{ a: 1, b: 2 }";
		let doc = Document::new(code.to_string(), DocVersion::new(1));
		let uri = make_uri("test");

		let config = CodeLensConfig {
			show_references: false,
			show_evaluate: false,
			show_types: false,
			show_errors: ErrorLensVisibility::Visible,
		};

		let lenses = code_lens(&doc, &uri, &config, None);

		// NOTE(review): the extracted source read `Vec` with the generic
		// argument lost; `Vec<CodeLens>` is the only element type matching
		// the comparison below.
		let expected: Vec<CodeLens> = vec![];

		assert_eq!(lenses, expected);
	}
}
diff --git a/crates/jrsonnet-lsp-handlers/src/completion/fields.rs b/crates/jrsonnet-lsp-handlers/src/completion/fields.rs
new file mode 100644
index 00000000..31ade04a
--- /dev/null
+++ b/crates/jrsonnet-lsp-handlers/src/completion/fields.rs
@@ -0,0 +1,254 @@
+//! Object field completions for `obj.` patterns.
+ +use jrsonnet_lsp_document::Document; +use jrsonnet_lsp_inference::TypeAnalysis; +use jrsonnet_rowan_parser::{ + nodes::{BindDestruct, Destruct, FieldName, MemberFieldNormal, ObjBody, StmtLocal}, + AstNode, AstToken, SyntaxKind, SyntaxNode, +}; +use lsp_types::{CompletionItem, CompletionItemKind}; + +/// Check if we're completing object fields after `obj.`. +/// +/// The `analysis` parameter should be pre-computed using `TypeProvider` to ensure +/// that import types are properly resolved. +pub fn check_object_field_completion( + document: &Document, + text: &str, + offset: u32, + analysis: &TypeAnalysis, +) -> Option> { + let offset_usize = offset as usize; + + // Look for `identifier.` pattern before cursor + // Find the dot + let before_cursor = &text[..offset_usize]; + let dot_pos = before_cursor.rfind('.')?; + + // Check there's no whitespace between dot and cursor + let after_dot = &before_cursor[dot_pos + 1..]; + if after_dot.contains(char::is_whitespace) && !after_dot.trim().is_empty() { + return None; + } + + // Get the identifier before the dot + let before_dot = &before_cursor[..dot_pos]; + let ident_start = before_dot + .rfind(|c: char| !c.is_ascii_alphanumeric() && c != '_') + .map_or(0, |i| i + 1); + let identifier = before_dot[ident_start..].trim(); + + if identifier.is_empty() || identifier == "std" { + // Skip empty identifiers and std (handled separately) + return None; + } + + // Get what the user is typing after the dot (for filtering) + let prefix = after_dot.trim(); + + // Try type inference for richer type information + let ast = document.ast(); + // Look for expression just before the dot (not at the dot) + let before_dot_pos = if dot_pos > 0 { dot_pos - 1 } else { 0 }; + let before_dot_text_size = rowan::TextSize::from(before_dot_pos as u32); + + // Try to get fields from type inference + if let Some(fields) = analysis.fields_at_position(ast.syntax(), before_dot_text_size) { + let store = analysis.store(); + let items = fields + 
.into_iter() + .filter(|(name, _)| prefix.is_empty() || name.starts_with(prefix)) + .map(|(name, ty)| CompletionItem { + label: name, + kind: Some(CompletionItemKind::FIELD), + detail: Some(store.display(ty)), + ..Default::default() + }) + .collect::>(); + + if !items.is_empty() { + return Some(items); + } + } + + // Fall back to AST-based field extraction for cases where type inference isn't enough + let fields = find_object_fields_for_identifier(ast.syntax(), identifier, dot_pos as u32)?; + + // Filter and convert to completion items + let items = fields + .into_iter() + .filter(|f| prefix.is_empty() || f.starts_with(prefix)) + .map(|name| CompletionItem { + label: name, + kind: Some(CompletionItemKind::FIELD), + detail: Some("object field".to_string()), + ..Default::default() + }) + .collect::>(); + + if items.is_empty() { + None + } else { + Some(items) + } +} + +/// Find object fields for an identifier by looking up its definition. +fn find_object_fields_for_identifier( + root: &SyntaxNode, + identifier: &str, + offset: u32, +) -> Option> { + let text_size = rowan::TextSize::from(offset); + + // Search for local bindings with this name + for node in root.descendants() { + if node.kind() == SyntaxKind::STMT_LOCAL { + // Check if this binding is before our position + if node.text_range().end() > text_size { + continue; + } + + if let Some(stmt_local) = StmtLocal::cast(node.clone()) { + for bind in stmt_local.binds() { + match &bind { + jrsonnet_rowan_parser::nodes::Bind::BindDestruct(bd) => { + if let Some(fields) = check_bind_destruct_for_object(bd, identifier) { + return Some(fields); + } + } + jrsonnet_rowan_parser::nodes::Bind::BindFunction(bf) => { + // Functions don't have object fields in this context + let _ = bf; + } + } + } + } + } + } + + None +} + +/// Check if a BindDestruct is for the given identifier and extract object fields. 
+fn check_bind_destruct_for_object(bind: &BindDestruct, identifier: &str) -> Option> { + let destruct = bind.into()?; + + if let Destruct::DestructFull(full) = destruct { + let bind_name = full.name()?; + let ident = bind_name.ident_lit()?; + + if ident.text() != identifier { + return None; + } + + // Found the binding, now look at its value + // The value is in the parent BindDestruct + let value_expr = bind.value()?; + + // Check if the value is an object + extract_object_fields(value_expr.syntax()) + } else { + None + } +} + +/// Extract field names from an object expression. +fn extract_object_fields(expr: &SyntaxNode) -> Option> { + // The expression might be wrapped in Expr nodes + let obj_node = find_object_in_expr(expr)?; + + let obj_body = ObjBody::cast(obj_node)?; + + let mut fields = Vec::new(); + + // ObjBody contains members + if let ObjBody::ObjBodyMemberList(member_list) = obj_body { + for member in member_list.members() { + match member { + jrsonnet_rowan_parser::nodes::Member::MemberFieldNormal(field) => { + if let Some(name) = extract_field_name(&field) { + fields.push(name); + } + } + jrsonnet_rowan_parser::nodes::Member::MemberFieldMethod(method) => { + if let Some(field_name) = method.field_name() { + if let Some(name) = extract_field_name_from_field_name(&field_name) { + fields.push(name); + } + } + } + _ => {} + } + } + } + + if fields.is_empty() { + None + } else { + Some(fields) + } +} + +/// Find an object body node within an expression. 
+fn find_object_in_expr(node: &SyntaxNode) -> Option { + // Direct object body + if node.kind() == SyntaxKind::OBJ_BODY_MEMBER_LIST || node.kind() == SyntaxKind::OBJ_BODY_COMP { + return Some(node.clone()); + } + + // Look for ExprObject child + for child in node.children() { + if child.kind() == SyntaxKind::EXPR_OBJECT { + // Find the ObjBody inside + for obj_child in child.children() { + if obj_child.kind() == SyntaxKind::OBJ_BODY_MEMBER_LIST + || obj_child.kind() == SyntaxKind::OBJ_BODY_COMP + { + return Some(obj_child); + } + } + } + + // Recurse into Expr nodes + if child.kind() == SyntaxKind::EXPR { + if let Some(found) = find_object_in_expr(&child) { + return Some(found); + } + } + } + + None +} + +/// Extract field name from a MemberFieldNormal. +fn extract_field_name(field: &MemberFieldNormal) -> Option { + let field_name = field.field_name()?; + extract_field_name_from_field_name(&field_name) +} + +/// Extract name string from a FieldName node. +fn extract_field_name_from_field_name(field_name: &FieldName) -> Option { + match field_name { + FieldName::FieldNameFixed(fixed) => { + // FieldNameFixed has id() which returns Name + if let Some(name) = fixed.id() { + if let Some(ident) = name.ident_lit() { + return Some(ident.text().to_string()); + } + } + // Or it could have a text() string + if let Some(text) = fixed.text() { + // Remove quotes from text + let s = text.syntax().text().to_string(); + let s = s.trim_matches('"').trim_matches('\''); + return Some(s.to_string()); + } + None + } + FieldName::FieldNameDynamic(_) => { + // Dynamic field names like [expr] can't be completed statically + None + } + } +} diff --git a/crates/jrsonnet-lsp-handlers/src/completion/helpers.rs b/crates/jrsonnet-lsp-handlers/src/completion/helpers.rs new file mode 100644 index 00000000..15644d58 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/completion/helpers.rs @@ -0,0 +1,62 @@ +//! Shared helper functions for completion. 
+ +use jrsonnet_lsp_document::ByteOffset; +use jrsonnet_rowan_parser::{SyntaxKind, SyntaxNode}; + +/// Get the identifier prefix at the current position. +pub fn get_identifier_prefix(text: &str, offset: usize) -> &str { + if offset == 0 { + return ""; + } + + let before = &text[..offset]; + let start = before + .rfind(|c: char| !c.is_ascii_alphanumeric() && c != '_') + .map_or(0, |i| i + 1); + + &before[start..] +} + +/// Check if a string is a valid identifier prefix. +pub fn is_identifier_prefix(s: &str) -> bool { + let mut chars = s.chars(); + match chars.next() { + Some(c) if c.is_ascii_alphabetic() || c == '_' => {} + _ => return false, + } + chars.all(|c| c.is_ascii_alphanumeric() || c == '_') +} + +/// Check if the given offset is inside an object body. +/// +/// Walks up the AST from the token at the offset looking for object-related nodes. +pub fn is_inside_object(root: &SyntaxNode, offset: ByteOffset) -> bool { + use jrsonnet_lsp_document::find_node_at_offset; + + // Find the node at the offset + let Some(node) = find_node_at_offset(root, offset) else { + return false; + }; + + // Walk up the tree looking for object body nodes + let mut current = node; + loop { + match current.kind() { + // Object body nodes indicate we're inside an object + SyntaxKind::OBJ_BODY_MEMBER_LIST + | SyntaxKind::OBJ_BODY_COMP + // Object expression node + | SyntaxKind::EXPR_OBJECT => { + return true; + } + _ => {} + } + + let Some(parent) = current.parent() else { + break; + }; + current = parent; + } + + false +} diff --git a/crates/jrsonnet-lsp-handlers/src/completion/imports.rs b/crates/jrsonnet-lsp-handlers/src/completion/imports.rs new file mode 100644 index 00000000..dc21ee62 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/completion/imports.rs @@ -0,0 +1,227 @@ +//! Import path completions for `import` statements. 
+ +use std::{ + collections::HashSet, + path::{Path, PathBuf}, +}; + +use lsp_types::{CompletionItem, CompletionItemKind}; + +/// Check if we're inside an import string and provide path completions. +pub fn check_import_completion( + text: &str, + offset: u32, + doc_path: Option<&Path>, + import_roots: &[PathBuf], +) -> Option> { + let offset_usize = offset as usize; + + // Find if we're inside a string that's part of an import + // Look for patterns like: import "..., import '..., importstr "..., importstr '... + let before_cursor = &text[..offset_usize]; + + // Find the start of the current string + let string_start = find_import_string_start(before_cursor)?; + + // Extract the partial path typed so far + let partial_path = &before_cursor[string_start..]; + + let (dir_part, prefix) = split_partial_path(partial_path); + let roots = import_search_roots(doc_path, import_roots); + if roots.is_empty() { + return None; + } + + let mut items = Vec::new(); + let mut seen = HashSet::new(); + + for root in roots { + let search_dir = if dir_part.is_empty() { + root + } else { + root.join(dir_part) + }; + + let Ok(entries) = std::fs::read_dir(&search_dir) else { + continue; + }; + + for entry in entries.filter_map(std::result::Result::ok) { + let name = entry.file_name().to_string_lossy().to_string(); + + // Filter by prefix + if !prefix.is_empty() && !name.starts_with(prefix) { + continue; + } + + let path = entry.path(); + let is_dir = path.is_dir(); + + // For files, only show .jsonnet and .libsonnet files + if !is_dir { + let ext = path.extension().and_then(|e| e.to_str()).unwrap_or(""); + if ext != "jsonnet" && ext != "libsonnet" && ext != "json" { + continue; + } + } + + let (kind, insert_text) = if is_dir { + (CompletionItemKind::FOLDER, format!("{name}/")) + } else { + (CompletionItemKind::FILE, name.clone()) + }; + + let key = (is_dir, insert_text.clone()); + if !seen.insert(key) { + continue; + } + + items.push(CompletionItem { + label: name, + kind: Some(kind), + 
insert_text: Some(insert_text), + ..Default::default() + }); + } + } + + if items.is_empty() { + None + } else { + // Sort: directories first, then files + items.sort_by(|a, b| { + let a_is_dir = a.kind == Some(CompletionItemKind::FOLDER); + let b_is_dir = b.kind == Some(CompletionItemKind::FOLDER); + match (a_is_dir, b_is_dir) { + (true, false) => std::cmp::Ordering::Less, + (false, true) => std::cmp::Ordering::Greater, + _ => a.label.cmp(&b.label), + } + }); + Some(items) + } +} + +fn split_partial_path(partial_path: &str) -> (&str, &str) { + let Some(last_slash) = partial_path.rfind('/') else { + return ("", partial_path); + }; + let dir_part = &partial_path[..last_slash]; + let prefix = &partial_path[last_slash + 1..]; + (dir_part, prefix) +} + +fn import_search_roots(doc_path: Option<&Path>, import_roots: &[PathBuf]) -> Vec { + let mut roots = Vec::new(); + if let Some(doc_dir) = doc_path.and_then(Path::parent) { + roots.push(doc_dir.to_path_buf()); + } + roots.extend(import_roots.iter().cloned()); + + // Keep the first occurrence for stable precedence (document dir before jpath). + let mut deduped = Vec::new(); + let mut seen = HashSet::new(); + for root in roots { + if seen.insert(root.clone()) { + deduped.push(root); + } + } + deduped +} + +/// Find the start of an import string, returning the position after the opening quote. 
+pub fn find_import_string_start(text: &str) -> Option { + // Look backwards for import keyword followed by a string + // Patterns: import ", import ', importstr ", importstr ', importbin ", importbin ' + + // Find the last opening quote + let last_double = text.rfind('"'); + let last_single = text.rfind('\''); + + let (quote_pos, _quote_char) = match (last_double, last_single) { + (Some(d), Some(s)) => { + if d > s { + (d, '"') + } else { + (s, '\'') + } + } + (Some(d), None) => (d, '"'), + (None, Some(s)) => (s, '\''), + (None, None) => return None, + }; + + // Check that there's no closing quote after this opening quote + // (i.e., we're inside the string, not after it) + let after_quote = &text[quote_pos + 1..]; + if after_quote.contains('"') || after_quote.contains('\'') { + return None; + } + + // Check that before the quote we have an import keyword + let before_quote = text[..quote_pos].trim_end(); + + if before_quote.ends_with("import") + || before_quote.ends_with("importstr") + || before_quote.ends_with("importbin") + { + return Some(quote_pos + 1); + } + + None +} + +#[cfg(test)] +mod tests { + use std::fs; + + use tempfile::TempDir; + + use super::*; + + #[test] + fn test_import_completion_searches_jpath_roots() { + let tmp = TempDir::new().expect("temp dir should be created"); + let workspace = tmp.path().join("workspace"); + let jpath = tmp.path().join("jpath"); + fs::create_dir_all(&workspace).expect("workspace should be created"); + fs::create_dir_all(&jpath).expect("jpath should be created"); + + let doc_path = workspace.join("main.jsonnet"); + fs::write(&doc_path, r#"import "sh""#).expect("main file should be created"); + fs::write(workspace.join("local.libsonnet"), "{}").expect("local import should be created"); + fs::write(jpath.join("shared.libsonnet"), "{}").expect("jpath import should be created"); + + let source = r#"import "sh"#; + let items = check_import_completion(source, source.len() as u32, Some(&doc_path), &[jpath]) + .expect("should 
have import completions"); + + let labels: Vec<_> = items.iter().map(|item| item.label.as_str()).collect(); + assert_eq!(labels, vec!["shared.libsonnet"]); + } + + #[test] + fn test_import_completion_dedupes_across_roots() { + let tmp = TempDir::new().expect("temp dir should be created"); + let workspace = tmp.path().join("workspace"); + let jpath = tmp.path().join("jpath"); + fs::create_dir_all(&workspace).expect("workspace should be created"); + fs::create_dir_all(&jpath).expect("jpath should be created"); + + let doc_path = workspace.join("main.jsonnet"); + fs::write(&doc_path, r#"import """#).expect("main file should be created"); + fs::write(workspace.join("dup.libsonnet"), "{}") + .expect("workspace import should be created"); + fs::write(jpath.join("dup.libsonnet"), "{}").expect("jpath import should be created"); + + let source = r#"import ""#; + let items = check_import_completion(source, source.len() as u32, Some(&doc_path), &[jpath]) + .expect("should have import completions"); + + let dup_count = items + .iter() + .filter(|item| item.label == "dup.libsonnet") + .count(); + assert_eq!(dup_count, 1, "duplicate entries should be deduped"); + } +} diff --git a/crates/jrsonnet-lsp-handlers/src/completion/locals.rs b/crates/jrsonnet-lsp-handlers/src/completion/locals.rs new file mode 100644 index 00000000..e0cd5440 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/completion/locals.rs @@ -0,0 +1,48 @@ +//! Local variable completions. + +use jrsonnet_lsp_document::{Document, LspPosition}; +use lsp_types::{CompletionItem, CompletionItemKind}; + +use super::helpers::get_identifier_prefix; +use crate::definition::{collect_visible_bindings, BindingKind}; + +/// Get completions for local variables in scope. 
+pub fn get_local_completions( + document: &Document, + position: LspPosition, + text: &str, + offset: u32, +) -> Vec { + // Get the prefix the user is typing (if any) + let prefix = get_identifier_prefix(text, offset as usize); + + // Collect all visible bindings + let bindings = collect_visible_bindings(document, position); + + bindings + .into_iter() + .filter(|b| prefix.is_empty() || b.name.starts_with(prefix)) + .map(|binding| { + let kind = match binding.kind { + BindingKind::LocalFunction => CompletionItemKind::FUNCTION, + BindingKind::LocalVariable | BindingKind::Parameter | BindingKind::ForVariable => { + CompletionItemKind::VARIABLE + } + }; + + let detail = match binding.kind { + BindingKind::LocalVariable => "local variable", + BindingKind::LocalFunction => "local function", + BindingKind::Parameter => "parameter", + BindingKind::ForVariable => "for variable", + }; + + CompletionItem { + label: binding.name, + kind: Some(kind), + detail: Some(detail.to_string()), + ..Default::default() + } + }) + .collect() +} diff --git a/crates/jrsonnet-lsp-handlers/src/completion/mod.rs b/crates/jrsonnet-lsp-handlers/src/completion/mod.rs new file mode 100644 index 00000000..426fe95a --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/completion/mod.rs @@ -0,0 +1,639 @@ +//! Completion handler for providing code completions. +//! +//! Supports: +//! - Standard library functions (triggered by `std.`) +//! - Local variables in scope +//! - Object field completion (triggered by `obj.`) +//! 
- Import path completion (inside import strings) + +mod fields; +mod helpers; +mod imports; +mod locals; +mod stdlib; + +use std::path::{Path, PathBuf}; + +use jrsonnet_lsp_document::{token_at_offset, Document, LspPosition}; +use jrsonnet_lsp_inference::TypeAnalysis; +use jrsonnet_rowan_parser::AstNode; +use lsp_types::{CompletionItem, CompletionItemKind, CompletionList}; +use tracing::debug; + +// Re-export for tests +pub use self::imports::find_import_string_start; +use self::{ + fields::check_object_field_completion, + helpers::{get_identifier_prefix, is_inside_object}, + imports::check_import_completion, + locals::get_local_completions, + stdlib::check_stdlib_completion, +}; + +/// Get completion items for the given position. +/// +/// `doc_path` is the path to the current document, used for import path completion. +/// The `analysis` parameter should be pre-computed using `TypeProvider` to ensure +/// that import types are properly resolved. +pub fn completion( + document: &Document, + position: LspPosition, + doc_path: Option<&Path>, + analysis: &TypeAnalysis, +) -> Option { + completion_with_import_roots(document, position, doc_path, &[], analysis) +} + +/// Get completion items with explicit import search roots. +/// +/// `import_roots` is typically configured from server `jpath` entries and used +/// when completing inside import strings. 
+pub fn completion_with_import_roots( + document: &Document, + position: LspPosition, + doc_path: Option<&Path>, + import_roots: &[PathBuf], + analysis: &TypeAnalysis, +) -> Option { + let text = document.text(); + let line_index = document.line_index(); + + // Convert LSP position to byte offset + let offset = line_index.offset(position, text)?; + + // Get the AST root + let ast = document.ast(); + + // Find the token at or before the offset (may be None at whitespace/EOF) + let token = token_at_offset(ast.syntax(), offset); + + // Check if we're completing after `std.` + if let Some(ref t) = token { + if let Some(items) = check_stdlib_completion(t, text, offset.into()) { + debug!(count = items.len(), "providing stdlib completions"); + return Some(CompletionList { + is_incomplete: false, + items, + }); + } + } + + // Check if we're inside an import string + if let Some(items) = check_import_completion(text, offset.into(), doc_path, import_roots) { + debug!(count = items.len(), "providing import completions"); + return Some(CompletionList { + is_incomplete: false, + items, + }); + } + + // Check if we're completing after `obj.` (object field access) + if let Some(items) = check_object_field_completion(document, text, offset.into(), analysis) { + debug!(count = items.len(), "providing object field completions"); + return Some(CompletionList { + is_incomplete: false, + items, + }); + } + + // For general completion, provide local variables in scope + let mut items = get_local_completions(document, position, text, offset.into()); + + // Also include `std` as a completion option since it's always available + items.push(CompletionItem { + label: "std".to_string(), + kind: Some(CompletionItemKind::MODULE), + detail: Some("Jsonnet standard library".to_string()), + ..Default::default() + }); + + // Add `self`, `super`, and `$` if inside an object + if is_inside_object(ast.syntax(), offset) { + let prefix = get_identifier_prefix(text, offset.into()); + + if 
prefix.is_empty() || "$".starts_with(prefix) { + items.push(CompletionItem { + label: "$".to_string(), + kind: Some(CompletionItemKind::KEYWORD), + detail: Some("Reference to root object".to_string()), + ..Default::default() + }); + } + + if prefix.is_empty() || "self".starts_with(prefix) { + items.push(CompletionItem { + label: "self".to_string(), + kind: Some(CompletionItemKind::KEYWORD), + detail: Some("Reference to current object".to_string()), + ..Default::default() + }); + } + + if prefix.is_empty() || "super".starts_with(prefix) { + items.push(CompletionItem { + label: "super".to_string(), + kind: Some(CompletionItemKind::KEYWORD), + detail: Some("Reference to inherited object".to_string()), + ..Default::default() + }); + } + } + + Some(CompletionList { + is_incomplete: false, + items, + }) +} + +#[cfg(test)] +mod tests { + use std::sync::Arc; + + use jrsonnet_lsp_document::DocVersion; + use jrsonnet_lsp_types::GlobalTyStore; + + use super::*; + + /// Create a TypeAnalysis for test purposes. 
+ fn test_analysis(doc: &Document) -> TypeAnalysis { + let global_types = Arc::new(GlobalTyStore::new()); + TypeAnalysis::analyze_with_global(doc, global_types) + } + + #[test] + fn test_stdlib_completion_with_prefix_xor() { + // Use prefix "xo" which only matches "xor" in stdlib + let code = "std.xo"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let analysis = test_analysis(&doc); + + let pos = (0, 6).into(); + + let list = completion(&doc, pos, None, &analysis).expect("should get completions"); + + // Extract just labels for comparison (full CompletionItem has dynamic detail text) + let labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + assert_eq!(labels, vec!["xor"]); + } + + #[test] + fn test_stdlib_completion_with_prefix_x() { + // Use prefix "x" which matches "xnor" and "xor" in stdlib + let code = "std.x"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let analysis = test_analysis(&doc); + + let pos = (0, 5).into(); + + let list = completion(&doc, pos, None, &analysis).expect("should get completions"); + + // Extract labels and sort for comparison + let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + labels.sort_unstable(); + assert_eq!(labels, vec!["xnor", "xor"]); + } + + #[test] + fn test_completion_includes_std_and_local() { + // Simple case: one local variable, position at end where std and x are visible + let code = "local x = 1; "; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let analysis = test_analysis(&doc); + + let pos = (0, 13).into(); + + let list = completion(&doc, pos, None, &analysis).expect("should get completions"); + + // Extract labels and sort for comparison + let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + labels.sort_unstable(); + assert_eq!(labels, vec!["std", "x"]); + } + + #[test] + fn test_completion_item_has_documentation() { + // Use a simple object where we can assert the 
full completion list + let code = "local obj = { foo: 1 }; obj."; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let analysis = test_analysis(&doc); + + let pos = (0, 28).into(); + + let result = completion(&doc, pos, None, &analysis).expect("should get completions"); + + // Assert the full completion list for object field access + assert_eq!( + result.items, + vec![CompletionItem { + label: "foo".to_string(), + label_details: None, + kind: Some(CompletionItemKind::FIELD), + detail: Some("number".to_string()), + documentation: None, + deprecated: None, + preselect: None, + sort_text: None, + filter_text: None, + insert_text: None, + insert_text_format: None, + insert_text_mode: None, + text_edit: None, + additional_text_edits: None, + command: None, + commit_characters: None, + data: None, + tags: None, + }] + ); + } + + #[test] + fn test_local_variable_completion() { + let code = "local foo = 1; local bar = 2; "; + // ^ cursor here (character 30) + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let analysis = test_analysis(&doc); + + let pos = (0, 30).into(); + + let list = completion(&doc, pos, None, &analysis).expect("should get completions"); + let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + labels.sort_unstable(); + assert_eq!(labels, vec!["bar", "foo", "std"]); + } + + #[test] + fn test_function_parameter_completion() { + let code = "local f(x, y) = x + "; + // ^ cursor here (character 20) + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let analysis = test_analysis(&doc); + + let pos = (0, 20).into(); + + let list = completion(&doc, pos, None, &analysis).expect("should get completions"); + let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + labels.sort_unstable(); + // x, y are params; f is in scope; std is always available + assert_eq!(labels, vec!["f", "std", "x", "y"]); + } + + #[test] + fn test_completion_with_prefix() { + let 
code = "local foo = 1; local bar = 2; f"; + // ^ cursor at 'f' (character 31) + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let analysis = test_analysis(&doc); + + let pos = (0, 31).into(); + + let list = completion(&doc, pos, None, &analysis).expect("should get completions"); + let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + labels.sort_unstable(); + // 'foo' starts with 'f', and 'std' is always included (no prefix filtering on keywords) + assert_eq!(labels, vec!["foo", "std"]); + } + + #[test] + fn test_object_field_completion() { + let code = "local obj = { foo: 1, bar: 2 }; obj."; + // ^ cursor here (character 36) + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let analysis = test_analysis(&doc); + + let pos = (0, 36).into(); + + let list = + completion(&doc, pos, None, &analysis).expect("should get object field completions"); + let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + labels.sort_unstable(); + assert_eq!(labels, vec!["bar", "foo"]); + } + + #[test] + fn test_object_field_completion_with_prefix() { + let code = "local obj = { foo: 1, bar: 2 }; obj.f"; + // ^ cursor here (character 37) + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let analysis = test_analysis(&doc); + + let pos = (0, 37).into(); + + let list = completion(&doc, pos, None, &analysis) + .expect("should get object field completions with prefix"); + let labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + // Only 'foo' starts with 'f' + assert_eq!(labels, vec!["foo"]); + } + + #[test] + fn test_import_string_detection() { + // Test that we correctly detect import string context + // import " - cursor inside open string after import keyword + let text1 = r#"import ""#; + assert_eq!(find_import_string_start(text1), Some(8)); // position after opening quote + + // importstr " - cursor inside open string after importstr keyword + let text2 = 
r#"importstr ""#; + assert_eq!(find_import_string_start(text2), Some(11)); // position after opening quote + + // local x = " - not an import, just a regular string + let text3 = r#"local x = ""#; + assert_eq!(find_import_string_start(text3), None); + + // import "foo.jsonnet" - closed string, not inside import path + let text4 = r#"import "foo.jsonnet""#; + assert_eq!(find_import_string_start(text4), None); + } + + #[test] + fn test_import_completion_with_path() { + use tempfile::TempDir; + + // Create a temp directory with some files + let temp_dir = TempDir::new().unwrap(); + let temp_path = temp_dir.path(); + + // Create test files + std::fs::write(temp_path.join("utils.libsonnet"), "{}").unwrap(); + std::fs::write(temp_path.join("config.jsonnet"), "{}").unwrap(); + std::fs::create_dir(temp_path.join("lib")).unwrap(); + std::fs::write(temp_path.join("lib/helper.libsonnet"), "{}").unwrap(); + + // Create a document in the temp directory + let doc_path = temp_path.join("main.jsonnet"); + let code = r#"import ""#; + // ^ cursor here (character 8) + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let analysis = test_analysis(&doc); + + let pos = (0, 8).into(); + + let list = completion(&doc, pos, Some(&doc_path), &analysis) + .expect("should get import completions"); + let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + labels.sort_unstable(); + assert_eq!(labels, vec!["config.jsonnet", "lib", "utils.libsonnet"]); + } + + #[test] + fn test_for_comprehension_variable_completion() { + let code = "[x for x in [1, 2, 3] if ]"; + // ^ cursor here (character 25) + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let analysis = test_analysis(&doc); + + let pos = (0, 25).into(); + + let list = completion(&doc, pos, None, &analysis) + .expect("should get completions in for comprehension"); + let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + labels.sort_unstable(); + 
assert_eq!(labels, vec!["std", "x"]); + } + + #[test] + fn test_object_local_completion() { + let code = "{ local helper = 1, field: }"; + // ^ cursor here (character 27) + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let analysis = test_analysis(&doc); + + let pos = (0, 27).into(); + + let list = + completion(&doc, pos, None, &analysis).expect("should get completions in object"); + let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + labels.sort_unstable(); + assert_eq!(labels, vec!["$", "helper", "self", "std", "super"]); + } + + #[test] + fn test_nested_function_completion() { + // Cursor inside inner function body, after "a + " + let code = "local outer(a) = local inner(b) = a + ; inner(1); outer(1)"; + // ^ cursor at position 37 + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let analysis = test_analysis(&doc); + + let pos = (0, 37).into(); + + let list = completion(&doc, pos, None, &analysis) + .expect("should get completions in nested function"); + let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + labels.sort_unstable(); + // a (outer param), b (inner param), std + assert_eq!(labels, vec!["a", "b", "std"]); + } + + #[test] + fn test_completion_at_eof() { + let code = "local x = 1;\n"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let analysis = test_analysis(&doc); + let pos = (1, 0).into(); + + let list = completion(&doc, pos, None, &analysis).expect("should get completions at EOF"); + let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + labels.sort_unstable(); + assert_eq!(labels, vec!["std", "x"]); + } + + #[test] + fn test_completion_in_object() { + let code = "{ foo: 1, bar: }"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let analysis = test_analysis(&doc); + let pos = (0, 14).into(); + + let result = completion(&doc, pos, None, &analysis); + let list = result.expect("Should 
get completions in object"); + + let mut items: Vec<_> = list.items; + items.sort_by(|a, b| a.label.cmp(&b.label)); + + assert_eq!( + items, + vec![ + CompletionItem { + label: "$".to_string(), + kind: Some(CompletionItemKind::KEYWORD), + detail: Some("Reference to root object".to_string()), + ..Default::default() + }, + CompletionItem { + label: "self".to_string(), + kind: Some(CompletionItemKind::KEYWORD), + detail: Some("Reference to current object".to_string()), + ..Default::default() + }, + CompletionItem { + label: "std".to_string(), + kind: Some(CompletionItemKind::MODULE), + detail: Some("Jsonnet standard library".to_string()), + ..Default::default() + }, + CompletionItem { + label: "super".to_string(), + kind: Some(CompletionItemKind::KEYWORD), + detail: Some("Reference to inherited object".to_string()), + ..Default::default() + }, + ] + ); + } + + #[test] + fn test_no_self_super_outside_object() { + let code = "local x = 1; x"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let analysis = test_analysis(&doc); + let pos = (0, 13).into(); + + let result = completion(&doc, pos, None, &analysis); + let list = result.expect("Should get completions"); + + let mut items: Vec<_> = list.items; + items.sort_by(|a, b| a.label.cmp(&b.label)); + + assert_eq!( + items, + vec![ + CompletionItem { + label: "std".to_string(), + kind: Some(CompletionItemKind::MODULE), + detail: Some("Jsonnet standard library".to_string()), + ..Default::default() + }, + CompletionItem { + label: "x".to_string(), + kind: Some(CompletionItemKind::VARIABLE), + detail: Some("local variable".to_string()), + ..Default::default() + }, + ] + ); + } + + #[test] + fn test_multiple_for_specs_completion() { + // Multiple for specs in array comprehension + let code = "[x + y for x in [1] for y in [2]]"; + // ^ cursor at position 3 (inside expression) + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let analysis = test_analysis(&doc); + + let pos = (0, 3).into(); + + 
let list = completion(&doc, pos, None, &analysis) + .expect("should get completions in multi-for comprehension"); + let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + labels.sort_unstable(); + assert_eq!(labels, vec!["std", "x", "y"]); + } + + #[test] + fn test_completion_with_syntax_error() { + // Incomplete expression with syntax error + let code = "local x = 1;\nlocal y = 2;\n{ foo: x +"; + // ^ cursor at position 9 + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let analysis = test_analysis(&doc); + + let pos = (2, 10).into(); + + let list = completion(&doc, pos, None, &analysis) + .expect("should get completions despite syntax error"); + let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + labels.sort_unstable(); + assert_eq!(labels, vec!["$", "self", "std", "super", "x", "y"]); + } + + #[test] + fn test_shadowed_variable_completion() { + // Inner binding shadows outer binding + let code = "local x = 1;\nlocal f(x) = x +"; + // ^ cursor at position 16 + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let analysis = test_analysis(&doc); + + let pos = (1, 16).into(); + + let list = + completion(&doc, pos, None, &analysis).expect("should get completions with shadowing"); + let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + labels.sort_unstable(); + // Only one 'x' (the parameter) should be visible - outer 'x' is shadowed + assert_eq!(labels, vec!["f", "std", "x"]); + } + + #[test] + fn test_object_field_completion_with_types() { + let code = "local obj = { num: 42, str: \"hello\", arr: [1, 2] }; obj."; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let analysis = test_analysis(&doc); + let pos = (0, 56).into(); + + let result = completion(&doc, pos, None, &analysis); + let list = result.expect("Should get object field completions"); + + let mut items: Vec<_> = list.items; + items.sort_by(|a, b| 
a.label.cmp(&b.label)); + + assert_eq!( + items, + vec![ + CompletionItem { + label: "arr".to_string(), + kind: Some(CompletionItemKind::FIELD), + detail: Some("[number, number]".to_string()), // Tuple type + ..Default::default() + }, + CompletionItem { + label: "num".to_string(), + kind: Some(CompletionItemKind::FIELD), + detail: Some("number".to_string()), + ..Default::default() + }, + CompletionItem { + label: "str".to_string(), + kind: Some(CompletionItemKind::FIELD), + detail: Some("string".to_string()), + ..Default::default() + }, + ] + ); + } + + #[test] + fn test_object_field_completion_nested() { + let code = "local obj = { inner: { x: 1, y: 2 } }; obj.inner."; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let analysis = test_analysis(&doc); + let pos = (0, 49).into(); + + let result = completion(&doc, pos, None, &analysis); + let list = result.expect("Should get nested object field completions"); + + let mut items: Vec<_> = list.items; + items.sort_by(|a, b| a.label.cmp(&b.label)); + + assert_eq!( + items, + vec![ + CompletionItem { + label: "x".to_string(), + kind: Some(CompletionItemKind::FIELD), + detail: Some("number".to_string()), + ..Default::default() + }, + CompletionItem { + label: "y".to_string(), + kind: Some(CompletionItemKind::FIELD), + detail: Some("number".to_string()), + ..Default::default() + }, + ] + ); + } +} diff --git a/crates/jrsonnet-lsp-handlers/src/completion/stdlib.rs b/crates/jrsonnet-lsp-handlers/src/completion/stdlib.rs new file mode 100644 index 00000000..850fcfcc --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/completion/stdlib.rs @@ -0,0 +1,83 @@ +//! Standard library completions for `std.` prefix. + +use jrsonnet_lsp_stdlib as stdlib; +use jrsonnet_rowan_parser::{SyntaxKind, SyntaxToken}; +use lsp_types::{CompletionItem, CompletionItemKind, Documentation, MarkupContent, MarkupKind}; + +use super::helpers::is_identifier_prefix; + +/// Check if we should provide stdlib completions. 
+pub fn check_stdlib_completion( + token: &SyntaxToken, + text: &str, + offset: u32, +) -> Option> { + // Look backwards from the cursor to find `std.` + let offset_usize = offset as usize; + + // Find the start of the current line + let line_start = text[..offset_usize].rfind('\n').map_or(0, |i| i + 1); + + let line_prefix = &text[line_start..offset_usize]; + + // Check if we have `std.` followed by an optional partial identifier + if let Some(std_pos) = line_prefix.rfind("std.") { + // Get what's after `std.` + let after_std_dot = &line_prefix[std_pos + 4..]; + + // If there's text after `std.`, it should be a valid identifier prefix + if after_std_dot.is_empty() || is_identifier_prefix(after_std_dot) { + let prefix = after_std_dot; + return Some(get_stdlib_completions(prefix)); + } + } + + // Also check if the token itself indicates we're after `std.` + // This handles cases where the parser created a proper AST + if token.kind() == SyntaxKind::DOT { + // Check if the expression before the dot is `std` + if let Some(parent) = token.parent() { + // Look for std identifier before this dot + for sibling in parent.children_with_tokens() { + if let Some(t) = sibling.as_token() { + if t.kind() == SyntaxKind::IDENT && t.text() == "std" { + return Some(get_stdlib_completions("")); + } + } + } + } + } + + // Check if we're on an identifier that comes after `std.` + if token.kind() == SyntaxKind::IDENT { + // Walk back to see if there's a `std.` pattern + let token_start = u32::from(token.text_range().start()) as usize; + if token_start >= 4 { + let before_token = &text[token_start.saturating_sub(4)..token_start]; + if before_token == "std." { + let prefix = token.text(); + return Some(get_stdlib_completions(prefix)); + } + } + } + + None +} + +/// Get stdlib completion items, filtered by prefix. 
+pub fn get_stdlib_completions(prefix: &str) -> Vec { + stdlib::get_all_stdlib_docs() + .filter(|doc| doc.name.starts_with(prefix)) + .map(|doc| CompletionItem { + label: doc.name.to_string(), + kind: Some(CompletionItemKind::FUNCTION), + detail: Some(format!("std.{}{}", doc.name, doc.signature)), + documentation: Some(Documentation::MarkupContent(MarkupContent { + kind: MarkupKind::Markdown, + value: doc.description.to_string(), + })), + insert_text: Some(doc.name.to_string()), + ..Default::default() + }) + .collect() +} diff --git a/crates/jrsonnet-lsp-handlers/src/definition.rs b/crates/jrsonnet-lsp-handlers/src/definition.rs new file mode 100644 index 00000000..51006d45 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/definition.rs @@ -0,0 +1,715 @@ +//! Go-to-definition handler. +//! +//! Finds the definition of a symbol at a given position by: +//! 1. Finding the token at the cursor position +//! 2. If it's a variable reference, walking up the scope chain to find the binding +//! 3. If it's an import path, returning the import path for resolution +//! 4. If it's a field access on an import, returning the import path and field chain + +use jrsonnet_lsp_document::{ + find_node_at_offset, to_lsp_range, token_at_offset, Document, LspPosition, +}; +use jrsonnet_lsp_import::check_import_path; +use jrsonnet_lsp_inference::{trace_base, ConstEvalResult}; +use jrsonnet_lsp_scope::{find_definition_range, is_variable_reference}; +use jrsonnet_rowan_parser::{ + nodes::{ + Bind, BindFunction, Destruct, ExprBase, ExprField, ExprFunction, ForSpec, MemberBindStmt, + Param, StmtLocal, + }, + AstNode, SyntaxKind, SyntaxNode, SyntaxToken, +}; +use lsp_types::Range; +use rowan::TextRange; +use tracing::debug; + +/// Result of a go-to-definition request. +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum DefinitionResult { + /// A local definition within the same document. + Local(Range), + /// An import path that needs to be resolved relative to the document. 
+ Import(String), + /// A field in an imported file. Contains the import path and the field chain. + /// For `lib.foo.bar`, this would be `("lib.libsonnet", ["foo", "bar"])`. + ImportField { + /// The import path to resolve. + path: String, + /// The chain of field names to navigate. + fields: Vec, + }, +} + +/// Find the definition of the symbol at the given position. +/// +/// Returns a `DefinitionResult` indicating either: +/// - A local range within the document +/// - An import path that needs to be resolved by the caller +/// - A field in an imported file (path + field chain) +pub fn goto_definition(document: &Document, position: LspPosition) -> Option { + let text = document.text(); + let line_index = document.line_index(); + + // Convert LSP position to byte offset + let offset = line_index.offset(position, text)?; + + // Get the AST root + let ast = document.ast(); + + // Find the token at the offset + let token = token_at_offset(ast.syntax(), offset)?; + + // Check for import path first + if let Some(import_path) = check_import_path(&token) { + debug!(import_path = %import_path, "found import definition"); + return Some(DefinitionResult::Import(import_path)); + } + + // Check if this is an identifier that could be a variable reference + if token.kind() != SyntaxKind::IDENT { + return None; + } + + let name = token.text().to_string(); + + // Check if this identifier is a field access (e.g., `foo` in `lib.foo`) + // and if the base resolves to an import + if let Some(result) = check_field_access_on_import(&token, &name, document) { + return Some(result); + } + + // Check if this identifier is part of a variable reference (ExprVar) + if !is_variable_reference(&token) { + return None; + } + + // Walk up the scope chain to find the definition + let def_range = find_definition_range(&token, &name)?; + + debug!(name = %name, "found local definition"); + + // Convert to LSP range + Some(DefinitionResult::Local(to_lsp_range( + def_range, line_index, text, + ))) +} 
+ +/// Check if the token is a field name in a field access expression where the base +/// is defined as an import. For example, `foo` in `lib.foo` where `lib = import "..."`. +/// +/// Returns `DefinitionResult::ImportField` if this is a field access on an import. +fn check_field_access_on_import( + token: &SyntaxToken, + _field_name: &str, + document: &Document, +) -> Option { + // Check if we're inside an ExprField (field access like .foo) + let expr_field = token.parent_ancestors().find_map(ExprField::cast)?; + + // Verify this token is the field name in the ExprField + let field_name_node = expr_field.field()?; + if field_name_node.syntax().text_range() != token.parent()?.text_range() { + return None; + } + + // Build the field chain by walking up the ExprField chain + // We collect fields from innermost to outermost, then reverse + let mut fields = Vec::new(); + let mut current_field = expr_field; + + // Add the current field name + if let Some(name) = current_field.field() { + if let Some(ident) = name.ident_lit() { + fields.push(ident.text().to_string()); + } + } + + // Walk up the ExprField chain (from outermost to innermost in the code) + // For `lib.foo.bar`, the current_field starts at `.bar`, its base is ExprField `.foo` + let base = loop { + // Get the base of the current field access + let base_expr = current_field.base()?; + + match base_expr.expr_base()? { + ExprBase::ExprField(inner_field) => { + // Another field access - collect its field name and continue + if let Some(name) = inner_field.field() { + if let Some(ident) = name.ident_lit() { + fields.push(ident.text().to_string()); + } + } + current_field = inner_field; + } + other => { + // Found the base (could be ExprVar, ExprImport, etc.) 
+ break other; + } + } + }; + + // Reverse to get fields in order from base outward + fields.reverse(); + + // Now check if the base resolves to an import + let (import_path, mut base_fields) = resolve_base_to_import(&base, document)?; + + // Combine base fields (from tracing through bindings) with field chain + base_fields.extend(fields); + + debug!( + import_path = %import_path, + fields = ?base_fields, + "found import field definition" + ); + + Some(DefinitionResult::ImportField { + path: import_path, + fields: base_fields, + }) +} + +/// Try to resolve the base expression to an import path and any additional fields. +/// +/// This handles cases like: +/// - `lib.foo` where `lib = import "lib.libsonnet"` +/// - Direct import: `(import "lib.libsonnet").foo` +/// - Chained bindings: `local x = import "..."; local y = x; y.foo` +/// +/// Returns the import path and any fields that were traced through the base. +fn resolve_base_to_import(base: &ExprBase, document: &Document) -> Option<(String, Vec)> { + // Use const_eval to trace through bindings + match trace_base(base, document)? { + ConstEvalResult::Import { path, fields } => Some((path, fields)), + ConstEvalResult::Std { .. } | ConstEvalResult::Local { .. } => None, + } +} + +/// A binding visible at a given position. +#[derive(Debug, Clone)] +pub struct VisibleBinding { + /// The name of the binding. + pub name: String, + /// The kind of binding. + pub kind: BindingKind, + /// The text range of the definition. + pub range: TextRange, +} + +/// The kind of a binding. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum BindingKind { + /// A local variable binding (local x = ...). + LocalVariable, + /// A function binding (local f(x) = ...). + LocalFunction, + /// A function parameter. + Parameter, + /// A for-comprehension variable. + ForVariable, +} + +/// Collect all visible bindings at the given byte offset. 
+pub fn collect_visible_bindings(document: &Document, position: LspPosition) -> Vec { + let text = document.text(); + let line_index = document.line_index(); + + // Convert LSP position to byte offset + let Some(offset) = line_index.offset(position, text) else { + return Vec::new(); + }; + + // Get the AST root + let ast = document.ast(); + let root = ast.syntax(); + + // Find a starting node for the scope walk + // Try token at offset first, then covering element, then root + let start_node = if let Some(token) = token_at_offset(root, offset) { + token.parent() + } else { + // At whitespace or EOF - find the deepest node that contains this offset + find_node_at_offset(root, offset) + }; + + let Some(mut current) = start_node else { + // Fall back to root node + return collect_bindings_from_root(root, u32::from(offset)); + }; + + // Walk up the scope chain and collect all bindings + let mut bindings = Vec::new(); + let mut seen_names = std::collections::HashSet::new(); + + // First collect from current node if it's a scope + if is_scope_node(¤t) { + // Create a dummy child at the end for scope checking + collect_scope_bindings_at_offset( + ¤t, + u32::from(offset), + &mut bindings, + &mut seen_names, + ); + } + + while let Some(parent) = current.parent() { + collect_scope_bindings_at_offset( + &parent, + u32::from(offset), + &mut bindings, + &mut seen_names, + ); + current = parent; + } + + bindings +} + +/// Check if a node is a scope-introducing node. +fn is_scope_node(node: &SyntaxNode) -> bool { + matches!( + node.kind(), + SyntaxKind::EXPR + | SyntaxKind::EXPR_FUNCTION + | SyntaxKind::BIND_FUNCTION + | SyntaxKind::FOR_SPEC + | SyntaxKind::OBJ_BODY_MEMBER_LIST + | SyntaxKind::EXPR_ARRAY_COMP + | SyntaxKind::OBJ_BODY_COMP + ) +} + +/// Collect bindings from a scope using offset for visibility checking. 
+fn collect_scope_bindings_at_offset( + scope: &SyntaxNode, + offset: u32, + bindings: &mut Vec, + seen: &mut std::collections::HashSet, +) { + match scope.kind() { + SyntaxKind::EXPR => { + collect_expr_bindings_at_offset(scope, offset, bindings, seen); + } + SyntaxKind::EXPR_FUNCTION => { + collect_function_params(scope, bindings, seen); + } + SyntaxKind::BIND_FUNCTION => { + collect_bind_function_params(scope, bindings, seen); + } + SyntaxKind::FOR_SPEC => { + collect_for_binding(scope, bindings, seen); + } + SyntaxKind::OBJ_BODY_MEMBER_LIST => { + collect_object_locals(scope, bindings, seen); + } + SyntaxKind::EXPR_ARRAY_COMP | SyntaxKind::OBJ_BODY_COMP => { + collect_comprehension_bindings(scope, bindings, seen); + } + _ => {} + } +} + +/// Collect local bindings from an Expr using offset for visibility. +fn collect_expr_bindings_at_offset( + expr: &SyntaxNode, + offset: u32, + bindings: &mut Vec, + seen: &mut std::collections::HashSet, +) { + let text_size = rowan::TextSize::from(offset); + + for stmt_node in expr.children() { + if stmt_node.kind() == SyntaxKind::STMT_LOCAL { + // Only consider bindings that appear before our position + if stmt_node.text_range().end() > text_size { + continue; + } + + if let Some(stmt_local) = StmtLocal::cast(stmt_node) { + for bind in stmt_local.binds() { + if let Some(binding) = extract_binding(&bind) { + if !seen.contains(&binding.name) { + seen.insert(binding.name.clone()); + bindings.push(binding); + } + } + } + } + } + } +} + +/// Collect bindings starting from the root (fallback). +fn collect_bindings_from_root(root: &SyntaxNode, offset: u32) -> Vec { + let mut bindings = Vec::new(); + let mut seen = std::collections::HashSet::new(); + + // Walk the entire tree looking for visible bindings + for node in root.descendants() { + if is_scope_node(&node) { + collect_scope_bindings_at_offset(&node, offset, &mut bindings, &mut seen); + } + } + + bindings +} + +/// Extract a binding from a Bind node. 
+fn extract_binding(bind: &Bind) -> Option { + match bind { + Bind::BindDestruct(bd) => { + let destruct = bd.into()?; + if let Destruct::DestructFull(full) = destruct { + let bind_name = full.name()?; + let ident = bind_name.ident_lit()?; + return Some(VisibleBinding { + name: ident.text().to_string(), + kind: BindingKind::LocalVariable, + range: bind_name.syntax().text_range(), + }); + } + None + } + Bind::BindFunction(bf) => { + let bind_name = bf.name()?; + let ident = bind_name.ident_lit()?; + Some(VisibleBinding { + name: ident.text().to_string(), + kind: BindingKind::LocalFunction, + range: bind_name.syntax().text_range(), + }) + } + } +} + +/// Collect function parameters. +fn collect_function_params( + func_node: &SyntaxNode, + bindings: &mut Vec, + seen: &mut std::collections::HashSet, +) { + let Some(func) = ExprFunction::cast(func_node.clone()) else { + return; + }; + let Some(params) = func.params_desc() else { + return; + }; + + for param in params.params() { + if let Some(binding) = extract_param_binding(¶m) { + if !seen.contains(&binding.name) { + seen.insert(binding.name.clone()); + bindings.push(binding); + } + } + } +} + +/// Collect BindFunction parameters. +fn collect_bind_function_params( + func_node: &SyntaxNode, + bindings: &mut Vec, + seen: &mut std::collections::HashSet, +) { + let Some(func) = BindFunction::cast(func_node.clone()) else { + return; + }; + let Some(params) = func.params() else { + return; + }; + + for param in params.params() { + if let Some(binding) = extract_param_binding(¶m) { + if !seen.contains(&binding.name) { + seen.insert(binding.name.clone()); + bindings.push(binding); + } + } + } +} + +/// Extract a binding from a Param node. 
+fn extract_param_binding(param: &Param) -> Option { + let destruct = param.destruct()?; + if let Destruct::DestructFull(full) = destruct { + let param_name = full.name()?; + let ident = param_name.ident_lit()?; + return Some(VisibleBinding { + name: ident.text().to_string(), + kind: BindingKind::Parameter, + range: param_name.syntax().text_range(), + }); + } + None +} + +/// Collect for-comprehension binding. +fn collect_for_binding( + for_node: &SyntaxNode, + bindings: &mut Vec, + seen: &mut std::collections::HashSet, +) { + let Some(for_spec) = ForSpec::cast(for_node.clone()) else { + return; + }; + let Some(destruct) = for_spec.bind() else { + return; + }; + + if let Destruct::DestructFull(full) = destruct { + if let Some(bind_name) = full.name() { + if let Some(ident) = bind_name.ident_lit() { + let name = ident.text().to_string(); + if !seen.contains(&name) { + seen.insert(name.clone()); + bindings.push(VisibleBinding { + name, + kind: BindingKind::ForVariable, + range: bind_name.syntax().text_range(), + }); + } + } + } + } +} + +/// Collect object local bindings. +fn collect_object_locals( + obj_body: &SyntaxNode, + bindings: &mut Vec, + seen: &mut std::collections::HashSet, +) { + for member_node in obj_body.children() { + if member_node.kind() != SyntaxKind::MEMBER_BIND_STMT { + continue; + } + let Some(member_bind) = MemberBindStmt::cast(member_node) else { + continue; + }; + let Some(obj_local) = member_bind.obj_local() else { + continue; + }; + let Some(bind) = obj_local.bind() else { + continue; + }; + let Some(binding) = extract_binding(&bind) else { + continue; + }; + if seen.contains(&binding.name) { + continue; + } + seen.insert(binding.name.clone()); + bindings.push(binding); + } +} + +/// Collect bindings from comprehension FOR_SPEC children. 
+fn collect_comprehension_bindings( + comp_node: &SyntaxNode, + bindings: &mut Vec, + seen: &mut std::collections::HashSet, +) { + for child in comp_node.children() { + if child.kind() != SyntaxKind::FOR_SPEC { + continue; + } + collect_for_binding(&child, bindings, seen); + } +} + +#[cfg(test)] +mod tests { + use jrsonnet_lsp_document::DocVersion; + + use super::*; + + /// Helper to extract local range from definition result. + fn expect_local(result: Option) -> Range { + match result { + Some(DefinitionResult::Local(r)) => r, + other => panic!("Expected Local definition, got {:?}", other), + } + } + + #[test] + fn test_local_variable_definition() { + let code = r"local x = 1; x + 1"; + // ^^^^^^ def ^ use at position 13 + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + // Position at the 'x' in 'x + 1' (character 13) + let pos = (0, 13).into(); + + let result = goto_definition(&doc, pos); + let r = expect_local(result); + // Definition should be at position 6 (the 'x' in 'local x') + assert_eq!(r.start.line, 0); + assert_eq!(r.start.character, 6); + } + + #[test] + fn test_function_parameter_definition() { + let code = r"local f(x) = x * 2; f(3)"; + // ^ param ^ use + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + // Position at the 'x' in 'x * 2' (character 13) + let pos = (0, 13).into(); + + let result = goto_definition(&doc, pos); + let r = expect_local(result); + // Definition should be at position 8 (the 'x' parameter) + assert_eq!(r.start.line, 0); + assert_eq!(r.start.character, 8); + } + + #[test] + fn test_nested_local_shadowing() { + let code = r"local x = 1; local x = 2; x"; + // ^def1 ^def2 ^use + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + // Position at the final 'x' (character 26) + let pos = (0, 26).into(); + + let result = goto_definition(&doc, pos); + let r = expect_local(result); + // Should find the second (closer) definition at position 19 + assert_eq!(r.start.character, 19); + } + 
+ #[test] + fn test_no_definition_for_undefined() { + let code = r"local x = 1; y + 1"; + // ^ undefined + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + // Position at 'y' (character 13) + let pos = (0, 13).into(); + + let result = goto_definition(&doc, pos); + assert!( + result.is_none(), + "Should not find definition for undefined y" + ); + } + + #[test] + fn test_import_definition() { + let code = r#"import "lib/utils.libsonnet""#; + // ^ cursor in string + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + // Position inside the import string (character 8) + let pos = (0, 8).into(); + + let result = goto_definition(&doc, pos); + match result { + Some(DefinitionResult::Import(path)) => { + assert_eq!(path, "lib/utils.libsonnet"); + } + other => panic!("Expected Import definition, got {:?}", other), + } + } + + #[test] + fn test_importstr_definition() { + let code = r#"importstr "data/config.txt""#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + let pos = (0, 12).into(); + + let result = goto_definition(&doc, pos); + match result { + Some(DefinitionResult::Import(path)) => { + assert_eq!(path, "data/config.txt"); + } + other => panic!("Expected Import definition, got {:?}", other), + } + } + + #[test] + fn test_for_comprehension_variable() { + let code = r"[x * 2 for x in [1, 2, 3]]"; + // ^use ^def + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + // Position at 'x' in 'x * 2' (character 1) + let pos = (0, 1).into(); + + let result = goto_definition(&doc, pos); + let r = expect_local(result); + // Definition should be at position 11 (the 'x' in 'for x') + assert_eq!(r.start.line, 0); + assert_eq!(r.start.character, 11); + } + + #[test] + fn test_object_local_binding() { + let code = r"{ local helper = 1, result: helper }"; + // ^def ^use + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + // Position at 'helper' in 'result: helper' (character 28) + let pos = (0, 
28).into(); + + let result = goto_definition(&doc, pos); + let r = expect_local(result); + // Definition should be at position 8 (the 'helper' in 'local helper') + assert_eq!(r.start.line, 0); + assert_eq!(r.start.character, 8); + } + + #[test] + fn test_field_access_on_import() { + let code = r#"local lib = import "lib.libsonnet"; lib.foo"#; + // ^field access + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + // Position at 'foo' in 'lib.foo' (character 40) + let pos = (0, 40).into(); + + let result = goto_definition(&doc, pos); + match result { + Some(DefinitionResult::ImportField { path, fields }) => { + assert_eq!(path, "lib.libsonnet"); + assert_eq!(fields, vec!["foo"]); + } + other => panic!("Expected ImportField definition, got {:?}", other), + } + } + + #[test] + fn test_nested_field_access_on_import() { + let code = r#"local lib = import "lib.libsonnet"; lib.foo.bar"#; + // ^field access + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + // Position at 'bar' in 'lib.foo.bar' (character 44) + let pos = (0, 44).into(); + + let result = goto_definition(&doc, pos); + match result { + Some(DefinitionResult::ImportField { path, fields }) => { + assert_eq!(path, "lib.libsonnet"); + assert_eq!(fields, vec!["foo", "bar"]); + } + other => panic!("Expected ImportField definition, got {:?}", other), + } + } + + #[test] + fn test_multiline_local_definition() { + let code = "local\n x\n =\n 1;\nx"; + // ^def(line 1, char 2) ^use(line 4, char 0) + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + // Position at 'x' on the last line + let pos = (4, 0).into(); + + let result = goto_definition(&doc, pos); + let r = expect_local(result); + // Definition should be on line 1, character 2 + assert_eq!(r.start.line, 1); + assert_eq!(r.start.character, 2); + } +} diff --git a/crates/jrsonnet-lsp-handlers/src/document_highlight.rs b/crates/jrsonnet-lsp-handlers/src/document_highlight.rs new file mode 100644 index 
00000000..b936194e --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/document_highlight.rs @@ -0,0 +1,101 @@ +//! Document highlight handler. +//! +//! Highlights all occurrences of the symbol under cursor in the current file. + +use jrsonnet_lsp_document::{to_lsp_range, token_at_offset, Document, LspPosition}; +use jrsonnet_lsp_scope::{ + find_definition_range, is_definition_site, is_variable_reference, ScopeResolver, +}; +use jrsonnet_rowan_parser::{AstNode, SyntaxKind}; +use lsp_types::{DocumentHighlight, DocumentHighlightKind}; + +/// Find document highlights for the symbol at the given position. +pub fn document_highlights(document: &Document, position: LspPosition) -> Vec { + let text = document.text(); + let line_index = document.line_index(); + let Some(offset) = line_index.offset(position, text) else { + return Vec::new(); + }; + + let ast = document.ast(); + let Some(token) = token_at_offset(ast.syntax(), offset) else { + return Vec::new(); + }; + + if token.kind() != SyntaxKind::IDENT { + return Vec::new(); + } + + let name = token.text(); + let definition_range = if is_definition_site(&token) { + token.parent().map(|p| p.text_range()) + } else if is_variable_reference(&token) { + find_definition_range(&token, name) + } else { + None + }; + let Some(def_range) = definition_range else { + return Vec::new(); + }; + + let resolver = ScopeResolver::new(ast.syntax()); + resolver + .find_references(ast.syntax(), name, def_range) + .into_iter() + .map(|range| DocumentHighlight { + range: to_lsp_range(range, line_index, text), + kind: Some(if range == def_range { + DocumentHighlightKind::WRITE + } else { + DocumentHighlightKind::READ + }), + }) + .collect() +} + +#[cfg(test)] +mod tests { + use jrsonnet_lsp_document::{DocVersion, Document}; + + use super::*; + + #[test] + fn test_document_highlights_local_variable() { + let code = "local x = 1; x + x"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let pos = (0, 13).into(); + + let mut 
highlights = document_highlights(&doc, pos); + highlights.sort_by_key(|h| h.range.start.character); + + assert_eq!(highlights.len(), 3); + assert_eq!(highlights[0].range.start.character, 6); + assert_eq!(highlights[0].kind, Some(DocumentHighlightKind::WRITE)); + assert_eq!(highlights[1].kind, Some(DocumentHighlightKind::READ)); + assert_eq!(highlights[2].kind, Some(DocumentHighlightKind::READ)); + } + + #[test] + fn test_document_highlights_non_identifier() { + let code = "local x = 1; x + x"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let pos = (0, 15).into(); + + assert!(document_highlights(&doc, pos).is_empty()); + } + + #[test] + fn test_document_highlights_respects_scope() { + let code = "local x = 1; (local x = 2; x) + x"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let pos = (0, 6).into(); + + let mut starts: Vec = document_highlights(&doc, pos) + .into_iter() + .map(|h| h.range.start.character) + .collect(); + starts.sort_unstable(); + + assert_eq!(starts, vec![6, 32]); + } +} diff --git a/crates/jrsonnet-lsp-handlers/src/formatting.rs b/crates/jrsonnet-lsp-handlers/src/formatting.rs new file mode 100644 index 00000000..ca41fc5f --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/formatting.rs @@ -0,0 +1,260 @@ +//! Document formatting handler. +//! +//! Formats Jsonnet code using an external formatter (jrsonnet-fmt, jsonnetfmt, etc.). + +use std::process::{Command, Stdio}; + +use lsp_types::{Position, Range, TextEdit}; +use serde::{Deserialize, Serialize}; + +/// Formatting configuration options. +/// +/// These options correspond to the go-jsonnet formatter (jsonnetfmt) options. +/// Not all formatters support all options - jrsonnet-fmt only supports `indent`. +#[derive(Debug, Clone, Default, Serialize, Deserialize)] +#[serde(default)] +pub struct FormattingConfig { + /// Number of spaces per indentation level (default: 2). 
+ #[serde(alias = "Indent")] + pub indent: Option, + + /// Maximum blank lines allowed (default: 2). + #[serde(alias = "MaxBlankLines")] + pub max_blank_lines: Option, + + /// String style: "double", "single", or "leave" (default: "leave"). + #[serde(alias = "StringStyle")] + pub string_style: Option, + + /// Comment style: "hash", "slash", or "leave" (default: "leave"). + #[serde(alias = "CommentStyle")] + pub comment_style: Option, + + /// Add padding inside arrays (default: false). + #[serde(alias = "PadArrays")] + pub pad_arrays: Option, + + /// Add padding inside objects (default: true). + #[serde(alias = "PadObjects")] + pub pad_objects: Option, + + /// Pretty print fields (one per line) (default: true). + #[serde(alias = "PrettyFieldNames")] + pub pretty_field_names: Option, + + /// Sort imports (default: true). + #[serde(alias = "SortImports")] + pub sort_imports: Option, + + /// Use implicit plus for object inheritance (default: true). + #[serde(alias = "UseImplicitPlus")] + pub use_implicit_plus: Option, + + /// Strip everything after // (default: false). + #[serde(alias = "StripEverything")] + pub strip_everything: Option, + + /// Strip all comments (default: false). + #[serde(alias = "StripComments")] + pub strip_comments: Option, + + /// Strip all comments except those at the start (default: false). + #[serde(alias = "StripAllButComments")] + pub strip_all_but_comments: Option, + + /// Path to the formatter binary (default: searches PATH for jrsonnet-fmt or jsonnetfmt). + #[serde(alias = "FormatterPath")] + pub formatter_path: Option, +} + +/// Format a Jsonnet document with default configuration. +/// +/// Returns a list of text edits to apply to the document. +/// On error, returns None. +pub fn format_document(text: &str) -> Option> { + format_document_with_config(text, &FormattingConfig::default()) +} + +/// Format a Jsonnet document with the given configuration. +/// +/// Returns a list of text edits to apply to the document. 
+/// On error, returns None. +pub fn format_document_with_config(text: &str, config: &FormattingConfig) -> Option> { + // Try to run the formatter + let formatted = run_formatter(text, config)?; + + if formatted == text { + // No changes needed + return Some(Vec::new()); + } + + // Return a single edit that replaces the entire document + let lines: Vec<&str> = text.lines().collect(); + let last_line = lines.len().saturating_sub(1) as u32; + let last_col = lines.last().map_or(0, |l| l.len()) as u32; + + Some(vec![TextEdit { + range: Range { + start: Position { + line: 0, + character: 0, + }, + end: Position { + line: last_line, + character: last_col, + }, + }, + new_text: formatted, + }]) +} + +/// Run the formatter on the input text. +/// +/// If `config.formatter_path` is set, that formatter is used directly. +/// Otherwise, searches PATH for jrsonnet-fmt or jsonnetfmt. +fn run_formatter(input: &str, config: &FormattingConfig) -> Option { + // If a custom formatter path is provided, use it directly + if let Some(path) = &config.formatter_path { + return try_run_formatter_binary(path, input, config); + } + + // Try common locations for the formatter + let formatter_names = ["jrsonnet-fmt", "jsonnetfmt"]; + + for name in &formatter_names { + if let Some(result) = try_run_formatter_binary(name, input, config) { + return Some(result); + } + } + + None +} + +/// Build CLI arguments for the formatter based on config. 
+/// +/// Different formatters support different options: +/// - jrsonnet-fmt: only --indent +/// - jsonnetfmt: all options +fn build_formatter_args(config: &FormattingConfig) -> Vec { + let mut args = vec!["-".to_string()]; // Read from stdin + + if let Some(indent) = config.indent { + args.push("--indent".to_string()); + args.push(indent.to_string()); + } + + if let Some(max_blank_lines) = config.max_blank_lines { + args.push("--max-blank-lines".to_string()); + args.push(max_blank_lines.to_string()); + } + + if let Some(ref style) = config.string_style { + // jsonnetfmt uses single-letter codes: d=double, s=single, l=leave + let code = match style.to_lowercase().as_str() { + "double" | "d" => "d", + "single" | "s" => "s", + _ => "l", + }; + args.push("--string-style".to_string()); + args.push(code.to_string()); + } + + if let Some(ref style) = config.comment_style { + // jsonnetfmt uses single-letter codes: h=hash, s=slash, l=leave + let code = match style.to_lowercase().as_str() { + "hash" | "h" => "h", + "slash" | "s" => "s", + _ => "l", + }; + args.push("--comment-style".to_string()); + args.push(code.to_string()); + } + + if config.pad_arrays == Some(true) { + args.push("--pad-arrays".to_string()); + } + + if config.pad_objects == Some(false) { + args.push("--no-pad-objects".to_string()); + } + + if config.pretty_field_names == Some(false) { + args.push("--no-pretty-field-names".to_string()); + } + + if config.sort_imports == Some(false) { + args.push("--no-sort-imports".to_string()); + } + + if config.use_implicit_plus == Some(false) { + args.push("--no-use-implicit-plus".to_string()); + } + + if config.strip_everything == Some(true) { + args.push("--strip-everything".to_string()); + } + + if config.strip_comments == Some(true) { + args.push("--strip-comments".to_string()); + } + + if config.strip_all_but_comments == Some(true) { + args.push("--strip-all-but-comments".to_string()); + } + + args +} + +/// Try to run a specific formatter binary. 
+fn try_run_formatter_binary(name: &str, input: &str, config: &FormattingConfig) -> Option { + let args = build_formatter_args(config); + + let mut child = Command::new(name) + .args(&args) + .stdin(Stdio::piped()) + .stdout(Stdio::piped()) + .stderr(Stdio::piped()) + .spawn() + .ok()?; + + // Write input to stdin + if let Some(mut stdin) = child.stdin.take() { + use std::io::Write; + stdin.write_all(input.as_bytes()).ok()?; + } + + let output = child.wait_with_output().ok()?; + + if output.status.success() { + String::from_utf8(output.stdout).ok() + } else { + None + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_format_already_formatted() { + // This test only passes if jrsonnet-fmt is available + let code = "{\n foo: 1,\n}\n"; + + if let Some(edits) = format_document(code) { + // Either no edits (already formatted) or some edits + // We can't assert specific behavior without the formatter + let _ = edits; + } + // If formatter not available, test is skipped implicitly + } + + #[test] + fn test_format_returns_none_without_formatter() { + // Test that we handle missing formatter gracefully + // This is difficult to test since it depends on the environment + // Just verify the function doesn't panic + let code = "{ foo: 1 }"; + let _ = format_document(code); + } +} diff --git a/crates/jrsonnet-lsp-handlers/src/hover.rs b/crates/jrsonnet-lsp-handlers/src/hover.rs new file mode 100644 index 00000000..8678e329 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/hover.rs @@ -0,0 +1,377 @@ +//! Hover handler for providing documentation on hover. +//! +//! Supports: +//! - Standard library functions (std.*) +//! 
- Local variable definitions (shows first few lines) + +use jrsonnet_lsp_document::{token_at_offset, ByteOffset, Document, LspPosition}; +use jrsonnet_lsp_inference::TypeAnalysis; +use jrsonnet_lsp_stdlib as stdlib; +use jrsonnet_rowan_parser::{ + nodes::{Bind, ExprBase, ExprField}, + AstNode, SyntaxKind, SyntaxToken, +}; +use lsp_types::{Hover, HoverContents, MarkupContent, MarkupKind}; + +use crate::definition::{goto_definition, DefinitionResult}; + +/// Maximum number of lines to show in hover for local definitions. +const MAX_HOVER_LINES: usize = 5; + +/// Get hover information for the given position. +/// +/// The `analysis` parameter should be pre-computed using `TypeProvider` to ensure +/// that import types are properly resolved. +pub fn hover(document: &Document, position: LspPosition, analysis: &TypeAnalysis) -> Option { + let text = document.text(); + let line_index = document.line_index(); + + let offset = line_index.offset(position, text)?; + let ast = document.ast(); + let token = token_at_offset(ast.syntax(), offset)?; + + // Only provide hover for meaningful tokens + if !is_meaningful_token(&token) { + return None; + } + + if let Some(hover) = check_stdlib_hover(&token) { + return Some(hover); + } + + if let Some(hover) = check_local_hover(document, analysis, position, text, offset) { + return Some(hover); + } + + // Fall back to showing just the inferred type + if let Some(hover) = check_type_hover(document, analysis, offset) { + return Some(hover); + } + + None +} + +/// Check if a token is meaningful for hover (identifiers, literals, keywords). 
+fn is_meaningful_token(token: &SyntaxToken) -> bool { + matches!( + token.kind(), + SyntaxKind::IDENT + | SyntaxKind::FLOAT + | SyntaxKind::STRING_DOUBLE + | SyntaxKind::STRING_SINGLE + | SyntaxKind::STRING_DOUBLE_VERBATIM + | SyntaxKind::STRING_SINGLE_VERBATIM + | SyntaxKind::STRING_BLOCK + | SyntaxKind::NULL_KW + | SyntaxKind::TRUE_KW + | SyntaxKind::FALSE_KW + | SyntaxKind::SELF_KW + | SyntaxKind::SUPER_KW + | SyntaxKind::DOLLAR + ) +} + +/// Check for inferred type at position. +fn check_type_hover( + document: &Document, + analysis: &TypeAnalysis, + offset: ByteOffset, +) -> Option { + let ast = document.ast(); + let ty = analysis.type_at_position(ast.syntax(), offset.into())?; + + Some(Hover { + contents: HoverContents::Markup(MarkupContent { + kind: MarkupKind::Markdown, + value: format!("`{}`", analysis.display(ty)), + }), + range: None, + }) +} + +/// Check for hover on a local variable reference. +fn check_local_hover( + document: &Document, + analysis: &TypeAnalysis, + position: LspPosition, + text: &str, + offset: ByteOffset, +) -> Option { + let result = goto_definition(document, position)?; + + // Get the inferred type at this position. If the local definition site only + // reports `any`, fall back to the bound value expression type. 
+ let ast = document.ast(); + let mut inferred_type = analysis + .type_at_position(ast.syntax(), offset.into()) + .map(|ty| analysis.display(ty)); + + if matches!(inferred_type.as_deref(), None | Some("any")) { + if let DefinitionResult::Local(range) = &result { + inferred_type = definition_value_type(document, analysis, range); + } + } + + let type_str = inferred_type + .map(|ty| format!("`{ty}`\n\n")) + .unwrap_or_default(); + + match result { + DefinitionResult::Local(range) => { + let start_line = range.start.line as usize; + let lines: Vec<&str> = text.lines().collect(); + let num_lines = lines.len(); + + if start_line >= num_lines { + return None; + } + + let mut def_end_line = start_line; + for (i, line) in lines + .iter() + .enumerate() + .skip(start_line) + .take(MAX_HOVER_LINES) + { + def_end_line = i; + if line.contains(';') { + break; + } + } + + let preview_lines: Vec<&str> = lines[start_line..=def_end_line] + .iter() + .take(MAX_HOVER_LINES) + .copied() + .collect(); + + let mut preview = preview_lines.join("\n"); + if def_end_line - start_line >= MAX_HOVER_LINES { + preview.push_str("\n..."); + } + + Some(Hover { + contents: HoverContents::Markup(MarkupContent { + kind: MarkupKind::Markdown, + value: format!("{}```jsonnet\n{}\n```", type_str, preview), + }), + range: None, + }) + } + DefinitionResult::Import(path) => Some(Hover { + contents: HoverContents::Markup(MarkupContent { + kind: MarkupKind::Markdown, + value: format!("{type_str}`{path}`"), + }), + range: None, + }), + DefinitionResult::ImportField { path, fields } => { + let field_chain = fields.join("."); + Some(Hover { + contents: HoverContents::Markup(MarkupContent { + kind: MarkupKind::Markdown, + value: format!("{type_str}`{field_chain}` from `{path}`"), + }), + range: None, + }) + } + } +} + +fn definition_value_type( + document: &Document, + analysis: &TypeAnalysis, + range: &lsp_types::Range, +) -> Option { + let text = document.text(); + let line_index = document.line_index(); + let 
ast = document.ast(); + + let def_pos: LspPosition = (range.start.line, range.start.character).into(); + let def_offset = line_index.offset(def_pos, text)?; + let token = token_at_offset(ast.syntax(), def_offset)?; + + let bind = token.parent()?.ancestors().find_map(Bind::cast)?; + let value = match bind { + Bind::BindDestruct(bind) => bind.value()?, + Bind::BindFunction(bind) => bind.value()?, + }; + let ty = analysis.type_for_range(value.syntax().text_range())?; + Some(analysis.display(ty)) +} + +/// Check if the token is a stdlib function call and return hover info. +fn check_stdlib_hover(token: &SyntaxToken) -> Option { + if token.kind() != SyntaxKind::IDENT { + return None; + } + + let name = token.text(); + + // Walk up: IDENT -> NAME -> EXPR_FIELD + let parent = token.parent()?; + if parent.kind() != SyntaxKind::NAME { + return None; + } + + let field = ExprField::cast(parent.parent()?)?; + + // Check if base is `std` + let base = field.base()?; + let ExprBase::ExprVar(var) = base.expr_base()? 
else { + return None; + }; + if var.name()?.ident_lit()?.text() != "std" { + return None; + } + + stdlib::ensure_initialized(); + let doc = stdlib::get_stdlib_doc(name)?; + + Some(Hover { + contents: HoverContents::Markup(MarkupContent { + kind: MarkupKind::Markdown, + value: doc.to_markdown(), + }), + range: None, + }) +} + +#[cfg(test)] +mod tests { + use std::sync::Arc; + + use assert_matches::assert_matches; + use jrsonnet_lsp_document::DocVersion; + use jrsonnet_lsp_types::GlobalTyStore; + use rstest::rstest; + + use super::*; + + fn get_hover(code: &str, line: u32, character: u32) -> Option { + let global_types = Arc::new(GlobalTyStore::new()); + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let analysis = TypeAnalysis::analyze_with_global(&doc, global_types); + let pos = (line, character).into(); + hover(&doc, pos, &analysis) + } + + #[rstest] + #[case( + "std.map(function(x) x, [])", + 0, 4, + "```jsonnet\nstd.map(func, arr)\n```\n\nApplies `func` to each element of `arr`.\n\n**Example:**\n```jsonnet\nstd.map(function(x) x * 2, [1,2,3]) // [2, 4, 6]\n```" + )] + #[case( + "std.filter(function(x) x > 0, [1, -1, 2])", + 0, 4, + "```jsonnet\nstd.filter(func, arr)\n```\n\nReturns elements of `arr` where `func(x)` is true.\n\n**Example:**\n```jsonnet\nstd.filter(function(x) x > 1, [1,2,3]) // [2, 3]\n```" + )] + fn test_stdlib_hover( + #[case] code: &str, + #[case] line: u32, + #[case] char: u32, + #[case] expected: &str, + ) { + let result = get_hover(code, line, char); + assert_matches!(result, Some(Hover { + contents: HoverContents::Markup(MarkupContent { kind: MarkupKind::Markdown, value }), + range: None + }) => { + assert_eq!(value, expected); + }); + } + + #[rstest] + #[case( + "local foo = 1; foo", + 0, + 15, + "`number`\n\n```jsonnet\nlocal foo = 1; foo\n```" + )] + #[case( + "local add(a, b) = a + b; add(1, 2)", + 0, + 25, + "`function(a, b)`\n\n```jsonnet\nlocal add(a, b) = a + b; add(1, 2)\n```" + )] + fn test_local_hover( + 
#[case] code: &str, + #[case] line: u32, + #[case] char: u32, + #[case] expected: &str, + ) { + let result = get_hover(code, line, char); + assert_matches!(result, Some(Hover { + contents: HoverContents::Markup(MarkupContent { kind: MarkupKind::Markdown, value }), + range: None + }) => { + assert_eq!(value, expected); + }); + } + + #[rstest] + #[case( + r#"import "lib/utils.libsonnet""#, + 0, + 10, + "`any`\n\n`lib/utils.libsonnet`" + )] + #[case( + r#"local lib = import "lib.libsonnet"; lib.foo"#, + 0, + 40, + "`any`\n\n`foo` from `lib.libsonnet`" + )] + fn test_import_hover( + #[case] code: &str, + #[case] line: u32, + #[case] char: u32, + #[case] expected: &str, + ) { + let result = get_hover(code, line, char); + assert_matches!(result, Some(Hover { + contents: HoverContents::Markup(MarkupContent { kind: MarkupKind::Markdown, value }), + range: None + }) => { + assert_eq!(value, expected); + }); + } + + #[rstest] + #[case( + "local x = 42; x", + 0, + 14, + "`number`\n\n```jsonnet\nlocal x = 42; x\n```" + )] + #[case( + "local obj = { a: 1, b: \"hello\" }; obj", + 0, + 35, + "`{ a, b }`\n\n```jsonnet\nlocal obj = { a: 1, b: \"hello\" }; obj\n```" + )] + #[case("42", 0, 0, "`number`")] + #[case("\"hello\"", 0, 1, "`string`")] + #[case("[1, 2, 3]", 0, 1, "`number`")] + fn test_type_inference_hover( + #[case] code: &str, + #[case] line: u32, + #[case] char: u32, + #[case] expected: &str, + ) { + let result = get_hover(code, line, char); + assert_matches!(result, Some(Hover { + contents: HoverContents::Markup(MarkupContent { kind: MarkupKind::Markdown, value }), + range: None + }) => { + assert_eq!(value, expected); + }); + } + + #[test] + fn test_no_hover_on_whitespace() { + let result = get_hover("local x = 1; x", 0, 13); + assert_matches!(result, None); + } +} diff --git a/crates/jrsonnet-lsp-handlers/src/inlay_hint.rs b/crates/jrsonnet-lsp-handlers/src/inlay_hint.rs new file mode 100644 index 00000000..48cc6e8c --- /dev/null +++ 
b/crates/jrsonnet-lsp-handlers/src/inlay_hint.rs @@ -0,0 +1,228 @@ +//! Inlay hint handler. +//! +//! Provides type hints for local bindings and local function return values. + +use jrsonnet_lsp_document::{to_lsp_range, Document}; +use jrsonnet_lsp_inference::TypeAnalysis; +use jrsonnet_rowan_parser::{ + nodes::{Bind, BindDestruct, BindFunction, Destruct, StmtLocal}, + AstNode, +}; +use lsp_types::{InlayHint, InlayHintKind, InlayHintLabel, Position, Range}; + +fn type_hint(position: Position, label: String) -> InlayHint { + InlayHint { + position, + label: InlayHintLabel::String(label), + kind: Some(InlayHintKind::TYPE), + text_edits: None, + tooltip: None, + padding_left: Some(true), + padding_right: None, + data: None, + } +} + +fn position_in_range(pos: Position, range: Range) -> bool { + (range.start.line, range.start.character) <= (pos.line, pos.character) + && (pos.line, pos.character) <= (range.end.line, range.end.character) +} + +fn is_uninformative_type(type_str: &str) -> bool { + type_str == "any" || type_str == "function" +} + +/// Compute inlay hints for a visible range in a document. 
+pub fn inlay_hints( + document: &Document, + analysis: &TypeAnalysis, + visible_range: Range, +) -> Vec { + let ast = document.ast(); + let text = document.text(); + let line_index = document.line_index(); + let mut hints = Vec::new(); + + for node in ast.syntax().descendants() { + if let Some(bind_func) = BindFunction::cast(node.clone()) { + let Some(name_node) = bind_func.name() else { + continue; + }; + let Some(body) = bind_func.value() else { + continue; + }; + let Some(ty) = analysis.type_for_range(body.syntax().text_range()) else { + continue; + }; + + let type_str = analysis.display(ty); + if is_uninformative_type(&type_str) { + continue; + } + + let name_range = to_lsp_range(name_node.syntax().text_range(), line_index, text); + if position_in_range(name_range.end, visible_range) { + hints.push(type_hint(name_range.end, format!(" -> {type_str}"))); + } + } + + if let Some(stmt_local) = StmtLocal::cast(node) { + for bind in stmt_local.binds() { + let Bind::BindDestruct(bind_destruct) = bind else { + continue; + }; + let Some(value) = bind_destruct.value() else { + continue; + }; + let Some(destruct) = BindDestruct::into(&bind_destruct) else { + continue; + }; + let Destruct::DestructFull(full) = destruct else { + continue; + }; + let Some(name_node) = full.name() else { + continue; + }; + let Some(ident) = name_node.ident_lit() else { + continue; + }; + let Some(ty) = analysis.type_for_range(value.syntax().text_range()) else { + continue; + }; + + let type_str = analysis.display(ty); + if is_uninformative_type(&type_str) { + continue; + } + + let name_range = to_lsp_range(ident.text_range(), line_index, text); + if position_in_range(name_range.end, visible_range) { + hints.push(type_hint(name_range.end, format!(": {type_str}"))); + } + } + } + } + + hints +} + +#[cfg(test)] +mod tests { + use std::sync::Arc; + + use jrsonnet_lsp_document::{DocVersion, Document}; + use jrsonnet_lsp_types::GlobalTyStore; + use lsp_types::{InlayHint, InlayHintKind, 
InlayHintLabel, Position, Range}; + + use super::*; + + fn full_line_range() -> Range { + Range { + start: Position { + line: 0, + character: 0, + }, + end: Position { + line: 0, + character: 200, + }, + } + } + + fn test_analysis(doc: &Document) -> TypeAnalysis { + let global_types = Arc::new(GlobalTyStore::new()); + TypeAnalysis::analyze_with_global(doc, global_types) + } + + fn assert_hints_eq(actual: &[InlayHint], expected: Vec) { + let actual_json = serde_json::to_value(actual).expect("actual hints should serialize"); + let expected_json = + serde_json::to_value(expected).expect("expected hints should serialize"); + assert_eq!(actual_json, expected_json); + } + + #[test] + fn test_local_binding_type_hint() { + let doc = Document::new("local x = 1; x".to_string(), DocVersion::new(1)); + let analysis = test_analysis(&doc); + let hints = inlay_hints(&doc, &analysis, full_line_range()); + + assert_hints_eq( + &hints, + vec![InlayHint { + position: Position { + line: 0, + character: 7, + }, + label: InlayHintLabel::String(": number".to_string()), + kind: Some(InlayHintKind::TYPE), + text_edits: None, + tooltip: None, + padding_left: Some(true), + padding_right: None, + data: None, + }], + ); + } + + #[test] + fn test_local_function_return_hint() { + let doc = Document::new("local f() = 42; f()".to_string(), DocVersion::new(1)); + let analysis = test_analysis(&doc); + let hints = inlay_hints(&doc, &analysis, full_line_range()); + + assert_hints_eq( + &hints, + vec![InlayHint { + position: Position { + line: 0, + character: 7, + }, + label: InlayHintLabel::String(" -> number".to_string()), + kind: Some(InlayHintKind::TYPE), + text_edits: None, + tooltip: None, + padding_left: Some(true), + padding_right: None, + data: None, + }], + ); + } + + #[test] + fn test_inlay_hint_respects_visible_range() { + let doc = Document::new( + "local x = 1; local y = 2; x + y".to_string(), + DocVersion::new(1), + ); + let analysis = test_analysis(&doc); + let range = Range { + start: 
Position { + line: 0, + character: 0, + }, + end: Position { + line: 0, + character: 12, + }, + }; + + let hints = inlay_hints(&doc, &analysis, range); + assert_hints_eq( + &hints, + vec![InlayHint { + position: Position { + line: 0, + character: 7, + }, + label: InlayHintLabel::String(": number".to_string()), + kind: Some(InlayHintKind::TYPE), + text_edits: None, + tooltip: None, + padding_left: Some(true), + padding_right: None, + data: None, + }], + ); + } +} diff --git a/crates/jrsonnet-lsp-handlers/src/lib.rs b/crates/jrsonnet-lsp-handlers/src/lib.rs new file mode 100644 index 00000000..c3c8a99f --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/lib.rs @@ -0,0 +1,31 @@ +//! LSP request and notification handlers. + +pub mod code_action; +pub mod code_lens; +pub mod completion; +pub mod definition; +pub mod document_highlight; +pub mod formatting; +pub mod hover; +pub mod inlay_hint; +pub mod references; +pub mod rename; +pub mod semantic_tokens; +pub mod signature_help; +pub mod symbols; + +pub use code_action::code_actions; +pub use code_lens::{code_lens, resolve_code_lens, CodeLensConfig}; +pub use completion::{completion, completion_with_import_roots}; +pub use definition::{ + collect_visible_bindings, goto_definition, BindingKind, DefinitionResult, VisibleBinding, +}; +pub use document_highlight::document_highlights; +pub use formatting::{format_document, format_document_with_config, FormattingConfig}; +pub use hover::hover; +pub use inlay_hint::inlay_hints; +pub use references::{find_cross_file_references, find_references}; +pub use rename::{prepare_rename, rename, rename_cross_file}; +pub use semantic_tokens::{legend as semantic_tokens_legend, semantic_tokens}; +pub use signature_help::signature_help; +pub use symbols::{document_symbols, workspace_symbols_for_document}; diff --git a/crates/jrsonnet-lsp-handlers/src/references.rs b/crates/jrsonnet-lsp-handlers/src/references.rs new file mode 100644 index 00000000..e5791edb --- /dev/null +++ 
b/crates/jrsonnet-lsp-handlers/src/references.rs @@ -0,0 +1,561 @@ +//! Find references handler. +//! +//! Finds all references to a symbol within the current document and across +//! all open documents (cross-file references). + +use std::path::Path; + +use jrsonnet_lsp_document::{to_lsp_range, token_at_offset, CanonicalPath, Document, LspPosition}; +use jrsonnet_lsp_import::get_import_path_from_node; +use jrsonnet_lsp_scope::{ + find_definition_range, is_at_file_scope, is_definition_site, is_variable_reference, + ScopeResolver, +}; +use jrsonnet_rowan_parser::{ + nodes::{Bind, Destruct, ExprBase, ExprField, StmtLocal}, + AstNode, SyntaxKind, SyntaxNode, +}; +use lsp_types::{Location, Uri}; +use rayon::prelude::*; +use rowan::TextRange; +use tracing::debug; + +/// Find all references to the symbol at the given position. +pub fn find_references( + document: &Document, + position: LspPosition, + uri: &Uri, + include_declaration: bool, +) -> Vec { + let text = document.text(); + let line_index = document.line_index(); + + let Some(offset) = line_index.offset(position, text) else { + return Vec::new(); + }; + + let ast = document.ast(); + + let Some(token) = token_at_offset(ast.syntax(), offset) else { + return Vec::new(); + }; + + if token.kind() != SyntaxKind::IDENT { + return Vec::new(); + } + + let name = token.text(); + + let definition_range = if is_definition_site(&token) { + token.parent().map(|p| p.text_range()) + } else if is_variable_reference(&token) { + find_definition_range(&token, name) + } else { + return Vec::new(); + }; + + let Some(def_range) = definition_range else { + return Vec::new(); + }; + + // Build scope resolver for O(1) lookups + let resolver = ScopeResolver::new(ast.syntax()); + let mut references = resolver.find_references(ast.syntax(), name, def_range); + + if !include_declaration { + references.retain(|r| *r != def_range); + } + + debug!(name = %name, count = references.len(), "found references"); + + references + .into_iter() + 
.map(|range| Location { + uri: uri.clone(), + range: to_lsp_range(range, line_index, text), + }) + .collect() +} + +/// Find cross-file references to a symbol. +/// +/// This function searches all provided documents for references to a symbol +/// that is exported from the current document (i.e., accessible via import). +/// +/// Returns references from other documents that import this file and use the symbol. +/// +/// The `documents` parameter is a slice of (path, document reference) pairs representing +/// all open documents to search. +pub fn find_cross_file_references<'a>( + current_document: &Document, + current_path: &CanonicalPath, + position: LspPosition, + documents: &[(&'a CanonicalPath, &'a Document)], +) -> Vec<Location> { + let text = current_document.text(); + let line_index = current_document.line_index(); + + // Convert LSP position to byte offset + let Some(offset) = line_index.offset(position, text) else { + return Vec::new(); + }; + + let ast = current_document.ast(); + + // Find the token at the offset + let Some(token) = token_at_offset(ast.syntax(), offset) else { + return Vec::new(); + }; + + // Must be an identifier + if token.kind() != SyntaxKind::IDENT { + return Vec::new(); + } + + let name = token.text().to_string(); + + // Check if this symbol is at file scope (could be exported) + // For now, we check if the symbol is defined at the top level of the file + let is_file_scope_definition = is_definition_site(&token) && is_at_file_scope(&token); + + if !is_file_scope_definition { + // If it's a reference, check if it comes from an import + if is_variable_reference(&token) { + // Check if this is a reference to an imported symbol + // For now, only handle local symbols + } + return Vec::new(); + } + + let current_path_str = current_path.as_path(); + + // Search all other documents for imports of this file (in parallel) + let references: Vec<Location> = documents + .par_iter() + .filter(|(doc_path, _)| *doc_path != current_path) + .flat_map(|(doc_path, doc)| { 
+ // Find imports in this document that point to our file + let imports = find_imports_of_file(doc, doc_path.as_path(), current_path_str); + + let doc_uri = doc_path.to_uri(); + let doc_text = doc.text(); + let doc_line_index = doc.line_index(); + + imports + .into_iter() + .flat_map(|import_info| { + // Find references to the imported name in this document + find_references_to_import(doc, &import_info, &name) + }) + .map(|range| Location { + uri: doc_uri.clone(), + range: to_lsp_range(range, doc_line_index, doc_text), + }) + .collect::<Vec<_>>() + }) + .collect(); + + references +} + +/// Information about an import statement. +struct ImportInfo { + /// The name this import is bound to (e.g., "lib" in "local lib = import 'lib.jsonnet'") + binding_name: String, +} + +/// Find imports in a document that point to a specific file. +fn find_imports_of_file(doc: &Document, doc_path: &Path, target_path: &Path) -> Vec<ImportInfo> { + let mut imports = Vec::new(); + let ast = doc.ast(); + let doc_dir = doc_path.parent(); + + for node in ast.syntax().descendants() { + if node.kind() == SyntaxKind::STMT_LOCAL { + if let Some(stmt_local) = StmtLocal::cast(node) { + for bind in stmt_local.binds() { + if let Some(info) = check_bind_for_import(&bind, doc_dir, target_path) { + imports.push(info); + } + } + } + } + } + + imports +} + +/// Check if a bind imports from a specific file. 
+fn check_bind_for_import( + bind: &Bind, + doc_dir: Option<&Path>, + target_path: &Path, +) -> Option<ImportInfo> { + match bind { + Bind::BindDestruct(bd) => { + let destruct = bd.into()?; + if let Destruct::DestructFull(full) = destruct { + let bind_name = full.name()?; + let name_text = bind_name.ident_lit()?.text().to_string(); + + // Check if the expression is an import + let expr = bd.value()?; + if let Some(import_path) = get_import_path_from_node(expr.syntax()) { + if import_resolves_to(doc_dir, &import_path, target_path) { + return Some(ImportInfo { + binding_name: name_text, + }); + } + } + } + None + } + Bind::BindFunction(_) => None, // Functions can't be imports + } +} + +/// Check if an import path resolves to the target file. +fn import_resolves_to(doc_dir: Option<&Path>, import_path: &str, target_path: &Path) -> bool { + let Some(dir) = doc_dir else { + return false; + }; + + let resolved = dir.join(import_path); + + // Try to canonicalize both paths for comparison + let resolved_canonical = resolved.canonicalize().ok(); + let target_canonical = target_path.canonicalize().ok(); + + match (resolved_canonical, target_canonical) { + (Some(r), Some(t)) => r == t, + _ => { + // Fallback to simple comparison + resolved == target_path + } + } +} + +/// Find references to an imported symbol in a document. +fn find_references_to_import( + doc: &Document, + import_info: &ImportInfo, + field_name: &str, +) -> Vec<TextRange> { + let mut references = Vec::new(); + let ast = doc.ast(); + + // Walk all tokens looking for field accesses on the imported name + for node in ast.syntax().descendants() { + // Look for field accesses: importName.fieldName + if node.kind() == SyntaxKind::EXPR_FIELD { + // Check if this is accessing the imported binding + if let Some(range) = check_field_access(&node, &import_info.binding_name, field_name) { + references.push(range); + } + } + } + + references +} + +/// Check if a field access is accessing a specific field on a specific binding. 
+fn check_field_access( + node: &SyntaxNode, + binding_name: &str, + field_name: &str, +) -> Option<TextRange> { + let field = ExprField::cast(node.clone())?; + let field_ident = field.field()?.ident_lit()?; + if field_ident.text() != field_name { + return None; + } + + let base = field.base()?.expr_base()?; + let ExprBase::ExprVar(var) = base else { + return None; + }; + if var.name()?.ident_lit()?.text() != binding_name { + return None; + } + + Some(field_ident.text_range()) +} + +#[cfg(test)] +mod tests { + use jrsonnet_lsp_document::DocVersion; + + use super::*; + + #[test] + fn test_find_local_variable_references() { + let code = "local x = 1; local y = x + x; x"; + // ^def ^ref ^ref ^ref + // 0123456789... + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let uri: Uri = "file:///test.jsonnet".parse().unwrap(); + + // Position on the definition of x + let pos = (0, 6).into(); + + let refs = find_references(&doc, pos, &uri, true); + // x at positions: 6 (def), 23, 27, 30 + assert_eq!( + refs, + vec![ + Location { + uri: uri.clone(), + range: lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 6 + }, + end: lsp_types::Position { + line: 0, + character: 7 + }, + }, + }, + Location { + uri: uri.clone(), + range: lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 23 + }, + end: lsp_types::Position { + line: 0, + character: 24 + }, + }, + }, + Location { + uri: uri.clone(), + range: lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 27 + }, + end: lsp_types::Position { + line: 0, + character: 28 + }, + }, + }, + Location { + uri, + range: lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 30 + }, + end: lsp_types::Position { + line: 0, + character: 31 + }, + }, + }, + ] + ); + } + + #[test] + fn test_find_references_exclude_declaration() { + let code = "local x = 1; x + x"; + // ^def ^ref ^ref + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let uri: 
Uri = "file:///test.jsonnet".parse().unwrap(); + + let pos = (0, 6).into(); + + let refs = find_references(&doc, pos, &uri, false); + // Only the two uses at positions 13 and 17, not the definition + assert_eq!( + refs, + vec![ + Location { + uri: uri.clone(), + range: lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 13 + }, + end: lsp_types::Position { + line: 0, + character: 14 + }, + }, + }, + Location { + uri, + range: lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 17 + }, + end: lsp_types::Position { + line: 0, + character: 18 + }, + }, + }, + ] + ); + } + + #[test] + fn test_find_parameter_references() { + let code = "local f(x) = x * x; f(1)"; + // ^def ^ref ^ref + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let uri: Uri = "file:///test.jsonnet".parse().unwrap(); + + // Position on parameter x + let pos = (0, 8).into(); + + let refs = find_references(&doc, pos, &uri, true); + // x at positions: 8 (def), 13, 17 + assert_eq!( + refs, + vec![ + Location { + uri: uri.clone(), + range: lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 8 + }, + end: lsp_types::Position { + line: 0, + character: 9 + }, + }, + }, + Location { + uri: uri.clone(), + range: lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 13 + }, + end: lsp_types::Position { + line: 0, + character: 14 + }, + }, + }, + Location { + uri, + range: lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 17 + }, + end: lsp_types::Position { + line: 0, + character: 18 + }, + }, + }, + ] + ); + } + + #[test] + fn test_no_references_for_different_scope() { + let code = "local x = 1; local f(x) = x; x"; + // ^def1 ^def2 ^ref2 ^ref1 + // The last 'x' refers to def1, not def2 + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let uri: Uri = "file:///test.jsonnet".parse().unwrap(); + + // Position on the outer x definition + let pos = (0, 6).into(); + + let 
refs = find_references(&doc, pos, &uri, true); + // Should find: the definition at 6 and the last reference at 29 (not the inner x) + assert_eq!( + refs, + vec![ + Location { + uri: uri.clone(), + range: lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 6 + }, + end: lsp_types::Position { + line: 0, + character: 7 + }, + }, + }, + Location { + uri, + range: lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 29 + }, + end: lsp_types::Position { + line: 0, + character: 30 + }, + }, + }, + ] + ); + } + + #[test] + fn test_is_at_file_scope() { + // Test file-scope detection helper + let code = "local x = 1; x"; + // ^file scope + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let ast = doc.ast(); + + // Find the 'x' identifier at the definition site + for t in ast + .syntax() + .descendants_with_tokens() + .filter_map(rowan::NodeOrToken::into_token) + { + if t.kind() == SyntaxKind::IDENT && t.text() == "x" && is_definition_site(&t) { + // The first x (definition) should be at file scope + assert!( + is_at_file_scope(&t), + "Top-level local should be at file scope" + ); + return; + } + } + panic!("Did not find definition token"); + } + + #[test] + fn test_nested_not_at_file_scope() { + // Nested definitions should not be at file scope + let code = "local f(x) = local y = 1; y; f(1)"; + // ^not file scope + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let ast = doc.ast(); + + // Find the 'y' identifier at the definition site (inside function) + for t in ast + .syntax() + .descendants_with_tokens() + .filter_map(rowan::NodeOrToken::into_token) + { + if t.kind() == SyntaxKind::IDENT && t.text() == "y" && is_definition_site(&t) { + // y is nested, should not be at file scope + assert!( + !is_at_file_scope(&t), + "Nested local should not be at file scope" + ); + return; + } + } + panic!("Did not find definition token"); + } +} diff --git a/crates/jrsonnet-lsp-handlers/src/rename.rs 
b/crates/jrsonnet-lsp-handlers/src/rename.rs new file mode 100644 index 00000000..1e36de0e --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/rename.rs @@ -0,0 +1,859 @@ +//! Rename handler. +//! +//! Provides rename functionality for local symbols. +//! +//! # Cross-File Rename +//! +//! For symbols that are exported from a file, `rename_cross_file` can be used +//! to rename references in importing files. This requires: +//! - A `DocumentManager` to access documents +//! - An `ImportGraph` to find importing files +//! +//! Cross-file rename finds references by: +//! 1. Finding files that import the current file +//! 2. Looking for field accesses that match the renamed symbol +//! 3. Returning a `WorkspaceEdit` with changes across all files + +use std::{ + collections::{HashMap, HashSet}, + sync::Arc, +}; + +use jrsonnet_lsp_document::{ + to_lsp_range, token_at_offset, CanonicalPath, Document, LspPosition, SymbolName, +}; +use jrsonnet_lsp_import::ImportGraph; +use jrsonnet_lsp_inference::DocumentManager; +use jrsonnet_lsp_scope::{ + find_definition_range, is_definition_site, is_renameable, is_variable_reference, ScopeResolver, +}; +use jrsonnet_rowan_parser::{AstNode, SyntaxKind}; +use lsp_types::{PrepareRenameResponse, TextEdit, Uri, WorkspaceEdit}; + +fn workspace_edit_from_changes(changes: HashMap<Uri, Vec<TextEdit>>) -> Option<WorkspaceEdit> { + if changes.is_empty() { + return None; + } + + Some(WorkspaceEdit { + changes: Some(changes), + document_changes: None, + change_annotations: None, + }) +} + +fn field_definition_range(token: &jrsonnet_rowan_parser::SyntaxToken) -> Option<rowan::TextRange> { + if token.kind() != SyntaxKind::IDENT { + return None; + } + + let mut node = token.parent(); + while let Some(current) = node { + if current.kind() == SyntaxKind::FIELD_NAME_FIXED { + let parent = current.parent()?; + if matches!( + parent.kind(), + SyntaxKind::MEMBER_FIELD_NORMAL | SyntaxKind::MEMBER_FIELD_METHOD + ) { + return Some(token.text_range()); + } + return None; + } + node = current.parent(); + } + + 
None +} + +/// Prepare rename response. +/// Returns the range of the symbol to be renamed and its current name. +pub fn prepare_rename(document: &Document, position: LspPosition) -> Option<PrepareRenameResponse> { + let text = document.text(); + let line_index = document.line_index(); + + // Convert LSP position to byte offset + let offset = line_index.offset(position, text)?; + + let ast = document.ast(); + + // Find the token at the offset + let token = token_at_offset(ast.syntax(), offset)?; + + // Must be an identifier + if token.kind() != SyntaxKind::IDENT { + return None; + } + + // Check if this is a renameable symbol (definition or reference to a local) + if !is_renameable(&token) && field_definition_range(&token).is_none() { + return None; + } + + let range = to_lsp_range(token.text_range(), line_index, text); + + Some(PrepareRenameResponse::Range(range)) +} + +/// Perform rename operation. +/// Returns a workspace edit with all text edits needed. +/// +/// The `new_name` parameter is a validated `SymbolName`, ensuring that +/// validation happens at the API boundary (in server.rs) before this +/// function is called. 
+/// +/// Returns `None` if: +/// - The position is not on an identifier +/// - No references are found +pub fn rename( + document: &Document, + position: LspPosition, + new_name: &SymbolName, + uri: &Uri, +) -> Option<WorkspaceEdit> { + let text = document.text(); + let line_index = document.line_index(); + let offset = line_index.offset(position, text)?; + let ast = document.ast(); + let token = token_at_offset(ast.syntax(), offset)?; + + if token.kind() != SyntaxKind::IDENT { + return None; + } + + let name = token.text(); + + let definition_range = if is_definition_site(&token) { + token.parent().map(|p| p.text_range()) + } else if is_variable_reference(&token) { + find_definition_range(&token, name) + } else { + return None; + }; + + let def_range = definition_range?; + + let resolver = ScopeResolver::new(ast.syntax()); + let references = resolver.find_references(ast.syntax(), name, def_range); + + if references.is_empty() { + return None; + } + + // SymbolName derefs to String, so we can clone it for each edit + let new_name_str = new_name.to_string(); + + let edits: Vec<TextEdit> = references + .into_iter() + .map(|range| TextEdit { + range: to_lsp_range(range, line_index, text), + new_text: new_name_str.clone(), + }) + .collect(); + + let mut changes = HashMap::new(); + changes.insert(uri.clone(), edits); + + workspace_edit_from_changes(changes) +} + +/// Perform cross-file rename operation. +/// +/// This extends the basic rename to also find references in files that import +/// the current file. For top-level object fields, it finds field accesses +/// in importing files. 
+/// +/// # Arguments +/// - `document`: The current document +/// - `position`: Cursor position +/// - `new_name`: The new name for the symbol +/// - `uri`: URI of the current document +/// - `current_path`: Canonical path of the current file +/// - `manager`: Document manager for accessing other files +/// - `import_graph`: Import graph for finding importing files +/// +/// # Returns +/// A `WorkspaceEdit` with changes across all affected files. +pub fn rename_cross_file( + document: &Document, + position: LspPosition, + new_name: &SymbolName, + uri: &Uri, + current_path: &CanonicalPath, + manager: &Arc<DocumentManager>, + import_graph: &ImportGraph, +) -> Option<WorkspaceEdit> { + // First, do the local rename to get edits for the current file + let mut all_changes: HashMap<Uri, Vec<TextEdit>> = HashMap::new(); + + // Get local edits + if let Some(local_edit) = rename(document, position, new_name, uri) { + if let Some(changes) = local_edit.changes { + all_changes.extend(changes); + } + } + + // Find the symbol name being renamed + let text = document.text(); + let line_index = document.line_index(); + let offset = line_index.offset(position, text)?; + let ast = document.ast(); + let token = token_at_offset(ast.syntax(), offset)?; + + if let Some(range) = field_definition_range(&token) { + all_changes.entry(uri.clone()).or_default().push(TextEdit { + range: to_lsp_range(range, line_index, text), + new_text: new_name.to_string(), + }); + } + + if token.kind() != SyntaxKind::IDENT { + return workspace_edit_from_changes(all_changes); + } + + let old_name = token.text().to_string(); + + // Find files that import this file + let importers = import_graph.transitive_importers(current_path); + + // Find references in each importing file + for importer_path in &importers { + if let Some((importer_uri, edits)) = + find_references_in_importer(importer_path, current_path, &old_name, new_name, manager) + { + all_changes.entry(importer_uri).or_default().extend(edits); + } + } + + workspace_edit_from_changes(all_changes) +} + 
+/// Resolve an import path relative to the importing file. +fn resolve_import_path(importer_path: &CanonicalPath, import_str: &str) -> Option<CanonicalPath> { + use std::path::Path; + + let import_path = Path::new(import_str); + + // If absolute, use directly + if import_path.is_absolute() { + return Some(CanonicalPath::new(import_path.to_path_buf())); + } + + // Resolve relative to the importer's directory + let importer_dir = importer_path.as_path().parent()?; + let resolved = importer_dir.join(import_path); + + // Canonicalize to handle . and .. + let canonical = resolved.canonicalize().ok()?; + Some(CanonicalPath::new(canonical)) +} + +/// Find references to a symbol in a file that imports the source file. +/// +/// This looks for patterns like: +/// ```jsonnet +/// local lib = import 'source.jsonnet'; +/// lib.field_name // This is a reference to field_name in source.jsonnet +/// ``` +fn find_references_in_importer( + importer_path: &CanonicalPath, + source_path: &CanonicalPath, + old_name: &str, + new_name: &SymbolName, + manager: &Arc<DocumentManager>, +) -> Option<(Uri, Vec<TextEdit>)> { + use jrsonnet_lsp_import::extract_import_path; + use jrsonnet_rowan_parser::nodes::{ExprBase, ExprField, StmtLocal}; + + let doc = manager.get_document(importer_path)?; + let uri = importer_path.to_uri(); + let text = doc.text(); + let line_index = doc.line_index(); + let ast = doc.ast(); + + let mut edits = Vec::new(); + + // Find local variables bound to imports of source_path + let mut import_bindings: HashSet<String> = HashSet::new(); + + // Walk the AST to find import bindings + for node in ast.syntax().descendants() { + if node.kind() != SyntaxKind::STMT_LOCAL { + continue; + } + + let Some(stmt_local) = StmtLocal::cast(node) else { + continue; + }; + + for bind in stmt_local.binds() { + let jrsonnet_rowan_parser::nodes::Bind::BindDestruct(bd) = bind else { + continue; + }; + + let Some(value) = bd.value() else { + continue; + }; + + // Check if the value is an import of source_path + let Some(base) = 
value.expr_base() else { + continue; + }; + + let ExprBase::ExprImport(import_expr) = base else { + continue; + }; + + let Some(import_str) = extract_import_path(&import_expr) else { + continue; + }; + + // Resolve the import path relative to the importer + let Some(resolved_path) = resolve_import_path(importer_path, &import_str) else { + continue; + }; + + if resolved_path != *source_path { + continue; + } + + // Found an import of source_path, get the binding name + // Use explicit method syntax to avoid ambiguity with Into trait + let Some(destruct) = jrsonnet_rowan_parser::nodes::BindDestruct::into(&bd) else { + continue; + }; + + let jrsonnet_rowan_parser::nodes::Destruct::DestructFull(full) = destruct else { + continue; + }; + + let Some(name_node) = full.name() else { + continue; + }; + + let Some(ident) = name_node.ident_lit() else { + continue; + }; + + import_bindings.insert(ident.text().to_string()); + } + } + + if import_bindings.is_empty() { + return None; + } + + // Find field accesses on the import bindings that match old_name + // ExprField represents obj.field syntax + for node in ast.syntax().descendants() { + if node.kind() != SyntaxKind::EXPR_FIELD { + continue; + } + + let Some(field_expr) = ExprField::cast(node) else { + continue; + }; + + // Check if the field name matches + let Some(field_name) = field_expr.field() else { + continue; + }; + + let Some(field_ident) = field_name.ident_lit() else { + continue; + }; + + if field_ident.text() != old_name { + continue; + } + + // Check if the base expression is one of our import bindings + let Some(base_expr) = field_expr.base() else { + continue; + }; + + let Some(base) = base_expr.expr_base() else { + continue; + }; + + let ExprBase::ExprVar(var) = base else { + continue; + }; + + let Some(var_name) = var.name() else { + continue; + }; + + let Some(var_ident) = var_name.ident_lit() else { + continue; + }; + + if !import_bindings.contains(var_ident.text()) { + continue; + } + + // Found a field 
access on an import binding matching old_name + edits.push(TextEdit { + range: to_lsp_range(field_ident.text_range(), line_index, text), + new_text: new_name.to_string(), + }); + } + + if edits.is_empty() { + None + } else { + Some((uri, edits)) + } +} + +#[cfg(test)] +mod tests { + use jrsonnet_lsp_document::DocVersion; + use rstest::rstest; + + use super::*; + + #[test] + fn test_prepare_rename_on_definition() { + let code = "local x = 1; x"; + // ^def (position 6) + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + let pos = (0, 6).into(); + + let result = prepare_rename(&doc, pos); + // 'x' spans characters 6-7 + assert_eq!( + result, + Some(PrepareRenameResponse::Range(lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 6 + }, + end: lsp_types::Position { + line: 0, + character: 7 + }, + })) + ); + } + + #[test] + fn test_prepare_rename_on_reference() { + let code = "local x = 1; x + x"; + // ^ref (position 13) + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + let pos = (0, 13).into(); + + let result = prepare_rename(&doc, pos); + // 'x' at position 13 spans characters 13-14 + assert_eq!( + result, + Some(PrepareRenameResponse::Range(lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 13 + }, + end: lsp_types::Position { + line: 0, + character: 14 + }, + })) + ); + } + + #[test] + fn test_prepare_rename_on_field_definition() { + let code = "{ helper: 1 }"; + // ^ field (position 2) + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + let pos = (0, 2).into(); + + let result = prepare_rename(&doc, pos); + assert_eq!( + result, + Some(PrepareRenameResponse::Range(lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 2 + }, + end: lsp_types::Position { + line: 0, + character: 8 + }, + })) + ); + } + + #[test] + fn test_rename_local_variable() { + let code = "local x = 1; x + x"; + // ^def ^ref ^ref + let doc = Document::new(code.to_string(), 
DocVersion::new(1)); + let uri: Uri = "file:///test.jsonnet".parse().unwrap(); + + // Rename 'x' to 'y' from the definition + let pos = (0, 6).into(); + let new_name = SymbolName::new("y").unwrap(); + + let edit = rename(&doc, pos, &new_name, &uri).expect("rename should succeed"); + let changes = edit.changes.expect("should have changes"); + let edits = changes.get(&uri).expect("should have edits for uri"); + + // Edits for x at positions 6, 13, 17 + assert_eq!( + edits, + &vec![ + TextEdit { + range: lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 6 + }, + end: lsp_types::Position { + line: 0, + character: 7 + }, + }, + new_text: "y".to_string(), + }, + TextEdit { + range: lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 13 + }, + end: lsp_types::Position { + line: 0, + character: 14 + }, + }, + new_text: "y".to_string(), + }, + TextEdit { + range: lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 17 + }, + end: lsp_types::Position { + line: 0, + character: 18 + }, + }, + new_text: "y".to_string(), + }, + ] + ); + } + + #[test] + fn test_rename_function_parameter() { + let code = "local f(x) = x * x; f(1)"; + // ^def ^ref ^ref + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let uri: Uri = "file:///test.jsonnet".parse().unwrap(); + + // Rename 'x' to 'n' from a reference + let pos = (0, 13).into(); + let new_name = SymbolName::new("n").unwrap(); + + let edit = rename(&doc, pos, &new_name, &uri).expect("rename should succeed"); + let changes = edit.changes.expect("should have changes"); + let edits = changes.get(&uri).expect("should have edits for uri"); + + // Edits for x at positions 8 (param), 13, 17 + assert_eq!( + edits, + &vec![ + TextEdit { + range: lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 8 + }, + end: lsp_types::Position { + line: 0, + character: 9 + }, + }, + new_text: "n".to_string(), + }, + TextEdit { + range: lsp_types::Range { 
+ start: lsp_types::Position { + line: 0, + character: 13 + }, + end: lsp_types::Position { + line: 0, + character: 14 + }, + }, + new_text: "n".to_string(), + }, + TextEdit { + range: lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 17 + }, + end: lsp_types::Position { + line: 0, + character: 18 + }, + }, + new_text: "n".to_string(), + }, + ] + ); + } + + #[test] + fn test_rename_respects_scope() { + let code = "local x = 1; local f(x) = x; x"; + // ^def1 ^def2 ^ref2 ^ref1 + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let uri: Uri = "file:///test.jsonnet".parse().unwrap(); + + // Rename the outer x + let pos = (0, 6).into(); + let new_name = SymbolName::new("y").unwrap(); + + let edit = rename(&doc, pos, &new_name, &uri).expect("rename should succeed"); + let changes = edit.changes.expect("should have changes"); + let edits = changes.get(&uri).expect("should have edits for uri"); + + // Only edits for outer x at positions 6 (def) and 29 (final ref), not the inner scope + assert_eq!( + edits, + &vec![ + TextEdit { + range: lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 6 + }, + end: lsp_types::Position { + line: 0, + character: 7 + }, + }, + new_text: "y".to_string(), + }, + TextEdit { + range: lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 29 + }, + end: lsp_types::Position { + line: 0, + character: 30 + }, + }, + new_text: "y".to_string(), + }, + ] + ); + } + + #[test] + fn test_no_rename_for_non_identifier() { + let code = "local x = 1; x"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + // Position on '=' (not an identifier) + let pos = (0, 8).into(); + + let result = prepare_rename(&doc, pos); + assert_eq!(result, None); + } + + #[rstest] + #[case("123foo")] + #[case("foo-bar")] + #[case("local")] + #[case("")] + fn test_symbol_name_rejects_invalid_identifier(#[case] new_name: &str) { + use assert_matches::assert_matches; + use 
jrsonnet_lsp_document::LspError; + + // Validation happens at the boundary when creating SymbolName + let err = SymbolName::new(new_name).expect_err("should reject invalid identifier"); + assert_matches!(err, LspError::InvalidIdentifier(_)); + } + + #[rstest] + #[case("newName")] + #[case("new_name")] + #[case("_private")] + #[case("y")] + fn test_rename_accepts_valid_identifier(#[case] new_name: &str) { + // code: local x = 1; x + // ^def ^ref (positions 6 and 13) + let code = "local x = 1; x"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let uri: Uri = "file:///test.jsonnet".parse().unwrap(); + let pos = (0, 6).into(); + let symbol_name = SymbolName::new(new_name).expect("should be valid identifier"); + + let edit = rename(&doc, pos, &symbol_name, &uri).expect("rename should succeed"); + let changes = edit.changes.expect("should have changes"); + let edits = changes.get(&uri).expect("should have edits for uri"); + + // x at positions 6-7 (def) and 13-14 (ref) + assert_eq!( + edits, + &vec![ + TextEdit { + range: lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 6 + }, + end: lsp_types::Position { + line: 0, + character: 7 + }, + }, + new_text: new_name.to_string(), + }, + TextEdit { + range: lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 13 + }, + end: lsp_types::Position { + line: 0, + character: 14 + }, + }, + new_text: new_name.to_string(), + }, + ] + ); + } + + #[test] + fn test_cross_file_rename_field_access() { + use std::io::Write; + + use jrsonnet_lsp_types::GlobalTyStore; + use tempfile::TempDir; + + // Create a temp directory with two files + let temp_dir = TempDir::new().unwrap(); + let lib_path = temp_dir.path().join("lib.jsonnet"); + let main_path = temp_dir.path().join("main.jsonnet"); + + // lib.jsonnet: exports an object with a `helper` field + let lib_code = r"{ helper: function(x) x * 2 }"; + std::fs::File::create(&lib_path) + .unwrap() + .write_all(lib_code.as_bytes()) + 
.unwrap(); + + // main.jsonnet: imports lib and uses lib.helper + let main_code = r#"local lib = import "lib.jsonnet"; lib.helper(5)"#; + std::fs::File::create(&main_path) + .unwrap() + .write_all(main_code.as_bytes()) + .unwrap(); + + // Create the manager and import graph + let global = Arc::new(GlobalTyStore::new()); + let manager = Arc::new(DocumentManager::new(global)); + + let lib_canon = CanonicalPath::new(lib_path); + let main_canon = CanonicalPath::new(main_path); + + // Open both documents + manager.open(lib_canon.clone(), lib_code.to_string(), DocVersion::new(1)); + manager.open( + main_canon.clone(), + main_code.to_string(), + DocVersion::new(1), + ); + + // Build import graph + let mut import_graph = ImportGraph::new(); + if let Some(main_doc) = manager.get_document(&main_canon) { + let entries = jrsonnet_lsp_import::parse_document_imports(&main_doc, &|import_path| { + let import_full = temp_dir.path().join(import_path); + import_full.canonicalize().ok().map(CanonicalPath::new) + }); + import_graph.update_file_with_entries(&main_canon, entries); + } + + // Get the lib document + let lib_doc = manager.get_document(&lib_canon).unwrap(); + let lib_uri = lib_canon.to_uri(); + + // Rename 'helper' in lib.jsonnet (position 2 is the 'h' in 'helper') + // This is an object field, not a local variable, so local rename won't work + // but cross-file rename should find `lib.helper` in main.jsonnet + let pos = (0, 2).into(); + let new_name = SymbolName::new("util").unwrap(); + + let result = rename_cross_file( + &lib_doc, + pos, + &new_name, + &lib_uri, + &lib_canon, + &manager, + &import_graph, + ); + + // Cross-file rename should find `lib.helper` in main.jsonnet + let edit = result.expect("should produce workspace edit"); + let changes = edit.changes.expect("should have changes"); + + // Should rename the source field definition and importer field access. 
+ // lib_code: { helper: function(x) x * 2 } + // ^^^^^^ + // position: 2 8 + // main_code: local lib = import "lib.jsonnet"; lib.helper(5) + // ^^^^^^ + // position: 38 44 + let mut expected_changes: HashMap<Uri, Vec<TextEdit>> = HashMap::new(); + expected_changes.insert( + lib_uri, + vec![TextEdit { + range: lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 2, + }, + end: lsp_types::Position { + line: 0, + character: 8, + }, + }, + new_text: "util".to_string(), + }], + ); + + let main_uri = main_canon.to_uri(); + expected_changes.insert( + main_uri, + vec![TextEdit { + range: lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 38, + }, + end: lsp_types::Position { + line: 0, + character: 44, + }, + }, + new_text: "util".to_string(), + }], + ); + assert_eq!(changes, expected_changes); + } +} diff --git a/crates/jrsonnet-lsp-handlers/src/semantic_tokens.rs b/crates/jrsonnet-lsp-handlers/src/semantic_tokens.rs new file mode 100644 index 00000000..e09b5aa4 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/semantic_tokens.rs @@ -0,0 +1,570 @@ +//! Semantic tokens handler. +//! +//! Provides semantic highlighting for Jsonnet code. + +use jrsonnet_lsp_document::{Document, LineIndex}; +use jrsonnet_rowan_parser::{ + nodes::{BindFunction, Destruct, ParamsDesc, StmtLocal}, + AstNode, SyntaxKind, SyntaxNode, SyntaxToken, +}; +use lsp_types::{SemanticToken, SemanticTokenType, SemanticTokens, SemanticTokensLegend}; + +/// Semantic token type with compile-time index. +/// +/// The enum values match the indices in `TOKEN_TYPES`. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +#[repr(u32)] +enum TokenType { + Namespace = 0, + Parameter = 7, + Variable = 8, + Property = 9, + Function = 12, + Method = 13, + Keyword = 15, + Comment = 17, + String = 18, + Number = 19, + Operator = 21, +} + +/// Semantic token types we support. +/// +/// The indices in this array must match the `TokenType` enum values. 
+pub const TOKEN_TYPES: &[SemanticTokenType] = &[ + SemanticTokenType::NAMESPACE, // 0: std + SemanticTokenType::TYPE, // 1: (unused) + SemanticTokenType::CLASS, // 2: (unused) + SemanticTokenType::ENUM, // 3: (unused) + SemanticTokenType::INTERFACE, // 4: (unused) + SemanticTokenType::STRUCT, // 5: (unused) + SemanticTokenType::TYPE_PARAMETER, // 6: (unused) + SemanticTokenType::PARAMETER, // 7: function parameters + SemanticTokenType::VARIABLE, // 8: local variables + SemanticTokenType::PROPERTY, // 9: object fields + SemanticTokenType::ENUM_MEMBER, // 10: (unused) + SemanticTokenType::EVENT, // 11: (unused) + SemanticTokenType::FUNCTION, // 12: function definitions + SemanticTokenType::METHOD, // 13: object methods + SemanticTokenType::MACRO, // 14: (unused) + SemanticTokenType::KEYWORD, // 15: keywords + SemanticTokenType::MODIFIER, // 16: (unused) + SemanticTokenType::COMMENT, // 17: comments + SemanticTokenType::STRING, // 18: strings + SemanticTokenType::NUMBER, // 19: numbers + SemanticTokenType::REGEXP, // 20: (unused) + SemanticTokenType::OPERATOR, // 21: operators +]; + +/// Semantic token modifiers (bit flags). +pub const TOKEN_MODIFIERS: &[lsp_types::SemanticTokenModifier] = &[ + lsp_types::SemanticTokenModifier::DECLARATION, + lsp_types::SemanticTokenModifier::DEFINITION, + lsp_types::SemanticTokenModifier::READONLY, + lsp_types::SemanticTokenModifier::STATIC, + lsp_types::SemanticTokenModifier::DEPRECATED, + lsp_types::SemanticTokenModifier::ABSTRACT, + lsp_types::SemanticTokenModifier::ASYNC, + lsp_types::SemanticTokenModifier::MODIFICATION, + lsp_types::SemanticTokenModifier::DOCUMENTATION, + lsp_types::SemanticTokenModifier::DEFAULT_LIBRARY, +]; + +/// Modifier bit flags. +mod token_modifier { + pub const DECLARATION: u32 = 1 << 0; + pub const DEFINITION: u32 = 1 << 1; + pub const DEFAULT_LIBRARY: u32 = 1 << 9; +} + +/// Get the semantic tokens legend. 
+pub fn legend() -> SemanticTokensLegend { + SemanticTokensLegend { + token_types: TOKEN_TYPES.to_vec(), + token_modifiers: TOKEN_MODIFIERS.to_vec(), + } +} + +/// Compute semantic tokens for a document. +pub fn semantic_tokens(document: &Document) -> SemanticTokens { + let text = document.text(); + let line_index = document.line_index(); + let ast = document.ast(); + + let mut builder = SemanticTokenBuilder::new(line_index, text); + + // Walk all tokens in the document + for element in ast.syntax().descendants_with_tokens() { + if let Some(token) = element.into_token() { + builder.visit_token(&token); + } + } + + builder.build() +} + +/// Builder for semantic tokens. +struct SemanticTokenBuilder<'a> { + line_index: &'a LineIndex, + text: &'a str, + tokens: Vec, +} + +/// Raw token before delta encoding. +struct RawToken { + line: u32, + start_char: u32, + length: u32, + token_type: u32, + token_modifiers: u32, +} + +impl<'a> SemanticTokenBuilder<'a> { + fn new(line_index: &'a LineIndex, text: &'a str) -> Self { + Self { + line_index, + text, + tokens: Vec::new(), + } + } + + fn visit_token(&mut self, token: &SyntaxToken) { + let kind = token.kind(); + + match kind { + // Keywords + SyntaxKind::LOCAL_KW + | SyntaxKind::IF_KW + | SyntaxKind::THEN_KW + | SyntaxKind::ELSE_KW + | SyntaxKind::FUNCTION_KW + | SyntaxKind::IMPORT_KW + | SyntaxKind::IMPORTSTR_KW + | SyntaxKind::IMPORTBIN_KW + | SyntaxKind::FOR_KW + | SyntaxKind::IN_KW + | SyntaxKind::TRUE_KW + | SyntaxKind::FALSE_KW + | SyntaxKind::NULL_KW + | SyntaxKind::SELF_KW + | SyntaxKind::SUPER_KW + | SyntaxKind::ERROR_KW + | SyntaxKind::ASSERT_KW + | SyntaxKind::TAILSTRICT_KW => { + self.add_token(token, TokenType::Keyword, 0); + } + + // Comments + SyntaxKind::SINGLE_LINE_SLASH_COMMENT + | SyntaxKind::SINGLE_LINE_HASH_COMMENT + | SyntaxKind::MULTI_LINE_COMMENT => { + self.add_token(token, TokenType::Comment, 0); + } + + // Strings + SyntaxKind::STRING_DOUBLE + | SyntaxKind::STRING_SINGLE + | 
SyntaxKind::STRING_DOUBLE_VERBATIM + | SyntaxKind::STRING_SINGLE_VERBATIM + | SyntaxKind::STRING_BLOCK => { + self.add_token(token, TokenType::String, 0); + } + + // Numbers + SyntaxKind::FLOAT => { + self.add_token(token, TokenType::Number, 0); + } + + // Identifiers - need context to determine type + SyntaxKind::IDENT => { + self.visit_identifier(token); + } + + // Operators + SyntaxKind::PLUS + | SyntaxKind::MINUS + | SyntaxKind::MUL + | SyntaxKind::DIV + | SyntaxKind::MODULO + | SyntaxKind::BIT_AND + | SyntaxKind::BIT_OR + | SyntaxKind::BIT_XOR + | SyntaxKind::BIT_NOT + | SyntaxKind::LT + | SyntaxKind::GT + | SyntaxKind::NOT + | SyntaxKind::ASSIGN + | SyntaxKind::LE + | SyntaxKind::GE + | SyntaxKind::EQ + | SyntaxKind::NE + | SyntaxKind::AND + | SyntaxKind::OR + | SyntaxKind::LHS + | SyntaxKind::RHS => { + self.add_token(token, TokenType::Operator, 0); + } + + _ => {} + } + } + + fn visit_identifier(&mut self, token: &SyntaxToken) { + let Some(parent) = token.parent() else { + return; + }; + + // Check if this is "std" + if token.text() == "std" { + self.add_token(token, TokenType::Namespace, token_modifier::DEFAULT_LIBRARY); + return; + } + + // Check context based on parent/grandparent + if parent.kind() == SyntaxKind::NAME { + if let Some(grandparent) = parent.parent() { + match grandparent.kind() { + // Variable reference + SyntaxKind::EXPR_VAR => { + // Check if this references a parameter or variable + let token_type = classify_variable_reference(token); + self.add_token(token, token_type, 0); + } + + // Definition sites + SyntaxKind::DESTRUCT_FULL => { + // Could be parameter or local variable definition + let (token_type, modifiers) = classify_definition_site(&grandparent); + self.add_token(token, token_type, modifiers); + } + + SyntaxKind::BIND_FUNCTION => { + // Function definition + self.add_token( + token, + TokenType::Function, + token_modifier::DECLARATION | token_modifier::DEFINITION, + ); + } + + // Field access (std.xyz or obj.field) + 
SyntaxKind::EXPR_FIELD => { + // Check if accessing std + if is_stdlib_access(&grandparent) { + self.add_token( + token, + TokenType::Function, + token_modifier::DEFAULT_LIBRARY, + ); + } else { + self.add_token(token, TokenType::Property, 0); + } + } + + _ => {} + } + } + } + + // Check for field name in object (ID node) + if parent.kind() == SyntaxKind::NAME { + if let Some(grandparent) = parent.parent() { + if grandparent.kind() == SyntaxKind::FIELD_NAME_FIXED { + // This is a field definition + if let Some(great_grandparent) = grandparent.parent() { + if great_grandparent.kind() == SyntaxKind::MEMBER_FIELD_METHOD { + self.add_token( + token, + TokenType::Method, + token_modifier::DECLARATION | token_modifier::DEFINITION, + ); + } else if great_grandparent.kind() == SyntaxKind::MEMBER_FIELD_NORMAL { + self.add_token( + token, + TokenType::Property, + token_modifier::DECLARATION | token_modifier::DEFINITION, + ); + } + } + } + } + } + } + + fn add_token(&mut self, token: &SyntaxToken, token_type: TokenType, token_modifiers: u32) { + let range = token.text_range(); + let start_pos = self + .line_index + .position(range.start().into(), self.text) + .unwrap_or_default(); + + // Handle multi-line tokens (like block strings/comments) + let token_text = token.text(); + let lines: Vec<&str> = token_text.lines().collect(); + + let token_type_u32 = token_type as u32; + + if lines.len() <= 1 { + // Single line token + self.tokens.push(RawToken { + line: start_pos.line.0, + start_char: start_pos.character.0, + length: token_text.len() as u32, + token_type: token_type_u32, + token_modifiers, + }); + } else { + // Multi-line token - emit one token per line + for (i, line) in lines.iter().enumerate() { + let line_num = start_pos.line.0 + i as u32; + let start_char = if i == 0 { start_pos.character.0 } else { 0 }; + let length = line.len() as u32; + + if length > 0 { + self.tokens.push(RawToken { + line: line_num, + start_char, + length, + token_type: token_type_u32, + 
token_modifiers, + }); + } + } + } + } + + fn build(mut self) -> SemanticTokens { + // Sort tokens by position + self.tokens + .sort_unstable_by(|a, b| (a.line, a.start_char).cmp(&(b.line, b.start_char))); + + // Convert to delta-encoded SemanticToken format + let mut data = Vec::with_capacity(self.tokens.len()); + let mut prev_line = 0u32; + let mut prev_char = 0u32; + + for token in &self.tokens { + let delta_line = token.line - prev_line; + let delta_start = if delta_line == 0 { + token.start_char - prev_char + } else { + token.start_char + }; + + data.push(SemanticToken { + delta_line, + delta_start, + length: token.length, + token_type: token.token_type, + token_modifiers_bitset: token.token_modifiers, + }); + + prev_line = token.line; + prev_char = token.start_char; + } + + SemanticTokens { + result_id: None, + data, + } + } +} + +/// Classify a variable reference to determine its token type. +fn classify_variable_reference(token: &SyntaxToken) -> TokenType { + // Walk up the scope chain to find the definition + let mut current = match token.parent() { + Some(p) => p, + None => return TokenType::Variable, + }; + + let name = token.text(); + + while let Some(parent) = current.parent() { + if is_parameter_in_scope(&parent, name) { + return TokenType::Parameter; + } + if is_function_in_scope(&parent, ¤t, name) { + return TokenType::Function; + } + current = parent; + } + + TokenType::Variable +} + +/// Check if a name is a parameter in the given scope. 
+fn is_parameter_in_scope(scope: &SyntaxNode, name: &str) -> bool { + match scope.kind() { + SyntaxKind::EXPR_FUNCTION => { + if let Some(func) = jrsonnet_rowan_parser::nodes::ExprFunction::cast(scope.clone()) { + if let Some(params) = func.params_desc() { + return params_contain_name(¶ms, name); + } + } + } + SyntaxKind::BIND_FUNCTION => { + if let Some(func) = BindFunction::cast(scope.clone()) { + if let Some(params) = func.params() { + return params_contain_name(¶ms, name); + } + } + } + _ => {} + } + false +} + +/// Check if params contain a given name. +fn params_contain_name(params: &ParamsDesc, name: &str) -> bool { + for param in params.params() { + if let Some(destruct) = param.destruct() { + if let Destruct::DestructFull(full) = destruct { + if let Some(param_name) = full.name() { + if let Some(ident) = param_name.ident_lit() { + if ident.text() == name { + return true; + } + } + } + } + } + } + false +} + +/// Check if a name is a function defined in the given scope. +fn is_function_in_scope(scope: &SyntaxNode, child: &SyntaxNode, name: &str) -> bool { + if scope.kind() != SyntaxKind::EXPR { + return false; + } + + for stmt_node in scope.children() { + if stmt_node.kind() == SyntaxKind::STMT_LOCAL { + if stmt_node.text_range().end() > child.text_range().start() { + continue; + } + + if let Some(stmt_local) = StmtLocal::cast(stmt_node) { + for bind in stmt_local.binds() { + if let jrsonnet_rowan_parser::nodes::Bind::BindFunction(bf) = bind { + if let Some(bind_name) = bf.name() { + if let Some(ident) = bind_name.ident_lit() { + if ident.text() == name { + return true; + } + } + } + } + } + } + } + } + false +} + +/// Classify a definition site. 
+fn classify_definition_site(destruct_node: &SyntaxNode) -> (TokenType, u32) { + // Walk up to find if this is a parameter or local variable + let mut current = destruct_node.clone(); + + while let Some(parent) = current.parent() { + match parent.kind() { + SyntaxKind::PARAM => { + return ( + TokenType::Parameter, + token_modifier::DECLARATION | token_modifier::DEFINITION, + ); + } + SyntaxKind::BIND_DESTRUCT | SyntaxKind::FOR_SPEC => { + return ( + TokenType::Variable, + token_modifier::DECLARATION | token_modifier::DEFINITION, + ); + } + _ => {} + } + current = parent; + } + + ( + TokenType::Variable, + token_modifier::DECLARATION | token_modifier::DEFINITION, + ) +} + +/// Check if an index access is accessing the stdlib. +fn is_stdlib_access(suffix_index: &SyntaxNode) -> bool { + // Look for a preceding EXPR_VAR with "std" + if let Some(parent) = suffix_index.parent() { + for child in parent.children() { + if child.kind() == SyntaxKind::EXPR_VAR { + for token in child + .descendants_with_tokens() + .filter_map(rowan::NodeOrToken::into_token) + { + if token.kind() == SyntaxKind::IDENT && token.text() == "std" { + return true; + } + } + } + // Stop when we reach the suffix_index + if child.text_range() == suffix_index.text_range() { + break; + } + } + } + false +} + +#[cfg(test)] +mod tests { + use jrsonnet_lsp_document::DocVersion; + + use super::*; + + #[test] + fn test_semantic_tokens_keywords() { + let code = "local x = if true then 1 else 2; x"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + let tokens = semantic_tokens(&doc); + assert!(!tokens.data.is_empty()); + + // Should have tokens for: local, if, true, then, else, and identifiers + } + + #[test] + fn test_semantic_tokens_function() { + let code = "local add(a, b) = a + b; add(1, 2)"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + let tokens = semantic_tokens(&doc); + assert!(!tokens.data.is_empty()); + } + + #[test] + fn test_semantic_tokens_object() { + let 
code = r#"{ name: "test", greet(x): "Hello " + x }"#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + let tokens = semantic_tokens(&doc); + assert!(!tokens.data.is_empty()); + } + + #[test] + fn test_semantic_tokens_stdlib() { + let code = "std.length([1, 2, 3])"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + let tokens = semantic_tokens(&doc); + assert!(!tokens.data.is_empty()); + } + + #[test] + fn test_legend() { + let leg = legend(); + assert!(!leg.token_types.is_empty()); + assert!(!leg.token_modifiers.is_empty()); + } +} diff --git a/crates/jrsonnet-lsp-handlers/src/signature_help.rs b/crates/jrsonnet-lsp-handlers/src/signature_help.rs new file mode 100644 index 00000000..2cc61d60 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/signature_help.rs @@ -0,0 +1,513 @@ +//! Signature help handler. +//! +//! Provides parameter information when the user is inside a function call. + +use jrsonnet_lsp_document::{Document, LspPosition}; +use jrsonnet_lsp_stdlib as stdlib; +use jrsonnet_rowan_parser::{ + nodes::{ + Arg, ArgsDesc, Bind, BindFunction, Destruct, ExprBase, ExprCall, ExprField, Param, + StmtLocal, + }, + AstNode, SyntaxKind, SyntaxNode, SyntaxToken, +}; +use lsp_types::{ + Documentation, MarkupContent, MarkupKind, ParameterInformation, ParameterLabel, SignatureHelp, + SignatureInformation, +}; + +#[derive(Debug, Default, Clone)] +struct ActiveArg { + positional_index: u32, + named_arg: Option, +} + +#[derive(Debug, Clone)] +struct SignatureParamInfo { + label: String, + name: String, +} + +/// Get signature help at the given position. 
+pub fn signature_help(document: &Document, position: LspPosition) -> Option { + let text = document.text(); + let line_index = document.line_index(); + + let offset = line_index.offset(position, text)?; + let cursor_offset: rowan::TextSize = offset.into(); + + let ast = document.ast(); + let root = ast.syntax(); + + let token = token_at_offset(root, offset.into())?; + let (func_name, active_arg) = find_call_context(&token, cursor_offset)?; + + get_signature_for_function(&func_name, &token, &active_arg) +} + +/// Find the function call context around the cursor. +/// Returns the function name and active argument info. +fn find_call_context( + token: &SyntaxToken, + cursor_offset: rowan::TextSize, +) -> Option<(String, ActiveArg)> { + let mut current = token.parent()?; + + loop { + if let Some(call) = ExprCall::cast(current.clone()) { + return extract_call_info(&call, cursor_offset); + } + + if current.kind() == SyntaxKind::ARGS_DESC { + if let Some(call) = current.parent().and_then(ExprCall::cast) { + return extract_call_info(&call, cursor_offset); + } + } + + current = current.parent()?; + } +} + +/// Extract call information from an ExprCall node. +fn extract_call_info( + call: &ExprCall, + cursor_offset: rowan::TextSize, +) -> Option<(String, ActiveArg)> { + let func_name = extract_callee_name(call)?; + let active_arg = active_arg_for_call(call, cursor_offset); + Some((func_name, active_arg)) +} + +/// Extract the function name from the callee of an ExprCall. +fn extract_callee_name(call: &ExprCall) -> Option { + let callee = call.callee()?; + match callee.expr_base()? { + ExprBase::ExprVar(var) => Some(var.name()?.ident_lit()?.text().to_string()), + ExprBase::ExprField(field) => extract_field_name(&field), + _ => None, + } +} + +/// Extract the field name from an ExprField (returns just the field name, e.g., "length" from std.length). 
+fn extract_field_name(field: &ExprField) -> Option { + Some(field.field()?.ident_lit()?.text().to_string()) +} + +fn active_arg_for_call(call: &ExprCall, cursor_offset: rowan::TextSize) -> ActiveArg { + let Some(args_desc) = call.args_desc() else { + return ActiveArg::default(); + }; + let positional_index = positional_arg_index(&args_desc, cursor_offset); + let named_arg = args_desc + .args() + .nth(positional_index as usize) + .and_then(arg_name); + + ActiveArg { + positional_index, + named_arg, + } +} + +/// Compute the currently active positional argument index. +fn positional_arg_index(args_desc: &ArgsDesc, cursor_offset: rowan::TextSize) -> u32 { + let args: Vec<_> = args_desc.args().collect(); + if args.is_empty() { + return count_preceding_commas(args_desc, cursor_offset); + } + + for (index, arg) in args.iter().enumerate() { + if cursor_offset <= arg.syntax().text_range().end() { + return index as u32; + } + } + + count_preceding_commas(args_desc, cursor_offset) +} + +fn arg_name(arg: Arg) -> Option { + Some(arg.name()?.ident_lit()?.text().to_string()) +} + +/// Count top-level commas before the cursor inside an argument list. +fn count_preceding_commas(args_desc: &ArgsDesc, cursor_offset: rowan::TextSize) -> u32 { + args_desc + .syntax() + .children_with_tokens() + .filter_map(rowan::NodeOrToken::into_token) + .filter(|t| t.kind() == SyntaxKind::COMMA && t.text_range().end() <= cursor_offset) + .count() as u32 +} + +/// Get signature information for a function. 
+fn get_signature_for_function( + name: &str, + token: &SyntaxToken, + active_arg: &ActiveArg, +) -> Option { + // First check stdlib + if let Some(doc) = stdlib::get_stdlib_doc(name) { + let (params_info, variadic) = stdlib_params(name, &doc.signature); + let active_param = resolve_active_parameter(¶ms_info, variadic, active_arg); + let params = to_lsp_params(¶ms_info); + + let label = format!("std.{}{})", name, doc.signature); + + return Some(SignatureHelp { + signatures: vec![SignatureInformation { + label, + documentation: Some(Documentation::MarkupContent(MarkupContent { + kind: MarkupKind::Markdown, + value: doc.description.to_string(), + })), + parameters: Some(params), + active_parameter: Some(active_param), + }], + active_signature: Some(0), + active_parameter: Some(active_param), + }); + } + + // Check for local function definitions + if let Some(sig) = find_local_function_signature(token, name) { + let active_param = resolve_active_parameter(&sig.params, false, active_arg); + let params = to_lsp_params(&sig.params); + let labels: Vec<_> = sig + .params + .iter() + .map(|param| param.label.as_str()) + .collect(); + let label = format!("{}({})", name, labels.join(", ")); + + return Some(SignatureHelp { + signatures: vec![SignatureInformation { + label, + documentation: None, + parameters: Some(params), + active_parameter: Some(active_param), + }], + active_signature: Some(0), + active_parameter: Some(active_param), + }); + } + + None +} + +fn to_lsp_params(params: &[SignatureParamInfo]) -> Vec { + params + .iter() + .map(|param| ParameterInformation { + label: ParameterLabel::Simple(param.label.clone()), + documentation: None, + }) + .collect() +} + +fn resolve_active_parameter( + params: &[SignatureParamInfo], + _variadic: bool, + active_arg: &ActiveArg, +) -> u32 { + if params.is_empty() { + return active_arg.positional_index; + } + + if let Some(named_arg) = active_arg.named_arg.as_deref() { + if let Some(index) = params.iter().position(|param| 
param.name == named_arg) { + return index as u32; + } + } + + let max_index = params.len().saturating_sub(1) as u32; + active_arg.positional_index.min(max_index) +} + +fn stdlib_params(name: &str, fallback_signature: &str) -> (Vec, bool) { + if let Some(func_data) = stdlib::get_stdlib_func_data(name) { + let params = func_data + .params + .into_iter() + .map(|param| { + let label = if param.has_default { + format!("{}=...", param.name) + } else { + param.name.clone() + }; + SignatureParamInfo { + label, + name: param.name, + } + }) + .collect(); + return (params, func_data.variadic); + } + + let params = parse_signature_params(fallback_signature) + .into_iter() + .map(|label| { + let name = label + .split_once('=') + .map_or_else(|| label.clone(), |(name, _)| name.to_string()); + SignatureParamInfo { label, name } + }) + .collect(); + (params, false) +} + +/// Parse parameter names from a signature string like "(func, arr". +fn parse_signature_params(signature: &str) -> Vec { + // Remove leading '(' if present + let s = signature.trim_start_matches('('); + // Split by comma and trim whitespace + s.split(',') + .map(|p| p.trim().to_string()) + .filter(|p| !p.is_empty()) + .collect() +} + +/// Local function signature info. +struct LocalFunctionSignature { + params: Vec, +} + +/// Find a local function definition and extract its signature. +fn find_local_function_signature( + token: &SyntaxToken, + name: &str, +) -> Option { + let mut current = token.parent()?; + + while let Some(parent) = current.parent() { + if let Some(sig) = check_scope_for_function(&parent, ¤t, name) { + return Some(sig); + } + current = parent; + } + + None +} + +/// Check a scope for a function definition. +fn check_scope_for_function( + scope: &SyntaxNode, + child: &SyntaxNode, + name: &str, +) -> Option { + match scope.kind() { + SyntaxKind::EXPR => check_expr_for_function(scope, child, name), + _ => None, + } +} + +/// Check an Expr for local function definitions. 
+fn check_expr_for_function( + expr: &SyntaxNode, + child: &SyntaxNode, + name: &str, +) -> Option { + for stmt_node in expr.children() { + if stmt_node.kind() == SyntaxKind::STMT_LOCAL { + // Only look at locals that appear before the child + if stmt_node.text_range().end() > child.text_range().start() { + continue; + } + + if let Some(stmt_local) = StmtLocal::cast(stmt_node) { + for bind in stmt_local.binds() { + if let Some(sig) = check_bind_for_function(&bind, name) { + return Some(sig); + } + } + } + } + } + None +} + +/// Check a bind for a function definition. +fn check_bind_for_function(bind: &Bind, name: &str) -> Option { + match bind { + Bind::BindDestruct(bd) => { + // Check if this is a function value + let destruct = bd.into()?; + if let Destruct::DestructFull(full) = destruct { + let bind_name = full.name()?; + let ident = bind_name.ident_lit()?; + if ident.text() != name { + return None; + } + + // Check if the value is a function + let value = bd.value()?; + if let Some(base) = value.expr_base() { + if let ExprBase::ExprFunction(func) = base { + return extract_params_from_function_expr(&func); + } + } + } + None + } + Bind::BindFunction(bf) => { + let bind_name = bf.name()?; + let ident = bind_name.ident_lit()?; + if ident.text() != name { + return None; + } + + extract_params_from_bind_function(bf) + } + } +} + +/// Extract parameter names from a BindFunction. +fn extract_params_from_bind_function(func: &BindFunction) -> Option { + let params_desc = func.params()?; + let params: Vec = params_desc + .params() + .filter_map(|p| extract_param_info(&p)) + .collect(); + + Some(LocalFunctionSignature { params }) +} + +/// Extract parameter names from an ExprFunction. 
+fn extract_params_from_function_expr( + func: &jrsonnet_rowan_parser::nodes::ExprFunction, +) -> Option { + let params_desc = func.params_desc()?; + let params: Vec = params_desc + .params() + .filter_map(|p| extract_param_info(&p)) + .collect(); + + Some(LocalFunctionSignature { params }) +} + +/// Extract parameter label and matching name. +fn extract_param_info(param: &Param) -> Option { + let destruct = param.destruct()?; + let name = match destruct { + Destruct::DestructFull(full) => { + let name = full.name()?; + name.ident_lit()?.text().to_string() + } + Destruct::DestructArray(_) => "[array]".to_string(), + Destruct::DestructObject(_) => "{object}".to_string(), + Destruct::DestructSkip(_) => return None, + }; + + let label = if param.assign_token().is_some() { + format!("{name}=...") + } else { + name.clone() + }; + Some(SignatureParamInfo { label, name }) +} + +/// Find the token at the given byte offset. +fn token_at_offset(root: &SyntaxNode, offset: u32) -> Option { + root.token_at_offset(rowan::TextSize::from(offset)) + .right_biased() +} + +#[cfg(test)] +mod tests { + use jrsonnet_lsp_document::DocVersion; + + use super::*; + + fn document_with_cursor(code_with_cursor: &str) -> (Document, LspPosition) { + let cursor = code_with_cursor + .find('|') + .expect("test source should include `|` cursor marker"); + let mut source = code_with_cursor.to_string(); + source.remove(cursor); + + let before = &code_with_cursor[..cursor]; + let line = before.bytes().filter(|&b| b == b'\n').count() as u32; + let column = before + .rsplit_once('\n') + .map_or(before.len(), |(_, suffix)| suffix.len()) as u32; + + ( + Document::new(source, DocVersion::new(1)), + (line, column).into(), + ) + } + + #[test] + fn test_parse_signature_params() { + assert_eq!( + parse_signature_params("(func, arr"), + vec!["func".to_string(), "arr".to_string()] + ); + assert_eq!(parse_signature_params("(x"), vec!["x".to_string()]); + assert_eq!( + parse_signature_params("(a, b, c"), + 
vec!["a".to_string(), "b".to_string(), "c".to_string()] + ); + } + + #[test] + fn test_stdlib_signature_help() { + let code = "std.filter(|"; + // ^ cursor here (position 11) + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + let pos = (0, 11).into(); + + let help = signature_help(&doc, pos); + // May be None if we can't parse incomplete code well + if let Some(help) = help { + // Exactly one signature for std.filter + let labels: Vec<_> = help.signatures.iter().map(|s| s.label.as_str()).collect(); + assert_eq!(labels, vec!["std.filter(func, arr)"]); + } + } + + #[test] + fn test_local_function_signature_help() { + let code = r"local add(a, b) = a + b; add(1|"; + // ^ cursor here + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + let pos = (0, 30).into(); + + let help = signature_help(&doc, pos); + // May be None depending on parser error recovery + if let Some(help) = help { + // Exactly one signature for local add function + let labels: Vec<_> = help.signatures.iter().map(|s| s.label.as_str()).collect(); + assert_eq!(labels, vec!["add(a, b)"]); + } + } + + #[test] + fn test_no_signature_help_outside_call() { + let code = "local x = 1; x"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + let pos = (0, 13).into(); + + let help = signature_help(&doc, pos); + assert_eq!(help, None); + } + + #[test] + fn test_stdlib_named_argument_active_parameter() { + let (doc, pos) = document_with_cursor(r#"std.substr(str="abc", from=1|, len=1)"#); + let help = signature_help(&doc, pos).expect("signature help should be available"); + assert_eq!(help.active_parameter, Some(1)); + assert_eq!(help.signatures[0].active_parameter, Some(1)); + } + + #[test] + fn test_local_named_argument_active_parameter() { + let (doc, pos) = + document_with_cursor(r"local add(a, b, c) = a + b + c; add(c=3, a=1, b=2|)"); + let help = signature_help(&doc, pos).expect("signature help should be available"); + assert_eq!(help.active_parameter, 
Some(1)); + assert_eq!(help.signatures[0].active_parameter, Some(1)); + } +} diff --git a/crates/jrsonnet-lsp-handlers/src/symbols.rs b/crates/jrsonnet-lsp-handlers/src/symbols.rs new file mode 100644 index 00000000..a701ed84 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/symbols.rs @@ -0,0 +1,562 @@ +//! Document symbols handler for providing outline view. +//! +//! Extracts symbols from Jsonnet AST including: +//! - Local bindings (local x = ...) +//! - Object fields +//! - Function definitions + +// The `deprecated` field on DocumentSymbol is deprecated in lsp-types 0.96 +// in favor of `tags`, but it's still a required field. Suppress the warning. +#![allow(deprecated)] + +use jrsonnet_lsp_document::{to_lsp_range, Document, LineIndex}; +use jrsonnet_rowan_parser::{ + nodes::{ + Bind, BindDestruct, BindFunction, ExprBase, ExprObject, FieldName, FieldNameFixed, Member, + MemberBindStmt, MemberFieldMethod, MemberFieldNormal, ObjBody, ObjBodyMemberList, ObjLocal, + Stmt, StmtLocal, + }, + AstNode, AstToken, +}; +use lsp_types::{DocumentSymbol, Location, SymbolInformation, SymbolKind, Uri}; +use rowan::TextRange; + +/// Extract document symbols from a parsed document. +pub fn document_symbols(document: &Document) -> Vec { + let ast = document.ast(); + let text = document.text(); + let line_index = document.line_index(); + + let mut symbols = Vec::new(); + + if let Some(expr) = ast.expr() { + // Process top-level statements (local bindings) + for stmt in expr.stmts() { + if let Some(sym) = process_stmt(&stmt, text, line_index) { + symbols.push(sym); + } + } + + // Process the main expression + if let Some(base) = expr.expr_base() { + symbols.extend(process_expr_base(&base, text, line_index)); + } + } + + symbols +} + +/// Process a statement and extract symbols. 
+fn process_stmt(stmt: &Stmt, text: &str, line_index: &LineIndex) -> Option { + match stmt { + Stmt::StmtLocal(local) => process_local_stmt(local, text, line_index), + Stmt::StmtAssert(_) => None, // Asserts don't produce symbols + } +} + +/// Process a local statement. +fn process_local_stmt( + local: &StmtLocal, + text: &str, + line_index: &LineIndex, +) -> Option { + // Local statements can have multiple bindings + let binds: Vec<_> = local.binds().collect(); + + if binds.len() == 1 { + // Single binding - return it directly + process_bind(&binds[0], text, line_index) + } else if !binds.is_empty() { + // Multiple bindings - create a container + let range = local.syntax().text_range(); + let children: Vec<_> = binds + .iter() + .filter_map(|b| process_bind(b, text, line_index)) + .collect(); + + if children.is_empty() { + return None; + } + + Some(create_symbol( + "local".to_string(), + SymbolKind::NAMESPACE, + range, + range, + line_index, + text, + Some(children), + )) + } else { + None + } +} + +/// Process a binding and extract symbols. +fn process_bind(bind: &Bind, text: &str, line_index: &LineIndex) -> Option { + match bind { + Bind::BindDestruct(bd) => process_bind_destruct(bd, text, line_index), + Bind::BindFunction(bf) => process_bind_function(bf, text, line_index), + } +} + +/// Process a destructuring binding. 
+fn process_bind_destruct( + bind: &BindDestruct, + text: &str, + line_index: &LineIndex, +) -> Option { + let destruct = bind.into()?; + + // Get the name from the destruct pattern + let name = get_destruct_name(&destruct)?; + let range = bind.syntax().text_range(); + + // Check if the value is a function + let (kind, children) = if let Some(value_expr) = bind.value() { + if let Some(base) = value_expr.expr_base() { + match &base { + ExprBase::ExprFunction(_) => (SymbolKind::FUNCTION, None), + ExprBase::ExprObject(obj) => { + let children = process_object(obj, text, line_index); + ( + SymbolKind::OBJECT, + if children.is_empty() { + None + } else { + Some(children) + }, + ) + } + _ => (SymbolKind::VARIABLE, None), + } + } else { + (SymbolKind::VARIABLE, None) + } + } else { + (SymbolKind::VARIABLE, None) + }; + + Some(create_symbol( + name, kind, range, range, line_index, text, children, + )) +} + +/// Process a function binding. +fn process_bind_function( + bind: &BindFunction, + text: &str, + line_index: &LineIndex, +) -> Option { + let name = bind.name()?.ident_lit()?.text().to_string(); + let range = bind.syntax().text_range(); + + // Get parameter names for detail + let detail = if let Some(params) = bind.params() { + let param_names: Vec<_> = params + .params() + .filter_map(|p| p.destruct().and_then(|d| get_destruct_name(&d))) + .collect(); + Some(format!("({})", param_names.join(", "))) + } else { + None + }; + + Some(DocumentSymbol { + name, + detail, + kind: SymbolKind::FUNCTION, + tags: None, + deprecated: None, + range: to_lsp_range(range, line_index, text), + selection_range: to_lsp_range(range, line_index, text), + children: None, + }) +} + +/// Process an expression base and extract symbols. +fn process_expr_base(base: &ExprBase, text: &str, line_index: &LineIndex) -> Vec { + match base { + ExprBase::ExprObject(obj) => process_object(obj, text, line_index), + _ => Vec::new(), + } +} + +/// Process an object expression. 
+fn process_object(obj: &ExprObject, text: &str, line_index: &LineIndex) -> Vec { + let Some(body) = obj.obj_body() else { + return Vec::new(); + }; + + match body { + ObjBody::ObjBodyMemberList(list) => process_member_list(&list, text, line_index), + ObjBody::ObjBodyComp(_) => Vec::new(), // Object comprehensions don't have static fields + } +} + +/// Process a member list (object body). +fn process_member_list( + list: &ObjBodyMemberList, + text: &str, + line_index: &LineIndex, +) -> Vec { + let mut symbols = Vec::new(); + + for member in list.members() { + if let Some(sym) = process_member(&member, text, line_index) { + symbols.push(sym); + } + } + + symbols +} + +/// Process a single member. +fn process_member(member: &Member, text: &str, line_index: &LineIndex) -> Option { + match member { + Member::MemberBindStmt(bind_stmt) => process_member_bind(bind_stmt, text, line_index), + Member::MemberFieldNormal(field) => process_field_normal(field, text, line_index), + Member::MemberFieldMethod(method) => process_field_method(method, text, line_index), + Member::MemberAssertStmt(_) => None, // Asserts don't produce symbols + } +} + +/// Process a member bind statement (local inside object). +fn process_member_bind( + bind_stmt: &MemberBindStmt, + text: &str, + line_index: &LineIndex, +) -> Option { + let obj_local = bind_stmt.obj_local()?; + process_obj_local(&obj_local, text, line_index) +} + +/// Process an object-local binding. +fn process_obj_local( + obj_local: &ObjLocal, + text: &str, + line_index: &LineIndex, +) -> Option { + let bind = obj_local.bind()?; + process_bind(&bind, text, line_index) +} + +/// Process a normal field. 
+fn process_field_normal( + field: &MemberFieldNormal, + text: &str, + line_index: &LineIndex, +) -> Option { + let name = get_field_name(&field.field_name()?)?; + let range = field.syntax().text_range(); + + // Check if the value is an object (for nested symbols) + let children = if let Some(expr) = field.expr() { + if let Some(ExprBase::ExprObject(obj)) = expr.expr_base() { + let children = process_object(&obj, text, line_index); + if children.is_empty() { + None + } else { + Some(children) + } + } else { + None + } + } else { + None + }; + + // Determine kind based on value + let kind = if let Some(expr) = field.expr() { + if let Some(base) = expr.expr_base() { + match base { + ExprBase::ExprFunction(_) => SymbolKind::FUNCTION, + ExprBase::ExprObject(_) => SymbolKind::OBJECT, + ExprBase::ExprArray(_) => SymbolKind::ARRAY, + _ => SymbolKind::FIELD, + } + } else { + SymbolKind::FIELD + } + } else { + SymbolKind::FIELD + }; + + Some(create_symbol( + name, kind, range, range, line_index, text, children, + )) +} + +/// Process a method field. +fn process_field_method( + method: &MemberFieldMethod, + text: &str, + line_index: &LineIndex, +) -> Option { + let name = get_field_name(&method.field_name()?)?; + let range = method.syntax().text_range(); + + // Get parameter names for detail + let detail = if let Some(params) = method.params_desc() { + let param_names: Vec<_> = params + .params() + .filter_map(|p| p.destruct().and_then(|d| get_destruct_name(&d))) + .collect(); + Some(format!("({})", param_names.join(", "))) + } else { + None + }; + + Some(DocumentSymbol { + name, + detail, + kind: SymbolKind::METHOD, + tags: None, + deprecated: None, + range: to_lsp_range(range, line_index, text), + selection_range: to_lsp_range(range, line_index, text), + children: None, + }) +} + +/// Get the name from a field name node. 
+fn get_field_name(field_name: &FieldName) -> Option { + match field_name { + FieldName::FieldNameFixed(fixed) => get_fixed_field_name(fixed), + FieldName::FieldNameDynamic(_) => Some("[computed]".to_string()), + } +} + +/// Get the name from a fixed field name. +fn get_fixed_field_name(fixed: &FieldNameFixed) -> Option { + if let Some(name) = fixed.id() { + Some(name.ident_lit()?.text().to_string()) + } else { + fixed + .text() + .map(|text| text.text().trim_matches('"').trim_matches('\'').to_string()) + } +} + +/// Get the name from a destruct pattern. +fn get_destruct_name(destruct: &jrsonnet_rowan_parser::nodes::Destruct) -> Option { + use jrsonnet_rowan_parser::nodes::Destruct; + match destruct { + Destruct::DestructFull(full) => Some(full.name()?.ident_lit()?.text().to_string()), + Destruct::DestructSkip(_) => None, + Destruct::DestructArray(_) => Some("[array]".to_string()), + Destruct::DestructObject(_) => Some("{object}".to_string()), + } +} + +/// Create a DocumentSymbol with the given properties. +fn create_symbol( + name: String, + kind: SymbolKind, + range: TextRange, + selection_range: TextRange, + line_index: &LineIndex, + text: &str, + children: Option>, +) -> DocumentSymbol { + DocumentSymbol { + name, + detail: None, + kind, + tags: None, + deprecated: None, + range: to_lsp_range(range, line_index, text), + selection_range: to_lsp_range(selection_range, line_index, text), + children, + } +} + +/// Search for symbols matching a query across a document. +/// Returns a flat list of SymbolInformation. +pub fn workspace_symbols_for_document( + document: &Document, + uri: &Uri, + query: &str, +) -> Vec { + let doc_symbols = document_symbols(document); + let mut results = Vec::new(); + + // Flatten and filter document symbols + flatten_symbols(&doc_symbols, uri, query, None, &mut results); + + results +} + +/// Recursively flatten DocumentSymbol tree into SymbolInformation list. 
+fn flatten_symbols( + symbols: &[DocumentSymbol], + uri: &Uri, + query: &str, + container_name: Option<&str>, + results: &mut Vec, +) { + let query_lower = query.to_lowercase(); + + for symbol in symbols { + // Check if symbol name matches query (case-insensitive substring match) + let matches = query.is_empty() || symbol.name.to_lowercase().contains(&query_lower); + + if matches { + results.push(SymbolInformation { + name: symbol.name.clone(), + kind: symbol.kind, + tags: symbol.tags.clone(), + deprecated: symbol.deprecated, + location: Location { + uri: uri.clone(), + range: symbol.range, + }, + container_name: container_name.map(String::from), + }); + } + + // Recursively process children + if let Some(children) = &symbol.children { + flatten_symbols(children, uri, query, Some(&symbol.name), results); + } + } +} + +#[cfg(test)] +mod tests { + use jrsonnet_lsp_document::DocVersion; + + use super::*; + + #[test] + fn test_empty_object() { + let doc = Document::new("{}".to_string(), DocVersion::new(1)); + let symbols = document_symbols(&doc); + assert!(symbols.is_empty()); + } + + #[test] + fn test_object_with_fields() { + let doc = Document::new( + r#"{ name: "test", value: 42, nested: { inner: true } }"#.to_string(), + DocVersion::new(1), + ); + let symbols = document_symbols(&doc); + + // Check symbol names + let names: Vec<_> = symbols.iter().map(|s| s.name.as_str()).collect(); + assert_eq!(names, vec!["name", "value", "nested"]); + + // Check nested children + let nested_children = symbols[2] + .children + .as_ref() + .expect("nested should have children"); + let nested_names: Vec<_> = nested_children.iter().map(|s| s.name.as_str()).collect(); + assert_eq!(nested_names, vec!["inner"]); + } + + #[test] + fn test_local_bindings() { + let doc = Document::new( + r"local x = 1; local y = 2; { a: x, b: y }".to_string(), + DocVersion::new(1), + ); + let symbols = document_symbols(&doc); + + // Should have local x, local y, and object fields a, b + let names: Vec<_> 
= symbols.iter().map(|s| s.name.as_str()).collect(); + assert_eq!(names, vec!["x", "y", "a", "b"]); + } + + #[test] + fn test_function_binding() { + let doc = Document::new( + r"local add(a, b) = a + b; { result: add(1, 2) }".to_string(), + DocVersion::new(1), + ); + let symbols = document_symbols(&doc); + + // Assert full structure of symbols + let symbol_info: Vec<(&str, SymbolKind)> = + symbols.iter().map(|s| (s.name.as_str(), s.kind)).collect(); + assert_eq!( + symbol_info, + vec![("add", SymbolKind::FUNCTION), ("result", SymbolKind::FIELD)] + ); + } + + #[test] + fn test_method_field() { + let doc = Document::new( + r#"{ greet(name): "Hello, " + name }"#.to_string(), + DocVersion::new(1), + ); + let symbols = document_symbols(&doc); + + let names: Vec<_> = symbols.iter().map(|s| s.name.as_str()).collect(); + assert_eq!(names, vec!["greet"]); + assert_eq!(symbols[0].kind, SymbolKind::METHOD); + } + + #[test] + fn test_workspace_symbols_empty_query() { + let doc = Document::new( + r#"local x = 1; { name: "test", value: x }"#.to_string(), + DocVersion::new(1), + ); + let uri: Uri = "file:///test.jsonnet".parse().unwrap(); + + let symbols = workspace_symbols_for_document(&doc, &uri, ""); + + // Should return all symbols: x, name, value + let names: Vec<_> = symbols.iter().map(|s| s.name.as_str()).collect(); + assert_eq!(names, vec!["x", "name", "value"]); + } + + #[test] + fn test_workspace_symbols_with_query() { + let doc = Document::new( + r"{ myField: 1, otherField: 2, myMethod(x): x }".to_string(), + DocVersion::new(1), + ); + let uri: Uri = "file:///test.jsonnet".parse().unwrap(); + + let symbols = workspace_symbols_for_document(&doc, &uri, "my"); + + // Should match myField and myMethod + let names: Vec<_> = symbols.iter().map(|s| s.name.as_str()).collect(); + assert_eq!(names, vec!["myField", "myMethod"]); + } + + #[test] + fn test_workspace_symbols_case_insensitive() { + let doc = Document::new( + r"{ MyField: 1, myfield: 2, MYFIELD: 3 }".to_string(), + 
DocVersion::new(1), + ); + let uri: Uri = "file:///test.jsonnet".parse().unwrap(); + + let symbols = workspace_symbols_for_document(&doc, &uri, "myfield"); + + // Should match all three (case insensitive) + let names: Vec<_> = symbols.iter().map(|s| s.name.as_str()).collect(); + assert_eq!(names, vec!["MyField", "myfield", "MYFIELD"]); + } + + #[test] + fn test_workspace_symbols_nested() { + let doc = Document::new( + r"{ outer: { innerField: 1 } }".to_string(), + DocVersion::new(1), + ); + let uri: Uri = "file:///test.jsonnet".parse().unwrap(); + + let symbols = workspace_symbols_for_document(&doc, &uri, "inner"); + + // Should find innerField with container_name "outer" + let names: Vec<_> = symbols.iter().map(|s| s.name.as_str()).collect(); + assert_eq!(names, vec!["innerField"]); + assert_eq!(symbols[0].container_name, Some("outer".to_string())); + } +} diff --git a/crates/jrsonnet-lsp-import/Cargo.toml b/crates/jrsonnet-lsp-import/Cargo.toml new file mode 100644 index 00000000..88b240f0 --- /dev/null +++ b/crates/jrsonnet-lsp-import/Cargo.toml @@ -0,0 +1,17 @@ +[package] +name = "jrsonnet-lsp-import" +authors.workspace = true +edition.workspace = true +license.workspace = true +repository.workspace = true +version.workspace = true +description = "Import graph and work queue for jrsonnet LSP" + +[dependencies] +jrsonnet-lsp-document = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-document" } +jrsonnet-rowan-parser = { version = "0.5.0-pre97", path = "../jrsonnet-rowan-parser" } +rayon = "1.11.0" +rowan.workspace = true + +[lints] +workspace = true diff --git a/crates/jrsonnet-lsp-import/src/graph.rs b/crates/jrsonnet-lsp-import/src/graph.rs new file mode 100644 index 00000000..9c796b50 --- /dev/null +++ b/crates/jrsonnet-lsp-import/src/graph.rs @@ -0,0 +1,865 @@ +//! Import graph for tracking file dependencies. +//! +//! Maintains a bidirectional graph of import relationships between files, +//! enabling efficient cross-file reference lookups. 
+ +use std::collections::{HashMap, HashSet, VecDeque}; + +use jrsonnet_lsp_document::{CanonicalPath, Document}; +use jrsonnet_rowan_parser::{ + nodes::{Bind, Destruct, ExprImport, StmtLocal}, + AstNode, SyntaxKind, +}; + +use crate::{ + parse::extract_import_path, + work_queue::{WorkQueue, WorkQueueExt}, +}; + +/// Information about an import in a file. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ImportEntry { + /// The binding name if this import is bound to a variable. + /// e.g., "lib" in `local lib = import "lib.jsonnet"` + pub binding_name: Option, + /// The raw import path as written in the source. + pub import_path: String, + /// The resolved canonical path of the imported file. + pub resolved_path: Option, +} + +/// Import graph tracking dependencies between files. +/// +/// This structure maintains two maps: +/// - `imports`: file → list of files it imports +/// - `imported_by`: file → list of files that import it (reverse index) +#[derive(Debug, Default)] +pub struct ImportGraph { + /// Map of file → import entries in that file. + imports: HashMap>, + /// Reverse index: file → files that import it. + imported_by: HashMap>, +} + +impl ImportGraph { + /// Create a new empty import graph. + pub fn new() -> Self { + Self::default() + } + + /// Update the import graph for a file. + /// + /// This parses the document to find all imports, resolves their paths, + /// and updates both the forward and reverse maps. + /// Update a file's imports in the graph with pre-parsed entries. + /// + /// This is the preferred method when you want to minimize lock hold time. + /// Parse the imports first using [`parse_document_imports`], then call this + /// method while holding the write lock. 
+ pub fn update_file_with_entries(&mut self, path: &CanonicalPath, entries: Vec) { + // Remove old entries for this file + self.remove_file(path); + + // Update imported_by reverse index + for entry in &entries { + if let Some(ref resolved) = entry.resolved_path { + self.imported_by + .entry(resolved.clone()) + .or_default() + .insert(path.clone()); + } + } + + // Store the import entries + self.imports.insert(path.clone(), entries); + } + + /// Update a file's imports in the graph. + /// + /// This parses the document and updates the import graph atomically. + /// For better performance when parsing is slow, use [`parse_document_imports`] + /// followed by [`update_file_with_entries`] to parse outside the lock. + pub fn update_file(&mut self, path: &CanonicalPath, doc: &Document, resolve_import: F) + where + F: Fn(&str) -> Option, + { + let entries = parse_document_imports(doc, &resolve_import); + self.update_file_with_entries(path, entries); + } + + /// Remove a file from the import graph. + /// + /// This removes the file's import entries and updates the reverse index. + pub fn remove_file(&mut self, path: &CanonicalPath) { + // Remove from imported_by reverse index + if let Some(old_entries) = self.imports.get(path) { + for entry in old_entries { + if let Some(ref resolved) = entry.resolved_path { + if let Some(importers) = self.imported_by.get_mut(resolved) { + importers.remove(path); + // Clean up empty sets + if importers.is_empty() { + self.imported_by.remove(resolved); + } + } + } + } + } + + // Remove the import entries + self.imports.remove(path); + } + + /// Get the files that directly import a given file. + pub fn direct_importers(&self, path: &CanonicalPath) -> Vec { + self.imported_by + .get(path) + .map(|s| s.iter().cloned().collect()) + .unwrap_or_default() + } + + /// Get all files that transitively import a given file. 
+ /// + /// This performs a breadth-first search through the import graph + /// to find all files that depend on the given file, directly or indirectly. + pub fn transitive_importers(&self, path: &CanonicalPath) -> HashSet { + let mut result = HashSet::new(); + let mut queue = VecDeque::from([path.clone()]); + + while let Some(current) = queue.pop_front() { + for importer in self.direct_importers(¤t) { + if result.insert(importer.clone()) { + queue.push_back(importer); + } + } + } + + result + } + + /// Get the import entries for a file. + pub fn imports(&self, path: &CanonicalPath) -> &[ImportEntry] { + self.imports.get(path).map(Vec::as_slice).unwrap_or(&[]) + } + + /// Find imports in a file that point to a specific target file. + pub fn imports_of_target( + &self, + file: &CanonicalPath, + target: &CanonicalPath, + ) -> Vec<&ImportEntry> { + self.imports + .get(file) + .map(|entries| { + entries + .iter() + .filter(|e| e.resolved_path.as_ref() == Some(target)) + .collect() + }) + .unwrap_or_default() + } + + /// Get the number of files tracked in the graph. + pub fn file_count(&self) -> usize { + self.imports.len() + } + + /// Get all files tracked in the graph. + pub fn all_files(&self) -> impl Iterator { + self.imports.keys() + } + + /// Compute a topological ordering of files based on import dependencies. + /// + /// Returns files in an order where each file comes after all files it imports. + /// Files at the same "level" (no dependencies between them) can be processed + /// in parallel. + /// + /// Returns `None` if there's a cycle in the import graph. 
+ pub fn topological_order(&self) -> Option>> { + let mut in_degree: HashMap<&CanonicalPath, usize> = HashMap::new(); + let mut levels: Vec> = Vec::new(); + + // Initialize in-degree for all files to 0 + for path in self.imports.keys() { + in_degree.insert(path, 0); + } + + // Calculate in-degree: count how many dependencies each file has + // (how many files it imports that are also in our graph) + for (path, entries) in &self.imports { + let dep_count = entries + .iter() + .filter(|e| { + e.resolved_path + .as_ref() + .map(|p| self.imports.contains_key(p)) + .unwrap_or(false) + }) + .count(); + in_degree.insert(path, dep_count); + } + + // Find all files with no dependencies (in-degree 0) + let mut current_level: Vec = in_degree + .iter() + .filter(|(_, °)| deg == 0) + .map(|(&path, _)| path.clone()) + .collect(); + + let mut processed = HashSet::new(); + + while !current_level.is_empty() { + // Sort for deterministic ordering + current_level.sort(); + + // Mark current level as processed + for path in ¤t_level { + processed.insert(path.clone()); + } + + levels.push(current_level.clone()); + + // Find next level: files whose dependencies are all now processed + let mut next_level = Vec::new(); + for path in ¤t_level { + // For each file that imports this one + if let Some(importers) = self.imported_by.get(path) { + for importer in importers { + if processed.contains(importer) { + continue; + } + // Check if all dependencies of importer are processed + let all_deps_processed = self + .imports + .get(importer) + .map(|entries| { + entries.iter().all(|e| { + e.resolved_path + .as_ref() + .map(|p| { + processed.contains(p) || !self.imports.contains_key(p) + }) + .unwrap_or(true) + }) + }) + .unwrap_or(true); + + if all_deps_processed && !next_level.contains(importer) { + next_level.push(importer.clone()); + } + } + } + } + + current_level = next_level; + } + + // Check if all files were processed (no cycles) + if processed.len() == self.imports.len() { + Some(levels) 
+ } else { + None // Cycle detected + } + } + + /// Process files in topological order with parallel processing within each level. + /// + /// This computes a topological ordering of files based on import dependencies, + /// then processes each level in parallel. Files in the same level have no + /// dependencies on each other and can safely be processed concurrently. + /// + /// # Arguments + /// * `f` - Function to call for each file path. Must be `Sync` for parallel execution. + /// + /// # Returns + /// * `Some(())` if processing completed successfully + /// * `None` if there's a cycle in the import graph + /// + /// # Example + /// ```ignore + /// graph.process_in_parallel(|path| { + /// analyze_file(path); + /// }); + /// ``` + pub fn process_in_parallel(&self, f: F) -> Option<()> + where + F: Fn(&CanonicalPath) + Sync, + { + use rayon::prelude::*; + + let levels = self.topological_order()?; + + // Process each level sequentially, but files within each level in parallel + for level in levels { + level.par_iter().for_each(&f); + } + + Some(()) + } + + /// Process files in reverse topological order with parallel processing within each level. + /// + /// Similar to `process_in_parallel`, but processes files in reverse order - + /// files that are imported by others are processed last. This is useful when + /// you need to process dependents before their dependencies. + pub fn process_in_parallel_reverse(&self, f: F) -> Option<()> + where + F: Fn(&CanonicalPath) + Sync, + { + use rayon::prelude::*; + + let levels = self.topological_order()?; + + // Process levels in reverse order + for level in levels.into_iter().rev() { + level.par_iter().for_each(&f); + } + + Some(()) + } + + /// Process a file and its transitive dependencies using a work queue. + /// + /// This dynamically discovers dependencies during processing and ensures + /// dependencies are processed before dependents. Uses per-level parallelism. 
+ /// + /// # Arguments + /// * `root` - The root file to process + /// * `f` - Function to call for each file path + /// + /// # Example + /// ```ignore + /// graph.process_with_dependencies(&path, |p| { + /// analyze_file(p); + /// }); + /// ``` + pub fn process_with_dependencies(&self, root: &CanonicalPath, f: F) + where + F: Fn(&CanonicalPath) + Sync, + { + let mut work = WorkQueue::new(); + work.push(root.clone()); + + let levels = work.run(|path, deps| { + // Get dependencies from import graph + for entry in self.imports(path) { + if let Some(ref resolved) = entry.resolved_path { + deps.push(resolved.clone()); + } + } + }); + + // Process levels in dependency order (leaves first) + levels.process_parallel(&f); + } + + /// Process a file and its transitive importers using a work queue. + /// + /// This processes files in reverse dependency order - the root file first, + /// then files that import it, and so on. Uses per-level parallelism. + /// + /// Useful for invalidation cascading: when a file changes, process it + /// and all files that depend on it. + pub fn process_importers_with_work_queue(&self, root: &CanonicalPath, f: F) + where + F: Fn(&CanonicalPath) + Sync, + { + let mut work = WorkQueue::new(); + work.push(root.clone()); + + let mut levels = work.run(|path, deps| { + // Get files that import this file + for importer in self.direct_importers(path) { + deps.push(importer); + } + }); + + // Reverse levels: work queue puts leaves (files with no importers) at level 0, + // but we want root first, then progressively outward to importers + levels.reverse(); + + // Process levels (root first, then importers) + levels.process_parallel(&f); + } +} + +/// Parse import statements from a document. +/// +/// This extracts all import entries from the document without modifying +/// the import graph. Use this when you want to parse outside a lock, +/// then pass the results to [`ImportGraph::update_file_with_entries`]. 
+pub fn parse_document_imports(doc: &Document, resolve_import: &F) -> Vec +where + F: Fn(&str) -> Option, +{ + let mut entries = Vec::new(); + let mut seen_import_ranges = std::collections::HashSet::new(); + let ast = doc.ast(); + + // First pass: find imports in local statements (these have bindings) + for node in ast.syntax().descendants() { + if node.kind() == SyntaxKind::STMT_LOCAL { + if let Some(stmt_local) = StmtLocal::cast(node.clone()) { + for bind in stmt_local.binds() { + if let Some((entry, import_range)) = + parse_bind_import_with_range(&bind, resolve_import) + { + seen_import_ranges.insert(import_range); + entries.push(entry); + } + } + } + } + } + + // Second pass: find bare import expressions that weren't part of a local statement + for node in ast.syntax().descendants() { + if node.kind() == SyntaxKind::EXPR_IMPORT { + let range = node.text_range(); + // Skip if we already captured this import in a local statement + if seen_import_ranges.contains(&range) { + continue; + } + if let Some(import) = ExprImport::cast(node) { + if let Some(path) = extract_import_path(&import) { + let resolved = resolve_import(&path); + entries.push(ImportEntry { + binding_name: None, + import_path: path, + resolved_path: resolved, + }); + } + } + } + } + + entries +} + +/// Parse a bind to extract import information, returning the import's text range. 
+fn parse_bind_import_with_range( + bind: &Bind, + resolve_import: &F, +) -> Option<(ImportEntry, rowan::TextRange)> +where + F: Fn(&str) -> Option, +{ + let Bind::BindDestruct(bd) = bind else { + return None; + }; + + let destruct = bd.into()?; + let Destruct::DestructFull(full) = destruct else { + return None; + }; + + let bind_name = full.name()?.ident_lit()?.text().to_string(); + + // Check if the expression is an import + let expr = bd.value()?; + for node in expr.syntax().descendants() { + if node.kind() == SyntaxKind::EXPR_IMPORT { + let range = node.text_range(); + if let Some(import) = ExprImport::cast(node) { + if let Some(path) = extract_import_path(&import) { + let resolved = resolve_import(&path); + return Some(( + ImportEntry { + binding_name: Some(bind_name), + import_path: path, + resolved_path: resolved, + }, + range, + )); + } + } + } + } + + None +} + +#[cfg(test)] +mod tests { + use std::path::PathBuf; + + use jrsonnet_lsp_document::DocVersion; + + use super::*; + + fn test_path(name: &str) -> CanonicalPath { + CanonicalPath::new(PathBuf::from(format!("/test/{name}"))) + } + + /// A simple resolver that just appends the import path to /test/ + fn simple_resolver(import: &str) -> Option { + if import.is_empty() { + None + } else { + Some(test_path(import)) + } + } + + #[test] + fn test_parse_local_import() { + let code = r#"local lib = import "lib.jsonnet"; lib"#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + let entries = parse_document_imports(&doc, &simple_resolver); + + assert_eq!( + entries, + vec![ImportEntry { + binding_name: Some("lib".to_string()), + import_path: "lib.jsonnet".to_string(), + resolved_path: Some(test_path("lib.jsonnet")), + }] + ); + } + + #[test] + fn test_parse_local_import_single_quote() { + let code = "local lib = import 'lib.jsonnet'; lib"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + let entries = parse_document_imports(&doc, &simple_resolver); + + assert_eq!( + 
entries, + vec![ImportEntry { + binding_name: Some("lib".to_string()), + import_path: "lib.jsonnet".to_string(), + resolved_path: Some(test_path("lib.jsonnet")), + }] + ); + } + + #[test] + fn test_parse_multiple_imports() { + let code = r#" +local lib1 = import "lib1.jsonnet"; +local lib2 = import "lib2.jsonnet"; +lib1 + lib2 +"#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + let entries = parse_document_imports(&doc, &simple_resolver); + + assert_eq!( + entries, + vec![ + ImportEntry { + binding_name: Some("lib1".to_string()), + import_path: "lib1.jsonnet".to_string(), + resolved_path: Some(test_path("lib1.jsonnet")), + }, + ImportEntry { + binding_name: Some("lib2".to_string()), + import_path: "lib2.jsonnet".to_string(), + resolved_path: Some(test_path("lib2.jsonnet")), + }, + ] + ); + } + + #[test] + fn test_import_graph_update() { + let mut graph = ImportGraph::new(); + + let main = test_path("main.jsonnet"); + let lib = test_path("lib.jsonnet"); + let code = r#"local lib = import "lib.jsonnet"; lib"#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + graph.update_file(&main, &doc, simple_resolver); + + // Check that main imports lib + let imports = graph.imports(&main); + assert_eq!( + imports, + vec![ImportEntry { + binding_name: Some("lib".to_string()), + import_path: "lib.jsonnet".to_string(), + resolved_path: Some(lib.clone()), + }] + ); + + // Check the reverse index + let importers = graph.direct_importers(&lib); + assert_eq!(importers, vec![main.clone()]); + } + + #[test] + fn test_import_graph_remove() { + let mut graph = ImportGraph::new(); + + let main = test_path("main.jsonnet"); + let code = r#"local lib = import "lib.jsonnet"; lib"#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + graph.update_file(&main, &doc, simple_resolver); + + // Remove main + graph.remove_file(&main); + + // Check that main no longer has imports + assert!(graph.imports(&main).is_empty()); + + // Check the 
reverse index is updated + let lib = test_path("lib.jsonnet"); + assert!(graph.direct_importers(&lib).is_empty()); + } + + #[test] + fn test_transitive_importers() { + let mut graph = ImportGraph::new(); + + // Setup: main.jsonnet -> utils.jsonnet -> lib.jsonnet + let main = test_path("main.jsonnet"); + let utils = test_path("utils.jsonnet"); + let lib = test_path("lib.jsonnet"); + + // main imports utils + let main_code = r#"local utils = import "utils.jsonnet"; utils"#; + let main_doc = Document::new(main_code.to_string(), DocVersion::new(1)); + graph.update_file(&main, &main_doc, simple_resolver); + + // utils imports lib + let utils_code = r#"local lib = import "lib.jsonnet"; lib"#; + let utils_doc = Document::new(utils_code.to_string(), DocVersion::new(1)); + graph.update_file(&utils, &utils_doc, simple_resolver); + + // Check transitive importers of lib + let importers = graph.transitive_importers(&lib); + assert_eq!(importers, HashSet::from([utils, main])); + } + + #[test] + fn test_imports_of_target() { + let mut graph = ImportGraph::new(); + + let main = test_path("main.jsonnet"); + let lib = test_path("lib.jsonnet"); + + let code = r#" +local lib = import "lib.jsonnet"; +local other = import "other.jsonnet"; +lib + other +"#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + graph.update_file(&main, &doc, simple_resolver); + + // Get imports of lib.jsonnet from main + let imports = graph.imports_of_target(&main, &lib); + assert_eq!( + imports, + vec![&ImportEntry { + binding_name: Some("lib".to_string()), + import_path: "lib.jsonnet".to_string(), + resolved_path: Some(lib), + }] + ); + } + + #[test] + fn test_topological_order_simple() { + let mut graph = ImportGraph::new(); + + // Setup: main -> utils -> lib (chain dependency) + let main = test_path("main.jsonnet"); + let utils = test_path("utils.jsonnet"); + let lib = test_path("lib.jsonnet"); + + // lib has no imports + let lib_code = "{}"; + let lib_doc = 
Document::new(lib_code.to_string(), DocVersion::new(1));
		graph.update_file(&lib, &lib_doc, simple_resolver);

		// utils imports lib
		let utils_code = r#"local lib = import "lib.jsonnet"; lib"#;
		let utils_doc = Document::new(utils_code.to_string(), DocVersion::new(1));
		graph.update_file(&utils, &utils_doc, simple_resolver);

		// main imports utils
		let main_code = r#"local utils = import "utils.jsonnet"; utils"#;
		let main_doc = Document::new(main_code.to_string(), DocVersion::new(1));
		graph.update_file(&main, &main_doc, simple_resolver);

		let levels = graph.topological_order().expect("Should not have cycles");

		// Levels follow the dependency chain:
		// lib (no deps), then utils (depends on lib), then main (depends on utils).
		assert_eq!(levels, vec![vec![lib], vec![utils], vec![main]]);
	}

	/// Files with no dependency relationship must land in the same level so
	/// callers can process them in parallel.
	#[test]
	fn test_topological_order_parallel_files() {
		let mut graph = ImportGraph::new();

		// Setup: main imports both utils1 and utils2 (independent)
		let main = test_path("main.jsonnet");
		let utils1 = test_path("utils1.jsonnet");
		let utils2 = test_path("utils2.jsonnet");

		// utils1 has no imports
		let utils1_doc = Document::new("{}".to_string(), DocVersion::new(1));
		graph.update_file(&utils1, &utils1_doc, simple_resolver);

		// utils2 has no imports
		let utils2_doc = Document::new("{}".to_string(), DocVersion::new(1));
		graph.update_file(&utils2, &utils2_doc, simple_resolver);

		// main imports both
		let main_code = r#"
local u1 = import "utils1.jsonnet";
local u2 = import "utils2.jsonnet";
u1 + u2
"#;
		let main_doc = Document::new(main_code.to_string(), DocVersion::new(1));
		graph.update_file(&main, &main_doc, simple_resolver);

		let levels = graph.topological_order().expect("Should not have cycles");

		// utils1 and utils2 are independent: first level (sorted, parallelizable);
		// main follows in the second level.
		assert_eq!(levels, vec![vec![utils1, utils2], vec![main]]);
	}

	#[test]
	fn test_process_in_parallel() {
		use std::sync::atomic::{AtomicUsize, Ordering};

		let mut graph = ImportGraph::new();

		// Setup: main -> lib (chain)
		let main = test_path("main.jsonnet");
		let lib = test_path("lib.jsonnet");

		// lib has no imports
		let lib_doc = Document::new("{}".to_string(), DocVersion::new(1));
		graph.update_file(&lib, &lib_doc, simple_resolver);

		// main imports lib
		let main_code = r#"local lib = import "lib.jsonnet"; lib"#;
		let main_doc = Document::new(main_code.to_string(), DocVersion::new(1));
		graph.update_file(&main, &main_doc, simple_resolver);

		let counter = AtomicUsize::new(0);
		graph
			.process_in_parallel(|_path| {
				counter.fetch_add(1, Ordering::SeqCst);
			})
			.expect("should process files in parallel");
		assert_eq!(counter.load(Ordering::SeqCst), 2);
	}

	#[test]
	fn test_process_in_parallel_order() {
		use std::sync::{Arc, Mutex};

		let mut graph = ImportGraph::new();

		// Setup: main -> lib (chain)
		let main = test_path("main.jsonnet");
		let lib = test_path("lib.jsonnet");

		// lib has no imports
		let lib_doc = Document::new("{}".to_string(), DocVersion::new(1));
		graph.update_file(&lib, &lib_doc, simple_resolver);

		// main imports lib
		let main_code = r#"local lib = import "lib.jsonnet"; lib"#;
		let main_doc = Document::new(main_code.to_string(), DocVersion::new(1));
		graph.update_file(&main, &main_doc, simple_resolver);

		let processed_order = Arc::new(Mutex::new(Vec::new()));
		let order_clone = Arc::clone(&processed_order);
		// FIX: the returned `Result` was previously dropped; check it like
		// `test_process_in_parallel` does so a processing failure fails the test.
		graph
			.process_in_parallel(move |path| {
				order_clone.lock().unwrap().push(path.clone());
			})
			.expect("should process files in parallel");

		let order: Vec<_> = processed_order.lock().unwrap().clone();
		// lib should be processed before main (lib has no deps, main depends on lib)
		assert_eq!(order, vec![lib, main]);
	}

	#[test]
	fn test_process_with_dependencies() {
		use std::sync::{Arc, Mutex};

		let mut graph = ImportGraph::new();

		// Setup: main -> utils -> lib
		let main = test_path("main.jsonnet");
		let utils = test_path("utils.jsonnet");
		let lib = test_path("lib.jsonnet");

		// lib has no imports
		let lib_doc = Document::new("{}".to_string(), DocVersion::new(1));
		graph.update_file(&lib, &lib_doc, simple_resolver);

		// utils imports lib
		let utils_code = r#"local lib = import "lib.jsonnet"; lib"#;
		let utils_doc = Document::new(utils_code.to_string(), DocVersion::new(1));
		graph.update_file(&utils, &utils_doc, simple_resolver);

		// main imports utils
		let main_code = r#"local utils = import "utils.jsonnet"; utils"#;
		let main_doc = Document::new(main_code.to_string(), DocVersion::new(1));
		graph.update_file(&main, &main_doc, simple_resolver);

		let processed = Arc::new(Mutex::new(Vec::new()));
		let processed_clone = Arc::clone(&processed);

		// Process main and its dependencies
		graph.process_with_dependencies(&main, move |path| {
			processed_clone.lock().unwrap().push(path.clone());
		});

		let order: Vec<_> = processed.lock().unwrap().clone();

		// Dependencies first: lib before utils, utils before main
		assert_eq!(order, vec![lib, utils, main]);
	}

	#[test]
	fn test_process_importers_with_work_queue() {
		use std::sync::{Arc, Mutex};

		let mut graph = ImportGraph::new();

		// Setup: main -> utils -> lib
		let main = test_path("main.jsonnet");
		let utils = test_path("utils.jsonnet");
		let lib = test_path("lib.jsonnet");

		// lib has no imports
		let lib_doc = Document::new("{}".to_string(), DocVersion::new(1));
		graph.update_file(&lib, &lib_doc, simple_resolver);

		// utils imports lib
		let utils_code = r#"local lib = import "lib.jsonnet"; lib"#;
		let utils_doc = Document::new(utils_code.to_string(), DocVersion::new(1));
		graph.update_file(&utils, &utils_doc, simple_resolver);

		// main imports utils
		let main_code = r#"local utils = import "utils.jsonnet"; utils"#;
		let main_doc = Document::new(main_code.to_string(), DocVersion::new(1));
		graph.update_file(&main, &main_doc, simple_resolver);

		let processed = Arc::new(Mutex::new(Vec::new()));
		let processed_clone = Arc::clone(&processed);

		// Process lib and its importers (cascade)
		graph.process_importers_with_work_queue(&lib, move |path| {
			processed_clone.lock().unwrap().push(path.clone());
		});

		let order: Vec<_> = processed.lock().unwrap().clone();

		// lib first, then utils (imports lib), then main (imports utils)
		assert_eq!(order, vec![lib, utils, main]);
	}
}
diff --git a/crates/jrsonnet-lsp-import/src/lib.rs b/crates/jrsonnet-lsp-import/src/lib.rs
new file mode 100644
index 00000000..e64bfe08
--- /dev/null
+++ b/crates/jrsonnet-lsp-import/src/lib.rs
@@ -0,0 +1,17 @@
//! Import graph and work queue for jrsonnet LSP.
//!
//! This crate provides:
//! - Import path extraction from AST nodes
//! - Import graph tracking dependencies between files
//! - Topological work queue for parallel processing

pub mod graph;
pub mod parse;
pub mod work_queue;

pub use graph::{parse_document_imports, ImportEntry, ImportGraph};
pub use parse::{
	check_import_from_token, check_import_path, extract_import_path, find_import_in_node,
	get_import_path_from_node,
};
pub use work_queue::{WorkQueue, WorkQueueExt};
diff --git a/crates/jrsonnet-lsp-import/src/parse.rs b/crates/jrsonnet-lsp-import/src/parse.rs
new file mode 100644
index 00000000..5e1f0501
--- /dev/null
+++ b/crates/jrsonnet-lsp-import/src/parse.rs
@@ -0,0 +1,190 @@
//! Import path extraction from AST nodes.
//!
//! Utilities for extracting import paths from Jsonnet AST nodes.

use jrsonnet_lsp_document::strip_string_quotes;
use jrsonnet_rowan_parser::{
	nodes::ExprImport, AstNode, AstToken, SyntaxKind, SyntaxNode, SyntaxToken,
};

/// Extract the path string from an import expression.
///
/// This handles all string literal formats: double-quoted, single-quoted,
/// and verbatim strings (with @ prefix).
+/// +/// # Example +/// ```ignore +/// // For `import "foo.jsonnet"` returns Some("foo.jsonnet") +/// // For `import @'bar.jsonnet'` returns Some("bar.jsonnet") +/// let path = extract_import_path(&import_expr); +/// ``` +pub fn extract_import_path(import: &ExprImport) -> Option { + let text_token = import.text()?; + let text = text_token.text(); + Some(strip_string_quotes(text)) +} + +/// Find an import expression within a syntax node's descendants. +/// +/// Returns the first `ExprImport` found, or `None` if no import exists. +pub fn find_import_in_node(node: &SyntaxNode) -> Option { + for descendant in node.descendants() { + if descendant.kind() == SyntaxKind::EXPR_IMPORT { + return ExprImport::cast(descendant); + } + } + None +} + +/// Check if a token is inside an import expression and extract the import path. +/// +/// This is useful for handling "go to definition" from within an import string. +/// Returns `Some((import_expr, path))` if the token is inside an import, `None` otherwise. +pub fn check_import_from_token(token: &SyntaxToken) -> Option<(ExprImport, String)> { + // Must be a string token + let kind = token.kind(); + if !matches!( + kind, + SyntaxKind::STRING_DOUBLE + | SyntaxKind::STRING_SINGLE + | SyntaxKind::STRING_DOUBLE_VERBATIM + | SyntaxKind::STRING_SINGLE_VERBATIM + ) { + return None; + } + + // Walk up to find if this is inside an ExprImport + let mut node = token.parent()?; + loop { + if let Some(import) = ExprImport::cast(node.clone()) { + let path = extract_import_path(&import)?; + return Some((import, path)); + } + node = node.parent()?; + } +} + +/// Get the import path from a syntax node if it contains an import. +/// +/// This searches the node's descendants for an import expression and returns its path. +pub fn get_import_path_from_node(node: &SyntaxNode) -> Option { + let import = find_import_in_node(node)?; + extract_import_path(&import) +} + +/// Check if a token is inside an import expression and return the import path. 
+/// +/// This is a convenience wrapper around [`check_import_from_token`] that just +/// returns the path string. +pub fn check_import_path(token: &SyntaxToken) -> Option { + check_import_from_token(token).map(|(_, path)| path) +} + +#[cfg(test)] +mod tests { + use jrsonnet_lsp_document::{DocVersion, Document}; + use jrsonnet_rowan_parser::AstNode; + + use super::*; + + #[test] + fn test_extract_import_path() { + let code = r#"import "lib.jsonnet""#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let ast = doc.ast(); + + let import = find_import_in_node(ast.syntax()).expect("should find import expression"); + + let path = extract_import_path(&import); + assert_eq!(path, Some("lib.jsonnet".to_string())); + } + + #[test] + fn test_extract_import_path_verbatim() { + let code = r#"import @"lib.jsonnet""#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let ast = doc.ast(); + + let import = find_import_in_node(ast.syntax()).expect("should find import expression"); + + let path = extract_import_path(&import); + assert_eq!(path, Some("lib.jsonnet".to_string())); + } + + #[test] + fn test_find_import_in_node() { + let code = r#"local lib = import "lib.jsonnet"; lib"#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let ast = doc.ast(); + + let _import = + find_import_in_node(ast.syntax()).expect("should find import in local binding"); + } + + #[test] + fn test_get_import_path_from_node() { + let code = r#"import "utils.jsonnet""#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let ast = doc.ast(); + + let path = get_import_path_from_node(ast.syntax()); + assert_eq!(path, Some("utils.jsonnet".to_string())); + } + + #[test] + fn test_check_import_path() { + let code = r#"import "foo.jsonnet""#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let ast = doc.ast(); + + // Find the string token - there should be exactly one STRING_DOUBLE token + let string_token = ast + .syntax() + 
.descendants_with_tokens() + .filter_map(rowan::NodeOrToken::into_token) + .find(|t| t.kind() == SyntaxKind::STRING_DOUBLE) + .expect("should find STRING_DOUBLE token"); + + let path = check_import_path(&string_token); + assert_eq!(path, Some("foo.jsonnet".to_string())); + } + + #[test] + fn test_check_import_from_token() { + let code = r#"import "bar.jsonnet""#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let ast = doc.ast(); + + // Find the string token - there should be exactly one STRING_DOUBLE token + let string_token = ast + .syntax() + .descendants_with_tokens() + .filter_map(rowan::NodeOrToken::into_token) + .find(|t| t.kind() == SyntaxKind::STRING_DOUBLE) + .expect("should find STRING_DOUBLE token"); + + let (import, path) = + check_import_from_token(&string_token).expect("should find import from token"); + assert_eq!(path, "bar.jsonnet"); + let _text = import.text().expect("import should have text"); + } + + #[test] + fn test_check_import_path_not_import() { + // A string that is NOT inside an import + let code = r#"local x = "not an import";"#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let ast = doc.ast(); + + // Find the string token - there should be exactly one STRING_DOUBLE token + let string_token = ast + .syntax() + .descendants_with_tokens() + .filter_map(rowan::NodeOrToken::into_token) + .find(|t| t.kind() == SyntaxKind::STRING_DOUBLE) + .expect("should find STRING_DOUBLE token"); + + let path = check_import_path(&string_token); + assert_eq!(path, None); + } +} diff --git a/crates/jrsonnet-lsp-import/src/work_queue.rs b/crates/jrsonnet-lsp-import/src/work_queue.rs new file mode 100644 index 00000000..24bb3bd9 --- /dev/null +++ b/crates/jrsonnet-lsp-import/src/work_queue.rs @@ -0,0 +1,381 @@ +//! Work queue for topological processing with per-level parallelization. +//! +//! This module provides a work queue that processes items in topological order, +//! grouping items by dependency level. 
Items at the same level have no dependencies +//! on each other and can be processed in parallel. +//! +//! # Pattern +//! +//! 1. Push initial work items +//! 2. Process items via visitor pattern - visitor can push dependencies +//! 3. Items are grouped by dependency level (leaves at level 0) +//! 4. Caller processes each level: parallel within level, sequential between levels +//! +//! # Example +//! +//! ```ignore +//! let mut work = WorkQueue::new(); +//! work.push(root_item); +//! +//! let levels = work.run(|item, queue| { +//! // Process item, push dependencies +//! for dep in get_dependencies(item) { +//! queue.push(dep); +//! } +//! }); +//! +//! // Process levels with rayon +//! for level in levels { +//! level.into_par_iter().for_each(|item| process(item)); +//! } +//! ``` + +use std::{collections::HashMap, hash::Hash}; + +/// Action in the work queue stack. +#[derive(Debug)] +enum Action { + /// Enter processing for this item (DFS pre-order). + Enter(T), + /// Exit processing for this item (DFS post-order). + Exit(T), +} + +/// Work queue for topological processing. +/// +/// Uses depth-first traversal to discover dependencies and group items by level. +/// Level is computed as: max(dependency levels) + 1, with leaves at level 0. +#[derive(Debug)] +pub struct WorkQueue { + /// Stack of actions (LIFO for DFS). + actions: Vec>, + /// Computed levels for processed items. + item_levels: HashMap, + /// Track which items are currently being processed (for cycle detection). + in_progress: HashMap, +} + +impl Default for WorkQueue +where + T: Eq + Hash, +{ + fn default() -> Self { + Self::new() + } +} + +impl WorkQueue +where + T: Eq + Hash, +{ + /// Create a new empty work queue. + pub fn new() -> Self { + Self { + actions: Vec::new(), + item_levels: HashMap::new(), + in_progress: HashMap::new(), + } + } + + /// Push an item to be processed. + /// + /// Items are deduplicated - pushing the same item twice has no effect. 
+ pub fn push(&mut self, item: T) + where + T: Clone, + { + // Skip if already processed or in progress + if !self.item_levels.contains_key(&item) && !self.in_progress.contains_key(&item) { + self.actions.push(Action::Enter(item)); + } + } + + /// Get the computed level for an item (if already processed). + fn get_level(&self, item: &T) -> Option { + self.item_levels.get(item).copied() + } + + /// Run the work queue with a visitor function. + /// + /// The visitor is called for each item and can push dependencies via the queue. + /// Returns items grouped by dependency level - items at level 0 have no dependencies, + /// items at level 1 depend only on level 0 items, etc. + /// + /// # Arguments + /// + /// * `visitor` - Function called for each item. Receives the item and a mutable + /// reference to a collector for dependencies. + /// + /// # Returns + /// + /// Vector of levels, where each level is a vector of items that can be processed + /// in parallel. + pub fn run(mut self, mut visitor: F) -> Vec> + where + T: Clone, + F: FnMut(&T, &mut Vec), + { + // Track dependencies for each item to compute levels + let mut item_deps: HashMap> = HashMap::new(); + + while let Some(action) = self.actions.pop() { + match action { + Action::Enter(item) => { + if self.item_levels.contains_key(&item) { + continue; // Already processed + } + + // Mark as in progress + self.in_progress.insert(item.clone(), true); + + // Schedule exit + self.actions.push(Action::Exit(item.clone())); + + // Collect dependencies + let mut deps = Vec::new(); + visitor(&item, &mut deps); + + // Store deps for level computation + item_deps.insert(item.clone(), deps.clone()); + + // Push dependencies onto stack + for dep in deps { + if !self.item_levels.contains_key(&dep) + && !self.in_progress.contains_key(&dep) + { + self.actions.push(Action::Enter(dep)); + } + } + } + Action::Exit(item) => { + self.in_progress.remove(&item); + + // Compute level: max(dep levels) + 1, or 0 if no deps + let deps = 
item_deps.get(&item).cloned().unwrap_or_default(); + let max_dep_level = deps + .iter() + .filter_map(|dep| self.get_level(dep)) + .max() + .unwrap_or(0); + + let level = if deps.is_empty() { + 0 + } else { + max_dep_level + 1 + }; + + self.item_levels.insert(item, level); + } + } + } + + // Group items by level + if self.item_levels.is_empty() { + return Vec::new(); + } + + let max_level = self.item_levels.values().max().copied().unwrap_or(0); + let mut levels: Vec> = vec![Vec::new(); max_level + 1]; + + for (item, level) in self.item_levels { + levels[level].push(item); + } + + levels + } + + /// Check if an item has already been processed. + pub fn contains(&self, item: &T) -> bool { + self.item_levels.contains_key(item) + } +} + +/// Extension trait for processing work queue results with rayon. +pub trait WorkQueueExt { + /// Process each level in parallel using rayon. + /// + /// Items within each level are processed in parallel. + /// Levels are processed sequentially (barrier between levels). + fn process_parallel(&self, f: F) + where + F: Fn(&T) + Sync, + T: Sync; + + /// Process each level in parallel with mutable accumulator. + /// + /// Each level is processed in parallel, then results are combined sequentially. 
+ fn process_parallel_with(&self, init: A, process: F, combine: C) -> A + where + F: Fn(&T) -> A + Sync, + C: Fn(&mut A, A), + A: Send, + T: Sync; +} + +impl WorkQueueExt for Vec> { + fn process_parallel(&self, f: F) + where + F: Fn(&T) + Sync, + T: Sync, + { + use rayon::prelude::*; + + for level in self { + level.par_iter().for_each(&f); + } + } + + fn process_parallel_with(&self, mut acc: A, process: F, combine: C) -> A + where + F: Fn(&T) -> A + Sync, + C: Fn(&mut A, A), + A: Send, + T: Sync, + { + use rayon::prelude::*; + + for level in self { + let level_results: Vec = level.par_iter().map(&process).collect(); + for result in level_results { + combine(&mut acc, result); + } + } + + acc + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_empty_queue() { + let work: WorkQueue = WorkQueue::new(); + let levels = work.run(|_, _| {}); + + assert_eq!(levels, Vec::>::new()); + } + + #[test] + fn test_single_item() { + let mut work = WorkQueue::new(); + work.push(1); + + let levels = work.run(|_, _| {}); + + assert_eq!(levels, vec![vec![1]]); + } + + #[test] + fn test_chain_dependency() { + // 1 depends on 2, 2 depends on 3 + // Expected levels: [3], [2], [1] + let deps: HashMap> = [(1, vec![2]), (2, vec![3]), (3, vec![])] + .into_iter() + .collect(); + + let mut work = WorkQueue::new(); + work.push(1); + + let levels = work.run(|item, dep_collector| { + if let Some(item_deps) = deps.get(item) { + dep_collector.extend(item_deps.iter().copied()); + } + }); + + assert_eq!(levels, vec![vec![3], vec![2], vec![1]]); + } + + #[test] + fn test_parallel_items() { + // 1 depends on 2 and 3 (2 and 3 are independent) + // Expected levels: [2, 3] (order may vary), [1] + let deps: HashMap> = [(1, vec![2, 3]), (2, vec![]), (3, vec![])] + .into_iter() + .collect(); + + let mut work = WorkQueue::new(); + work.push(1); + + let levels = work.run(|item, dep_collector| { + if let Some(item_deps) = deps.get(item) { + dep_collector.extend(item_deps.iter().copied()); + 
} + }); + + // Level 0 should have 2 and 3, level 1 should have 1 + assert_eq!(levels.len(), 2); + assert_eq!(levels[1], vec![1]); + + let mut level0 = levels[0].clone(); + level0.sort_unstable(); + assert_eq!(level0, vec![2, 3]); + } + + #[test] + fn test_diamond_dependency() { + // 1 depends on 2 and 3, both 2 and 3 depend on 4 + // Expected: [4], [2, 3], [1] + let deps: HashMap> = + [(1, vec![2, 3]), (2, vec![4]), (3, vec![4]), (4, vec![])] + .into_iter() + .collect(); + + let mut work = WorkQueue::new(); + work.push(1); + + let levels = work.run(|item, dep_collector| { + if let Some(item_deps) = deps.get(item) { + dep_collector.extend(item_deps.iter().copied()); + } + }); + + assert_eq!(levels.len(), 3); + assert_eq!(levels[0], vec![4]); // 4 (leaf) + assert_eq!(levels[2], vec![1]); // 1 (root) + + let mut level1 = levels[1].clone(); + level1.sort_unstable(); + assert_eq!(level1, vec![2, 3]); // 2 and 3 + } + + #[test] + fn test_deduplication() { + // Push same item twice - should only appear once + let mut work = WorkQueue::new(); + work.push(1); + work.push(1); + work.push(2); + work.push(2); + + let levels = work.run(|_, _| {}); + + // Should have 2 items total, not 4 + let total: usize = levels.iter().map(std::vec::Vec::len).sum(); + assert_eq!(total, 2); + } + + #[test] + fn test_process_parallel() { + use std::sync::atomic::{AtomicUsize, Ordering}; + + let levels = vec![vec![1, 2, 3], vec![4, 5]]; + + let counter = AtomicUsize::new(0); + levels.process_parallel(|_| { + counter.fetch_add(1, Ordering::SeqCst); + }); + + assert_eq!(counter.load(Ordering::SeqCst), 5); + } + + #[test] + fn test_process_parallel_with() { + let levels = vec![vec![1, 2, 3], vec![4, 5]]; + + let sum = levels.process_parallel_with(0, |&x| x, |acc, x| *acc += x); + + assert_eq!(sum, 15); // 1 + 2 + 3 + 4 + 5 + } +} diff --git a/crates/jrsonnet-lsp-inference/Cargo.toml b/crates/jrsonnet-lsp-inference/Cargo.toml new file mode 100644 index 00000000..c3d27228 --- /dev/null +++ 
b/crates/jrsonnet-lsp-inference/Cargo.toml @@ -0,0 +1,35 @@ +[package] +name = "jrsonnet-lsp-inference" +authors.workspace = true +edition.workspace = true +license.workspace = true +repository.workspace = true +version.workspace = true +description = "Type inference and flow analysis for jrsonnet LSP" + +[dependencies] +dashmap = "6" +jrsonnet-lsp-document = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-document" } +jrsonnet-lsp-import = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-import" } +jrsonnet-lsp-scope = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-scope" } +jrsonnet-lsp-stdlib = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-stdlib" } +jrsonnet-lsp-types = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-types" } +jrsonnet-rowan-parser = { version = "0.5.0-pre97", path = "../jrsonnet-rowan-parser" } +jrsonnet-std-sig = { version = "0.5.0-pre97", path = "../jrsonnet-std-sig" } +lru.workspace = true +lsp-types.workspace = true +moka = { version = "0.12", features = ["sync"] } +parking_lot = "0.12" +rayon = "1.11.0" +rowan.workspace = true +rustc-hash.workspace = true +strsim.workspace = true +tracing = "0.1.44" + +[dev-dependencies] +assert_matches = "1.5.0" +jrsonnet-lsp-document = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-document" } +rstest = "0.23" + +[lints] +workspace = true diff --git a/crates/jrsonnet-lsp-inference/src/analysis.rs b/crates/jrsonnet-lsp-inference/src/analysis.rs new file mode 100644 index 00000000..291c0054 --- /dev/null +++ b/crates/jrsonnet-lsp-inference/src/analysis.rs @@ -0,0 +1,787 @@ +//! Type analysis storage and queries. +//! +//! Provides eager type analysis for Jsonnet documents. Analysis is computed +//! once during construction and results are immutable, making `TypeAnalysis` +//! safe to share across threads and cache in concurrent data structures. 
+ +use std::sync::Arc; + +use jrsonnet_lsp_document::Document; +use jrsonnet_lsp_types::{ + is_subtype_ty, FunctionData, GlobalTyStore, MutStore, ObjectData, Ty, TyData, +}; +use jrsonnet_rowan_parser::{ + nodes::{Bind, Expr, ExprBase, Member, ObjBody, StmtLocal}, + AstNode, SyntaxNode, +}; +use parking_lot::RwLock; +use rowan::TextRange; +use rustc_hash::FxHashMap; + +use crate::{ + env::{ImportResolver, TypeEnv}, + expr::infer_expr_ty, +}; + +/// Stores inferred types for all expressions, allowing queries by position. +/// +/// Internally uses interned `Ty` references for memory efficiency. +/// Analysis is computed eagerly during construction. +/// +/// This type is `Send + Sync` and can be safely shared across threads +/// and cached in concurrent data structures like moka. +pub struct TypeAnalysis { + /// Type store for interning and looking up types. + /// Uses RwLock because some query operations (like union) may intern new types. + store: RwLock, + /// Map from expression text range to interned type. + /// Immutable after construction. + expr_types: FxHashMap, + /// The inferred type of the document's root expression. + document_type: Ty, +} + +impl std::fmt::Debug for TypeAnalysis { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("TypeAnalysis") + .field("store", &">") + .field("expr_types_count", &self.expr_types.len()) + .field("document_type", &self.document_type) + .finish() + } +} + +impl Default for TypeAnalysis { + fn default() -> Self { + Self::new() + } +} + +impl TypeAnalysis { + /// Create a new empty type analysis with a default global store. + pub fn new() -> Self { + Self { + store: RwLock::new(MutStore::new(Arc::new(GlobalTyStore::new()))), + expr_types: FxHashMap::default(), + document_type: Ty::ANY, + } + } + + /// Create a new empty type analysis with a specific global store. 
+ pub fn with_global(global: Arc) -> Self { + Self { + store: RwLock::new(MutStore::new(global)), + expr_types: FxHashMap::default(), + document_type: Ty::ANY, + } + } + + /// Analyze a document and return the type analysis results. + pub fn analyze(document: &Document) -> Self { + Self::analyze_with_global(document, Arc::new(GlobalTyStore::new())) + } + + /// Analyze a document with a shared global store. + pub fn analyze_with_global(document: &Document, global: Arc) -> Self { + let mut env = TypeEnv::new(Arc::clone(&global)); + let ast = document.ast(); + + let mut expr_types = FxHashMap::default(); + + let doc_ty = if let Some(expr) = ast.expr() { + analyze_and_record(&expr, &mut env, &mut expr_types) + } else { + Ty::ANY + }; + + Self { + store: RwLock::new(env.into_store()), + expr_types, + document_type: doc_ty, + } + } + + /// Analyze a document with a shared global store and import resolver. + pub fn analyze_with_resolver( + document: &Document, + global: Arc, + import_resolver: Arc, + ) -> Self { + let mut env = TypeEnv::with_import_resolver(Arc::clone(&global), import_resolver); + let ast = document.ast(); + + let mut expr_types = FxHashMap::default(); + + let doc_ty = if let Some(expr) = ast.expr() { + analyze_and_record(&expr, &mut env, &mut expr_types) + } else { + Ty::ANY + }; + + Self { + store: RwLock::new(env.into_store()), + expr_types, + document_type: doc_ty, + } + } + + /// Get the inferred type of the document's root expression. + #[inline] + pub fn document_type(&self) -> Ty { + self.document_type + } + + /// Get the type of an expression at a specific position. + /// + /// Finds the smallest expression containing the position and returns its type. + pub fn type_at_position(&self, _root: &SyntaxNode, offset: rowan::TextSize) -> Option { + self.find_type_at(offset) + } + + /// Find a type at the given offset. 
+ fn find_type_at(&self, offset: rowan::TextSize) -> Option { + let mut best_range: Option = None; + + for range in self.expr_types.keys() { + if range.contains(offset) { + match best_range { + None => best_range = Some(*range), + Some(current) if range.len() < current.len() => best_range = Some(*range), + _ => {} + } + } + } + + best_range.and_then(|r| self.expr_types.get(&r).copied()) + } + + /// Get the type of an expression by its exact range. + pub fn type_for_range(&self, range: TextRange) -> Option { + self.expr_types.get(&range).copied() + } + + /// Display a Ty as a string. + pub fn display(&self, ty: Ty) -> String { + self.store.read().display(ty) + } + + /// Check if `subtype` is a subtype of `supertype`. + pub fn is_subtype(&self, subtype: Ty, supertype: Ty) -> bool { + is_subtype_ty(&*self.store.read(), subtype, supertype) + } + + /// Access the type data for a Ty via closure (avoids cloning). + pub fn with_data(&self, ty: Ty, f: impl FnOnce(&TyData) -> R) -> R { + f(&self.store.read().get(ty)) + } + + /// Create a union type from multiple Ty values. + pub fn union(&self, types: Vec) -> Ty { + self.store.write().union(types) + } + + /// Get all known fields for an object type at a position. + /// + /// Returns field names and their interned types, useful for completion. + pub fn fields_at_position( + &self, + root: &SyntaxNode, + offset: rowan::TextSize, + ) -> Option> { + let ty = self.type_at_position(root, offset)?; + self.extract_fields(ty) + } + + /// Extract fields from a type (handles unions by merging fields). 
+ fn extract_fields(&self, ty: Ty) -> Option> { + let ty_data = self.store.read().get(ty); + + match ty_data { + TyData::Object(obj) => { + let fields: Vec<_> = obj.fields.iter().map(|(k, v)| (k.clone(), v.ty)).collect(); + if fields.is_empty() && !obj.has_unknown { + None + } else { + Some(fields) + } + } + TyData::Union(types) => { + // Merge fields from all object types in the union + let mut all_fields: FxHashMap> = FxHashMap::default(); + for t in types { + if let Some(fields) = self.extract_fields(t) { + for (name, field_ty) in fields { + all_fields.entry(name).or_default().push(field_ty); + } + } + } + if all_fields.is_empty() { + None + } else { + // Create union types for fields that have multiple types + let result: Vec<_> = all_fields + .into_iter() + .map(|(name, tys)| { + let unified_ty = self.store.write().union(tys); + (name, unified_ty) + }) + .collect(); + Some(result) + } + } + _ => None, + } + } + + /// Check if a type supports indexing (array, object, string, tuple). + #[inline] + pub fn is_indexable(&self, ty: Ty) -> bool { + self.store.read().is_indexable(ty) + } + + /// Check if a type supports field access (object or attrs_of). + #[inline] + pub fn supports_field_access(&self, ty: Ty) -> bool { + self.store.read().supports_field_access(ty) + } + + /// Check if a type is callable (function). + #[inline] + pub fn is_callable(&self, ty: Ty) -> bool { + self.store.read().is_callable(ty) + } + + /// Access the type store directly (for complex operations). + pub fn store(&self) -> parking_lot::RwLockReadGuard<'_, MutStore> { + self.store.read() + } + + /// Execute a closure with mutable access to the type store. + pub fn with_store_mut(&self, f: impl FnOnce(&mut MutStore) -> R) -> R { + f(&mut self.store.write()) + } + + /// Execute a function with immutable access to the MutStore. + pub fn with_store(&self, f: impl FnOnce(&MutStore) -> R) -> R { + f(&self.store.read()) + } + + /// Import a type from the stdlib store into this analysis store. 
+ pub fn import_from_stdlib(&self, ty: Ty) -> Ty { + jrsonnet_lsp_stdlib::import_ty_from_stdlib(&mut self.store.write(), ty) + } + + /// Get the type data for a Ty, cloning it for pattern matching. + #[inline] + pub fn get_data(&self, ty: Ty) -> TyData { + self.store.read().get(ty) + } + + /// Get object data if the type is an Object. + pub fn get_object(&self, ty: Ty) -> Option { + let ty_data = self.store.read().get(ty); + match ty_data { + TyData::Object(obj) => Some(obj), + _ => None, + } + } + + /// Get tuple element types if the type is a Tuple. + pub fn get_tuple(&self, ty: Ty) -> Option> { + let ty_data = self.store.read().get(ty); + match ty_data { + TyData::Tuple { elems } => Some(elems), + _ => None, + } + } + + /// Get function data if the type is a Function. + pub fn get_function(&self, ty: Ty) -> Option { + let ty_data = self.store.read().get(ty); + match ty_data { + TyData::Function(func) => Some(func), + _ => None, + } + } + + /// Check if a type is an object and has known (closed) fields. + #[inline] + pub fn is_closed_object(&self, ty: Ty) -> bool { + let ty_data = self.store.read().get(ty); + match ty_data { + TyData::Object(obj) => !obj.has_unknown, + _ => false, + } + } + + /// Get the length of a tuple type. + #[inline] + pub fn tuple_len(&self, ty: Ty) -> Option { + let ty_data = self.store.read().get(ty); + match ty_data { + TyData::Tuple { elems } => Some(elems.len()), + _ => None, + } + } + + /// Check if an object type has a specific field. + pub fn object_has_field(&self, ty: Ty, field_name: &str) -> Option { + let ty_data = self.store.read().get(ty); + match ty_data { + TyData::Object(obj) => Some(obj.fields.iter().any(|(name, _)| name == field_name)), + _ => None, + } + } + + /// Get all field names from an object type. 
+ pub fn object_field_names(&self, ty: Ty) -> Option> { + let ty_data = self.store.read().get(ty); + match ty_data { + TyData::Object(obj) => Some(obj.fields.iter().map(|(name, _)| name.clone()).collect()), + _ => None, + } + } + + /// Check if a type supports slicing (array, tuple, or string). + #[inline] + pub fn is_sliceable(&self, ty: Ty) -> bool { + if ty.is_any() { + return true; + } + let ty_data = self.store.read().get(ty); + match ty_data { + TyData::Array { .. } + | TyData::Tuple { .. } + | TyData::String + | TyData::Char + | TyData::LiteralString(_) => true, + TyData::Union(types) => types.iter().all(|&t| self.is_sliceable(t)), + _ => false, + } + } + + /// Check if a type is an array type. + #[inline] + pub fn is_array(&self, ty: Ty) -> bool { + let ty_data = self.store.read().get(ty); + matches!(ty_data, TyData::Array { .. }) + } + + /// Check if a type is a string type (including Char and LiteralString). + #[inline] + pub fn is_string(&self, ty: Ty) -> bool { + let ty_data = self.store.read().get(ty); + matches!( + ty_data, + TyData::String | TyData::Char | TyData::LiteralString(_) + ) + } + + /// Check if a type is a function. + #[inline] + pub fn is_function(&self, ty: Ty) -> bool { + let ty_data = self.store.read().get(ty); + matches!(ty_data, TyData::Function(_)) + } +} + +// ============================================================================ +// Analysis helpers (used during construction) +// ============================================================================ + +/// Analyze an expression and record types for it and all sub-expressions. 
fn analyze_and_record(
	expr: &Expr,
	env: &mut TypeEnv,
	expr_types: &mut FxHashMap<TextRange, Ty>,
) -> Ty {
	let ty = infer_expr_ty(expr, env);

	// Record the type for the expression
	expr_types.insert(expr.syntax().text_range(), ty);

	// Also record the type for the base expression if present, so position
	// lookups that land on the base node resolve to the same type
	if let Some(base) = expr.expr_base() {
		expr_types.insert(base.syntax().text_range(), ty);
	}

	// Recursively visit all child expressions to record their types
	visit_children(expr, env, expr_types);

	ty
}

/// Visit all child expressions and record their types.
fn visit_children(expr: &Expr, env: &mut TypeEnv, expr_types: &mut FxHashMap<TextRange, Ty>) {
	// Visit statements (only `local` binds carry child expressions here)
	for stmt in expr.stmts() {
		if let Some(stmt_local) = StmtLocal::cast(stmt.syntax().clone()) {
			for bind in stmt_local.binds() {
				visit_bind(&bind, env, expr_types);
			}
		}
	}

	// Visit base expression children
	if let Some(base) = expr.expr_base() {
		visit_base(&base, env, expr_types);
	}
}

/// Visit a binding and record types.
fn visit_bind(bind: &Bind, env: &mut TypeEnv, expr_types: &mut FxHashMap<TextRange, Ty>) {
	match bind {
		Bind::BindDestruct(bd) => {
			if let Some(value) = bd.value() {
				analyze_and_record(&value, env, expr_types);
			}
		}
		Bind::BindFunction(bf) => {
			if let Some(body) = bf.value() {
				// Parameters shadow outer bindings while the body is analyzed.
				env.push_scope();
				if let Some(params) = bf.params() {
					for param in params.params() {
						// FIX: reuse `define_param_as_any` instead of
						// duplicating its destructuring ladder inline.
						define_param_as_any(&param, env);
					}
				}
				analyze_and_record(&body, env, expr_types);
				env.pop_scope();
			}
		}
	}
}

/// Visit a base expression and record types for all children.
fn visit_base(base: &ExprBase, env: &mut TypeEnv, expr_types: &mut FxHashMap<TextRange, Ty>) {
	match base {
		ExprBase::ExprArray(arr) => visit_array_base(arr, env, expr_types),
		ExprBase::ExprObject(obj) => {
			if let Some(body) = obj.obj_body() {
				visit_obj_body(&body, env, expr_types);
			}
		}
		ExprBase::ExprFunction(func) => visit_function_base(func, env, expr_types),
		ExprBase::ExprParened(parens) => visit_optional_expr(parens.expr(), env, expr_types),
		ExprBase::ExprIfThenElse(if_expr) => visit_if_then_else_base(if_expr, env, expr_types),
		ExprBase::ExprBinary(binary) => visit_binary_base(binary, env, expr_types),
		ExprBase::ExprUnary(unary) => visit_optional_expr(unary.rhs(), env, expr_types),
		ExprBase::ExprObjExtend(extend) => visit_optional_expr(extend.expr(), env, expr_types),
		ExprBase::ExprArrayComp(comp) => visit_optional_expr(comp.expr(), env, expr_types),
		ExprBase::ExprField(field) => visit_optional_expr(field.base(), env, expr_types),
		ExprBase::ExprIndex(idx) => visit_index_base(idx, env, expr_types),
		ExprBase::ExprSlice(slice) => visit_optional_expr(slice.base(), env, expr_types),
		ExprBase::ExprCall(call) => visit_call_base(call, env, expr_types),
		// Leaf nodes - no children to visit
		ExprBase::ExprLiteral(_)
		| ExprBase::ExprNumber(_)
		| ExprBase::ExprString(_)
		| ExprBase::ExprError(_)
		| ExprBase::ExprVar(_)
		| ExprBase::ExprImport(_) => {}
	}
}

/// Bind a parameter's name as `any` in `env`, when it is a plain identifier.
/// Destructuring patterns other than a full identifier are skipped.
fn define_param_as_any(param: &jrsonnet_rowan_parser::nodes::Param, env: &mut TypeEnv) {
	let Some(name_node) = param.destruct() else {
		return;
	};
	let jrsonnet_rowan_parser::nodes::Destruct::DestructFull(full) = name_node else {
		return;
	};
	let Some(name) = full.name() else {
		return;
	};
	let Some(ident) = name.ident_lit() else {
		return;
	};
	env.define_ty(ident.text().to_string(), Ty::ANY);
}

/// Analyze `expr` if present; a convenience for optional AST children.
fn visit_optional_expr(
	expr: Option<Expr>,
	env: &mut TypeEnv,
	expr_types: &mut FxHashMap<TextRange, Ty>,
) {
	if let Some(expr) = expr {
		analyze_and_record(&expr, env, expr_types);
+ } +} + +fn visit_array_base( + arr: &jrsonnet_rowan_parser::nodes::ExprArray, + env: &mut TypeEnv, + expr_types: &mut FxHashMap, +) { + for elem in arr.exprs() { + analyze_and_record(&elem, env, expr_types); + } +} + +fn visit_function_base( + func: &jrsonnet_rowan_parser::nodes::ExprFunction, + env: &mut TypeEnv, + expr_types: &mut FxHashMap, +) { + let Some(body) = func.expr() else { + return; + }; + + env.push_scope(); + if let Some(params) = func.params_desc() { + for param in params.params() { + define_param_as_any(¶m, env); + } + } + analyze_and_record(&body, env, expr_types); + env.pop_scope(); +} + +fn visit_if_then_else_base( + if_expr: &jrsonnet_rowan_parser::nodes::ExprIfThenElse, + env: &mut TypeEnv, + expr_types: &mut FxHashMap, +) { + visit_optional_expr(if_expr.cond(), env, expr_types); + let then_expr = if_expr.then().and_then(|then_clause| then_clause.expr()); + visit_optional_expr(then_expr, env, expr_types); + let else_expr = if_expr.else_().and_then(|else_clause| else_clause.expr()); + visit_optional_expr(else_expr, env, expr_types); +} + +fn visit_binary_base( + binary: &jrsonnet_rowan_parser::nodes::ExprBinary, + env: &mut TypeEnv, + expr_types: &mut FxHashMap, +) { + visit_optional_expr(binary.lhs(), env, expr_types); + visit_optional_expr(binary.rhs(), env, expr_types); +} + +fn visit_index_base( + idx: &jrsonnet_rowan_parser::nodes::ExprIndex, + env: &mut TypeEnv, + expr_types: &mut FxHashMap, +) { + visit_optional_expr(idx.base(), env, expr_types); + visit_optional_expr(idx.index(), env, expr_types); +} + +fn visit_call_base( + call: &jrsonnet_rowan_parser::nodes::ExprCall, + env: &mut TypeEnv, + expr_types: &mut FxHashMap, +) { + visit_optional_expr(call.callee(), env, expr_types); + let Some(args) = call.args_desc() else { + return; + }; + for arg in args.args() { + visit_optional_expr(arg.expr(), env, expr_types); + } +} + +/// Visit object body members. 
+fn visit_obj_body(body: &ObjBody, env: &mut TypeEnv, expr_types: &mut FxHashMap) { + match body { + ObjBody::ObjBodyMemberList(members) => { + for member in members.members() { + match member { + Member::MemberFieldNormal(field) => { + if let Some(expr) = field.expr() { + analyze_and_record(&expr, env, expr_types); + } + } + Member::MemberFieldMethod(method) => { + if let Some(expr) = method.expr() { + env.push_scope(); + if let Some(params) = method.params_desc() { + for param in params.params() { + if let Some(name_node) = param.destruct() { + if let jrsonnet_rowan_parser::nodes::Destruct::DestructFull( + full, + ) = name_node + { + if let Some(name) = full.name() { + if let Some(ident) = name.ident_lit() { + env.define_ty( + ident.text().to_string(), + Ty::ANY, + ); + } + } + } + } + } + } + analyze_and_record(&expr, env, expr_types); + env.pop_scope(); + } + } + _ => {} + } + } + } + ObjBody::ObjBodyComp(comp) => { + for comp_spec in comp.comp_specs() { + if let Some(for_spec) = + jrsonnet_rowan_parser::nodes::ForSpec::cast(comp_spec.syntax().clone()) + { + if let Some(expr) = for_spec.expr() { + analyze_and_record(&expr, env, expr_types); + } + } + } + } + } +} + +#[cfg(test)] +mod tests { + use jrsonnet_lsp_document::DocVersion; + + use super::*; + + fn analyze_doc(code: &str) -> (TypeAnalysis, Document) { + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let analysis = TypeAnalysis::analyze(&doc); + (analysis, doc) + } + + #[test] + fn test_type_at_position_finds_smallest() { + let code = "{ a: 1 + 2 }"; + let (analysis, doc) = analyze_doc(code); + let root = doc.ast().syntax().clone(); + + // Position 5 is inside "1" + let ty = analysis + .type_at_position(&root, rowan::TextSize::new(5)) + .expect("should find type at position 5"); + assert_eq!(ty, Ty::NUMBER); + } + + #[test] + fn test_type_at_position_nested_expressions() { + let code = "[[1]]"; + let (analysis, doc) = analyze_doc(code); + let root = doc.ast().syntax().clone(); + + // 
Position 2 is at the inner "1" + let ty = analysis + .type_at_position(&root, rowan::TextSize::new(2)) + .expect("should find type at position 2"); + assert_eq!(ty, Ty::NUMBER); + } + + #[test] + fn test_fields_at_position_object() { + let code = "{ a: 1, b: 2 }"; + let (analysis, doc) = analyze_doc(code); + let root = doc.ast().syntax().clone(); + + let fields = analysis + .fields_at_position(&root, rowan::TextSize::new(0)) + .expect("should find fields at position 0"); + let mut names: Vec<_> = fields.iter().map(|(n, _)| n.as_str()).collect(); + names.sort_unstable(); + assert_eq!(names, vec!["a", "b"]); + } + + #[test] + fn test_extract_fields_union_merges() { + let code = "if true then { a: 1 } else { b: 2 }"; + let (analysis, doc) = analyze_doc(code); + let root = doc.ast().syntax().clone(); + + let ty = analysis + .type_at_position(&root, rowan::TextSize::new(0)) + .expect("should find type at position 0"); + + let fields = analysis + .extract_fields(ty) + .expect("should extract fields from union type"); + let mut names: Vec<_> = fields.iter().map(|(n, _)| n.as_str()).collect(); + names.sort_unstable(); + assert_eq!(names, vec!["a", "b"]); + } + + #[test] + fn test_is_closed_object() { + let code = "{ a: 1 }"; + let (analysis, doc) = analyze_doc(code); + let root = doc.ast().syntax().clone(); + + let ty = analysis + .type_at_position(&root, rowan::TextSize::new(0)) + .unwrap(); + assert!(analysis.is_closed_object(ty)); + } + + #[test] + fn test_object_has_field() { + let code = "{ existing: 1 }"; + let (analysis, doc) = analyze_doc(code); + let root = doc.ast().syntax().clone(); + + let ty = analysis + .type_at_position(&root, rowan::TextSize::new(0)) + .unwrap(); + assert_eq!(analysis.object_has_field(ty, "existing"), Some(true)); + assert_eq!(analysis.object_has_field(ty, "nonexistent"), Some(false)); + } + + #[test] + fn test_object_field_names() { + let code = "{ x: 1, y: 2, z: 3 }"; + let (analysis, doc) = analyze_doc(code); + let root = 
doc.ast().syntax().clone(); + + let ty = analysis + .type_at_position(&root, rowan::TextSize::new(0)) + .expect("should find type at position 0"); + let mut names = analysis + .object_field_names(ty) + .expect("should get field names"); + names.sort(); + assert_eq!( + names, + vec!["x".to_string(), "y".to_string(), "z".to_string()] + ); + } + + #[test] + fn test_is_predicates() { + let (analysis, doc) = analyze_doc("[1, 2, 3]"); + let root = doc.ast().syntax().clone(); + let array_ty = analysis + .type_at_position(&root, rowan::TextSize::new(0)) + .unwrap(); + + assert!(analysis.is_indexable(array_ty)); + assert!(analysis.is_sliceable(array_ty)); + assert!(!analysis.is_callable(array_ty)); + assert!(!analysis.supports_field_access(array_ty)); + } + + #[test] + fn test_document_type() { + let (analysis, _doc) = analyze_doc("42"); + assert_eq!(analysis.document_type(), Ty::NUMBER); + } + + #[test] + fn test_document_type_object() { + let (analysis, _doc) = analyze_doc("{ a: 1 }"); + assert!(analysis.is_closed_object(analysis.document_type())); + } +} diff --git a/crates/jrsonnet-lsp-inference/src/const_eval.rs b/crates/jrsonnet-lsp-inference/src/const_eval.rs new file mode 100644 index 00000000..0e49abca --- /dev/null +++ b/crates/jrsonnet-lsp-inference/src/const_eval.rs @@ -0,0 +1,611 @@ +//! Const-eval: trace expressions through bindings without full evaluation. +//! +//! This module provides static analysis to trace expressions through local bindings +//! and field accesses. This enables go-to-definition to work for patterns like: +//! +//! ```jsonnet +//! local x = import "lib.libsonnet"; +//! local y = x; +//! y.foo.bar // go-to-def on 'bar' should trace through y -> x -> import -> foo -> bar +//! 
``` + +use jrsonnet_lsp_document::Document; +use jrsonnet_lsp_import::extract_import_path; +use jrsonnet_lsp_scope::find_definition_range; +use jrsonnet_rowan_parser::{ + nodes::{ + BindDestruct, Expr, ExprBase, ExprField, ExprImport, ExprIndex, ExprVar, FieldName, Member, + Name, ObjBody, ObjBodyMemberList, + }, + AstNode, AstToken, SyntaxKind, SyntaxNode, SyntaxToken, +}; +use rowan::TextRange; +use rustc_hash::FxHashSet; + +/// Result of const-eval tracing. +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum ConstEvalResult { + /// Traced to an import path with optional field chain. + Import { + /// The import path. + path: String, + /// Field names to navigate after import (e.g., `["foo", "bar"]` for import.foo.bar). + fields: Vec, + }, + /// Traced to a local expression (couldn't trace further). + Local { + /// The text range of the expression. + range: TextRange, + }, + /// Traced to std library, optionally with a field. + Std { + /// The field name if accessing a std field (e.g., "length" for std.length). + field: Option, + }, +} + +/// Context for const-eval to prevent infinite loops. +struct EvalContext { + /// Set of (expression range) pairs we've already visited. + seen: FxHashSet, + /// Maximum depth to prevent runaway recursion. + max_depth: usize, + /// Current depth. + depth: usize, +} + +impl EvalContext { + fn new() -> Self { + Self { + seen: FxHashSet::default(), + max_depth: 50, + depth: 0, + } + } + + /// Check if we should continue (not seen, not too deep). + fn enter(&mut self, range: TextRange) -> bool { + if self.depth >= self.max_depth { + return false; + } + if !self.seen.insert(range) { + return false; + } + self.depth += 1; + true + } + + fn exit(&mut self) { + self.depth = self.depth.saturating_sub(1); + } +} + +/// Trace an expression to find what it evaluates to statically. 
+/// +/// This doesn't do full evaluation - it traces through: +/// - Variable references to their definitions +/// - Field accesses on objects +/// - Import expressions +/// - Local bindings +pub fn trace_expr(expr: &Expr, document: &Document) -> Option { + let mut ctx = EvalContext::new(); + trace_expr_inner(expr, document, &mut ctx) +} + +/// Trace from an ExprBase. +pub fn trace_base(base: &ExprBase, document: &Document) -> Option { + let mut ctx = EvalContext::new(); + trace_base_inner(base, document, &mut ctx) +} + +/// Trace from an identifier token to what it refers to. +pub fn trace_ident(token: &SyntaxToken, document: &Document) -> Option { + if token.kind() != SyntaxKind::IDENT { + return None; + } + + let mut ctx = EvalContext::new(); + let name = token.text(); + + // First check if this is 'std' + if name == "std" { + return Some(ConstEvalResult::Std { field: None }); + } + + // Find the definition of this variable + let def_range = find_definition_range(token, name)?; + + if !ctx.enter(def_range) { + return None; + } + + // Find the bind at that definition site and trace its value + let result = trace_definition_value(def_range, document, &mut ctx); + ctx.exit(); + result +} + +fn trace_expr_inner( + expr: &Expr, + document: &Document, + ctx: &mut EvalContext, +) -> Option { + let range = expr.syntax().text_range(); + if !ctx.enter(range) { + return None; + } + + // First get the base expression + let base = expr.expr_base()?; + let result = trace_base_inner(&base, document, ctx); + + ctx.exit(); + result +} + +fn trace_base_inner( + base: &ExprBase, + document: &Document, + ctx: &mut EvalContext, +) -> Option { + match base { + ExprBase::ExprVar(var) => trace_var(var, document, ctx), + ExprBase::ExprImport(import) => trace_import(import), + ExprBase::ExprField(field) => trace_field(field, document, ctx), + ExprBase::ExprIndex(index) => trace_index(index, document, ctx), + ExprBase::ExprParened(parens) => { + let inner = parens.expr()?; + 
trace_expr_inner(&inner, document, ctx) + } + // For other expressions, we can't trace further statically + _ => Some(ConstEvalResult::Local { + range: base.syntax().text_range(), + }), + } +} + +/// Trace a variable reference. +fn trace_var(var: &ExprVar, document: &Document, ctx: &mut EvalContext) -> Option { + let name_node = var.name()?; + let ident = name_node.ident_lit()?; + let name = ident.text(); + + // Check for 'std' + if name == "std" { + return Some(ConstEvalResult::Std { field: None }); + } + + // Find the definition + let def_range = find_definition_range(&ident, name)?; + + if !ctx.enter(def_range) { + return Some(ConstEvalResult::Local { range: def_range }); + } + + let result = trace_definition_value(def_range, document, ctx); + ctx.exit(); + + result.or(Some(ConstEvalResult::Local { range: def_range })) +} + +/// Trace an import expression. +fn trace_import(import: &ExprImport) -> Option { + let path = extract_import_path(import)?; + Some(ConstEvalResult::Import { + path, + fields: vec![], + }) +} + +/// Trace a field access (e.g., `x.foo`). 
+fn trace_field( + field: &ExprField, + document: &Document, + ctx: &mut EvalContext, +) -> Option { + // Get the field name + let field_name = extract_field_name(field.field()?)?; + + // Get the base expression + let base_expr = field.base()?; + let base = base_expr.expr_base()?; + + // Trace the base + let base_result = trace_base_inner(&base, document, ctx)?; + + // Append the field to the result + match base_result { + ConstEvalResult::Import { path, mut fields } => { + fields.push(field_name); + Some(ConstEvalResult::Import { path, fields }) + } + ConstEvalResult::Std { field: None } => Some(ConstEvalResult::Std { + field: Some(field_name), + }), + ConstEvalResult::Std { field: Some(_) } => { + // std.foo.bar - can't trace further into std fields + None + } + ConstEvalResult::Local { range } => { + // Try to find the field in the local object + let ast = document.ast(); + trace_field_in_object(range, &field_name, ast.syntax(), document, ctx) + } + } +} + +/// Trace an index access (e.g., `x["foo"]`). 
+fn trace_index( + index: &ExprIndex, + document: &Document, + ctx: &mut EvalContext, +) -> Option { + // Get the index expression - must be a string literal for us to trace + let index_expr = index.index()?; + let field_name = extract_string_literal(&index_expr)?; + + // Get the base expression + let base_expr = index.base()?; + let base = base_expr.expr_base()?; + + // Trace the base + let base_result = trace_base_inner(&base, document, ctx)?; + + // Append the field to the result + match base_result { + ConstEvalResult::Import { path, mut fields } => { + fields.push(field_name); + Some(ConstEvalResult::Import { path, fields }) + } + ConstEvalResult::Std { field: None } => Some(ConstEvalResult::Std { + field: Some(field_name), + }), + ConstEvalResult::Std { field: Some(_) } => None, + ConstEvalResult::Local { range } => { + let ast = document.ast(); + trace_field_in_object(range, &field_name, ast.syntax(), document, ctx) + } + } +} + +/// Find the value bound at a definition site and trace it. +fn trace_definition_value( + def_range: TextRange, + document: &Document, + ctx: &mut EvalContext, +) -> Option { + let ast = document.ast(); + let root = ast.syntax(); + + // Find the node at the definition range + for node in root.descendants() { + if node.text_range() == def_range { + // Found the definition name - now find its value + return trace_bind_value(&node, document, ctx); + } + } + None +} + +/// Given a definition name node, find and trace the bound value. 
+fn trace_bind_value( + name_node: &SyntaxNode, + document: &Document, + ctx: &mut EvalContext, +) -> Option { + // Walk up to find the Bind node + let bind_node = name_node.ancestors().find(|n| { + matches!( + n.kind(), + SyntaxKind::BIND_DESTRUCT | SyntaxKind::BIND_FUNCTION + ) + })?; + + // Look for BindDestruct with a value + if let Some(bind_destruct) = BindDestruct::cast(bind_node) { + let value_expr = bind_destruct.value()?; + return trace_expr_inner(&value_expr, document, ctx); + } + + // BindFunction doesn't have a traceable value in the same way + None +} + +/// Try to find a field in an object at the given range. +fn trace_field_in_object( + obj_range: TextRange, + field_name: &str, + root: &SyntaxNode, + document: &Document, + ctx: &mut EvalContext, +) -> Option { + // Find the node at the object range + let node = root.descendants().find(|n| n.text_range() == obj_range)?; + + // Try to get object body from Expr wrapper + if let Some(obj_body) = Expr::cast(node.clone()) + .and_then(|e| e.expr_base()) + .and_then(|base| match base { + ExprBase::ExprObject(obj) => obj.obj_body(), + _ => None, + }) { + return find_field_in_object_body(&obj_body, field_name, document, ctx); + } + + // Try direct ExprObject + let obj = jrsonnet_rowan_parser::nodes::ExprObject::cast(node)?; + find_field_in_object_body(&obj.obj_body()?, field_name, document, ctx) +} + +/// Find a field value in an object body. +fn find_field_in_object_body( + body: &ObjBody, + field_name: &str, + document: &Document, + ctx: &mut EvalContext, +) -> Option { + match body { + ObjBody::ObjBodyMemberList(members) => { + find_field_in_member_list(members, field_name, document, ctx) + } + ObjBody::ObjBodyComp(_) => { + // Object comprehensions are too dynamic to trace + None + } + } +} + +/// Find a field in an object member list. 
fn find_field_in_member_list(
	members: &ObjBodyMemberList,
	field_name: &str,
	document: &Document,
	ctx: &mut EvalContext,
) -> Option<ConstEvalResult> {
	for member in members.members() {
		// Extract field name and value from the member. A member with an
		// unresolvable name (or no value) cannot be the one we want, so it
		// is skipped rather than aborting the whole search — previously a
		// failed extraction on an *earlier* member returned `None` for the
		// entire lookup.
		let (name, value) = match &member {
			Member::MemberFieldNormal(field) => {
				let Some(name) = field
					.field_name()
					.and_then(|field_name| extract_field_key_name(&field_name))
				else {
					continue;
				};
				let Some(value) = field.expr() else { continue };
				(name, value)
			}
			Member::MemberFieldMethod(method) => {
				let Some(name) = method
					.field_name()
					.and_then(|field_name| extract_field_key_name(&field_name))
				else {
					continue;
				};
				// Methods have params and a body; tracing into them is not
				// supported, so report the method itself as a local result.
				if name == field_name {
					return Some(ConstEvalResult::Local {
						range: method.syntax().text_range(),
					});
				}
				continue;
			}
			Member::MemberBindStmt(_) | Member::MemberAssertStmt(_) => continue,
		};

		if name == field_name {
			return trace_expr_inner(&value, document, ctx);
		}
	}
	None
}

/// Extract the key name from a FieldName node.
fn extract_field_key_name(field_name: &FieldName) -> Option<String> {
	match field_name {
		FieldName::FieldNameFixed(fixed) => {
			// Prefer a plain identifier key.
			if let Some(name) = fixed.id().and_then(|id| id.ident_lit()) {
				return Some(name.text().to_string());
			}
			// Fall back to a quoted string key.
			let text = fixed.text()?;
			Some(strip_quotes(text.text()))
		}
		FieldName::FieldNameDynamic(dynamic) => {
			// `[expr]` keys are only traceable when expr is a string literal.
			extract_string_literal(&dynamic.expr()?)
		}
	}
}

/// Extract a field name from a Name node.
fn extract_field_name(name: Name) -> Option<String> {
	Some(name.ident_lit()?.text().to_string())
}

/// Extract a string literal value from an expression.
fn extract_string_literal(expr: &Expr) -> Option<String> {
	let ExprBase::ExprString(str_expr) = expr.expr_base()? else {
		return None;
	};
	let text = str_expr.syntax().text().to_string();
	Some(strip_quotes(&text))
}

/// Strip the delimiters from a string-literal token's text.
///
/// Removes an optional leading `@` (verbatim marker) and exactly one pair of
/// matching surrounding quotes. Unlike the previous repeated
/// `trim_start_matches`/`trim_end_matches` approach, this keeps quote
/// characters that are part of the string's content: `'"hi"'` yields `"hi"`,
/// not `hi`.
fn strip_quotes(s: &str) -> String {
	let s = s.strip_prefix('@').unwrap_or(s);
	for quote in ['"', '\''] {
		if let Some(inner) = s
			.strip_prefix(quote)
			.and_then(|rest| rest.strip_suffix(quote))
		{
			return inner.to_string();
		}
	}
	// Unterminated or unquoted text: return it unchanged.
	s.to_string()
}

#[cfg(test)]
mod tests {
	use jrsonnet_lsp_document::{token_at_offset, ByteOffset, DocVersion};

	use super::*;

	fn make_doc(code: &str) -> Document {
		Document::new(code.to_string(), DocVersion::new(1))
	}

	fn find_expr_at(doc: &Document, offset: u32) -> Option<Expr> {
		let ast = doc.ast();
		let token = token_at_offset(ast.syntax(), ByteOffset::new(offset))?;
		// Find the outermost Expr containing this token (last match in ancestors)
		token.parent_ancestors().filter_map(Expr::cast).last()
	}

	#[test]
	fn test_trace_direct_import() {
		let code = r#"import "lib.libsonnet""#;
		let doc = make_doc(code);
		let expr = find_expr_at(&doc, 0).unwrap();

		let result = trace_expr(&expr, &doc);
		assert_eq!(
			result,
			Some(ConstEvalResult::Import {
				path: "lib.libsonnet".to_string(),
				fields: vec![],
			})
		);
	}

	#[test]
	fn test_trace_var_to_import() {
		let code = r#"local x = import "lib.libsonnet"; x"#;
		let doc = make_doc(code);

		// Find the 'x' at the end
		let ast = doc.ast();
		let token = token_at_offset(ast.syntax(), ByteOffset::new(34))
			.expect("should find token at offset 34");
		assert_eq!(token.text(), "x");

		let result = trace_ident(&token, &doc);
		assert_eq!(
			result,
			Some(ConstEvalResult::Import {
				path: "lib.libsonnet".to_string(),
				fields: vec![],
			})
		);
	}

	#[test]
	fn test_trace_chained_vars() {
		let code = r#"local x = import "lib.libsonnet"; local y = x; y"#;
		let doc = make_doc(code);

		// Find the 'y' at the end
		let ast = doc.ast();
		let token = token_at_offset(ast.syntax(), ByteOffset::new(47))
			.expect("should find token at offset 47");
assert_eq!(token.text(), "y"); + + let result = trace_ident(&token, &doc); + assert_eq!( + result, + Some(ConstEvalResult::Import { + path: "lib.libsonnet".to_string(), + fields: vec![], + }) + ); + } + + #[test] + fn test_trace_std() { + let code = r"std"; + let doc = make_doc(code); + + let ast = doc.ast(); + let token = token_at_offset(ast.syntax(), ByteOffset::new(0)); + let token = token.unwrap(); + + let result = trace_ident(&token, &doc); + assert_eq!(result, Some(ConstEvalResult::Std { field: None })); + } + + #[test] + fn test_trace_std_field() { + let code = r"std.length"; + let doc = make_doc(code); + + // The entire expression should trace to Std with field + let expr = find_expr_at(&doc, 0).unwrap(); + let result = trace_expr(&expr, &doc); + assert_eq!( + result, + Some(ConstEvalResult::Std { + field: Some("length".to_string()) + }) + ); + } + + #[test] + fn test_trace_import_field() { + let code = r#"local lib = import "lib.libsonnet"; lib.foo"#; + let doc = make_doc(code); + + // Find 'lib.foo' expression - position at 'foo' + let expr = find_expr_at(&doc, 40).unwrap(); + let result = trace_expr(&expr, &doc); + assert_eq!( + result, + Some(ConstEvalResult::Import { + path: "lib.libsonnet".to_string(), + fields: vec!["foo".to_string()], + }) + ); + } + + #[test] + fn test_trace_nested_import_fields() { + let code = r#"local lib = import "lib.libsonnet"; lib.foo.bar"#; + let doc = make_doc(code); + + // Find 'lib.foo.bar' expression + let expr = find_expr_at(&doc, 44).unwrap(); + let result = trace_expr(&expr, &doc); + assert_eq!( + result, + Some(ConstEvalResult::Import { + path: "lib.libsonnet".to_string(), + fields: vec!["foo".to_string(), "bar".to_string()], + }) + ); + } + + #[test] + fn test_trace_through_chained_var_with_field() { + let code = r#"local x = import "lib.libsonnet"; local y = x; y.foo"#; + let doc = make_doc(code); + + // Find 'y.foo' expression + let expr = find_expr_at(&doc, 51).unwrap(); + let result = trace_expr(&expr, &doc); + 
assert_eq!( + result, + Some(ConstEvalResult::Import { + path: "lib.libsonnet".to_string(), + fields: vec!["foo".to_string()], + }) + ); + } + + #[test] + fn test_trace_cycle_detection() { + // This would cause infinite recursion without cycle detection + // local x = x; x + // ^6 ^10 ^13 + let code = r"local x = x; x"; + let doc = make_doc(code); + + let ast = doc.ast(); + let token = token_at_offset(ast.syntax(), ByteOffset::new(13)); + let token = token.unwrap(); + + // Should not panic or hang - returns None because cycle detection + // kicks in when we try to trace the value `x` which refers to itself + let result = trace_ident(&token, &doc); + // Cycle detection returns None when we can't make progress + assert_eq!(result, None); + } +} diff --git a/crates/jrsonnet-lsp-inference/src/env.rs b/crates/jrsonnet-lsp-inference/src/env.rs new file mode 100644 index 00000000..f1007bc4 --- /dev/null +++ b/crates/jrsonnet-lsp-inference/src/env.rs @@ -0,0 +1,332 @@ +//! Type environment for tracking variable types during inference. + +use std::sync::Arc; + +use jrsonnet_lsp_types::{GlobalTyStore, MutStore, Ty}; +use rowan::TextRange; +use rustc_hash::{FxHashMap, FxHashSet}; + +/// Maximum depth for function body inference to prevent infinite recursion. +pub const MAX_FUNCTION_INFERENCE_DEPTH: usize = 5; + +/// Trait for resolving import types during analysis. +/// +/// Implementations can look up cached types for imported files, +/// enabling cross-file type sharing. +pub trait ImportResolver: std::fmt::Debug { + /// Resolve the type for an import path. + /// + /// Returns `Some(ty)` if the imported file's type is known, + /// `None` otherwise (falls back to `Ty::ANY`). + fn resolve_import(&self, import_path: &str) -> Option; +} + +/// Context for `self` and `super` within object literals (using interned types). +#[derive(Debug, Clone)] +pub struct ObjectContextInterned { + /// The type of the current object (for `self` and `$` references). 
+ pub self_type: Ty, + /// The type of the object being extended (for `super` references). + pub super_type: Option, +} + +/// Type environment for tracking variable types during inference. +/// +/// Uses interned `Ty` references for memory efficiency and O(1) equality. +/// The `MutStore` combines a global store (shared across files) with a local +/// store (per-file types created during analysis). +#[derive(Debug)] +pub struct TypeEnv { + /// Type store for interning types - combines global and local. + store: MutStore, + /// Stack of scopes, each mapping variable names to interned types. + scopes: Vec>, + /// Current depth of function body inference (to prevent infinite recursion). + function_depth: usize, + /// Constraints observed on parameters (parameter name -> observed type constraints). + /// Only populated when in_constraint_mode is true. + constraints: FxHashMap>, + /// Whether we're currently tracking parameter constraints. + in_constraint_mode: bool, + /// Set of parameter names we're tracking constraints for. + tracked_params: FxHashSet, + /// Stack of object contexts for `self` and `super` references. + /// The innermost object is at the end. + object_context: Vec, + /// Cache of inferred function types by their text range. + /// This prevents redundant inference and helps with recursive functions. + function_cache: FxHashMap, + /// Set of function ranges currently being inferred (to detect cycles). + functions_in_progress: FxHashSet, + /// Optional import resolver for cross-file type sharing. + import_resolver: Option>, +} + +impl TypeEnv { + /// Create a new type environment with a global store. 
+ pub fn new(global: Arc) -> Self { + Self { + store: MutStore::new(global), + scopes: vec![FxHashMap::default()], + function_depth: 0, + constraints: FxHashMap::default(), + in_constraint_mode: false, + tracked_params: FxHashSet::default(), + object_context: Vec::new(), + function_cache: FxHashMap::default(), + functions_in_progress: FxHashSet::default(), + import_resolver: None, + } + } + + /// Create a new type environment with a global store and import resolver. + pub fn with_import_resolver( + global: Arc, + import_resolver: Arc, + ) -> Self { + Self { + store: MutStore::new(global), + scopes: vec![FxHashMap::default()], + function_depth: 0, + constraints: FxHashMap::default(), + in_constraint_mode: false, + tracked_params: FxHashSet::default(), + object_context: Vec::new(), + function_cache: FxHashMap::default(), + functions_in_progress: FxHashSet::default(), + import_resolver: Some(import_resolver), + } + } + + /// Create a new type environment with a default global store. + /// + /// This is primarily for tests and backwards compatibility. + pub fn new_default() -> Self { + Self::new(Arc::new(GlobalTyStore::new())) + } + + /// Resolve an import path to a type using the import resolver. + /// + /// Returns `Some(ty)` if the resolver is set and the import is cached, + /// `None` otherwise. + pub fn resolve_import(&self, import_path: &str) -> Option { + self.import_resolver.as_ref()?.resolve_import(import_path) + } + + /// Get an immutable reference to the type store. + pub fn store(&self) -> &MutStore { + &self.store + } + + /// Get a mutable reference to the type store. + pub fn store_mut(&mut self) -> &mut MutStore { + &mut self.store + } + + /// Consume and return the MutStore. + /// + /// This is used when transferring the store after analysis. + pub fn into_store(self) -> MutStore { + self.store + } + + /// Check if we can infer function bodies (not too deep). 
+ pub fn can_infer_function_body(&self) -> bool { + self.function_depth < MAX_FUNCTION_INFERENCE_DEPTH + } + + /// Enter a function body for inference. + pub fn enter_function(&mut self) { + self.function_depth += 1; + } + + /// Exit a function body after inference. + pub fn exit_function(&mut self) { + self.function_depth -= 1; + } + + /// Check if a function type has been cached for the given range. + pub fn get_cached_function(&self, range: TextRange) -> Option { + self.function_cache.get(&range).copied() + } + + /// Cache a function type for the given range. + pub fn cache_function(&mut self, range: TextRange, ty: Ty) { + self.function_cache.insert(range, ty); + } + + /// Check if a function is currently being inferred (cycle detection). + pub fn is_function_in_progress(&self, range: TextRange) -> bool { + self.functions_in_progress.contains(&range) + } + + /// Mark a function as being inferred. + pub fn start_function_inference(&mut self, range: TextRange) { + self.functions_in_progress.insert(range); + } + + /// Mark a function as finished being inferred. + pub fn finish_function_inference(&mut self, range: TextRange) { + self.functions_in_progress.remove(&range); + } + + /// Start tracking parameter constraints for the given parameter names. + pub fn start_constraint_tracking(&mut self, param_names: &[String]) { + self.in_constraint_mode = true; + self.constraints.clear(); + self.tracked_params.clear(); + for name in param_names { + self.tracked_params.insert(name.clone()); + } + } + + /// Stop tracking parameter constraints and return the collected constraints. + pub fn stop_constraint_tracking_ty(&mut self) -> FxHashMap> { + self.in_constraint_mode = false; + self.tracked_params.clear(); + std::mem::take(&mut self.constraints) + } + + /// Add a constraint for a tracked parameter. 
+ pub fn add_constraint_ty(&mut self, var_name: &str, constraint: Ty) { + if self.in_constraint_mode && self.tracked_params.contains(var_name) { + self.constraints + .entry(var_name.to_string()) + .or_default() + .push(constraint); + } + } + + /// Check if a variable is being tracked for constraints. + pub fn is_tracked_param(&self, var_name: &str) -> bool { + self.in_constraint_mode && self.tracked_params.contains(var_name) + } + + /// Push a new scope. + pub fn push_scope(&mut self) { + self.scopes.push(FxHashMap::default()); + } + + /// Pop the current scope. + pub fn pop_scope(&mut self) { + if self.scopes.len() > 1 { + self.scopes.pop(); + } + } + + /// Define a variable in the current scope. + pub fn define_ty(&mut self, name: String, ty: Ty) { + if let Some(scope) = self.scopes.last_mut() { + scope.insert(name, ty); + } + } + + /// Look up a variable in all scopes (innermost first). + pub fn lookup(&self, name: &str) -> Option { + for scope in self.scopes.iter().rev() { + if let Some(&ty) = scope.get(name) { + return Some(ty); + } + } + None + } + + /// Narrow a variable's type by intersecting with a new type. + /// + /// This is used for flow typing - when we know additional constraints + /// about a variable's type from a type guard. + pub fn narrow(&mut self, name: &str, narrowed_type: Ty) { + // Find the current type + let current_ty = self.lookup(name).unwrap_or(Ty::ANY); + // Use MutStore::narrow() directly + let new_ty = self.store.narrow(current_ty, narrowed_type); + // Define in current scope (shadows outer definition) + self.define_ty(name.to_string(), new_ty); + } + + /// Widen a variable's type by subtracting a type. + /// + /// This is used for flow typing in else branches - when we know + /// a type guard returned false. 
+ pub fn widen(&mut self, name: &str, remove_type: Ty) { + // Find the current type + let current_ty = self.lookup(name).unwrap_or(Ty::ANY); + // Use MutStore::widen() directly + let new_ty = self.store.widen(current_ty, remove_type); + // Define in current scope (shadows outer definition) + self.define_ty(name.to_string(), new_ty); + } + + /// Enter an object literal context, making `self` available with the given type. + /// + /// Call this when starting to infer field types within an object literal. + /// The `super_type` should be provided when this is an object extension. + pub fn push_object_context_ty(&mut self, self_type: Ty, super_type: Option) { + self.object_context.push(ObjectContextInterned { + self_type, + super_type, + }); + } + + /// Exit the current object literal context. + pub fn pop_object_context(&mut self) { + self.object_context.pop(); + } + + /// Get the interned `Ty` for `self` in the current context. + pub fn self_ty(&self) -> Option { + self.object_context.last().map(|ctx| ctx.self_type) + } + + /// Get the interned `Ty` for `super` in the current context. + pub fn super_ty(&self) -> Option { + self.object_context.last().and_then(|ctx| ctx.super_type) + } + + /// Get the interned `Ty` for `$` (root object) in the current context. 
+ pub fn root_ty(&self) -> Option { + self.object_context.first().map(|ctx| ctx.self_type) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_type_env_scoping() { + let mut env = TypeEnv::new_default(); + + env.define_ty("x".to_string(), Ty::NUMBER); + assert_eq!(env.lookup("x"), Some(Ty::NUMBER)); + + env.push_scope(); + env.define_ty("x".to_string(), Ty::STRING); + assert_eq!(env.lookup("x"), Some(Ty::STRING)); + + env.pop_scope(); + assert_eq!(env.lookup("x"), Some(Ty::NUMBER)); + } + + #[test] + fn test_env_narrow() { + let mut env = TypeEnv::new_default(); + env.define_ty("x".to_string(), Ty::ANY); + env.push_scope(); + env.narrow("x", Ty::NUMBER); + assert_eq!(env.lookup("x"), Some(Ty::NUMBER)); + env.pop_scope(); + assert_eq!(env.lookup("x"), Some(Ty::ANY)); + } + + #[test] + fn test_env_widen() { + let mut env = TypeEnv::new_default(); + let union = env.store_mut().union(vec![Ty::NUMBER, Ty::NULL]); + env.define_ty("x".to_string(), union); + env.push_scope(); + env.widen("x", Ty::NULL); + assert_eq!(env.lookup("x"), Some(Ty::NUMBER)); + env.pop_scope(); + } +} diff --git a/crates/jrsonnet-lsp-inference/src/expr.rs b/crates/jrsonnet-lsp-inference/src/expr.rs new file mode 100644 index 00000000..5495fb93 --- /dev/null +++ b/crates/jrsonnet-lsp-inference/src/expr.rs @@ -0,0 +1,1977 @@ +//! Expression type inference. 
+ +use jrsonnet_lsp_document::Document; +use jrsonnet_lsp_types::{ + FieldDefInterned, FieldVis, FunctionData, NumBounds, ObjectData, ParamInterned, ReturnSpec, Ty, + TyData, +}; +use jrsonnet_rowan_parser::{ + nodes::{BinaryOperatorKind, Bind, ExprBase, LiteralKind, UnaryOperatorKind}, + AstNode, AstToken, +}; +use rustc_hash::FxHashMap; + +use crate::{ + env::TypeEnv, + flow::{self, Facts}, + helpers::{ + extract_params_with_default_types_ty, extract_var_name_from_expr, + infer_stdlib_field_access_ty, + }, + object::{infer_object_type_ty, infer_object_type_with_super_ty}, +}; + +/// Apply type facts to the environment, narrowing variable types. +/// +/// This is used to apply facts extracted from assert statements and +/// type guard conditions to narrow types for subsequent code. +fn apply_facts_to_env(facts: &Facts, env: &mut TypeEnv) { + for (var_name, fact) in facts.iter() { + // Get the current type of the variable + let current_ty = env.lookup(var_name).unwrap_or(Ty::ANY); + // Apply the fact to narrow the type + let narrowed_ty = fact.apply_to(current_ty, env.store_mut()); + // Update the environment with the narrowed type + env.define_ty(var_name.clone(), narrowed_ty); + } +} + +/// Infer the type of a document's root expression, returning an interned `Ty` and the environment. +/// +/// This is useful for tests that need to inspect the type structure using `TyData`. +pub fn infer_document_type_ty(document: &Document) -> (Ty, TypeEnv) { + let ast = document.ast(); + let mut env = TypeEnv::new_default(); + + let ty = if let Some(expr) = ast.expr() { + infer_expr_ty(&expr, &mut env) + } else { + Ty::ANY + }; + + (ty, env) +} + +/// Infer the type of an expression, returning an interned `Ty`. +pub fn infer_expr_ty(expr: &jrsonnet_rowan_parser::nodes::Expr, env: &mut TypeEnv) -> Ty { + infer_expr_ty_with_expected(expr, env, None) +} + +/// Infer the type of an expression with an optional expected type, returning `Ty`. 
+/// +/// This is the efficient internal version that works with interned types throughout. +pub fn infer_expr_ty_with_expected( + expr: &jrsonnet_rowan_parser::nodes::Expr, + env: &mut TypeEnv, + expected: Option, +) -> Ty { + // First, handle local bindings and assert statements that may precede the expression + for stmt in expr.stmts() { + match stmt { + jrsonnet_rowan_parser::nodes::Stmt::StmtLocal(stmt_local) => { + for bind in stmt_local.binds() { + infer_bind_type_ty(&bind, env); + } + } + jrsonnet_rowan_parser::nodes::Stmt::StmtAssert(stmt_assert) => { + // Extract type facts from assert conditions and apply them + if let Some(assertion) = stmt_assert.assertion() { + if let Some(cond) = assertion.condition() { + let facts = flow::extract_facts(&cond); + apply_facts_to_env(&facts, env); + } + } + } + } + } + + // Get the base expression type + if let Some(base) = expr.expr_base() { + infer_base_ty(base, env, expected) + } else { + Ty::ANY + } +} + +/// Infer types from a bind (local variable definition) using interned types. 
+pub(super) fn infer_bind_type_ty(bind: &Bind, env: &mut TypeEnv) { + match bind { + Bind::BindDestruct(bd) => { + if let Some(destruct) = bd.into() { + if let jrsonnet_rowan_parser::nodes::Destruct::DestructFull(full) = destruct { + if let Some(name_node) = full.name() { + if let Some(ident) = name_node.ident_lit() { + let name = ident.text().to_string(); + let ty = bd + .value() + .map(|v| infer_expr_ty(&v, env)) + .unwrap_or(Ty::ANY); + env.define_ty(name, ty); + } + } + } + } + } + Bind::BindFunction(bf) => { + if let Some(name_node) = bf.name() { + if let Some(ident) = name_node.ident_lit() { + let name = ident.text().to_string(); + let params = bf + .params() + .map(|p| extract_params_with_default_types_ty(&p, env)) + .unwrap_or_default(); + let func_data = FunctionData { + params, + return_spec: ReturnSpec::Fixed(Ty::ANY), + variadic: false, + }; + let func_ty = env.store_mut().intern(TyData::Function(func_data)); + env.define_ty(name, func_ty); + } + } + } + } +} + +/// Bind a destructuring pattern with an interned type. +/// +/// This is used for comprehension variables where we know the element type +/// from the iterator expression. 
+pub(super) fn bind_destruct_with_type_ty( + destruct: &jrsonnet_rowan_parser::nodes::Destruct, + ty: Ty, + env: &mut TypeEnv, +) { + use jrsonnet_rowan_parser::nodes::{Destruct, DestructArrayPart}; + + match destruct { + Destruct::DestructFull(full) => { + let Some(ident) = full.name().and_then(|n| n.ident_lit()) else { + return; + }; + env.define_ty(ident.text().to_string(), ty); + } + Destruct::DestructArray(arr) => { + let elem_types = extract_array_element_types_ty(ty, env); + for (i, part) in arr.destruct_array_parts().enumerate() { + let DestructArrayPart::DestructArrayElement(elem) = part else { + continue; + }; + let Some(inner) = elem.destruct() else { + continue; + }; + let elem_ty = elem_types.get(i).copied().unwrap_or(Ty::ANY); + bind_destruct_with_type_ty(&inner, elem_ty, env); + } + } + Destruct::DestructObject(obj) => { + for field in obj.destruct_object_fields() { + let Some(inner) = field.destruct() else { + continue; + }; + let field_ty = lookup_destruct_field_type_ty(&inner, ty, env); + bind_destruct_with_type_ty(&inner, field_ty, env); + } + } + Destruct::DestructSkip(_) => {} + } +} + +/// Extract element types from an array or tuple type (Ty version). +fn extract_array_element_types_ty(ty: Ty, env: &TypeEnv) -> Vec { + let store = env.store(); + match store.get(ty) { + TyData::Tuple { ref elems } => elems.clone(), + TyData::Array { elem, .. } => vec![elem], + _ => vec![], + } +} + +/// Look up the type for a destructured field from an object type (Ty version). 
+fn lookup_destruct_field_type_ty( + destruct: &jrsonnet_rowan_parser::nodes::Destruct, + ty: Ty, + env: &TypeEnv, +) -> Ty { + let store = env.store(); + let TyData::Object(ref obj_data) = store.get(ty) else { + return Ty::ANY; + }; + + let jrsonnet_rowan_parser::nodes::Destruct::DestructFull(full) = destruct else { + return Ty::ANY; + }; + + let Some(ident) = full.name().and_then(|n| n.ident_lit()) else { + return Ty::ANY; + }; + + obj_data + .get_field(ident.text()) + .map(|fd| fd.ty) + .unwrap_or(Ty::ANY) +} + +/// Check if an expression is guaranteed to diverge (never return). +/// +/// An expression diverges if it has type `Never` - meaning it always +/// throws an error. This is used for unreachable code detection. +pub fn is_divergent(expr: &jrsonnet_rowan_parser::nodes::Expr, env: &mut TypeEnv) -> bool { + infer_expr_ty(expr, env).is_never() +} + +/// Infer the type of a base expression with an optional expected type, returning `Ty`. +/// +/// This is the efficient internal version that works with interned types throughout. 
pub(super) fn infer_base_ty(base: ExprBase, env: &mut TypeEnv, expected: Option) -> Ty {
	match &base {
		// Primitives - return constants directly
		ExprBase::ExprLiteral(lit) => {
			if let Some(literal) = lit.literal() {
				match literal.kind() {
					LiteralKind::NullKw => return Ty::NULL,
					LiteralKind::TrueKw => return Ty::TRUE,
					LiteralKind::FalseKw => return Ty::FALSE,
					LiteralKind::SelfKw => {
						// `self` refers to the innermost object
						return env
							.self_ty()
							.unwrap_or_else(|| env.store_mut().object_any());
					}
					LiteralKind::Dollar => {
						// `$` refers to the outermost (root) object
						return env
							.root_ty()
							.unwrap_or_else(|| env.store_mut().object_any());
					}
					LiteralKind::SuperKw => {
						// `super` refers to the base object being extended
						return env
							.super_ty()
							.unwrap_or_else(|| env.store_mut().object_any());
					}
				}
			}
			// Literal token missing (broken parse): fall back to ANY.
			Ty::ANY
		}
		ExprBase::ExprNumber(_) => Ty::NUMBER,
		ExprBase::ExprString(_) => Ty::STRING,
		// `error ...` never yields a value.
		ExprBase::ExprError(_) => Ty::NEVER,

		// Variable lookup - use Ty-native lookup
		ExprBase::ExprVar(var) => {
			if let Some(name_node) = var.name() {
				if let Some(ident) = name_node.ident_lit() {
					let name = ident.text();
					if name == "std" {
						// std is a special open object
						return env.store_mut().object_any();
					}
					// Unbound names degrade to ANY rather than erroring here.
					return env.lookup(name).unwrap_or(Ty::ANY);
				}
			}
			Ty::ANY
		}

		// Parenthesized expression - recurse with expected type
		ExprBase::ExprParened(parens) => {
			if let Some(inner) = parens.expr() {
				return infer_expr_ty_with_expected(&inner, env, expected);
			}
			Ty::ANY
		}

		// Import - try to resolve the type from the import cache
		ExprBase::ExprImport(import) => {
			let Some(text) = import.text() else {
				return Ty::ANY;
			};
			let s = text.syntax().text();
			// Need at least 2 chars for the quotes
			if s.len() < 2 {
				return Ty::ANY;
			}
			// Strip quotes from the string literal
			let path_str = &s[1..s.len() - 1];
			if path_str.is_empty() {
				return Ty::ANY;
			}
			// Try to resolve the import type, fall back to ANY
			env.resolve_import(path_str).unwrap_or(Ty::ANY)
		}

		// Unary operators
		ExprBase::ExprUnary(unary) => {
			let rhs_ty = unary
				.rhs()
				.map(|rhs| infer_expr_ty(&rhs, env))
				.unwrap_or(Ty::ANY);
			// Divergence propagates through the operand.
			if rhs_ty == Ty::NEVER {
				return Ty::NEVER;
			}
			match unary.unary_operator().map(|op| op.kind()) {
				Some(UnaryOperatorKind::Not) => Ty::BOOL,
				Some(UnaryOperatorKind::Minus | UnaryOperatorKind::BitNot) => Ty::NUMBER,
				_ => Ty::ANY,
			}
		}

		// Binary operators - handle simple cases directly
		ExprBase::ExprBinary(binary) => infer_binary_expr_base_ty(binary, env),

		// Index access: arr[0], obj["field"], str[0]
		ExprBase::ExprIndex(idx) => infer_index_expr_base_ty(idx, env),

		// Slice: arr[1:3], str[::2]
		ExprBase::ExprSlice(slice) => infer_slice_expr_base_ty(slice, env),

		// If-then-else with flow typing
		ExprBase::ExprIfThenElse(if_expr) => {
			infer_if_then_else_expr_base_ty(if_expr, env, expected)
		}

		// Field access: obj.field or std.fn
		ExprBase::ExprField(field) => infer_field_expr_base_ty(field, env),

		// Function call: fn(args)
		ExprBase::ExprCall(call) => infer_call_expr_base_ty(call, env),

		// Array literal: [a, b, c]
		ExprBase::ExprArray(arr) => infer_array_expr_base_ty(arr, env, expected),

		// Array comprehension: [expr for x in arr]
		ExprBase::ExprArrayComp(comp) => infer_array_comp_expr_base_ty(comp, env),

		// Object literal: { field: value }
		ExprBase::ExprObject(obj) => infer_object_type_ty(obj.obj_body().as_ref(), env),

		// Function definition: function(x) body
		ExprBase::ExprFunction(func) => infer_function_expr_base_ty(func, env),

		// Object extension: base { ... }
		ExprBase::ExprObjExtend(extend) => infer_obj_extend_expr_base_ty(extend, env),
	}
}

/// Infer the result type of a binary operator from its operand types.
///
/// Arithmetic/bitwise operators yield `number`, comparisons yield `bool`,
/// and `+` is overloaded on strings, numbers, arrays, tuples, and objects.
fn infer_binary_expr_base_ty(
	binary: &jrsonnet_rowan_parser::nodes::ExprBinary,
	env: &mut TypeEnv,
) -> Ty {
	let lhs_ty = binary
		.lhs()
		.map(|e| infer_expr_ty(&e, env))
		.unwrap_or(Ty::ANY);
	if lhs_ty == Ty::NEVER {
		return Ty::NEVER;
	}
	let rhs_ty = binary
		.rhs()
		.map(|e| infer_expr_ty(&e, env))
		.unwrap_or(Ty::ANY);
	if rhs_ty == Ty::NEVER {
		return Ty::NEVER;
	}

	match binary.binary_operator().map(|op| op.kind()) {
		Some(
			BinaryOperatorKind::Minus
			| BinaryOperatorKind::Mul
			| BinaryOperatorKind::Div
			| BinaryOperatorKind::Modulo
			| BinaryOperatorKind::BitAnd
			| BinaryOperatorKind::BitOr
			| BinaryOperatorKind::BitXor
			| BinaryOperatorKind::Lhs
			| BinaryOperatorKind::Rhs,
		) => Ty::NUMBER,
		Some(
			BinaryOperatorKind::Lt
			| BinaryOperatorKind::Le
			| BinaryOperatorKind::Gt
			| BinaryOperatorKind::Ge
			| BinaryOperatorKind::Eq
			| BinaryOperatorKind::Ne
			| BinaryOperatorKind::InKw,
		) => Ty::BOOL,
		Some(BinaryOperatorKind::Plus) => {
			if lhs_ty == Ty::STRING && rhs_ty == Ty::STRING {
				return Ty::STRING;
			}
			if lhs_ty == Ty::NUMBER && rhs_ty == Ty::NUMBER {
				return Ty::NUMBER;
			}
			let store = env.store_mut();
			let lhs_data = store.get(lhs_ty);
			let rhs_data = store.get(rhs_ty);
			match (&lhs_data, &rhs_data) {
				// array + array: union of the element types.
				(TyData::Array { elem: l, .. }, TyData::Array { elem: r, .. }) => {
					let elem_union = store.union(vec![*l, *r]);
					store.array(elem_union)
				}
				// tuple + tuple: concatenated element list.
				(TyData::Tuple { elems: l }, TyData::Tuple { elems: r }) => {
					let mut elems = l.clone();
					elems.extend(r.iter().copied());
					store.tuple(elems)
				}
				// object + object: field-wise merge.
				(TyData::Object(left_obj), TyData::Object(right_obj)) => {
					let merged = ObjectData::merge(left_obj, right_obj);
					store.object(merged)
				}
				// NOTE(review): all other operand combinations (including
				// `any + string`) fall back to `number` — TODO confirm this
				// is intended rather than `any`.
				_ => Ty::NUMBER,
			}
		}
		Some(
			BinaryOperatorKind::And | BinaryOperatorKind::Or | BinaryOperatorKind::NullCoaelse,
		) => env.store_mut().union(vec![lhs_ty, rhs_ty]),
		_ => Ty::ANY,
	}
}

/// Infer the element type produced by an index expression.
///
/// Strings index to strings, arrays to their element type, tuples to the
/// union of their element types; everything else degrades to ANY.
fn infer_index_expr_base_ty(
	idx: &jrsonnet_rowan_parser::nodes::ExprIndex,
	env: &mut TypeEnv,
) -> Ty {
	let base_ty = idx
		.base()
		.map(|e| infer_expr_ty(&e, env))
		.unwrap_or(Ty::ANY);
	if base_ty == Ty::NEVER {
		return Ty::NEVER;
	}
	// The index expression is inferred only for divergence propagation;
	// its value does not otherwise affect the result type here.
	if let Some(index_expr) = idx.index() {
		let idx_ty = infer_expr_ty(&index_expr, env);
		if idx_ty == Ty::NEVER {
			return Ty::NEVER;
		}
	}
	if base_ty == Ty::STRING {
		return Ty::STRING;
	}
	let store = env.store_mut();
	match store.get(base_ty) {
		TyData::Array { elem, .. } => elem,
		TyData::Tuple { ref elems } => {
			let elems_copy: Vec = elems.clone();
			store.union(elems_copy)
		}
		_ => Ty::ANY,
	}
}

/// Infer the type of a slice expression.
///
/// Slicing an array yields the same array type; slicing a tuple yields an
/// array of the union of its element types; slicing a string yields string.
fn infer_slice_expr_base_ty(
	slice: &jrsonnet_rowan_parser::nodes::ExprSlice,
	env: &mut TypeEnv,
) -> Ty {
	let base_ty = slice
		.base()
		.map(|e| infer_expr_ty(&e, env))
		.unwrap_or(Ty::ANY);
	if base_ty == Ty::NEVER {
		return Ty::NEVER;
	}
	let store = env.store_mut();
	match store.get(base_ty) {
		TyData::Array { ..
} => base_ty,
		TyData::Tuple { ref elems } => {
			// Slicing a tuple loses positional info: result is an array of
			// the union of the tuple's element types.
			let elems_copy: Vec = elems.clone();
			let elem_union = store.union(elems_copy);
			store.array(elem_union)
		}
		_ if base_ty == Ty::STRING => Ty::STRING,
		_ => Ty::ANY,
	}
}

/// Infer an if/then/else with flow typing.
///
/// Facts extracted from the condition narrow variables inside the `then`
/// branch and are applied negated inside the `else` branch; the result is
/// the union of both branch types.
fn infer_if_then_else_expr_base_ty(
	if_expr: &jrsonnet_rowan_parser::nodes::ExprIfThenElse,
	env: &mut TypeEnv,
	expected: Option,
) -> Ty {
	let facts = if let Some(cond) = if_expr.cond() {
		let cond_ty = infer_expr_ty(&cond, env);
		if cond_ty == Ty::NEVER {
			return Ty::NEVER;
		}
		flow::extract_facts(&cond)
	} else {
		Facts::new()
	};

	let then_ty = if let Some(then_clause) = if_expr.then() {
		if let Some(then_expr) = then_clause.expr() {
			// Narrowings live in a child scope so they don't escape the branch.
			env.push_scope();
			for (var_name, fact) in facts.iter() {
				let current_ty = env.lookup(var_name).unwrap_or(Ty::ANY);
				let narrowed_ty = fact.apply_to(current_ty, env.store_mut());
				env.define_ty(var_name.clone(), narrowed_ty);
			}
			let ty = infer_expr_ty_with_expected(&then_expr, env, expected);
			env.pop_scope();
			ty
		} else {
			Ty::ANY
		}
	} else {
		Ty::ANY
	};

	let else_ty = if let Some(else_clause) = if_expr.else_() {
		if let Some(else_expr) = else_clause.expr() {
			env.push_scope();
			for (var_name, fact) in facts.iter() {
				let current_ty = env.lookup(var_name).unwrap_or(Ty::ANY);
				// Negated facts: the guard was false in this branch.
				let widened_ty = fact.apply_negated(current_ty, env.store_mut());
				env.define_ty(var_name.clone(), widened_ty);
			}
			let ty = infer_expr_ty_with_expected(&else_expr, env, expected);
			env.pop_scope();
			ty
		} else {
			Ty::ANY
		}
	} else {
		Ty::ANY
	};

	env.store_mut().union(vec![then_ty, else_ty])
}

/// Infer the type of a field access (`obj.field`).
///
/// Also records an object-shape constraint when the base is a tracked
/// function parameter, which feeds parameter-type inference.
fn infer_field_expr_base_ty(
	field: &jrsonnet_rowan_parser::nodes::ExprField,
	env: &mut TypeEnv,
) -> Ty {
	// `std.foo` is resolved through the stdlib table first.
	if let Some(ty) = infer_stdlib_field_access_ty(field, env) {
		return ty;
	}

	if let Some(base_expr) = field.base() {
		if let Some(var_name) = extract_var_name_from_expr(&base_expr) {
			if env.is_tracked_param(&var_name) {
				let field_name = field
					.field()
					.and_then(|n| n.ident_lit())
					.map(|t| t.text().to_string());
				if let Some(fn_name) = field_name {
					// Accessing `param.foo` implies `param` is an object
					// with (at least) a `foo` field.
					let obj_data = ObjectData {
						fields: vec![(
							fn_name,
							FieldDefInterned {
								ty: Ty::ANY,
								required: true,
								visibility: FieldVis::Normal,
							},
						)],
						has_unknown: true,
					};
					let constraint = env.store_mut().object(obj_data);
					env.add_constraint_ty(&var_name, constraint);
				} else {
					let constraint = env.store_mut().object_any();
					env.add_constraint_ty(&var_name, constraint);
				}
			}
		}
	}

	let base_ty = field
		.base()
		.map(|e| infer_expr_ty(&e, env))
		.unwrap_or(Ty::ANY);
	if base_ty == Ty::NEVER {
		return Ty::NEVER;
	}
	if base_ty == Ty::STRING {
		return Ty::STRING;
	}

	let field_name = field
		.field()
		.and_then(|n| n.ident_lit())
		.map(|t| t.text().to_string());

	let store = env.store();
	if let TyData::Object(ref obj_data) = store.get(base_ty) {
		if let Some(field_name) = &field_name {
			for (name, field_def) in &obj_data.fields {
				if name == field_name {
					return field_def.ty;
				}
			}
		}
		if obj_data.has_unknown {
			return Ty::ANY;
		}
	}

	Ty::ANY
}

/// Infer the type of a function call by dispatching on the callee's
/// `ReturnSpec` (fixed types plus the argument-dependent specs used to
/// model stdlib functions such as map/filter/sort).
fn infer_call_expr_base_ty(call: &jrsonnet_rowan_parser::nodes::ExprCall, env: &mut TypeEnv) -> Ty {
	let base_ty = call
		.callee()
		.map(|e| infer_expr_ty(&e, env))
		.unwrap_or(Ty::ANY);
	if base_ty == Ty::NEVER {
		return Ty::NEVER;
	}

	// Infer argument types up front; divergent arguments short-circuit.
	let mut arg_types = Vec::new();
	if let Some(args) = call.args_desc() {
		for arg in args.args() {
			if let Some(expr) = arg.expr() {
				let arg_ty = infer_expr_ty(&expr, env);
				if arg_ty == Ty::NEVER {
					return Ty::NEVER;
				}
				arg_types.push(arg_ty);
			}
		}
	}

	let store = env.store_mut();
	if let TyData::Function(ref func_data) = store.get(base_ty) {
		let return_spec = func_data.return_spec.clone();
		return match return_spec {
			ReturnSpec::Fixed(ret_ty) => ret_ty,
			// Result has the same type as argument `idx`.
			ReturnSpec::SameAsArg(idx) => arg_types.get(idx).copied().unwrap_or(Ty::ANY),
			ReturnSpec::NonNegative => store.bounded_number(NumBounds::non_negative()),
			// Result is an array whose element type is argument `idx`.
			ReturnSpec::ArrayOfArg(idx) => {
				let arg_ty = arg_types.get(idx).copied().unwrap_or(Ty::ANY);
				store.array(arg_ty)
			}
			// Result is an array with the same element type as argument `idx`.
			ReturnSpec::ArrayWithSameElements(idx) => {
				let arg_ty = arg_types.get(idx).copied().unwrap_or(Ty::ANY);
				match store.get(arg_ty) {
					TyData::Array { elem, .. } => store.array(elem),
					TyData::Tuple { ref elems } => {
						let elems_copy: Vec = elems.clone();
						let elem_union = store.union(elems_copy);
						store.array(elem_union)
					}
					_ => store.array(Ty::ANY),
				}
			}
			// Like ArrayWithSameElements, but the result is a set.
			ReturnSpec::SetWithSameElements(idx) => {
				let arg_ty = arg_types.get(idx).copied().unwrap_or(Ty::ANY);
				match store.get(arg_ty) {
					TyData::Array { elem, .. } => store.array_set(elem),
					TyData::Tuple { ref elems } => {
						let elems_copy: Vec = elems.clone();
						let elem_union = store.union(elems_copy);
						store.array_set(elem_union)
					}
					_ => store.array_set(Ty::ANY),
				}
			}
			// Result is the return type of the function passed as argument `idx`.
			ReturnSpec::FuncReturnType(idx) => {
				let arg_ty = arg_types.get(idx).copied().unwrap_or(Ty::ANY);
				match store.get(arg_ty) {
					TyData::Function(ref func_data) => match &func_data.return_spec {
						ReturnSpec::Fixed(ret) => *ret,
						_ => Ty::ANY,
					},
					_ => Ty::ANY,
				}
			}
			// Result is an array of the callback's return type (e.g. std.map).
			ReturnSpec::ArrayOfFuncReturn(idx) => {
				let arg_ty = arg_types.get(idx).copied().unwrap_or(Ty::ANY);
				let ret_ty = match store.get(arg_ty) {
					TyData::Function(ref func_data) => match &func_data.return_spec {
						ReturnSpec::Fixed(ret) => *ret,
						_ => Ty::ANY,
					},
					_ => Ty::ANY,
				};
				store.array(ret_ty)
			}
			// Callback returns an array; the result flattens one level.
			ReturnSpec::FlatMapResult(idx) => {
				let arg_ty = arg_types.get(idx).copied().unwrap_or(Ty::ANY);
				match store.get(arg_ty) {
					TyData::Function(ref func_data) => match &func_data.return_spec {
						ReturnSpec::Fixed(ret) => match store.get(*ret) {
							TyData::Array { elem, .. } => store.array(elem),
							_ => store.array(Ty::ANY),
						},
						_ => store.array(Ty::ANY),
					},
					_ => store.array(Ty::ANY),
				}
			}
			// Result is an array of the object argument's field value types.
			ReturnSpec::ObjectValuesType(idx) => {
				let arg_ty = arg_types.get(idx).copied().unwrap_or(Ty::ANY);
				match store.get(arg_ty) {
					TyData::Object(ref obj_data) => {
						if obj_data.fields.is_empty() {
							store.array(Ty::ANY)
						} else {
							let field_types: Vec =
								obj_data.fields.iter().map(|(_, fd)| fd.ty).collect();
							let elem_union = store.union(field_types);
							store.array(elem_union)
						}
					}
					_ => store.array(Ty::ANY),
				}
			}
		};
	}

	Ty::ANY
}

/// Infer the type of an array literal.
///
/// When every element type is known the literal becomes a tuple; otherwise
/// it is an array of the union of the element types. An expected array or
/// tuple type propagates element expectations positionally.
fn infer_array_expr_base_ty(
	arr: &jrsonnet_rowan_parser::nodes::ExprArray,
	env: &mut TypeEnv,
	expected: Option,
) -> Ty {
	let (expected_array_elem, expected_tuple_elems) = if let Some(exp_ty) = expected {
		let store = env.store();
		match store.get(exp_ty) {
			TyData::Array { elem, .. } => (Some(elem), None),
			TyData::Tuple { elems } => (None, Some(elems)),
			_ => (None, None),
		}
	} else {
		(None, None)
	};

	let elem_types: Vec = arr
		.exprs()
		.enumerate()
		.map(|(i, e)| {
			let elem_expected = expected_tuple_elems
				.as_ref()
				.and_then(|elems| elems.get(i).copied())
				.or(expected_array_elem);
			infer_expr_ty_with_expected(&e, env, elem_expected)
		})
		.collect();

	if elem_types.is_empty() {
		// `[]` takes its element type from the expectation if there is one.
		if let Some(elem_ty) = expected_array_elem {
			return env.store_mut().array(elem_ty);
		}
		return env.store_mut().array(Ty::ANY);
	}

	let all_known = elem_types.iter().all(|ty| *ty != Ty::ANY);
	if all_known {
		return env.store_mut().tuple(elem_types);
	}

	let elem_ty = env.store_mut().union(elem_types);
	env.store_mut().array(elem_ty)
}

/// Infer the type of an array comprehension: the comprehension variables
/// are bound to the iterated element type, and the result is an array of
/// the body's type.
fn infer_array_comp_expr_base_ty(
	comp: &jrsonnet_rowan_parser::nodes::ExprArrayComp,
	env: &mut TypeEnv,
) -> Ty {
	env.push_scope();

	for comp_spec in comp.comp_specs() {
		if let jrsonnet_rowan_parser::nodes::CompSpec::ForSpec(for_spec) = comp_spec {
			let iter_ty = for_spec
				.expr()
				.map(|e|
infer_expr_ty(&e, env))
				.unwrap_or(Ty::ANY);

			let elem_ty = {
				let store = env.store_mut();
				match store.get(iter_ty) {
					TyData::Array { elem, .. } => elem,
					TyData::Tuple { ref elems } => {
						let elems_copy: Vec = elems.clone();
						store.union(elems_copy)
					}
					_ if iter_ty == Ty::NEVER => Ty::NEVER,
					_ => Ty::ANY,
				}
			};

			if let Some(destruct) = for_spec.bind() {
				bind_destruct_with_type_ty(&destruct, elem_ty, env);
			}
		}
	}

	let body_ty = comp
		.expr()
		.map(|e| infer_expr_ty(&e, env))
		.unwrap_or(Ty::ANY);

	env.pop_scope();
	env.store_mut().array(body_ty)
}

/// Infer the type of a function literal.
///
/// Results are cached per text range; recursion is broken by returning an
/// all-ANY signature while the same function is already being inferred.
/// While inferring the body, parameter constraints (e.g. field accesses on
/// a parameter) are collected and intersected into the parameter types.
fn infer_function_expr_base_ty(
	func: &jrsonnet_rowan_parser::nodes::ExprFunction,
	env: &mut TypeEnv,
) -> Ty {
	let func_range = func.syntax().text_range();

	if let Some(cached) = env.get_cached_function(func_range) {
		return cached;
	}

	if env.is_function_in_progress(func_range) {
		// Cycle: return a conservative signature with ANY parameter and
		// return types instead of recursing forever.
		let params = func
			.params_desc()
			.map(|p| extract_params_with_default_types_ty(&p, env))
			.unwrap_or_default();
		let any_params: Vec<_> = params
			.into_iter()
			.map(|p| ParamInterned {
				name: p.name,
				ty: Ty::ANY,
				has_default: p.has_default,
			})
			.collect();
		let store = env.store_mut();
		return store.function(FunctionData {
			params: any_params,
			return_spec: ReturnSpec::Fixed(Ty::ANY),
			variadic: false,
		});
	}

	env.start_function_inference(func_range);

	let params = if let Some(params_desc) = func.params_desc() {
		extract_params_with_default_types_ty(&params_desc, env)
	} else {
		Vec::new()
	};

	let (return_ty, param_constraints) = if env.can_infer_function_body() {
		if let Some(body) = func.expr() {
			env.push_scope();
			let param_names: Vec = params.iter().map(|p| p.name.clone()).collect();
			for param in &params {
				env.define_ty(param.name.clone(), param.ty);
			}

			env.start_constraint_tracking(&param_names);

			env.enter_function();
			let body_ty = infer_expr_ty(&body, env);
			env.exit_function();

			let constraints = env.stop_constraint_tracking_ty();

			env.pop_scope();
			(body_ty, constraints)
		} else {
			(Ty::ANY, FxHashMap::default())
		}
	} else {
		// Depth limit reached: skip body inference entirely.
		(Ty::ANY, FxHashMap::default())
	};

	let final_params: Vec = params
		.into_iter()
		.map(|p| {
			let mut param_ty = p.ty;
			// Intersect each collected constraint into the parameter type.
			if let Some(constraints) = param_constraints.get(&p.name) {
				for constraint_ty in constraints {
					param_ty = env.store_mut().narrow(param_ty, *constraint_ty);
				}
			}
			ParamInterned {
				name: p.name,
				ty: param_ty,
				has_default: p.has_default,
			}
		})
		.collect();

	let result = env.store_mut().function(FunctionData {
		params: final_params,
		return_spec: ReturnSpec::Fixed(return_ty),
		variadic: false,
	});

	env.finish_function_inference(func_range);
	env.cache_function(func_range, result);

	result
}

/// Infer the type of an object extension (`base { ... }`): the extension
/// body is inferred with `super` bound to the base type, then the two
/// object shapes are merged.
fn infer_obj_extend_expr_base_ty(
	extend: &jrsonnet_rowan_parser::nodes::ExprObjExtend,
	env: &mut TypeEnv,
) -> Ty {
	let base_ty = extend
		.lhs_work()
		.map(|e| infer_expr_ty(&e, env))
		.unwrap_or_else(|| env.store_mut().object_any());

	if base_ty == Ty::NEVER {
		return Ty::NEVER;
	}

	// Only a literal object body on the right-hand side is analyzed.
	let extension_body = extend.rhs_work().and_then(|e| {
		if let Some(ExprBase::ExprObject(obj)) = e.expr_base() {
			obj.obj_body()
		} else {
			None
		}
	});

	let extension_ty = infer_object_type_with_super_ty(extension_body.as_ref(), env, Some(base_ty));

	// Clone the object shapes out first so the store borrow ends before
	// the mutable interning calls below.
	let (base_data, ext_data) = {
		let store = env.store();
		let base_data = if let TyData::Object(ref obj) = store.get(base_ty) {
			Some(obj.clone())
		} else {
			None
		};
		let ext_data = if let TyData::Object(ref obj) = store.get(extension_ty) {
			Some(obj.clone())
		} else {
			None
		};
		(base_data, ext_data)
	};

	match (base_data, ext_data) {
		(Some(base_obj), Some(ext_obj)) => {
			let merged = ObjectData::merge(&base_obj, &ext_obj);
			env.store_mut().object(merged)
		}
		(None, Some(ext_obj)) => env.store_mut().object(ext_obj),
		(Some(base_obj), None) => env.store_mut().object(base_obj),
		(None, None) => env.store_mut().object_any(),
	}
}

#[cfg(test)]
mod tests {
	use std::collections::BTreeSet;

	use jrsonnet_lsp_document::DocVersion;
	use jrsonnet_lsp_types::{
		FieldDefInterned, FieldVis, FunctionData, MutStore, ObjectData, ParamInterned, ReturnSpec,
		TyData,
	};
	// NOTE(review): `rstest` is not used by the tests visible in this chunk;
	// presumably parameterized tests below use it — confirm.
	use rstest::rstest;

	use super::*;

	/// Assert that an ObjectData has exactly the specified field names.
	fn assert_fields_ty(obj: &ObjectData, expected: &[&str]) {
		let actual: BTreeSet<_> = obj.fields.iter().map(|(name, _)| name.as_str()).collect();
		let expected: BTreeSet<_> = expected.iter().copied().collect();
		assert_eq!(actual, expected, "Field mismatch");
	}

	/// Get TyData from TypeEnv for a Ty.
	fn get_ty_data(env: &TypeEnv, ty: Ty) -> TyData {
		env.store().get(ty)
	}

	/// Try to extract tuple element types.
	fn try_tuple(env: &TypeEnv, ty: Ty) -> Option> {
		match env.store().get(ty) {
			TyData::Tuple { elems } => Some(elems),
			_ => None,
		}
	}

	/// Try to extract array element type.
	fn try_array(env: &TypeEnv, ty: Ty) -> Option {
		match env.store().get(ty) {
			TyData::Array { elem, .. } => Some(elem),
			_ => None,
		}
	}

	/// Try to extract array info (element type and is_set flag).
	fn try_array_with_set(env: &TypeEnv, ty: Ty) -> Option<(Ty, bool)> {
		match env.store().get(ty) {
			TyData::Array { elem, is_set } => Some((elem, is_set)),
			_ => None,
		}
	}

	/// Try to extract ObjectData.
	fn try_object(env: &TypeEnv, ty: Ty) -> Option {
		match env.store().get(ty) {
			TyData::Object(obj) => Some(obj),
			_ => None,
		}
	}

	/// Try to extract FunctionData.
	fn try_function(env: &TypeEnv, ty: Ty) -> Option {
		match env.store().get(ty) {
			TyData::Function(func) => Some(func),
			_ => None,
		}
	}

	/// Try to extract union variants.
	fn try_union(env: &TypeEnv, ty: Ty) -> Option> {
		match env.store().get(ty) {
			TyData::Union(variants) => Some(variants),
			_ => None,
		}
	}

	/// Get field definition from ObjectData by name.
	fn get_field_ty<'a>(obj: &'a ObjectData, name: &str) -> Option<&'a FieldDefInterned> {
		obj.fields
			.iter()
			.find(|(n, _)| n == name)
			.map(|(_, def)| def)
	}

	/// Helper to infer document type and return (Ty, TypeEnv) for testing.
	fn infer_doc(code: &str) -> (Ty, TypeEnv) {
		let doc = Document::new(code.to_string(), DocVersion::new(1));
		infer_document_type_ty(&doc)
	}

	#[test]
	fn test_infer_error_is_never() {
		let (ty, _) = infer_doc(r#"error "fail""#);
		assert_eq!(ty, Ty::NEVER);
	}

	#[test]
	fn test_infer_number() {
		let (ty, _) = infer_doc("42");
		assert_eq!(ty, Ty::NUMBER);
	}

	#[test]
	fn test_infer_string() {
		let (ty, _) = infer_doc(r#""hello""#);
		assert_eq!(ty, Ty::STRING);
	}

	#[test]
	fn test_infer_bool_true() {
		let (ty, _) = infer_doc("true");
		assert_eq!(ty, Ty::TRUE);
	}

	#[test]
	fn test_infer_bool_false() {
		let (ty, _) = infer_doc("false");
		assert_eq!(ty, Ty::FALSE);
	}

	#[test]
	fn test_infer_null() {
		let (ty, _) = infer_doc("null");
		assert_eq!(ty, Ty::NULL);
	}

	#[test]
	fn test_infer_array() {
		// Array literals with known element types become tuples
		let (ty, env) = infer_doc("[1, 2, 3]");
		let elems = try_tuple(&env, ty).expect("expected tuple");
		assert_eq!(elems, vec![Ty::NUMBER, Ty::NUMBER, Ty::NUMBER]);
	}

	#[test]
	fn test_infer_empty_array() {
		let (ty, env) = infer_doc("[]");
		let elem = try_array(&env, ty).expect("expected array");
		assert_eq!(elem, Ty::ANY);
	}

	#[test]
	fn test_infer_function() {
		// x + y where x and y are Any defaults to Number
		let (ty, env) = infer_doc("function(x, y) x + y");
		let func = try_function(&env, ty).expect("expected function");
		assert_eq!(
			func,
			FunctionData {
				params: vec![
					ParamInterned {
						name: "x".to_string(),
						ty: Ty::ANY,
						has_default: false
					},
					ParamInterned {
						name: "y".to_string(),
						ty: Ty::ANY,
						has_default: false
					},
				],
				return_spec: ReturnSpec::Fixed(Ty::NUMBER),
				variadic: false,
			}
		);
	}

	#[test]
	fn test_infer_function_return_object() {
		// Function that returns an object
		let (ty, env) = infer_doc("function(x) { a: x }");
		let func = try_function(&env, ty).expect("expected function");
		assert_eq!(
			func.params,
			vec![ParamInterned {
				name: "x".to_string(),
				ty: Ty::ANY,
				has_default: false
			},]
		);
		assert!(!func.variadic);
		// Return type should be inferred as Object
		let ReturnSpec::Fixed(ret_ty) = &func.return_spec else {
			unreachable!("Expected Fixed return type");
		};
		let obj = try_object(&env, *ret_ty).expect("expected object");
		assert_fields_ty(&obj, &["a"]);
	}

	#[test]
	fn test_infer_function_return_number() {
		// Function that returns a number
		let (ty, env) = infer_doc("function() 42");
		let func = try_function(&env, ty).expect("expected function");
		assert_eq!(
			func,
			FunctionData {
				params: vec![],
				return_spec: ReturnSpec::Fixed(Ty::NUMBER),
				variadic: false,
			}
		);
	}

	#[test]
	fn test_infer_function_return_string() {
		// Function that returns a string
		let (ty, env) = infer_doc(r#"function(s) s + "!""#);
		let func = try_function(&env, ty).expect("expected function");
		assert_eq!(
			func.params,
			vec![ParamInterned {
				name: "s".to_string(),
				ty: Ty::ANY,
				has_default: false
			},]
		);
		assert!(!func.variadic);
		// s + "!" where s is Any defaults to Number (not String, because Any + String is unhandled)
		assert_eq!(func.return_spec, ReturnSpec::Fixed(Ty::NUMBER));
	}

	#[test]
	fn test_function_default_number() {
		let (ty, env) = infer_doc("function(x=1) x");
		let func = try_function(&env, ty).expect("expected function");
		assert_eq!(
			func,
			FunctionData {
				params: vec![ParamInterned {
					name: "x".to_string(),
					ty: Ty::NUMBER,
					has_default: true
				}],
				return_spec: ReturnSpec::Fixed(Ty::NUMBER),
				variadic: false,
			}
		);
	}

	#[test]
	fn test_function_default_string() {
		let (ty, env) = infer_doc(r#"function(x="hello") x"#);
		let func = try_function(&env, ty).expect("expected function");
		assert_eq!(
			func,
			FunctionData {
				params: vec![ParamInterned {
					name: "x".to_string(),
					ty: Ty::STRING,
					has_default: true
				}],
				return_spec: ReturnSpec::Fixed(Ty::STRING),
				variadic: false,
			}
		);
	}

	#[test]
	fn test_function_default_bool() {
		let (ty, env) = infer_doc("function(x=true) x");
		let func = try_function(&env, ty).expect("expected function");
		assert_eq!(
			func,
			FunctionData {
				params: vec![ParamInterned {
					name: "x".to_string(),
					ty: Ty::TRUE,
					has_default: true
				}],
				return_spec: ReturnSpec::Fixed(Ty::TRUE),
				variadic: false,
			}
		);
	}

	#[test]
	fn test_function_default_null() {
		let (ty, env) = infer_doc("function(x=null) x");
		let func = try_function(&env, ty).expect("expected function");
		assert_eq!(
			func,
			FunctionData {
				params: vec![ParamInterned {
					name: "x".to_string(),
					ty: Ty::NULL,
					has_default: true
				}],
				return_spec: ReturnSpec::Fixed(Ty::NULL),
				variadic: false,
			}
		);
	}

	#[test]
	fn test_function_no_default() {
		let (ty, env) = infer_doc("function(x) x");
		let func = try_function(&env, ty).expect("expected function");
		assert_eq!(
			func,
			FunctionData {
				params: vec![ParamInterned {
					name: "x".to_string(),
					ty: Ty::ANY,
					has_default: false
				}],
				return_spec:
ReturnSpec::Fixed(Ty::ANY), + variadic: false, + } + ); + } + + #[test] + fn test_function_default_expression() { + let (ty, env) = infer_doc("function(x=1+2) x"); + let func = try_function(&env, ty).expect("expected function"); + assert_eq!( + func, + FunctionData { + params: vec![ParamInterned { + name: "x".to_string(), + ty: Ty::NUMBER, + has_default: true + }], + return_spec: ReturnSpec::Fixed(Ty::NUMBER), + variadic: false, + } + ); + } + + #[test] + fn test_function_multiple_params_mixed_defaults() { + let (ty, env) = infer_doc(r#"function(a, b=1, c="hello") a + b"#); + let func = try_function(&env, ty).expect("expected function"); + assert_eq!( + func, + FunctionData { + params: vec![ + ParamInterned { + name: "a".to_string(), + ty: Ty::ANY, + has_default: false + }, + ParamInterned { + name: "b".to_string(), + ty: Ty::NUMBER, + has_default: true + }, + ParamInterned { + name: "c".to_string(), + ty: Ty::STRING, + has_default: true + }, + ], + return_spec: ReturnSpec::Fixed(Ty::NUMBER), + variadic: false, + } + ); + } + + #[test] + fn test_function_default_param_array() { + let (ty, env) = infer_doc("function(x=[1,2,3]) x"); + let func = try_function(&env, ty).expect("expected function"); + // Default is a tuple [Number, Number, Number] + let param_tuple = try_tuple(&env, func.params[0].ty).expect("expected tuple"); + assert_eq!(param_tuple, vec![Ty::NUMBER, Ty::NUMBER, Ty::NUMBER]); + // Return type is the same tuple + let ReturnSpec::Fixed(ret) = func.return_spec.clone() else { + unreachable!("Expected Fixed return spec"); + }; + let ret_tuple = try_tuple(&env, ret).expect("expected tuple"); + assert_eq!(ret_tuple, vec![Ty::NUMBER, Ty::NUMBER, Ty::NUMBER]); + // Full param structure + assert_eq!( + func.params, + vec![ParamInterned { + name: "x".to_string(), + ty: func.params[0].ty, + has_default: true, + }] + ); + } + + #[test] + fn test_function_default_param_object() { + let default_value = ['{', 'a', ':', '1', '}'].into_iter().collect::<String>(); + let doc =
format!("function(x={default_value}) x"); + let (ty, env) = infer_doc(&doc); + let func = try_function(&env, ty).expect("expected function"); + // Default is an object with field 'a' + let param_obj = try_object(&env, func.params[0].ty).expect("expected object"); + assert_fields_ty(&param_obj, &["a"]); + // Full param structure + assert_eq!( + func.params, + vec![ParamInterned { + name: "x".to_string(), + ty: func.params[0].ty, + has_default: true, + }] + ); + } + + #[test] + fn test_infer_conditional_union() { + let (ty, env) = infer_doc("if true then 1 else \"x\""); + // Should be a union of number and string + let variants = try_union(&env, ty).expect("expected union"); + assert!(variants.contains(&Ty::NUMBER)); + assert!(variants.contains(&Ty::STRING)); + } + + #[test] + fn test_infer_conditional_with_error() { + // If condition is error, the whole expression is Never + let (ty, _) = infer_doc(r#"if error "fail" then 1 else 2"#); + assert_eq!(ty, Ty::NEVER); + } + + #[test] + fn test_stdlib_map_infers_func_return() { + let (ty, env) = infer_doc("std.map(function(x) x + 1, [1, 2, 3])"); + let elem = try_array(&env, ty).expect("expected array"); + assert_eq!(elem, Ty::NUMBER); + } + + #[test] + fn test_stdlib_filter_preserves_array_type() { + let (ty, env) = infer_doc("std.filter(function(x) x > 0, [1, 2, 3])"); + let elem = try_array(&env, ty).expect("expected array"); + assert_eq!(elem, Ty::NUMBER); + } + + #[test] + fn test_stdlib_foldl_returns_accumulator_type() { + let (ty, _) = infer_doc("std.foldl(function(acc, x) acc + x, [1, 2, 3], 0)"); + assert_eq!(ty, Ty::NUMBER); + } + + #[test] + fn test_stdlib_length_returns_non_negative() { + let (ty, env) = infer_doc("std.length([1, 2, 3])"); + let data = get_ty_data(&env, ty); + assert!(matches!(data, TyData::BoundedNumber(_))); + } + + #[test] + fn test_stdlib_abs_returns_non_negative() { + let (ty, env) = infer_doc("std.abs(-5)"); + let data = get_ty_data(&env, ty); + assert!(matches!(data,
TyData::BoundedNumber(_))); + } + + #[test] + fn test_stdlib_count_returns_non_negative() { + let (ty, env) = infer_doc("std.count([1, 2, 2, 3], 2)"); + let data = get_ty_data(&env, ty); + assert!(matches!(data, TyData::BoundedNumber(_))); + } + + #[test] + fn test_stdlib_type_returns_string() { + let (ty, _) = infer_doc("std.type(42)"); + assert_eq!(ty, Ty::STRING); + } + + #[test] + fn test_stdlib_object_values_infers_field_types() { + let (ty, env) = infer_doc("std.objectValues({a: 1, b: 2})"); + let elem = try_array(&env, ty).expect("expected array"); + assert_eq!(elem, Ty::NUMBER); + } + + #[test] + fn test_stdlib_object_values_mixed_types() { + let (ty, env) = infer_doc(r#"std.objectValues({x: 1, y: "str"})"#); + let elem = try_array(&env, ty).expect("expected array"); + let variants = try_union(&env, elem).expect("expected union"); + assert!(variants.contains(&Ty::NUMBER)); + assert!(variants.contains(&Ty::STRING)); + } + + #[test] + fn test_stdlib_field_access_returns_function() { + // Accessing std.map (without calling) should return a function type + let (ty, env) = infer_doc("std.map"); + let _ = try_function(&env, ty).expect("expected function"); + } + + #[test] + fn test_comprehension_variable_type() { + // [x + 1 for x in [1, 2, 3]] - x should be inferred as Number + // Result should be Array + let (ty, env) = infer_doc("[x + 1 for x in [1, 2, 3]]"); + let elem = try_array(&env, ty).expect("expected array"); + assert_eq!(elem, Ty::NUMBER); + } + + #[test] + fn test_comprehension_field_access() { + // [x.name for x in arr] where arr is [{name: "a"}] + // The loop variable x should have the element type from the array + let (ty, env) = + infer_doc(r#"local arr = [{name: "a"}, {name: "b"}]; [x.name for x in arr]"#); + let elem = try_array(&env, ty).expect("expected array"); + assert_eq!(elem, Ty::STRING); + } + + #[test] + fn test_comprehension_with_any_iterator() { + // When iterator type is unknown, loop var should be Any + let (ty, env) = 
infer_doc("local getItems = function() []; [x for x in getItems()]"); + // Result should still be an array + let _ = try_array(&env, ty).expect("expected array"); + } + + // Parameter constraint tests + + #[test] + fn test_parameter_constraint_from_field_access() { + // function(x) x.name - x should be inferred as Object with field "name" + let (ty, env) = infer_doc("function(x) x.name"); + let func = try_function(&env, ty).expect("expected function"); + assert_eq!( + func.params.iter().map(|p| &p.name).collect::<Vec<_>>(), + vec!["x"] + ); + let param_obj = try_object(&env, func.params[0].ty).expect("expected object"); + assert_fields_ty(&param_obj, &["name"]); + } + + #[test] + fn test_parameter_constraint_multiple_fields() { + // function(x) x.a + x.b - x should be inferred as Object with fields "a" and "b" + let (ty, env) = infer_doc("function(x) x.a + x.b"); + let func = try_function(&env, ty).expect("expected function"); + assert_eq!( + func.params.iter().map(|p| &p.name).collect::<Vec<_>>(), + vec!["x"] + ); + let param_obj = try_object(&env, func.params[0].ty).expect("expected object"); + assert_fields_ty(&param_obj, &["a", "b"]); + } + + #[test] + fn test_parameter_constraint_not_used() { + // function(x) 42 - x is not used, should remain Any + let (ty, env) = infer_doc("function(x) 42"); + let func = try_function(&env, ty).expect("expected function"); + assert_eq!( + func, + FunctionData { + params: vec![ParamInterned { + name: "x".to_string(), + ty: Ty::ANY, + has_default: false + }], + return_spec: ReturnSpec::Fixed(Ty::NUMBER), + variadic: false, + } + ); + } + + #[test] + fn test_parameter_constraint_multiple_params() { + // function(a, b) a.x + b.y - each param gets its own constraints + let (ty, env) = infer_doc("function(a, b) a.x + b.y"); + let func = try_function(&env, ty).expect("expected function"); + assert_eq!( + func.params.iter().map(|p| &p.name).collect::<Vec<_>>(), + vec!["a", "b"] + ); + + // First param 'a' should have only field 'x' + let obj_a = try_object(&env,
func.params[0].ty).expect("expected object"); + assert_fields_ty(&obj_a, &["x"]); + + // Second param 'b' should have only field 'y' + let obj_b = try_object(&env, func.params[1].ty).expect("expected object"); + assert_fields_ty(&obj_b, &["y"]); + } + + /// Helper to infer an expression with an expected type (Ty-native) + /// The expected_builder is called with the env's store to create the expected type. + fn infer_with_expected_fn<F>(code: &str, expected_builder: F) -> (Ty, TypeEnv) + where + F: FnOnce(&mut MutStore) -> Ty, + { + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let mut env = TypeEnv::new_default(); + let expected = expected_builder(env.store_mut()); + let ty = if let Some(expr) = doc.ast().expr() { + infer_expr_ty_with_expected(&expr, &mut env, Some(expected)) + } else { + Ty::ANY + }; + (ty, env) + } + + #[test] + fn test_bidirectional_empty_array_with_expected() { + // [] with expected Array<Number> should get Number element type + let (ty, env) = infer_with_expected_fn("[]", |store| store.array(Ty::NUMBER)); + + // Should be Array<Number>, not Array<Any> + let elem = try_array(&env, ty).expect("expected array"); + assert_eq!(elem, Ty::NUMBER); + } + + #[test] + fn test_bidirectional_without_expected() { + // [] without expected type should be Array<Any> + let (ty, env) = infer_doc("[]"); + let elem = try_array(&env, ty).expect("expected array"); + assert_eq!(elem, Ty::ANY); + } + + #[test] + fn test_bidirectional_if_then_else_propagates() { + // if cond then [] else [] with expected Array<String> + let (ty, env) = + infer_with_expected_fn("if true then [] else []", |store| store.array(Ty::STRING)); + + // Both branches should get Array<String> + // Result should be Array<String> (union of both String arrays = String array) + let data = get_ty_data(&env, ty); + match data { + TyData::Array { elem, ..
} => assert_eq!(elem, Ty::STRING), + TyData::Union(variants) => { + for v in variants { + let elem = try_array(&env, v).expect("expected array"); + assert_eq!(elem, Ty::STRING); + } + } + _ => unreachable!("Expected Array or Union, got {:?}", data), + } + } + + #[test] + fn test_bidirectional_parened_propagates() { + // (expr) should propagate expected type through + let (ty, env) = infer_with_expected_fn("([])", |store| store.array(Ty::BOOL)); + + let elem = try_array(&env, ty).expect("expected array"); + assert_eq!(elem, Ty::BOOL); + } + + #[test] + fn test_bidirectional_array_elements() { + // [a, b] where we have no info on a, b but expect Array + // This doesn't magically make unknown expressions typed, but sets the context + // Even without expected type, [1, 2, 3] should be Tuple([Number, Number, Number]) + let (ty, env) = infer_doc("[1, 2, 3]"); + + let elems = try_tuple(&env, ty).expect("expected tuple"); + assert_eq!(elems, vec![Ty::NUMBER, Ty::NUMBER, Ty::NUMBER]); + } + + #[test] + fn test_bidirectional_expected_tuple() { + // With expected Tuple type, elements get positional expected types + let (ty, env) = infer_with_expected_fn("[[], []]", |store| { + let arr_num = store.array(Ty::NUMBER); + let arr_str = store.array(Ty::STRING); + store.tuple(vec![arr_num, arr_str]) + }); + + let elems = try_tuple(&env, ty).expect("expected tuple"); + // Extract inner types of each array element and compare full structure + let inner_types: Vec<Ty> = elems + .iter() + .map(|&e| try_array(&env, e).expect("expected array")) + .collect(); + assert_eq!(inner_types, vec![Ty::NUMBER, Ty::STRING]); + } + + #[rstest] + #[case::is_number( + r#"local x = if true then 1 else "str"; assert std.isNumber(x); x"#, + Ty::NUMBER + )] + #[case::is_string( + r#"local s = if true then "hello" else 42; assert std.isString(s); s"#, + Ty::STRING + )] + #[case::is_boolean( + // With literal boolean inference, `if true then false else 0` evaluates to `false` + // which has type Ty::FALSE (a
subtype of BOOL) + r"local b = if true then false else 0; assert std.isBoolean(b); b", + Ty::FALSE + )] + fn test_assert_narrows_primitive_type(#[case] code: &str, #[case] expected: Ty) { + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let mut env = TypeEnv::new_default(); + let expr = doc.ast().expr().unwrap(); + assert_eq!(infer_expr_ty(&expr, &mut env), expected); + } + + #[test] + fn test_assert_narrows_to_array() { + let code = r"local arr = if true then [1, 2] else {}; assert std.isArray(arr); arr"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let mut env = TypeEnv::new_default(); + let expr = doc.ast().expr().unwrap(); + let ty = infer_expr_ty(&expr, &mut env); + // After assert std.isArray(arr), arr should be narrowed to the Tuple from the then-branch + let elems = try_tuple(&env, ty).expect("expected tuple"); + assert_eq!(elems, vec![Ty::NUMBER, Ty::NUMBER]); + } + + #[test] + fn test_assert_narrows_to_object() { + let code = r"local obj = if true then { a: 1 } else [1, 2]; assert std.isObject(obj); obj"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let mut env = TypeEnv::new_default(); + let expr = doc.ast().expr().unwrap(); + let ty = infer_expr_ty(&expr, &mut env); + // After assert std.isObject(obj), obj should be narrowed to the Object from the then-branch + let obj = try_object(&env, ty).expect("expected object"); + assert_fields_ty(&obj, &["a"]); + } + + #[test] + fn test_assert_narrows_null_check() { + let code = r"local x = if true then { a: 1 } else null; assert x != null; x"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let mut env = TypeEnv::new_default(); + let expr = doc.ast().expr().unwrap(); + let ty = infer_expr_ty(&expr, &mut env); + // After assert x != null, x should be narrowed to the Object from the then-branch + let obj = try_object(&env, ty).expect("expected object"); + assert_fields_ty(&obj, &["a"]); + } + + #[test] + fn 
test_multiple_assert_statements() { + let code = r#" + local x = if true then 1 else "str"; + local y = if true then [1] else {}; + assert std.isNumber(x); + assert std.isArray(y); + [x, y] + "#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let mut env = TypeEnv::new_default(); + let expr = doc.ast().expr().unwrap(); + let ty = infer_expr_ty(&expr, &mut env); + let elems = try_tuple(&env, ty).expect("expected tuple"); + // First element is narrowed to Number, second to Tuple([Number]) + // Verify complete structure: exactly [Number, Tuple([Number])] + assert!( + matches!(elems.as_slice(), [first, second] if *first == Ty::NUMBER && try_tuple(&env, *second).expect("expected tuple") == vec![Ty::NUMBER]), + "Expected [Number, Tuple([Number])], got {:?}", + elems + ); + } + + #[test] + fn test_assert_object_has() { + let code = r#"local obj = {}; assert std.objectHas(obj, "key"); obj"#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let mut env = TypeEnv::new_default(); + let expr = doc.ast().expr().unwrap(); + let ty = infer_expr_ty(&expr, &mut env); + let obj = try_object(&env, ty).expect("expected object"); + assert_fields_ty(&obj, &["key"]); + } + + #[rstest] + #[case::normal_visibility("{ a: 1 }", "a", FieldVis::Normal)] + #[case::hidden_visibility("{ a:: 1 }", "a", FieldVis::Hidden)] + #[case::force_visible("{ a::: 1 }", "a", FieldVis::ForceVisible)] + fn test_field_visibility_tracking( + #[case] code: &str, + #[case] field_name: &str, + #[case] expected_visibility: FieldVis, + ) { + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let mut env = TypeEnv::new_default(); + let expr = doc.ast().expr().unwrap(); + let ty = infer_expr_ty(&expr, &mut env); + let obj = try_object(&env, ty).expect("expected object"); + let field_def = get_field_ty(&obj, field_name).expect("Field should exist"); + assert_eq!(field_def.visibility, expected_visibility); + } + + #[test] + fn test_object_with_mixed_visibility() { + let 
code = r"{ visible: 1, hidden:: 2, force::: 3 }"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let mut env = TypeEnv::new_default(); + let expr = doc.ast().expr().unwrap(); + let ty = infer_expr_ty(&expr, &mut env); + let obj = try_object(&env, ty).expect("expected object"); + + assert_eq!( + get_field_ty(&obj, "visible").unwrap().visibility, + FieldVis::Normal + ); + assert_eq!( + get_field_ty(&obj, "hidden").unwrap().visibility, + FieldVis::Hidden + ); + assert_eq!( + get_field_ty(&obj, "force").unwrap().visibility, + FieldVis::ForceVisible + ); + } + + #[test] + fn test_field_is_visible() { + let code = r"{ visible: 1, hidden:: 2 }"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let mut env = TypeEnv::new_default(); + let expr = doc.ast().expr().unwrap(); + let ty = infer_expr_ty(&expr, &mut env); + let obj = try_object(&env, ty).expect("expected object"); + + assert!(matches!( + get_field_ty(&obj, "visible").unwrap().visibility, + FieldVis::Normal | FieldVis::ForceVisible + )); + assert!(matches!( + get_field_ty(&obj, "hidden").unwrap().visibility, + FieldVis::Hidden + )); + } + + #[test] + fn test_self_reference_simple() { + // self.a should be typed as Number inside the object + let (ty, env) = infer_doc(r"{ a: 1, b: self.a }"); + let obj = try_object(&env, ty).expect("expected object"); + // Field 'b' should have type Any (because self.a is inferred in first pass as Any) + // This is the limitation of the two-pass approach + assert_fields_ty(&obj, &["a", "b"]); + } + + #[test] + fn test_self_knows_fields_exist() { + // self should know which fields exist + let (ty, env) = infer_doc(r"{ a: 1, b: 2, c: self }"); + let obj = try_object(&env, ty).expect("expected object"); + // Field 'c' should be typed as Object with fields a, b, c + let c_field = get_field_ty(&obj, "c").expect("should have c field"); + let c_obj = try_object(&env, c_field.ty).expect("expected object"); + assert_fields_ty(&c_obj, &["a", "b", "c"]); + } 
+ + #[test] + fn test_self_outside_object_is_open() { + // Outside an object, self should return open object + // This is a somewhat artificial test since 'self' outside an object is a runtime error + // We just test that the inference doesn't panic + let (ty, env) = infer_doc("self"); + let obj = try_object(&env, ty).expect("expected object"); + // Should be open (has_unknown = true) + assert!(obj.has_unknown, "self outside object should be open object"); + } + + #[test] + fn test_nested_objects_self() { + // In nested objects, self should refer to innermost object + let (ty, env) = infer_doc(r"{ outer: 1, nested: { inner: 2, self_ref: self } }"); + let obj = try_object(&env, ty).expect("expected object"); + + // Get nested object + let nested_field = get_field_ty(&obj, "nested").expect("should have nested field"); + let nested_obj = try_object(&env, nested_field.ty).expect("expected object"); + + // self_ref should refer to the nested object, not outer + let self_ref_field = + get_field_ty(&nested_obj, "self_ref").expect("should have self_ref field"); + let self_ref_obj = try_object(&env, self_ref_field.ty).expect("expected object"); + + // self_ref should have 'inner' and 'self_ref', but NOT 'outer' + assert_fields_ty(&self_ref_obj, &["inner", "self_ref"]); + } + + #[test] + fn test_object_extension_super() { + // In an object extension, super should refer to the base object + let code = r" + local base = { a: 1, b: 2 }; + base { c: super.a } + "; + let (ty, env) = infer_doc(code); + let obj = try_object(&env, ty).expect("expected object"); + + // Result should have fields a, b, c + assert_fields_ty(&obj, &["a", "b", "c"]); + } + + #[test] + fn test_dollar_root_reference() { + // $ should refer to the outermost object + let (ty, env) = infer_doc(r"{ outer: 1, nested: { inner: 2, root_ref: $ } }"); + let obj = try_object(&env, ty).expect("expected object"); + + // Get nested object + let nested_field = get_field_ty(&obj, "nested").expect("should have nested 
field"); + let nested_obj = try_object(&env, nested_field.ty).expect("expected object"); + + // root_ref should refer to the outer object + let root_ref_field = + get_field_ty(&nested_obj, "root_ref").expect("should have root_ref field"); + let root_ref_obj = try_object(&env, root_ref_field.ty).expect("expected object"); + + // root_ref (=$) should have 'outer' and 'nested' + assert_fields_ty(&root_ref_obj, &["outer", "nested"]); + } + + #[test] + fn test_object_extension_result_type() { + // Object extension should merge types properly + let code = r#" + local base = { a: 1 }; + base { b: "hello" } + "#; + let (ty, env) = infer_doc(code); + let obj = try_object(&env, ty).expect("expected object"); + + // Result should have both a (Number) and b (String) + assert_fields_ty(&obj, &["a", "b"]); + assert_eq!(get_field_ty(&obj, "a").unwrap().ty, Ty::NUMBER); + assert_eq!(get_field_ty(&obj, "b").unwrap().ty, Ty::STRING); + } + + #[test] + fn test_recursive_function_memoization() { + // Recursive function - should not cause infinite loop + let code = r" + local factorial = function(n) + if n <= 1 then 1 + else n * factorial(n - 1); + factorial + "; + let (ty, env) = infer_doc(code); + + // Should be a function type (not infinite loop) + let _func = try_function(&env, ty).expect("expected function"); + } + + #[test] + fn test_mutually_recursive_functions() { + // Mutually recursive functions + let code = r" + local isEven = function(n) + if n == 0 then true + else isOdd(n - 1); + local isOdd = function(n) + if n == 0 then false + else isEven(n - 1); + { isEven: isEven, isOdd: isOdd } + "; + let (ty, env) = infer_doc(code); + + // Should infer without infinite loop + let obj = try_object(&env, ty).expect("expected object"); + assert_fields_ty(&obj, &["isEven", "isOdd"]); + } + + #[test] + fn test_function_memoization_reuse() { + // Same function referenced multiple times - should be memoized + let code = r" + local f = function(x) x + 1; + { a: f, b: f, c: f } + "; + let 
(ty, env) = infer_doc(code); + let obj = try_object(&env, ty).expect("expected object"); + assert_fields_ty(&obj, &["a", "b", "c"]); + + // All three fields should have the same function type + let a_ty = get_field_ty(&obj, "a").unwrap().ty; + let b_ty = get_field_ty(&obj, "b").unwrap().ty; + let c_ty = get_field_ty(&obj, "c").unwrap().ty; + let _func = try_function(&env, a_ty).expect("expected function"); + // With type interning, same type = same Ty + assert_eq!(a_ty, b_ty); + assert_eq!(b_ty, c_ty); + } + + #[test] + fn test_deeply_nested_functions() { + // Deeply nested function calls - tests depth limit with memoization + let code = r" + local f1 = function(x) x; + local f2 = function(x) f1(x); + local f3 = function(x) f2(x); + local f4 = function(x) f3(x); + local f5 = function(x) f4(x); + local f6 = function(x) f5(x); + f6 + "; + let (ty, env) = infer_doc(code); + + // Should be a function (memoization helps with deep nesting) + let _func = try_function(&env, ty).expect("expected function"); + } + + #[rstest] + #[case::std_set("std.set([1, 2, 3])", true, Ty::NUMBER)] + #[case::std_uniq(r#"std.uniq(["a", "b", "c"])"#, true, Ty::STRING)] + #[case::std_set_union("std.setUnion([1, 2], [3, 4])", true, Ty::NUMBER)] + #[case::std_set_inter("std.setInter([1, 2], [2, 3])", true, Ty::NUMBER)] + #[case::std_set_diff("std.setDiff([1, 2, 3], [2])", true, Ty::NUMBER)] + #[case::std_filter("std.filter(function(x) true, [1, 2, 3])", false, Ty::NUMBER)] + #[case::std_slice("std.slice([1, 2, 3], 0, 2, 1)", false, Ty::NUMBER)] + #[case::std_map_typed("std.map(function(x) x + 0, [1, 2, 3])", false, Ty::NUMBER)] + fn test_array_is_set_flag( + #[case] code: &str, + #[case] expected_is_set: bool, + #[case] expected_elem: Ty, + ) { + let (ty, env) = infer_doc(code); + let (elem, is_set) = try_array_with_set(&env, ty).expect("expected array type"); + assert_eq!(is_set, expected_is_set, "is_set mismatch for: {}", code); + assert_eq!(elem, expected_elem, "element type mismatch for: 
{}", code); + } + + #[test] + fn test_std_reverse_preserves_tuple_type() { + // std.reverse with SameAsArg(0) preserves the input type + // A tuple literal stays a tuple after reverse + let (ty, env) = infer_doc("std.reverse([1, 2, 3])"); + let elems = try_tuple(&env, ty).expect("reverse of tuple should be tuple"); + // All elements should be NUMBER + assert_eq!(elems, vec![Ty::NUMBER, Ty::NUMBER, Ty::NUMBER]); + } +} diff --git a/crates/jrsonnet-lsp-inference/src/flow.rs b/crates/jrsonnet-lsp-inference/src/flow.rs new file mode 100644 index 00000000..ff4ac4e8 --- /dev/null +++ b/crates/jrsonnet-lsp-inference/src/flow.rs @@ -0,0 +1,1698 @@ +//! Flow typing: extracting and applying type facts from conditions. +//! +//! This module implements flow-sensitive type narrowing based on conditions. +//! When a condition like `std.isNumber(x)` is true, we can narrow the type of `x` +//! to `Number` in the then-branch, and to "not Number" in the else-branch. +//! +//! The design follows the approach from rjsonnet, with a fact-based system that +//! supports logical combinations (and, or, not) and proper totality tracking. + +use jrsonnet_lsp_types::{ + FieldDefInterned, FieldVis, FunctionData, MutStore, ObjectData, ReturnSpec, Ty, TyData, +}; +use rustc_hash::FxHashMap; + +/// Totality indicates whether a fact can be negated. +/// +/// - `Total`: The fact fully classifies the value. For example, `std.isNumber(x)` +/// is total because if it's false, we know `x` is definitely NOT a number. +/// - `Partial`: The fact only partially classifies the value. For example, +/// `std.isInteger(x)` is partial because if it's false, `x` might still be +/// a decimal number. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum Totality { + /// Fact can be negated - if false, the negation applies. + Total, + /// Fact cannot be negated - if false, we learn nothing. + Partial, +} + +/// A type fact about a single variable. 
+/// +/// Facts represent what we know about a variable's type based on a condition. +/// They can be combined using logical operators and applied to narrow types. +#[derive(Debug, Clone, PartialEq)] +pub struct Fact { + repr: FactRepr, +} + +/// Internal representation of a fact. +#[derive(Debug, Clone, PartialEq)] +enum FactRepr { + /// Narrows to a primitive type (number, string, bool, null, array, object, function). + Prim(PrimFact, Totality), + /// Object has a field with optional type constraint. + HasField { + field: String, + /// Optional constraint on the field's type. + field_type: Option<Box<Fact>>, + }, + /// Value has a specific length. + /// For arrays: converts to tuple with that many elements. + /// For objects: closes the object if field count matches. + HasLen(usize), + /// Value has at least this length (non-empty check). + /// Useful for `std.length(x) > 0` patterns. + MinLen(usize), + /// Array elements are all of a specific type. + /// Used for patterns like `std.all(std.map(std.isNumber, arr))`. + ArrayElemType(PrimFact, Totality), + /// Value equals a literal boolean (true or false). + /// Used for `x == true` or `x == false` patterns. + LiteralBool(bool), + /// Value equals a literal string. + /// Used for `x == "literal"` patterns. + LiteralString(String), + /// Logical AND of two facts. + And(Box<FactRepr>, Box<FactRepr>), + /// Logical OR of two facts. + Or(Box<FactRepr>, Box<FactRepr>), + /// Logical NOT of a fact. + Not(Box<FactRepr>), +} + +/// Primitive type facts. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +enum PrimFact { + Null, + Bool, + Number, + String, + Array, + Object, + Function, +} + +impl PrimFact { + /// Convert to an interned Ty.
+ fn as_ty(self, store: &mut MutStore) -> Ty { + match self { + PrimFact::Null => Ty::NULL, + PrimFact::Bool => Ty::BOOL, + PrimFact::Number => Ty::NUMBER, + PrimFact::String => Ty::STRING, + PrimFact::Array => store.array(Ty::ANY), + PrimFact::Object => store.object(ObjectData::open()), + PrimFact::Function => store.function(FunctionData { + params: vec![], + return_spec: ReturnSpec::Fixed(Ty::ANY), + variadic: true, + }), + } + } +} + +impl Fact { + /// Create a null fact. + pub fn null() -> Self { + Self { + repr: FactRepr::Prim(PrimFact::Null, Totality::Total), + } + } + + /// Create a number fact with given totality. + pub fn number(totality: Totality) -> Self { + Self { + repr: FactRepr::Prim(PrimFact::Number, totality), + } + } + + /// Create a string fact with given totality. + pub fn string(totality: Totality) -> Self { + Self { + repr: FactRepr::Prim(PrimFact::String, totality), + } + } + + /// Create an array fact with given totality. + pub fn array(totality: Totality) -> Self { + Self { + repr: FactRepr::Prim(PrimFact::Array, totality), + } + } + + /// Create an object fact with given totality. + pub fn object(totality: Totality) -> Self { + Self { + repr: FactRepr::Prim(PrimFact::Object, totality), + } + } + + /// Create a function fact. + pub fn function() -> Self { + Self { + repr: FactRepr::Prim(PrimFact::Function, Totality::Total), + } + } + + /// Create a boolean fact. + pub fn boolean() -> Self { + Self { + repr: FactRepr::Prim(PrimFact::Bool, Totality::Total), + } + } + + /// Create a fact that an object has a field. + pub fn has_field(field: String) -> Self { + Self { + repr: FactRepr::HasField { + field, + field_type: None, + }, + } + } + + /// Create a fact that an object has a field with a specific type. + pub fn has_field_typed(field: String, field_fact: Fact) -> Self { + Self { + repr: FactRepr::HasField { + field, + field_type: Some(Box::new(field_fact)), + }, + } + } + + /// Create a fact that a value has a specific length. 
+ /// Applies to arrays, strings, and objects. + pub fn has_len(len: usize) -> Self { + Self { + repr: FactRepr::HasLen(len), + } + } + + /// Create a fact that a value has at least a minimum length. + /// Useful for non-empty checks like `std.length(x) > 0`. + pub fn min_len(min: usize) -> Self { + Self { + repr: FactRepr::MinLen(min), + } + } + + /// Create a fact that an array's elements are all of a specific type. + /// Used for higher-order predicates like `std.all(std.map(std.isNumber, arr))`. + fn array_elem_number(totality: Totality) -> Self { + Self { + repr: FactRepr::ArrayElemType(PrimFact::Number, totality), + } + } + + fn array_elem_string(totality: Totality) -> Self { + Self { + repr: FactRepr::ArrayElemType(PrimFact::String, totality), + } + } + + fn array_elem_bool(totality: Totality) -> Self { + Self { + repr: FactRepr::ArrayElemType(PrimFact::Bool, totality), + } + } + + fn array_elem_array(totality: Totality) -> Self { + Self { + repr: FactRepr::ArrayElemType(PrimFact::Array, totality), + } + } + + fn array_elem_object(totality: Totality) -> Self { + Self { + repr: FactRepr::ArrayElemType(PrimFact::Object, totality), + } + } + + fn array_elem_function(totality: Totality) -> Self { + Self { + repr: FactRepr::ArrayElemType(PrimFact::Function, totality), + } + } + + /// Create a fact that a value equals a specific boolean literal. + /// Used for `x == true` or `x == false` patterns. + pub fn literal_bool(value: bool) -> Self { + Self { + repr: FactRepr::LiteralBool(value), + } + } + + /// Create a fact that a value equals a specific string literal. + /// Used for `x == "literal"` patterns. + pub fn literal_string(value: String) -> Self { + Self { + repr: FactRepr::LiteralString(value), + } + } + + /// Logical AND of two facts. + #[must_use] + pub fn and(self, other: Self) -> Self { + Self { + repr: FactRepr::And(Box::new(self.repr), Box::new(other.repr)), + } + } + + /// Logical OR of two facts. 
+ #[must_use] + pub fn or(self, other: Self) -> Self { + Self { + repr: FactRepr::Or(Box::new(self.repr), Box::new(other.repr)), + } + } + + /// Return the logical negation of this fact. + #[must_use] + pub fn negated(self) -> Self { + Self { + repr: FactRepr::Not(Box::new(self.repr)), + } + } + + /// Apply this fact to narrow a type. + /// + /// Returns the narrowed type when the fact is known to be true. + pub fn apply_to(&self, ty: Ty, store: &mut MutStore) -> Ty { + self.repr.apply_to(ty, store) + } + + /// Apply the negation of this fact to narrow a type. + /// + /// Returns the narrowed type when the fact is known to be false. + pub fn apply_negated(&self, ty: Ty, store: &mut MutStore) -> Ty { + self.repr.apply_negated(ty, store) + } +} + +impl std::ops::Not for Fact { + type Output = Self; + + fn not(self) -> Self::Output { + self.negated() + } +} + +impl FactRepr { + /// Apply this fact to narrow a type (when fact is true). + fn apply_to(&self, ty: Ty, store: &mut MutStore) -> Ty { + match self { + FactRepr::Prim(prim, _) => { + let constraint = prim.as_ty(store); + store.narrow(ty, constraint) + } + + FactRepr::HasField { field, field_type } => { + // Narrow to an object that has this field + let field_ty = field_type + .as_ref() + .map(|f| f.apply_to(Ty::ANY, store)) + .unwrap_or(Ty::ANY); + + let obj_data = ObjectData { + fields: vec![( + field.clone(), + FieldDefInterned { + ty: field_ty, + required: true, + visibility: FieldVis::Normal, + }, + )], + has_unknown: true, + }; + let constraint = store.object(obj_data); + store.narrow(ty, constraint) + } + + FactRepr::HasLen(len) => store.with_len(ty, *len), + + FactRepr::MinLen(min) => store.with_min_len(ty, *min), + + FactRepr::ArrayElemType(prim, _) => { + // Narrow array element type: Array -> Array + let prim_ty = prim.as_ty(store); + match store.get(ty) { + TyData::Array { .. 
} | TyData::Any => store.array(prim_ty), + TyData::Tuple { elems } => { + // Narrow each tuple element + let narrowed: Vec<_> = elems + .iter() + .map(|&e| store.narrow(e, prim_ty)) + .filter(|&e| e != Ty::NEVER) + .collect(); + if narrowed.is_empty() { + Ty::NEVER + } else { + store.tuple(narrowed) + } + } + TyData::Union(types) => { + // Apply to each variant + let narrowed: Vec<_> = types + .iter() + .map(|&t| self.apply_to(t, store)) + .filter(|&t| t != Ty::NEVER) + .collect(); + store.union(narrowed) + } + _ => ty, // Non-array types unchanged + } + } + + FactRepr::LiteralBool(value) => { + // Narrow to the specific boolean literal type + let constraint = if *value { Ty::TRUE } else { Ty::FALSE }; + store.narrow(ty, constraint) + } + + FactRepr::LiteralString(value) => { + // Narrow to the specific string literal type + let constraint = store.literal_string(value.clone()); + store.narrow(ty, constraint) + } + + FactRepr::And(lhs, rhs) => { + // Apply both facts sequentially + let narrowed = lhs.apply_to(ty, store); + rhs.apply_to(narrowed, store) + } + + FactRepr::Or(lhs, rhs) => { + // Apply each fact and union the results + // (a || b) means: either a is true OR b is true + // So the type is: (ty narrowed by a) | (ty narrowed by b) + let t1 = lhs.apply_to(ty, store); + let t2 = rhs.apply_to(ty, store); + store.union(vec![t1, t2]) + } + + FactRepr::Not(inner) => { + // Apply negated inner fact + inner.apply_negated(ty, store) + } + } + } + + /// Apply the negation of this fact (when fact is false). + fn apply_negated(&self, ty: Ty, store: &mut MutStore) -> Ty { + match self { + FactRepr::Prim(prim, totality) => { + match totality { + Totality::Total => { + // Can negate: widen by removing this type + let remove = prim.as_ty(store); + store.widen(ty, remove) + } + Totality::Partial => { + // Cannot negate: type unchanged + ty + } + } + } + + FactRepr::HasField { .. 
} => { + // Negating "has field" doesn't narrow the type in a useful way + // (the object might just not have that field) + ty + } + + FactRepr::HasLen(_) => { + // Negating "has length n" doesn't narrow the type in a useful way + // (it just has a different length) + ty + } + + FactRepr::MinLen(min) => { + // Negating "min length n" means length < n + // For min=1 (non-empty), negation means empty (length 0) + if *min == 1 { + store.with_len(ty, 0) + } else { + ty + } + } + + FactRepr::ArrayElemType(_prim, totality) => { + // Negating "all elements are type T" + match totality { + Totality::Total => { + // Total: we know at least one element is NOT T + // This doesn't narrow the type in a useful way + ty + } + Totality::Partial => { + // Partial: can't negate + ty + } + } + } + + FactRepr::LiteralBool(value) => { + // Negating "x == true" means x is false (and vice versa) + // This is total: if x != true and x is a bool, then x == false + let constraint = if *value { Ty::FALSE } else { Ty::TRUE }; + store.narrow(ty, constraint) + } + + FactRepr::LiteralString(value) => { + // Negating "x == literal" means x != literal + // Widen by removing the literal type + let remove = store.literal_string(value.clone()); + store.widen(ty, remove) + } + + // De Morgan's laws: + // !(a && b) = !a || !b + FactRepr::And(lhs, rhs) => { + let t1 = lhs.apply_negated(ty, store); + let t2 = rhs.apply_negated(ty, store); + store.union(vec![t1, t2]) + } + + // !(a || b) = !a && !b + FactRepr::Or(lhs, rhs) => { + let narrowed = lhs.apply_negated(ty, store); + rhs.apply_negated(narrowed, store) + } + + // Double negation: !!a = a + FactRepr::Not(inner) => inner.apply_to(ty, store), + } + } +} + +/// A collection of facts about multiple variables. +#[derive(Debug, Clone, Default)] +pub struct Facts { + /// Map from variable name to fact about that variable. + facts: FxHashMap, +} + +impl Facts { + /// Create an empty facts collection. 
+ pub fn new() -> Self { + Self::default() + } + + /// Add a fact for a variable. + /// + /// If a fact already exists for this variable, they are ANDed together. + pub fn add(&mut self, var_name: String, fact: Fact) { + if let Some(existing) = self.facts.remove(&var_name) { + self.facts.insert(var_name, existing.and(fact)); + } else { + self.facts.insert(var_name, fact); + } + } + + /// Get the fact for a variable, if any. + pub fn get(&self, var_name: &str) -> Option<&Fact> { + self.facts.get(var_name) + } + + /// Check if there are any facts. + pub fn is_empty(&self) -> bool { + self.facts.is_empty() + } + + /// Iterate over all facts. + pub fn iter(&self) -> impl Iterator { + self.facts.iter() + } + + /// Combine two fact sets with OR. + /// + /// Only keeps facts that exist in both sets, combining them with OR. + /// This is used for || conditions. + #[must_use] + pub fn or_combine(mut self, mut other: Self) -> Self { + let mut result = Facts::new(); + for (var_name, fact) in self.facts.drain() { + if let Some(other_fact) = other.facts.remove(&var_name) { + result.facts.insert(var_name, fact.or(other_fact)); + } + // If not in both, we learn nothing + } + result + } + + /// Combine two fact sets with AND (merge). + /// + /// Combines all facts from both sets. + #[must_use] + pub fn and_combine(mut self, other: Self) -> Self { + for (var_name, fact) in other.facts { + self.add(var_name, fact); + } + self + } + + /// Negate all facts in this collection. + #[must_use] + pub fn negate(self) -> Self { + let mut result = Facts::new(); + for (var_name, fact) in self.facts { + result.facts.insert(var_name, !fact); + } + result + } +} + +use jrsonnet_rowan_parser::{ + nodes::{ + ArgsDesc, BinaryOperatorKind, Expr, ExprBase, ExprCall, LiteralKind, UnaryOperatorKind, + }, + AstNode, AstToken, +}; + +/// Extract type facts from a condition expression. 
+/// +/// Recognizes patterns like: +/// - `std.isNumber(x)` → x: Number +/// - `std.isString(x)` → x: String +/// - `std.isBoolean(x)` → x: Bool +/// - `std.isArray(x)` → x: Array +/// - `std.isObject(x)` → x: Object +/// - `std.isFunction(x)` → x: Function +/// - `std.objectHas(x, "field")` → x has field "field" +/// - `"field" in x` → x has field "field" +/// - `x == null` → x: Null +/// - `x != null` → x: NOT Null +/// - `std.type(x) == "number"` → x: Number +/// - `a && b` → facts from a AND facts from b +/// - `a || b` → facts from a OR facts from b (intersection) +/// - `!a` → negated facts from a +pub fn extract_facts(cond: &Expr) -> Facts { + let mut facts = Facts::new(); + extract_facts_into(cond, &mut facts); + facts +} + +fn extract_facts_into(cond: &Expr, facts: &mut Facts) { + let Some(base) = cond.expr_base() else { + return; + }; + + match base { + ExprBase::ExprCall(call) => { + // Could be std.isX(var) or std.objectHas(var, "field") + if let Some((var_name, fact)) = check_std_call(&call) { + facts.add(var_name, fact); + } + } + ExprBase::ExprBinary(binary) => { + extract_binary_facts(&binary, facts); + } + ExprBase::ExprUnary(unary) => { + let Some(op) = unary.unary_operator() else { + return; + }; + if op.kind() != UnaryOperatorKind::Not { + return; + } + // !expr - extract facts from inner and negate + if let Some(inner) = unary.rhs() { + let inner_facts = extract_facts(&inner); + let negated = inner_facts.negate(); + for (var_name, fact) in negated.facts { + facts.add(var_name, fact); + } + } + } + ExprBase::ExprParened(parens) => { + if let Some(inner) = parens.expr() { + extract_facts_into(&inner, facts); + } + } + _ => {} + } +} + +/// Extract facts from a binary expression. 
+fn extract_binary_facts(binary: &jrsonnet_rowan_parser::nodes::ExprBinary, facts: &mut Facts) { + let (Some(lhs), Some(rhs), Some(op)) = (binary.lhs(), binary.rhs(), binary.binary_operator()) + else { + return; + }; + + match op.kind() { + BinaryOperatorKind::Eq => { + // var == null or null == var + if let Some((var_name, fact)) = check_null_equality(&lhs, &rhs, Totality::Total) { + facts.add(var_name, fact); + } else if let Some((var_name, fact)) = check_null_equality(&rhs, &lhs, Totality::Total) + { + facts.add(var_name, fact); + } + // var == "literal" or "literal" == var + if let Some((var_name, fact)) = check_literal_string_equality(&lhs, &rhs) { + facts.add(var_name, fact); + } else if let Some((var_name, fact)) = check_literal_string_equality(&rhs, &lhs) { + facts.add(var_name, fact); + } + // var == true/false or true/false == var + if let Some((var_name, fact)) = check_literal_bool_equality(&lhs, &rhs) { + facts.add(var_name, fact); + } else if let Some((var_name, fact)) = check_literal_bool_equality(&rhs, &lhs) { + facts.add(var_name, fact); + } + // std.type(x) == "typename" + if let Some((var_name, fact)) = check_std_type_comparison(binary, &rhs) { + facts.add(var_name, fact); + } + // std.length(x) == n + if let Some((var_name, fact)) = check_std_length_comparison(binary, &rhs) { + facts.add(var_name, fact); + } + } + BinaryOperatorKind::Ne => { + // var != null - create fact and negate it + if let Some((var_name, fact)) = check_null_equality(&lhs, &rhs, Totality::Total) { + facts.add(var_name, !fact); + } else if let Some((var_name, fact)) = check_null_equality(&rhs, &lhs, Totality::Total) + { + facts.add(var_name, !fact); + } + // var != "literal" or "literal" != var + if let Some((var_name, fact)) = check_literal_string_equality(&lhs, &rhs) { + facts.add(var_name, !fact); + } else if let Some((var_name, fact)) = check_literal_string_equality(&rhs, &lhs) { + facts.add(var_name, !fact); + } + // var != true/false or true/false != var + if let 
Some((var_name, fact)) = check_literal_bool_equality(&lhs, &rhs) { + facts.add(var_name, !fact); + } else if let Some((var_name, fact)) = check_literal_bool_equality(&rhs, &lhs) { + facts.add(var_name, !fact); + } + // std.length(x) != 0 means non-empty + if let Some((var_name, fact)) = check_std_length_not_zero(binary, &rhs) { + facts.add(var_name, fact); + } + } + BinaryOperatorKind::Gt => { + // std.length(x) > n means length >= n+1 + if let Some((var_name, fact)) = check_std_length_greater(binary, &rhs) { + facts.add(var_name, fact); + } + } + BinaryOperatorKind::Ge => { + // std.length(x) >= n means length >= n + if let Some((var_name, fact)) = check_std_length_greater_eq(binary, &rhs) { + facts.add(var_name, fact); + } + } + BinaryOperatorKind::InKw => { + // "field" in obj + if let Some((var_name, fact)) = check_in_operator(&lhs, &rhs) { + facts.add(var_name, fact); + } + } + BinaryOperatorKind::And => { + // a && b - extract facts from both sides + extract_facts_into(&lhs, facts); + extract_facts_into(&rhs, facts); + } + BinaryOperatorKind::Or => { + // a || b - only keep facts that are in both + let lhs_facts = extract_facts(&lhs); + let rhs_facts = extract_facts(&rhs); + let combined = lhs_facts.or_combine(rhs_facts); + for (var_name, fact) in combined.facts { + facts.add(var_name, fact); + } + } + _ => {} + } +} + +/// Result of extracting a std.methodName call. +struct StdMethodCall { + method_name: String, + args: ArgsDesc, +} + +/// Check if an ExprCall is a call to std.methodName and extract the method name and args. +fn extract_std_method_call(call: &ExprCall) -> Option { + let callee = call.callee()?; + let ExprBase::ExprField(field) = callee.expr_base()? else { + return None; + }; + + // Check base is std + let base = field.base()?; + let ExprBase::ExprVar(var) = base.expr_base()? 
else { + return None; + }; + if var.name()?.ident_lit()?.text() != "std" { + return None; + } + + let method_name = field.field()?.ident_lit()?.text().to_string(); + let args = call.args_desc()?; + Some(StdMethodCall { method_name, args }) +} + +/// Extract a std method call from the LHS of a binary expression. +fn extract_std_method_from_binary( + binary: &jrsonnet_rowan_parser::nodes::ExprBinary, +) -> Option { + let lhs = binary.lhs()?; + let ExprBase::ExprCall(call) = lhs.expr_base()? else { + return None; + }; + extract_std_method_call(&call) +} + +/// Check for std.isX(var) or std.objectHas(var, "field") pattern in an ExprCall. +fn check_std_call(call: &ExprCall) -> Option<(String, Fact)> { + let std_call = extract_std_method_call(call)?; + + // Try std.isX patterns first + if let Some(fact) = unary_std_fn_fact(&std_call.method_name) { + let var_name = extract_single_arg_var_name(&std_call.args)?; + return Some((var_name, fact)); + } + + // Try std.objectHas(obj, "field") + if std_call.method_name == "objectHas" || std_call.method_name == "objectHasAll" { + let arg_list: Vec<_> = std_call.args.args().collect(); + if arg_list.len() != 2 { + return None; + } + let var_name = extract_var_name(&arg_list[0].expr()?)?; + let field_name = extract_string_literal(&arg_list[1].expr()?)?; + return Some((var_name, Fact::has_field(field_name))); + } + + // Try std.all(std.map(predicate, arr)) pattern + if std_call.method_name == "all" { + return check_higher_order_predicate(&std_call.args); + } + + None +} + +/// Check for std.all(std.map(predicate, arr)) pattern. +/// +/// When `std.all(std.map(std.isNumber, arr))` is true, we know all elements +/// of `arr` are numbers, so we can narrow `arr` to `Array`. 
+fn check_higher_order_predicate(all_args: &ArgsDesc) -> Option<(String, Fact)> { + let arg_list: Vec<_> = all_args.args().collect(); + if arg_list.len() != 1 { + return None; + } + + // The argument should be std.map(predicate, arr) + let map_arg = arg_list[0].expr()?; + let ExprBase::ExprCall(map_call) = map_arg.expr_base()? else { + return None; + }; + + let map_std_call = extract_std_method_call(&map_call)?; + if map_std_call.method_name != "map" { + return None; + } + + let map_args: Vec<_> = map_std_call.args.args().collect(); + if map_args.len() != 2 { + return None; + } + + // First arg is the predicate (e.g., std.isNumber) + let pred_expr = map_args[0].expr()?; + let elem_fact = extract_predicate_element_fact(&pred_expr)?; + + // Second arg is the array variable + let arr_expr = map_args[1].expr()?; + let var_name = extract_var_name(&arr_expr)?; + + Some((var_name, elem_fact)) +} + +/// Extract the element type fact from a predicate expression. +/// +/// For `std.isNumber`, returns `Fact::array_elem_number(Totality::Total)`. +fn extract_predicate_element_fact(pred: &Expr) -> Option { + use jrsonnet_std_sig::{get_flow_typing, NarrowsTo, Totality as SigTotality}; + + // Check if it's a std.isX function reference + let ExprBase::ExprField(field) = pred.expr_base()? else { + return None; + }; + + // Check base is std + let base = field.base()?; + let ExprBase::ExprVar(var) = base.expr_base()? 
else { + return None; + }; + if var.name()?.ident_lit()?.text() != "std" { + return None; + } + + let method_ident = field.field()?.ident_lit()?; + let method_name = method_ident.text(); + + // Look up flow typing info from spec + let ft = get_flow_typing(method_name)?; + let totality = match ft.totality { + SigTotality::Total => Totality::Total, + SigTotality::Partial => Totality::Partial, + }; + + // Map narrowing type to array element fact + Some(match ft.narrows_to { + NarrowsTo::Number => Fact::array_elem_number(totality), + NarrowsTo::String => Fact::array_elem_string(totality), + NarrowsTo::Bool => Fact::array_elem_bool(totality), + NarrowsTo::Array => Fact::array_elem_array(totality), + NarrowsTo::Object => Fact::array_elem_object(totality), + NarrowsTo::Function => Fact::array_elem_function(totality), + NarrowsTo::Null => return None, // No array element fact for null + }) +} + +/// Map std function names to type facts using the spec. +fn unary_std_fn_fact(fn_name: &str) -> Option { + use jrsonnet_std_sig::{get_flow_typing, NarrowsTo, Totality as SigTotality}; + + let ft = get_flow_typing(fn_name)?; + let totality = match ft.totality { + SigTotality::Total => Totality::Total, + SigTotality::Partial => Totality::Partial, + }; + + Some(match ft.narrows_to { + NarrowsTo::Number => Fact::number(totality), + NarrowsTo::String => Fact::string(totality), + NarrowsTo::Bool => Fact::boolean(), + NarrowsTo::Array => Fact::array(totality), + NarrowsTo::Object => Fact::object(totality), + NarrowsTo::Function => Fact::function(), + NarrowsTo::Null => Fact::null(), + }) +} + +/// Check for var == null pattern. 
+fn check_null_equality( + var_side: &Expr, + null_side: &Expr, + totality: Totality, +) -> Option<(String, Fact)> { + let var_name = extract_var_name(var_side)?; + if !is_null_literal(null_side) { + return None; + } + let fact = if totality == Totality::Partial { + Fact { + repr: FactRepr::Prim(PrimFact::Null, Totality::Partial), + } + } else { + Fact::null() + }; + Some((var_name, fact)) +} + +/// Check for var == "literal" pattern. +fn check_literal_string_equality(var_side: &Expr, literal_side: &Expr) -> Option<(String, Fact)> { + let var_name = extract_var_name(var_side)?; + let literal_value = extract_string_literal(literal_side)?; + Some((var_name, Fact::literal_string(literal_value))) +} + +/// Check for var == true/false pattern. +fn check_literal_bool_equality(var_side: &Expr, literal_side: &Expr) -> Option<(String, Fact)> { + let var_name = extract_var_name(var_side)?; + let literal_value = extract_bool_literal(literal_side)?; + Some((var_name, Fact::literal_bool(literal_value))) +} + +/// Check for "field" in obj pattern. +fn check_in_operator(lhs: &Expr, rhs: &Expr) -> Option<(String, Fact)> { + let field_name = extract_string_literal(lhs)?; + let var_name = extract_var_name(rhs)?; + Some((var_name, Fact::has_field(field_name))) +} + +/// Check for std.type(x) == "typename" pattern. 
+fn check_std_type_comparison( + binary: &jrsonnet_rowan_parser::nodes::ExprBinary, + rhs: &Expr, +) -> Option<(String, Fact)> { + let call = extract_std_method_from_binary(binary)?; + if call.method_name != "type" { + return None; + } + + let var_name = extract_single_arg_var_name(&call.args)?; + let type_str = extract_string_literal(rhs)?; + + let fact = match type_str.as_str() { + "number" => Fact::number(Totality::Total), + "string" => Fact::string(Totality::Total), + "boolean" => Fact::boolean(), + "array" => Fact::array(Totality::Total), + "object" => Fact::object(Totality::Total), + "function" => Fact::function(), + "null" => Fact::null(), + _ => return None, + }; + + Some((var_name, fact)) +} + +/// Check for std.length(x) == n pattern. +fn check_std_length_comparison( + binary: &jrsonnet_rowan_parser::nodes::ExprBinary, + rhs: &Expr, +) -> Option<(String, Fact)> { + let call = extract_std_method_from_binary(binary)?; + if call.method_name != "length" { + return None; + } + + let var_name = extract_single_arg_var_name(&call.args)?; + let len = extract_number_literal(rhs)?; + + // Length must be a non-negative integer + if len < 0.0 || len.fract() != 0.0 { + return None; + } + + Some((var_name, Fact::has_len(len as usize))) +} + +/// Check for std.length(x) != 0 pattern (non-empty). +fn check_std_length_not_zero( + binary: &jrsonnet_rowan_parser::nodes::ExprBinary, + rhs: &Expr, +) -> Option<(String, Fact)> { + let call = extract_std_method_from_binary(binary)?; + if call.method_name != "length" { + return None; + } + + let var_name = extract_single_arg_var_name(&call.args)?; + let len = extract_number_literal(rhs)?; + + // Only handle != 0 + if len != 0.0 { + return None; + } + + Some((var_name, Fact::min_len(1))) +} + +/// Check for std.length(x) > n pattern. 
+fn check_std_length_greater( + binary: &jrsonnet_rowan_parser::nodes::ExprBinary, + rhs: &Expr, +) -> Option<(String, Fact)> { + let call = extract_std_method_from_binary(binary)?; + if call.method_name != "length" { + return None; + } + + let var_name = extract_single_arg_var_name(&call.args)?; + let len = extract_number_literal(rhs)?; + + // Length must be a non-negative integer + if len < 0.0 || len.fract() != 0.0 { + return None; + } + + // std.length(x) > n means length >= n+1 + Some((var_name, Fact::min_len(len as usize + 1))) +} + +/// Check for std.length(x) >= n pattern. +fn check_std_length_greater_eq( + binary: &jrsonnet_rowan_parser::nodes::ExprBinary, + rhs: &Expr, +) -> Option<(String, Fact)> { + let call = extract_std_method_from_binary(binary)?; + if call.method_name != "length" { + return None; + } + + let var_name = extract_single_arg_var_name(&call.args)?; + let len = extract_number_literal(rhs)?; + + // Length must be a non-negative integer + if len < 0.0 || len.fract() != 0.0 { + return None; + } + + Some((var_name, Fact::min_len(len as usize))) +} + +/// Extract a number literal value from an expression. +fn extract_number_literal(expr: &Expr) -> Option { + let base = expr.expr_base()?; + let ExprBase::ExprNumber(num) = base else { + return None; + }; + + let number = num.number()?; + let text = number.syntax().text().to_string(); + text.parse().ok() +} + +/// Extract variable name from a single-argument function call. +fn extract_single_arg_var_name(args: &ArgsDesc) -> Option { + let arg_list: Vec<_> = args.args().collect(); + if arg_list.len() != 1 { + return None; + } + let arg_expr = arg_list[0].expr()?; + extract_var_name(&arg_expr) +} + +/// Extract a variable name from an expression if it's a simple variable reference. 
fn extract_var_name(expr: &Expr) -> Option<String> {
	let base = expr.expr_base()?;
	let ExprBase::ExprVar(var) = base else {
		return None;
	};
	let name_node = var.name()?;
	let ident = name_node.ident_lit()?;
	let name = ident.text();
	// Don't count std as a variable for narrowing
	if name == "std" {
		return None;
	}
	Some(name.to_string())
}

/// Extract a string literal value from an expression.
///
/// NOTE(review): escape sequences inside the literal are NOT decoded here;
/// confirm that callers only compare against simple literals.
fn extract_string_literal(expr: &Expr) -> Option<String> {
	let base = expr.expr_base()?;
	let ExprBase::ExprString(s) = base else {
		return None;
	};

	// Get the raw token text, including the surrounding quotes
	let text = s.syntax().first_token()?.text().to_string();

	// Remove quotes - handle both single and double quotes. The length guard
	// keeps a degenerate single-quote-character token (possible in error
	// recovery) from panicking on the `1..len-1` slice.
	if text.len() >= 2
		&& ((text.starts_with('"') && text.ends_with('"'))
			|| (text.starts_with('\'') && text.ends_with('\'')))
	{
		Some(text[1..text.len() - 1].to_string())
	} else {
		None
	}
}

/// Check if an expression is a null literal.
fn is_null_literal(expr: &Expr) -> bool {
	let Some(base) = expr.expr_base() else {
		return false;
	};
	if let ExprBase::ExprLiteral(lit) = base {
		lit.literal()
			.is_some_and(|l| l.kind() == LiteralKind::NullKw)
	} else {
		false
	}
}

/// Extract a boolean literal value from an expression.
fn extract_bool_literal(expr: &Expr) -> Option<bool> {
	let base = expr.expr_base()?;
	let ExprBase::ExprLiteral(lit) = base else {
		return None;
	};
	let literal = lit.literal()?;
	match literal.kind() {
		LiteralKind::TrueKw => Some(true),
		LiteralKind::FalseKw => Some(false),
		_ => None,
	}
}

#[cfg(test)]
mod tests {
	use jrsonnet_lsp_document::{DocVersion, Document};
	use jrsonnet_lsp_types::{FieldDefInterned, FieldVis, GlobalTyStore, ObjectData, TyData};
	use rstest::rstest;

	use super::*;

	/// Parse Jsonnet code and extract facts.
+ fn parse_and_extract(code: &str) -> Facts { + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let ast = doc.ast(); + let expr = ast.expr().expect("should parse"); + extract_facts(&expr) + } + + /// Create an open object with a single field (test helper). + fn object_with_field(name: &str, ty: Ty, required: bool) -> ObjectData { + ObjectData { + fields: vec![( + name.to_string(), + FieldDefInterned { + ty, + required, + visibility: FieldVis::Normal, + }, + )], + has_unknown: true, + } + } + + #[rstest] + #[case::number_narrows_any(Fact::number(Totality::Total), Ty::ANY, Ty::NUMBER)] + #[case::string_narrows_any(Fact::string(Totality::Total), Ty::ANY, Ty::STRING)] + #[case::null_narrows_any(Fact::null(), Ty::ANY, Ty::NULL)] + #[case::boolean_narrows_any(Fact::boolean(), Ty::ANY, Ty::BOOL)] + fn test_fact_apply_to(#[case] fact: Fact, #[case] input: Ty, #[case] expected: Ty) { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let result = fact.apply_to(input, &mut store); + assert_eq!(result, expected); + } + + #[rstest] + #[case::number_removes_from_union( + Fact::number(Totality::Total), + vec![Ty::NUMBER, Ty::STRING], + Ty::STRING + )] + #[case::null_removes_from_union( + Fact::null(), + vec![Ty::NULL, Ty::STRING], + Ty::STRING + )] + fn test_total_fact_negation( + #[case] fact: Fact, + #[case] union_types: Vec, + #[case] expected: Ty, + ) { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let input = store.union(union_types); + let result = fact.apply_negated(input, &mut store); + assert_eq!(result, expected); + } + + #[test] + fn test_partial_fact_negation_preserves_type() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let fact = Fact::number(Totality::Partial); + let input = store.union(vec![Ty::NUMBER, Ty::STRING]); + let result = fact.apply_negated(input, &mut store); + // Partial facts cannot negate - type unchanged + let expected = 
store.union(vec![Ty::NUMBER, Ty::STRING]); + assert_eq!(result, expected); + } + + #[test] + fn test_has_field_fact_produces_object_with_field() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let fact = Fact::has_field("foo".to_string()); + let result = fact.apply_to(Ty::ANY, &mut store); + let expected = store.object(object_with_field("foo", Ty::ANY, true)); + assert_eq!(result, expected); + } + + #[test] + fn test_and_fact_intersects_constraints() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let combined = Fact::number(Totality::Total).and(Fact::number(Totality::Total)); + let result = combined.apply_to(Ty::ANY, &mut store); + assert_eq!(result, Ty::NUMBER); + } + + #[test] + fn test_or_fact_unions_constraints() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let combined = Fact::number(Totality::Total).or(Fact::string(Totality::Total)); + let result = combined.apply_to(Ty::ANY, &mut store); + let expected = store.union(vec![Ty::NUMBER, Ty::STRING]); + assert_eq!(result, expected); + } + + #[test] + fn test_de_morgan_negated_or_becomes_and() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + // !(number || string) = !number && !string + let combined = Fact::number(Totality::Total) + .or(Fact::string(Totality::Total)) + .negated(); + let input = store.union(vec![Ty::NUMBER, Ty::STRING, Ty::BOOL]); + let result = combined.apply_to(input, &mut store); + assert_eq!(result, Ty::BOOL); + } + + #[test] + fn test_double_negation_cancels() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let fact = !(!Fact::number(Totality::Total)); + let result = fact.apply_to(Ty::ANY, &mut store); + assert_eq!(result, Ty::NUMBER); + } + + #[test] + fn test_facts_collection_stores_by_variable() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let mut facts = Facts::new(); + facts.add("x".to_string(), 
Fact::number(Totality::Total)); + facts.add("y".to_string(), Fact::string(Totality::Total)); + + let x_fact = facts.get("x").expect("x should exist"); + let y_fact = facts.get("y").expect("y should exist"); + let z_fact = facts.get("z"); + + assert_eq!(x_fact.apply_to(Ty::ANY, &mut store), Ty::NUMBER); + assert_eq!(y_fact.apply_to(Ty::ANY, &mut store), Ty::STRING); + assert_eq!(z_fact, None); + } + + #[test] + fn test_facts_add_same_variable_combines_with_and() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let mut facts = Facts::new(); + facts.add("x".to_string(), Fact::object(Totality::Total)); + facts.add("x".to_string(), Fact::has_field("foo".to_string())); + + let result = facts + .get("x") + .expect("x should exist") + .apply_to(Ty::ANY, &mut store); + let expected = store.object(object_with_field("foo", Ty::ANY, true)); + assert_eq!(result, expected); + } + + #[test] + fn test_facts_and_combine_merges_all_variables() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let mut f1 = Facts::new(); + f1.add("x".to_string(), Fact::number(Totality::Total)); + + let mut f2 = Facts::new(); + f2.add("y".to_string(), Fact::string(Totality::Total)); + + let combined = f1.and_combine(f2); + + let x_result = combined.get("x").expect("x").apply_to(Ty::ANY, &mut store); + let y_result = combined.get("y").expect("y").apply_to(Ty::ANY, &mut store); + assert_eq!(x_result, Ty::NUMBER); + assert_eq!(y_result, Ty::STRING); + } + + #[test] + fn test_facts_or_combine_keeps_only_common_variables() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let mut f1 = Facts::new(); + f1.add("x".to_string(), Fact::number(Totality::Total)); + + let mut f2 = Facts::new(); + f2.add("x".to_string(), Fact::string(Totality::Total)); + f2.add("y".to_string(), Fact::string(Totality::Total)); + + let combined = f1.or_combine(f2); + + let x_result = combined.get("x").expect("x").apply_to(Ty::ANY, &mut store); + 
let y_exists = combined.get("y"); + + let expected = store.union(vec![Ty::NUMBER, Ty::STRING]); + assert_eq!(x_result, expected); + assert_eq!(y_exists, None); + } + + #[rstest] + #[case::is_number("std.isNumber(x)", Ty::NUMBER)] + #[case::is_string("std.isString(x)", Ty::STRING)] + #[case::is_boolean("std.isBoolean(x)", Ty::BOOL)] + fn test_extract_std_is_primitive(#[case] code: &str, #[case] expected: Ty) { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let facts = parse_and_extract(code); + let x_fact = facts.get("x").expect("should have fact for x"); + let result = x_fact.apply_to(Ty::ANY, &mut store); + assert_eq!(result, expected); + } + + #[test] + fn test_extract_std_is_array() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let facts = parse_and_extract("std.isArray(x)"); + let x_fact = facts.get("x").expect("should have fact for x"); + let result = x_fact.apply_to(Ty::ANY, &mut store); + let expected = store.array(Ty::ANY); + assert_eq!(result, expected); + } + + #[test] + fn test_extract_std_is_object() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let facts = parse_and_extract("std.isObject(x)"); + let x_fact = facts.get("x").expect("should have fact for x"); + let result = x_fact.apply_to(Ty::ANY, &mut store); + let expected = store.object(ObjectData::open()); + assert_eq!(result, expected); + } + + #[test] + fn test_extract_std_is_function() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let facts = parse_and_extract("std.isFunction(x)"); + let x_fact = facts.get("x").expect("should have fact for x"); + let result = x_fact.apply_to(Ty::ANY, &mut store); + assert!(matches!(store.get(result), TyData::Function(_))); + } + + #[test] + fn test_extract_eq_null() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let facts = parse_and_extract("x == null"); + let x_fact = facts.get("x").expect("should 
have fact for x"); + let result = x_fact.apply_to(Ty::ANY, &mut store); + assert_eq!(result, Ty::NULL); + } + + #[test] + fn test_extract_ne_null() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let facts = parse_and_extract("x != null"); + let x_fact = facts.get("x").expect("should have fact for x"); + let input = store.union(vec![Ty::NULL, Ty::STRING]); + let result = x_fact.apply_to(input, &mut store); + assert_eq!(result, Ty::STRING); + } + + #[test] + fn test_extract_std_object_has() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let facts = parse_and_extract(r#"std.objectHas(obj, "field")"#); + let obj_fact = facts.get("obj").expect("should have fact for obj"); + let result = obj_fact.apply_to(Ty::ANY, &mut store); + let expected = store.object(object_with_field("field", Ty::ANY, true)); + assert_eq!(result, expected); + } + + #[test] + fn test_extract_in_operator() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let facts = parse_and_extract(r#""field" in obj"#); + let obj_fact = facts.get("obj").expect("should have fact for obj"); + let result = obj_fact.apply_to(Ty::ANY, &mut store); + let expected = store.object(object_with_field("field", Ty::ANY, true)); + assert_eq!(result, expected); + } + + #[test] + fn test_extract_logical_and() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let facts = parse_and_extract("std.isNumber(x) && std.isString(y)"); + let x_result = facts.get("x").expect("x").apply_to(Ty::ANY, &mut store); + let y_result = facts.get("y").expect("y").apply_to(Ty::ANY, &mut store); + assert_eq!(x_result, Ty::NUMBER); + assert_eq!(y_result, Ty::STRING); + } + + #[test] + fn test_extract_logical_or() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let facts = parse_and_extract("std.isNumber(x) || std.isString(x)"); + let x_fact = facts.get("x").expect("should have fact for x"); + let 
result = x_fact.apply_to(Ty::ANY, &mut store); + let expected = store.union(vec![Ty::NUMBER, Ty::STRING]); + assert_eq!(result, expected); + } + + #[test] + fn test_extract_negation() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let facts = parse_and_extract("!std.isNumber(x)"); + let x_fact = facts.get("x").expect("should have fact for x"); + let input = store.union(vec![Ty::NUMBER, Ty::STRING]); + let result = x_fact.apply_to(input, &mut store); + assert_eq!(result, Ty::STRING); + } + + #[test] + fn test_extract_std_type_comparison() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let facts = parse_and_extract(r#"std.type(x) == "number""#); + let x_fact = facts.get("x").expect("should have fact for x"); + let result = x_fact.apply_to(Ty::ANY, &mut store); + assert_eq!(result, Ty::NUMBER); + } + + #[test] + fn test_extract_partial_predicate_cannot_negate() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let facts = parse_and_extract("std.isInteger(x)"); + let x_fact = facts.get("x").expect("should have fact for x"); + + let positive = x_fact.apply_to(Ty::ANY, &mut store); + assert_eq!(positive, Ty::NUMBER); + + let input = store.union(vec![Ty::NUMBER, Ty::STRING]); + let negative = x_fact.apply_negated(input, &mut store); + // Partial facts cannot negate + let expected = store.union(vec![Ty::NUMBER, Ty::STRING]); + assert_eq!(negative, expected); + } + + #[test] + fn test_extract_std_length_equality() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let facts = parse_and_extract("std.length(arr) == 3"); + let arr_fact = facts.get("arr").expect("should have fact for arr"); + + // Array with unknown elements becomes a 3-tuple + let input = store.array(Ty::NUMBER); + let result = arr_fact.apply_to(input, &mut store); + let expected = store.tuple(vec![Ty::NUMBER, Ty::NUMBER, Ty::NUMBER]); + assert_eq!(result, expected); + } + + #[test] + fn 
test_extract_std_length_greater_than_zero() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let facts = parse_and_extract("std.length(arr) > 0"); + let arr_fact = facts.get("arr").expect("should have fact for arr"); + + // Non-empty array - for now just confirm the fact exists + let input = store.array(Ty::NUMBER); + let result = arr_fact.apply_to(input, &mut store); + // With min_len(1), this should narrow to a non-empty array (at least 1 element) + assert!(result != Ty::NEVER); + } + + #[test] + fn test_extract_std_length_not_zero() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let facts = parse_and_extract("std.length(arr) != 0"); + let arr_fact = facts.get("arr").expect("should have fact for arr"); + + // Non-empty array + let input = store.array(Ty::NUMBER); + let result = arr_fact.apply_to(input, &mut store); + assert!(result != Ty::NEVER); + } + + // Higher-order predicate tests + + #[test] + fn test_higher_order_predicate_all_map_is_number() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let facts = parse_and_extract("std.all(std.map(std.isNumber, arr))"); + let arr_fact = facts.get("arr").expect("should have fact for arr"); + + // Narrows Array to Array + let input = store.array(Ty::ANY); + let result = arr_fact.apply_to(input, &mut store); + let expected = store.array(Ty::NUMBER); + assert_eq!(result, expected); + } + + #[test] + fn test_higher_order_predicate_all_map_is_string() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let facts = parse_and_extract("std.all(std.map(std.isString, arr))"); + let arr_fact = facts.get("arr").expect("should have fact for arr"); + + // Narrows Array to Array + let input = store.array(Ty::ANY); + let result = arr_fact.apply_to(input, &mut store); + let expected = store.array(Ty::STRING); + assert_eq!(result, expected); + } + + #[test] + fn test_higher_order_predicate_all_map_is_object() { + 
let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let facts = parse_and_extract("std.all(std.map(std.isObject, arr))"); + let arr_fact = facts.get("arr").expect("should have fact for arr"); + + // Narrows Array to Array + let input = store.array(Ty::ANY); + let result = arr_fact.apply_to(input, &mut store); + let obj_ty = store.object(ObjectData::open()); + let expected = store.array(obj_ty); + assert_eq!(result, expected); + } + + #[test] + fn test_higher_order_predicate_narrows_existing_array() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let facts = parse_and_extract("std.all(std.map(std.isNumber, arr))"); + let arr_fact = facts.get("arr").expect("should have fact for arr"); + + // Narrows existing array type to Number elements + let elem_union = store.union(vec![Ty::NUMBER, Ty::STRING]); + let input = store.array(elem_union); + let result = arr_fact.apply_to(input, &mut store); + let expected = store.array(Ty::NUMBER); + assert_eq!(result, expected); + } + + #[test] + fn test_higher_order_predicate_with_any_type() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let facts = parse_and_extract("std.all(std.map(std.isNumber, x))"); + let x_fact = facts.get("x").expect("should have fact for x"); + + // When applied to Any, narrows to Array + let result = x_fact.apply_to(Ty::ANY, &mut store); + let expected = store.array(Ty::NUMBER); + assert_eq!(result, expected); + } + + #[rstest] + #[case::is_decimal(Fact::number(Totality::Partial))] + #[case::is_integer(Fact::number(Totality::Partial))] + fn test_partial_predicates_cannot_negate(#[case] fact: Fact) { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let input = store.union(vec![Ty::NUMBER, Ty::STRING, Ty::BOOL]); + // Partial facts should not remove anything when negated + let result = fact.apply_negated(input, &mut store); + let expected = store.union(vec![Ty::NUMBER, Ty::STRING, Ty::BOOL]); + 
assert_eq!(result, expected); + } + + #[test] + fn test_partial_and_total_combination() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + // Combining partial and total facts + // isNumber (total) AND isInteger (partial) + let total = Fact::number(Totality::Total); + let partial = Fact::number(Totality::Partial); + let combined = total.and(partial); + + // Positive case: narrows to Number + let result = combined.apply_to(Ty::ANY, &mut store); + assert_eq!(result, Ty::NUMBER); + + // Negative case: !(total AND partial) = !total OR !partial + // !total widens, !partial preserves, union of both + let input = store.union(vec![Ty::NUMBER, Ty::STRING]); + let negated = combined.apply_negated(input, &mut store); + // Should be union of (String, Union(Number, String)) = Union(Number, String) + assert!( + matches!(store.get(negated), TyData::Union(_)), + "Expected Union type, got {:?}", + store.get(negated) + ); + } + + #[test] + fn test_total_fact_negation_removes_type() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let fact = Fact::number(Totality::Total); + let input = store.union(vec![Ty::NUMBER, Ty::STRING, Ty::BOOL]); + let result = fact.apply_negated(input, &mut store); + // Number should be removed from the union + let expected = store.union(vec![Ty::STRING, Ty::BOOL]); + assert_eq!(result, expected); + } + + #[rstest] + #[case::literal_bool_true_narrows_any(Fact::literal_bool(true), Ty::ANY, Ty::TRUE)] + #[case::literal_bool_false_narrows_any(Fact::literal_bool(false), Ty::ANY, Ty::FALSE)] + #[case::literal_bool_true_narrows_bool(Fact::literal_bool(true), Ty::BOOL, Ty::TRUE)] + fn test_literal_fact_apply_to_primitives( + #[case] fact: Fact, + #[case] input: Ty, + #[case] expected: Ty, + ) { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + assert_eq!(fact.apply_to(input, &mut store), expected); + } + + #[test] + fn test_literal_string_fact_narrows_any() { + let mut store = 
MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let fact = Fact::literal_string("hello".to_string()); + let result = fact.apply_to(Ty::ANY, &mut store); + let expected = store.literal_string("hello".to_string()); + assert_eq!(result, expected); + } + + #[test] + fn test_literal_string_fact_narrows_string() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let fact = Fact::literal_string("hello".to_string()); + let result = fact.apply_to(Ty::STRING, &mut store); + let expected = store.literal_string("hello".to_string()); + assert_eq!(result, expected); + } + + #[rstest] + #[case::literal_bool_true_negated_becomes_false(Fact::literal_bool(true), Ty::BOOL, Ty::FALSE)] + #[case::literal_bool_false_negated_becomes_true(Fact::literal_bool(false), Ty::BOOL, Ty::TRUE)] + fn test_literal_bool_fact_apply_negated( + #[case] fact: Fact, + #[case] input: Ty, + #[case] expected: Ty, + ) { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + assert_eq!(fact.apply_negated(input, &mut store), expected); + } + + #[test] + fn test_extract_literal_string_equality_fact() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let facts = parse_and_extract(r#"x == "hello""#); + let fact = facts.get("x").expect("should have fact for x"); + let result = fact.apply_to(Ty::ANY, &mut store); + let expected = store.literal_string("hello".to_string()); + assert_eq!(result, expected); + } + + #[test] + fn test_extract_literal_bool_equality_fact_true() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let facts = parse_and_extract("x == true"); + let fact = facts.get("x").expect("should have fact for x"); + assert_eq!(fact.apply_to(Ty::ANY, &mut store), Ty::TRUE); + } + + #[test] + fn test_extract_literal_bool_equality_fact_false() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let facts = parse_and_extract("x == false"); + let fact = 
facts.get("x").expect("should have fact for x"); + assert_eq!(fact.apply_to(Ty::ANY, &mut store), Ty::FALSE); + } + + #[test] + fn test_extract_literal_string_reversed() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + // "hello" == x should also work + let facts = parse_and_extract(r#""hello" == x"#); + let fact = facts.get("x").expect("should have fact for x"); + let result = fact.apply_to(Ty::ANY, &mut store); + let expected = store.literal_string("hello".to_string()); + assert_eq!(result, expected); + } + + #[test] + fn test_extract_literal_bool_not_equal() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + // x != true should narrow x to false in the then-branch via negation + let facts = parse_and_extract("x != true"); + let fact = facts.get("x").expect("should have fact for x"); + // The fact is negated (`!literal_bool(true)`). + // When applied to Bool, it should give LiteralBool(false) + assert_eq!(fact.apply_to(Ty::BOOL, &mut store), Ty::FALSE); + } +} diff --git a/crates/jrsonnet-lsp-inference/src/helpers.rs b/crates/jrsonnet-lsp-inference/src/helpers.rs new file mode 100644 index 00000000..065392ae --- /dev/null +++ b/crates/jrsonnet-lsp-inference/src/helpers.rs @@ -0,0 +1,204 @@ +//! Helper functions for type inference. + +use jrsonnet_lsp_types::{FieldVis, ParamInterned, Ty}; +use jrsonnet_rowan_parser::{ + nodes::{ExprBase, FieldName, Visibility, VisibilityKind}, + AstToken, +}; + +use crate::{env::TypeEnv, expr::infer_expr_ty}; + +/// Convert AST visibility to interned field visibility. +pub fn convert_visibility_ty(vis: Option) -> FieldVis { + match vis.map(|v| v.kind()) { + Some(VisibilityKind::Coloncoloncolon) => FieldVis::ForceVisible, // ::: + Some(VisibilityKind::Coloncolon) => FieldVis::Hidden, // :: + Some(VisibilityKind::Colon) | None => FieldVis::Normal, // : (default) + } +} + +/// Extract field name from a FieldName node. 
+pub fn extract_field_name(field_name: &FieldName) -> Option { + match field_name { + FieldName::FieldNameFixed(fixed) => { + // Try id first, then text (for string keys) + if let Some(name) = fixed.id() { + if let Some(ident) = name.ident_lit() { + return Some(ident.text().to_string()); + } + } + // String key + if let Some(text) = fixed.text() { + // Return the text content without quotes + let s = text.syntax().text(); + // Remove quotes + if s.len() >= 2 { + return Some(s[1..s.len() - 1].to_string()); + } + } + None + } + FieldName::FieldNameDynamic(_) => { + // Dynamic field names can't be statically determined + None + } + } +} + +/// Infer type for stdlib field access (e.g., `std.map`), returning `Ty`. +/// +/// Returns the interned function type for stdlib functions when accessing them as fields. +pub fn infer_stdlib_field_access_ty( + field: &jrsonnet_rowan_parser::nodes::ExprField, + env: &mut TypeEnv, +) -> Option { + // Check that the base is 'std' + let base = field.base()?; + let ExprBase::ExprVar(var) = base.expr_base()? else { + return None; + }; + let name_node = var.name()?; + let ident = name_node.ident_lit()?; + if ident.text() != "std" { + return None; + } + + // Get the field name + let field_name = field.field()?.ident_lit()?.text().to_string(); + + // Look up the stdlib function type and import it into the current store + if let Some(ty) = + jrsonnet_lsp_stdlib::import_stdlib_func_to_mut_store(env.store_mut(), &field_name) + { + return Some(ty); + } + + // Unknown stdlib function - return generic function type (interned) + Some(env.store_mut().function_any()) +} + +/// Extract a variable name from an expression if it's a simple variable reference. +/// +/// This is used for constraint tracking - we can only track constraints on +/// direct variable references, not complex expressions. +pub fn extract_var_name_from_expr(expr: &jrsonnet_rowan_parser::nodes::Expr) -> Option { + let ExprBase::ExprVar(var) = expr.expr_base()? 
else { + return None; + }; + let name_node = var.name()?; + let ident = name_node.ident_lit()?; + Some(ident.text().to_string()) +} + +/// Extract parameter information (name and has_default) from a ParamsDesc. +/// +/// Returns a list of (name, has_default) pairs for each parameter. +pub fn extract_params_with_defaults( + params_desc: &jrsonnet_rowan_parser::nodes::ParamsDesc, +) -> Vec<(String, bool)> { + params_desc + .params() + .filter_map(|param| { + let name = param.destruct().and_then(|d| match d { + jrsonnet_rowan_parser::nodes::Destruct::DestructFull(full) => full + .name() + .and_then(|n| n.ident_lit()) + .map(|i| i.text().to_string()), + _ => None, + })?; + // A parameter has a default if it has an = token or an expr + let has_default = param.assign_token().is_some() || param.expr().is_some(); + Some((name, has_default)) + }) + .collect() +} + +/// Extract parameters from a function definition as interned types. +/// Returns Vec of ParamInterned with name, type from default, and has_default flag. 
+pub fn extract_params_with_default_types_ty( + params_desc: &jrsonnet_rowan_parser::nodes::ParamsDesc, + env: &mut TypeEnv, +) -> Vec { + params_desc + .params() + .filter_map(|param| { + let name = param.destruct().and_then(|d| match d { + jrsonnet_rowan_parser::nodes::Destruct::DestructFull(full) => full + .name() + .and_then(|n| n.ident_lit()) + .map(|i| i.text().to_string()), + _ => None, + })?; + + let has_default = param.assign_token().is_some() || param.expr().is_some(); + + // Infer type from default value if present + let default_ty = if let Some(default_expr) = param.expr() { + infer_expr_ty(&default_expr, env) + } else { + Ty::ANY + }; + + Some(ParamInterned { + name, + ty: default_ty, + has_default, + }) + }) + .collect() +} + +#[cfg(test)] +mod tests { + use std::collections::BTreeSet; + + use jrsonnet_lsp_document::{DocVersion, Document}; + use jrsonnet_lsp_types::{ObjectData, TyData}; + + use super::*; + use crate::expr::infer_document_type_ty; + + fn infer_doc(code: &str) -> (Ty, TypeEnv) { + let doc = Document::new(code.to_string(), DocVersion::new(1)); + infer_document_type_ty(&doc) + } + + fn try_object(env: &TypeEnv, ty: Ty) -> Option { + match env.store().get(ty) { + TyData::Object(obj) => Some(obj), + _ => None, + } + } + + fn assert_fields_ty(obj: &ObjectData, expected: &[&str]) { + let actual: BTreeSet<_> = obj.fields.iter().map(|(name, _)| name.as_str()).collect(); + let expected: BTreeSet<_> = expected.iter().copied().collect(); + assert_eq!(actual, expected, "Field mismatch"); + } + + #[test] + fn test_string_key_field_extraction() { + // String keys like { "my-field": 1 } should be extracted correctly + let (ty, env) = infer_doc(r#"{ "my-field": 1, "another-key": 2 }"#); + let obj = try_object(&env, ty).expect("expected object"); + assert_fields_ty(&obj, &["my-field", "another-key"]); + } + + #[test] + fn test_mixed_field_name_styles() { + // Mix of identifier and string key styles + let (ty, env) = infer_doc(r#"{ normal: 1, 
"string-key": 2 }"#); + let obj = try_object(&env, ty).expect("expected object"); + assert_fields_ty(&obj, &["normal", "string-key"]); + } + + #[test] + fn test_dynamic_field_not_in_static_fields() { + // Dynamic field names [expr] cannot be statically determined + // The object should still be created but without the dynamic field in static analysis + let (ty, env) = infer_doc(r#"{ static: 1, ["dyn" + "amic"]: 2 }"#); + let obj = try_object(&env, ty).expect("expected object"); + // Only the static field should be present + assert_fields_ty(&obj, &["static"]); + } +} diff --git a/crates/jrsonnet-lsp-inference/src/lib.rs b/crates/jrsonnet-lsp-inference/src/lib.rs new file mode 100644 index 00000000..f4da8189 --- /dev/null +++ b/crates/jrsonnet-lsp-inference/src/lib.rs @@ -0,0 +1,45 @@ +//! Type inference and flow analysis for jrsonnet LSP. +//! +//! This crate provides type inference, flow-sensitive type narrowing, +//! and type caching for Jsonnet code analysis. +//! +//! # Modules +//! +//! - [`analysis`]: Type analysis storage and queries (`TypeAnalysis`) +//! - [`env`]: Type environment (`TypeEnv`) for tracking variable types +//! - [`expr`]: Expression type inference +//! - [`object`]: Object type inference +//! - [`helpers`]: Helper functions for inference +//! - [`poly`]: Polymorphic type instantiation +//! - [`flow`]: Flow-sensitive type narrowing +//! - [`const_eval`]: Constant expression evaluation +//! - [`manager`]: Document management with type analysis caching +//! - [`type_cache`]: Cross-file type caching +//! - [`provider`]: Type provider for cross-file analysis with dependency handling +//! 
- [`suggestions`]: String similarity for suggestions + +pub mod analysis; +pub mod const_eval; +pub mod env; +pub mod expr; +pub mod flow; +pub mod helpers; +pub mod manager; +pub mod object; +pub mod poly; +pub mod provider; +pub mod suggestions; +pub mod type_cache; + +// Re-export public API +pub use analysis::TypeAnalysis; +pub use const_eval::{trace_base, trace_expr, trace_ident, ConstEvalResult}; +pub use env::{ImportResolver, ObjectContextInterned, TypeEnv, MAX_FUNCTION_INFERENCE_DEPTH}; +pub use expr::{infer_document_type_ty, infer_expr_ty, infer_expr_ty_with_expected, is_divergent}; +pub use flow::{extract_facts, Fact, Facts, Totality}; +pub use helpers::extract_params_with_defaults; +pub use manager::{DocumentManager, SharedDocumentManager}; +pub use poly::instantiate_function_call_ty; +pub use provider::{DocumentSource, TypeProvider}; +pub use suggestions::find_best_match; +pub use type_cache::{analyze_and_cache, new_shared_cache, SharedTypeCache, TypeCache}; diff --git a/crates/jrsonnet-lsp-inference/src/manager.rs b/crates/jrsonnet-lsp-inference/src/manager.rs new file mode 100644 index 00000000..3a838239 --- /dev/null +++ b/crates/jrsonnet-lsp-inference/src/manager.rs @@ -0,0 +1,425 @@ +//! Document manager for tracking open documents. +//! +//! Uses DashMap for concurrent access to documents without +//! requiring a global lock. Thread-safe for use with TypeProvider. + +use std::{num::NonZeroUsize, sync::Arc}; + +use dashmap::DashMap; +use jrsonnet_lsp_document::{ + CanonicalPath, DocVersion, Document, DEFAULT_ANALYSIS_CACHE_CAPACITY, + DEFAULT_CLOSED_CACHE_CAPACITY, +}; +use jrsonnet_lsp_types::GlobalTyStore; +use lru::LruCache; +use moka::sync::Cache as MokaCache; +use parking_lot::RwLock; + +use crate::{analysis::TypeAnalysis, provider::DocumentSource}; + +/// Cached type analysis for a document. +#[derive(Clone)] +struct CachedAnalysis { + /// Version of the document this analysis was computed for. 
+ version: DocVersion, + /// The cached analysis (wrapped in Arc for sharing). + analysis: Arc, +} + +/// Document manager that tracks open documents and caches recently closed ones. +/// +/// This type is `Sync` and can be safely shared across threads. +pub struct DocumentManager { + /// Open documents - concurrent access without global lock. + open: DashMap, + /// LRU cache for recently closed documents. + closed: RwLock>, + /// Cached type analysis per document (keyed by path, validated by version). + /// Uses moka for thread-safe concurrent caching with LRU-like eviction. + analysis_cache: MokaCache, + /// Global type store shared across all analyses. + global_types: Arc, +} + +impl Default for DocumentManager { + fn default() -> Self { + Self::new(Arc::new(GlobalTyStore::new())) + } +} + +impl DocumentManager { + /// Create a new document manager with a shared global type store. + pub fn new(global_types: Arc) -> Self { + Self::with_capacity(global_types, DEFAULT_CLOSED_CACHE_CAPACITY) + } + + /// Create a new document manager with specific capacities. + pub fn with_capacity(global_types: Arc, closed_capacity: usize) -> Self { + let closed_capacity = NonZeroUsize::new(closed_capacity).unwrap_or(NonZeroUsize::MIN); + Self { + open: DashMap::new(), + closed: RwLock::new(LruCache::new(closed_capacity)), + analysis_cache: MokaCache::new(DEFAULT_ANALYSIS_CACHE_CAPACITY as u64), + global_types, + } + } + + /// Get a reference to the global type store. + pub fn global_types(&self) -> &Arc { + &self.global_types + } + + /// Open a document (called on textDocument/didOpen). + pub fn open(&self, path: CanonicalPath, text: String, version: DocVersion) { + // Move from closed cache if present + { + let mut closed = self.closed.write(); + closed.pop(&path); + } + + let document = Document::new(text, version); + self.open.insert(path, document); + } + + /// Update an open document with full text (called on textDocument/didChange with full sync). 
+ /// + /// Returns true if the document was found and updated. + pub fn update(&self, path: &CanonicalPath, text: String, version: DocVersion) -> bool { + if let Some(mut doc) = self.open.get_mut(path) { + doc.update(text, version); + true + } else { + false + } + } + + /// Apply an incremental change to an open document. + /// + /// Returns true if the document was found and the change was applied successfully. + pub fn apply_incremental_change( + &self, + path: &CanonicalPath, + range: lsp_types::Range, + new_text: &str, + version: DocVersion, + ) -> bool { + if let Some(mut doc) = self.open.get_mut(path) { + doc.apply_incremental_change(range, new_text, version) + } else { + false + } + } + + /// Close a document (called on textDocument/didClose). + /// + /// Moves the document to the closed cache for potential reuse. + pub fn close(&self, path: &CanonicalPath) { + if let Some((path, document)) = self.open.remove(path) { + let mut closed = self.closed.write(); + closed.put(path, document); + } + } + + /// Get a reference to an open document. + pub fn get( + &self, + path: &CanonicalPath, + ) -> Option> { + self.open.get(path) + } + + /// Get the document text for a path. + /// + /// Checks open documents first, then the closed cache, + /// and finally tries to read from disk. + pub fn get_text(&self, path: &CanonicalPath) -> Option { + // Check open documents + if let Some(doc) = self.open.get(path) { + return Some(doc.text().to_string()); + } + + // Check closed cache (use peek to avoid write lock when just reading) + { + let closed = self.closed.read(); + if let Some(doc) = closed.peek(path) { + return Some(doc.text().to_string()); + } + } + + // Try to read from disk + std::fs::read_to_string(path.as_path()).ok() + } + + /// Get a document from any source (open, closed cache, or disk). + /// + /// Returns a cloned Document which is cheap due to internal Arc usage. 
+ pub fn get_document(&self, path: &CanonicalPath) -> Option { + // Check open documents + if let Some(doc) = self.open.get(path) { + return Some(doc.clone()); + } + + // Check closed cache + { + let closed = self.closed.read(); + if let Some(doc) = closed.peek(path) { + return Some(doc.clone()); + } + } + + // Try to read from disk and parse + let text = std::fs::read_to_string(path.as_path()).ok()?; + Some(Document::new(text, DocVersion::new(0))) + } + + /// Check if a document is currently open. + pub fn is_open(&self, path: &CanonicalPath) -> bool { + self.open.contains_key(path) + } + + /// Get the number of open documents. + pub fn open_count(&self) -> usize { + self.open.len() + } + + /// Iterate over all open documents. + pub fn for_each_open(&self, mut f: F) + where + F: FnMut(&CanonicalPath, &Document), + { + for entry in &self.open { + f(entry.key(), entry.value()); + } + } + + /// Get all open document paths. + pub fn open_paths(&self) -> Vec { + self.open.iter().map(|e| e.key().clone()).collect() + } + + /// Iterate over all open documents. + pub fn iter(&self) -> dashmap::iter::Iter<'_, CanonicalPath, Document> { + self.open.iter() + } + + /// Parallel iterate over all open documents. + pub fn par_iter( + &self, + ) -> rayon::iter::IterBridge> { + use rayon::prelude::*; + self.open.iter().par_bridge() + } + + /// Get cached type analysis for a document, computing it if needed. + /// + /// Returns `None` if the document is not found. + /// The analysis is cached and reused until the document version changes. 
+ pub fn get_analysis(&self, path: &CanonicalPath) -> Option> { + // Get the document first + let doc = self.get_document(path)?; + let version = doc.version(); + + // Check if we have a valid cached analysis + if let Some(cached) = self.analysis_cache.get(path) { + if cached.version == version { + return Some(Arc::clone(&cached.analysis)); + } + } + + // Compute new analysis using shared global store + let analysis = Arc::new(TypeAnalysis::analyze_with_global( + &doc, + Arc::clone(&self.global_types), + )); + + // Cache it + self.analysis_cache.insert( + path.clone(), + CachedAnalysis { + version, + analysis: Arc::clone(&analysis), + }, + ); + + Some(analysis) + } + + /// Invalidate the analysis cache for a document. + /// + /// Call this when a document's content changes to ensure the next + /// `get_analysis` call recomputes the analysis. + pub fn invalidate_analysis(&self, path: &CanonicalPath) { + self.analysis_cache.invalidate(path); + } +} + +impl DocumentSource for DocumentManager { + fn get_document(&self, path: &CanonicalPath) -> Option { + // Delegate to the existing get_document method + DocumentManager::get_document(self, path) + } +} + +impl<'a> IntoIterator for &'a DocumentManager { + type Item = dashmap::mapref::multiple::RefMulti<'a, CanonicalPath, Document>; + type IntoIter = dashmap::iter::Iter<'a, CanonicalPath, Document>; + + fn into_iter(self) -> Self::IntoIter { + self.iter() + } +} + +/// Shared document manager wrapped in thread-safe reference-counted pointer. 
pub type SharedDocumentManager = Arc<DocumentManager>;

#[cfg(test)]
mod tests {
	use std::path::PathBuf;

	use super::*;

	/// Build a canonical test path under `/test/` for the given name.
	fn test_path(name: &str) -> CanonicalPath {
		CanonicalPath::new(PathBuf::from(format!("/test/{name}.jsonnet")))
	}

	/// Fresh, empty global type store for an isolated manager.
	fn test_global_store() -> Arc<GlobalTyStore> {
		Arc::new(GlobalTyStore::new())
	}

	#[test]
	fn test_open_and_get() {
		let manager = DocumentManager::new(test_global_store());
		let path = test_path("test");

		manager.open(path.clone(), "{ a: 1 }".to_string(), DocVersion::new(1));

		let doc = manager.get(&path).expect("document should exist").clone();
		assert_eq!(doc.text(), "{ a: 1 }");
		assert_eq!(doc.version(), DocVersion::new(1));
	}

	#[test]
	fn test_update() {
		let manager = DocumentManager::new(test_global_store());
		let path = test_path("test");

		manager.open(path.clone(), "{ a: 1 }".to_string(), DocVersion::new(1));
		assert!(manager.update(&path, "{ a: 2 }".to_string(), DocVersion::new(2)));

		let doc = manager.get(&path).expect("document should exist").clone();
		assert_eq!(doc.text(), "{ a: 2 }");
		assert_eq!(doc.version(), DocVersion::new(2));
	}

	#[test]
	fn test_close_moves_to_cache() {
		let manager = DocumentManager::new(test_global_store());
		let path = test_path("test");

		manager.open(path.clone(), "{ a: 1 }".to_string(), DocVersion::new(1));
		manager.close(&path);

		assert!(!manager.is_open(&path));
		// But the text should still be available from the closed cache.
		assert_eq!(manager.get_text(&path), Some("{ a: 1 }".to_string()));
	}

	#[test]
	fn test_reopen_clears_from_cache() {
		let manager = DocumentManager::new(test_global_store());
		let path = test_path("test");

		manager.open(path.clone(), "{ a: 1 }".to_string(), DocVersion::new(1));
		manager.close(&path);

		// Reopen with new content; the cached copy must not win.
		manager.open(path.clone(), "{ a: 2 }".to_string(), DocVersion::new(2));

		let doc = manager.get(&path).expect("document should exist").clone();
		assert_eq!(doc.text(), "{ a: 2 }");
	}

	#[test]
	fn test_multiple_documents() {
		let manager = DocumentManager::new(test_global_store());

		// Open multiple documents sequentially
		for i in 0..10 {
			let path = test_path(&format!("test{i}"));
			manager.open(path.clone(), format!("{{ a: {i} }}"), DocVersion::new(i));
			assert!(manager.is_open(&path));
		}

		assert_eq!(manager.open_count(), 10);
	}

	#[test]
	fn test_analysis_caching() {
		let manager = DocumentManager::new(test_global_store());
		let path = test_path("test");

		manager.open(
			path.clone(),
			"local x = 1; x".to_string(),
			DocVersion::new(1),
		);

		// First call computes analysis
		let analysis1 = manager.get_analysis(&path).expect("analysis should exist");

		// Second call returns cached (same Arc pointer)
		let analysis2 = manager.get_analysis(&path).expect("analysis should exist");
		assert!(
			Arc::ptr_eq(&analysis1, &analysis2),
			"should return cached analysis"
		);
	}

	#[test]
	fn test_analysis_cache_invalidation() {
		let manager = DocumentManager::new(test_global_store());
		let path = test_path("test");

		manager.open(
			path.clone(),
			"local x = 1; x".to_string(),
			DocVersion::new(1),
		);

		let analysis1 = manager.get_analysis(&path).expect("analysis should exist");

		// Invalidate the cache
		manager.invalidate_analysis(&path);

		// Next call recomputes (different Arc pointer)
		let analysis2 = manager.get_analysis(&path).expect("analysis should exist");
		assert!(
			!Arc::ptr_eq(&analysis1, &analysis2),
			"should recompute after invalidation"
		);
	}

	#[test]
	fn test_analysis_cache_version_mismatch() {
		let manager = DocumentManager::new(test_global_store());
		let path = test_path("test");

		manager.open(
			path.clone(),
			"local x = 1; x".to_string(),
			DocVersion::new(1),
		);

		let analysis1 = manager.get_analysis(&path).expect("analysis should exist");

		// Update the document (changes version)
		manager.update(&path, "local y = 2; y".to_string(), DocVersion::new(2));

		// Next 
call recomputes due to version mismatch + let analysis2 = manager.get_analysis(&path).expect("analysis should exist"); + assert!( + !Arc::ptr_eq(&analysis1, &analysis2), + "should recompute after version change" + ); + } +} diff --git a/crates/jrsonnet-lsp-inference/src/object.rs b/crates/jrsonnet-lsp-inference/src/object.rs new file mode 100644 index 00000000..5f2eebc1 --- /dev/null +++ b/crates/jrsonnet-lsp-inference/src/object.rs @@ -0,0 +1,249 @@ +//! Object type inference for Jsonnet expressions. + +use jrsonnet_lsp_types::{ + FieldDefInterned, FieldVis, FunctionData, ObjectData, ReturnSpec, Ty, TyData, +}; +use jrsonnet_rowan_parser::nodes::{Member, ObjBody}; + +use crate::{ + env::TypeEnv, + expr::infer_expr_ty, + helpers::{convert_visibility_ty, extract_field_name, extract_params_with_default_types_ty}, +}; + +/// Infer the type of an object body, returning interned `Ty`. +pub fn infer_object_type_ty(body: Option<&ObjBody>, env: &mut TypeEnv) -> Ty { + infer_object_type_with_super_ty(body, env, None) +} + +/// Infer the type of an object body with an optional super type, returning interned `Ty`. +/// +/// The `super_type` is used for object extension expressions like `base { ... }`. 
+pub fn infer_object_type_with_super_ty( + body: Option<&ObjBody>, + env: &mut TypeEnv, + super_type: Option, +) -> Ty { + let Some(body) = body else { + return env.store_mut().object(ObjectData::empty()); + }; + + match body { + ObjBody::ObjBodyMemberList(members) => { + // Pass 1: Collect all field names with preliminary types + // This creates a "skeleton" of the object for self references + let mut preliminary_fields: Vec<(String, FieldVis)> = Vec::new(); + + for member in members.members() { + match &member { + Member::MemberFieldNormal(field) => { + if let Some(field_name) = field.field_name() { + if let Some(name_str) = extract_field_name(&field_name) { + let visibility = convert_visibility_ty(field.visibility()); + preliminary_fields.push((name_str, visibility)); + } + } + } + Member::MemberFieldMethod(method) => { + if let Some(field_name) = method.field_name() { + if let Some(name_str) = extract_field_name(&field_name) { + let visibility = convert_visibility_ty(method.visibility()); + preliminary_fields.push((name_str, visibility)); + } + } + } + _ => {} + } + } + + // Build preliminary object data (all fields have Any type initially) + let mut preliminary_obj_fields: Vec<(String, FieldDefInterned)> = preliminary_fields + .iter() + .map(|(name, vis)| { + ( + name.clone(), + FieldDefInterned { + ty: Ty::ANY, + required: true, + visibility: *vis, + }, + ) + }) + .collect(); + + // If we have a super type, merge its fields into the preliminary type + if let Some(super_ty) = super_type { + if let TyData::Object(super_obj) = env.store_mut().get(super_ty) { + for (name, field_def) in &super_obj.fields { + if !preliminary_obj_fields.iter().any(|(n, _)| n == name) { + preliminary_obj_fields.push((name.clone(), field_def.clone())); + } + } + } + } + + // Sort for canonical form + preliminary_obj_fields.sort_by(|(a, _), (b, _)| a.cmp(b)); + + let preliminary_obj = ObjectData { + fields: preliminary_obj_fields, + has_unknown: false, + }; + let preliminary_ty = 
env.store_mut().object(preliminary_obj); + + // Push object context for self references + env.push_object_context_ty(preliminary_ty, super_type); + + // Pass 2: Infer actual field types with self available + let mut final_fields: Vec<(String, FieldDefInterned)> = Vec::new(); + + for member in members.members() { + match member { + Member::MemberFieldNormal(field) => { + if let Some(field_name) = field.field_name() { + if let Some(name_str) = extract_field_name(&field_name) { + let field_ty = field + .expr() + .map(|e| infer_expr_ty(&e, env)) + .unwrap_or(Ty::ANY); + let visibility = convert_visibility_ty(field.visibility()); + final_fields.push(( + name_str, + FieldDefInterned { + ty: field_ty, + required: true, + visibility, + }, + )); + } + } + } + Member::MemberFieldMethod(method) => { + if let Some(field_name) = method.field_name() { + if let Some(name_str) = extract_field_name(&field_name) { + // Methods are functions - infer from params + let params = method + .params_desc() + .map(|p| extract_params_with_default_types_ty(&p, env)) + .unwrap_or_default(); + let visibility = convert_visibility_ty(method.visibility()); + + let func_ty = env.store_mut().function(FunctionData { + params, + return_spec: ReturnSpec::Fixed(Ty::ANY), + variadic: false, + }); + + final_fields.push(( + name_str, + FieldDefInterned { + ty: func_ty, + required: true, + visibility, + }, + )); + } + } + } + _ => {} + } + } + + // Pop object context + env.pop_object_context(); + + // Sort for canonical form + final_fields.sort_by(|(a, _), (b, _)| a.cmp(b)); + + env.store_mut().object(ObjectData { + fields: final_fields, + has_unknown: false, + }) + } + ObjBody::ObjBodyComp(_) => { + // Object comprehension has unknown fields + env.store_mut().object(ObjectData::open()) + } + } +} + +#[cfg(test)] +mod tests { + use std::collections::BTreeSet; + + use jrsonnet_lsp_document::{DocVersion, Document}; + use jrsonnet_lsp_types::{FunctionData, ObjectData, TyData}; + + use super::*; + use 
crate::expr::infer_document_type_ty; + + fn infer_doc(code: &str) -> (Ty, TypeEnv) { + let doc = Document::new(code.to_string(), DocVersion::new(1)); + infer_document_type_ty(&doc) + } + + fn try_object(env: &TypeEnv, ty: Ty) -> Option { + match env.store().get(ty) { + TyData::Object(obj) => Some(obj), + _ => None, + } + } + + fn assert_fields_ty(obj: &ObjectData, expected: &[&str]) { + let actual: BTreeSet<_> = obj.fields.iter().map(|(name, _)| name.as_str()).collect(); + let expected: BTreeSet<_> = expected.iter().copied().collect(); + assert_eq!(actual, expected, "Field mismatch"); + } + + fn get_field_ty<'a>(obj: &'a ObjectData, name: &str) -> Option<&'a FieldDefInterned> { + obj.fields + .iter() + .find(|(n, _)| n == name) + .map(|(_, def)| def) + } + + #[test] + fn test_object_comprehension_produces_open_object() { + // Object comprehensions have dynamic keys, so they produce open objects + let (ty, env) = infer_doc("{ [k]: v for k in ['a', 'b'] for v in [1, 2] }"); + let obj = try_object(&env, ty).expect("expected object"); + assert!( + obj.has_unknown, + "Object comprehension should produce open object" + ); + } + + #[test] + fn test_regular_object_is_closed() { + // Regular objects with explicit fields are closed + let (ty, env) = infer_doc("{ a: 1, b: 2 }"); + let obj = try_object(&env, ty).expect("expected object"); + assert!(!obj.has_unknown, "Regular object should be closed"); + assert_fields_ty(&obj, &["a", "b"]); + } + + fn try_function(env: &TypeEnv, ty: Ty) -> Option { + match env.store().get(ty) { + TyData::Function(func) => Some(func), + _ => None, + } + } + + #[test] + fn test_method_field_inference() { + // Methods should be inferred as functions + let (ty, env) = infer_doc("{ greet(name): 'Hello, ' + name }"); + let obj = try_object(&env, ty).expect("expected object"); + assert_fields_ty(&obj, &["greet"]); + + let field_def = get_field_ty(&obj, "greet").expect("Should have 'greet' field"); + let func = try_function(&env, 
field_def.ty).expect("expected function"); + assert_eq!( + func.params + .iter() + .map(|p| p.name.as_str()) + .collect::>(), + vec!["name"] + ); + } +} diff --git a/crates/jrsonnet-lsp-inference/src/poly.rs b/crates/jrsonnet-lsp-inference/src/poly.rs new file mode 100644 index 00000000..c09b65ee --- /dev/null +++ b/crates/jrsonnet-lsp-inference/src/poly.rs @@ -0,0 +1,406 @@ +//! Polymorphic type instantiation for function calls. + +use jrsonnet_lsp_types::{ReturnSpec, Ty, TyData, TyStore, TySubstitution}; + +/// Instantiate a polymorphic function call (Ty-native version). +/// +/// When calling a function that contains type variables in its parameter or return types, +/// this function builds a substitution by matching formal parameter types with actual +/// argument types, then applies that substitution to the return type. +/// +/// For example, if we have: +/// - Function type: `(T) -> Array` where T is a type variable +/// - Actual argument type: `Number` +/// +/// This function will: +/// 1. Build substitution: `{T -> Number}` +/// 2. Apply to return type: `Array` -> `Array` +/// 3. Return `Array` +/// +/// If the function has no type variables, the standard return type resolution is used. 
+pub fn instantiate_function_call_ty(func_ty: Ty, arg_types: &[Ty], store: &mut TyStore) -> Ty { + // Check if the function type has any type variables + if !store.has_type_vars(func_ty) { + // No type variables - return the fixed return type or ANY + if let TyData::Function(func_data) = store.get(func_ty).clone() { + return match &func_data.return_spec { + ReturnSpec::Fixed(ret) => *ret, + _ => Ty::ANY, // Dynamic return specs without type vars + }; + } + return Ty::ANY; + } + + // Get the function data + let func_data = match store.get(func_ty).clone() { + TyData::Function(f) => f, + _ => return Ty::ANY, + }; + + // Build a substitution by matching parameter types with argument types + let mut substitution = TySubstitution::new(); + + for (param, &arg_ty) in func_data.params.iter().zip(arg_types.iter()) { + collect_type_var_substitutions_ty(param.ty, arg_ty, &mut substitution, store); + } + + // Apply the substitution to the return type + + match &func_data.return_spec { + ReturnSpec::Fixed(ret) => store.apply_substitution(*ret, &substitution), + _ => Ty::ANY, // Dynamic return specs not yet supported with substitution + } +} + +/// Collect type variable substitutions by matching a pattern type against a concrete type (Ty-native). +/// +/// This is a simple unification that collects substitutions for type variables. +/// It doesn't do full bidirectional unification - it just assigns concrete types +/// to type variables when the pattern contains a variable and the target is concrete. +pub fn collect_type_var_substitutions_ty( + pattern: Ty, + target: Ty, + substitution: &mut TySubstitution, + store: &TyStore, +) { + // Clone data to avoid borrow issues + let pattern_data = store.get(pattern).clone(); + let target_data = store.get(target).clone(); + + match (&pattern_data, &target_data) { + // Type variable matches anything - record the substitution + (TyData::TypeVar { id, constraints }, _) + if !matches!(target_data, TyData::TypeVar { .. 
}) => + { + // Check that the target satisfies constraints + if constraints.satisfied_by(target, store) { + // Occurs check: don't substitute if it creates infinite type + if !TySubstitution::occurs_in(*id, target, store) { + substitution.insert(*id, target); + } + } + } + + // Array types - recurse into element types + (TyData::Array { elem: pat_elem, .. }, TyData::Array { elem: tgt_elem, .. }) => { + collect_type_var_substitutions_ty(*pat_elem, *tgt_elem, substitution, store); + } + + // Tuple types - match element-wise + (TyData::Tuple { elems: pat_elems }, TyData::Tuple { elems: tgt_elems }) => { + for (pe, te) in pat_elems.iter().zip(tgt_elems.iter()) { + collect_type_var_substitutions_ty(*pe, *te, substitution, store); + } + } + + // Also handle Array vs Tuple (common case: Array matched against [1, 2, 3]) + (TyData::Array { elem: pat_elem, .. }, TyData::Tuple { elems: tgt_elems }) + if !tgt_elems.is_empty() => + { + // Use the union of tuple element types (need mutable store for this) + // For now, just match against first element as approximation + collect_type_var_substitutions_ty(*pat_elem, tgt_elems[0], substitution, store); + } + + // Object types - match field types + (TyData::Object(pat_obj), TyData::Object(tgt_obj)) => { + for (field_name, pat_field) in &pat_obj.fields { + if let Some(tgt_field) = tgt_obj.fields.iter().find(|(n, _)| n == field_name) { + collect_type_var_substitutions_ty( + pat_field.ty, + tgt_field.1.ty, + substitution, + store, + ); + } + } + } + + // AttrsOf types - match value types + (TyData::AttrsOf { value: pat_val }, TyData::AttrsOf { value: tgt_val }) => { + collect_type_var_substitutions_ty(*pat_val, *tgt_val, substitution, store); + } + + // Function types - match param and return types + (TyData::Function(pat_fn), TyData::Function(tgt_fn)) => { + // Match parameter types + for (pp, tp) in pat_fn.params.iter().zip(tgt_fn.params.iter()) { + collect_type_var_substitutions_ty(pp.ty, tp.ty, substitution, store); + } + // Match 
return types + if let (ReturnSpec::Fixed(pat_ret), ReturnSpec::Fixed(tgt_ret)) = + (&pat_fn.return_spec, &tgt_fn.return_spec) + { + collect_type_var_substitutions_ty(*pat_ret, *tgt_ret, substitution, store); + } + } + + // Union types - try to match with each variant + (TyData::Union(pat_variants), _) => { + for &pv in pat_variants { + collect_type_var_substitutions_ty(pv, target, substitution, store); + } + } + + // All other cases - no substitution to collect + _ => {} + } +} + +#[cfg(test)] +mod tests { + use jrsonnet_lsp_types::{FunctionData, ParamInterned, TyConstraints, TyVarId}; + + use super::*; + + #[test] + fn test_instantiate_ty_non_polymorphic() { + let mut store = TyStore::new(); + + // Create function(x: Number) -> String + let func_ty = store.intern(TyData::Function(FunctionData { + params: vec![ParamInterned { + name: "x".to_string(), + ty: Ty::NUMBER, + has_default: false, + }], + return_spec: ReturnSpec::Fixed(Ty::STRING), + variadic: false, + })); + + let result = instantiate_function_call_ty(func_ty, &[Ty::NUMBER], &mut store); + assert_eq!(result, Ty::STRING); + } + + #[test] + fn test_instantiate_ty_identity_function() { + let mut store = TyStore::new(); + + // Create type variable T + let t_id = TyVarId::fresh(); + let t_var = store.type_var(t_id, TyConstraints::none()); + + // Create function(x: T) -> T + let func_ty = store.intern(TyData::Function(FunctionData { + params: vec![ParamInterned { + name: "x".to_string(), + ty: t_var, + has_default: false, + }], + return_spec: ReturnSpec::Fixed(t_var), + variadic: false, + })); + + let result = instantiate_function_call_ty(func_ty, &[Ty::NUMBER], &mut store); + assert_eq!(result, Ty::NUMBER); + } + + #[test] + fn test_instantiate_ty_array_element() { + let mut store = TyStore::new(); + + // Create type variable T + let t_id = TyVarId::fresh(); + let t_var = store.type_var(t_id, TyConstraints::none()); + + // Create Array + let array_t = store.array(t_var); + + // Create function(arr: Array) -> T 
+ let func_ty = store.intern(TyData::Function(FunctionData { + params: vec![ParamInterned { + name: "arr".to_string(), + ty: array_t, + has_default: false, + }], + return_spec: ReturnSpec::Fixed(t_var), + variadic: false, + })); + + // Call with Array + let array_string = store.array(Ty::STRING); + let result = instantiate_function_call_ty(func_ty, &[array_string], &mut store); + assert_eq!(result, Ty::STRING); + } + + #[test] + fn test_instantiate_ty_map_function() { + let mut store = TyStore::new(); + + // Create type variables T and U + let t_id = TyVarId::fresh(); + let u_id = TyVarId::fresh(); + let t_var = store.type_var(t_id, TyConstraints::none()); + let u_var = store.type_var(u_id, TyConstraints::none()); + + // Create callback type: (T) -> U + let callback_ty = store.intern(TyData::Function(FunctionData { + params: vec![ParamInterned { + name: "x".to_string(), + ty: t_var, + has_default: false, + }], + return_spec: ReturnSpec::Fixed(u_var), + variadic: false, + })); + + // Create Array and Array + let array_t = store.array(t_var); + let array_u = store.array(u_var); + + // Create function(fn: (T) -> U, arr: Array) -> Array + let func_ty = store.intern(TyData::Function(FunctionData { + params: vec![ + ParamInterned { + name: "fn".to_string(), + ty: callback_ty, + has_default: false, + }, + ParamInterned { + name: "arr".to_string(), + ty: array_t, + has_default: false, + }, + ], + return_spec: ReturnSpec::Fixed(array_u), + variadic: false, + })); + + // Create concrete callback: (Number) -> String + let concrete_callback = store.intern(TyData::Function(FunctionData { + params: vec![ParamInterned { + name: "x".to_string(), + ty: Ty::NUMBER, + has_default: false, + }], + return_spec: ReturnSpec::Fixed(Ty::STRING), + variadic: false, + })); + + // Create Array + let array_number = store.array(Ty::NUMBER); + + // Instantiate + let result = + instantiate_function_call_ty(func_ty, &[concrete_callback, array_number], &mut store); + + // Should return Array + let 
expected = store.array(Ty::STRING); + assert_eq!(result, expected); + } + + #[test] + fn test_instantiate_ty_nested_arrays() { + let mut store = TyStore::new(); + + // Create type variable T + let t_id = TyVarId::fresh(); + let t_var = store.type_var(t_id, TyConstraints::none()); + + // Create Array> + let array_t = store.array(t_var); + let array_array_t = store.array(array_t); + + // Create function(arr: Array>) -> Array + let func_ty = store.intern(TyData::Function(FunctionData { + params: vec![ParamInterned { + name: "arr".to_string(), + ty: array_array_t, + has_default: false, + }], + return_spec: ReturnSpec::Fixed(array_t), + variadic: false, + })); + + // Call with Array> + let array_string = store.array(Ty::STRING); + let array_array_string = store.array(array_string); + let result = instantiate_function_call_ty(func_ty, &[array_array_string], &mut store); + + // Should return Array + let expected = store.array(Ty::STRING); + assert_eq!(result, expected); + } + + #[test] + fn test_ty_substitution_basic() { + let mut store = TyStore::new(); + + // Create type variable T + let t_id = TyVarId::fresh(); + let t_var = store.type_var(t_id, TyConstraints::none()); + + // Create substitution T -> Number + let mut sub = TySubstitution::new(); + collect_type_var_substitutions_ty(t_var, Ty::NUMBER, &mut sub, &store); + + assert_eq!(sub.get(t_id), Some(Ty::NUMBER)); + } + + #[test] + fn test_ty_substitution_array() { + let mut store = TyStore::new(); + + // Create type variable T and Array + let t_id = TyVarId::fresh(); + let t_var = store.type_var(t_id, TyConstraints::none()); + let array_t = store.array(t_var); + + // Create Array + let array_string = store.array(Ty::STRING); + + // Collect substitutions + let mut sub = TySubstitution::new(); + collect_type_var_substitutions_ty(array_t, array_string, &mut sub, &store); + + assert_eq!(sub.get(t_id), Some(Ty::STRING)); + } + + #[test] + fn test_ty_substitution_with_constraints() { + let mut store = TyStore::new(); + + 
// Create type variable T with indexable constraint + let t_id = TyVarId::fresh(); + let t_var = store.type_var( + t_id, + TyConstraints { + must_be_indexable: true, + ..TyConstraints::none() + }, + ); + + // Number is not indexable - should not substitute + let mut sub = TySubstitution::new(); + collect_type_var_substitutions_ty(t_var, Ty::NUMBER, &mut sub, &store); + assert_eq!(sub.get(t_id), None); + + // Array IS indexable - should substitute + let array_num = store.array(Ty::NUMBER); + collect_type_var_substitutions_ty(t_var, array_num, &mut sub, &store); + assert_eq!(sub.get(t_id), Some(array_num)); + } + + #[test] + fn test_ty_apply_substitution() { + let mut store = TyStore::new(); + + // Create type variable T + let t_id = TyVarId::fresh(); + let t_var = store.type_var(t_id, TyConstraints::none()); + + // Create Array + let array_t = store.array(t_var); + + // Create substitution T -> Number + let mut sub = TySubstitution::new(); + sub.insert(t_id, Ty::NUMBER); + + // Apply substitution + let result = store.apply_substitution(array_t, &sub); + + // Should be Array + let expected = store.array(Ty::NUMBER); + assert_eq!(result, expected); + } +} diff --git a/crates/jrsonnet-lsp-inference/src/provider.rs b/crates/jrsonnet-lsp-inference/src/provider.rs new file mode 100644 index 00000000..2af8a168 --- /dev/null +++ b/crates/jrsonnet-lsp-inference/src/provider.rs @@ -0,0 +1,221 @@ +//! Type provider for cross-file type analysis. +//! +//! Provides type analysis with proper dependency handling, ensuring that +//! imports have their types resolved before analyzing the target file. + +use std::sync::Arc; + +use jrsonnet_lsp_document::{CanonicalPath, Document}; +use jrsonnet_lsp_import::ImportGraph; +use jrsonnet_lsp_types::GlobalTyStore; +use parking_lot::RwLock; + +use crate::{ + analysis::TypeAnalysis, + type_cache::{analyze_and_cache, CachingImportResolver, SharedTypeCache}, +}; + +/// Trait for looking up documents by path. 
+/// +/// This allows `TypeProvider` to work with different document storage +/// implementations (e.g., `DocumentManager`, `DashMap`). +pub trait DocumentSource { + /// Get a document by path, if it exists. + fn get_document(&self, path: &CanonicalPath) -> Option; +} + +/// Provides type analysis with proper dependency handling. +/// +/// When analyzing a file, ensures all its imports are analyzed first +/// (in topological order) so that import types are available. +/// +/// # Example +/// +/// ```ignore +/// let provider = TypeProvider::new(type_cache, import_graph, global_types); +/// +/// // This ensures all dependencies are analyzed before the target file +/// let analysis = provider.analyze(&path, &doc, &doc_manager); +/// let ty = analysis.type_at_position(offset); +/// ``` +pub struct TypeProvider { + /// Type cache for storing analyzed types. + type_cache: SharedTypeCache, + /// Import graph for dependency information. + import_graph: Arc>, + /// Global type store. + global_types: Arc, +} + +impl TypeProvider { + /// Create a new type provider. + pub fn new( + type_cache: SharedTypeCache, + import_graph: Arc>, + global_types: Arc, + ) -> Self { + Self { + type_cache, + import_graph, + global_types, + } + } + + /// Analyze a file with all its dependencies pre-analyzed. + /// + /// Uses topological ordering to ensure dependencies are analyzed first, + /// so that import types are available when analyzing the target file. + /// + /// The `doc_source` parameter provides access to documents for dependency analysis. 
+ pub fn analyze( + &self, + path: &CanonicalPath, + doc: &Document, + doc_source: &D, + ) -> TypeAnalysis { + // Ensure dependencies are analyzed first (in topological order) + self.ensure_dependencies_analyzed(path, doc_source); + + // Analyze with import resolution + let import_resolver = Arc::new(CachingImportResolver::new( + path.as_path(), + Arc::clone(&self.type_cache), + )); + + TypeAnalysis::analyze_with_resolver(doc, Arc::clone(&self.global_types), import_resolver) + } + + /// Ensure all dependencies of a file are analyzed and cached. + /// + /// Uses topological processing to analyze dependencies before dependents. + fn ensure_dependencies_analyzed( + &self, + path: &CanonicalPath, + doc_source: &D, + ) { + let graph = self.import_graph.read(); + + // process_with_dependencies processes in "leaves first" order, + // meaning dependencies are analyzed before dependents + graph.process_with_dependencies(path, |dep_path| { + if let Some(doc) = doc_source.get_document(dep_path) { + analyze_and_cache(dep_path, &doc, &self.type_cache); + } + }); + } + + /// Get the global type store. + pub fn global_types(&self) -> &Arc { + &self.global_types + } + + /// Get the type cache. + pub fn type_cache(&self) -> &SharedTypeCache { + &self.type_cache + } +} + +#[cfg(test)] +mod tests { + use dashmap::DashMap; + use jrsonnet_lsp_document::{CanonicalPath, DocVersion}; + use jrsonnet_lsp_types::Ty; + + use super::*; + use crate::type_cache::new_shared_cache; + + /// Test document source backed by a DashMap. 
+ struct TestDocSource { + docs: DashMap, + } + + impl TestDocSource { + fn new() -> Self { + Self { + docs: DashMap::new(), + } + } + + fn insert(&self, path: CanonicalPath, doc: Document) { + self.docs.insert(path, doc); + } + } + + impl DocumentSource for TestDocSource { + fn get_document(&self, path: &CanonicalPath) -> Option { + self.docs.get(path).map(|r| r.clone()) + } + } + + fn test_path(name: &str) -> CanonicalPath { + CanonicalPath::new(std::path::PathBuf::from(format!("/test/{name}"))) + } + + #[test] + fn test_provider_analyze_simple() { + let global_types = Arc::new(GlobalTyStore::new()); + let type_cache = new_shared_cache(Arc::clone(&global_types)); + let import_graph = Arc::new(RwLock::new(ImportGraph::new())); + let doc_source = TestDocSource::new(); + + let provider = TypeProvider::new(type_cache, import_graph, global_types); + + // Add a simple document + let path = test_path("simple.jsonnet"); + let doc = Document::new("42".to_string(), DocVersion(1)); + doc_source.insert(path.clone(), doc.clone()); + + // Analyze + let analysis = provider.analyze(&path, &doc, &doc_source); + let ty = analysis.document_type(); + + assert_eq!(ty, Ty::NUMBER); + } + + #[test] + fn test_provider_ensures_dependencies_analyzed() { + let global_types = Arc::new(GlobalTyStore::new()); + let type_cache = new_shared_cache(Arc::clone(&global_types)); + let import_graph = Arc::new(RwLock::new(ImportGraph::new())); + let doc_source = TestDocSource::new(); + + // Add imported file + let dep_path = test_path("dep.jsonnet"); + let dep_doc = Document::new("{ value: 42 }".to_string(), DocVersion(1)); + doc_source.insert(dep_path.clone(), dep_doc); + + // Add main file that imports dep + let main_path = test_path("main.jsonnet"); + let main_doc = Document::new("42".to_string(), DocVersion(1)); + doc_source.insert(main_path.clone(), main_doc.clone()); + + // Update import graph to show main imports dep + { + let mut graph = import_graph.write(); + graph.update_file_with_entries( 
+ &main_path, + vec![jrsonnet_lsp_import::ImportEntry { + import_path: "dep.jsonnet".to_string(), + resolved_path: Some(dep_path.clone()), + binding_name: None, + }], + ); + } + + let provider = TypeProvider::new( + Arc::clone(&type_cache), + Arc::clone(&import_graph), + Arc::clone(&global_types), + ); + + // Before analyzing main, dep should not be in the cache + assert!(type_cache.read().get(&dep_path).is_none()); + + // Analyze main - this should trigger dependency analysis + let _analysis = provider.analyze(&main_path, &main_doc, &doc_source); + + // After analyzing main, dep should be in the cache + // (because ensure_dependencies_analyzed processes it first) + assert!(type_cache.read().get(&dep_path).is_some()); + } +} diff --git a/crates/jrsonnet-lsp-inference/src/suggestions.rs b/crates/jrsonnet-lsp-inference/src/suggestions.rs new file mode 100644 index 00000000..fc8988e5 --- /dev/null +++ b/crates/jrsonnet-lsp-inference/src/suggestions.rs @@ -0,0 +1,115 @@ +//! "Did you mean?" suggestions for error messages. +//! +//! Uses string similarity to suggest corrections for typos in field names, +//! variable names, and common mistakes. + +use strsim::jaro_winkler; + +/// Minimum similarity threshold for suggestions (0.0 to 1.0). +/// Jaro-Winkler scores above this are considered "similar enough" to suggest. +const SIMILARITY_THRESHOLD: f64 = 0.8; + +/// Find the best matching name from candidates. +/// +/// Returns the candidate with the highest similarity score above the threshold, +/// or `None` if no candidate is similar enough. 
+pub fn find_best_match<'a>( + name: &str, + candidates: impl IntoIterator, +) -> Option<&'a str> { + let mut best: Option<(&str, f64)> = None; + + for candidate in candidates { + let score = jaro_winkler(name, candidate); + if score >= SIMILARITY_THRESHOLD { + match best { + None => best = Some((candidate, score)), + Some((_, best_score)) if score > best_score => best = Some((candidate, score)), + _ => {} + } + } + } + + best.map(|(name, _)| name) +} + +/// Find all similar names from candidates, sorted by similarity (best first). +pub fn find_similar<'a>(name: &str, candidates: impl IntoIterator) -> Vec<&'a str> { + let mut matches: Vec<_> = candidates + .into_iter() + .map(|c| (c, jaro_winkler(name, c))) + .filter(|(_, score)| *score >= SIMILARITY_THRESHOLD) + .collect(); + + matches.sort_by(|a, b| b.1.partial_cmp(&a.1).unwrap_or(std::cmp::Ordering::Equal)); + matches.into_iter().map(|(name, _)| name).collect() +} + +/// Common mistakes mapping for quick corrections. +/// +/// Maps common typos/mistakes to their correct Jsonnet equivalents. 
+pub fn suggest_common_mistake(name: &str) -> Option<&'static str> { + match name { + // Boolean literals (from other languages) + "True" | "TRUE" => Some("true"), + "False" | "FALSE" => Some("false"), + + // Null variants + "None" | "nil" | "undefined" | "NULL" | "Null" => Some("null"), + + // Function keywords from other languages + "func" | "fn" | "def" | "lambda" => Some("function"), + + // Import variants + "require" | "include" => Some("import"), + + // Self reference + "this" => Some("self"), + + _ => None, + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_find_best_match_exact() { + let candidates = ["foo", "bar", "baz"]; + assert_eq!(find_best_match("foo", candidates), Some("foo")); + } + + #[test] + fn test_find_best_match_typo() { + let candidates = ["length", "format", "type"]; + assert_eq!(find_best_match("lenght", candidates), Some("length")); + } + + #[test] + fn test_find_best_match_case_typo() { + let candidates = ["objectHas", "objectKeys", "objectValues"]; + assert_eq!(find_best_match("objecthas", candidates), Some("objectHas")); + } + + #[test] + fn test_find_best_match_no_match() { + let candidates = ["foo", "bar", "baz"]; + assert_eq!(find_best_match("completely_different", candidates), None); + } + + #[test] + fn test_find_similar_multiple() { + let candidates = ["objectHas", "objectKeys", "objectValues", "object"]; + let similar = find_similar("objectHs", candidates); + assert_eq!(similar.first(), Some(&"objectHas")); + } + + #[test] + fn test_common_mistakes() { + assert_eq!(suggest_common_mistake("True"), Some("true")); + assert_eq!(suggest_common_mistake("None"), Some("null")); + assert_eq!(suggest_common_mistake("this"), Some("self")); + assert_eq!(suggest_common_mistake("valid"), None); + } +} diff --git a/crates/jrsonnet-lsp-inference/src/type_cache.rs b/crates/jrsonnet-lsp-inference/src/type_cache.rs new file mode 100644 index 00000000..aaca214c --- /dev/null +++ 
b/crates/jrsonnet-lsp-inference/src/type_cache.rs @@ -0,0 +1,438 @@ +//! Cross-file type caching for improved import type resolution. +//! +//! This module provides a cache of inferred types for exported values (top-level expressions) +//! across files. When an import expression is encountered, we can look up the cached type +//! instead of returning `Any`. +//! +//! Uses an LRU cache to bound memory usage in large codebases. +//! +//! Types are stored in the shared [`GlobalTyStore`], enabling cross-file type sharing. + +use std::{num::NonZeroUsize, sync::Arc}; + +use jrsonnet_lsp_document::{CanonicalPath, Document, DEFAULT_TYPE_CACHE_CAPACITY}; +use jrsonnet_lsp_types::{GlobalTyStore, Ty}; +use lru::LruCache; +use parking_lot::RwLock; + +use crate::analysis::TypeAnalysis; + +/// Cache of top-level types for documents. +/// +/// This stores the inferred type of each file's top-level expression, +/// enabling better type inference for imports. +/// +/// Uses an LRU eviction policy to bound memory usage. Types are stored +/// in the shared [`GlobalTyStore`], enabling cross-file type sharing. +#[derive(Debug)] +pub struct TypeCache { + /// LRU cache from file path to its cached type. + cache: LruCache, + /// Global type store for shared types. + global_types: Arc, +} + +/// A cached type entry with metadata. +#[derive(Debug, Clone)] +struct CachedType { + /// The interned type for this file's top-level expression. + ty: Ty, + /// The document version when this type was cached. + version: i32, +} + +impl TypeCache { + /// Create a new empty type cache with default capacity. + pub fn new(global_types: Arc) -> Self { + Self::with_capacity(global_types, DEFAULT_TYPE_CACHE_CAPACITY) + } + + /// Create a new type cache with the specified capacity. 
+ pub fn with_capacity(global_types: Arc, capacity: usize) -> Self { + let capacity = NonZeroUsize::new(capacity).unwrap_or(NonZeroUsize::MIN); + Self { + cache: LruCache::new(capacity), + global_types, + } + } + + /// Get a reference to the global type store. + pub fn global_types(&self) -> &Arc { + &self.global_types + } + + /// Get the cached type for a file, if available. + /// + /// Uses `peek` to avoid updating LRU order for read-only lookups. + pub fn get(&self, path: &CanonicalPath) -> Option { + self.cache.peek(path).map(|c| c.ty) + } + + /// Get the cached type for a file and update LRU order. + /// + /// Use this when the lookup indicates actual usage of the cached type. + pub fn get_and_touch(&mut self, path: &CanonicalPath) -> Option { + self.cache.get(path).map(|c| c.ty) + } + + /// Update the cache for a file. + /// + /// Note: The Ty must be from a compatible TyStore or be a well-known constant. + pub fn update(&mut self, path: &CanonicalPath, ty: Ty, version: i32) { + self.cache.put(path.clone(), CachedType { ty, version }); + } + + /// Invalidate the cache for a file. + pub fn invalidate(&mut self, path: &CanonicalPath) { + self.cache.pop(path); + } + + /// Invalidate the cache for multiple files. + pub fn invalidate_many(&mut self, paths: impl IntoIterator) { + for path in paths { + self.cache.pop(&path); + } + } + + /// Check if a file's cache is up to date with the given version. + pub fn is_up_to_date(&self, path: &CanonicalPath, version: i32) -> bool { + self.cache + .peek(path) + .map(|c| c.version == version) + .unwrap_or(false) + } + + /// Get the number of cached entries. + pub fn len(&self) -> usize { + self.cache.len() + } + + /// Check if the cache is empty. + pub fn is_empty(&self) -> bool { + self.cache.is_empty() + } + + /// Clear all cached entries. + pub fn clear(&mut self) { + self.cache.clear(); + } +} + +/// Thread-safe shared type cache. 
+pub type SharedTypeCache = Arc>; + +/// Create a new shared type cache with the given global type store. +pub fn new_shared_cache(global_types: Arc) -> SharedTypeCache { + Arc::new(RwLock::new(TypeCache::new(global_types))) +} + +/// Analyze a document and update the type cache. +/// +/// Returns the inferred top-level type as a `Ty` from the global store. +pub fn analyze_and_cache(path: &CanonicalPath, doc: &Document, cache: &SharedTypeCache) -> Ty { + let version = doc.version().0; + + // Check if we already have a cached type for this version + { + let read_cache = cache.read(); + if read_cache.is_up_to_date(path, version) { + if let Some(ty) = read_cache.get(path) { + return ty; + } + } + } + + // Get the global types from the cache + let global_types = { + let read_cache = cache.read(); + Arc::clone(read_cache.global_types()) + }; + + // Create an import resolver for cross-file type resolution + let import_resolver = Arc::new(CachingImportResolver::new( + path.as_path(), + Arc::clone(cache), + )); + + // Infer the type using the global store and import resolver + let analysis = TypeAnalysis::analyze_with_resolver(doc, global_types, import_resolver); + let ty = analysis.document_type(); + + // Cache the type (it's already in the global store) + { + let mut write_cache = cache.write(); + write_cache.update(path, ty, version); + } + + ty +} + +/// Import resolver that looks up types from the type cache. +/// +/// Resolves relative imports based on the base document's directory. +#[derive(Debug)] +pub struct CachingImportResolver { + /// Base directory for resolving relative imports. + base_dir: std::path::PathBuf, + /// Type cache for looking up cached file types. + cache: SharedTypeCache, +} + +impl CachingImportResolver { + /// Create a new import resolver. 
+ /// + /// # Arguments + /// * `base_path` - Path to the document being analyzed (used to resolve relative imports) + /// * `cache` - Shared type cache for looking up cached types + pub fn new(base_path: &std::path::Path, cache: SharedTypeCache) -> Self { + let base_dir = base_path + .parent() + .map(std::path::Path::to_path_buf) + .unwrap_or_else(|| std::path::PathBuf::from(".")); + Self { base_dir, cache } + } + + /// Resolve an import path to a canonical file path. + fn resolve_path(&self, import_path: &str) -> Option { + let resolved = if std::path::Path::new(import_path).is_absolute() { + std::path::PathBuf::from(import_path) + } else { + self.base_dir.join(import_path) + }; + + // Canonicalize the path (resolves .., symlinks, etc.) + resolved.canonicalize().ok().map(CanonicalPath::new) + } +} + +impl crate::env::ImportResolver for CachingImportResolver { + fn resolve_import(&self, import_path: &str) -> Option { + let canonical_path = self.resolve_path(import_path)?; + let cache = self.cache.read(); + cache.get(&canonical_path) + } +} + +#[cfg(test)] +mod tests { + use std::collections::BTreeSet; + + use jrsonnet_lsp_document::DocVersion; + + use super::*; + + fn test_path(name: &str) -> CanonicalPath { + CanonicalPath::new(std::path::PathBuf::from(format!("/test/{name}"))) + } + + fn test_global_store() -> Arc { + Arc::new(GlobalTyStore::new()) + } + + /// Assert that the cache contains exactly the specified Ty entries. 
+ fn assert_cache_contents_ty(cache: &TypeCache, expected: &[(&str, Ty)]) { + let actual: BTreeSet<_> = cache + .cache + .iter() + .map(|(k, v)| (k.as_path().to_string_lossy().to_string(), v.ty)) + .collect(); + let expected: BTreeSet<_> = expected + .iter() + .map(|(k, v)| (format!("/test/{k}"), *v)) + .collect(); + assert_eq!(actual, expected, "Cache contents mismatch"); + } + + #[test] + fn test_cache_basic_ty() { + let mut cache = TypeCache::new(test_global_store()); + let path = test_path("main.jsonnet"); + + // Initially empty + assert_cache_contents_ty(&cache, &[]); + + // Add an entry using Ty-native API + cache.update(&path, Ty::NUMBER, 1); + assert_cache_contents_ty(&cache, &[("main.jsonnet", Ty::NUMBER)]); + assert!(cache.is_up_to_date(&path, 1)); + assert!(!cache.is_up_to_date(&path, 2)); + + // Update the entry + cache.update(&path, Ty::STRING, 2); + assert_cache_contents_ty(&cache, &[("main.jsonnet", Ty::STRING)]); + assert!(cache.is_up_to_date(&path, 2)); + + // Invalidate + cache.invalidate(&path); + assert_cache_contents_ty(&cache, &[]); + } + + #[test] + fn test_analyze_and_cache() { + let global_types = test_global_store(); + let cache = new_shared_cache(global_types); + let path = test_path("test.jsonnet"); + let doc = Document::new("42".to_string(), DocVersion::new(1)); + + // First call should analyze and cache + let ty1 = analyze_and_cache(&path, &doc, &cache); + assert_eq!(ty1, Ty::NUMBER); + // Verify cached value + assert_eq!(cache.read().get(&path), Some(Ty::NUMBER)); + + // Second call should return cached value + let ty2 = analyze_and_cache(&path, &doc, &cache); + assert_eq!(ty2, Ty::NUMBER); + + // New version should re-analyze + let doc2 = Document::new("\"hello\"".to_string(), DocVersion::new(2)); + let ty3 = analyze_and_cache(&path, &doc2, &cache); + assert_eq!(ty3, Ty::STRING); + assert_eq!(cache.read().get(&path), Some(Ty::STRING)); + } + + #[test] + fn test_multiple_files_ty() { + let mut cache = 
TypeCache::new(test_global_store()); + + let path1 = test_path("file1.jsonnet"); + let path2 = test_path("file2.jsonnet"); + + cache.update(&path1, Ty::NUMBER, 1); + cache.update(&path2, Ty::STRING, 1); + + assert_cache_contents_ty( + &cache, + &[("file1.jsonnet", Ty::NUMBER), ("file2.jsonnet", Ty::STRING)], + ); + + cache.invalidate(&path1); + assert_cache_contents_ty(&cache, &[("file2.jsonnet", Ty::STRING)]); + } + + #[test] + fn test_invalidate_many_ty() { + let mut cache = TypeCache::new(test_global_store()); + + let path1 = test_path("lib.jsonnet"); + let path2 = test_path("utils.jsonnet"); + let path3 = test_path("main.jsonnet"); + let path4 = test_path("other.jsonnet"); + + // Cache all files using Ty-native API + cache.update(&path1, Ty::NUMBER, 1); + cache.update(&path2, Ty::STRING, 1); + cache.update(&path3, Ty::BOOL, 1); + cache.update(&path4, Ty::NULL, 1); + + assert_cache_contents_ty( + &cache, + &[ + ("lib.jsonnet", Ty::NUMBER), + ("main.jsonnet", Ty::BOOL), + ("other.jsonnet", Ty::NULL), + ("utils.jsonnet", Ty::STRING), + ], + ); + + // Invalidate multiple files (simulating cascading invalidation) + cache.invalidate_many(vec![path1.clone(), path2.clone(), path3.clone()]); + + // Only path4 should remain + assert_cache_contents_ty(&cache, &[("other.jsonnet", Ty::NULL)]); + } + + #[test] + fn test_basic_get_update() { + let mut cache = TypeCache::new(test_global_store()); + let path = test_path("test.jsonnet"); + + // Update with Ty + cache.update(&path, Ty::NUMBER, 1); + + // Get should return the same Ty + assert_eq!(cache.get(&path), Some(Ty::NUMBER)); + + // Version check + assert!(cache.is_up_to_date(&path, 1)); + assert!(!cache.is_up_to_date(&path, 2)); + } + + #[test] + fn test_global_store_access() { + let global_types = test_global_store(); + let mut cache = TypeCache::new(Arc::clone(&global_types)); + let path = test_path("test.jsonnet"); + + // Cache a type - types are stored in the shared global store + cache.update(&path, Ty::NUMBER, 1); + 
+ // Verify we can retrieve the type + let retrieved = cache.get(&path).unwrap(); + assert_eq!(retrieved, Ty::NUMBER); + + // The cache's global_types should be the same reference + assert!(Arc::ptr_eq(cache.global_types(), &global_types)); + } + + #[test] + fn test_clear() { + let mut cache = TypeCache::new(test_global_store()); + let path1 = test_path("a.jsonnet"); + let path2 = test_path("b.jsonnet"); + + cache.update(&path1, Ty::NUMBER, 1); + cache.update(&path2, Ty::STRING, 1); + + assert_eq!(cache.len(), 2); + assert!(!cache.is_empty()); + + cache.clear(); + + assert_eq!(cache.len(), 0); + assert!(cache.is_empty()); + assert_eq!(cache.get(&path1), None); + } + + #[test] + fn test_lru_eviction() { + // Create a cache with capacity 3 + let mut cache = TypeCache::with_capacity(test_global_store(), 3); + + let path1 = test_path("file1.jsonnet"); + let path2 = test_path("file2.jsonnet"); + let path3 = test_path("file3.jsonnet"); + let path4 = test_path("file4.jsonnet"); + + // Fill the cache + cache.update(&path1, Ty::NUMBER, 1); + cache.update(&path2, Ty::STRING, 1); + cache.update(&path3, Ty::BOOL, 1); + + assert_eq!(cache.len(), 3); + assert_eq!(cache.get(&path1), Some(Ty::NUMBER)); + assert_eq!(cache.get(&path2), Some(Ty::STRING)); + assert_eq!(cache.get(&path3), Some(Ty::BOOL)); + + // Access path1 to make it recently used (path2 is now least recently used) + let _ = cache.get_and_touch(&path1); + + // Add a fourth entry - should evict path2 (LRU) + cache.update(&path4, Ty::NULL, 1); + + assert_eq!(cache.len(), 3); + assert_eq!(cache.get(&path1), Some(Ty::NUMBER)); // Still present (was touched) + assert_eq!(cache.get(&path2), None); // Evicted (was LRU) + assert_eq!(cache.get(&path3), Some(Ty::BOOL)); // Still present + assert_eq!(cache.get(&path4), Some(Ty::NULL)); // Newly added + } + + #[test] + fn test_capacity_zero_falls_back_to_one() { + // Verify with_capacity(0) doesn't panic and has minimum capacity + let mut cache = 
TypeCache::with_capacity(test_global_store(), 0); + let path = test_path("test.jsonnet"); + cache.update(&path, Ty::NUMBER, 1); + assert_eq!(cache.get(&path), Some(Ty::NUMBER)); + } +} diff --git a/crates/jrsonnet-lsp-scope/Cargo.toml b/crates/jrsonnet-lsp-scope/Cargo.toml new file mode 100644 index 00000000..2d226745 --- /dev/null +++ b/crates/jrsonnet-lsp-scope/Cargo.toml @@ -0,0 +1,17 @@ +[package] +name = "jrsonnet-lsp-scope" +authors.workspace = true +edition.workspace = true +license.workspace = true +repository.workspace = true +version.workspace = true +description = "Scope resolution for jrsonnet LSP" + +[dependencies] +jrsonnet-lsp-document = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-document" } +jrsonnet-rowan-parser = { version = "0.5.0-pre97", path = "../jrsonnet-rowan-parser" } +rowan.workspace = true +rustc-hash.workspace = true + +[lints] +workspace = true diff --git a/crates/jrsonnet-lsp-scope/src/bindings.rs b/crates/jrsonnet-lsp-scope/src/bindings.rs new file mode 100644 index 00000000..f74b8285 --- /dev/null +++ b/crates/jrsonnet-lsp-scope/src/bindings.rs @@ -0,0 +1,165 @@ +//! Scope binding utility functions. +//! +//! This module provides utility functions for identifying definition sites +//! and variable references in the Jsonnet AST. +//! +//! Import-related utilities are provided by `jrsonnet_lsp_import`. +//! General AST utilities (token_at_offset, to_lsp_range, etc.) are provided by +//! `jrsonnet_lsp_document`. + +use jrsonnet_rowan_parser::{SyntaxKind, SyntaxToken}; + +/// Check if a token is at a definition site (binding name, parameter, etc.) +/// +/// A definition site is where a name is bound (declared), as opposed to where it's used. 
+/// This includes: +/// - Local variable bindings: `local x = ...` +/// - Function names: `local f(x) = ...` +/// - Function parameters: `function(x)` +pub fn is_definition_site(token: &SyntaxToken) -> bool { + let Some(parent) = token.parent() else { + return false; + }; + + // Must be a Name node + if parent.kind() != SyntaxKind::NAME { + return false; + } + + // Check grandparent to see if this is a definition + let Some(grandparent) = parent.parent() else { + return false; + }; + + // These are definition contexts + matches!( + grandparent.kind(), + SyntaxKind::DESTRUCT_FULL | SyntaxKind::BIND_FUNCTION + ) +} + +/// Check if an identifier token is a variable reference (not a definition). +/// +/// A variable reference is a use of a previously-defined name. +/// This checks if the token is part of an `ExprVar` node. +pub fn is_variable_reference(token: &SyntaxToken) -> bool { + let Some(parent) = token.parent() else { + return false; + }; + + if parent.kind() != SyntaxKind::NAME { + return false; + } + + let Some(grandparent) = parent.parent() else { + return false; + }; + + grandparent.kind() == SyntaxKind::EXPR_VAR +} + +/// Check if a token can be renamed (is either a definition or reference to a local binding). +pub fn is_renameable(token: &SyntaxToken) -> bool { + is_definition_site(token) || is_variable_reference(token) +} + +/// Check if a token is defined at file scope (top-level). +/// +/// A file-scope definition is one that could potentially be exported +/// from a file via an import. 
+pub fn is_at_file_scope(token: &SyntaxToken) -> bool { + let mut node = token.parent(); + + // Walk up the tree looking for the depth + let mut depth = 0; + while let Some(n) = node { + match n.kind() { + SyntaxKind::STMT_LOCAL => depth += 1, + SyntaxKind::EXPR => { + // Check if this is the root expression + if n.parent() + .map_or(false, |p| p.kind() == SyntaxKind::SOURCE_FILE) + { + // File-level locals are at depth 1 + return depth <= 1; + } + } + _ => {} + } + node = n.parent(); + } + + false +} + +#[cfg(test)] +mod tests { + use jrsonnet_lsp_document::{DocVersion, Document}; + use jrsonnet_rowan_parser::AstNode; + + use super::*; + + #[test] + fn test_is_definition_site() { + let code = "local x = 1; x"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let ast = doc.ast(); + + // Find the 'x' tokens + let mut found_def = false; + let mut found_ref = false; + for token in ast + .syntax() + .descendants_with_tokens() + .filter_map(rowan::NodeOrToken::into_token) + { + if token.kind() == SyntaxKind::IDENT && token.text() == "x" { + if is_definition_site(&token) { + found_def = true; + } else if is_variable_reference(&token) { + found_ref = true; + } + } + } + assert!(found_def, "Should find definition site"); + assert!(found_ref, "Should find reference site"); + } + + #[test] + fn test_is_variable_reference() { + let code = "local x = 1; x + x"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let ast = doc.ast(); + + // Count references + let ref_count = ast + .syntax() + .descendants_with_tokens() + .filter_map(rowan::NodeOrToken::into_token) + .filter(|t| { + t.kind() == SyntaxKind::IDENT && t.text() == "x" && is_variable_reference(t) + }) + .count(); + + assert_eq!(ref_count, 2, "Should find 2 variable references"); + } + + #[test] + fn test_is_at_file_scope() { + let code = "local x = 1; x"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let ast = doc.ast(); + + let def_token = ast + .syntax() + 
.descendants_with_tokens() + .filter_map(rowan::NodeOrToken::into_token) + .find(|t| t.kind() == SyntaxKind::IDENT && t.text() == "x" && is_definition_site(t)) + .expect("should find definition token for x"); + + assert!( + is_at_file_scope(&def_token), + "Top-level local should be at file scope" + ); + } +} diff --git a/crates/jrsonnet-lsp-scope/src/lib.rs b/crates/jrsonnet-lsp-scope/src/lib.rs new file mode 100644 index 00000000..596edd12 --- /dev/null +++ b/crates/jrsonnet-lsp-scope/src/lib.rs @@ -0,0 +1,18 @@ +//! Scope resolution for Jsonnet LSP. +//! +//! This crate provides scope resolution and binding tracking for Jsonnet code. +//! It includes utilities for: +//! - Finding definitions of variables +//! - Finding all references to a binding +//! - Identifying definition sites vs. variable references +//! - Efficient scope indexing with O(log n) lookups + +pub mod bindings; +pub mod resolver; + +pub use bindings::{is_at_file_scope, is_definition_site, is_renameable, is_variable_reference}; +pub use resolver::{ + check_bind_for_name, check_param_for_name, check_scope_for_definition, find_all_references, + find_all_references_for_rename, find_definition_range, references_definition, ScopeIndex, + ScopeResolver, +}; diff --git a/crates/jrsonnet-lsp-scope/src/resolver.rs b/crates/jrsonnet-lsp-scope/src/resolver.rs new file mode 100644 index 00000000..abb16616 --- /dev/null +++ b/crates/jrsonnet-lsp-scope/src/resolver.rs @@ -0,0 +1,1164 @@ +//! Scope resolution for Jsonnet AST. +//! +//! This module provides shared functionality for resolving symbol definitions +//! and finding references within Jsonnet code. +//! +//! The `ScopeResolver` struct precomputes a scope map for O(1) definition lookups. +//! The `ScopeIndex` struct provides O(log n) lookups using binary search. 
+ +use std::cell::RefCell; + +use jrsonnet_rowan_parser::{ + nodes::{ + Bind, BindFunction, Destruct, ExprFunction, ForSpec, MemberBindStmt, Param, StmtLocal, + }, + AstNode, SyntaxKind, SyntaxNode, SyntaxToken, +}; +use rowan::{TextRange, TextSize}; +use rustc_hash::FxHashMap; + +use crate::bindings::{is_definition_site, is_variable_reference}; + +/// Find the definition range of a symbol by walking up the scope chain. +/// +/// Starting from a token that references a variable, this walks up the AST +/// looking for the binding that defines the variable. +pub fn find_definition_range(token: &SyntaxToken, name: &str) -> Option { + let mut current = token.parent()?; + + while let Some(parent) = current.parent() { + if let Some(range) = check_scope_for_definition(&parent, ¤t, name) { + return Some(range); + } + current = parent; + } + + None +} + +/// Check if a scope contains a definition for the given name. +/// +/// `child` is the node we came from (used for visibility checking). +pub fn check_scope_for_definition( + scope: &SyntaxNode, + child: &SyntaxNode, + name: &str, +) -> Option { + match scope.kind() { + SyntaxKind::EXPR => check_expr_for_definition(scope, child, name), + SyntaxKind::EXPR_FUNCTION => check_function_for_definition(scope, name), + SyntaxKind::BIND_FUNCTION => check_bind_function_for_definition(scope, name), + SyntaxKind::FOR_SPEC => check_for_spec_for_definition(scope, name), + SyntaxKind::OBJ_BODY_MEMBER_LIST => check_object_for_definition(scope, name), + // Array/object comprehensions: the FOR_SPEC bindings are visible to the expression + SyntaxKind::EXPR_ARRAY_COMP | SyntaxKind::OBJ_BODY_COMP => { + check_comprehension_for_definition(scope, name) + } + _ => None, + } +} + +/// Check an Expr for local definitions. +/// +/// Local definitions are only visible after their declaration point, +/// so we only check bindings that appear before the reference. 
+fn check_expr_for_definition( + expr: &SyntaxNode, + child: &SyntaxNode, + name: &str, +) -> Option { + let mut last_match = None; + + for stmt_node in expr.children() { + if stmt_node.kind() == SyntaxKind::STMT_LOCAL { + // Only consider bindings that appear before our reference + if stmt_node.text_range().end() > child.text_range().start() { + continue; + } + + if let Some(stmt_local) = StmtLocal::cast(stmt_node) { + for bind in stmt_local.binds() { + if let Some(range) = check_bind_for_name(&bind, name) { + // Keep track of the last (nearest) match for shadowing + last_match = Some(range); + } + } + } + } + } + + last_match +} + +/// Check a Bind for a name. +pub fn check_bind_for_name(bind: &Bind, name: &str) -> Option { + match bind { + Bind::BindDestruct(bd) => { + let destruct = bd.into()?; + if let Destruct::DestructFull(full) = destruct { + let bind_name = full.name()?; + let ident = bind_name.ident_lit()?; + if ident.text() == name { + return Some(bind_name.syntax().text_range()); + } + } + None + } + Bind::BindFunction(bf) => { + let bind_name = bf.name()?; + let ident = bind_name.ident_lit()?; + if ident.text() == name { + return Some(bind_name.syntax().text_range()); + } + None + } + } +} + +/// Check function parameters for a definition. +fn check_function_for_definition(func_node: &SyntaxNode, name: &str) -> Option { + let func = ExprFunction::cast(func_node.clone())?; + let params = func.params_desc()?; + + for param in params.params() { + if let Some(range) = check_param_for_name(¶m, name) { + return Some(range); + } + } + None +} + +/// Check BindFunction parameters for a definition. +fn check_bind_function_for_definition(func_node: &SyntaxNode, name: &str) -> Option { + let func = BindFunction::cast(func_node.clone())?; + let params = func.params()?; + + for param in params.params() { + if let Some(range) = check_param_for_name(¶m, name) { + return Some(range); + } + } + None +} + +/// Check a parameter for a name. 
+pub fn check_param_for_name(param: &Param, name: &str) -> Option { + let destruct = param.destruct()?; + if let Destruct::DestructFull(full) = destruct { + let param_name = full.name()?; + let ident = param_name.ident_lit()?; + if ident.text() == name { + return Some(param_name.syntax().text_range()); + } + } + None +} + +/// Check ForSpec for a definition. +fn check_for_spec_for_definition(for_node: &SyntaxNode, name: &str) -> Option { + let for_spec = ForSpec::cast(for_node.clone())?; + let destruct = for_spec.bind()?; + + if let Destruct::DestructFull(full) = destruct { + let bind_name = full.name()?; + let ident = bind_name.ident_lit()?; + if ident.text() == name { + return Some(bind_name.syntax().text_range()); + } + } + None +} + +/// Check object locals for a definition. +fn check_object_for_definition(obj_body: &SyntaxNode, name: &str) -> Option { + for member_node in obj_body.children() { + if member_node.kind() == SyntaxKind::MEMBER_BIND_STMT { + if let Some(member_bind) = MemberBindStmt::cast(member_node) { + if let Some(obj_local) = member_bind.obj_local() { + if let Some(bind) = obj_local.bind() { + if let Some(range) = check_bind_for_name(&bind, name) { + return Some(range); + } + } + } + } + } + } + None +} + +/// Check comprehension (array or object) for FOR_SPEC definitions. +/// +/// In `[x for x in arr]`, the FOR_SPEC binding is visible to the expression. +fn check_comprehension_for_definition(comp_node: &SyntaxNode, name: &str) -> Option { + for child in comp_node.children() { + if child.kind() != SyntaxKind::FOR_SPEC { + continue; + } + if let Some(range) = check_for_spec_for_definition(&child, name) { + return Some(range); + } + } + None +} + +/// Check if a reference resolves to a specific definition. +/// +/// Walks up the scope chain from the token to find its definition, +/// then checks if it matches the expected definition range. 
+pub fn references_definition(token: &SyntaxToken, name: &str, def_range: TextRange) -> bool { + let Some(mut current) = token.parent() else { + return false; + }; + + while let Some(parent) = current.parent() { + if let Some(found_range) = check_scope_for_definition(&parent, ¤t, name) { + return found_range == def_range; + } + current = parent; + } + + false +} + +/// Find all references to a name in the AST. +/// +/// This function walks the entire AST looking for identifiers that: +/// 1. Match the given name +/// 2. Are either the definition or references that resolve to the definition +/// +/// The `definition_range` should be the range of the Name node at the definition site. +pub fn find_all_references( + root: &SyntaxNode, + name: &str, + definition_range: TextRange, +) -> Vec { + let mut references = Vec::new(); + + // Walk all tokens looking for identifiers matching the name + for token in root + .descendants_with_tokens() + .filter_map(rowan::NodeOrToken::into_token) + { + if token.kind() == SyntaxKind::IDENT && token.text() == name { + // Check if this is a reference (ExprVar) + if is_variable_reference(&token) { + // It's a reference - check if it resolves to our definition + if references_definition(&token, name, definition_range) { + references.push(token.text_range()); + } + } else if is_definition_site(&token) { + // It's a definition - check if it matches our target definition + if let Some(parent) = token.parent() { + if parent.text_range() == definition_range { + references.push(parent.text_range()); + } + } + } + } + } + + references +} + +/// Find all references including both definition and uses, returning identifier ranges. +/// +/// This is a variant of `find_all_references` that returns the identifier token ranges +/// instead of the Name node ranges. This is useful for rename operations where +/// we want to replace just the identifier text. 
+pub fn find_all_references_for_rename( + root: &SyntaxNode, + name: &str, + definition_range: TextRange, +) -> Vec { + let mut references = Vec::new(); + + // Walk all tokens looking for identifiers matching the name + for token in root + .descendants_with_tokens() + .filter_map(rowan::NodeOrToken::into_token) + { + if token.kind() == SyntaxKind::IDENT && token.text() == name { + // Check if this is a reference (ExprVar) + if is_variable_reference(&token) { + // It's a reference - check if it resolves to our definition + if references_definition(&token, name, definition_range) { + // For rename, we want just the identifier range, not the Name node + references.push(token.text_range()); + } + } else if is_definition_site(&token) { + // It's a definition - check if it matches our target definition + if let Some(parent) = token.parent() { + if parent.text_range() == definition_range { + // Return the identifier range, not the Name node + references.push(token.text_range()); + } + } + } + } + } + + references +} + +/// Cached scope resolver for efficient repeated lookups. +/// +/// Precomputes a mapping from each variable reference to its definition. +pub struct ScopeResolver { + /// Maps reference token start position to definition's TextRange. + reference_to_def: FxHashMap, +} + +impl ScopeResolver { + /// Build a scope resolver for the given AST root. + /// + /// Walks the AST once to build the scope map. + pub fn new(root: &SyntaxNode) -> Self { + let mut reference_to_def = FxHashMap::default(); + + for token in root + .descendants_with_tokens() + .filter_map(rowan::NodeOrToken::into_token) + { + if token.kind() != SyntaxKind::IDENT { + continue; + } + + if !is_variable_reference(&token) { + continue; + } + + if let Some(def_range) = find_definition_range(&token, token.text()) { + reference_to_def.insert(token.text_range().start(), def_range); + } + } + + Self { reference_to_def } + } + + /// Get the definition range for a reference token. 
+ /// + /// Returns the TextRange of the Name node at the definition site, + /// or None if the token is not a reference or has no definition. + pub fn get_definition(&self, token: &SyntaxToken) -> Option { + self.reference_to_def + .get(&token.text_range().start()) + .copied() + } + + /// Check if a reference resolves to a specific definition. + pub fn references_definition(&self, token: &SyntaxToken, def_range: TextRange) -> bool { + self.get_definition(token) == Some(def_range) + } + + /// Find all references to a definition, returning identifier token ranges. + pub fn find_references( + &self, + root: &SyntaxNode, + name: &str, + definition_range: TextRange, + ) -> Vec { + let mut references = Vec::new(); + + for token in root + .descendants_with_tokens() + .filter_map(rowan::NodeOrToken::into_token) + { + if token.kind() != SyntaxKind::IDENT || token.text() != name { + continue; + } + + if is_variable_reference(&token) { + if self.references_definition(&token, definition_range) { + references.push(token.text_range()); + } + continue; + } + + if !is_definition_site(&token) { + continue; + } + + let Some(parent) = token.parent() else { + continue; + }; + + if parent.text_range() == definition_range { + references.push(token.text_range()); + } + } + + references + } +} + +/// A binding in a scope - maps a name to its definition range. +#[derive(Debug, Clone)] +struct ScopeBinding { + /// The name of the binding. + name: String, + /// The TextRange of the definition (Name node). + range: TextRange, + /// The position after which this binding is visible (for local bindings). + /// None means visible throughout the scope (e.g., function params). + visible_after: Option, +} + +/// A scope with its bindings. +#[derive(Debug, Clone)] +struct IndexedScope { + /// The range of this scope. + range: TextRange, + /// The index of the parent scope in the scopes vector, or None for root. + parent: Option, + /// Bindings in this scope. 
+ bindings: Vec, +} + +/// Indexed scope structure for efficient O(log n) lookups. +/// +/// Builds a scope tree once and uses binary search to find scopes containing +/// a given position. This is more efficient than walking the AST for each lookup. +/// +/// Scope chains are memoized for repeated lookups at the same scope. +pub struct ScopeIndex { + /// Scopes sorted by start position. + scopes: Vec, + /// Map from scope start position to index for quick lookup. + scope_starts: Vec<(TextSize, usize)>, + /// Cached scope chains: scope_index -> chain of scope ranges (innermost first). + scope_chain_cache: RefCell>>, + /// Cached bindings per scope chain: scope_index -> all bindings in chain (with visibility info). + bindings_cache: RefCell>>, +} + +/// A cached binding with visibility information for filtering at query time. +#[derive(Debug, Clone)] +struct CachedBinding { + name: String, + range: TextRange, + /// Position after which this binding is visible, or None if always visible. + visible_after: Option, +} + +impl ScopeIndex { + /// Build a scope index from an AST root. + /// + /// Walks the AST once to collect all scopes and their bindings. + pub fn new(root: &SyntaxNode) -> Self { + let mut scopes = Vec::new(); + let mut scope_stack: Vec = Vec::new(); + + Self::collect_scopes(root, &mut scopes, &mut scope_stack); + + // Build sorted index for binary search + let mut scope_starts: Vec<(TextSize, usize)> = scopes + .iter() + .enumerate() + .map(|(i, s)| (s.range.start(), i)) + .collect(); + scope_starts.sort_by_key(|(pos, _)| *pos); + + Self { + scopes, + scope_starts, + scope_chain_cache: RefCell::new(FxHashMap::default()), + bindings_cache: RefCell::new(FxHashMap::default()), + } + } + + /// Collect scopes recursively from the AST. 
+ fn collect_scopes( + node: &SyntaxNode, + scopes: &mut Vec, + scope_stack: &mut Vec, + ) { + let is_scope = matches!( + node.kind(), + SyntaxKind::EXPR_FUNCTION + | SyntaxKind::BIND_FUNCTION + | SyntaxKind::FOR_SPEC + | SyntaxKind::OBJ_BODY_MEMBER_LIST + | SyntaxKind::EXPR_ARRAY_COMP + | SyntaxKind::OBJ_BODY_COMP + | SyntaxKind::EXPR + ); + + let scope_idx = if is_scope { + let parent = scope_stack.last().copied(); + let bindings = Self::extract_bindings(node); + let idx = scopes.len(); + scopes.push(IndexedScope { + range: node.text_range(), + parent, + bindings, + }); + scope_stack.push(idx); + Some(idx) + } else { + None + }; + + // Recurse into children + for child in node.children() { + Self::collect_scopes(&child, scopes, scope_stack); + } + + if scope_idx.is_some() { + scope_stack.pop(); + } + } + + /// Extract bindings from a scope node. + fn extract_bindings(node: &SyntaxNode) -> Vec { + match node.kind() { + SyntaxKind::EXPR => Self::extract_expr_bindings(node), + SyntaxKind::EXPR_FUNCTION => Self::extract_expr_function_bindings(node), + SyntaxKind::BIND_FUNCTION => Self::extract_bind_function_bindings(node), + SyntaxKind::FOR_SPEC => ForSpec::cast(node.clone()) + .and_then(Self::for_spec_binding) + .into_iter() + .collect(), + SyntaxKind::OBJ_BODY_MEMBER_LIST => Self::extract_object_local_bindings(node), + SyntaxKind::EXPR_ARRAY_COMP | SyntaxKind::OBJ_BODY_COMP => { + Self::extract_comprehension_bindings(node) + } + _ => Vec::new(), + } + } + + fn make_binding( + name: String, + range: TextRange, + visible_after: Option, + ) -> ScopeBinding { + ScopeBinding { + name, + range, + visible_after, + } + } + + fn extract_expr_bindings(node: &SyntaxNode) -> Vec { + node.children() + .filter(|stmt_node| stmt_node.kind() == SyntaxKind::STMT_LOCAL) + .filter_map(StmtLocal::cast) + .flat_map(|stmt_local| { + let visible_after = Some(stmt_local.syntax().text_range().end()); + stmt_local.binds().filter_map(move |bind| { + Self::binding_name_and_range(&bind) + 
+						.map(|(name, range)| Self::make_binding(name, range, visible_after))
+				})
+			})
+			.collect()
+	}
+
+	fn extract_expr_function_bindings(node: &SyntaxNode) -> Vec<ScopeBinding> {
+		let Some(func) = ExprFunction::cast(node.clone()) else {
+			return Vec::new();
+		};
+		let Some(params) = func.params_desc() else {
+			return Vec::new();
+		};
+		params
+			.params()
+			.filter_map(|param| {
+				Self::param_name_and_range(&param)
+					.map(|(name, range)| Self::make_binding(name, range, None))
+			})
+			.collect()
+	}
+
+	fn extract_bind_function_bindings(node: &SyntaxNode) -> Vec<ScopeBinding> {
+		let Some(func) = BindFunction::cast(node.clone()) else {
+			return Vec::new();
+		};
+		let Some(params) = func.params() else {
+			return Vec::new();
+		};
+		params
+			.params()
+			.filter_map(|param| {
+				Self::param_name_and_range(&param)
+					.map(|(name, range)| Self::make_binding(name, range, None))
+			})
+			.collect()
+	}
+
+	fn for_spec_binding(for_spec: ForSpec) -> Option<ScopeBinding> {
+		let destruct = for_spec.bind()?;
+		let Destruct::DestructFull(full) = destruct else {
+			return None;
+		};
+		let bind_name = full.name()?;
+		let ident = bind_name.ident_lit()?;
+		Some(Self::make_binding(
+			ident.text().to_string(),
+			bind_name.syntax().text_range(),
+			None,
+		))
+	}
+
+	fn extract_object_local_bindings(node: &SyntaxNode) -> Vec<ScopeBinding> {
+		node.children()
+			.filter(|member_node| member_node.kind() == SyntaxKind::MEMBER_BIND_STMT)
+			.filter_map(MemberBindStmt::cast)
+			.filter_map(|member_bind| member_bind.obj_local())
+			.filter_map(|obj_local| obj_local.bind())
+			.filter_map(|bind| {
+				Self::binding_name_and_range(&bind)
+					.map(|(name, range)| Self::make_binding(name, range, None))
+			})
+			.collect()
+	}
+
+	fn extract_comprehension_bindings(node: &SyntaxNode) -> Vec<ScopeBinding> {
+		node.children()
+			.filter(|child| child.kind() == SyntaxKind::FOR_SPEC)
+			.filter_map(ForSpec::cast)
+			.filter_map(Self::for_spec_binding)
+			.collect()
+	}
+
+	/// Extract name and range from a Bind.
+	fn binding_name_and_range(bind: &Bind) -> Option<(String, TextRange)> {
+		match bind {
+			Bind::BindDestruct(bd) => {
+				// NOTE(review): `bd.into()?` converts the destructuring bind into its
+				// Destruct — confirm this is the intended accessor on BindDestruct.
+				let destruct = bd.into()?;
+				if let Destruct::DestructFull(full) = destruct {
+					let bind_name = full.name()?;
+					let ident = bind_name.ident_lit()?;
+					return Some((ident.text().to_string(), bind_name.syntax().text_range()));
+				}
+				None
+			}
+			Bind::BindFunction(bf) => {
+				let bind_name = bf.name()?;
+				let ident = bind_name.ident_lit()?;
+				Some((ident.text().to_string(), bind_name.syntax().text_range()))
+			}
+		}
+	}
+
+	/// Extract name and range from a Param.
+	fn param_name_and_range(param: &Param) -> Option<(String, TextRange)> {
+		let destruct = param.destruct()?;
+		if let Destruct::DestructFull(full) = destruct {
+			let param_name = full.name()?;
+			let ident = param_name.ident_lit()?;
+			return Some((ident.text().to_string(), param_name.syntax().text_range()));
+		}
+		None
+	}
+
+	/// Find the innermost scope containing a position.
+	fn find_innermost_scope(&self, pos: TextSize) -> Option<usize> {
+		// Binary search to find candidate scopes
+		let search_idx = self
+			.scope_starts
+			.partition_point(|(start, _)| *start <= pos);
+
+		// Check scopes from the found position backwards
+		let mut best: Option<usize> = None;
+		let mut best_size = u32::MAX;
+
+		for i in (0..search_idx).rev() {
+			let (_, scope_idx) = self.scope_starts[i];
+			let scope = &self.scopes[scope_idx];
+
+			if !scope.range.contains(pos) {
+				continue;
+			}
+
+			let size = scope.range.len().into();
+			if size < best_size {
+				best = Some(scope_idx);
+				best_size = size;
+			}
+		}
+
+		best
+	}
+
+	/// Find the definition for a name at a given position.
+	///
+	/// Returns the TextRange of the definition's Name node.
+ pub fn find_definition(&self, pos: TextSize, name: &str) -> Option { + let mut scope_idx = self.find_innermost_scope(pos)?; + + loop { + let scope = &self.scopes[scope_idx]; + + // Search bindings in reverse order for shadowing (last match wins) + for binding in scope.bindings.iter().rev() { + if binding.name != name { + continue; + } + + // Check visibility + if let Some(visible_after) = binding.visible_after { + if pos < visible_after { + continue; + } + } + + return Some(binding.range); + } + + // Move to parent scope + scope_idx = scope.parent?; + } + } + + /// Check if a position references a specific definition. + pub fn references_definition(&self, pos: TextSize, name: &str, def_range: TextRange) -> bool { + self.find_definition(pos, name) == Some(def_range) + } + + /// Get the scope chain for a position (innermost to outermost). + /// + /// Returns a vector of scope ranges from the innermost scope containing + /// the position to the root scope. Results are memoized per scope index. + pub fn scope_chain(&self, pos: TextSize) -> Vec { + let scope_idx = match self.find_innermost_scope(pos) { + Some(idx) => idx, + None => return Vec::new(), + }; + + // Check cache first + if let Some(cached) = self.scope_chain_cache.borrow().get(&scope_idx) { + return cached.clone(); + } + + // Compute the scope chain + let chain = self.compute_scope_chain(scope_idx); + + // Cache and return + self.scope_chain_cache + .borrow_mut() + .insert(scope_idx, chain.clone()); + chain + } + + /// Compute the scope chain for a given scope index (uncached). + fn compute_scope_chain(&self, start_scope_idx: usize) -> Vec { + let mut chain = Vec::new(); + let mut scope_idx = start_scope_idx; + + loop { + chain.push(self.scopes[scope_idx].range); + match self.scopes[scope_idx].parent { + Some(parent_idx) => scope_idx = parent_idx, + None => break, + } + } + + chain + } + + /// Get all bindings visible at a position. 
+	///
+	/// Returns bindings from innermost to outermost scope,
+	/// including shadowed names. The cached bindings include visibility info,
+	/// which is filtered at query time.
+	pub fn bindings_at(&self, pos: TextSize) -> Vec<(String, TextRange)> {
+		let scope_idx = match self.find_innermost_scope(pos) {
+			Some(idx) => idx,
+			None => return Vec::new(),
+		};
+
+		// Get or compute cached bindings for this scope chain
+		let cached = self.get_or_compute_bindings(scope_idx);
+
+		// Filter by visibility at the query position
+		cached
+			.into_iter()
+			.filter(|b| match b.visible_after {
+				Some(visible_after) => pos >= visible_after,
+				None => true,
+			})
+			.map(|b| (b.name, b.range))
+			.collect()
+	}
+
+	/// Get or compute cached bindings for a scope chain.
+	fn get_or_compute_bindings(&self, scope_idx: usize) -> Vec<CachedBinding> {
+		// Check cache first
+		if let Some(cached) = self.bindings_cache.borrow().get(&scope_idx) {
+			return cached.clone();
+		}
+
+		// Compute bindings for the entire scope chain
+		let bindings = self.compute_bindings(scope_idx);
+
+		// Cache and return
+		self.bindings_cache
+			.borrow_mut()
+			.insert(scope_idx, bindings.clone());
+		bindings
+	}
+
+	/// Compute all bindings in a scope chain (uncached).
+	fn compute_bindings(&self, start_scope_idx: usize) -> Vec<CachedBinding> {
+		let mut bindings = Vec::new();
+		let mut scope_idx = start_scope_idx;
+
+		loop {
+			let scope = &self.scopes[scope_idx];
+
+			for binding in &scope.bindings {
+				bindings.push(CachedBinding {
+					name: binding.name.clone(),
+					range: binding.range,
+					visible_after: binding.visible_after,
+				});
+			}
+
+			match scope.parent {
+				Some(parent_idx) => scope_idx = parent_idx,
+				None => break,
+			}
+		}
+
+		bindings
+	}
+}
+
+#[cfg(test)]
+mod tests {
+	use jrsonnet_lsp_document::{token_at_offset, ByteOffset, DocVersion, Document};
+	use jrsonnet_rowan_parser::AstNode;
+
+	use super::*;
+
+	#[test]
+	fn test_find_definition_range_local_variable() {
+		let code = "local x = 1; x + 1";
+		//                ^def    ^ref
+		let doc = Document::new(code.to_string(), DocVersion::new(1));
+		let ast = doc.ast();
+
+		// Find the reference 'x' at position 13
+		let token = token_at_offset(ast.syntax(), ByteOffset::from(13u32))
+			.expect("should find token at position 13");
+		assert_eq!(token.text(), "x");
+
+		let range = find_definition_range(&token, "x").expect("should find definition range");
+
+		// Definition is at position 6
+		assert_eq!(range.start(), 6.into());
+	}
+
+	#[test]
+	fn test_find_definition_range_function_param() {
+		let code = "local f(x) = x * 2; f(3)";
+		//                  ^param ^ref
+		let doc = Document::new(code.to_string(), DocVersion::new(1));
+		let ast = doc.ast();
+
+		// Find the reference 'x' at position 13
+		let token = token_at_offset(ast.syntax(), ByteOffset::from(13u32))
+			.expect("should find token at position 13");
+		assert_eq!(token.text(), "x");
+
+		let range =
+			find_definition_range(&token, "x").expect("should find definition range for parameter");
+
+		// Parameter is at position 8
+		assert_eq!(range.start(), 8.into());
+	}
+
+	#[test]
+	fn test_find_all_references() {
+		let code = "local x = 1; x + x";
+		//                ^def    ^ref ^ref
+		// 0123456789...
+		let doc = Document::new(code.to_string(), DocVersion::new(1));
+		let ast = doc.ast();
+
+		// Get the definition range (Name node at position 6)
+		let def_range = TextRange::new(6.into(), 7.into());
+
+		let refs = find_all_references(ast.syntax(), "x", def_range);
+		// def at 6, refs at 13 and 17
+		assert_eq!(
+			refs,
+			vec![
+				TextRange::new(6.into(), 7.into()),   // definition
+				TextRange::new(13.into(), 14.into()), // first use
+				TextRange::new(17.into(), 18.into()), // second use
+			]
+		);
+	}
+
+	#[test]
+	fn test_shadowing() {
+		let code = "local x = 1; local x = 2; x";
+		//                ^def1        ^def2     ^ref
+		let doc = Document::new(code.to_string(), DocVersion::new(1));
+		let ast = doc.ast();
+
+		// Find the final 'x' reference
+		let token = token_at_offset(ast.syntax(), ByteOffset::from(26u32))
+			.expect("should find token at position 26");
+		assert_eq!(token.text(), "x");
+
+		let range = find_definition_range(&token, "x").expect("should find definition range");
+
+		// Should resolve to the second (closer) definition at position 19
+		assert_eq!(range.start(), 19.into());
+	}
+
+	#[test]
+	fn test_references_respects_scope() {
+		let code = "local x = 1; local f(x) = x; x";
+		//                ^def1         ^def2 ^ref2 ^ref1
+		// 0123456789...
+		let doc = Document::new(code.to_string(), DocVersion::new(1));
+		let ast = doc.ast();
+
+		// Get the definition range for outer x (Name node at position 6)
+		let def_range = TextRange::new(6.into(), 7.into());
+
+		let refs = find_all_references(ast.syntax(), "x", def_range);
+		// Should find: the definition (6) and the last reference (29), not the inner x
+		assert_eq!(
+			refs,
+			vec![
+				TextRange::new(6.into(), 7.into()),   // outer x definition
+				TextRange::new(29.into(), 30.into()), // final reference to outer x
+			]
+		);
+	}
+
+	// ScopeIndex tests
+
+	#[test]
+	fn test_scope_index_local_variable() {
+		let code = "local x = 1; x + 1";
+		//                ^def    ^ref
+		// 0123456789012345678
+		let doc = Document::new(code.to_string(), DocVersion::new(1));
+		let ast = doc.ast();
+
+		let index = ScopeIndex::new(ast.syntax());
+
+		// Reference at position 13 should find definition at position 6-7
+		let def_range = index.find_definition(13.into(), "x");
+		assert_eq!(def_range, Some(TextRange::new(6.into(), 7.into())));
+	}
+
+	#[test]
+	fn test_scope_index_function_param() {
+		let code = "local f(x) = x * 2; f(3)";
+		//                  ^param ^ref
+		// 0123456789012345678901234
+		let doc = Document::new(code.to_string(), DocVersion::new(1));
+		let ast = doc.ast();
+
+		let index = ScopeIndex::new(ast.syntax());
+
+		// Reference at position 13 should find parameter at position 8
+		let def_range = index.find_definition(13.into(), "x");
+		assert_eq!(def_range, Some(TextRange::new(8.into(), 9.into())));
+	}
+
+	#[test]
+	fn test_scope_index_shadowing() {
+		let code = "local x = 1; local x = 2; x";
+		//                ^def1        ^def2     ^ref
+		// 0123456789012345678901234567890
+		let doc = Document::new(code.to_string(), DocVersion::new(1));
+		let ast = doc.ast();
+
+		let index = ScopeIndex::new(ast.syntax());
+
+		// Reference at position 26 should find the second (shadowing) definition
+		let def_range = index.find_definition(26.into(), "x");
+		assert_eq!(def_range, Some(TextRange::new(19.into(), 20.into())));
+	}
+
+	#[test]
+	fn test_scope_index_nested_scopes() {
+		let code = "local x = 1; local f(x) = x; x";
+		//                ^def1         ^def2 ^ref2 ^ref1
+		// 0123456789012345678901234567890
+		let doc = Document::new(code.to_string(), DocVersion::new(1));
+		let ast = doc.ast();
+
+		let index = ScopeIndex::new(ast.syntax());
+
+		// Reference at position 26 (inside function) should find param at position 21
+		let def_range = index.find_definition(26.into(), "x");
+		assert_eq!(def_range, Some(TextRange::new(21.into(), 22.into())));
+
+		// Reference at position 29 (outside function) should find outer x at position 6
+		let def_range = index.find_definition(29.into(), "x");
+		assert_eq!(def_range, Some(TextRange::new(6.into(), 7.into())));
+	}
+
+	#[test]
+	fn test_scope_index_matches_linear_search() {
+		// Verify that ScopeIndex produces the same results as the linear search
+		let code = "local a = 1; local f(x, y) = x + y; local b = f(a, 2); b";
+		let doc = Document::new(code.to_string(), DocVersion::new(1));
+		let ast = doc.ast();
+
+		let index = ScopeIndex::new(ast.syntax());
+
+		// Test various positions
+		for token in ast
+			.syntax()
+			.descendants_with_tokens()
+			.filter_map(rowan::NodeOrToken::into_token)
+		{
+			if token.kind() != SyntaxKind::IDENT {
+				continue;
+			}
+			if !is_variable_reference(&token) {
+				continue;
+			}
+
+			let name = token.text();
+			let pos = token.text_range().start();
+
+			let linear_result = find_definition_range(&token, name);
+			let index_result = index.find_definition(pos, name);
+
+			assert_eq!(
+				linear_result, index_result,
+				"Mismatch for '{}' at position {:?}",
+				name, pos
+			);
+		}
+	}
+
+	#[test]
+	fn test_scope_chain() {
+		let code = "local f(x) = x * 2; f(3)";
+		// 0123456789012345678901234
+		let doc = Document::new(code.to_string(), DocVersion::new(1));
+		let ast = doc.ast();
+
+		let index = ScopeIndex::new(ast.syntax());
+
+		// Position 13 (inside function body) should have multiple scopes
+		let chain = index.scope_chain(13.into());
+		// Function body is nested within multiple syntax nodes
+		assert_eq!(chain.len(), 4);
+	}
+
+	#[test]
+	fn test_bindings_at() {
+		let code = "local a = 1; local b = 2; a + b";
+		// 0123456789012345678901234567890
+		let doc = Document::new(code.to_string(), DocVersion::new(1));
+		let ast = doc.ast();
+
+		let index = ScopeIndex::new(ast.syntax());
+
+		// At position 30 (after both bindings), both 'a' and 'b' should be visible
+		let bindings = index.bindings_at(30.into());
+		let mut names: Vec<_> = bindings.iter().map(|(n, _)| n.as_str()).collect();
+		names.sort_unstable();
+		assert_eq!(names, vec!["a", "b"]);
+	}
+
+	#[test]
+	fn test_scope_chain_cache_consistency() {
+		// Test that multiple calls to scope_chain return consistent results
+		let code = "local f(x) = x * 2; f(3)";
+		let doc = Document::new(code.to_string(), DocVersion::new(1));
+		let ast = doc.ast();
+
+		let index = ScopeIndex::new(ast.syntax());
+
+		// Call multiple times at the same position - should return identical results
+		let chain1 = index.scope_chain(13.into());
+		let chain2 = index.scope_chain(13.into());
+		let chain3 = index.scope_chain(13.into());
+
+		assert_eq!(chain1, chain2, "Repeated calls should return same result");
+		assert_eq!(chain2, chain3, "Repeated calls should return same result");
+
+		// Verify the cache is populated (we get results, proving the mechanism works)
+		assert!(!chain1.is_empty(), "Should have at least one scope");
+	}
+
+	#[test]
+	fn test_bindings_cache_with_visibility() {
+		// Test that bindings cache correctly handles visibility filtering
+		let code = "local a = 1; local b = 2; local c = 3; a + b + c";
+		//          0         1         2         3         4
+		// 0123456789012345678901234567890123456789012345678
+		let doc = Document::new(code.to_string(), DocVersion::new(1));
+		let ast = doc.ast();
+
+		let index = ScopeIndex::new(ast.syntax());
+
+		// At position 15 (between 'local a' and 'local b'), only 'a' should be visible
+		let bindings_15 = index.bindings_at(15.into());
+		let names_15: Vec<_> = bindings_15.iter().map(|(n, _)| n.as_str()).collect();
+		assert_eq!(names_15, vec!["a"]);
+
+		// At position 28 (between 'local b' and 'local c'), 'a' and 'b' should be visible
+		let bindings_28 = index.bindings_at(28.into());
+		let mut names_28: Vec<_> = bindings_28.iter().map(|(n, _)| n.as_str()).collect();
+		names_28.sort_unstable();
+		assert_eq!(names_28, vec!["a", "b"]);
+
+		// At position 45 (after all locals), all should be visible
+		let bindings_45 = index.bindings_at(45.into());
+		let mut names_45: Vec<_> = bindings_45.iter().map(|(n, _)| n.as_str()).collect();
+		names_45.sort_unstable();
+		assert_eq!(names_45, vec!["a", "b", "c"]);
+
+		// Repeated call should give same result (using cache)
+		let bindings_45_again = index.bindings_at(45.into());
+		let mut names_45_again: Vec<_> =
+			bindings_45_again.iter().map(|(n, _)| n.as_str()).collect();
+		names_45_again.sort_unstable();
+		assert_eq!(names_45, names_45_again);
+	}
+
+	#[test]
+	fn test_cache_handles_different_scopes() {
+		// Test that caching works correctly across different scopes
+		let code = "local outer = 1; local f(inner) = inner + outer; outer + f(2)";
+		//          0         1         2         3         4         5         6
+		// 01234567890123456789012345678901234567890123456789012345678901234
+		let doc = Document::new(code.to_string(), DocVersion::new(1));
+		let ast = doc.ast();
+
+		let index = ScopeIndex::new(ast.syntax());
+
+		// Inside function (position 35, the 'inner' reference)
+		let bindings_in_func = index.bindings_at(35.into());
+		let mut names_in_func: Vec<_> = bindings_in_func.iter().map(|(n, _)| n.as_str()).collect();
+		names_in_func.sort_unstable();
+		assert!(
+			names_in_func.contains(&"inner"),
+			"Should see 'inner' inside function"
+		);
+		assert!(
+			names_in_func.contains(&"outer"),
+			"Should see 'outer' inside function"
+		);
+
+		// Outside function (position 58, after function definition)
+		let bindings_outside = index.bindings_at(58.into());
+		let names_outside: Vec<_> = bindings_outside.iter().map(|(n, _)| n.as_str()).collect();
+		assert!(
+			!names_outside.contains(&"inner"),
+			"Should NOT see 'inner' outside function"
+		);
+		assert!(
+			names_outside.contains(&"outer"),
+			"Should see 'outer' outside function"
+		);
+		assert!(
+			names_outside.contains(&"f"),
+			"Should see 'f' outside function"
+		);
+	}
+}
diff --git a/crates/jrsonnet-lsp-stdlib/Cargo.toml b/crates/jrsonnet-lsp-stdlib/Cargo.toml
new file mode 100644
index 00000000..38830462
--- /dev/null
+++ b/crates/jrsonnet-lsp-stdlib/Cargo.toml
@@ -0,0 +1,18 @@
+[package]
+name = "jrsonnet-lsp-stdlib"
+authors.workspace = true
+edition.workspace = true
+license.workspace = true
+repository.workspace = true
+version.workspace = true
+description = "Standard library signatures and documentation for jrsonnet LSP"
+
+[dependencies]
+jrsonnet-lsp-types = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-types" }
+jrsonnet-std-sig = { version = "0.5.0-pre97", path = "../jrsonnet-std-sig" }
+
+[dev-dependencies]
+indoc = "2"
+
+[lints]
+workspace = true
diff --git a/crates/jrsonnet-lsp-stdlib/src/docs.rs b/crates/jrsonnet-lsp-stdlib/src/docs.rs
new file mode 100644
index 00000000..0f98610e
--- /dev/null
+++ b/crates/jrsonnet-lsp-stdlib/src/docs.rs
@@ -0,0 +1,188 @@
+//! Standard library function documentation.
+//!
+//! Documentation for Jsonnet standard library functions.
+//! Documentation strings are sourced from the `jrsonnet-std-sig` crate spec.
+
+use std::{collections::HashMap, sync::OnceLock};
+
+use jrsonnet_std_sig::FNS;
+
+/// Documentation for a stdlib function.
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct StdlibDoc {
+	/// Function name (without `std.` prefix).
+	pub name: &'static str,
+	/// Function signature, e.g. `(arr, func)`.
+	pub signature: String,
+	/// Short description.
+	pub description: &'static str,
+	/// Example usage (optional).
+	pub example: Option<&'static str>,
+}
+
+impl StdlibDoc {
+	/// Format as markdown for hover display.
+	pub fn to_markdown(&self) -> String {
+		// `signature` has no trailing `)` (see build_signature); it is appended here.
+		let mut md = format!("```jsonnet\nstd.{}{})\n```\n\n", self.name, self.signature);
+		md.push_str(self.description);
+		if let Some(example) = self.example {
+			md.push_str("\n\n**Example:**\n```jsonnet\n");
+			md.push_str(example);
+			md.push_str("\n```");
+		}
+		md
+	}
+}
+
+/// Get documentation for a stdlib function by name.
+pub fn get_stdlib_doc(name: &str) -> Option<&'static StdlibDoc> {
+	// Self-initializing, like signatures::get_stdlib_signature, so a lookup
+	// before ensure_initialized() cannot silently return None.
+	STDLIB_DOCS.get_or_init(init_stdlib_docs).get(name)
+}
+
+/// Get all stdlib function docs for completion.
+pub fn get_all_stdlib_docs() -> impl Iterator<Item = &'static StdlibDoc> {
+	STDLIB_DOCS.get_or_init(init_stdlib_docs).values()
+}
+
+static STDLIB_DOCS: OnceLock<HashMap<&'static str, StdlibDoc>> = OnceLock::new();
+
+/// Generate signature string from function parameters.
+fn build_signature(spec_fn: &jrsonnet_std_sig::StdFn) -> String {
+	let params: Vec<String> = spec_fn
+		.params
+		.iter()
+		.map(|p| {
+			if p.has_default {
+				format!("{}=...", p.name)
+			} else {
+				p.name.to_string()
+			}
+		})
+		.collect();
+
+	// Deliberately no closing paren: StdlibDoc::to_markdown appends it,
+	// and the tests assert on this un-terminated form.
+	format!("({}", params.join(", "))
+}
+
+fn init_stdlib_docs() -> HashMap<&'static str, StdlibDoc> {
+	FNS.iter()
+		.map(|spec_fn| {
+			let doc = StdlibDoc {
+				name: spec_fn.name,
+				signature: build_signature(spec_fn),
+				description: spec_fn.doc,
+				example: spec_fn.example,
+			};
+			(spec_fn.name, doc)
+		})
+		.collect()
+}
+
+/// Initialize the stdlib docs (called lazily).
+pub fn ensure_initialized() {
+	STDLIB_DOCS.get_or_init(init_stdlib_docs);
+}
+
+#[cfg(test)]
+mod tests {
+	use indoc::indoc;
+
+	use super::*;
+
+	#[test]
+	fn test_get_stdlib_doc_map() {
+		ensure_initialized();
+		assert_eq!(
+			get_stdlib_doc("map").unwrap(),
+			&StdlibDoc {
+				name: "map",
+				signature: "(func, arr".to_string(),
+				description: "Applies `func` to each element of `arr`.",
+				example: Some("std.map(function(x) x * 2, [1,2,3]) // [2, 4, 6]"),
+			}
+		);
+	}
+
+	#[test]
+	fn test_get_stdlib_doc_sort_with_optional() {
+		ensure_initialized();
+		assert_eq!(
+			get_stdlib_doc("sort").unwrap(),
+			&StdlibDoc {
+				name: "sort",
+				signature: "(arr, keyF=...".to_string(),
+				description: "Sorts array, optionally by key function.",
+				example: Some("std.sort([3,1,2]) // [1, 2, 3]"),
+			}
+		);
+	}
+
+	#[test]
+	fn test_get_stdlib_doc_format_variadic() {
+		ensure_initialized();
+		assert_eq!(
+			get_stdlib_doc("format").unwrap(),
+			&StdlibDoc {
+				name: "format",
+				signature: "(fmt".to_string(),
+				description: "Printf-style formatting.",
+				example: Some(r#"std.format("Hello %s", ["world"]) // "Hello world""#),
+			}
+		);
+	}
+
+	#[test]
+	fn test_to_markdown_with_example() {
+		ensure_initialized();
+		let doc = get_stdlib_doc("map").unwrap();
+		assert_eq!(
+			doc.to_markdown(),
+			indoc! {r"
+				```jsonnet
+				std.map(func, arr)
+				```
+
+				Applies `func` to each element of `arr`.
+
+				**Example:**
+				```jsonnet
+				std.map(function(x) x * 2, [1,2,3]) // [2, 4, 6]
+				```"}
+		);
+	}
+
+	#[test]
+	fn test_to_markdown_no_example() {
+		ensure_initialized();
+		let doc = get_stdlib_doc("mapWithIndex").unwrap();
+		assert_eq!(
+			doc.to_markdown(),
+			indoc! {"
+				```jsonnet
+				std.mapWithIndex(func, arr)
+				```
+
+				Like `map`, but `func` takes `(index, element)`."}
+		);
+	}
+
+	#[test]
+	fn test_unknown_function() {
+		ensure_initialized();
+		assert!(get_stdlib_doc("unknownFunction").is_none());
+	}
+
+	#[test]
+	fn test_all_spec_functions_have_docs() {
+		ensure_initialized();
+		for spec_fn in FNS {
+			let doc = get_stdlib_doc(spec_fn.name);
+			assert!(doc.is_some(), "Missing doc for {}", spec_fn.name);
+
+			let doc = doc.unwrap();
+			assert_eq!(doc.name, spec_fn.name);
+			assert_eq!(doc.description, spec_fn.doc);
+			assert_eq!(doc.example, spec_fn.example);
+		}
+	}
+}
diff --git a/crates/jrsonnet-lsp-stdlib/src/lib.rs b/crates/jrsonnet-lsp-stdlib/src/lib.rs
new file mode 100644
index 00000000..ebc9a37c
--- /dev/null
+++ b/crates/jrsonnet-lsp-stdlib/src/lib.rs
@@ -0,0 +1,14 @@
+//! Standard library documentation and signatures for Jsonnet LSP.
+//!
+//! This crate provides:
+//! - Type signatures for standard library functions
+//! - Documentation strings for hover and completion
+
+mod docs;
+mod signatures;
+
+pub use docs::{ensure_initialized, get_all_stdlib_docs, get_stdlib_doc, StdlibDoc};
+pub use signatures::{
+	get_all_stdlib_signatures, get_stdlib_func_data, get_stdlib_func_ty, get_stdlib_signature,
+	import_stdlib_func_to_mut_store, import_ty_from_stdlib, stdlib_store, StdlibSignature,
+};
diff --git a/crates/jrsonnet-lsp-stdlib/src/signatures.rs b/crates/jrsonnet-lsp-stdlib/src/signatures.rs
new file mode 100644
index 00000000..5887caa8
--- /dev/null
+++ b/crates/jrsonnet-lsp-stdlib/src/signatures.rs
@@ -0,0 +1,461 @@
+//! Standard library function signatures for type checking.
+//!
+//! Provides parameter and return type information for stdlib functions.
+//! Uses `Ty` and `FunctionData` for efficient interned type representation.
+//!
+//! Type signatures are generated from the `jrsonnet-std-sig` crate spec.
+
+use std::{collections::HashMap, sync::OnceLock};
+
+use jrsonnet_lsp_types::{
+	FieldDefInterned, FunctionData, MutStore, ObjectData, ParamInterned,
+	ReturnSpec as LspReturnSpec, Ty, TyConstraints, TyData, TyStore,
+};
+use jrsonnet_std_sig::{ParamType, ReturnSpec as SigReturnSpec, FNS};
+
+/// Combined storage for stdlib types and signatures.
+struct StdlibData {
+	/// Store for interned stdlib types.
+	store: TyStore,
+	/// Map from function name to signature.
+	signatures: HashMap<&'static str, StdlibSignature>,
+}
+
+/// Signature for a stdlib function.
+#[derive(Debug, Clone)]
+pub struct StdlibSignature {
+	/// Function name (without `std.` prefix).
+	pub name: &'static str,
+	/// The function type as interned Ty (references STDLIB_DATA.store).
+	pub func_ty: Ty,
+}
+
+impl StdlibSignature {
+	/// Count of required parameters.
+	pub fn required_count(&self) -> usize {
+		match *stdlib_store().get(self.func_ty) {
+			TyData::Function(ref f) => f.required_count(),
+			_ => 0,
+		}
+	}
+
+	/// Total parameter count.
+	pub fn total_count(&self) -> usize {
+		match *stdlib_store().get(self.func_ty) {
+			TyData::Function(ref f) => f.params.len(),
+			_ => 0,
+		}
+	}
+
+	/// Whether the function accepts variadic arguments.
+	pub fn variadic(&self) -> bool {
+		match *stdlib_store().get(self.func_ty) {
+			TyData::Function(ref f) => f.variadic,
+			_ => false,
+		}
+	}
+
+	/// Get the function data from the global store as an owned copy.
+	pub fn func_data(&self) -> Option<FunctionData> {
+		match *stdlib_store().get(self.func_ty) {
+			TyData::Function(ref f) => Some(f.clone()),
+			_ => None,
+		}
+	}
+}
+
+static STDLIB_DATA: OnceLock<StdlibData> = OnceLock::new();
+
+/// Get the global stdlib type store.
+pub fn stdlib_store() -> &'static TyStore {
+	&STDLIB_DATA.get_or_init(init_stdlib_data).store
+}
+
+/// Get the signature for a stdlib function by name.
+pub fn get_stdlib_signature(name: &str) -> Option<&'static StdlibSignature> {
+	STDLIB_DATA
+		.get_or_init(init_stdlib_data)
+		.signatures
+		.get(name)
+}
+
+/// Get the function type (as Ty) for a stdlib function by name.
+pub fn get_stdlib_func_ty(name: &str) -> Option<Ty> {
+	get_stdlib_signature(name).map(|s| s.func_ty)
+}
+
+/// Get the function data for a stdlib function by name.
+pub fn get_stdlib_func_data(name: &str) -> Option<FunctionData> {
+	get_stdlib_signature(name).and_then(StdlibSignature::func_data)
+}
+
+/// Get all stdlib signatures.
+pub fn get_all_stdlib_signatures() -> impl Iterator<Item = &'static StdlibSignature> {
+	// Self-initializing via get_or_init, consistent with get_stdlib_signature.
+	STDLIB_DATA
+		.get_or_init(init_stdlib_data)
+		.signatures
+		.values()
+}
+
+/// Initialize the stdlib signatures (called lazily).
+pub fn ensure_initialized() {
+	STDLIB_DATA.get_or_init(init_stdlib_data);
+}
+
+/// Convert a spec ParamType to an interned Ty.
+fn param_type_to_ty(store: &mut TyStore, pt: ParamType) -> Ty {
+	match pt {
+		ParamType::Any => Ty::ANY,
+		ParamType::Null => Ty::NULL,
+		ParamType::Bool => Ty::BOOL,
+		ParamType::Number => Ty::NUMBER,
+		ParamType::String => Ty::STRING,
+		ParamType::Char => Ty::CHAR,
+		ParamType::Array => store.array(Ty::ANY),
+		ParamType::ArrayNumber => store.array(Ty::NUMBER),
+		ParamType::ArrayString => store.array(Ty::STRING),
+		ParamType::ArrayChar => store.array(Ty::CHAR),
+		ParamType::ArrayBool => store.array(Ty::BOOL),
+		ParamType::Object => store.object_any(),
+		ParamType::Function => store.function_any(),
+		ParamType::StringOrArray => {
+			let arr = store.array(Ty::ANY);
+			store.union(vec![Ty::STRING, arr])
+		}
+		ParamType::Lengthable => {
+			let arr = store.array(Ty::ANY);
+			let obj = store.object_any();
+			let func = store.function_any();
+			store.union(vec![arr, Ty::STRING, obj, func])
+		}
+	}
+}
+
+/// Convert a spec ReturnSpec to an LSP ReturnSpec.
+fn convert_return_spec(store: &mut TyStore, rs: SigReturnSpec) -> LspReturnSpec { + match rs { + SigReturnSpec::Fixed(pt) => LspReturnSpec::Fixed(param_type_to_ty(store, pt)), + SigReturnSpec::SameAsArg(idx) => LspReturnSpec::SameAsArg(idx), + SigReturnSpec::NonNegative => LspReturnSpec::NonNegative, + SigReturnSpec::ArrayOfFuncReturn(idx) => LspReturnSpec::ArrayOfFuncReturn(idx), + SigReturnSpec::ArrayWithSameElements(idx) => LspReturnSpec::ArrayWithSameElements(idx), + SigReturnSpec::SetWithSameElements(idx) => LspReturnSpec::SetWithSameElements(idx), + SigReturnSpec::ObjectValuesType(idx) => LspReturnSpec::ObjectValuesType(idx), + SigReturnSpec::FlatMapResult(idx) => LspReturnSpec::FlatMapResult(idx), + SigReturnSpec::Any => LspReturnSpec::default(), + } +} + +fn init_stdlib_data() -> StdlibData { + let mut store = TyStore::new(); + + // Generate signatures from the spec + let sigs: Vec = FNS + .iter() + .map(|spec_fn| { + // Convert parameters + let params: Vec = spec_fn + .params + .iter() + .map(|p| ParamInterned { + name: p.name.to_string(), + ty: param_type_to_ty(&mut store, p.ty), + has_default: p.has_default, + }) + .collect(); + + // Convert return spec + let return_spec = convert_return_spec(&mut store, spec_fn.return_spec); + + // Create function type + let func_data = FunctionData { + params, + return_spec, + variadic: spec_fn.variadic, + }; + let func_ty = store.intern(TyData::Function(func_data)); + + StdlibSignature { + name: spec_fn.name, + func_ty, + } + }) + .collect(); + + let signatures: HashMap<&'static str, StdlibSignature> = + sigs.into_iter().map(|s| (s.name, s)).collect(); + + StdlibData { store, signatures } +} + +/// Look up a stdlib function type by name and import it into a MutStore. +/// +/// Returns the function type if found, interned into the local store. 
+pub fn import_stdlib_func_to_mut_store(store: &mut MutStore, name: &str) -> Option { + let func_ty = get_stdlib_func_ty(name)?; + Some(import_ty_from_stdlib(store, func_ty)) +} + +/// Import a type from the stdlib store into a MutStore. +pub fn import_ty_from_stdlib(store: &mut MutStore, ty: Ty) -> Ty { + // Well-known constants are the same in all stores + if ty.is_well_known() { + return ty; + } + + let source = stdlib_store(); + match *source.get(ty) { + TyData::Array { elem, .. } => { + let imported_elem = import_ty_from_stdlib(store, elem); + store.array(imported_elem) + } + TyData::Tuple { ref elems } => { + let imported_elems: Vec<_> = elems + .iter() + .map(|&e| import_ty_from_stdlib(store, e)) + .collect(); + store.tuple(imported_elems) + } + TyData::Union(ref variants) => { + let imported_variants: Vec<_> = variants + .iter() + .map(|&v| import_ty_from_stdlib(store, v)) + .collect(); + store.union(imported_variants) + } + TyData::Object(ref obj) => { + let imported_fields: Vec<_> = obj + .fields + .iter() + .map(|(name, field)| { + ( + name.clone(), + FieldDefInterned { + ty: import_ty_from_stdlib(store, field.ty), + required: field.required, + visibility: field.visibility, + }, + ) + }) + .collect(); + store.object(ObjectData { + fields: imported_fields, + has_unknown: obj.has_unknown, + }) + } + TyData::AttrsOf { value } => { + let imported_value = import_ty_from_stdlib(store, value); + store.attrs_of(imported_value) + } + TyData::Function(ref func) => { + let imported_params: Vec<_> = func + .params + .iter() + .map(|p| ParamInterned { + name: p.name.clone(), + ty: import_ty_from_stdlib(store, p.ty), + has_default: p.has_default, + }) + .collect(); + let imported_return = match &func.return_spec { + LspReturnSpec::Fixed(ret) => { + LspReturnSpec::Fixed(import_ty_from_stdlib(store, *ret)) + } + other => other.clone(), + }; + store.function(FunctionData { + params: imported_params, + return_spec: imported_return, + variadic: func.variadic, + }) + } + 
TyData::Sum(ref variants) => { + let imported_variants: Vec<_> = variants + .iter() + .map(|&v| import_ty_from_stdlib(store, v)) + .collect(); + store.sum(imported_variants) + } + TyData::BoundedNumber(bounds) => store.bounded_number(bounds), + TyData::LiteralString(ref s) => store.literal_string(s.clone()), + TyData::TypeVar { + id, + ref constraints, + } => { + let imported_upper = constraints + .upper_bound + .map(|b| import_ty_from_stdlib(store, b)); + store.type_var( + id, + TyConstraints { + must_be_indexable: constraints.must_be_indexable, + must_support_fields: constraints.must_support_fields, + must_be_callable: constraints.must_be_callable, + upper_bound: imported_upper, + }, + ) + } + // Primitives are the same everywhere + TyData::Any + | TyData::Never + | TyData::Null + | TyData::Bool + | TyData::True + | TyData::False + | TyData::Number + | TyData::String + | TyData::Char => ty, + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_get_stdlib_signature_map() { + ensure_initialized(); + let sig = get_stdlib_signature("map").unwrap(); + assert_eq!(sig.name, "map"); + + let func_data = sig.func_data().unwrap(); + // Extract (name, has_default) for structural comparison + let params: Vec<_> = func_data + .params + .iter() + .map(|p| (p.name.as_str(), p.has_default)) + .collect(); + assert_eq!(params, vec![("func", false), ("arr", false)]); + assert_eq!( + (func_data.variadic, &func_data.return_spec), + (false, &LspReturnSpec::ArrayOfFuncReturn(0)) + ); + } + + #[test] + fn test_get_stdlib_signature_sort_with_optional() { + ensure_initialized(); + let sig = get_stdlib_signature("sort").unwrap(); + assert_eq!(sig.name, "sort"); + + let func_data = sig.func_data().unwrap(); + let params: Vec<_> = func_data + .params + .iter() + .map(|p| (p.name.as_str(), p.has_default)) + .collect(); + assert_eq!(params, vec![("arr", false), ("keyF", true)]); + assert_eq!( + (func_data.variadic, &func_data.return_spec), + (false, 
&LspReturnSpec::SameAsArg(0)) + ); + } + + #[test] + fn test_get_stdlib_signature_format_variadic() { + ensure_initialized(); + let sig = get_stdlib_signature("format").unwrap(); + assert_eq!(sig.name, "format"); + + let func_data = sig.func_data().unwrap(); + let params: Vec<_> = func_data + .params + .iter() + .map(|p| (p.name.as_str(), p.has_default)) + .collect(); + assert_eq!(params, vec![("fmt", false)]); + assert_eq!( + (func_data.variadic, &func_data.return_spec), + (true, &LspReturnSpec::Fixed(Ty::STRING)) + ); + } + + #[test] + fn test_get_stdlib_signature_length() { + ensure_initialized(); + let sig = get_stdlib_signature("length").unwrap(); + assert_eq!(sig.name, "length"); + + let func_data = sig.func_data().unwrap(); + let params: Vec<_> = func_data + .params + .iter() + .map(|p| (p.name.as_str(), p.has_default)) + .collect(); + assert_eq!(params, vec![("x", false)]); + assert_eq!( + (func_data.variadic, &func_data.return_spec), + (false, &LspReturnSpec::NonNegative) + ); + + // Verify the parameter is a union type + let param_ty = func_data.params[0].ty; + assert!(matches!(*stdlib_store().get(param_ty), TyData::Union(_))); + } + + #[test] + fn test_unknown_function() { + ensure_initialized(); + assert!(get_stdlib_signature("unknownFunction").is_none()); + } + + #[test] + fn test_required_count() { + ensure_initialized(); + + // sort has 1 required, 1 optional + let sort = get_stdlib_signature("sort").unwrap(); + assert_eq!(sort.required_count(), 1); + assert_eq!(sort.total_count(), 2); + + // map has 2 required + let map = get_stdlib_signature("map").unwrap(); + assert_eq!(map.required_count(), 2); + assert_eq!(map.total_count(), 2); + } + + #[test] + fn test_set_functions_return_set_type() { + ensure_initialized(); + + // std.set returns a set + let set = get_stdlib_signature("set").unwrap(); + let func_data = set.func_data().unwrap(); + assert_eq!(func_data.return_spec, LspReturnSpec::SetWithSameElements(0)); + + // std.uniq returns a set + let 
uniq = get_stdlib_signature("uniq").unwrap(); + let func_data = uniq.func_data().unwrap(); + assert_eq!(func_data.return_spec, LspReturnSpec::SetWithSameElements(0)); + + // std.setUnion returns a set + let set_union = get_stdlib_signature("setUnion").unwrap(); + let func_data = set_union.func_data().unwrap(); + assert_eq!(func_data.return_spec, LspReturnSpec::SetWithSameElements(0)); + + // std.setInter returns a set + let set_inter = get_stdlib_signature("setInter").unwrap(); + let func_data = set_inter.func_data().unwrap(); + assert_eq!(func_data.return_spec, LspReturnSpec::SetWithSameElements(0)); + + // std.setDiff returns a set + let set_diff = get_stdlib_signature("setDiff").unwrap(); + let func_data = set_diff.func_data().unwrap(); + assert_eq!(func_data.return_spec, LspReturnSpec::SetWithSameElements(0)); + + // std.setMember returns bool (not a set) + let set_member = get_stdlib_signature("setMember").unwrap(); + let func_data = set_member.func_data().unwrap(); + assert_eq!(func_data.return_spec, LspReturnSpec::Fixed(Ty::BOOL)); + } + + #[test] + fn test_all_spec_functions_have_signatures() { + ensure_initialized(); + for spec_fn in FNS { + let sig = get_stdlib_signature(spec_fn.name); + assert!(sig.is_some(), "Missing signature for {}", spec_fn.name); + } + } +} diff --git a/crates/jrsonnet-lsp-types/Cargo.toml b/crates/jrsonnet-lsp-types/Cargo.toml new file mode 100644 index 00000000..55397c90 --- /dev/null +++ b/crates/jrsonnet-lsp-types/Cargo.toml @@ -0,0 +1,19 @@ +[package] +name = "jrsonnet-lsp-types" +authors.workspace = true +edition.workspace = true +license.workspace = true +repository.workspace = true +version.workspace = true +description = "Type system for jrsonnet LSP" + +[dependencies] +jrsonnet-rowan-parser = { version = "0.5.0-pre97", path = "../jrsonnet-rowan-parser" } +rustc-hash.workspace = true + +[dev-dependencies] +assert_matches = "1.5.0" +rstest = "0.23" + +[lints] +workspace = true diff --git 
a/crates/jrsonnet-lsp-types/src/display.rs b/crates/jrsonnet-lsp-types/src/display.rs new file mode 100644 index 00000000..17f8bc5a --- /dev/null +++ b/crates/jrsonnet-lsp-types/src/display.rs @@ -0,0 +1,422 @@ +//! Type display formatting with configurable verbosity. +//! +//! Provides `DisplayContext` for controlling how types are formatted: +//! - Compact mode: elide long lists, abbreviate objects +//! - Detailed mode: show all fields, full signatures + +use crate::{NumBounds, ObjectData, ParamInterned, ReturnSpec, Ty, TyData, TypeStoreOps}; + +/// Display style for types. +#[derive(Debug, Clone, Copy, Default, PartialEq, Eq)] +pub enum DisplayStyle { + /// Compact display - elide long lists, abbreviate objects. + #[default] + Compact, + /// Detailed display - show all fields, full signatures. + Detailed, +} + +/// Context for displaying types with configurable verbosity. +#[derive(Clone)] +pub struct DisplayContext<'a, S: TypeStoreOps> { + /// Display style. + pub style: DisplayStyle, + /// Type store for resolving types. + store: &'a S, + /// Maximum depth for nested types (to prevent infinite recursion). + pub max_depth: usize, + /// Maximum items to show in arrays/objects before eliding. + pub max_items: usize, + /// Maximum union members to show before eliding. + pub max_union_members: usize, +} + +impl<'a, S: TypeStoreOps> DisplayContext<'a, S> { + /// Create a compact display context. + pub fn compact(store: &'a S) -> Self { + Self { + style: DisplayStyle::Compact, + store, + max_depth: 3, + max_items: 3, + max_union_members: 3, + } + } + + /// Create a detailed display context. + pub fn detailed(store: &'a S) -> Self { + Self { + style: DisplayStyle::Detailed, + store, + max_depth: 10, + max_items: 20, + max_union_members: 10, + } + } + + /// Format a type using this context. 
+ pub fn format(&self, ty: Ty) -> String { + self.format_impl(ty, 0) + } + + fn format_impl(&self, ty: Ty, depth: usize) -> String { + if depth > self.max_depth { + return "...".to_string(); + } + + match self.store.get_data(ty) { + TyData::Any => "any".to_string(), + TyData::Never => "never".to_string(), + TyData::Null => "null".to_string(), + TyData::Bool => "boolean".to_string(), + TyData::True => "true".to_string(), + TyData::False => "false".to_string(), + TyData::Number => "number".to_string(), + TyData::BoundedNumber(bounds) => self.format_bounded_number(&bounds), + TyData::String => "string".to_string(), + TyData::Char => "char".to_string(), + TyData::LiteralString(s) => { + if self.style == DisplayStyle::Compact && s.len() > 20 { + format!("\"{}...\"", &s[..17]) + } else { + format!("\"{}\"", s) + } + } + TyData::Array { elem, is_set } => { + let elem_str = self.format_impl(elem, depth + 1); + if is_set { + format!("set<{}>", elem_str) + } else { + format!("array<{}>", elem_str) + } + } + TyData::Tuple { elems } => self.format_tuple(&elems, depth), + TyData::Object(obj) => self.format_object(&obj, depth), + TyData::AttrsOf { value } => { + format!("object<{}>", self.format_impl(value, depth + 1)) + } + TyData::Function(func) => { + let ret = match &func.return_spec { + ReturnSpec::Fixed(ret) => *ret, + _ => Ty::ANY, // For complex return specs, show "any" + }; + self.format_function(&func.params, ret, depth) + } + TyData::Union(types) => self.format_union(&types, depth), + TyData::Sum(types) => self.format_sum(&types, depth), + TyData::TypeVar { id, constraints } => { + let mut s = id.to_string(); + if !constraints.is_empty() && self.style == DisplayStyle::Detailed { + let mut parts: Vec = Vec::new(); + if constraints.must_be_indexable { + parts.push("indexable".to_string()); + } + if constraints.must_support_fields { + parts.push("object-like".to_string()); + } + if constraints.must_be_callable { + parts.push("callable".to_string()); + } + if let 
Some(bound) = constraints.upper_bound { + parts.push(format!("<: {}", self.format_impl(bound, depth + 1))); + } + if !parts.is_empty() { + s.push_str(" where "); + s.push_str(&parts.join(", ")); + } + } + s + } + } + } + + fn format_bounded_number(&self, bounds: &NumBounds) -> String { + match (bounds.min_f64(), bounds.max_f64()) { + (None, None) => "number".to_string(), + (Some(lo), Some(hi)) if (lo - hi).abs() < f64::EPSILON => { + if lo.fract() == 0.0 { + format!("{}", lo as i64) + } else { + format!("{}", lo) + } + } + (Some(lo), Some(hi)) => { + if self.style == DisplayStyle::Compact { + "number".to_string() + } else { + format!("number[{}..{}]", lo, hi) + } + } + (Some(lo), None) => { + if self.style == DisplayStyle::Compact { + "number".to_string() + } else { + format!("number[{}..Inf]", lo) + } + } + (None, Some(hi)) => { + if self.style == DisplayStyle::Compact { + "number".to_string() + } else { + format!("number[-Inf..{}]", hi) + } + } + } + } + + fn format_tuple(&self, elems: &[Ty], depth: usize) -> String { + if elems.is_empty() { + return "[]".to_string(); + } + + let show_count = if self.style == DisplayStyle::Compact { + self.max_items.min(elems.len()) + } else { + elems.len() + }; + + let elem_strs: Vec = elems[..show_count] + .iter() + .map(|&t| self.format_impl(t, depth + 1)) + .collect(); + + let elided = if elems.len() > show_count { + format!(", ...{} more", elems.len() - show_count) + } else { + String::new() + }; + + format!("[{}{}]", elem_strs.join(", "), elided) + } + + fn format_object(&self, obj: &ObjectData, depth: usize) -> String { + if obj.fields.is_empty() && !obj.has_unknown { + return "{}".to_string(); + } + + if obj.has_unknown && obj.fields.is_empty() { + return "object".to_string(); + } + + // Sort fields by name for consistent output + let mut fields: Vec<_> = obj.fields.iter().collect(); + fields.sort_by(|(a, _), (b, _)| a.cmp(b)); + + let show_count = if self.style == DisplayStyle::Compact { + 
self.max_items.min(fields.len()) + } else { + fields.len() + }; + + let field_strs: Vec = fields[..show_count] + .iter() + .map(|(name, field)| { + if self.style == DisplayStyle::Detailed { + format!("{}: {}", name, self.format_impl(field.ty, depth + 1)) + } else { + name.to_string() + } + }) + .collect(); + + let elided = if fields.len() > show_count { + format!(", ...{} more", fields.len() - show_count) + } else { + String::new() + }; + + let suffix = if obj.has_unknown { ", ..." } else { "" }; + + format!("{{ {}{}{} }}", field_strs.join(", "), elided, suffix) + } + + fn format_function(&self, params: &[ParamInterned], ret: Ty, depth: usize) -> String { + let param_strs: Vec = if self.style == DisplayStyle::Detailed { + params + .iter() + .map(|p| { + let ty_str = self.format_impl(p.ty, depth + 1); + if p.has_default { + format!("{}?: {}", p.name, ty_str) + } else { + format!("{}: {}", p.name, ty_str) + } + }) + .collect() + } else { + params.iter().map(|p| p.name.to_string()).collect() + }; + + if self.style == DisplayStyle::Detailed && !ret.is_any() { + format!( + "({}) -> {}", + param_strs.join(", "), + self.format_impl(ret, depth + 1) + ) + } else { + format!("function({})", param_strs.join(", ")) + } + } + + fn format_union(&self, types: &[Ty], depth: usize) -> String { + if types.is_empty() { + return "never".to_string(); + } + + let show_count = if self.style == DisplayStyle::Compact { + self.max_union_members.min(types.len()) + } else { + types.len() + }; + + let parts: Vec = types[..show_count] + .iter() + .map(|&t| self.format_impl(t, depth + 1)) + .collect(); + + let elided = if types.len() > show_count { + format!(" | ...{} more", types.len() - show_count) + } else { + String::new() + }; + + format!("{}{}", parts.join(" | "), elided) + } + + fn format_sum(&self, types: &[Ty], depth: usize) -> String { + let parts: Vec = types + .iter() + .map(|&t| self.format_impl(t, depth + 1)) + .collect(); + parts.join(" & ") + } +} + +/// Wrapper for displaying a 
type with a context using std::fmt.
/// Formats lazily via the wrapped `DisplayContext` when displayed.
pub struct DisplayTy<'a, S: TypeStoreOps> {
	ty: Ty,
	cx: &'a DisplayContext<'a, S>,
}

impl<'a, S: TypeStoreOps> DisplayTy<'a, S> {
	/// Create a new display wrapper.
	pub fn new(ty: Ty, cx: &'a DisplayContext<'a, S>) -> Self {
		Self { ty, cx }
	}
}

// FIX: the generic parameter `S` was not declared on this impl header
// (`impl std::fmt::Display for DisplayTy<'_, S>`), which does not compile;
// declare it with the bound `DisplayTy` requires.
impl<S: TypeStoreOps> std::fmt::Display for DisplayTy<'_, S> {
	fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
		write!(f, "{}", self.cx.format(self.ty))
	}
}

#[cfg(test)]
mod tests {
	use super::*;
	use crate::{FieldDefInterned, FieldVis, FunctionData, TyStore};

	#[test]
	fn test_compact_vs_detailed_object() {
		let mut store = TyStore::new();

		// Create an object with more fields than compact mode will show
		let fields: Vec<_> = (0..10)
			.map(|i| {
				(
					format!("field{}", i),
					FieldDefInterned {
						ty: Ty::STRING,
						required: true,
						visibility: FieldVis::Normal,
					},
				)
			})
			.collect();
		let obj_ty = store.object(ObjectData {
			fields,
			has_unknown: false,
		});

		let compact = DisplayContext::compact(&store);
		let detailed = DisplayContext::detailed(&store);

		let compact_str = compact.format(obj_ty);
		let detailed_str = detailed.format(obj_ty);

		// Compact should elide fields
		assert!(compact_str.contains("..."));
		assert!(compact_str.contains("more"));

		// Detailed should show all
		assert!(detailed_str.contains("field9"));
	}

	#[test]
	fn test_function_display() {
		let mut store = TyStore::new();

		let func_ty = store.function(FunctionData {
			params: vec![
				ParamInterned {
					name: "x".into(),
					ty: Ty::NUMBER,
					has_default: false,
				},
				ParamInterned {
					name: "y".into(),
					ty: Ty::STRING,
					has_default: true,
				},
			],
			return_spec: ReturnSpec::Fixed(Ty::BOOL),
			variadic: false,
		});

		let compact = DisplayContext::compact(&store);
		let detailed = DisplayContext::detailed(&store);

		let compact_str = compact.format(func_ty);
		let detailed_str = detailed.format(func_ty);

		// Compact: function(x, y)
		assert_eq!(compact_str,
"function(x, y)"); + + // Detailed: (x: number, y?: string) -> boolean + assert!(detailed_str.contains("number")); + assert!(detailed_str.contains("y?:")); + assert!(detailed_str.contains("->")); + } + + #[test] + fn test_max_depth() { + let mut store = TyStore::new(); + + // Create deeply nested array: array>> + let mut ty = Ty::NUMBER; + for _ in 0..20 { + ty = store.array(ty); + } + + let compact = DisplayContext::compact(&store); + let result = compact.format(ty); + + // Should hit max depth and show "..." + assert!(result.contains("...")); + } + + #[test] + fn test_union_elision() { + let mut store = TyStore::new(); + + // Create a union with many members + let members: Vec = (0..10).map(|_| Ty::STRING).collect(); + let union_ty = store.union(members); + + let compact = DisplayContext::compact(&store); + let result = compact.format(union_ty); + + // Compact should elide union members + assert!(result.contains("more") || result.matches(" | ").count() < 9); + } +} diff --git a/crates/jrsonnet-lsp-types/src/global_store.rs b/crates/jrsonnet-lsp-types/src/global_store.rs new file mode 100644 index 00000000..57c3c4eb --- /dev/null +++ b/crates/jrsonnet-lsp-types/src/global_store.rs @@ -0,0 +1,200 @@ +//! Global type store - shared across all files in a session. +//! +//! This store is thread-safe and holds all interned types that persist +//! across file analyses. Types from any file can reference types in +//! this store, enabling cross-file type sharing. + +use std::sync::RwLock; + +use rustc_hash::FxHashMap; + +use crate::store::{Ty, TyData}; + +/// Internal storage implementation shared between global and local stores. +#[derive(Debug, Clone)] +pub(crate) struct TyStoreInner { + /// Type data indexed by Ty ID. + pub(crate) data: Vec, + /// Reverse mapping for deduplication. + pub(crate) dedup: FxHashMap, +} + +impl TyStoreInner { + /// Create a new store with well-known types pre-populated. 
+ pub(crate) fn with_builtins() -> Self { + let mut inner = Self { + data: Vec::with_capacity(64), + dedup: FxHashMap::default(), + }; + inner.init_builtins(); + inner + } + + /// Initialize built-in well-known types. + fn init_builtins(&mut self) { + // Must match the order of Ty constants! + let builtins = [ + TyData::Any, // 0 = ANY + TyData::Never, // 1 = NEVER + TyData::Null, // 2 = NULL + TyData::Bool, // 3 = BOOL + TyData::True, // 4 = TRUE + TyData::False, // 5 = FALSE + TyData::Number, // 6 = NUMBER + TyData::String, // 7 = STRING + TyData::Char, // 8 = CHAR + // Padding to RESERVED_COUNT + TyData::Any, // 9 - reserved + TyData::Any, // 10 - reserved + TyData::Any, // 11 - reserved + TyData::Any, // 12 - reserved + TyData::Any, // 13 - reserved + TyData::Any, // 14 - reserved + TyData::Any, // 15 - reserved + ]; + + for (i, data) in builtins.into_iter().enumerate() { + let ty = Ty::from_raw(i as u32); + self.data.push(data.clone()); + // Only dedup the non-padding entries + if i < 9 { + self.dedup.insert(data, ty); + } + } + + debug_assert_eq!(self.data.len(), Ty::RESERVED_COUNT as usize); + } + + /// Get type data by index (panics if out of bounds). + #[inline] + pub(crate) fn get_data(&self, index: u32) -> &TyData { + &self.data[index as usize] + } + + /// Get the number of types in this store. + pub(crate) fn len(&self) -> usize { + self.data.len() + } + + /// Intern a type, returning existing ID if already present. + /// The `make_ty` function creates the Ty from the raw index. + pub(crate) fn intern(&mut self, data: TyData, make_ty: impl Fn(u32) -> Ty) -> Ty { + // Check if already interned + if let Some(&existing) = self.dedup.get(&data) { + return existing; + } + + // Intern new type + let id = make_ty(self.data.len() as u32); + self.data.push(data.clone()); + self.dedup.insert(data, id); + id + } +} + +/// Global type store - shared across all files in a session. +/// +/// Thread-safe via `RwLock`. All persistent types live here. 
+/// Types interned here have `Ty` values without the `LOCAL_BIT` set. +#[derive(Debug)] +pub struct GlobalTyStore { + inner: RwLock, +} + +impl GlobalTyStore { + /// Create a new global store with built-in types. + pub fn new() -> Self { + Self { + inner: RwLock::new(TyStoreInner::with_builtins()), + } + } + + /// Get read access to the inner store. + pub(crate) fn read(&self) -> std::sync::RwLockReadGuard<'_, TyStoreInner> { + self.inner.read().unwrap() + } + + /// Get write access to the inner store. + pub(crate) fn write(&self) -> std::sync::RwLockWriteGuard<'_, TyStoreInner> { + self.inner.write().unwrap() + } + + /// Get type data for a global Ty. + /// + /// # Panics + /// Panics if `ty` is a local type (has LOCAL_BIT set). + pub fn get_data(&self, ty: Ty) -> TyData { + debug_assert!(!ty.is_local(), "Cannot look up local type in global store"); + self.read().get_data(ty.raw_index()).clone() + } + + /// Intern a type into the global store. + /// + /// Returns an existing type if the data is already interned. + pub fn intern(&self, data: TyData) -> Ty { + // Fast path for well-known types + if let Some(ty) = Ty::well_known_for_data(&data) { + return ty; + } + + let mut inner = self.write(); + inner.intern(data, Ty::from_raw) + } + + /// Get the number of types in the global store. + pub fn len(&self) -> usize { + self.read().len() + } + + /// Check if empty (never true after init). 
+ pub fn is_empty(&self) -> bool { + self.read().len() == 0 + } +} + +impl Default for GlobalTyStore { + fn default() -> Self { + Self::new() + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_global_store_new() { + let store = GlobalTyStore::new(); + // Should have built-in types + assert!(store.len() >= Ty::RESERVED_COUNT as usize); + } + + #[test] + fn test_global_store_intern_dedup() { + let store = GlobalTyStore::new(); + + let arr1 = store.intern(TyData::Array { + elem: Ty::NUMBER, + is_set: false, + }); + let arr2 = store.intern(TyData::Array { + elem: Ty::NUMBER, + is_set: false, + }); + + assert_eq!(arr1, arr2); + assert!(!arr1.is_local()); + } + + #[test] + fn test_global_store_well_known() { + let store = GlobalTyStore::new(); + + // Well-known types should return the constant + let any = store.intern(TyData::Any); + assert_eq!(any, Ty::ANY); + + let num = store.intern(TyData::Number); + assert_eq!(num, Ty::NUMBER); + } +} diff --git a/crates/jrsonnet-lsp-types/src/lib.rs b/crates/jrsonnet-lsp-types/src/lib.rs new file mode 100644 index 00000000..c08ba712 --- /dev/null +++ b/crates/jrsonnet-lsp-types/src/lib.rs @@ -0,0 +1,44 @@ +//! Jsonnet type system definitions. +//! +//! This crate provides the type definitions and operations used throughout +//! the Jsonnet LSP for type inference, type checking, and flow typing. +//! +//! The type system uses a simplified model where: +//! - `Any` is the top type (all values) +//! - `Never` is the bottom type (no values, unreachable code) +//! - Union types represent values that could be one of several types +//! +//! # Module Organization +//! +//! - [`store`]: Type storage with interned `Ty` references for efficient representation +//! - [`global_store`]: Thread-safe global type store shared across all files +//! - [`local_store`]: Per-file local type store for analysis +//! - [`mut_store`]: Mutable store combining global and local for analysis +//! 
- [`operations`]: Type operations for checking and combining types +//! - [`unification`]: Type unification with variance handling +//! - [`display`]: Type display formatting with configurable verbosity + +pub mod display; +pub mod global_store; +pub mod local_store; +pub mod mut_store; +mod operations; +pub mod store; +pub mod subst; +pub mod unification; + +pub use display::{DisplayContext, DisplayStyle, DisplayTy}; +pub use global_store::GlobalTyStore; +pub use local_store::LocalTyStore; +pub use mut_store::MutStore; +pub use operations::*; +pub use store::{ + reset_store, with_store, FieldDefInterned, FieldVis, FunctionData, NumBounds, ObjectData, + ParamInterned, ReturnSpec, Ty, TyConstraints, TyData, TyStore, TySubstitution, TyVarId, + TypeStoreOps, +}; +pub use subst::TySubst; +pub use unification::{ + is_subtype_ty, types_equivalent_ty, unify_ty, PathElement, UnifyError, UnifyReason, + UnifyResult, Variance, +}; diff --git a/crates/jrsonnet-lsp-types/src/local_store.rs b/crates/jrsonnet-lsp-types/src/local_store.rs new file mode 100644 index 00000000..6ca0e526 --- /dev/null +++ b/crates/jrsonnet-lsp-types/src/local_store.rs @@ -0,0 +1,141 @@ +//! Local type store - per-file temporary storage during analysis. +//! +//! Local types are created during file analysis and then merged into +//! the global store when analysis completes. Local types have the +//! `LOCAL_BIT` set in their `Ty` values. + +use rustc_hash::FxHashMap; + +use crate::store::{Ty, TyData}; + +/// Per-file local type store - temporary during analysis. +/// +/// Types interned here have `Ty` values with the `LOCAL_BIT` set. +/// After analysis, these types are merged into the global store. +#[derive(Debug, Clone)] +pub struct LocalTyStore { + /// Type data indexed by local Ty index. + data: Vec, + /// Reverse mapping for deduplication. + dedup: FxHashMap, +} + +impl LocalTyStore { + /// Create a new empty local store. 
+ pub fn new() -> Self { + Self { + data: Vec::new(), + dedup: FxHashMap::default(), + } + } + + /// Get type data by local index. + /// + /// # Panics + /// Panics if `ty` is not a local type or index is out of bounds. + pub fn get_data(&self, ty: Ty) -> &TyData { + debug_assert!(ty.is_local(), "Expected local type"); + &self.data[ty.raw_index() as usize] + } + + /// Get the number of local types. + pub fn len(&self) -> usize { + self.data.len() + } + + /// Check if empty. + pub fn is_empty(&self) -> bool { + self.data.is_empty() + } + + /// Intern a type into the local store. + /// + /// Returns an existing local type if the data is already interned locally. + /// Note: Does NOT check the global store - caller should check global first. + pub fn intern(&mut self, data: TyData) -> Ty { + // Check if already interned locally + if let Some(&existing) = self.dedup.get(&data) { + return existing; + } + + // Intern new local type + let id = Ty::from_raw_local(self.data.len() as u32); + self.data.push(data.clone()); + self.dedup.insert(data, id); + id + } + + /// Iterate over all local types with their indices. + pub fn iter(&self) -> impl Iterator { + self.data + .iter() + .enumerate() + .map(|(i, data)| (Ty::from_raw_local(i as u32), data)) + } + + /// Consume the store and return the type data vector. 
+ pub fn into_data(self) -> Vec { + self.data + } +} + +impl Default for LocalTyStore { + fn default() -> Self { + Self::new() + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_local_store_intern() { + let mut store = LocalTyStore::new(); + + let arr1 = store.intern(TyData::Array { + elem: Ty::NUMBER, + is_set: false, + }); + let arr2 = store.intern(TyData::Array { + elem: Ty::NUMBER, + is_set: false, + }); + + assert_eq!(arr1, arr2); + assert!(arr1.is_local()); + assert_eq!(arr1.raw_index(), 0); + } + + #[test] + fn test_local_store_get_data() { + let mut store = LocalTyStore::new(); + + let arr = store.intern(TyData::Array { + elem: Ty::NUMBER, + is_set: false, + }); + let data = store.get_data(arr); + + assert!(matches!(data, TyData::Array { elem, .. } if *elem == Ty::NUMBER)); + } + + #[test] + fn test_local_store_iter() { + let mut store = LocalTyStore::new(); + + store.intern(TyData::Array { + elem: Ty::NUMBER, + is_set: false, + }); + store.intern(TyData::Array { + elem: Ty::STRING, + is_set: false, + }); + + let types: Vec<_> = store.iter().collect(); + assert_eq!(types.len(), 2); + assert!(types[0].0.is_local()); + assert!(types[1].0.is_local()); + } +} diff --git a/crates/jrsonnet-lsp-types/src/mut_store.rs b/crates/jrsonnet-lsp-types/src/mut_store.rs new file mode 100644 index 00000000..29d83961 --- /dev/null +++ b/crates/jrsonnet-lsp-types/src/mut_store.rs @@ -0,0 +1,887 @@ +//! Mutable store for type analysis - combines global and local stores. +//! +//! During analysis, types are looked up in the global store first, +//! and new types are created in the local store. After analysis, +//! local types are merged into the global store. 
+ +use std::sync::Arc; + +use crate::{ + global_store::GlobalTyStore, + local_store::LocalTyStore, + store::{ + FieldDefInterned, FunctionData, NumBounds, ObjectData, ParamInterned, ReturnSpec, Ty, + TyConstraints, TyData, TyVarId, TypeStoreOps, + }, +}; + +/// Mutable store for type analysis - combines global and local stores. +/// +/// Provides a unified interface for type operations during file analysis. +/// - Lookups check global store first, then local store +/// - New types are created in the local store +/// - After analysis, call `into_local()` to get the local types for merging +/// +/// Uses `Arc` for easy sharing. +/// +/// Note: MutStore is intentionally not Clone. During analysis, a single +/// MutStore is used and passed by mutable reference. After analysis, +/// the local types are merged into the global store via `TySubst::merge`. +#[derive(Debug)] +pub struct MutStore { + /// Reference to the global store (read-only during analysis). + global: Arc, + /// Local store for types created during this analysis. + local: LocalTyStore, +} + +impl MutStore { + /// Create a new mutable store wrapping a global store. + pub fn new(global: Arc) -> Self { + Self { + global, + local: LocalTyStore::new(), + } + } + + /// Create a new mutable store from a global store reference. + /// + /// Convenience constructor that clones the Arc. + pub fn from_ref(global: &Arc) -> Self { + Self::new(Arc::clone(global)) + } + + /// Get type data for any Ty (global or local). + pub fn get_data(&self, ty: Ty) -> TyData { + if ty.is_local() { + self.local.get_data(ty).clone() + } else { + self.global.get_data(ty) + } + } + + /// Intern a type, checking global first, then local. 
+ /// + /// - Well-known types return immediately + /// - Types already in global store return the global Ty + /// - Types already in local store return the local Ty + /// - New types are created in local store + pub fn intern(&mut self, data: TyData) -> Ty { + // Fast path for well-known types + if let Some(ty) = Ty::well_known_for_data(&data) { + return ty; + } + + // Check global store first (read-only) + { + let global_inner = self.global.read(); + if let Some(&existing) = global_inner.dedup.get(&data) { + return existing; + } + } + + // Check/create in local store + self.local.intern(data) + } + + /// Consume and return the local store for merging. + pub fn into_local(self) -> LocalTyStore { + self.local + } + + /// Get reference to the global store. + pub fn global(&self) -> &GlobalTyStore { + &self.global + } + + /// Get the Arc to the global store. + pub fn global_arc(&self) -> &Arc { + &self.global + } + + /// Get reference to the local store. + pub fn local(&self) -> &LocalTyStore { + &self.local + } + + // ========== Type constructors ========== + + /// Create an array type. + pub fn array(&mut self, elem: Ty) -> Ty { + self.intern(TyData::Array { + elem, + is_set: false, + }) + } + + /// Create a set type (array with sorted, unique elements). + pub fn array_set(&mut self, elem: Ty) -> Ty { + self.intern(TyData::Array { elem, is_set: true }) + } + + /// Create a tuple type. + pub fn tuple(&mut self, elems: Vec) -> Ty { + if elems.is_empty() { + return self.intern(TyData::Tuple { elems: vec![] }); + } + self.intern(TyData::Tuple { elems }) + } + + /// Create an object type. + pub fn object(&mut self, data: ObjectData) -> Ty { + self.intern(TyData::Object(data)) + } + + /// Create an open object (unknown fields). + pub fn object_any(&mut self) -> Ty { + self.object(ObjectData::open()) + } + + /// Create a generic function type (accepts any args, returns any). 
+ pub fn function_any(&mut self) -> Ty { + self.function(FunctionData { + params: vec![], + return_spec: ReturnSpec::Fixed(Ty::ANY), + variadic: true, + }) + } + + /// Create an AttrsOf type (object with uniform value type). + pub fn attrs_of(&mut self, value: Ty) -> Ty { + self.intern(TyData::AttrsOf { value }) + } + + /// Create a function type. + pub fn function(&mut self, data: FunctionData) -> Ty { + self.intern(TyData::Function(data)) + } + + /// Create a function with simple params and fixed return. + pub fn function_simple(&mut self, param_names: Vec<&str>, return_ty: Ty) -> Ty { + let params = param_names + .into_iter() + .map(|name| ParamInterned { + name: name.to_string(), + ty: Ty::ANY, + has_default: false, + }) + .collect(); + self.function(FunctionData { + params, + return_spec: ReturnSpec::Fixed(return_ty), + variadic: false, + }) + } + + /// Create a union type. + pub fn union(&mut self, mut types: Vec) -> Ty { + // Simplification rules + if types.is_empty() { + return Ty::NEVER; + } + if types.len() == 1 { + return types[0]; + } + + // Flatten nested unions and remove duplicates + let mut flattened = Vec::with_capacity(types.len()); + for ty in types.drain(..) { + if ty == Ty::ANY { + return Ty::ANY; // Any absorbs everything + } + if ty == Ty::NEVER { + continue; // Never is identity for union + } + if let TyData::Union(inner) = self.get_data(ty) { + flattened.extend(inner.iter().copied()); + } else if !flattened.contains(&ty) { + flattened.push(ty); + } + } + + // Sort for canonical form + flattened.sort_by_key(|t| t.id()); + flattened.dedup(); + + match flattened.len() { + 0 => Ty::NEVER, + 1 => flattened[0], + _ => self.intern(TyData::Union(flattened)), + } + } + + /// Create a sum (intersection) type. 
+ pub fn sum(&mut self, mut types: Vec) -> Ty { + if types.is_empty() { + return Ty::ANY; + } + if types.len() == 1 { + return types[0]; + } + + // Flatten and simplify + let mut flattened = Vec::with_capacity(types.len()); + for ty in types.drain(..) { + if ty == Ty::NEVER { + return Ty::NEVER; // Never absorbs everything in intersection + } + if ty == Ty::ANY { + continue; // Any is identity for intersection + } + if let TyData::Sum(inner) = self.get_data(ty) { + flattened.extend(inner.iter().copied()); + } else if !flattened.contains(&ty) { + flattened.push(ty); + } + } + + flattened.sort_by_key(|t| t.id()); + flattened.dedup(); + + match flattened.len() { + 0 => Ty::ANY, + 1 => flattened[0], + _ => self.intern(TyData::Sum(flattened)), + } + } + + /// Create a bounded number type. + pub fn bounded_number(&mut self, bounds: NumBounds) -> Ty { + self.intern(TyData::BoundedNumber(bounds)) + } + + /// Create a literal string type. + pub fn literal_string(&mut self, s: String) -> Ty { + self.intern(TyData::LiteralString(s)) + } + + /// Create a type variable. + pub fn type_var(&mut self, id: TyVarId, constraints: TyConstraints) -> Ty { + self.intern(TyData::TypeVar { id, constraints }) + } + + /// Create a fresh type variable with no constraints. + pub fn fresh_var(&mut self) -> Ty { + self.type_var(TyVarId::fresh(), TyConstraints::none()) + } + + // ========== Type queries ========== + + /// Check if type is indexable. + pub fn is_indexable(&self, ty: Ty) -> bool { + match self.get_data(ty) { + TyData::Any + | TyData::String + | TyData::Char + | TyData::Array { .. } + | TyData::Tuple { .. } + | TyData::Object(_) + | TyData::AttrsOf { .. } => true, + TyData::Union(types) | TyData::Sum(types) => { + types.iter().all(|&t| self.is_indexable(t)) + } + TyData::TypeVar { constraints, .. } => constraints.must_be_indexable, + _ => false, + } + } + + /// Check if type supports field access. 
+ pub fn supports_field_access(&self, ty: Ty) -> bool { + match self.get_data(ty) { + TyData::Any | TyData::Object(_) | TyData::AttrsOf { .. } => true, + TyData::Union(types) | TyData::Sum(types) => { + types.iter().all(|&t| self.supports_field_access(t)) + } + TyData::TypeVar { constraints, .. } => constraints.must_support_fields, + _ => false, + } + } + + /// Check if type is callable. + pub fn is_callable(&self, ty: Ty) -> bool { + match self.get_data(ty) { + TyData::Any | TyData::Function(_) => true, + TyData::Union(types) | TyData::Sum(types) => types.iter().all(|&t| self.is_callable(t)), + TyData::TypeVar { constraints, .. } => constraints.must_be_callable, + _ => false, + } + } + + /// Check if a type has any type variables. + pub fn has_type_vars(&self, ty: Ty) -> bool { + match self.get_data(ty) { + TyData::TypeVar { .. } => true, + TyData::Array { elem, .. } => self.has_type_vars(elem), + TyData::Tuple { elems } => elems.iter().any(|&e| self.has_type_vars(e)), + TyData::Union(variants) | TyData::Sum(variants) => { + variants.iter().any(|&v| self.has_type_vars(v)) + } + TyData::Object(obj) => obj.fields.iter().any(|(_, fd)| self.has_type_vars(fd.ty)), + TyData::AttrsOf { value } => self.has_type_vars(value), + TyData::Function(func) => { + func.params.iter().any(|p| self.has_type_vars(p.ty)) + || matches!(&func.return_spec, ReturnSpec::Fixed(ret) if self.has_type_vars(*ret)) + } + _ => false, + } + } + + /// Format a type for display. 
+ pub fn display(&self, ty: Ty) -> String { + match self.get_data(ty) { + TyData::Any => "any".to_string(), + TyData::Never => "never".to_string(), + TyData::Null => "null".to_string(), + TyData::Bool => "boolean".to_string(), + TyData::True => "true".to_string(), + TyData::False => "false".to_string(), + TyData::Number => "number".to_string(), + TyData::BoundedNumber(bounds) => match (bounds.min_f64(), bounds.max_f64()) { + (None, None) => "number".to_string(), + (Some(lo), Some(hi)) if (lo - hi).abs() < f64::EPSILON => { + if lo.fract() == 0.0 { + format!("{}", lo as i64) + } else { + format!("{}", lo) + } + } + (Some(lo), Some(hi)) => format!("number[{}..{}]", lo, hi), + (Some(lo), None) => format!("number[{}..]", lo), + (None, Some(hi)) => format!("number[..{}]", hi), + }, + TyData::String => "string".to_string(), + TyData::Char => "char".to_string(), + TyData::LiteralString(s) => format!("\"{}\"", s), + TyData::Array { elem, is_set } => { + let base = format!("array<{}>", self.display(elem)); + if is_set { + format!("set<{}>", self.display(elem)) + } else { + base + } + } + TyData::Tuple { elems } => { + let types: Vec<_> = elems.iter().map(|&t| self.display(t)).collect(); + format!("[{}]", types.join(", ")) + } + TyData::Object(obj) => { + if obj.fields.is_empty() && !obj.has_unknown { + "{}".to_string() + } else if obj.has_unknown { + "object".to_string() + } else { + let mut fields: Vec<_> = obj.fields.iter().map(|(n, _)| n.as_str()).collect(); + fields.sort_unstable(); + format!("{{ {} }}", fields.join(", ")) + } + } + TyData::AttrsOf { value } => format!("object<{}>", self.display(value)), + TyData::Function(func) => { + let params: Vec<_> = func.params.iter().map(|p| p.name.as_str()).collect(); + format!("function({})", params.join(", ")) + } + TyData::Union(types) => { + let parts: Vec<_> = types.iter().map(|&t| self.display(t)).collect(); + parts.join(" | ") + } + TyData::Sum(types) => { + let parts: Vec<_> = types.iter().map(|&t| 
self.display(t)).collect(); + parts.join(" & ") + } + TyData::TypeVar { id, constraints } => { + let mut s = id.to_string(); + if !constraints.is_empty() { + let mut parts: Vec = Vec::new(); + if constraints.must_be_indexable { + parts.push("indexable".to_string()); + } + if constraints.must_support_fields { + parts.push("object-like".to_string()); + } + if constraints.must_be_callable { + parts.push("callable".to_string()); + } + if let Some(bound) = constraints.upper_bound { + parts.push(format!("<: {}", self.display(bound))); + } + if !parts.is_empty() { + s.push_str(" where "); + s.push_str(&parts.join(", ")); + } + } + s + } + } + } + + /// Alias for `get_data()` to ease migration from TyStore. + /// + /// TyStore::get() returns TyRef which derefs to TyData. This returns + /// TyData directly. Callers using `*store.get(ty)` should use `store.get(ty)`. + #[inline] + pub fn get(&self, ty: Ty) -> TyData { + self.get_data(ty) + } + + // ========== Type operations ========== + + /// Narrow a type by intersecting with a constraint. 
+ pub fn narrow(&mut self, ty: Ty, constraint: Ty) -> Ty { + // Fast paths + if ty == Ty::NEVER || constraint == Ty::NEVER { + return Ty::NEVER; + } + if ty == Ty::ANY { + return constraint; + } + if constraint == Ty::ANY { + return ty; + } + if ty == constraint { + return ty; + } + + let ty_data = self.get_data(ty); + let constraint_data = self.get_data(constraint); + + // Handle unions: narrow each element + if let TyData::Union(types) = ty_data { + let narrowed: Vec = types + .iter() + .map(|&t| self.narrow(t, constraint)) + .filter(|&t| t != Ty::NEVER) + .collect(); + return self.union(narrowed); + } + if let TyData::Union(types) = constraint_data { + let narrowed: Vec = types + .iter() + .map(|&t| self.narrow(ty, t)) + .filter(|&t| t != Ty::NEVER) + .collect(); + return self.union(narrowed); + } + + // Handle literal types as subtypes + match (&ty_data, &constraint_data) { + (TyData::Bool, TyData::True) | (TyData::True, TyData::Bool) => return Ty::TRUE, + (TyData::Bool, TyData::False) | (TyData::False, TyData::Bool) => return Ty::FALSE, + (TyData::String, TyData::Char) | (TyData::Char, TyData::String) => return Ty::CHAR, + (TyData::String, TyData::LiteralString(s)) + | (TyData::LiteralString(s), TyData::String) => { + return self.literal_string(s.clone()); + } + _ => {} + } + + // Handle arrays (preserve is_set if both are sets) + if let ( + TyData::Array { + elem: e1, + is_set: s1, + }, + TyData::Array { + elem: e2, + is_set: s2, + }, + ) = (&ty_data, &constraint_data) + { + let elem = self.narrow(*e1, *e2); + if elem == Ty::NEVER { + return Ty::NEVER; + } + // Result is a set only if both inputs are sets + if *s1 && *s2 { + return self.array_set(elem); + } + return self.array(elem); + } + + // Handle tuples with arrays + if let (TyData::Tuple { elems }, TyData::Array { elem: arr_elem, .. 
}) = + (&ty_data, &constraint_data) + { + let narrowed: Vec = elems.iter().map(|&e| self.narrow(e, *arr_elem)).collect(); + if narrowed.contains(&Ty::NEVER) { + return Ty::NEVER; + } + return self.tuple(narrowed); + } + if let (TyData::Array { elem: arr_elem, .. }, TyData::Tuple { elems }) = + (&ty_data, &constraint_data) + { + let narrowed: Vec = elems.iter().map(|&e| self.narrow(*arr_elem, e)).collect(); + if narrowed.contains(&Ty::NEVER) { + return Ty::NEVER; + } + return self.tuple(narrowed); + } + + // Handle tuples with tuples + if let (TyData::Tuple { elems: e1 }, TyData::Tuple { elems: e2 }) = + (&ty_data, &constraint_data) + { + if e1.len() != e2.len() { + return Ty::NEVER; + } + let narrowed: Vec = e1 + .iter() + .zip(e2.iter()) + .map(|(&a, &b)| self.narrow(a, b)) + .collect(); + if narrowed.contains(&Ty::NEVER) { + return Ty::NEVER; + } + return self.tuple(narrowed); + } + + // Handle objects + if let (TyData::Object(obj1), TyData::Object(obj2)) = (&ty_data, &constraint_data) { + let mut fields = obj1.fields.clone(); + for (name, def2) in &obj2.fields { + if let Some(pos) = fields.iter().position(|(n, _)| n == name) { + let (_, def1) = &fields[pos]; + let narrowed_ty = self.narrow(def1.ty, def2.ty); + fields[pos] = ( + name.clone(), + FieldDefInterned { + ty: narrowed_ty, + required: def1.required && def2.required, + visibility: def1.visibility, + }, + ); + } else { + fields.push((name.clone(), def2.clone())); + } + } + let has_unknown = obj1.has_unknown && obj2.has_unknown; + return self.object(ObjectData { + fields, + has_unknown, + }); + } + + // Different concrete types have no intersection + Ty::NEVER + } + + /// Narrow a type to one with an exact length. + /// + /// - Arrays become tuples with that length + /// - Strings with length 1 become Char + /// - Tuples must have matching length + pub fn with_len(&mut self, ty: Ty, len: usize) -> Ty { + match self.get_data(ty) { + TyData::Any => Ty::ANY, + + TyData::Array { elem, .. 
} => { + let elems = vec![elem; len]; + self.tuple(elems) + } + + TyData::Tuple { elems } => { + if elems.len() == len { + ty + } else { + Ty::NEVER + } + } + + TyData::Object(obj) => match (obj.fields.len().cmp(&len), obj.has_unknown) { + (std::cmp::Ordering::Equal, false) | (std::cmp::Ordering::Less, true) => ty, + (std::cmp::Ordering::Equal, true) => self.object(ObjectData { + fields: obj.fields, + has_unknown: false, + }), + (std::cmp::Ordering::Less, false) | (std::cmp::Ordering::Greater, _) => Ty::NEVER, + }, + + TyData::String => { + if len == 1 { + Ty::CHAR + } else { + ty + } + } + + TyData::LiteralString(s) => { + if s.len() == len { + ty + } else { + Ty::NEVER + } + } + + TyData::Char => { + if len == 1 { + ty + } else { + Ty::NEVER + } + } + + TyData::Function(_) | TyData::AttrsOf { .. } | TyData::TypeVar { .. } => ty, + + TyData::Never + | TyData::Null + | TyData::Bool + | TyData::True + | TyData::False + | TyData::Number + | TyData::BoundedNumber(_) => Ty::NEVER, + + TyData::Union(types) => { + let narrowed: Vec = types + .iter() + .map(|&t| self.with_len(t, len)) + .filter(|&t| t != Ty::NEVER) + .collect(); + self.union(narrowed) + } + + TyData::Sum(types) => { + let narrowed: Vec = types.iter().map(|&t| self.with_len(t, len)).collect(); + if narrowed.contains(&Ty::NEVER) { + Ty::NEVER + } else { + self.sum(narrowed) + } + } + } + } + + /// Narrow a type to one with at least a minimum length. + pub fn with_min_len(&mut self, ty: Ty, min: usize) -> Ty { + match self.get_data(ty) { + TyData::Any => Ty::ANY, + TyData::Never => Ty::NEVER, + + TyData::Array { .. } + | TyData::Object(_) + | TyData::AttrsOf { .. } + | TyData::String + | TyData::LiteralString(_) + | TyData::Function(_) + | TyData::TypeVar { .. 
} => ty, + + TyData::Tuple { elems } => { + if elems.len() >= min { + ty + } else { + Ty::NEVER + } + } + + TyData::Char => { + if min <= 1 { + ty + } else { + Ty::NEVER + } + } + + TyData::Null + | TyData::Bool + | TyData::True + | TyData::False + | TyData::Number + | TyData::BoundedNumber(_) => { + if min == 0 { + ty + } else { + Ty::NEVER + } + } + + TyData::Union(types) => { + let narrowed: Vec = types + .iter() + .map(|&t| self.with_min_len(t, min)) + .filter(|&t| t != Ty::NEVER) + .collect(); + self.union(narrowed) + } + + TyData::Sum(types) => { + let narrowed: Vec = types.iter().map(|&t| self.with_min_len(t, min)).collect(); + if narrowed.contains(&Ty::NEVER) { + Ty::NEVER + } else { + self.sum(narrowed) + } + } + } + } + + /// Widen a type by removing a constraint. + pub fn widen(&mut self, base: Ty, remove: Ty) -> Ty { + // Fast paths + if base == Ty::NEVER { + return Ty::NEVER; + } + if remove == Ty::NEVER { + return base; + } + if remove == Ty::ANY { + return Ty::NEVER; + } + if base == Ty::ANY { + return Ty::ANY; + } + if base == remove { + return Ty::NEVER; + } + + // Handle unions: remove from each element + if let TyData::Union(types) = self.get_data(base) { + let remaining: Vec = types + .iter() + .map(|&t| self.widen(t, remove)) + .filter(|&t| t != Ty::NEVER) + .collect(); + return self.union(remaining); + } + + // Different concrete types: nothing to remove + base + } +} + +impl TypeStoreOps for MutStore { + fn get_data(&self, ty: Ty) -> TyData { + MutStore::get_data(self, ty) + } + + fn display(&self, ty: Ty) -> String { + MutStore::display(self, ty) + } + + fn array(&mut self, elem: Ty) -> Ty { + MutStore::array(self, elem) + } + + fn array_set(&mut self, elem: Ty) -> Ty { + MutStore::array_set(self, elem) + } + + fn tuple(&mut self, elems: Vec) -> Ty { + MutStore::tuple(self, elems) + } + + fn object(&mut self, data: ObjectData) -> Ty { + MutStore::object(self, data) + } + + fn attrs_of(&mut self, value: Ty) -> Ty { + MutStore::attrs_of(self, 
value) + } + + fn function(&mut self, data: FunctionData) -> Ty { + MutStore::function(self, data) + } + + fn union(&mut self, types: Vec) -> Ty { + MutStore::union(self, types) + } + + fn sum(&mut self, types: Vec) -> Ty { + MutStore::sum(self, types) + } + + fn bounded_number(&mut self, bounds: NumBounds) -> Ty { + MutStore::bounded_number(self, bounds) + } + + fn literal_string(&mut self, s: String) -> Ty { + MutStore::literal_string(self, s) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_mut_store_intern_global_first() { + let global = Arc::new(GlobalTyStore::new()); + + // Pre-intern a type in global + let arr_global = global.intern(TyData::Array { + elem: Ty::NUMBER, + is_set: false, + }); + + // MutStore should find it in global + let mut store = MutStore::new(Arc::clone(&global)); + let arr = store.intern(TyData::Array { + elem: Ty::NUMBER, + is_set: false, + }); + + assert_eq!(arr, arr_global); + assert!(arr.is_global()); + assert!(store.local.is_empty()); + } + + #[test] + fn test_mut_store_intern_local() { + let global = Arc::new(GlobalTyStore::new()); + let mut store = MutStore::new(global); + + // Intern a new type not in global + let arr = store.intern(TyData::Array { + elem: Ty::STRING, + is_set: false, + }); + + assert!(arr.is_local()); + assert_eq!(store.local.len(), 1); + } + + #[test] + fn test_mut_store_get_data() { + let global = Arc::new(GlobalTyStore::new()); + let arr_global = global.intern(TyData::Array { + elem: Ty::NUMBER, + is_set: false, + }); + + let mut store = MutStore::new(Arc::clone(&global)); + let arr_local = store.intern(TyData::Array { + elem: Ty::STRING, + is_set: false, + }); + + // Should get data from both stores + assert!( + matches!(store.get_data(arr_global), TyData::Array { elem, .. } if elem == Ty::NUMBER) + ); + assert!( + matches!(store.get_data(arr_local), TyData::Array { elem, .. 
} if elem == Ty::STRING) + ); + } + + #[test] + fn test_mut_store_union() { + let global = Arc::new(GlobalTyStore::new()); + let mut store = MutStore::new(global); + + let union = store.union(vec![Ty::NUMBER, Ty::STRING]); + assert!(!union.is_well_known()); + + // Union with ANY is ANY + let with_any = store.union(vec![Ty::NUMBER, Ty::ANY]); + assert_eq!(with_any, Ty::ANY); + } + + #[test] + fn test_mut_store_display() { + let global = Arc::new(GlobalTyStore::new()); + let mut store = MutStore::new(global); + + assert_eq!(store.display(Ty::NUMBER), "number"); + + let arr = store.array(Ty::STRING); + assert_eq!(store.display(arr), "array"); + } +} diff --git a/crates/jrsonnet-lsp-types/src/operations.rs b/crates/jrsonnet-lsp-types/src/operations.rs new file mode 100644 index 00000000..7246ea4f --- /dev/null +++ b/crates/jrsonnet-lsp-types/src/operations.rs @@ -0,0 +1,1502 @@ +//! Type operations for checking and combining types. +//! +//! This module provides operations that determine the result types of +//! Jsonnet operations like binary operators, unary operators, and type +//! checking predicates. +//! +//! # Type Logic Operations +//! +//! The module also provides core type logic operations for type narrowing: +//! +//! - [`ty_and`][]: Intersection of types (narrows to what satisfies both) +//! - [`ty_minus`][]: Exclusion (removes a type from a union) +//! - [`ty_with_len`][]: Constrains to a specific length +//! - [`ty_with_min_len`][]: Constrains to a minimum length +//! +//! These operations distribute over unions, following the rule: +//! `(A | B) & C = (A & C) | (B & C)` + +use jrsonnet_rowan_parser::nodes::{BinaryOperatorKind, UnaryOperatorKind}; + +use crate::store::{FieldDefInterned, FieldVis, ObjectData, Ty, TyData, TypeStoreOps}; + +/// Check if a binary operation is valid and return the result type. +/// +/// Returns `Ok(result_ty)` if the operation is valid for the given operand types, +/// or `Err(error_message)` if the operation is invalid. 
+pub fn binary_op_result_ty( + op: BinaryOperatorKind, + lhs: Ty, + rhs: Ty, + store: &mut S, +) -> Result { + // Any, Never short-circuit + if lhs.is_any() || rhs.is_any() { + return Ok(Ty::ANY); + } + if lhs.is_never() { + return Ok(Ty::NEVER); + } + if rhs.is_never() { + return Ok(Ty::NEVER); + } + + // Check for TypeVar + if let TyData::TypeVar { .. } = store.get_data(lhs) { + return Ok(Ty::ANY); + } + if let TyData::TypeVar { .. } = store.get_data(rhs) { + return Ok(Ty::ANY); + } + + // Handle union types by checking all combinations + if let TyData::Union(lhs_types) = store.get_data(lhs) { + let mut results = Vec::new(); + for lt in lhs_types { + match binary_op_result_ty(op, lt, rhs, store) { + Ok(t) => results.push(t), + Err(e) => return Err(e), + } + } + return Ok(store.union(results)); + } + if let TyData::Union(rhs_types) = store.get_data(rhs) { + let mut results = Vec::new(); + for rt in rhs_types { + match binary_op_result_ty(op, lhs, rt, store) { + Ok(t) => results.push(t), + Err(e) => return Err(e), + } + } + return Ok(store.union(results)); + } + + // Handle Sum (intersection) types - all variants must support the operation + if let TyData::Sum(lhs_types) = store.get_data(lhs) { + let mut results = Vec::new(); + for lt in lhs_types { + match binary_op_result_ty(op, lt, rhs, store) { + Ok(t) => results.push(t), + Err(e) => return Err(e), + } + } + return Ok(store.union(results)); + } + if let TyData::Sum(rhs_types) = store.get_data(rhs) { + let mut results = Vec::new(); + for rt in rhs_types { + match binary_op_result_ty(op, lhs, rt, store) { + Ok(t) => results.push(t), + Err(e) => return Err(e), + } + } + return Ok(store.union(results)); + } + + // Get type data for matching + let lhs_data = store.get_data(lhs); + let rhs_data = store.get_data(rhs); + + match op { + // Arithmetic: (Number, Number) -> Number + BinaryOperatorKind::Minus + | BinaryOperatorKind::Mul + | BinaryOperatorKind::Div + | BinaryOperatorKind::Modulo => { + if 
is_number_ty(&lhs_data) && is_number_ty(&rhs_data) { + Ok(Ty::NUMBER) + } else { + Err(format!( + "operator requires (number, number), got ({}, {})", + store.display(lhs), + store.display(rhs) + )) + } + } + + // Plus: overloaded for number, string, char, array, tuple, object + BinaryOperatorKind::Plus => match (&lhs_data, &rhs_data) { + (d1, d2) if is_number_ty(d1) && is_number_ty(d2) => Ok(Ty::NUMBER), + (TyData::String | TyData::Char | TyData::LiteralString(_), +TyData::String | TyData::Char | TyData::LiteralString(_)) => Ok(Ty::STRING), + (TyData::Array { elem: l, .. }, TyData::Array { elem: r, .. }) => { + let elem = store.union(vec![*l, *r]); + Ok(store.array(elem)) + } + (TyData::Tuple { elems: l }, TyData::Tuple { elems: r }) => { + // Concatenate tuple element types + let mut elements = l.clone(); + elements.extend(r.iter().copied()); + Ok(store.tuple(elements)) + } + (TyData::Array { elem: a, .. }, TyData::Tuple { elems: t }) + | (TyData::Tuple { elems: t }, TyData::Array { elem: a, .. 
}) => { + // Mixed array/tuple concatenation - result is array + let mut types = t.clone(); + types.push(*a); + let elem = store.union(types); + Ok(store.array(elem)) + } + (TyData::Object(l), TyData::Object(r)) => Ok(store.object(ObjectData::merge(l, r))), + (TyData::AttrsOf { value: l }, TyData::AttrsOf { value: r }) => { + let value = store.union(vec![*l, *r]); + Ok(store.attrs_of(value)) + } + (TyData::Object(obj), TyData::AttrsOf { value }) + | (TyData::AttrsOf { value }, TyData::Object(obj)) => { + // Merge object with attrs-of: result is object with fields widened + let fields: Vec<_> = obj + .fields + .iter() + .map(|(name, field)| { + let widened_ty = store.union(vec![field.ty, *value]); + ( + name.clone(), + FieldDefInterned { + ty: widened_ty, + required: field.required, + visibility: field.visibility, + }, + ) + }) + .collect(); + Ok(store.object(ObjectData { + fields, + has_unknown: true, // AttrsOf adds unknown fields + })) + } + _ => Err(format!( + "operator `+` requires matching types (number+number, string+string, array+array, or object+object), got ({}, {})", + store.display(lhs), store.display(rhs) + )), + }, + + // Logical: (Bool, Bool) -> Bool (though Jsonnet actually allows any types) + BinaryOperatorKind::And | BinaryOperatorKind::Or => { + // In Jsonnet, && and || work on any types (short-circuit) + // But we can warn if operands aren't boolean + if is_bool_ty(&lhs_data) && is_bool_ty(&rhs_data) { + Ok(Ty::BOOL) + } else { + // Jsonnet allows this but returns one of the operands + Ok(store.union(vec![lhs, rhs])) + } + } + + // Bitwise: (Number, Number) -> Number + BinaryOperatorKind::BitAnd + | BinaryOperatorKind::BitOr + | BinaryOperatorKind::BitXor + | BinaryOperatorKind::Lhs + | BinaryOperatorKind::Rhs => { + if is_number_ty(&lhs_data) && is_number_ty(&rhs_data) { + Ok(Ty::NUMBER) + } else { + Err(format!( + "bitwise operator requires (number, number), got ({}, {})", + store.display(lhs), + store.display(rhs) + )) + } + } + + // 
Comparison: any types are valid, returns Bool + BinaryOperatorKind::Eq + | BinaryOperatorKind::Ne + | BinaryOperatorKind::Lt + | BinaryOperatorKind::Gt + | BinaryOperatorKind::Le + | BinaryOperatorKind::Ge => Ok(Ty::BOOL), + + // In: (String, Object) -> Bool + BinaryOperatorKind::InKw => { + let lhs_is_string = + matches!(lhs_data, TyData::String | TyData::Char | TyData::LiteralString(_)); + let rhs_is_object = matches!(rhs_data, TyData::Object(_) | TyData::AttrsOf { .. }); + if lhs_is_string && rhs_is_object { + Ok(Ty::BOOL) + } else { + Err(format!( + "operator `in` requires (string, object), got ({}, {})", + store.display(lhs), + store.display(rhs) + )) + } + } + + // Null coalesce: any types, returns union + BinaryOperatorKind::NullCoaelse => Ok(store.union(vec![lhs, rhs])), + + // Internal/error operators - treat as Any + BinaryOperatorKind::MetaObjectApply | BinaryOperatorKind::ErrorNoOperator => Ok(Ty::ANY), + } +} + +/// Check if a unary operation is valid and return the result type. +/// +/// Returns `Ok(result_ty)` if the operation is valid for the given operand type, +/// or `Err(error_message)` if the operation is invalid. +pub fn unary_op_result_ty( + op: UnaryOperatorKind, + operand: Ty, + store: &mut S, +) -> Result { + // Any, Never short-circuit + if operand.is_any() { + return Ok(Ty::ANY); + } + if operand.is_never() { + return Ok(Ty::NEVER); + } + + // Check for TypeVar + if let TyData::TypeVar { .. 
} = store.get_data(operand) { + return Ok(Ty::ANY); + } + + // Handle union types + if let TyData::Union(types) = store.get_data(operand) { + let mut results = Vec::new(); + for t in types { + match unary_op_result_ty(op, t, store) { + Ok(r) => results.push(r), + Err(e) => return Err(e), + } + } + return Ok(store.union(results)); + } + + let operand_data = store.get_data(operand); + + match op { + UnaryOperatorKind::Not => { + if is_bool_ty(&operand_data) { + Ok(Ty::BOOL) + } else { + Err(format!( + "operator `!` requires boolean, got {}", + store.display(operand) + )) + } + } + UnaryOperatorKind::Minus => { + if is_number_ty(&operand_data) { + Ok(Ty::NUMBER) + } else { + Err(format!( + "operator `-` requires number, got {}", + store.display(operand) + )) + } + } + UnaryOperatorKind::BitNot => { + if is_number_ty(&operand_data) { + Ok(Ty::NUMBER) + } else { + Err(format!( + "operator `~` requires number, got {}", + store.display(operand) + )) + } + } + } +} + +/// Helper to check if TyData represents a number type. +fn is_number_ty(data: &TyData) -> bool { + matches!(data, TyData::Number | TyData::BoundedNumber(_)) +} + +/// Helper to check if TyData represents a boolean type. +fn is_bool_ty(data: &TyData) -> bool { + matches!(data, TyData::Bool | TyData::True | TyData::False) +} + +/// Concatenate two arrays or tuples. +/// +/// Returns the type of the concatenated result. +pub fn array_concat_ty(left: Ty, right: Ty, store: &mut S) -> Ty { + let left_data = store.get_data(left); + let right_data = store.get_data(right); + + match (left_data, right_data) { + (TyData::Tuple { elems: a }, TyData::Tuple { elems: b }) => { + let mut elements = a; + elements.extend(b); + store.tuple(elements) + } + (TyData::Array { elem: a, .. }, TyData::Array { elem: b, .. }) => { + let elem = store.union(vec![a, b]); + store.array(elem) + } + (TyData::Tuple { elems: t }, TyData::Array { elem: a, .. }) + | (TyData::Array { elem: a, .. 
}, TyData::Tuple { elems: t }) => { + let mut types = t; + types.push(a); + let elem = store.union(types); + store.array(elem) + } + _ => store.array(Ty::ANY), + } +} + +// ============================================================================= +// Type Logic Operations +// ============================================================================= + +/// Compute the intersection of two types (type narrowing). +/// +/// Returns the most specific type that satisfies both constraints. +/// This is the logical AND of types - values must satisfy both. +/// +/// # Examples +/// +/// - `ty_and(Any, Number)` → `Number` +/// - `ty_and(Number, String)` → `Never` (no value is both) +/// - `ty_and(Bool, True)` → `True` +/// - `ty_and(Number | String, Number)` → `Number` +/// +/// # Distribution over Unions +/// +/// This operation distributes over unions: +/// `(A | B) & C = (A & C) | (B & C)` +pub fn ty_and(lhs: Ty, rhs: Ty, store: &mut S) -> Ty { + // Fast paths for special types + if lhs == Ty::NEVER || rhs == Ty::NEVER { + return Ty::NEVER; + } + if lhs == Ty::ANY { + return rhs; + } + if rhs == Ty::ANY { + return lhs; + } + if lhs == rhs { + return lhs; + } + + let lhs_data = store.get_data(lhs); + let rhs_data = store.get_data(rhs); + + // Handle unions: distribute (A | B) & C = (A & C) | (B & C) + if let TyData::Union(types) = lhs_data { + let narrowed: Vec = types + .iter() + .map(|&t| ty_and(t, rhs, store)) + .filter(|&t| t != Ty::NEVER) + .collect(); + return store.union(narrowed); + } + if let TyData::Union(types) = rhs_data { + let narrowed: Vec = types + .iter() + .map(|&t| ty_and(lhs, t, store)) + .filter(|&t| t != Ty::NEVER) + .collect(); + return store.union(narrowed); + } + + // Refresh data after potential recursion + let lhs_data = store.get_data(lhs); + let rhs_data = store.get_data(rhs); + + // Handle literal/subtype relationships + match (&lhs_data, &rhs_data) { + // Bool and its literals + (TyData::Bool, TyData::True) | (TyData::True, 
TyData::Bool) => return Ty::TRUE, + (TyData::Bool, TyData::False) | (TyData::False, TyData::Bool) => return Ty::FALSE, + + // String and Char + (TyData::String, TyData::Char) | (TyData::Char, TyData::String) => return Ty::CHAR, + + // String and LiteralString + (TyData::String, TyData::LiteralString(s)) | (TyData::LiteralString(s), TyData::String) => { + return store.literal_string(s.clone()); + } + + // LiteralString with same value + (TyData::LiteralString(s1), TyData::LiteralString(s2)) => { + if s1 == s2 { + return store.literal_string(s1.clone()); + } + return Ty::NEVER; + } + + // Char and LiteralString of length 1 + (TyData::Char, TyData::LiteralString(s)) | (TyData::LiteralString(s), TyData::Char) => { + if s.chars().count() == 1 { + return store.literal_string(s.clone()); + } + return Ty::NEVER; + } + + _ => {} + } + + // Handle arrays (preserve is_set if both are sets) + if let ( + TyData::Array { + elem: e1, + is_set: s1, + }, + TyData::Array { + elem: e2, + is_set: s2, + }, + ) = (&lhs_data, &rhs_data) + { + let elem = ty_and(*e1, *e2, store); + if elem == Ty::NEVER { + return Ty::NEVER; + } + // Result is a set only if both inputs are sets + if *s1 && *s2 { + return store.array_set(elem); + } + return store.array(elem); + } + + // Handle tuples with arrays + if let (TyData::Tuple { elems }, TyData::Array { elem: arr_elem, .. }) = (&lhs_data, &rhs_data) + { + let narrowed: Vec = elems.iter().map(|&e| ty_and(e, *arr_elem, store)).collect(); + if narrowed.contains(&Ty::NEVER) { + return Ty::NEVER; + } + return store.tuple(narrowed); + } + if let (TyData::Array { elem: arr_elem, .. 
}, TyData::Tuple { elems }) = (&lhs_data, &rhs_data) + { + let narrowed: Vec = elems.iter().map(|&e| ty_and(*arr_elem, e, store)).collect(); + if narrowed.contains(&Ty::NEVER) { + return Ty::NEVER; + } + return store.tuple(narrowed); + } + + // Handle tuples with tuples + if let (TyData::Tuple { elems: e1 }, TyData::Tuple { elems: e2 }) = (&lhs_data, &rhs_data) { + if e1.len() != e2.len() { + return Ty::NEVER; + } + let narrowed: Vec = e1 + .iter() + .zip(e2.iter()) + .map(|(&a, &b)| ty_and(a, b, store)) + .collect(); + if narrowed.contains(&Ty::NEVER) { + return Ty::NEVER; + } + return store.tuple(narrowed); + } + + // Handle objects + if let (TyData::Object(obj1), TyData::Object(obj2)) = (&lhs_data, &rhs_data) { + let mut fields = obj1.fields.clone(); + for (name, def2) in &obj2.fields { + if let Some(pos) = fields.iter().position(|(n, _)| n == name) { + let (_, def1) = &fields[pos]; + let narrowed_ty = ty_and(def1.ty, def2.ty, store); + fields[pos] = ( + name.clone(), + FieldDefInterned { + ty: narrowed_ty, + required: def1.required || def2.required, + visibility: def1.visibility, + }, + ); + } else { + fields.push((name.clone(), def2.clone())); + } + } + let has_unknown = obj1.has_unknown && obj2.has_unknown; + return store.object(ObjectData { + fields, + has_unknown, + }); + } + + // Handle AttrsOf + if let (TyData::AttrsOf { value: v1 }, TyData::AttrsOf { value: v2 }) = (&lhs_data, &rhs_data) { + let elem = ty_and(*v1, *v2, store); + if elem == Ty::NEVER { + return Ty::NEVER; + } + return store.attrs_of(elem); + } + + // Handle object + AttrsOf + if let (TyData::Object(obj), TyData::AttrsOf { value }) = (&lhs_data, &rhs_data) { + let fields: Vec<_> = obj + .fields + .iter() + .map(|(name, field)| { + let narrowed_ty = ty_and(field.ty, *value, store); + ( + name.clone(), + FieldDefInterned { + ty: narrowed_ty, + required: field.required, + visibility: field.visibility, + }, + ) + }) + .collect(); + return store.object(ObjectData { + fields, + has_unknown: 
obj.has_unknown, + }); + } + if let (TyData::AttrsOf { value }, TyData::Object(obj)) = (&lhs_data, &rhs_data) { + let fields: Vec<_> = obj + .fields + .iter() + .map(|(name, field)| { + let narrowed_ty = ty_and(*value, field.ty, store); + ( + name.clone(), + FieldDefInterned { + ty: narrowed_ty, + required: field.required, + visibility: field.visibility, + }, + ) + }) + .collect(); + return store.object(ObjectData { + fields, + has_unknown: obj.has_unknown, + }); + } + + // Handle BoundedNumber + if let (TyData::Number, TyData::BoundedNumber(bounds)) + | (TyData::BoundedNumber(bounds), TyData::Number) = (&lhs_data, &rhs_data) + { + return store.bounded_number(*bounds); + } + if let (TyData::BoundedNumber(b1), TyData::BoundedNumber(b2)) = (&lhs_data, &rhs_data) { + // Intersection of bounds: take stricter bounds + let min = match (b1.min_f64(), b2.min_f64()) { + (Some(a), Some(b)) => Some(a.max(b)), + (Some(a), None) => Some(a), + (None, Some(b)) => Some(b), + (None, None) => None, + }; + let max = match (b1.max_f64(), b2.max_f64()) { + (Some(a), Some(b)) => Some(a.min(b)), + (Some(a), None) => Some(a), + (None, Some(b)) => Some(b), + (None, None) => None, + }; + // Check for empty range + if let (Some(lo), Some(hi)) = (min, max) { + if lo > hi { + return Ty::NEVER; + } + } + return store.bounded_number(crate::store::NumBounds { + min: min.map(f64::to_bits), + max: max.map(f64::to_bits), + }); + } + + // Handle Sum (intersection) types + if let TyData::Sum(types) = lhs_data { + let narrowed: Vec = types.iter().map(|&t| ty_and(t, rhs, store)).collect(); + if narrowed.contains(&Ty::NEVER) { + return Ty::NEVER; + } + return store.sum(narrowed); + } + if let TyData::Sum(types) = rhs_data { + let narrowed: Vec = types.iter().map(|&t| ty_and(lhs, t, store)).collect(); + if narrowed.contains(&Ty::NEVER) { + return Ty::NEVER; + } + return store.sum(narrowed); + } + + // Handle TypeVar - keep it, may be resolved later + if matches!(lhs_data, TyData::TypeVar { .. 
}) || matches!(rhs_data, TyData::TypeVar { .. }) { + return store.sum(vec![lhs, rhs]); + } + + // Different incompatible concrete types have no intersection + Ty::NEVER +} + +/// Compute the exclusion of one type from another. +/// +/// Returns the type with the constraint removed (difference/minus). +/// This removes values that match `remove` from `base`. +/// +/// # Examples +/// +/// - `ty_minus(Number | String, Number)` → `String` +/// - `ty_minus(Bool, True)` → `False` +/// - `ty_minus(Any, Number)` → `Any` (Any is too general) +/// - `ty_minus(Number, Number)` → `Never` +/// +/// # Distribution over Unions +/// +/// This operation distributes over unions: +/// `(A | B) - C = (A - C) | (B - C)` +pub fn ty_minus(base: Ty, remove: Ty, store: &mut S) -> Ty { + // Fast paths + if base == Ty::NEVER { + return Ty::NEVER; + } + if remove == Ty::NEVER { + return base; + } + if remove == Ty::ANY { + return Ty::NEVER; + } + if base == Ty::ANY { + // Can't remove anything meaningful from Any + return Ty::ANY; + } + if base == remove { + return Ty::NEVER; + } + + let base_data = store.get_data(base); + + // Handle unions: distribute (A | B) - C = (A - C) | (B - C) + if let TyData::Union(types) = base_data { + let remaining: Vec = types + .iter() + .map(|&t| ty_minus(t, remove, store)) + .filter(|&t| t != Ty::NEVER) + .collect(); + return store.union(remaining); + } + + // Special case: Bool minus True/False + let base_data = store.get_data(base); + let remove_data = store.get_data(remove); + match (&base_data, &remove_data) { + (TyData::Bool, TyData::True) => return Ty::FALSE, + (TyData::Bool, TyData::False) => return Ty::TRUE, + (TyData::True | TyData::False, TyData::Bool) => return Ty::NEVER, + _ => {} + } + + // For non-union types, if they don't match the remove type, return unchanged + base +} + +/// Narrow a type to one with a specific length. +/// +/// This is useful for narrowing based on `std.length(x) == n` conditions. 
+/// +/// # Behavior +/// +/// - Arrays become tuples with `n` elements of the same element type +/// - Tuples must have exactly `n` elements (otherwise `Never`) +/// - Strings with length 1 become `Char` +/// - Objects must have exactly `n` fields (if closed) or at least `n` (if open) +/// - Primitives like Number/Bool return `Never` (they don't have length) +/// +/// # Examples +/// +/// - `ty_with_len(Array, 3)` → `[Number, Number, Number]` +/// - `ty_with_len(String, 1)` → `Char` +/// - `ty_with_len([Number, String], 2)` → `[Number, String]` +/// - `ty_with_len([Number, String], 3)` → `Never` +pub fn ty_with_len(ty: Ty, len: usize, store: &mut S) -> Ty { + let data = store.get_data(ty); + match data { + TyData::Any => Ty::ANY, + + TyData::Array { elem, .. } => { + let elems = vec![elem; len]; + store.tuple(elems) + } + + TyData::Tuple { elems } => { + if elems.len() == len { + ty + } else { + Ty::NEVER + } + } + + TyData::Object(obj) => { + match (obj.fields.len().cmp(&len), obj.has_unknown) { + // Exactly right number of fields + (std::cmp::Ordering::Equal, false) => ty, + // Open object with fewer fields - close it at this length + (std::cmp::Ordering::Less | std::cmp::Ordering::Equal, true) => { + if obj.fields.len() == len { + store.object(ObjectData { + fields: obj.fields, + has_unknown: false, + }) + } else { + ty // Can have unknown fields to reach the length + } + } + // Too few fields in closed object, or too many fields + (std::cmp::Ordering::Less, false) | (std::cmp::Ordering::Greater, _) => Ty::NEVER, + } + } + + TyData::AttrsOf { .. } | TyData::Function(_) | TyData::TypeVar { .. 
} => ty, + + TyData::String => { + if len == 1 { + Ty::CHAR + } else { + ty // String can be any length + } + } + + TyData::LiteralString(s) => { + if s.chars().count() == len { + ty + } else { + Ty::NEVER + } + } + + TyData::Char => { + if len == 1 { + ty + } else { + Ty::NEVER + } + } + + TyData::Never + | TyData::Null + | TyData::Bool + | TyData::True + | TyData::False + | TyData::Number + | TyData::BoundedNumber(_) => Ty::NEVER, + + TyData::Union(types) => { + let narrowed: Vec = types + .iter() + .map(|&t| ty_with_len(t, len, store)) + .filter(|&t| t != Ty::NEVER) + .collect(); + store.union(narrowed) + } + + TyData::Sum(types) => { + let narrowed: Vec = types.iter().map(|&t| ty_with_len(t, len, store)).collect(); + if narrowed.contains(&Ty::NEVER) { + Ty::NEVER + } else { + store.sum(narrowed) + } + } + } +} + +/// Narrow a type to one with at least a minimum length. +/// +/// This is useful for narrowing based on `std.length(x) >= n` conditions. +/// +/// # Behavior +/// +/// - Arrays stay arrays (can have any length) +/// - Tuples must have at least `n` elements +/// - Strings stay strings (can have any length) +/// - Char requires `min <= 1` +/// - Objects with unknown fields stay as-is +/// +/// # Examples +/// +/// - `ty_with_min_len(Array, 3)` → `Array` +/// - `ty_with_min_len([Number, String], 1)` → `[Number, String]` +/// - `ty_with_min_len([Number], 2)` → `Never` +/// - `ty_with_min_len(Char, 2)` → `Never` +pub fn ty_with_min_len(ty: Ty, min: usize, store: &mut S) -> Ty { + let data = store.get_data(ty); + match data { + TyData::Any => Ty::ANY, + TyData::Never => Ty::NEVER, + + TyData::Array { .. } + | TyData::Object(_) + | TyData::AttrsOf { .. } + | TyData::String + | TyData::LiteralString(_) + | TyData::Function(_) + | TyData::TypeVar { .. 
} => ty, + + TyData::Tuple { elems } => { + if elems.len() >= min { + ty + } else { + Ty::NEVER + } + } + + TyData::Char => { + if min <= 1 { + ty + } else { + Ty::NEVER + } + } + + TyData::Null + | TyData::Bool + | TyData::True + | TyData::False + | TyData::Number + | TyData::BoundedNumber(_) => { + if min == 0 { + ty // Everything has "length >= 0" + } else { + Ty::NEVER + } + } + + TyData::Union(types) => { + let narrowed: Vec = types + .iter() + .map(|&t| ty_with_min_len(t, min, store)) + .filter(|&t| t != Ty::NEVER) + .collect(); + store.union(narrowed) + } + + TyData::Sum(types) => { + let narrowed: Vec = types + .iter() + .map(|&t| ty_with_min_len(t, min, store)) + .collect(); + if narrowed.contains(&Ty::NEVER) { + Ty::NEVER + } else { + store.sum(narrowed) + } + } + } +} + +/// Add a required field to an object type. +/// +/// Returns a new object type with the specified field added. +/// If the field already exists, its type is narrowed with the new type. +pub fn ty_with_field(ty: Ty, field: &str, field_ty: Ty, store: &mut S) -> Ty { + let data = store.get_data(ty); + match data { + TyData::Any => { + // Create an open object with this field + store.object(ObjectData { + fields: vec![( + field.to_string(), + FieldDefInterned { + ty: field_ty, + required: true, + visibility: FieldVis::Normal, + }, + )], + has_unknown: true, + }) + } + + TyData::Object(mut obj) => { + if let Some(pos) = obj.fields.iter().position(|(n, _)| n == field) { + let (_, existing) = &obj.fields[pos]; + let narrowed = ty_and(existing.ty, field_ty, store); + obj.fields[pos] = ( + field.to_string(), + FieldDefInterned { + ty: narrowed, + required: true, + visibility: existing.visibility, + }, + ); + } else { + obj.fields.push(( + field.to_string(), + FieldDefInterned { + ty: field_ty, + required: true, + visibility: FieldVis::Normal, + }, + )); + } + store.object(obj) + } + + TyData::AttrsOf { value } => { + // AttrsOf with a specific field becomes object with that field + let narrowed 
= ty_and(value, field_ty, store); + store.object(ObjectData { + fields: vec![( + field.to_string(), + FieldDefInterned { + ty: narrowed, + required: true, + visibility: FieldVis::Normal, + }, + )], + has_unknown: true, + }) + } + + TyData::Union(types) => { + let narrowed: Vec = types + .iter() + .map(|&t| ty_with_field(t, field, field_ty, store)) + .filter(|&t| t != Ty::NEVER) + .collect(); + store.union(narrowed) + } + + // Non-object types can't have fields + _ => Ty::NEVER, + } +} + +#[cfg(test)] +mod tests { + use assert_matches::assert_matches; + + use super::{super::TyStore, *}; + + #[test] + fn test_binary_op_valid_number_plus_ty() { + let mut store = TyStore::new(); + let result = + binary_op_result_ty(BinaryOperatorKind::Plus, Ty::NUMBER, Ty::NUMBER, &mut store); + assert_eq!(result, Ok(Ty::NUMBER)); + } + + #[test] + fn test_binary_op_valid_string_plus_ty() { + let mut store = TyStore::new(); + let result = + binary_op_result_ty(BinaryOperatorKind::Plus, Ty::STRING, Ty::STRING, &mut store); + assert_eq!(result, Ok(Ty::STRING)); + } + + #[test] + fn test_binary_op_invalid_string_plus_number_ty() { + let mut store = TyStore::new(); + binary_op_result_ty(BinaryOperatorKind::Plus, Ty::STRING, Ty::NUMBER, &mut store) + .expect_err("String + Number is invalid"); + } + + #[test] + fn test_binary_op_comparison_returns_bool_ty() { + let mut store = TyStore::new(); + let result = + binary_op_result_ty(BinaryOperatorKind::Lt, Ty::NUMBER, Ty::NUMBER, &mut store); + assert_eq!(result, Ok(Ty::BOOL)); + } + + #[test] + fn test_binary_op_with_any_ty() { + let mut store = TyStore::new(); + let result = binary_op_result_ty(BinaryOperatorKind::Plus, Ty::ANY, Ty::NUMBER, &mut store); + assert_eq!(result, Ok(Ty::ANY)); + } + + #[test] + fn test_binary_op_with_never_ty() { + let mut store = TyStore::new(); + let result = + binary_op_result_ty(BinaryOperatorKind::Plus, Ty::NEVER, Ty::NUMBER, &mut store); + assert_eq!(result, Ok(Ty::NEVER)); + } + + #[test] + fn 
test_unary_op_valid_not_bool_ty() { + let mut store = TyStore::new(); + let result = unary_op_result_ty(UnaryOperatorKind::Not, Ty::BOOL, &mut store); + assert_eq!(result, Ok(Ty::BOOL)); + } + + #[test] + fn test_unary_op_invalid_not_number_ty() { + let mut store = TyStore::new(); + unary_op_result_ty(UnaryOperatorKind::Not, Ty::NUMBER, &mut store) + .expect_err("!Number is invalid"); + } + + #[test] + fn test_unary_op_minus_number_ty() { + let mut store = TyStore::new(); + let result = unary_op_result_ty(UnaryOperatorKind::Minus, Ty::NUMBER, &mut store); + assert_eq!(result, Ok(Ty::NUMBER)); + } + + #[test] + fn test_unary_op_bitnot_number_ty() { + let mut store = TyStore::new(); + let result = unary_op_result_ty(UnaryOperatorKind::BitNot, Ty::NUMBER, &mut store); + assert_eq!(result, Ok(Ty::NUMBER)); + } + + #[test] + fn test_array_concat_tuples_ty() { + let mut store = TyStore::new(); + let left = store.tuple(vec![Ty::NUMBER]); + let right = store.tuple(vec![Ty::STRING]); + let result = array_concat_ty(left, right, &mut store); + assert_matches!(store.get_data(result), TyData::Tuple { elems } => { + assert_eq!(elems, vec![Ty::NUMBER, Ty::STRING]); + }); + } + + #[test] + fn test_array_concat_arrays_ty() { + let mut store = TyStore::new(); + let left = store.array(Ty::NUMBER); + let right = store.array(Ty::STRING); + let result = array_concat_ty(left, right, &mut store); + assert_matches!(store.get_data(result), TyData::Array { .. 
}); + } + + #[test] + fn test_binary_op_union_lhs_ty() { + let mut store = TyStore::new(); + // (Number | String) + Number should fail (String + Number invalid) + let union_ty = store.union(vec![Ty::NUMBER, Ty::STRING]); + binary_op_result_ty(BinaryOperatorKind::Plus, union_ty, Ty::NUMBER, &mut store) + .expect_err("(Number|String) + Number is invalid because String+Number fails"); + } + + #[test] + fn test_binary_op_union_valid_ty() { + let mut store = TyStore::new(); + // (Number | Number) + Number should succeed + let union_ty = store.union(vec![Ty::NUMBER, Ty::NUMBER]); + binary_op_result_ty(BinaryOperatorKind::Plus, union_ty, Ty::NUMBER, &mut store) + .expect("(Number|Number) + Number should succeed"); + } + + #[test] + fn test_binary_op_in_ty() { + let mut store = TyStore::new(); + let obj_ty = store.object(ObjectData { + fields: vec![], + has_unknown: true, + }); + let result = binary_op_result_ty(BinaryOperatorKind::InKw, Ty::STRING, obj_ty, &mut store); + assert_eq!(result, Ok(Ty::BOOL)); + } + + #[test] + fn test_binary_op_bitwise_ty() { + let mut store = TyStore::new(); + let result = binary_op_result_ty( + BinaryOperatorKind::BitAnd, + Ty::NUMBER, + Ty::NUMBER, + &mut store, + ); + assert_eq!(result, Ok(Ty::NUMBER)); + } + + #[test] + fn test_binary_op_logical_bool_ty() { + let mut store = TyStore::new(); + let result = binary_op_result_ty(BinaryOperatorKind::And, Ty::BOOL, Ty::BOOL, &mut store); + assert_eq!(result, Ok(Ty::BOOL)); + } + + #[test] + fn test_binary_op_null_coalesce_ty() { + let mut store = TyStore::new(); + let result_ty = binary_op_result_ty( + BinaryOperatorKind::NullCoaelse, + Ty::NUMBER, + Ty::STRING, + &mut store, + ) + .expect("null coalesce should succeed"); + // Result should be union of both types + assert_matches!(store.get_data(result_ty), TyData::Union(_)); + } + + // ============================================================================= + // Type Logic Operations Tests + // 
============================================================================= + + mod ty_and_tests { + use super::*; + + #[test] + fn test_any_narrows_to_constraint() { + let mut store = TyStore::new(); + assert_eq!(ty_and(Ty::ANY, Ty::NUMBER, &mut store), Ty::NUMBER); + assert_eq!(ty_and(Ty::NUMBER, Ty::ANY, &mut store), Ty::NUMBER); + } + + #[test] + fn test_never_always_never() { + let mut store = TyStore::new(); + assert_eq!(ty_and(Ty::NEVER, Ty::NUMBER, &mut store), Ty::NEVER); + assert_eq!(ty_and(Ty::NUMBER, Ty::NEVER, &mut store), Ty::NEVER); + } + + #[test] + fn test_same_type_returns_same() { + let mut store = TyStore::new(); + assert_eq!(ty_and(Ty::NUMBER, Ty::NUMBER, &mut store), Ty::NUMBER); + assert_eq!(ty_and(Ty::STRING, Ty::STRING, &mut store), Ty::STRING); + } + + #[test] + fn test_incompatible_types_never() { + let mut store = TyStore::new(); + assert_eq!(ty_and(Ty::NUMBER, Ty::STRING, &mut store), Ty::NEVER); + assert_eq!(ty_and(Ty::BOOL, Ty::NUMBER, &mut store), Ty::NEVER); + } + + #[test] + fn test_bool_narrows_to_literal() { + let mut store = TyStore::new(); + assert_eq!(ty_and(Ty::BOOL, Ty::TRUE, &mut store), Ty::TRUE); + assert_eq!(ty_and(Ty::BOOL, Ty::FALSE, &mut store), Ty::FALSE); + assert_eq!(ty_and(Ty::TRUE, Ty::BOOL, &mut store), Ty::TRUE); + assert_eq!(ty_and(Ty::FALSE, Ty::BOOL, &mut store), Ty::FALSE); + } + + #[test] + fn test_string_narrows_to_char() { + let mut store = TyStore::new(); + assert_eq!(ty_and(Ty::STRING, Ty::CHAR, &mut store), Ty::CHAR); + assert_eq!(ty_and(Ty::CHAR, Ty::STRING, &mut store), Ty::CHAR); + } + + #[test] + fn test_union_distributes() { + let mut store = TyStore::new(); + let union = store.union(vec![Ty::NUMBER, Ty::STRING]); + // (Number | String) & Number = Number + assert_eq!(ty_and(union, Ty::NUMBER, &mut store), Ty::NUMBER); + // Number & (Number | String) = Number + assert_eq!(ty_and(Ty::NUMBER, union, &mut store), Ty::NUMBER); + } + + #[test] + fn test_union_with_incompatible_gives_partial() { + let 
mut store = TyStore::new(); + let union = store.union(vec![Ty::NUMBER, Ty::STRING]); + // (Number | String) & Bool = Never (both incompatible) + assert_eq!(ty_and(union, Ty::BOOL, &mut store), Ty::NEVER); + } + + #[test] + fn test_array_intersection() { + let mut store = TyStore::new(); + let arr_num = store.array(Ty::NUMBER); + let arr_any = store.array(Ty::ANY); + // Array & Array = Array + let result = ty_and(arr_num, arr_any, &mut store); + assert!( + matches!(store.get_data(result), TyData::Array { elem, .. } if elem == Ty::NUMBER) + ); + } + + #[test] + fn test_tuple_intersection_same_length() { + let mut store = TyStore::new(); + let tuple1 = store.tuple(vec![Ty::NUMBER, Ty::ANY]); + let tuple2 = store.tuple(vec![Ty::ANY, Ty::STRING]); + let result = ty_and(tuple1, tuple2, &mut store); + assert_matches!(store.get_data(result), TyData::Tuple { elems } => { + assert_eq!(elems, vec![Ty::NUMBER, Ty::STRING]); + }); + } + + #[test] + fn test_tuple_intersection_different_length_never() { + let mut store = TyStore::new(); + let tuple1 = store.tuple(vec![Ty::NUMBER, Ty::STRING]); + let tuple2 = store.tuple(vec![Ty::NUMBER]); + assert_eq!(ty_and(tuple1, tuple2, &mut store), Ty::NEVER); + } + + #[test] + fn test_object_intersection_merges_fields() { + let mut store = TyStore::new(); + let obj1 = store.object(ObjectData { + fields: vec![( + "a".to_string(), + FieldDefInterned { + ty: Ty::NUMBER, + required: true, + visibility: FieldVis::Normal, + }, + )], + has_unknown: false, + }); + let obj2 = store.object(ObjectData { + fields: vec![( + "b".to_string(), + FieldDefInterned { + ty: Ty::STRING, + required: true, + visibility: FieldVis::Normal, + }, + )], + has_unknown: false, + }); + let result = ty_and(obj1, obj2, &mut store); + assert_matches!(store.get_data(result), TyData::Object(obj) => { + // Should have both fields "a" and "b" + let mut field_names: Vec<_> = obj.fields.iter().map(|(n, _)| n.as_str()).collect(); + field_names.sort_unstable(); + 
assert_eq!(field_names, vec!["a", "b"]); + }); + } + + #[test] + fn test_bounded_number_intersection() { + let mut store = TyStore::new(); + use crate::store::NumBounds; + let bounded1 = store.bounded_number(NumBounds::at_least(0.0)); + let bounded2 = store.bounded_number(NumBounds::between(-10.0, 10.0)); + let result = ty_and(bounded1, bounded2, &mut store); + // Should get [0..10] + assert_matches!(store.get_data(result), TyData::BoundedNumber(bounds) => { + assert_eq!(bounds.min_f64(), Some(0.0)); + assert_eq!(bounds.max_f64(), Some(10.0)); + }); + } + } + + mod ty_minus_tests { + use super::*; + + #[test] + fn test_same_type_gives_never() { + let mut store = TyStore::new(); + assert_eq!(ty_minus(Ty::NUMBER, Ty::NUMBER, &mut store), Ty::NEVER); + } + + #[test] + fn test_different_type_unchanged() { + let mut store = TyStore::new(); + assert_eq!(ty_minus(Ty::NUMBER, Ty::STRING, &mut store), Ty::NUMBER); + } + + #[test] + fn test_any_stays_any() { + let mut store = TyStore::new(); + assert_eq!(ty_minus(Ty::ANY, Ty::NUMBER, &mut store), Ty::ANY); + } + + #[test] + fn test_minus_any_gives_never() { + let mut store = TyStore::new(); + assert_eq!(ty_minus(Ty::NUMBER, Ty::ANY, &mut store), Ty::NEVER); + } + + #[test] + fn test_union_removes_matching() { + let mut store = TyStore::new(); + let union = store.union(vec![Ty::NUMBER, Ty::STRING]); + assert_eq!(ty_minus(union, Ty::NUMBER, &mut store), Ty::STRING); + } + + #[test] + fn test_bool_minus_true_gives_false() { + let mut store = TyStore::new(); + assert_eq!(ty_minus(Ty::BOOL, Ty::TRUE, &mut store), Ty::FALSE); + assert_eq!(ty_minus(Ty::BOOL, Ty::FALSE, &mut store), Ty::TRUE); + } + + #[test] + fn test_never_stays_never() { + let mut store = TyStore::new(); + assert_eq!(ty_minus(Ty::NEVER, Ty::NUMBER, &mut store), Ty::NEVER); + } + } + + mod ty_with_len_tests { + use super::*; + + #[test] + fn test_array_to_tuple() { + let mut store = TyStore::new(); + let arr = store.array(Ty::NUMBER); + let result = 
ty_with_len(arr, 3, &mut store); + assert_matches!(store.get_data(result), TyData::Tuple { elems } => { + assert_eq!(elems, vec![Ty::NUMBER, Ty::NUMBER, Ty::NUMBER]); + }); + } + + #[test] + fn test_tuple_matching_length() { + let mut store = TyStore::new(); + let tuple = store.tuple(vec![Ty::NUMBER, Ty::STRING]); + assert_eq!(ty_with_len(tuple, 2, &mut store), tuple); + } + + #[test] + fn test_tuple_wrong_length_never() { + let mut store = TyStore::new(); + let tuple = store.tuple(vec![Ty::NUMBER, Ty::STRING]); + assert_eq!(ty_with_len(tuple, 3, &mut store), Ty::NEVER); + } + + #[test] + fn test_string_len_1_to_char() { + let mut store = TyStore::new(); + assert_eq!(ty_with_len(Ty::STRING, 1, &mut store), Ty::CHAR); + } + + #[test] + fn test_char_len_1_ok() { + let mut store = TyStore::new(); + assert_eq!(ty_with_len(Ty::CHAR, 1, &mut store), Ty::CHAR); + } + + #[test] + fn test_char_len_not_1_never() { + let mut store = TyStore::new(); + assert_eq!(ty_with_len(Ty::CHAR, 0, &mut store), Ty::NEVER); + assert_eq!(ty_with_len(Ty::CHAR, 2, &mut store), Ty::NEVER); + } + + #[test] + fn test_number_never() { + let mut store = TyStore::new(); + assert_eq!(ty_with_len(Ty::NUMBER, 5, &mut store), Ty::NEVER); + } + + #[test] + fn test_literal_string_matching_len() { + let mut store = TyStore::new(); + let lit = store.literal_string("hello".to_string()); + assert_eq!(ty_with_len(lit, 5, &mut store), lit); + } + + #[test] + fn test_literal_string_wrong_len_never() { + let mut store = TyStore::new(); + let lit = store.literal_string("hello".to_string()); + assert_eq!(ty_with_len(lit, 3, &mut store), Ty::NEVER); + } + + #[test] + fn test_union_filters() { + let mut store = TyStore::new(); + let tuple2 = store.tuple(vec![Ty::NUMBER, Ty::NUMBER]); + let tuple3 = store.tuple(vec![Ty::NUMBER, Ty::NUMBER, Ty::NUMBER]); + let union = store.union(vec![tuple2, tuple3]); + assert_eq!(ty_with_len(union, 2, &mut store), tuple2); + } + } + + mod ty_with_min_len_tests { + use super::*; + + 
#[test] + fn test_array_unchanged() { + let mut store = TyStore::new(); + let arr = store.array(Ty::NUMBER); + assert_eq!(ty_with_min_len(arr, 5, &mut store), arr); + } + + #[test] + fn test_tuple_satisfies_min() { + let mut store = TyStore::new(); + let tuple = store.tuple(vec![Ty::NUMBER, Ty::STRING, Ty::BOOL]); + assert_eq!(ty_with_min_len(tuple, 2, &mut store), tuple); + } + + #[test] + fn test_tuple_too_short_never() { + let mut store = TyStore::new(); + let tuple = store.tuple(vec![Ty::NUMBER]); + assert_eq!(ty_with_min_len(tuple, 2, &mut store), Ty::NEVER); + } + + #[test] + fn test_char_min_1_ok() { + let mut store = TyStore::new(); + assert_eq!(ty_with_min_len(Ty::CHAR, 1, &mut store), Ty::CHAR); + } + + #[test] + fn test_char_min_2_never() { + let mut store = TyStore::new(); + assert_eq!(ty_with_min_len(Ty::CHAR, 2, &mut store), Ty::NEVER); + } + } + + mod ty_with_field_tests { + use super::*; + + #[test] + fn test_any_to_object() { + let mut store = TyStore::new(); + let result = ty_with_field(Ty::ANY, "foo", Ty::NUMBER, &mut store); + assert_matches!(store.get_data(result), TyData::Object(obj) => { + assert_eq!(obj.fields, vec![("foo".to_string(), FieldDefInterned { + ty: Ty::NUMBER, + required: true, + visibility: FieldVis::Normal, + })]); + assert!(obj.has_unknown); + }); + } + + #[test] + fn test_object_adds_field() { + let mut store = TyStore::new(); + let obj = store.object(ObjectData { + fields: vec![( + "a".to_string(), + FieldDefInterned { + ty: Ty::STRING, + required: true, + visibility: FieldVis::Normal, + }, + )], + has_unknown: false, + }); + let result = ty_with_field(obj, "b", Ty::NUMBER, &mut store); + assert_matches!(store.get_data(result), TyData::Object(obj) => { + let mut field_names: Vec<_> = obj.fields.iter().map(|(n, _)| n.as_str()).collect(); + field_names.sort_unstable(); + assert_eq!(field_names, vec!["a", "b"]); + }); + } + + #[test] + fn test_object_narrows_existing_field() { + let mut store = TyStore::new(); + let obj = 
store.object(ObjectData { + fields: vec![( + "a".to_string(), + FieldDefInterned { + ty: Ty::ANY, + required: false, + visibility: FieldVis::Normal, + }, + )], + has_unknown: false, + }); + let result = ty_with_field(obj, "a", Ty::NUMBER, &mut store); + assert_matches!(store.get_data(result), TyData::Object(obj) => { + assert_eq!(obj.fields[0].1.ty, Ty::NUMBER); + assert!(obj.fields[0].1.required); + }); + } + + #[test] + fn test_number_never() { + let mut store = TyStore::new(); + assert_eq!( + ty_with_field(Ty::NUMBER, "foo", Ty::STRING, &mut store), + Ty::NEVER + ); + } + } +} diff --git a/crates/jrsonnet-lsp-types/src/store.rs b/crates/jrsonnet-lsp-types/src/store.rs new file mode 100644 index 00000000..0beaaa7a --- /dev/null +++ b/crates/jrsonnet-lsp-types/src/store.rs @@ -0,0 +1,2344 @@ +//! Interned type storage for efficient type representation. +//! +//! This module provides type interning - storing types once and referencing them +//! by small integer IDs. This provides: +//! +//! - **Memory efficiency**: Identical types are stored only once +//! - **O(1) equality**: Comparing types is just integer comparison +//! - **Free cloning**: `Ty` is `Copy`, no heap allocation needed +//! - **Cache-friendly**: Types stored contiguously in `Vec` +//! +//! # Architecture +//! +//! The type system uses three main components: +//! +//! - [`Ty`]: A 4-byte type ID that references interned type data +//! - [`TyData`]: The actual type representation (primitives, compounds, etc.) +//! - [`TyStore`]: The storage that holds all interned types +//! +//! # Example +//! +//! ```ignore +//! let mut store = TyStore::new(); +//! +//! // Intern some types +//! let num = Ty::NUMBER; // Built-in constant +//! let arr = store.array(num); // Array +//! +//! // Same type gives same ID +//! let arr2 = store.array(num); +//! assert_eq!(arr, arr2); // O(1) comparison +//! +//! // Look up type data +//! let data = store.get(arr); +//! assert!(matches!(data, TyData::Array { .. })); +//! 
``` + +use std::{ + cell::RefCell, + sync::atomic::{AtomicU32, Ordering}, +}; + +use rustc_hash::FxHashMap; + +/// Global counter for generating unique type variable IDs. +static TYPE_VAR_COUNTER: AtomicU32 = AtomicU32::new(0); + +/// An interned type reference. +/// +/// This is a small (4 byte) identifier that references type data stored in a [`TyStore`]. +/// It implements `Copy`, so cloning is free (just copies 4 bytes). +/// +/// Type equality is O(1) - just comparing the internal ID. +/// +/// # Local vs Global Types +/// +/// The upper bit (bit 31) is the `LOCAL_BIT`: +/// - `LOCAL_BIT = 0`: Global type (in `GlobalTyStore`) +/// - `LOCAL_BIT = 1`: Local type (in `LocalTyStore`, temporary during analysis) +/// +/// After analysis, local types are merged into the global store via substitution. +/// +/// # Well-Known Types +/// +/// Common types have predefined constants for efficiency: +/// - [`Ty::ANY`], [`Ty::NEVER`] - top and bottom types +/// - [`Ty::NULL`], [`Ty::BOOL`], [`Ty::NUMBER`], [`Ty::STRING`] - primitives +/// - [`Ty::TRUE`], [`Ty::FALSE`] - boolean literals +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct Ty(u32); + +impl Ty { + /// Bit flag indicating a local (per-file) type vs global type. + pub const LOCAL_BIT: u32 = 1 << 31; + + /// Mask for extracting the raw index without the local bit. + const INDEX_MASK: u32 = !Self::LOCAL_BIT; + + // Well-known type constants (indices 0-15 are reserved, always global) + /// The top type - any value is valid. + pub const ANY: Ty = Ty(0); + /// The bottom type - no value is valid (unreachable code). + pub const NEVER: Ty = Ty(1); + /// The null type. + pub const NULL: Ty = Ty(2); + /// Boolean type (true or false). + pub const BOOL: Ty = Ty(3); + /// The literal `true` value. + pub const TRUE: Ty = Ty(4); + /// The literal `false` value. + pub const FALSE: Ty = Ty(5); + /// Numeric type (any number). + pub const NUMBER: Ty = Ty(6); + /// String type. 
+ pub const STRING: Ty = Ty(7); + /// Single character type (string of length 1). + pub const CHAR: Ty = Ty(8); + + /// Number of reserved well-known type slots. + pub const RESERVED_COUNT: u32 = 16; + + /// Create a Ty from a raw index (global, no local bit). + #[inline] + pub const fn from_raw(index: u32) -> Ty { + debug_assert!(index & Self::LOCAL_BIT == 0, "Index too large"); + Ty(index) + } + + /// Create a local Ty from a raw index. + #[inline] + pub const fn from_raw_local(index: u32) -> Ty { + debug_assert!(index & Self::LOCAL_BIT == 0, "Index too large"); + Ty(index | Self::LOCAL_BIT) + } + + /// Get the raw ID including the local bit (for debugging). + #[inline] + pub fn id(self) -> u32 { + self.0 + } + + /// Get the raw index without the local bit. + #[inline] + pub fn raw_index(self) -> u32 { + self.0 & Self::INDEX_MASK + } + + /// Check if this is a local (per-file) type. + #[inline] + pub fn is_local(self) -> bool { + self.0 & Self::LOCAL_BIT != 0 + } + + /// Check if this is a global type. + #[inline] + pub fn is_global(self) -> bool { + !self.is_local() + } + + /// Check if this is the Any type. + #[inline] + pub fn is_any(self) -> bool { + self == Self::ANY + } + + /// Check if this is the Never type. + #[inline] + pub fn is_never(self) -> bool { + self == Self::NEVER + } + + /// Check if this is a well-known type constant. + /// + /// Well-known types (ANY, NEVER, NULL, BOOL, TRUE, FALSE, NUMBER, STRING, CHAR) + /// have the same `Ty` value across all stores and are always global. + #[inline] + pub fn is_well_known(self) -> bool { + // Well-known types are always global (no LOCAL_BIT) and have low indices + self.0 < Self::RESERVED_COUNT + } + + /// Get the well-known Ty for a TyData, if it matches a well-known type. 
+ pub fn well_known_for_data(data: &TyData) -> Option { + match data { + TyData::Any => Some(Ty::ANY), + TyData::Never => Some(Ty::NEVER), + TyData::Null => Some(Ty::NULL), + TyData::Bool => Some(Ty::BOOL), + TyData::True => Some(Ty::TRUE), + TyData::False => Some(Ty::FALSE), + TyData::Number => Some(Ty::NUMBER), + TyData::String => Some(Ty::STRING), + TyData::Char => Some(Ty::CHAR), + _ => None, + } + } +} + +impl std::fmt::Debug for Ty { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match *self { + Ty::ANY => write!(f, "Ty::ANY"), + Ty::NEVER => write!(f, "Ty::NEVER"), + Ty::NULL => write!(f, "Ty::NULL"), + Ty::BOOL => write!(f, "Ty::BOOL"), + Ty::TRUE => write!(f, "Ty::TRUE"), + Ty::FALSE => write!(f, "Ty::FALSE"), + Ty::NUMBER => write!(f, "Ty::NUMBER"), + Ty::STRING => write!(f, "Ty::STRING"), + Ty::CHAR => write!(f, "Ty::CHAR"), + ty if ty.is_local() => write!(f, "Ty(L{})", ty.raw_index()), + Ty(id) => write!(f, "Ty({})", id), + } + } +} + +/// A reference to type data with access to the store for display. +/// +/// This wrapper holds both a reference to the type data and the store, +/// enabling `Display` implementation that can recursively format nested types. +/// +/// Implements `Deref` for ergonomic access to the underlying data. +/// +/// # Example +/// ```ignore +/// let store = TyStore::new(); +/// let arr = store.array(Ty::NUMBER); +/// +/// // Display works directly +/// println!("{}", store.get(arr)); // "array" +/// +/// // Pattern matching via Deref +/// match &*store.get(arr) { +/// TyData::Array { elem } => println!("element: {}", store.get(*elem)), +/// _ => {} +/// } +/// ``` +pub struct TyRef<'a> { + store: &'a TyStore, + ty: Ty, +} + +impl<'a> TyRef<'a> { + /// Get the Ty ID this reference points to. + pub fn ty(&self) -> Ty { + self.ty + } + + /// Get a reference to another type from the same store. 
+ #[must_use] + pub fn get(&self, ty: Ty) -> TyRef<'a> { + TyRef { + store: self.store, + ty, + } + } +} + +impl std::ops::Deref for TyRef<'_> { + type Target = TyData; + + fn deref(&self) -> &Self::Target { + &self.store.data[self.ty.raw_index() as usize] + } +} + +impl std::fmt::Display for TyRef<'_> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.store.display(self.ty)) + } +} + +impl std::fmt::Debug for TyRef<'_> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("TyRef") + .field("ty", &self.ty) + .field("data", &**self) + .finish() + } +} + +/// Unique identifier for a type variable. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct TyVarId(pub u32); + +impl TyVarId { + /// Generate a fresh type variable ID. + pub fn fresh() -> Self { + TyVarId(TYPE_VAR_COUNTER.fetch_add(1, Ordering::SeqCst)) + } +} + +impl std::fmt::Display for TyVarId { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + // Display as Greek letters for small IDs, T prefix for larger + let greek = ['α', 'β', 'γ', 'δ', 'ε', 'ζ', 'η', 'θ']; + if (self.0 as usize) < greek.len() { + write!(f, "{}", greek[self.0 as usize]) + } else { + write!(f, "T{}", self.0) + } + } +} + +/// Numeric bounds for range validation. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct NumBounds { + /// Minimum value (as bits for hashing), if known. + pub min: Option, + /// Maximum value (as bits for hashing), if known. + pub max: Option, +} + +impl NumBounds { + /// Create unbounded numeric range. + pub fn unbounded() -> Self { + Self { + min: None, + max: None, + } + } + + /// Create a non-negative range (>= 0). + pub fn non_negative() -> Self { + Self { + min: Some(0.0_f64.to_bits()), + max: None, + } + } + + /// Create a range with minimum bound. + pub fn at_least(min: f64) -> Self { + Self { + min: Some(min.to_bits()), + max: None, + } + } + + /// Create a range with both bounds. 
+ pub fn between(min: f64, max: f64) -> Self { + Self { + min: Some(min.to_bits()), + max: Some(max.to_bits()), + } + } + + /// Get the minimum as f64. + pub fn min_f64(&self) -> Option { + self.min.map(f64::from_bits) + } + + /// Get the maximum as f64. + pub fn max_f64(&self) -> Option { + self.max.map(f64::from_bits) + } + + /// Check if this range is a subset of another range. + pub fn is_subset_of(&self, other: &NumBounds) -> bool { + let self_min = self.min_f64(); + let self_max = self.max_f64(); + let other_min = other.min_f64(); + let other_max = other.max_f64(); + + // Our min must be >= their min (or they have no min) + let min_ok = match (self_min, other_min) { + (_, None) => true, + (Some(a), Some(b)) => a >= b, + (None, Some(_)) => false, // We go to -inf, they don't + }; + // Our max must be <= their max (or they have no max) + let max_ok = match (self_max, other_max) { + (_, None) => true, + (Some(a), Some(b)) => a <= b, + (None, Some(_)) => false, // We go to +inf, they don't + }; + min_ok && max_ok + } + + /// Intersect two numeric bounds, returning the tighter constraint. + /// + /// Returns `None` if the intersection is empty (min > max). 
+ pub fn intersect(&self, other: &NumBounds) -> Option { + // Take the maximum of the minimums (tighter lower bound) + let new_min = match (self.min_f64(), other.min_f64()) { + (Some(a), Some(b)) => Some(a.max(b)), + (Some(a), None) => Some(a), + (None, Some(b)) => Some(b), + (None, None) => None, + }; + + // Take the minimum of the maximums (tighter upper bound) + let new_max = match (self.max_f64(), other.max_f64()) { + (Some(a), Some(b)) => Some(a.min(b)), + (Some(a), None) => Some(a), + (None, Some(b)) => Some(b), + (None, None) => None, + }; + + // Check if the intersection is empty + if let (Some(min), Some(max)) = (new_min, new_max) { + if min > max { + return None; // Empty intersection + } + } + + Some(NumBounds { + min: new_min.map(f64::to_bits), + max: new_max.map(f64::to_bits), + }) + } +} + +/// Visibility of an object field. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Default)] +pub enum FieldVis { + /// Normal field (`:`) - visible in output + #[default] + Normal, + /// Hidden field (`::`) - not visible in output + Hidden, + /// Force visible field (`:::`) - always visible + ForceVisible, +} + +/// Definition of an object field in interned form. +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct FieldDefInterned { + /// The type of the field value. + pub ty: Ty, + /// Whether this field is required. + pub required: bool, + /// Visibility of the field. + pub visibility: FieldVis, +} + +/// Object type data. +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct ObjectData { + /// Known fields (sorted by name for canonical form). + pub fields: Vec<(String, FieldDefInterned)>, + /// Whether the object may have unknown fields. + pub has_unknown: bool, +} + +impl ObjectData { + /// Create an empty closed object. + pub fn empty() -> Self { + Self { + fields: Vec::new(), + has_unknown: false, + } + } + + /// Create an open object (may have unknown fields). 
+ pub fn open() -> Self { + Self { + fields: Vec::new(), + has_unknown: true, + } + } + + /// Get a field by name. + pub fn get_field(&self, name: &str) -> Option<&FieldDefInterned> { + self.fields.iter().find(|(n, _)| n == name).map(|(_, f)| f) + } + + /// Merge two objects (right fields override left). + pub fn merge(left: &ObjectData, right: &ObjectData) -> Self { + let mut fields = left.fields.clone(); + // Add/override fields from right + for (name, field) in &right.fields { + if let Some(pos) = fields.iter().position(|(n, _)| n == name) { + fields[pos] = (name.clone(), field.clone()); + } else { + fields.push((name.clone(), field.clone())); + } + } + // Sort for canonical form + fields.sort_by(|(a, _), (b, _)| a.cmp(b)); + Self { + fields, + has_unknown: left.has_unknown || right.has_unknown, + } + } +} + +/// Function parameter in interned form. +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct ParamInterned { + /// Parameter name. + pub name: String, + /// Expected parameter type. + pub ty: Ty, + /// Whether the parameter has a default value. + pub has_default: bool, +} + +/// How a function's return type is determined. +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum ReturnSpec { + /// Fixed return type. + Fixed(Ty), + /// Return type is same as argument at index. + SameAsArg(usize), + /// Return type is array of argument type at index. + ArrayOfArg(usize), + /// Return type keeps element type of array arg. + ArrayWithSameElements(usize), + /// Return type is a set (sorted, unique array) with same element type as array arg. + SetWithSameElements(usize), + /// Return type is function's return type at index. + FuncReturnType(usize), + /// Return type is Array. + ArrayOfFuncReturn(usize), + /// Return type is flattened array from func. + FlatMapResult(usize), + /// Return type is non-negative number. + NonNegative, + /// Return type is array of object values. 
+ ObjectValuesType(usize), +} + +impl Default for ReturnSpec { + fn default() -> Self { + ReturnSpec::Fixed(Ty::ANY) + } +} + +/// Function type data. +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct FunctionData { + /// Function parameters. + pub params: Vec, + /// How return type is determined. + pub return_spec: ReturnSpec, + /// Whether function is variadic. + pub variadic: bool, +} + +impl FunctionData { + /// Count of required parameters. + pub fn required_count(&self) -> usize { + self.params.iter().filter(|p| !p.has_default).count() + } + + /// Total parameter count. + pub fn total_count(&self) -> usize { + self.params.len() + } +} + +/// Constraints on a type variable. +#[derive(Debug, Clone, PartialEq, Eq, Hash, Default)] +pub struct TyConstraints { + /// Must be indexable (array, object, string). + pub must_be_indexable: bool, + /// Must support field access (object). + pub must_support_fields: bool, + /// Must be callable (function). + pub must_be_callable: bool, + /// Upper bound type (must be subtype of this). + pub upper_bound: Option, +} + +impl TyConstraints { + /// No constraints. + pub fn none() -> Self { + Self::default() + } + + /// Check if there are any constraints. + pub fn is_empty(&self) -> bool { + !self.must_be_indexable + && !self.must_support_fields + && !self.must_be_callable + && self.upper_bound.is_none() + } + + /// Check if a type satisfies these constraints. + pub fn satisfied_by(&self, ty: Ty, store: &TyStore) -> bool { + if self.must_be_indexable && !store.is_indexable(ty) { + return false; + } + if self.must_support_fields && !store.supports_field_access(ty) { + return false; + } + if self.must_be_callable && !store.is_callable(ty) { + return false; + } + // Check upper bound + if let Some(upper) = self.upper_bound { + if !store.is_subtype_of(ty, upper) { + return false; + } + } + true + } +} + +/// A substitution mapping type variable IDs to concrete types (Ty-native). 
#[derive(Debug, Clone, Default)]
pub struct TySubstitution {
	/// Mapping from type variable IDs to their substituted types.
	pub mappings: FxHashMap<TyVarId, Ty>,
}

impl TySubstitution {
	/// Create an empty substitution.
	pub fn new() -> Self {
		Self::default()
	}

	/// Add a mapping from a type variable to a type.
	pub fn insert(&mut self, var: TyVarId, ty: Ty) {
		self.mappings.insert(var, ty);
	}

	/// Look up a type variable's substitution.
	pub fn get(&self, var: TyVarId) -> Option<Ty> {
		self.mappings.get(&var).copied()
	}

	/// Check if a type variable ID occurs in a type (the "occurs check"
	/// used to reject infinite types during unification).
	pub fn occurs_in(var: TyVarId, ty: Ty, store: &TyStore) -> bool {
		match *store.get(ty) {
			TyData::TypeVar { id, .. } => id == var,
			TyData::Array { elem, .. } => Self::occurs_in(var, elem, store),
			TyData::Tuple { ref elems } => elems.iter().any(|e| Self::occurs_in(var, *e, store)),
			TyData::Union(ref variants) => variants.iter().any(|v| Self::occurs_in(var, *v, store)),
			TyData::Object(ref obj) => obj
				.fields
				.iter()
				.any(|(_, fd)| Self::occurs_in(var, fd.ty, store)),
			TyData::AttrsOf { value } => Self::occurs_in(var, value, store),
			TyData::Function(ref func) => {
				func.params
					.iter()
					.any(|p| Self::occurs_in(var, p.ty, store))
					|| matches!(&func.return_spec, ReturnSpec::Fixed(ret) if Self::occurs_in(var, *ret, store))
			}
			_ => false,
		}
	}
}

/// The actual type data that a [`Ty`] references.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum TyData {
	/// Top type - any value.
	Any,
	/// Bottom type - no value.
	Never,
	/// Null value.
	Null,
	/// Boolean (true or false).
	Bool,
	/// Literal true.
	True,
	/// Literal false.
	False,
	/// Number (unbounded).
	Number,
	/// Number with bounds.
	BoundedNumber(NumBounds),
	/// String.
	String,
	/// Single character.
	Char,
	/// Literal string value.
	LiteralString(String),

	/// Array with element type and optional set semantics.
	///
	/// When `is_set` is true, the array is known to be sorted and contain unique elements.
	/// This is useful for tracking the result of `std.set()` calls.
	Array { elem: Ty, is_set: bool },
	/// Tuple with fixed element types.
	Tuple { elems: Vec<Ty> },
	/// Object with known fields.
	Object(ObjectData),
	/// Object where all values have same type.
	AttrsOf { value: Ty },
	/// Function type.
	Function(FunctionData),

	/// Union of types (value matches ANY).
	Union(Vec<Ty>),
	/// Intersection of types (value matches ALL).
	Sum(Vec<Ty>),

	/// Type variable with constraints.
	TypeVar {
		id: TyVarId,
		constraints: TyConstraints,
	},
}

/// Common operations for type stores.
///
/// This trait abstracts over `TyStore` and `MutStore`, allowing unification
/// and type operation code to work with either store type without duplication.
pub trait TypeStoreOps {
	/// Get type data for a Ty (returns owned TyData for simplicity).
	fn get_data(&self, ty: Ty) -> TyData;

	/// Format a type for display.
	fn display(&self, ty: Ty) -> String;

	/// Create an array type.
	fn array(&mut self, elem: Ty) -> Ty;

	/// Create a set type (array with sorted, unique elements).
	fn array_set(&mut self, elem: Ty) -> Ty;

	/// Create a tuple type.
	fn tuple(&mut self, elems: Vec<Ty>) -> Ty;

	/// Create an object type.
	fn object(&mut self, data: ObjectData) -> Ty;

	/// Create an AttrsOf type (object with uniform value type).
	fn attrs_of(&mut self, value: Ty) -> Ty;

	/// Create a function type.
	fn function(&mut self, data: FunctionData) -> Ty;

	/// Create a union type.
	fn union(&mut self, types: Vec<Ty>) -> Ty;

	/// Create a sum (intersection) type.
	fn sum(&mut self, types: Vec<Ty>) -> Ty;

	/// Create a bounded number type.
	fn bounded_number(&mut self, bounds: NumBounds) -> Ty;

	/// Create a literal string type.
	fn literal_string(&mut self, s: String) -> Ty;
}

/// Storage for interned types.
///
/// Types are stored once and referenced by [`Ty`] IDs.
/// The store ensures deduplication - the same type data always
/// maps to the same ID.
#[derive(Debug, Clone)]
pub struct TyStore {
	/// Type data indexed by Ty ID.
	data: Vec<TyData>,
	/// Reverse mapping for deduplication.
	dedup: FxHashMap<TyData, Ty>,
}

impl TyStore {
	/// Create a new type store with well-known types pre-populated.
	pub fn new() -> Self {
		let mut store = Self {
			data: Vec::with_capacity(64),
			dedup: FxHashMap::default(),
		};
		store.init_builtins();
		store
	}

	/// Initialize built-in well-known types.
	fn init_builtins(&mut self) {
		// Must match the order of Ty constants!
		let builtins = [
			TyData::Any,    // 0 = ANY
			TyData::Never,  // 1 = NEVER
			TyData::Null,   // 2 = NULL
			TyData::Bool,   // 3 = BOOL
			TyData::True,   // 4 = TRUE
			TyData::False,  // 5 = FALSE
			TyData::Number, // 6 = NUMBER
			TyData::String, // 7 = STRING
			TyData::Char,   // 8 = CHAR
			// Padding to RESERVED_COUNT
			TyData::Any, // 9 - reserved
			TyData::Any, // 10 - reserved
			TyData::Any, // 11 - reserved
			TyData::Any, // 12 - reserved
			TyData::Any, // 13 - reserved
			TyData::Any, // 14 - reserved
			TyData::Any, // 15 - reserved
		];

		for (i, data) in builtins.into_iter().enumerate() {
			let ty = Ty::from_raw(i as u32);
			self.data.push(data.clone());
			// Only dedup the non-padding entries (0..9); the `Any` padding
			// slots must not shadow the canonical ANY at index 0.
			if i < 9 {
				self.dedup.insert(data, ty);
			}
		}

		debug_assert_eq!(self.data.len(), Ty::RESERVED_COUNT as usize);
	}

	/// Intern a type, returning existing ID if already present.
+ pub fn intern(&mut self, data: TyData) -> Ty { + // Fast path for well-known types + if let Some(ty) = Ty::well_known_for_data(&data) { + return ty; + } + + // Check if already interned + if let Some(&existing) = self.dedup.get(&data) { + return existing; + } + + // Intern new type + let id = Ty::from_raw(self.data.len() as u32); + self.data.push(data.clone()); + self.dedup.insert(data, id); + id + } + + /// Get a reference to type data with display capability. + /// + /// Returns a `TyRef` that derefs to `&TyData` and implements `Display`. + /// Use `*store.get(ty)` to pattern match on the underlying `TyData`. + #[inline] + pub fn get(&self, ty: Ty) -> TyRef<'_> { + TyRef { store: self, ty } + } + + /// Get the number of interned types. + pub fn len(&self) -> usize { + self.data.len() + } + + /// Check if empty (never true after init). + pub fn is_empty(&self) -> bool { + self.data.is_empty() + } + + /// Create an array type. + pub fn array(&mut self, elem: Ty) -> Ty { + self.intern(TyData::Array { + elem, + is_set: false, + }) + } + + /// Create a set type (array with sorted, unique elements). + pub fn array_set(&mut self, elem: Ty) -> Ty { + self.intern(TyData::Array { elem, is_set: true }) + } + + /// Create a tuple type. + pub fn tuple(&mut self, elems: Vec) -> Ty { + if elems.is_empty() { + // Empty tuple is a closed empty array + return self.intern(TyData::Tuple { elems: vec![] }); + } + self.intern(TyData::Tuple { elems }) + } + + /// Create an object type. + pub fn object(&mut self, data: ObjectData) -> Ty { + self.intern(TyData::Object(data)) + } + + /// Create an open object (unknown fields). + pub fn object_any(&mut self) -> Ty { + self.object(ObjectData::open()) + } + + /// Create a generic function type (accepts any args, returns any). 
+ pub fn function_any(&mut self) -> Ty { + self.function(FunctionData { + params: vec![], + return_spec: ReturnSpec::Fixed(Ty::ANY), + variadic: true, + }) + } + + /// Create an AttrsOf type (object with uniform value type). + pub fn attrs_of(&mut self, value: Ty) -> Ty { + self.intern(TyData::AttrsOf { value }) + } + + /// Create a function type. + pub fn function(&mut self, data: FunctionData) -> Ty { + self.intern(TyData::Function(data)) + } + + /// Create a function with simple params and fixed return. + pub fn function_simple(&mut self, param_names: Vec<&str>, return_ty: Ty) -> Ty { + let params = param_names + .into_iter() + .map(|name| ParamInterned { + name: name.to_string(), + ty: Ty::ANY, + has_default: false, + }) + .collect(); + self.function(FunctionData { + params, + return_spec: ReturnSpec::Fixed(return_ty), + variadic: false, + }) + } + + /// Create a union type. + pub fn union(&mut self, mut types: Vec) -> Ty { + // Simplification rules + if types.is_empty() { + return Ty::NEVER; + } + if types.len() == 1 { + return types[0]; + } + + // Flatten nested unions and remove duplicates + let mut flattened = Vec::with_capacity(types.len()); + for ty in types.drain(..) { + if ty == Ty::ANY { + return Ty::ANY; // Any absorbs everything + } + if ty == Ty::NEVER { + continue; // Never is identity for union + } + if let TyData::Union(ref inner) = *self.get(ty) { + flattened.extend(inner.iter().copied()); + } else if !flattened.contains(&ty) { + flattened.push(ty); + } + } + + // Sort for canonical form + flattened.sort_by_key(|t| t.0); + flattened.dedup(); + + match flattened.len() { + 0 => Ty::NEVER, + 1 => flattened[0], + _ => self.intern(TyData::Union(flattened)), + } + } + + /// Create a sum (intersection) type. 
+ pub fn sum(&mut self, mut types: Vec) -> Ty { + if types.is_empty() { + return Ty::ANY; + } + if types.len() == 1 { + return types[0]; + } + + // Flatten and simplify + let mut flattened = Vec::with_capacity(types.len()); + for ty in types.drain(..) { + if ty == Ty::NEVER { + return Ty::NEVER; // Never absorbs everything in intersection + } + if ty == Ty::ANY { + continue; // Any is identity for intersection + } + if let TyData::Sum(ref inner) = *self.get(ty) { + flattened.extend(inner.iter().copied()); + } else if !flattened.contains(&ty) { + flattened.push(ty); + } + } + + flattened.sort_by_key(|t| t.0); + flattened.dedup(); + + match flattened.len() { + 0 => Ty::ANY, + 1 => flattened[0], + _ => self.intern(TyData::Sum(flattened)), + } + } + + /// Create a bounded number type. + pub fn bounded_number(&mut self, bounds: NumBounds) -> Ty { + self.intern(TyData::BoundedNumber(bounds)) + } + + /// Create a literal string type. + pub fn literal_string(&mut self, s: String) -> Ty { + self.intern(TyData::LiteralString(s)) + } + + /// Create a type variable. + pub fn type_var(&mut self, id: TyVarId, constraints: TyConstraints) -> Ty { + self.intern(TyData::TypeVar { id, constraints }) + } + + /// Create a fresh type variable with no constraints. + pub fn fresh_var(&mut self) -> Ty { + self.type_var(TyVarId::fresh(), TyConstraints::none()) + } + + /// Narrow a type by intersecting with a constraint. + /// + /// Returns the most specific type that satisfies both. + /// For example, `narrow(Any, Number)` returns `Number`. 
+ pub fn narrow(&mut self, ty: Ty, constraint: Ty) -> Ty { + // Fast paths + if ty == Ty::NEVER || constraint == Ty::NEVER { + return Ty::NEVER; + } + if ty == Ty::ANY { + return constraint; + } + if constraint == Ty::ANY { + return ty; + } + if ty == constraint { + return ty; + } + + // Handle unions: narrow each element + if let TyData::Union(types) = self.get(ty).clone() { + let narrowed: Vec = types + .iter() + .map(|&t| self.narrow(t, constraint)) + .filter(|&t| t != Ty::NEVER) + .collect(); + return self.union(narrowed); + } + if let TyData::Union(types) = self.get(constraint).clone() { + let narrowed: Vec = types + .iter() + .map(|&t| self.narrow(ty, t)) + .filter(|&t| t != Ty::NEVER) + .collect(); + return self.union(narrowed); + } + + // Handle literal types as subtypes + match (&*self.get(ty), &*self.get(constraint)) { + (TyData::Bool, TyData::True) | (TyData::True, TyData::Bool) => return Ty::TRUE, + (TyData::Bool, TyData::False) | (TyData::False, TyData::Bool) => return Ty::FALSE, + (TyData::String, TyData::Char) | (TyData::Char, TyData::String) => return Ty::CHAR, + (TyData::String, TyData::LiteralString(s)) + | (TyData::LiteralString(s), TyData::String) => { + return self.literal_string(s.clone()); + } + _ => {} + } + + // Handle bounded numbers + match (&*self.get(ty), &*self.get(constraint)) { + // Number narrowed with BoundedNumber -> BoundedNumber + // BoundedNumber narrowed with Number -> BoundedNumber (unchanged) + (TyData::Number, TyData::BoundedNumber(bounds)) + | (TyData::BoundedNumber(bounds), TyData::Number) => { + return self.bounded_number(*bounds); + } + // Two BoundedNumbers -> intersect bounds + (TyData::BoundedNumber(b1), TyData::BoundedNumber(b2)) => { + return match b1.intersect(b2) { + Some(bounds) => self.bounded_number(bounds), + None => Ty::NEVER, // Empty intersection + }; + } + _ => {} + } + + // Handle arrays (preserve is_set if both are sets) + if let ( + TyData::Array { + elem: e1, + is_set: s1, + }, + TyData::Array { + 
elem: e2, + is_set: s2, + }, + ) = (self.get(ty).clone(), self.get(constraint).clone()) + { + let elem = self.narrow(e1, e2); + if elem == Ty::NEVER { + return Ty::NEVER; + } + // Result is a set only if both inputs are sets + if s1 && s2 { + return self.array_set(elem); + } + return self.array(elem); + } + + // Handle tuples with arrays + if let (TyData::Tuple { elems }, TyData::Array { elem: arr_elem, .. }) = + (self.get(ty).clone(), self.get(constraint).clone()) + { + let narrowed: Vec = elems.iter().map(|&e| self.narrow(e, arr_elem)).collect(); + if narrowed.contains(&Ty::NEVER) { + return Ty::NEVER; + } + return self.tuple(narrowed); + } + if let (TyData::Array { elem: arr_elem, .. }, TyData::Tuple { elems }) = + (self.get(ty).clone(), self.get(constraint).clone()) + { + let narrowed: Vec = elems.iter().map(|&e| self.narrow(arr_elem, e)).collect(); + if narrowed.contains(&Ty::NEVER) { + return Ty::NEVER; + } + return self.tuple(narrowed); + } + + // Handle tuples with tuples + if let (TyData::Tuple { elems: e1 }, TyData::Tuple { elems: e2 }) = + (self.get(ty).clone(), self.get(constraint).clone()) + { + if e1.len() != e2.len() { + return Ty::NEVER; + } + let narrowed: Vec = e1 + .iter() + .zip(e2.iter()) + .map(|(&a, &b)| self.narrow(a, b)) + .collect(); + if narrowed.contains(&Ty::NEVER) { + return Ty::NEVER; + } + return self.tuple(narrowed); + } + + // Handle objects + if let (TyData::Object(obj1), TyData::Object(obj2)) = + (self.get(ty).clone(), self.get(constraint).clone()) + { + let mut fields = obj1.fields.clone(); + for (name, def2) in &obj2.fields { + if let Some(pos) = fields.iter().position(|(n, _)| n == name) { + let (_, def1) = &fields[pos]; + let narrowed_ty = self.narrow(def1.ty, def2.ty); + fields[pos] = ( + name.clone(), + FieldDefInterned { + ty: narrowed_ty, + required: def1.required && def2.required, + visibility: def1.visibility, + }, + ); + } else { + fields.push((name.clone(), def2.clone())); + } + } + let has_unknown = obj1.has_unknown 
&& obj2.has_unknown; + return self.object(ObjectData { + fields, + has_unknown, + }); + } + + // Different concrete types have no intersection + Ty::NEVER + } + + /// Widen a type by removing a constraint. + /// + /// Returns the type with the constraint removed. + /// For example, `widen(Union(Number, String), Number)` returns `String`. + pub fn widen(&mut self, base: Ty, remove: Ty) -> Ty { + // Fast paths + if base == Ty::NEVER { + return Ty::NEVER; + } + if remove == Ty::NEVER { + return base; + } + if remove == Ty::ANY { + return Ty::NEVER; + } + if base == Ty::ANY { + return Ty::ANY; + } + if base == remove { + return Ty::NEVER; + } + + // Handle unions: remove from each element + if let TyData::Union(types) = self.get(base).clone() { + let remaining: Vec = types + .iter() + .map(|&t| self.widen(t, remove)) + .filter(|&t| t != Ty::NEVER) + .collect(); + return self.union(remaining); + } + + // Different concrete types: nothing to remove + base + } + + /// Narrow a type to one with a specific length. + /// + /// - Arrays become tuples with n elements + /// - Strings with length 1 become Char + /// - Tuples must have matching length + pub fn with_len(&mut self, ty: Ty, len: usize) -> Ty { + match self.get(ty).clone() { + TyData::Any => Ty::ANY, + + TyData::Array { elem, .. 
} => { + let elems = vec![elem; len]; + self.tuple(elems) + } + + TyData::Tuple { elems } => { + if elems.len() == len { + ty + } else { + Ty::NEVER + } + } + + TyData::Object(obj) => match (obj.fields.len().cmp(&len), obj.has_unknown) { + (std::cmp::Ordering::Equal, false) | (std::cmp::Ordering::Less, true) => ty, + (std::cmp::Ordering::Equal, true) => self.object(ObjectData { + fields: obj.fields, + has_unknown: false, + }), + (std::cmp::Ordering::Less, false) | (std::cmp::Ordering::Greater, _) => Ty::NEVER, + }, + + TyData::String => { + if len == 1 { + Ty::CHAR + } else { + ty + } + } + + TyData::LiteralString(s) => { + if s.len() == len { + ty + } else { + Ty::NEVER + } + } + + TyData::Char => { + if len == 1 { + ty + } else { + Ty::NEVER + } + } + + TyData::Function(_) | TyData::AttrsOf { .. } | TyData::TypeVar { .. } => ty, + + TyData::Never + | TyData::Null + | TyData::Bool + | TyData::True + | TyData::False + | TyData::Number + | TyData::BoundedNumber(_) => Ty::NEVER, + + TyData::Union(types) => { + let narrowed: Vec = types + .iter() + .map(|&t| self.with_len(t, len)) + .filter(|&t| t != Ty::NEVER) + .collect(); + self.union(narrowed) + } + + TyData::Sum(types) => { + let narrowed: Vec = types.iter().map(|&t| self.with_len(t, len)).collect(); + if narrowed.contains(&Ty::NEVER) { + Ty::NEVER + } else { + self.sum(narrowed) + } + } + } + } + + /// Narrow a type to one with at least a minimum length. + pub fn with_min_len(&mut self, ty: Ty, min: usize) -> Ty { + match self.get(ty).clone() { + TyData::Any => Ty::ANY, + TyData::Never => Ty::NEVER, + + TyData::Array { .. } + | TyData::Object(_) + | TyData::AttrsOf { .. } + | TyData::String + | TyData::LiteralString(_) + | TyData::Function(_) + | TyData::TypeVar { .. 
} => ty, + + TyData::Tuple { elems } => { + if elems.len() >= min { + ty + } else { + Ty::NEVER + } + } + + TyData::Char => { + if min <= 1 { + ty + } else { + Ty::NEVER + } + } + + TyData::Null + | TyData::Bool + | TyData::True + | TyData::False + | TyData::Number + | TyData::BoundedNumber(_) => { + if min == 0 { + ty + } else { + Ty::NEVER + } + } + + TyData::Union(types) => { + let narrowed: Vec = types + .iter() + .map(|&t| self.with_min_len(t, min)) + .filter(|&t| t != Ty::NEVER) + .collect(); + self.union(narrowed) + } + + TyData::Sum(types) => { + let narrowed: Vec = types.iter().map(|&t| self.with_min_len(t, min)).collect(); + if narrowed.contains(&Ty::NEVER) { + Ty::NEVER + } else { + self.sum(narrowed) + } + } + } + } + + /// Check if type is indexable. + pub fn is_indexable(&self, ty: Ty) -> bool { + match *self.get(ty) { + TyData::Any + | TyData::String + | TyData::Char + | TyData::Array { .. } + | TyData::Tuple { .. } + | TyData::Object(_) + | TyData::AttrsOf { .. } => true, + TyData::Union(ref types) | TyData::Sum(ref types) => { + types.iter().all(|&t| self.is_indexable(t)) + } + TyData::TypeVar { + ref constraints, .. + } => constraints.must_be_indexable, + _ => false, + } + } + + /// Check if type supports field access. + pub fn supports_field_access(&self, ty: Ty) -> bool { + match *self.get(ty) { + TyData::Any | TyData::Object(_) | TyData::AttrsOf { .. } => true, + TyData::Union(ref types) | TyData::Sum(ref types) => { + types.iter().all(|&t| self.supports_field_access(t)) + } + TyData::TypeVar { + ref constraints, .. + } => constraints.must_support_fields, + _ => false, + } + } + + /// Check if type is callable. + pub fn is_callable(&self, ty: Ty) -> bool { + match *self.get(ty) { + TyData::Any | TyData::Function(_) => true, + TyData::Union(ref types) | TyData::Sum(ref types) => { + types.iter().all(|&t| self.is_callable(t)) + } + TyData::TypeVar { + ref constraints, .. 
+ } => constraints.must_be_callable, + _ => false, + } + } + + /// Simplified subtype check for constraint satisfaction. + /// + /// Checks if `subtype` is a subtype of `supertype`. This is a simplified + /// version that handles the most common cases; for full subtype checking + /// use the unification module. + pub fn is_subtype_of(&self, subtype: Ty, supertype: Ty) -> bool { + // Fast paths + if subtype == supertype { + return true; + } + if subtype == Ty::NEVER { + return true; // Never is subtype of everything + } + if supertype == Ty::ANY { + return true; // Everything is subtype of Any + } + if subtype == Ty::ANY { + return false; // Any is only subtype of Any (already checked) + } + + match (&*self.get(subtype), &*self.get(supertype)) { + // Char <: String + // LiteralString <: Char (if single char) + (TyData::LiteralString(s), TyData::Char) => s.chars().count() == 1, + // Char <: String + // LiteralString <: String + // True <: Bool, False <: Bool + // BoundedNumber <: Number + (TyData::Char | TyData::LiteralString(_), TyData::String) + | (TyData::True | TyData::False, TyData::Bool) + | (TyData::BoundedNumber(_), TyData::Number) => true, + // Array covariance: Array <: Array if A <: B + (TyData::Array { elem: sub_elem, .. }, TyData::Array { elem: sup_elem, .. }) => { + self.is_subtype_of(*sub_elem, *sup_elem) + } + // Tuple <: Array if all elements are subtypes + (TyData::Tuple { elems }, TyData::Array { elem: arr_elem, .. 
}) => { + elems.iter().all(|&e| self.is_subtype_of(e, *arr_elem)) + } + // Union subtyping: all variants must be subtypes + (TyData::Union(variants), _) => { + let variants = variants.clone(); + variants.iter().all(|&v| self.is_subtype_of(v, supertype)) + } + // Subtype of union: must be subtype of some variant + (_, TyData::Union(variants)) => { + let variants = variants.clone(); + variants.iter().any(|&v| self.is_subtype_of(subtype, v)) + } + // Default: not a subtype + _ => false, + } + } + + /// Apply a substitution to a type, replacing all type variables. + pub fn apply_substitution(&mut self, ty: Ty, sub: &TySubstitution) -> Ty { + // Clone the data to avoid borrow issues + let data = self.get(ty).clone(); + match data { + TyData::TypeVar { id, .. } => sub.get(id).unwrap_or(ty), + TyData::Array { elem, is_set } => { + let new_elem = self.apply_substitution(elem, sub); + if is_set { + self.array_set(new_elem) + } else { + self.array(new_elem) + } + } + TyData::Tuple { elems } => { + // Collect elements first to avoid closure borrowing issues + let elems_vec: Vec = elems; + let mut new_elems = Vec::with_capacity(elems_vec.len()); + for e in elems_vec { + new_elems.push(self.apply_substitution(e, sub)); + } + self.tuple(new_elems) + } + TyData::Union(variants) => { + let variants_vec: Vec = variants; + let mut new_variants = Vec::with_capacity(variants_vec.len()); + for v in variants_vec { + new_variants.push(self.apply_substitution(v, sub)); + } + self.union(new_variants) + } + TyData::Sum(variants) => { + let variants_vec: Vec = variants; + let mut new_variants = Vec::with_capacity(variants_vec.len()); + for v in variants_vec { + new_variants.push(self.apply_substitution(v, sub)); + } + self.sum(new_variants) + } + TyData::Object(obj) => { + // Extract field info first + let field_info: Vec<_> = obj + .fields + .iter() + .map(|(name, fd)| (name.clone(), fd.ty, fd.required, fd.visibility)) + .collect(); + let has_unknown = obj.has_unknown; + // Now apply 
substitutions + let mut new_fields = Vec::with_capacity(field_info.len()); + for (name, ty, required, visibility) in field_info { + new_fields.push(( + name, + FieldDefInterned { + ty: self.apply_substitution(ty, sub), + required, + visibility, + }, + )); + } + self.object(ObjectData { + fields: new_fields, + has_unknown, + }) + } + TyData::AttrsOf { value } => { + let new_value = self.apply_substitution(value, sub); + self.attrs_of(new_value) + } + TyData::Function(func) => { + // Extract param info first + let param_info: Vec<_> = func + .params + .iter() + .map(|p| (p.name.clone(), p.ty, p.has_default)) + .collect(); + let (old_return_spec, variadic) = (func.return_spec.clone(), func.variadic); + // Now apply substitutions + let mut new_params = Vec::with_capacity(param_info.len()); + for (name, ty, has_default) in param_info { + new_params.push(ParamInterned { + name, + ty: self.apply_substitution(ty, sub), + has_default, + }); + } + let new_return_spec = match old_return_spec { + ReturnSpec::Fixed(ret) => ReturnSpec::Fixed(self.apply_substitution(ret, sub)), + other => other, + }; + self.intern(TyData::Function(FunctionData { + params: new_params, + return_spec: new_return_spec, + variadic, + })) + } + // Primitives and other types don't contain type variables + _ => ty, + } + } + + /// Check if a type has any type variables. + pub fn has_type_vars(&self, ty: Ty) -> bool { + match *self.get(ty) { + TyData::TypeVar { .. } => true, + TyData::Array { elem, .. 
} => self.has_type_vars(elem),
            TyData::Tuple { ref elems } => elems.iter().any(|&e| self.has_type_vars(e)),
            TyData::Union(ref variants) | TyData::Sum(ref variants) => {
                variants.iter().any(|&v| self.has_type_vars(v))
            }
            TyData::Object(ref obj) => obj.fields.iter().any(|(_, fd)| self.has_type_vars(fd.ty)),
            TyData::AttrsOf { value } => self.has_type_vars(value),
            TyData::Function(ref func) => {
                func.params.iter().any(|p| self.has_type_vars(p.ty))
                    || matches!(&func.return_spec, ReturnSpec::Fixed(ret) if self.has_type_vars(*ret))
            }
            _ => false,
        }
    }

    /// Format a type for display.
    pub fn display(&self, ty: Ty) -> String {
        match *self.get(ty) {
            TyData::Any => "any".to_string(),
            TyData::Never => "never".to_string(),
            TyData::Null => "null".to_string(),
            TyData::Bool => "boolean".to_string(),
            TyData::True => "true".to_string(),
            TyData::False => "false".to_string(),
            TyData::Number => "number".to_string(),
            TyData::BoundedNumber(bounds) => match (bounds.min_f64(), bounds.max_f64()) {
                (None, None) => "number".to_string(),
                // Degenerate range (min == max): print the single value,
                // without a fractional part when it is integral.
                (Some(lo), Some(hi)) if (lo - hi).abs() < f64::EPSILON => {
                    if lo.fract() == 0.0 {
                        format!("{}", lo as i64)
                    } else {
                        format!("{}", lo)
                    }
                }
                (Some(lo), Some(hi)) => format!("number[{}..{}]", lo, hi),
                (Some(lo), None) => format!("number[{}..]", lo),
                (None, Some(hi)) => format!("number[..{}]", hi),
            },
            TyData::String => "string".to_string(),
            TyData::Char => "char".to_string(),
            TyData::LiteralString(ref s) => format!("\"{}\"", s),
            TyData::Array { elem, is_set } => {
                if is_set {
                    format!("set<{}>", self.display(elem))
                } else {
                    format!("array<{}>", self.display(elem))
                }
            }
            TyData::Tuple { ref elems } => {
                let types: Vec<_> = elems.iter().map(|&t| self.display(t)).collect();
                format!("[{}]", types.join(", "))
            }
            TyData::Object(ref obj) => {
                if obj.fields.is_empty() && !obj.has_unknown {
                    "{}".to_string()
                } else if obj.has_unknown {
                    "object".to_string()
                } else {
                    // Field names only (no types), sorted for stable output.
                    let mut fields: Vec<_> = obj.fields.iter().map(|(n, _)| n.as_str()).collect();
                    fields.sort_unstable();
                    format!("{{ {} }}", fields.join(", "))
                }
            }
            TyData::AttrsOf { value } => format!("object<{}>", self.display(value)),
            TyData::Function(ref func) => {
                let params: Vec<_> = func.params.iter().map(|p| p.name.as_str()).collect();
                format!("function({})", params.join(", "))
            }
            TyData::Union(ref types) => {
                let parts: Vec<_> = types.iter().map(|&t| self.display(t)).collect();
                parts.join(" | ")
            }
            TyData::Sum(ref types) => {
                let parts: Vec<_> = types.iter().map(|&t| self.display(t)).collect();
                parts.join(" & ")
            }
            TyData::TypeVar {
                id,
                ref constraints,
            } => {
                let mut s = id.to_string();
                if !constraints.is_empty() {
                    // NOTE(review): extraction stripped the generic argument
                    // here; `Vec<String>` restored from the `push(String)`
                    // usage below — confirm against the original source.
                    let mut parts: Vec<String> = Vec::new();
                    if constraints.must_be_indexable {
                        parts.push("indexable".to_string());
                    }
                    if constraints.must_support_fields {
                        parts.push("object-like".to_string());
                    }
                    if constraints.must_be_callable {
                        parts.push("callable".to_string());
                    }
                    if let Some(bound) = constraints.upper_bound {
                        parts.push(format!("<: {}", self.display(bound)));
                    }
                    if !parts.is_empty() {
                        s.push_str(" where ");
                        s.push_str(&parts.join(", "));
                    }
                }
                s
            }
        }
    }

    /// Import a type from another store into this store.
    ///
    /// Well-known constants (ANY, NEVER, NULL, BOOL, TRUE, FALSE, NUMBER, STRING, CHAR)
    /// are returned as-is since they have the same value across all stores.
    /// Complex types are recursively imported and re-interned.
    // NOTE(review): the generic parameter was stripped by extraction; the only
    // capability used on `source` is `get_data`, which `TypeStoreOps`
    // provides — confirm the original bound.
    pub fn import_from<S: TypeStoreOps>(&mut self, ty: Ty, source: &S) -> Ty {
        // Well-known constants are the same in all stores
        if ty.is_well_known() {
            return ty;
        }

        // Complex types need re-interning
        match source.get_data(ty) {
            TyData::Array { elem, is_set } => {
                let imported_elem = self.import_from(elem, source);
                if is_set {
                    self.array_set(imported_elem)
                } else {
                    self.array(imported_elem)
                }
            }
            TyData::Tuple { elems } => {
                let imported_elems: Vec<_> =
                    elems.iter().map(|&e| self.import_from(e, source)).collect();
                self.tuple(imported_elems)
            }
            TyData::Union(variants) => {
                let imported_variants: Vec<_> = variants
                    .iter()
                    .map(|&v| self.import_from(v, source))
                    .collect();
                self.union(imported_variants)
            }
            TyData::Object(obj) => {
                let imported_fields: Vec<_> = obj
                    .fields
                    .iter()
                    .map(|(name, def)| {
                        (
                            name.clone(),
                            FieldDefInterned {
                                ty: self.import_from(def.ty, source),
                                required: def.required,
                                visibility: def.visibility,
                            },
                        )
                    })
                    .collect();
                self.object(ObjectData {
                    fields: imported_fields,
                    has_unknown: obj.has_unknown,
                })
            }
            TyData::Function(func) => {
                let imported_params: Vec<_> = func
                    .params
                    .iter()
                    .map(|p| ParamInterned {
                        name: p.name.clone(),
                        ty: self.import_from(p.ty, source),
                        has_default: p.has_default,
                    })
                    .collect();
                let imported_return_spec = match &func.return_spec {
                    ReturnSpec::Fixed(ret_ty) => {
                        ReturnSpec::Fixed(self.import_from(*ret_ty, source))
                    }
                    other => other.clone(),
                };
                self.intern(TyData::Function(FunctionData {
                    params: imported_params,
                    return_spec: imported_return_spec,
                    variadic: func.variadic,
                }))
            }
            TyData::AttrsOf { value } => {
                let imported_value = self.import_from(value, source);
                self.intern(TyData::AttrsOf {
                    value: imported_value,
                })
            }
            TyData::BoundedNumber(bounds) => self.intern(TyData::BoundedNumber(bounds)),
            TyData::LiteralString(s) => self.intern(TyData::LiteralString(s)),
            TyData::TypeVar { id, constraints } => self.intern(TyData::TypeVar { id, constraints }),
            TyData::Sum(variants) => {
                let imported_variants: Vec<_> = variants
                    .iter()
                    .map(|&v| self.import_from(v, source))
                    .collect();
                self.intern(TyData::Sum(imported_variants))
            }
            // Primitives should have been caught by is_well_known(), but handle anyway
            TyData::Any => Ty::ANY,
            TyData::Never => Ty::NEVER,
            TyData::Null => Ty::NULL,
            TyData::Bool => Ty::BOOL,
            TyData::True => Ty::TRUE,
            TyData::False => Ty::FALSE,
            TyData::Number => Ty::NUMBER,
            TyData::String => Ty::STRING,
            TyData::Char => Ty::CHAR,
        }
    }
}

impl TypeStoreOps for TyStore {
    fn get_data(&self, ty: Ty) -> TyData {
        self.get(ty).clone()
    }

    fn display(&self, ty: Ty) -> String {
        TyStore::display(self, ty)
    }

    fn array(&mut self, elem: Ty) -> Ty {
        TyStore::array(self, elem)
    }

    fn array_set(&mut self, elem: Ty) -> Ty {
        TyStore::array_set(self, elem)
    }

    fn tuple(&mut self, elems: Vec<Ty>) -> Ty {
        TyStore::tuple(self, elems)
    }

    fn object(&mut self, data: ObjectData) -> Ty {
        TyStore::object(self, data)
    }

    fn attrs_of(&mut self, value: Ty) -> Ty {
        TyStore::attrs_of(self, value)
    }

    fn function(&mut self, data: FunctionData) -> Ty {
        TyStore::function(self, data)
    }

    fn union(&mut self, types: Vec<Ty>) -> Ty {
        TyStore::union(self, types)
    }

    fn sum(&mut self, types: Vec<Ty>) -> Ty {
        TyStore::sum(self, types)
    }

    fn bounded_number(&mut self, bounds: NumBounds) -> Ty {
        TyStore::bounded_number(self, bounds)
    }

    fn literal_string(&mut self, s: String) -> Ty {
        TyStore::literal_string(self, s)
    }
}

impl Default for TyStore {
    fn default() -> Self {
        Self::new()
    }
}

// Thread-local store for convenient access during analysis
thread_local! {
    static STORE: RefCell<TyStore> = RefCell::new(TyStore::new());
}

/// Execute a function with access to the thread-local type store.
+pub fn with_store(f: impl FnOnce(&mut TyStore) -> R) -> R { + STORE.with(|s| f(&mut s.borrow_mut())) +} + +/// Reset the thread-local store (useful for tests). +pub fn reset_store() { + STORE.with(|s| *s.borrow_mut() = TyStore::new()); +} + +#[cfg(test)] +mod tests { + use assert_matches::assert_matches; + + use super::*; + + #[test] + fn test_well_known_types() { + let store = TyStore::new(); + + // Check that well-known types have correct data + assert!(matches!(*store.get(Ty::ANY), TyData::Any)); + assert!(matches!(*store.get(Ty::NEVER), TyData::Never)); + assert!(matches!(*store.get(Ty::NULL), TyData::Null)); + assert!(matches!(*store.get(Ty::BOOL), TyData::Bool)); + assert!(matches!(*store.get(Ty::TRUE), TyData::True)); + assert!(matches!(*store.get(Ty::FALSE), TyData::False)); + assert!(matches!(*store.get(Ty::NUMBER), TyData::Number)); + assert!(matches!(*store.get(Ty::STRING), TyData::String)); + assert!(matches!(*store.get(Ty::CHAR), TyData::Char)); + } + + #[test] + fn test_intern_deduplication() { + let mut store = TyStore::new(); + + // Same type data should return same ID + let arr1 = store.array(Ty::NUMBER); + let arr2 = store.array(Ty::NUMBER); + assert_eq!(arr1, arr2); + + // Different element type should be different + let arr3 = store.array(Ty::STRING); + assert_ne!(arr1, arr3); + } + + #[test] + fn test_array_type() { + let mut store = TyStore::new(); + + let arr = store.array(Ty::NUMBER); + assert!( + matches!(*store.get(arr), TyData::Array { elem, is_set: false } if elem == Ty::NUMBER) + ); + } + + #[test] + fn test_array_set_type() { + let mut store = TyStore::new(); + + let arr_set = store.array_set(Ty::NUMBER); + assert!( + matches!(*store.get(arr_set), TyData::Array { elem, is_set: true } if elem == Ty::NUMBER) + ); + + // Sets and arrays are distinct types + let arr = store.array(Ty::NUMBER); + assert_ne!(arr, arr_set); + } + + #[test] + fn test_tuple_type() { + let mut store = TyStore::new(); + + let tuple = store.tuple(vec![Ty::NUMBER, 
Ty::STRING, Ty::BOOL]); + assert_matches!(*store.get(tuple), TyData::Tuple { ref elems } => { + assert_eq!(elems, &vec![Ty::NUMBER, Ty::STRING, Ty::BOOL]); + }); + } + + #[test] + fn test_union_simplification() { + let mut store = TyStore::new(); + + // Empty union is Never + let empty = store.union(vec![]); + assert_eq!(empty, Ty::NEVER); + + // Single element union is just the element + let single = store.union(vec![Ty::NUMBER]); + assert_eq!(single, Ty::NUMBER); + + // Union with Any is Any + let with_any = store.union(vec![Ty::NUMBER, Ty::ANY, Ty::STRING]); + assert_eq!(with_any, Ty::ANY); + + // Union without Never removes it + let with_never = store.union(vec![Ty::NUMBER, Ty::NEVER, Ty::STRING]); + if let TyData::Union(ref types) = *store.get(with_never) { + assert!(!types.contains(&Ty::NEVER)); + } + } + + #[test] + fn test_union_flattening() { + let mut store = TyStore::new(); + + // Create nested union + let inner = store.union(vec![Ty::NUMBER, Ty::STRING]); + let outer = store.union(vec![inner, Ty::BOOL]); + + // Should be flattened - union types are sorted for determinism + assert_matches!(*store.get(outer), TyData::Union(ref types) => { + let mut expected = vec![Ty::NUMBER, Ty::STRING, Ty::BOOL]; + expected.sort(); + let mut actual = types.clone(); + actual.sort(); + assert_eq!(actual, expected); + }); + } + + #[test] + fn test_sum_simplification() { + let mut store = TyStore::new(); + + // Empty sum is Any + let empty = store.sum(vec![]); + assert_eq!(empty, Ty::ANY); + + // Sum with Never is Never + let with_never = store.sum(vec![Ty::NUMBER, Ty::NEVER, Ty::STRING]); + assert_eq!(with_never, Ty::NEVER); + + // Sum with Any removes it (Any is identity) + let with_any = store.sum(vec![Ty::NUMBER, Ty::ANY]); + assert_eq!(with_any, Ty::NUMBER); + } + + #[test] + fn test_is_indexable() { + let mut store = TyStore::new(); + + assert!(store.is_indexable(Ty::ANY)); + assert!(store.is_indexable(Ty::STRING)); + assert!(store.is_indexable(Ty::CHAR)); + + let arr 
= store.array(Ty::NUMBER); + assert!(store.is_indexable(arr)); + + let obj = store.object_any(); + assert!(store.is_indexable(obj)); + + assert!(!store.is_indexable(Ty::NUMBER)); + assert!(!store.is_indexable(Ty::BOOL)); + } + + #[test] + fn test_supports_field_access() { + let mut store = TyStore::new(); + + assert!(store.supports_field_access(Ty::ANY)); + + let obj = store.object_any(); + assert!(store.supports_field_access(obj)); + + let attrs = store.attrs_of(Ty::NUMBER); + assert!(store.supports_field_access(attrs)); + + assert!(!store.supports_field_access(Ty::NUMBER)); + assert!(!store.supports_field_access(Ty::STRING)); + } + + #[test] + fn test_is_callable() { + let mut store = TyStore::new(); + + assert!(store.is_callable(Ty::ANY)); + + let func = store.function_simple(vec!["x"], Ty::NUMBER); + assert!(store.is_callable(func)); + + assert!(!store.is_callable(Ty::NUMBER)); + assert!(!store.is_callable(Ty::STRING)); + } + + #[test] + fn test_display() { + let mut store = TyStore::new(); + + assert_eq!(store.display(Ty::ANY), "any"); + assert_eq!(store.display(Ty::NEVER), "never"); + assert_eq!(store.display(Ty::NULL), "null"); + assert_eq!(store.display(Ty::BOOL), "boolean"); + assert_eq!(store.display(Ty::NUMBER), "number"); + assert_eq!(store.display(Ty::STRING), "string"); + + let arr = store.array(Ty::NUMBER); + assert_eq!(store.display(arr), "array"); + + let union = store.union(vec![Ty::NUMBER, Ty::STRING]); + assert_eq!(store.display(union), "number | string"); + } + + #[test] + fn test_ty_copy() { + // Ty should be Copy + let ty = Ty::NUMBER; + let ty2 = ty; // Copy, not move + assert_eq!(ty, ty2); + } + + #[test] + fn test_ty_size() { + // Ty should be 4 bytes + assert_eq!(std::mem::size_of::(), 4); + } + + #[test] + fn test_type_var() { + let mut store = TyStore::new(); + + let var = store.fresh_var(); + assert!(matches!(*store.get(var), TyData::TypeVar { .. 
})); + } + + #[test] + fn test_bounded_number() { + let mut store = TyStore::new(); + + let bounded = store.bounded_number(NumBounds::non_negative()); + assert_eq!(store.display(bounded), "number[0..]"); + } + + #[test] + fn test_literal_string() { + let mut store = TyStore::new(); + + let lit = store.literal_string("hello".to_string()); + assert_eq!(store.display(lit), "\"hello\""); + } + + #[test] + fn test_object_with_fields() { + let mut store = TyStore::new(); + + let obj = store.object(ObjectData { + fields: vec![ + ( + "name".to_string(), + FieldDefInterned { + ty: Ty::STRING, + required: true, + visibility: FieldVis::Normal, + }, + ), + ( + "age".to_string(), + FieldDefInterned { + ty: Ty::NUMBER, + required: true, + visibility: FieldVis::Normal, + }, + ), + ], + has_unknown: false, + }); + + assert_matches!(*store.get(obj), TyData::Object(ref data) => { + assert_eq!( + data.fields, + vec![ + ( + "name".to_string(), + FieldDefInterned { + ty: Ty::STRING, + required: true, + visibility: FieldVis::Normal, + }, + ), + ( + "age".to_string(), + FieldDefInterned { + ty: Ty::NUMBER, + required: true, + visibility: FieldVis::Normal, + }, + ), + ] + ); + }); + } + + #[test] + fn test_function_type() { + let mut store = TyStore::new(); + + let func = store.function(FunctionData { + params: vec![ + ParamInterned { + name: "x".to_string(), + ty: Ty::NUMBER, + has_default: false, + }, + ParamInterned { + name: "y".to_string(), + ty: Ty::STRING, + has_default: true, + }, + ], + return_spec: ReturnSpec::Fixed(Ty::BOOL), + variadic: false, + }); + + assert_matches!(*store.get(func), TyData::Function(ref data) => { + assert_eq!(data.required_count(), 1); + assert_eq!(data.total_count(), 2); + }); + } + + use rstest::rstest; + + #[rstest] + #[case::any_to_number(Ty::ANY, Ty::NUMBER, Ty::NUMBER)] + #[case::number_to_number(Ty::NUMBER, Ty::NUMBER, Ty::NUMBER)] + #[case::number_to_string_never(Ty::NUMBER, Ty::STRING, Ty::NEVER)] + #[case::never_stays_never(Ty::NEVER, 
Ty::NUMBER, Ty::NEVER)] + #[case::constraint_never_is_never(Ty::NUMBER, Ty::NEVER, Ty::NEVER)] + #[case::bool_to_true(Ty::BOOL, Ty::TRUE, Ty::TRUE)] + #[case::bool_to_false(Ty::BOOL, Ty::FALSE, Ty::FALSE)] + fn test_narrow(#[case] ty: Ty, #[case] constraint: Ty, #[case] expected: Ty) { + let mut store = TyStore::new(); + assert_eq!(store.narrow(ty, constraint), expected); + } + + #[test] + fn test_narrow_union() { + let mut store = TyStore::new(); + let union = store.union(vec![Ty::NUMBER, Ty::STRING]); + assert_eq!(store.narrow(union, Ty::NUMBER), Ty::NUMBER); + } + + #[test] + fn test_narrow_number_with_bounded() { + let mut store = TyStore::new(); + let bounded = store.bounded_number(NumBounds::non_negative()); + let result = store.narrow(Ty::NUMBER, bounded); + assert_eq!(result, bounded); + } + + #[test] + fn test_narrow_bounded_with_number() { + let mut store = TyStore::new(); + let bounded = store.bounded_number(NumBounds::non_negative()); + let result = store.narrow(bounded, Ty::NUMBER); + assert_eq!(result, bounded); + } + + #[test] + fn test_narrow_bounded_intersect() { + let mut store = TyStore::new(); + // [0, inf) intersected with [-inf, 10] = [0, 10] + let non_neg = store.bounded_number(NumBounds::non_negative()); + let at_most_10 = store.bounded_number(NumBounds { + min: None, + max: Some(10.0_f64.to_bits()), + }); + let result = store.narrow(non_neg, at_most_10); + let expected = store.bounded_number(NumBounds::between(0.0, 10.0)); + assert_eq!(result, expected); + } + + #[test] + fn test_narrow_bounded_empty_intersection() { + let mut store = TyStore::new(); + // [10, inf) intersected with [-inf, 5] = empty + let at_least_10 = store.bounded_number(NumBounds::at_least(10.0)); + let at_most_5 = store.bounded_number(NumBounds { + min: None, + max: Some(5.0_f64.to_bits()), + }); + let result = store.narrow(at_least_10, at_most_5); + assert_eq!(result, Ty::NEVER); + } + + #[test] + fn test_num_bounds_intersect_both_bounded() { + let b1 = 
NumBounds::between(0.0, 100.0); + let b2 = NumBounds::between(50.0, 200.0); + let result = b1.intersect(&b2).expect("should intersect"); + assert_eq!(result.min_f64(), Some(50.0)); + assert_eq!(result.max_f64(), Some(100.0)); + } + + #[test] + fn test_num_bounds_intersect_empty() { + let b1 = NumBounds::between(0.0, 10.0); + let b2 = NumBounds::between(20.0, 30.0); + assert!(b1.intersect(&b2).is_none()); + } + + #[test] + fn test_num_bounds_intersect_one_unbounded() { + let bounded = NumBounds::between(5.0, 15.0); + let unbounded = NumBounds::unbounded(); + let result = bounded.intersect(&unbounded).expect("should intersect"); + assert_eq!(result.min_f64(), Some(5.0)); + assert_eq!(result.max_f64(), Some(15.0)); + } + + #[rstest] + #[case::same_type_is_never(Ty::NUMBER, Ty::NUMBER, Ty::NEVER)] + #[case::different_type_unchanged(Ty::NUMBER, Ty::STRING, Ty::NUMBER)] + #[case::any_stays_any(Ty::ANY, Ty::NUMBER, Ty::ANY)] + #[case::never_stays_never(Ty::NEVER, Ty::NUMBER, Ty::NEVER)] + #[case::remove_never_unchanged(Ty::NUMBER, Ty::NEVER, Ty::NUMBER)] + fn test_widen(#[case] base: Ty, #[case] remove: Ty, #[case] expected: Ty) { + let mut store = TyStore::new(); + assert_eq!(store.widen(base, remove), expected); + } + + #[test] + fn test_widen_union_removes_type() { + let mut store = TyStore::new(); + let union = store.union(vec![Ty::NUMBER, Ty::STRING]); + assert_eq!(store.widen(union, Ty::NUMBER), Ty::STRING); + } + + #[test] + fn test_with_len_array_to_tuple() { + let mut store = TyStore::new(); + let arr = store.array(Ty::NUMBER); + let result = store.with_len(arr, 3); + let expected = store.tuple(vec![Ty::NUMBER, Ty::NUMBER, Ty::NUMBER]); + assert_eq!(result, expected); + } + + #[rstest] + #[case::string_len_1_to_char(Ty::STRING, 1, Ty::CHAR)] + #[case::char_len_1_stays(Ty::CHAR, 1, Ty::CHAR)] + #[case::char_len_0_never(Ty::CHAR, 0, Ty::NEVER)] + #[case::char_len_2_never(Ty::CHAR, 2, Ty::NEVER)] + fn test_with_len(#[case] ty: Ty, #[case] len: usize, #[case] 
expected: Ty) { + let mut store = TyStore::new(); + assert_eq!(store.with_len(ty, len), expected); + } + + mod test_is_subtype_of { + use super::*; + + #[rstest] + #[case::same_type(Ty::NUMBER, Ty::NUMBER, true)] + #[case::never_to_any(Ty::NEVER, Ty::ANY, true)] + #[case::never_to_number(Ty::NEVER, Ty::NUMBER, true)] + #[case::any_to_any(Ty::ANY, Ty::ANY, true)] + #[case::number_to_any(Ty::NUMBER, Ty::ANY, true)] + #[case::any_to_number(Ty::ANY, Ty::NUMBER, false)] + #[case::char_to_string(Ty::CHAR, Ty::STRING, true)] + #[case::string_to_char(Ty::STRING, Ty::CHAR, false)] + #[case::true_to_bool(Ty::TRUE, Ty::BOOL, true)] + #[case::false_to_bool(Ty::FALSE, Ty::BOOL, true)] + #[case::bool_to_true(Ty::BOOL, Ty::TRUE, false)] + #[case::number_to_string(Ty::NUMBER, Ty::STRING, false)] + fn test_well_known(#[case] sub: Ty, #[case] sup: Ty, #[case] expected: bool) { + let store = TyStore::new(); + assert_eq!(store.is_subtype_of(sub, sup), expected); + } + + #[test] + fn test_array_covariance() { + let mut store = TyStore::new(); + let arr_char = store.array(Ty::CHAR); + let arr_string = store.array(Ty::STRING); + // Array <: Array because Char <: String + assert!(store.is_subtype_of(arr_char, arr_string)); + // Array NOT <: Array + assert!(!store.is_subtype_of(arr_string, arr_char)); + } + + #[test] + fn test_tuple_to_array() { + let mut store = TyStore::new(); + let tuple = store.tuple(vec![Ty::CHAR, Ty::CHAR]); + let arr_string = store.array(Ty::STRING); + // [Char, Char] <: Array because Char <: String + assert!(store.is_subtype_of(tuple, arr_string)); + } + + #[test] + fn test_union_subtype() { + let mut store = TyStore::new(); + let union = store.union(vec![Ty::TRUE, Ty::FALSE]); + // (True | False) <: Bool + assert!(store.is_subtype_of(union, Ty::BOOL)); + } + + #[test] + fn test_subtype_of_union() { + let mut store = TyStore::new(); + let union = store.union(vec![Ty::NUMBER, Ty::STRING]); + // Number <: (Number | String) + assert!(store.is_subtype_of(Ty::NUMBER, 
union));
        }
    }

    mod test_constraints_satisfied_by {
        use super::*;

        #[test]
        fn test_no_constraints() {
            let store = TyStore::new();
            let constraints = TyConstraints::none();
            assert!(constraints.satisfied_by(Ty::NUMBER, &store));
            assert!(constraints.satisfied_by(Ty::STRING, &store));
            assert!(constraints.satisfied_by(Ty::ANY, &store));
        }

        #[test]
        fn test_must_be_indexable() {
            let mut store = TyStore::new();
            let constraints = TyConstraints {
                must_be_indexable: true,
                ..Default::default()
            };
            // Arrays are indexable
            let arr = store.array(Ty::NUMBER);
            assert!(constraints.satisfied_by(arr, &store));
            // Strings are indexable
            assert!(constraints.satisfied_by(Ty::STRING, &store));
            // Numbers are not indexable
            assert!(!constraints.satisfied_by(Ty::NUMBER, &store));
        }

        #[test]
        fn test_upper_bound() {
            let store = TyStore::new();
            let constraints = TyConstraints {
                upper_bound: Some(Ty::STRING),
                ..Default::default()
            };
            // Char <: String
            assert!(constraints.satisfied_by(Ty::CHAR, &store));
            // String <: String
            assert!(constraints.satisfied_by(Ty::STRING, &store));
            // Number NOT <: String
            assert!(!constraints.satisfied_by(Ty::NUMBER, &store));
        }
    }
}
diff --git a/crates/jrsonnet-lsp-types/src/subst.rs b/crates/jrsonnet-lsp-types/src/subst.rs
new file mode 100644
index 00000000..13c5218a
--- /dev/null
+++ b/crates/jrsonnet-lsp-types/src/subst.rs
@@ -0,0 +1,506 @@
//! Type substitution for merging local types into the global store.
//!
//! After file analysis, local types (created in `LocalTyStore`) need to be
//! merged into the global store. This module provides the substitution
//! mechanism that:
//!
//! 1. Topologically sorts local types by dependencies
//! 2. Re-interns each type into the global store
//! 3. Builds a mapping from local Ty → global Ty
//! 4. Allows applying this mapping to update references

use rustc_hash::FxHashMap;

use crate::{
    global_store::GlobalTyStore,
    local_store::LocalTyStore,
    store::{
        FieldDefInterned, FunctionData, ObjectData, ParamInterned, ReturnSpec, Ty, TyConstraints,
        TyData,
    },
};

/// Substitution mapping from local types to global types.
///
/// After analysis, this is used to rewrite local `Ty` references
/// to their corresponding global `Ty` values.
#[derive(Debug, Clone, Default)]
pub struct TySubst {
    /// Mapping from local Ty → global Ty.
    mapping: FxHashMap<Ty, Ty>,
}

impl TySubst {
    /// Create an empty substitution.
    pub fn new() -> Self {
        Self::default()
    }

    /// Merge local types into the global store, building the substitution map.
    ///
    /// This is the main entry point. It:
    /// 1. Topologically sorts local types by dependency order
    /// 2. Interns each into the global store (applying current substitutions)
    /// 3. Records the local→global mapping
    ///
    /// # Arguments
    /// - `global`: The global store to merge into (taken as `&GlobalTyStore`;
    ///   interning presumably goes through interior mutability — the tests
    ///   below call `global.intern` on a shared reference)
    /// - `local`: The local store to merge from (consumed)
    ///
    /// # Returns
    /// A substitution that can be used to rewrite local Ty references.
    pub fn merge(global: &GlobalTyStore, local: LocalTyStore) -> Self {
        let mut subst = Self::new();

        if local.is_empty() {
            return subst;
        }

        // Get topological ordering of local types
        let order = Self::topological_sort(&local);

        // Process each local type in dependency order
        for local_ty in order {
            let local_data = local.get_data(local_ty);

            // Apply current substitution to the type data
            let substituted_data = subst.apply_to_data(local_data);

            // Intern into global store (handles deduplication)
            let global_ty = global.intern(substituted_data);

            // Record the mapping
            subst.mapping.insert(local_ty, global_ty);
        }

        subst
    }

    /// Apply the substitution to a Ty.
    ///
    /// - Global types are returned unchanged
    /// - Local types are looked up in the mapping
    /// - Unknown local types return the original (shouldn't happen after merge)
    pub fn apply(&self, ty: Ty) -> Ty {
        if ty.is_global() {
            return ty;
        }
        self.mapping.get(&ty).copied().unwrap_or(ty)
    }

    /// Check if this substitution contains a mapping for a type.
    pub fn contains(&self, ty: Ty) -> bool {
        self.mapping.contains_key(&ty)
    }

    /// Get the number of mappings.
    pub fn len(&self) -> usize {
        self.mapping.len()
    }

    /// Check if empty.
    pub fn is_empty(&self) -> bool {
        self.mapping.is_empty()
    }

    /// Apply the substitution to TyData, rewriting all Ty references.
    fn apply_to_data(&self, data: &TyData) -> TyData {
        match data {
            // Primitives have no Ty references
            TyData::Any => TyData::Any,
            TyData::Never => TyData::Never,
            TyData::Null => TyData::Null,
            TyData::Bool => TyData::Bool,
            TyData::True => TyData::True,
            TyData::False => TyData::False,
            TyData::Number => TyData::Number,
            TyData::String => TyData::String,
            TyData::Char => TyData::Char,
            TyData::BoundedNumber(bounds) => TyData::BoundedNumber(*bounds),
            TyData::LiteralString(s) => TyData::LiteralString(s.clone()),

            // Compound types - recursively apply substitution
            TyData::Array { elem, is_set } => TyData::Array {
                elem: self.apply(*elem),
                is_set: *is_set,
            },

            TyData::Tuple { elems } => TyData::Tuple {
                elems: elems.iter().map(|&e| self.apply(e)).collect(),
            },

            TyData::Union(variants) => {
                TyData::Union(variants.iter().map(|&v| self.apply(v)).collect())
            }

            TyData::Sum(variants) => TyData::Sum(variants.iter().map(|&v| self.apply(v)).collect()),

            TyData::Object(obj) => TyData::Object(ObjectData {
                fields: obj
                    .fields
                    .iter()
                    .map(|(name, def)| {
                        (
                            name.clone(),
                            FieldDefInterned {
                                ty: self.apply(def.ty),
                                required: def.required,
                                visibility: def.visibility,
                            },
                        )
                    })
                    .collect(),
                has_unknown: obj.has_unknown,
            }),

            TyData::AttrsOf { value } => TyData::AttrsOf {
                value: self.apply(*value),
            },

            TyData::Function(func) => TyData::Function(FunctionData {
                params: func
                    .params
                    .iter()
                    .map(|p| ParamInterned {
                        name: p.name.clone(),
                        ty: self.apply(p.ty),
                        has_default: p.has_default,
                    })
                    .collect(),
                return_spec: match &func.return_spec {
                    ReturnSpec::Fixed(ret) => ReturnSpec::Fixed(self.apply(*ret)),
                    other => other.clone(),
                },
                variadic: func.variadic,
            }),

            TyData::TypeVar { id, constraints } => TyData::TypeVar {
                id: *id,
                constraints: TyConstraints {
                    must_be_indexable: constraints.must_be_indexable,
                    must_support_fields: constraints.must_support_fields,
                    must_be_callable: constraints.must_be_callable,
                    upper_bound: constraints.upper_bound.map(|b| self.apply(b)),
                },
            },
        }
    }

    /// Topologically sort local types by dependency order.
    ///
    /// Types that don't depend on other local types come first.
    /// This ensures that when we process a type, all its dependencies
    /// have already been mapped to global types.
    fn topological_sort(local: &LocalTyStore) -> Vec<Ty> {
        let types: Vec<_> = local.iter().collect();
        let n = types.len();

        if n == 0 {
            return vec![];
        }

        // Build adjacency list: edges[i] = types that type i depends on
        let mut in_degree: FxHashMap<Ty, usize> = FxHashMap::default();
        let mut dependents: FxHashMap<Ty, Vec<Ty>> = FxHashMap::default();

        for &(ty, _) in &types {
            in_degree.insert(ty, 0);
            dependents.insert(ty, Vec::new());
        }

        // Count dependencies (only local ones matter)
        for &(ty, data) in &types {
            let deps = Self::get_local_dependencies(data);
            *in_degree.get_mut(&ty).unwrap() = deps.len();
            for dep in deps {
                if let Some(dep_list) = dependents.get_mut(&dep) {
                    dep_list.push(ty);
                }
            }
        }

        // Kahn's algorithm for topological sort
        let mut result = Vec::with_capacity(n);
        let mut queue: Vec<Ty> = in_degree
            .iter()
            .filter(|(_, &deg)| deg == 0)
            .map(|(&ty, _)| ty)
            .collect();

        while let Some(ty) = queue.pop() {
            result.push(ty);

            if let Some(deps) = dependents.get(&ty) {
                for &dependent in deps {
                    if let Some(deg) = in_degree.get_mut(&dependent) {
                        *deg -= 1;
                        if *deg == 0 {
                            queue.push(dependent);
                        }
                    }
                }
            }
        }

        // If we didn't process all types, there's a cycle.
        // This shouldn't happen with well-formed types, but handle gracefully.
        if result.len() < n {
            // Add remaining types in arbitrary order
            for &(ty, _) in &types {
                if !result.contains(&ty) {
                    result.push(ty);
                }
            }
        }

        result
    }

    /// Get local Ty references in a TyData.
    fn get_local_dependencies(data: &TyData) -> Vec<Ty> {
        let mut deps = Vec::new();
        Self::collect_local_refs(data, &mut deps);
        deps
    }

    /// Recursively collect local Ty references.
    fn collect_local_refs(data: &TyData, deps: &mut Vec<Ty>) {
        match data {
            TyData::Array { elem, .. } => {
                if elem.is_local() {
                    deps.push(*elem);
                }
            }
            TyData::Tuple { elems } => {
                for &e in elems {
                    if e.is_local() {
                        deps.push(e);
                    }
                }
            }
            TyData::Union(variants) | TyData::Sum(variants) => {
                for &v in variants {
                    if v.is_local() {
                        deps.push(v);
                    }
                }
            }
            TyData::Object(obj) => {
                for (_, def) in &obj.fields {
                    if def.ty.is_local() {
                        deps.push(def.ty);
                    }
                }
            }
            TyData::AttrsOf { value } => {
                if value.is_local() {
                    deps.push(*value);
                }
            }
            TyData::Function(func) => {
                for p in &func.params {
                    if p.ty.is_local() {
                        deps.push(p.ty);
                    }
                }
                if let ReturnSpec::Fixed(ret) = &func.return_spec {
                    if ret.is_local() {
                        deps.push(*ret);
                    }
                }
            }
            TyData::TypeVar { constraints, .. } => {
                if let Some(bound) = constraints.upper_bound {
                    if bound.is_local() {
                        deps.push(bound);
                    }
                }
            }
            // Primitives have no references
            _ => {}
        }
    }
}

#[cfg(test)]
mod tests {
    use assert_matches::assert_matches;

    use super::*;

    #[test]
    fn test_subst_empty_local() {
        let global = GlobalTyStore::new();
        let local = LocalTyStore::new();

        let subst = TySubst::merge(&global, local);
        assert!(subst.is_empty());
    }

    #[test]
    fn test_subst_simple_type() {
        let global = GlobalTyStore::new();
        let mut local = LocalTyStore::new();

        // Create a local array type
        let arr = local.intern(TyData::Array {
            elem: Ty::NUMBER,
            is_set: false,
        });
        assert!(arr.is_local());

        let subst = TySubst::merge(&global, local);
        assert_eq!(subst.len(), 1);

        // The mapped type should be global
        let global_arr = subst.apply(arr);
        assert!(global_arr.is_global());

        // Verify the data is correct
        let data = global.get_data(global_arr);
        assert_matches!(data, TyData::Array { elem, .. } if elem == Ty::NUMBER);
    }

    #[test]
    fn test_subst_nested_types() {
        let global = GlobalTyStore::new();
        let mut local = LocalTyStore::new();

        // Create nested local types: Array<Array<Number>>
        let inner = local.intern(TyData::Array {
            elem: Ty::NUMBER,
            is_set: false,
        });
        let outer = local.intern(TyData::Array {
            elem: inner,
            is_set: false,
        });

        assert!(inner.is_local());
        assert!(outer.is_local());

        let subst = TySubst::merge(&global, local);
        assert_eq!(subst.len(), 2);

        // Both should be mapped to global
        let global_inner = subst.apply(inner);
        let global_outer = subst.apply(outer);
        assert!(global_inner.is_global());
        assert!(global_outer.is_global());

        // Verify the outer type references the global inner
        let outer_data = global.get_data(global_outer);
        assert_matches!(outer_data, TyData::Array { elem, .. } if elem == global_inner);
    }

    #[test]
    fn test_subst_global_types_unchanged() {
        let subst = TySubst::new();

        // Global types should pass through unchanged
        assert_eq!(subst.apply(Ty::NUMBER), Ty::NUMBER);
        assert_eq!(subst.apply(Ty::STRING), Ty::STRING);
        assert_eq!(subst.apply(Ty::ANY), Ty::ANY);
    }

    #[test]
    fn test_subst_deduplication() {
        let global = GlobalTyStore::new();

        // Pre-intern a type in global
        let global_arr = global.intern(TyData::Array {
            elem: Ty::NUMBER,
            is_set: false,
        });

        // Create the same type locally
        let mut local = LocalTyStore::new();
        let local_arr = local.intern(TyData::Array {
            elem: Ty::NUMBER,
            is_set: false,
        });

        let subst = TySubst::merge(&global, local);

        // Should map to the existing global type
        assert_eq!(subst.apply(local_arr), global_arr);
    }

    #[test]
    fn test_subst_union_type() {
        let global = GlobalTyStore::new();
        let mut local = LocalTyStore::new();

        // Create a local union type
        let union = local.intern(TyData::Union(vec![Ty::NUMBER, Ty::STRING]));

        let subst = TySubst::merge(&global, local);

        let global_union = subst.apply(union);
        assert!(global_union.is_global());

        let data = global.get_data(global_union);
        assert_matches!(data, TyData::Union(variants) if variants == vec![Ty::NUMBER, Ty::STRING]);
    }

    #[test]
    fn test_subst_object_type() {
        let global = GlobalTyStore::new();
        let mut local = LocalTyStore::new();

        // Create a local object type
        let obj = local.intern(TyData::Object(ObjectData {
            fields: vec![(
                "name".to_string(),
                FieldDefInterned {
                    ty: Ty::STRING,
                    required: true,
                    visibility: super::super::store::FieldVis::Normal,
                },
            )],
            has_unknown: false,
        }));

        let subst = TySubst::merge(&global, local);

        let global_obj = subst.apply(obj);
        assert!(global_obj.is_global());
    }

    #[test]
    fn test_topological_sort_independent() {
        let mut local = LocalTyStore::new();

        // Create independent types (no dependencies between them)
        let _arr1 = local.intern(TyData::Array {
            elem: Ty::NUMBER,
            is_set: false,
        });
        let _arr2 = local.intern(TyData::Array {
            elem: Ty::STRING,
            is_set: false,
        });

        let order = TySubst::topological_sort(&local);
        assert_eq!(order.len(), 2);
    }

    #[test]
    fn test_topological_sort_dependent() {
        let mut local = LocalTyStore::new();

        // Create dependent types
        let inner = local.intern(TyData::Array {
            elem: Ty::NUMBER,
            is_set: false,
        });
        let outer = local.intern(TyData::Array {
            elem: inner,
            is_set: false,
        });

        let order = TySubst::topological_sort(&local);
        // inner should come before outer (inner has no deps, outer depends on inner)
        assert_eq!(order, vec![inner, outer]);
    }
}
diff --git a/crates/jrsonnet-lsp-types/src/unification.rs b/crates/jrsonnet-lsp-types/src/unification.rs
new file mode 100644
index 00000000..241f8503
--- /dev/null
+++ b/crates/jrsonnet-lsp-types/src/unification.rs
@@ -0,0 +1,874 @@
//! Type unification with structural subtyping and variance.
//!
//! This module implements type unification for Jsonnet's type system.
//!
Unification checks whether a "got" type can be used where an "expected" +//! type is required, with proper handling of variance for function types. +//! +//! # Variance +//! +//! - **Covariant**: Most positions. `A <: B` means `A` can substitute for `B`. +//! More specific types can be used where more general types are expected. +//! Example: `Array` can be used where `Array` is expected. +//! +//! - **Contravariant**: Function parameters only. The relationship flips. +//! A function accepting `Any` can substitute for one expecting `Number`, +//! because it can handle all the inputs the original could. +//! +//! # Examples +//! +//! ```ignore +//! // Covariant: Number <: Any, so Array <: Array +//! unify(Array, Array, Covariant) // Ok +//! +//! // Contravariant for params: fn(Any) can substitute for fn(Number) +//! unify(fn(Any) -> String, fn(Number) -> String, Covariant) // Ok +//! +//! // But fn(Number) cannot substitute for fn(Any) +//! unify(fn(Number) -> String, fn(Any) -> String, Covariant) // Err +//! ``` + +use std::fmt::Write as _; + +use crate::store::{Ty, TyData, TypeStoreOps}; + +/// Variance determines how subtyping propagates through type constructors. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum Variance { + /// Normal subtyping direction. More specific types can substitute for general ones. + Covariant, + /// Reversed subtyping direction. Used for function parameters. + Contravariant, +} + +impl Variance { + /// Flip variance (used when descending into contravariant positions). + #[must_use] + pub fn flip(self) -> Self { + match self { + Variance::Covariant => Variance::Contravariant, + Variance::Contravariant => Variance::Covariant, + } + } +} + +/// Path element describing where in the type structure an error occurred. +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum PathElement { + /// In a function parameter at the given index (0-based). + Parameter(usize), + /// In the return type of a function. 
+ ReturnType, + /// In an object field with the given name. + Field(String), + /// In an array element type. + ArrayElement, + /// In a tuple element at the given index. + TupleElement(usize), + /// In a union variant. + UnionVariant, +} + +impl std::fmt::Display for PathElement { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + PathElement::Parameter(i) => write!(f, "parameter {}", i + 1), + PathElement::ReturnType => write!(f, "return type"), + PathElement::Field(name) => write!(f, "field '{}'", name), + PathElement::ArrayElement => write!(f, "array element"), + PathElement::TupleElement(i) => write!(f, "element {}", i), + PathElement::UnionVariant => write!(f, "union variant"), + } + } +} + +/// Reason why unification failed. +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum UnifyReason { + /// Incompatible base types (e.g., Number vs String). + TypeMismatch, + /// Object is missing a required field. + MissingField(String), + /// Closed object has an unexpected field. + ExtraField(String), + /// Function parameter count doesn't match. + ParamCountMismatch { got: usize, expected: usize }, + /// Nested unification error (for recursive structures). + Nested(Box), + /// Failed to unify with any variant of a union type. + /// Contains the errors from attempting each variant. + UnionMismatch(Vec), +} + +/// Detailed error explaining why unification failed. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct UnifyError { + /// The type that was provided (interned). + pub got: Ty, + /// The type that was expected (interned). + pub expected: Ty, + /// Why unification failed. + pub reason: UnifyReason, + /// Path to where the error occurred (innermost first). + pub path: Vec, +} + +impl UnifyError { + /// Create a new unification error. 
+ pub fn new(got: Ty, expected: Ty, reason: UnifyReason) -> Self { + Self { + got, + expected, + reason, + path: Vec::new(), + } + } + + /// Add a path element to track where in the type structure the error occurred. + #[must_use] + pub fn with_path(mut self, element: PathElement) -> Self { + self.path.push(element); + self + } + + /// Format the error for display. + pub fn format(&self, store: &S) -> String { + let mut msg = format!( + "type mismatch: expected `{}`, got `{}`", + store.display(self.expected), + store.display(self.got) + ); + + if !self.path.is_empty() { + msg.push_str(" in "); + for (i, element) in self.path.iter().rev().enumerate() { + if i > 0 { + msg.push_str(" -> "); + } + msg.push_str(&element.to_string()); + } + } + + match &self.reason { + UnifyReason::TypeMismatch => {} + UnifyReason::MissingField(name) => { + let _ = write!(msg, " (missing required field '{}')", name); + } + UnifyReason::ExtraField(name) => { + let _ = write!(msg, " (unexpected field '{}')", name); + } + UnifyReason::ParamCountMismatch { got, expected } => { + let _ = write!(msg, " (expected {} parameters, got {})", expected, got); + } + UnifyReason::Nested(inner) => { + let _ = write!(msg, " caused by: {}", inner.format(store)); + } + UnifyReason::UnionMismatch(variant_errors) => { + msg.push_str(" (no matching union variant)"); + for err in variant_errors { + // Show the variant type that was attempted + let reason = match &err.reason { + UnifyReason::TypeMismatch => "type mismatch".to_string(), + UnifyReason::MissingField(name) => format!("missing field '{}'", name), + UnifyReason::ExtraField(name) => format!("unexpected field '{}'", name), + UnifyReason::ParamCountMismatch { got, expected } => { + format!("expected {} params, got {}", expected, got) + } + UnifyReason::Nested(inner) => inner.format(store), + UnifyReason::UnionMismatch(_) => "nested union mismatch".to_string(), + }; + let _ = write!(msg, "\n tried `{}`: {reason}", store.display(err.expected)); + } + } + 
} + + msg + } +} + +/// Result of unification. +pub type UnifyResult = Result<(), UnifyError>; + +/// Check if `got` type can be used where `expected` type is required. +/// +/// This is the primary API for type unification with interned types. +/// Works directly with TyData, avoiding allocation from export(). +pub fn unify_ty( + store: &S, + got: Ty, + expected: Ty, + variance: Variance, +) -> UnifyResult { + // Fast path: identical types always unify + if got == expected { + return Ok(()); + } + + // Fast paths for well-known types + if got == Ty::NEVER { + return Ok(()); // Never is bottom, unifies with anything + } + if expected == Ty::ANY { + return Ok(()); // Any accepts everything in covariant position + } + if got == Ty::ANY && variance == Variance::Contravariant { + return Ok(()); // Any in contravariant position + } + + // Delegate to the full implementation + unify_ty_impl(store, got, expected, variance) +} + +/// Internal unification implementation. +fn unify_ty_impl( + store: &S, + got: Ty, + expected: Ty, + variance: Variance, +) -> UnifyResult { + let got_data = store.get_data(got); + let expected_data = store.get_data(expected); + + // Handle top and bottom types + match (&got_data, &expected_data) { + // Never is bottom type - unifies with anything + // Any = Any regardless of variance + (TyData::Never, _) | (TyData::Any, TyData::Any) => return Ok(()), + + // When expected is Any (and got is not Any) + (_, TyData::Any) => { + return match variance { + Variance::Covariant => Ok(()), + Variance::Contravariant => Err(make_error(got, expected)), + }; + } + + // When got is Any (and expected is not Any) + (TyData::Any, _) => { + return match variance { + Variance::Contravariant => Ok(()), + Variance::Covariant => Err(make_error(got, expected)), + }; + } + + // Expecting Never means unreachable code - only Never satisfies it + (_, TyData::Never) => return Err(make_error(got, expected)), + + // Type variables unify with any type + (TyData::TypeVar { 
constraints, .. }, _) => { + if let Some(bound) = &constraints.upper_bound { + return unify_ty(store, *bound, expected, variance); + } + return Ok(()); + } + (_, TyData::TypeVar { constraints, .. }) => { + if let Some(bound) = &constraints.upper_bound { + return unify_ty(store, got, *bound, variance); + } + return Ok(()); + } + + _ => {} + } + + // Handle unions + if let TyData::Union(got_variants) = store.get_data(got) { + // All variants of got must unify with expected + for variant in got_variants { + unify_ty(store, variant, expected, variance)?; + } + return Ok(()); + } + + if let TyData::Union(expected_variants) = store.get_data(expected) { + // Got must unify with at least one expected variant + // Collect all errors for comprehensive reporting + let mut variant_errors = Vec::new(); + for variant in expected_variants { + match unify_ty(store, got, variant, variance) { + Ok(()) => return Ok(()), + Err(e) => variant_errors.push(e), + } + } + // None of the variants matched - report all errors + return Err(UnifyError::new( + got, + expected, + UnifyReason::UnionMismatch(variant_errors), + )); + } + + // Re-fetch data for the concrete type matching + let got_data = store.get_data(got); + let expected_data = store.get_data(expected); + + // Now handle concrete type pairs + match (&got_data, &expected_data) { + // Primitives must match exactly + (TyData::Null, TyData::Null) + | (TyData::Bool | TyData::True | TyData::False, TyData::Bool) + | (TyData::Number | TyData::BoundedNumber(_), TyData::Number) + | (TyData::String | TyData::Char | TyData::LiteralString(_), TyData::String) + | (TyData::Char, TyData::Char) + | (TyData::True, TyData::True) + | (TyData::False, TyData::False) => Ok(()), + (TyData::LiteralString(a), TyData::LiteralString(b)) if a == b => Ok(()), + + // BoundedNumber with narrower bounds is subtype of wider + (TyData::BoundedNumber(got_bounds), TyData::BoundedNumber(expected_bounds)) => { + if got_bounds.is_subset_of(expected_bounds) { + Ok(()) + } 
else { + Err(make_error(got, expected)) + } + } + + // Arrays are covariant in element type + ( + TyData::Array { elem: got_elem, .. }, + TyData::Array { + elem: expected_elem, + .. + }, + ) => unify_ty(store, *got_elem, *expected_elem, variance) + .map_err(|e| e.with_path(PathElement::ArrayElement)), + + // Tuple to Array: all tuple elements must unify with array element + ( + TyData::Tuple { elems }, + TyData::Array { + elem: expected_elem, + .. + }, + ) => { + for (i, elem) in elems.iter().enumerate() { + unify_ty(store, *elem, *expected_elem, variance) + .map_err(|e| e.with_path(PathElement::TupleElement(i)))?; + } + Ok(()) + } + + // Tuples must have same length, elements unify positionally + ( + TyData::Tuple { elems: got_elems }, + TyData::Tuple { + elems: expected_elems, + }, + ) => { + if got_elems.len() != expected_elems.len() { + return Err(make_error(got, expected)); + } + for (i, (g, e)) in got_elems.iter().zip(expected_elems.iter()).enumerate() { + unify_ty(store, *g, *e, variance) + .map_err(|err| err.with_path(PathElement::TupleElement(i)))?; + } + Ok(()) + } + + // Objects use structural subtyping + (TyData::Object(got_obj), TyData::Object(expected_obj)) => { + unify_objects_ty(store, got_obj, expected_obj, variance, got, expected) + } + + // AttrsOf is covariant in T + ( + TyData::AttrsOf { value: got_value }, + TyData::AttrsOf { + value: expected_value, + }, + ) => unify_ty(store, *got_value, *expected_value, variance) + .map_err(|e| e.with_path(PathElement::Field("[*]".to_string()))), + + // Object with known fields can be used where AttrsOf is expected + ( + TyData::Object(got_obj), + TyData::AttrsOf { + value: expected_value, + }, + ) => { + for (field_name, field_def) in &got_obj.fields { + unify_ty(store, field_def.ty, *expected_value, variance) + .map_err(|e| e.with_path(PathElement::Field(field_name.clone())))?; + } + Ok(()) + } + + // AttrsOf can be used where open Object is expected + (TyData::AttrsOf { .. 
}, TyData::Object(expected_obj)) => { + if expected_obj.has_unknown && expected_obj.fields.is_empty() { + Ok(()) + } else { + Err(make_error(got, expected)) + } + } + + // Functions have contravariant parameters, covariant return + (TyData::Function(got_fn), TyData::Function(expected_fn)) => { + unify_functions_ty(store, got_fn, expected_fn, variance, got, expected) + } + + // All other combinations are mismatches + _ => Err(make_error(got, expected)), + } +} + +/// Unify two object types structurally. +fn unify_objects_ty( + store: &S, + got: &super::store::ObjectData, + expected: &super::store::ObjectData, + variance: Variance, + got_ty: Ty, + expected_ty: Ty, +) -> UnifyResult { + // Check that got has all required fields from expected + for (field_name, expected_field) in &expected.fields { + match got.fields.iter().find(|(n, _)| n == field_name) { + Some((_, got_field)) => { + // Field exists - unify the types + unify_ty(store, got_field.ty, expected_field.ty, variance) + .map_err(|e| e.with_path(PathElement::Field(field_name.clone())))?; + } + None => { + // Field missing - error if required and got is closed + if expected_field.required && !got.has_unknown { + return Err(UnifyError::new( + got_ty, + expected_ty, + UnifyReason::MissingField(field_name.clone()), + )); + } + } + } + } + + // If expected is closed, got cannot have extra fields + if !expected.has_unknown { + for (field_name, _) in &got.fields { + if !expected.fields.iter().any(|(n, _)| n == field_name) { + return Err(UnifyError::new( + got_ty, + expected_ty, + UnifyReason::ExtraField(field_name.clone()), + )); + } + } + } + + Ok(()) +} + +/// Unify two function types with proper variance handling. 
+fn unify_functions_ty( + store: &S, + got: &super::store::FunctionData, + expected: &super::store::FunctionData, + variance: Variance, + got_ty: Ty, + expected_ty: Ty, +) -> UnifyResult { + // Check parameter count compatibility + let got_required = got.params.iter().filter(|p| !p.has_default).count(); + let expected_required = expected.params.iter().filter(|p| !p.has_default).count(); + + if got_required > expected.params.len() || got.params.len() < expected_required { + return Err(UnifyError::new( + got_ty, + expected_ty, + UnifyReason::ParamCountMismatch { + got: got.params.len(), + expected: expected.params.len(), + }, + )); + } + + // Parameters are CONTRAVARIANT: swap argument order + let param_count = got.params.len().min(expected.params.len()); + for i in 0..param_count { + let got_param_ty = got.params[i].ty; + let expected_param_ty = expected.params[i].ty; + + // Swap argument order for contravariant position + unify_ty(store, expected_param_ty, got_param_ty, variance) + .map_err(|e| e.with_path(PathElement::Parameter(i)))?; + } + + // Return type is COVARIANT + // Extract fixed return type or default to Any for dynamic specs + let got_return = match &got.return_spec { + super::store::ReturnSpec::Fixed(ty) => *ty, + _ => Ty::ANY, // Dynamic return specs can't be checked statically + }; + let expected_return = match &expected.return_spec { + super::store::ReturnSpec::Fixed(ty) => *ty, + _ => Ty::ANY, + }; + + unify_ty(store, got_return, expected_return, variance) + .map_err(|e| e.with_path(PathElement::ReturnType)) +} + +/// Create a UnifyError for type mismatch. +fn make_error(got: Ty, expected: Ty) -> UnifyError { + UnifyError::new(got, expected, UnifyReason::TypeMismatch) +} + +/// Check if `subtype` is a subtype of `supertype`. 
+pub fn is_subtype_ty(store: &S, subtype: Ty, supertype: Ty) -> bool { + // Fast path: identical types + if subtype == supertype { + return true; + } + + // Fast paths for well-known types + if subtype == Ty::NEVER { + return true; // Never <: everything + } + if supertype == Ty::ANY { + return true; // everything <: Any + } + + unify_ty(store, subtype, supertype, Variance::Covariant).is_ok() +} + +/// Check if two types are equivalent. +pub fn types_equivalent_ty(store: &S, a: Ty, b: Ty) -> bool { + if a == b { + return true; + } + is_subtype_ty(store, a, b) && is_subtype_ty(store, b, a) +} + +#[cfg(test)] +mod tests { + use assert_matches::assert_matches; + + use super::{ + super::store::{ + FieldDefInterned, FieldVis, FunctionData, NumBounds, ObjectData, ParamInterned, + ReturnSpec, Ty, TyStore, + }, + *, + }; + + /// Helper to create a simple function type + fn func_ty(store: &mut TyStore, params: Vec, ret: Ty) -> Ty { + let params: Vec = params + .into_iter() + .enumerate() + .map(|(i, ty)| ParamInterned { + name: format!("p{}", i), + ty, + has_default: false, + }) + .collect(); + store.function(FunctionData { + params, + return_spec: ReturnSpec::Fixed(ret), + variadic: false, + }) + } + + /// Helper to create an object type + fn obj_ty(store: &mut TyStore, fields: Vec<(&str, Ty, bool)>, has_unknown: bool) -> Ty { + let fields = fields + .into_iter() + .map(|(name, ty, required)| { + ( + name.to_string(), + FieldDefInterned { + ty, + required, + visibility: FieldVis::Normal, + }, + ) + }) + .collect(); + store.object(ObjectData { + fields, + has_unknown, + }) + } + + #[test] + fn test_primitives_unify() { + let store = TyStore::new(); + unify_ty(&store, Ty::NUMBER, Ty::NUMBER, Variance::Covariant) + .expect("Number unifies with Number"); + unify_ty(&store, Ty::STRING, Ty::STRING, Variance::Covariant) + .expect("String unifies with String"); + unify_ty(&store, Ty::BOOL, Ty::BOOL, Variance::Covariant).expect("Bool unifies with Bool"); + unify_ty(&store, 
Ty::NULL, Ty::NULL, Variance::Covariant).expect("Null unifies with Null"); + } + + #[test] + fn test_any_accepts_all() { + let store = TyStore::new(); + unify_ty(&store, Ty::NUMBER, Ty::ANY, Variance::Covariant).expect("Number subtype of Any"); + unify_ty(&store, Ty::STRING, Ty::ANY, Variance::Covariant).expect("String subtype of Any"); + unify_ty(&store, Ty::BOOL, Ty::ANY, Variance::Covariant).expect("Bool subtype of Any"); + } + + #[test] + fn test_never_is_bottom() { + let store = TyStore::new(); + unify_ty(&store, Ty::NEVER, Ty::NUMBER, Variance::Covariant) + .expect("Never subtype of Number"); + unify_ty(&store, Ty::NEVER, Ty::STRING, Variance::Covariant) + .expect("Never subtype of String"); + unify_ty(&store, Ty::NEVER, Ty::ANY, Variance::Covariant).expect("Never subtype of Any"); + } + + #[test] + fn test_primitive_mismatch() { + let store = TyStore::new(); + let err1 = unify_ty(&store, Ty::NUMBER, Ty::STRING, Variance::Covariant) + .expect_err("Number != String"); + assert_matches!(err1.reason, UnifyReason::TypeMismatch); + let err2 = unify_ty(&store, Ty::BOOL, Ty::NUMBER, Variance::Covariant) + .expect_err("Bool != Number"); + assert_matches!(err2.reason, UnifyReason::TypeMismatch); + } + + #[test] + fn test_char_subtype_of_string() { + let store = TyStore::new(); + unify_ty(&store, Ty::CHAR, Ty::STRING, Variance::Covariant) + .expect("Char subtype of String"); + let err = unify_ty(&store, Ty::STRING, Ty::CHAR, Variance::Covariant) + .expect_err("String not subtype of Char"); + assert_matches!(err.reason, UnifyReason::TypeMismatch); + } + + #[test] + fn test_bool_literals() { + let store = TyStore::new(); + unify_ty(&store, Ty::TRUE, Ty::BOOL, Variance::Covariant).expect("True subtype of Bool"); + unify_ty(&store, Ty::FALSE, Ty::BOOL, Variance::Covariant).expect("False subtype of Bool"); + let err = unify_ty(&store, Ty::BOOL, Ty::TRUE, Variance::Covariant) + .expect_err("Bool not subtype of True"); + assert_matches!(err.reason, UnifyReason::TypeMismatch); 
+ } + + #[test] + fn test_array_covariance() { + let mut store = TyStore::new(); + let arr_num = store.array(Ty::NUMBER); + let arr_any = store.array(Ty::ANY); + + unify_ty(&store, arr_num, arr_any, Variance::Covariant) + .expect("Array subtype of Array"); + let err = unify_ty(&store, arr_any, arr_num, Variance::Covariant) + .expect_err("Array not subtype of Array"); + assert_matches!(err.reason, UnifyReason::TypeMismatch); + } + + #[test] + fn test_tuple_to_array() { + let mut store = TyStore::new(); + let tuple = store.tuple(vec![Ty::NUMBER, Ty::NUMBER]); + let arr_num = store.array(Ty::NUMBER); + + unify_ty(&store, tuple, arr_num, Variance::Covariant) + .expect("Tuple[Number, Number] subtype of Array"); + } + + #[test] + fn test_function_param_contravariance() { + let mut store = TyStore::new(); + let fn_any_to_num = func_ty(&mut store, vec![Ty::ANY], Ty::NUMBER); + let fn_num_to_num = func_ty(&mut store, vec![Ty::NUMBER], Ty::NUMBER); + + // fn(Any) -> Number can substitute for fn(Number) -> Number + unify_ty(&store, fn_any_to_num, fn_num_to_num, Variance::Covariant) + .expect("fn(Any)->Number subtype of fn(Number)->Number"); + + // fn(Number) -> Number cannot substitute for fn(Any) -> Number + let err = unify_ty(&store, fn_num_to_num, fn_any_to_num, Variance::Covariant) + .expect_err("fn(Number)->Number not subtype of fn(Any)->Number"); + assert_matches!(err.reason, UnifyReason::TypeMismatch); + } + + #[test] + fn test_function_return_covariance() { + let mut store = TyStore::new(); + let fn_to_num = func_ty(&mut store, vec![], Ty::NUMBER); + let fn_to_any = func_ty(&mut store, vec![], Ty::ANY); + + // fn() -> Number can substitute for fn() -> Any + unify_ty(&store, fn_to_num, fn_to_any, Variance::Covariant) + .expect("fn()->Number subtype of fn()->Any"); + + // fn() -> Any cannot substitute for fn() -> Number + let err = unify_ty(&store, fn_to_any, fn_to_num, Variance::Covariant) + .expect_err("fn()->Any not subtype of fn()->Number"); + 
assert_matches!(err.reason, UnifyReason::TypeMismatch); + } + + #[test] + fn test_object_structural_subtyping() { + let mut store = TyStore::new(); + let obj_ab = obj_ty( + &mut store, + vec![("a", Ty::NUMBER, true), ("b", Ty::STRING, true)], + false, + ); + let obj_a = obj_ty(&mut store, vec![("a", Ty::NUMBER, true)], true); + + // Object with more fields can be used where fewer are required (if open) + unify_ty(&store, obj_ab, obj_a, Variance::Covariant).expect("{a,b} subtype of {a,...}"); + } + + #[test] + fn test_object_missing_field() { + let mut store = TyStore::new(); + let obj_a = obj_ty(&mut store, vec![("a", Ty::NUMBER, true)], false); + let obj_ab = obj_ty( + &mut store, + vec![("a", Ty::NUMBER, true), ("b", Ty::STRING, true)], + false, + ); + + // Closed object with fewer fields cannot match one requiring more + let err = unify_ty(&store, obj_a, obj_ab, Variance::Covariant) + .expect_err("{a} not subtype of {a,b}"); + assert_matches!(err.reason, UnifyReason::MissingField { .. 
}); + } + + #[test] + fn test_bounded_number() { + let mut store = TyStore::new(); + let narrow = store.bounded_number(NumBounds::between(0.0, 10.0)); + let wide = store.bounded_number(NumBounds::between(-100.0, 100.0)); + + // Narrow bounds are subtype of wider bounds + unify_ty(&store, narrow, wide, Variance::Covariant).expect("[0,10] subtype of [-100,100]"); + let err1 = unify_ty(&store, wide, narrow, Variance::Covariant) + .expect_err("[-100,100] not subtype of [0,10]"); + assert_matches!(err1.reason, UnifyReason::TypeMismatch); + + // BoundedNumber is subtype of Number + unify_ty(&store, narrow, Ty::NUMBER, Variance::Covariant) + .expect("[0,10] subtype of Number"); + let err2 = unify_ty(&store, Ty::NUMBER, narrow, Variance::Covariant) + .expect_err("Number not subtype of [0,10]"); + assert_matches!(err2.reason, UnifyReason::TypeMismatch); + } + + #[test] + fn test_union_subtyping() { + let mut store = TyStore::new(); + let num_or_str = store.union(vec![Ty::NUMBER, Ty::STRING]); + + // Number is subtype of (Number | String) + unify_ty(&store, Ty::NUMBER, num_or_str, Variance::Covariant) + .expect("Number subtype of (Number|String)"); + unify_ty(&store, Ty::STRING, num_or_str, Variance::Covariant) + .expect("String subtype of (Number|String)"); + + // (Number | String) is NOT subtype of Number + let err = unify_ty(&store, num_or_str, Ty::NUMBER, Variance::Covariant) + .expect_err("(Number|String) not subtype of Number"); + assert_matches!(err.reason, UnifyReason::TypeMismatch); + } + + #[test] + fn test_is_subtype() { + let store = TyStore::new(); + assert!(is_subtype_ty(&store, Ty::NUMBER, Ty::ANY)); + assert!(is_subtype_ty(&store, Ty::NEVER, Ty::NUMBER)); + assert!(!is_subtype_ty(&store, Ty::NUMBER, Ty::STRING)); + } + + #[test] + fn test_types_equivalent() { + let store = TyStore::new(); + assert!(types_equivalent_ty(&store, Ty::NUMBER, Ty::NUMBER)); + assert!(!types_equivalent_ty(&store, Ty::NUMBER, Ty::ANY)); + } + + #[test] + fn 
test_union_mismatch_error() { + let mut store = TyStore::new(); + let num_or_str = store.union(vec![Ty::NUMBER, Ty::STRING]); + + // Bool cannot unify with (Number | String), should get comprehensive error + let result = unify_ty(&store, Ty::BOOL, num_or_str, Variance::Covariant); + + // Should be a UnionMismatch with errors for each variant + assert_matches!(result, Err(UnifyError { reason: UnifyReason::UnionMismatch(ref errs), .. }) => { + // Should have two errors - one for each union variant + assert_eq!( + errs.iter().map(|e| &e.reason).collect::>(), + vec![ + &UnifyReason::TypeMismatch, + &UnifyReason::TypeMismatch, + ] + ); + }); + } + + #[test] + fn test_union_mismatch_error_format() { + let mut store = TyStore::new(); + let num_or_str = store.union(vec![Ty::NUMBER, Ty::STRING]); + + let result = unify_ty(&store, Ty::BOOL, num_or_str, Variance::Covariant); + let err = result.expect_err("should fail"); + let formatted = err.format(&store); + + // Should show each variant type that was tried + assert!( + formatted.contains("tried `number`: type mismatch"), + "Should show number variant: {}", + formatted + ); + assert!( + formatted.contains("tried `string`: type mismatch"), + "Should show string variant: {}", + formatted + ); + } + + #[test] + fn test_union_mismatch_with_object_error() { + let mut store = TyStore::new(); + + // Create an object type with required field 'a' + let obj_with_a = store.object(ObjectData { + fields: vec![( + "a".to_string(), + FieldDefInterned { + ty: Ty::NUMBER, + required: true, + visibility: FieldVis::Normal, + }, + )], + has_unknown: false, + }); + + // Create union: Number | {a: number} + let union = store.union(vec![Ty::NUMBER, obj_with_a]); + + // Empty object should fail with specific error for each variant + let empty_obj = store.object(ObjectData { + fields: vec![], + has_unknown: false, + }); + + let result = unify_ty(&store, empty_obj, union, Variance::Covariant); + let err = result.expect_err("should fail"); + let 
formatted = err.format(&store); + + // Should show type mismatch for number and missing field for object + assert!( + formatted.contains("tried `number`"), + "Should show number variant: {}", + formatted + ); + assert!( + formatted.contains("missing field 'a'"), + "Should show missing field error: {}", + formatted + ); + } +} diff --git a/crates/jrsonnet-lsp/Cargo.toml b/crates/jrsonnet-lsp/Cargo.toml new file mode 100644 index 00000000..8680b64e --- /dev/null +++ b/crates/jrsonnet-lsp/Cargo.toml @@ -0,0 +1,52 @@ +[package] +name = "jrsonnet-lsp" +authors.workspace = true +edition.workspace = true +license.workspace = true +repository.workspace = true +version.workspace = true + +[dependencies] +anyhow.workspace = true +crossbeam-channel = "0.5" +jrsonnet-lsp-check = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-check" } +jrsonnet-lsp-document = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-document" } +jrsonnet-lsp-handlers = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-handlers" } +jrsonnet-lsp-import = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-import" } +jrsonnet-lsp-inference = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-inference" } +jrsonnet-lsp-types = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-types" } +jrsonnet-evaluator.workspace = true +jrsonnet-parser.workspace = true +jrsonnet-rowan-parser = { version = "0.5.0-pre97", path = "../jrsonnet-rowan-parser" } +jrsonnet-stdlib.workspace = true +lsp-server.workspace = true +lsp-types.workspace = true +parking_lot = "0.12" +rayon = "1.11.0" +rustc-hash.workspace = true +serde = { workspace = true, features = ["derive"] } +serde_json.workspace = true +tracing = "0.1.44" + +[lints] +workspace = true + +[dev-dependencies] +assert_matches = "1.5.0" +criterion = { version = "0.5", features = ["html_reports"] } +jrsonnet-lsp-scope = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-scope" } +rstest = "0.23" +serde_json.workspace = true +tempfile.workspace = true + +[[bench]] 
+name = "type_inference" +harness = false + +[[bench]] +name = "type_cache" +harness = false + +[[bench]] +name = "unification" +harness = false diff --git a/crates/jrsonnet-lsp/benches/type_cache.rs b/crates/jrsonnet-lsp/benches/type_cache.rs new file mode 100644 index 00000000..fdb8a563 --- /dev/null +++ b/crates/jrsonnet-lsp/benches/type_cache.rs @@ -0,0 +1,208 @@ +//! Benchmarks for type cache operations. +//! +//! Tests cache lookup, update, and invalidation performance. + +use std::sync::Arc; + +use criterion::{black_box, criterion_group, criterion_main, BenchmarkId, Criterion}; +use jrsonnet_lsp_document::CanonicalPath; +use jrsonnet_lsp_inference::{new_shared_cache, TypeCache}; +use jrsonnet_lsp_types::{GlobalTyStore, Ty}; + +fn make_path(i: usize) -> CanonicalPath { + CanonicalPath::new(std::path::PathBuf::from(format!("/test/file{i}.jsonnet"))) +} + +fn bench_cache_operations(c: &mut Criterion) { + let mut group = c.benchmark_group("type_cache"); + + // Benchmark cache updates + for size in [100, 500, 1000] { + let global = Arc::new(GlobalTyStore::new()); + let mut cache = TypeCache::new(Arc::clone(&global)); + + group.bench_with_input(BenchmarkId::new("update", size), &size, |b, &size| { + b.iter(|| { + for i in 0..size { + let path = make_path(i); + cache.update(&path, Ty::NUMBER, 1); + } + }); + }); + } + + // Benchmark cache lookups (cache hit) + for size in [100, 500, 1000] { + let global = Arc::new(GlobalTyStore::new()); + let mut cache = TypeCache::new(Arc::clone(&global)); + + // Pre-populate cache + for i in 0..size { + let path = make_path(i); + cache.update(&path, Ty::NUMBER, 1); + } + + group.bench_with_input(BenchmarkId::new("lookup_hit", size), &size, |b, &size| { + b.iter(|| { + for i in 0..size { + let path = make_path(i); + black_box(cache.get(&path)); + } + }); + }); + } + + // Benchmark cache lookups (cache miss) + for size in [100, 500, 1000] { + let global = Arc::new(GlobalTyStore::new()); + let cache = 
TypeCache::new(Arc::clone(&global)); + + group.bench_with_input(BenchmarkId::new("lookup_miss", size), &size, |b, &size| { + b.iter(|| { + for i in 0..size { + let path = make_path(i); + black_box(cache.get(&path)); + } + }); + }); + } + + // Benchmark invalidation + for size in [100, 500, 1000] { + let global = Arc::new(GlobalTyStore::new()); + + group.bench_with_input(BenchmarkId::new("invalidate", size), &size, |b, &size| { + b.iter_batched( + || { + // Setup: create and populate cache + let mut cache = TypeCache::new(Arc::clone(&global)); + for i in 0..size { + let path = make_path(i); + cache.update(&path, Ty::NUMBER, 1); + } + cache + }, + |mut cache| { + // Benchmark: invalidate all entries + for i in 0..size { + let path = make_path(i); + cache.invalidate(&path); + } + }, + criterion::BatchSize::SmallInput, + ); + }); + } + + // Benchmark bulk invalidation + for size in [100, 500, 1000] { + let global = Arc::new(GlobalTyStore::new()); + + group.bench_with_input( + BenchmarkId::new("invalidate_many", size), + &size, + |b, &size| { + b.iter_batched( + || { + // Setup: create and populate cache + let mut cache = TypeCache::new(Arc::clone(&global)); + let paths: Vec<_> = (0..size).map(make_path).collect(); + for path in &paths { + cache.update(path, Ty::NUMBER, 1); + } + (cache, paths) + }, + |(mut cache, paths)| { + // Benchmark: invalidate all at once + cache.invalidate_many(paths); + }, + criterion::BatchSize::SmallInput, + ); + }, + ); + } + + group.finish(); +} + +fn bench_shared_cache(c: &mut Criterion) { + let mut group = c.benchmark_group("shared_type_cache"); + + // Benchmark concurrent access patterns (simulated via sequential access with locking) + for size in [100, 500] { + let global = Arc::new(GlobalTyStore::new()); + let cache = new_shared_cache(Arc::clone(&global)); + + // Pre-populate + { + let mut write_cache = cache.write(); + for i in 0..size { + let path = make_path(i); + write_cache.update(&path, Ty::NUMBER, 1); + } + } + + 
group.bench_with_input(BenchmarkId::new("read_lock", size), &size, |b, &size| { + b.iter(|| { + for i in 0..size { + let path = make_path(i); + let read_cache = cache.read(); + black_box(read_cache.get(&path)); + } + }); + }); + + group.bench_with_input(BenchmarkId::new("write_lock", size), &size, |b, &size| { + b.iter(|| { + for i in 0..size { + let path = make_path(i); + let mut write_cache = cache.write(); + write_cache.update(&path, Ty::STRING, 2); + } + }); + }); + } + + group.finish(); +} + +fn bench_version_check(c: &mut Criterion) { + let mut group = c.benchmark_group("version_check"); + + let global = Arc::new(GlobalTyStore::new()); + let mut cache = TypeCache::new(Arc::clone(&global)); + + // Pre-populate with version 1 + for i in 0..1000 { + let path = make_path(i); + cache.update(&path, Ty::NUMBER, 1); + } + + group.bench_function("is_up_to_date_hit", |b| { + b.iter(|| { + for i in 0..1000 { + let path = make_path(i); + black_box(cache.is_up_to_date(&path, 1)); + } + }); + }); + + group.bench_function("is_up_to_date_miss", |b| { + b.iter(|| { + for i in 0..1000 { + let path = make_path(i); + black_box(cache.is_up_to_date(&path, 2)); // Different version + } + }); + }); + + group.finish(); +} + +criterion_group!( + benches, + bench_cache_operations, + bench_shared_cache, + bench_version_check +); +criterion_main!(benches); diff --git a/crates/jrsonnet-lsp/benches/type_inference.rs b/crates/jrsonnet-lsp/benches/type_inference.rs new file mode 100644 index 00000000..01509a4d --- /dev/null +++ b/crates/jrsonnet-lsp/benches/type_inference.rs @@ -0,0 +1,188 @@ +//! Benchmarks for type inference performance. +//! +//! Tests type inference on Jsonnet documents of varying sizes and complexity. 
+ +use std::{fmt::Write as _, sync::Arc}; + +use criterion::{black_box, criterion_group, criterion_main, BenchmarkId, Criterion, Throughput}; +use jrsonnet_lsp_document::{DocVersion, Document}; +use jrsonnet_lsp_inference::TypeAnalysis; +use jrsonnet_lsp_types::GlobalTyStore; + +/// Generate a simple Jsonnet document with N local bindings. +fn generate_locals_document(n: usize) -> String { + let mut code = String::new(); + for i in 0..n { + let _ = writeln!(&mut code, "local x{i} = {i};"); + } + code.push_str("{\n"); + for i in 0..n { + let _ = writeln!(&mut code, " field{i}: x{i},"); + } + code.push_str("}\n"); + code +} + +/// Generate a nested object document with depth D and width W. +fn generate_nested_object(depth: usize, width: usize) -> String { + fn generate_level(depth: usize, width: usize, indent: usize) -> String { + let spaces = " ".repeat(indent); + if depth == 0 { + return format!("{spaces}value: 42"); + } + let mut fields = Vec::new(); + for i in 0..width { + let inner = generate_level(depth - 1, width, indent + 2); + fields.push(format!("{spaces}field{i}: {{\n{inner}\n{spaces}}}")); + } + fields.join(",\n") + } + format!("{{\n{}\n}}", generate_level(depth, width, 2)) +} + +/// Generate a document with function definitions and calls. +fn generate_functions_document(n: usize) -> String { + let mut code = String::new(); + // Define functions + for i in 0..n { + let _ = writeln!(&mut code, "local fn{i}(x) = x + {i};"); + } + // Call functions + code.push_str("[\n"); + for i in 0..n { + let _ = writeln!(&mut code, " fn{i}({i}),"); + } + code.push_str("]\n"); + code +} + +/// Generate a document with array comprehensions. 
+fn generate_comprehension_document(size: usize) -> String { + format!( + r"local data = std.range(0, {size}); +[x * 2 for x in data if x % 2 == 0] + " + ) +} + +fn bench_type_inference(c: &mut Criterion) { + let mut group = c.benchmark_group("type_inference"); + + // Benchmark locals scaling + for size in [10, 50, 100, 200, 500] { + let code = generate_locals_document(size); + let doc = Document::new(code.clone(), DocVersion::new(1)); + let global = Arc::new(GlobalTyStore::new()); + + group.throughput(Throughput::Elements(size as u64)); + group.bench_with_input( + BenchmarkId::new("locals", size), + &(doc, global), + |b, (doc, global)| { + b.iter(|| { + let analysis = + TypeAnalysis::analyze_with_global(black_box(doc), Arc::clone(global)); + black_box(analysis.document_type()) + }); + }, + ); + } + + // Benchmark nested objects + for (depth, width) in [(2, 3), (3, 3), (4, 2), (5, 2)] { + let code = generate_nested_object(depth, width); + let doc = Document::new(code.clone(), DocVersion::new(1)); + let global = Arc::new(GlobalTyStore::new()); + let label = format!("d{depth}w{width}"); + + group.bench_with_input( + BenchmarkId::new("nested_object", &label), + &(doc, global), + |b, (doc, global)| { + b.iter(|| { + let analysis = + TypeAnalysis::analyze_with_global(black_box(doc), Arc::clone(global)); + black_box(analysis.document_type()) + }); + }, + ); + } + + // Benchmark functions + for size in [10, 25, 50, 100] { + let code = generate_functions_document(size); + let doc = Document::new(code.clone(), DocVersion::new(1)); + let global = Arc::new(GlobalTyStore::new()); + + group.throughput(Throughput::Elements(size as u64)); + group.bench_with_input( + BenchmarkId::new("functions", size), + &(doc, global), + |b, (doc, global)| { + b.iter(|| { + let analysis = + TypeAnalysis::analyze_with_global(black_box(doc), Arc::clone(global)); + black_box(analysis.document_type()) + }); + }, + ); + } + + // Benchmark comprehensions + for size in [10, 50, 100, 500] { + let code = 
generate_comprehension_document(size); + let doc = Document::new(code.clone(), DocVersion::new(1)); + let global = Arc::new(GlobalTyStore::new()); + + group.bench_with_input( + BenchmarkId::new("comprehension", size), + &(doc, global), + |b, (doc, global)| { + b.iter(|| { + let analysis = + TypeAnalysis::analyze_with_global(black_box(doc), Arc::clone(global)); + black_box(analysis.document_type()) + }); + }, + ); + } + + group.finish(); +} + +fn bench_repeated_analysis(c: &mut Criterion) { + let mut group = c.benchmark_group("repeated_analysis"); + + // Test that repeated analysis with same global store is efficient + let code = generate_locals_document(100); + let global = Arc::new(GlobalTyStore::new()); + + group.bench_function("same_global_store", |b| { + b.iter(|| { + // Simulate analyzing 10 documents with shared global store + for i in 0..10 { + let doc = Document::new(code.clone(), DocVersion::new(i)); + let analysis = + TypeAnalysis::analyze_with_global(black_box(&doc), Arc::clone(&global)); + black_box(analysis.document_type()); + } + }); + }); + + group.bench_function("fresh_global_store", |b| { + b.iter(|| { + // Simulate analyzing 10 documents with fresh global stores + for i in 0..10 { + let doc = Document::new(code.clone(), DocVersion::new(i)); + let fresh_global = Arc::new(GlobalTyStore::new()); + let analysis = TypeAnalysis::analyze_with_global(black_box(&doc), fresh_global); + black_box(analysis.document_type()); + } + }); + }); + + group.finish(); +} + +criterion_group!(benches, bench_type_inference, bench_repeated_analysis); +criterion_main!(benches); diff --git a/crates/jrsonnet-lsp/benches/unification.rs b/crates/jrsonnet-lsp/benches/unification.rs new file mode 100644 index 00000000..e13df58c --- /dev/null +++ b/crates/jrsonnet-lsp/benches/unification.rs @@ -0,0 +1,339 @@ +//! Benchmarks for type unification. +//! +//! Tests unification performance for various type combinations. 
+ +use std::sync::Arc; + +use criterion::{black_box, criterion_group, criterion_main, BenchmarkId, Criterion}; +use jrsonnet_lsp_types::{ + is_subtype_ty, FieldDefInterned, FieldVis, FunctionData, GlobalTyStore, MutStore, ObjectData, + ParamInterned, ReturnSpec, Ty, TyData, +}; + +fn bench_primitive_unification(c: &mut Criterion) { + let mut group = c.benchmark_group("unification_primitives"); + + let global = Arc::new(GlobalTyStore::new()); + + // Same type comparisons (fast path) + group.bench_function("same_type", |b| { + let store = MutStore::new(Arc::clone(&global)); + b.iter(|| { + for _ in 0..1000 { + black_box(is_subtype_ty(&store, Ty::NUMBER, Ty::NUMBER)); + black_box(is_subtype_ty(&store, Ty::STRING, Ty::STRING)); + black_box(is_subtype_ty(&store, Ty::BOOL, Ty::BOOL)); + } + }); + }); + + // ANY comparisons + group.bench_function("any_supertype", |b| { + let store = MutStore::new(Arc::clone(&global)); + b.iter(|| { + for _ in 0..1000 { + black_box(is_subtype_ty(&store, Ty::NUMBER, Ty::ANY)); + black_box(is_subtype_ty(&store, Ty::STRING, Ty::ANY)); + black_box(is_subtype_ty(&store, Ty::BOOL, Ty::ANY)); + } + }); + }); + + // NEVER comparisons + group.bench_function("never_subtype", |b| { + let store = MutStore::new(Arc::clone(&global)); + b.iter(|| { + for _ in 0..1000 { + black_box(is_subtype_ty(&store, Ty::NEVER, Ty::NUMBER)); + black_box(is_subtype_ty(&store, Ty::NEVER, Ty::STRING)); + black_box(is_subtype_ty(&store, Ty::NEVER, Ty::ANY)); + } + }); + }); + + // Incompatible types + group.bench_function("incompatible", |b| { + let store = MutStore::new(Arc::clone(&global)); + b.iter(|| { + for _ in 0..1000 { + black_box(is_subtype_ty(&store, Ty::NUMBER, Ty::STRING)); + black_box(is_subtype_ty(&store, Ty::STRING, Ty::BOOL)); + black_box(is_subtype_ty(&store, Ty::BOOL, Ty::NUMBER)); + } + }); + }); + + group.finish(); +} + +fn bench_array_unification(c: &mut Criterion) { + let mut group = c.benchmark_group("unification_arrays"); + + let global = 
Arc::new(GlobalTyStore::new()); + + // Simple array comparisons + group.bench_function("simple_arrays", |b| { + let mut store = MutStore::new(Arc::clone(&global)); + let arr_num = store.intern(TyData::Array { + elem: Ty::NUMBER, + is_set: false, + }); + let arr_str = store.intern(TyData::Array { + elem: Ty::STRING, + is_set: false, + }); + let arr_any = store.intern(TyData::Array { + elem: Ty::ANY, + is_set: false, + }); + + b.iter(|| { + for _ in 0..1000 { + black_box(is_subtype_ty(&store, arr_num, arr_num)); + black_box(is_subtype_ty(&store, arr_num, arr_any)); + black_box(is_subtype_ty(&store, arr_num, arr_str)); + } + }); + }); + + // Nested arrays + group.bench_function("nested_arrays", |b| { + let mut store = MutStore::new(Arc::clone(&global)); + let arr_num = store.intern(TyData::Array { + elem: Ty::NUMBER, + is_set: false, + }); + let arr_arr_num = store.intern(TyData::Array { + elem: arr_num, + is_set: false, + }); + let arr_arr_arr_num = store.intern(TyData::Array { + elem: arr_arr_num, + is_set: false, + }); + + b.iter(|| { + for _ in 0..1000 { + black_box(is_subtype_ty(&store, arr_arr_num, arr_arr_num)); + black_box(is_subtype_ty(&store, arr_arr_arr_num, arr_arr_arr_num)); + } + }); + }); + + group.finish(); +} + +fn bench_object_unification(c: &mut Criterion) { + let mut group = c.benchmark_group("unification_objects"); + + let global = Arc::new(GlobalTyStore::new()); + + // Small objects + for n_fields in [2, 5, 10, 20] { + group.bench_with_input( + BenchmarkId::new("small_object", n_fields), + &n_fields, + |b, &n_fields| { + let mut store = MutStore::new(Arc::clone(&global)); + + let fields: Vec<_> = (0..n_fields) + .map(|i| { + ( + format!("field{i}"), + FieldDefInterned { + ty: Ty::NUMBER, + required: true, + visibility: FieldVis::Normal, + }, + ) + }) + .collect(); + + let obj = store.intern(TyData::Object(ObjectData { + fields, + has_unknown: false, + })); + + b.iter(|| { + for _ in 0..100 { + black_box(is_subtype_ty(&store, obj, obj)); + } + }); 
+ }, + ); + } + + // Object with unknown fields + group.bench_function("open_object", |b| { + let mut store = MutStore::new(Arc::clone(&global)); + + let closed_obj = store.intern(TyData::Object(ObjectData { + fields: vec![( + "x".to_string(), + FieldDefInterned { + ty: Ty::NUMBER, + required: true, + visibility: FieldVis::Normal, + }, + )], + has_unknown: false, + })); + + let open_obj = store.intern(TyData::Object(ObjectData { + fields: vec![( + "x".to_string(), + FieldDefInterned { + ty: Ty::NUMBER, + required: true, + visibility: FieldVis::Normal, + }, + )], + has_unknown: true, + })); + + b.iter(|| { + for _ in 0..1000 { + black_box(is_subtype_ty(&store, closed_obj, open_obj)); + black_box(is_subtype_ty(&store, open_obj, closed_obj)); + } + }); + }); + + group.finish(); +} + +fn bench_function_unification(c: &mut Criterion) { + let mut group = c.benchmark_group("unification_functions"); + + let global = Arc::new(GlobalTyStore::new()); + + // Simple functions + group.bench_function("simple_function", |b| { + let mut store = MutStore::new(Arc::clone(&global)); + + let fn1 = store.intern(TyData::Function(FunctionData { + params: vec![ParamInterned { + name: "x".into(), + ty: Ty::NUMBER, + has_default: false, + }], + return_spec: ReturnSpec::Fixed(Ty::NUMBER), + variadic: false, + })); + + let fn2 = store.intern(TyData::Function(FunctionData { + params: vec![ParamInterned { + name: "x".into(), + ty: Ty::ANY, + has_default: false, + }], + return_spec: ReturnSpec::Fixed(Ty::NUMBER), + variadic: false, + })); + + b.iter(|| { + for _ in 0..1000 { + black_box(is_subtype_ty(&store, fn1, fn1)); + black_box(is_subtype_ty(&store, fn1, fn2)); + } + }); + }); + + // Functions with multiple params + for n_params in [1, 3, 5, 10] { + group.bench_with_input( + BenchmarkId::new("multi_param_function", n_params), + &n_params, + |b, &n_params| { + let mut store = MutStore::new(Arc::clone(&global)); + + let params: Vec<_> = (0..n_params) + .map(|i| ParamInterned { + name: 
format!("p{i}"), + ty: Ty::NUMBER, + has_default: false, + }) + .collect(); + + let func = store.intern(TyData::Function(FunctionData { + params, + return_spec: ReturnSpec::Fixed(Ty::NUMBER), + variadic: false, + })); + + b.iter(|| { + for _ in 0..100 { + black_box(is_subtype_ty(&store, func, func)); + } + }); + }, + ); + } + + group.finish(); +} + +fn bench_union_unification(c: &mut Criterion) { + let mut group = c.benchmark_group("unification_unions"); + + let global = Arc::new(GlobalTyStore::new()); + + // Small unions + group.bench_function("small_union", |b| { + let mut store = MutStore::new(Arc::clone(&global)); + + let union2 = store.intern(TyData::Union(vec![Ty::NUMBER, Ty::STRING])); + let union3 = store.intern(TyData::Union(vec![Ty::NUMBER, Ty::STRING, Ty::BOOL])); + + b.iter(|| { + for _ in 0..1000 { + black_box(is_subtype_ty(&store, Ty::NUMBER, union2)); + black_box(is_subtype_ty(&store, union2, union3)); + } + }); + }); + + // Larger unions + for n_variants in [2, 4, 8] { + group.bench_with_input( + BenchmarkId::new("union_size", n_variants), + &n_variants, + |b, &n_variants| { + let mut store = MutStore::new(Arc::clone(&global)); + + // Create distinct types for the union + let variants: Vec<_> = (0..n_variants) + .map(|i| { + store.intern(TyData::Array { + elem: if i % 2 == 0 { Ty::NUMBER } else { Ty::STRING }, + is_set: false, + }) + }) + .collect(); + + let union = store.intern(TyData::Union(variants.clone())); + + b.iter(|| { + for _ in 0..100 { + // Check if each variant is subtype of union + for v in &variants { + black_box(is_subtype_ty(&store, *v, union)); + } + } + }); + }, + ); + } + + group.finish(); +} + +criterion_group!( + benches, + bench_primitive_unification, + bench_array_unification, + bench_object_unification, + bench_function_unification, + bench_union_unification, +); +criterion_main!(benches); diff --git a/crates/jrsonnet-lsp/src/analysis/eval.rs b/crates/jrsonnet-lsp/src/analysis/eval.rs new file mode 100644 index 
00000000..23c8bfe1 --- /dev/null +++ b/crates/jrsonnet-lsp/src/analysis/eval.rs @@ -0,0 +1,283 @@ +//! Evaluation diagnostics for runtime error detection. +//! +//! This module provides the ability to evaluate Jsonnet documents and convert +//! runtime errors into LSP diagnostics. + +use std::path::{Path, PathBuf}; + +use jrsonnet_evaluator::{ + error::Error as EvalError, trace::PathResolver, FileImportResolver, State, +}; + +use jrsonnet_lsp_document::{CanonicalPath, LineIndex}; +use jrsonnet_parser::{SourceFile, SourcePath}; +use jrsonnet_stdlib::ContextInitializer; +use lsp_types::{Diagnostic, DiagnosticSeverity, NumberOrString}; + +use super::tanka; + +/// Configuration for evaluation diagnostics. +#[derive(Debug, Clone, Default)] +pub struct EvalConfig { + /// Import paths (JPATH) to search for imports. + pub jpath: Vec<PathBuf>, + /// Whether to resolve paths using Tanka conventions. + pub resolve_paths_with_tanka: bool, +} + +/// Create a jrsonnet State with the given jpath entries. +pub(crate) fn create_state_with_jpath(jpath: &[PathBuf]) -> State { + let resolver = PathResolver::new_cwd_fallback(); + let context_initializer = ContextInitializer::new(resolver); + + let mut import_resolver = FileImportResolver::default(); + for path in jpath { + import_resolver.add_jpath(path.clone()); + } + + let mut builder = State::builder(); + builder.context_initializer(context_initializer); + builder.import_resolver(import_resolver); + builder.build() +} + +/// Evaluator for Jsonnet documents. +pub struct Evaluator { + /// Base jpath configuration (from settings). + base_jpath: Vec<PathBuf>, + /// Whether to resolve paths using Tanka conventions. + tanka_mode: bool, +} + +impl Evaluator { + /// Create a new evaluator with the given configuration. + pub fn new(config: &EvalConfig) -> Self { + Self { + base_jpath: config.jpath.clone(), + tanka_mode: config.resolve_paths_with_tanka, + } + } + + /// Get the jpath for evaluating a specific file. 
+ /// + /// If Tanka mode is enabled, this will resolve paths based on the + /// file's location in the Tanka project structure. + fn get_jpath_for_file(&self, path: &Path) -> Vec<PathBuf> { + let mut jpath = self.base_jpath.clone(); + + if self.tanka_mode { + // Add Tanka-resolved paths + let tanka_paths = tanka::resolve_jpath(path); + jpath.extend(tanka_paths); + } else { + // Add the file's directory to jpath (standard behavior) + if let Some(dir) = path.parent() { + jpath.push(dir.to_path_buf()); + } + } + + jpath + } + + /// Evaluate a document and return any diagnostics. + /// + /// Returns `None` if evaluation succeeds, or a diagnostic if it fails. + pub fn evaluate( + &self, + path: &CanonicalPath, + text: &str, + line_index: &LineIndex, + ) -> Option<Diagnostic> { + let jpath = self.get_jpath_for_file(path.as_path()); + let state = create_state_with_jpath(&jpath); + + let source_path = SourcePath::new(SourceFile::new(path.as_path().to_path_buf())); + + // Evaluate the snippet + match state.evaluate_snippet(source_path.to_string(), text) { + Ok(_) => None, + Err(err) => Some(eval_error_to_diagnostic(&err, path, text, line_index)), + } + } +} + +/// Convert an evaluation error to an LSP diagnostic. 
+fn eval_error_to_diagnostic( + err: &EvalError, + file_path: &CanonicalPath, + text: &str, + line_index: &LineIndex, +) -> Diagnostic { + // Try to find the location in the error trace that matches our file + let mut range = lsp_types::Range::default(); + let mut found_location = false; + + let trace = err.trace(); + for element in &trace.0 { + if let Some(span) = &element.location { + // Check if this span is from our file + let span_path = span.0.source_path(); + if let Some(span_file) = span_path.downcast_ref::<SourceFile>() { + if span_file.path() == file_path.as_path() { + // Convert byte offsets to LSP positions + let start_offset = span.1; + let end_offset = span.2; + + if let Some(start) = line_index.position(start_offset.into(), text) { + range.start = start.into(); + if let Some(end) = line_index.position(end_offset.into(), text) { + range.end = end.into(); + } else { + range.end = range.start; + } + found_location = true; + break; + } + } + } + } + } + + // If we didn't find a location in the trace, use the beginning of the file + if !found_location { + range = lsp_types::Range::default(); + } + + // Format the error message + let error_kind = err.error(); + let message = format!("{error_kind}"); + + Diagnostic { + range, + severity: Some(DiagnosticSeverity::ERROR), + code: Some(NumberOrString::String("eval-error".to_string())), + code_description: None, + source: Some("jrsonnet-eval".to_string()), + message, + related_information: None, + tags: None, + data: None, + } +} + +#[cfg(test)] +mod tests { + use jrsonnet_lsp_document::{DocVersion, Document}; + + use super::*; + + fn test_path() -> CanonicalPath { + CanonicalPath::new("/test.jsonnet".into()) + } + + /// Assert that a diagnostic has the expected eval-error structure. 
+ /// + /// Checks that: + /// - severity is ERROR + /// - code is "eval-error" + /// - source is "jrsonnet-eval" + /// - message contains the expected substring(s) + fn assert_eval_diagnostic(diag: &Diagnostic, message_contains: &[&str]) { + assert_eq!( + diag.severity, + Some(DiagnosticSeverity::ERROR), + "expected ERROR severity" + ); + assert_eq!( + diag.code, + Some(NumberOrString::String("eval-error".to_string())), + "expected eval-error code" + ); + assert_eq!( + diag.source, + Some("jrsonnet-eval".to_string()), + "expected jrsonnet-eval source" + ); + for expected in message_contains { + assert!( + diag.message.contains(expected), + "expected message to contain '{}', got: {}", + expected, + diag.message + ); + } + } + + #[test] + fn test_eval_success() { + let config = EvalConfig::default(); + let evaluator = Evaluator::new(&config); + let doc = Document::new(r#"{ hello: "world" }"#.to_string(), DocVersion::new(1)); + + let result = evaluator.evaluate(&test_path(), doc.text(), doc.line_index()); + assert_eq!(result, None); + } + + #[test] + fn test_eval_undefined_variable() { + let config = EvalConfig::default(); + let evaluator = Evaluator::new(&config); + let doc = Document::new("undefined_var".to_string(), DocVersion::new(1)); + + let result = evaluator.evaluate(&test_path(), doc.text(), doc.line_index()); + let diag = result.expect("expected evaluation to fail with undefined variable"); + assert_eval_diagnostic(&diag, &["not defined"]); + } + + #[test] + fn test_eval_type_error() { + let config = EvalConfig::default(); + let evaluator = Evaluator::new(&config); + // Array index must be a number, not a string + let doc = Document::new(r#"[1, 2, 3]["hello"]"#.to_string(), DocVersion::new(1)); + + let result = evaluator.evaluate(&test_path(), doc.text(), doc.line_index()); + let diag = result.expect("expected evaluation to fail with type error"); + // jrsonnet may report this as "index" or "type" error + assert_eq!(diag.severity, 
Some(DiagnosticSeverity::ERROR)); + assert_eq!( + diag.code, + Some(NumberOrString::String("eval-error".to_string())) + ); + assert!( + diag.message.contains("index") || diag.message.contains("type"), + "expected message about index/type error, got: {}", + diag.message + ); + } + + #[test] + fn test_eval_assert_failure() { + let config = EvalConfig::default(); + let evaluator = Evaluator::new(&config); + let doc = Document::new(r"assert false; true".to_string(), DocVersion::new(1)); + + let result = evaluator.evaluate(&test_path(), doc.text(), doc.line_index()); + let diag = result.expect("expected evaluation to fail with assert"); + assert_eval_diagnostic(&diag, &["assert"]); + } + + #[test] + fn test_eval_field_access_error() { + let config = EvalConfig::default(); + let evaluator = Evaluator::new(&config); + let doc = Document::new(r"{}.nonexistent".to_string(), DocVersion::new(1)); + + let result = evaluator.evaluate(&test_path(), doc.text(), doc.line_index()); + let diag = result.expect("expected evaluation to fail with field access error"); + assert_eval_diagnostic(&diag, &["field"]); + } + + #[test] + fn test_eval_runtime_error() { + let config = EvalConfig::default(); + let evaluator = Evaluator::new(&config); + let doc = Document::new( + r#"error "custom error message""#.to_string(), + DocVersion::new(1), + ); + + let result = evaluator.evaluate(&test_path(), doc.text(), doc.line_index()); + let diag = result.expect("expected evaluation to fail with runtime error"); + assert_eval_diagnostic(&diag, &["custom error message"]); + } +} diff --git a/crates/jrsonnet-lsp/src/analysis/mod.rs b/crates/jrsonnet-lsp/src/analysis/mod.rs new file mode 100644 index 00000000..9cc9fb5e --- /dev/null +++ b/crates/jrsonnet-lsp/src/analysis/mod.rs @@ -0,0 +1,9 @@ +//! Analysis module for evaluation-specific functionality. +//! +//! This module contains evaluation-related modules that depend on jrsonnet-evaluator. +//! For static analysis, see the jrsonnet-lsp-check crate. 
+ +pub mod eval; +pub mod tanka; + +pub use eval::{EvalConfig, Evaluator}; diff --git a/crates/jrsonnet-lsp/src/analysis/tanka.rs b/crates/jrsonnet-lsp/src/analysis/tanka.rs new file mode 100644 index 00000000..26f02753 --- /dev/null +++ b/crates/jrsonnet-lsp/src/analysis/tanka.rs @@ -0,0 +1,185 @@ +//! Tanka integration for resolving import paths. +//! +//! When `resolve_paths_with_tanka` is enabled, the LSP will look for +//! `jsonnetfile.json` or `tkrc.yaml` in parent directories to find the +//! project root and add appropriate vendor/lib paths to the jpath. + +use std::path::{Path, PathBuf}; + +/// Find the Tanka project root by walking up from the given path. +/// +/// The root is the directory that contains either `tkrc.yaml` or `jsonnetfile.json`. +/// Returns `None` if no root is found. +pub fn find_root(path: &Path) -> Option<PathBuf> { + let start = if path.is_file() { path.parent()? } else { path }; + + // Try tkrc.yaml first, then jsonnetfile.json + find_parent_file(start, "tkrc.yaml").or_else(|| find_parent_file(start, "jsonnetfile.json")) +} + +/// Walk up the directory tree looking for a file with the given name. +/// Returns the directory containing the file, or None if not found. +fn find_parent_file(start: &Path, filename: &str) -> Option<PathBuf> { + let mut current = start.to_path_buf(); + + loop { + let candidate = current.join(filename); + if candidate.exists() { + return Some(current); + } + + match current.parent() { + Some(parent) if parent != current => { + current = parent.to_path_buf(); + } + _ => return None, + } + } +} + +/// Resolve jpath entries for a file in a Tanka project. +/// +/// Returns a list of paths to add to jpath for import resolution: +/// - `<root>/vendor` +/// - `<base>/vendor` (if different from root) +/// - `<root>/lib` +/// - `<base>` (the directory containing the file) +/// +/// Returns an empty list if no Tanka root is found. 
+pub fn resolve_jpath(path: &Path) -> Vec<PathBuf> { + let base = if path.is_file() { + path.parent().map(Path::to_path_buf) + } else { + Some(path.to_path_buf()) + }; + + let Some(base) = base else { + return Vec::new(); + }; + + let Some(root) = find_root(path) else { + return Vec::new(); + }; + + let mut jpath = Vec::new(); + + // Add root/vendor + let root_vendor = root.join("vendor"); + if root_vendor.is_dir() { + jpath.push(root_vendor); + } + + // Add base/vendor if different from root + if base != root { + let base_vendor = base.join("vendor"); + if base_vendor.is_dir() { + jpath.push(base_vendor); + } + } + + // Add root/lib + let root_lib = root.join("lib"); + if root_lib.is_dir() { + jpath.push(root_lib); + } + + // Add base directory + jpath.push(base); + + jpath +} + +#[cfg(test)] +mod tests { + use std::fs; + + use tempfile::TempDir; + + use super::*; + + #[test] + fn test_find_root_with_jsonnetfile() { + let tmp = TempDir::new().unwrap(); + let root = tmp.path(); + + // Create jsonnetfile.json at root + fs::write(root.join("jsonnetfile.json"), "{}").unwrap(); + + // Create a nested directory + let nested = root.join("environments").join("prod"); + fs::create_dir_all(&nested).unwrap(); + + // Find root from nested directory + let found = find_root(&nested); + assert_eq!(found, Some(root.to_path_buf())); + } + + #[test] + fn test_find_root_with_tkrc() { + let tmp = TempDir::new().unwrap(); + let root = tmp.path(); + + // Create tkrc.yaml at root (takes precedence over jsonnetfile.json) + fs::write(root.join("tkrc.yaml"), "").unwrap(); + fs::write(root.join("jsonnetfile.json"), "{}").unwrap(); + + let found = find_root(root); + assert_eq!(found, Some(root.to_path_buf())); + } + + #[test] + fn test_find_root_not_found() { + let tmp = TempDir::new().unwrap(); + let found = find_root(tmp.path()); + assert_eq!(found, None); + } + + #[test] + fn test_resolve_jpath() { + let tmp = TempDir::new().unwrap(); + let root = tmp.path(); + + // Create Tanka structure + 
fs::write(root.join("jsonnetfile.json"), "{}").unwrap(); + fs::create_dir(root.join("vendor")).unwrap(); + fs::create_dir(root.join("lib")).unwrap(); + + let env = root.join("environments").join("prod"); + fs::create_dir_all(&env).unwrap(); + + // Resolve jpath from environment directory + let jpath = resolve_jpath(&env); + + assert!(jpath.contains(&root.join("vendor"))); + assert!(jpath.contains(&root.join("lib"))); + assert!(jpath.contains(&env)); + } + + #[test] + fn test_resolve_jpath_with_env_vendor() { + let tmp = TempDir::new().unwrap(); + let root = tmp.path(); + + // Create Tanka structure with env-level vendor + fs::write(root.join("jsonnetfile.json"), "{}").unwrap(); + fs::create_dir(root.join("vendor")).unwrap(); + + let env = root.join("environments").join("prod"); + fs::create_dir_all(&env).unwrap(); + fs::create_dir(env.join("vendor")).unwrap(); + + // Resolve jpath from environment directory + let jpath = resolve_jpath(&env); + + // Both vendors should be included + assert!(jpath.contains(&root.join("vendor"))); + assert!(jpath.contains(&env.join("vendor"))); + } + + #[test] + fn test_resolve_jpath_no_root() { + let tmp = TempDir::new().unwrap(); + let jpath = resolve_jpath(tmp.path()); + assert!(jpath.is_empty()); + } +} diff --git a/crates/jrsonnet-lsp/src/async_diagnostics.rs b/crates/jrsonnet-lsp/src/async_diagnostics.rs new file mode 100644 index 00000000..60359a95 --- /dev/null +++ b/crates/jrsonnet-lsp/src/async_diagnostics.rs @@ -0,0 +1,374 @@ +//! Async diagnostics with debouncing. +//! +//! Runs diagnostics computation in a background thread to avoid blocking the LSP event loop. +//! Debounces rapid edits to avoid computing diagnostics for intermediate states. 
+ +use std::{ + sync::{ + atomic::{AtomicU64, Ordering}, + Arc, + }, + thread, + time::Duration, +}; + +use crossbeam_channel::{Receiver, Sender}; +use jrsonnet_lsp_document::{CanonicalPath, DocVersion, Document}; +use jrsonnet_lsp_import::ImportGraph; +use jrsonnet_lsp_inference::{ + DocumentSource, SharedDocumentManager, SharedTypeCache, TypeProvider, +}; +use jrsonnet_lsp_types::GlobalTyStore; +use parking_lot::RwLock; +use rustc_hash::FxHashMap; +use tracing::{debug, trace}; + +use crate::{analysis::Evaluator, handlers}; + +/// Debounce delay before computing diagnostics after a change. +const DEBOUNCE_DELAY_MS: u64 = 500; + +/// Request to compute diagnostics for a document. +/// +/// We pass `text` and `version` to compute diagnostics against the exact +/// document state that triggered this request. +#[derive(Debug, Clone)] +struct DiagnosticsRequest { + /// Path of the document. + path: CanonicalPath, + /// The document text. + text: String, + /// The document version. + version: DocVersion, + /// Whether to enable lint diagnostics. + enable_lint: bool, + /// Sequence number to detect stale requests. + sequence: u64, +} + +/// Completed diagnostics result. +#[derive(Debug)] +pub struct DiagnosticsResult { + /// Path of the document. + pub path: CanonicalPath, + /// The computed diagnostics params. + pub params: lsp_types::PublishDiagnosticsParams, +} + +/// Configuration for the async diagnostics runner. +#[derive(Clone)] +pub struct DiagnosticsConfig { + /// Optional evaluator for runtime diagnostics. + pub evaluator: Option<Arc<Evaluator>>, + /// Document source used for dependency-aware type analysis. + pub documents: SharedDocumentManager, + /// Import graph used to analyze dependencies before target files. + pub import_graph: Arc<RwLock<ImportGraph>>, + /// Shared type cache used by the type provider. + pub type_cache: SharedTypeCache, + /// Shared global type store. 
+ pub global_types: Arc<GlobalTyStore>, +} + +struct WorkerDocumentSource { + current_path: CanonicalPath, + current_doc: Document, + documents: SharedDocumentManager, +} + +impl DocumentSource for WorkerDocumentSource { + fn get_document(&self, path: &CanonicalPath) -> Option<Document> { + if path == &self.current_path { + return Some(self.current_doc.clone()); + } + self.documents.get_document(path) + } +} + +/// Async diagnostics runner. +/// +/// Computes diagnostics in a background thread with debouncing. +pub struct AsyncDiagnostics { + /// Channel to send requests to the background thread. + request_sender: Sender<DiagnosticsRequest>, + /// Channel to receive completed diagnostics. + result_receiver: Receiver<DiagnosticsResult>, + /// Sequence counter for detecting stale requests. + sequence: AtomicU64, + /// Latest requested sequence per file (for debouncing). + latest_sequences: Arc<RwLock<FxHashMap<CanonicalPath, u64>>>, + /// Background thread handle. + _thread_handle: thread::JoinHandle<()>, +} + +impl AsyncDiagnostics { + /// Create a new async diagnostics runner with the given configuration. + pub fn new(config: DiagnosticsConfig) -> Self { + let (request_sender, request_receiver) = crossbeam_channel::unbounded(); + let (result_sender, result_receiver) = crossbeam_channel::unbounded(); + let latest_sequences = Arc::new(RwLock::new(FxHashMap::default())); + let sequences_clone = Arc::clone(&latest_sequences); + + let thread_handle = thread::spawn(move || { + Self::worker_loop(request_receiver, result_sender, sequences_clone, config); + }); + + Self { + request_sender, + result_receiver, + sequence: AtomicU64::new(0), + latest_sequences, + _thread_handle: thread_handle, + } + } + + /// Schedule diagnostics computation for a document. + /// + /// The request is debounced - if multiple requests come in for the same file + /// within the debounce window, only the latest is processed. 
+	pub fn schedule(
+		&self,
+		path: CanonicalPath,
+		text: String,
+		version: DocVersion,
+		enable_lint: bool,
+	) {
+		let sequence = self.sequence.fetch_add(1, Ordering::SeqCst);
+
+		// Record this as the latest sequence for this path.
+		// NOTE(review): entries are never removed, so this map grows with the
+		// set of files ever scheduled - confirm this is acceptable or prune on
+		// document close.
+		self.latest_sequences.write().insert(path.clone(), sequence);
+
+		let request = DiagnosticsRequest {
+			path,
+			text,
+			version,
+			enable_lint,
+			sequence,
+		};
+
+		// Send to background thread (non-blocking; the channel is unbounded)
+		if let Err(e) = self.request_sender.send(request) {
+			debug!("Failed to send diagnostics request: {}", e);
+		}
+	}
+
+	/// Get the result receiver for polling completed diagnostics.
+	pub fn results(&self) -> &Receiver<DiagnosticsResult> {
+		&self.result_receiver
+	}
+
+	/// Background worker loop.
+	///
+	/// Receives requests, debounces by sleeping and then skipping any request
+	/// that is no longer the newest for its file, computes diagnostics, and
+	/// publishes the result on the `results` channel.
+	///
+	/// NOTE(review): the sleep happens once per received request, so a burst
+	/// of N stale requests is drained at one debounce interval each before the
+	/// newest is processed - confirm this latency is acceptable.
+	fn worker_loop(
+		requests: Receiver<DiagnosticsRequest>,
+		results: Sender<DiagnosticsResult>,
+		latest_sequences: Arc<RwLock<FxHashMap<CanonicalPath, u64>>>,
+		config: DiagnosticsConfig,
+	) {
+		loop {
+			// Wait for a request
+			let request = if let Ok(req) = requests.recv() {
+				req
+			} else {
+				// Channel closed, exit
+				debug!("Diagnostics worker: channel closed, exiting");
+				break;
+			};
+
+			trace!(
+				"Diagnostics worker: received request for {} (seq={})",
+				request.path.as_path().display(),
+				request.sequence
+			);
+
+			// Debounce: wait a bit before processing
+			thread::sleep(Duration::from_millis(DEBOUNCE_DELAY_MS));
+
+			// Check if this request is still the latest for this file
+			{
+				let sequences = latest_sequences.read();
+				if let Some(&latest) = sequences.get(&request.path) {
+					if latest > request.sequence {
+						trace!(
+							"Diagnostics worker: skipping stale request for {} (seq={}, latest={})",
+							request.path.as_path().display(),
+							request.sequence,
+							latest
+						);
+						continue;
+					}
+				}
+			}
+
+			// Compute diagnostics
+			trace!(
+				"Diagnostics worker: computing diagnostics for {}",
+				request.path.as_path().display()
+			);
+
+			// Reconstruct the document in the worker thread
+			let document = Document::new(request.text, request.version);
+			let doc_source = WorkerDocumentSource {
+				current_path: request.path.clone(),
+				current_doc: document.clone(),
+				documents: Arc::clone(&config.documents),
+			};
+			let provider = TypeProvider::new(
+				Arc::clone(&config.type_cache),
+				Arc::clone(&config.import_graph),
+				Arc::clone(&config.global_types),
+			);
+			let analysis = provider.analyze(&request.path, &document, &doc_source);
+
+			let params = handlers::publish_diagnostics_params(
+				&request.path,
+				&document,
+				request.enable_lint,
+				config.evaluator.as_deref(),
+				&analysis,
+			);
+
+			// Check again if still the latest (diagnostics computation may have taken time)
+			{
+				let sequences = latest_sequences.read();
+				if let Some(&latest) = sequences.get(&request.path) {
+					if latest > request.sequence {
+						trace!(
+							"Diagnostics worker: discarding result for {} (seq={}, latest={})",
+							request.path.as_path().display(),
+							request.sequence,
+							latest
+						);
+						continue;
+					}
+				}
+			}
+
+			// Send result
+			let result = DiagnosticsResult {
+				path: request.path,
+				params,
+			};
+
+			if results.send(result).is_err() {
+				debug!("Diagnostics worker: result channel closed, exiting");
+				break;
+			}
+		}
+	}
+}
+
+impl Drop for AsyncDiagnostics {
+	fn drop(&mut self) {
+		// Dropping the sender will cause the worker to exit
+		// when it next tries to receive.
+		// We don't need to explicitly signal shutdown.
+	}
+}
+
+#[cfg(test)]
+mod tests {
+	use std::time::Duration;
+
+	use super::*;
+
+	/// Minimal runner config: no evaluator, empty shared state.
+	fn test_config() -> DiagnosticsConfig {
+		let global_types = Arc::new(GlobalTyStore::new());
+		DiagnosticsConfig {
+			evaluator: None,
+			documents: Arc::new(jrsonnet_lsp_inference::DocumentManager::new(Arc::clone(
+				&global_types,
+			))),
+			import_graph: Arc::new(RwLock::new(ImportGraph::new())),
+			type_cache: jrsonnet_lsp_inference::new_shared_cache(Arc::clone(&global_types)),
+			global_types,
+		}
+	}
+
+	fn test_path(name: &str) -> CanonicalPath {
+		CanonicalPath::new(format!("/test/{name}.jsonnet").into())
+	}
+
+	#[test]
+	fn test_basic_diagnostics() {
+		let runner = AsyncDiagnostics::new(test_config());
+
+		let path = test_path("test");
+		let text = "{ a: 1 }".to_string();
+		let version = DocVersion::new(1);
+
+		runner.schedule(path.clone(), text, version, false);
+
+		// Wait for result with timeout
+		let result = runner
+			.results()
+			.recv_timeout(Duration::from_secs(2))
+			.expect("should receive result");
+
+		assert_eq!(result.path, path);
+		assert!(result.params.diagnostics.is_empty());
+	}
+
+	#[test]
+	fn test_debouncing() {
+		let runner = AsyncDiagnostics::new(test_config());
+
+		let path = test_path("test");
+
+		// Schedule multiple requests rapidly
+		for i in 0..5 {
+			let text = format!("{{ a: {} }}", i);
+			let version = DocVersion::new(i);
+			runner.schedule(path.clone(), text, version, false);
+		}
+
+		// Should only get one result (the last one) due to debouncing
+		// Timeout needs to be > debounce delay (500ms) + computation time
+		let result = runner
+			.results()
+			.recv_timeout(Duration::from_secs(3))
+			.expect("should receive result");
+
+		assert_eq!(result.path, path);
+
+		// Should not get more results immediately (other requests were debounced)
+		runner
+			.results()
+			.recv_timeout(Duration::from_millis(200))
+			.expect_err("should not receive more results after debouncing");
+	}
+
+	#[test]
+	fn test_syntax_errors() {
+		let runner = AsyncDiagnostics::new(test_config());
+
+		let path = test_path("test");
+		let text = "{ a: }".to_string();
+		let version = DocVersion::new(1);
+
+		runner.schedule(path, text, version, false);
+
+		let result = runner
+			.results()
+			.recv_timeout(Duration::from_secs(2))
+			.expect("should receive result");
+
+		assert!(!result.params.diagnostics.is_empty());
+		// Check that at least one diagnostic mentions "expected"
+		let expected_msgs: Vec<_> = result
+			.params
+			.diagnostics
+			.iter()
+			.filter(|d| d.message.contains("expected"))
+			.collect();
+		assert!(
+			!expected_msgs.is_empty(),
+			"Expected diagnostic message containing 'expected', got: {:?}",
+			result
+				.params
+				.diagnostics
+				.iter()
+				.map(|d| &d.message)
+				.collect::<Vec<_>>()
+		);
+	}
+}
diff --git a/crates/jrsonnet-lsp/src/config.rs b/crates/jrsonnet-lsp/src/config.rs
new file mode 100644
index 00000000..9fce255b
--- /dev/null
+++ b/crates/jrsonnet-lsp/src/config.rs
@@ -0,0 +1,377 @@
+//! LSP server configuration.
+//!
+//! Handles configuration options passed via LSP initialization or
+//! workspace/didChangeConfiguration notifications.
+//!
+//! Configuration is compatible with the Go [grafana/jsonnet-language-server](https://github.com/grafana/jsonnet-language-server).
+
+use std::{collections::HashMap, path::PathBuf};
+
+// Re-export FormattingConfig from handlers crate
+pub use jrsonnet_lsp_handlers::FormattingConfig;
+use serde::{Deserialize, Serialize};
+
+/// Server configuration options.
+///
+/// These can be passed via:
+/// - `initializationOptions` in the initialize request
+/// - `workspace/didChangeConfiguration` notification
+///
+/// Field names use snake_case internally but accept camelCase from JSON
+/// for compatibility with VS Code settings.
+#[derive(Debug, Clone, Default, Serialize, Deserialize)]
+#[serde(default)]
+pub struct ServerConfig {
+	/// Library search paths for imports (equivalent to jsonnet -J flag).
+	/// Paths are searched in order when resolving imports.
+ /// Right-most paths take precedence. + #[serde(alias = "jpath")] + pub jpath: Vec, + + /// External variables (equivalent to jsonnet --ext-str flag). + /// Maps variable names to string values. + #[serde(alias = "extVars", alias = "ext_vars")] + pub ext_vars: HashMap, + + /// External code variables (equivalent to jsonnet --ext-code flag). + /// Maps variable names to Jsonnet code that will be evaluated. + #[serde(alias = "extCode", alias = "ext_code")] + pub ext_code: HashMap, + + /// Enable evaluation-based diagnostics. + /// When enabled, the server will evaluate Jsonnet files and report runtime errors. + /// This can be slow for large files and may produce false positives. + #[serde(alias = "enableEvalDiagnostics", alias = "eval")] + pub enable_eval_diagnostics: bool, + + /// Enable linting diagnostics. + #[serde(alias = "enableLintDiagnostics", alias = "lint")] + pub enable_lint_diagnostics: bool, + + /// Resolve import paths using Tanka conventions. + /// When enabled, looks for jsonnetfile.json and vendor directories. + #[serde(alias = "resolvePathsWithTanka", alias = "tankaMode")] + pub resolve_paths_with_tanka: bool, + + /// Formatting options. + #[serde(default)] + pub formatting: FormattingConfig, + + /// Log level for the server (error, warn, info, debug). 
+ #[serde(alias = "logLevel", alias = "log_level")] + pub log_level: Option, +} + +#[derive(Debug, Clone, Default, Deserialize)] +#[serde(default)] +struct ServerConfigPatch { + #[serde(alias = "jpath")] + jpath: Option>, + #[serde(alias = "extVars", alias = "ext_vars")] + ext_vars: Option>, + #[serde(alias = "extCode", alias = "ext_code")] + ext_code: Option>, + #[serde(alias = "enableEvalDiagnostics", alias = "eval")] + enable_eval_diagnostics: Option, + #[serde(alias = "enableLintDiagnostics", alias = "lint")] + enable_lint_diagnostics: Option, + #[serde(alias = "resolvePathsWithTanka", alias = "tankaMode")] + resolve_paths_with_tanka: Option, + formatting: Option, + #[serde(alias = "logLevel", alias = "log_level")] + log_level: Option, +} + +impl ServerConfigPatch { + fn is_empty(&self) -> bool { + self.jpath.is_none() + && self.ext_vars.is_none() + && self.ext_code.is_none() + && self.enable_eval_diagnostics.is_none() + && self.enable_lint_diagnostics.is_none() + && self.resolve_paths_with_tanka.is_none() + && self.formatting.is_none() + && self.log_level.is_none() + } + + fn apply(self, config: &mut ServerConfig) { + if let Some(jpath) = self.jpath { + config.jpath = jpath; + } + if let Some(ext_vars) = self.ext_vars { + config.ext_vars = ext_vars; + } + if let Some(ext_code) = self.ext_code { + config.ext_code = ext_code; + } + if let Some(enable_eval_diagnostics) = self.enable_eval_diagnostics { + config.enable_eval_diagnostics = enable_eval_diagnostics; + } + if let Some(enable_lint_diagnostics) = self.enable_lint_diagnostics { + config.enable_lint_diagnostics = enable_lint_diagnostics; + } + if let Some(resolve_paths_with_tanka) = self.resolve_paths_with_tanka { + config.resolve_paths_with_tanka = resolve_paths_with_tanka; + } + if let Some(formatting) = self.formatting { + config.merge_formatting(formatting); + } + if let Some(log_level) = self.log_level { + config.log_level = log_level.as_str().map(ToString::to_string); + } + } +} + +impl ServerConfig { 
+	/// Create a new default configuration.
+	pub fn new() -> Self {
+		Self::default()
+	}
+
+	/// Parse configuration from LSP initialization options.
+	///
+	/// NOTE(review): a payload that fails to deserialize silently falls back
+	/// to the default configuration; consider logging the parse error.
+	pub fn from_initialization_options(value: Option<serde_json::Value>) -> Self {
+		match value {
+			Some(v) => serde_json::from_value(v).unwrap_or_default(),
+			None => Self::default(),
+		}
+	}
+
+	/// Update configuration from a didChangeConfiguration notification.
+	/// Returns true if the configuration was updated.
+	pub fn update_from_settings(&mut self, settings: serde_json::Value) -> bool {
+		let Ok(patch) = serde_json::from_value::<ServerConfigPatch>(settings) else {
+			return false;
+		};
+		if patch.is_empty() {
+			return false;
+		}
+		patch.apply(self);
+		true
+	}
+
+	/// Merge another configuration into this one.
+	/// Values from `other` take precedence.
+	///
+	/// Lists replace the current value, maps are unioned (keys from `other`
+	/// win), and boolean flags can only be switched *on* here - `false` in
+	/// `other` is indistinguishable from "unset". Use
+	/// [`Self::update_from_settings`] to explicitly disable a flag.
+	pub fn merge(&mut self, other: ServerConfig) {
+		if !other.jpath.is_empty() {
+			self.jpath = other.jpath;
+		}
+		if !other.ext_vars.is_empty() {
+			self.ext_vars.extend(other.ext_vars);
+		}
+		if !other.ext_code.is_empty() {
+			self.ext_code.extend(other.ext_code);
+		}
+		if other.enable_eval_diagnostics {
+			self.enable_eval_diagnostics = true;
+		}
+		if other.enable_lint_diagnostics {
+			self.enable_lint_diagnostics = true;
+		}
+		if other.resolve_paths_with_tanka {
+			self.resolve_paths_with_tanka = true;
+		}
+		if other.log_level.is_some() {
+			self.log_level = other.log_level;
+		}
+		self.merge_formatting(other.formatting);
+	}
+
+	/// Merge formatting configuration.
+	/// Each option set in `other` overrides the current value.
+	fn merge_formatting(&mut self, other: FormattingConfig) {
+		if other.indent.is_some() {
+			self.formatting.indent = other.indent;
+		}
+		if other.max_blank_lines.is_some() {
+			self.formatting.max_blank_lines = other.max_blank_lines;
+		}
+		if other.string_style.is_some() {
+			self.formatting.string_style = other.string_style;
+		}
+		if other.comment_style.is_some() {
+			self.formatting.comment_style = other.comment_style;
+		}
+		if other.pad_arrays.is_some() {
+			self.formatting.pad_arrays = other.pad_arrays;
+		}
+		if other.pad_objects.is_some() {
+			self.formatting.pad_objects = other.pad_objects;
+		}
+		if other.pretty_field_names.is_some() {
+			self.formatting.pretty_field_names = other.pretty_field_names;
+		}
+		if other.sort_imports.is_some() {
+			self.formatting.sort_imports = other.sort_imports;
+		}
+		if other.use_implicit_plus.is_some() {
+			self.formatting.use_implicit_plus = other.use_implicit_plus;
+		}
+		if other.strip_everything.is_some() {
+			self.formatting.strip_everything = other.strip_everything;
+		}
+		if other.strip_comments.is_some() {
+			self.formatting.strip_comments = other.strip_comments;
+		}
+		if other.strip_all_but_comments.is_some() {
+			self.formatting.strip_all_but_comments = other.strip_all_but_comments;
+		}
+		if other.formatter_path.is_some() {
+			self.formatting.formatter_path = other.formatter_path;
+		}
+	}
+
+	/// Get all library paths for import resolution.
+	/// Returns jpath entries.
+	pub fn library_paths(&self) -> &[PathBuf] {
+		&self.jpath
+	}
+
+	/// Check if a jpath is configured.
+	pub fn has_jpath(&self) -> bool {
+		!self.jpath.is_empty()
+	}
+
+	/// Check if evaluation diagnostics are enabled.
+	pub fn eval_diagnostics_enabled(&self) -> bool {
+		self.enable_eval_diagnostics
+	}
+
+	/// Check if lint diagnostics are enabled.
+	pub fn lint_diagnostics_enabled(&self) -> bool {
+		self.enable_lint_diagnostics
+	}
+}
+
+#[cfg(test)]
+mod tests {
+	use super::*;
+
+	#[test]
+	fn test_default_config() {
+		let config = ServerConfig::new();
+		assert!(config.jpath.is_empty());
+		assert!(config.ext_vars.is_empty());
+		assert!(!config.enable_eval_diagnostics);
+	}
+
+	#[test]
+	fn test_parse_from_json_camel_case() {
+		let json = serde_json::json!({
+			"jpath": ["/usr/share/jsonnet", "./lib"],
+			"extVars": {
+				"environment": "production",
+				"version": "1.0.0"
+			},
+			"extCode": {
+				"config": "{ key: 'value' }"
+			},
+			"enableEvalDiagnostics": true
+		});
+
+		let config = ServerConfig::from_initialization_options(Some(json));
+		assert_eq!(
+			config.jpath,
+			vec![PathBuf::from("/usr/share/jsonnet"), PathBuf::from("./lib")]
+		);
+		assert_eq!(
+			config.ext_vars.get("environment"),
+			Some(&"production".to_string())
+		);
+		assert_eq!(config.ext_vars.get("version"), Some(&"1.0.0".to_string()));
+		assert_eq!(
+			config.ext_code.get("config"),
+			Some(&"{ key: 'value' }".to_string())
+		);
+		assert!(config.enable_eval_diagnostics);
+	}
+
+	#[test]
+	fn test_parse_from_json_snake_case() {
+		let json = serde_json::json!({
+			"jpath": ["/usr/share/jsonnet"],
+			"ext_vars": {
+				"env": "dev"
+			},
+			"resolve_paths_with_tanka": true
+		});
+
+		let config = ServerConfig::from_initialization_options(Some(json));
+		assert_eq!(config.jpath, vec![PathBuf::from("/usr/share/jsonnet")]);
+		assert_eq!(config.ext_vars.get("env"), Some(&"dev".to_string()));
+		assert!(config.resolve_paths_with_tanka);
+	}
+
+	#[test]
+	fn test_parse_empty_options() {
+		let config = ServerConfig::from_initialization_options(None);
+		assert!(config.jpath.is_empty());
+	}
+
+	#[test]
+	fn test_merge_configs() {
+		let mut base = ServerConfig::new();
+		base.jpath = vec![PathBuf::from("/base/path")];
+		base.ext_vars
+			.insert("key1".to_string(), "value1".to_string());
+
+		let mut other = ServerConfig::new();
+		other.jpath = vec![PathBuf::from("/other/path")];
+		other
+			.ext_vars
+			.insert("key2".to_string(), "value2".to_string());
+		other.enable_eval_diagnostics = true;
+
+		base.merge(other);
+
+		// jpath is replaced, not merged
+		assert_eq!(base.jpath, vec![PathBuf::from("/other/path")]);
+		// ext_vars are merged
+		assert_eq!(base.ext_vars.get("key1"), Some(&"value1".to_string()));
+		assert_eq!(base.ext_vars.get("key2"), Some(&"value2".to_string()));
+		assert!(base.enable_eval_diagnostics);
+	}
+
+	#[test]
+	fn test_update_from_settings() {
+		let mut config = ServerConfig::new();
+
+		let settings = serde_json::json!({
+			"jpath": ["/new/path"],
+			"enableEvalDiagnostics": true,
+			"logLevel": "debug"
+		});
+
+		assert!(config.update_from_settings(settings));
+		assert_eq!(config.jpath, vec![PathBuf::from("/new/path")]);
+		assert!(config.enable_eval_diagnostics);
+		assert_eq!(config.log_level, Some("debug".to_string()));
+	}
+
+	#[test]
+	fn test_update_from_settings_can_disable_eval_diagnostics() {
+		let mut config = ServerConfig::new();
+		config.enable_eval_diagnostics = true;
+
+		let settings = serde_json::json!({
+			"enableEvalDiagnostics": false
+		});
+
+		assert!(config.update_from_settings(settings));
+		assert!(!config.enable_eval_diagnostics);
+	}
+
+	#[test]
+	fn test_formatting_config() {
+		let json = serde_json::json!({
+			"formatting": {
+				"Indent": 4,
+				"StringStyle": "double",
+				"PadArrays": true
+			}
+		});
+
+		let config = ServerConfig::from_initialization_options(Some(json));
+		assert_eq!(config.formatting.indent, Some(4));
+		assert_eq!(config.formatting.string_style, Some("double".to_string()));
+		assert_eq!(config.formatting.pad_arrays, Some(true));
+	}
+}
diff --git a/crates/jrsonnet-lsp/src/handlers/diagnostics.rs b/crates/jrsonnet-lsp/src/handlers/diagnostics.rs
new file mode 100644
index 00000000..ac800838
--- /dev/null
+++ b/crates/jrsonnet-lsp/src/handlers/diagnostics.rs
@@ -0,0 +1,436 @@
+//!
Diagnostics handler for publishing parse errors, lint warnings, and evaluation errors. + +use jrsonnet_lsp_check::lint; +use jrsonnet_lsp_document::{CanonicalPath, Document, LineIndex, SyntaxError}; +use jrsonnet_lsp_inference::TypeAnalysis; +use lsp_types::{Diagnostic, DiagnosticSeverity, NumberOrString, Range}; + +use crate::analysis::Evaluator; + +/// Convert a syntax error to an LSP diagnostic. +fn syntax_error_to_diagnostic( + error: &SyntaxError, + line_index: &LineIndex, + text: &str, +) -> Diagnostic { + let range = error.range; + + // Convert rowan TextRange to LSP Range + let start_pos = line_index + .position(range.start().into(), text) + .unwrap_or_default(); + let end_pos = line_index + .position(range.end().into(), text) + .unwrap_or_default(); + + Diagnostic { + range: Range { + start: start_pos.into(), + end: end_pos.into(), + }, + severity: Some(DiagnosticSeverity::ERROR), + code: Some(NumberOrString::String("syntax-error".to_string())), + code_description: None, + source: Some("jrsonnet".to_string()), + message: error.to_string(), + related_information: None, + tags: None, + data: None, + } +} + +/// Compute diagnostics for a document. 
+/// +/// # Arguments +/// * `document` - The document to check +/// * `path` - The canonical path of the document (needed for evaluation) +/// * `enable_lint` - Whether to include lint warnings +/// * `evaluator` - Optional evaluator for runtime error detection +/// * `uri` - The URI of the document (needed for lint related information) +/// * `analysis` - Precomputed type analysis used by lint type checks +pub fn compute_diagnostics( + document: &Document, + path: &CanonicalPath, + enable_lint: bool, + evaluator: Option<&Evaluator>, + uri: &lsp_types::Uri, + analysis: &TypeAnalysis, +) -> Vec { + let text = document.text(); + let line_index = document.line_index(); + let errors = document.errors(); + + let mut diagnostics: Vec = errors + .iter() + .map(|e| syntax_error_to_diagnostic(e, line_index, text)) + .collect(); + + // Add lint diagnostics if enabled and the document parsed successfully + if enable_lint && errors.is_empty() { + let lint_config = lint::LintConfig::all(); + let lint_diagnostics = lint::lint(document, analysis, &lint_config, uri); + diagnostics.extend(lint_diagnostics); + } + + // Add evaluation diagnostics if enabled and the document parsed successfully + if let Some(eval) = evaluator.filter(|_| errors.is_empty()) { + if let Some(eval_diag) = eval.evaluate(path, text, line_index) { + diagnostics.push(eval_diag); + } + } + + diagnostics +} + +/// Create a PublishDiagnostics notification. 
+/// +/// # Arguments +/// * `path` - The canonical path of the document +/// * `document` - The document to check +/// * `enable_lint` - Whether to include lint warnings +/// * `evaluator` - Optional evaluator for runtime error detection +/// * `analysis` - Precomputed type analysis used by lint type checks +pub fn publish_diagnostics_params( + path: &CanonicalPath, + document: &Document, + enable_lint: bool, + evaluator: Option<&Evaluator>, + analysis: &TypeAnalysis, +) -> lsp_types::PublishDiagnosticsParams { + let uri = path.to_uri(); + let diagnostics = compute_diagnostics(document, path, enable_lint, evaluator, &uri, analysis); + + lsp_types::PublishDiagnosticsParams { + uri, + diagnostics, + version: Some(document.version().0), + } +} + +#[cfg(test)] +mod tests { + use jrsonnet_lsp_document::DocVersion; + use jrsonnet_lsp_inference::TypeAnalysis; + use lsp_types::{Position, Uri}; + + use super::*; + use crate::analysis::EvalConfig; + + fn test_uri() -> Uri { + "file:///test.jsonnet".parse().unwrap() + } + + fn test_path() -> CanonicalPath { + CanonicalPath::new("/test.jsonnet".into()) + } + + fn test_evaluator() -> Evaluator { + Evaluator::new(&EvalConfig::default()) + } + + fn diagnostics_for( + doc: &Document, + enable_lint: bool, + evaluator: Option<&Evaluator>, + ) -> Vec { + let analysis = TypeAnalysis::analyze(doc); + compute_diagnostics( + doc, + &test_path(), + enable_lint, + evaluator, + &test_uri(), + &analysis, + ) + } + + #[test] + fn test_valid_document_no_diagnostics() { + let doc = Document::new(r#"{ hello: "world" }"#.to_string(), DocVersion::new(1)); + assert_eq!(diagnostics_for(&doc, false, None), vec![]); + } + + #[test] + fn test_syntax_error_produces_diagnostic() { + let doc = Document::new("{ hello: }".to_string(), DocVersion::new(1)); + assert_eq!( + diagnostics_for(&doc, false, None), + vec![Diagnostic { + range: Range { + start: Position { + line: 0, + character: 9 + }, + end: Position { + line: 0, + character: 9 + }, + }, + 
severity: Some(DiagnosticSeverity::ERROR), + code: Some(NumberOrString::String("syntax-error".to_string())), + code_description: None, + source: Some("jrsonnet".to_string()), + message: "expected expression".to_string(), + related_information: None, + tags: None, + data: None, + }] + ); + } + + #[test] + fn test_diagnostic_position() { + // Error is at the closing brace position (line 2, char 0) + // "{\n a: \n}" - missing value after 'a:' + let doc = Document::new("{\n a: \n}".to_string(), DocVersion::new(1)); + assert_eq!( + diagnostics_for(&doc, false, None), + vec![Diagnostic { + range: Range { + start: Position { + line: 2, + character: 0 + }, + end: Position { + line: 2, + character: 0 + }, + }, + severity: Some(DiagnosticSeverity::ERROR), + code: Some(NumberOrString::String("syntax-error".to_string())), + code_description: None, + source: Some("jrsonnet".to_string()), + message: "expected expression".to_string(), + related_information: None, + tags: None, + data: None, + }] + ); + } + + #[test] + fn test_lint_diagnostics_when_enabled() { + // Has unused variable 'x' + let doc = Document::new("local x = 1; 42".to_string(), DocVersion::new(1)); + assert_eq!( + diagnostics_for(&doc, true, None), + vec![Diagnostic { + range: Range { + start: Position { + line: 0, + character: 6 + }, + end: Position { + line: 0, + character: 7 + }, + }, + severity: Some(DiagnosticSeverity::WARNING), + code: Some(NumberOrString::String("unused-variable".to_string())), + code_description: None, + source: Some("jrsonnet-lint".to_string()), + message: "unused variable: `x`; prefix with `_` to silence this warning" + .to_string(), + related_information: None, + tags: None, + data: None, + }] + ); + } + + #[test] + fn test_lint_diagnostics_when_disabled() { + // Has unused variable 'x' but lint is disabled + let doc = Document::new("local x = 1; 42".to_string(), DocVersion::new(1)); + assert_eq!(diagnostics_for(&doc, false, None), vec![]); + } + + #[test] + fn 
test_type_diagnostics_when_enabled() { + let doc = Document::new(r#""str" + {}"#.to_string(), DocVersion::new(1)); + let analysis = TypeAnalysis::analyze(&doc); + let lint_diags = lint::lint(&doc, &analysis, &lint::LintConfig::all(), &test_uri()); + assert_eq!( + lint_diags, + vec![Diagnostic { + range: Range { + start: Position { + line: 0, + character: 0 + }, + end: Position { + line: 0, + character: 10 + }, + }, + severity: Some(DiagnosticSeverity::WARNING), + code: Some(NumberOrString::String("type-error".to_string())), + code_description: None, + source: Some("jrsonnet-lint".to_string()), + message: "operator `+` requires matching types (number+number, string+string, array+array, or object+object), got (string, {})".to_string(), + related_information: None, + tags: None, + data: None, + }] + ); + assert_eq!( + diagnostics_for(&doc, true, None), + vec![Diagnostic { + range: Range { + start: Position { + line: 0, + character: 0 + }, + end: Position { + line: 0, + character: 10 + }, + }, + severity: Some(DiagnosticSeverity::WARNING), + code: Some(NumberOrString::String("type-error".to_string())), + code_description: None, + source: Some("jrsonnet-lint".to_string()), + message: "operator `+` requires matching types (number+number, string+string, array+array, or object+object), got (string, {})".to_string(), + related_information: None, + tags: None, + data: None, + }] + ); + } + + #[test] + fn test_type_diagnostics_when_disabled() { + let doc = Document::new(r#""str" + {}"#.to_string(), DocVersion::new(1)); + assert_eq!(diagnostics_for(&doc, false, None), vec![]); + } + + #[test] + fn test_lint_not_run_on_syntax_errors() { + // Has both syntax error and what would be unused variable + let doc = Document::new("local x = 1; {".to_string(), DocVersion::new(1)); + // Should only have syntax errors, not lint warnings + assert_eq!( + diagnostics_for(&doc, true, None), + vec![ + Diagnostic { + range: Range { + start: Position { + line: 0, + character: 14 + }, + end: 
Position { + line: 0, + character: 14 + }, + }, + severity: Some(DiagnosticSeverity::ERROR), + code: Some(NumberOrString::String("syntax-error".to_string())), + code_description: None, + source: Some("jrsonnet".to_string()), + message: "expected field name".to_string(), + related_information: None, + tags: None, + data: None, + }, + Diagnostic { + range: Range { + start: Position { + line: 0, + character: 14 + }, + end: Position { + line: 0, + character: 14 + }, + }, + severity: Some(DiagnosticSeverity::ERROR), + code: Some(NumberOrString::String("syntax-error".to_string())), + code_description: None, + source: Some("jrsonnet".to_string()), + message: "expected PLUS, L_PAREN, COLON, COLONCOLON or COLONCOLONCOLON" + .to_string(), + related_information: None, + tags: None, + data: None, + }, + Diagnostic { + range: Range { + start: Position { + line: 0, + character: 14 + }, + end: Position { + line: 0, + character: 14 + }, + }, + severity: Some(DiagnosticSeverity::ERROR), + code: Some(NumberOrString::String("syntax-error".to_string())), + code_description: None, + source: Some("jrsonnet".to_string()), + message: "expected expression".to_string(), + related_information: None, + tags: None, + data: None, + }, + Diagnostic { + range: Range { + start: Position { + line: 0, + character: 14 + }, + end: Position { + line: 0, + character: 14 + }, + }, + severity: Some(DiagnosticSeverity::ERROR), + code: Some(NumberOrString::String("syntax-error".to_string())), + code_description: None, + source: Some("jrsonnet".to_string()), + message: "expected R_BRACE, SEMI, COMMA, IF_KW or FOR_KW".to_string(), + related_information: None, + tags: None, + data: None, + }, + ] + ); + } + + #[test] + fn test_eval_diagnostics_when_enabled() { + // Has runtime error (undefined variable) + let doc = Document::new("undefined_var".to_string(), DocVersion::new(1)); + let eval = test_evaluator(); + let diagnostics = diagnostics_for(&doc, false, Some(&eval)); + assert_eq!( + diagnostics, + 
vec![Diagnostic { + range: Range::default(), + severity: Some(DiagnosticSeverity::ERROR), + code: Some(NumberOrString::String("eval-error".to_string())), + code_description: None, + source: Some("jrsonnet-eval".to_string()), + message: "local is not defined: undefined_var".to_string(), + related_information: None, + tags: None, + data: None, + }] + ); + } + + #[test] + fn test_eval_diagnostics_not_run_on_syntax_errors() { + // Has both syntax error and what would be runtime error + let doc = Document::new("undefined_var {".to_string(), DocVersion::new(1)); + let eval = test_evaluator(); + let diagnostics = diagnostics_for(&doc, false, Some(&eval)); + // Should only have syntax errors, not eval errors + assert!(!diagnostics.is_empty()); + assert!(diagnostics.iter().all(|d| d + .code + .as_ref() + .map(|c| matches!(c, NumberOrString::String(s) if s == "syntax-error")) + .unwrap_or(false))); + } +} diff --git a/crates/jrsonnet-lsp/src/handlers/mod.rs b/crates/jrsonnet-lsp/src/handlers/mod.rs new file mode 100644 index 00000000..7c763df5 --- /dev/null +++ b/crates/jrsonnet-lsp/src/handlers/mod.rs @@ -0,0 +1,8 @@ +//! LSP request and notification handlers. +//! +//! Most handlers are provided by the `jrsonnet-lsp-handlers` crate. +//! This module contains handlers that depend on the evaluator. + +pub mod diagnostics; + +pub use diagnostics::{compute_diagnostics, publish_diagnostics_params}; diff --git a/crates/jrsonnet-lsp/src/lib.rs b/crates/jrsonnet-lsp/src/lib.rs new file mode 100644 index 00000000..a0ab3d80 --- /dev/null +++ b/crates/jrsonnet-lsp/src/lib.rs @@ -0,0 +1,30 @@ +//! Jsonnet Language Server Protocol implementation. +//! +//! This crate provides an LSP server for Jsonnet files, offering features like: +//! - Syntax error diagnostics +//! - Go to definition (local bindings and imports) +//! - Hover information (stdlib functions and local definitions) +//! - Document symbols (outline) +//! - Completions (stdlib, local variables, object fields, imports) +//! 
- Formatting (via jrsonnet-fmt) +//! - Find references (local and cross-file) +//! - Rename (local symbols) +//! - Signature help +//! - Semantic tokens +//! +//! # Configuration +//! +//! The server accepts configuration via: +//! - `initializationOptions` in the initialize request +//! - `workspace/didChangeConfiguration` notifications +//! +//! See [`config::ServerConfig`] for available options. + +pub mod analysis; +pub mod async_diagnostics; +pub mod config; +pub mod handlers; +pub mod server; + +pub use config::ServerConfig; +pub use server::run_stdio; diff --git a/crates/jrsonnet-lsp/src/server.rs b/crates/jrsonnet-lsp/src/server.rs new file mode 100644 index 00000000..9c727d1b --- /dev/null +++ b/crates/jrsonnet-lsp/src/server.rs @@ -0,0 +1,975 @@ +//! LSP server main loop. +//! +//! Uses the lsp-server crate for the transport layer, following the rust-analyzer pattern. +//! Diagnostics are computed asynchronously with debouncing to avoid blocking the event loop. + +mod async_requests; + +use std::{ + panic::{catch_unwind, AssertUnwindSafe}, + sync::Arc, +}; + +use anyhow::{Context, Result}; +use crossbeam_channel::{select, Receiver, Sender}; +use jrsonnet_lsp_document::{CanonicalPath, DocVersion}; +use jrsonnet_lsp_handlers as handlers; +use jrsonnet_lsp_import::{parse_document_imports, ImportGraph}; +use jrsonnet_lsp_inference::{ + new_shared_cache, DocumentManager, SharedDocumentManager, SharedTypeCache, +}; +use jrsonnet_lsp_types::GlobalTyStore; +use lsp_server::{Connection, Message, Notification, Request, RequestId, Response}; +use lsp_types::{ + notification::{ + DidChangeConfiguration, DidChangeTextDocument, DidChangeWatchedFiles, DidCloseTextDocument, + DidOpenTextDocument, Notification as _, PublishDiagnostics, + }, + request::{ + CodeActionRequest, CodeLensRequest, Completion, DocumentHighlightRequest, + DocumentSymbolRequest, ExecuteCommand, Formatting, GotoDefinition, HoverRequest, + InlayHintRequest, PrepareRenameRequest, References, Rename, 
Request as _, + SemanticTokensFullRequest, Shutdown, SignatureHelpRequest, WorkspaceSymbolRequest, + }, + CodeActionKind, CodeActionOptions, CodeActionParams, CodeActionProviderCapability, + CodeActionResponse, CodeLensOptions, CodeLensParams, CompletionOptions, CompletionParams, + DidChangeConfigurationParams, DidChangeTextDocumentParams, DidChangeWatchedFilesParams, + DidCloseTextDocumentParams, DidOpenTextDocumentParams, DocumentFormattingParams, + DocumentHighlight, DocumentHighlightParams, DocumentSymbolParams, DocumentSymbolResponse, + ExecuteCommandOptions, ExecuteCommandParams, FileChangeType, GotoDefinitionParams, HoverParams, + HoverProviderCapability, InitializeParams, InitializeResult, InlayHintParams, OneOf, + PrepareRenameResponse, ReferenceParams, RenameParams, SemanticTokens, + SemanticTokensFullOptions, SemanticTokensOptions, SemanticTokensParams, + SemanticTokensServerCapabilities, ServerCapabilities, ServerInfo, SignatureHelp, + SignatureHelpOptions, SignatureHelpParams, TextDocumentPositionParams, + TextDocumentSyncCapability, TextDocumentSyncKind, TextEdit, WorkDoneProgressOptions, + WorkspaceSymbolParams, +}; +use parking_lot::RwLock; +use tracing::{debug, error, info, warn}; + +use self::async_requests::AsyncRequestContext; +use crate::{ + analysis::{EvalConfig, Evaluator}, + async_diagnostics::{AsyncDiagnostics, DiagnosticsConfig}, + config::ServerConfig, +}; + +/// Shared server configuration. +pub type SharedConfig = Arc>; + +/// LSP server state. +pub struct Server { + /// The LSP connection. + connection: Connection, + /// Document manager. + documents: SharedDocumentManager, + /// Import graph for cross-file references. + import_graph: Arc>, + /// Global type store shared across all analyses. + global_types: Arc, + /// Cross-file type cache for import resolution. + type_cache: SharedTypeCache, + /// Server configuration. + config: SharedConfig, + /// Evaluator for runtime diagnostics (wrapped in Arc for sharing with async diagnostics). 
+ evaluator: Option>, + /// Async diagnostics runner. + diagnostics: AsyncDiagnostics, + /// Channel for async request responses. + request_response_sender: Sender, + /// Channel for async request responses. + request_response_receiver: Receiver, + /// Shutdown requested flag. + shutdown_requested: bool, +} + +impl Server { + /// Create a new server with the given connection. + pub fn new(connection: Connection) -> Self { + let global_types = Arc::new(GlobalTyStore::new()); + let documents = Arc::new(DocumentManager::new(Arc::clone(&global_types))); + let import_graph = Arc::new(RwLock::new(ImportGraph::new())); + let type_cache = new_shared_cache(Arc::clone(&global_types)); + let (request_response_sender, request_response_receiver) = crossbeam_channel::unbounded(); + let diagnostics = AsyncDiagnostics::new(DiagnosticsConfig { + evaluator: None, + documents: Arc::clone(&documents), + import_graph: Arc::clone(&import_graph), + type_cache: Arc::clone(&type_cache), + global_types: Arc::clone(&global_types), + }); + + Self { + connection, + documents, + import_graph, + type_cache, + global_types, + config: Arc::new(RwLock::new(ServerConfig::default())), + evaluator: None, + diagnostics, + request_response_sender, + request_response_receiver, + shutdown_requested: false, + } + } + + /// Get a reference to the global type store. + pub fn global_types(&self) -> &Arc { + &self.global_types + } + + /// Update the configuration. + pub fn update_config(&self, new_config: ServerConfig) { + let mut config = self.config.write(); + config.merge(new_config); + } + + /// Invalidate the type cache for a file and all files that transitively import it. + /// + /// When a file changes, any cached types for files that depend on it may be stale, + /// so we invalidate the entire dependency chain. 
+ fn invalidate_type_cache_with_dependents(&self, path: &CanonicalPath) { + // Get all files that transitively import this file + let dependents = self.import_graph.read().transitive_importers(path); + + // Invalidate the changed file and all its dependents + let mut cache = self.type_cache.write(); + cache.invalidate(path); + cache.invalidate_many(dependents); + } + + fn async_request_context(&self) -> AsyncRequestContext { + AsyncRequestContext::new( + Arc::clone(&self.documents), + Arc::clone(&self.import_graph), + Arc::clone(&self.global_types), + Arc::clone(&self.type_cache), + Arc::clone(&self.config), + ) + } + + fn reconfigure_runtime_components(&mut self, config: &ServerConfig) { + self.evaluator = if config.enable_eval_diagnostics { + let eval_config = EvalConfig { + jpath: config.jpath.clone(), + resolve_paths_with_tanka: config.resolve_paths_with_tanka, + }; + Some(Arc::new(Evaluator::new(&eval_config))) + } else { + None + }; + + self.diagnostics = AsyncDiagnostics::new(DiagnosticsConfig { + evaluator: self.evaluator.clone(), + documents: Arc::clone(&self.documents), + import_graph: Arc::clone(&self.import_graph), + type_cache: Arc::clone(&self.type_cache), + global_types: Arc::clone(&self.global_types), + }); + } + + fn spawn_async_response(&self, id: RequestId, method: &'static str, compute: F) + where + F: FnOnce() -> Result + Send + 'static, + { + let sender = self.request_response_sender.clone(); + rayon::spawn(move || { + let response = match catch_unwind(AssertUnwindSafe(compute)) { + Ok(Ok(value)) => Response::new_ok(id, value), + Ok(Err(err)) => { + error!("Async handler failed for {}: {err:#}", method); + Response::new_err( + id, + lsp_server::ErrorCode::InternalError as i32, + format!("{} failed: {err:#}", method), + ) + } + Err(_) => { + error!("Async handler panicked for {}", method); + Response::new_err( + id, + lsp_server::ErrorCode::InternalError as i32, + format!("{} panicked", method), + ) + } + }; + if sender.send(response).is_err() 
{ + debug!("Dropping async response for {}: channel closed", method); + } + }); + } + + /// Run the server, handling the initialize handshake first. + pub fn run(mut self) -> Result<()> { + info!("Starting jrsonnet language server"); + + // Handle initialize request + let (id, params) = self.initialize()?; + + // Parse initialization options into configuration + let init_config = ServerConfig::from_initialization_options(params.initialization_options); + self.update_config(init_config.clone()); + info!( + "Configuration: jpath={:?}, eval_diagnostics={}, tanka_mode={}", + init_config.jpath, + init_config.enable_eval_diagnostics, + init_config.resolve_paths_with_tanka + ); + + self.reconfigure_runtime_components(&init_config); + debug!("Runtime components initialized"); + + // Send initialize result + let result = InitializeResult { + capabilities: Self::server_capabilities(), + server_info: Some(ServerInfo { + name: "jrsonnet-lsp".to_string(), + version: Some(env!("CARGO_PKG_VERSION").to_string()), + }), + }; + + let result = serde_json::to_value(result)?; + self.connection + .sender + .send(Message::Response(Response::new_ok(id, result)))?; + + info!("Server initialized"); + + // Wait for initialized notification + match self.connection.receiver.recv() { + Ok(Message::Notification(n)) if n.method == "initialized" => { + info!("Received initialized notification"); + } + Ok(msg) => { + warn!("Expected initialized notification, got: {:?}", msg); + } + Err(e) => { + error!("Error receiving initialized notification: {}", e); + return Err(e.into()); + } + } + + // Main loop + self.main_loop()?; + + info!("Server shutting down"); + Ok(()) + } + + /// Handle the initialize request. 
+ fn initialize(&self) -> Result<(RequestId, InitializeParams)> { + let msg = self + .connection + .receiver + .recv() + .context("Failed to receive initialize request")?; + + match msg { + Message::Request(req) if req.method == "initialize" => { + let params: InitializeParams = serde_json::from_value(req.params)?; + info!("Initialize request from: {:?}", params.client_info); + Ok((req.id, params)) + } + _ => anyhow::bail!("Expected initialize request, got: {:?}", msg), + } + } + + /// Get the server capabilities. + fn server_capabilities() -> ServerCapabilities { + ServerCapabilities { + text_document_sync: Some(TextDocumentSyncCapability::Kind( + TextDocumentSyncKind::INCREMENTAL, + )), + document_symbol_provider: Some(OneOf::Left(true)), + definition_provider: Some(OneOf::Left(true)), + hover_provider: Some(HoverProviderCapability::Simple(true)), + document_highlight_provider: Some(OneOf::Left(true)), + inlay_hint_provider: Some(OneOf::Left(true)), + code_action_provider: Some(CodeActionProviderCapability::Options(CodeActionOptions { + code_action_kinds: Some(vec![CodeActionKind::QUICKFIX]), + work_done_progress_options: WorkDoneProgressOptions::default(), + resolve_provider: Some(false), + })), + completion_provider: Some(CompletionOptions { + trigger_characters: Some(vec![".".to_string()]), + ..Default::default() + }), + signature_help_provider: Some(SignatureHelpOptions { + trigger_characters: Some(vec!["(".to_string(), ",".to_string()]), + retrigger_characters: None, + work_done_progress_options: WorkDoneProgressOptions::default(), + }), + document_formatting_provider: Some(OneOf::Left(true)), + references_provider: Some(OneOf::Left(true)), + workspace_symbol_provider: Some(OneOf::Left(true)), + rename_provider: Some(OneOf::Right(lsp_types::RenameOptions { + prepare_provider: Some(true), + work_done_progress_options: WorkDoneProgressOptions::default(), + })), + semantic_tokens_provider: Some( + 
SemanticTokensServerCapabilities::SemanticTokensOptions(SemanticTokensOptions { + legend: handlers::semantic_tokens_legend(), + full: Some(SemanticTokensFullOptions::Bool(true)), + range: None, + work_done_progress_options: WorkDoneProgressOptions::default(), + }), + ), + execute_command_provider: Some(ExecuteCommandOptions { + commands: vec![ + "jrsonnet.evalFile".to_string(), + "jrsonnet.evalExpression".to_string(), + "jrsonnet.findTransitiveImporters".to_string(), + "jrsonnet.findReferences".to_string(), + ], + work_done_progress_options: WorkDoneProgressOptions::default(), + }), + code_lens_provider: Some(CodeLensOptions { + resolve_provider: Some(false), + }), + ..Default::default() + } + } + + /// Main message loop. + /// + /// Uses crossbeam select! to handle both LSP messages and async diagnostics results. + fn main_loop(&mut self) -> Result<()> { + loop { + // Use select! to wait on either LSP messages or diagnostics results. + // We receive first and then process to avoid borrow conflicts. + enum SelectResult { + LspMessage(Result), + DiagnosticsResult( + Result< + crate::async_diagnostics::DiagnosticsResult, + crossbeam_channel::RecvError, + >, + ), + AsyncRequestResponse(Result), + } + + let result = { + let lsp_receiver = &self.connection.receiver; + let diag_receiver = self.diagnostics.results(); + let request_receiver = &self.request_response_receiver; + + select! { + recv(lsp_receiver) -> msg => SelectResult::LspMessage(msg), + recv(diag_receiver) -> result => SelectResult::DiagnosticsResult(result), + recv(request_receiver) -> response => SelectResult::AsyncRequestResponse(response), + } + }; + + match result { + SelectResult::LspMessage(Ok(msg)) => { + if self.handle_message(msg)? 
{ + break; // Exit notification received + } + } + SelectResult::LspMessage(Err(e)) => { + error!("Error receiving message: {}", e); + break; + } + SelectResult::DiagnosticsResult(Ok(result)) => { + // Send the completed diagnostics to the client + self.send_notification::(result.params)?; + } + SelectResult::DiagnosticsResult(Err(_)) => { + // Diagnostics channel closed, that's fine + debug!("Diagnostics channel closed"); + } + SelectResult::AsyncRequestResponse(Ok(response)) => { + self.connection.sender.send(Message::Response(response))?; + } + SelectResult::AsyncRequestResponse(Err(_)) => { + debug!("Async request response channel closed"); + } + } + } + + Ok(()) + } + + /// Handle a single LSP message. + /// + /// Returns true if exit notification was received. + fn handle_message(&mut self, msg: Message) -> Result { + match msg { + Message::Request(req) => { + if self.shutdown_requested { + // After shutdown, only respond with errors + let resp = Response::new_err( + req.id, + lsp_server::ErrorCode::InvalidRequest as i32, + "Server is shutting down".to_string(), + ); + self.connection.sender.send(Message::Response(resp))?; + } else { + self.handle_request(req)?; + } + Ok(false) + } + Message::Response(resp) => { + debug!("Received response: {:?}", resp.id); + Ok(false) + } + Message::Notification(notif) => self.handle_notification(notif), + } + } + + /// Handle an incoming request. 
+ fn handle_request(&mut self, req: Request) -> Result<()> { + debug!("Handling request: {} (id={})", req.method, req.id); + + match req.method.as_str() { + Shutdown::METHOD => { + info!("Shutdown request received"); + self.shutdown_requested = true; + let resp = Response::new_ok(req.id, serde_json::Value::Null); + self.connection.sender.send(Message::Response(resp))?; + } + DocumentSymbolRequest::METHOD => { + let params: DocumentSymbolParams = serde_json::from_value(req.params)?; + let result = self.on_document_symbol(params); + let resp = Response::new_ok(req.id, serde_json::to_value(result)?); + self.connection.sender.send(Message::Response(resp))?; + } + GotoDefinition::METHOD => { + let params: GotoDefinitionParams = serde_json::from_value(req.params)?; + let context = self.async_request_context(); + self.spawn_async_response(req.id, GotoDefinition::METHOD, move || { + let result = context.goto_definition(params); + serde_json::to_value(result).map_err(Into::into) + }); + } + HoverRequest::METHOD => { + let params: HoverParams = serde_json::from_value(req.params)?; + let context = self.async_request_context(); + self.spawn_async_response(req.id, HoverRequest::METHOD, move || { + let result = context.hover(params); + serde_json::to_value(result).map_err(Into::into) + }); + } + DocumentHighlightRequest::METHOD => { + let params: DocumentHighlightParams = serde_json::from_value(req.params)?; + let result = self.on_document_highlight(params); + let resp = Response::new_ok(req.id, serde_json::to_value(result)?); + self.connection.sender.send(Message::Response(resp))?; + } + InlayHintRequest::METHOD => { + let params: InlayHintParams = serde_json::from_value(req.params)?; + let context = self.async_request_context(); + self.spawn_async_response(req.id, InlayHintRequest::METHOD, move || { + let result = context.inlay_hints(params); + serde_json::to_value(result).map_err(Into::into) + }); + } + CodeActionRequest::METHOD => { + let params: CodeActionParams = 
serde_json::from_value(req.params)?; + let result = self.on_code_action(params); + let resp = Response::new_ok(req.id, serde_json::to_value(result)?); + self.connection.sender.send(Message::Response(resp))?; + } + Completion::METHOD => { + let params: CompletionParams = serde_json::from_value(req.params)?; + let context = self.async_request_context(); + self.spawn_async_response(req.id, Completion::METHOD, move || { + let result = context.completion(params); + serde_json::to_value(result).map_err(Into::into) + }); + } + SignatureHelpRequest::METHOD => { + let params: SignatureHelpParams = serde_json::from_value(req.params)?; + let result = self.on_signature_help(params); + let resp = Response::new_ok(req.id, serde_json::to_value(result)?); + self.connection.sender.send(Message::Response(resp))?; + } + Formatting::METHOD => { + let params: DocumentFormattingParams = serde_json::from_value(req.params)?; + let result = self.on_formatting(params); + let resp = Response::new_ok(req.id, serde_json::to_value(result)?); + self.connection.sender.send(Message::Response(resp))?; + } + References::METHOD => { + let params: ReferenceParams = serde_json::from_value(req.params)?; + let context = self.async_request_context(); + self.spawn_async_response(req.id, References::METHOD, move || { + let result = context.references(params); + serde_json::to_value(result).map_err(Into::into) + }); + } + WorkspaceSymbolRequest::METHOD => { + let params: WorkspaceSymbolParams = serde_json::from_value(req.params)?; + let context = self.async_request_context(); + self.spawn_async_response(req.id, WorkspaceSymbolRequest::METHOD, move || { + let result = context.workspace_symbol(params); + serde_json::to_value(result).map_err(Into::into) + }); + } + PrepareRenameRequest::METHOD => { + let params: TextDocumentPositionParams = serde_json::from_value(req.params)?; + let result = self.on_prepare_rename(params); + let resp = Response::new_ok(req.id, serde_json::to_value(result)?); + 
self.connection.sender.send(Message::Response(resp))?; + } + Rename::METHOD => { + let params: RenameParams = serde_json::from_value(req.params)?; + let context = self.async_request_context(); + self.spawn_async_response(req.id, Rename::METHOD, move || { + let result = context.rename(params); + serde_json::to_value(result).map_err(Into::into) + }); + } + SemanticTokensFullRequest::METHOD => { + let params: SemanticTokensParams = serde_json::from_value(req.params)?; + let result = self.on_semantic_tokens_full(params); + let resp = Response::new_ok(req.id, serde_json::to_value(result)?); + self.connection.sender.send(Message::Response(resp))?; + } + CodeLensRequest::METHOD => { + let params: CodeLensParams = serde_json::from_value(req.params)?; + let context = self.async_request_context(); + self.spawn_async_response(req.id, CodeLensRequest::METHOD, move || { + let result = context.code_lens(params); + serde_json::to_value(result).map_err(Into::into) + }); + } + ExecuteCommand::METHOD => { + let params: ExecuteCommandParams = serde_json::from_value(req.params)?; + let context = self.async_request_context(); + self.spawn_async_response(req.id, ExecuteCommand::METHOD, move || { + let result = context.execute_command(params); + serde_json::to_value(result).map_err(Into::into) + }); + } + _ => { + warn!("Unhandled request: {}", req.method); + let resp = Response::new_err( + req.id, + lsp_server::ErrorCode::MethodNotFound as i32, + format!("Method not found: {}", req.method), + ); + self.connection.sender.send(Message::Response(resp))?; + } + } + + Ok(()) + } + + /// Handle textDocument/documentSymbol request. + fn on_document_symbol(&self, params: DocumentSymbolParams) -> Option { + let uri = ¶ms.text_document.uri; + let path = CanonicalPath::from_uri(uri)?; + let doc = self.documents.get(&path)?.clone(); + + let symbols = handlers::document_symbols(&doc); + Some(DocumentSymbolResponse::Nested(symbols)) + } + + /// Handle textDocument/documentHighlight request. 
+ fn on_document_highlight( + &self, + params: DocumentHighlightParams, + ) -> Option> { + let uri = ¶ms.text_document_position_params.text_document.uri; + let position = params.text_document_position_params.position; + let path = CanonicalPath::from_uri(uri)?; + let doc = self.documents.get(&path)?.clone(); + let lsp_pos = position.into(); + + let highlights = handlers::document_highlights(&doc, lsp_pos); + if highlights.is_empty() { + return None; + } + + Some(highlights) + } + + /// Handle textDocument/codeAction request. + fn on_code_action(&self, params: CodeActionParams) -> Option { + let uri = ¶ms.text_document.uri; + let path = CanonicalPath::from_uri(uri)?; + let actions = { + let doc = self.documents.get(&path)?; + handlers::code_actions(&doc, uri, params.range, ¶ms.context) + }; + if actions.is_empty() { + return None; + } + + Some(actions) + } + + /// Handle textDocument/signatureHelp request. + fn on_signature_help(&self, params: SignatureHelpParams) -> Option { + let uri = ¶ms.text_document_position_params.text_document.uri; + let position = params.text_document_position_params.position; + let path = CanonicalPath::from_uri(uri)?; + let doc = self.documents.get(&path)?; + + let lsp_pos = position.into(); + + handlers::signature_help(&doc, lsp_pos) + } + + /// Handle textDocument/formatting request. + fn on_formatting(&self, params: DocumentFormattingParams) -> Option> { + let uri = ¶ms.text_document.uri; + let path = CanonicalPath::from_uri(uri)?; + let doc = self.documents.get(&path)?; + + // Get formatting config + let config = self.config.read().formatting.clone(); + + handlers::format_document_with_config(doc.text(), &config) + } + + /// Handle textDocument/prepareRename request. 
+ fn on_prepare_rename( + &self, + params: TextDocumentPositionParams, + ) -> Option { + let uri = ¶ms.text_document.uri; + let position = params.position; + let path = CanonicalPath::from_uri(uri)?; + let doc = self.documents.get(&path)?; + + let lsp_pos = position.into(); + + handlers::prepare_rename(&doc, lsp_pos) + } + + /// Handle textDocument/semanticTokens/full request. + fn on_semantic_tokens_full(&self, params: SemanticTokensParams) -> Option { + let uri = ¶ms.text_document.uri; + let path = CanonicalPath::from_uri(uri)?; + let doc = self.documents.get(&path)?; + + Some(handlers::semantic_tokens(&doc)) + } +} + +impl Server { + /// Handle an incoming notification. + /// + /// Returns true if exit notification was received. + fn handle_notification(&mut self, notif: Notification) -> Result { + debug!("Handling notification: {}", notif.method); + + match notif.method.as_str() { + DidOpenTextDocument::METHOD => { + let params: DidOpenTextDocumentParams = serde_json::from_value(notif.params)?; + self.on_did_open(params); + } + DidChangeTextDocument::METHOD => { + let params: DidChangeTextDocumentParams = serde_json::from_value(notif.params)?; + self.on_did_change(params); + } + DidCloseTextDocument::METHOD => { + let params: DidCloseTextDocumentParams = serde_json::from_value(notif.params)?; + self.on_did_close(params)?; + } + DidChangeConfiguration::METHOD => { + let params: DidChangeConfigurationParams = serde_json::from_value(notif.params)?; + self.on_did_change_configuration(params); + } + DidChangeWatchedFiles::METHOD => { + let params: DidChangeWatchedFilesParams = serde_json::from_value(notif.params)?; + self.on_did_change_watched_files(params); + } + "exit" => { + info!("Exit notification received"); + return Ok(true); + } + _ => { + debug!("Unhandled notification: {}", notif.method); + } + } + + Ok(false) + } + + /// Handle textDocument/didOpen notification. 
+ fn on_did_open(&self, params: DidOpenTextDocumentParams) { + let uri = ¶ms.text_document.uri; + info!("Document opened: {}", uri.as_str()); + + let Some(path) = CanonicalPath::from_uri(uri) else { + warn!("Could not convert URI to path: {}", uri.as_str()); + return; + }; + + let text = params.text_document.text; + let version = DocVersion::new(params.text_document.version); + + self.documents.open(path.clone(), text, version); + + // Invalidate type cache for this file and all files that depend on it + self.invalidate_type_cache_with_dependents(&path); + + // Update import graph + self.update_import_graph(&path); + + // Publish diagnostics + self.schedule_diagnostics(&path); + } + + /// Handle textDocument/didChange notification. + fn on_did_change(&self, params: DidChangeTextDocumentParams) { + let uri = ¶ms.text_document.uri; + debug!("Document changed: {}", uri.as_str()); + + let Some(path) = CanonicalPath::from_uri(uri) else { + warn!("Could not convert URI to path: {}", uri.as_str()); + return; + }; + + let version = DocVersion::new(params.text_document.version); + + // Process each change (INCREMENTAL sync may send multiple changes) + for change in params.content_changes { + let success = if let Some(range) = change.range { + // Incremental change: apply the range-based edit + self.documents + .apply_incremental_change(&path, range, &change.text, version) + } else { + // Full change: no range means full document replacement + self.documents.update(&path, change.text, version) + }; + + if !success { + warn!("Failed to apply change to document: {}", uri.as_str()); + return; + } + } + + // Invalidate type cache for this file and all files that depend on it + self.invalidate_type_cache_with_dependents(&path); + + // Update import graph (imports may have changed) + self.update_import_graph(&path); + + // Publish diagnostics + self.schedule_diagnostics(&path); + } + + /// Handle textDocument/didClose notification. 
+ fn on_did_close(&self, params: DidCloseTextDocumentParams) -> Result<()> { + let uri = ¶ms.text_document.uri; + info!("Document closed: {}", uri.as_str()); + + let Some(path) = CanonicalPath::from_uri(uri) else { + warn!("Could not convert URI to path: {}", uri.as_str()); + return Ok(()); + }; + + self.documents.close(&path); + + // Invalidate type cache for this file and all files that depend on it + // (dependents may have cached types based on this file's exports) + self.invalidate_type_cache_with_dependents(&path); + + // Remove from import graph + self.import_graph.write().remove_file(&path); + + // Clear diagnostics for closed document + self.send_notification::(lsp_types::PublishDiagnosticsParams { + uri: uri.clone(), + diagnostics: vec![], + version: None, + })?; + + Ok(()) + } + + /// Handle workspace/didChangeConfiguration notification. + fn on_did_change_configuration(&mut self, params: DidChangeConfigurationParams) { + info!("Configuration changed"); + + // The settings can come in different formats depending on the client + // VS Code sends settings under a "jsonnet" key, others may send flat settings + let settings = if let Some(jsonnet_settings) = params + .settings + .as_object() + .and_then(|o| o.get("jsonnet")) + .or_else(|| { + params + .settings + .as_object() + .and_then(|o| o.get("jsonnet-language-server")) + }) { + jsonnet_settings.clone() + } else { + params.settings + }; + + let old_config = self.config.read().clone(); + let updated_config = { + let mut config = self.config.write(); + if config.update_from_settings(settings) { + Some(config.clone()) + } else { + None + } + }; + + if let Some(updated_config) = updated_config { + let runtime_config_changed = old_config.jpath != updated_config.jpath + || old_config.enable_eval_diagnostics != updated_config.enable_eval_diagnostics + || old_config.resolve_paths_with_tanka != updated_config.resolve_paths_with_tanka; + let diagnostics_config_changed = + old_config.enable_lint_diagnostics != 
updated_config.enable_lint_diagnostics; + + if runtime_config_changed { + self.reconfigure_runtime_components(&updated_config); + debug!("Runtime components reconfigured after settings update"); + } + + if runtime_config_changed { + // Import resolution and cached file types depend on jpath/tanka settings. + self.type_cache.write().clear(); + for path in self.documents.open_paths() { + self.update_import_graph(&path); + } + } + + if runtime_config_changed || diagnostics_config_changed { + for path in self.documents.open_paths() { + self.schedule_diagnostics(&path); + } + } + + info!( + "Configuration updated: jpath={:?}, eval_diagnostics={}, tanka_mode={}", + updated_config.jpath, + updated_config.enable_eval_diagnostics, + updated_config.resolve_paths_with_tanka + ); + } + } + + /// Handle workspace/didChangeWatchedFiles notification. + /// + /// This keeps import graph and type cache up to date for files that change on disk + /// while not being open in the editor. + fn on_did_change_watched_files(&self, params: DidChangeWatchedFilesParams) { + for change in params.changes { + let Some(path) = CanonicalPath::from_uri(&change.uri) else { + continue; + }; + + self.invalidate_type_cache_with_dependents(&path); + + match change.typ { + FileChangeType::DELETED => { + self.import_graph.write().remove_file(&path); + } + FileChangeType::CHANGED | FileChangeType::CREATED => { + self.update_import_graph(&path); + } + _ => {} + } + + if self.documents.is_open(&path) { + self.schedule_diagnostics(&path); + } + self.schedule_diagnostics_for_open_importers(&path); + } + } + + /// Update the import graph for a document. + /// + /// Parses the document's import statements and updates the graph + /// so that cross-file references can be found efficiently. + fn update_import_graph(&self, path: &CanonicalPath) { + let Some(doc) = self.documents.get_document(path) else { + // File no longer exists or cannot be read. 
+ self.import_graph.write().remove_file(path); + return; + }; + + let from_path = path.clone(); + let config = self.config.read(); + let jpath = config.jpath.clone(); + drop(config); + + // Create a resolver closure that captures the path and jpath + let resolve_import = |import: &str| -> Option { + // First, try relative to the importing file + if let Some(parent) = from_path.as_path().parent() { + let resolved = parent.join(import); + if let Ok(canonical) = resolved.canonicalize() { + return Some(CanonicalPath::new(canonical)); + } + } + + // Then, try each jpath directory + for jpath_dir in &jpath { + let resolved = jpath_dir.join(import); + if let Ok(canonical) = resolved.canonicalize() { + return Some(CanonicalPath::new(canonical)); + } + } + + None + }; + + // Parse imports OUTSIDE the lock to minimize lock hold time. + // This is important for responsiveness when parsing large files. + let entries = parse_document_imports(&doc, &resolve_import); + + // Now acquire the write lock and do the quick data structure update + self.import_graph + .write() + .update_file_with_entries(path, entries); + } + + /// Schedule diagnostics for currently-open files that import `path`. + fn schedule_diagnostics_for_open_importers(&self, path: &CanonicalPath) { + let importers = self.import_graph.read().transitive_importers(path); + for importer in importers { + if self.documents.is_open(&importer) { + self.schedule_diagnostics(&importer); + } + } + } + + /// Schedule diagnostics computation for a document. + /// + /// Diagnostics are computed asynchronously with debouncing. 
+	fn schedule_diagnostics(&self, path: &CanonicalPath) {
+		let Some(doc) = self.documents.get(path) else {
+			return;
+		};
+
+		let enable_lint = self.config.read().lint_diagnostics_enabled();
+		// Snapshot text and version together so the async worker sees a
+		// consistent pair even if the document changes before it runs.
+		let text = doc.text().to_string();
+		let version = doc.version();
+		drop(doc); // Release the borrow before scheduling
+
+		self.diagnostics
+			.schedule(path.clone(), text, version, enable_lint);
+	}
+
+	/// Send a notification to the client.
+	///
+	/// Serialization failures and a closed client channel both surface as
+	/// errors to the caller rather than being swallowed.
+	fn send_notification<N: lsp_types::notification::Notification>(
+		&self,
+		params: N::Params,
+	) -> Result<()> {
+		let params = serde_json::to_value(params)?;
+		let notif = Notification::new(N::METHOD.to_string(), params);
+		self.connection.sender.send(Message::Notification(notif))?;
+		Ok(())
+	}
+}
+
+/// Run the LSP server over stdio.
+///
+/// Blocks until the client disconnects, then joins the I/O threads so
+/// buffered messages are flushed before returning.
+pub fn run_stdio() -> Result<()> {
+	let (connection, io_threads) = Connection::stdio();
+	let server = Server::new(connection);
+	server.run()?;
+	io_threads.join()?;
+	Ok(())
+}
diff --git a/crates/jrsonnet-lsp/src/server/async_requests.rs b/crates/jrsonnet-lsp/src/server/async_requests.rs
new file mode 100644
index 00000000..5ec122bd
--- /dev/null
+++ b/crates/jrsonnet-lsp/src/server/async_requests.rs
@@ -0,0 +1,492 @@
+use std::sync::Arc;
+
+use jrsonnet_lsp_document::{to_lsp_range, CanonicalPath, DocVersion, Document, SymbolName};
+use jrsonnet_lsp_handlers as handlers;
+use jrsonnet_lsp_import::ImportGraph;
+use jrsonnet_lsp_inference::{SharedDocumentManager, SharedTypeCache, TypeAnalysis, TypeProvider};
+use jrsonnet_lsp_types::GlobalTyStore;
+use lsp_types::{
+	CodeLens, CodeLensParams, CompletionParams, CompletionResponse, ExecuteCommandParams,
+	GotoDefinitionParams, GotoDefinitionResponse, Hover, HoverParams, InlayHint, InlayHintParams,
+	Location, ReferenceParams, RenameParams, SymbolInformation, WorkspaceEdit,
+	WorkspaceSymbolParams,
+};
+use parking_lot::RwLock;
+use rayon::prelude::*;
+use tracing::{info, warn};
+
+use super::SharedConfig;
+use crate::analysis::eval::create_state_with_jpath;
+
+/// Shared state handed to asynchronously executed request handlers.
+///
+/// Cloning is cheap: every field is (or contains) an `Arc`, so each worker
+/// gets its own handle onto the same underlying server state.
+#[derive(Clone)]
+pub(super) struct AsyncRequestContext {
+	documents: SharedDocumentManager,
+	import_graph: Arc<RwLock<ImportGraph>>,
+	global_types: Arc<GlobalTyStore>,
+	type_cache: SharedTypeCache,
+	config: SharedConfig,
+}
+
+impl AsyncRequestContext {
+	pub(super) fn new(
+		documents: SharedDocumentManager,
+		import_graph: Arc<RwLock<ImportGraph>>,
+		global_types: Arc<GlobalTyStore>,
+		type_cache: SharedTypeCache,
+		config: SharedConfig,
+	) -> Self {
+		Self {
+			documents,
+			import_graph,
+			global_types,
+			type_cache,
+			config,
+		}
+	}
+
+	/// Analyze a document with dependency-aware import resolution.
+	fn analyze_document(&self, path: &CanonicalPath, doc: &Document) -> TypeAnalysis {
+		let provider = TypeProvider::new(
+			Arc::clone(&self.type_cache),
+			Arc::clone(&self.import_graph),
+			Arc::clone(&self.global_types),
+		);
+		provider.analyze(path, doc, self.documents.as_ref())
+	}
+
+	/// `textDocument/hover`: type/info popup at a position.
+	pub(super) fn hover(&self, params: HoverParams) -> Option<Hover> {
+		let uri = &params.text_document_position_params.text_document.uri;
+		let position = params.text_document_position_params.position;
+		let path = CanonicalPath::from_uri(uri)?;
+		let doc = self.documents.get(&path)?.clone();
+		let lsp_pos = position.into();
+		let analysis = self.analyze_document(&path, &doc);
+		handlers::hover(&doc, lsp_pos, &analysis)
+	}
+
+	/// `textDocument/definition`: local bindings, import targets, and
+	/// specific fields inside an imported file.
+	pub(super) fn goto_definition(
+		&self,
+		params: GotoDefinitionParams,
+	) -> Option<GotoDefinitionResponse> {
+		let uri = &params.text_document_position_params.text_document.uri;
+		let position = params.text_document_position_params.position;
+		let path = CanonicalPath::from_uri(uri)?;
+		let doc = self.documents.get(&path)?.clone();
+		let lsp_pos = position.into();
+
+		let result = handlers::goto_definition(&doc, lsp_pos)?;
+		match result {
+			handlers::DefinitionResult::Local(range) => {
+				Some(GotoDefinitionResponse::Scalar(Location {
+					uri: uri.clone(),
+					range,
+				}))
+			}
+			handlers::DefinitionResult::Import(import_path) => {
+				let resolved = self.resolve_import_path(&path, &import_path)?;
+				Some(GotoDefinitionResponse::Scalar(Location {
+					uri: resolved.to_uri(),
+					// Whole-file import: jump to the start of the target file.
+					range: lsp_types::Range::default(),
+				}))
+			}
+			handlers::DefinitionResult::ImportField {
+				path: import_path,
+				fields,
+			} => {
+				let resolved = self.resolve_import_path(&path, &import_path)?;
+				// Fall back to the start of the file if the field chain
+				// cannot be located in the imported document.
+				let range = self
+					.find_field_in_file(&resolved, &fields)
+					.unwrap_or_default();
+				Some(GotoDefinitionResponse::Scalar(Location {
+					uri: resolved.to_uri(),
+					range,
+				}))
+			}
+		}
+	}
+
+	/// `textDocument/inlayHint`: returns `None` (not an empty list) when
+	/// there is nothing to show.
+	pub(super) fn inlay_hints(&self, params: InlayHintParams) -> Option<Vec<InlayHint>> {
+		let uri = &params.text_document.uri;
+		let path = CanonicalPath::from_uri(uri)?;
+		let doc = self.documents.get(&path)?.clone();
+		let analysis = self.analyze_document(&path, &doc);
+		let hints = handlers::inlay_hints(&doc, &analysis, params.range);
+		if hints.is_empty() {
+			return None;
+		}
+		Some(hints)
+	}
+
+	/// `textDocument/completion`, including import-path completion rooted at
+	/// the configured jpath directories.
+	pub(super) fn completion(&self, params: CompletionParams) -> Option<CompletionResponse> {
+		let uri = &params.text_document_position.text_document.uri;
+		let position = params.text_document_position.position;
+		let path = CanonicalPath::from_uri(uri)?;
+		let doc = self.documents.get(&path)?.clone();
+
+		let lsp_pos = position.into();
+		let analysis = self.analyze_document(&path, &doc);
+		let import_roots = self.config.read().jpath.clone();
+
+		let list = handlers::completion_with_import_roots(
+			&doc,
+			lsp_pos,
+			Some(path.as_path()),
+			&import_roots,
+			&analysis,
+		)?;
+		Some(CompletionResponse::List(list))
+	}
+
+	/// `textDocument/references`: in-file references plus cross-file
+	/// references found in every transitive importer of this document.
+	pub(super) fn references(&self, params: ReferenceParams) -> Option<Vec<Location>> {
+		let uri = &params.text_document_position.text_document.uri;
+		let position = params.text_document_position.position;
+		let path = CanonicalPath::from_uri(uri)?;
+		let doc = self.documents.get(&path)?.clone();
+		let lsp_pos = position.into();
+
+		let include_declaration = params.context.include_declaration;
+		let mut refs = handlers::find_references(&doc, lsp_pos, uri, include_declaration);
+
+		// Drop the graph lock before touching the document manager below.
+		let import_graph = self.import_graph.read();
+		let importers = import_graph.transitive_importers(&path);
+		drop(import_graph);
+
+		let importer_docs: Vec<_> = importers
+			.into_iter()
+			.filter_map(|p| self.documents.get_document(&p).map(|d| (p, d)))
+			.collect();
+		let importer_refs: Vec<_> = importer_docs.iter().map(|(k, v)| (k, v)).collect();
+
+		let cross_refs = handlers::find_cross_file_references(&doc, &path, lsp_pos, &importer_refs);
+		refs.extend(cross_refs);
+
+		if refs.is_empty() {
+			return None;
+		}
+		Some(refs)
+	}
+
+	/// `workspace/symbol`: scans all known documents in parallel via rayon.
+	pub(super) fn workspace_symbol(
+		&self,
+		params: WorkspaceSymbolParams,
+	) -> Option<Vec<SymbolInformation>> {
+		let query = &params.query;
+		let all_symbols: Vec<SymbolInformation> = self
+			.documents
+			.par_iter()
+			.flat_map(|entry| {
+				let uri = entry.key().to_uri();
+				handlers::workspace_symbols_for_document(entry.value(), &uri, query)
+			})
+			.collect();
+
+		if all_symbols.is_empty() {
+			return None;
+		}
+		Some(all_symbols)
+	}
+
+	/// `textDocument/rename`: validates the new name up front, then delegates
+	/// to the cross-file rename handler using the import graph.
+	pub(super) fn rename(&self, params: RenameParams) -> Option<WorkspaceEdit> {
+		let uri = &params.text_document_position.text_document.uri;
+		let position = params.text_document_position.position;
+		let path = CanonicalPath::from_uri(uri)?;
+		let doc = self.documents.get(&path)?;
+
+		// Invalid identifiers are rejected (logged), not applied.
+		let new_name = match SymbolName::new(&params.new_name) {
+			Ok(name) => name,
+			Err(e) => {
+				warn!("rename rejected: {}", e);
+				return None;
+			}
+		};
+
+		let lsp_pos = position.into();
+		let import_graph = self.import_graph.read();
+
+		handlers::rename_cross_file(
+			&doc,
+			lsp_pos,
+			&new_name,
+			uri,
+			&path,
+			&self.documents,
+			&import_graph,
+		)
+	}
+
+	/// `textDocument/codeLens`: returns an empty vec (not an error) for
+	/// unknown or unresolvable documents.
+	pub(super) fn code_lens(&self, params: CodeLensParams) -> Vec<CodeLens> {
+		let uri = &params.text_document.uri;
+		let Some(path) = CanonicalPath::from_uri(uri) else {
+			return Vec::new();
+		};
+		let Some(doc) = self.documents.get(&path) else {
+			return Vec::new();
+		};
+		let doc = doc.clone();
+
+		let config = handlers::CodeLensConfig::all();
+		let analysis = self.analyze_document(&path, &doc);
+		handlers::code_lens(&doc, uri, &config, Some(&analysis))
+	}
+
+	/// `workspace/executeCommand` dispatch for the jrsonnet.* commands.
+	pub(super) fn execute_command(
+		&self,
+		params: ExecuteCommandParams,
+	) -> Option<serde_json::Value> {
+		info!("Execute command: {}", params.command);
+
+		match params.command.as_str() {
+			"jrsonnet.evalFile" => {
+				let uri = params.arguments.first()?.as_str()?;
+				self.execute_eval_file(uri)
+			}
+			"jrsonnet.evalExpression" => {
+				let expr = params.arguments.first()?.as_str()?;
+				let base_uri = params.arguments.get(1).and_then(|v| v.as_str());
+				Some(self.execute_eval_expression(expr, base_uri))
+			}
+			"jrsonnet.findTransitiveImporters" => {
+				let uri = params.arguments.first()?.as_str()?;
+				self.execute_find_transitive_importers(uri)
+			}
+			_ => {
+				warn!("Unknown command: {}", params.command);
+				None
+			}
+		}
+	}
+
+	/// Evaluate a whole file and manifest the result as JSON.
+	///
+	/// Evaluation and manifest errors are reported as `{"error": …}` values
+	/// rather than request failures, so the client can display them inline.
+	fn execute_eval_file(&self, uri: &str) -> Option<serde_json::Value> {
+		use jrsonnet_evaluator::manifest::JsonFormat;
+		use jrsonnet_parser::{SourceFile, SourcePath};
+
+		let uri_parsed: lsp_types::Uri = uri.parse().ok()?;
+		let path = CanonicalPath::from_uri(&uri_parsed)?;
+		let text = self.documents.get_text(&path)?;
+
+		let config = self.config.read();
+		let mut jpath = config.jpath.clone();
+		drop(config);
+		// The file's own directory participates in import resolution, matching
+		// how imports are resolved elsewhere in the server.
+		if let Some(dir) = path.as_path().parent() {
+			jpath.push(dir.to_path_buf());
+		}
+		let state = create_state_with_jpath(&jpath);
+
+		let source_path = SourcePath::new(SourceFile::new(path.as_path().to_path_buf()));
+
+		match state.evaluate_snippet(source_path.to_string(), &text) {
+			Ok(val) => {
+				let json_format = JsonFormat::default();
+				match val.manifest(json_format) {
+					Ok(json_str) => match serde_json::from_str::<serde_json::Value>(&json_str) {
+						Ok(json) => Some(json),
+						Err(e) => {
+							// Manifest produced non-JSON output (e.g. a raw
+							// string): return it verbatim instead of failing.
+							warn!("Failed to parse manifest result as JSON: {}", e);
+							Some(serde_json::Value::String(json_str))
+						}
+					},
+					Err(e) => {
+						warn!("Failed to manifest: {}", e);
+						Some(serde_json::json!({
+							"error": format!("Manifest error: {}", e.error())
+						}))
+					}
+				}
+			}
+			Err(e) => {
+				warn!("Evaluation failed: {}", e);
+				Some(serde_json::json!({
+					"error": format!("Evaluation error: {}", e.error())
+				}))
+			}
+		}
+	}
+
+	/// Evaluate an ad-hoc expression, optionally resolving imports relative
+	/// to `base_uri`'s directory in addition to the configured jpath.
+	fn execute_eval_expression(&self, expr: &str, base_uri: Option<&str>) -> serde_json::Value {
+		use jrsonnet_evaluator::manifest::JsonFormat;
+
+		let config = self.config.read();
+		let mut jpath = config.jpath.clone();
+		drop(config);
+
+		if let Some(uri) = base_uri {
+			if let Ok(uri_parsed) = uri.parse::<lsp_types::Uri>() {
+				if let Some(path) = CanonicalPath::from_uri(&uri_parsed) {
+					if let Some(dir) = path.as_path().parent() {
+						jpath.push(dir.to_path_buf());
+					}
+				}
+			}
+		}
+		let state = create_state_with_jpath(&jpath);
+
+		match state.evaluate_snippet("".to_string(), expr) {
+			Ok(val) => {
+				let json_format = JsonFormat::default();
+				match val.manifest(json_format) {
+					Ok(json_str) => match serde_json::from_str::<serde_json::Value>(&json_str) {
+						Ok(json) => json,
+						Err(e) => {
+							warn!("Failed to parse manifest result as JSON: {}", e);
+							serde_json::Value::String(json_str)
+						}
+					},
+					Err(e) => serde_json::json!({
+						"error": format!("Manifest error: {}", e.error())
+					}),
+				}
+			}
+			Err(e) => serde_json::json!({
+				"error": format!("Evaluation error: {}", e.error())
+			}),
+		}
+	}
+
+	/// Report every transitive importer of `uri` as a JSON payload.
+	fn execute_find_transitive_importers(&self, uri: &str) -> Option<serde_json::Value> {
+		let uri_parsed: lsp_types::Uri = uri.parse().ok()?;
+		let path = CanonicalPath::from_uri(&uri_parsed)?;
+
+		let import_graph = self.import_graph.read();
+		let importers = import_graph.transitive_importers(&path);
+		drop(import_graph);
+
+		let importer_uris: Vec<String> = importers.iter().map(|p| p.to_uri().to_string()).collect();
+
+		Some(serde_json::json!({
+			"file": uri,
+			"transitiveImporters": importer_uris
+		}))
+	}
+
+	/// Resolve an import string to a canonical on-disk path.
+	///
+	/// NOTE(review): this duplicates the resolver closure built during import
+	/// graph updates in server.rs — consider extracting a shared helper so
+	/// the two search orders cannot drift apart.
+	///
+	/// Search order:
+	/// 1. Relative to the importing file's directory
+	/// 2. Each directory in jpath (in order)
+	fn resolve_import_path(&self, from: &CanonicalPath, import: &str) -> Option<CanonicalPath> {
+		let parent = from.as_path().parent()?;
+		let resolved = parent.join(import);
+		if let Ok(canonical) = resolved.canonicalize() {
+			return Some(CanonicalPath::new(canonical));
+		}
+
+		let jpath = self.config.read().jpath.clone();
+		for jpath_dir in &jpath {
+			let resolved = jpath_dir.join(import);
+			if let Ok(canonical) = resolved.canonicalize() {
+				return Some(CanonicalPath::new(canonical));
+			}
+		}
+
+		None
+	}
+
+	/// For a field chain like `foo.bar`, this finds the `bar` field
+	/// inside the `foo` field of the top-level object.
+	///
+	/// Returns `None` when the file is not a top-level object literal, when a
+	/// link in the chain is missing, or when an intermediate field's value is
+	/// not an object literal (nothing further to descend into).
+	fn find_field_in_file(
+		&self,
+		path: &CanonicalPath,
+		fields: &[String],
+	) -> Option<lsp_types::Range> {
+		use jrsonnet_rowan_parser::{
+			nodes::{Member, ObjBody},
+			AstNode,
+		};
+
+		// Prefer the in-memory document; fall back to reading the file from
+		// disk for import targets that are not open in the editor.
+		let doc = if let Some(d) = self.documents.get(path) {
+			d.clone()
+		} else {
+			let content = std::fs::read_to_string(path.as_path()).ok()?;
+			Document::new(content, DocVersion::new(0))
+		};
+
+		let ast = doc.ast();
+		let text = doc.text();
+		let line_index = doc.line_index();
+		let expr = ast.expr()?;
+
+		// Start from the top-level object body; anything else is not navigable.
+		let mut current_obj_body = expr.expr_base().and_then(|base| {
+			if let jrsonnet_rowan_parser::nodes::ExprBase::ExprObject(obj) = base {
+				obj.obj_body()
+			} else {
+				None
+			}
+		})?;
+
+		for (i, field_name) in fields.iter().enumerate() {
+			let is_last = i == fields.len() - 1;
+			if let ObjBody::ObjBodyMemberList(members) = &current_obj_body {
+				let mut found = false;
+				for member in members.members() {
+					let (name, field_range, value_expr) = match &member {
+						Member::MemberFieldNormal(field) => {
+							let name_node = field.field_name()?;
+							let name_str = extract_field_name_string(&name_node)?;
+							let range = name_node.syntax().text_range();
+							let value = field.expr();
+							(name_str, range, value)
+						}
+						Member::MemberFieldMethod(method) => {
+							let name_node = method.field_name()?;
+							let name_str = extract_field_name_string(&name_node)?;
+							let range = name_node.syntax().text_range();
+							// Methods carry no object value to descend into.
+							(name_str, range, None)
+						}
+						_ => continue,
+					};
+
+					if name == *field_name {
+						if is_last {
+							return Some(to_lsp_range(field_range, line_index, text));
+						}
+
+						if let Some(value) = value_expr {
+							if let Some(base) = value.expr_base() {
+								if let jrsonnet_rowan_parser::nodes::ExprBase::ExprObject(obj) =
+									base
+								{
+									if let Some(body) = obj.obj_body() {
+										current_obj_body = body;
+										found = true;
+										break;
+									}
+								}
+							}
+						}
+						// Matched a non-final field whose value is not an
+						// object literal: the chain cannot continue.
+						return None;
+					}
+				}
+				if !found && !is_last {
+					return None;
+				}
+			} else {
+				return None;
+			}
+		}
+
+		None
+	}
+}
+
+/// Extract the literal text of a fixed (non-computed) field name.
+///
+/// Dynamic (`[expr]:`) field names cannot be resolved statically → `None`.
+fn extract_field_name_string(name: &jrsonnet_rowan_parser::nodes::FieldName) -> Option<String> {
+	use jrsonnet_rowan_parser::{nodes::FieldName, AstToken};
+
+	match name {
+		FieldName::FieldNameFixed(fixed) => {
+			if let Some(name_node) = fixed.id() {
+				if let Some(ident) = name_node.ident_lit() {
+					return Some(ident.text().to_string());
+				}
+			}
+			if let Some(text) = fixed.text() {
+				let s = text.syntax().text();
+				// NOTE(review): trim_start_matches/trim_end_matches strip *all*
+				// leading/trailing quote characters, not a single matching pair,
+				// so a quoted name that itself begins or ends with a quote
+				// character is over-trimmed — consider stripping exactly one
+				// matching pair instead.
+				let name = s
+					.trim_start_matches('"')
+					.trim_start_matches('\'')
+					.trim_end_matches('"')
+					.trim_end_matches('\'');
+				return Some(name.to_string());
+			}
+			None
+		}
+		FieldName::FieldNameDynamic(_) => None,
+	}
+}
diff --git a/crates/jrsonnet-lsp/tests/cross_file_tests.rs b/crates/jrsonnet-lsp/tests/cross_file_tests.rs
new file mode 100644
index 00000000..b806b576
--- /dev/null
+++ b/crates/jrsonnet-lsp/tests/cross_file_tests.rs
@@ -0,0 +1,687 @@
+//! Cross-file analysis tests.
+//!
+//! Tests import graph operations, type caching across files, and cache invalidation.
+//! These tests validate that the global `TyStore` work enables proper cross-file type sharing.
+
+use std::{
+	fs,
+	path::{Path, PathBuf},
+	sync::Arc,
+};
+
+use jrsonnet_lsp_document::{CanonicalPath, DocVersion, Document};
+use jrsonnet_lsp_import::ImportGraph;
+use jrsonnet_lsp_inference::{analyze_and_cache, new_shared_cache, TypeAnalysis, TypeCache};
+use jrsonnet_lsp_types::{GlobalTyStore, Ty};
+use tempfile::TempDir;
+
+/// Helper to create a test file in the temp directory.
+/// Intermediate directories in `name` are created as needed.
+fn write_file(dir: &TempDir, name: &str, content: &str) -> PathBuf {
+	let path = dir.path().join(name);
+	if let Some(parent) = path.parent() {
+		fs::create_dir_all(parent).unwrap();
+	}
+	fs::write(&path, content).unwrap();
+	path
+}
+
+/// Helper to get canonical path from a temp file.
+fn canonical_path(path: &PathBuf) -> CanonicalPath {
+	CanonicalPath::new(path.canonicalize().unwrap())
+}
+
+/// Create a resolver function for the given base directory.
+/// Relative imports resolve against `base_dir`; absolute paths pass through.
+fn make_resolver(base_dir: &Path) -> impl Fn(&str) -> Option<CanonicalPath> + '_ {
+	move |import_path: &str| {
+		let resolved = if Path::new(import_path).is_absolute() {
+			PathBuf::from(import_path)
+		} else {
+			base_dir.join(import_path)
+		};
+		resolved.canonicalize().ok().map(CanonicalPath::new)
+	}
+}
+
+mod import_graph_tests {
+	use super::*;
+
+	#[test]
+	fn test_deep_import_chain() {
+		// Create a chain: file1 -> file2 -> file3 -> file4 -> file5 -> file6
+		let tmp = TempDir::new().unwrap();
+		let base_dir = tmp.path();
+
+		let file6 = write_file(&tmp, "file6.jsonnet", "{ value: 6 }");
+		let file5 = write_file(
+			&tmp,
+			"file5.jsonnet",
+			"local f6 = import 'file6.jsonnet'; { value: 5, next: f6 }",
+		);
+		let file4 = write_file(
+			&tmp,
+			"file4.jsonnet",
+			"local f5 = import 'file5.jsonnet'; { value: 4, next: f5 }",
+		);
+		let file3 = write_file(
+			&tmp,
+			"file3.jsonnet",
+			"local f4 = import 'file4.jsonnet'; { value: 3, next: f4 }",
+		);
+		let file2 = write_file(
+			&tmp,
+			"file2.jsonnet",
+			"local f3 = import 'file3.jsonnet'; { value: 2, next: f3 }",
+		);
+		let file1 = write_file(
+			&tmp,
+			"file1.jsonnet",
+			"local f2 = import 'file2.jsonnet'; { value: 1, next: f2 }",
+		);
+
+		let mut graph = ImportGraph::new();
+		let resolver = make_resolver(base_dir);
+
+		// Parse all files
+		for file in [&file1, &file2, &file3, &file4, &file5, &file6] {
+			let content = fs::read_to_string(file).unwrap();
+			let doc = Document::new(content, DocVersion::new(1));
+			let path = canonical_path(file);
+			graph.update_file(&path, &doc, &resolver);
+		}
+
+		// file1 should have direct imports: file2
+		let path1 = canonical_path(&file1);
+		let imports1 = graph.imports(&path1);
+		assert_eq!(imports1.len(), 1, "file1 should import exactly one file");
+
+		// file6 should have no imports
+		let path6 = canonical_path(&file6);
+		let imports6 = graph.imports(&path6);
+		assert!(imports6.is_empty(), "file6 should have no imports");
+
+		// Transitive importers of file6 should include file1-5
+		let importers = graph.transitive_importers(&path6);
+		assert!(
+			importers.len() >= 5,
+			"file6 should have at least 5 transitive importers"
+		);
+	}
+
+	#[test]
+	fn test_diamond_dependency() {
+		// Create a diamond: A imports B and C, both B and C import D
+		//     A
+		//    / \
+		//   B   C
+		//    \ /
+		//     D
+		let tmp = TempDir::new().unwrap();
+		let base_dir = tmp.path();
+
+		let file_d = write_file(&tmp, "d.jsonnet", "{ shared: 'value' }");
+		let file_b = write_file(
+			&tmp,
+			"b.jsonnet",
+			"local d = import 'd.jsonnet'; { b_field: d.shared }",
+		);
+		let file_c = write_file(
+			&tmp,
+			"c.jsonnet",
+			"local d = import 'd.jsonnet'; { c_field: d.shared }",
+		);
+		let file_a = write_file(
+			&tmp,
+			"a.jsonnet",
+			r"
+			local b = import 'b.jsonnet';
+			local c = import 'c.jsonnet';
+			{ a_field: b.b_field + c.c_field }
+			",
+		);
+
+		let mut graph = ImportGraph::new();
+		let resolver = make_resolver(base_dir);
+
+		for file in [&file_d, &file_b, &file_c, &file_a] {
+			let content = fs::read_to_string(file).unwrap();
+			let doc = Document::new(content, DocVersion::new(1));
+			let path = canonical_path(file);
+			graph.update_file(&path, &doc, &resolver);
+		}
+
+		// A should import B and C
+		let path_a = canonical_path(&file_a);
+		let imports_a = graph.imports(&path_a);
+		assert_eq!(
+			imports_a.len(),
+			2,
+			"A should import exactly 2 files (B and C)"
+		);
+
+		// D should be imported by both B and C
+		let path_d = canonical_path(&file_d);
+		let importers_d = graph.transitive_importers(&path_d);
+		// D's transitive importers: B, C, A
+		assert_eq!(
+			importers_d.len(),
+			3,
+			"D should have 3 transitive importers (B, C, A)"
+		);
+	}
+
+	#[test]
+	fn test_import_graph_removal() {
+		let tmp = TempDir::new().unwrap();
+		let base_dir = tmp.path();
+
+		let lib = write_file(&tmp, "lib.jsonnet", "{ helper: 42 }");
+		let main = write_file(
+			&tmp,
+			"main.jsonnet",
+			"local lib = import 'lib.jsonnet'; lib.helper",
+		);
+
+		let mut graph = ImportGraph::new();
+		let resolver = make_resolver(base_dir);
+
+		// Add both files
+		let lib_content = fs::read_to_string(&lib).unwrap();
+		let lib_doc = Document::new(lib_content, DocVersion::new(1));
+		let lib_path = canonical_path(&lib);
+		graph.update_file(&lib_path, &lib_doc, &resolver);
+
+		let main_content = fs::read_to_string(&main).unwrap();
+		let main_doc = Document::new(main_content, DocVersion::new(1));
+		let main_path = canonical_path(&main);
+		graph.update_file(&main_path, &main_doc, &resolver);
+
+		// Verify import relationship
+		assert_eq!(graph.imports(&main_path).len(), 1);
+
+		// Remove lib
+		graph.remove_file(&lib_path);
+
+		// Graph should handle missing targets gracefully
+		let imports = graph.imports(&main_path);
+		// main still imports lib (by path), even if lib is removed from graph
+		assert_eq!(imports.len(), 1);
+	}
+
+	#[test]
+	fn test_multiple_imports_same_file() {
+		// Test that importing the same file from multiple locations is tracked correctly
+		let tmp = TempDir::new().unwrap();
+		let base_dir = tmp.path();
+
+		let shared = write_file(&tmp, "shared.jsonnet", "{ x: 1 }");
+		let user1 = write_file(
+			&tmp,
+			"user1.jsonnet",
+			"local s = import 'shared.jsonnet'; s.x",
+		);
+		let user2 = write_file(
+			&tmp,
+			"user2.jsonnet",
+			"local s = import 'shared.jsonnet'; s.x + 1",
+		);
+		let user3 = write_file(
+			&tmp,
+			"user3.jsonnet",
+			"local s = import 'shared.jsonnet'; s.x * 2",
+		);
+
+		let mut graph = ImportGraph::new();
+		let resolver = make_resolver(base_dir);
+
+		for file in [&shared, &user1, &user2, &user3] {
+			let content = fs::read_to_string(file).unwrap();
+			let doc = Document::new(content, DocVersion::new(1));
+			let path = canonical_path(file);
+			graph.update_file(&path, &doc, &resolver);
+		}
+
+		// shared should be imported by user1, user2, user3
+		let shared_path = canonical_path(&shared);
+		let importers = graph.transitive_importers(&shared_path);
+		assert_eq!(importers.len(), 3, "shared should have 3 importers");
+	}
+}
+
+mod type_cache_tests {
+	use super::*;
+
+	#[test]
+	fn test_cache_basic_types() {
+		let global = Arc::new(GlobalTyStore::new());
+		let mut cache = TypeCache::new(Arc::clone(&global));
+		let tmp = TempDir::new().unwrap();
+
+		let file1 = write_file(&tmp, "number.jsonnet", "42");
+		let file2 = write_file(&tmp, "string.jsonnet", "\"hello\"");
+		let file3 = write_file(&tmp, "bool.jsonnet", "true");
+
+		let path1 = canonical_path(&file1);
+		let path2 = canonical_path(&file2);
+		let path3 = canonical_path(&file3);
+
+		// Cache different types
+		cache.update(&path1, Ty::NUMBER, 1);
+		cache.update(&path2, Ty::STRING, 1);
+		cache.update(&path3, Ty::BOOL, 1);
+
+		// Retrieve and verify
+		assert_eq!(cache.get(&path1), Some(Ty::NUMBER));
+		assert_eq!(cache.get(&path2), Some(Ty::STRING));
+		assert_eq!(cache.get(&path3), Some(Ty::BOOL));
+	}
+
+	#[test]
+	fn test_cache_version_tracking() {
+		let global = Arc::new(GlobalTyStore::new());
+		let mut cache = TypeCache::new(Arc::clone(&global));
+		let tmp = TempDir::new().unwrap();
+
+		let file = write_file(&tmp, "test.jsonnet", "1");
+		let path = canonical_path(&file);
+
+		// Initial version
+		cache.update(&path, Ty::NUMBER, 1);
+		assert!(cache.is_up_to_date(&path, 1));
+		assert!(!cache.is_up_to_date(&path, 2));
+
+		// Update version
+		cache.update(&path, Ty::STRING, 2);
+		assert!(!cache.is_up_to_date(&path, 1));
+		assert!(cache.is_up_to_date(&path, 2));
+	}
+
+	#[test]
+	fn test_cache_invalidation() {
+		let global = Arc::new(GlobalTyStore::new());
+		let mut cache = TypeCache::new(Arc::clone(&global));
+		let tmp = TempDir::new().unwrap();
+
+		let file1 = write_file(&tmp, "a.jsonnet", "1");
+		let file2 = write_file(&tmp, "b.jsonnet", "2");
+		let file3 = write_file(&tmp, "c.jsonnet", "3");
+
+		let path1 = canonical_path(&file1);
+		let path2 = canonical_path(&file2);
+		let path3 = canonical_path(&file3);
+
+		cache.update(&path1, Ty::NUMBER, 1);
+		cache.update(&path2, Ty::NUMBER, 1);
+		cache.update(&path3, Ty::NUMBER, 1);
+
+		assert_eq!(cache.len(), 3);
+
+		// Invalidate one; its neighbors must be untouched
+		cache.invalidate(&path2);
+		assert_eq!(cache.len(), 2);
+		cache.get(&path1).expect("path1 should still be cached");
+		assert_eq!(cache.get(&path2), None);
+		cache.get(&path3).expect("path3 should still be cached");
+	}
+
+	#[test]
+	fn test_cache_invalidate_many() {
+		let global = Arc::new(GlobalTyStore::new());
+		let mut cache = TypeCache::new(Arc::clone(&global));
+		let tmp = TempDir::new().unwrap();
+
+		let files: Vec<_> = (0..10)
+			.map(|i| {
+				let file = write_file(&tmp, &format!("file{i}.jsonnet"), &format!("{i}"));
+				canonical_path(&file)
+			})
+			.collect();
+
+		// Cache all
+		for path in &files {
+			cache.update(path, Ty::NUMBER, 1);
+		}
+		assert_eq!(cache.len(), 10);
+
+		// Invalidate half (every even-indexed entry)
+		let to_invalidate: Vec<_> = files.iter().step_by(2).cloned().collect();
+		cache.invalidate_many(to_invalidate);
+
+		// Should have 5 remaining
+		assert_eq!(cache.len(), 5);
+	}
+}
+
+mod cross_file_type_tests {
+	use super::*;
+
+	#[test]
+	fn test_analyze_and_cache_basic() {
+		let global = Arc::new(GlobalTyStore::new());
+		let cache = new_shared_cache(Arc::clone(&global));
+		let tmp = TempDir::new().unwrap();
+
+		let file = write_file(&tmp, "number.jsonnet", "42");
+		let path = canonical_path(&file);
+
+		let content = fs::read_to_string(&file).unwrap();
+		let doc = Document::new(content, DocVersion::new(1));
+
+		// Analyze and cache
+		let ty = analyze_and_cache(&path, &doc, &cache);
+		assert_eq!(ty, Ty::NUMBER);
+
+		// Should be cached
+		let cached = cache.read().get(&path);
+		assert_eq!(cached, Some(Ty::NUMBER));
+	}
+
+	#[test]
+	fn test_analyze_and_cache_different_types() {
+		let global = Arc::new(GlobalTyStore::new());
+		let cache = new_shared_cache(Arc::clone(&global));
+		let tmp = TempDir::new().unwrap();
+
+		// Test various Jsonnet literal types
+		let test_cases = [
+			("number.jsonnet", "42", Ty::NUMBER),
+			("string.jsonnet", "\"hello\"", Ty::STRING),
+			("bool.jsonnet", "true", Ty::TRUE), // Literal boolean type
+			("null.jsonnet", "null", Ty::NULL),
+		];
+
+		for (name, content, expected_ty) in test_cases {
+			let file = write_file(&tmp, name, content);
+			let path = canonical_path(&file);
+			let doc = Document::new(content.to_string(), DocVersion::new(1));
+
+			let ty = analyze_and_cache(&path, &doc, &cache);
+			assert_eq!(ty, expected_ty, "Type mismatch for {}", name);
+		}
+	}
+
+	#[test]
+	fn test_cache_hit_on_same_version() {
+		let global = Arc::new(GlobalTyStore::new());
+		let cache = new_shared_cache(Arc::clone(&global));
+		let tmp = TempDir::new().unwrap();
+
+		let file = write_file(&tmp, "test.jsonnet", "42");
+		let path = canonical_path(&file);
+		let doc = Document::new("42".to_string(), DocVersion::new(1));
+
+		// First analysis
+		let ty1 = analyze_and_cache(&path, &doc, &cache);
+
+		// Second analysis with same version should hit cache
+		let ty2 = analyze_and_cache(&path, &doc, &cache);
+
+		assert_eq!(ty1, ty2);
+		assert_eq!(ty1, Ty::NUMBER);
+	}
+
+	#[test]
+	fn test_cache_miss_on_new_version() {
+		let global = Arc::new(GlobalTyStore::new());
+		let cache = new_shared_cache(Arc::clone(&global));
+		let tmp = TempDir::new().unwrap();
+
+		let file = write_file(&tmp, "test.jsonnet", "42");
+		let path = canonical_path(&file);
+
+		// Version 1: number
+		let doc1 = Document::new("42".to_string(), DocVersion::new(1));
+		let ty1 = analyze_and_cache(&path, &doc1, &cache);
+		assert_eq!(ty1, Ty::NUMBER);
+
+		// Version 2: string - should re-analyze
+		let doc2 = Document::new("\"hello\"".to_string(), DocVersion::new(2));
+		let ty2 = analyze_and_cache(&path, &doc2, &cache);
+		assert_eq!(ty2, Ty::STRING);
+	}
+
+	#[test]
+	fn test_shared_global_store() {
+		let global = Arc::new(GlobalTyStore::new());
+		let cache = new_shared_cache(Arc::clone(&global));
+		let tmp = TempDir::new().unwrap();
+
+		// Analyze multiple files
+		let file1 = write_file(&tmp, "a.jsonnet", "1");
+		let file2 = write_file(&tmp, "b.jsonnet", "2");
+
+		let path1 = canonical_path(&file1);
+		let path2 = canonical_path(&file2);
+
+		let doc1 = Document::new("1".to_string(), DocVersion::new(1));
+		let doc2 = Document::new("2".to_string(), DocVersion::new(1));
+
+		let ty1 = analyze_and_cache(&path1, &doc1, &cache);
+		let ty2 = analyze_and_cache(&path2, &doc2, &cache);
+
+		// Both should be NUMBER
+		assert_eq!(ty1, Ty::NUMBER);
+		assert_eq!(ty2, Ty::NUMBER);
+
+		// They should be the same type ID (from global store)
+		assert_eq!(ty1, ty2);
+	}
+}
+
+mod transitive_update_tests {
+	use super::*;
+
+	#[test]
+	fn test_transitive_invalidation_chain() {
+		// When a base file changes, all transitive dependents should be invalidated
+		let tmp = TempDir::new().unwrap();
+		let base_dir = tmp.path();
+
+		let base = write_file(&tmp, "base.jsonnet", "{ x: 1 }");
+		let mid = write_file(&tmp, "mid.jsonnet", "local b = import 'base.jsonnet'; b");
+		let top = write_file(&tmp, "top.jsonnet", "local m = import 'mid.jsonnet'; m");
+
+		let mut graph = ImportGraph::new();
+		let resolver = make_resolver(base_dir);
+		let global = Arc::new(GlobalTyStore::new());
+		let mut cache = TypeCache::new(Arc::clone(&global));
+
+		// Build graph
+		for file in [&base, &mid, &top] {
+			let content = fs::read_to_string(file).unwrap();
+			let doc = Document::new(content, DocVersion::new(1));
+			let path = canonical_path(file);
+			graph.update_file(&path, &doc, &resolver);
+			cache.update(&path, Ty::ANY, 1); // Placeholder type
+		}
+
+		let base_path = canonical_path(&base);
+		let mid_path = canonical_path(&mid);
+		let top_path = canonical_path(&top);
+
+		// All should be cached
+		cache.get(&base_path).expect("base should be cached");
+		cache.get(&mid_path).expect("mid should be cached");
+		cache.get(&top_path).expect("top should be cached");
+
+		// Simulate base file change - need to invalidate transitive importers
+		let importers = graph.transitive_importers(&base_path);
+		cache.invalidate(&base_path);
+		cache.invalidate_many(importers);
+
+		// All should be invalidated
+		assert_eq!(cache.get(&base_path), None);
+		assert_eq!(cache.get(&mid_path), None);
+		assert_eq!(cache.get(&top_path), None);
+	}
+
+	#[test]
+	fn test_partial_invalidation() {
+		// When a leaf file changes, only its importers should be affected
+		let tmp = TempDir::new().unwrap();
+		let base_dir = tmp.path();
+
+		let lib1 = write_file(&tmp, "lib1.jsonnet", "{ a: 1 }");
+		let lib2 = write_file(&tmp, "lib2.jsonnet", "{ b: 2 }");
+		let main = write_file(&tmp, "main.jsonnet", "local l1 = import 'lib1.jsonnet'; l1");
+
+		let mut graph = ImportGraph::new();
+		let resolver = make_resolver(base_dir);
+		let global = Arc::new(GlobalTyStore::new());
+		let mut cache = TypeCache::new(Arc::clone(&global));
+
+		// Build graph - main imports lib1, not lib2
+		for file in [&lib1, &lib2, &main] {
+			let content = fs::read_to_string(file).unwrap();
+			let doc = Document::new(content, DocVersion::new(1));
+			let path = canonical_path(file);
+			graph.update_file(&path, &doc, &resolver);
+			cache.update(&path, Ty::ANY, 1);
+		}
+
+		let lib1_path = canonical_path(&lib1);
+		let lib2_path = canonical_path(&lib2);
+		let main_path = canonical_path(&main);
+
+		// Change lib1 - should invalidate lib1 and main, but not lib2
+		let importers = graph.transitive_importers(&lib1_path);
+		cache.invalidate(&lib1_path);
+		cache.invalidate_many(importers);
+
+		assert_eq!(cache.get(&lib1_path), None);
+		assert_eq!(cache.get(&main_path), None);
+		cache.get(&lib2_path).expect("lib2 should be unchanged");
+	}
+
+	#[test]
+	fn test_diamond_invalidation() {
+		// When D changes in A -> B,C -> D diamond, all should be invalidated
+		let tmp = TempDir::new().unwrap();
+		let base_dir = tmp.path();
+
+		let d = write_file(&tmp, "d.jsonnet", "{ shared: 1 }");
+		let b = write_file(&tmp, "b.jsonnet", "local d = import 'd.jsonnet'; d");
+		let c = write_file(&tmp, "c.jsonnet", "local d = import 'd.jsonnet'; d");
+		let a = write_file(
+			&tmp,
+			"a.jsonnet",
+			r"
+			local b = import 'b.jsonnet';
+			local c = import 'c.jsonnet';
+			{ b: b, c: c }
+			",
+		);
+
+		let mut graph = ImportGraph::new();
+		let resolver = make_resolver(base_dir);
+		let global = Arc::new(GlobalTyStore::new());
+		let mut cache = TypeCache::new(Arc::clone(&global));
+
+		for file in [&d, &b, &c, &a] {
+			let content = fs::read_to_string(file).unwrap();
+			let doc = Document::new(content, DocVersion::new(1));
+			let path = canonical_path(file);
+			graph.update_file(&path, &doc, &resolver);
+			cache.update(&path, Ty::ANY, 1);
+		}
+
+		let d_path = canonical_path(&d);
+
+		// Change D
+		let importers = graph.transitive_importers(&d_path);
+		cache.invalidate(&d_path);
+		cache.invalidate_many(importers);
+
+		// All should be invalidated (D is transitively imported by all)
+		assert!(cache.get(&canonical_path(&d)).is_none());
+		assert!(cache.get(&canonical_path(&b)).is_none());
+		assert!(cache.get(&canonical_path(&c)).is_none());
+		assert!(cache.get(&canonical_path(&a)).is_none());
+	}
+}
+
+mod type_analysis_imports_tests {
+	use super::*;
+
+	#[test]
+	fn test_analyze_object_type() {
+		let global = Arc::new(GlobalTyStore::new());
+		let tmp = TempDir::new().unwrap();
+
+		let file = write_file(&tmp, "obj.jsonnet", "{ a: 1, b: 'hello', c: true }");
+		let content = fs::read_to_string(&file).unwrap();
+		let doc = Document::new(content, DocVersion::new(1));
+
+		let analysis = TypeAnalysis::analyze_with_global(&doc, Arc::clone(&global));
+		let ty = analysis.document_type();
+
+		// Should be an object type (not primitive)
+		assert_ne!(ty, Ty::NUMBER);
+		assert_ne!(ty, Ty::STRING);
+		assert_ne!(ty, Ty::BOOL);
+		assert_ne!(ty, Ty::NULL);
+	}
+
+	#[test]
+	fn test_analyze_array_type() {
+		let global = Arc::new(GlobalTyStore::new());
+		let tmp = TempDir::new().unwrap();
+
+		let file = write_file(&tmp, "arr.jsonnet", "[1, 2, 3]");
+		let content = fs::read_to_string(&file).unwrap();
+		let doc = Document::new(content, DocVersion::new(1));
+
+		let analysis = TypeAnalysis::analyze_with_global(&doc, Arc::clone(&global));
+		let ty = analysis.document_type();
+
+		// Should be an array type (not primitive)
+		assert_ne!(ty, Ty::NUMBER);
+		assert_ne!(ty, Ty::STRING);
+	}
+
+	#[test]
+	fn test_analyze_function_type() {
+		let global = Arc::new(GlobalTyStore::new());
+		let tmp = TempDir::new().unwrap();
+
+		let file = write_file(&tmp, "func.jsonnet", "function(x) x + 1");
+		let content = fs::read_to_string(&file).unwrap();
+		let doc = Document::new(content, DocVersion::new(1));
+
+		let analysis = TypeAnalysis::analyze_with_global(&doc, Arc::clone(&global));
+		let ty = analysis.document_type();
+
+		// Should be a function type (not primitive)
+		assert_ne!(ty, Ty::NUMBER);
+		assert_ne!(ty, Ty::STRING);
+	}
+
+	#[test]
+	fn test_analyze_local_binding() {
+		let global = Arc::new(GlobalTyStore::new());
+		let tmp = TempDir::new().unwrap();
+
+		let file = write_file(&tmp, "local.jsonnet", "local x = 42; x");
+		let content = fs::read_to_string(&file).unwrap();
+		let doc = Document::new(content, DocVersion::new(1));
+
+		let analysis = TypeAnalysis::analyze_with_global(&doc, Arc::clone(&global));
+		let ty = analysis.document_type();
+
+		assert_eq!(ty, Ty::NUMBER);
+	}
+
+	#[test]
+	fn test_analyze_conditional() {
+		let global =
Arc::new(GlobalTyStore::new()); + let tmp = TempDir::new().unwrap(); + + let file = write_file(&tmp, "cond.jsonnet", "if true then 1 else 2"); + let content = fs::read_to_string(&file).unwrap(); + let doc = Document::new(content, DocVersion::new(1)); + + let analysis = TypeAnalysis::analyze_with_global(&doc, Arc::clone(&global)); + let ty = analysis.document_type(); + + assert_eq!(ty, Ty::NUMBER); + } +} diff --git a/crates/jrsonnet-lsp/tests/e2e_annotated_tests.rs b/crates/jrsonnet-lsp/tests/e2e_annotated_tests.rs new file mode 100644 index 00000000..fb084c09 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/e2e_annotated_tests.rs @@ -0,0 +1,202 @@ +//! End-to-end tests using the annotation-based testing framework. +//! +//! These tests use embedded annotations to verify LSP behavior at specific positions. +//! +//! The column alignment is critical: the `^` marker's position after `##` indicates +//! the column on the previous code line being annotated (0-indexed). + +mod framework; + +use framework::{check_definition, check_hover}; + +// ============================================================================ +// Definition and Reference Tests +// ============================================================================ + +#[test] +fn test_local_variable_definition() { + // 'x' is at column 6: l(0)o(1)c(2)a(3)l(4)space(5)x(6) + // Second line 'x' is at column 0 + check_definition( + r"local x = 1; +## ^ def: x +x + 1 +##^ use: x", + ); +} + +#[test] +fn test_multiple_usages() { + // 'x' def at column 6 + // First 'x' use at column 0 + // Second 'x' use at column 4: x(0)space(1)+(2)space(3)x(4) + check_definition( + r"local x = 1; +## ^ def: x +x + x +##^ use: x +## ^ use: x", + ); +} + +#[test] +fn test_function_parameters() { + // 'a' at column 8: local(5)space(5)f(6)((7)a(8) + // 'b' at column 11: ,(9)space(10)b(11) + check_definition( + r"local f(a, b) = a + b; +## ^ def: a +## ^ def: b", + ); +} + +#[test] +fn test_function_param_usage() { + // 'x' def at 
column 10: local(5)space(5)add(3)((9)x(10) + // 'y' def at column 13 + // 'x' use at column 2 (after 2-space indent) + // 'y' use at column 6 + check_definition( + r"local add(x, y) = +## ^ def: x +## ^ def: y + x + y; +## ^ use: x +## ^ use: y +add(1, 2)", + ); +} + +#[test] +fn test_nested_local() { + // 'outer' at column 6 + // 'inner' at column 8 (after 2-space indent) + // 'inner' use at column 2 + check_definition( + r"local outer = +## ^ def: outer + local inner = 1; +## ^ def: inner + inner + 1; +## ^ use: inner +outer", + ); +} + +#[test] +fn test_shadowing_different_scopes() { + // First 'x' at column 6 + // Second 'x' (param) at column 8 + check_definition( + r"local x = 1; +## ^ def: x +local f(x) = +## ^ def: x + x; +x", + ); +} + +#[test] +fn test_object_local() { + // 'helper' def at column 8 (after 2-space indent) + // 'helper' use at column 9: 2 spaces + "value: " = 9 + check_definition( + r"{ + local helper = 42, +## ^ def: helper + value: helper, +## ^ use: helper +}", + ); +} + +#[test] +fn test_for_comprehension_binding() { + // First 'x' at column 1 + // 'x' def (after 'for') at column 7 + check_definition( + r"[x for x in [1,2,3]] +## ^ use: x +## ^ def: x", + ); +} + +// ============================================================================ +// Hover Tests +// ============================================================================ + +#[test] +fn test_hover_number_literal() { + // '42' starts at column 10 + check_hover( + r"local x = 42; +## ^ hover: number", + ); +} + +#[test] +fn test_hover_string_literal() { + // '"hello"' starts at column 10 + check_hover( + r#"local s = "hello"; +## ^ hover: string"#, + ); +} + +#[test] +fn test_hover_boolean() { + // 'true' starts at column 10, LSP infers literal type 'true' + check_hover( + r"local b = true; +## ^ hover: true", + ); +} + +#[test] +fn test_hover_null() { + // 'null' starts at column 10 + check_hover( + r"local n = null; +## ^ hover: null", + ); +} + +#[test] +fn 
test_hover_array() { + // Hover on the variable name 'arr' at definition site shows 'any' + // TODO: Could be improved to show inferred array type + check_hover( + r"local arr = [1, 2, 3]; +## ^ hover: any", + ); +} + +#[test] +fn test_hover_object() { + // Hover on the variable name 'obj' at definition site shows 'any' + // TODO: Could be improved to show inferred object type + check_hover( + r"local obj = { a: 1 }; +## ^ hover: any", + ); +} + +#[test] +fn test_hover_function() { + // Hover on the function name 'f' at column 6 + // Note: Currently infers as 'any' - could be improved to show function type + check_hover( + r"local f(x) = x; +## ^ hover: any", + ); +} + +#[test] +fn test_hover_std_function() { + // 'std' at column 0 - std is an object containing stdlib functions + check_hover( + r"std.length +##^ hover: object", + ); +} diff --git a/crates/jrsonnet-lsp/tests/framework/assertions.rs b/crates/jrsonnet-lsp/tests/framework/assertions.rs new file mode 100644 index 00000000..b74cc778 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/framework/assertions.rs @@ -0,0 +1,467 @@ +//! Test assertion helpers for annotated tests. +//! +//! Provides functions to verify that handler results match annotation expectations. + +use std::sync::Arc; + +use jrsonnet_lsp_document::{ + position_to_offset, token_at_offset, ByteOffset, CharOffset, DocVersion, Document, Line, + LspPosition, +}; +use jrsonnet_lsp_handlers as handlers; +use jrsonnet_lsp_inference::TypeAnalysis; +use jrsonnet_lsp_scope::{find_definition_range, is_variable_reference}; +use jrsonnet_lsp_types::GlobalTyStore; +use jrsonnet_rowan_parser::AstNode; + +use crate::framework::parser::{Annotation, AnnotationKind, ParsedSource}; + +/// Result of verifying annotations against actual behavior. +#[derive(Debug)] +pub struct VerificationResult { + /// Whether all checks passed. + pub passed: bool, + /// List of failures with descriptions. + pub failures: Vec, + /// Number of checks that passed. 
+ pub passed_count: usize, + /// Total number of checks. + pub total_count: usize, +} + +impl VerificationResult { + fn new() -> Self { + Self { + passed: true, + failures: Vec::new(), + passed_count: 0, + total_count: 0, + } + } + + fn add_failure(&mut self, msg: String) { + self.passed = false; + self.failures.push(msg); + self.total_count += 1; + } + + fn add_success(&mut self) { + self.passed_count += 1; + self.total_count += 1; + } +} + +/// Context for running annotated tests. +pub struct TestContext { + pub document: Document, + pub analysis: TypeAnalysis, +} + +impl TestContext { + /// Create a new test context from parsed source. + pub fn new(parsed: &ParsedSource) -> Self { + let document = Document::new(parsed.source.clone(), DocVersion::new(1)); + let global_types = Arc::new(GlobalTyStore::new()); + let analysis = TypeAnalysis::analyze_with_global(&document, Arc::clone(&global_types)); + Self { document, analysis } + } + + /// Get position from line and column. + pub fn position(line: u32, column: u32) -> LspPosition { + LspPosition { + line: Line(line), + character: CharOffset(column), + } + } + + /// Get byte offset from position. + pub fn offset(&self, line: u32, column: u32) -> Option { + let pos = Self::position(line, column); + position_to_offset(self.document.line_index(), pos, self.document.text()) + } +} + +/// Verify all annotations in a parsed source. 
+pub fn verify_annotations(parsed: &ParsedSource) -> VerificationResult { + let ctx = TestContext::new(parsed); + let mut result = VerificationResult::new(); + + for ann in &parsed.annotations { + match &ann.kind { + AnnotationKind::Definition(name) => { + verify_definition(&ctx, ann, name, &mut result); + } + AnnotationKind::Usage(name) => { + verify_usage(&ctx, ann, name, &mut result); + } + AnnotationKind::Hover(expected) => { + verify_hover(&ctx, ann, expected, &mut result); + } + AnnotationKind::Type(expected) => { + verify_type(&ctx, ann, expected, &mut result); + } + AnnotationKind::Error(_expected) => { + // TODO: Implement error verification + result.add_failure(format!( + "{}:{}: error annotations not yet implemented", + ann.line, ann.column + )); + } + AnnotationKind::Completion(expected) => { + verify_completion(&ctx, ann, expected, &mut result); + } + AnnotationKind::NoCompletion(unexpected) => { + verify_no_completion(&ctx, ann, unexpected, &mut result); + } + AnnotationKind::Goto { line, column } => { + verify_goto(&ctx, ann, *line, *column, &mut result); + } + } + } + + result +} + +/// Verify that a definition exists at the annotated position. 
+fn verify_definition( + ctx: &TestContext, + ann: &Annotation, + name: &str, + result: &mut VerificationResult, +) { + let Some(offset) = ctx.offset(ann.line, ann.column) else { + result.add_failure(format!( + "{}:{}: could not convert position to offset", + ann.line, ann.column + )); + return; + }; + + let Some(token) = token_at_offset(ctx.document.ast().syntax(), offset) else { + result.add_failure(format!( + "{}:{}: no token at position (def: {})", + ann.line, ann.column, name + )); + return; + }; + + // Verify the token text matches the expected name + if token.text() != name { + result.add_failure(format!( + "{}:{}: expected definition of '{}', found token '{}'", + ann.line, + ann.column, + name, + token.text() + )); + return; + } + + // Verify this is a definition site (not a usage) + if is_variable_reference(&token) { + result.add_failure(format!( + "{}:{}: expected definition of '{}', but found a usage/reference", + ann.line, ann.column, name + )); + return; + } + + result.add_success(); +} + +/// Verify that a usage/reference exists at the annotated position. 
+fn verify_usage(ctx: &TestContext, ann: &Annotation, name: &str, result: &mut VerificationResult) { + let Some(offset) = ctx.offset(ann.line, ann.column) else { + result.add_failure(format!( + "{}:{}: could not convert position to offset", + ann.line, ann.column + )); + return; + }; + + let Some(token) = token_at_offset(ctx.document.ast().syntax(), offset) else { + result.add_failure(format!( + "{}:{}: no token at position (use: {})", + ann.line, ann.column, name + )); + return; + }; + + // Verify the token text matches + if token.text() != name { + result.add_failure(format!( + "{}:{}: expected usage of '{}', found token '{}'", + ann.line, + ann.column, + name, + token.text() + )); + return; + } + + // Verify this is a variable reference + if !is_variable_reference(&token) { + result.add_failure(format!( + "{}:{}: expected usage/reference of '{}', but token is not a variable reference", + ann.line, ann.column, name + )); + return; + } + + // Verify it has a definition (can be resolved) + if find_definition_range(&token, name).is_none() { + result.add_failure(format!( + "{}:{}: usage of '{}' does not resolve to any definition", + ann.line, ann.column, name + )); + return; + } + + result.add_success(); +} + +/// Verify that hover contains the expected text. 
+fn verify_hover( + ctx: &TestContext, + ann: &Annotation, + expected: &str, + result: &mut VerificationResult, +) { + let pos = TestContext::position(ann.line, ann.column); + + let hover = handlers::hover(&ctx.document, pos, &ctx.analysis); + + match hover { + Some(h) => { + let contents = match &h.contents { + lsp_types::HoverContents::Scalar(s) => match s { + lsp_types::MarkedString::String(text) => text.clone(), + lsp_types::MarkedString::LanguageString(ls) => ls.value.clone(), + }, + lsp_types::HoverContents::Array(arr) => arr + .iter() + .map(|s| match s { + lsp_types::MarkedString::String(text) => text.clone(), + lsp_types::MarkedString::LanguageString(ls) => ls.value.clone(), + }) + .collect::>() + .join("\n"), + lsp_types::HoverContents::Markup(m) => m.value.clone(), + }; + + if contents.contains(expected) { + result.add_success(); + } else { + result.add_failure(format!( + "{}:{}: hover should contain '{}', got '{}'", + ann.line, ann.column, expected, contents + )); + } + } + None => { + result.add_failure(format!( + "{}:{}: expected hover containing '{}', got no hover", + ann.line, ann.column, expected + )); + } + } +} + +/// Verify that the type at position matches expected. 
+fn verify_type( + ctx: &TestContext, + ann: &Annotation, + expected: &str, + result: &mut VerificationResult, +) { + // Use hover to get type information + let pos = TestContext::position(ann.line, ann.column); + + let hover = handlers::hover(&ctx.document, pos, &ctx.analysis); + + match hover { + Some(h) => { + let contents = match &h.contents { + lsp_types::HoverContents::Markup(m) => m.value.clone(), + lsp_types::HoverContents::Scalar(s) => match s { + lsp_types::MarkedString::String(text) => text.clone(), + lsp_types::MarkedString::LanguageString(ls) => ls.value.clone(), + }, + lsp_types::HoverContents::Array(arr) => arr + .iter() + .map(|s| match s { + lsp_types::MarkedString::String(text) => text.clone(), + lsp_types::MarkedString::LanguageString(ls) => ls.value.clone(), + }) + .collect::>() + .join("\n"), + }; + + // Check if the hover contains the expected type + if contents.contains(expected) { + result.add_success(); + } else { + result.add_failure(format!( + "{}:{}: expected type '{}', hover shows '{}'", + ann.line, ann.column, expected, contents + )); + } + } + None => { + result.add_failure(format!( + "{}:{}: expected type '{}', got no hover", + ann.line, ann.column, expected + )); + } + } +} + +/// Verify that completions include expected items. 
+fn verify_completion( + ctx: &TestContext, + ann: &Annotation, + expected: &[String], + result: &mut VerificationResult, +) { + let pos = TestContext::position(ann.line, ann.column); + + let completion = handlers::completion(&ctx.document, pos, None, &ctx.analysis); + + match completion { + Some(list) => { + let items: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + + for exp in expected { + if items.contains(&exp.as_str()) { + result.add_success(); + } else { + result.add_failure(format!( + "{}:{}: completion should include '{}', available: {:?}", + ann.line, + ann.column, + exp, + &items[..items.len().min(10)] + )); + } + } + } + None => { + result.add_failure(format!( + "{}:{}: expected completions {:?}, got none", + ann.line, ann.column, expected + )); + } + } +} + +/// Verify that completions do NOT include a specific item. +fn verify_no_completion( + ctx: &TestContext, + ann: &Annotation, + unexpected: &str, + result: &mut VerificationResult, +) { + let pos = TestContext::position(ann.line, ann.column); + + let completion = handlers::completion(&ctx.document, pos, None, &ctx.analysis); + + match completion { + Some(list) => { + let has_item = list.items.iter().any(|i| i.label == unexpected); + if has_item { + result.add_failure(format!( + "{}:{}: completion should NOT include '{}'", + ann.line, ann.column, unexpected + )); + } else { + result.add_success(); + } + } + None => { + // No completions means the unexpected item is not there + result.add_success(); + } + } +} + +/// Verify that go-to-definition jumps to the expected position. 
+fn verify_goto( + ctx: &TestContext, + ann: &Annotation, + expected_line: u32, + expected_col: u32, + result: &mut VerificationResult, +) { + let pos = TestContext::position(ann.line, ann.column); + + let goto_result = handlers::goto_definition(&ctx.document, pos); + + match goto_result { + Some(handlers::DefinitionResult::Local(range)) => { + let start = range.start; + if start.line == expected_line && start.character == expected_col { + result.add_success(); + } else { + result.add_failure(format!( + "{}:{}: go-to-definition should jump to {}:{}, got {}:{}", + ann.line, ann.column, expected_line, expected_col, start.line, start.character + )); + } + } + Some(_) => { + result.add_failure(format!( + "{}:{}: expected local definition, got import definition", + ann.line, ann.column + )); + } + None => { + result.add_failure(format!( + "{}:{}: expected go-to-definition to {}:{}, got no result", + ann.line, ann.column, expected_line, expected_col + )); + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::framework::parser::parse_annotated_source; + + #[test] + fn test_verify_definition_success() { + // 'x' is at column 6 + let source = r"local x = 1; +## ^ def: x"; + let parsed = parse_annotated_source(source); + let result = verify_annotations(&parsed); + assert!(result.passed, "Failures: {:?}", result.failures); + assert_eq!(result.passed_count, 1); + } + + #[test] + fn test_verify_usage_success() { + // 'x' usage is at column 0 + let source = r"local x = 1; +x + 1 +##^ use: x"; + let parsed = parse_annotated_source(source); + let result = verify_annotations(&parsed); + assert!(result.passed, "Failures: {:?}", result.failures); + assert_eq!(result.passed_count, 1); + } + + #[test] + fn test_verify_definition_and_usage() { + // 'x' def at column 6, first usage at column 0, second at column 4 + let source = r"local x = 1; +## ^ def: x +x + x +##^ use: x +## ^ use: x"; + let parsed = parse_annotated_source(source); + let result = 
verify_annotations(&parsed); + assert!(result.passed, "Failures: {:?}", result.failures); + assert_eq!(result.passed_count, 3); + } +} diff --git a/crates/jrsonnet-lsp/tests/framework/mod.rs b/crates/jrsonnet-lsp/tests/framework/mod.rs new file mode 100644 index 00000000..b1d10c47 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/framework/mod.rs @@ -0,0 +1,74 @@ +//! Annotation-based E2E testing framework for Jsonnet LSP. +//! +//! This framework allows writing self-documenting tests with embedded annotations +//! that specify expected behavior at specific positions in the code. +//! +//! # Example +//! +//! ```ignore +//! check_definition(r" +//! local x = 1; +//! ## ^ def: x +//! x + 1 +//! ## ^ use: x +//! "); +//! ``` +//! +//! # Annotation Syntax +//! +//! Annotations are lines starting with `##` followed by spaces and a `^` marker. +//! The `^` indicates the column on the previous line being annotated. +//! +//! | Annotation | Meaning | +//! |------------|---------| +//! | `## ^ def: x` | Definition of `x` is at this position | +//! | `## ^ use: x` | Usage/reference to `x` is at this position | +//! | `## ^ hover: text` | Hover at this position contains "text" | +//! | `## ^ type: T` | Type at this position is `T` | +//! | `## ^ error: msg` | Error at this position contains "msg" | +//! | `## ^ completion: a, b` | Completions include `a` and `b` | +//! | `## ^ no-completion: x` | Completions do NOT include `x` | +//! | `## ^ goto: 5:10` | Go-to-definition jumps to line 5, col 10 | + +pub mod assertions; +pub mod parser; + +use std::fmt::Write as _; + +pub use assertions::verify_annotations; +pub use parser::parse_annotated_source; + +/// Run annotated test and panic on failure. +/// +/// This is the main entry point for annotated tests. 
+pub fn check(source: &str) { + let parsed = parse_annotated_source(source); + let result = verify_annotations(&parsed); + + if !result.passed { + let mut msg = format!( + "Annotated test failed ({}/{} checks passed):\n", + result.passed_count, result.total_count + ); + for failure in &result.failures { + msg.push_str(" - "); + msg.push_str(failure); + msg.push('\n'); + } + msg.push_str("\nSource:\n"); + for (i, line) in parsed.source.lines().enumerate() { + let _ = writeln!(msg, "{:3}| {}", i, line); + } + panic!("{}", msg); + } +} + +/// Check definition and usage annotations. +pub fn check_definition(source: &str) { + check(source); +} + +/// Check hover annotations. +pub fn check_hover(source: &str) { + check(source); +} diff --git a/crates/jrsonnet-lsp/tests/framework/parser.rs b/crates/jrsonnet-lsp/tests/framework/parser.rs new file mode 100644 index 00000000..18336cc2 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/framework/parser.rs @@ -0,0 +1,249 @@ +//! Annotation parser for test sources. +//! +//! Parses embedded annotations in test code that specify expected behavior. +//! +//! Annotation format: +//! ```text +//! local x = 1; +//! ## ^ def: x +//! x + 1 +//! ## ^ use: x +//! ``` +//! +//! The `##` prefix marks annotation lines. The `^` marker indicates +//! the column on the previous code line being annotated. + +use std::collections::HashMap; + +/// A parsed annotation from test source. +#[derive(Debug, Clone)] +pub struct Annotation { + /// Line number (0-indexed) of the code line being annotated. + pub line: u32, + /// Column (0-indexed) indicated by the `^` marker. + pub column: u32, + /// The annotation kind and value. + pub kind: AnnotationKind, +} + +/// The type of annotation. 
+#[derive(Debug, Clone, PartialEq, Eq)] +pub enum AnnotationKind { + /// `^ def: name` - expect definition of `name` here + Definition(String), + /// `^ use: name` - expect reference to `name` here + Usage(String), + /// `^ hover: text` - expect hover to contain `text` + Hover(String), + /// `^ type: T` - expect type `T` at this position + Type(String), + /// `^ error: msg` - expect error containing `msg` + Error(String), + /// `^ completion: item1, item2` - expect these completions + Completion(Vec), + /// `^ no-completion: item` - expect this NOT in completions + NoCompletion(String), + /// `^ goto: line:col` - expect go-to-definition to jump here + Goto { line: u32, column: u32 }, +} + +/// Result of parsing annotated source. +#[derive(Debug)] +pub struct ParsedSource { + /// The clean source code (annotations stripped). + pub source: String, + /// Parsed annotations with their positions. + pub annotations: Vec, + /// Map from (line, col) to annotation for quick lookup. + pub position_map: HashMap<(u32, u32), Vec>, +} + +impl ParsedSource { + /// Get annotations at a specific position. + pub fn annotations_at(&self, line: u32, col: u32) -> &[Annotation] { + self.position_map + .get(&(line, col)) + .map(std::vec::Vec::as_slice) + .unwrap_or(&[]) + } +} + +/// Parse source code with embedded annotations. +/// +/// Returns the clean source (annotations stripped) and the parsed annotations. 
+pub fn parse_annotated_source(source: &str) -> ParsedSource { + let mut clean_lines: Vec<&str> = Vec::new(); + let mut annotations: Vec = Vec::new(); + + let lines: Vec<&str> = source.lines().collect(); + let mut clean_line_idx: u32 = 0; + + for line in &lines { + let trimmed = line.trim_start(); + + if trimmed.starts_with("##") { + // This is an annotation line - parse it + if let Some(ann) = parse_annotation_line(trimmed, clean_line_idx.saturating_sub(1)) { + annotations.push(ann); + } + // Don't add to clean_lines + } else { + // Regular code line + clean_lines.push(line); + clean_line_idx += 1; + } + } + + // Build position map for quick lookup + let mut position_map: HashMap<(u32, u32), Vec> = HashMap::new(); + for ann in &annotations { + position_map + .entry((ann.line, ann.column)) + .or_default() + .push(ann.clone()); + } + + ParsedSource { + source: clean_lines.join("\n"), + annotations, + position_map, + } +} + +/// Parse a single annotation line. +/// +/// Format: `## ^ kind: value` +fn parse_annotation_line(line: &str, prev_line: u32) -> Option { + // Strip the ## prefix + let content = line.strip_prefix("##")?; + + // Find the ^ marker + let caret_pos = content.find('^')?; + + // The column is the position of ^ in the content (accounting for leading spaces) + let column = caret_pos as u32; + + // Parse the rest after the ^ + let rest = content[caret_pos + 1..].trim(); + + // Parse the kind: value part + let kind = parse_annotation_kind(rest)?; + + Some(Annotation { + line: prev_line, + column, + kind, + }) +} + +/// Parse the annotation kind from "kind: value" format. 
+fn parse_annotation_kind(s: &str) -> Option { + let (kind_str, value) = s.split_once(':')?; + let kind_str = kind_str.trim(); + let value = value.trim(); + + match kind_str { + "def" => Some(AnnotationKind::Definition(value.to_string())), + "use" => Some(AnnotationKind::Usage(value.to_string())), + "hover" => Some(AnnotationKind::Hover(value.to_string())), + "type" => Some(AnnotationKind::Type(value.to_string())), + "error" => Some(AnnotationKind::Error(value.to_string())), + "completion" => { + let items: Vec = value.split(',').map(|s| s.trim().to_string()).collect(); + Some(AnnotationKind::Completion(items)) + } + "no-completion" => Some(AnnotationKind::NoCompletion(value.to_string())), + "goto" => { + let (line_str, col_str) = value.split_once(':')?; + let line: u32 = line_str.trim().parse().ok()?; + let column: u32 = col_str.trim().parse().ok()?; + Some(AnnotationKind::Goto { line, column }) + } + _ => None, + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_parse_simple_annotation() { + // 'x' is at column 6 in "local x = 1;" + // 'x' is at column 0 in "x + 1" + let source = r"local x = 1; +## ^ def: x +x + 1 +##^ use: x"; + + let parsed = parse_annotated_source(source); + + // Clean source should have annotations stripped + assert_eq!(parsed.source, "local x = 1;\nx + 1"); + + // Should have 2 annotations + assert_eq!(parsed.annotations.len(), 2); + + // First annotation: def: x at line 0, column 6 + let ann1 = &parsed.annotations[0]; + assert_eq!(ann1.line, 0); + assert_eq!(ann1.column, 6); + assert_eq!(ann1.kind, AnnotationKind::Definition("x".to_string())); + + // Second annotation: use: x at line 1, column 0 + let ann2 = &parsed.annotations[1]; + assert_eq!(ann2.line, 1); + assert_eq!(ann2.column, 0); + assert_eq!(ann2.kind, AnnotationKind::Usage("x".to_string())); + } + + #[test] + fn test_parse_completion_annotation() { + let source = r"std. 
+## ^ completion: length, type, isNumber"; + + let parsed = parse_annotated_source(source); + assert_eq!(parsed.source, "std."); + assert_eq!(parsed.annotations.len(), 1); + + let ann = &parsed.annotations[0]; + assert!(matches!( + &ann.kind, + AnnotationKind::Completion(items) if items == &["length", "type", "isNumber"] + )); + } + + #[test] + fn test_parse_goto_annotation() { + let source = r"local f(x) = x; +## ^ goto: 0:8"; + + let parsed = parse_annotated_source(source); + assert_eq!(parsed.annotations.len(), 1); + + let ann = &parsed.annotations[0]; + assert!(matches!( + &ann.kind, + AnnotationKind::Goto { line: 0, column: 8 } + )); + } + + #[test] + fn test_position_map() { + // 'x' at column 6, '1' at column 10 + let source = r"local x = 1; +## ^ def: x +## ^ type: number"; + + let parsed = parse_annotated_source(source); + + // Should be able to look up by position + let anns_at_6 = parsed.annotations_at(0, 6); + assert_eq!(anns_at_6.len(), 1); + assert!(matches!(&anns_at_6[0].kind, AnnotationKind::Definition(_))); + + let anns_at_10 = parsed.annotations_at(0, 10); + assert_eq!(anns_at_10.len(), 1); + assert!(matches!(&anns_at_10[0].kind, AnnotationKind::Type(_))); + } +} diff --git a/crates/jrsonnet-lsp/tests/integration_test.rs b/crates/jrsonnet-lsp/tests/integration_test.rs new file mode 100644 index 00000000..4db01591 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/integration_test.rs @@ -0,0 +1,1106 @@ +//! Integration tests for the LSP server. +//! +//! Uses in-process testing with channels rather than subprocess management, +//! following patterns from ast-grep and simple-completion-language-server. 
+ +use std::{fs, thread, time::Duration}; + +use assert_matches::assert_matches; +use lsp_server::{Connection, Message, Notification, Request}; +use lsp_types::{ + notification::{ + DidChangeConfiguration, DidChangeWatchedFiles, DidOpenTextDocument, Notification as _, + PublishDiagnostics, + }, + request::{ + CodeActionRequest, DocumentHighlightRequest, ExecuteCommand, GotoDefinition, Initialize, + InlayHintRequest, References, Rename, Request as _, Shutdown, + }, + DidChangeConfigurationParams, DidChangeWatchedFilesParams, DidOpenTextDocumentParams, + ExecuteCommandParams, FileChangeType, FileEvent, GotoDefinitionParams, GotoDefinitionResponse, + InitializeParams, PartialResultParams, Position, ReferenceContext, ReferenceParams, + RenameParams, TextDocumentIdentifier, TextDocumentItem, TextDocumentPositionParams, + WorkDoneProgressParams, +}; +use serde_json::json; +use tempfile::TempDir; + +/// Helper to create an initialize request. +fn initialize_request(id: i32) -> Request { + initialize_request_with_options(id, serde_json::Value::Null) +} + +/// Helper to create an initialize request with custom initialization options. +fn initialize_request_with_options(id: i32, initialization_options: serde_json::Value) -> Request { + let mut params = InitializeParams::default(); + if !initialization_options.is_null() { + params.initialization_options = Some(initialization_options); + } + + Request::new( + id.into(), + Initialize::METHOD.to_string(), + serde_json::to_value(params).unwrap(), + ) +} + +/// Helper to create a shutdown request. +fn shutdown_request(id: i32) -> Request { + Request::new( + id.into(), + Shutdown::METHOD.to_string(), + serde_json::Value::Null, + ) +} + +/// Helper to create an initialized notification. +fn initialized_notification() -> Notification { + Notification::new("initialized".to_string(), json!({})) +} + +/// Helper to create an exit notification. 
+fn exit_notification() -> Notification { + Notification::new("exit".to_string(), json!({})) +} + +/// Helper to create a didOpen notification. +fn did_open_notification(uri: &str, text: &str) -> Notification { + let params = DidOpenTextDocumentParams { + text_document: TextDocumentItem { + uri: uri.parse().unwrap(), + language_id: "jsonnet".to_string(), + version: 1, + text: text.to_string(), + }, + }; + Notification::new( + DidOpenTextDocument::METHOD.to_string(), + serde_json::to_value(params).unwrap(), + ) +} + +/// Helper to create a goto definition request. +fn goto_definition_request(id: i32, uri: &str, line: u32, character: u32) -> Request { + let params = GotoDefinitionParams { + text_document_position_params: TextDocumentPositionParams { + text_document: TextDocumentIdentifier { + uri: uri.parse().unwrap(), + }, + position: Position { line, character }, + }, + work_done_progress_params: WorkDoneProgressParams::default(), + partial_result_params: PartialResultParams::default(), + }; + Request::new( + id.into(), + GotoDefinition::METHOD.to_string(), + serde_json::to_value(params).unwrap(), + ) +} + +/// Helper to create a references request. 
+fn references_request( + id: i32, + uri: &str, + line: u32, + character: u32, + include_declaration: bool, +) -> Request { + let params = ReferenceParams { + text_document_position: TextDocumentPositionParams { + text_document: TextDocumentIdentifier { + uri: uri.parse().unwrap(), + }, + position: Position { line, character }, + }, + context: ReferenceContext { + include_declaration, + }, + work_done_progress_params: WorkDoneProgressParams::default(), + partial_result_params: PartialResultParams::default(), + }; + Request::new( + id.into(), + References::METHOD.to_string(), + serde_json::to_value(params).unwrap(), + ) +} + +fn document_highlight_request(id: i32, uri: &str, line: u32, character: u32) -> Request { + let params = lsp_types::DocumentHighlightParams { + text_document_position_params: TextDocumentPositionParams { + text_document: TextDocumentIdentifier { + uri: uri.parse().unwrap(), + }, + position: Position { line, character }, + }, + work_done_progress_params: WorkDoneProgressParams::default(), + partial_result_params: PartialResultParams::default(), + }; + Request::new( + id.into(), + DocumentHighlightRequest::METHOD.to_string(), + serde_json::to_value(params).unwrap(), + ) +} + +fn rename_request(id: i32, uri: &str, line: u32, character: u32, new_name: &str) -> Request { + let params = RenameParams { + text_document_position: TextDocumentPositionParams { + text_document: TextDocumentIdentifier { + uri: uri.parse().unwrap(), + }, + position: Position { line, character }, + }, + new_name: new_name.to_string(), + work_done_progress_params: WorkDoneProgressParams::default(), + }; + Request::new( + id.into(), + Rename::METHOD.to_string(), + serde_json::to_value(params).unwrap(), + ) +} + +fn inlay_hint_request( + id: i32, + uri: &str, + start_line: u32, + start_character: u32, + end_line: u32, + end_character: u32, +) -> Request { + let params = lsp_types::InlayHintParams { + work_done_progress_params: WorkDoneProgressParams::default(), + text_document: 
TextDocumentIdentifier { + uri: uri.parse().unwrap(), + }, + range: lsp_types::Range { + start: Position { + line: start_line, + character: start_character, + }, + end: Position { + line: end_line, + character: end_character, + }, + }, + }; + Request::new( + id.into(), + InlayHintRequest::METHOD.to_string(), + serde_json::to_value(params).unwrap(), + ) +} + +fn code_action_request( + id: i32, + uri: &str, + range: lsp_types::Range, + diagnostics: Vec, + only: Option>, +) -> Request { + let params = lsp_types::CodeActionParams { + text_document: TextDocumentIdentifier { + uri: uri.parse().unwrap(), + }, + range, + context: lsp_types::CodeActionContext { + diagnostics, + only, + trigger_kind: None, + }, + work_done_progress_params: WorkDoneProgressParams::default(), + partial_result_params: PartialResultParams::default(), + }; + Request::new( + id.into(), + CodeActionRequest::METHOD.to_string(), + serde_json::to_value(params).unwrap(), + ) +} + +fn did_change_watched_files_notification(changes: Vec) -> Notification { + let params = DidChangeWatchedFilesParams { changes }; + Notification::new( + DidChangeWatchedFiles::METHOD.to_string(), + serde_json::to_value(params).unwrap(), + ) +} + +fn did_change_configuration_notification(settings: serde_json::Value) -> Notification { + let params = DidChangeConfigurationParams { settings }; + Notification::new( + DidChangeConfiguration::METHOD.to_string(), + serde_json::to_value(params).unwrap(), + ) +} + +fn execute_command_request(id: i32, command: &str, arguments: Vec) -> Request { + let params = ExecuteCommandParams { + command: command.to_string(), + arguments, + work_done_progress_params: WorkDoneProgressParams::default(), + }; + Request::new( + id.into(), + ExecuteCommand::METHOD.to_string(), + serde_json::to_value(params).unwrap(), + ) +} + +fn file_uri(path: &std::path::Path) -> String { + format!("file://{}", path.to_string_lossy()) +} + +fn recv_response(conn: &Connection, expected_id: i32) -> lsp_server::Response { 
+ loop { + let message = conn + .receiver + .recv_timeout(Duration::from_secs(3)) + .expect("expected response message"); + if let Message::Response(response) = message { + if response.id == expected_id.into() { + return response; + } + } + } +} + +fn recv_publish_diagnostics_for_uri( + conn: &Connection, + uri: &str, + timeout: Duration, +) -> lsp_types::PublishDiagnosticsParams { + loop { + let message = conn + .receiver + .recv_timeout(timeout) + .expect("expected diagnostics notification"); + if let Message::Notification(notif) = message { + if notif.method != PublishDiagnostics::METHOD { + continue; + } + + let params: lsp_types::PublishDiagnosticsParams = + serde_json::from_value(notif.params).unwrap(); + if params.uri.as_str() == uri { + return params; + } + } + } +} + +/// Run the server with the given connection in a separate thread. +fn run_server(connection: Connection) -> thread::JoinHandle<()> { + thread::spawn(move || { + let server = jrsonnet_lsp::server::Server::new(connection); + let _ = server.run(); + }) +} + +#[test] +fn test_initialize_shutdown() { + // Create an in-memory connection pair + let (client_conn, server_conn) = Connection::memory(); + + // Run the server in a background thread + let server_thread = run_server(server_conn); + + // Send initialize request + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + + // Receive initialize response + let response = client_conn.receiver.recv().unwrap(); + assert_matches!(response, Message::Response(resp) => { + assert_eq!(resp.id, 1.into()); + assert!(resp.error.is_none(), "Initialize should succeed"); + let result = resp.result.expect("should have result"); + assert!(result.get("capabilities").is_some(), "should have capabilities"); + assert_eq!( + result["capabilities"]["documentHighlightProvider"], + serde_json::Value::Bool(true), + "document highlight capability should be advertised", + ); + assert_eq!( + result["capabilities"]["inlayHintProvider"], + 
serde_json::Value::Bool(true), + "inlay hint capability should be advertised", + ); + assert_eq!( + result["capabilities"]["codeActionProvider"]["codeActionKinds"][0], + serde_json::Value::String("quickfix".to_string()), + "quickfix code action capability should be advertised", + ); + let server_name = result + .get("serverInfo") + .and_then(|s| s.get("name")) + .and_then(|n| n.as_str()) + .expect("should have serverInfo.name"); + assert!(server_name.contains("jrsonnet")); + }); + + // Send initialized notification + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + // Send shutdown request + client_conn + .sender + .send(Message::Request(shutdown_request(2))) + .unwrap(); + + // Receive shutdown response + let response = client_conn.receiver.recv().unwrap(); + assert_matches!(response, Message::Response(resp) => { + assert_eq!(resp.id, 2.into()); + assert!(resp.error.is_none(), "Shutdown should succeed"); + }); + + // Send exit notification + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + + // Wait for server to exit + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_diagnostics_on_open() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + // Initialize + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = client_conn.receiver.recv().unwrap(); // ignore response + + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + // Open a document with a syntax error + let uri = "file:///test/error.jsonnet"; + let text = "{ a: }"; // Missing value - syntax error + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text))) + .unwrap(); + + // Should receive diagnostics notification + let notification = client_conn.receiver.recv().unwrap(); + assert_matches!(notification, 
Message::Notification(notif) => { + assert_eq!(notif.method, PublishDiagnostics::METHOD); + let params: lsp_types::PublishDiagnosticsParams = + serde_json::from_value(notif.params).unwrap(); + assert!( + !params.diagnostics.is_empty(), + "Should have diagnostics for syntax error" + ); + }); + + // Shutdown + client_conn + .sender + .send(Message::Request(shutdown_request(2))) + .unwrap(); + let _ = client_conn.receiver.recv().unwrap(); + + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_configuration_change_reconfigures_eval_diagnostics() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + // Initialize with eval diagnostics enabled. + client_conn + .sender + .send(Message::Request(initialize_request_with_options( + 1, + serde_json::json!({ + "enableEvalDiagnostics": true + }), + ))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + let uri = "file:///test/eval-config-change.jsonnet"; + let text = "error 'boom'"; + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text))) + .unwrap(); + + let initial_diagnostics = + recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); + assert!( + initial_diagnostics + .diagnostics + .iter() + .any(|diag| diag.source.as_deref() == Some("jrsonnet-eval")), + "expected eval diagnostics to be present before config change" + ); + + client_conn + .sender + .send(Message::Notification( + did_change_configuration_notification(serde_json::json!({ + "jsonnet": { + "enableEvalDiagnostics": false + } + })), + )) + .unwrap(); + + let updated_diagnostics = + recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); + assert!( + updated_diagnostics + .diagnostics + .iter() + 
.all(|diag| diag.source.as_deref() != Some("jrsonnet-eval")), + "expected eval diagnostics to be removed after config change" + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(2))) + .unwrap(); + let _ = recv_response(&client_conn, 2); + + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_valid_document_no_errors() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + // Initialize + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = client_conn.receiver.recv().unwrap(); + + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + // Open a valid document + let uri = "file:///test/valid.jsonnet"; + let text = r#"{ hello: "world", answer: 42 }"#; + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text))) + .unwrap(); + + // Should receive diagnostics notification with empty diagnostics + let notification = client_conn.receiver.recv().unwrap(); + assert_matches!(notification, Message::Notification(notif) => { + assert_eq!(notif.method, PublishDiagnostics::METHOD); + let params: lsp_types::PublishDiagnosticsParams = + serde_json::from_value(notif.params).unwrap(); + assert!( + params.diagnostics.is_empty(), + "Valid document should have no diagnostics" + ); + }); + + // Shutdown + client_conn + .sender + .send(Message::Request(shutdown_request(2))) + .unwrap(); + let _ = client_conn.receiver.recv().unwrap(); + + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_goto_definition() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + // Initialize + 
client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = client_conn.receiver.recv().unwrap(); + + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + // Open a document with a local binding + let uri = "file:///test/definition.jsonnet"; + let text = r"local x = 1; x + 1"; + // ^^^^^^ def ^ use at position (0, 13) + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text))) + .unwrap(); + + // Receive diagnostics notification (discard) + let _ = client_conn.receiver.recv().unwrap(); + + // Send goto definition request for 'x' usage at position (0, 13) + client_conn + .sender + .send(Message::Request(goto_definition_request(2, uri, 0, 13))) + .unwrap(); + + // Should receive definition response + let response = client_conn.receiver.recv().unwrap(); + assert_matches!(response, Message::Response(resp) => { + assert_eq!(resp.id, 2.into()); + assert!(resp.error.is_none(), "Goto definition should succeed"); + let result: GotoDefinitionResponse = + serde_json::from_value(resp.result.expect("should have result")).unwrap(); + assert_matches!(result, GotoDefinitionResponse::Scalar(location) => { + // Definition should be at position 6 (the 'x' in 'local x') + assert_eq!(location.range.start.line, 0); + assert_eq!(location.range.start.character, 6); + }); + }); + + // Shutdown + client_conn + .sender + .send(Message::Request(shutdown_request(3))) + .unwrap(); + let _ = client_conn.receiver.recv().unwrap(); + + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_document_highlight() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + 
.sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + let uri = "file:///test/highlight.jsonnet"; + let text = "local x = 1; x + x"; + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text))) + .unwrap(); + + client_conn + .sender + .send(Message::Request(document_highlight_request(2, uri, 0, 13))) + .unwrap(); + let response = recv_response(&client_conn, 2); + assert!( + response.error.is_none(), + "Document highlight should succeed" + ); + + let highlights: Option> = + serde_json::from_value(response.result.expect("should have result")).unwrap(); + let highlights = highlights.unwrap_or_default(); + assert_eq!(highlights.len(), 3); + assert!( + highlights.iter().any(|highlight| { + highlight.range.start.character == 6 + && highlight.kind == Some(lsp_types::DocumentHighlightKind::WRITE) + }), + "Definition should be highlighted as WRITE" + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(3))) + .unwrap(); + let _ = recv_response(&client_conn, 3); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_inlay_hint() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + let uri = "file:///test/inlay.jsonnet"; + let text = "local x = 1; x"; + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text))) + .unwrap(); + + client_conn + .sender + .send(Message::Request(inlay_hint_request(2, uri, 0, 0, 0, 50))) + .unwrap(); + let response = recv_response(&client_conn, 2); + assert!(response.error.is_none(), "Inlay hint should succeed"); + + let hints: Option> = + 
serde_json::from_value(response.result.expect("should have result")).unwrap(); + let hints = hints.unwrap_or_default(); + let hints_json = serde_json::to_value(&hints).expect("hints should serialize"); + let expected_json = serde_json::json!([{ + "position": { "line": 0, "character": 7 }, + "label": ": number", + "kind": 1, + "paddingLeft": true + }]); + assert_eq!(hints_json, expected_json); + + client_conn + .sender + .send(Message::Request(shutdown_request(3))) + .unwrap(); + let _ = recv_response(&client_conn, 3); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_code_action_unused_variable_quickfix() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + let uri = "file:///test/code-action.jsonnet"; + let text = "local x = 1; 42"; + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text))) + .unwrap(); + + let diagnostic = lsp_types::Diagnostic { + range: lsp_types::Range { + start: Position { + line: 0, + character: 6, + }, + end: Position { + line: 0, + character: 7, + }, + }, + severity: Some(lsp_types::DiagnosticSeverity::WARNING), + code: Some(lsp_types::NumberOrString::String( + "unused-variable".to_string(), + )), + code_description: None, + source: Some("jrsonnet-lint".to_string()), + message: "unused variable".to_string(), + related_information: None, + tags: None, + data: None, + }; + + client_conn + .sender + .send(Message::Request(code_action_request( + 2, + uri, + lsp_types::Range { + start: Position { + line: 0, + character: 0, + }, + end: Position { + line: 0, + character: 20, + }, + }, + 
vec![diagnostic.clone()], + None, + ))) + .unwrap(); + let response = recv_response(&client_conn, 2); + assert!(response.error.is_none(), "Code action should succeed"); + let actions: Option> = + serde_json::from_value(response.result.expect("should have result")).unwrap(); + let actions = actions.unwrap_or_default(); + assert_eq!(actions.len(), 1); + let lsp_types::CodeActionOrCommand::CodeAction(action) = &actions[0] else { + panic!("Expected code action"); + }; + assert_eq!(action.title, "Prefix `x` with `_`"); + assert_eq!(action.kind, Some(lsp_types::CodeActionKind::QUICKFIX)); + let uri_parsed: lsp_types::Uri = uri.parse().unwrap(); + let edits = action + .edit + .as_ref() + .and_then(|edit| edit.changes.as_ref()) + .and_then(|changes| changes.get(&uri_parsed)) + .expect("expected workspace edit for file"); + assert_eq!( + edits, + &vec![lsp_types::TextEdit { + range: lsp_types::Range { + start: Position { + line: 0, + character: 6, + }, + end: Position { + line: 0, + character: 7, + }, + }, + new_text: "_x".to_string(), + }] + ); + + // Requesting non-quickfix actions should filter this out. 
+ client_conn + .sender + .send(Message::Request(code_action_request( + 3, + uri, + lsp_types::Range { + start: Position { + line: 0, + character: 0, + }, + end: Position { + line: 0, + character: 20, + }, + }, + vec![diagnostic], + Some(vec![lsp_types::CodeActionKind::SOURCE_FIX_ALL]), + ))) + .unwrap(); + let response = recv_response(&client_conn, 3); + assert!(response.error.is_none(), "Code action should succeed"); + let filtered_actions: Option> = + serde_json::from_value(response.result.expect("should have result")).unwrap(); + assert!(filtered_actions.is_none()); + + client_conn + .sender + .send(Message::Request(shutdown_request(4))) + .unwrap(); + let _ = recv_response(&client_conn, 4); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_watched_file_refreshes_unopened_importers_for_references() { + let tmp = TempDir::new().expect("tempdir should be created"); + let lib1_path = tmp.path().join("lib1.jsonnet"); + let lib2_path = tmp.path().join("lib2.jsonnet"); + let main_path = tmp.path().join("main.jsonnet"); + + fs::write(&lib1_path, "local target = 1; target").expect("lib1 should be written"); + fs::write(&lib2_path, "local target = 2; target").expect("lib2 should be written"); + fs::write(&main_path, "local lib = import 'lib1.jsonnet'; lib.target") + .expect("main should be written"); + + let lib1_uri = file_uri(&lib1_path.canonicalize().expect("lib1 should canonicalize")); + let main_uri = file_uri(&main_path.canonicalize().expect("main should canonicalize")); + + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + // Initialize + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = client_conn.receiver.recv().unwrap(); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + // Open 
lib1 (current document for references requests) + client_conn + .sender + .send(Message::Notification(did_open_notification( + &lib1_uri, + "local target = 1; target", + ))) + .unwrap(); + let _ = client_conn.receiver.recv().unwrap(); // initial diagnostics + + // Index unopened main file via watched-files notification + client_conn + .sender + .send(Message::Notification( + did_change_watched_files_notification(vec![FileEvent { + uri: main_uri.parse().unwrap(), + typ: FileChangeType::CREATED, + }]), + )) + .unwrap(); + + client_conn + .sender + .send(Message::Request(execute_command_request( + 20, + "jrsonnet.findTransitiveImporters", + vec![serde_json::Value::String(lib1_uri.clone())], + ))) + .unwrap(); + let response = recv_response(&client_conn, 20); + assert!(response.error.is_none(), "Command should succeed"); + let command_result = response.result.expect("command should return result"); + let importers = command_result["transitiveImporters"] + .as_array() + .expect("transitiveImporters should be an array") + .iter() + .filter_map(|value| value.as_str()) + .collect::>(); + assert!( + importers.iter().any(|uri| *uri == main_uri), + "Expected main to be indexed as lib1 importer, got: {importers:?}" + ); + + // Query references to `target` definition in lib1 (line 0, col 6) + client_conn + .sender + .send(Message::Request(references_request( + 2, &lib1_uri, 0, 6, false, + ))) + .unwrap(); + let response = recv_response(&client_conn, 2); + assert!(response.error.is_none(), "References should succeed"); + let refs: Option> = + serde_json::from_value(response.result.expect("should have result")).unwrap(); + let refs = refs.unwrap_or_default(); + assert!( + refs.iter() + .any(|location| location.uri.to_string() == main_uri), + "Expected cross-file reference from unopened main file, got: {refs:?}" + ); + + // Update main on disk to import lib2 instead of lib1 + fs::write(&main_path, "local lib = import 'lib2.jsonnet'; lib.target") + .expect("main should be 
rewritten"); + + client_conn + .sender + .send(Message::Notification( + did_change_watched_files_notification(vec![FileEvent { + uri: main_uri.parse().unwrap(), + typ: FileChangeType::CHANGED, + }]), + )) + .unwrap(); + + // References to lib1 target should no longer include main + client_conn + .sender + .send(Message::Request(references_request( + 3, &lib1_uri, 0, 6, false, + ))) + .unwrap(); + let response = recv_response(&client_conn, 3); + assert!(response.error.is_none(), "References should succeed"); + let refs: Option> = + serde_json::from_value(response.result.expect("should have result")).unwrap(); + let refs = refs.unwrap_or_default(); + assert!( + !refs + .iter() + .any(|location| location.uri.to_string() == main_uri), + "Main should no longer reference lib1 after watched-file update" + ); + + // Shutdown + client_conn + .sender + .send(Message::Request(shutdown_request(4))) + .unwrap(); + let _ = recv_response(&client_conn, 4); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_cross_file_rename_updates_definition_and_importers() { + let tmp = TempDir::new().expect("tempdir should be created"); + let lib_path = tmp.path().join("lib.jsonnet"); + let main_path = tmp.path().join("main.jsonnet"); + + fs::write(&lib_path, "{ helper: function(x) x * 2 }").expect("lib should be written"); + fs::write( + &main_path, + "local lib = import 'lib.jsonnet'; lib.helper(1) + lib.helper(2)", + ) + .expect("main should be written"); + + let lib_uri = file_uri(&lib_path.canonicalize().expect("lib should canonicalize")); + let main_uri = file_uri(&main_path.canonicalize().expect("main should canonicalize")); + + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = recv_response(&client_conn, 
1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + client_conn + .sender + .send(Message::Notification(did_open_notification( + &lib_uri, + "{ helper: function(x) x * 2 }", + ))) + .unwrap(); + + client_conn + .sender + .send(Message::Notification( + did_change_watched_files_notification(vec![FileEvent { + uri: main_uri.parse().unwrap(), + typ: FileChangeType::CREATED, + }]), + )) + .unwrap(); + + client_conn + .sender + .send(Message::Request(rename_request(2, &lib_uri, 0, 2, "util"))) + .unwrap(); + let response = recv_response(&client_conn, 2); + assert!(response.error.is_none(), "Rename should succeed"); + + let edit: Option = + serde_json::from_value(response.result.expect("should have result")).unwrap(); + let edit = edit.expect("rename should produce workspace edit"); + let changes = edit.changes.expect("workspace edit should include changes"); + + let lib_edits = changes + .iter() + .find_map(|(uri, edits)| (uri.as_str() == lib_uri).then_some(edits)) + .expect("lib file should be edited"); + assert_eq!(lib_edits.len(), 1, "lib should have one definition rename"); + assert_eq!(lib_edits[0].new_text, "util"); + + let main_edits = changes + .iter() + .find_map(|(uri, edits)| (uri.as_str() == main_uri).then_some(edits)) + .expect("main importer should be edited"); + assert_eq!( + main_edits.len(), + 2, + "main should rename both field references" + ); + assert!(main_edits.iter().all(|edit| edit.new_text == "util")); + + client_conn + .sender + .send(Message::Request(shutdown_request(3))) + .unwrap(); + let _ = recv_response(&client_conn, 3); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} diff --git a/crates/jrsonnet-lsp/tests/stress_tests.rs b/crates/jrsonnet-lsp/tests/stress_tests.rs new file mode 100644 index 00000000..2249232c --- /dev/null +++ b/crates/jrsonnet-lsp/tests/stress_tests.rs 
@@ -0,0 +1,827 @@ +//! Stress tests for the LSP server. +//! +//! These tests verify the server behaves correctly under load: +//! - Rapid document changes (simulating fast typing) +//! - Concurrent requests from multiple threads +//! - Many documents open simultaneously +//! - Large document handling + +use std::{fmt::Write as _, sync::Arc, thread, time::Duration}; + +use assert_matches::assert_matches; +use crossbeam_channel::RecvTimeoutError; +use lsp_server::{Connection, Message, Notification, Request}; +use lsp_types::{ + notification::{DidChangeTextDocument, DidOpenTextDocument, Notification as _}, + request::{Completion, GotoDefinition, HoverRequest, Initialize, Request as _, Shutdown}, + CompletionParams, DidChangeTextDocumentParams, DidOpenTextDocumentParams, GotoDefinitionParams, + HoverParams, InitializeParams, PartialResultParams, Position, Range, + TextDocumentContentChangeEvent, TextDocumentIdentifier, TextDocumentItem, + TextDocumentPositionParams, VersionedTextDocumentIdentifier, WorkDoneProgressParams, +}; +use serde_json::json; + +// ============================================================================= +// Test Helpers +// ============================================================================= + +/// Helper to create an initialize request. +fn initialize_request(id: i32) -> Request { + Request::new( + id.into(), + Initialize::METHOD.to_string(), + serde_json::to_value(InitializeParams::default()).unwrap(), + ) +} + +/// Helper to create a shutdown request. +fn shutdown_request(id: i32) -> Request { + Request::new( + id.into(), + Shutdown::METHOD.to_string(), + serde_json::Value::Null, + ) +} + +/// Helper to create an initialized notification. +fn initialized_notification() -> Notification { + Notification::new("initialized".to_string(), json!({})) +} + +/// Helper to create an exit notification. 
+fn exit_notification() -> Notification { + Notification::new("exit".to_string(), json!({})) +} + +/// Helper to create a didOpen notification. +fn did_open_notification(uri: &str, text: &str, version: i32) -> Notification { + let params = DidOpenTextDocumentParams { + text_document: TextDocumentItem { + uri: uri.parse().unwrap(), + language_id: "jsonnet".to_string(), + version, + text: text.to_string(), + }, + }; + Notification::new( + DidOpenTextDocument::METHOD.to_string(), + serde_json::to_value(params).unwrap(), + ) +} + +/// Helper to create a didChange notification (full document replacement). +fn did_change_notification_full(uri: &str, text: &str, version: i32) -> Notification { + let params = DidChangeTextDocumentParams { + text_document: VersionedTextDocumentIdentifier { + uri: uri.parse().unwrap(), + version, + }, + content_changes: vec![TextDocumentContentChangeEvent { + range: None, + range_length: None, + text: text.to_string(), + }], + }; + Notification::new( + DidChangeTextDocument::METHOD.to_string(), + serde_json::to_value(params).unwrap(), + ) +} + +/// Helper to create a didChange notification (incremental change). +fn did_change_notification_incremental( + uri: &str, + range: Range, + text: &str, + version: i32, +) -> Notification { + let params = DidChangeTextDocumentParams { + text_document: VersionedTextDocumentIdentifier { + uri: uri.parse().unwrap(), + version, + }, + content_changes: vec![TextDocumentContentChangeEvent { + range: Some(range), + range_length: None, + text: text.to_string(), + }], + }; + Notification::new( + DidChangeTextDocument::METHOD.to_string(), + serde_json::to_value(params).unwrap(), + ) +} + +/// Helper to create a hover request. 
+fn hover_request(id: i32, uri: &str, line: u32, character: u32) -> Request { + let params = HoverParams { + text_document_position_params: TextDocumentPositionParams { + text_document: TextDocumentIdentifier { + uri: uri.parse().unwrap(), + }, + position: Position { line, character }, + }, + work_done_progress_params: WorkDoneProgressParams::default(), + }; + Request::new( + id.into(), + HoverRequest::METHOD.to_string(), + serde_json::to_value(params).unwrap(), + ) +} + +/// Helper to create a completion request. +fn completion_request(id: i32, uri: &str, line: u32, character: u32) -> Request { + let params = CompletionParams { + text_document_position: TextDocumentPositionParams { + text_document: TextDocumentIdentifier { + uri: uri.parse().unwrap(), + }, + position: Position { line, character }, + }, + work_done_progress_params: WorkDoneProgressParams::default(), + partial_result_params: PartialResultParams::default(), + context: None, + }; + Request::new( + id.into(), + Completion::METHOD.to_string(), + serde_json::to_value(params).unwrap(), + ) +} + +/// Helper to create a goto definition request. +fn goto_definition_request(id: i32, uri: &str, line: u32, character: u32) -> Request { + let params = GotoDefinitionParams { + text_document_position_params: TextDocumentPositionParams { + text_document: TextDocumentIdentifier { + uri: uri.parse().unwrap(), + }, + position: Position { line, character }, + }, + work_done_progress_params: WorkDoneProgressParams::default(), + partial_result_params: PartialResultParams::default(), + }; + Request::new( + id.into(), + GotoDefinition::METHOD.to_string(), + serde_json::to_value(params).unwrap(), + ) +} + +/// Run the server with the given connection in a separate thread. 
+fn run_server(connection: Connection) -> thread::JoinHandle<()> { + thread::spawn(move || { + let server = jrsonnet_lsp::server::Server::new(connection); + let _ = server.run(); + }) +} + +/// Initialize a server connection and return the client connection. +fn init_server() -> (Connection, thread::JoinHandle<()>) { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + // Send initialize request + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + + // Receive initialize response + let response = client_conn.receiver.recv().unwrap(); + assert_matches!(response, Message::Response(resp) => { + assert!(resp.error.is_none(), "Initialize should succeed"); + }); + + // Send initialized notification + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + (client_conn, server_thread) +} + +/// Shutdown and clean up the server. +fn shutdown_server(client_conn: &Connection, server_thread: thread::JoinHandle<()>, req_id: i32) { + // Send shutdown request + client_conn + .sender + .send(Message::Request(shutdown_request(req_id))) + .unwrap(); + + // Receive shutdown response + let response = client_conn.receiver.recv().unwrap(); + assert_matches!(response, Message::Response(resp) => { + assert!(resp.error.is_none(), "Shutdown should succeed"); + }); + + // Send exit notification + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + + // Wait for server to exit + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +/// Drain all pending messages from the receiver. 
+fn drain_messages(conn: &Connection, timeout: Duration) { + while conn.receiver.recv_timeout(timeout).is_ok() {} +} + +// ============================================================================= +// Stress Tests +// ============================================================================= + +/// Test rapid document changes (simulating fast typing). +/// +/// This verifies that the server handles many quick edits without crashing +/// or getting into an inconsistent state. +#[test] +fn test_rapid_document_changes() { + let (client_conn, server_thread) = init_server(); + + let uri = "file:///test/rapid.jsonnet"; + + // Open a document + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, "{}", 1))) + .unwrap(); + + // Wait for initial diagnostics + drain_messages(&client_conn, Duration::from_millis(50)); + + // Rapid full-document changes (simulating fast typing) + for i in 2..=100 { + let content = format!("{{ x: {} }}", i); + client_conn + .sender + .send(Message::Notification(did_change_notification_full( + uri, &content, i, + ))) + .unwrap(); + } + + // Wait for processing to settle + drain_messages(&client_conn, Duration::from_millis(200)); + + // Verify server is still responsive with a hover request + client_conn + .sender + .send(Message::Request(hover_request(1000, uri, 0, 3))) + .unwrap(); + + // Should get a response (not necessarily with content, but should respond) + let response = client_conn + .receiver + .recv_timeout(Duration::from_secs(2)) + .expect("Server should respond after rapid changes"); + assert_matches!(response, Message::Response(resp) => { + assert_eq!(resp.id, 1000.into()); + assert!(resp.error.is_none(), "Request should not error"); + }); + + shutdown_server(&client_conn, server_thread, 1001); +} + +/// Test rapid incremental changes (simulating character-by-character typing). 
+#[test] +fn test_rapid_incremental_changes() { + let (client_conn, server_thread) = init_server(); + + let uri = "file:///test/incremental.jsonnet"; + + // Open a document + client_conn + .sender + .send(Message::Notification(did_open_notification( + uri, + "local x = 1;\n", + 1, + ))) + .unwrap(); + + // Wait for initial diagnostics + drain_messages(&client_conn, Duration::from_millis(50)); + + // Simulate typing "x + 1" character by character at the end + let chars = ['x', ' ', '+', ' ', '1']; + let mut version = 2; + for (i, ch) in chars.iter().enumerate() { + let range = Range { + start: Position { + line: 1, + character: i as u32, + }, + end: Position { + line: 1, + character: i as u32, + }, + }; + client_conn + .sender + .send(Message::Notification(did_change_notification_incremental( + uri, + range, + &ch.to_string(), + version, + ))) + .unwrap(); + version += 1; + } + + // Wait for processing + drain_messages(&client_conn, Duration::from_millis(200)); + + // Verify server is responsive + client_conn + .sender + .send(Message::Request(hover_request(100, uri, 0, 6))) + .unwrap(); + + let response = client_conn + .receiver + .recv_timeout(Duration::from_secs(2)) + .expect("Server should respond"); + assert_matches!(response, Message::Response(resp) => { + assert!(resp.error.is_none()); + }); + + shutdown_server(&client_conn, server_thread, 101); +} + +/// Test concurrent requests from multiple threads. +/// +/// This verifies that the server can handle requests arriving nearly simultaneously. 
+#[test] +fn test_concurrent_requests() { + let (client_conn, server_thread) = init_server(); + + let uri = "file:///test/concurrent.jsonnet"; + let text = r"local x = 1; +local y = 2; +local add(a, b) = a + b; +add(x, y)"; + + // Open a document + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text, 1))) + .unwrap(); + + // Wait for initial diagnostics + drain_messages(&client_conn, Duration::from_millis(100)); + + // Send multiple requests concurrently + let sender = Arc::new(client_conn.sender.clone()); + let handles: Vec<_> = (0..10) + .map(|i| { + let sender = Arc::clone(&sender); + let uri = uri.to_string(); + thread::spawn(move || { + let base_id = (i + 1) * 100; + // Send hover request + sender + .send(Message::Request(hover_request(base_id, &uri, 0, 7))) + .unwrap(); + // Send goto definition request + sender + .send(Message::Request(goto_definition_request( + base_id + 1, + &uri, + 3, + 4, + ))) + .unwrap(); + // Send completion request + sender + .send(Message::Request(completion_request( + base_id + 2, + &uri, + 3, + 0, + ))) + .unwrap(); + }) + }) + .collect(); + + // Wait for all sends to complete + for handle in handles { + handle.join().unwrap(); + } + + // Collect all responses (30 requests total) + let mut responses = Vec::new(); + let mut attempts = 0; + while responses.len() < 30 && attempts < 100 { + match client_conn + .receiver + .recv_timeout(Duration::from_millis(100)) + { + Ok(Message::Response(resp)) => { + responses.push(resp); + } + Ok(_) => { + // Ignore notifications + } + Err(_) => { + attempts += 1; + } + } + } + + // Verify we got all responses + assert!( + responses.len() >= 25, + "Should receive most responses, got {}", + responses.len() + ); + + // Verify no errors in responses + for resp in &responses { + assert!( + resp.error.is_none(), + "Response {} should not have error: {:?}", + resp.id, + resp.error + ); + } + + shutdown_server(&client_conn, server_thread, 9999); +} + +/// Test many 
documents open simultaneously. +/// +/// This verifies that the server can handle many open documents without +/// excessive memory usage or performance degradation. +#[test] +fn test_many_documents() { + let (client_conn, server_thread) = init_server(); + + let num_documents = 50; + + // Open many documents + for i in 0..num_documents { + let uri = format!("file:///test/doc{}.jsonnet", i); + let content = format!( + r"local x{i} = {i}; +local f{i}(a) = a + x{i}; +f{i}(1)", + i = i + ); + client_conn + .sender + .send(Message::Notification(did_open_notification( + &uri, &content, 1, + ))) + .unwrap(); + } + + // Wait for all documents to be processed + drain_messages(&client_conn, Duration::from_millis(500)); + + // Query each document to verify they're all accessible + let mut successful_queries = 0; + for i in 0..num_documents { + let uri = format!("file:///test/doc{}.jsonnet", i); + client_conn + .sender + .send(Message::Request(hover_request(i + 100, &uri, 0, 7))) + .unwrap(); + + match client_conn.receiver.recv_timeout(Duration::from_secs(1)) { + Ok(Message::Response(resp)) => { + if resp.error.is_none() { + successful_queries += 1; + } + } + Ok(Message::Notification(_)) => { + // Retry to get the response + if let Ok(Message::Response(resp)) = client_conn + .receiver + .recv_timeout(Duration::from_millis(500)) + { + if resp.error.is_none() { + successful_queries += 1; + } + } + } + Ok(Message::Request(_)) + | Err(RecvTimeoutError::Timeout | RecvTimeoutError::Disconnected) => {} + } + } + + assert!( + successful_queries >= num_documents / 2, + "Should successfully query at least half the documents, got {}/{}", + successful_queries, + num_documents + ); + + shutdown_server(&client_conn, server_thread, 9999); +} + +/// Test handling of a large document. +/// +/// This verifies that the server can handle documents with many definitions +/// without excessive slowdown. 
+#[test] +fn test_large_document() { + let (client_conn, server_thread) = init_server(); + + let uri = "file:///test/large.jsonnet"; + + // Generate a large document with many local bindings + let mut content = String::new(); + let num_locals = 200; + for i in 0..num_locals { + let _ = writeln!(content, "local x{i} = {i};"); + } + content.push_str("{\n"); + for i in 0..num_locals { + let _ = writeln!(content, " field{i}: x{i},"); + } + content.push_str("}\n"); + + // Open the large document + client_conn + .sender + .send(Message::Notification(did_open_notification( + uri, &content, 1, + ))) + .unwrap(); + + // Wait for processing + drain_messages(&client_conn, Duration::from_millis(500)); + + // Test hover at the beginning + client_conn + .sender + .send(Message::Request(hover_request(100, uri, 0, 7))) + .unwrap(); + + let response = client_conn + .receiver + .recv_timeout(Duration::from_secs(5)) + .expect("Server should respond to hover on large document"); + assert_matches!(response, Message::Response(resp) => { + assert!(resp.error.is_none(), "Hover should succeed on large document"); + }); + + // Test goto definition in the middle + let middle_line = num_locals + (num_locals / 2); + client_conn + .sender + .send(Message::Request(goto_definition_request( + 101, + uri, + middle_line as u32, + 12, + ))) + .unwrap(); + + // Drain any notifications and get the response + loop { + match client_conn.receiver.recv_timeout(Duration::from_secs(5)) { + Ok(Message::Response(resp)) => { + assert!( + resp.error.is_none(), + "Goto definition should succeed on large document" + ); + break; + } + Ok(Message::Notification(_) | Message::Request(_)) => continue, + Err(err) => panic!("Should receive goto definition response: {err:?}"), + } + } + + shutdown_server(&client_conn, server_thread, 9999); +} + +/// Test document changes during pending requests. 
+/// +/// This verifies that the server handles document updates while requests +/// are being processed, without returning stale or incorrect results. +#[test] +fn test_changes_during_requests() { + let (client_conn, server_thread) = init_server(); + + let uri = "file:///test/changing.jsonnet"; + + // Open a document + client_conn + .sender + .send(Message::Notification(did_open_notification( + uri, + "local x = 1; x", + 1, + ))) + .unwrap(); + + // Wait for initial processing + drain_messages(&client_conn, Duration::from_millis(100)); + + // Send a request and immediately change the document + client_conn + .sender + .send(Message::Request(goto_definition_request(100, uri, 0, 13))) + .unwrap(); + + // Change document before response + client_conn + .sender + .send(Message::Notification(did_change_notification_full( + uri, + "local y = 2; y", + 2, + ))) + .unwrap(); + + // The server should handle this gracefully + // Either return a result for the old or new document, but not crash + loop { + match client_conn.receiver.recv_timeout(Duration::from_secs(2)) { + Ok(Message::Response(resp)) => { + // Response may succeed or fail, but shouldn't be an internal error + if let Some(err) = &resp.error { + assert!( + err.code != -32603, + "Should not have internal error: {:?}", + err + ); + } + break; + } + Ok(Message::Notification(_) | Message::Request(_)) => continue, + Err(RecvTimeoutError::Timeout | RecvTimeoutError::Disconnected) => { + // Timeout is acceptable - document changed + break; + } + } + } + + // Verify server is still responsive after the change + drain_messages(&client_conn, Duration::from_millis(100)); + + client_conn + .sender + .send(Message::Request(hover_request(200, uri, 0, 6))) + .unwrap(); + + loop { + match client_conn.receiver.recv_timeout(Duration::from_secs(2)) { + Ok(Message::Response(resp)) => { + assert!( + resp.error.is_none(), + "Server should be responsive after document change" + ); + break; + } + Ok(Message::Notification(_) | 
Message::Request(_)) => continue, + Err(err) => panic!("Server should respond after document change: {err:?}"), + } + } + + shutdown_server(&client_conn, server_thread, 9999); +} + +/// Test that requests are handled without blocking for too long. +/// +/// This is a basic responsiveness test to ensure the server doesn't hang. +#[test] +fn test_responsiveness() { + let (client_conn, server_thread) = init_server(); + + let uri = "file:///test/responsive.jsonnet"; + let text = r"local x = 1; +local f(a) = a * 2; +f(x)"; + + // Open a document + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text, 1))) + .unwrap(); + + // Wait for processing + drain_messages(&client_conn, Duration::from_millis(100)); + + // Send multiple different request types and verify all respond within timeout + let requests = vec![ + hover_request(1, uri, 0, 7), + goto_definition_request(2, uri, 2, 0), + completion_request(3, uri, 2, 0), + ]; + + for req in requests { + let req_id = req.id.clone(); + client_conn.sender.send(Message::Request(req)).unwrap(); + + let start = std::time::Instant::now(); + let timeout = Duration::from_secs(5); + + loop { + let Some(remaining) = timeout.checked_sub(start.elapsed()) else { + panic!("Request {:?} timed out", req_id); + }; + match client_conn.receiver.recv_timeout(remaining) { + Ok(Message::Response(resp)) => { + assert_eq!( + resp.id, req_id, + "Should receive response for correct request" + ); + assert!( + resp.error.is_none(), + "Request {:?} should not error", + req_id + ); + break; + } + Ok(Message::Notification(_) | Message::Request(_)) => continue, + Err(err) => panic!("Request {:?} timed out: {:?}", req_id, err), + } + } + } + + shutdown_server(&client_conn, server_thread, 9999); +} + +/// Test clean shutdown during document processing. +/// +/// This verifies that the server shuts down cleanly even when documents +/// are being processed. 
+#[test] +fn test_shutdown_during_processing() { + let (client_conn, server_thread) = init_server(); + + // Open several documents to keep the server busy + for i in 0..10 { + let uri = format!("file:///test/shutdown{}.jsonnet", i); + let content = format!("local x{} = {}; x{}", i, i, i); + client_conn + .sender + .send(Message::Notification(did_open_notification( + &uri, &content, 1, + ))) + .unwrap(); + } + + // Don't wait for diagnostics - immediately shutdown + // This tests graceful shutdown during active processing + client_conn + .sender + .send(Message::Request(shutdown_request(100))) + .unwrap(); + + // Should still get a clean shutdown response + let response = client_conn + .receiver + .recv_timeout(Duration::from_secs(5)) + .expect("Should receive shutdown response"); + + // May receive notifications before the response + let mut got_response = false; + match response { + Message::Response(resp) => { + assert!(resp.error.is_none(), "Shutdown should succeed"); + got_response = true; + } + Message::Notification(_) => { + // Try to get the response + loop { + match client_conn.receiver.recv_timeout(Duration::from_secs(2)) { + Ok(Message::Response(resp)) => { + assert!(resp.error.is_none(), "Shutdown should succeed"); + got_response = true; + break; + } + Ok(Message::Notification(_) | Message::Request(_)) => continue, + Err(RecvTimeoutError::Timeout | RecvTimeoutError::Disconnected) => break, + } + } + } + Message::Request(_) => { + // Unexpected request from server, ignore and wait for response + } + } + + assert!(got_response, "Should receive shutdown response"); + + // Send exit + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + + // Server should exit cleanly + server_thread + .join() + .expect("Server thread should exit cleanly after shutdown during processing"); +} diff --git a/crates/jrsonnet-rowan-parser/jsonnet.ungram b/crates/jrsonnet-rowan-parser/jsonnet.ungram index b2c70e44..90376010 100644 --- 
a/crates/jrsonnet-rowan-parser/jsonnet.ungram +++ b/crates/jrsonnet-rowan-parser/jsonnet.ungram @@ -5,25 +5,29 @@ SourceFile = Expr -SuffixIndex = +// Progressive wrapping expression types - each suffix operation wraps its base +ExprField = + base:Expr '?'? '.' - index:Name -SuffixIndexExpr = - ('?' '.')? + field:Name + +ExprIndex = + base:Expr + '?'? + '.'? '[' index:Expr ']' -SuffixSlice = + +ExprSlice = + base:Expr SliceDesc -SuffixApply = + +ExprCall = + callee:Expr ArgsDesc 'tailstrict'? -Suffix = - SuffixIndex -| SuffixIndexExpr -| SuffixSlice -| SuffixApply StmtLocal = 'local' @@ -101,7 +105,6 @@ ExprError = Expr = Stmt* ExprBase - Suffix* ExprBase = ExprBinary @@ -119,6 +122,10 @@ ExprBase = | ExprIfThenElse | ExprFunction | ExprError +| ExprField +| ExprIndex +| ExprSlice +| ExprCall BinaryOperator = '||' | '??' | '&&' diff --git a/crates/jrsonnet-rowan-parser/src/generated/nodes.rs b/crates/jrsonnet-rowan-parser/src/generated/nodes.rs index 1b55459d..99489271 100644 --- a/crates/jrsonnet-rowan-parser/src/generated/nodes.rs +++ b/crates/jrsonnet-rowan-parser/src/generated/nodes.rs @@ -29,23 +29,23 @@ impl Expr { pub fn expr_base(&self) -> Option { support::child(&self.syntax) } - pub fn suffixs(&self) -> AstChildren { - support::children(&self.syntax) - } } #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct SuffixIndex { +pub struct ExprField { pub(crate) syntax: SyntaxNode, } -impl SuffixIndex { +impl ExprField { + pub fn base(&self) -> Option { + support::child(&self.syntax) + } pub fn question_mark_token(&self) -> Option { support::token(&self.syntax, T![?]) } pub fn dot_token(&self) -> Option { support::token(&self.syntax, T![.]) } - pub fn index(&self) -> Option { + pub fn field(&self) -> Option { support::child(&self.syntax) } } @@ -61,10 +61,13 @@ impl Name { } #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct SuffixIndexExpr { +pub struct ExprIndex { pub(crate) syntax: SyntaxNode, } -impl SuffixIndexExpr { +impl ExprIndex { + pub fn 
base(&self) -> Option { + support::child(&self.syntax) + } pub fn question_mark_token(&self) -> Option { support::token(&self.syntax, T![?]) } @@ -75,7 +78,7 @@ impl SuffixIndexExpr { support::token(&self.syntax, T!['[']) } pub fn index(&self) -> Option { - support::child(&self.syntax) + support::children::(&self.syntax).nth(1) } pub fn r_brack_token(&self) -> Option { support::token(&self.syntax, T![']']) @@ -83,10 +86,13 @@ impl SuffixIndexExpr { } #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct SuffixSlice { +pub struct ExprSlice { pub(crate) syntax: SyntaxNode, } -impl SuffixSlice { +impl ExprSlice { + pub fn base(&self) -> Option { + support::child(&self.syntax) + } pub fn slice_desc(&self) -> Option { support::child(&self.syntax) } @@ -118,10 +124,13 @@ impl SliceDesc { } #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct SuffixApply { +pub struct ExprCall { pub(crate) syntax: SyntaxNode, } -impl SuffixApply { +impl ExprCall { + pub fn callee(&self) -> Option { + support::child(&self.syntax) + } pub fn args_desc(&self) -> Option { support::child(&self.syntax) } @@ -190,7 +199,7 @@ impl Assertion { support::token(&self.syntax, T![:]) } pub fn message(&self) -> Option { - support::child(&self.syntax) + support::children::(&self.syntax).nth(1) } } @@ -206,7 +215,7 @@ impl ExprBinary { support::token_child(&self.syntax) } pub fn rhs(&self) -> Option { - support::child(&self.syntax) + support::children::(&self.syntax).nth(1) } } @@ -803,14 +812,6 @@ impl DestructArrayElement { } } -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub enum Suffix { - SuffixIndex(SuffixIndex), - SuffixIndexExpr(SuffixIndexExpr), - SuffixSlice(SuffixSlice), - SuffixApply(SuffixApply), -} - #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum Bind { BindDestruct(BindDestruct), @@ -852,6 +853,10 @@ pub enum ExprBase { ExprIfThenElse(ExprIfThenElse), ExprFunction(ExprFunction), ExprError(ExprError), + ExprField(ExprField), + ExprIndex(ExprIndex), + ExprSlice(ExprSlice), + 
ExprCall(ExprCall), } #[derive(Debug, Clone, PartialEq, Eq, Hash)] @@ -1072,9 +1077,9 @@ impl AstNode for Expr { &self.syntax } } -impl AstNode for SuffixIndex { +impl AstNode for ExprField { fn can_cast(kind: SyntaxKind) -> bool { - kind == SUFFIX_INDEX + kind == EXPR_FIELD } fn cast(syntax: SyntaxNode) -> Option { if Self::can_cast(syntax.kind()) { @@ -1102,9 +1107,9 @@ impl AstNode for Name { &self.syntax } } -impl AstNode for SuffixIndexExpr { +impl AstNode for ExprIndex { fn can_cast(kind: SyntaxKind) -> bool { - kind == SUFFIX_INDEX_EXPR + kind == EXPR_INDEX } fn cast(syntax: SyntaxNode) -> Option { if Self::can_cast(syntax.kind()) { @@ -1117,9 +1122,9 @@ impl AstNode for SuffixIndexExpr { &self.syntax } } -impl AstNode for SuffixSlice { +impl AstNode for ExprSlice { fn can_cast(kind: SyntaxKind) -> bool { - kind == SUFFIX_SLICE + kind == EXPR_SLICE } fn cast(syntax: SyntaxNode) -> Option { if Self::can_cast(syntax.kind()) { @@ -1147,9 +1152,9 @@ impl AstNode for SliceDesc { &self.syntax } } -impl AstNode for SuffixApply { +impl AstNode for ExprCall { fn can_cast(kind: SyntaxKind) -> bool { - kind == SUFFIX_APPLY + kind == EXPR_CALL } fn cast(syntax: SyntaxNode) -> Option { if Self::can_cast(syntax.kind()) { @@ -1852,52 +1857,6 @@ impl AstNode for DestructArrayElement { &self.syntax } } -impl From for Suffix { - fn from(node: SuffixIndex) -> Suffix { - Suffix::SuffixIndex(node) - } -} -impl From for Suffix { - fn from(node: SuffixIndexExpr) -> Suffix { - Suffix::SuffixIndexExpr(node) - } -} -impl From for Suffix { - fn from(node: SuffixSlice) -> Suffix { - Suffix::SuffixSlice(node) - } -} -impl From for Suffix { - fn from(node: SuffixApply) -> Suffix { - Suffix::SuffixApply(node) - } -} -impl AstNode for Suffix { - fn can_cast(kind: SyntaxKind) -> bool { - match kind { - SUFFIX_INDEX | SUFFIX_INDEX_EXPR | SUFFIX_SLICE | SUFFIX_APPLY => true, - _ => false, - } - } - fn cast(syntax: SyntaxNode) -> Option { - let res = match syntax.kind() { - SUFFIX_INDEX => 
Suffix::SuffixIndex(SuffixIndex { syntax }), - SUFFIX_INDEX_EXPR => Suffix::SuffixIndexExpr(SuffixIndexExpr { syntax }), - SUFFIX_SLICE => Suffix::SuffixSlice(SuffixSlice { syntax }), - SUFFIX_APPLY => Suffix::SuffixApply(SuffixApply { syntax }), - _ => return None, - }; - Some(res) - } - fn syntax(&self) -> &SyntaxNode { - match self { - Suffix::SuffixIndex(it) => &it.syntax, - Suffix::SuffixIndexExpr(it) => &it.syntax, - Suffix::SuffixSlice(it) => &it.syntax, - Suffix::SuffixApply(it) => &it.syntax, - } - } -} impl From for Bind { fn from(node: BindDestruct) -> Bind { Bind::BindDestruct(node) @@ -2101,12 +2060,33 @@ impl From for ExprBase { ExprBase::ExprError(node) } } +impl From for ExprBase { + fn from(node: ExprField) -> ExprBase { + ExprBase::ExprField(node) + } +} +impl From for ExprBase { + fn from(node: ExprIndex) -> ExprBase { + ExprBase::ExprIndex(node) + } +} +impl From for ExprBase { + fn from(node: ExprSlice) -> ExprBase { + ExprBase::ExprSlice(node) + } +} +impl From for ExprBase { + fn from(node: ExprCall) -> ExprBase { + ExprBase::ExprCall(node) + } +} impl AstNode for ExprBase { fn can_cast(kind: SyntaxKind) -> bool { match kind { EXPR_BINARY | EXPR_UNARY | EXPR_OBJ_EXTEND | EXPR_PARENED | EXPR_STRING | EXPR_NUMBER | EXPR_LITERAL | EXPR_ARRAY | EXPR_OBJECT | EXPR_ARRAY_COMP - | EXPR_IMPORT | EXPR_VAR | EXPR_IF_THEN_ELSE | EXPR_FUNCTION | EXPR_ERROR => true, + | EXPR_IMPORT | EXPR_VAR | EXPR_IF_THEN_ELSE | EXPR_FUNCTION | EXPR_ERROR + | EXPR_FIELD | EXPR_INDEX | EXPR_SLICE | EXPR_CALL => true, _ => false, } } @@ -2127,6 +2107,10 @@ impl AstNode for ExprBase { EXPR_IF_THEN_ELSE => ExprBase::ExprIfThenElse(ExprIfThenElse { syntax }), EXPR_FUNCTION => ExprBase::ExprFunction(ExprFunction { syntax }), EXPR_ERROR => ExprBase::ExprError(ExprError { syntax }), + EXPR_FIELD => ExprBase::ExprField(ExprField { syntax }), + EXPR_INDEX => ExprBase::ExprIndex(ExprIndex { syntax }), + EXPR_SLICE => ExprBase::ExprSlice(ExprSlice { syntax }), + EXPR_CALL => 
ExprBase::ExprCall(ExprCall { syntax }), _ => return None, }; Some(res) @@ -2148,6 +2132,10 @@ impl AstNode for ExprBase { ExprBase::ExprIfThenElse(it) => &it.syntax, ExprBase::ExprFunction(it) => &it.syntax, ExprBase::ExprError(it) => &it.syntax, + ExprBase::ExprField(it) => &it.syntax, + ExprBase::ExprIndex(it) => &it.syntax, + ExprBase::ExprSlice(it) => &it.syntax, + ExprBase::ExprCall(it) => &it.syntax, } } } @@ -2765,11 +2753,6 @@ impl std::fmt::Display for CustomError { std::fmt::Display::fmt(self.syntax(), f) } } -impl std::fmt::Display for Suffix { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - std::fmt::Display::fmt(self.syntax(), f) - } -} impl std::fmt::Display for Bind { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { std::fmt::Display::fmt(self.syntax(), f) @@ -2830,7 +2813,7 @@ impl std::fmt::Display for Expr { std::fmt::Display::fmt(self.syntax(), f) } } -impl std::fmt::Display for SuffixIndex { +impl std::fmt::Display for ExprField { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { std::fmt::Display::fmt(self.syntax(), f) } @@ -2840,12 +2823,12 @@ impl std::fmt::Display for Name { std::fmt::Display::fmt(self.syntax(), f) } } -impl std::fmt::Display for SuffixIndexExpr { +impl std::fmt::Display for ExprIndex { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { std::fmt::Display::fmt(self.syntax(), f) } } -impl std::fmt::Display for SuffixSlice { +impl std::fmt::Display for ExprSlice { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { std::fmt::Display::fmt(self.syntax(), f) } @@ -2855,7 +2838,7 @@ impl std::fmt::Display for SliceDesc { std::fmt::Display::fmt(self.syntax(), f) } } -impl std::fmt::Display for SuffixApply { +impl std::fmt::Display for ExprCall { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { std::fmt::Display::fmt(self.syntax(), f) } diff --git a/crates/jrsonnet-rowan-parser/src/generated/syntax_kinds.rs 
b/crates/jrsonnet-rowan-parser/src/generated/syntax_kinds.rs index 156bc9c8..91670c79 100644 --- a/crates/jrsonnet-rowan-parser/src/generated/syntax_kinds.rs +++ b/crates/jrsonnet-rowan-parser/src/generated/syntax_kinds.rs @@ -182,12 +182,12 @@ pub enum SyntaxKind { __LAST_TOKEN, SOURCE_FILE, EXPR, - SUFFIX_INDEX, + EXPR_FIELD, NAME, - SUFFIX_INDEX_EXPR, - SUFFIX_SLICE, + EXPR_INDEX, + EXPR_SLICE, SLICE_DESC, - SUFFIX_APPLY, + EXPR_CALL, ARGS_DESC, STMT_LOCAL, STMT_ASSERT, @@ -234,7 +234,6 @@ pub enum SyntaxKind { DESTRUCT_OBJECT_FIELD, DESTRUCT_REST, DESTRUCT_ARRAY_ELEMENT, - SUFFIX, BIND, STMT, OBJ_BODY, @@ -273,9 +272,9 @@ impl SyntaxKind { } pub fn is_enum(self) -> bool { match self { - SUFFIX | BIND | STMT | OBJ_BODY | COMP_SPEC | EXPR_BASE | MEMBER_COMP | MEMBER - | FIELD_NAME | DESTRUCT | DESTRUCT_ARRAY_PART | BINARY_OPERATOR | UNARY_OPERATOR - | LITERAL | TEXT | NUMBER | IMPORT_KIND | VISIBILITY | TRIVIA | CUSTOM_ERROR => true, + BIND | STMT | OBJ_BODY | COMP_SPEC | EXPR_BASE | MEMBER_COMP | MEMBER | FIELD_NAME + | DESTRUCT | DESTRUCT_ARRAY_PART | BINARY_OPERATOR | UNARY_OPERATOR | LITERAL + | TEXT | NUMBER | IMPORT_KIND | VISIBILITY | TRIVIA | CUSTOM_ERROR => true, _ => false, } } diff --git a/crates/jrsonnet-rowan-parser/src/lib.rs b/crates/jrsonnet-rowan-parser/src/lib.rs index b1bceef6..ffc5fa1e 100644 --- a/crates/jrsonnet-rowan-parser/src/lib.rs +++ b/crates/jrsonnet-rowan-parser/src/lib.rs @@ -3,7 +3,7 @@ use event::Sink; use generated::nodes::{SourceFile, Trivia}; use lex::lex; -use parser::{LocatedSyntaxError, Parser}; +use parser::Parser; pub use rowan; mod ast; @@ -21,6 +21,8 @@ mod token_set; pub use ast::{AstChildren, AstNode, AstToken}; pub use generated::{nodes, syntax_kinds::SyntaxKind}; pub use language::*; +pub use parser::{ExpectedSyntax, LocatedSyntaxError, SyntaxError}; +pub use rowan::GreenNode; pub use token_set::SyntaxKindSet; use self::{ @@ -28,7 +30,10 @@ use self::{ generated::nodes::{Expr, ExprBinary, ExprObjExtend}, }; -pub fn 
parse(input: &str) -> (SourceFile, Vec) { +/// Parse input and return a GreenNode (thread-safe) plus errors. +/// +/// Use `source_file_from_green` to create a `SourceFile` from the green node. +pub fn parse_green(input: &str) -> (GreenNode, Vec) { let lexemes = lex(input); let kinds = lexemes .iter() @@ -40,12 +45,19 @@ pub fn parse(input: &str) -> (SourceFile, Vec) { let sink = Sink::new(events, &lexemes); let parse = sink.finish(); - ( - SourceFile { - syntax: parse.syntax(), - }, - parse.errors, - ) + (parse.green_node, parse.errors) +} + +/// Create a SourceFile from a GreenNode. +pub fn source_file_from_green(green: &GreenNode) -> SourceFile { + SourceFile { + syntax: SyntaxNode::new_root(green.clone()), + } +} + +pub fn parse(input: &str) -> (SourceFile, Vec) { + let (green, errors) = parse_green(input); + (source_file_from_green(&green), errors) } impl ExprBinary { pub fn lhs_work(&self) -> Option { @@ -69,3 +81,147 @@ impl ExprObjExtend { children.next() } } + +#[cfg(test)] +mod ast_structure_tests { + use indoc::indoc; + + use super::*; + + fn check(input: &str, expected: &str) { + let (file, errors) = parse(input); + assert!( + errors.is_empty(), + "parse errors for '{}': {:?}", + input, + errors + ); + let actual = format!("{:#?}", file.syntax()); + assert_eq!( + actual.trim(), + expected.trim(), + "AST mismatch for '{}'", + input + ); + } + + #[test] + fn field_access() { + check( + "std.length", + indoc! {r#" + SOURCE_FILE@0..10 + EXPR@0..10 + EXPR_FIELD@0..10 + EXPR@0..3 + EXPR_VAR@0..3 + NAME@0..3 + IDENT@0..3 "std" + DOT@3..4 "." + NAME@4..10 + IDENT@4..10 "length" + "#}, + ); + } + + #[test] + fn method_call() { + check( + "std.length(x)", + indoc! {r#" + SOURCE_FILE@0..13 + EXPR@0..13 + EXPR_CALL@0..13 + EXPR@0..10 + EXPR_FIELD@0..10 + EXPR@0..3 + EXPR_VAR@0..3 + NAME@0..3 + IDENT@0..3 "std" + DOT@3..4 "." 
+ NAME@4..10 + IDENT@4..10 "length" + ARGS_DESC@10..13 + L_PAREN@10..11 "(" + ARG@11..12 + EXPR@11..12 + EXPR_VAR@11..12 + NAME@11..12 + IDENT@11..12 "x" + R_PAREN@12..13 ")" + "#}, + ); + } + + #[test] + fn chained_field_access() { + check( + "a.b.c", + indoc! {r#" + SOURCE_FILE@0..5 + EXPR@0..5 + EXPR_FIELD@0..5 + EXPR@0..3 + EXPR_FIELD@0..3 + EXPR@0..1 + EXPR_VAR@0..1 + NAME@0..1 + IDENT@0..1 "a" + DOT@1..2 "." + NAME@2..3 + IDENT@2..3 "b" + DOT@3..4 "." + NAME@4..5 + IDENT@4..5 "c" + "#}, + ); + } + + #[test] + fn index_access() { + check( + "arr[0]", + indoc! {r#" + SOURCE_FILE@0..6 + EXPR@0..6 + EXPR_INDEX@0..6 + EXPR@0..3 + EXPR_VAR@0..3 + NAME@0..3 + IDENT@0..3 "arr" + L_BRACK@3..4 "[" + EXPR@4..5 + EXPR_NUMBER@4..5 + FLOAT@4..5 "0" + R_BRACK@5..6 "]" + "#}, + ); + } + + #[test] + fn slice_access() { + check( + "arr[1:3]", + indoc! {r#" + SOURCE_FILE@0..8 + EXPR@0..8 + EXPR_SLICE@0..8 + EXPR@0..3 + EXPR_VAR@0..3 + NAME@0..3 + IDENT@0..3 "arr" + SLICE_DESC@3..8 + L_BRACK@3..4 "[" + EXPR@4..5 + EXPR_NUMBER@4..5 + FLOAT@4..5 "1" + COLON@5..6 ":" + SLICE_DESC_END@6..7 + EXPR@6..7 + EXPR_NUMBER@6..7 + FLOAT@6..7 "3" + R_BRACK@7..8 "]" + "#}, + ); + } +} diff --git a/crates/jrsonnet-rowan-parser/src/parser.rs b/crates/jrsonnet-rowan-parser/src/parser.rs index 91d3b224..4cde73ef 100644 --- a/crates/jrsonnet-rowan-parser/src/parser.rs +++ b/crates/jrsonnet-rowan-parser/src/parser.rs @@ -1,6 +1,7 @@ use std::{cell::Cell, fmt, rc::Rc}; use rowan::{GreenNode, TextRange}; +use thiserror::Error; use crate::{ event::Event, @@ -12,6 +13,10 @@ use crate::{ SyntaxNode, T, TS, }; +/// Token set for field visibility (:, ::, :::). 
+/// Note: We can't use VISIBILITY because Rust's macro tokenizer splits ":::" into "::" + ":" +const VISIBILITY: SyntaxKindSet = TS![: ::].with(COLONCOLONCOLON); + pub struct Parse { pub green_node: GreenNode, pub errors: Vec, @@ -29,24 +34,23 @@ pub struct Parser { steps: Cell, } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, PartialEq, Eq, Error)] pub enum SyntaxError { + #[error("expected {expected}, found {found:?}")] Unexpected { expected: ExpectedSyntax, found: SyntaxKind, }, - Missing { - expected: ExpectedSyntax, - }, - Custom { - error: String, - }, - Hint { - error: String, - }, + #[error("expected {expected}")] + Missing { expected: ExpectedSyntax }, + #[error("{error}")] + Custom { error: String }, + #[error("{error}")] + Hint { error: String }, } -#[derive(Debug)] +#[derive(Debug, PartialEq, Eq, Error)] +#[error("{error}")] pub struct LocatedSyntaxError { pub error: SyntaxError, pub range: TextRange, @@ -255,7 +259,7 @@ impl Drop for ExpectedSyntaxGuard { } } -#[derive(Clone, Debug, Copy)] +#[derive(Clone, Debug, Copy, PartialEq, Eq)] pub enum ExpectedSyntax { Named(&'static str), Unnamed(SyntaxKindSet), @@ -629,89 +633,117 @@ fn array(p: &mut Parser) -> CompletedMarker { m.complete(p, EXPR_ARRAY) } } -/// Returns true if it was slice, false if just index + +/// Parses safe navigation: `expr?.field` or `expr?.[index]` +/// Expects parser to be positioned at `?` with `.` following #[must_use] -fn slice_desc_or_index(p: &mut Parser) -> bool { - let m = p.start(); - p.bump(); - // TODO: do not treat :, ::, ::: as full tokens? - // Start +fn suffix_safe_nav(p: &mut Parser, m: Marker) -> CompletedMarker { + p.bump(); // ? + p.bump(); // . 
+ if p.at(IDENT) { + name(p); + m.complete(p, EXPR_FIELD) + } else if p.at(T!['[']) { + p.bump(); // [ + expr(p); + p.expect(T![']']); + m.complete(p, EXPR_INDEX) + } else { + m.complete_missing(p, ExpectedSyntax::Named("field or index")) + } +} + +/// Parses field access: `expr.field` +/// Expects parser to be positioned at `.` +#[must_use] +fn suffix_field(p: &mut Parser, m: Marker) -> CompletedMarker { + p.bump(); // . + name(p); + m.complete(p, EXPR_FIELD) +} + +/// Parses index or slice: `expr[index]` or `expr[start:end:step]` +/// Expects parser to be positioned at `[` +#[must_use] +fn suffix_index_or_slice(p: &mut Parser, m: Marker) -> CompletedMarker { + let slice_m = p.start(); + p.bump(); // [ + + // Parse start expression if not immediately a colon if !p.at(T![:]) && !p.at(T![::]) { expr(p); } + if p.at(T![:]) { - p.bump(); - // End - if !p.at(T![']']) { - expr(p).wrap(p, SLICE_DESC_END); - } - if p.at(T![:]) { - p.bump(); - // Step - if !p.at(T![']']) { - expr(p).wrap(p, SLICE_DESC_STEP); - } - } + slice_single_colon(p); + slice_m.complete(p, SLICE_DESC); + m.complete(p, EXPR_SLICE) } else if p.at(T![::]) { - p.bump(); - // End - if !p.at(T![']']) { - expr(p).wrap(p, SLICE_DESC_END); - } + slice_double_colon(p); + slice_m.complete(p, SLICE_DESC); + m.complete(p, EXPR_SLICE) } else { - // It was not a slice p.expect(T![']']); - m.forget(p); - return false; + slice_m.forget(p); + m.complete(p, EXPR_INDEX) + } +} + +/// Continues parsing slice after single `:` - handles `[start:end]` and `[start:end:step]` +fn slice_single_colon(p: &mut Parser) { + p.bump(); // first : + if !p.at(T![:]) && !p.at(T![']']) { + expr(p).wrap(p, SLICE_DESC_END); + } + if p.at(T![:]) { + p.bump(); // second : + if !p.at(T![']']) { + expr(p).wrap(p, SLICE_DESC_STEP); + } } p.expect(T![']']); - m.complete(p, SLICE_DESC); - true } -fn suffix(p: &mut Parser) { +/// Continues parsing slice after `::` - handles `[start::step]` +fn slice_double_colon(p: &mut Parser) { + p.bump(); // :: 
+ if !p.at(T![']']) { + expr(p).wrap(p, SLICE_DESC_END); + } + p.expect(T![']']); +} + +/// Parses function call: `expr(args)` +/// Expects parser to be positioned at `(` +#[must_use] +fn suffix_call(p: &mut Parser, m: Marker) -> CompletedMarker { + args_desc(p); + m.complete(p, EXPR_CALL) +} + +fn lhs(p: &mut Parser) -> Result { + let mut result = lhs_basic(p)?; + + // Each suffix wraps the previous result progressively loop { - let start = p.start(); - let _marker: CompletedMarker = if p.at(T![?]) { - p.bump(); - p.expect(T![.]); - if p.at(IDENT) { - name(p); - start.complete(p, SUFFIX_INDEX) - } else if p.at(T!['[']) { - p.bump(); - expr(p); - p.expect(T![']']); - start.complete(p, SUFFIX_INDEX_EXPR) - } else { - start.complete_missing(p, ExpectedSyntax::Named("index")) - } + result = if p.at(T![?]) && p.nth_at(1, T![.]) { + let m = result.wrap(p, EXPR).precede(p); + suffix_safe_nav(p, m) } else if p.at(T![.]) { - p.bump(); - name(p); - start.complete(p, SUFFIX_INDEX) + let m = result.wrap(p, EXPR).precede(p); + suffix_field(p, m) } else if p.at(T!['[']) { - if slice_desc_or_index(p) { - start.complete(p, SUFFIX_SLICE) - } else { - start.complete(p, SUFFIX_INDEX_EXPR) - } + let m = result.wrap(p, EXPR).precede(p); + suffix_index_or_slice(p, m) } else if p.at(T!['(']) { - args_desc(p); - start.complete(p, SUFFIX_APPLY) + let m = result.wrap(p, EXPR).precede(p); + suffix_call(p, m) } else { - start.forget(p); break; }; } -} - -fn lhs(p: &mut Parser) -> Result { - let lhs = lhs_basic(p)?; - - suffix(p); - Ok(lhs) + Ok(result) } fn name(p: &mut Parser) { let m = p.start(); @@ -900,7 +932,9 @@ fn lhs_basic(p: &mut Parser) -> Result { let m = p.start(); p.bump(); - let _ = expr_binding_power(p, right_binding_power); + if let Ok(operand) = expr_binding_power(p, right_binding_power) { + operand.precede(p).complete(p, EXPR); + } m.complete(p, EXPR_UNARY) } else if p.at(T!['(']) { let m = p.start(); diff --git a/crates/jrsonnet-rowan-parser/src/tests.rs 
b/crates/jrsonnet-rowan-parser/src/tests.rs index 34181c1b..77431b36 100644 --- a/crates/jrsonnet-rowan-parser/src/tests.rs +++ b/crates/jrsonnet-rowan-parser/src/tests.rs @@ -1,4 +1,4 @@ -// `never` +// `never` - tests disabled due to missing deps (miette, jrsonnet-stdlib) #![cfg(any())] use miette::{ diff --git a/crates/jrsonnet-rowan-parser/src/token_set.rs b/crates/jrsonnet-rowan-parser/src/token_set.rs index 24055fc4..9f5cb437 100644 --- a/crates/jrsonnet-rowan-parser/src/token_set.rs +++ b/crates/jrsonnet-rowan-parser/src/token_set.rs @@ -2,7 +2,7 @@ use std::fmt; use crate::SyntaxKind; -#[derive(Clone, Copy, Default)] +#[derive(Clone, Copy, Default, PartialEq, Eq)] pub struct SyntaxKindSet(u128); impl SyntaxKindSet { diff --git a/crates/jrsonnet-std-sig/Cargo.toml b/crates/jrsonnet-std-sig/Cargo.toml new file mode 100644 index 00000000..d148c5d1 --- /dev/null +++ b/crates/jrsonnet-std-sig/Cargo.toml @@ -0,0 +1,13 @@ +[package] +name = "jrsonnet-std-sig" +authors.workspace = true +edition.workspace = true +license.workspace = true +repository.workspace = true +version.workspace = true +description = "Jsonnet stdlib function specifications for LSP" + +[dependencies] + +[lints] +workspace = true diff --git a/crates/jrsonnet-std-sig/src/lib.rs b/crates/jrsonnet-std-sig/src/lib.rs new file mode 100644 index 00000000..829532de --- /dev/null +++ b/crates/jrsonnet-std-sig/src/lib.rs @@ -0,0 +1,1705 @@ +//! Jsonnet standard library function specifications. +//! +//! This crate provides a single source of truth for all stdlib function metadata: +//! - Parameter names and types +//! - Return types +//! - Documentation +//! - Flow typing information (for type narrowing in conditionals) +//! +//! This data is used by: +//! - `jrsonnet-lsp-stdlib` for type signatures and documentation +//! - `jrsonnet-lsp-inference` for flow-sensitive type narrowing + +/// Parameter type specification. 
+#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum ParamType { + /// Any type + Any, + /// Null type + Null, + /// Boolean type + Bool, + /// Number type + Number, + /// String type + String, + /// Character type (single-char string) + Char, + /// Array of any elements + Array, + /// Array of numbers + ArrayNumber, + /// Array of strings + ArrayString, + /// Array of characters + ArrayChar, + /// Array of booleans + ArrayBool, + /// Object type + Object, + /// Function type + Function, + /// String or array + StringOrArray, + /// String, array, object, or function (for std.length) + Lengthable, +} + +/// Return type specification. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum ReturnSpec { + /// Fixed return type + Fixed(ParamType), + /// Same type as argument at given index + SameAsArg(usize), + /// Non-negative number (length result) + NonNegative, + /// Array with element type from function return at param index + ArrayOfFuncReturn(usize), + /// Array with same element type as input array at param index + ArrayWithSameElements(usize), + /// Set (sorted unique) with same element type as input at param index + SetWithSameElements(usize), + /// Array of values from object at param index + ObjectValuesType(usize), + /// Flattened result of map function at param index + FlatMapResult(usize), + /// Any type (unknown) + Any, +} + +impl Default for ReturnSpec { + fn default() -> Self { + Self::Any + } +} + +/// Parameter definition. +#[derive(Debug, Clone, Copy)] +pub struct Param { + /// Parameter name + pub name: &'static str, + /// Parameter type + pub ty: ParamType, + /// Whether this parameter has a default value + pub has_default: bool, +} + +impl Param { + /// Create a required parameter. + pub const fn req(name: &'static str, ty: ParamType) -> Self { + Self { + name, + ty, + has_default: false, + } + } + + /// Create an optional parameter. 
+ pub const fn opt(name: &'static str, ty: ParamType) -> Self { + Self { + name, + ty, + has_default: true, + } + } +} + +/// Totality of a flow typing predicate. +/// +/// Determines whether the negation of the predicate provides useful type information. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum Totality { + /// Total predicates can be negated. + /// Example: if `std.isNumber(x)` is false, then x is definitely NOT a number. + Total, + /// Partial predicates cannot be negated. + /// Example: if `std.isInteger(x)` is false, x might still be a decimal number. + Partial, +} + +/// Type that a predicate narrows to. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum NarrowsTo { + Null, + Bool, + Number, + String, + Array, + Object, + Function, +} + +/// Flow typing information for type-guard functions. +/// +/// When a function like `std.isNumber(x)` returns true, we can narrow +/// the type of `x` to `Number` in the then-branch. +#[derive(Debug, Clone, Copy)] +pub struct FlowTyping { + /// Index of the parameter being narrowed (usually 0) + pub param_idx: usize, + /// Type that the parameter is narrowed to when predicate is true + pub narrows_to: NarrowsTo, + /// Whether the predicate is total (can negate) or partial (cannot) + pub totality: Totality, +} + +/// Complete specification for a stdlib function. +#[derive(Debug, Clone)] +pub struct StdFn { + /// Function name (without "std." 
prefix) + pub name: &'static str, + /// Parameter definitions + pub params: &'static [Param], + /// Return type specification + pub return_spec: ReturnSpec, + /// Whether function is variadic + pub variadic: bool, + /// Short documentation + pub doc: &'static str, + /// Example usage (optional) + pub example: Option<&'static str>, + /// Flow typing info (for type guards like std.isNumber) + pub flow_typing: Option, +} + +// Helper constants for common parameter types +const ANY: ParamType = ParamType::Any; +const NUM: ParamType = ParamType::Number; +const STR: ParamType = ParamType::String; +const BOOL: ParamType = ParamType::Bool; +const ARR: ParamType = ParamType::Array; +const ARR_NUM: ParamType = ParamType::ArrayNumber; +const ARR_STR: ParamType = ParamType::ArrayString; +const ARR_CHAR: ParamType = ParamType::ArrayChar; +const OBJ: ParamType = ParamType::Object; +const FUNC: ParamType = ParamType::Function; +const CHAR: ParamType = ParamType::Char; +const LEN: ParamType = ParamType::Lengthable; + +// Helper for creating flow typing info +const fn flow(narrows_to: NarrowsTo, totality: Totality) -> FlowTyping { + FlowTyping { + param_idx: 0, + narrows_to, + totality, + } +} + +/// All stdlib function specifications. 
+pub static FNS: &[StdFn] = &[ + // ========================================================================== + // Type checking functions + // ========================================================================== + StdFn { + name: "type", + params: &[Param::req("x", ANY)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Returns the type of `x` as a string.", + example: Some(r#"std.type([1,2]) // "array""#), + flow_typing: None, + }, + StdFn { + name: "isString", + params: &[Param::req("v", ANY)], + return_spec: ReturnSpec::Fixed(BOOL), + variadic: false, + doc: "Returns true if `v` is a string.", + example: None, + flow_typing: Some(flow(NarrowsTo::String, Totality::Total)), + }, + StdFn { + name: "isNumber", + params: &[Param::req("v", ANY)], + return_spec: ReturnSpec::Fixed(BOOL), + variadic: false, + doc: "Returns true if `v` is a number.", + example: None, + flow_typing: Some(flow(NarrowsTo::Number, Totality::Total)), + }, + StdFn { + name: "isBoolean", + params: &[Param::req("v", ANY)], + return_spec: ReturnSpec::Fixed(BOOL), + variadic: false, + doc: "Returns true if `v` is a boolean.", + example: None, + flow_typing: Some(flow(NarrowsTo::Bool, Totality::Total)), + }, + StdFn { + name: "isObject", + params: &[Param::req("v", ANY)], + return_spec: ReturnSpec::Fixed(BOOL), + variadic: false, + doc: "Returns true if `v` is an object.", + example: None, + flow_typing: Some(flow(NarrowsTo::Object, Totality::Total)), + }, + StdFn { + name: "isArray", + params: &[Param::req("v", ANY)], + return_spec: ReturnSpec::Fixed(BOOL), + variadic: false, + doc: "Returns true if `v` is an array.", + example: None, + flow_typing: Some(flow(NarrowsTo::Array, Totality::Total)), + }, + StdFn { + name: "isFunction", + params: &[Param::req("v", ANY)], + return_spec: ReturnSpec::Fixed(BOOL), + variadic: false, + doc: "Returns true if `v` is a function.", + example: None, + flow_typing: Some(flow(NarrowsTo::Function, Totality::Total)), + }, + StdFn { + name: 
"isEmpty", + params: &[Param::req("v", ANY)], + return_spec: ReturnSpec::Fixed(BOOL), + variadic: false, + doc: "Returns true if string, array, or object is empty.", + example: None, + flow_typing: None, + }, + StdFn { + name: "isInteger", + params: &[Param::req("v", ANY)], + return_spec: ReturnSpec::Fixed(BOOL), + variadic: false, + doc: "Returns true if `v` is an integer.", + example: None, + flow_typing: Some(flow(NarrowsTo::Number, Totality::Partial)), + }, + StdFn { + name: "isDecimal", + params: &[Param::req("v", ANY)], + return_spec: ReturnSpec::Fixed(BOOL), + variadic: false, + doc: "Returns true if `v` is a decimal (has fractional part).", + example: None, + flow_typing: Some(flow(NarrowsTo::Number, Totality::Partial)), + }, + StdFn { + name: "isEven", + params: &[Param::req("v", ANY)], + return_spec: ReturnSpec::Fixed(BOOL), + variadic: false, + doc: "Returns true if `v` is an even integer.", + example: None, + flow_typing: Some(flow(NarrowsTo::Number, Totality::Partial)), + }, + StdFn { + name: "isOdd", + params: &[Param::req("v", ANY)], + return_spec: ReturnSpec::Fixed(BOOL), + variadic: false, + doc: "Returns true if `v` is an odd integer.", + example: None, + flow_typing: Some(flow(NarrowsTo::Number, Totality::Partial)), + }, + // ========================================================================== + // Length and basic array functions + // ========================================================================== + StdFn { + name: "length", + params: &[Param::req("x", LEN)], + return_spec: ReturnSpec::NonNegative, + variadic: false, + doc: "Returns the length of an array, string, object, or function parameters.", + example: Some("std.length([1,2,3]) // 3"), + flow_typing: None, + }, + StdFn { + name: "makeArray", + params: &[Param::req("sz", NUM), Param::req("func", FUNC)], + return_spec: ReturnSpec::Fixed(ARR), + variadic: false, + doc: "Creates an array of size `sz` by calling `func(i)` for each index.", + example: Some("std.makeArray(3, 
function(i) i * 2) // [0, 2, 4]"), + flow_typing: None, + }, + // ========================================================================== + // Higher-order array functions + // ========================================================================== + StdFn { + name: "map", + params: &[Param::req("func", FUNC), Param::req("arr", ARR)], + return_spec: ReturnSpec::ArrayOfFuncReturn(0), + variadic: false, + doc: "Applies `func` to each element of `arr`.", + example: Some("std.map(function(x) x * 2, [1,2,3]) // [2, 4, 6]"), + flow_typing: None, + }, + StdFn { + name: "mapWithIndex", + params: &[Param::req("func", FUNC), Param::req("arr", ARR)], + return_spec: ReturnSpec::ArrayOfFuncReturn(0), + variadic: false, + doc: "Like `map`, but `func` takes `(index, element)`.", + example: None, + flow_typing: None, + }, + StdFn { + name: "mapWithKey", + params: &[Param::req("func", FUNC), Param::req("obj", OBJ)], + return_spec: ReturnSpec::Fixed(OBJ), + variadic: false, + doc: "Applies `func(key, value)` to each field of `obj`.", + example: None, + flow_typing: None, + }, + StdFn { + name: "filter", + params: &[Param::req("func", FUNC), Param::req("arr", ARR)], + return_spec: ReturnSpec::ArrayWithSameElements(1), + variadic: false, + doc: "Returns elements of `arr` where `func(x)` is true.", + example: Some("std.filter(function(x) x > 1, [1,2,3]) // [2, 3]"), + flow_typing: None, + }, + StdFn { + name: "flatMap", + params: &[Param::req("func", FUNC), Param::req("arr", ARR)], + return_spec: ReturnSpec::FlatMapResult(0), + variadic: false, + doc: "Maps `func` over `arr` and flattens the result.", + example: None, + flow_typing: None, + }, + StdFn { + name: "filterMap", + params: &[ + Param::req("filter_func", FUNC), + Param::req("map_func", FUNC), + Param::req("arr", ARR), + ], + return_spec: ReturnSpec::Fixed(ARR), + variadic: false, + doc: "Filters then maps array elements.", + example: None, + flow_typing: None, + }, + StdFn { + name: "foldl", + params: &[ + 
Param::req("func", FUNC), + Param::req("arr", ARR), + Param::req("init", ANY), + ], + return_spec: ReturnSpec::SameAsArg(2), + variadic: false, + doc: "Left fold: `func(func(func(init, arr[0]), arr[1]), ...)`.", + example: Some("std.foldl(function(a, b) a + b, [1,2,3], 0) // 6"), + flow_typing: None, + }, + StdFn { + name: "foldr", + params: &[ + Param::req("func", FUNC), + Param::req("arr", ARR), + Param::req("init", ANY), + ], + return_spec: ReturnSpec::SameAsArg(2), + variadic: false, + doc: "Right fold: `func(arr[0], func(arr[1], ... init))`.", + example: None, + flow_typing: None, + }, + // ========================================================================== + // Array functions + // ========================================================================== + StdFn { + name: "range", + params: &[Param::req("from", NUM), Param::req("to", NUM)], + return_spec: ReturnSpec::Fixed(ARR_NUM), + variadic: false, + doc: "Returns an array `[from, from+1, ..., to]`.", + example: Some("std.range(1, 5) // [1, 2, 3, 4, 5]"), + flow_typing: None, + }, + StdFn { + name: "slice", + params: &[ + Param::req("arr", ANY), + Param::req("index", NUM), + Param::req("end", NUM), + Param::opt("step", NUM), + ], + return_spec: ReturnSpec::ArrayWithSameElements(0), + variadic: false, + doc: "Slices array from `index` to `end` with `step`.", + example: None, + flow_typing: None, + }, + StdFn { + name: "join", + params: &[Param::req("sep", STR), Param::req("arr", ARR)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Joins array elements with separator.", + example: Some(r#"std.join(",", ["a","b","c"]) // "a,b,c""#), + flow_typing: None, + }, + StdFn { + name: "deepJoin", + params: &[Param::req("arr", ANY)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Recursively joins nested arrays into a string.", + example: None, + flow_typing: None, + }, + StdFn { + name: "lines", + params: &[Param::req("arr", ARR_STR)], + return_spec: 
ReturnSpec::Fixed(STR), + variadic: false, + doc: "Joins array with newlines and adds trailing newline.", + example: None, + flow_typing: None, + }, + StdFn { + name: "reverse", + params: &[Param::req("arr", ANY)], + return_spec: ReturnSpec::SameAsArg(0), + variadic: false, + doc: "Reverses an array.", + example: None, + flow_typing: None, + }, + StdFn { + name: "member", + params: &[Param::req("arr", ARR), Param::req("x", ANY)], + return_spec: ReturnSpec::Fixed(BOOL), + variadic: false, + doc: "Returns true if `x` is in `arr`.", + example: None, + flow_typing: None, + }, + StdFn { + name: "find", + params: &[Param::req("value", ANY), Param::req("arr", ANY)], + return_spec: ReturnSpec::Fixed(ARR_NUM), + variadic: false, + doc: "Returns indices where `arr[i] == value`.", + example: None, + flow_typing: None, + }, + StdFn { + name: "contains", + params: &[Param::req("arr", ANY), Param::req("x", ANY)], + return_spec: ReturnSpec::Fixed(BOOL), + variadic: false, + doc: "Returns true if array contains `x`.", + example: None, + flow_typing: None, + }, + StdFn { + name: "count", + params: &[Param::req("arr", ANY), Param::req("x", ANY)], + return_spec: ReturnSpec::NonNegative, + variadic: false, + doc: "Counts occurrences of `x` in `arr`.", + example: None, + flow_typing: None, + }, + StdFn { + name: "all", + params: &[Param::req("arr", ARR)], + return_spec: ReturnSpec::Fixed(BOOL), + variadic: false, + doc: "Returns true if all elements are truthy.", + example: None, + flow_typing: None, + }, + StdFn { + name: "any", + params: &[Param::req("arr", ARR)], + return_spec: ReturnSpec::Fixed(BOOL), + variadic: false, + doc: "Returns true if any element is truthy.", + example: None, + flow_typing: None, + }, + StdFn { + name: "avg", + params: &[Param::req("arr", ARR_NUM)], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "Returns average of numeric array.", + example: None, + flow_typing: None, + }, + StdFn { + name: "sort", + params: &[Param::req("arr", ARR), 
Param::opt("keyF", FUNC)], + return_spec: ReturnSpec::SameAsArg(0), + variadic: false, + doc: "Sorts array, optionally by key function.", + example: Some("std.sort([3,1,2]) // [1, 2, 3]"), + flow_typing: None, + }, + StdFn { + name: "uniq", + params: &[Param::req("arr", ARR), Param::opt("keyF", FUNC)], + return_spec: ReturnSpec::SetWithSameElements(0), + variadic: false, + doc: "Removes consecutive duplicates.", + example: None, + flow_typing: None, + }, + StdFn { + name: "set", + params: &[Param::req("arr", ARR), Param::opt("keyF", FUNC)], + return_spec: ReturnSpec::SetWithSameElements(0), + variadic: false, + doc: "Returns sorted unique elements.", + example: None, + flow_typing: None, + }, + StdFn { + name: "setUnion", + params: &[ + Param::req("a", ARR), + Param::req("b", ARR), + Param::opt("keyF", FUNC), + ], + return_spec: ReturnSpec::SetWithSameElements(0), + variadic: false, + doc: "Union of two sets.", + example: None, + flow_typing: None, + }, + StdFn { + name: "setInter", + params: &[ + Param::req("a", ARR), + Param::req("b", ARR), + Param::opt("keyF", FUNC), + ], + return_spec: ReturnSpec::SetWithSameElements(0), + variadic: false, + doc: "Intersection of two sets.", + example: None, + flow_typing: None, + }, + StdFn { + name: "setDiff", + params: &[ + Param::req("a", ARR), + Param::req("b", ARR), + Param::opt("keyF", FUNC), + ], + return_spec: ReturnSpec::SetWithSameElements(0), + variadic: false, + doc: "Difference of two sets (a - b).", + example: None, + flow_typing: None, + }, + StdFn { + name: "setMember", + params: &[ + Param::req("x", ANY), + Param::req("arr", ARR), + Param::opt("keyF", FUNC), + ], + return_spec: ReturnSpec::Fixed(BOOL), + variadic: false, + doc: "Returns true if `x` is in set.", + example: None, + flow_typing: None, + }, + StdFn { + name: "flatten", + params: &[Param::req("arr", ARR)], + return_spec: ReturnSpec::Fixed(ARR), + variadic: false, + doc: "Flattens nested arrays by one level.", + example: None, + flow_typing: None, + 
}, + StdFn { + name: "flattenArrays", + params: &[Param::req("arr", ARR)], + return_spec: ReturnSpec::Fixed(ARR), + variadic: false, + doc: "Alias for `flatten`.", + example: None, + flow_typing: None, + }, + StdFn { + name: "flattenDeepArray", + params: &[Param::req("arr", ARR)], + return_spec: ReturnSpec::Fixed(ARR), + variadic: false, + doc: "Recursively flattens all nested arrays.", + example: None, + flow_typing: None, + }, + StdFn { + name: "remove", + params: &[Param::req("arr", ANY), Param::req("elem", ANY)], + return_spec: ReturnSpec::ArrayWithSameElements(0), + variadic: false, + doc: "Removes first occurrence of element from array.", + example: None, + flow_typing: None, + }, + StdFn { + name: "removeAt", + params: &[Param::req("arr", ANY), Param::req("idx", NUM)], + return_spec: ReturnSpec::ArrayWithSameElements(0), + variadic: false, + doc: "Removes element at index from array.", + example: None, + flow_typing: None, + }, + StdFn { + name: "repeat", + params: &[Param::req("arr", ANY), Param::req("n", NUM)], + return_spec: ReturnSpec::SameAsArg(0), + variadic: false, + doc: "Repeats array `n` times.", + example: None, + flow_typing: None, + }, + StdFn { + name: "prune", + params: &[Param::req("x", ANY)], + return_spec: ReturnSpec::SameAsArg(0), + variadic: false, + doc: "Recursively removes nulls, empty arrays, and empty objects.", + example: None, + flow_typing: None, + }, + // ========================================================================== + // String functions + // ========================================================================== + StdFn { + name: "toString", + params: &[Param::req("x", ANY)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Converts value to string.", + example: None, + flow_typing: None, + }, + StdFn { + name: "format", + params: &[Param::req("fmt", STR)], + return_spec: ReturnSpec::Fixed(STR), + variadic: true, + doc: "Printf-style formatting.", + example: Some(r#"std.format("Hello %s", 
["world"]) // "Hello world""#), + flow_typing: None, + }, + StdFn { + name: "escapeStringJson", + params: &[Param::req("s", STR)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Escapes string for JSON.", + example: None, + flow_typing: None, + }, + StdFn { + name: "escapeStringBash", + params: &[Param::req("s", STR)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Escapes string for Bash.", + example: None, + flow_typing: None, + }, + StdFn { + name: "escapeStringDollars", + params: &[Param::req("s", STR)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Escapes `$` characters.", + example: None, + flow_typing: None, + }, + StdFn { + name: "escapeStringPython", + params: &[Param::req("s", STR)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Escapes string for Python.", + example: None, + flow_typing: None, + }, + StdFn { + name: "escapeStringXml", + params: &[Param::req("s", STR)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Escapes string for XML.", + example: None, + flow_typing: None, + }, + StdFn { + name: "escapeStringXML", + params: &[Param::req("s", STR)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Escapes string for XML (alias).", + example: None, + flow_typing: None, + }, + StdFn { + name: "substr", + params: &[ + Param::req("s", STR), + Param::req("from", NUM), + Param::req("len", NUM), + ], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Returns substring.", + example: None, + flow_typing: None, + }, + StdFn { + name: "split", + params: &[Param::req("str", STR), Param::req("c", STR)], + return_spec: ReturnSpec::Fixed(ARR_STR), + variadic: false, + doc: "Splits string by delimiter.", + example: Some(r#"std.split("a,b,c", ",") // ["a", "b", "c"]"#), + flow_typing: None, + }, + StdFn { + name: "splitLimit", + params: &[ + Param::req("str", STR), + Param::req("c", STR), + Param::req("maxsplits", NUM), + ], + return_spec: 
ReturnSpec::Fixed(ARR_STR), + variadic: false, + doc: "Splits with maximum splits.", + example: None, + flow_typing: None, + }, + StdFn { + name: "splitLimitR", + params: &[ + Param::req("str", STR), + Param::req("c", STR), + Param::req("maxsplits", NUM), + ], + return_spec: ReturnSpec::Fixed(ARR_STR), + variadic: false, + doc: "Splits from right with maximum.", + example: None, + flow_typing: None, + }, + StdFn { + name: "strReplace", + params: &[ + Param::req("str", STR), + Param::req("from", STR), + Param::req("to", STR), + ], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Replaces all occurrences.", + example: Some(r#"std.strReplace("foo", "o", "0") // "f00""#), + flow_typing: None, + }, + StdFn { + name: "stripChars", + params: &[Param::req("str", STR), Param::req("chars", STR)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Strips characters from both ends.", + example: None, + flow_typing: None, + }, + StdFn { + name: "lstripChars", + params: &[Param::req("str", STR), Param::req("chars", STR)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Strips characters from left.", + example: None, + flow_typing: None, + }, + StdFn { + name: "rstripChars", + params: &[Param::req("str", STR), Param::req("chars", STR)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Strips characters from right.", + example: None, + flow_typing: None, + }, + StdFn { + name: "asciiLower", + params: &[Param::req("s", STR)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Converts to lowercase ASCII.", + example: None, + flow_typing: None, + }, + StdFn { + name: "asciiUpper", + params: &[Param::req("s", STR)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Converts to uppercase ASCII.", + example: None, + flow_typing: None, + }, + StdFn { + name: "char", + params: &[Param::req("n", NUM)], + return_spec: ReturnSpec::Fixed(CHAR), + variadic: false, + doc: "Returns character for 
codepoint.", + example: Some(r#"std.char(65) // "A""#), + flow_typing: None, + }, + StdFn { + name: "codepoint", + params: &[Param::req("c", CHAR)], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "Returns codepoint for character.", + example: Some(r#"std.codepoint("A") // 65"#), + flow_typing: None, + }, + StdFn { + name: "startsWith", + params: &[Param::req("str", STR), Param::req("prefix", STR)], + return_spec: ReturnSpec::Fixed(BOOL), + variadic: false, + doc: "Returns true if string starts with prefix.", + example: None, + flow_typing: None, + }, + StdFn { + name: "endsWith", + params: &[Param::req("str", STR), Param::req("suffix", STR)], + return_spec: ReturnSpec::Fixed(BOOL), + variadic: false, + doc: "Returns true if string ends with suffix.", + example: None, + flow_typing: None, + }, + StdFn { + name: "findSubstr", + params: &[Param::req("pat", STR), Param::req("str", STR)], + return_spec: ReturnSpec::Fixed(ARR_NUM), + variadic: false, + doc: "Returns indices where pattern matches.", + example: None, + flow_typing: None, + }, + StdFn { + name: "stringChars", + params: &[Param::req("str", STR)], + return_spec: ReturnSpec::Fixed(ARR_CHAR), + variadic: false, + doc: "Returns array of single-character strings.", + example: None, + flow_typing: None, + }, + StdFn { + name: "equalsIgnoreCase", + params: &[Param::req("str1", STR), Param::req("str2", STR)], + return_spec: ReturnSpec::Fixed(BOOL), + variadic: false, + doc: "Case-insensitive string equality.", + example: None, + flow_typing: None, + }, + StdFn { + name: "parseInt", + params: &[Param::req("str", STR)], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "Parses integer from string.", + example: None, + flow_typing: None, + }, + StdFn { + name: "parseOctal", + params: &[Param::req("str", STR)], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "Parses octal integer.", + example: None, + flow_typing: None, + }, + StdFn { + name: "parseHex", + params: 
&[Param::req("str", STR)], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "Parses hexadecimal integer.", + example: None, + flow_typing: None, + }, + StdFn { + name: "parseJson", + params: &[Param::req("str", STR)], + return_spec: ReturnSpec::Any, + variadic: false, + doc: "Parses JSON string.", + example: None, + flow_typing: None, + }, + StdFn { + name: "parseYaml", + params: &[Param::req("str", STR)], + return_spec: ReturnSpec::Any, + variadic: false, + doc: "Parses YAML string.", + example: None, + flow_typing: None, + }, + // ========================================================================== + // Object functions + // ========================================================================== + StdFn { + name: "objectFields", + params: &[Param::req("obj", OBJ)], + return_spec: ReturnSpec::Fixed(ARR_STR), + variadic: false, + doc: "Returns array of field names.", + example: Some(r#"std.objectFields({a: 1, b: 2}) // ["a", "b"]"#), + flow_typing: None, + }, + StdFn { + name: "objectFieldsAll", + params: &[Param::req("obj", OBJ)], + return_spec: ReturnSpec::Fixed(ARR_STR), + variadic: false, + doc: "Returns all field names including hidden.", + example: None, + flow_typing: None, + }, + StdFn { + name: "objectHas", + params: &[Param::req("obj", OBJ), Param::req("f", STR)], + return_spec: ReturnSpec::Fixed(BOOL), + variadic: false, + doc: "Returns true if object has field.", + example: None, + flow_typing: None, + }, + StdFn { + name: "objectHasAll", + params: &[Param::req("obj", OBJ), Param::req("f", STR)], + return_spec: ReturnSpec::Fixed(BOOL), + variadic: false, + doc: "Returns true including hidden fields.", + example: None, + flow_typing: None, + }, + StdFn { + name: "objectValues", + params: &[Param::req("obj", OBJ)], + return_spec: ReturnSpec::ObjectValuesType(0), + variadic: false, + doc: "Returns array of field values.", + example: None, + flow_typing: None, + }, + StdFn { + name: "objectValuesAll", + params: &[Param::req("obj", 
OBJ)], + return_spec: ReturnSpec::ObjectValuesType(0), + variadic: false, + doc: "Returns all values including hidden.", + example: None, + flow_typing: None, + }, + StdFn { + name: "objectKeysValues", + params: &[Param::req("obj", OBJ)], + return_spec: ReturnSpec::Fixed(ARR), + variadic: false, + doc: "Returns array of `{key, value}` objects.", + example: None, + flow_typing: None, + }, + StdFn { + name: "objectKeysValuesAll", + params: &[Param::req("obj", OBJ)], + return_spec: ReturnSpec::Fixed(ARR), + variadic: false, + doc: "Includes hidden fields.", + example: None, + flow_typing: None, + }, + StdFn { + name: "objectFieldsEx", + params: &[Param::req("obj", OBJ), Param::req("hidden", BOOL)], + return_spec: ReturnSpec::Fixed(ARR_STR), + variadic: false, + doc: "Returns field names with hidden control.", + example: None, + flow_typing: None, + }, + StdFn { + name: "objectHasEx", + params: &[ + Param::req("obj", OBJ), + Param::req("f", STR), + Param::req("hidden", BOOL), + ], + return_spec: ReturnSpec::Fixed(BOOL), + variadic: false, + doc: "Checks for field with hidden control.", + example: None, + flow_typing: None, + }, + StdFn { + name: "objectRemoveKey", + params: &[Param::req("obj", OBJ), Param::req("key", STR)], + return_spec: ReturnSpec::Fixed(OBJ), + variadic: false, + doc: "Returns object with key removed.", + example: None, + flow_typing: None, + }, + StdFn { + name: "get", + params: &[ + Param::req("obj", OBJ), + Param::req("f", STR), + Param::opt("default", ANY), + Param::opt("inc_hidden", BOOL), + ], + return_spec: ReturnSpec::Any, + variadic: false, + doc: "Gets field with default.", + example: None, + flow_typing: None, + }, + StdFn { + name: "equals", + params: &[Param::req("a", ANY), Param::req("b", ANY)], + return_spec: ReturnSpec::Fixed(BOOL), + variadic: false, + doc: "Deep equality comparison.", + example: None, + flow_typing: None, + }, + StdFn { + name: "primitiveEquals", + params: &[Param::req("a", ANY), Param::req("b", ANY)], + 
return_spec: ReturnSpec::Fixed(BOOL), + variadic: false, + doc: "Equality for primitives (non-recursive).", + example: None, + flow_typing: None, + }, + StdFn { + name: "mergePatch", + params: &[Param::req("target", ANY), Param::req("patch", ANY)], + return_spec: ReturnSpec::SameAsArg(0), + variadic: false, + doc: "JSON Merge Patch (RFC 7396).", + example: None, + flow_typing: None, + }, + // ========================================================================== + // Math functions + // ========================================================================== + StdFn { + name: "abs", + params: &[Param::req("n", NUM)], + return_spec: ReturnSpec::NonNegative, + variadic: false, + doc: "Absolute value.", + example: None, + flow_typing: None, + }, + StdFn { + name: "sign", + params: &[Param::req("n", NUM)], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "Returns -1, 0, or 1.", + example: None, + flow_typing: None, + }, + StdFn { + name: "max", + params: &[Param::req("a", NUM), Param::req("b", NUM)], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "Returns maximum.", + example: None, + flow_typing: None, + }, + StdFn { + name: "min", + params: &[Param::req("a", NUM), Param::req("b", NUM)], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "Returns minimum.", + example: None, + flow_typing: None, + }, + StdFn { + name: "clamp", + params: &[ + Param::req("x", NUM), + Param::req("minVal", NUM), + Param::req("maxVal", NUM), + ], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "Clamps value to range.", + example: None, + flow_typing: None, + }, + StdFn { + name: "pow", + params: &[Param::req("x", NUM), Param::req("n", NUM)], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "Returns x to the power n.", + example: None, + flow_typing: None, + }, + StdFn { + name: "sqrt", + params: &[Param::req("x", NUM)], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "Square root.", + 
example: None, + flow_typing: None, + }, + StdFn { + name: "exp", + params: &[Param::req("x", NUM)], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "e to the power x.", + example: None, + flow_typing: None, + }, + StdFn { + name: "log", + params: &[Param::req("x", NUM)], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "Natural logarithm.", + example: None, + flow_typing: None, + }, + StdFn { + name: "sin", + params: &[Param::req("x", NUM)], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "Sine (radians).", + example: None, + flow_typing: None, + }, + StdFn { + name: "cos", + params: &[Param::req("x", NUM)], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "Cosine (radians).", + example: None, + flow_typing: None, + }, + StdFn { + name: "tan", + params: &[Param::req("x", NUM)], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "Tangent (radians).", + example: None, + flow_typing: None, + }, + StdFn { + name: "asin", + params: &[Param::req("x", NUM)], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "Arc sine.", + example: None, + flow_typing: None, + }, + StdFn { + name: "acos", + params: &[Param::req("x", NUM)], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "Arc cosine.", + example: None, + flow_typing: None, + }, + StdFn { + name: "atan", + params: &[Param::req("x", NUM)], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "Arc tangent.", + example: None, + flow_typing: None, + }, + StdFn { + name: "floor", + params: &[Param::req("x", NUM)], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "Floor.", + example: None, + flow_typing: None, + }, + StdFn { + name: "ceil", + params: &[Param::req("x", NUM)], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "Ceiling.", + example: None, + flow_typing: None, + }, + StdFn { + name: "round", + params: &[Param::req("x", NUM)], + return_spec: ReturnSpec::Fixed(NUM), 
+ variadic: false, + doc: "Round to nearest integer.", + example: None, + flow_typing: None, + }, + StdFn { + name: "modulo", + params: &[Param::req("x", NUM), Param::req("y", NUM)], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "Modulo operation.", + example: None, + flow_typing: None, + }, + StdFn { + name: "mod", + params: &[Param::req("a", NUM), Param::req("b", NUM)], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "Modulo (same as `a % b`).", + example: None, + flow_typing: None, + }, + StdFn { + name: "mantissa", + params: &[Param::req("x", NUM)], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "Returns mantissa of floating point number.", + example: None, + flow_typing: None, + }, + StdFn { + name: "exponent", + params: &[Param::req("x", NUM)], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "Returns exponent of floating point number.", + example: None, + flow_typing: None, + }, + StdFn { + name: "bigint", + params: &[Param::req("v", ANY)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Converts value to arbitrary-precision integer.", + example: None, + flow_typing: None, + }, + StdFn { + name: "xor", + params: &[Param::req("a", BOOL), Param::req("b", BOOL)], + return_spec: ReturnSpec::Fixed(BOOL), + variadic: false, + doc: "Logical XOR.", + example: None, + flow_typing: None, + }, + StdFn { + name: "xnor", + params: &[Param::req("a", BOOL), Param::req("b", BOOL)], + return_spec: ReturnSpec::Fixed(BOOL), + variadic: false, + doc: "Logical XNOR.", + example: None, + flow_typing: None, + }, + StdFn { + name: "sum", + params: &[Param::req("arr", ARR_NUM)], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "Sum of array.", + example: None, + flow_typing: None, + }, + StdFn { + name: "minArray", + params: &[Param::req("arr", ARR), Param::opt("keyF", FUNC)], + return_spec: ReturnSpec::Any, + variadic: false, + doc: "Minimum of array.", + example: None, + 
flow_typing: None, + }, + StdFn { + name: "maxArray", + params: &[Param::req("arr", ARR), Param::opt("keyF", FUNC)], + return_spec: ReturnSpec::Any, + variadic: false, + doc: "Maximum of array.", + example: None, + flow_typing: None, + }, + // ========================================================================== + // Encoding/hashing + // ========================================================================== + StdFn { + name: "base64", + params: &[Param::req("input", ANY)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Base64 encodes string or bytes.", + example: None, + flow_typing: None, + }, + StdFn { + name: "base64Decode", + params: &[Param::req("s", STR)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Base64 decodes to string.", + example: None, + flow_typing: None, + }, + StdFn { + name: "base64DecodeBytes", + params: &[Param::req("s", STR)], + return_spec: ReturnSpec::Fixed(ARR_NUM), + variadic: false, + doc: "Base64 decodes to bytes.", + example: None, + flow_typing: None, + }, + StdFn { + name: "encodeUTF8", + params: &[Param::req("str", STR)], + return_spec: ReturnSpec::Fixed(ARR_NUM), + variadic: false, + doc: "Encodes string to UTF-8 byte array.", + example: None, + flow_typing: None, + }, + StdFn { + name: "decodeUTF8", + params: &[Param::req("arr", ARR_NUM)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Decodes UTF-8 byte array to string.", + example: None, + flow_typing: None, + }, + StdFn { + name: "md5", + params: &[Param::req("s", STR)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "MD5 hash as hex string.", + example: None, + flow_typing: None, + }, + StdFn { + name: "sha1", + params: &[Param::req("s", STR)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "SHA-1 hash as hex string.", + example: None, + flow_typing: None, + }, + StdFn { + name: "sha256", + params: &[Param::req("s", STR)], + return_spec: ReturnSpec::Fixed(STR), + variadic: 
false, + doc: "SHA-256 hash as hex string.", + example: None, + flow_typing: None, + }, + StdFn { + name: "sha512", + params: &[Param::req("s", STR)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "SHA-512 hash as hex string.", + example: None, + flow_typing: None, + }, + StdFn { + name: "sha3", + params: &[Param::req("s", STR)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "SHA-3 hash as hex string.", + example: None, + flow_typing: None, + }, + // ========================================================================== + // Manifest functions + // ========================================================================== + StdFn { + name: "manifestJson", + params: &[Param::req("value", ANY)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Converts value to JSON string.", + example: None, + flow_typing: None, + }, + StdFn { + name: "manifestJsonEx", + params: &[ + Param::req("value", ANY), + Param::opt("indent", STR), + Param::opt("newline", STR), + Param::opt("key_val_sep", STR), + ], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "JSON with custom formatting.", + example: None, + flow_typing: None, + }, + StdFn { + name: "manifestJsonMinified", + params: &[Param::req("value", ANY)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Minified JSON.", + example: None, + flow_typing: None, + }, + StdFn { + name: "manifestYamlDoc", + params: &[ + Param::req("value", ANY), + Param::opt("indent_array_in_object", BOOL), + Param::opt("quote_keys", BOOL), + ], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Converts to YAML.", + example: None, + flow_typing: None, + }, + StdFn { + name: "manifestYamlStream", + params: &[ + Param::req("value", ANY), + Param::opt("indent_array_in_object", BOOL), + Param::opt("c_document_end", BOOL), + Param::opt("quote_keys", BOOL), + ], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "YAML stream.", + example: 
None, + flow_typing: None, + }, + StdFn { + name: "manifestPython", + params: &[Param::req("value", ANY)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Converts to Python literal.", + example: None, + flow_typing: None, + }, + StdFn { + name: "manifestPythonVars", + params: &[Param::req("conf", OBJ)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Python variable assignments.", + example: None, + flow_typing: None, + }, + StdFn { + name: "manifestIni", + params: &[Param::req("ini", OBJ)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Converts to INI format.", + example: None, + flow_typing: None, + }, + StdFn { + name: "manifestToml", + params: &[Param::req("value", ANY)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Converts to TOML.", + example: None, + flow_typing: None, + }, + StdFn { + name: "manifestTomlEx", + params: &[Param::req("value", ANY), Param::req("indent", STR)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Converts to TOML with custom indent.", + example: None, + flow_typing: None, + }, + StdFn { + name: "manifestXmlJsonml", + params: &[Param::req("value", ANY)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Converts JSONML to XML.", + example: None, + flow_typing: None, + }, + // ========================================================================== + // Regex + // ========================================================================== + StdFn { + name: "regexQuoteMeta", + params: &[Param::req("str", STR)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Escapes regex metacharacters in string.", + example: None, + flow_typing: None, + }, + // ========================================================================== + // Path/import + // ========================================================================== + StdFn { + name: "resolvePath", + params: &[Param::req("from", STR), Param::req("to", STR)], 
+ return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Resolves a relative path.", + example: None, + flow_typing: None, + }, + // ========================================================================== + // Miscellaneous + // ========================================================================== + StdFn { + name: "trace", + params: &[Param::req("str", STR), Param::req("rest", ANY)], + return_spec: ReturnSpec::SameAsArg(1), + variadic: false, + doc: "Prints `str` and returns `rest`.", + example: Some("std.trace(\"debug\", value)"), + flow_typing: None, + }, + StdFn { + name: "assertEqual", + params: &[Param::req("a", ANY), Param::req("b", ANY)], + return_spec: ReturnSpec::Fixed(BOOL), + variadic: false, + doc: "Asserts `a == b`, returns true.", + example: None, + flow_typing: None, + }, + StdFn { + name: "native", + params: &[Param::req("name", STR)], + return_spec: ReturnSpec::Fixed(FUNC), + variadic: false, + doc: "Calls a native extension function.", + example: None, + flow_typing: None, + }, + StdFn { + name: "extVar", + params: &[Param::req("name", STR)], + return_spec: ReturnSpec::Any, + variadic: false, + doc: "Gets external variable value.", + example: None, + flow_typing: None, + }, + StdFn { + name: "thisFile", + params: &[], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Returns the current file path.", + example: None, + flow_typing: None, + }, +]; + +/// Look up a stdlib function by name. +pub fn get_fn(name: &str) -> Option<&'static StdFn> { + FNS.iter().find(|f| f.name == name) +} + +/// Get flow typing info for a function by name. 
+pub fn get_flow_typing(name: &str) -> Option<&'static FlowTyping> { + get_fn(name).and_then(|f| f.flow_typing.as_ref()) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_get_fn() { + let f = get_fn("isNumber").unwrap(); + assert_eq!(f.name, "isNumber"); + assert_eq!(f.params.len(), 1); + assert_eq!(f.params[0].name, "v"); + } + + #[test] + fn test_get_flow_typing() { + let ft = get_flow_typing("isNumber").unwrap(); + assert_eq!(ft.narrows_to, NarrowsTo::Number); + assert_eq!(ft.totality, Totality::Total); + } + + #[test] + fn test_partial_predicate() { + let ft = get_flow_typing("isInteger").unwrap(); + assert_eq!(ft.narrows_to, NarrowsTo::Number); + assert_eq!(ft.totality, Totality::Partial); + } + + #[test] + fn test_no_flow_typing() { + assert!(get_flow_typing("length").is_none()); + } + + #[test] + fn test_all_fns_have_valid_params() { + for f in FNS { + // Check param names are non-empty + for p in f.params { + assert!(!p.name.is_empty(), "{} has empty param name", f.name); + } + } + } +} diff --git a/docs/lsp/ARCHITECTURE.md b/docs/lsp/ARCHITECTURE.md new file mode 100644 index 00000000..2d6bbaf5 --- /dev/null +++ b/docs/lsp/ARCHITECTURE.md @@ -0,0 +1,281 @@ +# jrsonnet-lsp Architecture + +This document describes the current architecture of the Jsonnet language server +implemented by `crates/jrsonnet-lsp` and companion crates. + +## Scope + +The LSP implementation is split across multiple crates. At a high level: + +- `jrsonnet-lsp` owns process lifecycle, LSP transport, request/notification + routing, configuration updates, and async diagnostics orchestration. +- `jrsonnet-lsp-handlers` owns most request handlers (hover, completion, + references, rename, and so on). +- `jrsonnet-lsp-inference` owns `TypeAnalysis`, cross-file type caching, and + dependency-aware analysis via `TypeProvider`. +- `jrsonnet-lsp-import` owns import parsing and dependency graph maintenance. +- `jrsonnet-lsp-check` owns lint and type-check diagnostics. 
+- `jrsonnet-lsp-document`, `jrsonnet-lsp-scope`, `jrsonnet-lsp-stdlib`, and + `jrsonnet-lsp-types` provide foundational utilities used throughout. + +## Crate Graph + +The rough dependency direction is: + +```text +jrsonnet-lsp-types + ├─ jrsonnet-lsp-stdlib + ├─ jrsonnet-lsp-document + ├─ jrsonnet-lsp-import + ├─ jrsonnet-lsp-scope + └─ jrsonnet-lsp-inference + └─ jrsonnet-lsp-check + └─ jrsonnet-lsp-handlers + └─ jrsonnet-lsp +``` + +`jrsonnet-lsp` also depends on `jrsonnet-evaluator` for evaluation-based runtime +diagnostics and command execution. + +## Runtime Components + +`Server` in `crates/jrsonnet-lsp/src/server.rs` owns shared state: + +- `connection: lsp_server::Connection` +- `documents: SharedDocumentManager` +- `import_graph: Arc>` +- `global_types: Arc` +- `type_cache: SharedTypeCache` +- `config: SharedConfig` +- `evaluator: Option>` +- `diagnostics: AsyncDiagnostics` +- async request response channels +- shutdown flag + +`DocumentManager` keeps open documents in a concurrent map and keeps recently +closed documents in an LRU cache. It can also read unopened files from disk when +needed by cross-file operations. + +## Initialization + +Startup flow: + +1. Accept `initialize`. +2. Parse `initializationOptions` into `ServerConfig`. +3. Configure runtime components (`Evaluator`, `AsyncDiagnostics`). +4. Send `InitializeResult` with server capabilities. +5. Wait for `initialized` notification. +6. Enter the main loop. + +Entry point: `run_stdio()` in `crates/jrsonnet-lsp/src/server.rs`. + +## Main Loop Model + +The main loop multiplexes three channels with `crossbeam_channel::select!`: + +- LSP transport messages. +- Completed async diagnostics results. +- Completed async request responses. + +This keeps the message loop responsive while expensive analysis work runs off +thread. 
+ +## Request Routing Model + +### Synchronous request handlers + +Handled directly on the server thread: + +- `textDocument/documentSymbol` +- `textDocument/documentHighlight` +- `textDocument/codeAction` +- `textDocument/signatureHelp` +- `textDocument/formatting` +- `textDocument/prepareRename` +- `textDocument/semanticTokens/full` +- `shutdown` + +### Asynchronous request handlers + +Dispatched via `spawn_async_response` (Rayon): + +- `textDocument/definition` +- `textDocument/hover` +- `textDocument/inlayHint` +- `textDocument/completion` +- `textDocument/references` +- `workspace/symbol` +- `textDocument/rename` +- `textDocument/codeLens` +- `workspace/executeCommand` + +Async handlers run through `AsyncRequestContext` +(`crates/jrsonnet-lsp/src/server/async_requests.rs`), which centralizes access +to documents, import graph, type cache, config, and dependency-aware analysis. + +## Advertised LSP Capabilities + +`server_capabilities()` currently advertises: + +- incremental text sync (`TextDocumentSyncKind::INCREMENTAL`) +- definition, hover, document symbols, document highlights +- completion (trigger `.`) +- signature help (triggers `(` and `,`) +- formatting +- references +- workspace symbol search +- rename with `prepareRename` +- semantic tokens (full document) +- code actions (quick-fix kind) +- code lens (resolve disabled) +- execute command (four command IDs) + +For the canonical list, see `crates/jrsonnet-lsp/src/server.rs`. + +Not currently advertised: + +- declaration/type-definition/implementation providers +- semantic tokens range requests +- code-lens resolve requests + +## Notification Handling + +Implemented notifications: + +- `textDocument/didOpen` +- `textDocument/didChange` +- `textDocument/didClose` +- `workspace/didChangeConfiguration` +- `workspace/didChangeWatchedFiles` +- `exit` + +`textDocument/didSave` is not currently handled. + +### Open/change path + +For open/change events, the server: + +1. Updates document contents/version. 
+2. Invalidates type cache for changed file and transitive importers. +3. Rebuilds import graph entries for that file. +4. Schedules diagnostics. + +### Close path + +For close events, the server: + +1. Moves the file from open docs to closed-cache storage. +2. Invalidates the file and dependent type cache entries. +3. Removes import graph entries for the closed file. +4. Publishes empty diagnostics for the URI. + +### Watched file path + +For watched file events, the server invalidates cache and updates graph even for +unopened files. It also schedules diagnostics for open importers to refresh +cross-file diagnostics after external file changes. + +## Import Resolution Strategy + +Import resolution is used in multiple places (analysis, definition, commands). +The common resolution order is: + +1. relative to the importing file's directory +2. each configured `jpath` entry in order + +Import graph update path: `update_import_graph` in +`crates/jrsonnet-lsp/src/server.rs`. + +## Diagnostics Architecture + +Diagnostics run in a dedicated background worker (`AsyncDiagnostics` in +`crates/jrsonnet-lsp/src/async_diagnostics.rs`): + +- Requests are debounced per-file (`500 ms`). +- Stale requests/results are discarded by sequence numbers. +- Worker reconstructs `Document` from scheduled text/version. +- Worker uses `TypeProvider` + `TypeCache` + `ImportGraph` for dependency-aware + analysis. +- Result is sent back as `PublishDiagnosticsParams`. + +Diagnostic composition (`crates/jrsonnet-lsp/src/handlers/diagnostics.rs`): + +1. Syntax diagnostics from parser errors. +2. Lint/type diagnostics from `jrsonnet-lsp-check` when lint is enabled and + parse succeeded. +3. Evaluation diagnostic from `Evaluator` (optional) when parse succeeded. + +Evaluation diagnostics use `analysis/eval.rs` and can optionally apply +Tanka-aware `jpath` expansion via `analysis/tanka.rs`. 
+ +## Cross-file Analysis and Caching + +`TypeProvider` in `jrsonnet-lsp-inference` ensures imports are analyzed first: + +- Uses import graph dependency ordering (`process_with_dependencies`). +- Uses `TypeCache` to reuse previously inferred top-level types. +- Uses `CachingImportResolver` so import expressions can consult cached types. + +Cache invalidation is dependency-aware through +`invalidate_type_cache_with_dependents`. + +## Configuration and Reconfiguration + +`ServerConfig` supports initialization options and live updates from +`workspace/didChangeConfiguration`. + +Important behavior in `on_did_change_configuration`: + +- Runtime-affecting changes (`jpath`, eval diagnostics mode, Tanka mode) trigger + runtime component rebuild. +- Runtime-affecting changes clear the shared type cache and refresh import graph + for open files. +- Runtime changes and lint toggle changes both trigger diagnostic rescheduling + for open files. + +## Configuration Surface + +`ServerConfig` (`crates/jrsonnet-lsp/src/config.rs`) includes: + +- `jpath` +- `ext_vars` +- `ext_code` +- `enable_eval_diagnostics` +- `enable_lint_diagnostics` +- `resolve_paths_with_tanka` +- `formatting` +- `log_level` + +Configuration can arrive via initialization options or +`workspace/didChangeConfiguration` settings payloads. The update logic accepts +both flat and namespaced settings (`jsonnet`, `jsonnet-language-server`). + +## Execute Commands + +Advertised commands: + +- `jrsonnet.evalFile` +- `jrsonnet.evalExpression` +- `jrsonnet.findTransitiveImporters` +- `jrsonnet.findReferences` + +Current async command implementation handles: + +- `jrsonnet.evalFile` +- `jrsonnet.evalExpression` +- `jrsonnet.findTransitiveImporters` + +`jrsonnet.findReferences` is currently advertised but does not have a dedicated +execution branch in `execute_command` yet. + +## Concurrency Strategy + +Concurrency is intentionally split: + +- Fast protocol/state transitions stay on the main loop thread. 
+- Heavier read/analysis operations run in Rayon jobs. +- Diagnostics run on a dedicated worker thread with debouncing. +- Shared mutable structures use `Arc` + `RwLock`/concurrent maps where needed. + +This balance keeps latency low for editor interactions while preserving +cross-file correctness. diff --git a/docs/lsp/HANDLERS.md b/docs/lsp/HANDLERS.md new file mode 100644 index 00000000..7fefbc66 --- /dev/null +++ b/docs/lsp/HANDLERS.md @@ -0,0 +1,253 @@ +# jrsonnet-lsp Handler Guide + +This document describes current LSP request handling across +`crates/jrsonnet-lsp` and `crates/jrsonnet-lsp-handlers`. + +## Scope and Ownership + +- `jrsonnet-lsp` owns transport, method dispatch, async orchestration, and + diagnostics publishing. +- `jrsonnet-lsp-handlers` owns most feature logic for request methods. +- `jrsonnet-lsp` also owns diagnostics composition + (`crates/jrsonnet-lsp/src/handlers/diagnostics.rs`) and execute-command + orchestration (`crates/jrsonnet-lsp/src/server/async_requests.rs`). 
+ +## Module Inventory + +Public handler modules in `crates/jrsonnet-lsp-handlers/src`: + +- `code_action.rs` +- `code_lens.rs` +- `completion/` +- `definition.rs` +- `document_highlight.rs` +- `formatting.rs` +- `hover.rs` +- `inlay_hint.rs` +- `references.rs` +- `rename.rs` +- `semantic_tokens.rs` +- `signature_help.rs` +- `symbols.rs` + +## Request Method Matrix + +Current request routing in `crates/jrsonnet-lsp/src/server.rs`: + +| LSP method | Server entry point | Implementation owner | Execution model | Uses `TypeAnalysis` | +| ---------------------------------- | ---------------------------------- | ------------------------------------------------- | --------------- | ------------------- | +| `textDocument/documentSymbol` | `on_document_symbol` | handlers crate (`document_symbols`) | sync | no | +| `textDocument/definition` | async context (`goto_definition`) | mixed: handlers + server import resolution | async | no | +| `textDocument/hover` | async context (`hover`) | handlers crate (`hover`) | async | yes | +| `textDocument/documentHighlight` | `on_document_highlight` | handlers crate (`document_highlights`) | sync | no | +| `textDocument/inlayHint` | async context (`inlay_hints`) | handlers crate (`inlay_hints`) | async | yes | +| `textDocument/codeAction` | `on_code_action` | handlers crate (`code_actions`) | sync | no | +| `textDocument/completion` | async context (`completion`) | handlers crate (`completion_with_import_roots`) | async | yes | +| `textDocument/signatureHelp` | `on_signature_help` | handlers crate (`signature_help`) | sync | no | +| `textDocument/formatting` | `on_formatting` | handlers crate (`format_document_with_config`) | sync | no | +| `textDocument/references` | async context (`references`) | mixed: handlers + server import graph merge | async | no | +| `workspace/symbol` | async context (`workspace_symbol`) | handlers crate (`workspace_symbols_for_document`) | async | no | +| `textDocument/prepareRename` | `on_prepare_rename` | 
handlers crate (`prepare_rename`) | sync | no | +| `textDocument/rename` | async context (`rename`) | handlers crate (`rename_cross_file`) | async | no | +| `textDocument/semanticTokens/full` | `on_semantic_tokens_full` | handlers crate (`semantic_tokens`) | sync | no | +| `textDocument/codeLens` | async context (`code_lens`) | handlers crate (`code_lens`) | async | yes | +| `workspace/executeCommand` | async context (`execute_command`) | server async context | async | no | +| `shutdown` | direct in `handle_request` | server | sync | no | + +Async requests are sent back over the server's async response channel after +worker completion. + +## Handler Details + +### Code Action + +File: `crates/jrsonnet-lsp-handlers/src/code_action.rs` + +- Produces quick fixes from diagnostic context (for example unused-variable + fixes). +- Uses current document plus selected range and diagnostics from request params. +- Returned through sync `textDocument/codeAction`. + +### Code Lens + +File: `crates/jrsonnet-lsp-handlers/src/code_lens.rs` + +`CodeLensConfig` supports: + +- `show_references` +- `show_evaluate` +- `show_types` +- `show_errors` (`Hidden` or `Visible`) + +Current server path builds `CodeLensConfig::all()` in async context and passes +computed `TypeAnalysis`. + +Lens categories: + +- reference count lenses at definition sites +- evaluate-file command lens (`jrsonnet.evalFile`) +- inferred type lenses for selected bindings +- syntax-status lens for parse errors + +### Completion + +Files: + +- `completion/mod.rs` +- `completion/stdlib.rs` +- `completion/locals.rs` +- `completion/fields.rs` +- `completion/imports.rs` +- `completion/helpers.rs` + +Completion sources include: + +- stdlib functions and docs +- in-scope locals +- object fields from inferred types +- import paths using file path + configured import roots +- object-context keywords (`$`, `self`, `super`) + +Server capabilities advertise `.` as trigger. 
Other completion contexts can +still return items on explicit completion requests. + +### Definition + +File: `crates/jrsonnet-lsp-handlers/src/definition.rs` + +`goto_definition` returns: + +- `Local(range)` +- `Import(path)` +- `ImportField { path, fields }` + +Async server context resolves import paths and can locate nested field ranges in +imported files before returning final `Location`. + +### Document Highlight + +File: `crates/jrsonnet-lsp-handlers/src/document_highlight.rs` + +- Same-file symbol highlighting with scope-aware matching. + +### Formatting + +File: `crates/jrsonnet-lsp-handlers/src/formatting.rs` + +- Uses `formatter_path` when configured. +- Otherwise tries `jrsonnet-fmt` then `jsonnetfmt`. +- Returns a full-document replacement edit when formatting changes text. +- Returns `None` when formatting is unavailable or fails. + +### Hover + +File: `crates/jrsonnet-lsp-handlers/src/hover.rs` + +Hover combines: + +- inferred type information +- stdlib documentation/signatures +- local definition context snippets + +Requires `TypeAnalysis` from async server context. + +### Inlay Hint + +File: `crates/jrsonnet-lsp-handlers/src/inlay_hint.rs` + +- Produces type hints for locals and function-related positions from inferred + types. +- Respects requested visible range. + +### References + +File: `crates/jrsonnet-lsp-handlers/src/references.rs` + +- Same-file references from scope-aware search. +- Cross-file references by searching transitive importers. +- Async server context merges same-file and cross-file results. + +### Rename + +File: `crates/jrsonnet-lsp-handlers/src/rename.rs` + +- `prepare_rename` validates renameability at cursor. +- `rename_cross_file` builds `WorkspaceEdit` across current file and importer + graph where needed. +- New names are validated with `SymbolName`. + +### Semantic Tokens + +File: `crates/jrsonnet-lsp-handlers/src/semantic_tokens.rs` + +- Produces encoded semantic tokens for full-document requests. 
+- Server currently advertises only `textDocument/semanticTokens/full` (no range + variant). + +### Signature Help + +File: `crates/jrsonnet-lsp-handlers/src/signature_help.rs` + +- Trigger characters: `(` and `,`. +- Uses stdlib signatures and local function information. +- Computes active parameter index from call context. + +### Symbols + +File: `crates/jrsonnet-lsp-handlers/src/symbols.rs` + +- `document_symbols` builds hierarchical document outline. +- `workspace_symbols_for_document` filters symbols by query for one document. +- Async server path parallelizes across currently open documents. + +## Async Request Context + +`AsyncRequestContext` in `crates/jrsonnet-lsp/src/server/async_requests.rs` +provides shared orchestration for async methods: + +- canonical path conversion and document lookup +- dependency-aware `TypeAnalysis` creation via `TypeProvider` +- import graph lookups for cross-file features +- config access (`jpath`, command behavior) +- import-path resolution for definition targets + +This keeps handler modules focused on feature logic while server code owns +runtime and cross-cutting orchestration. + +## Type Analysis Usage + +`TypeAnalysis` is currently required in these request paths: + +- hover +- inlay hints +- completion +- code lens + +These analyses are computed in async context before handler invocation. + +## Commands and Handler Interaction + +Advertised command IDs: + +- `jrsonnet.evalFile` +- `jrsonnet.evalExpression` +- `jrsonnet.findTransitiveImporters` +- `jrsonnet.findReferences` + +Current async command implementation handles: + +- `jrsonnet.evalFile` +- `jrsonnet.evalExpression` +- `jrsonnet.findTransitiveImporters` + +`jrsonnet.findReferences` is advertised in server capabilities but has no +dedicated branch in `execute_command`. + +Code-lens output may also contain `jrsonnet.showErrors` for informational UI +actions. 
+
+## Testing Strategy
+
+- Handler unit tests live with handler modules in
+  `crates/jrsonnet-lsp-handlers/src/*`.
+- LSP integration and routing tests live in `crates/jrsonnet-lsp/tests`.
diff --git a/docs/lsp/TYPE_SYSTEM.md b/docs/lsp/TYPE_SYSTEM.md
new file mode 100644
index 00000000..4b9490a7
--- /dev/null
+++ b/docs/lsp/TYPE_SYSTEM.md
@@ -0,0 +1,211 @@
+# jrsonnet-lsp Type System
+
+This document describes the current type-system architecture used by the LSP,
+with emphasis on interned representation, store layering, type operations, and
+cross-file usage.
+
+## Scope
+
+Core type-system code lives in `crates/jrsonnet-lsp-types`.
+
+Inference and analysis integration live in `crates/jrsonnet-lsp-inference`,
+which uses these types and stores to produce `TypeAnalysis` for handlers,
+completion, diagnostics, and refactoring features.
+
+## Design Goals
+
+- Fast equality checks (`Ty` ID compare).
+- Memory efficiency via interning and deduplication.
+- Shareable type state across files (`GlobalTyStore`).
+- Efficient per-analysis mutation (`MutStore` + local types).
+- Expressive enough for Jsonnet semantics (objects, tuples, unions,
+  intersections, bounded numbers, and function return relationships).
+
+## Core Representation
+
+### `Ty`: interned type handle
+
+`Ty` is a compact `u32` wrapper (`Copy`, `Eq`, `Hash`) that references interned
+`TyData`.
+
+Important constants include:
+
+- `Ty::ANY`
+- `Ty::NEVER`
+- `Ty::NULL`
+- `Ty::BOOL`, `Ty::TRUE`, `Ty::FALSE`
+- `Ty::NUMBER`
+- `Ty::STRING`
+- `Ty::CHAR`
+
+Well-known IDs are reserved and stable across stores.
+
+### Local/global distinction
+
+The top bit (`LOCAL_BIT`) distinguishes origin:
+
+- local type IDs for per-analysis local store entries
+- global type IDs for shared global store entries
+
+This enables cheap local construction during analysis and later merging into the
+global store.
+
+### `TyData`: structural type payload
+
+Representative variants include:
+
+- primitives: `Any`, `Never`, `Null`, `Bool`, `Number`, `String`, literals
+- collections: `Array`, `Tuple`
+- objects: `Object(ObjectData)`, `AttrsOf`
+- functions: `Function(FunctionData)`
+- type algebra: `Union(Vec)`, `Sum(Vec)`
+- constraints: `BoundedNumber(NumBounds)`, `TypeVar`
+
+## Store Architecture
+
+`jrsonnet-lsp-types` exposes three complementary stores.
+
+### `GlobalTyStore`
+
+- Thread-safe shared store (`Arc` + lock-protected intern tables).
+- Lives for server lifetime.
+- Used for cross-file type reuse and cache stability.
+
+### `LocalTyStore`
+
+- Per-analysis local store.
+- No cross-thread sharing requirement.
+- Cheap staging area for temporary type construction.
+
+### `MutStore`
+
+- Unified interface that reads global + local and interns into local as needed.
+- Used heavily by inference/analysis code.
+- Supports merging locals into global through substitution machinery
+  (`TySubst`).
+
+## Type Substitution and Merge
+
+When local analysis results must survive outside the local context, local IDs
+are remapped to global IDs. `subst.rs` and `TySubst` cover this remapping
+process.
+
+This keeps handler-visible and cache-visible types globally valid.
+
+## Structural Data Types
+
+### Object model
+
+`ObjectData` carries:
+
+- sorted field list of `(String, FieldDefInterned)`
+- `has_unknown` flag to model open vs closed objects
+
+Field defs include type, required-ness, and visibility (`:`, `::`, `:::`
+semantics).
+
+### Function model
+
+`FunctionData` carries params and return specification. Params include:
+
+- name
+- type
+- default-value presence
+
+`ReturnSpec` supports fixed and relationship-based returns, including patterns
+such as "same as arg" and "array of arg", which are useful for stdlib function
+modeling.
+
+### Number bounds
+
+`NumBounds` stores optional min/max bounds and supports subset/intersection
+logic for narrowing and compatibility checks.
+
+## Core Type Operations
+
+`crates/jrsonnet-lsp-types/src/operations.rs` implements operation semantics.
+
+Key APIs:
+
+- `binary_op_result_ty`
+- `unary_op_result_ty`
+- `array_concat_ty`
+- `ty_and` (intersection/narrowing)
+- `ty_minus` (set subtraction from unions)
+- `ty_with_len`
+- `ty_with_min_len`
+- `ty_with_field`
+
+These operations are used in inference and lint/type-check diagnostics.
+
+## Unification and Subtyping
+
+`crates/jrsonnet-lsp-types/src/unification.rs` provides:
+
+- `unify_ty`
+- `is_subtype_ty`
+- `types_equivalent_ty`
+
+Important properties of this unifier:
+
+- variance-aware (`Covariant`, `Contravariant`)
+- structural matching for objects/functions/collections
+- detailed error reporting with `UnifyError` and path context
+- union mismatch reporting with per-variant error detail
+
+## Display and Diagnostics Formatting
+
+`display.rs` exposes formatting helpers (`DisplayTy`, `DisplayContext`) used to
+turn internal interned types into readable strings for hovers, inlay hints, and
+diagnostics.
+
+The display layer is intentionally separate from inference logic.
+
+## Integration in Inference
+
+`TypeAnalysis` in `crates/jrsonnet-lsp-inference/src/analysis.rs` stores:
+
+- immutable map from expression ranges to inferred `Ty`
+- document root type
+- an internal `RwLock` for query-time helpers
+
+`TypeAnalysis` exposes utilities used by handlers/checks:
+
+- type lookup by range/position
+- field extraction and object checks
+- indexable/callable/sliceable checks
+- display and store-access helpers
+
+## Cross-file Analysis and Cache
+
+`TypeProvider` (`provider.rs`) ensures dependency-aware analysis order using
+`ImportGraph`.
+
+`TypeCache` (`type_cache.rs`) stores top-level inferred type per file with
+version checks and LRU behavior.
`CachingImportResolver` reads from this cache +for import type resolution. + +This architecture allows import-aware inference without requiring full project +re-analysis on every request. + +## Flow Typing Integration + +Flow-sensitive narrowing logic is implemented in +`crates/jrsonnet-lsp-inference/src/flow.rs` and works with type-system +operations (`ty_and`, `ty_minus`, and related helpers). + +Stdlib predicate metadata (`jrsonnet-std-sig`, `flow_typing`) feeds into this +narrowing behavior so predicates such as `std.isNumber(x)` can refine branch +types. + +## Why This Shape Works for the LSP + +The current design balances correctness and latency: + +- interned `Ty` handles keep comparisons and copies cheap +- local/global split reduces synchronization costs during inference +- explicit substitution maintains correctness when persisting local results +- unified operations/unification provide consistent behavior across handlers and + diagnostics +- dependency-aware cache and provider keep cross-file features fast enough for + interactive use diff --git a/xtask/src/sourcegen/mod.rs b/xtask/src/sourcegen/mod.rs index 27737f7e..9fcda3bf 100644 --- a/xtask/src/sourcegen/mod.rs +++ b/xtask/src/sourcegen/mod.rs @@ -202,9 +202,15 @@ fn generate_nodes(kinds: &KindsSrc, grammar: &AstSrc) -> Result { quote!(impl ast::#trait_name for #name {}) }); + // Count how many fields of each type exist to handle cases like + // ExprBinary = lhs:Expr BinaryOperator rhs:Expr + // where we need to get the nth child of type Expr + let mut type_counts: std::collections::HashMap = + std::collections::HashMap::new(); let methods = node.fields.iter().map(|field| { let method_name = field.method_name(kinds); let ty = field.ty(); + let ty_str = ty.to_string(); if field.is_many() { quote! { @@ -225,9 +231,24 @@ fn generate_nodes(kinds: &KindsSrc, grammar: &AstSrc) -> Result { } } } else { - quote! 
{ - pub fn #method_name(&self) -> Option<#ty> { - support::child(&self.syntax) + // Track which occurrence of this type this field is + let index = *type_counts.get(&ty_str).unwrap_or(&0); + type_counts.insert(ty_str, index + 1); + + if index == 0 { + // First field of this type - use simple child() + quote! { + pub fn #method_name(&self) -> Option<#ty> { + support::child(&self.syntax) + } + } + } else { + // Nth field of this type - use children().nth(N) + let index_lit = proc_macro2::Literal::usize_unsuffixed(index); + quote! { + pub fn #method_name(&self) -> Option<#ty> { + support::children::<#ty>(&self.syntax).nth(#index_lit) + } } } } @@ -535,7 +556,12 @@ pub fn escape_token_macro(token: &str) -> TokenStream { quote! { #c } } else if token.contains('$') { quote! { #token } + } else if token.chars().all(|c| c.is_ascii_alphabetic() || c == '_') { + // Keywords like "tailstrict", "local", etc. - use as identifier + let ident = format_ident!("{}", token); + quote! { #ident } } else { + // Punctuation tokens like "+", "==", etc. let cs = token.chars().map(|c| Punct::new(c, Spacing::Joint)); quote! { #(#cs)* } } From 24842c1f229174b3dc1a6e7864c9d28d4879ec40 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Mon, 9 Feb 2026 13:26:51 +0000 Subject: [PATCH 002/210] feat(lsp): handle didSave notifications Add textDocument/didSave handling in the server and advertise save support in textDocumentSync options. On save, refresh document state (when text is provided), invalidate dependent type cache entries, update import graph state, and reschedule diagnostics for the saved file and open importers. Add integration coverage for save-triggered diagnostics refresh and update architecture docs to describe the didSave path. 
--- crates/jrsonnet-lsp/src/server.rs | 57 ++++++++++-- crates/jrsonnet-lsp/tests/integration_test.rs | 89 +++++++++++++++++-- docs/lsp/ARCHITECTURE.md | 14 ++- 3 files changed, 143 insertions(+), 17 deletions(-) diff --git a/crates/jrsonnet-lsp/src/server.rs b/crates/jrsonnet-lsp/src/server.rs index 9c727d1b..3a6ad133 100644 --- a/crates/jrsonnet-lsp/src/server.rs +++ b/crates/jrsonnet-lsp/src/server.rs @@ -23,7 +23,7 @@ use lsp_server::{Connection, Message, Notification, Request, RequestId, Response use lsp_types::{ notification::{ DidChangeConfiguration, DidChangeTextDocument, DidChangeWatchedFiles, DidCloseTextDocument, - DidOpenTextDocument, Notification as _, PublishDiagnostics, + DidOpenTextDocument, DidSaveTextDocument, Notification as _, PublishDiagnostics, }, request::{ CodeActionRequest, CodeLensRequest, Completion, DocumentHighlightRequest, @@ -34,11 +34,11 @@ use lsp_types::{ CodeActionKind, CodeActionOptions, CodeActionParams, CodeActionProviderCapability, CodeActionResponse, CodeLensOptions, CodeLensParams, CompletionOptions, CompletionParams, DidChangeConfigurationParams, DidChangeTextDocumentParams, DidChangeWatchedFilesParams, - DidCloseTextDocumentParams, DidOpenTextDocumentParams, DocumentFormattingParams, - DocumentHighlight, DocumentHighlightParams, DocumentSymbolParams, DocumentSymbolResponse, - ExecuteCommandOptions, ExecuteCommandParams, FileChangeType, GotoDefinitionParams, HoverParams, - HoverProviderCapability, InitializeParams, InitializeResult, InlayHintParams, OneOf, - PrepareRenameResponse, ReferenceParams, RenameParams, SemanticTokens, + DidCloseTextDocumentParams, DidOpenTextDocumentParams, DidSaveTextDocumentParams, + DocumentFormattingParams, DocumentHighlight, DocumentHighlightParams, DocumentSymbolParams, + DocumentSymbolResponse, ExecuteCommandOptions, ExecuteCommandParams, FileChangeType, + GotoDefinitionParams, HoverParams, HoverProviderCapability, InitializeParams, InitializeResult, + InlayHintParams, OneOf, 
PrepareRenameResponse, ReferenceParams, RenameParams, SemanticTokens, SemanticTokensFullOptions, SemanticTokensOptions, SemanticTokensParams, SemanticTokensServerCapabilities, ServerCapabilities, ServerInfo, SignatureHelp, SignatureHelpOptions, SignatureHelpParams, TextDocumentPositionParams, @@ -279,8 +279,14 @@ impl Server { /// Get the server capabilities. fn server_capabilities() -> ServerCapabilities { ServerCapabilities { - text_document_sync: Some(TextDocumentSyncCapability::Kind( - TextDocumentSyncKind::INCREMENTAL, + text_document_sync: Some(TextDocumentSyncCapability::Options( + lsp_types::TextDocumentSyncOptions { + open_close: Some(true), + change: Some(TextDocumentSyncKind::INCREMENTAL), + will_save: None, + will_save_wait_until: None, + save: Some(lsp_types::TextDocumentSyncSaveOptions::Supported(true)), + }, )), document_symbol_provider: Some(OneOf::Left(true)), definition_provider: Some(OneOf::Left(true)), @@ -671,6 +677,10 @@ impl Server { let params: DidCloseTextDocumentParams = serde_json::from_value(notif.params)?; self.on_did_close(params)?; } + DidSaveTextDocument::METHOD => { + let params: DidSaveTextDocumentParams = serde_json::from_value(notif.params)?; + self.on_did_save(params); + } DidChangeConfiguration::METHOD => { let params: DidChangeConfigurationParams = serde_json::from_value(notif.params)?; self.on_did_change_configuration(params); @@ -784,6 +794,37 @@ impl Server { Ok(()) } + /// Handle textDocument/didSave notification. 
+ fn on_did_save(&self, params: DidSaveTextDocumentParams) { + let uri = ¶ms.text_document.uri; + debug!("Document saved: {}", uri.as_str()); + + let Some(path) = CanonicalPath::from_uri(uri) else { + warn!("Could not convert URI to path: {}", uri.as_str()); + return; + }; + + if let Some(text) = params.text { + let Some(doc) = self.documents.get(&path) else { + return; + }; + let version = doc.version(); + drop(doc); + + if !self.documents.update(&path, text, version) { + warn!("Failed to update saved document contents: {}", uri.as_str()); + return; + } + } + + self.invalidate_type_cache_with_dependents(&path); + self.update_import_graph(&path); + if self.documents.is_open(&path) { + self.schedule_diagnostics(&path); + } + self.schedule_diagnostics_for_open_importers(&path); + } + /// Handle workspace/didChangeConfiguration notification. fn on_did_change_configuration(&mut self, params: DidChangeConfigurationParams) { info!("Configuration changed"); diff --git a/crates/jrsonnet-lsp/tests/integration_test.rs b/crates/jrsonnet-lsp/tests/integration_test.rs index 4db01591..3d6a822a 100644 --- a/crates/jrsonnet-lsp/tests/integration_test.rs +++ b/crates/jrsonnet-lsp/tests/integration_test.rs @@ -9,18 +9,18 @@ use assert_matches::assert_matches; use lsp_server::{Connection, Message, Notification, Request}; use lsp_types::{ notification::{ - DidChangeConfiguration, DidChangeWatchedFiles, DidOpenTextDocument, Notification as _, - PublishDiagnostics, + DidChangeConfiguration, DidChangeWatchedFiles, DidOpenTextDocument, DidSaveTextDocument, + Notification as _, PublishDiagnostics, }, request::{ CodeActionRequest, DocumentHighlightRequest, ExecuteCommand, GotoDefinition, Initialize, InlayHintRequest, References, Rename, Request as _, Shutdown, }, DidChangeConfigurationParams, DidChangeWatchedFilesParams, DidOpenTextDocumentParams, - ExecuteCommandParams, FileChangeType, FileEvent, GotoDefinitionParams, GotoDefinitionResponse, - InitializeParams, PartialResultParams, 
Position, ReferenceContext, ReferenceParams, - RenameParams, TextDocumentIdentifier, TextDocumentItem, TextDocumentPositionParams, - WorkDoneProgressParams, + DidSaveTextDocumentParams, ExecuteCommandParams, FileChangeType, FileEvent, + GotoDefinitionParams, GotoDefinitionResponse, InitializeParams, PartialResultParams, Position, + ReferenceContext, ReferenceParams, RenameParams, TextDocumentIdentifier, TextDocumentItem, + TextDocumentPositionParams, WorkDoneProgressParams, }; use serde_json::json; use tempfile::TempDir; @@ -79,6 +79,19 @@ fn did_open_notification(uri: &str, text: &str) -> Notification { ) } +fn did_save_notification(uri: &str, text: Option<&str>) -> Notification { + let params = DidSaveTextDocumentParams { + text_document: TextDocumentIdentifier { + uri: uri.parse().unwrap(), + }, + text: text.map(ToString::to_string), + }; + Notification::new( + DidSaveTextDocument::METHOD.to_string(), + serde_json::to_value(params).unwrap(), + ) +} + /// Helper to create a goto definition request. 
fn goto_definition_request(id: i32, uri: &str, line: u32, character: u32) -> Request { let params = GotoDefinitionParams { @@ -428,6 +441,70 @@ fn test_diagnostics_on_open() { .expect("Server thread should exit cleanly"); } +#[test] +fn test_diagnostics_refresh_on_did_save_with_text() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + let uri = "file:///test/save-refresh.jsonnet"; + client_conn + .sender + .send(Message::Notification(did_open_notification( + uri, "{ a: 1 }", + ))) + .unwrap(); + let opened = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); + assert!(opened.diagnostics.is_empty()); + + client_conn + .sender + .send(Message::Notification(did_save_notification( + uri, + Some("{ a: }"), + ))) + .unwrap(); + let saved_invalid = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); + assert!( + !saved_invalid.diagnostics.is_empty(), + "saving invalid text should publish diagnostics" + ); + + client_conn + .sender + .send(Message::Notification(did_save_notification( + uri, + Some("{ a: 2 }"), + ))) + .unwrap(); + let saved_valid = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); + assert!(saved_valid.diagnostics.is_empty()); + + client_conn + .sender + .send(Message::Request(shutdown_request(2))) + .unwrap(); + let _ = recv_response(&client_conn, 2); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + #[test] fn test_configuration_change_reconfigures_eval_diagnostics() { let (client_conn, server_conn) = Connection::memory(); diff --git a/docs/lsp/ARCHITECTURE.md 
b/docs/lsp/ARCHITECTURE.md index 2d6bbaf5..998e5438 100644 --- a/docs/lsp/ARCHITECTURE.md +++ b/docs/lsp/ARCHITECTURE.md @@ -117,7 +117,7 @@ to documents, import graph, type cache, config, and dependency-aware analysis. `server_capabilities()` currently advertises: -- incremental text sync (`TextDocumentSyncKind::INCREMENTAL`) +- incremental text sync with open/close and save notifications - definition, hover, document symbols, document highlights - completion (trigger `.`) - signature help (triggers `(` and `,`) @@ -144,13 +144,12 @@ Implemented notifications: - `textDocument/didOpen` - `textDocument/didChange` +- `textDocument/didSave` - `textDocument/didClose` - `workspace/didChangeConfiguration` - `workspace/didChangeWatchedFiles` - `exit` -`textDocument/didSave` is not currently handled. - ### Open/change path For open/change events, the server: @@ -169,6 +168,15 @@ For close events, the server: 3. Removes import graph entries for the closed file. 4. Publishes empty diagnostics for the URI. +### Save path + +For save events, the server: + +1. Optionally updates open-document contents when `didSave` includes `text`. +2. Invalidates type cache entries for the file and dependent importers. +3. Refreshes import graph entries for the saved file. +4. Schedules diagnostics for the saved file and currently open importers. + ### Watched file path For watched file events, the server invalidates cache and updates graph even for From cc23c0db3ee735d686fd70696aa4f185feab64bc Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Mon, 9 Feb 2026 13:28:40 +0000 Subject: [PATCH 003/210] feat(lsp): implement findReferences execute command Wire into async command execution with argument parsing for URI, position, and optional includeDeclaration. Reuse the existing references path and return LSP Location arrays to command callers. Add integration coverage for command behavior (with and without declarations) and update LSP docs to mark the command as implemented. 
--- .../jrsonnet-lsp/src/server/async_requests.rs | 43 +++++- crates/jrsonnet-lsp/tests/integration_test.rs | 143 ++++++++++++++++++ docs/lsp/ARCHITECTURE.md | 4 +- docs/lsp/HANDLERS.md | 4 +- 4 files changed, 186 insertions(+), 8 deletions(-) diff --git a/crates/jrsonnet-lsp/src/server/async_requests.rs b/crates/jrsonnet-lsp/src/server/async_requests.rs index 5ec122bd..59196551 100644 --- a/crates/jrsonnet-lsp/src/server/async_requests.rs +++ b/crates/jrsonnet-lsp/src/server/async_requests.rs @@ -8,8 +8,9 @@ use jrsonnet_lsp_types::GlobalTyStore; use lsp_types::{ CodeLens, CodeLensParams, CompletionParams, CompletionResponse, ExecuteCommandParams, GotoDefinitionParams, GotoDefinitionResponse, Hover, HoverParams, InlayHint, InlayHintParams, - Location, ReferenceParams, RenameParams, SymbolInformation, WorkspaceEdit, - WorkspaceSymbolParams, + Location, PartialResultParams, Position, ReferenceContext, ReferenceParams, RenameParams, + SymbolInformation, TextDocumentIdentifier, TextDocumentPositionParams, WorkDoneProgressParams, + WorkspaceEdit, WorkspaceSymbolParams, }; use parking_lot::RwLock; use rayon::prelude::*; @@ -249,6 +250,19 @@ impl AsyncRequestContext { let uri = params.arguments.first()?.as_str()?; self.execute_find_transitive_importers(uri) } + "jrsonnet.findReferences" => { + let uri = params.arguments.first()?.as_str()?; + let line = params.arguments.get(1)?.as_u64()?; + let line = u32::try_from(line).ok()?; + let character = params.arguments.get(2)?.as_u64()?; + let character = u32::try_from(character).ok()?; + let include_declaration = params + .arguments + .get(3) + .and_then(serde_json::Value::as_bool) + .unwrap_or(false); + self.execute_find_references(uri, line, character, include_declaration) + } _ => { warn!("Unknown command: {}", params.command); None @@ -358,6 +372,31 @@ impl AsyncRequestContext { })) } + fn execute_find_references( + &self, + uri: &str, + line: u32, + character: u32, + include_declaration: bool, + ) -> Option { + let 
uri_parsed: lsp_types::Uri = uri.parse().ok()?; + let refs = self + .references(ReferenceParams { + text_document_position: TextDocumentPositionParams { + text_document: TextDocumentIdentifier { uri: uri_parsed }, + position: Position { line, character }, + }, + context: ReferenceContext { + include_declaration, + }, + work_done_progress_params: WorkDoneProgressParams::default(), + partial_result_params: PartialResultParams::default(), + }) + .unwrap_or_default(); + + serde_json::to_value(refs).ok() + } + /// Search order: /// 1. Relative to the importing file's directory /// 2. Each directory in jpath (in order) diff --git a/crates/jrsonnet-lsp/tests/integration_test.rs b/crates/jrsonnet-lsp/tests/integration_test.rs index 3d6a822a..c66d3a93 100644 --- a/crates/jrsonnet-lsp/tests/integration_test.rs +++ b/crates/jrsonnet-lsp/tests/integration_test.rs @@ -953,6 +953,149 @@ fn test_code_action_unused_variable_quickfix() { .expect("Server thread should exit cleanly"); } +#[test] +fn test_execute_command_find_references() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + let uri = "file:///test/find-refs-command.jsonnet"; + let text = "local x = 1; x + x"; + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text))) + .unwrap(); + let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); + + client_conn + .sender + .send(Message::Request(execute_command_request( + 2, + "jrsonnet.findReferences", + vec![ + serde_json::Value::String(uri.to_string()), + serde_json::Value::Number(0_u64.into()), + serde_json::Value::Number(13_u64.into()), + ], + ))) + .unwrap(); + let response = recv_response(&client_conn, 2); + 
assert!(response.error.is_none(), "Command should succeed"); + let refs: Vec = + serde_json::from_value(response.result.expect("command should return result")).unwrap(); + let expected_without_declaration = vec![ + lsp_types::Location { + uri: uri.parse().unwrap(), + range: lsp_types::Range { + start: Position { + line: 0, + character: 13, + }, + end: Position { + line: 0, + character: 14, + }, + }, + }, + lsp_types::Location { + uri: uri.parse().unwrap(), + range: lsp_types::Range { + start: Position { + line: 0, + character: 17, + }, + end: Position { + line: 0, + character: 18, + }, + }, + }, + ]; + assert_eq!(refs, expected_without_declaration); + + client_conn + .sender + .send(Message::Request(execute_command_request( + 3, + "jrsonnet.findReferences", + vec![ + serde_json::Value::String(uri.to_string()), + serde_json::Value::Number(0_u64.into()), + serde_json::Value::Number(13_u64.into()), + serde_json::Value::Bool(true), + ], + ))) + .unwrap(); + let response = recv_response(&client_conn, 3); + assert!(response.error.is_none(), "Command should succeed"); + let refs_with_declaration: Vec = + serde_json::from_value(response.result.expect("command should return result")).unwrap(); + let expected_with_declaration = vec![ + lsp_types::Location { + uri: uri.parse().unwrap(), + range: lsp_types::Range { + start: Position { + line: 0, + character: 6, + }, + end: Position { + line: 0, + character: 7, + }, + }, + }, + lsp_types::Location { + uri: uri.parse().unwrap(), + range: lsp_types::Range { + start: Position { + line: 0, + character: 13, + }, + end: Position { + line: 0, + character: 14, + }, + }, + }, + lsp_types::Location { + uri: uri.parse().unwrap(), + range: lsp_types::Range { + start: Position { + line: 0, + character: 17, + }, + end: Position { + line: 0, + character: 18, + }, + }, + }, + ]; + assert_eq!(refs_with_declaration, expected_with_declaration); + + client_conn + .sender + .send(Message::Request(shutdown_request(4))) + .unwrap(); + let _ = 
recv_response(&client_conn, 4); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + #[test] fn test_watched_file_refreshes_unopened_importers_for_references() { let tmp = TempDir::new().expect("tempdir should be created"); diff --git a/docs/lsp/ARCHITECTURE.md b/docs/lsp/ARCHITECTURE.md index 998e5438..c1bfd05a 100644 --- a/docs/lsp/ARCHITECTURE.md +++ b/docs/lsp/ARCHITECTURE.md @@ -272,9 +272,7 @@ Current async command implementation handles: - `jrsonnet.evalFile` - `jrsonnet.evalExpression` - `jrsonnet.findTransitiveImporters` - -`jrsonnet.findReferences` is currently advertised but does not have a dedicated -execution branch in `execute_command` yet. +- `jrsonnet.findReferences` ## Concurrency Strategy diff --git a/docs/lsp/HANDLERS.md b/docs/lsp/HANDLERS.md index 7fefbc66..15f2fe67 100644 --- a/docs/lsp/HANDLERS.md +++ b/docs/lsp/HANDLERS.md @@ -239,9 +239,7 @@ Current async command implementation handles: - `jrsonnet.evalFile` - `jrsonnet.evalExpression` - `jrsonnet.findTransitiveImporters` - -`jrsonnet.findReferences` is advertised in server capabilities but has no -dedicated branch in `execute_command`. +- `jrsonnet.findReferences` Code-lens output may also contain `jrsonnet.showErrors` for informational UI actions. From dd4c6a9c518bf6d87882e150d010cb1696091892 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Mon, 9 Feb 2026 13:32:30 +0000 Subject: [PATCH 004/210] feat(lsp): add semantic tokens range support Advertise range support in semantic token capabilities, route textDocument/semanticTokens/range requests in the server, and add a range-aware token generation path in handlers. Add unit and integration tests for range filtering and update LSP docs to reflect full + range semantic token support. 
--- crates/jrsonnet-lsp-handlers/src/lib.rs | 4 +- .../src/semantic_tokens.rs | 80 +++++++++++++- crates/jrsonnet-lsp/src/server.rs | 27 ++++- crates/jrsonnet-lsp/tests/integration_test.rs | 104 +++++++++++++++++- docs/lsp/ARCHITECTURE.md | 3 +- docs/lsp/HANDLERS.md | 42 +++---- 6 files changed, 224 insertions(+), 36 deletions(-) diff --git a/crates/jrsonnet-lsp-handlers/src/lib.rs b/crates/jrsonnet-lsp-handlers/src/lib.rs index c3c8a99f..5a426564 100644 --- a/crates/jrsonnet-lsp-handlers/src/lib.rs +++ b/crates/jrsonnet-lsp-handlers/src/lib.rs @@ -26,6 +26,8 @@ pub use hover::hover; pub use inlay_hint::inlay_hints; pub use references::{find_cross_file_references, find_references}; pub use rename::{prepare_rename, rename, rename_cross_file}; -pub use semantic_tokens::{legend as semantic_tokens_legend, semantic_tokens}; +pub use semantic_tokens::{ + legend as semantic_tokens_legend, semantic_tokens, semantic_tokens_range, +}; pub use signature_help::signature_help; pub use symbols::{document_symbols, workspace_symbols_for_document}; diff --git a/crates/jrsonnet-lsp-handlers/src/semantic_tokens.rs b/crates/jrsonnet-lsp-handlers/src/semantic_tokens.rs index e09b5aa4..33bf28d0 100644 --- a/crates/jrsonnet-lsp-handlers/src/semantic_tokens.rs +++ b/crates/jrsonnet-lsp-handlers/src/semantic_tokens.rs @@ -7,7 +7,7 @@ use jrsonnet_rowan_parser::{ nodes::{BindFunction, Destruct, ParamsDesc, StmtLocal}, AstNode, SyntaxKind, SyntaxNode, SyntaxToken, }; -use lsp_types::{SemanticToken, SemanticTokenType, SemanticTokens, SemanticTokensLegend}; +use lsp_types::{Range, SemanticToken, SemanticTokenType, SemanticTokens, SemanticTokensLegend}; /// Semantic token type with compile-time index. 
/// @@ -91,7 +91,25 @@ pub fn semantic_tokens(document: &Document) -> SemanticTokens { let line_index = document.line_index(); let ast = document.ast(); - let mut builder = SemanticTokenBuilder::new(line_index, text); + let mut builder = SemanticTokenBuilder::new(line_index, text, None); + + // Walk all tokens in the document + for element in ast.syntax().descendants_with_tokens() { + if let Some(token) = element.into_token() { + builder.visit_token(&token); + } + } + + builder.build() +} + +/// Compute semantic tokens for a specific range in a document. +pub fn semantic_tokens_range(document: &Document, range: Range) -> SemanticTokens { + let text = document.text(); + let line_index = document.line_index(); + let ast = document.ast(); + + let mut builder = SemanticTokenBuilder::new(line_index, text, Some(range)); // Walk all tokens in the document for element in ast.syntax().descendants_with_tokens() { @@ -107,6 +125,7 @@ pub fn semantic_tokens(document: &Document) -> SemanticTokens { struct SemanticTokenBuilder<'a> { line_index: &'a LineIndex, text: &'a str, + range: Option, tokens: Vec, } @@ -120,10 +139,11 @@ struct RawToken { } impl<'a> SemanticTokenBuilder<'a> { - fn new(line_index: &'a LineIndex, text: &'a str) -> Self { + fn new(line_index: &'a LineIndex, text: &'a str, range: Option) -> Self { Self { line_index, text, + range, tokens: Vec::new(), } } @@ -306,7 +326,7 @@ impl<'a> SemanticTokenBuilder<'a> { if lines.len() <= 1 { // Single line token - self.tokens.push(RawToken { + self.push_token_if_in_range(RawToken { line: start_pos.line.0, start_char: start_pos.character.0, length: token_text.len() as u32, @@ -321,7 +341,7 @@ impl<'a> SemanticTokenBuilder<'a> { let length = line.len() as u32; if length > 0 { - self.tokens.push(RawToken { + self.push_token_if_in_range(RawToken { line: line_num, start_char, length, @@ -333,6 +353,26 @@ impl<'a> SemanticTokenBuilder<'a> { } } + fn push_token_if_in_range(&mut self, token: RawToken) { + let Some(range) = 
&self.range else { + self.tokens.push(token); + return; + }; + + let token_end = token.start_char.saturating_add(token.length); + if token.line < range.start.line || token.line > range.end.line { + return; + } + if token.line == range.start.line && token_end <= range.start.character { + return; + } + if token.line == range.end.line && token.start_char >= range.end.character { + return; + } + + self.tokens.push(token); + } + fn build(mut self) -> SemanticTokens { // Sort tokens by position self.tokens @@ -567,4 +607,34 @@ mod tests { assert!(!leg.token_types.is_empty()); assert!(!leg.token_modifiers.is_empty()); } + + #[test] + fn test_semantic_tokens_range_filters_lines() { + let code = "local x = 1\nlocal y = x + 1"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + let tokens = semantic_tokens_range( + &doc, + Range { + start: lsp_types::Position { + line: 1, + character: 0, + }, + end: lsp_types::Position { + line: 1, + character: 100, + }, + }, + ); + assert!(!tokens.data.is_empty()); + + let mut absolute_line = 0_u32; + for token in tokens.data { + absolute_line += token.delta_line; + assert_eq!( + absolute_line, 1, + "token line should stay inside requested range" + ); + } + } } diff --git a/crates/jrsonnet-lsp/src/server.rs b/crates/jrsonnet-lsp/src/server.rs index 3a6ad133..64385acf 100644 --- a/crates/jrsonnet-lsp/src/server.rs +++ b/crates/jrsonnet-lsp/src/server.rs @@ -29,7 +29,8 @@ use lsp_types::{ CodeActionRequest, CodeLensRequest, Completion, DocumentHighlightRequest, DocumentSymbolRequest, ExecuteCommand, Formatting, GotoDefinition, HoverRequest, InlayHintRequest, PrepareRenameRequest, References, Rename, Request as _, - SemanticTokensFullRequest, Shutdown, SignatureHelpRequest, WorkspaceSymbolRequest, + SemanticTokensFullRequest, SemanticTokensRangeRequest, Shutdown, SignatureHelpRequest, + WorkspaceSymbolRequest, }, CodeActionKind, CodeActionOptions, CodeActionParams, CodeActionProviderCapability, CodeActionResponse, 
CodeLensOptions, CodeLensParams, CompletionOptions, CompletionParams, @@ -40,8 +41,8 @@ use lsp_types::{ GotoDefinitionParams, HoverParams, HoverProviderCapability, InitializeParams, InitializeResult, InlayHintParams, OneOf, PrepareRenameResponse, ReferenceParams, RenameParams, SemanticTokens, SemanticTokensFullOptions, SemanticTokensOptions, SemanticTokensParams, - SemanticTokensServerCapabilities, ServerCapabilities, ServerInfo, SignatureHelp, - SignatureHelpOptions, SignatureHelpParams, TextDocumentPositionParams, + SemanticTokensRangeParams, SemanticTokensServerCapabilities, ServerCapabilities, ServerInfo, + SignatureHelp, SignatureHelpOptions, SignatureHelpParams, TextDocumentPositionParams, TextDocumentSyncCapability, TextDocumentSyncKind, TextEdit, WorkDoneProgressOptions, WorkspaceSymbolParams, }; @@ -318,7 +319,7 @@ impl Server { SemanticTokensServerCapabilities::SemanticTokensOptions(SemanticTokensOptions { legend: handlers::semantic_tokens_legend(), full: Some(SemanticTokensFullOptions::Bool(true)), - range: None, + range: Some(true), work_done_progress_options: WorkDoneProgressOptions::default(), }), ), @@ -534,6 +535,12 @@ impl Server { let resp = Response::new_ok(req.id, serde_json::to_value(result)?); self.connection.sender.send(Message::Response(resp))?; } + SemanticTokensRangeRequest::METHOD => { + let params: SemanticTokensRangeParams = serde_json::from_value(req.params)?; + let result = self.on_semantic_tokens_range(params); + let resp = Response::new_ok(req.id, serde_json::to_value(result)?); + self.connection.sender.send(Message::Response(resp))?; + } CodeLensRequest::METHOD => { let params: CodeLensParams = serde_json::from_value(req.params)?; let context = self.async_request_context(); @@ -655,6 +662,18 @@ impl Server { Some(handlers::semantic_tokens(&doc)) } + + /// Handle textDocument/semanticTokens/range request. 
+ fn on_semantic_tokens_range( + &self, + params: SemanticTokensRangeParams, + ) -> Option { + let uri = ¶ms.text_document.uri; + let path = CanonicalPath::from_uri(uri)?; + let doc = self.documents.get(&path)?; + + Some(handlers::semantic_tokens_range(&doc, params.range)) + } } impl Server { diff --git a/crates/jrsonnet-lsp/tests/integration_test.rs b/crates/jrsonnet-lsp/tests/integration_test.rs index c66d3a93..a4763730 100644 --- a/crates/jrsonnet-lsp/tests/integration_test.rs +++ b/crates/jrsonnet-lsp/tests/integration_test.rs @@ -14,13 +14,13 @@ use lsp_types::{ }, request::{ CodeActionRequest, DocumentHighlightRequest, ExecuteCommand, GotoDefinition, Initialize, - InlayHintRequest, References, Rename, Request as _, Shutdown, + InlayHintRequest, References, Rename, Request as _, SemanticTokensRangeRequest, Shutdown, }, DidChangeConfigurationParams, DidChangeWatchedFilesParams, DidOpenTextDocumentParams, DidSaveTextDocumentParams, ExecuteCommandParams, FileChangeType, FileEvent, GotoDefinitionParams, GotoDefinitionResponse, InitializeParams, PartialResultParams, Position, - ReferenceContext, ReferenceParams, RenameParams, TextDocumentIdentifier, TextDocumentItem, - TextDocumentPositionParams, WorkDoneProgressParams, + ReferenceContext, ReferenceParams, RenameParams, SemanticTokensRangeParams, + TextDocumentIdentifier, TextDocumentItem, TextDocumentPositionParams, WorkDoneProgressParams, }; use serde_json::json; use tempfile::TempDir; @@ -206,6 +206,38 @@ fn inlay_hint_request( ) } +fn semantic_tokens_range_request( + id: i32, + uri: &str, + start_line: u32, + start_character: u32, + end_line: u32, + end_character: u32, +) -> Request { + let params = SemanticTokensRangeParams { + work_done_progress_params: WorkDoneProgressParams::default(), + partial_result_params: PartialResultParams::default(), + text_document: TextDocumentIdentifier { + uri: uri.parse().unwrap(), + }, + range: lsp_types::Range { + start: Position { + line: start_line, + character: 
start_character, + }, + end: Position { + line: end_line, + character: end_character, + }, + }, + }; + Request::new( + id.into(), + SemanticTokensRangeRequest::METHOD.to_string(), + serde_json::to_value(params).unwrap(), + ) +} + fn code_action_request( id: i32, uri: &str, @@ -1096,6 +1128,72 @@ fn test_execute_command_find_references() { .expect("Server thread should exit cleanly"); } +#[test] +fn test_semantic_tokens_range_request() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + let uri = "file:///test/semantic-range.jsonnet"; + let text = "local first = 1\nlocal second = first + 1"; + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text))) + .unwrap(); + let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); + + client_conn + .sender + .send(Message::Request(semantic_tokens_range_request( + 2, uri, 1, 0, 1, 100, + ))) + .unwrap(); + + let response = recv_response(&client_conn, 2); + assert!( + response.error.is_none(), + "semantic tokens range request should succeed" + ); + let tokens: Option = + serde_json::from_value(response.result.expect("should have result")).unwrap(); + let tokens = tokens.expect("semantic tokens range should be returned"); + assert!( + !tokens.data.is_empty(), + "range should include tokens from requested line" + ); + + let mut absolute_line = 0_u32; + for token in tokens.data { + absolute_line += token.delta_line; + assert_eq!( + absolute_line, 1, + "token should be constrained to requested range line" + ); + } + + client_conn + .sender + .send(Message::Request(shutdown_request(3))) + .unwrap(); + let _ = recv_response(&client_conn, 3); + client_conn + .sender + 
.send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + #[test] fn test_watched_file_refreshes_unopened_importers_for_references() { let tmp = TempDir::new().expect("tempdir should be created"); diff --git a/docs/lsp/ARCHITECTURE.md b/docs/lsp/ARCHITECTURE.md index c1bfd05a..70c4a4eb 100644 --- a/docs/lsp/ARCHITECTURE.md +++ b/docs/lsp/ARCHITECTURE.md @@ -125,7 +125,7 @@ to documents, import graph, type cache, config, and dependency-aware analysis. - references - workspace symbol search - rename with `prepareRename` -- semantic tokens (full document) +- semantic tokens (full document and range) - code actions (quick-fix kind) - code lens (resolve disabled) - execute command (four command IDs) @@ -135,7 +135,6 @@ For the canonical list, see `crates/jrsonnet-lsp/src/server.rs`. Not currently advertised: - declaration/type-definition/implementation providers -- semantic tokens range requests - code-lens resolve requests ## Notification Handling diff --git a/docs/lsp/HANDLERS.md b/docs/lsp/HANDLERS.md index 15f2fe67..0bf2d6e3 100644 --- a/docs/lsp/HANDLERS.md +++ b/docs/lsp/HANDLERS.md @@ -34,25 +34,26 @@ Public handler modules in `crates/jrsonnet-lsp-handlers/src`: Current request routing in `crates/jrsonnet-lsp/src/server.rs`: -| LSP method | Server entry point | Implementation owner | Execution model | Uses `TypeAnalysis` | -| ---------------------------------- | ---------------------------------- | ------------------------------------------------- | --------------- | ------------------- | -| `textDocument/documentSymbol` | `on_document_symbol` | handlers crate (`document_symbols`) | sync | no | -| `textDocument/definition` | async context (`goto_definition`) | mixed: handlers + server import resolution | async | no | -| `textDocument/hover` | async context (`hover`) | handlers crate (`hover`) | async | yes | -| `textDocument/documentHighlight` | `on_document_highlight` | handlers 
crate (`document_highlights`) | sync | no | -| `textDocument/inlayHint` | async context (`inlay_hints`) | handlers crate (`inlay_hints`) | async | yes | -| `textDocument/codeAction` | `on_code_action` | handlers crate (`code_actions`) | sync | no | -| `textDocument/completion` | async context (`completion`) | handlers crate (`completion_with_import_roots`) | async | yes | -| `textDocument/signatureHelp` | `on_signature_help` | handlers crate (`signature_help`) | sync | no | -| `textDocument/formatting` | `on_formatting` | handlers crate (`format_document_with_config`) | sync | no | -| `textDocument/references` | async context (`references`) | mixed: handlers + server import graph merge | async | no | -| `workspace/symbol` | async context (`workspace_symbol`) | handlers crate (`workspace_symbols_for_document`) | async | no | -| `textDocument/prepareRename` | `on_prepare_rename` | handlers crate (`prepare_rename`) | sync | no | -| `textDocument/rename` | async context (`rename`) | handlers crate (`rename_cross_file`) | async | no | -| `textDocument/semanticTokens/full` | `on_semantic_tokens_full` | handlers crate (`semantic_tokens`) | sync | no | -| `textDocument/codeLens` | async context (`code_lens`) | handlers crate (`code_lens`) | async | yes | -| `workspace/executeCommand` | async context (`execute_command`) | server async context | async | no | -| `shutdown` | direct in `handle_request` | server | sync | no | +| LSP method | Server entry point | Implementation owner | Execution model | Uses `TypeAnalysis` | +| ----------------------------------- | ---------------------------------- | ------------------------------------------------- | --------------- | ------------------- | +| `textDocument/documentSymbol` | `on_document_symbol` | handlers crate (`document_symbols`) | sync | no | +| `textDocument/definition` | async context (`goto_definition`) | mixed: handlers + server import resolution | async | no | +| `textDocument/hover` | async context (`hover`) | 
handlers crate (`hover`) | async | yes | +| `textDocument/documentHighlight` | `on_document_highlight` | handlers crate (`document_highlights`) | sync | no | +| `textDocument/inlayHint` | async context (`inlay_hints`) | handlers crate (`inlay_hints`) | async | yes | +| `textDocument/codeAction` | `on_code_action` | handlers crate (`code_actions`) | sync | no | +| `textDocument/completion` | async context (`completion`) | handlers crate (`completion_with_import_roots`) | async | yes | +| `textDocument/signatureHelp` | `on_signature_help` | handlers crate (`signature_help`) | sync | no | +| `textDocument/formatting` | `on_formatting` | handlers crate (`format_document_with_config`) | sync | no | +| `textDocument/references` | async context (`references`) | mixed: handlers + server import graph merge | async | no | +| `workspace/symbol` | async context (`workspace_symbol`) | handlers crate (`workspace_symbols_for_document`) | async | no | +| `textDocument/prepareRename` | `on_prepare_rename` | handlers crate (`prepare_rename`) | sync | no | +| `textDocument/rename` | async context (`rename`) | handlers crate (`rename_cross_file`) | async | no | +| `textDocument/semanticTokens/full` | `on_semantic_tokens_full` | handlers crate (`semantic_tokens`) | sync | no | +| `textDocument/semanticTokens/range` | `on_semantic_tokens_range` | handlers crate (`semantic_tokens_range`) | sync | no | +| `textDocument/codeLens` | async context (`code_lens`) | handlers crate (`code_lens`) | async | yes | +| `workspace/executeCommand` | async context (`execute_command`) | server async context | async | no | +| `shutdown` | direct in `handle_request` | server | sync | no | Async requests are sent back over the server's async response channel after worker completion. @@ -181,8 +182,7 @@ File: `crates/jrsonnet-lsp-handlers/src/rename.rs` File: `crates/jrsonnet-lsp-handlers/src/semantic_tokens.rs` - Produces encoded semantic tokens for full-document requests. 
-- Server currently advertises only `textDocument/semanticTokens/full` (no range - variant). +- Produces encoded semantic tokens for range requests as well. ### Signature Help From 91de75913f5adfceba374f81f89ca9b7b74927c8 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Mon, 9 Feb 2026 13:41:04 +0000 Subject: [PATCH 005/210] feat(lsp): add codeLens/resolve support --- crates/jrsonnet-lsp/src/server.rs | 30 +++-- crates/jrsonnet-lsp/tests/integration_test.rs | 104 +++++++++++++++++- docs/lsp/ARCHITECTURE.md | 3 +- docs/lsp/HANDLERS.md | 4 + 4 files changed, 128 insertions(+), 13 deletions(-) diff --git a/crates/jrsonnet-lsp/src/server.rs b/crates/jrsonnet-lsp/src/server.rs index 64385acf..7bddbd9a 100644 --- a/crates/jrsonnet-lsp/src/server.rs +++ b/crates/jrsonnet-lsp/src/server.rs @@ -26,20 +26,21 @@ use lsp_types::{ DidOpenTextDocument, DidSaveTextDocument, Notification as _, PublishDiagnostics, }, request::{ - CodeActionRequest, CodeLensRequest, Completion, DocumentHighlightRequest, + CodeActionRequest, CodeLensRequest, CodeLensResolve, Completion, DocumentHighlightRequest, DocumentSymbolRequest, ExecuteCommand, Formatting, GotoDefinition, HoverRequest, InlayHintRequest, PrepareRenameRequest, References, Rename, Request as _, SemanticTokensFullRequest, SemanticTokensRangeRequest, Shutdown, SignatureHelpRequest, WorkspaceSymbolRequest, }, CodeActionKind, CodeActionOptions, CodeActionParams, CodeActionProviderCapability, - CodeActionResponse, CodeLensOptions, CodeLensParams, CompletionOptions, CompletionParams, - DidChangeConfigurationParams, DidChangeTextDocumentParams, DidChangeWatchedFilesParams, - DidCloseTextDocumentParams, DidOpenTextDocumentParams, DidSaveTextDocumentParams, - DocumentFormattingParams, DocumentHighlight, DocumentHighlightParams, DocumentSymbolParams, - DocumentSymbolResponse, ExecuteCommandOptions, ExecuteCommandParams, FileChangeType, - GotoDefinitionParams, HoverParams, HoverProviderCapability, InitializeParams, InitializeResult, - 
InlayHintParams, OneOf, PrepareRenameResponse, ReferenceParams, RenameParams, SemanticTokens, + CodeActionResponse, CodeLens, CodeLensOptions, CodeLensParams, CompletionOptions, + CompletionParams, DidChangeConfigurationParams, DidChangeTextDocumentParams, + DidChangeWatchedFilesParams, DidCloseTextDocumentParams, DidOpenTextDocumentParams, + DidSaveTextDocumentParams, DocumentFormattingParams, DocumentHighlight, + DocumentHighlightParams, DocumentSymbolParams, DocumentSymbolResponse, ExecuteCommandOptions, + ExecuteCommandParams, FileChangeType, GotoDefinitionParams, HoverParams, + HoverProviderCapability, InitializeParams, InitializeResult, InlayHintParams, OneOf, + PrepareRenameResponse, ReferenceParams, RenameParams, SemanticTokens, SemanticTokensFullOptions, SemanticTokensOptions, SemanticTokensParams, SemanticTokensRangeParams, SemanticTokensServerCapabilities, ServerCapabilities, ServerInfo, SignatureHelp, SignatureHelpOptions, SignatureHelpParams, TextDocumentPositionParams, @@ -333,7 +334,7 @@ impl Server { work_done_progress_options: WorkDoneProgressOptions::default(), }), code_lens_provider: Some(CodeLensOptions { - resolve_provider: Some(false), + resolve_provider: Some(true), }), ..Default::default() } @@ -549,6 +550,12 @@ impl Server { serde_json::to_value(result).map_err(Into::into) }); } + CodeLensResolve::METHOD => { + let params: CodeLens = serde_json::from_value(req.params)?; + let result = Self::on_code_lens_resolve(params); + let resp = Response::new_ok(req.id, serde_json::to_value(result)?); + self.connection.sender.send(Message::Response(resp))?; + } ExecuteCommand::METHOD => { let params: ExecuteCommandParams = serde_json::from_value(req.params)?; let context = self.async_request_context(); @@ -674,6 +681,11 @@ impl Server { Some(handlers::semantic_tokens_range(&doc, params.range)) } + + /// Handle codeLens/resolve request. 
+ fn on_code_lens_resolve(params: CodeLens) -> CodeLens { + handlers::resolve_code_lens(params) + } } impl Server { diff --git a/crates/jrsonnet-lsp/tests/integration_test.rs b/crates/jrsonnet-lsp/tests/integration_test.rs index a4763730..9ece6e3a 100644 --- a/crates/jrsonnet-lsp/tests/integration_test.rs +++ b/crates/jrsonnet-lsp/tests/integration_test.rs @@ -13,8 +13,9 @@ use lsp_types::{ Notification as _, PublishDiagnostics, }, request::{ - CodeActionRequest, DocumentHighlightRequest, ExecuteCommand, GotoDefinition, Initialize, - InlayHintRequest, References, Rename, Request as _, SemanticTokensRangeRequest, Shutdown, + CodeActionRequest, CodeLensRequest, CodeLensResolve, DocumentHighlightRequest, + ExecuteCommand, GotoDefinition, Initialize, InlayHintRequest, References, Rename, + Request as _, SemanticTokensRangeRequest, Shutdown, }, DidChangeConfigurationParams, DidChangeWatchedFilesParams, DidOpenTextDocumentParams, DidSaveTextDocumentParams, ExecuteCommandParams, FileChangeType, FileEvent, @@ -265,6 +266,29 @@ fn code_action_request( ) } +fn code_lens_request(id: i32, uri: &str) -> Request { + let params = lsp_types::CodeLensParams { + text_document: TextDocumentIdentifier { + uri: uri.parse().unwrap(), + }, + work_done_progress_params: WorkDoneProgressParams::default(), + partial_result_params: PartialResultParams::default(), + }; + Request::new( + id.into(), + CodeLensRequest::METHOD.to_string(), + serde_json::to_value(params).unwrap(), + ) +} + +fn code_lens_resolve_request(id: i32, lens: lsp_types::CodeLens) -> Request { + Request::new( + id.into(), + CodeLensResolve::METHOD.to_string(), + serde_json::to_value(lens).unwrap(), + ) +} + fn did_change_watched_files_notification(changes: Vec) -> Notification { let params = DidChangeWatchedFilesParams { changes }; Notification::new( @@ -380,6 +404,11 @@ fn test_initialize_shutdown() { serde_json::Value::String("quickfix".to_string()), "quickfix code action capability should be advertised", ); + assert_eq!( 
+ result["capabilities"]["codeLensProvider"]["resolveProvider"], + serde_json::Value::Bool(true), + "code lens resolve capability should be advertised", + ); let server_name = result .get("serverInfo") .and_then(|s| s.get("name")) @@ -1194,6 +1223,77 @@ fn test_semantic_tokens_range_request() { .expect("Server thread should exit cleanly"); } +#[test] +fn test_code_lens_resolve_request() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + let uri = "file:///test/code-lens-resolve.jsonnet"; + let text = "local x = 1; x + x"; + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text))) + .unwrap(); + let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); + + client_conn + .sender + .send(Message::Request(code_lens_request(2, uri))) + .unwrap(); + let response = recv_response(&client_conn, 2); + assert!(response.error.is_none(), "code lens request should succeed"); + let lenses: Vec = + serde_json::from_value(response.result.expect("should have result")).unwrap(); + assert!(!lenses.is_empty(), "expected code lenses for test document"); + let reference_lens = lenses + .into_iter() + .find(|lens| { + lens.command + .as_ref() + .is_some_and(|command| command.command == "jrsonnet.findReferences") + }) + .expect("expected reference count code lens"); + + client_conn + .sender + .send(Message::Request(code_lens_resolve_request( + 3, + reference_lens.clone(), + ))) + .unwrap(); + let response = recv_response(&client_conn, 3); + assert!( + response.error.is_none(), + "code lens resolve request should succeed" + ); + let resolved: lsp_types::CodeLens = + serde_json::from_value(response.result.expect("should have result")).unwrap(); + 
assert_eq!(resolved, reference_lens); + + client_conn + .sender + .send(Message::Request(shutdown_request(4))) + .unwrap(); + let _ = recv_response(&client_conn, 4); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + #[test] fn test_watched_file_refreshes_unopened_importers_for_references() { let tmp = TempDir::new().expect("tempdir should be created"); diff --git a/docs/lsp/ARCHITECTURE.md b/docs/lsp/ARCHITECTURE.md index 70c4a4eb..85889faa 100644 --- a/docs/lsp/ARCHITECTURE.md +++ b/docs/lsp/ARCHITECTURE.md @@ -127,7 +127,7 @@ to documents, import graph, type cache, config, and dependency-aware analysis. - rename with `prepareRename` - semantic tokens (full document and range) - code actions (quick-fix kind) -- code lens (resolve disabled) +- code lens (resolve enabled) - execute command (four command IDs) For the canonical list, see `crates/jrsonnet-lsp/src/server.rs`. @@ -135,7 +135,6 @@ For the canonical list, see `crates/jrsonnet-lsp/src/server.rs`. 
Not currently advertised: - declaration/type-definition/implementation providers -- code-lens resolve requests ## Notification Handling diff --git a/docs/lsp/HANDLERS.md b/docs/lsp/HANDLERS.md index 0bf2d6e3..1ee2638b 100644 --- a/docs/lsp/HANDLERS.md +++ b/docs/lsp/HANDLERS.md @@ -52,6 +52,7 @@ Current request routing in `crates/jrsonnet-lsp/src/server.rs`: | `textDocument/semanticTokens/full` | `on_semantic_tokens_full` | handlers crate (`semantic_tokens`) | sync | no | | `textDocument/semanticTokens/range` | `on_semantic_tokens_range` | handlers crate (`semantic_tokens_range`) | sync | no | | `textDocument/codeLens` | async context (`code_lens`) | handlers crate (`code_lens`) | async | yes | +| `codeLens/resolve` | `on_code_lens_resolve` | handlers crate (`resolve_code_lens`) | sync | no | | `workspace/executeCommand` | async context (`execute_command`) | server async context | async | no | | `shutdown` | direct in `handle_request` | server | sync | no | @@ -83,6 +84,9 @@ File: `crates/jrsonnet-lsp-handlers/src/code_lens.rs` Current server path builds `CodeLensConfig::all()` in async context and passes computed `TypeAnalysis`. +`codeLens/resolve` is a synchronous pass-through over +`handlers::resolve_code_lens`, which currently preserves lens payloads. 
+ Lens categories: - reference count lenses at definition sites From 71f2ad85adac58473288b8b4d2cd1855307d652d Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Mon, 9 Feb 2026 13:51:21 +0000 Subject: [PATCH 006/210] feat(lsp): add declaration request support --- crates/jrsonnet-lsp/src/server.rs | 13 +- .../jrsonnet-lsp/src/server/async_requests.rs | 7 ++ crates/jrsonnet-lsp/tests/integration_test.rs | 114 ++++++++++++++++-- docs/lsp/ARCHITECTURE.md | 5 +- docs/lsp/HANDLERS.md | 4 + 5 files changed, 127 insertions(+), 16 deletions(-) diff --git a/crates/jrsonnet-lsp/src/server.rs b/crates/jrsonnet-lsp/src/server.rs index 7bddbd9a..906cbb44 100644 --- a/crates/jrsonnet-lsp/src/server.rs +++ b/crates/jrsonnet-lsp/src/server.rs @@ -27,8 +27,8 @@ use lsp_types::{ }, request::{ CodeActionRequest, CodeLensRequest, CodeLensResolve, Completion, DocumentHighlightRequest, - DocumentSymbolRequest, ExecuteCommand, Formatting, GotoDefinition, HoverRequest, - InlayHintRequest, PrepareRenameRequest, References, Rename, Request as _, + DocumentSymbolRequest, ExecuteCommand, Formatting, GotoDeclaration, GotoDefinition, + HoverRequest, InlayHintRequest, PrepareRenameRequest, References, Rename, Request as _, SemanticTokensFullRequest, SemanticTokensRangeRequest, Shutdown, SignatureHelpRequest, WorkspaceSymbolRequest, }, @@ -292,6 +292,7 @@ impl Server { )), document_symbol_provider: Some(OneOf::Left(true)), definition_provider: Some(OneOf::Left(true)), + declaration_provider: Some(lsp_types::DeclarationCapability::Simple(true)), hover_provider: Some(HoverProviderCapability::Simple(true)), document_highlight_provider: Some(OneOf::Left(true)), inlay_hint_provider: Some(OneOf::Left(true)), @@ -452,6 +453,14 @@ impl Server { serde_json::to_value(result).map_err(Into::into) }); } + GotoDeclaration::METHOD => { + let params: GotoDefinitionParams = serde_json::from_value(req.params)?; + let context = self.async_request_context(); + self.spawn_async_response(req.id, GotoDeclaration::METHOD, 
move || { + let result = context.goto_declaration(params); + serde_json::to_value(result).map_err(Into::into) + }); + } HoverRequest::METHOD => { let params: HoverParams = serde_json::from_value(req.params)?; let context = self.async_request_context(); diff --git a/crates/jrsonnet-lsp/src/server/async_requests.rs b/crates/jrsonnet-lsp/src/server/async_requests.rs index 59196551..c3d3a1e6 100644 --- a/crates/jrsonnet-lsp/src/server/async_requests.rs +++ b/crates/jrsonnet-lsp/src/server/async_requests.rs @@ -106,6 +106,13 @@ impl AsyncRequestContext { } } + pub(super) fn goto_declaration( + &self, + params: GotoDefinitionParams, + ) -> Option { + self.goto_definition(params) + } + pub(super) fn inlay_hints(&self, params: InlayHintParams) -> Option> { let uri = ¶ms.text_document.uri; let path = CanonicalPath::from_uri(uri)?; diff --git a/crates/jrsonnet-lsp/tests/integration_test.rs b/crates/jrsonnet-lsp/tests/integration_test.rs index 9ece6e3a..425e8d69 100644 --- a/crates/jrsonnet-lsp/tests/integration_test.rs +++ b/crates/jrsonnet-lsp/tests/integration_test.rs @@ -14,8 +14,8 @@ use lsp_types::{ }, request::{ CodeActionRequest, CodeLensRequest, CodeLensResolve, DocumentHighlightRequest, - ExecuteCommand, GotoDefinition, Initialize, InlayHintRequest, References, Rename, - Request as _, SemanticTokensRangeRequest, Shutdown, + ExecuteCommand, GotoDeclaration, GotoDefinition, Initialize, InlayHintRequest, References, + Rename, Request as _, SemanticTokensRangeRequest, Shutdown, }, DidChangeConfigurationParams, DidChangeWatchedFilesParams, DidOpenTextDocumentParams, DidSaveTextDocumentParams, ExecuteCommandParams, FileChangeType, FileEvent, @@ -112,6 +112,25 @@ fn goto_definition_request(id: i32, uri: &str, line: u32, character: u32) -> Req ) } +/// Helper to create a goto declaration request. 
+fn goto_declaration_request(id: i32, uri: &str, line: u32, character: u32) -> Request { + let params = GotoDefinitionParams { + text_document_position_params: TextDocumentPositionParams { + text_document: TextDocumentIdentifier { + uri: uri.parse().unwrap(), + }, + position: Position { line, character }, + }, + work_done_progress_params: WorkDoneProgressParams::default(), + partial_result_params: PartialResultParams::default(), + }; + Request::new( + id.into(), + GotoDeclaration::METHOD.to_string(), + serde_json::to_value(params).unwrap(), + ) +} + /// Helper to create a references request. fn references_request( id: i32, @@ -404,16 +423,21 @@ fn test_initialize_shutdown() { serde_json::Value::String("quickfix".to_string()), "quickfix code action capability should be advertised", ); - assert_eq!( - result["capabilities"]["codeLensProvider"]["resolveProvider"], - serde_json::Value::Bool(true), - "code lens resolve capability should be advertised", - ); - let server_name = result - .get("serverInfo") - .and_then(|s| s.get("name")) - .and_then(|n| n.as_str()) - .expect("should have serverInfo.name"); + assert_eq!( + result["capabilities"]["codeLensProvider"]["resolveProvider"], + serde_json::Value::Bool(true), + "code lens resolve capability should be advertised", + ); + assert_eq!( + result["capabilities"]["declarationProvider"], + serde_json::Value::Bool(true), + "declaration capability should be advertised", + ); + let server_name = result + .get("serverInfo") + .and_then(|s| s.get("name")) + .and_then(|n| n.as_str()) + .expect("should have serverInfo.name"); assert!(server_name.contains("jrsonnet")); }); @@ -762,6 +786,72 @@ fn test_goto_definition() { .expect("Server thread should exit cleanly"); } +#[test] +fn test_goto_declaration() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = recv_response(&client_conn, 
1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + let uri = "file:///test/declaration.jsonnet"; + let text = "local x = 1; x + 1"; + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text))) + .unwrap(); + let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); + + client_conn + .sender + .send(Message::Request(goto_declaration_request(2, uri, 0, 13))) + .unwrap(); + + let response = recv_response(&client_conn, 2); + assert!( + response.error.is_none(), + "Goto declaration request should succeed" + ); + let result: Option = + serde_json::from_value(response.result.expect("should have result")).unwrap(); + assert_eq!( + result, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: uri.parse().unwrap(), + range: lsp_types::Range { + start: Position { + line: 0, + character: 6, + }, + end: Position { + line: 0, + character: 7, + }, + }, + })) + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(3))) + .unwrap(); + let _ = recv_response(&client_conn, 3); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + #[test] fn test_document_highlight() { let (client_conn, server_conn) = Connection::memory(); diff --git a/docs/lsp/ARCHITECTURE.md b/docs/lsp/ARCHITECTURE.md index 85889faa..6534a21f 100644 --- a/docs/lsp/ARCHITECTURE.md +++ b/docs/lsp/ARCHITECTURE.md @@ -100,6 +100,7 @@ Handled directly on the server thread: Dispatched via `spawn_async_response` (Rayon): - `textDocument/definition` +- `textDocument/declaration` - `textDocument/hover` - `textDocument/inlayHint` - `textDocument/completion` @@ -118,7 +119,7 @@ to documents, import graph, type cache, config, and dependency-aware analysis. 
`server_capabilities()` currently advertises: - incremental text sync with open/close and save notifications -- definition, hover, document symbols, document highlights +- definition, declaration, hover, document symbols, document highlights - completion (trigger `.`) - signature help (triggers `(` and `,`) - formatting @@ -134,7 +135,7 @@ For the canonical list, see `crates/jrsonnet-lsp/src/server.rs`. Not currently advertised: -- declaration/type-definition/implementation providers +- type-definition/implementation providers ## Notification Handling diff --git a/docs/lsp/HANDLERS.md b/docs/lsp/HANDLERS.md index 1ee2638b..23194707 100644 --- a/docs/lsp/HANDLERS.md +++ b/docs/lsp/HANDLERS.md @@ -38,6 +38,7 @@ Current request routing in `crates/jrsonnet-lsp/src/server.rs`: | ----------------------------------- | ---------------------------------- | ------------------------------------------------- | --------------- | ------------------- | | `textDocument/documentSymbol` | `on_document_symbol` | handlers crate (`document_symbols`) | sync | no | | `textDocument/definition` | async context (`goto_definition`) | mixed: handlers + server import resolution | async | no | +| `textDocument/declaration` | async context (`goto_declaration`) | mixed: handlers + server import resolution | async | no | | `textDocument/hover` | async context (`hover`) | handlers crate (`hover`) | async | yes | | `textDocument/documentHighlight` | `on_document_highlight` | handlers crate (`document_highlights`) | sync | no | | `textDocument/inlayHint` | async context (`inlay_hints`) | handlers crate (`inlay_hints`) | async | yes | @@ -129,6 +130,9 @@ File: `crates/jrsonnet-lsp-handlers/src/definition.rs` Async server context resolves import paths and can locate nested field ranges in imported files before returning final `Location`. +`textDocument/declaration` currently delegates to the same resolution path as +`textDocument/definition`. 
+ ### Document Highlight File: `crates/jrsonnet-lsp-handlers/src/document_highlight.rs` From a9524c231951116c4b365a70e4d3ddc81110182e Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Mon, 9 Feb 2026 14:23:55 +0000 Subject: [PATCH 007/210] fix(lsp-inference): prevent local Ty leakage across analysis boundaries - finalize each TypeAnalysis by merging its LocalTyStore into the shared GlobalTyStore and remapping all recorded expression/document types before caching or reuse. - introduce GlobalTy in jrsonnet-lsp-types and switch type-cache/import-resolver boundaries to use it, so cross-file APIs cannot accept local Ty IDs at compile time. - harden TySubst::merge for cyclic local types by lowering unresolved local refs to Any during merge, preventing invalid global entries. - update cross-file cache tests/bench wiring for GlobalTy and add cycle regression coverage in subst tests. --- crates/jrsonnet-lsp-inference/src/analysis.rs | 52 +++++-- crates/jrsonnet-lsp-inference/src/env.rs | 6 +- crates/jrsonnet-lsp-inference/src/expr.rs | 2 +- .../jrsonnet-lsp-inference/src/type_cache.rs | 137 ++++++++++-------- crates/jrsonnet-lsp-types/src/lib.rs | 6 +- crates/jrsonnet-lsp-types/src/store.rs | 124 ++++++++++++++++ crates/jrsonnet-lsp-types/src/subst.rs | 107 ++++++++++++-- crates/jrsonnet-lsp/benches/type_cache.rs | 16 +- crates/jrsonnet-lsp/tests/cross_file_tests.rs | 44 +++--- 9 files changed, 373 insertions(+), 121 deletions(-) diff --git a/crates/jrsonnet-lsp-inference/src/analysis.rs b/crates/jrsonnet-lsp-inference/src/analysis.rs index 291c0054..b9654b36 100644 --- a/crates/jrsonnet-lsp-inference/src/analysis.rs +++ b/crates/jrsonnet-lsp-inference/src/analysis.rs @@ -8,7 +8,7 @@ use std::sync::Arc; use jrsonnet_lsp_document::Document; use jrsonnet_lsp_types::{ - is_subtype_ty, FunctionData, GlobalTyStore, MutStore, ObjectData, Ty, TyData, + is_subtype_ty, FunctionData, GlobalTy, GlobalTyStore, MutStore, ObjectData, Ty, TyData, TySubst, }; use jrsonnet_rowan_parser::{ 
nodes::{Bind, Expr, ExprBase, Member, ObjBody, StmtLocal}, @@ -38,7 +38,7 @@ pub struct TypeAnalysis { /// Immutable after construction. expr_types: FxHashMap, /// The inferred type of the document's root expression. - document_type: Ty, + document_type: GlobalTy, } impl std::fmt::Debug for TypeAnalysis { @@ -63,7 +63,7 @@ impl TypeAnalysis { Self { store: RwLock::new(MutStore::new(Arc::new(GlobalTyStore::new()))), expr_types: FxHashMap::default(), - document_type: Ty::ANY, + document_type: GlobalTy::ANY, } } @@ -72,7 +72,7 @@ impl TypeAnalysis { Self { store: RwLock::new(MutStore::new(global)), expr_types: FxHashMap::default(), - document_type: Ty::ANY, + document_type: GlobalTy::ANY, } } @@ -94,11 +94,7 @@ impl TypeAnalysis { Ty::ANY }; - Self { - store: RwLock::new(env.into_store()), - expr_types, - document_type: doc_ty, - } + Self::finalize_analysis(global, env.into_store(), expr_types, doc_ty) } /// Analyze a document with a shared global store and import resolver. @@ -118,16 +114,50 @@ impl TypeAnalysis { Ty::ANY }; + Self::finalize_analysis(global, env.into_store(), expr_types, doc_ty) + } + + fn finalize_analysis( + global: Arc, + store: MutStore, + mut expr_types: FxHashMap, + document_type: Ty, + ) -> Self { + let local = store.into_local(); + let subst = TySubst::merge(global.as_ref(), local); + + let map_ty = |ty: Ty| { + let mapped = subst.apply(ty); + if mapped.is_local() { + Ty::ANY + } else { + mapped + } + }; + + for ty in expr_types.values_mut() { + *ty = map_ty(*ty); + } + + let document_type = map_ty(document_type); + let document_type = GlobalTy::new(document_type).expect("document type must be global"); + Self { - store: RwLock::new(env.into_store()), + store: RwLock::new(MutStore::new(global)), expr_types, - document_type: doc_ty, + document_type, } } /// Get the inferred type of the document's root expression. 
#[inline] pub fn document_type(&self) -> Ty { + self.document_type.into() + } + + /// Get the inferred type of the document's root expression as a global type. + #[inline] + pub fn document_type_global(&self) -> GlobalTy { self.document_type } diff --git a/crates/jrsonnet-lsp-inference/src/env.rs b/crates/jrsonnet-lsp-inference/src/env.rs index f1007bc4..b95fd3c5 100644 --- a/crates/jrsonnet-lsp-inference/src/env.rs +++ b/crates/jrsonnet-lsp-inference/src/env.rs @@ -2,7 +2,7 @@ use std::sync::Arc; -use jrsonnet_lsp_types::{GlobalTyStore, MutStore, Ty}; +use jrsonnet_lsp_types::{GlobalTy, GlobalTyStore, MutStore, Ty}; use rowan::TextRange; use rustc_hash::{FxHashMap, FxHashSet}; @@ -18,7 +18,7 @@ pub trait ImportResolver: std::fmt::Debug { /// /// Returns `Some(ty)` if the imported file's type is known, /// `None` otherwise (falls back to `Ty::ANY`). - fn resolve_import(&self, import_path: &str) -> Option; + fn resolve_import(&self, import_path: &str) -> Option; } /// Context for `self` and `super` within object literals (using interned types). @@ -109,7 +109,7 @@ impl TypeEnv { /// /// Returns `Some(ty)` if the resolver is set and the import is cached, /// `None` otherwise. 
- pub fn resolve_import(&self, import_path: &str) -> Option { + pub fn resolve_import(&self, import_path: &str) -> Option { self.import_resolver.as_ref()?.resolve_import(import_path) } diff --git a/crates/jrsonnet-lsp-inference/src/expr.rs b/crates/jrsonnet-lsp-inference/src/expr.rs index 5495fb93..b7e27379 100644 --- a/crates/jrsonnet-lsp-inference/src/expr.rs +++ b/crates/jrsonnet-lsp-inference/src/expr.rs @@ -297,7 +297,7 @@ pub(super) fn infer_base_ty(base: ExprBase, env: &mut TypeEnv, expected: Option< return Ty::ANY; } // Try to resolve the import type, fall back to ANY - env.resolve_import(path_str).unwrap_or(Ty::ANY) + env.resolve_import(path_str).map_or(Ty::ANY, Ty::from) } // Unary operators diff --git a/crates/jrsonnet-lsp-inference/src/type_cache.rs b/crates/jrsonnet-lsp-inference/src/type_cache.rs index aaca214c..7d3e783e 100644 --- a/crates/jrsonnet-lsp-inference/src/type_cache.rs +++ b/crates/jrsonnet-lsp-inference/src/type_cache.rs @@ -11,7 +11,7 @@ use std::{num::NonZeroUsize, sync::Arc}; use jrsonnet_lsp_document::{CanonicalPath, Document, DEFAULT_TYPE_CACHE_CAPACITY}; -use jrsonnet_lsp_types::{GlobalTyStore, Ty}; +use jrsonnet_lsp_types::{GlobalTy, GlobalTyStore}; use lru::LruCache; use parking_lot::RwLock; @@ -36,7 +36,7 @@ pub struct TypeCache { #[derive(Debug, Clone)] struct CachedType { /// The interned type for this file's top-level expression. - ty: Ty, + ty: GlobalTy, /// The document version when this type was cached. version: i32, } @@ -57,6 +57,7 @@ impl TypeCache { } /// Get a reference to the global type store. + #[must_use] pub fn global_types(&self) -> &Arc { &self.global_types } @@ -64,21 +65,22 @@ impl TypeCache { /// Get the cached type for a file, if available. /// /// Uses `peek` to avoid updating LRU order for read-only lookups. 
- pub fn get(&self, path: &CanonicalPath) -> Option { + #[must_use] + pub fn get(&self, path: &CanonicalPath) -> Option { self.cache.peek(path).map(|c| c.ty) } /// Get the cached type for a file and update LRU order. /// /// Use this when the lookup indicates actual usage of the cached type. - pub fn get_and_touch(&mut self, path: &CanonicalPath) -> Option { + pub fn get_and_touch(&mut self, path: &CanonicalPath) -> Option { self.cache.get(path).map(|c| c.ty) } /// Update the cache for a file. /// - /// Note: The Ty must be from a compatible TyStore or be a well-known constant. - pub fn update(&mut self, path: &CanonicalPath, ty: Ty, version: i32) { + /// The type must be global, preventing accidental cross-analysis leakage. + pub fn update(&mut self, path: &CanonicalPath, ty: GlobalTy, version: i32) { self.cache.put(path.clone(), CachedType { ty, version }); } @@ -95,19 +97,19 @@ impl TypeCache { } /// Check if a file's cache is up to date with the given version. + #[must_use] pub fn is_up_to_date(&self, path: &CanonicalPath, version: i32) -> bool { - self.cache - .peek(path) - .map(|c| c.version == version) - .unwrap_or(false) + self.cache.peek(path).is_some_and(|c| c.version == version) } /// Get the number of cached entries. + #[must_use] pub fn len(&self) -> usize { self.cache.len() } /// Check if the cache is empty. + #[must_use] pub fn is_empty(&self) -> bool { self.cache.is_empty() } @@ -128,8 +130,12 @@ pub fn new_shared_cache(global_types: Arc) -> SharedTypeCache { /// Analyze a document and update the type cache. /// -/// Returns the inferred top-level type as a `Ty` from the global store. -pub fn analyze_and_cache(path: &CanonicalPath, doc: &Document, cache: &SharedTypeCache) -> Ty { +/// Returns the inferred top-level type as a global type. 
+pub fn analyze_and_cache( + path: &CanonicalPath, + doc: &Document, + cache: &SharedTypeCache, +) -> GlobalTy { let version = doc.version().0; // Check if we already have a cached type for this version @@ -156,7 +162,7 @@ pub fn analyze_and_cache(path: &CanonicalPath, doc: &Document, cache: &SharedTyp // Infer the type using the global store and import resolver let analysis = TypeAnalysis::analyze_with_resolver(doc, global_types, import_resolver); - let ty = analysis.document_type(); + let ty = analysis.document_type_global(); // Cache the type (it's already in the global store) { @@ -185,10 +191,10 @@ impl CachingImportResolver { /// * `base_path` - Path to the document being analyzed (used to resolve relative imports) /// * `cache` - Shared type cache for looking up cached types pub fn new(base_path: &std::path::Path, cache: SharedTypeCache) -> Self { - let base_dir = base_path - .parent() - .map(std::path::Path::to_path_buf) - .unwrap_or_else(|| std::path::PathBuf::from(".")); + let base_dir = base_path.parent().map_or_else( + || std::path::PathBuf::from("."), + std::path::Path::to_path_buf, + ); Self { base_dir, cache } } @@ -206,7 +212,7 @@ impl CachingImportResolver { } impl crate::env::ImportResolver for CachingImportResolver { - fn resolve_import(&self, import_path: &str) -> Option { + fn resolve_import(&self, import_path: &str) -> Option { let canonical_path = self.resolve_path(import_path)?; let cache = self.cache.read(); cache.get(&canonical_path) @@ -229,8 +235,8 @@ mod tests { Arc::new(GlobalTyStore::new()) } - /// Assert that the cache contains exactly the specified Ty entries. - fn assert_cache_contents_ty(cache: &TypeCache, expected: &[(&str, Ty)]) { + /// Assert that the cache contains exactly the specified global type entries. 
+ fn assert_cache_contents_ty(cache: &TypeCache, expected: &[(&str, GlobalTy)]) { let actual: BTreeSet<_> = cache .cache .iter() @@ -251,15 +257,15 @@ mod tests { // Initially empty assert_cache_contents_ty(&cache, &[]); - // Add an entry using Ty-native API - cache.update(&path, Ty::NUMBER, 1); - assert_cache_contents_ty(&cache, &[("main.jsonnet", Ty::NUMBER)]); + // Add an entry + cache.update(&path, GlobalTy::NUMBER, 1); + assert_cache_contents_ty(&cache, &[("main.jsonnet", GlobalTy::NUMBER)]); assert!(cache.is_up_to_date(&path, 1)); assert!(!cache.is_up_to_date(&path, 2)); // Update the entry - cache.update(&path, Ty::STRING, 2); - assert_cache_contents_ty(&cache, &[("main.jsonnet", Ty::STRING)]); + cache.update(&path, GlobalTy::STRING, 2); + assert_cache_contents_ty(&cache, &[("main.jsonnet", GlobalTy::STRING)]); assert!(cache.is_up_to_date(&path, 2)); // Invalidate @@ -276,19 +282,19 @@ mod tests { // First call should analyze and cache let ty1 = analyze_and_cache(&path, &doc, &cache); - assert_eq!(ty1, Ty::NUMBER); + assert_eq!(ty1, GlobalTy::NUMBER); // Verify cached value - assert_eq!(cache.read().get(&path), Some(Ty::NUMBER)); + assert_eq!(cache.read().get(&path), Some(GlobalTy::NUMBER)); // Second call should return cached value let ty2 = analyze_and_cache(&path, &doc, &cache); - assert_eq!(ty2, Ty::NUMBER); + assert_eq!(ty2, GlobalTy::NUMBER); // New version should re-analyze let doc2 = Document::new("\"hello\"".to_string(), DocVersion::new(2)); let ty3 = analyze_and_cache(&path, &doc2, &cache); - assert_eq!(ty3, Ty::STRING); - assert_eq!(cache.read().get(&path), Some(Ty::STRING)); + assert_eq!(ty3, GlobalTy::STRING); + assert_eq!(cache.read().get(&path), Some(GlobalTy::STRING)); } #[test] @@ -298,16 +304,19 @@ mod tests { let path1 = test_path("file1.jsonnet"); let path2 = test_path("file2.jsonnet"); - cache.update(&path1, Ty::NUMBER, 1); - cache.update(&path2, Ty::STRING, 1); + cache.update(&path1, GlobalTy::NUMBER, 1); + cache.update(&path2, 
GlobalTy::STRING, 1); assert_cache_contents_ty( &cache, - &[("file1.jsonnet", Ty::NUMBER), ("file2.jsonnet", Ty::STRING)], + &[ + ("file1.jsonnet", GlobalTy::NUMBER), + ("file2.jsonnet", GlobalTy::STRING), + ], ); cache.invalidate(&path1); - assert_cache_contents_ty(&cache, &[("file2.jsonnet", Ty::STRING)]); + assert_cache_contents_ty(&cache, &[("file2.jsonnet", GlobalTy::STRING)]); } #[test] @@ -319,19 +328,19 @@ mod tests { let path3 = test_path("main.jsonnet"); let path4 = test_path("other.jsonnet"); - // Cache all files using Ty-native API - cache.update(&path1, Ty::NUMBER, 1); - cache.update(&path2, Ty::STRING, 1); - cache.update(&path3, Ty::BOOL, 1); - cache.update(&path4, Ty::NULL, 1); + // Cache all files + cache.update(&path1, GlobalTy::NUMBER, 1); + cache.update(&path2, GlobalTy::STRING, 1); + cache.update(&path3, GlobalTy::BOOL, 1); + cache.update(&path4, GlobalTy::NULL, 1); assert_cache_contents_ty( &cache, &[ - ("lib.jsonnet", Ty::NUMBER), - ("main.jsonnet", Ty::BOOL), - ("other.jsonnet", Ty::NULL), - ("utils.jsonnet", Ty::STRING), + ("lib.jsonnet", GlobalTy::NUMBER), + ("main.jsonnet", GlobalTy::BOOL), + ("other.jsonnet", GlobalTy::NULL), + ("utils.jsonnet", GlobalTy::STRING), ], ); @@ -339,7 +348,7 @@ mod tests { cache.invalidate_many(vec![path1.clone(), path2.clone(), path3.clone()]); // Only path4 should remain - assert_cache_contents_ty(&cache, &[("other.jsonnet", Ty::NULL)]); + assert_cache_contents_ty(&cache, &[("other.jsonnet", GlobalTy::NULL)]); } #[test] @@ -347,11 +356,11 @@ mod tests { let mut cache = TypeCache::new(test_global_store()); let path = test_path("test.jsonnet"); - // Update with Ty - cache.update(&path, Ty::NUMBER, 1); + // Update with a global type + cache.update(&path, GlobalTy::NUMBER, 1); - // Get should return the same Ty - assert_eq!(cache.get(&path), Some(Ty::NUMBER)); + // Get should return the same type + assert_eq!(cache.get(&path), Some(GlobalTy::NUMBER)); // Version check assert!(cache.is_up_to_date(&path, 1)); @@ 
-365,11 +374,11 @@ mod tests { let path = test_path("test.jsonnet"); // Cache a type - types are stored in the shared global store - cache.update(&path, Ty::NUMBER, 1); + cache.update(&path, GlobalTy::NUMBER, 1); // Verify we can retrieve the type let retrieved = cache.get(&path).unwrap(); - assert_eq!(retrieved, Ty::NUMBER); + assert_eq!(retrieved, GlobalTy::NUMBER); // The cache's global_types should be the same reference assert!(Arc::ptr_eq(cache.global_types(), &global_types)); @@ -381,8 +390,8 @@ mod tests { let path1 = test_path("a.jsonnet"); let path2 = test_path("b.jsonnet"); - cache.update(&path1, Ty::NUMBER, 1); - cache.update(&path2, Ty::STRING, 1); + cache.update(&path1, GlobalTy::NUMBER, 1); + cache.update(&path2, GlobalTy::STRING, 1); assert_eq!(cache.len(), 2); assert!(!cache.is_empty()); @@ -405,26 +414,26 @@ mod tests { let path4 = test_path("file4.jsonnet"); // Fill the cache - cache.update(&path1, Ty::NUMBER, 1); - cache.update(&path2, Ty::STRING, 1); - cache.update(&path3, Ty::BOOL, 1); + cache.update(&path1, GlobalTy::NUMBER, 1); + cache.update(&path2, GlobalTy::STRING, 1); + cache.update(&path3, GlobalTy::BOOL, 1); assert_eq!(cache.len(), 3); - assert_eq!(cache.get(&path1), Some(Ty::NUMBER)); - assert_eq!(cache.get(&path2), Some(Ty::STRING)); - assert_eq!(cache.get(&path3), Some(Ty::BOOL)); + assert_eq!(cache.get(&path1), Some(GlobalTy::NUMBER)); + assert_eq!(cache.get(&path2), Some(GlobalTy::STRING)); + assert_eq!(cache.get(&path3), Some(GlobalTy::BOOL)); // Access path1 to make it recently used (path2 is now least recently used) let _ = cache.get_and_touch(&path1); // Add a fourth entry - should evict path2 (LRU) - cache.update(&path4, Ty::NULL, 1); + cache.update(&path4, GlobalTy::NULL, 1); assert_eq!(cache.len(), 3); - assert_eq!(cache.get(&path1), Some(Ty::NUMBER)); // Still present (was touched) + assert_eq!(cache.get(&path1), Some(GlobalTy::NUMBER)); // Still present (was touched) assert_eq!(cache.get(&path2), None); // Evicted (was 
LRU) - assert_eq!(cache.get(&path3), Some(Ty::BOOL)); // Still present - assert_eq!(cache.get(&path4), Some(Ty::NULL)); // Newly added + assert_eq!(cache.get(&path3), Some(GlobalTy::BOOL)); // Still present + assert_eq!(cache.get(&path4), Some(GlobalTy::NULL)); // Newly added } #[test] @@ -432,7 +441,7 @@ mod tests { // Verify with_capacity(0) doesn't panic and has minimum capacity let mut cache = TypeCache::with_capacity(test_global_store(), 0); let path = test_path("test.jsonnet"); - cache.update(&path, Ty::NUMBER, 1); - assert_eq!(cache.get(&path), Some(Ty::NUMBER)); + cache.update(&path, GlobalTy::NUMBER, 1); + assert_eq!(cache.get(&path), Some(GlobalTy::NUMBER)); } } diff --git a/crates/jrsonnet-lsp-types/src/lib.rs b/crates/jrsonnet-lsp-types/src/lib.rs index c08ba712..9cc50ef5 100644 --- a/crates/jrsonnet-lsp-types/src/lib.rs +++ b/crates/jrsonnet-lsp-types/src/lib.rs @@ -33,9 +33,9 @@ pub use local_store::LocalTyStore; pub use mut_store::MutStore; pub use operations::*; pub use store::{ - reset_store, with_store, FieldDefInterned, FieldVis, FunctionData, NumBounds, ObjectData, - ParamInterned, ReturnSpec, Ty, TyConstraints, TyData, TyStore, TySubstitution, TyVarId, - TypeStoreOps, + reset_store, with_store, FieldDefInterned, FieldVis, FunctionData, GlobalTy, NotGlobalTy, + NumBounds, ObjectData, ParamInterned, ReturnSpec, Ty, TyConstraints, TyData, TyStore, + TySubstitution, TyVarId, TypeStoreOps, }; pub use subst::TySubst; pub use unification::{ diff --git a/crates/jrsonnet-lsp-types/src/store.rs b/crates/jrsonnet-lsp-types/src/store.rs index 0beaaa7a..aa5df6b6 100644 --- a/crates/jrsonnet-lsp-types/src/store.rs +++ b/crates/jrsonnet-lsp-types/src/store.rs @@ -175,6 +175,117 @@ impl Ty { } } +/// Error returned when converting a local [`Ty`] to a [`GlobalTy`]. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct NotGlobalTy(Ty); + +impl NotGlobalTy { + /// Return the original type that failed conversion. 
+ #[must_use] + pub const fn ty(self) -> Ty { + self.0 + } +} + +impl std::fmt::Display for NotGlobalTy { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "expected a global type, got local {:?}", self.0) + } +} + +impl std::error::Error for NotGlobalTy {} + +/// A type known to be global (stored in [`crate::global_store::GlobalTyStore`]). +/// +/// This wrapper prevents APIs that cross analysis boundaries from accidentally +/// accepting local (per-analysis) type IDs. +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct GlobalTy(Ty); + +impl GlobalTy { + /// Global top type. + pub const ANY: Self = Self(Ty::ANY); + /// Global bottom type. + pub const NEVER: Self = Self(Ty::NEVER); + /// Global null type. + pub const NULL: Self = Self(Ty::NULL); + /// Global boolean type. + pub const BOOL: Self = Self(Ty::BOOL); + /// Global literal `true` type. + pub const TRUE: Self = Self(Ty::TRUE); + /// Global literal `false` type. + pub const FALSE: Self = Self(Ty::FALSE); + /// Global number type. + pub const NUMBER: Self = Self(Ty::NUMBER); + /// Global string type. + pub const STRING: Self = Self(Ty::STRING); + /// Global character type. + pub const CHAR: Self = Self(Ty::CHAR); + + /// Construct a [`GlobalTy`] if the provided type is global. + #[must_use] + pub fn new(ty: Ty) -> Option { + if ty.is_global() { + Some(Self(ty)) + } else { + None + } + } + + /// Return the underlying [`Ty`]. + #[must_use] + pub const fn as_ty(self) -> Ty { + self.0 + } + + /// Return the raw global index. 
+ #[must_use] + pub fn raw_index(self) -> u32 { + self.0.raw_index() + } +} + +impl std::fmt::Debug for GlobalTy { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.0.fmt(f) + } +} + +impl std::fmt::Display for GlobalTy { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{:?}", self.0) + } +} + +impl PartialEq for GlobalTy { + fn eq(&self, other: &Ty) -> bool { + self.0 == *other + } +} + +impl PartialEq for Ty { + fn eq(&self, other: &GlobalTy) -> bool { + *self == other.0 + } +} + +impl TryFrom for GlobalTy { + type Error = NotGlobalTy; + + /// # Errors + /// + /// Returns [`NotGlobalTy`] if `value` is a local type ID. + fn try_from(value: Ty) -> Result { + Self::new(value).ok_or(NotGlobalTy(value)) + } +} + +impl From for Ty { + fn from(value: GlobalTy) -> Self { + value.0 + } +} + impl std::fmt::Debug for Ty { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match *self { @@ -1826,6 +1937,19 @@ mod tests { assert!(matches!(*store.get(Ty::CHAR), TyData::Char)); } + #[test] + fn test_global_ty_accepts_global() { + let global = GlobalTy::new(Ty::NUMBER); + assert_eq!(global, Some(GlobalTy::NUMBER)); + } + + #[test] + fn test_global_ty_rejects_local() { + let local = Ty::from_raw_local(123); + assert!(GlobalTy::new(local).is_none()); + assert_eq!(GlobalTy::try_from(local), Err(NotGlobalTy(local))); + } + #[test] fn test_intern_deduplication() { let mut store = TyStore::new(); diff --git a/crates/jrsonnet-lsp-types/src/subst.rs b/crates/jrsonnet-lsp-types/src/subst.rs index 13c5218a..4efdf1a9 100644 --- a/crates/jrsonnet-lsp-types/src/subst.rs +++ b/crates/jrsonnet-lsp-types/src/subst.rs @@ -121,19 +121,21 @@ impl TySubst { // Compound types - recursively apply substitution TyData::Array { elem, is_set } => TyData::Array { - elem: self.apply(*elem), + elem: self.apply_for_merge(*elem), is_set: *is_set, }, TyData::Tuple { elems } => TyData::Tuple { - elems: elems.iter().map(|&e| 
self.apply(e)).collect(), + elems: elems.iter().map(|&e| self.apply_for_merge(e)).collect(), }, TyData::Union(variants) => { - TyData::Union(variants.iter().map(|&v| self.apply(v)).collect()) + TyData::Union(variants.iter().map(|&v| self.apply_for_merge(v)).collect()) } - TyData::Sum(variants) => TyData::Sum(variants.iter().map(|&v| self.apply(v)).collect()), + TyData::Sum(variants) => { + TyData::Sum(variants.iter().map(|&v| self.apply_for_merge(v)).collect()) + } TyData::Object(obj) => TyData::Object(ObjectData { fields: obj @@ -143,7 +145,7 @@ impl TySubst { ( name.clone(), FieldDefInterned { - ty: self.apply(def.ty), + ty: self.apply_for_merge(def.ty), required: def.required, visibility: def.visibility, }, @@ -154,7 +156,7 @@ impl TySubst { }), TyData::AttrsOf { value } => TyData::AttrsOf { - value: self.apply(*value), + value: self.apply_for_merge(*value), }, TyData::Function(func) => TyData::Function(FunctionData { @@ -163,12 +165,12 @@ impl TySubst { .iter() .map(|p| ParamInterned { name: p.name.clone(), - ty: self.apply(p.ty), + ty: self.apply_for_merge(p.ty), has_default: p.has_default, }) .collect(), return_spec: match &func.return_spec { - ReturnSpec::Fixed(ret) => ReturnSpec::Fixed(self.apply(*ret)), + ReturnSpec::Fixed(ret) => ReturnSpec::Fixed(self.apply_for_merge(*ret)), other => other.clone(), }, variadic: func.variadic, @@ -180,12 +182,23 @@ impl TySubst { must_be_indexable: constraints.must_be_indexable, must_support_fields: constraints.must_support_fields, must_be_callable: constraints.must_be_callable, - upper_bound: constraints.upper_bound.map(|b| self.apply(b)), + upper_bound: constraints.upper_bound.map(|b| self.apply_for_merge(b)), }, }, } } + /// Apply substitution during merge. + /// + /// Any unresolved local reference is lowered to `any` instead of leaking a + /// local `Ty` into the global store. 
+ fn apply_for_merge(&self, ty: Ty) -> Ty { + if ty.is_global() { + return ty; + } + self.mapping.get(&ty).copied().unwrap_or(Ty::ANY) + } + /// Topologically sort local types by dependency order. /// /// Types that don't depend on other local types come first. @@ -503,4 +516,80 @@ mod tests { // inner should come before outer (inner has no deps, outer depends on inner) assert_eq!(order, vec![inner, outer]); } + + #[test] + fn test_merge_cycle_does_not_leave_local_refs_in_global() { + let global = GlobalTyStore::new(); + let mut local = LocalTyStore::new(); + + // Construct a direct cycle between local types: + // L0 = array, L1 = array + let local_0 = local.intern(TyData::Array { + elem: Ty::from_raw_local(1), + is_set: false, + }); + let local_1 = local.intern(TyData::Array { + elem: Ty::from_raw_local(0), + is_set: false, + }); + + let subst = TySubst::merge(&global, local); + + let global_0 = subst.apply(local_0); + let global_1 = subst.apply(local_1); + assert!(global_0.is_global()); + assert!(global_1.is_global()); + + let data_0 = global.get_data(global_0); + let data_1 = global.get_data(global_1); + assert!( + !type_data_contains_local_refs(&data_0), + "first merged global type still contains local refs: {data_0:?}" + ); + assert!( + !type_data_contains_local_refs(&data_1), + "second merged global type still contains local refs: {data_1:?}" + ); + } + + fn type_data_contains_local_refs(data: &TyData) -> bool { + match data { + TyData::Array { elem, .. 
} => elem.is_local(), + TyData::Tuple { elems } => elems.iter().any(|ty| ty.is_local()), + TyData::Union(variants) | TyData::Sum(variants) => { + variants.iter().any(|ty| ty.is_local()) + } + TyData::Object(obj) => obj.fields.iter().any(|(_, field)| field.ty.is_local()), + TyData::AttrsOf { value } => value.is_local(), + TyData::Function(func) => { + func.params.iter().any(|param| param.ty.is_local()) + || match &func.return_spec { + ReturnSpec::Fixed(ty) => ty.is_local(), + ReturnSpec::SameAsArg(_) + | ReturnSpec::ArrayOfArg(_) + | ReturnSpec::ArrayWithSameElements(_) + | ReturnSpec::SetWithSameElements(_) + | ReturnSpec::FuncReturnType(_) + | ReturnSpec::ArrayOfFuncReturn(_) + | ReturnSpec::FlatMapResult(_) + | ReturnSpec::NonNegative + | ReturnSpec::ObjectValuesType(_) => false, + } + } + TyData::TypeVar { constraints, .. } => { + constraints.upper_bound.is_some_and(Ty::is_local) + } + TyData::Any + | TyData::Never + | TyData::Null + | TyData::Bool + | TyData::True + | TyData::False + | TyData::Number + | TyData::String + | TyData::Char + | TyData::BoundedNumber(_) + | TyData::LiteralString(_) => false, + } + } } diff --git a/crates/jrsonnet-lsp/benches/type_cache.rs b/crates/jrsonnet-lsp/benches/type_cache.rs index fdb8a563..26afe1b2 100644 --- a/crates/jrsonnet-lsp/benches/type_cache.rs +++ b/crates/jrsonnet-lsp/benches/type_cache.rs @@ -7,7 +7,7 @@ use std::sync::Arc; use criterion::{black_box, criterion_group, criterion_main, BenchmarkId, Criterion}; use jrsonnet_lsp_document::CanonicalPath; use jrsonnet_lsp_inference::{new_shared_cache, TypeCache}; -use jrsonnet_lsp_types::{GlobalTyStore, Ty}; +use jrsonnet_lsp_types::{GlobalTy, GlobalTyStore}; fn make_path(i: usize) -> CanonicalPath { CanonicalPath::new(std::path::PathBuf::from(format!("/test/file{i}.jsonnet"))) @@ -25,7 +25,7 @@ fn bench_cache_operations(c: &mut Criterion) { b.iter(|| { for i in 0..size { let path = make_path(i); - cache.update(&path, Ty::NUMBER, 1); + cache.update(&path, 
GlobalTy::NUMBER, 1); } }); }); @@ -39,7 +39,7 @@ fn bench_cache_operations(c: &mut Criterion) { // Pre-populate cache for i in 0..size { let path = make_path(i); - cache.update(&path, Ty::NUMBER, 1); + cache.update(&path, GlobalTy::NUMBER, 1); } group.bench_with_input(BenchmarkId::new("lookup_hit", size), &size, |b, &size| { @@ -78,7 +78,7 @@ fn bench_cache_operations(c: &mut Criterion) { let mut cache = TypeCache::new(Arc::clone(&global)); for i in 0..size { let path = make_path(i); - cache.update(&path, Ty::NUMBER, 1); + cache.update(&path, GlobalTy::NUMBER, 1); } cache }, @@ -108,7 +108,7 @@ fn bench_cache_operations(c: &mut Criterion) { let mut cache = TypeCache::new(Arc::clone(&global)); let paths: Vec<_> = (0..size).map(make_path).collect(); for path in &paths { - cache.update(path, Ty::NUMBER, 1); + cache.update(path, GlobalTy::NUMBER, 1); } (cache, paths) }, @@ -138,7 +138,7 @@ fn bench_shared_cache(c: &mut Criterion) { let mut write_cache = cache.write(); for i in 0..size { let path = make_path(i); - write_cache.update(&path, Ty::NUMBER, 1); + write_cache.update(&path, GlobalTy::NUMBER, 1); } } @@ -157,7 +157,7 @@ fn bench_shared_cache(c: &mut Criterion) { for i in 0..size { let path = make_path(i); let mut write_cache = cache.write(); - write_cache.update(&path, Ty::STRING, 2); + write_cache.update(&path, GlobalTy::STRING, 2); } }); }); @@ -175,7 +175,7 @@ fn bench_version_check(c: &mut Criterion) { // Pre-populate with version 1 for i in 0..1000 { let path = make_path(i); - cache.update(&path, Ty::NUMBER, 1); + cache.update(&path, GlobalTy::NUMBER, 1); } group.bench_function("is_up_to_date_hit", |b| { diff --git a/crates/jrsonnet-lsp/tests/cross_file_tests.rs b/crates/jrsonnet-lsp/tests/cross_file_tests.rs index b806b576..3053fdd6 100644 --- a/crates/jrsonnet-lsp/tests/cross_file_tests.rs +++ b/crates/jrsonnet-lsp/tests/cross_file_tests.rs @@ -1,7 +1,7 @@ //! Cross-file analysis tests. //! //! 
Tests import graph operations, type caching across files, and cache invalidation. -//! These tests validate the Global TyStore work enables proper cross-file type sharing. +//! These tests validate the global `TyStore` work enables proper cross-file type sharing. use std::{ fs, @@ -12,7 +12,7 @@ use std::{ use jrsonnet_lsp_document::{CanonicalPath, DocVersion, Document}; use jrsonnet_lsp_import::ImportGraph; use jrsonnet_lsp_inference::{analyze_and_cache, new_shared_cache, TypeAnalysis, TypeCache}; -use jrsonnet_lsp_types::{GlobalTyStore, Ty}; +use jrsonnet_lsp_types::{GlobalTy, GlobalTyStore, Ty}; use tempfile::TempDir; /// Helper to create a test file in the temp directory. @@ -265,14 +265,14 @@ mod type_cache_tests { let path3 = canonical_path(&file3); // Cache different types - cache.update(&path1, Ty::NUMBER, 1); - cache.update(&path2, Ty::STRING, 1); - cache.update(&path3, Ty::BOOL, 1); + cache.update(&path1, GlobalTy::NUMBER, 1); + cache.update(&path2, GlobalTy::STRING, 1); + cache.update(&path3, GlobalTy::BOOL, 1); // Retrieve and verify - assert_eq!(cache.get(&path1), Some(Ty::NUMBER)); - assert_eq!(cache.get(&path2), Some(Ty::STRING)); - assert_eq!(cache.get(&path3), Some(Ty::BOOL)); + assert_eq!(cache.get(&path1), Some(GlobalTy::NUMBER)); + assert_eq!(cache.get(&path2), Some(GlobalTy::STRING)); + assert_eq!(cache.get(&path3), Some(GlobalTy::BOOL)); } #[test] @@ -285,12 +285,12 @@ mod type_cache_tests { let path = canonical_path(&file); // Initial version - cache.update(&path, Ty::NUMBER, 1); + cache.update(&path, GlobalTy::NUMBER, 1); assert!(cache.is_up_to_date(&path, 1)); assert!(!cache.is_up_to_date(&path, 2)); // Update version - cache.update(&path, Ty::STRING, 2); + cache.update(&path, GlobalTy::STRING, 2); assert!(!cache.is_up_to_date(&path, 1)); assert!(cache.is_up_to_date(&path, 2)); } @@ -309,9 +309,9 @@ mod type_cache_tests { let path2 = canonical_path(&file2); let path3 = canonical_path(&file3); - cache.update(&path1, Ty::NUMBER, 1); - 
cache.update(&path2, Ty::NUMBER, 1); - cache.update(&path3, Ty::NUMBER, 1); + cache.update(&path1, GlobalTy::NUMBER, 1); + cache.update(&path2, GlobalTy::NUMBER, 1); + cache.update(&path3, GlobalTy::NUMBER, 1); assert_eq!(cache.len(), 3); @@ -338,7 +338,7 @@ mod type_cache_tests { // Cache all for path in &files { - cache.update(path, Ty::NUMBER, 1); + cache.update(path, GlobalTy::NUMBER, 1); } assert_eq!(cache.len(), 10); @@ -372,7 +372,7 @@ mod cross_file_type_tests { // Should be cached let cached = cache.read().get(&path); - assert_eq!(cached, Some(Ty::NUMBER)); + assert_eq!(cached, Some(GlobalTy::NUMBER)); } #[test] @@ -395,7 +395,7 @@ mod cross_file_type_tests { let doc = Document::new(content.to_string(), DocVersion::new(1)); let ty = analyze_and_cache(&path, &doc, &cache); - assert_eq!(ty, expected_ty, "Type mismatch for {}", name); + assert_eq!(ty, expected_ty, "Type mismatch for {name}"); } } @@ -478,7 +478,7 @@ mod transitive_update_tests { let base = write_file(&tmp, "base.jsonnet", "{ x: 1 }"); let mid = write_file(&tmp, "mid.jsonnet", "local b = import 'base.jsonnet'; b"); - let top = write_file(&tmp, "top.jsonnet", "local m = import 'mid.jsonnet'; m"); + let top_file = write_file(&tmp, "top.jsonnet", "local m = import 'mid.jsonnet'; m"); let mut graph = ImportGraph::new(); let resolver = make_resolver(base_dir); @@ -486,17 +486,17 @@ mod transitive_update_tests { let mut cache = TypeCache::new(Arc::clone(&global)); // Build graph - for file in [&base, &mid, &top] { + for file in [&base, &mid, &top_file] { let content = fs::read_to_string(file).unwrap(); let doc = Document::new(content, DocVersion::new(1)); let path = canonical_path(file); graph.update_file(&path, &doc, &resolver); - cache.update(&path, Ty::ANY, 1); // Placeholder type + cache.update(&path, GlobalTy::ANY, 1); // Placeholder type } let base_path = canonical_path(&base); let mid_path = canonical_path(&mid); - let top_path = canonical_path(&top); + let top_path = 
canonical_path(&top_file); // All should be cached cache.get(&base_path).expect("base should be cached"); @@ -535,7 +535,7 @@ mod transitive_update_tests { let doc = Document::new(content, DocVersion::new(1)); let path = canonical_path(file); graph.update_file(&path, &doc, &resolver); - cache.update(&path, Ty::ANY, 1); + cache.update(&path, GlobalTy::ANY, 1); } let lib1_path = canonical_path(&lib1); @@ -581,7 +581,7 @@ mod transitive_update_tests { let doc = Document::new(content, DocVersion::new(1)); let path = canonical_path(file); graph.update_file(&path, &doc, &resolver); - cache.update(&path, Ty::ANY, 1); + cache.update(&path, GlobalTy::ANY, 1); } let d_path = canonical_path(&d); From a1f4f14fbe963f50d453938e5c5423ba6769fa04 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Mon, 9 Feb 2026 14:26:43 +0000 Subject: [PATCH 008/210] feat(lsp): add implementation request support - advertise and route textDocument/implementation in the server capability and request dispatcher. - implement async go-to-implementation resolution for local bindings and import-backed fields, with separate declaration vs implementation ranges when available. - load unopened imported files on demand so implementation lookup can traverse import chains without requiring all files to be open. - add integration coverage for local and import-field implementation lookups, plus regression coverage for import diagnostics + definition stability. - document implementation-provider behavior and handler flow in docs/lsp architecture/handlers docs. 
--- crates/jrsonnet-lsp/src/server.rs | 351 +++++---- .../jrsonnet-lsp/src/server/async_requests.rs | 228 ++++-- crates/jrsonnet-lsp/tests/integration_test.rs | 681 ++++++++++++------ docs/lsp/ARCHITECTURE.md | 5 +- docs/lsp/HANDLERS.md | 7 + 5 files changed, 850 insertions(+), 422 deletions(-) diff --git a/crates/jrsonnet-lsp/src/server.rs b/crates/jrsonnet-lsp/src/server.rs index 906cbb44..89652dd2 100644 --- a/crates/jrsonnet-lsp/src/server.rs +++ b/crates/jrsonnet-lsp/src/server.rs @@ -28,26 +28,24 @@ use lsp_types::{ request::{ CodeActionRequest, CodeLensRequest, CodeLensResolve, Completion, DocumentHighlightRequest, DocumentSymbolRequest, ExecuteCommand, Formatting, GotoDeclaration, GotoDefinition, - HoverRequest, InlayHintRequest, PrepareRenameRequest, References, Rename, Request as _, - SemanticTokensFullRequest, SemanticTokensRangeRequest, Shutdown, SignatureHelpRequest, - WorkspaceSymbolRequest, + GotoImplementation, HoverRequest, InlayHintRequest, PrepareRenameRequest, References, + Rename, Request as _, SemanticTokensFullRequest, SemanticTokensRangeRequest, Shutdown, + SignatureHelpRequest, WorkspaceSymbolRequest, }, CodeActionKind, CodeActionOptions, CodeActionParams, CodeActionProviderCapability, - CodeActionResponse, CodeLens, CodeLensOptions, CodeLensParams, CompletionOptions, - CompletionParams, DidChangeConfigurationParams, DidChangeTextDocumentParams, - DidChangeWatchedFilesParams, DidCloseTextDocumentParams, DidOpenTextDocumentParams, - DidSaveTextDocumentParams, DocumentFormattingParams, DocumentHighlight, - DocumentHighlightParams, DocumentSymbolParams, DocumentSymbolResponse, ExecuteCommandOptions, - ExecuteCommandParams, FileChangeType, GotoDefinitionParams, HoverParams, - HoverProviderCapability, InitializeParams, InitializeResult, InlayHintParams, OneOf, - PrepareRenameResponse, ReferenceParams, RenameParams, SemanticTokens, - SemanticTokensFullOptions, SemanticTokensOptions, SemanticTokensParams, - SemanticTokensRangeParams, 
SemanticTokensServerCapabilities, ServerCapabilities, ServerInfo, - SignatureHelp, SignatureHelpOptions, SignatureHelpParams, TextDocumentPositionParams, + CodeActionResponse, CodeLens, CodeLensOptions, CompletionOptions, DidChangeConfigurationParams, + DidChangeTextDocumentParams, DidChangeWatchedFilesParams, DidCloseTextDocumentParams, + DidOpenTextDocumentParams, DidSaveTextDocumentParams, DocumentFormattingParams, + DocumentHighlight, DocumentHighlightParams, DocumentSymbolParams, DocumentSymbolResponse, + ExecuteCommandOptions, FileChangeType, HoverProviderCapability, InitializeParams, + InitializeResult, OneOf, PrepareRenameResponse, SemanticTokens, SemanticTokensFullOptions, + SemanticTokensOptions, SemanticTokensParams, SemanticTokensRangeParams, + SemanticTokensServerCapabilities, ServerCapabilities, ServerInfo, SignatureHelp, + SignatureHelpOptions, SignatureHelpParams, TextDocumentPositionParams, TextDocumentSyncCapability, TextDocumentSyncKind, TextEdit, WorkDoneProgressOptions, - WorkspaceSymbolParams, }; use parking_lot::RwLock; +use serde::{de::DeserializeOwned, Serialize}; use tracing::{debug, error, info, warn}; use self::async_requests::AsyncRequestContext; @@ -293,6 +291,9 @@ impl Server { document_symbol_provider: Some(OneOf::Left(true)), definition_provider: Some(OneOf::Left(true)), declaration_provider: Some(lsp_types::DeclarationCapability::Simple(true)), + implementation_provider: Some(lsp_types::ImplementationProviderCapability::Simple( + true, + )), hover_provider: Some(HoverProviderCapability::Simple(true)), document_highlight_provider: Some(OneOf::Left(true)), inlay_hint_provider: Some(OneOf::Left(true)), @@ -432,158 +433,208 @@ impl Server { fn handle_request(&mut self, req: Request) -> Result<()> { debug!("Handling request: {} (id={})", req.method, req.id); - match req.method.as_str() { - Shutdown::METHOD => { - info!("Shutdown request received"); - self.shutdown_requested = true; - let resp = Response::new_ok(req.id, 
serde_json::Value::Null); - self.connection.sender.send(Message::Response(resp))?; - } + let Request { id, method, params } = req; + match method.as_str() { + Shutdown::METHOD => self.handle_shutdown_request(id), + GotoDefinition::METHOD + | GotoDeclaration::METHOD + | GotoImplementation::METHOD + | HoverRequest::METHOD + | InlayHintRequest::METHOD + | Completion::METHOD + | References::METHOD + | WorkspaceSymbolRequest::METHOD + | Rename::METHOD + | CodeLensRequest::METHOD + | ExecuteCommand::METHOD => self.handle_async_request(id, method.as_str(), params), + DocumentSymbolRequest::METHOD + | DocumentHighlightRequest::METHOD + | CodeActionRequest::METHOD + | SignatureHelpRequest::METHOD + | Formatting::METHOD + | PrepareRenameRequest::METHOD + | SemanticTokensFullRequest::METHOD + | SemanticTokensRangeRequest::METHOD + | CodeLensResolve::METHOD => self.handle_sync_request(id, method.as_str(), params), + _ => self.send_method_not_found_response(id, &method), + } + } + + fn handle_shutdown_request(&mut self, id: RequestId) -> Result<()> { + info!("Shutdown request received"); + self.shutdown_requested = true; + let resp = Response::new_ok(id, serde_json::Value::Null); + self.connection.sender.send(Message::Response(resp))?; + Ok(()) + } + + fn send_ok_response(&self, id: RequestId, result: T) -> Result<()> + where + T: Serialize, + { + let resp = Response::new_ok(id, serde_json::to_value(result)?); + self.connection.sender.send(Message::Response(resp))?; + Ok(()) + } + + fn send_method_not_found_response(&self, id: RequestId, method: &str) -> Result<()> { + warn!("Unhandled request: {method}"); + let resp = Response::new_err( + id, + lsp_server::ErrorCode::MethodNotFound as i32, + format!("Method not found: {method}"), + ); + self.connection.sender.send(Message::Response(resp))?; + Ok(()) + } + + fn handle_sync_request( + &self, + id: RequestId, + method: &str, + params: serde_json::Value, + ) -> Result<()> { + match method { DocumentSymbolRequest::METHOD => { - let 
params: DocumentSymbolParams = serde_json::from_value(req.params)?; - let result = self.on_document_symbol(params); - let resp = Response::new_ok(req.id, serde_json::to_value(result)?); - self.connection.sender.send(Message::Response(resp))?; - } - GotoDefinition::METHOD => { - let params: GotoDefinitionParams = serde_json::from_value(req.params)?; - let context = self.async_request_context(); - self.spawn_async_response(req.id, GotoDefinition::METHOD, move || { - let result = context.goto_definition(params); - serde_json::to_value(result).map_err(Into::into) - }); - } - GotoDeclaration::METHOD => { - let params: GotoDefinitionParams = serde_json::from_value(req.params)?; - let context = self.async_request_context(); - self.spawn_async_response(req.id, GotoDeclaration::METHOD, move || { - let result = context.goto_declaration(params); - serde_json::to_value(result).map_err(Into::into) - }); - } - HoverRequest::METHOD => { - let params: HoverParams = serde_json::from_value(req.params)?; - let context = self.async_request_context(); - self.spawn_async_response(req.id, HoverRequest::METHOD, move || { - let result = context.hover(params); - serde_json::to_value(result).map_err(Into::into) - }); + self.handle_sync_typed(id, params, Self::on_document_symbol) } DocumentHighlightRequest::METHOD => { - let params: DocumentHighlightParams = serde_json::from_value(req.params)?; - let result = self.on_document_highlight(params); - let resp = Response::new_ok(req.id, serde_json::to_value(result)?); - self.connection.sender.send(Message::Response(resp))?; - } - InlayHintRequest::METHOD => { - let params: InlayHintParams = serde_json::from_value(req.params)?; - let context = self.async_request_context(); - self.spawn_async_response(req.id, InlayHintRequest::METHOD, move || { - let result = context.inlay_hints(params); - serde_json::to_value(result).map_err(Into::into) - }); - } - CodeActionRequest::METHOD => { - let params: CodeActionParams = serde_json::from_value(req.params)?; 
- let result = self.on_code_action(params); - let resp = Response::new_ok(req.id, serde_json::to_value(result)?); - self.connection.sender.send(Message::Response(resp))?; - } - Completion::METHOD => { - let params: CompletionParams = serde_json::from_value(req.params)?; - let context = self.async_request_context(); - self.spawn_async_response(req.id, Completion::METHOD, move || { - let result = context.completion(params); - serde_json::to_value(result).map_err(Into::into) - }); + self.handle_sync_typed(id, params, Self::on_document_highlight) } + CodeActionRequest::METHOD => self.handle_sync_typed(id, params, Self::on_code_action), SignatureHelpRequest::METHOD => { - let params: SignatureHelpParams = serde_json::from_value(req.params)?; - let result = self.on_signature_help(params); - let resp = Response::new_ok(req.id, serde_json::to_value(result)?); - self.connection.sender.send(Message::Response(resp))?; - } - Formatting::METHOD => { - let params: DocumentFormattingParams = serde_json::from_value(req.params)?; - let result = self.on_formatting(params); - let resp = Response::new_ok(req.id, serde_json::to_value(result)?); - self.connection.sender.send(Message::Response(resp))?; - } - References::METHOD => { - let params: ReferenceParams = serde_json::from_value(req.params)?; - let context = self.async_request_context(); - self.spawn_async_response(req.id, References::METHOD, move || { - let result = context.references(params); - serde_json::to_value(result).map_err(Into::into) - }); - } - WorkspaceSymbolRequest::METHOD => { - let params: WorkspaceSymbolParams = serde_json::from_value(req.params)?; - let context = self.async_request_context(); - self.spawn_async_response(req.id, WorkspaceSymbolRequest::METHOD, move || { - let result = context.workspace_symbol(params); - serde_json::to_value(result).map_err(Into::into) - }); + self.handle_sync_typed(id, params, Self::on_signature_help) } + Formatting::METHOD => self.handle_sync_typed(id, params, 
Self::on_formatting), PrepareRenameRequest::METHOD => { - let params: TextDocumentPositionParams = serde_json::from_value(req.params)?; - let result = self.on_prepare_rename(params); - let resp = Response::new_ok(req.id, serde_json::to_value(result)?); - self.connection.sender.send(Message::Response(resp))?; - } - Rename::METHOD => { - let params: RenameParams = serde_json::from_value(req.params)?; - let context = self.async_request_context(); - self.spawn_async_response(req.id, Rename::METHOD, move || { - let result = context.rename(params); - serde_json::to_value(result).map_err(Into::into) - }); + self.handle_sync_typed(id, params, Self::on_prepare_rename) } SemanticTokensFullRequest::METHOD => { - let params: SemanticTokensParams = serde_json::from_value(req.params)?; - let result = self.on_semantic_tokens_full(params); - let resp = Response::new_ok(req.id, serde_json::to_value(result)?); - self.connection.sender.send(Message::Response(resp))?; + self.handle_sync_typed(id, params, Self::on_semantic_tokens_full) } SemanticTokensRangeRequest::METHOD => { - let params: SemanticTokensRangeParams = serde_json::from_value(req.params)?; - let result = self.on_semantic_tokens_range(params); - let resp = Response::new_ok(req.id, serde_json::to_value(result)?); - self.connection.sender.send(Message::Response(resp))?; - } - CodeLensRequest::METHOD => { - let params: CodeLensParams = serde_json::from_value(req.params)?; - let context = self.async_request_context(); - self.spawn_async_response(req.id, CodeLensRequest::METHOD, move || { - let result = context.code_lens(params); - serde_json::to_value(result).map_err(Into::into) - }); + self.handle_sync_typed(id, params, Self::on_semantic_tokens_range) } CodeLensResolve::METHOD => { - let params: CodeLens = serde_json::from_value(req.params)?; - let result = Self::on_code_lens_resolve(params); - let resp = Response::new_ok(req.id, serde_json::to_value(result)?); - self.connection.sender.send(Message::Response(resp))?; - } - 
ExecuteCommand::METHOD => { - let params: ExecuteCommandParams = serde_json::from_value(req.params)?; - let context = self.async_request_context(); - self.spawn_async_response(req.id, ExecuteCommand::METHOD, move || { - let result = context.execute_command(params); - serde_json::to_value(result).map_err(Into::into) - }); + let params: CodeLens = serde_json::from_value(params)?; + self.send_ok_response(id, Self::on_code_lens_resolve(params)) } - _ => { - warn!("Unhandled request: {}", req.method); - let resp = Response::new_err( - req.id, - lsp_server::ErrorCode::MethodNotFound as i32, - format!("Method not found: {}", req.method), - ); - self.connection.sender.send(Message::Response(resp))?; + _ => unreachable!("sync request method already filtered: {method}"), + } + } + + fn handle_sync_typed( + &self, + id: RequestId, + params: serde_json::Value, + handler: fn(&Self, P) -> R, + ) -> Result<()> + where + P: DeserializeOwned, + R: Serialize, + { + let params: P = serde_json::from_value(params)?; + self.send_ok_response(id, handler(self, params)) + } + + fn spawn_json_response(&self, id: RequestId, method: &'static str, compute: F) + where + R: Serialize + Send + 'static, + F: FnOnce() -> R + Send + 'static, + { + self.spawn_async_response(id, method, move || { + serde_json::to_value(compute()).map_err(Into::into) + }); + } + + fn handle_async_request( + &self, + id: RequestId, + method: &str, + params: serde_json::Value, + ) -> Result<()> { + match method { + GotoDefinition::METHOD => self.handle_async_typed( + id, + GotoDefinition::METHOD, + params, + AsyncRequestContext::goto_definition, + ), + GotoDeclaration::METHOD => self.handle_async_typed( + id, + GotoDeclaration::METHOD, + params, + AsyncRequestContext::goto_declaration, + ), + GotoImplementation::METHOD => self.handle_async_typed( + id, + GotoImplementation::METHOD, + params, + AsyncRequestContext::goto_implementation, + ), + HoverRequest::METHOD => self.handle_async_typed( + id, + HoverRequest::METHOD, + 
params, + AsyncRequestContext::hover, + ), + InlayHintRequest::METHOD => self.handle_async_typed( + id, + InlayHintRequest::METHOD, + params, + AsyncRequestContext::inlay_hints, + ), + Completion::METHOD => self.handle_async_typed( + id, + Completion::METHOD, + params, + AsyncRequestContext::completion, + ), + References::METHOD => self.handle_async_typed( + id, + References::METHOD, + params, + AsyncRequestContext::references, + ), + WorkspaceSymbolRequest::METHOD => self.handle_async_typed( + id, + WorkspaceSymbolRequest::METHOD, + params, + AsyncRequestContext::workspace_symbol, + ), + Rename::METHOD => { + self.handle_async_typed(id, Rename::METHOD, params, AsyncRequestContext::rename) } + CodeLensRequest::METHOD => self.handle_async_typed( + id, + CodeLensRequest::METHOD, + params, + AsyncRequestContext::code_lens, + ), + ExecuteCommand::METHOD => self.handle_async_typed( + id, + ExecuteCommand::METHOD, + params, + AsyncRequestContext::execute_command, + ), + _ => unreachable!("async request method already filtered: {method}"), } + } + fn handle_async_typed( + &self, + id: RequestId, + method: &'static str, + params: serde_json::Value, + handler: fn(&AsyncRequestContext, P) -> R, + ) -> Result<()> + where + P: DeserializeOwned + Send + 'static, + R: Serialize + Send + 'static, + { + let params: P = serde_json::from_value(params)?; + let context = self.async_request_context(); + self.spawn_json_response(id, method, move || handler(&context, params)); Ok(()) } diff --git a/crates/jrsonnet-lsp/src/server/async_requests.rs b/crates/jrsonnet-lsp/src/server/async_requests.rs index c3d3a1e6..9da250f6 100644 --- a/crates/jrsonnet-lsp/src/server/async_requests.rs +++ b/crates/jrsonnet-lsp/src/server/async_requests.rs @@ -5,6 +5,7 @@ use jrsonnet_lsp_handlers as handlers; use jrsonnet_lsp_import::ImportGraph; use jrsonnet_lsp_inference::{SharedDocumentManager, SharedTypeCache, TypeAnalysis, TypeProvider}; use jrsonnet_lsp_types::GlobalTyStore; +use 
jrsonnet_rowan_parser::AstNode; use lsp_types::{ CodeLens, CodeLensParams, CompletionParams, CompletionResponse, ExecuteCommandParams, GotoDefinitionParams, GotoDefinitionResponse, Hover, HoverParams, InlayHint, InlayHintParams, @@ -97,6 +98,7 @@ impl AsyncRequestContext { let resolved = self.resolve_import_path(&path, &import_path)?; let range = self .find_field_in_file(&resolved, &fields) + .map(|locations| locations.declaration) .unwrap_or_default(); Some(GotoDefinitionResponse::Scalar(Location { uri: resolved.to_uri(), @@ -113,6 +115,51 @@ impl AsyncRequestContext { self.goto_definition(params) } + pub(super) fn goto_implementation( + &self, + params: GotoDefinitionParams, + ) -> Option { + let uri = ¶ms.text_document_position_params.text_document.uri; + let position = params.text_document_position_params.position; + let path = CanonicalPath::from_uri(uri)?; + let doc = self.documents.get(&path)?.clone(); + let lsp_pos = position.into(); + + let result = handlers::goto_definition(&doc, lsp_pos)?; + match result { + handlers::DefinitionResult::Local(range) => { + let range = Self::local_implementation_range(&doc, range).unwrap_or(range); + Some(GotoDefinitionResponse::Scalar(Location { + uri: uri.clone(), + range, + })) + } + handlers::DefinitionResult::Import(import_path) => { + let resolved = self.resolve_import_path(&path, &import_path)?; + let range = self.document_root_expr_range(&resolved).unwrap_or_default(); + Some(GotoDefinitionResponse::Scalar(Location { + uri: resolved.to_uri(), + range, + })) + } + handlers::DefinitionResult::ImportField { + path: import_path, + fields, + } => { + let resolved = self.resolve_import_path(&path, &import_path)?; + let range = self + .find_field_in_file(&resolved, &fields) + .map(|locations| locations.implementation) + .or_else(|| self.document_root_expr_range(&resolved)) + .unwrap_or_default(); + Some(GotoDefinitionResponse::Scalar(Location { + uri: resolved.to_uri(), + range, + })) + } + } + } + pub(super) fn 
inlay_hints(&self, params: InlayHintParams) -> Option> { let uri = ¶ms.text_document.uri; let path = CanonicalPath::from_uri(uri)?; @@ -425,93 +472,162 @@ impl AsyncRequestContext { None } + fn load_document_for_path(&self, path: &CanonicalPath) -> Option { + if let Some(doc) = self.documents.get(path) { + return Some(doc.clone()); + } + + let content = std::fs::read_to_string(path.as_path()).ok()?; + Some(Document::new(content, DocVersion::new(0))) + } + + fn document_root_expr_range(&self, path: &CanonicalPath) -> Option { + let doc = self.load_document_for_path(path)?; + let expr = doc.ast().expr()?; + Some(to_lsp_range( + expr.syntax().text_range(), + doc.line_index(), + doc.text(), + )) + } + + fn local_implementation_range( + document: &Document, + declaration: lsp_types::Range, + ) -> Option { + use jrsonnet_lsp_document::LspRange; + use jrsonnet_rowan_parser::{ + nodes::{Bind, ForSpec, Param}, + AstNode, + }; + + let text = document.text(); + let line_index = document.line_index(); + let declaration_range = line_index.text_range(LspRange::from(declaration), text)?; + let ast = document.ast(); + let node = ast + .syntax() + .descendants() + .find(|candidate| candidate.text_range() == declaration_range)?; + + if let Some(bind) = node.ancestors().find_map(Bind::cast) { + let value_range = match bind { + Bind::BindDestruct(bind) => bind.value()?.syntax().text_range(), + Bind::BindFunction(bind) => bind.value()?.syntax().text_range(), + }; + return Some(to_lsp_range(value_range, line_index, text)); + } + + if let Some(param) = node.ancestors().find_map(Param::cast) { + let default_value = param.expr()?; + return Some(to_lsp_range( + default_value.syntax().text_range(), + line_index, + text, + )); + } + + if let Some(for_spec) = node.ancestors().find_map(ForSpec::cast) { + let source_expr = for_spec.expr()?; + return Some(to_lsp_range( + source_expr.syntax().text_range(), + line_index, + text, + )); + } + + None + } + /// For a field chain like `foo.bar`, this 
finds the `bar` field /// inside the `foo` field of the top-level object. fn find_field_in_file( &self, path: &CanonicalPath, fields: &[String], - ) -> Option { + ) -> Option { use jrsonnet_rowan_parser::{ - nodes::{Member, ObjBody}, + nodes::{ExprBase, Member, ObjBody}, AstNode, }; - let doc = if let Some(d) = self.documents.get(path) { - d.clone() - } else { - let content = std::fs::read_to_string(path.as_path()).ok()?; - Document::new(content, DocVersion::new(0)) - }; + let doc = self.load_document_for_path(path)?; let ast = doc.ast(); let text = doc.text(); let line_index = doc.line_index(); let expr = ast.expr()?; - let mut current_obj_body = expr.expr_base().and_then(|base| { - if let jrsonnet_rowan_parser::nodes::ExprBase::ExprObject(obj) = base { - obj.obj_body() - } else { - None - } - })?; + let expr_base = expr.expr_base()?; + let ExprBase::ExprObject(obj) = expr_base else { + return None; + }; + let mut current_obj_body = obj.obj_body()?; for (i, field_name) in fields.iter().enumerate() { let is_last = i == fields.len() - 1; - if let ObjBody::ObjBodyMemberList(members) = ¤t_obj_body { - let mut found = false; - for member in members.members() { - let (name, field_range, value_expr) = match &member { - Member::MemberFieldNormal(field) => { - let name_node = field.field_name()?; - let name_str = extract_field_name_string(&name_node)?; - let range = name_node.syntax().text_range(); - let value = field.expr(); - (name_str, range, value) - } - Member::MemberFieldMethod(method) => { - let name_node = method.field_name()?; - let name_str = extract_field_name_string(&name_node)?; - let range = name_node.syntax().text_range(); - (name_str, range, None) - } - _ => continue, - }; - - if name == *field_name { - if is_last { - return Some(to_lsp_range(field_range, line_index, text)); - } + let ObjBody::ObjBodyMemberList(members) = ¤t_obj_body else { + return None; + }; - if let Some(value) = value_expr { - if let Some(base) = value.expr_base() { - if let 
jrsonnet_rowan_parser::nodes::ExprBase::ExprObject(obj) = - base - { - if let Some(body) = obj.obj_body() { - current_obj_body = body; - found = true; - break; - } - } - } - } + let field_target = members.members().find_map(|member| match member { + Member::MemberFieldNormal(field) => { + let name_node = field.field_name()?; + let name = extract_field_name_string(&name_node)?; + if name != *field_name { return None; } + + let declaration = name_node.syntax().text_range(); + let value = field.expr()?; + let implementation = value.syntax().text_range(); + let next_body = value.expr_base().and_then(|base| { + let ExprBase::ExprObject(obj) = base else { + return None; + }; + obj.obj_body() + }); + + Some((declaration, implementation, next_body)) } - if !found && !is_last { - return None; + Member::MemberFieldMethod(method) => { + let name_node = method.field_name()?; + let name = extract_field_name_string(&name_node)?; + if name != *field_name { + return None; + } + + let declaration = name_node.syntax().text_range(); + let implementation = method + .expr() + .map_or(declaration, |expr| expr.syntax().text_range()); + Some((declaration, implementation, None)) } - } else { - return None; + Member::MemberBindStmt(_) | Member::MemberAssertStmt(_) => None, + })?; + + if is_last { + let declaration = to_lsp_range(field_target.0, line_index, text); + let implementation = to_lsp_range(field_target.1, line_index, text); + return Some(ImportedFieldLocations { + declaration, + implementation, + }); } + + current_obj_body = field_target.2?; } None } } +#[derive(Debug, Clone, Copy)] +struct ImportedFieldLocations { + declaration: lsp_types::Range, + implementation: lsp_types::Range, +} + fn extract_field_name_string(name: &jrsonnet_rowan_parser::nodes::FieldName) -> Option { use jrsonnet_rowan_parser::{nodes::FieldName, AstToken}; diff --git a/crates/jrsonnet-lsp/tests/integration_test.rs b/crates/jrsonnet-lsp/tests/integration_test.rs index 425e8d69..99f030ef 100644 --- 
a/crates/jrsonnet-lsp/tests/integration_test.rs +++ b/crates/jrsonnet-lsp/tests/integration_test.rs @@ -14,8 +14,8 @@ use lsp_types::{ }, request::{ CodeActionRequest, CodeLensRequest, CodeLensResolve, DocumentHighlightRequest, - ExecuteCommand, GotoDeclaration, GotoDefinition, Initialize, InlayHintRequest, References, - Rename, Request as _, SemanticTokensRangeRequest, Shutdown, + ExecuteCommand, GotoDeclaration, GotoDefinition, GotoImplementation, Initialize, + InlayHintRequest, References, Rename, Request as _, SemanticTokensRangeRequest, Shutdown, }, DidChangeConfigurationParams, DidChangeWatchedFilesParams, DidOpenTextDocumentParams, DidSaveTextDocumentParams, ExecuteCommandParams, FileChangeType, FileEvent, @@ -131,6 +131,24 @@ fn goto_declaration_request(id: i32, uri: &str, line: u32, character: u32) -> Re ) } +fn goto_implementation_request(id: i32, uri: &str, line: u32, character: u32) -> Request { + let params = GotoDefinitionParams { + text_document_position_params: TextDocumentPositionParams { + text_document: TextDocumentIdentifier { + uri: uri.parse().unwrap(), + }, + position: Position { line, character }, + }, + work_done_progress_params: WorkDoneProgressParams::default(), + partial_result_params: PartialResultParams::default(), + }; + Request::new( + id.into(), + GotoImplementation::METHOD.to_string(), + serde_json::to_value(params).unwrap(), + ) +} + /// Helper to create a references request. 
fn references_request( id: i32, @@ -337,6 +355,161 @@ fn execute_command_request(id: i32, command: &str, arguments: Vec lsp_types::Range { + lsp_types::Range { + start: Position { + line: 0, + character: 0, + }, + end: Position { + line: 0, + character: 20, + }, + } +} + +fn unused_variable_diagnostic() -> lsp_types::Diagnostic { + lsp_types::Diagnostic { + range: lsp_types::Range { + start: Position { + line: 0, + character: 6, + }, + end: Position { + line: 0, + character: 7, + }, + }, + severity: Some(lsp_types::DiagnosticSeverity::WARNING), + code: Some(lsp_types::NumberOrString::String( + "unused-variable".to_string(), + )), + code_description: None, + source: Some("jrsonnet-lint".to_string()), + message: "unused variable".to_string(), + related_information: None, + tags: None, + data: None, + } +} + +fn request_code_actions( + conn: &Connection, + id: i32, + uri: &str, + diagnostics: Vec, + only: Option>, +) -> Option> { + conn.sender + .send(Message::Request(code_action_request( + id, + uri, + code_action_test_range(), + diagnostics, + only, + ))) + .unwrap(); + let response = recv_response(conn, id); + assert!(response.error.is_none(), "Code action should succeed"); + serde_json::from_value(response.result.expect("should have result")).unwrap() +} + +fn expected_unused_variable_quickfix( + uri: &str, + diagnostic: lsp_types::Diagnostic, +) -> Vec { + let mut changes = std::collections::HashMap::new(); + changes.insert( + uri.parse().unwrap(), + vec![lsp_types::TextEdit { + range: lsp_types::Range { + start: Position { + line: 0, + character: 6, + }, + end: Position { + line: 0, + character: 7, + }, + }, + new_text: "_x".to_string(), + }], + ); + + vec![lsp_types::CodeActionOrCommand::CodeAction( + lsp_types::CodeAction { + title: "Prefix `x` with `_`".to_string(), + kind: Some(lsp_types::CodeActionKind::QUICKFIX), + diagnostics: Some(vec![diagnostic]), + edit: Some(lsp_types::WorkspaceEdit { + changes: Some(changes), + document_changes: None, + 
change_annotations: None, + }), + command: None, + is_preferred: Some(true), + disabled: None, + data: None, + }, + )] +} + +fn find_references_command_args(uri: &str, include_declaration: bool) -> Vec { + let mut args = vec![ + serde_json::Value::String(uri.to_string()), + serde_json::Value::Number(0_u64.into()), + serde_json::Value::Number(13_u64.into()), + ]; + if include_declaration { + args.push(serde_json::Value::Bool(true)); + } + args +} + +fn request_find_references_command( + conn: &Connection, + id: i32, + uri: &str, + include_declaration: bool, +) -> Vec { + conn.sender + .send(Message::Request(execute_command_request( + id, + "jrsonnet.findReferences", + find_references_command_args(uri, include_declaration), + ))) + .unwrap(); + let response = recv_response(conn, id); + assert!(response.error.is_none(), "Command should succeed"); + serde_json::from_value(response.result.expect("command should return result")).unwrap() +} + +fn location(uri: &str, start_character: u32, end_character: u32) -> lsp_types::Location { + lsp_types::Location { + uri: uri.parse().unwrap(), + range: lsp_types::Range { + start: Position { + line: 0, + character: start_character, + }, + end: Position { + line: 0, + character: end_character, + }, + }, + } +} + +fn expected_find_references(uri: &str, include_declaration: bool) -> Vec { + let mut references = Vec::with_capacity(if include_declaration { 3 } else { 2 }); + if include_declaration { + references.push(location(uri, 6, 7)); + } + references.push(location(uri, 13, 14)); + references.push(location(uri, 17, 18)); + references +} + fn file_uri(path: &std::path::Path) -> String { format!("file://{}", path.to_string_lossy()) } @@ -428,16 +601,21 @@ fn test_initialize_shutdown() { serde_json::Value::Bool(true), "code lens resolve capability should be advertised", ); - assert_eq!( - result["capabilities"]["declarationProvider"], - serde_json::Value::Bool(true), - "declaration capability should be advertised", - ); - let 
server_name = result - .get("serverInfo") - .and_then(|s| s.get("name")) - .and_then(|n| n.as_str()) - .expect("should have serverInfo.name"); + assert_eq!( + result["capabilities"]["declarationProvider"], + serde_json::Value::Bool(true), + "declaration capability should be advertised", + ); + assert_eq!( + result["capabilities"]["implementationProvider"], + serde_json::Value::Bool(true), + "implementation capability should be advertised", + ); + let server_name = result + .get("serverInfo") + .and_then(|s| s.get("name")) + .and_then(|n| n.as_str()) + .expect("should have serverInfo.name"); assert!(server_name.contains("jrsonnet")); }); @@ -852,6 +1030,267 @@ fn test_goto_declaration() { .expect("Server thread should exit cleanly"); } +#[test] +fn test_goto_implementation_local_binding() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + let uri = "file:///test/implementation-local.jsonnet"; + let text = "local x = 1; x + 1"; + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text))) + .unwrap(); + let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); + + client_conn + .sender + .send(Message::Request(goto_definition_request(2, uri, 0, 13))) + .unwrap(); + let definition_response = recv_response(&client_conn, 2); + let definition_result: Option = + serde_json::from_value(definition_response.result.expect("should have result")).unwrap(); + assert_eq!( + definition_result, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: uri.parse().unwrap(), + range: lsp_types::Range { + start: Position { + line: 0, + character: 6, + }, + end: Position { + line: 0, + character: 7, + }, + }, + })) + ); + + client_conn + 
.sender + .send(Message::Request(goto_implementation_request(3, uri, 0, 13))) + .unwrap(); + let implementation_response = recv_response(&client_conn, 3); + let implementation_result: Option = + serde_json::from_value(implementation_response.result.expect("should have result")) + .unwrap(); + assert_eq!( + implementation_result, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: uri.parse().unwrap(), + range: lsp_types::Range { + start: Position { + line: 0, + character: 10, + }, + end: Position { + line: 0, + character: 11, + }, + }, + })) + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(4))) + .unwrap(); + let _ = recv_response(&client_conn, 4); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_goto_implementation_import_field() { + let temp_dir = TempDir::new().expect("failed to create temp dir"); + let lib_path = temp_dir.path().join("lib.libsonnet"); + let main_path = temp_dir.path().join("main.jsonnet"); + + fs::write(&lib_path, "{ foo: 42 }").expect("failed to write lib file"); + fs::write(&main_path, r#"local lib = import "lib.libsonnet"; lib.foo"#) + .expect("failed to write main file"); + + let uri = file_uri(&main_path); + let lib_uri: lsp_types::Uri = file_uri(&lib_path).parse().unwrap(); + let text = fs::read_to_string(&main_path).expect("failed to read main file"); + + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + client_conn + .sender + .send(Message::Notification(did_open_notification(&uri, &text))) + .unwrap(); + + client_conn + .sender + .send(Message::Request(goto_definition_request(2, &uri, 
0, 40))) + .unwrap(); + let definition_response = recv_response(&client_conn, 2); + let definition_result: Option = + serde_json::from_value(definition_response.result.expect("should have result")).unwrap(); + assert_eq!( + definition_result, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: lib_uri.clone(), + range: lsp_types::Range { + start: Position { + line: 0, + character: 2, + }, + end: Position { + line: 0, + character: 5, + }, + }, + })) + ); + + client_conn + .sender + .send(Message::Request(goto_implementation_request( + 3, &uri, 0, 40, + ))) + .unwrap(); + let implementation_response = recv_response(&client_conn, 3); + let implementation_result: Option = + serde_json::from_value(implementation_response.result.expect("should have result")) + .unwrap(); + assert_eq!( + implementation_result, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: lib_uri, + range: lsp_types::Range { + start: Position { + line: 0, + character: 7, + }, + end: Position { + line: 0, + character: 9, + }, + }, + })) + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(4))) + .unwrap(); + let _ = recv_response(&client_conn, 4); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_diagnostics_import_file_and_definition_resolution() { + let temp_dir = TempDir::new().expect("failed to create temp dir"); + let lib_path = temp_dir.path().join("lib.libsonnet"); + let main_path = temp_dir.path().join("main.jsonnet"); + + fs::write(&lib_path, "{ foo: 42 }").expect("failed to write lib file"); + fs::write(&main_path, r#"local lib = import "lib.libsonnet"; lib.foo"#) + .expect("failed to write main file"); + + let uri = file_uri(&main_path); + let lib_uri: lsp_types::Uri = file_uri(&lib_path).parse().unwrap(); + let text = fs::read_to_string(&main_path).expect("failed to read main file"); + + let (client_conn, 
server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + client_conn + .sender + .send(Message::Notification(did_open_notification(&uri, &text))) + .unwrap(); + + let diagnostics = recv_publish_diagnostics_for_uri(&client_conn, &uri, Duration::from_secs(5)); + assert_eq!(diagnostics.uri.as_str(), uri); + assert!( + diagnostics.diagnostics.is_empty(), + "import-backed file should have no diagnostics" + ); + + client_conn + .sender + .send(Message::Request(goto_definition_request(2, &uri, 0, 40))) + .unwrap(); + let response = recv_response(&client_conn, 2); + assert!(response.error.is_none(), "Goto definition should succeed"); + let result: Option = + serde_json::from_value(response.result.expect("should have result")).unwrap(); + assert_eq!( + result, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: lib_uri, + range: lsp_types::Range { + start: Position { + line: 0, + character: 2, + }, + end: Position { + line: 0, + character: 5, + }, + }, + })) + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(3))) + .unwrap(); + let _ = recv_response(&client_conn, 3); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + #[test] fn test_document_highlight() { let (client_conn, server_conn) = Connection::memory(); @@ -987,108 +1426,22 @@ fn test_code_action_unused_variable_quickfix() { .send(Message::Notification(did_open_notification(uri, text))) .unwrap(); - let diagnostic = lsp_types::Diagnostic { - range: lsp_types::Range { - start: Position { - line: 0, - character: 6, - }, - end: Position { - line: 0, - character: 7, - }, - }, - severity: 
Some(lsp_types::DiagnosticSeverity::WARNING), - code: Some(lsp_types::NumberOrString::String( - "unused-variable".to_string(), - )), - code_description: None, - source: Some("jrsonnet-lint".to_string()), - message: "unused variable".to_string(), - related_information: None, - tags: None, - data: None, - }; - - client_conn - .sender - .send(Message::Request(code_action_request( - 2, - uri, - lsp_types::Range { - start: Position { - line: 0, - character: 0, - }, - end: Position { - line: 0, - character: 20, - }, - }, - vec![diagnostic.clone()], - None, - ))) - .unwrap(); - let response = recv_response(&client_conn, 2); - assert!(response.error.is_none(), "Code action should succeed"); - let actions: Option> = - serde_json::from_value(response.result.expect("should have result")).unwrap(); - let actions = actions.unwrap_or_default(); - assert_eq!(actions.len(), 1); - let lsp_types::CodeActionOrCommand::CodeAction(action) = &actions[0] else { - panic!("Expected code action"); - }; - assert_eq!(action.title, "Prefix `x` with `_`"); - assert_eq!(action.kind, Some(lsp_types::CodeActionKind::QUICKFIX)); - let uri_parsed: lsp_types::Uri = uri.parse().unwrap(); - let edits = action - .edit - .as_ref() - .and_then(|edit| edit.changes.as_ref()) - .and_then(|changes| changes.get(&uri_parsed)) - .expect("expected workspace edit for file"); + let diagnostic = unused_variable_diagnostic(); + let actions = request_code_actions(&client_conn, 2, uri, vec![diagnostic.clone()], None); assert_eq!( - edits, - &vec![lsp_types::TextEdit { - range: lsp_types::Range { - start: Position { - line: 0, - character: 6, - }, - end: Position { - line: 0, - character: 7, - }, - }, - new_text: "_x".to_string(), - }] + actions, + Some(expected_unused_variable_quickfix(uri, diagnostic.clone())) ); // Requesting non-quickfix actions should filter this out. 
- client_conn - .sender - .send(Message::Request(code_action_request( - 3, - uri, - lsp_types::Range { - start: Position { - line: 0, - character: 0, - }, - end: Position { - line: 0, - character: 20, - }, - }, - vec![diagnostic], - Some(vec![lsp_types::CodeActionKind::SOURCE_FIX_ALL]), - ))) - .unwrap(); - let response = recv_response(&client_conn, 3); - assert!(response.error.is_none(), "Code action should succeed"); - let filtered_actions: Option> = - serde_json::from_value(response.result.expect("should have result")).unwrap(); - assert!(filtered_actions.is_none()); + let filtered_actions = request_code_actions( + &client_conn, + 3, + uri, + vec![diagnostic], + Some(vec![lsp_types::CodeActionKind::SOURCE_FIX_ALL]), + ); + assert_eq!(filtered_actions, None); client_conn .sender @@ -1127,111 +1480,11 @@ fn test_execute_command_find_references() { .unwrap(); let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); - client_conn - .sender - .send(Message::Request(execute_command_request( - 2, - "jrsonnet.findReferences", - vec![ - serde_json::Value::String(uri.to_string()), - serde_json::Value::Number(0_u64.into()), - serde_json::Value::Number(13_u64.into()), - ], - ))) - .unwrap(); - let response = recv_response(&client_conn, 2); - assert!(response.error.is_none(), "Command should succeed"); - let refs: Vec = - serde_json::from_value(response.result.expect("command should return result")).unwrap(); - let expected_without_declaration = vec![ - lsp_types::Location { - uri: uri.parse().unwrap(), - range: lsp_types::Range { - start: Position { - line: 0, - character: 13, - }, - end: Position { - line: 0, - character: 14, - }, - }, - }, - lsp_types::Location { - uri: uri.parse().unwrap(), - range: lsp_types::Range { - start: Position { - line: 0, - character: 17, - }, - end: Position { - line: 0, - character: 18, - }, - }, - }, - ]; - assert_eq!(refs, expected_without_declaration); + let refs = request_find_references_command(&client_conn, 
2, uri, false); + assert_eq!(refs, expected_find_references(uri, false)); - client_conn - .sender - .send(Message::Request(execute_command_request( - 3, - "jrsonnet.findReferences", - vec![ - serde_json::Value::String(uri.to_string()), - serde_json::Value::Number(0_u64.into()), - serde_json::Value::Number(13_u64.into()), - serde_json::Value::Bool(true), - ], - ))) - .unwrap(); - let response = recv_response(&client_conn, 3); - assert!(response.error.is_none(), "Command should succeed"); - let refs_with_declaration: Vec = - serde_json::from_value(response.result.expect("command should return result")).unwrap(); - let expected_with_declaration = vec![ - lsp_types::Location { - uri: uri.parse().unwrap(), - range: lsp_types::Range { - start: Position { - line: 0, - character: 6, - }, - end: Position { - line: 0, - character: 7, - }, - }, - }, - lsp_types::Location { - uri: uri.parse().unwrap(), - range: lsp_types::Range { - start: Position { - line: 0, - character: 13, - }, - end: Position { - line: 0, - character: 14, - }, - }, - }, - lsp_types::Location { - uri: uri.parse().unwrap(), - range: lsp_types::Range { - start: Position { - line: 0, - character: 17, - }, - end: Position { - line: 0, - character: 18, - }, - }, - }, - ]; - assert_eq!(refs_with_declaration, expected_with_declaration); + let refs_with_declaration = request_find_references_command(&client_conn, 3, uri, true); + assert_eq!(refs_with_declaration, expected_find_references(uri, true)); client_conn .sender diff --git a/docs/lsp/ARCHITECTURE.md b/docs/lsp/ARCHITECTURE.md index 6534a21f..db437a58 100644 --- a/docs/lsp/ARCHITECTURE.md +++ b/docs/lsp/ARCHITECTURE.md @@ -101,6 +101,7 @@ Dispatched via `spawn_async_response` (Rayon): - `textDocument/definition` - `textDocument/declaration` +- `textDocument/implementation` - `textDocument/hover` - `textDocument/inlayHint` - `textDocument/completion` @@ -119,7 +120,7 @@ to documents, import graph, type cache, config, and dependency-aware analysis. 
`server_capabilities()` currently advertises: - incremental text sync with open/close and save notifications -- definition, declaration, hover, document symbols, document highlights +- definition, declaration, implementation, hover, document symbols, document highlights - completion (trigger `.`) - signature help (triggers `(` and `,`) - formatting @@ -135,7 +136,7 @@ For the canonical list, see `crates/jrsonnet-lsp/src/server.rs`. Not currently advertised: -- type-definition/implementation providers +- type-definition provider ## Notification Handling diff --git a/docs/lsp/HANDLERS.md b/docs/lsp/HANDLERS.md index 23194707..32bd0251 100644 --- a/docs/lsp/HANDLERS.md +++ b/docs/lsp/HANDLERS.md @@ -39,6 +39,7 @@ Current request routing in `crates/jrsonnet-lsp/src/server.rs`: | `textDocument/documentSymbol` | `on_document_symbol` | handlers crate (`document_symbols`) | sync | no | | `textDocument/definition` | async context (`goto_definition`) | mixed: handlers + server import resolution | async | no | | `textDocument/declaration` | async context (`goto_declaration`) | mixed: handlers + server import resolution | async | no | +| `textDocument/implementation` | async context (`goto_implementation`) | mixed: handlers + server import resolution | async | no | | `textDocument/hover` | async context (`hover`) | handlers crate (`hover`) | async | yes | | `textDocument/documentHighlight` | `on_document_highlight` | handlers crate (`document_highlights`) | sync | no | | `textDocument/inlayHint` | async context (`inlay_hints`) | handlers crate (`inlay_hints`) | async | yes | @@ -133,6 +134,12 @@ imported files before returning final `Location`. `textDocument/declaration` currently delegates to the same resolution path as `textDocument/definition`. 
+`textDocument/implementation` resolves symbol values/bodies: + +- local bindings jump to bound value expressions +- imports jump to imported-file root expressions +- imported field chains jump to the target field value expression + ### Document Highlight File: `crates/jrsonnet-lsp-handlers/src/document_highlight.rs` From 6f2a8b8b9476d5abc64315740669f5b9ab0065cb Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Mon, 9 Feb 2026 15:37:08 +0000 Subject: [PATCH 009/210] refactor(lsp): harden runtime paths and remove panic-prone code - make URI handling fallible and propagate/skip invalid paths in request and diagnostics flows - replace panic-prone indexing/slicing with checked access across handlers, inference, check, scope, import, and type-store code - remove remaining panic-style server dispatch fallthroughs and simplify error handling paths - tighten type-store mutation paths (object merge/narrow, union/sum simplification, safe deref fallbacks) - keep behavior stable with targeted test runs across lsp-document, lsp-types, lsp-import, lsp-scope, lsp-inference, lsp-check, and lsp-handlers --- crates/jrsonnet-lsp-check/src/diagnostic.rs | 9 + crates/jrsonnet-lsp-check/src/format_check.rs | 73 +++++---- crates/jrsonnet-lsp-check/src/lint.rs | 36 ++-- crates/jrsonnet-lsp-check/src/type_check.rs | 76 ++++----- crates/jrsonnet-lsp-document/src/ast_utils.rs | 5 + crates/jrsonnet-lsp-document/src/config.rs | 4 +- crates/jrsonnet-lsp-document/src/document.rs | 29 ++-- crates/jrsonnet-lsp-document/src/error.rs | 15 +- crates/jrsonnet-lsp-document/src/position.rs | 35 ++-- crates/jrsonnet-lsp-document/src/types.rs | 28 +++- crates/jrsonnet-lsp-handlers/src/code_lens.rs | 29 ++-- .../src/completion/fields.rs | 12 +- .../src/completion/imports.rs | 7 +- .../src/completion/mod.rs | 2 +- .../jrsonnet-lsp-handlers/src/definition.rs | 15 +- .../src/document_highlight.rs | 1 + .../jrsonnet-lsp-handlers/src/formatting.rs | 10 +- crates/jrsonnet-lsp-handlers/src/hover.rs | 5 +- 
.../jrsonnet-lsp-handlers/src/references.rs | 5 +- crates/jrsonnet-lsp-handlers/src/rename.rs | 7 +- .../src/semantic_tokens.rs | 18 +- .../src/signature_help.rs | 51 +++--- crates/jrsonnet-lsp-handlers/src/symbols.rs | 11 +- crates/jrsonnet-lsp-import/src/graph.rs | 29 ++-- crates/jrsonnet-lsp-import/src/parse.rs | 5 + crates/jrsonnet-lsp-import/src/work_queue.rs | 20 ++- crates/jrsonnet-lsp-inference/src/analysis.rs | 14 +- .../jrsonnet-lsp-inference/src/const_eval.rs | 12 +- crates/jrsonnet-lsp-inference/src/env.rs | 16 +- crates/jrsonnet-lsp-inference/src/expr.rs | 94 ++++------- crates/jrsonnet-lsp-inference/src/flow.rs | 72 +++++--- crates/jrsonnet-lsp-inference/src/helpers.rs | 12 +- crates/jrsonnet-lsp-inference/src/manager.rs | 4 +- crates/jrsonnet-lsp-inference/src/object.rs | 6 +- crates/jrsonnet-lsp-inference/src/poly.rs | 9 +- crates/jrsonnet-lsp-inference/src/provider.rs | 4 +- .../jrsonnet-lsp-inference/src/suggestions.rs | 1 + crates/jrsonnet-lsp-scope/src/bindings.rs | 18 +- crates/jrsonnet-lsp-scope/src/resolver.rs | 68 +++++--- crates/jrsonnet-lsp-stdlib/src/docs.rs | 1 + crates/jrsonnet-lsp-stdlib/src/signatures.rs | 15 +- crates/jrsonnet-lsp-types/src/display.rs | 31 ++-- crates/jrsonnet-lsp-types/src/global_store.rs | 40 +++-- crates/jrsonnet-lsp-types/src/local_store.rs | 30 +++- crates/jrsonnet-lsp-types/src/mut_store.rs | 81 ++++----- crates/jrsonnet-lsp-types/src/operations.rs | 47 +++--- crates/jrsonnet-lsp-types/src/store.rs | 154 ++++++++++++------ crates/jrsonnet-lsp-types/src/subst.rs | 34 ++-- crates/jrsonnet-lsp-types/src/unification.rs | 48 +++--- crates/jrsonnet-lsp/src/analysis/eval.rs | 2 + crates/jrsonnet-lsp/src/analysis/tanka.rs | 1 + crates/jrsonnet-lsp/src/async_diagnostics.rs | 45 +++-- crates/jrsonnet-lsp/src/config.rs | 8 +- .../jrsonnet-lsp/src/handlers/diagnostics.rs | 13 +- crates/jrsonnet-lsp/src/server.rs | 47 +++--- .../jrsonnet-lsp/src/server/async_requests.rs | 68 ++++---- 
crates/jrsonnet-lsp/tests/framework/mod.rs | 2 +- crates/jrsonnet-lsp/tests/framework/parser.rs | 5 +- crates/jrsonnet-lsp/tests/stress_tests.rs | 55 +++---- 59 files changed, 929 insertions(+), 665 deletions(-) diff --git a/crates/jrsonnet-lsp-check/src/diagnostic.rs b/crates/jrsonnet-lsp-check/src/diagnostic.rs index 27a00b9a..ec091b59 100644 --- a/crates/jrsonnet-lsp-check/src/diagnostic.rs +++ b/crates/jrsonnet-lsp-check/src/diagnostic.rs @@ -92,6 +92,7 @@ pub enum ErrorCode { impl ErrorCode { /// Get the default severity for this error code. + #[must_use] pub fn default_severity(&self) -> Severity { match self { Self::BinaryOpMismatch @@ -127,6 +128,7 @@ impl ErrorCode { /// Get the string code for LSP diagnostic. /// /// Format: `E0xx` for errors, `W0xx` for warnings. + #[must_use] pub fn as_str(&self) -> &'static str { match self { // Type errors @@ -223,6 +225,7 @@ impl Diagnostic { } /// Convert to LSP Diagnostic. + #[must_use] pub fn to_lsp(&self) -> lsp_types::Diagnostic { let related_information = if self.related.is_empty() { None @@ -271,6 +274,7 @@ pub struct DiagnosticCollector { impl DiagnosticCollector { /// Create a new empty collector. + #[must_use] pub fn new() -> Self { Self::default() } @@ -293,21 +297,25 @@ impl DiagnosticCollector { } /// Convert all diagnostics to LSP format. + #[must_use] pub fn into_lsp_diagnostics(self) -> Vec { self.diagnostics.into_iter().map(|d| d.to_lsp()).collect() } /// Get the collected diagnostics. + #[must_use] pub fn into_diagnostics(self) -> Vec { self.diagnostics } /// Check if there are any diagnostics. + #[must_use] pub fn is_empty(&self) -> bool { self.diagnostics.is_empty() } /// Check if there are any errors. + #[must_use] pub fn has_errors(&self) -> bool { self.diagnostics .iter() @@ -315,6 +323,7 @@ impl DiagnosticCollector { } /// Get the number of diagnostics. 
+ #[must_use] pub fn len(&self) -> usize { self.diagnostics.len() } diff --git a/crates/jrsonnet-lsp-check/src/format_check.rs b/crates/jrsonnet-lsp-check/src/format_check.rs index ed6fb66e..1c9d47ce 100644 --- a/crates/jrsonnet-lsp-check/src/format_check.rs +++ b/crates/jrsonnet-lsp-check/src/format_check.rs @@ -18,7 +18,7 @@ use jrsonnet_lsp_types::{Ty, TyData, TypeStoreOps}; /// Expected type category for a format placeholder. /// -/// This is a lightweight representation that avoids needing a TyStore during parsing. +/// This is a lightweight representation that avoids needing a `TyStore` during parsing. /// Convert to `Ty` when needed for type checking. #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum FormatTypeKind { @@ -147,6 +147,7 @@ pub struct FormatModifiers { impl FormatModifiers { /// Count how many extra arguments are consumed by dynamic width/precision. + #[must_use] pub fn dynamic_arg_count(&self) -> usize { let width_args = usize::from(self.width == WidthSpec::Dynamic); let prec_args = usize::from(self.precision == PrecisionSpec::Dynamic); @@ -197,6 +198,7 @@ impl FormatSpec { /// - One for each positional placeholder /// - One for each dynamic width (`*`) /// - One for each dynamic precision (`.*`) + #[must_use] pub fn positional_arg_count(&self) -> usize { self.placeholders .iter() @@ -210,6 +212,7 @@ impl FormatSpec { } /// Get all named field names required. + #[must_use] pub fn named_fields(&self) -> Vec<&str> { self.placeholders .iter() @@ -237,6 +240,10 @@ pub enum FormatParseError { } /// Parse a format string and extract placeholder information. +/// +/// # Errors +/// Returns `Err` when the format string contains an incomplete specifier, +/// malformed named placeholder, unknown specifier, or mixed named/positional placeholders. 
pub fn parse_format_string(fmt: &str) -> Result { let mut placeholders = Vec::new(); let mut uses_named = false; @@ -246,37 +253,43 @@ pub fn parse_format_string(fmt: &str) -> Result { let mut i = 0; while i < chars.len() { - if chars[i] != '%' { + let Some(current) = chars.get(i).copied() else { + break; + }; + if current != '%' { i += 1; continue; } // Found a % i += 1; - if i >= chars.len() { + let Some(next) = chars.get(i).copied() else { return Err(FormatParseError::IncompleteSpecifier); - } + }; // Check for %% - if chars[i] == '%' { + if next == '%' { i += 1; continue; } // Check for named placeholder %(name) - if chars[i] == '(' { + if next == '(' { i += 1; let name_start = i; // Find closing paren - while i < chars.len() && chars[i] != ')' { + while matches!(chars.get(i), Some(ch) if *ch != ')') { i += 1; } - if i >= chars.len() { + if !matches!(chars.get(i), Some(ch) if *ch == ')') { return Err(FormatParseError::UnclosedNamedPlaceholder); } - let name: String = chars[name_start..i].iter().collect(); + let Some(name_chars) = chars.get(name_start..i) else { + return Err(FormatParseError::UnclosedNamedPlaceholder); + }; + let name: String = name_chars.iter().collect(); if name.is_empty() { return Err(FormatParseError::EmptyName); } @@ -287,11 +300,9 @@ pub fn parse_format_string(fmt: &str) -> Result { let (modifiers, new_i) = parse_format_modifiers(&chars, i); i = new_i; - if i >= chars.len() { + let Some(specifier) = chars.get(i).copied() else { return Err(FormatParseError::IncompleteSpecifier); - } - - let specifier = chars[i]; + }; let expected_type = specifier_to_type_kind(specifier)?; placeholders.push(FormatPlaceholder::Named { @@ -309,11 +320,9 @@ pub fn parse_format_string(fmt: &str) -> Result { let (modifiers, new_i) = parse_format_modifiers(&chars, i); i = new_i; - if i >= chars.len() { + let Some(specifier) = chars.get(i).copied() else { return Err(FormatParseError::IncompleteSpecifier); - } - - let specifier = chars[i]; + }; let expected_type = 
specifier_to_type_kind(specifier)?; placeholders.push(FormatPlaceholder::Positional { @@ -342,8 +351,8 @@ fn parse_format_modifiers(chars: &[char], mut i: usize) -> (FormatModifiers, usi let mut modifiers = FormatModifiers::default(); // Parse flags: -, +, space, #, 0 - while i < chars.len() { - match chars[i] { + while let Some(ch) = chars.get(i).copied() { + match ch { '-' => modifiers.flags.insert(FormatFlag::LeftJustify), '+' => modifiers.flags.insert(FormatFlag::ShowSign), ' ' => modifiers.flags.insert(FormatFlag::SpaceSign), @@ -355,37 +364,41 @@ fn parse_format_modifiers(chars: &[char], mut i: usize) -> (FormatModifiers, usi } // Parse width (digits or *) - if i < chars.len() && chars[i] == '*' { + if matches!(chars.get(i), Some(ch) if *ch == '*') { modifiers.width = WidthSpec::Dynamic; i += 1; } else { let width_start = i; - while i < chars.len() && chars[i].is_ascii_digit() { + while matches!(chars.get(i), Some(ch) if ch.is_ascii_digit()) { i += 1; } if i > width_start { - let width_str: String = chars[width_start..i].iter().collect(); - if let Ok(width) = width_str.parse::() { - modifiers.width = WidthSpec::Fixed(width); + if let Some(width_chars) = chars.get(width_start..i) { + let width_str: String = width_chars.iter().collect(); + if let Ok(width) = width_str.parse::() { + modifiers.width = WidthSpec::Fixed(width); + } } } } // Parse precision (.digits or .*) - if i < chars.len() && chars[i] == '.' 
{ + if matches!(chars.get(i), Some(ch) if *ch == '.') { i += 1; - if i < chars.len() && chars[i] == '*' { + if matches!(chars.get(i), Some(ch) if *ch == '*') { modifiers.precision = PrecisionSpec::Dynamic; i += 1; } else { let prec_start = i; - while i < chars.len() && chars[i].is_ascii_digit() { + while matches!(chars.get(i), Some(ch) if ch.is_ascii_digit()) { i += 1; } if i > prec_start { - let prec_str: String = chars[prec_start..i].iter().collect(); - if let Ok(prec) = prec_str.parse::() { - modifiers.precision = PrecisionSpec::Fixed(prec); + if let Some(prec_chars) = chars.get(prec_start..i) { + let prec_str: String = prec_chars.iter().collect(); + if let Ok(prec) = prec_str.parse::() { + modifiers.precision = PrecisionSpec::Fixed(prec); + } } } else { // Just "." with no digits means precision 0 diff --git a/crates/jrsonnet-lsp-check/src/lint.rs b/crates/jrsonnet-lsp-check/src/lint.rs index d899ce32..d649870c 100644 --- a/crates/jrsonnet-lsp-check/src/lint.rs +++ b/crates/jrsonnet-lsp-check/src/lint.rs @@ -85,6 +85,7 @@ impl LintConfig { } /// Create a config with all lints enabled. 
+ #[must_use] pub fn all() -> Self { Self { enabled: LintRule::UnusedVariables.bit() @@ -271,8 +272,7 @@ fn check_shadowed_variables( // This handles cases like `local y = (local x = 2; x)` where the inner // expression creates a new scope node.parent() - .map(|p| p.kind() != SyntaxKind::SOURCE_FILE) - .unwrap_or(false) + .is_some_and(|p| p.kind() != SyntaxKind::SOURCE_FILE) } _ => false, }; @@ -350,11 +350,11 @@ fn check_for_shadow( if let Some(original) = scope.get(name) { diagnostics.push(ctx.make_diagnostic_with_related( range, - format!("variable `{}` shadows a variable from an outer scope", name), + format!("variable `{name}` shadows a variable from an outer scope"), DiagnosticSeverity::WARNING, "shadowed-variable", original.range, - format!("`{}` originally defined here", name), + format!("`{name}` originally defined here"), )); break; } @@ -450,9 +450,9 @@ fn check_param_for_shadow( } } -/// Extract variable name from a simple BindDestruct (not array/object destructuring). +/// Extract variable name from a simple `BindDestruct` (not array/object destructuring). /// -/// Returns the variable name and a reference to the BindDestruct for value access. +/// Returns the variable name and a reference to the `BindDestruct` for value access. 
fn extract_simple_bind_name( bind: &Bind, ) -> Option<(String, &jrsonnet_rowan_parser::nodes::BindDestruct)> { @@ -528,10 +528,7 @@ fn check_unreachable_code( // Process local bindings for the type environment for bind in local_stmt.binds() { if let Some((name, bd)) = extract_simple_bind_name(&bind) { - let ty = bd - .value() - .map(|v| infer_expr_ty(&v, env)) - .unwrap_or(Ty::ANY); + let ty = bd.value().map_or(Ty::ANY, |v| infer_expr_ty(&v, env)); env.define_ty(name, ty); } } @@ -897,7 +894,7 @@ fn check_object_for_duplicate_fields( severity: Some(DiagnosticSeverity::WARNING), code: Some(NumberOrString::String("duplicate-field".to_string())), source: Some("jrsonnet-lsp".to_string()), - message: format!("duplicate field `{}`", name), + message: format!("duplicate field `{name}`"), related_information: Some(vec![DiagnosticRelatedInformation { location: Location { uri: ctx.uri.clone(), @@ -937,7 +934,7 @@ fn extract_bind_name(bind: Option) -> Option { } } -/// Extract a static field name from a FieldName node. +/// Extract a static field name from a `FieldName` node. 
fn extract_static_field_name(field_name: FieldName) -> Option { match field_name { FieldName::FieldNameFixed(fixed) => { @@ -993,7 +990,7 @@ fn check_function_for_duplicate_params( severity: Some(DiagnosticSeverity::ERROR), code: Some(NumberOrString::String("duplicate-param".to_string())), source: Some("jrsonnet-lsp".to_string()), - message: format!("duplicate parameter `{}`", name), + message: format!("duplicate parameter `{name}`"), related_information: Some(vec![DiagnosticRelatedInformation { location: Location { uri: ctx.uri.clone(), @@ -1079,10 +1076,7 @@ mod tests { code: Some(NumberOrString::String("unused-variable".to_string())), code_description: None, source: Some("jrsonnet-lint".to_string()), - message: format!( - "unused variable: `{}`; prefix with `_` to silence this warning", - name - ), + message: format!("unused variable: `{name}`; prefix with `_` to silence this warning"), related_information: None, tags: None, data: None, @@ -1169,13 +1163,13 @@ mod tests { code: Some(NumberOrString::String("shadowed-variable".to_string())), code_description: None, source: Some("jrsonnet-lint".to_string()), - message: format!("variable `{}` shadows a variable from an outer scope", name), + message: format!("variable `{name}` shadows a variable from an outer scope"), related_information: Some(vec![DiagnosticRelatedInformation { location: Location { uri: test_uri(), range: original_range.to_range(), }, - message: format!("`{}` originally defined here", name), + message: format!("`{name}` originally defined here"), }]), tags: None, data: None, @@ -1391,7 +1385,7 @@ mod tests { code: Some(NumberOrString::String("duplicate-field".to_string())), code_description: None, source: Some("jrsonnet-lsp".to_string()), - message: format!("duplicate field `{}`", name), + message: format!("duplicate field `{name}`"), related_information: Some(vec![DiagnosticRelatedInformation { location: Location { uri: test_uri(), @@ -1411,7 +1405,7 @@ mod tests { code: 
Some(NumberOrString::String("duplicate-param".to_string())), code_description: None, source: Some("jrsonnet-lsp".to_string()), - message: format!("duplicate parameter `{}`", name), + message: format!("duplicate parameter `{name}`"), related_information: Some(vec![DiagnosticRelatedInformation { location: Location { uri: test_uri(), diff --git a/crates/jrsonnet-lsp-check/src/type_check.rs b/crates/jrsonnet-lsp-check/src/type_check.rs index ddc5c623..89170a84 100644 --- a/crates/jrsonnet-lsp-check/src/type_check.rs +++ b/crates/jrsonnet-lsp-check/src/type_check.rs @@ -236,10 +236,7 @@ impl TypeError { ) } TypeErrorKind::WrongArgCount { expected, actual } => { - format!( - "function expects {} argument(s), but {} provided", - expected, actual - ) + format!("function expects {expected} argument(s), but {actual} provided") } TypeErrorKind::TooFewArguments { function_name, @@ -247,8 +244,7 @@ impl TypeError { provided, } => { format!( - "`{}` requires at least {} argument(s), but {} provided", - function_name, required, provided + "`{function_name}` requires at least {required} argument(s), but {provided} provided" ) } TypeErrorKind::TooManyArguments { @@ -257,8 +253,7 @@ impl TypeError { provided, } => { format!( - "`{}` accepts at most {} argument(s), but {} provided", - function_name, max_allowed, provided + "`{function_name}` accepts at most {max_allowed} argument(s), but {provided} provided" ) } TypeErrorKind::NoSuchField { @@ -266,7 +261,7 @@ impl TypeError { available, suggestion, } => { - let mut msg = format!("no such field `{}`", field); + let mut msg = format!("no such field `{field}`"); if let Some(suggested) = suggestion { msg.push_str("; did you mean `"); msg.push_str(suggested); @@ -279,19 +274,13 @@ impl TypeError { msg } TypeErrorKind::TupleIndexOutOfBounds { tuple_len, index } => { - format!( - "index {} is out of bounds for tuple of length {}", - index, tuple_len - ) + format!("index {index} is out of bounds for tuple of length {tuple_len}") } 
TypeErrorKind::FormatStringError { message } => { - format!("invalid format string: {}", message) + format!("invalid format string: {message}") } TypeErrorKind::FormatArgCount { expected, provided } => { - format!( - "format string expects {} argument(s), but {} provided", - expected, provided - ) + format!("format string expects {expected} argument(s), but {provided} provided") } TypeErrorKind::FormatArgTypeMismatch { index, @@ -400,6 +389,7 @@ impl TypeCheckConfig { } /// Create a config with all checks enabled. + #[must_use] pub fn all() -> Self { Self { enabled: TypeCheckRule::BinaryOps.bit() @@ -895,7 +885,7 @@ fn check_obj_body( } } -/// Validate a function call using FunctionData (Ty-native version). +/// Validate a function call using `FunctionData` (Ty-native version). fn validate_function_call_ty( func_data: &FunctionData, function_name: String, @@ -929,10 +919,10 @@ fn validate_function_call_ty( } } -/// Check if an ExprCall is a stdlib function call and validate argument count and types. +/// Check if an `ExprCall` is a stdlib function call and validate argument count and types. 
/// /// Matches the pattern: `std.functionName(args...)` -/// - Callee must be ExprField with base being ExprVar "std" +/// - Callee must be `ExprField` with base being `ExprVar` "std" fn check_stdlib_call_expr(call: &ExprCall, analysis: &TypeAnalysis, errors: &mut Vec) { // Get the callee - should be std.functionName (ExprField) let Some(callee_expr) = call.callee() else { @@ -972,13 +962,10 @@ fn check_stdlib_call_expr(call: &ExprCall, analysis: &TypeAnalysis, errors: &mut }; // Count arguments - let arg_count = call - .args_desc() - .map(|args| args.args().count()) - .unwrap_or(0); + let arg_count = call.args_desc().map_or(0, |args| args.args().count()); // Validate using the unified function - let qualified_name = format!("std.{}", fn_name); + let qualified_name = format!("std.{fn_name}"); let Some(func_data) = sig.func_data() else { return; }; @@ -998,7 +985,9 @@ fn check_stdlib_call_expr(call: &ExprCall, analysis: &TypeAnalysis, errors: &mut break; // Variadic or too many args - handled elsewhere } - let param = &func_data.params[i]; + let Some(param) = func_data.params.get(i) else { + break; + }; let stdlib_expected_ty = param.ty; // Skip if expected type is Any (no constraint) @@ -1168,7 +1157,7 @@ fn check_higher_order_call( // Extract the callback's first parameter type let callback_param_ty = analysis.with_data(callback_ty, |data| match data { - TyData::Function(ft) if !ft.params.is_empty() => Some(ft.params[0].ty), + TyData::Function(ft) => ft.params.first().map(|param| param.ty), _ => None, }); let Some(callback_param_ty) = callback_param_ty else { @@ -1184,7 +1173,7 @@ fn check_higher_order_call( if !analysis.is_subtype(element_ty, callback_param_ty) { errors.push(TypeError { kind: TypeErrorKind::CallbackTypeMismatch { - function_name: format!("std.{}", fn_name), + function_name: format!("std.{fn_name}"), callback_param: config.callback_param_name.to_string(), element_type: element_ty, callback_param_type: callback_param_ty, @@ -1194,7 +1183,7 @@ fn 
check_higher_order_call( } } -/// Validate a std.format() call. +/// Validate a `std.format()` call. /// /// Checks: /// - Format string is valid @@ -1228,7 +1217,7 @@ fn check_format_call(call: &ExprCall, analysis: &TypeAnalysis, errors: &mut Vec< Err(e) => { let message = match e { FormatParseError::IncompleteSpecifier => "incomplete format specifier".to_string(), - FormatParseError::UnknownSpecifier(c) => format!("unknown specifier '%{}'", c), + FormatParseError::UnknownSpecifier(c) => format!("unknown specifier '%{c}'"), FormatParseError::UnclosedNamedPlaceholder => { "unclosed named placeholder".to_string() } @@ -1361,7 +1350,7 @@ fn unescape_string(s: &str) -> String { result } -/// Check if an ExprCall is a user function call and validate argument count. +/// Check if an `ExprCall` is a user function call and validate argument count. /// /// Matches the pattern: `varName(args...)` where varName is a known function. fn check_user_function_call_expr( @@ -1402,10 +1391,7 @@ fn check_user_function_call_expr( }; // Count arguments - let arg_count = call - .args_desc() - .map(|args| args.args().count()) - .unwrap_or(0); + let arg_count = call.args_desc().map_or(0, |args| args.args().count()); // Validate using the Ty-native function if let Some(error) = @@ -1452,6 +1438,13 @@ fn unary_op_str(op: UnaryOperatorKind) -> &'static str { } } +fn non_negative_integral_usize(value: f64) -> Option { + if !(value.is_finite() && value >= 0.0 && value.fract() == 0.0) { + return None; + } + format!("{value:.0}").parse().ok() +} + /// Extract a constant index value from an expression. /// /// Returns `Some(index)` if the expression is a non-negative integer literal. 
@@ -1463,12 +1456,7 @@ fn get_constant_index(expr: Option<&Expr>) -> Option { let text = num.syntax().text().to_string(); let value: f64 = text.parse().ok()?; - // Only accept non-negative integers - if value >= 0.0 && value.fract() == 0.0 { - Some(value as usize) - } else { - None - } + non_negative_integral_usize(value) } #[cfg(test)] @@ -2276,7 +2264,7 @@ mod tests { }); assert!(local_arr.is_local()); - let subst = TySubst::merge(&global, local); + let subst = TySubst::merge(&global, &local); let global_arr = subst.apply(local_arr); assert!(global_arr.is_global()); @@ -2318,7 +2306,7 @@ mod tests { is_set: false, }); - let subst = TySubst::merge(&global, local); + let subst = TySubst::merge(&global, &local); let expected_global = subst.apply(expected_local); let actual_global = subst.apply(actual_local); diff --git a/crates/jrsonnet-lsp-document/src/ast_utils.rs b/crates/jrsonnet-lsp-document/src/ast_utils.rs index ea3af298..1288a3c5 100644 --- a/crates/jrsonnet-lsp-document/src/ast_utils.rs +++ b/crates/jrsonnet-lsp-document/src/ast_utils.rs @@ -16,6 +16,7 @@ use crate::{ByteOffset, LineIndex, LspPosition}; /// - Single-quoted: `'foo'` → `foo` /// - Verbatim double: `@"foo"` → `foo` /// - Verbatim single: `@'foo'` → `foo` +#[must_use] pub fn strip_string_quotes(s: &str) -> String { s.trim_start_matches('@') .trim_start_matches('"') @@ -27,6 +28,7 @@ pub fn strip_string_quotes(s: &str) -> String { /// Find the token at the given byte offset, preferring the rightmost token /// when the offset is between two tokens. 
+#[must_use] pub fn token_at_offset(root: &SyntaxNode, offset: ByteOffset) -> Option { root.token_at_offset(rowan::TextSize::from(u32::from(offset))) .right_biased() @@ -36,6 +38,7 @@ pub fn token_at_offset(root: &SyntaxNode, offset: ByteOffset) -> Option Range { let start = line_index .position(range.start().into(), text) @@ -54,6 +57,7 @@ pub fn to_lsp_range(range: TextRange, line_index: &LineIndex, text: &str) -> Ran /// /// This is useful when the cursor is at whitespace or between tokens, /// where `token_at_offset` would return `None`. +#[must_use] pub fn find_node_at_offset(root: &SyntaxNode, offset: ByteOffset) -> Option { let text_size = rowan::TextSize::from(u32::from(offset)); @@ -70,6 +74,7 @@ pub fn find_node_at_offset(root: &SyntaxNode, offset: ByteOffset) -> Option Self { let (green, errors) = jrsonnet_rowan_parser::parse_green(text); Self { @@ -36,16 +37,19 @@ impl ParsedDocument { } /// Get the AST. Creates a fresh `SourceFile` cursor on each call. + #[must_use] pub fn ast(&self) -> SourceFile { jrsonnet_rowan_parser::source_file_from_green(&self.green) } /// Get syntax errors. + #[must_use] pub fn errors(&self) -> &[SyntaxError] { &self.errors } /// Check if the document has any syntax errors. + #[must_use] pub fn has_errors(&self) -> bool { !self.errors.is_empty() } @@ -69,15 +73,16 @@ pub struct Document { /// Last successful parse for graceful degradation. /// Used when current parse has errors. last_good_parse: Option, - /// Last good line index (corresponding to last_good_parse). + /// Last good line index (corresponding to `last_good_parse`). last_good_line_index: Option>, - /// Lines that have changed since last_good_parse. + /// Lines that have changed since `last_good_parse`. /// If None, no tracking is active (current parse is good). dirty_lines: Option>, } impl Document { /// Create a new document from source text. 
+ #[must_use] pub fn new(text: String, version: DocVersion) -> Self { let line_index = Arc::new(LineIndex::new(&text)); let parsed = ParsedDocument::parse(&text); @@ -93,16 +98,19 @@ impl Document { } /// Get the source text. + #[must_use] pub fn text(&self) -> &str { &self.text } /// Get the document version. + #[must_use] pub fn version(&self) -> DocVersion { self.version } /// Get the line index. + #[must_use] pub fn line_index(&self) -> &LineIndex { &self.line_index } @@ -176,7 +184,7 @@ impl Document { // Track which lines are affected by this change let start_line = range.start.line; let end_line = range.end.line; - let new_line_count = new_text.matches('\n').count() as u32; + let new_line_count = u32::try_from(new_text.matches('\n').count()).unwrap_or(u32::MAX); let affected_lines = end_line.saturating_sub(start_line) + new_line_count + 1; // Apply the text change @@ -211,6 +219,7 @@ impl Document { /// /// Returns the current parse if successful, otherwise falls back to /// the last good parse for graceful degradation on broken files. + #[must_use] pub fn navigation_ast(&self) -> SourceFile { if self.parsed.has_errors() { if let Some(ref last_good) = self.last_good_parse { @@ -224,6 +233,7 @@ impl Document { /// /// Returns the current line index if parse is successful, otherwise /// falls back to the last good line index. + #[must_use] pub fn navigation_line_index(&self) -> &LineIndex { if self.parsed.has_errors() { if let Some(ref last_good) = self.last_good_line_index { @@ -237,22 +247,19 @@ impl Document { /// /// Returns true if the line at the given position has been modified /// since the last successful parse. + #[must_use] pub fn is_position_dirty(&self, line: u32) -> bool { - self.dirty_lines - .as_ref() - .map(|d| d.contains(&line)) - .unwrap_or(false) + self.dirty_lines.as_ref().is_some_and(|d| d.contains(&line)) } /// Check if there are any dirty lines (broken state with pending changes). 
+ #[must_use] pub fn has_dirty_lines(&self) -> bool { - self.dirty_lines - .as_ref() - .map(|d| !d.is_empty()) - .unwrap_or(false) + self.dirty_lines.as_ref().is_some_and(|d| !d.is_empty()) } /// Get the set of dirty line numbers. + #[must_use] pub fn dirty_lines(&self) -> Option<&HashSet> { self.dirty_lines.as_ref() } diff --git a/crates/jrsonnet-lsp-document/src/error.rs b/crates/jrsonnet-lsp-document/src/error.rs index 04baecf7..9c4cf3b3 100644 --- a/crates/jrsonnet-lsp-document/src/error.rs +++ b/crates/jrsonnet-lsp-document/src/error.rs @@ -73,6 +73,7 @@ const JSONNET_KEYWORDS: &[&str] = &[ /// - Start with a letter (a-z, A-Z) or underscore /// - Contain only letters, digits, and underscores /// - Are not Jsonnet keywords +#[must_use] pub fn is_valid_jsonnet_identifier(name: &str) -> bool { if name.is_empty() { return false; @@ -80,7 +81,9 @@ pub fn is_valid_jsonnet_identifier(name: &str) -> bool { // Check first character let mut chars = name.chars(); - let first = chars.next().unwrap(); + let Some(first) = chars.next() else { + return false; + }; if !first.is_ascii_alphabetic() && first != '_' { return false; } @@ -99,6 +102,10 @@ pub fn is_valid_jsonnet_identifier(name: &str) -> bool { /// Validate an identifier for renaming operations. /// /// Returns `Ok(())` if valid, or an error describing why it's invalid. +/// +/// # Errors +/// Returns `Err(LspError::InvalidIdentifier)` when the identifier is empty, +/// starts with an invalid character, contains invalid characters, or is a keyword. 
pub fn validate_identifier(name: &str) -> LspResult<()> { if name.is_empty() { return Err(LspError::InvalidIdentifier( @@ -107,7 +114,11 @@ pub fn validate_identifier(name: &str) -> LspResult<()> { } let mut chars = name.chars(); - let first = chars.next().unwrap(); + let Some(first) = chars.next() else { + return Err(LspError::InvalidIdentifier( + "identifier cannot be empty".to_string(), + )); + }; if !first.is_ascii_alphabetic() && first != '_' { return Err(LspError::InvalidIdentifier(format!( "identifier must start with a letter or underscore, got '{first}'" diff --git a/crates/jrsonnet-lsp-document/src/position.rs b/crates/jrsonnet-lsp-document/src/position.rs index 3adbee8e..dfef25de 100644 --- a/crates/jrsonnet-lsp-document/src/position.rs +++ b/crates/jrsonnet-lsp-document/src/position.rs @@ -6,9 +6,13 @@ use crate::types::{ByteOffset, CharOffset, Line, LspPosition, LspRange}; +fn to_u32(value: usize) -> u32 { + u32::try_from(value).unwrap_or(u32::MAX) +} + /// Line index - stores offsets only, no string copies. /// -/// This allows O(1) line lookup and O(line_length) character offset conversion. +/// This allows O(1) line lookup and `O(line_length)` character offset conversion. #[derive(Debug, Clone)] pub struct LineIndex { /// Byte offset of each line start (including line 0 at offset 0). @@ -17,12 +21,13 @@ pub struct LineIndex { impl LineIndex { /// Build from source text - O(n) single pass, one allocation. + #[must_use] pub fn new(text: &str) -> Self { let mut line_starts = vec![ByteOffset(0)]; for (i, ch) in text.char_indices() { if ch == '\n' { - line_starts.push(ByteOffset((i + 1) as u32)); + line_starts.push(ByteOffset(to_u32(i + 1))); } } @@ -30,27 +35,31 @@ impl LineIndex { } /// Get the number of lines in the document. + #[must_use] pub fn line_count(&self) -> u32 { - self.line_starts.len() as u32 + to_u32(self.line_starts.len()) } /// Get the byte offset of a line start. 
+ #[must_use] pub fn line_start(&self, line: Line) -> Option { self.line_starts.get(line.0 as usize).copied() } /// Get the line number for a byte offset. + #[must_use] pub fn line_of_offset(&self, offset: ByteOffset) -> Line { // Binary search for the line containing this offset match self.line_starts.binary_search(&offset) { - Ok(line) => Line(line as u32), - Err(line) => Line(line.saturating_sub(1) as u32), + Ok(line) => Line(to_u32(line)), + Err(line) => Line(to_u32(line.saturating_sub(1))), } } /// Convert LSP position to byte offset. /// /// Returns None if the position is out of bounds. + #[must_use] pub fn offset(&self, pos: LspPosition, text: &str) -> Option { let line_start = self.line_start(pos.line)?; let line_start_usize: usize = line_start.into(); @@ -67,18 +76,19 @@ impl LineIndex { let mut utf16_count = 0u32; for (byte_idx, ch) in line_text.char_indices() { if utf16_count >= pos.character.0 { - return Some(ByteOffset((line_start_usize + byte_idx) as u32)); + return Some(ByteOffset(to_u32(line_start_usize + byte_idx))); } - utf16_count += ch.len_utf16() as u32; + utf16_count += to_u32(ch.len_utf16()); } // Position is at or past end of line - Some(ByteOffset((line_start_usize + line_text.len()) as u32)) + Some(ByteOffset(to_u32(line_start_usize + line_text.len()))) } /// Convert byte offset to LSP position. /// /// Returns None if the offset is out of bounds. + #[must_use] pub fn position(&self, offset: ByteOffset, text: &str) -> Option { let offset_usize: usize = offset.into(); if offset_usize > text.len() { @@ -90,7 +100,7 @@ impl LineIndex { // Count UTF-16 code units from line start to offset let line_prefix = text.get(line_start..offset_usize)?; - let character: u32 = line_prefix.chars().map(|ch| ch.len_utf16() as u32).sum(); + let character: u32 = line_prefix.chars().map(|ch| to_u32(ch.len_utf16())).sum(); Some(LspPosition { line, @@ -98,14 +108,16 @@ impl LineIndex { }) } - /// Convert a rowan TextRange to an LSP Range. 
+ /// Convert a rowan `TextRange` to an LSP Range. + #[must_use] pub fn range(&self, range: rowan::TextRange, text: &str) -> Option { let start = self.position(range.start().into(), text)?; let end = self.position(range.end().into(), text)?; Some(LspRange { start, end }) } - /// Convert an LSP Range to a rowan TextRange. + /// Convert an LSP Range to a rowan `TextRange`. + #[must_use] pub fn text_range(&self, range: LspRange, text: &str) -> Option { let start = self.offset(range.start, text)?; let end = self.offset(range.end, text)?; @@ -113,6 +125,7 @@ impl LineIndex { } /// Get the text of a specific line (without trailing newline). + #[must_use] pub fn line_text<'a>(&self, line: Line, text: &'a str) -> Option<&'a str> { let start: usize = self.line_start(line)?.into(); let next_line = Line(line.0 + 1); diff --git a/crates/jrsonnet-lsp-document/src/types.rs b/crates/jrsonnet-lsp-document/src/types.rs index 3fb72f3f..b23ab53a 100644 --- a/crates/jrsonnet-lsp-document/src/types.rs +++ b/crates/jrsonnet-lsp-document/src/types.rs @@ -7,13 +7,14 @@ use std::path::PathBuf; use derive_more::{AsRef, Deref, Display, From, Into}; -use crate::error::{validate_identifier, LspResult}; +use crate::error::{validate_identifier, LspError, LspResult}; /// Byte offset within a document (rowan uses byte offsets). #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, From, Into, Default)] pub struct ByteOffset(pub u32); impl ByteOffset { + #[must_use] pub fn new(offset: u32) -> Self { Self(offset) } @@ -33,7 +34,7 @@ impl From for rowan::TextSize { impl From for ByteOffset { fn from(offset: usize) -> Self { - Self(offset as u32) + Self(u32::try_from(offset).unwrap_or(u32::MAX)) } } @@ -121,11 +122,15 @@ pub struct CanonicalPath(PathBuf); impl CanonicalPath { /// Create a new canonical path from an already-canonicalized path. + #[must_use] pub fn new(path: PathBuf) -> Self { Self(path) } /// Try to create a canonical path, canonicalizing if needed. 
+ /// + /// # Errors + /// Returns any I/O error from [`std::path::Path::canonicalize`]. pub fn try_from_path(path: &std::path::Path) -> std::io::Result { Ok(Self(path.canonicalize()?)) } @@ -149,16 +154,21 @@ impl CanonicalPath { } /// Convert to a file URI. - pub fn to_uri(&self) -> lsp_types::Uri { + /// + /// # Errors + /// Returns `Err(LspError::InvalidUri)` if the canonical path cannot be + /// represented as a valid URI string. + pub fn to_uri(&self) -> LspResult { let path_str = self.0.to_string_lossy(); // Create file:// URI let uri_string = format!("file://{path_str}"); uri_string .parse() - .expect("canonical path should produce valid URI") + .map_err(|_| LspError::InvalidUri(uri_string)) } /// Get the inner path. + #[must_use] pub fn as_path(&self) -> &std::path::Path { &self.0 } @@ -169,6 +179,7 @@ impl CanonicalPath { pub struct DocVersion(pub i32); impl DocVersion { + #[must_use] pub fn new(version: i32) -> Self { Self(version) } @@ -187,6 +198,10 @@ impl SymbolName { /// /// Returns an error if the name is empty, starts with a digit, /// contains invalid characters, or is a reserved keyword. + /// + /// # Errors + /// Returns `Err(LspError::InvalidIdentifier)` when the name is not a valid + /// Jsonnet identifier. pub fn new(s: &str) -> LspResult { validate_identifier(s)?; Ok(Self(s.to_string())) @@ -199,11 +214,8 @@ impl SymbolName { /// /// # Safety /// The caller must ensure the string is a valid Jsonnet identifier. 
+ #[must_use] pub fn from_token(s: &str) -> Self { - debug_assert!( - crate::error::is_valid_jsonnet_identifier(s), - "SymbolName::from_token called with invalid identifier: {s}" - ); Self(s.to_string()) } } diff --git a/crates/jrsonnet-lsp-handlers/src/code_lens.rs b/crates/jrsonnet-lsp-handlers/src/code_lens.rs index de6e7a5e..fe8303d4 100644 --- a/crates/jrsonnet-lsp-handlers/src/code_lens.rs +++ b/crates/jrsonnet-lsp-handlers/src/code_lens.rs @@ -44,6 +44,7 @@ impl ErrorLensVisibility { impl CodeLensConfig { /// Create a config that shows all code lenses. + #[must_use] pub fn all() -> Self { Self { show_references: true, @@ -136,7 +137,7 @@ fn reference_count_lenses(document: &Document, uri: &Uri) -> Vec { let title = if ref_count == 1 { "1 reference".to_string() } else { - format!("{} references", ref_count) + format!("{ref_count} references") }; lenses.push(CodeLens { @@ -145,9 +146,9 @@ fn reference_count_lenses(document: &Document, uri: &Uri) -> Vec { title, command: "jrsonnet.findReferences".to_string(), arguments: Some(vec![ - serde_json::to_value(uri.to_string()).unwrap(), - serde_json::to_value(range.start.line).unwrap(), - serde_json::to_value(range.start.character).unwrap(), + serde_json::json!(uri.to_string()), + serde_json::json!(range.start.line), + serde_json::json!(range.start.character), ]), }), data: None, @@ -181,7 +182,7 @@ fn evaluate_lens(document: &Document, uri: &Uri) -> Option { command: Some(Command { title: "Evaluate".to_string(), command: "jrsonnet.evalFile".to_string(), - arguments: Some(vec![serde_json::to_value(uri.to_string()).unwrap()]), + arguments: Some(vec![serde_json::json!(uri.to_string())]), }), data: None, }) @@ -216,7 +217,7 @@ fn error_status_lens(document: &Document, uri: &Uri) -> Option { let title = if error_count == 1 { "1 syntax error".to_string() } else { - format!("{} syntax errors", error_count) + format!("{error_count} syntax errors") }; Some(CodeLens { @@ -224,7 +225,7 @@ fn error_status_lens(document: 
&Document, uri: &Uri) -> Option { command: Some(Command { title, command: "jrsonnet.showErrors".to_string(), - arguments: Some(vec![serde_json::to_value(uri.to_string()).unwrap()]), + arguments: Some(vec![serde_json::json!(uri.to_string())]), }), data: None, }) @@ -267,7 +268,7 @@ fn type_lenses(document: &Document, analysis: &TypeAnalysis) -> Vec { lenses.push(CodeLens { range, command: Some(Command { - title: format!(":: {}", type_str), + title: format!(":: {type_str}"), command: String::new(), // No action, just informational arguments: None, }), @@ -316,7 +317,7 @@ fn type_lenses(document: &Document, analysis: &TypeAnalysis) -> Vec { lenses.push(CodeLens { range, command: Some(Command { - title: format!(":: {}", type_str), + title: format!(":: {type_str}"), command: String::new(), arguments: None, }), @@ -334,6 +335,7 @@ fn type_lenses(document: &Document, analysis: &TypeAnalysis) -> Vec { /// /// This is called when the client requests resolution of a code lens /// that was returned without a command. 
+#[must_use] pub fn resolve_code_lens(lens: CodeLens) -> CodeLens { // Our code lenses always include commands, so no resolution needed lens @@ -350,7 +352,7 @@ mod tests { use super::*; fn make_uri(name: &str) -> Uri { - format!("file:///test/{}.jsonnet", name).parse().unwrap() + format!("file:///test/{name}.jsonnet").parse().unwrap() } fn test_analysis(doc: &Document) -> TypeAnalysis { @@ -433,12 +435,7 @@ mod tests { .syntax() .descendants() .filter_map(BindFunction::cast) - .find(|bind_func| { - bind_func - .name() - .map(|n| n.syntax().text() == name) - .unwrap_or(false) - }) + .find(|bind_func| bind_func.name().is_some_and(|n| n.syntax().text() == name)) .expect("function binding should exist"); let name_node = bind_func.name().expect("function should have name"); let body = bind_func.value().expect("function should have body"); diff --git a/crates/jrsonnet-lsp-handlers/src/completion/fields.rs b/crates/jrsonnet-lsp-handlers/src/completion/fields.rs index 31ade04a..168406e7 100644 --- a/crates/jrsonnet-lsp-handlers/src/completion/fields.rs +++ b/crates/jrsonnet-lsp-handlers/src/completion/fields.rs @@ -50,7 +50,8 @@ pub fn check_object_field_completion( let ast = document.ast(); // Look for expression just before the dot (not at the dot) let before_dot_pos = if dot_pos > 0 { dot_pos - 1 } else { 0 }; - let before_dot_text_size = rowan::TextSize::from(before_dot_pos as u32); + let before_dot_offset = u32::try_from(before_dot_pos).ok()?; + let before_dot_text_size = rowan::TextSize::from(before_dot_offset); // Try to get fields from type inference if let Some(fields) = analysis.fields_at_position(ast.syntax(), before_dot_text_size) { @@ -72,7 +73,8 @@ pub fn check_object_field_completion( } // Fall back to AST-based field extraction for cases where type inference isn't enough - let fields = find_object_fields_for_identifier(ast.syntax(), identifier, dot_pos as u32)?; + let dot_offset = u32::try_from(dot_pos).ok()?; + let fields = 
find_object_fields_for_identifier(ast.syntax(), identifier, dot_offset)?; // Filter and convert to completion items let items = fields @@ -130,7 +132,7 @@ fn find_object_fields_for_identifier( None } -/// Check if a BindDestruct is for the given identifier and extract object fields. +/// Check if a `BindDestruct` is for the given identifier and extract object fields. fn check_bind_destruct_for_object(bind: &BindDestruct, identifier: &str) -> Option> { let destruct = bind.into()?; @@ -221,13 +223,13 @@ fn find_object_in_expr(node: &SyntaxNode) -> Option { None } -/// Extract field name from a MemberFieldNormal. +/// Extract field name from a `MemberFieldNormal`. fn extract_field_name(field: &MemberFieldNormal) -> Option { let field_name = field.field_name()?; extract_field_name_from_field_name(&field_name) } -/// Extract name string from a FieldName node. +/// Extract name string from a `FieldName` node. fn extract_field_name_from_field_name(field_name: &FieldName) -> Option { match field_name { FieldName::FieldNameFixed(fixed) => { diff --git a/crates/jrsonnet-lsp-handlers/src/completion/imports.rs b/crates/jrsonnet-lsp-handlers/src/completion/imports.rs index dc21ee62..b86544ef 100644 --- a/crates/jrsonnet-lsp-handlers/src/completion/imports.rs +++ b/crates/jrsonnet-lsp-handlers/src/completion/imports.rs @@ -130,6 +130,7 @@ fn import_search_roots(doc_path: Option<&Path>, import_roots: &[PathBuf]) -> Vec } /// Find the start of an import string, returning the position after the opening quote. 
+#[must_use] pub fn find_import_string_start(text: &str) -> Option { // Look backwards for import keyword followed by a string // Patterns: import ", import ', importstr ", importstr ', importbin ", importbin ' @@ -193,7 +194,8 @@ mod tests { fs::write(jpath.join("shared.libsonnet"), "{}").expect("jpath import should be created"); let source = r#"import "sh"#; - let items = check_import_completion(source, source.len() as u32, Some(&doc_path), &[jpath]) + let cursor_offset = u32::try_from(source.len()).expect("test source should fit in u32"); + let items = check_import_completion(source, cursor_offset, Some(&doc_path), &[jpath]) .expect("should have import completions"); let labels: Vec<_> = items.iter().map(|item| item.label.as_str()).collect(); @@ -215,7 +217,8 @@ mod tests { fs::write(jpath.join("dup.libsonnet"), "{}").expect("jpath import should be created"); let source = r#"import ""#; - let items = check_import_completion(source, source.len() as u32, Some(&doc_path), &[jpath]) + let cursor_offset = u32::try_from(source.len()).expect("test source should fit in u32"); + let items = check_import_completion(source, cursor_offset, Some(&doc_path), &[jpath]) .expect("should have import completions"); let dup_count = items diff --git a/crates/jrsonnet-lsp-handlers/src/completion/mod.rs b/crates/jrsonnet-lsp-handlers/src/completion/mod.rs index 426fe95a..be1cc8f7 100644 --- a/crates/jrsonnet-lsp-handlers/src/completion/mod.rs +++ b/crates/jrsonnet-lsp-handlers/src/completion/mod.rs @@ -154,7 +154,7 @@ mod tests { use super::*; - /// Create a TypeAnalysis for test purposes. + /// Create a `TypeAnalysis` for test purposes. 
fn test_analysis(doc: &Document) -> TypeAnalysis { let global_types = Arc::new(GlobalTyStore::new()); TypeAnalysis::analyze_with_global(doc, global_types) diff --git a/crates/jrsonnet-lsp-handlers/src/definition.rs b/crates/jrsonnet-lsp-handlers/src/definition.rs index 51006d45..58f6e336 100644 --- a/crates/jrsonnet-lsp-handlers/src/definition.rs +++ b/crates/jrsonnet-lsp-handlers/src/definition.rs @@ -209,6 +209,7 @@ pub enum BindingKind { } /// Collect all visible bindings at the given byte offset. +#[must_use] pub fn collect_visible_bindings(document: &Document, position: LspPosition) -> Vec { let text = document.text(); let line_index = document.line_index(); @@ -404,7 +405,7 @@ fn collect_function_params( } } -/// Collect BindFunction parameters. +/// Collect `BindFunction` parameters. fn collect_bind_function_params( func_node: &SyntaxNode, bindings: &mut Vec, @@ -502,7 +503,7 @@ fn collect_object_locals( } } -/// Collect bindings from comprehension FOR_SPEC children. +/// Collect bindings from comprehension `FOR_SPEC` children. 
fn collect_comprehension_bindings( comp_node: &SyntaxNode, bindings: &mut Vec, @@ -526,7 +527,7 @@ mod tests { fn expect_local(result: Option) -> Range { match result { Some(DefinitionResult::Local(r)) => r, - other => panic!("Expected Local definition, got {:?}", other), + other => panic!("Expected Local definition, got {other:?}"), } } @@ -607,7 +608,7 @@ mod tests { Some(DefinitionResult::Import(path)) => { assert_eq!(path, "lib/utils.libsonnet"); } - other => panic!("Expected Import definition, got {:?}", other), + other => panic!("Expected Import definition, got {other:?}"), } } @@ -623,7 +624,7 @@ mod tests { Some(DefinitionResult::Import(path)) => { assert_eq!(path, "data/config.txt"); } - other => panic!("Expected Import definition, got {:?}", other), + other => panic!("Expected Import definition, got {other:?}"), } } @@ -674,7 +675,7 @@ mod tests { assert_eq!(path, "lib.libsonnet"); assert_eq!(fields, vec!["foo"]); } - other => panic!("Expected ImportField definition, got {:?}", other), + other => panic!("Expected ImportField definition, got {other:?}"), } } @@ -693,7 +694,7 @@ mod tests { assert_eq!(path, "lib.libsonnet"); assert_eq!(fields, vec!["foo", "bar"]); } - other => panic!("Expected ImportField definition, got {:?}", other), + other => panic!("Expected ImportField definition, got {other:?}"), } } diff --git a/crates/jrsonnet-lsp-handlers/src/document_highlight.rs b/crates/jrsonnet-lsp-handlers/src/document_highlight.rs index b936194e..83db23e5 100644 --- a/crates/jrsonnet-lsp-handlers/src/document_highlight.rs +++ b/crates/jrsonnet-lsp-handlers/src/document_highlight.rs @@ -10,6 +10,7 @@ use jrsonnet_rowan_parser::{AstNode, SyntaxKind}; use lsp_types::{DocumentHighlight, DocumentHighlightKind}; /// Find document highlights for the symbol at the given position. 
+#[must_use] pub fn document_highlights(document: &Document, position: LspPosition) -> Vec { let text = document.text(); let line_index = document.line_index(); diff --git a/crates/jrsonnet-lsp-handlers/src/formatting.rs b/crates/jrsonnet-lsp-handlers/src/formatting.rs index ca41fc5f..a1461bdd 100644 --- a/crates/jrsonnet-lsp-handlers/src/formatting.rs +++ b/crates/jrsonnet-lsp-handlers/src/formatting.rs @@ -7,6 +7,10 @@ use std::process::{Command, Stdio}; use lsp_types::{Position, Range, TextEdit}; use serde::{Deserialize, Serialize}; +fn to_u32(value: usize) -> u32 { + u32::try_from(value).unwrap_or(u32::MAX) +} + /// Formatting configuration options. /// /// These options correspond to the go-jsonnet formatter (jsonnetfmt) options. @@ -71,6 +75,7 @@ pub struct FormattingConfig { /// /// Returns a list of text edits to apply to the document. /// On error, returns None. +#[must_use] pub fn format_document(text: &str) -> Option> { format_document_with_config(text, &FormattingConfig::default()) } @@ -79,6 +84,7 @@ pub fn format_document(text: &str) -> Option> { /// /// Returns a list of text edits to apply to the document. /// On error, returns None. 
+#[must_use] pub fn format_document_with_config(text: &str, config: &FormattingConfig) -> Option> { // Try to run the formatter let formatted = run_formatter(text, config)?; @@ -90,8 +96,8 @@ pub fn format_document_with_config(text: &str, config: &FormattingConfig) -> Opt // Return a single edit that replaces the entire document let lines: Vec<&str> = text.lines().collect(); - let last_line = lines.len().saturating_sub(1) as u32; - let last_col = lines.last().map_or(0, |l| l.len()) as u32; + let last_line = to_u32(lines.len().saturating_sub(1)); + let last_col = to_u32(lines.last().map_or(0, |l| l.len())); Some(vec![TextEdit { range: Range { diff --git a/crates/jrsonnet-lsp-handlers/src/hover.rs b/crates/jrsonnet-lsp-handlers/src/hover.rs index 8678e329..cf0ed13e 100644 --- a/crates/jrsonnet-lsp-handlers/src/hover.rs +++ b/crates/jrsonnet-lsp-handlers/src/hover.rs @@ -139,7 +139,8 @@ fn check_local_hover( } } - let preview_lines: Vec<&str> = lines[start_line..=def_end_line] + let preview_slice = lines.get(start_line..=def_end_line)?; + let preview_lines: Vec<&str> = preview_slice .iter() .take(MAX_HOVER_LINES) .copied() @@ -153,7 +154,7 @@ fn check_local_hover( Some(Hover { contents: HoverContents::Markup(MarkupContent { kind: MarkupKind::Markdown, - value: format!("{}```jsonnet\n{}\n```", type_str, preview), + value: format!("{type_str}```jsonnet\n{preview}\n```"), }), range: None, }) diff --git a/crates/jrsonnet-lsp-handlers/src/references.rs b/crates/jrsonnet-lsp-handlers/src/references.rs index e5791edb..475db8df 100644 --- a/crates/jrsonnet-lsp-handlers/src/references.rs +++ b/crates/jrsonnet-lsp-handlers/src/references.rs @@ -86,6 +86,7 @@ pub fn find_references( /// /// The `documents` parameter is a slice of (path, document reference) pairs representing /// all open documents to search. 
+#[must_use] pub fn find_cross_file_references<'a>( current_document: &Document, current_path: &CanonicalPath, @@ -137,7 +138,9 @@ pub fn find_cross_file_references<'a>( // Find imports in this document that point to our file let imports = find_imports_of_file(doc, doc_path.as_path(), current_path_str); - let doc_uri = doc_path.to_uri(); + let Ok(doc_uri) = doc_path.to_uri() else { + return Vec::new(); + }; let doc_text = doc.text(); let doc_line_index = doc.line_index(); diff --git a/crates/jrsonnet-lsp-handlers/src/rename.rs b/crates/jrsonnet-lsp-handlers/src/rename.rs index 1e36de0e..e52d797c 100644 --- a/crates/jrsonnet-lsp-handlers/src/rename.rs +++ b/crates/jrsonnet-lsp-handlers/src/rename.rs @@ -67,6 +67,7 @@ fn field_definition_range(token: &jrsonnet_rowan_parser::SyntaxToken) -> Option< /// Prepare rename response. /// Returns the range of the symbol to be renamed and its current name. +#[must_use] pub fn prepare_rename(document: &Document, position: LspPosition) -> Option { let text = document.text(); let line_index = document.line_index(); @@ -265,7 +266,7 @@ fn find_references_in_importer( use jrsonnet_rowan_parser::nodes::{ExprBase, ExprField, StmtLocal}; let doc = manager.get_document(importer_path)?; - let uri = importer_path.to_uri(); + let uri = importer_path.to_uri().ok()?; let text = doc.text(); let line_index = doc.line_index(); let ast = doc.ast(); @@ -790,7 +791,7 @@ mod tests { // Get the lib document let lib_doc = manager.get_document(&lib_canon).unwrap(); - let lib_uri = lib_canon.to_uri(); + let lib_uri = lib_canon.to_uri().expect("lib URI should be valid"); // Rename 'helper' in lib.jsonnet (position 2 is the 'h' in 'helper') // This is an object field, not a local variable, so local rename won't work @@ -837,7 +838,7 @@ mod tests { }], ); - let main_uri = main_canon.to_uri(); + let main_uri = main_canon.to_uri().expect("main URI should be valid"); expected_changes.insert( main_uri, vec![TextEdit { diff --git 
a/crates/jrsonnet-lsp-handlers/src/semantic_tokens.rs b/crates/jrsonnet-lsp-handlers/src/semantic_tokens.rs index 33bf28d0..403796c2 100644 --- a/crates/jrsonnet-lsp-handlers/src/semantic_tokens.rs +++ b/crates/jrsonnet-lsp-handlers/src/semantic_tokens.rs @@ -70,6 +70,10 @@ pub const TOKEN_MODIFIERS: &[lsp_types::SemanticTokenModifier] = &[ lsp_types::SemanticTokenModifier::DEFAULT_LIBRARY, ]; +fn to_u32(value: usize) -> u32 { + u32::try_from(value).unwrap_or(u32::MAX) +} + /// Modifier bit flags. mod token_modifier { pub const DECLARATION: u32 = 1 << 0; @@ -78,6 +82,7 @@ mod token_modifier { } /// Get the semantic tokens legend. +#[must_use] pub fn legend() -> SemanticTokensLegend { SemanticTokensLegend { token_types: TOKEN_TYPES.to_vec(), @@ -86,6 +91,7 @@ pub fn legend() -> SemanticTokensLegend { } /// Compute semantic tokens for a document. +#[must_use] pub fn semantic_tokens(document: &Document) -> SemanticTokens { let text = document.text(); let line_index = document.line_index(); @@ -104,6 +110,7 @@ pub fn semantic_tokens(document: &Document) -> SemanticTokens { } /// Compute semantic tokens for a specific range in a document. 
+#[must_use] pub fn semantic_tokens_range(document: &Document, range: Range) -> SemanticTokens { let text = document.text(); let line_index = document.line_index(); @@ -329,16 +336,16 @@ impl<'a> SemanticTokenBuilder<'a> { self.push_token_if_in_range(RawToken { line: start_pos.line.0, start_char: start_pos.character.0, - length: token_text.len() as u32, + length: to_u32(token_text.len()), token_type: token_type_u32, token_modifiers, }); } else { // Multi-line token - emit one token per line for (i, line) in lines.iter().enumerate() { - let line_num = start_pos.line.0 + i as u32; + let line_num = start_pos.line.0.saturating_add(to_u32(i)); let start_char = if i == 0 { start_pos.character.0 } else { 0 }; - let length = line.len() as u32; + let length = to_u32(line.len()); if length > 0 { self.push_token_if_in_range(RawToken { @@ -413,9 +420,8 @@ impl<'a> SemanticTokenBuilder<'a> { /// Classify a variable reference to determine its token type. fn classify_variable_reference(token: &SyntaxToken) -> TokenType { // Walk up the scope chain to find the definition - let mut current = match token.parent() { - Some(p) => p, - None => return TokenType::Variable, + let Some(mut current) = token.parent() else { + return TokenType::Variable; }; let name = token.text(); diff --git a/crates/jrsonnet-lsp-handlers/src/signature_help.rs b/crates/jrsonnet-lsp-handlers/src/signature_help.rs index 2cc61d60..dc2d1867 100644 --- a/crates/jrsonnet-lsp-handlers/src/signature_help.rs +++ b/crates/jrsonnet-lsp-handlers/src/signature_help.rs @@ -28,7 +28,12 @@ struct SignatureParamInfo { name: String, } +fn to_u32(value: usize) -> u32 { + u32::try_from(value).unwrap_or(u32::MAX) +} + /// Get signature help at the given position. +#[must_use] pub fn signature_help(document: &Document, position: LspPosition) -> Option { let text = document.text(); let line_index = document.line_index(); @@ -68,7 +73,7 @@ fn find_call_context( } } -/// Extract call information from an ExprCall node. 
+/// Extract call information from an `ExprCall` node. fn extract_call_info( call: &ExprCall, cursor_offset: rowan::TextSize, @@ -78,7 +83,7 @@ fn extract_call_info( Some((func_name, active_arg)) } -/// Extract the function name from the callee of an ExprCall. +/// Extract the function name from the callee of an `ExprCall`. fn extract_callee_name(call: &ExprCall) -> Option { let callee = call.callee()?; match callee.expr_base()? { @@ -88,7 +93,7 @@ fn extract_callee_name(call: &ExprCall) -> Option { } } -/// Extract the field name from an ExprField (returns just the field name, e.g., "length" from std.length). +/// Extract the field name from an `ExprField` (returns just the field name, e.g., "length" from std.length). fn extract_field_name(field: &ExprField) -> Option { Some(field.field()?.ident_lit()?.text().to_string()) } @@ -100,8 +105,8 @@ fn active_arg_for_call(call: &ExprCall, cursor_offset: rowan::TextSize) -> Activ let positional_index = positional_arg_index(&args_desc, cursor_offset); let named_arg = args_desc .args() - .nth(positional_index as usize) - .and_then(arg_name); + .nth(usize::try_from(positional_index).unwrap_or(usize::MAX)) + .and_then(|arg| arg_name(&arg)); ActiveArg { positional_index, @@ -118,25 +123,27 @@ fn positional_arg_index(args_desc: &ArgsDesc, cursor_offset: rowan::TextSize) -> for (index, arg) in args.iter().enumerate() { if cursor_offset <= arg.syntax().text_range().end() { - return index as u32; + return to_u32(index); } } count_preceding_commas(args_desc, cursor_offset) } -fn arg_name(arg: Arg) -> Option { +fn arg_name(arg: &Arg) -> Option { Some(arg.name()?.ident_lit()?.text().to_string()) } /// Count top-level commas before the cursor inside an argument list. 
fn count_preceding_commas(args_desc: &ArgsDesc, cursor_offset: rowan::TextSize) -> u32 { - args_desc - .syntax() - .children_with_tokens() - .filter_map(rowan::NodeOrToken::into_token) - .filter(|t| t.kind() == SyntaxKind::COMMA && t.text_range().end() <= cursor_offset) - .count() as u32 + to_u32( + args_desc + .syntax() + .children_with_tokens() + .filter_map(rowan::NodeOrToken::into_token) + .filter(|t| t.kind() == SyntaxKind::COMMA && t.text_range().end() <= cursor_offset) + .count(), + ) } /// Get signature information for a function. @@ -215,11 +222,11 @@ fn resolve_active_parameter( if let Some(named_arg) = active_arg.named_arg.as_deref() { if let Some(index) = params.iter().position(|param| param.name == named_arg) { - return index as u32; + return to_u32(index); } } - let max_index = params.len().saturating_sub(1) as u32; + let max_index = to_u32(params.len().saturating_sub(1)); active_arg.positional_index.min(max_index) } @@ -360,7 +367,7 @@ fn check_bind_for_function(bind: &Bind, name: &str) -> Option Option { let params_desc = func.params()?; let params: Vec = params_desc @@ -371,7 +378,7 @@ fn extract_params_from_bind_function(func: &BindFunction) -> Option Option { @@ -425,10 +432,12 @@ mod tests { source.remove(cursor); let before = &code_with_cursor[..cursor]; - let line = before.bytes().filter(|&b| b == b'\n').count() as u32; - let column = before - .rsplit_once('\n') - .map_or(before.len(), |(_, suffix)| suffix.len()) as u32; + let line = to_u32(before.bytes().filter(|&b| b == b'\n').count()); + let column = to_u32( + before + .rsplit_once('\n') + .map_or(before.len(), |(_, suffix)| suffix.len()), + ); ( Document::new(source, DocVersion::new(1)), diff --git a/crates/jrsonnet-lsp-handlers/src/symbols.rs b/crates/jrsonnet-lsp-handlers/src/symbols.rs index a701ed84..c561aef0 100644 --- a/crates/jrsonnet-lsp-handlers/src/symbols.rs +++ b/crates/jrsonnet-lsp-handlers/src/symbols.rs @@ -22,6 +22,7 @@ use lsp_types::{DocumentSymbol, Location, 
SymbolInformation, SymbolKind, Uri}; use rowan::TextRange; /// Extract document symbols from a parsed document. +#[must_use] pub fn document_symbols(document: &Document) -> Vec { let ast = document.ast(); let text = document.text(); @@ -65,7 +66,9 @@ fn process_local_stmt( if binds.len() == 1 { // Single binding - return it directly - process_bind(&binds[0], text, line_index) + binds + .first() + .and_then(|bind| process_bind(bind, text, line_index)) } else if !binds.is_empty() { // Multiple bindings - create a container let range = local.syntax().text_range(); @@ -349,7 +352,7 @@ fn get_destruct_name(destruct: &jrsonnet_rowan_parser::nodes::Destruct) -> Optio } } -/// Create a DocumentSymbol with the given properties. +/// Create a `DocumentSymbol` with the given properties. fn create_symbol( name: String, kind: SymbolKind, @@ -372,7 +375,7 @@ fn create_symbol( } /// Search for symbols matching a query across a document. -/// Returns a flat list of SymbolInformation. +/// Returns a flat list of `SymbolInformation`. pub fn workspace_symbols_for_document( document: &Document, uri: &Uri, @@ -387,7 +390,7 @@ pub fn workspace_symbols_for_document( results } -/// Recursively flatten DocumentSymbol tree into SymbolInformation list. +/// Recursively flatten `DocumentSymbol` tree into `SymbolInformation` list. fn flatten_symbols( symbols: &[DocumentSymbol], uri: &Uri, diff --git a/crates/jrsonnet-lsp-import/src/graph.rs b/crates/jrsonnet-lsp-import/src/graph.rs index 9c796b50..c4a1ca4e 100644 --- a/crates/jrsonnet-lsp-import/src/graph.rs +++ b/crates/jrsonnet-lsp-import/src/graph.rs @@ -43,6 +43,7 @@ pub struct ImportGraph { impl ImportGraph { /// Create a new empty import graph. + #[must_use] pub fn new() -> Self { Self::default() } @@ -111,6 +112,7 @@ impl ImportGraph { } /// Get the files that directly import a given file. 
+ #[must_use] pub fn direct_importers(&self, path: &CanonicalPath) -> Vec { self.imported_by .get(path) @@ -122,6 +124,7 @@ impl ImportGraph { /// /// This performs a breadth-first search through the import graph /// to find all files that depend on the given file, directly or indirectly. + #[must_use] pub fn transitive_importers(&self, path: &CanonicalPath) -> HashSet { let mut result = HashSet::new(); let mut queue = VecDeque::from([path.clone()]); @@ -139,10 +142,11 @@ impl ImportGraph { /// Get the import entries for a file. pub fn imports(&self, path: &CanonicalPath) -> &[ImportEntry] { - self.imports.get(path).map(Vec::as_slice).unwrap_or(&[]) + self.imports.get(path).map_or(&[], Vec::as_slice) } /// Find imports in a file that point to a specific target file. + #[must_use] pub fn imports_of_target( &self, file: &CanonicalPath, @@ -160,6 +164,7 @@ impl ImportGraph { } /// Get the number of files tracked in the graph. + #[must_use] pub fn file_count(&self) -> usize { self.imports.len() } @@ -176,6 +181,7 @@ impl ImportGraph { /// in parallel. /// /// Returns `None` if there's a cycle in the import graph. 
+ #[must_use] pub fn topological_order(&self) -> Option>> { let mut in_degree: HashMap<&CanonicalPath, usize> = HashMap::new(); let mut levels: Vec> = Vec::new(); @@ -193,8 +199,7 @@ impl ImportGraph { .filter(|e| { e.resolved_path .as_ref() - .map(|p| self.imports.contains_key(p)) - .unwrap_or(false) + .is_some_and(|p| self.imports.contains_key(p)) }) .count(); in_degree.insert(path, dep_count); @@ -230,20 +235,14 @@ impl ImportGraph { continue; } // Check if all dependencies of importer are processed - let all_deps_processed = self - .imports - .get(importer) - .map(|entries| { + let all_deps_processed = + self.imports.get(importer).map_or(true, |entries| { entries.iter().all(|e| { - e.resolved_path - .as_ref() - .map(|p| { - processed.contains(p) || !self.imports.contains_key(p) - }) - .unwrap_or(true) + e.resolved_path.as_ref().map_or(true, |p| { + processed.contains(p) || !self.imports.contains_key(p) + }) }) - }) - .unwrap_or(true); + }); if all_deps_processed && !next_level.contains(importer) { next_level.push(importer.clone()); diff --git a/crates/jrsonnet-lsp-import/src/parse.rs b/crates/jrsonnet-lsp-import/src/parse.rs index 5e1f0501..870af0c0 100644 --- a/crates/jrsonnet-lsp-import/src/parse.rs +++ b/crates/jrsonnet-lsp-import/src/parse.rs @@ -18,6 +18,7 @@ use jrsonnet_rowan_parser::{ /// // For `import @'bar.jsonnet'` returns Some("bar.jsonnet") /// let path = extract_import_path(&import_expr); /// ``` +#[must_use] pub fn extract_import_path(import: &ExprImport) -> Option { let text_token = import.text()?; let text = text_token.text(); @@ -27,6 +28,7 @@ pub fn extract_import_path(import: &ExprImport) -> Option { /// Find an import expression within a syntax node's descendants. /// /// Returns the first `ExprImport` found, or `None` if no import exists. 
+#[must_use] pub fn find_import_in_node(node: &SyntaxNode) -> Option { for descendant in node.descendants() { if descendant.kind() == SyntaxKind::EXPR_IMPORT { @@ -40,6 +42,7 @@ pub fn find_import_in_node(node: &SyntaxNode) -> Option { /// /// This is useful for handling "go to definition" from within an import string. /// Returns `Some((import_expr, path))` if the token is inside an import, `None` otherwise. +#[must_use] pub fn check_import_from_token(token: &SyntaxToken) -> Option<(ExprImport, String)> { // Must be a string token let kind = token.kind(); @@ -67,6 +70,7 @@ pub fn check_import_from_token(token: &SyntaxToken) -> Option<(ExprImport, Strin /// Get the import path from a syntax node if it contains an import. /// /// This searches the node's descendants for an import expression and returns its path. +#[must_use] pub fn get_import_path_from_node(node: &SyntaxNode) -> Option { let import = find_import_in_node(node)?; extract_import_path(&import) @@ -76,6 +80,7 @@ pub fn get_import_path_from_node(node: &SyntaxNode) -> Option { /// /// This is a convenience wrapper around [`check_import_from_token`] that just /// returns the path string. +#[must_use] pub fn check_import_path(token: &SyntaxToken) -> Option { check_import_from_token(token).map(|(_, path)| path) } diff --git a/crates/jrsonnet-lsp-import/src/work_queue.rs b/crates/jrsonnet-lsp-import/src/work_queue.rs index 24bb3bd9..8d04ffc3 100644 --- a/crates/jrsonnet-lsp-import/src/work_queue.rs +++ b/crates/jrsonnet-lsp-import/src/work_queue.rs @@ -69,6 +69,7 @@ where T: Eq + Hash, { /// Create a new empty work queue. 
+ #[must_use] pub fn new() -> Self { Self { actions: Vec::new(), @@ -178,7 +179,12 @@ where let mut levels: Vec> = vec![Vec::new(); max_level + 1]; for (item, level) in self.item_levels { - levels[level].push(item); + if levels.len() <= level { + levels.resize_with(level + 1, Vec::new); + } + if let Some(items) = levels.get_mut(level) { + items.push(item); + } } levels @@ -308,9 +314,9 @@ mod tests { assert_eq!(levels.len(), 2); assert_eq!(levels[1], vec![1]); - let mut level0 = levels[0].clone(); - level0.sort_unstable(); - assert_eq!(level0, vec![2, 3]); + let mut first_level = levels[0].clone(); + first_level.sort_unstable(); + assert_eq!(first_level, vec![2, 3]); } #[test] @@ -335,9 +341,9 @@ mod tests { assert_eq!(levels[0], vec![4]); // 4 (leaf) assert_eq!(levels[2], vec![1]); // 1 (root) - let mut level1 = levels[1].clone(); - level1.sort_unstable(); - assert_eq!(level1, vec![2, 3]); // 2 and 3 + let mut middle_level = levels[1].clone(); + middle_level.sort_unstable(); + assert_eq!(middle_level, vec![2, 3]); // 2 and 3 } #[test] diff --git a/crates/jrsonnet-lsp-inference/src/analysis.rs b/crates/jrsonnet-lsp-inference/src/analysis.rs index b9654b36..568eca29 100644 --- a/crates/jrsonnet-lsp-inference/src/analysis.rs +++ b/crates/jrsonnet-lsp-inference/src/analysis.rs @@ -32,7 +32,7 @@ use crate::{ /// and cached in concurrent data structures like moka. pub struct TypeAnalysis { /// Type store for interning and looking up types. - /// Uses RwLock because some query operations (like union) may intern new types. + /// Uses `RwLock` because some query operations (like union) may intern new types. store: RwLock, /// Map from expression text range to interned type. /// Immutable after construction. @@ -59,6 +59,7 @@ impl Default for TypeAnalysis { impl TypeAnalysis { /// Create a new empty type analysis with a default global store. 
+ #[must_use] pub fn new() -> Self { Self { store: RwLock::new(MutStore::new(Arc::new(GlobalTyStore::new()))), @@ -77,6 +78,7 @@ impl TypeAnalysis { } /// Analyze a document and return the type analysis results. + #[must_use] pub fn analyze(document: &Document) -> Self { Self::analyze_with_global(document, Arc::new(GlobalTyStore::new())) } @@ -124,7 +126,7 @@ impl TypeAnalysis { document_type: Ty, ) -> Self { let local = store.into_local(); - let subst = TySubst::merge(global.as_ref(), local); + let subst = TySubst::merge(global.as_ref(), &local); let map_ty = |ty: Ty| { let mapped = subst.apply(ty); @@ -140,7 +142,7 @@ impl TypeAnalysis { } let document_type = map_ty(document_type); - let document_type = GlobalTy::new(document_type).expect("document type must be global"); + let document_type = GlobalTy::new(document_type).unwrap_or(GlobalTy::ANY); Self { store: RwLock::new(MutStore::new(global)), @@ -269,7 +271,7 @@ impl TypeAnalysis { self.store.read().is_indexable(ty) } - /// Check if a type supports field access (object or attrs_of). + /// Check if a type supports field access (object or `attrs_of`). #[inline] pub fn supports_field_access(&self, ty: Ty) -> bool { self.store.read().supports_field_access(ty) @@ -291,7 +293,7 @@ impl TypeAnalysis { f(&mut self.store.write()) } - /// Execute a function with immutable access to the MutStore. + /// Execute a function with immutable access to the `MutStore`. pub fn with_store(&self, f: impl FnOnce(&MutStore) -> R) -> R { f(&self.store.read()) } @@ -397,7 +399,7 @@ impl TypeAnalysis { matches!(ty_data, TyData::Array { .. }) } - /// Check if a type is a string type (including Char and LiteralString). + /// Check if a type is a string type (including Char and `LiteralString`). 
#[inline] pub fn is_string(&self, ty: Ty) -> bool { let ty_data = self.store.read().get(ty); diff --git a/crates/jrsonnet-lsp-inference/src/const_eval.rs b/crates/jrsonnet-lsp-inference/src/const_eval.rs index 0e49abca..ba2ff9f8 100644 --- a/crates/jrsonnet-lsp-inference/src/const_eval.rs +++ b/crates/jrsonnet-lsp-inference/src/const_eval.rs @@ -87,18 +87,21 @@ impl EvalContext { /// - Field accesses on objects /// - Import expressions /// - Local bindings +#[must_use] pub fn trace_expr(expr: &Expr, document: &Document) -> Option { let mut ctx = EvalContext::new(); trace_expr_inner(expr, document, &mut ctx) } -/// Trace from an ExprBase. +/// Trace from an `ExprBase`. +#[must_use] pub fn trace_base(base: &ExprBase, document: &Document) -> Option { let mut ctx = EvalContext::new(); trace_base_inner(base, document, &mut ctx) } /// Trace from an identifier token to what it refers to. +#[must_use] pub fn trace_ident(token: &SyntaxToken, document: &Document) -> Option { if token.kind() != SyntaxKind::IDENT { return None; @@ -204,7 +207,8 @@ fn trace_field( ctx: &mut EvalContext, ) -> Option { // Get the field name - let field_name = extract_field_name(field.field()?)?; + let field_name_node = field.field()?; + let field_name = extract_field_name(&field_name_node)?; // Get the base expression let base_expr = field.base()?; @@ -391,7 +395,7 @@ fn find_field_in_member_list( None } -/// Extract the key name from a FieldName node. +/// Extract the key name from a `FieldName` node. fn extract_field_key_name(field_name: &FieldName) -> Option { match field_name { FieldName::FieldNameFixed(fixed) => { @@ -411,7 +415,7 @@ fn extract_field_key_name(field_name: &FieldName) -> Option { } /// Extract a field name from a Name node. 
-fn extract_field_name(name: Name) -> Option { +fn extract_field_name(name: &Name) -> Option { let ident = name.ident_lit()?; Some(ident.text().to_string()) } diff --git a/crates/jrsonnet-lsp-inference/src/env.rs b/crates/jrsonnet-lsp-inference/src/env.rs index b95fd3c5..b4c44b5b 100644 --- a/crates/jrsonnet-lsp-inference/src/env.rs +++ b/crates/jrsonnet-lsp-inference/src/env.rs @@ -44,7 +44,7 @@ pub struct TypeEnv { /// Current depth of function body inference (to prevent infinite recursion). function_depth: usize, /// Constraints observed on parameters (parameter name -> observed type constraints). - /// Only populated when in_constraint_mode is true. + /// Only populated when `in_constraint_mode` is true. constraints: FxHashMap>, /// Whether we're currently tracking parameter constraints. in_constraint_mode: bool, @@ -101,6 +101,7 @@ impl TypeEnv { /// Create a new type environment with a default global store. /// /// This is primarily for tests and backwards compatibility. + #[must_use] pub fn new_default() -> Self { Self::new(Arc::new(GlobalTyStore::new())) } @@ -109,11 +110,13 @@ impl TypeEnv { /// /// Returns `Some(ty)` if the resolver is set and the import is cached, /// `None` otherwise. + #[must_use] pub fn resolve_import(&self, import_path: &str) -> Option { self.import_resolver.as_ref()?.resolve_import(import_path) } /// Get an immutable reference to the type store. + #[must_use] pub fn store(&self) -> &MutStore { &self.store } @@ -123,14 +126,16 @@ impl TypeEnv { &mut self.store } - /// Consume and return the MutStore. + /// Consume and return the `MutStore`. /// /// This is used when transferring the store after analysis. + #[must_use] pub fn into_store(self) -> MutStore { self.store } /// Check if we can infer function bodies (not too deep). 
+ #[must_use] pub fn can_infer_function_body(&self) -> bool { self.function_depth < MAX_FUNCTION_INFERENCE_DEPTH } @@ -146,6 +151,7 @@ impl TypeEnv { } /// Check if a function type has been cached for the given range. + #[must_use] pub fn get_cached_function(&self, range: TextRange) -> Option { self.function_cache.get(&range).copied() } @@ -156,6 +162,7 @@ impl TypeEnv { } /// Check if a function is currently being inferred (cycle detection). + #[must_use] pub fn is_function_in_progress(&self, range: TextRange) -> bool { self.functions_in_progress.contains(&range) } @@ -198,6 +205,7 @@ impl TypeEnv { } /// Check if a variable is being tracked for constraints. + #[must_use] pub fn is_tracked_param(&self, var_name: &str) -> bool { self.in_constraint_mode && self.tracked_params.contains(var_name) } @@ -222,6 +230,7 @@ impl TypeEnv { } /// Look up a variable in all scopes (innermost first). + #[must_use] pub fn lookup(&self, name: &str) -> Option { for scope in self.scopes.iter().rev() { if let Some(&ty) = scope.get(name) { @@ -274,16 +283,19 @@ impl TypeEnv { } /// Get the interned `Ty` for `self` in the current context. + #[must_use] pub fn self_ty(&self) -> Option { self.object_context.last().map(|ctx| ctx.self_type) } /// Get the interned `Ty` for `super` in the current context. + #[must_use] pub fn super_ty(&self) -> Option { self.object_context.last().and_then(|ctx| ctx.super_type) } /// Get the interned `Ty` for `$` (root object) in the current context. + #[must_use] pub fn root_ty(&self) -> Option { self.object_context.first().map(|ctx| ctx.self_type) } diff --git a/crates/jrsonnet-lsp-inference/src/expr.rs b/crates/jrsonnet-lsp-inference/src/expr.rs index b7e27379..6bbcf0ea 100644 --- a/crates/jrsonnet-lsp-inference/src/expr.rs +++ b/crates/jrsonnet-lsp-inference/src/expr.rs @@ -39,6 +39,7 @@ fn apply_facts_to_env(facts: &Facts, env: &mut TypeEnv) { /// Infer the type of a document's root expression, returning an interned `Ty` and the environment. 
/// /// This is useful for tests that need to inspect the type structure using `TyData`. +#[must_use] pub fn infer_document_type_ty(document: &Document) -> (Ty, TypeEnv) { let ast = document.ast(); let mut env = TypeEnv::new_default(); @@ -87,7 +88,7 @@ pub fn infer_expr_ty_with_expected( // Get the base expression type if let Some(base) = expr.expr_base() { - infer_base_ty(base, env, expected) + infer_base_ty(&base, env, expected) } else { Ty::ANY } @@ -102,10 +103,7 @@ pub(super) fn infer_bind_type_ty(bind: &Bind, env: &mut TypeEnv) { if let Some(name_node) = full.name() { if let Some(ident) = name_node.ident_lit() { let name = ident.text().to_string(); - let ty = bd - .value() - .map(|v| infer_expr_ty(&v, env)) - .unwrap_or(Ty::ANY); + let ty = bd.value().map_or(Ty::ANY, |v| infer_expr_ty(&v, env)); env.define_ty(name, ty); } } @@ -206,10 +204,7 @@ fn lookup_destruct_field_type_ty( return Ty::ANY; }; - obj_data - .get_field(ident.text()) - .map(|fd| fd.ty) - .unwrap_or(Ty::ANY) + obj_data.get_field(ident.text()).map_or(Ty::ANY, |fd| fd.ty) } /// Check if an expression is guaranteed to diverge (never return). @@ -223,8 +218,8 @@ pub fn is_divergent(expr: &jrsonnet_rowan_parser::nodes::Expr, env: &mut TypeEnv /// Infer the type of a base expression with an optional expected type, returning `Ty`. /// /// This is the efficient internal version that works with interned types throughout. 
-pub(super) fn infer_base_ty(base: ExprBase, env: &mut TypeEnv, expected: Option) -> Ty { - match &base { +pub(super) fn infer_base_ty(base: &ExprBase, env: &mut TypeEnv, expected: Option) -> Ty { + match base { // Primitives - return constants directly ExprBase::ExprLiteral(lit) => { if let Some(literal) = lit.literal() { @@ -302,10 +297,7 @@ pub(super) fn infer_base_ty(base: ExprBase, env: &mut TypeEnv, expected: Option< // Unary operators ExprBase::ExprUnary(unary) => { - let rhs_ty = unary - .rhs() - .map(|rhs| infer_expr_ty(&rhs, env)) - .unwrap_or(Ty::ANY); + let rhs_ty = unary.rhs().map_or(Ty::ANY, |rhs| infer_expr_ty(&rhs, env)); if rhs_ty == Ty::NEVER { return Ty::NEVER; } @@ -357,17 +349,11 @@ fn infer_binary_expr_base_ty( binary: &jrsonnet_rowan_parser::nodes::ExprBinary, env: &mut TypeEnv, ) -> Ty { - let lhs_ty = binary - .lhs() - .map(|e| infer_expr_ty(&e, env)) - .unwrap_or(Ty::ANY); + let lhs_ty = binary.lhs().map_or(Ty::ANY, |e| infer_expr_ty(&e, env)); if lhs_ty == Ty::NEVER { return Ty::NEVER; } - let rhs_ty = binary - .rhs() - .map(|e| infer_expr_ty(&e, env)) - .unwrap_or(Ty::ANY); + let rhs_ty = binary.rhs().map_or(Ty::ANY, |e| infer_expr_ty(&e, env)); if rhs_ty == Ty::NEVER { return Ty::NEVER; } @@ -431,10 +417,7 @@ fn infer_index_expr_base_ty( idx: &jrsonnet_rowan_parser::nodes::ExprIndex, env: &mut TypeEnv, ) -> Ty { - let base_ty = idx - .base() - .map(|e| infer_expr_ty(&e, env)) - .unwrap_or(Ty::ANY); + let base_ty = idx.base().map_or(Ty::ANY, |e| infer_expr_ty(&e, env)); if base_ty == Ty::NEVER { return Ty::NEVER; } @@ -462,10 +445,7 @@ fn infer_slice_expr_base_ty( slice: &jrsonnet_rowan_parser::nodes::ExprSlice, env: &mut TypeEnv, ) -> Ty { - let base_ty = slice - .base() - .map(|e| infer_expr_ty(&e, env)) - .unwrap_or(Ty::ANY); + let base_ty = slice.base().map_or(Ty::ANY, |e| infer_expr_ty(&e, env)); if base_ty == Ty::NEVER { return Ty::NEVER; } @@ -573,10 +553,7 @@ fn infer_field_expr_base_ty( } } - let base_ty = field - .base() - 
.map(|e| infer_expr_ty(&e, env)) - .unwrap_or(Ty::ANY); + let base_ty = field.base().map_or(Ty::ANY, |e| infer_expr_ty(&e, env)); if base_ty == Ty::NEVER { return Ty::NEVER; } @@ -607,10 +584,7 @@ fn infer_field_expr_base_ty( } fn infer_call_expr_base_ty(call: &jrsonnet_rowan_parser::nodes::ExprCall, env: &mut TypeEnv) -> Ty { - let base_ty = call - .callee() - .map(|e| infer_expr_ty(&e, env)) - .unwrap_or(Ty::ANY); + let base_ty = call.callee().map_or(Ty::ANY, |e| infer_expr_ty(&e, env)); if base_ty == Ty::NEVER { return Ty::NEVER; } @@ -771,10 +745,7 @@ fn infer_array_comp_expr_base_ty( for comp_spec in comp.comp_specs() { if let jrsonnet_rowan_parser::nodes::CompSpec::ForSpec(for_spec) = comp_spec { - let iter_ty = for_spec - .expr() - .map(|e| infer_expr_ty(&e, env)) - .unwrap_or(Ty::ANY); + let iter_ty = for_spec.expr().map_or(Ty::ANY, |e| infer_expr_ty(&e, env)); let elem_ty = { let store = env.store_mut(); @@ -795,10 +766,7 @@ fn infer_array_comp_expr_base_ty( } } - let body_ty = comp - .expr() - .map(|e| infer_expr_ty(&e, env)) - .unwrap_or(Ty::ANY); + let body_ty = comp.expr().map_or(Ty::ANY, |e| infer_expr_ty(&e, env)); env.pop_scope(); env.store_mut().array(body_ty) @@ -901,10 +869,11 @@ fn infer_obj_extend_expr_base_ty( extend: &jrsonnet_rowan_parser::nodes::ExprObjExtend, env: &mut TypeEnv, ) -> Ty { - let base_ty = extend - .lhs_work() - .map(|e| infer_expr_ty(&e, env)) - .unwrap_or_else(|| env.store_mut().object_any()); + let base_ty = if let Some(base_expr) = extend.lhs_work() { + infer_expr_ty(&base_expr, env) + } else { + env.store_mut().object_any() + }; if base_ty == Ty::NEVER { return Ty::NEVER; @@ -959,14 +928,14 @@ mod tests { use super::*; - /// Assert that an ObjectData has exactly the specified field names. + /// Assert that an `ObjectData` has exactly the specified field names. 
fn assert_fields_ty(obj: &ObjectData, expected: &[&str]) { let actual: BTreeSet<_> = obj.fields.iter().map(|(name, _)| name.as_str()).collect(); let expected: BTreeSet<_> = expected.iter().copied().collect(); assert_eq!(actual, expected, "Field mismatch"); } - /// Get TyData from TypeEnv for a Ty. + /// Get `TyData` from `TypeEnv` for a Ty. fn get_ty_data(env: &TypeEnv, ty: Ty) -> TyData { env.store().get(ty) } @@ -987,7 +956,7 @@ mod tests { } } - /// Try to extract array info (element type and is_set flag). + /// Try to extract array info (element type and `is_set` flag). fn try_array_with_set(env: &TypeEnv, ty: Ty) -> Option<(Ty, bool)> { match env.store().get(ty) { TyData::Array { elem, is_set } => Some((elem, is_set)), @@ -995,7 +964,7 @@ mod tests { } } - /// Try to extract ObjectData. + /// Try to extract `ObjectData`. fn try_object(env: &TypeEnv, ty: Ty) -> Option { match env.store().get(ty) { TyData::Object(obj) => Some(obj), @@ -1003,7 +972,7 @@ mod tests { } } - /// Try to extract FunctionData. + /// Try to extract `FunctionData`. fn try_function(env: &TypeEnv, ty: Ty) -> Option { match env.store().get(ty) { TyData::Function(func) => Some(func), @@ -1019,7 +988,7 @@ mod tests { } } - /// Get field definition from ObjectData by name. + /// Get field definition from `ObjectData` by name. fn get_field_ty<'a>(obj: &'a ObjectData, name: &str) -> Option<&'a FieldDefInterned> { obj.fields .iter() @@ -1027,7 +996,7 @@ mod tests { .map(|(_, def)| def) } - /// Helper to infer document type and return (Ty, TypeEnv) for testing. + /// Helper to infer document type and return (Ty, `TypeEnv`) for testing. fn infer_doc(code: &str) -> (Ty, TypeEnv) { let doc = Document::new(code.to_string(), DocVersion::new(1)); infer_document_type_ty(&doc) @@ -1527,7 +1496,7 @@ mod tests { } /// Helper to infer an expression with an expected type (Ty-native) - /// The expected_builder is called with the env's store to create the expected type. 
+ /// The `expected_builder` is called with the env's store to create the expected type. fn infer_with_expected_fn(code: &str, expected_builder: F) -> (Ty, TypeEnv) where F: FnOnce(&mut MutStore) -> Ty, @@ -1696,8 +1665,7 @@ mod tests { // Verify complete structure: exactly [Number, Tuple([Number])] assert!( matches!(elems.as_slice(), [first, second] if *first == Ty::NUMBER && try_tuple(&env, *second).expect("expected tuple") == vec![Ty::NUMBER]), - "Expected [Number, Tuple([Number])], got {:?}", - elems + "Expected [Number, Tuple([Number])], got {elems:?}" ); } @@ -1961,8 +1929,8 @@ mod tests { ) { let (ty, env) = infer_doc(code); let (elem, is_set) = try_array_with_set(&env, ty).expect("expected array type"); - assert_eq!(is_set, expected_is_set, "is_set mismatch for: {}", code); - assert_eq!(elem, expected_elem, "element type mismatch for: {}", code); + assert_eq!(is_set, expected_is_set, "is_set mismatch for: {code}"); + assert_eq!(elem, expected_elem, "element type mismatch for: {code}"); } #[test] diff --git a/crates/jrsonnet-lsp-inference/src/flow.rs b/crates/jrsonnet-lsp-inference/src/flow.rs index ff4ac4e8..014cf7e2 100644 --- a/crates/jrsonnet-lsp-inference/src/flow.rs +++ b/crates/jrsonnet-lsp-inference/src/flow.rs @@ -104,6 +104,7 @@ impl PrimFact { impl Fact { /// Create a null fact. + #[must_use] pub fn null() -> Self { Self { repr: FactRepr::Prim(PrimFact::Null, Totality::Total), @@ -111,6 +112,7 @@ impl Fact { } /// Create a number fact with given totality. + #[must_use] pub fn number(totality: Totality) -> Self { Self { repr: FactRepr::Prim(PrimFact::Number, totality), @@ -118,6 +120,7 @@ impl Fact { } /// Create a string fact with given totality. + #[must_use] pub fn string(totality: Totality) -> Self { Self { repr: FactRepr::Prim(PrimFact::String, totality), @@ -125,6 +128,7 @@ impl Fact { } /// Create an array fact with given totality. 
+ #[must_use] pub fn array(totality: Totality) -> Self { Self { repr: FactRepr::Prim(PrimFact::Array, totality), @@ -132,6 +136,7 @@ impl Fact { } /// Create an object fact with given totality. + #[must_use] pub fn object(totality: Totality) -> Self { Self { repr: FactRepr::Prim(PrimFact::Object, totality), @@ -139,6 +144,7 @@ impl Fact { } /// Create a function fact. + #[must_use] pub fn function() -> Self { Self { repr: FactRepr::Prim(PrimFact::Function, Totality::Total), @@ -146,6 +152,7 @@ impl Fact { } /// Create a boolean fact. + #[must_use] pub fn boolean() -> Self { Self { repr: FactRepr::Prim(PrimFact::Bool, Totality::Total), @@ -153,6 +160,7 @@ impl Fact { } /// Create a fact that an object has a field. + #[must_use] pub fn has_field(field: String) -> Self { Self { repr: FactRepr::HasField { @@ -163,6 +171,7 @@ impl Fact { } /// Create a fact that an object has a field with a specific type. + #[must_use] pub fn has_field_typed(field: String, field_fact: Fact) -> Self { Self { repr: FactRepr::HasField { @@ -174,6 +183,7 @@ impl Fact { /// Create a fact that a value has a specific length. /// Applies to arrays, strings, and objects. + #[must_use] pub fn has_len(len: usize) -> Self { Self { repr: FactRepr::HasLen(len), @@ -182,6 +192,7 @@ impl Fact { /// Create a fact that a value has at least a minimum length. /// Useful for non-empty checks like `std.length(x) > 0`. + #[must_use] pub fn min_len(min: usize) -> Self { Self { repr: FactRepr::MinLen(min), @@ -228,6 +239,7 @@ impl Fact { /// Create a fact that a value equals a specific boolean literal. /// Used for `x == true` or `x == false` patterns. + #[must_use] pub fn literal_bool(value: bool) -> Self { Self { repr: FactRepr::LiteralBool(value), @@ -236,6 +248,7 @@ impl Fact { /// Create a fact that a value equals a specific string literal. /// Used for `x == "literal"` patterns. 
+ #[must_use] pub fn literal_string(value: String) -> Self { Self { repr: FactRepr::LiteralString(value), @@ -302,8 +315,7 @@ impl FactRepr { // Narrow to an object that has this field let field_ty = field_type .as_ref() - .map(|f| f.apply_to(Ty::ANY, store)) - .unwrap_or(Ty::ANY); + .map_or(Ty::ANY, |f| f.apply_to(Ty::ANY, store)); let obj_data = ObjectData { fields: vec![( @@ -486,13 +498,14 @@ pub struct Facts { impl Facts { /// Create an empty facts collection. + #[must_use] pub fn new() -> Self { Self::default() } /// Add a fact for a variable. /// - /// If a fact already exists for this variable, they are ANDed together. + /// If a fact already exists for this variable, they are `ANDed` together. pub fn add(&mut self, var_name: String, fact: Fact) { if let Some(existing) = self.facts.remove(&var_name) { self.facts.insert(var_name, existing.and(fact)); @@ -502,11 +515,13 @@ impl Facts { } /// Get the fact for a variable, if any. + #[must_use] pub fn get(&self, var_name: &str) -> Option<&Fact> { self.facts.get(var_name) } /// Check if there are any facts. + #[must_use] pub fn is_empty(&self) -> bool { self.facts.is_empty() } @@ -578,6 +593,7 @@ use jrsonnet_rowan_parser::{ /// - `a && b` → facts from a AND facts from b /// - `a || b` → facts from a OR facts from b (intersection) /// - `!a` → negated facts from a +#[must_use] pub fn extract_facts(cond: &Expr) -> Facts { let mut facts = Facts::new(); extract_facts_into(cond, &mut facts); @@ -728,7 +744,7 @@ struct StdMethodCall { args: ArgsDesc, } -/// Check if an ExprCall is a call to std.methodName and extract the method name and args. +/// Check if an `ExprCall` is a call to std.methodName and extract the method name and args. fn extract_std_method_call(call: &ExprCall) -> Option { let callee = call.callee()?; let ExprBase::ExprField(field) = callee.expr_base()? 
else { @@ -760,7 +776,7 @@ fn extract_std_method_from_binary( extract_std_method_call(&call) } -/// Check for std.isX(var) or std.objectHas(var, "field") pattern in an ExprCall. +/// Check for std.isX(var) or std.objectHas(var, "field") pattern in an `ExprCall`. fn check_std_call(call: &ExprCall) -> Option<(String, Fact)> { let std_call = extract_std_method_call(call)?; @@ -773,11 +789,11 @@ fn check_std_call(call: &ExprCall) -> Option<(String, Fact)> { // Try std.objectHas(obj, "field") if std_call.method_name == "objectHas" || std_call.method_name == "objectHasAll" { let arg_list: Vec<_> = std_call.args.args().collect(); - if arg_list.len() != 2 { + let [obj_arg, field_arg] = arg_list.as_slice() else { return None; - } - let var_name = extract_var_name(&arg_list[0].expr()?)?; - let field_name = extract_string_literal(&arg_list[1].expr()?)?; + }; + let var_name = extract_var_name(&obj_arg.expr()?)?; + let field_name = extract_string_literal(&field_arg.expr()?)?; return Some((var_name, Fact::has_field(field_name))); } @@ -795,13 +811,13 @@ fn check_std_call(call: &ExprCall) -> Option<(String, Fact)> { /// of `arr` are numbers, so we can narrow `arr` to `Array`. fn check_higher_order_predicate(all_args: &ArgsDesc) -> Option<(String, Fact)> { let arg_list: Vec<_> = all_args.args().collect(); - if arg_list.len() != 1 { + let [map_arg] = arg_list.as_slice() else { return None; - } + }; // The argument should be std.map(predicate, arr) - let map_arg = arg_list[0].expr()?; - let ExprBase::ExprCall(map_call) = map_arg.expr_base()? else { + let map_expr = map_arg.expr()?; + let ExprBase::ExprCall(map_call) = map_expr.expr_base()? 
else {
return None;
};
@@ -811,16 +827,16 @@ fn check_higher_order_predicate(all_args: &ArgsDesc) -> Option<(String, Fact)> {
}
let map_args: Vec<_> = map_std_call.args.args().collect();
- if map_args.len() != 2 {
+ let [pred_arg, arr_arg] = map_args.as_slice() else {
return None;
- }
+ };
// First arg is the predicate (e.g., std.isNumber)
- let pred_expr = map_args[0].expr()?;
+ let pred_expr = pred_arg.expr()?;
let elem_fact = extract_predicate_element_fact(&pred_expr)?;
// Second arg is the array variable
- let arr_expr = map_args[1].expr()?;
+ let arr_expr = arr_arg.expr()?;
let var_name = extract_var_name(&arr_expr)?;
Some((var_name, elem_fact))
@@ -957,6 +973,13 @@ fn check_std_type_comparison(
Some((var_name, fact))
}
+fn non_negative_integral_usize(value: f64) -> Option<usize> {
+ if !(value.is_finite() && value >= 0.0 && value.fract() == 0.0) {
+ return None;
+ }
+ format!("{value:.0}").parse().ok()
+}
+
/// Check for std.length(x) == n pattern.
fn check_std_length_comparison(
binary: &jrsonnet_rowan_parser::nodes::ExprBinary,
@@ -975,7 +998,7 @@ fn check_std_length_comparison(
return None;
}
- Some((var_name, Fact::has_len(len as usize)))
+ let len = non_negative_integral_usize(len)?;
+ Some((var_name, Fact::has_len(len)))
}
/// Check for std.length(x) != 0 pattern (non-empty).
@@ -1018,7 +1042,8 @@ fn check_std_length_greater(
}
// std.length(x) > n means length >= n+1
- Some((var_name, Fact::min_len(len as usize + 1)))
+ let len = non_negative_integral_usize(len)?;
+ Some((var_name, Fact::min_len(len + 1)))
}
/// Check for std.length(x) >= n pattern.
@@ -1039,7 +1064,8 @@ fn check_std_length_greater_eq(
return None;
}
- Some((var_name, Fact::min_len(len as usize)))
+ let len = non_negative_integral_usize(len)?;
+ Some((var_name, Fact::min_len(len)))
}
/// Extract a number literal value from an expression.
@@ -1057,10 +1083,10 @@ fn extract_number_literal(expr: &Expr) -> Option<f64> {
/// Extract variable name from a single-argument function call.
fn extract_single_arg_var_name(args: &ArgsDesc) -> Option<String> {
let arg_list: Vec<_> = args.args().collect();
- if arg_list.len() != 1 {
+ let [arg] = arg_list.as_slice() else {
return None;
- }
- let arg_expr = arg_list[0].expr()?;
+ };
+ let arg_expr = arg.expr()?;
extract_var_name(&arg_expr)
}
diff --git a/crates/jrsonnet-lsp-inference/src/helpers.rs b/crates/jrsonnet-lsp-inference/src/helpers.rs
index 065392ae..a370d7b2 100644
--- a/crates/jrsonnet-lsp-inference/src/helpers.rs
+++ b/crates/jrsonnet-lsp-inference/src/helpers.rs
@@ -9,6 +9,7 @@ use jrsonnet_rowan_parser::{
use crate::{env::TypeEnv, expr::infer_expr_ty};
/// Convert AST visibility to interned field visibility.
+#[must_use]
pub fn convert_visibility_ty(vis: Option) -> FieldVis {
match vis.map(|v| v.kind()) {
Some(VisibilityKind::Coloncoloncolon) => FieldVis::ForceVisible, // :::
@@ -17,7 +18,7 @@ pub fn convert_visibility_ty(vis: Option) -> FieldVis {
}
}
-/// Extract field name from a FieldName node.
+/// Extract field name from a `FieldName` node.
+#[must_use]
pub fn extract_field_name(field_name: &FieldName) -> Option<String> {
match field_name {
FieldName::FieldNameFixed(fixed) => {
@@ -81,6 +83,7 @@ pub fn infer_stdlib_field_access_ty(
///
/// This is used for constraint tracking - we can only track constraints on
/// direct variable references, not complex expressions.
+#[must_use]
pub fn extract_var_name_from_expr(expr: &jrsonnet_rowan_parser::nodes::Expr) -> Option<String> {
let ExprBase::ExprVar(var) = expr.expr_base()? else {
return None;
@@ -90,9 +93,10 @@ pub fn extract_var_name_from_expr(expr: &jrsonnet_rowan_parser::nodes::Expr) ->
Some(ident.text().to_string())
}
-/// Extract parameter information (name and has_default) from a ParamsDesc.
+/// Extract parameter information (name and `has_default`) from a `ParamsDesc`.
///
-/// Returns a list of (name, has_default) pairs for each parameter.
+#[must_use] pub fn extract_params_with_defaults( params_desc: &jrsonnet_rowan_parser::nodes::ParamsDesc, ) -> Vec<(String, bool)> { @@ -114,7 +118,7 @@ pub fn extract_params_with_defaults( } /// Extract parameters from a function definition as interned types. -/// Returns Vec of ParamInterned with name, type from default, and has_default flag. +/// Returns Vec of `ParamInterned` with name, type from default, and `has_default` flag. pub fn extract_params_with_default_types_ty( params_desc: &jrsonnet_rowan_parser::nodes::ParamsDesc, env: &mut TypeEnv, diff --git a/crates/jrsonnet-lsp-inference/src/manager.rs b/crates/jrsonnet-lsp-inference/src/manager.rs index 3a838239..719ba069 100644 --- a/crates/jrsonnet-lsp-inference/src/manager.rs +++ b/crates/jrsonnet-lsp-inference/src/manager.rs @@ -1,7 +1,7 @@ //! Document manager for tracking open documents. //! -//! Uses DashMap for concurrent access to documents without -//! requiring a global lock. Thread-safe for use with TypeProvider. +//! Uses `DashMap` for concurrent access to documents without +//! requiring a global lock. Thread-safe for use with `TypeProvider`. 
use std::{num::NonZeroUsize, sync::Arc}; diff --git a/crates/jrsonnet-lsp-inference/src/object.rs b/crates/jrsonnet-lsp-inference/src/object.rs index 5f2eebc1..1167d6b6 100644 --- a/crates/jrsonnet-lsp-inference/src/object.rs +++ b/crates/jrsonnet-lsp-inference/src/object.rs @@ -102,10 +102,8 @@ pub fn infer_object_type_with_super_ty( Member::MemberFieldNormal(field) => { if let Some(field_name) = field.field_name() { if let Some(name_str) = extract_field_name(&field_name) { - let field_ty = field - .expr() - .map(|e| infer_expr_ty(&e, env)) - .unwrap_or(Ty::ANY); + let field_ty = + field.expr().map_or(Ty::ANY, |e| infer_expr_ty(&e, env)); let visibility = convert_visibility_ty(field.visibility()); final_fields.push(( name_str, diff --git a/crates/jrsonnet-lsp-inference/src/poly.rs b/crates/jrsonnet-lsp-inference/src/poly.rs index c09b65ee..ef249c0b 100644 --- a/crates/jrsonnet-lsp-inference/src/poly.rs +++ b/crates/jrsonnet-lsp-inference/src/poly.rs @@ -32,9 +32,8 @@ pub fn instantiate_function_call_ty(func_ty: Ty, arg_types: &[Ty], store: &mut T } // Get the function data - let func_data = match store.get(func_ty).clone() { - TyData::Function(f) => f, - _ => return Ty::ANY, + let TyData::Function(func_data) = store.get(func_ty).clone() else { + return Ty::ANY; }; // Build a substitution by matching parameter types with argument types @@ -99,7 +98,9 @@ pub fn collect_type_var_substitutions_ty( { // Use the union of tuple element types (need mutable store for this) // For now, just match against first element as approximation - collect_type_var_substitutions_ty(*pat_elem, tgt_elems[0], substitution, store); + if let Some(&first_elem) = tgt_elems.first() { + collect_type_var_substitutions_ty(*pat_elem, first_elem, substitution, store); + } } // Object types - match field types diff --git a/crates/jrsonnet-lsp-inference/src/provider.rs b/crates/jrsonnet-lsp-inference/src/provider.rs index 2af8a168..7bf91aa7 100644 --- a/crates/jrsonnet-lsp-inference/src/provider.rs 
+++ b/crates/jrsonnet-lsp-inference/src/provider.rs @@ -105,11 +105,13 @@ impl TypeProvider { } /// Get the global type store. + #[must_use] pub fn global_types(&self) -> &Arc { &self.global_types } /// Get the type cache. + #[must_use] pub fn type_cache(&self) -> &SharedTypeCache { &self.type_cache } @@ -124,7 +126,7 @@ mod tests { use super::*; use crate::type_cache::new_shared_cache; - /// Test document source backed by a DashMap. + /// Test document source backed by a `DashMap`. struct TestDocSource { docs: DashMap, } diff --git a/crates/jrsonnet-lsp-inference/src/suggestions.rs b/crates/jrsonnet-lsp-inference/src/suggestions.rs index fc8988e5..e71e1bd7 100644 --- a/crates/jrsonnet-lsp-inference/src/suggestions.rs +++ b/crates/jrsonnet-lsp-inference/src/suggestions.rs @@ -48,6 +48,7 @@ pub fn find_similar<'a>(name: &str, candidates: impl IntoIterator Option<&'static str> { match name { // Boolean literals (from other languages) diff --git a/crates/jrsonnet-lsp-scope/src/bindings.rs b/crates/jrsonnet-lsp-scope/src/bindings.rs index f74b8285..1dd2e398 100644 --- a/crates/jrsonnet-lsp-scope/src/bindings.rs +++ b/crates/jrsonnet-lsp-scope/src/bindings.rs @@ -4,7 +4,7 @@ //! and variable references in the Jsonnet AST. //! //! Import-related utilities are provided by `jrsonnet_lsp_import`. -//! General AST utilities (token_at_offset, to_lsp_range, etc.) are provided by +//! General AST utilities (`token_at_offset`, `to_lsp_range`, etc.) are provided by //! `jrsonnet_lsp_document`. 
use jrsonnet_rowan_parser::{SyntaxKind, SyntaxToken}; @@ -16,6 +16,7 @@ use jrsonnet_rowan_parser::{SyntaxKind, SyntaxToken}; /// - Local variable bindings: `local x = ...` /// - Function names: `local f(x) = ...` /// - Function parameters: `function(x)` +#[must_use] pub fn is_definition_site(token: &SyntaxToken) -> bool { let Some(parent) = token.parent() else { return false; @@ -42,6 +43,7 @@ pub fn is_definition_site(token: &SyntaxToken) -> bool { /// /// A variable reference is a use of a previously-defined name. /// This checks if the token is part of an `ExprVar` node. +#[must_use] pub fn is_variable_reference(token: &SyntaxToken) -> bool { let Some(parent) = token.parent() else { return false; @@ -59,6 +61,7 @@ pub fn is_variable_reference(token: &SyntaxToken) -> bool { } /// Check if a token can be renamed (is either a definition or reference to a local binding). +#[must_use] pub fn is_renameable(token: &SyntaxToken) -> bool { is_definition_site(token) || is_variable_reference(token) } @@ -67,6 +70,7 @@ pub fn is_renameable(token: &SyntaxToken) -> bool { /// /// A file-scope definition is one that could potentially be exported /// from a file via an import. 
+#[must_use] pub fn is_at_file_scope(token: &SyntaxToken) -> bool { let mut node = token.parent(); @@ -106,8 +110,8 @@ mod tests { let ast = doc.ast(); // Find the 'x' tokens - let mut found_def = false; - let mut found_ref = false; + let mut found_definition = false; + let mut found_reference = false; for token in ast .syntax() .descendants_with_tokens() @@ -115,14 +119,14 @@ mod tests { { if token.kind() == SyntaxKind::IDENT && token.text() == "x" { if is_definition_site(&token) { - found_def = true; + found_definition = true; } else if is_variable_reference(&token) { - found_ref = true; + found_reference = true; } } } - assert!(found_def, "Should find definition site"); - assert!(found_ref, "Should find reference site"); + assert!(found_definition, "Should find definition site"); + assert!(found_reference, "Should find reference site"); } #[test] diff --git a/crates/jrsonnet-lsp-scope/src/resolver.rs b/crates/jrsonnet-lsp-scope/src/resolver.rs index abb16616..392326b1 100644 --- a/crates/jrsonnet-lsp-scope/src/resolver.rs +++ b/crates/jrsonnet-lsp-scope/src/resolver.rs @@ -23,6 +23,7 @@ use crate::bindings::{is_definition_site, is_variable_reference}; /// /// Starting from a token that references a variable, this walks up the AST /// looking for the binding that defines the variable. +#[must_use] pub fn find_definition_range(token: &SyntaxToken, name: &str) -> Option { let mut current = token.parent()?; @@ -39,6 +40,7 @@ pub fn find_definition_range(token: &SyntaxToken, name: &str) -> Option Option { match bind { Bind::BindDestruct(bd) => { @@ -128,7 +131,7 @@ fn check_function_for_definition(func_node: &SyntaxNode, name: &str) -> Option Option { let func = BindFunction::cast(func_node.clone())?; let params = func.params()?; @@ -142,6 +145,7 @@ fn check_bind_function_for_definition(func_node: &SyntaxNode, name: &str) -> Opt } /// Check a parameter for a name. 
+#[must_use] pub fn check_param_for_name(param: &Param, name: &str) -> Option { let destruct = param.destruct()?; if let Destruct::DestructFull(full) = destruct { @@ -154,7 +158,7 @@ pub fn check_param_for_name(param: &Param, name: &str) -> Option { None } -/// Check ForSpec for a definition. +/// Check `ForSpec` for a definition. fn check_for_spec_for_definition(for_node: &SyntaxNode, name: &str) -> Option { let for_spec = ForSpec::cast(for_node.clone())?; let destruct = for_spec.bind()?; @@ -187,9 +191,9 @@ fn check_object_for_definition(obj_body: &SyntaxNode, name: &str) -> Option Option { for child in comp_node.children() { if child.kind() != SyntaxKind::FOR_SPEC { @@ -206,6 +210,7 @@ fn check_comprehension_for_definition(comp_node: &SyntaxNode, name: &str) -> Opt /// /// Walks up the scope chain from the token to find its definition, /// then checks if it matches the expected definition range. +#[must_use] pub fn references_definition(token: &SyntaxToken, name: &str, def_range: TextRange) -> bool { let Some(mut current) = token.parent() else { return false; @@ -305,7 +310,7 @@ pub fn find_all_references_for_rename( /// /// Precomputes a mapping from each variable reference to its definition. pub struct ScopeResolver { - /// Maps reference token start position to definition's TextRange. + /// Maps reference token start position to definition's `TextRange`. reference_to_def: FxHashMap, } @@ -338,8 +343,9 @@ impl ScopeResolver { /// Get the definition range for a reference token. /// - /// Returns the TextRange of the Name node at the definition site, + /// Returns the `TextRange` of the Name node at the definition site, /// or None if the token is not a reference or has no definition. + #[must_use] pub fn get_definition(&self, token: &SyntaxToken) -> Option { self.reference_to_def .get(&token.text_range().start()) @@ -347,6 +353,7 @@ impl ScopeResolver { } /// Check if a reference resolves to a specific definition. 
+ #[must_use] pub fn references_definition(&self, token: &SyntaxToken, def_range: TextRange) -> bool { self.get_definition(token) == Some(def_range) } @@ -397,7 +404,7 @@ impl ScopeResolver { struct ScopeBinding { /// The name of the binding. name: String, - /// The TextRange of the definition (Name node). + /// The `TextRange` of the definition (Name node). range: TextRange, /// The position after which this binding is visible (for local bindings). /// None means visible throughout the scope (e.g., function params). @@ -426,9 +433,9 @@ pub struct ScopeIndex { scopes: Vec, /// Map from scope start position to index for quick lookup. scope_starts: Vec<(TextSize, usize)>, - /// Cached scope chains: scope_index -> chain of scope ranges (innermost first). + /// Cached scope chains: `scope_index` -> chain of scope ranges (innermost first). scope_chain_cache: RefCell>>, - /// Cached bindings per scope chain: scope_index -> all bindings in chain (with visibility info). + /// Cached bindings per scope chain: `scope_index` -> all bindings in chain (with visibility info). bindings_cache: RefCell>>, } @@ -445,6 +452,7 @@ impl ScopeIndex { /// Build a scope index from an AST root. /// /// Walks the AST once to collect all scopes and their bindings. 
+ #[must_use] pub fn new(root: &SyntaxNode) -> Self { let mut scopes = Vec::new(); let mut scope_stack: Vec = Vec::new(); @@ -516,7 +524,7 @@ impl ScopeIndex { SyntaxKind::EXPR_FUNCTION => Self::extract_expr_function_bindings(node), SyntaxKind::BIND_FUNCTION => Self::extract_bind_function_bindings(node), SyntaxKind::FOR_SPEC => ForSpec::cast(node.clone()) - .and_then(Self::for_spec_binding) + .and_then(|for_spec| Self::for_spec_binding(&for_spec)) .into_iter() .collect(), SyntaxKind::OBJ_BODY_MEMBER_LIST => Self::extract_object_local_bindings(node), @@ -585,7 +593,7 @@ impl ScopeIndex { .collect() } - fn for_spec_binding(for_spec: ForSpec) -> Option { + fn for_spec_binding(for_spec: &ForSpec) -> Option { let destruct = for_spec.bind()?; let Destruct::DestructFull(full) = destruct else { return None; @@ -616,7 +624,7 @@ impl ScopeIndex { node.children() .filter(|child| child.kind() == SyntaxKind::FOR_SPEC) .filter_map(ForSpec::cast) - .filter_map(Self::for_spec_binding) + .filter_map(|for_spec| Self::for_spec_binding(&for_spec)) .collect() } @@ -663,8 +671,12 @@ impl ScopeIndex { let mut best_size = u32::MAX; for i in (0..search_idx).rev() { - let (_, scope_idx) = self.scope_starts[i]; - let scope = &self.scopes[scope_idx]; + let Some((_, scope_idx)) = self.scope_starts.get(i).copied() else { + continue; + }; + let Some(scope) = self.scopes.get(scope_idx) else { + continue; + }; if !scope.range.contains(pos) { continue; @@ -682,12 +694,12 @@ impl ScopeIndex { /// Find the definition for a name at a given position. /// - /// Returns the TextRange of the definition's Name node. + /// Returns the `TextRange` of the definition's Name node. 
pub fn find_definition(&self, pos: TextSize, name: &str) -> Option { let mut scope_idx = self.find_innermost_scope(pos)?; loop { - let scope = &self.scopes[scope_idx]; + let scope = self.scopes.get(scope_idx)?; // Search bindings in reverse order for shadowing (last match wins) for binding in scope.bindings.iter().rev() { @@ -720,9 +732,8 @@ impl ScopeIndex { /// Returns a vector of scope ranges from the innermost scope containing /// the position to the root scope. Results are memoized per scope index. pub fn scope_chain(&self, pos: TextSize) -> Vec { - let scope_idx = match self.find_innermost_scope(pos) { - Some(idx) => idx, - None => return Vec::new(), + let Some(scope_idx) = self.find_innermost_scope(pos) else { + return Vec::new(); }; // Check cache first @@ -746,8 +757,11 @@ impl ScopeIndex { let mut scope_idx = start_scope_idx; loop { - chain.push(self.scopes[scope_idx].range); - match self.scopes[scope_idx].parent { + let Some(scope) = self.scopes.get(scope_idx) else { + break; + }; + chain.push(scope.range); + match scope.parent { Some(parent_idx) => scope_idx = parent_idx, None => break, } @@ -762,9 +776,8 @@ impl ScopeIndex { /// including shadowed names. The cached bindings include visibility info, /// which is filtered at query time. 
pub fn bindings_at(&self, pos: TextSize) -> Vec<(String, TextRange)> { - let scope_idx = match self.find_innermost_scope(pos) { - Some(idx) => idx, - None => return Vec::new(), + let Some(scope_idx) = self.find_innermost_scope(pos) else { + return Vec::new(); }; // Get or compute cached bindings for this scope chain @@ -804,7 +817,9 @@ impl ScopeIndex { let mut scope_idx = start_scope_idx; loop { - let scope = &self.scopes[scope_idx]; + let Some(scope) = self.scopes.get(scope_idx) else { + break; + }; for binding in &scope.bindings { bindings.push(CachedBinding { @@ -1027,8 +1042,7 @@ mod tests { assert_eq!( linear_result, index_result, - "Mismatch for '{}' at position {:?}", - name, pos + "Mismatch for '{name}' at position {pos:?}" ); } } diff --git a/crates/jrsonnet-lsp-stdlib/src/docs.rs b/crates/jrsonnet-lsp-stdlib/src/docs.rs index 0f98610e..35b8c80c 100644 --- a/crates/jrsonnet-lsp-stdlib/src/docs.rs +++ b/crates/jrsonnet-lsp-stdlib/src/docs.rs @@ -22,6 +22,7 @@ pub struct StdlibDoc { impl StdlibDoc { /// Format as markdown for hover display. + #[must_use] pub fn to_markdown(&self) -> String { let mut md = format!("```jsonnet\nstd.{}{})\n```\n\n", self.name, self.signature); md.push_str(self.description); diff --git a/crates/jrsonnet-lsp-stdlib/src/signatures.rs b/crates/jrsonnet-lsp-stdlib/src/signatures.rs index 5887caa8..43d9c19e 100644 --- a/crates/jrsonnet-lsp-stdlib/src/signatures.rs +++ b/crates/jrsonnet-lsp-stdlib/src/signatures.rs @@ -26,12 +26,13 @@ struct StdlibData { pub struct StdlibSignature { /// Function name (without `std.` prefix). pub name: &'static str, - /// The function type as interned Ty (references STDLIB_DATA.store). + /// The function type as interned Ty (references `STDLIB_DATA.store`). pub func_ty: Ty, } impl StdlibSignature { /// Count of required parameters. 
+ #[must_use] pub fn required_count(&self) -> usize { match *stdlib_store().get(self.func_ty) { TyData::Function(ref f) => f.required_count(), @@ -40,6 +41,7 @@ impl StdlibSignature { } /// Total parameter count. + #[must_use] pub fn total_count(&self) -> usize { match *stdlib_store().get(self.func_ty) { TyData::Function(ref f) => f.params.len(), @@ -48,6 +50,7 @@ impl StdlibSignature { } /// Whether the function accepts variadic arguments. + #[must_use] pub fn variadic(&self) -> bool { match *stdlib_store().get(self.func_ty) { TyData::Function(ref f) => f.variadic, @@ -56,6 +59,7 @@ impl StdlibSignature { } /// Get the function data from the global store as an owned copy. + #[must_use] pub fn func_data(&self) -> Option { match *stdlib_store().get(self.func_ty) { TyData::Function(ref f) => Some(f.clone()), @@ -80,6 +84,7 @@ pub fn get_stdlib_signature(name: &str) -> Option<&'static StdlibSignature> { } /// Get the function type (as Ty) for a stdlib function by name. +#[must_use] pub fn get_stdlib_func_ty(name: &str) -> Option { get_stdlib_signature(name).map(|s| s.func_ty) } @@ -103,7 +108,7 @@ pub fn ensure_initialized() { STDLIB_DATA.get_or_init(init_stdlib_data); } -/// Convert a spec ParamType to an interned Ty. +/// Convert a spec `ParamType` to an interned Ty. fn param_type_to_ty(store: &mut TyStore, pt: ParamType) -> Ty { match pt { ParamType::Any => Ty::ANY, @@ -132,7 +137,7 @@ fn param_type_to_ty(store: &mut TyStore, pt: ParamType) -> Ty { } } -/// Convert a spec ReturnSpec to an LSP ReturnSpec. +/// Convert a spec `ReturnSpec` to an LSP `ReturnSpec`. fn convert_return_spec(store: &mut TyStore, rs: SigReturnSpec) -> LspReturnSpec { match rs { SigReturnSpec::Fixed(pt) => LspReturnSpec::Fixed(param_type_to_ty(store, pt)), @@ -189,7 +194,7 @@ fn init_stdlib_data() -> StdlibData { StdlibData { store, signatures } } -/// Look up a stdlib function type by name and import it into a MutStore. 
+/// Look up a stdlib function type by name and import it into a `MutStore`. /// /// Returns the function type if found, interned into the local store. pub fn import_stdlib_func_to_mut_store(store: &mut MutStore, name: &str) -> Option { @@ -197,7 +202,7 @@ pub fn import_stdlib_func_to_mut_store(store: &mut MutStore, name: &str) -> Opti Some(import_ty_from_stdlib(store, func_ty)) } -/// Import a type from the stdlib store into a MutStore. +/// Import a type from the stdlib store into a `MutStore`. pub fn import_ty_from_stdlib(store: &mut MutStore, ty: Ty) -> Ty { // Well-known constants are the same in all stores if ty.is_well_known() { diff --git a/crates/jrsonnet-lsp-types/src/display.rs b/crates/jrsonnet-lsp-types/src/display.rs index 17f8bc5a..dc67c2ba 100644 --- a/crates/jrsonnet-lsp-types/src/display.rs +++ b/crates/jrsonnet-lsp-types/src/display.rs @@ -55,6 +55,7 @@ impl<'a, S: TypeStoreOps> DisplayContext<'a, S> { } /// Format a type using this context. + #[must_use] pub fn format(&self, ty: Ty) -> String { self.format_impl(ty, 0) } @@ -79,15 +80,15 @@ impl<'a, S: TypeStoreOps> DisplayContext<'a, S> { if self.style == DisplayStyle::Compact && s.len() > 20 { format!("\"{}...\"", &s[..17]) } else { - format!("\"{}\"", s) + format!("\"{s}\"") } } TyData::Array { elem, is_set } => { let elem_str = self.format_impl(elem, depth + 1); if is_set { - format!("set<{}>", elem_str) + format!("set<{elem_str}>") } else { - format!("array<{}>", elem_str) + format!("array<{elem_str}>") } } TyData::Tuple { elems } => self.format_tuple(&elems, depth), @@ -135,30 +136,30 @@ impl<'a, S: TypeStoreOps> DisplayContext<'a, S> { (None, None) => "number".to_string(), (Some(lo), Some(hi)) if (lo - hi).abs() < f64::EPSILON => { if lo.fract() == 0.0 { - format!("{}", lo as i64) + format!("{lo:.0}") } else { - format!("{}", lo) + format!("{lo}") } } (Some(lo), Some(hi)) => { if self.style == DisplayStyle::Compact { "number".to_string() } else { - format!("number[{}..{}]", lo, hi) + 
format!("number[{lo}..{hi}]") } } (Some(lo), None) => { if self.style == DisplayStyle::Compact { "number".to_string() } else { - format!("number[{}..Inf]", lo) + format!("number[{lo}..Inf]") } } (None, Some(hi)) => { if self.style == DisplayStyle::Compact { "number".to_string() } else { - format!("number[-Inf..{}]", hi) + format!("number[-Inf..{hi}]") } } } @@ -175,8 +176,9 @@ impl<'a, S: TypeStoreOps> DisplayContext<'a, S> { elems.len() }; - let elem_strs: Vec = elems[..show_count] + let elem_strs: Vec = elems .iter() + .take(show_count) .map(|&t| self.format_impl(t, depth + 1)) .collect(); @@ -208,8 +210,9 @@ impl<'a, S: TypeStoreOps> DisplayContext<'a, S> { fields.len() }; - let field_strs: Vec = fields[..show_count] + let field_strs: Vec = fields .iter() + .take(show_count) .map(|(name, field)| { if self.style == DisplayStyle::Detailed { format!("{}: {}", name, self.format_impl(field.ty, depth + 1)) @@ -269,8 +272,9 @@ impl<'a, S: TypeStoreOps> DisplayContext<'a, S> { types.len() }; - let parts: Vec = types[..show_count] + let parts: Vec = types .iter() + .take(show_count) .map(|&t| self.format_impl(t, depth + 1)) .collect(); @@ -292,7 +296,7 @@ impl<'a, S: TypeStoreOps> DisplayContext<'a, S> { } } -/// Wrapper for displaying a type with a context using std::fmt. +/// Wrapper for displaying a type with a context using `std::fmt`. pub struct DisplayTy<'a, S: TypeStoreOps> { ty: Ty, cx: &'a DisplayContext<'a, S>, @@ -300,6 +304,7 @@ pub struct DisplayTy<'a, S: TypeStoreOps> { impl<'a, S: TypeStoreOps> DisplayTy<'a, S> { /// Create a new display wrapper. 
+ #[must_use] pub fn new(ty: Ty, cx: &'a DisplayContext<'a, S>) -> Self { Self { ty, cx } } @@ -324,7 +329,7 @@ mod tests { let fields: Vec<_> = (0..10) .map(|i| { ( - format!("field{}", i), + format!("field{i}"), FieldDefInterned { ty: Ty::STRING, required: true, diff --git a/crates/jrsonnet-lsp-types/src/global_store.rs b/crates/jrsonnet-lsp-types/src/global_store.rs index 57c3c4eb..cdb6f61f 100644 --- a/crates/jrsonnet-lsp-types/src/global_store.rs +++ b/crates/jrsonnet-lsp-types/src/global_store.rs @@ -10,6 +10,12 @@ use rustc_hash::FxHashMap; use crate::store::{Ty, TyData}; +static ANY_TY_DATA: TyData = TyData::Any; + +fn to_u32(value: usize) -> Option { + u32::try_from(value).ok() +} + /// Internal storage implementation shared between global and local stores. #[derive(Debug, Clone)] pub(crate) struct TyStoreInner { @@ -53,11 +59,11 @@ impl TyStoreInner { TyData::Any, // 15 - reserved ]; - for (i, data) in builtins.into_iter().enumerate() { - let ty = Ty::from_raw(i as u32); + for (data, raw_id) in builtins.into_iter().zip(0_u32..) { + let ty = Ty::from_raw(raw_id); self.data.push(data.clone()); // Only dedup the non-padding entries - if i < 9 { + if raw_id < 9 { self.dedup.insert(data, ty); } } @@ -65,10 +71,13 @@ impl TyStoreInner { debug_assert_eq!(self.data.len(), Ty::RESERVED_COUNT as usize); } - /// Get type data by index (panics if out of bounds). + /// Get type data by index. #[inline] pub(crate) fn get_data(&self, index: u32) -> &TyData { - &self.data[index as usize] + let idx = usize::try_from(index).ok(); + idx.and_then(|i| self.data.get(i)) + .or_else(|| self.data.first()) + .unwrap_or(&ANY_TY_DATA) } /// Get the number of types in this store. 
@@ -85,7 +94,10 @@ impl TyStoreInner { } // Intern new type - let id = make_ty(self.data.len() as u32); + let Some(raw_id) = to_u32(self.data.len()) else { + return Ty::ANY; + }; + let id = make_ty(raw_id); self.data.push(data.clone()); self.dedup.insert(data, id); id @@ -103,6 +115,7 @@ pub struct GlobalTyStore { impl GlobalTyStore { /// Create a new global store with built-in types. + #[must_use] pub fn new() -> Self { Self { inner: RwLock::new(TyStoreInner::with_builtins()), @@ -111,20 +124,23 @@ impl GlobalTyStore { /// Get read access to the inner store. pub(crate) fn read(&self) -> std::sync::RwLockReadGuard<'_, TyStoreInner> { - self.inner.read().unwrap() + self.inner + .read() + .unwrap_or_else(std::sync::PoisonError::into_inner) } /// Get write access to the inner store. pub(crate) fn write(&self) -> std::sync::RwLockWriteGuard<'_, TyStoreInner> { - self.inner.write().unwrap() + self.inner + .write() + .unwrap_or_else(std::sync::PoisonError::into_inner) } /// Get type data for a global Ty. - /// - /// # Panics - /// Panics if `ty` is a local type (has LOCAL_BIT set). pub fn get_data(&self, ty: Ty) -> TyData { - debug_assert!(!ty.is_local(), "Cannot look up local type in global store"); + if ty.is_local() { + return TyData::Any; + } self.read().get_data(ty.raw_index()).clone() } diff --git a/crates/jrsonnet-lsp-types/src/local_store.rs b/crates/jrsonnet-lsp-types/src/local_store.rs index 6ca0e526..3c350e6b 100644 --- a/crates/jrsonnet-lsp-types/src/local_store.rs +++ b/crates/jrsonnet-lsp-types/src/local_store.rs @@ -8,6 +8,12 @@ use rustc_hash::FxHashMap; use crate::store::{Ty, TyData}; +fn to_u32(value: usize) -> Option { + u32::try_from(value).ok() +} + +static ANY_TY_DATA: TyData = TyData::Any; + /// Per-file local type store - temporary during analysis. /// /// Types interned here have `Ty` values with the `LOCAL_BIT` set. @@ -22,6 +28,7 @@ pub struct LocalTyStore { impl LocalTyStore { /// Create a new empty local store. 
+ #[must_use] pub fn new() -> Self { Self { data: Vec::new(), @@ -30,20 +37,24 @@ impl LocalTyStore { } /// Get type data by local index. - /// - /// # Panics - /// Panics if `ty` is not a local type or index is out of bounds. + #[must_use] pub fn get_data(&self, ty: Ty) -> &TyData { - debug_assert!(ty.is_local(), "Expected local type"); - &self.data[ty.raw_index() as usize] + if !ty.is_local() { + return &ANY_TY_DATA; + } + self.data + .get(ty.raw_index() as usize) + .unwrap_or(&ANY_TY_DATA) } /// Get the number of local types. + #[must_use] pub fn len(&self) -> usize { self.data.len() } /// Check if empty. + #[must_use] pub fn is_empty(&self) -> bool { self.data.is_empty() } @@ -52,6 +63,7 @@ impl LocalTyStore { /// /// Returns an existing local type if the data is already interned locally. /// Note: Does NOT check the global store - caller should check global first. + /// pub fn intern(&mut self, data: TyData) -> Ty { // Check if already interned locally if let Some(&existing) = self.dedup.get(&data) { @@ -59,7 +71,10 @@ impl LocalTyStore { } // Intern new local type - let id = Ty::from_raw_local(self.data.len() as u32); + let Some(raw_id) = to_u32(self.data.len()) else { + return Ty::ANY; + }; + let id = Ty::from_raw_local(raw_id); self.data.push(data.clone()); self.dedup.insert(data, id); id @@ -70,10 +85,11 @@ impl LocalTyStore { self.data .iter() .enumerate() - .map(|(i, data)| (Ty::from_raw_local(i as u32), data)) + .filter_map(|(i, data)| to_u32(i).map(|raw_id| (Ty::from_raw_local(raw_id), data))) } /// Consume the store and return the type data vector. + #[must_use] pub fn into_data(self) -> Vec { self.data } diff --git a/crates/jrsonnet-lsp-types/src/mut_store.rs b/crates/jrsonnet-lsp-types/src/mut_store.rs index 29d83961..4c54bd86 100644 --- a/crates/jrsonnet-lsp-types/src/mut_store.rs +++ b/crates/jrsonnet-lsp-types/src/mut_store.rs @@ -24,8 +24,8 @@ use crate::{ /// /// Uses `Arc` for easy sharing. /// -/// Note: MutStore is intentionally not Clone. 
During analysis, a single -/// MutStore is used and passed by mutable reference. After analysis, +/// Note: `MutStore` is intentionally not Clone. During analysis, a single +/// `MutStore` is used and passed by mutable reference. After analysis, /// the local types are merged into the global store via `TySubst::merge`. #[derive(Debug)] pub struct MutStore { @@ -52,6 +52,7 @@ impl MutStore { } /// Get type data for any Ty (global or local). + #[must_use] pub fn get_data(&self, ty: Ty) -> TyData { if ty.is_local() { self.local.get_data(ty).clone() @@ -85,21 +86,25 @@ impl MutStore { } /// Consume and return the local store for merging. + #[must_use] pub fn into_local(self) -> LocalTyStore { self.local } /// Get reference to the global store. + #[must_use] pub fn global(&self) -> &GlobalTyStore { &self.global } /// Get the Arc to the global store. + #[must_use] pub fn global_arc(&self) -> &Arc { &self.global } /// Get reference to the local store. + #[must_use] pub fn local(&self) -> &LocalTyStore { &self.local } @@ -146,7 +151,7 @@ impl MutStore { }) } - /// Create an AttrsOf type (object with uniform value type). + /// Create an `AttrsOf` type (object with uniform value type). pub fn attrs_of(&mut self, value: Ty) -> Ty { self.intern(TyData::AttrsOf { value }) } @@ -176,11 +181,10 @@ impl MutStore { /// Create a union type. pub fn union(&mut self, mut types: Vec) -> Ty { // Simplification rules - if types.is_empty() { - return Ty::NEVER; - } - if types.len() == 1 { - return types[0]; + match types.as_slice() { + [] => return Ty::NEVER, + [only] => return *only, + _ => {} } // Flatten nested unions and remove duplicates @@ -203,20 +207,19 @@ impl MutStore { flattened.sort_by_key(|t| t.id()); flattened.dedup(); - match flattened.len() { - 0 => Ty::NEVER, - 1 => flattened[0], + match flattened.as_slice() { + [] => Ty::NEVER, + [only] => *only, _ => self.intern(TyData::Union(flattened)), } } /// Create a sum (intersection) type. 
pub fn sum(&mut self, mut types: Vec) -> Ty { - if types.is_empty() { - return Ty::ANY; - } - if types.len() == 1 { - return types[0]; + match types.as_slice() { + [] => return Ty::ANY, + [only] => return *only, + _ => {} } // Flatten and simplify @@ -238,9 +241,9 @@ impl MutStore { flattened.sort_by_key(|t| t.id()); flattened.dedup(); - match flattened.len() { - 0 => Ty::ANY, - 1 => flattened[0], + match flattened.as_slice() { + [] => Ty::ANY, + [only] => *only, _ => self.intern(TyData::Sum(flattened)), } } @@ -268,6 +271,7 @@ impl MutStore { // ========== Type queries ========== /// Check if type is indexable. + #[must_use] pub fn is_indexable(&self, ty: Ty) -> bool { match self.get_data(ty) { TyData::Any @@ -286,6 +290,7 @@ impl MutStore { } /// Check if type supports field access. + #[must_use] pub fn supports_field_access(&self, ty: Ty) -> bool { match self.get_data(ty) { TyData::Any | TyData::Object(_) | TyData::AttrsOf { .. } => true, @@ -298,6 +303,7 @@ impl MutStore { } /// Check if type is callable. + #[must_use] pub fn is_callable(&self, ty: Ty) -> bool { match self.get_data(ty) { TyData::Any | TyData::Function(_) => true, @@ -308,6 +314,7 @@ impl MutStore { } /// Check if a type has any type variables. + #[must_use] pub fn has_type_vars(&self, ty: Ty) -> bool { match self.get_data(ty) { TyData::TypeVar { .. } => true, @@ -327,6 +334,7 @@ impl MutStore { } /// Format a type for display. 
+ #[must_use] pub fn display(&self, ty: Ty) -> String { match self.get_data(ty) { TyData::Any => "any".to_string(), @@ -340,18 +348,18 @@ impl MutStore { (None, None) => "number".to_string(), (Some(lo), Some(hi)) if (lo - hi).abs() < f64::EPSILON => { if lo.fract() == 0.0 { - format!("{}", lo as i64) + format!("{lo:.0}") } else { - format!("{}", lo) + format!("{lo}") } } - (Some(lo), Some(hi)) => format!("number[{}..{}]", lo, hi), - (Some(lo), None) => format!("number[{}..]", lo), - (None, Some(hi)) => format!("number[..{}]", hi), + (Some(lo), Some(hi)) => format!("number[{lo}..{hi}]"), + (Some(lo), None) => format!("number[{lo}..]"), + (None, Some(hi)) => format!("number[..{hi}]"), }, TyData::String => "string".to_string(), TyData::Char => "char".to_string(), - TyData::LiteralString(s) => format!("\"{}\"", s), + TyData::LiteralString(s) => format!("\"{s}\""), TyData::Array { elem, is_set } => { let base = format!("array<{}>", self.display(elem)); if is_set { @@ -414,11 +422,12 @@ impl MutStore { } } - /// Alias for `get_data()` to ease migration from TyStore. + /// Alias for `get_data()` to ease migration from `TyStore`. /// - /// TyStore::get() returns TyRef which derefs to TyData. This returns - /// TyData directly. Callers using `*store.get(ty)` should use `store.get(ty)`. + /// `TyStore::get()` returns `TyRef` which derefs to `TyData`. This returns + /// `TyData` directly. Callers using `*store.get(ty)` should use `store.get(ty)`. 
#[inline] + #[must_use] pub fn get(&self, ty: Ty) -> TyData { self.get_data(ty) } @@ -539,17 +548,13 @@ impl MutStore { if let (TyData::Object(obj1), TyData::Object(obj2)) = (&ty_data, &constraint_data) { let mut fields = obj1.fields.clone(); for (name, def2) in &obj2.fields { - if let Some(pos) = fields.iter().position(|(n, _)| n == name) { - let (_, def1) = &fields[pos]; + if let Some((_, def1)) = fields.iter_mut().find(|(n, _)| n == name) { let narrowed_ty = self.narrow(def1.ty, def2.ty); - fields[pos] = ( - name.clone(), - FieldDefInterned { - ty: narrowed_ty, - required: def1.required && def2.required, - visibility: def1.visibility, - }, - ); + *def1 = FieldDefInterned { + ty: narrowed_ty, + required: def1.required && def2.required, + visibility: def1.visibility, + }; } else { fields.push((name.clone(), def2.clone())); } diff --git a/crates/jrsonnet-lsp-types/src/operations.rs b/crates/jrsonnet-lsp-types/src/operations.rs index 7246ea4f..5fc3d296 100644 --- a/crates/jrsonnet-lsp-types/src/operations.rs +++ b/crates/jrsonnet-lsp-types/src/operations.rs @@ -24,6 +24,9 @@ use crate::store::{FieldDefInterned, FieldVis, ObjectData, Ty, TyData, TypeStore /// /// Returns `Ok(result_ty)` if the operation is valid for the given operand types, /// or `Err(error_message)` if the operation is invalid. +/// +/// # Errors +/// Returns `Err` when the operand types do not support the requested operator. pub fn binary_op_result_ty( op: BinaryOperatorKind, lhs: Ty, @@ -236,6 +239,9 @@ TyData::String | TyData::Char | TyData::LiteralString(_)) => Ok(Ty::STRING), /// /// Returns `Ok(result_ty)` if the operation is valid for the given operand type, /// or `Err(error_message)` if the operation is invalid. +/// +/// # Errors +/// Returns `Err` when the operand type does not support the requested operator. pub fn unary_op_result_ty( op: UnaryOperatorKind, operand: Ty, @@ -302,12 +308,12 @@ pub fn unary_op_result_ty( } } -/// Helper to check if TyData represents a number type. 
+/// Helper to check if `TyData` represents a number type. fn is_number_ty(data: &TyData) -> bool { matches!(data, TyData::Number | TyData::BoundedNumber(_)) } -/// Helper to check if TyData represents a boolean type. +/// Helper to check if `TyData` represents a boolean type. fn is_bool_ty(data: &TyData) -> bool { matches!(data, TyData::Bool | TyData::True | TyData::False) } @@ -494,17 +500,13 @@ pub fn ty_and(lhs: Ty, rhs: Ty, store: &mut S) -> Ty { if let (TyData::Object(obj1), TyData::Object(obj2)) = (&lhs_data, &rhs_data) { let mut fields = obj1.fields.clone(); for (name, def2) in &obj2.fields { - if let Some(pos) = fields.iter().position(|(n, _)| n == name) { - let (_, def1) = &fields[pos]; + if let Some((_, def1)) = fields.iter_mut().find(|(n, _)| n == name) { let narrowed_ty = ty_and(def1.ty, def2.ty, store); - fields[pos] = ( - name.clone(), - FieldDefInterned { - ty: narrowed_ty, - required: def1.required || def2.required, - visibility: def1.visibility, - }, - ); + *def1 = FieldDefInterned { + ty: narrowed_ty, + required: def1.required || def2.required, + visibility: def1.visibility, + }; } else { fields.push((name.clone(), def2.clone())); } @@ -904,17 +906,13 @@ pub fn ty_with_field(ty: Ty, field: &str, field_ty: Ty, store: } TyData::Object(mut obj) => { - if let Some(pos) = obj.fields.iter().position(|(n, _)| n == field) { - let (_, existing) = &obj.fields[pos]; + if let Some((_, existing)) = obj.fields.iter_mut().find(|(n, _)| n == field) { let narrowed = ty_and(existing.ty, field_ty, store); - obj.fields[pos] = ( - field.to_string(), - FieldDefInterned { - ty: narrowed, - required: true, - visibility: existing.visibility, - }, - ); + *existing = FieldDefInterned { + ty: narrowed, + required: true, + visibility: existing.visibility, + }; } else { obj.fields.push(( field.to_string(), @@ -1257,9 +1255,8 @@ mod tests { #[test] fn test_bounded_number_intersection() { let mut store = TyStore::new(); - use crate::store::NumBounds; - let bounded1 = 
store.bounded_number(NumBounds::at_least(0.0)); - let bounded2 = store.bounded_number(NumBounds::between(-10.0, 10.0)); + let bounded1 = store.bounded_number(crate::store::NumBounds::at_least(0.0)); + let bounded2 = store.bounded_number(crate::store::NumBounds::between(-10.0, 10.0)); let result = ty_and(bounded1, bounded2, &mut store); // Should get [0..10] assert_matches!(store.get_data(result), TyData::BoundedNumber(bounds) => { diff --git a/crates/jrsonnet-lsp-types/src/store.rs b/crates/jrsonnet-lsp-types/src/store.rs index aa5df6b6..ed49c1a6 100644 --- a/crates/jrsonnet-lsp-types/src/store.rs +++ b/crates/jrsonnet-lsp-types/src/store.rs @@ -41,8 +41,13 @@ use std::{ use rustc_hash::FxHashMap; +fn to_u32(value: usize) -> Option { + u32::try_from(value).ok() +} + /// Global counter for generating unique type variable IDs. static TYPE_VAR_COUNTER: AtomicU32 = AtomicU32::new(0); +static ANY_TY_DATA: TyData = TyData::Any; /// An interned type reference. /// @@ -100,50 +105,56 @@ impl Ty { /// Create a Ty from a raw index (global, no local bit). #[inline] + #[must_use] pub const fn from_raw(index: u32) -> Ty { - debug_assert!(index & Self::LOCAL_BIT == 0, "Index too large"); - Ty(index) + Ty(index & Self::INDEX_MASK) } /// Create a local Ty from a raw index. #[inline] + #[must_use] pub const fn from_raw_local(index: u32) -> Ty { - debug_assert!(index & Self::LOCAL_BIT == 0, "Index too large"); - Ty(index | Self::LOCAL_BIT) + Ty((index & Self::INDEX_MASK) | Self::LOCAL_BIT) } /// Get the raw ID including the local bit (for debugging). #[inline] + #[must_use] pub fn id(self) -> u32 { self.0 } /// Get the raw index without the local bit. #[inline] + #[must_use] pub fn raw_index(self) -> u32 { self.0 & Self::INDEX_MASK } /// Check if this is a local (per-file) type. #[inline] + #[must_use] pub fn is_local(self) -> bool { self.0 & Self::LOCAL_BIT != 0 } /// Check if this is a global type. 
#[inline] + #[must_use] pub fn is_global(self) -> bool { !self.is_local() } /// Check if this is the Any type. #[inline] + #[must_use] pub fn is_any(self) -> bool { self == Self::ANY } /// Check if this is the Never type. #[inline] + #[must_use] pub fn is_never(self) -> bool { self == Self::NEVER } @@ -153,12 +164,14 @@ impl Ty { /// Well-known types (ANY, NEVER, NULL, BOOL, TRUE, FALSE, NUMBER, STRING, CHAR) /// have the same `Ty` value across all stores and are always global. #[inline] + #[must_use] pub fn is_well_known(self) -> bool { // Well-known types are always global (no LOCAL_BIT) and have low indices self.0 < Self::RESERVED_COUNT } - /// Get the well-known Ty for a TyData, if it matches a well-known type. + /// Get the well-known Ty for a `TyData`, if it matches a well-known type. + #[must_use] pub fn well_known_for_data(data: &TyData) -> Option { match data { TyData::Any => Some(Ty::ANY), @@ -299,7 +312,7 @@ impl std::fmt::Debug for Ty { Ty::STRING => write!(f, "Ty::STRING"), Ty::CHAR => write!(f, "Ty::CHAR"), ty if ty.is_local() => write!(f, "Ty(L{})", ty.raw_index()), - Ty(id) => write!(f, "Ty({})", id), + Ty(id) => write!(f, "Ty({id})"), } } } @@ -332,6 +345,7 @@ pub struct TyRef<'a> { impl<'a> TyRef<'a> { /// Get the Ty ID this reference points to. 
+ #[must_use] pub fn ty(&self) -> Ty { self.ty } @@ -350,7 +364,11 @@ impl std::ops::Deref for TyRef<'_> { type Target = TyData; fn deref(&self) -> &Self::Target { - &self.store.data[self.ty.raw_index() as usize] + self.store + .data + .get(self.ty.raw_index() as usize) + .or_else(|| self.store.data.first()) + .unwrap_or(&ANY_TY_DATA) } } @@ -384,11 +402,10 @@ impl std::fmt::Display for TyVarId { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { // Display as Greek letters for small IDs, T prefix for larger let greek = ['α', 'β', 'γ', 'δ', 'ε', 'ζ', 'η', 'θ']; - if (self.0 as usize) < greek.len() { - write!(f, "{}", greek[self.0 as usize]) - } else { - write!(f, "T{}", self.0) + if let Some(letter) = greek.get(self.0 as usize) { + return write!(f, "{letter}"); } + write!(f, "T{}", self.0) } } @@ -403,6 +420,7 @@ pub struct NumBounds { impl NumBounds { /// Create unbounded numeric range. + #[must_use] pub fn unbounded() -> Self { Self { min: None, @@ -411,6 +429,7 @@ impl NumBounds { } /// Create a non-negative range (>= 0). + #[must_use] pub fn non_negative() -> Self { Self { min: Some(0.0_f64.to_bits()), @@ -419,6 +438,7 @@ impl NumBounds { } /// Create a range with minimum bound. + #[must_use] pub fn at_least(min: f64) -> Self { Self { min: Some(min.to_bits()), @@ -427,6 +447,7 @@ impl NumBounds { } /// Create a range with both bounds. + #[must_use] pub fn between(min: f64, max: f64) -> Self { Self { min: Some(min.to_bits()), @@ -445,6 +466,7 @@ impl NumBounds { } /// Check if this range is a subset of another range. + #[must_use] pub fn is_subset_of(&self, other: &NumBounds) -> bool { let self_min = self.min_f64(); let self_max = self.max_f64(); @@ -534,6 +556,7 @@ pub struct ObjectData { impl ObjectData { /// Create an empty closed object. + #[must_use] pub fn empty() -> Self { Self { fields: Vec::new(), @@ -542,6 +565,7 @@ impl ObjectData { } /// Create an open object (may have unknown fields). 
+ #[must_use] pub fn open() -> Self { Self { fields: Vec::new(), @@ -550,17 +574,19 @@ impl ObjectData { } /// Get a field by name. + #[must_use] pub fn get_field(&self, name: &str) -> Option<&FieldDefInterned> { self.fields.iter().find(|(n, _)| n == name).map(|(_, f)| f) } /// Merge two objects (right fields override left). + #[must_use] pub fn merge(left: &ObjectData, right: &ObjectData) -> Self { let mut fields = left.fields.clone(); // Add/override fields from right for (name, field) in &right.fields { - if let Some(pos) = fields.iter().position(|(n, _)| n == name) { - fields[pos] = (name.clone(), field.clone()); + if let Some((_, existing)) = fields.iter_mut().find(|(n, _)| n == name) { + *existing = field.clone(); } else { fields.push((name.clone(), field.clone())); } @@ -629,11 +655,13 @@ pub struct FunctionData { impl FunctionData { /// Count of required parameters. + #[must_use] pub fn required_count(&self) -> usize { self.params.iter().filter(|p| !p.has_default).count() } /// Total parameter count. + #[must_use] pub fn total_count(&self) -> usize { self.params.len() } @@ -654,11 +682,13 @@ pub struct TyConstraints { impl TyConstraints { /// No constraints. + #[must_use] pub fn none() -> Self { Self::default() } /// Check if there are any constraints. + #[must_use] pub fn is_empty(&self) -> bool { !self.must_be_indexable && !self.must_support_fields @@ -667,6 +697,7 @@ impl TyConstraints { } /// Check if a type satisfies these constraints. + #[must_use] pub fn satisfied_by(&self, ty: Ty, store: &TyStore) -> bool { if self.must_be_indexable && !store.is_indexable(ty) { return false; @@ -696,6 +727,7 @@ pub struct TySubstitution { impl TySubstitution { /// Create an empty substitution. + #[must_use] pub fn new() -> Self { Self::default() } @@ -706,11 +738,13 @@ impl TySubstitution { } /// Look up a type variable's substitution. 
+ #[must_use] pub fn get(&self, var: TyVarId) -> Option { self.mappings.get(&var).copied() } /// Check if a type variable ID occurs in a type. + #[must_use] pub fn occurs_in(var: TyVarId, ty: Ty, store: &TyStore) -> bool { match *store.get(ty) { TyData::TypeVar { id, .. } => id == var, @@ -790,7 +824,7 @@ pub enum TyData { /// This trait abstracts over `TyStore` and `MutStore`, allowing unification /// and type operation code to work with either store type without duplication. pub trait TypeStoreOps { - /// Get type data for a Ty (returns owned TyData for simplicity). + /// Get type data for a Ty (returns owned `TyData` for simplicity). fn get_data(&self, ty: Ty) -> TyData; /// Format a type for display. @@ -808,7 +842,7 @@ pub trait TypeStoreOps { /// Create an object type. fn object(&mut self, data: ObjectData) -> Ty; - /// Create an AttrsOf type (object with uniform value type). + /// Create an `AttrsOf` type (object with uniform value type). fn attrs_of(&mut self, value: Ty) -> Ty; /// Create a function type. @@ -842,6 +876,7 @@ pub struct TyStore { impl TyStore { /// Create a new type store with well-known types pre-populated. + #[must_use] pub fn new() -> Self { let mut store = Self { data: Vec::with_capacity(64), @@ -875,7 +910,10 @@ impl TyStore { ]; for (i, data) in builtins.into_iter().enumerate() { - let ty = Ty::from_raw(i as u32); + let Some(raw_id) = to_u32(i) else { + return; + }; + let ty = Ty::from_raw(raw_id); self.data.push(data.clone()); // Only dedup the non-padding entries if i < 9 { @@ -887,6 +925,7 @@ impl TyStore { } /// Intern a type, returning existing ID if already present. 
+ /// pub fn intern(&mut self, data: TyData) -> Ty { // Fast path for well-known types if let Some(ty) = Ty::well_known_for_data(&data) { @@ -899,7 +938,10 @@ impl TyStore { } // Intern new type - let id = Ty::from_raw(self.data.len() as u32); + let Some(raw_id) = to_u32(self.data.len()) else { + return Ty::ANY; + }; + let id = Ty::from_raw(raw_id); self.data.push(data.clone()); self.dedup.insert(data, id); id @@ -910,16 +952,19 @@ impl TyStore { /// Returns a `TyRef` that derefs to `&TyData` and implements `Display`. /// Use `*store.get(ty)` to pattern match on the underlying `TyData`. #[inline] + #[must_use] pub fn get(&self, ty: Ty) -> TyRef<'_> { TyRef { store: self, ty } } /// Get the number of interned types. + #[must_use] pub fn len(&self) -> usize { self.data.len() } /// Check if empty (never true after init). + #[must_use] pub fn is_empty(&self) -> bool { self.data.is_empty() } @@ -965,7 +1010,7 @@ impl TyStore { }) } - /// Create an AttrsOf type (object with uniform value type). + /// Create an `AttrsOf` type (object with uniform value type). pub fn attrs_of(&mut self, value: Ty) -> Ty { self.intern(TyData::AttrsOf { value }) } @@ -995,11 +1040,10 @@ impl TyStore { /// Create a union type. pub fn union(&mut self, mut types: Vec) -> Ty { // Simplification rules - if types.is_empty() { - return Ty::NEVER; - } - if types.len() == 1 { - return types[0]; + match types.as_slice() { + [] => return Ty::NEVER, + [only] => return *only, + _ => {} } // Flatten nested unions and remove duplicates @@ -1022,20 +1066,19 @@ impl TyStore { flattened.sort_by_key(|t| t.0); flattened.dedup(); - match flattened.len() { - 0 => Ty::NEVER, - 1 => flattened[0], + match flattened.as_slice() { + [] => Ty::NEVER, + [only] => *only, _ => self.intern(TyData::Union(flattened)), } } /// Create a sum (intersection) type. 
pub fn sum(&mut self, mut types: Vec) -> Ty { - if types.is_empty() { - return Ty::ANY; - } - if types.len() == 1 { - return types[0]; + match types.as_slice() { + [] => return Ty::ANY, + [only] => return *only, + _ => {} } // Flatten and simplify @@ -1057,9 +1100,9 @@ impl TyStore { flattened.sort_by_key(|t| t.0); flattened.dedup(); - match flattened.len() { - 0 => Ty::ANY, - 1 => flattened[0], + match flattened.as_slice() { + [] => Ty::ANY, + [only] => *only, _ => self.intern(TyData::Sum(flattened)), } } @@ -1218,17 +1261,13 @@ impl TyStore { { let mut fields = obj1.fields.clone(); for (name, def2) in &obj2.fields { - if let Some(pos) = fields.iter().position(|(n, _)| n == name) { - let (_, def1) = &fields[pos]; + if let Some((_, def1)) = fields.iter_mut().find(|(n, _)| n == name) { let narrowed_ty = self.narrow(def1.ty, def2.ty); - fields[pos] = ( - name.clone(), - FieldDefInterned { - ty: narrowed_ty, - required: def1.required && def2.required, - visibility: def1.visibility, - }, - ); + *def1 = FieldDefInterned { + ty: narrowed_ty, + required: def1.required && def2.required, + visibility: def1.visibility, + }; } else { fields.push((name.clone(), def2.clone())); } @@ -1429,6 +1468,7 @@ impl TyStore { } /// Check if type is indexable. + #[must_use] pub fn is_indexable(&self, ty: Ty) -> bool { match *self.get(ty) { TyData::Any @@ -1449,6 +1489,7 @@ impl TyStore { } /// Check if type supports field access. + #[must_use] pub fn supports_field_access(&self, ty: Ty) -> bool { match *self.get(ty) { TyData::Any | TyData::Object(_) | TyData::AttrsOf { .. } => true, @@ -1463,6 +1504,7 @@ impl TyStore { } /// Check if type is callable. + #[must_use] pub fn is_callable(&self, ty: Ty) -> bool { match *self.get(ty) { TyData::Any | TyData::Function(_) => true, @@ -1481,6 +1523,7 @@ impl TyStore { /// Checks if `subtype` is a subtype of `supertype`. This is a simplified /// version that handles the most common cases; for full subtype checking /// use the unification module. 
+ #[must_use] pub fn is_subtype_of(&self, subtype: Ty, supertype: Ty) -> bool { // Fast paths if subtype == supertype { @@ -1508,9 +1551,12 @@ impl TyStore { | (TyData::True | TyData::False, TyData::Bool) | (TyData::BoundedNumber(_), TyData::Number) => true, // Array covariance: Array <: Array if A <: B - (TyData::Array { elem: sub_elem, .. }, TyData::Array { elem: sup_elem, .. }) => { - self.is_subtype_of(*sub_elem, *sup_elem) - } + ( + TyData::Array { elem: sub_elem, .. }, + TyData::Array { + elem: super_elem, .. + }, + ) => self.is_subtype_of(*sub_elem, *super_elem), // Tuple <: Array if all elements are subtypes (TyData::Tuple { elems }, TyData::Array { elem: arr_elem, .. }) => { elems.iter().all(|&e| self.is_subtype_of(e, *arr_elem)) @@ -1631,6 +1677,7 @@ impl TyStore { } /// Check if a type has any type variables. + #[must_use] pub fn has_type_vars(&self, ty: Ty) -> bool { match *self.get(ty) { TyData::TypeVar { .. } => true, @@ -1650,6 +1697,7 @@ impl TyStore { } /// Format a type for display. 
+ #[must_use] pub fn display(&self, ty: Ty) -> String { match *self.get(ty) { TyData::Any => "any".to_string(), @@ -1663,18 +1711,18 @@ impl TyStore { (None, None) => "number".to_string(), (Some(lo), Some(hi)) if (lo - hi).abs() < f64::EPSILON => { if lo.fract() == 0.0 { - format!("{}", lo as i64) + format!("{lo:.0}") } else { - format!("{}", lo) + format!("{lo}") } } - (Some(lo), Some(hi)) => format!("number[{}..{}]", lo, hi), - (Some(lo), None) => format!("number[{}..]", lo), - (None, Some(hi)) => format!("number[..{}]", hi), + (Some(lo), Some(hi)) => format!("number[{lo}..{hi}]"), + (Some(lo), None) => format!("number[{lo}..]"), + (None, Some(hi)) => format!("number[..{hi}]"), }, TyData::String => "string".to_string(), TyData::Char => "char".to_string(), - TyData::LiteralString(ref s) => format!("\"{}\"", s), + TyData::LiteralString(ref s) => format!("\"{s}\""), TyData::Array { elem, is_set } => { if is_set { format!("set<{}>", self.display(elem)) diff --git a/crates/jrsonnet-lsp-types/src/subst.rs b/crates/jrsonnet-lsp-types/src/subst.rs index 4efdf1a9..bf0679ab 100644 --- a/crates/jrsonnet-lsp-types/src/subst.rs +++ b/crates/jrsonnet-lsp-types/src/subst.rs @@ -32,6 +32,7 @@ pub struct TySubst { impl TySubst { /// Create an empty substitution. + #[must_use] pub fn new() -> Self { Self::default() } @@ -45,11 +46,11 @@ impl TySubst { /// /// # Arguments /// - `global`: The global store to merge into (mutably borrowed) - /// - `local`: The local store to merge from (consumed) + /// - `local`: The local store to merge from /// /// # Returns /// A substitution that can be used to rewrite local Ty references. 
- pub fn merge(global: &GlobalTyStore, local: LocalTyStore) -> Self { + pub fn merge(global: &GlobalTyStore, local: &LocalTyStore) -> Self { let mut subst = Self::new(); if local.is_empty() { @@ -57,7 +58,7 @@ impl TySubst { } // Get topological ordering of local types - let order = Self::topological_sort(&local); + let order = Self::topological_sort(local); // Process each local type in dependency order for local_ty in order { @@ -81,6 +82,7 @@ impl TySubst { /// - Global types are returned unchanged /// - Local types are looked up in the mapping /// - Unknown local types return the original (shouldn't happen after merge) + #[must_use] pub fn apply(&self, ty: Ty) -> Ty { if ty.is_global() { return ty; @@ -89,21 +91,24 @@ impl TySubst { } /// Check if this substitution contains a mapping for a type. + #[must_use] pub fn contains(&self, ty: Ty) -> bool { self.mapping.contains_key(&ty) } /// Get the number of mappings. + #[must_use] pub fn len(&self) -> usize { self.mapping.len() } /// Check if empty. + #[must_use] pub fn is_empty(&self) -> bool { self.mapping.is_empty() } - /// Apply the substitution to TyData, rewriting all Ty references. + /// Apply the substitution to `TyData`, rewriting all Ty references. fn apply_to_data(&self, data: &TyData) -> TyData { match data { // Primitives have no Ty references @@ -224,7 +229,10 @@ impl TySubst { // Count dependencies (only local ones matter) for &(ty, data) in &types { let deps = Self::get_local_dependencies(data); - *in_degree.get_mut(&ty).unwrap() = deps.len(); + let Some(in_degree_entry) = in_degree.get_mut(&ty) else { + continue; + }; + *in_degree_entry = deps.len(); for dep in deps { if let Some(dep_list) = dependents.get_mut(&dep) { dep_list.push(ty); @@ -269,7 +277,7 @@ impl TySubst { result } - /// Get local Ty references in a TyData. + /// Get local Ty references in a `TyData`. 
fn get_local_dependencies(data: &TyData) -> Vec { let mut deps = Vec::new(); Self::collect_local_refs(data, &mut deps); @@ -346,7 +354,7 @@ mod tests { let global = GlobalTyStore::new(); let local = LocalTyStore::new(); - let subst = TySubst::merge(&global, local); + let subst = TySubst::merge(&global, &local); assert!(subst.is_empty()); } @@ -362,7 +370,7 @@ mod tests { }); assert!(arr.is_local()); - let subst = TySubst::merge(&global, local); + let subst = TySubst::merge(&global, &local); assert_eq!(subst.len(), 1); // The mapped type should be global @@ -392,7 +400,7 @@ mod tests { assert!(inner.is_local()); assert!(outer.is_local()); - let subst = TySubst::merge(&global, local); + let subst = TySubst::merge(&global, &local); assert_eq!(subst.len(), 2); // Both should be mapped to global @@ -433,7 +441,7 @@ mod tests { is_set: false, }); - let subst = TySubst::merge(&global, local); + let subst = TySubst::merge(&global, &local); // Should map to the existing global type assert_eq!(subst.apply(local_arr), global_arr); @@ -447,7 +455,7 @@ mod tests { // Create a local union type let union = local.intern(TyData::Union(vec![Ty::NUMBER, Ty::STRING])); - let subst = TySubst::merge(&global, local); + let subst = TySubst::merge(&global, &local); let global_union = subst.apply(union); assert!(global_union.is_global()); @@ -474,7 +482,7 @@ mod tests { has_unknown: false, })); - let subst = TySubst::merge(&global, local); + let subst = TySubst::merge(&global, &local); let global_obj = subst.apply(obj); assert!(global_obj.is_global()); @@ -533,7 +541,7 @@ mod tests { is_set: false, }); - let subst = TySubst::merge(&global, local); + let subst = TySubst::merge(&global, &local); let global_0 = subst.apply(local_0); let global_1 = subst.apply(local_1); diff --git a/crates/jrsonnet-lsp-types/src/unification.rs b/crates/jrsonnet-lsp-types/src/unification.rs index 241f8503..81a8e23b 100644 --- a/crates/jrsonnet-lsp-types/src/unification.rs +++ 
b/crates/jrsonnet-lsp-types/src/unification.rs @@ -73,9 +73,9 @@ impl std::fmt::Display for PathElement { match self { PathElement::Parameter(i) => write!(f, "parameter {}", i + 1), PathElement::ReturnType => write!(f, "return type"), - PathElement::Field(name) => write!(f, "field '{}'", name), + PathElement::Field(name) => write!(f, "field '{name}'"), PathElement::ArrayElement => write!(f, "array element"), - PathElement::TupleElement(i) => write!(f, "element {}", i), + PathElement::TupleElement(i) => write!(f, "element {i}"), PathElement::UnionVariant => write!(f, "union variant"), } } @@ -114,6 +114,7 @@ pub struct UnifyError { impl UnifyError { /// Create a new unification error. + #[must_use] pub fn new(got: Ty, expected: Ty, reason: UnifyReason) -> Self { Self { got, @@ -151,13 +152,13 @@ impl UnifyError { match &self.reason { UnifyReason::TypeMismatch => {} UnifyReason::MissingField(name) => { - let _ = write!(msg, " (missing required field '{}')", name); + let _ = write!(msg, " (missing required field '{name}')"); } UnifyReason::ExtraField(name) => { - let _ = write!(msg, " (unexpected field '{}')", name); + let _ = write!(msg, " (unexpected field '{name}')"); } UnifyReason::ParamCountMismatch { got, expected } => { - let _ = write!(msg, " (expected {} parameters, got {})", expected, got); + let _ = write!(msg, " (expected {expected} parameters, got {got})"); } UnifyReason::Nested(inner) => { let _ = write!(msg, " caused by: {}", inner.format(store)); @@ -168,10 +169,10 @@ impl UnifyError { // Show the variant type that was attempted let reason = match &err.reason { UnifyReason::TypeMismatch => "type mismatch".to_string(), - UnifyReason::MissingField(name) => format!("missing field '{}'", name), - UnifyReason::ExtraField(name) => format!("unexpected field '{}'", name), + UnifyReason::MissingField(name) => format!("missing field '{name}'"), + UnifyReason::ExtraField(name) => format!("unexpected field '{name}'"), UnifyReason::ParamCountMismatch { got, 
expected } => { - format!("expected {} params, got {}", expected, got) + format!("expected {expected} params, got {got}") } UnifyReason::Nested(inner) => inner.format(store), UnifyReason::UnionMismatch(_) => "nested union mismatch".to_string(), @@ -191,7 +192,11 @@ pub type UnifyResult = Result<(), UnifyError>; /// Check if `got` type can be used where `expected` type is required. /// /// This is the primary API for type unification with interned types. -/// Works directly with TyData, avoiding allocation from export(). +/// Works directly with `TyData`, avoiding allocation from `export()`. +/// +/// # Errors +/// Returns `Err(UnifyError)` when the provided types are incompatible under the +/// requested variance. pub fn unify_ty( store: &S, got: Ty, @@ -482,10 +487,11 @@ fn unify_functions_ty( } // Parameters are CONTRAVARIANT: swap argument order - let param_count = got.params.len().min(expected.params.len()); - for i in 0..param_count { - let got_param_ty = got.params[i].ty; - let expected_param_ty = expected.params[i].ty; + for (i, (got_param, expected_param)) in + got.params.iter().zip(expected.params.iter()).enumerate() + { + let got_param_ty = got_param.ty; + let expected_param_ty = expected_param.ty; // Swap argument order for contravariant position unify_ty(store, expected_param_ty, got_param_ty, variance) @@ -507,7 +513,7 @@ fn unify_functions_ty( .map_err(|e| e.with_path(PathElement::ReturnType)) } -/// Create a UnifyError for type mismatch. +/// Create a `UnifyError` for type mismatch. 
fn make_error(got: Ty, expected: Ty) -> UnifyError { UnifyError::new(got, expected, UnifyReason::TypeMismatch) } @@ -556,7 +562,7 @@ mod tests { .into_iter() .enumerate() .map(|(i, ty)| ParamInterned { - name: format!("p{}", i), + name: format!("p{i}"), ty, has_default: false, }) @@ -819,13 +825,11 @@ mod tests { // Should show each variant type that was tried assert!( formatted.contains("tried `number`: type mismatch"), - "Should show number variant: {}", - formatted + "Should show number variant: {formatted}" ); assert!( formatted.contains("tried `string`: type mismatch"), - "Should show string variant: {}", - formatted + "Should show string variant: {formatted}" ); } @@ -862,13 +866,11 @@ mod tests { // Should show type mismatch for number and missing field for object assert!( formatted.contains("tried `number`"), - "Should show number variant: {}", - formatted + "Should show number variant: {formatted}" ); assert!( formatted.contains("missing field 'a'"), - "Should show missing field error: {}", - formatted + "Should show missing field error: {formatted}" ); } } diff --git a/crates/jrsonnet-lsp/src/analysis/eval.rs b/crates/jrsonnet-lsp/src/analysis/eval.rs index 23c8bfe1..9c06ac0f 100644 --- a/crates/jrsonnet-lsp/src/analysis/eval.rs +++ b/crates/jrsonnet-lsp/src/analysis/eval.rs @@ -50,6 +50,7 @@ pub struct Evaluator { impl Evaluator { /// Create a new evaluator with the given configuration. + #[must_use] pub fn new(config: &EvalConfig) -> Self { Self { base_jpath: config.jpath.clone(), @@ -81,6 +82,7 @@ impl Evaluator { /// Evaluate a document and return any diagnostics. /// /// Returns `None` if evaluation succeeds, or a diagnostic if it fails. 
+ #[must_use] pub fn evaluate( &self, path: &CanonicalPath, diff --git a/crates/jrsonnet-lsp/src/analysis/tanka.rs b/crates/jrsonnet-lsp/src/analysis/tanka.rs index 26f02753..0cf3e91a 100644 --- a/crates/jrsonnet-lsp/src/analysis/tanka.rs +++ b/crates/jrsonnet-lsp/src/analysis/tanka.rs @@ -10,6 +10,7 @@ use std::path::{Path, PathBuf}; /// /// The root is the directory that contains either `tkrc.yaml` or `jsonnetfile.json`. /// Returns `None` if no root is found. +#[must_use] pub fn find_root(path: &Path) -> Option { let start = if path.is_file() { path.parent()? } else { path }; diff --git a/crates/jrsonnet-lsp/src/async_diagnostics.rs b/crates/jrsonnet-lsp/src/async_diagnostics.rs index 60359a95..596984be 100644 --- a/crates/jrsonnet-lsp/src/async_diagnostics.rs +++ b/crates/jrsonnet-lsp/src/async_diagnostics.rs @@ -76,6 +76,13 @@ struct WorkerDocumentSource { documents: SharedDocumentManager, } +struct WorkerRuntime { + requests: Receiver, + results: Sender, + latest_sequences: Arc>>, + config: DiagnosticsConfig, +} + impl DocumentSource for WorkerDocumentSource { fn get_document(&self, path: &CanonicalPath) -> Option { if path == &self.current_path { @@ -103,6 +110,7 @@ pub struct AsyncDiagnostics { impl AsyncDiagnostics { /// Create a new async diagnostics runner with the given configuration. + #[must_use] pub fn new(config: DiagnosticsConfig) -> Self { let (request_sender, request_receiver) = crossbeam_channel::unbounded(); let (result_sender, result_receiver) = crossbeam_channel::unbounded(); @@ -110,7 +118,12 @@ impl AsyncDiagnostics { let sequences_clone = Arc::clone(&latest_sequences); let thread_handle = thread::spawn(move || { - Self::worker_loop(request_receiver, result_sender, sequences_clone, config); + Self::worker_loop(WorkerRuntime { + requests: request_receiver, + results: result_sender, + latest_sequences: sequences_clone, + config, + }); }); Self { @@ -158,17 +171,17 @@ impl AsyncDiagnostics { } /// Background worker loop. 
- fn worker_loop( - requests: Receiver, - results: Sender, - latest_sequences: Arc>>, - config: DiagnosticsConfig, - ) { + fn worker_loop(runtime: WorkerRuntime) { + let WorkerRuntime { + requests, + results, + latest_sequences, + config, + } = runtime; + loop { // Wait for a request - let request = if let Ok(req) = requests.recv() { - req - } else { + let Ok(request) = requests.recv() else { // Channel closed, exit debug!("Diagnostics worker: channel closed, exiting"); break; @@ -219,13 +232,19 @@ impl AsyncDiagnostics { ); let analysis = provider.analyze(&request.path, &document, &doc_source); - let params = handlers::publish_diagnostics_params( + let Some(params) = handlers::publish_diagnostics_params( &request.path, &document, request.enable_lint, config.evaluator.as_deref(), &analysis, - ); + ) else { + debug!( + "Diagnostics worker: failed to build URI for {}, skipping diagnostics", + request.path.as_path().display() + ); + continue; + }; // Check again if still the latest (diagnostics computation may have taken time) { @@ -316,7 +335,7 @@ mod tests { // Schedule multiple requests rapidly for i in 0..5 { - let text = format!("{{ a: {} }}", i); + let text = format!("{{ a: {i} }}"); let version = DocVersion::new(i); runner.schedule(path.clone(), text, version, false); } diff --git a/crates/jrsonnet-lsp/src/config.rs b/crates/jrsonnet-lsp/src/config.rs index 9fce255b..33f5f986 100644 --- a/crates/jrsonnet-lsp/src/config.rs +++ b/crates/jrsonnet-lsp/src/config.rs @@ -17,7 +17,7 @@ use serde::{Deserialize, Serialize}; /// - `initializationOptions` in the initialize request /// - `workspace/didChangeConfiguration` notification /// -/// Field names use snake_case internally but accept camelCase from JSON +/// Field names use `snake_case` internally but accept camelCase from JSON /// for compatibility with VS Code settings. 
#[derive(Debug, Clone, Default, Serialize, Deserialize)] #[serde(default)] @@ -124,11 +124,13 @@ impl ServerConfigPatch { impl ServerConfig { /// Create a new default configuration. + #[must_use] pub fn new() -> Self { Self::default() } /// Parse configuration from LSP initialization options. + #[must_use] pub fn from_initialization_options(value: Option) -> Self { match value { Some(v) => serde_json::from_value(v).unwrap_or_default(), @@ -221,21 +223,25 @@ impl ServerConfig { /// Get all library paths for import resolution. /// Returns jpath entries. + #[must_use] pub fn library_paths(&self) -> &[PathBuf] { &self.jpath } /// Check if a jpath is configured. + #[must_use] pub fn has_jpath(&self) -> bool { !self.jpath.is_empty() } /// Check if evaluation diagnostics are enabled. + #[must_use] pub fn eval_diagnostics_enabled(&self) -> bool { self.enable_eval_diagnostics } /// Check if lint diagnostics are enabled. + #[must_use] pub fn lint_diagnostics_enabled(&self) -> bool { self.enable_lint_diagnostics } diff --git a/crates/jrsonnet-lsp/src/handlers/diagnostics.rs b/crates/jrsonnet-lsp/src/handlers/diagnostics.rs index ac800838..0f76a83e 100644 --- a/crates/jrsonnet-lsp/src/handlers/diagnostics.rs +++ b/crates/jrsonnet-lsp/src/handlers/diagnostics.rs @@ -82,7 +82,7 @@ pub fn compute_diagnostics( diagnostics } -/// Create a PublishDiagnostics notification. +/// Create a `PublishDiagnostics` notification. 
/// /// # Arguments /// * `path` - The canonical path of the document @@ -96,15 +96,15 @@ pub fn publish_diagnostics_params( enable_lint: bool, evaluator: Option<&Evaluator>, analysis: &TypeAnalysis, -) -> lsp_types::PublishDiagnosticsParams { - let uri = path.to_uri(); +) -> Option { + let uri = path.to_uri().ok()?; let diagnostics = compute_diagnostics(document, path, enable_lint, evaluator, &uri, analysis); - lsp_types::PublishDiagnosticsParams { + Some(lsp_types::PublishDiagnosticsParams { uri, diagnostics, version: Some(document.version().0), - } + }) } #[cfg(test)] @@ -430,7 +430,6 @@ mod tests { assert!(diagnostics.iter().all(|d| d .code .as_ref() - .map(|c| matches!(c, NumberOrString::String(s) if s == "syntax-error")) - .unwrap_or(false))); + .is_some_and(|c| matches!(c, NumberOrString::String(s) if s == "syntax-error")))); } } diff --git a/crates/jrsonnet-lsp/src/server.rs b/crates/jrsonnet-lsp/src/server.rs index 89652dd2..9f5a9e3b 100644 --- a/crates/jrsonnet-lsp/src/server.rs +++ b/crates/jrsonnet-lsp/src/server.rs @@ -86,6 +86,7 @@ pub struct Server { impl Server { /// Create a new server with the given connection. + #[must_use] pub fn new(connection: Connection) -> Self { let global_types = Arc::new(GlobalTyStore::new()); let documents = Arc::new(DocumentManager::new(Arc::clone(&global_types))); @@ -183,7 +184,7 @@ impl Server { Response::new_err( id, lsp_server::ErrorCode::InternalError as i32, - format!("{} failed: {err:#}", method), + format!("{method} failed: {err:#}"), ) } Err(_) => { @@ -191,7 +192,7 @@ impl Server { Response::new_err( id, lsp_server::ErrorCode::InternalError as i32, - format!("{} panicked", method), + format!("{method} panicked"), ) } }; @@ -202,6 +203,10 @@ impl Server { } /// Run the server, handling the initialize handshake first. + /// + /// # Errors + /// Returns an error if initialization fails, request/notification processing fails, + /// or message I/O over the LSP connection fails. 
pub fn run(mut self) -> Result<()> { info!("Starting jrsonnet language server"); @@ -272,7 +277,7 @@ impl Server { info!("Initialize request from: {:?}", params.client_info); Ok((req.id, params)) } - _ => anyhow::bail!("Expected initialize request, got: {:?}", msg), + _ => anyhow::bail!("Expected initialize request, got: {msg:?}"), } } @@ -519,7 +524,7 @@ impl Server { let params: CodeLens = serde_json::from_value(params)?; self.send_ok_response(id, Self::on_code_lens_resolve(params)) } - _ => unreachable!("sync request method already filtered: {method}"), + _ => self.send_method_not_found_response(id, method), } } @@ -527,14 +532,14 @@ impl Server { &self, id: RequestId, params: serde_json::Value, - handler: fn(&Self, P) -> R, + handler: fn(&Self, &P) -> R, ) -> Result<()> where P: DeserializeOwned, R: Serialize, { let params: P = serde_json::from_value(params)?; - self.send_ok_response(id, handler(self, params)) + self.send_ok_response(id, handler(self, ¶ms)) } fn spawn_json_response(&self, id: RequestId, method: &'static str, compute: F) @@ -617,7 +622,7 @@ impl Server { params, AsyncRequestContext::execute_command, ), - _ => unreachable!("async request method already filtered: {method}"), + _ => self.send_method_not_found_response(id, method), } } @@ -626,7 +631,7 @@ impl Server { id: RequestId, method: &'static str, params: serde_json::Value, - handler: fn(&AsyncRequestContext, P) -> R, + handler: fn(&AsyncRequestContext, &P) -> R, ) -> Result<()> where P: DeserializeOwned + Send + 'static, @@ -634,12 +639,12 @@ impl Server { { let params: P = serde_json::from_value(params)?; let context = self.async_request_context(); - self.spawn_json_response(id, method, move || handler(&context, params)); + self.spawn_json_response(id, method, move || handler(&context, ¶ms)); Ok(()) } /// Handle textDocument/documentSymbol request. 
- fn on_document_symbol(&self, params: DocumentSymbolParams) -> Option { + fn on_document_symbol(&self, params: &DocumentSymbolParams) -> Option { let uri = ¶ms.text_document.uri; let path = CanonicalPath::from_uri(uri)?; let doc = self.documents.get(&path)?.clone(); @@ -651,7 +656,7 @@ impl Server { /// Handle textDocument/documentHighlight request. fn on_document_highlight( &self, - params: DocumentHighlightParams, + params: &DocumentHighlightParams, ) -> Option> { let uri = ¶ms.text_document_position_params.text_document.uri; let position = params.text_document_position_params.position; @@ -668,7 +673,7 @@ impl Server { } /// Handle textDocument/codeAction request. - fn on_code_action(&self, params: CodeActionParams) -> Option { + fn on_code_action(&self, params: &CodeActionParams) -> Option { let uri = ¶ms.text_document.uri; let path = CanonicalPath::from_uri(uri)?; let actions = { @@ -683,7 +688,7 @@ impl Server { } /// Handle textDocument/signatureHelp request. - fn on_signature_help(&self, params: SignatureHelpParams) -> Option { + fn on_signature_help(&self, params: &SignatureHelpParams) -> Option { let uri = ¶ms.text_document_position_params.text_document.uri; let position = params.text_document_position_params.position; let path = CanonicalPath::from_uri(uri)?; @@ -695,7 +700,7 @@ impl Server { } /// Handle textDocument/formatting request. - fn on_formatting(&self, params: DocumentFormattingParams) -> Option> { + fn on_formatting(&self, params: &DocumentFormattingParams) -> Option> { let uri = ¶ms.text_document.uri; let path = CanonicalPath::from_uri(uri)?; let doc = self.documents.get(&path)?; @@ -709,7 +714,7 @@ impl Server { /// Handle textDocument/prepareRename request. fn on_prepare_rename( &self, - params: TextDocumentPositionParams, + params: &TextDocumentPositionParams, ) -> Option { let uri = ¶ms.text_document.uri; let position = params.position; @@ -722,7 +727,7 @@ impl Server { } /// Handle textDocument/semanticTokens/full request. 
- fn on_semantic_tokens_full(&self, params: SemanticTokensParams) -> Option { + fn on_semantic_tokens_full(&self, params: &SemanticTokensParams) -> Option { let uri = ¶ms.text_document.uri; let path = CanonicalPath::from_uri(uri)?; let doc = self.documents.get(&path)?; @@ -733,7 +738,7 @@ impl Server { /// Handle textDocument/semanticTokens/range request. fn on_semantic_tokens_range( &self, - params: SemanticTokensRangeParams, + params: &SemanticTokensRangeParams, ) -> Option { let uri = ¶ms.text_document.uri; let path = CanonicalPath::from_uri(uri)?; @@ -766,7 +771,7 @@ impl Server { } DidCloseTextDocument::METHOD => { let params: DidCloseTextDocumentParams = serde_json::from_value(notif.params)?; - self.on_did_close(params)?; + self.on_did_close(¶ms)?; } DidSaveTextDocument::METHOD => { let params: DidSaveTextDocumentParams = serde_json::from_value(notif.params)?; @@ -857,7 +862,7 @@ impl Server { } /// Handle textDocument/didClose notification. - fn on_did_close(&self, params: DidCloseTextDocumentParams) -> Result<()> { + fn on_did_close(&self, params: &DidCloseTextDocumentParams) -> Result<()> { let uri = ¶ms.text_document.uri; info!("Document closed: {}", uri.as_str()); @@ -1098,6 +1103,10 @@ impl Server { } /// Run the LSP server over stdio. +/// +/// # Errors +/// Returns an error if server startup fails, the server loop returns an error, +/// or stdio worker threads fail to join. 
pub fn run_stdio() -> Result<()> { let (connection, io_threads) = Connection::stdio(); let server = Server::new(connection); diff --git a/crates/jrsonnet-lsp/src/server/async_requests.rs b/crates/jrsonnet-lsp/src/server/async_requests.rs index 9da250f6..1974003f 100644 --- a/crates/jrsonnet-lsp/src/server/async_requests.rs +++ b/crates/jrsonnet-lsp/src/server/async_requests.rs @@ -56,7 +56,7 @@ impl AsyncRequestContext { provider.analyze(path, doc, self.documents.as_ref()) } - pub(super) fn hover(&self, params: HoverParams) -> Option { + pub(super) fn hover(&self, params: &HoverParams) -> Option { let uri = ¶ms.text_document_position_params.text_document.uri; let position = params.text_document_position_params.position; let path = CanonicalPath::from_uri(uri)?; @@ -68,7 +68,7 @@ impl AsyncRequestContext { pub(super) fn goto_definition( &self, - params: GotoDefinitionParams, + params: &GotoDefinitionParams, ) -> Option { let uri = ¶ms.text_document_position_params.text_document.uri; let position = params.text_document_position_params.position; @@ -86,8 +86,9 @@ impl AsyncRequestContext { } handlers::DefinitionResult::Import(import_path) => { let resolved = self.resolve_import_path(&path, &import_path)?; + let resolved_uri = resolved.to_uri().ok()?; Some(GotoDefinitionResponse::Scalar(Location { - uri: resolved.to_uri(), + uri: resolved_uri, range: lsp_types::Range::default(), })) } @@ -96,12 +97,13 @@ impl AsyncRequestContext { fields, } => { let resolved = self.resolve_import_path(&path, &import_path)?; + let resolved_uri = resolved.to_uri().ok()?; let range = self .find_field_in_file(&resolved, &fields) .map(|locations| locations.declaration) .unwrap_or_default(); Some(GotoDefinitionResponse::Scalar(Location { - uri: resolved.to_uri(), + uri: resolved_uri, range, })) } @@ -110,14 +112,14 @@ impl AsyncRequestContext { pub(super) fn goto_declaration( &self, - params: GotoDefinitionParams, + params: &GotoDefinitionParams, ) -> Option { self.goto_definition(params) } 
pub(super) fn goto_implementation( &self, - params: GotoDefinitionParams, + params: &GotoDefinitionParams, ) -> Option { let uri = ¶ms.text_document_position_params.text_document.uri; let position = params.text_document_position_params.position; @@ -136,9 +138,10 @@ impl AsyncRequestContext { } handlers::DefinitionResult::Import(import_path) => { let resolved = self.resolve_import_path(&path, &import_path)?; + let resolved_uri = resolved.to_uri().ok()?; let range = self.document_root_expr_range(&resolved).unwrap_or_default(); Some(GotoDefinitionResponse::Scalar(Location { - uri: resolved.to_uri(), + uri: resolved_uri, range, })) } @@ -147,20 +150,21 @@ impl AsyncRequestContext { fields, } => { let resolved = self.resolve_import_path(&path, &import_path)?; + let resolved_uri = resolved.to_uri().ok()?; let range = self .find_field_in_file(&resolved, &fields) .map(|locations| locations.implementation) .or_else(|| self.document_root_expr_range(&resolved)) .unwrap_or_default(); Some(GotoDefinitionResponse::Scalar(Location { - uri: resolved.to_uri(), + uri: resolved_uri, range, })) } } } - pub(super) fn inlay_hints(&self, params: InlayHintParams) -> Option> { + pub(super) fn inlay_hints(&self, params: &InlayHintParams) -> Option> { let uri = ¶ms.text_document.uri; let path = CanonicalPath::from_uri(uri)?; let doc = self.documents.get(&path)?.clone(); @@ -172,7 +176,7 @@ impl AsyncRequestContext { Some(hints) } - pub(super) fn completion(&self, params: CompletionParams) -> Option { + pub(super) fn completion(&self, params: &CompletionParams) -> Option { let uri = ¶ms.text_document_position.text_document.uri; let position = params.text_document_position.position; let path = CanonicalPath::from_uri(uri)?; @@ -192,7 +196,7 @@ impl AsyncRequestContext { Some(CompletionResponse::List(list)) } - pub(super) fn references(&self, params: ReferenceParams) -> Option> { + pub(super) fn references(&self, params: &ReferenceParams) -> Option> { let uri = 
¶ms.text_document_position.text_document.uri; let position = params.text_document_position.position; let path = CanonicalPath::from_uri(uri)?; @@ -223,14 +227,16 @@ impl AsyncRequestContext { pub(super) fn workspace_symbol( &self, - params: WorkspaceSymbolParams, + params: &WorkspaceSymbolParams, ) -> Option> { let query = ¶ms.query; let all_symbols: Vec = self .documents .par_iter() .flat_map(|entry| { - let uri = entry.key().to_uri(); + let Ok(uri) = entry.key().to_uri() else { + return Vec::new(); + }; handlers::workspace_symbols_for_document(entry.value(), &uri, query) }) .collect(); @@ -241,7 +247,7 @@ impl AsyncRequestContext { Some(all_symbols) } - pub(super) fn rename(&self, params: RenameParams) -> Option { + pub(super) fn rename(&self, params: &RenameParams) -> Option { let uri = ¶ms.text_document_position.text_document.uri; let position = params.text_document_position.position; let path = CanonicalPath::from_uri(uri)?; @@ -269,7 +275,7 @@ impl AsyncRequestContext { ) } - pub(super) fn code_lens(&self, params: CodeLensParams) -> Vec { + pub(super) fn code_lens(&self, params: &CodeLensParams) -> Vec { let uri = ¶ms.text_document.uri; let Some(path) = CanonicalPath::from_uri(uri) else { return Vec::new(); @@ -286,7 +292,7 @@ impl AsyncRequestContext { pub(super) fn execute_command( &self, - params: ExecuteCommandParams, + params: &ExecuteCommandParams, ) -> Option { info!("Execute command: {}", params.command); @@ -418,7 +424,10 @@ impl AsyncRequestContext { let importers = import_graph.transitive_importers(&path); drop(import_graph); - let importer_uris: Vec = importers.iter().map(|p| p.to_uri().to_string()).collect(); + let importer_uris: Vec = importers + .iter() + .filter_map(|p| p.to_uri().ok().map(|uri| uri.to_string())) + .collect(); Some(serde_json::json!({ "file": uri, @@ -434,19 +443,18 @@ impl AsyncRequestContext { include_declaration: bool, ) -> Option { let uri_parsed: lsp_types::Uri = uri.parse().ok()?; - let refs = self - 
.references(ReferenceParams { - text_document_position: TextDocumentPositionParams { - text_document: TextDocumentIdentifier { uri: uri_parsed }, - position: Position { line, character }, - }, - context: ReferenceContext { - include_declaration, - }, - work_done_progress_params: WorkDoneProgressParams::default(), - partial_result_params: PartialResultParams::default(), - }) - .unwrap_or_default(); + let params = ReferenceParams { + text_document_position: TextDocumentPositionParams { + text_document: TextDocumentIdentifier { uri: uri_parsed }, + position: Position { line, character }, + }, + context: ReferenceContext { + include_declaration, + }, + work_done_progress_params: WorkDoneProgressParams::default(), + partial_result_params: PartialResultParams::default(), + }; + let refs = self.references(¶ms).unwrap_or_default(); serde_json::to_value(refs).ok() } diff --git a/crates/jrsonnet-lsp/tests/framework/mod.rs b/crates/jrsonnet-lsp/tests/framework/mod.rs index b1d10c47..6d3b4120 100644 --- a/crates/jrsonnet-lsp/tests/framework/mod.rs +++ b/crates/jrsonnet-lsp/tests/framework/mod.rs @@ -57,7 +57,7 @@ pub fn check(source: &str) { } msg.push_str("\nSource:\n"); for (i, line) in parsed.source.lines().enumerate() { - let _ = writeln!(msg, "{:3}| {}", i, line); + let _ = writeln!(msg, "{i:3}| {line}"); } panic!("{}", msg); } diff --git a/crates/jrsonnet-lsp/tests/framework/parser.rs b/crates/jrsonnet-lsp/tests/framework/parser.rs index 18336cc2..c1ebcced 100644 --- a/crates/jrsonnet-lsp/tests/framework/parser.rs +++ b/crates/jrsonnet-lsp/tests/framework/parser.rs @@ -63,8 +63,7 @@ impl ParsedSource { pub fn annotations_at(&self, line: u32, col: u32) -> &[Annotation] { self.position_map .get(&(line, col)) - .map(std::vec::Vec::as_slice) - .unwrap_or(&[]) + .map_or(&[], std::vec::Vec::as_slice) } } @@ -121,7 +120,7 @@ fn parse_annotation_line(line: &str, prev_line: u32) -> Option { let caret_pos = content.find('^')?; // The column is the position of ^ in the content 
(accounting for leading spaces) - let column = caret_pos as u32; + let column = u32::try_from(caret_pos).ok()?; // Parse the rest after the ^ let rest = content[caret_pos + 1..].trim(); diff --git a/crates/jrsonnet-lsp/tests/stress_tests.rs b/crates/jrsonnet-lsp/tests/stress_tests.rs index 2249232c..5b8b4f7c 100644 --- a/crates/jrsonnet-lsp/tests/stress_tests.rs +++ b/crates/jrsonnet-lsp/tests/stress_tests.rs @@ -259,7 +259,7 @@ fn test_rapid_document_changes() { // Rapid full-document changes (simulating fast typing) for i in 2..=100 { - let content = format!("{{ x: {} }}", i); + let content = format!("{{ x: {i} }}"); client_conn .sender .send(Message::Notification(did_change_notification_full( @@ -314,15 +314,10 @@ fn test_rapid_incremental_changes() { let chars = ['x', ' ', '+', ' ', '1']; let mut version = 2; for (i, ch) in chars.iter().enumerate() { + let character = u32::try_from(i).expect("typing index should fit in u32"); let range = Range { - start: Position { - line: 1, - character: i as u32, - }, - end: Position { - line: 1, - character: i as u32, - }, + start: Position { line: 1, character }, + end: Position { line: 1, character }, }; client_conn .sender @@ -469,12 +464,11 @@ fn test_many_documents() { // Open many documents for i in 0..num_documents { - let uri = format!("file:///test/doc{}.jsonnet", i); + let uri = format!("file:///test/doc{i}.jsonnet"); let content = format!( r"local x{i} = {i}; local f{i}(a) = a + x{i}; -f{i}(1)", - i = i +f{i}(1)" ); client_conn .sender @@ -490,7 +484,7 @@ f{i}(1)", // Query each document to verify they're all accessible let mut successful_queries = 0; for i in 0..num_documents { - let uri = format!("file:///test/doc{}.jsonnet", i); + let uri = format!("file:///test/doc{i}.jsonnet"); client_conn .sender .send(Message::Request(hover_request(i + 100, &uri, 0, 7))) @@ -520,9 +514,7 @@ f{i}(1)", assert!( successful_queries >= num_documents / 2, - "Should successfully query at least half the documents, got {}/{}", - 
successful_queries, - num_documents + "Should successfully query at least half the documents, got {successful_queries}/{num_documents}" ); shutdown_server(&client_conn, server_thread, 9999); @@ -540,7 +532,7 @@ fn test_large_document() { // Generate a large document with many local bindings let mut content = String::new(); - let num_locals = 200; + let num_locals: usize = 200; for i in 0..num_locals { let _ = writeln!(content, "local x{i} = {i};"); } @@ -582,7 +574,7 @@ fn test_large_document() { .send(Message::Request(goto_definition_request( 101, uri, - middle_line as u32, + u32::try_from(middle_line).expect("middle line should fit in u32"), 12, ))) .unwrap(); @@ -597,7 +589,7 @@ fn test_large_document() { ); break; } - Ok(Message::Notification(_) | Message::Request(_)) => continue, + Ok(Message::Notification(_) | Message::Request(_)) => {} Err(err) => panic!("Should receive goto definition response: {err:?}"), } } @@ -653,13 +645,12 @@ fn test_changes_during_requests() { if let Some(err) = &resp.error { assert!( err.code != -32603, - "Should not have internal error: {:?}", - err + "Should not have internal error: {err:?}" ); } break; } - Ok(Message::Notification(_) | Message::Request(_)) => continue, + Ok(Message::Notification(_) | Message::Request(_)) => {} Err(RecvTimeoutError::Timeout | RecvTimeoutError::Disconnected) => { // Timeout is acceptable - document changed break; @@ -684,7 +675,7 @@ fn test_changes_during_requests() { ); break; } - Ok(Message::Notification(_) | Message::Request(_)) => continue, + Ok(Message::Notification(_) | Message::Request(_)) => {} Err(err) => panic!("Server should respond after document change: {err:?}"), } } @@ -729,7 +720,7 @@ f(x)"; loop { let Some(remaining) = timeout.checked_sub(start.elapsed()) else { - panic!("Request {:?} timed out", req_id); + panic!("Request {req_id:?} timed out"); }; match client_conn.receiver.recv_timeout(remaining) { Ok(Message::Response(resp)) => { @@ -737,15 +728,11 @@ f(x)"; resp.id, req_id, 
"Should receive response for correct request" ); - assert!( - resp.error.is_none(), - "Request {:?} should not error", - req_id - ); + assert!(resp.error.is_none(), "Request {req_id:?} should not error"); break; } - Ok(Message::Notification(_) | Message::Request(_)) => continue, - Err(err) => panic!("Request {:?} timed out: {:?}", req_id, err), + Ok(Message::Notification(_) | Message::Request(_)) => {} + Err(err) => panic!("Request {req_id:?} timed out: {err:?}"), } } } @@ -763,8 +750,8 @@ fn test_shutdown_during_processing() { // Open several documents to keep the server busy for i in 0..10 { - let uri = format!("file:///test/shutdown{}.jsonnet", i); - let content = format!("local x{} = {}; x{}", i, i, i); + let uri = format!("file:///test/shutdown{i}.jsonnet"); + let content = format!("local x{i} = {i}; x{i}"); client_conn .sender .send(Message::Notification(did_open_notification( @@ -802,7 +789,7 @@ fn test_shutdown_during_processing() { got_response = true; break; } - Ok(Message::Notification(_) | Message::Request(_)) => continue, + Ok(Message::Notification(_) | Message::Request(_)) => {} Err(RecvTimeoutError::Timeout | RecvTimeoutError::Disconnected) => break, } } From 1990d781ba81ed3120260634a1268bb07baccf6a Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Mon, 9 Feb 2026 15:43:22 +0000 Subject: [PATCH 010/210] refactor(lsp): split declaration navigation path from definition - route definition/declaration/implementation through explicit target-based resolution - keep declaration semantics aligned with definition for declaration sites while preserving distinct implementation jumps - strengthen integration coverage with structural assertions and declaration checks for import-field navigation - update handler docs to describe dedicated declaration dispatch --- .../jrsonnet-lsp/src/server/async_requests.rs | 84 +++++++++---------- crates/jrsonnet-lsp/tests/integration_test.rs | 64 ++++++++++---- docs/lsp/HANDLERS.md | 5 +- 3 files changed, 90 insertions(+), 63 
deletions(-) diff --git a/crates/jrsonnet-lsp/src/server/async_requests.rs b/crates/jrsonnet-lsp/src/server/async_requests.rs index 1974003f..09755955 100644 --- a/crates/jrsonnet-lsp/src/server/async_requests.rs +++ b/crates/jrsonnet-lsp/src/server/async_requests.rs @@ -29,6 +29,13 @@ pub(super) struct AsyncRequestContext { config: SharedConfig, } +#[derive(Clone, Copy, Debug, Eq, PartialEq)] +enum GotoTarget { + Definition, + Declaration, + Implementation, +} + impl AsyncRequestContext { pub(super) fn new( documents: SharedDocumentManager, @@ -70,56 +77,27 @@ impl AsyncRequestContext { &self, params: &GotoDefinitionParams, ) -> Option { - let uri = ¶ms.text_document_position_params.text_document.uri; - let position = params.text_document_position_params.position; - let path = CanonicalPath::from_uri(uri)?; - let doc = self.documents.get(&path)?.clone(); - let lsp_pos = position.into(); - - let result = handlers::goto_definition(&doc, lsp_pos)?; - match result { - handlers::DefinitionResult::Local(range) => { - Some(GotoDefinitionResponse::Scalar(Location { - uri: uri.clone(), - range, - })) - } - handlers::DefinitionResult::Import(import_path) => { - let resolved = self.resolve_import_path(&path, &import_path)?; - let resolved_uri = resolved.to_uri().ok()?; - Some(GotoDefinitionResponse::Scalar(Location { - uri: resolved_uri, - range: lsp_types::Range::default(), - })) - } - handlers::DefinitionResult::ImportField { - path: import_path, - fields, - } => { - let resolved = self.resolve_import_path(&path, &import_path)?; - let resolved_uri = resolved.to_uri().ok()?; - let range = self - .find_field_in_file(&resolved, &fields) - .map(|locations| locations.declaration) - .unwrap_or_default(); - Some(GotoDefinitionResponse::Scalar(Location { - uri: resolved_uri, - range, - })) - } - } + self.goto_target(params, GotoTarget::Definition) } pub(super) fn goto_declaration( &self, params: &GotoDefinitionParams, ) -> Option { - self.goto_definition(params) + 
self.goto_target(params, GotoTarget::Declaration) } pub(super) fn goto_implementation( &self, params: &GotoDefinitionParams, + ) -> Option { + self.goto_target(params, GotoTarget::Implementation) + } + + fn goto_target( + &self, + params: &GotoDefinitionParams, + target: GotoTarget, ) -> Option { let uri = ¶ms.text_document_position_params.text_document.uri; let position = params.text_document_position_params.position; @@ -130,7 +108,11 @@ impl AsyncRequestContext { let result = handlers::goto_definition(&doc, lsp_pos)?; match result { handlers::DefinitionResult::Local(range) => { - let range = Self::local_implementation_range(&doc, range).unwrap_or(range); + let range = if target == GotoTarget::Implementation { + Self::local_implementation_range(&doc, range).unwrap_or(range) + } else { + range + }; Some(GotoDefinitionResponse::Scalar(Location { uri: uri.clone(), range, @@ -139,7 +121,11 @@ impl AsyncRequestContext { handlers::DefinitionResult::Import(import_path) => { let resolved = self.resolve_import_path(&path, &import_path)?; let resolved_uri = resolved.to_uri().ok()?; - let range = self.document_root_expr_range(&resolved).unwrap_or_default(); + let range = if target == GotoTarget::Implementation { + self.document_root_expr_range(&resolved).unwrap_or_default() + } else { + lsp_types::Range::default() + }; Some(GotoDefinitionResponse::Scalar(Location { uri: resolved_uri, range, @@ -151,11 +137,17 @@ impl AsyncRequestContext { } => { let resolved = self.resolve_import_path(&path, &import_path)?; let resolved_uri = resolved.to_uri().ok()?; - let range = self - .find_field_in_file(&resolved, &fields) - .map(|locations| locations.implementation) - .or_else(|| self.document_root_expr_range(&resolved)) - .unwrap_or_default(); + let locations = self.find_field_in_file(&resolved, &fields); + let range = if target == GotoTarget::Implementation { + locations + .map(|location| location.implementation) + .or_else(|| self.document_root_expr_range(&resolved)) + 
.unwrap_or_default() + } else { + locations + .map(|location| location.declaration) + .unwrap_or_default() + }; Some(GotoDefinitionResponse::Scalar(Location { uri: resolved_uri, range, diff --git a/crates/jrsonnet-lsp/tests/integration_test.rs b/crates/jrsonnet-lsp/tests/integration_test.rs index 99f030ef..295cc6db 100644 --- a/crates/jrsonnet-lsp/tests/integration_test.rs +++ b/crates/jrsonnet-lsp/tests/integration_test.rs @@ -935,17 +935,27 @@ fn test_goto_definition() { // Should receive definition response let response = client_conn.receiver.recv().unwrap(); - assert_matches!(response, Message::Response(resp) => { - assert_eq!(resp.id, 2.into()); - assert!(resp.error.is_none(), "Goto definition should succeed"); - let result: GotoDefinitionResponse = - serde_json::from_value(resp.result.expect("should have result")).unwrap(); - assert_matches!(result, GotoDefinitionResponse::Scalar(location) => { - // Definition should be at position 6 (the 'x' in 'local x') - assert_eq!(location.range.start.line, 0); - assert_eq!(location.range.start.character, 6); - }); - }); + let response = assert_matches!(response, Message::Response(resp) => resp); + assert_eq!(response.id, 2.into()); + assert!(response.error.is_none(), "Goto definition should succeed"); + let result: Option = + serde_json::from_value(response.result.expect("should have result")).unwrap(); + assert_eq!( + result, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: uri.parse().unwrap(), + range: lsp_types::Range { + start: Position { + line: 0, + character: 6, + }, + end: Position { + line: 0, + character: 7, + }, + }, + })) + ); // Shutdown client_conn @@ -1172,13 +1182,37 @@ fn test_goto_implementation_import_field() { })) ); + client_conn + .sender + .send(Message::Request(goto_declaration_request(3, &uri, 0, 40))) + .unwrap(); + let declaration_response = recv_response(&client_conn, 3); + let declaration_result: Option = + serde_json::from_value(declaration_response.result.expect("should 
have result")).unwrap(); + assert_eq!( + declaration_result, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: lib_uri.clone(), + range: lsp_types::Range { + start: Position { + line: 0, + character: 2, + }, + end: Position { + line: 0, + character: 5, + }, + }, + })) + ); + client_conn .sender .send(Message::Request(goto_implementation_request( - 3, &uri, 0, 40, + 4, &uri, 0, 40, ))) .unwrap(); - let implementation_response = recv_response(&client_conn, 3); + let implementation_response = recv_response(&client_conn, 4); let implementation_result: Option = serde_json::from_value(implementation_response.result.expect("should have result")) .unwrap(); @@ -1201,9 +1235,9 @@ fn test_goto_implementation_import_field() { client_conn .sender - .send(Message::Request(shutdown_request(4))) + .send(Message::Request(shutdown_request(5))) .unwrap(); - let _ = recv_response(&client_conn, 4); + let _ = recv_response(&client_conn, 5); client_conn .sender .send(Message::Notification(exit_notification())) diff --git a/docs/lsp/HANDLERS.md b/docs/lsp/HANDLERS.md index 32bd0251..13f44d9c 100644 --- a/docs/lsp/HANDLERS.md +++ b/docs/lsp/HANDLERS.md @@ -131,8 +131,9 @@ File: `crates/jrsonnet-lsp-handlers/src/definition.rs` Async server context resolves import paths and can locate nested field ranges in imported files before returning final `Location`. -`textDocument/declaration` currently delegates to the same resolution path as -`textDocument/definition`. +`textDocument/declaration` uses a dedicated async server path. It currently +returns declaration sites (same location mapping as definition for local/import +resolution), while remaining independent from definition dispatch. 
`textDocument/implementation` resolves symbol values/bodies: From 521ec0162b6992418e239523f5f8f4f893ad9774 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Mon, 9 Feb 2026 16:03:32 +0000 Subject: [PATCH 011/210] feat(lsp): separate declaration from canonical definition resolution Introduce distinct goto paths so declaration returns the nearest lexical binder while definition follows local alias chains to canonical origins. - add handler-level declaration vs definition modes and alias-chain resolution for local bindings/import aliases - route server declaration/implementation through lexical declaration handling, while keeping implementation expression jumps intact - add structural integration coverage for divergence cases (local alias and import-field alias) --- .../jrsonnet-lsp-handlers/src/definition.rs | 216 +++++++++++++++++- crates/jrsonnet-lsp-handlers/src/lib.rs | 3 +- .../jrsonnet-lsp/src/server/async_requests.rs | 7 +- crates/jrsonnet-lsp/tests/integration_test.rs | 210 +++++++++++++++++ 4 files changed, 425 insertions(+), 11 deletions(-) diff --git a/crates/jrsonnet-lsp-handlers/src/definition.rs b/crates/jrsonnet-lsp-handlers/src/definition.rs index 58f6e336..11157cb2 100644 --- a/crates/jrsonnet-lsp-handlers/src/definition.rs +++ b/crates/jrsonnet-lsp-handlers/src/definition.rs @@ -9,13 +9,13 @@ use jrsonnet_lsp_document::{ find_node_at_offset, to_lsp_range, token_at_offset, Document, LspPosition, }; -use jrsonnet_lsp_import::check_import_path; -use jrsonnet_lsp_inference::{trace_base, ConstEvalResult}; +use jrsonnet_lsp_import::{check_import_path, extract_import_path}; +use jrsonnet_lsp_inference::{trace_base, trace_expr, ConstEvalResult}; use jrsonnet_lsp_scope::{find_definition_range, is_variable_reference}; use jrsonnet_rowan_parser::{ nodes::{ - Bind, BindFunction, Destruct, ExprBase, ExprField, ExprFunction, ForSpec, MemberBindStmt, - Param, StmtLocal, + Bind, BindFunction, Destruct, Expr, ExprBase, ExprField, ExprFunction, ForSpec, + 
MemberBindStmt, Param, StmtLocal, }, AstNode, SyntaxKind, SyntaxNode, SyntaxToken, }; @@ -23,6 +23,19 @@ use lsp_types::Range; use rowan::TextRange; use tracing::debug; +#[derive(Clone, Copy, Debug, Eq, PartialEq)] +enum DefinitionMode { + Declaration, + Definition, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +enum CanonicalDefinition { + Local(TextRange), + Import(String), + ImportField { path: String, fields: Vec }, +} + /// Result of a go-to-definition request. #[derive(Debug, Clone, PartialEq, Eq)] pub enum DefinitionResult { @@ -47,6 +60,21 @@ pub enum DefinitionResult { /// - An import path that needs to be resolved by the caller /// - A field in an imported file (path + field chain) pub fn goto_definition(document: &Document, position: LspPosition) -> Option { + goto_with_mode(document, position, DefinitionMode::Definition) +} + +/// Find the declaration site of the symbol at the given position. +/// +/// Unlike `goto_definition`, this does not follow local alias chains. +pub fn goto_declaration(document: &Document, position: LspPosition) -> Option { + goto_with_mode(document, position, DefinitionMode::Declaration) +} + +fn goto_with_mode( + document: &Document, + position: LspPosition, + mode: DefinitionMode, +) -> Option { let text = document.text(); let line_index = document.line_index(); @@ -86,12 +114,106 @@ pub fn goto_definition(document: &Document, position: LspPosition) -> Option { + DefinitionResult::Local(to_lsp_range(range, line_index, text)) + } + CanonicalDefinition::Import(path) => DefinitionResult::Import(path), + CanonicalDefinition::ImportField { path, fields } => { + DefinitionResult::ImportField { path, fields } + } + } + } else { + DefinitionResult::Local(to_lsp_range(def_range, line_index, text)) + }; + + debug!(name = %name, mode = ?mode, ?result, "resolved symbol target"); + Some(result) +} + +fn resolve_canonical_definition( + document: &Document, + initial_def: TextRange, +) -> CanonicalDefinition { + let mut visited = 
std::collections::HashSet::new(); + let mut current = initial_def; + + loop { + if !visited.insert(current) { + return CanonicalDefinition::Local(current); + } + + let Some(bind) = find_bind_by_definition_range(document, current) else { + return CanonicalDefinition::Local(current); + }; + let Some(value_expr) = bind_value_expr(&bind) else { + return CanonicalDefinition::Local(current); + }; - // Convert to LSP range - Some(DefinitionResult::Local(to_lsp_range( - def_range, line_index, text, - ))) + if let Some((path, fields)) = resolve_expr_to_import(&value_expr, document) { + return if fields.is_empty() { + CanonicalDefinition::Import(path) + } else { + CanonicalDefinition::ImportField { path, fields } + }; + } + + let Some(next) = aliased_definition_range(&value_expr) else { + return CanonicalDefinition::Local(current); + }; + current = next; + } +} + +fn find_bind_by_definition_range(document: &Document, range: TextRange) -> Option { + document + .ast() + .syntax() + .descendants() + .filter_map(Bind::cast) + .find(|bind| { + bind_definition_range(bind).is_some_and(|definition_range| definition_range == range) + }) +} + +fn bind_definition_range(bind: &Bind) -> Option { + match bind { + Bind::BindDestruct(bind) => { + let destruct = bind.into()?; + let Destruct::DestructFull(full) = destruct else { + return None; + }; + Some(full.name()?.syntax().text_range()) + } + Bind::BindFunction(bind) => Some(bind.name()?.syntax().text_range()), + } +} + +fn bind_value_expr(bind: &Bind) -> Option { + match bind { + Bind::BindDestruct(bind) => bind.value(), + Bind::BindFunction(bind) => bind.value(), + } +} + +fn resolve_expr_to_import(value_expr: &Expr, document: &Document) -> Option<(String, Vec)> { + if let ExprBase::ExprImport(import) = value_expr.expr_base()? { + return Some((extract_import_path(&import)?, Vec::new())); + } + + match trace_expr(value_expr, document)? { + ConstEvalResult::Import { path, fields } => Some((path, fields)), + ConstEvalResult::Std { .. 
} | ConstEvalResult::Local { .. } => None, + } +} + +fn aliased_definition_range(value_expr: &Expr) -> Option { + let ExprBase::ExprVar(var) = value_expr.expr_base()? else { + return None; + }; + let ident = var.name()?.ident_lit()?; + find_definition_range(&ident, ident.text()) } /// Check if the token is a field name in a field access expression where the base @@ -531,6 +653,20 @@ mod tests { } } + fn expect_import_field( + result: Option, + expected_path: &str, + expected_fields: &[&str], + ) { + match result { + Some(DefinitionResult::ImportField { path, fields }) => { + assert_eq!(path, expected_path); + assert_eq!(fields, expected_fields); + } + other => panic!("Expected ImportField definition, got {other:?}"), + } + } + #[test] fn test_local_variable_definition() { let code = r"local x = 1; x + 1"; @@ -713,4 +849,66 @@ mod tests { assert_eq!(r.start.line, 1); assert_eq!(r.start.character, 2); } + + #[test] + fn test_definition_follows_local_alias_chain() { + let code = "local x = 1;\nlocal y = x;\ny"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + let definition = expect_local(goto_definition(&doc, (2, 0).into())); + let declaration = expect_local(goto_declaration(&doc, (2, 0).into())); + + assert_eq!( + definition, + Range { + start: lsp_types::Position { + line: 0, + character: 6, + }, + end: lsp_types::Position { + line: 0, + character: 7, + }, + } + ); + assert_eq!( + declaration, + Range { + start: lsp_types::Position { + line: 1, + character: 6, + }, + end: lsp_types::Position { + line: 1, + character: 7, + }, + } + ); + } + + #[test] + fn test_definition_resolves_alias_to_import_field() { + let code = r#"local lib = import "lib.libsonnet"; +local alias = lib.foo; +alias"#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + let definition = goto_definition(&doc, (2, 1).into()); + expect_import_field(definition, "lib.libsonnet", &["foo"]); + + let declaration = expect_local(goto_declaration(&doc, (2, 1).into())); 
+ assert_eq!( + declaration, + Range { + start: lsp_types::Position { + line: 1, + character: 6, + }, + end: lsp_types::Position { + line: 1, + character: 11, + }, + } + ); + } } diff --git a/crates/jrsonnet-lsp-handlers/src/lib.rs b/crates/jrsonnet-lsp-handlers/src/lib.rs index 5a426564..6541da06 100644 --- a/crates/jrsonnet-lsp-handlers/src/lib.rs +++ b/crates/jrsonnet-lsp-handlers/src/lib.rs @@ -18,7 +18,8 @@ pub use code_action::code_actions; pub use code_lens::{code_lens, resolve_code_lens, CodeLensConfig}; pub use completion::{completion, completion_with_import_roots}; pub use definition::{ - collect_visible_bindings, goto_definition, BindingKind, DefinitionResult, VisibleBinding, + collect_visible_bindings, goto_declaration, goto_definition, BindingKind, DefinitionResult, + VisibleBinding, }; pub use document_highlight::document_highlights; pub use formatting::{format_document, format_document_with_config, FormattingConfig}; diff --git a/crates/jrsonnet-lsp/src/server/async_requests.rs b/crates/jrsonnet-lsp/src/server/async_requests.rs index 09755955..341391d7 100644 --- a/crates/jrsonnet-lsp/src/server/async_requests.rs +++ b/crates/jrsonnet-lsp/src/server/async_requests.rs @@ -105,7 +105,12 @@ impl AsyncRequestContext { let doc = self.documents.get(&path)?.clone(); let lsp_pos = position.into(); - let result = handlers::goto_definition(&doc, lsp_pos)?; + let result = match target { + GotoTarget::Definition => handlers::goto_definition(&doc, lsp_pos)?, + GotoTarget::Declaration | GotoTarget::Implementation => { + handlers::goto_declaration(&doc, lsp_pos)? 
+ } + }; match result { handlers::DefinitionResult::Local(range) => { let range = if target == GotoTarget::Implementation { diff --git a/crates/jrsonnet-lsp/tests/integration_test.rs b/crates/jrsonnet-lsp/tests/integration_test.rs index 295cc6db..872726cb 100644 --- a/crates/jrsonnet-lsp/tests/integration_test.rs +++ b/crates/jrsonnet-lsp/tests/integration_test.rs @@ -1126,6 +1126,91 @@ fn test_goto_implementation_local_binding() { .expect("Server thread should exit cleanly"); } +#[test] +fn test_goto_definition_and_declaration_diverge_for_local_alias() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + let uri = "file:///test/alias-definition-vs-declaration.jsonnet"; + let text = "local x = 1;\nlocal y = x;\ny"; + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text))) + .unwrap(); + let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); + + client_conn + .sender + .send(Message::Request(goto_definition_request(2, uri, 2, 0))) + .unwrap(); + let definition_response = recv_response(&client_conn, 2); + let definition_result: Option = + serde_json::from_value(definition_response.result.expect("should have result")).unwrap(); + assert_eq!( + definition_result, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: uri.parse().unwrap(), + range: lsp_types::Range { + start: Position { + line: 0, + character: 6, + }, + end: Position { + line: 0, + character: 7, + }, + }, + })) + ); + + client_conn + .sender + .send(Message::Request(goto_declaration_request(3, uri, 2, 0))) + .unwrap(); + let declaration_response = recv_response(&client_conn, 3); + let declaration_result: Option = + 
serde_json::from_value(declaration_response.result.expect("should have result")).unwrap(); + assert_eq!( + declaration_result, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: uri.parse().unwrap(), + range: lsp_types::Range { + start: Position { + line: 1, + character: 6, + }, + end: Position { + line: 1, + character: 7, + }, + }, + })) + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(4))) + .unwrap(); + let _ = recv_response(&client_conn, 4); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + #[test] fn test_goto_implementation_import_field() { let temp_dir = TempDir::new().expect("failed to create temp dir"); @@ -1247,6 +1332,131 @@ fn test_goto_implementation_import_field() { .expect("Server thread should exit cleanly"); } +#[test] +fn test_goto_definition_alias_to_import_field_vs_declaration() { + let temp_dir = TempDir::new().expect("failed to create temp dir"); + let lib_path = temp_dir.path().join("lib.libsonnet"); + let main_path = temp_dir.path().join("main.jsonnet"); + + fs::write(&lib_path, "{ foo: 42 }").expect("failed to write lib file"); + fs::write( + &main_path, + r#"local lib = import "lib.libsonnet"; +local alias = lib.foo; +alias"#, + ) + .expect("failed to write main file"); + + let uri = file_uri(&main_path); + let lib_uri: lsp_types::Uri = file_uri(&lib_path).parse().unwrap(); + let text = fs::read_to_string(&main_path).expect("failed to read main file"); + + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + client_conn + .sender + .send(Message::Notification(did_open_notification(&uri, &text))) + .unwrap(); + 
let _ = recv_publish_diagnostics_for_uri(&client_conn, &uri, Duration::from_secs(5)); + + client_conn + .sender + .send(Message::Request(goto_definition_request(2, &uri, 2, 0))) + .unwrap(); + let definition_response = recv_response(&client_conn, 2); + let definition_result: Option = + serde_json::from_value(definition_response.result.expect("should have result")).unwrap(); + assert_eq!( + definition_result, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: lib_uri.clone(), + range: lsp_types::Range { + start: Position { + line: 0, + character: 2, + }, + end: Position { + line: 0, + character: 5, + }, + }, + })) + ); + + client_conn + .sender + .send(Message::Request(goto_declaration_request(3, &uri, 2, 0))) + .unwrap(); + let declaration_response = recv_response(&client_conn, 3); + let declaration_result: Option = + serde_json::from_value(declaration_response.result.expect("should have result")).unwrap(); + assert_eq!( + declaration_result, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: uri.parse().unwrap(), + range: lsp_types::Range { + start: Position { + line: 1, + character: 6, + }, + end: Position { + line: 1, + character: 11, + }, + }, + })) + ); + + client_conn + .sender + .send(Message::Request(goto_implementation_request(4, &uri, 2, 0))) + .unwrap(); + let implementation_response = recv_response(&client_conn, 4); + let implementation_result: Option = + serde_json::from_value(implementation_response.result.expect("should have result")) + .unwrap(); + assert_eq!( + implementation_result, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: uri.parse().unwrap(), + range: lsp_types::Range { + start: Position { + line: 1, + character: 14, + }, + end: Position { + line: 1, + character: 21, + }, + }, + })) + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(5))) + .unwrap(); + let _ = recv_response(&client_conn, 5); + client_conn + .sender + .send(Message::Notification(exit_notification())) 
+ .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + #[test] fn test_diagnostics_import_file_and_definition_resolution() { let temp_dir = TempDir::new().expect("failed to create temp dir"); From 622f6695a154c1777244f15cf4004348e564a7cb Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Mon, 9 Feb 2026 16:03:45 +0000 Subject: [PATCH 012/210] docs(lsp): make navigation semantics user-oriented Rewrite declaration/definition/implementation guidance in clearer language with practical intent and worked examples. - explain when each request should be used - add concrete alias/import examples with expected jump targets - align architecture summary wording with handler-level behavior --- docs/lsp/ARCHITECTURE.md | 13 ++++++++++ docs/lsp/HANDLERS.md | 54 +++++++++++++++++++++++++++++----------- 2 files changed, 53 insertions(+), 14 deletions(-) diff --git a/docs/lsp/ARCHITECTURE.md b/docs/lsp/ARCHITECTURE.md index db437a58..9a74dc16 100644 --- a/docs/lsp/ARCHITECTURE.md +++ b/docs/lsp/ARCHITECTURE.md @@ -191,6 +191,19 @@ The common resolution order is: 1. relative to the importing file's directory 2. each configured `jpath` entry in order +Navigation semantics: + +- `textDocument/declaration`: nearest lexical declaration ("where this name is + introduced in the current scope"). +- `textDocument/definition`: canonical origin ("what this symbol resolves to + after following aliases/imports"). +- `textDocument/implementation`: value/body expression ("how this symbol is + computed"). + +In practice, declaration is usually the best jump for local editing, definition +is best for understanding provenance across aliases/imports, and implementation +is best for inspecting runtime value logic. + Import graph update path: `update_import_graph` in `crates/jrsonnet-lsp/src/server.rs`. 
diff --git a/docs/lsp/HANDLERS.md b/docs/lsp/HANDLERS.md index 13f44d9c..604886e7 100644 --- a/docs/lsp/HANDLERS.md +++ b/docs/lsp/HANDLERS.md @@ -39,7 +39,7 @@ Current request routing in `crates/jrsonnet-lsp/src/server.rs`: | `textDocument/documentSymbol` | `on_document_symbol` | handlers crate (`document_symbols`) | sync | no | | `textDocument/definition` | async context (`goto_definition`) | mixed: handlers + server import resolution | async | no | | `textDocument/declaration` | async context (`goto_declaration`) | mixed: handlers + server import resolution | async | no | -| `textDocument/implementation` | async context (`goto_implementation`) | mixed: handlers + server import resolution | async | no | +| `textDocument/implementation` | async (`goto_implementation`) | mixed: handlers + server import resolution | async | no | | `textDocument/hover` | async context (`hover`) | handlers crate (`hover`) | async | yes | | `textDocument/documentHighlight` | `on_document_highlight` | handlers crate (`document_highlights`) | sync | no | | `textDocument/inlayHint` | async context (`inlay_hints`) | handlers crate (`inlay_hints`) | async | yes | @@ -122,24 +122,50 @@ still return items on explicit completion requests. File: `crates/jrsonnet-lsp-handlers/src/definition.rs` -`goto_definition` returns: +Navigation is split into three related requests with different intent: -- `Local(range)` -- `Import(path)` -- `ImportField { path, fields }` +- `textDocument/declaration`: where the symbol is introduced in the current + lexical scope (nearest visible binder). +- `textDocument/definition`: where the symbol ultimately comes from. This can + follow local aliases and resolve to imports or imported fields. +- `textDocument/implementation`: the value/body expression behind the symbol. + +Rule of thumb for users: + +- Use `declaration` when you want "where was this name declared here?". +- Use `definition` when you want "what does this name actually point to?". 
+- Use `implementation` when you want "what expression computes this value?". + +Example: + +```jsonnet +local x = 1; +local y = x; +local lib = import "lib.libsonnet"; +local alias = lib.foo; +alias +``` -Async server context resolves import paths and can locate nested field ranges in -imported files before returning final `Location`. +At `alias` on the last line: -`textDocument/declaration` uses a dedicated async server path. It currently -returns declaration sites (same location mapping as definition for local/import -resolution), while remaining independent from definition dispatch. +- `declaration` jumps to `local alias = ...`. +- `definition` jumps to the `foo` declaration in `lib.libsonnet`. +- `implementation` jumps to the right-hand side `lib.foo`. -`textDocument/implementation` resolves symbol values/bodies: +At `y` in `local y = x;`: + +- `declaration` jumps to `local y = ...`. +- `definition` jumps to `local x = ...`. +- `implementation` jumps to the right-hand side of `y` (`x`). + +Internally, the handler returns one of: + +- `Local(range)` +- `Import(path)` +- `ImportField { path, fields }` -- local bindings jump to bound value expressions -- imports jump to imported-file root expressions -- imported field chains jump to the target field value expression +The async server context resolves import paths and nested field locations into +final LSP `Location` responses. ### Document Highlight From a96085bad9b1e2957d39eae2a4c667aac43ecb92 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Mon, 9 Feb 2026 17:35:50 +0000 Subject: [PATCH 013/210] fix(lsp): resolve cross-file references from local uses Cross-file reference search no longer requires the cursor to sit on the top-level definition token. When the cursor is on a local reference, we now resolve its definition first and treat it as cross-file-exportable if that definition is file-scope. 
This preserves existing behavior for definitions while making references from local usage sites return importer hits as expected. --- .../jrsonnet-lsp-handlers/src/references.rs | 56 ++++++++++++++----- crates/jrsonnet-lsp/tests/integration_test.rs | 18 ++++++ 2 files changed, 60 insertions(+), 14 deletions(-) diff --git a/crates/jrsonnet-lsp-handlers/src/references.rs b/crates/jrsonnet-lsp-handlers/src/references.rs index 475db8df..19e2ea3a 100644 --- a/crates/jrsonnet-lsp-handlers/src/references.rs +++ b/crates/jrsonnet-lsp-handlers/src/references.rs @@ -13,7 +13,7 @@ use jrsonnet_lsp_scope::{ }; use jrsonnet_rowan_parser::{ nodes::{Bind, Destruct, ExprBase, ExprField, StmtLocal}, - AstNode, SyntaxKind, SyntaxNode, + AstNode, SyntaxKind, SyntaxNode, SyntaxToken, }; use lsp_types::{Location, Uri}; use rayon::prelude::*; @@ -113,20 +113,9 @@ pub fn find_cross_file_references<'a>( return Vec::new(); } - let name = token.text().to_string(); - - // Check if this symbol is at file scope (could be exported) - // For now, we check if the symbol is defined at the top level of the file - let is_file_scope_definition = is_definition_site(&token) && is_at_file_scope(&token); - - if !is_file_scope_definition { - // If it's a reference, check if it comes from an import - if is_variable_reference(&token) { - // Check if this is a reference to an imported symbol - // For now, only handle local symbols - } + let Some(name) = resolve_exported_symbol_name(current_document, &token) else { return Vec::new(); - } + }; let current_path_str = current_path.as_path(); @@ -161,6 +150,45 @@ pub fn find_cross_file_references<'a>( references } +fn resolve_exported_symbol_name(document: &Document, token: &SyntaxToken) -> Option { + if token.kind() != SyntaxKind::IDENT { + return None; + } + + if is_definition_site(token) && is_at_file_scope(token) { + return Some(token.text().to_string()); + } + + if !is_variable_reference(token) { + return None; + } + + let name = token.text(); + let 
definition_range = find_definition_range(token, name)?; + let definition_token = definition_token(document, definition_range, name)?; + if !is_at_file_scope(&definition_token) { + return None; + } + + Some(name.to_string()) +} + +fn definition_token(document: &Document, range: TextRange, name: &str) -> Option { + document + .ast() + .syntax() + .descendants_with_tokens() + .filter_map(rowan::NodeOrToken::into_token) + .find(|token| { + token.kind() == SyntaxKind::IDENT + && token.text() == name + && is_definition_site(token) + && token + .parent() + .is_some_and(|parent| parent.text_range() == range) + }) +} + /// Information about an import statement. struct ImportInfo { /// The name this import is bound to (e.g., "lib" in "local lib = import 'lib.jsonnet'") diff --git a/crates/jrsonnet-lsp/tests/integration_test.rs b/crates/jrsonnet-lsp/tests/integration_test.rs index 872726cb..98acf326 100644 --- a/crates/jrsonnet-lsp/tests/integration_test.rs +++ b/crates/jrsonnet-lsp/tests/integration_test.rs @@ -1971,6 +1971,24 @@ fn test_watched_file_refreshes_unopened_importers_for_references() { "Expected cross-file reference from unopened main file, got: {refs:?}" ); + // Query references from a non-definition reference in lib1 (line 0, col 18) + client_conn + .sender + .send(Message::Request(references_request( + 21, &lib1_uri, 0, 18, false, + ))) + .unwrap(); + let response = recv_response(&client_conn, 21); + assert!(response.error.is_none(), "References should succeed"); + let refs: Option> = + serde_json::from_value(response.result.expect("should have result")).unwrap(); + let refs = refs.unwrap_or_default(); + assert!( + refs.iter() + .any(|location| location.uri.to_string() == main_uri), + "Expected cross-file reference from unopened main file when queried from a local reference, got: {refs:?}" + ); + // Update main on disk to import lib2 instead of lib1 fs::write(&main_path, "local lib = import 'lib2.jsonnet'; lib.target") .expect("main should be rewritten"); From 
249c8ea3c4bfca3274907806e158d6a1a4afaa4c Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Mon, 9 Feb 2026 17:42:05 +0000 Subject: [PATCH 014/210] fix(lsp-inference): resolve dynamic return specs in polymorphic calls --- crates/jrsonnet-lsp-inference/src/poly.rs | 193 +++++++++++++++++++--- 1 file changed, 173 insertions(+), 20 deletions(-) diff --git a/crates/jrsonnet-lsp-inference/src/poly.rs b/crates/jrsonnet-lsp-inference/src/poly.rs index ef249c0b..788f3c2a 100644 --- a/crates/jrsonnet-lsp-inference/src/poly.rs +++ b/crates/jrsonnet-lsp-inference/src/poly.rs @@ -1,6 +1,6 @@ //! Polymorphic type instantiation for function calls. -use jrsonnet_lsp_types::{ReturnSpec, Ty, TyData, TyStore, TySubstitution}; +use jrsonnet_lsp_types::{NumBounds, ReturnSpec, Ty, TyData, TyStore, TySubstitution}; /// Instantiate a polymorphic function call (Ty-native version). /// @@ -21,12 +21,14 @@ use jrsonnet_lsp_types::{ReturnSpec, Ty, TyData, TyStore, TySubstitution}; pub fn instantiate_function_call_ty(func_ty: Ty, arg_types: &[Ty], store: &mut TyStore) -> Ty { // Check if the function type has any type variables if !store.has_type_vars(func_ty) { - // No type variables - return the fixed return type or ANY + // No type variables - resolve from return spec directly. 
if let TyData::Function(func_data) = store.get(func_ty).clone() { - return match &func_data.return_spec { - ReturnSpec::Fixed(ret) => *ret, - _ => Ty::ANY, // Dynamic return specs without type vars - }; + return resolve_return_spec_ty( + &func_data.return_spec, + arg_types, + &TySubstitution::new(), + store, + ); } return Ty::ANY; } @@ -43,11 +45,109 @@ pub fn instantiate_function_call_ty(func_ty: Ty, arg_types: &[Ty], store: &mut T collect_type_var_substitutions_ty(param.ty, arg_ty, &mut substitution, store); } - // Apply the substitution to the return type + resolve_return_spec_ty(&func_data.return_spec, arg_types, &substitution, store) +} + +fn resolve_return_spec_ty( + return_spec: &ReturnSpec, + arg_types: &[Ty], + substitution: &TySubstitution, + store: &mut TyStore, +) -> Ty { + let applied_arg_ty = |idx: usize, store: &mut TyStore| { + arg_types + .get(idx) + .copied() + .map(|ty| store.apply_substitution(ty, substitution)) + .unwrap_or(Ty::ANY) + }; - match &func_data.return_spec { - ReturnSpec::Fixed(ret) => store.apply_substitution(*ret, &substitution), - _ => Ty::ANY, // Dynamic return specs not yet supported with substitution + match return_spec { + ReturnSpec::Fixed(ret) => store.apply_substitution(*ret, substitution), + ReturnSpec::SameAsArg(idx) => applied_arg_ty(*idx, store), + ReturnSpec::ArrayOfArg(idx) => { + let ty = applied_arg_ty(*idx, store); + store.array(ty) + } + ReturnSpec::ArrayWithSameElements(idx) => { + let arg_ty = applied_arg_ty(*idx, store); + match store.get(arg_ty).clone() { + TyData::Array { elem, .. } => store.array(elem), + TyData::Tuple { elems } => { + let elem_union = store.union(elems); + store.array(elem_union) + } + _ => store.array(Ty::ANY), + } + } + ReturnSpec::SetWithSameElements(idx) => { + let arg_ty = applied_arg_ty(*idx, store); + match store.get(arg_ty).clone() { + TyData::Array { elem, .. 
} => store.array_set(elem), + TyData::Tuple { elems } => { + let elem_union = store.union(elems); + store.array_set(elem_union) + } + _ => store.array_set(Ty::ANY), + } + } + ReturnSpec::FuncReturnType(idx) => { + let arg_ty = applied_arg_ty(*idx, store); + match store.get(arg_ty).clone() { + TyData::Function(func_data) => match func_data.return_spec { + ReturnSpec::Fixed(ret) => store.apply_substitution(ret, substitution), + _ => Ty::ANY, + }, + _ => Ty::ANY, + } + } + ReturnSpec::ArrayOfFuncReturn(idx) => { + let arg_ty = applied_arg_ty(*idx, store); + let ret_ty = match store.get(arg_ty).clone() { + TyData::Function(func_data) => match func_data.return_spec { + ReturnSpec::Fixed(ret) => store.apply_substitution(ret, substitution), + _ => Ty::ANY, + }, + _ => Ty::ANY, + }; + store.array(ret_ty) + } + ReturnSpec::FlatMapResult(idx) => { + let arg_ty = applied_arg_ty(*idx, store); + match store.get(arg_ty).clone() { + TyData::Function(func_data) => match func_data.return_spec { + ReturnSpec::Fixed(ret) => { + let applied_ret = store.apply_substitution(ret, substitution); + match store.get(applied_ret).clone() { + TyData::Array { elem, .. 
} => store.array(elem), + _ => store.array(Ty::ANY), + } + } + _ => store.array(Ty::ANY), + }, + _ => store.array(Ty::ANY), + } + } + ReturnSpec::NonNegative => store.bounded_number(NumBounds::non_negative()), + ReturnSpec::ObjectValuesType(idx) => { + let arg_ty = applied_arg_ty(*idx, store); + match store.get(arg_ty).clone() { + TyData::Object(obj_data) => { + if obj_data.fields.is_empty() { + store.array(Ty::ANY) + } else { + let field_types: Vec = obj_data + .fields + .into_iter() + .map(|(_, field)| field.ty) + .collect(); + let field_union = store.union(field_types); + store.array(field_union) + } + } + _ => store.array(Ty::ANY), + } + } } } @@ -60,7 +160,7 @@ pub fn collect_type_var_substitutions_ty( pattern: Ty, target: Ty, substitution: &mut TySubstitution, - store: &TyStore, + store: &mut TyStore, ) { // Clone data to avoid borrow issues let pattern_data = store.get(pattern).clone(); @@ -96,11 +196,8 @@ pub fn collect_type_var_substitutions_ty( (TyData::Array { elem: pat_elem, .. 
}, TyData::Tuple { elems: tgt_elems }) if !tgt_elems.is_empty() => { - // Use the union of tuple element types (need mutable store for this) - // For now, just match against first element as approximation - if let Some(&first_elem) = tgt_elems.first() { - collect_type_var_substitutions_ty(*pat_elem, first_elem, substitution, store); - } + let tuple_elem_union = store.union(tgt_elems.clone()); + collect_type_var_substitutions_ty(*pat_elem, tuple_elem_union, substitution, store); } // Object types - match field types @@ -333,7 +430,7 @@ mod tests { // Create substitution T -> Number let mut sub = TySubstitution::new(); - collect_type_var_substitutions_ty(t_var, Ty::NUMBER, &mut sub, &store); + collect_type_var_substitutions_ty(t_var, Ty::NUMBER, &mut sub, &mut store); assert_eq!(sub.get(t_id), Some(Ty::NUMBER)); } @@ -352,7 +449,7 @@ mod tests { // Collect substitutions let mut sub = TySubstitution::new(); - collect_type_var_substitutions_ty(array_t, array_string, &mut sub, &store); + collect_type_var_substitutions_ty(array_t, array_string, &mut sub, &mut store); assert_eq!(sub.get(t_id), Some(Ty::STRING)); } @@ -373,15 +470,71 @@ mod tests { // Number is not indexable - should not substitute let mut sub = TySubstitution::new(); - collect_type_var_substitutions_ty(t_var, Ty::NUMBER, &mut sub, &store); + collect_type_var_substitutions_ty(t_var, Ty::NUMBER, &mut sub, &mut store); assert_eq!(sub.get(t_id), None); // Array IS indexable - should substitute let array_num = store.array(Ty::NUMBER); - collect_type_var_substitutions_ty(t_var, array_num, &mut sub, &store); + collect_type_var_substitutions_ty(t_var, array_num, &mut sub, &mut store); assert_eq!(sub.get(t_id), Some(array_num)); } + #[test] + fn test_dynamic_return_spec_without_type_vars() { + let mut store = TyStore::new(); + let func_ty = store.intern(TyData::Function(FunctionData { + params: vec![ParamInterned { + name: "x".to_string(), + ty: Ty::NUMBER, + has_default: false, + }], + return_spec: 
ReturnSpec::SameAsArg(0), + variadic: false, + })); + + let result = instantiate_function_call_ty(func_ty, &[Ty::NUMBER], &mut store); + assert_eq!(result, Ty::NUMBER); + } + + #[test] + fn test_dynamic_return_spec_with_type_vars() { + let mut store = TyStore::new(); + let t_id = TyVarId::fresh(); + let t_var = store.type_var(t_id, TyConstraints::none()); + let func_ty = store.intern(TyData::Function(FunctionData { + params: vec![ParamInterned { + name: "x".to_string(), + ty: t_var, + has_default: false, + }], + return_spec: ReturnSpec::SameAsArg(0), + variadic: false, + })); + + let result = instantiate_function_call_ty(func_ty, &[Ty::STRING], &mut store); + assert_eq!(result, Ty::STRING); + } + + #[test] + fn test_array_pattern_matches_tuple_as_union() { + let mut store = TyStore::new(); + let t_id = TyVarId::fresh(); + let t_var = store.type_var(t_id, TyConstraints::none()); + let array_t = store.array(t_var); + let tuple_number_string = store.tuple(vec![Ty::NUMBER, Ty::STRING]); + + let mut substitution = TySubstitution::new(); + collect_type_var_substitutions_ty( + array_t, + tuple_number_string, + &mut substitution, + &mut store, + ); + + let expected = store.union(vec![Ty::NUMBER, Ty::STRING]); + assert_eq!(substitution.get(t_id), Some(expected)); + } + #[test] fn test_ty_apply_substitution() { let mut store = TyStore::new(); From 4f1c3fabd6a49ebf0f9221efad3fb86b077826fb Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Mon, 9 Feb 2026 17:44:44 +0000 Subject: [PATCH 015/210] fix(lsp-inference): trace object method fields in const-eval --- .../jrsonnet-lsp-inference/src/const_eval.rs | 44 +++++++++++++------ docs/lsp/HANDLERS.md | 3 +- 2 files changed, 32 insertions(+), 15 deletions(-) diff --git a/crates/jrsonnet-lsp-inference/src/const_eval.rs b/crates/jrsonnet-lsp-inference/src/const_eval.rs index ba2ff9f8..6e9e299b 100644 --- a/crates/jrsonnet-lsp-inference/src/const_eval.rs +++ b/crates/jrsonnet-lsp-inference/src/const_eval.rs @@ -367,29 +367,29 @@ fn 
find_field_in_member_list( ctx: &mut EvalContext, ) -> Option { for member in members.members() { - // Extract field name and value from the member - let (name, value) = match &member { + match &member { Member::MemberFieldNormal(field) => { let name = extract_field_key_name(&field.field_name()?)?; + if name != field_name { + continue; + } + let value = field.expr()?; - (name, value) + return trace_expr_inner(&value, document, ctx); } Member::MemberFieldMethod(method) => { let name = extract_field_key_name(&method.field_name()?)?; - // Methods have params and body, but we can trace the full method expression - // For now, return Local since methods are complex - if name == field_name { - return Some(ConstEvalResult::Local { - range: method.syntax().text_range(), - }); + if name != field_name { + continue; } - continue; + + let method_range = method.syntax().text_range(); + let body = method.expr()?; + return trace_expr_inner(&body, document, ctx).or(Some(ConstEvalResult::Local { + range: method_range, + })); } Member::MemberBindStmt(_) | Member::MemberAssertStmt(_) => continue, - }; - - if name == field_name { - return trace_expr_inner(&value, document, ctx); } } None @@ -594,6 +594,22 @@ mod tests { ); } + #[test] + fn test_trace_method_field_body() { + let code = r#"local obj = { foo(x): import "lib.libsonnet" }; obj.foo"#; + let doc = make_doc(code); + + let expr = find_expr_at(&doc, 51).expect("should find `obj.foo` expression"); + let result = trace_expr(&expr, &doc); + assert_eq!( + result, + Some(ConstEvalResult::Import { + path: "lib.libsonnet".to_string(), + fields: vec![], + }) + ); + } + #[test] fn test_trace_cycle_detection() { // This would cause infinite recursion without cycle detection diff --git a/docs/lsp/HANDLERS.md b/docs/lsp/HANDLERS.md index 604886e7..81f099a4 100644 --- a/docs/lsp/HANDLERS.md +++ b/docs/lsp/HANDLERS.md @@ -127,7 +127,8 @@ Navigation is split into three related requests with different intent: - `textDocument/declaration`: 
where the symbol is introduced in the current lexical scope (nearest visible binder). - `textDocument/definition`: where the symbol ultimately comes from. This can - follow local aliases and resolve to imports or imported fields. + follow local aliases and resolve to imports or imported fields, including + fields declared with object method syntax. - `textDocument/implementation`: the value/body expression behind the symbol. Rule of thumb for users: From 9d2de0f1b3bf6cee1326d2494f8f320badf03178 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Mon, 9 Feb 2026 17:47:28 +0000 Subject: [PATCH 016/210] fix(lsp-check): validate std.format text-block literals --- crates/jrsonnet-lsp-check/src/type_check.rs | 110 +++++++++++++++++--- 1 file changed, 96 insertions(+), 14 deletions(-) diff --git a/crates/jrsonnet-lsp-check/src/type_check.rs b/crates/jrsonnet-lsp-check/src/type_check.rs index 89170a84..6d7369de 100644 --- a/crates/jrsonnet-lsp-check/src/type_check.rs +++ b/crates/jrsonnet-lsp-check/src/type_check.rs @@ -1301,28 +1301,74 @@ fn get_string_literal(expr: &Expr) -> Option { let base = expr.expr_base()?; match base { ExprBase::ExprString(s) => { - // Get the text and strip quotes let text = s.syntax().text().to_string(); - // Handle different string formats: "...", '...', @"...", @'...', |||...||| if text.starts_with("|||") { - // Text block - complex to parse, skip for now - None - } else if text.starts_with('@') { - // Verbatim string - let inner = text.get(2..text.len() - 1)?; - Some(inner.to_string()) - } else if text.starts_with('"') || text.starts_with('\'') { - // Regular string - need to handle escape sequences - let inner = text.get(1..text.len() - 1)?; - Some(unescape_string(inner)) - } else { - None + return parse_text_block_literal(&text); + } + + if let Some(inner) = text + .strip_prefix("@\"") + .and_then(|value| value.strip_suffix('"')) + .or_else(|| text.strip_prefix("@'").and_then(|value| value.strip_suffix('\''))) + { + return 
Some(inner.to_string()); } + + text.strip_prefix('"') + .and_then(|value| value.strip_suffix('"')) + .or_else(|| text.strip_prefix('\'').and_then(|value| value.strip_suffix('\''))) + .map(unescape_string) } _ => None, } } +/// Parse Jsonnet text block syntax (`||| ... |||`) into its string content. +fn parse_text_block_literal(text: &str) -> Option { + let after_open = text.strip_prefix("|||")?; + let (_, body_with_terminator) = after_open.split_once('\n')?; + + let mut raw_lines = Vec::new(); + let mut found_terminator = false; + for line in body_with_terminator.split('\n') { + if is_text_block_terminator(line) { + found_terminator = true; + break; + } + raw_lines.push(line); + } + if !found_terminator { + return None; + } + + let indent = raw_lines + .iter() + .find(|line| !line.is_empty()) + .map(|line| { + line.chars() + .take_while(|ch| *ch == ' ' || *ch == '\t') + .collect::() + }) + .unwrap_or_default(); + + let normalized = raw_lines + .into_iter() + .map(|line| { + if indent.is_empty() || line.is_empty() { + line.to_string() + } else { + line.strip_prefix(&indent).unwrap_or(line).to_string() + } + }) + .collect::>(); + + Some(normalized.join("\n")) +} + +fn is_text_block_terminator(line: &str) -> bool { + line.trim_start_matches([' ', '\t']) == "|||" +} + /// Unescape a string literal (simplified version). 
fn unescape_string(s: &str) -> String { let mut result = String::with_capacity(s.len()); @@ -2111,6 +2157,42 @@ mod tests { ); } + #[test] + fn test_format_text_block_literal_valid() { + let errors = check_code( + r#"std.format(||| + %s has %d apples +|||, "Alice", 5)"#, + ); + assert_eq!( + errors.as_slice(), + &[], + "expected no errors for text-block format string, got: {errors:?}" + ); + } + + #[test] + fn test_format_text_block_literal_arg_count_error() { + let errors = check_code( + r#"std.format(||| + %s %d +|||, "one")"#, + ); + assert!( + matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::FormatArgCount { + expected: 2, + provided: 1 + }, + .. + }] + ), + "expected FormatArgCount error for text-block format string, got: {errors:?}" + ); + } + #[test] fn test_no_such_field_with_suggestion() { // Typo should trigger "did you mean" suggestion From ae73cf84f28fa816d72c53251677855a6f2d060b Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Mon, 9 Feb 2026 17:52:55 +0000 Subject: [PATCH 017/210] style(lsp-check): apply rustfmt wrapping in string literal parsing --- crates/jrsonnet-lsp-check/src/type_check.rs | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/crates/jrsonnet-lsp-check/src/type_check.rs b/crates/jrsonnet-lsp-check/src/type_check.rs index 6d7369de..facdf242 100644 --- a/crates/jrsonnet-lsp-check/src/type_check.rs +++ b/crates/jrsonnet-lsp-check/src/type_check.rs @@ -1309,14 +1309,19 @@ fn get_string_literal(expr: &Expr) -> Option { if let Some(inner) = text .strip_prefix("@\"") .and_then(|value| value.strip_suffix('"')) - .or_else(|| text.strip_prefix("@'").and_then(|value| value.strip_suffix('\''))) - { + .or_else(|| { + text.strip_prefix("@'") + .and_then(|value| value.strip_suffix('\'')) + }) { return Some(inner.to_string()); } text.strip_prefix('"') .and_then(|value| value.strip_suffix('"')) - .or_else(|| text.strip_prefix('\'').and_then(|value| value.strip_suffix('\''))) + .or_else(|| { + 
text.strip_prefix('\'') + .and_then(|value| value.strip_suffix('\'')) + }) .map(unescape_string) } _ => None, From 89262dac730e80448741bb3353ac36de4c7dfe3e Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Mon, 9 Feb 2026 17:53:02 +0000 Subject: [PATCH 018/210] test(lsp): implement annotated error assertions --- .../tests/framework/assertions.rs | 93 +++++++++++++++++-- 1 file changed, 86 insertions(+), 7 deletions(-) diff --git a/crates/jrsonnet-lsp/tests/framework/assertions.rs b/crates/jrsonnet-lsp/tests/framework/assertions.rs index b74cc778..8c645646 100644 --- a/crates/jrsonnet-lsp/tests/framework/assertions.rs +++ b/crates/jrsonnet-lsp/tests/framework/assertions.rs @@ -4,6 +4,7 @@ use std::sync::Arc; +use jrsonnet_lsp_check::{lint, LintConfig}; use jrsonnet_lsp_document::{ position_to_offset, token_at_offset, ByteOffset, CharOffset, DocVersion, Document, Line, LspPosition, @@ -55,6 +56,7 @@ impl VerificationResult { pub struct TestContext { pub document: Document, pub analysis: TypeAnalysis, + pub diagnostics: Vec, } impl TestContext { @@ -63,7 +65,15 @@ impl TestContext { let document = Document::new(parsed.source.clone(), DocVersion::new(1)); let global_types = Arc::new(GlobalTyStore::new()); let analysis = TypeAnalysis::analyze_with_global(&document, Arc::clone(&global_types)); - Self { document, analysis } + let uri: lsp_types::Uri = "file:///annotated-test.jsonnet" + .parse() + .expect("annotated test URI should parse"); + let diagnostics = lint(&document, &analysis, &LintConfig::all(), &uri); + Self { + document, + analysis, + diagnostics, + } } /// Get position from line and column. 
@@ -100,12 +110,8 @@ pub fn verify_annotations(parsed: &ParsedSource) -> VerificationResult { AnnotationKind::Type(expected) => { verify_type(&ctx, ann, expected, &mut result); } - AnnotationKind::Error(_expected) => { - // TODO: Implement error verification - result.add_failure(format!( - "{}:{}: error annotations not yet implemented", - ann.line, ann.column - )); + AnnotationKind::Error(expected) => { + verify_error(&ctx, ann, expected, &mut result); } AnnotationKind::Completion(expected) => { verify_completion(&ctx, ann, expected, &mut result); @@ -220,6 +226,53 @@ fn verify_usage(ctx: &TestContext, ann: &Annotation, name: &str, result: &mut Ve result.add_success(); } +/// Verify that diagnostics at this position include expected message text. +fn verify_error( + ctx: &TestContext, + ann: &Annotation, + expected: &str, + result: &mut VerificationResult, +) { + let pos = TestContext::position(ann.line, ann.column); + let matching: Vec<&lsp_types::Diagnostic> = ctx + .diagnostics + .iter() + .filter(|diag| range_contains_position(diag.range, pos)) + .collect(); + + if matching.is_empty() { + result.add_failure(format!( + "{}:{}: expected error containing '{}', but no diagnostics were reported here", + ann.line, ann.column, expected + )); + return; + } + + if matching.iter().any(|diag| diag.message.contains(expected)) { + result.add_success(); + return; + } + + let messages = matching + .iter() + .map(|diag| diag.message.as_str()) + .collect::>() + .join(" | "); + result.add_failure(format!( + "{}:{}: expected error containing '{}', got diagnostics: {}", + ann.line, ann.column, expected, messages + )); +} + +fn range_contains_position(range: lsp_types::Range, pos: LspPosition) -> bool { + let pos: lsp_types::Position = pos.into(); + let starts_before_or_at = pos.line > range.start.line + || (pos.line == range.start.line && pos.character >= range.start.character); + let ends_after_or_at = pos.line < range.end.line + || (pos.line == range.end.line && pos.character <= 
range.end.character); + starts_before_or_at && ends_after_or_at +} + /// Verify that hover contains the expected text. fn verify_hover( ctx: &TestContext, @@ -464,4 +517,30 @@ x + x assert!(result.passed, "Failures: {:?}", result.failures); assert_eq!(result.passed_count, 3); } + + #[test] + fn test_verify_error_success() { + let source = r"(1).foo +## ^ error: field access on non-object type"; + let parsed = parse_annotated_source(source); + let result = verify_annotations(&parsed); + assert!(result.passed, "Failures: {:?}", result.failures); + assert_eq!(result.passed_count, 1); + } + + #[test] + fn test_verify_error_message_mismatch() { + let source = r"(1).foo +## ^ error: completely different message"; + let parsed = parse_annotated_source(source); + let result = verify_annotations(&parsed); + assert!(!result.passed, "expected verification to fail"); + assert_eq!(result.passed_count, 0); + assert_eq!(result.total_count, 1); + assert!( + result.failures[0].contains("completely different message"), + "unexpected failure message: {:?}", + result.failures + ); + } } From 634d7bea0e6246aa2e7ccaff92792c2f2ba0c7ea Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Mon, 9 Feb 2026 17:56:30 +0000 Subject: [PATCH 019/210] feat(lsp-hover): infer definition-site types from bound values --- crates/jrsonnet-lsp-handlers/src/hover.rs | 50 +++++++++++++++++-- .../jrsonnet-lsp/tests/e2e_annotated_tests.rs | 10 ++-- docs/lsp/HANDLERS.md | 2 + 3 files changed, 53 insertions(+), 9 deletions(-) diff --git a/crates/jrsonnet-lsp-handlers/src/hover.rs b/crates/jrsonnet-lsp-handlers/src/hover.rs index cf0ed13e..baade8cf 100644 --- a/crates/jrsonnet-lsp-handlers/src/hover.rs +++ b/crates/jrsonnet-lsp-handlers/src/hover.rs @@ -4,11 +4,11 @@ //! - Standard library functions (std.*) //! 
- Local variable definitions (shows first few lines) -use jrsonnet_lsp_document::{token_at_offset, ByteOffset, Document, LspPosition}; +use jrsonnet_lsp_document::{to_lsp_range, token_at_offset, ByteOffset, Document, LspPosition}; use jrsonnet_lsp_inference::TypeAnalysis; use jrsonnet_lsp_stdlib as stdlib; use jrsonnet_rowan_parser::{ - nodes::{Bind, ExprBase, ExprField}, + nodes::{Bind, Destruct, ExprBase, ExprField}, AstNode, SyntaxKind, SyntaxToken, }; use lsp_types::{Hover, HoverContents, MarkupContent, MarkupKind}; @@ -97,7 +97,8 @@ fn check_local_hover( text: &str, offset: ByteOffset, ) -> Option { - let result = goto_definition(document, position)?; + let result = goto_definition(document, position) + .or_else(|| local_definition_at_offset(document, offset))?; // Get the inferred type at this position. If the local definition site only // reports `any`, fall back to the bound value expression type. @@ -179,6 +180,37 @@ fn check_local_hover( } } +fn local_definition_at_offset(document: &Document, offset: ByteOffset) -> Option { + let ast = document.ast(); + let token = token_at_offset(ast.syntax(), offset)?; + if token.kind() != SyntaxKind::IDENT { + return None; + } + + let name_node = token.parent()?; + let bind = name_node.ancestors().find_map(Bind::cast)?; + let definition_range = match bind { + Bind::BindDestruct(bind) => { + let destruct = jrsonnet_rowan_parser::nodes::BindDestruct::into(&bind)?; + let Destruct::DestructFull(full) = destruct else { + return None; + }; + full.name()?.syntax().text_range() + } + Bind::BindFunction(bind) => bind.name()?.syntax().text_range(), + }; + + if name_node.text_range() != definition_range { + return None; + } + + Some(DefinitionResult::Local(to_lsp_range( + definition_range, + document.line_index(), + document.text(), + ))) +} + fn definition_value_type( document: &Document, analysis: &TypeAnalysis, @@ -296,6 +328,18 @@ mod tests { 25, "`function(a, b)`\n\n```jsonnet\nlocal add(a, b) = a + b; add(1, 2)\n```" )] + 
#[case( + "local arr = [1, 2, 3]; arr", + 0, + 6, + "`[number, number, number]`\n\n```jsonnet\nlocal arr = [1, 2, 3]; arr\n```" + )] + #[case( + "local obj = { a: 1 }; obj", + 0, + 6, + "`{ a }`\n\n```jsonnet\nlocal obj = { a: 1 }; obj\n```" + )] fn test_local_hover( #[case] code: &str, #[case] line: u32, diff --git a/crates/jrsonnet-lsp/tests/e2e_annotated_tests.rs b/crates/jrsonnet-lsp/tests/e2e_annotated_tests.rs index fb084c09..eefcfa84 100644 --- a/crates/jrsonnet-lsp/tests/e2e_annotated_tests.rs +++ b/crates/jrsonnet-lsp/tests/e2e_annotated_tests.rs @@ -164,21 +164,19 @@ fn test_hover_null() { #[test] fn test_hover_array() { - // Hover on the variable name 'arr' at definition site shows 'any' - // TODO: Could be improved to show inferred array type + // Hover on the variable name 'arr' at definition site shows the inferred type. check_hover( r"local arr = [1, 2, 3]; -## ^ hover: any", +## ^ hover: [number, number, number]", ); } #[test] fn test_hover_object() { - // Hover on the variable name 'obj' at definition site shows 'any' - // TODO: Could be improved to show inferred object type + // Hover on the variable name 'obj' at definition site shows the inferred type. check_hover( r"local obj = { a: 1 }; -## ^ hover: any", +## ^ hover: { a }", ); } diff --git a/docs/lsp/HANDLERS.md b/docs/lsp/HANDLERS.md index 81f099a4..de09d805 100644 --- a/docs/lsp/HANDLERS.md +++ b/docs/lsp/HANDLERS.md @@ -192,6 +192,8 @@ Hover combines: - inferred type information - stdlib documentation/signatures - local definition context snippets +- definition-site fallback to bound value type when token-level inference is + `any` Requires `TypeAnalysis` from async server context. 
From bd1ed58a44b5a9198b0997ce77124d3877335abd Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Mon, 9 Feb 2026 18:02:03 +0000 Subject: [PATCH 020/210] test(flow): assert structural non-empty narrowing for std.length facts --- crates/jrsonnet-lsp-inference/src/flow.rs | 20 ++++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/crates/jrsonnet-lsp-inference/src/flow.rs b/crates/jrsonnet-lsp-inference/src/flow.rs index 014cf7e2..fdbe95c8 100644 --- a/crates/jrsonnet-lsp-inference/src/flow.rs +++ b/crates/jrsonnet-lsp-inference/src/flow.rs @@ -1496,11 +1496,14 @@ mod tests { let facts = parse_and_extract("std.length(arr) > 0"); let arr_fact = facts.get("arr").expect("should have fact for arr"); - // Non-empty array - for now just confirm the fact exists - let input = store.array(Ty::NUMBER); - let result = arr_fact.apply_to(input, &mut store); - // With min_len(1), this should narrow to a non-empty array (at least 1 element) - assert!(result != Ty::NEVER); + let array_ty = store.array(Ty::NUMBER); + let tuple_non_empty = store.tuple(vec![Ty::NUMBER]); + let tuple_empty = store.tuple(vec![]); + let mixed = store.union(vec![Ty::NUMBER, array_ty, tuple_non_empty, tuple_empty]); + + let result = arr_fact.apply_to(mixed, &mut store); + let expected = store.union(vec![array_ty, tuple_non_empty]); + assert_eq!(result, expected); } #[test] @@ -1509,10 +1512,11 @@ mod tests { let facts = parse_and_extract("std.length(arr) != 0"); let arr_fact = facts.get("arr").expect("should have fact for arr"); - // Non-empty array - let input = store.array(Ty::NUMBER); + let tuple_empty = store.tuple(vec![]); + let tuple_non_empty = store.tuple(vec![Ty::STRING]); + let input = store.union(vec![tuple_empty, tuple_non_empty]); let result = arr_fact.apply_to(input, &mut store); - assert!(result != Ty::NEVER); + assert_eq!(result, tuple_non_empty); } // Higher-order predicate tests From ded1a111854d06e32bffcecdf563bc99c0b0f5a8 Mon Sep 17 00:00:00 2001 From: Iain Lane 
Date: Mon, 9 Feb 2026 18:02:31 +0000 Subject: [PATCH 021/210] feat(lsp): add textDocument/typeDefinition support --- crates/jrsonnet-lsp/src/server.rs | 16 +++- .../jrsonnet-lsp/src/server/async_requests.rs | 12 ++- crates/jrsonnet-lsp/tests/integration_test.rs | 90 ++++++++++++++++++- docs/lsp/ARCHITECTURE.md | 9 +- docs/lsp/HANDLERS.md | 3 + 5 files changed, 119 insertions(+), 11 deletions(-) diff --git a/crates/jrsonnet-lsp/src/server.rs b/crates/jrsonnet-lsp/src/server.rs index 9f5a9e3b..a9c739ff 100644 --- a/crates/jrsonnet-lsp/src/server.rs +++ b/crates/jrsonnet-lsp/src/server.rs @@ -28,9 +28,9 @@ use lsp_types::{ request::{ CodeActionRequest, CodeLensRequest, CodeLensResolve, Completion, DocumentHighlightRequest, DocumentSymbolRequest, ExecuteCommand, Formatting, GotoDeclaration, GotoDefinition, - GotoImplementation, HoverRequest, InlayHintRequest, PrepareRenameRequest, References, - Rename, Request as _, SemanticTokensFullRequest, SemanticTokensRangeRequest, Shutdown, - SignatureHelpRequest, WorkspaceSymbolRequest, + GotoImplementation, GotoTypeDefinition, HoverRequest, InlayHintRequest, + PrepareRenameRequest, References, Rename, Request as _, SemanticTokensFullRequest, + SemanticTokensRangeRequest, Shutdown, SignatureHelpRequest, WorkspaceSymbolRequest, }, CodeActionKind, CodeActionOptions, CodeActionParams, CodeActionProviderCapability, CodeActionResponse, CodeLens, CodeLensOptions, CompletionOptions, DidChangeConfigurationParams, @@ -299,6 +299,9 @@ impl Server { implementation_provider: Some(lsp_types::ImplementationProviderCapability::Simple( true, )), + type_definition_provider: Some(lsp_types::TypeDefinitionProviderCapability::Simple( + true, + )), hover_provider: Some(HoverProviderCapability::Simple(true)), document_highlight_provider: Some(OneOf::Left(true)), inlay_hint_provider: Some(OneOf::Left(true)), @@ -443,6 +446,7 @@ impl Server { Shutdown::METHOD => self.handle_shutdown_request(id), GotoDefinition::METHOD | GotoDeclaration::METHOD + | 
GotoTypeDefinition::METHOD | GotoImplementation::METHOD | HoverRequest::METHOD | InlayHintRequest::METHOD @@ -577,6 +581,12 @@ impl Server { params, AsyncRequestContext::goto_implementation, ), + GotoTypeDefinition::METHOD => self.handle_async_typed( + id, + GotoTypeDefinition::METHOD, + params, + AsyncRequestContext::goto_type_definition, + ), HoverRequest::METHOD => self.handle_async_typed( id, HoverRequest::METHOD, diff --git a/crates/jrsonnet-lsp/src/server/async_requests.rs b/crates/jrsonnet-lsp/src/server/async_requests.rs index 341391d7..4f08cf91 100644 --- a/crates/jrsonnet-lsp/src/server/async_requests.rs +++ b/crates/jrsonnet-lsp/src/server/async_requests.rs @@ -32,6 +32,7 @@ pub(super) struct AsyncRequestContext { #[derive(Clone, Copy, Debug, Eq, PartialEq)] enum GotoTarget { Definition, + TypeDefinition, Declaration, Implementation, } @@ -87,6 +88,13 @@ impl AsyncRequestContext { self.goto_target(params, GotoTarget::Declaration) } + pub(super) fn goto_type_definition( + &self, + params: &GotoDefinitionParams, + ) -> Option { + self.goto_target(params, GotoTarget::TypeDefinition) + } + pub(super) fn goto_implementation( &self, params: &GotoDefinitionParams, @@ -106,7 +114,9 @@ impl AsyncRequestContext { let lsp_pos = position.into(); let result = match target { - GotoTarget::Definition => handlers::goto_definition(&doc, lsp_pos)?, + GotoTarget::Definition | GotoTarget::TypeDefinition => { + handlers::goto_definition(&doc, lsp_pos)? + } GotoTarget::Declaration | GotoTarget::Implementation => { handlers::goto_declaration(&doc, lsp_pos)? 
} diff --git a/crates/jrsonnet-lsp/tests/integration_test.rs b/crates/jrsonnet-lsp/tests/integration_test.rs index 98acf326..2d093afe 100644 --- a/crates/jrsonnet-lsp/tests/integration_test.rs +++ b/crates/jrsonnet-lsp/tests/integration_test.rs @@ -14,8 +14,9 @@ use lsp_types::{ }, request::{ CodeActionRequest, CodeLensRequest, CodeLensResolve, DocumentHighlightRequest, - ExecuteCommand, GotoDeclaration, GotoDefinition, GotoImplementation, Initialize, - InlayHintRequest, References, Rename, Request as _, SemanticTokensRangeRequest, Shutdown, + ExecuteCommand, GotoDeclaration, GotoDefinition, GotoImplementation, GotoTypeDefinition, + Initialize, InlayHintRequest, References, Rename, Request as _, SemanticTokensRangeRequest, + Shutdown, }, DidChangeConfigurationParams, DidChangeWatchedFilesParams, DidOpenTextDocumentParams, DidSaveTextDocumentParams, ExecuteCommandParams, FileChangeType, FileEvent, @@ -112,6 +113,24 @@ fn goto_definition_request(id: i32, uri: &str, line: u32, character: u32) -> Req ) } +fn goto_type_definition_request(id: i32, uri: &str, line: u32, character: u32) -> Request { + let params = GotoDefinitionParams { + text_document_position_params: TextDocumentPositionParams { + text_document: TextDocumentIdentifier { + uri: uri.parse().unwrap(), + }, + position: Position { line, character }, + }, + work_done_progress_params: WorkDoneProgressParams::default(), + partial_result_params: PartialResultParams::default(), + }; + Request::new( + id.into(), + GotoTypeDefinition::METHOD.to_string(), + serde_json::to_value(params).unwrap(), + ) +} + /// Helper to create a goto declaration request. 
fn goto_declaration_request(id: i32, uri: &str, line: u32, character: u32) -> Request { let params = GotoDefinitionParams { @@ -974,6 +993,73 @@ fn test_goto_definition() { .expect("Server thread should exit cleanly"); } +#[test] +fn test_goto_type_definition() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + let uri = "file:///test/type-definition.jsonnet"; + let text = "local x = 1; x + 1"; + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text))) + .unwrap(); + let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); + + client_conn + .sender + .send(Message::Request(goto_type_definition_request( + 2, uri, 0, 13, + ))) + .unwrap(); + let response = recv_response(&client_conn, 2); + assert!( + response.error.is_none(), + "Goto type definition request should succeed" + ); + let result: Option = + serde_json::from_value(response.result.expect("should have result")).unwrap(); + assert_eq!( + result, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: uri.parse().unwrap(), + range: lsp_types::Range { + start: Position { + line: 0, + character: 6, + }, + end: Position { + line: 0, + character: 7, + }, + }, + })) + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(3))) + .unwrap(); + let _ = recv_response(&client_conn, 3); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + #[test] fn test_goto_declaration() { let (client_conn, server_conn) = Connection::memory(); diff --git a/docs/lsp/ARCHITECTURE.md b/docs/lsp/ARCHITECTURE.md index 9a74dc16..c3eef13f 100644 --- 
a/docs/lsp/ARCHITECTURE.md +++ b/docs/lsp/ARCHITECTURE.md @@ -120,7 +120,8 @@ to documents, import graph, type cache, config, and dependency-aware analysis. `server_capabilities()` currently advertises: - incremental text sync with open/close and save notifications -- definition, declaration, implementation, hover, document symbols, document highlights +- definition, declaration, implementation, type definition, hover, document + symbols, document highlights - completion (trigger `.`) - signature help (triggers `(` and `,`) - formatting @@ -134,10 +135,6 @@ to documents, import graph, type cache, config, and dependency-aware analysis. For the canonical list, see `crates/jrsonnet-lsp/src/server.rs`. -Not currently advertised: - -- type-definition provider - ## Notification Handling Implemented notifications: @@ -197,6 +194,8 @@ Navigation semantics: introduced in the current scope"). - `textDocument/definition`: canonical origin ("what this symbol resolves to after following aliases/imports"). +- `textDocument/typeDefinition`: same target as `definition` in Jsonnet + (symbols do not have separate nominal type declarations). - `textDocument/implementation`: value/body expression ("how this symbol is computed"). 
diff --git a/docs/lsp/HANDLERS.md b/docs/lsp/HANDLERS.md index de09d805..d2e17b0d 100644 --- a/docs/lsp/HANDLERS.md +++ b/docs/lsp/HANDLERS.md @@ -38,6 +38,7 @@ Current request routing in `crates/jrsonnet-lsp/src/server.rs`: | ----------------------------------- | ---------------------------------- | ------------------------------------------------- | --------------- | ------------------- | | `textDocument/documentSymbol` | `on_document_symbol` | handlers crate (`document_symbols`) | sync | no | | `textDocument/definition` | async context (`goto_definition`) | mixed: handlers + server import resolution | async | no | +| `textDocument/typeDefinition` | async (`goto_type_definition`) | mixed: handlers + server import resolution | async | no | | `textDocument/declaration` | async context (`goto_declaration`) | mixed: handlers + server import resolution | async | no | | `textDocument/implementation` | async (`goto_implementation`) | mixed: handlers + server import resolution | async | no | | `textDocument/hover` | async context (`hover`) | handlers crate (`hover`) | async | yes | @@ -129,6 +130,8 @@ Navigation is split into three related requests with different intent: - `textDocument/definition`: where the symbol ultimately comes from. This can follow local aliases and resolve to imports or imported fields, including fields declared with object method syntax. +- `textDocument/typeDefinition`: for Jsonnet, this follows the same target as + `definition` because symbols do not have separate nominal type declarations. - `textDocument/implementation`: the value/body expression behind the symbol. 
Rule of thumb for users: From dd5309c46abc6cb7a2c790d8b9357cc821494807 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Mon, 9 Feb 2026 18:13:29 +0000 Subject: [PATCH 022/210] test(lsp): broaden typeDefinition navigation coverage --- crates/jrsonnet-lsp/tests/integration_test.rs | 184 ++++++++++++++++++ 1 file changed, 184 insertions(+) diff --git a/crates/jrsonnet-lsp/tests/integration_test.rs b/crates/jrsonnet-lsp/tests/integration_test.rs index 2d093afe..dbf618c9 100644 --- a/crates/jrsonnet-lsp/tests/integration_test.rs +++ b/crates/jrsonnet-lsp/tests/integration_test.rs @@ -131,6 +131,54 @@ fn goto_type_definition_request(id: i32, uri: &str, line: u32, character: u32) - ) } +fn assert_type_definition_matches_definition( + conn: &Connection, + definition_id: i32, + type_definition_id: i32, + uri: &str, + line: u32, + character: u32, +) -> Option { + conn.sender + .send(Message::Request(goto_definition_request( + definition_id, + uri, + line, + character, + ))) + .unwrap(); + let definition_response = recv_response(conn, definition_id); + assert!( + definition_response.error.is_none(), + "Goto definition request should succeed" + ); + let definition_result: Option = + serde_json::from_value(definition_response.result.expect("should have result")).unwrap(); + + conn.sender + .send(Message::Request(goto_type_definition_request( + type_definition_id, + uri, + line, + character, + ))) + .unwrap(); + let type_definition_response = recv_response(conn, type_definition_id); + assert!( + type_definition_response.error.is_none(), + "Goto type definition request should succeed" + ); + let type_definition_result: Option = + serde_json::from_value(type_definition_response.result.expect("should have result")) + .unwrap(); + + assert_eq!( + type_definition_result, definition_result, + "typeDefinition should match definition for Jsonnet symbol navigation" + ); + definition_result +} + /// Helper to create a goto declaration request. 
fn goto_declaration_request(id: i32, uri: &str, line: u32, character: u32) -> Request { let params = GotoDefinitionParams { @@ -1060,6 +1108,142 @@ fn test_goto_type_definition() { .expect("Server thread should exit cleanly"); } +#[test] +fn test_goto_type_definition_matches_definition_for_local_alias() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + let uri = "file:///test/type-definition-local-alias.jsonnet"; + let text = "local x = 1;\nlocal y = x;\ny"; + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text))) + .unwrap(); + let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); + + let result = assert_type_definition_matches_definition(&client_conn, 2, 3, uri, 2, 0); + assert_eq!( + result, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: uri.parse().unwrap(), + range: lsp_types::Range { + start: Position { + line: 0, + character: 6, + }, + end: Position { + line: 0, + character: 7, + }, + }, + })) + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(4))) + .unwrap(); + let _ = recv_response(&client_conn, 4); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_goto_type_definition_matches_definition_for_import_targets() { + let temp_dir = TempDir::new().expect("failed to create temp dir"); + let lib_path = temp_dir.path().join("lib.libsonnet"); + let main_path = temp_dir.path().join("main.jsonnet"); + + fs::write(&lib_path, "{ foo: 42 }").expect("failed to write lib file"); + fs::write( + &main_path, + r#"local lib = import "lib.libsonnet"; +local 
alias = lib.foo; +local plain = lib; +alias + std.length(plain)"#, + ) + .expect("failed to write main file"); + + let uri = file_uri(&main_path); + let lib_uri: lsp_types::Uri = file_uri(&lib_path).parse().unwrap(); + let text = fs::read_to_string(&main_path).expect("failed to read main file"); + + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + client_conn + .sender + .send(Message::Notification(did_open_notification(&uri, &text))) + .unwrap(); + let _ = recv_publish_diagnostics_for_uri(&client_conn, &uri, Duration::from_secs(5)); + + // `alias` usage at line 3, col 0 resolves to imported field `foo`. + let alias_result = assert_type_definition_matches_definition(&client_conn, 2, 3, &uri, 3, 0); + assert_eq!( + alias_result, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: lib_uri.clone(), + range: lsp_types::Range { + start: Position { + line: 0, + character: 2, + }, + end: Position { + line: 0, + character: 5, + }, + }, + })) + ); + + // `plain` usage at line 3, col 19 resolves to import file root. 
+ let plain_result = assert_type_definition_matches_definition(&client_conn, 4, 5, &uri, 3, 19); + assert_eq!( + plain_result, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: lib_uri, + range: lsp_types::Range::default(), + })) + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(6))) + .unwrap(); + let _ = recv_response(&client_conn, 6); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + #[test] fn test_goto_declaration() { let (client_conn, server_conn) = Connection::memory(); From dfeb7e59b8e981aea5737f94ed95c4bd6e3e1527 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Mon, 9 Feb 2026 18:14:31 +0000 Subject: [PATCH 023/210] test(lsp): add navigation matrix coverage --- crates/jrsonnet-lsp/tests/integration_test.rs | 251 ++++++++++++++++++ 1 file changed, 251 insertions(+) diff --git a/crates/jrsonnet-lsp/tests/integration_test.rs b/crates/jrsonnet-lsp/tests/integration_test.rs index dbf618c9..bd61122f 100644 --- a/crates/jrsonnet-lsp/tests/integration_test.rs +++ b/crates/jrsonnet-lsp/tests/integration_test.rs @@ -179,6 +179,18 @@ fn assert_type_definition_matches_definition( definition_result } +fn send_goto_and_parse( + conn: &Connection, + id: i32, + label: &str, + request: Request, +) -> Option { + conn.sender.send(Message::Request(request)).unwrap(); + let response = recv_response(conn, id); + assert!(response.error.is_none(), "{label} request should succeed"); + serde_json::from_value(response.result.expect("should have result")).unwrap() +} + /// Helper to create a goto declaration request. 
fn goto_declaration_request(id: i32, uri: &str, line: u32, character: u32) -> Request { let params = GotoDefinitionParams { @@ -1244,6 +1256,245 @@ alias + std.length(plain)"#, .expect("Server thread should exit cleanly"); } +#[test] +fn test_navigation_matrix_local_alias() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + let uri = "file:///test/navigation-matrix-local.jsonnet"; + let text = "local x = 1;\nlocal y = x;\ny"; + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text))) + .unwrap(); + let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); + + let declaration = send_goto_and_parse( + &client_conn, + 2, + "goto declaration", + goto_declaration_request(2, uri, 2, 0), + ); + let definition = send_goto_and_parse( + &client_conn, + 3, + "goto definition", + goto_definition_request(3, uri, 2, 0), + ); + let type_definition = send_goto_and_parse( + &client_conn, + 4, + "goto type definition", + goto_type_definition_request(4, uri, 2, 0), + ); + let implementation = send_goto_and_parse( + &client_conn, + 5, + "goto implementation", + goto_implementation_request(5, uri, 2, 0), + ); + + assert_eq!( + declaration, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: uri.parse().unwrap(), + range: lsp_types::Range { + start: Position { + line: 1, + character: 6, + }, + end: Position { + line: 1, + character: 7, + }, + }, + })) + ); + assert_eq!( + definition, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: uri.parse().unwrap(), + range: lsp_types::Range { + start: Position { + line: 0, + character: 6, + }, + end: Position { + line: 0, + character: 7, + }, + }, + })) + ); + 
assert_eq!(type_definition, definition); + assert_eq!( + implementation, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: uri.parse().unwrap(), + range: lsp_types::Range { + start: Position { + line: 1, + character: 10, + }, + end: Position { + line: 1, + character: 11, + }, + }, + })) + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(6))) + .unwrap(); + let _ = recv_response(&client_conn, 6); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_navigation_matrix_import_alias() { + let temp_dir = TempDir::new().expect("failed to create temp dir"); + let lib_path = temp_dir.path().join("lib.libsonnet"); + let main_path = temp_dir.path().join("main.jsonnet"); + + fs::write(&lib_path, "{ foo: 42 }").expect("failed to write lib file"); + fs::write( + &main_path, + r#"local lib = import "lib.libsonnet"; +local alias = lib.foo; +alias"#, + ) + .expect("failed to write main file"); + + let uri = file_uri(&main_path); + let lib_uri: lsp_types::Uri = file_uri(&lib_path).parse().unwrap(); + let text = fs::read_to_string(&main_path).expect("failed to read main file"); + + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + client_conn + .sender + .send(Message::Notification(did_open_notification(&uri, &text))) + .unwrap(); + let _ = recv_publish_diagnostics_for_uri(&client_conn, &uri, Duration::from_secs(5)); + + let declaration = send_goto_and_parse( + &client_conn, + 2, + "goto declaration", + goto_declaration_request(2, &uri, 2, 0), + ); + let definition = send_goto_and_parse( + &client_conn, + 3, + "goto definition", + 
goto_definition_request(3, &uri, 2, 0), + ); + let type_definition = send_goto_and_parse( + &client_conn, + 4, + "goto type definition", + goto_type_definition_request(4, &uri, 2, 0), + ); + let implementation = send_goto_and_parse( + &client_conn, + 5, + "goto implementation", + goto_implementation_request(5, &uri, 2, 0), + ); + + assert_eq!( + declaration, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: uri.parse().unwrap(), + range: lsp_types::Range { + start: Position { + line: 1, + character: 6, + }, + end: Position { + line: 1, + character: 11, + }, + }, + })) + ); + assert_eq!( + definition, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: lib_uri.clone(), + range: lsp_types::Range { + start: Position { + line: 0, + character: 2, + }, + end: Position { + line: 0, + character: 5, + }, + }, + })) + ); + assert_eq!(type_definition, definition); + assert_eq!( + implementation, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: uri.parse().unwrap(), + range: lsp_types::Range { + start: Position { + line: 1, + character: 14, + }, + end: Position { + line: 1, + character: 21, + }, + }, + })) + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(6))) + .unwrap(); + let _ = recv_response(&client_conn, 6); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + #[test] fn test_goto_declaration() { let (client_conn, server_conn) = Connection::memory(); From b04cfcde0fc8258dffb5d7b62822765eb6f8bc12 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Mon, 9 Feb 2026 18:17:16 +0000 Subject: [PATCH 024/210] feat(flow): narrow literal strings with min-length facts --- crates/jrsonnet-lsp-inference/src/flow.rs | 17 ++++++++++++++ crates/jrsonnet-lsp-types/src/mut_store.rs | 9 +++++++- crates/jrsonnet-lsp-types/src/operations.rs | 25 ++++++++++++++++++++- crates/jrsonnet-lsp-types/src/store.rs | 9 
+++++++- 4 files changed, 57 insertions(+), 3 deletions(-) diff --git a/crates/jrsonnet-lsp-inference/src/flow.rs b/crates/jrsonnet-lsp-inference/src/flow.rs index fdbe95c8..543d6b08 100644 --- a/crates/jrsonnet-lsp-inference/src/flow.rs +++ b/crates/jrsonnet-lsp-inference/src/flow.rs @@ -1519,6 +1519,23 @@ mod tests { assert_eq!(result, tuple_non_empty); } + #[test] + fn test_extract_std_length_greater_eq_n_narrows_literal_strings() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let facts = parse_and_extract("std.length(s) >= 2"); + let s_fact = facts.get("s").expect("should have fact for s"); + + let empty = store.literal_string(String::new()); + let one = store.literal_string("x".to_string()); + let two = store.literal_string("xy".to_string()); + let three = store.literal_string("xyz".to_string()); + let input = store.union(vec![empty, one, two, three]); + + let result = s_fact.apply_to(input, &mut store); + let expected = store.union(vec![two, three]); + assert_eq!(result, expected); + } + // Higher-order predicate tests #[test] diff --git a/crates/jrsonnet-lsp-types/src/mut_store.rs b/crates/jrsonnet-lsp-types/src/mut_store.rs index 4c54bd86..49ec5144 100644 --- a/crates/jrsonnet-lsp-types/src/mut_store.rs +++ b/crates/jrsonnet-lsp-types/src/mut_store.rs @@ -665,10 +665,17 @@ impl MutStore { | TyData::Object(_) | TyData::AttrsOf { .. } | TyData::String - | TyData::LiteralString(_) | TyData::Function(_) | TyData::TypeVar { .. 
} => ty, + TyData::LiteralString(s) => { + if s.chars().count() >= min { + ty + } else { + Ty::NEVER + } + } + TyData::Tuple { elems } => { if elems.len() >= min { ty diff --git a/crates/jrsonnet-lsp-types/src/operations.rs b/crates/jrsonnet-lsp-types/src/operations.rs index 5fc3d296..051821ac 100644 --- a/crates/jrsonnet-lsp-types/src/operations.rs +++ b/crates/jrsonnet-lsp-types/src/operations.rs @@ -808,6 +808,7 @@ pub fn ty_with_len(ty: Ty, len: usize, store: &mut S) -> Ty { /// - Arrays stay arrays (can have any length) /// - Tuples must have at least `n` elements /// - Strings stay strings (can have any length) +/// - Literal strings must have at least `n` characters, otherwise `Never` /// - Char requires `min <= 1` /// - Objects with unknown fields stay as-is /// @@ -816,6 +817,7 @@ pub fn ty_with_len(ty: Ty, len: usize, store: &mut S) -> Ty { /// - `ty_with_min_len(Array, 3)` → `Array` /// - `ty_with_min_len([Number, String], 1)` → `[Number, String]` /// - `ty_with_min_len([Number], 2)` → `Never` +/// - `ty_with_min_len("ok", 3)` → `Never` /// - `ty_with_min_len(Char, 2)` → `Never` pub fn ty_with_min_len(ty: Ty, min: usize, store: &mut S) -> Ty { let data = store.get_data(ty); @@ -827,10 +829,17 @@ pub fn ty_with_min_len(ty: Ty, min: usize, store: &mut S) -> Ty | TyData::Object(_) | TyData::AttrsOf { .. } | TyData::String - | TyData::LiteralString(_) | TyData::Function(_) | TyData::TypeVar { ..
} => ty, + TyData::LiteralString(s) => { + if s.chars().count() >= min { + ty + } else { + Ty::NEVER + } + } + TyData::Tuple { elems } => { if elems.len() >= min { ty @@ -1425,6 +1434,20 @@ mod tests { let mut store = TyStore::new(); assert_eq!(ty_with_min_len(Ty::CHAR, 2, &mut store), Ty::NEVER); } + + #[test] + fn test_literal_string_meets_min() { + let mut store = TyStore::new(); + let literal = store.literal_string("hello".to_string()); + assert_eq!(ty_with_min_len(literal, 3, &mut store), literal); + } + + #[test] + fn test_literal_string_too_short() { + let mut store = TyStore::new(); + let literal = store.literal_string("hi".to_string()); + assert_eq!(ty_with_min_len(literal, 3, &mut store), Ty::NEVER); + } } mod ty_with_field_tests { diff --git a/crates/jrsonnet-lsp-types/src/store.rs b/crates/jrsonnet-lsp-types/src/store.rs index ed49c1a6..ca5e5012 100644 --- a/crates/jrsonnet-lsp-types/src/store.rs +++ b/crates/jrsonnet-lsp-types/src/store.rs @@ -1414,10 +1414,17 @@ impl TyStore { | TyData::Object(_) | TyData::AttrsOf { .. } | TyData::String - | TyData::LiteralString(_) | TyData::Function(_) | TyData::TypeVar { .. 
} => ty, + TyData::LiteralString(s) => { + if s.chars().count() >= min { + ty + } else { + Ty::NEVER + } + } + TyData::Tuple { elems } => { if elems.len() >= min { ty From e8bf1aabe1d5c8bb806f61519aa9406d1a1a88e3 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Mon, 9 Feb 2026 18:23:35 +0000 Subject: [PATCH 025/210] test(lsp): add exact annotated hover/type assertions --- .../jrsonnet-lsp/tests/e2e_annotated_tests.rs | 24 ++- .../tests/framework/assertions.rs | 192 ++++++++++++------ crates/jrsonnet-lsp/tests/framework/mod.rs | 2 + crates/jrsonnet-lsp/tests/framework/parser.rs | 25 +++ 4 files changed, 178 insertions(+), 65 deletions(-) diff --git a/crates/jrsonnet-lsp/tests/e2e_annotated_tests.rs b/crates/jrsonnet-lsp/tests/e2e_annotated_tests.rs index eefcfa84..4a4a0409 100644 --- a/crates/jrsonnet-lsp/tests/e2e_annotated_tests.rs +++ b/crates/jrsonnet-lsp/tests/e2e_annotated_tests.rs @@ -131,7 +131,7 @@ fn test_hover_number_literal() { // '42' starts at column 10 check_hover( r"local x = 42; -## ^ hover: number", +## ^ type-exact: number", ); } @@ -140,7 +140,7 @@ fn test_hover_string_literal() { // '"hello"' starts at column 10 check_hover( r#"local s = "hello"; -## ^ hover: string"#, +## ^ type-exact: string"#, ); } @@ -149,7 +149,7 @@ fn test_hover_boolean() { // 'true' starts at column 10, LSP infers literal type 'true' check_hover( r"local b = true; -## ^ hover: true", +## ^ type-exact: true", ); } @@ -158,7 +158,7 @@ fn test_hover_null() { // 'null' starts at column 10 check_hover( r"local n = null; -## ^ hover: null", +## ^ type-exact: null", ); } @@ -167,7 +167,7 @@ fn test_hover_array() { // Hover on the variable name 'arr' at definition site shows the inferred type. check_hover( r"local arr = [1, 2, 3]; -## ^ hover: [number, number, number]", +## ^ type-exact: [number, number, number]", ); } @@ -176,7 +176,7 @@ fn test_hover_object() { // Hover on the variable name 'obj' at definition site shows the inferred type. 
check_hover( r"local obj = { a: 1 }; -## ^ hover: { a }", +## ^ type-exact: { a }", ); } @@ -186,7 +186,7 @@ fn test_hover_function() { // Note: Currently infers as 'any' - could be improved to show function type check_hover( r"local f(x) = x; -## ^ hover: any", +## ^ type-exact: any", ); } @@ -195,6 +195,14 @@ fn test_hover_std_function() { // 'std' at column 0 - std is an object containing stdlib functions check_hover( r"std.length -##^ hover: object", +##^ type-exact: object", + ); +} + +#[test] +fn test_hover_exact_number_markdown() { + check_hover( + r"local x = 42; +## ^ hover-exact: `number`", ); } diff --git a/crates/jrsonnet-lsp/tests/framework/assertions.rs b/crates/jrsonnet-lsp/tests/framework/assertions.rs index 8c645646..c7d96434 100644 --- a/crates/jrsonnet-lsp/tests/framework/assertions.rs +++ b/crates/jrsonnet-lsp/tests/framework/assertions.rs @@ -105,10 +105,16 @@ pub fn verify_annotations(parsed: &ParsedSource) -> VerificationResult { verify_usage(&ctx, ann, name, &mut result); } AnnotationKind::Hover(expected) => { - verify_hover(&ctx, ann, expected, &mut result); + verify_hover(&ctx, ann, expected, TextMatchMode::Contains, &mut result); + } + AnnotationKind::HoverExact(expected) => { + verify_hover(&ctx, ann, expected, TextMatchMode::Exact, &mut result); } AnnotationKind::Type(expected) => { - verify_type(&ctx, ann, expected, &mut result); + verify_type(&ctx, ann, expected, TextMatchMode::Contains, &mut result); + } + AnnotationKind::TypeExact(expected) => { + verify_type(&ctx, ann, expected, TextMatchMode::Exact, &mut result); } AnnotationKind::Error(expected) => { verify_error(&ctx, ann, expected, &mut result); @@ -273,11 +279,43 @@ fn range_contains_position(range: lsp_types::Range, pos: LspPosition) -> bool { starts_before_or_at && ends_after_or_at } -/// Verify that hover contains the expected text. 
+#[derive(Clone, Copy)] +enum TextMatchMode { + Contains, + Exact, +} + +fn hover_contents_text(hover: &lsp_types::Hover) -> String { + match &hover.contents { + lsp_types::HoverContents::Scalar(s) => match s { + lsp_types::MarkedString::String(text) => text.clone(), + lsp_types::MarkedString::LanguageString(ls) => ls.value.clone(), + }, + lsp_types::HoverContents::Array(arr) => arr + .iter() + .map(|s| match s { + lsp_types::MarkedString::String(text) => text.clone(), + lsp_types::MarkedString::LanguageString(ls) => ls.value.clone(), + }) + .collect::>() + .join("\n"), + lsp_types::HoverContents::Markup(m) => m.value.clone(), + } +} + +fn leading_backtick_type(contents: &str) -> Option { + let trimmed = contents.trim_start(); + let rest = trimmed.strip_prefix('`')?; + let (ty, _) = rest.split_once('`')?; + Some(ty.to_string()) +} + +/// Verify that hover content matches expected text. fn verify_hover( ctx: &TestContext, ann: &Annotation, expected: &str, + mode: TextMatchMode, result: &mut VerificationResult, ) { let pos = TestContext::position(ann.line, ann.column); @@ -286,28 +324,22 @@ fn verify_hover( match hover { Some(h) => { - let contents = match &h.contents { - lsp_types::HoverContents::Scalar(s) => match s { - lsp_types::MarkedString::String(text) => text.clone(), - lsp_types::MarkedString::LanguageString(ls) => ls.value.clone(), - }, - lsp_types::HoverContents::Array(arr) => arr - .iter() - .map(|s| match s { - lsp_types::MarkedString::String(text) => text.clone(), - lsp_types::MarkedString::LanguageString(ls) => ls.value.clone(), - }) - .collect::>() - .join("\n"), - lsp_types::HoverContents::Markup(m) => m.value.clone(), + let contents = hover_contents_text(&h); + let is_match = match mode { + TextMatchMode::Contains => contents.contains(expected), + TextMatchMode::Exact => contents == expected, }; - if contents.contains(expected) { + if is_match { result.add_success(); } else { + let expected_text = match mode { + TextMatchMode::Contains => 
format!("contain '{expected}'"), + TextMatchMode::Exact => format!("equal '{expected}'"), + }; result.add_failure(format!( - "{}:{}: hover should contain '{}', got '{}'", - ann.line, ann.column, expected, contents + "{}:{}: hover should {}, got '{}'", + ann.line, ann.column, expected_text, contents )); } } @@ -320,53 +352,63 @@ fn verify_hover( } } -/// Verify that the type at position matches expected. +/// Verify that the inferred type at position matches expected. fn verify_type( ctx: &TestContext, ann: &Annotation, expected: &str, + mode: TextMatchMode, result: &mut VerificationResult, ) { - // Use hover to get type information - let pos = TestContext::position(ann.line, ann.column); - - let hover = handlers::hover(&ctx.document, pos, &ctx.analysis); + let Some(offset) = ctx.offset(ann.line, ann.column) else { + result.add_failure(format!( + "{}:{}: could not convert position to offset", + ann.line, ann.column + )); + return; + }; - match hover { - Some(h) => { - let contents = match &h.contents { - lsp_types::HoverContents::Markup(m) => m.value.clone(), - lsp_types::HoverContents::Scalar(s) => match s { - lsp_types::MarkedString::String(text) => text.clone(), - lsp_types::MarkedString::LanguageString(ls) => ls.value.clone(), - }, - lsp_types::HoverContents::Array(arr) => arr - .iter() - .map(|s| match s { - lsp_types::MarkedString::String(text) => text.clone(), - lsp_types::MarkedString::LanguageString(ls) => ls.value.clone(), - }) - .collect::>() - .join("\n"), - }; + let direct_type = ctx + .analysis + .type_at_position(ctx.document.ast().syntax(), offset.into()) + .map(|ty| ctx.analysis.display(ty)); + let Some(mut actual) = direct_type else { + result.add_failure(format!( + "{}:{}: expected type '{}', but no inferred type was available", + ann.line, ann.column, expected + )); + return; + }; - // Check if the hover contains the expected type - if contents.contains(expected) { - result.add_success(); - } else { - result.add_failure(format!( - "{}:{}: 
expected type '{}', hover shows '{}'", - ann.line, ann.column, expected, contents - )); - } - } - None => { - result.add_failure(format!( - "{}:{}: expected type '{}', got no hover", - ann.line, ann.column, expected - )); + if actual == "any" { + let pos = TestContext::position(ann.line, ann.column); + let hover_type = handlers::hover(&ctx.document, pos, &ctx.analysis) + .as_ref() + .map(hover_contents_text) + .and_then(|contents| leading_backtick_type(&contents)); + if let Some(hover_type) = hover_type { + actual = hover_type; } } + + let is_match = match mode { + TextMatchMode::Contains => actual.contains(expected), + TextMatchMode::Exact => actual == expected, + }; + + if is_match { + result.add_success(); + return; + } + + let expected_text = match mode { + TextMatchMode::Contains => format!("contain '{expected}'"), + TextMatchMode::Exact => format!("equal '{expected}'"), + }; + result.add_failure(format!( + "{}:{}: type should {}, got '{}'", + ann.line, ann.column, expected_text, actual + )); } /// Verify that completions include expected items. 
@@ -543,4 +585,40 @@ x + x result.failures ); } + + #[test] + fn test_verify_hover_exact_success() { + let source = r"local x = 42; +## ^ hover-exact: `number`"; + let parsed = parse_annotated_source(source); + let result = verify_annotations(&parsed); + assert!(result.passed, "Failures: {:?}", result.failures); + assert_eq!(result.passed_count, 1); + } + + #[test] + fn test_verify_type_exact_success() { + let source = r"local x = 42; +## ^ type-exact: number"; + let parsed = parse_annotated_source(source); + let result = verify_annotations(&parsed); + assert!(result.passed, "Failures: {:?}", result.failures); + assert_eq!(result.passed_count, 1); + } + + #[test] + fn test_verify_type_exact_mismatch() { + let source = r"local x = 42; +## ^ type-exact: string"; + let parsed = parse_annotated_source(source); + let result = verify_annotations(&parsed); + assert!(!result.passed, "expected verification to fail"); + assert_eq!(result.passed_count, 0); + assert_eq!(result.total_count, 1); + assert!( + result.failures[0].contains("type should equal 'string'"), + "unexpected failure message: {:?}", + result.failures + ); + } } diff --git a/crates/jrsonnet-lsp/tests/framework/mod.rs b/crates/jrsonnet-lsp/tests/framework/mod.rs index 6d3b4120..88eb1901 100644 --- a/crates/jrsonnet-lsp/tests/framework/mod.rs +++ b/crates/jrsonnet-lsp/tests/framework/mod.rs @@ -24,7 +24,9 @@ //! | `## ^ def: x` | Definition of `x` is at this position | //! | `## ^ use: x` | Usage/reference to `x` is at this position | //! | `## ^ hover: text` | Hover at this position contains "text" | +//! | `## ^ hover-exact: text` | Hover at this position equals "text" | //! | `## ^ type: T` | Type at this position is `T` | +//! | `## ^ type-exact: T` | Type at this position equals `T` | //! | `## ^ error: msg` | Error at this position contains "msg" | //! | `## ^ completion: a, b` | Completions include `a` and `b` | //! 
| `## ^ no-completion: x` | Completions do NOT include `x` | diff --git a/crates/jrsonnet-lsp/tests/framework/parser.rs b/crates/jrsonnet-lsp/tests/framework/parser.rs index c1ebcced..1756a8aa 100644 --- a/crates/jrsonnet-lsp/tests/framework/parser.rs +++ b/crates/jrsonnet-lsp/tests/framework/parser.rs @@ -35,8 +35,12 @@ pub enum AnnotationKind { Usage(String), /// `^ hover: text` - expect hover to contain `text` Hover(String), + /// `^ hover-exact: text` - expect hover to equal `text` + HoverExact(String), /// `^ type: T` - expect type `T` at this position Type(String), + /// `^ type-exact: T` - expect exact inferred type `T` at this position + TypeExact(String), /// `^ error: msg` - expect error containing `msg` Error(String), /// `^ completion: item1, item2` - expect these completions @@ -145,7 +149,9 @@ fn parse_annotation_kind(s: &str) -> Option { "def" => Some(AnnotationKind::Definition(value.to_string())), "use" => Some(AnnotationKind::Usage(value.to_string())), "hover" => Some(AnnotationKind::Hover(value.to_string())), + "hover-exact" => Some(AnnotationKind::HoverExact(value.to_string())), "type" => Some(AnnotationKind::Type(value.to_string())), + "type-exact" => Some(AnnotationKind::TypeExact(value.to_string())), "error" => Some(AnnotationKind::Error(value.to_string())), "completion" => { let items: Vec = value.split(',').map(|s| s.trim().to_string()).collect(); @@ -245,4 +251,23 @@ x + 1 assert_eq!(anns_at_10.len(), 1); assert!(matches!(&anns_at_10[0].kind, AnnotationKind::Type(_))); } + + #[test] + fn test_parse_exact_hover_and_type_annotations() { + let source = r"local x = 1; +## ^ hover-exact: `number` +## ^ type-exact: number"; + + let parsed = parse_annotated_source(source); + assert_eq!(parsed.annotations.len(), 2); + + assert_eq!( + parsed.annotations[0].kind, + AnnotationKind::HoverExact("`number`".to_string()) + ); + assert_eq!( + parsed.annotations[1].kind, + AnnotationKind::TypeExact("number".to_string()) + ); + } } From 
2b996eb23cca61bbceee6cd400c42bf27ae8baae Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Mon, 9 Feb 2026 18:23:40 +0000 Subject: [PATCH 026/210] test(lsp): remove redundant URI clones in navigation tests --- crates/jrsonnet-lsp/tests/integration_test.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/jrsonnet-lsp/tests/integration_test.rs b/crates/jrsonnet-lsp/tests/integration_test.rs index bd61122f..12c36ef6 100644 --- a/crates/jrsonnet-lsp/tests/integration_test.rs +++ b/crates/jrsonnet-lsp/tests/integration_test.rs @@ -1450,7 +1450,7 @@ alias"#, assert_eq!( definition, Some(GotoDefinitionResponse::Scalar(lsp_types::Location { - uri: lib_uri.clone(), + uri: lib_uri, range: lsp_types::Range { start: Position { line: 0, @@ -1901,7 +1901,7 @@ alias"#, assert_eq!( definition_result, Some(GotoDefinitionResponse::Scalar(lsp_types::Location { - uri: lib_uri.clone(), + uri: lib_uri, range: lsp_types::Range { start: Position { line: 0, From c59cea022a249997858d769a7fabbd331f091e66 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Mon, 9 Feb 2026 18:43:34 +0000 Subject: [PATCH 027/210] refactor(lsp): unify import resolution for graph and navigation --- Cargo.lock | 1 + crates/jrsonnet-lsp-import/Cargo.toml | 3 + crates/jrsonnet-lsp-import/src/lib.rs | 2 + crates/jrsonnet-lsp-import/src/resolve.rs | 122 ++++++++++++++++++ crates/jrsonnet-lsp/src/analysis/tanka.rs | 74 +++++++++-- crates/jrsonnet-lsp/src/server.rs | 31 ++--- .../jrsonnet-lsp/src/server/async_requests.rs | 38 +++--- 7 files changed, 218 insertions(+), 53 deletions(-) create mode 100644 crates/jrsonnet-lsp-import/src/resolve.rs diff --git a/Cargo.lock b/Cargo.lock index efc25ec0..77da9964 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1902,6 +1902,7 @@ dependencies = [ "jrsonnet-rowan-parser", "rayon", "rowan", + "tempfile", ] [[package]] diff --git a/crates/jrsonnet-lsp-import/Cargo.toml b/crates/jrsonnet-lsp-import/Cargo.toml index 88b240f0..1322a0cc 100644 --- 
a/crates/jrsonnet-lsp-import/Cargo.toml +++ b/crates/jrsonnet-lsp-import/Cargo.toml @@ -13,5 +13,8 @@ jrsonnet-rowan-parser = { version = "0.5.0-pre97", path = "../jrsonnet-rowan-par rayon = "1.11.0" rowan.workspace = true +[dev-dependencies] +tempfile.workspace = true + [lints] workspace = true diff --git a/crates/jrsonnet-lsp-import/src/lib.rs b/crates/jrsonnet-lsp-import/src/lib.rs index e64bfe08..c3d98b2d 100644 --- a/crates/jrsonnet-lsp-import/src/lib.rs +++ b/crates/jrsonnet-lsp-import/src/lib.rs @@ -7,6 +7,7 @@ pub mod graph; pub mod parse; +pub mod resolve; pub mod work_queue; pub use graph::{parse_document_imports, ImportEntry, ImportGraph}; @@ -14,4 +15,5 @@ pub use parse::{ check_import_from_token, check_import_path, extract_import_path, find_import_in_node, get_import_path_from_node, }; +pub use resolve::{resolve_import_path, resolve_import_path_from_base}; pub use work_queue::{WorkQueue, WorkQueueExt}; diff --git a/crates/jrsonnet-lsp-import/src/resolve.rs b/crates/jrsonnet-lsp-import/src/resolve.rs new file mode 100644 index 00000000..d02c977e --- /dev/null +++ b/crates/jrsonnet-lsp-import/src/resolve.rs @@ -0,0 +1,122 @@ +//! Shared import path resolution utilities. +//! +//! This module centralizes path resolution rules so all LSP subsystems resolve +//! imports consistently. + +use std::path::{Path, PathBuf}; + +use jrsonnet_lsp_document::CanonicalPath; + +/// Resolve an import path from an importing file. +/// +/// Resolution order: +/// 1. Relative to the importing file's directory +/// 2. Each configured import root (`jpath`, vendor roots, etc.) +#[must_use] +pub fn resolve_import_path( + importer_file: &CanonicalPath, + import_path: &str, + import_roots: &[PathBuf], +) -> Option { + resolve_import_path_from_base(importer_file.as_path(), import_path, import_roots) +} + +/// Resolve an import path from a base file path. +/// +/// The `base_file` should be the full path of the importing file. 
+#[must_use] +pub fn resolve_import_path_from_base( + base_file: &Path, + import_path: &str, + import_roots: &[PathBuf], +) -> Option { + let import = Path::new(import_path); + + if import.is_absolute() { + return canonical_if_exists(import); + } + + if let Some(parent) = base_file.parent() { + let relative = parent.join(import); + if let Some(canonical) = canonical_if_exists(&relative) { + return Some(canonical); + } + } + + for root in import_roots { + let candidate = root.join(import); + if let Some(canonical) = canonical_if_exists(&candidate) { + return Some(canonical); + } + } + + None +} + +fn canonical_if_exists(path: &Path) -> Option { + path.canonicalize().ok().map(CanonicalPath::new) +} + +#[cfg(test)] +mod tests { + use std::fs; + + use tempfile::TempDir; + + use super::*; + + #[test] + fn test_resolve_relative_first() { + let tmp = TempDir::new().expect("tempdir should be created"); + let root = tmp.path(); + let importer = root.join("main.jsonnet"); + let local_lib = root.join("lib.jsonnet"); + fs::write(&importer, "import \"lib.jsonnet\"").expect("importer should be written"); + fs::write(&local_lib, "{}").expect("local lib should be written"); + + let importer = CanonicalPath::new( + importer + .canonicalize() + .expect("importer path should canonicalize"), + ); + let resolved = resolve_import_path(&importer, "lib.jsonnet", &[]); + assert_eq!( + resolved, + Some(CanonicalPath::new( + local_lib + .canonicalize() + .expect("local lib path should canonicalize") + )) + ); + } + + #[test] + fn test_resolve_from_import_roots() { + let tmp = TempDir::new().expect("tempdir should be created"); + let root = tmp.path(); + let importer_dir = root.join("app"); + let jpath_dir = root.join("vendor"); + fs::create_dir_all(&importer_dir).expect("importer directory should be created"); + fs::create_dir_all(&jpath_dir).expect("jpath directory should be created"); + + let importer = importer_dir.join("main.jsonnet"); + let shared_lib = 
jpath_dir.join("shared.libsonnet"); + fs::write(&importer, "import \"shared.libsonnet\"").expect("importer should be written"); + fs::write(&shared_lib, "{}").expect("shared lib should be written"); + + let importer = CanonicalPath::new( + importer + .canonicalize() + .expect("importer path should canonicalize"), + ); + let resolved = resolve_import_path(&importer, "shared.libsonnet", &[jpath_dir]); + assert_eq!( + resolved, + Some(CanonicalPath::new( + shared_lib + .canonicalize() + .expect("shared lib path should canonicalize") + )) + ); + } +} diff --git a/crates/jrsonnet-lsp/src/analysis/tanka.rs b/crates/jrsonnet-lsp/src/analysis/tanka.rs index 0cf3e91a..dd99a2e8 100644 --- a/crates/jrsonnet-lsp/src/analysis/tanka.rs +++ b/crates/jrsonnet-lsp/src/analysis/tanka.rs @@ -4,7 +4,10 @@ //! `jsonnetfile.json` or `tkrc.yaml` in parent directories to find the //! project root and add appropriate vendor/lib paths to the jpath. -use std::path::{Path, PathBuf}; +use std::{ + collections::HashSet, + path::{Path, PathBuf}, +}; /// Find the Tanka project root by walking up from the given path. /// @@ -90,6 +93,29 @@ pub fn resolve_jpath(path: &Path) -> Vec { jpath } +/// Compute effective import roots for LSP import resolution. +/// +/// This always includes configured roots. When Tanka mode is enabled, additional +/// Tanka-derived roots are appended and the result is de-duplicated while +/// preserving order. 
+#[must_use] +pub fn effective_import_roots( + path: &Path, + configured_roots: &[PathBuf], + resolve_paths_with_tanka: bool, +) -> Vec { + let mut roots: Vec = configured_roots.to_vec(); + if resolve_paths_with_tanka { + roots.extend(resolve_jpath(path)); + } + + let mut seen = HashSet::new(); + roots + .into_iter() + .filter(|root| seen.insert(root.clone())) + .collect() +} + #[cfg(test)] mod tests { use std::fs; @@ -150,10 +176,8 @@ mod tests { // Resolve jpath from environment directory let jpath = resolve_jpath(&env); - - assert!(jpath.contains(&root.join("vendor"))); - assert!(jpath.contains(&root.join("lib"))); - assert!(jpath.contains(&env)); + let expected = vec![root.join("vendor"), root.join("lib"), env]; + assert_eq!(jpath, expected); } #[test] @@ -171,16 +195,46 @@ mod tests { // Resolve jpath from environment directory let jpath = resolve_jpath(&env); - - // Both vendors should be included - assert!(jpath.contains(&root.join("vendor"))); - assert!(jpath.contains(&env.join("vendor"))); + let expected = vec![root.join("vendor"), env.join("vendor"), env]; + assert_eq!(jpath, expected); } #[test] fn test_resolve_jpath_no_root() { let tmp = TempDir::new().unwrap(); let jpath = resolve_jpath(tmp.path()); - assert!(jpath.is_empty()); + assert_eq!(jpath, Vec::::new()); + } + + #[test] + fn test_effective_import_roots_without_tanka() { + let tmp = TempDir::new().unwrap(); + let base = tmp.path().join("env").join("main.jsonnet"); + fs::create_dir_all(base.parent().expect("base should have parent")).unwrap(); + fs::write(&base, "{}").unwrap(); + + let configured = vec![ + PathBuf::from("/configured/one"), + PathBuf::from("/configured/two"), + ]; + let roots = effective_import_roots(&base, &configured, false); + assert_eq!(roots, configured); + } + + #[test] + fn test_effective_import_roots_with_tanka_dedups() { + let tmp = TempDir::new().unwrap(); + let root = tmp.path(); + fs::write(root.join("jsonnetfile.json"), "{}").unwrap(); + 
fs::create_dir(root.join("vendor")).unwrap(); + let env = root.join("environments").join("prod"); + fs::create_dir_all(&env).unwrap(); + let file = env.join("main.jsonnet"); + fs::write(&file, "{}").unwrap(); + + let configured = vec![root.join("vendor")]; + let roots = effective_import_roots(&file, &configured, true); + let expected = vec![root.join("vendor"), env]; + assert_eq!(roots, expected); } } diff --git a/crates/jrsonnet-lsp/src/server.rs b/crates/jrsonnet-lsp/src/server.rs index a9c739ff..de2c09c7 100644 --- a/crates/jrsonnet-lsp/src/server.rs +++ b/crates/jrsonnet-lsp/src/server.rs @@ -14,7 +14,7 @@ use anyhow::{Context, Result}; use crossbeam_channel::{select, Receiver, Sender}; use jrsonnet_lsp_document::{CanonicalPath, DocVersion}; use jrsonnet_lsp_handlers as handlers; -use jrsonnet_lsp_import::{parse_document_imports, ImportGraph}; +use jrsonnet_lsp_import::{parse_document_imports, resolve_import_path, ImportGraph}; use jrsonnet_lsp_inference::{ new_shared_cache, DocumentManager, SharedDocumentManager, SharedTypeCache, }; @@ -50,7 +50,7 @@ use tracing::{debug, error, info, warn}; use self::async_requests::AsyncRequestContext; use crate::{ - analysis::{EvalConfig, Evaluator}, + analysis::{tanka::effective_import_roots, EvalConfig, Evaluator}, async_diagnostics::{AsyncDiagnostics, DiagnosticsConfig}, config::ServerConfig, }; @@ -1037,30 +1037,17 @@ impl Server { return; }; - let from_path = path.clone(); let config = self.config.read(); - let jpath = config.jpath.clone(); + let import_roots = effective_import_roots( + path.as_path(), + &config.jpath, + config.resolve_paths_with_tanka, + ); drop(config); - // Create a resolver closure that captures the path and jpath + // Create a resolver closure that captures the path and effective import roots. 
let resolve_import = |import: &str| -> Option { - // First, try relative to the importing file - if let Some(parent) = from_path.as_path().parent() { - let resolved = parent.join(import); - if let Ok(canonical) = resolved.canonicalize() { - return Some(CanonicalPath::new(canonical)); - } - } - - // Then, try each jpath directory - for jpath_dir in &jpath { - let resolved = jpath_dir.join(import); - if let Ok(canonical) = resolved.canonicalize() { - return Some(CanonicalPath::new(canonical)); - } - } - - None + resolve_import_path(path, import, &import_roots) }; // Parse imports OUTSIDE the lock to minimize lock hold time. diff --git a/crates/jrsonnet-lsp/src/server/async_requests.rs b/crates/jrsonnet-lsp/src/server/async_requests.rs index 4f08cf91..39833a7c 100644 --- a/crates/jrsonnet-lsp/src/server/async_requests.rs +++ b/crates/jrsonnet-lsp/src/server/async_requests.rs @@ -2,7 +2,7 @@ use std::sync::Arc; use jrsonnet_lsp_document::{to_lsp_range, CanonicalPath, DocVersion, Document, SymbolName}; use jrsonnet_lsp_handlers as handlers; -use jrsonnet_lsp_import::ImportGraph; +use jrsonnet_lsp_import::{resolve_import_path, ImportGraph}; use jrsonnet_lsp_inference::{SharedDocumentManager, SharedTypeCache, TypeAnalysis, TypeProvider}; use jrsonnet_lsp_types::GlobalTyStore; use jrsonnet_rowan_parser::AstNode; @@ -18,7 +18,7 @@ use rayon::prelude::*; use tracing::{info, warn}; use super::SharedConfig; -use crate::analysis::eval::create_state_with_jpath; +use crate::analysis::{eval::create_state_with_jpath, tanka::effective_import_roots}; #[derive(Clone)] pub(super) struct AsyncRequestContext { @@ -191,7 +191,13 @@ impl AsyncRequestContext { let lsp_pos = position.into(); let analysis = self.analyze_document(&path, &doc); - let import_roots = self.config.read().jpath.clone(); + let config = self.config.read(); + let import_roots = effective_import_roots( + path.as_path(), + &config.jpath, + config.resolve_paths_with_tanka, + ); + drop(config); let list = 
handlers::completion_with_import_roots( &doc, @@ -466,25 +472,15 @@ impl AsyncRequestContext { serde_json::to_value(refs).ok() } - /// Search order: - /// 1. Relative to the importing file's directory - /// 2. Each directory in jpath (in order) fn resolve_import_path(&self, from: &CanonicalPath, import: &str) -> Option { - let parent = from.as_path().parent()?; - let resolved = parent.join(import); - if let Ok(canonical) = resolved.canonicalize() { - return Some(CanonicalPath::new(canonical)); - } - - let jpath = self.config.read().jpath.clone(); - for jpath_dir in &jpath { - let resolved = jpath_dir.join(import); - if let Ok(canonical) = resolved.canonicalize() { - return Some(CanonicalPath::new(canonical)); - } - } - - None + let config = self.config.read(); + let import_roots = effective_import_roots( + from.as_path(), + &config.jpath, + config.resolve_paths_with_tanka, + ); + drop(config); + resolve_import_path(from, import, &import_roots) } fn load_document_for_path(&self, path: &CanonicalPath) -> Option { From f30fdb36d15c2ac6b4aa0af5a46a3f9ced0cf826 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Mon, 9 Feb 2026 18:47:03 +0000 Subject: [PATCH 028/210] fix(lsp): preserve import graph semantics on didClose --- crates/jrsonnet-lsp/src/server.rs | 6 +- crates/jrsonnet-lsp/tests/integration_test.rs | 131 +++++++++++++++++- 2 files changed, 129 insertions(+), 8 deletions(-) diff --git a/crates/jrsonnet-lsp/src/server.rs b/crates/jrsonnet-lsp/src/server.rs index de2c09c7..5cabe2ed 100644 --- a/crates/jrsonnet-lsp/src/server.rs +++ b/crates/jrsonnet-lsp/src/server.rs @@ -887,8 +887,10 @@ impl Server { // (dependents may have cached types based on this file's exports) self.invalidate_type_cache_with_dependents(&path); - // Remove from import graph - self.import_graph.write().remove_file(&path); + // Keep import graph semantics for closed documents by re-indexing from + // cached/disk content instead of dropping the file node. 
+ self.update_import_graph(&path); + self.schedule_diagnostics_for_open_importers(&path); // Clear diagnostics for closed document self.send_notification::(lsp_types::PublishDiagnosticsParams { diff --git a/crates/jrsonnet-lsp/tests/integration_test.rs b/crates/jrsonnet-lsp/tests/integration_test.rs index 12c36ef6..f5dfd056 100644 --- a/crates/jrsonnet-lsp/tests/integration_test.rs +++ b/crates/jrsonnet-lsp/tests/integration_test.rs @@ -9,8 +9,8 @@ use assert_matches::assert_matches; use lsp_server::{Connection, Message, Notification, Request}; use lsp_types::{ notification::{ - DidChangeConfiguration, DidChangeWatchedFiles, DidOpenTextDocument, DidSaveTextDocument, - Notification as _, PublishDiagnostics, + DidChangeConfiguration, DidChangeWatchedFiles, DidCloseTextDocument, DidOpenTextDocument, + DidSaveTextDocument, Notification as _, PublishDiagnostics, }, request::{ CodeActionRequest, CodeLensRequest, CodeLensResolve, DocumentHighlightRequest, @@ -18,10 +18,10 @@ use lsp_types::{ Initialize, InlayHintRequest, References, Rename, Request as _, SemanticTokensRangeRequest, Shutdown, }, - DidChangeConfigurationParams, DidChangeWatchedFilesParams, DidOpenTextDocumentParams, - DidSaveTextDocumentParams, ExecuteCommandParams, FileChangeType, FileEvent, - GotoDefinitionParams, GotoDefinitionResponse, InitializeParams, PartialResultParams, Position, - ReferenceContext, ReferenceParams, RenameParams, SemanticTokensRangeParams, + DidChangeConfigurationParams, DidChangeWatchedFilesParams, DidCloseTextDocumentParams, + DidOpenTextDocumentParams, DidSaveTextDocumentParams, ExecuteCommandParams, FileChangeType, + FileEvent, GotoDefinitionParams, GotoDefinitionResponse, InitializeParams, PartialResultParams, + Position, ReferenceContext, ReferenceParams, RenameParams, SemanticTokensRangeParams, TextDocumentIdentifier, TextDocumentItem, TextDocumentPositionParams, WorkDoneProgressParams, }; use serde_json::json; @@ -94,6 +94,18 @@ fn did_save_notification(uri: &str, text: 
Option<&str>) -> Notification { ) } +fn did_close_notification(uri: &str) -> Notification { + let params = DidCloseTextDocumentParams { + text_document: TextDocumentIdentifier { + uri: uri.parse().unwrap(), + }, + }; + Notification::new( + DidCloseTextDocument::METHOD.to_string(), + serde_json::to_value(params).unwrap(), + ) +} + /// Helper to create a goto definition request. fn goto_definition_request(id: i32, uri: &str, line: u32, character: u32) -> Request { let params = GotoDefinitionParams { @@ -2558,6 +2570,113 @@ fn test_watched_file_refreshes_unopened_importers_for_references() { .expect("Server thread should exit cleanly"); } +#[test] +fn test_did_close_preserves_import_graph_for_references() { + let tmp = TempDir::new().expect("tempdir should be created"); + let lib_path = tmp.path().join("lib.jsonnet"); + let main_path = tmp.path().join("main.jsonnet"); + + let lib_text = "local target = 1; target"; + let main_text = "local lib = import 'lib.jsonnet'; lib.target"; + fs::write(&lib_path, lib_text).expect("lib should be written"); + fs::write(&main_path, main_text).expect("main should be written"); + + let lib_uri = file_uri(&lib_path.canonicalize().expect("lib should canonicalize")); + let main_uri = file_uri(&main_path.canonicalize().expect("main should canonicalize")); + + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + client_conn + .sender + .send(Message::Notification(did_open_notification( + &lib_uri, lib_text, + ))) + .unwrap(); + let _ = recv_publish_diagnostics_for_uri(&client_conn, &lib_uri, Duration::from_secs(5)); + + client_conn + .sender + .send(Message::Notification(did_open_notification( + &main_uri, main_text, + ))) + .unwrap(); + let _ = 
recv_publish_diagnostics_for_uri(&client_conn, &main_uri, Duration::from_secs(5)); + + client_conn + .sender + .send(Message::Notification(did_close_notification(&main_uri))) + .unwrap(); + let closed_diagnostics = + recv_publish_diagnostics_for_uri(&client_conn, &main_uri, Duration::from_secs(5)); + assert_eq!( + closed_diagnostics, + lsp_types::PublishDiagnosticsParams { + uri: main_uri.parse().unwrap(), + diagnostics: Vec::new(), + version: None, + } + ); + + client_conn + .sender + .send(Message::Request(execute_command_request( + 2, + "jrsonnet.findTransitiveImporters", + vec![serde_json::Value::String(lib_uri.clone())], + ))) + .unwrap(); + let response = recv_response(&client_conn, 2); + assert!(response.error.is_none(), "Command should succeed"); + let transitive_importers = response.result.expect("command should return result"); + assert_eq!( + transitive_importers, + json!({ + "file": lib_uri, + "transitiveImporters": [main_uri], + }) + ); + + client_conn + .sender + .send(Message::Request(references_request( + 3, &lib_uri, 0, 6, false, + ))) + .unwrap(); + let response = recv_response(&client_conn, 3); + assert!(response.error.is_none(), "References should succeed"); + let references: Option> = + serde_json::from_value(response.result.expect("should have result")).unwrap(); + let references = references.unwrap_or_default(); + assert_eq!( + references, + vec![location(&lib_uri, 18, 24), location(&main_uri, 38, 44)] + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(4))) + .unwrap(); + let _ = recv_response(&client_conn, 4); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + #[test] fn test_cross_file_rename_updates_definition_and_importers() { let tmp = TempDir::new().expect("tempdir should be created"); From e33d1d29dcea05242f48778c391e42807922bffa Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Mon, 9 Feb 2026 
19:01:15 +0000 Subject: [PATCH 029/210] refactor(lsp): drive cross-file resolution from import graph --- .../jrsonnet-lsp-handlers/src/references.rs | 120 +++------- crates/jrsonnet-lsp-handlers/src/rename.rs | 110 ++------- crates/jrsonnet-lsp-inference/src/provider.rs | 78 ++++++- .../jrsonnet-lsp-inference/src/type_cache.rs | 53 +++-- .../jrsonnet-lsp/src/server/async_requests.rs | 15 +- crates/jrsonnet-lsp/tests/integration_test.rs | 215 ++++++++++++++++++ 6 files changed, 379 insertions(+), 212 deletions(-) diff --git a/crates/jrsonnet-lsp-handlers/src/references.rs b/crates/jrsonnet-lsp-handlers/src/references.rs index 19e2ea3a..6400bd7d 100644 --- a/crates/jrsonnet-lsp-handlers/src/references.rs +++ b/crates/jrsonnet-lsp-handlers/src/references.rs @@ -1,18 +1,16 @@ //! Find references handler. //! //! Finds all references to a symbol within the current document and across -//! all open documents (cross-file references). - -use std::path::Path; +//! all open/importing documents (cross-file references). 
use jrsonnet_lsp_document::{to_lsp_range, token_at_offset, CanonicalPath, Document, LspPosition}; -use jrsonnet_lsp_import::get_import_path_from_node; +use jrsonnet_lsp_import::ImportGraph; use jrsonnet_lsp_scope::{ find_definition_range, is_at_file_scope, is_definition_site, is_variable_reference, ScopeResolver, }; use jrsonnet_rowan_parser::{ - nodes::{Bind, Destruct, ExprBase, ExprField, StmtLocal}, + nodes::{ExprBase, ExprField}, AstNode, SyntaxKind, SyntaxNode, SyntaxToken, }; use lsp_types::{Location, Uri}; @@ -92,6 +90,7 @@ pub fn find_cross_file_references<'a>( current_path: &CanonicalPath, position: LspPosition, documents: &[(&'a CanonicalPath, &'a Document)], + import_graph: &ImportGraph, ) -> Vec { let text = current_document.text(); let line_index = current_document.line_index(); @@ -117,15 +116,15 @@ pub fn find_cross_file_references<'a>( return Vec::new(); }; - let current_path_str = current_path.as_path(); - // Search all other documents for imports of this file (in parallel) let references: Vec = documents .par_iter() .filter(|(doc_path, _)| *doc_path != current_path) .flat_map(|(doc_path, doc)| { - // Find imports in this document that point to our file - let imports = find_imports_of_file(doc, doc_path.as_path(), current_path_str); + let import_bindings = import_binding_names(import_graph, doc_path, current_path); + if import_bindings.is_empty() { + return Vec::new(); + } let Ok(doc_uri) = doc_path.to_uri() else { return Vec::new(); @@ -133,12 +132,9 @@ pub fn find_cross_file_references<'a>( let doc_text = doc.text(); let doc_line_index = doc.line_index(); - imports + import_bindings .into_iter() - .flat_map(|import_info| { - // Find references to the imported name in this document - find_references_to_import(doc, &import_info, &name) - }) + .flat_map(|binding_name| find_references_to_import(doc, &binding_name, &name)) .map(|range| Location { uri: doc_uri.clone(), range: to_lsp_range(range, doc_line_index, doc_text), @@ -150,6 +146,21 @@ pub fn 
find_cross_file_references<'a>( references } +fn import_binding_names( + import_graph: &ImportGraph, + importer_path: &CanonicalPath, + target_path: &CanonicalPath, +) -> Vec { + let mut bindings: Vec = import_graph + .imports_of_target(importer_path, target_path) + .into_iter() + .filter_map(|entry| entry.binding_name.clone()) + .collect(); + bindings.sort(); + bindings.dedup(); + bindings +} + fn resolve_exported_symbol_name(document: &Document, token: &SyntaxToken) -> Option { if token.kind() != SyntaxKind::IDENT { return None; @@ -189,87 +200,10 @@ fn definition_token(document: &Document, range: TextRange, name: &str) -> Option }) } -/// Information about an import statement. -struct ImportInfo { - /// The name this import is bound to (e.g., "lib" in "local lib = import 'lib.jsonnet'") - binding_name: String, -} - -/// Find imports in a document that point to a specific file. -fn find_imports_of_file(doc: &Document, doc_path: &Path, target_path: &Path) -> Vec { - let mut imports = Vec::new(); - let ast = doc.ast(); - let doc_dir = doc_path.parent(); - - for node in ast.syntax().descendants() { - if node.kind() == SyntaxKind::STMT_LOCAL { - if let Some(stmt_local) = StmtLocal::cast(node) { - for bind in stmt_local.binds() { - if let Some(info) = check_bind_for_import(&bind, doc_dir, target_path) { - imports.push(info); - } - } - } - } - } - - imports -} - -/// Check if a bind imports from a specific file. 
-fn check_bind_for_import( - bind: &Bind, - doc_dir: Option<&Path>, - target_path: &Path, -) -> Option { - match bind { - Bind::BindDestruct(bd) => { - let destruct = bd.into()?; - if let Destruct::DestructFull(full) = destruct { - let bind_name = full.name()?; - let name_text = bind_name.ident_lit()?.text().to_string(); - - // Check if the expression is an import - let expr = bd.value()?; - if let Some(import_path) = get_import_path_from_node(expr.syntax()) { - if import_resolves_to(doc_dir, &import_path, target_path) { - return Some(ImportInfo { - binding_name: name_text, - }); - } - } - } - None - } - Bind::BindFunction(_) => None, // Functions can't be imports - } -} - -/// Check if an import path resolves to the target file. -fn import_resolves_to(doc_dir: Option<&Path>, import_path: &str, target_path: &Path) -> bool { - let Some(dir) = doc_dir else { - return false; - }; - - let resolved = dir.join(import_path); - - // Try to canonicalize both paths for comparison - let resolved_canonical = resolved.canonicalize().ok(); - let target_canonical = target_path.canonicalize().ok(); - - match (resolved_canonical, target_canonical) { - (Some(r), Some(t)) => r == t, - _ => { - // Fallback to simple comparison - resolved == target_path - } - } -} - /// Find references to an imported symbol in a document. 
fn find_references_to_import( doc: &Document, - import_info: &ImportInfo, + binding_name: &str, field_name: &str, ) -> Vec { let mut references = Vec::new(); @@ -280,7 +214,7 @@ fn find_references_to_import( // Look for field accesses: importName.fieldName if node.kind() == SyntaxKind::EXPR_FIELD { // Check if this is accessing the imported binding - if let Some(range) = check_field_access(&node, &import_info.binding_name, field_name) { + if let Some(range) = check_field_access(&node, binding_name, field_name) { references.push(range); } } diff --git a/crates/jrsonnet-lsp-handlers/src/rename.rs b/crates/jrsonnet-lsp-handlers/src/rename.rs index e52d797c..b186614a 100644 --- a/crates/jrsonnet-lsp-handlers/src/rename.rs +++ b/crates/jrsonnet-lsp-handlers/src/rename.rs @@ -218,9 +218,14 @@ pub fn rename_cross_file( // Find references in each importing file for importer_path in &importers { - if let Some((importer_uri, edits)) = - find_references_in_importer(importer_path, current_path, &old_name, new_name, manager) - { + if let Some((importer_uri, edits)) = find_references_in_importer( + importer_path, + current_path, + &old_name, + new_name, + manager, + import_graph, + ) { all_changes.entry(importer_uri).or_default().extend(edits); } } @@ -228,24 +233,16 @@ pub fn rename_cross_file( workspace_edit_from_changes(all_changes) } -/// Resolve an import path relative to the importing file. -fn resolve_import_path(importer_path: &CanonicalPath, import_str: &str) -> Option { - use std::path::Path; - - let import_path = Path::new(import_str); - - // If absolute, use directly - if import_path.is_absolute() { - return Some(CanonicalPath::new(import_path.to_path_buf())); - } - - // Resolve relative to the importer's directory - let importer_dir = importer_path.as_path().parent()?; - let resolved = importer_dir.join(import_path); - - // Canonicalize to handle . and .. 
- let canonical = resolved.canonicalize().ok()?; - Some(CanonicalPath::new(canonical)) +fn import_binding_names( + import_graph: &ImportGraph, + importer_path: &CanonicalPath, + source_path: &CanonicalPath, +) -> HashSet { + import_graph + .imports_of_target(importer_path, source_path) + .into_iter() + .filter_map(|entry| entry.binding_name.clone()) + .collect() } /// Find references to a symbol in a file that imports the source file. @@ -261,9 +258,9 @@ fn find_references_in_importer( old_name: &str, new_name: &SymbolName, manager: &Arc, + import_graph: &ImportGraph, ) -> Option<(Uri, Vec)> { - use jrsonnet_lsp_import::extract_import_path; - use jrsonnet_rowan_parser::nodes::{ExprBase, ExprField, StmtLocal}; + use jrsonnet_rowan_parser::nodes::{ExprBase, ExprField}; let doc = manager.get_document(importer_path)?; let uri = importer_path.to_uri().ok()?; @@ -272,72 +269,7 @@ fn find_references_in_importer( let ast = doc.ast(); let mut edits = Vec::new(); - - // Find local variables bound to imports of source_path - let mut import_bindings: HashSet = HashSet::new(); - - // Walk the AST to find import bindings - for node in ast.syntax().descendants() { - if node.kind() != SyntaxKind::STMT_LOCAL { - continue; - } - - let Some(stmt_local) = StmtLocal::cast(node) else { - continue; - }; - - for bind in stmt_local.binds() { - let jrsonnet_rowan_parser::nodes::Bind::BindDestruct(bd) = bind else { - continue; - }; - - let Some(value) = bd.value() else { - continue; - }; - - // Check if the value is an import of source_path - let Some(base) = value.expr_base() else { - continue; - }; - - let ExprBase::ExprImport(import_expr) = base else { - continue; - }; - - let Some(import_str) = extract_import_path(&import_expr) else { - continue; - }; - - // Resolve the import path relative to the importer - let Some(resolved_path) = resolve_import_path(importer_path, &import_str) else { - continue; - }; - - if resolved_path != *source_path { - continue; - } - - // Found an import of 
source_path, get the binding name - // Use explicit method syntax to avoid ambiguity with Into trait - let Some(destruct) = jrsonnet_rowan_parser::nodes::BindDestruct::into(&bd) else { - continue; - }; - - let jrsonnet_rowan_parser::nodes::Destruct::DestructFull(full) = destruct else { - continue; - }; - - let Some(name_node) = full.name() else { - continue; - }; - - let Some(ident) = name_node.ident_lit() else { - continue; - }; - - import_bindings.insert(ident.text().to_string()); - } - } + let import_bindings = import_binding_names(import_graph, importer_path, source_path); if import_bindings.is_empty() { return None; diff --git a/crates/jrsonnet-lsp-inference/src/provider.rs b/crates/jrsonnet-lsp-inference/src/provider.rs index 7bf91aa7..d6897a1d 100644 --- a/crates/jrsonnet-lsp-inference/src/provider.rs +++ b/crates/jrsonnet-lsp-inference/src/provider.rs @@ -9,10 +9,11 @@ use jrsonnet_lsp_document::{CanonicalPath, Document}; use jrsonnet_lsp_import::ImportGraph; use jrsonnet_lsp_types::GlobalTyStore; use parking_lot::RwLock; +use rustc_hash::FxHashMap; use crate::{ analysis::TypeAnalysis, - type_cache::{analyze_and_cache, CachingImportResolver, SharedTypeCache}, + type_cache::{analyze_and_cache_with_resolved_imports, CachingImportResolver, SharedTypeCache}, }; /// Trait for looking up documents by path. 
@@ -75,10 +76,14 @@ impl TypeProvider { ) -> TypeAnalysis { // Ensure dependencies are analyzed first (in topological order) self.ensure_dependencies_analyzed(path, doc_source); + let resolved_imports = { + let graph = self.import_graph.read(); + resolved_imports_for(&graph, path) + }; // Analyze with import resolution let import_resolver = Arc::new(CachingImportResolver::new( - path.as_path(), + resolved_imports, Arc::clone(&self.type_cache), )); @@ -94,12 +99,26 @@ impl TypeProvider { doc_source: &D, ) { let graph = self.import_graph.read(); + let resolved_imports_by_file: FxHashMap> = + graph + .all_files() + .map(|graph_path| (graph_path.clone(), resolved_imports_for(&graph, graph_path))) + .collect(); // process_with_dependencies processes in "leaves first" order, // meaning dependencies are analyzed before dependents graph.process_with_dependencies(path, |dep_path| { if let Some(doc) = doc_source.get_document(dep_path) { - analyze_and_cache(dep_path, &doc, &self.type_cache); + let resolved_imports = resolved_imports_by_file + .get(dep_path) + .cloned() + .unwrap_or_default(); + analyze_and_cache_with_resolved_imports( + dep_path, + &doc, + &self.type_cache, + resolved_imports, + ); } }); } @@ -117,6 +136,22 @@ impl TypeProvider { } } +fn resolved_imports_for( + graph: &ImportGraph, + path: &CanonicalPath, +) -> FxHashMap { + graph + .imports(path) + .iter() + .filter_map(|entry| { + entry + .resolved_path + .clone() + .map(|resolved_path| (entry.import_path.clone(), resolved_path)) + }) + .collect() +} + #[cfg(test)] mod tests { use dashmap::DashMap; @@ -220,4 +255,41 @@ mod tests { // (because ensure_dependencies_analyzed processes it first) assert!(type_cache.read().get(&dep_path).is_some()); } + + #[test] + fn test_provider_uses_graph_resolved_import_paths() { + let global_types = Arc::new(GlobalTyStore::new()); + let type_cache = new_shared_cache(Arc::clone(&global_types)); + let import_graph = Arc::new(RwLock::new(ImportGraph::new())); + let doc_source = 
TestDocSource::new(); + + let dep_path = test_path("deps/dep.jsonnet"); + let dep_doc = Document::new("42".to_string(), DocVersion(1)); + doc_source.insert(dep_path.clone(), dep_doc); + + let main_path = test_path("main.jsonnet"); + let main_doc = Document::new(r#"import "vendor/dep.jsonnet""#.to_string(), DocVersion(1)); + doc_source.insert(main_path.clone(), main_doc.clone()); + + { + let mut graph = import_graph.write(); + graph.update_file_with_entries( + &main_path, + vec![jrsonnet_lsp_import::ImportEntry { + import_path: "vendor/dep.jsonnet".to_string(), + resolved_path: Some(dep_path), + binding_name: None, + }], + ); + } + + let provider = TypeProvider::new( + Arc::clone(&type_cache), + Arc::clone(&import_graph), + Arc::clone(&global_types), + ); + + let analysis = provider.analyze(&main_path, &main_doc, &doc_source); + assert_eq!(analysis.document_type(), Ty::NUMBER); + } } diff --git a/crates/jrsonnet-lsp-inference/src/type_cache.rs b/crates/jrsonnet-lsp-inference/src/type_cache.rs index 7d3e783e..6f84084d 100644 --- a/crates/jrsonnet-lsp-inference/src/type_cache.rs +++ b/crates/jrsonnet-lsp-inference/src/type_cache.rs @@ -14,6 +14,7 @@ use jrsonnet_lsp_document::{CanonicalPath, Document, DEFAULT_TYPE_CACHE_CAPACITY use jrsonnet_lsp_types::{GlobalTy, GlobalTyStore}; use lru::LruCache; use parking_lot::RwLock; +use rustc_hash::FxHashMap; use crate::analysis::TypeAnalysis; @@ -136,6 +137,18 @@ pub fn analyze_and_cache( doc: &Document, cache: &SharedTypeCache, ) -> GlobalTy { + analyze_and_cache_with_resolved_imports(path, doc, cache, std::iter::empty()) +} + +pub(crate) fn analyze_and_cache_with_resolved_imports( + path: &CanonicalPath, + doc: &Document, + cache: &SharedTypeCache, + resolved_imports: I, +) -> GlobalTy +where + I: IntoIterator, +{ let version = doc.version().0; // Check if we already have a cached type for this version @@ -156,7 +169,7 @@ pub fn analyze_and_cache( // Create an import resolver for cross-file type resolution let 
import_resolver = Arc::new(CachingImportResolver::new( - path.as_path(), + resolved_imports, Arc::clone(cache), )); @@ -175,11 +188,11 @@ pub fn analyze_and_cache( /// Import resolver that looks up types from the type cache. /// -/// Resolves relative imports based on the base document's directory. +/// Uses import paths that were already resolved by the import graph. #[derive(Debug)] pub struct CachingImportResolver { - /// Base directory for resolving relative imports. - base_dir: std::path::PathBuf, + /// Import path -> resolved canonical path, built from import graph entries. + resolved_imports: FxHashMap, /// Type cache for looking up cached file types. cache: SharedTypeCache, } @@ -188,34 +201,28 @@ impl CachingImportResolver { /// Create a new import resolver. /// /// # Arguments - /// * `base_path` - Path to the document being analyzed (used to resolve relative imports) + /// * `resolved_imports` - Pre-resolved imports for the analyzed file /// * `cache` - Shared type cache for looking up cached types - pub fn new(base_path: &std::path::Path, cache: SharedTypeCache) -> Self { - let base_dir = base_path.parent().map_or_else( - || std::path::PathBuf::from("."), - std::path::Path::to_path_buf, - ); - Self { base_dir, cache } + pub fn new(resolved_imports: I, cache: SharedTypeCache) -> Self + where + I: IntoIterator, + { + Self { + resolved_imports: resolved_imports.into_iter().collect(), + cache, + } } - /// Resolve an import path to a canonical file path. - fn resolve_path(&self, import_path: &str) -> Option { - let resolved = if std::path::Path::new(import_path).is_absolute() { - std::path::PathBuf::from(import_path) - } else { - self.base_dir.join(import_path) - }; - - // Canonicalize the path (resolves .., symlinks, etc.) 
- resolved.canonicalize().ok().map(CanonicalPath::new) + fn resolved_path(&self, import_path: &str) -> Option<&CanonicalPath> { + self.resolved_imports.get(import_path) } } impl crate::env::ImportResolver for CachingImportResolver { fn resolve_import(&self, import_path: &str) -> Option { - let canonical_path = self.resolve_path(import_path)?; + let canonical_path = self.resolved_path(import_path)?; let cache = self.cache.read(); - cache.get(&canonical_path) + cache.get(canonical_path) } } diff --git a/crates/jrsonnet-lsp/src/server/async_requests.rs b/crates/jrsonnet-lsp/src/server/async_requests.rs index 39833a7c..ea033ee2 100644 --- a/crates/jrsonnet-lsp/src/server/async_requests.rs +++ b/crates/jrsonnet-lsp/src/server/async_requests.rs @@ -219,9 +219,7 @@ impl AsyncRequestContext { let include_declaration = params.context.include_declaration; let mut refs = handlers::find_references(&doc, lsp_pos, uri, include_declaration); - let import_graph = self.import_graph.read(); - let importers = import_graph.transitive_importers(&path); - drop(import_graph); + let importers = self.import_graph.read().transitive_importers(&path); let importer_docs: Vec<_> = importers .into_iter() @@ -229,7 +227,16 @@ impl AsyncRequestContext { .collect(); let importer_refs: Vec<_> = importer_docs.iter().map(|(k, v)| (k, v)).collect(); - let cross_refs = handlers::find_cross_file_references(&doc, &path, lsp_pos, &importer_refs); + let cross_refs = { + let import_graph = self.import_graph.read(); + handlers::find_cross_file_references( + &doc, + &path, + lsp_pos, + &importer_refs, + &import_graph, + ) + }; refs.extend(cross_refs); if refs.is_empty() { diff --git a/crates/jrsonnet-lsp/tests/integration_test.rs b/crates/jrsonnet-lsp/tests/integration_test.rs index f5dfd056..5abd52f4 100644 --- a/crates/jrsonnet-lsp/tests/integration_test.rs +++ b/crates/jrsonnet-lsp/tests/integration_test.rs @@ -2767,3 +2767,218 @@ fn test_cross_file_rename_updates_definition_and_importers() { .join() 
.expect("Server thread should exit cleanly"); } + +#[test] +fn test_cross_file_references_resolve_jpath_importers() { + let tmp = TempDir::new().expect("tempdir should be created"); + let jpath_dir = tmp.path().join("jpath"); + let workspace_dir = tmp.path().join("workspace"); + fs::create_dir_all(&jpath_dir).expect("jpath directory should be created"); + fs::create_dir_all(&workspace_dir).expect("workspace directory should be created"); + + let lib_path = jpath_dir.join("lib.libsonnet"); + let main_path = workspace_dir.join("main.jsonnet"); + let lib_text = "local target = 1; target"; + let main_text = r#"local lib = import "lib.libsonnet"; lib.target"#; + fs::write(&lib_path, lib_text).expect("lib should be written"); + fs::write(&main_path, main_text).expect("main should be written"); + + let lib_uri = file_uri(&lib_path.canonicalize().expect("lib should canonicalize")); + let main_uri = file_uri(&main_path.canonicalize().expect("main should canonicalize")); + + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request_with_options( + 1, + json!({ + "jpath": [jpath_dir.to_string_lossy().to_string()], + }), + ))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + client_conn + .sender + .send(Message::Notification(did_open_notification( + &lib_uri, lib_text, + ))) + .unwrap(); + let _ = recv_publish_diagnostics_for_uri(&client_conn, &lib_uri, Duration::from_secs(5)); + + client_conn + .sender + .send(Message::Notification(did_open_notification( + &main_uri, main_text, + ))) + .unwrap(); + let _ = recv_publish_diagnostics_for_uri(&client_conn, &main_uri, Duration::from_secs(5)); + + client_conn + .sender + .send(Message::Request(references_request( + 2, &lib_uri, 0, 6, false, + ))) + .unwrap(); + let response = recv_response(&client_conn, 2); + 
assert!(response.error.is_none(), "References should succeed"); + let references: Option> = + serde_json::from_value(response.result.expect("should have result")).unwrap(); + assert_eq!( + references.unwrap_or_default(), + vec![location(&lib_uri, 18, 24), location(&main_uri, 40, 46)] + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(3))) + .unwrap(); + let _ = recv_response(&client_conn, 3); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_cross_file_rename_updates_jpath_importers() { + let tmp = TempDir::new().expect("tempdir should be created"); + let jpath_dir = tmp.path().join("jpath"); + let workspace_dir = tmp.path().join("workspace"); + fs::create_dir_all(&jpath_dir).expect("jpath directory should be created"); + fs::create_dir_all(&workspace_dir).expect("workspace directory should be created"); + + let lib_path = jpath_dir.join("lib.libsonnet"); + let main_path = workspace_dir.join("main.jsonnet"); + let lib_text = "{ helper: function(x) x * 2 }"; + let main_text = r#"local lib = import "lib.libsonnet"; lib.helper(1) + lib.helper(2)"#; + fs::write(&lib_path, lib_text).expect("lib should be written"); + fs::write(&main_path, main_text).expect("main should be written"); + + let lib_uri = file_uri(&lib_path.canonicalize().expect("lib should canonicalize")); + let main_uri = file_uri(&main_path.canonicalize().expect("main should canonicalize")); + + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request_with_options( + 1, + json!({ + "jpath": [jpath_dir.to_string_lossy().to_string()], + }), + ))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + client_conn + .sender + 
.send(Message::Notification(did_open_notification( + &lib_uri, lib_text, + ))) + .unwrap(); + let _ = recv_publish_diagnostics_for_uri(&client_conn, &lib_uri, Duration::from_secs(5)); + + client_conn + .sender + .send(Message::Notification(did_open_notification( + &main_uri, main_text, + ))) + .unwrap(); + let _ = recv_publish_diagnostics_for_uri(&client_conn, &main_uri, Duration::from_secs(5)); + + client_conn + .sender + .send(Message::Request(rename_request(2, &lib_uri, 0, 2, "util"))) + .unwrap(); + let response = recv_response(&client_conn, 2); + assert!(response.error.is_none(), "Rename should succeed"); + + let edit: Option = + serde_json::from_value(response.result.expect("should have result")).unwrap(); + let edit = edit.expect("rename should produce workspace edit"); + + let mut expected_changes = std::collections::HashMap::new(); + expected_changes.insert( + lib_uri.parse().unwrap(), + vec![lsp_types::TextEdit { + range: lsp_types::Range { + start: Position { + line: 0, + character: 2, + }, + end: Position { + line: 0, + character: 8, + }, + }, + new_text: "util".to_string(), + }], + ); + expected_changes.insert( + main_uri.parse().unwrap(), + vec![ + lsp_types::TextEdit { + range: lsp_types::Range { + start: Position { + line: 0, + character: 40, + }, + end: Position { + line: 0, + character: 46, + }, + }, + new_text: "util".to_string(), + }, + lsp_types::TextEdit { + range: lsp_types::Range { + start: Position { + line: 0, + character: 56, + }, + end: Position { + line: 0, + character: 62, + }, + }, + new_text: "util".to_string(), + }, + ], + ); + assert_eq!( + edit, + lsp_types::WorkspaceEdit { + changes: Some(expected_changes), + document_changes: None, + change_annotations: None, + } + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(3))) + .unwrap(); + let _ = recv_response(&client_conn, 3); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server 
thread should exit cleanly"); +} From 5d5acfb09e54e72f7ff08423712c2a4694738a30 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Mon, 9 Feb 2026 19:06:59 +0000 Subject: [PATCH 030/210] refactor(lsp): resolve goto import targets from import graph --- .../jrsonnet-lsp/src/server/async_requests.rs | 26 +-- crates/jrsonnet-lsp/tests/integration_test.rs | 157 ++++++++++++++++++ docs/lsp/ARCHITECTURE.md | 9 +- docs/lsp/HANDLERS.md | 10 +- 4 files changed, 183 insertions(+), 19 deletions(-) diff --git a/crates/jrsonnet-lsp/src/server/async_requests.rs b/crates/jrsonnet-lsp/src/server/async_requests.rs index ea033ee2..6c002b9c 100644 --- a/crates/jrsonnet-lsp/src/server/async_requests.rs +++ b/crates/jrsonnet-lsp/src/server/async_requests.rs @@ -2,7 +2,7 @@ use std::sync::Arc; use jrsonnet_lsp_document::{to_lsp_range, CanonicalPath, DocVersion, Document, SymbolName}; use jrsonnet_lsp_handlers as handlers; -use jrsonnet_lsp_import::{resolve_import_path, ImportGraph}; +use jrsonnet_lsp_import::ImportGraph; use jrsonnet_lsp_inference::{SharedDocumentManager, SharedTypeCache, TypeAnalysis, TypeProvider}; use jrsonnet_lsp_types::GlobalTyStore; use jrsonnet_rowan_parser::AstNode; @@ -134,7 +134,7 @@ impl AsyncRequestContext { })) } handlers::DefinitionResult::Import(import_path) => { - let resolved = self.resolve_import_path(&path, &import_path)?; + let resolved = self.resolve_import_from_graph(&path, &import_path)?; let resolved_uri = resolved.to_uri().ok()?; let range = if target == GotoTarget::Implementation { self.document_root_expr_range(&resolved).unwrap_or_default() @@ -150,7 +150,7 @@ impl AsyncRequestContext { path: import_path, fields, } => { - let resolved = self.resolve_import_path(&path, &import_path)?; + let resolved = self.resolve_import_from_graph(&path, &import_path)?; let resolved_uri = resolved.to_uri().ok()?; let locations = self.find_field_in_file(&resolved, &fields); let range = if target == GotoTarget::Implementation { @@ -479,15 +479,17 @@ impl 
AsyncRequestContext { serde_json::to_value(refs).ok() } - fn resolve_import_path(&self, from: &CanonicalPath, import: &str) -> Option { - let config = self.config.read(); - let import_roots = effective_import_roots( - from.as_path(), - &config.jpath, - config.resolve_paths_with_tanka, - ); - drop(config); - resolve_import_path(from, import, &import_roots) + fn resolve_import_from_graph( + &self, + from: &CanonicalPath, + import: &str, + ) -> Option { + let import_graph = self.import_graph.read(); + import_graph + .imports(from) + .iter() + .find(|entry| entry.import_path == import) + .and_then(|entry| entry.resolved_path.clone()) } fn load_document_for_path(&self, path: &CanonicalPath) -> Option { diff --git a/crates/jrsonnet-lsp/tests/integration_test.rs b/crates/jrsonnet-lsp/tests/integration_test.rs index 5abd52f4..ce5bf7c8 100644 --- a/crates/jrsonnet-lsp/tests/integration_test.rs +++ b/crates/jrsonnet-lsp/tests/integration_test.rs @@ -2982,3 +2982,160 @@ fn test_cross_file_rename_updates_jpath_importers() { .join() .expect("Server thread should exit cleanly"); } + +#[test] +fn test_navigation_resolves_jpath_imports_from_graph() { + let tmp = TempDir::new().expect("tempdir should be created"); + let jpath_dir = tmp.path().join("jpath"); + let workspace_dir = tmp.path().join("workspace"); + fs::create_dir_all(&jpath_dir).expect("jpath directory should be created"); + fs::create_dir_all(&workspace_dir).expect("workspace directory should be created"); + + let lib_path = jpath_dir.join("lib.libsonnet"); + let main_path = workspace_dir.join("main.jsonnet"); + let lib_text = "{ helper: 42 }"; + let main_text = r#"local lib = import "lib.libsonnet"; lib.helper"#; + fs::write(&lib_path, lib_text).expect("lib should be written"); + fs::write(&main_path, main_text).expect("main should be written"); + + let lib_uri = file_uri(&lib_path.canonicalize().expect("lib should canonicalize")); + let main_uri = file_uri(&main_path.canonicalize().expect("main should 
canonicalize")); + + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request_with_options( + 1, + json!({ + "jpath": [jpath_dir.to_string_lossy().to_string()], + }), + ))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + client_conn + .sender + .send(Message::Notification(did_open_notification( + &lib_uri, lib_text, + ))) + .unwrap(); + let _ = recv_publish_diagnostics_for_uri(&client_conn, &lib_uri, Duration::from_secs(5)); + + client_conn + .sender + .send(Message::Notification(did_open_notification( + &main_uri, main_text, + ))) + .unwrap(); + let _ = recv_publish_diagnostics_for_uri(&client_conn, &main_uri, Duration::from_secs(5)); + + client_conn + .sender + .send(Message::Request(goto_definition_request( + 2, &main_uri, 0, 22, + ))) + .unwrap(); + let import_definition_response = recv_response(&client_conn, 2); + assert!( + import_definition_response.error.is_none(), + "goto definition on import path should succeed" + ); + let import_definition: Option = serde_json::from_value( + import_definition_response + .result + .expect("should have goto definition result"), + ) + .unwrap(); + assert_eq!( + import_definition, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: lib_uri.parse().expect("lib URI should parse"), + range: lsp_types::Range::default(), + })) + ); + + client_conn + .sender + .send(Message::Request(goto_definition_request( + 3, &main_uri, 0, 40, + ))) + .unwrap(); + let definition_response = recv_response(&client_conn, 3); + assert!( + definition_response.error.is_none(), + "goto definition should succeed" + ); + let definition: Option = serde_json::from_value( + definition_response + .result + .expect("should have definition result"), + ) + .unwrap(); + assert_eq!( + definition, + 
Some(GotoDefinitionResponse::Scalar(location(&lib_uri, 2, 8))) + ); + + client_conn + .sender + .send(Message::Request(goto_declaration_request( + 4, &main_uri, 0, 40, + ))) + .unwrap(); + let declaration_response = recv_response(&client_conn, 4); + assert!( + declaration_response.error.is_none(), + "goto declaration should succeed" + ); + let declaration: Option = serde_json::from_value( + declaration_response + .result + .expect("should have declaration result"), + ) + .unwrap(); + assert_eq!( + declaration, + Some(GotoDefinitionResponse::Scalar(location(&lib_uri, 2, 8))) + ); + + client_conn + .sender + .send(Message::Request(goto_type_definition_request( + 5, &main_uri, 0, 40, + ))) + .unwrap(); + let type_definition_response = recv_response(&client_conn, 5); + assert!( + type_definition_response.error.is_none(), + "goto type definition should succeed" + ); + let type_definition: Option = serde_json::from_value( + type_definition_response + .result + .expect("should have type definition result"), + ) + .unwrap(); + assert_eq!( + type_definition, + Some(GotoDefinitionResponse::Scalar(location(&lib_uri, 2, 8))) + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(6))) + .unwrap(); + let _ = recv_response(&client_conn, 6); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} diff --git a/docs/lsp/ARCHITECTURE.md b/docs/lsp/ARCHITECTURE.md index c3eef13f..1a966e9b 100644 --- a/docs/lsp/ARCHITECTURE.md +++ b/docs/lsp/ARCHITECTURE.md @@ -162,8 +162,9 @@ For close events, the server: 1. Moves the file from open docs to closed-cache storage. 2. Invalidates the file and dependent type cache entries. -3. Removes import graph entries for the closed file. -4. Publishes empty diagnostics for the URI. +3. Rebuilds import graph entries from on-disk content for the closed file. +4. Schedules diagnostics for currently open importers of that file. 
+5. Publishes empty diagnostics for the closed URI. ### Save path @@ -188,6 +189,10 @@ The common resolution order is: 1. relative to the importing file's directory 2. each configured `jpath` entry in order +That ordering is applied while building import graph entries. Cross-file +navigation and reference/rename paths then use the graph's resolved entries as +the source of truth instead of re-resolving import strings independently. + Navigation semantics: - `textDocument/declaration`: nearest lexical declaration ("where this name is diff --git a/docs/lsp/HANDLERS.md b/docs/lsp/HANDLERS.md index d2e17b0d..6d63ea54 100644 --- a/docs/lsp/HANDLERS.md +++ b/docs/lsp/HANDLERS.md @@ -37,10 +37,10 @@ Current request routing in `crates/jrsonnet-lsp/src/server.rs`: | LSP method | Server entry point | Implementation owner | Execution model | Uses `TypeAnalysis` | | ----------------------------------- | ---------------------------------- | ------------------------------------------------- | --------------- | ------------------- | | `textDocument/documentSymbol` | `on_document_symbol` | handlers crate (`document_symbols`) | sync | no | -| `textDocument/definition` | async context (`goto_definition`) | mixed: handlers + server import resolution | async | no | -| `textDocument/typeDefinition` | async (`goto_type_definition`) | mixed: handlers + server import resolution | async | no | -| `textDocument/declaration` | async context (`goto_declaration`) | mixed: handlers + server import resolution | async | no | -| `textDocument/implementation` | async (`goto_implementation`) | mixed: handlers + server import resolution | async | no | +| `textDocument/definition` | async context (`goto_definition`) | mixed: handlers + server import graph resolution | async | no | +| `textDocument/typeDefinition` | async (`goto_type_definition`) | mixed: handlers + server import graph resolution | async | no | +| `textDocument/declaration` | async context (`goto_declaration`) | mixed: handlers + 
server import graph resolution | async | no | +| `textDocument/implementation` | async (`goto_implementation`) | mixed: handlers + server import graph resolution | async | no | | `textDocument/hover` | async context (`hover`) | handlers crate (`hover`) | async | yes | | `textDocument/documentHighlight` | `on_document_highlight` | handlers crate (`document_highlights`) | sync | no | | `textDocument/inlayHint` | async context (`inlay_hints`) | handlers crate (`inlay_hints`) | async | yes | @@ -257,7 +257,7 @@ provides shared orchestration for async methods: - dependency-aware `TypeAnalysis` creation via `TypeProvider` - import graph lookups for cross-file features - config access (`jpath`, command behavior) -- import-path resolution for definition targets +- import-target resolution via graph-backed entries This keeps handler modules focused on feature logic while server code owns runtime and cross-cutting orchestration. From 135c016f82fe453dda063d0a70ed6dcee335ace5 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Mon, 9 Feb 2026 19:09:37 +0000 Subject: [PATCH 031/210] fix(lsp): reindex tracked import graph files on config changes --- crates/jrsonnet-lsp/src/server.rs | 11 +- crates/jrsonnet-lsp/tests/integration_test.rs | 133 ++++++++++++++++++ docs/lsp/ARCHITECTURE.md | 2 +- 3 files changed, 144 insertions(+), 2 deletions(-) diff --git a/crates/jrsonnet-lsp/src/server.rs b/crates/jrsonnet-lsp/src/server.rs index 5cabe2ed..0ec409ca 100644 --- a/crates/jrsonnet-lsp/src/server.rs +++ b/crates/jrsonnet-lsp/src/server.rs @@ -979,7 +979,16 @@ impl Server { if runtime_config_changed { // Import resolution and cached file types depend on jpath/tanka settings. 
self.type_cache.write().clear(); - for path in self.documents.open_paths() { + + let mut paths_to_reindex = { + let import_graph = self.import_graph.read(); + import_graph.all_files().cloned().collect::>() + }; + paths_to_reindex.extend(self.documents.open_paths()); + paths_to_reindex.sort(); + paths_to_reindex.dedup(); + + for path in paths_to_reindex { self.update_import_graph(&path); } } diff --git a/crates/jrsonnet-lsp/tests/integration_test.rs b/crates/jrsonnet-lsp/tests/integration_test.rs index ce5bf7c8..1cd0d14d 100644 --- a/crates/jrsonnet-lsp/tests/integration_test.rs +++ b/crates/jrsonnet-lsp/tests/integration_test.rs @@ -935,6 +935,139 @@ fn test_configuration_change_reconfigures_eval_diagnostics() { .expect("Server thread should exit cleanly"); } +#[test] +fn test_configuration_change_reindexes_closed_import_graph_entries() { + let tmp = TempDir::new().expect("tempdir should be created"); + let jpath_a = tmp.path().join("jpath-a"); + let jpath_b = tmp.path().join("jpath-b"); + let workspace = tmp.path().join("workspace"); + fs::create_dir_all(&jpath_a).expect("jpath-a directory should be created"); + fs::create_dir_all(&jpath_b).expect("jpath-b directory should be created"); + fs::create_dir_all(&workspace).expect("workspace directory should be created"); + + let lib_a_path = jpath_a.join("lib.libsonnet"); + let lib_b_path = jpath_b.join("lib.libsonnet"); + let main_path = workspace.join("main.jsonnet"); + fs::write(&lib_a_path, "{ from: 'a' }").expect("jpath-a lib should be written"); + fs::write(&lib_b_path, "{ from: 'b' }").expect("jpath-b lib should be written"); + fs::write(&main_path, r#"local lib = import "lib.libsonnet"; lib"#) + .expect("main should be written"); + + let lib_a_uri = file_uri( + &lib_a_path + .canonicalize() + .expect("lib_a should canonicalize"), + ); + let lib_b_uri = file_uri( + &lib_b_path + .canonicalize() + .expect("lib_b should canonicalize"), + ); + let main_uri = file_uri(&main_path.canonicalize().expect("main should 
canonicalize")); + let main_text = fs::read_to_string(&main_path).expect("main text should be readable"); + + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request_with_options( + 1, + serde_json::json!({ + "jpath": [jpath_a.to_string_lossy().to_string()], + }), + ))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + client_conn + .sender + .send(Message::Notification(did_open_notification( + &main_uri, &main_text, + ))) + .unwrap(); + let _ = recv_publish_diagnostics_for_uri(&client_conn, &main_uri, Duration::from_secs(5)); + + client_conn + .sender + .send(Message::Notification(did_close_notification(&main_uri))) + .unwrap(); + + client_conn + .sender + .send(Message::Notification( + did_change_configuration_notification(serde_json::json!({ + "jsonnet": { + "jpath": [jpath_b.to_string_lossy().to_string()] + } + })), + )) + .unwrap(); + + client_conn + .sender + .send(Message::Request(execute_command_request( + 2, + "jrsonnet.findTransitiveImporters", + vec![serde_json::Value::String(lib_a_uri.clone())], + ))) + .unwrap(); + let old_target_response = recv_response(&client_conn, 2); + assert!( + old_target_response.error.is_none(), + "findTransitiveImporters for old jpath target should succeed" + ); + assert_eq!( + old_target_response + .result + .expect("should have old target command result"), + serde_json::json!({ + "file": lib_a_uri, + "transitiveImporters": [], + }) + ); + + client_conn + .sender + .send(Message::Request(execute_command_request( + 3, + "jrsonnet.findTransitiveImporters", + vec![serde_json::Value::String(lib_b_uri.clone())], + ))) + .unwrap(); + let new_target_response = recv_response(&client_conn, 3); + assert!( + new_target_response.error.is_none(), + "findTransitiveImporters for new jpath target should succeed" + ); + 
assert_eq!( + new_target_response + .result + .expect("should have new target command result"), + serde_json::json!({ + "file": lib_b_uri, + "transitiveImporters": [main_uri], + }) + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(4))) + .unwrap(); + let _ = recv_response(&client_conn, 4); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + #[test] fn test_valid_document_no_errors() { let (client_conn, server_conn) = Connection::memory(); diff --git a/docs/lsp/ARCHITECTURE.md b/docs/lsp/ARCHITECTURE.md index 1a966e9b..30534cf8 100644 --- a/docs/lsp/ARCHITECTURE.md +++ b/docs/lsp/ARCHITECTURE.md @@ -254,7 +254,7 @@ Important behavior in `on_did_change_configuration`: - Runtime-affecting changes (`jpath`, eval diagnostics mode, Tanka mode) trigger runtime component rebuild. - Runtime-affecting changes clear the shared type cache and refresh import graph - for open files. + for all tracked files (open and closed-cache graph entries). - Runtime changes and lint toggle changes both trigger diagnostic rescheduling for open files. From 55ea677abc9e0035cc68e84194a3b0a92dd8d6f2 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Tue, 10 Feb 2026 09:25:48 +0000 Subject: [PATCH 032/210] refactor(lsp-handlers): remove root-scan fallback for visible bindings --- .../jrsonnet-lsp-handlers/src/definition.rs | 72 +++++++++++-------- 1 file changed, 43 insertions(+), 29 deletions(-) diff --git a/crates/jrsonnet-lsp-handlers/src/definition.rs b/crates/jrsonnet-lsp-handlers/src/definition.rs index 11157cb2..a1491a7c 100644 --- a/crates/jrsonnet-lsp-handlers/src/definition.rs +++ b/crates/jrsonnet-lsp-handlers/src/definition.rs @@ -7,7 +7,7 @@ //! 4. 
If it's a field access on an import, returning the import path and field chain use jrsonnet_lsp_document::{ - find_node_at_offset, to_lsp_range, token_at_offset, Document, LspPosition, + find_node_at_offset, to_lsp_range, token_at_offset, ByteOffset, Document, LspPosition, }; use jrsonnet_lsp_import::{check_import_path, extract_import_path}; use jrsonnet_lsp_inference::{trace_base, trace_expr, ConstEvalResult}; @@ -307,7 +307,7 @@ fn resolve_base_to_import(base: &ExprBase, document: &Document) -> Option<(Strin } /// A binding visible at a given position. -#[derive(Debug, Clone)] +#[derive(Debug, Clone, PartialEq, Eq)] pub struct VisibleBinding { /// The name of the binding. pub name: String, @@ -345,18 +345,8 @@ pub fn collect_visible_bindings(document: &Document, position: LspPosition) -> V let ast = document.ast(); let root = ast.syntax(); - // Find a starting node for the scope walk - // Try token at offset first, then covering element, then root - let start_node = if let Some(token) = token_at_offset(root, offset) { - token.parent() - } else { - // At whitespace or EOF - find the deepest node that contains this offset - find_node_at_offset(root, offset) - }; - - let Some(mut current) = start_node else { - // Fall back to root node - return collect_bindings_from_root(root, u32::from(offset)); + let Some(mut current) = start_node_for_offset(root, offset) else { + return Vec::new(); }; // Walk up the scope chain and collect all bindings @@ -387,6 +377,19 @@ pub fn collect_visible_bindings(document: &Document, position: LspPosition) -> V bindings } +fn start_node_for_offset(root: &SyntaxNode, offset: ByteOffset) -> Option { + token_at_offset(root, offset) + .and_then(|token| token.parent()) + .or_else(|| { + let offset_u32 = u32::from(offset); + if offset_u32 == 0 { + return None; + } + token_at_offset(root, ByteOffset::from(offset_u32 - 1)).and_then(|token| token.parent()) + }) + .or_else(|| find_node_at_offset(root, offset)) +} + /// Check if a node is a 
scope-introducing node. fn is_scope_node(node: &SyntaxNode) -> bool { matches!( @@ -461,21 +464,6 @@ fn collect_expr_bindings_at_offset( } } -/// Collect bindings starting from the root (fallback). -fn collect_bindings_from_root(root: &SyntaxNode, offset: u32) -> Vec { - let mut bindings = Vec::new(); - let mut seen = std::collections::HashSet::new(); - - // Walk the entire tree looking for visible bindings - for node in root.descendants() { - if is_scope_node(&node) { - collect_scope_bindings_at_offset(&node, offset, &mut bindings, &mut seen); - } - } - - bindings -} - /// Extract a binding from a Bind node. fn extract_binding(bind: &Bind) -> Option { match bind { @@ -911,4 +899,30 @@ alias"#; } ); } + + #[test] + fn test_collect_visible_bindings_at_eof() { + let code = "local x = 1;\n"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + assert_eq!( + collect_visible_bindings(&doc, (1, 0).into()), + vec![VisibleBinding { + name: "x".to_string(), + kind: BindingKind::LocalVariable, + range: TextRange::new(6.into(), 7.into()), + }] + ); + } + + #[test] + fn test_collect_visible_bindings_out_of_bounds_position() { + let code = "local x = 1;"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + assert_eq!( + collect_visible_bindings(&doc, (9, 0).into()), + Vec::::new() + ); + } } From f92d19eefb16c7cbc11a87db43c3d66ed67bd256 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Tue, 10 Feb 2026 09:32:20 +0000 Subject: [PATCH 033/210] fix(lsp): return transitive importers in stable order --- .../jrsonnet-lsp/src/server/async_requests.rs | 3 +- crates/jrsonnet-lsp/tests/integration_test.rs | 73 +++++++++++++++++++ 2 files changed, 75 insertions(+), 1 deletion(-) diff --git a/crates/jrsonnet-lsp/src/server/async_requests.rs b/crates/jrsonnet-lsp/src/server/async_requests.rs index 6c002b9c..a5293f7f 100644 --- a/crates/jrsonnet-lsp/src/server/async_requests.rs +++ b/crates/jrsonnet-lsp/src/server/async_requests.rs @@ -444,10 +444,11 @@ impl 
AsyncRequestContext { let importers = import_graph.transitive_importers(&path); drop(import_graph); - let importer_uris: Vec = importers + let mut importer_uris: Vec = importers .iter() .filter_map(|p| p.to_uri().ok().map(|uri| uri.to_string())) .collect(); + importer_uris.sort(); Some(serde_json::json!({ "file": uri, diff --git a/crates/jrsonnet-lsp/tests/integration_test.rs b/crates/jrsonnet-lsp/tests/integration_test.rs index 1cd0d14d..26377ad6 100644 --- a/crates/jrsonnet-lsp/tests/integration_test.rs +++ b/crates/jrsonnet-lsp/tests/integration_test.rs @@ -2703,6 +2703,79 @@ fn test_watched_file_refreshes_unopened_importers_for_references() { .expect("Server thread should exit cleanly"); } +#[test] +fn test_find_transitive_importers_returns_sorted_uris() { + let tmp = TempDir::new().expect("tempdir should be created"); + let lib_path = tmp.path().join("lib.jsonnet"); + let a_path = tmp.path().join("a.jsonnet"); + let b_path = tmp.path().join("b.jsonnet"); + fs::write(&lib_path, "{ target: 1 }").expect("lib should be written"); + fs::write(&a_path, "local lib = import 'lib.jsonnet'; lib.target") + .expect("a should be written"); + fs::write(&b_path, "local lib = import 'lib.jsonnet'; lib.target") + .expect("b should be written"); + + let lib_uri = file_uri(&lib_path.canonicalize().expect("lib should canonicalize")); + let a_uri = file_uri(&a_path.canonicalize().expect("a should canonicalize")); + let b_uri = file_uri(&b_path.canonicalize().expect("b should canonicalize")); + + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + for (uri, text) in [ + (&lib_uri, "{ target: 1 }"), + (&a_uri, "local lib = import 'lib.jsonnet'; lib.target"), + (&b_uri, "local lib = import 'lib.jsonnet'; 
lib.target"), + ] { + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text))) + .unwrap(); + let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); + } + + client_conn + .sender + .send(Message::Request(execute_command_request( + 2, + "jrsonnet.findTransitiveImporters", + vec![serde_json::Value::String(lib_uri.clone())], + ))) + .unwrap(); + let response = recv_response(&client_conn, 2); + assert!(response.error.is_none(), "Command should succeed"); + assert_eq!( + response.result.expect("command should return result"), + json!({ + "file": lib_uri, + "transitiveImporters": [a_uri, b_uri], + }) + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(3))) + .unwrap(); + let _ = recv_response(&client_conn, 3); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + #[test] fn test_did_close_preserves_import_graph_for_references() { let tmp = TempDir::new().expect("tempdir should be created"); From 9609e9dc02360939b738a4ba586c0f63408a941c Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Tue, 10 Feb 2026 09:34:38 +0000 Subject: [PATCH 034/210] fix(lsp): align eval commands with shared import-root logic --- .../jrsonnet-lsp/src/server/async_requests.rs | 54 +++++++---- crates/jrsonnet-lsp/tests/integration_test.rs | 96 +++++++++++++++++++ 2 files changed, 130 insertions(+), 20 deletions(-) diff --git a/crates/jrsonnet-lsp/src/server/async_requests.rs b/crates/jrsonnet-lsp/src/server/async_requests.rs index a5293f7f..9fb9d552 100644 --- a/crates/jrsonnet-lsp/src/server/async_requests.rs +++ b/crates/jrsonnet-lsp/src/server/async_requests.rs @@ -358,12 +358,7 @@ impl AsyncRequestContext { let path = CanonicalPath::from_uri(&uri_parsed)?; let text = self.documents.get_text(&path)?; - let config = self.config.read(); - let mut jpath = config.jpath.clone(); - drop(config); - if let Some(dir) 
= path.as_path().parent() { - jpath.push(dir.to_path_buf()); - } + let jpath = self.eval_command_jpath(Some(&path)); let state = create_state_with_jpath(&jpath); let source_path = SourcePath::new(SourceFile::new(path.as_path().to_path_buf())); @@ -398,23 +393,19 @@ impl AsyncRequestContext { fn execute_eval_expression(&self, expr: &str, base_uri: Option<&str>) -> serde_json::Value { use jrsonnet_evaluator::manifest::JsonFormat; + use jrsonnet_parser::{SourceFile, SourcePath}; - let config = self.config.read(); - let mut jpath = config.jpath.clone(); - drop(config); - - if let Some(uri) = base_uri { - if let Ok(uri_parsed) = uri.parse::() { - if let Some(path) = CanonicalPath::from_uri(&uri_parsed) { - if let Some(dir) = path.as_path().parent() { - jpath.push(dir.to_path_buf()); - } - } - } - } + let base_path = base_uri + .and_then(|uri| uri.parse::().ok()) + .and_then(|uri| CanonicalPath::from_uri(&uri)); + let jpath = self.eval_command_jpath(base_path.as_ref()); let state = create_state_with_jpath(&jpath); + let source_name = base_path.map_or_else( + || "".to_string(), + |path| SourcePath::new(SourceFile::new(path.as_path().to_path_buf())).to_string(), + ); - match state.evaluate_snippet("".to_string(), expr) { + match state.evaluate_snippet(source_name, expr) { Ok(val) => { let json_format = JsonFormat::default(); match val.manifest(json_format) { @@ -436,6 +427,29 @@ impl AsyncRequestContext { } } + fn eval_command_jpath(&self, base_path: Option<&CanonicalPath>) -> Vec { + let config = self.config.read(); + let jpath = if let Some(base_path) = base_path { + let mut roots = effective_import_roots( + base_path.as_path(), + &config.jpath, + config.resolve_paths_with_tanka, + ); + if !config.resolve_paths_with_tanka { + if let Some(dir) = base_path.as_path().parent() { + if !roots.iter().any(|entry| entry == dir) { + roots.push(dir.to_path_buf()); + } + } + } + roots + } else { + config.jpath.clone() + }; + drop(config); + jpath + } + fn 
execute_find_transitive_importers(&self, uri: &str) -> Option { let uri_parsed: lsp_types::Uri = uri.parse().ok()?; let path = CanonicalPath::from_uri(&uri_parsed)?; diff --git a/crates/jrsonnet-lsp/tests/integration_test.rs b/crates/jrsonnet-lsp/tests/integration_test.rs index 26377ad6..00fc7cec 100644 --- a/crates/jrsonnet-lsp/tests/integration_test.rs +++ b/crates/jrsonnet-lsp/tests/integration_test.rs @@ -2410,6 +2410,102 @@ fn test_execute_command_find_references() { .expect("Server thread should exit cleanly"); } +#[test] +fn test_eval_commands_use_tanka_import_roots() { + let tmp = TempDir::new().expect("tempdir should be created"); + let root = tmp.path(); + let env_dir = root.join("environments").join("dev"); + let vendor_dir = root.join("vendor"); + fs::create_dir_all(&env_dir).expect("environment directory should be created"); + fs::create_dir_all(&vendor_dir).expect("vendor directory should be created"); + fs::write(root.join("jsonnetfile.json"), "{}").expect("jsonnetfile should be written"); + + let lib_path = vendor_dir.join("lib.libsonnet"); + let main_path = env_dir.join("main.jsonnet"); + fs::write(&lib_path, "{ answer: 42 }").expect("vendor lib should be written"); + fs::write( + &main_path, + r#"local lib = import "lib.libsonnet"; lib.answer"#, + ) + .expect("main should be written"); + + let main_uri = file_uri(&main_path.canonicalize().expect("main should canonicalize")); + + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request_with_options( + 1, + serde_json::json!({ + "resolvePathsWithTanka": true + }), + ))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + client_conn + .sender + .send(Message::Request(execute_command_request( + 2, + "jrsonnet.evalFile", + vec![serde_json::Value::String(main_uri.clone())], + ))) + 
.unwrap(); + let eval_file_response = recv_response(&client_conn, 2); + assert!( + eval_file_response.error.is_none(), + "evalFile command should succeed" + ); + assert_eq!( + eval_file_response + .result + .expect("evalFile should return a result"), + serde_json::json!(42) + ); + + client_conn + .sender + .send(Message::Request(execute_command_request( + 3, + "jrsonnet.evalExpression", + vec![ + serde_json::Value::String(r#"(import "lib.libsonnet").answer"#.to_string()), + serde_json::Value::String(main_uri), + ], + ))) + .unwrap(); + let eval_expression_response = recv_response(&client_conn, 3); + assert!( + eval_expression_response.error.is_none(), + "evalExpression command should succeed" + ); + assert_eq!( + eval_expression_response + .result + .expect("evalExpression should return a result"), + serde_json::json!(42) + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(4))) + .unwrap(); + let _ = recv_response(&client_conn, 4); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + #[test] fn test_semantic_tokens_range_request() { let (client_conn, server_conn) = Connection::memory(); From b4d2c3703c0f8bf4aaaf13afc36c4bc07ac73ef6 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Tue, 10 Feb 2026 09:38:36 +0000 Subject: [PATCH 035/210] fix(lsp): advertise and implement jrsonnet.showErrors --- crates/jrsonnet-lsp/src/server.rs | 1 + .../jrsonnet-lsp/src/server/async_requests.rs | 42 +++++++- crates/jrsonnet-lsp/tests/integration_test.rs | 95 +++++++++++++++++++ docs/lsp/ARCHITECTURE.md | 6 ++ docs/lsp/HANDLERS.md | 7 +- 5 files changed, 148 insertions(+), 3 deletions(-) diff --git a/crates/jrsonnet-lsp/src/server.rs b/crates/jrsonnet-lsp/src/server.rs index 0ec409ca..f478517c 100644 --- a/crates/jrsonnet-lsp/src/server.rs +++ b/crates/jrsonnet-lsp/src/server.rs @@ -340,6 +340,7 @@ impl Server { "jrsonnet.evalExpression".to_string(), 
"jrsonnet.findTransitiveImporters".to_string(), "jrsonnet.findReferences".to_string(), + "jrsonnet.showErrors".to_string(), ], work_done_progress_options: WorkDoneProgressOptions::default(), }), diff --git a/crates/jrsonnet-lsp/src/server/async_requests.rs b/crates/jrsonnet-lsp/src/server/async_requests.rs index 9fb9d552..6ac6040c 100644 --- a/crates/jrsonnet-lsp/src/server/async_requests.rs +++ b/crates/jrsonnet-lsp/src/server/async_requests.rs @@ -18,7 +18,9 @@ use rayon::prelude::*; use tracing::{info, warn}; use super::SharedConfig; -use crate::analysis::{eval::create_state_with_jpath, tanka::effective_import_roots}; +use crate::analysis::{ + eval::create_state_with_jpath, tanka::effective_import_roots, EvalConfig, Evaluator, +}; #[derive(Clone)] pub(super) struct AsyncRequestContext { @@ -343,6 +345,10 @@ impl AsyncRequestContext { .unwrap_or(false); self.execute_find_references(uri, line, character, include_declaration) } + "jrsonnet.showErrors" => { + let uri = params.arguments.first()?.as_str()?; + self.execute_show_errors(uri) + } _ => { warn!("Unknown command: {}", params.command); None @@ -494,6 +500,40 @@ impl AsyncRequestContext { serde_json::to_value(refs).ok() } + fn execute_show_errors(&self, uri: &str) -> Option { + let uri_parsed: lsp_types::Uri = uri.parse().ok()?; + let path = CanonicalPath::from_uri(&uri_parsed)?; + let doc = self.load_document_for_path(&path)?; + let analysis = self.analyze_document(&path, &doc); + let (enable_lint_diagnostics, evaluator) = { + let config = self.config.read(); + let evaluator = config.enable_eval_diagnostics.then(|| { + let eval_config = EvalConfig { + jpath: config.jpath.clone(), + resolve_paths_with_tanka: config.resolve_paths_with_tanka, + }; + Evaluator::new(&eval_config) + }); + (config.enable_lint_diagnostics, evaluator) + }; + + let diagnostics = crate::handlers::compute_diagnostics( + &doc, + &path, + enable_lint_diagnostics, + evaluator.as_ref(), + &uri_parsed, + &analysis, + ); + + let response = 
lsp_types::PublishDiagnosticsParams { + uri: uri_parsed, + diagnostics, + version: Some(doc.version().0), + }; + serde_json::to_value(response).ok() + } + fn resolve_import_from_graph( &self, from: &CanonicalPath, diff --git a/crates/jrsonnet-lsp/tests/integration_test.rs b/crates/jrsonnet-lsp/tests/integration_test.rs index 00fc7cec..61a5466b 100644 --- a/crates/jrsonnet-lsp/tests/integration_test.rs +++ b/crates/jrsonnet-lsp/tests/integration_test.rs @@ -686,6 +686,17 @@ fn test_initialize_shutdown() { result["capabilities"]["codeActionProvider"]["codeActionKinds"][0], serde_json::Value::String("quickfix".to_string()), "quickfix code action capability should be advertised", + ); + assert_eq!( + result["capabilities"]["executeCommandProvider"]["commands"], + serde_json::json!([ + "jrsonnet.evalFile", + "jrsonnet.evalExpression", + "jrsonnet.findTransitiveImporters", + "jrsonnet.findReferences", + "jrsonnet.showErrors" + ]), + "execute command capability should advertise all command IDs", ); assert_eq!( result["capabilities"]["codeLensProvider"]["resolveProvider"], @@ -2410,6 +2421,90 @@ fn test_execute_command_find_references() { .expect("Server thread should exit cleanly"); } +#[test] +fn test_execute_command_show_errors() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + let uri = "file:///test/show-errors-command.jsonnet"; + let text = "{ hello: }"; + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text))) + .unwrap(); + let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); + + client_conn + .sender + .send(Message::Request(execute_command_request( + 2, + "jrsonnet.showErrors", + 
vec![serde_json::Value::String(uri.to_string())], + ))) + .unwrap(); + let response = recv_response(&client_conn, 2); + assert!( + response.error.is_none(), + "showErrors command should succeed" + ); + let diagnostics: lsp_types::PublishDiagnosticsParams = + serde_json::from_value(response.result.expect("showErrors should return a result")) + .expect("showErrors result should be publish diagnostics payload"); + + assert_eq!( + diagnostics, + lsp_types::PublishDiagnosticsParams { + uri: uri.parse().unwrap(), + diagnostics: vec![lsp_types::Diagnostic { + range: lsp_types::Range { + start: Position { + line: 0, + character: 9, + }, + end: Position { + line: 0, + character: 9, + }, + }, + severity: Some(lsp_types::DiagnosticSeverity::ERROR), + code: Some(lsp_types::NumberOrString::String( + "syntax-error".to_string() + )), + code_description: None, + source: Some("jrsonnet".to_string()), + message: "expected expression".to_string(), + related_information: None, + tags: None, + data: None, + }], + version: Some(1), + } + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(3))) + .unwrap(); + let _ = recv_response(&client_conn, 3); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + #[test] fn test_eval_commands_use_tanka_import_roots() { let tmp = TempDir::new().expect("tempdir should be created"); diff --git a/docs/lsp/ARCHITECTURE.md b/docs/lsp/ARCHITECTURE.md index 30534cf8..c3bbacea 100644 --- a/docs/lsp/ARCHITECTURE.md +++ b/docs/lsp/ARCHITECTURE.md @@ -283,6 +283,7 @@ Advertised commands: - `jrsonnet.evalExpression` - `jrsonnet.findTransitiveImporters` - `jrsonnet.findReferences` +- `jrsonnet.showErrors` Current async command implementation handles: @@ -290,6 +291,11 @@ Current async command implementation handles: - `jrsonnet.evalExpression` - `jrsonnet.findTransitiveImporters` - `jrsonnet.findReferences` +- `jrsonnet.showErrors` 
+ +The `jrsonnet.showErrors` command reuses the same diagnostics pipeline as +`textDocument/publishDiagnostics` and returns a typed diagnostics payload for +the requested file. ## Concurrency Strategy diff --git a/docs/lsp/HANDLERS.md b/docs/lsp/HANDLERS.md index 6d63ea54..3800623f 100644 --- a/docs/lsp/HANDLERS.md +++ b/docs/lsp/HANDLERS.md @@ -281,6 +281,7 @@ Advertised command IDs: - `jrsonnet.evalExpression` - `jrsonnet.findTransitiveImporters` - `jrsonnet.findReferences` +- `jrsonnet.showErrors` Current async command implementation handles: @@ -288,9 +289,11 @@ Current async command implementation handles: - `jrsonnet.evalExpression` - `jrsonnet.findTransitiveImporters` - `jrsonnet.findReferences` +- `jrsonnet.showErrors` -Code-lens output may also contain `jrsonnet.showErrors` for informational UI -actions. +`jrsonnet.showErrors` returns a `PublishDiagnosticsParams` payload for the +target URI so clients can render the same diagnostics data that the server +publishes asynchronously. 
## Testing Strategy From 67b1739d3d6c8ee5d6235da6498f804fff6fcaa1 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Tue, 10 Feb 2026 09:43:28 +0000 Subject: [PATCH 036/210] feat(lsp): bootstrap workspace import graph at startup --- crates/jrsonnet-lsp/src/server.rs | 140 +++++++++++++++++- crates/jrsonnet-lsp/tests/integration_test.rs | 72 +++++++++ docs/lsp/ARCHITECTURE.md | 7 +- docs/lsp/HANDLERS.md | 3 + 4 files changed, 217 insertions(+), 5 deletions(-) diff --git a/crates/jrsonnet-lsp/src/server.rs b/crates/jrsonnet-lsp/src/server.rs index f478517c..8cee1315 100644 --- a/crates/jrsonnet-lsp/src/server.rs +++ b/crates/jrsonnet-lsp/src/server.rs @@ -7,6 +7,7 @@ mod async_requests; use std::{ panic::{catch_unwind, AssertUnwindSafe}, + path::{Path, PathBuf}, sync::Arc, }; @@ -84,6 +85,17 @@ pub struct Server { shutdown_requested: bool, } +#[derive(Debug, Clone, Default, serde::Deserialize)] +#[serde(default)] +struct InitializeRoots { + #[serde(rename = "workspaceFolders")] + workspace_folders: Option>, + #[serde(rename = "rootUri")] + root_uri: Option, + #[serde(rename = "rootPath")] + root_path: Option, +} + impl Server { /// Create a new server with the given connection. 
#[must_use] @@ -211,7 +223,7 @@ impl Server { info!("Starting jrsonnet language server"); // Handle initialize request - let (id, params) = self.initialize()?; + let (id, params, init_roots) = self.initialize()?; // Parse initialization options into configuration let init_config = ServerConfig::from_initialization_options(params.initialization_options); @@ -256,6 +268,8 @@ impl Server { } } + self.bootstrap_workspace_index(&init_roots); + // Main loop self.main_loop()?; @@ -263,8 +277,126 @@ impl Server { Ok(()) } + fn bootstrap_workspace_index(&self, init_roots: &InitializeRoots) { + let roots = Self::workspace_root_paths(init_roots); + if roots.is_empty() { + debug!("No workspace roots provided; skipping startup index bootstrap"); + return; + } + + let mut files = Vec::new(); + for root in &roots { + files.extend(Self::collect_workspace_files(root)); + } + files.sort(); + files.dedup(); + + let file_count = files.len(); + for path in &files { + self.update_import_graph(path); + } + + info!( + "Startup workspace index bootstrap complete: indexed {} files across {} roots", + file_count, + roots.len() + ); + } + + fn workspace_root_paths(init_roots: &InitializeRoots) -> Vec { + let mut roots = Vec::new(); + + if let Some(workspace_folders) = &init_roots.workspace_folders { + for folder in workspace_folders { + if let Some(path) = CanonicalPath::from_uri(&folder.uri) { + roots.push(path.as_path().to_path_buf()); + } + } + } + + if let Some(root_uri) = &init_roots.root_uri { + if let Some(path) = CanonicalPath::from_uri(root_uri) { + roots.push(path.as_path().to_path_buf()); + } + } + + if let Some(root_path) = &init_roots.root_path { + roots.push(PathBuf::from(root_path)); + } + + let mut normalized_roots = roots + .into_iter() + .filter_map(Self::normalize_workspace_root) + .collect::>(); + normalized_roots.sort(); + normalized_roots.dedup(); + normalized_roots + } + + fn normalize_workspace_root(path: PathBuf) -> Option { + let path = 
path.canonicalize().unwrap_or(path); + if path.is_dir() { + return Some(path); + } + if path.is_file() { + return path.parent().map(Path::to_path_buf); + } + None + } + + fn collect_workspace_files(root: &Path) -> Vec { + let mut files = Vec::new(); + let mut to_visit = vec![root.to_path_buf()]; + + while let Some(dir) = to_visit.pop() { + let Ok(entries) = std::fs::read_dir(&dir) else { + continue; + }; + + for entry in entries.flatten() { + let path = entry.path(); + let Ok(file_type) = entry.file_type() else { + continue; + }; + + if file_type.is_dir() { + if Self::should_skip_workspace_dir(&path) { + continue; + } + to_visit.push(path); + continue; + } + + if !file_type.is_file() || !Self::is_indexed_workspace_file(&path) { + continue; + } + + match CanonicalPath::try_from_path(&path) { + Ok(path) => files.push(path), + Err(err) => warn!("Skipping workspace file {}: {}", path.display(), err), + } + } + } + + files + } + + fn should_skip_workspace_dir(path: &Path) -> bool { + let Some(name) = path.file_name().and_then(|name| name.to_str()) else { + return false; + }; + matches!(name, ".git" | ".jj" | ".svn" | "node_modules" | "target") + } + + fn is_indexed_workspace_file(path: &Path) -> bool { + let Some(extension) = path.extension().and_then(|extension| extension.to_str()) else { + return false; + }; + matches!(extension, "jsonnet" | "libsonnet" | "json") + } + /// Handle the initialize request. 
- fn initialize(&self) -> Result<(RequestId, InitializeParams)> { + fn initialize(&self) -> Result<(RequestId, InitializeParams, InitializeRoots)> { let msg = self .connection .receiver @@ -273,9 +405,11 @@ impl Server { match msg { Message::Request(req) if req.method == "initialize" => { + let init_roots: InitializeRoots = + serde_json::from_value(req.params.clone()).unwrap_or_default(); let params: InitializeParams = serde_json::from_value(req.params)?; info!("Initialize request from: {:?}", params.client_info); - Ok((req.id, params)) + Ok((req.id, params, init_roots)) } _ => anyhow::bail!("Expected initialize request, got: {msg:?}"), } diff --git a/crates/jrsonnet-lsp/tests/integration_test.rs b/crates/jrsonnet-lsp/tests/integration_test.rs index 61a5466b..527ddc6b 100644 --- a/crates/jrsonnet-lsp/tests/integration_test.rs +++ b/crates/jrsonnet-lsp/tests/integration_test.rs @@ -46,6 +46,15 @@ fn initialize_request_with_options(id: i32, initialization_options: serde_json:: ) } +/// Helper to create an initialize request with a workspace root URI. +fn initialize_request_with_root_uri(id: i32, root_uri: &str) -> Request { + let mut params = serde_json::to_value(InitializeParams::default()).unwrap(); + if let Some(object) = params.as_object_mut() { + object.insert("rootUri".to_string(), serde_json::json!(root_uri)); + } + Request::new(id.into(), Initialize::METHOD.to_string(), params) +} + /// Helper to create a shutdown request. 
fn shutdown_request(id: i32) -> Request { Request::new( @@ -2894,6 +2903,69 @@ fn test_watched_file_refreshes_unopened_importers_for_references() { .expect("Server thread should exit cleanly"); } +#[test] +fn test_initialize_bootstraps_workspace_import_graph() { + let tmp = TempDir::new().expect("tempdir should be created"); + let lib_path = tmp.path().join("lib.jsonnet"); + let main_path = tmp.path().join("main.jsonnet"); + fs::write(&lib_path, "{ value: 1 }").expect("lib should be written"); + fs::write(&main_path, "local lib = import 'lib.jsonnet'; lib.value") + .expect("main should be written"); + + let root_uri = file_uri(&tmp.path().canonicalize().expect("root should canonicalize")); + let lib_uri = file_uri(&lib_path.canonicalize().expect("lib should canonicalize")); + let main_uri = file_uri(&main_path.canonicalize().expect("main should canonicalize")); + + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request_with_root_uri( + 1, &root_uri, + ))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + client_conn + .sender + .send(Message::Request(execute_command_request( + 2, + "jrsonnet.findTransitiveImporters", + vec![serde_json::Value::String(lib_uri.clone())], + ))) + .unwrap(); + let response = recv_response(&client_conn, 2); + assert!( + response.error.is_none(), + "findTransitiveImporters command should succeed" + ); + assert_eq!( + response.result.expect("command should return result"), + json!({ + "file": lib_uri, + "transitiveImporters": [main_uri], + }) + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(3))) + .unwrap(); + let _ = recv_response(&client_conn, 3); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit 
cleanly"); +} + #[test] fn test_find_transitive_importers_returns_sorted_uris() { let tmp = TempDir::new().expect("tempdir should be created"); diff --git a/docs/lsp/ARCHITECTURE.md b/docs/lsp/ARCHITECTURE.md index c3bbacea..154b2f8a 100644 --- a/docs/lsp/ARCHITECTURE.md +++ b/docs/lsp/ARCHITECTURE.md @@ -65,7 +65,10 @@ Startup flow: 3. Configure runtime components (`Evaluator`, `AsyncDiagnostics`). 4. Send `InitializeResult` with server capabilities. 5. Wait for `initialized` notification. -6. Enter the main loop. +6. Bootstrap import-graph indexing by scanning initialize workspace roots + (`workspaceFolders`, `rootUri`, `rootPath`) for `*.jsonnet`, `*.libsonnet`, + and `*.json`. +7. Enter the main loop. Entry point: `run_stdio()` in `crates/jrsonnet-lsp/src/server.rs`. @@ -131,7 +134,7 @@ to documents, import graph, type cache, config, and dependency-aware analysis. - semantic tokens (full document and range) - code actions (quick-fix kind) - code lens (resolve enabled) -- execute command (four command IDs) +- execute command (five command IDs) For the canonical list, see `crates/jrsonnet-lsp/src/server.rs`. diff --git a/docs/lsp/HANDLERS.md b/docs/lsp/HANDLERS.md index 3800623f..20d1674d 100644 --- a/docs/lsp/HANDLERS.md +++ b/docs/lsp/HANDLERS.md @@ -259,6 +259,9 @@ provides shared orchestration for async methods: - config access (`jpath`, command behavior) - import-target resolution via graph-backed entries +Import-graph state is populated from open/closed document updates and from +startup workspace-root bootstrap indexing during server initialization. + This keeps handler modules focused on feature logic while server code owns runtime and cross-cutting orchestration. 
From 7f81356103caf2e2a1f2fee1c2e877addea4e656 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Tue, 10 Feb 2026 09:46:15 +0000 Subject: [PATCH 037/210] feat(lsp): search tracked files for workspace symbols --- .../jrsonnet-lsp/src/server/async_requests.rs | 59 +++++++++++-- crates/jrsonnet-lsp/tests/integration_test.rs | 87 ++++++++++++++++++- docs/lsp/ARCHITECTURE.md | 2 + docs/lsp/HANDLERS.md | 4 +- 4 files changed, 144 insertions(+), 8 deletions(-) diff --git a/crates/jrsonnet-lsp/src/server/async_requests.rs b/crates/jrsonnet-lsp/src/server/async_requests.rs index 6ac6040c..69633735 100644 --- a/crates/jrsonnet-lsp/src/server/async_requests.rs +++ b/crates/jrsonnet-lsp/src/server/async_requests.rs @@ -252,17 +252,64 @@ impl AsyncRequestContext { params: &WorkspaceSymbolParams, ) -> Option> { let query = ¶ms.query; - let all_symbols: Vec = self - .documents - .par_iter() - .flat_map(|entry| { - let Ok(uri) = entry.key().to_uri() else { + + let mut paths = { + let import_graph = self.import_graph.read(); + import_graph.all_files().cloned().collect::>() + }; + paths.extend(self.documents.open_paths()); + paths.sort(); + paths.dedup(); + + let mut all_symbols: Vec = paths + .into_par_iter() + .flat_map(|path| { + let Some(doc) = self.documents.get_document(&path) else { return Vec::new(); }; - handlers::workspace_symbols_for_document(entry.value(), &uri, query) + let Ok(uri) = path.to_uri() else { + return Vec::new(); + }; + handlers::workspace_symbols_for_document(&doc, &uri, query) }) .collect(); + all_symbols.sort_by(|left, right| { + left.location + .uri + .as_str() + .cmp(right.location.uri.as_str()) + .then( + left.location + .range + .start + .line + .cmp(&right.location.range.start.line), + ) + .then( + left.location + .range + .start + .character + .cmp(&right.location.range.start.character), + ) + .then( + left.location + .range + .end + .line + .cmp(&right.location.range.end.line), + ) + .then( + left.location + .range + .end + .character + 
.cmp(&right.location.range.end.character), + ) + .then(left.name.cmp(&right.name)) + }); + if all_symbols.is_empty() { return None; } diff --git a/crates/jrsonnet-lsp/tests/integration_test.rs b/crates/jrsonnet-lsp/tests/integration_test.rs index 527ddc6b..9f13a1ea 100644 --- a/crates/jrsonnet-lsp/tests/integration_test.rs +++ b/crates/jrsonnet-lsp/tests/integration_test.rs @@ -16,7 +16,7 @@ use lsp_types::{ CodeActionRequest, CodeLensRequest, CodeLensResolve, DocumentHighlightRequest, ExecuteCommand, GotoDeclaration, GotoDefinition, GotoImplementation, GotoTypeDefinition, Initialize, InlayHintRequest, References, Rename, Request as _, SemanticTokensRangeRequest, - Shutdown, + Shutdown, WorkspaceSymbolRequest, }, DidChangeConfigurationParams, DidChangeWatchedFilesParams, DidCloseTextDocumentParams, DidOpenTextDocumentParams, DidSaveTextDocumentParams, ExecuteCommandParams, FileChangeType, @@ -455,6 +455,19 @@ fn execute_command_request(id: i32, command: &str, arguments: Vec Request { + let params = lsp_types::WorkspaceSymbolParams { + query: query.to_string(), + work_done_progress_params: WorkDoneProgressParams::default(), + partial_result_params: PartialResultParams::default(), + }; + Request::new( + id.into(), + WorkspaceSymbolRequest::METHOD.to_string(), + serde_json::to_value(params).unwrap(), + ) +} + fn code_action_test_range() -> lsp_types::Range { lsp_types::Range { start: Position { @@ -2966,6 +2979,78 @@ fn test_initialize_bootstraps_workspace_import_graph() { .expect("Server thread should exit cleanly"); } +#[test] +fn test_workspace_symbol_includes_unopened_workspace_files() { + let tmp = TempDir::new().expect("tempdir should be created"); + let closed_path = tmp.path().join("closed.jsonnet"); + let closed_text = "local workspaceOnly=1;workspaceOnly"; + fs::write(&closed_path, closed_text).expect("closed file should be written"); + + let root_uri = file_uri(&tmp.path().canonicalize().expect("root should canonicalize")); + let closed_uri = file_uri( + 
&closed_path + .canonicalize() + .expect("closed should canonicalize"), + ); + + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request_with_root_uri( + 1, &root_uri, + ))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + client_conn + .sender + .send(Message::Request(workspace_symbol_request( + 2, + "workspaceOnly", + ))) + .unwrap(); + let response = recv_response(&client_conn, 2); + assert!(response.error.is_none(), "workspace/symbol should succeed"); + let symbols: Option> = serde_json::from_value( + response + .result + .expect("workspace/symbol should return result"), + ) + .unwrap(); + + let expected_doc = jrsonnet_lsp_document::Document::new( + closed_text.to_string(), + jrsonnet_lsp_document::DocVersion::new(0), + ); + let expected_uri: lsp_types::Uri = closed_uri.parse().unwrap(); + let expected_symbols = Some(jrsonnet_lsp_handlers::workspace_symbols_for_document( + &expected_doc, + &expected_uri, + "workspaceOnly", + )); + + assert_eq!(symbols, expected_symbols); + + client_conn + .sender + .send(Message::Request(shutdown_request(3))) + .unwrap(); + let _ = recv_response(&client_conn, 3); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + #[test] fn test_find_transitive_importers_returns_sorted_uris() { let tmp = TempDir::new().expect("tempdir should be created"); diff --git a/docs/lsp/ARCHITECTURE.md b/docs/lsp/ARCHITECTURE.md index 154b2f8a..4e1ef3f0 100644 --- a/docs/lsp/ARCHITECTURE.md +++ b/docs/lsp/ARCHITECTURE.md @@ -117,6 +117,8 @@ Dispatched via `spawn_async_response` (Rayon): Async handlers run through `AsyncRequestContext` (`crates/jrsonnet-lsp/src/server/async_requests.rs`), which centralizes access to 
documents, import graph, type cache, config, and dependency-aware analysis. +`workspace/symbol` uses this context to search tracked workspace files from the +import graph, not just currently open buffers. ## Advertised LSP Capabilities diff --git a/docs/lsp/HANDLERS.md b/docs/lsp/HANDLERS.md index 20d1674d..17142781 100644 --- a/docs/lsp/HANDLERS.md +++ b/docs/lsp/HANDLERS.md @@ -246,7 +246,9 @@ File: `crates/jrsonnet-lsp-handlers/src/symbols.rs` - `document_symbols` builds hierarchical document outline. - `workspace_symbols_for_document` filters symbols by query for one document. -- Async server path parallelizes across currently open documents. +- Async server path parallelizes across tracked workspace files (import-graph + entries plus currently open documents), loading unopened files from disk via + the document manager. ## Async Request Context From 3099eab2d3f75628fc905bdbf7f6316366db03a1 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Tue, 10 Feb 2026 09:56:59 +0000 Subject: [PATCH 038/210] refactor(lsp-handlers): remove deprecated allow from symbols --- crates/jrsonnet-lsp-handlers/src/symbols.rs | 120 ++++++++++++-------- 1 file changed, 73 insertions(+), 47 deletions(-) diff --git a/crates/jrsonnet-lsp-handlers/src/symbols.rs b/crates/jrsonnet-lsp-handlers/src/symbols.rs index c561aef0..fac8da3a 100644 --- a/crates/jrsonnet-lsp-handlers/src/symbols.rs +++ b/crates/jrsonnet-lsp-handlers/src/symbols.rs @@ -5,10 +5,6 @@ //! - Object fields //! - Function definitions -// The `deprecated` field on DocumentSymbol is deprecated in lsp-types 0.96 -// in favor of `tags`, but it's still a required field. Suppress the warning. 
-#![allow(deprecated)] - use jrsonnet_lsp_document::{to_lsp_range, Document, LineIndex}; use jrsonnet_rowan_parser::{ nodes::{ @@ -81,7 +77,7 @@ fn process_local_stmt( return None; } - Some(create_symbol( + create_symbol( "local".to_string(), SymbolKind::NAMESPACE, range, @@ -89,7 +85,7 @@ fn process_local_stmt( line_index, text, Some(children), - )) + ) } else { None } @@ -140,9 +136,7 @@ fn process_bind_destruct( (SymbolKind::VARIABLE, None) }; - Some(create_symbol( - name, kind, range, range, line_index, text, children, - )) + create_symbol(name, kind, range, range, line_index, text, children) } /// Process a function binding. @@ -165,16 +159,14 @@ fn process_bind_function( None }; - Some(DocumentSymbol { + build_document_symbol( name, detail, - kind: SymbolKind::FUNCTION, - tags: None, - deprecated: None, - range: to_lsp_range(range, line_index, text), - selection_range: to_lsp_range(range, line_index, text), - children: None, - }) + SymbolKind::FUNCTION, + to_lsp_range(range, line_index, text), + to_lsp_range(range, line_index, text), + None, + ) } /// Process an expression base and extract symbols. @@ -285,9 +277,7 @@ fn process_field_normal( SymbolKind::FIELD }; - Some(create_symbol( - name, kind, range, range, line_index, text, children, - )) + create_symbol(name, kind, range, range, line_index, text, children) } /// Process a method field. @@ -310,16 +300,14 @@ fn process_field_method( None }; - Some(DocumentSymbol { + build_document_symbol( name, detail, - kind: SymbolKind::METHOD, - tags: None, - deprecated: None, - range: to_lsp_range(range, line_index, text), - selection_range: to_lsp_range(range, line_index, text), - children: None, - }) + SymbolKind::METHOD, + to_lsp_range(range, line_index, text), + to_lsp_range(range, line_index, text), + None, + ) } /// Get the name from a field name node. 
@@ -361,17 +349,52 @@ fn create_symbol( line_index: &LineIndex, text: &str, children: Option>, -) -> DocumentSymbol { - DocumentSymbol { +) -> Option { + build_document_symbol( name, - detail: None, + None, kind, - tags: None, - deprecated: None, - range: to_lsp_range(range, line_index, text), - selection_range: to_lsp_range(selection_range, line_index, text), + to_lsp_range(range, line_index, text), + to_lsp_range(selection_range, line_index, text), children, - } + ) +} + +fn build_document_symbol( + name: String, + detail: Option, + kind: SymbolKind, + range: lsp_types::Range, + selection_range: lsp_types::Range, + children: Option>, +) -> Option { + serde_json::from_value(serde_json::json!({ + "name": name, + "detail": detail, + "kind": kind, + "tags": Option::>::None, + "range": range, + "selectionRange": selection_range, + "children": children, + })) + .ok() +} + +fn build_symbol_information( + name: String, + kind: SymbolKind, + tags: Option>, + location: Location, + container_name: Option, +) -> Option { + serde_json::from_value(serde_json::json!({ + "name": name, + "kind": kind, + "tags": tags, + "location": location, + "containerName": container_name, + })) + .ok() } /// Search for symbols matching a query across a document. 
@@ -405,17 +428,20 @@ fn flatten_symbols( let matches = query.is_empty() || symbol.name.to_lowercase().contains(&query_lower); if matches { - results.push(SymbolInformation { - name: symbol.name.clone(), - kind: symbol.kind, - tags: symbol.tags.clone(), - deprecated: symbol.deprecated, - location: Location { - uri: uri.clone(), - range: symbol.range, - }, - container_name: container_name.map(String::from), - }); + let location = Location { + uri: uri.clone(), + range: symbol.range, + }; + let symbol_info = build_symbol_information( + symbol.name.clone(), + symbol.kind, + symbol.tags.clone(), + location, + container_name.map(String::from), + ); + if let Some(symbol_info) = symbol_info { + results.push(symbol_info); + } } // Recursively process children From 116001248337a22d6b9d7196e5abbde227986270 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Tue, 10 Feb 2026 10:00:24 +0000 Subject: [PATCH 039/210] perf(lsp): run startup workspace indexing asynchronously --- crates/jrsonnet-lsp/src/server.rs | 60 ++++++++------ crates/jrsonnet-lsp/tests/integration_test.rs | 78 +++++++++++++------ docs/lsp/ARCHITECTURE.md | 8 +- 3 files changed, 96 insertions(+), 50 deletions(-) diff --git a/crates/jrsonnet-lsp/src/server.rs b/crates/jrsonnet-lsp/src/server.rs index 8cee1315..6cf223ba 100644 --- a/crates/jrsonnet-lsp/src/server.rs +++ b/crates/jrsonnet-lsp/src/server.rs @@ -268,7 +268,7 @@ impl Server { } } - self.bootstrap_workspace_index(&init_roots); + self.schedule_workspace_index_bootstrap(init_roots); // Main loop self.main_loop()?; @@ -277,30 +277,35 @@ impl Server { Ok(()) } - fn bootstrap_workspace_index(&self, init_roots: &InitializeRoots) { - let roots = Self::workspace_root_paths(init_roots); + fn schedule_workspace_index_bootstrap(&self, init_roots: InitializeRoots) { + let roots = Self::workspace_root_paths(&init_roots); if roots.is_empty() { debug!("No workspace roots provided; skipping startup index bootstrap"); return; } - let mut files = Vec::new(); - for root in 
&roots { - files.extend(Self::collect_workspace_files(root)); - } - files.sort(); - files.dedup(); + let documents = Arc::clone(&self.documents); + let import_graph = Arc::clone(&self.import_graph); + let config = Arc::clone(&self.config); + rayon::spawn(move || { + let mut files = Vec::new(); + for root in &roots { + files.extend(Self::collect_workspace_files(root)); + } + files.sort(); + files.dedup(); - let file_count = files.len(); - for path in &files { - self.update_import_graph(path); - } + let file_count = files.len(); + for path in &files { + Self::update_import_graph_for_path(&documents, &import_graph, &config, path); + } - info!( - "Startup workspace index bootstrap complete: indexed {} files across {} roots", - file_count, - roots.len() - ); + info!( + "Startup workspace index bootstrap complete: indexed {} files across {} roots", + file_count, + roots.len() + ); + }); } fn workspace_root_paths(init_roots: &InitializeRoots) -> Vec { @@ -1177,13 +1182,22 @@ impl Server { /// Parses the document's import statements and updates the graph /// so that cross-file references can be found efficiently. fn update_import_graph(&self, path: &CanonicalPath) { - let Some(doc) = self.documents.get_document(path) else { + Self::update_import_graph_for_path(&self.documents, &self.import_graph, &self.config, path); + } + + fn update_import_graph_for_path( + documents: &SharedDocumentManager, + import_graph: &Arc>, + config: &SharedConfig, + path: &CanonicalPath, + ) { + let Some(doc) = documents.get_document(path) else { // File no longer exists or cannot be read. 
- self.import_graph.write().remove_file(path); + import_graph.write().remove_file(path); return; }; - let config = self.config.read(); + let config = config.read(); let import_roots = effective_import_roots( path.as_path(), &config.jpath, @@ -1201,9 +1215,7 @@ impl Server { let entries = parse_document_imports(&doc, &resolve_import); // Now acquire the write lock and do the quick data structure update - self.import_graph - .write() - .update_file_with_entries(path, entries); + import_graph.write().update_file_with_entries(path, entries); } /// Schedule diagnostics for currently-open files that import `path`. diff --git a/crates/jrsonnet-lsp/tests/integration_test.rs b/crates/jrsonnet-lsp/tests/integration_test.rs index 9f13a1ea..490b7244 100644 --- a/crates/jrsonnet-lsp/tests/integration_test.rs +++ b/crates/jrsonnet-lsp/tests/integration_test.rs @@ -2944,26 +2944,37 @@ fn test_initialize_bootstraps_workspace_import_graph() { .send(Message::Notification(initialized_notification())) .unwrap(); - client_conn - .sender - .send(Message::Request(execute_command_request( - 2, - "jrsonnet.findTransitiveImporters", - vec![serde_json::Value::String(lib_uri.clone())], - ))) - .unwrap(); - let response = recv_response(&client_conn, 2); - assert!( - response.error.is_none(), - "findTransitiveImporters command should succeed" - ); - assert_eq!( - response.result.expect("command should return result"), - json!({ - "file": lib_uri, - "transitiveImporters": [main_uri], - }) - ); + let expected_result = json!({ + "file": lib_uri, + "transitiveImporters": [main_uri], + }); + let mut actual_result = serde_json::Value::Null; + for request_id in 2..=42 { + client_conn + .sender + .send(Message::Request(execute_command_request( + request_id, + "jrsonnet.findTransitiveImporters", + vec![serde_json::Value::String( + expected_result["file"] + .as_str() + .expect("expected file URI should be a string") + .to_string(), + )], + ))) + .unwrap(); + let response = recv_response(&client_conn, 
request_id); + assert!( + response.error.is_none(), + "findTransitiveImporters command should succeed" + ); + actual_result = response.result.expect("command should return result"); + if actual_result == expected_result { + break; + } + thread::sleep(Duration::from_millis(25)); + } + assert_eq!(actual_result, expected_result); client_conn .sender @@ -3034,8 +3045,31 @@ fn test_workspace_symbol_includes_unopened_workspace_files() { &expected_uri, "workspaceOnly", )); - - assert_eq!(symbols, expected_symbols); + let mut actual_symbols = symbols; + if actual_symbols != expected_symbols { + for request_id in 3..=43 { + client_conn + .sender + .send(Message::Request(workspace_symbol_request( + request_id, + "workspaceOnly", + ))) + .unwrap(); + let response = recv_response(&client_conn, request_id); + assert!(response.error.is_none(), "workspace/symbol should succeed"); + actual_symbols = serde_json::from_value( + response + .result + .expect("workspace/symbol should return result"), + ) + .unwrap(); + if actual_symbols == expected_symbols { + break; + } + thread::sleep(Duration::from_millis(25)); + } + } + assert_eq!(actual_symbols, expected_symbols); client_conn .sender diff --git a/docs/lsp/ARCHITECTURE.md b/docs/lsp/ARCHITECTURE.md index 4e1ef3f0..d387b55a 100644 --- a/docs/lsp/ARCHITECTURE.md +++ b/docs/lsp/ARCHITECTURE.md @@ -65,10 +65,10 @@ Startup flow: 3. Configure runtime components (`Evaluator`, `AsyncDiagnostics`). 4. Send `InitializeResult` with server capabilities. 5. Wait for `initialized` notification. -6. Bootstrap import-graph indexing by scanning initialize workspace roots - (`workspaceFolders`, `rootUri`, `rootPath`) for `*.jsonnet`, `*.libsonnet`, - and `*.json`. -7. Enter the main loop. +6. Schedule background bootstrap indexing for initialize workspace roots + (`workspaceFolders`, `rootUri`, `rootPath`) scanning `*.jsonnet`, + `*.libsonnet`, and `*.json`. +7. Enter the main loop immediately while bootstrap continues asynchronously. 
Entry point: `run_stdio()` in `crates/jrsonnet-lsp/src/server.rs`. From ab0edd6631aed82fd884d49d39a9433de3ca8bac Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Tue, 10 Feb 2026 10:05:32 +0000 Subject: [PATCH 040/210] feat(lsp): rank and cap workspace symbols --- .../jrsonnet-lsp/src/server/async_requests.rs | 65 +++--- crates/jrsonnet-lsp/tests/integration_test.rs | 188 +++++++++++++++--- docs/lsp/ARCHITECTURE.md | 7 +- docs/lsp/HANDLERS.md | 4 + 4 files changed, 197 insertions(+), 67 deletions(-) diff --git a/crates/jrsonnet-lsp/src/server/async_requests.rs b/crates/jrsonnet-lsp/src/server/async_requests.rs index 69633735..da9906c2 100644 --- a/crates/jrsonnet-lsp/src/server/async_requests.rs +++ b/crates/jrsonnet-lsp/src/server/async_requests.rs @@ -22,6 +22,8 @@ use crate::analysis::{ eval::create_state_with_jpath, tanka::effective_import_roots, EvalConfig, Evaluator, }; +const MAX_WORKSPACE_SYMBOL_RESULTS: usize = 128; + #[derive(Clone)] pub(super) struct AsyncRequestContext { documents: SharedDocumentManager, @@ -274,41 +276,23 @@ impl AsyncRequestContext { }) .collect(); - all_symbols.sort_by(|left, right| { - left.location - .uri - .as_str() - .cmp(right.location.uri.as_str()) - .then( - left.location - .range - .start - .line - .cmp(&right.location.range.start.line), - ) - .then( - left.location - .range - .start - .character - .cmp(&right.location.range.start.character), - ) - .then( - left.location - .range - .end - .line - .cmp(&right.location.range.end.line), - ) - .then( - left.location - .range - .end - .character - .cmp(&right.location.range.end.character), - ) - .then(left.name.cmp(&right.name)) + let query_lower = query.to_lowercase(); + all_symbols.sort_by_cached_key(|symbol| { + let name_lower = symbol.name.to_lowercase(); + ( + workspace_symbol_match_rank(&name_lower, &query_lower), + name_lower.len(), + name_lower, + symbol.location.uri.as_str().to_string(), + symbol.location.range.start.line, + symbol.location.range.start.character, + 
symbol.location.range.end.line, + symbol.location.range.end.character, + ) }); + if all_symbols.len() > MAX_WORKSPACE_SYMBOL_RESULTS { + all_symbols.truncate(MAX_WORKSPACE_SYMBOL_RESULTS); + } if all_symbols.is_empty() { return None; @@ -744,6 +728,19 @@ impl AsyncRequestContext { } } +fn workspace_symbol_match_rank(name_lower: &str, query_lower: &str) -> u8 { + if query_lower.is_empty() { + return 0; + } + if name_lower == query_lower { + return 0; + } + if name_lower.starts_with(query_lower) { + return 1; + } + 2 +} + #[derive(Debug, Clone, Copy)] struct ImportedFieldLocations { declaration: lsp_types::Range, diff --git a/crates/jrsonnet-lsp/tests/integration_test.rs b/crates/jrsonnet-lsp/tests/integration_test.rs index 490b7244..d40fdfee 100644 --- a/crates/jrsonnet-lsp/tests/integration_test.rs +++ b/crates/jrsonnet-lsp/tests/integration_test.rs @@ -468,6 +468,24 @@ fn workspace_symbol_request(id: i32, query: &str) -> Request { ) } +fn request_workspace_symbols( + conn: &Connection, + id: i32, + query: &str, +) -> Option> { + conn.sender + .send(Message::Request(workspace_symbol_request(id, query))) + .unwrap(); + let response = recv_response(conn, id); + assert!(response.error.is_none(), "workspace/symbol should succeed"); + serde_json::from_value( + response + .result + .expect("workspace/symbol should return result"), + ) + .unwrap() +} + fn code_action_test_range() -> lsp_types::Range { lsp_types::Range { start: Position { @@ -3019,21 +3037,7 @@ fn test_workspace_symbol_includes_unopened_workspace_files() { .send(Message::Notification(initialized_notification())) .unwrap(); - client_conn - .sender - .send(Message::Request(workspace_symbol_request( - 2, - "workspaceOnly", - ))) - .unwrap(); - let response = recv_response(&client_conn, 2); - assert!(response.error.is_none(), "workspace/symbol should succeed"); - let symbols: Option> = serde_json::from_value( - response - .result - .expect("workspace/symbol should return result"), - ) - .unwrap(); + let 
symbols = request_workspace_symbols(&client_conn, 2, "workspaceOnly"); let expected_doc = jrsonnet_lsp_document::Document::new( closed_text.to_string(), @@ -3048,21 +3052,7 @@ fn test_workspace_symbol_includes_unopened_workspace_files() { let mut actual_symbols = symbols; if actual_symbols != expected_symbols { for request_id in 3..=43 { - client_conn - .sender - .send(Message::Request(workspace_symbol_request( - request_id, - "workspaceOnly", - ))) - .unwrap(); - let response = recv_response(&client_conn, request_id); - assert!(response.error.is_none(), "workspace/symbol should succeed"); - actual_symbols = serde_json::from_value( - response - .result - .expect("workspace/symbol should return result"), - ) - .unwrap(); + actual_symbols = request_workspace_symbols(&client_conn, request_id, "workspaceOnly"); if actual_symbols == expected_symbols { break; } @@ -3085,6 +3075,144 @@ fn test_workspace_symbol_includes_unopened_workspace_files() { .expect("Server thread should exit cleanly"); } +#[test] +fn test_workspace_symbol_ranks_exact_prefix_then_substring() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + let uri = "file:///test/workspace-symbol-ranking.jsonnet"; + let text = + "local needle = 1; local has_needle_inside = 2; local needlePrefix = 3; local zneedle = 4; needle"; + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text))) + .unwrap(); + let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); + + let symbols = request_workspace_symbols(&client_conn, 2, "needle"); + let expected_doc = jrsonnet_lsp_document::Document::new( + text.to_string(), + jrsonnet_lsp_document::DocVersion::new(1), + ); + let expected_uri: 
lsp_types::Uri = uri.parse().unwrap(); + let expected_all = jrsonnet_lsp_handlers::workspace_symbols_for_document( + &expected_doc, + &expected_uri, + "needle", + ); + let expected_symbols = vec![ + expected_all + .iter() + .find(|symbol| symbol.name == "needle") + .expect("expected exact match symbol") + .clone(), + expected_all + .iter() + .find(|symbol| symbol.name == "needlePrefix") + .expect("expected prefix match symbol") + .clone(), + expected_all + .iter() + .find(|symbol| symbol.name == "zneedle") + .expect("expected shorter substring symbol") + .clone(), + expected_all + .iter() + .find(|symbol| symbol.name == "has_needle_inside") + .expect("expected longer substring symbol") + .clone(), + ]; + assert_eq!(symbols, Some(expected_symbols)); + + client_conn + .sender + .send(Message::Request(shutdown_request(3))) + .unwrap(); + let _ = recv_response(&client_conn, 3); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_workspace_symbol_caps_results_with_deterministic_order() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + let uri = "file:///test/workspace-symbol-cap.jsonnet"; + let text = { + let locals = (0..140) + .rev() + .map(|idx| format!("local capsymbol{idx:03} = {idx};")) + .collect::>() + .join(" "); + format!("{locals} capsymbol000") + }; + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, &text))) + .unwrap(); + let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); + + let symbols = request_workspace_symbols(&client_conn, 2, "capsymbol"); + let expected_doc = + 
jrsonnet_lsp_document::Document::new(text, jrsonnet_lsp_document::DocVersion::new(1)); + let expected_uri: lsp_types::Uri = uri.parse().unwrap(); + let expected_all = jrsonnet_lsp_handlers::workspace_symbols_for_document( + &expected_doc, + &expected_uri, + "capsymbol", + ); + let expected_symbols = (0..128) + .map(|idx| format!("capsymbol{idx:03}")) + .map(|name| { + expected_all + .iter() + .find(|symbol| symbol.name == name) + .expect("expected symbol to exist") + .clone() + }) + .collect::>(); + assert_eq!(symbols, Some(expected_symbols)); + + client_conn + .sender + .send(Message::Request(shutdown_request(3))) + .unwrap(); + let _ = recv_response(&client_conn, 3); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + #[test] fn test_find_transitive_importers_returns_sorted_uris() { let tmp = TempDir::new().expect("tempdir should be created"); diff --git a/docs/lsp/ARCHITECTURE.md b/docs/lsp/ARCHITECTURE.md index d387b55a..e847b576 100644 --- a/docs/lsp/ARCHITECTURE.md +++ b/docs/lsp/ARCHITECTURE.md @@ -118,7 +118,8 @@ Async handlers run through `AsyncRequestContext` (`crates/jrsonnet-lsp/src/server/async_requests.rs`), which centralizes access to documents, import graph, type cache, config, and dependency-aware analysis. `workspace/symbol` uses this context to search tracked workspace files from the -import graph, not just currently open buffers. +import graph, not just currently open buffers. Results are ranked by match +quality (exact, then prefix, then substring) and capped to 128 entries. ## Advertised LSP Capabilities @@ -204,8 +205,8 @@ Navigation semantics: introduced in the current scope"). - `textDocument/definition`: canonical origin ("what this symbol resolves to after following aliases/imports"). -- `textDocument/typeDefinition`: same target as `definition` in Jsonnet - (symbols do not have separate nominal type declarations). 
+- `textDocument/typeDefinition`: same target as `definition` in Jsonnet (symbols + do not have separate nominal type declarations). - `textDocument/implementation`: value/body expression ("how this symbol is computed"). diff --git a/docs/lsp/HANDLERS.md b/docs/lsp/HANDLERS.md index 17142781..62c0b5a6 100644 --- a/docs/lsp/HANDLERS.md +++ b/docs/lsp/HANDLERS.md @@ -249,6 +249,10 @@ File: `crates/jrsonnet-lsp-handlers/src/symbols.rs` - Async server path parallelizes across tracked workspace files (import-graph entries plus currently open documents), loading unopened files from disk via the document manager. +- Server-side aggregation then ranks matches so exact names come first, then + prefix matches, then other substring matches. +- Workspace symbol responses are capped at `128` entries to keep results bounded + and responsive on large workspaces. ## Async Request Context From da6e9342a9ee372e2948b2f4542f104b1ea61261 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Tue, 10 Feb 2026 10:08:36 +0000 Subject: [PATCH 041/210] fix(lsp): return InvalidParams for unknown execute commands --- crates/jrsonnet-lsp/src/server.rs | 63 +++++++++++++------ crates/jrsonnet-lsp/tests/integration_test.rs | 49 +++++++++++++++ docs/lsp/ARCHITECTURE.md | 2 + docs/lsp/HANDLERS.md | 3 + 4 files changed, 99 insertions(+), 18 deletions(-) diff --git a/crates/jrsonnet-lsp/src/server.rs b/crates/jrsonnet-lsp/src/server.rs index 6cf223ba..6c696d90 100644 --- a/crates/jrsonnet-lsp/src/server.rs +++ b/crates/jrsonnet-lsp/src/server.rs @@ -38,11 +38,11 @@ use lsp_types::{ DidChangeTextDocumentParams, DidChangeWatchedFilesParams, DidCloseTextDocumentParams, DidOpenTextDocumentParams, DidSaveTextDocumentParams, DocumentFormattingParams, DocumentHighlight, DocumentHighlightParams, DocumentSymbolParams, DocumentSymbolResponse, - ExecuteCommandOptions, FileChangeType, HoverProviderCapability, InitializeParams, - InitializeResult, OneOf, PrepareRenameResponse, SemanticTokens, SemanticTokensFullOptions, 
- SemanticTokensOptions, SemanticTokensParams, SemanticTokensRangeParams, - SemanticTokensServerCapabilities, ServerCapabilities, ServerInfo, SignatureHelp, - SignatureHelpOptions, SignatureHelpParams, TextDocumentPositionParams, + ExecuteCommandOptions, ExecuteCommandParams, FileChangeType, HoverProviderCapability, + InitializeParams, InitializeResult, OneOf, PrepareRenameResponse, SemanticTokens, + SemanticTokensFullOptions, SemanticTokensOptions, SemanticTokensParams, + SemanticTokensRangeParams, SemanticTokensServerCapabilities, ServerCapabilities, ServerInfo, + SignatureHelp, SignatureHelpOptions, SignatureHelpParams, TextDocumentPositionParams, TextDocumentSyncCapability, TextDocumentSyncKind, TextEdit, WorkDoneProgressOptions, }; use parking_lot::RwLock; @@ -96,6 +96,14 @@ struct InitializeRoots { root_path: Option, } +const SUPPORTED_EXECUTE_COMMANDS: [&str; 5] = [ + "jrsonnet.evalFile", + "jrsonnet.evalExpression", + "jrsonnet.findTransitiveImporters", + "jrsonnet.findReferences", + "jrsonnet.showErrors", +]; + impl Server { /// Create a new server with the given connection. 
#[must_use] @@ -474,13 +482,10 @@ impl Server { }), ), execute_command_provider: Some(ExecuteCommandOptions { - commands: vec![ - "jrsonnet.evalFile".to_string(), - "jrsonnet.evalExpression".to_string(), - "jrsonnet.findTransitiveImporters".to_string(), - "jrsonnet.findReferences".to_string(), - "jrsonnet.showErrors".to_string(), - ], + commands: SUPPORTED_EXECUTE_COMMANDS + .into_iter() + .map(ToString::to_string) + .collect(), work_done_progress_options: WorkDoneProgressOptions::default(), }), code_lens_provider: Some(CodeLensOptions { @@ -637,6 +642,12 @@ impl Server { Ok(()) } + fn send_invalid_params_response(&self, id: RequestId, message: String) -> Result<()> { + let response = Response::new_err(id, lsp_server::ErrorCode::InvalidParams as i32, message); + self.connection.sender.send(Message::Response(response))?; + Ok(()) + } + fn handle_sync_request( &self, id: RequestId, @@ -766,12 +777,17 @@ impl Server { params, AsyncRequestContext::code_lens, ), - ExecuteCommand::METHOD => self.handle_async_typed( - id, - ExecuteCommand::METHOD, - params, - AsyncRequestContext::execute_command, - ), + ExecuteCommand::METHOD => { + let params: ExecuteCommandParams = serde_json::from_value(params)?; + if !Self::is_supported_execute_command(¶ms.command) { + return self.send_invalid_params_response( + id, + format!("Unknown execute command: {}", params.command), + ); + } + self.handle_async_execute_command(id, params); + Ok(()) + } _ => self.send_method_not_found_response(id, method), } } @@ -793,6 +809,13 @@ impl Server { Ok(()) } + fn handle_async_execute_command(&self, id: RequestId, params: ExecuteCommandParams) { + let context = self.async_request_context(); + self.spawn_json_response(id, ExecuteCommand::METHOD, move || { + context.execute_command(¶ms) + }); + } + /// Handle textDocument/documentSymbol request. 
fn on_document_symbol(&self, params: &DocumentSymbolParams) -> Option { let uri = ¶ms.text_document.uri; @@ -904,6 +927,10 @@ impl Server { } impl Server { + fn is_supported_execute_command(command: &str) -> bool { + SUPPORTED_EXECUTE_COMMANDS.contains(&command) + } + /// Handle an incoming notification. /// /// Returns true if exit notification was received. diff --git a/crates/jrsonnet-lsp/tests/integration_test.rs b/crates/jrsonnet-lsp/tests/integration_test.rs index d40fdfee..b929741d 100644 --- a/crates/jrsonnet-lsp/tests/integration_test.rs +++ b/crates/jrsonnet-lsp/tests/integration_test.rs @@ -2545,6 +2545,55 @@ fn test_execute_command_show_errors() { .expect("Server thread should exit cleanly"); } +#[test] +fn test_execute_command_unknown_returns_invalid_params_error() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + client_conn + .sender + .send(Message::Request(execute_command_request( + 2, + "jrsonnet.unknownCommand", + vec![], + ))) + .unwrap(); + let response = recv_response(&client_conn, 2); + assert_eq!(response.result, None); + let error = response + .error + .expect("unknown execute command should return an error"); + assert_eq!(error.code, lsp_server::ErrorCode::InvalidParams as i32); + assert_eq!( + error.message, + "Unknown execute command: jrsonnet.unknownCommand" + ); + assert_eq!(error.data, None); + + client_conn + .sender + .send(Message::Request(shutdown_request(3))) + .unwrap(); + let _ = recv_response(&client_conn, 3); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + #[test] fn test_eval_commands_use_tanka_import_roots() { let tmp = 
TempDir::new().expect("tempdir should be created"); diff --git a/docs/lsp/ARCHITECTURE.md b/docs/lsp/ARCHITECTURE.md index e847b576..a4bb4ad1 100644 --- a/docs/lsp/ARCHITECTURE.md +++ b/docs/lsp/ARCHITECTURE.md @@ -299,6 +299,8 @@ Current async command implementation handles: - `jrsonnet.findReferences` - `jrsonnet.showErrors` +Unknown command IDs are rejected with an explicit LSP `InvalidParams` error. + The `jrsonnet.showErrors` command reuses the same diagnostics pipeline as `textDocument/publishDiagnostics` and returns a typed diagnostics payload for the requested file. diff --git a/docs/lsp/HANDLERS.md b/docs/lsp/HANDLERS.md index 62c0b5a6..4e45b508 100644 --- a/docs/lsp/HANDLERS.md +++ b/docs/lsp/HANDLERS.md @@ -300,6 +300,9 @@ Current async command implementation handles: - `jrsonnet.findReferences` - `jrsonnet.showErrors` +Unknown command IDs are rejected with an explicit LSP `InvalidParams` response +error. + `jrsonnet.showErrors` returns a `PublishDiagnosticsParams` payload for the target URI so clients can render the same diagnostics data that the server publishes asynchronously. 
From 861355813bf6016606b088c7e28cb16ce9dff985 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Tue, 10 Feb 2026 10:50:17 +0000 Subject: [PATCH 042/210] lsp: add InflightRequests boundary for request lifecycle --- crates/jrsonnet-lsp/src/lib.rs | 1 + .../src/protocol/inflight_requests.rs | 107 ++++++++++++++++++ crates/jrsonnet-lsp/src/protocol/mod.rs | 1 + crates/jrsonnet-lsp/src/server.rs | 89 +++++++++------ 4 files changed, 164 insertions(+), 34 deletions(-) create mode 100644 crates/jrsonnet-lsp/src/protocol/inflight_requests.rs create mode 100644 crates/jrsonnet-lsp/src/protocol/mod.rs diff --git a/crates/jrsonnet-lsp/src/lib.rs b/crates/jrsonnet-lsp/src/lib.rs index a0ab3d80..949c4a54 100644 --- a/crates/jrsonnet-lsp/src/lib.rs +++ b/crates/jrsonnet-lsp/src/lib.rs @@ -24,6 +24,7 @@ pub mod analysis; pub mod async_diagnostics; pub mod config; pub mod handlers; +mod protocol; pub mod server; pub use config::ServerConfig; diff --git a/crates/jrsonnet-lsp/src/protocol/inflight_requests.rs b/crates/jrsonnet-lsp/src/protocol/inflight_requests.rs new file mode 100644 index 00000000..82428f7f --- /dev/null +++ b/crates/jrsonnet-lsp/src/protocol/inflight_requests.rs @@ -0,0 +1,107 @@ +use anyhow::Result; +use crossbeam_channel::Sender; +use lsp_server::{ErrorCode, Message, ReqQueue, RequestId, Response}; +use serde::Serialize; + +#[derive(Debug, Clone, PartialEq, Eq)] +pub(crate) struct IncomingRequestMeta { + pub(crate) method: String, +} + +#[derive(Debug)] +pub(crate) struct InflightRequests { + queue: ReqQueue, + sender: Sender, +} + +impl InflightRequests { + #[must_use] + pub(crate) fn new(sender: Sender) -> Self { + Self { + queue: ReqQueue::default(), + sender, + } + } + + pub(crate) fn register_incoming(&mut self, id: RequestId, method: &str) { + self.queue.incoming.register( + id, + IncomingRequestMeta { + method: method.to_string(), + }, + ); + } + + pub(crate) fn send_response(&mut self, response: Response) -> Result { + if 
self.queue.incoming.complete(&response.id).is_none() { + return Ok(false); + } + + self.sender.send(Message::Response(response))?; + Ok(true) + } + + pub(crate) fn send_ok(&mut self, id: RequestId, result: T) -> Result + where + T: Serialize, + { + let response = Response::new_ok(id, serde_json::to_value(result)?); + self.send_response(response) + } + + pub(crate) fn send_err( + &mut self, + id: RequestId, + code: ErrorCode, + message: impl Into, + ) -> Result { + self.send_response(Response::new_err(id, code as i32, message.into())) + } + + pub(crate) fn complete_outgoing(&mut self, id: RequestId) -> bool { + self.queue.outgoing.complete(id).is_some() + } +} + +#[cfg(test)] +mod tests { + use crossbeam_channel::unbounded; + use lsp_server::{Message, RequestId}; + + use super::InflightRequests; + + #[test] + fn send_ok_requires_registered_incoming_id() { + let (sender, receiver) = unbounded(); + let mut inflight = InflightRequests::new(sender); + + let id = RequestId::from(7); + assert!(!inflight + .send_ok(id.clone(), serde_json::json!({"ok": true})) + .unwrap()); + assert!(receiver.try_recv().is_err()); + + inflight.register_incoming(id.clone(), "example/method"); + assert!(inflight + .send_ok(id, serde_json::json!({"ok": true})) + .unwrap()); + + let message = receiver.recv().unwrap(); + match message { + Message::Response(response) => { + assert_eq!(response.id, RequestId::from(7)); + assert!(response.error.is_none()); + assert_eq!(response.result, Some(serde_json::json!({"ok": true}))); + } + other => panic!("unexpected message: {other:?}"), + } + } + + #[test] + fn complete_outgoing_returns_none_for_untracked_response() { + let (sender, receiver) = unbounded(); + let mut inflight = InflightRequests::new(sender); + assert!(!inflight.complete_outgoing(RequestId::from(11))); + assert!(receiver.try_recv().is_err()); + } +} diff --git a/crates/jrsonnet-lsp/src/protocol/mod.rs b/crates/jrsonnet-lsp/src/protocol/mod.rs new file mode 100644 index 00000000..9cd9c900 
--- /dev/null +++ b/crates/jrsonnet-lsp/src/protocol/mod.rs @@ -0,0 +1 @@ +pub(crate) mod inflight_requests; diff --git a/crates/jrsonnet-lsp/src/server.rs b/crates/jrsonnet-lsp/src/server.rs index 6c696d90..012ddf84 100644 --- a/crates/jrsonnet-lsp/src/server.rs +++ b/crates/jrsonnet-lsp/src/server.rs @@ -54,6 +54,7 @@ use crate::{ analysis::{tanka::effective_import_roots, EvalConfig, Evaluator}, async_diagnostics::{AsyncDiagnostics, DiagnosticsConfig}, config::ServerConfig, + protocol::inflight_requests::InflightRequests, }; /// Shared server configuration. @@ -77,6 +78,8 @@ pub struct Server { evaluator: Option>, /// Async diagnostics runner. diagnostics: AsyncDiagnostics, + /// In-flight request tracker and response boundary. + inflight_requests: InflightRequests, /// Channel for async request responses. request_response_sender: Sender, /// Channel for async request responses. @@ -113,6 +116,7 @@ impl Server { let import_graph = Arc::new(RwLock::new(ImportGraph::new())); let type_cache = new_shared_cache(Arc::clone(&global_types)); let (request_response_sender, request_response_receiver) = crossbeam_channel::unbounded(); + let inflight_requests = InflightRequests::new(connection.sender.clone()); let diagnostics = AsyncDiagnostics::new(DiagnosticsConfig { evaluator: None, documents: Arc::clone(&documents), @@ -130,6 +134,7 @@ impl Server { config: Arc::new(RwLock::new(ServerConfig::default())), evaluator: None, diagnostics, + inflight_requests, request_response_sender, request_response_receiver, shutdown_requested: false, @@ -544,7 +549,9 @@ impl Server { debug!("Diagnostics channel closed"); } SelectResult::AsyncRequestResponse(Ok(response)) => { - self.connection.sender.send(Message::Response(response))?; + if !self.inflight_requests.send_response(response)? 
{ + debug!("Dropping async response for non-pending request"); + } } SelectResult::AsyncRequestResponse(Err(_)) => { debug!("Async request response channel closed"); @@ -562,20 +569,25 @@ impl Server { match msg { Message::Request(req) => { if self.shutdown_requested { - // After shutdown, only respond with errors - let resp = Response::new_err( + // After shutdown, only respond with errors. + self.inflight_requests + .register_incoming(req.id.clone(), req.method.as_str()); + let _ = self.inflight_requests.send_err( req.id, - lsp_server::ErrorCode::InvalidRequest as i32, - "Server is shutting down".to_string(), - ); - self.connection.sender.send(Message::Response(resp))?; + lsp_server::ErrorCode::InvalidRequest, + "Server is shutting down", + )?; } else { self.handle_request(req)?; } Ok(false) } Message::Response(resp) => { - debug!("Received response: {:?}", resp.id); + if self.inflight_requests.complete_outgoing(resp.id.clone()) { + debug!("Received response for outgoing request {}", resp.id); + } else { + debug!("Received untracked response: {:?}", resp.id); + } Ok(false) } Message::Notification(notif) => self.handle_notification(notif), @@ -587,6 +599,9 @@ impl Server { debug!("Handling request: {} (id={})", req.method, req.id); let Request { id, method, params } = req; + self.inflight_requests + .register_incoming(id.clone(), method.as_str()); + match method.as_str() { Shutdown::METHOD => self.handle_shutdown_request(id), GotoDefinition::METHOD @@ -610,46 +625,43 @@ impl Server { | SemanticTokensFullRequest::METHOD | SemanticTokensRangeRequest::METHOD | CodeLensResolve::METHOD => self.handle_sync_request(id, method.as_str(), params), - _ => self.send_method_not_found_response(id, &method), + _ => { + let _ = self.send_method_not_found_response(id, &method)?; + Ok(()) + } } } fn handle_shutdown_request(&mut self, id: RequestId) -> Result<()> { info!("Shutdown request received"); self.shutdown_requested = true; - let resp = Response::new_ok(id, 
serde_json::Value::Null); - self.connection.sender.send(Message::Response(resp))?; + let _ = self.send_ok_response(id, serde_json::Value::Null)?; Ok(()) } - fn send_ok_response(&self, id: RequestId, result: T) -> Result<()> + fn send_ok_response(&mut self, id: RequestId, result: T) -> Result where T: Serialize, { - let resp = Response::new_ok(id, serde_json::to_value(result)?); - self.connection.sender.send(Message::Response(resp))?; - Ok(()) + self.inflight_requests.send_ok(id, result) } - fn send_method_not_found_response(&self, id: RequestId, method: &str) -> Result<()> { + fn send_method_not_found_response(&mut self, id: RequestId, method: &str) -> Result { warn!("Unhandled request: {method}"); - let resp = Response::new_err( + self.inflight_requests.send_err( id, - lsp_server::ErrorCode::MethodNotFound as i32, + lsp_server::ErrorCode::MethodNotFound, format!("Method not found: {method}"), - ); - self.connection.sender.send(Message::Response(resp))?; - Ok(()) + ) } - fn send_invalid_params_response(&self, id: RequestId, message: String) -> Result<()> { - let response = Response::new_err(id, lsp_server::ErrorCode::InvalidParams as i32, message); - self.connection.sender.send(Message::Response(response))?; - Ok(()) + fn send_invalid_params_response(&mut self, id: RequestId, message: String) -> Result { + self.inflight_requests + .send_err(id, lsp_server::ErrorCode::InvalidParams, message) } fn handle_sync_request( - &self, + &mut self, id: RequestId, method: &str, params: serde_json::Value, @@ -677,14 +689,18 @@ impl Server { } CodeLensResolve::METHOD => { let params: CodeLens = serde_json::from_value(params)?; - self.send_ok_response(id, Self::on_code_lens_resolve(params)) + let _ = self.send_ok_response(id, Self::on_code_lens_resolve(params))?; + Ok(()) + } + _ => { + let _ = self.send_method_not_found_response(id, method)?; + Ok(()) } - _ => self.send_method_not_found_response(id, method), } } fn handle_sync_typed( - &self, + &mut self, id: RequestId, params: 
serde_json::Value, handler: fn(&Self, &P) -> R, @@ -694,7 +710,8 @@ impl Server { R: Serialize, { let params: P = serde_json::from_value(params)?; - self.send_ok_response(id, handler(self, ¶ms)) + let _ = self.send_ok_response(id, handler(self, ¶ms))?; + Ok(()) } fn spawn_json_response(&self, id: RequestId, method: &'static str, compute: F) @@ -708,7 +725,7 @@ impl Server { } fn handle_async_request( - &self, + &mut self, id: RequestId, method: &str, params: serde_json::Value, @@ -780,15 +797,19 @@ impl Server { ExecuteCommand::METHOD => { let params: ExecuteCommandParams = serde_json::from_value(params)?; if !Self::is_supported_execute_command(¶ms.command) { - return self.send_invalid_params_response( + let _ = self.send_invalid_params_response( id, format!("Unknown execute command: {}", params.command), - ); + )?; + return Ok(()); } self.handle_async_execute_command(id, params); Ok(()) } - _ => self.send_method_not_found_response(id, method), + _ => { + let _ = self.send_method_not_found_response(id, method)?; + Ok(()) + } } } From ff0829c72ebc57d16dc897d577d23274e8e6294b Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Tue, 10 Feb 2026 10:53:58 +0000 Subject: [PATCH 043/210] lsp: route codeLens/resolve through typed inflight request --- .../src/protocol/inflight_requests.rs | 80 +++++++++++++++++++ crates/jrsonnet-lsp/src/server.rs | 30 +++++-- crates/jrsonnet-lsp/tests/integration_test.rs | 53 ++++++++++++ crates/jrsonnet-lsp/tests/stress_tests.rs | 20 +++-- 4 files changed, 171 insertions(+), 12 deletions(-) diff --git a/crates/jrsonnet-lsp/src/protocol/inflight_requests.rs b/crates/jrsonnet-lsp/src/protocol/inflight_requests.rs index 82428f7f..c475be25 100644 --- a/crates/jrsonnet-lsp/src/protocol/inflight_requests.rs +++ b/crates/jrsonnet-lsp/src/protocol/inflight_requests.rs @@ -1,3 +1,5 @@ +use std::marker::PhantomData; + use anyhow::Result; use crossbeam_channel::Sender; use lsp_server::{ErrorCode, Message, ReqQueue, RequestId, Response}; @@ -14,6 +16,22 @@ 
pub(crate) struct InflightRequests { sender: Sender, } +#[derive(Debug)] +pub(crate) struct IncomingRequest { + id: RequestId, + _marker: PhantomData R>, +} + +impl IncomingRequest { + #[must_use] + pub(crate) fn new(id: RequestId) -> Self { + Self { + id, + _marker: PhantomData, + } + } +} + impl InflightRequests { #[must_use] pub(crate) fn new(sender: Sender) -> Self { @@ -58,6 +76,35 @@ impl InflightRequests { self.send_response(Response::new_err(id, code as i32, message.into())) } + #[must_use] + pub(crate) fn typed_incoming(id: RequestId) -> IncomingRequest { + IncomingRequest::new(id) + } + + pub(crate) fn send_typed_ok( + &mut self, + request: IncomingRequest, + result: R::Result, + ) -> Result + where + R: lsp_types::request::Request, + R::Result: Serialize, + { + self.send_ok(request.id, result) + } + + pub(crate) fn send_typed_err( + &mut self, + request: IncomingRequest, + code: ErrorCode, + message: impl Into, + ) -> Result + where + R: lsp_types::request::Request, + { + self.send_err(request.id, code, message) + } + pub(crate) fn complete_outgoing(&mut self, id: RequestId) -> bool { self.queue.outgoing.complete(id).is_some() } @@ -67,6 +114,7 @@ impl InflightRequests { mod tests { use crossbeam_channel::unbounded; use lsp_server::{Message, RequestId}; + use lsp_types::request::{CodeLensResolve, Request as _}; use super::InflightRequests; @@ -104,4 +152,36 @@ mod tests { assert!(!inflight.complete_outgoing(RequestId::from(11))); assert!(receiver.try_recv().is_err()); } + + #[test] + fn send_typed_ok_uses_typed_handle() { + let (sender, receiver) = unbounded(); + let mut inflight = InflightRequests::new(sender); + let id = RequestId::from(15); + inflight.register_incoming(id.clone(), CodeLensResolve::METHOD); + + let request = InflightRequests::typed_incoming::(id); + let expected = lsp_types::CodeLens { + range: lsp_types::Range { + start: lsp_types::Position::new(0, 0), + end: lsp_types::Position::new(0, 1), + }, + command: None, + data: None, + }; + 
assert!(inflight.send_typed_ok(request, expected.clone()).unwrap()); + + let message = receiver.recv().unwrap(); + match message { + Message::Response(response) => { + assert_eq!(response.id, RequestId::from(15)); + assert!(response.error.is_none()); + assert_eq!( + response.result, + Some(serde_json::to_value(expected).unwrap()) + ); + } + other => panic!("unexpected message: {other:?}"), + } + } } diff --git a/crates/jrsonnet-lsp/src/server.rs b/crates/jrsonnet-lsp/src/server.rs index 012ddf84..99b90e43 100644 --- a/crates/jrsonnet-lsp/src/server.rs +++ b/crates/jrsonnet-lsp/src/server.rs @@ -687,11 +687,7 @@ impl Server { SemanticTokensRangeRequest::METHOD => { self.handle_sync_typed(id, params, Self::on_semantic_tokens_range) } - CodeLensResolve::METHOD => { - let params: CodeLens = serde_json::from_value(params)?; - let _ = self.send_ok_response(id, Self::on_code_lens_resolve(params))?; - Ok(()) - } + CodeLensResolve::METHOD => self.handle_code_lens_resolve_typed(id, params), _ => { let _ = self.send_method_not_found_response(id, method)?; Ok(()) @@ -714,6 +710,30 @@ impl Server { Ok(()) } + fn handle_code_lens_resolve_typed( + &mut self, + id: RequestId, + params: serde_json::Value, + ) -> Result<()> { + let request = InflightRequests::typed_incoming::(id); + let params: CodeLens = match serde_json::from_value(params) { + Ok(params) => params, + Err(err) => { + let _ = self.inflight_requests.send_typed_err( + request, + lsp_server::ErrorCode::InvalidParams, + format!("Invalid params for {}: {err}", CodeLensResolve::METHOD), + )?; + return Ok(()); + } + }; + + let _ = self + .inflight_requests + .send_typed_ok(request, Self::on_code_lens_resolve(params))?; + Ok(()) + } + fn spawn_json_response(&self, id: RequestId, method: &'static str, compute: F) where R: Serialize + Send + 'static, diff --git a/crates/jrsonnet-lsp/tests/integration_test.rs b/crates/jrsonnet-lsp/tests/integration_test.rs index b929741d..fb7ad45a 100644 --- 
a/crates/jrsonnet-lsp/tests/integration_test.rs +++ b/crates/jrsonnet-lsp/tests/integration_test.rs @@ -2827,6 +2827,59 @@ fn test_code_lens_resolve_request() { .expect("Server thread should exit cleanly"); } +#[test] +fn test_code_lens_resolve_invalid_params_returns_invalid_params_error() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + client_conn + .sender + .send(Message::Request(Request::new( + 2.into(), + CodeLensResolve::METHOD.to_string(), + json!({"not": "a code lens"}), + ))) + .unwrap(); + + let response = recv_response(&client_conn, 2); + assert_eq!(response.result, None); + let error = response + .error + .expect("invalid code lens resolve params should return an error"); + assert_eq!(error.code, lsp_server::ErrorCode::InvalidParams as i32); + assert!( + error + .message + .starts_with("Invalid params for codeLens/resolve:"), + "unexpected error message: {}", + error.message + ); + assert_eq!(error.data, None); + + client_conn + .sender + .send(Message::Request(shutdown_request(3))) + .unwrap(); + let _ = recv_response(&client_conn, 3); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + #[test] fn test_watched_file_refreshes_unopened_importers_for_references() { let tmp = TempDir::new().expect("tempdir should be created"); diff --git a/crates/jrsonnet-lsp/tests/stress_tests.rs b/crates/jrsonnet-lsp/tests/stress_tests.rs index 5b8b4f7c..662e8611 100644 --- a/crates/jrsonnet-lsp/tests/stress_tests.rs +++ b/crates/jrsonnet-lsp/tests/stress_tests.rs @@ -559,13 +559,19 @@ fn test_large_document() { .send(Message::Request(hover_request(100, uri, 0, 7))) 
.unwrap(); - let response = client_conn - .receiver - .recv_timeout(Duration::from_secs(5)) - .expect("Server should respond to hover on large document"); - assert_matches!(response, Message::Response(resp) => { - assert!(resp.error.is_none(), "Hover should succeed on large document"); - }); + loop { + match client_conn.receiver.recv_timeout(Duration::from_secs(5)) { + Ok(Message::Response(resp)) => { + assert!( + resp.error.is_none(), + "Hover should succeed on large document" + ); + break; + } + Ok(Message::Notification(_) | Message::Request(_)) => {} + Err(err) => panic!("Server should respond to hover on large document: {err:?}"), + } + } // Test goto definition in the middle let middle_line = num_locals + (num_locals / 2); From babef6f2eaba865fc4dd1f1cf2d808440a3d49e6 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Tue, 10 Feb 2026 11:08:06 +0000 Subject: [PATCH 044/210] lsp: route all requests through typed inflight lifecycle --- .../src/protocol/inflight_requests.rs | 134 +++++-- crates/jrsonnet-lsp/src/server.rs | 346 +++++++++--------- .../jrsonnet-lsp/src/server/async_requests.rs | 19 +- docs/lsp/ARCHITECTURE.md | 19 + docs/lsp/HANDLERS.md | 6 +- 5 files changed, 314 insertions(+), 210 deletions(-) diff --git a/crates/jrsonnet-lsp/src/protocol/inflight_requests.rs b/crates/jrsonnet-lsp/src/protocol/inflight_requests.rs index c475be25..b69f7b97 100644 --- a/crates/jrsonnet-lsp/src/protocol/inflight_requests.rs +++ b/crates/jrsonnet-lsp/src/protocol/inflight_requests.rs @@ -6,8 +6,8 @@ use lsp_server::{ErrorCode, Message, ReqQueue, RequestId, Response}; use serde::Serialize; #[derive(Debug, Clone, PartialEq, Eq)] -pub(crate) struct IncomingRequestMeta { - pub(crate) method: String, +struct IncomingRequestMeta { + method: String, } #[derive(Debug)] @@ -22,14 +22,32 @@ pub(crate) struct IncomingRequest { _marker: PhantomData R>, } +#[derive(Debug)] +pub(crate) struct UnknownIncomingRequest { + id: RequestId, + method: String, +} + impl IncomingRequest { 
#[must_use] - pub(crate) fn new(id: RequestId) -> Self { + fn new(id: RequestId) -> Self { Self { id, _marker: PhantomData, } } + + #[must_use] + pub(crate) fn into_id(self) -> RequestId { + self.id + } +} + +impl UnknownIncomingRequest { + #[must_use] + pub(crate) fn method(&self) -> &str { + &self.method + } } impl InflightRequests { @@ -41,47 +59,59 @@ impl InflightRequests { } } - pub(crate) fn register_incoming(&mut self, id: RequestId, method: &str) { + fn register_incoming(&mut self, id: RequestId, method: &str) { self.queue.incoming.register( id, IncomingRequestMeta { - method: method.to_string(), + method: method.to_owned(), }, ); } - pub(crate) fn send_response(&mut self, response: Response) -> Result { - if self.queue.incoming.complete(&response.id).is_none() { - return Ok(false); + pub(crate) fn begin(&mut self, id: RequestId) -> IncomingRequest + where + R: lsp_types::request::Request, + { + self.register_incoming(id.clone(), R::METHOD); + IncomingRequest::new(id) + } + + pub(crate) fn begin_unknown(&mut self, id: RequestId, method: &str) -> UnknownIncomingRequest { + self.register_incoming(id.clone(), method); + UnknownIncomingRequest { + id, + method: method.to_owned(), } + } + + pub(crate) fn send_inflight_response(&mut self, response: Response) -> Result { + let Some(meta) = self.queue.incoming.complete(&response.id) else { + return Ok(false); + }; + debug_assert!(!meta.method.is_empty()); self.sender.send(Message::Response(response))?; Ok(true) } - pub(crate) fn send_ok(&mut self, id: RequestId, result: T) -> Result + fn send_ok_by_id(&mut self, id: RequestId, result: T) -> Result where T: Serialize, { let response = Response::new_ok(id, serde_json::to_value(result)?); - self.send_response(response) + self.send_inflight_response(response) } - pub(crate) fn send_err( + fn send_err_by_id( &mut self, id: RequestId, code: ErrorCode, message: impl Into, ) -> Result { - self.send_response(Response::new_err(id, code as i32, message.into())) + 
self.send_inflight_response(Response::new_err(id, code as i32, message.into())) } - #[must_use] - pub(crate) fn typed_incoming(id: RequestId) -> IncomingRequest { - IncomingRequest::new(id) - } - - pub(crate) fn send_typed_ok( + pub(crate) fn send_ok( &mut self, request: IncomingRequest, result: R::Result, @@ -90,10 +120,10 @@ impl InflightRequests { R: lsp_types::request::Request, R::Result: Serialize, { - self.send_ok(request.id, result) + self.send_ok_by_id(request.id, result) } - pub(crate) fn send_typed_err( + pub(crate) fn send_err( &mut self, request: IncomingRequest, code: ErrorCode, @@ -102,7 +132,16 @@ impl InflightRequests { where R: lsp_types::request::Request, { - self.send_err(request.id, code, message) + self.send_err_by_id(request.id, code, message) + } + + pub(crate) fn send_unknown_err( + &mut self, + request: UnknownIncomingRequest, + code: ErrorCode, + message: impl Into, + ) -> Result { + self.send_err_by_id(request.id, code, message) } pub(crate) fn complete_outgoing(&mut self, id: RequestId) -> bool { @@ -113,25 +152,28 @@ impl InflightRequests { #[cfg(test)] mod tests { use crossbeam_channel::unbounded; - use lsp_server::{Message, RequestId}; - use lsp_types::request::{CodeLensResolve, Request as _}; + use lsp_server::{ErrorCode, Message, RequestId, Response}; + use lsp_types::request::CodeLensResolve; use super::InflightRequests; #[test] - fn send_ok_requires_registered_incoming_id() { + fn send_inflight_response_requires_registered_request_id() { let (sender, receiver) = unbounded(); let mut inflight = InflightRequests::new(sender); - let id = RequestId::from(7); + assert!(!inflight - .send_ok(id.clone(), serde_json::json!({"ok": true})) + .send_inflight_response(Response::new_ok( + id.clone(), + serde_json::json!({"ok": true}), + )) .unwrap()); assert!(receiver.try_recv().is_err()); - inflight.register_incoming(id.clone(), "example/method"); + inflight.begin_unknown(id.clone(), "example/method"); assert!(inflight - .send_ok(id, 
serde_json::json!({"ok": true})) + .send_inflight_response(Response::new_ok(id, serde_json::json!({"ok": true}))) .unwrap()); let message = receiver.recv().unwrap(); @@ -154,13 +196,11 @@ mod tests { } #[test] - fn send_typed_ok_uses_typed_handle() { + fn send_ok_uses_typed_handle() { let (sender, receiver) = unbounded(); let mut inflight = InflightRequests::new(sender); let id = RequestId::from(15); - inflight.register_incoming(id.clone(), CodeLensResolve::METHOD); - - let request = InflightRequests::typed_incoming::(id); + let request = inflight.begin::(id); let expected = lsp_types::CodeLens { range: lsp_types::Range { start: lsp_types::Position::new(0, 0), @@ -169,7 +209,7 @@ mod tests { command: None, data: None, }; - assert!(inflight.send_typed_ok(request, expected.clone()).unwrap()); + assert!(inflight.send_ok(request, expected.clone()).unwrap()); let message = receiver.recv().unwrap(); match message { @@ -184,4 +224,32 @@ mod tests { other => panic!("unexpected message: {other:?}"), } } + + #[test] + fn send_unknown_err_uses_unknown_handle() { + let (sender, receiver) = unbounded(); + let mut inflight = InflightRequests::new(sender); + + let request = inflight.begin_unknown(RequestId::from(23), "custom/method"); + assert_eq!(request.method(), "custom/method"); + assert!(inflight + .send_unknown_err( + request, + ErrorCode::MethodNotFound, + "Method not found: custom/method", + ) + .unwrap()); + + let message = receiver.recv().unwrap(); + match message { + Message::Response(response) => { + assert_eq!(response.id, RequestId::from(23)); + let error = response.error.expect("expected method-not-found error"); + assert_eq!(error.code, ErrorCode::MethodNotFound as i32); + assert_eq!(error.message, "Method not found: custom/method"); + assert_eq!(response.result, None); + } + other => panic!("unexpected message: {other:?}"), + } + } } diff --git a/crates/jrsonnet-lsp/src/server.rs b/crates/jrsonnet-lsp/src/server.rs index 99b90e43..5e804f28 100644 --- 
a/crates/jrsonnet-lsp/src/server.rs +++ b/crates/jrsonnet-lsp/src/server.rs @@ -39,10 +39,10 @@ use lsp_types::{ DidOpenTextDocumentParams, DidSaveTextDocumentParams, DocumentFormattingParams, DocumentHighlight, DocumentHighlightParams, DocumentSymbolParams, DocumentSymbolResponse, ExecuteCommandOptions, ExecuteCommandParams, FileChangeType, HoverProviderCapability, - InitializeParams, InitializeResult, OneOf, PrepareRenameResponse, SemanticTokens, - SemanticTokensFullOptions, SemanticTokensOptions, SemanticTokensParams, - SemanticTokensRangeParams, SemanticTokensServerCapabilities, ServerCapabilities, ServerInfo, - SignatureHelp, SignatureHelpOptions, SignatureHelpParams, TextDocumentPositionParams, + InitializeParams, InitializeResult, OneOf, PrepareRenameResponse, SemanticTokensFullOptions, + SemanticTokensOptions, SemanticTokensParams, SemanticTokensRangeParams, + SemanticTokensServerCapabilities, ServerCapabilities, ServerInfo, SignatureHelp, + SignatureHelpOptions, SignatureHelpParams, TextDocumentPositionParams, TextDocumentSyncCapability, TextDocumentSyncKind, TextEdit, WorkDoneProgressOptions, }; use parking_lot::RwLock; @@ -54,7 +54,7 @@ use crate::{ analysis::{tanka::effective_import_roots, EvalConfig, Evaluator}, async_diagnostics::{AsyncDiagnostics, DiagnosticsConfig}, config::ServerConfig, - protocol::inflight_requests::InflightRequests, + protocol::inflight_requests::{IncomingRequest, InflightRequests}, }; /// Shared server configuration. @@ -549,7 +549,7 @@ impl Server { debug!("Diagnostics channel closed"); } SelectResult::AsyncRequestResponse(Ok(response)) => { - if !self.inflight_requests.send_response(response)? { + if !self.inflight_requests.send_inflight_response(response)? { debug!("Dropping async response for non-pending request"); } } @@ -570,10 +570,10 @@ impl Server { Message::Request(req) => { if self.shutdown_requested { // After shutdown, only respond with errors. 
- self.inflight_requests - .register_incoming(req.id.clone(), req.method.as_str()); - let _ = self.inflight_requests.send_err( - req.id, + let Request { id, method, .. } = req; + let request = self.inflight_requests.begin_unknown(id, method.as_str()); + let _ = self.inflight_requests.send_unknown_err( + request, lsp_server::ErrorCode::InvalidRequest, "Server is shutting down", )?; @@ -599,11 +599,11 @@ impl Server { debug!("Handling request: {} (id={})", req.method, req.id); let Request { id, method, params } = req; - self.inflight_requests - .register_incoming(id.clone(), method.as_str()); - match method.as_str() { - Shutdown::METHOD => self.handle_shutdown_request(id), + Shutdown::METHOD => { + let request = self.inflight_requests.begin::(id); + self.handle_shutdown_request(request) + } GotoDefinition::METHOD | GotoDeclaration::METHOD | GotoTypeDefinition::METHOD @@ -626,40 +626,26 @@ impl Server { | SemanticTokensRangeRequest::METHOD | CodeLensResolve::METHOD => self.handle_sync_request(id, method.as_str(), params), _ => { - let _ = self.send_method_not_found_response(id, &method)?; + let request = self.inflight_requests.begin_unknown(id, method.as_str()); + warn!("Unhandled request: {}", request.method()); + let message = format!("Method not found: {}", request.method()); + let _ = self.inflight_requests.send_unknown_err( + request, + lsp_server::ErrorCode::MethodNotFound, + message, + )?; Ok(()) } } } - fn handle_shutdown_request(&mut self, id: RequestId) -> Result<()> { + fn handle_shutdown_request(&mut self, request: IncomingRequest) -> Result<()> { info!("Shutdown request received"); self.shutdown_requested = true; - let _ = self.send_ok_response(id, serde_json::Value::Null)?; + let _ = self.inflight_requests.send_ok(request, ())?; Ok(()) } - fn send_ok_response(&mut self, id: RequestId, result: T) -> Result - where - T: Serialize, - { - self.inflight_requests.send_ok(id, result) - } - - fn send_method_not_found_response(&mut self, id: RequestId, method: 
&str) -> Result { - warn!("Unhandled request: {method}"); - self.inflight_requests.send_err( - id, - lsp_server::ErrorCode::MethodNotFound, - format!("Method not found: {method}"), - ) - } - - fn send_invalid_params_response(&mut self, id: RequestId, message: String) -> Result { - self.inflight_requests - .send_err(id, lsp_server::ErrorCode::InvalidParams, message) - } - fn handle_sync_request( &mut self, id: RequestId, @@ -668,61 +654,77 @@ impl Server { ) -> Result<()> { match method { DocumentSymbolRequest::METHOD => { - self.handle_sync_typed(id, params, Self::on_document_symbol) + let request = self.inflight_requests.begin::(id); + self.handle_sync_typed(request, params, Self::on_document_symbol) } DocumentHighlightRequest::METHOD => { - self.handle_sync_typed(id, params, Self::on_document_highlight) + let request = self.inflight_requests.begin::(id); + self.handle_sync_typed(request, params, Self::on_document_highlight) + } + CodeActionRequest::METHOD => { + let request = self.inflight_requests.begin::(id); + self.handle_sync_typed(request, params, Self::on_code_action) } - CodeActionRequest::METHOD => self.handle_sync_typed(id, params, Self::on_code_action), SignatureHelpRequest::METHOD => { - self.handle_sync_typed(id, params, Self::on_signature_help) + let request = self.inflight_requests.begin::(id); + self.handle_sync_typed(request, params, Self::on_signature_help) + } + Formatting::METHOD => { + let request = self.inflight_requests.begin::(id); + self.handle_sync_typed(request, params, Self::on_formatting) } - Formatting::METHOD => self.handle_sync_typed(id, params, Self::on_formatting), PrepareRenameRequest::METHOD => { - self.handle_sync_typed(id, params, Self::on_prepare_rename) + let request = self.inflight_requests.begin::(id); + self.handle_sync_typed(request, params, Self::on_prepare_rename) } SemanticTokensFullRequest::METHOD => { - self.handle_sync_typed(id, params, Self::on_semantic_tokens_full) + let request = self + .inflight_requests + 
.begin::(id); + self.handle_sync_typed(request, params, Self::on_semantic_tokens_full) } SemanticTokensRangeRequest::METHOD => { - self.handle_sync_typed(id, params, Self::on_semantic_tokens_range) + let request = self + .inflight_requests + .begin::(id); + self.handle_sync_typed(request, params, Self::on_semantic_tokens_range) + } + CodeLensResolve::METHOD => { + let request = self.inflight_requests.begin::(id); + self.handle_sync_typed(request, params, Self::resolve_code_lens) } - CodeLensResolve::METHOD => self.handle_code_lens_resolve_typed(id, params), _ => { - let _ = self.send_method_not_found_response(id, method)?; + let request = self.inflight_requests.begin_unknown(id, method); + warn!("Unhandled request: {}", request.method()); + let message = format!("Method not found: {}", request.method()); + let _ = self.inflight_requests.send_unknown_err( + request, + lsp_server::ErrorCode::MethodNotFound, + message, + )?; Ok(()) } } } - fn handle_sync_typed( + fn handle_sync_typed( &mut self, - id: RequestId, + request: IncomingRequest, params: serde_json::Value, - handler: fn(&Self, &P) -> R, + handler: fn(&Self, &R::Params) -> R::Result, ) -> Result<()> where - P: DeserializeOwned, - R: Serialize, + R: lsp_types::request::Request, + R::Params: DeserializeOwned, + R::Result: Serialize, { - let params: P = serde_json::from_value(params)?; - let _ = self.send_ok_response(id, handler(self, ¶ms))?; - Ok(()) - } - - fn handle_code_lens_resolve_typed( - &mut self, - id: RequestId, - params: serde_json::Value, - ) -> Result<()> { - let request = InflightRequests::typed_incoming::(id); - let params: CodeLens = match serde_json::from_value(params) { + let params: R::Params = match serde_json::from_value(params) { Ok(params) => params, Err(err) => { - let _ = self.inflight_requests.send_typed_err( + let _ = self.inflight_requests.send_err( request, lsp_server::ErrorCode::InvalidParams, - format!("Invalid params for {}: {err}", CodeLensResolve::METHOD), + format!("Invalid 
params for {}: {err}", R::METHOD), )?; return Ok(()); } @@ -730,16 +732,18 @@ impl Server { let _ = self .inflight_requests - .send_typed_ok(request, Self::on_code_lens_resolve(params))?; + .send_ok(request, handler(self, ¶ms))?; Ok(()) } - fn spawn_json_response(&self, id: RequestId, method: &'static str, compute: F) + fn spawn_typed_json_response(&self, request: IncomingRequest, compute: F) where - R: Serialize + Send + 'static, - F: FnOnce() -> R + Send + 'static, + R: lsp_types::request::Request, + R::Result: Serialize + Send + 'static, + F: FnOnce() -> R::Result + Send + 'static, { - self.spawn_async_response(id, method, move || { + let id = request.into_id(); + self.spawn_async_response(id, R::METHOD, move || { serde_json::to_value(compute()).map_err(Into::into) }); } @@ -751,110 +755,123 @@ impl Server { params: serde_json::Value, ) -> Result<()> { match method { - GotoDefinition::METHOD => self.handle_async_typed( - id, - GotoDefinition::METHOD, - params, - AsyncRequestContext::goto_definition, - ), - GotoDeclaration::METHOD => self.handle_async_typed( - id, - GotoDeclaration::METHOD, - params, - AsyncRequestContext::goto_declaration, - ), - GotoImplementation::METHOD => self.handle_async_typed( - id, - GotoImplementation::METHOD, - params, - AsyncRequestContext::goto_implementation, - ), - GotoTypeDefinition::METHOD => self.handle_async_typed( - id, - GotoTypeDefinition::METHOD, - params, - AsyncRequestContext::goto_type_definition, - ), - HoverRequest::METHOD => self.handle_async_typed( - id, - HoverRequest::METHOD, - params, - AsyncRequestContext::hover, - ), - InlayHintRequest::METHOD => self.handle_async_typed( - id, - InlayHintRequest::METHOD, - params, - AsyncRequestContext::inlay_hints, - ), - Completion::METHOD => self.handle_async_typed( - id, - Completion::METHOD, - params, - AsyncRequestContext::completion, - ), - References::METHOD => self.handle_async_typed( - id, - References::METHOD, - params, - AsyncRequestContext::references, - ), - 
WorkspaceSymbolRequest::METHOD => self.handle_async_typed( - id, - WorkspaceSymbolRequest::METHOD, - params, - AsyncRequestContext::workspace_symbol, - ), + GotoDefinition::METHOD => { + let request = self.inflight_requests.begin::(id); + self.handle_async_typed(request, params, AsyncRequestContext::goto_definition) + } + GotoDeclaration::METHOD => { + let request = self.inflight_requests.begin::(id); + self.handle_async_typed(request, params, AsyncRequestContext::goto_declaration) + } + GotoImplementation::METHOD => { + let request = self.inflight_requests.begin::(id); + self.handle_async_typed(request, params, AsyncRequestContext::goto_implementation) + } + GotoTypeDefinition::METHOD => { + let request = self.inflight_requests.begin::(id); + self.handle_async_typed(request, params, AsyncRequestContext::goto_type_definition) + } + HoverRequest::METHOD => { + let request = self.inflight_requests.begin::(id); + self.handle_async_typed(request, params, AsyncRequestContext::hover) + } + InlayHintRequest::METHOD => { + let request = self.inflight_requests.begin::(id); + self.handle_async_typed(request, params, AsyncRequestContext::inlay_hints) + } + Completion::METHOD => { + let request = self.inflight_requests.begin::(id); + self.handle_async_typed(request, params, AsyncRequestContext::completion) + } + References::METHOD => { + let request = self.inflight_requests.begin::(id); + self.handle_async_typed(request, params, AsyncRequestContext::references) + } + WorkspaceSymbolRequest::METHOD => { + let request = self.inflight_requests.begin::(id); + self.handle_async_typed(request, params, AsyncRequestContext::workspace_symbol) + } Rename::METHOD => { - self.handle_async_typed(id, Rename::METHOD, params, AsyncRequestContext::rename) + let request = self.inflight_requests.begin::(id); + self.handle_async_typed(request, params, AsyncRequestContext::rename) + } + CodeLensRequest::METHOD => { + let request = self.inflight_requests.begin::(id); + 
self.handle_async_typed(request, params, AsyncRequestContext::code_lens) } - CodeLensRequest::METHOD => self.handle_async_typed( - id, - CodeLensRequest::METHOD, - params, - AsyncRequestContext::code_lens, - ), ExecuteCommand::METHOD => { - let params: ExecuteCommandParams = serde_json::from_value(params)?; - if !Self::is_supported_execute_command(¶ms.command) { - let _ = self.send_invalid_params_response( - id, - format!("Unknown execute command: {}", params.command), - )?; - return Ok(()); - } - self.handle_async_execute_command(id, params); - Ok(()) + let request = self.inflight_requests.begin::(id); + self.handle_async_execute_command(request, params) } _ => { - let _ = self.send_method_not_found_response(id, method)?; + let request = self.inflight_requests.begin_unknown(id, method); + warn!("Unhandled request: {}", request.method()); + let message = format!("Method not found: {}", request.method()); + let _ = self.inflight_requests.send_unknown_err( + request, + lsp_server::ErrorCode::MethodNotFound, + message, + )?; Ok(()) } } } - fn handle_async_typed( - &self, - id: RequestId, - method: &'static str, + fn handle_async_typed( + &mut self, + request: IncomingRequest, params: serde_json::Value, - handler: fn(&AsyncRequestContext, &P) -> R, + handler: fn(&AsyncRequestContext, &R::Params) -> R::Result, ) -> Result<()> where - P: DeserializeOwned + Send + 'static, - R: Serialize + Send + 'static, + R: lsp_types::request::Request, + R::Params: DeserializeOwned + Send + 'static, + R::Result: Serialize + Send + 'static, { - let params: P = serde_json::from_value(params)?; + let params: R::Params = match serde_json::from_value(params) { + Ok(params) => params, + Err(err) => { + let _ = self.inflight_requests.send_err( + request, + lsp_server::ErrorCode::InvalidParams, + format!("Invalid params for {}: {err}", R::METHOD), + )?; + return Ok(()); + } + }; let context = self.async_request_context(); - self.spawn_json_response(id, method, move || handler(&context, ¶ms)); 
+ self.spawn_typed_json_response(request, move || handler(&context, ¶ms)); Ok(()) } - fn handle_async_execute_command(&self, id: RequestId, params: ExecuteCommandParams) { + fn handle_async_execute_command( + &mut self, + request: IncomingRequest, + params: serde_json::Value, + ) -> Result<()> { + let params: ExecuteCommandParams = match serde_json::from_value(params) { + Ok(params) => params, + Err(err) => { + let _ = self.inflight_requests.send_err( + request, + lsp_server::ErrorCode::InvalidParams, + format!("Invalid params for {}: {err}", ExecuteCommand::METHOD), + )?; + return Ok(()); + } + }; + if !Self::is_supported_execute_command(¶ms.command) { + let _ = self.inflight_requests.send_err( + request, + lsp_server::ErrorCode::InvalidParams, + format!("Unknown execute command: {}", params.command), + )?; + return Ok(()); + } + let context = self.async_request_context(); - self.spawn_json_response(id, ExecuteCommand::METHOD, move || { - context.execute_command(¶ms) - }); + self.spawn_typed_json_response(request, move || context.execute_command(¶ms)); + Ok(()) } /// Handle textDocument/documentSymbol request. @@ -941,29 +958,32 @@ impl Server { } /// Handle textDocument/semanticTokens/full request. - fn on_semantic_tokens_full(&self, params: &SemanticTokensParams) -> Option { + fn on_semantic_tokens_full( + &self, + params: &SemanticTokensParams, + ) -> Option { let uri = ¶ms.text_document.uri; let path = CanonicalPath::from_uri(uri)?; let doc = self.documents.get(&path)?; - Some(handlers::semantic_tokens(&doc)) + Some(handlers::semantic_tokens(&doc).into()) } /// Handle textDocument/semanticTokens/range request. 
fn on_semantic_tokens_range( &self, params: &SemanticTokensRangeParams, - ) -> Option { + ) -> Option { let uri = ¶ms.text_document.uri; let path = CanonicalPath::from_uri(uri)?; let doc = self.documents.get(&path)?; - Some(handlers::semantic_tokens_range(&doc, params.range)) + Some(handlers::semantic_tokens_range(&doc, params.range).into()) } /// Handle codeLens/resolve request. - fn on_code_lens_resolve(params: CodeLens) -> CodeLens { - handlers::resolve_code_lens(params) + fn resolve_code_lens(_server: &Self, params: &CodeLens) -> CodeLens { + handlers::resolve_code_lens(params.clone()) } } diff --git a/crates/jrsonnet-lsp/src/server/async_requests.rs b/crates/jrsonnet-lsp/src/server/async_requests.rs index da9906c2..00093f11 100644 --- a/crates/jrsonnet-lsp/src/server/async_requests.rs +++ b/crates/jrsonnet-lsp/src/server/async_requests.rs @@ -11,7 +11,7 @@ use lsp_types::{ GotoDefinitionParams, GotoDefinitionResponse, Hover, HoverParams, InlayHint, InlayHintParams, Location, PartialResultParams, Position, ReferenceContext, ReferenceParams, RenameParams, SymbolInformation, TextDocumentIdentifier, TextDocumentPositionParams, WorkDoneProgressParams, - WorkspaceEdit, WorkspaceSymbolParams, + WorkspaceEdit, WorkspaceSymbolParams, WorkspaceSymbolResponse, }; use parking_lot::RwLock; use rayon::prelude::*; @@ -252,7 +252,7 @@ impl AsyncRequestContext { pub(super) fn workspace_symbol( &self, params: &WorkspaceSymbolParams, - ) -> Option> { + ) -> Option { let query = ¶ms.query; let mut paths = { @@ -297,7 +297,7 @@ impl AsyncRequestContext { if all_symbols.is_empty() { return None; } - Some(all_symbols) + Some(WorkspaceSymbolResponse::Flat(all_symbols)) } pub(super) fn rename(&self, params: &RenameParams) -> Option { @@ -328,19 +328,14 @@ impl AsyncRequestContext { ) } - pub(super) fn code_lens(&self, params: &CodeLensParams) -> Vec { + pub(super) fn code_lens(&self, params: &CodeLensParams) -> Option> { let uri = ¶ms.text_document.uri; - let Some(path) = 
CanonicalPath::from_uri(uri) else { - return Vec::new(); - }; - let Some(doc) = self.documents.get(&path) else { - return Vec::new(); - }; - let doc = doc.clone(); + let path = CanonicalPath::from_uri(uri)?; + let doc = self.documents.get(&path)?.clone(); let config = handlers::CodeLensConfig::all(); let analysis = self.analyze_document(&path, &doc); - handlers::code_lens(&doc, uri, &config, Some(&analysis)) + Some(handlers::code_lens(&doc, uri, &config, Some(&analysis))) } pub(super) fn execute_command( diff --git a/docs/lsp/ARCHITECTURE.md b/docs/lsp/ARCHITECTURE.md index a4bb4ad1..0e4055f3 100644 --- a/docs/lsp/ARCHITECTURE.md +++ b/docs/lsp/ARCHITECTURE.md @@ -49,6 +49,7 @@ diagnostics and command execution. - `config: SharedConfig` - `evaluator: Option>` - `diagnostics: AsyncDiagnostics` +- `inflight_requests: InflightRequests` - async request response channels - shutdown flag @@ -96,6 +97,8 @@ Handled directly on the server thread: - `textDocument/formatting` - `textDocument/prepareRename` - `textDocument/semanticTokens/full` +- `textDocument/semanticTokens/range` +- `codeLens/resolve` - `shutdown` ### Asynchronous request handlers @@ -121,6 +124,22 @@ to documents, import graph, type cache, config, and dependency-aware analysis. import graph, not just currently open buffers. Results are ranked by match quality (exact, then prefix, then substring) and capped to 128 entries. +### In-Flight Request Boundary + +`Server` routes request lifecycle through +`crates/jrsonnet-lsp/src/protocol/inflight_requests.rs`. + +This layer: + +- registers incoming request IDs +- enforces typed request handling via `begin::()` +- sends typed success/error responses via `send_ok` and `send_err` +- has an explicit unknown-method fallback (`begin_unknown`, `send_unknown_err`) +- accepts async worker responses only for currently pending request IDs + (`send_inflight_response`) + +The main loop uses this boundary for all request responses after initialization. 
+ ## Advertised LSP Capabilities `server_capabilities()` currently advertises: diff --git a/docs/lsp/HANDLERS.md b/docs/lsp/HANDLERS.md index 4e45b508..83c44502 100644 --- a/docs/lsp/HANDLERS.md +++ b/docs/lsp/HANDLERS.md @@ -55,12 +55,14 @@ Current request routing in `crates/jrsonnet-lsp/src/server.rs`: | `textDocument/semanticTokens/full` | `on_semantic_tokens_full` | handlers crate (`semantic_tokens`) | sync | no | | `textDocument/semanticTokens/range` | `on_semantic_tokens_range` | handlers crate (`semantic_tokens_range`) | sync | no | | `textDocument/codeLens` | async context (`code_lens`) | handlers crate (`code_lens`) | async | yes | -| `codeLens/resolve` | `on_code_lens_resolve` | handlers crate (`resolve_code_lens`) | sync | no | +| `codeLens/resolve` | `resolve_code_lens` | handlers crate (`resolve_code_lens`) | sync | no | | `workspace/executeCommand` | async context (`execute_command`) | server async context | async | no | | `shutdown` | direct in `handle_request` | server | sync | no | Async requests are sent back over the server's async response channel after -worker completion. +worker completion. All request handlers enter through the `InflightRequests` +protocol boundary, which tracks pending IDs and uses typed request handles for +method-specific responses. 
## Handler Details From b3456d6d76fa8c0b6240ca12be3e904572860801 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Tue, 10 Feb 2026 11:12:52 +0000 Subject: [PATCH 045/210] lsp: support request cancellation through inflight lifecycle --- .../src/protocol/inflight_requests.rs | 44 ++++++++++ crates/jrsonnet-lsp/src/server.rs | 32 +++++-- crates/jrsonnet-lsp/tests/integration_test.rs | 88 +++++++++++++++++-- docs/lsp/ARCHITECTURE.md | 3 + docs/lsp/HANDLERS.md | 4 +- 5 files changed, 155 insertions(+), 16 deletions(-) diff --git a/crates/jrsonnet-lsp/src/protocol/inflight_requests.rs b/crates/jrsonnet-lsp/src/protocol/inflight_requests.rs index b69f7b97..d3ad76ab 100644 --- a/crates/jrsonnet-lsp/src/protocol/inflight_requests.rs +++ b/crates/jrsonnet-lsp/src/protocol/inflight_requests.rs @@ -94,6 +94,19 @@ impl InflightRequests { Ok(true) } + pub(crate) fn cancel_request(&mut self, id: RequestId) -> Result { + let Some(meta) = self.queue.incoming.complete(&id) else { + return Ok(false); + }; + let message = format!("Request canceled: {}", meta.method); + self.sender.send(Message::Response(Response::new_err( + id, + ErrorCode::RequestCanceled as i32, + message, + )))?; + Ok(true) + } + fn send_ok_by_id(&mut self, id: RequestId, result: T) -> Result where T: Serialize, @@ -252,4 +265,35 @@ mod tests { other => panic!("unexpected message: {other:?}"), } } + + #[test] + fn cancel_request_sends_request_canceled_error_for_pending_request() { + let (sender, receiver) = unbounded(); + let mut inflight = InflightRequests::new(sender); + let id = RequestId::from(31); + inflight.begin_unknown(id.clone(), "textDocument/codeLens"); + + assert!(inflight.cancel_request(id).unwrap()); + + let message = receiver.recv().unwrap(); + match message { + Message::Response(response) => { + assert_eq!(response.id, RequestId::from(31)); + assert!(response.result.is_none()); + let error = response.error.expect("expected cancel error"); + assert_eq!(error.code, ErrorCode::RequestCanceled as i32); + 
assert_eq!(error.message, "Request canceled: textDocument/codeLens"); + } + other => panic!("unexpected message: {other:?}"), + } + } + + #[test] + fn cancel_request_ignores_untracked_request() { + let (sender, receiver) = unbounded(); + let mut inflight = InflightRequests::new(sender); + + assert!(!inflight.cancel_request(RequestId::from(41)).unwrap()); + assert!(receiver.try_recv().is_err()); + } } diff --git a/crates/jrsonnet-lsp/src/server.rs b/crates/jrsonnet-lsp/src/server.rs index 5e804f28..68497738 100644 --- a/crates/jrsonnet-lsp/src/server.rs +++ b/crates/jrsonnet-lsp/src/server.rs @@ -23,8 +23,9 @@ use jrsonnet_lsp_types::GlobalTyStore; use lsp_server::{Connection, Message, Notification, Request, RequestId, Response}; use lsp_types::{ notification::{ - DidChangeConfiguration, DidChangeTextDocument, DidChangeWatchedFiles, DidCloseTextDocument, - DidOpenTextDocument, DidSaveTextDocument, Notification as _, PublishDiagnostics, + Cancel, DidChangeConfiguration, DidChangeTextDocument, DidChangeWatchedFiles, + DidCloseTextDocument, DidOpenTextDocument, DidSaveTextDocument, Notification as _, + PublishDiagnostics, }, request::{ CodeActionRequest, CodeLensRequest, CodeLensResolve, Completion, DocumentHighlightRequest, @@ -39,10 +40,10 @@ use lsp_types::{ DidOpenTextDocumentParams, DidSaveTextDocumentParams, DocumentFormattingParams, DocumentHighlight, DocumentHighlightParams, DocumentSymbolParams, DocumentSymbolResponse, ExecuteCommandOptions, ExecuteCommandParams, FileChangeType, HoverProviderCapability, - InitializeParams, InitializeResult, OneOf, PrepareRenameResponse, SemanticTokensFullOptions, - SemanticTokensOptions, SemanticTokensParams, SemanticTokensRangeParams, - SemanticTokensServerCapabilities, ServerCapabilities, ServerInfo, SignatureHelp, - SignatureHelpOptions, SignatureHelpParams, TextDocumentPositionParams, + InitializeParams, InitializeResult, NumberOrString, OneOf, PrepareRenameResponse, + SemanticTokensFullOptions, SemanticTokensOptions, 
SemanticTokensParams, + SemanticTokensRangeParams, SemanticTokensServerCapabilities, ServerCapabilities, ServerInfo, + SignatureHelp, SignatureHelpOptions, SignatureHelpParams, TextDocumentPositionParams, TextDocumentSyncCapability, TextDocumentSyncKind, TextEdit, WorkDoneProgressOptions, }; use parking_lot::RwLock; @@ -992,6 +993,13 @@ impl Server { SUPPORTED_EXECUTE_COMMANDS.contains(&command) } + fn request_id_from_number_or_string(id: NumberOrString) -> RequestId { + match id { + NumberOrString::Number(id) => id.into(), + NumberOrString::String(id) => id.into(), + } + } + /// Handle an incoming notification. /// /// Returns true if exit notification was received. @@ -999,6 +1007,10 @@ impl Server { debug!("Handling notification: {}", notif.method); match notif.method.as_str() { + Cancel::METHOD => { + let params: lsp_types::CancelParams = serde_json::from_value(notif.params)?; + self.on_cancel_request(params)?; + } DidOpenTextDocument::METHOD => { let params: DidOpenTextDocumentParams = serde_json::from_value(notif.params)?; self.on_did_open(params); @@ -1035,6 +1047,14 @@ impl Server { Ok(false) } + fn on_cancel_request(&mut self, params: lsp_types::CancelParams) -> Result<()> { + let request_id = Self::request_id_from_number_or_string(params.id); + if !self.inflight_requests.cancel_request(request_id.clone())? { + debug!("Ignoring cancel request for non-pending id {}", request_id); + } + Ok(()) + } + /// Handle textDocument/didOpen notification. fn on_did_open(&self, params: DidOpenTextDocumentParams) { let uri = ¶ms.text_document.uri; diff --git a/crates/jrsonnet-lsp/tests/integration_test.rs b/crates/jrsonnet-lsp/tests/integration_test.rs index fb7ad45a..ab7b75a8 100644 --- a/crates/jrsonnet-lsp/tests/integration_test.rs +++ b/crates/jrsonnet-lsp/tests/integration_test.rs @@ -3,14 +3,14 @@ //! Uses in-process testing with channels rather than subprocess management, //! following patterns from ast-grep and simple-completion-language-server. 
-use std::{fs, thread, time::Duration}; +use std::{fmt::Write as _, fs, thread, time::Duration}; use assert_matches::assert_matches; use lsp_server::{Connection, Message, Notification, Request}; use lsp_types::{ notification::{ - DidChangeConfiguration, DidChangeWatchedFiles, DidCloseTextDocument, DidOpenTextDocument, - DidSaveTextDocument, Notification as _, PublishDiagnostics, + Cancel, DidChangeConfiguration, DidChangeWatchedFiles, DidCloseTextDocument, + DidOpenTextDocument, DidSaveTextDocument, Notification as _, PublishDiagnostics, }, request::{ CodeActionRequest, CodeLensRequest, CodeLensResolve, DocumentHighlightRequest, @@ -18,11 +18,12 @@ use lsp_types::{ Initialize, InlayHintRequest, References, Rename, Request as _, SemanticTokensRangeRequest, Shutdown, WorkspaceSymbolRequest, }, - DidChangeConfigurationParams, DidChangeWatchedFilesParams, DidCloseTextDocumentParams, - DidOpenTextDocumentParams, DidSaveTextDocumentParams, ExecuteCommandParams, FileChangeType, - FileEvent, GotoDefinitionParams, GotoDefinitionResponse, InitializeParams, PartialResultParams, - Position, ReferenceContext, ReferenceParams, RenameParams, SemanticTokensRangeParams, - TextDocumentIdentifier, TextDocumentItem, TextDocumentPositionParams, WorkDoneProgressParams, + CancelParams, DidChangeConfigurationParams, DidChangeWatchedFilesParams, + DidCloseTextDocumentParams, DidOpenTextDocumentParams, DidSaveTextDocumentParams, + ExecuteCommandParams, FileChangeType, FileEvent, GotoDefinitionParams, GotoDefinitionResponse, + InitializeParams, NumberOrString, PartialResultParams, Position, ReferenceContext, + ReferenceParams, RenameParams, SemanticTokensRangeParams, TextDocumentIdentifier, + TextDocumentItem, TextDocumentPositionParams, WorkDoneProgressParams, }; use serde_json::json; use tempfile::TempDir; @@ -74,6 +75,17 @@ fn exit_notification() -> Notification { Notification::new("exit".to_string(), json!({})) } +/// Helper to create a $/cancelRequest notification. 
+fn cancel_request_notification(request_id: i32) -> Notification { + let params = CancelParams { + id: NumberOrString::Number(request_id), + }; + Notification::new( + Cancel::METHOD.to_string(), + serde_json::to_value(params).unwrap(), + ) +} + /// Helper to create a didOpen notification. fn did_open_notification(uri: &str, text: &str) -> Notification { let params = DidOpenTextDocumentParams { @@ -2827,6 +2839,66 @@ fn test_code_lens_resolve_request() { .expect("Server thread should exit cleanly"); } +#[test] +fn test_cancel_request_returns_request_canceled_error() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + let uri = "file:///test/cancel-code-lens.jsonnet"; + let mut text = String::new(); + for index in 0..20_000 { + writeln!(&mut text, "local value_{index} = {index};") + .expect("writing to String should succeed"); + } + text.push_str("value_19999\n"); + + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, &text))) + .unwrap(); + + client_conn + .sender + .send(Message::Request(code_lens_request(2, uri))) + .unwrap(); + client_conn + .sender + .send(Message::Notification(cancel_request_notification(2))) + .unwrap(); + + let response = recv_response(&client_conn, 2); + assert_eq!(response.result, None); + let error = response + .error + .expect("cancelled request should return request-canceled error"); + assert_eq!(error.code, lsp_server::ErrorCode::RequestCanceled as i32); + assert_eq!(error.message, "Request canceled: textDocument/codeLens"); + assert_eq!(error.data, None); + + client_conn + .sender + .send(Message::Request(shutdown_request(3))) + .unwrap(); + let _ = recv_response(&client_conn, 3); + client_conn + .sender + 
.send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + #[test] fn test_code_lens_resolve_invalid_params_returns_invalid_params_error() { let (client_conn, server_conn) = Connection::memory(); diff --git a/docs/lsp/ARCHITECTURE.md b/docs/lsp/ARCHITECTURE.md index 0e4055f3..90d3888a 100644 --- a/docs/lsp/ARCHITECTURE.md +++ b/docs/lsp/ARCHITECTURE.md @@ -135,6 +135,8 @@ This layer: - enforces typed request handling via `begin::()` - sends typed success/error responses via `send_ok` and `send_err` - has an explicit unknown-method fallback (`begin_unknown`, `send_unknown_err`) +- supports cancellation of pending requests with `cancel_request` + (`RequestCanceled` error) - accepts async worker responses only for currently pending request IDs (`send_inflight_response`) @@ -170,6 +172,7 @@ Implemented notifications: - `textDocument/didClose` - `workspace/didChangeConfiguration` - `workspace/didChangeWatchedFiles` +- `$/cancelRequest` - `exit` ### Open/change path diff --git a/docs/lsp/HANDLERS.md b/docs/lsp/HANDLERS.md index 83c44502..fbda8cb4 100644 --- a/docs/lsp/HANDLERS.md +++ b/docs/lsp/HANDLERS.md @@ -61,8 +61,8 @@ Current request routing in `crates/jrsonnet-lsp/src/server.rs`: Async requests are sent back over the server's async response channel after worker completion. All request handlers enter through the `InflightRequests` -protocol boundary, which tracks pending IDs and uses typed request handles for -method-specific responses. +protocol boundary, which tracks pending IDs, uses typed request handles for +method-specific responses, and supports request cancellation. 
## Handler Details From 264d075fa2ee648a2f6bd24a43c88a64bc9582c0 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Tue, 10 Feb 2026 11:17:42 +0000 Subject: [PATCH 046/210] lsp: dynamically register watched-file notifications --- .../src/protocol/inflight_requests.rs | 40 ++++++- crates/jrsonnet-lsp/src/server.rs | 66 +++++++++-- crates/jrsonnet-lsp/tests/integration_test.rs | 112 +++++++++++++++++- docs/lsp/ARCHITECTURE.md | 7 +- 4 files changed, 207 insertions(+), 18 deletions(-) diff --git a/crates/jrsonnet-lsp/src/protocol/inflight_requests.rs b/crates/jrsonnet-lsp/src/protocol/inflight_requests.rs index d3ad76ab..958ee781 100644 --- a/crates/jrsonnet-lsp/src/protocol/inflight_requests.rs +++ b/crates/jrsonnet-lsp/src/protocol/inflight_requests.rs @@ -84,6 +84,19 @@ impl InflightRequests { } } + pub(crate) fn send_outgoing_request(&mut self, params: R::Params) -> Result<()> + where + R: lsp_types::request::Request, + R::Params: Serialize, + { + let request = self + .queue + .outgoing + .register(R::METHOD.to_owned(), params, ()); + self.sender.send(Message::Request(request))?; + Ok(()) + } + pub(crate) fn send_inflight_response(&mut self, response: Response) -> Result { let Some(meta) = self.queue.incoming.complete(&response.id) else { return Ok(false); @@ -166,7 +179,7 @@ impl InflightRequests { mod tests { use crossbeam_channel::unbounded; use lsp_server::{ErrorCode, Message, RequestId, Response}; - use lsp_types::request::CodeLensResolve; + use lsp_types::request::{CodeLensResolve, RegisterCapability, Request as _}; use super::InflightRequests; @@ -296,4 +309,29 @@ mod tests { assert!(!inflight.cancel_request(RequestId::from(41)).unwrap()); assert!(receiver.try_recv().is_err()); } + + #[test] + fn send_outgoing_request_registers_and_tracks_response() { + let (sender, receiver) = unbounded(); + let mut inflight = InflightRequests::new(sender); + let params = lsp_types::RegistrationParams { + registrations: vec![], + }; + + inflight + 
.send_outgoing_request::(params.clone()) + .unwrap(); + + let message = receiver.recv().unwrap(); + match message { + Message::Request(request) => { + assert_eq!(request.method, RegisterCapability::METHOD); + let parsed_params: lsp_types::RegistrationParams = + serde_json::from_value(request.params).unwrap(); + assert_eq!(parsed_params, params); + assert!(inflight.complete_outgoing(request.id)); + } + other => panic!("unexpected message: {other:?}"), + } + } } diff --git a/crates/jrsonnet-lsp/src/server.rs b/crates/jrsonnet-lsp/src/server.rs index 68497738..250e3008 100644 --- a/crates/jrsonnet-lsp/src/server.rs +++ b/crates/jrsonnet-lsp/src/server.rs @@ -31,19 +31,22 @@ use lsp_types::{ CodeActionRequest, CodeLensRequest, CodeLensResolve, Completion, DocumentHighlightRequest, DocumentSymbolRequest, ExecuteCommand, Formatting, GotoDeclaration, GotoDefinition, GotoImplementation, GotoTypeDefinition, HoverRequest, InlayHintRequest, - PrepareRenameRequest, References, Rename, Request as _, SemanticTokensFullRequest, - SemanticTokensRangeRequest, Shutdown, SignatureHelpRequest, WorkspaceSymbolRequest, + PrepareRenameRequest, References, RegisterCapability, Rename, Request as _, + SemanticTokensFullRequest, SemanticTokensRangeRequest, Shutdown, SignatureHelpRequest, + WorkspaceSymbolRequest, }, CodeActionKind, CodeActionOptions, CodeActionParams, CodeActionProviderCapability, CodeActionResponse, CodeLens, CodeLensOptions, CompletionOptions, DidChangeConfigurationParams, - DidChangeTextDocumentParams, DidChangeWatchedFilesParams, DidCloseTextDocumentParams, + DidChangeTextDocumentParams, DidChangeWatchedFilesParams, + DidChangeWatchedFilesRegistrationOptions, DidCloseTextDocumentParams, DidOpenTextDocumentParams, DidSaveTextDocumentParams, DocumentFormattingParams, DocumentHighlight, DocumentHighlightParams, DocumentSymbolParams, DocumentSymbolResponse, - ExecuteCommandOptions, ExecuteCommandParams, FileChangeType, HoverProviderCapability, - InitializeParams, 
InitializeResult, NumberOrString, OneOf, PrepareRenameResponse, - SemanticTokensFullOptions, SemanticTokensOptions, SemanticTokensParams, - SemanticTokensRangeParams, SemanticTokensServerCapabilities, ServerCapabilities, ServerInfo, - SignatureHelp, SignatureHelpOptions, SignatureHelpParams, TextDocumentPositionParams, + ExecuteCommandOptions, ExecuteCommandParams, FileChangeType, FileSystemWatcher, GlobPattern, + HoverProviderCapability, InitializeParams, InitializeResult, NumberOrString, OneOf, + PrepareRenameResponse, Registration, RegistrationParams, SemanticTokensFullOptions, + SemanticTokensOptions, SemanticTokensParams, SemanticTokensRangeParams, + SemanticTokensServerCapabilities, ServerCapabilities, ServerInfo, SignatureHelp, + SignatureHelpOptions, SignatureHelpParams, TextDocumentPositionParams, TextDocumentSyncCapability, TextDocumentSyncKind, TextEdit, WorkDoneProgressOptions, }; use parking_lot::RwLock; @@ -108,6 +111,8 @@ const SUPPORTED_EXECUTE_COMMANDS: [&str; 5] = [ "jrsonnet.showErrors", ]; +const WATCHED_FILE_GLOB_PATTERNS: [&str; 3] = ["**/*.jsonnet", "**/*.libsonnet", "**/*.json"]; + impl Server { /// Create a new server with the given connection. 
#[must_use] @@ -240,7 +245,8 @@ impl Server { let (id, params, init_roots) = self.initialize()?; // Parse initialization options into configuration - let init_config = ServerConfig::from_initialization_options(params.initialization_options); + let init_config = + ServerConfig::from_initialization_options(params.initialization_options.clone()); self.update_config(init_config.clone()); info!( "Configuration: jpath={:?}, eval_diagnostics={}, tanka_mode={}", @@ -282,6 +288,7 @@ impl Server { } } + self.register_did_change_watched_files(¶ms)?; self.schedule_workspace_index_bootstrap(init_roots); // Main loop @@ -993,6 +1000,47 @@ impl Server { SUPPORTED_EXECUTE_COMMANDS.contains(&command) } + fn supports_dynamic_watched_files_registration(params: &InitializeParams) -> bool { + params + .capabilities + .workspace + .as_ref() + .and_then(|workspace| workspace.did_change_watched_files) + .and_then(|capabilities| capabilities.dynamic_registration) + .unwrap_or(false) + } + + fn register_did_change_watched_files( + &mut self, + initialize_params: &InitializeParams, + ) -> Result<()> { + if !Self::supports_dynamic_watched_files_registration(initialize_params) { + return Ok(()); + } + + let watchers = WATCHED_FILE_GLOB_PATTERNS + .into_iter() + .map(|pattern| FileSystemWatcher { + glob_pattern: GlobPattern::String(pattern.to_owned()), + kind: None, + }) + .collect::>(); + let options = DidChangeWatchedFilesRegistrationOptions { watchers }; + let registration = Registration { + id: "jrsonnet-lsp.did-change-watched-files".to_owned(), + method: DidChangeWatchedFiles::METHOD.to_owned(), + register_options: Some(serde_json::to_value(options)?), + }; + let params = RegistrationParams { + registrations: vec![registration], + }; + + self.inflight_requests + .send_outgoing_request::(params)?; + info!("Requested dynamic file-watch registration"); + Ok(()) + } + fn request_id_from_number_or_string(id: NumberOrString) -> RequestId { match id { NumberOrString::Number(id) => id.into(), diff 
--git a/crates/jrsonnet-lsp/tests/integration_test.rs b/crates/jrsonnet-lsp/tests/integration_test.rs index ab7b75a8..1a71d03f 100644 --- a/crates/jrsonnet-lsp/tests/integration_test.rs +++ b/crates/jrsonnet-lsp/tests/integration_test.rs @@ -15,15 +15,18 @@ use lsp_types::{ request::{ CodeActionRequest, CodeLensRequest, CodeLensResolve, DocumentHighlightRequest, ExecuteCommand, GotoDeclaration, GotoDefinition, GotoImplementation, GotoTypeDefinition, - Initialize, InlayHintRequest, References, Rename, Request as _, SemanticTokensRangeRequest, - Shutdown, WorkspaceSymbolRequest, + Initialize, InlayHintRequest, References, RegisterCapability, Rename, Request as _, + SemanticTokensRangeRequest, Shutdown, WorkspaceSymbolRequest, }, - CancelParams, DidChangeConfigurationParams, DidChangeWatchedFilesParams, + CancelParams, DidChangeConfigurationParams, DidChangeWatchedFilesClientCapabilities, + DidChangeWatchedFilesParams, DidChangeWatchedFilesRegistrationOptions, DidCloseTextDocumentParams, DidOpenTextDocumentParams, DidSaveTextDocumentParams, - ExecuteCommandParams, FileChangeType, FileEvent, GotoDefinitionParams, GotoDefinitionResponse, - InitializeParams, NumberOrString, PartialResultParams, Position, ReferenceContext, - ReferenceParams, RenameParams, SemanticTokensRangeParams, TextDocumentIdentifier, + ExecuteCommandParams, FileChangeType, FileEvent, FileSystemWatcher, GlobPattern, + GotoDefinitionParams, GotoDefinitionResponse, InitializeParams, NumberOrString, + PartialResultParams, Position, ReferenceContext, ReferenceParams, Registration, + RegistrationParams, RenameParams, SemanticTokensRangeParams, TextDocumentIdentifier, TextDocumentItem, TextDocumentPositionParams, WorkDoneProgressParams, + WorkspaceClientCapabilities, }; use serde_json::json; use tempfile::TempDir; @@ -56,6 +59,25 @@ fn initialize_request_with_root_uri(id: i32, root_uri: &str) -> Request { Request::new(id.into(), Initialize::METHOD.to_string(), params) } +/// Helper to create an initialize 
request that advertises dynamic watched-file +/// registration support. +fn initialize_request_with_dynamic_watched_files(id: i32) -> Request { + let mut params = InitializeParams::default(); + params.capabilities.workspace = Some(WorkspaceClientCapabilities { + did_change_watched_files: Some(DidChangeWatchedFilesClientCapabilities { + dynamic_registration: Some(true), + relative_pattern_support: Some(false), + }), + ..WorkspaceClientCapabilities::default() + }); + + Request::new( + id.into(), + Initialize::METHOD.to_string(), + serde_json::to_value(params).unwrap(), + ) +} + /// Helper to create a shutdown request. fn shutdown_request(id: i32) -> Request { Request::new( @@ -1185,6 +1207,84 @@ fn test_valid_document_no_errors() { .expect("Server thread should exit cleanly"); } +#[test] +fn test_initialize_registers_did_change_watched_files_when_supported() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request( + initialize_request_with_dynamic_watched_files(1), + )) + .unwrap(); + let _ = recv_response(&client_conn, 1); + + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + let register_request = loop { + let message = client_conn + .receiver + .recv_timeout(Duration::from_secs(3)) + .expect("expected registerCapability request"); + if let Message::Request(request) = message { + break request; + } + }; + assert_eq!(register_request.method, RegisterCapability::METHOD); + + let actual_params: RegistrationParams = + serde_json::from_value(register_request.params).unwrap(); + let expected_options = DidChangeWatchedFilesRegistrationOptions { + watchers: vec![ + FileSystemWatcher { + glob_pattern: GlobPattern::String("**/*.jsonnet".to_owned()), + kind: None, + }, + FileSystemWatcher { + glob_pattern: GlobPattern::String("**/*.libsonnet".to_owned()), + kind: None, + }, + FileSystemWatcher { + glob_pattern: 
GlobPattern::String("**/*.json".to_owned()), + kind: None, + }, + ], + }; + let expected_params = RegistrationParams { + registrations: vec![Registration { + id: "jrsonnet-lsp.did-change-watched-files".to_owned(), + method: DidChangeWatchedFiles::METHOD.to_owned(), + register_options: Some(serde_json::to_value(expected_options).unwrap()), + }], + }; + assert_eq!(actual_params, expected_params); + + client_conn + .sender + .send(Message::Response(lsp_server::Response::new_ok( + register_request.id, + serde_json::Value::Null, + ))) + .unwrap(); + + client_conn + .sender + .send(Message::Request(shutdown_request(2))) + .unwrap(); + let _ = recv_response(&client_conn, 2); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + #[test] fn test_goto_definition() { let (client_conn, server_conn) = Connection::memory(); diff --git a/docs/lsp/ARCHITECTURE.md b/docs/lsp/ARCHITECTURE.md index 90d3888a..206e651d 100644 --- a/docs/lsp/ARCHITECTURE.md +++ b/docs/lsp/ARCHITECTURE.md @@ -66,10 +66,13 @@ Startup flow: 3. Configure runtime components (`Evaluator`, `AsyncDiagnostics`). 4. Send `InitializeResult` with server capabilities. 5. Wait for `initialized` notification. -6. Schedule background bootstrap indexing for initialize workspace roots +6. If client supports dynamic watched-files registration, send + `client/registerCapability` for `workspace/didChangeWatchedFiles` with + `**/*.jsonnet`, `**/*.libsonnet`, and `**/*.json` watchers. +7. Schedule background bootstrap indexing for initialize workspace roots (`workspaceFolders`, `rootUri`, `rootPath`) scanning `*.jsonnet`, `*.libsonnet`, and `*.json`. -7. Enter the main loop immediately while bootstrap continues asynchronously. +8. Enter the main loop immediately while bootstrap continues asynchronously. Entry point: `run_stdio()` in `crates/jrsonnet-lsp/src/server.rs`. 
From 3f3366d53793315332e202d72de95c954bf39822 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Tue, 10 Feb 2026 11:21:28 +0000 Subject: [PATCH 047/210] lsp: use relative watched-file patterns when supported --- crates/jrsonnet-lsp/src/server.rs | 97 ++++++++++-- crates/jrsonnet-lsp/tests/integration_test.rs | 148 ++++++++++++++++-- docs/lsp/ARCHITECTURE.md | 4 +- 3 files changed, 224 insertions(+), 25 deletions(-) diff --git a/crates/jrsonnet-lsp/src/server.rs b/crates/jrsonnet-lsp/src/server.rs index 250e3008..fd100abc 100644 --- a/crates/jrsonnet-lsp/src/server.rs +++ b/crates/jrsonnet-lsp/src/server.rs @@ -43,10 +43,10 @@ use lsp_types::{ DocumentHighlight, DocumentHighlightParams, DocumentSymbolParams, DocumentSymbolResponse, ExecuteCommandOptions, ExecuteCommandParams, FileChangeType, FileSystemWatcher, GlobPattern, HoverProviderCapability, InitializeParams, InitializeResult, NumberOrString, OneOf, - PrepareRenameResponse, Registration, RegistrationParams, SemanticTokensFullOptions, - SemanticTokensOptions, SemanticTokensParams, SemanticTokensRangeParams, - SemanticTokensServerCapabilities, ServerCapabilities, ServerInfo, SignatureHelp, - SignatureHelpOptions, SignatureHelpParams, TextDocumentPositionParams, + PrepareRenameResponse, Registration, RegistrationParams, RelativePattern, + SemanticTokensFullOptions, SemanticTokensOptions, SemanticTokensParams, + SemanticTokensRangeParams, SemanticTokensServerCapabilities, ServerCapabilities, ServerInfo, + SignatureHelp, SignatureHelpOptions, SignatureHelpParams, TextDocumentPositionParams, TextDocumentSyncCapability, TextDocumentSyncKind, TextEdit, WorkDoneProgressOptions, }; use parking_lot::RwLock; @@ -288,7 +288,7 @@ impl Server { } } - self.register_did_change_watched_files(¶ms)?; + self.register_did_change_watched_files(¶ms, &init_roots)?; self.schedule_workspace_index_bootstrap(init_roots); // Main loop @@ -1010,21 +1010,94 @@ impl Server { .unwrap_or(false) } + fn supports_relative_watch_patterns(params: 
&InitializeParams) -> bool { + params + .capabilities + .workspace + .as_ref() + .and_then(|workspace| workspace.did_change_watched_files) + .and_then(|capabilities| capabilities.relative_pattern_support) + .unwrap_or(false) + } + + fn watched_file_base_uris(init_roots: &InitializeRoots) -> Vec { + let mut uris = Vec::new(); + + if let Some(workspace_folders) = &init_roots.workspace_folders { + for folder in workspace_folders { + uris.push(folder.uri.clone()); + } + } + + if let Some(root_uri) = &init_roots.root_uri { + uris.push(root_uri.clone()); + } + + if let Some(root_path) = &init_roots.root_path { + let root_path = PathBuf::from(root_path); + if let Ok(path) = CanonicalPath::try_from_path(&root_path) { + if let Ok(uri) = path.to_uri() { + uris.push(uri); + } + } + } + + uris.sort_by(|lhs, rhs| lhs.as_str().cmp(rhs.as_str())); + uris.dedup_by(|lhs, rhs| lhs.as_str() == rhs.as_str()); + uris + } + + fn watched_file_watchers( + initialize_params: &InitializeParams, + init_roots: &InitializeRoots, + ) -> Vec { + if !Self::supports_relative_watch_patterns(initialize_params) { + return WATCHED_FILE_GLOB_PATTERNS + .into_iter() + .map(|pattern| FileSystemWatcher { + glob_pattern: GlobPattern::String(pattern.to_owned()), + kind: None, + }) + .collect(); + } + + let base_uris = Self::watched_file_base_uris(init_roots); + if base_uris.is_empty() { + return WATCHED_FILE_GLOB_PATTERNS + .into_iter() + .map(|pattern| FileSystemWatcher { + glob_pattern: GlobPattern::String(pattern.to_owned()), + kind: None, + }) + .collect(); + } + + base_uris + .into_iter() + .flat_map(|base_uri| { + WATCHED_FILE_GLOB_PATTERNS + .into_iter() + .map(move |pattern| FileSystemWatcher { + glob_pattern: GlobPattern::Relative(RelativePattern { + base_uri: OneOf::Right(base_uri.clone()), + pattern: pattern.to_owned(), + }), + kind: None, + }) + }) + .collect() + } + fn register_did_change_watched_files( &mut self, initialize_params: &InitializeParams, + init_roots: &InitializeRoots, ) -> 
Result<()> { if !Self::supports_dynamic_watched_files_registration(initialize_params) { return Ok(()); } - let watchers = WATCHED_FILE_GLOB_PATTERNS - .into_iter() - .map(|pattern| FileSystemWatcher { - glob_pattern: GlobPattern::String(pattern.to_owned()), - kind: None, - }) - .collect::>(); + let watchers = Self::watched_file_watchers(initialize_params, init_roots); let options = DidChangeWatchedFilesRegistrationOptions { watchers }; let registration = Registration { id: "jrsonnet-lsp.did-change-watched-files".to_owned(), diff --git a/crates/jrsonnet-lsp/tests/integration_test.rs b/crates/jrsonnet-lsp/tests/integration_test.rs index 1a71d03f..638b613b 100644 --- a/crates/jrsonnet-lsp/tests/integration_test.rs +++ b/crates/jrsonnet-lsp/tests/integration_test.rs @@ -22,11 +22,11 @@ use lsp_types::{ DidChangeWatchedFilesParams, DidChangeWatchedFilesRegistrationOptions, DidCloseTextDocumentParams, DidOpenTextDocumentParams, DidSaveTextDocumentParams, ExecuteCommandParams, FileChangeType, FileEvent, FileSystemWatcher, GlobPattern, - GotoDefinitionParams, GotoDefinitionResponse, InitializeParams, NumberOrString, + GotoDefinitionParams, GotoDefinitionResponse, InitializeParams, NumberOrString, OneOf, PartialResultParams, Position, ReferenceContext, ReferenceParams, Registration, - RegistrationParams, RenameParams, SemanticTokensRangeParams, TextDocumentIdentifier, - TextDocumentItem, TextDocumentPositionParams, WorkDoneProgressParams, - WorkspaceClientCapabilities, + RegistrationParams, RelativePattern, RenameParams, SemanticTokensRangeParams, + TextDocumentIdentifier, TextDocumentItem, TextDocumentPositionParams, WorkDoneProgressParams, + WorkspaceClientCapabilities, WorkspaceFolder, }; use serde_json::json; use tempfile::TempDir; @@ -62,14 +62,47 @@ fn initialize_request_with_root_uri(id: i32, root_uri: &str) -> Request { /// Helper to create an initialize request that advertises dynamic watched-file /// registration support. 
fn initialize_request_with_dynamic_watched_files(id: i32) -> Request { - let mut params = InitializeParams::default(); - params.capabilities.workspace = Some(WorkspaceClientCapabilities { - did_change_watched_files: Some(DidChangeWatchedFilesClientCapabilities { - dynamic_registration: Some(true), - relative_pattern_support: Some(false), - }), - ..WorkspaceClientCapabilities::default() - }); + let params = InitializeParams { + capabilities: lsp_types::ClientCapabilities { + workspace: Some(WorkspaceClientCapabilities { + did_change_watched_files: Some(DidChangeWatchedFilesClientCapabilities { + dynamic_registration: Some(true), + relative_pattern_support: Some(false), + }), + ..WorkspaceClientCapabilities::default() + }), + ..lsp_types::ClientCapabilities::default() + }, + ..InitializeParams::default() + }; + + Request::new( + id.into(), + Initialize::METHOD.to_string(), + serde_json::to_value(params).unwrap(), + ) +} + +/// Helper to create an initialize request that advertises dynamic watched-file +/// registration support and relative pattern support. 
+fn initialize_request_with_dynamic_watched_files_relative(id: i32, root_uri: &str) -> Request { + let params = InitializeParams { + workspace_folders: Some(vec![WorkspaceFolder { + uri: root_uri.parse().unwrap(), + name: "workspace".to_owned(), + }]), + capabilities: lsp_types::ClientCapabilities { + workspace: Some(WorkspaceClientCapabilities { + did_change_watched_files: Some(DidChangeWatchedFilesClientCapabilities { + dynamic_registration: Some(true), + relative_pattern_support: Some(true), + }), + ..WorkspaceClientCapabilities::default() + }), + ..lsp_types::ClientCapabilities::default() + }, + ..InitializeParams::default() + }; Request::new( id.into(), @@ -1285,6 +1318,97 @@ fn test_initialize_registers_did_change_watched_files_when_supported() { .expect("Server thread should exit cleanly"); } +#[test] +fn test_initialize_uses_relative_watch_patterns_when_supported() { + let tmp = TempDir::new().expect("tempdir should be created"); + let root_uri = file_uri(tmp.path()); + let parsed_root_uri: lsp_types::Uri = root_uri.parse().unwrap(); + + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request( + initialize_request_with_dynamic_watched_files_relative(1, &root_uri), + )) + .unwrap(); + let _ = recv_response(&client_conn, 1); + + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + let register_request = loop { + let message = client_conn + .receiver + .recv_timeout(Duration::from_secs(3)) + .expect("expected registerCapability request"); + if let Message::Request(request) = message { + break request; + } + }; + assert_eq!(register_request.method, RegisterCapability::METHOD); + + let actual_params: RegistrationParams = + serde_json::from_value(register_request.params).unwrap(); + let expected_options = DidChangeWatchedFilesRegistrationOptions { + watchers: vec![ + FileSystemWatcher { + glob_pattern: 
GlobPattern::Relative(RelativePattern { + base_uri: OneOf::Right(parsed_root_uri.clone()), + pattern: "**/*.jsonnet".to_owned(), + }), + kind: None, + }, + FileSystemWatcher { + glob_pattern: GlobPattern::Relative(RelativePattern { + base_uri: OneOf::Right(parsed_root_uri.clone()), + pattern: "**/*.libsonnet".to_owned(), + }), + kind: None, + }, + FileSystemWatcher { + glob_pattern: GlobPattern::Relative(RelativePattern { + base_uri: OneOf::Right(parsed_root_uri), + pattern: "**/*.json".to_owned(), + }), + kind: None, + }, + ], + }; + let expected_params = RegistrationParams { + registrations: vec![Registration { + id: "jrsonnet-lsp.did-change-watched-files".to_owned(), + method: DidChangeWatchedFiles::METHOD.to_owned(), + register_options: Some(serde_json::to_value(expected_options).unwrap()), + }], + }; + assert_eq!(actual_params, expected_params); + + client_conn + .sender + .send(Message::Response(lsp_server::Response::new_ok( + register_request.id, + serde_json::Value::Null, + ))) + .unwrap(); + + client_conn + .sender + .send(Message::Request(shutdown_request(2))) + .unwrap(); + let _ = recv_response(&client_conn, 2); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + #[test] fn test_goto_definition() { let (client_conn, server_conn) = Connection::memory(); diff --git a/docs/lsp/ARCHITECTURE.md b/docs/lsp/ARCHITECTURE.md index 206e651d..4c57bd2f 100644 --- a/docs/lsp/ARCHITECTURE.md +++ b/docs/lsp/ARCHITECTURE.md @@ -68,7 +68,9 @@ Startup flow: 5. Wait for `initialized` notification. 6. If client supports dynamic watched-files registration, send `client/registerCapability` for `workspace/didChangeWatchedFiles` with - `**/*.jsonnet`, `**/*.libsonnet`, and `**/*.json` watchers. + `**/*.jsonnet`, `**/*.libsonnet`, and `**/*.json` watchers. When the client + also supports relative patterns, watchers are registered relative to + workspace roots. 7. 
Schedule background bootstrap indexing for initialize workspace roots (`workspaceFolders`, `rootUri`, `rootPath`) scanning `*.jsonnet`, `*.libsonnet`, and `*.json`. From 6b0aaee59db964430cb20a566a656bbf282ed087 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Tue, 10 Feb 2026 11:23:29 +0000 Subject: [PATCH 048/210] lsp: track outgoing request methods for response logging --- .../src/protocol/inflight_requests.rs | 29 +++++++++++++------ crates/jrsonnet-lsp/src/server.rs | 14 +++++++-- docs/lsp/ARCHITECTURE.md | 2 ++ 3 files changed, 34 insertions(+), 11 deletions(-) diff --git a/crates/jrsonnet-lsp/src/protocol/inflight_requests.rs b/crates/jrsonnet-lsp/src/protocol/inflight_requests.rs index 958ee781..a7c8ce42 100644 --- a/crates/jrsonnet-lsp/src/protocol/inflight_requests.rs +++ b/crates/jrsonnet-lsp/src/protocol/inflight_requests.rs @@ -10,9 +10,14 @@ struct IncomingRequestMeta { method: String, } +#[derive(Debug, Clone, PartialEq, Eq)] +pub(crate) struct OutgoingRequestMeta { + pub(crate) method: String, +} + #[derive(Debug)] pub(crate) struct InflightRequests { - queue: ReqQueue, + queue: ReqQueue, sender: Sender, } @@ -89,10 +94,13 @@ impl InflightRequests { R: lsp_types::request::Request, R::Params: Serialize, { - let request = self - .queue - .outgoing - .register(R::METHOD.to_owned(), params, ()); + let request = self.queue.outgoing.register( + R::METHOD.to_owned(), + params, + OutgoingRequestMeta { + method: R::METHOD.to_owned(), + }, + ); self.sender.send(Message::Request(request))?; Ok(()) } @@ -170,8 +178,8 @@ impl InflightRequests { self.send_err_by_id(request.id, code, message) } - pub(crate) fn complete_outgoing(&mut self, id: RequestId) -> bool { - self.queue.outgoing.complete(id).is_some() + pub(crate) fn complete_outgoing(&mut self, id: RequestId) -> Option { + self.queue.outgoing.complete(id) } } @@ -217,7 +225,7 @@ mod tests { fn complete_outgoing_returns_none_for_untracked_response() { let (sender, receiver) = unbounded(); let mut inflight = 
InflightRequests::new(sender); - assert!(!inflight.complete_outgoing(RequestId::from(11))); + assert_eq!(inflight.complete_outgoing(RequestId::from(11)), None); assert!(receiver.try_recv().is_err()); } @@ -329,7 +337,10 @@ mod tests { let parsed_params: lsp_types::RegistrationParams = serde_json::from_value(request.params).unwrap(); assert_eq!(parsed_params, params); - assert!(inflight.complete_outgoing(request.id)); + let meta = inflight + .complete_outgoing(request.id) + .expect("outgoing request should be tracked"); + assert_eq!(meta.method, RegisterCapability::METHOD); } other => panic!("unexpected message: {other:?}"), } diff --git a/crates/jrsonnet-lsp/src/server.rs b/crates/jrsonnet-lsp/src/server.rs index fd100abc..092f30a8 100644 --- a/crates/jrsonnet-lsp/src/server.rs +++ b/crates/jrsonnet-lsp/src/server.rs @@ -591,8 +591,18 @@ impl Server { Ok(false) } Message::Response(resp) => { - if self.inflight_requests.complete_outgoing(resp.id.clone()) { - debug!("Received response for outgoing request {}", resp.id); + if let Some(meta) = self.inflight_requests.complete_outgoing(resp.id.clone()) { + if let Some(error) = &resp.error { + warn!( + "Outgoing request {} ({}) failed: {} ({})", + resp.id, meta.method, error.message, error.code + ); + } else { + debug!( + "Received response for outgoing request {} ({})", + resp.id, meta.method + ); + } } else { debug!("Received untracked response: {:?}", resp.id); } diff --git a/docs/lsp/ARCHITECTURE.md b/docs/lsp/ARCHITECTURE.md index 4c57bd2f..5974d352 100644 --- a/docs/lsp/ARCHITECTURE.md +++ b/docs/lsp/ARCHITECTURE.md @@ -144,6 +144,8 @@ This layer: (`RequestCanceled` error) - accepts async worker responses only for currently pending request IDs (`send_inflight_response`) +- tracks outgoing request metadata so response errors can be logged with method + context The main loop uses this boundary for all request responses after initialization. 
From 6cb6be928e669e5516147c0c180bad44d2d20269 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Tue, 10 Feb 2026 11:47:18 +0000 Subject: [PATCH 049/210] lsp-inference: record types in a single inference pass --- crates/jrsonnet-lsp-inference/src/analysis.rs | 308 +++--------------- crates/jrsonnet-lsp-inference/src/expr.rs | 219 ++++++++++--- crates/jrsonnet-lsp-inference/src/object.rs | 14 +- .../jrsonnet-lsp/tests/e2e_annotated_tests.rs | 109 ++++++- 4 files changed, 343 insertions(+), 307 deletions(-) diff --git a/crates/jrsonnet-lsp-inference/src/analysis.rs b/crates/jrsonnet-lsp-inference/src/analysis.rs index 568eca29..04dff460 100644 --- a/crates/jrsonnet-lsp-inference/src/analysis.rs +++ b/crates/jrsonnet-lsp-inference/src/analysis.rs @@ -10,17 +10,14 @@ use jrsonnet_lsp_document::Document; use jrsonnet_lsp_types::{ is_subtype_ty, FunctionData, GlobalTy, GlobalTyStore, MutStore, ObjectData, Ty, TyData, TySubst, }; -use jrsonnet_rowan_parser::{ - nodes::{Bind, Expr, ExprBase, Member, ObjBody, StmtLocal}, - AstNode, SyntaxNode, -}; +use jrsonnet_rowan_parser::SyntaxNode; use parking_lot::RwLock; use rowan::TextRange; use rustc_hash::FxHashMap; use crate::{ env::{ImportResolver, TypeEnv}, - expr::infer_expr_ty, + expr::infer_expr_ty_and_record, }; /// Stores inferred types for all expressions, allowing queries by position. @@ -423,263 +420,17 @@ impl TypeAnalysis { /// Analyze an expression and record types for it and all sub-expressions. 
fn analyze_and_record( - expr: &Expr, + expr: &jrsonnet_rowan_parser::nodes::Expr, env: &mut TypeEnv, expr_types: &mut FxHashMap, ) -> Ty { - let ty = infer_expr_ty(expr, env); - - // Record the type for the expression - expr_types.insert(expr.syntax().text_range(), ty); - - // Also record the type for the base expression if present - if let Some(base) = expr.expr_base() { - expr_types.insert(base.syntax().text_range(), ty); - } - - // Recursively visit all child expressions to record their types - visit_children(expr, env, expr_types); - - ty -} - -/// Visit all child expressions and record their types. -fn visit_children(expr: &Expr, env: &mut TypeEnv, expr_types: &mut FxHashMap) { - // Visit statements - for stmt in expr.stmts() { - if let Some(stmt_local) = StmtLocal::cast(stmt.syntax().clone()) { - for bind in stmt_local.binds() { - visit_bind(&bind, env, expr_types); - } - } - } - - // Visit base expression children - if let Some(base) = expr.expr_base() { - visit_base(&base, env, expr_types); - } -} - -/// Visit a binding and record types. -fn visit_bind(bind: &Bind, env: &mut TypeEnv, expr_types: &mut FxHashMap) { - match bind { - Bind::BindDestruct(bd) => { - if let Some(value) = bd.value() { - analyze_and_record(&value, env, expr_types); - } - } - Bind::BindFunction(bf) => { - if let Some(body) = bf.value() { - env.push_scope(); - if let Some(params) = bf.params() { - for param in params.params() { - if let Some(name_node) = param.destruct() { - if let jrsonnet_rowan_parser::nodes::Destruct::DestructFull(full) = - name_node - { - if let Some(name) = full.name() { - if let Some(ident) = name.ident_lit() { - env.define_ty(ident.text().to_string(), Ty::ANY); - } - } - } - } - } - } - analyze_and_record(&body, env, expr_types); - env.pop_scope(); - } - } - } -} - -/// Visit a base expression and record types for all children. 
-fn visit_base(base: &ExprBase, env: &mut TypeEnv, expr_types: &mut FxHashMap) { - match base { - ExprBase::ExprArray(arr) => visit_array_base(arr, env, expr_types), - ExprBase::ExprObject(obj) => { - if let Some(body) = obj.obj_body() { - visit_obj_body(&body, env, expr_types); - } - } - ExprBase::ExprFunction(func) => visit_function_base(func, env, expr_types), - ExprBase::ExprParened(parens) => visit_optional_expr(parens.expr(), env, expr_types), - ExprBase::ExprIfThenElse(if_expr) => visit_if_then_else_base(if_expr, env, expr_types), - ExprBase::ExprBinary(binary) => visit_binary_base(binary, env, expr_types), - ExprBase::ExprUnary(unary) => visit_optional_expr(unary.rhs(), env, expr_types), - ExprBase::ExprObjExtend(extend) => visit_optional_expr(extend.expr(), env, expr_types), - ExprBase::ExprArrayComp(comp) => visit_optional_expr(comp.expr(), env, expr_types), - ExprBase::ExprField(field) => visit_optional_expr(field.base(), env, expr_types), - ExprBase::ExprIndex(idx) => visit_index_base(idx, env, expr_types), - ExprBase::ExprSlice(slice) => visit_optional_expr(slice.base(), env, expr_types), - ExprBase::ExprCall(call) => visit_call_base(call, env, expr_types), - // Leaf nodes - no children to visit - ExprBase::ExprLiteral(_) - | ExprBase::ExprNumber(_) - | ExprBase::ExprString(_) - | ExprBase::ExprError(_) - | ExprBase::ExprVar(_) - | ExprBase::ExprImport(_) => {} - } -} - -fn define_param_as_any(param: &jrsonnet_rowan_parser::nodes::Param, env: &mut TypeEnv) { - let Some(name_node) = param.destruct() else { - return; - }; - let jrsonnet_rowan_parser::nodes::Destruct::DestructFull(full) = name_node else { - return; - }; - let Some(name) = full.name() else { - return; - }; - let Some(ident) = name.ident_lit() else { - return; - }; - env.define_ty(ident.text().to_string(), Ty::ANY); -} - -fn visit_optional_expr( - expr: Option, - env: &mut TypeEnv, - expr_types: &mut FxHashMap, -) { - if let Some(expr) = expr { - analyze_and_record(&expr, env, expr_types); 
- } -} - -fn visit_array_base( - arr: &jrsonnet_rowan_parser::nodes::ExprArray, - env: &mut TypeEnv, - expr_types: &mut FxHashMap, -) { - for elem in arr.exprs() { - analyze_and_record(&elem, env, expr_types); - } -} - -fn visit_function_base( - func: &jrsonnet_rowan_parser::nodes::ExprFunction, - env: &mut TypeEnv, - expr_types: &mut FxHashMap, -) { - let Some(body) = func.expr() else { - return; - }; - - env.push_scope(); - if let Some(params) = func.params_desc() { - for param in params.params() { - define_param_as_any(¶m, env); - } - } - analyze_and_record(&body, env, expr_types); - env.pop_scope(); -} - -fn visit_if_then_else_base( - if_expr: &jrsonnet_rowan_parser::nodes::ExprIfThenElse, - env: &mut TypeEnv, - expr_types: &mut FxHashMap, -) { - visit_optional_expr(if_expr.cond(), env, expr_types); - let then_expr = if_expr.then().and_then(|then_clause| then_clause.expr()); - visit_optional_expr(then_expr, env, expr_types); - let else_expr = if_expr.else_().and_then(|else_clause| else_clause.expr()); - visit_optional_expr(else_expr, env, expr_types); -} - -fn visit_binary_base( - binary: &jrsonnet_rowan_parser::nodes::ExprBinary, - env: &mut TypeEnv, - expr_types: &mut FxHashMap, -) { - visit_optional_expr(binary.lhs(), env, expr_types); - visit_optional_expr(binary.rhs(), env, expr_types); -} - -fn visit_index_base( - idx: &jrsonnet_rowan_parser::nodes::ExprIndex, - env: &mut TypeEnv, - expr_types: &mut FxHashMap, -) { - visit_optional_expr(idx.base(), env, expr_types); - visit_optional_expr(idx.index(), env, expr_types); -} - -fn visit_call_base( - call: &jrsonnet_rowan_parser::nodes::ExprCall, - env: &mut TypeEnv, - expr_types: &mut FxHashMap, -) { - visit_optional_expr(call.callee(), env, expr_types); - let Some(args) = call.args_desc() else { - return; - }; - for arg in args.args() { - visit_optional_expr(arg.expr(), env, expr_types); - } -} - -/// Visit object body members. 
-fn visit_obj_body(body: &ObjBody, env: &mut TypeEnv, expr_types: &mut FxHashMap) { - match body { - ObjBody::ObjBodyMemberList(members) => { - for member in members.members() { - match member { - Member::MemberFieldNormal(field) => { - if let Some(expr) = field.expr() { - analyze_and_record(&expr, env, expr_types); - } - } - Member::MemberFieldMethod(method) => { - if let Some(expr) = method.expr() { - env.push_scope(); - if let Some(params) = method.params_desc() { - for param in params.params() { - if let Some(name_node) = param.destruct() { - if let jrsonnet_rowan_parser::nodes::Destruct::DestructFull( - full, - ) = name_node - { - if let Some(name) = full.name() { - if let Some(ident) = name.ident_lit() { - env.define_ty( - ident.text().to_string(), - Ty::ANY, - ); - } - } - } - } - } - } - analyze_and_record(&expr, env, expr_types); - env.pop_scope(); - } - } - _ => {} - } - } - } - ObjBody::ObjBodyComp(comp) => { - for comp_spec in comp.comp_specs() { - if let Some(for_spec) = - jrsonnet_rowan_parser::nodes::ForSpec::cast(comp_spec.syntax().clone()) - { - if let Some(expr) = for_spec.expr() { - analyze_and_record(&expr, env, expr_types); - } - } - } - } - } + infer_expr_ty_and_record(expr, env, None, expr_types) } #[cfg(test)] mod tests { use jrsonnet_lsp_document::DocVersion; + use jrsonnet_rowan_parser::AstNode; use super::*; @@ -759,6 +510,55 @@ mod tests { assert!(analysis.is_closed_object(ty)); } + fn nth_offset(code: &str, needle: &str, occurrence: usize) -> rowan::TextSize { + let idx = code + .match_indices(needle) + .nth(occurrence) + .map(|(idx, _)| idx) + .expect("expected needle occurrence in source"); + rowan::TextSize::new(u32::try_from(idx).expect("offset should fit into u32")) + } + + #[test] + fn test_type_at_position_uses_flow_narrowing_in_else_branch() { + let code = r#"local f(x) = + assert std.isNumber(x) || std.isString(x); + if std.isNumber(x) then + x + 1 + else + std.length(x); +f(3) + f("hi")"#; + let (analysis, doc) = 
analyze_doc(code); + let root = doc.ast().syntax().clone(); + let call_start = nth_offset(code, "std.length(x)", 0); + let x_offset = call_start + rowan::TextSize::new(11); + + let ty = analysis + .type_at_position(&root, x_offset) + .expect("expected type at else branch x"); + assert_eq!(ty, Ty::STRING); + } + + #[test] + fn test_type_at_position_uses_flow_narrowing_for_non_null_branch() { + let code = r"local f(x) = + assert x == null || std.isString(x); + if x != null then + std.length(x) + else + 0; +f(null)"; + let (analysis, doc) = analyze_doc(code); + let root = doc.ast().syntax().clone(); + let call_start = nth_offset(code, "std.length(x)", 0); + let x_offset = call_start + rowan::TextSize::new(11); + + let ty = analysis + .type_at_position(&root, x_offset) + .expect("expected type at non-null branch x"); + assert_eq!(ty, Ty::STRING); + } + #[test] fn test_object_has_field() { let code = "{ existing: 1 }"; diff --git a/crates/jrsonnet-lsp-inference/src/expr.rs b/crates/jrsonnet-lsp-inference/src/expr.rs index 6bbcf0ea..7e89eeda 100644 --- a/crates/jrsonnet-lsp-inference/src/expr.rs +++ b/crates/jrsonnet-lsp-inference/src/expr.rs @@ -9,6 +9,7 @@ use jrsonnet_rowan_parser::{ nodes::{BinaryOperatorKind, Bind, ExprBase, LiteralKind, UnaryOperatorKind}, AstNode, AstToken, }; +use rowan::TextRange; use rustc_hash::FxHashMap; use crate::{ @@ -36,6 +37,33 @@ fn apply_facts_to_env(facts: &Facts, env: &mut TypeEnv) { } } +trait TypeRecorder { + fn record(&mut self, range: TextRange, ty: Ty); +} + +struct NoopRecorder; + +impl TypeRecorder for NoopRecorder { + fn record(&mut self, _range: TextRange, _ty: Ty) {} +} + +impl TypeRecorder for FxHashMap { + fn record(&mut self, range: TextRange, ty: Ty) { + self.insert(range, ty); + } +} + +fn record_expr_and_base( + recorder: &mut R, + expr: &jrsonnet_rowan_parser::nodes::Expr, + ty: Ty, +) { + recorder.record(expr.syntax().text_range(), ty); + if let Some(base) = expr.expr_base() { + 
recorder.record(base.syntax().text_range(), ty); + } +} + /// Infer the type of a document's root expression, returning an interned `Ty` and the environment. /// /// This is useful for tests that need to inspect the type structure using `TyData`. @@ -58,6 +86,15 @@ pub fn infer_expr_ty(expr: &jrsonnet_rowan_parser::nodes::Expr, env: &mut TypeEn infer_expr_ty_with_expected(expr, env, None) } +pub(crate) fn infer_expr_ty_and_record( + expr: &jrsonnet_rowan_parser::nodes::Expr, + env: &mut TypeEnv, + expected: Option, + recorder: &mut FxHashMap, +) -> Ty { + infer_expr_ty_impl(expr, env, expected, recorder) +} + /// Infer the type of an expression with an optional expected type, returning `Ty`. /// /// This is the efficient internal version that works with interned types throughout. @@ -65,13 +102,23 @@ pub fn infer_expr_ty_with_expected( expr: &jrsonnet_rowan_parser::nodes::Expr, env: &mut TypeEnv, expected: Option, +) -> Ty { + let mut recorder = NoopRecorder; + infer_expr_ty_impl(expr, env, expected, &mut recorder) +} + +fn infer_expr_ty_impl( + expr: &jrsonnet_rowan_parser::nodes::Expr, + env: &mut TypeEnv, + expected: Option, + recorder: &mut R, ) -> Ty { // First, handle local bindings and assert statements that may precede the expression for stmt in expr.stmts() { match stmt { jrsonnet_rowan_parser::nodes::Stmt::StmtLocal(stmt_local) => { for bind in stmt_local.binds() { - infer_bind_type_ty(&bind, env); + infer_bind_type_ty(&bind, env, recorder); } } jrsonnet_rowan_parser::nodes::Stmt::StmtAssert(stmt_assert) => { @@ -87,15 +134,17 @@ pub fn infer_expr_ty_with_expected( } // Get the base expression type - if let Some(base) = expr.expr_base() { - infer_base_ty(&base, env, expected) + let ty = if let Some(base) = expr.expr_base() { + infer_base_ty(&base, env, expected, recorder) } else { Ty::ANY - } + }; + record_expr_and_base(recorder, expr, ty); + ty } /// Infer types from a bind (local variable definition) using interned types. 
-pub(super) fn infer_bind_type_ty(bind: &Bind, env: &mut TypeEnv) { +fn infer_bind_type_ty(bind: &Bind, env: &mut TypeEnv, recorder: &mut R) { match bind { Bind::BindDestruct(bd) => { if let Some(destruct) = bd.into() { @@ -103,7 +152,9 @@ pub(super) fn infer_bind_type_ty(bind: &Bind, env: &mut TypeEnv) { if let Some(name_node) = full.name() { if let Some(ident) = name_node.ident_lit() { let name = ident.text().to_string(); - let ty = bd.value().map_or(Ty::ANY, |v| infer_expr_ty(&v, env)); + let ty = bd + .value() + .map_or(Ty::ANY, |v| infer_expr_ty_impl(&v, env, None, recorder)); env.define_ty(name, ty); } } @@ -127,6 +178,32 @@ pub(super) fn infer_bind_type_ty(bind: &Bind, env: &mut TypeEnv) { env.define_ty(name, func_ty); } } + + // Record types inside function bodies during the same inference pass. + // Keep parameter assumptions aligned with historical analysis behavior. + if let Some(body) = bf.value() { + env.push_scope(); + if let Some(params) = bf.params() { + for param in params.params() { + let Some(destruct) = param.destruct() else { + continue; + }; + let jrsonnet_rowan_parser::nodes::Destruct::DestructFull(full) = destruct + else { + continue; + }; + let Some(name_node) = full.name() else { + continue; + }; + let Some(ident) = name_node.ident_lit() else { + continue; + }; + env.define_ty(ident.text().to_string(), Ty::ANY); + } + } + let _ = infer_expr_ty_impl(&body, env, None, recorder); + env.pop_scope(); + } } } } @@ -218,7 +295,12 @@ pub fn is_divergent(expr: &jrsonnet_rowan_parser::nodes::Expr, env: &mut TypeEnv /// Infer the type of a base expression with an optional expected type, returning `Ty`. /// /// This is the efficient internal version that works with interned types throughout. 
-pub(super) fn infer_base_ty(base: &ExprBase, env: &mut TypeEnv, expected: Option) -> Ty { +fn infer_base_ty( + base: &ExprBase, + env: &mut TypeEnv, + expected: Option, + recorder: &mut R, +) -> Ty { match base { // Primitives - return constants directly ExprBase::ExprLiteral(lit) => { @@ -271,7 +353,7 @@ pub(super) fn infer_base_ty(base: &ExprBase, env: &mut TypeEnv, expected: Option // Parenthesized expression - recurse with expected type ExprBase::ExprParened(parens) => { if let Some(inner) = parens.expr() { - return infer_expr_ty_with_expected(&inner, env, expected); + return infer_expr_ty_impl(&inner, env, expected, recorder); } Ty::ANY } @@ -297,7 +379,9 @@ pub(super) fn infer_base_ty(base: &ExprBase, env: &mut TypeEnv, expected: Option // Unary operators ExprBase::ExprUnary(unary) => { - let rhs_ty = unary.rhs().map_or(Ty::ANY, |rhs| infer_expr_ty(&rhs, env)); + let rhs_ty = unary + .rhs() + .map_or(Ty::ANY, |rhs| infer_expr_ty_impl(&rhs, env, None, recorder)); if rhs_ty == Ty::NEVER { return Ty::NEVER; } @@ -309,51 +393,60 @@ pub(super) fn infer_base_ty(base: &ExprBase, env: &mut TypeEnv, expected: Option } // Binary operators - handle simple cases directly - ExprBase::ExprBinary(binary) => infer_binary_expr_base_ty(binary, env), + ExprBase::ExprBinary(binary) => infer_binary_expr_base_ty(binary, env, recorder), // Index access: arr[0], obj["field"], str[0] - ExprBase::ExprIndex(idx) => infer_index_expr_base_ty(idx, env), + ExprBase::ExprIndex(idx) => infer_index_expr_base_ty(idx, env, recorder), // Slice: arr[1:3], str[::2] - ExprBase::ExprSlice(slice) => infer_slice_expr_base_ty(slice, env), + ExprBase::ExprSlice(slice) => infer_slice_expr_base_ty(slice, env, recorder), // If-then-else with flow typing ExprBase::ExprIfThenElse(if_expr) => { - infer_if_then_else_expr_base_ty(if_expr, env, expected) + infer_if_then_else_expr_base_ty(if_expr, env, expected, recorder) } // Field access: obj.field or std.fn - ExprBase::ExprField(field) => 
infer_field_expr_base_ty(field, env), + ExprBase::ExprField(field) => infer_field_expr_base_ty(field, env, recorder), // Function call: fn(args) - ExprBase::ExprCall(call) => infer_call_expr_base_ty(call, env), + ExprBase::ExprCall(call) => infer_call_expr_base_ty(call, env, recorder), // Array literal: [a, b, c] - ExprBase::ExprArray(arr) => infer_array_expr_base_ty(arr, env, expected), + ExprBase::ExprArray(arr) => infer_array_expr_base_ty(arr, env, expected, recorder), // Array comprehension: [expr for x in arr] - ExprBase::ExprArrayComp(comp) => infer_array_comp_expr_base_ty(comp, env), + ExprBase::ExprArrayComp(comp) => infer_array_comp_expr_base_ty(comp, env, recorder), // Object literal: { field: value } - ExprBase::ExprObject(obj) => infer_object_type_ty(obj.obj_body().as_ref(), env), + ExprBase::ExprObject(obj) => { + infer_object_type_ty(obj.obj_body().as_ref(), env, &mut |expr, env| { + infer_expr_ty_impl(expr, env, None, recorder) + }) + } // Function definition: function(x) body - ExprBase::ExprFunction(func) => infer_function_expr_base_ty(func, env), + ExprBase::ExprFunction(func) => infer_function_expr_base_ty(func, env, recorder), // Object extension: base { ... 
} - ExprBase::ExprObjExtend(extend) => infer_obj_extend_expr_base_ty(extend, env), + ExprBase::ExprObjExtend(extend) => infer_obj_extend_expr_base_ty(extend, env, recorder), } } -fn infer_binary_expr_base_ty( +fn infer_binary_expr_base_ty( binary: &jrsonnet_rowan_parser::nodes::ExprBinary, env: &mut TypeEnv, + recorder: &mut R, ) -> Ty { - let lhs_ty = binary.lhs().map_or(Ty::ANY, |e| infer_expr_ty(&e, env)); + let lhs_ty = binary + .lhs() + .map_or(Ty::ANY, |e| infer_expr_ty_impl(&e, env, None, recorder)); if lhs_ty == Ty::NEVER { return Ty::NEVER; } - let rhs_ty = binary.rhs().map_or(Ty::ANY, |e| infer_expr_ty(&e, env)); + let rhs_ty = binary + .rhs() + .map_or(Ty::ANY, |e| infer_expr_ty_impl(&e, env, None, recorder)); if rhs_ty == Ty::NEVER { return Ty::NEVER; } @@ -413,16 +506,19 @@ fn infer_binary_expr_base_ty( } } -fn infer_index_expr_base_ty( +fn infer_index_expr_base_ty( idx: &jrsonnet_rowan_parser::nodes::ExprIndex, env: &mut TypeEnv, + recorder: &mut R, ) -> Ty { - let base_ty = idx.base().map_or(Ty::ANY, |e| infer_expr_ty(&e, env)); + let base_ty = idx + .base() + .map_or(Ty::ANY, |e| infer_expr_ty_impl(&e, env, None, recorder)); if base_ty == Ty::NEVER { return Ty::NEVER; } if let Some(index_expr) = idx.index() { - let idx_ty = infer_expr_ty(&index_expr, env); + let idx_ty = infer_expr_ty_impl(&index_expr, env, None, recorder); if idx_ty == Ty::NEVER { return Ty::NEVER; } @@ -441,11 +537,14 @@ fn infer_index_expr_base_ty( } } -fn infer_slice_expr_base_ty( +fn infer_slice_expr_base_ty( slice: &jrsonnet_rowan_parser::nodes::ExprSlice, env: &mut TypeEnv, + recorder: &mut R, ) -> Ty { - let base_ty = slice.base().map_or(Ty::ANY, |e| infer_expr_ty(&e, env)); + let base_ty = slice + .base() + .map_or(Ty::ANY, |e| infer_expr_ty_impl(&e, env, None, recorder)); if base_ty == Ty::NEVER { return Ty::NEVER; } @@ -462,13 +561,14 @@ fn infer_slice_expr_base_ty( } } -fn infer_if_then_else_expr_base_ty( +fn infer_if_then_else_expr_base_ty( if_expr: 
&jrsonnet_rowan_parser::nodes::ExprIfThenElse, env: &mut TypeEnv, expected: Option, + recorder: &mut R, ) -> Ty { let facts = if let Some(cond) = if_expr.cond() { - let cond_ty = infer_expr_ty(&cond, env); + let cond_ty = infer_expr_ty_impl(&cond, env, None, recorder); if cond_ty == Ty::NEVER { return Ty::NEVER; } @@ -485,7 +585,7 @@ fn infer_if_then_else_expr_base_ty( let narrowed_ty = fact.apply_to(current_ty, env.store_mut()); env.define_ty(var_name.clone(), narrowed_ty); } - let ty = infer_expr_ty_with_expected(&then_expr, env, expected); + let ty = infer_expr_ty_impl(&then_expr, env, expected, recorder); env.pop_scope(); ty } else { @@ -503,7 +603,7 @@ fn infer_if_then_else_expr_base_ty( let widened_ty = fact.apply_negated(current_ty, env.store_mut()); env.define_ty(var_name.clone(), widened_ty); } - let ty = infer_expr_ty_with_expected(&else_expr, env, expected); + let ty = infer_expr_ty_impl(&else_expr, env, expected, recorder); env.pop_scope(); ty } else { @@ -516,11 +616,15 @@ fn infer_if_then_else_expr_base_ty( env.store_mut().union(vec![then_ty, else_ty]) } -fn infer_field_expr_base_ty( +fn infer_field_expr_base_ty( field: &jrsonnet_rowan_parser::nodes::ExprField, env: &mut TypeEnv, + recorder: &mut R, ) -> Ty { if let Some(ty) = infer_stdlib_field_access_ty(field, env) { + if let Some(base_expr) = field.base() { + let _ = infer_expr_ty_impl(&base_expr, env, None, recorder); + } return ty; } @@ -553,7 +657,9 @@ fn infer_field_expr_base_ty( } } - let base_ty = field.base().map_or(Ty::ANY, |e| infer_expr_ty(&e, env)); + let base_ty = field + .base() + .map_or(Ty::ANY, |e| infer_expr_ty_impl(&e, env, None, recorder)); if base_ty == Ty::NEVER { return Ty::NEVER; } @@ -583,8 +689,14 @@ fn infer_field_expr_base_ty( Ty::ANY } -fn infer_call_expr_base_ty(call: &jrsonnet_rowan_parser::nodes::ExprCall, env: &mut TypeEnv) -> Ty { - let base_ty = call.callee().map_or(Ty::ANY, |e| infer_expr_ty(&e, env)); +fn infer_call_expr_base_ty( + call: 
&jrsonnet_rowan_parser::nodes::ExprCall, + env: &mut TypeEnv, + recorder: &mut R, +) -> Ty { + let base_ty = call + .callee() + .map_or(Ty::ANY, |e| infer_expr_ty_impl(&e, env, None, recorder)); if base_ty == Ty::NEVER { return Ty::NEVER; } @@ -593,7 +705,7 @@ fn infer_call_expr_base_ty(call: &jrsonnet_rowan_parser::nodes::ExprCall, env: & if let Some(args) = call.args_desc() { for arg in args.args() { if let Some(expr) = arg.expr() { - let arg_ty = infer_expr_ty(&expr, env); + let arg_ty = infer_expr_ty_impl(&expr, env, None, recorder); if arg_ty == Ty::NEVER { return Ty::NEVER; } @@ -693,10 +805,11 @@ fn infer_call_expr_base_ty(call: &jrsonnet_rowan_parser::nodes::ExprCall, env: & Ty::ANY } -fn infer_array_expr_base_ty( +fn infer_array_expr_base_ty( arr: &jrsonnet_rowan_parser::nodes::ExprArray, env: &mut TypeEnv, expected: Option, + recorder: &mut R, ) -> Ty { let (expected_array_elem, expected_tuple_elems) = if let Some(exp_ty) = expected { let store = env.store(); @@ -717,7 +830,7 @@ fn infer_array_expr_base_ty( .as_ref() .and_then(|elems| elems.get(i).copied()) .or(expected_array_elem); - infer_expr_ty_with_expected(&e, env, elem_expected) + infer_expr_ty_impl(&e, env, elem_expected, recorder) }) .collect(); @@ -737,15 +850,18 @@ fn infer_array_expr_base_ty( env.store_mut().array(elem_ty) } -fn infer_array_comp_expr_base_ty( +fn infer_array_comp_expr_base_ty( comp: &jrsonnet_rowan_parser::nodes::ExprArrayComp, env: &mut TypeEnv, + recorder: &mut R, ) -> Ty { env.push_scope(); for comp_spec in comp.comp_specs() { if let jrsonnet_rowan_parser::nodes::CompSpec::ForSpec(for_spec) = comp_spec { - let iter_ty = for_spec.expr().map_or(Ty::ANY, |e| infer_expr_ty(&e, env)); + let iter_ty = for_spec + .expr() + .map_or(Ty::ANY, |e| infer_expr_ty_impl(&e, env, None, recorder)); let elem_ty = { let store = env.store_mut(); @@ -766,15 +882,18 @@ fn infer_array_comp_expr_base_ty( } } - let body_ty = comp.expr().map_or(Ty::ANY, |e| infer_expr_ty(&e, env)); + let body_ty = 
comp + .expr() + .map_or(Ty::ANY, |e| infer_expr_ty_impl(&e, env, None, recorder)); env.pop_scope(); env.store_mut().array(body_ty) } -fn infer_function_expr_base_ty( +fn infer_function_expr_base_ty( func: &jrsonnet_rowan_parser::nodes::ExprFunction, env: &mut TypeEnv, + recorder: &mut R, ) -> Ty { let func_range = func.syntax().text_range(); @@ -822,7 +941,7 @@ fn infer_function_expr_base_ty( env.start_constraint_tracking(¶m_names); env.enter_function(); - let body_ty = infer_expr_ty(&body, env); + let body_ty = infer_expr_ty_impl(&body, env, None, recorder); env.exit_function(); let constraints = env.stop_constraint_tracking_ty(); @@ -865,12 +984,13 @@ fn infer_function_expr_base_ty( result } -fn infer_obj_extend_expr_base_ty( +fn infer_obj_extend_expr_base_ty( extend: &jrsonnet_rowan_parser::nodes::ExprObjExtend, env: &mut TypeEnv, + recorder: &mut R, ) -> Ty { let base_ty = if let Some(base_expr) = extend.lhs_work() { - infer_expr_ty(&base_expr, env) + infer_expr_ty_impl(&base_expr, env, None, recorder) } else { env.store_mut().object_any() }; @@ -887,7 +1007,12 @@ fn infer_obj_extend_expr_base_ty( } }); - let extension_ty = infer_object_type_with_super_ty(extension_body.as_ref(), env, Some(base_ty)); + let extension_ty = infer_object_type_with_super_ty( + extension_body.as_ref(), + env, + Some(base_ty), + &mut |expr, env| infer_expr_ty_impl(expr, env, None, recorder), + ); let (base_data, ext_data) = { let store = env.store(); diff --git a/crates/jrsonnet-lsp-inference/src/object.rs b/crates/jrsonnet-lsp-inference/src/object.rs index 1167d6b6..8f1c495c 100644 --- a/crates/jrsonnet-lsp-inference/src/object.rs +++ b/crates/jrsonnet-lsp-inference/src/object.rs @@ -3,17 +3,20 @@ use jrsonnet_lsp_types::{ FieldDefInterned, FieldVis, FunctionData, ObjectData, ReturnSpec, Ty, TyData, }; -use jrsonnet_rowan_parser::nodes::{Member, ObjBody}; +use jrsonnet_rowan_parser::nodes::{Expr, Member, ObjBody}; use crate::{ env::TypeEnv, - expr::infer_expr_ty, 
helpers::{convert_visibility_ty, extract_field_name, extract_params_with_default_types_ty}, }; /// Infer the type of an object body, returning interned `Ty`. -pub fn infer_object_type_ty(body: Option<&ObjBody>, env: &mut TypeEnv) -> Ty { - infer_object_type_with_super_ty(body, env, None) +pub fn infer_object_type_ty( + body: Option<&ObjBody>, + env: &mut TypeEnv, + infer_expr: &mut impl FnMut(&Expr, &mut TypeEnv) -> Ty, +) -> Ty { + infer_object_type_with_super_ty(body, env, None, infer_expr) } /// Infer the type of an object body with an optional super type, returning interned `Ty`. @@ -23,6 +26,7 @@ pub fn infer_object_type_with_super_ty( body: Option<&ObjBody>, env: &mut TypeEnv, super_type: Option, + infer_expr: &mut impl FnMut(&Expr, &mut TypeEnv) -> Ty, ) -> Ty { let Some(body) = body else { return env.store_mut().object(ObjectData::empty()); @@ -103,7 +107,7 @@ pub fn infer_object_type_with_super_ty( if let Some(field_name) = field.field_name() { if let Some(name_str) = extract_field_name(&field_name) { let field_ty = - field.expr().map_or(Ty::ANY, |e| infer_expr_ty(&e, env)); + field.expr().map_or(Ty::ANY, |e| infer_expr(&e, env)); let visibility = convert_visibility_ty(field.visibility()); final_fields.push(( name_str, diff --git a/crates/jrsonnet-lsp/tests/e2e_annotated_tests.rs b/crates/jrsonnet-lsp/tests/e2e_annotated_tests.rs index 4a4a0409..2c933f8a 100644 --- a/crates/jrsonnet-lsp/tests/e2e_annotated_tests.rs +++ b/crates/jrsonnet-lsp/tests/e2e_annotated_tests.rs @@ -7,7 +7,7 @@ mod framework; -use framework::{check_definition, check_hover}; +use framework::{check, check_definition, check_hover}; // ============================================================================ // Definition and Reference Tests @@ -206,3 +206,110 @@ fn test_hover_exact_number_markdown() { ## ^ hover-exact: `number`", ); } + +// ============================================================================ +// Flow Typing Tests +// 
============================================================================ + +#[test] +fn test_flow_union_guard_refines_both_branches() { + check( + r#"local f(x) = + assert std.isNumber(x) || std.isString(x); + if std.isNumber(x) then + x + 1 +## ^ type: number + else + std.length(x); +## ^ type-exact: string + +f(3) + f("hi")"#, + ); +} + +#[test] +fn test_flow_negated_guard_refines_branches() { + check( + r#"local f(x) = + assert std.isNumber(x) || std.isString(x); + if !std.isNumber(x) then + std.length(x) +## ^ type-exact: string + else + x + 1 +## ^ type: number + +f(3) + f("hi")"#, + ); +} + +#[test] +fn test_flow_null_guard_refines_non_null_branch() { + check( + r"local f(x) = + assert x == null || std.isString(x); + if x != null then + std.length(x) +## ^ type-exact: string + else + 0; + +f(null)", + ); +} + +#[test] +fn test_flow_length_eq_refines_array_to_tuple() { + check( + r"local f(xs) = + assert std.isArray(xs) && std.length(xs) == 3; + xs +##^ type: [any, any, any] + +f([1, 2, 3])", + ); +} + +#[test] +fn test_flow_higher_order_all_map_refines_array_elements() { + check( + r"local f(arr) = + if std.all(std.map(std.isNumber, arr)) then + arr +## ^^^ type-exact: array[number] + else + arr; + +f([1, 2, 3])", + ); +} + +#[test] +fn test_flow_filter_with_predicate_refines_output_array() { + check( + r#"local f(xs) = + assert std.isArray(xs); + assert std.all(std.map(function(x) std.isNumber(x) || std.isString(x), xs)); + local ys = std.filter(std.isNumber, xs); +## ^^ type: array[number | string] + ys +##^^ type: array[number] + +f([1, "x", 2])"#, + ); +} + +#[test] +fn test_flow_object_field_presence_refines_object_shape() { + check( + r#"local f(obj) = + assert std.isObject(obj); + if "foo" in obj then + obj +## ^^^ type: foo + else + 0; + +f({ foo: 1 })"#, + ); +} From 8d49e845ec1e96bfcea36f62db26180105c06a9a Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Tue, 10 Feb 2026 11:52:21 +0000 Subject: [PATCH 050/210] stdlib/evaluator: resolve clippy 
warnings --- cmds/rtk/src/tanka.rs | 36 +-- crates/jrsonnet-evaluator/src/manifest.rs | 4 +- .../jrsonnet-evaluator/src/stdlib/format.rs | 6 +- crates/jrsonnet-stdlib/src/manifest/yaml.rs | 293 ++++++++++-------- 4 files changed, 181 insertions(+), 158 deletions(-) diff --git a/cmds/rtk/src/tanka.rs b/cmds/rtk/src/tanka.rs index 5b207904..1bf11a0f 100644 --- a/cmds/rtk/src/tanka.rs +++ b/cmds/rtk/src/tanka.rs @@ -3,7 +3,7 @@ // Tanka-compatible API accessible via std.native() use std::{ - collections::HashMap, + collections::{HashMap, HashSet}, io::{BufReader, Read, Write}, process::{Command, Stdio}, rc::Rc, @@ -52,7 +52,7 @@ fn get_helm_cache() -> &'static RwLock>> { /// Generate a key for a manifest using the nameFormat template /// This is a simplified implementation that handles the common case where nameFormat /// includes namespace in the key format -fn generate_manifest_key_from_val(val: &Val, name_format: Option<&str>) -> Result { +fn generate_manifest_key_from_val(val: &Val, name_format: Option<&str>) -> String { // Check if we should use nameFormat or default format let use_namespace_in_key = name_format .map(|fmt| fmt.contains("metadata.namespace") || fmt.contains(".or .metadata.namespace")) @@ -94,13 +94,13 @@ fn generate_manifest_key_from_val(val: &Val, name_format: Option<&str>) -> Resul }) .unwrap_or_else(|| "cluster".to_string()); - return Ok(format!("{}_{}_{}", namespace, kind, name)); + return format!("{}_{}_{}", namespace, kind, name); } - return Ok(format!("{}_{}", kind, name)); + return format!("{}_{}", kind, name); } } - Ok("unknown".to_string()) + "unknown".to_string() } /// Parse YAML output from helm into a Val object @@ -117,7 +117,7 @@ fn parse_helm_yaml_output(yaml_content: &str, name_format: Option<&str>) -> Resu }; let documents: Vec = serde_saphyr::from_multiple_with_options(yaml_content, options) .map_err(|e| RuntimeError(format!("failed to parse helm output: {e}").into()))?; - let mut seen_keys = HashMap::new(); + let mut seen_keys 
= HashSet::new(); for val in documents { // Skip null documents @@ -131,16 +131,16 @@ fn parse_helm_yaml_output(yaml_content: &str, name_format: Option<&str>) -> Resu } // Use the nameFormat-aware key generation - let key = generate_manifest_key_from_val(&val, name_format)?; + let key = generate_manifest_key_from_val(&val, name_format); // Check for duplicate keys and add counter if needed let mut final_key = key.clone(); let mut counter = 2; - while seen_keys.contains_key(&final_key) { + while seen_keys.contains(&final_key) { final_key = format!("{}_{}", key, counter); counter += 1; } - seen_keys.insert(final_key.clone(), ()); + seen_keys.insert(final_key.clone()); builder.field(&final_key).try_value(val)?; } @@ -183,7 +183,8 @@ fn helm_cache_key( /// Matches Go Tanka's naming behavior which inserts underscores: /// - Before uppercase letters (CamelCase -> camel_case) /// - Between letter-digit-letter sequences (k8s -> k_8s) -/// Note: Does NOT insert underscore when digit is at word boundary (flux2 stays flux2) +/// +/// Note: Does NOT insert underscore when digit is at word boundary (flux2 stays flux2). fn to_snake_case(s: &str) -> String { let mut result = String::new(); let chars: Vec = s.chars().collect(); @@ -210,10 +211,9 @@ fn to_snake_case(s: &str) -> String { // Look ahead past all consecutive digits to see if there's a letter let has_letter_after_digits = chars[i..] 
.iter() - .skip_while(|c| c.is_ascii_digit()) - .next() - .map(char::is_ascii_alphabetic) - .unwrap_or(false); + .find(|c| !c.is_ascii_digit()) + .copied() + .is_some_and(|c| c.is_ascii_alphabetic()); if has_letter_after_digits { result.push('_'); } @@ -402,7 +402,7 @@ pub fn builtin_tanka_manifest_yaml_from_json(json: String) -> Result { empty_array_as_brackets: true, block_scalar_indent_in_seq: Some(2), // 2 spaces absolute for block scalar body in arrays line_width: None, // go-yaml v3's Marshal() doesn't wrap lines by default - scientific_notation_threshold: Some(1000000), // 1 million - large numbers use scientific notation + scientific_notation_threshold: Some(1_000_000), // 1 million - large numbers use scientific notation scientific_notation_small_threshold: Some(0.0001), // 1e-4 - small numbers use scientific notation (Go yaml.v3) quote_numeric_strings: true, // Quote numeric string keys like "12345" ..Default::default() @@ -832,7 +832,7 @@ pub fn builtin_tanka_kustomize_build(path: String, opts: ObjValue) -> Result = serde_saphyr::from_multiple_with_options(&yaml_content, options) .map_err(|e| RuntimeError(format!("failed to parse kustomize output: {e}").into()))?; - let mut seen_keys = HashMap::new(); + let mut seen_keys = HashSet::new(); for val in documents { // Skip null documents @@ -871,11 +871,11 @@ pub fn builtin_tanka_kustomize_build(path: String, opts: ObjValue) -> Result { - if v == v.floor() { + let floor = v.floor(); + let integer_margin = f64::EPSILON * v.abs().max(1.0); + if (v - floor).abs() <= integer_margin { write!(buf, "{:.0}", v).unwrap(); } else if should_use_go_style_floats() { buf.push_str(&format_float_go_g17(v)); diff --git a/crates/jrsonnet-evaluator/src/stdlib/format.rs b/crates/jrsonnet-evaluator/src/stdlib/format.rs index eb1d0ebf..745c35e7 100644 --- a/crates/jrsonnet-evaluator/src/stdlib/format.rs +++ b/crates/jrsonnet-evaluator/src/stdlib/format.rs @@ -1,6 +1,8 @@ //! 
faster std.format impl #![allow(clippy::too_many_arguments)] +use std::fmt::Write as _; + use jrsonnet_gcmodule::Trace; use jrsonnet_interner::IStr; use jrsonnet_types::ValType; @@ -532,7 +534,7 @@ pub fn format_code( // Check if it's an integer (no fractional part) if n.fract() == 0.0 && n.abs() < 1e15 { // Format as integer without decimal point - tmp_out.push_str(&format!("{:.0}", n)); + write!(tmp_out, "{n:.0}").unwrap(); } else { // Use Go-style %.17g format if enabled, otherwise use Rust's Display (shortest) if manifest::should_use_go_style_floats() { @@ -541,7 +543,7 @@ pub fn format_code( // Use Rust's Display formatting (ryu algorithm) which produces // the shortest decimal representation, avoiding precision artifacts // like 0.80000000000000004 -> 0.8 - tmp_out.push_str(&format!("{}", n)); + write!(tmp_out, "{n}").unwrap(); } } } else { diff --git a/crates/jrsonnet-stdlib/src/manifest/yaml.rs b/crates/jrsonnet-stdlib/src/manifest/yaml.rs index c2f46bd5..98c4b11a 100644 --- a/crates/jrsonnet-stdlib/src/manifest/yaml.rs +++ b/crates/jrsonnet-stdlib/src/manifest/yaml.rs @@ -1,9 +1,10 @@ use std::{borrow::Cow, fmt::Write}; +use jrsonnet_evaluator::val::ArrValue; use jrsonnet_evaluator::{ bail, in_description_frame, manifest::{escape_string_json_buf, ManifestFormat}, - Result, ResultExt, Val, + ObjValue, Result, ResultExt, Val, }; pub struct YamlFormat<'s> { @@ -188,6 +189,158 @@ fn manifest_yaml_ex(val: &Val, options: &YamlFormat<'_>) -> Result { Ok(out) } +fn manifest_yaml_array_buf( + a: &ArrValue, + buf: &mut String, + cur_padding: &mut String, + options: &YamlFormat<'_>, +) -> Result<()> { + let mut had_items = false; + for (i, item) in a.iter().enumerate() { + had_items = true; + let item = item.with_description(|| format!("elem <{i}> evaluation"))?; + if i != 0 { + buf.push('\n'); + buf.push_str(cur_padding); + } + buf.push('-'); + match &item { + Val::Arr(a) if !a.is_empty() => { + // Nested arrays need a newline and extra indentation + buf.push('\n'); 
+ buf.push_str(cur_padding); + buf.push_str(&options.padding); + } + _ => buf.push(' '), + } + // For nested arrays, add padding to cur_padding + let prev_len = cur_padding.len(); + if let Val::Arr(a) = &item { + if !a.is_empty() { + cur_padding.push_str(&options.padding); + } + } + // Objects in arrays need special handling: their fields should + // align with the first field (after "- "), but nested structures + // should not inherit this offset + let is_object_in_array = matches!(&item, Val::Obj(o) if !o.is_empty()); + in_description_frame( + || format!("elem <{i}> manifestification"), + || manifest_yaml_ex_buf(&item, buf, cur_padding, options, is_object_in_array), + )?; + cur_padding.truncate(prev_len); + } + if !had_items { + buf.push_str("[]"); + } + Ok(()) +} + +fn manifest_yaml_object_buf( + o: &ObjValue, + buf: &mut String, + cur_padding: &mut String, + options: &YamlFormat<'_>, + in_array_context: bool, +) -> Result<()> { + let mut had_fields = false; + // Store the base padding BEFORE any in_array_context adjustment. + let base_padding_len = cur_padding.len(); + + // For key alignment: if this object is an array element, keys (except the first) + // need 2 extra spaces to align with the first key (which appears after "- "). + // This offset is ONLY for key alignment, NOT for nested content. 
+ let key_padding = if in_array_context { + let mut kp = cur_padding.clone(); + kp.push_str(" "); + kp + } else { + cur_padding.clone() + }; + + for (i, (key, value)) in o + .iter( + #[cfg(feature = "exp-preserve-order")] + options.preserve_order, + ) + .enumerate() + { + had_fields = true; + let value = value.with_description(|| format!("field <{key}> evaluation"))?; + if i != 0 { + buf.push('\n'); + buf.push_str(&key_padding); + } + if !options.quote_keys && !yaml_needs_quotes(&key) { + buf.push_str(&key); + } else { + escape_string_json_buf(&key, buf); + } + buf.push(':'); + + // For nested content (arrays/objects as values), we need to account for + // whether this object is an array element. If so, the first field starts + // at cur_padding + 2 (after "- "), so nested content should be relative + // to that position. + // + // When in_array_context, we add +2 to account for the "- " prefix, but we + // DON'T add arr_element_padding for arrays - the +2 offset already provides + // the correct indentation. For non-array context, we DO add arr_element_padding. + let content_base = if in_array_context { + let mut base = cur_padding.clone(); + base.push_str(" "); + base + } else { + cur_padding.clone() + }; + + let prev_len = cur_padding.len(); + match &value { + Val::Arr(a) if !a.is_empty() => { + buf.push('\n'); + // For arrays in object fields, use content_base (which includes the + // in_array_context alignment) plus arr_element_padding. 
+ buf.push_str(&content_base); + buf.push_str(&options.arr_element_padding); + // Set cur_padding for nested content + cur_padding.clear(); + cur_padding.push_str(&content_base); + cur_padding.push_str(&options.arr_element_padding); + } + Val::Obj(o) if !o.is_empty() => { + buf.push('\n'); + buf.push_str(&content_base); + buf.push_str(&options.padding); + // Set cur_padding for nested content + cur_padding.clear(); + cur_padding.push_str(&content_base); + cur_padding.push_str(&options.padding); + } + _ => { + buf.push(' '); + // Set cur_padding for block scalar indentation in array context + // This ensures block scalar content is indented relative to key position + if in_array_context { + cur_padding.clear(); + cur_padding.push_str(&content_base); + } + } + } + in_description_frame( + || format!("field <{key}> manifestification"), + || manifest_yaml_ex_buf(&value, buf, cur_padding, options, false), + )?; + cur_padding.truncate(prev_len); + } + // Restore cur_padding to original value + cur_padding.truncate(base_padding_len); + if !had_fields { + buf.push_str("{}"); + } + + Ok(()) +} + #[allow(clippy::too_many_lines)] fn manifest_yaml_ex_buf( val: &Val, @@ -256,142 +409,8 @@ fn manifest_yaml_ex_buf( } #[cfg(feature = "exp-bigint")] Val::BigInt(n) => write!(buf, "{}", *n).unwrap(), - Val::Arr(a) => { - let mut had_items = false; - for (i, item) in a.iter().enumerate() { - had_items = true; - let item = item.with_description(|| format!("elem <{i}> evaluation"))?; - if i != 0 { - buf.push('\n'); - buf.push_str(cur_padding); - } - buf.push('-'); - match &item { - Val::Arr(a) if !a.is_empty() => { - // Nested arrays need a newline and extra indentation - buf.push('\n'); - buf.push_str(cur_padding); - buf.push_str(&options.padding); - } - _ => buf.push(' '), - } - // For nested arrays, add padding to cur_padding - let prev_len = cur_padding.len(); - if let Val::Arr(a) = &item { - if !a.is_empty() { - cur_padding.push_str(&options.padding); - } - } - // Objects in arrays 
need special handling: their fields should - // align with the first field (after "- "), but nested structures - // should not inherit this offset - let is_object_in_array = matches!(&item, Val::Obj(o) if !o.is_empty()); - in_description_frame( - || format!("elem <{i}> manifestification"), - || manifest_yaml_ex_buf(&item, buf, cur_padding, options, is_object_in_array), - )?; - cur_padding.truncate(prev_len); - } - if !had_items { - buf.push_str("[]"); - } - } - Val::Obj(o) => { - let mut had_fields = false; - // Store the base padding BEFORE any in_array_context adjustment. - let base_padding_len = cur_padding.len(); - - // For key alignment: if this object is an array element, keys (except the first) - // need 2 extra spaces to align with the first key (which appears after "- "). - // This offset is ONLY for key alignment, NOT for nested content. - let key_padding = if in_array_context { - let mut kp = cur_padding.clone(); - kp.push_str(" "); - kp - } else { - cur_padding.clone() - }; - - for (i, (key, value)) in o - .iter( - #[cfg(feature = "exp-preserve-order")] - options.preserve_order, - ) - .enumerate() - { - had_fields = true; - let value = value.with_description(|| format!("field <{key}> evaluation"))?; - if i != 0 { - buf.push('\n'); - buf.push_str(&key_padding); - } - if !options.quote_keys && !yaml_needs_quotes(&key) { - buf.push_str(&key); - } else { - escape_string_json_buf(&key, buf); - } - buf.push(':'); - - // For nested content (arrays/objects as values), we need to account for - // whether this object is an array element. If so, the first field starts - // at cur_padding + 2 (after "- "), so nested content should be relative - // to that position. - // - // When in_array_context, we add +2 to account for the "- " prefix, but we - // DON'T add arr_element_padding for arrays - the +2 offset already provides - // the correct indentation. For non-array context, we DO add arr_element_padding. 
- let content_base = if in_array_context { - let mut base = cur_padding.clone(); - base.push_str(" "); - base - } else { - cur_padding.clone() - }; - - let prev_len = cur_padding.len(); - match &value { - Val::Arr(a) if !a.is_empty() => { - buf.push('\n'); - // For arrays in object fields, use content_base (which includes the - // in_array_context alignment) plus arr_element_padding. - buf.push_str(&content_base); - buf.push_str(&options.arr_element_padding); - // Set cur_padding for nested content - cur_padding.clear(); - cur_padding.push_str(&content_base); - cur_padding.push_str(&options.arr_element_padding); - } - Val::Obj(o) if !o.is_empty() => { - buf.push('\n'); - buf.push_str(&content_base); - buf.push_str(&options.padding); - // Set cur_padding for nested content - cur_padding.clear(); - cur_padding.push_str(&content_base); - cur_padding.push_str(&options.padding); - } - _ => { - buf.push(' '); - // Set cur_padding for block scalar indentation in array context - // This ensures block scalar content is indented relative to key position - if in_array_context { - cur_padding.clear(); - cur_padding.push_str(&content_base); - } - } - } - in_description_frame( - || format!("field <{key}> manifestification"), - || manifest_yaml_ex_buf(&value, buf, cur_padding, options, false), - )?; - cur_padding.truncate(prev_len); - } - // Restore cur_padding to original value - cur_padding.truncate(base_padding_len); - if !had_fields { - buf.push_str("{}"); - } - } + Val::Arr(a) => manifest_yaml_array_buf(a, buf, cur_padding, options)?, + Val::Obj(o) => manifest_yaml_object_buf(o, buf, cur_padding, options, in_array_context)?, Val::Func(_) => bail!("tried to manifest function"), } Ok(()) From 0ca14fd7931fc76279112fbabdd795e46cdebed2 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Tue, 10 Feb 2026 12:04:13 +0000 Subject: [PATCH 051/210] lsp-inference/types: narrow function-length flow and comprehensions --- crates/jrsonnet-lsp-inference/src/expr.rs | 57 ++++++++---- 
crates/jrsonnet-lsp-types/src/mut_store.rs | 10 ++- crates/jrsonnet-lsp-types/src/operations.rs | 33 ++++++- crates/jrsonnet-lsp-types/src/store.rs | 33 ++++++- .../jrsonnet-lsp/tests/e2e_annotated_tests.rs | 88 +++++++++++++++++++ 5 files changed, 200 insertions(+), 21 deletions(-) diff --git a/crates/jrsonnet-lsp-inference/src/expr.rs b/crates/jrsonnet-lsp-inference/src/expr.rs index 7e89eeda..f9525aa1 100644 --- a/crates/jrsonnet-lsp-inference/src/expr.rs +++ b/crates/jrsonnet-lsp-inference/src/expr.rs @@ -858,26 +858,40 @@ fn infer_array_comp_expr_base_ty( env.push_scope(); for comp_spec in comp.comp_specs() { - if let jrsonnet_rowan_parser::nodes::CompSpec::ForSpec(for_spec) = comp_spec { - let iter_ty = for_spec - .expr() - .map_or(Ty::ANY, |e| infer_expr_ty_impl(&e, env, None, recorder)); - - let elem_ty = { - let store = env.store_mut(); - match store.get(iter_ty) { - TyData::Array { elem, .. } => elem, - TyData::Tuple { ref elems } => { - let elems_copy: Vec = elems.clone(); - store.union(elems_copy) + match comp_spec { + jrsonnet_rowan_parser::nodes::CompSpec::ForSpec(for_spec) => { + let iter_ty = for_spec + .expr() + .map_or(Ty::ANY, |e| infer_expr_ty_impl(&e, env, None, recorder)); + + let elem_ty = { + let store = env.store_mut(); + match store.get(iter_ty) { + TyData::Array { elem, .. 
} => elem, + TyData::Tuple { ref elems } => { + let elems_copy: Vec = elems.clone(); + store.union(elems_copy) + } + _ if iter_ty == Ty::NEVER => Ty::NEVER, + _ => Ty::ANY, } - _ if iter_ty == Ty::NEVER => Ty::NEVER, - _ => Ty::ANY, - } - }; + }; - if let Some(destruct) = for_spec.bind() { - bind_destruct_with_type_ty(&destruct, elem_ty, env); + if let Some(destruct) = for_spec.bind() { + bind_destruct_with_type_ty(&destruct, elem_ty, env); + } + } + jrsonnet_rowan_parser::nodes::CompSpec::IfSpec(if_spec) => { + let Some(cond) = if_spec.expr() else { + continue; + }; + let _ = infer_expr_ty_impl(&cond, env, None, recorder); + let facts = flow::extract_facts(&cond); + for (var_name, fact) in facts.iter() { + let current_ty = env.lookup(var_name).unwrap_or(Ty::ANY); + let narrowed_ty = fact.apply_to(current_ty, env.store_mut()); + env.define_ty(var_name.clone(), narrowed_ty); + } } } } @@ -1554,6 +1568,13 @@ mod tests { let _ = try_array(&env, ty).expect("expected array"); } + #[test] + fn test_comprehension_if_filter_narrows_loop_var() { + let (ty, env) = infer_doc("local xs = [1, null, 2]; [x for x in xs if x != null]"); + let elem = try_array(&env, ty).expect("expected array"); + assert_eq!(elem, Ty::NUMBER); + } + // Parameter constraint tests #[test] diff --git a/crates/jrsonnet-lsp-types/src/mut_store.rs b/crates/jrsonnet-lsp-types/src/mut_store.rs index 49ec5144..a3c1b1b4 100644 --- a/crates/jrsonnet-lsp-types/src/mut_store.rs +++ b/crates/jrsonnet-lsp-types/src/mut_store.rs @@ -625,7 +625,15 @@ impl MutStore { } } - TyData::Function(_) | TyData::AttrsOf { .. } | TyData::TypeVar { .. } => ty, + TyData::Function(func) => { + if func.required_count() == len { + ty + } else { + Ty::NEVER + } + } + + TyData::AttrsOf { .. } | TyData::TypeVar { .. 
} => ty, TyData::Never | TyData::Null diff --git a/crates/jrsonnet-lsp-types/src/operations.rs b/crates/jrsonnet-lsp-types/src/operations.rs index 051821ac..e408d108 100644 --- a/crates/jrsonnet-lsp-types/src/operations.rs +++ b/crates/jrsonnet-lsp-types/src/operations.rs @@ -745,7 +745,15 @@ pub fn ty_with_len(ty: Ty, len: usize, store: &mut S) -> Ty { } } - TyData::AttrsOf { .. } | TyData::Function(_) | TyData::TypeVar { .. } => ty, + TyData::Function(func) => { + if func.required_count() == len { + ty + } else { + Ty::NEVER + } + } + + TyData::AttrsOf { .. } | TyData::TypeVar { .. } => ty, TyData::String => { if len == 1 { @@ -1397,6 +1405,29 @@ mod tests { let union = store.union(vec![tuple2, tuple3]); assert_eq!(ty_with_len(union, 2, &mut store), tuple2); } + + #[test] + fn test_function_matches_required_arity() { + let mut store = TyStore::new(); + let func = store.function(crate::store::FunctionData { + params: vec![ + crate::store::ParamInterned { + name: "x".to_string(), + ty: Ty::ANY, + has_default: false, + }, + crate::store::ParamInterned { + name: "y".to_string(), + ty: Ty::ANY, + has_default: true, + }, + ], + return_spec: crate::store::ReturnSpec::Fixed(Ty::NUMBER), + variadic: false, + }); + assert_eq!(ty_with_len(func, 1, &mut store), func); + assert_eq!(ty_with_len(func, 2, &mut store), Ty::NEVER); + } } mod ty_with_min_len_tests { diff --git a/crates/jrsonnet-lsp-types/src/store.rs b/crates/jrsonnet-lsp-types/src/store.rs index ca5e5012..6dd2c97e 100644 --- a/crates/jrsonnet-lsp-types/src/store.rs +++ b/crates/jrsonnet-lsp-types/src/store.rs @@ -1374,7 +1374,15 @@ impl TyStore { } } - TyData::Function(_) | TyData::AttrsOf { .. } | TyData::TypeVar { .. } => ty, + TyData::Function(func) => { + if func.required_count() == len { + ty + } else { + Ty::NEVER + } + } + + TyData::AttrsOf { .. } | TyData::TypeVar { .. 
} => ty, TyData::Never | TyData::Null @@ -2419,6 +2427,29 @@ mod tests { assert_eq!(store.with_len(ty, len), expected); } + #[test] + fn test_with_len_function_uses_required_arity() { + let mut store = TyStore::new(); + let func = store.function(FunctionData { + params: vec![ + ParamInterned { + name: "x".to_string(), + ty: Ty::ANY, + has_default: false, + }, + ParamInterned { + name: "y".to_string(), + ty: Ty::ANY, + has_default: true, + }, + ], + return_spec: ReturnSpec::Fixed(Ty::NUMBER), + variadic: false, + }); + assert_eq!(store.with_len(func, 1), func); + assert_eq!(store.with_len(func, 2), Ty::NEVER); + } + mod test_is_subtype_of { use super::*; diff --git a/crates/jrsonnet-lsp/tests/e2e_annotated_tests.rs b/crates/jrsonnet-lsp/tests/e2e_annotated_tests.rs index 2c933f8a..197b896b 100644 --- a/crates/jrsonnet-lsp/tests/e2e_annotated_tests.rs +++ b/crates/jrsonnet-lsp/tests/e2e_annotated_tests.rs @@ -313,3 +313,91 @@ fn test_flow_object_field_presence_refines_object_shape() { f({ foo: 1 })"#, ); } + +#[test] +fn test_flow_length_known_object_refines_impossible_branches() { + check( + r#"local x = { a: 1, b: "hi" }; + +if std.length(x) == 1 then + x +## ^ type-exact: never +else if std.length(x) == 3 then + x +## ^ type-exact: never +else if std.length(x) == 2 then + x.a +## ^ type-exact: number"#, + ); +} + +#[test] +fn test_flow_length_known_function_refines_impossible_branches() { + check( + r"local f(x, y) = y + 1; + +if std.length(f) == 1 then + f +## ^ type-exact: never +else if std.length(f) == 3 then + f +## ^ type-exact: never +else if std.length(f) == 2 then + f +## ^ type: function(", + ); +} + +#[test] +fn test_flow_partial_numeric_predicates_refine_arithmetic_paths() { + check( + r"local f(x) = + if std.isDecimal(x) then + x + 0.5 +## ^ type-exact: number + else if std.isInteger(x) then + x + 1 +## ^ type-exact: number + else + null; + +local n = f(5); +if n == null then 5 else n + 2 +## ^ type-exact: number", + ); +} + +#[test] +fn 
test_flow_filter_map_predicate_refines_output_array() { + check( + r#"local inc(x) = + assert std.isNumber(x); + x + 1; + +local f(xs) = + assert std.isArray(xs); + assert std.all(std.map(function(x) std.isNumber(x) || std.isString(x), xs)); + local ys = std.filterMap(std.isNumber, inc, xs); +## ^^ type: array[number | string] + ys +##^^ type-exact: array[number]; + +f([1, "x", 2])"#, + ); +} + +#[test] +fn test_flow_conditional_comprehension_refines_element_type() { + check( + r#"local f(xs) = + assert std.isArray(xs); + assert std.all(std.map(function(x) x == null || std.isNumber(x), xs)); + local zs = [x for x in xs if x != null]; +## ^^ type-exact: array[number] + local ys = [(if x == null then "no" else x - 1) for x in xs]; +## ^^ type-exact: array[number | string] + { zs: zs, ys: ys }; + +f([1, null, 2])"#, + ); +} From eee7c4abbfb1d1f3a1673cc462c2a0ee93dc41bc Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Tue, 10 Feb 2026 12:07:56 +0000 Subject: [PATCH 052/210] lsp-types: refine function_any arity via std.length flow --- crates/jrsonnet-lsp-types/src/mut_store.rs | 15 +++++++- crates/jrsonnet-lsp-types/src/operations.rs | 31 +++++++++++++++- crates/jrsonnet-lsp-types/src/store.rs | 36 ++++++++++++++++++- .../jrsonnet-lsp/tests/e2e_annotated_tests.rs | 18 ++++++++++ 4 files changed, 97 insertions(+), 3 deletions(-) diff --git a/crates/jrsonnet-lsp-types/src/mut_store.rs b/crates/jrsonnet-lsp-types/src/mut_store.rs index a3c1b1b4..9acc2aca 100644 --- a/crates/jrsonnet-lsp-types/src/mut_store.rs +++ b/crates/jrsonnet-lsp-types/src/mut_store.rs @@ -626,7 +626,20 @@ impl MutStore { } TyData::Function(func) => { - if func.required_count() == len { + if func.variadic && func.params.is_empty() { + let params = (0..len) + .map(|idx| ParamInterned { + name: format!("arg{idx}"), + ty: Ty::ANY, + has_default: false, + }) + .collect(); + self.function(FunctionData { + params, + return_spec: ReturnSpec::Fixed(Ty::ANY), + variadic: false, + }) + } else if 
func.required_count() == len { ty } else { Ty::NEVER diff --git a/crates/jrsonnet-lsp-types/src/operations.rs b/crates/jrsonnet-lsp-types/src/operations.rs index e408d108..2ebc09c5 100644 --- a/crates/jrsonnet-lsp-types/src/operations.rs +++ b/crates/jrsonnet-lsp-types/src/operations.rs @@ -746,7 +746,20 @@ pub fn ty_with_len(ty: Ty, len: usize, store: &mut S) -> Ty { } TyData::Function(func) => { - if func.required_count() == len { + if func.variadic && func.params.is_empty() { + let params = (0..len) + .map(|idx| crate::store::ParamInterned { + name: format!("arg{idx}"), + ty: Ty::ANY, + has_default: false, + }) + .collect(); + store.function(crate::store::FunctionData { + params, + return_spec: crate::store::ReturnSpec::Fixed(Ty::ANY), + variadic: false, + }) + } else if func.required_count() == len { ty } else { Ty::NEVER @@ -1428,6 +1441,22 @@ mod tests { assert_eq!(ty_with_len(func, 1, &mut store), func); assert_eq!(ty_with_len(func, 2, &mut store), Ty::NEVER); } + + #[test] + fn test_function_any_narrows_to_exact_arity() { + let mut store = TyStore::new(); + let func = store.function_any(); + let narrowed = ty_with_len(func, 2, &mut store); + assert_matches!(store.get_data(narrowed), TyData::Function(func_data) => { + assert!(!func_data.variadic); + assert_eq!(func_data.params.len(), 2); + assert_eq!(func_data.params[0].name, "arg0"); + assert_eq!(func_data.params[1].name, "arg1"); + assert!(func_data.params.iter().all(|p| p.ty == Ty::ANY)); + assert!(func_data.params.iter().all(|p| !p.has_default)); + assert_eq!(func_data.return_spec, crate::store::ReturnSpec::Fixed(Ty::ANY)); + }); + } } mod ty_with_min_len_tests { diff --git a/crates/jrsonnet-lsp-types/src/store.rs b/crates/jrsonnet-lsp-types/src/store.rs index 6dd2c97e..1a4dc5b4 100644 --- a/crates/jrsonnet-lsp-types/src/store.rs +++ b/crates/jrsonnet-lsp-types/src/store.rs @@ -1375,7 +1375,20 @@ impl TyStore { } TyData::Function(func) => { - if func.required_count() == len { + if func.variadic && 
func.params.is_empty() { + let params = (0..len) + .map(|idx| ParamInterned { + name: format!("arg{idx}"), + ty: Ty::ANY, + has_default: false, + }) + .collect(); + self.function(FunctionData { + params, + return_spec: ReturnSpec::Fixed(Ty::ANY), + variadic: false, + }) + } else if func.required_count() == len { ty } else { Ty::NEVER @@ -2450,6 +2463,27 @@ mod tests { assert_eq!(store.with_len(func, 2), Ty::NEVER); } + #[test] + fn test_with_len_function_any_narrows_to_exact_arity() { + let mut store = TyStore::new(); + let func_any = store.function_any(); + let narrowed = store.with_len(func_any, 2); + let TyData::Function(func) = store.get(narrowed).clone() else { + panic!("expected function"); + }; + assert!(!func.variadic); + assert_eq!( + func.params + .iter() + .map(|p| p.name.as_str()) + .collect::>(), + vec!["arg0", "arg1"] + ); + assert!(func.params.iter().all(|p| p.ty == Ty::ANY)); + assert!(func.params.iter().all(|p| !p.has_default)); + assert_eq!(func.return_spec, ReturnSpec::Fixed(Ty::ANY)); + } + mod test_is_subtype_of { use super::*; diff --git a/crates/jrsonnet-lsp/tests/e2e_annotated_tests.rs b/crates/jrsonnet-lsp/tests/e2e_annotated_tests.rs index 197b896b..087b1f77 100644 --- a/crates/jrsonnet-lsp/tests/e2e_annotated_tests.rs +++ b/crates/jrsonnet-lsp/tests/e2e_annotated_tests.rs @@ -348,6 +348,24 @@ else if std.length(f) == 2 then ); } +#[test] +fn test_flow_length_unknown_function_refines_arity() { + check( + r"local f(x) = + if std.isFunction(x) then + if std.length(x) == 2 then + x +## ^ type-exact: function(arg0, arg1) + else + x +## ^ type-exact: function() + else + null; + +f(function(a, b) a + b)", + ); +} + #[test] fn test_flow_partial_numeric_predicates_refine_arithmetic_paths() { check( From b4151039d3e9512c7dc1b6c8f16841b6a63422a7 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Tue, 10 Feb 2026 12:09:22 +0000 Subject: [PATCH 053/210] lsp-tests: expand flow typing coverage A bit sparse - we can do better. 
--- .../jrsonnet-lsp/tests/e2e_annotated_tests.rs | 48 +++++++++++++++++++ 1 file changed, 48 insertions(+) diff --git a/crates/jrsonnet-lsp/tests/e2e_annotated_tests.rs b/crates/jrsonnet-lsp/tests/e2e_annotated_tests.rs index 087b1f77..e9cca69c 100644 --- a/crates/jrsonnet-lsp/tests/e2e_annotated_tests.rs +++ b/crates/jrsonnet-lsp/tests/e2e_annotated_tests.rs @@ -385,6 +385,54 @@ if n == null then 5 else n + 2 ); } +#[test] +fn test_flow_literal_string_equality_after_string_assert() { + check( + r#"local f(x) = + assert std.isString(x); + if x == "hi" then + "hey" + else if x == "bye" then + "see ya" + else + x +## ^ type-exact: string + +std.length(f("hello"))"#, + ); +} + +#[test] +fn test_flow_literal_string_equality_partial_without_assert() { + check( + r#"local f(x) = + if x == "hi" then + std.length(x) +## ^ type-exact: "hi" + else if x == "bye" then + std.length(x) +## ^ type-exact: "bye" + else + x +## ^ type-exact: any + +std.length(f("hello"))"#, + ); +} + +#[test] +fn test_flow_function_length_assert_narrows_arity() { + check( + r"local wrap(f) = + assert std.isFunction(f); + assert std.length(f) == 2; + f +## ^ type-exact: function(arg0, arg1) + +wrap(function(a, b) a + b)", + ); +} + #[test] fn test_flow_filter_map_predicate_refines_output_array() { check( From 950dcde24ed8d094e8b0d1c25d482ce0f1dd15f1 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Tue, 10 Feb 2026 12:58:57 +0000 Subject: [PATCH 054/210] lsp-tests: table-drive flow typing e2e cases with rstest macro --- .../jrsonnet-lsp/tests/e2e_annotated_tests.rs | 288 ++++++++---------- 1 file changed, 128 insertions(+), 160 deletions(-) diff --git a/crates/jrsonnet-lsp/tests/e2e_annotated_tests.rs b/crates/jrsonnet-lsp/tests/e2e_annotated_tests.rs index e9cca69c..6968cf76 100644 --- a/crates/jrsonnet-lsp/tests/e2e_annotated_tests.rs +++ b/crates/jrsonnet-lsp/tests/e2e_annotated_tests.rs @@ -8,6 +8,7 @@ mod framework; use framework::{check, check_definition, check_hover}; +use rstest::rstest; // 
============================================================================ // Definition and Reference Tests @@ -211,10 +212,22 @@ fn test_hover_exact_number_markdown() { // Flow Typing Tests // ============================================================================ -#[test] -fn test_flow_union_guard_refines_both_branches() { - check( - r#"local f(x) = +fn check_flow_case(source: &str) { + check(source); +} + +macro_rules! flow_case_table { + ($name:ident { $( $case:ident => $source:expr, )+ }) => { + #[rstest] + $(#[case::$case($source)])+ + fn $name(#[case] source: &str) { + check_flow_case(source); + } + }; +} + +flow_case_table!(test_flow_guards_and_literals { + union_guard_refines_both_branches => r#"local f(x) = assert std.isNumber(x) || std.isString(x); if std.isNumber(x) then x + 1 @@ -224,13 +237,7 @@ fn test_flow_union_guard_refines_both_branches() { ## ^ type-exact: string f(3) + f("hi")"#, - ); -} - -#[test] -fn test_flow_negated_guard_refines_branches() { - check( - r#"local f(x) = + negated_guard_refines_branches => r#"local f(x) = assert std.isNumber(x) || std.isString(x); if !std.isNumber(x) then std.length(x) @@ -240,13 +247,7 @@ fn test_flow_negated_guard_refines_branches() { ## ^ type: number f(3) + f("hi")"#, - ); -} - -#[test] -fn test_flow_null_guard_refines_non_null_branch() { - check( - r"local f(x) = + null_guard_refines_non_null_branch => r"local f(x) = assert x == null || std.isString(x); if x != null then std.length(x) @@ -255,69 +256,72 @@ fn test_flow_null_guard_refines_non_null_branch() { 0; f(null)", - ); -} - -#[test] -fn test_flow_length_eq_refines_array_to_tuple() { - check( - r"local f(xs) = - assert std.isArray(xs) && std.length(xs) == 3; - xs -##^ type: [any, any, any] - -f([1, 2, 3])", - ); -} - -#[test] -fn test_flow_higher_order_all_map_refines_array_elements() { - check( - r"local f(arr) = - if std.all(std.map(std.isNumber, arr)) then - arr -## ^^^ type-exact: array[number] + 
partial_numeric_predicates_refine_arithmetic_paths => r"local f(x) = + if std.isDecimal(x) then + x + 0.5 +## ^ type-exact: number + else if std.isInteger(x) then + x + 1 +## ^ type-exact: number else - arr; + null; -f([1, 2, 3])", - ); -} +local n = f(5); +if n == null then 5 else n + 2 +## ^ type-exact: number", + literal_string_equality_after_string_assert => r#"local f(x) = + assert std.isString(x); + if x == "hi" then + "hey" + else if x == "bye" then + "see ya" + else + x +## ^ type-exact: string -#[test] -fn test_flow_filter_with_predicate_refines_output_array() { - check( - r#"local f(xs) = - assert std.isArray(xs); - assert std.all(std.map(function(x) std.isNumber(x) || std.isString(x), xs)); - local ys = std.filter(std.isNumber, xs); -## ^^ type: array[number | string] - ys -##^^ type: array[number] +std.length(f("hello"))"#, + literal_string_equality_partial_without_assert => r#"local f(x) = + if x == "hi" then + std.length(x) +## ^ type-exact: "hi" + else if x == "bye" then + std.length(x) +## ^ type-exact: "bye" + else + x +## ^ type-exact: any -f([1, "x", 2])"#, - ); -} +std.length(f("hello"))"#, + null_and_length_guard => r#"local f(x) = + assert x == null || std.isString(x); + if x != null && std.length(x) >= 10 then + x +## ^ type-exact: string + else + "Hi"; -#[test] -fn test_flow_object_field_presence_refines_object_shape() { - check( - r#"local f(obj) = - assert std.isObject(obj); - if "foo" in obj then - obj -## ^^^ type: foo +f(null)"#, + null_branch_split => r"local f(x) = + assert x == null || std.isNumber(x); + if x != null then + x +## ^ type-exact: number else - 0; + assert x == null; + x +## ^ type-exact: null -f({ foo: 1 })"#, - ); -} +[f(null), f(3)]", + }); -#[test] -fn test_flow_length_known_object_refines_impossible_branches() { - check( - r#"local x = { a: 1, b: "hi" }; +flow_case_table!(test_flow_length_and_arity { + length_eq_refines_array_to_tuple => r"local f(xs) = + assert std.isArray(xs) && std.length(xs) == 3; + xs +##^ 
type: [any, any, any] + +f([1, 2, 3])", + length_known_object_refines_impossible_branches => r#"local x = { a: 1, b: "hi" }; if std.length(x) == 1 then x @@ -328,13 +332,7 @@ else if std.length(x) == 3 then else if std.length(x) == 2 then x.a ## ^ type-exact: number"#, - ); -} - -#[test] -fn test_flow_length_known_function_refines_impossible_branches() { - check( - r"local f(x, y) = y + 1; + length_known_function_refines_impossible_branches => r"local f(x, y) = y + 1; if std.length(f) == 1 then f @@ -345,13 +343,7 @@ else if std.length(f) == 3 then else if std.length(f) == 2 then f ## ^ type: function(", - ); -} - -#[test] -fn test_flow_length_unknown_function_refines_arity() { - check( - r"local f(x) = + length_unknown_function_refines_arity => r"local f(x) = if std.isFunction(x) then if std.length(x) == 2 then x @@ -363,80 +355,63 @@ fn test_flow_length_unknown_function_refines_arity() { null; f(function(a, b) a + b)", - ); -} - -#[test] -fn test_flow_partial_numeric_predicates_refine_arithmetic_paths() { - check( - r"local f(x) = - if std.isDecimal(x) then - x + 0.5 -## ^ type-exact: number - else if std.isInteger(x) then - x + 1 -## ^ type-exact: number - else - null; + function_length_assert_narrows_arity => r"local wrap(f) = + assert std.isFunction(f); + assert std.length(f) == 2; + f +## ^ type-exact: function(arg0, arg1) -local n = f(5); -if n == null then 5 else n + 2 -## ^ type-exact: number", - ); -} +wrap(function(a, b) a + b)", +}); -#[test] -fn test_flow_literal_string_equality_after_string_assert() { - check( - r#"local f(x) = - assert std.isString(x); - if x == "hi" then - "hey" - else if x == "bye" then - "see ya" +flow_case_table!(test_flow_objects_and_membership { + object_field_presence_refines_object_shape => r#"local f(obj) = + assert std.isObject(obj); + if "foo" in obj then + obj +## ^^^ type: foo else - x -## ^ type-exact: string - -std.length(f("hello"))"#, - ); -} + 0; -#[test] -fn 
test_flow_literal_string_equality_partial_without_assert() { - check( - r#"local f(x) = - if x == "hi" then - std.length(x) -## ^ type-exact: "hi" - else if x == "bye" then - std.length(x) -## ^ type-exact: "bye" +f({ foo: 1 })"#, + object_membership_known_union => r#"local f(b) = + local obj = if b then { foo: 3 } else {}; + if "foo" in obj then + obj +## ^^^ type: foo else + 4; + +[f(true), f(false)]"#, + negated_membership_or_is_conservative => r#"local f(x) = + if std.isObject(x) && "foo" in x && !("foo" in x || "bar" in x) then x -## ^ type-exact: any +## ^ type-exact: object + else + null; -std.length(f("hello"))"#, - ); -} +f({ foo: 1 })"#, +}); -#[test] -fn test_flow_function_length_assert_narrows_arity() { - check( - r"local wrap(f) = - assert std.isFunction(f); - assert std.length(f) == 2; - f -## ^ type-exact: function(arg0, arg1) +flow_case_table!(test_flow_collections { + higher_order_all_map_refines_array_elements => r"local f(arr) = + if std.all(std.map(std.isNumber, arr)) then + arr +## ^^^ type-exact: array[number] + else + arr; -wrap(function(a, b) a + b)", - ); -} +f([1, 2, 3])", + filter_with_predicate_refines_output_array => r#"local f(xs) = + assert std.isArray(xs); + assert std.all(std.map(function(x) std.isNumber(x) || std.isString(x), xs)); + local ys = std.filter(std.isNumber, xs); +## ^^ type: array[number | string] + ys +##^^ type: array[number] -#[test] -fn test_flow_filter_map_predicate_refines_output_array() { - check( - r#"local inc(x) = +f([1, "x", 2])"#, + filter_map_predicate_refines_output_array => r#"local inc(x) = assert std.isNumber(x); x + 1; @@ -449,13 +424,7 @@ local f(xs) = ##^^ type-exact: array[number]; f([1, "x", 2])"#, - ); -} - -#[test] -fn test_flow_conditional_comprehension_refines_element_type() { - check( - r#"local f(xs) = + conditional_comprehension_refines_element_type => r#"local f(xs) = assert std.isArray(xs); assert std.all(std.map(function(x) x == null || std.isNumber(x), xs)); local zs = [x for x in xs if 
x != null]; @@ -465,5 +434,4 @@ fn test_flow_conditional_comprehension_refines_element_type() { { zs: zs, ys: ys }; f([1, null, 2])"#, - ); -} +}); From 7765a16961162cbc975b5754a274fa3c0fdc2f5c Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Tue, 10 Feb 2026 14:41:10 +0000 Subject: [PATCH 055/210] lsp-flow: resolve std aliases and refine nested fact extraction --- crates/jrsonnet-lsp-inference/src/flow.rs | 271 ++++++++++++++---- crates/jrsonnet-lsp-scope/src/lib.rs | 4 + crates/jrsonnet-lsp-scope/src/stdlib.rs | 189 ++++++++++++ .../jrsonnet-lsp/tests/e2e_annotated_tests.rs | 110 ++++++- 4 files changed, 506 insertions(+), 68 deletions(-) create mode 100644 crates/jrsonnet-lsp-scope/src/stdlib.rs diff --git a/crates/jrsonnet-lsp-inference/src/flow.rs b/crates/jrsonnet-lsp-inference/src/flow.rs index 543d6b08..8ca514c1 100644 --- a/crates/jrsonnet-lsp-inference/src/flow.rs +++ b/crates/jrsonnet-lsp-inference/src/flow.rs @@ -100,6 +100,75 @@ impl PrimFact { }), } } + + /// Approximate the complement of this primitive type when starting from `any`. 
+ fn negated_any_ty(self, store: &mut MutStore) -> Ty { + let array_any = store.array(Ty::ANY); + let object_any = store.object(ObjectData::open()); + let function_any = store.function(FunctionData { + params: vec![], + return_spec: ReturnSpec::Fixed(Ty::ANY), + variadic: true, + }); + match self { + PrimFact::Null => store.union(vec![ + Ty::BOOL, + Ty::NUMBER, + Ty::STRING, + array_any, + object_any, + function_any, + ]), + PrimFact::Bool => store.union(vec![ + Ty::NULL, + Ty::NUMBER, + Ty::STRING, + array_any, + object_any, + function_any, + ]), + PrimFact::Number => store.union(vec![ + Ty::NULL, + Ty::BOOL, + Ty::STRING, + array_any, + object_any, + function_any, + ]), + PrimFact::String => store.union(vec![ + Ty::NULL, + Ty::BOOL, + Ty::NUMBER, + array_any, + object_any, + function_any, + ]), + PrimFact::Array => store.union(vec![ + Ty::NULL, + Ty::BOOL, + Ty::NUMBER, + Ty::STRING, + object_any, + function_any, + ]), + PrimFact::Object => store.union(vec![ + Ty::NULL, + Ty::BOOL, + Ty::NUMBER, + Ty::STRING, + array_any, + function_any, + ]), + PrimFact::Function => store.union(vec![ + Ty::NULL, + Ty::BOOL, + Ty::NUMBER, + Ty::STRING, + array_any, + object_any, + ]), + } + } } impl Fact { @@ -418,10 +487,26 @@ impl FactRepr { } } - FactRepr::HasField { .. 
} => { - // Negating "has field" doesn't narrow the type in a useful way - // (the object might just not have that field) - ty + FactRepr::HasField { field, field_type } => { + let negated_field_ty = match field_type { + Some(inner) => match &inner.repr { + FactRepr::Prim(prim, Totality::Total) => prim.negated_any_ty(store), + _ => inner.apply_negated(Ty::ANY, store), + }, + None => Ty::NEVER, + }; + let constraint = store.object(ObjectData { + fields: vec![( + field.clone(), + FieldDefInterned { + ty: negated_field_ty, + required: true, + visibility: FieldVis::Normal, + }, + )], + has_unknown: true, + }); + store.narrow(ty, constraint) } FactRepr::HasLen(_) => { @@ -569,6 +654,7 @@ impl Facts { } } +use jrsonnet_lsp_scope::{expr_resolves_to_builtin_std, var_resolves_to_builtin_std}; use jrsonnet_rowan_parser::{ nodes::{ ArgsDesc, BinaryOperatorKind, Expr, ExprBase, ExprCall, LiteralKind, UnaryOperatorKind, @@ -744,6 +830,16 @@ struct StdMethodCall { args: ArgsDesc, } +/// Reference to a variable with optional field path. +/// +/// For `x.a.b`, this stores: +/// - `var_name = "x"` +/// - `path = ["b", "a"]` (innermost-to-outermost field order) +struct VarRef { + var_name: String, + path: Vec, +} + /// Check if an `ExprCall` is a call to std.methodName and extract the method name and args. fn extract_std_method_call(call: &ExprCall) -> Option { let callee = call.callee()?; @@ -751,12 +847,8 @@ fn extract_std_method_call(call: &ExprCall) -> Option { return None; }; - // Check base is std let base = field.base()?; - let ExprBase::ExprVar(var) = base.expr_base()? 
else { - return None; - }; - if var.name()?.ident_lit()?.text() != "std" { + if !expr_resolves_to_builtin_std(&base) { return None; } @@ -776,25 +868,39 @@ fn extract_std_method_from_binary( extract_std_method_call(&call) } +fn with_var_path(path: Vec, fact: Fact) -> Fact { + path.into_iter() + .fold(fact, |inner, field| Fact::has_field_typed(field, inner)) +} + +fn bind_fact_to_expr(expr: &Expr, fact: Fact) -> Option<(String, Fact)> { + let var_ref = extract_var_ref(expr)?; + let fact = with_var_path(var_ref.path, fact); + Some((var_ref.var_name, fact)) +} + /// Check for std.isX(var) or std.objectHas(var, "field") pattern in an `ExprCall`. fn check_std_call(call: &ExprCall) -> Option<(String, Fact)> { let std_call = extract_std_method_call(call)?; // Try std.isX patterns first if let Some(fact) = unary_std_fn_fact(&std_call.method_name) { - let var_name = extract_single_arg_var_name(&std_call.args)?; - return Some((var_name, fact)); + let arg_expr = extract_single_arg_expr(&std_call.args)?; + return bind_fact_to_expr(&arg_expr, fact); } // Try std.objectHas(obj, "field") - if std_call.method_name == "objectHas" || std_call.method_name == "objectHasAll" { + if std_call.method_name == "objectHas" + || std_call.method_name == "objectHasAll" + || std_call.method_name == "objectHasEx" + { let arg_list: Vec<_> = std_call.args.args().collect(); - let [obj_arg, field_arg] = arg_list.as_slice() else { + let ([obj_arg, field_arg] | [obj_arg, field_arg, ..]) = arg_list.as_slice() else { return None; }; - let var_name = extract_var_name(&obj_arg.expr()?)?; + let obj_expr = obj_arg.expr()?; let field_name = extract_string_literal(&field_arg.expr()?)?; - return Some((var_name, Fact::has_field(field_name))); + return bind_fact_to_expr(&obj_expr, Fact::has_field(field_name)); } // Try std.all(std.map(predicate, arr)) pattern @@ -837,9 +943,7 @@ fn check_higher_order_predicate(all_args: &ArgsDesc) -> Option<(String, Fact)> { // Second arg is the array variable let arr_expr = 
arr_arg.expr()?; - let var_name = extract_var_name(&arr_expr)?; - - Some((var_name, elem_fact)) + bind_fact_to_expr(&arr_expr, elem_fact) } /// Extract the element type fact from a predicate expression. @@ -853,12 +957,8 @@ fn extract_predicate_element_fact(pred: &Expr) -> Option { return None; }; - // Check base is std let base = field.base()?; - let ExprBase::ExprVar(var) = base.expr_base()? else { - return None; - }; - if var.name()?.ident_lit()?.text() != "std" { + if !expr_resolves_to_builtin_std(&base) { return None; } @@ -911,7 +1011,6 @@ fn check_null_equality( null_side: &Expr, totality: Totality, ) -> Option<(String, Fact)> { - let var_name = extract_var_name(var_side)?; if !is_null_literal(null_side) { return None; } @@ -922,28 +1021,25 @@ fn check_null_equality( } else { Fact::null() }; - Some((var_name, fact)) + bind_fact_to_expr(var_side, fact) } /// Check for var == "literal" pattern. fn check_literal_string_equality(var_side: &Expr, literal_side: &Expr) -> Option<(String, Fact)> { - let var_name = extract_var_name(var_side)?; let literal_value = extract_string_literal(literal_side)?; - Some((var_name, Fact::literal_string(literal_value))) + bind_fact_to_expr(var_side, Fact::literal_string(literal_value)) } /// Check for var == true/false pattern. fn check_literal_bool_equality(var_side: &Expr, literal_side: &Expr) -> Option<(String, Fact)> { - let var_name = extract_var_name(var_side)?; let literal_value = extract_bool_literal(literal_side)?; - Some((var_name, Fact::literal_bool(literal_value))) + bind_fact_to_expr(var_side, Fact::literal_bool(literal_value)) } /// Check for "field" in obj pattern. fn check_in_operator(lhs: &Expr, rhs: &Expr) -> Option<(String, Fact)> { let field_name = extract_string_literal(lhs)?; - let var_name = extract_var_name(rhs)?; - Some((var_name, Fact::has_field(field_name))) + bind_fact_to_expr(rhs, Fact::has_field(field_name)) } /// Check for std.type(x) == "typename" pattern. 
@@ -956,7 +1052,7 @@ fn check_std_type_comparison( return None; } - let var_name = extract_single_arg_var_name(&call.args)?; + let arg_expr = extract_single_arg_expr(&call.args)?; let type_str = extract_string_literal(rhs)?; let fact = match type_str.as_str() { @@ -970,7 +1066,7 @@ fn check_std_type_comparison( _ => return None, }; - Some((var_name, fact)) + bind_fact_to_expr(&arg_expr, fact) } fn non_negative_integral_usize(value: f64) -> Option { @@ -990,7 +1086,7 @@ fn check_std_length_comparison( return None; } - let var_name = extract_single_arg_var_name(&call.args)?; + let arg_expr = extract_single_arg_expr(&call.args)?; let len = extract_number_literal(rhs)?; // Length must be a non-negative integer @@ -999,7 +1095,7 @@ fn check_std_length_comparison( } let len = non_negative_integral_usize(len)?; - Some((var_name, Fact::has_len(len))) + bind_fact_to_expr(&arg_expr, Fact::has_len(len)) } /// Check for std.length(x) != 0 pattern (non-empty). @@ -1012,7 +1108,7 @@ fn check_std_length_not_zero( return None; } - let var_name = extract_single_arg_var_name(&call.args)?; + let arg_expr = extract_single_arg_expr(&call.args)?; let len = extract_number_literal(rhs)?; // Only handle != 0 @@ -1020,7 +1116,7 @@ fn check_std_length_not_zero( return None; } - Some((var_name, Fact::min_len(1))) + bind_fact_to_expr(&arg_expr, Fact::min_len(1)) } /// Check for std.length(x) > n pattern. @@ -1033,7 +1129,7 @@ fn check_std_length_greater( return None; } - let var_name = extract_single_arg_var_name(&call.args)?; + let arg_expr = extract_single_arg_expr(&call.args)?; let len = extract_number_literal(rhs)?; // Length must be a non-negative integer @@ -1043,7 +1139,7 @@ fn check_std_length_greater( // std.length(x) > n means length >= n+1 let len = non_negative_integral_usize(len)?; - Some((var_name, Fact::min_len(len + 1))) + bind_fact_to_expr(&arg_expr, Fact::min_len(len + 1)) } /// Check for std.length(x) >= n pattern. 
@@ -1056,7 +1152,7 @@ fn check_std_length_greater_eq( return None; } - let var_name = extract_single_arg_var_name(&call.args)?; + let arg_expr = extract_single_arg_expr(&call.args)?; let len = extract_number_literal(rhs)?; // Length must be a non-negative integer @@ -1065,7 +1161,7 @@ fn check_std_length_greater_eq( } let len = non_negative_integral_usize(len)?; - Some((var_name, Fact::min_len(len))) + bind_fact_to_expr(&arg_expr, Fact::min_len(len)) } /// Extract a number literal value from an expression. @@ -1080,30 +1176,55 @@ fn extract_number_literal(expr: &Expr) -> Option { text.parse().ok() } -/// Extract variable name from a single-argument function call. -fn extract_single_arg_var_name(args: &ArgsDesc) -> Option { +/// Extract single argument expression from a single-argument function call. +fn extract_single_arg_expr(args: &ArgsDesc) -> Option { let arg_list: Vec<_> = args.args().collect(); let [arg] = arg_list.as_slice() else { return None; }; - let arg_expr = arg.expr()?; - extract_var_name(&arg_expr) + arg.expr() } -/// Extract a variable name from an expression if it's a simple variable reference. -fn extract_var_name(expr: &Expr) -> Option { - let base = expr.expr_base()?; - let ExprBase::ExprVar(var) = base else { - return None; - }; - let name_node = var.name()?; - let ident = name_node.ident_lit()?; - let name = ident.text(); - // Don't count std as a variable for narrowing - if name == "std" { - return None; +/// Extract a variable with optional field path. 
+/// +/// Supports: +/// - `x` -> `VarRef { x, [] }` +/// - `x.a` -> `VarRef { x, ["a"] }` +/// - `x.a.b` -> `VarRef { x, ["b", "a"] }` +/// - `x["a"]` -> `VarRef { x, ["a"] }` +fn extract_var_ref(expr: &Expr) -> Option { + let mut current = expr.clone(); + let mut path = Vec::new(); + + loop { + let base = current.expr_base()?; + match base { + ExprBase::ExprVar(var) => { + if var_resolves_to_builtin_std(&var) { + return None; + } + let name = var.name()?.ident_lit()?.text().to_string(); + return Some(VarRef { + var_name: name, + path, + }); + } + ExprBase::ExprField(field) => { + let field_name = field.field()?.ident_lit()?.text().to_string(); + path.push(field_name); + current = field.base()?; + } + ExprBase::ExprIndex(index) => { + let field_name = extract_string_literal(&index.index()?)?; + path.push(field_name); + current = index.base()?; + } + ExprBase::ExprParened(parens) => { + current = parens.expr()?; + } + _ => return None, + } } - Some(name.to_string()) } /// Extract a string literal value from an expression. 
@@ -1354,6 +1475,30 @@ mod tests { assert_eq!(result, expected); } + #[test] + fn test_extract_std_call_through_alias() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let facts = parse_and_extract("local s = std; s.isNumber(x)"); + let x_fact = facts.get("x").expect("should have fact for x"); + let result = x_fact.apply_to(Ty::ANY, &mut store); + assert_eq!(result, Ty::NUMBER); + } + + #[test] + fn test_extract_std_call_through_alias_chain() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let facts = parse_and_extract("local s = std; local t = s; t.isString(x)"); + let x_fact = facts.get("x").expect("should have fact for x"); + let result = x_fact.apply_to(Ty::ANY, &mut store); + assert_eq!(result, Ty::STRING); + } + + #[test] + fn test_extract_std_call_shadowed_std_is_not_builtin() { + let facts = parse_and_extract("local std = { isNumber(v): true }; std.isNumber(x)"); + assert_eq!(facts.get("x"), None); + } + #[test] fn test_extract_std_is_array() { let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); @@ -1412,6 +1557,16 @@ mod tests { assert_eq!(result, expected); } + #[test] + fn test_extract_std_object_has_ex() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let facts = parse_and_extract(r#"std.objectHasEx(obj, "field", true)"#); + let obj_fact = facts.get("obj").expect("should have fact for obj"); + let result = obj_fact.apply_to(Ty::ANY, &mut store); + let expected = store.object(object_with_field("field", Ty::ANY, true)); + assert_eq!(result, expected); + } + #[test] fn test_extract_in_operator() { let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); diff --git a/crates/jrsonnet-lsp-scope/src/lib.rs b/crates/jrsonnet-lsp-scope/src/lib.rs index 596edd12..bfd5317e 100644 --- a/crates/jrsonnet-lsp-scope/src/lib.rs +++ b/crates/jrsonnet-lsp-scope/src/lib.rs @@ -9,6 +9,7 @@ pub mod bindings; pub mod resolver; +pub mod stdlib; 
pub use bindings::{is_at_file_scope, is_definition_site, is_renameable, is_variable_reference}; pub use resolver::{ @@ -16,3 +17,6 @@ pub use resolver::{ find_all_references_for_rename, find_definition_range, references_definition, ScopeIndex, ScopeResolver, }; +pub use stdlib::{ + expr_resolves_to_builtin_std, ident_resolves_to_builtin_std, var_resolves_to_builtin_std, +}; diff --git a/crates/jrsonnet-lsp-scope/src/stdlib.rs b/crates/jrsonnet-lsp-scope/src/stdlib.rs new file mode 100644 index 00000000..b829f3da --- /dev/null +++ b/crates/jrsonnet-lsp-scope/src/stdlib.rs @@ -0,0 +1,189 @@ +//! Standard-library (`std`) binding resolution helpers. +//! +//! These helpers determine whether an expression resolves to the built-in +//! Jsonnet `std` object, following lexical scope and aliases. + +use jrsonnet_rowan_parser::{ + nodes::{BindDestruct, Expr, ExprBase, ExprVar}, + AstNode, SyntaxKind, SyntaxToken, +}; +use rowan::TextRange; +use rustc_hash::FxHashSet; + +use crate::find_definition_range; + +/// Return true if this expression resolves to the built-in `std` object. +#[must_use] +pub fn expr_resolves_to_builtin_std(expr: &Expr) -> bool { + let mut seen_defs = FxHashSet::default(); + expr_resolves_to_builtin_std_inner(expr, &mut seen_defs) +} + +/// Return true if this variable expression resolves to the built-in `std` object. +#[must_use] +pub fn var_resolves_to_builtin_std(var: &ExprVar) -> bool { + let Some(ident) = var.name().and_then(|name| name.ident_lit()) else { + return false; + }; + + let mut seen_defs = FxHashSet::default(); + ident_resolves_to_builtin_std_inner(&ident, &mut seen_defs) +} + +/// Return true if this identifier token resolves to the built-in `std` object. 
+#[must_use] +pub fn ident_resolves_to_builtin_std(ident: &SyntaxToken) -> bool { + if ident.kind() != SyntaxKind::IDENT { + return false; + } + + let mut seen_defs = FxHashSet::default(); + ident_resolves_to_builtin_std_inner(ident, &mut seen_defs) +} + +fn expr_resolves_to_builtin_std_inner(expr: &Expr, seen_defs: &mut FxHashSet) -> bool { + let Some(base) = expr.expr_base() else { + return false; + }; + + match base { + ExprBase::ExprParened(parens) => { + let Some(inner) = parens.expr() else { + return false; + }; + expr_resolves_to_builtin_std_inner(&inner, seen_defs) + } + ExprBase::ExprVar(var) => { + let Some(ident) = var.name().and_then(|name| name.ident_lit()) else { + return false; + }; + ident_resolves_to_builtin_std_inner(&ident, seen_defs) + } + _ => false, + } +} + +fn ident_resolves_to_builtin_std_inner( + ident: &SyntaxToken, + seen_defs: &mut FxHashSet, +) -> bool { + let name = ident.text(); + let def_range = find_definition_range(ident, name); + + // Bare `std` with no local definition resolves to builtin std. + if name == "std" && def_range.is_none() { + return true; + } + + let Some(def_range) = def_range else { + return false; + }; + if !seen_defs.insert(def_range) { + // Cycle detected in alias chain. 
+ return false; + } + + let Some(root) = ident.parent_ancestors().last() else { + return false; + }; + let Some(def_node) = root + .descendants() + .find(|node| node.text_range() == def_range) + else { + return false; + }; + let Some(bind_node) = def_node.ancestors().find(|node| { + matches!( + node.kind(), + SyntaxKind::BIND_DESTRUCT | SyntaxKind::BIND_FUNCTION + ) + }) else { + return false; + }; + let Some(bind_destruct) = BindDestruct::cast(bind_node) else { + return false; + }; + let Some(value_expr) = bind_destruct.value() else { + return false; + }; + expr_resolves_to_builtin_std_inner(&value_expr, seen_defs) +} + +#[cfg(test)] +mod tests { + use jrsonnet_lsp_document::{DocVersion, Document}; + use jrsonnet_rowan_parser::{nodes::ExprBase, AstNode}; + use rowan::NodeOrToken; + + use super::*; + + fn make_doc(code: &str) -> Document { + Document::new(code.to_string(), DocVersion::new(1)) + } + + fn token_for_ident(code: &str, needle: &str, occurrence: usize) -> SyntaxToken { + let doc = make_doc(code); + let ast = doc.ast(); + ast.syntax() + .descendants_with_tokens() + .filter_map(NodeOrToken::into_token) + .filter(|token| token.kind() == SyntaxKind::IDENT && token.text() == needle) + .nth(occurrence) + .expect("identifier token") + } + + fn root_field_base_expr(code: &str) -> Expr { + let doc = make_doc(code); + let ast = doc.ast(); + let expr = ast.expr().expect("root expr"); + let ExprBase::ExprField(field) = expr.expr_base().expect("root base") else { + panic!("expected field expression"); + }; + field.base().expect("field base") + } + + #[test] + fn test_ident_resolves_builtin_std_direct() { + let ident = token_for_ident("std.length", "std", 0); + assert!(ident_resolves_to_builtin_std(&ident)); + } + + #[test] + fn test_ident_resolves_builtin_std_alias() { + let ident = token_for_ident("local s = std; s.length", "s", 1); + assert!(ident_resolves_to_builtin_std(&ident)); + } + + #[test] + fn test_ident_resolves_builtin_std_alias_chain() { + let ident = 
token_for_ident("local s = std; local t = s; t.length", "t", 1); + assert!(ident_resolves_to_builtin_std(&ident)); + } + + #[test] + fn test_ident_resolves_builtin_std_shadowed_std() { + let ident = token_for_ident("local std = { length(x): x }; std.length", "std", 1); + assert!(!ident_resolves_to_builtin_std(&ident)); + } + + #[test] + fn test_ident_resolves_builtin_std_cycle() { + let ident = token_for_ident("local a = b; local b = a; a.length", "a", 2); + assert!(!ident_resolves_to_builtin_std(&ident)); + } + + #[test] + fn test_expr_resolves_builtin_std_parenthesized_alias() { + let base = root_field_base_expr("local s = std; (s).length"); + assert!(expr_resolves_to_builtin_std(&base)); + } + + #[test] + fn test_var_resolves_builtin_std_shadowed() { + let base = root_field_base_expr("local std = { length(x): x }; std.length"); + let ExprBase::ExprVar(var) = base.expr_base().expect("base kind") else { + panic!("expected expr var"); + }; + assert!(!var_resolves_to_builtin_std(&var)); + } +} diff --git a/crates/jrsonnet-lsp/tests/e2e_annotated_tests.rs b/crates/jrsonnet-lsp/tests/e2e_annotated_tests.rs index 6968cf76..c61c5107 100644 --- a/crates/jrsonnet-lsp/tests/e2e_annotated_tests.rs +++ b/crates/jrsonnet-lsp/tests/e2e_annotated_tests.rs @@ -227,7 +227,7 @@ macro_rules! 
flow_case_table { } flow_case_table!(test_flow_guards_and_literals { - union_guard_refines_both_branches => r#"local f(x) = +union_guard_refines_both_branches => r#"local f(x) = assert std.isNumber(x) || std.isString(x); if std.isNumber(x) then x + 1 @@ -237,7 +237,7 @@ flow_case_table!(test_flow_guards_and_literals { ## ^ type-exact: string f(3) + f("hi")"#, - negated_guard_refines_branches => r#"local f(x) = +negated_guard_refines_branches => r#"local f(x) = assert std.isNumber(x) || std.isString(x); if !std.isNumber(x) then std.length(x) @@ -247,7 +247,7 @@ f(3) + f("hi")"#, ## ^ type: number f(3) + f("hi")"#, - null_guard_refines_non_null_branch => r"local f(x) = +null_guard_refines_non_null_branch => r"local f(x) = assert x == null || std.isString(x); if x != null then std.length(x) @@ -256,7 +256,7 @@ f(3) + f("hi")"#, 0; f(null)", - partial_numeric_predicates_refine_arithmetic_paths => r"local f(x) = +partial_numeric_predicates_refine_arithmetic_paths => r"local f(x) = if std.isDecimal(x) then x + 0.5 ## ^ type-exact: number @@ -269,7 +269,7 @@ f(null)", local n = f(5); if n == null then 5 else n + 2 ## ^ type-exact: number", - literal_string_equality_after_string_assert => r#"local f(x) = +literal_string_equality_after_string_assert => r#"local f(x) = assert std.isString(x); if x == "hi" then "hey" @@ -280,7 +280,7 @@ if n == null then 5 else n + 2 ## ^ type-exact: string std.length(f("hello"))"#, - literal_string_equality_partial_without_assert => r#"local f(x) = +literal_string_equality_partial_without_assert => r#"local f(x) = if x == "hi" then std.length(x) ## ^ type-exact: "hi" @@ -292,7 +292,7 @@ std.length(f("hello"))"#, ## ^ type-exact: any std.length(f("hello"))"#, - null_and_length_guard => r#"local f(x) = +null_and_length_guard => r#"local f(x) = assert x == null || std.isString(x); if x != null && std.length(x) >= 10 then x @@ -301,7 +301,7 @@ std.length(f("hello"))"#, "Hi"; f(null)"#, - null_branch_split => r"local f(x) = +null_branch_split => 
r"local f(x) = assert x == null || std.isNumber(x); if x != null then x @@ -312,7 +312,7 @@ f(null)"#, ## ^ type-exact: null [f(null), f(3)]", - }); +}); flow_case_table!(test_flow_length_and_arity { length_eq_refines_array_to_tuple => r"local f(xs) = @@ -362,6 +362,42 @@ f(function(a, b) a + b)", ## ^ type-exact: function(arg0, arg1) wrap(function(a, b) a + b)", + length_unknown_object_composition_refines_by_shape => r#"local f(x) = + if std.isObject(x) then + if "a" in x && std.isString(x.a) then + if "b" in x && std.isNumber(x.b) then + if std.length(x) == 2 then + std.length(x.a) + x.b +## ^^^^^^^^^^^^^^^^^^^^^^^^ type-exact: number + else if std.length(x) == 1 then + x +## ^ type-exact: never + else if std.length(x) == 3 then + x.b +## ^ type-exact: number + else + x.b +## ^ type-exact: number + else + 0 + else + 0 + else + 0; + +f({ a: "hello", b: 4 })"#, + length_unknown_function_allows_typed_call_site => r"local f(x) = + if std.isFunction(x) then + if std.length(x) == 2 then + x(3, 5) +## ^ type: function(arg0, arg1) + else + x +## ^ type-exact: function() + else + x; + +f(function(a, b) a + b)", }); flow_case_table!(test_flow_objects_and_membership { @@ -383,7 +419,48 @@ f({ foo: 1 })"#, 4; [f(true), f(false)]"#, - negated_membership_or_is_conservative => r#"local f(x) = + object_field_type_guards_refine_nested_paths => r#"local f(obj) = + assert std.isObject(obj); + if "a" in obj then + if std.isNumber(obj.a) then + obj.a + 7 +## ^^^ type-exact: number + else + assert !std.isBoolean(obj.a) && obj.a != null; + std.length(obj.a) +## ^ type: string +## ^ type: array +## ^ type: object +## ^ type: function + else + std.length(obj); + +[f({ b: null }), f({ a: "hello" }), f({ a: 4 })]"#, + negated_membership_and_is_precise => r#"local f(x) = + if std.isObject(x) && "foo" in x && !("foo" in x && "bar" in x) then + x.foo +## ^^^ type-exact: any + else + null; + +f({ foo: 1 })"#, + negated_membership_or_eliminates_required_field => r#"local f(x) = + if 
std.isObject(x) && "foo" in x && !("foo" in x || "bar" in x) then + x.foo +## ^^^ type-exact: never + else + null; + +f({ foo: 1 })"#, + negated_membership_or_with_length_still_eliminates_field => r#"local f(x) = + if std.isObject(x) && "foo" in x && !("foo" in x || std.length(x) == 5) then + x.foo +## ^^^ type-exact: never + else + null; + +f({ foo: 1 })"#, + negated_membership_or_is_conservative => r#"local f(x) = if std.isObject(x) && "foo" in x && !("foo" in x || "bar" in x) then x ## ^ type-exact: object @@ -391,6 +468,19 @@ f({ foo: 1 })"#, null; f({ foo: 1 })"#, + object_field_literal_chain_preserves_base_type => r#"local f(x) = + assert std.isObject(x) && std.isString(x.t); + if x.t == "foo" then + 1 + else if x.t == "bar" then + 2 + else if x.t == "quz" then + 3 + else + std.length(x.t) +## ^ type-exact: string + +f({ t: "abc" })"#, }); flow_case_table!(test_flow_collections { From e65ff0cd3b208b6eb69d991f290b628c2369e390 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Tue, 10 Feb 2026 14:41:43 +0000 Subject: [PATCH 056/210] lsp: route stdlib detection through semantic builtin-std resolution --- Cargo.lock | 1 + crates/jrsonnet-lsp-check/Cargo.toml | 1 + crates/jrsonnet-lsp-check/src/lint.rs | 15 ++++- crates/jrsonnet-lsp-check/src/type_check.rs | 57 ++++++++++------ .../src/completion/fields.rs | 24 ++++--- .../src/completion/helpers.rs | 10 --- .../src/completion/mod.rs | 55 +++++++++++++++ .../src/completion/stdlib.rs | 67 ++++++------------- crates/jrsonnet-lsp-handlers/src/hover.rs | 42 ++++++++++-- .../src/semantic_tokens.rs | 36 ++++------ .../jrsonnet-lsp-inference/src/const_eval.rs | 18 +++-- crates/jrsonnet-lsp-inference/src/expr.rs | 18 +++-- crates/jrsonnet-lsp-inference/src/helpers.rs | 9 +-- 13 files changed, 212 insertions(+), 141 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 77da9964..5fdb939f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1852,6 +1852,7 @@ dependencies = [ "assert_matches", "jrsonnet-lsp-document", 
"jrsonnet-lsp-inference", + "jrsonnet-lsp-scope", "jrsonnet-lsp-stdlib", "jrsonnet-lsp-types", "jrsonnet-rowan-parser", diff --git a/crates/jrsonnet-lsp-check/Cargo.toml b/crates/jrsonnet-lsp-check/Cargo.toml index ff088556..3ebe9344 100644 --- a/crates/jrsonnet-lsp-check/Cargo.toml +++ b/crates/jrsonnet-lsp-check/Cargo.toml @@ -10,6 +10,7 @@ description = "Type checking and linting for jrsonnet LSP" [dependencies] jrsonnet-lsp-document = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-document" } jrsonnet-lsp-inference = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-inference" } +jrsonnet-lsp-scope = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-scope" } jrsonnet-lsp-stdlib = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-stdlib" } jrsonnet-lsp-types = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-types" } jrsonnet-rowan-parser = { version = "0.5.0-pre97", path = "../jrsonnet-rowan-parser" } diff --git a/crates/jrsonnet-lsp-check/src/lint.rs b/crates/jrsonnet-lsp-check/src/lint.rs index d649870c..e4bd3c96 100644 --- a/crates/jrsonnet-lsp-check/src/lint.rs +++ b/crates/jrsonnet-lsp-check/src/lint.rs @@ -7,6 +7,7 @@ use jrsonnet_lsp_document::{to_lsp_range, Document, LineIndex}; use jrsonnet_lsp_inference::{infer_expr_ty, TypeAnalysis, TypeEnv}; +use jrsonnet_lsp_scope::ident_resolves_to_builtin_std; use jrsonnet_lsp_types::Ty; use jrsonnet_rowan_parser::{ nodes::{ @@ -751,7 +752,7 @@ fn collect_definitions_and_references( if let Some(var) = ExprVar::cast(node.clone()) { if let Some(name) = var.name().and_then(|n| n.ident_lit()) { let text = name.text().to_string(); - if text != "std" { + if !ident_resolves_to_builtin_std(&name) { references.insert(text); } } @@ -1107,6 +1108,18 @@ mod tests { ); } + #[test] + fn test_shadowed_std_reference_counts_for_unused_analysis() { + let code = "local std = 1; std"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let config = 
LintConfig::default().with_enabled(LintRule::UnusedVariables); + + assert_eq!( + lint(&doc, &TypeAnalysis::analyze(&doc), &config, &test_uri()), + vec![] + ); + } + #[test] fn test_underscore_prefixed_variable_not_flagged() { let code = "local _unused = 1; local used = 2; used"; diff --git a/crates/jrsonnet-lsp-check/src/type_check.rs b/crates/jrsonnet-lsp-check/src/type_check.rs index facdf242..0e6bf0b1 100644 --- a/crates/jrsonnet-lsp-check/src/type_check.rs +++ b/crates/jrsonnet-lsp-check/src/type_check.rs @@ -10,6 +10,7 @@ use jrsonnet_lsp_document::{to_lsp_range, Document, LineIndex}; use jrsonnet_lsp_inference::{find_best_match, TypeAnalysis, TypeEnv}; +use jrsonnet_lsp_scope::{expr_resolves_to_builtin_std, ident_resolves_to_builtin_std}; use jrsonnet_lsp_stdlib::get_stdlib_signature; use jrsonnet_lsp_types::{binary_op_result_ty, unary_op_result_ty, FunctionData, Ty, TyData}; use jrsonnet_rowan_parser::{ @@ -921,8 +922,8 @@ fn validate_function_call_ty( /// Check if an `ExprCall` is a stdlib function call and validate argument count and types. /// -/// Matches the pattern: `std.functionName(args...)` -/// - Callee must be `ExprField` with base being `ExprVar` "std" +/// Matches the pattern: `std.functionName(args...)`, including aliases that +/// resolve to the builtin std object. fn check_stdlib_call_expr(call: &ExprCall, analysis: &TypeAnalysis, errors: &mut Vec) { // Get the callee - should be std.functionName (ExprField) let Some(callee_expr) = call.callee() else { @@ -932,18 +933,11 @@ fn check_stdlib_call_expr(call: &ExprCall, analysis: &TypeAnalysis, errors: &mut return; }; - // Check if base is `std` + // Check if base resolves to builtin std. 
let Some(base_expr) = field.base() else { return; }; - let Some(ExprBase::ExprVar(var)) = base_expr.expr_base() else { - return; - }; - let is_std = var - .name() - .and_then(|n| n.ident_lit()) - .is_some_and(|t| t.text() == "std"); - if !is_std { + if !expr_resolves_to_builtin_std(&base_expr) { return; } @@ -1417,17 +1411,13 @@ fn check_user_function_call_expr( return; }; - // Get the variable name - let Some(var_name) = var - .name() - .and_then(|n| n.ident_lit()) - .map(|t| t.text().to_string()) - else { + let Some(var_ident) = var.name().and_then(|n| n.ident_lit()) else { return; }; + let var_name = var_ident.text().to_string(); - // Skip `std` - handled by check_stdlib_call_expr - if var_name == "std" { + // Skip builtin std - handled by check_stdlib_call_expr. + if ident_resolves_to_builtin_std(&var_ident) { return; } @@ -1762,6 +1752,35 @@ mod tests { assert!(errors.is_empty(), "expected no errors, got: {errors:?}"); } + #[test] + fn test_stdlib_alias_uses_builtin_signature() { + let errors = check_code("local s = std; s.length([1, 2], 3)"); + assert!( + matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::TooManyArguments { + function_name, + max_allowed: 1, + provided: 2, + }, + .. 
+ }] if function_name == "std.length" + ), + "expected TooManyArguments for alias call to std.length, got: {errors:?}" + ); + } + + #[test] + fn test_shadowed_std_does_not_use_builtin_signature() { + let errors = check_code("local std = { length(x, y): x }; std.length(1, 2)"); + assert_eq!( + errors.as_slice(), + &[], + "expected no stdlib arg-count checks for shadowed std, got: {errors:?}" + ); + } + #[test] fn test_stdlib_optional_args() { // std.sort has 1 required and 1 optional argument diff --git a/crates/jrsonnet-lsp-handlers/src/completion/fields.rs b/crates/jrsonnet-lsp-handlers/src/completion/fields.rs index 168406e7..9ee2a411 100644 --- a/crates/jrsonnet-lsp-handlers/src/completion/fields.rs +++ b/crates/jrsonnet-lsp-handlers/src/completion/fields.rs @@ -1,7 +1,8 @@ //! Object field completions for `obj.` patterns. -use jrsonnet_lsp_document::Document; +use jrsonnet_lsp_document::{token_at_offset, ByteOffset, Document}; use jrsonnet_lsp_inference::TypeAnalysis; +use jrsonnet_lsp_scope::ident_resolves_to_builtin_std; use jrsonnet_rowan_parser::{ nodes::{BindDestruct, Destruct, FieldName, MemberFieldNormal, ObjBody, StmtLocal}, AstNode, AstToken, SyntaxKind, SyntaxNode, @@ -37,17 +38,25 @@ pub fn check_object_field_completion( .rfind(|c: char| !c.is_ascii_alphanumeric() && c != '_') .map_or(0, |i| i + 1); let identifier = before_dot[ident_start..].trim(); + let ast = document.ast(); - if identifier.is_empty() || identifier == "std" { - // Skip empty identifiers and std (handled separately) + if identifier.is_empty() { return None; } + if identifier == "std" { + // Skip builtin std (handled separately). If `std` is shadowed by user code, + // keep object-field completion enabled. 
+ let before_dot_pos = if dot_pos > 0 { dot_pos - 1 } else { 0 }; + let before_dot_offset = ByteOffset::new(u32::try_from(before_dot_pos).ok()?); + let token = token_at_offset(ast.syntax(), before_dot_offset)?; + if token.kind() == SyntaxKind::IDENT && ident_resolves_to_builtin_std(&token) { + return None; + } + } // Get what the user is typing after the dot (for filtering) let prefix = after_dot.trim(); - // Try type inference for richer type information - let ast = document.ast(); // Look for expression just before the dot (not at the dot) let before_dot_pos = if dot_pos > 0 { dot_pos - 1 } else { 0 }; let before_dot_offset = u32::try_from(before_dot_pos).ok()?; @@ -89,10 +98,9 @@ pub fn check_object_field_completion( .collect::>(); if items.is_empty() { - None - } else { - Some(items) + return None; } + Some(items) } /// Find object fields for an identifier by looking up its definition. diff --git a/crates/jrsonnet-lsp-handlers/src/completion/helpers.rs b/crates/jrsonnet-lsp-handlers/src/completion/helpers.rs index 15644d58..c4594828 100644 --- a/crates/jrsonnet-lsp-handlers/src/completion/helpers.rs +++ b/crates/jrsonnet-lsp-handlers/src/completion/helpers.rs @@ -17,16 +17,6 @@ pub fn get_identifier_prefix(text: &str, offset: usize) -> &str { &before[start..] } -/// Check if a string is a valid identifier prefix. -pub fn is_identifier_prefix(s: &str) -> bool { - let mut chars = s.chars(); - match chars.next() { - Some(c) if c.is_ascii_alphabetic() || c == '_' => {} - _ => return false, - } - chars.all(|c| c.is_ascii_alphanumeric() || c == '_') -} - /// Check if the given offset is inside an object body. /// /// Walks up the AST from the token at the offset looking for object-related nodes. 
diff --git a/crates/jrsonnet-lsp-handlers/src/completion/mod.rs b/crates/jrsonnet-lsp-handlers/src/completion/mod.rs index be1cc8f7..c661d3c7 100644 --- a/crates/jrsonnet-lsp-handlers/src/completion/mod.rs +++ b/crates/jrsonnet-lsp-handlers/src/completion/mod.rs @@ -193,6 +193,61 @@ mod tests { assert_eq!(labels, vec!["xnor", "xor"]); } + #[test] + fn test_stdlib_completion_with_alias_prefix_xo() { + let code = "local s = std; s.xo"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let analysis = test_analysis(&doc); + + let pos = ( + 0, + u32::try_from(code.len()).expect("test code length fits in u32"), + ) + .into(); + + let list = completion(&doc, pos, None, &analysis).expect("should get completions"); + let labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + assert_eq!(labels, vec!["xor"]); + } + + #[test] + fn test_shadowed_std_uses_object_field_completion() { + let code = "local std = { foo: 1 }; std."; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let analysis = test_analysis(&doc); + + let pos = ( + 0, + u32::try_from(code.len()).expect("test code length fits in u32"), + ) + .into(); + + let result = completion(&doc, pos, None, &analysis).expect("should get completions"); + assert_eq!( + result.items, + vec![CompletionItem { + label: "foo".to_string(), + label_details: None, + kind: Some(CompletionItemKind::FIELD), + detail: Some("number".to_string()), + documentation: None, + deprecated: None, + preselect: None, + sort_text: None, + filter_text: None, + insert_text: None, + insert_text_format: None, + insert_text_mode: None, + text_edit: None, + additional_text_edits: None, + command: None, + commit_characters: None, + data: None, + tags: None, + }] + ); + } + #[test] fn test_completion_includes_std_and_local() { // Simple case: one local variable, position at end where std and x are visible diff --git a/crates/jrsonnet-lsp-handlers/src/completion/stdlib.rs 
b/crates/jrsonnet-lsp-handlers/src/completion/stdlib.rs index 850fcfcc..4d984dba 100644 --- a/crates/jrsonnet-lsp-handlers/src/completion/stdlib.rs +++ b/crates/jrsonnet-lsp-handlers/src/completion/stdlib.rs @@ -1,63 +1,38 @@ //! Standard library completions for `std.` prefix. +use jrsonnet_lsp_scope::expr_resolves_to_builtin_std; use jrsonnet_lsp_stdlib as stdlib; -use jrsonnet_rowan_parser::{SyntaxKind, SyntaxToken}; +use jrsonnet_rowan_parser::{nodes::ExprField, AstNode, SyntaxKind, SyntaxToken}; use lsp_types::{CompletionItem, CompletionItemKind, Documentation, MarkupContent, MarkupKind}; -use super::helpers::is_identifier_prefix; - /// Check if we should provide stdlib completions. pub fn check_stdlib_completion( token: &SyntaxToken, - text: &str, - offset: u32, + _text: &str, + _offset: u32, ) -> Option> { - // Look backwards from the cursor to find `std.` - let offset_usize = offset as usize; - - // Find the start of the current line - let line_start = text[..offset_usize].rfind('\n').map_or(0, |i| i + 1); - - let line_prefix = &text[line_start..offset_usize]; - - // Check if we have `std.` followed by an optional partial identifier - if let Some(std_pos) = line_prefix.rfind("std.") { - // Get what's after `std.` - let after_std_dot = &line_prefix[std_pos + 4..]; - - // If there's text after `std.`, it should be a valid identifier prefix - if after_std_dot.is_empty() || is_identifier_prefix(after_std_dot) { - let prefix = after_std_dot; - return Some(get_stdlib_completions(prefix)); - } - } - - // Also check if the token itself indicates we're after `std.` - // This handles cases where the parser created a proper AST + // Cursor is right after the dot in `x.|`. 
if token.kind() == SyntaxKind::DOT { - // Check if the expression before the dot is `std` - if let Some(parent) = token.parent() { - // Look for std identifier before this dot - for sibling in parent.children_with_tokens() { - if let Some(t) = sibling.as_token() { - if t.kind() == SyntaxKind::IDENT && t.text() == "std" { - return Some(get_stdlib_completions("")); - } - } - } + let parent = token.parent()?; + let field = parent.ancestors().find_map(ExprField::cast)?; + let base = field.base()?; + if expr_resolves_to_builtin_std(&base) { + return Some(get_stdlib_completions("")); } + return None; } - // Check if we're on an identifier that comes after `std.` + // Cursor is inside the field name in `x.fo|`. if token.kind() == SyntaxKind::IDENT { - // Walk back to see if there's a `std.` pattern - let token_start = u32::from(token.text_range().start()) as usize; - if token_start >= 4 { - let before_token = &text[token_start.saturating_sub(4)..token_start]; - if before_token == "std." { - let prefix = token.text(); - return Some(get_stdlib_completions(prefix)); - } + let prev = token.prev_token()?; + if prev.kind() != SyntaxKind::DOT { + return None; + } + let parent = token.parent()?; + let field = parent.ancestors().find_map(ExprField::cast)?; + let base = field.base()?; + if expr_resolves_to_builtin_std(&base) { + return Some(get_stdlib_completions(token.text())); } } diff --git a/crates/jrsonnet-lsp-handlers/src/hover.rs b/crates/jrsonnet-lsp-handlers/src/hover.rs index baade8cf..83af229d 100644 --- a/crates/jrsonnet-lsp-handlers/src/hover.rs +++ b/crates/jrsonnet-lsp-handlers/src/hover.rs @@ -6,9 +6,10 @@ use jrsonnet_lsp_document::{to_lsp_range, token_at_offset, ByteOffset, Document, LspPosition}; use jrsonnet_lsp_inference::TypeAnalysis; +use jrsonnet_lsp_scope::expr_resolves_to_builtin_std; use jrsonnet_lsp_stdlib as stdlib; use jrsonnet_rowan_parser::{ - nodes::{Bind, Destruct, ExprBase, ExprField}, + nodes::{Bind, Destruct, ExprField}, AstNode, SyntaxKind, 
SyntaxToken, }; use lsp_types::{Hover, HoverContents, MarkupContent, MarkupKind}; @@ -249,12 +250,9 @@ fn check_stdlib_hover(token: &SyntaxToken) -> Option { let field = ExprField::cast(parent.parent()?)?; - // Check if base is `std` + // Check if base resolves to builtin std. let base = field.base()?; - let ExprBase::ExprVar(var) = base.expr_base()? else { - return None; - }; - if var.name()?.ident_lit()?.text() != "std" { + if !expr_resolves_to_builtin_std(&base) { return None; } @@ -295,6 +293,11 @@ mod tests { 0, 4, "```jsonnet\nstd.map(func, arr)\n```\n\nApplies `func` to each element of `arr`.\n\n**Example:**\n```jsonnet\nstd.map(function(x) x * 2, [1,2,3]) // [2, 4, 6]\n```" )] + #[case( + "local s = std; s.map(function(x) x, [])", + 0, 17, + "```jsonnet\nstd.map(func, arr)\n```\n\nApplies `func` to each element of `arr`.\n\n**Example:**\n```jsonnet\nstd.map(function(x) x * 2, [1,2,3]) // [2, 4, 6]\n```" + )] #[case( "std.filter(function(x) x > 0, [1, -1, 2])", 0, 4, @@ -315,6 +318,33 @@ mod tests { }); } + #[test] + fn test_stdlib_hover_shadowed_std_returns_none() { + let code = "local std = { map(x): x }; std.map(1)"; + let global_types = Arc::new(GlobalTyStore::new()); + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let analysis = TypeAnalysis::analyze_with_global(&doc, global_types); + + let map_offset = + u32::try_from(code.rfind("map").expect("test source contains map token")).unwrap(); + let pos = (0, map_offset).into(); + let offset = doc + .line_index() + .offset(pos, doc.text()) + .expect("offset should exist"); + let token = token_at_offset(doc.ast().syntax(), offset).expect("token should exist"); + assert_eq!(check_stdlib_hover(&token), None); + + let hover_result = hover(&doc, pos, &analysis); + assert_matches!( + hover_result, + Some(Hover { + contents: HoverContents::Markup(MarkupContent { kind: MarkupKind::Markdown, value }), + range: None + }) if value == "`function(x)`" + ); + } + #[rstest] #[case( "local foo = 1; foo", 
diff --git a/crates/jrsonnet-lsp-handlers/src/semantic_tokens.rs b/crates/jrsonnet-lsp-handlers/src/semantic_tokens.rs index 403796c2..6b88acbe 100644 --- a/crates/jrsonnet-lsp-handlers/src/semantic_tokens.rs +++ b/crates/jrsonnet-lsp-handlers/src/semantic_tokens.rs @@ -3,8 +3,9 @@ //! Provides semantic highlighting for Jsonnet code. use jrsonnet_lsp_document::{Document, LineIndex}; +use jrsonnet_lsp_scope::{expr_resolves_to_builtin_std, ident_resolves_to_builtin_std}; use jrsonnet_rowan_parser::{ - nodes::{BindFunction, Destruct, ParamsDesc, StmtLocal}, + nodes::{BindFunction, Destruct, ExprField, ParamsDesc, StmtLocal}, AstNode, SyntaxKind, SyntaxNode, SyntaxToken, }; use lsp_types::{Range, SemanticToken, SemanticTokenType, SemanticTokens, SemanticTokensLegend}; @@ -242,7 +243,7 @@ impl<'a> SemanticTokenBuilder<'a> { }; // Check if this is "std" - if token.text() == "std" { + if token.text() == "std" && ident_resolves_to_builtin_std(token) { self.add_token(token, TokenType::Namespace, token_modifier::DEFAULT_LIBRARY); return; } @@ -539,28 +540,15 @@ fn classify_definition_site(destruct_node: &SyntaxNode) -> (TokenType, u32) { ) } -/// Check if an index access is accessing the stdlib. -fn is_stdlib_access(suffix_index: &SyntaxNode) -> bool { - // Look for a preceding EXPR_VAR with "std" - if let Some(parent) = suffix_index.parent() { - for child in parent.children() { - if child.kind() == SyntaxKind::EXPR_VAR { - for token in child - .descendants_with_tokens() - .filter_map(rowan::NodeOrToken::into_token) - { - if token.kind() == SyntaxKind::IDENT && token.text() == "std" { - return true; - } - } - } - // Stop when we reach the suffix_index - if child.text_range() == suffix_index.text_range() { - break; - } - } - } - false +/// Check if a field access is on builtin std. 
+fn is_stdlib_access(expr_field: &SyntaxNode) -> bool { + let Some(field) = ExprField::cast(expr_field.clone()) else { + return false; + }; + let Some(base) = field.base() else { + return false; + }; + expr_resolves_to_builtin_std(&base) } #[cfg(test)] diff --git a/crates/jrsonnet-lsp-inference/src/const_eval.rs b/crates/jrsonnet-lsp-inference/src/const_eval.rs index 6e9e299b..99eae35f 100644 --- a/crates/jrsonnet-lsp-inference/src/const_eval.rs +++ b/crates/jrsonnet-lsp-inference/src/const_eval.rs @@ -11,7 +11,9 @@ use jrsonnet_lsp_document::Document; use jrsonnet_lsp_import::extract_import_path; -use jrsonnet_lsp_scope::find_definition_range; +use jrsonnet_lsp_scope::{ + find_definition_range, ident_resolves_to_builtin_std, var_resolves_to_builtin_std, +}; use jrsonnet_rowan_parser::{ nodes::{ BindDestruct, Expr, ExprBase, ExprField, ExprImport, ExprIndex, ExprVar, FieldName, Member, @@ -108,12 +110,10 @@ pub fn trace_ident(token: &SyntaxToken, document: &Document) -> Option Option { + if var_resolves_to_builtin_std(var) { + return Some(ConstEvalResult::Std { field: None }); + } let name_node = var.name()?; let ident = name_node.ident_lit()?; let name = ident.text(); - // Check for 'std' - if name == "std" { - return Some(ConstEvalResult::Std { field: None }); - } - // Find the definition let def_range = find_definition_range(&ident, name)?; diff --git a/crates/jrsonnet-lsp-inference/src/expr.rs b/crates/jrsonnet-lsp-inference/src/expr.rs index f9525aa1..8b56b79e 100644 --- a/crates/jrsonnet-lsp-inference/src/expr.rs +++ b/crates/jrsonnet-lsp-inference/src/expr.rs @@ -1,6 +1,7 @@ //! Expression type inference. 
use jrsonnet_lsp_document::Document; +use jrsonnet_lsp_scope::var_resolves_to_builtin_std; use jrsonnet_lsp_types::{ FieldDefInterned, FieldVis, FunctionData, NumBounds, ObjectData, ParamInterned, ReturnSpec, Ty, TyData, @@ -337,17 +338,14 @@ fn infer_base_ty( // Variable lookup - use Ty-native lookup ExprBase::ExprVar(var) => { - if let Some(name_node) = var.name() { - if let Some(ident) = name_node.ident_lit() { - let name = ident.text(); - if name == "std" { - // std is a special open object - return env.store_mut().object_any(); - } - return env.lookup(name).unwrap_or(Ty::ANY); - } + if var_resolves_to_builtin_std(var) { + return env.store_mut().object_any(); } - Ty::ANY + + var.name() + .and_then(|name| name.ident_lit()) + .and_then(|ident| env.lookup(ident.text())) + .unwrap_or(Ty::ANY) } // Parenthesized expression - recurse with expected type diff --git a/crates/jrsonnet-lsp-inference/src/helpers.rs b/crates/jrsonnet-lsp-inference/src/helpers.rs index a370d7b2..71cbb8c2 100644 --- a/crates/jrsonnet-lsp-inference/src/helpers.rs +++ b/crates/jrsonnet-lsp-inference/src/helpers.rs @@ -1,5 +1,6 @@ //! Helper functions for type inference. +use jrsonnet_lsp_scope::expr_resolves_to_builtin_std; use jrsonnet_lsp_types::{FieldVis, ParamInterned, Ty}; use jrsonnet_rowan_parser::{ nodes::{ExprBase, FieldName, Visibility, VisibilityKind}, @@ -54,14 +55,8 @@ pub fn infer_stdlib_field_access_ty( field: &jrsonnet_rowan_parser::nodes::ExprField, env: &mut TypeEnv, ) -> Option { - // Check that the base is 'std' let base = field.base()?; - let ExprBase::ExprVar(var) = base.expr_base()? 
else { - return None; - }; - let name_node = var.name()?; - let ident = name_node.ident_lit()?; - if ident.text() != "std" { + if !expr_resolves_to_builtin_std(&base) { return None; } From 5767173ac6e0b4570a4f579361943c61ef15bc5b Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Tue, 10 Feb 2026 14:42:07 +0000 Subject: [PATCH 057/210] lsp-types: delegate narrow/widen/length transforms to operations --- crates/jrsonnet-lsp-types/src/mut_store.rs | 333 +------------------- crates/jrsonnet-lsp-types/src/store.rs | 346 +-------------------- 2 files changed, 10 insertions(+), 669 deletions(-) diff --git a/crates/jrsonnet-lsp-types/src/mut_store.rs b/crates/jrsonnet-lsp-types/src/mut_store.rs index 9acc2aca..31930ea1 100644 --- a/crates/jrsonnet-lsp-types/src/mut_store.rs +++ b/crates/jrsonnet-lsp-types/src/mut_store.rs @@ -10,8 +10,8 @@ use crate::{ global_store::GlobalTyStore, local_store::LocalTyStore, store::{ - FieldDefInterned, FunctionData, NumBounds, ObjectData, ParamInterned, ReturnSpec, Ty, - TyConstraints, TyData, TyVarId, TypeStoreOps, + FunctionData, NumBounds, ObjectData, ParamInterned, ReturnSpec, Ty, TyConstraints, TyData, + TyVarId, TypeStoreOps, }, }; @@ -436,138 +436,7 @@ impl MutStore { /// Narrow a type by intersecting with a constraint. 
pub fn narrow(&mut self, ty: Ty, constraint: Ty) -> Ty { - // Fast paths - if ty == Ty::NEVER || constraint == Ty::NEVER { - return Ty::NEVER; - } - if ty == Ty::ANY { - return constraint; - } - if constraint == Ty::ANY { - return ty; - } - if ty == constraint { - return ty; - } - - let ty_data = self.get_data(ty); - let constraint_data = self.get_data(constraint); - - // Handle unions: narrow each element - if let TyData::Union(types) = ty_data { - let narrowed: Vec = types - .iter() - .map(|&t| self.narrow(t, constraint)) - .filter(|&t| t != Ty::NEVER) - .collect(); - return self.union(narrowed); - } - if let TyData::Union(types) = constraint_data { - let narrowed: Vec = types - .iter() - .map(|&t| self.narrow(ty, t)) - .filter(|&t| t != Ty::NEVER) - .collect(); - return self.union(narrowed); - } - - // Handle literal types as subtypes - match (&ty_data, &constraint_data) { - (TyData::Bool, TyData::True) | (TyData::True, TyData::Bool) => return Ty::TRUE, - (TyData::Bool, TyData::False) | (TyData::False, TyData::Bool) => return Ty::FALSE, - (TyData::String, TyData::Char) | (TyData::Char, TyData::String) => return Ty::CHAR, - (TyData::String, TyData::LiteralString(s)) - | (TyData::LiteralString(s), TyData::String) => { - return self.literal_string(s.clone()); - } - _ => {} - } - - // Handle arrays (preserve is_set if both are sets) - if let ( - TyData::Array { - elem: e1, - is_set: s1, - }, - TyData::Array { - elem: e2, - is_set: s2, - }, - ) = (&ty_data, &constraint_data) - { - let elem = self.narrow(*e1, *e2); - if elem == Ty::NEVER { - return Ty::NEVER; - } - // Result is a set only if both inputs are sets - if *s1 && *s2 { - return self.array_set(elem); - } - return self.array(elem); - } - - // Handle tuples with arrays - if let (TyData::Tuple { elems }, TyData::Array { elem: arr_elem, .. 
}) = - (&ty_data, &constraint_data) - { - let narrowed: Vec = elems.iter().map(|&e| self.narrow(e, *arr_elem)).collect(); - if narrowed.contains(&Ty::NEVER) { - return Ty::NEVER; - } - return self.tuple(narrowed); - } - if let (TyData::Array { elem: arr_elem, .. }, TyData::Tuple { elems }) = - (&ty_data, &constraint_data) - { - let narrowed: Vec = elems.iter().map(|&e| self.narrow(*arr_elem, e)).collect(); - if narrowed.contains(&Ty::NEVER) { - return Ty::NEVER; - } - return self.tuple(narrowed); - } - - // Handle tuples with tuples - if let (TyData::Tuple { elems: e1 }, TyData::Tuple { elems: e2 }) = - (&ty_data, &constraint_data) - { - if e1.len() != e2.len() { - return Ty::NEVER; - } - let narrowed: Vec = e1 - .iter() - .zip(e2.iter()) - .map(|(&a, &b)| self.narrow(a, b)) - .collect(); - if narrowed.contains(&Ty::NEVER) { - return Ty::NEVER; - } - return self.tuple(narrowed); - } - - // Handle objects - if let (TyData::Object(obj1), TyData::Object(obj2)) = (&ty_data, &constraint_data) { - let mut fields = obj1.fields.clone(); - for (name, def2) in &obj2.fields { - if let Some((_, def1)) = fields.iter_mut().find(|(n, _)| n == name) { - let narrowed_ty = self.narrow(def1.ty, def2.ty); - *def1 = FieldDefInterned { - ty: narrowed_ty, - required: def1.required && def2.required, - visibility: def1.visibility, - }; - } else { - fields.push((name.clone(), def2.clone())); - } - } - let has_unknown = obj1.has_unknown && obj2.has_unknown; - return self.object(ObjectData { - fields, - has_unknown, - }); - } - - // Different concrete types have no intersection - Ty::NEVER + crate::operations::ty_and(ty, constraint, self) } /// Narrow a type to one with an exact length. @@ -576,207 +445,17 @@ impl MutStore { /// - Strings with length 1 become Char /// - Tuples must have matching length pub fn with_len(&mut self, ty: Ty, len: usize) -> Ty { - match self.get_data(ty) { - TyData::Any => Ty::ANY, - - TyData::Array { elem, .. 
} => { - let elems = vec![elem; len]; - self.tuple(elems) - } - - TyData::Tuple { elems } => { - if elems.len() == len { - ty - } else { - Ty::NEVER - } - } - - TyData::Object(obj) => match (obj.fields.len().cmp(&len), obj.has_unknown) { - (std::cmp::Ordering::Equal, false) | (std::cmp::Ordering::Less, true) => ty, - (std::cmp::Ordering::Equal, true) => self.object(ObjectData { - fields: obj.fields, - has_unknown: false, - }), - (std::cmp::Ordering::Less, false) | (std::cmp::Ordering::Greater, _) => Ty::NEVER, - }, - - TyData::String => { - if len == 1 { - Ty::CHAR - } else { - ty - } - } - - TyData::LiteralString(s) => { - if s.len() == len { - ty - } else { - Ty::NEVER - } - } - - TyData::Char => { - if len == 1 { - ty - } else { - Ty::NEVER - } - } - - TyData::Function(func) => { - if func.variadic && func.params.is_empty() { - let params = (0..len) - .map(|idx| ParamInterned { - name: format!("arg{idx}"), - ty: Ty::ANY, - has_default: false, - }) - .collect(); - self.function(FunctionData { - params, - return_spec: ReturnSpec::Fixed(Ty::ANY), - variadic: false, - }) - } else if func.required_count() == len { - ty - } else { - Ty::NEVER - } - } - - TyData::AttrsOf { .. } | TyData::TypeVar { .. } => ty, - - TyData::Never - | TyData::Null - | TyData::Bool - | TyData::True - | TyData::False - | TyData::Number - | TyData::BoundedNumber(_) => Ty::NEVER, - - TyData::Union(types) => { - let narrowed: Vec = types - .iter() - .map(|&t| self.with_len(t, len)) - .filter(|&t| t != Ty::NEVER) - .collect(); - self.union(narrowed) - } - - TyData::Sum(types) => { - let narrowed: Vec = types.iter().map(|&t| self.with_len(t, len)).collect(); - if narrowed.contains(&Ty::NEVER) { - Ty::NEVER - } else { - self.sum(narrowed) - } - } - } + crate::operations::ty_with_len(ty, len, self) } /// Narrow a type to one with at least a minimum length. 
pub fn with_min_len(&mut self, ty: Ty, min: usize) -> Ty { - match self.get_data(ty) { - TyData::Any => Ty::ANY, - TyData::Never => Ty::NEVER, - - TyData::Array { .. } - | TyData::Object(_) - | TyData::AttrsOf { .. } - | TyData::String - | TyData::Function(_) - | TyData::TypeVar { .. } => ty, - - TyData::LiteralString(s) => { - if s.chars().count() >= min { - ty - } else { - Ty::NEVER - } - } - - TyData::Tuple { elems } => { - if elems.len() >= min { - ty - } else { - Ty::NEVER - } - } - - TyData::Char => { - if min <= 1 { - ty - } else { - Ty::NEVER - } - } - - TyData::Null - | TyData::Bool - | TyData::True - | TyData::False - | TyData::Number - | TyData::BoundedNumber(_) => { - if min == 0 { - ty - } else { - Ty::NEVER - } - } - - TyData::Union(types) => { - let narrowed: Vec = types - .iter() - .map(|&t| self.with_min_len(t, min)) - .filter(|&t| t != Ty::NEVER) - .collect(); - self.union(narrowed) - } - - TyData::Sum(types) => { - let narrowed: Vec = types.iter().map(|&t| self.with_min_len(t, min)).collect(); - if narrowed.contains(&Ty::NEVER) { - Ty::NEVER - } else { - self.sum(narrowed) - } - } - } + crate::operations::ty_with_min_len(ty, min, self) } /// Widen a type by removing a constraint. 
pub fn widen(&mut self, base: Ty, remove: Ty) -> Ty { - // Fast paths - if base == Ty::NEVER { - return Ty::NEVER; - } - if remove == Ty::NEVER { - return base; - } - if remove == Ty::ANY { - return Ty::NEVER; - } - if base == Ty::ANY { - return Ty::ANY; - } - if base == remove { - return Ty::NEVER; - } - - // Handle unions: remove from each element - if let TyData::Union(types) = self.get_data(base) { - let remaining: Vec = types - .iter() - .map(|&t| self.widen(t, remove)) - .filter(|&t| t != Ty::NEVER) - .collect(); - return self.union(remaining); - } - - // Different concrete types: nothing to remove - base + crate::operations::ty_minus(base, remove, self) } } diff --git a/crates/jrsonnet-lsp-types/src/store.rs b/crates/jrsonnet-lsp-types/src/store.rs index 1a4dc5b4..7eac2e65 100644 --- a/crates/jrsonnet-lsp-types/src/store.rs +++ b/crates/jrsonnet-lsp-types/src/store.rs @@ -1132,155 +1132,7 @@ impl TyStore { /// Returns the most specific type that satisfies both. /// For example, `narrow(Any, Number)` returns `Number`. 
pub fn narrow(&mut self, ty: Ty, constraint: Ty) -> Ty { - // Fast paths - if ty == Ty::NEVER || constraint == Ty::NEVER { - return Ty::NEVER; - } - if ty == Ty::ANY { - return constraint; - } - if constraint == Ty::ANY { - return ty; - } - if ty == constraint { - return ty; - } - - // Handle unions: narrow each element - if let TyData::Union(types) = self.get(ty).clone() { - let narrowed: Vec = types - .iter() - .map(|&t| self.narrow(t, constraint)) - .filter(|&t| t != Ty::NEVER) - .collect(); - return self.union(narrowed); - } - if let TyData::Union(types) = self.get(constraint).clone() { - let narrowed: Vec = types - .iter() - .map(|&t| self.narrow(ty, t)) - .filter(|&t| t != Ty::NEVER) - .collect(); - return self.union(narrowed); - } - - // Handle literal types as subtypes - match (&*self.get(ty), &*self.get(constraint)) { - (TyData::Bool, TyData::True) | (TyData::True, TyData::Bool) => return Ty::TRUE, - (TyData::Bool, TyData::False) | (TyData::False, TyData::Bool) => return Ty::FALSE, - (TyData::String, TyData::Char) | (TyData::Char, TyData::String) => return Ty::CHAR, - (TyData::String, TyData::LiteralString(s)) - | (TyData::LiteralString(s), TyData::String) => { - return self.literal_string(s.clone()); - } - _ => {} - } - - // Handle bounded numbers - match (&*self.get(ty), &*self.get(constraint)) { - // Number narrowed with BoundedNumber -> BoundedNumber - // BoundedNumber narrowed with Number -> BoundedNumber (unchanged) - (TyData::Number, TyData::BoundedNumber(bounds)) - | (TyData::BoundedNumber(bounds), TyData::Number) => { - return self.bounded_number(*bounds); - } - // Two BoundedNumbers -> intersect bounds - (TyData::BoundedNumber(b1), TyData::BoundedNumber(b2)) => { - return match b1.intersect(b2) { - Some(bounds) => self.bounded_number(bounds), - None => Ty::NEVER, // Empty intersection - }; - } - _ => {} - } - - // Handle arrays (preserve is_set if both are sets) - if let ( - TyData::Array { - elem: e1, - is_set: s1, - }, - TyData::Array { - elem: 
e2, - is_set: s2, - }, - ) = (self.get(ty).clone(), self.get(constraint).clone()) - { - let elem = self.narrow(e1, e2); - if elem == Ty::NEVER { - return Ty::NEVER; - } - // Result is a set only if both inputs are sets - if s1 && s2 { - return self.array_set(elem); - } - return self.array(elem); - } - - // Handle tuples with arrays - if let (TyData::Tuple { elems }, TyData::Array { elem: arr_elem, .. }) = - (self.get(ty).clone(), self.get(constraint).clone()) - { - let narrowed: Vec = elems.iter().map(|&e| self.narrow(e, arr_elem)).collect(); - if narrowed.contains(&Ty::NEVER) { - return Ty::NEVER; - } - return self.tuple(narrowed); - } - if let (TyData::Array { elem: arr_elem, .. }, TyData::Tuple { elems }) = - (self.get(ty).clone(), self.get(constraint).clone()) - { - let narrowed: Vec = elems.iter().map(|&e| self.narrow(arr_elem, e)).collect(); - if narrowed.contains(&Ty::NEVER) { - return Ty::NEVER; - } - return self.tuple(narrowed); - } - - // Handle tuples with tuples - if let (TyData::Tuple { elems: e1 }, TyData::Tuple { elems: e2 }) = - (self.get(ty).clone(), self.get(constraint).clone()) - { - if e1.len() != e2.len() { - return Ty::NEVER; - } - let narrowed: Vec = e1 - .iter() - .zip(e2.iter()) - .map(|(&a, &b)| self.narrow(a, b)) - .collect(); - if narrowed.contains(&Ty::NEVER) { - return Ty::NEVER; - } - return self.tuple(narrowed); - } - - // Handle objects - if let (TyData::Object(obj1), TyData::Object(obj2)) = - (self.get(ty).clone(), self.get(constraint).clone()) - { - let mut fields = obj1.fields.clone(); - for (name, def2) in &obj2.fields { - if let Some((_, def1)) = fields.iter_mut().find(|(n, _)| n == name) { - let narrowed_ty = self.narrow(def1.ty, def2.ty); - *def1 = FieldDefInterned { - ty: narrowed_ty, - required: def1.required && def2.required, - visibility: def1.visibility, - }; - } else { - fields.push((name.clone(), def2.clone())); - } - } - let has_unknown = obj1.has_unknown && obj2.has_unknown; - return self.object(ObjectData { - 
fields, - has_unknown, - }); - } - - // Different concrete types have no intersection - Ty::NEVER + crate::operations::ty_and(ty, constraint, self) } /// Widen a type by removing a constraint. @@ -1288,35 +1140,7 @@ impl TyStore { /// Returns the type with the constraint removed. /// For example, `widen(Union(Number, String), Number)` returns `String`. pub fn widen(&mut self, base: Ty, remove: Ty) -> Ty { - // Fast paths - if base == Ty::NEVER { - return Ty::NEVER; - } - if remove == Ty::NEVER { - return base; - } - if remove == Ty::ANY { - return Ty::NEVER; - } - if base == Ty::ANY { - return Ty::ANY; - } - if base == remove { - return Ty::NEVER; - } - - // Handle unions: remove from each element - if let TyData::Union(types) = self.get(base).clone() { - let remaining: Vec = types - .iter() - .map(|&t| self.widen(t, remove)) - .filter(|&t| t != Ty::NEVER) - .collect(); - return self.union(remaining); - } - - // Different concrete types: nothing to remove - base + crate::operations::ty_minus(base, remove, self) } /// Narrow a type to one with a specific length. @@ -1325,174 +1149,12 @@ impl TyStore { /// - Strings with length 1 become Char /// - Tuples must have matching length pub fn with_len(&mut self, ty: Ty, len: usize) -> Ty { - match self.get(ty).clone() { - TyData::Any => Ty::ANY, - - TyData::Array { elem, .. 
} => { - let elems = vec![elem; len]; - self.tuple(elems) - } - - TyData::Tuple { elems } => { - if elems.len() == len { - ty - } else { - Ty::NEVER - } - } - - TyData::Object(obj) => match (obj.fields.len().cmp(&len), obj.has_unknown) { - (std::cmp::Ordering::Equal, false) | (std::cmp::Ordering::Less, true) => ty, - (std::cmp::Ordering::Equal, true) => self.object(ObjectData { - fields: obj.fields, - has_unknown: false, - }), - (std::cmp::Ordering::Less, false) | (std::cmp::Ordering::Greater, _) => Ty::NEVER, - }, - - TyData::String => { - if len == 1 { - Ty::CHAR - } else { - ty - } - } - - TyData::LiteralString(s) => { - if s.len() == len { - ty - } else { - Ty::NEVER - } - } - - TyData::Char => { - if len == 1 { - ty - } else { - Ty::NEVER - } - } - - TyData::Function(func) => { - if func.variadic && func.params.is_empty() { - let params = (0..len) - .map(|idx| ParamInterned { - name: format!("arg{idx}"), - ty: Ty::ANY, - has_default: false, - }) - .collect(); - self.function(FunctionData { - params, - return_spec: ReturnSpec::Fixed(Ty::ANY), - variadic: false, - }) - } else if func.required_count() == len { - ty - } else { - Ty::NEVER - } - } - - TyData::AttrsOf { .. } | TyData::TypeVar { .. } => ty, - - TyData::Never - | TyData::Null - | TyData::Bool - | TyData::True - | TyData::False - | TyData::Number - | TyData::BoundedNumber(_) => Ty::NEVER, - - TyData::Union(types) => { - let narrowed: Vec = types - .iter() - .map(|&t| self.with_len(t, len)) - .filter(|&t| t != Ty::NEVER) - .collect(); - self.union(narrowed) - } - - TyData::Sum(types) => { - let narrowed: Vec = types.iter().map(|&t| self.with_len(t, len)).collect(); - if narrowed.contains(&Ty::NEVER) { - Ty::NEVER - } else { - self.sum(narrowed) - } - } - } + crate::operations::ty_with_len(ty, len, self) } /// Narrow a type to one with at least a minimum length. 
pub fn with_min_len(&mut self, ty: Ty, min: usize) -> Ty { - match self.get(ty).clone() { - TyData::Any => Ty::ANY, - TyData::Never => Ty::NEVER, - - TyData::Array { .. } - | TyData::Object(_) - | TyData::AttrsOf { .. } - | TyData::String - | TyData::Function(_) - | TyData::TypeVar { .. } => ty, - - TyData::LiteralString(s) => { - if s.chars().count() >= min { - ty - } else { - Ty::NEVER - } - } - - TyData::Tuple { elems } => { - if elems.len() >= min { - ty - } else { - Ty::NEVER - } - } - - TyData::Char => { - if min <= 1 { - ty - } else { - Ty::NEVER - } - } - - TyData::Null - | TyData::Bool - | TyData::True - | TyData::False - | TyData::Number - | TyData::BoundedNumber(_) => { - if min == 0 { - ty - } else { - Ty::NEVER - } - } - - TyData::Union(types) => { - let narrowed: Vec = types - .iter() - .map(|&t| self.with_min_len(t, min)) - .filter(|&t| t != Ty::NEVER) - .collect(); - self.union(narrowed) - } - - TyData::Sum(types) => { - let narrowed: Vec = types.iter().map(|&t| self.with_min_len(t, min)).collect(); - if narrowed.contains(&Ty::NEVER) { - Ty::NEVER - } else { - self.sum(narrowed) - } - } - } + crate::operations::ty_with_min_len(ty, min, self) } /// Check if type is indexable. From 18cb18db0720bc612d9e14d134fdb7dda1ea83a1 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Tue, 10 Feb 2026 14:43:51 +0000 Subject: [PATCH 058/210] lsp-handlers: split completion implementation out of mod.rs --- .../src/completion/handler.rs | 687 ++++++++++++++++++ .../src/completion/mod.rs | 683 +---------------- 2 files changed, 690 insertions(+), 680 deletions(-) create mode 100644 crates/jrsonnet-lsp-handlers/src/completion/handler.rs diff --git a/crates/jrsonnet-lsp-handlers/src/completion/handler.rs b/crates/jrsonnet-lsp-handlers/src/completion/handler.rs new file mode 100644 index 00000000..125ae66a --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/completion/handler.rs @@ -0,0 +1,687 @@ +//! Completion handler for providing code completions. +//! +//! 
Supports: +//! - Standard library functions (triggered by `std.`) +//! - Local variables in scope +//! - Object field completion (triggered by `obj.`) +//! - Import path completion (inside import strings) + +use std::path::{Path, PathBuf}; + +use jrsonnet_lsp_document::{token_at_offset, Document, LspPosition}; +use jrsonnet_lsp_inference::TypeAnalysis; +use jrsonnet_rowan_parser::AstNode; +use lsp_types::{CompletionItem, CompletionItemKind, CompletionList}; +use tracing::debug; + +use super::{ + fields::check_object_field_completion, + helpers::{get_identifier_prefix, is_inside_object}, + imports::check_import_completion, + locals::get_local_completions, + stdlib::check_stdlib_completion, +}; + +/// Get completion items for the given position. +/// +/// `doc_path` is the path to the current document, used for import path completion. +/// The `analysis` parameter should be pre-computed using `TypeProvider` to ensure +/// that import types are properly resolved. +pub fn completion( + document: &Document, + position: LspPosition, + doc_path: Option<&Path>, + analysis: &TypeAnalysis, +) -> Option { + completion_with_import_roots(document, position, doc_path, &[], analysis) +} + +/// Get completion items with explicit import search roots. +/// +/// `import_roots` is typically configured from server `jpath` entries and used +/// when completing inside import strings. 
+pub fn completion_with_import_roots( + document: &Document, + position: LspPosition, + doc_path: Option<&Path>, + import_roots: &[PathBuf], + analysis: &TypeAnalysis, +) -> Option { + let text = document.text(); + let line_index = document.line_index(); + + // Convert LSP position to byte offset + let offset = line_index.offset(position, text)?; + + // Get the AST root + let ast = document.ast(); + + // Find the token at or before the offset (may be None at whitespace/EOF) + let token = token_at_offset(ast.syntax(), offset); + + // Check if we're completing after `std.` + if let Some(ref t) = token { + if let Some(items) = check_stdlib_completion(t, text, offset.into()) { + debug!(count = items.len(), "providing stdlib completions"); + return Some(CompletionList { + is_incomplete: false, + items, + }); + } + } + + // Check if we're inside an import string + if let Some(items) = check_import_completion(text, offset.into(), doc_path, import_roots) { + debug!(count = items.len(), "providing import completions"); + return Some(CompletionList { + is_incomplete: false, + items, + }); + } + + // Check if we're completing after `obj.` (object field access) + if let Some(items) = check_object_field_completion(document, text, offset.into(), analysis) { + debug!(count = items.len(), "providing object field completions"); + return Some(CompletionList { + is_incomplete: false, + items, + }); + } + + // For general completion, provide local variables in scope + let mut items = get_local_completions(document, position, text, offset.into()); + + // Also include `std` as a completion option since it's always available + items.push(CompletionItem { + label: "std".to_string(), + kind: Some(CompletionItemKind::MODULE), + detail: Some("Jsonnet standard library".to_string()), + ..Default::default() + }); + + // Add `self`, `super`, and `$` if inside an object + if is_inside_object(ast.syntax(), offset) { + let prefix = get_identifier_prefix(text, offset.into()); + + if 
prefix.is_empty() || "$".starts_with(prefix) { + items.push(CompletionItem { + label: "$".to_string(), + kind: Some(CompletionItemKind::KEYWORD), + detail: Some("Reference to root object".to_string()), + ..Default::default() + }); + } + + if prefix.is_empty() || "self".starts_with(prefix) { + items.push(CompletionItem { + label: "self".to_string(), + kind: Some(CompletionItemKind::KEYWORD), + detail: Some("Reference to current object".to_string()), + ..Default::default() + }); + } + + if prefix.is_empty() || "super".starts_with(prefix) { + items.push(CompletionItem { + label: "super".to_string(), + kind: Some(CompletionItemKind::KEYWORD), + detail: Some("Reference to inherited object".to_string()), + ..Default::default() + }); + } + } + + Some(CompletionList { + is_incomplete: false, + items, + }) +} + +#[cfg(test)] +mod tests { + use std::sync::Arc; + + use super::super::imports::find_import_string_start; + use jrsonnet_lsp_document::DocVersion; + use jrsonnet_lsp_types::GlobalTyStore; + + use super::*; + + /// Create a `TypeAnalysis` for test purposes. 
+ fn test_analysis(doc: &Document) -> TypeAnalysis { + let global_types = Arc::new(GlobalTyStore::new()); + TypeAnalysis::analyze_with_global(doc, global_types) + } + + #[test] + fn test_stdlib_completion_with_prefix_xor() { + // Use prefix "xo" which only matches "xor" in stdlib + let code = "std.xo"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let analysis = test_analysis(&doc); + + let pos = (0, 6).into(); + + let list = completion(&doc, pos, None, &analysis).expect("should get completions"); + + // Extract just labels for comparison (full CompletionItem has dynamic detail text) + let labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + assert_eq!(labels, vec!["xor"]); + } + + #[test] + fn test_stdlib_completion_with_prefix_x() { + // Use prefix "x" which matches "xnor" and "xor" in stdlib + let code = "std.x"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let analysis = test_analysis(&doc); + + let pos = (0, 5).into(); + + let list = completion(&doc, pos, None, &analysis).expect("should get completions"); + + // Extract labels and sort for comparison + let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + labels.sort_unstable(); + assert_eq!(labels, vec!["xnor", "xor"]); + } + + #[test] + fn test_stdlib_completion_with_alias_prefix_xo() { + let code = "local s = std; s.xo"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let analysis = test_analysis(&doc); + + let pos = ( + 0, + u32::try_from(code.len()).expect("test code length fits in u32"), + ) + .into(); + + let list = completion(&doc, pos, None, &analysis).expect("should get completions"); + let labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + assert_eq!(labels, vec!["xor"]); + } + + #[test] + fn test_shadowed_std_uses_object_field_completion() { + let code = "local std = { foo: 1 }; std."; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let 
analysis = test_analysis(&doc); + + let pos = ( + 0, + u32::try_from(code.len()).expect("test code length fits in u32"), + ) + .into(); + + let result = completion(&doc, pos, None, &analysis).expect("should get completions"); + assert_eq!( + result.items, + vec![CompletionItem { + label: "foo".to_string(), + label_details: None, + kind: Some(CompletionItemKind::FIELD), + detail: Some("number".to_string()), + documentation: None, + deprecated: None, + preselect: None, + sort_text: None, + filter_text: None, + insert_text: None, + insert_text_format: None, + insert_text_mode: None, + text_edit: None, + additional_text_edits: None, + command: None, + commit_characters: None, + data: None, + tags: None, + }] + ); + } + + #[test] + fn test_completion_includes_std_and_local() { + // Simple case: one local variable, position at end where std and x are visible + let code = "local x = 1; "; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let analysis = test_analysis(&doc); + + let pos = (0, 13).into(); + + let list = completion(&doc, pos, None, &analysis).expect("should get completions"); + + // Extract labels and sort for comparison + let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + labels.sort_unstable(); + assert_eq!(labels, vec!["std", "x"]); + } + + #[test] + fn test_completion_item_has_documentation() { + // Use a simple object where we can assert the full completion list + let code = "local obj = { foo: 1 }; obj."; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let analysis = test_analysis(&doc); + + let pos = (0, 28).into(); + + let result = completion(&doc, pos, None, &analysis).expect("should get completions"); + + // Assert the full completion list for object field access + assert_eq!( + result.items, + vec![CompletionItem { + label: "foo".to_string(), + label_details: None, + kind: Some(CompletionItemKind::FIELD), + detail: Some("number".to_string()), + documentation: None, + deprecated: 
None, + preselect: None, + sort_text: None, + filter_text: None, + insert_text: None, + insert_text_format: None, + insert_text_mode: None, + text_edit: None, + additional_text_edits: None, + command: None, + commit_characters: None, + data: None, + tags: None, + }] + ); + } + + #[test] + fn test_local_variable_completion() { + let code = "local foo = 1; local bar = 2; "; + // ^ cursor here (character 30) + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let analysis = test_analysis(&doc); + + let pos = (0, 30).into(); + + let list = completion(&doc, pos, None, &analysis).expect("should get completions"); + let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + labels.sort_unstable(); + assert_eq!(labels, vec!["bar", "foo", "std"]); + } + + #[test] + fn test_function_parameter_completion() { + let code = "local f(x, y) = x + "; + // ^ cursor here (character 20) + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let analysis = test_analysis(&doc); + + let pos = (0, 20).into(); + + let list = completion(&doc, pos, None, &analysis).expect("should get completions"); + let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + labels.sort_unstable(); + // x, y are params; f is in scope; std is always available + assert_eq!(labels, vec!["f", "std", "x", "y"]); + } + + #[test] + fn test_completion_with_prefix() { + let code = "local foo = 1; local bar = 2; f"; + // ^ cursor at 'f' (character 31) + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let analysis = test_analysis(&doc); + + let pos = (0, 31).into(); + + let list = completion(&doc, pos, None, &analysis).expect("should get completions"); + let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + labels.sort_unstable(); + // 'foo' starts with 'f', and 'std' is always included (no prefix filtering on keywords) + assert_eq!(labels, vec!["foo", "std"]); + } + + #[test] + fn 
test_object_field_completion() { + let code = "local obj = { foo: 1, bar: 2 }; obj."; + // ^ cursor here (character 36) + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let analysis = test_analysis(&doc); + + let pos = (0, 36).into(); + + let list = + completion(&doc, pos, None, &analysis).expect("should get object field completions"); + let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + labels.sort_unstable(); + assert_eq!(labels, vec!["bar", "foo"]); + } + + #[test] + fn test_object_field_completion_with_prefix() { + let code = "local obj = { foo: 1, bar: 2 }; obj.f"; + // ^ cursor here (character 37) + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let analysis = test_analysis(&doc); + + let pos = (0, 37).into(); + + let list = completion(&doc, pos, None, &analysis) + .expect("should get object field completions with prefix"); + let labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + // Only 'foo' starts with 'f' + assert_eq!(labels, vec!["foo"]); + } + + #[test] + fn test_import_string_detection() { + // Test that we correctly detect import string context + // import " - cursor inside open string after import keyword + let text1 = r#"import ""#; + assert_eq!(find_import_string_start(text1), Some(8)); // position after opening quote + + // importstr " - cursor inside open string after importstr keyword + let text2 = r#"importstr ""#; + assert_eq!(find_import_string_start(text2), Some(11)); // position after opening quote + + // local x = " - not an import, just a regular string + let text3 = r#"local x = ""#; + assert_eq!(find_import_string_start(text3), None); + + // import "foo.jsonnet" - closed string, not inside import path + let text4 = r#"import "foo.jsonnet""#; + assert_eq!(find_import_string_start(text4), None); + } + + #[test] + fn test_import_completion_with_path() { + use tempfile::TempDir; + + // Create a temp directory with some files + let temp_dir = 
TempDir::new().unwrap(); + let temp_path = temp_dir.path(); + + // Create test files + std::fs::write(temp_path.join("utils.libsonnet"), "{}").unwrap(); + std::fs::write(temp_path.join("config.jsonnet"), "{}").unwrap(); + std::fs::create_dir(temp_path.join("lib")).unwrap(); + std::fs::write(temp_path.join("lib/helper.libsonnet"), "{}").unwrap(); + + // Create a document in the temp directory + let doc_path = temp_path.join("main.jsonnet"); + let code = r#"import ""#; + // ^ cursor here (character 8) + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let analysis = test_analysis(&doc); + + let pos = (0, 8).into(); + + let list = completion(&doc, pos, Some(&doc_path), &analysis) + .expect("should get import completions"); + let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + labels.sort_unstable(); + assert_eq!(labels, vec!["config.jsonnet", "lib", "utils.libsonnet"]); + } + + #[test] + fn test_for_comprehension_variable_completion() { + let code = "[x for x in [1, 2, 3] if ]"; + // ^ cursor here (character 25) + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let analysis = test_analysis(&doc); + + let pos = (0, 25).into(); + + let list = completion(&doc, pos, None, &analysis) + .expect("should get completions in for comprehension"); + let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + labels.sort_unstable(); + assert_eq!(labels, vec!["std", "x"]); + } + + #[test] + fn test_object_local_completion() { + let code = "{ local helper = 1, field: }"; + // ^ cursor here (character 27) + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let analysis = test_analysis(&doc); + + let pos = (0, 27).into(); + + let list = + completion(&doc, pos, None, &analysis).expect("should get completions in object"); + let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + labels.sort_unstable(); + assert_eq!(labels, vec!["$", "helper", "self", 
"std", "super"]); + } + + #[test] + fn test_nested_function_completion() { + // Cursor inside inner function body, after "a + " + let code = "local outer(a) = local inner(b) = a + ; inner(1); outer(1)"; + // ^ cursor at position 37 + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let analysis = test_analysis(&doc); + + let pos = (0, 37).into(); + + let list = completion(&doc, pos, None, &analysis) + .expect("should get completions in nested function"); + let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + labels.sort_unstable(); + // a (outer param), b (inner param), std + assert_eq!(labels, vec!["a", "b", "std"]); + } + + #[test] + fn test_completion_at_eof() { + let code = "local x = 1;\n"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let analysis = test_analysis(&doc); + let pos = (1, 0).into(); + + let list = completion(&doc, pos, None, &analysis).expect("should get completions at EOF"); + let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + labels.sort_unstable(); + assert_eq!(labels, vec!["std", "x"]); + } + + #[test] + fn test_completion_in_object() { + let code = "{ foo: 1, bar: }"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let analysis = test_analysis(&doc); + let pos = (0, 14).into(); + + let result = completion(&doc, pos, None, &analysis); + let list = result.expect("Should get completions in object"); + + let mut items: Vec<_> = list.items; + items.sort_by(|a, b| a.label.cmp(&b.label)); + + assert_eq!( + items, + vec![ + CompletionItem { + label: "$".to_string(), + kind: Some(CompletionItemKind::KEYWORD), + detail: Some("Reference to root object".to_string()), + ..Default::default() + }, + CompletionItem { + label: "self".to_string(), + kind: Some(CompletionItemKind::KEYWORD), + detail: Some("Reference to current object".to_string()), + ..Default::default() + }, + CompletionItem { + label: "std".to_string(), + kind: 
Some(CompletionItemKind::MODULE), + detail: Some("Jsonnet standard library".to_string()), + ..Default::default() + }, + CompletionItem { + label: "super".to_string(), + kind: Some(CompletionItemKind::KEYWORD), + detail: Some("Reference to inherited object".to_string()), + ..Default::default() + }, + ] + ); + } + + #[test] + fn test_no_self_super_outside_object() { + let code = "local x = 1; x"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let analysis = test_analysis(&doc); + let pos = (0, 13).into(); + + let result = completion(&doc, pos, None, &analysis); + let list = result.expect("Should get completions"); + + let mut items: Vec<_> = list.items; + items.sort_by(|a, b| a.label.cmp(&b.label)); + + assert_eq!( + items, + vec![ + CompletionItem { + label: "std".to_string(), + kind: Some(CompletionItemKind::MODULE), + detail: Some("Jsonnet standard library".to_string()), + ..Default::default() + }, + CompletionItem { + label: "x".to_string(), + kind: Some(CompletionItemKind::VARIABLE), + detail: Some("local variable".to_string()), + ..Default::default() + }, + ] + ); + } + + #[test] + fn test_multiple_for_specs_completion() { + // Multiple for specs in array comprehension + let code = "[x + y for x in [1] for y in [2]]"; + // ^ cursor at position 3 (inside expression) + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let analysis = test_analysis(&doc); + + let pos = (0, 3).into(); + + let list = completion(&doc, pos, None, &analysis) + .expect("should get completions in multi-for comprehension"); + let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + labels.sort_unstable(); + assert_eq!(labels, vec!["std", "x", "y"]); + } + + #[test] + fn test_completion_with_syntax_error() { + // Incomplete expression with syntax error + let code = "local x = 1;\nlocal y = 2;\n{ foo: x +"; + // ^ cursor at position 9 + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let analysis = 
test_analysis(&doc); + + let pos = (2, 10).into(); + + let list = completion(&doc, pos, None, &analysis) + .expect("should get completions despite syntax error"); + let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + labels.sort_unstable(); + assert_eq!(labels, vec!["$", "self", "std", "super", "x", "y"]); + } + + #[test] + fn test_shadowed_variable_completion() { + // Inner binding shadows outer binding + let code = "local x = 1;\nlocal f(x) = x +"; + // ^ cursor at position 16 + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let analysis = test_analysis(&doc); + + let pos = (1, 16).into(); + + let list = + completion(&doc, pos, None, &analysis).expect("should get completions with shadowing"); + let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + labels.sort_unstable(); + // Only one 'x' (the parameter) should be visible - outer 'x' is shadowed + assert_eq!(labels, vec!["f", "std", "x"]); + } + + #[test] + fn test_object_field_completion_with_types() { + let code = "local obj = { num: 42, str: \"hello\", arr: [1, 2] }; obj."; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let analysis = test_analysis(&doc); + let pos = (0, 56).into(); + + let result = completion(&doc, pos, None, &analysis); + let list = result.expect("Should get object field completions"); + + let mut items: Vec<_> = list.items; + items.sort_by(|a, b| a.label.cmp(&b.label)); + + assert_eq!( + items, + vec![ + CompletionItem { + label: "arr".to_string(), + kind: Some(CompletionItemKind::FIELD), + detail: Some("[number, number]".to_string()), // Tuple type + ..Default::default() + }, + CompletionItem { + label: "num".to_string(), + kind: Some(CompletionItemKind::FIELD), + detail: Some("number".to_string()), + ..Default::default() + }, + CompletionItem { + label: "str".to_string(), + kind: Some(CompletionItemKind::FIELD), + detail: Some("string".to_string()), + ..Default::default() + }, + ] + ); + 
} + + #[test] + fn test_object_field_completion_nested() { + let code = "local obj = { inner: { x: 1, y: 2 } }; obj.inner."; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let analysis = test_analysis(&doc); + let pos = (0, 49).into(); + + let result = completion(&doc, pos, None, &analysis); + let list = result.expect("Should get nested object field completions"); + + let mut items: Vec<_> = list.items; + items.sort_by(|a, b| a.label.cmp(&b.label)); + + assert_eq!( + items, + vec![ + CompletionItem { + label: "x".to_string(), + kind: Some(CompletionItemKind::FIELD), + detail: Some("number".to_string()), + ..Default::default() + }, + CompletionItem { + label: "y".to_string(), + kind: Some(CompletionItemKind::FIELD), + detail: Some("number".to_string()), + ..Default::default() + }, + ] + ); + } +} diff --git a/crates/jrsonnet-lsp-handlers/src/completion/mod.rs b/crates/jrsonnet-lsp-handlers/src/completion/mod.rs index c661d3c7..a95a1a0d 100644 --- a/crates/jrsonnet-lsp-handlers/src/completion/mod.rs +++ b/crates/jrsonnet-lsp-handlers/src/completion/mod.rs @@ -7,688 +7,11 @@ //! - Import path completion (inside import strings) mod fields; +mod handler; mod helpers; mod imports; mod locals; mod stdlib; -use std::path::{Path, PathBuf}; - -use jrsonnet_lsp_document::{token_at_offset, Document, LspPosition}; -use jrsonnet_lsp_inference::TypeAnalysis; -use jrsonnet_rowan_parser::AstNode; -use lsp_types::{CompletionItem, CompletionItemKind, CompletionList}; -use tracing::debug; - -// Re-export for tests -pub use self::imports::find_import_string_start; -use self::{ - fields::check_object_field_completion, - helpers::{get_identifier_prefix, is_inside_object}, - imports::check_import_completion, - locals::get_local_completions, - stdlib::check_stdlib_completion, -}; - -/// Get completion items for the given position. -/// -/// `doc_path` is the path to the current document, used for import path completion. 
-/// The `analysis` parameter should be pre-computed using `TypeProvider` to ensure -/// that import types are properly resolved. -pub fn completion( - document: &Document, - position: LspPosition, - doc_path: Option<&Path>, - analysis: &TypeAnalysis, -) -> Option { - completion_with_import_roots(document, position, doc_path, &[], analysis) -} - -/// Get completion items with explicit import search roots. -/// -/// `import_roots` is typically configured from server `jpath` entries and used -/// when completing inside import strings. -pub fn completion_with_import_roots( - document: &Document, - position: LspPosition, - doc_path: Option<&Path>, - import_roots: &[PathBuf], - analysis: &TypeAnalysis, -) -> Option { - let text = document.text(); - let line_index = document.line_index(); - - // Convert LSP position to byte offset - let offset = line_index.offset(position, text)?; - - // Get the AST root - let ast = document.ast(); - - // Find the token at or before the offset (may be None at whitespace/EOF) - let token = token_at_offset(ast.syntax(), offset); - - // Check if we're completing after `std.` - if let Some(ref t) = token { - if let Some(items) = check_stdlib_completion(t, text, offset.into()) { - debug!(count = items.len(), "providing stdlib completions"); - return Some(CompletionList { - is_incomplete: false, - items, - }); - } - } - - // Check if we're inside an import string - if let Some(items) = check_import_completion(text, offset.into(), doc_path, import_roots) { - debug!(count = items.len(), "providing import completions"); - return Some(CompletionList { - is_incomplete: false, - items, - }); - } - - // Check if we're completing after `obj.` (object field access) - if let Some(items) = check_object_field_completion(document, text, offset.into(), analysis) { - debug!(count = items.len(), "providing object field completions"); - return Some(CompletionList { - is_incomplete: false, - items, - }); - } - - // For general completion, provide local 
variables in scope - let mut items = get_local_completions(document, position, text, offset.into()); - - // Also include `std` as a completion option since it's always available - items.push(CompletionItem { - label: "std".to_string(), - kind: Some(CompletionItemKind::MODULE), - detail: Some("Jsonnet standard library".to_string()), - ..Default::default() - }); - - // Add `self`, `super`, and `$` if inside an object - if is_inside_object(ast.syntax(), offset) { - let prefix = get_identifier_prefix(text, offset.into()); - - if prefix.is_empty() || "$".starts_with(prefix) { - items.push(CompletionItem { - label: "$".to_string(), - kind: Some(CompletionItemKind::KEYWORD), - detail: Some("Reference to root object".to_string()), - ..Default::default() - }); - } - - if prefix.is_empty() || "self".starts_with(prefix) { - items.push(CompletionItem { - label: "self".to_string(), - kind: Some(CompletionItemKind::KEYWORD), - detail: Some("Reference to current object".to_string()), - ..Default::default() - }); - } - - if prefix.is_empty() || "super".starts_with(prefix) { - items.push(CompletionItem { - label: "super".to_string(), - kind: Some(CompletionItemKind::KEYWORD), - detail: Some("Reference to inherited object".to_string()), - ..Default::default() - }); - } - } - - Some(CompletionList { - is_incomplete: false, - items, - }) -} - -#[cfg(test)] -mod tests { - use std::sync::Arc; - - use jrsonnet_lsp_document::DocVersion; - use jrsonnet_lsp_types::GlobalTyStore; - - use super::*; - - /// Create a `TypeAnalysis` for test purposes. 
- fn test_analysis(doc: &Document) -> TypeAnalysis { - let global_types = Arc::new(GlobalTyStore::new()); - TypeAnalysis::analyze_with_global(doc, global_types) - } - - #[test] - fn test_stdlib_completion_with_prefix_xor() { - // Use prefix "xo" which only matches "xor" in stdlib - let code = "std.xo"; - let doc = Document::new(code.to_string(), DocVersion::new(1)); - let analysis = test_analysis(&doc); - - let pos = (0, 6).into(); - - let list = completion(&doc, pos, None, &analysis).expect("should get completions"); - - // Extract just labels for comparison (full CompletionItem has dynamic detail text) - let labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); - assert_eq!(labels, vec!["xor"]); - } - - #[test] - fn test_stdlib_completion_with_prefix_x() { - // Use prefix "x" which matches "xnor" and "xor" in stdlib - let code = "std.x"; - let doc = Document::new(code.to_string(), DocVersion::new(1)); - let analysis = test_analysis(&doc); - - let pos = (0, 5).into(); - - let list = completion(&doc, pos, None, &analysis).expect("should get completions"); - - // Extract labels and sort for comparison - let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); - labels.sort_unstable(); - assert_eq!(labels, vec!["xnor", "xor"]); - } - - #[test] - fn test_stdlib_completion_with_alias_prefix_xo() { - let code = "local s = std; s.xo"; - let doc = Document::new(code.to_string(), DocVersion::new(1)); - let analysis = test_analysis(&doc); - - let pos = ( - 0, - u32::try_from(code.len()).expect("test code length fits in u32"), - ) - .into(); - - let list = completion(&doc, pos, None, &analysis).expect("should get completions"); - let labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); - assert_eq!(labels, vec!["xor"]); - } - - #[test] - fn test_shadowed_std_uses_object_field_completion() { - let code = "local std = { foo: 1 }; std."; - let doc = Document::new(code.to_string(), DocVersion::new(1)); - let 
analysis = test_analysis(&doc); - - let pos = ( - 0, - u32::try_from(code.len()).expect("test code length fits in u32"), - ) - .into(); - - let result = completion(&doc, pos, None, &analysis).expect("should get completions"); - assert_eq!( - result.items, - vec![CompletionItem { - label: "foo".to_string(), - label_details: None, - kind: Some(CompletionItemKind::FIELD), - detail: Some("number".to_string()), - documentation: None, - deprecated: None, - preselect: None, - sort_text: None, - filter_text: None, - insert_text: None, - insert_text_format: None, - insert_text_mode: None, - text_edit: None, - additional_text_edits: None, - command: None, - commit_characters: None, - data: None, - tags: None, - }] - ); - } - - #[test] - fn test_completion_includes_std_and_local() { - // Simple case: one local variable, position at end where std and x are visible - let code = "local x = 1; "; - let doc = Document::new(code.to_string(), DocVersion::new(1)); - let analysis = test_analysis(&doc); - - let pos = (0, 13).into(); - - let list = completion(&doc, pos, None, &analysis).expect("should get completions"); - - // Extract labels and sort for comparison - let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); - labels.sort_unstable(); - assert_eq!(labels, vec!["std", "x"]); - } - - #[test] - fn test_completion_item_has_documentation() { - // Use a simple object where we can assert the full completion list - let code = "local obj = { foo: 1 }; obj."; - let doc = Document::new(code.to_string(), DocVersion::new(1)); - let analysis = test_analysis(&doc); - - let pos = (0, 28).into(); - - let result = completion(&doc, pos, None, &analysis).expect("should get completions"); - - // Assert the full completion list for object field access - assert_eq!( - result.items, - vec![CompletionItem { - label: "foo".to_string(), - label_details: None, - kind: Some(CompletionItemKind::FIELD), - detail: Some("number".to_string()), - documentation: None, - deprecated: 
None, - preselect: None, - sort_text: None, - filter_text: None, - insert_text: None, - insert_text_format: None, - insert_text_mode: None, - text_edit: None, - additional_text_edits: None, - command: None, - commit_characters: None, - data: None, - tags: None, - }] - ); - } - - #[test] - fn test_local_variable_completion() { - let code = "local foo = 1; local bar = 2; "; - // ^ cursor here (character 30) - let doc = Document::new(code.to_string(), DocVersion::new(1)); - let analysis = test_analysis(&doc); - - let pos = (0, 30).into(); - - let list = completion(&doc, pos, None, &analysis).expect("should get completions"); - let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); - labels.sort_unstable(); - assert_eq!(labels, vec!["bar", "foo", "std"]); - } - - #[test] - fn test_function_parameter_completion() { - let code = "local f(x, y) = x + "; - // ^ cursor here (character 20) - let doc = Document::new(code.to_string(), DocVersion::new(1)); - let analysis = test_analysis(&doc); - - let pos = (0, 20).into(); - - let list = completion(&doc, pos, None, &analysis).expect("should get completions"); - let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); - labels.sort_unstable(); - // x, y are params; f is in scope; std is always available - assert_eq!(labels, vec!["f", "std", "x", "y"]); - } - - #[test] - fn test_completion_with_prefix() { - let code = "local foo = 1; local bar = 2; f"; - // ^ cursor at 'f' (character 31) - let doc = Document::new(code.to_string(), DocVersion::new(1)); - let analysis = test_analysis(&doc); - - let pos = (0, 31).into(); - - let list = completion(&doc, pos, None, &analysis).expect("should get completions"); - let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); - labels.sort_unstable(); - // 'foo' starts with 'f', and 'std' is always included (no prefix filtering on keywords) - assert_eq!(labels, vec!["foo", "std"]); - } - - #[test] - fn 
test_object_field_completion() { - let code = "local obj = { foo: 1, bar: 2 }; obj."; - // ^ cursor here (character 36) - let doc = Document::new(code.to_string(), DocVersion::new(1)); - let analysis = test_analysis(&doc); - - let pos = (0, 36).into(); - - let list = - completion(&doc, pos, None, &analysis).expect("should get object field completions"); - let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); - labels.sort_unstable(); - assert_eq!(labels, vec!["bar", "foo"]); - } - - #[test] - fn test_object_field_completion_with_prefix() { - let code = "local obj = { foo: 1, bar: 2 }; obj.f"; - // ^ cursor here (character 37) - let doc = Document::new(code.to_string(), DocVersion::new(1)); - let analysis = test_analysis(&doc); - - let pos = (0, 37).into(); - - let list = completion(&doc, pos, None, &analysis) - .expect("should get object field completions with prefix"); - let labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); - // Only 'foo' starts with 'f' - assert_eq!(labels, vec!["foo"]); - } - - #[test] - fn test_import_string_detection() { - // Test that we correctly detect import string context - // import " - cursor inside open string after import keyword - let text1 = r#"import ""#; - assert_eq!(find_import_string_start(text1), Some(8)); // position after opening quote - - // importstr " - cursor inside open string after importstr keyword - let text2 = r#"importstr ""#; - assert_eq!(find_import_string_start(text2), Some(11)); // position after opening quote - - // local x = " - not an import, just a regular string - let text3 = r#"local x = ""#; - assert_eq!(find_import_string_start(text3), None); - - // import "foo.jsonnet" - closed string, not inside import path - let text4 = r#"import "foo.jsonnet""#; - assert_eq!(find_import_string_start(text4), None); - } - - #[test] - fn test_import_completion_with_path() { - use tempfile::TempDir; - - // Create a temp directory with some files - let temp_dir = 
TempDir::new().unwrap(); - let temp_path = temp_dir.path(); - - // Create test files - std::fs::write(temp_path.join("utils.libsonnet"), "{}").unwrap(); - std::fs::write(temp_path.join("config.jsonnet"), "{}").unwrap(); - std::fs::create_dir(temp_path.join("lib")).unwrap(); - std::fs::write(temp_path.join("lib/helper.libsonnet"), "{}").unwrap(); - - // Create a document in the temp directory - let doc_path = temp_path.join("main.jsonnet"); - let code = r#"import ""#; - // ^ cursor here (character 8) - let doc = Document::new(code.to_string(), DocVersion::new(1)); - let analysis = test_analysis(&doc); - - let pos = (0, 8).into(); - - let list = completion(&doc, pos, Some(&doc_path), &analysis) - .expect("should get import completions"); - let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); - labels.sort_unstable(); - assert_eq!(labels, vec!["config.jsonnet", "lib", "utils.libsonnet"]); - } - - #[test] - fn test_for_comprehension_variable_completion() { - let code = "[x for x in [1, 2, 3] if ]"; - // ^ cursor here (character 25) - let doc = Document::new(code.to_string(), DocVersion::new(1)); - let analysis = test_analysis(&doc); - - let pos = (0, 25).into(); - - let list = completion(&doc, pos, None, &analysis) - .expect("should get completions in for comprehension"); - let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); - labels.sort_unstable(); - assert_eq!(labels, vec!["std", "x"]); - } - - #[test] - fn test_object_local_completion() { - let code = "{ local helper = 1, field: }"; - // ^ cursor here (character 27) - let doc = Document::new(code.to_string(), DocVersion::new(1)); - let analysis = test_analysis(&doc); - - let pos = (0, 27).into(); - - let list = - completion(&doc, pos, None, &analysis).expect("should get completions in object"); - let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); - labels.sort_unstable(); - assert_eq!(labels, vec!["$", "helper", "self", 
"std", "super"]); - } - - #[test] - fn test_nested_function_completion() { - // Cursor inside inner function body, after "a + " - let code = "local outer(a) = local inner(b) = a + ; inner(1); outer(1)"; - // ^ cursor at position 37 - let doc = Document::new(code.to_string(), DocVersion::new(1)); - let analysis = test_analysis(&doc); - - let pos = (0, 37).into(); - - let list = completion(&doc, pos, None, &analysis) - .expect("should get completions in nested function"); - let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); - labels.sort_unstable(); - // a (outer param), b (inner param), std - assert_eq!(labels, vec!["a", "b", "std"]); - } - - #[test] - fn test_completion_at_eof() { - let code = "local x = 1;\n"; - let doc = Document::new(code.to_string(), DocVersion::new(1)); - let analysis = test_analysis(&doc); - let pos = (1, 0).into(); - - let list = completion(&doc, pos, None, &analysis).expect("should get completions at EOF"); - let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); - labels.sort_unstable(); - assert_eq!(labels, vec!["std", "x"]); - } - - #[test] - fn test_completion_in_object() { - let code = "{ foo: 1, bar: }"; - let doc = Document::new(code.to_string(), DocVersion::new(1)); - let analysis = test_analysis(&doc); - let pos = (0, 14).into(); - - let result = completion(&doc, pos, None, &analysis); - let list = result.expect("Should get completions in object"); - - let mut items: Vec<_> = list.items; - items.sort_by(|a, b| a.label.cmp(&b.label)); - - assert_eq!( - items, - vec![ - CompletionItem { - label: "$".to_string(), - kind: Some(CompletionItemKind::KEYWORD), - detail: Some("Reference to root object".to_string()), - ..Default::default() - }, - CompletionItem { - label: "self".to_string(), - kind: Some(CompletionItemKind::KEYWORD), - detail: Some("Reference to current object".to_string()), - ..Default::default() - }, - CompletionItem { - label: "std".to_string(), - kind: 
Some(CompletionItemKind::MODULE), - detail: Some("Jsonnet standard library".to_string()), - ..Default::default() - }, - CompletionItem { - label: "super".to_string(), - kind: Some(CompletionItemKind::KEYWORD), - detail: Some("Reference to inherited object".to_string()), - ..Default::default() - }, - ] - ); - } - - #[test] - fn test_no_self_super_outside_object() { - let code = "local x = 1; x"; - let doc = Document::new(code.to_string(), DocVersion::new(1)); - let analysis = test_analysis(&doc); - let pos = (0, 13).into(); - - let result = completion(&doc, pos, None, &analysis); - let list = result.expect("Should get completions"); - - let mut items: Vec<_> = list.items; - items.sort_by(|a, b| a.label.cmp(&b.label)); - - assert_eq!( - items, - vec![ - CompletionItem { - label: "std".to_string(), - kind: Some(CompletionItemKind::MODULE), - detail: Some("Jsonnet standard library".to_string()), - ..Default::default() - }, - CompletionItem { - label: "x".to_string(), - kind: Some(CompletionItemKind::VARIABLE), - detail: Some("local variable".to_string()), - ..Default::default() - }, - ] - ); - } - - #[test] - fn test_multiple_for_specs_completion() { - // Multiple for specs in array comprehension - let code = "[x + y for x in [1] for y in [2]]"; - // ^ cursor at position 3 (inside expression) - let doc = Document::new(code.to_string(), DocVersion::new(1)); - let analysis = test_analysis(&doc); - - let pos = (0, 3).into(); - - let list = completion(&doc, pos, None, &analysis) - .expect("should get completions in multi-for comprehension"); - let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); - labels.sort_unstable(); - assert_eq!(labels, vec!["std", "x", "y"]); - } - - #[test] - fn test_completion_with_syntax_error() { - // Incomplete expression with syntax error - let code = "local x = 1;\nlocal y = 2;\n{ foo: x +"; - // ^ cursor at position 9 - let doc = Document::new(code.to_string(), DocVersion::new(1)); - let analysis = 
test_analysis(&doc); - - let pos = (2, 10).into(); - - let list = completion(&doc, pos, None, &analysis) - .expect("should get completions despite syntax error"); - let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); - labels.sort_unstable(); - assert_eq!(labels, vec!["$", "self", "std", "super", "x", "y"]); - } - - #[test] - fn test_shadowed_variable_completion() { - // Inner binding shadows outer binding - let code = "local x = 1;\nlocal f(x) = x +"; - // ^ cursor at position 16 - let doc = Document::new(code.to_string(), DocVersion::new(1)); - let analysis = test_analysis(&doc); - - let pos = (1, 16).into(); - - let list = - completion(&doc, pos, None, &analysis).expect("should get completions with shadowing"); - let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); - labels.sort_unstable(); - // Only one 'x' (the parameter) should be visible - outer 'x' is shadowed - assert_eq!(labels, vec!["f", "std", "x"]); - } - - #[test] - fn test_object_field_completion_with_types() { - let code = "local obj = { num: 42, str: \"hello\", arr: [1, 2] }; obj."; - let doc = Document::new(code.to_string(), DocVersion::new(1)); - let analysis = test_analysis(&doc); - let pos = (0, 56).into(); - - let result = completion(&doc, pos, None, &analysis); - let list = result.expect("Should get object field completions"); - - let mut items: Vec<_> = list.items; - items.sort_by(|a, b| a.label.cmp(&b.label)); - - assert_eq!( - items, - vec![ - CompletionItem { - label: "arr".to_string(), - kind: Some(CompletionItemKind::FIELD), - detail: Some("[number, number]".to_string()), // Tuple type - ..Default::default() - }, - CompletionItem { - label: "num".to_string(), - kind: Some(CompletionItemKind::FIELD), - detail: Some("number".to_string()), - ..Default::default() - }, - CompletionItem { - label: "str".to_string(), - kind: Some(CompletionItemKind::FIELD), - detail: Some("string".to_string()), - ..Default::default() - }, - ] - ); - 
} - - #[test] - fn test_object_field_completion_nested() { - let code = "local obj = { inner: { x: 1, y: 2 } }; obj.inner."; - let doc = Document::new(code.to_string(), DocVersion::new(1)); - let analysis = test_analysis(&doc); - let pos = (0, 49).into(); - - let result = completion(&doc, pos, None, &analysis); - let list = result.expect("Should get nested object field completions"); - - let mut items: Vec<_> = list.items; - items.sort_by(|a, b| a.label.cmp(&b.label)); - - assert_eq!( - items, - vec![ - CompletionItem { - label: "x".to_string(), - kind: Some(CompletionItemKind::FIELD), - detail: Some("number".to_string()), - ..Default::default() - }, - CompletionItem { - label: "y".to_string(), - kind: Some(CompletionItemKind::FIELD), - detail: Some("number".to_string()), - ..Default::default() - }, - ] - ); - } -} +pub use handler::{completion, completion_with_import_roots}; +pub use imports::find_import_string_start; From f6bcfbe21dee0974c95074b47171b1e1fd5bb772 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Tue, 10 Feb 2026 15:10:42 +0000 Subject: [PATCH 059/210] lsp: surface unresolved imports as first-class diagnostics --- crates/jrsonnet-lsp-import/src/graph.rs | 108 +++++++++++---- crates/jrsonnet-lsp-import/src/lib.rs | 5 +- crates/jrsonnet-lsp/src/async_diagnostics.rs | 17 ++- .../jrsonnet-lsp/src/handlers/diagnostics.rs | 123 +++++++++++++++++- crates/jrsonnet-lsp/src/server.rs | 14 +- .../jrsonnet-lsp/src/server/async_requests.rs | 14 +- docs/lsp/ARCHITECTURE.md | 8 +- docs/lsp/HANDLERS.md | 3 +- 8 files changed, 255 insertions(+), 37 deletions(-) diff --git a/crates/jrsonnet-lsp-import/src/graph.rs b/crates/jrsonnet-lsp-import/src/graph.rs index c4a1ca4e..f6271fc2 100644 --- a/crates/jrsonnet-lsp-import/src/graph.rs +++ b/crates/jrsonnet-lsp-import/src/graph.rs @@ -8,7 +8,7 @@ use std::collections::{HashMap, HashSet, VecDeque}; use jrsonnet_lsp_document::{CanonicalPath, Document}; use jrsonnet_rowan_parser::{ nodes::{Bind, Destruct, ExprImport, 
StmtLocal}, - AstNode, SyntaxKind, + AstNode, AstToken, SyntaxKind, }; use crate::{ @@ -28,6 +28,15 @@ pub struct ImportEntry { pub resolved_path: Option, } +/// One import occurrence in source, including its location. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ImportOccurrence { + /// Semantic import entry data. + pub entry: ImportEntry, + /// Range of the import path token in source (e.g. `"foo.libsonnet"`). + pub import_range: rowan::TextRange, +} + /// Import graph tracking dependencies between files. /// /// This structure maintains two maps: @@ -392,7 +401,24 @@ pub fn parse_document_imports(doc: &Document, resolve_import: &F) -> Vec Option, { - let mut entries = Vec::new(); + parse_document_import_occurrences(doc, resolve_import) + .into_iter() + .map(|occurrence| occurrence.entry) + .collect() +} + +/// Parse import occurrences from a document with source ranges. +/// +/// This is useful for diagnostics where callers need to point at the exact +/// import token in source when a path cannot be resolved. 
+pub fn parse_document_import_occurrences( + doc: &Document, + resolve_import: &F, +) -> Vec +where + F: Fn(&str) -> Option, +{ + let mut occurrences = Vec::new(); let mut seen_import_ranges = std::collections::HashSet::new(); let ast = doc.ast(); @@ -401,11 +427,11 @@ where if node.kind() == SyntaxKind::STMT_LOCAL { if let Some(stmt_local) = StmtLocal::cast(node.clone()) { for bind in stmt_local.binds() { - if let Some((entry, import_range)) = + if let Some((occurrence, import_range)) = parse_bind_import_with_range(&bind, resolve_import) { seen_import_ranges.insert(import_range); - entries.push(entry); + occurrences.push(occurrence); } } } @@ -421,26 +447,21 @@ where continue; } if let Some(import) = ExprImport::cast(node) { - if let Some(path) = extract_import_path(&import) { - let resolved = resolve_import(&path); - entries.push(ImportEntry { - binding_name: None, - import_path: path, - resolved_path: resolved, - }); + if let Some(occurrence) = parse_import_occurrence(&import, None, resolve_import) { + occurrences.push(occurrence); } } } } - entries + occurrences } /// Parse a bind to extract import information, returning the import's text range. 
fn parse_bind_import_with_range( bind: &Bind, resolve_import: &F, -) -> Option<(ImportEntry, rowan::TextRange)> +) -> Option<(ImportOccurrence, rowan::TextRange)> where F: Fn(&str) -> Option, { @@ -461,16 +482,10 @@ where if node.kind() == SyntaxKind::EXPR_IMPORT { let range = node.text_range(); if let Some(import) = ExprImport::cast(node) { - if let Some(path) = extract_import_path(&import) { - let resolved = resolve_import(&path); - return Some(( - ImportEntry { - binding_name: Some(bind_name), - import_path: path, - resolved_path: resolved, - }, - range, - )); + if let Some(occurrence) = + parse_import_occurrence(&import, Some(bind_name.clone()), resolve_import) + { + return Some((occurrence, range)); } } } @@ -479,6 +494,28 @@ where None } +fn parse_import_occurrence( + import: &ExprImport, + binding_name: Option, + resolve_import: &F, +) -> Option +where + F: Fn(&str) -> Option, +{ + let path = extract_import_path(import)?; + let resolved = resolve_import(&path); + let import_range = import.text()?.syntax().text_range(); + + Some(ImportOccurrence { + entry: ImportEntry { + binding_name, + import_path: path, + resolved_path: resolved, + }, + import_range, + }) +} + #[cfg(test)] mod tests { use std::path::PathBuf; @@ -517,6 +554,31 @@ mod tests { ); } + #[test] + fn test_parse_import_occurrences_include_string_token_range() { + let code = r#"local lib = import "lib.jsonnet"; lib"#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let occurrences = parse_document_import_occurrences(&doc, &simple_resolver); + let start = u32::try_from( + code.find("\"lib.jsonnet\"") + .expect("import string should exist"), + ) + .unwrap(); + let end = start + u32::try_from("\"lib.jsonnet\"".len()).unwrap(); + + assert_eq!( + occurrences, + vec![ImportOccurrence { + entry: ImportEntry { + binding_name: Some("lib".to_string()), + import_path: "lib.jsonnet".to_string(), + resolved_path: Some(test_path("lib.jsonnet")), + }, + import_range: 
rowan::TextRange::new(start.into(), end.into()), + }] + ); + } + #[test] fn test_parse_local_import_single_quote() { let code = "local lib = import 'lib.jsonnet'; lib"; diff --git a/crates/jrsonnet-lsp-import/src/lib.rs b/crates/jrsonnet-lsp-import/src/lib.rs index c3d98b2d..948bf414 100644 --- a/crates/jrsonnet-lsp-import/src/lib.rs +++ b/crates/jrsonnet-lsp-import/src/lib.rs @@ -10,7 +10,10 @@ pub mod parse; pub mod resolve; pub mod work_queue; -pub use graph::{parse_document_imports, ImportEntry, ImportGraph}; +pub use graph::{ + parse_document_import_occurrences, parse_document_imports, ImportEntry, ImportGraph, + ImportOccurrence, +}; pub use parse::{ check_import_from_token, check_import_path, extract_import_path, find_import_in_node, get_import_path_from_node, diff --git a/crates/jrsonnet-lsp/src/async_diagnostics.rs b/crates/jrsonnet-lsp/src/async_diagnostics.rs index 596984be..0acae7b0 100644 --- a/crates/jrsonnet-lsp/src/async_diagnostics.rs +++ b/crates/jrsonnet-lsp/src/async_diagnostics.rs @@ -4,6 +4,7 @@ //! Debounces rapid edits to avoid computing diagnostics for intermediate states. use std::{ + path::PathBuf, sync::{ atomic::{AtomicU64, Ordering}, Arc, @@ -14,7 +15,7 @@ use std::{ use crossbeam_channel::{Receiver, Sender}; use jrsonnet_lsp_document::{CanonicalPath, DocVersion, Document}; -use jrsonnet_lsp_import::ImportGraph; +use jrsonnet_lsp_import::{parse_document_import_occurrences, resolve_import_path, ImportGraph}; use jrsonnet_lsp_inference::{ DocumentSource, SharedDocumentManager, SharedTypeCache, TypeProvider, }; @@ -42,6 +43,8 @@ struct DiagnosticsRequest { version: DocVersion, /// Whether to enable lint diagnostics. enable_lint: bool, + /// Import roots to use for resolving import paths. + import_roots: Vec, /// Sequence number to detect stale requests. 
sequence: u64, } @@ -145,6 +148,7 @@ impl AsyncDiagnostics { text: String, version: DocVersion, enable_lint: bool, + import_roots: Vec, ) { let sequence = self.sequence.fetch_add(1, Ordering::SeqCst); @@ -156,6 +160,7 @@ impl AsyncDiagnostics { text, version, enable_lint, + import_roots, sequence, }; @@ -231,6 +236,9 @@ impl AsyncDiagnostics { Arc::clone(&config.global_types), ); let analysis = provider.analyze(&request.path, &document, &doc_source); + let resolve_import = + |import: &str| resolve_import_path(&request.path, import, &request.import_roots); + let import_occurrences = parse_document_import_occurrences(&document, &resolve_import); let Some(params) = handlers::publish_diagnostics_params( &request.path, @@ -238,6 +246,7 @@ impl AsyncDiagnostics { request.enable_lint, config.evaluator.as_deref(), &analysis, + &import_occurrences, ) else { debug!( "Diagnostics worker: failed to build URI for {}, skipping diagnostics", @@ -315,7 +324,7 @@ mod tests { let text = "{ a: 1 }".to_string(); let version = DocVersion::new(1); - runner.schedule(path.clone(), text, version, false); + runner.schedule(path.clone(), text, version, false, vec![]); // Wait for result with timeout let result = runner @@ -337,7 +346,7 @@ mod tests { for i in 0..5 { let text = format!("{{ a: {i} }}"); let version = DocVersion::new(i); - runner.schedule(path.clone(), text, version, false); + runner.schedule(path.clone(), text, version, false, vec![]); } // Should only get one result (the last one) due to debouncing @@ -364,7 +373,7 @@ mod tests { let text = "{ a: }".to_string(); let version = DocVersion::new(1); - runner.schedule(path, text, version, false); + runner.schedule(path, text, version, false, vec![]); let result = runner .results() diff --git a/crates/jrsonnet-lsp/src/handlers/diagnostics.rs b/crates/jrsonnet-lsp/src/handlers/diagnostics.rs index 0f76a83e..fdad0a97 100644 --- a/crates/jrsonnet-lsp/src/handlers/diagnostics.rs +++ b/crates/jrsonnet-lsp/src/handlers/diagnostics.rs @@ 
-2,6 +2,7 @@ use jrsonnet_lsp_check::lint; use jrsonnet_lsp_document::{CanonicalPath, Document, LineIndex, SyntaxError}; +use jrsonnet_lsp_import::ImportOccurrence; use jrsonnet_lsp_inference::TypeAnalysis; use lsp_types::{Diagnostic, DiagnosticSeverity, NumberOrString, Range}; @@ -39,6 +40,34 @@ fn syntax_error_to_diagnostic( } } +fn unresolved_import_to_diagnostic( + occurrence: &ImportOccurrence, + line_index: &LineIndex, + text: &str, +) -> Option { + if occurrence.entry.resolved_path.is_some() { + return None; + } + + let start = line_index.position(occurrence.import_range.start().into(), text)?; + let end = line_index.position(occurrence.import_range.end().into(), text)?; + + Some(Diagnostic { + range: Range { + start: start.into(), + end: end.into(), + }, + severity: Some(DiagnosticSeverity::ERROR), + code: Some(NumberOrString::String("unresolved-import".to_string())), + code_description: None, + source: Some("jrsonnet-import".to_string()), + message: format!("unable to resolve import: {}", occurrence.entry.import_path), + related_information: None, + tags: None, + data: None, + }) +} + /// Compute diagnostics for a document. 
/// /// # Arguments @@ -48,6 +77,7 @@ fn syntax_error_to_diagnostic( /// * `evaluator` - Optional evaluator for runtime error detection /// * `uri` - The URI of the document (needed for lint related information) /// * `analysis` - Precomputed type analysis used by lint type checks +/// * `import_occurrences` - Parsed import occurrences with source ranges pub fn compute_diagnostics( document: &Document, path: &CanonicalPath, @@ -55,6 +85,7 @@ pub fn compute_diagnostics( evaluator: Option<&Evaluator>, uri: &lsp_types::Uri, analysis: &TypeAnalysis, + import_occurrences: &[ImportOccurrence], ) -> Vec { let text = document.text(); let line_index = document.line_index(); @@ -65,6 +96,12 @@ pub fn compute_diagnostics( .map(|e| syntax_error_to_diagnostic(e, line_index, text)) .collect(); + if errors.is_empty() { + diagnostics.extend(import_occurrences.iter().filter_map(|occurrence| { + unresolved_import_to_diagnostic(occurrence, line_index, text) + })); + } + // Add lint diagnostics if enabled and the document parsed successfully if enable_lint && errors.is_empty() { let lint_config = lint::LintConfig::all(); @@ -90,15 +127,25 @@ pub fn compute_diagnostics( /// * `enable_lint` - Whether to include lint warnings /// * `evaluator` - Optional evaluator for runtime error detection /// * `analysis` - Precomputed type analysis used by lint type checks +/// * `import_occurrences` - Parsed import occurrences with source ranges pub fn publish_diagnostics_params( path: &CanonicalPath, document: &Document, enable_lint: bool, evaluator: Option<&Evaluator>, analysis: &TypeAnalysis, + import_occurrences: &[ImportOccurrence], ) -> Option { let uri = path.to_uri().ok()?; - let diagnostics = compute_diagnostics(document, path, enable_lint, evaluator, &uri, analysis); + let diagnostics = compute_diagnostics( + document, + path, + enable_lint, + evaluator, + &uri, + analysis, + import_occurrences, + ); Some(lsp_types::PublishDiagnosticsParams { uri, @@ -110,6 +157,7 @@ pub fn 
publish_diagnostics_params( #[cfg(test)] mod tests { use jrsonnet_lsp_document::DocVersion; + use jrsonnet_lsp_import::{parse_document_import_occurrences, ImportOccurrence}; use jrsonnet_lsp_inference::TypeAnalysis; use lsp_types::{Position, Uri}; @@ -132,6 +180,15 @@ mod tests { doc: &Document, enable_lint: bool, evaluator: Option<&Evaluator>, + ) -> Vec { + diagnostics_for_with_occurrences(doc, enable_lint, evaluator, &[]) + } + + fn diagnostics_for_with_occurrences( + doc: &Document, + enable_lint: bool, + evaluator: Option<&Evaluator>, + import_occurrences: &[ImportOccurrence], ) -> Vec { let analysis = TypeAnalysis::analyze(doc); compute_diagnostics( @@ -141,9 +198,22 @@ mod tests { evaluator, &test_uri(), &analysis, + import_occurrences, ) } + fn parse_occurrences(doc: &Document) -> Vec { + let path = test_path(); + parse_document_import_occurrences(doc, &|import| { + if import == "exists.libsonnet" { + let mut resolved = path.as_path().parent()?.to_path_buf(); + resolved.push(import); + return Some(CanonicalPath::new(resolved)); + } + None + }) + } + #[test] fn test_valid_document_no_diagnostics() { let doc = Document::new(r#"{ hello: "world" }"#.to_string(), DocVersion::new(1)); @@ -432,4 +502,55 @@ mod tests { .as_ref() .is_some_and(|c| matches!(c, NumberOrString::String(s) if s == "syntax-error")))); } + + #[test] + fn test_unresolved_import_reports_diagnostic() { + let doc = Document::new( + r#"local lib = import "missing.libsonnet"; lib"#.to_string(), + DocVersion::new(1), + ); + let occurrences = parse_occurrences(&doc); + let diagnostics = diagnostics_for_with_occurrences(&doc, false, None, &occurrences); + + assert_eq!( + diagnostics, + vec![Diagnostic { + range: Range { + start: Position { + line: 0, + character: 19 + }, + end: Position { + line: 0, + character: 38 + }, + }, + severity: Some(DiagnosticSeverity::ERROR), + code: Some(NumberOrString::String("unresolved-import".to_string())), + code_description: None, + source: 
Some("jrsonnet-import".to_string()), + message: "unable to resolve import: missing.libsonnet".to_string(), + related_information: None, + tags: None, + data: None, + }] + ); + } + + #[test] + fn test_unresolved_import_not_reported_on_syntax_error() { + let doc = Document::new( + r#"local lib = import "missing.libsonnet"; {"#.to_string(), + DocVersion::new(1), + ); + let occurrences = parse_occurrences(&doc); + let diagnostics = diagnostics_for_with_occurrences(&doc, false, None, &occurrences); + + assert!(diagnostics.iter().all(|diag| { + matches!( + diag.code.as_ref(), + Some(NumberOrString::String(code)) if code == "syntax-error" + ) + })); + } } diff --git a/crates/jrsonnet-lsp/src/server.rs b/crates/jrsonnet-lsp/src/server.rs index 092f30a8..c3385957 100644 --- a/crates/jrsonnet-lsp/src/server.rs +++ b/crates/jrsonnet-lsp/src/server.rs @@ -1475,13 +1475,23 @@ impl Server { return; }; - let enable_lint = self.config.read().lint_diagnostics_enabled(); + let (enable_lint, import_roots) = { + let config = self.config.read(); + ( + config.lint_diagnostics_enabled(), + effective_import_roots( + path.as_path(), + &config.jpath, + config.resolve_paths_with_tanka, + ), + ) + }; let text = doc.text().to_string(); let version = doc.version(); drop(doc); // Release the borrow before scheduling self.diagnostics - .schedule(path.clone(), text, version, enable_lint); + .schedule(path.clone(), text, version, enable_lint, import_roots); } /// Send a notification to the client. 
diff --git a/crates/jrsonnet-lsp/src/server/async_requests.rs b/crates/jrsonnet-lsp/src/server/async_requests.rs index 00093f11..a546f50b 100644 --- a/crates/jrsonnet-lsp/src/server/async_requests.rs +++ b/crates/jrsonnet-lsp/src/server/async_requests.rs @@ -2,7 +2,7 @@ use std::sync::Arc; use jrsonnet_lsp_document::{to_lsp_range, CanonicalPath, DocVersion, Document, SymbolName}; use jrsonnet_lsp_handlers as handlers; -use jrsonnet_lsp_import::ImportGraph; +use jrsonnet_lsp_import::{parse_document_import_occurrences, resolve_import_path, ImportGraph}; use jrsonnet_lsp_inference::{SharedDocumentManager, SharedTypeCache, TypeAnalysis, TypeProvider}; use jrsonnet_lsp_types::GlobalTyStore; use jrsonnet_rowan_parser::AstNode; @@ -531,7 +531,7 @@ impl AsyncRequestContext { let path = CanonicalPath::from_uri(&uri_parsed)?; let doc = self.load_document_for_path(&path)?; let analysis = self.analyze_document(&path, &doc); - let (enable_lint_diagnostics, evaluator) = { + let (enable_lint_diagnostics, evaluator, import_roots) = { let config = self.config.read(); let evaluator = config.enable_eval_diagnostics.then(|| { let eval_config = EvalConfig { @@ -540,8 +540,15 @@ impl AsyncRequestContext { }; Evaluator::new(&eval_config) }); - (config.enable_lint_diagnostics, evaluator) + let import_roots = effective_import_roots( + path.as_path(), + &config.jpath, + config.resolve_paths_with_tanka, + ); + (config.enable_lint_diagnostics, evaluator, import_roots) }; + let resolve_import = |import: &str| resolve_import_path(&path, import, &import_roots); + let import_occurrences = parse_document_import_occurrences(&doc, &resolve_import); let diagnostics = crate::handlers::compute_diagnostics( &doc, @@ -550,6 +557,7 @@ impl AsyncRequestContext { evaluator.as_ref(), &uri_parsed, &analysis, + &import_occurrences, ); let response = lsp_types::PublishDiagnosticsParams { diff --git a/docs/lsp/ARCHITECTURE.md b/docs/lsp/ARCHITECTURE.md index 5974d352..89397943 100644 --- 
a/docs/lsp/ARCHITECTURE.md +++ b/docs/lsp/ARCHITECTURE.md @@ -254,6 +254,8 @@ Diagnostics run in a dedicated background worker (`AsyncDiagnostics` in - Requests are debounced per-file (`500 ms`). - Stale requests/results are discarded by sequence numbers. - Worker reconstructs `Document` from scheduled text/version. +- Worker parses import occurrences and resolves them against scheduled import + roots. - Worker uses `TypeProvider` + `TypeCache` + `ImportGraph` for dependency-aware analysis. - Result is sent back as `PublishDiagnosticsParams`. @@ -261,9 +263,11 @@ Diagnostics run in a dedicated background worker (`AsyncDiagnostics` in Diagnostic composition (`crates/jrsonnet-lsp/src/handlers/diagnostics.rs`): 1. Syntax diagnostics from parser errors. -2. Lint/type diagnostics from `jrsonnet-lsp-check` when lint is enabled and +2. Unresolved-import diagnostics from AST import occurrences when parse + succeeded. +3. Lint/type diagnostics from `jrsonnet-lsp-check` when lint is enabled and parse succeeded. -3. Evaluation diagnostic from `Evaluator` (optional) when parse succeeded. +4. Evaluation diagnostic from `Evaluator` (optional) when parse succeeded. Evaluation diagnostics use `analysis/eval.rs` and can optionally apply Tanka-aware `jpath` expansion via `analysis/tanka.rs`. diff --git a/docs/lsp/HANDLERS.md b/docs/lsp/HANDLERS.md index fbda8cb4..a73cd543 100644 --- a/docs/lsp/HANDLERS.md +++ b/docs/lsp/HANDLERS.md @@ -307,7 +307,8 @@ error. `jrsonnet.showErrors` returns a `PublishDiagnosticsParams` payload for the target URI so clients can render the same diagnostics data that the server -publishes asynchronously. +publishes asynchronously (syntax, unresolved imports, lint/type checks, and +optional eval diagnostics). 
## Testing Strategy From 4e38b4ac1eeb9fab33a24eeda1b8a395b4345223 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Tue, 10 Feb 2026 15:14:12 +0000 Subject: [PATCH 060/210] lsp-handlers: improve field completion edits and signature spans --- .../src/completion/fields.rs | 66 ++++++++-- .../src/completion/handler.rs | 116 ++++++++++++++++++ .../src/signature_help.rs | 105 ++++++++++++---- docs/lsp/HANDLERS.md | 4 + 4 files changed, 253 insertions(+), 38 deletions(-) diff --git a/crates/jrsonnet-lsp-handlers/src/completion/fields.rs b/crates/jrsonnet-lsp-handlers/src/completion/fields.rs index 9ee2a411..cb57780a 100644 --- a/crates/jrsonnet-lsp-handlers/src/completion/fields.rs +++ b/crates/jrsonnet-lsp-handlers/src/completion/fields.rs @@ -1,13 +1,15 @@ //! Object field completions for `obj.` patterns. -use jrsonnet_lsp_document::{token_at_offset, ByteOffset, Document}; +use jrsonnet_lsp_document::{ + is_valid_jsonnet_identifier, token_at_offset, ByteOffset, Document, LineIndex, +}; use jrsonnet_lsp_inference::TypeAnalysis; use jrsonnet_lsp_scope::ident_resolves_to_builtin_std; use jrsonnet_rowan_parser::{ nodes::{BindDestruct, Destruct, FieldName, MemberFieldNormal, ObjBody, StmtLocal}, AstNode, AstToken, SyntaxKind, SyntaxNode, }; -use lsp_types::{CompletionItem, CompletionItemKind}; +use lsp_types::{CompletionItem, CompletionItemKind, CompletionTextEdit, Range, TextEdit}; /// Check if we're completing object fields after `obj.`. 
/// @@ -39,6 +41,7 @@ pub fn check_object_field_completion( .map_or(0, |i| i + 1); let identifier = before_dot[ident_start..].trim(); let ast = document.ast(); + let line_index = document.line_index(); if identifier.is_empty() { return None; @@ -68,11 +71,8 @@ pub fn check_object_field_completion( let items = fields .into_iter() .filter(|(name, _)| prefix.is_empty() || name.starts_with(prefix)) - .map(|(name, ty)| CompletionItem { - label: name, - kind: Some(CompletionItemKind::FIELD), - detail: Some(store.display(ty)), - ..Default::default() + .filter_map(|(name, ty)| { + field_completion_item(name, store.display(ty), dot_pos, offset, line_index, text) }) .collect::<Vec<_>>(); @@ -89,11 +89,15 @@ pub fn check_object_field_completion( let items = fields .into_iter() .filter(|f| prefix.is_empty() || f.starts_with(prefix)) - .map(|name| CompletionItem { - label: name, - kind: Some(CompletionItemKind::FIELD), - detail: Some("object field".to_string()), - ..Default::default() + .filter_map(|name| { + field_completion_item( + name, + "object field".to_string(), + dot_pos, + offset, + line_index, + text, + ) }) .collect::<Vec<_>>(); @@ -103,6 +107,44 @@ pub fn check_object_field_completion( Some(items) } +fn field_completion_item( + name: String, + detail: String, + dot_pos: usize, + offset: u32, + line_index: &LineIndex, + text: &str, +) -> Option<CompletionItem> { + if is_valid_jsonnet_identifier(&name) { + return Some(CompletionItem { + label: name, + kind: Some(CompletionItemKind::FIELD), + detail: Some(detail), + ..Default::default() + }); + } + + let start_offset = ByteOffset::new(u32::try_from(dot_pos).ok()?); + let end_offset = ByteOffset::new(offset); + let start = line_index.position(start_offset, text)?; + let end = line_index.position(end_offset, text)?; + let escaped = serde_json::to_string(&name).ok()?; + + Some(CompletionItem { + label: name, + kind: Some(CompletionItemKind::FIELD), + detail: Some(detail), + text_edit: Some(CompletionTextEdit::Edit(TextEdit { + range: Range { + start: 
start.into(), + end: end.into(), + }, + new_text: format!("[{escaped}]"), + })), + ..Default::default() + }) +} + /// Find object fields for an identifier by looking up its definition. fn find_object_fields_for_identifier( root: &SyntaxNode, diff --git a/crates/jrsonnet-lsp-handlers/src/completion/handler.rs b/crates/jrsonnet-lsp-handlers/src/completion/handler.rs index 125ae66a..9395a7b2 100644 --- a/crates/jrsonnet-lsp-handlers/src/completion/handler.rs +++ b/crates/jrsonnet-lsp-handlers/src/completion/handler.rs @@ -374,6 +374,122 @@ mod tests { assert_eq!(labels, vec!["foo"]); } + #[test] + fn test_object_field_completion_non_identifier_uses_bracket_text_edit() { + let code = r#"local obj = { "my-field": 1, normal: 2 }; obj."#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let analysis = test_analysis(&doc); + let pos = ( + 0, + u32::try_from(code.len()).expect("test code length fits in u32"), + ) + .into(); + let dot = u32::try_from(code.rfind('.').expect("dot should exist")).unwrap(); + let end = u32::try_from(code.len()).unwrap(); + + let mut result = completion(&doc, pos, None, &analysis).expect("should get completions"); + result + .items + .sort_by(|left, right| left.label.cmp(&right.label)); + + assert_eq!( + result.items, + vec![ + CompletionItem { + label: "my-field".to_string(), + label_details: None, + kind: Some(CompletionItemKind::FIELD), + detail: Some("number".to_string()), + documentation: None, + deprecated: None, + preselect: None, + sort_text: None, + filter_text: None, + insert_text: None, + insert_text_format: None, + insert_text_mode: None, + text_edit: Some(lsp_types::CompletionTextEdit::Edit(lsp_types::TextEdit { + range: lsp_types::Range { + start: lsp_types::Position::new(0, dot), + end: lsp_types::Position::new(0, end), + }, + new_text: r#"["my-field"]"#.to_string(), + })), + additional_text_edits: None, + command: None, + commit_characters: None, + data: None, + tags: None, + }, + CompletionItem { + label: 
"normal".to_string(), + label_details: None, + kind: Some(CompletionItemKind::FIELD), + detail: Some("number".to_string()), + documentation: None, + deprecated: None, + preselect: None, + sort_text: None, + filter_text: None, + insert_text: None, + insert_text_format: None, + insert_text_mode: None, + text_edit: None, + additional_text_edits: None, + command: None, + commit_characters: None, + data: None, + tags: None, + }, + ] + ); + } + + #[test] + fn test_object_field_completion_non_identifier_prefix_rewrites_dot_expression() { + let code = r#"local obj = { "my-field": 1, normal: 2 }; obj.my"#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let analysis = test_analysis(&doc); + let pos = ( + 0, + u32::try_from(code.len()).expect("test code length fits in u32"), + ) + .into(); + let dot = u32::try_from(code.rfind('.').expect("dot should exist")).unwrap(); + let end = u32::try_from(code.len()).unwrap(); + + let result = completion(&doc, pos, None, &analysis).expect("should get completions"); + assert_eq!( + result.items, + vec![CompletionItem { + label: "my-field".to_string(), + label_details: None, + kind: Some(CompletionItemKind::FIELD), + detail: Some("number".to_string()), + documentation: None, + deprecated: None, + preselect: None, + sort_text: None, + filter_text: None, + insert_text: None, + insert_text_format: None, + insert_text_mode: None, + text_edit: Some(lsp_types::CompletionTextEdit::Edit(lsp_types::TextEdit { + range: lsp_types::Range { + start: lsp_types::Position::new(0, dot), + end: lsp_types::Position::new(0, end), + }, + new_text: r#"["my-field"]"#.to_string(), + })), + additional_text_edits: None, + command: None, + commit_characters: None, + data: None, + tags: None, + }] + ); + } + #[test] fn test_import_string_detection() { // Test that we correctly detect import string context diff --git a/crates/jrsonnet-lsp-handlers/src/signature_help.rs b/crates/jrsonnet-lsp-handlers/src/signature_help.rs index dc2d1867..cdb4e3e5 
100644 --- a/crates/jrsonnet-lsp-handlers/src/signature_help.rs +++ b/crates/jrsonnet-lsp-handlers/src/signature_help.rs @@ -156,9 +156,9 @@ fn get_signature_for_function( if let Some(doc) = stdlib::get_stdlib_doc(name) { let (params_info, variadic) = stdlib_params(name, &doc.signature); let active_param = resolve_active_parameter(¶ms_info, variadic, active_arg); - let params = to_lsp_params(¶ms_info); - - let label = format!("std.{}{})", name, doc.signature); + let signature_name = format!("std.{name}"); + let (label, param_offsets) = signature_label_with_offsets(&signature_name, ¶ms_info); + let params = to_lsp_params(¶m_offsets); return Some(SignatureHelp { signatures: vec![SignatureInformation { @@ -178,13 +178,8 @@ fn get_signature_for_function( // Check for local function definitions if let Some(sig) = find_local_function_signature(token, name) { let active_param = resolve_active_parameter(&sig.params, false, active_arg); - let params = to_lsp_params(&sig.params); - let labels: Vec<_> = sig - .params - .iter() - .map(|param| param.label.as_str()) - .collect(); - let label = format!("{}({})", name, labels.join(", ")); + let (label, param_offsets) = signature_label_with_offsets(name, &sig.params); + let params = to_lsp_params(¶m_offsets); return Some(SignatureHelp { signatures: vec![SignatureInformation { @@ -201,11 +196,34 @@ fn get_signature_for_function( None } -fn to_lsp_params(params: &[SignatureParamInfo]) -> Vec { - params +fn signature_label_with_offsets( + name: &str, + params: &[SignatureParamInfo], +) -> (String, Vec<[u32; 2]>) { + let mut label = String::new(); + let mut offsets = Vec::with_capacity(params.len()); + label.push_str(name); + label.push('('); + + for (index, param) in params.iter().enumerate() { + if index > 0 { + label.push_str(", "); + } + let start = to_u32(label.len()); + label.push_str(¶m.label); + let end = to_u32(label.len()); + offsets.push([start, end]); + } + + label.push(')'); + (label, offsets) +} + +fn 
to_lsp_params(offsets: &[[u32; 2]]) -> Vec { + offsets .iter() - .map(|param| ParameterInformation { - label: ParameterLabel::Simple(param.label.clone()), + .map(|offset| ParameterInformation { + label: ParameterLabel::LabelOffsets(*offset), documentation: None, }) .collect() @@ -467,12 +485,31 @@ mod tests { let pos = (0, 11).into(); let help = signature_help(&doc, pos); - // May be None if we can't parse incomplete code well - if let Some(help) = help { - // Exactly one signature for std.filter - let labels: Vec<_> = help.signatures.iter().map(|s| s.label.as_str()).collect(); - assert_eq!(labels, vec!["std.filter(func, arr)"]); - } + assert_eq!( + help, + Some(SignatureHelp { + signatures: vec![SignatureInformation { + label: "std.filter(func, arr)".to_string(), + documentation: Some(Documentation::MarkupContent(MarkupContent { + kind: MarkupKind::Markdown, + value: "Returns elements of `arr` where `func(x)` is true.".to_string(), + })), + parameters: Some(vec![ + ParameterInformation { + label: ParameterLabel::LabelOffsets([11, 15]), + documentation: None, + }, + ParameterInformation { + label: ParameterLabel::LabelOffsets([17, 20]), + documentation: None, + }, + ]), + active_parameter: Some(0), + }], + active_signature: Some(0), + active_parameter: Some(0), + }) + ); } #[test] @@ -484,12 +521,28 @@ mod tests { let pos = (0, 30).into(); let help = signature_help(&doc, pos); - // May be None depending on parser error recovery - if let Some(help) = help { - // Exactly one signature for local add function - let labels: Vec<_> = help.signatures.iter().map(|s| s.label.as_str()).collect(); - assert_eq!(labels, vec!["add(a, b)"]); - } + assert_eq!( + help, + Some(SignatureHelp { + signatures: vec![SignatureInformation { + label: "add(a, b)".to_string(), + documentation: None, + parameters: Some(vec![ + ParameterInformation { + label: ParameterLabel::LabelOffsets([4, 5]), + documentation: None, + }, + ParameterInformation { + label: ParameterLabel::LabelOffsets([7, 8]), 
+ documentation: None, + }, + ]), + active_parameter: Some(0), + }], + active_signature: Some(0), + active_parameter: Some(0), + }) + ); } #[test] diff --git a/docs/lsp/HANDLERS.md b/docs/lsp/HANDLERS.md index a73cd543..e4b62fcb 100644 --- a/docs/lsp/HANDLERS.md +++ b/docs/lsp/HANDLERS.md @@ -117,6 +117,8 @@ Completion sources include: - object fields from inferred types - import paths using file path + configured import roots - object-context keywords (`$`, `self`, `super`) +- non-identifier object fields are emitted with bracket-form text edits (for + example `obj.` + `my-field` completion inserts `obj["my-field"]`) Server capabilities advertise `.` as trigger. Other completion contexts can still return items on explicit completion requests. @@ -241,6 +243,8 @@ File: `crates/jrsonnet-lsp-handlers/src/signature_help.rs` - Trigger characters: `(` and `,`. - Uses stdlib signatures and local function information. - Computes active parameter index from call context. +- Emits parameter labels as structural `LabelOffsets` spans within the full + signature label (instead of plain string labels). 
### Symbols From 0ead8be936d909c4e199a4e01b2b38266e6a36d3 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Tue, 10 Feb 2026 15:59:58 +0000 Subject: [PATCH 061/210] lsp: centralize token semantics in rowan parser --- crates/jrsonnet-lsp-handlers/src/hover.rs | 84 +- .../src/semantic_tokens.rs | 284 ++++--- crates/jrsonnet-lsp-inference/src/expr.rs | 101 ++- crates/jrsonnet-lsp-inference/src/flow.rs | 177 ++-- crates/jrsonnet-rowan-parser/src/lib.rs | 1 + .../src/syntax_semantics.rs | 756 ++++++++++++++++++ docs/lsp/HANDLERS.md | 6 + 7 files changed, 1143 insertions(+), 266 deletions(-) create mode 100644 crates/jrsonnet-rowan-parser/src/syntax_semantics.rs diff --git a/crates/jrsonnet-lsp-handlers/src/hover.rs b/crates/jrsonnet-lsp-handlers/src/hover.rs index 83af229d..6f80e0ce 100644 --- a/crates/jrsonnet-lsp-handlers/src/hover.rs +++ b/crates/jrsonnet-lsp-handlers/src/hover.rs @@ -32,7 +32,7 @@ pub fn hover(document: &Document, position: LspPosition, analysis: &TypeAnalysis let token = token_at_offset(ast.syntax(), offset)?; // Only provide hover for meaningful tokens - if !is_meaningful_token(&token) { + if !token.kind().is_hover_eligible() { return None; } @@ -44,50 +44,41 @@ pub fn hover(document: &Document, position: LspPosition, analysis: &TypeAnalysis return Some(hover); } - // Fall back to showing just the inferred type - if let Some(hover) = check_type_hover(document, analysis, offset) { - return Some(hover); + let type_markdown = inferred_type_markdown(document, analysis, offset); + let token_markdown = token.kind().token_doc().map(str::to_owned); + if let Some(value) = merge_markdown_sections(type_markdown, token_markdown) { + return Some(Hover { + contents: HoverContents::Markup(MarkupContent { + kind: MarkupKind::Markdown, + value, + }), + range: None, + }); } None } -/// Check if a token is meaningful for hover (identifiers, literals, keywords). 
-fn is_meaningful_token(token: &SyntaxToken) -> bool { - matches!( - token.kind(), - SyntaxKind::IDENT - | SyntaxKind::FLOAT - | SyntaxKind::STRING_DOUBLE - | SyntaxKind::STRING_SINGLE - | SyntaxKind::STRING_DOUBLE_VERBATIM - | SyntaxKind::STRING_SINGLE_VERBATIM - | SyntaxKind::STRING_BLOCK - | SyntaxKind::NULL_KW - | SyntaxKind::TRUE_KW - | SyntaxKind::FALSE_KW - | SyntaxKind::SELF_KW - | SyntaxKind::SUPER_KW - | SyntaxKind::DOLLAR - ) -} - -/// Check for inferred type at position. -fn check_type_hover( +fn inferred_type_markdown( document: &Document, analysis: &TypeAnalysis, offset: ByteOffset, -) -> Option<Hover> { +) -> Option<String> { let ast = document.ast(); let ty = analysis.type_at_position(ast.syntax(), offset.into())?; + Some(format!("`{}`", analysis.display(ty))) +} - Some(Hover { - contents: HoverContents::Markup(MarkupContent { - kind: MarkupKind::Markdown, - value: format!("`{}`", analysis.display(ty)), - }), - range: None, - }) +fn merge_markdown_sections( + type_markdown: Option<String>, + token_markdown: Option<String>, +) -> Option<String> { + match (type_markdown, token_markdown) { + (Some(ty), Some(doc)) => Some(format!("{ty}\n\n---\n\n{doc}")), + (Some(ty), None) => Some(ty), + (None, Some(doc)) => Some(doc), + (None, None) => None, + } } /// Check for hover on a local variable reference. 
@@ -444,6 +435,31 @@ mod tests { }); } + #[test] + fn test_keyword_hover_includes_token_docs() { + let result = get_hover("null", 0, 0); + assert_matches!(result, Some(Hover { + contents: HoverContents::Markup(MarkupContent { kind: MarkupKind::Markdown, value }), + range: None + }) => { + assert_eq!(value, "`null`\n\n---\n\nLiteral `null` value."); + }); + } + + #[test] + fn test_operator_hover_docs() { + let result = get_hover("1 + 2", 0, 2); + assert_matches!(result, Some(Hover { + contents: HoverContents::Markup(MarkupContent { kind: MarkupKind::Markdown, value }), + range: None + }) => { + assert_eq!( + value, + "`number`\n\n---\n\n`+` adds numbers, concatenates strings/arrays, or merges objects." + ); + }); + } + #[test] fn test_no_hover_on_whitespace() { let result = get_hover("local x = 1; x", 0, 13); diff --git a/crates/jrsonnet-lsp-handlers/src/semantic_tokens.rs b/crates/jrsonnet-lsp-handlers/src/semantic_tokens.rs index 6b88acbe..abda36a6 100644 --- a/crates/jrsonnet-lsp-handlers/src/semantic_tokens.rs +++ b/crates/jrsonnet-lsp-handlers/src/semantic_tokens.rs @@ -159,81 +159,29 @@ impl<'a> SemanticTokenBuilder<'a> { fn visit_token(&mut self, token: &SyntaxToken) { let kind = token.kind(); - match kind { - // Keywords - SyntaxKind::LOCAL_KW - | SyntaxKind::IF_KW - | SyntaxKind::THEN_KW - | SyntaxKind::ELSE_KW - | SyntaxKind::FUNCTION_KW - | SyntaxKind::IMPORT_KW - | SyntaxKind::IMPORTSTR_KW - | SyntaxKind::IMPORTBIN_KW - | SyntaxKind::FOR_KW - | SyntaxKind::IN_KW - | SyntaxKind::TRUE_KW - | SyntaxKind::FALSE_KW - | SyntaxKind::NULL_KW - | SyntaxKind::SELF_KW - | SyntaxKind::SUPER_KW - | SyntaxKind::ERROR_KW - | SyntaxKind::ASSERT_KW - | SyntaxKind::TAILSTRICT_KW => { - self.add_token(token, TokenType::Keyword, 0); - } - - // Comments - SyntaxKind::SINGLE_LINE_SLASH_COMMENT - | SyntaxKind::SINGLE_LINE_HASH_COMMENT - | SyntaxKind::MULTI_LINE_COMMENT => { - self.add_token(token, TokenType::Comment, 0); - } - - // Strings - SyntaxKind::STRING_DOUBLE - | 
SyntaxKind::STRING_SINGLE - | SyntaxKind::STRING_DOUBLE_VERBATIM - | SyntaxKind::STRING_SINGLE_VERBATIM - | SyntaxKind::STRING_BLOCK => { - self.add_token(token, TokenType::String, 0); - } - - // Numbers - SyntaxKind::FLOAT => { - self.add_token(token, TokenType::Number, 0); - } - - // Identifiers - need context to determine type - SyntaxKind::IDENT => { - self.visit_identifier(token); - } - - // Operators - SyntaxKind::PLUS - | SyntaxKind::MINUS - | SyntaxKind::MUL - | SyntaxKind::DIV - | SyntaxKind::MODULO - | SyntaxKind::BIT_AND - | SyntaxKind::BIT_OR - | SyntaxKind::BIT_XOR - | SyntaxKind::BIT_NOT - | SyntaxKind::LT - | SyntaxKind::GT - | SyntaxKind::NOT - | SyntaxKind::ASSIGN - | SyntaxKind::LE - | SyntaxKind::GE - | SyntaxKind::EQ - | SyntaxKind::NE - | SyntaxKind::AND - | SyntaxKind::OR - | SyntaxKind::LHS - | SyntaxKind::RHS => { - self.add_token(token, TokenType::Operator, 0); - } - - _ => {} + if kind.is_semantic_keyword_token() { + self.add_token(token, TokenType::Keyword, 0); + return; + } + if kind.is_semantic_comment_token() { + self.add_token(token, TokenType::Comment, 0); + return; + } + if kind.is_semantic_string_token() { + self.add_token(token, TokenType::String, 0); + return; + } + if kind.is_semantic_number_token() { + self.add_token(token, TokenType::Number, 0); + return; + } + if kind == SyntaxKind::IDENT { + // Identifiers need AST context for precise token type. 
+ self.visit_identifier(token); + return; + } + if kind.is_semantic_operator_token() { + self.add_token(token, TokenType::Operator, 0); } } @@ -557,15 +505,82 @@ mod tests { use super::*; + #[derive(Debug, Clone, PartialEq, Eq)] + struct AbsoluteToken { + line: u32, + start_char: u32, + length: u32, + token_type: u32, + token_modifiers: u32, + } + + fn token( + line: u32, + start_char: u32, + length: u32, + token_type: TokenType, + token_modifiers: u32, + ) -> AbsoluteToken { + AbsoluteToken { + line, + start_char, + length, + token_type: token_type as u32, + token_modifiers, + } + } + + fn decode_absolute(tokens: &SemanticTokens) -> Vec { + let mut line = 0_u32; + let mut start_char = 0_u32; + let mut out = Vec::with_capacity(tokens.data.len()); + + for token in &tokens.data { + line = line.saturating_add(token.delta_line); + start_char = if token.delta_line == 0 { + start_char.saturating_add(token.delta_start) + } else { + token.delta_start + }; + out.push(AbsoluteToken { + line, + start_char, + length: token.length, + token_type: token.token_type, + token_modifiers: token.token_modifiers_bitset, + }); + } + + out + } + #[test] fn test_semantic_tokens_keywords() { let code = "local x = if true then 1 else 2; x"; let doc = Document::new(code.to_string(), DocVersion::new(1)); let tokens = semantic_tokens(&doc); - assert!(!tokens.data.is_empty()); - - // Should have tokens for: local, if, true, then, else, and identifiers + assert_eq!( + decode_absolute(&tokens), + vec![ + token(0, 0, 5, TokenType::Keyword, 0), + token( + 0, + 6, + 1, + TokenType::Variable, + token_modifier::DECLARATION | token_modifier::DEFINITION + ), + token(0, 8, 1, TokenType::Operator, 0), + token(0, 10, 2, TokenType::Keyword, 0), + token(0, 13, 4, TokenType::Keyword, 0), + token(0, 18, 4, TokenType::Keyword, 0), + token(0, 23, 1, TokenType::Number, 0), + token(0, 25, 4, TokenType::Keyword, 0), + token(0, 30, 1, TokenType::Number, 0), + token(0, 33, 1, TokenType::Variable, 0), + ] + ); } 
#[test] @@ -574,7 +589,40 @@ mod tests { let doc = Document::new(code.to_string(), DocVersion::new(1)); let tokens = semantic_tokens(&doc); - assert!(!tokens.data.is_empty()); + assert_eq!( + decode_absolute(&tokens), + vec![ + token(0, 0, 5, TokenType::Keyword, 0), + token( + 0, + 6, + 3, + TokenType::Function, + token_modifier::DECLARATION | token_modifier::DEFINITION + ), + token( + 0, + 10, + 1, + TokenType::Parameter, + token_modifier::DECLARATION | token_modifier::DEFINITION + ), + token( + 0, + 13, + 1, + TokenType::Parameter, + token_modifier::DECLARATION | token_modifier::DEFINITION + ), + token(0, 16, 1, TokenType::Operator, 0), + token(0, 18, 1, TokenType::Parameter, 0), + token(0, 20, 1, TokenType::Operator, 0), + token(0, 22, 1, TokenType::Parameter, 0), + token(0, 25, 3, TokenType::Function, 0), + token(0, 29, 1, TokenType::Number, 0), + token(0, 32, 1, TokenType::Number, 0), + ] + ); } #[test] @@ -583,7 +631,36 @@ mod tests { let doc = Document::new(code.to_string(), DocVersion::new(1)); let tokens = semantic_tokens(&doc); - assert!(!tokens.data.is_empty()); + assert_eq!( + decode_absolute(&tokens), + vec![ + token( + 0, + 2, + 4, + TokenType::Property, + token_modifier::DECLARATION | token_modifier::DEFINITION + ), + token(0, 8, 6, TokenType::String, 0), + token( + 0, + 16, + 5, + TokenType::Method, + token_modifier::DECLARATION | token_modifier::DEFINITION + ), + token( + 0, + 22, + 1, + TokenType::Parameter, + token_modifier::DECLARATION | token_modifier::DEFINITION + ), + token(0, 26, 8, TokenType::String, 0), + token(0, 35, 1, TokenType::Operator, 0), + token(0, 37, 1, TokenType::Variable, 0), + ] + ); } #[test] @@ -592,14 +669,35 @@ mod tests { let doc = Document::new(code.to_string(), DocVersion::new(1)); let tokens = semantic_tokens(&doc); - assert!(!tokens.data.is_empty()); + assert_eq!( + decode_absolute(&tokens), + vec![ + token( + 0, + 0, + 3, + TokenType::Namespace, + token_modifier::DEFAULT_LIBRARY + ), + token( + 0, + 4, + 6, + 
TokenType::Function, + token_modifier::DEFAULT_LIBRARY + ), + token(0, 12, 1, TokenType::Number, 0), + token(0, 15, 1, TokenType::Number, 0), + token(0, 18, 1, TokenType::Number, 0), + ] + ); } #[test] fn test_legend() { let leg = legend(); - assert!(!leg.token_types.is_empty()); - assert!(!leg.token_modifiers.is_empty()); + assert_eq!(leg.token_types, TOKEN_TYPES.to_vec()); + assert_eq!(leg.token_modifiers, TOKEN_MODIFIERS.to_vec()); } #[test] @@ -620,15 +718,15 @@ mod tests { }, }, ); - assert!(!tokens.data.is_empty()); - - let mut absolute_line = 0_u32; - for token in tokens.data { - absolute_line += token.delta_line; - assert_eq!( - absolute_line, 1, - "token line should stay inside requested range" - ); - } + assert_eq!( + decode_absolute(&tokens), + vec![ + token(1, 0, 5, TokenType::Keyword, 0), + token(1, 6, 1, TokenType::Variable, 0), + token(1, 8, 1, TokenType::Operator, 0), + token(1, 12, 1, TokenType::Operator, 0), + token(1, 14, 1, TokenType::Number, 0), + ] + ); } } diff --git a/crates/jrsonnet-lsp-inference/src/expr.rs b/crates/jrsonnet-lsp-inference/src/expr.rs index 8b56b79e..f65859cd 100644 --- a/crates/jrsonnet-lsp-inference/src/expr.rs +++ b/crates/jrsonnet-lsp-inference/src/expr.rs @@ -7,7 +7,7 @@ use jrsonnet_lsp_types::{ TyData, }; use jrsonnet_rowan_parser::{ - nodes::{BinaryOperatorKind, Bind, ExprBase, LiteralKind, UnaryOperatorKind}, + nodes::{BinaryOperatorKind, Bind, ExprBase, LiteralKind}, AstNode, AstToken, }; use rowan::TextRange; @@ -383,11 +383,16 @@ fn infer_base_ty( if rhs_ty == Ty::NEVER { return Ty::NEVER; } - match unary.unary_operator().map(|op| op.kind()) { - Some(UnaryOperatorKind::Not) => Ty::BOOL, - Some(UnaryOperatorKind::Minus | UnaryOperatorKind::BitNot) => Ty::NUMBER, - _ => Ty::ANY, + let Some(op_kind) = unary.unary_operator().map(|op| op.kind()) else { + return Ty::ANY; + }; + if op_kind.returns_boolean() { + return Ty::BOOL; + } + if op_kind.returns_number() { + return Ty::NUMBER; } + Ty::ANY } // Binary operators - 
handle simple cases directly @@ -449,59 +454,47 @@ fn infer_binary_expr_base_ty( return Ty::NEVER; } - match binary.binary_operator().map(|op| op.kind()) { - Some( - BinaryOperatorKind::Minus - | BinaryOperatorKind::Mul - | BinaryOperatorKind::Div - | BinaryOperatorKind::Modulo - | BinaryOperatorKind::BitAnd - | BinaryOperatorKind::BitOr - | BinaryOperatorKind::BitXor - | BinaryOperatorKind::Lhs - | BinaryOperatorKind::Rhs, - ) => Ty::NUMBER, - Some( - BinaryOperatorKind::Lt - | BinaryOperatorKind::Le - | BinaryOperatorKind::Gt - | BinaryOperatorKind::Ge - | BinaryOperatorKind::Eq - | BinaryOperatorKind::Ne - | BinaryOperatorKind::InKw, - ) => Ty::BOOL, - Some(BinaryOperatorKind::Plus) => { - if lhs_ty == Ty::STRING && rhs_ty == Ty::STRING { - return Ty::STRING; + let Some(op_kind) = binary.binary_operator().map(|op| op.kind()) else { + return Ty::ANY; + }; + + if op_kind.returns_number() { + return Ty::NUMBER; + } + if op_kind.returns_boolean() { + return Ty::BOOL; + } + if op_kind == BinaryOperatorKind::Plus { + if lhs_ty == Ty::STRING && rhs_ty == Ty::STRING { + return Ty::STRING; + } + if lhs_ty == Ty::NUMBER && rhs_ty == Ty::NUMBER { + return Ty::NUMBER; + } + let store = env.store_mut(); + let lhs_data = store.get(lhs_ty); + let rhs_data = store.get(rhs_ty); + return match (&lhs_data, &rhs_data) { + (TyData::Array { elem: l, .. }, TyData::Array { elem: r, .. }) => { + let elem_union = store.union(vec![*l, *r]); + store.array(elem_union) } - if lhs_ty == Ty::NUMBER && rhs_ty == Ty::NUMBER { - return Ty::NUMBER; + (TyData::Tuple { elems: l }, TyData::Tuple { elems: r }) => { + let mut elems = l.clone(); + elems.extend(r.iter().copied()); + store.tuple(elems) } - let store = env.store_mut(); - let lhs_data = store.get(lhs_ty); - let rhs_data = store.get(rhs_ty); - match (&lhs_data, &rhs_data) { - (TyData::Array { elem: l, .. }, TyData::Array { elem: r, .. 
}) => { - let elem_union = store.union(vec![*l, *r]); - store.array(elem_union) - } - (TyData::Tuple { elems: l }, TyData::Tuple { elems: r }) => { - let mut elems = l.clone(); - elems.extend(r.iter().copied()); - store.tuple(elems) - } - (TyData::Object(left_obj), TyData::Object(right_obj)) => { - let merged = ObjectData::merge(left_obj, right_obj); - store.object(merged) - } - _ => Ty::NUMBER, + (TyData::Object(left_obj), TyData::Object(right_obj)) => { + let merged = ObjectData::merge(left_obj, right_obj); + store.object(merged) } - } - Some( - BinaryOperatorKind::And | BinaryOperatorKind::Or | BinaryOperatorKind::NullCoaelse, - ) => env.store_mut().union(vec![lhs_ty, rhs_ty]), - _ => Ty::ANY, + _ => Ty::NUMBER, + }; } + if op_kind.is_logical_short_circuit() { + return env.store_mut().union(vec![lhs_ty, rhs_ty]); + } + Ty::ANY } fn infer_index_expr_base_ty( diff --git a/crates/jrsonnet-lsp-inference/src/flow.rs b/crates/jrsonnet-lsp-inference/src/flow.rs index 8ca514c1..c9cd1ed2 100644 --- a/crates/jrsonnet-lsp-inference/src/flow.rs +++ b/crates/jrsonnet-lsp-inference/src/flow.rs @@ -656,9 +656,7 @@ impl Facts { use jrsonnet_lsp_scope::{expr_resolves_to_builtin_std, var_resolves_to_builtin_std}; use jrsonnet_rowan_parser::{ - nodes::{ - ArgsDesc, BinaryOperatorKind, Expr, ExprBase, ExprCall, LiteralKind, UnaryOperatorKind, - }, + nodes::{ArgsDesc, BinaryOperatorKind, Expr, ExprBase, ExprCall, LiteralKind}, AstNode, AstToken, }; @@ -705,7 +703,7 @@ fn extract_facts_into(cond: &Expr, facts: &mut Facts) { let Some(op) = unary.unary_operator() else { return; }; - if op.kind() != UnaryOperatorKind::Not { + if !op.kind().is_logical_not() { return; } // !expr - extract facts from inner and negate @@ -733,94 +731,103 @@ fn extract_binary_facts(binary: &jrsonnet_rowan_parser::nodes::ExprBinary, facts return; }; - match op.kind() { - BinaryOperatorKind::Eq => { - // var == null or null == var - if let Some((var_name, fact)) = check_null_equality(&lhs, &rhs, 
Totality::Total) { - facts.add(var_name, fact); - } else if let Some((var_name, fact)) = check_null_equality(&rhs, &lhs, Totality::Total) - { - facts.add(var_name, fact); - } - // var == "literal" or "literal" == var - if let Some((var_name, fact)) = check_literal_string_equality(&lhs, &rhs) { - facts.add(var_name, fact); - } else if let Some((var_name, fact)) = check_literal_string_equality(&rhs, &lhs) { - facts.add(var_name, fact); - } - // var == true/false or true/false == var - if let Some((var_name, fact)) = check_literal_bool_equality(&lhs, &rhs) { - facts.add(var_name, fact); - } else if let Some((var_name, fact)) = check_literal_bool_equality(&rhs, &lhs) { - facts.add(var_name, fact); - } - // std.type(x) == "typename" - if let Some((var_name, fact)) = check_std_type_comparison(binary, &rhs) { - facts.add(var_name, fact); - } - // std.length(x) == n - if let Some((var_name, fact)) = check_std_length_comparison(binary, &rhs) { - facts.add(var_name, fact); - } + let op_kind = op.kind(); + + if op_kind == BinaryOperatorKind::Eq { + // var == null or null == var + if let Some((var_name, fact)) = check_null_equality(&lhs, &rhs, Totality::Total) { + facts.add(var_name, fact); + } else if let Some((var_name, fact)) = check_null_equality(&rhs, &lhs, Totality::Total) { + facts.add(var_name, fact); } - BinaryOperatorKind::Ne => { - // var != null - create fact and negate it - if let Some((var_name, fact)) = check_null_equality(&lhs, &rhs, Totality::Total) { - facts.add(var_name, !fact); - } else if let Some((var_name, fact)) = check_null_equality(&rhs, &lhs, Totality::Total) - { - facts.add(var_name, !fact); - } - // var != "literal" or "literal" != var - if let Some((var_name, fact)) = check_literal_string_equality(&lhs, &rhs) { - facts.add(var_name, !fact); - } else if let Some((var_name, fact)) = check_literal_string_equality(&rhs, &lhs) { - facts.add(var_name, !fact); - } - // var != true/false or true/false != var - if let Some((var_name, fact)) = 
check_literal_bool_equality(&lhs, &rhs) { - facts.add(var_name, !fact); - } else if let Some((var_name, fact)) = check_literal_bool_equality(&rhs, &lhs) { - facts.add(var_name, !fact); - } - // std.length(x) != 0 means non-empty - if let Some((var_name, fact)) = check_std_length_not_zero(binary, &rhs) { - facts.add(var_name, fact); - } + // var == "literal" or "literal" == var + if let Some((var_name, fact)) = check_literal_string_equality(&lhs, &rhs) { + facts.add(var_name, fact); + } else if let Some((var_name, fact)) = check_literal_string_equality(&rhs, &lhs) { + facts.add(var_name, fact); } - BinaryOperatorKind::Gt => { - // std.length(x) > n means length >= n+1 - if let Some((var_name, fact)) = check_std_length_greater(binary, &rhs) { - facts.add(var_name, fact); - } + // var == true/false or true/false == var + if let Some((var_name, fact)) = check_literal_bool_equality(&lhs, &rhs) { + facts.add(var_name, fact); + } else if let Some((var_name, fact)) = check_literal_bool_equality(&rhs, &lhs) { + facts.add(var_name, fact); } - BinaryOperatorKind::Ge => { - // std.length(x) >= n means length >= n - if let Some((var_name, fact)) = check_std_length_greater_eq(binary, &rhs) { - facts.add(var_name, fact); - } + // std.type(x) == "typename" + if let Some((var_name, fact)) = check_std_type_comparison(binary, &rhs) { + facts.add(var_name, fact); } - BinaryOperatorKind::InKw => { - // "field" in obj - if let Some((var_name, fact)) = check_in_operator(&lhs, &rhs) { - facts.add(var_name, fact); - } + // std.length(x) == n + if let Some((var_name, fact)) = check_std_length_comparison(binary, &rhs) { + facts.add(var_name, fact); } - BinaryOperatorKind::And => { - // a && b - extract facts from both sides - extract_facts_into(&lhs, facts); - extract_facts_into(&rhs, facts); + return; + } + + if op_kind == BinaryOperatorKind::Ne { + // var != null - create fact and negate it + if let Some((var_name, fact)) = check_null_equality(&lhs, &rhs, Totality::Total) { + 
facts.add(var_name, !fact); + } else if let Some((var_name, fact)) = check_null_equality(&rhs, &lhs, Totality::Total) { + facts.add(var_name, !fact); } - BinaryOperatorKind::Or => { - // a || b - only keep facts that are in both - let lhs_facts = extract_facts(&lhs); - let rhs_facts = extract_facts(&rhs); - let combined = lhs_facts.or_combine(rhs_facts); - for (var_name, fact) in combined.facts { - facts.add(var_name, fact); - } + // var != "literal" or "literal" != var + if let Some((var_name, fact)) = check_literal_string_equality(&lhs, &rhs) { + facts.add(var_name, !fact); + } else if let Some((var_name, fact)) = check_literal_string_equality(&rhs, &lhs) { + facts.add(var_name, !fact); + } + // var != true/false or true/false != var + if let Some((var_name, fact)) = check_literal_bool_equality(&lhs, &rhs) { + facts.add(var_name, !fact); + } else if let Some((var_name, fact)) = check_literal_bool_equality(&rhs, &lhs) { + facts.add(var_name, !fact); + } + // std.length(x) != 0 means non-empty + if let Some((var_name, fact)) = check_std_length_not_zero(binary, &rhs) { + facts.add(var_name, fact); + } + return; + } + + if op_kind == BinaryOperatorKind::Gt { + // std.length(x) > n means length >= n+1 + if let Some((var_name, fact)) = check_std_length_greater(binary, &rhs) { + facts.add(var_name, fact); + } + return; + } + + if op_kind == BinaryOperatorKind::Ge { + // std.length(x) >= n means length >= n + if let Some((var_name, fact)) = check_std_length_greater_eq(binary, &rhs) { + facts.add(var_name, fact); + } + return; + } + + if op_kind.is_membership() { + // "field" in obj + if let Some((var_name, fact)) = check_in_operator(&lhs, &rhs) { + facts.add(var_name, fact); + } + return; + } + + if op_kind.is_logical_and() { + // a && b - extract facts from both sides + extract_facts_into(&lhs, facts); + extract_facts_into(&rhs, facts); + return; + } + + if op_kind.is_logical_or() { + // a || b - only keep facts that are in both + let lhs_facts = extract_facts(&lhs); + 
let rhs_facts = extract_facts(&rhs); + let combined = lhs_facts.or_combine(rhs_facts); + for (var_name, fact) in combined.facts { + facts.add(var_name, fact); } - _ => {} } } diff --git a/crates/jrsonnet-rowan-parser/src/lib.rs b/crates/jrsonnet-rowan-parser/src/lib.rs index ffc5fa1e..7f674b40 100644 --- a/crates/jrsonnet-rowan-parser/src/lib.rs +++ b/crates/jrsonnet-rowan-parser/src/lib.rs @@ -15,6 +15,7 @@ mod marker; mod parser; mod precedence; mod string_block; +mod syntax_semantics; mod tests; mod token_set; diff --git a/crates/jrsonnet-rowan-parser/src/syntax_semantics.rs b/crates/jrsonnet-rowan-parser/src/syntax_semantics.rs new file mode 100644 index 00000000..0333d2e5 --- /dev/null +++ b/crates/jrsonnet-rowan-parser/src/syntax_semantics.rs @@ -0,0 +1,756 @@ +use crate::{ + nodes::{BinaryOperatorKind, UnaryOperatorKind}, + SyntaxKind, +}; + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum SemanticTokenClass { + Keyword, + Comment, + String, + Number, + Operator, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum BinaryOperatorClass { + Add, + Numeric, + Equality, + Ordering, + Membership, + LogicalAnd, + LogicalOr, + NullCoalesce, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum UnaryOperatorClass { + Numeric, + LogicalNot, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub struct TokenDocExample { + pub code: &'static str, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub struct TokenSemantics { + pub kind: SyntaxKind, + pub hover_doc: Option<&'static str>, + pub hover_example: Option, + pub hover_eligible: bool, + pub semantic_class: Option, + pub binary_op_class: Option, + pub unary_op_class: Option, +} + +macro_rules! token_meta { + ( + $kind:ident + $(, hover_doc = $hover_doc:expr)? + $(, hover_example = $hover_example:expr)? + $(, hover_eligible = $hover_eligible:expr)? + $(, semantic = $semantic:ident)? + $(, binary = $binary:ident)? + $(, unary = $unary:ident)? 
+ ) => { + TokenSemantics { + kind: SyntaxKind::$kind, + hover_doc: token_meta!(@opt_str $($hover_doc)?), + hover_example: token_meta!(@opt_example $($hover_example)?), + hover_eligible: token_meta!(@bool $($hover_eligible)?), + semantic_class: token_meta!(@opt_semantic $($semantic)?), + binary_op_class: token_meta!(@opt_binary $($binary)?), + unary_op_class: token_meta!(@opt_unary $($unary)?), + } + }; + (@opt_str $value:expr) => { Some($value) }; + (@opt_str) => { None }; + (@opt_example $value:expr) => { Some(TokenDocExample { code: $value }) }; + (@opt_example) => { None }; + (@bool $value:expr) => { $value }; + (@bool) => { false }; + (@opt_semantic $value:ident) => { Some(SemanticTokenClass::$value) }; + (@opt_semantic) => { None }; + (@opt_binary $value:ident) => { Some(BinaryOperatorClass::$value) }; + (@opt_binary) => { None }; + (@opt_unary $value:ident) => { Some(UnaryOperatorClass::$value) }; + (@opt_unary) => { None }; +} + +const TOKEN_SEMANTICS: &[TokenSemantics] = &[ + token_meta!(IDENT, hover_eligible = true), + token_meta!(FLOAT, hover_eligible = true, semantic = Number), + token_meta!(STRING_DOUBLE, hover_eligible = true, semantic = String), + token_meta!(STRING_SINGLE, hover_eligible = true, semantic = String), + token_meta!( + STRING_DOUBLE_VERBATIM, + hover_eligible = true, + semantic = String + ), + token_meta!( + STRING_SINGLE_VERBATIM, + hover_eligible = true, + semantic = String + ), + token_meta!(STRING_BLOCK, hover_eligible = true, semantic = String), + token_meta!( + NULL_KW, + hover_doc = "Literal `null` value.", + hover_example = "null", + hover_eligible = true, + semantic = Keyword + ), + token_meta!( + TRUE_KW, + hover_doc = "Boolean literal `true`.", + hover_example = "true", + hover_eligible = true, + semantic = Keyword + ), + token_meta!( + FALSE_KW, + hover_doc = "Boolean literal `false`.", + hover_example = "false", + hover_eligible = true, + semantic = Keyword + ), + token_meta!( + SELF_KW, + hover_doc = "`self` refers to the 
current object value.", + hover_example = "{ value: self }", + hover_eligible = true, + semantic = Keyword + ), + token_meta!( + SUPER_KW, + hover_doc = "`super` refers to inherited object fields.", + hover_example = "{ x: 1 } + { y: super.x }", + hover_eligible = true, + semantic = Keyword + ), + token_meta!( + DOLLAR, + hover_doc = "`$` refers to the root object.", + hover_example = "{ x: 1, y: $.x }", + hover_eligible = true, + semantic = Operator + ), + token_meta!( + PLUS, + hover_doc = "`+` adds numbers, concatenates strings/arrays, or merges objects.", + hover_example = "1 + 2", + hover_eligible = true, + semantic = Operator, + binary = Add + ), + token_meta!( + MINUS, + hover_doc = "`-` subtracts numbers (or negates with unary form).", + hover_example = "2 - 1", + hover_eligible = true, + semantic = Operator, + binary = Numeric, + unary = Numeric + ), + token_meta!( + MUL, + hover_doc = "`*` multiplies numbers.", + hover_example = "2 * 3", + hover_eligible = true, + semantic = Operator, + binary = Numeric + ), + token_meta!( + DIV, + hover_doc = "`/` divides numbers.", + hover_example = "4 / 2", + hover_eligible = true, + semantic = Operator, + binary = Numeric + ), + token_meta!( + MODULO, + hover_doc = "`%` computes numeric remainder.", + hover_example = "5 % 2", + hover_eligible = true, + semantic = Operator, + binary = Numeric + ), + token_meta!( + AND, + hover_doc = "`&&` requires both operands to be truthy.", + hover_example = "true && false", + hover_eligible = true, + semantic = Operator, + binary = LogicalAnd + ), + token_meta!( + OR, + hover_doc = "`||` requires at least one operand to be truthy.", + hover_example = "true || false", + hover_eligible = true, + semantic = Operator, + binary = LogicalOr + ), + token_meta!( + NOT, + hover_doc = "`!` negates a boolean expression.", + hover_example = "!true", + hover_eligible = true, + semantic = Operator, + unary = LogicalNot + ), + token_meta!( + EQ, + hover_doc = "`==` checks value equality.", + 
hover_example = "1 == 1", + hover_eligible = true, + semantic = Operator, + binary = Equality + ), + token_meta!( + NE, + hover_doc = "`!=` checks value inequality.", + hover_example = "1 != 2", + hover_eligible = true, + semantic = Operator, + binary = Equality + ), + token_meta!( + LT, + hover_doc = "`<` checks strict less-than ordering.", + hover_example = "1 < 2", + hover_eligible = true, + semantic = Operator, + binary = Ordering + ), + token_meta!( + LE, + hover_doc = "`<=` checks less-than-or-equal ordering.", + hover_example = "1 <= 2", + hover_eligible = true, + semantic = Operator, + binary = Ordering + ), + token_meta!( + GT, + hover_doc = "`>` checks strict greater-than ordering.", + hover_example = "2 > 1", + hover_eligible = true, + semantic = Operator, + binary = Ordering + ), + token_meta!( + GE, + hover_doc = "`>=` checks greater-than-or-equal ordering.", + hover_example = "2 >= 2", + hover_eligible = true, + semantic = Operator, + binary = Ordering + ), + token_meta!( + NULL_COAELSE, + hover_doc = "`??` returns right-hand value when left side is `null`.", + hover_example = "null ?? 
1", + hover_eligible = true, + semantic = Operator, + binary = NullCoalesce + ), + token_meta!(LOCAL_KW, semantic = Keyword), + token_meta!(IF_KW, semantic = Keyword), + token_meta!(THEN_KW, semantic = Keyword), + token_meta!(ELSE_KW, semantic = Keyword), + token_meta!(FUNCTION_KW, semantic = Keyword), + token_meta!(IMPORT_KW, semantic = Keyword), + token_meta!(IMPORTSTR_KW, semantic = Keyword), + token_meta!(IMPORTBIN_KW, semantic = Keyword), + token_meta!(FOR_KW, semantic = Keyword), + token_meta!(IN_KW, semantic = Keyword, binary = Membership), + token_meta!(ERROR_KW, semantic = Keyword), + token_meta!(ASSERT_KW, semantic = Keyword), + token_meta!(TAILSTRICT_KW, semantic = Keyword), + token_meta!(SINGLE_LINE_SLASH_COMMENT, semantic = Comment), + token_meta!(SINGLE_LINE_HASH_COMMENT, semantic = Comment), + token_meta!(MULTI_LINE_COMMENT, semantic = Comment), + token_meta!(BIT_AND, semantic = Operator, binary = Numeric), + token_meta!(BIT_OR, semantic = Operator, binary = Numeric), + token_meta!(BIT_XOR, semantic = Operator, binary = Numeric), + token_meta!(BIT_NOT, semantic = Operator, unary = Numeric), + token_meta!(LHS, semantic = Operator, binary = Numeric), + token_meta!(RHS, semantic = Operator, binary = Numeric), + token_meta!(ASSIGN, semantic = Operator), +]; + +fn token_semantics(kind: SyntaxKind) -> Option<&'static TokenSemantics> { + TOKEN_SEMANTICS + .iter() + .find(|metadata| metadata.kind == kind) +} + +impl SyntaxKind { + #[must_use] + pub fn token_semantics(self) -> Option<&'static TokenSemantics> { + token_semantics(self) + } + + /// Returns language-level documentation for tokens/operators. + #[must_use] + pub fn token_doc(self) -> Option<&'static str> { + self.token_semantics() + .and_then(|metadata| metadata.hover_doc) + } + + /// Returns a runnable example snippet for token hover docs. 
+ #[must_use] + pub fn token_doc_example(self) -> Option<&'static str> { + self.token_semantics() + .and_then(|metadata| metadata.hover_example.map(|example| example.code)) + } + + /// Tokens that should participate in hover lookup. + #[must_use] + pub fn is_hover_eligible(self) -> bool { + self.token_semantics() + .is_some_and(|metadata| metadata.hover_eligible) + } + + /// Semantic token class for this lexical token, if any. + #[must_use] + pub fn semantic_token_class(self) -> Option { + self.token_semantics() + .and_then(|metadata| metadata.semantic_class) + } + + /// Keyword tokens used for semantic highlighting. + #[must_use] + pub fn is_semantic_keyword_token(self) -> bool { + self.semantic_token_class() + .is_some_and(|class| class == SemanticTokenClass::Keyword) + } + + /// Comment tokens used for semantic highlighting. + #[must_use] + pub fn is_semantic_comment_token(self) -> bool { + self.semantic_token_class() + .is_some_and(|class| class == SemanticTokenClass::Comment) + } + + /// String tokens used for semantic highlighting. + #[must_use] + pub fn is_semantic_string_token(self) -> bool { + self.semantic_token_class() + .is_some_and(|class| class == SemanticTokenClass::String) + } + + /// Numeric tokens used for semantic highlighting. + #[must_use] + pub fn is_semantic_number_token(self) -> bool { + self.semantic_token_class() + .is_some_and(|class| class == SemanticTokenClass::Number) + } + + /// Operator tokens used for semantic highlighting. + #[must_use] + pub fn is_semantic_operator_token(self) -> bool { + self.semantic_token_class() + .is_some_and(|class| class == SemanticTokenClass::Operator) + } +} + +impl BinaryOperatorKind { + #[must_use] + pub fn class(self) -> Option { + self.token_kind() + .and_then(SyntaxKind::token_semantics) + .and_then(|metadata| metadata.binary_op_class) + } + + /// Whether this operator always yields a `number`. 
+ #[must_use] + pub fn returns_number(self) -> bool { + self.class() + .is_some_and(|class| class == BinaryOperatorClass::Numeric) + } + + /// Whether this operator always yields a `boolean`. + #[must_use] + pub fn returns_boolean(self) -> bool { + self.class().is_some_and(|class| { + matches!( + class, + BinaryOperatorClass::Equality + | BinaryOperatorClass::Ordering + | BinaryOperatorClass::Membership + ) + }) + } + + /// Whether this operator is `&&`, `||`, or `??`. + #[must_use] + pub fn is_logical_short_circuit(self) -> bool { + self.class().is_some_and(|class| { + matches!( + class, + BinaryOperatorClass::LogicalAnd + | BinaryOperatorClass::LogicalOr + | BinaryOperatorClass::NullCoalesce + ) + }) + } + + #[must_use] + pub fn is_logical_and(self) -> bool { + self.class() + .is_some_and(|class| class == BinaryOperatorClass::LogicalAnd) + } + + #[must_use] + pub fn is_logical_or(self) -> bool { + self.class() + .is_some_and(|class| class == BinaryOperatorClass::LogicalOr) + } + + #[must_use] + pub fn is_equality(self) -> bool { + self.class() + .is_some_and(|class| class == BinaryOperatorClass::Equality) + } + + #[must_use] + pub fn is_ordering(self) -> bool { + self.class() + .is_some_and(|class| class == BinaryOperatorClass::Ordering) + } + + #[must_use] + pub fn is_membership(self) -> bool { + self.class() + .is_some_and(|class| class == BinaryOperatorClass::Membership) + } + + fn token_kind(self) -> Option { + match self { + BinaryOperatorKind::Or => Some(SyntaxKind::OR), + BinaryOperatorKind::NullCoaelse => Some(SyntaxKind::NULL_COAELSE), + BinaryOperatorKind::And => Some(SyntaxKind::AND), + BinaryOperatorKind::BitOr => Some(SyntaxKind::BIT_OR), + BinaryOperatorKind::BitXor => Some(SyntaxKind::BIT_XOR), + BinaryOperatorKind::BitAnd => Some(SyntaxKind::BIT_AND), + BinaryOperatorKind::Eq => Some(SyntaxKind::EQ), + BinaryOperatorKind::Ne => Some(SyntaxKind::NE), + BinaryOperatorKind::Lt => Some(SyntaxKind::LT), + BinaryOperatorKind::Gt => Some(SyntaxKind::GT), 
+ BinaryOperatorKind::Le => Some(SyntaxKind::LE), + BinaryOperatorKind::Ge => Some(SyntaxKind::GE), + BinaryOperatorKind::InKw => Some(SyntaxKind::IN_KW), + BinaryOperatorKind::Lhs => Some(SyntaxKind::LHS), + BinaryOperatorKind::Rhs => Some(SyntaxKind::RHS), + BinaryOperatorKind::Plus => Some(SyntaxKind::PLUS), + BinaryOperatorKind::Minus => Some(SyntaxKind::MINUS), + BinaryOperatorKind::Mul => Some(SyntaxKind::MUL), + BinaryOperatorKind::Div => Some(SyntaxKind::DIV), + BinaryOperatorKind::Modulo => Some(SyntaxKind::MODULO), + BinaryOperatorKind::MetaObjectApply | BinaryOperatorKind::ErrorNoOperator => None, + } + } +} + +impl UnaryOperatorKind { + #[must_use] + pub fn class(self) -> Option { + self.token_kind() + .token_semantics() + .and_then(|metadata| metadata.unary_op_class) + } + + /// Whether this operator always yields a `boolean`. + #[must_use] + pub fn returns_boolean(self) -> bool { + self.class() + .is_some_and(|class| class == UnaryOperatorClass::LogicalNot) + } + + /// Whether this operator always yields a `number`. + #[must_use] + pub fn returns_number(self) -> bool { + self.class() + .is_some_and(|class| class == UnaryOperatorClass::Numeric) + } + + #[must_use] + pub fn is_logical_not(self) -> bool { + self.class() + .is_some_and(|class| class == UnaryOperatorClass::LogicalNot) + } + + fn token_kind(self) -> SyntaxKind { + match self { + UnaryOperatorKind::Minus => SyntaxKind::MINUS, + UnaryOperatorKind::Not => SyntaxKind::NOT, + UnaryOperatorKind::BitNot => SyntaxKind::BIT_NOT, + } + } +} + +#[cfg(test)] +mod tests { + use super::{ + BinaryOperatorClass, BinaryOperatorKind, SemanticTokenClass, SyntaxKind, + UnaryOperatorClass, UnaryOperatorKind, TOKEN_SEMANTICS, + }; + use crate::rowan::NodeOrToken; + + // Test-only explicit decision list: lexical tokens that are intentionally not + // semantic-highlighted. Coverage tests fail if any token is neither classified nor ignored. 
+ const SEMANTIC_TOKEN_EXPLICITLY_IGNORED: &[SyntaxKind] = &[ + SyntaxKind::L_BRACK, + SyntaxKind::R_BRACK, + SyntaxKind::L_PAREN, + SyntaxKind::R_PAREN, + SyntaxKind::L_BRACE, + SyntaxKind::R_BRACE, + SyntaxKind::COLON, + SyntaxKind::COLONCOLON, + SyntaxKind::COLONCOLONCOLON, + SyntaxKind::SEMI, + SyntaxKind::DOT, + SyntaxKind::DOTDOTDOT, + SyntaxKind::COMMA, + SyntaxKind::QUESTION_MARK, + SyntaxKind::ERROR_FLOAT_JUNK_AFTER_POINT, + SyntaxKind::ERROR_FLOAT_JUNK_AFTER_EXPONENT, + SyntaxKind::ERROR_FLOAT_JUNK_AFTER_EXPONENT_SIGN, + SyntaxKind::ERROR_STRING_DOUBLE_UNTERMINATED, + SyntaxKind::ERROR_STRING_SINGLE_UNTERMINATED, + SyntaxKind::ERROR_STRING_DOUBLE_VERBATIM_UNTERMINATED, + SyntaxKind::ERROR_STRING_SINGLE_VERBATIM_UNTERMINATED, + SyntaxKind::ERROR_STRING_VERBATIM_MISSING_QUOTES, + SyntaxKind::ERROR_STRING_BLOCK_UNEXPECTED_END, + SyntaxKind::ERROR_STRING_BLOCK_MISSING_NEW_LINE, + SyntaxKind::ERROR_STRING_BLOCK_MISSING_TERMINATION, + SyntaxKind::ERROR_STRING_BLOCK_MISSING_INDENT, + SyntaxKind::WHITESPACE, + SyntaxKind::ERROR_COMMENT_TOO_SHORT, + SyntaxKind::ERROR_COMMENT_UNTERMINATED, + SyntaxKind::META_OBJECT_APPLY, + SyntaxKind::ERROR_NO_OPERATOR, + SyntaxKind::ERROR_MISSING_TOKEN, + SyntaxKind::ERROR_UNEXPECTED_TOKEN, + SyntaxKind::ERROR_CUSTOM, + SyntaxKind::LEXING_ERROR, + ]; + const BINARY_OPERATOR_EXPLICITLY_IGNORED: &[BinaryOperatorKind] = &[ + BinaryOperatorKind::MetaObjectApply, + BinaryOperatorKind::ErrorNoOperator, + ]; + + fn all_lexical_token_kinds() -> impl Iterator { + (SyntaxKind::OR.into_raw()..=SyntaxKind::LEXING_ERROR.into_raw()).map(SyntaxKind::from_raw) + } + + #[test] + fn token_doc_lookup() { + assert_eq!( + SyntaxKind::PLUS.token_doc(), + Some("`+` adds numbers, concatenates strings/arrays, or merges objects.") + ); + assert_eq!( + SyntaxKind::NULL_COAELSE.token_doc(), + Some("`??` returns right-hand value when left side is `null`.") + ); + assert_eq!(SyntaxKind::IDENT.token_doc(), None); + } + + #[test] + fn 
hover_and_semantic_token_classification() { + assert!(SyntaxKind::IDENT.is_hover_eligible()); + assert!(SyntaxKind::PLUS.is_hover_eligible()); + assert!(!SyntaxKind::WHITESPACE.is_hover_eligible()); + assert!(SyntaxKind::LOCAL_KW.is_semantic_keyword_token()); + assert!(SyntaxKind::SINGLE_LINE_SLASH_COMMENT.is_semantic_comment_token()); + assert!(SyntaxKind::STRING_DOUBLE.is_semantic_string_token()); + assert!(SyntaxKind::FLOAT.is_semantic_number_token()); + assert!(SyntaxKind::NULL_COAELSE.is_semantic_operator_token()); + } + + #[test] + fn binary_operator_categories() { + assert_eq!( + BinaryOperatorKind::Mul.class(), + Some(BinaryOperatorClass::Numeric) + ); + assert_eq!( + BinaryOperatorKind::Eq.class(), + Some(BinaryOperatorClass::Equality) + ); + assert_eq!( + BinaryOperatorKind::And.class(), + Some(BinaryOperatorClass::LogicalAnd) + ); + assert!(BinaryOperatorKind::Mul.returns_number()); + assert!(BinaryOperatorKind::Eq.returns_boolean()); + assert!(BinaryOperatorKind::And.is_logical_short_circuit()); + assert!(BinaryOperatorKind::Eq.is_equality()); + assert!(BinaryOperatorKind::Gt.is_ordering()); + assert!(BinaryOperatorKind::InKw.is_membership()); + } + + #[test] + fn unary_operator_categories() { + assert_eq!( + UnaryOperatorKind::Not.class(), + Some(UnaryOperatorClass::LogicalNot) + ); + assert_eq!( + UnaryOperatorKind::Minus.class(), + Some(UnaryOperatorClass::Numeric) + ); + assert!(UnaryOperatorKind::Not.returns_boolean()); + assert!(UnaryOperatorKind::Minus.returns_number()); + assert!(UnaryOperatorKind::Not.is_logical_not()); + } + + #[test] + fn semantic_token_classification_covers_all_lexical_tokens() { + let mut missing = Vec::new(); + let mut conflict = Vec::new(); + + for kind in all_lexical_token_kinds() { + let classified = kind.semantic_token_class().is_some() || kind == SyntaxKind::IDENT; + let ignored = SEMANTIC_TOKEN_EXPLICITLY_IGNORED.contains(&kind); + match (classified, ignored) { + (false, false) => missing.push(kind), + (true, true) 
=> conflict.push(kind), + _ => {} + } + } + + assert!( + conflict.is_empty(), + "semantic-token kinds cannot be both classified and ignored: {conflict:?}" + ); + assert!( + missing.is_empty(), + "lexical token kinds missing semantic classification decision: {missing:?}" + ); + } + + #[test] + fn binary_operator_classification_covers_all_variants() { + let mut missing = Vec::new(); + let mut conflict = Vec::new(); + + for op in [ + BinaryOperatorKind::Or, + BinaryOperatorKind::NullCoaelse, + BinaryOperatorKind::And, + BinaryOperatorKind::BitOr, + BinaryOperatorKind::BitXor, + BinaryOperatorKind::BitAnd, + BinaryOperatorKind::Eq, + BinaryOperatorKind::Ne, + BinaryOperatorKind::Lt, + BinaryOperatorKind::Gt, + BinaryOperatorKind::Le, + BinaryOperatorKind::Ge, + BinaryOperatorKind::InKw, + BinaryOperatorKind::Lhs, + BinaryOperatorKind::Rhs, + BinaryOperatorKind::Plus, + BinaryOperatorKind::Minus, + BinaryOperatorKind::Mul, + BinaryOperatorKind::Div, + BinaryOperatorKind::Modulo, + BinaryOperatorKind::MetaObjectApply, + BinaryOperatorKind::ErrorNoOperator, + ] { + let classified = op.class().is_some(); + let ignored = BINARY_OPERATOR_EXPLICITLY_IGNORED.contains(&op); + match (classified, ignored) { + (false, false) => missing.push(op), + (true, true) => conflict.push(op), + _ => {} + } + } + + assert!( + conflict.is_empty(), + "binary operators cannot be both classified and ignored: {conflict:?}" + ); + assert!( + missing.is_empty(), + "binary operators missing classification decision: {missing:?}" + ); + } + + #[test] + fn token_docs_are_consistent_and_examples_parse() { + for metadata in TOKEN_SEMANTICS { + let has_doc = metadata.hover_doc.is_some(); + let has_example = metadata.hover_example.is_some(); + assert_eq!( + has_doc, has_example, + "token {:?} has mismatched doc/example presence", + metadata.kind + ); + + if !has_doc { + continue; + } + assert!( + metadata.hover_eligible, + "token {:?} has hover docs but is not hover-eligible", + metadata.kind + ); + + let 
example = metadata.hover_example.expect("checked above").code; + let (source, errors) = crate::parse(example); + assert!( + errors.is_empty(), + "token {:?} example did not parse cleanly: {example:?}, errors: {errors:?}", + metadata.kind + ); + + let contains_token = source + .syntax + .descendants_with_tokens() + .filter_map(NodeOrToken::into_token) + .any(|token| token.kind() == metadata.kind); + assert!( + contains_token, + "token {:?} example does not contain the token: {example:?}", + metadata.kind + ); + } + } + + #[test] + fn semantic_token_class_matches_helper_methods() { + for kind in all_lexical_token_kinds() { + let class = kind.semantic_token_class(); + assert_eq!( + kind.is_semantic_keyword_token(), + class == Some(SemanticTokenClass::Keyword) + ); + assert_eq!( + kind.is_semantic_comment_token(), + class == Some(SemanticTokenClass::Comment) + ); + assert_eq!( + kind.is_semantic_string_token(), + class == Some(SemanticTokenClass::String) + ); + assert_eq!( + kind.is_semantic_number_token(), + class == Some(SemanticTokenClass::Number) + ); + assert_eq!( + kind.is_semantic_operator_token(), + class == Some(SemanticTokenClass::Operator) + ); + } + } +} diff --git a/docs/lsp/HANDLERS.md b/docs/lsp/HANDLERS.md index e4b62fcb..e44312e1 100644 --- a/docs/lsp/HANDLERS.md +++ b/docs/lsp/HANDLERS.md @@ -201,6 +201,9 @@ Hover combines: - local definition context snippets - definition-site fallback to bound value type when token-level inference is `any` +- keyword/operator token docs for language primitives where available + (`jrsonnet-rowan-parser/src/syntax_semantics.rs`, via + `SyntaxKind::token_doc()`) Requires `TypeAnalysis` from async server context. @@ -235,6 +238,9 @@ File: `crates/jrsonnet-lsp-handlers/src/semantic_tokens.rs` - Produces encoded semantic tokens for full-document requests. - Produces encoded semantic tokens for range requests as well. 
+- Token-class mapping for keyword/comment/string/number/operator tokens is + centralized in `jrsonnet-rowan-parser/src/syntax_semantics.rs` via + `SyntaxKind` semantic helpers. ### Signature Help From 110a21ef255ad1f152ab4c425d9e0d03b30a2fa2 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Tue, 10 Feb 2026 17:06:48 +0000 Subject: [PATCH 062/210] lsp: support multi-purpose token docs in hover --- crates/jrsonnet-lsp-handlers/src/hover.rs | 23 +- crates/jrsonnet-lsp-inference/src/expr.rs | 15 ++ .../src/syntax_semantics.rs | 253 +++++++++++------- docs/lsp/HANDLERS.md | 2 +- 4 files changed, 196 insertions(+), 97 deletions(-) diff --git a/crates/jrsonnet-lsp-handlers/src/hover.rs b/crates/jrsonnet-lsp-handlers/src/hover.rs index 6f80e0ce..09e4c658 100644 --- a/crates/jrsonnet-lsp-handlers/src/hover.rs +++ b/crates/jrsonnet-lsp-handlers/src/hover.rs @@ -45,7 +45,7 @@ pub fn hover(document: &Document, position: LspPosition, analysis: &TypeAnalysis } let type_markdown = inferred_type_markdown(document, analysis, offset); - let token_markdown = token.kind().token_doc().map(str::to_owned); + let token_markdown = token.kind().token_doc_markdown(); if let Some(value) = merge_markdown_sections(type_markdown, token_markdown) { return Some(Hover { contents: HoverContents::Markup(MarkupContent { @@ -442,7 +442,10 @@ mod tests { contents: HoverContents::Markup(MarkupContent { kind: MarkupKind::Markdown, value }), range: None }) => { - assert_eq!(value, "`null`\n\n---\n\nLiteral `null` value."); + assert_eq!( + value, + "`null`\n\n---\n\nLiteral `null` value.\n\n```jsonnet\nnull\n```" + ); }); } @@ -455,7 +458,21 @@ mod tests { }) => { assert_eq!( value, - "`number`\n\n---\n\n`+` adds numbers, concatenates strings/arrays, or merges objects." 
+ "`number`\n\n---\n\n`+` adds numbers, concatenates strings/arrays, or merges objects.\n\n```jsonnet\n1 + 2\n```" + ); + }); + } + + #[test] + fn test_multi_purpose_operator_hover_docs() { + let result = get_hover("\"hello %s\" % \"world\"", 0, 11); + assert_matches!(result, Some(Hover { + contents: HoverContents::Markup(MarkupContent { kind: MarkupKind::Markdown, value }), + range: None + }) => { + assert_eq!( + value, + "`string`\n\n---\n\nThis token has multiple purposes:\n\n**Purpose 1**\n\n`%` computes numeric remainder.\n\n```jsonnet\n5 % 2\n```\n\n**Purpose 2**\n\n`%` formats strings with placeholders.\n\n```jsonnet\n\"hello %s\" % \"world\"\n```" ); }); } diff --git a/crates/jrsonnet-lsp-inference/src/expr.rs b/crates/jrsonnet-lsp-inference/src/expr.rs index f65859cd..4c52094f 100644 --- a/crates/jrsonnet-lsp-inference/src/expr.rs +++ b/crates/jrsonnet-lsp-inference/src/expr.rs @@ -458,6 +458,9 @@ fn infer_binary_expr_base_ty( return Ty::ANY; }; + if op_kind == BinaryOperatorKind::Modulo && lhs_ty == Ty::STRING { + return Ty::STRING; + } if op_kind.returns_number() { return Ty::NUMBER; } @@ -1144,6 +1147,18 @@ mod tests { assert_eq!(ty, Ty::NUMBER); } + #[test] + fn test_infer_modulo_remainder() { + let (ty, _) = infer_doc("5 % 2"); + assert_eq!(ty, Ty::NUMBER); + } + + #[test] + fn test_infer_modulo_string_formatting() { + let (ty, _) = infer_doc(r#""hello %s" % "world""#); + assert_eq!(ty, Ty::STRING); + } + #[test] fn test_infer_string() { let (ty, _) = infer_doc(r#""hello""#); diff --git a/crates/jrsonnet-rowan-parser/src/syntax_semantics.rs b/crates/jrsonnet-rowan-parser/src/syntax_semantics.rs index 0333d2e5..6b92cce5 100644 --- a/crates/jrsonnet-rowan-parser/src/syntax_semantics.rs +++ b/crates/jrsonnet-rowan-parser/src/syntax_semantics.rs @@ -31,26 +31,34 @@ pub enum UnaryOperatorClass { } #[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub struct TokenDocExample { - pub code: &'static str, +pub struct TokenDocPurpose { + pub doc: &'static str, + pub 
example: &'static str, } #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub struct TokenSemantics { pub kind: SyntaxKind, - pub hover_doc: Option<&'static str>, - pub hover_example: Option, + pub hover_purposes: &'static [TokenDocPurpose], pub hover_eligible: bool, pub semantic_class: Option, pub binary_op_class: Option, pub unary_op_class: Option, } +macro_rules! token_purpose { + ($doc:expr, $example:expr) => { + TokenDocPurpose { + doc: $doc, + example: $example, + } + }; +} + macro_rules! token_meta { ( $kind:ident - $(, hover_doc = $hover_doc:expr)? - $(, hover_example = $hover_example:expr)? + $(, purposes = $purposes:expr)? $(, hover_eligible = $hover_eligible:expr)? $(, semantic = $semantic:ident)? $(, binary = $binary:ident)? @@ -58,18 +66,15 @@ macro_rules! token_meta { ) => { TokenSemantics { kind: SyntaxKind::$kind, - hover_doc: token_meta!(@opt_str $($hover_doc)?), - hover_example: token_meta!(@opt_example $($hover_example)?), + hover_purposes: token_meta!(@purposes $($purposes)?), hover_eligible: token_meta!(@bool $($hover_eligible)?), semantic_class: token_meta!(@opt_semantic $($semantic)?), binary_op_class: token_meta!(@opt_binary $($binary)?), unary_op_class: token_meta!(@opt_unary $($unary)?), } }; - (@opt_str $value:expr) => { Some($value) }; - (@opt_str) => { None }; - (@opt_example $value:expr) => { Some(TokenDocExample { code: $value }) }; - (@opt_example) => { None }; + (@purposes $value:expr) => { $value }; + (@purposes) => { &[] }; (@bool $value:expr) => { $value }; (@bool) => { false }; (@opt_semantic $value:ident) => { Some(SemanticTokenClass::$value) }; @@ -98,58 +103,65 @@ const TOKEN_SEMANTICS: &[TokenSemantics] = &[ token_meta!(STRING_BLOCK, hover_eligible = true, semantic = String), token_meta!( NULL_KW, - hover_doc = "Literal `null` value.", - hover_example = "null", + purposes = &[token_purpose!("Literal `null` value.", "null")], hover_eligible = true, semantic = Keyword ), token_meta!( TRUE_KW, - hover_doc = "Boolean literal 
`true`.", - hover_example = "true", + purposes = &[token_purpose!("Boolean literal `true`.", "true")], hover_eligible = true, semantic = Keyword ), token_meta!( FALSE_KW, - hover_doc = "Boolean literal `false`.", - hover_example = "false", + purposes = &[token_purpose!("Boolean literal `false`.", "false")], hover_eligible = true, semantic = Keyword ), token_meta!( SELF_KW, - hover_doc = "`self` refers to the current object value.", - hover_example = "{ value: self }", + purposes = &[token_purpose!( + "`self` refers to the current object value.", + "{ value: self }" + )], hover_eligible = true, semantic = Keyword ), token_meta!( SUPER_KW, - hover_doc = "`super` refers to inherited object fields.", - hover_example = "{ x: 1 } + { y: super.x }", + purposes = &[token_purpose!( + "`super` refers to inherited object fields.", + "{ x: 1 } + { y: super.x }" + )], hover_eligible = true, semantic = Keyword ), token_meta!( DOLLAR, - hover_doc = "`$` refers to the root object.", - hover_example = "{ x: 1, y: $.x }", + purposes = &[token_purpose!( + "`$` refers to the root object.", + "{ x: 1, y: $.x }" + )], hover_eligible = true, semantic = Operator ), token_meta!( PLUS, - hover_doc = "`+` adds numbers, concatenates strings/arrays, or merges objects.", - hover_example = "1 + 2", + purposes = &[token_purpose!( + "`+` adds numbers, concatenates strings/arrays, or merges objects.", + "1 + 2" + )], hover_eligible = true, semantic = Operator, binary = Add ), token_meta!( MINUS, - hover_doc = "`-` subtracts numbers (or negates with unary form).", - hover_example = "2 - 1", + purposes = &[token_purpose!( + "`-` subtracts numbers (or negates with unary form).", + "2 - 1" + )], hover_eligible = true, semantic = Operator, binary = Numeric, @@ -157,104 +169,118 @@ const TOKEN_SEMANTICS: &[TokenSemantics] = &[ ), token_meta!( MUL, - hover_doc = "`*` multiplies numbers.", - hover_example = "2 * 3", + purposes = &[token_purpose!("`*` multiplies numbers.", "2 * 3")], hover_eligible = true, 
semantic = Operator, binary = Numeric ), token_meta!( DIV, - hover_doc = "`/` divides numbers.", - hover_example = "4 / 2", + purposes = &[token_purpose!("`/` divides numbers.", "4 / 2")], hover_eligible = true, semantic = Operator, binary = Numeric ), token_meta!( MODULO, - hover_doc = "`%` computes numeric remainder.", - hover_example = "5 % 2", + purposes = &[ + token_purpose!("`%` computes numeric remainder.", "5 % 2"), + token_purpose!( + "`%` formats strings with placeholders.", + "\"hello %s\" % \"world\"" + ), + ], hover_eligible = true, semantic = Operator, binary = Numeric ), token_meta!( AND, - hover_doc = "`&&` requires both operands to be truthy.", - hover_example = "true && false", + purposes = &[token_purpose!( + "`&&` requires both operands to be truthy.", + "true && false" + )], hover_eligible = true, semantic = Operator, binary = LogicalAnd ), token_meta!( OR, - hover_doc = "`||` requires at least one operand to be truthy.", - hover_example = "true || false", + purposes = &[token_purpose!( + "`||` requires at least one operand to be truthy.", + "true || false" + )], hover_eligible = true, semantic = Operator, binary = LogicalOr ), token_meta!( NOT, - hover_doc = "`!` negates a boolean expression.", - hover_example = "!true", + purposes = &[token_purpose!("`!` negates a boolean expression.", "!true")], hover_eligible = true, semantic = Operator, unary = LogicalNot ), token_meta!( EQ, - hover_doc = "`==` checks value equality.", - hover_example = "1 == 1", + purposes = &[token_purpose!("`==` checks value equality.", "1 == 1")], hover_eligible = true, semantic = Operator, binary = Equality ), token_meta!( NE, - hover_doc = "`!=` checks value inequality.", - hover_example = "1 != 2", + purposes = &[token_purpose!("`!=` checks value inequality.", "1 != 2")], hover_eligible = true, semantic = Operator, binary = Equality ), token_meta!( LT, - hover_doc = "`<` checks strict less-than ordering.", - hover_example = "1 < 2", + purposes = &[token_purpose!( + 
"`<` checks strict less-than ordering.", + "1 < 2" + )], hover_eligible = true, semantic = Operator, binary = Ordering ), token_meta!( LE, - hover_doc = "`<=` checks less-than-or-equal ordering.", - hover_example = "1 <= 2", + purposes = &[token_purpose!( + "`<=` checks less-than-or-equal ordering.", + "1 <= 2" + )], hover_eligible = true, semantic = Operator, binary = Ordering ), token_meta!( GT, - hover_doc = "`>` checks strict greater-than ordering.", - hover_example = "2 > 1", + purposes = &[token_purpose!( + "`>` checks strict greater-than ordering.", + "2 > 1" + )], hover_eligible = true, semantic = Operator, binary = Ordering ), token_meta!( GE, - hover_doc = "`>=` checks greater-than-or-equal ordering.", - hover_example = "2 >= 2", + purposes = &[token_purpose!( + "`>=` checks greater-than-or-equal ordering.", + "2 >= 2" + )], hover_eligible = true, semantic = Operator, binary = Ordering ), token_meta!( NULL_COAELSE, - hover_doc = "`??` returns right-hand value when left side is `null`.", - hover_example = "null ?? 1", + purposes = &[token_purpose!( + "`??` returns right-hand value when left side is `null`.", + "null ?? 
1" + )], hover_eligible = true, semantic = Operator, binary = NullCoalesce @@ -290,24 +316,47 @@ fn token_semantics(kind: SyntaxKind) -> Option<&'static TokenSemantics> { .find(|metadata| metadata.kind == kind) } +fn token_purpose_markdown(purpose: &TokenDocPurpose) -> String { + format!("{}\n\n```jsonnet\n{}\n```", purpose.doc, purpose.example) +} + +fn token_doc_markdown(purposes: &[TokenDocPurpose]) -> Option { + match purposes { + [] => None, + [purpose] => Some(token_purpose_markdown(purpose)), + _ => { + use std::fmt::Write as _; + + let mut markdown = String::from("This token has multiple purposes:"); + for (idx, purpose) in purposes.iter().enumerate() { + markdown.push_str("\n\n"); + if write!(markdown, "**Purpose {}**\n\n", idx + 1).is_err() { + return None; + } + markdown.push_str(&token_purpose_markdown(purpose)); + } + Some(markdown) + } + } +} + impl SyntaxKind { #[must_use] pub fn token_semantics(self) -> Option<&'static TokenSemantics> { token_semantics(self) } - /// Returns language-level documentation for tokens/operators. + /// Returns token documentation purposes, each with prose and an example. #[must_use] - pub fn token_doc(self) -> Option<&'static str> { + pub fn token_doc_purposes(self) -> &'static [TokenDocPurpose] { self.token_semantics() - .and_then(|metadata| metadata.hover_doc) + .map_or(&[], |metadata| metadata.hover_purposes) } - /// Returns a runnable example snippet for token hover docs. + /// Returns language-level markdown documentation for tokens/operators. #[must_use] - pub fn token_doc_example(self) -> Option<&'static str> { - self.token_semantics() - .and_then(|metadata| metadata.hover_example.map(|example| example.code)) + pub fn token_doc_markdown(self) -> Option { + token_doc_markdown(self.token_doc_purposes()) } /// Tokens that should participate in hover lookup. 
@@ -554,14 +603,27 @@ mod tests { #[test] fn token_doc_lookup() { assert_eq!( - SyntaxKind::PLUS.token_doc(), - Some("`+` adds numbers, concatenates strings/arrays, or merges objects.") + SyntaxKind::PLUS.token_doc_markdown(), + Some( + "`+` adds numbers, concatenates strings/arrays, or merges objects.\n\n```jsonnet\n1 + 2\n```" + .to_owned() + ) ); assert_eq!( - SyntaxKind::NULL_COAELSE.token_doc(), - Some("`??` returns right-hand value when left side is `null`.") + SyntaxKind::NULL_COAELSE.token_doc_markdown(), + Some( + "`??` returns right-hand value when left side is `null`.\n\n```jsonnet\nnull ?? 1\n```" + .to_owned() + ) ); - assert_eq!(SyntaxKind::IDENT.token_doc(), None); + assert_eq!( + SyntaxKind::MODULO.token_doc_markdown(), + Some( + "This token has multiple purposes:\n\n**Purpose 1**\n\n`%` computes numeric remainder.\n\n```jsonnet\n5 % 2\n```\n\n**Purpose 2**\n\n`%` formats strings with placeholders.\n\n```jsonnet\n\"hello %s\" % \"world\"\n```" + .to_owned() + ) + ); + assert_eq!(SyntaxKind::IDENT.token_doc_markdown(), None); } #[test] @@ -689,15 +751,7 @@ mod tests { #[test] fn token_docs_are_consistent_and_examples_parse() { for metadata in TOKEN_SEMANTICS { - let has_doc = metadata.hover_doc.is_some(); - let has_example = metadata.hover_example.is_some(); - assert_eq!( - has_doc, has_example, - "token {:?} has mismatched doc/example presence", - metadata.kind - ); - - if !has_doc { + if metadata.hover_purposes.is_empty() { continue; } assert!( @@ -706,24 +760,37 @@ mod tests { metadata.kind ); - let example = metadata.hover_example.expect("checked above").code; - let (source, errors) = crate::parse(example); - assert!( - errors.is_empty(), - "token {:?} example did not parse cleanly: {example:?}, errors: {errors:?}", - metadata.kind - ); - - let contains_token = source - .syntax - .descendants_with_tokens() - .filter_map(NodeOrToken::into_token) - .any(|token| token.kind() == metadata.kind); - assert!( - contains_token, - "token {:?} example does 
not contain the token: {example:?}", - metadata.kind - ); + for purpose in metadata.hover_purposes { + assert!( + !purpose.doc.trim().is_empty(), + "token {:?} has an empty doc purpose", + metadata.kind + ); + assert!( + !purpose.example.trim().is_empty(), + "token {:?} has an empty example purpose", + metadata.kind + ); + + let (source, errors) = crate::parse(purpose.example); + assert!( + errors.is_empty(), + "token {:?} example did not parse cleanly: {:?}, errors: {errors:?}", + metadata.kind, + purpose.example + ); + + let contains_token = source + .syntax + .descendants_with_tokens() + .filter_map(NodeOrToken::into_token) + .any(|token| token.kind() == metadata.kind); + assert!( + contains_token, + "token {:?} example does not contain the token: {:?}", + metadata.kind, purpose.example + ); + } } } diff --git a/docs/lsp/HANDLERS.md b/docs/lsp/HANDLERS.md index e44312e1..307145ee 100644 --- a/docs/lsp/HANDLERS.md +++ b/docs/lsp/HANDLERS.md @@ -203,7 +203,7 @@ Hover combines: `any` - keyword/operator token docs for language primitives where available (`jrsonnet-rowan-parser/src/syntax_semantics.rs`, via - `SyntaxKind::token_doc()`) + `SyntaxKind::token_doc_markdown()`) Requires `TypeAnalysis` from async server context. 
From 51a0ed6698973234dd58e9189b49876c0978be0c Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Tue, 10 Feb 2026 18:04:36 +0000 Subject: [PATCH 063/210] lsp: enforce token doc outcomes and semantics table invariants --- Cargo.lock | 1 + crates/jrsonnet-rowan-parser/Cargo.toml | 1 + .../src/syntax_semantics.rs | 167 +++++++++++++++--- 3 files changed, 145 insertions(+), 24 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 5fdb939f..5ad8f7ac 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1988,6 +1988,7 @@ dependencies = [ "drop_bomb", "indoc", "insta", + "jrsonnet-evaluator", "logos", "rowan", "thiserror 1.0.69", diff --git a/crates/jrsonnet-rowan-parser/Cargo.toml b/crates/jrsonnet-rowan-parser/Cargo.toml index 3430ba1d..9ecfc999 100644 --- a/crates/jrsonnet-rowan-parser/Cargo.toml +++ b/crates/jrsonnet-rowan-parser/Cargo.toml @@ -21,3 +21,4 @@ thiserror.workspace = true indoc.workspace = true insta.workspace = true anyhow.workspace = true +jrsonnet-evaluator = { workspace = true, features = ["exp-null-coaelse"] } diff --git a/crates/jrsonnet-rowan-parser/src/syntax_semantics.rs b/crates/jrsonnet-rowan-parser/src/syntax_semantics.rs index 6b92cce5..0c9e1a94 100644 --- a/crates/jrsonnet-rowan-parser/src/syntax_semantics.rs +++ b/crates/jrsonnet-rowan-parser/src/syntax_semantics.rs @@ -34,6 +34,16 @@ pub enum UnaryOperatorClass { pub struct TokenDocPurpose { pub doc: &'static str, pub example: &'static str, + pub outcome: TokenDocOutcome, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum TokenDocOutcome { + Number, + String, + Boolean, + Null, + Object, } #[derive(Debug, Clone, Copy, PartialEq, Eq)] @@ -47,10 +57,11 @@ pub struct TokenSemantics { } macro_rules! 
token_purpose { - ($doc:expr, $example:expr) => { + ($doc:expr, $example:expr, $outcome:ident) => { TokenDocPurpose { doc: $doc, example: $example, + outcome: TokenDocOutcome::$outcome, } }; } @@ -103,19 +114,19 @@ const TOKEN_SEMANTICS: &[TokenSemantics] = &[ token_meta!(STRING_BLOCK, hover_eligible = true, semantic = String), token_meta!( NULL_KW, - purposes = &[token_purpose!("Literal `null` value.", "null")], + purposes = &[token_purpose!("Literal `null` value.", "null", Null)], hover_eligible = true, semantic = Keyword ), token_meta!( TRUE_KW, - purposes = &[token_purpose!("Boolean literal `true`.", "true")], + purposes = &[token_purpose!("Boolean literal `true`.", "true", Boolean)], hover_eligible = true, semantic = Keyword ), token_meta!( FALSE_KW, - purposes = &[token_purpose!("Boolean literal `false`.", "false")], + purposes = &[token_purpose!("Boolean literal `false`.", "false", Boolean)], hover_eligible = true, semantic = Keyword ), @@ -123,7 +134,8 @@ const TOKEN_SEMANTICS: &[TokenSemantics] = &[ SELF_KW, purposes = &[token_purpose!( "`self` refers to the current object value.", - "{ value: self }" + "{ value: self }", + Object )], hover_eligible = true, semantic = Keyword @@ -132,7 +144,8 @@ const TOKEN_SEMANTICS: &[TokenSemantics] = &[ SUPER_KW, purposes = &[token_purpose!( "`super` refers to inherited object fields.", - "{ x: 1 } + { y: super.x }" + "{ x: 1 } + { y: super.x }", + Object )], hover_eligible = true, semantic = Keyword @@ -141,7 +154,8 @@ const TOKEN_SEMANTICS: &[TokenSemantics] = &[ DOLLAR, purposes = &[token_purpose!( "`$` refers to the root object.", - "{ x: 1, y: $.x }" + "{ x: 1, y: $.x }", + Object )], hover_eligible = true, semantic = Operator @@ -150,7 +164,8 @@ const TOKEN_SEMANTICS: &[TokenSemantics] = &[ PLUS, purposes = &[token_purpose!( "`+` adds numbers, concatenates strings/arrays, or merges objects.", - "1 + 2" + "1 + 2", + Number )], hover_eligible = true, semantic = Operator, @@ -160,7 +175,8 @@ const TOKEN_SEMANTICS: 
&[TokenSemantics] = &[ MINUS, purposes = &[token_purpose!( "`-` subtracts numbers (or negates with unary form).", - "2 - 1" + "2 - 1", + Number )], hover_eligible = true, semantic = Operator, @@ -169,14 +185,14 @@ const TOKEN_SEMANTICS: &[TokenSemantics] = &[ ), token_meta!( MUL, - purposes = &[token_purpose!("`*` multiplies numbers.", "2 * 3")], + purposes = &[token_purpose!("`*` multiplies numbers.", "2 * 3", Number)], hover_eligible = true, semantic = Operator, binary = Numeric ), token_meta!( DIV, - purposes = &[token_purpose!("`/` divides numbers.", "4 / 2")], + purposes = &[token_purpose!("`/` divides numbers.", "4 / 2", Number)], hover_eligible = true, semantic = Operator, binary = Numeric @@ -184,10 +200,11 @@ const TOKEN_SEMANTICS: &[TokenSemantics] = &[ token_meta!( MODULO, purposes = &[ - token_purpose!("`%` computes numeric remainder.", "5 % 2"), + token_purpose!("`%` computes numeric remainder.", "5 % 2", Number), token_purpose!( "`%` formats strings with placeholders.", - "\"hello %s\" % \"world\"" + "\"hello %s\" % \"world\"", + String ), ], hover_eligible = true, @@ -198,7 +215,8 @@ const TOKEN_SEMANTICS: &[TokenSemantics] = &[ AND, purposes = &[token_purpose!( "`&&` requires both operands to be truthy.", - "true && false" + "true && false", + Boolean )], hover_eligible = true, semantic = Operator, @@ -208,7 +226,8 @@ const TOKEN_SEMANTICS: &[TokenSemantics] = &[ OR, purposes = &[token_purpose!( "`||` requires at least one operand to be truthy.", - "true || false" + "true || false", + Boolean )], hover_eligible = true, semantic = Operator, @@ -216,21 +235,33 @@ const TOKEN_SEMANTICS: &[TokenSemantics] = &[ ), token_meta!( NOT, - purposes = &[token_purpose!("`!` negates a boolean expression.", "!true")], + purposes = &[token_purpose!( + "`!` negates a boolean expression.", + "!true", + Boolean + )], hover_eligible = true, semantic = Operator, unary = LogicalNot ), token_meta!( EQ, - purposes = &[token_purpose!("`==` checks value equality.", "1 == 
1")], + purposes = &[token_purpose!( + "`==` checks value equality.", + "1 == 1", + Boolean + )], hover_eligible = true, semantic = Operator, binary = Equality ), token_meta!( NE, - purposes = &[token_purpose!("`!=` checks value inequality.", "1 != 2")], + purposes = &[token_purpose!( + "`!=` checks value inequality.", + "1 != 2", + Boolean + )], hover_eligible = true, semantic = Operator, binary = Equality @@ -239,7 +270,8 @@ const TOKEN_SEMANTICS: &[TokenSemantics] = &[ LT, purposes = &[token_purpose!( "`<` checks strict less-than ordering.", - "1 < 2" + "1 < 2", + Boolean )], hover_eligible = true, semantic = Operator, @@ -249,7 +281,8 @@ const TOKEN_SEMANTICS: &[TokenSemantics] = &[ LE, purposes = &[token_purpose!( "`<=` checks less-than-or-equal ordering.", - "1 <= 2" + "1 <= 2", + Boolean )], hover_eligible = true, semantic = Operator, @@ -259,7 +292,8 @@ const TOKEN_SEMANTICS: &[TokenSemantics] = &[ GT, purposes = &[token_purpose!( "`>` checks strict greater-than ordering.", - "2 > 1" + "2 > 1", + Boolean )], hover_eligible = true, semantic = Operator, @@ -269,7 +303,8 @@ const TOKEN_SEMANTICS: &[TokenSemantics] = &[ GE, purposes = &[token_purpose!( "`>=` checks greater-than-or-equal ordering.", - "2 >= 2" + "2 >= 2", + Boolean )], hover_eligible = true, semantic = Operator, @@ -279,7 +314,8 @@ const TOKEN_SEMANTICS: &[TokenSemantics] = &[ NULL_COAELSE, purposes = &[token_purpose!( "`??` returns right-hand value when left side is `null`.", - "null ?? 1" + "null ?? 
1", + Number )], hover_eligible = true, semantic = Operator, @@ -546,11 +582,14 @@ impl UnaryOperatorKind { #[cfg(test)] mod tests { + use std::collections::HashSet; + use super::{ - BinaryOperatorClass, BinaryOperatorKind, SemanticTokenClass, SyntaxKind, + BinaryOperatorClass, BinaryOperatorKind, SemanticTokenClass, SyntaxKind, TokenDocOutcome, UnaryOperatorClass, UnaryOperatorKind, TOKEN_SEMANTICS, }; use crate::rowan::NodeOrToken; + use jrsonnet_evaluator::{State, Val}; // Test-only explicit decision list: lexical tokens that are intentionally not // semantic-highlighted. Coverage tests fail if any token is neither classified nor ignored. @@ -600,6 +639,16 @@ mod tests { (SyntaxKind::OR.into_raw()..=SyntaxKind::LEXING_ERROR.into_raw()).map(SyntaxKind::from_raw) } + fn value_matches_outcome(value: &Val, outcome: TokenDocOutcome) -> bool { + match outcome { + TokenDocOutcome::Number => matches!(value, Val::Num(_)), + TokenDocOutcome::String => matches!(value, Val::Str(_)), + TokenDocOutcome::Boolean => matches!(value, Val::Bool(_)), + TokenDocOutcome::Null => matches!(value, Val::Null), + TokenDocOutcome::Object => matches!(value, Val::Obj(_)), + } + } + #[test] fn token_doc_lookup() { assert_eq!( @@ -750,6 +799,7 @@ mod tests { #[test] fn token_docs_are_consistent_and_examples_parse() { + let state = State::default(); for metadata in TOKEN_SEMANTICS { if metadata.hover_purposes.is_empty() { continue; @@ -790,8 +840,77 @@ mod tests { "token {:?} example does not contain the token: {:?}", metadata.kind, purpose.example ); + + let value = state + .evaluate_snippet("", purpose.example) + .unwrap_or_else(|err| { + panic!( + "token {:?} example did not evaluate cleanly: {:?}, error: {err:#}", + metadata.kind, purpose.example + ) + }); + assert!( + value_matches_outcome(&value, purpose.outcome), + "token {:?} example had unexpected outcome {:?}: expected {:?}, got {:?}", + metadata.kind, + purpose.example, + purpose.outcome, + value + ); + } + } + } + + #[test] + fn 
token_semantics_table_has_unique_kinds() { + let mut seen = HashSet::new(); + let mut duplicates = Vec::new(); + for metadata in TOKEN_SEMANTICS { + if !seen.insert(metadata.kind) { + duplicates.push(metadata.kind); } } + assert!( + duplicates.is_empty(), + "token semantics table contains duplicate entries: {duplicates:?}" + ); + } + + #[test] + fn token_semantics_operator_flags_are_consistent() { + let mut binary_without_operator_class = Vec::new(); + let mut unary_without_operator_class = Vec::new(); + let mut hover_docs_on_non_eligible = Vec::new(); + for metadata in TOKEN_SEMANTICS { + if metadata.binary_op_class.is_some() + && !matches!( + metadata.semantic_class, + Some(SemanticTokenClass::Operator | SemanticTokenClass::Keyword) + ) { + binary_without_operator_class.push(metadata.kind); + } + if metadata.unary_op_class.is_some() + && metadata.semantic_class != Some(SemanticTokenClass::Operator) + { + unary_without_operator_class.push(metadata.kind); + } + if !metadata.hover_purposes.is_empty() && !metadata.hover_eligible { + hover_docs_on_non_eligible.push(metadata.kind); + } + } + + assert!( + binary_without_operator_class.is_empty(), + "binary-op token semantics must be operator-classified: {binary_without_operator_class:?}" + ); + assert!( + unary_without_operator_class.is_empty(), + "unary-op token semantics must be operator-classified: {unary_without_operator_class:?}" + ); + assert!( + hover_docs_on_non_eligible.is_empty(), + "tokens with hover docs must be hover-eligible: {hover_docs_on_non_eligible:?}" + ); } #[test] From 38305bd2acd509eaba7cd409a8cc7cd442a5603f Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Tue, 10 Feb 2026 18:06:09 +0000 Subject: [PATCH 064/210] lsp: validate docs/lsp jsonnet snippets in tests --- .../jrsonnet-lsp/tests/docs_lsp_examples.rs | 137 ++++++++++++++++++ 1 file changed, 137 insertions(+) create mode 100644 crates/jrsonnet-lsp/tests/docs_lsp_examples.rs diff --git a/crates/jrsonnet-lsp/tests/docs_lsp_examples.rs 
b/crates/jrsonnet-lsp/tests/docs_lsp_examples.rs new file mode 100644 index 00000000..09d4887f --- /dev/null +++ b/crates/jrsonnet-lsp/tests/docs_lsp_examples.rs @@ -0,0 +1,137 @@ +use std::{ + fs, + path::{Path, PathBuf}, +}; + +#[derive(Debug, Clone, PartialEq, Eq)] +struct MarkdownCodeBlock { + language: String, + start_line: usize, + code: String, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +struct JsonnetDocExample { + path: String, + start_line: usize, + code: String, +} + +fn docs_lsp_dir() -> PathBuf { + PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("../../docs/lsp") +} + +fn docs_lsp_markdown_paths() -> Vec { + let mut paths = fs::read_dir(docs_lsp_dir()) + .unwrap_or_else(|err| panic!("failed to list docs/lsp: {err}")) + .filter_map(Result::ok) + .map(|entry| entry.path()) + .filter(|path| path.extension().is_some_and(|ext| ext == "md")) + .collect::>(); + paths.sort(); + paths +} + +fn relative_path(path: &Path) -> String { + let workspace_root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("../.."); + let relative = path + .strip_prefix(workspace_root) + .unwrap_or_else(|err| panic!("failed to relativize {}: {err}", path.display())); + relative.display().to_string() +} + +fn extract_markdown_fenced_blocks(markdown: &str) -> Vec { + let mut blocks = Vec::new(); + let mut current_fence_len: Option = None; + let mut current_language = String::new(); + let mut current_start_line = 0usize; + let mut current_lines = Vec::new(); + + for (idx, line) in markdown.lines().enumerate() { + let trimmed = line.trim_start(); + let backticks = trimmed.chars().take_while(|&ch| ch == '`').count(); + if backticks >= 3 { + let rest = &trimmed[backticks..]; + + if let Some(fence_len) = current_fence_len { + if backticks >= fence_len && rest.trim().is_empty() { + blocks.push(MarkdownCodeBlock { + language: current_language.clone(), + start_line: current_start_line, + code: current_lines.join("\n"), + }); + current_fence_len = None; + current_language.clear(); + 
current_start_line = 0; + current_lines.clear(); + continue; + } + } else { + current_fence_len = Some(backticks); + current_language = rest + .split_ascii_whitespace() + .next() + .unwrap_or_default() + .to_ascii_lowercase(); + current_start_line = idx + 2; + current_lines.clear(); + continue; + } + } + + if current_fence_len.is_some() { + current_lines.push(line.to_owned()); + } + } + + blocks +} + +fn collect_jsonnet_doc_examples() -> Vec { + let mut examples = Vec::new(); + for path in docs_lsp_markdown_paths() { + let text = fs::read_to_string(&path) + .unwrap_or_else(|err| panic!("failed to read {}: {err}", path.display())); + let source_path = relative_path(&path); + for block in extract_markdown_fenced_blocks(&text) { + if block.language == "jsonnet" { + examples.push(JsonnetDocExample { + path: source_path.clone(), + start_line: block.start_line, + code: block.code, + }); + } + } + } + examples +} + +#[test] +fn lsp_docs_jsonnet_examples_parse_cleanly() { + let examples = collect_jsonnet_doc_examples(); + assert!( + !examples.is_empty(), + "expected at least one ```jsonnet fenced block in docs/lsp" + ); + + let failures = examples + .iter() + .filter_map(|example| { + let (_, errors) = jrsonnet_rowan_parser::parse(&example.code); + if errors.is_empty() { + None + } else { + Some(format!( + "{}:{} failed with parse errors: {errors:?}\n---\n{}\n---", + example.path, example.start_line, example.code + )) + } + }) + .collect::>(); + + assert!( + failures.is_empty(), + "jsonnet examples in docs/lsp must parse cleanly:\n{}", + failures.join("\n\n") + ); +} From 77f508537402e65f34510bd421200ef4371dca93 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Wed, 11 Feb 2026 10:51:22 +0000 Subject: [PATCH 065/210] lsp: unify import resolution behind shared boundary API --- crates/jrsonnet-lsp-import/src/lib.rs | 2 +- crates/jrsonnet-lsp-import/src/resolve.rs | 124 +++++++++++++++++- crates/jrsonnet-lsp/src/async_diagnostics.rs | 7 +- crates/jrsonnet-lsp/src/server.rs | 10 
+- .../jrsonnet-lsp/src/server/async_requests.rs | 6 +- docs/lsp/ARCHITECTURE.md | 11 +- 6 files changed, 141 insertions(+), 19 deletions(-) diff --git a/crates/jrsonnet-lsp-import/src/lib.rs b/crates/jrsonnet-lsp-import/src/lib.rs index 948bf414..b3bc32c7 100644 --- a/crates/jrsonnet-lsp-import/src/lib.rs +++ b/crates/jrsonnet-lsp-import/src/lib.rs @@ -18,5 +18,5 @@ pub use parse::{ check_import_from_token, check_import_path, extract_import_path, find_import_in_node, get_import_path_from_node, }; -pub use resolve::{resolve_import_path, resolve_import_path_from_base}; +pub use resolve::{resolve_import_path, resolve_import_path_from_base, ImportResolution}; pub use work_queue::{WorkQueue, WorkQueueExt}; diff --git a/crates/jrsonnet-lsp-import/src/resolve.rs b/crates/jrsonnet-lsp-import/src/resolve.rs index d02c977e..eeb72904 100644 --- a/crates/jrsonnet-lsp-import/src/resolve.rs +++ b/crates/jrsonnet-lsp-import/src/resolve.rs @@ -5,7 +5,47 @@ use std::path::{Path, PathBuf}; -use jrsonnet_lsp_document::CanonicalPath; +use jrsonnet_lsp_document::{CanonicalPath, Document}; + +use crate::graph::{ + parse_document_import_occurrences, parse_document_imports, ImportEntry, ImportOccurrence, +}; + +/// Import-resolution boundary for one importing file. +/// +/// This captures the importing file path and effective import roots once, +/// then exposes a single API that callers can use for raw path resolution and +/// import parsing with consistent behavior. 
+#[derive(Debug, Clone, Copy)] +pub struct ImportResolution<'a> { + importer_file: &'a CanonicalPath, + import_roots: &'a [PathBuf], +} + +impl<'a> ImportResolution<'a> { + #[must_use] + pub fn new(importer_file: &'a CanonicalPath, import_roots: &'a [PathBuf]) -> Self { + Self { + importer_file, + import_roots, + } + } + + #[must_use] + pub fn resolve(self, import_path: &str) -> Option { + resolve_import_path(self.importer_file, import_path, self.import_roots) + } + + #[must_use] + pub fn parse_entries(self, doc: &Document) -> Vec { + parse_document_imports(doc, &|import_path| self.resolve(import_path)) + } + + #[must_use] + pub fn parse_occurrences(self, doc: &Document) -> Vec { + parse_document_import_occurrences(doc, &|import_path| self.resolve(import_path)) + } +} /// Resolve an import path from an importing file. /// @@ -61,6 +101,7 @@ fn canonical_if_exists(path: &Path) -> Option { mod tests { use std::fs; + use jrsonnet_lsp_document::DocVersion; use tempfile::TempDir; use super::*; @@ -119,4 +160,85 @@ mod tests { )) ); } + + #[test] + fn test_import_resolution_parse_entries() { + let tmp = TempDir::new().expect("tempdir should be created"); + let root = tmp.path(); + let importer = root.join("main.jsonnet"); + let local_lib = root.join("lib.jsonnet"); + fs::write(&importer, "local lib = import \"lib.jsonnet\"; lib") + .expect("importer should be written"); + fs::write(&local_lib, "{}").expect("local lib should be written"); + + let importer = CanonicalPath::new( + importer + .canonicalize() + .expect("importer path should canonicalize"), + ); + let resolved_lib = CanonicalPath::new( + local_lib + .canonicalize() + .expect("local lib path should canonicalize"), + ); + let doc = Document::new( + "local lib = import \"lib.jsonnet\"; lib".to_string(), + DocVersion::new(1), + ); + let import_resolution = ImportResolution::new(&importer, &[]); + + assert_eq!( + import_resolution.parse_entries(&doc), + vec![ImportEntry { + binding_name: Some("lib".to_string()), + 
import_path: "lib.jsonnet".to_string(), + resolved_path: Some(resolved_lib), + }] + ); + } + + #[test] + fn test_import_resolution_parse_occurrences() { + let tmp = TempDir::new().expect("tempdir should be created"); + let root = tmp.path(); + let importer = root.join("main.jsonnet"); + let local_lib = root.join("lib.jsonnet"); + let code = "local lib = import \"lib.jsonnet\"; lib"; + fs::write(&importer, code).expect("importer should be written"); + fs::write(&local_lib, "{}").expect("local lib should be written"); + + let importer = CanonicalPath::new( + importer + .canonicalize() + .expect("importer path should canonicalize"), + ); + let resolved_lib = CanonicalPath::new( + local_lib + .canonicalize() + .expect("local lib path should canonicalize"), + ); + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let import_resolution = ImportResolution::new(&importer, &[]); + + let start = u32::try_from( + code.find("\"lib.jsonnet\"") + .expect("import path should exist in source"), + ) + .expect("start offset should fit into u32"); + let end = start + + u32::try_from("\"lib.jsonnet\"".len()) + .expect("import path length should fit into u32"); + + assert_eq!( + import_resolution.parse_occurrences(&doc), + vec![ImportOccurrence { + entry: ImportEntry { + binding_name: Some("lib".to_string()), + import_path: "lib.jsonnet".to_string(), + resolved_path: Some(resolved_lib), + }, + import_range: rowan::TextRange::new(start.into(), end.into()), + }] + ); + } } diff --git a/crates/jrsonnet-lsp/src/async_diagnostics.rs b/crates/jrsonnet-lsp/src/async_diagnostics.rs index 0acae7b0..756c8c6a 100644 --- a/crates/jrsonnet-lsp/src/async_diagnostics.rs +++ b/crates/jrsonnet-lsp/src/async_diagnostics.rs @@ -15,7 +15,7 @@ use std::{ use crossbeam_channel::{Receiver, Sender}; use jrsonnet_lsp_document::{CanonicalPath, DocVersion, Document}; -use jrsonnet_lsp_import::{parse_document_import_occurrences, resolve_import_path, ImportGraph}; +use 
jrsonnet_lsp_import::{ImportGraph, ImportResolution}; use jrsonnet_lsp_inference::{ DocumentSource, SharedDocumentManager, SharedTypeCache, TypeProvider, }; @@ -236,9 +236,8 @@ impl AsyncDiagnostics { Arc::clone(&config.global_types), ); let analysis = provider.analyze(&request.path, &document, &doc_source); - let resolve_import = - |import: &str| resolve_import_path(&request.path, import, &request.import_roots); - let import_occurrences = parse_document_import_occurrences(&document, &resolve_import); + let import_resolution = ImportResolution::new(&request.path, &request.import_roots); + let import_occurrences = import_resolution.parse_occurrences(&document); let Some(params) = handlers::publish_diagnostics_params( &request.path, diff --git a/crates/jrsonnet-lsp/src/server.rs b/crates/jrsonnet-lsp/src/server.rs index c3385957..63be97df 100644 --- a/crates/jrsonnet-lsp/src/server.rs +++ b/crates/jrsonnet-lsp/src/server.rs @@ -15,7 +15,7 @@ use anyhow::{Context, Result}; use crossbeam_channel::{select, Receiver, Sender}; use jrsonnet_lsp_document::{CanonicalPath, DocVersion}; use jrsonnet_lsp_handlers as handlers; -use jrsonnet_lsp_import::{parse_document_imports, resolve_import_path, ImportGraph}; +use jrsonnet_lsp_import::{ImportGraph, ImportResolution}; use jrsonnet_lsp_inference::{ new_shared_cache, DocumentManager, SharedDocumentManager, SharedTypeCache, }; @@ -1444,14 +1444,10 @@ impl Server { ); drop(config); - // Create a resolver closure that captures the path and effective import roots. - let resolve_import = |import: &str| -> Option { - resolve_import_path(path, import, &import_roots) - }; - // Parse imports OUTSIDE the lock to minimize lock hold time. // This is important for responsiveness when parsing large files. 
- let entries = parse_document_imports(&doc, &resolve_import); + let import_resolution = ImportResolution::new(path, &import_roots); + let entries = import_resolution.parse_entries(&doc); // Now acquire the write lock and do the quick data structure update import_graph.write().update_file_with_entries(path, entries); diff --git a/crates/jrsonnet-lsp/src/server/async_requests.rs b/crates/jrsonnet-lsp/src/server/async_requests.rs index a546f50b..0362fb6c 100644 --- a/crates/jrsonnet-lsp/src/server/async_requests.rs +++ b/crates/jrsonnet-lsp/src/server/async_requests.rs @@ -2,7 +2,7 @@ use std::sync::Arc; use jrsonnet_lsp_document::{to_lsp_range, CanonicalPath, DocVersion, Document, SymbolName}; use jrsonnet_lsp_handlers as handlers; -use jrsonnet_lsp_import::{parse_document_import_occurrences, resolve_import_path, ImportGraph}; +use jrsonnet_lsp_import::{ImportGraph, ImportResolution}; use jrsonnet_lsp_inference::{SharedDocumentManager, SharedTypeCache, TypeAnalysis, TypeProvider}; use jrsonnet_lsp_types::GlobalTyStore; use jrsonnet_rowan_parser::AstNode; @@ -547,8 +547,8 @@ impl AsyncRequestContext { ); (config.enable_lint_diagnostics, evaluator, import_roots) }; - let resolve_import = |import: &str| resolve_import_path(&path, import, &import_roots); - let import_occurrences = parse_document_import_occurrences(&doc, &resolve_import); + let import_resolution = ImportResolution::new(&path, &import_roots); + let import_occurrences = import_resolution.parse_occurrences(&doc); let diagnostics = crate::handlers::compute_diagnostics( &doc, diff --git a/docs/lsp/ARCHITECTURE.md b/docs/lsp/ARCHITECTURE.md index 89397943..9db24184 100644 --- a/docs/lsp/ARCHITECTURE.md +++ b/docs/lsp/ARCHITECTURE.md @@ -224,9 +224,14 @@ The common resolution order is: 1. relative to the importing file's directory 2. each configured `jpath` entry in order -That ordering is applied while building import graph entries. 
Cross-file -navigation and reference/rename paths then use the graph's resolved entries as -the source of truth instead of re-resolving import strings independently. +That ordering is applied through a shared boundary API: +`jrsonnet_lsp_import::ImportResolution`. Server graph updates, async +diagnostics, and `showErrors` all resolve and parse imports through this single +type so import behavior stays consistent across subsystems. + +Cross-file navigation and reference/rename paths then use the graph's resolved +entries as the source of truth instead of re-resolving import strings +independently. Navigation semantics: From 8afbff2d3b75c3cbb5db10faad0cdfa2f46c56b4 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Wed, 11 Feb 2026 11:14:48 +0000 Subject: [PATCH 066/210] rowan-parser: remove unused Parse::syntax API --- crates/jrsonnet-rowan-parser/src/parser.rs | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/crates/jrsonnet-rowan-parser/src/parser.rs b/crates/jrsonnet-rowan-parser/src/parser.rs index 4cde73ef..4f33e452 100644 --- a/crates/jrsonnet-rowan-parser/src/parser.rs +++ b/crates/jrsonnet-rowan-parser/src/parser.rs @@ -10,7 +10,7 @@ use crate::{ token_set::SyntaxKindSet, AstToken, SyntaxKind, SyntaxKind::*, - SyntaxNode, T, TS, + T, TS, }; /// Token set for field visibility (:, ::, :::). 
@@ -946,9 +946,3 @@ fn lhs_basic(p: &mut Parser) -> Result { return Err(p.error_with_no_skip()); }) } - -impl Parse { - pub fn syntax(&self) -> SyntaxNode { - SyntaxNode::new_root(self.green_node.clone()) - } -} From 94542fccdebcfaa14e186df87d5d4a7778f9578c Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Wed, 11 Feb 2026 11:15:18 +0000 Subject: [PATCH 067/210] lsp-import: make ImportResolution the public resolve boundary --- crates/jrsonnet-lsp-import/src/lib.rs | 2 +- crates/jrsonnet-lsp-import/src/resolve.rs | 19 +++++++------------ 2 files changed, 8 insertions(+), 13 deletions(-) diff --git a/crates/jrsonnet-lsp-import/src/lib.rs b/crates/jrsonnet-lsp-import/src/lib.rs index b3bc32c7..c8942c34 100644 --- a/crates/jrsonnet-lsp-import/src/lib.rs +++ b/crates/jrsonnet-lsp-import/src/lib.rs @@ -18,5 +18,5 @@ pub use parse::{ check_import_from_token, check_import_path, extract_import_path, find_import_in_node, get_import_path_from_node, }; -pub use resolve::{resolve_import_path, resolve_import_path_from_base, ImportResolution}; +pub use resolve::ImportResolution; pub use work_queue::{WorkQueue, WorkQueueExt}; diff --git a/crates/jrsonnet-lsp-import/src/resolve.rs b/crates/jrsonnet-lsp-import/src/resolve.rs index eeb72904..95cf9bcc 100644 --- a/crates/jrsonnet-lsp-import/src/resolve.rs +++ b/crates/jrsonnet-lsp-import/src/resolve.rs @@ -47,13 +47,8 @@ impl<'a> ImportResolution<'a> { } } -/// Resolve an import path from an importing file. -/// -/// Resolution order: -/// 1. Relative to the importing file's directory -/// 2. Each configured import root (`jpath`, vendor roots, etc.) #[must_use] -pub fn resolve_import_path( +fn resolve_import_path( importer_file: &CanonicalPath, import_path: &str, import_roots: &[PathBuf], @@ -61,11 +56,8 @@ pub fn resolve_import_path( resolve_import_path_from_base(importer_file.as_path(), import_path, import_roots) } -/// Resolve an import path from a base file path. 
-/// -/// The `base_file` should be the full path of the importing file. #[must_use] -pub fn resolve_import_path_from_base( +fn resolve_import_path_from_base( base_file: &Path, import_path: &str, import_roots: &[PathBuf], @@ -120,7 +112,8 @@ mod tests { .canonicalize() .expect("importer path should canonicalize"), ); - let resolved = resolve_import_path(&importer, "lib.jsonnet", &[]); + let import_resolution = ImportResolution::new(&importer, &[]); + let resolved = import_resolution.resolve("lib.jsonnet"); assert_eq!( resolved, Some(CanonicalPath::new( @@ -150,7 +143,9 @@ mod tests { .canonicalize() .expect("importer path should canonicalize"), ); - let resolved = resolve_import_path(&importer, "shared.libsonnet", &[jpath_dir]); + let import_roots = vec![jpath_dir]; + let import_resolution = ImportResolution::new(&importer, &import_roots); + let resolved = import_resolution.resolve("shared.libsonnet"); assert_eq!( resolved, Some(CanonicalPath::new( From a2f2fbb99f09c723589da4a46de7fef738cc288a Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Wed, 11 Feb 2026 11:46:03 +0000 Subject: [PATCH 068/210] feat: expand stdlib signatures We were missing some methods. 
--- .../data/jrsonnet_legacy_functions.txt | 150 ++++++++++++++++ .../data/official_functions.txt | 145 +++++++++++++++ .../data/rjsonnet_functions.txt | 147 ++++++++++++++++ crates/jrsonnet-std-sig/src/lib.rs | 165 +++++++++++++++++- docs/lsp/HANDLERS.md | 12 +- 5 files changed, 611 insertions(+), 8 deletions(-) create mode 100644 crates/jrsonnet-std-sig/data/jrsonnet_legacy_functions.txt create mode 100644 crates/jrsonnet-std-sig/data/official_functions.txt create mode 100644 crates/jrsonnet-std-sig/data/rjsonnet_functions.txt diff --git a/crates/jrsonnet-std-sig/data/jrsonnet_legacy_functions.txt b/crates/jrsonnet-std-sig/data/jrsonnet_legacy_functions.txt new file mode 100644 index 00000000..aab563fc --- /dev/null +++ b/crates/jrsonnet-std-sig/data/jrsonnet_legacy_functions.txt @@ -0,0 +1,150 @@ +abs +acos +all +any +asciiLower +asciiUpper +asin +assertEqual +atan +avg +base64 +base64Decode +base64DecodeBytes +bigint +ceil +char +clamp +codepoint +contains +cos +count +decodeUTF8 +deepJoin +encodeUTF8 +endsWith +equals +equalsIgnoreCase +escapeStringBash +escapeStringDollars +escapeStringJson +escapeStringPython +escapeStringXML +escapeStringXml +exp +exponent +extVar +filter +filterMap +find +findSubstr +flatMap +flatten +flattenArrays +flattenDeepArray +floor +foldl +foldr +format +get +isArray +isBoolean +isDecimal +isEmpty +isEven +isFunction +isInteger +isNumber +isObject +isOdd +isString +join +length +lines +log +lstripChars +makeArray +manifestIni +manifestJson +manifestJsonEx +manifestJsonMinified +manifestPython +manifestPythonVars +manifestToml +manifestTomlEx +manifestXmlJsonml +manifestYamlDoc +manifestYamlStream +mantissa +map +mapWithIndex +mapWithKey +max +maxArray +md5 +member +mergePatch +min +minArray +mod +modulo +native +objectFields +objectFieldsAll +objectFieldsEx +objectHas +objectHasAll +objectHasEx +objectKeysValues +objectKeysValuesAll +objectRemoveKey +objectValues +objectValuesAll +parseHex +parseInt +parseJson +parseOctal 
+parseYaml +pow +primitiveEquals +prune +range +regexQuoteMeta +remove +removeAt +repeat +resolvePath +reverse +round +rstripChars +set +setDiff +setInter +setMember +setUnion +sha1 +sha256 +sha3 +sha512 +sign +sin +slice +sort +split +splitLimit +splitLimitR +sqrt +startsWith +strReplace +stringChars +stripChars +substr +sum +tan +thisFile +toString +trace +type +uniq +xnor +xor diff --git a/crates/jrsonnet-std-sig/data/official_functions.txt b/crates/jrsonnet-std-sig/data/official_functions.txt new file mode 100644 index 00000000..edc5f38d --- /dev/null +++ b/crates/jrsonnet-std-sig/data/official_functions.txt @@ -0,0 +1,145 @@ +abs +acos +all +any +asciiLower +asciiUpper +asin +assertEqual +atan +atan2 +avg +base64 +base64Decode +base64DecodeBytes +ceil +char +clamp +codepoint +contains +cos +count +decodeUTF8 +deepJoin +deg2rad +encodeUTF8 +endsWith +equalsIgnoreCase +escapeStringBash +escapeStringDollars +escapeStringJson +escapeStringPython +escapeStringXml +exp +exponent +extVar +filter +filterMap +find +findSubstr +flatMap +flattenArrays +flattenDeepArray +floor +foldl +foldr +format +get +hypot +isArray +isBoolean +isDecimal +isEmpty +isEven +isFunction +isInteger +isNull +isNumber +isObject +isOdd +isString +join +length +lines +log +log10 +log2 +lstripChars +makeArray +manifestIni +manifestJson +manifestJsonEx +manifestJsonMinified +manifestPython +manifestPythonVars +manifestTomlEx +manifestXmlJsonml +manifestYamlDoc +manifestYamlStream +mantissa +map +mapWithIndex +mapWithKey +max +maxArray +md5 +member +mergePatch +min +minArray +mod +objectFields +objectFieldsAll +objectHas +objectHasAll +objectKeysValues +objectKeysValuesAll +objectRemoveKey +objectValues +objectValuesAll +parseHex +parseInt +parseJson +parseOctal +parseYaml +pow +prune +rad2deg +range +remove +removeAt +repeat +reverse +round +rstripChars +set +setDiff +setInter +setMember +setUnion +sha1 +sha256 +sha3 +sha512 +sign +sin +slice +sort +split +splitLimit +splitLimitR +sqrt 
+startsWith +strReplace +stringChars +stripChars +substr +sum +tan +toString +trace +trim +type +uniq +xnor +xor diff --git a/crates/jrsonnet-std-sig/data/rjsonnet_functions.txt b/crates/jrsonnet-std-sig/data/rjsonnet_functions.txt new file mode 100644 index 00000000..18ee0d02 --- /dev/null +++ b/crates/jrsonnet-std-sig/data/rjsonnet_functions.txt @@ -0,0 +1,147 @@ +abs +acos +all +any +asciiLower +asciiUpper +asin +assertEqual +atan +atan2 +avg +base64 +base64Decode +base64DecodeBytes +ceil +char +clamp +codepoint +contains +cos +count +decodeUTF8 +deepJoin +deg2rad +encodeUTF8 +endsWith +equals +equalsIgnoreCase +escapeStringBash +escapeStringDollars +escapeStringJson +escapeStringPython +escapeStringXml +exp +exponent +extVar +filter +filterMap +find +findSubstr +flatMap +flattenArrays +flattenDeepArray +floor +foldl +foldr +format +get +hypot +isArray +isBoolean +isDecimal +isEmpty +isEven +isFunction +isInteger +isNumber +isObject +isOdd +isString +join +length +lines +log +log10 +log2 +lstripChars +makeArray +manifestIni +manifestJson +manifestJsonEx +manifestJsonMinified +manifestPython +manifestPythonVars +manifestTomlEx +manifestXmlJsonml +manifestYamlDoc +manifestYamlStream +mantissa +map +mapWithIndex +mapWithKey +max +maxArray +md5 +member +mergePatch +min +minArray +native +objectFields +objectFieldsAll +objectHas +objectHasAll +objectHasEx +objectKeysValues +objectKeysValuesAll +objectRemoveKey +objectValues +objectValuesAll +parseHex +parseInt +parseJson +parseOctal +parseYaml +pi +pow +prune +rad2deg +range +remove +removeAt +repeat +reverse +round +rstripChars +set +setDiff +setInter +setMember +setUnion +sha1 +sha256 +sha3 +sha512 +sign +sin +slice +sort +split +splitLimit +splitLimitR +sqrt +startsWith +strReplace +stringChars +stripChars +substr +sum +tan +thisFile +toString +trace +trim +uniq +xnor +xor diff --git a/crates/jrsonnet-std-sig/src/lib.rs b/crates/jrsonnet-std-sig/src/lib.rs index 829532de..5cc4f357 100644 --- 
a/crates/jrsonnet-std-sig/src/lib.rs +++ b/crates/jrsonnet-std-sig/src/lib.rs @@ -75,7 +75,7 @@ impl Default for ReturnSpec { } /// Parameter definition. -#[derive(Debug, Clone, Copy)] +#[derive(Debug, Clone, Copy, PartialEq, Eq)] pub struct Param { /// Parameter name pub name: &'static str, @@ -87,6 +87,7 @@ pub struct Param { impl Param { /// Create a required parameter. + #[must_use] pub const fn req(name: &'static str, ty: ParamType) -> Self { Self { name, @@ -96,6 +97,7 @@ impl Param { } /// Create an optional parameter. + #[must_use] pub const fn opt(name: &'static str, ty: ParamType) -> Self { Self { name, @@ -134,7 +136,7 @@ pub enum NarrowsTo { /// /// When a function like `std.isNumber(x)` returns true, we can narrow /// the type of `x` to `Number` in the then-branch. -#[derive(Debug, Clone, Copy)] +#[derive(Debug, Clone, Copy, PartialEq, Eq)] pub struct FlowTyping { /// Index of the parameter being narrowed (usually 0) pub param_idx: usize, @@ -145,7 +147,7 @@ pub struct FlowTyping { } /// Complete specification for a stdlib function. -#[derive(Debug, Clone)] +#[derive(Debug, Clone, PartialEq, Eq)] pub struct StdFn { /// Function name (without "std." 
prefix) pub name: &'static str, @@ -200,6 +202,15 @@ pub static FNS: &[StdFn] = &[ example: Some(r#"std.type([1,2]) // "array""#), flow_typing: None, }, + StdFn { + name: "isNull", + params: &[Param::req("x", ANY)], + return_spec: ReturnSpec::Fixed(BOOL), + variadic: false, + doc: "Returns true if `x` is null.", + example: Some("std.isNull(null) // true"), + flow_typing: Some(flow(NarrowsTo::Null, Totality::Total)), + }, StdFn { name: "isString", params: &[Param::req("v", ANY)], @@ -837,6 +848,15 @@ pub static FNS: &[StdFn] = &[ example: None, flow_typing: None, }, + StdFn { + name: "trim", + params: &[Param::req("str", STR)], + return_spec: ReturnSpec::Fixed(STR), + variadic: false, + doc: "Trims leading and trailing whitespace.", + example: Some(r#"std.trim(" hello ") // "hello""#), + flow_typing: None, + }, StdFn { name: "asciiLower", params: &[Param::req("s", STR)], @@ -1122,6 +1142,15 @@ pub static FNS: &[StdFn] = &[ example: None, flow_typing: None, }, + StdFn { + name: "pi", + params: &[], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "The mathematical constant pi.", + example: Some("std.pi // 3.141592653589793"), + flow_typing: None, + }, StdFn { name: "sign", params: &[Param::req("n", NUM)], @@ -1198,6 +1227,24 @@ pub static FNS: &[StdFn] = &[ example: None, flow_typing: None, }, + StdFn { + name: "log2", + params: &[Param::req("x", NUM)], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "Base-2 logarithm.", + example: Some("std.log2(8) // 3"), + flow_typing: None, + }, + StdFn { + name: "log10", + params: &[Param::req("x", NUM)], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "Base-10 logarithm.", + example: Some("std.log10(100) // 2"), + flow_typing: None, + }, StdFn { name: "sin", params: &[Param::req("x", NUM)], @@ -1252,6 +1299,42 @@ pub static FNS: &[StdFn] = &[ example: None, flow_typing: None, }, + StdFn { + name: "atan2", + params: &[Param::req("y", NUM), Param::req("x", NUM)], + return_spec: 
ReturnSpec::Fixed(NUM), + variadic: false, + doc: "Arc tangent of y/x using the signs of both arguments.", + example: Some("std.atan2(1, 1) // 0.7853981633974483"), + flow_typing: None, + }, + StdFn { + name: "hypot", + params: &[Param::req("a", NUM), Param::req("b", NUM)], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "Hypotenuse length sqrt(a*a + b*b).", + example: Some("std.hypot(3, 4) // 5"), + flow_typing: None, + }, + StdFn { + name: "deg2rad", + params: &[Param::req("x", NUM)], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "Converts degrees to radians.", + example: Some("std.deg2rad(180) // 3.141592653589793"), + flow_typing: None, + }, + StdFn { + name: "rad2deg", + params: &[Param::req("x", NUM)], + return_spec: ReturnSpec::Fixed(NUM), + variadic: false, + doc: "Converts radians to degrees.", + example: Some("std.rad2deg(std.pi) // 180"), + flow_typing: None, + }, StdFn { name: "floor", params: &[Param::req("x", NUM)], @@ -1653,25 +1736,71 @@ pub static FNS: &[StdFn] = &[ ]; /// Look up a stdlib function by name. +#[must_use] pub fn get_fn(name: &str) -> Option<&'static StdFn> { FNS.iter().find(|f| f.name == name) } /// Get flow typing info for a function by name. 
+#[must_use] pub fn get_flow_typing(name: &str) -> Option<&'static FlowTyping> { get_fn(name).and_then(|f| f.flow_typing.as_ref()) } #[cfg(test)] mod tests { + use std::collections::BTreeSet; + use super::*; + const OFFICIAL_FUNCTIONS: &str = include_str!("../data/official_functions.txt"); + const RJSONNET_FUNCTIONS: &str = include_str!("../data/rjsonnet_functions.txt"); + const JRSONNET_LEGACY_FUNCTIONS: &str = include_str!("../data/jrsonnet_legacy_functions.txt"); + + fn actual_function_names() -> BTreeSet { + FNS.iter().map(|f| f.name.to_string()).collect() + } + + fn parse_name_set(raw: &str) -> BTreeSet { + raw.lines() + .map(str::trim) + .filter(|line| !line.is_empty()) + .map(str::to_string) + .collect() + } + + fn missing_names(actual: &BTreeSet, expected: &BTreeSet) -> Vec { + expected.difference(actual).cloned().collect() + } + #[test] fn test_get_fn() { let f = get_fn("isNumber").unwrap(); - assert_eq!(f.name, "isNumber"); - assert_eq!(f.params.len(), 1); - assert_eq!(f.params[0].name, "v"); + let expected_params = [Param::req("v", ANY)]; + assert_eq!( + ( + f.name, + f.params, + f.return_spec, + f.variadic, + f.doc, + f.example, + f.flow_typing + ), + ( + "isNumber", + &expected_params[..], + ReturnSpec::Fixed(BOOL), + false, + "Returns true if `v` is a number.", + None, + Some(FlowTyping { + param_idx: 0, + narrows_to: NarrowsTo::Number, + totality: Totality::Total, + }) + ) + ); } #[test] @@ -1702,4 +1831,28 @@ mod tests { } } } + + #[test] + fn test_covers_official_stdlib_functions() { + let actual = actual_function_names(); + let official = parse_name_set(OFFICIAL_FUNCTIONS); + let missing = missing_names(&actual, &official); + assert_eq!(missing, Vec::::new()); + } + + #[test] + fn test_covers_rjsonnet_stdlib_functions() { + let actual = actual_function_names(); + let rjsonnet = parse_name_set(RJSONNET_FUNCTIONS); + let missing = missing_names(&actual, &rjsonnet); + assert_eq!(missing, Vec::::new()); + } + + #[test] + fn 
test_preserves_jrsonnet_legacy_stdlib_functions() { + let actual = actual_function_names(); + let legacy = parse_name_set(JRSONNET_LEGACY_FUNCTIONS); + let missing = missing_names(&actual, &legacy); + assert_eq!(missing, Vec::::new()); + } } diff --git a/docs/lsp/HANDLERS.md b/docs/lsp/HANDLERS.md index 307145ee..c5a3e455 100644 --- a/docs/lsp/HANDLERS.md +++ b/docs/lsp/HANDLERS.md @@ -112,7 +112,8 @@ Files: Completion sources include: -- stdlib functions and docs +- stdlib functions/docs from `jrsonnet-std-sig` (surfaced through + `jrsonnet-lsp-stdlib`) - in-scope locals - object fields from inferred types - import paths using file path + configured import roots @@ -123,6 +124,12 @@ Completion sources include: Server capabilities advertise `.` as trigger. Other completion contexts can still return items on explicit completion requests. +Stdlib metadata coverage is enforced by tests in `jrsonnet-std-sig` against: + +- official Jsonnet stdlib function names (`data/official_functions.txt`) +- rjsonnet stdlib function names (`data/rjsonnet_functions.txt`) +- legacy jrsonnet stdlib names (`data/jrsonnet_legacy_functions.txt`) + ### Definition File: `crates/jrsonnet-lsp-handlers/src/definition.rs` @@ -197,7 +204,8 @@ File: `crates/jrsonnet-lsp-handlers/src/hover.rs` Hover combines: - inferred type information -- stdlib documentation/signatures +- stdlib documentation/signatures from `jrsonnet-lsp-stdlib` (generated from + `jrsonnet-std-sig`) - local definition context snippets - definition-site fallback to bound value type when token-level inference is `any` From 1a31e454f4604cd186cfa359ba2c3abe8e4719c5 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Wed, 11 Feb 2026 11:50:32 +0000 Subject: [PATCH 069/210] Recover imports from syntax-broken documents --- crates/jrsonnet-lsp-import/src/graph.rs | 142 +++++++++++++++++++++++- docs/lsp/ARCHITECTURE.md | 12 +- 2 files changed, 148 insertions(+), 6 deletions(-) diff --git a/crates/jrsonnet-lsp-import/src/graph.rs 
b/crates/jrsonnet-lsp-import/src/graph.rs index f6271fc2..b620fd69 100644 --- a/crates/jrsonnet-lsp-import/src/graph.rs +++ b/crates/jrsonnet-lsp-import/src/graph.rs @@ -5,7 +5,7 @@ use std::collections::{HashMap, HashSet, VecDeque}; -use jrsonnet_lsp_document::{CanonicalPath, Document}; +use jrsonnet_lsp_document::{strip_string_quotes, CanonicalPath, Document}; use jrsonnet_rowan_parser::{ nodes::{Bind, Destruct, ExprImport, StmtLocal}, AstNode, AstToken, SyntaxKind, @@ -419,7 +419,8 @@ where F: Fn(&str) -> Option, { let mut occurrences = Vec::new(); - let mut seen_import_ranges = std::collections::HashSet::new(); + let mut seen_expr_import_ranges = std::collections::HashSet::new(); + let mut seen_string_ranges = std::collections::HashSet::new(); let ast = doc.ast(); // First pass: find imports in local statements (these have bindings) @@ -430,7 +431,8 @@ where if let Some((occurrence, import_range)) = parse_bind_import_with_range(&bind, resolve_import) { - seen_import_ranges.insert(import_range); + seen_expr_import_ranges.insert(import_range); + seen_string_ranges.insert(occurrence.import_range); occurrences.push(occurrence); } } @@ -443,17 +445,26 @@ where if node.kind() == SyntaxKind::EXPR_IMPORT { let range = node.text_range(); // Skip if we already captured this import in a local statement - if seen_import_ranges.contains(&range) { + if seen_expr_import_ranges.contains(&range) { continue; } if let Some(import) = ExprImport::cast(node) { if let Some(occurrence) = parse_import_occurrence(&import, None, resolve_import) { + seen_expr_import_ranges.insert(range); + seen_string_ranges.insert(occurrence.import_range); occurrences.push(occurrence); } } } } + // Third pass fallback: recover imports from token stream for syntax-broken files. 
+	occurrences.extend(parse_token_fallback_import_occurrences(
+		doc,
+		resolve_import,
+		&mut seen_string_ranges,
+	));
+
 	occurrences
 }
 
@@ -516,6 +527,104 @@ where
 	})
 }
 
+fn parse_token_fallback_import_occurrences<F>(
+	doc: &Document,
+	resolve_import: &F,
+	seen_string_ranges: &mut std::collections::HashSet<rowan::TextRange>,
+) -> Vec<ImportOccurrence>
+where
+	F: Fn(&str) -> Option<CanonicalPath>,
+{
+	let tokens: Vec<_> = doc
+		.ast()
+		.syntax()
+		.descendants_with_tokens()
+		.filter_map(rowan::NodeOrToken::into_token)
+		.collect();
+
+	let mut occurrences = Vec::new();
+	for (idx, token) in tokens.iter().enumerate() {
+		if !is_import_keyword(token.kind()) {
+			continue;
+		}
+
+		let Some(import_text) = next_non_trivia_token(&tokens, idx + 1) else {
+			continue;
+		};
+		if !is_import_string_token(import_text.kind()) {
+			continue;
+		}
+
+		let import_range = import_text.text_range();
+		if !seen_string_ranges.insert(import_range) {
+			continue;
+		}
+
+		let import_path = strip_string_quotes(import_text.text());
+		if import_path.is_empty() {
+			continue;
+		}
+
+		occurrences.push(ImportOccurrence {
+			entry: ImportEntry {
+				binding_name: binding_name_from_import_token(import_text),
+				resolved_path: resolve_import(&import_path),
+				import_path,
+			},
+			import_range,
+		});
+	}
+
+	occurrences
+}
+
+fn next_non_trivia_token(
+	tokens: &[jrsonnet_rowan_parser::SyntaxToken],
+	start_idx: usize,
+) -> Option<&jrsonnet_rowan_parser::SyntaxToken> {
+	tokens.get(start_idx..)?.iter().find(|token| {
+		!matches!(
+			token.kind(),
+			SyntaxKind::WHITESPACE
+				| SyntaxKind::MULTI_LINE_COMMENT
+				| SyntaxKind::SINGLE_LINE_HASH_COMMENT
+				| SyntaxKind::SINGLE_LINE_SLASH_COMMENT
+		)
+	})
+}
+
+const fn is_import_keyword(kind: SyntaxKind) -> bool {
+	matches!(
+		kind,
+		SyntaxKind::IMPORT_KW | SyntaxKind::IMPORTSTR_KW | SyntaxKind::IMPORTBIN_KW
+	)
+}
+
+const fn is_import_string_token(kind: SyntaxKind) -> bool {
+	matches!(
+		kind,
+		SyntaxKind::STRING_DOUBLE
+			| SyntaxKind::STRING_SINGLE
+			| SyntaxKind::STRING_DOUBLE_VERBATIM
+			| SyntaxKind::STRING_SINGLE_VERBATIM
+			| SyntaxKind::ERROR_STRING_DOUBLE_UNTERMINATED
+			| SyntaxKind::ERROR_STRING_SINGLE_UNTERMINATED
+			| SyntaxKind::ERROR_STRING_DOUBLE_VERBATIM_UNTERMINATED
+			| SyntaxKind::ERROR_STRING_SINGLE_VERBATIM_UNTERMINATED
+	)
+}
+
+fn binding_name_from_import_token(token: &jrsonnet_rowan_parser::SyntaxToken) -> Option<String> {
+	let bind = token.parent()?.ancestors().find_map(Bind::cast)?;
+	let Bind::BindDestruct(bind_destruct) = bind else {
+		return None;
+	};
+	let destruct = jrsonnet_rowan_parser::nodes::BindDestruct::into(&bind_destruct)?;
+	let Destruct::DestructFull(full) = destruct else {
+		return None;
+	};
+	Some(full.name()?.ident_lit()?.text().to_string())
+}
+
 #[cfg(test)]
 mod tests {
 	use std::path::PathBuf;
@@ -579,6 +688,31 @@ mod tests {
 		);
 	}
 
+	#[test]
+	fn test_parse_import_occurrences_fallback_unterminated_string() {
+		let code = r#"local lib = import "lib.jsonnet"#;
+		let doc = Document::new(code.to_string(), DocVersion::new(1));
+		let occurrences = parse_document_import_occurrences(&doc, &simple_resolver);
+		let start = u32::try_from(
+			code.find("\"lib.jsonnet")
+				.expect("unterminated import string should exist"),
+		)
+		.unwrap();
+		let end = u32::try_from(code.len()).unwrap();
+
+		assert_eq!(
+			occurrences,
+			vec![ImportOccurrence {
+				entry: ImportEntry {
+					binding_name: Some("lib".to_string()),
+					import_path: "lib.jsonnet".to_string(),
+					resolved_path: Some(test_path("lib.jsonnet")),
+				},
+				import_range: rowan::TextRange::new(start.into(), end.into()),
+			}]
+		);
+	}
+
 	#[test]
 	fn test_parse_local_import_single_quote() {
 		let code = "local lib = import 'lib.jsonnet'; lib";
diff --git a/docs/lsp/ARCHITECTURE.md b/docs/lsp/ARCHITECTURE.md
index 9db24184..64b26fd7 100644
--- a/docs/lsp/ARCHITECTURE.md
+++ b/docs/lsp/ARCHITECTURE.md
@@ -233,6 +233,14 @@ Cross-file navigation and reference/rename paths then use the graph's resolved
 entries as the source of truth instead of re-resolving import strings
 independently.
+Import extraction for graph updates uses a two-stage strategy: + +1. AST-based import discovery for complete syntax (captures binding names when + available). +2. Token-stream fallback for syntax-broken files (for example unterminated + import strings), so unresolved-import diagnostics and dependency updates do + not silently miss imports while the user is mid-edit. + Navigation semantics: - `textDocument/declaration`: nearest lexical declaration ("where this name is @@ -268,8 +276,8 @@ Diagnostics run in a dedicated background worker (`AsyncDiagnostics` in Diagnostic composition (`crates/jrsonnet-lsp/src/handlers/diagnostics.rs`): 1. Syntax diagnostics from parser errors. -2. Unresolved-import diagnostics from AST import occurrences when parse - succeeded. +2. Unresolved-import diagnostics from import occurrences (AST first with a + token-stream fallback for syntax-broken imports). 3. Lint/type diagnostics from `jrsonnet-lsp-check` when lint is enabled and parse succeeded. 4. Evaluation diagnostic from `Evaluator` (optional) when parse succeeded. 
From 6cefcae567babd2aeffac52bf7a2ea69f0679f2c Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Wed, 11 Feb 2026 11:51:47 +0000 Subject: [PATCH 070/210] Limit TypeProvider pre-analysis to dependency closure --- crates/jrsonnet-lsp-inference/src/provider.rs | 10 +--------- docs/lsp/ARCHITECTURE.md | 2 ++ 2 files changed, 3 insertions(+), 9 deletions(-) diff --git a/crates/jrsonnet-lsp-inference/src/provider.rs b/crates/jrsonnet-lsp-inference/src/provider.rs index d6897a1d..9442f2d2 100644 --- a/crates/jrsonnet-lsp-inference/src/provider.rs +++ b/crates/jrsonnet-lsp-inference/src/provider.rs @@ -99,20 +99,12 @@ impl TypeProvider { doc_source: &D, ) { let graph = self.import_graph.read(); - let resolved_imports_by_file: FxHashMap> = - graph - .all_files() - .map(|graph_path| (graph_path.clone(), resolved_imports_for(&graph, graph_path))) - .collect(); // process_with_dependencies processes in "leaves first" order, // meaning dependencies are analyzed before dependents graph.process_with_dependencies(path, |dep_path| { if let Some(doc) = doc_source.get_document(dep_path) { - let resolved_imports = resolved_imports_by_file - .get(dep_path) - .cloned() - .unwrap_or_default(); + let resolved_imports = resolved_imports_for(&graph, dep_path); analyze_and_cache_with_resolved_imports( dep_path, &doc, diff --git a/docs/lsp/ARCHITECTURE.md b/docs/lsp/ARCHITECTURE.md index 64b26fd7..6ffe6a74 100644 --- a/docs/lsp/ARCHITECTURE.md +++ b/docs/lsp/ARCHITECTURE.md @@ -290,6 +290,8 @@ Tanka-aware `jpath` expansion via `analysis/tanka.rs`. `TypeProvider` in `jrsonnet-lsp-inference` ensures imports are analyzed first: - Uses import graph dependency ordering (`process_with_dependencies`). +- Resolves import maps only for files in the transitive dependency closure of + the requested root (not the full workspace graph). - Uses `TypeCache` to reuse previously inferred top-level types. - Uses `CachingImportResolver` so import expressions can consult cached types. 
From e84be8080e3c36267f1556793362d22efe2cf3d2 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Wed, 11 Feb 2026 11:55:55 +0000 Subject: [PATCH 071/210] Add structural remove-unused binding quickfix --- .../jrsonnet-lsp-handlers/src/code_action.rs | 181 +++++++++++++++--- crates/jrsonnet-lsp/tests/integration_test.rs | 50 ++++- docs/lsp/HANDLERS.md | 3 + 3 files changed, 203 insertions(+), 31 deletions(-) diff --git a/crates/jrsonnet-lsp-handlers/src/code_action.rs b/crates/jrsonnet-lsp-handlers/src/code_action.rs index 4f31e2a6..9cb91ef8 100644 --- a/crates/jrsonnet-lsp-handlers/src/code_action.rs +++ b/crates/jrsonnet-lsp-handlers/src/code_action.rs @@ -5,7 +5,10 @@ use std::collections::HashMap; use jrsonnet_lsp_document::{to_lsp_range, token_at_offset, Document}; -use jrsonnet_rowan_parser::{AstNode, SyntaxKind}; +use jrsonnet_rowan_parser::{ + nodes::{Bind, Destruct, StmtLocal}, + AstNode, SyntaxKind, +}; use lsp_types::{ CodeAction, CodeActionContext, CodeActionKind, CodeActionOrCommand, Diagnostic, NumberOrString, Range, TextEdit, Uri, WorkspaceEdit, @@ -80,6 +83,76 @@ fn unused_variable_action( ) } +fn remove_unused_binding_action( + document: &Document, + uri: &Uri, + diagnostic: &Diagnostic, +) -> Option { + let NumberOrString::String(code) = diagnostic.code.as_ref()? 
else { + return None; + }; + if code != "unused-variable" { + return None; + } + + let text = document.text(); + let line_index = document.line_index(); + let offset = line_index.offset(diagnostic.range.start.into(), text)?; + let ast = document.ast(); + let token = token_at_offset(ast.syntax(), offset)?; + if token.kind() != SyntaxKind::IDENT { + return None; + } + + let bind = token.parent()?.ancestors().find_map(Bind::cast)?; + let Bind::BindDestruct(bind_destruct) = bind else { + return None; + }; + let destruct = jrsonnet_rowan_parser::nodes::BindDestruct::into(&bind_destruct)?; + let Destruct::DestructFull(full) = destruct else { + return None; + }; + let bind_name = full.name()?.ident_lit()?.text().to_string(); + if bind_name.starts_with('_') { + return None; + } + + let stmt_local = bind_destruct + .syntax() + .ancestors() + .find_map(StmtLocal::cast)?; + if stmt_local.binds().count() != 1 { + return None; + } + + let mut changes = HashMap::new(); + changes.insert( + uri.clone(), + vec![TextEdit { + range: to_lsp_range(stmt_local.syntax().text_range(), line_index, text), + new_text: String::new(), + }], + ); + + Some( + CodeAction { + title: format!("Remove unused binding `{bind_name}`"), + kind: Some(CodeActionKind::QUICKFIX), + diagnostics: Some(vec![diagnostic.clone()]), + edit: Some(WorkspaceEdit { + changes: Some(changes), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(false), + disabled: None, + data: None, + } + .into(), + ) +} + /// Build code actions for a given range and context. 
pub fn code_actions( document: &Document, @@ -95,7 +168,14 @@ pub fn code_actions( .diagnostics .iter() .filter(|diagnostic| range_overlaps(diagnostic.range, range)) - .filter_map(|diagnostic| unused_variable_action(document, uri, diagnostic)) + .flat_map(|diagnostic| { + [ + unused_variable_action(document, uri, diagnostic), + remove_unused_binding_action(document, uri, diagnostic), + ] + .into_iter() + .flatten() + }) .collect() } @@ -147,27 +227,50 @@ mod tests { }; let actions = code_actions(&document, &uri(), range(0, 20), &context); - assert_eq!(actions.len(), 1); - - let CodeActionOrCommand::CodeAction(action) = &actions[0] else { - panic!("Expected code action") - }; - assert_eq!(action.kind, Some(CodeActionKind::QUICKFIX)); - assert_eq!(action.is_preferred, Some(true)); - - let edit = action.edit.as_ref().expect("action should include edit"); - let uri = uri(); - let edits = edit - .changes - .as_ref() - .and_then(|c| c.get(&uri)) - .expect("edits for document should exist"); assert_eq!( - edits, - &vec![TextEdit { - range: range(6, 7), - new_text: "_x".to_string(), - }] + actions, + vec![ + CodeActionOrCommand::CodeAction(CodeAction { + title: "Prefix `x` with `_`".to_string(), + kind: Some(CodeActionKind::QUICKFIX), + diagnostics: Some(vec![diag_unused(range(6, 7))]), + edit: Some(WorkspaceEdit { + changes: Some(HashMap::from([( + uri(), + vec![TextEdit { + range: range(6, 7), + new_text: "_x".to_string(), + }], + )])), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(true), + disabled: None, + data: None, + }), + CodeActionOrCommand::CodeAction(CodeAction { + title: "Remove unused binding `x`".to_string(), + kind: Some(CodeActionKind::QUICKFIX), + diagnostics: Some(vec![diag_unused(range(6, 7))]), + edit: Some(WorkspaceEdit { + changes: Some(HashMap::from([( + uri(), + vec![TextEdit { + range: range(0, 12), + new_text: String::new(), + }], + )])), + document_changes: None, + change_annotations: None, + 
}), + command: None, + is_preferred: Some(false), + disabled: None, + data: None, + }), + ] ); } @@ -196,4 +299,38 @@ mod tests { let actions = code_actions(&document, &uri(), range(0, 20), &context); assert!(actions.is_empty()); } + + #[test] + fn test_remove_unused_binding_requires_single_bind_statement() { + let document = Document::new("local x = 1, y = 2; y".to_string(), DocVersion::new(1)); + let context = CodeActionContext { + diagnostics: vec![diag_unused(range(6, 7))], + only: None, + trigger_kind: None, + }; + + assert_eq!( + code_actions(&document, &uri(), range(0, 22), &context), + vec![CodeActionOrCommand::CodeAction(CodeAction { + title: "Prefix `x` with `_`".to_string(), + kind: Some(CodeActionKind::QUICKFIX), + diagnostics: Some(vec![diag_unused(range(6, 7))]), + edit: Some(WorkspaceEdit { + changes: Some(HashMap::from([( + uri(), + vec![TextEdit { + range: range(6, 7), + new_text: "_x".to_string(), + }], + )])), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(true), + disabled: None, + data: None, + })] + ); + } } diff --git a/crates/jrsonnet-lsp/tests/integration_test.rs b/crates/jrsonnet-lsp/tests/integration_test.rs index 638b613b..8c52cdf0 100644 --- a/crates/jrsonnet-lsp/tests/integration_test.rs +++ b/crates/jrsonnet-lsp/tests/integration_test.rs @@ -616,9 +616,10 @@ fn expected_unused_variable_quickfix( uri: &str, diagnostic: lsp_types::Diagnostic, ) -> Vec { - let mut changes = std::collections::HashMap::new(); - changes.insert( - uri.parse().unwrap(), + let parsed_uri: lsp_types::Uri = uri.parse().unwrap(); + let mut prefix_changes = std::collections::HashMap::new(); + prefix_changes.insert( + parsed_uri.clone(), vec![lsp_types::TextEdit { range: lsp_types::Range { start: Position { @@ -633,14 +634,31 @@ fn expected_unused_variable_quickfix( new_text: "_x".to_string(), }], ); + let mut remove_changes = std::collections::HashMap::new(); + remove_changes.insert( + parsed_uri, + 
vec![lsp_types::TextEdit { + range: lsp_types::Range { + start: Position { + line: 0, + character: 0, + }, + end: Position { + line: 0, + character: 12, + }, + }, + new_text: String::new(), + }], + ); - vec![lsp_types::CodeActionOrCommand::CodeAction( - lsp_types::CodeAction { + vec![ + lsp_types::CodeActionOrCommand::CodeAction(lsp_types::CodeAction { title: "Prefix `x` with `_`".to_string(), kind: Some(lsp_types::CodeActionKind::QUICKFIX), - diagnostics: Some(vec![diagnostic]), + diagnostics: Some(vec![diagnostic.clone()]), edit: Some(lsp_types::WorkspaceEdit { - changes: Some(changes), + changes: Some(prefix_changes), document_changes: None, change_annotations: None, }), @@ -648,8 +666,22 @@ fn expected_unused_variable_quickfix( is_preferred: Some(true), disabled: None, data: None, - }, - )] + }), + lsp_types::CodeActionOrCommand::CodeAction(lsp_types::CodeAction { + title: "Remove unused binding `x`".to_string(), + kind: Some(lsp_types::CodeActionKind::QUICKFIX), + diagnostics: Some(vec![diagnostic]), + edit: Some(lsp_types::WorkspaceEdit { + changes: Some(remove_changes), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(false), + disabled: None, + data: None, + }), + ] } fn find_references_command_args(uri: &str, include_declaration: bool) -> Vec { diff --git a/docs/lsp/HANDLERS.md b/docs/lsp/HANDLERS.md index c5a3e455..0bf1bd42 100644 --- a/docs/lsp/HANDLERS.md +++ b/docs/lsp/HANDLERS.md @@ -72,6 +72,9 @@ File: `crates/jrsonnet-lsp-handlers/src/code_action.rs` - Produces quick fixes from diagnostic context (for example unused-variable fixes). + Current unused-variable quickfixes include: + `Prefix with _` and `Remove unused binding ` (for single-binding + `local` statements). - Uses current document plus selected range and diagnostics from request params. - Returned through sync `textDocument/codeAction`. 
From 25240d48a54da549ba84757eb6ced6f2128f2d2b Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Wed, 11 Feb 2026 12:22:35 +0000 Subject: [PATCH 072/210] lsp-handlers: introduce typed remove-unused action policy --- .../jrsonnet-lsp-handlers/src/code_action.rs | 39 +++++++++++++++++++ 1 file changed, 39 insertions(+) diff --git a/crates/jrsonnet-lsp-handlers/src/code_action.rs b/crates/jrsonnet-lsp-handlers/src/code_action.rs index 9cb91ef8..b8d4a1df 100644 --- a/crates/jrsonnet-lsp-handlers/src/code_action.rs +++ b/crates/jrsonnet-lsp-handlers/src/code_action.rs @@ -14,6 +14,31 @@ use lsp_types::{ Range, TextEdit, Uri, WorkspaceEdit, }; +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +enum RemovalFlavor { + All, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +enum CommentPolicy { + None, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +struct RemoveUnusedPolicy { + flavor: RemovalFlavor, + comments: CommentPolicy, +} + +impl RemoveUnusedPolicy { + const fn all() -> Self { + Self { + flavor: RemovalFlavor::All, + comments: CommentPolicy::None, + } + } +} + fn range_overlaps(a: Range, b: Range) -> bool { (a.start.line, a.start.character) <= (b.end.line, b.end.character) && (b.start.line, b.start.character) <= (a.end.line, a.end.character) @@ -87,6 +112,15 @@ fn remove_unused_binding_action( document: &Document, uri: &Uri, diagnostic: &Diagnostic, +) -> Option { + remove_unused_binding_action_with_policy(document, uri, diagnostic, RemoveUnusedPolicy::all()) +} + +fn remove_unused_binding_action_with_policy( + document: &Document, + uri: &Uri, + diagnostic: &Diagnostic, + policy: RemoveUnusedPolicy, ) -> Option { let NumberOrString::String(code) = diagnostic.code.as_ref()? 
else { return None; @@ -94,6 +128,11 @@ fn remove_unused_binding_action( if code != "unused-variable" { return None; } + if !matches!(policy.flavor, RemovalFlavor::All) + || !matches!(policy.comments, CommentPolicy::None) + { + return None; + } let text = document.text(); let line_index = document.line_index(); From b21b5efea8fcb6f17ef4b72e5e7d4bf1c859cfa1 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Wed, 11 Feb 2026 12:31:06 +0000 Subject: [PATCH 073/210] lsp-handlers: plan remove-unused edits for local/object bindings --- .../jrsonnet-lsp-handlers/src/code_action.rs | 280 ++++++++++++++---- 1 file changed, 229 insertions(+), 51 deletions(-) diff --git a/crates/jrsonnet-lsp-handlers/src/code_action.rs b/crates/jrsonnet-lsp-handlers/src/code_action.rs index b8d4a1df..584c90a0 100644 --- a/crates/jrsonnet-lsp-handlers/src/code_action.rs +++ b/crates/jrsonnet-lsp-handlers/src/code_action.rs @@ -6,8 +6,9 @@ use std::collections::HashMap; use jrsonnet_lsp_document::{to_lsp_range, token_at_offset, Document}; use jrsonnet_rowan_parser::{ - nodes::{Bind, Destruct, StmtLocal}, - AstNode, SyntaxKind, + nodes::{BindDestruct, BindFunction, Destruct, Expr, ExprBase, MemberBindStmt, StmtLocal}, + rowan::{TextRange, TextSize}, + AstNode, SyntaxKind, SyntaxNode, SyntaxToken, }; use lsp_types::{ CodeAction, CodeActionContext, CodeActionKind, CodeActionOrCommand, Diagnostic, NumberOrString, @@ -39,6 +40,131 @@ impl RemoveUnusedPolicy { } } +#[derive(Debug, Clone, PartialEq, Eq)] +struct RemoveUnusedEdit { + binding_name: String, + range: TextRange, +} + +const fn is_trivia_kind(kind: SyntaxKind) -> bool { + matches!( + kind, + SyntaxKind::WHITESPACE + | SyntaxKind::MULTI_LINE_COMMENT + | SyntaxKind::SINGLE_LINE_HASH_COMMENT + | SyntaxKind::SINGLE_LINE_SLASH_COMMENT + ) +} + +fn previous_significant_token(token: &SyntaxToken) -> Option { + std::iter::successors(token.prev_token(), SyntaxToken::prev_token) + .find(|candidate| !is_trivia_kind(candidate.kind())) +} + +fn 
next_significant_token(token: &SyntaxToken) -> Option { + std::iter::successors(token.next_token(), SyntaxToken::next_token) + .find(|candidate| !is_trivia_kind(candidate.kind())) +} + +fn single_line_trivia_end_after(token: &SyntaxToken) -> Option { + let trivia = token.next_token()?; + if !is_trivia_kind(trivia.kind()) || trivia.text().contains('\n') { + return None; + } + Some(trivia.text_range().end()) +} + +fn remove_range_for_list_entry(entry: &SyntaxNode) -> Option { + let first = entry.first_token()?; + let last = entry.last_token()?; + + if let Some(next) = next_significant_token(&last) { + if next.kind() == SyntaxKind::COMMA { + let end = + single_line_trivia_end_after(&next).unwrap_or_else(|| next.text_range().end()); + return Some(TextRange::new(first.text_range().start(), end)); + } + } + + if let Some(previous) = previous_significant_token(&first) { + if previous.kind() == SyntaxKind::COMMA { + return Some(TextRange::new( + previous.text_range().start(), + last.text_range().end(), + )); + } + } + + Some(entry.text_range()) +} + +fn is_import_expression(expr: Expr) -> bool { + match expr.expr_base() { + Some(ExprBase::ExprImport(_)) => true, + Some(ExprBase::ExprParened(parened)) => parened.expr().is_some_and(is_import_expression), + _ => false, + } +} + +fn remove_edit_for_bind( + binding_name: String, + bind_node: SyntaxNode, + value_expr: Option, +) -> Option<(RemoveUnusedEdit, bool)> { + if binding_name.starts_with('_') { + return None; + } + + let removal_range = if let Some(stmt_local) = bind_node.ancestors().find_map(StmtLocal::cast) { + let bind_count = stmt_local.binds().count(); + if bind_count == 1 { + stmt_local.syntax().text_range() + } else { + remove_range_for_list_entry(&bind_node)? + } + } else if let Some(member_bind_stmt) = bind_node.ancestors().find_map(MemberBindStmt::cast) { + remove_range_for_list_entry(member_bind_stmt.syntax())? 
+ } else { + return None; + }; + + let import_binding = value_expr.is_some_and(is_import_expression); + Some(( + RemoveUnusedEdit { + binding_name, + range: removal_range, + }, + import_binding, + )) +} + +fn remove_unused_edit_for_diagnostic( + document: &Document, + diagnostic: &Diagnostic, +) -> Option<(RemoveUnusedEdit, bool)> { + let text = document.text(); + let line_index = document.line_index(); + let offset = line_index.offset(diagnostic.range.start.into(), text)?; + let ast = document.ast(); + let token = token_at_offset(ast.syntax(), offset)?; + if token.kind() != SyntaxKind::IDENT { + return None; + } + + if let Some(bind_destruct) = token.parent()?.ancestors().find_map(BindDestruct::cast) { + let destruct = BindDestruct::into(&bind_destruct)?; + let Destruct::DestructFull(full) = destruct else { + return None; + }; + let name = full.name()?.ident_lit()?.text().to_string(); + return remove_edit_for_bind(name, bind_destruct.syntax().clone(), bind_destruct.value()); + } + + let bind_function = token.parent()?.ancestors().find_map(BindFunction::cast)?; + let name = bind_function.name()?.ident_lit()?.text().to_string(); + remove_edit_for_bind(name, bind_function.syntax().clone(), bind_function.value()) +} + fn range_overlaps(a: Range, b: Range) -> bool { (a.start.line, a.start.character) <= (b.end.line, b.end.character) && (b.start.line, b.start.character) <= (a.end.line, a.end.character) @@ -136,46 +262,20 @@ fn remove_unused_binding_action_with_policy( let text = document.text(); let line_index = document.line_index(); - let offset = line_index.offset(diagnostic.range.start.into(), text)?; - let ast = document.ast(); - let token = token_at_offset(ast.syntax(), offset)?; - if token.kind() != SyntaxKind::IDENT { - return None; - } - - let bind = token.parent()?.ancestors().find_map(Bind::cast)?; - let Bind::BindDestruct(bind_destruct) = bind else { - return None; - }; - let destruct = jrsonnet_rowan_parser::nodes::BindDestruct::into(&bind_destruct)?; - let 
Destruct::DestructFull(full) = destruct else { - return None; - }; - let bind_name = full.name()?.ident_lit()?.text().to_string(); - if bind_name.starts_with('_') { - return None; - } - - let stmt_local = bind_destruct - .syntax() - .ancestors() - .find_map(StmtLocal::cast)?; - if stmt_local.binds().count() != 1 { - return None; - } + let (edit, _import_binding) = remove_unused_edit_for_diagnostic(document, diagnostic)?; let mut changes = HashMap::new(); changes.insert( uri.clone(), vec![TextEdit { - range: to_lsp_range(stmt_local.syntax().text_range(), line_index, text), + range: to_lsp_range(edit.range, line_index, text), new_text: String::new(), }], ); Some( CodeAction { - title: format!("Remove unused binding `{bind_name}`"), + title: format!("Remove unused binding `{}`", edit.binding_name), kind: Some(CodeActionKind::QUICKFIX), diagnostics: Some(vec![diagnostic.clone()]), edit: Some(WorkspaceEdit { @@ -340,7 +440,7 @@ mod tests { } #[test] - fn test_remove_unused_binding_requires_single_bind_statement() { + fn test_remove_unused_binding_handles_multi_bind_statement() { let document = Document::new("local x = 1, y = 2; y".to_string(), DocVersion::new(1)); let context = CodeActionContext { diagnostics: vec![diag_unused(range(6, 7))], @@ -350,26 +450,104 @@ mod tests { assert_eq!( code_actions(&document, &uri(), range(0, 22), &context), - vec![CodeActionOrCommand::CodeAction(CodeAction { - title: "Prefix `x` with `_`".to_string(), - kind: Some(CodeActionKind::QUICKFIX), - diagnostics: Some(vec![diag_unused(range(6, 7))]), - edit: Some(WorkspaceEdit { - changes: Some(HashMap::from([( - uri(), - vec![TextEdit { - range: range(6, 7), - new_text: "_x".to_string(), - }], - )])), - document_changes: None, - change_annotations: None, + vec![ + CodeActionOrCommand::CodeAction(CodeAction { + title: "Prefix `x` with `_`".to_string(), + kind: Some(CodeActionKind::QUICKFIX), + diagnostics: Some(vec![diag_unused(range(6, 7))]), + edit: Some(WorkspaceEdit { + changes: 
Some(HashMap::from([( + uri(), + vec![TextEdit { + range: range(6, 7), + new_text: "_x".to_string(), + }], + )])), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(true), + disabled: None, + data: None, }), - command: None, - is_preferred: Some(true), - disabled: None, - data: None, - })] + CodeActionOrCommand::CodeAction(CodeAction { + title: "Remove unused binding `x`".to_string(), + kind: Some(CodeActionKind::QUICKFIX), + diagnostics: Some(vec![diag_unused(range(6, 7))]), + edit: Some(WorkspaceEdit { + changes: Some(HashMap::from([( + uri(), + vec![TextEdit { + range: range(6, 13), + new_text: String::new(), + }], + )])), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(false), + disabled: None, + data: None, + }), + ] + ); + } + + #[test] + fn test_remove_unused_binding_handles_object_local() { + let document = Document::new("{ local x = 1, a: x }".to_string(), DocVersion::new(1)); + let context = CodeActionContext { + diagnostics: vec![diag_unused(range(8, 9))], + only: None, + trigger_kind: None, + }; + + assert_eq!( + code_actions(&document, &uri(), range(0, 21), &context), + vec![ + CodeActionOrCommand::CodeAction(CodeAction { + title: "Prefix `x` with `_`".to_string(), + kind: Some(CodeActionKind::QUICKFIX), + diagnostics: Some(vec![diag_unused(range(8, 9))]), + edit: Some(WorkspaceEdit { + changes: Some(HashMap::from([( + uri(), + vec![TextEdit { + range: range(8, 9), + new_text: "_x".to_string(), + }], + )])), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(true), + disabled: None, + data: None, + }), + CodeActionOrCommand::CodeAction(CodeAction { + title: "Remove unused binding `x`".to_string(), + kind: Some(CodeActionKind::QUICKFIX), + diagnostics: Some(vec![diag_unused(range(8, 9))]), + edit: Some(WorkspaceEdit { + changes: Some(HashMap::from([( + uri(), + vec![TextEdit { + range: range(2, 15), + new_text: 
String::new(), + }], + )])), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(false), + disabled: None, + data: None, + }), + ] ); } } From 8e7ee88f95d0ff2a148963b309adc78dd40f52cc Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Wed, 11 Feb 2026 12:39:51 +0000 Subject: [PATCH 074/210] lsp: add source.fixAll remove-unused code action --- .../jrsonnet-lsp-handlers/src/code_action.rs | 446 ++++++++++++++++-- crates/jrsonnet-lsp/src/server.rs | 5 +- crates/jrsonnet-lsp/tests/integration_test.rs | 87 +++- 3 files changed, 500 insertions(+), 38 deletions(-) diff --git a/crates/jrsonnet-lsp-handlers/src/code_action.rs b/crates/jrsonnet-lsp-handlers/src/code_action.rs index 584c90a0..aae4905a 100644 --- a/crates/jrsonnet-lsp-handlers/src/code_action.rs +++ b/crates/jrsonnet-lsp-handlers/src/code_action.rs @@ -2,11 +2,14 @@ //! //! Provides quick fixes for diagnostics. -use std::collections::HashMap; +use std::collections::{HashMap, HashSet}; use jrsonnet_lsp_document::{to_lsp_range, token_at_offset, Document}; use jrsonnet_rowan_parser::{ - nodes::{BindDestruct, BindFunction, Destruct, Expr, ExprBase, MemberBindStmt, StmtLocal}, + nodes::{ + Bind, BindDestruct, BindFunction, Destruct, Expr, ExprBase, Member, MemberBindStmt, + ObjBodyMemberList, StmtLocal, + }, rowan::{TextRange, TextSize}, AstNode, SyntaxKind, SyntaxNode, SyntaxToken, }; @@ -15,6 +18,8 @@ use lsp_types::{ Range, TextEdit, Uri, WorkspaceEdit, }; +const UNUSED_VARIABLE_CODE: &str = "unused-variable"; + #[derive(Debug, Clone, Copy, PartialEq, Eq)] enum RemovalFlavor { All, @@ -98,6 +103,195 @@ fn remove_range_for_list_entry(entry: &SyntaxNode) -> Option { Some(entry.text_range()) } +fn remove_range_for_entry_run( + entries: &[SyntaxNode], + run_start: usize, + run_end: usize, +) -> Option { + let first = entries.get(run_start)?.first_token()?; + let last = entries.get(run_end)?.last_token()?; + + let start = if run_start == 0 { + first.text_range().start() + } 
else { + let previous = previous_significant_token(&first)?; + if previous.kind() == SyntaxKind::COMMA { + previous.text_range().start() + } else { + first.text_range().start() + } + }; + + let end = if run_start == 0 && run_end + 1 < entries.len() { + match next_significant_token(&last) { + Some(next) if next.kind() == SyntaxKind::COMMA => { + single_line_trivia_end_after(&next).unwrap_or_else(|| next.text_range().end()) + } + _ => last.text_range().end(), + } + } else { + last.text_range().end() + }; + + Some(TextRange::new(start, end)) +} + +fn contiguous_runs(indices: &[usize]) -> Vec<(usize, usize)> { + if indices.is_empty() { + return Vec::new(); + } + + let mut runs = Vec::new(); + let mut run_start = indices[0]; + let mut previous = indices[0]; + for &index in indices.iter().skip(1) { + if index == previous + 1 { + previous = index; + continue; + } + runs.push((run_start, previous)); + run_start = index; + previous = index; + } + runs.push((run_start, previous)); + runs +} + +fn is_unused_variable_diagnostic(diagnostic: &Diagnostic) -> bool { + matches!( + diagnostic.code.as_ref(), + Some(NumberOrString::String(code)) if code == UNUSED_VARIABLE_CODE + ) +} + +fn bind_name_range(bind: &Bind) -> Option { + match bind { + Bind::BindDestruct(bind_destruct) => { + let destruct = BindDestruct::into(bind_destruct)?; + let Destruct::DestructFull(full) = destruct else { + return None; + }; + Some(full.name()?.syntax().text_range()) + } + Bind::BindFunction(bind_function) => Some(bind_function.name()?.syntax().text_range()), + } +} + +fn binding_name_range_for_diagnostic( + document: &Document, + diagnostic: &Diagnostic, +) -> Option { + if !is_unused_variable_diagnostic(diagnostic) { + return None; + } + + let text = document.text(); + let line_index = document.line_index(); + let offset = line_index.offset(diagnostic.range.start.into(), text)?; + let ast = document.ast(); + let token = token_at_offset(ast.syntax(), offset)?; + if token.kind() != SyntaxKind::IDENT { 
+ return None; + } + Some(token.text_range()) +} + +fn unused_binding_name_ranges( + document: &Document, + diagnostics: &[Diagnostic], +) -> HashSet { + diagnostics + .iter() + .filter_map(|diagnostic| binding_name_range_for_diagnostic(document, diagnostic)) + .collect() +} + +fn removal_ranges_for_stmt_local( + stmt_local: &StmtLocal, + unused_name_ranges: &HashSet, +) -> Vec { + let binds: Vec = stmt_local.binds().collect(); + let unused_indices: Vec = binds + .iter() + .enumerate() + .filter_map(|(idx, bind)| { + let name_range = bind_name_range(bind)?; + unused_name_ranges.contains(&name_range).then_some(idx) + }) + .collect(); + if unused_indices.is_empty() { + return Vec::new(); + } + if unused_indices.len() == binds.len() { + return vec![stmt_local.syntax().text_range()]; + } + + let entries: Vec = binds.iter().map(|bind| bind.syntax().clone()).collect(); + contiguous_runs(&unused_indices) + .into_iter() + .filter_map(|(run_start, run_end)| remove_range_for_entry_run(&entries, run_start, run_end)) + .collect() +} + +fn removal_ranges_for_member_list( + member_list: &ObjBodyMemberList, + unused_name_ranges: &HashSet, +) -> Vec { + let members: Vec = member_list.members().collect(); + let unused_indices: Vec = members + .iter() + .enumerate() + .filter_map(|(idx, member)| { + let Member::MemberBindStmt(bind_stmt) = member else { + return None; + }; + let bind = bind_stmt.obj_local()?.bind()?; + let name_range = bind_name_range(&bind)?; + unused_name_ranges.contains(&name_range).then_some(idx) + }) + .collect(); + if unused_indices.is_empty() { + return Vec::new(); + } + + let entries: Vec = members + .iter() + .map(|member| member.syntax().clone()) + .collect(); + contiguous_runs(&unused_indices) + .into_iter() + .filter_map(|(run_start, run_end)| remove_range_for_entry_run(&entries, run_start, run_end)) + .collect() +} + +fn removal_ranges_for_fix_all(document: &Document, diagnostics: &[Diagnostic]) -> Vec { + let unused_name_ranges = 
unused_binding_name_ranges(document, diagnostics); + if unused_name_ranges.is_empty() { + return Vec::new(); + } + + let mut ranges = Vec::new(); + let ast = document.ast(); + for stmt_local in ast.syntax().descendants().filter_map(StmtLocal::cast) { + ranges.extend(removal_ranges_for_stmt_local( + &stmt_local, + &unused_name_ranges, + )); + } + for member_list in ast + .syntax() + .descendants() + .filter_map(ObjBodyMemberList::cast) + { + ranges.extend(removal_ranges_for_member_list( + &member_list, + &unused_name_ranges, + )); + } + ranges.sort_unstable_by_key(|range| (range.start(), range.end())); + ranges +} + fn is_import_expression(expr: Expr) -> bool { match expr.expr_base() { Some(ExprBase::ExprImport(_)) => true, @@ -179,15 +373,22 @@ fn wants_quickfix(context: &CodeActionContext) -> bool { } } +fn wants_fix_all(context: &CodeActionContext) -> bool { + match &context.only { + None => true, + Some(kinds) => kinds.iter().any(|kind| { + kind.as_str() + .starts_with(CodeActionKind::SOURCE_FIX_ALL.as_str()) + }), + } +} + fn unused_variable_action( document: &Document, uri: &Uri, diagnostic: &Diagnostic, ) -> Option { - let NumberOrString::String(code) = diagnostic.code.as_ref()? else { - return None; - }; - if code != "unused-variable" { + if !is_unused_variable_diagnostic(diagnostic) { return None; } @@ -248,10 +449,7 @@ fn remove_unused_binding_action_with_policy( diagnostic: &Diagnostic, policy: RemoveUnusedPolicy, ) -> Option { - let NumberOrString::String(code) = diagnostic.code.as_ref()? 
else { - return None; - }; - if code != "unused-variable" { + if !is_unused_variable_diagnostic(diagnostic) { return None; } if !matches!(policy.flavor, RemovalFlavor::All) @@ -292,6 +490,70 @@ fn remove_unused_binding_action_with_policy( ) } +fn remove_all_unused_bindings_action_with_policy( + document: &Document, + uri: &Uri, + context: &CodeActionContext, + policy: RemoveUnusedPolicy, +) -> Option { + if !wants_fix_all(context) { + return None; + } + if !matches!(policy.flavor, RemovalFlavor::All) + || !matches!(policy.comments, CommentPolicy::None) + { + return None; + } + + let diagnostics: Vec = context + .diagnostics + .iter() + .filter(|diagnostic| is_unused_variable_diagnostic(diagnostic)) + .cloned() + .collect(); + if diagnostics.is_empty() { + return None; + } + + let mut ranges = removal_ranges_for_fix_all(document, &diagnostics); + if ranges.is_empty() { + return None; + } + ranges.sort_unstable_by_key(|range| (range.start(), range.end())); + let text = document.text(); + let line_index = document.line_index(); + + let edits: Vec = ranges + .into_iter() + .rev() + .map(|range| TextEdit { + range: to_lsp_range(range, line_index, text), + new_text: String::new(), + }) + .collect(); + + let mut changes = HashMap::new(); + changes.insert(uri.clone(), edits); + + Some( + CodeAction { + title: "Remove all unused bindings".to_string(), + kind: Some(CodeActionKind::SOURCE_FIX_ALL), + diagnostics: Some(diagnostics), + edit: Some(WorkspaceEdit { + changes: Some(changes), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(false), + disabled: None, + data: None, + } + .into(), + ) +} + /// Build code actions for a given range and context. 
pub fn code_actions( document: &Document, @@ -299,23 +561,35 @@ pub fn code_actions( range: Range, context: &CodeActionContext, ) -> Vec { - if !wants_quickfix(context) { - return Vec::new(); + let mut actions = Vec::new(); + + if wants_quickfix(context) { + actions.extend( + context + .diagnostics + .iter() + .filter(|diagnostic| range_overlaps(diagnostic.range, range)) + .flat_map(|diagnostic| { + [ + unused_variable_action(document, uri, diagnostic), + remove_unused_binding_action(document, uri, diagnostic), + ] + .into_iter() + .flatten() + }), + ); } - context - .diagnostics - .iter() - .filter(|diagnostic| range_overlaps(diagnostic.range, range)) - .flat_map(|diagnostic| { - [ - unused_variable_action(document, uri, diagnostic), - remove_unused_binding_action(document, uri, diagnostic), - ] - .into_iter() - .flatten() - }) - .collect() + if let Some(fix_all_action) = remove_all_unused_bindings_action_with_policy( + document, + uri, + context, + RemoveUnusedPolicy::all(), + ) { + actions.push(fix_all_action); + } + + actions } #[cfg(test)] @@ -329,7 +603,7 @@ mod tests { Diagnostic { range, severity: Some(DiagnosticSeverity::WARNING), - code: Some(NumberOrString::String("unused-variable".to_string())), + code: Some(NumberOrString::String(UNUSED_VARIABLE_CODE.to_string())), code_description: None, source: Some("jrsonnet-lint".to_string()), message: "unused variable".to_string(), @@ -409,12 +683,32 @@ mod tests { disabled: None, data: None, }), + CodeActionOrCommand::CodeAction(CodeAction { + title: "Remove all unused bindings".to_string(), + kind: Some(CodeActionKind::SOURCE_FIX_ALL), + diagnostics: Some(vec![diag_unused(range(6, 7))]), + edit: Some(WorkspaceEdit { + changes: Some(HashMap::from([( + uri(), + vec![TextEdit { + range: range(0, 12), + new_text: String::new(), + }], + )])), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(false), + disabled: None, + data: None, + }), ] ); } #[test] - fn 
test_code_action_respects_only_filter() { + fn test_code_action_returns_fix_all_for_source_fix_all_filter() { let document = Document::new("local x = 1; 42".to_string(), DocVersion::new(1)); let context = CodeActionContext { diagnostics: vec![diag_unused(range(6, 7))], @@ -423,7 +717,29 @@ mod tests { }; let actions = code_actions(&document, &uri(), range(0, 20), &context); - assert!(actions.is_empty()); + assert_eq!( + actions, + vec![CodeActionOrCommand::CodeAction(CodeAction { + title: "Remove all unused bindings".to_string(), + kind: Some(CodeActionKind::SOURCE_FIX_ALL), + diagnostics: Some(vec![diag_unused(range(6, 7))]), + edit: Some(WorkspaceEdit { + changes: Some(HashMap::from([( + uri(), + vec![TextEdit { + range: range(0, 12), + new_text: String::new(), + }], + )])), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(false), + disabled: None, + data: None, + })] + ); } #[test] @@ -491,6 +807,26 @@ mod tests { disabled: None, data: None, }), + CodeActionOrCommand::CodeAction(CodeAction { + title: "Remove all unused bindings".to_string(), + kind: Some(CodeActionKind::SOURCE_FIX_ALL), + diagnostics: Some(vec![diag_unused(range(6, 7))]), + edit: Some(WorkspaceEdit { + changes: Some(HashMap::from([( + uri(), + vec![TextEdit { + range: range(6, 13), + new_text: String::new(), + }], + )])), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(false), + disabled: None, + data: None, + }), ] ); } @@ -547,7 +883,61 @@ mod tests { disabled: None, data: None, }), + CodeActionOrCommand::CodeAction(CodeAction { + title: "Remove all unused bindings".to_string(), + kind: Some(CodeActionKind::SOURCE_FIX_ALL), + diagnostics: Some(vec![diag_unused(range(8, 9))]), + edit: Some(WorkspaceEdit { + changes: Some(HashMap::from([( + uri(), + vec![TextEdit { + range: range(2, 15), + new_text: String::new(), + }], + )])), + document_changes: None, + change_annotations: None, + }), + command: 
None, + is_preferred: Some(false), + disabled: None, + data: None, + }), ] ); } + + #[test] + fn test_fix_all_removes_entire_local_when_all_bindings_unused() { + let document = Document::new("local x = 1, y = 2; 42".to_string(), DocVersion::new(1)); + let context = CodeActionContext { + diagnostics: vec![diag_unused(range(6, 7)), diag_unused(range(13, 14))], + only: Some(vec![CodeActionKind::SOURCE_FIX_ALL]), + trigger_kind: None, + }; + + assert_eq!( + code_actions(&document, &uri(), range(0, 22), &context), + vec![CodeActionOrCommand::CodeAction(CodeAction { + title: "Remove all unused bindings".to_string(), + kind: Some(CodeActionKind::SOURCE_FIX_ALL), + diagnostics: Some(vec![diag_unused(range(6, 7)), diag_unused(range(13, 14))]), + edit: Some(WorkspaceEdit { + changes: Some(HashMap::from([( + uri(), + vec![TextEdit { + range: range(0, 19), + new_text: String::new(), + }], + )])), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(false), + disabled: None, + data: None, + })] + ); + } } diff --git a/crates/jrsonnet-lsp/src/server.rs b/crates/jrsonnet-lsp/src/server.rs index 63be97df..8572284b 100644 --- a/crates/jrsonnet-lsp/src/server.rs +++ b/crates/jrsonnet-lsp/src/server.rs @@ -466,7 +466,10 @@ impl Server { document_highlight_provider: Some(OneOf::Left(true)), inlay_hint_provider: Some(OneOf::Left(true)), code_action_provider: Some(CodeActionProviderCapability::Options(CodeActionOptions { - code_action_kinds: Some(vec![CodeActionKind::QUICKFIX]), + code_action_kinds: Some(vec![ + CodeActionKind::QUICKFIX, + CodeActionKind::SOURCE_FIX_ALL, + ]), work_done_progress_options: WorkDoneProgressOptions::default(), resolve_provider: Some(false), })), diff --git a/crates/jrsonnet-lsp/tests/integration_test.rs b/crates/jrsonnet-lsp/tests/integration_test.rs index 8c52cdf0..213434d1 100644 --- a/crates/jrsonnet-lsp/tests/integration_test.rs +++ b/crates/jrsonnet-lsp/tests/integration_test.rs @@ -636,6 +636,23 @@ fn 
expected_unused_variable_quickfix( ); let mut remove_changes = std::collections::HashMap::new(); remove_changes.insert( + parsed_uri.clone(), + vec![lsp_types::TextEdit { + range: lsp_types::Range { + start: Position { + line: 0, + character: 0, + }, + end: Position { + line: 0, + character: 12, + }, + }, + new_text: String::new(), + }], + ); + let mut fix_all_changes = std::collections::HashMap::new(); + fix_all_changes.insert( parsed_uri, vec![lsp_types::TextEdit { range: lsp_types::Range { @@ -670,7 +687,7 @@ fn expected_unused_variable_quickfix( lsp_types::CodeActionOrCommand::CodeAction(lsp_types::CodeAction { title: "Remove unused binding `x`".to_string(), kind: Some(lsp_types::CodeActionKind::QUICKFIX), - diagnostics: Some(vec![diagnostic]), + diagnostics: Some(vec![diagnostic.clone()]), edit: Some(lsp_types::WorkspaceEdit { changes: Some(remove_changes), document_changes: None, @@ -681,6 +698,20 @@ fn expected_unused_variable_quickfix( disabled: None, data: None, }), + lsp_types::CodeActionOrCommand::CodeAction(lsp_types::CodeAction { + title: "Remove all unused bindings".to_string(), + kind: Some(lsp_types::CodeActionKind::SOURCE_FIX_ALL), + diagnostics: Some(vec![diagnostic]), + edit: Some(lsp_types::WorkspaceEdit { + changes: Some(fix_all_changes), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(false), + disabled: None, + data: None, + }), ] } @@ -821,11 +852,16 @@ fn test_initialize_shutdown() { serde_json::Value::Bool(true), "inlay hint capability should be advertised", ); - assert_eq!( - result["capabilities"]["codeActionProvider"]["codeActionKinds"][0], - serde_json::Value::String("quickfix".to_string()), - "quickfix code action capability should be advertised", - ); + assert_eq!( + result["capabilities"]["codeActionProvider"]["codeActionKinds"][0], + serde_json::Value::String("quickfix".to_string()), + "quickfix code action capability should be advertised", + ); + assert_eq!( + 
result["capabilities"]["codeActionProvider"]["codeActionKinds"][1], + serde_json::Value::String("source.fixAll".to_string()), + "source fix-all code action capability should be advertised", + ); assert_eq!( result["capabilities"]["executeCommandProvider"]["commands"], serde_json::json!([ @@ -2662,15 +2698,48 @@ fn test_code_action_unused_variable_quickfix() { Some(expected_unused_variable_quickfix(uri, diagnostic.clone())) ); - // Requesting non-quickfix actions should filter this out. + // Requesting source fix-all actions should return the document-level fix-all action. let filtered_actions = request_code_actions( &client_conn, 3, uri, - vec![diagnostic], + vec![diagnostic.clone()], Some(vec![lsp_types::CodeActionKind::SOURCE_FIX_ALL]), ); - assert_eq!(filtered_actions, None); + assert_eq!( + filtered_actions, + Some(vec![lsp_types::CodeActionOrCommand::CodeAction( + lsp_types::CodeAction { + title: "Remove all unused bindings".to_string(), + kind: Some(lsp_types::CodeActionKind::SOURCE_FIX_ALL), + diagnostics: Some(vec![diagnostic]), + edit: Some(lsp_types::WorkspaceEdit { + changes: Some(std::collections::HashMap::from([( + uri.parse().unwrap(), + vec![lsp_types::TextEdit { + range: lsp_types::Range { + start: Position { + line: 0, + character: 0, + }, + end: Position { + line: 0, + character: 12, + }, + }, + new_text: String::new(), + }], + )])), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(false), + disabled: None, + data: None, + }, + )]) + ); client_conn .sender From b0acb3e2268c145bce8651edc602bbe322189fe9 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Wed, 11 Feb 2026 12:49:39 +0000 Subject: [PATCH 075/210] lsp: wire remove-unused action policy through config --- .../jrsonnet-lsp-handlers/src/code_action.rs | 155 ++++++++++--- crates/jrsonnet-lsp-handlers/src/lib.rs | 2 +- crates/jrsonnet-lsp/src/config.rs | 50 ++++- crates/jrsonnet-lsp/src/server.rs | 9 +- crates/jrsonnet-lsp/tests/integration_test.rs 
| 207 ++++++++++++++++++ docs/lsp/ARCHITECTURE.md | 5 +- docs/lsp/HANDLERS.md | 14 +- 7 files changed, 405 insertions(+), 37 deletions(-) diff --git a/crates/jrsonnet-lsp-handlers/src/code_action.rs b/crates/jrsonnet-lsp-handlers/src/code_action.rs index aae4905a..449c785e 100644 --- a/crates/jrsonnet-lsp-handlers/src/code_action.rs +++ b/crates/jrsonnet-lsp-handlers/src/code_action.rs @@ -17,12 +17,28 @@ use lsp_types::{ CodeAction, CodeActionContext, CodeActionKind, CodeActionOrCommand, Diagnostic, NumberOrString, Range, TextEdit, Uri, WorkspaceEdit, }; +use serde::{Deserialize, Serialize}; const UNUSED_VARIABLE_CODE: &str = "unused-variable"; +#[derive(Debug, Clone, Copy, PartialEq, Eq, Default, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub enum RemoveUnusedMode { + #[default] + All, + NonImportBindings, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Default, Serialize, Deserialize)] +#[serde(default, rename_all = "camelCase")] +pub struct CodeActionConfig { + pub remove_unused: RemoveUnusedMode, +} + #[derive(Debug, Clone, Copy, PartialEq, Eq)] enum RemovalFlavor { All, + NonImportBindings, } #[derive(Debug, Clone, Copy, PartialEq, Eq)] @@ -36,10 +52,23 @@ struct RemoveUnusedPolicy { comments: CommentPolicy, } +impl RemovalFlavor { + const fn allows(self, import_binding: bool) -> bool { + match self { + Self::All => true, + Self::NonImportBindings => !import_binding, + } + } +} + impl RemoveUnusedPolicy { - const fn all() -> Self { + const fn from_config(config: CodeActionConfig) -> Self { + let flavor = match config.remove_unused { + RemoveUnusedMode::All => RemovalFlavor::All, + RemoveUnusedMode::NonImportBindings => RemovalFlavor::NonImportBindings, + }; Self { - flavor: RemovalFlavor::All, + flavor, comments: CommentPolicy::None, } } @@ -435,14 +464,6 @@ fn unused_variable_action( ) } -fn remove_unused_binding_action( - document: &Document, - uri: &Uri, - diagnostic: &Diagnostic, -) -> Option { - 
remove_unused_binding_action_with_policy(document, uri, diagnostic, RemoveUnusedPolicy::all()) -} - fn remove_unused_binding_action_with_policy( document: &Document, uri: &Uri, @@ -460,7 +481,10 @@ fn remove_unused_binding_action_with_policy( let text = document.text(); let line_index = document.line_index(); - let (edit, _import_binding) = remove_unused_edit_for_diagnostic(document, diagnostic)?; + let (edit, import_binding) = remove_unused_edit_for_diagnostic(document, diagnostic)?; + if !policy.flavor.allows(import_binding) { + return None; + } let mut changes = HashMap::new(); changes.insert( @@ -499,9 +523,7 @@ fn remove_all_unused_bindings_action_with_policy( if !wants_fix_all(context) { return None; } - if !matches!(policy.flavor, RemovalFlavor::All) - || !matches!(policy.comments, CommentPolicy::None) - { + if !matches!(policy.comments, CommentPolicy::None) { return None; } @@ -509,6 +531,10 @@ fn remove_all_unused_bindings_action_with_policy( .diagnostics .iter() .filter(|diagnostic| is_unused_variable_diagnostic(diagnostic)) + .filter(|diagnostic| { + remove_unused_edit_for_diagnostic(document, diagnostic) + .is_some_and(|(_, import_binding)| policy.flavor.allows(import_binding)) + }) .cloned() .collect(); if diagnostics.is_empty() { @@ -560,8 +586,10 @@ pub fn code_actions( uri: &Uri, range: Range, context: &CodeActionContext, + config: &CodeActionConfig, ) -> Vec { let mut actions = Vec::new(); + let policy = RemoveUnusedPolicy::from_config(*config); if wants_quickfix(context) { actions.extend( @@ -572,7 +600,7 @@ pub fn code_actions( .flat_map(|diagnostic| { [ unused_variable_action(document, uri, diagnostic), - remove_unused_binding_action(document, uri, diagnostic), + remove_unused_binding_action_with_policy(document, uri, diagnostic, policy), ] .into_iter() .flatten() @@ -580,12 +608,9 @@ pub fn code_actions( ); } - if let Some(fix_all_action) = remove_all_unused_bindings_action_with_policy( - document, - uri, - context, - RemoveUnusedPolicy::all(), 
- ) { + if let Some(fix_all_action) = + remove_all_unused_bindings_action_with_policy(document, uri, context, policy) + { actions.push(fix_all_action); } @@ -639,7 +664,13 @@ mod tests { trigger_kind: None, }; - let actions = code_actions(&document, &uri(), range(0, 20), &context); + let actions = code_actions( + &document, + &uri(), + range(0, 20), + &context, + &CodeActionConfig::default(), + ); assert_eq!( actions, vec![ @@ -716,7 +747,13 @@ mod tests { trigger_kind: None, }; - let actions = code_actions(&document, &uri(), range(0, 20), &context); + let actions = code_actions( + &document, + &uri(), + range(0, 20), + &context, + &CodeActionConfig::default(), + ); assert_eq!( actions, vec![CodeActionOrCommand::CodeAction(CodeAction { @@ -751,7 +788,13 @@ mod tests { trigger_kind: None, }; - let actions = code_actions(&document, &uri(), range(0, 20), &context); + let actions = code_actions( + &document, + &uri(), + range(0, 20), + &context, + &CodeActionConfig::default(), + ); assert!(actions.is_empty()); } @@ -765,7 +808,13 @@ mod tests { }; assert_eq!( - code_actions(&document, &uri(), range(0, 22), &context), + code_actions( + &document, + &uri(), + range(0, 22), + &context, + &CodeActionConfig::default(), + ), vec![ CodeActionOrCommand::CodeAction(CodeAction { title: "Prefix `x` with `_`".to_string(), @@ -841,7 +890,13 @@ mod tests { }; assert_eq!( - code_actions(&document, &uri(), range(0, 21), &context), + code_actions( + &document, + &uri(), + range(0, 21), + &context, + &CodeActionConfig::default(), + ), vec![ CodeActionOrCommand::CodeAction(CodeAction { title: "Prefix `x` with `_`".to_string(), @@ -917,7 +972,13 @@ mod tests { }; assert_eq!( - code_actions(&document, &uri(), range(0, 22), &context), + code_actions( + &document, + &uri(), + range(0, 22), + &context, + &CodeActionConfig::default(), + ), vec![CodeActionOrCommand::CodeAction(CodeAction { title: "Remove all unused bindings".to_string(), kind: Some(CodeActionKind::SOURCE_FIX_ALL), @@ -940,4 
+1001,44 @@ mod tests { })] ); } + + #[test] + fn test_non_import_policy_skips_remove_actions_for_import_bindings() { + let document = Document::new( + "local x = import \"foo.libsonnet\"; 42".to_string(), + DocVersion::new(1), + ); + let context = CodeActionContext { + diagnostics: vec![diag_unused(range(6, 7))], + only: None, + trigger_kind: None, + }; + let config = CodeActionConfig { + remove_unused: RemoveUnusedMode::NonImportBindings, + }; + + assert_eq!( + code_actions(&document, &uri(), range(0, 35), &context, &config), + vec![CodeActionOrCommand::CodeAction(CodeAction { + title: "Prefix `x` with `_`".to_string(), + kind: Some(CodeActionKind::QUICKFIX), + diagnostics: Some(vec![diag_unused(range(6, 7))]), + edit: Some(WorkspaceEdit { + changes: Some(HashMap::from([( + uri(), + vec![TextEdit { + range: range(6, 7), + new_text: "_x".to_string(), + }], + )])), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(true), + disabled: None, + data: None, + })] + ); + } } diff --git a/crates/jrsonnet-lsp-handlers/src/lib.rs b/crates/jrsonnet-lsp-handlers/src/lib.rs index 6541da06..35c00554 100644 --- a/crates/jrsonnet-lsp-handlers/src/lib.rs +++ b/crates/jrsonnet-lsp-handlers/src/lib.rs @@ -14,7 +14,7 @@ pub mod semantic_tokens; pub mod signature_help; pub mod symbols; -pub use code_action::code_actions; +pub use code_action::{code_actions, CodeActionConfig, RemoveUnusedMode}; pub use code_lens::{code_lens, resolve_code_lens, CodeLensConfig}; pub use completion::{completion, completion_with_import_roots}; pub use definition::{ diff --git a/crates/jrsonnet-lsp/src/config.rs b/crates/jrsonnet-lsp/src/config.rs index 33f5f986..ac7c0b5c 100644 --- a/crates/jrsonnet-lsp/src/config.rs +++ b/crates/jrsonnet-lsp/src/config.rs @@ -7,8 +7,8 @@ use std::{collections::HashMap, path::PathBuf}; -// Re-export FormattingConfig from handlers crate -pub use jrsonnet_lsp_handlers::FormattingConfig; +// Re-export config types from handlers 
crate +pub use jrsonnet_lsp_handlers::{CodeActionConfig, FormattingConfig, RemoveUnusedMode}; use serde::{Deserialize, Serialize}; /// Server configuration options. @@ -57,6 +57,10 @@ pub struct ServerConfig { #[serde(default)] pub formatting: FormattingConfig, + /// Code action options. + #[serde(default, alias = "codeActions")] + pub code_actions: CodeActionConfig, + /// Log level for the server (error, warn, info, debug). #[serde(alias = "logLevel", alias = "log_level")] pub log_level: Option, @@ -78,6 +82,8 @@ struct ServerConfigPatch { #[serde(alias = "resolvePathsWithTanka", alias = "tankaMode")] resolve_paths_with_tanka: Option, formatting: Option, + #[serde(rename = "codeActions")] + code_actions: Option, #[serde(alias = "logLevel", alias = "log_level")] log_level: Option, } @@ -91,6 +97,7 @@ impl ServerConfigPatch { && self.enable_lint_diagnostics.is_none() && self.resolve_paths_with_tanka.is_none() && self.formatting.is_none() + && self.code_actions.is_none() && self.log_level.is_none() } @@ -116,6 +123,9 @@ impl ServerConfigPatch { if let Some(formatting) = self.formatting { config.merge_formatting(formatting); } + if let Some(code_actions) = self.code_actions { + config.code_actions = code_actions; + } if let Some(log_level) = self.log_level { config.log_level = log_level.as_str().map(ToString::to_string); } @@ -176,6 +186,9 @@ impl ServerConfig { self.log_level = other.log_level; } self.merge_formatting(other.formatting); + if other.code_actions != CodeActionConfig::default() { + self.code_actions = other.code_actions; + } } /// Merge formatting configuration. 
@@ -257,6 +270,7 @@ mod tests { assert!(config.jpath.is_empty()); assert!(config.ext_vars.is_empty()); assert!(!config.enable_eval_diagnostics); + assert_eq!(config.code_actions, CodeActionConfig::default()); } #[test] @@ -365,6 +379,38 @@ mod tests { assert!(!config.enable_eval_diagnostics); } + #[test] + fn test_code_action_config_from_initialization_options() { + let json = serde_json::json!({ + "codeActions": { + "removeUnused": "nonImportBindings" + } + }); + + let config = ServerConfig::from_initialization_options(Some(json)); + assert_eq!( + config.code_actions.remove_unused, + RemoveUnusedMode::NonImportBindings + ); + } + + #[test] + fn test_update_from_settings_updates_code_actions_config() { + let mut config = ServerConfig::new(); + + let settings = serde_json::json!({ + "codeActions": { + "removeUnused": "nonImportBindings" + } + }); + + assert!(config.update_from_settings(settings)); + assert_eq!( + config.code_actions.remove_unused, + RemoveUnusedMode::NonImportBindings + ); + } + #[test] fn test_formatting_config() { let json = serde_json::json!({ diff --git a/crates/jrsonnet-lsp/src/server.rs b/crates/jrsonnet-lsp/src/server.rs index 8572284b..6df1dd16 100644 --- a/crates/jrsonnet-lsp/src/server.rs +++ b/crates/jrsonnet-lsp/src/server.rs @@ -930,7 +930,14 @@ impl Server { let path = CanonicalPath::from_uri(uri)?; let actions = { let doc = self.documents.get(&path)?; - handlers::code_actions(&doc, uri, params.range, ¶ms.context) + let code_action_config = self.config.read().code_actions; + handlers::code_actions( + &doc, + uri, + params.range, + ¶ms.context, + &code_action_config, + ) }; if actions.is_empty() { return None; diff --git a/crates/jrsonnet-lsp/tests/integration_test.rs b/crates/jrsonnet-lsp/tests/integration_test.rs index 213434d1..d13d8a24 100644 --- a/crates/jrsonnet-lsp/tests/integration_test.rs +++ b/crates/jrsonnet-lsp/tests/integration_test.rs @@ -715,6 +715,109 @@ fn expected_unused_variable_quickfix( ] } +fn 
expected_unused_import_binding_actions( + uri: &str, + diagnostic: lsp_types::Diagnostic, +) -> Vec { + let parsed_uri: lsp_types::Uri = uri.parse().unwrap(); + let mut prefix_changes = std::collections::HashMap::new(); + prefix_changes.insert( + parsed_uri.clone(), + vec![lsp_types::TextEdit { + range: lsp_types::Range { + start: Position { + line: 0, + character: 6, + }, + end: Position { + line: 0, + character: 7, + }, + }, + new_text: "_x".to_string(), + }], + ); + let mut remove_changes = std::collections::HashMap::new(); + remove_changes.insert( + parsed_uri.clone(), + vec![lsp_types::TextEdit { + range: lsp_types::Range { + start: Position { + line: 0, + character: 0, + }, + end: Position { + line: 0, + character: 33, + }, + }, + new_text: String::new(), + }], + ); + let mut fix_all_changes = std::collections::HashMap::new(); + fix_all_changes.insert( + parsed_uri, + vec![lsp_types::TextEdit { + range: lsp_types::Range { + start: Position { + line: 0, + character: 0, + }, + end: Position { + line: 0, + character: 33, + }, + }, + new_text: String::new(), + }], + ); + + vec![ + lsp_types::CodeActionOrCommand::CodeAction(lsp_types::CodeAction { + title: "Prefix `x` with `_`".to_string(), + kind: Some(lsp_types::CodeActionKind::QUICKFIX), + diagnostics: Some(vec![diagnostic.clone()]), + edit: Some(lsp_types::WorkspaceEdit { + changes: Some(prefix_changes), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(true), + disabled: None, + data: None, + }), + lsp_types::CodeActionOrCommand::CodeAction(lsp_types::CodeAction { + title: "Remove unused binding `x`".to_string(), + kind: Some(lsp_types::CodeActionKind::QUICKFIX), + diagnostics: Some(vec![diagnostic.clone()]), + edit: Some(lsp_types::WorkspaceEdit { + changes: Some(remove_changes), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(false), + disabled: None, + data: None, + }), + 
lsp_types::CodeActionOrCommand::CodeAction(lsp_types::CodeAction { + title: "Remove all unused bindings".to_string(), + kind: Some(lsp_types::CodeActionKind::SOURCE_FIX_ALL), + diagnostics: Some(vec![diagnostic]), + edit: Some(lsp_types::WorkspaceEdit { + changes: Some(fix_all_changes), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(false), + disabled: None, + data: None, + }), + ] +} + fn find_references_command_args(uri: &str, include_declaration: bool) -> Vec { let mut args = vec![ serde_json::Value::String(uri.to_string()), @@ -2755,6 +2858,110 @@ fn test_code_action_unused_variable_quickfix() { .expect("Server thread should exit cleanly"); } +#[test] +fn test_code_action_policy_updates_via_configuration_change() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + let uri = "file:///test/code-action-policy.jsonnet"; + let text = "local x = import \"foo.libsonnet\"; 42"; + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text))) + .unwrap(); + + let diagnostic = unused_variable_diagnostic(); + let actions_before = request_code_actions(&client_conn, 2, uri, vec![diagnostic.clone()], None); + assert_eq!( + actions_before, + Some(expected_unused_import_binding_actions( + uri, + diagnostic.clone(), + )) + ); + + client_conn + .sender + .send(Message::Notification( + did_change_configuration_notification(serde_json::json!({ + "jsonnet": { + "codeActions": { + "removeUnused": "nonImportBindings" + } + } + })), + )) + .unwrap(); + + let actions_after = request_code_actions(&client_conn, 3, uri, vec![diagnostic.clone()], None); + assert_eq!( + actions_after, + 
Some(vec![lsp_types::CodeActionOrCommand::CodeAction( + lsp_types::CodeAction { + title: "Prefix `x` with `_`".to_string(), + kind: Some(lsp_types::CodeActionKind::QUICKFIX), + diagnostics: Some(vec![diagnostic.clone()]), + edit: Some(lsp_types::WorkspaceEdit { + changes: Some(std::collections::HashMap::from([( + uri.parse().unwrap(), + vec![lsp_types::TextEdit { + range: lsp_types::Range { + start: Position { + line: 0, + character: 6, + }, + end: Position { + line: 0, + character: 7, + }, + }, + new_text: "_x".to_string(), + }], + )])), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(true), + disabled: None, + data: None, + }, + )]) + ); + + let fix_all_after = request_code_actions( + &client_conn, + 4, + uri, + vec![diagnostic], + Some(vec![lsp_types::CodeActionKind::SOURCE_FIX_ALL]), + ); + assert_eq!(fix_all_after, None); + + client_conn + .sender + .send(Message::Request(shutdown_request(5))) + .unwrap(); + let _ = recv_response(&client_conn, 5); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + #[test] fn test_execute_command_find_references() { let (client_conn, server_conn) = Connection::memory(); diff --git a/docs/lsp/ARCHITECTURE.md b/docs/lsp/ARCHITECTURE.md index 6ffe6a74..f0bf4954 100644 --- a/docs/lsp/ARCHITECTURE.md +++ b/docs/lsp/ARCHITECTURE.md @@ -163,7 +163,7 @@ The main loop uses this boundary for all request responses after initialization. - workspace symbol search - rename with `prepareRename` - semantic tokens (full document and range) -- code actions (quick-fix kind) +- code actions (quick-fix and source.fixAll kinds) - code lens (resolve enabled) - execute command (five command IDs) @@ -311,6 +311,8 @@ Important behavior in `on_did_change_configuration`: for all tracked files (open and closed-cache graph entries). 
- Runtime changes and lint toggle changes both trigger diagnostic rescheduling for open files. +- Code action policy updates (`codeActions`) are applied immediately and do not + require runtime rebuild. ## Configuration Surface @@ -323,6 +325,7 @@ Important behavior in `on_did_change_configuration`: - `enable_lint_diagnostics` - `resolve_paths_with_tanka` - `formatting` +- `code_actions` - `log_level` Configuration can arrive via initialization options or diff --git a/docs/lsp/HANDLERS.md b/docs/lsp/HANDLERS.md index 0bf1bd42..289aa3e6 100644 --- a/docs/lsp/HANDLERS.md +++ b/docs/lsp/HANDLERS.md @@ -70,11 +70,15 @@ method-specific responses, and supports request cancellation. File: `crates/jrsonnet-lsp-handlers/src/code_action.rs` -- Produces quick fixes from diagnostic context (for example unused-variable - fixes). - Current unused-variable quickfixes include: - `Prefix <name> with _` and `Remove unused binding <name>` (for single-binding - `local` statements). +- Produces quick-fix and source fix-all actions from diagnostic context. +- Current unused-variable actions include: + `Prefix <name> with _`, `Remove unused binding <name>`, and + `Remove all unused bindings`. +- Remove-unused edits support: + top-level `local` statements (single and multi-binding) and object-local + members in object bodies. +- `CodeActionConfig.remove_unused` supports policy modes: + `all` and `nonImportBindings`. - Uses current document plus selected range and diagnostics from request params. - Returned through sync `textDocument/codeAction`. 
From 02028e30f0b0eb648ca9fda1e4ca01c8f2eeebc9 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Wed, 11 Feb 2026 13:00:21 +0000 Subject: [PATCH 076/210] lsp-code-action: add import/comment remove-unused policy modes --- .../jrsonnet-lsp-handlers/src/code_action.rs | 376 ++++++++++++++++-- crates/jrsonnet-lsp-handlers/src/lib.rs | 2 +- crates/jrsonnet-lsp/src/config.rs | 18 +- crates/jrsonnet-lsp/tests/integration_test.rs | 187 ++++++++- docs/lsp/ARCHITECTURE.md | 5 +- docs/lsp/HANDLERS.md | 4 +- 6 files changed, 550 insertions(+), 42 deletions(-) diff --git a/crates/jrsonnet-lsp-handlers/src/code_action.rs b/crates/jrsonnet-lsp-handlers/src/code_action.rs index 449c785e..3c142500 100644 --- a/crates/jrsonnet-lsp-handlers/src/code_action.rs +++ b/crates/jrsonnet-lsp-handlers/src/code_action.rs @@ -10,7 +10,7 @@ use jrsonnet_rowan_parser::{ Bind, BindDestruct, BindFunction, Destruct, Expr, ExprBase, Member, MemberBindStmt, ObjBodyMemberList, StmtLocal, }, - rowan::{TextRange, TextSize}, + rowan::{TextRange, TextSize, TokenAtOffset}, AstNode, SyntaxKind, SyntaxNode, SyntaxToken, }; use lsp_types::{ @@ -26,24 +26,40 @@ const UNUSED_VARIABLE_CODE: &str = "unused-variable"; pub enum RemoveUnusedMode { #[default] All, + ImportBindings, NonImportBindings, } +#[derive(Debug, Clone, Copy, PartialEq, Eq, Default, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub enum RemoveUnusedCommentsMode { + #[default] + None, + Above, + Below, + All, +} + #[derive(Debug, Clone, Copy, PartialEq, Eq, Default, Serialize, Deserialize)] #[serde(default, rename_all = "camelCase")] pub struct CodeActionConfig { pub remove_unused: RemoveUnusedMode, + pub remove_unused_comments: RemoveUnusedCommentsMode, } #[derive(Debug, Clone, Copy, PartialEq, Eq)] enum RemovalFlavor { All, + ImportBindings, NonImportBindings, } #[derive(Debug, Clone, Copy, PartialEq, Eq)] enum CommentPolicy { None, + Above, + Below, + All, } #[derive(Debug, Clone, Copy, PartialEq, Eq)] @@ -56,21 +72,36 @@ impl 
RemovalFlavor { const fn allows(self, import_binding: bool) -> bool { match self { Self::All => true, + Self::ImportBindings => import_binding, Self::NonImportBindings => !import_binding, } } } +impl CommentPolicy { + const fn keeps_above_comments(self) -> bool { + matches!(self, Self::Above | Self::All) + } + + const fn keeps_below_comments(self) -> bool { + matches!(self, Self::Below | Self::All) + } +} + impl RemoveUnusedPolicy { const fn from_config(config: CodeActionConfig) -> Self { let flavor = match config.remove_unused { RemoveUnusedMode::All => RemovalFlavor::All, + RemoveUnusedMode::ImportBindings => RemovalFlavor::ImportBindings, RemoveUnusedMode::NonImportBindings => RemovalFlavor::NonImportBindings, }; - Self { - flavor, - comments: CommentPolicy::None, - } + let comments = match config.remove_unused_comments { + RemoveUnusedCommentsMode::None => CommentPolicy::None, + RemoveUnusedCommentsMode::Above => CommentPolicy::Above, + RemoveUnusedCommentsMode::Below => CommentPolicy::Below, + RemoveUnusedCommentsMode::All => CommentPolicy::All, + }; + Self { flavor, comments } } } @@ -90,6 +121,27 @@ const fn is_trivia_kind(kind: SyntaxKind) -> bool { ) } +const fn is_comment_kind(kind: SyntaxKind) -> bool { + matches!( + kind, + SyntaxKind::MULTI_LINE_COMMENT + | SyntaxKind::SINGLE_LINE_HASH_COMMENT + | SyntaxKind::SINGLE_LINE_SLASH_COMMENT + ) +} + +const fn is_whitespace_kind(kind: SyntaxKind) -> bool { + matches!(kind, SyntaxKind::WHITESPACE) +} + +fn can_absorb_trivia(token: &SyntaxToken, keep_comments: bool) -> bool { + if is_whitespace_kind(token.kind()) { + return !token.text().contains("\n\n"); + } + + is_comment_kind(token.kind()) && keep_comments +} + fn previous_significant_token(token: &SyntaxToken) -> Option { std::iter::successors(token.prev_token(), SyntaxToken::prev_token) .find(|candidate| !is_trivia_kind(candidate.kind())) @@ -108,6 +160,52 @@ fn single_line_trivia_end_after(token: &SyntaxToken) -> Option { Some(trivia.text_range().end()) } 
+fn token_at_range_start(syntax: &SyntaxNode, range: TextRange) -> Option { + match syntax.token_at_offset(range.start()) { + TokenAtOffset::None => None, + TokenAtOffset::Single(token) => Some(token), + TokenAtOffset::Between(_, right) => Some(right), + } +} + +fn token_at_range_end(syntax: &SyntaxNode, range: TextRange) -> Option { + let end = range.end().checked_sub(TextSize::new(1))?; + match syntax.token_at_offset(end) { + TokenAtOffset::None => None, + TokenAtOffset::Single(token) => Some(token), + TokenAtOffset::Between(left, _) => Some(left), + } +} + +fn expand_range_with_policy( + syntax: &SyntaxNode, + range: TextRange, + comments: CommentPolicy, +) -> Option { + let mut start = token_at_range_start(syntax, range)?; + while let Some(previous) = start.prev_token() { + if can_absorb_trivia(&previous, comments.keeps_above_comments()) { + start = previous; + continue; + } + break; + } + + let mut end = token_at_range_end(syntax, range)?; + while let Some(next) = end.next_token() { + if can_absorb_trivia(&next, comments.keeps_below_comments()) { + end = next; + continue; + } + break; + } + + Some(TextRange::new( + start.text_range().start(), + end.text_range().end(), + )) +} + fn remove_range_for_list_entry(entry: &SyntaxNode) -> Option { let first = entry.first_token()?; let last = entry.last_token()?; @@ -293,7 +391,38 @@ fn removal_ranges_for_member_list( .collect() } -fn removal_ranges_for_fix_all(document: &Document, diagnostics: &[Diagnostic]) -> Vec { +fn merge_overlapping_ranges(mut ranges: Vec) -> Vec { + if ranges.is_empty() { + return ranges; + } + ranges.sort_unstable_by_key(|range| (range.start(), range.end())); + + let mut merged = Vec::with_capacity(ranges.len()); + for range in ranges { + let Some(last) = merged.last_mut() else { + merged.push(range); + continue; + }; + if range.start() <= last.end() { + let end = if range.end() > last.end() { + range.end() + } else { + last.end() + }; + *last = TextRange::new(last.start(), end); + continue; + 
} + merged.push(range); + } + + merged +} + +fn removal_ranges_for_fix_all( + document: &Document, + diagnostics: &[Diagnostic], + policy: RemoveUnusedPolicy, +) -> Vec { let unused_name_ranges = unused_binding_name_ranges(document, diagnostics); if unused_name_ranges.is_empty() { return Vec::new(); @@ -317,8 +446,13 @@ fn removal_ranges_for_fix_all(document: &Document, diagnostics: &[Diagnostic]) - &unused_name_ranges, )); } - ranges.sort_unstable_by_key(|range| (range.start(), range.end())); - ranges + + let syntax = document.ast().syntax().clone(); + let expanded: Vec = ranges + .into_iter() + .filter_map(|range| expand_range_with_policy(&syntax, range, policy.comments)) + .collect(); + merge_overlapping_ranges(expanded) } fn is_import_expression(expr: Expr) -> bool { @@ -330,6 +464,8 @@ fn is_import_expression(expr: Expr) -> bool { } fn remove_edit_for_bind( + document: &Document, + policy: RemoveUnusedPolicy, binding_name: String, bind_node: SyntaxNode, value_expr: Option, @@ -351,11 +487,13 @@ fn remove_edit_for_bind( return None; }; + let expanded_range = + expand_range_with_policy(document.ast().syntax(), removal_range, policy.comments)?; let import_binding = value_expr.is_some_and(is_import_expression); Some(( RemoveUnusedEdit { binding_name, - range: removal_range, + range: expanded_range, }, import_binding, )) @@ -364,6 +502,7 @@ fn remove_edit_for_bind( fn remove_unused_edit_for_diagnostic( document: &Document, diagnostic: &Diagnostic, + policy: RemoveUnusedPolicy, ) -> Option<(RemoveUnusedEdit, bool)> { let text = document.text(); let line_index = document.line_index(); @@ -380,12 +519,24 @@ fn remove_unused_edit_for_diagnostic( return None; }; let name = full.name()?.ident_lit()?.text().to_string(); - return remove_edit_for_bind(name, bind_destruct.syntax().clone(), bind_destruct.value()); + return remove_edit_for_bind( + document, + policy, + name, + bind_destruct.syntax().clone(), + bind_destruct.value(), + ); } let bind_function = 
token.parent()?.ancestors().find_map(BindFunction::cast)?; let name = bind_function.name()?.ident_lit()?.text().to_string(); - remove_edit_for_bind(name, bind_function.syntax().clone(), bind_function.value()) + remove_edit_for_bind( + document, + policy, + name, + bind_function.syntax().clone(), + bind_function.value(), + ) } fn range_overlaps(a: Range, b: Range) -> bool { @@ -473,15 +624,10 @@ fn remove_unused_binding_action_with_policy( if !is_unused_variable_diagnostic(diagnostic) { return None; } - if !matches!(policy.flavor, RemovalFlavor::All) - || !matches!(policy.comments, CommentPolicy::None) - { - return None; - } let text = document.text(); let line_index = document.line_index(); - let (edit, import_binding) = remove_unused_edit_for_diagnostic(document, diagnostic)?; + let (edit, import_binding) = remove_unused_edit_for_diagnostic(document, diagnostic, policy)?; if !policy.flavor.allows(import_binding) { return None; } @@ -523,16 +669,13 @@ fn remove_all_unused_bindings_action_with_policy( if !wants_fix_all(context) { return None; } - if !matches!(policy.comments, CommentPolicy::None) { - return None; - } let diagnostics: Vec = context .diagnostics .iter() .filter(|diagnostic| is_unused_variable_diagnostic(diagnostic)) .filter(|diagnostic| { - remove_unused_edit_for_diagnostic(document, diagnostic) + remove_unused_edit_for_diagnostic(document, diagnostic, policy) .is_some_and(|(_, import_binding)| policy.flavor.allows(import_binding)) }) .cloned() @@ -541,7 +684,7 @@ fn remove_all_unused_bindings_action_with_policy( return None; } - let mut ranges = removal_ranges_for_fix_all(document, &diagnostics); + let mut ranges = removal_ranges_for_fix_all(document, &diagnostics, policy); if ranges.is_empty() { return None; } @@ -655,6 +798,19 @@ mod tests { } } + fn span(start_line: u32, start_char: u32, end_line: u32, end_char: u32) -> Range { + Range { + start: Position { + line: start_line, + character: start_char, + }, + end: Position { + line: end_line, + 
character: end_char, + }, + } + } + #[test] fn test_unused_variable_quickfix() { let document = Document::new("local x = 1; 42".to_string(), DocVersion::new(1)); @@ -702,7 +858,7 @@ mod tests { changes: Some(HashMap::from([( uri(), vec![TextEdit { - range: range(0, 12), + range: range(0, 11), new_text: String::new(), }], )])), @@ -722,7 +878,7 @@ mod tests { changes: Some(HashMap::from([( uri(), vec![TextEdit { - range: range(0, 12), + range: range(0, 11), new_text: String::new(), }], )])), @@ -764,7 +920,7 @@ mod tests { changes: Some(HashMap::from([( uri(), vec![TextEdit { - range: range(0, 12), + range: range(0, 11), new_text: String::new(), }], )])), @@ -844,7 +1000,7 @@ mod tests { changes: Some(HashMap::from([( uri(), vec![TextEdit { - range: range(6, 13), + range: range(5, 13), new_text: String::new(), }], )])), @@ -864,7 +1020,7 @@ mod tests { changes: Some(HashMap::from([( uri(), vec![TextEdit { - range: range(6, 13), + range: range(5, 13), new_text: String::new(), }], )])), @@ -926,7 +1082,7 @@ mod tests { changes: Some(HashMap::from([( uri(), vec![TextEdit { - range: range(2, 15), + range: range(1, 15), new_text: String::new(), }], )])), @@ -946,7 +1102,7 @@ mod tests { changes: Some(HashMap::from([( uri(), vec![TextEdit { - range: range(2, 15), + range: range(1, 15), new_text: String::new(), }], )])), @@ -987,7 +1143,7 @@ mod tests { changes: Some(HashMap::from([( uri(), vec![TextEdit { - range: range(0, 19), + range: range(0, 18), new_text: String::new(), }], )])), @@ -1015,6 +1171,7 @@ mod tests { }; let config = CodeActionConfig { remove_unused: RemoveUnusedMode::NonImportBindings, + ..CodeActionConfig::default() }; assert_eq!( @@ -1041,4 +1198,163 @@ mod tests { })] ); } + + #[test] + fn test_non_import_policy_keeps_remove_actions_for_non_import_bindings() { + let document = Document::new("local x = 1; 42".to_string(), DocVersion::new(1)); + let context = CodeActionContext { + diagnostics: vec![diag_unused(range(6, 7))], + only: None, + 
trigger_kind: None, + }; + let config = CodeActionConfig { + remove_unused: RemoveUnusedMode::NonImportBindings, + ..CodeActionConfig::default() + }; + + assert_eq!( + code_actions(&document, &uri(), range(0, 20), &context, &config), + vec![ + CodeActionOrCommand::CodeAction(CodeAction { + title: "Prefix `x` with `_`".to_string(), + kind: Some(CodeActionKind::QUICKFIX), + diagnostics: Some(vec![diag_unused(range(6, 7))]), + edit: Some(WorkspaceEdit { + changes: Some(HashMap::from([( + uri(), + vec![TextEdit { + range: range(6, 7), + new_text: "_x".to_string(), + }], + )])), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(true), + disabled: None, + data: None, + }), + CodeActionOrCommand::CodeAction(CodeAction { + title: "Remove unused binding `x`".to_string(), + kind: Some(CodeActionKind::QUICKFIX), + diagnostics: Some(vec![diag_unused(range(6, 7))]), + edit: Some(WorkspaceEdit { + changes: Some(HashMap::from([( + uri(), + vec![TextEdit { + range: range(0, 11), + new_text: String::new(), + }], + )])), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(false), + disabled: None, + data: None, + }), + CodeActionOrCommand::CodeAction(CodeAction { + title: "Remove all unused bindings".to_string(), + kind: Some(CodeActionKind::SOURCE_FIX_ALL), + diagnostics: Some(vec![diag_unused(range(6, 7))]), + edit: Some(WorkspaceEdit { + changes: Some(HashMap::from([( + uri(), + vec![TextEdit { + range: range(0, 11), + new_text: String::new(), + }], + )])), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(false), + disabled: None, + data: None, + }), + ] + ); + } + + #[test] + fn test_import_only_policy_skips_remove_actions_for_non_import_bindings() { + let document = Document::new("local x = 1; 42".to_string(), DocVersion::new(1)); + let context = CodeActionContext { + diagnostics: vec![diag_unused(range(6, 7))], + only: None, + 
trigger_kind: None, + }; + let config = CodeActionConfig { + remove_unused: RemoveUnusedMode::ImportBindings, + ..CodeActionConfig::default() + }; + + assert_eq!( + code_actions(&document, &uri(), range(0, 20), &context, &config), + vec![CodeActionOrCommand::CodeAction(CodeAction { + title: "Prefix `x` with `_`".to_string(), + kind: Some(CodeActionKind::QUICKFIX), + diagnostics: Some(vec![diag_unused(range(6, 7))]), + edit: Some(WorkspaceEdit { + changes: Some(HashMap::from([( + uri(), + vec![TextEdit { + range: range(6, 7), + new_text: "_x".to_string(), + }], + )])), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(true), + disabled: None, + data: None, + })] + ); + } + + #[test] + fn test_fix_all_can_remove_above_comments_when_configured() { + let document = Document::new( + "// heading\nlocal x = 1;\n42".to_string(), + DocVersion::new(1), + ); + let context = CodeActionContext { + diagnostics: vec![diag_unused(span(1, 6, 1, 7))], + only: Some(vec![CodeActionKind::SOURCE_FIX_ALL]), + trigger_kind: None, + }; + let config = CodeActionConfig { + remove_unused_comments: RemoveUnusedCommentsMode::Above, + ..CodeActionConfig::default() + }; + + assert_eq!( + code_actions(&document, &uri(), span(0, 0, 2, 2), &context, &config), + vec![CodeActionOrCommand::CodeAction(CodeAction { + title: "Remove all unused bindings".to_string(), + kind: Some(CodeActionKind::SOURCE_FIX_ALL), + diagnostics: Some(vec![diag_unused(span(1, 6, 1, 7))]), + edit: Some(WorkspaceEdit { + changes: Some(HashMap::from([( + uri(), + vec![TextEdit { + range: span(0, 0, 1, 11), + new_text: String::new(), + }], + )])), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(false), + disabled: None, + data: None, + })] + ); + } } diff --git a/crates/jrsonnet-lsp-handlers/src/lib.rs b/crates/jrsonnet-lsp-handlers/src/lib.rs index 35c00554..ef350a9a 100644 --- a/crates/jrsonnet-lsp-handlers/src/lib.rs +++ 
b/crates/jrsonnet-lsp-handlers/src/lib.rs @@ -14,7 +14,7 @@ pub mod semantic_tokens; pub mod signature_help; pub mod symbols; -pub use code_action::{code_actions, CodeActionConfig, RemoveUnusedMode}; +pub use code_action::{code_actions, CodeActionConfig, RemoveUnusedCommentsMode, RemoveUnusedMode}; pub use code_lens::{code_lens, resolve_code_lens, CodeLensConfig}; pub use completion::{completion, completion_with_import_roots}; pub use definition::{ diff --git a/crates/jrsonnet-lsp/src/config.rs b/crates/jrsonnet-lsp/src/config.rs index ac7c0b5c..d438c3c4 100644 --- a/crates/jrsonnet-lsp/src/config.rs +++ b/crates/jrsonnet-lsp/src/config.rs @@ -8,7 +8,9 @@ use std::{collections::HashMap, path::PathBuf}; // Re-export config types from handlers crate -pub use jrsonnet_lsp_handlers::{CodeActionConfig, FormattingConfig, RemoveUnusedMode}; +pub use jrsonnet_lsp_handlers::{ + CodeActionConfig, FormattingConfig, RemoveUnusedCommentsMode, RemoveUnusedMode, +}; use serde::{Deserialize, Serialize}; /// Server configuration options. 
@@ -383,7 +385,8 @@ mod tests { fn test_code_action_config_from_initialization_options() { let json = serde_json::json!({ "codeActions": { - "removeUnused": "nonImportBindings" + "removeUnused": "nonImportBindings", + "removeUnusedComments": "above" } }); @@ -392,6 +395,10 @@ mod tests { config.code_actions.remove_unused, RemoveUnusedMode::NonImportBindings ); + assert_eq!( + config.code_actions.remove_unused_comments, + RemoveUnusedCommentsMode::Above + ); } #[test] @@ -400,7 +407,8 @@ mod tests { let settings = serde_json::json!({ "codeActions": { - "removeUnused": "nonImportBindings" + "removeUnused": "nonImportBindings", + "removeUnusedComments": "below" } }); @@ -409,6 +417,10 @@ mod tests { config.code_actions.remove_unused, RemoveUnusedMode::NonImportBindings ); + assert_eq!( + config.code_actions.remove_unused_comments, + RemoveUnusedCommentsMode::Below + ); } #[test] diff --git a/crates/jrsonnet-lsp/tests/integration_test.rs b/crates/jrsonnet-lsp/tests/integration_test.rs index d13d8a24..b62faf77 100644 --- a/crates/jrsonnet-lsp/tests/integration_test.rs +++ b/crates/jrsonnet-lsp/tests/integration_test.rs @@ -645,7 +645,7 @@ fn expected_unused_variable_quickfix( }, end: Position { line: 0, - character: 12, + character: 11, }, }, new_text: String::new(), @@ -662,7 +662,7 @@ fn expected_unused_variable_quickfix( }, end: Position { line: 0, - character: 12, + character: 11, }, }, new_text: String::new(), @@ -748,7 +748,7 @@ fn expected_unused_import_binding_actions( }, end: Position { line: 0, - character: 33, + character: 32, }, }, new_text: String::new(), @@ -765,7 +765,7 @@ fn expected_unused_import_binding_actions( }, end: Position { line: 0, - character: 33, + character: 32, }, }, new_text: String::new(), @@ -2827,7 +2827,7 @@ fn test_code_action_unused_variable_quickfix() { }, end: Position { line: 0, - character: 12, + character: 11, }, }, new_text: String::new(), @@ -2962,6 +2962,183 @@ fn test_code_action_policy_updates_via_configuration_change() { 
.expect("Server thread should exit cleanly"); } +#[test] +fn test_code_action_comment_policy_updates_via_configuration_change() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + let uri = "file:///test/code-action-comment-policy.jsonnet"; + let text = "// heading\nlocal x = 1;\n42"; + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text))) + .unwrap(); + + let diagnostic = lsp_types::Diagnostic { + range: lsp_types::Range { + start: Position { + line: 1, + character: 6, + }, + end: Position { + line: 1, + character: 7, + }, + }, + severity: Some(lsp_types::DiagnosticSeverity::WARNING), + code: Some(lsp_types::NumberOrString::String( + "unused-variable".to_string(), + )), + code_description: None, + source: Some("jrsonnet-lint".to_string()), + message: "unused variable".to_string(), + related_information: None, + tags: None, + data: None, + }; + + let fix_all_before = request_code_actions( + &client_conn, + 2, + uri, + vec![diagnostic.clone()], + Some(vec![lsp_types::CodeActionKind::SOURCE_FIX_ALL]), + ); + assert_eq!( + fix_all_before, + Some(vec![lsp_types::CodeActionOrCommand::CodeAction( + lsp_types::CodeAction { + title: "Remove all unused bindings".to_string(), + kind: Some(lsp_types::CodeActionKind::SOURCE_FIX_ALL), + diagnostics: Some(vec![diagnostic.clone()]), + edit: Some(lsp_types::WorkspaceEdit { + changes: Some(std::collections::HashMap::from([( + uri.parse().unwrap(), + vec![lsp_types::TextEdit { + range: lsp_types::Range { + start: Position { + line: 1, + character: 0, + }, + end: Position { + line: 1, + character: 11, + }, + }, + new_text: String::new(), + }], + )])), + document_changes: None, + change_annotations: None, + }), + command: 
None, + is_preferred: Some(false), + disabled: None, + data: None, + }, + )]) + ); + + client_conn + .sender + .send(Message::Notification( + did_change_configuration_notification(serde_json::json!({ + "jsonnet": { + "codeActions": { + "removeUnusedComments": "above" + } + } + })), + )) + .unwrap(); + + let fix_all_after = request_code_actions( + &client_conn, + 3, + uri, + vec![diagnostic], + Some(vec![lsp_types::CodeActionKind::SOURCE_FIX_ALL]), + ); + assert_eq!( + fix_all_after, + Some(vec![lsp_types::CodeActionOrCommand::CodeAction( + lsp_types::CodeAction { + title: "Remove all unused bindings".to_string(), + kind: Some(lsp_types::CodeActionKind::SOURCE_FIX_ALL), + diagnostics: Some(vec![lsp_types::Diagnostic { + range: lsp_types::Range { + start: Position { + line: 1, + character: 6, + }, + end: Position { + line: 1, + character: 7, + }, + }, + severity: Some(lsp_types::DiagnosticSeverity::WARNING), + code: Some(lsp_types::NumberOrString::String( + "unused-variable".to_string(), + )), + code_description: None, + source: Some("jrsonnet-lint".to_string()), + message: "unused variable".to_string(), + related_information: None, + tags: None, + data: None, + }]), + edit: Some(lsp_types::WorkspaceEdit { + changes: Some(std::collections::HashMap::from([( + uri.parse().unwrap(), + vec![lsp_types::TextEdit { + range: lsp_types::Range { + start: Position { + line: 0, + character: 0, + }, + end: Position { + line: 1, + character: 11, + }, + }, + new_text: String::new(), + }], + )])), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(false), + disabled: None, + data: None, + }, + )]) + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(4))) + .unwrap(); + let _ = recv_response(&client_conn, 4); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + #[test] fn 
test_execute_command_find_references() { let (client_conn, server_conn) = Connection::memory(); diff --git a/docs/lsp/ARCHITECTURE.md b/docs/lsp/ARCHITECTURE.md index f0bf4954..6df68b97 100644 --- a/docs/lsp/ARCHITECTURE.md +++ b/docs/lsp/ARCHITECTURE.md @@ -311,8 +311,9 @@ Important behavior in `on_did_change_configuration`: for all tracked files (open and closed-cache graph entries). - Runtime changes and lint toggle changes both trigger diagnostic rescheduling for open files. -- Code action policy updates (`codeActions`) are applied immediately and do not - require runtime rebuild. +- Code action policy updates (`codeActions.removeUnused`, + `codeActions.removeUnusedComments`) are applied immediately and do not require + runtime rebuild. ## Configuration Surface diff --git a/docs/lsp/HANDLERS.md b/docs/lsp/HANDLERS.md index 289aa3e6..fc46b809 100644 --- a/docs/lsp/HANDLERS.md +++ b/docs/lsp/HANDLERS.md @@ -78,7 +78,9 @@ File: `crates/jrsonnet-lsp-handlers/src/code_action.rs` top-level `local` statements (single and multi-binding) and object-local members in object bodies. - `CodeActionConfig.remove_unused` supports policy modes: - `all` and `nonImportBindings`. + `all`, `importBindings`, and `nonImportBindings`. +- `CodeActionConfig.remove_unused_comments` supports comment retention modes: + `none`, `above`, `below`, and `all`. - Uses current document plus selected range and diagnostics from request params. - Returned through sync `textDocument/codeAction`. 
From 2e37686c05d06449019e67d64ada00c304bf472e Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Wed, 11 Feb 2026 13:06:11 +0000 Subject: [PATCH 077/210] lsp-tests: add typed timeline scenario model and parser --- crates/jrsonnet-lsp/tests/framework/mod.rs | 1 + .../jrsonnet-lsp/tests/framework/scenario.rs | 332 ++++++++++++++++++ 2 files changed, 333 insertions(+) create mode 100644 crates/jrsonnet-lsp/tests/framework/scenario.rs diff --git a/crates/jrsonnet-lsp/tests/framework/mod.rs b/crates/jrsonnet-lsp/tests/framework/mod.rs index 88eb1901..99a35db6 100644 --- a/crates/jrsonnet-lsp/tests/framework/mod.rs +++ b/crates/jrsonnet-lsp/tests/framework/mod.rs @@ -34,6 +34,7 @@ pub mod assertions; pub mod parser; +pub mod scenario; use std::fmt::Write as _; diff --git a/crates/jrsonnet-lsp/tests/framework/scenario.rs b/crates/jrsonnet-lsp/tests/framework/scenario.rs new file mode 100644 index 00000000..76a25582 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/framework/scenario.rs @@ -0,0 +1,332 @@ +//! Scenario model/parser for multi-file, multi-step LSP timeline tests. +//! +//! The format is JSON and maps directly to these types via serde. +//! +//! Example: +//! ```json +//! { +//! "steps": [ +//! { "step": "open", "uri": "file:///main.jsonnet", "text": "local x = 1; x" }, +//! { +//! "step": "requestCodeAction", +//! "id": 2, +//! "uri": "file:///main.jsonnet", +//! "range": { "start": { "line": 0, "character": 0 }, "end": { "line": 0, "character": 12 } }, +//! "diagnostics": [] +//! }, +//! { "step": "expectCodeAction", "id": 2, "result": null } +//! ] +//! } +//! ``` + +use lsp_types::{ + CodeActionKind, CodeActionOrCommand, Diagnostic, Position, Range, + TextDocumentContentChangeEvent, +}; +use serde::{Deserialize, Serialize}; + +/// A full timeline scenario. 
+#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +#[serde(deny_unknown_fields)] +pub struct Scenario { + pub steps: Vec, +} + +impl Scenario { + #[must_use] + pub fn new(steps: Vec) -> Self { + Self { steps } + } +} + +/// Parse a scenario from JSON text. +pub fn parse_scenario_json(input: &str) -> Result { + serde_json::from_str(input) +} + +/// One timeline step. +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +#[serde(tag = "step", rename_all = "camelCase")] +pub enum ScenarioStep { + Open(OpenStep), + ChangeFull(ChangeFullStep), + ChangeIncremental(ChangeIncrementalStep), + Save(SaveStep), + Close(CloseStep), + Config(ConfigStep), + RequestCodeAction(RequestCodeActionStep), + ExpectCodeAction(ExpectCodeActionStep), + ExpectDiagnostics(ExpectDiagnosticsStep), + DiagnosticsSettled(DiagnosticsSettledStep), +} + +/// `textDocument/didOpen`. +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(deny_unknown_fields)] +pub struct OpenStep { + pub uri: String, + pub text: String, + #[serde(default = "default_language_id")] + pub language_id: String, + #[serde(default = "default_open_version")] + pub version: i32, +} + +const fn default_open_version() -> i32 { + 1 +} + +fn default_language_id() -> String { + "jsonnet".to_string() +} + +/// `textDocument/didChange` full-document replacement. +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(deny_unknown_fields)] +pub struct ChangeFullStep { + pub uri: String, + pub text: String, + pub version: i32, +} + +impl ChangeFullStep { + #[must_use] + pub fn as_change_event(&self) -> TextDocumentContentChangeEvent { + TextDocumentContentChangeEvent { + range: None, + range_length: None, + text: self.text.clone(), + } + } +} + +/// `textDocument/didChange` incremental edit. 
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(deny_unknown_fields)] +pub struct ChangeIncrementalStep { + pub uri: String, + pub range: Range, + pub text: String, + pub version: i32, +} + +impl ChangeIncrementalStep { + #[must_use] + pub fn as_change_event(&self) -> TextDocumentContentChangeEvent { + TextDocumentContentChangeEvent { + range: Some(self.range), + range_length: None, + text: self.text.clone(), + } + } +} + +/// `textDocument/didSave`. +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(deny_unknown_fields)] +pub struct SaveStep { + pub uri: String, + pub text: Option, +} + +/// `textDocument/didClose`. +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(deny_unknown_fields)] +pub struct CloseStep { + pub uri: String, +} + +/// `workspace/didChangeConfiguration`. +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(deny_unknown_fields)] +pub struct ConfigStep { + pub settings: serde_json::Value, +} + +/// `textDocument/codeAction` request. +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(deny_unknown_fields)] +pub struct RequestCodeActionStep { + pub id: i32, + pub uri: String, + pub range: Range, + #[serde(default)] + pub diagnostics: Vec, + pub only: Option>, +} + +/// Expected `textDocument/codeAction` response. +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +#[serde(deny_unknown_fields)] +pub struct ExpectCodeActionStep { + pub id: i32, + pub result: Option>, +} + +/// Expected diagnostics notification for a URI. +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(deny_unknown_fields)] +pub struct ExpectDiagnosticsStep { + pub uri: String, + #[serde(default)] + pub diagnostics: Vec, +} + +/// Barrier for "no new diagnostics arrive for idle_ms before timeout_ms". 
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] +#[serde(deny_unknown_fields)] +pub struct DiagnosticsSettledStep { + #[serde(default = "default_timeout_ms")] + pub timeout_ms: u64, + #[serde(default = "default_idle_ms")] + pub idle_ms: u64, +} + +const fn default_timeout_ms() -> u64 { + 1_000 +} + +const fn default_idle_ms() -> u64 { + 50 +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_parse_minimal_scenario() { + let parsed = parse_scenario_json( + r#"{ + "steps": [ + { + "step": "open", + "uri": "file:///main.jsonnet", + "text": "local x = 1; x" + }, + { + "step": "requestCodeAction", + "id": 2, + "uri": "file:///main.jsonnet", + "range": { + "start": { "line": 0, "character": 0 }, + "end": { "line": 0, "character": 12 } + }, + "diagnostics": [], + "only": null + }, + { + "step": "expectCodeAction", + "id": 2, + "result": null + } + ] + }"#, + ) + .unwrap(); + + assert_eq!( + parsed, + Scenario::new(vec![ + ScenarioStep::Open(OpenStep { + uri: "file:///main.jsonnet".to_string(), + text: "local x = 1; x".to_string(), + language_id: "jsonnet".to_string(), + version: 1, + }), + ScenarioStep::RequestCodeAction(RequestCodeActionStep { + id: 2, + uri: "file:///main.jsonnet".to_string(), + range: Range { + start: Position { + line: 0, + character: 0, + }, + end: Position { + line: 0, + character: 12, + }, + }, + diagnostics: Vec::new(), + only: None, + }), + ScenarioStep::ExpectCodeAction(ExpectCodeActionStep { + id: 2, + result: None, + }), + ]) + ); + } + + #[test] + fn test_change_step_conversions_are_structural() { + let full = ChangeFullStep { + uri: "file:///main.jsonnet".to_string(), + text: ["{", "a:1", "}"].concat(), + version: 3, + }; + assert_eq!( + full.as_change_event(), + TextDocumentContentChangeEvent { + range: None, + range_length: None, + text: ["{", "a:1", "}"].concat(), + } + ); + + let incremental = ChangeIncrementalStep { + uri: "file:///main.jsonnet".to_string(), + range: Range { + start: Position { + 
line: 0, + character: 3, + }, + end: Position { + line: 0, + character: 4, + }, + }, + text: "2".to_string(), + version: 4, + }; + assert_eq!( + incremental.as_change_event(), + TextDocumentContentChangeEvent { + range: Some(Range { + start: Position { + line: 0, + character: 3, + }, + end: Position { + line: 0, + character: 4, + }, + }), + range_length: None, + text: "2".to_string(), + } + ); + } + + #[test] + fn test_diagnostics_settled_defaults() { + let parsed = parse_scenario_json( + r#"{ + "steps": [ + { "step": "diagnosticsSettled" } + ] + }"#, + ) + .unwrap(); + + assert_eq!( + parsed, + Scenario::new(vec![ScenarioStep::DiagnosticsSettled( + DiagnosticsSettledStep { + timeout_ms: 1_000, + idle_ms: 50, + }, + )]) + ); + } +} From e281ed0b9eed42f9efa5d021bdca3797e9eca2a8 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Wed, 11 Feb 2026 13:14:39 +0000 Subject: [PATCH 078/210] lsp-tests: add scenario runner and async timeline coverage --- crates/jrsonnet-lsp/tests/framework/mod.rs | 1 + .../jrsonnet-lsp/tests/framework/scenario.rs | 85 +- .../tests/framework/scenario_runner.rs | 942 ++++++++++++++++++ docs/lsp/ARCHITECTURE.md | 14 + 4 files changed, 1041 insertions(+), 1 deletion(-) create mode 100644 crates/jrsonnet-lsp/tests/framework/scenario_runner.rs diff --git a/crates/jrsonnet-lsp/tests/framework/mod.rs b/crates/jrsonnet-lsp/tests/framework/mod.rs index 99a35db6..04673fb5 100644 --- a/crates/jrsonnet-lsp/tests/framework/mod.rs +++ b/crates/jrsonnet-lsp/tests/framework/mod.rs @@ -35,6 +35,7 @@ pub mod assertions; pub mod parser; pub mod scenario; +pub mod scenario_runner; use std::fmt::Write as _; diff --git a/crates/jrsonnet-lsp/tests/framework/scenario.rs b/crates/jrsonnet-lsp/tests/framework/scenario.rs index 76a25582..2c692059 100644 --- a/crates/jrsonnet-lsp/tests/framework/scenario.rs +++ b/crates/jrsonnet-lsp/tests/framework/scenario.rs @@ -20,7 +20,7 @@ //! 
``` use lsp_types::{ - CodeActionKind, CodeActionOrCommand, Diagnostic, Position, Range, + CodeActionKind, CodeActionOrCommand, Diagnostic, Location, Position, Range, TextDocumentContentChangeEvent, }; use serde::{Deserialize, Serialize}; @@ -56,6 +56,8 @@ pub enum ScenarioStep { Config(ConfigStep), RequestCodeAction(RequestCodeActionStep), ExpectCodeAction(ExpectCodeActionStep), + RequestReferences(RequestReferencesStep), + ExpectReferences(ExpectReferencesStep), ExpectDiagnostics(ExpectDiagnosticsStep), DiagnosticsSettled(DiagnosticsSettledStep), } @@ -163,6 +165,25 @@ pub struct ExpectCodeActionStep { pub result: Option>, } +/// `textDocument/references` request. +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(deny_unknown_fields)] +pub struct RequestReferencesStep { + pub id: i32, + pub uri: String, + pub position: Position, + #[serde(default)] + pub include_declaration: bool, +} + +/// Expected `textDocument/references` response. +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(deny_unknown_fields)] +pub struct ExpectReferencesStep { + pub id: i32, + pub result: Option>, +} + /// Expected diagnostics notification for a URI. 
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] #[serde(deny_unknown_fields)] @@ -329,4 +350,66 @@ mod tests { )]) ); } + + #[test] + fn test_parse_references_request_and_expectation() { + let parsed = parse_scenario_json( + r#"{ + "steps": [ + { + "step": "requestReferences", + "id": 4, + "uri": "file:///lib.jsonnet", + "position": { "line": 0, "character": 6 }, + "include_declaration": false + }, + { + "step": "expectReferences", + "id": 4, + "result": [ + { + "uri": "file:///lib.jsonnet", + "range": { + "start": { "line": 0, "character": 18 }, + "end": { "line": 0, "character": 24 } + } + } + ] + } + ] + }"#, + ) + .unwrap(); + + assert_eq!( + parsed, + Scenario::new(vec![ + ScenarioStep::RequestReferences(RequestReferencesStep { + id: 4, + uri: "file:///lib.jsonnet".to_string(), + position: Position { + line: 0, + character: 6, + }, + include_declaration: false, + }), + ScenarioStep::ExpectReferences(ExpectReferencesStep { + id: 4, + result: Some(vec![Location { + uri: "file:///lib.jsonnet".parse().unwrap(), + range: Range { + start: Position { + line: 0, + character: 18, + }, + end: Position { + line: 0, + character: 24, + }, + }, + }]), + }), + ]) + ); + } } diff --git a/crates/jrsonnet-lsp/tests/framework/scenario_runner.rs b/crates/jrsonnet-lsp/tests/framework/scenario_runner.rs new file mode 100644 index 00000000..28bebe18 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/framework/scenario_runner.rs @@ -0,0 +1,942 @@ +//! In-memory runner for `Scenario` timelines. 
+ +use std::{ + collections::{HashMap, VecDeque}, + thread, + time::{Duration, Instant}, +}; + +use crossbeam_channel::RecvTimeoutError; +use lsp_server::{Connection, Message, Notification, Request, Response}; +use lsp_types::{ + notification::{ + DidChangeConfiguration, DidChangeTextDocument, DidCloseTextDocument, DidOpenTextDocument, + DidSaveTextDocument, Notification as _, PublishDiagnostics, + }, + request::{CodeActionRequest, Initialize, References, Request as _, Shutdown}, + CodeActionContext, CodeActionParams, DidChangeConfigurationParams, DidChangeTextDocumentParams, + DidCloseTextDocumentParams, DidOpenTextDocumentParams, DidSaveTextDocumentParams, + InitializeParams, PartialResultParams, ReferenceContext, ReferenceParams, + TextDocumentIdentifier, TextDocumentItem, TextDocumentPositionParams, WorkDoneProgressParams, +}; + +use crate::framework::scenario::{ + ChangeFullStep, ChangeIncrementalStep, CloseStep, ConfigStep, DiagnosticsSettledStep, + ExpectCodeActionStep, ExpectDiagnosticsStep, ExpectReferencesStep, OpenStep, + RequestCodeActionStep, RequestReferencesStep, SaveStep, Scenario, ScenarioStep, +}; + +/// Run a full timeline scenario against an in-memory LSP server. 
+pub fn run_scenario(scenario: &Scenario) -> Result<(), String> { + let mut runner = ScenarioRunner::start(); + runner.initialize()?; + for step in &scenario.steps { + runner.run_step(step)?; + } + runner.shutdown() +} + +struct ScenarioRunner { + conn: Connection, + server_thread: thread::JoinHandle<()>, + pending_responses: Vec, + pending_diagnostics: HashMap>, + last_diagnostic_at: Option, +} + +impl ScenarioRunner { + fn start() -> Self { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = thread::spawn(move || { + let server = jrsonnet_lsp::server::Server::new(server_conn); + let _ = server.run(); + }); + Self { + conn: client_conn, + server_thread, + pending_responses: Vec::new(), + pending_diagnostics: HashMap::new(), + last_diagnostic_at: None, + } + } + + fn initialize(&mut self) -> Result<(), String> { + let params = serde_json::to_value(InitializeParams::default()) + .map_err(|error| format!("serialize initialize params: {error}"))?; + self.send_request(Request::new( + 1.into(), + Initialize::METHOD.to_string(), + params, + ))?; + let response = self.wait_response(1, Duration::from_secs(5))?; + if response.error.is_some() { + return Err(format!("initialize returned error: {:?}", response.error)); + } + self.send_notification(Notification::new( + "initialized".to_string(), + serde_json::json!({}), + )) + } + + fn shutdown(mut self) -> Result<(), String> { + self.send_request(Request::new( + 9_999.into(), + Shutdown::METHOD.to_string(), + serde_json::Value::Null, + ))?; + let response = self.wait_response(9_999, Duration::from_secs(5))?; + if response.error.is_some() { + return Err(format!("shutdown returned error: {:?}", response.error)); + } + self.send_notification(Notification::new( + "exit".to_string(), + serde_json::Value::Null, + ))?; + self.server_thread + .join() + .map_err(|_| "server thread panicked".to_string()) + } + + fn run_step(&mut self, step: &ScenarioStep) -> Result<(), String> { + match step { + 
ScenarioStep::Open(open) => self.step_open(open), + ScenarioStep::ChangeFull(change) => self.step_change_full(change), + ScenarioStep::ChangeIncremental(change) => self.step_change_incremental(change), + ScenarioStep::Save(save) => self.step_save(save), + ScenarioStep::Close(close) => self.step_close(close), + ScenarioStep::Config(config) => self.step_config(config), + ScenarioStep::RequestCodeAction(request) => self.step_request_code_action(request), + ScenarioStep::ExpectCodeAction(expectation) => { + self.step_expect_code_action(expectation) + } + ScenarioStep::RequestReferences(request) => self.step_request_references(request), + ScenarioStep::ExpectReferences(expectation) => self.step_expect_references(expectation), + ScenarioStep::ExpectDiagnostics(expectation) => { + self.step_expect_diagnostics(expectation) + } + ScenarioStep::DiagnosticsSettled(settled) => self.step_diagnostics_settled(*settled), + } + } + + fn step_open(&self, step: &OpenStep) -> Result<(), String> { + let uri: lsp_types::Uri = step + .uri + .parse() + .map_err(|error| format!("parse open uri '{}': {error}", step.uri))?; + let params = DidOpenTextDocumentParams { + text_document: TextDocumentItem { + uri, + language_id: step.language_id.clone(), + version: step.version, + text: step.text.clone(), + }, + }; + let payload = serde_json::to_value(params) + .map_err(|error| format!("serialize didOpen params: {error}"))?; + self.send_notification(Notification::new( + DidOpenTextDocument::METHOD.to_string(), + payload, + )) + } + + fn step_change_full(&self, step: &ChangeFullStep) -> Result<(), String> { + let uri: lsp_types::Uri = step + .uri + .parse() + .map_err(|error| format!("parse didChange(full) uri '{}': {error}", step.uri))?; + let params = DidChangeTextDocumentParams { + text_document: lsp_types::VersionedTextDocumentIdentifier { + uri, + version: step.version, + }, + content_changes: vec![step.as_change_event()], + }; + let payload = serde_json::to_value(params) + .map_err(|error| 
format!("serialize didChange(full) params: {error}"))?; + self.send_notification(Notification::new( + DidChangeTextDocument::METHOD.to_string(), + payload, + )) + } + + fn step_change_incremental(&self, step: &ChangeIncrementalStep) -> Result<(), String> { + let uri: lsp_types::Uri = step + .uri + .parse() + .map_err(|error| format!("parse didChange(incremental) uri '{}': {error}", step.uri))?; + let params = DidChangeTextDocumentParams { + text_document: lsp_types::VersionedTextDocumentIdentifier { + uri, + version: step.version, + }, + content_changes: vec![step.as_change_event()], + }; + let payload = serde_json::to_value(params) + .map_err(|error| format!("serialize didChange(incremental) params: {error}"))?; + self.send_notification(Notification::new( + DidChangeTextDocument::METHOD.to_string(), + payload, + )) + } + + fn step_save(&self, step: &SaveStep) -> Result<(), String> { + let uri: lsp_types::Uri = step + .uri + .parse() + .map_err(|error| format!("parse didSave uri '{}': {error}", step.uri))?; + let params = DidSaveTextDocumentParams { + text_document: TextDocumentIdentifier { uri }, + text: step.text.clone(), + }; + let payload = serde_json::to_value(params) + .map_err(|error| format!("serialize didSave params: {error}"))?; + self.send_notification(Notification::new( + DidSaveTextDocument::METHOD.to_string(), + payload, + )) + } + + fn step_close(&self, step: &CloseStep) -> Result<(), String> { + let uri: lsp_types::Uri = step + .uri + .parse() + .map_err(|error| format!("parse didClose uri '{}': {error}", step.uri))?; + let params = DidCloseTextDocumentParams { + text_document: TextDocumentIdentifier { uri }, + }; + let payload = serde_json::to_value(params) + .map_err(|error| format!("serialize didClose params: {error}"))?; + self.send_notification(Notification::new( + DidCloseTextDocument::METHOD.to_string(), + payload, + )) + } + + fn step_config(&self, step: &ConfigStep) -> Result<(), String> { + let params = DidChangeConfigurationParams { + 
settings: step.settings.clone(), + }; + let payload = serde_json::to_value(params) + .map_err(|error| format!("serialize didChangeConfiguration params: {error}"))?; + self.send_notification(Notification::new( + DidChangeConfiguration::METHOD.to_string(), + payload, + )) + } + + fn step_request_code_action(&self, step: &RequestCodeActionStep) -> Result<(), String> { + let uri: lsp_types::Uri = step + .uri + .parse() + .map_err(|error| format!("parse codeAction uri '{}': {error}", step.uri))?; + let params = CodeActionParams { + text_document: TextDocumentIdentifier { uri }, + range: step.range, + context: CodeActionContext { + diagnostics: step.diagnostics.clone(), + only: step.only.clone(), + trigger_kind: None, + }, + work_done_progress_params: WorkDoneProgressParams::default(), + partial_result_params: PartialResultParams::default(), + }; + let payload = serde_json::to_value(params) + .map_err(|error| format!("serialize codeAction request params: {error}"))?; + self.send_request(Request::new( + step.id.into(), + CodeActionRequest::METHOD.to_string(), + payload, + )) + } + + fn step_request_references(&self, step: &RequestReferencesStep) -> Result<(), String> { + let uri: lsp_types::Uri = step + .uri + .parse() + .map_err(|error| format!("parse references uri '{}': {error}", step.uri))?; + let params = ReferenceParams { + text_document_position: TextDocumentPositionParams { + text_document: TextDocumentIdentifier { uri }, + position: step.position, + }, + context: ReferenceContext { + include_declaration: step.include_declaration, + }, + work_done_progress_params: WorkDoneProgressParams::default(), + partial_result_params: PartialResultParams::default(), + }; + let payload = serde_json::to_value(params) + .map_err(|error| format!("serialize references request params: {error}"))?; + self.send_request(Request::new( + step.id.into(), + References::METHOD.to_string(), + payload, + )) + } + + fn step_expect_code_action(&mut self, step: &ExpectCodeActionStep) -> 
Result<(), String> { + let response = self.wait_response(step.id, Duration::from_secs(5))?; + if let Some(error) = response.error { + return Err(format!( + "codeAction response {id} returned error: {error:?}", + id = step.id + )); + } + let actual: Option> = match response.result { + Some(value) => serde_json::from_value(value).map_err(|error| { + format!( + "deserialize codeAction response result for id {}: {error}", + step.id + ) + })?, + None => None, + }; + if actual != step.result { + return Err(format!( + "codeAction response mismatch for id {}: actual={actual:?} expected={expected:?}", + step.id, + expected = step.result + )); + } + Ok(()) + } + + fn step_expect_references(&mut self, step: &ExpectReferencesStep) -> Result<(), String> { + let response = self.wait_response(step.id, Duration::from_secs(5))?; + if let Some(error) = response.error { + return Err(format!( + "references response {id} returned error: {error:?}", + id = step.id + )); + } + let actual: Option> = match response.result { + Some(value) => serde_json::from_value(value).map_err(|error| { + format!( + "deserialize references response result for id {}: {error}", + step.id + ) + })?, + None => None, + }; + if actual != step.result { + return Err(format!( + "references response mismatch for id {}: actual={actual:?} expected={expected:?}", + step.id, + expected = step.result + )); + } + Ok(()) + } + + fn step_expect_diagnostics(&mut self, step: &ExpectDiagnosticsStep) -> Result<(), String> { + let actual = self.wait_diagnostics_for_uri(&step.uri, Duration::from_secs(5))?; + if actual.diagnostics != step.diagnostics { + return Err(format!( + "diagnostics mismatch for uri {}: actual={actual:?} expected={expected:?}", + step.uri, + expected = step.diagnostics + )); + } + Ok(()) + } + + fn step_diagnostics_settled(&mut self, step: DiagnosticsSettledStep) -> Result<(), String> { + let timeout = Duration::from_millis(step.timeout_ms); + let idle = Duration::from_millis(step.idle_ms); + let start = 
Instant::now(); + let mut last_diagnostic = self.last_diagnostic_at.unwrap_or(start); + + loop { + if start.elapsed() > timeout { + return Err(format!( + "diagnostics did not settle within {}ms", + step.timeout_ms + )); + } + + if last_diagnostic.elapsed() >= idle { + return Ok(()); + } + + let remaining_timeout = timeout.saturating_sub(start.elapsed()); + let remaining_idle = idle.saturating_sub(last_diagnostic.elapsed()); + let wait_for = remaining_timeout.min(remaining_idle); + + match self.conn.receiver.recv_timeout(wait_for) { + Ok(message) => self.capture_background_message(message)?, + Err(RecvTimeoutError::Timeout) => {} + Err(RecvTimeoutError::Disconnected) => { + return Err( + "connection closed while waiting for diagnostics to settle".to_string() + ); + } + } + + if let Some(latest) = self.last_diagnostic_at { + last_diagnostic = latest; + } + } + } + + fn wait_response(&mut self, id: i32, timeout: Duration) -> Result { + if let Some(index) = self + .pending_responses + .iter() + .position(|response| response.id == id.into()) + { + return Ok(self.pending_responses.swap_remove(index)); + } + + let deadline = Instant::now() + timeout; + loop { + let remaining = deadline.saturating_duration_since(Instant::now()); + if remaining.is_zero() { + return Err(format!("timed out waiting for response id {}", id)); + } + match self.conn.receiver.recv_timeout(remaining) { + Ok(Message::Response(response)) if response.id == id.into() => return Ok(response), + Ok(message) => self.capture_background_message(message)?, + Err(RecvTimeoutError::Timeout) => { + return Err(format!("timed out waiting for response id {}", id)); + } + Err(RecvTimeoutError::Disconnected) => { + return Err("connection closed while waiting for response".to_string()); + } + } + } + } + + fn wait_diagnostics_for_uri( + &mut self, + uri: &str, + timeout: Duration, + ) -> Result { + if let Some(queue) = self.pending_diagnostics.get_mut(uri) { + if let Some(params) = queue.pop_front() { + return 
Ok(params); + } + } + + let deadline = Instant::now() + timeout; + loop { + let remaining = deadline.saturating_duration_since(Instant::now()); + if remaining.is_zero() { + return Err(format!("timed out waiting for diagnostics for uri {}", uri)); + } + match self.conn.receiver.recv_timeout(remaining) { + Ok(message) => { + self.capture_background_message(message)?; + if let Some(queue) = self.pending_diagnostics.get_mut(uri) { + if let Some(params) = queue.pop_front() { + return Ok(params); + } + } + } + Err(RecvTimeoutError::Timeout) => { + return Err(format!("timed out waiting for diagnostics for uri {}", uri)); + } + Err(RecvTimeoutError::Disconnected) => { + return Err("connection closed while waiting for diagnostics".to_string()); + } + } + } + } + + fn capture_background_message(&mut self, message: Message) -> Result<(), String> { + match message { + Message::Response(response) => { + self.pending_responses.push(response); + Ok(()) + } + Message::Notification(notification) + if notification.method == PublishDiagnostics::METHOD => + { + let params: lsp_types::PublishDiagnosticsParams = + serde_json::from_value(notification.params) + .map_err(|error| format!("deserialize publish diagnostics: {error}"))?; + let key = params.uri.as_str().to_string(); + self.pending_diagnostics + .entry(key) + .or_default() + .push_back(params); + self.last_diagnostic_at = Some(Instant::now()); + Ok(()) + } + Message::Notification(_) | Message::Request(_) => Ok(()), + } + } + + fn send_notification(&self, notification: Notification) -> Result<(), String> { + self.conn + .sender + .send(Message::Notification(notification)) + .map_err(|error| format!("send notification: {error}")) + } + + fn send_request(&self, request: Request) -> Result<(), String> { + self.conn + .sender + .send(Message::Request(request)) + .map_err(|error| format!("send request: {error}")) + } +} + +#[cfg(test)] +mod tests { + use std::fs; + + use lsp_types::{ + CodeAction, CodeActionKind, CodeActionOrCommand, 
Diagnostic, DiagnosticSeverity, Location, + NumberOrString, Position, Range, TextEdit, WorkspaceEdit, + }; + use tempfile::TempDir; + + use super::*; + use crate::framework::scenario::{ + CloseStep, ConfigStep, DiagnosticsSettledStep, ExpectCodeActionStep, ExpectReferencesStep, + OpenStep, RequestCodeActionStep, RequestReferencesStep, Scenario, ScenarioStep, + }; + + fn unused_variable_diagnostic() -> Diagnostic { + Diagnostic { + range: Range { + start: Position { + line: 0, + character: 6, + }, + end: Position { + line: 0, + character: 7, + }, + }, + severity: Some(DiagnosticSeverity::WARNING), + code: Some(NumberOrString::String("unused-variable".to_string())), + code_description: None, + source: Some("jrsonnet-lint".to_string()), + message: "unused variable".to_string(), + related_information: None, + tags: None, + data: None, + } + } + + fn expected_unused_actions(uri: &str, diagnostic: Diagnostic) -> Vec { + vec![ + CodeActionOrCommand::CodeAction(CodeAction { + title: "Prefix `x` with `_`".to_string(), + kind: Some(CodeActionKind::QUICKFIX), + diagnostics: Some(vec![diagnostic.clone()]), + edit: Some(WorkspaceEdit { + changes: Some(HashMap::from([( + uri.parse().unwrap(), + vec![TextEdit { + range: Range { + start: Position { + line: 0, + character: 6, + }, + end: Position { + line: 0, + character: 7, + }, + }, + new_text: "_x".to_string(), + }], + )])), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(true), + disabled: None, + data: None, + }), + CodeActionOrCommand::CodeAction(CodeAction { + title: "Remove unused binding `x`".to_string(), + kind: Some(CodeActionKind::QUICKFIX), + diagnostics: Some(vec![diagnostic.clone()]), + edit: Some(WorkspaceEdit { + changes: Some(HashMap::from([( + uri.parse().unwrap(), + vec![TextEdit { + range: Range { + start: Position { + line: 0, + character: 0, + }, + end: Position { + line: 0, + character: 32, + }, + }, + new_text: String::new(), + }], + )])), + 
document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(false), + disabled: None, + data: None, + }), + CodeActionOrCommand::CodeAction(CodeAction { + title: "Remove all unused bindings".to_string(), + kind: Some(CodeActionKind::SOURCE_FIX_ALL), + diagnostics: Some(vec![diagnostic]), + edit: Some(WorkspaceEdit { + changes: Some(HashMap::from([( + uri.parse().unwrap(), + vec![TextEdit { + range: Range { + start: Position { + line: 0, + character: 0, + }, + end: Position { + line: 0, + character: 32, + }, + }, + new_text: String::new(), + }], + )])), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(false), + disabled: None, + data: None, + }), + ] + } + + fn expected_prefix_only_action(uri: &str, diagnostic: Diagnostic) -> Vec { + vec![CodeActionOrCommand::CodeAction(CodeAction { + title: "Prefix `x` with `_`".to_string(), + kind: Some(CodeActionKind::QUICKFIX), + diagnostics: Some(vec![diagnostic]), + edit: Some(WorkspaceEdit { + changes: Some(HashMap::from([( + uri.parse().unwrap(), + vec![TextEdit { + range: Range { + start: Position { + line: 0, + character: 6, + }, + end: Position { + line: 0, + character: 7, + }, + }, + new_text: "_x".to_string(), + }], + )])), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(true), + disabled: None, + data: None, + })] + } + + #[test] + fn test_runner_executes_open_request_expect_sequence() { + let uri = "file:///scenario-runner.jsonnet"; + let diagnostic = unused_variable_diagnostic(); + let expected = vec![ + CodeActionOrCommand::CodeAction(CodeAction { + title: "Prefix `x` with `_`".to_string(), + kind: Some(CodeActionKind::QUICKFIX), + diagnostics: Some(vec![diagnostic.clone()]), + edit: Some(WorkspaceEdit { + changes: Some(HashMap::from([( + uri.parse().unwrap(), + vec![TextEdit { + range: Range { + start: Position { + line: 0, + character: 6, + }, + end: Position { + line: 0, + 
character: 7, + }, + }, + new_text: "_x".to_string(), + }], + )])), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(true), + disabled: None, + data: None, + }), + CodeActionOrCommand::CodeAction(CodeAction { + title: "Remove unused binding `x`".to_string(), + kind: Some(CodeActionKind::QUICKFIX), + diagnostics: Some(vec![diagnostic.clone()]), + edit: Some(WorkspaceEdit { + changes: Some(HashMap::from([( + uri.parse().unwrap(), + vec![TextEdit { + range: Range { + start: Position { + line: 0, + character: 0, + }, + end: Position { + line: 0, + character: 11, + }, + }, + new_text: String::new(), + }], + )])), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(false), + disabled: None, + data: None, + }), + CodeActionOrCommand::CodeAction(CodeAction { + title: "Remove all unused bindings".to_string(), + kind: Some(CodeActionKind::SOURCE_FIX_ALL), + diagnostics: Some(vec![diagnostic.clone()]), + edit: Some(WorkspaceEdit { + changes: Some(HashMap::from([( + uri.parse().unwrap(), + vec![TextEdit { + range: Range { + start: Position { + line: 0, + character: 0, + }, + end: Position { + line: 0, + character: 11, + }, + }, + new_text: String::new(), + }], + )])), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(false), + disabled: None, + data: None, + }), + ]; + + let scenario = Scenario::new(vec![ + ScenarioStep::Open(OpenStep { + uri: uri.to_string(), + text: "local x = 1; 42".to_string(), + language_id: "jsonnet".to_string(), + version: 1, + }), + ScenarioStep::DiagnosticsSettled(DiagnosticsSettledStep { + timeout_ms: 1_000, + idle_ms: 25, + }), + ScenarioStep::RequestCodeAction(RequestCodeActionStep { + id: 2, + uri: uri.to_string(), + range: Range { + start: Position { + line: 0, + character: 0, + }, + end: Position { + line: 0, + character: 20, + }, + }, + diagnostics: vec![diagnostic], + only: None, + }), + 
ScenarioStep::ExpectCodeAction(ExpectCodeActionStep { + id: 2, + result: Some(expected), + }), + ]); + + let result = run_scenario(&scenario); + assert_eq!(result, Ok(())); + } + + #[test] + fn test_runner_timeline_applies_config_change_for_code_actions() { + let uri = "file:///scenario-code-action-policy.jsonnet"; + let diagnostic = unused_variable_diagnostic(); + let scenario = Scenario::new(vec![ + ScenarioStep::Open(OpenStep { + uri: uri.to_string(), + text: "local x = import \"foo.libsonnet\"; 42".to_string(), + language_id: "jsonnet".to_string(), + version: 1, + }), + ScenarioStep::DiagnosticsSettled(DiagnosticsSettledStep { + timeout_ms: 1_000, + idle_ms: 25, + }), + ScenarioStep::RequestCodeAction(RequestCodeActionStep { + id: 10, + uri: uri.to_string(), + range: Range { + start: Position { + line: 0, + character: 0, + }, + end: Position { + line: 0, + character: 35, + }, + }, + diagnostics: vec![diagnostic.clone()], + only: None, + }), + ScenarioStep::ExpectCodeAction(ExpectCodeActionStep { + id: 10, + result: Some(expected_unused_actions(uri, diagnostic.clone())), + }), + ScenarioStep::Config(ConfigStep { + settings: serde_json::json!({ + "jsonnet": { + "codeActions": { + "removeUnused": "nonImportBindings" + } + } + }), + }), + ScenarioStep::RequestCodeAction(RequestCodeActionStep { + id: 11, + uri: uri.to_string(), + range: Range { + start: Position { + line: 0, + character: 0, + }, + end: Position { + line: 0, + character: 35, + }, + }, + diagnostics: vec![diagnostic.clone()], + only: None, + }), + ScenarioStep::ExpectCodeAction(ExpectCodeActionStep { + id: 11, + result: Some(expected_prefix_only_action(uri, diagnostic)), + }), + ]); + + let result = run_scenario(&scenario); + assert_eq!(result, Ok(())); + } + + #[test] + fn test_runner_timeline_preserves_cross_file_references_after_close() { + let tmp = TempDir::new().expect("tempdir should be created"); + let lib_path = tmp.path().join("lib.jsonnet"); + let main_path = 
tmp.path().join("main.jsonnet"); + let lib_text = "local target = 1; target"; + let main_text = "local lib = import 'lib.jsonnet'; lib.target"; + fs::write(&lib_path, lib_text).expect("lib should be written"); + fs::write(&main_path, main_text).expect("main should be written"); + + let lib_uri = format!( + "file://{}", + lib_path + .canonicalize() + .expect("lib should canonicalize") + .to_string_lossy() + ); + let main_uri = format!( + "file://{}", + main_path + .canonicalize() + .expect("main should canonicalize") + .to_string_lossy() + ); + + let scenario = Scenario::new(vec![ + ScenarioStep::Open(OpenStep { + uri: lib_uri.clone(), + text: lib_text.to_string(), + language_id: "jsonnet".to_string(), + version: 1, + }), + ScenarioStep::Open(OpenStep { + uri: main_uri.clone(), + text: main_text.to_string(), + language_id: "jsonnet".to_string(), + version: 1, + }), + ScenarioStep::DiagnosticsSettled(DiagnosticsSettledStep { + timeout_ms: 1_000, + idle_ms: 25, + }), + ScenarioStep::Close(CloseStep { + uri: main_uri.clone(), + }), + ScenarioStep::DiagnosticsSettled(DiagnosticsSettledStep { + timeout_ms: 1_000, + idle_ms: 25, + }), + ScenarioStep::RequestReferences(RequestReferencesStep { + id: 20, + uri: lib_uri.clone(), + position: Position { + line: 0, + character: 6, + }, + include_declaration: false, + }), + ScenarioStep::ExpectReferences(ExpectReferencesStep { + id: 20, + result: Some(vec![ + Location { + uri: lib_uri.parse().unwrap(), + range: Range { + start: Position { + line: 0, + character: 18, + }, + end: Position { + line: 0, + character: 24, + }, + }, + }, + Location { + uri: main_uri.parse().unwrap(), + range: Range { + start: Position { + line: 0, + character: 38, + }, + end: Position { + line: 0, + character: 44, + }, + }, + }, + ]), + }), + ]); + + let result = run_scenario(&scenario); + assert_eq!(result, Ok(())); + } +} diff --git a/docs/lsp/ARCHITECTURE.md b/docs/lsp/ARCHITECTURE.md index 6df68b97..febf3ae9 100644 --- a/docs/lsp/ARCHITECTURE.md +++ 
b/docs/lsp/ARCHITECTURE.md @@ -333,6 +333,20 @@ Configuration can arrive via initialization options or `workspace/didChangeConfiguration` settings payloads. The update logic accepts both flat and namespaced settings (`jsonnet`, `jsonnet-language-server`). +## Timeline Test Harness + +The LSP integration test framework now includes a typed timeline model and +runner: + +- `crates/jrsonnet-lsp/tests/framework/scenario.rs` +- `crates/jrsonnet-lsp/tests/framework/scenario_runner.rs` + +`scenario.rs` defines JSON-deserializable steps for +open/change/save/close/config/request/expect flows. +`scenario_runner.rs` executes those steps against an in-memory server +connection, buffers out-of-order responses/diagnostics, and provides a +`diagnosticsSettled` idle barrier for deterministic async sequencing. + ## Execute Commands Advertised commands: From 9783b3ca5f1d79356cdce0007a7bea8ac563652d Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Wed, 11 Feb 2026 13:24:39 +0000 Subject: [PATCH 079/210] lsp-formatting: add workspace contract engine and typed context --- .../jrsonnet-lsp-handlers/src/formatting.rs | 279 +++++++++++++++--- crates/jrsonnet-lsp-handlers/src/lib.rs | 5 +- crates/jrsonnet-lsp/src/config.rs | 12 +- crates/jrsonnet-lsp/src/server.rs | 14 +- docs/lsp/ARCHITECTURE.md | 15 +- docs/lsp/HANDLERS.md | 10 +- 6 files changed, 289 insertions(+), 46 deletions(-) diff --git a/crates/jrsonnet-lsp-handlers/src/formatting.rs b/crates/jrsonnet-lsp-handlers/src/formatting.rs index a1461bdd..03de3bf9 100644 --- a/crates/jrsonnet-lsp-handlers/src/formatting.rs +++ b/crates/jrsonnet-lsp-handlers/src/formatting.rs @@ -2,7 +2,11 @@ //! //! Formats Jsonnet code using an external formatter (jrsonnet-fmt, jsonnetfmt, etc.). 
-use std::process::{Command, Stdio}; +use std::{ + ffi::OsStr, + path::{Path, PathBuf}, + process::{Command, Stdio}, +}; use lsp_types::{Position, Range, TextEdit}; use serde::{Deserialize, Serialize}; @@ -11,6 +15,27 @@ fn to_u32(value: usize) -> u32 { u32::try_from(value).unwrap_or(u32::MAX) } +/// Formatter execution mode. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] +pub enum FormatterEngine { + /// Resolve formatter via PATH (auto mode). + #[serde(rename = "path", alias = "auto", alias = "auto-path")] + Path, + /// Use workspace-local contract: `{workspace}/bin/jsonnetfmt -stdio `. + #[serde( + rename = "bin-jsonnetfmt-stdio", + alias = "workspace-jsonnetfmt-stdio", + alias = "workspaceBinJsonnetfmtStdio" + )] + BinJsonnetfmtStdio, +} + +impl Default for FormatterEngine { + fn default() -> Self { + Self::Path + } +} + /// Formatting configuration options. /// /// These options correspond to the go-jsonnet formatter (jsonnetfmt) options. @@ -69,6 +94,37 @@ pub struct FormattingConfig { /// Path to the formatter binary (default: searches PATH for jrsonnet-fmt or jsonnetfmt). #[serde(alias = "FormatterPath")] pub formatter_path: Option, + + /// Formatter engine mode. + /// + /// - `None` or `path`: try `jrsonnet-fmt` then `jsonnetfmt` in `PATH` + /// - `bin-jsonnetfmt-stdio`: run `{workspaceRoot}/bin/jsonnetfmt -stdio ` + #[serde(alias = "FormatterEngine")] + pub formatter_engine: Option, +} + +/// Context required for formatter resolution. +#[derive(Debug, Clone, Copy, Default)] +pub struct FormattingContext<'a> { + /// Absolute path to the document being formatted. + pub document_path: Option<&'a Path>, + /// Known workspace roots from initialization. 
+ pub workspace_roots: &'a [PathBuf], +} + +impl<'a> FormattingContext<'a> { + #[must_use] + pub fn detached() -> Self { + Self::default() + } + + #[must_use] + pub fn for_document(document_path: &'a Path, workspace_roots: &'a [PathBuf]) -> Self { + Self { + document_path: Some(document_path), + workspace_roots, + } + } } /// Format a Jsonnet document with default configuration. @@ -77,7 +133,11 @@ pub struct FormattingConfig { /// On error, returns None. #[must_use] pub fn format_document(text: &str) -> Option> { - format_document_with_config(text, &FormattingConfig::default()) + format_document_with_config( + text, + &FormattingConfig::default(), + FormattingContext::detached(), + ) } /// Format a Jsonnet document with the given configuration. @@ -85,9 +145,13 @@ pub fn format_document(text: &str) -> Option> { /// Returns a list of text edits to apply to the document. /// On error, returns None. #[must_use] -pub fn format_document_with_config(text: &str, config: &FormattingConfig) -> Option> { +pub fn format_document_with_config( + text: &str, + config: &FormattingConfig, + context: FormattingContext<'_>, +) -> Option> { // Try to run the formatter - let formatted = run_formatter(text, config)?; + let formatted = run_formatter(text, config, context)?; if formatted == text { // No changes needed @@ -117,23 +181,39 @@ pub fn format_document_with_config(text: &str, config: &FormattingConfig) -> Opt /// Run the formatter on the input text. /// /// If `config.formatter_path` is set, that formatter is used directly. -/// Otherwise, searches PATH for jrsonnet-fmt or jsonnetfmt. -fn run_formatter(input: &str, config: &FormattingConfig) -> Option { +/// Otherwise, formatter behavior is selected by `formatter_engine`. 
+fn run_formatter( + input: &str, + config: &FormattingConfig, + context: FormattingContext<'_>, +) -> Option { // If a custom formatter path is provided, use it directly if let Some(path) = &config.formatter_path { - return try_run_formatter_binary(path, input, config); + let args = build_formatter_args(config); + return try_run_formatter_binary(path.as_str(), &args, None, input); } - // Try common locations for the formatter - let formatter_names = ["jrsonnet-fmt", "jsonnetfmt"]; - - for name in &formatter_names { - if let Some(result) = try_run_formatter_binary(name, input, config) { - return Some(result); + match config.formatter_engine.unwrap_or_default() { + FormatterEngine::Path => { + let args = build_formatter_args(config); + for name in ["jrsonnet-fmt", "jsonnetfmt"] { + if let Some(result) = try_run_formatter_binary(name, &args, None, input) { + return Some(result); + } + } + None + } + FormatterEngine::BinJsonnetfmtStdio => { + let document_path = context.document_path?; + let workspace_root = select_workspace_root(document_path, context.workspace_roots)?; + let formatter = workspace_root.join("bin").join("jsonnetfmt"); + let args = vec![ + "-stdio".to_string(), + document_path.to_string_lossy().into_owned(), + ]; + try_run_formatter_binary(formatter.as_os_str(), &args, Some(workspace_root), input) } } - - None } /// Build CLI arguments for the formatter based on config. @@ -212,11 +292,21 @@ fn build_formatter_args(config: &FormattingConfig) -> Vec { } /// Try to run a specific formatter binary. 
-fn try_run_formatter_binary(name: &str, input: &str, config: &FormattingConfig) -> Option { - let args = build_formatter_args(config); - - let mut child = Command::new(name) - .args(&args) +fn try_run_formatter_binary( + program: S, + args: &[String], + current_dir: Option<&Path>, + input: &str, +) -> Option +where + S: AsRef, +{ + let mut command = Command::new(program); + command.args(args); + if let Some(current_dir) = current_dir { + command.current_dir(current_dir); + } + let mut child = command .stdin(Stdio::piped()) .stdout(Stdio::piped()) .stderr(Stdio::piped()) @@ -227,6 +317,7 @@ fn try_run_formatter_binary(name: &str, input: &str, config: &FormattingConfig) if let Some(mut stdin) = child.stdin.take() { use std::io::Write; stdin.write_all(input.as_bytes()).ok()?; + drop(stdin); } let output = child.wait_with_output().ok()?; @@ -238,29 +329,145 @@ fn try_run_formatter_binary(name: &str, input: &str, config: &FormattingConfig) } } +fn select_workspace_root<'a>( + document_path: &Path, + workspace_roots: &'a [PathBuf], +) -> Option<&'a PathBuf> { + workspace_roots + .iter() + .filter(|root| document_path.starts_with(root)) + .max_by_key(|root| root.components().count()) + .or_else(|| workspace_roots.first()) +} + #[cfg(test)] mod tests { + use std::{fs, path::Path}; + use super::*; #[test] - fn test_format_already_formatted() { - // This test only passes if jrsonnet-fmt is available - let code = "{\n foo: 1,\n}\n"; - - if let Some(edits) = format_document(code) { - // Either no edits (already formatted) or some edits - // We can't assert specific behavior without the formatter - let _ = edits; - } - // If formatter not available, test is skipped implicitly + fn test_select_workspace_root_prefers_deepest_match() { + let roots = vec![ + PathBuf::from("/workspace"), + PathBuf::from("/workspace/team/project"), + ]; + let document = Path::new("/workspace/team/project/src/main.jsonnet"); + + assert_eq!( + select_workspace_root(document, &roots), + 
Some(&PathBuf::from("/workspace/team/project")) + ); + } + + #[test] + fn test_format_contract_engine_without_document_context_returns_none() { + let config = FormattingConfig { + formatter_engine: Some(FormatterEngine::BinJsonnetfmtStdio), + ..FormattingConfig::default() + }; + + assert_eq!( + format_document_with_config("{}", &config, FormattingContext::detached()), + None + ); + } + + #[cfg(unix)] + fn write_executable_script(path: &Path, body: &str) { + use std::os::unix::fs::PermissionsExt; + + fs::write(path, body).unwrap(); + let permissions = fs::Permissions::from_mode(0o755); + fs::set_permissions(path, permissions).unwrap(); + } + + fn full_replacement_edit(old: &str, new_text: String) -> Vec { + vec![TextEdit { + range: Range { + start: Position { + line: 0, + character: 0, + }, + end: Position { + line: 0, + character: to_u32(old.len()), + }, + }, + new_text, + }] + } + + #[cfg(unix)] + #[test] + fn test_format_contract_engine_uses_workspace_bin_jsonnetfmt_stdio() { + let temp_dir = tempfile::tempdir().unwrap(); + let root = temp_dir.path(); + let bin_dir = root.join("bin"); + let src_dir = root.join("src"); + fs::create_dir_all(&bin_dir).unwrap(); + fs::create_dir_all(&src_dir).unwrap(); + + let formatter_path = bin_dir.join("jsonnetfmt"); + write_executable_script( + &formatter_path, + "#!/usr/bin/env sh\nset -eu\nprintf 'cwd=%s\\narg1=%s\\narg2=%s\\n' \"$(pwd)\" \"$1\" \"$2\"\ncat\n", + ); + + let document_path = src_dir.join("main.jsonnet"); + let workspace_roots = vec![root.to_path_buf()]; + let input = "{}"; + let config = FormattingConfig { + formatter_engine: Some(FormatterEngine::BinJsonnetfmtStdio), + ..FormattingConfig::default() + }; + let context = FormattingContext::for_document(document_path.as_path(), &workspace_roots); + + let output = format!( + "cwd={}\narg1=-stdio\narg2={}\n{input}", + root.display(), + document_path.display() + ); + assert_eq!( + format_document_with_config(input, &config, context), + 
Some(full_replacement_edit(input, output)) + ); } + #[cfg(unix)] #[test] - fn test_format_returns_none_without_formatter() { - // Test that we handle missing formatter gracefully - // This is difficult to test since it depends on the environment - // Just verify the function doesn't panic - let code = "{ foo: 1 }"; - let _ = format_document(code); + fn test_formatter_path_takes_precedence_over_engine() { + let temp_dir = tempfile::tempdir().unwrap(); + let root = temp_dir.path(); + let bin_dir = root.join("bin"); + let src_dir = root.join("src"); + fs::create_dir_all(&bin_dir).unwrap(); + fs::create_dir_all(&src_dir).unwrap(); + + let workspace_formatter = bin_dir.join("jsonnetfmt"); + write_executable_script( + &workspace_formatter, + "#!/usr/bin/env sh\nset -eu\necho workspace\n", + ); + + let custom_formatter = root.join("custom-formatter"); + write_executable_script( + &custom_formatter, + "#!/usr/bin/env sh\nset -eu\necho custom\n", + ); + + let document_path = src_dir.join("main.jsonnet"); + let workspace_roots = vec![root.to_path_buf()]; + let config = FormattingConfig { + formatter_path: Some(custom_formatter.to_string_lossy().into_owned()), + formatter_engine: Some(FormatterEngine::BinJsonnetfmtStdio), + ..FormattingConfig::default() + }; + let context = FormattingContext::for_document(document_path.as_path(), &workspace_roots); + + assert_eq!( + format_document_with_config("{}", &config, context), + Some(full_replacement_edit("{}", "custom\n".to_string())) + ); } } diff --git a/crates/jrsonnet-lsp-handlers/src/lib.rs b/crates/jrsonnet-lsp-handlers/src/lib.rs index ef350a9a..61a3f331 100644 --- a/crates/jrsonnet-lsp-handlers/src/lib.rs +++ b/crates/jrsonnet-lsp-handlers/src/lib.rs @@ -22,7 +22,10 @@ pub use definition::{ VisibleBinding, }; pub use document_highlight::document_highlights; -pub use formatting::{format_document, format_document_with_config, FormattingConfig}; +pub use formatting::{ + format_document, format_document_with_config, 
FormatterEngine, FormattingConfig, + FormattingContext, +}; pub use hover::hover; pub use inlay_hint::inlay_hints; pub use references::{find_cross_file_references, find_references}; diff --git a/crates/jrsonnet-lsp/src/config.rs b/crates/jrsonnet-lsp/src/config.rs index d438c3c4..d324e4cd 100644 --- a/crates/jrsonnet-lsp/src/config.rs +++ b/crates/jrsonnet-lsp/src/config.rs @@ -9,7 +9,7 @@ use std::{collections::HashMap, path::PathBuf}; // Re-export config types from handlers crate pub use jrsonnet_lsp_handlers::{ - CodeActionConfig, FormattingConfig, RemoveUnusedCommentsMode, RemoveUnusedMode, + CodeActionConfig, FormatterEngine, FormattingConfig, RemoveUnusedCommentsMode, RemoveUnusedMode, }; use serde::{Deserialize, Serialize}; @@ -234,6 +234,9 @@ impl ServerConfig { if other.formatter_path.is_some() { self.formatting.formatter_path = other.formatter_path; } + if other.formatter_engine.is_some() { + self.formatting.formatter_engine = other.formatter_engine; + } } /// Get all library paths for import resolution. @@ -429,7 +432,8 @@ mod tests { "formatting": { "Indent": 4, "StringStyle": "double", - "PadArrays": true + "PadArrays": true, + "FormatterEngine": "bin-jsonnetfmt-stdio" } }); @@ -437,5 +441,9 @@ mod tests { assert_eq!(config.formatting.indent, Some(4)); assert_eq!(config.formatting.string_style, Some("double".to_string())); assert_eq!(config.formatting.pad_arrays, Some(true)); + assert_eq!( + config.formatting.formatter_engine, + Some(FormatterEngine::BinJsonnetfmtStdio) + ); } } diff --git a/crates/jrsonnet-lsp/src/server.rs b/crates/jrsonnet-lsp/src/server.rs index 6df1dd16..fb31b845 100644 --- a/crates/jrsonnet-lsp/src/server.rs +++ b/crates/jrsonnet-lsp/src/server.rs @@ -78,6 +78,8 @@ pub struct Server { type_cache: SharedTypeCache, /// Server configuration. config: SharedConfig, + /// Workspace roots derived from initialize params. + workspace_roots: Vec, /// Evaluator for runtime diagnostics (wrapped in Arc for sharing with async diagnostics). 
evaluator: Option>, /// Async diagnostics runner. @@ -138,6 +140,7 @@ impl Server { type_cache, global_types, config: Arc::new(RwLock::new(ServerConfig::default())), + workspace_roots: Vec::new(), evaluator: None, diagnostics, inflight_requests, @@ -289,7 +292,9 @@ impl Server { } self.register_did_change_watched_files(¶ms, &init_roots)?; - self.schedule_workspace_index_bootstrap(init_roots); + let workspace_roots = Self::workspace_root_paths(&init_roots); + self.workspace_roots.clone_from(&workspace_roots); + self.schedule_workspace_index_bootstrap(workspace_roots); // Main loop self.main_loop()?; @@ -298,8 +303,7 @@ impl Server { Ok(()) } - fn schedule_workspace_index_bootstrap(&self, init_roots: InitializeRoots) { - let roots = Self::workspace_root_paths(&init_roots); + fn schedule_workspace_index_bootstrap(&self, roots: Vec) { if roots.is_empty() { debug!("No workspace roots provided; skipping startup index bootstrap"); return; @@ -966,8 +970,10 @@ impl Server { // Get formatting config let config = self.config.read().formatting.clone(); + let context = + handlers::FormattingContext::for_document(path.as_path(), &self.workspace_roots); - handlers::format_document_with_config(doc.text(), &config) + handlers::format_document_with_config(doc.text(), &config, context) } /// Handle textDocument/prepareRename request. diff --git a/docs/lsp/ARCHITECTURE.md b/docs/lsp/ARCHITECTURE.md index febf3ae9..772f8fca 100644 --- a/docs/lsp/ARCHITECTURE.md +++ b/docs/lsp/ARCHITECTURE.md @@ -74,7 +74,9 @@ Startup flow: 7. Schedule background bootstrap indexing for initialize workspace roots (`workspaceFolders`, `rootUri`, `rootPath`) scanning `*.jsonnet`, `*.libsonnet`, and `*.json`. -8. Enter the main loop immediately while bootstrap continues asynchronously. +8. Persist normalized workspace roots for runtime features that need + workspace-scoped execution context (for example formatting contract mode). +9. Enter the main loop immediately while bootstrap continues asynchronously. 
Entry point: `run_stdio()` in `crates/jrsonnet-lsp/src/server.rs`. @@ -329,6 +331,17 @@ Important behavior in `on_did_change_configuration`: - `code_actions` - `log_level` +`formatting` currently includes: + +- formatter option flags forwarded to CLI formatters (`indent`, + `max_blank_lines`, string/comment style, padding/sort/strip toggles) +- `formatter_path` (explicit formatter binary path, highest precedence) +- `formatter_engine`: + - unset or `path`: PATH probing (`jrsonnet-fmt`, then `jsonnetfmt`) + - `bin-jsonnetfmt-stdio`: workspace-local contract mode + (`{workspaceRoot}/bin/jsonnetfmt -stdio `, cwd at workspace + root) + Configuration can arrive via initialization options or `workspace/didChangeConfiguration` settings payloads. The update logic accepts both flat and namespaced settings (`jsonnet`, `jsonnet-language-server`). diff --git a/docs/lsp/HANDLERS.md b/docs/lsp/HANDLERS.md index fc46b809..f2cf5b88 100644 --- a/docs/lsp/HANDLERS.md +++ b/docs/lsp/HANDLERS.md @@ -201,8 +201,14 @@ File: `crates/jrsonnet-lsp-handlers/src/document_highlight.rs` File: `crates/jrsonnet-lsp-handlers/src/formatting.rs` -- Uses `formatter_path` when configured. -- Otherwise tries `jrsonnet-fmt` then `jsonnetfmt`. +- Uses `formatter_path` when configured (highest precedence). +- Otherwise uses `formatting.formatter_engine`. +- `path` (or unset): tries `jrsonnet-fmt` then `jsonnetfmt` in `PATH`. +- `bin-jsonnetfmt-stdio`: runs + `{workspaceRoot}/bin/jsonnetfmt -stdio `. +- Workspace contract mode receives request context from the server: + document path and initialize-time workspace roots. The nearest containing + workspace root is selected. - Returns a full-document replacement edit when formatting changes text. - Returns `None` when formatting is unavailable or fails. 
From 7fdf449a997ecb235a44ab75cb7ebde028000c47 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Wed, 11 Feb 2026 13:27:13 +0000 Subject: [PATCH 080/210] lsp-formatting: surface formatter failures with typed errors --- .../jrsonnet-lsp-handlers/src/formatting.rs | 181 ++++++++++++++++-- 1 file changed, 161 insertions(+), 20 deletions(-) diff --git a/crates/jrsonnet-lsp-handlers/src/formatting.rs b/crates/jrsonnet-lsp-handlers/src/formatting.rs index 03de3bf9..5f5ab031 100644 --- a/crates/jrsonnet-lsp-handlers/src/formatting.rs +++ b/crates/jrsonnet-lsp-handlers/src/formatting.rs @@ -4,12 +4,14 @@ use std::{ ffi::OsStr, + fmt, io, path::{Path, PathBuf}, process::{Command, Stdio}, }; use lsp_types::{Position, Range, TextEdit}; use serde::{Deserialize, Serialize}; +use tracing::debug; fn to_u32(value: usize) -> u32 { u32::try_from(value).unwrap_or(u32::MAX) @@ -127,6 +129,99 @@ impl<'a> FormattingContext<'a> { } } +#[derive(Debug)] +enum FormatterError { + NoFormatterInPath, + MissingDocumentPath, + MissingWorkspaceRoot { + document_path: PathBuf, + }, + Spawn { + program: String, + source: io::Error, + }, + MissingStdin { + program: String, + }, + WriteStdin { + program: String, + source: io::Error, + }, + Wait { + program: String, + source: io::Error, + }, + UnsuccessfulExit { + program: String, + stderr: String, + }, + NonUtf8Stdout { + program: String, + source: std::string::FromUtf8Error, + }, +} + +impl FormatterError { + fn is_not_found(&self) -> bool { + matches!( + self, + Self::Spawn { source, .. 
} if source.kind() == io::ErrorKind::NotFound + ) + } +} + +impl fmt::Display for FormatterError { + fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::NoFormatterInPath => { + formatter.write_str("no formatter found in PATH (tried jrsonnet-fmt, jsonnetfmt)") + } + Self::MissingDocumentPath => { + formatter.write_str("document path is required for workspace formatter engine") + } + Self::MissingWorkspaceRoot { document_path } => write!( + formatter, + "no workspace root available for document {}", + document_path.display() + ), + Self::Spawn { program, source } => { + write!(formatter, "failed to spawn formatter {program}: {source}") + } + Self::MissingStdin { program } => { + write!( + formatter, + "formatter {program} did not provide stdin handle" + ) + } + Self::WriteStdin { program, source } => { + write!( + formatter, + "failed to write to formatter {program} stdin: {source}" + ) + } + Self::Wait { program, source } => { + write!( + formatter, + "failed waiting for formatter {program}: {source}" + ) + } + Self::UnsuccessfulExit { program, stderr } => { + write!( + formatter, + "formatter {program} exited with failure: {}", + stderr.trim() + ) + } + Self::NonUtf8Stdout { program, source } => { + write!( + formatter, + "formatter {program} emitted non-UTF8 stdout: {source}" + ) + } + } + } +} + /// Format a Jsonnet document with default configuration. /// /// Returns a list of text edits to apply to the document. 
@@ -151,7 +246,13 @@ pub fn format_document_with_config( context: FormattingContext<'_>, ) -> Option> { // Try to run the formatter - let formatted = run_formatter(text, config, context)?; + let formatted = match run_formatter(text, config, context) { + Ok(formatted) => formatted, + Err(err) => { + debug!("Formatting unavailable: {err}"); + return None; + } + }; if formatted == text { // No changes needed @@ -186,7 +287,7 @@ fn run_formatter( input: &str, config: &FormattingConfig, context: FormattingContext<'_>, -) -> Option { +) -> Result { // If a custom formatter path is provided, use it directly if let Some(path) = &config.formatter_path { let args = build_formatter_args(config); @@ -196,16 +297,29 @@ fn run_formatter( match config.formatter_engine.unwrap_or_default() { FormatterEngine::Path => { let args = build_formatter_args(config); + let mut first_error = None; for name in ["jrsonnet-fmt", "jsonnetfmt"] { - if let Some(result) = try_run_formatter_binary(name, &args, None, input) { - return Some(result); + match try_run_formatter_binary(name, &args, None, input) { + Ok(result) => return Ok(result), + Err(err) if err.is_not_found() => continue, + Err(err) => { + if first_error.is_none() { + first_error = Some(err); + } + } } } - None + + Err(first_error.unwrap_or(FormatterError::NoFormatterInPath)) } FormatterEngine::BinJsonnetfmtStdio => { - let document_path = context.document_path?; - let workspace_root = select_workspace_root(document_path, context.workspace_roots)?; + let document_path = context + .document_path + .ok_or(FormatterError::MissingDocumentPath)?; + let workspace_root = select_workspace_root(document_path, context.workspace_roots) + .ok_or_else(|| FormatterError::MissingWorkspaceRoot { + document_path: document_path.to_path_buf(), + })?; let formatter = workspace_root.join("bin").join("jsonnetfmt"); let args = vec![ "-stdio".to_string(), @@ -297,10 +411,13 @@ fn try_run_formatter_binary( args: &[String], current_dir: Option<&Path>, input: 
&str, -) -> Option +) -> Result where S: AsRef, { + let program = program.as_ref(); + let program_name = program.to_string_lossy().into_owned(); + let mut command = Command::new(program); command.args(args); if let Some(current_dir) = current_dir { @@ -311,22 +428,46 @@ where .stdout(Stdio::piped()) .stderr(Stdio::piped()) .spawn() - .ok()?; + .map_err(|source| FormatterError::Spawn { + program: program_name.clone(), + source, + })?; // Write input to stdin - if let Some(mut stdin) = child.stdin.take() { - use std::io::Write; - stdin.write_all(input.as_bytes()).ok()?; - drop(stdin); + let mut stdin = child + .stdin + .take() + .ok_or_else(|| FormatterError::MissingStdin { + program: program_name.clone(), + })?; + use std::io::Write; + stdin + .write_all(input.as_bytes()) + .map_err(|source| FormatterError::WriteStdin { + program: program_name.clone(), + source, + })?; + drop(stdin); + + let output = child + .wait_with_output() + .map_err(|source| FormatterError::Wait { + program: program_name.clone(), + source, + })?; + + if !output.status.success() { + let stderr = String::from_utf8_lossy(&output.stderr).to_string(); + return Err(FormatterError::UnsuccessfulExit { + program: program_name, + stderr, + }); } - let output = child.wait_with_output().ok()?; - - if output.status.success() { - String::from_utf8(output.stdout).ok() - } else { - None - } + String::from_utf8(output.stdout).map_err(|source| FormatterError::NonUtf8Stdout { + program: program_name, + source, + }) } fn select_workspace_root<'a>( From e7d4121d4c28a262d9a6c8f1a37054ce8b44d184 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Thu, 12 Feb 2026 16:32:11 +0000 Subject: [PATCH 081/210] feat(lsp-import): classify imports by kind and filter traversal - track import flavor (import/importstr/importbin) in ImportEntry. - preserve import kind in both AST and token-fallback parsing paths. - add dependency-walk filtering support for downstream analysis callers. 
--- crates/jrsonnet-lsp-import/src/graph.rs | 105 ++++++++++++++++++++-- crates/jrsonnet-lsp-import/src/lib.rs | 2 +- crates/jrsonnet-lsp-import/src/resolve.rs | 2 + 3 files changed, 102 insertions(+), 7 deletions(-) diff --git a/crates/jrsonnet-lsp-import/src/graph.rs b/crates/jrsonnet-lsp-import/src/graph.rs index b620fd69..57b01c42 100644 --- a/crates/jrsonnet-lsp-import/src/graph.rs +++ b/crates/jrsonnet-lsp-import/src/graph.rs @@ -7,7 +7,7 @@ use std::collections::{HashMap, HashSet, VecDeque}; use jrsonnet_lsp_document::{strip_string_quotes, CanonicalPath, Document}; use jrsonnet_rowan_parser::{ - nodes::{Bind, Destruct, ExprImport, StmtLocal}, + nodes::{Bind, Destruct, ExprImport, ImportKindKind, StmtLocal}, AstNode, AstToken, SyntaxKind, }; @@ -19,6 +19,8 @@ use crate::{ /// Information about an import in a file. #[derive(Debug, Clone, PartialEq, Eq)] pub struct ImportEntry { + /// Import flavor (`import`, `importstr`, `importbin`). + pub kind: ImportKind, /// The binding name if this import is bound to a variable. /// e.g., "lib" in `local lib = import "lib.jsonnet"` pub binding_name: Option, @@ -28,6 +30,14 @@ pub struct ImportEntry { pub resolved_path: Option, } +/// Jsonnet import flavor. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum ImportKind { + Code, + String, + Binary, +} + /// One import occurrence in source, including its location. 
#[derive(Debug, Clone, PartialEq, Eq)] pub struct ImportOccurrence { @@ -338,13 +348,14 @@ impl ImportGraph { /// /// # Example /// ```ignore - /// graph.process_with_dependencies(&path, |p| { + /// graph.process_with_dependencies(&path, |_| true, |p| { /// analyze_file(p); /// }); /// ``` - pub fn process_with_dependencies(&self, root: &CanonicalPath, f: F) + pub fn process_with_dependencies(&self, root: &CanonicalPath, include_dependency: P, f: F) where F: Fn(&CanonicalPath) + Sync, + P: Fn(&ImportEntry) -> bool + Sync, { let mut work = WorkQueue::new(); work.push(root.clone()); @@ -352,6 +363,9 @@ impl ImportGraph { let levels = work.run(|path, deps| { // Get dependencies from import graph for entry in self.imports(path) { + if !include_dependency(entry) { + continue; + } if let Some(ref resolved) = entry.resolved_path { deps.push(resolved.clone()); } @@ -513,12 +527,14 @@ fn parse_import_occurrence( where F: Fn(&str) -> Option, { + let kind = import_kind_from_expr(import)?; let path = extract_import_path(import)?; let resolved = resolve_import(&path); let import_range = import.text()?.syntax().text_range(); Some(ImportOccurrence { entry: ImportEntry { + kind, binding_name, import_path: path, resolved_path: resolved, @@ -564,9 +580,13 @@ where if import_path.is_empty() { continue; } + let Some(kind) = import_kind_from_keyword_token(token.kind()) else { + continue; + }; occurrences.push(ImportOccurrence { entry: ImportEntry { + kind, binding_name: binding_name_from_import_token(import_text), resolved_path: resolve_import(&import_path), import_path, @@ -625,6 +645,24 @@ fn binding_name_from_import_token(token: &jrsonnet_rowan_parser::SyntaxToken) -> Some(full.name()?.ident_lit()?.text().to_string()) } +fn import_kind_from_expr(import: &ExprImport) -> Option { + let token_kind = import.import_kind()?.kind(); + Some(match token_kind { + ImportKindKind::ImportKw => ImportKind::Code, + ImportKindKind::ImportstrKw => ImportKind::String, + ImportKindKind::ImportbinKw => 
ImportKind::Binary, + }) +} + +const fn import_kind_from_keyword_token(kind: SyntaxKind) -> Option { + match kind { + SyntaxKind::IMPORT_KW => Some(ImportKind::Code), + SyntaxKind::IMPORTSTR_KW => Some(ImportKind::String), + SyntaxKind::IMPORTBIN_KW => Some(ImportKind::Binary), + _ => None, + } +} + #[cfg(test)] mod tests { use std::path::PathBuf; @@ -656,6 +694,7 @@ mod tests { assert_eq!( entries, vec![ImportEntry { + kind: ImportKind::Code, binding_name: Some("lib".to_string()), import_path: "lib.jsonnet".to_string(), resolved_path: Some(test_path("lib.jsonnet")), @@ -679,6 +718,7 @@ mod tests { occurrences, vec![ImportOccurrence { entry: ImportEntry { + kind: ImportKind::Code, binding_name: Some("lib".to_string()), import_path: "lib.jsonnet".to_string(), resolved_path: Some(test_path("lib.jsonnet")), @@ -704,6 +744,7 @@ mod tests { occurrences, vec![ImportOccurrence { entry: ImportEntry { + kind: ImportKind::Code, binding_name: Some("lib".to_string()), import_path: "lib.jsonnet".to_string(), resolved_path: Some(test_path("lib.jsonnet")), @@ -723,6 +764,7 @@ mod tests { assert_eq!( entries, vec![ImportEntry { + kind: ImportKind::Code, binding_name: Some("lib".to_string()), import_path: "lib.jsonnet".to_string(), resolved_path: Some(test_path("lib.jsonnet")), @@ -745,11 +787,13 @@ lib1 + lib2 entries, vec![ ImportEntry { + kind: ImportKind::Code, binding_name: Some("lib1".to_string()), import_path: "lib1.jsonnet".to_string(), resolved_path: Some(test_path("lib1.jsonnet")), }, ImportEntry { + kind: ImportKind::Code, binding_name: Some("lib2".to_string()), import_path: "lib2.jsonnet".to_string(), resolved_path: Some(test_path("lib2.jsonnet")), @@ -774,6 +818,7 @@ lib1 + lib2 assert_eq!( imports, vec![ImportEntry { + kind: ImportKind::Code, binding_name: Some("lib".to_string()), import_path: "lib.jsonnet".to_string(), resolved_path: Some(lib.clone()), @@ -850,6 +895,7 @@ lib + other assert_eq!( imports, vec![&ImportEntry { + kind: ImportKind::Code, binding_name: 
Some("lib".to_string()), import_path: "lib.jsonnet".to_string(), resolved_path: Some(lib), @@ -1009,9 +1055,13 @@ u1 + u2 let processed_clone = Arc::clone(&processed); // Process main and its dependencies - graph.process_with_dependencies(&main, move |path| { - processed_clone.lock().unwrap().push(path.clone()); - }); + graph.process_with_dependencies( + &main, + |_| true, + move |path| { + processed_clone.lock().unwrap().push(path.clone()); + }, + ); let order: Vec = processed.lock().unwrap().clone(); @@ -1019,6 +1069,49 @@ u1 + u2 assert_eq!(order, vec![lib, utils, main]); } + #[test] + fn test_process_with_dependencies_filtered_by_kind() { + use std::sync::{Arc, Mutex}; + + let mut graph = ImportGraph::new(); + + let main = test_path("main.jsonnet"); + let data = test_path("data.jsonnet"); + let script = test_path("script.k"); + + graph.update_file_with_entries( + &main, + vec![ + ImportEntry { + kind: ImportKind::Code, + binding_name: Some("data".to_string()), + import_path: "data.jsonnet".to_string(), + resolved_path: Some(data.clone()), + }, + ImportEntry { + kind: ImportKind::String, + binding_name: Some("payload".to_string()), + import_path: "script.k".to_string(), + resolved_path: Some(script), + }, + ], + ); + + let processed = Arc::new(Mutex::new(Vec::new())); + let processed_clone = Arc::clone(&processed); + + graph.process_with_dependencies( + &main, + |entry| entry.kind == ImportKind::Code, + move |path| { + processed_clone.lock().unwrap().push(path.clone()); + }, + ); + + let order: Vec = processed.lock().unwrap().clone(); + assert_eq!(order, vec![data, main]); + } + #[test] fn test_process_importers_with_work_queue() { use std::sync::{Arc, Mutex}; diff --git a/crates/jrsonnet-lsp-import/src/lib.rs b/crates/jrsonnet-lsp-import/src/lib.rs index c8942c34..ddda2f6e 100644 --- a/crates/jrsonnet-lsp-import/src/lib.rs +++ b/crates/jrsonnet-lsp-import/src/lib.rs @@ -12,7 +12,7 @@ pub mod work_queue; pub use graph::{ parse_document_import_occurrences, 
parse_document_imports, ImportEntry, ImportGraph, - ImportOccurrence, + ImportKind, ImportOccurrence, }; pub use parse::{ check_import_from_token, check_import_path, extract_import_path, find_import_in_node, diff --git a/crates/jrsonnet-lsp-import/src/resolve.rs b/crates/jrsonnet-lsp-import/src/resolve.rs index 95cf9bcc..c38856f6 100644 --- a/crates/jrsonnet-lsp-import/src/resolve.rs +++ b/crates/jrsonnet-lsp-import/src/resolve.rs @@ -185,6 +185,7 @@ mod tests { assert_eq!( import_resolution.parse_entries(&doc), vec![ImportEntry { + kind: crate::graph::ImportKind::Code, binding_name: Some("lib".to_string()), import_path: "lib.jsonnet".to_string(), resolved_path: Some(resolved_lib), @@ -228,6 +229,7 @@ mod tests { import_resolution.parse_occurrences(&doc), vec![ImportOccurrence { entry: ImportEntry { + kind: crate::graph::ImportKind::Code, binding_name: Some("lib".to_string()), import_path: "lib.jsonnet".to_string(), resolved_path: Some(resolved_lib), From 7e946931242f52a702238ae349ff6dfa1c6f2ee6 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Thu, 12 Feb 2026 16:33:41 +0000 Subject: [PATCH 082/210] feat(lsp-inference): deepen import and flow type narrowing - infer import expressions by kind (import/importstr/importbin). - improve function bind inference with provisional recursion and parameter constraints. - extend flow facts for array predicates/object fields and tighten std.filter/filterMap behavior. 
--- Cargo.lock | 3 + crates/jrsonnet-lsp-inference/src/expr.rs | 369 ++++++++++++++---- crates/jrsonnet-lsp-inference/src/flow.rs | 237 ++++++++--- crates/jrsonnet-lsp-inference/src/provider.rs | 108 ++++- crates/jrsonnet-std-sig/Cargo.toml | 3 + crates/jrsonnet-std-sig/src/lib.rs | 30 +- 6 files changed, 622 insertions(+), 128 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 5ad8f7ac..d9617bea 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1997,6 +1997,9 @@ dependencies = [ [[package]] name = "jrsonnet-std-sig" version = "0.5.0-pre97" +dependencies = [ + "jrsonnet-rowan-parser", +] [[package]] name = "jrsonnet-stdlib" diff --git a/crates/jrsonnet-lsp-inference/src/expr.rs b/crates/jrsonnet-lsp-inference/src/expr.rs index 4c52094f..8ac3106a 100644 --- a/crates/jrsonnet-lsp-inference/src/expr.rs +++ b/crates/jrsonnet-lsp-inference/src/expr.rs @@ -1,14 +1,15 @@ //! Expression type inference. use jrsonnet_lsp_document::Document; -use jrsonnet_lsp_scope::var_resolves_to_builtin_std; +use jrsonnet_lsp_import::extract_import_path; +use jrsonnet_lsp_scope::{expr_resolves_to_builtin_std, var_resolves_to_builtin_std}; use jrsonnet_lsp_types::{ FieldDefInterned, FieldVis, FunctionData, NumBounds, ObjectData, ParamInterned, ReturnSpec, Ty, TyData, }; use jrsonnet_rowan_parser::{ - nodes::{BinaryOperatorKind, Bind, ExprBase, LiteralKind}, - AstNode, AstToken, + nodes::{BinaryOperatorKind, Bind, ExprBase, ImportKindKind, LiteralKind}, + AstNode, }; use rowan::TextRange; use rustc_hash::FxHashMap; @@ -163,48 +164,73 @@ fn infer_bind_type_ty(bind: &Bind, env: &mut TypeEnv, recorder: } } Bind::BindFunction(bf) => { - if let Some(name_node) = bf.name() { - if let Some(ident) = name_node.ident_lit() { - let name = ident.text().to_string(); - let params = bf - .params() - .map(|p| extract_params_with_default_types_ty(&p, env)) - .unwrap_or_default(); - let func_data = FunctionData { - params, - return_spec: ReturnSpec::Fixed(Ty::ANY), - variadic: false, - }; - let func_ty = 
env.store_mut().intern(TyData::Function(func_data)); - env.define_ty(name, func_ty); + let Some(name_node) = bf.name() else { + return; + }; + let Some(ident) = name_node.ident_lit() else { + return; + }; + let name = ident.text().to_string(); + let params = bf + .params() + .map(|p| extract_params_with_default_types_ty(&p, env)) + .unwrap_or_default(); + + // Install a provisional function first so recursive self-calls can resolve. + let provisional_func = FunctionData { + params: params.clone(), + return_spec: ReturnSpec::Fixed(Ty::ANY), + variadic: false, + }; + let provisional_ty = env.store_mut().intern(TyData::Function(provisional_func)); + env.define_ty(name.clone(), provisional_ty); + + let (return_ty, param_constraints) = if env.can_infer_function_body() { + if let Some(body) = bf.value() { + env.push_scope(); + let param_names: Vec = params.iter().map(|p| p.name.clone()).collect(); + for param in ¶ms { + env.define_ty(param.name.clone(), param.ty); + } + + env.start_constraint_tracking(¶m_names); + env.enter_function(); + let body_ty = infer_expr_ty_impl(&body, env, None, recorder); + env.exit_function(); + let constraints = env.stop_constraint_tracking_ty(); + env.pop_scope(); + (body_ty, constraints) + } else { + (Ty::ANY, FxHashMap::default()) } - } + } else { + (Ty::ANY, FxHashMap::default()) + }; - // Record types inside function bodies during the same inference pass. - // Keep parameter assumptions aligned with historical analysis behavior. 
- if let Some(body) = bf.value() { - env.push_scope(); - if let Some(params) = bf.params() { - for param in params.params() { - let Some(destruct) = param.destruct() else { - continue; - }; - let jrsonnet_rowan_parser::nodes::Destruct::DestructFull(full) = destruct - else { - continue; - }; - let Some(name_node) = full.name() else { - continue; - }; - let Some(ident) = name_node.ident_lit() else { - continue; - }; - env.define_ty(ident.text().to_string(), Ty::ANY); + let final_params: Vec = params + .into_iter() + .map(|param| { + let mut narrowed_ty = param.ty; + if let Some(constraints) = param_constraints.get(¶m.name) { + for constraint_ty in constraints { + narrowed_ty = env.store_mut().narrow(narrowed_ty, *constraint_ty); + } } - } - let _ = infer_expr_ty_impl(&body, env, None, recorder); - env.pop_scope(); - } + ParamInterned { + name: param.name, + ty: narrowed_ty, + has_default: param.has_default, + } + }) + .collect(); + + let final_func = FunctionData { + params: final_params, + return_spec: ReturnSpec::Fixed(return_ty), + variadic: false, + }; + let final_ty = env.store_mut().intern(TyData::Function(final_func)); + env.define_ty(name, final_ty); } } } @@ -358,21 +384,28 @@ fn infer_base_ty( // Import - try to resolve the type from the import cache ExprBase::ExprImport(import) => { - let Some(text) = import.text() else { + let Some(kind) = import.import_kind().map(|token| token.kind()) else { return Ty::ANY; }; - let s = text.syntax().text(); - // Need at least 2 chars for the quotes - if s.len() < 2 { - return Ty::ANY; - } - // Strip quotes from the string literal - let path_str = &s[1..s.len() - 1]; - if path_str.is_empty() { - return Ty::ANY; + match kind { + ImportKindKind::ImportKw => { + let Some(path) = extract_import_path(import) else { + return Ty::ANY; + }; + if path.is_empty() { + return Ty::ANY; + } + // Try to resolve the import type, fall back to ANY + env.resolve_import(&path).map_or(Ty::ANY, Ty::from) + } + ImportKindKind::ImportstrKw => 
Ty::STRING, + ImportKindKind::ImportbinKw => { + let byte_ty = env + .store_mut() + .bounded_number(NumBounds::between(0.0, 255.0)); + env.store_mut().array(byte_ty) + } } - // Try to resolve the import type, fall back to ANY - env.resolve_import(path_str).map_or(Ty::ANY, Ty::from) } // Unary operators @@ -511,11 +544,13 @@ fn infer_index_expr_base_ty( if base_ty == Ty::NEVER { return Ty::NEVER; } + let mut index_literal = None; if let Some(index_expr) = idx.index() { let idx_ty = infer_expr_ty_impl(&index_expr, env, None, recorder); if idx_ty == Ty::NEVER { return Ty::NEVER; } + index_literal = extract_string_literal(&index_expr); } if base_ty == Ty::STRING { return Ty::STRING; @@ -527,10 +562,57 @@ fn infer_index_expr_base_ty( let elems_copy: Vec = elems.clone(); store.union(elems_copy) } + TyData::Object(_) | TyData::Union(_) => { + if let Some(field_name) = index_literal.as_deref() { + return object_field_ty(base_ty, field_name, store).unwrap_or(Ty::ANY); + } + Ty::ANY + } _ => Ty::ANY, } } +fn object_field_ty( + ty: Ty, + field_name: &str, + store: &mut jrsonnet_lsp_types::MutStore, +) -> Option { + match store.get(ty) { + TyData::Object(obj) => obj + .fields + .iter() + .find(|(name, _)| name == field_name) + .map(|(_, field)| field.ty) + .or_else(|| obj.has_unknown.then_some(Ty::ANY)), + TyData::Union(types) => { + let field_types = types + .into_iter() + .filter_map(|variant| object_field_ty(variant, field_name, store)) + .collect::>(); + if field_types.is_empty() { + None + } else { + Some(store.union(field_types)) + } + } + _ => None, + } +} + +fn extract_string_literal(expr: &jrsonnet_rowan_parser::nodes::Expr) -> Option { + let base = expr.expr_base()?; + let ExprBase::ExprString(s) = base else { + return None; + }; + let text = s.syntax().first_token()?.text().to_string(); + if (text.starts_with('"') && text.ends_with('"')) + || (text.starts_with('\'') && text.ends_with('\'')) + { + return Some(text[1..text.len() - 1].to_string()); + } + None +} + fn 
infer_slice_expr_base_ty( slice: &jrsonnet_rowan_parser::nodes::ExprSlice, env: &mut TypeEnv, @@ -666,18 +748,12 @@ fn infer_field_expr_base_ty( .and_then(|n| n.ident_lit()) .map(|t| t.text().to_string()); - let store = env.store(); - if let TyData::Object(ref obj_data) = store.get(base_ty) { - if let Some(field_name) = &field_name { - for (name, field_def) in &obj_data.fields { - if name == field_name { - return field_def.ty; - } - } - } - if obj_data.has_unknown { - return Ty::ANY; - } + let Some(field_name) = field_name.as_deref() else { + return Ty::ANY; + }; + let store = env.store_mut(); + if let Some(ty) = object_field_ty(base_ty, field_name, store) { + return ty; } Ty::ANY @@ -708,6 +784,10 @@ fn infer_call_expr_base_ty( } } + if let Some(filtered_ty) = infer_std_filter_result_ty(call, &arg_types, env) { + return filtered_ty; + } + let store = env.store_mut(); if let TyData::Function(ref func_data) = store.get(base_ty) { let return_spec = func_data.return_spec.clone(); @@ -799,6 +879,66 @@ fn infer_call_expr_base_ty( Ty::ANY } +fn infer_std_filter_result_ty( + call: &jrsonnet_rowan_parser::nodes::ExprCall, + arg_types: &[Ty], + env: &mut TypeEnv, +) -> Option { + let callee = call.callee()?; + let ExprBase::ExprField(field) = callee.expr_base()? 
else { + return None; + }; + let base = field.base()?; + if !expr_resolves_to_builtin_std(&base) { + return None; + } + if field.field()?.ident_lit()?.text() != "filter" { + return None; + } + + let args_desc = call.args_desc()?; + let args: Vec<_> = args_desc.args().collect(); + let [pred_arg, _arr_arg] = args.as_slice() else { + return None; + }; + let pred_expr = pred_arg.expr()?; + let arr_ty = arg_types.get(1).copied().unwrap_or(Ty::ANY); + let elem_fact = flow::extract_array_predicate_fact(&pred_expr)?; + let narrowed = elem_fact.apply_to(arr_ty, env.store_mut()); + Some(collection_to_array_ty(narrowed, env.store_mut())) +} + +fn collection_to_array_ty(ty: Ty, store: &mut jrsonnet_lsp_types::MutStore) -> Ty { + match store.get(ty) { + TyData::Array { elem, .. } => store.array(elem), + TyData::Tuple { ref elems } => { + let elems_copy = elems.clone(); + let elem_union = store.union(elems_copy); + store.array(elem_union) + } + TyData::Union(types) => { + let mut elem_types = Vec::new(); + for variant in types { + match store.get(variant) { + TyData::Array { elem, .. } => elem_types.push(elem), + TyData::Tuple { ref elems } => { + let elems_copy = elems.clone(); + elem_types.push(store.union(elems_copy)); + } + _ => return store.array(Ty::ANY), + } + } + if elem_types.is_empty() { + store.array(Ty::ANY) + } else { + let elem_union = store.union(elem_types); + store.array(elem_union) + } + } + _ => store.array(Ty::ANY), + } +} + fn infer_array_expr_base_ty( arr: &jrsonnet_rowan_parser::nodes::ExprArray, env: &mut TypeEnv, @@ -866,6 +1006,27 @@ fn infer_array_comp_expr_base_ty( let elems_copy: Vec = elems.clone(); store.union(elems_copy) } + TyData::Union(types) => { + let mut elem_types = Vec::new(); + for variant in types { + match store.get(variant) { + TyData::Array { elem, .. 
} => elem_types.push(elem), + TyData::Tuple { ref elems } => { + let elems_copy: Vec = elems.clone(); + elem_types.push(store.union(elems_copy)); + } + _ => { + elem_types.clear(); + break; + } + } + } + if elem_types.is_empty() { + Ty::ANY + } else { + store.union(elem_types) + } + } _ if iter_ty == Ty::NEVER => Ty::NEVER, _ => Ty::ANY, } @@ -1491,6 +1652,25 @@ mod tests { assert_eq!(elem, Ty::NUMBER); } + #[test] + fn test_stdlib_filter_with_std_predicate_narrows_elements() { + let (ty, env) = infer_doc(r#"std.filter(std.isNumber, [1, "x", 2])"#); + let elem = try_array(&env, ty).expect("expected array"); + assert_eq!(elem, Ty::NUMBER); + } + + #[test] + fn test_stdlib_filter_map_uses_mapper_return_type() { + let (ty, env) = infer_doc( + r#"local inc(x) = + assert std.isNumber(x); + x + 1; +std.filterMap(std.isNumber, inc, [1, "x", 2])"#, + ); + let elem = try_array(&env, ty).expect("expected array"); + assert_eq!(elem, Ty::NUMBER); + } + #[test] fn test_stdlib_foldl_returns_accumulator_type() { let (ty, _) = infer_doc("std.foldl(function(acc, x) acc + x, [1, 2, 3], 0)"); @@ -1581,6 +1761,17 @@ mod tests { assert_eq!(elem, Ty::NUMBER); } + #[test] + fn test_comprehension_if_filter_with_all_map_predicate() { + let (ty, env) = infer_doc( + r"local xs = [1, null, 2]; +assert std.all(std.map(function(x) x == null || std.isNumber(x), xs)); +[x for x in xs if x != null]", + ); + let elem = try_array(&env, ty).expect("expected array"); + assert_eq!(elem, Ty::NUMBER); + } + // Parameter constraint tests #[test] @@ -1828,10 +2019,56 @@ mod tests { let mut env = TypeEnv::new_default(); let expr = doc.ast().expr().unwrap(); let ty = infer_expr_ty(&expr, &mut env); + assert_eq!(ty, Ty::NEVER); + } + + #[test] + fn test_assert_object_has_narrows_unknown_object() { + let code = r#"local obj = std.parseJson("{}"); assert std.objectHas(obj, "key"); obj"#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let mut env = TypeEnv::new_default(); + let expr = 
doc.ast().expr().unwrap(); + let ty = infer_expr_ty(&expr, &mut env); let obj = try_object(&env, ty).expect("expected object"); assert_fields_ty(&obj, &["key"]); } + #[test] + fn test_object_string_index_then_field_access_preserves_type() { + let (ty, _) = infer_doc(r#"local hm = { foo: { a: true } }; hm["foo"].a"#); + assert_eq!(ty, Ty::TRUE); + } + + #[test] + fn test_union_object_field_access_preserves_union_members() { + let (ty, env) = infer_doc( + r#" +local f(x, y, z) = + assert std.isBoolean(x) && std.isBoolean(y) && std.isBoolean(z); + local obj = { + a: + if x then + { b: if y then 1 else false } + else + { b: if z then true else "hi" }, + }; + obj.a.b; +f(true, false, true) +"#, + ); + + let TyData::Union(variants) = get_ty_data(&env, ty) else { + panic!("expected union type, got {:?}", get_ty_data(&env, ty)); + }; + assert!(variants.contains(&Ty::NUMBER)); + assert!(variants.contains(&Ty::STRING)); + assert!( + variants.contains(&Ty::BOOL) + || variants.contains(&Ty::TRUE) + || variants.contains(&Ty::FALSE) + ); + } + #[rstest] #[case::normal_visibility("{ a: 1 }", "a", FieldVis::Normal)] #[case::hidden_visibility("{ a:: 1 }", "a", FieldVis::Hidden)] diff --git a/crates/jrsonnet-lsp-inference/src/flow.rs b/crates/jrsonnet-lsp-inference/src/flow.rs index c9cd1ed2..b7b50777 100644 --- a/crates/jrsonnet-lsp-inference/src/flow.rs +++ b/crates/jrsonnet-lsp-inference/src/flow.rs @@ -381,24 +381,7 @@ impl FactRepr { } FactRepr::HasField { field, field_type } => { - // Narrow to an object that has this field - let field_ty = field_type - .as_ref() - .map_or(Ty::ANY, |f| f.apply_to(Ty::ANY, store)); - - let obj_data = ObjectData { - fields: vec![( - field.clone(), - FieldDefInterned { - ty: field_ty, - required: true, - visibility: FieldVis::Normal, - }, - )], - has_unknown: true, - }; - let constraint = store.object(obj_data); - store.narrow(ty, constraint) + apply_has_field_fact(ty, field, field_type.as_deref(), store) } FactRepr::HasLen(len) => 
store.with_len(ty, *len), @@ -406,34 +389,8 @@ impl FactRepr { FactRepr::MinLen(min) => store.with_min_len(ty, *min), FactRepr::ArrayElemType(prim, _) => { - // Narrow array element type: Array -> Array let prim_ty = prim.as_ty(store); - match store.get(ty) { - TyData::Array { .. } | TyData::Any => store.array(prim_ty), - TyData::Tuple { elems } => { - // Narrow each tuple element - let narrowed: Vec<_> = elems - .iter() - .map(|&e| store.narrow(e, prim_ty)) - .filter(|&e| e != Ty::NEVER) - .collect(); - if narrowed.is_empty() { - Ty::NEVER - } else { - store.tuple(narrowed) - } - } - TyData::Union(types) => { - // Apply to each variant - let narrowed: Vec<_> = types - .iter() - .map(|&t| self.apply_to(t, store)) - .filter(|&t| t != Ty::NEVER) - .collect(); - store.union(narrowed) - } - _ => ty, // Non-array types unchanged - } + apply_array_elem_constraint(ty, prim_ty, store) } FactRepr::LiteralBool(value) => { @@ -455,6 +412,16 @@ impl FactRepr { } FactRepr::Or(lhs, rhs) => { + if let ( + FactRepr::ArrayElemType(lhs_prim, _), + FactRepr::ArrayElemType(rhs_prim, _), + ) = (&**lhs, &**rhs) + { + let lhs_ty = lhs_prim.as_ty(store); + let rhs_ty = rhs_prim.as_ty(store); + let union_ty = store.union(vec![lhs_ty, rhs_ty]); + return apply_array_elem_constraint(ty, union_ty, store); + } // Apply each fact and union the results // (a || b) means: either a is true OR b is true // So the type is: (ty narrowed by a) | (ty narrowed by b) @@ -574,6 +541,107 @@ impl FactRepr { } } +fn apply_array_elem_constraint(ty: Ty, elem_constraint: Ty, store: &mut MutStore) -> Ty { + match store.get(ty) { + TyData::Array { .. 
} | TyData::Any => store.array(elem_constraint), + TyData::Tuple { elems } => { + let narrowed: Vec<_> = elems + .iter() + .map(|&e| store.narrow(e, elem_constraint)) + .filter(|&e| e != Ty::NEVER) + .collect(); + if narrowed.is_empty() { + Ty::NEVER + } else { + store.tuple(narrowed) + } + } + TyData::Union(types) => { + let narrowed: Vec<_> = types + .iter() + .map(|&variant| apply_array_elem_constraint(variant, elem_constraint, store)) + .filter(|&variant| variant != Ty::NEVER) + .collect(); + store.union(narrowed) + } + _ => ty, + } +} + +fn apply_has_field_fact( + ty: Ty, + field: &str, + field_fact: Option<&Fact>, + store: &mut MutStore, +) -> Ty { + let required_field_ty = field_fact.map_or(Ty::ANY, |fact| fact.apply_to(Ty::ANY, store)); + + match store.get(ty) { + TyData::Object(mut obj_data) => { + if let Some((_, existing_field)) = + obj_data.fields.iter_mut().find(|(name, _)| name == field) + { + let next_field_ty = field_fact.map_or(existing_field.ty, |fact| { + fact.apply_to(existing_field.ty, store) + }); + if next_field_ty == Ty::NEVER { + return Ty::NEVER; + } + existing_field.ty = next_field_ty; + existing_field.required = true; + return store.object(obj_data); + } + + if obj_data.has_unknown { + obj_data.fields.push(( + field.to_string(), + FieldDefInterned { + ty: required_field_ty, + required: true, + visibility: FieldVis::Normal, + }, + )); + return store.object(obj_data); + } + + Ty::NEVER + } + TyData::Union(types) => { + let narrowed: Vec<_> = types + .iter() + .map(|&variant| apply_has_field_fact(variant, field, field_fact, store)) + .filter(|&variant| variant != Ty::NEVER) + .collect(); + store.union(narrowed) + } + TyData::Any => store.object(ObjectData { + fields: vec![( + field.to_string(), + FieldDefInterned { + ty: required_field_ty, + required: true, + visibility: FieldVis::Normal, + }, + )], + has_unknown: true, + }), + _ => { + let constraint = store.object(ObjectData { + fields: vec![( + field.to_string(), + FieldDefInterned { + 
ty: required_field_ty, + required: true, + visibility: FieldVis::Normal, + }, + )], + has_unknown: true, + }); + store.narrow(ty, constraint) + } + } +} + /// A collection of facts about multiple variables. #[derive(Debug, Clone, Default)] pub struct Facts { @@ -957,6 +1025,20 @@ fn check_higher_order_predicate(all_args: &ArgsDesc) -> Option<(String, Fact)> { /// /// For `std.isNumber`, returns `Fact::array_elem_number(Totality::Total)`. fn extract_predicate_element_fact(pred: &Expr) -> Option { + extract_std_predicate_element_fact(pred) + .or_else(|| extract_function_predicate_element_fact(pred)) +} + +/// Extract an array-element narrowing fact from a predicate expression. +/// +/// This supports stdlib predicates (`std.isNumber`) and simple single-parameter +/// function predicates (`function(x) x == null || std.isNumber(x)`). +#[must_use] +pub fn extract_array_predicate_fact(pred: &Expr) -> Option { + extract_predicate_element_fact(pred) +} + +fn extract_std_predicate_element_fact(pred: &Expr) -> Option { use jrsonnet_std_sig::{get_flow_typing, NarrowsTo, Totality as SigTotality}; // Check if it's a std.isX function reference @@ -987,10 +1069,60 @@ fn extract_predicate_element_fact(pred: &Expr) -> Option { NarrowsTo::Array => Fact::array_elem_array(totality), NarrowsTo::Object => Fact::array_elem_object(totality), NarrowsTo::Function => Fact::array_elem_function(totality), - NarrowsTo::Null => return None, // No array element fact for null + NarrowsTo::Null => Fact { + repr: FactRepr::ArrayElemType(PrimFact::Null, totality), + }, + }) +} + +fn extract_function_predicate_element_fact(pred: &Expr) -> Option { + let ExprBase::ExprFunction(function) = pred.expr_base()? 
else { + return None; + }; + let params_desc = function.params_desc()?; + let params: Vec<_> = params_desc.params().collect(); + let [param] = params.as_slice() else { + return None; + }; + if param.assign_token().is_some() || param.expr().is_some() { + return None; + } + + let destruct = param.destruct()?; + let jrsonnet_rowan_parser::nodes::Destruct::DestructFull(full) = destruct else { + return None; + }; + let param_name = full.name()?.ident_lit()?.text().to_string(); + let body = function.expr()?; + let facts = extract_facts(&body); + let param_fact = facts.get(¶m_name)?; + lift_fact_to_array_elements(param_fact) +} + +fn lift_fact_to_array_elements(fact: &Fact) -> Option { + Some(Fact { + repr: lift_fact_repr_to_array_elements(&fact.repr)?, }) } +fn lift_fact_repr_to_array_elements(repr: &FactRepr) -> Option { + match repr { + FactRepr::Prim(prim, totality) => Some(FactRepr::ArrayElemType(*prim, *totality)), + FactRepr::And(lhs, rhs) => Some(FactRepr::And( + Box::new(lift_fact_repr_to_array_elements(lhs)?), + Box::new(lift_fact_repr_to_array_elements(rhs)?), + )), + FactRepr::Or(lhs, rhs) => Some(FactRepr::Or( + Box::new(lift_fact_repr_to_array_elements(lhs)?), + Box::new(lift_fact_repr_to_array_elements(rhs)?), + )), + FactRepr::Not(inner) => Some(FactRepr::Not(Box::new(lift_fact_repr_to_array_elements( + inner, + )?))), + _ => None, + } +} + /// Map std function names to type facts using the spec. 
fn unary_std_fn_fact(fn_name: &str) -> Option { use jrsonnet_std_sig::{get_flow_typing, NarrowsTo, Totality as SigTotality}; @@ -1766,6 +1898,19 @@ mod tests { assert_eq!(result, expected); } + #[test] + fn test_higher_order_predicate_with_function_predicate_union() { + let mut store = MutStore::new(std::sync::Arc::new(GlobalTyStore::new())); + let facts = + parse_and_extract("std.all(std.map(function(x) x == null || std.isNumber(x), arr))"); + let arr_fact = facts.get("arr").expect("should have fact for arr"); + + let result = arr_fact.apply_to(Ty::ANY, &mut store); + let elem_union = store.union(vec![Ty::NULL, Ty::NUMBER]); + let expected = store.array(elem_union); + assert_eq!(result, expected); + } + #[rstest] #[case::is_decimal(Fact::number(Totality::Partial))] #[case::is_integer(Fact::number(Totality::Partial))] diff --git a/crates/jrsonnet-lsp-inference/src/provider.rs b/crates/jrsonnet-lsp-inference/src/provider.rs index 9442f2d2..0b5c4a63 100644 --- a/crates/jrsonnet-lsp-inference/src/provider.rs +++ b/crates/jrsonnet-lsp-inference/src/provider.rs @@ -6,7 +6,7 @@ use std::sync::Arc; use jrsonnet_lsp_document::{CanonicalPath, Document}; -use jrsonnet_lsp_import::ImportGraph; +use jrsonnet_lsp_import::{ImportGraph, ImportKind}; use jrsonnet_lsp_types::GlobalTyStore; use parking_lot::RwLock; use rustc_hash::FxHashMap; @@ -99,20 +99,21 @@ impl TypeProvider { doc_source: &D, ) { let graph = self.import_graph.read(); - - // process_with_dependencies processes in "leaves first" order, - // meaning dependencies are analyzed before dependents - graph.process_with_dependencies(path, |dep_path| { - if let Some(doc) = doc_source.get_document(dep_path) { - let resolved_imports = resolved_imports_for(&graph, dep_path); - analyze_and_cache_with_resolved_imports( - dep_path, - &doc, - &self.type_cache, - resolved_imports, - ); - } - }); + graph.process_with_dependencies( + path, + |entry| entry.kind == ImportKind::Code, + |dep_path| { + if let Some(doc) = 
doc_source.get_document(dep_path) { + let resolved_imports = resolved_imports_for(&graph, dep_path); + analyze_and_cache_with_resolved_imports( + dep_path, + &doc, + &self.type_cache, + resolved_imports, + ); + } + }, + ); } /// Get the global type store. @@ -135,6 +136,7 @@ fn resolved_imports_for( graph .imports(path) .iter() + .filter(|entry| entry.kind == ImportKind::Code) .filter_map(|entry| { entry .resolved_path @@ -224,6 +226,7 @@ mod tests { graph.update_file_with_entries( &main_path, vec![jrsonnet_lsp_import::ImportEntry { + kind: jrsonnet_lsp_import::ImportKind::Code, import_path: "dep.jsonnet".to_string(), resolved_path: Some(dep_path.clone()), binding_name: None, @@ -268,6 +271,7 @@ mod tests { graph.update_file_with_entries( &main_path, vec![jrsonnet_lsp_import::ImportEntry { + kind: jrsonnet_lsp_import::ImportKind::Code, import_path: "vendor/dep.jsonnet".to_string(), resolved_path: Some(dep_path), binding_name: None, @@ -284,4 +288,78 @@ mod tests { let analysis = provider.analyze(&main_path, &main_doc, &doc_source); assert_eq!(analysis.document_type(), Ty::NUMBER); } + + #[test] + fn test_provider_importstr_infers_string() { + let global_types = Arc::new(GlobalTyStore::new()); + let type_cache = new_shared_cache(Arc::clone(&global_types)); + let import_graph = Arc::new(RwLock::new(ImportGraph::new())); + let doc_source = TestDocSource::new(); + + let main_path = test_path("main.jsonnet"); + let main_doc = Document::new(r#"importstr "./script.k""#.to_string(), DocVersion(1)); + doc_source.insert(main_path.clone(), main_doc.clone()); + + let script_path = test_path("script.k"); + + { + let mut graph = import_graph.write(); + graph.update_file_with_entries( + &main_path, + vec![jrsonnet_lsp_import::ImportEntry { + kind: jrsonnet_lsp_import::ImportKind::String, + import_path: "./script.k".to_string(), + resolved_path: Some(script_path), + binding_name: None, + }], + ); + } + + let provider = TypeProvider::new( + Arc::clone(&type_cache), + 
Arc::clone(&import_graph), + Arc::clone(&global_types), + ); + + let analysis = provider.analyze(&main_path, &main_doc, &doc_source); + assert_eq!(analysis.document_type(), Ty::STRING); + } + + #[test] + fn test_provider_importbin_infers_bounded_byte_array() { + let global_types = Arc::new(GlobalTyStore::new()); + let type_cache = new_shared_cache(Arc::clone(&global_types)); + let import_graph = Arc::new(RwLock::new(ImportGraph::new())); + let doc_source = TestDocSource::new(); + + let main_path = test_path("main.jsonnet"); + let main_doc = Document::new(r#"importbin "./script.k""#.to_string(), DocVersion(1)); + doc_source.insert(main_path.clone(), main_doc.clone()); + + let script_path = test_path("script.k"); + + { + let mut graph = import_graph.write(); + graph.update_file_with_entries( + &main_path, + vec![jrsonnet_lsp_import::ImportEntry { + kind: jrsonnet_lsp_import::ImportKind::Binary, + import_path: "./script.k".to_string(), + resolved_path: Some(script_path), + binding_name: None, + }], + ); + } + + let provider = TypeProvider::new( + Arc::clone(&type_cache), + Arc::clone(&import_graph), + Arc::clone(&global_types), + ); + + let analysis = provider.analyze(&main_path, &main_doc, &doc_source); + let ty = analysis.document_type(); + assert!(analysis.is_array(ty)); + assert_eq!(analysis.display(ty), "array"); + } } diff --git a/crates/jrsonnet-std-sig/Cargo.toml b/crates/jrsonnet-std-sig/Cargo.toml index d148c5d1..2a8f202a 100644 --- a/crates/jrsonnet-std-sig/Cargo.toml +++ b/crates/jrsonnet-std-sig/Cargo.toml @@ -9,5 +9,8 @@ description = "Jsonnet stdlib function specifications for LSP" [dependencies] +[dev-dependencies] +jrsonnet-rowan-parser.workspace = true + [lints] workspace = true diff --git a/crates/jrsonnet-std-sig/src/lib.rs b/crates/jrsonnet-std-sig/src/lib.rs index 5cc4f357..351bb376 100644 --- a/crates/jrsonnet-std-sig/src/lib.rs +++ b/crates/jrsonnet-std-sig/src/lib.rs @@ -386,7 +386,7 @@ pub static FNS: &[StdFn] = &[ Param::req("map_func", 
FUNC), Param::req("arr", ARR), ], - return_spec: ReturnSpec::Fixed(ARR), + return_spec: ReturnSpec::ArrayOfFuncReturn(1), variadic: false, doc: "Filters then maps array elements.", example: None, @@ -1751,6 +1751,8 @@ pub fn get_flow_typing(name: &str) -> Option<&'static FlowTyping> { mod tests { use std::collections::BTreeSet; + use jrsonnet_rowan_parser::parse; + use super::*; const OFFICIAL_FUNCTIONS: &str = include_str!("../data/official_functions.txt"); @@ -1832,6 +1834,32 @@ mod tests { } } + #[test] + fn test_all_fns_have_non_empty_docs() { + for f in FNS { + assert!( + !f.doc.trim().is_empty(), + "{} has an empty documentation string", + f.name + ); + } + } + + #[test] + fn test_examples_parse_cleanly() { + for f in FNS { + let Some(example) = f.example else { + continue; + }; + let (_, errors) = parse(example); + assert!( + errors.is_empty(), + "example for {} must parse cleanly; errors={errors:?}; example={example}", + f.name + ); + } + } + #[test] fn test_covers_official_stdlib_functions() { let actual = actual_function_names(); From e47f987e12b1f1a037f74ed2976a18dcc8d780e9 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Thu, 12 Feb 2026 16:34:31 +0000 Subject: [PATCH 083/210] fix(lsp): refresh watched-file lifecycle and cross-file fallbacks - refresh closed-document cache from disk on watched create/change and evict on delete. - trigger global reindex on create/delete to keep import resolution consistent. - add async import-field fallback resolution for declaration/implementation/references. 
--- crates/jrsonnet-lsp-inference/src/manager.rs | 23 ++++++++++ crates/jrsonnet-lsp/src/server.rs | 44 +++++++++++++++---- .../jrsonnet-lsp/src/server/async_requests.rs | 38 ++++++++++++++-- docs/lsp/ARCHITECTURE.md | 5 +++ 4 files changed, 98 insertions(+), 12 deletions(-) diff --git a/crates/jrsonnet-lsp-inference/src/manager.rs b/crates/jrsonnet-lsp-inference/src/manager.rs index 719ba069..411dddc9 100644 --- a/crates/jrsonnet-lsp-inference/src/manager.rs +++ b/crates/jrsonnet-lsp-inference/src/manager.rs @@ -120,6 +120,29 @@ impl DocumentManager { } } + /// Refresh a closed document from disk after external file changes. + /// + /// Returns `true` when the closed cache was updated. + pub fn refresh_closed_from_disk(&self, path: &CanonicalPath) -> bool { + if self.is_open(path) { + return false; + } + + let Ok(text) = std::fs::read_to_string(path.as_path()) else { + return false; + }; + + let mut closed = self.closed.write(); + closed.put(path.clone(), Document::new(text, DocVersion::new(0))); + true + } + + /// Remove a closed document entry from the cache. + pub fn remove_closed(&self, path: &CanonicalPath) { + let mut closed = self.closed.write(); + closed.pop(path); + } + /// Get a reference to an open document. pub fn get( &self, diff --git a/crates/jrsonnet-lsp/src/server.rs b/crates/jrsonnet-lsp/src/server.rs index fb31b845..75478eb1 100644 --- a/crates/jrsonnet-lsp/src/server.rs +++ b/crates/jrsonnet-lsp/src/server.rs @@ -1375,15 +1375,7 @@ impl Server { // Import resolution and cached file types depend on jpath/tanka settings. 
self.type_cache.write().clear(); - let mut paths_to_reindex = { - let import_graph = self.import_graph.read(); - import_graph.all_files().cloned().collect::>() - }; - paths_to_reindex.extend(self.documents.open_paths()); - paths_to_reindex.sort(); - paths_to_reindex.dedup(); - - for path in paths_to_reindex { + for path in self.tracked_paths_for_reindex() { self.update_import_graph(&path); } } @@ -1408,6 +1400,9 @@ impl Server { /// This keeps import graph and type cache up to date for files that change on disk /// while not being open in the editor. fn on_did_change_watched_files(&self, params: DidChangeWatchedFilesParams) { + let mut changed_paths = Vec::new(); + let mut requires_global_reindex = false; + for change in params.changes { let Some(path) = CanonicalPath::from_uri(&change.uri) else { continue; @@ -1417,14 +1412,34 @@ impl Server { match change.typ { FileChangeType::DELETED => { + self.documents.remove_closed(&path); self.import_graph.write().remove_file(&path); + requires_global_reindex = true; } FileChangeType::CHANGED | FileChangeType::CREATED => { + if !self.documents.is_open(&path) { + self.documents.refresh_closed_from_disk(&path); + } self.update_import_graph(&path); + if change.typ == FileChangeType::CREATED { + requires_global_reindex = true; + } } _ => {} } + changed_paths.push(path); + } + + if requires_global_reindex { + for path in self.tracked_paths_for_reindex() { + self.update_import_graph(&path); + } + } + + changed_paths.sort(); + changed_paths.dedup(); + for path in changed_paths { if self.documents.is_open(&path) { self.schedule_diagnostics(&path); } @@ -1440,6 +1455,17 @@ impl Server { Self::update_import_graph_for_path(&self.documents, &self.import_graph, &self.config, path); } + fn tracked_paths_for_reindex(&self) -> Vec { + let mut paths = { + let import_graph = self.import_graph.read(); + import_graph.all_files().cloned().collect::>() + }; + paths.extend(self.documents.open_paths()); + paths.sort(); + paths.dedup(); + paths + } 
+ fn update_import_graph_for_path( documents: &SharedDocumentManager, import_graph: &Arc>, diff --git a/crates/jrsonnet-lsp/src/server/async_requests.rs b/crates/jrsonnet-lsp/src/server/async_requests.rs index 0362fb6c..365e33b8 100644 --- a/crates/jrsonnet-lsp/src/server/async_requests.rs +++ b/crates/jrsonnet-lsp/src/server/async_requests.rs @@ -5,7 +5,7 @@ use jrsonnet_lsp_handlers as handlers; use jrsonnet_lsp_import::{ImportGraph, ImportResolution}; use jrsonnet_lsp_inference::{SharedDocumentManager, SharedTypeCache, TypeAnalysis, TypeProvider}; use jrsonnet_lsp_types::GlobalTyStore; -use jrsonnet_rowan_parser::AstNode; +use jrsonnet_rowan_parser::{AstNode, SyntaxKind}; use lsp_types::{ CodeLens, CodeLensParams, CompletionParams, CompletionResponse, ExecuteCommandParams, GotoDefinitionParams, GotoDefinitionResponse, Hover, HoverParams, InlayHint, InlayHintParams, @@ -160,12 +160,13 @@ impl AsyncRequestContext { let range = if target == GotoTarget::Implementation { locations .map(|location| location.implementation) + .or_else(|| self.find_export_binding_in_file(&resolved, &fields)) .or_else(|| self.document_root_expr_range(&resolved)) .unwrap_or_default() } else { locations .map(|location| location.declaration) - .unwrap_or_default() + .or_else(|| self.find_export_binding_in_file(&resolved, &fields))? 
}; Some(GotoDefinitionResponse::Scalar(Location { uri: resolved_uri, @@ -217,7 +218,7 @@ impl AsyncRequestContext { let uri = ¶ms.text_document_position.text_document.uri; let position = params.text_document_position.position; let path = CanonicalPath::from_uri(uri)?; - let doc = self.documents.get(&path)?.clone(); + let doc = self.documents.get_document(&path)?; let lsp_pos = position.into(); let include_declaration = params.context.include_declaration; @@ -600,6 +601,37 @@ impl AsyncRequestContext { )) } + fn find_export_binding_in_file( + &self, + path: &CanonicalPath, + fields: &[String], + ) -> Option { + let [field_name] = fields else { + return None; + }; + + let doc = self.load_document_for_path(path)?; + let text = doc.text(); + let line_index = doc.line_index(); + + doc.ast() + .syntax() + .descendants_with_tokens() + .filter_map(jrsonnet_rowan_parser::rowan::NodeOrToken::into_token) + .filter(|token| token.kind() == SyntaxKind::IDENT && token.text() == field_name) + .find_map(|token| { + let position = line_index.position(token.text_range().start().into(), text)?; + match handlers::goto_definition(&doc, position) { + Some(handlers::DefinitionResult::Local(range)) => Some(range), + Some( + handlers::DefinitionResult::Import(_) + | handlers::DefinitionResult::ImportField { .. }, + ) + | None => None, + } + }) + } + fn local_implementation_range( document: &Document, declaration: lsp_types::Range, diff --git a/docs/lsp/ARCHITECTURE.md b/docs/lsp/ARCHITECTURE.md index 772f8fca..cd1904bc 100644 --- a/docs/lsp/ARCHITECTURE.md +++ b/docs/lsp/ARCHITECTURE.md @@ -261,6 +261,11 @@ is best for inspecting runtime value logic. Import graph update path: `update_import_graph` in `crates/jrsonnet-lsp/src/server.rs`. +`workspace/didChangeWatchedFiles` changes update the changed file immediately. +For create/delete events, the server also reindexes tracked files so unresolved +imports can transition to resolved (and vice versa) without requiring manual +file reopen. 
+ ## Diagnostics Architecture Diagnostics run in a dedicated background worker (`AsyncDiagnostics` in From 35bba77dc8b76368ceebbcd0478c7a4364f122a9 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Thu, 12 Feb 2026 16:34:46 +0000 Subject: [PATCH 084/210] fix(lsp-completion): improve field completion after bracket lookups - prefer exact expression-range type lookup before dot completion. - add bracket-anchor and local-binding fallbacks for indexed object access. - add completion regression coverage and range-based field extraction API. --- .../src/completion/fields.rs | 271 +++++++++++++++++- .../src/completion/handler.rs | 44 ++- crates/jrsonnet-lsp-inference/src/analysis.rs | 9 + 3 files changed, 315 insertions(+), 9 deletions(-) diff --git a/crates/jrsonnet-lsp-handlers/src/completion/fields.rs b/crates/jrsonnet-lsp-handlers/src/completion/fields.rs index cb57780a..d441539b 100644 --- a/crates/jrsonnet-lsp-handlers/src/completion/fields.rs +++ b/crates/jrsonnet-lsp-handlers/src/completion/fields.rs @@ -6,7 +6,7 @@ use jrsonnet_lsp_document::{ use jrsonnet_lsp_inference::TypeAnalysis; use jrsonnet_lsp_scope::ident_resolves_to_builtin_std; use jrsonnet_rowan_parser::{ - nodes::{BindDestruct, Destruct, FieldName, MemberFieldNormal, ObjBody, StmtLocal}, + nodes::{BindDestruct, Destruct, Expr, FieldName, MemberFieldNormal, ObjBody, StmtLocal}, AstNode, AstToken, SyntaxKind, SyntaxNode, }; use lsp_types::{CompletionItem, CompletionItemKind, CompletionTextEdit, Range, TextEdit}; @@ -43,9 +43,6 @@ pub fn check_object_field_completion( let ast = document.ast(); let line_index = document.line_index(); - if identifier.is_empty() { - return None; - } if identifier == "std" { // Skip builtin std (handled separately). If `std` is shadowed by user code, // keep object-field completion enabled. 
@@ -63,9 +60,68 @@ pub fn check_object_field_completion( // Look for expression just before the dot (not at the dot) let before_dot_pos = if dot_pos > 0 { dot_pos - 1 } else { 0 }; let before_dot_offset = u32::try_from(before_dot_pos).ok()?; - let before_dot_text_size = rowan::TextSize::from(before_dot_offset); + let dot_offset = u32::try_from(dot_pos).ok()?; + + // Prefer exact expression-range lookup to avoid ambiguity at punctuation + // positions (e.g. `hm["foo"].`, where querying at `]` picks the inner + // string literal instead of the full index expression). + if let Some(expr_range) = + expression_range_before_dot(ast.syntax(), before_dot_offset, dot_offset) + { + if let Some(fields) = analysis.fields_for_range(expr_range) { + let store = analysis.store(); + let items = fields + .into_iter() + .filter(|(name, _)| prefix.is_empty() || name.starts_with(prefix)) + .filter_map(|(name, ty)| { + field_completion_item( + name, + store.display(ty), + dot_pos, + offset, + line_index, + text, + ) + }) + .collect::>(); + + if !items.is_empty() { + return Some(items); + } + } + } + + // Bracket lookups (for example `obj["field"].`) can place the cursor on `]`, + // which does not reliably map back to the index expression type. Anchor at the + // matching `[` and retry from there. 
+ if let Some(index_anchor) = bracket_index_anchor(before_cursor, dot_pos) { + let index_anchor_text_size = rowan::TextSize::from(index_anchor); + if let Some(fields) = analysis.fields_at_position(ast.syntax(), index_anchor_text_size) { + let store = analysis.store(); + let items = fields + .into_iter() + .filter(|(name, _)| prefix.is_empty() || name.starts_with(prefix)) + .filter_map(|(name, ty)| { + field_completion_item( + name, + store.display(ty), + dot_pos, + offset, + line_index, + text, + ) + }) + .collect::>(); + + if !items.is_empty() { + return Some(items); + } + } + } - // Try to get fields from type inference + // Fall back to position-based lookup when we can't find a matching + // expression range (e.g. broken syntax around the dot). + let before_dot_text_size = rowan::TextSize::from(before_dot_offset); if let Some(fields) = analysis.fields_at_position(ast.syntax(), before_dot_text_size) { let store = analysis.store(); let items = fields @@ -81,8 +137,64 @@ pub fn check_object_field_completion( } } + if let Some((base_identifier, key)) = parse_bracket_lookup(before_dot) { + if let Some(target_range) = + find_bracket_lookup_target_expr_range(ast.syntax(), &base_identifier, &key, dot_offset) + { + if let Some(fields) = analysis.fields_for_range(target_range) { + let store = analysis.store(); + let items = fields + .into_iter() + .filter(|(name, _)| prefix.is_empty() || name.starts_with(prefix)) + .filter_map(|(name, ty)| { + field_completion_item( + name, + store.display(ty), + dot_pos, + offset, + line_index, + text, + ) + }) + .collect::>(); + if !items.is_empty() { + return Some(items); + } + } + + if let Some(target_node) = ast + .syntax() + .descendants() + .find(|node| node.text_range() == target_range) + { + if let Some(fields) = extract_object_fields(&target_node) { + let items = fields + .into_iter() + .filter(|name| prefix.is_empty() || name.starts_with(prefix)) + .filter_map(|name| { + field_completion_item( + name, + "object 
field".to_string(), + dot_pos, + offset, + line_index, + text, + ) + }) + .collect::>(); + if !items.is_empty() { + return Some(items); + } + } + } + } + } + + if identifier.is_empty() { + return None; + } + // Fall back to AST-based field extraction for cases where type inference isn't enough - let dot_offset = u32::try_from(dot_pos).ok()?; let fields = find_object_fields_for_identifier(ast.syntax(), identifier, dot_offset)?; // Filter and convert to completion items @@ -107,6 +219,75 @@ pub fn check_object_field_completion( Some(items) } +fn expression_range_before_dot( + root: &SyntaxNode, + before_dot_offset: u32, + dot_offset: u32, +) -> Option { + let token = token_at_offset(root, ByteOffset::new(before_dot_offset))?; + let dot = rowan::TextSize::from(dot_offset); + token + .parent_ancestors() + .filter_map(Expr::cast) + .map(|expr| expr.syntax().text_range()) + .filter(|range| range.end() == dot) + .min_by_key(|range| range.len()) +} + +fn bracket_index_anchor(before_cursor: &str, dot_pos: usize) -> Option { + let before_dot = before_cursor.get(..dot_pos)?; + let mut bracket_depth = 0usize; + + for (index, byte) in before_dot.as_bytes().iter().enumerate().rev() { + match *byte { + b']' => bracket_depth += 1, + b'[' => { + if bracket_depth == 0 { + continue; + } + bracket_depth -= 1; + if bracket_depth == 0 { + return u32::try_from(index).ok(); + } + } + _ => {} + } + } + + None +} + +fn parse_bracket_lookup(before_dot: &str) -> Option<(String, String)> { + let trimmed = before_dot.trim_end(); + let close_bracket = trimmed.rfind(']')?; + if close_bracket + 1 != trimmed.len() { + return None; + } + + let open_bracket = trimmed[..close_bracket].rfind('[')?; + let base_expr = trimmed[..open_bracket].trim_end(); + let key_expr = trimmed[open_bracket + 1..close_bracket].trim(); + let key = key_expr + .strip_prefix('"') + .and_then(|value| value.strip_suffix('"')) + .or_else(|| { + key_expr + .strip_prefix('\'') + .and_then(|value| value.strip_suffix('\'')) + })? 
+ .to_string(); + + let ident_start = base_expr + .rfind(|c: char| !c.is_ascii_alphanumeric() && c != '_') + .map_or(0, |index| index + 1); + let identifier = base_expr[ident_start..].trim(); + if identifier.is_empty() { + return None; + } + + Some((identifier.to_string(), key)) +} + fn field_completion_item( name: String, detail: String, @@ -182,6 +363,40 @@ fn find_object_fields_for_identifier( None } +fn find_bracket_lookup_target_expr_range( + root: &SyntaxNode, + identifier: &str, + key: &str, + offset: u32, +) -> Option { + let text_size = rowan::TextSize::from(offset); + + for node in root.descendants() { + if node.kind() != SyntaxKind::STMT_LOCAL { + continue; + } + if node.text_range().end() > text_size { + continue; + } + + let Some(stmt_local) = StmtLocal::cast(node.clone()) else { + continue; + }; + for bind in stmt_local.binds() { + let jrsonnet_rowan_parser::nodes::Bind::BindDestruct(bind_destruct) = bind else { + continue; + }; + if let Some(range) = + check_bind_destruct_for_bracket_target(&bind_destruct, identifier, key) + { + return Some(range); + } + } + } + + None +} + /// Check if a `BindDestruct` is for the given identifier and extract object fields. 
fn check_bind_destruct_for_object(bind: &BindDestruct, identifier: &str) -> Option> { let destruct = bind.into()?; @@ -205,6 +420,48 @@ fn check_bind_destruct_for_object(bind: &BindDestruct, identifier: &str) -> Opti } } +fn check_bind_destruct_for_bracket_target( + bind: &BindDestruct, + identifier: &str, + key: &str, +) -> Option { + let destruct = bind.into()?; + let Destruct::DestructFull(full) = destruct else { + return None; + }; + let bind_name = full.name()?; + let ident = bind_name.ident_lit()?; + if ident.text() != identifier { + return None; + } + + let value_expr = bind.value()?; + find_object_field_expr_range(value_expr.syntax(), key) +} + +fn find_object_field_expr_range(expr: &SyntaxNode, key: &str) -> Option { + let obj_node = find_object_in_expr(expr)?; + let obj_body = ObjBody::cast(obj_node)?; + let ObjBody::ObjBodyMemberList(member_list) = obj_body else { + return None; + }; + + for member in member_list.members() { + let jrsonnet_rowan_parser::nodes::Member::MemberFieldNormal(field) = member else { + continue; + }; + let Some(field_name) = extract_field_name(&field) else { + continue; + }; + if field_name != key { + continue; + } + return Some(field.expr()?.syntax().text_range()); + } + + None +} + /// Extract field names from an object expression. 
fn extract_object_fields(expr: &SyntaxNode) -> Option> { // The expression might be wrapped in Expr nodes diff --git a/crates/jrsonnet-lsp-handlers/src/completion/handler.rs b/crates/jrsonnet-lsp-handlers/src/completion/handler.rs index 9395a7b2..225de439 100644 --- a/crates/jrsonnet-lsp-handlers/src/completion/handler.rs +++ b/crates/jrsonnet-lsp-handlers/src/completion/handler.rs @@ -141,11 +141,10 @@ pub fn completion_with_import_roots( mod tests { use std::sync::Arc; - use super::super::imports::find_import_string_start; use jrsonnet_lsp_document::DocVersion; use jrsonnet_lsp_types::GlobalTyStore; - use super::*; + use super::{super::imports::find_import_string_start, *}; /// Create a `TypeAnalysis` for test purposes. fn test_analysis(doc: &Document) -> TypeAnalysis { @@ -800,4 +799,45 @@ mod tests { ] ); } + + #[test] + fn test_object_field_completion_after_bracket_lookup() { + let code = r#"local hm = { foo: { a: true, b: 4, c: "hi" } }; hm["foo"]."#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let analysis = test_analysis(&doc); + let pos = ( + 0, + u32::try_from(code.len()).expect("test code length fits in u32"), + ) + .into(); + + let list = completion(&doc, pos, None, &analysis) + .expect("should get object field completions after bracket lookup"); + let mut items: Vec<_> = list.items; + items.sort_by(|a, b| a.label.cmp(&b.label)); + + assert_eq!( + items, + vec![ + CompletionItem { + label: "a".to_string(), + kind: Some(CompletionItemKind::FIELD), + detail: Some("true".to_string()), + ..Default::default() + }, + CompletionItem { + label: "b".to_string(), + kind: Some(CompletionItemKind::FIELD), + detail: Some("number".to_string()), + ..Default::default() + }, + CompletionItem { + label: "c".to_string(), + kind: Some(CompletionItemKind::FIELD), + detail: Some("string".to_string()), + ..Default::default() + }, + ] + ); + } } diff --git a/crates/jrsonnet-lsp-inference/src/analysis.rs b/crates/jrsonnet-lsp-inference/src/analysis.rs 
index 04dff460..e13512c3 100644 --- a/crates/jrsonnet-lsp-inference/src/analysis.rs +++ b/crates/jrsonnet-lsp-inference/src/analysis.rs @@ -221,6 +221,15 @@ impl TypeAnalysis { self.extract_fields(ty) } + /// Get all known fields for the expression that exactly matches `range`. + /// + /// This is useful when callers have already identified a concrete expression + /// node and want to avoid position-based ambiguity. + pub fn fields_for_range(&self, range: TextRange) -> Option> { + let ty = self.type_for_range(range)?; + self.extract_fields(ty) + } + /// Extract fields from a type (handles unions by merging fields). fn extract_fields(&self, ty: Ty) -> Option> { let ty_data = self.store.read().get(ty); From 15ed5dadfba057dedeb97e7eb62f01d600e9b435 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Thu, 12 Feb 2026 16:35:23 +0000 Subject: [PATCH 085/210] feat(lsp-semantic-tokens): formalize legend contract and docs sync - expose typed semantic token type/modifier enums and index helpers. - generate legend reference markdown and assert docs block stays synchronized. - tighten integration range assertions to exact semantic token streams. 
--- Cargo.lock | 21 ++ crates/jrsonnet-lsp-handlers/Cargo.toml | 2 + crates/jrsonnet-lsp-handlers/src/lib.rs | 3 +- .../src/semantic_tokens.rs | 295 +++++++++++++----- crates/jrsonnet-lsp/tests/integration_test.rs | 94 +++++- docs/lsp/HANDLERS.md | 43 +++ 6 files changed, 358 insertions(+), 100 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index d9617bea..e98c06f4 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1891,6 +1891,8 @@ dependencies = [ "rstest 0.23.0", "serde", "serde_json", + "strum", + "strum_macros", "tempfile", "tracing", ] @@ -3884,6 +3886,25 @@ version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" +[[package]] +name = "strum" +version = "0.26.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fec0f0aef304996cf250b31b5a10dee7980c85da9d759361292b8bca5a18f06" + +[[package]] +name = "strum_macros" +version = "0.26.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c6bee85a5a24955dc440386795aa378cd9cf82acd5f764469152d2270e581be" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "rustversion", + "syn", +] + [[package]] name = "subtle" version = "2.6.1" diff --git a/crates/jrsonnet-lsp-handlers/Cargo.toml b/crates/jrsonnet-lsp-handlers/Cargo.toml index e9b6f01d..98059cf4 100644 --- a/crates/jrsonnet-lsp-handlers/Cargo.toml +++ b/crates/jrsonnet-lsp-handlers/Cargo.toml @@ -20,6 +20,8 @@ rayon = "1.11.0" serde = { workspace = true, features = ["derive"] } rowan.workspace = true serde_json.workspace = true +strum = "0.26.3" +strum_macros = "0.26.4" tracing = "0.1.44" [lints] diff --git a/crates/jrsonnet-lsp-handlers/src/lib.rs b/crates/jrsonnet-lsp-handlers/src/lib.rs index 61a3f331..e24f9dd7 100644 --- a/crates/jrsonnet-lsp-handlers/src/lib.rs +++ b/crates/jrsonnet-lsp-handlers/src/lib.rs @@ -31,7 +31,8 @@ pub use inlay_hint::inlay_hints; pub use 
references::{find_cross_file_references, find_references}; pub use rename::{prepare_rename, rename, rename_cross_file}; pub use semantic_tokens::{ - legend as semantic_tokens_legend, semantic_tokens, semantic_tokens_range, + legend as semantic_tokens_legend, semantic_token_reference_markdown, semantic_tokens, + semantic_tokens_range, SemanticTokenModifierName, SemanticTokenTypeName, }; pub use signature_help::signature_help; pub use symbols::{document_symbols, workspace_symbols_for_document}; diff --git a/crates/jrsonnet-lsp-handlers/src/semantic_tokens.rs b/crates/jrsonnet-lsp-handlers/src/semantic_tokens.rs index abda36a6..3e64ab93 100644 --- a/crates/jrsonnet-lsp-handlers/src/semantic_tokens.rs +++ b/crates/jrsonnet-lsp-handlers/src/semantic_tokens.rs @@ -2,6 +2,8 @@ //! //! Provides semantic highlighting for Jsonnet code. +use std::fmt::Write as _; + use jrsonnet_lsp_document::{Document, LineIndex}; use jrsonnet_lsp_scope::{expr_resolves_to_builtin_std, ident_resolves_to_builtin_std}; use jrsonnet_rowan_parser::{ @@ -9,13 +11,15 @@ use jrsonnet_rowan_parser::{ AstNode, SyntaxKind, SyntaxNode, SyntaxToken, }; use lsp_types::{Range, SemanticToken, SemanticTokenType, SemanticTokens, SemanticTokensLegend}; +use strum_macros::{AsRefStr, EnumString, FromRepr}; /// Semantic token type with compile-time index. /// /// The enum values match the indices in `TOKEN_TYPES`. -#[derive(Debug, Clone, Copy, PartialEq, Eq)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, EnumString, FromRepr, AsRefStr)] +#[strum(ascii_case_insensitive, serialize_all = "snake_case")] #[repr(u32)] -enum TokenType { +pub enum SemanticTokenTypeName { Namespace = 0, Parameter = 7, Variable = 8, @@ -29,9 +33,50 @@ enum TokenType { Operator = 21, } +impl SemanticTokenTypeName { + #[must_use] + pub const fn as_index(self) -> u32 { + self as u32 + } +} + +/// Semantic token modifier names with their corresponding LSP bit flags. 
+#[derive(Debug, Clone, Copy, PartialEq, Eq, EnumString, FromRepr, AsRefStr)] +#[strum(ascii_case_insensitive, serialize_all = "snake_case")] +#[repr(u32)] +pub enum SemanticTokenModifierName { + Declaration = 0, + Definition = 1, + Readonly = 2, + Static = 3, + Deprecated = 4, + Abstract = 5, + Async = 6, + Modification = 7, + Documentation = 8, + #[strum( + serialize = "default_library", + serialize = "default-library", + serialize = "defaultlibrary" + )] + DefaultLibrary = 9, +} + +impl SemanticTokenModifierName { + #[must_use] + pub const fn as_index(self) -> u32 { + self as u32 + } + + #[must_use] + pub const fn as_bitset(self) -> u32 { + 1 << self.as_index() + } +} + /// Semantic token types we support. /// -/// The indices in this array must match the `TokenType` enum values. +/// The indices in this array must match the `SemanticTokenTypeName` enum values. pub const TOKEN_TYPES: &[SemanticTokenType] = &[ SemanticTokenType::NAMESPACE, // 0: std SemanticTokenType::TYPE, // 1: (unused) @@ -71,15 +116,36 @@ pub const TOKEN_MODIFIERS: &[lsp_types::SemanticTokenModifier] = &[ lsp_types::SemanticTokenModifier::DEFAULT_LIBRARY, ]; -fn to_u32(value: usize) -> u32 { - u32::try_from(value).unwrap_or(u32::MAX) +/// Render a Markdown reference block for semantic token types and modifiers. +/// +/// This output is consumed by docs validation tests to keep docs in sync with +/// the actual semantic token legend. 
+#[must_use] +pub fn semantic_token_reference_markdown() -> String { + let mut markdown = String::new(); + markdown.push_str("#### Semantic Token Legend (Generated)\n\n"); + markdown.push_str("Token types (`index`: `lsp_name`, usage):\n"); + for (index, token_type) in TOKEN_TYPES.iter().enumerate() { + let used = SemanticTokenTypeName::from_repr(index as u32); + let usage = used.map_or_else(|| "unused".to_owned(), |token| token.as_ref().to_owned()); + let _ = writeln!(markdown, "- `{index}`: `{}` ({usage})", token_type.as_str()); + } + markdown.push('\n'); + markdown.push_str("Token modifiers (`bit`: `lsp_name`, usage):\n"); + for (index, modifier) in TOKEN_MODIFIERS.iter().enumerate() { + let used = SemanticTokenModifierName::from_repr(index as u32); + let usage = used.map_or_else(|| "unused".to_owned(), |token| token.as_ref().to_owned()); + let _ = writeln!( + markdown, + "- `1 << {index}`: `{}` ({usage})", + modifier.as_str() + ); + } + markdown } -/// Modifier bit flags. -mod token_modifier { - pub const DECLARATION: u32 = 1 << 0; - pub const DEFINITION: u32 = 1 << 1; - pub const DEFAULT_LIBRARY: u32 = 1 << 9; +fn to_u32(value: usize) -> u32 { + u32::try_from(value).unwrap_or(u32::MAX) } /// Get the semantic tokens legend. 
@@ -160,19 +226,19 @@ impl<'a> SemanticTokenBuilder<'a> { let kind = token.kind(); if kind.is_semantic_keyword_token() { - self.add_token(token, TokenType::Keyword, 0); + self.add_token(token, SemanticTokenTypeName::Keyword, 0); return; } if kind.is_semantic_comment_token() { - self.add_token(token, TokenType::Comment, 0); + self.add_token(token, SemanticTokenTypeName::Comment, 0); return; } if kind.is_semantic_string_token() { - self.add_token(token, TokenType::String, 0); + self.add_token(token, SemanticTokenTypeName::String, 0); return; } if kind.is_semantic_number_token() { - self.add_token(token, TokenType::Number, 0); + self.add_token(token, SemanticTokenTypeName::Number, 0); return; } if kind == SyntaxKind::IDENT { @@ -181,7 +247,7 @@ impl<'a> SemanticTokenBuilder<'a> { return; } if kind.is_semantic_operator_token() { - self.add_token(token, TokenType::Operator, 0); + self.add_token(token, SemanticTokenTypeName::Operator, 0); } } @@ -192,7 +258,11 @@ impl<'a> SemanticTokenBuilder<'a> { // Check if this is "std" if token.text() == "std" && ident_resolves_to_builtin_std(token) { - self.add_token(token, TokenType::Namespace, token_modifier::DEFAULT_LIBRARY); + self.add_token( + token, + SemanticTokenTypeName::Namespace, + SemanticTokenModifierName::DefaultLibrary.as_bitset(), + ); return; } @@ -218,8 +288,9 @@ impl<'a> SemanticTokenBuilder<'a> { // Function definition self.add_token( token, - TokenType::Function, - token_modifier::DECLARATION | token_modifier::DEFINITION, + SemanticTokenTypeName::Function, + SemanticTokenModifierName::Declaration.as_bitset() + | SemanticTokenModifierName::Definition.as_bitset(), ); } @@ -229,11 +300,11 @@ impl<'a> SemanticTokenBuilder<'a> { if is_stdlib_access(&grandparent) { self.add_token( token, - TokenType::Function, - token_modifier::DEFAULT_LIBRARY, + SemanticTokenTypeName::Function, + SemanticTokenModifierName::DefaultLibrary.as_bitset(), ); } else { - self.add_token(token, TokenType::Property, 0); + 
self.add_token(token, SemanticTokenTypeName::Property, 0); } } @@ -251,14 +322,16 @@ impl<'a> SemanticTokenBuilder<'a> { if great_grandparent.kind() == SyntaxKind::MEMBER_FIELD_METHOD { self.add_token( token, - TokenType::Method, - token_modifier::DECLARATION | token_modifier::DEFINITION, + SemanticTokenTypeName::Method, + SemanticTokenModifierName::Declaration.as_bitset() + | SemanticTokenModifierName::Definition.as_bitset(), ); } else if great_grandparent.kind() == SyntaxKind::MEMBER_FIELD_NORMAL { self.add_token( token, - TokenType::Property, - token_modifier::DECLARATION | token_modifier::DEFINITION, + SemanticTokenTypeName::Property, + SemanticTokenModifierName::Declaration.as_bitset() + | SemanticTokenModifierName::Definition.as_bitset(), ); } } @@ -267,7 +340,12 @@ impl<'a> SemanticTokenBuilder<'a> { } } - fn add_token(&mut self, token: &SyntaxToken, token_type: TokenType, token_modifiers: u32) { + fn add_token( + &mut self, + token: &SyntaxToken, + token_type: SemanticTokenTypeName, + token_modifiers: u32, + ) { let range = token.text_range(); let start_pos = self .line_index @@ -367,25 +445,25 @@ impl<'a> SemanticTokenBuilder<'a> { } /// Classify a variable reference to determine its token type. -fn classify_variable_reference(token: &SyntaxToken) -> TokenType { +fn classify_variable_reference(token: &SyntaxToken) -> SemanticTokenTypeName { // Walk up the scope chain to find the definition let Some(mut current) = token.parent() else { - return TokenType::Variable; + return SemanticTokenTypeName::Variable; }; let name = token.text(); while let Some(parent) = current.parent() { if is_parameter_in_scope(&parent, name) { - return TokenType::Parameter; + return SemanticTokenTypeName::Parameter; } if is_function_in_scope(&parent, ¤t, name) { - return TokenType::Function; + return SemanticTokenTypeName::Function; } current = parent; } - TokenType::Variable + SemanticTokenTypeName::Variable } /// Check if a name is a parameter in the given scope. 
@@ -459,7 +537,7 @@ fn is_function_in_scope(scope: &SyntaxNode, child: &SyntaxNode, name: &str) -> b } /// Classify a definition site. -fn classify_definition_site(destruct_node: &SyntaxNode) -> (TokenType, u32) { +fn classify_definition_site(destruct_node: &SyntaxNode) -> (SemanticTokenTypeName, u32) { // Walk up to find if this is a parameter or local variable let mut current = destruct_node.clone(); @@ -467,14 +545,16 @@ fn classify_definition_site(destruct_node: &SyntaxNode) -> (TokenType, u32) { match parent.kind() { SyntaxKind::PARAM => { return ( - TokenType::Parameter, - token_modifier::DECLARATION | token_modifier::DEFINITION, + SemanticTokenTypeName::Parameter, + SemanticTokenModifierName::Declaration.as_bitset() + | SemanticTokenModifierName::Definition.as_bitset(), ); } SyntaxKind::BIND_DESTRUCT | SyntaxKind::FOR_SPEC => { return ( - TokenType::Variable, - token_modifier::DECLARATION | token_modifier::DEFINITION, + SemanticTokenTypeName::Variable, + SemanticTokenModifierName::Declaration.as_bitset() + | SemanticTokenModifierName::Definition.as_bitset(), ); } _ => {} @@ -483,8 +563,9 @@ fn classify_definition_site(destruct_node: &SyntaxNode) -> (TokenType, u32) { } ( - TokenType::Variable, - token_modifier::DECLARATION | token_modifier::DEFINITION, + SemanticTokenTypeName::Variable, + SemanticTokenModifierName::Declaration.as_bitset() + | SemanticTokenModifierName::Definition.as_bitset(), ) } @@ -518,7 +599,7 @@ mod tests { line: u32, start_char: u32, length: u32, - token_type: TokenType, + token_type: SemanticTokenTypeName, token_modifiers: u32, ) -> AbsoluteToken { AbsoluteToken { @@ -563,22 +644,23 @@ mod tests { assert_eq!( decode_absolute(&tokens), vec![ - token(0, 0, 5, TokenType::Keyword, 0), + token(0, 0, 5, SemanticTokenTypeName::Keyword, 0), token( 0, 6, 1, - TokenType::Variable, - token_modifier::DECLARATION | token_modifier::DEFINITION + SemanticTokenTypeName::Variable, + SemanticTokenModifierName::Declaration.as_bitset() + | 
SemanticTokenModifierName::Definition.as_bitset() ), - token(0, 8, 1, TokenType::Operator, 0), - token(0, 10, 2, TokenType::Keyword, 0), - token(0, 13, 4, TokenType::Keyword, 0), - token(0, 18, 4, TokenType::Keyword, 0), - token(0, 23, 1, TokenType::Number, 0), - token(0, 25, 4, TokenType::Keyword, 0), - token(0, 30, 1, TokenType::Number, 0), - token(0, 33, 1, TokenType::Variable, 0), + token(0, 8, 1, SemanticTokenTypeName::Operator, 0), + token(0, 10, 2, SemanticTokenTypeName::Keyword, 0), + token(0, 13, 4, SemanticTokenTypeName::Keyword, 0), + token(0, 18, 4, SemanticTokenTypeName::Keyword, 0), + token(0, 23, 1, SemanticTokenTypeName::Number, 0), + token(0, 25, 4, SemanticTokenTypeName::Keyword, 0), + token(0, 30, 1, SemanticTokenTypeName::Number, 0), + token(0, 33, 1, SemanticTokenTypeName::Variable, 0), ] ); } @@ -592,35 +674,38 @@ mod tests { assert_eq!( decode_absolute(&tokens), vec![ - token(0, 0, 5, TokenType::Keyword, 0), + token(0, 0, 5, SemanticTokenTypeName::Keyword, 0), token( 0, 6, 3, - TokenType::Function, - token_modifier::DECLARATION | token_modifier::DEFINITION + SemanticTokenTypeName::Function, + SemanticTokenModifierName::Declaration.as_bitset() + | SemanticTokenModifierName::Definition.as_bitset() ), token( 0, 10, 1, - TokenType::Parameter, - token_modifier::DECLARATION | token_modifier::DEFINITION + SemanticTokenTypeName::Parameter, + SemanticTokenModifierName::Declaration.as_bitset() + | SemanticTokenModifierName::Definition.as_bitset() ), token( 0, 13, 1, - TokenType::Parameter, - token_modifier::DECLARATION | token_modifier::DEFINITION + SemanticTokenTypeName::Parameter, + SemanticTokenModifierName::Declaration.as_bitset() + | SemanticTokenModifierName::Definition.as_bitset() ), - token(0, 16, 1, TokenType::Operator, 0), - token(0, 18, 1, TokenType::Parameter, 0), - token(0, 20, 1, TokenType::Operator, 0), - token(0, 22, 1, TokenType::Parameter, 0), - token(0, 25, 3, TokenType::Function, 0), - token(0, 29, 1, TokenType::Number, 0), - 
token(0, 32, 1, TokenType::Number, 0), + token(0, 16, 1, SemanticTokenTypeName::Operator, 0), + token(0, 18, 1, SemanticTokenTypeName::Parameter, 0), + token(0, 20, 1, SemanticTokenTypeName::Operator, 0), + token(0, 22, 1, SemanticTokenTypeName::Parameter, 0), + token(0, 25, 3, SemanticTokenTypeName::Function, 0), + token(0, 29, 1, SemanticTokenTypeName::Number, 0), + token(0, 32, 1, SemanticTokenTypeName::Number, 0), ] ); } @@ -638,27 +723,30 @@ mod tests { 0, 2, 4, - TokenType::Property, - token_modifier::DECLARATION | token_modifier::DEFINITION + SemanticTokenTypeName::Property, + SemanticTokenModifierName::Declaration.as_bitset() + | SemanticTokenModifierName::Definition.as_bitset() ), - token(0, 8, 6, TokenType::String, 0), + token(0, 8, 6, SemanticTokenTypeName::String, 0), token( 0, 16, 5, - TokenType::Method, - token_modifier::DECLARATION | token_modifier::DEFINITION + SemanticTokenTypeName::Method, + SemanticTokenModifierName::Declaration.as_bitset() + | SemanticTokenModifierName::Definition.as_bitset() ), token( 0, 22, 1, - TokenType::Parameter, - token_modifier::DECLARATION | token_modifier::DEFINITION + SemanticTokenTypeName::Parameter, + SemanticTokenModifierName::Declaration.as_bitset() + | SemanticTokenModifierName::Definition.as_bitset() ), - token(0, 26, 8, TokenType::String, 0), - token(0, 35, 1, TokenType::Operator, 0), - token(0, 37, 1, TokenType::Variable, 0), + token(0, 26, 8, SemanticTokenTypeName::String, 0), + token(0, 35, 1, SemanticTokenTypeName::Operator, 0), + token(0, 37, 1, SemanticTokenTypeName::Variable, 0), ] ); } @@ -676,19 +764,19 @@ mod tests { 0, 0, 3, - TokenType::Namespace, - token_modifier::DEFAULT_LIBRARY + SemanticTokenTypeName::Namespace, + SemanticTokenModifierName::DefaultLibrary.as_bitset() ), token( 0, 4, 6, - TokenType::Function, - token_modifier::DEFAULT_LIBRARY + SemanticTokenTypeName::Function, + SemanticTokenModifierName::DefaultLibrary.as_bitset() ), - token(0, 12, 1, TokenType::Number, 0), - token(0, 15, 1, 
TokenType::Number, 0), - token(0, 18, 1, TokenType::Number, 0), + token(0, 12, 1, SemanticTokenTypeName::Number, 0), + token(0, 15, 1, SemanticTokenTypeName::Number, 0), + token(0, 18, 1, SemanticTokenTypeName::Number, 0), ] ); } @@ -700,6 +788,41 @@ mod tests { assert_eq!(leg.token_modifiers, TOKEN_MODIFIERS.to_vec()); } + #[test] + fn test_semantic_token_legend_docs_are_in_sync() { + let docs_path = + std::path::Path::new(env!("CARGO_MANIFEST_DIR")).join("../../docs/lsp/HANDLERS.md"); + let docs = std::fs::read_to_string(&docs_path).expect("read HANDLERS.md"); + let start_marker = ""; + let end_marker = ""; + let start_index = docs + .find(start_marker) + .expect("semantic token legend start marker should exist"); + let end_marker_index = docs + .find(end_marker) + .expect("semantic token legend end marker should exist"); + let end_index = end_marker_index + end_marker.len(); + let actual = docs[start_index..end_index].trim_end(); + let expected = format!( + "{start_marker}\n{}\n{end_marker}", + semantic_token_reference_markdown().trim_end() + ); + let normalized_actual = actual + .lines() + .map(str::trim_end) + .filter(|line| !line.is_empty()) + .collect::>(); + let normalized_expected = expected + .lines() + .map(str::trim_end) + .filter(|line| !line.is_empty()) + .collect::>(); + assert_eq!( + normalized_actual, normalized_expected, + "semantic token docs drifted; update docs/lsp/HANDLERS.md legend block" + ); + } + #[test] fn test_semantic_tokens_range_filters_lines() { let code = "local x = 1\nlocal y = x + 1"; @@ -721,11 +844,11 @@ mod tests { assert_eq!( decode_absolute(&tokens), vec![ - token(1, 0, 5, TokenType::Keyword, 0), - token(1, 6, 1, TokenType::Variable, 0), - token(1, 8, 1, TokenType::Operator, 0), - token(1, 12, 1, TokenType::Operator, 0), - token(1, 14, 1, TokenType::Number, 0), + token(1, 0, 5, SemanticTokenTypeName::Keyword, 0), + token(1, 6, 1, SemanticTokenTypeName::Variable, 0), + token(1, 8, 1, SemanticTokenTypeName::Operator, 0), + 
token(1, 12, 1, SemanticTokenTypeName::Operator, 0), + token(1, 14, 1, SemanticTokenTypeName::Number, 0), ] ); } diff --git a/crates/jrsonnet-lsp/tests/integration_test.rs b/crates/jrsonnet-lsp/tests/integration_test.rs index b62faf77..771d10b5 100644 --- a/crates/jrsonnet-lsp/tests/integration_test.rs +++ b/crates/jrsonnet-lsp/tests/integration_test.rs @@ -6,6 +6,7 @@ use std::{fmt::Write as _, fs, thread, time::Duration}; use assert_matches::assert_matches; +use jrsonnet_lsp_handlers::{SemanticTokenModifierName, SemanticTokenTypeName}; use lsp_server::{Connection, Message, Notification, Request}; use lsp_types::{ notification::{ @@ -31,6 +32,78 @@ use lsp_types::{ use serde_json::json; use tempfile::TempDir; +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +struct ExpectedSemanticToken { + line: u32, + start: u32, + len: u32, + token_type: SemanticTokenTypeName, + modifiers: u32, +} + +impl ExpectedSemanticToken { + const fn new( + line: u32, + start: u32, + len: u32, + token_type: SemanticTokenTypeName, + modifiers: u32, + ) -> Self { + Self { + line, + start, + len, + token_type, + modifiers, + } + } +} + +fn semantic_modifiers(modifiers: &[SemanticTokenModifierName]) -> u32 { + modifiers + .iter() + .fold(0_u32, |acc, modifier| acc | modifier.as_bitset()) +} + +fn semantic_token( + line: u32, + start: u32, + len: u32, + token_type: SemanticTokenTypeName, + modifiers: &[SemanticTokenModifierName], +) -> ExpectedSemanticToken { + ExpectedSemanticToken::new(line, start, len, token_type, semantic_modifiers(modifiers)) +} + +fn encode_semantic_tokens(mut tokens: Vec) -> lsp_types::SemanticTokens { + tokens.sort_by_key(|token| (token.line, token.start)); + let mut encoded = Vec::with_capacity(tokens.len()); + let mut prev_line = 0_u32; + let mut prev_start = 0_u32; + for token in tokens { + let delta_line = token.line - prev_line; + let delta_start = if delta_line == 0 { + token.start - prev_start + } else { + token.start + }; + encoded.push(lsp_types::SemanticToken { 
+ delta_line, + delta_start, + length: token.len, + token_type: token.token_type.as_index(), + token_modifiers_bitset: token.modifiers, + }); + prev_line = token.line; + prev_start = token.start; + } + + lsp_types::SemanticTokens { + result_id: None, + data: encoded, + } +} + /// Helper to create an initialize request. fn initialize_request(id: i32) -> Request { initialize_request_with_options(id, serde_json::Value::Null) @@ -3449,19 +3522,14 @@ fn test_semantic_tokens_range_request() { let tokens: Option = serde_json::from_value(response.result.expect("should have result")).unwrap(); let tokens = tokens.expect("semantic tokens range should be returned"); - assert!( - !tokens.data.is_empty(), - "range should include tokens from requested line" - ); - - let mut absolute_line = 0_u32; - for token in tokens.data { - absolute_line += token.delta_line; - assert_eq!( - absolute_line, 1, - "token should be constrained to requested range line" - ); - } + let expected = encode_semantic_tokens(vec![ + semantic_token(1, 0, 5, SemanticTokenTypeName::Keyword, &[]), + semantic_token(1, 6, 6, SemanticTokenTypeName::Variable, &[]), + semantic_token(1, 13, 1, SemanticTokenTypeName::Operator, &[]), + semantic_token(1, 21, 1, SemanticTokenTypeName::Operator, &[]), + semantic_token(1, 23, 1, SemanticTokenTypeName::Number, &[]), + ]); + assert_eq!(tokens, expected, "semantic tokens range mismatch"); client_conn .sender diff --git a/docs/lsp/HANDLERS.md b/docs/lsp/HANDLERS.md index f2cf5b88..c5d4701f 100644 --- a/docs/lsp/HANDLERS.md +++ b/docs/lsp/HANDLERS.md @@ -265,6 +265,49 @@ File: `crates/jrsonnet-lsp-handlers/src/semantic_tokens.rs` centralized in `jrsonnet-rowan-parser/src/syntax_semantics.rs` via `SyntaxKind` semantic helpers. 
+ + +#### Semantic Token Legend (Generated) + +Token types (`index`: `lsp_name`, usage): + +- `0`: `namespace` (namespace) +- `1`: `type` (unused) +- `2`: `class` (unused) +- `3`: `enum` (unused) +- `4`: `interface` (unused) +- `5`: `struct` (unused) +- `6`: `typeParameter` (unused) +- `7`: `parameter` (parameter) +- `8`: `variable` (variable) +- `9`: `property` (property) +- `10`: `enumMember` (unused) +- `11`: `event` (unused) +- `12`: `function` (function) +- `13`: `method` (method) +- `14`: `macro` (unused) +- `15`: `keyword` (keyword) +- `16`: `modifier` (unused) +- `17`: `comment` (comment) +- `18`: `string` (string) +- `19`: `number` (number) +- `20`: `regexp` (unused) +- `21`: `operator` (operator) + +Token modifiers (`bit`: `lsp_name`, usage): + +- `1 << 0`: `declaration` (declaration) +- `1 << 1`: `definition` (definition) +- `1 << 2`: `readonly` (readonly) +- `1 << 3`: `static` (static) +- `1 << 4`: `deprecated` (deprecated) +- `1 << 5`: `abstract` (abstract) +- `1 << 6`: `async` (async) +- `1 << 7`: `modification` (modification) +- `1 << 8`: `documentation` (documentation) +- `1 << 9`: `defaultLibrary` (default-library) + + ### Signature Help File: `crates/jrsonnet-lsp-handlers/src/signature_help.rs` From f25dd5b969ca472a23d80f2515e928533f27c61b Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Thu, 12 Feb 2026 16:36:01 +0000 Subject: [PATCH 086/210] test(lsp): migrate e2e harness to scenario crate and YAML fixtures - add the dedicated jrsonnet-lsp-scenario crate with typed scenario model, YAML compiler, and runner. - replace legacy tests/framework + annotated harness with fixture-driven scenario tests. - wire jrsonnet-lsp dev-dependencies/docs/lockfile to the new scenario crate. 
--- Cargo.lock | 18 + crates/jrsonnet-lsp-scenario/Cargo.toml | 25 + crates/jrsonnet-lsp-scenario/src/fixture.rs | 60 + crates/jrsonnet-lsp-scenario/src/lib.rs | 17 + crates/jrsonnet-lsp-scenario/src/scenario.rs | 1826 +++++++++++++++++ .../src/scenario_runner/document_steps.rs | 156 ++ .../src/scenario_runner/errors.rs | 64 + .../src/scenario_runner/expectation_steps.rs | 431 ++++ .../src/scenario_runner/helpers.rs | 385 ++++ .../src/scenario_runner/mod.rs | 15 + .../src/scenario_runner/request_steps.rs | 297 +++ .../src/scenario_runner/runner.rs | 424 ++++ .../src/scenario_runner/transport.rs | 239 +++ .../src/scenario_script/compile.rs | 1348 ++++++++++++ .../src/scenario_script/inputs.rs | 1147 +++++++++++ .../src/scenario_script/markers.rs | 1034 ++++++++++ .../src/scenario_script/mod.rs | 20 + .../src/scenario_script/parse.rs | 152 ++ .../src/scenario_script/paths.rs | 28 + .../src/scenario_script/registry.rs | 150 ++ .../src/semantic_tokens.rs | 80 + crates/jrsonnet-lsp/Cargo.toml | 1 + .../jrsonnet-lsp/tests/e2e_annotated_tests.rs | 527 ----- .../jrsonnet-lsp/tests/e2e_scenario_tests.rs | 18 + .../tests/framework/assertions.rs | 624 ------ crates/jrsonnet-lsp/tests/framework/mod.rs | 78 - crates/jrsonnet-lsp/tests/framework/parser.rs | 273 --- .../jrsonnet-lsp/tests/framework/scenario.rs | 415 ---- .../tests/framework/scenario_runner.rs | 942 --------- ...al_comprehension_refines_element_type.yaml | 22 + ...er_map_predicate_refines_output_array.yaml | 26 + ...r_with_predicate_refines_output_array.yaml | 23 + ..._order_all_map_refines_array_elements.yaml | 19 + ...l_string_equality_after_string_assert.yaml | 22 + ...tring_equality_partial_without_assert.yaml | 25 + .../negated_guard_refines_branches.yaml | 23 + .../null_and_length_guard.yaml | 20 + .../null_branch_split.yaml | 23 + .../null_guard_refines_non_null_branch.yaml | 20 + ...ic_predicates_refine_arithmetic_paths.yaml | 26 + .../union_guard_refines_both_branches.yaml | 23 + 
.../function_length_assert_narrows_arity.yaml | 18 + .../length_eq_refines_array_to_tuple.yaml | 18 + ..._function_refines_impossible_branches.yaml | 25 + ...wn_object_refines_impossible_branches.yaml | 24 + ...known_function_allows_typed_call_site.yaml | 25 + ...length_unknown_function_refines_arity.yaml | 24 + ...n_object_composition_refines_by_shape.yaml | 38 + .../negated_membership_and_is_precise.yaml | 19 + ...mbership_or_eliminates_required_field.yaml | 19 + ...negated_membership_or_is_conservative.yaml | 19 + ...or_with_length_still_eliminates_field.yaml | 19 + ...eld_literal_chain_preserves_base_type.yaml | 24 + ...t_field_presence_refines_object_shape.yaml | 20 + ...field_type_guards_refine_nested_paths.yaml | 27 + .../object_membership_known_union.yaml | 20 + .../tests/scenarios/hover/array_variable.yaml | 20 + .../scenarios/hover/boolean_literal.yaml | 24 + .../scenarios/hover/function_variable.yaml | 20 + .../hover_new_import_invalid_syntax.yaml | 30 + .../tests/scenarios/hover/null_literal.yaml | 24 + .../tests/scenarios/hover/number_literal.yaml | 15 + .../scenarios/hover/object_variable.yaml | 20 + .../tests/scenarios/hover/std_object.yaml | 15 + .../tests/scenarios/hover/string_literal.yaml | 15 + .../references/for_comprehension_binding.yaml | 20 + .../references/function_param_usage.yaml | 39 + .../references/function_parameters.yaml | 36 + .../references/local_variable_definition.yaml | 22 + .../scenarios/references/multiple_usages.yaml | 25 + .../scenarios/references/nested_local.yaml | 40 + .../scenarios/references/object_local.yaml | 24 + .../shadowing_different_scopes.yaml | 40 + ...e_then_watched_change_then_references.yaml | 36 + .../code_lens_references_and_evaluate.yaml | 32 + .../completion_bracket_lookup_fields.yaml | 26 + .../completion_nested_object_fields.yaml | 18 + ...letion_non_identifier_field_text_edit.yaml | 35 + .../runner/completion_syntax_error_scope.yaml | 21 + .../runner/completion_union_nested_flow.yaml | 28 + 
.../runner/config_change_code_actions.yaml | 110 + ...ross_file_rename_navigation_roundtrip.yaml | 99 + ...ross_file_rename_references_roundtrip.yaml | 70 + ...gnostics_no_such_field_available_list.yaml | 25 + ...nostics_no_such_field_with_suggestion.yaml | 25 + .../runner/diagnostics_save_roundtrip.yaml | 38 + .../document_symbol_local_and_object.yaml | 30 + ...l_unused_multifile_comments_preserved.yaml | 81 + .../hover_bracket_lookup_field_type.yaml | 25 + .../runner/hover_cjk_import_shape.yaml | 21 + .../hover_completion_execute_command.yaml | 40 + .../runner/hover_edge_positions.yaml | 39 + .../hover_no_result_on_comment_token.yaml | 18 + .../hover_non_identifier_field_names.yaml | 19 + .../runner/hover_union_nested_field_type.yaml | 28 + .../runner/importstr_non_json_dependency.yaml | 52 + ...inlay_hints_after_flow_narrowing_edit.yaml | 43 + .../runner/lint_config_flip_mid_session.yaml | 52 + .../marker_syntax_definition_smoke.yaml | 26 + .../runner/missing_step_coverage.yaml | 149 ++ .../runner/open_request_expect_sequence.yaml | 72 + .../preserve_cross_file_refs_after_close.yaml | 30 + .../rapid_incremental_edit_recovery.yaml | 61 + .../runner/relative_paths_smoke.yaml | 25 + .../rename_reopen_references_roundtrip.yaml | 80 + ...ens_stability_after_incremental_edits.yaml | 101 + .../signature_help_named_arguments.yaml | 29 + .../runner/watched_file_lifecycle.yaml | 74 + .../workspace_symbol_flat_positive.yaml | 23 + .../workspace_symbol_nested_container.yaml | 23 + ...mbol_updates_after_incremental_rename.yaml | 68 + docs/lsp/ARCHITECTURE.md | 117 +- 112 files changed, 10855 insertions(+), 2868 deletions(-) create mode 100644 crates/jrsonnet-lsp-scenario/Cargo.toml create mode 100644 crates/jrsonnet-lsp-scenario/src/fixture.rs create mode 100644 crates/jrsonnet-lsp-scenario/src/lib.rs create mode 100644 crates/jrsonnet-lsp-scenario/src/scenario.rs create mode 100644 crates/jrsonnet-lsp-scenario/src/scenario_runner/document_steps.rs create mode 100644 
crates/jrsonnet-lsp-scenario/src/scenario_runner/errors.rs create mode 100644 crates/jrsonnet-lsp-scenario/src/scenario_runner/expectation_steps.rs create mode 100644 crates/jrsonnet-lsp-scenario/src/scenario_runner/helpers.rs create mode 100644 crates/jrsonnet-lsp-scenario/src/scenario_runner/mod.rs create mode 100644 crates/jrsonnet-lsp-scenario/src/scenario_runner/request_steps.rs create mode 100644 crates/jrsonnet-lsp-scenario/src/scenario_runner/runner.rs create mode 100644 crates/jrsonnet-lsp-scenario/src/scenario_runner/transport.rs create mode 100644 crates/jrsonnet-lsp-scenario/src/scenario_script/compile.rs create mode 100644 crates/jrsonnet-lsp-scenario/src/scenario_script/inputs.rs create mode 100644 crates/jrsonnet-lsp-scenario/src/scenario_script/markers.rs create mode 100644 crates/jrsonnet-lsp-scenario/src/scenario_script/mod.rs create mode 100644 crates/jrsonnet-lsp-scenario/src/scenario_script/parse.rs create mode 100644 crates/jrsonnet-lsp-scenario/src/scenario_script/paths.rs create mode 100644 crates/jrsonnet-lsp-scenario/src/scenario_script/registry.rs create mode 100644 crates/jrsonnet-lsp-scenario/src/semantic_tokens.rs delete mode 100644 crates/jrsonnet-lsp/tests/e2e_annotated_tests.rs create mode 100644 crates/jrsonnet-lsp/tests/e2e_scenario_tests.rs delete mode 100644 crates/jrsonnet-lsp/tests/framework/assertions.rs delete mode 100644 crates/jrsonnet-lsp/tests/framework/mod.rs delete mode 100644 crates/jrsonnet-lsp/tests/framework/parser.rs delete mode 100644 crates/jrsonnet-lsp/tests/framework/scenario.rs delete mode 100644 crates/jrsonnet-lsp/tests/framework/scenario_runner.rs create mode 100644 crates/jrsonnet-lsp/tests/scenarios/flow/flow_collections/conditional_comprehension_refines_element_type.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/flow/flow_collections/filter_map_predicate_refines_output_array.yaml create mode 100644 
crates/jrsonnet-lsp/tests/scenarios/flow/flow_collections/filter_with_predicate_refines_output_array.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/flow/flow_collections/higher_order_all_map_refines_array_elements.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/literal_string_equality_after_string_assert.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/literal_string_equality_partial_without_assert.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/negated_guard_refines_branches.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/null_and_length_guard.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/null_branch_split.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/null_guard_refines_non_null_branch.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/partial_numeric_predicates_refine_arithmetic_paths.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/union_guard_refines_both_branches.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/function_length_assert_narrows_arity.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_eq_refines_array_to_tuple.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_known_function_refines_impossible_branches.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_known_object_refines_impossible_branches.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_unknown_function_allows_typed_call_site.yaml create mode 100644 
crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_unknown_function_refines_arity.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_unknown_object_composition_refines_by_shape.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/negated_membership_and_is_precise.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/negated_membership_or_eliminates_required_field.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/negated_membership_or_is_conservative.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/negated_membership_or_with_length_still_eliminates_field.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/object_field_literal_chain_preserves_base_type.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/object_field_presence_refines_object_shape.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/object_field_type_guards_refine_nested_paths.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/object_membership_known_union.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/hover/array_variable.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/hover/boolean_literal.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/hover/function_variable.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/hover/hover_new_import_invalid_syntax.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/hover/null_literal.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/hover/number_literal.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/hover/object_variable.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/hover/std_object.yaml 
create mode 100644 crates/jrsonnet-lsp/tests/scenarios/hover/string_literal.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/references/for_comprehension_binding.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/references/function_param_usage.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/references/function_parameters.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/references/local_variable_definition.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/references/multiple_usages.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/references/nested_local.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/references/object_local.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/references/shadowing_different_scopes.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/close_then_watched_change_then_references.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/code_lens_references_and_evaluate.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/completion_bracket_lookup_fields.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/completion_nested_object_fields.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/completion_non_identifier_field_text_edit.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/completion_syntax_error_scope.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/completion_union_nested_flow.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/config_change_code_actions.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/cross_file_rename_navigation_roundtrip.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/cross_file_rename_references_roundtrip.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/diagnostics_no_such_field_available_list.yaml create mode 100644 
crates/jrsonnet-lsp/tests/scenarios/runner/diagnostics_no_such_field_with_suggestion.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/diagnostics_save_roundtrip.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/document_symbol_local_and_object.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/fix_all_unused_multifile_comments_preserved.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/hover_bracket_lookup_field_type.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/hover_cjk_import_shape.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/hover_completion_execute_command.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/hover_edge_positions.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/hover_no_result_on_comment_token.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/hover_non_identifier_field_names.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/hover_union_nested_field_type.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/importstr_non_json_dependency.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/inlay_hints_after_flow_narrowing_edit.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/lint_config_flip_mid_session.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/marker_syntax_definition_smoke.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/missing_step_coverage.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/open_request_expect_sequence.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/preserve_cross_file_refs_after_close.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/rapid_incremental_edit_recovery.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/relative_paths_smoke.yaml create mode 100644 
crates/jrsonnet-lsp/tests/scenarios/runner/rename_reopen_references_roundtrip.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/semantic_tokens_stability_after_incremental_edits.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/signature_help_named_arguments.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/watched_file_lifecycle.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/workspace_symbol_flat_positive.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/workspace_symbol_nested_container.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/workspace_symbol_updates_after_incremental_rename.yaml diff --git a/Cargo.lock b/Cargo.lock index e98c06f4..a3b6a753 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1817,6 +1817,7 @@ dependencies = [ "jrsonnet-lsp-handlers", "jrsonnet-lsp-import", "jrsonnet-lsp-inference", + "jrsonnet-lsp-scenario", "jrsonnet-lsp-scope", "jrsonnet-lsp-types", "jrsonnet-parser", @@ -1933,6 +1934,23 @@ dependencies = [ "tracing", ] +[[package]] +name = "jrsonnet-lsp-scenario" +version = "0.5.0-pre97" +dependencies = [ + "assert_matches", + "crossbeam-channel", + "jrsonnet-lsp-handlers", + "lsp-server", + "lsp-types", + "rowan", + "serde", + "serde_json", + "serde_yaml_with_quirks", + "tempfile", + "thiserror 1.0.69", +] + [[package]] name = "jrsonnet-lsp-scope" version = "0.5.0-pre97" diff --git a/crates/jrsonnet-lsp-scenario/Cargo.toml b/crates/jrsonnet-lsp-scenario/Cargo.toml new file mode 100644 index 00000000..fe5a326d --- /dev/null +++ b/crates/jrsonnet-lsp-scenario/Cargo.toml @@ -0,0 +1,25 @@ +[package] +name = "jrsonnet-lsp-scenario" +authors.workspace = true +edition.workspace = true +license.workspace = true +repository.workspace = true +version.workspace = true + +[dependencies] +crossbeam-channel = "0.5" +jrsonnet-lsp-handlers = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-handlers" } +lsp-server.workspace = true +lsp-types.workspace 
= true +serde = { workspace = true, features = ["derive"] } +serde_json.workspace = true +serde_yaml_with_quirks.workspace = true +tempfile.workspace = true +thiserror.workspace = true +rowan.workspace = true + +[dev-dependencies] +assert_matches = "1.5.0" + +[lints] +workspace = true diff --git a/crates/jrsonnet-lsp-scenario/src/fixture.rs b/crates/jrsonnet-lsp-scenario/src/fixture.rs new file mode 100644 index 00000000..2868f279 --- /dev/null +++ b/crates/jrsonnet-lsp-scenario/src/fixture.rs @@ -0,0 +1,60 @@ +use std::{ + fs, + path::{Path, PathBuf}, +}; + +use lsp_server::Connection; +use tempfile::TempDir; +use thiserror::Error; + +use crate::{ + scenario_runner::{run_scenario, RunnerError}, + scenario_script::{parse_scenario_yaml, ParseScenarioError}, +}; + +#[derive(Debug, Error)] +pub enum ScenarioFixtureError { + #[error("create temp directory for scenario fixture: {source}")] + CreateTempDir { + #[source] + source: std::io::Error, + }, + #[error("read scenario fixture {path}: {source}")] + ReadFixture { + path: PathBuf, + #[source] + source: std::io::Error, + }, + #[error("parse scenario fixture {path}: {source}")] + ParseFixture { + path: PathBuf, + #[source] + source: ParseScenarioError, + }, + #[error(transparent)] + RunScenario(#[from] RunnerError), +} + +/// Parse and run a YAML fixture file against an in-memory LSP server. +/// +/// `start_server` receives the server-side `Connection` and should run the +/// server event loop until shutdown/exit. 
+pub fn run_yaml_fixture(path: &Path, start_server: S) -> Result<(), ScenarioFixtureError> +where + S: FnOnce(Connection) + Send + 'static, +{ + let base_dir = + TempDir::new().map_err(|source| ScenarioFixtureError::CreateTempDir { source })?; + let script = fs::read_to_string(path).map_err(|source| ScenarioFixtureError::ReadFixture { + path: path.to_path_buf(), + source, + })?; + let scenario = parse_scenario_yaml(&script, base_dir.path()).map_err(|source| { + ScenarioFixtureError::ParseFixture { + path: path.to_path_buf(), + source, + } + })?; + run_scenario(&scenario, start_server)?; + Ok(()) +} diff --git a/crates/jrsonnet-lsp-scenario/src/lib.rs b/crates/jrsonnet-lsp-scenario/src/lib.rs new file mode 100644 index 00000000..680042a3 --- /dev/null +++ b/crates/jrsonnet-lsp-scenario/src/lib.rs @@ -0,0 +1,17 @@ +//! Scenario testing framework for Jsonnet LSP end-to-end tests. +//! +//! This crate provides: +//! - Strongly typed timeline scenario model ([`scenario`]) +//! - Human-readable YAML DSL parser ([`scenario_script`]) +//! - In-memory LSP request/notification runner ([`scenario_runner`]) +//! - Fixture helper for rstest file-based tests ([`run_yaml_fixture`]) + +pub mod fixture; +pub mod scenario; +pub mod scenario_runner; +pub mod scenario_script; +pub mod semantic_tokens; + +pub use fixture::{run_yaml_fixture, ScenarioFixtureError}; +pub use scenario_runner::{run_scenario, RunnerError}; +pub use scenario_script::{parse_scenario_yaml, ParseScenarioError}; diff --git a/crates/jrsonnet-lsp-scenario/src/scenario.rs b/crates/jrsonnet-lsp-scenario/src/scenario.rs new file mode 100644 index 00000000..bdbca233 --- /dev/null +++ b/crates/jrsonnet-lsp-scenario/src/scenario.rs @@ -0,0 +1,1826 @@ +//! Canonical scenario model for multi-file, multi-step LSP timeline tests. +//! +//! YAML scenarios are parsed by [`crate::scenario_script::parse_scenario_yaml`] +//! and compiled into these strongly typed structures. +//! +//! 
This module is the compiled execution model; author scenarios in YAML. +//! +//! Example: +//! ```rust +//! use jrsonnet_lsp_scenario::{ +//! parse_scenario_yaml, +//! scenario::doctest_assertions::assert_scenario_runs_without_error, +//! }; +//! +//! let base_dir = tempfile::tempdir().expect("tempdir"); +//! +//! let yaml = r#" +//! steps: +//! - step: create +//! files: +//! main.jsonnet: |- +//! { answer: 42 } +//! open: [main.jsonnet] +//! - step: diagnosticsSettled +//! - step: expectDiagnostics +//! file: main.jsonnet +//! diagnostics: [] +//! "#; +//! +//! let actual = parse_scenario_yaml(yaml, base_dir.path()).expect("parse scenario"); +//! assert_scenario_runs_without_error(&actual); +//! ``` + +use lsp_types::{ + CodeActionKind, CodeActionOrCommand, CodeLens, CompletionResponse, Diagnostic, + DocumentSymbolResponse, FileChangeType, GotoDefinitionResponse, Hover, InlayHint, Location, + Position, PrepareRenameResponse, Range, SemanticTokensRangeResult, SemanticTokensResult, + SignatureHelp, TextDocumentContentChangeEvent, TextEdit, WorkspaceEdit, + WorkspaceSymbolResponse, +}; +use serde::Deserialize; + +/// A full timeline scenario. +#[derive(Debug, Clone, PartialEq)] +pub struct Scenario { + pub steps: Vec, +} + +impl Scenario { + #[must_use] + pub fn new(steps: Vec) -> Self { + Self { steps } + } +} + +/// One timeline step. 
+#[derive(Debug, Clone, PartialEq)] +pub enum ScenarioStep { + Open(OpenStep), + ChangeFull(ChangeFullStep), + ChangeIncremental(ChangeIncrementalStep), + Save(SaveStep), + Close(CloseStep), + Config(ConfigStep), + WriteFile(WriteFileStep), + DeleteFile(DeleteFileStep), + NotifyWatchedFiles(NotifyWatchedFilesStep), + RequestCodeAction(RequestCodeActionStep), + ExpectCodeAction(ExpectCodeActionStep), + RequestReferences(RequestReferencesStep), + ExpectReferences(ExpectReferencesStep), + RequestDefinition(RequestDefinitionStep), + ExpectDefinition(ExpectDefinitionStep), + RequestDeclaration(RequestDeclarationStep), + ExpectDeclaration(ExpectDeclarationStep), + RequestTypeDefinition(RequestTypeDefinitionStep), + ExpectTypeDefinition(ExpectTypeDefinitionStep), + RequestPrepareRename(RequestPrepareRenameStep), + ExpectPrepareRename(ExpectPrepareRenameStep), + RequestRename(RequestRenameStep), + ExpectRename(ExpectRenameStep), + RequestHover(RequestHoverStep), + ExpectHover(ExpectHoverStep), + ExpectHoverType(ExpectHoverTypeStep), + RequestSignatureHelp(RequestSignatureHelpStep), + ExpectSignatureHelp(ExpectSignatureHelpStep), + RequestCompletion(RequestCompletionStep), + ExpectCompletion(ExpectCompletionStep), + RequestFormatting(RequestFormattingStep), + ExpectFormatting(ExpectFormattingStep), + RequestSemanticTokensFull(RequestSemanticTokensFullStep), + ExpectSemanticTokensFull(ExpectSemanticTokensFullStep), + RequestSemanticTokensRange(RequestSemanticTokensRangeStep), + ExpectSemanticTokensRange(ExpectSemanticTokensRangeStep), + RequestInlayHints(RequestInlayHintsStep), + ExpectInlayHints(ExpectInlayHintsStep), + RequestDocumentSymbol(RequestDocumentSymbolStep), + ExpectDocumentSymbol(ExpectDocumentSymbolStep), + RequestWorkspaceSymbol(RequestWorkspaceSymbolStep), + ExpectWorkspaceSymbol(ExpectWorkspaceSymbolStep), + RequestCodeLens(RequestCodeLensStep), + ExpectCodeLens(ExpectCodeLensStep), + RequestExecuteCommand(RequestExecuteCommandStep), + 
ExpectExecuteCommand(ExpectExecuteCommandStep), + ExpectDiagnostics(ExpectDiagnosticsStep), + DiagnosticsSettled(DiagnosticsSettledStep), +} + +/// `textDocument/didOpen`. +/// +/// Opens a document in the scenario session. +/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: | +/// local x = 1; +/// x +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +/// +/// Optional fields: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "1" +/// language_id: jsonnet +/// version: 3 +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct OpenStep { + pub uri: String, + pub text: String, + pub language_id: String, + pub version: i32, +} + +/// `textDocument/didChange` full-document replacement. +/// +/// Replaces the full contents of an already-open document. +/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "1" +/// - step: changeFull +/// file: main.jsonnet +/// text: "2" +/// version: 2 +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ChangeFullStep { + pub uri: String, + pub text: String, + pub version: i32, +} + +impl ChangeFullStep { + #[must_use] + pub fn as_change_event(&self) -> TextDocumentContentChangeEvent { + TextDocumentContentChangeEvent { + range: None, + range_length: None, + text: self.text.clone(), + } + } +} + +/// `textDocument/didChange` incremental edit. 
+/// +/// Applies a range edit to an already-open document. +/// +/// Example using marker shorthand (`at` + `len`): +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: | +/// local [[target:1]] = 1; +/// target +/// - step: changeIncremental +/// file: main.jsonnet +/// at: target +/// len: 1 +/// text: "2" +/// version: 2 +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ChangeIncrementalStep { + pub uri: String, + pub range: Range, + pub text: String, + pub version: i32, +} + +impl ChangeIncrementalStep { + #[must_use] + pub fn as_change_event(&self) -> TextDocumentContentChangeEvent { + TextDocumentContentChangeEvent { + range: Some(self.range), + range_length: None, + text: self.text.clone(), + } + } +} + +/// `textDocument/didSave`. +/// +/// Emits a save notification for an open document. +/// +/// Example without text payload: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "1" +/// - step: save +/// file: main.jsonnet +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +/// +/// Optional `text` payload: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "1" +/// - step: save +/// file: main.jsonnet +/// text: "{ answer: 42 }" +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct SaveStep { + pub uri: String, + pub text: Option, +} + +/// `textDocument/didClose`. +/// +/// Closes a previously-open document. 
+/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "1" +/// - step: close +/// file: main.jsonnet +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct CloseStep { + pub uri: String, +} + +/// `workspace/didChangeConfiguration`. +/// +/// Pushes configuration updates to the server. +/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: config +/// settings: +/// jsonnet: +/// diagnostics: +/// maxProblems: 200 +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ConfigStep { + pub settings: serde_json::Value, +} + +/// Writes text to a file on disk. +/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: writeFile +/// path: libs/util.jsonnet +/// text: "{ x: 1 }" +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct WriteFileStep { + pub path: String, + pub text: String, +} + +/// Deletes a file from disk. +/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: writeFile +/// path: libs/util.jsonnet +/// text: "{ x: 1 }" +/// - step: deleteFile +/// path: libs/util.jsonnet +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct DeleteFileStep { + pub path: String, +} + +/// File change kind for watched-files notifications. 
+/// +/// Example values: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: notifyWatchedFiles +/// changes: +/// - path: vendor/new.jsonnet +/// type: created +/// - path: vendor/existing.jsonnet +/// type: changed +/// - path: vendor/old.jsonnet +/// type: deleted +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize)] +#[serde(rename_all = "camelCase")] +pub enum ScenarioFileChangeType { + Created, + Changed, + Deleted, +} + +impl ScenarioFileChangeType { + #[must_use] + pub const fn as_lsp(self) -> FileChangeType { + match self { + Self::Created => FileChangeType::CREATED, + Self::Changed => FileChangeType::CHANGED, + Self::Deleted => FileChangeType::DELETED, + } + } +} + +/// One watched-file change event. +/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: notifyWatchedFiles +/// changes: +/// - path: vendor/lib.jsonnet +/// type: changed +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct WatchedFileChangeStep { + pub uri: String, + pub change_type: ScenarioFileChangeType, +} + +/// `workspace/didChangeWatchedFiles`. +/// +/// Emits a watched-files change notification. 
+/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: notifyWatchedFiles +/// changes: +/// - path: vendor/lib.jsonnet +/// type: created +/// - path: vendor/old.jsonnet +/// type: deleted +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct NotifyWatchedFilesStep { + pub changes: Vec, +} + +/// `textDocument/codeAction` request. +/// +/// Requests code actions for a range in a document. +/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "local [[m:x]] = 1; m" +/// - step: requestCodeAction +/// as: actions +/// file: main.jsonnet +/// range: m +/// - step: expectCodeAction +/// request: actions +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +/// +/// Optional filters: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "local [[m:x]] = 1; m" +/// - step: requestCodeAction +/// as: actions +/// file: main.jsonnet +/// range: m +/// diagnostics: [] +/// only: [quickfix] +/// - step: expectCodeAction +/// request: actions +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct RequestCodeActionStep { + pub(crate) id: i32, + pub uri: String, + pub range: Range, + pub diagnostics: Vec, + pub only: Option>, +} + +/// Expected `textDocument/codeAction` response. +/// +/// Asserts the response for a preceding `requestCodeAction`. 
+/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "local [[m:x]] = 1; m" +/// - step: requestCodeAction +/// as: actions +/// file: main.jsonnet +/// range: m +/// - step: expectCodeAction +/// request: actions +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq)] +pub struct ExpectCodeActionStep { + pub(crate) id: i32, + pub result: Option>, +} + +/// `textDocument/references` request. +/// +/// Requests symbol references at a position. +/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "local [[m:x]] = 1; m" +/// - step: requestReferences +/// as: refs +/// file: main.jsonnet +/// at: m +/// - step: expectReferences +/// request: refs +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +/// +/// Optional `include_declaration`: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "local [[m:x]] = 1; m" +/// - step: requestReferences +/// as: refs +/// file: main.jsonnet +/// at: m +/// include_declaration: true +/// - step: expectReferences +/// request: refs +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct RequestReferencesStep { + pub(crate) id: i32, + pub uri: String, + pub position: Position, + pub include_declaration: bool, +} + +/// Expected `textDocument/references` response. +/// +/// Asserts the response for a preceding `requestReferences`. 
+/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "local [[m:x]] = 1; m" +/// - step: requestReferences +/// as: refs +/// file: main.jsonnet +/// at: m +/// - step: expectReferences +/// request: refs +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ExpectReferencesStep { + pub(crate) id: i32, + pub result: Option>, +} + +/// `textDocument/definition` request. +/// +/// Requests definition locations at a position. +/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "local [[m:x]] = 1; m" +/// - step: requestDefinition +/// as: def +/// file: main.jsonnet +/// at: m +/// - step: expectDefinition +/// request: def +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct RequestDefinitionStep { + pub(crate) id: i32, + pub uri: String, + pub position: Position, +} + +/// Expected `textDocument/definition` response. +/// +/// Asserts the response for a preceding `requestDefinition`. 
+/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "local [[m:x]] = 1; m" +/// - step: requestDefinition +/// as: def +/// file: main.jsonnet +/// at: m +/// - step: expectDefinition +/// request: def +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq)] +pub struct ExpectDefinitionStep { + pub(crate) id: i32, + pub result: Option, +} + +/// `textDocument/declaration` request. +/// +/// Requests declaration locations at a position. +/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "local [[m:x]] = 1; m" +/// - step: requestDeclaration +/// as: decl +/// file: main.jsonnet +/// at: m +/// - step: expectDeclaration +/// request: decl +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct RequestDeclarationStep { + pub(crate) id: i32, + pub uri: String, + pub position: Position, +} + +/// Expected `textDocument/declaration` response. +/// +/// Asserts the response for a preceding `requestDeclaration`. 
+/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "local [[m:x]] = 1; m" +/// - step: requestDeclaration +/// as: decl +/// file: main.jsonnet +/// at: m +/// - step: expectDeclaration +/// request: decl +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq)] +pub struct ExpectDeclarationStep { + pub(crate) id: i32, + pub result: Option, +} + +/// `textDocument/typeDefinition` request. +/// +/// Requests type-definition locations at a position. +/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "local [[m:x]] = 1; m" +/// - step: requestTypeDefinition +/// as: type_def +/// file: main.jsonnet +/// at: m +/// - step: expectTypeDefinition +/// request: type_def +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct RequestTypeDefinitionStep { + pub(crate) id: i32, + pub uri: String, + pub position: Position, +} + +/// Expected `textDocument/typeDefinition` response. +/// +/// Asserts the response for a preceding `requestTypeDefinition`. 
+/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "local [[m:x]] = 1; m" +/// - step: requestTypeDefinition +/// as: type_def +/// file: main.jsonnet +/// at: m +/// - step: expectTypeDefinition +/// request: type_def +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq)] +pub struct ExpectTypeDefinitionStep { + pub(crate) id: i32, + pub result: Option, +} + +/// `textDocument/prepareRename` request. +/// +/// Requests rename-preparation data at a position. +/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "local [[m:x]] = 1; m" +/// - step: requestPrepareRename +/// as: prep +/// file: main.jsonnet +/// at: m +/// - step: expectPrepareRename +/// request: prep +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct RequestPrepareRenameStep { + pub(crate) id: i32, + pub uri: String, + pub position: Position, +} + +/// Expected `textDocument/prepareRename` response. +/// +/// Asserts the response for a preceding `requestPrepareRename`. 
+/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "local [[m:x]] = 1; m" +/// - step: requestPrepareRename +/// as: prep +/// file: main.jsonnet +/// at: m +/// - step: expectPrepareRename +/// request: prep +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ExpectPrepareRenameStep { + pub(crate) id: i32, + pub result: Option, +} + +/// `textDocument/rename` request. +/// +/// Requests a rename operation. +/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "local [[m:x]] = 1; m" +/// - step: requestRename +/// as: rename +/// file: main.jsonnet +/// at: m +/// new_name: renamed +/// - step: expectRename +/// request: rename +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct RequestRenameStep { + pub(crate) id: i32, + pub uri: String, + pub position: Position, + pub new_name: String, +} + +/// Expected `textDocument/rename` response. +/// +/// Asserts the response for a preceding `requestRename`. 
+/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "local [[m:x]] = 1; m" +/// - step: requestRename +/// as: rename +/// file: main.jsonnet +/// at: m +/// new_name: renamed +/// - step: expectRename +/// request: rename +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ExpectRenameStep { + pub(crate) id: i32, + pub result: Option, +} + +/// `textDocument/hover` request. +/// +/// Requests hover information at a position. +/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "local [[m:x]] = 1; m" +/// - step: requestHover +/// as: hover +/// file: main.jsonnet +/// at: m +/// - step: expectHover +/// request: hover +/// result: +/// contents: +/// kind: markdown +/// value: "`number`" +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct RequestHoverStep { + pub(crate) id: i32, + pub uri: String, + pub position: Position, +} + +/// Expected `textDocument/hover` response. +/// +/// Asserts the response for a preceding `requestHover`. 
+/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "local [[m:x]] = 1; m" +/// - step: requestHover +/// as: hover +/// file: main.jsonnet +/// at: m +/// - step: expectHover +/// request: hover +/// result: +/// contents: +/// kind: markdown +/// value: "`number`" +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ExpectHoverStep { + pub(crate) id: i32, + pub result: Option, +} + +/// Expected inferred type extracted from a `textDocument/hover` response. +/// +/// Asserts the inferred type prefix in a hover response. +/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "local [[m:x]] = 1; m" +/// - step: requestHover +/// as: hover +/// file: main.jsonnet +/// at: m +/// - step: expectHoverType +/// request: hover +/// type: number +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +/// +/// Optional `match: contains`: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "local [[m:x]] = 1; m" +/// - step: requestHover +/// as: hover +/// file: main.jsonnet +/// at: m +/// - step: expectHoverType +/// request: hover +/// type: um +/// match: contains +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ExpectHoverTypeStep { + pub(crate) id: i32, + pub expected_type: String, + pub match_mode: TypeMatchMode, +} + +/// `textDocument/signatureHelp` request. +/// +/// Requests signature help at a position. 
+/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "local f(a) = a; f(((sig:|))1)" +/// - step: requestSignatureHelp +/// as: sig +/// file: main.jsonnet +/// at: sig +/// - step: expectSignatureHelp +/// request: sig +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct RequestSignatureHelpStep { + pub(crate) id: i32, + pub uri: String, + pub position: Position, +} + +/// Expected `textDocument/signatureHelp` response. +/// +/// Asserts the response for a preceding `requestSignatureHelp`. +/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "local f(a) = a; f(((sig:|))1)" +/// - step: requestSignatureHelp +/// as: sig +/// file: main.jsonnet +/// at: sig +/// - step: expectSignatureHelp +/// request: sig +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ExpectSignatureHelpStep { + pub(crate) id: i32, + pub result: Option, +} + +/// Type string match mode for `ExpectHoverTypeStep`. 
+/// +/// Example values in YAML: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "local [[m:x]] = 1; m" +/// - step: requestHover +/// as: hover_exact +/// file: main.jsonnet +/// at: m +/// - step: expectHoverType +/// request: hover_exact +/// type: number +/// - step: requestHover +/// as: hover_contains +/// file: main.jsonnet +/// at: m +/// - step: expectHoverType +/// request: hover_contains +/// type: um +/// match: contains +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, Copy, PartialEq, Eq, Default, Deserialize)] +#[serde(rename_all = "camelCase")] +pub enum TypeMatchMode { + #[default] + Exact, + Contains, +} + +/// `textDocument/completion` request. +/// +/// Requests completion items at a position. +/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "std.((site:|))" +/// - step: requestCompletion +/// as: completion +/// file: main.jsonnet +/// at: site +/// - step: expectCompletion +/// request: completion +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct RequestCompletionStep { + pub(crate) id: i32, + pub uri: String, + pub position: Position, +} + +/// Expected `textDocument/completion` response. +/// +/// Asserts the response for a preceding `requestCompletion`. 
+/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "std.((site:|))" +/// - step: requestCompletion +/// as: completion +/// file: main.jsonnet +/// at: site +/// - step: expectCompletion +/// request: completion +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq)] +pub struct ExpectCompletionStep { + pub(crate) id: i32, + /// Full completion response assertion. + pub result: Option, + /// Optional label-only assertion mode. Compared order-insensitively. + pub labels: Option>, + /// When true, `labels` is treated as a required subset of actual labels. + pub allow_extra: bool, +} + +/// `textDocument/formatting` request. +/// +/// Requests formatting edits for a document. +/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "{x:1}" +/// - step: requestFormatting +/// as: formatting +/// file: main.jsonnet +/// - step: expectFormatting +/// request: formatting +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +/// +/// Optional formatting options: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "{x:1}" +/// - step: requestFormatting +/// as: formatting +/// file: main.jsonnet +/// tab_size: 4 +/// insert_spaces: false +/// - step: expectFormatting +/// request: formatting +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct RequestFormattingStep { + pub(crate) id: i32, + pub 
uri: String, + pub tab_size: u32, + pub insert_spaces: bool, +} + +/// Expected `textDocument/formatting` response. +/// +/// Asserts the response for a preceding `requestFormatting`. +/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "{x:1}" +/// - step: requestFormatting +/// as: formatting +/// file: main.jsonnet +/// - step: expectFormatting +/// request: formatting +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ExpectFormattingStep { + pub(crate) id: i32, + pub result: Option>, +} + +/// `textDocument/semanticTokens/full` request. +/// +/// Requests semantic tokens for the whole document. +/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "local [[name:x]] = 1; x" +/// - step: requestSemanticTokensFull +/// as: tokens +/// file: main.jsonnet +/// - step: expectSemanticTokensFull +/// request: tokens +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct RequestSemanticTokensFullStep { + pub(crate) id: i32, + pub uri: String, +} + +/// Expected `textDocument/semanticTokens/full` response. +/// +/// Asserts the response for a preceding `requestSemanticTokensFull`. 
+/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "local [[name:x]] = 1; x" +/// - step: requestSemanticTokensFull +/// as: tokens +/// file: main.jsonnet +/// - step: expectSemanticTokensFull +/// request: tokens +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ExpectSemanticTokensFullStep { + pub(crate) id: i32, + pub result: Option, +} + +/// `textDocument/semanticTokens/range` request. +/// +/// Requests semantic tokens for a selected range. +/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "local [[name:x]] = 1; x" +/// - step: requestSemanticTokensRange +/// as: tokens +/// file: main.jsonnet +/// range: name +/// - step: expectSemanticTokensRange +/// request: tokens +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct RequestSemanticTokensRangeStep { + pub(crate) id: i32, + pub uri: String, + pub range: Range, +} + +/// Expected `textDocument/semanticTokens/range` response. +/// +/// Asserts the response for a preceding `requestSemanticTokensRange`. 
+/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "local [[name:x]] = 1; x" +/// - step: requestSemanticTokensRange +/// as: tokens +/// file: main.jsonnet +/// range: name +/// - step: expectSemanticTokensRange +/// request: tokens +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ExpectSemanticTokensRangeStep { + pub(crate) id: i32, + pub result: Option, +} + +/// `textDocument/inlayHint` request. +/// +/// Requests inlay hints for a range. +/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "local [[name:x]] = 1; x" +/// - step: requestInlayHints +/// as: hints +/// file: main.jsonnet +/// range: name +/// - step: expectInlayHints +/// request: hints +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct RequestInlayHintsStep { + pub(crate) id: i32, + pub uri: String, + pub range: Range, +} + +/// Expected `textDocument/inlayHint` response. +/// +/// Asserts the response for a preceding `requestInlayHints`. 
+/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "local [[name:x]] = 1; x" +/// - step: requestInlayHints +/// as: hints +/// file: main.jsonnet +/// range: name +/// - step: expectInlayHints +/// request: hints +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone)] +pub struct ExpectInlayHintsStep { + pub(crate) id: i32, + pub result: Option>, +} + +impl PartialEq for ExpectInlayHintsStep { + fn eq(&self, other: &Self) -> bool { + if self.id != other.id { + return false; + } + serde_json::to_value(&self.result).ok() == serde_json::to_value(&other.result).ok() + } +} + +/// `textDocument/documentSymbol` request. +/// +/// Requests document symbol outline data. +/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "{ local x = 1, y: x }" +/// - step: requestDocumentSymbol +/// as: symbols +/// file: main.jsonnet +/// - step: expectDocumentSymbol +/// request: symbols +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct RequestDocumentSymbolStep { + pub(crate) id: i32, + pub uri: String, +} + +/// Expected `textDocument/documentSymbol` response. +/// +/// Asserts the response for a preceding `requestDocumentSymbol`. 
+/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "{ local x = 1, y: x }" +/// - step: requestDocumentSymbol +/// as: symbols +/// file: main.jsonnet +/// - step: expectDocumentSymbol +/// request: symbols +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq)] +pub struct ExpectDocumentSymbolStep { + pub(crate) id: i32, + pub result: Option, +} + +/// `workspace/symbol` request. +/// +/// Requests workspace-wide symbol search results. +/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: requestWorkspaceSymbol +/// as: symbols +/// query: mySymbol +/// - step: expectWorkspaceSymbol +/// request: symbols +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct RequestWorkspaceSymbolStep { + pub(crate) id: i32, + pub query: String, +} + +/// Expected `workspace/symbol` response. +/// +/// Asserts the response for a preceding `requestWorkspaceSymbol`. +/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: requestWorkspaceSymbol +/// as: symbols +/// query: mySymbol +/// - step: expectWorkspaceSymbol +/// request: symbols +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ExpectWorkspaceSymbolStep { + pub(crate) id: i32, + pub result: Option, +} + +/// `textDocument/codeLens` request. +/// +/// Requests code lenses for a document. 
+/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "{ x: 1 }" +/// - step: requestCodeLens +/// as: lenses +/// file: main.jsonnet +/// - step: expectCodeLens +/// request: lenses +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct RequestCodeLensStep { + pub(crate) id: i32, + pub uri: String, +} + +/// Expected `textDocument/codeLens` response. +/// +/// Asserts the response for a preceding `requestCodeLens`. +/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "{ x: 1 }" +/// - step: requestCodeLens +/// as: lenses +/// file: main.jsonnet +/// - step: expectCodeLens +/// request: lenses +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq)] +pub struct ExpectCodeLensStep { + pub(crate) id: i32, + pub result: Option>, +} + +/// `workspace/executeCommand` request. +/// +/// Requests execution of a workspace command. 
+/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: requestExecuteCommand +/// as: command +/// command: jsonnet.test.command +/// - step: expectExecuteCommand +/// request: command +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +/// +/// Optional command arguments: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: requestExecuteCommand +/// as: command +/// command: jsonnet.test.command +/// arguments: [1, "arg"] +/// - step: expectExecuteCommand +/// request: command +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct RequestExecuteCommandStep { + pub(crate) id: i32, + pub command: String, + pub arguments: Vec, +} + +/// Expected `workspace/executeCommand` response. +/// +/// Asserts the response for a preceding `requestExecuteCommand`. +/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: requestExecuteCommand +/// as: command +/// command: jsonnet.test.command +/// - step: expectExecuteCommand +/// request: command +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ExpectExecuteCommandStep { + pub(crate) id: i32, + pub result: Option, +} + +/// Expected diagnostics notification for a URI. +/// +/// Asserts the full diagnostics payload for a file. 
+/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "1" +/// - step: diagnosticsSettled +/// - step: expectDiagnostics +/// file: main.jsonnet +/// diagnostics: [] +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ExpectDiagnosticsStep { + pub uri: String, + pub diagnostics: Vec, +} + +/// Barrier for "no new diagnostics arrive for idle_ms before timeout_ms". +/// +/// Waits until diagnostics traffic becomes idle. +/// +/// Example with defaults: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "1" +/// - step: diagnosticsSettled +/// - step: expectDiagnostics +/// file: main.jsonnet +/// diagnostics: [] +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +/// +/// Optional custom timing: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "1" +/// - step: diagnosticsSettled +/// timeout_ms: 2000 +/// idle_ms: 100 +/// - step: expectDiagnostics +/// file: main.jsonnet +/// diagnostics: [] +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub struct DiagnosticsSettledStep { + pub timeout_ms: u64, + pub idle_ms: u64, +} + +#[doc(hidden)] +pub mod doctest_assertions { + use lsp_server::{Message, Notification, Response}; + use lsp_types::{ + notification::{DidOpenTextDocument, Notification as _, PublishDiagnostics}, + request::{HoverRequest, Request as _}, + Hover, HoverContents, MarkupContent, MarkupKind, PublishDiagnosticsParams, + 
}; + + use super::Scenario; + + pub fn assert_yaml_scenario_runs_without_error(yaml: &str) { + let base_dir = tempfile::tempdir().expect("create temp directory for scenario"); + let scenario = + crate::parse_scenario_yaml(yaml, base_dir.path()).expect("parse scenario yaml"); + assert_scenario_runs_without_error(&scenario); + } + + pub fn assert_scenario_runs_without_error(scenario: &Scenario) { + let result = crate::run_scenario(scenario, |connection| loop { + let Ok(message) = connection.receiver.recv() else { + break; + }; + match message { + Message::Request(request) => { + let response = match request.method.as_str() { + HoverRequest::METHOD => { + let hover = Hover { + contents: HoverContents::Markup(MarkupContent { + kind: MarkupKind::Markdown, + value: "`number`".to_string(), + }), + range: None, + }; + match serde_json::to_value(hover) { + Ok(result) => Response::new_ok(request.id, result), + Err(_) => break, + } + } + _ => Response { + id: request.id, + result: None, + error: None, + }, + }; + if connection.sender.send(Message::Response(response)).is_err() { + break; + } + } + Message::Notification(notification) + if notification.method == DidOpenTextDocument::METHOD => + { + let Ok(params) = serde_json::from_value::( + notification.params, + ) else { + break; + }; + let publish = PublishDiagnosticsParams { + uri: params.text_document.uri, + version: Some(params.text_document.version), + diagnostics: vec![], + }; + let Ok(payload) = serde_json::to_value(publish) else { + break; + }; + let publish_notification = + Notification::new(PublishDiagnostics::METHOD.to_string(), payload); + if connection + .sender + .send(Message::Notification(publish_notification)) + .is_err() + { + break; + } + } + Message::Notification(notification) if notification.method == "exit" => break, + Message::Notification(_) | Message::Response(_) => {} + } + }); + assert!( + result.is_ok(), + "scenario should run without error: {result:?}" + ); + } +} diff --git 
a/crates/jrsonnet-lsp-scenario/src/scenario_runner/document_steps.rs b/crates/jrsonnet-lsp-scenario/src/scenario_runner/document_steps.rs new file mode 100644 index 00000000..fc1bee2d --- /dev/null +++ b/crates/jrsonnet-lsp-scenario/src/scenario_runner/document_steps.rs @@ -0,0 +1,156 @@ +use std::{fs, path::PathBuf}; + +use lsp_types::{ + notification::{ + DidChangeConfiguration, DidChangeTextDocument, DidChangeWatchedFiles, DidCloseTextDocument, + DidOpenTextDocument, DidSaveTextDocument, Notification as _, + }, + DidChangeConfigurationParams, DidChangeTextDocumentParams, DidChangeWatchedFilesParams, + DidCloseTextDocumentParams, DidOpenTextDocumentParams, DidSaveTextDocumentParams, FileEvent, + TextDocumentIdentifier, TextDocumentItem, +}; +use thiserror::Error; + +use super::{helpers::parse_uri, RunnerResult, ScenarioRunner}; +use crate::scenario::{ + ChangeFullStep, ChangeIncrementalStep, CloseStep, ConfigStep, DeleteFileStep, + NotifyWatchedFilesStep, OpenStep, SaveStep, WriteFileStep, +}; + +#[derive(Debug, Error)] +pub enum FilesystemError { + #[error("create parent directories for writeFile {path}: {source}")] + CreateParentDirs { + path: PathBuf, + #[source] + source: std::io::Error, + }, + #[error("writeFile {path} failed: {source}")] + WriteFile { + path: PathBuf, + #[source] + source: std::io::Error, + }, + #[error("deleteFile {path} failed: {source}")] + DeleteFile { + path: PathBuf, + #[source] + source: std::io::Error, + }, +} + +impl ScenarioRunner { + pub(super) fn step_open(&self, step: &OpenStep) -> RunnerResult<()> { + let uri = parse_uri(&step.uri, "open")?; + let params = DidOpenTextDocumentParams { + text_document: TextDocumentItem { + uri, + language_id: step.language_id.clone(), + version: step.version, + text: step.text.clone(), + }, + }; + self.send_notification_with_params(DidOpenTextDocument::METHOD, params, "didOpen") + } + + pub(super) fn step_change_full(&self, step: &ChangeFullStep) -> RunnerResult<()> { + let uri = 
parse_uri(&step.uri, "didChange(full)")?; + let params = DidChangeTextDocumentParams { + text_document: lsp_types::VersionedTextDocumentIdentifier { + uri, + version: step.version, + }, + content_changes: vec![step.as_change_event()], + }; + self.send_notification_with_params(DidChangeTextDocument::METHOD, params, "didChange(full)") + } + + pub(super) fn step_change_incremental(&self, step: &ChangeIncrementalStep) -> RunnerResult<()> { + let uri = parse_uri(&step.uri, "didChange(incremental)")?; + let params = DidChangeTextDocumentParams { + text_document: lsp_types::VersionedTextDocumentIdentifier { + uri, + version: step.version, + }, + content_changes: vec![step.as_change_event()], + }; + self.send_notification_with_params( + DidChangeTextDocument::METHOD, + params, + "didChange(incremental)", + ) + } + + pub(super) fn step_save(&self, step: &SaveStep) -> RunnerResult<()> { + let uri = parse_uri(&step.uri, "didSave")?; + let params = DidSaveTextDocumentParams { + text_document: TextDocumentIdentifier { uri }, + text: step.text.clone(), + }; + self.send_notification_with_params(DidSaveTextDocument::METHOD, params, "didSave") + } + + pub(super) fn step_close(&self, step: &CloseStep) -> RunnerResult<()> { + let uri = parse_uri(&step.uri, "didClose")?; + let params = DidCloseTextDocumentParams { + text_document: TextDocumentIdentifier { uri }, + }; + self.send_notification_with_params(DidCloseTextDocument::METHOD, params, "didClose") + } + + pub(super) fn step_config(&self, step: &ConfigStep) -> RunnerResult<()> { + let params = DidChangeConfigurationParams { + settings: step.settings.clone(), + }; + self.send_notification_with_params( + DidChangeConfiguration::METHOD, + params, + "didChangeConfiguration", + ) + } + + pub(super) fn step_write_file(step: &WriteFileStep) -> RunnerResult<()> { + let path = PathBuf::from(&step.path); + if let Some(parent) = path.parent() { + fs::create_dir_all(parent).map_err(|source| FilesystemError::CreateParentDirs { + path: 
path.clone(), + source, + })?; + } + fs::write(&path, &step.text) + .map_err(|source| FilesystemError::WriteFile { path, source })?; + Ok(()) + } + + pub(super) fn step_delete_file(step: &DeleteFileStep) -> RunnerResult<()> { + let path = PathBuf::from(&step.path); + match fs::remove_file(&path) { + Ok(()) => Ok(()), + Err(error) if error.kind() == std::io::ErrorKind::NotFound => Ok(()), + Err(source) => Err(FilesystemError::DeleteFile { path, source }.into()), + } + } + + pub(super) fn step_notify_watched_files( + &self, + step: &NotifyWatchedFilesStep, + ) -> RunnerResult<()> { + let changes = step + .changes + .iter() + .map(|change| { + let uri = parse_uri(&change.uri, "watched-files")?; + Ok::(FileEvent { + uri, + typ: change.change_type.as_lsp(), + }) + }) + .collect::<Result<Vec<_>, _>>()?; + let params = DidChangeWatchedFilesParams { changes }; + self.send_notification_with_params( + DidChangeWatchedFiles::METHOD, + params, + "didChangeWatchedFiles", + ) + } +} diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_runner/errors.rs b/crates/jrsonnet-lsp-scenario/src/scenario_runner/errors.rs new file mode 100644 index 00000000..ff9ccf6b --- /dev/null +++ b/crates/jrsonnet-lsp-scenario/src/scenario_runner/errors.rs @@ -0,0 +1,64 @@ +use thiserror::Error; + +use super::{ + document_steps::FilesystemError, + expectation_steps::AssertionError, + helpers::UriError, + transport::{RpcError, SerdeError, TransportError}, +}; + +#[derive(Debug, Error)] +pub enum RunnerError { + #[error(transparent)] + Uri(Box<UriError>), + #[error(transparent)] + Serde(Box<SerdeError>), + #[error(transparent)] + Transport(Box<TransportError>), + #[error(transparent)] + Rpc(Box<RpcError>), + #[error(transparent)] + Assertion(Box<AssertionError>), + #[error(transparent)] + Filesystem(Box<FilesystemError>), + #[error("server thread panicked")] + ServerThreadPanicked, +} + +pub(super) type RunnerResult<T> = Result<T, RunnerError>; + +impl From<UriError> for RunnerError { + fn from(error: UriError) -> Self { + Self::Uri(Box::new(error)) + } +} + +impl From<SerdeError> for RunnerError { + fn from(error: SerdeError) -> Self
{ + Self::Serde(Box::new(error)) + } +} + +impl From<TransportError> for RunnerError { + fn from(error: TransportError) -> Self { + Self::Transport(Box::new(error)) + } +} + +impl From<RpcError> for RunnerError { + fn from(error: RpcError) -> Self { + Self::Rpc(Box::new(error)) + } +} + +impl From<AssertionError> for RunnerError { + fn from(error: AssertionError) -> Self { + Self::Assertion(Box::new(error)) + } +} + +impl From<FilesystemError> for RunnerError { + fn from(error: FilesystemError) -> Self { + Self::Filesystem(Box::new(error)) + } +} diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_runner/expectation_steps.rs b/crates/jrsonnet-lsp-scenario/src/scenario_runner/expectation_steps.rs new file mode 100644 index 00000000..fe662fa8 --- /dev/null +++ b/crates/jrsonnet-lsp-scenario/src/scenario_runner/expectation_steps.rs @@ -0,0 +1,431 @@ +use std::time::Duration; + +use serde::{de::DeserializeOwned, Serialize}; +use serde_json::Value; +use thiserror::Error; + +use super::{ + helpers::{ + completion_label_counts, completion_labels, hover_leading_type, json_mismatch_report, + label_counts, JsonMismatchReport, + }, + transport::{RpcError, SerdeError, TransportError}, + RunnerResult, ScenarioRunner, REQUEST_TIMEOUT, +}; +use crate::scenario::{ + DiagnosticsSettledStep, ExpectCodeActionStep, ExpectCodeLensStep, ExpectCompletionStep, + ExpectDeclarationStep, ExpectDefinitionStep, ExpectDiagnosticsStep, ExpectDocumentSymbolStep, + ExpectExecuteCommandStep, ExpectFormattingStep, ExpectHoverStep, ExpectHoverTypeStep, + ExpectInlayHintsStep, ExpectPrepareRenameStep, ExpectReferencesStep, ExpectRenameStep, + ExpectSemanticTokensFullStep, ExpectSemanticTokensRangeStep, ExpectSignatureHelpStep, + ExpectTypeDefinitionStep, ExpectWorkspaceSymbolStep, TypeMatchMode, +}; + +#[derive(Debug, Error)] +pub enum AssertionError { + #[error("hover response {id} had no result, expected inferred type {expected_type}")] + HoverMissingResult { id: i32, expected_type: String }, + #[error("hover response {id} did not start with a backticked 
type: {hover}")] + HoverMalformedLeadingType { id: i32, hover: String }, + #[error("hover type mismatch for id {id}: actual={actual} expected {match_mode} {expected}")] + HoverTypeMismatch { + id: i32, + match_mode: &'static str, + actual: String, + expected: String, + }, + #[error("completion response {id} had no result, expected labels {expected_labels:?}")] + CompletionMissingResult { + id: i32, + expected_labels: Vec, + }, + #[error( + "completion labels mismatch for id {id}: expected at least {expected_count} of `{label}`, got {actual_count} (actual labels: {actual_labels:?})" + )] + CompletionAtLeastLabelMismatch { + id: i32, + label: String, + expected_count: usize, + actual_count: usize, + actual_labels: Vec, + }, + #[error("completion labels mismatch for id {id}: actual={actual_labels:?} expected={expected_labels:?}")] + CompletionLabelsMismatch { + id: i32, + actual_labels: Vec, + expected_labels: Vec, + }, + #[error("diagnostics mismatch for uri {uri}\n{details}")] + DiagnosticsMismatch { + uri: String, + details: JsonMismatchReport, + }, +} + +impl ScenarioRunner { + fn mismatch_details(actual: &A, expected: &E) -> RunnerResult + where + A: Serialize, + E: Serialize, + { + let actual_json = + serde_json::to_value(actual).map_err(|source| SerdeError::SerializeValue { + context: "response mismatch actual", + source, + })?; + let expected_json = + serde_json::to_value(expected).map_err(|source| SerdeError::SerializeValue { + context: "response mismatch expected", + source, + })?; + Ok(json_mismatch_report(&actual_json, &expected_json)) + } + + fn response_mismatch( + method: &'static str, + id: i32, + actual: &A, + expected: &E, + ) -> RunnerResult<()> + where + A: Serialize, + E: Serialize, + { + Err(RpcError::ResponseMismatch { + method, + id, + details: Self::mismatch_details(actual, expected)?, + } + .into()) + } + + fn response_mismatch_json_values( + method: &'static str, + id: i32, + actual: &Value, + expected: &Value, + ) -> RunnerResult<()> { + 
Err(RpcError::ResponseMismatch { + method, + id, + details: json_mismatch_report(actual, expected), + } + .into()) + } + + fn expect_typed_response<T>( + &mut self, + method: &'static str, + id: i32, + expected: &Option<T>, + ) -> RunnerResult<()> + where + T: DeserializeOwned + PartialEq + Serialize, + { + let actual = self.response_result::<Option<T>>(method, id)?; + if &actual != expected { + return Self::response_mismatch(method, id, &actual, expected); + } + Ok(()) + } + + pub(super) fn step_expect_code_action( + &mut self, + step: &ExpectCodeActionStep, + ) -> RunnerResult<()> { + let actual = + self.response_result::>("codeAction", step.id)?; + if actual != step.result { + return Self::response_mismatch("codeAction", step.id, &actual, &step.result); + } + Ok(()) + } + + pub(super) fn step_expect_references( + &mut self, + step: &ExpectReferencesStep, + ) -> RunnerResult<()> { + let actual = self.response_result::>("references", step.id)?; + if actual != step.result { + return Self::response_mismatch("references", step.id, &actual, &step.result); + } + Ok(()) + } + + pub(super) fn step_expect_definition( + &mut self, + step: &ExpectDefinitionStep, + ) -> RunnerResult<()> { + self.expect_typed_response("definition", step.id, &step.result) + } + + pub(super) fn step_expect_declaration( + &mut self, + step: &ExpectDeclarationStep, + ) -> RunnerResult<()> { + self.expect_typed_response("declaration", step.id, &step.result) + } + + pub(super) fn step_expect_type_definition( + &mut self, + step: &ExpectTypeDefinitionStep, + ) -> RunnerResult<()> { + self.expect_typed_response("typeDefinition", step.id, &step.result) + } + + pub(super) fn step_expect_prepare_rename( + &mut self, + step: &ExpectPrepareRenameStep, + ) -> RunnerResult<()> { + self.expect_typed_response("prepareRename", step.id, &step.result) + } + + pub(super) fn step_expect_rename(&mut self, step: &ExpectRenameStep) -> RunnerResult<()> { + self.expect_typed_response("rename", step.id, &step.result) + } + + pub(super) 
fn step_expect_hover(&mut self, step: &ExpectHoverStep) -> RunnerResult<()> { + let actual = self.response_result::("hover", step.id)?; + if actual != step.result { + return Self::response_mismatch("hover", step.id, &actual, &step.result); + } + Ok(()) + } + + pub(super) fn step_expect_hover_type( + &mut self, + step: &ExpectHoverTypeStep, + ) -> RunnerResult<()> { + let actual_hover = self + .response_result::("hover", step.id)? + .ok_or_else(|| AssertionError::HoverMissingResult { + id: step.id, + expected_type: step.expected_type.clone(), + })?; + let actual_type = hover_leading_type(&actual_hover).ok_or_else(|| { + AssertionError::HoverMalformedLeadingType { + id: step.id, + hover: format!("{actual_hover:?}"), + } + })?; + match step.match_mode { + TypeMatchMode::Exact if actual_type != step.expected_type => { + Err(AssertionError::HoverTypeMismatch { + id: step.id, + match_mode: "exactly", + actual: actual_type, + expected: step.expected_type.clone(), + } + .into()) + } + TypeMatchMode::Contains if !actual_type.contains(&step.expected_type) => { + Err(AssertionError::HoverTypeMismatch { + id: step.id, + match_mode: "to contain", + actual: actual_type, + expected: step.expected_type.clone(), + } + .into()) + } + TypeMatchMode::Exact | TypeMatchMode::Contains => Ok(()), + } + } + + pub(super) fn step_expect_signature_help( + &mut self, + step: &ExpectSignatureHelpStep, + ) -> RunnerResult<()> { + self.expect_typed_response("signatureHelp", step.id, &step.result) + } + + pub(super) fn step_expect_completion( + &mut self, + step: &ExpectCompletionStep, + ) -> RunnerResult<()> { + let actual = + self.response_result::("completion", step.id)?; + + let check_full_result = step.result.is_some() || step.labels.is_none(); + if check_full_result && actual != step.result { + return Self::response_mismatch("completion", step.id, &actual, &step.result); + } + + if let Some(expected_labels) = &step.labels { + let Some(actual_response) = actual.as_ref() else { + return 
Err(AssertionError::CompletionMissingResult { + id: step.id, + expected_labels: expected_labels.clone(), + } + .into()); + }; + + let actual_counts = completion_label_counts(actual_response); + let expected_counts = label_counts(expected_labels); + if step.allow_extra { + for (label, expected_count) in &expected_counts { + let actual_count = actual_counts.get(label).copied().unwrap_or_default(); + if actual_count < *expected_count { + return Err(AssertionError::CompletionAtLeastLabelMismatch { + id: step.id, + label: label.clone(), + expected_count: *expected_count, + actual_count, + actual_labels: completion_labels(actual_response), + } + .into()); + } + } + } else if actual_counts != expected_counts { + return Err(AssertionError::CompletionLabelsMismatch { + id: step.id, + actual_labels: completion_labels(actual_response), + expected_labels: expected_labels.clone(), + } + .into()); + } + } + + Ok(()) + } + + pub(super) fn step_expect_formatting( + &mut self, + step: &ExpectFormattingStep, + ) -> RunnerResult<()> { + self.expect_typed_response("formatting", step.id, &step.result) + } + + pub(super) fn step_expect_semantic_tokens_full( + &mut self, + step: &ExpectSemanticTokensFullStep, + ) -> RunnerResult<()> { + self.expect_typed_response("semanticTokens/full", step.id, &step.result) + } + + pub(super) fn step_expect_semantic_tokens_range( + &mut self, + step: &ExpectSemanticTokensRangeStep, + ) -> RunnerResult<()> { + self.expect_typed_response("semanticTokens/range", step.id, &step.result) + } + + pub(super) fn step_expect_inlay_hints( + &mut self, + step: &ExpectInlayHintsStep, + ) -> RunnerResult<()> { + let actual = self.response_result::>("inlayHints", step.id)?; + let actual_json = + serde_json::to_value(&actual).map_err(|source| SerdeError::SerializeValue { + context: "response mismatch actual", + source, + })?; + let expected_json = + serde_json::to_value(&step.result).map_err(|source| SerdeError::SerializeValue { + context: "response mismatch expected", 
+ source, + })?; + if actual_json != expected_json { + return Self::response_mismatch_json_values( + "inlayHints", + step.id, + &actual_json, + &expected_json, + ); + } + Ok(()) + } + + pub(super) fn step_expect_document_symbol( + &mut self, + step: &ExpectDocumentSymbolStep, + ) -> RunnerResult<()> { + self.expect_typed_response("documentSymbol", step.id, &step.result) + } + + pub(super) fn step_expect_workspace_symbol( + &mut self, + step: &ExpectWorkspaceSymbolStep, + ) -> RunnerResult<()> { + self.expect_typed_response("workspaceSymbol", step.id, &step.result) + } + + pub(super) fn step_expect_code_lens(&mut self, step: &ExpectCodeLensStep) -> RunnerResult<()> { + self.expect_typed_response("codeLens", step.id, &step.result) + } + + pub(super) fn step_expect_execute_command( + &mut self, + step: &ExpectExecuteCommandStep, + ) -> RunnerResult<()> { + let response = self.wait_response(step.id, REQUEST_TIMEOUT)?; + if let Some(error) = response.error { + return Err(RpcError::ResponseReturnedError { + method: "executeCommand", + id: step.id, + error, + } + .into()); + } + if response.result != step.result { + return Self::response_mismatch( + "executeCommand", + step.id, + &response.result, + &step.result, + ); + } + Ok(()) + } + + pub(super) fn step_expect_diagnostics( + &mut self, + step: &ExpectDiagnosticsStep, + ) -> RunnerResult<()> { + let actual = self.wait_diagnostics_for_uri(&step.uri, REQUEST_TIMEOUT)?; + if actual.diagnostics != step.diagnostics { + let details = Self::mismatch_details(&actual.diagnostics, &step.diagnostics)?; + return Err(AssertionError::DiagnosticsMismatch { + uri: step.uri.clone(), + details, + } + .into()); + } + Ok(()) + } + + pub(super) fn step_diagnostics_settled( + &mut self, + step: DiagnosticsSettledStep, + ) -> RunnerResult<()> { + let timeout = Duration::from_millis(step.timeout_ms); + let idle = Duration::from_millis(step.idle_ms); + let start = std::time::Instant::now(); + let mut last_diagnostic = 
self.last_diagnostic_at.unwrap_or(start); + + loop { + if start.elapsed() > timeout { + return Err(TransportError::DiagnosticsDidNotSettle { timeout }.into()); + } + + if last_diagnostic.elapsed() >= idle { + return Ok(()); + } + + let remaining_timeout = timeout.saturating_sub(start.elapsed()); + let remaining_idle = idle.saturating_sub(last_diagnostic.elapsed()); + let wait_for = remaining_timeout.min(remaining_idle); + + match self.conn.receiver.recv_timeout(wait_for) { + Ok(message) => self.capture_background_message(message)?, + Err(crossbeam_channel::RecvTimeoutError::Timeout) => {} + Err(crossbeam_channel::RecvTimeoutError::Disconnected) => { + return Err(TransportError::DiagnosticsSettledDisconnected.into()); + } + } + + if let Some(latest) = self.last_diagnostic_at { + last_diagnostic = latest; + } + } + } +} diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_runner/helpers.rs b/crates/jrsonnet-lsp-scenario/src/scenario_runner/helpers.rs new file mode 100644 index 00000000..5f709903 --- /dev/null +++ b/crates/jrsonnet-lsp-scenario/src/scenario_runner/helpers.rs @@ -0,0 +1,385 @@ +use std::collections::{BTreeSet, HashMap}; +use std::fmt; + +use serde_json::Value; +use thiserror::Error; + +use super::RunnerResult; + +#[derive(Debug, Error)] +pub enum UriError { + #[error("parse {context} uri '{uri}': {reason}")] + Parse { + context: &'static str, + uri: String, + reason: String, + }, +} + +pub(super) fn parse_uri(uri: &str, context: &'static str) -> RunnerResult { + uri.parse::() + .map_err(|error| UriError::Parse { + context, + uri: uri.to_owned(), + reason: error.to_string(), + }) + .map_err(Into::into) +} + +pub(super) fn hover_leading_type(hover: &lsp_types::Hover) -> Option { + let text = match &hover.contents { + lsp_types::HoverContents::Markup(markup) => markup.value.as_str(), + lsp_types::HoverContents::Scalar(scalar) => match scalar { + lsp_types::MarkedString::String(value) => value.as_str(), + lsp_types::MarkedString::LanguageString(language) 
=> language.value.as_str(), + }, + lsp_types::HoverContents::Array(items) => { + let first = items.first()?; + match first { + lsp_types::MarkedString::String(value) => value.as_str(), + lsp_types::MarkedString::LanguageString(language) => language.value.as_str(), + } + } + }; + let trimmed = text.trim_start(); + let rest = trimmed.strip_prefix('`')?; + let (ty, _) = rest.split_once('`')?; + Some(ty.to_string()) +} + +pub(super) fn completion_items( + response: &lsp_types::CompletionResponse, +) -> &[lsp_types::CompletionItem] { + match response { + lsp_types::CompletionResponse::Array(items) => items, + lsp_types::CompletionResponse::List(list) => &list.items, + } +} + +pub(super) fn completion_labels(response: &lsp_types::CompletionResponse) -> Vec { + completion_items(response) + .iter() + .map(|item| item.label.clone()) + .collect() +} + +pub(super) fn label_counts(labels: &[String]) -> HashMap { + let mut counts = HashMap::new(); + for label in labels { + *counts.entry(label.clone()).or_insert(0) += 1; + } + counts +} + +pub(super) fn completion_label_counts( + response: &lsp_types::CompletionResponse, +) -> HashMap { + label_counts(&completion_labels(response)) +} + +const MAX_DIFF_LINES: usize = 20; +const MAX_VALUE_PREVIEW_CHARS: usize = 120; + +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum JsonDifference { + ValueMismatch { + path: String, + actual: Value, + expected: Value, + }, + MissingValue { + path: String, + expected: Value, + }, + UnexpectedValue { + path: String, + actual: Value, + }, + LengthMismatch { + path: String, + actual: usize, + expected: usize, + }, +} + +impl JsonDifference { + fn render(&self) -> String { + match self { + Self::ValueMismatch { + path, + actual, + expected, + } => format!( + "{path}: actual {} != expected {}", + preview_json_value(actual), + preview_json_value(expected) + ), + Self::MissingValue { path, expected } => { + format!( + "{path}: missing value, expected {}", + preview_json_value(expected) + ) + } + 
Self::UnexpectedValue { path, actual } => { + format!("{path}: unexpected value {}", preview_json_value(actual)) + } + Self::LengthMismatch { + path, + actual, + expected, + } => format!("{path}: length mismatch (actual {actual}, expected {expected})"), + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct JsonMismatchReport { + differences: Vec, + truncated: bool, +} + +impl JsonMismatchReport { + fn new() -> Self { + Self { + differences: Vec::new(), + truncated: false, + } + } + + pub fn differences(&self) -> &[JsonDifference] { + &self.differences + } + + pub const fn truncated(&self) -> bool { + self.truncated + } +} + +impl fmt::Display for JsonMismatchReport { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + if self.differences().is_empty() { + return f.write_str("values differ"); + } + + f.write_str("differences:\n")?; + for difference in self.differences() { + f.write_str(" - ")?; + f.write_str(&difference.render())?; + f.write_str("\n")?; + } + if self.truncated() { + f.write_str(" - ... 
additional differences omitted ...\n")?; + } + Ok(()) + } +} + +pub(super) fn json_mismatch_report(actual: &Value, expected: &Value) -> JsonMismatchReport { + let mut report = JsonMismatchReport::new(); + collect_json_differences("$", actual, expected, &mut report); + report +} + +fn collect_json_differences( + path: &str, + actual: &Value, + expected: &Value, + report: &mut JsonMismatchReport, +) { + if actual == expected || report.truncated { + return; + } + + match (actual, expected) { + (Value::Object(actual_object), Value::Object(expected_object)) => { + let keys: BTreeSet<&str> = actual_object + .keys() + .chain(expected_object.keys()) + .map(String::as_str) + .collect(); + for key in keys { + if report.truncated { + return; + } + let child_path = child_object_path(path, key); + match (actual_object.get(key), expected_object.get(key)) { + (Some(actual_value), Some(expected_value)) => { + collect_json_differences(&child_path, actual_value, expected_value, report) + } + (Some(actual_value), None) => push_difference( + report, + JsonDifference::UnexpectedValue { + path: child_path, + actual: actual_value.clone(), + }, + ), + (None, Some(expected_value)) => push_difference( + report, + JsonDifference::MissingValue { + path: child_path, + expected: expected_value.clone(), + }, + ), + (None, None) => {} + } + } + } + (Value::Array(actual_array), Value::Array(expected_array)) => { + if actual_array.len() != expected_array.len() { + push_difference( + report, + JsonDifference::LengthMismatch { + path: path.to_string(), + actual: actual_array.len(), + expected: expected_array.len(), + }, + ); + } + let min_len = actual_array.len().min(expected_array.len()); + for index in 0..min_len { + if report.truncated { + return; + } + let child_path = format!("{path}[{index}]"); + collect_json_differences( + &child_path, + &actual_array[index], + &expected_array[index], + report, + ); + } + } + _ => push_difference( + report, + JsonDifference::ValueMismatch { + path: 
path.to_string(), + actual: actual.clone(), + expected: expected.clone(), + }, + ), + } +} + +fn push_difference(report: &mut JsonMismatchReport, difference: JsonDifference) { + if report.differences.len() >= MAX_DIFF_LINES { + report.truncated = true; + return; + } + report.differences.push(difference); + if report.differences.len() >= MAX_DIFF_LINES { + report.truncated = true; + } +} + +fn child_object_path(path: &str, key: &str) -> String { + if is_identifier_key(key) { + format!("{path}.{key}") + } else { + format!("{path}[{}]", serde_json::to_string(key).unwrap_or_default()) + } +} + +fn is_identifier_key(key: &str) -> bool { + let mut chars = key.chars(); + let Some(first) = chars.next() else { + return false; + }; + if !(first.is_ascii_alphabetic() || first == '_') { + return false; + } + chars.all(|ch| ch.is_ascii_alphanumeric() || ch == '_') +} + +fn preview_json_value(value: &Value) -> String { + let serialized = serde_json::to_string(value) + .unwrap_or_else(|_| "\"\"".to_string()); + ellipsis(&serialized, MAX_VALUE_PREVIEW_CHARS) +} + +fn ellipsis(input: &str, max_chars: usize) -> String { + let mut result = String::new(); + + for (count, ch) in input.chars().enumerate() { + if count == max_chars { + result.push_str("..."); + return result; + } + result.push(ch); + } + + result +} + +#[cfg(test)] +mod tests { + use assert_matches::assert_matches; + use serde_json::json; + + use super::*; + use crate::scenario_runner::RunnerError; + + #[test] + fn parse_uri_reports_structured_error_variant() { + let result = parse_uri("://not-a-uri", "hover"); + let (context, uri, reason) = assert_matches!( + result, + Err(RunnerError::Uri(error_box)) => { + let UriError::Parse { + context, + uri, + reason, + } = *error_box; + (context, uri, reason) + } + ); + assert_eq!(context, "hover"); + assert_eq!(uri, "://not-a-uri"); + assert!(!reason.is_empty()); + } + + #[test] + fn json_mismatch_report_is_structural_and_concise() { + let actual = json!({ + "items": [ + { 
"label": "a", "kind": 1 }, + { "label": "b", "kind": 2 } + ], + "isIncomplete": false + }); + let expected = json!({ + "items": [ + { "label": "a", "kind": 1 }, + { "label": "c", "kind": 2 } + ], + "isIncomplete": true + }); + + let report = json_mismatch_report(&actual, &expected); + assert_eq!( + report.differences(), + [ + JsonDifference::ValueMismatch { + path: "$.isIncomplete".to_string(), + actual: json!(false), + expected: json!(true), + }, + JsonDifference::ValueMismatch { + path: "$.items[1].label".to_string(), + actual: json!("b"), + expected: json!("c"), + }, + ] + ); + assert!(!report.truncated()); + + let rendered = report.to_string(); + assert_eq!( + rendered, + concat!( + "differences:\n", + " - $.isIncomplete: actual false != expected true\n", + " - $.items[1].label: actual \"b\" != expected \"c\"\n" + ) + ); + } +} diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_runner/mod.rs b/crates/jrsonnet-lsp-scenario/src/scenario_runner/mod.rs new file mode 100644 index 00000000..52f63851 --- /dev/null +++ b/crates/jrsonnet-lsp-scenario/src/scenario_runner/mod.rs @@ -0,0 +1,15 @@ +//! In-memory runner for `Scenario` timelines. 
+ +mod document_steps; +mod errors; +mod expectation_steps; +mod helpers; +mod request_steps; +mod runner; +mod transport; + +pub use self::{errors::RunnerError, runner::run_scenario}; +use self::{ + errors::RunnerResult, + runner::{ScenarioRunner, REQUEST_TIMEOUT}, +}; diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_runner/request_steps.rs b/crates/jrsonnet-lsp-scenario/src/scenario_runner/request_steps.rs new file mode 100644 index 00000000..f8eddf16 --- /dev/null +++ b/crates/jrsonnet-lsp-scenario/src/scenario_runner/request_steps.rs @@ -0,0 +1,297 @@ +use std::collections::HashMap; + +use lsp_types::{ + request::{ + CodeActionRequest, CodeLensRequest, Completion, DocumentSymbolRequest, ExecuteCommand, + Formatting, GotoDeclaration, GotoDefinition, GotoTypeDefinition, HoverRequest, + InlayHintRequest, PrepareRenameRequest, References, Rename, Request as _, + SemanticTokensFullRequest, SemanticTokensRangeRequest, SignatureHelpRequest, + WorkspaceSymbolRequest, + }, + CodeActionContext, CodeActionParams, CodeLensParams, DocumentFormattingParams, + DocumentSymbolParams, ExecuteCommandParams, FormattingOptions, GotoDefinitionParams, + HoverParams, InlayHintParams, PartialResultParams, ReferenceContext, ReferenceParams, + RenameParams, SemanticTokensParams, SemanticTokensRangeParams, SignatureHelpParams, + TextDocumentIdentifier, TextDocumentPositionParams, WorkDoneProgressParams, + WorkspaceSymbolParams, +}; + +use super::{helpers::parse_uri, RunnerResult, ScenarioRunner}; +use crate::scenario::{ + RequestCodeActionStep, RequestCodeLensStep, RequestCompletionStep, RequestDeclarationStep, + RequestDefinitionStep, RequestDocumentSymbolStep, RequestExecuteCommandStep, + RequestFormattingStep, RequestHoverStep, RequestInlayHintsStep, RequestPrepareRenameStep, + RequestReferencesStep, RequestRenameStep, RequestSemanticTokensFullStep, + RequestSemanticTokensRangeStep, RequestSignatureHelpStep, RequestTypeDefinitionStep, + RequestWorkspaceSymbolStep, +}; + +fn 
text_document_position_params( + uri: lsp_types::Uri, + position: lsp_types::Position, +) -> TextDocumentPositionParams { + TextDocumentPositionParams { + text_document: TextDocumentIdentifier { uri }, + position, + } +} + +fn goto_definition_params( + uri: lsp_types::Uri, + position: lsp_types::Position, +) -> GotoDefinitionParams { + GotoDefinitionParams { + text_document_position_params: text_document_position_params(uri, position), + work_done_progress_params: WorkDoneProgressParams::default(), + partial_result_params: PartialResultParams::default(), + } +} + +impl ScenarioRunner { + pub(super) fn step_request_code_action( + &self, + step: &RequestCodeActionStep, + ) -> RunnerResult<()> { + let uri = parse_uri(&step.uri, "codeAction")?; + let params = CodeActionParams { + text_document: TextDocumentIdentifier { uri }, + range: step.range, + context: CodeActionContext { + diagnostics: step.diagnostics.clone(), + only: step.only.clone(), + trigger_kind: None, + }, + work_done_progress_params: WorkDoneProgressParams::default(), + partial_result_params: PartialResultParams::default(), + }; + self.send_request_with_params(step.id, CodeActionRequest::METHOD, params, "codeAction") + } + + pub(super) fn step_request_references(&self, step: &RequestReferencesStep) -> RunnerResult<()> { + let uri = parse_uri(&step.uri, "references")?; + let params = ReferenceParams { + text_document_position: text_document_position_params(uri, step.position), + context: ReferenceContext { + include_declaration: step.include_declaration, + }, + work_done_progress_params: WorkDoneProgressParams::default(), + partial_result_params: PartialResultParams::default(), + }; + self.send_request_with_params(step.id, References::METHOD, params, "references") + } + + pub(super) fn step_request_definition(&self, step: &RequestDefinitionStep) -> RunnerResult<()> { + let uri = parse_uri(&step.uri, "definition")?; + let params = goto_definition_params(uri, step.position); + 
self.send_request_with_params(step.id, GotoDefinition::METHOD, params, "definition") + } + + pub(super) fn step_request_declaration( + &self, + step: &RequestDeclarationStep, + ) -> RunnerResult<()> { + let uri = parse_uri(&step.uri, "declaration")?; + let params = goto_definition_params(uri, step.position); + self.send_request_with_params(step.id, GotoDeclaration::METHOD, params, "declaration") + } + + pub(super) fn step_request_type_definition( + &self, + step: &RequestTypeDefinitionStep, + ) -> RunnerResult<()> { + let uri = parse_uri(&step.uri, "typeDefinition")?; + let params = goto_definition_params(uri, step.position); + self.send_request_with_params( + step.id, + GotoTypeDefinition::METHOD, + params, + "typeDefinition", + ) + } + + pub(super) fn step_request_prepare_rename( + &self, + step: &RequestPrepareRenameStep, + ) -> RunnerResult<()> { + let uri = parse_uri(&step.uri, "prepareRename")?; + let params = text_document_position_params(uri, step.position); + self.send_request_with_params( + step.id, + PrepareRenameRequest::METHOD, + params, + "prepareRename", + ) + } + + pub(super) fn step_request_rename(&self, step: &RequestRenameStep) -> RunnerResult<()> { + let uri = parse_uri(&step.uri, "rename")?; + let params = RenameParams { + text_document_position: text_document_position_params(uri, step.position), + new_name: step.new_name.clone(), + work_done_progress_params: WorkDoneProgressParams::default(), + }; + self.send_request_with_params(step.id, Rename::METHOD, params, "rename") + } + + pub(super) fn step_request_hover(&self, step: &RequestHoverStep) -> RunnerResult<()> { + let uri = parse_uri(&step.uri, "hover")?; + let params = HoverParams { + text_document_position_params: text_document_position_params(uri, step.position), + work_done_progress_params: WorkDoneProgressParams::default(), + }; + self.send_request_with_params(step.id, HoverRequest::METHOD, params, "hover") + } + + pub(super) fn step_request_signature_help( + &self, + step: 
&RequestSignatureHelpStep, + ) -> RunnerResult<()> { + let uri = parse_uri(&step.uri, "signatureHelp")?; + let params = SignatureHelpParams { + text_document_position_params: text_document_position_params(uri, step.position), + work_done_progress_params: WorkDoneProgressParams::default(), + context: None, + }; + self.send_request_with_params( + step.id, + SignatureHelpRequest::METHOD, + params, + "signatureHelp", + ) + } + + pub(super) fn step_request_completion(&self, step: &RequestCompletionStep) -> RunnerResult<()> { + let uri = parse_uri(&step.uri, "completion")?; + let params = lsp_types::CompletionParams { + text_document_position: text_document_position_params(uri, step.position), + work_done_progress_params: WorkDoneProgressParams::default(), + partial_result_params: PartialResultParams::default(), + context: None, + }; + self.send_request_with_params(step.id, Completion::METHOD, params, "completion") + } + + pub(super) fn step_request_formatting(&self, step: &RequestFormattingStep) -> RunnerResult<()> { + let uri = parse_uri(&step.uri, "formatting")?; + let params = DocumentFormattingParams { + text_document: TextDocumentIdentifier { uri }, + options: FormattingOptions { + tab_size: step.tab_size, + insert_spaces: step.insert_spaces, + properties: HashMap::new(), + trim_trailing_whitespace: None, + insert_final_newline: None, + trim_final_newlines: None, + }, + work_done_progress_params: WorkDoneProgressParams::default(), + }; + self.send_request_with_params(step.id, Formatting::METHOD, params, "formatting") + } + + pub(super) fn step_request_semantic_tokens_full( + &self, + step: &RequestSemanticTokensFullStep, + ) -> RunnerResult<()> { + let uri = parse_uri(&step.uri, "semanticTokens/full")?; + let params = SemanticTokensParams { + text_document: TextDocumentIdentifier { uri }, + work_done_progress_params: WorkDoneProgressParams::default(), + partial_result_params: PartialResultParams::default(), + }; + self.send_request_with_params( + step.id, + 
SemanticTokensFullRequest::METHOD, + params, + "semanticTokens/full", + ) + } + + pub(super) fn step_request_semantic_tokens_range( + &self, + step: &RequestSemanticTokensRangeStep, + ) -> RunnerResult<()> { + let uri = parse_uri(&step.uri, "semanticTokens/range")?; + let params = SemanticTokensRangeParams { + text_document: TextDocumentIdentifier { uri }, + range: step.range, + work_done_progress_params: WorkDoneProgressParams::default(), + partial_result_params: PartialResultParams::default(), + }; + self.send_request_with_params( + step.id, + SemanticTokensRangeRequest::METHOD, + params, + "semanticTokens/range", + ) + } + + pub(super) fn step_request_inlay_hints( + &self, + step: &RequestInlayHintsStep, + ) -> RunnerResult<()> { + let uri = parse_uri(&step.uri, "inlayHints")?; + let params = InlayHintParams { + text_document: TextDocumentIdentifier { uri }, + range: step.range, + work_done_progress_params: WorkDoneProgressParams::default(), + }; + self.send_request_with_params(step.id, InlayHintRequest::METHOD, params, "inlayHints") + } + + pub(super) fn step_request_document_symbol( + &self, + step: &RequestDocumentSymbolStep, + ) -> RunnerResult<()> { + let uri = parse_uri(&step.uri, "documentSymbol")?; + let params = DocumentSymbolParams { + text_document: TextDocumentIdentifier { uri }, + work_done_progress_params: WorkDoneProgressParams::default(), + partial_result_params: PartialResultParams::default(), + }; + self.send_request_with_params( + step.id, + DocumentSymbolRequest::METHOD, + params, + "documentSymbol", + ) + } + + pub(super) fn step_request_workspace_symbol( + &self, + step: &RequestWorkspaceSymbolStep, + ) -> RunnerResult<()> { + let params = WorkspaceSymbolParams { + query: step.query.clone(), + work_done_progress_params: WorkDoneProgressParams::default(), + partial_result_params: PartialResultParams::default(), + }; + self.send_request_with_params( + step.id, + WorkspaceSymbolRequest::METHOD, + params, + "workspaceSymbol", + ) + } + + 
pub(super) fn step_request_code_lens(&self, step: &RequestCodeLensStep) -> RunnerResult<()> { + let uri = parse_uri(&step.uri, "codeLens")?; + let params = CodeLensParams { + text_document: TextDocumentIdentifier { uri }, + work_done_progress_params: WorkDoneProgressParams::default(), + partial_result_params: PartialResultParams::default(), + }; + self.send_request_with_params(step.id, CodeLensRequest::METHOD, params, "codeLens") + } + + pub(super) fn step_request_execute_command( + &self, + step: &RequestExecuteCommandStep, + ) -> RunnerResult<()> { + let params = ExecuteCommandParams { + command: step.command.clone(), + arguments: step.arguments.clone(), + work_done_progress_params: WorkDoneProgressParams::default(), + }; + self.send_request_with_params(step.id, ExecuteCommand::METHOD, params, "executeCommand") + } +} diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_runner/runner.rs b/crates/jrsonnet-lsp-scenario/src/scenario_runner/runner.rs new file mode 100644 index 00000000..51bbb230 --- /dev/null +++ b/crates/jrsonnet-lsp-scenario/src/scenario_runner/runner.rs @@ -0,0 +1,424 @@ +use std::{ + collections::{HashMap, VecDeque}, + thread, + time::{Duration, Instant}, +}; + +use lsp_server::{Connection, Response}; +use lsp_types::{ + request::{Initialize, Request as _, Shutdown}, + InitializeParams, +}; + +use super::{transport::RpcError, RunnerError, RunnerResult}; +use crate::scenario::{Scenario, ScenarioStep}; + +const INITIALIZE_REQUEST_ID: i32 = 1; +const SHUTDOWN_REQUEST_ID: i32 = 9_999; +pub(super) const REQUEST_TIMEOUT: Duration = Duration::from_secs(5); + +/// Run a full timeline scenario against an in-memory LSP server. +/// +/// `start_server` receives the server-side `Connection` and should run the +/// server event loop until shutdown/exit. 
+pub fn run_scenario(scenario: &Scenario, start_server: S) -> RunnerResult<()> +where + S: FnOnce(Connection) + Send + 'static, +{ + run_scenario_typed(scenario, start_server) +} + +fn run_scenario_typed(scenario: &Scenario, start_server: S) -> RunnerResult<()> +where + S: FnOnce(Connection) + Send + 'static, +{ + let mut runner = ScenarioRunner::start(start_server); + runner.initialize()?; + for step in &scenario.steps { + runner.run_step(step)?; + } + runner.shutdown() +} + +pub(super) struct ScenarioRunner { + pub(super) conn: Connection, + pub(super) server_thread: thread::JoinHandle<()>, + pub(super) pending_responses: Vec, + pub(super) pending_diagnostics: HashMap>, + pub(super) last_diagnostic_at: Option, +} + +impl ScenarioRunner { + pub(super) fn start(start_server: S) -> Self + where + S: FnOnce(Connection) + Send + 'static, + { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = thread::spawn(move || start_server(server_conn)); + Self { + conn: client_conn, + server_thread, + pending_responses: Vec::new(), + pending_diagnostics: HashMap::new(), + last_diagnostic_at: None, + } + } + + fn initialize(&mut self) -> RunnerResult<()> { + self.send_request_with_params( + INITIALIZE_REQUEST_ID, + Initialize::METHOD, + InitializeParams::default(), + "initialize", + )?; + let response = self.wait_response(INITIALIZE_REQUEST_ID, REQUEST_TIMEOUT)?; + if let Some(error) = response.error { + return Err(RpcError::ResponseReturnedError { + method: Initialize::METHOD, + id: INITIALIZE_REQUEST_ID, + error, + } + .into()); + } + self.send_notification_with_params("initialized", serde_json::json!({}), "initialized") + } + + fn shutdown(self) -> RunnerResult<()> { + let mut runner = self; + runner.send_request_with_params( + SHUTDOWN_REQUEST_ID, + Shutdown::METHOD, + serde_json::Value::Null, + "shutdown", + )?; + let response = runner.wait_response(SHUTDOWN_REQUEST_ID, REQUEST_TIMEOUT)?; + if let Some(error) = response.error { + return 
Err(RpcError::ResponseReturnedError { + method: Shutdown::METHOD, + id: SHUTDOWN_REQUEST_ID, + error, + } + .into()); + } + runner.send_notification_with_params("exit", serde_json::Value::Null, "exit")?; + runner + .server_thread + .join() + .map_err(|_| RunnerError::ServerThreadPanicked) + } + + fn run_step(&mut self, step: &ScenarioStep) -> RunnerResult<()> { + match step { + ScenarioStep::Open(open) => self.step_open(open), + ScenarioStep::ChangeFull(change) => self.step_change_full(change), + ScenarioStep::ChangeIncremental(change) => self.step_change_incremental(change), + ScenarioStep::Save(save) => self.step_save(save), + ScenarioStep::Close(close) => self.step_close(close), + ScenarioStep::Config(config) => self.step_config(config), + ScenarioStep::WriteFile(write_file) => Self::step_write_file(write_file), + ScenarioStep::DeleteFile(delete_file) => Self::step_delete_file(delete_file), + ScenarioStep::NotifyWatchedFiles(watched_files) => { + self.step_notify_watched_files(watched_files) + } + ScenarioStep::RequestCodeAction(request) => self.step_request_code_action(request), + ScenarioStep::ExpectCodeAction(expectation) => { + self.step_expect_code_action(expectation) + } + ScenarioStep::RequestReferences(request) => self.step_request_references(request), + ScenarioStep::ExpectReferences(expectation) => self.step_expect_references(expectation), + ScenarioStep::RequestDefinition(request) => self.step_request_definition(request), + ScenarioStep::ExpectDefinition(expectation) => self.step_expect_definition(expectation), + ScenarioStep::RequestDeclaration(request) => self.step_request_declaration(request), + ScenarioStep::ExpectDeclaration(expectation) => { + self.step_expect_declaration(expectation) + } + ScenarioStep::RequestTypeDefinition(request) => { + self.step_request_type_definition(request) + } + ScenarioStep::ExpectTypeDefinition(expectation) => { + self.step_expect_type_definition(expectation) + } + ScenarioStep::RequestPrepareRename(request) => { + 
self.step_request_prepare_rename(request) + } + ScenarioStep::ExpectPrepareRename(expectation) => { + self.step_expect_prepare_rename(expectation) + } + ScenarioStep::RequestRename(request) => self.step_request_rename(request), + ScenarioStep::ExpectRename(expectation) => self.step_expect_rename(expectation), + ScenarioStep::RequestHover(request) => self.step_request_hover(request), + ScenarioStep::ExpectHover(expectation) => self.step_expect_hover(expectation), + ScenarioStep::ExpectHoverType(expectation) => self.step_expect_hover_type(expectation), + ScenarioStep::RequestSignatureHelp(request) => { + self.step_request_signature_help(request) + } + ScenarioStep::ExpectSignatureHelp(expectation) => { + self.step_expect_signature_help(expectation) + } + ScenarioStep::RequestCompletion(request) => self.step_request_completion(request), + ScenarioStep::ExpectCompletion(expectation) => self.step_expect_completion(expectation), + ScenarioStep::RequestFormatting(request) => self.step_request_formatting(request), + ScenarioStep::ExpectFormatting(expectation) => self.step_expect_formatting(expectation), + ScenarioStep::RequestSemanticTokensFull(request) => { + self.step_request_semantic_tokens_full(request) + } + ScenarioStep::ExpectSemanticTokensFull(expectation) => { + self.step_expect_semantic_tokens_full(expectation) + } + ScenarioStep::RequestSemanticTokensRange(request) => { + self.step_request_semantic_tokens_range(request) + } + ScenarioStep::ExpectSemanticTokensRange(expectation) => { + self.step_expect_semantic_tokens_range(expectation) + } + ScenarioStep::RequestInlayHints(request) => self.step_request_inlay_hints(request), + ScenarioStep::ExpectInlayHints(expectation) => { + self.step_expect_inlay_hints(expectation) + } + ScenarioStep::RequestDocumentSymbol(request) => { + self.step_request_document_symbol(request) + } + ScenarioStep::ExpectDocumentSymbol(expectation) => { + self.step_expect_document_symbol(expectation) + } + 
ScenarioStep::RequestWorkspaceSymbol(request) => { + self.step_request_workspace_symbol(request) + } + ScenarioStep::ExpectWorkspaceSymbol(expectation) => { + self.step_expect_workspace_symbol(expectation) + } + ScenarioStep::RequestCodeLens(request) => self.step_request_code_lens(request), + ScenarioStep::ExpectCodeLens(expectation) => self.step_expect_code_lens(expectation), + ScenarioStep::RequestExecuteCommand(request) => { + self.step_request_execute_command(request) + } + ScenarioStep::ExpectExecuteCommand(expectation) => { + self.step_expect_execute_command(expectation) + } + ScenarioStep::ExpectDiagnostics(expectation) => { + self.step_expect_diagnostics(expectation) + } + ScenarioStep::DiagnosticsSettled(settled) => self.step_diagnostics_settled(*settled), + } + } +} + +#[cfg(test)] +mod tests { + use assert_matches::assert_matches; + use lsp_server::{Message, Notification, Response}; + use lsp_types::{ + notification::{DidOpenTextDocument, Notification as _, PublishDiagnostics}, + request::{HoverRequest, Request as _}, + Diagnostic, DiagnosticSeverity, Hover, HoverContents, MarkupContent, MarkupKind, Position, + PublishDiagnosticsParams, Range, + }; + + use super::{super::transport::RpcError, run_scenario}; + use crate::scenario::{ + DiagnosticsSettledStep, ExpectDiagnosticsStep, ExpectHoverStep, OpenStep, RequestHoverStep, + Scenario, ScenarioStep, + }; + use crate::scenario_runner::{helpers::JsonDifference, RunnerError}; + + fn test_diagnostic() -> Diagnostic { + Diagnostic { + range: Range { + start: Position { + line: 0, + character: 0, + }, + end: Position { + line: 0, + character: 1, + }, + }, + severity: Some(DiagnosticSeverity::WARNING), + code: None, + code_description: None, + source: Some("scenario-test".to_string()), + message: "test diagnostic".to_string(), + related_information: None, + tags: None, + data: None, + } + } + + fn test_hover() -> Hover { + Hover { + contents: HoverContents::Markup(MarkupContent { + kind: MarkupKind::Markdown, + 
value: "`number`".to_string(), + }), + range: None, + } + } + + #[test] + fn run_scenario_handles_requests_and_diagnostics() -> Result<(), super::RunnerError> { + let uri = "file:///workspace/main.jsonnet".to_string(); + let expected_diagnostic = test_diagnostic(); + let expected_hover = test_hover(); + let scenario = Scenario::new(vec![ + ScenarioStep::Open(OpenStep { + uri: uri.clone(), + text: "42\n".to_string(), + language_id: "jsonnet".to_string(), + version: 1, + }), + ScenarioStep::RequestHover(RequestHoverStep { + id: 7, + uri: uri.clone(), + position: Position { + line: 0, + character: 0, + }, + }), + ScenarioStep::ExpectHover(ExpectHoverStep { + id: 7, + result: Some(expected_hover.clone()), + }), + ScenarioStep::ExpectDiagnostics(ExpectDiagnosticsStep { + uri, + diagnostics: vec![expected_diagnostic.clone()], + }), + ScenarioStep::DiagnosticsSettled(DiagnosticsSettledStep { + timeout_ms: 300, + idle_ms: 30, + }), + ]); + + run_scenario(&scenario, move |connection| loop { + let Ok(message) = connection.receiver.recv() else { + break; + }; + + match message { + Message::Request(request) => { + let response = match request.method.as_str() { + lsp_types::request::Initialize::METHOD => { + Response::new_ok(request.id, serde_json::json!({})) + } + HoverRequest::METHOD => { + let Ok(result) = serde_json::to_value(&expected_hover) else { + break; + }; + Response::new_ok(request.id, result) + } + lsp_types::request::Shutdown::METHOD => { + Response::new_ok(request.id, serde_json::Value::Null) + } + _ => Response::new_ok(request.id, serde_json::Value::Null), + }; + if connection.sender.send(Message::Response(response)).is_err() { + break; + } + } + Message::Notification(notification) => { + if notification.method == DidOpenTextDocument::METHOD { + let Ok(params) = serde_json::from_value::< + lsp_types::DidOpenTextDocumentParams, + >(notification.params) else { + break; + }; + let publish = PublishDiagnosticsParams { + uri: params.text_document.uri, + version: 
Some(params.text_document.version), + diagnostics: vec![expected_diagnostic.clone()], + }; + let Ok(payload) = serde_json::to_value(publish) else { + break; + }; + let diag = + Notification::new(PublishDiagnostics::METHOD.to_string(), payload); + if connection.sender.send(Message::Notification(diag)).is_err() { + break; + } + } else if notification.method == "exit" { + break; + } + } + Message::Response(_) => {} + } + }) + } + + #[test] + fn run_scenario_reports_mismatched_expectation() { + let scenario = Scenario::new(vec![ + ScenarioStep::RequestHover(RequestHoverStep { + id: 5, + uri: "file:///workspace/main.jsonnet".to_string(), + position: Position { + line: 0, + character: 0, + }, + }), + ScenarioStep::ExpectHover(ExpectHoverStep { + id: 5, + result: None, + }), + ]); + + let result = run_scenario(&scenario, move |connection| loop { + let Ok(message) = connection.receiver.recv() else { + break; + }; + match message { + Message::Request(request) => { + let response = match request.method.as_str() { + lsp_types::request::Initialize::METHOD => { + Response::new_ok(request.id, serde_json::json!({})) + } + HoverRequest::METHOD => { + let Ok(result) = serde_json::to_value(test_hover()) else { + break; + }; + Response::new_ok(request.id, result) + } + lsp_types::request::Shutdown::METHOD => { + Response::new_ok(request.id, serde_json::Value::Null) + } + _ => Response::new_ok(request.id, serde_json::Value::Null), + }; + if connection.sender.send(Message::Response(response)).is_err() { + break; + } + } + Message::Notification(notification) => { + if notification.method == "exit" { + break; + } + } + Message::Response(_) => {} + } + }); + + let error = result.expect_err("scenario should report mismatch"); + let error_box = assert_matches!( + error, + RunnerError::Rpc(error_box) => error_box + ); + let details = assert_matches!( + *error_box, + RpcError::ResponseMismatch { + method: "hover", + id: 5, + details, + } => details + ); + assert_eq!( + details.differences(), 
+ [JsonDifference::ValueMismatch { + path: "$".to_string(), + actual: serde_json::json!({ + "contents": { + "kind": "markdown", + "value": "`number`" + } + }), + expected: serde_json::Value::Null, + }] + ); + } +} diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_runner/transport.rs b/crates/jrsonnet-lsp-scenario/src/scenario_runner/transport.rs new file mode 100644 index 00000000..8d8063a3 --- /dev/null +++ b/crates/jrsonnet-lsp-scenario/src/scenario_runner/transport.rs @@ -0,0 +1,239 @@ +use std::time::{Duration, Instant}; + +use crossbeam_channel::RecvTimeoutError; +use lsp_server::{Message, Notification, Request, Response, ResponseError}; +use lsp_types::notification::{Notification as _, PublishDiagnostics}; +use serde::{de::DeserializeOwned, Serialize}; +use thiserror::Error; + +use super::{helpers::JsonMismatchReport, RunnerResult, ScenarioRunner, REQUEST_TIMEOUT}; + +#[derive(Debug, Error)] +pub enum SerdeError { + #[error("serialize {context}: {source}")] + SerializeParams { + context: &'static str, + #[source] + source: serde_json::Error, + }, + #[error("deserialize {method} response result for id {id}: {source}")] + DeserializeResponseResult { + method: &'static str, + id: i32, + #[source] + source: serde_json::Error, + }, + #[error("deserialize publishDiagnostics params: {source}")] + DeserializePublishDiagnostics { + #[source] + source: serde_json::Error, + }, + #[error("serialize {context}: {source}")] + SerializeValue { + context: &'static str, + #[source] + source: serde_json::Error, + }, +} + +#[derive(Debug, Error)] +pub enum TransportError { + #[error("timed out waiting for response id {id}")] + ResponseTimeout { id: i32 }, + #[error("connection closed while waiting for response")] + ResponseDisconnected, + #[error("timed out waiting for diagnostics for uri {uri}")] + DiagnosticsTimeout { uri: String }, + #[error("connection closed while waiting for diagnostics")] + DiagnosticsDisconnected, + #[error("diagnostics did not settle within 
{timeout:?}")] + DiagnosticsDidNotSettle { timeout: Duration }, + #[error("connection closed while waiting for diagnostics to settle")] + DiagnosticsSettledDisconnected, + #[error("send notification failed: {source}")] + SendNotification { + #[source] + source: Box>, + }, + #[error("send request failed: {source}")] + SendRequest { + #[source] + source: Box>, + }, +} + +#[derive(Debug, Error)] +pub enum RpcError { + #[error("response {method} id {id} returned error: {error:?}")] + ResponseReturnedError { + method: &'static str, + id: i32, + error: ResponseError, + }, + #[error("{method} response mismatch for id {id}\n{details}")] + ResponseMismatch { + method: &'static str, + id: i32, + details: JsonMismatchReport, + }, +} + +impl ScenarioRunner { + pub(super) fn send_notification_with_params( + &self, + method: &str, + params: P, + context: &'static str, + ) -> RunnerResult<()> { + let payload = serde_json::to_value(params) + .map_err(|source| SerdeError::SerializeParams { context, source })?; + self.send_notification(Notification::new(method.to_owned(), payload)) + } + + pub(super) fn send_request_with_params( + &self, + id: i32, + method: &str, + params: P, + context: &'static str, + ) -> RunnerResult<()> { + let payload = serde_json::to_value(params) + .map_err(|source| SerdeError::SerializeParams { context, source })?; + self.send_request(Request::new(id.into(), method.to_owned(), payload)) + } + + pub(super) fn response_result( + &mut self, + method: &'static str, + id: i32, + ) -> RunnerResult> + where + T: DeserializeOwned, + { + let response = self.wait_response(id, REQUEST_TIMEOUT)?; + if let Some(error) = response.error { + return Err(RpcError::ResponseReturnedError { method, id, error }.into()); + } + match response.result { + Some(value) => serde_json::from_value(value) + .map_err(|source| SerdeError::DeserializeResponseResult { method, id, source }) + .map_err(Into::into), + None => Ok(None), + } + } + + pub(super) fn wait_response(&mut self, id: i32, 
timeout: Duration) -> RunnerResult { + if let Some(index) = self + .pending_responses + .iter() + .position(|response| response.id == id.into()) + { + return Ok(self.pending_responses.swap_remove(index)); + } + + let deadline = Instant::now() + timeout; + loop { + let remaining = deadline.saturating_duration_since(Instant::now()); + if remaining.is_zero() { + return Err(TransportError::ResponseTimeout { id }.into()); + } + + match self.conn.receiver.recv_timeout(remaining) { + Ok(Message::Response(response)) if response.id == id.into() => return Ok(response), + Ok(message) => self.capture_background_message(message)?, + Err(RecvTimeoutError::Timeout) => { + return Err(TransportError::ResponseTimeout { id }.into()); + } + Err(RecvTimeoutError::Disconnected) => { + return Err(TransportError::ResponseDisconnected.into()); + } + } + } + } + + pub(super) fn wait_diagnostics_for_uri( + &mut self, + uri: &str, + timeout: Duration, + ) -> RunnerResult { + if let Some(queue) = self.pending_diagnostics.get_mut(uri) { + if let Some(params) = queue.pop_front() { + return Ok(params); + } + } + + let deadline = Instant::now() + timeout; + loop { + let remaining = deadline.saturating_duration_since(Instant::now()); + if remaining.is_zero() { + return Err(TransportError::DiagnosticsTimeout { + uri: uri.to_owned(), + } + .into()); + } + match self.conn.receiver.recv_timeout(remaining) { + Ok(message) => { + self.capture_background_message(message)?; + if let Some(queue) = self.pending_diagnostics.get_mut(uri) { + if let Some(params) = queue.pop_front() { + return Ok(params); + } + } + } + Err(RecvTimeoutError::Timeout) => { + return Err(TransportError::DiagnosticsTimeout { + uri: uri.to_owned(), + } + .into()); + } + Err(RecvTimeoutError::Disconnected) => { + return Err(TransportError::DiagnosticsDisconnected.into()); + } + } + } + } + + pub(super) fn capture_background_message(&mut self, message: Message) -> RunnerResult<()> { + match message { + Message::Response(response) => { + 
self.pending_responses.push(response); + Ok(()) + } + Message::Notification(notification) + if notification.method == PublishDiagnostics::METHOD => + { + let params: lsp_types::PublishDiagnosticsParams = + serde_json::from_value(notification.params) + .map_err(|source| SerdeError::DeserializePublishDiagnostics { source })?; + let key = params.uri.as_str().to_owned(); + self.pending_diagnostics + .entry(key) + .or_default() + .push_back(params); + self.last_diagnostic_at = Some(Instant::now()); + Ok(()) + } + Message::Notification(_) | Message::Request(_) => Ok(()), + } + } + + fn send_notification(&self, notification: Notification) -> RunnerResult<()> { + self.conn + .sender + .send(Message::Notification(notification)) + .map_err(|source| TransportError::SendNotification { + source: Box::new(source), + }) + .map_err(Into::into) + } + + fn send_request(&self, request: Request) -> RunnerResult<()> { + self.conn + .sender + .send(Message::Request(request)) + .map_err(|source| TransportError::SendRequest { + source: Box::new(source), + }) + .map_err(Into::into) + } +} diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_script/compile.rs b/crates/jrsonnet-lsp-scenario/src/scenario_script/compile.rs new file mode 100644 index 00000000..2cedc6da --- /dev/null +++ b/crates/jrsonnet-lsp-scenario/src/scenario_script/compile.rs @@ -0,0 +1,1348 @@ +//! Semantic compilation from YAML script AST to executable scenario steps. +//! +//! This module translates deserialized script records into +//! [`crate::scenario::ScenarioStep`] values. During compilation it: +//! - resolves relative paths and file URIs from the scenario base directory +//! - parses and tracks inline markers through file mutations +//! - allocates/matches request IDs and aliases +//! - translates shorthand expected payloads into concrete LSP structures +//! - enforces cross-step invariants (duplicate aliases, unknown markers, etc.) 
+ +use std::{ + collections::{BTreeMap, HashMap}, + path::Path, +}; + +use lsp_types::CodeActionKind; +use serde::Deserialize; +use thiserror::Error; + +use super::{ + inputs::{ + CodeActionOrCommandInput, CodeLensInput, DiagnosticInput, ExpectCompletionScriptStep, + ExpectDocumentSymbolScriptStep, ExpectExecuteCommandScriptStep, ExpectFormattingScriptStep, + ExpectHoverScriptStep, ExpectPrepareRenameScriptStep, ExpectSignatureHelpScriptStep, + GotoDefinitionResponseInput, InlayHintInput, LocationInput, SemanticTokensResultInput, + WorkspaceEditInput, WorkspaceSymbolResponseInput, + }, + markers::{MarkerStore, PositionSpec, RangeInput}, + paths::{file_path, file_uri}, + registry::{RequestKind, RequestRegistry}, +}; +use crate::scenario::{ + ChangeFullStep, ChangeIncrementalStep, CloseStep, ConfigStep, DeleteFileStep, + DiagnosticsSettledStep, ExpectCodeActionStep, ExpectCodeLensStep, ExpectCompletionStep, + ExpectDeclarationStep, ExpectDefinitionStep, ExpectDiagnosticsStep, ExpectDocumentSymbolStep, + ExpectExecuteCommandStep, ExpectFormattingStep, ExpectHoverStep, ExpectHoverTypeStep, + ExpectInlayHintsStep, ExpectPrepareRenameStep, ExpectReferencesStep, ExpectRenameStep, + ExpectSemanticTokensFullStep, ExpectSemanticTokensRangeStep, ExpectSignatureHelpStep, + ExpectTypeDefinitionStep, ExpectWorkspaceSymbolStep, NotifyWatchedFilesStep, OpenStep, + RequestCodeActionStep, RequestCodeLensStep, RequestCompletionStep, RequestDeclarationStep, + RequestDefinitionStep, RequestDocumentSymbolStep, RequestExecuteCommandStep, + RequestFormattingStep, RequestHoverStep, RequestInlayHintsStep, RequestPrepareRenameStep, + RequestReferencesStep, RequestRenameStep, RequestSemanticTokensFullStep, + RequestSemanticTokensRangeStep, RequestSignatureHelpStep, RequestTypeDefinitionStep, + RequestWorkspaceSymbolStep, SaveStep, Scenario, ScenarioFileChangeType, ScenarioStep, + TypeMatchMode, WatchedFileChangeStep, WriteFileStep, +}; + +/// Parsed YAML root object for one scenario script 
file. +/// +/// Step order is significant and preserved exactly during compilation. +#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(deny_unknown_fields)] +pub(super) struct ScenarioScript { + steps: Vec, +} + +/// Error returned when deserialized script data is semantically invalid. +/// +/// This wraps human-readable validation failures discovered during compilation +/// (for example unknown marker names, alias mismatches, or missing request +/// context for shorthand expectations). +#[derive(Debug, Error)] +#[error("{message}")] +pub struct CompileScenarioError { + message: String, +} + +impl From for CompileScenarioError { + fn from(message: String) -> Self { + Self { message } + } +} + +impl ScenarioScript { + /// Compile parsed script steps into an executable scenario timeline. + /// + /// Compilation is stateful: it threads marker state and request alias state + /// through the step sequence so later steps can reference earlier outputs. + /// + /// Most script steps translate to exactly one executable step, with two notable + /// expansions: + /// - `create` expands to one or more `writeFile` steps plus `open` steps. + /// - `expectTypes` expands to repeated `requestHover` + `expectHoverType` pairs. + pub(super) fn compile(self, base_dir: &Path) -> Result { + let mut registry = RequestRegistry::new(); + let mut marker_store = MarkerStore::new(); + // Some expect-step shorthands need the originating request file in order to + // resolve marker-backed positions/ranges in their expected payloads. 
+ let mut code_action_request_files = HashMap::new(); + let mut code_lens_request_files = HashMap::new(); + let mut inlay_hint_request_files = HashMap::new(); + let mut completion_request_files = HashMap::new(); + let mut prepare_rename_request_files = HashMap::new(); + let mut document_symbol_request_files = HashMap::new(); + let mut semantic_tokens_full_request_files = HashMap::new(); + let mut semantic_tokens_range_request_files = HashMap::new(); + let mut steps = Vec::with_capacity(self.steps.len()); + + // Compile in source order so aliases, marker state, and pending requests + // evolve exactly as authored in the scenario script. + for step in self.steps { + let compiled = match step { + ScenarioScriptStep::Open(step) => { + let text = + marker_store.register_full_text(&step.file, step.text, "open.text")?; + vec![ScenarioStep::Open(OpenStep { + uri: file_uri(base_dir, &step.file), + text, + language_id: step.language_id, + version: step.version, + })] + } + ScenarioScriptStep::Create(step) => { + compile_create_step(step, base_dir, &mut marker_store)? 
+ } + ScenarioScriptStep::ChangeFull(step) => { + let text = marker_store.register_full_text( + &step.file, + step.text, + "changeFull.text", + )?; + vec![ScenarioStep::ChangeFull(ChangeFullStep { + uri: file_uri(base_dir, &step.file), + text, + version: step.version, + })] + } + ScenarioScriptStep::ChangeIncremental(step) => { + let range = marker_store + .resolve_range(&step.file, step.range, "changeIncremental") + .map_err(|error| format!("compile changeIncremental: {error}"))?; + let text = marker_store + .register_incremental_text( + &step.file, + range, + step.text, + "changeIncremental.text", + ) + .map_err(|error| format!("compile changeIncremental: {error}"))?; + vec![ScenarioStep::ChangeIncremental(ChangeIncrementalStep { + uri: file_uri(base_dir, &step.file), + range, + text, + version: step.version, + })] + } + ScenarioScriptStep::Save(step) => vec![ScenarioStep::Save(SaveStep { + uri: file_uri(base_dir, &step.file), + text: step + .text + .map(|text| marker_store.register_full_text(&step.file, text, "save.text")) + .transpose()?, + })], + ScenarioScriptStep::Close(step) => vec![ScenarioStep::Close(CloseStep { + uri: file_uri(base_dir, &step.file), + })], + ScenarioScriptStep::Config(step) => vec![ScenarioStep::Config(ConfigStep { + settings: step.settings, + })], + ScenarioScriptStep::WriteFile(step) => { + let text = + marker_store.register_full_text(&step.path, step.text, "writeFile.text")?; + vec![ScenarioStep::WriteFile(WriteFileStep { + path: file_path(base_dir, &step.path), + text, + })] + } + ScenarioScriptStep::DeleteFile(step) => { + marker_store.remove(&step.path); + vec![ScenarioStep::DeleteFile(DeleteFileStep { + path: file_path(base_dir, &step.path), + })] + } + ScenarioScriptStep::NotifyWatchedFiles(step) => { + for change in &step.changes { + if change.change_type == ScenarioFileChangeType::Deleted { + marker_store.remove(&change.path); + } + } + vec![ScenarioStep::NotifyWatchedFiles(NotifyWatchedFilesStep { + changes: step + .changes + 
.into_iter() + .map(|change| WatchedFileChangeStep { + uri: file_uri(base_dir, &change.path), + change_type: change.change_type, + }) + .collect(), + })] + } + ScenarioScriptStep::RequestCodeAction(step) => { + let diagnostics = step + .diagnostics + .into_iter() + .map(|diagnostic| { + diagnostic.resolve_with_file( + &marker_store, + &step.file, + "requestCodeAction.diagnostics", + ) + }) + .collect::, _>>()?; + let request_id = + registry.allocate(RequestKind::CodeAction, step.request_name)?; + code_action_request_files.insert(request_id, step.file.clone()); + vec![ScenarioStep::RequestCodeAction(RequestCodeActionStep { + id: request_id, + uri: file_uri(base_dir, &step.file), + range: marker_store + .resolve_range(&step.file, step.range, "requestCodeAction") + .map_err(|error| format!("compile requestCodeAction: {error}"))?, + diagnostics, + only: step.only, + })] + } + ScenarioScriptStep::ExpectCodeAction(step) => { + let request_id = + registry.claim(RequestKind::CodeAction, step.request.as_deref())?; + let default_file = code_action_request_files + .get(&request_id) + .map(String::as_str); + let result = step + .result + .map(|actions| { + actions + .into_iter() + .map(|action| { + action.resolve( + base_dir, + &marker_store, + default_file, + "expectCodeAction.result", + ) + }) + .collect::, _>>() + }) + .transpose()?; + vec![ScenarioStep::ExpectCodeAction(ExpectCodeActionStep { + id: request_id, + result, + })] + } + ScenarioScriptStep::RequestReferences(step) => { + vec![ScenarioStep::RequestReferences(RequestReferencesStep { + id: registry.allocate(RequestKind::References, step.request_name)?, + uri: file_uri(base_dir, &step.file), + position: marker_store.resolve_position( + &step.file, + step.at, + "requestReferences", + )?, + include_declaration: step.include_declaration, + })] + } + ScenarioScriptStep::ExpectReferences(step) => { + let result = step + .result + .map(|locations| { + locations + .into_iter() + .map(|location| { + 
location.resolve_location( + base_dir, + &marker_store, + "expectReferences.result", + ) + }) + .collect::, _>>() + }) + .transpose()?; + vec![ScenarioStep::ExpectReferences(ExpectReferencesStep { + id: registry.claim(RequestKind::References, step.request.as_deref())?, + result, + })] + } + ScenarioScriptStep::RequestDefinition(step) => { + vec![ScenarioStep::RequestDefinition(RequestDefinitionStep { + id: registry.allocate(RequestKind::Definition, step.request_name)?, + uri: file_uri(base_dir, &step.file), + position: marker_store.resolve_position( + &step.file, + step.at, + "requestDefinition", + )?, + })] + } + ScenarioScriptStep::ExpectDefinition(step) => { + vec![ScenarioStep::ExpectDefinition(ExpectDefinitionStep { + id: registry.claim(RequestKind::Definition, step.request.as_deref())?, + result: step + .result + .map(|result| { + result.resolve(base_dir, &marker_store, "expectDefinition.result") + }) + .transpose()?, + })] + } + ScenarioScriptStep::RequestDeclaration(step) => { + vec![ScenarioStep::RequestDeclaration(RequestDeclarationStep { + id: registry.allocate(RequestKind::Declaration, step.request_name)?, + uri: file_uri(base_dir, &step.file), + position: marker_store.resolve_position( + &step.file, + step.at, + "requestDeclaration", + )?, + })] + } + ScenarioScriptStep::ExpectDeclaration(step) => { + vec![ScenarioStep::ExpectDeclaration(ExpectDeclarationStep { + id: registry.claim(RequestKind::Declaration, step.request.as_deref())?, + result: step + .result + .map(|result| { + result.resolve(base_dir, &marker_store, "expectDeclaration.result") + }) + .transpose()?, + })] + } + ScenarioScriptStep::RequestTypeDefinition(step) => { + vec![ScenarioStep::RequestTypeDefinition( + RequestTypeDefinitionStep { + id: registry + .allocate(RequestKind::TypeDefinition, step.request_name)?, + uri: file_uri(base_dir, &step.file), + position: marker_store.resolve_position( + &step.file, + step.at, + "requestTypeDefinition", + )?, + }, + )] + } + 
ScenarioScriptStep::ExpectTypeDefinition(step) => { + vec![ScenarioStep::ExpectTypeDefinition( + ExpectTypeDefinitionStep { + id: registry + .claim(RequestKind::TypeDefinition, step.request.as_deref())?, + result: step + .result + .map(|result| { + result.resolve( + base_dir, + &marker_store, + "expectTypeDefinition.result", + ) + }) + .transpose()?, + }, + )] + } + ScenarioScriptStep::RequestPrepareRename(step) => { + let request_id = + registry.allocate(RequestKind::PrepareRename, step.request_name)?; + prepare_rename_request_files.insert(request_id, step.file.clone()); + vec![ScenarioStep::RequestPrepareRename( + RequestPrepareRenameStep { + id: request_id, + uri: file_uri(base_dir, &step.file), + position: marker_store.resolve_position( + &step.file, + step.at, + "requestPrepareRename", + )?, + }, + )] + } + ScenarioScriptStep::ExpectPrepareRename(step) => { + let request_id = + registry.claim(RequestKind::PrepareRename, step.request.as_deref())?; + let file = prepare_rename_request_files + .get(&request_id) + .ok_or_else(|| { + format!( + "expectPrepareRename: missing request file context for request id {request_id}" + ) + })?; + vec![ScenarioStep::ExpectPrepareRename(ExpectPrepareRenameStep { + id: request_id, + result: step.resolve_result( + &marker_store, + file, + "expectPrepareRename.result", + )?, + })] + } + ScenarioScriptStep::RequestRename(step) => { + vec![ScenarioStep::RequestRename(RequestRenameStep { + id: registry.allocate(RequestKind::Rename, step.request_name)?, + uri: file_uri(base_dir, &step.file), + position: marker_store.resolve_position( + &step.file, + step.at, + "requestRename", + )?, + new_name: step.new_name, + })] + } + ScenarioScriptStep::ExpectRename(step) => { + vec![ScenarioStep::ExpectRename(ExpectRenameStep { + id: registry.claim(RequestKind::Rename, step.request.as_deref())?, + result: step + .result + .map(|result| { + result.resolve(base_dir, &marker_store, "expectRename.result") + }) + .transpose()?, + })] + } + 
ScenarioScriptStep::RequestHover(step) => { + vec![ScenarioStep::RequestHover(RequestHoverStep { + id: registry.allocate(RequestKind::Hover, step.request_name)?, + uri: file_uri(base_dir, &step.file), + position: marker_store.resolve_position( + &step.file, + step.at, + "requestHover", + )?, + })] + } + ScenarioScriptStep::ExpectHover(step) => { + vec![ScenarioStep::ExpectHover(ExpectHoverStep { + id: registry.claim(RequestKind::Hover, step.request.as_deref())?, + result: step.result, + })] + } + ScenarioScriptStep::ExpectHoverType(step) => { + vec![ScenarioStep::ExpectHoverType(ExpectHoverTypeStep { + id: registry.claim(RequestKind::Hover, step.request.as_deref())?, + expected_type: step.expected_type, + match_mode: step.match_mode, + })] + } + ScenarioScriptStep::RequestSignatureHelp(step) => { + vec![ScenarioStep::RequestSignatureHelp( + RequestSignatureHelpStep { + id: registry.allocate(RequestKind::SignatureHelp, step.request_name)?, + uri: file_uri(base_dir, &step.file), + position: marker_store.resolve_position( + &step.file, + step.at, + "requestSignatureHelp", + )?, + }, + )] + } + ScenarioScriptStep::ExpectSignatureHelp(step) => { + vec![ScenarioStep::ExpectSignatureHelp(ExpectSignatureHelpStep { + id: registry.claim(RequestKind::SignatureHelp, step.request.as_deref())?, + result: step.result, + })] + } + ScenarioScriptStep::ExpectTypes(step) => { + let file = step.file; + // Sugar step: each type check expands to an isolated hover + // request/expect pair so failures report per-check locations. 
+ step.checks + .into_iter() + .try_fold(Vec::new(), |mut steps, check| { + let id = registry.allocate(RequestKind::Hover, None)?; + let request = ScenarioStep::RequestHover(RequestHoverStep { + id, + uri: file_uri(base_dir, &file), + position: marker_store.resolve_position( + &file, + check.at, + "expectTypes.checks", + )?, + }); + let expect = ScenarioStep::ExpectHoverType(ExpectHoverTypeStep { + id, + expected_type: check.expected_type, + match_mode: check.match_mode, + }); + steps.push(request); + steps.push(expect); + Ok::, String>(steps) + })? + } + ScenarioScriptStep::RequestCompletion(step) => { + let request_id = + registry.allocate(RequestKind::Completion, step.request_name)?; + completion_request_files.insert(request_id, step.file.clone()); + vec![ScenarioStep::RequestCompletion(RequestCompletionStep { + id: request_id, + uri: file_uri(base_dir, &step.file), + position: marker_store.resolve_position( + &step.file, + step.at, + "requestCompletion", + )?, + })] + } + ScenarioScriptStep::ExpectCompletion(step) => { + let request_id = + registry.claim(RequestKind::Completion, step.request.as_deref())?; + let file = completion_request_files.get(&request_id).ok_or_else(|| { + format!( + "expectCompletion: missing request file context for request id {request_id}" + ) + })?; + let labels = step.labels.clone(); + let allow_extra = step.allow_extra; + vec![ScenarioStep::ExpectCompletion(ExpectCompletionStep { + id: request_id, + result: step.resolve_result( + &marker_store, + file, + "expectCompletion.result", + )?, + labels, + allow_extra, + })] + } + ScenarioScriptStep::RequestFormatting(step) => { + vec![ScenarioStep::RequestFormatting(RequestFormattingStep { + id: registry.allocate(RequestKind::Formatting, step.request_name)?, + uri: file_uri(base_dir, &step.file), + tab_size: step.tab_size, + insert_spaces: step.insert_spaces, + })] + } + ScenarioScriptStep::ExpectFormatting(step) => { + vec![ScenarioStep::ExpectFormatting(ExpectFormattingStep { + id: 
registry.claim(RequestKind::Formatting, step.request.as_deref())?, + result: step.result, + })] + } + ScenarioScriptStep::RequestSemanticTokensFull(step) => { + let request_id = + registry.allocate(RequestKind::SemanticTokensFull, step.request_name)?; + semantic_tokens_full_request_files.insert(request_id, step.file.clone()); + vec![ScenarioStep::RequestSemanticTokensFull( + RequestSemanticTokensFullStep { + id: request_id, + uri: file_uri(base_dir, &step.file), + }, + )] + } + ScenarioScriptStep::ExpectSemanticTokensFull(step) => { + let request_id = + registry.claim(RequestKind::SemanticTokensFull, step.request.as_deref())?; + let file = semantic_tokens_full_request_files + .get(&request_id) + .ok_or_else(|| { + format!( + "expectSemanticTokensFull: missing request file context for request id {request_id}" + ) + })?; + vec![ScenarioStep::ExpectSemanticTokensFull( + ExpectSemanticTokensFullStep { + id: request_id, + result: step + .result + .map(|result| { + result.resolve_full( + &marker_store, + file, + "expectSemanticTokensFull.result", + ) + }) + .transpose()?, + }, + )] + } + ScenarioScriptStep::RequestSemanticTokensRange(step) => { + let request_id = + registry.allocate(RequestKind::SemanticTokensRange, step.request_name)?; + semantic_tokens_range_request_files.insert(request_id, step.file.clone()); + vec![ScenarioStep::RequestSemanticTokensRange( + RequestSemanticTokensRangeStep { + id: request_id, + uri: file_uri(base_dir, &step.file), + range: marker_store.resolve_range( + &step.file, + step.range, + "requestSemanticTokensRange", + )?, + }, + )] + } + ScenarioScriptStep::ExpectSemanticTokensRange(step) => { + let request_id = registry + .claim(RequestKind::SemanticTokensRange, step.request.as_deref())?; + let file = semantic_tokens_range_request_files + .get(&request_id) + .ok_or_else(|| { + format!( + "expectSemanticTokensRange: missing request file context for request id {request_id}" + ) + })?; + vec![ScenarioStep::ExpectSemanticTokensRange( + 
ExpectSemanticTokensRangeStep { + id: request_id, + result: step + .result + .map(|result| { + result.resolve_range( + &marker_store, + file, + "expectSemanticTokensRange.result", + ) + }) + .transpose()?, + }, + )] + } + ScenarioScriptStep::RequestInlayHints(step) => { + let request_id = + registry.allocate(RequestKind::InlayHints, step.request_name)?; + inlay_hint_request_files.insert(request_id, step.file.clone()); + vec![ScenarioStep::RequestInlayHints(RequestInlayHintsStep { + id: request_id, + uri: file_uri(base_dir, &step.file), + range: marker_store.resolve_range( + &step.file, + step.range, + "requestInlayHints", + )?, + })] + } + ScenarioScriptStep::ExpectInlayHints(step) => { + let request_id = + registry.claim(RequestKind::InlayHints, step.request.as_deref())?; + let file = inlay_hint_request_files.get(&request_id).ok_or_else(|| { + format!( + "expectInlayHints: missing request file context for request id {request_id}" + ) + })?; + let result = step + .result + .map(|hints| { + hints + .into_iter() + .map(|hint| { + hint.resolve(&marker_store, file, "expectInlayHints.result") + }) + .collect::, _>>() + }) + .transpose()?; + vec![ScenarioStep::ExpectInlayHints(ExpectInlayHintsStep { + id: request_id, + result, + })] + } + ScenarioScriptStep::RequestDocumentSymbol(step) => { + let request_id = + registry.allocate(RequestKind::DocumentSymbol, step.request_name)?; + document_symbol_request_files.insert(request_id, step.file.clone()); + vec![ScenarioStep::RequestDocumentSymbol( + RequestDocumentSymbolStep { + id: request_id, + uri: file_uri(base_dir, &step.file), + }, + )] + } + ScenarioScriptStep::ExpectDocumentSymbol(step) => { + let request_id = + registry.claim(RequestKind::DocumentSymbol, step.request.as_deref())?; + let file = document_symbol_request_files + .get(&request_id) + .ok_or_else(|| { + format!( + "expectDocumentSymbol: missing request file context for request id {request_id}" + ) + })?; + vec![ScenarioStep::ExpectDocumentSymbol( + 
ExpectDocumentSymbolStep { + id: request_id, + result: step.resolve_result( + &marker_store, + file, + "expectDocumentSymbol.result", + )?, + }, + )] + } + ScenarioScriptStep::RequestWorkspaceSymbol(step) => { + vec![ScenarioStep::RequestWorkspaceSymbol( + RequestWorkspaceSymbolStep { + id: registry + .allocate(RequestKind::WorkspaceSymbol, step.request_name)?, + query: step.query, + }, + )] + } + ScenarioScriptStep::ExpectWorkspaceSymbol(step) => { + vec![ScenarioStep::ExpectWorkspaceSymbol( + ExpectWorkspaceSymbolStep { + id: registry + .claim(RequestKind::WorkspaceSymbol, step.request.as_deref())?, + result: step + .result + .map(|result| { + result.resolve( + base_dir, + &marker_store, + "expectWorkspaceSymbol.result", + ) + }) + .transpose()?, + }, + )] + } + ScenarioScriptStep::RequestCodeLens(step) => { + let request_id = registry.allocate(RequestKind::CodeLens, step.request_name)?; + code_lens_request_files.insert(request_id, step.file.clone()); + vec![ScenarioStep::RequestCodeLens(RequestCodeLensStep { + id: request_id, + uri: file_uri(base_dir, &step.file), + })] + } + ScenarioScriptStep::ExpectCodeLens(step) => { + let request_id = + registry.claim(RequestKind::CodeLens, step.request.as_deref())?; + let default_file = code_lens_request_files.get(&request_id).map(String::as_str); + vec![ScenarioStep::ExpectCodeLens(ExpectCodeLensStep { + id: request_id, + result: step + .result + .map(|result| { + result + .into_iter() + .map(|lens| { + lens.resolve( + base_dir, + &marker_store, + default_file, + "expectCodeLens.result", + ) + }) + .collect::, _>>() + }) + .transpose()?, + })] + } + ScenarioScriptStep::RequestExecuteCommand(step) => { + vec![ScenarioStep::RequestExecuteCommand( + RequestExecuteCommandStep { + id: registry + .allocate(RequestKind::ExecuteCommand, step.request_name)?, + command: step.command, + arguments: step.arguments, + }, + )] + } + ScenarioScriptStep::ExpectExecuteCommand(step) => { + vec![ScenarioStep::ExpectExecuteCommand( + 
ExpectExecuteCommandStep { + id: registry + .claim(RequestKind::ExecuteCommand, step.request.as_deref())?, + result: step.result, + }, + )] + } + ScenarioScriptStep::ExpectDiagnostics(step) => { + let diagnostics = step + .diagnostics + .into_iter() + .map(|diagnostic| { + diagnostic.resolve_with_file( + &marker_store, + &step.file, + "expectDiagnostics.diagnostics", + ) + }) + .collect::, _>>()?; + vec![ScenarioStep::ExpectDiagnostics(ExpectDiagnosticsStep { + uri: file_uri(base_dir, &step.file), + diagnostics, + })] + } + ScenarioScriptStep::DiagnosticsSettled(step) => { + vec![ScenarioStep::DiagnosticsSettled(DiagnosticsSettledStep { + timeout_ms: step.timeout_ms, + idle_ms: step.idle_ms, + })] + } + }; + steps.extend(compiled); + } + + Ok(Scenario::new(steps)) + } +} + +/// Lower a `create` script step into `writeFile` + `open` scenario steps. +/// +/// This also seeds marker tracking for each created file, so all subsequent +/// marker-based references resolve against the cleaned text content. 
+fn compile_create_step( + step: CreateScriptStep, + base_dir: &Path, + marker_store: &mut MarkerStore, +) -> Result, String> { + if step.files.is_empty() { + return Err("create: `files` must include at least one file".to_string()); + } + + let mut steps = Vec::with_capacity(step.files.len() * 2); + for (relative_path, text) in &step.files { + let text = marker_store.register_full_text(relative_path, text.clone(), "create.files")?; + steps.push(ScenarioStep::WriteFile(WriteFileStep { + path: file_path(base_dir, relative_path), + text, + })); + } + + let files_to_open = match step.open { + Some(paths) => paths, + None => step.files.keys().cloned().collect(), + }; + for relative_path in files_to_open { + if !step.files.contains_key(&relative_path) { + return Err(format!( + "create: `open` references unknown file '{relative_path}', expected one of: {:?}", + step.files.keys().collect::>() + )); + } + let text = marker_store + .full_text(&relative_path) + .ok_or_else(|| format!("create: missing parsed text for '{relative_path}'"))? + .to_string(); + steps.push(ScenarioStep::Open(OpenStep { + uri: file_uri(base_dir, &relative_path), + text, + language_id: step.language_id.clone(), + version: step.version, + })); + } + + Ok(steps) +} + +/// Raw YAML `step` union, deserialized before semantic validation. +/// +/// These payload types intentionally mirror the DSL surface. Cross-step checks +/// (alias matching, marker existence, request ordering) happen in `compile`. 
+#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(tag = "step", rename_all = "camelCase")] +enum ScenarioScriptStep { + Open(OpenScriptStep), + Create(CreateScriptStep), + ChangeFull(ChangeFullScriptStep), + ChangeIncremental(ChangeIncrementalScriptStep), + Save(SaveScriptStep), + Close(CloseScriptStep), + Config(ConfigScriptStep), + WriteFile(WriteFileScriptStep), + DeleteFile(DeleteFileScriptStep), + NotifyWatchedFiles(NotifyWatchedFilesScriptStep), + RequestCodeAction(RequestCodeActionScriptStep), + ExpectCodeAction(ExpectCodeActionScriptStep), + RequestReferences(RequestReferencesScriptStep), + ExpectReferences(ExpectReferencesScriptStep), + RequestDefinition(RequestDefinitionScriptStep), + ExpectDefinition(ExpectDefinitionScriptStep), + RequestDeclaration(RequestDeclarationScriptStep), + ExpectDeclaration(ExpectDeclarationScriptStep), + RequestTypeDefinition(RequestTypeDefinitionScriptStep), + ExpectTypeDefinition(ExpectTypeDefinitionScriptStep), + RequestPrepareRename(RequestPrepareRenameScriptStep), + ExpectPrepareRename(ExpectPrepareRenameScriptStep), + RequestRename(RequestRenameScriptStep), + ExpectRename(ExpectRenameScriptStep), + RequestHover(RequestHoverScriptStep), + ExpectHover(ExpectHoverScriptStep), + ExpectHoverType(ExpectHoverTypeScriptStep), + RequestSignatureHelp(RequestSignatureHelpScriptStep), + ExpectSignatureHelp(ExpectSignatureHelpScriptStep), + ExpectTypes(ExpectTypesScriptStep), + RequestCompletion(RequestCompletionScriptStep), + ExpectCompletion(ExpectCompletionScriptStep), + RequestFormatting(RequestFormattingScriptStep), + ExpectFormatting(ExpectFormattingScriptStep), + RequestSemanticTokensFull(RequestSemanticTokensFullScriptStep), + ExpectSemanticTokensFull(ExpectSemanticTokensFullScriptStep), + RequestSemanticTokensRange(RequestSemanticTokensRangeScriptStep), + ExpectSemanticTokensRange(ExpectSemanticTokensRangeScriptStep), + RequestInlayHints(RequestInlayHintsScriptStep), + ExpectInlayHints(ExpectInlayHintsScriptStep), + 
RequestDocumentSymbol(RequestDocumentSymbolScriptStep), + ExpectDocumentSymbol(ExpectDocumentSymbolScriptStep), + RequestWorkspaceSymbol(RequestWorkspaceSymbolScriptStep), + ExpectWorkspaceSymbol(ExpectWorkspaceSymbolScriptStep), + RequestCodeLens(RequestCodeLensScriptStep), + ExpectCodeLens(ExpectCodeLensScriptStep), + RequestExecuteCommand(RequestExecuteCommandScriptStep), + ExpectExecuteCommand(ExpectExecuteCommandScriptStep), + ExpectDiagnostics(ExpectDiagnosticsScriptStep), + DiagnosticsSettled(DiagnosticsSettledScriptStep), +} + +/// `create` creates one or more files and optionally opens a subset of them. +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct CreateScriptStep { + files: BTreeMap, + #[serde(default)] + open: Option>, + #[serde(default = "default_language_id")] + language_id: String, + #[serde(default = "default_open_version")] + version: i32, +} + +/// `open` seeds marker-aware text for one file and opens it in the runner. +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct OpenScriptStep { + file: String, + text: String, + #[serde(default = "default_language_id")] + language_id: String, + #[serde(default = "default_open_version")] + version: i32, +} + +/// `changeFull` replaces full document text and updates marker tracking. +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct ChangeFullScriptStep { + file: String, + text: String, + version: i32, +} + +/// `changeIncremental` applies a ranged edit with marker-aware range parsing. +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct ChangeIncrementalScriptStep { + file: String, + #[serde(flatten)] + range: RangeInput, + text: String, + version: i32, +} + +/// `save` can optionally send explicit text (for save-with-content scenarios). 
+#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct SaveScriptStep { + file: String, + text: Option, +} + +/// `close` closes one open document URI. +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct CloseScriptStep { + file: String, +} + +/// `config` mutates runtime server settings. +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct ConfigScriptStep { + settings: serde_json::Value, +} + +/// `writeFile` mutates workspace files without opening a document. +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct WriteFileScriptStep { + path: String, + text: String, +} + +/// `deleteFile` removes a workspace file. +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct DeleteFileScriptStep { + path: String, +} + +/// `notifyWatchedFiles` sends synthetic file-watch notifications. +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct NotifyWatchedFilesScriptStep { + changes: Vec, +} + +/// One watched-file change entry. +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct WatchedFileChangeScriptStep { + path: String, + #[serde(rename = "type")] + change_type: ScenarioFileChangeType, +} + +// Request/expect payloads generally follow the same aliasing contract: +// `request*` steps may define `as`, and matching `expect*` steps may reference +// that alias via `request` or consume by FIFO order when omitted. +/// `requestCodeAction` optionally names the request and captures context. 
+#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(deny_unknown_fields)] +struct RequestCodeActionScriptStep { + #[serde(default, rename = "as")] + request_name: Option, + file: String, + #[serde(flatten)] + range: RangeInput, + #[serde(default)] + diagnostics: Vec, + #[serde(default)] + only: Option>, +} + +/// `expectCodeAction` can match by explicit request alias or FIFO order. +#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(deny_unknown_fields)] +struct ExpectCodeActionScriptStep { + #[serde(default)] + request: Option, + result: Option>, +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct RequestReferencesScriptStep { + #[serde(default, rename = "as")] + request_name: Option, + file: String, + #[serde(default)] + at: Option, + #[serde(default)] + include_declaration: bool, +} + +#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(deny_unknown_fields)] +struct ExpectReferencesScriptStep { + #[serde(default)] + request: Option, + result: Option>, +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct RequestDefinitionScriptStep { + #[serde(default, rename = "as")] + request_name: Option, + file: String, + #[serde(default)] + at: Option, +} + +#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(deny_unknown_fields)] +struct ExpectDefinitionScriptStep { + #[serde(default)] + request: Option, + result: Option, +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct RequestDeclarationScriptStep { + #[serde(default, rename = "as")] + request_name: Option, + file: String, + #[serde(default)] + at: Option, +} + +#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(deny_unknown_fields)] +struct ExpectDeclarationScriptStep { + #[serde(default)] + request: Option, + result: Option, +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct RequestTypeDefinitionScriptStep { + 
#[serde(default, rename = "as")] + request_name: Option, + file: String, + #[serde(default)] + at: Option, +} + +#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(deny_unknown_fields)] +struct ExpectTypeDefinitionScriptStep { + #[serde(default)] + request: Option, + result: Option, +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct RequestPrepareRenameScriptStep { + #[serde(default, rename = "as")] + request_name: Option, + file: String, + #[serde(default)] + at: Option, +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct RequestRenameScriptStep { + #[serde(default, rename = "as")] + request_name: Option, + file: String, + #[serde(default)] + at: Option, + new_name: String, +} + +#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(deny_unknown_fields)] +struct ExpectRenameScriptStep { + #[serde(default)] + request: Option, + result: Option, +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct RequestHoverScriptStep { + #[serde(default, rename = "as")] + request_name: Option, + file: String, + #[serde(default)] + at: Option, +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct ExpectHoverTypeScriptStep { + #[serde(default)] + request: Option, + #[serde(rename = "type")] + expected_type: String, + #[serde(default, rename = "match")] + match_mode: TypeMatchMode, +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct RequestSignatureHelpScriptStep { + #[serde(default, rename = "as")] + request_name: Option, + file: String, + #[serde(default)] + at: Option, +} + +/// `expectTypes` shorthand for a list of marker-position type assertions. 
+#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct ExpectTypesScriptStep { + file: String, + checks: Vec, +} + +/// One `expectTypes.checks` entry, expanded to requestHover + expectHoverType. +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct TypeExpectationScriptStep { + #[serde(default)] + at: Option, + #[serde(rename = "type")] + expected_type: String, + #[serde(default, rename = "match")] + match_mode: TypeMatchMode, +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct RequestCompletionScriptStep { + #[serde(default, rename = "as")] + request_name: Option, + file: String, + #[serde(default)] + at: Option, +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct RequestFormattingScriptStep { + #[serde(default, rename = "as")] + request_name: Option, + file: String, + #[serde(default = "default_formatting_tab_size")] + tab_size: u32, + #[serde(default = "default_formatting_insert_spaces")] + insert_spaces: bool, +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct RequestSemanticTokensFullScriptStep { + #[serde(default, rename = "as")] + request_name: Option, + file: String, +} + +#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(deny_unknown_fields)] +struct ExpectSemanticTokensFullScriptStep { + #[serde(default)] + request: Option, + result: Option, +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct RequestSemanticTokensRangeScriptStep { + #[serde(default, rename = "as")] + request_name: Option, + file: String, + #[serde(flatten)] + range: RangeInput, +} + +#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(deny_unknown_fields)] +struct ExpectSemanticTokensRangeScriptStep { + #[serde(default)] + request: Option, + result: Option, +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] 
+#[serde(deny_unknown_fields)] +struct RequestInlayHintsScriptStep { + #[serde(default, rename = "as")] + request_name: Option, + file: String, + #[serde(flatten)] + range: RangeInput, +} + +#[derive(Debug, Clone, Deserialize)] +#[serde(deny_unknown_fields)] +struct ExpectInlayHintsScriptStep { + #[serde(default)] + request: Option, + result: Option>, +} + +// `InlayHintInput` defines custom equality semantics (JSON-form comparison for +// selected fields), so this step keeps equality aligned with that behavior. +impl PartialEq for ExpectInlayHintsScriptStep { + fn eq(&self, other: &Self) -> bool { + self.request == other.request && self.result == other.result + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct RequestDocumentSymbolScriptStep { + #[serde(default, rename = "as")] + request_name: Option, + file: String, +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct RequestWorkspaceSymbolScriptStep { + #[serde(default, rename = "as")] + request_name: Option, + query: String, +} + +#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(deny_unknown_fields)] +struct ExpectWorkspaceSymbolScriptStep { + #[serde(default)] + request: Option, + result: Option, +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct RequestCodeLensScriptStep { + #[serde(default, rename = "as")] + request_name: Option, + file: String, +} + +#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(deny_unknown_fields)] +struct ExpectCodeLensScriptStep { + #[serde(default)] + request: Option, + result: Option>, +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct RequestExecuteCommandScriptStep { + #[serde(default, rename = "as")] + request_name: Option, + command: String, + #[serde(default)] + arguments: Vec, +} + +#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(deny_unknown_fields)] +struct 
ExpectDiagnosticsScriptStep { + file: String, + #[serde(default)] + diagnostics: Vec, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct DiagnosticsSettledScriptStep { + /// Total time budget waiting for diagnostics to settle. + #[serde(default = "default_timeout_ms")] + timeout_ms: u64, + /// Required quiet window with no diagnostics updates before success. + #[serde(default = "default_idle_ms")] + idle_ms: u64, +} + +// DSL defaults chosen to mirror common client behavior in tests. +const fn default_open_version() -> i32 { + 1 +} + +fn default_language_id() -> String { + "jsonnet".to_string() +} + +const fn default_formatting_tab_size() -> u32 { + 2 +} + +const fn default_formatting_insert_spaces() -> bool { + true +} + +const fn default_timeout_ms() -> u64 { + 1_000 +} + +const fn default_idle_ms() -> u64 { + 50 +} diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_script/inputs.rs b/crates/jrsonnet-lsp-scenario/src/scenario_script/inputs.rs new file mode 100644 index 00000000..9ad79483 --- /dev/null +++ b/crates/jrsonnet-lsp-scenario/src/scenario_script/inputs.rs @@ -0,0 +1,1147 @@ +//! Shorthand input decoders used by scenario compilation. +//! +//! The scenario YAML supports both full LSP JSON objects and concise shorthand +//! forms. This module resolves shorthand into concrete `lsp_types` structures, +//! including marker-aware expansions such as `{ positionOf: "name" }` and +//! `{ rangeOf: "name" }` inside expectation payloads. 
+ +use std::{ + collections::{BTreeMap, HashMap}, + path::Path, + str::FromStr, +}; + +use jrsonnet_lsp_handlers::{SemanticTokenModifierName, SemanticTokenTypeName}; +use lsp_types::{ + CodeAction, CodeActionKind, CodeActionOrCommand, CodeLens, CompletionResponse, Diagnostic, + DiagnosticSeverity, DocumentSymbolResponse, GotoDefinitionResponse, Hover, InlayHint, Location, + NumberOrString, PrepareRenameResponse, SemanticTokens, SemanticTokensRangeResult, + SemanticTokensResult, SignatureHelp, SymbolInformation, SymbolKind, TextEdit, WorkspaceEdit, + WorkspaceSymbolResponse, +}; +use serde::{de::DeserializeOwned, Deserialize}; + +use super::{ + markers::{MarkerStore, PositionFieldInput, PositionSpec, RangeFieldInput, RangeInput}, + paths::file_uri, +}; +use crate::semantic_tokens::{encode_semantic_tokens, semantic_modifiers, ExpectedSemanticToken}; + +/// Accept either full `Diagnostic` JSON or concise shorthand fields. +#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(untagged)] +pub(super) enum DiagnosticInput { + Shorthand(DiagnosticShorthandInput), + Full(Diagnostic), +} + +impl DiagnosticInput { + /// Resolve a diagnostic input using marker context from a specific file. + pub(super) fn resolve_with_file( + self, + marker_store: &MarkerStore, + file: &str, + context: &str, + ) -> Result { + match self { + Self::Full(diagnostic) => Ok(diagnostic), + Self::Shorthand(shorthand) => shorthand.resolve(marker_store, file, context), + } + } + + /// Resolve a diagnostic input without marker context. 
+ pub(super) fn resolve_without_markers(self, context: &str) -> Result { + match self { + Self::Full(diagnostic) => Ok(diagnostic), + Self::Shorthand(shorthand) => shorthand.resolve_without_markers(context), + } + } + + fn resolve_with_optional_file( + self, + marker_store: &MarkerStore, + file: Option<&str>, + context: &str, + ) -> Result { + match file { + Some(file) => self.resolve_with_file(marker_store, file, context), + None => self.resolve_without_markers(context), + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +pub(super) struct DiagnosticShorthandInput { + #[serde(flatten)] + range: RangeInput, + #[serde(default)] + severity: Option, + #[serde(default)] + code: Option, + #[serde(default)] + source: Option, + message: String, +} + +impl DiagnosticShorthandInput { + fn resolve( + self, + marker_store: &MarkerStore, + file: &str, + context: &str, + ) -> Result { + let range = marker_store.resolve_range(file, self.range, context)?; + Ok(Diagnostic { + range, + severity: self.severity.map(DiagnosticSeverityInput::resolve), + code: self.code.map(DiagnosticCodeInput::resolve), + code_description: None, + source: self.source, + message: self.message, + related_information: None, + tags: None, + data: None, + }) + } + + fn resolve_without_markers(self, context: &str) -> Result { + let range = self.range.resolve_range(context)?; + Ok(Diagnostic { + range, + severity: self.severity.map(DiagnosticSeverityInput::resolve), + code: self.code.map(DiagnosticCodeInput::resolve), + code_description: None, + source: self.source, + message: self.message, + related_information: None, + tags: None, + data: None, + }) + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize)] +#[serde(rename_all = "lowercase")] +enum DiagnosticSeverityInput { + Error, + Warning, + Information, + Hint, +} + +impl DiagnosticSeverityInput { + const fn resolve(self) -> DiagnosticSeverity { + match self { + Self::Error => 
DiagnosticSeverity::ERROR, + Self::Warning => DiagnosticSeverity::WARNING, + Self::Information => DiagnosticSeverity::INFORMATION, + Self::Hint => DiagnosticSeverity::HINT, + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(untagged)] +enum DiagnosticCodeInput { + Number(i32), + String(String), +} + +impl DiagnosticCodeInput { + fn resolve(self) -> NumberOrString { + match self { + Self::Number(value) => NumberOrString::Number(value), + Self::String(value) => NumberOrString::String(value), + } + } +} + +/// Accept either full `CodeActionOrCommand` payloads or shorthand code actions. +#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(untagged)] +pub(super) enum CodeActionOrCommandInput { + Shorthand(CodeActionShorthandInput), + Full(Box), +} + +impl CodeActionOrCommandInput { + /// Resolve shorthand/full code action representation into an LSP response item. + pub(super) fn resolve( + self, + base_dir: &Path, + marker_store: &MarkerStore, + default_file: Option<&str>, + context: &str, + ) -> Result { + match self { + Self::Full(action) => Ok(*action), + Self::Shorthand(action) => { + action.resolve(base_dir, marker_store, default_file, context) + } + } + } +} + +#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(deny_unknown_fields)] +pub(super) struct CodeActionShorthandInput { + title: String, + #[serde(default)] + kind: Option, + #[serde(default, rename = "isPreferred")] + is_preferred: Option, + #[serde(default)] + diagnostics: Vec, + #[serde(default)] + edits: BTreeMap>, +} + +impl CodeActionShorthandInput { + fn resolve( + self, + base_dir: &Path, + marker_store: &MarkerStore, + default_file: Option<&str>, + context: &str, + ) -> Result { + let diagnostics = self + .diagnostics + .into_iter() + .map(|diagnostic| { + diagnostic.resolve_with_optional_file( + marker_store, + default_file, + "expectCodeAction.result.diagnostics", + ) + }) + .collect::, _>>()?; + let diagnostics = (!diagnostics.is_empty()).then_some(diagnostics); 
+ + let edit = if self.edits.is_empty() { + None + } else { + let changes = self + .edits + .into_iter() + .map(|(relative_path, edits)| { + let uri: lsp_types::Uri = + file_uri(base_dir, &relative_path) + .parse() + .map_err(|error| { + format!( + "{context}: parse edit URI for '{}': {error}", + relative_path + ) + })?; + let edits = edits + .into_iter() + .map(|edit| { + edit.resolve_with_markers( + marker_store, + &relative_path, + "expectCodeAction.result.edits", + ) + .map_err(|error| { + format!("{context}: resolve edit for '{}': {error}", relative_path) + }) + }) + .collect::, _>>()?; + Ok::<_, String>((uri, edits)) + }) + .collect::, _>>()?; + + Some(WorkspaceEdit { + changes: Some(changes), + document_changes: None, + change_annotations: None, + }) + }; + + Ok(CodeActionOrCommand::CodeAction(CodeAction { + title: self.title, + kind: self.kind, + diagnostics, + edit, + command: None, + is_preferred: self.is_preferred, + disabled: None, + data: None, + })) + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +pub(super) struct TextEditInput { + #[serde(flatten)] + range: RangeInput, + replace: String, +} + +impl TextEditInput { + /// Resolve text edit range using marker references from one file. + pub(super) fn resolve_with_markers( + self, + marker_store: &MarkerStore, + file: &str, + context: &str, + ) -> Result { + Ok(TextEdit { + range: marker_store.resolve_range(file, self.range, context)?, + new_text: self.replace, + }) + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +pub(super) struct LocationInput { + file: String, + #[serde(flatten)] + range: RangeInput, +} + +impl LocationInput { + /// Resolve shorthand location into an absolute file URI and concrete range. 
+ pub(super) fn resolve_location( + self, + base_dir: &Path, + marker_store: &MarkerStore, + context: &str, + ) -> Result { + let uri: lsp_types::Uri = file_uri(base_dir, &self.file).parse().map_err(|error| { + format!("{context}: parse location URI for '{}': {error}", self.file) + })?; + let range = marker_store.resolve_range(&self.file, self.range, context)?; + Ok(Location { uri, range }) + } +} + +/// Accept either scalar/array shorthand locations or full LSP response payload. +#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(untagged)] +pub(super) enum GotoDefinitionResponseInput { + Single(LocationInput), + Many(Vec), + Full(GotoDefinitionResponse), +} + +impl GotoDefinitionResponseInput { + /// Resolve shorthand definition/declaration/typeDefinition response payloads. + pub(super) fn resolve( + self, + base_dir: &Path, + marker_store: &MarkerStore, + context: &str, + ) -> Result { + match self { + Self::Full(result) => Ok(result), + Self::Single(location) => location + .resolve_location(base_dir, marker_store, context) + .map(GotoDefinitionResponse::Scalar), + Self::Many(locations) => locations + .into_iter() + .map(|location| location.resolve_location(base_dir, marker_store, context)) + .collect::, _>>() + .map(GotoDefinitionResponse::Array), + } + } +} + +/// Accept either shorthand `edits` mapping or full `WorkspaceEdit`. +#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(untagged)] +pub(super) enum WorkspaceEditInput { + Shorthand(WorkspaceEditShorthandInput), + Full(WorkspaceEdit), +} + +impl WorkspaceEditInput { + /// Resolve shorthand workspace edits into concrete `WorkspaceEdit`. 
+ pub(super) fn resolve( + self, + base_dir: &Path, + marker_store: &MarkerStore, + context: &str, + ) -> Result { + match self { + Self::Full(edit) => Ok(edit), + Self::Shorthand(shorthand) => shorthand.resolve(base_dir, marker_store, context), + } + } +} + +#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(deny_unknown_fields)] +pub(super) struct WorkspaceEditShorthandInput { + #[serde(default)] + edits: BTreeMap>, +} + +impl WorkspaceEditShorthandInput { + fn resolve( + self, + base_dir: &Path, + marker_store: &MarkerStore, + context: &str, + ) -> Result { + let changes = self + .edits + .into_iter() + .map(|(relative_path, edits)| { + let uri: lsp_types::Uri = + file_uri(base_dir, &relative_path) + .parse() + .map_err(|error| { + format!("{context}: parse edit URI for '{}': {error}", relative_path) + })?; + let edits = edits + .into_iter() + .map(|edit| edit.resolve_with_markers(marker_store, &relative_path, context)) + .collect::, _>>()?; + Ok::<_, String>((uri, edits)) + }) + .collect::, _>>()?; + + Ok(WorkspaceEdit { + changes: Some(changes), + document_changes: None, + change_annotations: None, + }) + } +} + +/// Accept either shorthand symbol entries or a full LSP workspace symbol response. +#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(untagged)] +pub(super) enum WorkspaceSymbolResponseInput { + Shorthand(WorkspaceSymbolResponseShorthandInput), + Full(WorkspaceSymbolResponse), +} + +impl WorkspaceSymbolResponseInput { + /// Resolve shorthand workspace symbol responses into concrete LSP payloads. 
+ pub(super) fn resolve( + self, + base_dir: &Path, + marker_store: &MarkerStore, + context: &str, + ) -> Result { + match self { + Self::Full(response) => Ok(response), + Self::Shorthand(shorthand) => shorthand.resolve(base_dir, marker_store, context), + } + } +} + +#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(deny_unknown_fields)] +pub(super) struct WorkspaceSymbolResponseShorthandInput { + symbols: Vec, +} + +impl WorkspaceSymbolResponseShorthandInput { + fn resolve( + self, + base_dir: &Path, + marker_store: &MarkerStore, + context: &str, + ) -> Result { + let symbols = self + .symbols + .into_iter() + .map(|symbol| symbol.resolve(base_dir, marker_store, context)) + .collect::, _>>()?; + Ok(WorkspaceSymbolResponse::Flat(symbols)) + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct WorkspaceSymbolInput { + name: String, + kind: SymbolKind, + file: String, + #[serde(flatten)] + range: RangeInput, + #[serde(default)] + container_name: Option, +} + +impl WorkspaceSymbolInput { + fn resolve( + self, + base_dir: &Path, + marker_store: &MarkerStore, + context: &str, + ) -> Result { + let uri: lsp_types::Uri = file_uri(base_dir, &self.file).parse().map_err(|error| { + format!( + "{context}: parse workspace symbol URI for '{}': {error}", + self.file + ) + })?; + // Build via JSON so field names/types track the wire representation directly, + // including deprecated-but-still-used `containerName`. + let value = serde_json::json!({ + "name": self.name, + "kind": self.kind, + "tags": null, + "location": { + "uri": uri, + "range": marker_store.resolve_range(&self.file, self.range, context)?, + }, + "containerName": self.container_name, + }); + serde_json::from_value(value).map_err(|error| { + format!("{context}: decode workspace symbol shorthand into SymbolInformation: {error}") + }) + } +} + +/// Accept either shorthand code-lens rows or full `CodeLens` values. 
+#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(untagged)] +pub(super) enum CodeLensInput { + Shorthand(CodeLensShorthandInput), + Full(CodeLens), +} + +impl CodeLensInput { + /// Resolve shorthand code lens values into concrete `CodeLens`. + pub(super) fn resolve( + self, + base_dir: &Path, + marker_store: &MarkerStore, + default_file: Option<&str>, + context: &str, + ) -> Result { + match self { + Self::Full(lens) => Ok(lens), + Self::Shorthand(shorthand) => { + shorthand.resolve(base_dir, marker_store, default_file, context) + } + } + } +} + +#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(deny_unknown_fields)] +pub(super) struct CodeLensShorthandInput { + #[serde(flatten)] + range: RangeInput, + #[serde(default)] + command: Option, +} + +impl CodeLensShorthandInput { + fn resolve( + self, + base_dir: &Path, + marker_store: &MarkerStore, + default_file: Option<&str>, + context: &str, + ) -> Result { + let range = match default_file { + Some(file) => marker_store.resolve_range(file, self.range, context)?, + None => self.range.resolve_range(context)?, + }; + let command = self + .command + .map(|command| command.resolve(base_dir, context)) + .transpose()?; + Ok(CodeLens { + range, + command, + data: None, + }) + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct CodeLensCommandInput { + title: String, + command: String, + #[serde(default)] + arguments: Vec, +} + +impl CodeLensCommandInput { + fn resolve(self, base_dir: &Path, context: &str) -> Result { + let arguments = self + .arguments + .into_iter() + .map(|argument| argument.resolve(base_dir, context)) + .collect::, _>>()?; + Ok(lsp_types::Command { + title: self.title, + command: self.command, + arguments: (!arguments.is_empty()).then_some(arguments), + }) + } +} + +/// YAML command-argument convenience: +/// - `{ file: "" }` becomes a file URI string. +/// - any other JSON value is passed through unchanged. 
+#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(untagged)] +enum CommandArgumentInput { + File { file: String }, + Value(serde_json::Value), +} + +impl CommandArgumentInput { + fn resolve(self, base_dir: &Path, context: &str) -> Result { + match self { + Self::File { file } => Ok(serde_json::Value::String(file_uri(base_dir, &file))), + Self::Value(value) => { + if let Some(file) = value.as_object().and_then(|object| object.get("file")) { + if let Some(file) = file.as_str() { + return Ok(serde_json::Value::String(file_uri(base_dir, file))); + } + return Err(format!( + "{context}: command argument `file` must be a string" + )); + } + Ok(value) + } + } + } +} + +/// Accept either a full semantic-token result or marker-driven shorthand. +#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(untagged)] +pub(super) enum SemanticTokensResultInput { + Full(SemanticTokensResult), + Shorthand(SemanticTokensShorthandInput), +} + +impl SemanticTokensResultInput { + /// Resolve expected semantic-tokens payload for `semanticTokens/full`. + pub(super) fn resolve_full( + self, + marker_store: &MarkerStore, + file: &str, + context: &str, + ) -> Result { + match self { + Self::Full(result) => Ok(result), + Self::Shorthand(shorthand) => Ok(SemanticTokensResult::Tokens(shorthand.resolve( + marker_store, + file, + context, + )?)), + } + } + + /// Resolve expected semantic-tokens payload for `semanticTokens/range`. + pub(super) fn resolve_range( + self, + marker_store: &MarkerStore, + file: &str, + context: &str, + ) -> Result { + match self { + Self::Full(SemanticTokensResult::Tokens(tokens)) => { + Ok(SemanticTokensRangeResult::Tokens(tokens)) + } + Self::Full(SemanticTokensResult::Partial(partial)) => { + Ok(SemanticTokensRangeResult::Partial(partial)) + } + Self::Shorthand(shorthand) => Ok(SemanticTokensRangeResult::Tokens( + shorthand.resolve(marker_store, file, context)?, + )), + } + } +} + +/// Marker-only semantic token shorthand used in YAML expectations. 
+/// +/// Absolute token tuples are intentionally not supported here to keep tests +/// resilient to unrelated token-position churn. +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +pub(super) struct SemanticTokensShorthandInput { + #[serde(default, rename = "tokensByMarker")] + tokens_by_marker: Vec, +} + +impl SemanticTokensShorthandInput { + fn resolve( + self, + marker_store: &MarkerStore, + file: &str, + context: &str, + ) -> Result { + let absolute_tokens = self + .tokens_by_marker + .into_iter() + .enumerate() + .map(|(index, token)| token.resolve(context, marker_store, file, index)) + .collect::, _>>()?; + Ok(encode_semantic_tokens(absolute_tokens)) + } +} + +/// One expected semantic token anchored to a named marker range. +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct SemanticTokenByMarkerInput { + marker: String, + #[serde(rename = "type")] + token_type: SemanticTokenTypeInput, + #[serde(default)] + modifiers: Vec, +} + +impl SemanticTokenByMarkerInput { + fn resolve( + self, + context: &str, + marker_store: &MarkerStore, + file: &str, + index: usize, + ) -> Result { + let token_type = self + .token_type + .resolve_at(context, &format!("tokensByMarker[{index}].type"))?; + let modifiers = self + .modifiers + .into_iter() + .enumerate() + .map(|(modifier_index, modifier)| { + modifier.resolve_at( + context, + &format!("tokensByMarker[{index}].modifiers[{modifier_index}]"), + ) + }) + .collect::, _>>()?; + let range = marker_store.resolve_named_range(file, &self.marker, context)?; + if range.start.line != range.end.line { + return Err(format!( + "{context}: tokensByMarker[{index}] marker '{}' spans multiple lines", + self.marker + )); + } + let len = range + .end + .character + .checked_sub(range.start.character) + .ok_or_else(|| { + format!( + "{context}: tokensByMarker[{index}] marker '{}' has invalid range", + self.marker + ) + })?; + if len == 0 { + return Err(format!( + 
"{context}: tokensByMarker[{index}] marker '{}' resolves to an empty range", + self.marker + )); + } + + Ok(ExpectedSemanticToken::new( + range.start.line, + range.start.character, + len, + token_type, + semantic_modifiers(&modifiers), + )) + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(transparent)] +struct SemanticTokenTypeInput(String); + +impl SemanticTokenTypeInput { + fn resolve_at(self, context: &str, location: &str) -> Result { + SemanticTokenTypeName::from_str(&self.0).map_err(|_| { + format!( + "{context}: unknown semantic token type '{}' at {location}", + self.0 + ) + }) + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(transparent)] +struct SemanticTokenModifierInput(String); + +impl SemanticTokenModifierInput { + fn resolve_at( + self, + context: &str, + location: &str, + ) -> Result { + SemanticTokenModifierName::from_str(&self.0).map_err(|_| { + format!( + "{context}: unknown semantic token modifier '{}' at {location}", + self.0 + ) + }) + } +} + +#[derive(Debug, Clone, Deserialize)] +#[serde(deny_unknown_fields, rename_all = "camelCase")] +pub(super) struct InlayHintInput { + #[serde(flatten)] + position: PositionFieldInput, + label: lsp_types::InlayHintLabel, + #[serde(default)] + kind: Option, + #[serde(default)] + text_edits: Option>, + #[serde(default)] + tooltip: Option, + #[serde(default)] + padding_left: Option, + #[serde(default)] + padding_right: Option, + #[serde(default)] + data: Option, +} + +impl InlayHintInput { + /// Resolve shorthand inlay hint fields, including marker-based positions/ranges. 
+ pub(super) fn resolve( + self, + marker_store: &MarkerStore, + file: &str, + context: &str, + ) -> Result { + let text_edits = self + .text_edits + .map(|edits| { + edits + .into_iter() + .map(|edit| { + edit.resolve(marker_store, file, "expectInlayHints.result.textEdits") + }) + .collect::, _>>() + }) + .transpose()?; + Ok(InlayHint { + position: self.position.resolve(marker_store, file, context)?, + label: self.label, + kind: self.kind, + text_edits, + tooltip: self.tooltip, + padding_left: self.padding_left, + padding_right: self.padding_right, + data: self.data, + }) + } +} + +impl PartialEq for InlayHintInput { + // `InlayHintLabel` and `InlayHintTooltip` enums do not expose stable structural + // comparison helpers for all variants, so compare their JSON forms instead. + fn eq(&self, other: &Self) -> bool { + self.position == other.position + && self.kind == other.kind + && self.text_edits == other.text_edits + && self.padding_left == other.padding_left + && self.padding_right == other.padding_right + && self.data == other.data + && serde_json::to_value(&self.label).ok() == serde_json::to_value(&other.label).ok() + && serde_json::to_value(&self.tooltip).ok() == serde_json::to_value(&other.tooltip).ok() + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields, rename_all = "camelCase")] +struct InlayHintTextEditInput { + #[serde(flatten)] + range: RangeFieldInput, + new_text: String, +} + +impl InlayHintTextEditInput { + fn resolve( + self, + marker_store: &MarkerStore, + file: &str, + context: &str, + ) -> Result { + Ok(TextEdit { + range: self.range.resolve(marker_store, file, context)?, + new_text: self.new_text, + }) + } +} + +/// Resolve marker references in arbitrary JSON, then deserialize into `T`. 
+fn resolve_marker_json_input( + value: serde_json::Value, + marker_store: &MarkerStore, + file: &str, + context: &str, +) -> Result +where + T: DeserializeOwned, +{ + let resolved = resolve_marker_references_json(value, marker_store, file, context, "$")?; + serde_json::from_value(resolved) + .map_err(|error| format!("{context}: decode marker-expanded value: {error}")) +} + +/// Recursively rewrite `{ positionOf: ... }` and `{ rangeOf: ... }` objects. +/// +/// Only single-key objects with these keys are treated as marker directives. +/// Objects with additional keys are treated as normal JSON objects and their +/// children are traversed recursively. +fn resolve_marker_references_json( + value: serde_json::Value, + marker_store: &MarkerStore, + file: &str, + context: &str, + path: &str, +) -> Result { + match value { + serde_json::Value::Object(mut object) => { + if object.len() == 1 { + if let Some(marker) = object.remove("positionOf") { + let marker_name = + marker_name_from_value(marker, context, &format!("{path}.positionOf"))?; + let position = marker_store.resolve_position_spec( + file, + PositionSpec::Marker(marker_name), + context, + )?; + return serde_json::to_value(position).map_err(|error| { + format!("{context}: serialize position at {path}: {error}") + }); + } + if let Some(marker) = object.remove("rangeOf") { + let marker_name = + marker_name_from_value(marker, context, &format!("{path}.rangeOf"))?; + let range = marker_store.resolve_named_range(file, &marker_name, context)?; + return serde_json::to_value(range) + .map_err(|error| format!("{context}: serialize range at {path}: {error}")); + } + } + + let mut resolved = serde_json::Map::with_capacity(object.len()); + for (key, child) in object { + let child_path = format!("{path}.{key}"); + let resolved_child = resolve_marker_references_json( + child, + marker_store, + file, + context, + &child_path, + )?; + resolved.insert(key, resolved_child); + } + Ok(serde_json::Value::Object(resolved)) + } + 
serde_json::Value::Array(items) => items + .into_iter() + .enumerate() + .map(|(index, item)| { + let child_path = format!("{path}[{index}]"); + resolve_marker_references_json(item, marker_store, file, context, &child_path) + }) + .collect::, _>>() + .map(serde_json::Value::Array), + other => Ok(other), + } +} + +/// Validate and extract marker names from JSON directive values. +fn marker_name_from_value( + value: serde_json::Value, + context: &str, + path: &str, +) -> Result { + match value { + serde_json::Value::String(name) if !name.is_empty() => Ok(name), + serde_json::Value::String(_) => Err(format!("{context}: {path} cannot be empty")), + other => Err(format!( + "{context}: {path} must be a string marker name, got {other}" + )), + } +} + +/// Accept either direct LSP payload or marker-directive JSON for completion. +#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(untagged)] +pub(super) enum CompletionResponseInput { + Full(CompletionResponse), + WithMarkers(serde_json::Value), +} + +impl CompletionResponseInput { + fn resolve( + self, + marker_store: &MarkerStore, + file: &str, + context: &str, + ) -> Result { + match self { + Self::Full(response) => Ok(response), + Self::WithMarkers(value) => { + resolve_marker_json_input(value, marker_store, file, context) + } + } + } +} + +/// Accept either direct LSP payload or marker-directive JSON for document symbols. +#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(untagged)] +pub(super) enum DocumentSymbolResponseInput { + Full(DocumentSymbolResponse), + WithMarkers(serde_json::Value), +} + +impl DocumentSymbolResponseInput { + fn resolve( + self, + marker_store: &MarkerStore, + file: &str, + context: &str, + ) -> Result { + match self { + Self::Full(response) => Ok(response), + Self::WithMarkers(value) => { + resolve_marker_json_input(value, marker_store, file, context) + } + } + } +} + +/// Accept either direct LSP payload or marker-directive JSON for prepare-rename. 
+#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(untagged)] +pub(super) enum PrepareRenameResponseInput { + Full(PrepareRenameResponse), + WithMarkers(serde_json::Value), +} + +impl PrepareRenameResponseInput { + fn resolve( + self, + marker_store: &MarkerStore, + file: &str, + context: &str, + ) -> Result { + match self { + Self::Full(response) => Ok(response), + Self::WithMarkers(value) => { + resolve_marker_json_input(value, marker_store, file, context) + } + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +pub(super) struct ExpectHoverScriptStep { + #[serde(default)] + pub(super) request: Option, + pub(super) result: Option, +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +pub(super) struct ExpectSignatureHelpScriptStep { + #[serde(default)] + pub(super) request: Option, + pub(super) result: Option, +} + +#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(deny_unknown_fields)] +pub(super) struct ExpectFormattingScriptStep { + #[serde(default)] + pub(super) request: Option, + pub(super) result: Option>, +} + +#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(deny_unknown_fields)] +pub(super) struct ExpectDocumentSymbolScriptStep { + #[serde(default)] + pub(super) request: Option, + pub(super) result: Option, +} + +impl ExpectDocumentSymbolScriptStep { + /// Resolve marker-aware expected result for `expectDocumentSymbol`. 
+ pub(super) fn resolve_result( + self, + marker_store: &MarkerStore, + file: &str, + context: &str, + ) -> Result, String> { + self.result + .map(|result| result.resolve(marker_store, file, context)) + .transpose() + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +pub(super) struct ExpectExecuteCommandScriptStep { + #[serde(default)] + pub(super) request: Option, + pub(super) result: Option, +} + +#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(deny_unknown_fields)] +pub(super) struct ExpectPrepareRenameScriptStep { + #[serde(default)] + pub(super) request: Option, + pub(super) result: Option, +} + +impl ExpectPrepareRenameScriptStep { + /// Resolve marker-aware expected result for `expectPrepareRename`. + pub(super) fn resolve_result( + self, + marker_store: &MarkerStore, + file: &str, + context: &str, + ) -> Result, String> { + self.result + .map(|result| result.resolve(marker_store, file, context)) + .transpose() + } +} + +#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(deny_unknown_fields)] +pub(super) struct ExpectCompletionScriptStep { + #[serde(default)] + pub(super) request: Option, + pub(super) result: Option, + /// Optional label subset assertion, checked by the scenario runner. + #[serde(default)] + pub(super) labels: Option>, + /// When true, allow result items beyond `labels`. + #[serde(default)] + pub(super) allow_extra: bool, +} + +impl ExpectCompletionScriptStep { + /// Resolve marker-aware expected result for `expectCompletion`. 
+ pub(super) fn resolve_result( + self, + marker_store: &MarkerStore, + file: &str, + context: &str, + ) -> Result, String> { + self.result + .map(|result| result.resolve(marker_store, file, context)) + .transpose() + } +} diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_script/markers.rs b/crates/jrsonnet-lsp-scenario/src/scenario_script/markers.rs new file mode 100644 index 00000000..5abd1ebf --- /dev/null +++ b/crates/jrsonnet-lsp-scenario/src/scenario_script/markers.rs @@ -0,0 +1,1034 @@ +//! Inline marker parser and resolver for scenario source text. +//! +//! Marker syntax supported in file text: +//! - `[[name:text]]` records a named range covering `text`. +//! - `((name:before|after))` records a named cursor position between +//! `before` and `after` (stored as a zero-width range). +//! +//! Parsing is a two-step process: +//! 1. Build a small Rowan syntax tree for nested marker constructs. +//! 2. Lower that tree to plain text (marker wrappers removed) and a marker map +//! with character offsets into the plain text. +//! +//! `MarkerStore` keeps one parsed document per file and updates marker offsets +//! across full and incremental text changes during scenario compilation. + +use std::collections::HashMap; + +use lsp_types::{Position, Range}; +use rowan::{GreenNodeBuilder, Language, NodeOrToken}; +use serde::Deserialize; + +/// Position selector used by scenario script fields like `at`. +/// +/// Current DSL only supports named marker references. +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(untagged)] +pub(super) enum PositionSpec { + Marker(String), +} + +/// Range selector used by scenario script fields like `range`. +/// +/// Current DSL only supports named marker references. +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(untagged)] +pub(super) enum RangeSpec { + Marker(String), +} + +/// Range input shape used in request and expectation shorthand fields. 
+/// +/// Accepted forms: +/// - `range: ` +/// - shorthand using `at: ` with either `text` or `len` +#[derive(Debug, Clone, PartialEq, Eq, Default, Deserialize)] +#[serde(deny_unknown_fields)] +pub(super) struct RangeInput { + #[serde(default)] + pub(super) range: Option, + #[serde(default)] + pub(super) at: Option, + #[serde(default)] + pub(super) text: Option, + #[serde(default)] + pub(super) len: Option, +} + +impl RangeInput { + /// Resolve a range without file context. + /// + /// This is only valid for non-marker data. Marker references require file + /// context and must be resolved through [`MarkerStore`]. + pub(super) fn resolve_range(self, context: &str) -> Result { + if let Some(range_spec) = self.range { + if self.at.is_some() || self.text.is_some() || self.len.is_some() { + return Err(format!( + "{context}: specify either `range` or shorthand (`at` + `text`/`len`), not both" + )); + } + return match range_spec { + RangeSpec::Marker(name) => Err(format!( + "{context}: marker range '{name}' requires file context" + )), + }; + } + + let Some(start_spec) = self.at else { + return Err(format!( + "{context}: missing range, provide `range` or shorthand (`at` + `text`/`len`)" + )); + }; + + match (self.text, self.len) { + (Some(_), Some(_)) => { + return Err(format!( + "{context}: shorthand cannot include both `text` and `len`" + )); + } + (None, None) => { + return Err(format!( + "{context}: shorthand requires one of `text` or `len`" + )); + } + _ => {} + } + + match start_spec { + PositionSpec::Marker(name) => Err(format!( + "{context}: marker position '{name}' requires file context" + )), + } + } +} + +/// Marker-backed position object field input (`positionOf`). +#[derive(Debug, Clone, PartialEq, Eq, Default, Deserialize)] +#[serde(deny_unknown_fields)] +pub(super) struct PositionFieldInput { + #[serde(default, rename = "positionOf")] + position_of: Option, +} + +impl PositionFieldInput { + /// Resolve `positionOf` against one file in the marker store. 
+ pub(super) fn resolve( + self, + marker_store: &MarkerStore, + file: &str, + context: &str, + ) -> Result { + match self.position_of { + Some(name) => { + marker_store.resolve_position_spec(file, PositionSpec::Marker(name), context) + } + None => Err(format!("{context}: missing position, provide `positionOf`")), + } + } +} + +/// Marker-backed range object field input (`rangeOf`). +#[derive(Debug, Clone, PartialEq, Eq, Default, Deserialize)] +#[serde(deny_unknown_fields)] +pub(super) struct RangeFieldInput { + #[serde(default, rename = "rangeOf")] + range_of: Option, +} + +impl RangeFieldInput { + /// Resolve `rangeOf` against one file in the marker store. + pub(super) fn resolve( + self, + marker_store: &MarkerStore, + file: &str, + context: &str, + ) -> Result { + match self.range_of { + Some(marker) => marker_store.resolve_named_range(file, &marker, context), + None => Err(format!("{context}: missing range, provide `rangeOf`")), + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +struct MarkerRangeOffsets { + start: usize, + end: usize, +} + +impl MarkerRangeOffsets { + const fn new(start: usize, end: usize) -> Self { + Self { start, end } + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +struct MarkerDocument { + text: String, + markers: HashMap, +} + +#[derive(Debug, Default)] +pub(super) struct MarkerStore { + documents: HashMap, +} + +impl MarkerStore { + /// Create an empty marker store. + pub(super) fn new() -> Self { + Self { + documents: HashMap::new(), + } + } + + /// Parse and register full file text with inline markers. + /// + /// Returns cleaned text (marker wrappers removed) that should be sent to the + /// LSP server for this file. 
+ pub(super) fn register_full_text( + &mut self, + file: &str, + raw_text: String, + context: &str, + ) -> Result { + let (text, markers) = parse_marked_text(&raw_text, context)?; + self.documents.insert( + file.to_string(), + MarkerDocument { + text: text.clone(), + markers, + }, + ); + Ok(text) + } + + /// Apply an incremental edit and update tracked marker offsets. + /// + /// Existing markers before the replaced range are retained as-is. Markers + /// after the range are shifted by the edit delta. Markers overlapping the + /// replaced region are dropped. Markers in inserted text are parsed and + /// inserted at their shifted positions. + pub(super) fn register_incremental_text( + &mut self, + file: &str, + range: Range, + raw_text: String, + context: &str, + ) -> Result { + let (text, inserted_markers) = parse_marked_text(&raw_text, context)?; + let Some(document) = self.documents.get_mut(file) else { + return Ok(text); + }; + + let start_offset = position_to_offset(&document.text, range.start, context)?; + let end_offset = position_to_offset(&document.text, range.end, context)?; + if start_offset > end_offset { + return Err(format!( + "{context}: incremental range start is after end for file '{file}'" + )); + } + + let start_byte = char_offset_to_byte_offset(&document.text, start_offset) + .ok_or_else(|| format!("{context}: start offset out of bounds in '{file}'"))?; + let end_byte = char_offset_to_byte_offset(&document.text, end_offset) + .ok_or_else(|| format!("{context}: end offset out of bounds in '{file}'"))?; + + let mut next_text = String::with_capacity( + start_byte + text.len() + document.text.len().saturating_sub(end_byte), + ); + next_text.push_str(&document.text[..start_byte]); + next_text.push_str(&text); + next_text.push_str(&document.text[end_byte..]); + + let replaced_width = end_offset.saturating_sub(start_offset); + let inserted_width = text.chars().count(); + let delta = isize::try_from(inserted_width) + .and_then(|inserted| { + 
isize::try_from(replaced_width).map(|replaced| inserted - replaced) + }) + .map_err(|_| format!("{context}: incremental text width overflow for '{file}'"))?; + + let mut next_markers = + HashMap::with_capacity(document.markers.len() + inserted_markers.len()); + for (name, marker) in &document.markers { + if marker.end <= start_offset { + next_markers.insert(name.clone(), marker.clone()); + continue; + } + if marker.start >= end_offset { + let shifted_start = shift_offset(marker.start, delta, context)?; + let shifted_end = shift_offset(marker.end, delta, context)?; + next_markers.insert( + name.clone(), + MarkerRangeOffsets::new(shifted_start, shifted_end), + ); + } + } + + for (name, marker) in inserted_markers { + if next_markers.contains_key(&name) { + return Err(format!( + "{context}: duplicate marker name '{name}' in file '{file}'" + )); + } + let start = marker + .start + .checked_add(start_offset) + .ok_or_else(|| format!("{context}: marker '{name}' start overflow"))?; + let end = marker + .end + .checked_add(start_offset) + .ok_or_else(|| format!("{context}: marker '{name}' end overflow"))?; + next_markers.insert(name, MarkerRangeOffsets::new(start, end)); + } + + document.text = next_text; + document.markers = next_markers; + Ok(text) + } + + /// Resolve an optional `at` field into a concrete LSP position. + pub(super) fn resolve_position( + &self, + file: &str, + at: Option, + context: &str, + ) -> Result { + match at { + Some(spec) => self.resolve_position_spec(file, spec, context), + None => Err(format!("{context}: missing position, provide `at`")), + } + } + + /// Resolve a marker-aware range input for one file. 
+ pub(super) fn resolve_range( + &self, + file: &str, + input: RangeInput, + context: &str, + ) -> Result { + if let Some(range_spec) = input.range { + if input.at.is_some() || input.text.is_some() || input.len.is_some() { + return Err(format!( + "{context}: specify either `range` or shorthand (`at` + `text`/`len`), not both" + )); + } + return match range_spec { + RangeSpec::Marker(name) => self.resolve_named_range(file, &name, context), + }; + } + + let Some(start_spec) = input.at else { + return Err(format!( + "{context}: missing range, provide `range` or shorthand (`at` + `text`/`len`)" + )); + }; + + match (input.text, input.len) { + (Some(text), None) => { + let width = u32::try_from(text.chars().count()).map_err(|_| { + format!("{context}: shorthand `text` length does not fit in u32") + })?; + let start = self.resolve_position_spec(file, start_spec, context)?; + let end_character = start + .character + .checked_add(width) + .ok_or_else(|| format!("{context}: range end overflow"))?; + Ok(Range { + start, + end: Position { + line: start.line, + character: end_character, + }, + }) + } + (None, Some(len)) => { + let start = self.resolve_position_spec(file, start_spec, context)?; + let end_character = start + .character + .checked_add(len) + .ok_or_else(|| format!("{context}: range end overflow"))?; + Ok(Range { + start, + end: Position { + line: start.line, + character: end_character, + }, + }) + } + (Some(_), Some(_)) => Err(format!( + "{context}: shorthand cannot include both `text` and `len`" + )), + (None, None) => match start_spec { + PositionSpec::Marker(name) => self.resolve_named_range(file, &name, context), + }, + } + } + + /// Resolve a named marker into an LSP range. 
+ pub(super) fn resolve_named_range( + &self, + file: &str, + marker_name: &str, + context: &str, + ) -> Result { + let marker = self.lookup_marker(file, marker_name, context)?; + let text = self.full_text(file).ok_or_else(|| { + format!( + "{context}: no tracked text for file '{file}' while resolving marker '{marker_name}'" + ) + })?; + let start = offset_to_position(text, marker.start, context)?; + let end = offset_to_position(text, marker.end, context)?; + Ok(Range { start, end }) + } + + /// Resolve a position selector into an LSP position. + pub(super) fn resolve_position_spec( + &self, + file: &str, + spec: PositionSpec, + context: &str, + ) -> Result { + match spec { + PositionSpec::Marker(name) => { + let marker = self.lookup_marker(file, &name, context)?; + let text = self.full_text(file).ok_or_else(|| { + format!( + "{context}: no tracked text for file '{file}' while resolving marker '{name}'" + ) + })?; + offset_to_position(text, marker.start, context) + } + } + } + + /// Look up one marker range in one tracked file. + fn lookup_marker( + &self, + file: &str, + marker_name: &str, + context: &str, + ) -> Result<&MarkerRangeOffsets, String> { + let Some(document) = self.documents.get(file) else { + return Err(format!( + "{context}: file '{file}' has no parsed text/markers; define it in `create`, `open`, `writeFile`, or `changeFull` first" + )); + }; + + document.markers.get(marker_name).ok_or_else(|| { + let available = document + .markers + .keys() + .map(String::as_str) + .collect::>(); + format!( + "{context}: unknown marker '{marker_name}' in file '{file}', available markers: {:?}", + available + ) + }) + } + + /// Return current cleaned text for a file, if tracked. + pub(super) fn full_text(&self, file: &str) -> Option<&str> { + self.documents + .get(file) + .map(|document| document.text.as_str()) + } + + /// Drop all tracked text/marker state for a file. 
+ pub(super) fn remove(&mut self, file: &str) { + self.documents.remove(file); + } +} + +/// Parse inline marker syntax and return `(clean_text, marker_offsets)`. +fn parse_marked_text( + input: &str, + context: &str, +) -> Result<(String, HashMap), String> { + let root = MarkerSyntaxParser::new(input, context).parse()?; + let mut segment = ParsedSegment::default(); + append_marker_node_contents(&mut segment, &root, context)?; + Ok((segment.text, segment.markers)) +} + +#[derive(Debug, Default)] +struct ParsedSegment { + text: String, + char_len: usize, + markers: HashMap, +} + +impl ParsedSegment { + /// Append plain text and advance tracked character length. + fn push_text(&mut self, text: &str, context: &str) -> Result<(), String> { + self.text.push_str(text); + self.char_len = self + .char_len + .checked_add(text.chars().count()) + .ok_or_else(|| format!("{context}: text length overflow"))?; + Ok(()) + } + + /// Insert a unique named marker range for this parsed segment. + fn insert_marker( + &mut self, + name: String, + start: usize, + end: usize, + context: &str, + ) -> Result<(), String> { + if self.markers.contains_key(&name) { + return Err(format!( + "{context}: duplicate marker name '{name}' in one text block" + )); + } + self.markers + .insert(name, MarkerRangeOffsets::new(start, end)); + Ok(()) + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[repr(u16)] +enum MarkerSyntaxKind { + Root, + RangeMarker, + CursorMarker, + RangeBody, + CursorBefore, + CursorAfter, + Text, + MarkerName, + OpenRange, + CloseRange, + OpenCursor, + CloseCursor, + Colon, + Pipe, + Error, +} + +impl MarkerSyntaxKind { + const fn into_raw(self) -> u16 { + self as u16 + } + + const fn from_raw(raw: u16) -> Self { + match raw { + 0 => Self::Root, + 1 => Self::RangeMarker, + 2 => Self::CursorMarker, + 3 => Self::RangeBody, + 4 => Self::CursorBefore, + 5 => Self::CursorAfter, + 6 => Self::Text, + 7 => Self::MarkerName, + 8 => Self::OpenRange, + 9 => 
Self::CloseRange, + 10 => Self::OpenCursor, + 11 => Self::CloseCursor, + 12 => Self::Colon, + 13 => Self::Pipe, + _ => Self::Error, + } + } +} + +impl From for rowan::SyntaxKind { + fn from(value: MarkerSyntaxKind) -> Self { + Self(value.into_raw()) + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +enum MarkerLanguage {} + +impl Language for MarkerLanguage { + type Kind = MarkerSyntaxKind; + + fn kind_from_raw(raw: rowan::SyntaxKind) -> Self::Kind { + MarkerSyntaxKind::from_raw(raw.0) + } + + fn kind_to_raw(kind: Self::Kind) -> rowan::SyntaxKind { + kind.into() + } +} + +type MarkerSyntaxNode = rowan::SyntaxNode; +type MarkerSyntaxToken = rowan::SyntaxToken; + +/// Append translated content of all children from one syntax node. +fn append_marker_node_contents( + target: &mut ParsedSegment, + node: &MarkerSyntaxNode, + context: &str, +) -> Result<(), String> { + for child in node.children_with_tokens() { + match child { + NodeOrToken::Node(child_node) => append_marker_node(target, &child_node, context)?, + NodeOrToken::Token(child_token) => append_marker_token(target, &child_token, context)?, + } + } + Ok(()) +} + +/// Translate one syntax node into output text + marker ranges. +fn append_marker_node( + target: &mut ParsedSegment, + node: &MarkerSyntaxNode, + context: &str, +) -> Result<(), String> { + match node.kind() { + MarkerSyntaxKind::Root + | MarkerSyntaxKind::RangeBody + | MarkerSyntaxKind::CursorBefore + | MarkerSyntaxKind::CursorAfter => append_marker_node_contents(target, node, context), + MarkerSyntaxKind::RangeMarker => append_range_marker(target, node, context), + MarkerSyntaxKind::CursorMarker => append_cursor_marker(target, node, context), + kind => Err(format!( + "{context}: unexpected marker syntax node {kind:?} while translating marker tree" + )), + } +} + +/// Translate a parsed range marker node and record its start/end character offsets. 
+fn append_range_marker( + target: &mut ParsedSegment, + node: &MarkerSyntaxNode, + context: &str, +) -> Result<(), String> { + let marker_name = marker_name_for_node(node, context)?; + let marker_start = target.char_len; + let mut body_seen = false; + + for child in node.children() { + if child.kind() == MarkerSyntaxKind::RangeBody { + body_seen = true; + append_marker_node_contents(target, &child, context)?; + } + } + + if !body_seen { + return Err(format!( + "{context}: malformed range marker '{marker_name}', missing body" + )); + } + + let marker_end = target.char_len; + target.insert_marker(marker_name, marker_start, marker_end, context) +} + +/// Translate a parsed cursor marker node and record a zero-width marker range. +fn append_cursor_marker( + target: &mut ParsedSegment, + node: &MarkerSyntaxNode, + context: &str, +) -> Result<(), String> { + let marker_name = marker_name_for_node(node, context)?; + let mut before = None::; + let mut after = None::; + + for child in node.children() { + match child.kind() { + MarkerSyntaxKind::CursorBefore => before = Some(child), + MarkerSyntaxKind::CursorAfter => after = Some(child), + _ => {} + } + } + + let before = before.ok_or_else(|| { + format!("{context}: malformed cursor marker '{marker_name}', missing before segment") + })?; + let after = after.ok_or_else(|| { + format!("{context}: malformed cursor marker '{marker_name}', missing after segment") + })?; + + append_marker_node_contents(target, &before, context)?; + let cursor = target.char_len; + append_marker_node_contents(target, &after, context)?; + target.insert_marker(marker_name, cursor, cursor, context) +} + +/// Translate one token from marker syntax tree into plain text stream. 
+fn append_marker_token( + target: &mut ParsedSegment, + token: &MarkerSyntaxToken, + context: &str, +) -> Result<(), String> { + match token.kind() { + MarkerSyntaxKind::Text => target.push_text(token.text(), context), + MarkerSyntaxKind::MarkerName + | MarkerSyntaxKind::OpenRange + | MarkerSyntaxKind::CloseRange + | MarkerSyntaxKind::OpenCursor + | MarkerSyntaxKind::CloseCursor + | MarkerSyntaxKind::Colon + | MarkerSyntaxKind::Pipe => Ok(()), + kind => Err(format!( + "{context}: unexpected marker syntax token {kind:?} while translating marker tree" + )), + } +} + +/// Extract marker name token from a marker node. +fn marker_name_for_node(node: &MarkerSyntaxNode, context: &str) -> Result { + node.children_with_tokens() + .find_map(|element| match element { + NodeOrToken::Node(_) => None, + NodeOrToken::Token(token) => { + (token.kind() == MarkerSyntaxKind::MarkerName).then_some(token) + } + }) + .map(|token| token.text().to_string()) + .ok_or_else(|| format!("{context}: malformed marker node, missing marker name")) +} + +/// Recursive-descent parser that builds a Rowan tree for marker syntax. +struct MarkerSyntaxParser<'a> { + input: &'a str, + index: usize, + context: &'a str, + builder: GreenNodeBuilder<'static>, +} + +impl<'a> MarkerSyntaxParser<'a> { + /// Create a parser for one raw text block. + fn new(input: &'a str, context: &'a str) -> Self { + Self { + input, + index: 0, + context, + builder: GreenNodeBuilder::new(), + } + } + + /// Parse full input into a syntax tree rooted at `Root`. + fn parse(mut self) -> Result { + self.start_node(MarkerSyntaxKind::Root); + while !self.is_eof() { + self.parse_item()?; + } + self.finish_node(); + Ok(MarkerSyntaxNode::new_root(self.builder.finish())) + } + + /// Parse one top-level item, preferring marker constructs over raw text. + fn parse_item(&mut self) -> Result<(), String> { + if self.try_parse_range_marker()? { + return Ok(()); + } + if self.try_parse_cursor_marker()? 
{ + return Ok(()); + } + + self.parse_text_token() + } + + /// Attempt to parse `[[name:...]]`. + /// + /// Returns `Ok(false)` if current cursor is not at a valid range marker. + fn try_parse_range_marker(&mut self) -> Result { + if !self.starts_with("[[") { + return Ok(false); + } + + let start = self.index; + let Some((name_width, marker_name)) = self.peek_marker_name_and_colon( + start + .checked_add(2) + .ok_or_else(|| format!("{}: marker parser index overflow", self.context))?, + ) else { + return Ok(false); + }; + let marker_name = parse_marker_name(marker_name, self.context)?; + + self.start_node(MarkerSyntaxKind::RangeMarker); + self.emit_fixed_token(MarkerSyntaxKind::OpenRange, "[[")?; + self.emit_text_token(MarkerSyntaxKind::MarkerName, marker_name); + self.advance_bytes(name_width)?; + self.emit_fixed_token(MarkerSyntaxKind::Colon, ":")?; + self.start_node(MarkerSyntaxKind::RangeBody); + while !self.is_eof() { + if self.starts_with("]]") { + self.finish_node(); + self.emit_fixed_token(MarkerSyntaxKind::CloseRange, "]]")?; + self.finish_node(); + return Ok(true); + } + self.parse_item()?; + } + + Err(format!( + "{}: unterminated range marker starting at byte {start}", + self.context + )) + } + + /// Attempt to parse `((name:before|after))`. + /// + /// Cursor markers must contain exactly one top-level `|`. + /// Returns `Ok(false)` if current cursor is not at a valid cursor marker. 
+ fn try_parse_cursor_marker(&mut self) -> Result { + if !self.starts_with("((") { + return Ok(false); + } + + let start = self.index; + let Some((name_width, marker_name)) = self.peek_marker_name_and_colon( + start + .checked_add(2) + .ok_or_else(|| format!("{}: marker parser index overflow", self.context))?, + ) else { + return Ok(false); + }; + let marker_name = parse_marker_name(marker_name, self.context)?; + + self.start_node(MarkerSyntaxKind::CursorMarker); + self.emit_fixed_token(MarkerSyntaxKind::OpenCursor, "((")?; + self.emit_text_token(MarkerSyntaxKind::MarkerName, marker_name); + self.advance_bytes(name_width)?; + self.emit_fixed_token(MarkerSyntaxKind::Colon, ":")?; + + self.start_node(MarkerSyntaxKind::CursorBefore); + while !self.is_eof() { + if self.starts_with("))") { + return Err(format!( + "{}: cursor marker '{}' must include exactly one top-level `|`", + self.context, marker_name + )); + } + if self.peek_char() == Some('|') { + self.finish_node(); + self.emit_fixed_token(MarkerSyntaxKind::Pipe, "|")?; + self.start_node(MarkerSyntaxKind::CursorAfter); + break; + } + self.parse_item()?; + } + + if self.is_eof() { + return Err(format!( + "{}: unterminated cursor marker starting at byte {start}", + self.context + )); + } + + while !self.is_eof() { + if self.starts_with("))") { + self.finish_node(); + self.emit_fixed_token(MarkerSyntaxKind::CloseCursor, "))")?; + self.finish_node(); + return Ok(true); + } + if self.peek_char() == Some('|') { + return Err(format!( + "{}: cursor marker '{}' must include exactly one top-level `|`", + self.context, marker_name + )); + } + self.parse_item()?; + } + + Err(format!( + "{}: unterminated cursor marker starting at byte {start}", + self.context + )) + } + + /// Emit one non-marker UTF-8 scalar as plain text. 
+ fn parse_text_token(&mut self) -> Result<(), String> { + let ch = self + .peek_char() + .ok_or_else(|| format!("{}: marker parser unexpectedly reached EOF", self.context))?; + let width = ch.len_utf8(); + let end = self + .index + .checked_add(width) + .ok_or_else(|| format!("{}: marker parser index overflow", self.context))?; + let text = &self.input[self.index..end]; + self.emit_text_token(MarkerSyntaxKind::Text, text); + self.index = end; + Ok(()) + } + + /// Look ahead for `:` from `start`. + /// + /// Returns consumed byte width and marker name slice if valid. + fn peek_marker_name_and_colon(&self, start: usize) -> Option<(usize, &'a str)> { + let tail = self.input.get(start..)?; + let mut chars = tail.char_indices(); + let (_, first) = chars.next()?; + if !is_marker_name_start(first) { + return None; + } + + for (offset, ch) in chars { + if is_marker_name_continue(ch) { + continue; + } + if ch == ':' { + let name = &tail[..offset]; + return Some((offset, name)); + } + return None; + } + + None + } + + fn start_node(&mut self, kind: MarkerSyntaxKind) { + self.builder.start_node(kind.into()); + } + + fn finish_node(&mut self) { + self.builder.finish_node(); + } + + fn emit_text_token(&mut self, kind: MarkerSyntaxKind, text: &str) { + self.builder.token(kind.into(), text); + } + + fn emit_fixed_token(&mut self, kind: MarkerSyntaxKind, text: &str) -> Result<(), String> { + self.emit_text_token(kind, text); + self.advance_bytes(text.len()) + } + + fn starts_with(&self, token: &str) -> bool { + self.input[self.index..].starts_with(token) + } + + fn peek_char(&self) -> Option { + self.input[self.index..].chars().next() + } + + fn advance_bytes(&mut self, bytes: usize) -> Result<(), String> { + self.index = self + .index + .checked_add(bytes) + .ok_or_else(|| format!("{}: marker parser index overflow", self.context))?; + Ok(()) + } + + fn is_eof(&self) -> bool { + self.index >= self.input.len() + } +} + +const fn is_marker_name_start(ch: char) -> bool { + 
ch.is_ascii_alphabetic() || ch == '_' +} + +const fn is_marker_name_continue(ch: char) -> bool { + ch.is_ascii_alphanumeric() || ch == '_' || ch == '-' +} + +/// Validate marker name lexical constraints. +fn parse_marker_name<'a>(name: &'a str, context: &str) -> Result<&'a str, String> { + if name.is_empty() { + return Err(format!("{context}: marker name cannot be empty")); + } + Ok(name) +} + +/// Convert a character offset into an LSP `(line, character)` position. +fn offset_to_position(text: &str, offset: usize, context: &str) -> Result { + let mut line = 0u32; + let mut character = 0u32; + let mut consumed = 0usize; + + for ch in text.chars() { + if consumed == offset { + return Ok(Position { line, character }); + } + consumed = consumed + .checked_add(1) + .ok_or_else(|| format!("{context}: offset overflow"))?; + if ch == '\n' { + line = line + .checked_add(1) + .ok_or_else(|| format!("{context}: line overflow"))?; + character = 0; + } else { + character = character + .checked_add(1) + .ok_or_else(|| format!("{context}: character overflow"))?; + } + } + + if consumed == offset { + return Ok(Position { line, character }); + } + + Err(format!( + "{context}: marker offset {offset} is out of bounds (text has {consumed} chars)" + )) +} + +/// Convert an LSP `(line, character)` position into a character offset. 
+fn position_to_offset(text: &str, position: Position, context: &str) -> Result { + let mut line = 0u32; + let mut character = 0u32; + let mut offset = 0usize; + + for ch in text.chars() { + if line == position.line && character == position.character { + return Ok(offset); + } + offset = offset + .checked_add(1) + .ok_or_else(|| format!("{context}: offset overflow"))?; + if ch == '\n' { + line = line + .checked_add(1) + .ok_or_else(|| format!("{context}: line overflow"))?; + character = 0; + } else { + character = character + .checked_add(1) + .ok_or_else(|| format!("{context}: character overflow"))?; + } + } + + if line == position.line && character == position.character { + return Ok(offset); + } + + Err(format!( + "{context}: position ({}, {}) is out of bounds for current text", + position.line, position.character + )) +} + +/// Convert a character offset to byte offset in UTF-8 text. +fn char_offset_to_byte_offset(text: &str, target: usize) -> Option { + let mut offset = 0usize; + for (byte_index, _) in text.char_indices() { + if offset == target { + return Some(byte_index); + } + offset = offset.checked_add(1)?; + } + if offset == target { + return Some(text.len()); + } + None +} + +/// Shift an offset by signed delta with overflow/underflow checks. 
+fn shift_offset(value: usize, delta: isize, context: &str) -> Result { + if delta.is_negative() { + let amount = delta.unsigned_abs(); + value + .checked_sub(amount) + .ok_or_else(|| format!("{context}: marker offset underflow while shifting")) + } else { + let amount = + usize::try_from(delta).map_err(|_| format!("{context}: marker shift overflow"))?; + value + .checked_add(amount) + .ok_or_else(|| format!("{context}: marker offset overflow while shifting")) + } +} diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_script/mod.rs b/crates/jrsonnet-lsp-scenario/src/scenario_script/mod.rs new file mode 100644 index 00000000..0c2b52ed --- /dev/null +++ b/crates/jrsonnet-lsp-scenario/src/scenario_script/mod.rs @@ -0,0 +1,20 @@ +//! YAML scenario script parsing and compilation pipeline. +//! +//! This module is split into focused internal stages: +//! - [`parse`]: YAML deserialization boundary and top-level error type. +//! - [`compile`]: converts script steps into executable [`crate::scenario::ScenarioStep`] values. +//! - [`markers`]: inline marker parsing (`[[name:text]]`, `((name:before|after))`) and resolution. +//! - [`inputs`]: typed shorthand decoders for LSP payloads used by expect steps. +//! - [`registry`]: request alias/id tracking so expects match the intended requests. +//! - [`paths`]: base-dir-relative path and file URI helpers. +//! +//! External callers should use [`parse_scenario_yaml`]. + +mod compile; +mod inputs; +mod markers; +mod parse; +mod paths; +mod registry; + +pub use parse::{parse_scenario_yaml, ParseScenarioError}; diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_script/parse.rs b/crates/jrsonnet-lsp-scenario/src/scenario_script/parse.rs new file mode 100644 index 00000000..bb65fe03 --- /dev/null +++ b/crates/jrsonnet-lsp-scenario/src/scenario_script/parse.rs @@ -0,0 +1,152 @@ +//! Public entry point for parsing scenario YAML. +//! +//! `parse_scenario_yaml` does two explicit stages: +//! 1. 
Deserialize YAML into an internal script model (`ScenarioScript`). +//! 2. Compile that model into executable, strongly typed [`crate::scenario::Scenario`] data. +//! +//! The DSL is tuned for integration/e2e readability: +//! - file paths are relative to a caller-provided base directory +//! - `create.files` lets scenarios define their own workspace contents +//! - requests use optional aliases (`as`) instead of numeric IDs +//! - markers allow readable position/range references: +//! - `[[name:text]]` for named ranges +//! - `((name:before|after))` for named cursor positions +//! - expected payloads can reference markers via `positionOf` / `rangeOf` +//! +//! YAML parse errors and compile errors are separated in [`ParseScenarioError`]. + +use std::path::Path; + +use thiserror::Error; + +use super::compile::{CompileScenarioError, ScenarioScript}; + +/// Errors returned by [`parse_scenario_yaml`]. +#[derive(Debug, Error)] +pub enum ParseScenarioError { + /// The input was not valid scenario YAML. + #[error("parse scenario yaml: {source}")] + ParseYaml { + #[source] + source: serde_yaml_with_quirks::Error, + }, + /// YAML parsed, but failed semantic compilation to executable steps. + #[error(transparent)] + Compile(#[from] CompileScenarioError), +} + +/// Parse YAML scenario script and compile it to an executable scenario. +/// +/// `base_dir` is used to resolve relative `file`/`path` entries into absolute +/// on-disk paths and `file://` URIs inside the compiled scenario model. 
+pub fn parse_scenario_yaml( + input: &str, + base_dir: &Path, +) -> Result { + let parsed: ScenarioScript = serde_yaml_with_quirks::from_str(input) + .map_err(|source| ParseScenarioError::ParseYaml { source })?; + parsed.compile(base_dir).map_err(Into::into) +} + +#[cfg(test)] +mod tests { + use std::path::PathBuf; + + use assert_matches::assert_matches; + use jrsonnet_lsp_handlers::SemanticTokenTypeName; + + use super::{parse_scenario_yaml, ParseScenarioError}; + use crate::{ + scenario::{ + ExpectSemanticTokensFullStep, OpenStep, RequestSemanticTokensFullStep, Scenario, + ScenarioStep, WriteFileStep, + }, + semantic_tokens::{encode_semantic_tokens, semantic_modifiers, ExpectedSemanticToken}, + }; + + fn main_file(tmp: &tempfile::TempDir) -> PathBuf { + tmp.path().join("main.jsonnet") + } + + fn main_uri(tmp: &tempfile::TempDir) -> String { + format!("file://{}", main_file(tmp).display()) + } + + #[test] + fn parse_compiles_tokens_by_marker_to_semantic_tokens() { + let tmp = tempfile::tempdir().expect("tempdir"); + let script = r" +steps: +- step: create + files: + main.jsonnet: |- + local [[nameTok:name]] = 1; + name + open: [main.jsonnet] +- step: requestSemanticTokensFull + as: tokens + file: main.jsonnet +- step: expectSemanticTokensFull + request: tokens + result: + tokensByMarker: + - marker: nameTok + type: variable +"; + let actual = parse_scenario_yaml(script, tmp.path()).expect("parse should succeed"); + let uri = main_uri(&tmp); + let expected = Scenario::new(vec![ + ScenarioStep::WriteFile(WriteFileStep { + path: main_file(&tmp).display().to_string(), + text: "local name = 1;\nname".to_string(), + }), + ScenarioStep::Open(OpenStep { + uri: uri.clone(), + text: "local name = 1;\nname".to_string(), + language_id: "jsonnet".to_string(), + version: 1, + }), + ScenarioStep::RequestSemanticTokensFull(RequestSemanticTokensFullStep { id: 1, uri }), + ScenarioStep::ExpectSemanticTokensFull(ExpectSemanticTokensFullStep { + id: 1, + result: 
Some(lsp_types::SemanticTokensResult::Tokens( + encode_semantic_tokens(vec![ExpectedSemanticToken::new( + 0, + 6, + 4, + SemanticTokenTypeName::Variable, + semantic_modifiers(&[]), + )]), + )), + }), + ]); + assert_eq!(actual, expected); + } + + #[test] + fn parse_rejects_absolute_semantic_token_yaml_input() { + let tmp = tempfile::tempdir().expect("tempdir"); + let script = r" +steps: +- step: create + files: + main.jsonnet: |- + local [[nameTok:name]] = 1; + name + open: [main.jsonnet] +- step: requestSemanticTokensFull + as: tokens + file: main.jsonnet +- step: expectSemanticTokensFull + request: tokens + result: + tokens: + - line: 0 + start: 6 + len: 4 + type: variable +"; + let error = parse_scenario_yaml(script, tmp.path()).expect_err("parse should fail"); + assert_matches!(error, ParseScenarioError::ParseYaml { .. }); + } +} diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_script/paths.rs b/crates/jrsonnet-lsp-scenario/src/scenario_script/paths.rs new file mode 100644 index 00000000..92c4c12d --- /dev/null +++ b/crates/jrsonnet-lsp-scenario/src/scenario_script/paths.rs @@ -0,0 +1,28 @@ +//! Path helpers for scenario compilation. +//! +//! Scenario YAML always expresses file locations relative to a test-specific +//! base directory. These helpers centralize conversion to absolute filesystem +//! paths and `file://` URI strings. + +use std::path::{Path, PathBuf}; + +/// Resolve a scenario-relative path to an absolute OS path string. +pub(super) fn file_path(base_dir: &Path, relative_path: &str) -> String { + resolve_path(base_dir, relative_path) + .to_string_lossy() + .into_owned() +} + +/// Resolve a scenario-relative path to an absolute `file://` URI string. +/// +/// This intentionally performs no canonicalization or URL escaping; callers use +/// this for temporary test workspaces with already-controlled relative paths. 
+pub(super) fn file_uri(base_dir: &Path, relative_path: &str) -> String { + let absolute_path = resolve_path(base_dir, relative_path); + format!("file://{}", absolute_path.to_string_lossy()) +} + +/// Join `base_dir` with a relative scenario path. +fn resolve_path(base_dir: &Path, relative_path: &str) -> PathBuf { + base_dir.join(relative_path) +} diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_script/registry.rs b/crates/jrsonnet-lsp-scenario/src/scenario_script/registry.rs new file mode 100644 index 00000000..6704c5ea --- /dev/null +++ b/crates/jrsonnet-lsp-scenario/src/scenario_script/registry.rs @@ -0,0 +1,150 @@ +//! Request alias and ID bookkeeping for scenario compilation. +//! +//! Scenario scripts can name requests with `as: some_alias` and later refer to +//! them from `expect*` steps using `request: some_alias`. This registry maps +//! aliases to generated request IDs and also maintains per-kind FIFO queues for +//! unnamed request/expect pairs. + +use std::collections::{HashMap, VecDeque}; + +/// LSP request kinds supported by the scenario DSL. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub(super) enum RequestKind { + CodeAction, + References, + Definition, + Declaration, + TypeDefinition, + PrepareRename, + Rename, + Hover, + SignatureHelp, + Completion, + Formatting, + SemanticTokensFull, + SemanticTokensRange, + InlayHints, + DocumentSymbol, + WorkspaceSymbol, + CodeLens, + ExecuteCommand, +} + +impl RequestKind { + /// Canonical DSL step label for diagnostics and error messages. 
+ const fn label(self) -> &'static str { + match self { + Self::CodeAction => "requestCodeAction", + Self::References => "requestReferences", + Self::Definition => "requestDefinition", + Self::Declaration => "requestDeclaration", + Self::TypeDefinition => "requestTypeDefinition", + Self::PrepareRename => "requestPrepareRename", + Self::Rename => "requestRename", + Self::Hover => "requestHover", + Self::SignatureHelp => "requestSignatureHelp", + Self::Completion => "requestCompletion", + Self::Formatting => "requestFormatting", + Self::SemanticTokensFull => "requestSemanticTokensFull", + Self::SemanticTokensRange => "requestSemanticTokensRange", + Self::InlayHints => "requestInlayHints", + Self::DocumentSymbol => "requestDocumentSymbol", + Self::WorkspaceSymbol => "requestWorkspaceSymbol", + Self::CodeLens => "requestCodeLens", + Self::ExecuteCommand => "requestExecuteCommand", + } + } +} + +/// Tracks pending request IDs and named aliases while compiling one scenario. +#[derive(Debug)] +pub(super) struct RequestRegistry { + next_id: i32, + named: HashMap, + pending: HashMap>, +} + +impl RequestRegistry { + /// Create an empty request registry with IDs starting at `1`. + pub(super) fn new() -> Self { + Self { + next_id: 1, + named: HashMap::new(), + pending: HashMap::new(), + } + } + + /// Allocate a new request ID and optionally bind a unique alias. + /// + /// The new ID is always queued under `kind` so unnamed expects can claim in + /// issue order. 
+ pub(super) fn allocate( + &mut self, + kind: RequestKind, + name: Option, + ) -> Result { + let id = self.next_id; + self.next_id = self + .next_id + .checked_add(1) + .ok_or_else(|| "request id overflow".to_string())?; + + if let Some(name) = name { + if self.named.contains_key(&name) { + return Err(format!( + "duplicate request alias '{name}', request aliases must be unique" + )); + } + self.named.insert(name, (kind, id)); + } + + self.pending.entry(kind).or_default().push_back(id); + Ok(id) + } + + /// Claim the next pending request ID for `kind`. + /// + /// If `name` is provided, the claim is by alias and kind-checked. + /// Otherwise this pops from the per-kind FIFO queue. + pub(super) fn claim(&mut self, kind: RequestKind, name: Option<&str>) -> Result { + if let Some(name) = name { + let (named_kind, id) = self.named.get(name).copied().ok_or_else(|| { + format!( + "unknown request alias '{name}' for {}, define it with `as`", + kind.label() + ) + })?; + if named_kind != kind { + return Err(format!( + "request alias '{name}' has kind {}, cannot match {}", + named_kind.label(), + kind.label() + )); + } + let queue = self.pending.entry(kind).or_default(); + let Some(index) = queue.iter().position(|candidate| *candidate == id) else { + return Err(format!( + "request alias '{name}' for {} was already matched", + kind.label() + )); + }; + let Some(claimed) = queue.remove(index) else { + return Err(format!( + "failed to claim queued request alias '{name}' for {}", + kind.label() + )); + }; + return Ok(claimed); + } + + self.pending + .get_mut(&kind) + .and_then(VecDeque::pop_front) + .ok_or_else(|| { + format!( + "no pending {} to match; add the request first or reference it via `request`", + kind.label() + ) + }) + } +} diff --git a/crates/jrsonnet-lsp-scenario/src/semantic_tokens.rs b/crates/jrsonnet-lsp-scenario/src/semantic_tokens.rs new file mode 100644 index 00000000..6efe9172 --- /dev/null +++ b/crates/jrsonnet-lsp-scenario/src/semantic_tokens.rs @@ -0,0 
+1,80 @@ +use jrsonnet_lsp_handlers::{SemanticTokenModifierName, SemanticTokenTypeName}; +use lsp_types::{SemanticToken, SemanticTokens}; + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub struct ExpectedSemanticToken { + pub line: u32, + pub start: u32, + pub len: u32, + pub token_type: SemanticTokenTypeName, + pub modifiers: u32, +} + +impl ExpectedSemanticToken { + #[must_use] + pub const fn new( + line: u32, + start: u32, + len: u32, + token_type: SemanticTokenTypeName, + modifiers: u32, + ) -> Self { + Self { + line, + start, + len, + token_type, + modifiers, + } + } +} + +#[must_use] +pub fn semantic_token( + line: u32, + start: u32, + len: u32, + token_type: SemanticTokenTypeName, + modifiers: &[SemanticTokenModifierName], +) -> ExpectedSemanticToken { + ExpectedSemanticToken::new(line, start, len, token_type, semantic_modifiers(modifiers)) +} + +#[must_use] +pub fn semantic_modifiers(modifiers: &[SemanticTokenModifierName]) -> u32 { + modifiers + .iter() + .fold(0_u32, |acc, modifier| acc | modifier.as_bitset()) +} + +#[must_use] +pub fn encode_semantic_tokens(mut tokens: Vec) -> SemanticTokens { + tokens.sort_by_key(|token| (token.line, token.start)); + + let mut encoded = Vec::with_capacity(tokens.len()); + let mut prev_line = 0_u32; + let mut prev_start = 0_u32; + + for token in tokens { + let delta_line = token.line.saturating_sub(prev_line); + let delta_start = if delta_line == 0 { + token.start.saturating_sub(prev_start) + } else { + token.start + }; + encoded.push(SemanticToken { + delta_line, + delta_start, + length: token.len, + token_type: token.token_type.as_index(), + token_modifiers_bitset: token.modifiers, + }); + prev_line = token.line; + prev_start = token.start; + } + + SemanticTokens { + result_id: None, + data: encoded, + } +} diff --git a/crates/jrsonnet-lsp/Cargo.toml b/crates/jrsonnet-lsp/Cargo.toml index 8680b64e..b1171384 100644 --- a/crates/jrsonnet-lsp/Cargo.toml +++ b/crates/jrsonnet-lsp/Cargo.toml @@ -34,6 +34,7 @@ workspace = 
true [dev-dependencies] assert_matches = "1.5.0" criterion = { version = "0.5", features = ["html_reports"] } +jrsonnet-lsp-scenario = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-scenario" } jrsonnet-lsp-scope = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-scope" } rstest = "0.23" serde_json.workspace = true diff --git a/crates/jrsonnet-lsp/tests/e2e_annotated_tests.rs b/crates/jrsonnet-lsp/tests/e2e_annotated_tests.rs deleted file mode 100644 index c61c5107..00000000 --- a/crates/jrsonnet-lsp/tests/e2e_annotated_tests.rs +++ /dev/null @@ -1,527 +0,0 @@ -//! End-to-end tests using the annotation-based testing framework. -//! -//! These tests use embedded annotations to verify LSP behavior at specific positions. -//! -//! The column alignment is critical: the `^` marker's position after `##` indicates -//! the column on the previous code line being annotated (0-indexed). - -mod framework; - -use framework::{check, check_definition, check_hover}; -use rstest::rstest; - -// ============================================================================ -// Definition and Reference Tests -// ============================================================================ - -#[test] -fn test_local_variable_definition() { - // 'x' is at column 6: l(0)o(1)c(2)a(3)l(4)space(5)x(6) - // Second line 'x' is at column 0 - check_definition( - r"local x = 1; -## ^ def: x -x + 1 -##^ use: x", - ); -} - -#[test] -fn test_multiple_usages() { - // 'x' def at column 6 - // First 'x' use at column 0 - // Second 'x' use at column 4: x(0)space(1)+(2)space(3)x(4) - check_definition( - r"local x = 1; -## ^ def: x -x + x -##^ use: x -## ^ use: x", - ); -} - -#[test] -fn test_function_parameters() { - // 'a' at column 8: local(5)space(5)f(6)((7)a(8) - // 'b' at column 11: ,(9)space(10)b(11) - check_definition( - r"local f(a, b) = a + b; -## ^ def: a -## ^ def: b", - ); -} - -#[test] -fn test_function_param_usage() { - // 'x' def at column 10: local(5)space(5)add(3)((9)x(10) - // 'y' 
def at column 13 - // 'x' use at column 2 (after 2-space indent) - // 'y' use at column 6 - check_definition( - r"local add(x, y) = -## ^ def: x -## ^ def: y - x + y; -## ^ use: x -## ^ use: y -add(1, 2)", - ); -} - -#[test] -fn test_nested_local() { - // 'outer' at column 6 - // 'inner' at column 8 (after 2-space indent) - // 'inner' use at column 2 - check_definition( - r"local outer = -## ^ def: outer - local inner = 1; -## ^ def: inner - inner + 1; -## ^ use: inner -outer", - ); -} - -#[test] -fn test_shadowing_different_scopes() { - // First 'x' at column 6 - // Second 'x' (param) at column 8 - check_definition( - r"local x = 1; -## ^ def: x -local f(x) = -## ^ def: x - x; -x", - ); -} - -#[test] -fn test_object_local() { - // 'helper' def at column 8 (after 2-space indent) - // 'helper' use at column 9: 2 spaces + "value: " = 9 - check_definition( - r"{ - local helper = 42, -## ^ def: helper - value: helper, -## ^ use: helper -}", - ); -} - -#[test] -fn test_for_comprehension_binding() { - // First 'x' at column 1 - // 'x' def (after 'for') at column 7 - check_definition( - r"[x for x in [1,2,3]] -## ^ use: x -## ^ def: x", - ); -} - -// ============================================================================ -// Hover Tests -// ============================================================================ - -#[test] -fn test_hover_number_literal() { - // '42' starts at column 10 - check_hover( - r"local x = 42; -## ^ type-exact: number", - ); -} - -#[test] -fn test_hover_string_literal() { - // '"hello"' starts at column 10 - check_hover( - r#"local s = "hello"; -## ^ type-exact: string"#, - ); -} - -#[test] -fn test_hover_boolean() { - // 'true' starts at column 10, LSP infers literal type 'true' - check_hover( - r"local b = true; -## ^ type-exact: true", - ); -} - -#[test] -fn test_hover_null() { - // 'null' starts at column 10 - check_hover( - r"local n = null; -## ^ type-exact: null", - ); -} - -#[test] -fn test_hover_array() { - // Hover on the 
variable name 'arr' at definition site shows the inferred type. - check_hover( - r"local arr = [1, 2, 3]; -## ^ type-exact: [number, number, number]", - ); -} - -#[test] -fn test_hover_object() { - // Hover on the variable name 'obj' at definition site shows the inferred type. - check_hover( - r"local obj = { a: 1 }; -## ^ type-exact: { a }", - ); -} - -#[test] -fn test_hover_function() { - // Hover on the function name 'f' at column 6 - // Note: Currently infers as 'any' - could be improved to show function type - check_hover( - r"local f(x) = x; -## ^ type-exact: any", - ); -} - -#[test] -fn test_hover_std_function() { - // 'std' at column 0 - std is an object containing stdlib functions - check_hover( - r"std.length -##^ type-exact: object", - ); -} - -#[test] -fn test_hover_exact_number_markdown() { - check_hover( - r"local x = 42; -## ^ hover-exact: `number`", - ); -} - -// ============================================================================ -// Flow Typing Tests -// ============================================================================ - -fn check_flow_case(source: &str) { - check(source); -} - -macro_rules! 
flow_case_table { - ($name:ident { $( $case:ident => $source:expr, )+ }) => { - #[rstest] - $(#[case::$case($source)])+ - fn $name(#[case] source: &str) { - check_flow_case(source); - } - }; -} - -flow_case_table!(test_flow_guards_and_literals { -union_guard_refines_both_branches => r#"local f(x) = - assert std.isNumber(x) || std.isString(x); - if std.isNumber(x) then - x + 1 -## ^ type: number - else - std.length(x); -## ^ type-exact: string - -f(3) + f("hi")"#, -negated_guard_refines_branches => r#"local f(x) = - assert std.isNumber(x) || std.isString(x); - if !std.isNumber(x) then - std.length(x) -## ^ type-exact: string - else - x + 1 -## ^ type: number - -f(3) + f("hi")"#, -null_guard_refines_non_null_branch => r"local f(x) = - assert x == null || std.isString(x); - if x != null then - std.length(x) -## ^ type-exact: string - else - 0; - -f(null)", -partial_numeric_predicates_refine_arithmetic_paths => r"local f(x) = - if std.isDecimal(x) then - x + 0.5 -## ^ type-exact: number - else if std.isInteger(x) then - x + 1 -## ^ type-exact: number - else - null; - -local n = f(5); -if n == null then 5 else n + 2 -## ^ type-exact: number", -literal_string_equality_after_string_assert => r#"local f(x) = - assert std.isString(x); - if x == "hi" then - "hey" - else if x == "bye" then - "see ya" - else - x -## ^ type-exact: string - -std.length(f("hello"))"#, -literal_string_equality_partial_without_assert => r#"local f(x) = - if x == "hi" then - std.length(x) -## ^ type-exact: "hi" - else if x == "bye" then - std.length(x) -## ^ type-exact: "bye" - else - x -## ^ type-exact: any - -std.length(f("hello"))"#, -null_and_length_guard => r#"local f(x) = - assert x == null || std.isString(x); - if x != null && std.length(x) >= 10 then - x -## ^ type-exact: string - else - "Hi"; - -f(null)"#, -null_branch_split => r"local f(x) = - assert x == null || std.isNumber(x); - if x != null then - x -## ^ type-exact: number - else - assert x == null; - x -## ^ type-exact: null - 
-[f(null), f(3)]", -}); - -flow_case_table!(test_flow_length_and_arity { - length_eq_refines_array_to_tuple => r"local f(xs) = - assert std.isArray(xs) && std.length(xs) == 3; - xs -##^ type: [any, any, any] - -f([1, 2, 3])", - length_known_object_refines_impossible_branches => r#"local x = { a: 1, b: "hi" }; - -if std.length(x) == 1 then - x -## ^ type-exact: never -else if std.length(x) == 3 then - x -## ^ type-exact: never -else if std.length(x) == 2 then - x.a -## ^ type-exact: number"#, - length_known_function_refines_impossible_branches => r"local f(x, y) = y + 1; - -if std.length(f) == 1 then - f -## ^ type-exact: never -else if std.length(f) == 3 then - f -## ^ type-exact: never -else if std.length(f) == 2 then - f -## ^ type: function(", - length_unknown_function_refines_arity => r"local f(x) = - if std.isFunction(x) then - if std.length(x) == 2 then - x -## ^ type-exact: function(arg0, arg1) - else - x -## ^ type-exact: function() - else - null; - -f(function(a, b) a + b)", - function_length_assert_narrows_arity => r"local wrap(f) = - assert std.isFunction(f); - assert std.length(f) == 2; - f -## ^ type-exact: function(arg0, arg1) - -wrap(function(a, b) a + b)", - length_unknown_object_composition_refines_by_shape => r#"local f(x) = - if std.isObject(x) then - if "a" in x && std.isString(x.a) then - if "b" in x && std.isNumber(x.b) then - if std.length(x) == 2 then - std.length(x.a) + x.b -## ^^^^^^^^^^^^^^^^^^^^^^^^ type-exact: number - else if std.length(x) == 1 then - x -## ^ type-exact: never - else if std.length(x) == 3 then - x.b -## ^ type-exact: number - else - x.b -## ^ type-exact: number - else - 0 - else - 0 - else - 0; - -f({ a: "hello", b: 4 })"#, - length_unknown_function_allows_typed_call_site => r"local f(x) = - if std.isFunction(x) then - if std.length(x) == 2 then - x(3, 5) -## ^ type: function(arg0, arg1) - else - x -## ^ type-exact: function() - else - x; - -f(function(a, b) a + b)", -}); - 
-flow_case_table!(test_flow_objects_and_membership { - object_field_presence_refines_object_shape => r#"local f(obj) = - assert std.isObject(obj); - if "foo" in obj then - obj -## ^^^ type: foo - else - 0; - -f({ foo: 1 })"#, - object_membership_known_union => r#"local f(b) = - local obj = if b then { foo: 3 } else {}; - if "foo" in obj then - obj -## ^^^ type: foo - else - 4; - -[f(true), f(false)]"#, - object_field_type_guards_refine_nested_paths => r#"local f(obj) = - assert std.isObject(obj); - if "a" in obj then - if std.isNumber(obj.a) then - obj.a + 7 -## ^^^ type-exact: number - else - assert !std.isBoolean(obj.a) && obj.a != null; - std.length(obj.a) -## ^ type: string -## ^ type: array -## ^ type: object -## ^ type: function - else - std.length(obj); - -[f({ b: null }), f({ a: "hello" }), f({ a: 4 })]"#, - negated_membership_and_is_precise => r#"local f(x) = - if std.isObject(x) && "foo" in x && !("foo" in x && "bar" in x) then - x.foo -## ^^^ type-exact: any - else - null; - -f({ foo: 1 })"#, - negated_membership_or_eliminates_required_field => r#"local f(x) = - if std.isObject(x) && "foo" in x && !("foo" in x || "bar" in x) then - x.foo -## ^^^ type-exact: never - else - null; - -f({ foo: 1 })"#, - negated_membership_or_with_length_still_eliminates_field => r#"local f(x) = - if std.isObject(x) && "foo" in x && !("foo" in x || std.length(x) == 5) then - x.foo -## ^^^ type-exact: never - else - null; - -f({ foo: 1 })"#, - negated_membership_or_is_conservative => r#"local f(x) = - if std.isObject(x) && "foo" in x && !("foo" in x || "bar" in x) then - x -## ^ type-exact: object - else - null; - -f({ foo: 1 })"#, - object_field_literal_chain_preserves_base_type => r#"local f(x) = - assert std.isObject(x) && std.isString(x.t); - if x.t == "foo" then - 1 - else if x.t == "bar" then - 2 - else if x.t == "quz" then - 3 - else - std.length(x.t) -## ^ type-exact: string - -f({ t: "abc" })"#, -}); - -flow_case_table!(test_flow_collections { - 
higher_order_all_map_refines_array_elements => r"local f(arr) = - if std.all(std.map(std.isNumber, arr)) then - arr -## ^^^ type-exact: array[number] - else - arr; - -f([1, 2, 3])", - filter_with_predicate_refines_output_array => r#"local f(xs) = - assert std.isArray(xs); - assert std.all(std.map(function(x) std.isNumber(x) || std.isString(x), xs)); - local ys = std.filter(std.isNumber, xs); -## ^^ type: array[number | string] - ys -##^^ type: array[number] - -f([1, "x", 2])"#, - filter_map_predicate_refines_output_array => r#"local inc(x) = - assert std.isNumber(x); - x + 1; - -local f(xs) = - assert std.isArray(xs); - assert std.all(std.map(function(x) std.isNumber(x) || std.isString(x), xs)); - local ys = std.filterMap(std.isNumber, inc, xs); -## ^^ type: array[number | string] - ys -##^^ type-exact: array[number]; - -f([1, "x", 2])"#, - conditional_comprehension_refines_element_type => r#"local f(xs) = - assert std.isArray(xs); - assert std.all(std.map(function(x) x == null || std.isNumber(x), xs)); - local zs = [x for x in xs if x != null]; -## ^^ type-exact: array[number] - local ys = [(if x == null then "no" else x - 1) for x in xs]; -## ^^ type-exact: array[number | string] - { zs: zs, ys: ys }; - -f([1, null, 2])"#, -}); diff --git a/crates/jrsonnet-lsp/tests/e2e_scenario_tests.rs b/crates/jrsonnet-lsp/tests/e2e_scenario_tests.rs new file mode 100644 index 00000000..4276b2dc --- /dev/null +++ b/crates/jrsonnet-lsp/tests/e2e_scenario_tests.rs @@ -0,0 +1,18 @@ +//! Integration-test entry point for test framework modules. +//! +//! E2E coverage is now driven by YAML scenarios in `tests/scenarios/**`. 
+ +use std::path::PathBuf; + +use jrsonnet_lsp_scenario::{run_yaml_fixture, ScenarioFixtureError}; +use rstest::rstest; + +#[rstest] +fn scenario_yaml_fixture( + #[files("tests/scenarios/**/*.yaml")] fixture: PathBuf, +) -> Result<(), ScenarioFixtureError> { + run_yaml_fixture(&fixture, |connection| { + let server = jrsonnet_lsp::server::Server::new(connection); + let _ = server.run(); + }) +} diff --git a/crates/jrsonnet-lsp/tests/framework/assertions.rs b/crates/jrsonnet-lsp/tests/framework/assertions.rs deleted file mode 100644 index c7d96434..00000000 --- a/crates/jrsonnet-lsp/tests/framework/assertions.rs +++ /dev/null @@ -1,624 +0,0 @@ -//! Test assertion helpers for annotated tests. -//! -//! Provides functions to verify that handler results match annotation expectations. - -use std::sync::Arc; - -use jrsonnet_lsp_check::{lint, LintConfig}; -use jrsonnet_lsp_document::{ - position_to_offset, token_at_offset, ByteOffset, CharOffset, DocVersion, Document, Line, - LspPosition, -}; -use jrsonnet_lsp_handlers as handlers; -use jrsonnet_lsp_inference::TypeAnalysis; -use jrsonnet_lsp_scope::{find_definition_range, is_variable_reference}; -use jrsonnet_lsp_types::GlobalTyStore; -use jrsonnet_rowan_parser::AstNode; - -use crate::framework::parser::{Annotation, AnnotationKind, ParsedSource}; - -/// Result of verifying annotations against actual behavior. -#[derive(Debug)] -pub struct VerificationResult { - /// Whether all checks passed. - pub passed: bool, - /// List of failures with descriptions. - pub failures: Vec, - /// Number of checks that passed. - pub passed_count: usize, - /// Total number of checks. 
- pub total_count: usize, -} - -impl VerificationResult { - fn new() -> Self { - Self { - passed: true, - failures: Vec::new(), - passed_count: 0, - total_count: 0, - } - } - - fn add_failure(&mut self, msg: String) { - self.passed = false; - self.failures.push(msg); - self.total_count += 1; - } - - fn add_success(&mut self) { - self.passed_count += 1; - self.total_count += 1; - } -} - -/// Context for running annotated tests. -pub struct TestContext { - pub document: Document, - pub analysis: TypeAnalysis, - pub diagnostics: Vec, -} - -impl TestContext { - /// Create a new test context from parsed source. - pub fn new(parsed: &ParsedSource) -> Self { - let document = Document::new(parsed.source.clone(), DocVersion::new(1)); - let global_types = Arc::new(GlobalTyStore::new()); - let analysis = TypeAnalysis::analyze_with_global(&document, Arc::clone(&global_types)); - let uri: lsp_types::Uri = "file:///annotated-test.jsonnet" - .parse() - .expect("annotated test URI should parse"); - let diagnostics = lint(&document, &analysis, &LintConfig::all(), &uri); - Self { - document, - analysis, - diagnostics, - } - } - - /// Get position from line and column. - pub fn position(line: u32, column: u32) -> LspPosition { - LspPosition { - line: Line(line), - character: CharOffset(column), - } - } - - /// Get byte offset from position. - pub fn offset(&self, line: u32, column: u32) -> Option { - let pos = Self::position(line, column); - position_to_offset(self.document.line_index(), pos, self.document.text()) - } -} - -/// Verify all annotations in a parsed source. 
-pub fn verify_annotations(parsed: &ParsedSource) -> VerificationResult { - let ctx = TestContext::new(parsed); - let mut result = VerificationResult::new(); - - for ann in &parsed.annotations { - match &ann.kind { - AnnotationKind::Definition(name) => { - verify_definition(&ctx, ann, name, &mut result); - } - AnnotationKind::Usage(name) => { - verify_usage(&ctx, ann, name, &mut result); - } - AnnotationKind::Hover(expected) => { - verify_hover(&ctx, ann, expected, TextMatchMode::Contains, &mut result); - } - AnnotationKind::HoverExact(expected) => { - verify_hover(&ctx, ann, expected, TextMatchMode::Exact, &mut result); - } - AnnotationKind::Type(expected) => { - verify_type(&ctx, ann, expected, TextMatchMode::Contains, &mut result); - } - AnnotationKind::TypeExact(expected) => { - verify_type(&ctx, ann, expected, TextMatchMode::Exact, &mut result); - } - AnnotationKind::Error(expected) => { - verify_error(&ctx, ann, expected, &mut result); - } - AnnotationKind::Completion(expected) => { - verify_completion(&ctx, ann, expected, &mut result); - } - AnnotationKind::NoCompletion(unexpected) => { - verify_no_completion(&ctx, ann, unexpected, &mut result); - } - AnnotationKind::Goto { line, column } => { - verify_goto(&ctx, ann, *line, *column, &mut result); - } - } - } - - result -} - -/// Verify that a definition exists at the annotated position. 
-fn verify_definition( - ctx: &TestContext, - ann: &Annotation, - name: &str, - result: &mut VerificationResult, -) { - let Some(offset) = ctx.offset(ann.line, ann.column) else { - result.add_failure(format!( - "{}:{}: could not convert position to offset", - ann.line, ann.column - )); - return; - }; - - let Some(token) = token_at_offset(ctx.document.ast().syntax(), offset) else { - result.add_failure(format!( - "{}:{}: no token at position (def: {})", - ann.line, ann.column, name - )); - return; - }; - - // Verify the token text matches the expected name - if token.text() != name { - result.add_failure(format!( - "{}:{}: expected definition of '{}', found token '{}'", - ann.line, - ann.column, - name, - token.text() - )); - return; - } - - // Verify this is a definition site (not a usage) - if is_variable_reference(&token) { - result.add_failure(format!( - "{}:{}: expected definition of '{}', but found a usage/reference", - ann.line, ann.column, name - )); - return; - } - - result.add_success(); -} - -/// Verify that a usage/reference exists at the annotated position. 
-fn verify_usage(ctx: &TestContext, ann: &Annotation, name: &str, result: &mut VerificationResult) { - let Some(offset) = ctx.offset(ann.line, ann.column) else { - result.add_failure(format!( - "{}:{}: could not convert position to offset", - ann.line, ann.column - )); - return; - }; - - let Some(token) = token_at_offset(ctx.document.ast().syntax(), offset) else { - result.add_failure(format!( - "{}:{}: no token at position (use: {})", - ann.line, ann.column, name - )); - return; - }; - - // Verify the token text matches - if token.text() != name { - result.add_failure(format!( - "{}:{}: expected usage of '{}', found token '{}'", - ann.line, - ann.column, - name, - token.text() - )); - return; - } - - // Verify this is a variable reference - if !is_variable_reference(&token) { - result.add_failure(format!( - "{}:{}: expected usage/reference of '{}', but token is not a variable reference", - ann.line, ann.column, name - )); - return; - } - - // Verify it has a definition (can be resolved) - if find_definition_range(&token, name).is_none() { - result.add_failure(format!( - "{}:{}: usage of '{}' does not resolve to any definition", - ann.line, ann.column, name - )); - return; - } - - result.add_success(); -} - -/// Verify that diagnostics at this position include expected message text. 
-fn verify_error( - ctx: &TestContext, - ann: &Annotation, - expected: &str, - result: &mut VerificationResult, -) { - let pos = TestContext::position(ann.line, ann.column); - let matching: Vec<&lsp_types::Diagnostic> = ctx - .diagnostics - .iter() - .filter(|diag| range_contains_position(diag.range, pos)) - .collect(); - - if matching.is_empty() { - result.add_failure(format!( - "{}:{}: expected error containing '{}', but no diagnostics were reported here", - ann.line, ann.column, expected - )); - return; - } - - if matching.iter().any(|diag| diag.message.contains(expected)) { - result.add_success(); - return; - } - - let messages = matching - .iter() - .map(|diag| diag.message.as_str()) - .collect::>() - .join(" | "); - result.add_failure(format!( - "{}:{}: expected error containing '{}', got diagnostics: {}", - ann.line, ann.column, expected, messages - )); -} - -fn range_contains_position(range: lsp_types::Range, pos: LspPosition) -> bool { - let pos: lsp_types::Position = pos.into(); - let starts_before_or_at = pos.line > range.start.line - || (pos.line == range.start.line && pos.character >= range.start.character); - let ends_after_or_at = pos.line < range.end.line - || (pos.line == range.end.line && pos.character <= range.end.character); - starts_before_or_at && ends_after_or_at -} - -#[derive(Clone, Copy)] -enum TextMatchMode { - Contains, - Exact, -} - -fn hover_contents_text(hover: &lsp_types::Hover) -> String { - match &hover.contents { - lsp_types::HoverContents::Scalar(s) => match s { - lsp_types::MarkedString::String(text) => text.clone(), - lsp_types::MarkedString::LanguageString(ls) => ls.value.clone(), - }, - lsp_types::HoverContents::Array(arr) => arr - .iter() - .map(|s| match s { - lsp_types::MarkedString::String(text) => text.clone(), - lsp_types::MarkedString::LanguageString(ls) => ls.value.clone(), - }) - .collect::>() - .join("\n"), - lsp_types::HoverContents::Markup(m) => m.value.clone(), - } -} - -fn leading_backtick_type(contents: &str) 
-> Option { - let trimmed = contents.trim_start(); - let rest = trimmed.strip_prefix('`')?; - let (ty, _) = rest.split_once('`')?; - Some(ty.to_string()) -} - -/// Verify that hover content matches expected text. -fn verify_hover( - ctx: &TestContext, - ann: &Annotation, - expected: &str, - mode: TextMatchMode, - result: &mut VerificationResult, -) { - let pos = TestContext::position(ann.line, ann.column); - - let hover = handlers::hover(&ctx.document, pos, &ctx.analysis); - - match hover { - Some(h) => { - let contents = hover_contents_text(&h); - let is_match = match mode { - TextMatchMode::Contains => contents.contains(expected), - TextMatchMode::Exact => contents == expected, - }; - - if is_match { - result.add_success(); - } else { - let expected_text = match mode { - TextMatchMode::Contains => format!("contain '{expected}'"), - TextMatchMode::Exact => format!("equal '{expected}'"), - }; - result.add_failure(format!( - "{}:{}: hover should {}, got '{}'", - ann.line, ann.column, expected_text, contents - )); - } - } - None => { - result.add_failure(format!( - "{}:{}: expected hover containing '{}', got no hover", - ann.line, ann.column, expected - )); - } - } -} - -/// Verify that the inferred type at position matches expected. 
-fn verify_type( - ctx: &TestContext, - ann: &Annotation, - expected: &str, - mode: TextMatchMode, - result: &mut VerificationResult, -) { - let Some(offset) = ctx.offset(ann.line, ann.column) else { - result.add_failure(format!( - "{}:{}: could not convert position to offset", - ann.line, ann.column - )); - return; - }; - - let direct_type = ctx - .analysis - .type_at_position(ctx.document.ast().syntax(), offset.into()) - .map(|ty| ctx.analysis.display(ty)); - let Some(mut actual) = direct_type else { - result.add_failure(format!( - "{}:{}: expected type '{}', but no inferred type was available", - ann.line, ann.column, expected - )); - return; - }; - - if actual == "any" { - let pos = TestContext::position(ann.line, ann.column); - let hover_type = handlers::hover(&ctx.document, pos, &ctx.analysis) - .as_ref() - .map(hover_contents_text) - .and_then(|contents| leading_backtick_type(&contents)); - if let Some(hover_type) = hover_type { - actual = hover_type; - } - } - - let is_match = match mode { - TextMatchMode::Contains => actual.contains(expected), - TextMatchMode::Exact => actual == expected, - }; - - if is_match { - result.add_success(); - return; - } - - let expected_text = match mode { - TextMatchMode::Contains => format!("contain '{expected}'"), - TextMatchMode::Exact => format!("equal '{expected}'"), - }; - result.add_failure(format!( - "{}:{}: type should {}, got '{}'", - ann.line, ann.column, expected_text, actual - )); -} - -/// Verify that completions include expected items. 
-fn verify_completion( - ctx: &TestContext, - ann: &Annotation, - expected: &[String], - result: &mut VerificationResult, -) { - let pos = TestContext::position(ann.line, ann.column); - - let completion = handlers::completion(&ctx.document, pos, None, &ctx.analysis); - - match completion { - Some(list) => { - let items: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); - - for exp in expected { - if items.contains(&exp.as_str()) { - result.add_success(); - } else { - result.add_failure(format!( - "{}:{}: completion should include '{}', available: {:?}", - ann.line, - ann.column, - exp, - &items[..items.len().min(10)] - )); - } - } - } - None => { - result.add_failure(format!( - "{}:{}: expected completions {:?}, got none", - ann.line, ann.column, expected - )); - } - } -} - -/// Verify that completions do NOT include a specific item. -fn verify_no_completion( - ctx: &TestContext, - ann: &Annotation, - unexpected: &str, - result: &mut VerificationResult, -) { - let pos = TestContext::position(ann.line, ann.column); - - let completion = handlers::completion(&ctx.document, pos, None, &ctx.analysis); - - match completion { - Some(list) => { - let has_item = list.items.iter().any(|i| i.label == unexpected); - if has_item { - result.add_failure(format!( - "{}:{}: completion should NOT include '{}'", - ann.line, ann.column, unexpected - )); - } else { - result.add_success(); - } - } - None => { - // No completions means the unexpected item is not there - result.add_success(); - } - } -} - -/// Verify that go-to-definition jumps to the expected position. 
-fn verify_goto( - ctx: &TestContext, - ann: &Annotation, - expected_line: u32, - expected_col: u32, - result: &mut VerificationResult, -) { - let pos = TestContext::position(ann.line, ann.column); - - let goto_result = handlers::goto_definition(&ctx.document, pos); - - match goto_result { - Some(handlers::DefinitionResult::Local(range)) => { - let start = range.start; - if start.line == expected_line && start.character == expected_col { - result.add_success(); - } else { - result.add_failure(format!( - "{}:{}: go-to-definition should jump to {}:{}, got {}:{}", - ann.line, ann.column, expected_line, expected_col, start.line, start.character - )); - } - } - Some(_) => { - result.add_failure(format!( - "{}:{}: expected local definition, got import definition", - ann.line, ann.column - )); - } - None => { - result.add_failure(format!( - "{}:{}: expected go-to-definition to {}:{}, got no result", - ann.line, ann.column, expected_line, expected_col - )); - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::framework::parser::parse_annotated_source; - - #[test] - fn test_verify_definition_success() { - // 'x' is at column 6 - let source = r"local x = 1; -## ^ def: x"; - let parsed = parse_annotated_source(source); - let result = verify_annotations(&parsed); - assert!(result.passed, "Failures: {:?}", result.failures); - assert_eq!(result.passed_count, 1); - } - - #[test] - fn test_verify_usage_success() { - // 'x' usage is at column 0 - let source = r"local x = 1; -x + 1 -##^ use: x"; - let parsed = parse_annotated_source(source); - let result = verify_annotations(&parsed); - assert!(result.passed, "Failures: {:?}", result.failures); - assert_eq!(result.passed_count, 1); - } - - #[test] - fn test_verify_definition_and_usage() { - // 'x' def at column 6, first usage at column 0, second at column 4 - let source = r"local x = 1; -## ^ def: x -x + x -##^ use: x -## ^ use: x"; - let parsed = parse_annotated_source(source); - let result = 
verify_annotations(&parsed); - assert!(result.passed, "Failures: {:?}", result.failures); - assert_eq!(result.passed_count, 3); - } - - #[test] - fn test_verify_error_success() { - let source = r"(1).foo -## ^ error: field access on non-object type"; - let parsed = parse_annotated_source(source); - let result = verify_annotations(&parsed); - assert!(result.passed, "Failures: {:?}", result.failures); - assert_eq!(result.passed_count, 1); - } - - #[test] - fn test_verify_error_message_mismatch() { - let source = r"(1).foo -## ^ error: completely different message"; - let parsed = parse_annotated_source(source); - let result = verify_annotations(&parsed); - assert!(!result.passed, "expected verification to fail"); - assert_eq!(result.passed_count, 0); - assert_eq!(result.total_count, 1); - assert!( - result.failures[0].contains("completely different message"), - "unexpected failure message: {:?}", - result.failures - ); - } - - #[test] - fn test_verify_hover_exact_success() { - let source = r"local x = 42; -## ^ hover-exact: `number`"; - let parsed = parse_annotated_source(source); - let result = verify_annotations(&parsed); - assert!(result.passed, "Failures: {:?}", result.failures); - assert_eq!(result.passed_count, 1); - } - - #[test] - fn test_verify_type_exact_success() { - let source = r"local x = 42; -## ^ type-exact: number"; - let parsed = parse_annotated_source(source); - let result = verify_annotations(&parsed); - assert!(result.passed, "Failures: {:?}", result.failures); - assert_eq!(result.passed_count, 1); - } - - #[test] - fn test_verify_type_exact_mismatch() { - let source = r"local x = 42; -## ^ type-exact: string"; - let parsed = parse_annotated_source(source); - let result = verify_annotations(&parsed); - assert!(!result.passed, "expected verification to fail"); - assert_eq!(result.passed_count, 0); - assert_eq!(result.total_count, 1); - assert!( - result.failures[0].contains("type should equal 'string'"), - "unexpected failure message: {:?}", - 
result.failures - ); - } -} diff --git a/crates/jrsonnet-lsp/tests/framework/mod.rs b/crates/jrsonnet-lsp/tests/framework/mod.rs deleted file mode 100644 index 04673fb5..00000000 --- a/crates/jrsonnet-lsp/tests/framework/mod.rs +++ /dev/null @@ -1,78 +0,0 @@ -//! Annotation-based E2E testing framework for Jsonnet LSP. -//! -//! This framework allows writing self-documenting tests with embedded annotations -//! that specify expected behavior at specific positions in the code. -//! -//! # Example -//! -//! ```ignore -//! check_definition(r" -//! local x = 1; -//! ## ^ def: x -//! x + 1 -//! ## ^ use: x -//! "); -//! ``` -//! -//! # Annotation Syntax -//! -//! Annotations are lines starting with `##` followed by spaces and a `^` marker. -//! The `^` indicates the column on the previous line being annotated. -//! -//! | Annotation | Meaning | -//! |------------|---------| -//! | `## ^ def: x` | Definition of `x` is at this position | -//! | `## ^ use: x` | Usage/reference to `x` is at this position | -//! | `## ^ hover: text` | Hover at this position contains "text" | -//! | `## ^ hover-exact: text` | Hover at this position equals "text" | -//! | `## ^ type: T` | Type at this position is `T` | -//! | `## ^ type-exact: T` | Type at this position equals `T` | -//! | `## ^ error: msg` | Error at this position contains "msg" | -//! | `## ^ completion: a, b` | Completions include `a` and `b` | -//! | `## ^ no-completion: x` | Completions do NOT include `x` | -//! | `## ^ goto: 5:10` | Go-to-definition jumps to line 5, col 10 | - -pub mod assertions; -pub mod parser; -pub mod scenario; -pub mod scenario_runner; - -use std::fmt::Write as _; - -pub use assertions::verify_annotations; -pub use parser::parse_annotated_source; - -/// Run annotated test and panic on failure. -/// -/// This is the main entry point for annotated tests. 
-pub fn check(source: &str) { - let parsed = parse_annotated_source(source); - let result = verify_annotations(&parsed); - - if !result.passed { - let mut msg = format!( - "Annotated test failed ({}/{} checks passed):\n", - result.passed_count, result.total_count - ); - for failure in &result.failures { - msg.push_str(" - "); - msg.push_str(failure); - msg.push('\n'); - } - msg.push_str("\nSource:\n"); - for (i, line) in parsed.source.lines().enumerate() { - let _ = writeln!(msg, "{i:3}| {line}"); - } - panic!("{}", msg); - } -} - -/// Check definition and usage annotations. -pub fn check_definition(source: &str) { - check(source); -} - -/// Check hover annotations. -pub fn check_hover(source: &str) { - check(source); -} diff --git a/crates/jrsonnet-lsp/tests/framework/parser.rs b/crates/jrsonnet-lsp/tests/framework/parser.rs deleted file mode 100644 index 1756a8aa..00000000 --- a/crates/jrsonnet-lsp/tests/framework/parser.rs +++ /dev/null @@ -1,273 +0,0 @@ -//! Annotation parser for test sources. -//! -//! Parses embedded annotations in test code that specify expected behavior. -//! -//! Annotation format: -//! ```text -//! local x = 1; -//! ## ^ def: x -//! x + 1 -//! ## ^ use: x -//! ``` -//! -//! The `##` prefix marks annotation lines. The `^` marker indicates -//! the column on the previous code line being annotated. - -use std::collections::HashMap; - -/// A parsed annotation from test source. -#[derive(Debug, Clone)] -pub struct Annotation { - /// Line number (0-indexed) of the code line being annotated. - pub line: u32, - /// Column (0-indexed) indicated by the `^` marker. - pub column: u32, - /// The annotation kind and value. - pub kind: AnnotationKind, -} - -/// The type of annotation. 
-#[derive(Debug, Clone, PartialEq, Eq)] -pub enum AnnotationKind { - /// `^ def: name` - expect definition of `name` here - Definition(String), - /// `^ use: name` - expect reference to `name` here - Usage(String), - /// `^ hover: text` - expect hover to contain `text` - Hover(String), - /// `^ hover-exact: text` - expect hover to equal `text` - HoverExact(String), - /// `^ type: T` - expect type `T` at this position - Type(String), - /// `^ type-exact: T` - expect exact inferred type `T` at this position - TypeExact(String), - /// `^ error: msg` - expect error containing `msg` - Error(String), - /// `^ completion: item1, item2` - expect these completions - Completion(Vec), - /// `^ no-completion: item` - expect this NOT in completions - NoCompletion(String), - /// `^ goto: line:col` - expect go-to-definition to jump here - Goto { line: u32, column: u32 }, -} - -/// Result of parsing annotated source. -#[derive(Debug)] -pub struct ParsedSource { - /// The clean source code (annotations stripped). - pub source: String, - /// Parsed annotations with their positions. - pub annotations: Vec, - /// Map from (line, col) to annotation for quick lookup. - pub position_map: HashMap<(u32, u32), Vec>, -} - -impl ParsedSource { - /// Get annotations at a specific position. - pub fn annotations_at(&self, line: u32, col: u32) -> &[Annotation] { - self.position_map - .get(&(line, col)) - .map_or(&[], std::vec::Vec::as_slice) - } -} - -/// Parse source code with embedded annotations. -/// -/// Returns the clean source (annotations stripped) and the parsed annotations. 
-pub fn parse_annotated_source(source: &str) -> ParsedSource { - let mut clean_lines: Vec<&str> = Vec::new(); - let mut annotations: Vec = Vec::new(); - - let lines: Vec<&str> = source.lines().collect(); - let mut clean_line_idx: u32 = 0; - - for line in &lines { - let trimmed = line.trim_start(); - - if trimmed.starts_with("##") { - // This is an annotation line - parse it - if let Some(ann) = parse_annotation_line(trimmed, clean_line_idx.saturating_sub(1)) { - annotations.push(ann); - } - // Don't add to clean_lines - } else { - // Regular code line - clean_lines.push(line); - clean_line_idx += 1; - } - } - - // Build position map for quick lookup - let mut position_map: HashMap<(u32, u32), Vec> = HashMap::new(); - for ann in &annotations { - position_map - .entry((ann.line, ann.column)) - .or_default() - .push(ann.clone()); - } - - ParsedSource { - source: clean_lines.join("\n"), - annotations, - position_map, - } -} - -/// Parse a single annotation line. -/// -/// Format: `## ^ kind: value` -fn parse_annotation_line(line: &str, prev_line: u32) -> Option { - // Strip the ## prefix - let content = line.strip_prefix("##")?; - - // Find the ^ marker - let caret_pos = content.find('^')?; - - // The column is the position of ^ in the content (accounting for leading spaces) - let column = u32::try_from(caret_pos).ok()?; - - // Parse the rest after the ^ - let rest = content[caret_pos + 1..].trim(); - - // Parse the kind: value part - let kind = parse_annotation_kind(rest)?; - - Some(Annotation { - line: prev_line, - column, - kind, - }) -} - -/// Parse the annotation kind from "kind: value" format. 
-fn parse_annotation_kind(s: &str) -> Option { - let (kind_str, value) = s.split_once(':')?; - let kind_str = kind_str.trim(); - let value = value.trim(); - - match kind_str { - "def" => Some(AnnotationKind::Definition(value.to_string())), - "use" => Some(AnnotationKind::Usage(value.to_string())), - "hover" => Some(AnnotationKind::Hover(value.to_string())), - "hover-exact" => Some(AnnotationKind::HoverExact(value.to_string())), - "type" => Some(AnnotationKind::Type(value.to_string())), - "type-exact" => Some(AnnotationKind::TypeExact(value.to_string())), - "error" => Some(AnnotationKind::Error(value.to_string())), - "completion" => { - let items: Vec = value.split(',').map(|s| s.trim().to_string()).collect(); - Some(AnnotationKind::Completion(items)) - } - "no-completion" => Some(AnnotationKind::NoCompletion(value.to_string())), - "goto" => { - let (line_str, col_str) = value.split_once(':')?; - let line: u32 = line_str.trim().parse().ok()?; - let column: u32 = col_str.trim().parse().ok()?; - Some(AnnotationKind::Goto { line, column }) - } - _ => None, - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_parse_simple_annotation() { - // 'x' is at column 6 in "local x = 1;" - // 'x' is at column 0 in "x + 1" - let source = r"local x = 1; -## ^ def: x -x + 1 -##^ use: x"; - - let parsed = parse_annotated_source(source); - - // Clean source should have annotations stripped - assert_eq!(parsed.source, "local x = 1;\nx + 1"); - - // Should have 2 annotations - assert_eq!(parsed.annotations.len(), 2); - - // First annotation: def: x at line 0, column 6 - let ann1 = &parsed.annotations[0]; - assert_eq!(ann1.line, 0); - assert_eq!(ann1.column, 6); - assert_eq!(ann1.kind, AnnotationKind::Definition("x".to_string())); - - // Second annotation: use: x at line 1, column 0 - let ann2 = &parsed.annotations[1]; - assert_eq!(ann2.line, 1); - assert_eq!(ann2.column, 0); - assert_eq!(ann2.kind, AnnotationKind::Usage("x".to_string())); - } - - #[test] - fn 
test_parse_completion_annotation() { - let source = r"std. -## ^ completion: length, type, isNumber"; - - let parsed = parse_annotated_source(source); - assert_eq!(parsed.source, "std."); - assert_eq!(parsed.annotations.len(), 1); - - let ann = &parsed.annotations[0]; - assert!(matches!( - &ann.kind, - AnnotationKind::Completion(items) if items == &["length", "type", "isNumber"] - )); - } - - #[test] - fn test_parse_goto_annotation() { - let source = r"local f(x) = x; -## ^ goto: 0:8"; - - let parsed = parse_annotated_source(source); - assert_eq!(parsed.annotations.len(), 1); - - let ann = &parsed.annotations[0]; - assert!(matches!( - &ann.kind, - AnnotationKind::Goto { line: 0, column: 8 } - )); - } - - #[test] - fn test_position_map() { - // 'x' at column 6, '1' at column 10 - let source = r"local x = 1; -## ^ def: x -## ^ type: number"; - - let parsed = parse_annotated_source(source); - - // Should be able to look up by position - let anns_at_6 = parsed.annotations_at(0, 6); - assert_eq!(anns_at_6.len(), 1); - assert!(matches!(&anns_at_6[0].kind, AnnotationKind::Definition(_))); - - let anns_at_10 = parsed.annotations_at(0, 10); - assert_eq!(anns_at_10.len(), 1); - assert!(matches!(&anns_at_10[0].kind, AnnotationKind::Type(_))); - } - - #[test] - fn test_parse_exact_hover_and_type_annotations() { - let source = r"local x = 1; -## ^ hover-exact: `number` -## ^ type-exact: number"; - - let parsed = parse_annotated_source(source); - assert_eq!(parsed.annotations.len(), 2); - - assert_eq!( - parsed.annotations[0].kind, - AnnotationKind::HoverExact("`number`".to_string()) - ); - assert_eq!( - parsed.annotations[1].kind, - AnnotationKind::TypeExact("number".to_string()) - ); - } -} diff --git a/crates/jrsonnet-lsp/tests/framework/scenario.rs b/crates/jrsonnet-lsp/tests/framework/scenario.rs deleted file mode 100644 index 2c692059..00000000 --- a/crates/jrsonnet-lsp/tests/framework/scenario.rs +++ /dev/null @@ -1,415 +0,0 @@ -//! 
Scenario model/parser for multi-file, multi-step LSP timeline tests. -//! -//! The format is JSON and maps directly to these types via serde. -//! -//! Example: -//! ```json -//! { -//! "steps": [ -//! { "step": "open", "uri": "file:///main.jsonnet", "text": "local x = 1; x" }, -//! { -//! "step": "requestCodeAction", -//! "id": 2, -//! "uri": "file:///main.jsonnet", -//! "range": { "start": { "line": 0, "character": 0 }, "end": { "line": 0, "character": 12 } }, -//! "diagnostics": [] -//! }, -//! { "step": "expectCodeAction", "id": 2, "result": null } -//! ] -//! } -//! ``` - -use lsp_types::{ - CodeActionKind, CodeActionOrCommand, Diagnostic, Location, Position, Range, - TextDocumentContentChangeEvent, -}; -use serde::{Deserialize, Serialize}; - -/// A full timeline scenario. -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -#[serde(deny_unknown_fields)] -pub struct Scenario { - pub steps: Vec, -} - -impl Scenario { - #[must_use] - pub fn new(steps: Vec) -> Self { - Self { steps } - } -} - -/// Parse a scenario from JSON text. -pub fn parse_scenario_json(input: &str) -> Result { - serde_json::from_str(input) -} - -/// One timeline step. -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -#[serde(tag = "step", rename_all = "camelCase")] -pub enum ScenarioStep { - Open(OpenStep), - ChangeFull(ChangeFullStep), - ChangeIncremental(ChangeIncrementalStep), - Save(SaveStep), - Close(CloseStep), - Config(ConfigStep), - RequestCodeAction(RequestCodeActionStep), - ExpectCodeAction(ExpectCodeActionStep), - RequestReferences(RequestReferencesStep), - ExpectReferences(ExpectReferencesStep), - ExpectDiagnostics(ExpectDiagnosticsStep), - DiagnosticsSettled(DiagnosticsSettledStep), -} - -/// `textDocument/didOpen`. 
-#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -#[serde(deny_unknown_fields)] -pub struct OpenStep { - pub uri: String, - pub text: String, - #[serde(default = "default_language_id")] - pub language_id: String, - #[serde(default = "default_open_version")] - pub version: i32, -} - -const fn default_open_version() -> i32 { - 1 -} - -fn default_language_id() -> String { - "jsonnet".to_string() -} - -/// `textDocument/didChange` full-document replacement. -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -#[serde(deny_unknown_fields)] -pub struct ChangeFullStep { - pub uri: String, - pub text: String, - pub version: i32, -} - -impl ChangeFullStep { - #[must_use] - pub fn as_change_event(&self) -> TextDocumentContentChangeEvent { - TextDocumentContentChangeEvent { - range: None, - range_length: None, - text: self.text.clone(), - } - } -} - -/// `textDocument/didChange` incremental edit. -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -#[serde(deny_unknown_fields)] -pub struct ChangeIncrementalStep { - pub uri: String, - pub range: Range, - pub text: String, - pub version: i32, -} - -impl ChangeIncrementalStep { - #[must_use] - pub fn as_change_event(&self) -> TextDocumentContentChangeEvent { - TextDocumentContentChangeEvent { - range: Some(self.range), - range_length: None, - text: self.text.clone(), - } - } -} - -/// `textDocument/didSave`. -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -#[serde(deny_unknown_fields)] -pub struct SaveStep { - pub uri: String, - pub text: Option, -} - -/// `textDocument/didClose`. -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -#[serde(deny_unknown_fields)] -pub struct CloseStep { - pub uri: String, -} - -/// `workspace/didChangeConfiguration`. -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -#[serde(deny_unknown_fields)] -pub struct ConfigStep { - pub settings: serde_json::Value, -} - -/// `textDocument/codeAction` request. 
-#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -#[serde(deny_unknown_fields)] -pub struct RequestCodeActionStep { - pub id: i32, - pub uri: String, - pub range: Range, - #[serde(default)] - pub diagnostics: Vec, - pub only: Option>, -} - -/// Expected `textDocument/codeAction` response. -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -#[serde(deny_unknown_fields)] -pub struct ExpectCodeActionStep { - pub id: i32, - pub result: Option>, -} - -/// `textDocument/references` request. -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -#[serde(deny_unknown_fields)] -pub struct RequestReferencesStep { - pub id: i32, - pub uri: String, - pub position: Position, - #[serde(default)] - pub include_declaration: bool, -} - -/// Expected `textDocument/references` response. -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -#[serde(deny_unknown_fields)] -pub struct ExpectReferencesStep { - pub id: i32, - pub result: Option>, -} - -/// Expected diagnostics notification for a URI. -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -#[serde(deny_unknown_fields)] -pub struct ExpectDiagnosticsStep { - pub uri: String, - #[serde(default)] - pub diagnostics: Vec, -} - -/// Barrier for "no new diagnostics arrive for idle_ms before timeout_ms". 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] -#[serde(deny_unknown_fields)] -pub struct DiagnosticsSettledStep { - #[serde(default = "default_timeout_ms")] - pub timeout_ms: u64, - #[serde(default = "default_idle_ms")] - pub idle_ms: u64, -} - -const fn default_timeout_ms() -> u64 { - 1_000 -} - -const fn default_idle_ms() -> u64 { - 50 -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_parse_minimal_scenario() { - let parsed = parse_scenario_json( - r#"{ - "steps": [ - { - "step": "open", - "uri": "file:///main.jsonnet", - "text": "local x = 1; x" - }, - { - "step": "requestCodeAction", - "id": 2, - "uri": "file:///main.jsonnet", - "range": { - "start": { "line": 0, "character": 0 }, - "end": { "line": 0, "character": 12 } - }, - "diagnostics": [], - "only": null - }, - { - "step": "expectCodeAction", - "id": 2, - "result": null - } - ] - }"#, - ) - .unwrap(); - - assert_eq!( - parsed, - Scenario::new(vec![ - ScenarioStep::Open(OpenStep { - uri: "file:///main.jsonnet".to_string(), - text: "local x = 1; x".to_string(), - language_id: "jsonnet".to_string(), - version: 1, - }), - ScenarioStep::RequestCodeAction(RequestCodeActionStep { - id: 2, - uri: "file:///main.jsonnet".to_string(), - range: Range { - start: Position { - line: 0, - character: 0, - }, - end: Position { - line: 0, - character: 12, - }, - }, - diagnostics: Vec::new(), - only: None, - }), - ScenarioStep::ExpectCodeAction(ExpectCodeActionStep { - id: 2, - result: None, - }), - ]) - ); - } - - #[test] - fn test_change_step_conversions_are_structural() { - let full = ChangeFullStep { - uri: "file:///main.jsonnet".to_string(), - text: ["{", "a:1", "}"].concat(), - version: 3, - }; - assert_eq!( - full.as_change_event(), - TextDocumentContentChangeEvent { - range: None, - range_length: None, - text: ["{", "a:1", "}"].concat(), - } - ); - - let incremental = ChangeIncrementalStep { - uri: "file:///main.jsonnet".to_string(), - range: Range { - start: Position { - 
line: 0, - character: 3, - }, - end: Position { - line: 0, - character: 4, - }, - }, - text: "2".to_string(), - version: 4, - }; - assert_eq!( - incremental.as_change_event(), - TextDocumentContentChangeEvent { - range: Some(Range { - start: Position { - line: 0, - character: 3, - }, - end: Position { - line: 0, - character: 4, - }, - }), - range_length: None, - text: "2".to_string(), - } - ); - } - - #[test] - fn test_diagnostics_settled_defaults() { - let parsed = parse_scenario_json( - r#"{ - "steps": [ - { "step": "diagnosticsSettled" } - ] - }"#, - ) - .unwrap(); - - assert_eq!( - parsed, - Scenario::new(vec![ScenarioStep::DiagnosticsSettled( - DiagnosticsSettledStep { - timeout_ms: 1_000, - idle_ms: 50, - }, - )]) - ); - } - - #[test] - fn test_parse_references_request_and_expectation() { - let parsed = parse_scenario_json( - r#"{ - "steps": [ - { - "step": "requestReferences", - "id": 4, - "uri": "file:///lib.jsonnet", - "position": { "line": 0, "character": 6 }, - "include_declaration": false - }, - { - "step": "expectReferences", - "id": 4, - "result": [ - { - "uri": "file:///lib.jsonnet", - "range": { - "start": { "line": 0, "character": 18 }, - "end": { "line": 0, "character": 24 } - } - } - ] - } - ] - }"#, - ) - .unwrap(); - - assert_eq!( - parsed, - Scenario::new(vec![ - ScenarioStep::RequestReferences(RequestReferencesStep { - id: 4, - uri: "file:///lib.jsonnet".to_string(), - position: Position { - line: 0, - character: 6, - }, - include_declaration: false, - }), - ScenarioStep::ExpectReferences(ExpectReferencesStep { - id: 4, - result: Some(vec![Location { - uri: "file:///lib.jsonnet".parse().unwrap(), - range: Range { - start: Position { - line: 0, - character: 18, - }, - end: Position { - line: 0, - character: 24, - }, - }, - }]), - }), - ]) - ); - } -} diff --git a/crates/jrsonnet-lsp/tests/framework/scenario_runner.rs b/crates/jrsonnet-lsp/tests/framework/scenario_runner.rs deleted file mode 100644 index 28bebe18..00000000 --- 
a/crates/jrsonnet-lsp/tests/framework/scenario_runner.rs +++ /dev/null @@ -1,942 +0,0 @@ -//! In-memory runner for `Scenario` timelines. - -use std::{ - collections::{HashMap, VecDeque}, - thread, - time::{Duration, Instant}, -}; - -use crossbeam_channel::RecvTimeoutError; -use lsp_server::{Connection, Message, Notification, Request, Response}; -use lsp_types::{ - notification::{ - DidChangeConfiguration, DidChangeTextDocument, DidCloseTextDocument, DidOpenTextDocument, - DidSaveTextDocument, Notification as _, PublishDiagnostics, - }, - request::{CodeActionRequest, Initialize, References, Request as _, Shutdown}, - CodeActionContext, CodeActionParams, DidChangeConfigurationParams, DidChangeTextDocumentParams, - DidCloseTextDocumentParams, DidOpenTextDocumentParams, DidSaveTextDocumentParams, - InitializeParams, PartialResultParams, ReferenceContext, ReferenceParams, - TextDocumentIdentifier, TextDocumentItem, TextDocumentPositionParams, WorkDoneProgressParams, -}; - -use crate::framework::scenario::{ - ChangeFullStep, ChangeIncrementalStep, CloseStep, ConfigStep, DiagnosticsSettledStep, - ExpectCodeActionStep, ExpectDiagnosticsStep, ExpectReferencesStep, OpenStep, - RequestCodeActionStep, RequestReferencesStep, SaveStep, Scenario, ScenarioStep, -}; - -/// Run a full timeline scenario against an in-memory LSP server. 
-pub fn run_scenario(scenario: &Scenario) -> Result<(), String> { - let mut runner = ScenarioRunner::start(); - runner.initialize()?; - for step in &scenario.steps { - runner.run_step(step)?; - } - runner.shutdown() -} - -struct ScenarioRunner { - conn: Connection, - server_thread: thread::JoinHandle<()>, - pending_responses: Vec, - pending_diagnostics: HashMap>, - last_diagnostic_at: Option, -} - -impl ScenarioRunner { - fn start() -> Self { - let (client_conn, server_conn) = Connection::memory(); - let server_thread = thread::spawn(move || { - let server = jrsonnet_lsp::server::Server::new(server_conn); - let _ = server.run(); - }); - Self { - conn: client_conn, - server_thread, - pending_responses: Vec::new(), - pending_diagnostics: HashMap::new(), - last_diagnostic_at: None, - } - } - - fn initialize(&mut self) -> Result<(), String> { - let params = serde_json::to_value(InitializeParams::default()) - .map_err(|error| format!("serialize initialize params: {error}"))?; - self.send_request(Request::new( - 1.into(), - Initialize::METHOD.to_string(), - params, - ))?; - let response = self.wait_response(1, Duration::from_secs(5))?; - if response.error.is_some() { - return Err(format!("initialize returned error: {:?}", response.error)); - } - self.send_notification(Notification::new( - "initialized".to_string(), - serde_json::json!({}), - )) - } - - fn shutdown(mut self) -> Result<(), String> { - self.send_request(Request::new( - 9_999.into(), - Shutdown::METHOD.to_string(), - serde_json::Value::Null, - ))?; - let response = self.wait_response(9_999, Duration::from_secs(5))?; - if response.error.is_some() { - return Err(format!("shutdown returned error: {:?}", response.error)); - } - self.send_notification(Notification::new( - "exit".to_string(), - serde_json::Value::Null, - ))?; - self.server_thread - .join() - .map_err(|_| "server thread panicked".to_string()) - } - - fn run_step(&mut self, step: &ScenarioStep) -> Result<(), String> { - match step { - 
ScenarioStep::Open(open) => self.step_open(open), - ScenarioStep::ChangeFull(change) => self.step_change_full(change), - ScenarioStep::ChangeIncremental(change) => self.step_change_incremental(change), - ScenarioStep::Save(save) => self.step_save(save), - ScenarioStep::Close(close) => self.step_close(close), - ScenarioStep::Config(config) => self.step_config(config), - ScenarioStep::RequestCodeAction(request) => self.step_request_code_action(request), - ScenarioStep::ExpectCodeAction(expectation) => { - self.step_expect_code_action(expectation) - } - ScenarioStep::RequestReferences(request) => self.step_request_references(request), - ScenarioStep::ExpectReferences(expectation) => self.step_expect_references(expectation), - ScenarioStep::ExpectDiagnostics(expectation) => { - self.step_expect_diagnostics(expectation) - } - ScenarioStep::DiagnosticsSettled(settled) => self.step_diagnostics_settled(*settled), - } - } - - fn step_open(&self, step: &OpenStep) -> Result<(), String> { - let uri: lsp_types::Uri = step - .uri - .parse() - .map_err(|error| format!("parse open uri '{}': {error}", step.uri))?; - let params = DidOpenTextDocumentParams { - text_document: TextDocumentItem { - uri, - language_id: step.language_id.clone(), - version: step.version, - text: step.text.clone(), - }, - }; - let payload = serde_json::to_value(params) - .map_err(|error| format!("serialize didOpen params: {error}"))?; - self.send_notification(Notification::new( - DidOpenTextDocument::METHOD.to_string(), - payload, - )) - } - - fn step_change_full(&self, step: &ChangeFullStep) -> Result<(), String> { - let uri: lsp_types::Uri = step - .uri - .parse() - .map_err(|error| format!("parse didChange(full) uri '{}': {error}", step.uri))?; - let params = DidChangeTextDocumentParams { - text_document: lsp_types::VersionedTextDocumentIdentifier { - uri, - version: step.version, - }, - content_changes: vec![step.as_change_event()], - }; - let payload = serde_json::to_value(params) - .map_err(|error| 
format!("serialize didChange(full) params: {error}"))?; - self.send_notification(Notification::new( - DidChangeTextDocument::METHOD.to_string(), - payload, - )) - } - - fn step_change_incremental(&self, step: &ChangeIncrementalStep) -> Result<(), String> { - let uri: lsp_types::Uri = step - .uri - .parse() - .map_err(|error| format!("parse didChange(incremental) uri '{}': {error}", step.uri))?; - let params = DidChangeTextDocumentParams { - text_document: lsp_types::VersionedTextDocumentIdentifier { - uri, - version: step.version, - }, - content_changes: vec![step.as_change_event()], - }; - let payload = serde_json::to_value(params) - .map_err(|error| format!("serialize didChange(incremental) params: {error}"))?; - self.send_notification(Notification::new( - DidChangeTextDocument::METHOD.to_string(), - payload, - )) - } - - fn step_save(&self, step: &SaveStep) -> Result<(), String> { - let uri: lsp_types::Uri = step - .uri - .parse() - .map_err(|error| format!("parse didSave uri '{}': {error}", step.uri))?; - let params = DidSaveTextDocumentParams { - text_document: TextDocumentIdentifier { uri }, - text: step.text.clone(), - }; - let payload = serde_json::to_value(params) - .map_err(|error| format!("serialize didSave params: {error}"))?; - self.send_notification(Notification::new( - DidSaveTextDocument::METHOD.to_string(), - payload, - )) - } - - fn step_close(&self, step: &CloseStep) -> Result<(), String> { - let uri: lsp_types::Uri = step - .uri - .parse() - .map_err(|error| format!("parse didClose uri '{}': {error}", step.uri))?; - let params = DidCloseTextDocumentParams { - text_document: TextDocumentIdentifier { uri }, - }; - let payload = serde_json::to_value(params) - .map_err(|error| format!("serialize didClose params: {error}"))?; - self.send_notification(Notification::new( - DidCloseTextDocument::METHOD.to_string(), - payload, - )) - } - - fn step_config(&self, step: &ConfigStep) -> Result<(), String> { - let params = DidChangeConfigurationParams { - 
settings: step.settings.clone(), - }; - let payload = serde_json::to_value(params) - .map_err(|error| format!("serialize didChangeConfiguration params: {error}"))?; - self.send_notification(Notification::new( - DidChangeConfiguration::METHOD.to_string(), - payload, - )) - } - - fn step_request_code_action(&self, step: &RequestCodeActionStep) -> Result<(), String> { - let uri: lsp_types::Uri = step - .uri - .parse() - .map_err(|error| format!("parse codeAction uri '{}': {error}", step.uri))?; - let params = CodeActionParams { - text_document: TextDocumentIdentifier { uri }, - range: step.range, - context: CodeActionContext { - diagnostics: step.diagnostics.clone(), - only: step.only.clone(), - trigger_kind: None, - }, - work_done_progress_params: WorkDoneProgressParams::default(), - partial_result_params: PartialResultParams::default(), - }; - let payload = serde_json::to_value(params) - .map_err(|error| format!("serialize codeAction request params: {error}"))?; - self.send_request(Request::new( - step.id.into(), - CodeActionRequest::METHOD.to_string(), - payload, - )) - } - - fn step_request_references(&self, step: &RequestReferencesStep) -> Result<(), String> { - let uri: lsp_types::Uri = step - .uri - .parse() - .map_err(|error| format!("parse references uri '{}': {error}", step.uri))?; - let params = ReferenceParams { - text_document_position: TextDocumentPositionParams { - text_document: TextDocumentIdentifier { uri }, - position: step.position, - }, - context: ReferenceContext { - include_declaration: step.include_declaration, - }, - work_done_progress_params: WorkDoneProgressParams::default(), - partial_result_params: PartialResultParams::default(), - }; - let payload = serde_json::to_value(params) - .map_err(|error| format!("serialize references request params: {error}"))?; - self.send_request(Request::new( - step.id.into(), - References::METHOD.to_string(), - payload, - )) - } - - fn step_expect_code_action(&mut self, step: &ExpectCodeActionStep) -> 
Result<(), String> { - let response = self.wait_response(step.id, Duration::from_secs(5))?; - if let Some(error) = response.error { - return Err(format!( - "codeAction response {id} returned error: {error:?}", - id = step.id - )); - } - let actual: Option> = match response.result { - Some(value) => serde_json::from_value(value).map_err(|error| { - format!( - "deserialize codeAction response result for id {}: {error}", - step.id - ) - })?, - None => None, - }; - if actual != step.result { - return Err(format!( - "codeAction response mismatch for id {}: actual={actual:?} expected={expected:?}", - step.id, - expected = step.result - )); - } - Ok(()) - } - - fn step_expect_references(&mut self, step: &ExpectReferencesStep) -> Result<(), String> { - let response = self.wait_response(step.id, Duration::from_secs(5))?; - if let Some(error) = response.error { - return Err(format!( - "references response {id} returned error: {error:?}", - id = step.id - )); - } - let actual: Option> = match response.result { - Some(value) => serde_json::from_value(value).map_err(|error| { - format!( - "deserialize references response result for id {}: {error}", - step.id - ) - })?, - None => None, - }; - if actual != step.result { - return Err(format!( - "references response mismatch for id {}: actual={actual:?} expected={expected:?}", - step.id, - expected = step.result - )); - } - Ok(()) - } - - fn step_expect_diagnostics(&mut self, step: &ExpectDiagnosticsStep) -> Result<(), String> { - let actual = self.wait_diagnostics_for_uri(&step.uri, Duration::from_secs(5))?; - if actual.diagnostics != step.diagnostics { - return Err(format!( - "diagnostics mismatch for uri {}: actual={actual:?} expected={expected:?}", - step.uri, - expected = step.diagnostics - )); - } - Ok(()) - } - - fn step_diagnostics_settled(&mut self, step: DiagnosticsSettledStep) -> Result<(), String> { - let timeout = Duration::from_millis(step.timeout_ms); - let idle = Duration::from_millis(step.idle_ms); - let start = 
Instant::now(); - let mut last_diagnostic = self.last_diagnostic_at.unwrap_or(start); - - loop { - if start.elapsed() > timeout { - return Err(format!( - "diagnostics did not settle within {}ms", - step.timeout_ms - )); - } - - if last_diagnostic.elapsed() >= idle { - return Ok(()); - } - - let remaining_timeout = timeout.saturating_sub(start.elapsed()); - let remaining_idle = idle.saturating_sub(last_diagnostic.elapsed()); - let wait_for = remaining_timeout.min(remaining_idle); - - match self.conn.receiver.recv_timeout(wait_for) { - Ok(message) => self.capture_background_message(message)?, - Err(RecvTimeoutError::Timeout) => {} - Err(RecvTimeoutError::Disconnected) => { - return Err( - "connection closed while waiting for diagnostics to settle".to_string() - ); - } - } - - if let Some(latest) = self.last_diagnostic_at { - last_diagnostic = latest; - } - } - } - - fn wait_response(&mut self, id: i32, timeout: Duration) -> Result { - if let Some(index) = self - .pending_responses - .iter() - .position(|response| response.id == id.into()) - { - return Ok(self.pending_responses.swap_remove(index)); - } - - let deadline = Instant::now() + timeout; - loop { - let remaining = deadline.saturating_duration_since(Instant::now()); - if remaining.is_zero() { - return Err(format!("timed out waiting for response id {}", id)); - } - match self.conn.receiver.recv_timeout(remaining) { - Ok(Message::Response(response)) if response.id == id.into() => return Ok(response), - Ok(message) => self.capture_background_message(message)?, - Err(RecvTimeoutError::Timeout) => { - return Err(format!("timed out waiting for response id {}", id)); - } - Err(RecvTimeoutError::Disconnected) => { - return Err("connection closed while waiting for response".to_string()); - } - } - } - } - - fn wait_diagnostics_for_uri( - &mut self, - uri: &str, - timeout: Duration, - ) -> Result { - if let Some(queue) = self.pending_diagnostics.get_mut(uri) { - if let Some(params) = queue.pop_front() { - return 
Ok(params); - } - } - - let deadline = Instant::now() + timeout; - loop { - let remaining = deadline.saturating_duration_since(Instant::now()); - if remaining.is_zero() { - return Err(format!("timed out waiting for diagnostics for uri {}", uri)); - } - match self.conn.receiver.recv_timeout(remaining) { - Ok(message) => { - self.capture_background_message(message)?; - if let Some(queue) = self.pending_diagnostics.get_mut(uri) { - if let Some(params) = queue.pop_front() { - return Ok(params); - } - } - } - Err(RecvTimeoutError::Timeout) => { - return Err(format!("timed out waiting for diagnostics for uri {}", uri)); - } - Err(RecvTimeoutError::Disconnected) => { - return Err("connection closed while waiting for diagnostics".to_string()); - } - } - } - } - - fn capture_background_message(&mut self, message: Message) -> Result<(), String> { - match message { - Message::Response(response) => { - self.pending_responses.push(response); - Ok(()) - } - Message::Notification(notification) - if notification.method == PublishDiagnostics::METHOD => - { - let params: lsp_types::PublishDiagnosticsParams = - serde_json::from_value(notification.params) - .map_err(|error| format!("deserialize publish diagnostics: {error}"))?; - let key = params.uri.as_str().to_string(); - self.pending_diagnostics - .entry(key) - .or_default() - .push_back(params); - self.last_diagnostic_at = Some(Instant::now()); - Ok(()) - } - Message::Notification(_) | Message::Request(_) => Ok(()), - } - } - - fn send_notification(&self, notification: Notification) -> Result<(), String> { - self.conn - .sender - .send(Message::Notification(notification)) - .map_err(|error| format!("send notification: {error}")) - } - - fn send_request(&self, request: Request) -> Result<(), String> { - self.conn - .sender - .send(Message::Request(request)) - .map_err(|error| format!("send request: {error}")) - } -} - -#[cfg(test)] -mod tests { - use std::fs; - - use lsp_types::{ - CodeAction, CodeActionKind, CodeActionOrCommand, 
Diagnostic, DiagnosticSeverity, Location, - NumberOrString, Position, Range, TextEdit, WorkspaceEdit, - }; - use tempfile::TempDir; - - use super::*; - use crate::framework::scenario::{ - CloseStep, ConfigStep, DiagnosticsSettledStep, ExpectCodeActionStep, ExpectReferencesStep, - OpenStep, RequestCodeActionStep, RequestReferencesStep, Scenario, ScenarioStep, - }; - - fn unused_variable_diagnostic() -> Diagnostic { - Diagnostic { - range: Range { - start: Position { - line: 0, - character: 6, - }, - end: Position { - line: 0, - character: 7, - }, - }, - severity: Some(DiagnosticSeverity::WARNING), - code: Some(NumberOrString::String("unused-variable".to_string())), - code_description: None, - source: Some("jrsonnet-lint".to_string()), - message: "unused variable".to_string(), - related_information: None, - tags: None, - data: None, - } - } - - fn expected_unused_actions(uri: &str, diagnostic: Diagnostic) -> Vec { - vec![ - CodeActionOrCommand::CodeAction(CodeAction { - title: "Prefix `x` with `_`".to_string(), - kind: Some(CodeActionKind::QUICKFIX), - diagnostics: Some(vec![diagnostic.clone()]), - edit: Some(WorkspaceEdit { - changes: Some(HashMap::from([( - uri.parse().unwrap(), - vec![TextEdit { - range: Range { - start: Position { - line: 0, - character: 6, - }, - end: Position { - line: 0, - character: 7, - }, - }, - new_text: "_x".to_string(), - }], - )])), - document_changes: None, - change_annotations: None, - }), - command: None, - is_preferred: Some(true), - disabled: None, - data: None, - }), - CodeActionOrCommand::CodeAction(CodeAction { - title: "Remove unused binding `x`".to_string(), - kind: Some(CodeActionKind::QUICKFIX), - diagnostics: Some(vec![diagnostic.clone()]), - edit: Some(WorkspaceEdit { - changes: Some(HashMap::from([( - uri.parse().unwrap(), - vec![TextEdit { - range: Range { - start: Position { - line: 0, - character: 0, - }, - end: Position { - line: 0, - character: 32, - }, - }, - new_text: String::new(), - }], - )])), - 
document_changes: None, - change_annotations: None, - }), - command: None, - is_preferred: Some(false), - disabled: None, - data: None, - }), - CodeActionOrCommand::CodeAction(CodeAction { - title: "Remove all unused bindings".to_string(), - kind: Some(CodeActionKind::SOURCE_FIX_ALL), - diagnostics: Some(vec![diagnostic]), - edit: Some(WorkspaceEdit { - changes: Some(HashMap::from([( - uri.parse().unwrap(), - vec![TextEdit { - range: Range { - start: Position { - line: 0, - character: 0, - }, - end: Position { - line: 0, - character: 32, - }, - }, - new_text: String::new(), - }], - )])), - document_changes: None, - change_annotations: None, - }), - command: None, - is_preferred: Some(false), - disabled: None, - data: None, - }), - ] - } - - fn expected_prefix_only_action(uri: &str, diagnostic: Diagnostic) -> Vec { - vec![CodeActionOrCommand::CodeAction(CodeAction { - title: "Prefix `x` with `_`".to_string(), - kind: Some(CodeActionKind::QUICKFIX), - diagnostics: Some(vec![diagnostic]), - edit: Some(WorkspaceEdit { - changes: Some(HashMap::from([( - uri.parse().unwrap(), - vec![TextEdit { - range: Range { - start: Position { - line: 0, - character: 6, - }, - end: Position { - line: 0, - character: 7, - }, - }, - new_text: "_x".to_string(), - }], - )])), - document_changes: None, - change_annotations: None, - }), - command: None, - is_preferred: Some(true), - disabled: None, - data: None, - })] - } - - #[test] - fn test_runner_executes_open_request_expect_sequence() { - let uri = "file:///scenario-runner.jsonnet"; - let diagnostic = unused_variable_diagnostic(); - let expected = vec![ - CodeActionOrCommand::CodeAction(CodeAction { - title: "Prefix `x` with `_`".to_string(), - kind: Some(CodeActionKind::QUICKFIX), - diagnostics: Some(vec![diagnostic.clone()]), - edit: Some(WorkspaceEdit { - changes: Some(HashMap::from([( - uri.parse().unwrap(), - vec![TextEdit { - range: Range { - start: Position { - line: 0, - character: 6, - }, - end: Position { - line: 0, - 
character: 7, - }, - }, - new_text: "_x".to_string(), - }], - )])), - document_changes: None, - change_annotations: None, - }), - command: None, - is_preferred: Some(true), - disabled: None, - data: None, - }), - CodeActionOrCommand::CodeAction(CodeAction { - title: "Remove unused binding `x`".to_string(), - kind: Some(CodeActionKind::QUICKFIX), - diagnostics: Some(vec![diagnostic.clone()]), - edit: Some(WorkspaceEdit { - changes: Some(HashMap::from([( - uri.parse().unwrap(), - vec![TextEdit { - range: Range { - start: Position { - line: 0, - character: 0, - }, - end: Position { - line: 0, - character: 11, - }, - }, - new_text: String::new(), - }], - )])), - document_changes: None, - change_annotations: None, - }), - command: None, - is_preferred: Some(false), - disabled: None, - data: None, - }), - CodeActionOrCommand::CodeAction(CodeAction { - title: "Remove all unused bindings".to_string(), - kind: Some(CodeActionKind::SOURCE_FIX_ALL), - diagnostics: Some(vec![diagnostic.clone()]), - edit: Some(WorkspaceEdit { - changes: Some(HashMap::from([( - uri.parse().unwrap(), - vec![TextEdit { - range: Range { - start: Position { - line: 0, - character: 0, - }, - end: Position { - line: 0, - character: 11, - }, - }, - new_text: String::new(), - }], - )])), - document_changes: None, - change_annotations: None, - }), - command: None, - is_preferred: Some(false), - disabled: None, - data: None, - }), - ]; - - let scenario = Scenario::new(vec![ - ScenarioStep::Open(OpenStep { - uri: uri.to_string(), - text: "local x = 1; 42".to_string(), - language_id: "jsonnet".to_string(), - version: 1, - }), - ScenarioStep::DiagnosticsSettled(DiagnosticsSettledStep { - timeout_ms: 1_000, - idle_ms: 25, - }), - ScenarioStep::RequestCodeAction(RequestCodeActionStep { - id: 2, - uri: uri.to_string(), - range: Range { - start: Position { - line: 0, - character: 0, - }, - end: Position { - line: 0, - character: 20, - }, - }, - diagnostics: vec![diagnostic], - only: None, - }), - 
ScenarioStep::ExpectCodeAction(ExpectCodeActionStep { - id: 2, - result: Some(expected), - }), - ]); - - let result = run_scenario(&scenario); - assert_eq!(result, Ok(())); - } - - #[test] - fn test_runner_timeline_applies_config_change_for_code_actions() { - let uri = "file:///scenario-code-action-policy.jsonnet"; - let diagnostic = unused_variable_diagnostic(); - let scenario = Scenario::new(vec![ - ScenarioStep::Open(OpenStep { - uri: uri.to_string(), - text: "local x = import \"foo.libsonnet\"; 42".to_string(), - language_id: "jsonnet".to_string(), - version: 1, - }), - ScenarioStep::DiagnosticsSettled(DiagnosticsSettledStep { - timeout_ms: 1_000, - idle_ms: 25, - }), - ScenarioStep::RequestCodeAction(RequestCodeActionStep { - id: 10, - uri: uri.to_string(), - range: Range { - start: Position { - line: 0, - character: 0, - }, - end: Position { - line: 0, - character: 35, - }, - }, - diagnostics: vec![diagnostic.clone()], - only: None, - }), - ScenarioStep::ExpectCodeAction(ExpectCodeActionStep { - id: 10, - result: Some(expected_unused_actions(uri, diagnostic.clone())), - }), - ScenarioStep::Config(ConfigStep { - settings: serde_json::json!({ - "jsonnet": { - "codeActions": { - "removeUnused": "nonImportBindings" - } - } - }), - }), - ScenarioStep::RequestCodeAction(RequestCodeActionStep { - id: 11, - uri: uri.to_string(), - range: Range { - start: Position { - line: 0, - character: 0, - }, - end: Position { - line: 0, - character: 35, - }, - }, - diagnostics: vec![diagnostic.clone()], - only: None, - }), - ScenarioStep::ExpectCodeAction(ExpectCodeActionStep { - id: 11, - result: Some(expected_prefix_only_action(uri, diagnostic)), - }), - ]); - - let result = run_scenario(&scenario); - assert_eq!(result, Ok(())); - } - - #[test] - fn test_runner_timeline_preserves_cross_file_references_after_close() { - let tmp = TempDir::new().expect("tempdir should be created"); - let lib_path = tmp.path().join("lib.jsonnet"); - let main_path = 
tmp.path().join("main.jsonnet"); - let lib_text = "local target = 1; target"; - let main_text = "local lib = import 'lib.jsonnet'; lib.target"; - fs::write(&lib_path, lib_text).expect("lib should be written"); - fs::write(&main_path, main_text).expect("main should be written"); - - let lib_uri = format!( - "file://{}", - lib_path - .canonicalize() - .expect("lib should canonicalize") - .to_string_lossy() - ); - let main_uri = format!( - "file://{}", - main_path - .canonicalize() - .expect("main should canonicalize") - .to_string_lossy() - ); - - let scenario = Scenario::new(vec![ - ScenarioStep::Open(OpenStep { - uri: lib_uri.clone(), - text: lib_text.to_string(), - language_id: "jsonnet".to_string(), - version: 1, - }), - ScenarioStep::Open(OpenStep { - uri: main_uri.clone(), - text: main_text.to_string(), - language_id: "jsonnet".to_string(), - version: 1, - }), - ScenarioStep::DiagnosticsSettled(DiagnosticsSettledStep { - timeout_ms: 1_000, - idle_ms: 25, - }), - ScenarioStep::Close(CloseStep { - uri: main_uri.clone(), - }), - ScenarioStep::DiagnosticsSettled(DiagnosticsSettledStep { - timeout_ms: 1_000, - idle_ms: 25, - }), - ScenarioStep::RequestReferences(RequestReferencesStep { - id: 20, - uri: lib_uri.clone(), - position: Position { - line: 0, - character: 6, - }, - include_declaration: false, - }), - ScenarioStep::ExpectReferences(ExpectReferencesStep { - id: 20, - result: Some(vec![ - Location { - uri: lib_uri.parse().unwrap(), - range: Range { - start: Position { - line: 0, - character: 18, - }, - end: Position { - line: 0, - character: 24, - }, - }, - }, - Location { - uri: main_uri.parse().unwrap(), - range: Range { - start: Position { - line: 0, - character: 38, - }, - end: Position { - line: 0, - character: 44, - }, - }, - }, - ]), - }), - ]); - - let result = run_scenario(&scenario); - assert_eq!(result, Ok(())); - } -} diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_collections/conditional_comprehension_refines_element_type.yaml 
b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_collections/conditional_comprehension_refines_element_type.yaml new file mode 100644 index 00000000..9b97aab6 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_collections/conditional_comprehension_refines_element_type.yaml @@ -0,0 +1,22 @@ +steps: +- step: create + files: + main.jsonnet: | + local f(xs) = + assert std.isArray(xs); + assert std.all(std.map(function(x) x == null || std.isNumber(x), xs)); + local zs = [x for x in xs if x != null]; + local ys = [(if x == null then "no" else x - 1) for x in xs]; + { zs: ((m1:|))zs, ys: ((m2:|))ys }; + + f([1, null, 2]) + +- step: diagnosticsSettled + +- step: expectTypes + file: main.jsonnet + checks: + - at: m1 + type: 'array' + - at: m2 + type: 'array' diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_collections/filter_map_predicate_refines_output_array.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_collections/filter_map_predicate_refines_output_array.yaml new file mode 100644 index 00000000..cd552a54 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_collections/filter_map_predicate_refines_output_array.yaml @@ -0,0 +1,26 @@ +steps: +- step: create + files: + main.jsonnet: | + local inc(x) = + assert std.isNumber(x); + x + 1; + + local f(xs) = + assert std.isArray(xs); + assert std.all(std.map(function(x) std.isNumber(x) || std.isString(x), xs)); + local ys = std.filterMap(std.isNumber, inc, ((m1:|))xs); + ((m2:|))ys + + f([1, "x", 2]) + +- step: diagnosticsSettled + +- step: expectTypes + file: main.jsonnet + checks: + - at: m1 + type: 'array' + match: exact + - at: m2 + type: 'array' diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_collections/filter_with_predicate_refines_output_array.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_collections/filter_with_predicate_refines_output_array.yaml new file mode 100644 index 00000000..6c2efd1d --- /dev/null +++ 
b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_collections/filter_with_predicate_refines_output_array.yaml @@ -0,0 +1,23 @@ +steps: +- step: create + files: + main.jsonnet: | + local f(xs) = + assert std.isArray(xs); + assert std.all(std.map(function(x) std.isNumber(x) || std.isString(x), xs)); + local ys = std.filter(std.isNumber, ((m1:|))xs); + ((m2:|))ys + + f([1, "x", 2]) + +- step: diagnosticsSettled + +- step: expectTypes + file: main.jsonnet + checks: + - at: m1 + type: 'array' + match: exact + - at: m2 + type: 'array' + match: exact diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_collections/higher_order_all_map_refines_array_elements.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_collections/higher_order_all_map_refines_array_elements.yaml new file mode 100644 index 00000000..ecf1d816 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_collections/higher_order_all_map_refines_array_elements.yaml @@ -0,0 +1,19 @@ +steps: +- step: create + files: + main.jsonnet: | + local f(arr) = + if std.all(std.map(std.isNumber, arr)) then + ((m1:|))arr + else + arr; + + f([1, 2, 3]) + +- step: diagnosticsSettled + +- step: expectTypes + file: main.jsonnet + checks: + - at: m1 + type: 'array' diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/literal_string_equality_after_string_assert.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/literal_string_equality_after_string_assert.yaml new file mode 100644 index 00000000..29ee52d9 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/literal_string_equality_after_string_assert.yaml @@ -0,0 +1,22 @@ +steps: +- step: create + files: + main.jsonnet: | + local f(x) = + assert std.isString(x); + if x == "hi" then + "hey" + else if x == "bye" then + "see ya" + else + ((m1:|))x + + std.length(f("hello")) + +- step: diagnosticsSettled + +- step: expectTypes + file: main.jsonnet + checks: + - at: m1 + type: 'string' diff 
--git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/literal_string_equality_partial_without_assert.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/literal_string_equality_partial_without_assert.yaml new file mode 100644 index 00000000..0d4c2106 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/literal_string_equality_partial_without_assert.yaml @@ -0,0 +1,25 @@ +steps: +- step: create + files: + main.jsonnet: | + local f(x) = + if x == "hi" then + std.length(((m1:|))x) + else if x == "bye" then + std.length(((m2:|))x) + else + ((m3:|))x + + std.length(f("hello")) + +- step: diagnosticsSettled + +- step: expectTypes + file: main.jsonnet + checks: + - at: m1 + type: '"hi"' + - at: m2 + type: '"bye"' + - at: m3 + type: 'any' diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/negated_guard_refines_branches.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/negated_guard_refines_branches.yaml new file mode 100644 index 00000000..d0676c40 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/negated_guard_refines_branches.yaml @@ -0,0 +1,23 @@ +steps: +- step: create + files: + main.jsonnet: | + local f(x) = + assert std.isNumber(x) || std.isString(x); + if !std.isNumber(x) then + std.length(((m1:|))x) + else + ((m2:|))x + 1 + + f(3) + f("hi") + +- step: diagnosticsSettled + +- step: expectTypes + file: main.jsonnet + checks: + - at: m1 + type: 'string' + - at: m2 + type: 'number' + match: exact diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/null_and_length_guard.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/null_and_length_guard.yaml new file mode 100644 index 00000000..48fd4b19 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/null_and_length_guard.yaml @@ -0,0 +1,20 @@ +steps: +- step: create + files: + 
main.jsonnet: | + local f(x) = + assert x == null || std.isString(x); + if x != null && std.length(x) >= 10 then + ((m1:|))x + else + "Hi"; + + f(null) + +- step: diagnosticsSettled + +- step: expectTypes + file: main.jsonnet + checks: + - at: m1 + type: 'string' diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/null_branch_split.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/null_branch_split.yaml new file mode 100644 index 00000000..b6707b6a --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/null_branch_split.yaml @@ -0,0 +1,23 @@ +steps: +- step: create + files: + main.jsonnet: | + local f(x) = + assert x == null || std.isNumber(x); + if x != null then + ((m1:|))x + else + assert x == null; + ((m2:|))x + + [f(null), f(3)] + +- step: diagnosticsSettled + +- step: expectTypes + file: main.jsonnet + checks: + - at: m1 + type: 'number' + - at: m2 + type: 'null' diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/null_guard_refines_non_null_branch.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/null_guard_refines_non_null_branch.yaml new file mode 100644 index 00000000..86137c42 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/null_guard_refines_non_null_branch.yaml @@ -0,0 +1,20 @@ +steps: +- step: create + files: + main.jsonnet: | + local f(x) = + assert x == null || std.isString(x); + if x != null then + std.length(((m1:|))x) + else + 0; + + f(null) + +- step: diagnosticsSettled + +- step: expectTypes + file: main.jsonnet + checks: + - at: m1 + type: 'string' diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/partial_numeric_predicates_refine_arithmetic_paths.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/partial_numeric_predicates_refine_arithmetic_paths.yaml new file mode 100644 index 00000000..a62a43fa --- /dev/null +++ 
b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/partial_numeric_predicates_refine_arithmetic_paths.yaml @@ -0,0 +1,26 @@ +steps: +- step: create + files: + main.jsonnet: | + local f(x) = + if std.isDecimal(x) then + ((m1:|))x + 0.5 + else if std.isInteger(x) then + ((m2:|))x + 1 + else + null; + + local n = f(5); + if n == null then 5 else ((m3:|))n + 2 + +- step: diagnosticsSettled + +- step: expectTypes + file: main.jsonnet + checks: + - at: m1 + type: 'number' + - at: m2 + type: 'number' + - at: m3 + type: 'number' diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/union_guard_refines_both_branches.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/union_guard_refines_both_branches.yaml new file mode 100644 index 00000000..f664534e --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/union_guard_refines_both_branches.yaml @@ -0,0 +1,23 @@ +steps: +- step: create + files: + main.jsonnet: | + local f(x) = + assert std.isNumber(x) || std.isString(x); + if std.isNumber(x) then + ((m1:|))x + 1 + else + std.length(((m2:|))x); + + f(3) + f("hi") + +- step: diagnosticsSettled + +- step: expectTypes + file: main.jsonnet + checks: + - at: m1 + type: 'number' + match: exact + - at: m2 + type: 'string' diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/function_length_assert_narrows_arity.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/function_length_assert_narrows_arity.yaml new file mode 100644 index 00000000..fda80ed7 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/function_length_assert_narrows_arity.yaml @@ -0,0 +1,18 @@ +steps: +- step: create + files: + main.jsonnet: | + local wrap(f) = + assert std.isFunction(f); + assert std.length(f) == 2; + ((m1:|))f + + wrap(function(a, b) a + b) + +- step: diagnosticsSettled + +- step: expectTypes + file: main.jsonnet + checks: + - at: m1 + 
type: 'function(arg0, arg1)' diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_eq_refines_array_to_tuple.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_eq_refines_array_to_tuple.yaml new file mode 100644 index 00000000..f64df1c1 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_eq_refines_array_to_tuple.yaml @@ -0,0 +1,18 @@ +steps: +- step: create + files: + main.jsonnet: | + local f(xs) = + assert std.isArray(xs) && std.length(xs) == 3; + ((m1:|))xs + + f([1, 2, 3]) + +- step: diagnosticsSettled + +- step: expectTypes + file: main.jsonnet + checks: + - at: m1 + type: '[any, any, any]' + match: exact diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_known_function_refines_impossible_branches.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_known_function_refines_impossible_branches.yaml new file mode 100644 index 00000000..aaea2023 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_known_function_refines_impossible_branches.yaml @@ -0,0 +1,25 @@ +steps: +- step: create + files: + main.jsonnet: | + local f(x, y) = y + 1; + + if std.length(f) == 1 then + ((m1:|))f + else if std.length(f) == 3 then + ((m2:|))f + else if std.length(f) == 2 then + ((m3:|))f + +- step: diagnosticsSettled + +- step: expectTypes + file: main.jsonnet + checks: + - at: m1 + type: 'never' + - at: m2 + type: 'never' + - at: m3 + type: 'function(x, y)' + match: exact diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_known_object_refines_impossible_branches.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_known_object_refines_impossible_branches.yaml new file mode 100644 index 00000000..ec8e1ad1 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_known_object_refines_impossible_branches.yaml @@ 
-0,0 +1,24 @@ +steps: +- step: create + files: + main.jsonnet: | + local x = { a: 1, b: "hi" }; + + if std.length(x) == 1 then + ((m1:|))x + else if std.length(x) == 3 then + ((m2:|))x + else if std.length(x) == 2 then + x.((m3:|))a + +- step: diagnosticsSettled + +- step: expectTypes + file: main.jsonnet + checks: + - at: m1 + type: 'never' + - at: m2 + type: 'never' + - at: m3 + type: 'number' diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_unknown_function_allows_typed_call_site.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_unknown_function_allows_typed_call_site.yaml new file mode 100644 index 00000000..ca0918e8 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_unknown_function_allows_typed_call_site.yaml @@ -0,0 +1,25 @@ +steps: +- step: create + files: + main.jsonnet: | + local f(x) = + if std.isFunction(x) then + if std.length(x) == 2 then + ((m1:|))x(3, 5) + else + ((m2:|))x + else + x; + + f(function(a, b) a + b) + +- step: diagnosticsSettled + +- step: expectTypes + file: main.jsonnet + checks: + - at: m1 + type: 'function(arg0, arg1)' + match: exact + - at: m2 + type: 'function()' diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_unknown_function_refines_arity.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_unknown_function_refines_arity.yaml new file mode 100644 index 00000000..0ad6ae78 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_unknown_function_refines_arity.yaml @@ -0,0 +1,24 @@ +steps: +- step: create + files: + main.jsonnet: | + local f(x) = + if std.isFunction(x) then + if std.length(x) == 2 then + ((m1:|))x + else + ((m2:|))x + else + null; + + f(function(a, b) a + b) + +- step: diagnosticsSettled + +- step: expectTypes + file: main.jsonnet + checks: + - at: m1 + type: 'function(arg0, arg1)' + - at: m2 + type: 'function()' diff --git 
a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_unknown_object_composition_refines_by_shape.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_unknown_object_composition_refines_by_shape.yaml new file mode 100644 index 00000000..2d1bb8ef --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_unknown_object_composition_refines_by_shape.yaml @@ -0,0 +1,38 @@ +steps: +- step: create + files: + main.jsonnet: | + local f(x) = + if std.isObject(x) then + if "a" in x && std.isString(x.a) then + if "b" in x && std.isNumber(x.b) then + if std.length(x) == 2 then + std.length(x.a) + x.((m1:|))b + else if std.length(x) == 1 then + ((m2:|))x + else if std.length(x) == 3 then + x.((m3:|))b + else + x.((m4:|))b + else + 0 + else + 0 + else + 0; + + f({ a: "hello", b: 4 }) + +- step: diagnosticsSettled + +- step: expectTypes + file: main.jsonnet + checks: + - at: m1 + type: 'number' + - at: m2 + type: 'never' + - at: m3 + type: 'number' + - at: m4 + type: 'number' diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/negated_membership_and_is_precise.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/negated_membership_and_is_precise.yaml new file mode 100644 index 00000000..6a7be070 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/negated_membership_and_is_precise.yaml @@ -0,0 +1,19 @@ +steps: +- step: create + files: + main.jsonnet: | + local f(x) = + if std.isObject(x) && "foo" in x && !("foo" in x && "bar" in x) then + x.((m1:|))foo + else + null; + + f({ foo: 1 }) + +- step: diagnosticsSettled + +- step: expectTypes + file: main.jsonnet + checks: + - at: m1 + type: 'any' diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/negated_membership_or_eliminates_required_field.yaml 
b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/negated_membership_or_eliminates_required_field.yaml new file mode 100644 index 00000000..e783bc8c --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/negated_membership_or_eliminates_required_field.yaml @@ -0,0 +1,19 @@ +steps: +- step: create + files: + main.jsonnet: | + local f(x) = + if std.isObject(x) && "foo" in x && !("foo" in x || "bar" in x) then + x.((m1:|))foo + else + null; + + f({ foo: 1 }) + +- step: diagnosticsSettled + +- step: expectTypes + file: main.jsonnet + checks: + - at: m1 + type: 'never' diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/negated_membership_or_is_conservative.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/negated_membership_or_is_conservative.yaml new file mode 100644 index 00000000..b534e9b1 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/negated_membership_or_is_conservative.yaml @@ -0,0 +1,19 @@ +steps: +- step: create + files: + main.jsonnet: | + local f(x) = + if std.isObject(x) && "foo" in x && !("foo" in x || "bar" in x) then + ((m1:|))x + else + null; + + f({ foo: 1 }) + +- step: diagnosticsSettled + +- step: expectTypes + file: main.jsonnet + checks: + - at: m1 + type: 'object' diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/negated_membership_or_with_length_still_eliminates_field.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/negated_membership_or_with_length_still_eliminates_field.yaml new file mode 100644 index 00000000..55dac638 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/negated_membership_or_with_length_still_eliminates_field.yaml @@ -0,0 +1,19 @@ +steps: +- step: create + files: + main.jsonnet: | + local f(x) = + if std.isObject(x) && "foo" in x && !("foo" in x || std.length(x) == 5) then + 
x.((m1:|))foo + else + null; + + f({ foo: 1 }) + +- step: diagnosticsSettled + +- step: expectTypes + file: main.jsonnet + checks: + - at: m1 + type: 'never' diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/object_field_literal_chain_preserves_base_type.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/object_field_literal_chain_preserves_base_type.yaml new file mode 100644 index 00000000..d8339b46 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/object_field_literal_chain_preserves_base_type.yaml @@ -0,0 +1,24 @@ +steps: +- step: create + files: + main.jsonnet: | + local f(x) = + assert std.isObject(x) && std.isString(x.t); + if x.t == "foo" then + 1 + else if x.t == "bar" then + 2 + else if x.t == "quz" then + 3 + else + std.length(x.((m1:|))t) + + f({ t: "abc" }) + +- step: diagnosticsSettled + +- step: expectTypes + file: main.jsonnet + checks: + - at: m1 + type: 'string' diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/object_field_presence_refines_object_shape.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/object_field_presence_refines_object_shape.yaml new file mode 100644 index 00000000..218c1026 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/object_field_presence_refines_object_shape.yaml @@ -0,0 +1,20 @@ +steps: +- step: create + files: + main.jsonnet: | + local f(obj) = + assert std.isObject(obj); + if "foo" in obj then + obj.((m1:|))foo + else + 0; + + f({ foo: 1 }) + +- step: diagnosticsSettled + +- step: expectTypes + file: main.jsonnet + checks: + - at: m1 + type: 'any' diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/object_field_type_guards_refine_nested_paths.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/object_field_type_guards_refine_nested_paths.yaml new file mode 100644 index 
00000000..3852695f --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/object_field_type_guards_refine_nested_paths.yaml @@ -0,0 +1,27 @@ +steps: +- step: create + files: + main.jsonnet: | + local f(obj) = + assert std.isObject(obj); + if "a" in obj then + if std.isNumber(obj.a) then + obj.((m1:|))a + 7 + else + assert !std.isBoolean(obj.a) && obj.a != null; + std.length(obj.((m2:|))a) + else + std.length(obj); + + [f({ b: null }), f({ a: "hello" }), f({ a: 4 })] + +- step: diagnosticsSettled + +- step: expectTypes + file: main.jsonnet + checks: + - at: m1 + type: 'number' + - at: m2 + type: 'string | function() | object | array' + match: exact diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/object_membership_known_union.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/object_membership_known_union.yaml new file mode 100644 index 00000000..305e0a9e --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/object_membership_known_union.yaml @@ -0,0 +1,20 @@ +steps: +- step: create + files: + main.jsonnet: | + local f(b) = + local obj = if b then { foo: 3 } else {}; + if "foo" in obj then + obj.((m1:|))foo + else + 4; + + [f(true), f(false)] + +- step: diagnosticsSettled + +- step: expectTypes + file: main.jsonnet + checks: + - at: m1 + type: 'number' diff --git a/crates/jrsonnet-lsp/tests/scenarios/hover/array_variable.yaml b/crates/jrsonnet-lsp/tests/scenarios/hover/array_variable.yaml new file mode 100644 index 00000000..30260ba2 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/hover/array_variable.yaml @@ -0,0 +1,20 @@ +steps: +- step: create + files: + main.jsonnet: local ((m1:|))arr = [1, 2, 3]; + +- step: requestHover + as: hover + file: main.jsonnet + at: m1 +- step: expectHover + request: hover + result: + contents: + kind: markdown + value: |- + `[number, number, number]` + + ```jsonnet + local arr = [1, 2, 3]; + ``` diff 
--git a/crates/jrsonnet-lsp/tests/scenarios/hover/boolean_literal.yaml b/crates/jrsonnet-lsp/tests/scenarios/hover/boolean_literal.yaml new file mode 100644 index 00000000..7b391046 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/hover/boolean_literal.yaml @@ -0,0 +1,24 @@ +steps: +- step: create + files: + main.jsonnet: local b = ((m1:|))true; + +- step: requestHover + as: hover + file: main.jsonnet + at: m1 +- step: expectHover + request: hover + result: + contents: + kind: markdown + value: |- + `true` + + --- + + Boolean literal `true`. + + ```jsonnet + true + ``` diff --git a/crates/jrsonnet-lsp/tests/scenarios/hover/function_variable.yaml b/crates/jrsonnet-lsp/tests/scenarios/hover/function_variable.yaml new file mode 100644 index 00000000..be1cbd2d --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/hover/function_variable.yaml @@ -0,0 +1,20 @@ +steps: +- step: create + files: + main.jsonnet: local ((m1:|))f(x) = x; + +- step: requestHover + as: hover + file: main.jsonnet + at: m1 +- step: expectHover + request: hover + result: + contents: + kind: markdown + value: |- + `any` + + ```jsonnet + local f(x) = x; + ``` diff --git a/crates/jrsonnet-lsp/tests/scenarios/hover/hover_new_import_invalid_syntax.yaml b/crates/jrsonnet-lsp/tests/scenarios/hover/hover_new_import_invalid_syntax.yaml new file mode 100644 index 00000000..263e407f --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/hover/hover_new_import_invalid_syntax.yaml @@ -0,0 +1,30 @@ +steps: +- step: create + files: + main.libsonnet: | + local module = import './module/main.libsonnet'; + + { + build(input):: [ + module.n((m1:|))ew(input), + ], + } + module/main.libsonnet: | + local helper = import './helper.libsonnet'; + + { + new(input):: { + value: input, + }, + } + module/helper.libsonnet: | + import 1 + +- step: requestHover + as: hover_new + file: main.libsonnet + at: m1 +- step: expectHoverType + request: hover_new + type: '{ build }' + match: exact diff --git 
a/crates/jrsonnet-lsp/tests/scenarios/hover/null_literal.yaml b/crates/jrsonnet-lsp/tests/scenarios/hover/null_literal.yaml new file mode 100644 index 00000000..a33ff710 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/hover/null_literal.yaml @@ -0,0 +1,24 @@ +steps: +- step: create + files: + main.jsonnet: local n = ((m1:|))null; + +- step: requestHover + as: hover + file: main.jsonnet + at: m1 +- step: expectHover + request: hover + result: + contents: + kind: markdown + value: |- + `null` + + --- + + Literal `null` value. + + ```jsonnet + null + ``` diff --git a/crates/jrsonnet-lsp/tests/scenarios/hover/number_literal.yaml b/crates/jrsonnet-lsp/tests/scenarios/hover/number_literal.yaml new file mode 100644 index 00000000..73c1bdeb --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/hover/number_literal.yaml @@ -0,0 +1,15 @@ +steps: +- step: create + files: + main.jsonnet: local x = ((m1:|))42; + +- step: requestHover + as: hover + file: main.jsonnet + at: m1 +- step: expectHover + request: hover + result: + contents: + kind: markdown + value: "`number`" diff --git a/crates/jrsonnet-lsp/tests/scenarios/hover/object_variable.yaml b/crates/jrsonnet-lsp/tests/scenarios/hover/object_variable.yaml new file mode 100644 index 00000000..f89a2af3 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/hover/object_variable.yaml @@ -0,0 +1,20 @@ +steps: +- step: create + files: + main.jsonnet: "local ((m1:|))obj = { a: 1 };" + +- step: requestHover + as: hover + file: main.jsonnet + at: m1 +- step: expectHover + request: hover + result: + contents: + kind: markdown + value: |- + `{ a }` + + ```jsonnet + local obj = { a: 1 }; + ``` diff --git a/crates/jrsonnet-lsp/tests/scenarios/hover/std_object.yaml b/crates/jrsonnet-lsp/tests/scenarios/hover/std_object.yaml new file mode 100644 index 00000000..b4cc2c3e --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/hover/std_object.yaml @@ -0,0 +1,15 @@ +steps: +- step: create + files: + main.jsonnet: 
((m1:|))std.length + +- step: requestHover + as: hover + file: main.jsonnet + at: m1 +- step: expectHover + request: hover + result: + contents: + kind: markdown + value: "`object`" diff --git a/crates/jrsonnet-lsp/tests/scenarios/hover/string_literal.yaml b/crates/jrsonnet-lsp/tests/scenarios/hover/string_literal.yaml new file mode 100644 index 00000000..7d967b13 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/hover/string_literal.yaml @@ -0,0 +1,15 @@ +steps: +- step: create + files: + main.jsonnet: 'local s = ((m1:|))"hello";' + +- step: requestHover + as: hover + file: main.jsonnet + at: m1 +- step: expectHover + request: hover + result: + contents: + kind: markdown + value: "`string`" diff --git a/crates/jrsonnet-lsp/tests/scenarios/references/for_comprehension_binding.yaml b/crates/jrsonnet-lsp/tests/scenarios/references/for_comprehension_binding.yaml new file mode 100644 index 00000000..84f36ad8 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/references/for_comprehension_binding.yaml @@ -0,0 +1,20 @@ +steps: +- step: create + files: + main.jsonnet: "[((m2:|))x for ((m1:|((m3:|))))x in [1,2,3]]" + +- step: requestReferences + as: xRefs + file: main.jsonnet + at: m1 + include_declaration: true + +- step: expectReferences + request: xRefs + result: + - file: main.jsonnet + at: m2 + text: x + - file: main.jsonnet + at: m3 + text: x diff --git a/crates/jrsonnet-lsp/tests/scenarios/references/function_param_usage.yaml b/crates/jrsonnet-lsp/tests/scenarios/references/function_param_usage.yaml new file mode 100644 index 00000000..069f2690 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/references/function_param_usage.yaml @@ -0,0 +1,39 @@ +steps: +- step: create + files: + main.jsonnet: | + local add(((m1:|x)), ((m4:|y))) = + ((m3:|x)) + ((m6:|y)); + add(1, 2) + +- step: requestReferences + as: refsX + file: main.jsonnet + at: m1 + include_declaration: true + +- step: expectReferences + request: refsX + result: + - file: main.jsonnet + at: m1 
+ text: x + - file: main.jsonnet + at: m3 + text: x + +- step: requestReferences + as: refsY + file: main.jsonnet + at: m4 + include_declaration: true + +- step: expectReferences + request: refsY + result: + - file: main.jsonnet + at: m4 + text: y + - file: main.jsonnet + at: m6 + text: y diff --git a/crates/jrsonnet-lsp/tests/scenarios/references/function_parameters.yaml b/crates/jrsonnet-lsp/tests/scenarios/references/function_parameters.yaml new file mode 100644 index 00000000..524da96d --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/references/function_parameters.yaml @@ -0,0 +1,36 @@ +steps: +- step: create + files: + main.jsonnet: local f(((m1:|a)), ((m4:|b)))((m6:|)) = ((m3:|a)) + ((m5:|b)); + +- step: requestReferences + as: refsA + file: main.jsonnet + at: m1 + include_declaration: true + +- step: expectReferences + request: refsA + result: + - file: main.jsonnet + at: m1 + text: a + - file: main.jsonnet + at: m3 + text: a + +- step: requestReferences + as: refsB + file: main.jsonnet + at: m4 + include_declaration: true + +- step: expectReferences + request: refsB + result: + - file: main.jsonnet + at: m4 + text: b + - file: main.jsonnet + at: m5 + text: b diff --git a/crates/jrsonnet-lsp/tests/scenarios/references/local_variable_definition.yaml b/crates/jrsonnet-lsp/tests/scenarios/references/local_variable_definition.yaml new file mode 100644 index 00000000..6e66f131 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/references/local_variable_definition.yaml @@ -0,0 +1,22 @@ +steps: +- step: create + files: + main.jsonnet: | + local [[decl:x]] = 1; + [[use:x]] + 1 + +- step: requestReferences + as: refs + file: main.jsonnet + at: decl + include_declaration: true + +- step: expectReferences + request: refs + result: + - file: main.jsonnet + at: decl + text: x + - file: main.jsonnet + at: use + text: x diff --git a/crates/jrsonnet-lsp/tests/scenarios/references/multiple_usages.yaml 
b/crates/jrsonnet-lsp/tests/scenarios/references/multiple_usages.yaml new file mode 100644 index 00000000..876f129a --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/references/multiple_usages.yaml @@ -0,0 +1,25 @@ +steps: +- step: create + files: + main.jsonnet: | + local [[decl:x]] = 1; + [[use1:x]] + [[use2:x]] + +- step: requestReferences + as: refs + file: main.jsonnet + at: decl + include_declaration: true + +- step: expectReferences + request: refs + result: + - file: main.jsonnet + at: decl + text: x + - file: main.jsonnet + at: use1 + text: x + - file: main.jsonnet + at: use2 + text: x diff --git a/crates/jrsonnet-lsp/tests/scenarios/references/nested_local.yaml b/crates/jrsonnet-lsp/tests/scenarios/references/nested_local.yaml new file mode 100644 index 00000000..7d73521e --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/references/nested_local.yaml @@ -0,0 +1,40 @@ +steps: +- step: create + files: + main.jsonnet: | + local [[outerDecl:outer]] = + local [[innerDecl:inner]] = 1; + [[innerUse:inner]] + 1; + [[outerUse:outer]] + +- step: requestReferences + as: refsOuter + file: main.jsonnet + at: outerDecl + include_declaration: true + +- step: expectReferences + request: refsOuter + result: + - file: main.jsonnet + at: outerDecl + text: outer + - file: main.jsonnet + at: outerUse + text: outer + +- step: requestReferences + as: refsInner + file: main.jsonnet + at: innerDecl + include_declaration: true + +- step: expectReferences + request: refsInner + result: + - file: main.jsonnet + at: innerDecl + text: inner + - file: main.jsonnet + at: innerUse + text: inner diff --git a/crates/jrsonnet-lsp/tests/scenarios/references/object_local.yaml b/crates/jrsonnet-lsp/tests/scenarios/references/object_local.yaml new file mode 100644 index 00000000..232d099d --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/references/object_local.yaml @@ -0,0 +1,24 @@ +steps: +- step: create + files: + main.jsonnet: | + { + local ((m1:|((m2:|))))helper = 42, + 
value: ((m3:|))helper, + } + +- step: requestReferences + as: helperRefs + file: main.jsonnet + at: m1 + include_declaration: true + +- step: expectReferences + request: helperRefs + result: + - file: main.jsonnet + at: m2 + text: helper + - file: main.jsonnet + at: m3 + text: helper diff --git a/crates/jrsonnet-lsp/tests/scenarios/references/shadowing_different_scopes.yaml b/crates/jrsonnet-lsp/tests/scenarios/references/shadowing_different_scopes.yaml new file mode 100644 index 00000000..b4175467 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/references/shadowing_different_scopes.yaml @@ -0,0 +1,40 @@ +steps: +- step: create + files: + main.jsonnet: | + local [[outerDecl:x]] = 1; + local f([[innerDecl:x]]) = + [[innerUse:x]]; + [[outerUse:x]] + +- step: requestReferences + as: outerX + file: main.jsonnet + at: outerDecl + include_declaration: true + +- step: expectReferences + request: outerX + result: + - file: main.jsonnet + at: outerDecl + text: x + - file: main.jsonnet + at: outerUse + text: x + +- step: requestReferences + as: innerX + file: main.jsonnet + at: innerDecl + include_declaration: true + +- step: expectReferences + request: innerX + result: + - file: main.jsonnet + at: innerDecl + text: x + - file: main.jsonnet + at: innerUse + text: x diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/close_then_watched_change_then_references.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/close_then_watched_change_then_references.yaml new file mode 100644 index 00000000..f47b8028 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/close_then_watched_change_then_references.yaml @@ -0,0 +1,36 @@ +# Ensure closing one file does not drop reference data for another file, +# and that watched-file changes update dependency graph state. 
+steps: +- step: create + files: + lib.jsonnet: local ((m1:|))target = 1; ((m2:|))target + main.jsonnet: local lib = import "lib.jsonnet"; lib.target + +- step: diagnosticsSettled + +- step: close + file: main.jsonnet + +- step: writeFile + path: main.jsonnet + text: local lib = import "lib.jsonnet"; lib.other + +- step: notifyWatchedFiles + changes: + - path: main.jsonnet + type: changed + +- step: diagnosticsSettled + +- step: requestReferences + as: refsAfterWatchedChange + file: lib.jsonnet + at: m1 + include_declaration: false + +- step: expectReferences + request: refsAfterWatchedChange + result: + - file: lib.jsonnet + at: m2 + text: target diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/code_lens_references_and_evaluate.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/code_lens_references_and_evaluate.yaml new file mode 100644 index 00000000..383114fc --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/code_lens_references_and_evaluate.yaml @@ -0,0 +1,32 @@ +# Verify two concrete code lenses for `local x = 1; x`: +# - a references lens at the `x` binding with a stable find-references payload +# - a file-level Evaluate lens with the expected eval command payload. 
+steps: +- step: create + files: + main.jsonnet: ((fileStart:|))local [[decl:x]] = 1; x + +- step: diagnosticsSettled + +- step: requestCodeLens + as: lenses + file: main.jsonnet + +- step: expectCodeLens + request: lenses + result: + - at: decl + text: x + command: + title: 1 reference + command: jrsonnet.findReferences + arguments: + - file: main.jsonnet + - 0 + - 6 + - range: fileStart + command: + title: Evaluate + command: jrsonnet.evalFile + arguments: + - file: main.jsonnet diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/completion_bracket_lookup_fields.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/completion_bracket_lookup_fields.yaml new file mode 100644 index 00000000..fa30ae92 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/completion_bracket_lookup_fields.yaml @@ -0,0 +1,26 @@ +# Verify completion through bracket object access resolves the nested object type: +# `hm["foo"].` should return exactly the fields `a`, `b`, and `c`. +steps: +- step: create + files: + main.jsonnet: | + local hm = { + foo: { + a: true, + b: 4, + c: "hi", + }, + }; + + hm["foo"].((m1:|)) + +- step: diagnosticsSettled + +- step: requestCompletion + as: bracketCompletion + file: main.jsonnet + at: m1 +- step: expectCompletion + request: bracketCompletion + labels: [a, b, c] + allow_extra: false diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/completion_nested_object_fields.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/completion_nested_object_fields.yaml new file mode 100644 index 00000000..0a4136f6 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/completion_nested_object_fields.yaml @@ -0,0 +1,18 @@ +# Verify deep object-path completion returns only nested fields at the cursor. 
+steps: +- step: create + files: + main.jsonnet: | + local obj = { inner: { x: 1, y: 2 } }; + obj.inner.((m1:|)) + +- step: diagnosticsSettled + +- step: requestCompletion + as: nestedFields + file: main.jsonnet + at: m1 +- step: expectCompletion + request: nestedFields + labels: [x, y] + allow_extra: false diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/completion_non_identifier_field_text_edit.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/completion_non_identifier_field_text_edit.yaml new file mode 100644 index 00000000..fbf3cca1 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/completion_non_identifier_field_text_edit.yaml @@ -0,0 +1,35 @@ +# Verify completion for non-identifier object fields uses bracket rewrite text edits. +# `obj.` should include: +# - `"my-field"` with a text edit rewriting `.` +# - `normal` as a plain field completion. +steps: +- step: create + files: + main.jsonnet: | + local obj = { + "my-field": 1, + normal: 2, + }; + obj[[dotBeforeCompletion:.]]((m1:|)) + +- step: diagnosticsSettled + +- step: requestCompletion + as: objectFieldCompletion + file: main.jsonnet + at: m1 +- step: expectCompletion + request: objectFieldCompletion + result: + isIncomplete: false + items: + - label: my-field + kind: 5 + detail: number + textEdit: + range: + rangeOf: dotBeforeCompletion + newText: "[\"my-field\"]" + - label: normal + kind: 5 + detail: number diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/completion_syntax_error_scope.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/completion_syntax_error_scope.yaml new file mode 100644 index 00000000..beb2dc0b --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/completion_syntax_error_scope.yaml @@ -0,0 +1,21 @@ +# Ensure completion remains useful when the current expression is syntactically +# incomplete: in-scope locals and object keywords should still be offered. 
+steps: +- step: create + files: + main.jsonnet: | + local x = 1; + local y = 2; + { foo: x +((m1:|)) + +- step: diagnosticsSettled + +- step: requestCompletion + as: completionAtError + file: main.jsonnet + at: m1 +- step: expectCompletion + request: completionAtError + labels: [$, self, std, super, x, y] + allow_extra: false + diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/completion_union_nested_flow.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/completion_union_nested_flow.yaml new file mode 100644 index 00000000..4f186b47 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/completion_union_nested_flow.yaml @@ -0,0 +1,28 @@ +# Verify nested union object construction keeps member completion precise: +# `obj.a.` should offer exactly the shared field `b`. +steps: +- step: create + files: + main.jsonnet: | + local f(x, y, z) = + assert std.isBoolean(x) && std.isBoolean(y) && std.isBoolean(z); + local obj = { + a: + if x then + { b: if y then 1 else false } + else + { b: if z then true else "hi" }, + }; + obj.a.((m1:|)) + f(true, false, true) + +- step: diagnosticsSettled + +- step: requestCompletion + as: unionFieldCompletion + file: main.jsonnet + at: m1 +- step: expectCompletion + request: unionFieldCompletion + labels: [b] + allow_extra: false diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/config_change_code_actions.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/config_change_code_actions.yaml new file mode 100644 index 00000000..90ea2503 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/config_change_code_actions.yaml @@ -0,0 +1,110 @@ +# Verify config updates take effect immediately for code-action policy: +# default policy offers import-removal variants, restricted policy does not. 
+steps: +- step: create + files: + scenario-code-action-policy.jsonnet: ((rangeStart:|))local ((x:|x)) = import "foo.libsonnet"; 42 + +- step: diagnosticsSettled + +- step: requestCodeAction + as: defaultPolicy + file: scenario-code-action-policy.jsonnet + at: rangeStart + text: local x = import "foo.libsonnet"; 42 + diagnostics: + - at: x + text: x + severity: warning + code: unused-variable + source: jrsonnet-lint + message: unused variable + +- step: expectCodeAction + request: defaultPolicy + result: + - title: Prefix `x` with `_` + kind: quickfix + isPreferred: true + diagnostics: + - at: x + text: x + severity: warning + code: unused-variable + source: jrsonnet-lint + message: unused variable + edits: + scenario-code-action-policy.jsonnet: + - at: x + text: x + replace: _x + + - title: Remove unused binding `x` + kind: quickfix + isPreferred: false + diagnostics: + - at: x + text: x + severity: warning + code: unused-variable + source: jrsonnet-lint + message: unused variable + edits: + scenario-code-action-policy.jsonnet: + - at: rangeStart + len: 32 + replace: "" + + - title: Remove all unused bindings + kind: source.fixAll + isPreferred: false + diagnostics: + - at: x + text: x + severity: warning + code: unused-variable + source: jrsonnet-lint + message: unused variable + edits: + scenario-code-action-policy.jsonnet: + - at: rangeStart + len: 32 + replace: "" + +- step: config + settings: + jsonnet: + codeActions: + removeUnused: nonImportBindings + +- step: requestCodeAction + as: nonImportBindingsPolicy + file: scenario-code-action-policy.jsonnet + at: rangeStart + text: local x = import "foo.libsonnet"; 42 + diagnostics: + - at: x + text: x + severity: warning + code: unused-variable + source: jrsonnet-lint + message: unused variable + +- step: expectCodeAction + request: nonImportBindingsPolicy + result: + - title: Prefix `x` with `_` + kind: quickfix + isPreferred: true + diagnostics: + - at: x + text: x + severity: warning + code: unused-variable + 
source: jrsonnet-lint + message: unused variable + edits: + scenario-code-action-policy.jsonnet: + - at: x + text: x + replace: _x diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/cross_file_rename_navigation_roundtrip.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/cross_file_rename_navigation_roundtrip.yaml new file mode 100644 index 00000000..2afd0d4d --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/cross_file_rename_navigation_roundtrip.yaml @@ -0,0 +1,99 @@ +# Exercise cross-file rename end-to-end, then validate navigation APIs on +# the updated text state after edits are applied. +steps: +- step: create + files: + lib.jsonnet: "{ ((libField:|helper)): function(x) x * 2 }" + main.jsonnet: "local lib = import \"lib.jsonnet\"; lib.((mainField:|helper))(2) + lib.((mainField2:|helper))(3)" + +- step: diagnosticsSettled + +- step: requestDefinition + as: defBefore + file: main.jsonnet + at: mainField +- step: expectDefinition + request: defBefore + result: + file: lib.jsonnet + at: libField + text: helper + +- step: requestRename + as: renameHelper + file: lib.jsonnet + at: libField + new_name: util + +- step: expectRename + request: renameHelper + result: + edits: + lib.jsonnet: + - at: libField + text: helper + replace: util + main.jsonnet: + - at: mainField + text: helper + replace: util + - at: mainField2 + text: helper + replace: util + +- step: changeFull + file: lib.jsonnet + text: "{ ((libFieldAfter:|util)): function(x) x * 2 }" + version: 2 + +- step: changeFull + file: main.jsonnet + text: "local lib = import \"lib.jsonnet\"; lib.((mainFieldAfter:|util))(2) + lib.((mainFieldAfter2:|util))(3)" + version: 2 + +- step: diagnosticsSettled + +- step: requestReferences + as: refsAfter + file: lib.jsonnet + at: libFieldAfter + include_declaration: false + + # For object-field rename, references currently only assert no stale entries; + # positive post-rename references are covered in cross_file_rename_references_roundtrip. 
+- step: expectReferences + request: refsAfter + result: + +- step: requestDefinition + as: defAfter + file: main.jsonnet + at: mainFieldAfter +- step: expectDefinition + request: defAfter + result: + file: lib.jsonnet + at: libFieldAfter + text: util + +- step: requestDeclaration + as: declAfter + file: main.jsonnet + at: mainFieldAfter +- step: expectDeclaration + request: declAfter + result: + file: lib.jsonnet + at: libFieldAfter + text: util + +- step: requestTypeDefinition + as: typeDefAfter + file: main.jsonnet + at: mainFieldAfter2 +- step: expectTypeDefinition + request: typeDefAfter + result: + file: lib.jsonnet + at: libFieldAfter + text: util diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/cross_file_rename_references_roundtrip.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/cross_file_rename_references_roundtrip.yaml new file mode 100644 index 00000000..fa0d8949 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/cross_file_rename_references_roundtrip.yaml @@ -0,0 +1,70 @@ +# Verify cross-file rename of an exported local keeps references and navigation +# coherent after applying edits to both source and importer files. 
+steps: +- step: create + files: + lib.jsonnet: local ((libDecl:|target)) = 1; ((libUse:|target)) + main.jsonnet: local lib = import "lib.jsonnet"; lib.((mainUse:|target)) + +- step: diagnosticsSettled + +- step: requestRename + as: renameTarget + file: lib.jsonnet + at: libDecl + new_name: renamed + +- step: expectRename + request: renameTarget + result: + edits: + lib.jsonnet: + - at: libDecl + text: target + replace: renamed + - at: libUse + text: target + replace: renamed + main.jsonnet: + - at: mainUse + text: target + replace: renamed + +- step: changeFull + file: lib.jsonnet + text: local ((libDeclAfter:|renamed)) = 1; ((libUseAfter:|renamed)) + version: 2 + +- step: changeFull + file: main.jsonnet + text: local lib = import "lib.jsonnet"; lib.((mainUseAfter:|renamed)) + version: 2 + +- step: diagnosticsSettled + +- step: requestReferences + as: refsAfterRename + file: lib.jsonnet + at: libDeclAfter + include_declaration: false + +- step: expectReferences + request: refsAfterRename + result: + - file: lib.jsonnet + at: libUseAfter + text: renamed + - file: main.jsonnet + at: mainUseAfter + text: renamed + +- step: requestDefinition + as: defAfterRename + file: main.jsonnet + at: mainUseAfter +- step: expectDefinition + request: defAfterRename + result: + file: lib.jsonnet + at: libDeclAfter + text: renamed diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/diagnostics_no_such_field_available_list.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/diagnostics_no_such_field_available_list.yaml new file mode 100644 index 00000000..49e92a5d --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/diagnostics_no_such_field_available_list.yaml @@ -0,0 +1,25 @@ +# Verify type-error diagnostics fall back to listing available fields when no +# close suggestion exists. 
+steps: +- step: create + files: + main.jsonnet: | + local dims = { length: 1, width: 2 }; + ((m1:|))dims.xyz + +- step: config + settings: + jsonnet: + enableLintDiagnostics: true + +- step: diagnosticsSettled + +- step: expectDiagnostics + file: main.jsonnet + diagnostics: + - at: m1 + text: dims.xyz + severity: warning + code: type-error + source: jrsonnet-lint + message: "no such field `xyz`; available fields: length, width" diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/diagnostics_no_such_field_with_suggestion.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/diagnostics_no_such_field_with_suggestion.yaml new file mode 100644 index 00000000..dbb44374 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/diagnostics_no_such_field_with_suggestion.yaml @@ -0,0 +1,25 @@ +# Verify type-error diagnostics include did-you-mean suggestions for close field typos. +# `dims.lenght` should suggest `length`. +steps: +- step: create + files: + main.jsonnet: | + local dims = { length: 1, width: 2 }; + ((m1:|))dims.lenght + +- step: config + settings: + jsonnet: + enableLintDiagnostics: true + +- step: diagnosticsSettled + +- step: expectDiagnostics + file: main.jsonnet + diagnostics: + - at: m1 + text: dims.lenght + severity: warning + code: type-error + source: jrsonnet-lint + message: "no such field `lenght`; did you mean `length`?" diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/diagnostics_save_roundtrip.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/diagnostics_save_roundtrip.yaml new file mode 100644 index 00000000..96c46fe5 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/diagnostics_save_roundtrip.yaml @@ -0,0 +1,38 @@ +# Verify diagnostics clear after a full-document edit plus save cycle. +steps: +- step: create + files: + main.jsonnet: local ((m1:|))unused = 1; 42 + + # Lint diagnostics are opt-in; enable them so unused-variable appears. 
+- step: config + settings: + jsonnet: + enableLintDiagnostics: true + +- step: diagnosticsSettled + +- step: expectDiagnostics + file: main.jsonnet + diagnostics: + - at: m1 + text: unused + severity: warning + code: unused-variable + source: jrsonnet-lint + message: "unused variable: `unused`; prefix with `_` to silence this warning" + +- step: changeFull + file: main.jsonnet + text: "42" + version: 2 + +- step: save + file: main.jsonnet + text: + +- step: diagnosticsSettled + +- step: expectDiagnostics + file: main.jsonnet + diagnostics: [] diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/document_symbol_local_and_object.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/document_symbol_local_and_object.yaml new file mode 100644 index 00000000..e1dfbe51 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/document_symbol_local_and_object.yaml @@ -0,0 +1,30 @@ +# Verify `textDocument/documentSymbol` returns both: +# - a local binding symbol for `x` +# - an object field symbol for `result` +# and that each symbol carries the expected kind and exact ranges. 
+steps: +- step: create + files: + main.jsonnet: | + local [[localBinding:x = 1]]; + { [[resultField:result: x]] } + +- step: diagnosticsSettled + +- step: requestDocumentSymbol + as: docSymbols + file: main.jsonnet + +- step: expectDocumentSymbol + request: docSymbols + result: + - name: x + kind: 13 + range: {rangeOf: localBinding} + selectionRange: {rangeOf: localBinding} + children: + - name: result + kind: 8 + range: {rangeOf: resultField} + selectionRange: {rangeOf: resultField} + children: diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/fix_all_unused_multifile_comments_preserved.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/fix_all_unused_multifile_comments_preserved.yaml new file mode 100644 index 00000000..a5c12a81 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/fix_all_unused_multifile_comments_preserved.yaml @@ -0,0 +1,81 @@ +# Validate source.fixAll for unused bindings in multiple files while keeping +# surrounding comments intact by applying precise edit ranges only. 
+steps: +- step: create + files: + main.jsonnet: | + ((m1:|))// main heading + ((m4:|))local ((m2:|((m3:|))))main_unused = import "lib.jsonnet"; + 42 + lib.jsonnet: | + ((m5:|))// lib heading + ((m8:|))local ((m6:|((m7:|))))lib_unused = 1; + 7 + +- step: diagnosticsSettled + +- step: requestCodeAction + as: fixMain + file: main.jsonnet + at: m1 + len: 1 + only: [source.fixAll] + diagnostics: + - at: m2 + text: main_unused + severity: warning + code: unused-variable + source: jrsonnet-lint + message: unused variable + +- step: expectCodeAction + request: fixMain + result: + - title: Remove all unused bindings + kind: source.fixAll + isPreferred: false + diagnostics: + - at: m3 + text: main_unused + severity: warning + code: unused-variable + source: jrsonnet-lint + message: unused variable + edits: + main.jsonnet: + - at: m4 + len: 40 + replace: "" + +- step: requestCodeAction + as: fixLib + file: lib.jsonnet + at: m5 + len: 1 + only: [source.fixAll] + diagnostics: + - at: m6 + text: lib_unused + severity: warning + code: unused-variable + source: jrsonnet-lint + message: unused variable + +- step: expectCodeAction + request: fixLib + result: + - title: Remove all unused bindings + kind: source.fixAll + isPreferred: false + diagnostics: + - at: m7 + text: lib_unused + severity: warning + code: unused-variable + source: jrsonnet-lint + message: unused variable + edits: + lib.jsonnet: + - at: m8 + len: 20 + replace: "" diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_bracket_lookup_field_type.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_bracket_lookup_field_type.yaml new file mode 100644 index 00000000..53fe4138 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_bracket_lookup_field_type.yaml @@ -0,0 +1,25 @@ +# Verify hover reflects field precision through a string-literal bracket lookup: +# `hm["foo"].a` should preserve the literal `true` type for `a`. 
+steps: +- step: create + files: + main.jsonnet: | + local hm = { + foo: { + a: true, + b: 4, + c: "hi", + }, + }; + hm["foo"].((m1:|))a + +- step: diagnosticsSettled + +- step: requestHover + as: bracketFieldHover + file: main.jsonnet + at: m1 +- step: expectHoverType + request: bracketFieldHover + type: "true" + match: exact diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_cjk_import_shape.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_cjk_import_shape.yaml new file mode 100644 index 00000000..6a3a7de3 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_cjk_import_shape.yaml @@ -0,0 +1,21 @@ +# Verify hover preserves CJK object key names across an import boundary. +# The imported value should still expose the `"水"` field in the rendered type. +steps: +- step: create + files: + a.jsonnet: | + { "水": 1 + 2 } + b.jsonnet: | + local imported = import "a.jsonnet"; + ((m1:|))imported["水"] + +- step: diagnosticsSettled + +- step: requestHover + as: importedTypeHasCjkField + file: b.jsonnet + at: m1 +- step: expectHoverType + request: importedTypeHasCjkField + type: '{ 水 }' + match: exact diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_completion_execute_command.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_completion_execute_command.yaml new file mode 100644 index 00000000..50ac63e8 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_completion_execute_command.yaml @@ -0,0 +1,40 @@ +# Smoke-test heterogeneous request handling in one scenario: +# hover, completion, and executeCommand request/expect flow. 
+steps: +- step: create + files: + lib.jsonnet: local value = 1; value + main.jsonnet: ((m1:|((m2:|))))local lib = import 'lib.jsonnet'; lib.value + +- step: diagnosticsSettled + +- step: requestHover + as: hoverRequest + file: main.jsonnet + at: m1 +- step: expectHover + request: hoverRequest + result: + +- step: requestCompletion + as: completionRequest + file: main.jsonnet + at: m2 +- step: expectCompletion + request: completionRequest + result: + isIncomplete: false + items: + - label: std + kind: 9 + detail: Jsonnet standard library + +- step: requestExecuteCommand + as: evalExpression + command: jrsonnet.evalExpression + arguments: + - 1 + 2 + +- step: expectExecuteCommand + request: evalExpression + result: 3 diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_edge_positions.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_edge_positions.yaml new file mode 100644 index 00000000..d514eb8b --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_edge_positions.yaml @@ -0,0 +1,39 @@ +# Exercise hover behavior at non-symbol positions (should be null) and on +# a real symbol site (should yield a concrete inferred type). +steps: +- step: create + files: + main.jsonnet: | + local((m1:|)) x ((m2:|))= 1; + ((m3:|))x + +- step: diagnosticsSettled + + # Whitespace between `local` and `x`. +- step: requestHover + as: hoverWhitespace + file: main.jsonnet + at: m1 +- step: expectHover + request: hoverWhitespace + result: + + # Operator token. +- step: requestHover + as: hoverEquals + file: main.jsonnet + at: m2 +- step: expectHover + request: hoverEquals + result: + + # Real symbol usage should still resolve to a type. 
+- step: requestHover + as: hoverUsage + file: main.jsonnet + at: m3 +- step: expectHoverType + request: hoverUsage + type: number + match: exact + diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_no_result_on_comment_token.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_no_result_on_comment_token.yaml new file mode 100644 index 00000000..d91e0367 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_no_result_on_comment_token.yaml @@ -0,0 +1,18 @@ +# Verify hover does not trigger on comment text. +# Requesting hover within `//` should return `null`. +steps: +- step: create + files: + main.jsonnet: | + // ((m1:|))hi there + 1 + 1 + +- step: diagnosticsSettled + +- step: requestHover + as: hoverOnComment + file: main.jsonnet + at: m1 +- step: expectHover + request: hoverOnComment + result: diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_non_identifier_field_names.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_non_identifier_field_names.yaml new file mode 100644 index 00000000..1765bad5 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_non_identifier_field_names.yaml @@ -0,0 +1,19 @@ +# Verify hover preserves non-identifier object keys in the displayed object type. +# Hovering `obj` usage should include both `"foo bar"` and `"the-field"`. 
+steps: +- step: create + files: + main.jsonnet: | + local obj = { "foo bar": 3, "the-field": 4 }; + ((m1:|))obj + +- step: diagnosticsSettled + +- step: requestHover + as: objectTypeHasSpacedKey + file: main.jsonnet + at: m1 +- step: expectHoverType + request: objectTypeHasSpacedKey + type: '{ foo bar, the-field }' + match: exact diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_union_nested_field_type.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_union_nested_field_type.yaml new file mode 100644 index 00000000..182b3446 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_union_nested_field_type.yaml @@ -0,0 +1,28 @@ +# Verify hover keeps union precision for nested branch-dependent field values: +# `obj.a.b` should include number, boolean, and string in its inferred type. +steps: +- step: create + files: + main.jsonnet: | + local f(x, y, z) = + assert std.isBoolean(x) && std.isBoolean(y) && std.isBoolean(z); + local obj = { + a: + if x then + { b: if y then 1 else false } + else + { b: if z then true else "hi" }, + }; + obj.a.((m1:|))b; + f(true, false, true) + +- step: diagnosticsSettled + +- step: requestHover + as: unionFieldHoverNumber + file: main.jsonnet + at: m1 +- step: expectHoverType + request: unionFieldHoverNumber + type: 'true | false | number | string' + match: exact diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/importstr_non_json_dependency.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/importstr_non_json_dependency.yaml new file mode 100644 index 00000000..95a671d5 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/importstr_non_json_dependency.yaml @@ -0,0 +1,52 @@ +# Ensure importstr dependencies on non-Jsonnet files are not interpreted +# as Jsonnet during analysis. 
+steps: + # Build a tiny workspace with: + # - `main.libsonnet` consuming values + # - `data.libsonnet` defining `importstr`/`importbin` + # - `script.k` intentionally non-Jsonnet content +- step: create + files: + # Consumer file: we hover the final object value positions to validate inferred types. + main.libsonnet: | + local data = import "./data.libsonnet"; + local payload = data.payload; + local bytes = data.bytes; + { payload: ((m1:|))payload, bytes: b((m2:|))ytes } + # Producer file: this is the behavior under test. + # `importstr` should infer `string`; `importbin` should infer byte array semantics. + data.libsonnet: | + { + payload: importstr "./script.k", + bytes: importbin "./script.k", + } + # Non-Jsonnet payload that previously caused crashes when traversed like code imports. + script.k: | + import regex + x = "abc" + open: + # Open both relevant Jsonnet files so hover/type inference has active documents. + - main.libsonnet + - data.libsonnet + + # Hover the `payload` value in the final object expression. +- step: requestHover + as: payloadHover + file: main.libsonnet + at: m1 + # `importstr` must infer a string type. +- step: expectHoverType + request: payloadHover + type: string + match: exact + + # Hover the `bytes` value in the same final object expression. +- step: requestHover + as: bytesHover + file: main.libsonnet + at: m2 + # `importbin` must infer bounded byte numbers (0..255). +- step: expectHoverType + request: bytesHover + type: array + match: exact diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/inlay_hints_after_flow_narrowing_edit.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/inlay_hints_after_flow_narrowing_edit.yaml new file mode 100644 index 00000000..235858ea --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/inlay_hints_after_flow_narrowing_edit.yaml @@ -0,0 +1,43 @@ +# Confirm flow-sensitive inlay hints appear when a guard narrows a type, +# and disappear after an edit that removes the narrowing condition. 
+steps: +- step: create + files: + main.jsonnet: | + [[hintRangeBefore:local y = std.extVar("y"); + local ((hintPos:x|)) = if std.isNumber(y) then y else 0; + x]] + +- step: diagnosticsSettled + +- step: requestInlayHints + as: narrowed + file: main.jsonnet + range: hintRangeBefore + +- step: expectInlayHints + request: narrowed + result: + - positionOf: hintPos + label: ": number" + kind: 1 + paddingLeft: true + +- step: changeFull + file: main.jsonnet + text: | + [[hintRangeAfter:local y = std.extVar("y"); + local x = if true then y else 0; + x]] + version: 2 + +- step: diagnosticsSettled + +- step: requestInlayHints + as: widened + file: main.jsonnet + range: hintRangeAfter + +- step: expectInlayHints + request: widened + result: diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/lint_config_flip_mid_session.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/lint_config_flip_mid_session.yaml new file mode 100644 index 00000000..b843b68a --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/lint_config_flip_mid_session.yaml @@ -0,0 +1,52 @@ +# Verify runtime config flips propagate immediately: enabling lint emits +# diagnostics, disabling clears them, and re-enabling restores them. 
+steps: +- step: create + files: + main.jsonnet: local ((m1:|((m2:|))))unused = 1; 42 + +- step: config + settings: + jsonnet: + enableLintDiagnostics: true + +- step: diagnosticsSettled + +- step: expectDiagnostics + file: main.jsonnet + diagnostics: + - at: m1 + text: unused + severity: warning + code: unused-variable + source: jrsonnet-lint + message: "unused variable: `unused`; prefix with `_` to silence this warning" + +- step: config + settings: + jsonnet: + enableLintDiagnostics: false + +- step: diagnosticsSettled + +- step: expectDiagnostics + file: main.jsonnet + diagnostics: [] + +- step: config + settings: + jsonnet: + enableLintDiagnostics: true + +- step: diagnosticsSettled + +- step: expectDiagnostics + file: main.jsonnet + diagnostics: + - at: m2 + text: unused + severity: warning + code: unused-variable + source: jrsonnet-lint + message: "unused variable: `unused`; prefix with `_` to silence this warning" + diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/marker_syntax_definition_smoke.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/marker_syntax_definition_smoke.yaml new file mode 100644 index 00000000..a38b9b41 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/marker_syntax_definition_smoke.yaml @@ -0,0 +1,26 @@ +# Marker syntax smoke test: +# - `[[name:text]]` defines a range marker. +# - `((name:|x))` defines a cursor marker before `x`. 
+steps: +- step: create + files: + main.jsonnet: | + local [[valueDecl:value]] = 1; + ((valueUse:|value)) + +- step: diagnosticsSettled + +- step: expectDiagnostics + file: main.jsonnet + diagnostics: [] + +- step: requestDefinition + as: def + file: main.jsonnet + at: valueUse + +- step: expectDefinition + request: def + result: + file: main.jsonnet + at: valueDecl diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/missing_step_coverage.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/missing_step_coverage.yaml new file mode 100644 index 00000000..4c1aa331 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/missing_step_coverage.yaml @@ -0,0 +1,149 @@ +# Coverage scenario for request/expect pairs that were previously unexercised +# in YAML runner tests. This intentionally mixes many handler types. +steps: +- step: create + files: + main.jsonnet: | + local add(a, b) = a + b; + local value = add(1, 2); + { result: value } + open: [] + +- step: open + file: main.jsonnet + text: | + local add(a, b) = a + b; + local ((valueDecl:|value)) = add(1, ((sigPos:|2))); + { result: ((valueUse:|[[valueUseRange:value]])) } + +- step: diagnosticsSettled + +- step: expectDiagnostics + file: main.jsonnet + diagnostics: [] + +- step: save + file: main.jsonnet + text: + +- step: diagnosticsSettled + +- step: expectDiagnostics + file: main.jsonnet + diagnostics: [] + +- step: requestDefinition + as: def + file: main.jsonnet + at: valueUse +- step: expectDefinition + request: def + result: + file: main.jsonnet + at: valueDecl + text: value + +- step: requestDeclaration + as: decl + file: main.jsonnet + at: valueUse +- step: expectDeclaration + request: decl + result: + file: main.jsonnet + at: valueDecl + text: value + +- step: requestTypeDefinition + as: typeDef + file: main.jsonnet + at: valueUse +- step: expectTypeDefinition + request: typeDef + result: + file: main.jsonnet + at: valueDecl + text: value + +- step: requestPrepareRename + as: prep + file: main.jsonnet 
+ at: valueUse +- step: expectPrepareRename + request: prep + result: {rangeOf: valueUseRange} + +- step: requestRename + as: rename + file: main.jsonnet + at: valueUse + new_name: output + +- step: expectRename + request: rename + result: + edits: + main.jsonnet: + - at: valueDecl + text: value + replace: output + - at: valueUse + text: value + replace: output + +- step: requestSignatureHelp + as: sig + file: main.jsonnet + at: sigPos +- step: expectSignatureHelp + request: sig + result: + signatures: + - label: add(a, b) + documentation: + parameters: + - label: [4, 5] + documentation: + - label: [7, 8] + documentation: + activeParameter: 1 + activeSignature: 0 + activeParameter: 1 + +- step: config + settings: + jsonnet: + formatting: + formatterPath: /definitely/missing-jsonnetfmt + +- step: requestFormatting + as: fmt + file: main.jsonnet + +- step: expectFormatting + request: fmt + result: + +- step: requestDocumentSymbol + as: docSymbols + file: missing.jsonnet + +- step: expectDocumentSymbol + request: docSymbols + result: + +- step: requestWorkspaceSymbol + as: wsSymbols + query: does-not-exist-anywhere + +- step: expectWorkspaceSymbol + request: wsSymbols + result: + +- step: requestCodeLens + as: codeLens + file: missing.jsonnet + +- step: expectCodeLens + request: codeLens + result: diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/open_request_expect_sequence.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/open_request_expect_sequence.yaml new file mode 100644 index 00000000..2f3e2a75 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/open_request_expect_sequence.yaml @@ -0,0 +1,72 @@ +# Baseline open -> diagnostics -> codeAction request/expect sequence with +# full structural quickfix assertions. 
+steps: +- step: create + files: + scenario-runner.jsonnet: ((rangeStart:|))local ((x:|x)) = 1; 42 + +- step: diagnosticsSettled + +- step: requestCodeAction + as: unusedActions + file: scenario-runner.jsonnet + at: rangeStart + text: local x = 1; 42 + diagnostics: + - at: x + text: x + severity: warning + code: unused-variable + source: jrsonnet-lint + message: unused variable + +- step: expectCodeAction + request: unusedActions + result: + - title: Prefix `x` with `_` + kind: quickfix + isPreferred: true + diagnostics: + - at: x + text: x + severity: warning + code: unused-variable + source: jrsonnet-lint + message: unused variable + edits: + scenario-runner.jsonnet: + - at: x + text: x + replace: _x + + - title: Remove unused binding `x` + kind: quickfix + isPreferred: false + diagnostics: + - at: x + text: x + severity: warning + code: unused-variable + source: jrsonnet-lint + message: unused variable + edits: + scenario-runner.jsonnet: + - at: rangeStart + len: 11 + replace: "" + + - title: Remove all unused bindings + kind: source.fixAll + isPreferred: false + diagnostics: + - at: x + text: x + severity: warning + code: unused-variable + source: jrsonnet-lint + message: unused variable + edits: + scenario-runner.jsonnet: + - at: rangeStart + len: 11 + replace: "" diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/preserve_cross_file_refs_after_close.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/preserve_cross_file_refs_after_close.yaml new file mode 100644 index 00000000..ae4d4cf8 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/preserve_cross_file_refs_after_close.yaml @@ -0,0 +1,30 @@ +# Closing an importer should not evict cross-file reference information +# needed to answer references from an imported file. 
+steps: +- step: create + files: + lib.jsonnet: local ((m1:|))target = 1; ((m2:|))target + main.jsonnet: local lib = import 'lib.jsonnet'; lib.((m3:|))target + +- step: diagnosticsSettled + +- step: close + file: main.jsonnet + +- step: diagnosticsSettled + +- step: requestReferences + as: refsAfterClose + file: lib.jsonnet + at: m1 + include_declaration: false + +- step: expectReferences + request: refsAfterClose + result: + - file: lib.jsonnet + at: m2 + text: target + - file: main.jsonnet + at: m3 + text: target diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/rapid_incremental_edit_recovery.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/rapid_incremental_edit_recovery.yaml new file mode 100644 index 00000000..0012cdd8 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/rapid_incremental_edit_recovery.yaml @@ -0,0 +1,61 @@ +# Stress incremental edits: introduce a transient parse break and then fix it, +# then assert final diagnostics, navigation, and tokenization are coherent. +steps: +- step: create + files: + main.jsonnet: "[[kw_local:local]] ((m2:|[[decl_x:x]])) [[eq:=]] ((mLit:|))1; ((m1:|[[use_x:x]]))" + +- step: diagnosticsSettled + + # Temporary invalid edit at the literal site. +- step: changeIncremental + file: main.jsonnet + at: mLit + len: 1 + text: "\"" + version: 2 + + # Immediate correction to a valid number literal. 
+- step: changeIncremental + file: main.jsonnet + at: mLit + len: 1 + text: "[[num_tok:2]]" + version: 3 + +- step: diagnosticsSettled + +- step: expectDiagnostics + file: main.jsonnet + diagnostics: [] + +- step: requestDefinition + as: xDefinition + file: main.jsonnet + at: m1 +- step: expectDefinition + request: xDefinition + result: + file: main.jsonnet + at: m2 + text: x + +- step: requestSemanticTokensFull + as: tokensAfterRecovery + file: main.jsonnet + +- step: expectSemanticTokensFull + request: tokensAfterRecovery + result: + tokensByMarker: + - marker: kw_local + type: keyword + - marker: decl_x + type: variable + modifiers: [declaration, definition] + - marker: eq + type: operator + - marker: num_tok + type: number + - marker: use_x + type: variable diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/relative_paths_smoke.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/relative_paths_smoke.yaml new file mode 100644 index 00000000..d4406267 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/relative_paths_smoke.yaml @@ -0,0 +1,25 @@ +# Smoke-test relative path handling in scenario files and imports for +# cross-file references under nested directories. 
+steps: +- step: create + files: + lib/helper.jsonnet: local ((m1:|))target = 1; ((m2:|))target + app/main.jsonnet: local lib = import '../lib/helper.jsonnet'; lib.((m3:|))target + +- step: diagnosticsSettled + +- step: requestReferences + as: refs + file: lib/helper.jsonnet + at: m1 + include_declaration: false + +- step: expectReferences + request: refs + result: + - file: lib/helper.jsonnet + at: m2 + text: target + - file: app/main.jsonnet + at: m3 + text: target diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/rename_reopen_references_roundtrip.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/rename_reopen_references_roundtrip.yaml new file mode 100644 index 00000000..8d093100 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/rename_reopen_references_roundtrip.yaml @@ -0,0 +1,80 @@ +# Cross-file rename should remain coherent after close/reopen boundaries: +# references and definition must reflect the renamed symbol everywhere. +steps: +- step: create + files: + lib.jsonnet: local ((libDecl:|target)) = 1; ((libUse:|target)) + main.jsonnet: local lib = import "lib.jsonnet"; lib.((mainUse:|target)) + +- step: diagnosticsSettled + +- step: requestRename + as: renameTarget + file: lib.jsonnet + at: libDecl + new_name: renamed + +- step: expectRename + request: renameTarget + result: + edits: + lib.jsonnet: + - at: libDecl + text: target + replace: renamed + - at: libUse + text: target + replace: renamed + main.jsonnet: + - at: mainUse + text: target + replace: renamed + + # Apply the edits as-if the client accepted the workspace edit. +- step: changeFull + file: lib.jsonnet + text: local ((libDeclAfter:|renamed)) = 1; ((libUseAfter:|renamed)) + version: 2 + +- step: changeFull + file: main.jsonnet + text: local lib = import "lib.jsonnet"; lib.renamed + version: 2 + + # Simulate editor lifecycle transitions around the importer document. 
+- step: close + file: main.jsonnet + +- step: open + file: main.jsonnet + text: local lib = import "lib.jsonnet"; lib.((mainUseAfter:|renamed)) + version: 3 + +- step: diagnosticsSettled + +- step: requestReferences + as: refsAfterReopen + file: lib.jsonnet + at: libDeclAfter + include_declaration: false + +- step: expectReferences + request: refsAfterReopen + result: + - file: lib.jsonnet + at: libUseAfter + text: renamed + - file: main.jsonnet + at: mainUseAfter + text: renamed + +- step: requestDefinition + as: defAfterReopen + file: main.jsonnet + at: mainUseAfter +- step: expectDefinition + request: defAfterReopen + result: + file: lib.jsonnet + at: libDeclAfter + text: renamed diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/semantic_tokens_stability_after_incremental_edits.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/semantic_tokens_stability_after_incremental_edits.yaml new file mode 100644 index 00000000..95b5d4b0 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/semantic_tokens_stability_after_incremental_edits.yaml @@ -0,0 +1,101 @@ +# Verify semantic token classification stays structurally consistent across +# an incremental edit that changes value text but not token kinds. 
+steps: +- step: create + files: + main.jsonnet: "((rangeStart:|))[[kw_local:local]] [[decl_x:x]] [[eq:=]] ((num:|))[[num_tok:1]]; [[use_x:x]]" + +- step: diagnosticsSettled + +- step: requestSemanticTokensFull + as: beforeFull + file: main.jsonnet + +- step: expectSemanticTokensFull + request: beforeFull + result: + tokensByMarker: + - marker: kw_local + type: keyword + - marker: decl_x + type: variable + modifiers: [declaration, definition] + - marker: eq + type: operator + - marker: num_tok + type: number + - marker: use_x + type: variable + +- step: requestSemanticTokensRange + as: beforeRange + file: main.jsonnet + at: rangeStart + len: 40 + +- step: expectSemanticTokensRange + request: beforeRange + result: + tokensByMarker: + - marker: kw_local + type: keyword + - marker: decl_x + type: variable + modifiers: [declaration, definition] + - marker: eq + type: operator + - marker: num_tok + type: number + - marker: use_x + type: variable + +- step: changeIncremental + file: main.jsonnet + at: num + len: 1 + text: "[[num_tok:2]]" + version: 2 + +- step: diagnosticsSettled + +- step: requestSemanticTokensFull + as: afterFull + file: main.jsonnet + +- step: expectSemanticTokensFull + request: afterFull + result: + tokensByMarker: + - marker: kw_local + type: keyword + - marker: decl_x + type: variable + modifiers: [declaration, definition] + - marker: eq + type: operator + - marker: num_tok + type: number + - marker: use_x + type: variable + +- step: requestSemanticTokensRange + as: afterRange + file: main.jsonnet + at: rangeStart + len: 40 + +- step: expectSemanticTokensRange + request: afterRange + result: + tokensByMarker: + - marker: kw_local + type: keyword + - marker: decl_x + type: variable + modifiers: [declaration, definition] + - marker: eq + type: operator + - marker: num_tok + type: number + - marker: use_x + type: variable diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/signature_help_named_arguments.yaml 
b/crates/jrsonnet-lsp/tests/scenarios/runner/signature_help_named_arguments.yaml new file mode 100644 index 00000000..267cb0b6 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/signature_help_named_arguments.yaml @@ -0,0 +1,29 @@ +# Ensure signature help stays correct with named arguments provided out of +# declaration order and reports the active parameter precisely. +steps: +- step: create + files: + main.jsonnet: local add(a, b, c) = a + b + c; add(c=3, a=1, b=2((m1:|))) + +- step: diagnosticsSettled + +- step: requestSignatureHelp + as: sigNamed + file: main.jsonnet + at: m1 +- step: expectSignatureHelp + request: sigNamed + result: + signatures: + - label: add(a, b, c) + documentation: + parameters: + - label: [4, 5] + documentation: + - label: [7, 8] + documentation: + - label: [10, 11] + documentation: + activeParameter: 1 + activeSignature: 0 + activeParameter: 1 diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/watched_file_lifecycle.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/watched_file_lifecycle.yaml new file mode 100644 index 00000000..6c78c99b --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/watched_file_lifecycle.yaml @@ -0,0 +1,74 @@ +# End-to-end watched-file lifecycle scenario: create, change, delete events +# should update analysis and preserve stable cross-file reference answers. 
+steps: +- step: create + files: + main.jsonnet: local lib = import 'lib1.jsonnet'; lib.((m3:|((m6:|))))target + open: + - main.jsonnet + +- step: diagnosticsSettled + +- step: writeFile + path: lib1.jsonnet + text: local ((m1:|))target = 1; ((m2:|))target + +- step: notifyWatchedFiles + changes: + - path: lib1.jsonnet + type: created + +- step: diagnosticsSettled + +- step: requestReferences + as: refsAfterCreate + file: lib1.jsonnet + at: m1 + include_declaration: false + +- step: expectReferences + request: refsAfterCreate + result: + - file: lib1.jsonnet + at: m2 + text: target + - file: main.jsonnet + at: m3 + text: target + +- step: writeFile + path: lib1.jsonnet + text: local ((m4:|))target = 2; ((m5:|))target + +- step: notifyWatchedFiles + changes: + - path: lib1.jsonnet + type: changed + +- step: diagnosticsSettled + +- step: requestReferences + as: refsAfterChange + file: lib1.jsonnet + at: m4 + include_declaration: false + +- step: expectReferences + request: refsAfterChange + result: + - file: lib1.jsonnet + at: m5 + text: target + - file: main.jsonnet + at: m6 + text: target + +- step: deleteFile + path: lib1.jsonnet + +- step: notifyWatchedFiles + changes: + - path: lib1.jsonnet + type: deleted + +- step: diagnosticsSettled diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/workspace_symbol_flat_positive.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/workspace_symbol_flat_positive.yaml new file mode 100644 index 00000000..2623ac8b --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/workspace_symbol_flat_positive.yaml @@ -0,0 +1,23 @@ +# Verify `workspace/symbol` can find `result` and report a concrete symbol +# location/range in the source file via shorthand structural assertions. 
+steps: +- step: create + files: + main.jsonnet: | + local x = 1; { ((m1:|))result: x } + +- step: diagnosticsSettled + +- step: requestWorkspaceSymbol + as: wsSymbols + query: result + +- step: expectWorkspaceSymbol + request: wsSymbols + result: + symbols: + - name: result + kind: 8 + file: main.jsonnet + at: m1 + text: "result: x" diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/workspace_symbol_nested_container.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/workspace_symbol_nested_container.yaml new file mode 100644 index 00000000..445e0638 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/workspace_symbol_nested_container.yaml @@ -0,0 +1,23 @@ +# Workspace symbol queries should preserve nesting context via container_name. +steps: +- step: create + files: + main.jsonnet: "{ outer: { ((m1:|))innerField: 1 } }" + +- step: diagnosticsSettled + +- step: requestWorkspaceSymbol + as: wsNested + query: inner + +- step: expectWorkspaceSymbol + request: wsNested + result: + symbols: + - name: innerField + kind: 8 + file: main.jsonnet + at: m1 + text: "innerField: 1" + container_name: outer + diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/workspace_symbol_updates_after_incremental_rename.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/workspace_symbol_updates_after_incremental_rename.yaml new file mode 100644 index 00000000..bbc79bda --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/workspace_symbol_updates_after_incremental_rename.yaml @@ -0,0 +1,68 @@ +# Verify workspace symbol indexing tracks incremental edits in-place: +# - query for the old field name returns no symbols after rename +# - query for the new field name returns the renamed field at the new range. 
+steps: + - step: create + files: + main.jsonnet: | + local obj = { + [[resultOneField:resultOne]]: 1, + keepMe: 2, + }; + obj.[[resultOneUse:resultOne]] + + - step: diagnosticsSettled + + - step: requestWorkspaceSymbol + as: beforeRename + query: resultOne + + - step: expectWorkspaceSymbol + request: beforeRename + result: + symbols: + - name: resultOne + kind: 8 + file: main.jsonnet + at: resultOneField + text: "resultOne: 1" + container_name: obj + + - step: changeIncremental + file: main.jsonnet + at: resultOneField + text: ((finalValueField:|finalValue)) + len: 9 + version: 2 + + - step: changeIncremental + file: main.jsonnet + at: resultOneUse + text: ((finalValueUse:|finalValue)) + len: 9 + version: 3 + + - step: diagnosticsSettled + + - step: requestWorkspaceSymbol + as: oldNameGone + query: resultOne + + - step: expectWorkspaceSymbol + request: oldNameGone + result: null + + - step: requestWorkspaceSymbol + as: newNamePresent + query: finalValue + + - step: expectWorkspaceSymbol + request: newNamePresent + result: + symbols: + - name: finalValue + kind: 8 + file: main.jsonnet + at: finalValueField + text: "finalValue: 1" + container_name: obj diff --git a/docs/lsp/ARCHITECTURE.md b/docs/lsp/ARCHITECTURE.md index cd1904bc..197fc8d4 100644 --- a/docs/lsp/ARCHITECTURE.md +++ b/docs/lsp/ARCHITECTURE.md @@ -353,17 +353,116 @@ both flat and namespaced settings (`jsonnet`, `jsonnet-language-server`). 
## Timeline Test Harness -The LSP integration test framework now includes a typed timeline model and -runner: +The scenario harness is split into a dedicated crate plus fixture-driven tests: + +- `crates/jrsonnet-lsp-scenario/src/scenario.rs` +- `crates/jrsonnet-lsp-scenario/src/scenario_script/` +- `crates/jrsonnet-lsp-scenario/src/scenario_runner/` +- `crates/jrsonnet-lsp/tests/e2e_scenario_tests.rs` +- `crates/jrsonnet-lsp/tests/scenarios/**/*.yaml` + +`jrsonnet-lsp-scenario` provides: + +- a typed executable timeline model +- a YAML DSL compiler (`parse_scenario_yaml`) +- an in-memory LSP runner with async diagnostics settle barriers +- rstest fixture helpers for file-based scenario discovery + +The YAML DSL is marker-driven: + +- inline source markers define reusable positions/ranges in file content: + - `[[name:text]]` for named ranges + - `((name:|x))`, `((name:x|))`, `((name:|))` for named cursor positions +- request positions use marker names (`at: markerName`) +- request ranges use marker names (`range: markerName`) or shorthand + (`at` + `text`/`len`) +- request/expect pairs use optional aliases (`as` / `request`) instead of raw + numeric IDs +- `diagnosticsSettled` defaults are `timeout_ms: 1000` and `idle_ms: 50` +- `create.files` keeps scenarios self-contained and path-relative + +The compiled timeline supports cross-file edits and notifications +(`writeFile`, `deleteFile`, `notifyWatchedFiles`) and request/response +assertions for all implemented handlers. 
+ +### Scenario Script Examples + +#### Diagnostics warning + +```yaml +steps: + - step: create + files: + main.jsonnet: local [[unused:x]] = 1; 42 + - step: diagnosticsSettled + - step: expectDiagnostics + file: main.jsonnet + diagnostics: + - at: unused + text: x + severity: warning + code: unused-variable + source: jrsonnet-lint + message: unused variable +``` -- `crates/jrsonnet-lsp/tests/framework/scenario.rs` -- `crates/jrsonnet-lsp/tests/framework/scenario_runner.rs` +#### Type error diagnostic + +```yaml +steps: + - step: create + files: + main.jsonnet: | + local x = 1; + [[badAccess:x.foo]] + - step: diagnosticsSettled + - step: expectDiagnostics + file: main.jsonnet + diagnostics: + - at: badAccess + text: x.foo + severity: error + message: field access on non-object type +``` -`scenario.rs` defines JSON-deserializable steps for -open/change/save/close/config/request/expect flows. -`scenario_runner.rs` executes those steps against an in-memory server -connection, buffers out-of-order responses/diagnostics, and provides a -`diagnosticsSettled` idle barrier for deterministic async sequencing. 
+#### Code action request/expect + +```yaml +steps: + - step: create + files: + main.jsonnet: ((root:|))local [[unused:x]] = 1; 42 + - step: requestCodeAction + as: fixUnused + file: main.jsonnet + at: root + text: local x = 1; 42 + diagnostics: + - at: unused + text: x + severity: warning + code: unused-variable + source: jrsonnet-lint + message: unused variable + - step: expectCodeAction + request: fixUnused + result: + - title: Prefix `x` with `_` + kind: quickfix + isPreferred: true + diagnostics: + - at: unused + text: x + severity: warning + code: unused-variable + source: jrsonnet-lint + message: unused variable + edits: + main.jsonnet: + - at: unused + text: x + replace: _x +``` ## Execute Commands From a65c3e5df2633b6c33fb2ca19ae2f9c80589b590 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Thu, 12 Feb 2026 16:36:24 +0000 Subject: [PATCH 087/210] chore(cleanups): parser recovery and minor non-functional tidy-ups - replace parser helper asserts with recoverable errors for text/number/literal parsing. - keep small import-order cleanups from formatter output. - replace wording-only type-substitution terminology for clarity. --- crates/jrsonnet-lsp-types/src/subst.rs | 2 +- crates/jrsonnet-rowan-parser/src/parser.rs | 21 +++++++++++++------ .../src/syntax_semantics.rs | 3 ++- crates/jrsonnet-stdlib/src/manifest/yaml.rs | 2 +- 4 files changed, 19 insertions(+), 9 deletions(-) diff --git a/crates/jrsonnet-lsp-types/src/subst.rs b/crates/jrsonnet-lsp-types/src/subst.rs index bf0679ab..d372d828 100644 --- a/crates/jrsonnet-lsp-types/src/subst.rs +++ b/crates/jrsonnet-lsp-types/src/subst.rs @@ -195,7 +195,7 @@ impl TySubst { /// Apply substitution during merge. /// - /// Any unresolved local reference is lowered to `any` instead of leaking a + /// Any unresolved local reference is converted to `any` instead of leaking a /// local `Ty` into the global store. 
fn apply_for_merge(&self, ty: Ty) -> Ty { if ty.is_global() { diff --git a/crates/jrsonnet-rowan-parser/src/parser.rs b/crates/jrsonnet-rowan-parser/src/parser.rs index 4f33e452..3f156932 100644 --- a/crates/jrsonnet-rowan-parser/src/parser.rs +++ b/crates/jrsonnet-rowan-parser/src/parser.rs @@ -867,16 +867,25 @@ fn bind(p: &mut Parser) { }; } fn text(p: &mut Parser) { - assert!(Text::can_cast(p.current())); - p.bump(); + if Text::can_cast(p.current()) { + p.bump(); + } else { + p.error_with_recovery_set(TS![]); + } } fn number(p: &mut Parser) { - assert!(Number::can_cast(p.current())); - p.bump(); + if Number::can_cast(p.current()) { + p.bump(); + } else { + p.error_with_recovery_set(TS![]); + } } fn literal(p: &mut Parser) { - assert!(Literal::can_cast(p.current())); - p.bump(); + if Literal::can_cast(p.current()) { + p.bump(); + } else { + p.error_with_recovery_set(TS![]); + } } fn lhs_basic(p: &mut Parser) -> Result { let _e = p.expected_syntax_name("expression"); diff --git a/crates/jrsonnet-rowan-parser/src/syntax_semantics.rs b/crates/jrsonnet-rowan-parser/src/syntax_semantics.rs index 0c9e1a94..38b0658b 100644 --- a/crates/jrsonnet-rowan-parser/src/syntax_semantics.rs +++ b/crates/jrsonnet-rowan-parser/src/syntax_semantics.rs @@ -584,12 +584,13 @@ impl UnaryOperatorKind { mod tests { use std::collections::HashSet; + use jrsonnet_evaluator::{State, Val}; + use super::{ BinaryOperatorClass, BinaryOperatorKind, SemanticTokenClass, SyntaxKind, TokenDocOutcome, UnaryOperatorClass, UnaryOperatorKind, TOKEN_SEMANTICS, }; use crate::rowan::NodeOrToken; - use jrsonnet_evaluator::{State, Val}; // Test-only explicit decision list: lexical tokens that are intentionally not // semantic-highlighted. Coverage tests fail if any token is neither classified nor ignored. 
diff --git a/crates/jrsonnet-stdlib/src/manifest/yaml.rs b/crates/jrsonnet-stdlib/src/manifest/yaml.rs index 98c4b11a..7b9fe96c 100644 --- a/crates/jrsonnet-stdlib/src/manifest/yaml.rs +++ b/crates/jrsonnet-stdlib/src/manifest/yaml.rs @@ -1,9 +1,9 @@ use std::{borrow::Cow, fmt::Write}; -use jrsonnet_evaluator::val::ArrValue; use jrsonnet_evaluator::{ bail, in_description_frame, manifest::{escape_string_json_buf, ManifestFormat}, + val::ArrValue, ObjValue, Result, ResultExt, Val, }; From bfa6f00c0c0dc802df52dd82233a6b3dac3328d7 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Thu, 12 Feb 2026 16:46:47 +0000 Subject: [PATCH 088/210] feat(lsp): improve hover type precision for imported methods --- crates/jrsonnet-lsp-handlers/src/hover.rs | 70 +++++++++++- crates/jrsonnet-lsp-handlers/src/lib.rs | 2 +- crates/jrsonnet-lsp-inference/src/object.rs | 108 ++++++++++++++++-- .../jrsonnet-lsp/src/server/async_requests.rs | 70 +++++++++++- .../hover_new_import_invalid_syntax.yaml | 2 +- .../hover_import_field_method_type.yaml | 22 ++++ 6 files changed, 256 insertions(+), 18 deletions(-) create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/hover_import_field_method_type.yaml diff --git a/crates/jrsonnet-lsp-handlers/src/hover.rs b/crates/jrsonnet-lsp-handlers/src/hover.rs index 09e4c658..b41b40c5 100644 --- a/crates/jrsonnet-lsp-handlers/src/hover.rs +++ b/crates/jrsonnet-lsp-handlers/src/hover.rs @@ -18,12 +18,26 @@ use crate::definition::{goto_definition, DefinitionResult}; /// Maximum number of lines to show in hover for local definitions. const MAX_HOVER_LINES: usize = 5; +type ImportFieldTypeResolver<'a> = dyn Fn(&str, &[String]) -> Option + 'a; /// Get hover information for the given position. /// /// The `analysis` parameter should be pre-computed using `TypeProvider` to ensure /// that import types are properly resolved. 
pub fn hover(document: &Document, position: LspPosition, analysis: &TypeAnalysis) -> Option { + hover_with_import_field_type(document, position, analysis, None) +} + +/// Get hover information with an optional callback for imported field type lookup. +/// +/// When provided, `import_field_type_resolver` is used to improve hover precision for +/// `DefinitionResult::ImportField` targets by querying the imported document directly. +pub fn hover_with_import_field_type( + document: &Document, + position: LspPosition, + analysis: &TypeAnalysis, + import_field_type_resolver: Option<&ImportFieldTypeResolver<'_>>, +) -> Option { let text = document.text(); let line_index = document.line_index(); @@ -40,7 +54,14 @@ pub fn hover(document: &Document, position: LspPosition, analysis: &TypeAnalysis return Some(hover); } - if let Some(hover) = check_local_hover(document, analysis, position, text, offset) { + if let Some(hover) = check_local_hover( + document, + analysis, + position, + text, + offset, + import_field_type_resolver, + ) { return Some(hover); } @@ -88,6 +109,7 @@ fn check_local_hover( position: LspPosition, text: &str, offset: ByteOffset, + import_field_type_resolver: Option<&ImportFieldTypeResolver<'_>>, ) -> Option { let result = goto_definition(document, position) .or_else(|| local_definition_at_offset(document, offset))?; @@ -99,10 +121,20 @@ fn check_local_hover( .type_at_position(ast.syntax(), offset.into()) .map(|ty| analysis.display(ty)); - if matches!(inferred_type.as_deref(), None | Some("any")) { - if let DefinitionResult::Local(range) = &result { - inferred_type = definition_value_type(document, analysis, range); + match &result { + DefinitionResult::ImportField { path, fields } => { + if let Some(resolver) = import_field_type_resolver { + if let Some(resolved_type) = resolver(path, fields) { + inferred_type = Some(resolved_type); + } + } } + DefinitionResult::Local(range) => { + if matches!(inferred_type.as_deref(), None | Some("any")) { + inferred_type 
= definition_value_type(document, analysis, range); + } + } + DefinitionResult::Import(_) => {} } let type_str = inferred_type @@ -278,6 +310,19 @@ mod tests { hover(&doc, pos, &analysis) } + fn get_hover_with_import_field_type( + code: &str, + line: u32, + character: u32, + resolver: &ImportFieldTypeResolver<'_>, + ) -> Option { + let global_types = Arc::new(GlobalTyStore::new()); + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let analysis = TypeAnalysis::analyze_with_global(&doc, global_types); + let pos = (line, character).into(); + hover_with_import_field_type(&doc, pos, &analysis, Some(resolver)) + } + #[rstest] #[case( "std.map(function(x) x, [])", @@ -404,6 +449,23 @@ mod tests { }); } + #[test] + fn test_import_field_hover_prefers_resolved_imported_type() { + let code = r#"local lib = import "lib.libsonnet"; lib.foo"#; + let result = get_hover_with_import_field_type(code, 0, 40, &|path, fields| { + assert_eq!(path, "lib.libsonnet"); + assert_eq!(fields, &["foo".to_string()]); + Some("function(x)".to_string()) + }); + + assert_matches!(result, Some(Hover { + contents: HoverContents::Markup(MarkupContent { kind: MarkupKind::Markdown, value }), + range: None + }) => { + assert_eq!(value, "`function(x)`\n\n`foo` from `lib.libsonnet`"); + }); + } + #[rstest] #[case( "local x = 42; x", diff --git a/crates/jrsonnet-lsp-handlers/src/lib.rs b/crates/jrsonnet-lsp-handlers/src/lib.rs index e24f9dd7..4cc809f4 100644 --- a/crates/jrsonnet-lsp-handlers/src/lib.rs +++ b/crates/jrsonnet-lsp-handlers/src/lib.rs @@ -26,7 +26,7 @@ pub use formatting::{ format_document, format_document_with_config, FormatterEngine, FormattingConfig, FormattingContext, }; -pub use hover::hover; +pub use hover::{hover, hover_with_import_field_type}; pub use inlay_hint::inlay_hints; pub use references::{find_cross_file_references, find_references}; pub use rename::{prepare_rename, rename, rename_cross_file}; diff --git a/crates/jrsonnet-lsp-inference/src/object.rs 
b/crates/jrsonnet-lsp-inference/src/object.rs index 8f1c495c..728e3211 100644 --- a/crates/jrsonnet-lsp-inference/src/object.rs +++ b/crates/jrsonnet-lsp-inference/src/object.rs @@ -4,6 +4,7 @@ use jrsonnet_lsp_types::{ FieldDefInterned, FieldVis, FunctionData, ObjectData, ReturnSpec, Ty, TyData, }; use jrsonnet_rowan_parser::nodes::{Expr, Member, ObjBody}; +use rustc_hash::FxHashMap; use crate::{ env::TypeEnv, @@ -123,16 +124,58 @@ pub fn infer_object_type_with_super_ty( Member::MemberFieldMethod(method) => { if let Some(field_name) = method.field_name() { if let Some(name_str) = extract_field_name(&field_name) { - // Methods are functions - infer from params let params = method .params_desc() .map(|p| extract_params_with_default_types_ty(&p, env)) .unwrap_or_default(); + let (return_ty, param_constraints) = + if env.can_infer_function_body() { + if let Some(body) = method.expr() { + env.push_scope(); + let param_names: Vec = + params.iter().map(|p| p.name.clone()).collect(); + for param in ¶ms { + env.define_ty(param.name.clone(), param.ty); + } + env.start_constraint_tracking(¶m_names); + env.enter_function(); + let body_ty = infer_expr(&body, env); + env.exit_function(); + let constraints = env.stop_constraint_tracking_ty(); + env.pop_scope(); + (body_ty, constraints) + } else { + (Ty::ANY, FxHashMap::default()) + } + } else { + (Ty::ANY, FxHashMap::default()) + }; + + let final_params = params + .into_iter() + .map(|param| { + let mut param_ty = param.ty; + if let Some(constraints) = + param_constraints.get(¶m.name) + { + for constraint_ty in constraints { + param_ty = env + .store_mut() + .narrow(param_ty, *constraint_ty); + } + } + jrsonnet_lsp_types::ParamInterned { + name: param.name, + ty: param_ty, + has_default: param.has_default, + } + }) + .collect(); let visibility = convert_visibility_ty(method.visibility()); let func_ty = env.store_mut().function(FunctionData { - params, - return_spec: ReturnSpec::Fixed(Ty::ANY), + params: final_params, + 
return_spec: ReturnSpec::Fixed(return_ty), variadic: false, }); @@ -174,7 +217,7 @@ mod tests { use std::collections::BTreeSet; use jrsonnet_lsp_document::{DocVersion, Document}; - use jrsonnet_lsp_types::{FunctionData, ObjectData, TyData}; + use jrsonnet_lsp_types::{FunctionData, ObjectData, ParamInterned, ReturnSpec, Ty, TyData}; use super::*; use crate::expr::infer_document_type_ty; @@ -234,18 +277,63 @@ mod tests { #[test] fn test_method_field_inference() { // Methods should be inferred as functions - let (ty, env) = infer_doc("{ greet(name): 'Hello, ' + name }"); + let (ty, env) = infer_doc("{ greet(name): name }"); let obj = try_object(&env, ty).expect("expected object"); assert_fields_ty(&obj, &["greet"]); let field_def = get_field_ty(&obj, "greet").expect("Should have 'greet' field"); let func = try_function(&env, field_def.ty).expect("expected function"); assert_eq!( - func.params - .iter() - .map(|p| p.name.as_str()) - .collect::>(), - vec!["name"] + func, + FunctionData { + params: vec![ParamInterned { + name: "name".to_string(), + ty: Ty::ANY, + has_default: false + }], + return_spec: ReturnSpec::Fixed(Ty::ANY), + variadic: false, + } + ); + } + + #[test] + fn test_method_field_infers_body_return_type() { + let (ty, env) = infer_doc("{ inc(x):: x + 1 }"); + let obj = try_object(&env, ty).expect("expected object"); + let field_def = get_field_ty(&obj, "inc").expect("Should have 'inc' field"); + let func = try_function(&env, field_def.ty).expect("expected function"); + assert_eq!( + func, + FunctionData { + params: vec![ParamInterned { + name: "x".to_string(), + ty: Ty::ANY, + has_default: false + }], + return_spec: ReturnSpec::Fixed(Ty::NUMBER), + variadic: false, + } + ); + } + + #[test] + fn test_method_field_with_assert_still_infers_body_return_type() { + let (ty, env) = infer_doc("{\n inc(x)::\n assert std.isNumber(x);\n x + 1,\n}"); + let obj = try_object(&env, ty).expect("expected object"); + let field_def = get_field_ty(&obj, "inc").expect("Should 
have 'inc' field"); + let func = try_function(&env, field_def.ty).expect("expected function"); + assert_eq!( + func, + FunctionData { + params: vec![ParamInterned { + name: "x".to_string(), + ty: Ty::ANY, + has_default: false + }], + return_spec: ReturnSpec::Fixed(Ty::NUMBER), + variadic: false, + } ); } } diff --git a/crates/jrsonnet-lsp/src/server/async_requests.rs b/crates/jrsonnet-lsp/src/server/async_requests.rs index 365e33b8..3970052f 100644 --- a/crates/jrsonnet-lsp/src/server/async_requests.rs +++ b/crates/jrsonnet-lsp/src/server/async_requests.rs @@ -4,7 +4,7 @@ use jrsonnet_lsp_document::{to_lsp_range, CanonicalPath, DocVersion, Document, S use jrsonnet_lsp_handlers as handlers; use jrsonnet_lsp_import::{ImportGraph, ImportResolution}; use jrsonnet_lsp_inference::{SharedDocumentManager, SharedTypeCache, TypeAnalysis, TypeProvider}; -use jrsonnet_lsp_types::GlobalTyStore; +use jrsonnet_lsp_types::{GlobalTyStore, Ty, TyData}; use jrsonnet_rowan_parser::{AstNode, SyntaxKind}; use lsp_types::{ CodeLens, CodeLensParams, CompletionParams, CompletionResponse, ExecuteCommandParams, @@ -75,7 +75,15 @@ impl AsyncRequestContext { let doc = self.documents.get(&path)?.clone(); let lsp_pos = position.into(); let analysis = self.analyze_document(&path, &doc); - handlers::hover(&doc, lsp_pos, &analysis) + let import_field_type_resolver = |import_path: &str, fields: &[String]| { + self.resolve_import_field_type(&path, import_path, fields) + }; + handlers::hover_with_import_field_type( + &doc, + lsp_pos, + &analysis, + Some(&import_field_type_resolver), + ) } pub(super) fn goto_definition( @@ -582,6 +590,64 @@ impl AsyncRequestContext { .and_then(|entry| entry.resolved_path.clone()) } + fn resolve_import_field_type( + &self, + from: &CanonicalPath, + import_path: &str, + fields: &[String], + ) -> Option { + let resolved = self.resolve_import_from_graph(from, import_path)?; + let doc = self.load_document_for_path(&resolved)?; + let analysis = 
self.analyze_document(&resolved, &doc); + let ty = self.type_for_field_path(&analysis, analysis.document_type(), fields)?; + Some(analysis.display(ty)) + } + + fn type_for_field_path( + &self, + analysis: &TypeAnalysis, + root_ty: Ty, + fields: &[String], + ) -> Option { + fields.iter().try_fold(root_ty, |ty, field| { + self.type_for_field(analysis, ty, field) + }) + } + + fn type_for_field(&self, analysis: &TypeAnalysis, ty: Ty, field: &str) -> Option { + match analysis.get_data(ty) { + TyData::Any => Some(Ty::ANY), + TyData::Object(obj) => obj + .get_field(field) + .map(|field_def| field_def.ty) + .or_else(|| obj.has_unknown.then_some(Ty::ANY)), + TyData::AttrsOf { value } => Some(value), + TyData::Union(types) => { + let variants: Vec<_> = types + .into_iter() + .filter_map(|variant| self.type_for_field(analysis, variant, field)) + .collect(); + if variants.is_empty() { + None + } else { + Some(analysis.union(variants)) + } + } + TyData::Sum(types) => { + let variants: Vec<_> = types + .into_iter() + .filter_map(|variant| self.type_for_field(analysis, variant, field)) + .collect(); + if variants.is_empty() { + None + } else { + Some(analysis.union(variants)) + } + } + _ => None, + } + } + fn load_document_for_path(&self, path: &CanonicalPath) -> Option { if let Some(doc) = self.documents.get(path) { return Some(doc.clone()); diff --git a/crates/jrsonnet-lsp/tests/scenarios/hover/hover_new_import_invalid_syntax.yaml b/crates/jrsonnet-lsp/tests/scenarios/hover/hover_new_import_invalid_syntax.yaml index 263e407f..5e7bae27 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/hover/hover_new_import_invalid_syntax.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/hover/hover_new_import_invalid_syntax.yaml @@ -26,5 +26,5 @@ steps: at: m1 - step: expectHoverType request: hover_new - type: '{ build }' + type: function(input) match: exact diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_import_field_method_type.yaml 
b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_import_field_method_type.yaml new file mode 100644 index 00000000..31ab4282 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_import_field_method_type.yaml @@ -0,0 +1,22 @@ +# Verify hover on an imported method field resolves to the imported callable type, +# rather than falling back to the enclosing local object type. +steps: +- step: create + files: + main.libsonnet: | + local module = import "./module/main.libsonnet"; + + { value: module.((m1:|))new(1) } + module/main.libsonnet: | + { new(x):: { y: x + 1 } } + +- step: diagnosticsSettled + +- step: requestHover + as: importedMethodHover + file: main.libsonnet + at: m1 +- step: expectHoverType + request: importedMethodHover + type: function(x) + match: exact From 8c230c567ee1d210470f2df17d95a4dcba1aac54 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Thu, 12 Feb 2026 17:05:08 +0000 Subject: [PATCH 089/210] refactor(lsp): split server and integration test hotspots - move notification, watched-file, and import-graph helpers into dedicated server submodules. - split integration_test into lifecycle/navigation/features/workspace_cross_file modules with shared harness helpers. - preserve behavior and verify with cargo test -p jrsonnet-lsp. 
--- crates/jrsonnet-lsp/src/server.rs | 513 +-- .../jrsonnet-lsp/src/server/import_graph.rs | 88 + .../jrsonnet-lsp/src/server/notifications.rs | 303 ++ .../jrsonnet-lsp/src/server/watched_files.rs | 128 + crates/jrsonnet-lsp/tests/integration_test.rs | 3817 +---------------- .../tests/integration_test/features.rs | 998 +++++ .../tests/integration_test/lifecycle.rs | 657 +++ .../tests/integration_test/navigation.rs | 1080 +++++ .../integration_test/workspace_cross_file.rs | 1079 +++++ 9 files changed, 4344 insertions(+), 4319 deletions(-) create mode 100644 crates/jrsonnet-lsp/src/server/import_graph.rs create mode 100644 crates/jrsonnet-lsp/src/server/notifications.rs create mode 100644 crates/jrsonnet-lsp/src/server/watched_files.rs create mode 100644 crates/jrsonnet-lsp/tests/integration_test/features.rs create mode 100644 crates/jrsonnet-lsp/tests/integration_test/lifecycle.rs create mode 100644 crates/jrsonnet-lsp/tests/integration_test/navigation.rs create mode 100644 crates/jrsonnet-lsp/tests/integration_test/workspace_cross_file.rs diff --git a/crates/jrsonnet-lsp/src/server.rs b/crates/jrsonnet-lsp/src/server.rs index 75478eb1..ef9a6bc2 100644 --- a/crates/jrsonnet-lsp/src/server.rs +++ b/crates/jrsonnet-lsp/src/server.rs @@ -4,6 +4,9 @@ //! Diagnostics are computed asynchronously with debouncing to avoid blocking the event loop. 
mod async_requests; +mod import_graph; +mod notifications; +mod watched_files; use std::{ panic::{catch_unwind, AssertUnwindSafe}, @@ -1022,516 +1025,6 @@ impl Server { } impl Server { - fn is_supported_execute_command(command: &str) -> bool { - SUPPORTED_EXECUTE_COMMANDS.contains(&command) - } - - fn supports_dynamic_watched_files_registration(params: &InitializeParams) -> bool { - params - .capabilities - .workspace - .as_ref() - .and_then(|workspace| workspace.did_change_watched_files) - .and_then(|capabilities| capabilities.dynamic_registration) - .unwrap_or(false) - } - - fn supports_relative_watch_patterns(params: &InitializeParams) -> bool { - params - .capabilities - .workspace - .as_ref() - .and_then(|workspace| workspace.did_change_watched_files) - .and_then(|capabilities| capabilities.relative_pattern_support) - .unwrap_or(false) - } - - fn watched_file_base_uris(init_roots: &InitializeRoots) -> Vec { - let mut uris = Vec::new(); - - if let Some(workspace_folders) = &init_roots.workspace_folders { - for folder in workspace_folders { - uris.push(folder.uri.clone()); - } - } - - if let Some(root_uri) = &init_roots.root_uri { - uris.push(root_uri.clone()); - } - - if let Some(root_path) = &init_roots.root_path { - let root_path = PathBuf::from(root_path); - if let Ok(path) = CanonicalPath::try_from_path(&root_path) { - if let Ok(uri) = path.to_uri() { - uris.push(uri); - } - } - } - - uris.sort_by(|lhs, rhs| lhs.as_str().cmp(rhs.as_str())); - uris.dedup_by(|lhs, rhs| lhs.as_str() == rhs.as_str()); - uris - } - - fn watched_file_watchers( - initialize_params: &InitializeParams, - init_roots: &InitializeRoots, - ) -> Vec { - if !Self::supports_relative_watch_patterns(initialize_params) { - return WATCHED_FILE_GLOB_PATTERNS - .into_iter() - .map(|pattern| FileSystemWatcher { - glob_pattern: GlobPattern::String(pattern.to_owned()), - kind: None, - }) - .collect(); - } - - let base_uris = Self::watched_file_base_uris(init_roots); - if base_uris.is_empty() { - 
return WATCHED_FILE_GLOB_PATTERNS - .into_iter() - .map(|pattern| FileSystemWatcher { - glob_pattern: GlobPattern::String(pattern.to_owned()), - kind: None, - }) - .collect(); - } - - base_uris - .into_iter() - .flat_map(|base_uri| { - WATCHED_FILE_GLOB_PATTERNS - .into_iter() - .map(move |pattern| FileSystemWatcher { - glob_pattern: GlobPattern::Relative(RelativePattern { - base_uri: OneOf::Right(base_uri.clone()), - pattern: pattern.to_owned(), - }), - kind: None, - }) - }) - .collect() - } - - fn register_did_change_watched_files( - &mut self, - initialize_params: &InitializeParams, - init_roots: &InitializeRoots, - ) -> Result<()> { - if !Self::supports_dynamic_watched_files_registration(initialize_params) { - return Ok(()); - } - - let watchers = Self::watched_file_watchers(initialize_params, init_roots); - let options = DidChangeWatchedFilesRegistrationOptions { watchers }; - let registration = Registration { - id: "jrsonnet-lsp.did-change-watched-files".to_owned(), - method: DidChangeWatchedFiles::METHOD.to_owned(), - register_options: Some(serde_json::to_value(options)?), - }; - let params = RegistrationParams { - registrations: vec![registration], - }; - - self.inflight_requests - .send_outgoing_request::(params)?; - info!("Requested dynamic file-watch registration"); - Ok(()) - } - - fn request_id_from_number_or_string(id: NumberOrString) -> RequestId { - match id { - NumberOrString::Number(id) => id.into(), - NumberOrString::String(id) => id.into(), - } - } - - /// Handle an incoming notification. - /// - /// Returns true if exit notification was received. 
- fn handle_notification(&mut self, notif: Notification) -> Result { - debug!("Handling notification: {}", notif.method); - - match notif.method.as_str() { - Cancel::METHOD => { - let params: lsp_types::CancelParams = serde_json::from_value(notif.params)?; - self.on_cancel_request(params)?; - } - DidOpenTextDocument::METHOD => { - let params: DidOpenTextDocumentParams = serde_json::from_value(notif.params)?; - self.on_did_open(params); - } - DidChangeTextDocument::METHOD => { - let params: DidChangeTextDocumentParams = serde_json::from_value(notif.params)?; - self.on_did_change(params); - } - DidCloseTextDocument::METHOD => { - let params: DidCloseTextDocumentParams = serde_json::from_value(notif.params)?; - self.on_did_close(¶ms)?; - } - DidSaveTextDocument::METHOD => { - let params: DidSaveTextDocumentParams = serde_json::from_value(notif.params)?; - self.on_did_save(params); - } - DidChangeConfiguration::METHOD => { - let params: DidChangeConfigurationParams = serde_json::from_value(notif.params)?; - self.on_did_change_configuration(params); - } - DidChangeWatchedFiles::METHOD => { - let params: DidChangeWatchedFilesParams = serde_json::from_value(notif.params)?; - self.on_did_change_watched_files(params); - } - "exit" => { - info!("Exit notification received"); - return Ok(true); - } - _ => { - debug!("Unhandled notification: {}", notif.method); - } - } - - Ok(false) - } - - fn on_cancel_request(&mut self, params: lsp_types::CancelParams) -> Result<()> { - let request_id = Self::request_id_from_number_or_string(params.id); - if !self.inflight_requests.cancel_request(request_id.clone())? { - debug!("Ignoring cancel request for non-pending id {}", request_id); - } - Ok(()) - } - - /// Handle textDocument/didOpen notification. 
- fn on_did_open(&self, params: DidOpenTextDocumentParams) { - let uri = ¶ms.text_document.uri; - info!("Document opened: {}", uri.as_str()); - - let Some(path) = CanonicalPath::from_uri(uri) else { - warn!("Could not convert URI to path: {}", uri.as_str()); - return; - }; - - let text = params.text_document.text; - let version = DocVersion::new(params.text_document.version); - - self.documents.open(path.clone(), text, version); - - // Invalidate type cache for this file and all files that depend on it - self.invalidate_type_cache_with_dependents(&path); - - // Update import graph - self.update_import_graph(&path); - - // Publish diagnostics - self.schedule_diagnostics(&path); - } - - /// Handle textDocument/didChange notification. - fn on_did_change(&self, params: DidChangeTextDocumentParams) { - let uri = ¶ms.text_document.uri; - debug!("Document changed: {}", uri.as_str()); - - let Some(path) = CanonicalPath::from_uri(uri) else { - warn!("Could not convert URI to path: {}", uri.as_str()); - return; - }; - - let version = DocVersion::new(params.text_document.version); - - // Process each change (INCREMENTAL sync may send multiple changes) - for change in params.content_changes { - let success = if let Some(range) = change.range { - // Incremental change: apply the range-based edit - self.documents - .apply_incremental_change(&path, range, &change.text, version) - } else { - // Full change: no range means full document replacement - self.documents.update(&path, change.text, version) - }; - - if !success { - warn!("Failed to apply change to document: {}", uri.as_str()); - return; - } - } - - // Invalidate type cache for this file and all files that depend on it - self.invalidate_type_cache_with_dependents(&path); - - // Update import graph (imports may have changed) - self.update_import_graph(&path); - - // Publish diagnostics - self.schedule_diagnostics(&path); - } - - /// Handle textDocument/didClose notification. 
- fn on_did_close(&self, params: &DidCloseTextDocumentParams) -> Result<()> { - let uri = ¶ms.text_document.uri; - info!("Document closed: {}", uri.as_str()); - - let Some(path) = CanonicalPath::from_uri(uri) else { - warn!("Could not convert URI to path: {}", uri.as_str()); - return Ok(()); - }; - - self.documents.close(&path); - - // Invalidate type cache for this file and all files that depend on it - // (dependents may have cached types based on this file's exports) - self.invalidate_type_cache_with_dependents(&path); - - // Keep import graph semantics for closed documents by re-indexing from - // cached/disk content instead of dropping the file node. - self.update_import_graph(&path); - self.schedule_diagnostics_for_open_importers(&path); - - // Clear diagnostics for closed document - self.send_notification::(lsp_types::PublishDiagnosticsParams { - uri: uri.clone(), - diagnostics: vec![], - version: None, - })?; - - Ok(()) - } - - /// Handle textDocument/didSave notification. - fn on_did_save(&self, params: DidSaveTextDocumentParams) { - let uri = ¶ms.text_document.uri; - debug!("Document saved: {}", uri.as_str()); - - let Some(path) = CanonicalPath::from_uri(uri) else { - warn!("Could not convert URI to path: {}", uri.as_str()); - return; - }; - - if let Some(text) = params.text { - let Some(doc) = self.documents.get(&path) else { - return; - }; - let version = doc.version(); - drop(doc); - - if !self.documents.update(&path, text, version) { - warn!("Failed to update saved document contents: {}", uri.as_str()); - return; - } - } - - self.invalidate_type_cache_with_dependents(&path); - self.update_import_graph(&path); - if self.documents.is_open(&path) { - self.schedule_diagnostics(&path); - } - self.schedule_diagnostics_for_open_importers(&path); - } - - /// Handle workspace/didChangeConfiguration notification. 
- fn on_did_change_configuration(&mut self, params: DidChangeConfigurationParams) { - info!("Configuration changed"); - - // The settings can come in different formats depending on the client - // VS Code sends settings under a "jsonnet" key, others may send flat settings - let settings = if let Some(jsonnet_settings) = params - .settings - .as_object() - .and_then(|o| o.get("jsonnet")) - .or_else(|| { - params - .settings - .as_object() - .and_then(|o| o.get("jsonnet-language-server")) - }) { - jsonnet_settings.clone() - } else { - params.settings - }; - - let old_config = self.config.read().clone(); - let updated_config = { - let mut config = self.config.write(); - if config.update_from_settings(settings) { - Some(config.clone()) - } else { - None - } - }; - - if let Some(updated_config) = updated_config { - let runtime_config_changed = old_config.jpath != updated_config.jpath - || old_config.enable_eval_diagnostics != updated_config.enable_eval_diagnostics - || old_config.resolve_paths_with_tanka != updated_config.resolve_paths_with_tanka; - let diagnostics_config_changed = - old_config.enable_lint_diagnostics != updated_config.enable_lint_diagnostics; - - if runtime_config_changed { - self.reconfigure_runtime_components(&updated_config); - debug!("Runtime components reconfigured after settings update"); - } - - if runtime_config_changed { - // Import resolution and cached file types depend on jpath/tanka settings. - self.type_cache.write().clear(); - - for path in self.tracked_paths_for_reindex() { - self.update_import_graph(&path); - } - } - - if runtime_config_changed || diagnostics_config_changed { - for path in self.documents.open_paths() { - self.schedule_diagnostics(&path); - } - } - - info!( - "Configuration updated: jpath={:?}, eval_diagnostics={}, tanka_mode={}", - updated_config.jpath, - updated_config.enable_eval_diagnostics, - updated_config.resolve_paths_with_tanka - ); - } - } - - /// Handle workspace/didChangeWatchedFiles notification. 
- /// - /// This keeps import graph and type cache up to date for files that change on disk - /// while not being open in the editor. - fn on_did_change_watched_files(&self, params: DidChangeWatchedFilesParams) { - let mut changed_paths = Vec::new(); - let mut requires_global_reindex = false; - - for change in params.changes { - let Some(path) = CanonicalPath::from_uri(&change.uri) else { - continue; - }; - - self.invalidate_type_cache_with_dependents(&path); - - match change.typ { - FileChangeType::DELETED => { - self.documents.remove_closed(&path); - self.import_graph.write().remove_file(&path); - requires_global_reindex = true; - } - FileChangeType::CHANGED | FileChangeType::CREATED => { - if !self.documents.is_open(&path) { - self.documents.refresh_closed_from_disk(&path); - } - self.update_import_graph(&path); - if change.typ == FileChangeType::CREATED { - requires_global_reindex = true; - } - } - _ => {} - } - - changed_paths.push(path); - } - - if requires_global_reindex { - for path in self.tracked_paths_for_reindex() { - self.update_import_graph(&path); - } - } - - changed_paths.sort(); - changed_paths.dedup(); - for path in changed_paths { - if self.documents.is_open(&path) { - self.schedule_diagnostics(&path); - } - self.schedule_diagnostics_for_open_importers(&path); - } - } - - /// Update the import graph for a document. - /// - /// Parses the document's import statements and updates the graph - /// so that cross-file references can be found efficiently. 
- fn update_import_graph(&self, path: &CanonicalPath) { - Self::update_import_graph_for_path(&self.documents, &self.import_graph, &self.config, path); - } - - fn tracked_paths_for_reindex(&self) -> Vec { - let mut paths = { - let import_graph = self.import_graph.read(); - import_graph.all_files().cloned().collect::>() - }; - paths.extend(self.documents.open_paths()); - paths.sort(); - paths.dedup(); - paths - } - - fn update_import_graph_for_path( - documents: &SharedDocumentManager, - import_graph: &Arc>, - config: &SharedConfig, - path: &CanonicalPath, - ) { - let Some(doc) = documents.get_document(path) else { - // File no longer exists or cannot be read. - import_graph.write().remove_file(path); - return; - }; - - let config = config.read(); - let import_roots = effective_import_roots( - path.as_path(), - &config.jpath, - config.resolve_paths_with_tanka, - ); - drop(config); - - // Parse imports OUTSIDE the lock to minimize lock hold time. - // This is important for responsiveness when parsing large files. - let import_resolution = ImportResolution::new(path, &import_roots); - let entries = import_resolution.parse_entries(&doc); - - // Now acquire the write lock and do the quick data structure update - import_graph.write().update_file_with_entries(path, entries); - } - - /// Schedule diagnostics for currently-open files that import `path`. - fn schedule_diagnostics_for_open_importers(&self, path: &CanonicalPath) { - let importers = self.import_graph.read().transitive_importers(path); - for importer in importers { - if self.documents.is_open(&importer) { - self.schedule_diagnostics(&importer); - } - } - } - - /// Schedule diagnostics computation for a document. - /// - /// Diagnostics are computed asynchronously with debouncing. 
- fn schedule_diagnostics(&self, path: &CanonicalPath) { - let Some(doc) = self.documents.get(path) else { - return; - }; - - let (enable_lint, import_roots) = { - let config = self.config.read(); - ( - config.lint_diagnostics_enabled(), - effective_import_roots( - path.as_path(), - &config.jpath, - config.resolve_paths_with_tanka, - ), - ) - }; - let text = doc.text().to_string(); - let version = doc.version(); - drop(doc); // Release the borrow before scheduling - - self.diagnostics - .schedule(path.clone(), text, version, enable_lint, import_roots); - } - /// Send a notification to the client. fn send_notification( &self, diff --git a/crates/jrsonnet-lsp/src/server/import_graph.rs b/crates/jrsonnet-lsp/src/server/import_graph.rs new file mode 100644 index 00000000..eb6f25e7 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/import_graph.rs @@ -0,0 +1,88 @@ +use super::*; + +impl Server { + /// Update the import graph for a document. + /// + /// Parses the document's import statements and updates the graph + /// so that cross-file references can be found efficiently. + pub(super) fn update_import_graph(&self, path: &CanonicalPath) { + Self::update_import_graph_for_path(&self.documents, &self.import_graph, &self.config, path); + } + + pub(super) fn tracked_paths_for_reindex(&self) -> Vec { + let mut paths = { + let import_graph = self.import_graph.read(); + import_graph.all_files().cloned().collect::>() + }; + paths.extend(self.documents.open_paths()); + paths.sort(); + paths.dedup(); + paths + } + + pub(super) fn update_import_graph_for_path( + documents: &SharedDocumentManager, + import_graph: &Arc>, + config: &SharedConfig, + path: &CanonicalPath, + ) { + let Some(doc) = documents.get_document(path) else { + // File no longer exists or cannot be read. 
+ import_graph.write().remove_file(path); + return; + }; + + let config = config.read(); + let import_roots = effective_import_roots( + path.as_path(), + &config.jpath, + config.resolve_paths_with_tanka, + ); + drop(config); + + // Parse imports OUTSIDE the lock to minimize lock hold time. + // This is important for responsiveness when parsing large files. + let import_resolution = ImportResolution::new(path, &import_roots); + let entries = import_resolution.parse_entries(&doc); + + // Now acquire the write lock and do the quick data structure update + import_graph.write().update_file_with_entries(path, entries); + } + + /// Schedule diagnostics for currently-open files that import `path`. + pub(super) fn schedule_diagnostics_for_open_importers(&self, path: &CanonicalPath) { + let importers = self.import_graph.read().transitive_importers(path); + for importer in importers { + if self.documents.is_open(&importer) { + self.schedule_diagnostics(&importer); + } + } + } + + /// Schedule diagnostics computation for a document. + /// + /// Diagnostics are computed asynchronously with debouncing. + pub(super) fn schedule_diagnostics(&self, path: &CanonicalPath) { + let Some(doc) = self.documents.get(path) else { + return; + }; + + let (enable_lint, import_roots) = { + let config = self.config.read(); + ( + config.lint_diagnostics_enabled(), + effective_import_roots( + path.as_path(), + &config.jpath, + config.resolve_paths_with_tanka, + ), + ) + }; + let text = doc.text().to_string(); + let version = doc.version(); + drop(doc); // Release the borrow before scheduling + + self.diagnostics + .schedule(path.clone(), text, version, enable_lint, import_roots); + } +} diff --git a/crates/jrsonnet-lsp/src/server/notifications.rs b/crates/jrsonnet-lsp/src/server/notifications.rs new file mode 100644 index 00000000..51aabe89 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/notifications.rs @@ -0,0 +1,303 @@ +use super::*; + +impl Server { + /// Handle an incoming notification. 
+ /// + /// Returns true if exit notification was received. + pub(super) fn handle_notification(&mut self, notif: Notification) -> Result { + debug!("Handling notification: {}", notif.method); + + match notif.method.as_str() { + Cancel::METHOD => { + let params: lsp_types::CancelParams = serde_json::from_value(notif.params)?; + self.on_cancel_request(params)?; + } + DidOpenTextDocument::METHOD => { + let params: DidOpenTextDocumentParams = serde_json::from_value(notif.params)?; + self.on_did_open(params); + } + DidChangeTextDocument::METHOD => { + let params: DidChangeTextDocumentParams = serde_json::from_value(notif.params)?; + self.on_did_change(params); + } + DidCloseTextDocument::METHOD => { + let params: DidCloseTextDocumentParams = serde_json::from_value(notif.params)?; + self.on_did_close(¶ms)?; + } + DidSaveTextDocument::METHOD => { + let params: DidSaveTextDocumentParams = serde_json::from_value(notif.params)?; + self.on_did_save(params); + } + DidChangeConfiguration::METHOD => { + let params: DidChangeConfigurationParams = serde_json::from_value(notif.params)?; + self.on_did_change_configuration(params); + } + DidChangeWatchedFiles::METHOD => { + let params: DidChangeWatchedFilesParams = serde_json::from_value(notif.params)?; + self.on_did_change_watched_files(params); + } + "exit" => { + info!("Exit notification received"); + return Ok(true); + } + _ => { + debug!("Unhandled notification: {}", notif.method); + } + } + + Ok(false) + } + + pub(super) fn on_cancel_request(&mut self, params: lsp_types::CancelParams) -> Result<()> { + let request_id = Self::request_id_from_number_or_string(params.id); + if !self.inflight_requests.cancel_request(request_id.clone())? { + debug!("Ignoring cancel request for non-pending id {}", request_id); + } + Ok(()) + } + + /// Handle textDocument/didOpen notification. 
+ pub(super) fn on_did_open(&self, params: DidOpenTextDocumentParams) { + let uri = ¶ms.text_document.uri; + info!("Document opened: {}", uri.as_str()); + + let Some(path) = CanonicalPath::from_uri(uri) else { + warn!("Could not convert URI to path: {}", uri.as_str()); + return; + }; + + let text = params.text_document.text; + let version = DocVersion::new(params.text_document.version); + + self.documents.open(path.clone(), text, version); + + // Invalidate type cache for this file and all files that depend on it + self.invalidate_type_cache_with_dependents(&path); + + // Update import graph + self.update_import_graph(&path); + + // Publish diagnostics + self.schedule_diagnostics(&path); + } + + /// Handle textDocument/didChange notification. + pub(super) fn on_did_change(&self, params: DidChangeTextDocumentParams) { + let uri = ¶ms.text_document.uri; + debug!("Document changed: {}", uri.as_str()); + + let Some(path) = CanonicalPath::from_uri(uri) else { + warn!("Could not convert URI to path: {}", uri.as_str()); + return; + }; + + let version = DocVersion::new(params.text_document.version); + + // Process each change (INCREMENTAL sync may send multiple changes) + for change in params.content_changes { + let success = if let Some(range) = change.range { + // Incremental change: apply the range-based edit + self.documents + .apply_incremental_change(&path, range, &change.text, version) + } else { + // Full change: no range means full document replacement + self.documents.update(&path, change.text, version) + }; + + if !success { + warn!("Failed to apply change to document: {}", uri.as_str()); + return; + } + } + + // Invalidate type cache for this file and all files that depend on it + self.invalidate_type_cache_with_dependents(&path); + + // Update import graph (imports may have changed) + self.update_import_graph(&path); + + // Publish diagnostics + self.schedule_diagnostics(&path); + } + + /// Handle textDocument/didClose notification. 
+ pub(super) fn on_did_close(&self, params: &DidCloseTextDocumentParams) -> Result<()> { + let uri = ¶ms.text_document.uri; + info!("Document closed: {}", uri.as_str()); + + let Some(path) = CanonicalPath::from_uri(uri) else { + warn!("Could not convert URI to path: {}", uri.as_str()); + return Ok(()); + }; + + self.documents.close(&path); + + // Invalidate type cache for this file and all files that depend on it + // (dependents may have cached types based on this file's exports) + self.invalidate_type_cache_with_dependents(&path); + + // Keep import graph semantics for closed documents by re-indexing from + // cached/disk content instead of dropping the file node. + self.update_import_graph(&path); + self.schedule_diagnostics_for_open_importers(&path); + + // Clear diagnostics for closed document + self.send_notification::(lsp_types::PublishDiagnosticsParams { + uri: uri.clone(), + diagnostics: vec![], + version: None, + })?; + + Ok(()) + } + + /// Handle textDocument/didSave notification. + pub(super) fn on_did_save(&self, params: DidSaveTextDocumentParams) { + let uri = ¶ms.text_document.uri; + debug!("Document saved: {}", uri.as_str()); + + let Some(path) = CanonicalPath::from_uri(uri) else { + warn!("Could not convert URI to path: {}", uri.as_str()); + return; + }; + + if let Some(text) = params.text { + let Some(doc) = self.documents.get(&path) else { + return; + }; + let version = doc.version(); + drop(doc); + + if !self.documents.update(&path, text, version) { + warn!("Failed to update saved document contents: {}", uri.as_str()); + return; + } + } + + self.invalidate_type_cache_with_dependents(&path); + self.update_import_graph(&path); + if self.documents.is_open(&path) { + self.schedule_diagnostics(&path); + } + self.schedule_diagnostics_for_open_importers(&path); + } + + /// Handle workspace/didChangeConfiguration notification. 
+ pub(super) fn on_did_change_configuration(&mut self, params: DidChangeConfigurationParams) { + info!("Configuration changed"); + + // The settings can come in different formats depending on the client + // VS Code sends settings under a "jsonnet" key, others may send flat settings + let settings = if let Some(jsonnet_settings) = params + .settings + .as_object() + .and_then(|o| o.get("jsonnet")) + .or_else(|| { + params + .settings + .as_object() + .and_then(|o| o.get("jsonnet-language-server")) + }) { + jsonnet_settings.clone() + } else { + params.settings + }; + + let old_config = self.config.read().clone(); + let updated_config = { + let mut config = self.config.write(); + if config.update_from_settings(settings) { + Some(config.clone()) + } else { + None + } + }; + + if let Some(updated_config) = updated_config { + let runtime_config_changed = old_config.jpath != updated_config.jpath + || old_config.enable_eval_diagnostics != updated_config.enable_eval_diagnostics + || old_config.resolve_paths_with_tanka != updated_config.resolve_paths_with_tanka; + let diagnostics_config_changed = + old_config.enable_lint_diagnostics != updated_config.enable_lint_diagnostics; + + if runtime_config_changed { + self.reconfigure_runtime_components(&updated_config); + debug!("Runtime components reconfigured after settings update"); + } + + if runtime_config_changed { + // Import resolution and cached file types depend on jpath/tanka settings. + self.type_cache.write().clear(); + + for path in self.tracked_paths_for_reindex() { + self.update_import_graph(&path); + } + } + + if runtime_config_changed || diagnostics_config_changed { + for path in self.documents.open_paths() { + self.schedule_diagnostics(&path); + } + } + + info!( + "Configuration updated: jpath={:?}, eval_diagnostics={}, tanka_mode={}", + updated_config.jpath, + updated_config.enable_eval_diagnostics, + updated_config.resolve_paths_with_tanka + ); + } + } + + /// Handle workspace/didChangeWatchedFiles notification. 
+ /// + /// This keeps import graph and type cache up to date for files that change on disk + /// while not being open in the editor. + pub(super) fn on_did_change_watched_files(&self, params: DidChangeWatchedFilesParams) { + let mut changed_paths = Vec::new(); + let mut requires_global_reindex = false; + + for change in params.changes { + let Some(path) = CanonicalPath::from_uri(&change.uri) else { + continue; + }; + + self.invalidate_type_cache_with_dependents(&path); + + match change.typ { + FileChangeType::DELETED => { + self.documents.remove_closed(&path); + self.import_graph.write().remove_file(&path); + requires_global_reindex = true; + } + FileChangeType::CHANGED | FileChangeType::CREATED => { + if !self.documents.is_open(&path) { + self.documents.refresh_closed_from_disk(&path); + } + self.update_import_graph(&path); + if change.typ == FileChangeType::CREATED { + requires_global_reindex = true; + } + } + _ => {} + } + + changed_paths.push(path); + } + + if requires_global_reindex { + for path in self.tracked_paths_for_reindex() { + self.update_import_graph(&path); + } + } + + changed_paths.sort(); + changed_paths.dedup(); + for path in changed_paths { + if self.documents.is_open(&path) { + self.schedule_diagnostics(&path); + } + self.schedule_diagnostics_for_open_importers(&path); + } + } +} diff --git a/crates/jrsonnet-lsp/src/server/watched_files.rs b/crates/jrsonnet-lsp/src/server/watched_files.rs new file mode 100644 index 00000000..6ffff8ab --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/watched_files.rs @@ -0,0 +1,128 @@ +use super::*; + +impl Server { + pub(super) fn is_supported_execute_command(command: &str) -> bool { + SUPPORTED_EXECUTE_COMMANDS.contains(&command) + } + + pub(super) fn supports_dynamic_watched_files_registration(params: &InitializeParams) -> bool { + params + .capabilities + .workspace + .as_ref() + .and_then(|workspace| workspace.did_change_watched_files) + .and_then(|capabilities| capabilities.dynamic_registration) + 
.unwrap_or(false)
+	}
+
+	pub(super) fn supports_relative_watch_patterns(params: &InitializeParams) -> bool {
+		params
+			.capabilities
+			.workspace
+			.as_ref()
+			.and_then(|workspace| workspace.did_change_watched_files)
+			.and_then(|capabilities| capabilities.relative_pattern_support)
+			.unwrap_or(false)
+	}
+
+	pub(super) fn watched_file_base_uris(init_roots: &InitializeRoots) -> Vec<Uri> {
+		let mut uris = Vec::new();
+
+		if let Some(workspace_folders) = &init_roots.workspace_folders {
+			for folder in workspace_folders {
+				uris.push(folder.uri.clone());
+			}
+		}
+
+		if let Some(root_uri) = &init_roots.root_uri {
+			uris.push(root_uri.clone());
+		}
+
+		if let Some(root_path) = &init_roots.root_path {
+			let root_path = PathBuf::from(root_path);
+			if let Ok(path) = CanonicalPath::try_from_path(&root_path) {
+				if let Ok(uri) = path.to_uri() {
+					uris.push(uri);
+				}
+			}
+		}
+
+		uris.sort_by(|lhs, rhs| lhs.as_str().cmp(rhs.as_str()));
+		uris.dedup_by(|lhs, rhs| lhs.as_str() == rhs.as_str());
+		uris
+	}
+
+	pub(super) fn watched_file_watchers(
+		initialize_params: &InitializeParams,
+		init_roots: &InitializeRoots,
+	) -> Vec<FileSystemWatcher> {
+		if !Self::supports_relative_watch_patterns(initialize_params) {
+			return WATCHED_FILE_GLOB_PATTERNS
+				.into_iter()
+				.map(|pattern| FileSystemWatcher {
+					glob_pattern: GlobPattern::String(pattern.to_owned()),
+					kind: None,
+				})
+				.collect();
+		}
+
+		let base_uris = Self::watched_file_base_uris(init_roots);
+		if base_uris.is_empty() {
+			return WATCHED_FILE_GLOB_PATTERNS
+				.into_iter()
+				.map(|pattern| FileSystemWatcher {
+					glob_pattern: GlobPattern::String(pattern.to_owned()),
+					kind: None,
+				})
+				.collect();
+		}
+
+		base_uris
+			.into_iter()
+			.flat_map(|base_uri| {
+				WATCHED_FILE_GLOB_PATTERNS
+					.into_iter()
+					.map(move |pattern| FileSystemWatcher {
+						glob_pattern: GlobPattern::Relative(RelativePattern {
+							base_uri: OneOf::Right(base_uri.clone()),
+							pattern: pattern.to_owned(),
+						}),
+						kind: None,
+					})
+			})
+			.collect()
+	}
+
+	pub(super) fn
register_did_change_watched_files(
+		&mut self,
+		initialize_params: &InitializeParams,
+		init_roots: &InitializeRoots,
+	) -> Result<()> {
+		if !Self::supports_dynamic_watched_files_registration(initialize_params) {
+			return Ok(());
+		}
+
+		let watchers = Self::watched_file_watchers(initialize_params, init_roots);
+		let options = DidChangeWatchedFilesRegistrationOptions { watchers };
+		let registration = Registration {
+			id: "jrsonnet-lsp.did-change-watched-files".to_owned(),
+			method: DidChangeWatchedFiles::METHOD.to_owned(),
+			register_options: Some(serde_json::to_value(options)?),
+		};
+		let params = RegistrationParams {
+			registrations: vec![registration],
+		};
+
+		self.inflight_requests
+			.send_outgoing_request::<RegisterCapability>(params)?;
+		info!("Requested dynamic file-watch registration");
+		Ok(())
+	}
+
+	pub(super) fn request_id_from_number_or_string(id: NumberOrString) -> RequestId {
+		match id {
+			NumberOrString::Number(id) => id.into(),
+			NumberOrString::String(id) => id.into(),
+		}
+	}
+}
diff --git a/crates/jrsonnet-lsp/tests/integration_test.rs b/crates/jrsonnet-lsp/tests/integration_test.rs
index 771d10b5..73c5d5f8 100644
--- a/crates/jrsonnet-lsp/tests/integration_test.rs
+++ b/crates/jrsonnet-lsp/tests/integration_test.rs
@@ -997,3812 +997,11 @@ fn run_server(connection: Connection) -> thread::JoinHandle<()> {
 	})
 }
 
-#[test]
-fn test_initialize_shutdown() {
-	// Create an in-memory connection pair
-	let (client_conn, server_conn) = Connection::memory();
-
-	// Run the server in a background thread
-	let server_thread = run_server(server_conn);
-
-	// Send initialize request
-	client_conn
-		.sender
-		.send(Message::Request(initialize_request(1)))
-		.unwrap();
-
-	// Receive initialize response
-	let response = client_conn.receiver.recv().unwrap();
-	assert_matches!(response, Message::Response(resp) => {
-		assert_eq!(resp.id, 1.into());
-		assert!(resp.error.is_none(), "Initialize should succeed");
-		let result = resp.result.expect("should have result");
-
assert!(result.get("capabilities").is_some(), "should have capabilities"); - assert_eq!( - result["capabilities"]["documentHighlightProvider"], - serde_json::Value::Bool(true), - "document highlight capability should be advertised", - ); - assert_eq!( - result["capabilities"]["inlayHintProvider"], - serde_json::Value::Bool(true), - "inlay hint capability should be advertised", - ); - assert_eq!( - result["capabilities"]["codeActionProvider"]["codeActionKinds"][0], - serde_json::Value::String("quickfix".to_string()), - "quickfix code action capability should be advertised", - ); - assert_eq!( - result["capabilities"]["codeActionProvider"]["codeActionKinds"][1], - serde_json::Value::String("source.fixAll".to_string()), - "source fix-all code action capability should be advertised", - ); - assert_eq!( - result["capabilities"]["executeCommandProvider"]["commands"], - serde_json::json!([ - "jrsonnet.evalFile", - "jrsonnet.evalExpression", - "jrsonnet.findTransitiveImporters", - "jrsonnet.findReferences", - "jrsonnet.showErrors" - ]), - "execute command capability should advertise all command IDs", - ); - assert_eq!( - result["capabilities"]["codeLensProvider"]["resolveProvider"], - serde_json::Value::Bool(true), - "code lens resolve capability should be advertised", - ); - assert_eq!( - result["capabilities"]["declarationProvider"], - serde_json::Value::Bool(true), - "declaration capability should be advertised", - ); - assert_eq!( - result["capabilities"]["implementationProvider"], - serde_json::Value::Bool(true), - "implementation capability should be advertised", - ); - let server_name = result - .get("serverInfo") - .and_then(|s| s.get("name")) - .and_then(|n| n.as_str()) - .expect("should have serverInfo.name"); - assert!(server_name.contains("jrsonnet")); - }); - - // Send initialized notification - client_conn - .sender - .send(Message::Notification(initialized_notification())) - .unwrap(); - - // Send shutdown request - client_conn - .sender - 
.send(Message::Request(shutdown_request(2))) - .unwrap(); - - // Receive shutdown response - let response = client_conn.receiver.recv().unwrap(); - assert_matches!(response, Message::Response(resp) => { - assert_eq!(resp.id, 2.into()); - assert!(resp.error.is_none(), "Shutdown should succeed"); - }); - - // Send exit notification - client_conn - .sender - .send(Message::Notification(exit_notification())) - .unwrap(); - - // Wait for server to exit - server_thread - .join() - .expect("Server thread should exit cleanly"); -} - -#[test] -fn test_diagnostics_on_open() { - let (client_conn, server_conn) = Connection::memory(); - let server_thread = run_server(server_conn); - - // Initialize - client_conn - .sender - .send(Message::Request(initialize_request(1))) - .unwrap(); - let _ = client_conn.receiver.recv().unwrap(); // ignore response - - client_conn - .sender - .send(Message::Notification(initialized_notification())) - .unwrap(); - - // Open a document with a syntax error - let uri = "file:///test/error.jsonnet"; - let text = "{ a: }"; // Missing value - syntax error - client_conn - .sender - .send(Message::Notification(did_open_notification(uri, text))) - .unwrap(); - - // Should receive diagnostics notification - let notification = client_conn.receiver.recv().unwrap(); - assert_matches!(notification, Message::Notification(notif) => { - assert_eq!(notif.method, PublishDiagnostics::METHOD); - let params: lsp_types::PublishDiagnosticsParams = - serde_json::from_value(notif.params).unwrap(); - assert!( - !params.diagnostics.is_empty(), - "Should have diagnostics for syntax error" - ); - }); - - // Shutdown - client_conn - .sender - .send(Message::Request(shutdown_request(2))) - .unwrap(); - let _ = client_conn.receiver.recv().unwrap(); - - client_conn - .sender - .send(Message::Notification(exit_notification())) - .unwrap(); - - server_thread - .join() - .expect("Server thread should exit cleanly"); -} - -#[test] -fn test_diagnostics_refresh_on_did_save_with_text() 
{ - let (client_conn, server_conn) = Connection::memory(); - let server_thread = run_server(server_conn); - - client_conn - .sender - .send(Message::Request(initialize_request(1))) - .unwrap(); - let _ = recv_response(&client_conn, 1); - - client_conn - .sender - .send(Message::Notification(initialized_notification())) - .unwrap(); - - let uri = "file:///test/save-refresh.jsonnet"; - client_conn - .sender - .send(Message::Notification(did_open_notification( - uri, "{ a: 1 }", - ))) - .unwrap(); - let opened = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); - assert!(opened.diagnostics.is_empty()); - - client_conn - .sender - .send(Message::Notification(did_save_notification( - uri, - Some("{ a: }"), - ))) - .unwrap(); - let saved_invalid = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); - assert!( - !saved_invalid.diagnostics.is_empty(), - "saving invalid text should publish diagnostics" - ); - - client_conn - .sender - .send(Message::Notification(did_save_notification( - uri, - Some("{ a: 2 }"), - ))) - .unwrap(); - let saved_valid = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); - assert!(saved_valid.diagnostics.is_empty()); - - client_conn - .sender - .send(Message::Request(shutdown_request(2))) - .unwrap(); - let _ = recv_response(&client_conn, 2); - client_conn - .sender - .send(Message::Notification(exit_notification())) - .unwrap(); - - server_thread - .join() - .expect("Server thread should exit cleanly"); -} - -#[test] -fn test_configuration_change_reconfigures_eval_diagnostics() { - let (client_conn, server_conn) = Connection::memory(); - let server_thread = run_server(server_conn); - - // Initialize with eval diagnostics enabled. 
- client_conn - .sender - .send(Message::Request(initialize_request_with_options( - 1, - serde_json::json!({ - "enableEvalDiagnostics": true - }), - ))) - .unwrap(); - let _ = recv_response(&client_conn, 1); - - client_conn - .sender - .send(Message::Notification(initialized_notification())) - .unwrap(); - - let uri = "file:///test/eval-config-change.jsonnet"; - let text = "error 'boom'"; - client_conn - .sender - .send(Message::Notification(did_open_notification(uri, text))) - .unwrap(); - - let initial_diagnostics = - recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); - assert!( - initial_diagnostics - .diagnostics - .iter() - .any(|diag| diag.source.as_deref() == Some("jrsonnet-eval")), - "expected eval diagnostics to be present before config change" - ); - - client_conn - .sender - .send(Message::Notification( - did_change_configuration_notification(serde_json::json!({ - "jsonnet": { - "enableEvalDiagnostics": false - } - })), - )) - .unwrap(); - - let updated_diagnostics = - recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); - assert!( - updated_diagnostics - .diagnostics - .iter() - .all(|diag| diag.source.as_deref() != Some("jrsonnet-eval")), - "expected eval diagnostics to be removed after config change" - ); - - client_conn - .sender - .send(Message::Request(shutdown_request(2))) - .unwrap(); - let _ = recv_response(&client_conn, 2); - - client_conn - .sender - .send(Message::Notification(exit_notification())) - .unwrap(); - - server_thread - .join() - .expect("Server thread should exit cleanly"); -} - -#[test] -fn test_configuration_change_reindexes_closed_import_graph_entries() { - let tmp = TempDir::new().expect("tempdir should be created"); - let jpath_a = tmp.path().join("jpath-a"); - let jpath_b = tmp.path().join("jpath-b"); - let workspace = tmp.path().join("workspace"); - fs::create_dir_all(&jpath_a).expect("jpath-a directory should be created"); - fs::create_dir_all(&jpath_b).expect("jpath-b 
directory should be created"); - fs::create_dir_all(&workspace).expect("workspace directory should be created"); - - let lib_a_path = jpath_a.join("lib.libsonnet"); - let lib_b_path = jpath_b.join("lib.libsonnet"); - let main_path = workspace.join("main.jsonnet"); - fs::write(&lib_a_path, "{ from: 'a' }").expect("jpath-a lib should be written"); - fs::write(&lib_b_path, "{ from: 'b' }").expect("jpath-b lib should be written"); - fs::write(&main_path, r#"local lib = import "lib.libsonnet"; lib"#) - .expect("main should be written"); - - let lib_a_uri = file_uri( - &lib_a_path - .canonicalize() - .expect("lib_a should canonicalize"), - ); - let lib_b_uri = file_uri( - &lib_b_path - .canonicalize() - .expect("lib_b should canonicalize"), - ); - let main_uri = file_uri(&main_path.canonicalize().expect("main should canonicalize")); - let main_text = fs::read_to_string(&main_path).expect("main text should be readable"); - - let (client_conn, server_conn) = Connection::memory(); - let server_thread = run_server(server_conn); - - client_conn - .sender - .send(Message::Request(initialize_request_with_options( - 1, - serde_json::json!({ - "jpath": [jpath_a.to_string_lossy().to_string()], - }), - ))) - .unwrap(); - let _ = recv_response(&client_conn, 1); - client_conn - .sender - .send(Message::Notification(initialized_notification())) - .unwrap(); - - client_conn - .sender - .send(Message::Notification(did_open_notification( - &main_uri, &main_text, - ))) - .unwrap(); - let _ = recv_publish_diagnostics_for_uri(&client_conn, &main_uri, Duration::from_secs(5)); - - client_conn - .sender - .send(Message::Notification(did_close_notification(&main_uri))) - .unwrap(); - - client_conn - .sender - .send(Message::Notification( - did_change_configuration_notification(serde_json::json!({ - "jsonnet": { - "jpath": [jpath_b.to_string_lossy().to_string()] - } - })), - )) - .unwrap(); - - client_conn - .sender - .send(Message::Request(execute_command_request( - 2, - 
"jrsonnet.findTransitiveImporters", - vec![serde_json::Value::String(lib_a_uri.clone())], - ))) - .unwrap(); - let old_target_response = recv_response(&client_conn, 2); - assert!( - old_target_response.error.is_none(), - "findTransitiveImporters for old jpath target should succeed" - ); - assert_eq!( - old_target_response - .result - .expect("should have old target command result"), - serde_json::json!({ - "file": lib_a_uri, - "transitiveImporters": [], - }) - ); - - client_conn - .sender - .send(Message::Request(execute_command_request( - 3, - "jrsonnet.findTransitiveImporters", - vec![serde_json::Value::String(lib_b_uri.clone())], - ))) - .unwrap(); - let new_target_response = recv_response(&client_conn, 3); - assert!( - new_target_response.error.is_none(), - "findTransitiveImporters for new jpath target should succeed" - ); - assert_eq!( - new_target_response - .result - .expect("should have new target command result"), - serde_json::json!({ - "file": lib_b_uri, - "transitiveImporters": [main_uri], - }) - ); - - client_conn - .sender - .send(Message::Request(shutdown_request(4))) - .unwrap(); - let _ = recv_response(&client_conn, 4); - client_conn - .sender - .send(Message::Notification(exit_notification())) - .unwrap(); - server_thread - .join() - .expect("Server thread should exit cleanly"); -} - -#[test] -fn test_valid_document_no_errors() { - let (client_conn, server_conn) = Connection::memory(); - let server_thread = run_server(server_conn); - - // Initialize - client_conn - .sender - .send(Message::Request(initialize_request(1))) - .unwrap(); - let _ = client_conn.receiver.recv().unwrap(); - - client_conn - .sender - .send(Message::Notification(initialized_notification())) - .unwrap(); - - // Open a valid document - let uri = "file:///test/valid.jsonnet"; - let text = r#"{ hello: "world", answer: 42 }"#; - client_conn - .sender - .send(Message::Notification(did_open_notification(uri, text))) - .unwrap(); - - // Should receive diagnostics notification with 
empty diagnostics - let notification = client_conn.receiver.recv().unwrap(); - assert_matches!(notification, Message::Notification(notif) => { - assert_eq!(notif.method, PublishDiagnostics::METHOD); - let params: lsp_types::PublishDiagnosticsParams = - serde_json::from_value(notif.params).unwrap(); - assert!( - params.diagnostics.is_empty(), - "Valid document should have no diagnostics" - ); - }); - - // Shutdown - client_conn - .sender - .send(Message::Request(shutdown_request(2))) - .unwrap(); - let _ = client_conn.receiver.recv().unwrap(); - - client_conn - .sender - .send(Message::Notification(exit_notification())) - .unwrap(); - - server_thread - .join() - .expect("Server thread should exit cleanly"); -} - -#[test] -fn test_initialize_registers_did_change_watched_files_when_supported() { - let (client_conn, server_conn) = Connection::memory(); - let server_thread = run_server(server_conn); - - client_conn - .sender - .send(Message::Request( - initialize_request_with_dynamic_watched_files(1), - )) - .unwrap(); - let _ = recv_response(&client_conn, 1); - - client_conn - .sender - .send(Message::Notification(initialized_notification())) - .unwrap(); - - let register_request = loop { - let message = client_conn - .receiver - .recv_timeout(Duration::from_secs(3)) - .expect("expected registerCapability request"); - if let Message::Request(request) = message { - break request; - } - }; - assert_eq!(register_request.method, RegisterCapability::METHOD); - - let actual_params: RegistrationParams = - serde_json::from_value(register_request.params).unwrap(); - let expected_options = DidChangeWatchedFilesRegistrationOptions { - watchers: vec![ - FileSystemWatcher { - glob_pattern: GlobPattern::String("**/*.jsonnet".to_owned()), - kind: None, - }, - FileSystemWatcher { - glob_pattern: GlobPattern::String("**/*.libsonnet".to_owned()), - kind: None, - }, - FileSystemWatcher { - glob_pattern: GlobPattern::String("**/*.json".to_owned()), - kind: None, - }, - ], - }; - let 
expected_params = RegistrationParams { - registrations: vec![Registration { - id: "jrsonnet-lsp.did-change-watched-files".to_owned(), - method: DidChangeWatchedFiles::METHOD.to_owned(), - register_options: Some(serde_json::to_value(expected_options).unwrap()), - }], - }; - assert_eq!(actual_params, expected_params); - - client_conn - .sender - .send(Message::Response(lsp_server::Response::new_ok( - register_request.id, - serde_json::Value::Null, - ))) - .unwrap(); - - client_conn - .sender - .send(Message::Request(shutdown_request(2))) - .unwrap(); - let _ = recv_response(&client_conn, 2); - client_conn - .sender - .send(Message::Notification(exit_notification())) - .unwrap(); - server_thread - .join() - .expect("Server thread should exit cleanly"); -} - -#[test] -fn test_initialize_uses_relative_watch_patterns_when_supported() { - let tmp = TempDir::new().expect("tempdir should be created"); - let root_uri = file_uri(tmp.path()); - let parsed_root_uri: lsp_types::Uri = root_uri.parse().unwrap(); - - let (client_conn, server_conn) = Connection::memory(); - let server_thread = run_server(server_conn); - - client_conn - .sender - .send(Message::Request( - initialize_request_with_dynamic_watched_files_relative(1, &root_uri), - )) - .unwrap(); - let _ = recv_response(&client_conn, 1); - - client_conn - .sender - .send(Message::Notification(initialized_notification())) - .unwrap(); - - let register_request = loop { - let message = client_conn - .receiver - .recv_timeout(Duration::from_secs(3)) - .expect("expected registerCapability request"); - if let Message::Request(request) = message { - break request; - } - }; - assert_eq!(register_request.method, RegisterCapability::METHOD); - - let actual_params: RegistrationParams = - serde_json::from_value(register_request.params).unwrap(); - let expected_options = DidChangeWatchedFilesRegistrationOptions { - watchers: vec![ - FileSystemWatcher { - glob_pattern: GlobPattern::Relative(RelativePattern { - base_uri: 
OneOf::Right(parsed_root_uri.clone()), - pattern: "**/*.jsonnet".to_owned(), - }), - kind: None, - }, - FileSystemWatcher { - glob_pattern: GlobPattern::Relative(RelativePattern { - base_uri: OneOf::Right(parsed_root_uri.clone()), - pattern: "**/*.libsonnet".to_owned(), - }), - kind: None, - }, - FileSystemWatcher { - glob_pattern: GlobPattern::Relative(RelativePattern { - base_uri: OneOf::Right(parsed_root_uri), - pattern: "**/*.json".to_owned(), - }), - kind: None, - }, - ], - }; - let expected_params = RegistrationParams { - registrations: vec![Registration { - id: "jrsonnet-lsp.did-change-watched-files".to_owned(), - method: DidChangeWatchedFiles::METHOD.to_owned(), - register_options: Some(serde_json::to_value(expected_options).unwrap()), - }], - }; - assert_eq!(actual_params, expected_params); - - client_conn - .sender - .send(Message::Response(lsp_server::Response::new_ok( - register_request.id, - serde_json::Value::Null, - ))) - .unwrap(); - - client_conn - .sender - .send(Message::Request(shutdown_request(2))) - .unwrap(); - let _ = recv_response(&client_conn, 2); - client_conn - .sender - .send(Message::Notification(exit_notification())) - .unwrap(); - server_thread - .join() - .expect("Server thread should exit cleanly"); -} - -#[test] -fn test_goto_definition() { - let (client_conn, server_conn) = Connection::memory(); - let server_thread = run_server(server_conn); - - // Initialize - client_conn - .sender - .send(Message::Request(initialize_request(1))) - .unwrap(); - let _ = client_conn.receiver.recv().unwrap(); - - client_conn - .sender - .send(Message::Notification(initialized_notification())) - .unwrap(); - - // Open a document with a local binding - let uri = "file:///test/definition.jsonnet"; - let text = r"local x = 1; x + 1"; - // ^^^^^^ def ^ use at position (0, 13) - client_conn - .sender - .send(Message::Notification(did_open_notification(uri, text))) - .unwrap(); - - // Receive diagnostics notification (discard) - let _ = 
client_conn.receiver.recv().unwrap(); - - // Send goto definition request for 'x' usage at position (0, 13) - client_conn - .sender - .send(Message::Request(goto_definition_request(2, uri, 0, 13))) - .unwrap(); - - // Should receive definition response - let response = client_conn.receiver.recv().unwrap(); - let response = assert_matches!(response, Message::Response(resp) => resp); - assert_eq!(response.id, 2.into()); - assert!(response.error.is_none(), "Goto definition should succeed"); - let result: Option = - serde_json::from_value(response.result.expect("should have result")).unwrap(); - assert_eq!( - result, - Some(GotoDefinitionResponse::Scalar(lsp_types::Location { - uri: uri.parse().unwrap(), - range: lsp_types::Range { - start: Position { - line: 0, - character: 6, - }, - end: Position { - line: 0, - character: 7, - }, - }, - })) - ); - - // Shutdown - client_conn - .sender - .send(Message::Request(shutdown_request(3))) - .unwrap(); - let _ = client_conn.receiver.recv().unwrap(); - - client_conn - .sender - .send(Message::Notification(exit_notification())) - .unwrap(); - - server_thread - .join() - .expect("Server thread should exit cleanly"); -} - -#[test] -fn test_goto_type_definition() { - let (client_conn, server_conn) = Connection::memory(); - let server_thread = run_server(server_conn); - - client_conn - .sender - .send(Message::Request(initialize_request(1))) - .unwrap(); - let _ = recv_response(&client_conn, 1); - client_conn - .sender - .send(Message::Notification(initialized_notification())) - .unwrap(); - - let uri = "file:///test/type-definition.jsonnet"; - let text = "local x = 1; x + 1"; - client_conn - .sender - .send(Message::Notification(did_open_notification(uri, text))) - .unwrap(); - let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); - - client_conn - .sender - .send(Message::Request(goto_type_definition_request( - 2, uri, 0, 13, - ))) - .unwrap(); - let response = recv_response(&client_conn, 2); - 
assert!( - response.error.is_none(), - "Goto type definition request should succeed" - ); - let result: Option = - serde_json::from_value(response.result.expect("should have result")).unwrap(); - assert_eq!( - result, - Some(GotoDefinitionResponse::Scalar(lsp_types::Location { - uri: uri.parse().unwrap(), - range: lsp_types::Range { - start: Position { - line: 0, - character: 6, - }, - end: Position { - line: 0, - character: 7, - }, - }, - })) - ); - - client_conn - .sender - .send(Message::Request(shutdown_request(3))) - .unwrap(); - let _ = recv_response(&client_conn, 3); - client_conn - .sender - .send(Message::Notification(exit_notification())) - .unwrap(); - server_thread - .join() - .expect("Server thread should exit cleanly"); -} - -#[test] -fn test_goto_type_definition_matches_definition_for_local_alias() { - let (client_conn, server_conn) = Connection::memory(); - let server_thread = run_server(server_conn); - - client_conn - .sender - .send(Message::Request(initialize_request(1))) - .unwrap(); - let _ = recv_response(&client_conn, 1); - client_conn - .sender - .send(Message::Notification(initialized_notification())) - .unwrap(); - - let uri = "file:///test/type-definition-local-alias.jsonnet"; - let text = "local x = 1;\nlocal y = x;\ny"; - client_conn - .sender - .send(Message::Notification(did_open_notification(uri, text))) - .unwrap(); - let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); - - let result = assert_type_definition_matches_definition(&client_conn, 2, 3, uri, 2, 0); - assert_eq!( - result, - Some(GotoDefinitionResponse::Scalar(lsp_types::Location { - uri: uri.parse().unwrap(), - range: lsp_types::Range { - start: Position { - line: 0, - character: 6, - }, - end: Position { - line: 0, - character: 7, - }, - }, - })) - ); - - client_conn - .sender - .send(Message::Request(shutdown_request(4))) - .unwrap(); - let _ = recv_response(&client_conn, 4); - client_conn - .sender - 
.send(Message::Notification(exit_notification())) - .unwrap(); - server_thread - .join() - .expect("Server thread should exit cleanly"); -} - -#[test] -fn test_goto_type_definition_matches_definition_for_import_targets() { - let temp_dir = TempDir::new().expect("failed to create temp dir"); - let lib_path = temp_dir.path().join("lib.libsonnet"); - let main_path = temp_dir.path().join("main.jsonnet"); - - fs::write(&lib_path, "{ foo: 42 }").expect("failed to write lib file"); - fs::write( - &main_path, - r#"local lib = import "lib.libsonnet"; -local alias = lib.foo; -local plain = lib; -alias + std.length(plain)"#, - ) - .expect("failed to write main file"); - - let uri = file_uri(&main_path); - let lib_uri: lsp_types::Uri = file_uri(&lib_path).parse().unwrap(); - let text = fs::read_to_string(&main_path).expect("failed to read main file"); - - let (client_conn, server_conn) = Connection::memory(); - let server_thread = run_server(server_conn); - - client_conn - .sender - .send(Message::Request(initialize_request(1))) - .unwrap(); - let _ = recv_response(&client_conn, 1); - client_conn - .sender - .send(Message::Notification(initialized_notification())) - .unwrap(); - client_conn - .sender - .send(Message::Notification(did_open_notification(&uri, &text))) - .unwrap(); - let _ = recv_publish_diagnostics_for_uri(&client_conn, &uri, Duration::from_secs(5)); - - // `alias` usage at line 3, col 0 resolves to imported field `foo`. - let alias_result = assert_type_definition_matches_definition(&client_conn, 2, 3, &uri, 3, 0); - assert_eq!( - alias_result, - Some(GotoDefinitionResponse::Scalar(lsp_types::Location { - uri: lib_uri.clone(), - range: lsp_types::Range { - start: Position { - line: 0, - character: 2, - }, - end: Position { - line: 0, - character: 5, - }, - }, - })) - ); - - // `plain` usage at line 3, col 19 resolves to import file root. 
- let plain_result = assert_type_definition_matches_definition(&client_conn, 4, 5, &uri, 3, 19); - assert_eq!( - plain_result, - Some(GotoDefinitionResponse::Scalar(lsp_types::Location { - uri: lib_uri, - range: lsp_types::Range::default(), - })) - ); - - client_conn - .sender - .send(Message::Request(shutdown_request(6))) - .unwrap(); - let _ = recv_response(&client_conn, 6); - client_conn - .sender - .send(Message::Notification(exit_notification())) - .unwrap(); - server_thread - .join() - .expect("Server thread should exit cleanly"); -} - -#[test] -fn test_navigation_matrix_local_alias() { - let (client_conn, server_conn) = Connection::memory(); - let server_thread = run_server(server_conn); - - client_conn - .sender - .send(Message::Request(initialize_request(1))) - .unwrap(); - let _ = recv_response(&client_conn, 1); - client_conn - .sender - .send(Message::Notification(initialized_notification())) - .unwrap(); - - let uri = "file:///test/navigation-matrix-local.jsonnet"; - let text = "local x = 1;\nlocal y = x;\ny"; - client_conn - .sender - .send(Message::Notification(did_open_notification(uri, text))) - .unwrap(); - let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); - - let declaration = send_goto_and_parse( - &client_conn, - 2, - "goto declaration", - goto_declaration_request(2, uri, 2, 0), - ); - let definition = send_goto_and_parse( - &client_conn, - 3, - "goto definition", - goto_definition_request(3, uri, 2, 0), - ); - let type_definition = send_goto_and_parse( - &client_conn, - 4, - "goto type definition", - goto_type_definition_request(4, uri, 2, 0), - ); - let implementation = send_goto_and_parse( - &client_conn, - 5, - "goto implementation", - goto_implementation_request(5, uri, 2, 0), - ); - - assert_eq!( - declaration, - Some(GotoDefinitionResponse::Scalar(lsp_types::Location { - uri: uri.parse().unwrap(), - range: lsp_types::Range { - start: Position { - line: 1, - character: 6, - }, - end: Position { - line: 1, 
- character: 7, - }, - }, - })) - ); - assert_eq!( - definition, - Some(GotoDefinitionResponse::Scalar(lsp_types::Location { - uri: uri.parse().unwrap(), - range: lsp_types::Range { - start: Position { - line: 0, - character: 6, - }, - end: Position { - line: 0, - character: 7, - }, - }, - })) - ); - assert_eq!(type_definition, definition); - assert_eq!( - implementation, - Some(GotoDefinitionResponse::Scalar(lsp_types::Location { - uri: uri.parse().unwrap(), - range: lsp_types::Range { - start: Position { - line: 1, - character: 10, - }, - end: Position { - line: 1, - character: 11, - }, - }, - })) - ); - - client_conn - .sender - .send(Message::Request(shutdown_request(6))) - .unwrap(); - let _ = recv_response(&client_conn, 6); - client_conn - .sender - .send(Message::Notification(exit_notification())) - .unwrap(); - server_thread - .join() - .expect("Server thread should exit cleanly"); -} - -#[test] -fn test_navigation_matrix_import_alias() { - let temp_dir = TempDir::new().expect("failed to create temp dir"); - let lib_path = temp_dir.path().join("lib.libsonnet"); - let main_path = temp_dir.path().join("main.jsonnet"); - - fs::write(&lib_path, "{ foo: 42 }").expect("failed to write lib file"); - fs::write( - &main_path, - r#"local lib = import "lib.libsonnet"; -local alias = lib.foo; -alias"#, - ) - .expect("failed to write main file"); - - let uri = file_uri(&main_path); - let lib_uri: lsp_types::Uri = file_uri(&lib_path).parse().unwrap(); - let text = fs::read_to_string(&main_path).expect("failed to read main file"); - - let (client_conn, server_conn) = Connection::memory(); - let server_thread = run_server(server_conn); - - client_conn - .sender - .send(Message::Request(initialize_request(1))) - .unwrap(); - let _ = recv_response(&client_conn, 1); - client_conn - .sender - .send(Message::Notification(initialized_notification())) - .unwrap(); - - client_conn - .sender - .send(Message::Notification(did_open_notification(&uri, &text))) - .unwrap(); - let _ = 
recv_publish_diagnostics_for_uri(&client_conn, &uri, Duration::from_secs(5)); - - let declaration = send_goto_and_parse( - &client_conn, - 2, - "goto declaration", - goto_declaration_request(2, &uri, 2, 0), - ); - let definition = send_goto_and_parse( - &client_conn, - 3, - "goto definition", - goto_definition_request(3, &uri, 2, 0), - ); - let type_definition = send_goto_and_parse( - &client_conn, - 4, - "goto type definition", - goto_type_definition_request(4, &uri, 2, 0), - ); - let implementation = send_goto_and_parse( - &client_conn, - 5, - "goto implementation", - goto_implementation_request(5, &uri, 2, 0), - ); - - assert_eq!( - declaration, - Some(GotoDefinitionResponse::Scalar(lsp_types::Location { - uri: uri.parse().unwrap(), - range: lsp_types::Range { - start: Position { - line: 1, - character: 6, - }, - end: Position { - line: 1, - character: 11, - }, - }, - })) - ); - assert_eq!( - definition, - Some(GotoDefinitionResponse::Scalar(lsp_types::Location { - uri: lib_uri, - range: lsp_types::Range { - start: Position { - line: 0, - character: 2, - }, - end: Position { - line: 0, - character: 5, - }, - }, - })) - ); - assert_eq!(type_definition, definition); - assert_eq!( - implementation, - Some(GotoDefinitionResponse::Scalar(lsp_types::Location { - uri: uri.parse().unwrap(), - range: lsp_types::Range { - start: Position { - line: 1, - character: 14, - }, - end: Position { - line: 1, - character: 21, - }, - }, - })) - ); - - client_conn - .sender - .send(Message::Request(shutdown_request(6))) - .unwrap(); - let _ = recv_response(&client_conn, 6); - client_conn - .sender - .send(Message::Notification(exit_notification())) - .unwrap(); - server_thread - .join() - .expect("Server thread should exit cleanly"); -} - -#[test] -fn test_goto_declaration() { - let (client_conn, server_conn) = Connection::memory(); - let server_thread = run_server(server_conn); - - client_conn - .sender - .send(Message::Request(initialize_request(1))) - .unwrap(); - let _ = 
recv_response(&client_conn, 1); - client_conn - .sender - .send(Message::Notification(initialized_notification())) - .unwrap(); - - let uri = "file:///test/declaration.jsonnet"; - let text = "local x = 1; x + 1"; - client_conn - .sender - .send(Message::Notification(did_open_notification(uri, text))) - .unwrap(); - let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); - - client_conn - .sender - .send(Message::Request(goto_declaration_request(2, uri, 0, 13))) - .unwrap(); - - let response = recv_response(&client_conn, 2); - assert!( - response.error.is_none(), - "Goto declaration request should succeed" - ); - let result: Option = - serde_json::from_value(response.result.expect("should have result")).unwrap(); - assert_eq!( - result, - Some(GotoDefinitionResponse::Scalar(lsp_types::Location { - uri: uri.parse().unwrap(), - range: lsp_types::Range { - start: Position { - line: 0, - character: 6, - }, - end: Position { - line: 0, - character: 7, - }, - }, - })) - ); - - client_conn - .sender - .send(Message::Request(shutdown_request(3))) - .unwrap(); - let _ = recv_response(&client_conn, 3); - client_conn - .sender - .send(Message::Notification(exit_notification())) - .unwrap(); - server_thread - .join() - .expect("Server thread should exit cleanly"); -} - -#[test] -fn test_goto_implementation_local_binding() { - let (client_conn, server_conn) = Connection::memory(); - let server_thread = run_server(server_conn); - - client_conn - .sender - .send(Message::Request(initialize_request(1))) - .unwrap(); - let _ = recv_response(&client_conn, 1); - client_conn - .sender - .send(Message::Notification(initialized_notification())) - .unwrap(); - - let uri = "file:///test/implementation-local.jsonnet"; - let text = "local x = 1; x + 1"; - client_conn - .sender - .send(Message::Notification(did_open_notification(uri, text))) - .unwrap(); - let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); - - client_conn - 
.sender - .send(Message::Request(goto_definition_request(2, uri, 0, 13))) - .unwrap(); - let definition_response = recv_response(&client_conn, 2); - let definition_result: Option = - serde_json::from_value(definition_response.result.expect("should have result")).unwrap(); - assert_eq!( - definition_result, - Some(GotoDefinitionResponse::Scalar(lsp_types::Location { - uri: uri.parse().unwrap(), - range: lsp_types::Range { - start: Position { - line: 0, - character: 6, - }, - end: Position { - line: 0, - character: 7, - }, - }, - })) - ); - - client_conn - .sender - .send(Message::Request(goto_implementation_request(3, uri, 0, 13))) - .unwrap(); - let implementation_response = recv_response(&client_conn, 3); - let implementation_result: Option = - serde_json::from_value(implementation_response.result.expect("should have result")) - .unwrap(); - assert_eq!( - implementation_result, - Some(GotoDefinitionResponse::Scalar(lsp_types::Location { - uri: uri.parse().unwrap(), - range: lsp_types::Range { - start: Position { - line: 0, - character: 10, - }, - end: Position { - line: 0, - character: 11, - }, - }, - })) - ); - - client_conn - .sender - .send(Message::Request(shutdown_request(4))) - .unwrap(); - let _ = recv_response(&client_conn, 4); - client_conn - .sender - .send(Message::Notification(exit_notification())) - .unwrap(); - server_thread - .join() - .expect("Server thread should exit cleanly"); -} - -#[test] -fn test_goto_definition_and_declaration_diverge_for_local_alias() { - let (client_conn, server_conn) = Connection::memory(); - let server_thread = run_server(server_conn); - - client_conn - .sender - .send(Message::Request(initialize_request(1))) - .unwrap(); - let _ = recv_response(&client_conn, 1); - client_conn - .sender - .send(Message::Notification(initialized_notification())) - .unwrap(); - - let uri = "file:///test/alias-definition-vs-declaration.jsonnet"; - let text = "local x = 1;\nlocal y = x;\ny"; - client_conn - .sender - 
.send(Message::Notification(did_open_notification(uri, text))) - .unwrap(); - let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); - - client_conn - .sender - .send(Message::Request(goto_definition_request(2, uri, 2, 0))) - .unwrap(); - let definition_response = recv_response(&client_conn, 2); - let definition_result: Option = - serde_json::from_value(definition_response.result.expect("should have result")).unwrap(); - assert_eq!( - definition_result, - Some(GotoDefinitionResponse::Scalar(lsp_types::Location { - uri: uri.parse().unwrap(), - range: lsp_types::Range { - start: Position { - line: 0, - character: 6, - }, - end: Position { - line: 0, - character: 7, - }, - }, - })) - ); - - client_conn - .sender - .send(Message::Request(goto_declaration_request(3, uri, 2, 0))) - .unwrap(); - let declaration_response = recv_response(&client_conn, 3); - let declaration_result: Option = - serde_json::from_value(declaration_response.result.expect("should have result")).unwrap(); - assert_eq!( - declaration_result, - Some(GotoDefinitionResponse::Scalar(lsp_types::Location { - uri: uri.parse().unwrap(), - range: lsp_types::Range { - start: Position { - line: 1, - character: 6, - }, - end: Position { - line: 1, - character: 7, - }, - }, - })) - ); - - client_conn - .sender - .send(Message::Request(shutdown_request(4))) - .unwrap(); - let _ = recv_response(&client_conn, 4); - client_conn - .sender - .send(Message::Notification(exit_notification())) - .unwrap(); - server_thread - .join() - .expect("Server thread should exit cleanly"); -} - -#[test] -fn test_goto_implementation_import_field() { - let temp_dir = TempDir::new().expect("failed to create temp dir"); - let lib_path = temp_dir.path().join("lib.libsonnet"); - let main_path = temp_dir.path().join("main.jsonnet"); - - fs::write(&lib_path, "{ foo: 42 }").expect("failed to write lib file"); - fs::write(&main_path, r#"local lib = import "lib.libsonnet"; lib.foo"#) - .expect("failed to write 
main file"); - - let uri = file_uri(&main_path); - let lib_uri: lsp_types::Uri = file_uri(&lib_path).parse().unwrap(); - let text = fs::read_to_string(&main_path).expect("failed to read main file"); - - let (client_conn, server_conn) = Connection::memory(); - let server_thread = run_server(server_conn); - - client_conn - .sender - .send(Message::Request(initialize_request(1))) - .unwrap(); - let _ = recv_response(&client_conn, 1); - client_conn - .sender - .send(Message::Notification(initialized_notification())) - .unwrap(); - - client_conn - .sender - .send(Message::Notification(did_open_notification(&uri, &text))) - .unwrap(); - - client_conn - .sender - .send(Message::Request(goto_definition_request(2, &uri, 0, 40))) - .unwrap(); - let definition_response = recv_response(&client_conn, 2); - let definition_result: Option = - serde_json::from_value(definition_response.result.expect("should have result")).unwrap(); - assert_eq!( - definition_result, - Some(GotoDefinitionResponse::Scalar(lsp_types::Location { - uri: lib_uri.clone(), - range: lsp_types::Range { - start: Position { - line: 0, - character: 2, - }, - end: Position { - line: 0, - character: 5, - }, - }, - })) - ); - - client_conn - .sender - .send(Message::Request(goto_declaration_request(3, &uri, 0, 40))) - .unwrap(); - let declaration_response = recv_response(&client_conn, 3); - let declaration_result: Option = - serde_json::from_value(declaration_response.result.expect("should have result")).unwrap(); - assert_eq!( - declaration_result, - Some(GotoDefinitionResponse::Scalar(lsp_types::Location { - uri: lib_uri.clone(), - range: lsp_types::Range { - start: Position { - line: 0, - character: 2, - }, - end: Position { - line: 0, - character: 5, - }, - }, - })) - ); - - client_conn - .sender - .send(Message::Request(goto_implementation_request( - 4, &uri, 0, 40, - ))) - .unwrap(); - let implementation_response = recv_response(&client_conn, 4); - let implementation_result: Option = - 
serde_json::from_value(implementation_response.result.expect("should have result")) - .unwrap(); - assert_eq!( - implementation_result, - Some(GotoDefinitionResponse::Scalar(lsp_types::Location { - uri: lib_uri, - range: lsp_types::Range { - start: Position { - line: 0, - character: 7, - }, - end: Position { - line: 0, - character: 9, - }, - }, - })) - ); - - client_conn - .sender - .send(Message::Request(shutdown_request(5))) - .unwrap(); - let _ = recv_response(&client_conn, 5); - client_conn - .sender - .send(Message::Notification(exit_notification())) - .unwrap(); - server_thread - .join() - .expect("Server thread should exit cleanly"); -} - -#[test] -fn test_goto_definition_alias_to_import_field_vs_declaration() { - let temp_dir = TempDir::new().expect("failed to create temp dir"); - let lib_path = temp_dir.path().join("lib.libsonnet"); - let main_path = temp_dir.path().join("main.jsonnet"); - - fs::write(&lib_path, "{ foo: 42 }").expect("failed to write lib file"); - fs::write( - &main_path, - r#"local lib = import "lib.libsonnet"; -local alias = lib.foo; -alias"#, - ) - .expect("failed to write main file"); - - let uri = file_uri(&main_path); - let lib_uri: lsp_types::Uri = file_uri(&lib_path).parse().unwrap(); - let text = fs::read_to_string(&main_path).expect("failed to read main file"); - - let (client_conn, server_conn) = Connection::memory(); - let server_thread = run_server(server_conn); - - client_conn - .sender - .send(Message::Request(initialize_request(1))) - .unwrap(); - let _ = recv_response(&client_conn, 1); - client_conn - .sender - .send(Message::Notification(initialized_notification())) - .unwrap(); - - client_conn - .sender - .send(Message::Notification(did_open_notification(&uri, &text))) - .unwrap(); - let _ = recv_publish_diagnostics_for_uri(&client_conn, &uri, Duration::from_secs(5)); - - client_conn - .sender - .send(Message::Request(goto_definition_request(2, &uri, 2, 0))) - .unwrap(); - let definition_response = 
recv_response(&client_conn, 2); - let definition_result: Option = - serde_json::from_value(definition_response.result.expect("should have result")).unwrap(); - assert_eq!( - definition_result, - Some(GotoDefinitionResponse::Scalar(lsp_types::Location { - uri: lib_uri, - range: lsp_types::Range { - start: Position { - line: 0, - character: 2, - }, - end: Position { - line: 0, - character: 5, - }, - }, - })) - ); - - client_conn - .sender - .send(Message::Request(goto_declaration_request(3, &uri, 2, 0))) - .unwrap(); - let declaration_response = recv_response(&client_conn, 3); - let declaration_result: Option = - serde_json::from_value(declaration_response.result.expect("should have result")).unwrap(); - assert_eq!( - declaration_result, - Some(GotoDefinitionResponse::Scalar(lsp_types::Location { - uri: uri.parse().unwrap(), - range: lsp_types::Range { - start: Position { - line: 1, - character: 6, - }, - end: Position { - line: 1, - character: 11, - }, - }, - })) - ); - - client_conn - .sender - .send(Message::Request(goto_implementation_request(4, &uri, 2, 0))) - .unwrap(); - let implementation_response = recv_response(&client_conn, 4); - let implementation_result: Option = - serde_json::from_value(implementation_response.result.expect("should have result")) - .unwrap(); - assert_eq!( - implementation_result, - Some(GotoDefinitionResponse::Scalar(lsp_types::Location { - uri: uri.parse().unwrap(), - range: lsp_types::Range { - start: Position { - line: 1, - character: 14, - }, - end: Position { - line: 1, - character: 21, - }, - }, - })) - ); - - client_conn - .sender - .send(Message::Request(shutdown_request(5))) - .unwrap(); - let _ = recv_response(&client_conn, 5); - client_conn - .sender - .send(Message::Notification(exit_notification())) - .unwrap(); - server_thread - .join() - .expect("Server thread should exit cleanly"); -} - -#[test] -fn test_diagnostics_import_file_and_definition_resolution() { - let temp_dir = TempDir::new().expect("failed to create temp 
dir"); - let lib_path = temp_dir.path().join("lib.libsonnet"); - let main_path = temp_dir.path().join("main.jsonnet"); - - fs::write(&lib_path, "{ foo: 42 }").expect("failed to write lib file"); - fs::write(&main_path, r#"local lib = import "lib.libsonnet"; lib.foo"#) - .expect("failed to write main file"); - - let uri = file_uri(&main_path); - let lib_uri: lsp_types::Uri = file_uri(&lib_path).parse().unwrap(); - let text = fs::read_to_string(&main_path).expect("failed to read main file"); - - let (client_conn, server_conn) = Connection::memory(); - let server_thread = run_server(server_conn); - - client_conn - .sender - .send(Message::Request(initialize_request(1))) - .unwrap(); - let _ = recv_response(&client_conn, 1); - client_conn - .sender - .send(Message::Notification(initialized_notification())) - .unwrap(); - - client_conn - .sender - .send(Message::Notification(did_open_notification(&uri, &text))) - .unwrap(); - - let diagnostics = recv_publish_diagnostics_for_uri(&client_conn, &uri, Duration::from_secs(5)); - assert_eq!(diagnostics.uri.as_str(), uri); - assert!( - diagnostics.diagnostics.is_empty(), - "import-backed file should have no diagnostics" - ); - - client_conn - .sender - .send(Message::Request(goto_definition_request(2, &uri, 0, 40))) - .unwrap(); - let response = recv_response(&client_conn, 2); - assert!(response.error.is_none(), "Goto definition should succeed"); - let result: Option = - serde_json::from_value(response.result.expect("should have result")).unwrap(); - assert_eq!( - result, - Some(GotoDefinitionResponse::Scalar(lsp_types::Location { - uri: lib_uri, - range: lsp_types::Range { - start: Position { - line: 0, - character: 2, - }, - end: Position { - line: 0, - character: 5, - }, - }, - })) - ); - - client_conn - .sender - .send(Message::Request(shutdown_request(3))) - .unwrap(); - let _ = recv_response(&client_conn, 3); - client_conn - .sender - .send(Message::Notification(exit_notification())) - .unwrap(); - server_thread - 
.join() - .expect("Server thread should exit cleanly"); -} - -#[test] -fn test_document_highlight() { - let (client_conn, server_conn) = Connection::memory(); - let server_thread = run_server(server_conn); - - client_conn - .sender - .send(Message::Request(initialize_request(1))) - .unwrap(); - let _ = recv_response(&client_conn, 1); - client_conn - .sender - .send(Message::Notification(initialized_notification())) - .unwrap(); - - let uri = "file:///test/highlight.jsonnet"; - let text = "local x = 1; x + x"; - client_conn - .sender - .send(Message::Notification(did_open_notification(uri, text))) - .unwrap(); - - client_conn - .sender - .send(Message::Request(document_highlight_request(2, uri, 0, 13))) - .unwrap(); - let response = recv_response(&client_conn, 2); - assert!( - response.error.is_none(), - "Document highlight should succeed" - ); - - let highlights: Option> = - serde_json::from_value(response.result.expect("should have result")).unwrap(); - let highlights = highlights.unwrap_or_default(); - assert_eq!(highlights.len(), 3); - assert!( - highlights.iter().any(|highlight| { - highlight.range.start.character == 6 - && highlight.kind == Some(lsp_types::DocumentHighlightKind::WRITE) - }), - "Definition should be highlighted as WRITE" - ); - - client_conn - .sender - .send(Message::Request(shutdown_request(3))) - .unwrap(); - let _ = recv_response(&client_conn, 3); - client_conn - .sender - .send(Message::Notification(exit_notification())) - .unwrap(); - server_thread - .join() - .expect("Server thread should exit cleanly"); -} - -#[test] -fn test_inlay_hint() { - let (client_conn, server_conn) = Connection::memory(); - let server_thread = run_server(server_conn); - - client_conn - .sender - .send(Message::Request(initialize_request(1))) - .unwrap(); - let _ = recv_response(&client_conn, 1); - client_conn - .sender - .send(Message::Notification(initialized_notification())) - .unwrap(); - - let uri = "file:///test/inlay.jsonnet"; - let text = "local x = 1; 
x"; - client_conn - .sender - .send(Message::Notification(did_open_notification(uri, text))) - .unwrap(); - - client_conn - .sender - .send(Message::Request(inlay_hint_request(2, uri, 0, 0, 0, 50))) - .unwrap(); - let response = recv_response(&client_conn, 2); - assert!(response.error.is_none(), "Inlay hint should succeed"); - - let hints: Option> = - serde_json::from_value(response.result.expect("should have result")).unwrap(); - let hints = hints.unwrap_or_default(); - let hints_json = serde_json::to_value(&hints).expect("hints should serialize"); - let expected_json = serde_json::json!([{ - "position": { "line": 0, "character": 7 }, - "label": ": number", - "kind": 1, - "paddingLeft": true - }]); - assert_eq!(hints_json, expected_json); - - client_conn - .sender - .send(Message::Request(shutdown_request(3))) - .unwrap(); - let _ = recv_response(&client_conn, 3); - client_conn - .sender - .send(Message::Notification(exit_notification())) - .unwrap(); - server_thread - .join() - .expect("Server thread should exit cleanly"); -} - -#[test] -fn test_code_action_unused_variable_quickfix() { - let (client_conn, server_conn) = Connection::memory(); - let server_thread = run_server(server_conn); - - client_conn - .sender - .send(Message::Request(initialize_request(1))) - .unwrap(); - let _ = recv_response(&client_conn, 1); - client_conn - .sender - .send(Message::Notification(initialized_notification())) - .unwrap(); - - let uri = "file:///test/code-action.jsonnet"; - let text = "local x = 1; 42"; - client_conn - .sender - .send(Message::Notification(did_open_notification(uri, text))) - .unwrap(); - - let diagnostic = unused_variable_diagnostic(); - let actions = request_code_actions(&client_conn, 2, uri, vec![diagnostic.clone()], None); - assert_eq!( - actions, - Some(expected_unused_variable_quickfix(uri, diagnostic.clone())) - ); - - // Requesting source fix-all actions should return the document-level fix-all action. 
- let filtered_actions = request_code_actions( - &client_conn, - 3, - uri, - vec![diagnostic.clone()], - Some(vec![lsp_types::CodeActionKind::SOURCE_FIX_ALL]), - ); - assert_eq!( - filtered_actions, - Some(vec![lsp_types::CodeActionOrCommand::CodeAction( - lsp_types::CodeAction { - title: "Remove all unused bindings".to_string(), - kind: Some(lsp_types::CodeActionKind::SOURCE_FIX_ALL), - diagnostics: Some(vec![diagnostic]), - edit: Some(lsp_types::WorkspaceEdit { - changes: Some(std::collections::HashMap::from([( - uri.parse().unwrap(), - vec![lsp_types::TextEdit { - range: lsp_types::Range { - start: Position { - line: 0, - character: 0, - }, - end: Position { - line: 0, - character: 11, - }, - }, - new_text: String::new(), - }], - )])), - document_changes: None, - change_annotations: None, - }), - command: None, - is_preferred: Some(false), - disabled: None, - data: None, - }, - )]) - ); - - client_conn - .sender - .send(Message::Request(shutdown_request(4))) - .unwrap(); - let _ = recv_response(&client_conn, 4); - client_conn - .sender - .send(Message::Notification(exit_notification())) - .unwrap(); - server_thread - .join() - .expect("Server thread should exit cleanly"); -} - -#[test] -fn test_code_action_policy_updates_via_configuration_change() { - let (client_conn, server_conn) = Connection::memory(); - let server_thread = run_server(server_conn); - - client_conn - .sender - .send(Message::Request(initialize_request(1))) - .unwrap(); - let _ = recv_response(&client_conn, 1); - client_conn - .sender - .send(Message::Notification(initialized_notification())) - .unwrap(); - - let uri = "file:///test/code-action-policy.jsonnet"; - let text = "local x = import \"foo.libsonnet\"; 42"; - client_conn - .sender - .send(Message::Notification(did_open_notification(uri, text))) - .unwrap(); - - let diagnostic = unused_variable_diagnostic(); - let actions_before = request_code_actions(&client_conn, 2, uri, vec![diagnostic.clone()], None); - assert_eq!( - actions_before, 
- Some(expected_unused_import_binding_actions( - uri, - diagnostic.clone(), - )) - ); - - client_conn - .sender - .send(Message::Notification( - did_change_configuration_notification(serde_json::json!({ - "jsonnet": { - "codeActions": { - "removeUnused": "nonImportBindings" - } - } - })), - )) - .unwrap(); - - let actions_after = request_code_actions(&client_conn, 3, uri, vec![diagnostic.clone()], None); - assert_eq!( - actions_after, - Some(vec![lsp_types::CodeActionOrCommand::CodeAction( - lsp_types::CodeAction { - title: "Prefix `x` with `_`".to_string(), - kind: Some(lsp_types::CodeActionKind::QUICKFIX), - diagnostics: Some(vec![diagnostic.clone()]), - edit: Some(lsp_types::WorkspaceEdit { - changes: Some(std::collections::HashMap::from([( - uri.parse().unwrap(), - vec![lsp_types::TextEdit { - range: lsp_types::Range { - start: Position { - line: 0, - character: 6, - }, - end: Position { - line: 0, - character: 7, - }, - }, - new_text: "_x".to_string(), - }], - )])), - document_changes: None, - change_annotations: None, - }), - command: None, - is_preferred: Some(true), - disabled: None, - data: None, - }, - )]) - ); - - let fix_all_after = request_code_actions( - &client_conn, - 4, - uri, - vec![diagnostic], - Some(vec![lsp_types::CodeActionKind::SOURCE_FIX_ALL]), - ); - assert_eq!(fix_all_after, None); - - client_conn - .sender - .send(Message::Request(shutdown_request(5))) - .unwrap(); - let _ = recv_response(&client_conn, 5); - client_conn - .sender - .send(Message::Notification(exit_notification())) - .unwrap(); - server_thread - .join() - .expect("Server thread should exit cleanly"); -} - -#[test] -fn test_code_action_comment_policy_updates_via_configuration_change() { - let (client_conn, server_conn) = Connection::memory(); - let server_thread = run_server(server_conn); - - client_conn - .sender - .send(Message::Request(initialize_request(1))) - .unwrap(); - let _ = recv_response(&client_conn, 1); - client_conn - .sender - 
.send(Message::Notification(initialized_notification())) - .unwrap(); - - let uri = "file:///test/code-action-comment-policy.jsonnet"; - let text = "// heading\nlocal x = 1;\n42"; - client_conn - .sender - .send(Message::Notification(did_open_notification(uri, text))) - .unwrap(); - - let diagnostic = lsp_types::Diagnostic { - range: lsp_types::Range { - start: Position { - line: 1, - character: 6, - }, - end: Position { - line: 1, - character: 7, - }, - }, - severity: Some(lsp_types::DiagnosticSeverity::WARNING), - code: Some(lsp_types::NumberOrString::String( - "unused-variable".to_string(), - )), - code_description: None, - source: Some("jrsonnet-lint".to_string()), - message: "unused variable".to_string(), - related_information: None, - tags: None, - data: None, - }; - - let fix_all_before = request_code_actions( - &client_conn, - 2, - uri, - vec![diagnostic.clone()], - Some(vec![lsp_types::CodeActionKind::SOURCE_FIX_ALL]), - ); - assert_eq!( - fix_all_before, - Some(vec![lsp_types::CodeActionOrCommand::CodeAction( - lsp_types::CodeAction { - title: "Remove all unused bindings".to_string(), - kind: Some(lsp_types::CodeActionKind::SOURCE_FIX_ALL), - diagnostics: Some(vec![diagnostic.clone()]), - edit: Some(lsp_types::WorkspaceEdit { - changes: Some(std::collections::HashMap::from([( - uri.parse().unwrap(), - vec![lsp_types::TextEdit { - range: lsp_types::Range { - start: Position { - line: 1, - character: 0, - }, - end: Position { - line: 1, - character: 11, - }, - }, - new_text: String::new(), - }], - )])), - document_changes: None, - change_annotations: None, - }), - command: None, - is_preferred: Some(false), - disabled: None, - data: None, - }, - )]) - ); - - client_conn - .sender - .send(Message::Notification( - did_change_configuration_notification(serde_json::json!({ - "jsonnet": { - "codeActions": { - "removeUnusedComments": "above" - } - } - })), - )) - .unwrap(); - - let fix_all_after = request_code_actions( - &client_conn, - 3, - uri, - 
vec![diagnostic], - Some(vec![lsp_types::CodeActionKind::SOURCE_FIX_ALL]), - ); - assert_eq!( - fix_all_after, - Some(vec![lsp_types::CodeActionOrCommand::CodeAction( - lsp_types::CodeAction { - title: "Remove all unused bindings".to_string(), - kind: Some(lsp_types::CodeActionKind::SOURCE_FIX_ALL), - diagnostics: Some(vec![lsp_types::Diagnostic { - range: lsp_types::Range { - start: Position { - line: 1, - character: 6, - }, - end: Position { - line: 1, - character: 7, - }, - }, - severity: Some(lsp_types::DiagnosticSeverity::WARNING), - code: Some(lsp_types::NumberOrString::String( - "unused-variable".to_string(), - )), - code_description: None, - source: Some("jrsonnet-lint".to_string()), - message: "unused variable".to_string(), - related_information: None, - tags: None, - data: None, - }]), - edit: Some(lsp_types::WorkspaceEdit { - changes: Some(std::collections::HashMap::from([( - uri.parse().unwrap(), - vec![lsp_types::TextEdit { - range: lsp_types::Range { - start: Position { - line: 0, - character: 0, - }, - end: Position { - line: 1, - character: 11, - }, - }, - new_text: String::new(), - }], - )])), - document_changes: None, - change_annotations: None, - }), - command: None, - is_preferred: Some(false), - disabled: None, - data: None, - }, - )]) - ); - - client_conn - .sender - .send(Message::Request(shutdown_request(4))) - .unwrap(); - let _ = recv_response(&client_conn, 4); - client_conn - .sender - .send(Message::Notification(exit_notification())) - .unwrap(); - server_thread - .join() - .expect("Server thread should exit cleanly"); -} - -#[test] -fn test_execute_command_find_references() { - let (client_conn, server_conn) = Connection::memory(); - let server_thread = run_server(server_conn); - - client_conn - .sender - .send(Message::Request(initialize_request(1))) - .unwrap(); - let _ = recv_response(&client_conn, 1); - client_conn - .sender - .send(Message::Notification(initialized_notification())) - .unwrap(); - - let uri = 
"file:///test/find-refs-command.jsonnet"; - let text = "local x = 1; x + x"; - client_conn - .sender - .send(Message::Notification(did_open_notification(uri, text))) - .unwrap(); - let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); - - let refs = request_find_references_command(&client_conn, 2, uri, false); - assert_eq!(refs, expected_find_references(uri, false)); - - let refs_with_declaration = request_find_references_command(&client_conn, 3, uri, true); - assert_eq!(refs_with_declaration, expected_find_references(uri, true)); - - client_conn - .sender - .send(Message::Request(shutdown_request(4))) - .unwrap(); - let _ = recv_response(&client_conn, 4); - client_conn - .sender - .send(Message::Notification(exit_notification())) - .unwrap(); - server_thread - .join() - .expect("Server thread should exit cleanly"); -} - -#[test] -fn test_execute_command_show_errors() { - let (client_conn, server_conn) = Connection::memory(); - let server_thread = run_server(server_conn); - - client_conn - .sender - .send(Message::Request(initialize_request(1))) - .unwrap(); - let _ = recv_response(&client_conn, 1); - client_conn - .sender - .send(Message::Notification(initialized_notification())) - .unwrap(); - - let uri = "file:///test/show-errors-command.jsonnet"; - let text = "{ hello: }"; - client_conn - .sender - .send(Message::Notification(did_open_notification(uri, text))) - .unwrap(); - let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); - - client_conn - .sender - .send(Message::Request(execute_command_request( - 2, - "jrsonnet.showErrors", - vec![serde_json::Value::String(uri.to_string())], - ))) - .unwrap(); - let response = recv_response(&client_conn, 2); - assert!( - response.error.is_none(), - "showErrors command should succeed" - ); - let diagnostics: lsp_types::PublishDiagnosticsParams = - serde_json::from_value(response.result.expect("showErrors should return a result")) - .expect("showErrors result 
should be publish diagnostics payload"); - - assert_eq!( - diagnostics, - lsp_types::PublishDiagnosticsParams { - uri: uri.parse().unwrap(), - diagnostics: vec![lsp_types::Diagnostic { - range: lsp_types::Range { - start: Position { - line: 0, - character: 9, - }, - end: Position { - line: 0, - character: 9, - }, - }, - severity: Some(lsp_types::DiagnosticSeverity::ERROR), - code: Some(lsp_types::NumberOrString::String( - "syntax-error".to_string() - )), - code_description: None, - source: Some("jrsonnet".to_string()), - message: "expected expression".to_string(), - related_information: None, - tags: None, - data: None, - }], - version: Some(1), - } - ); - - client_conn - .sender - .send(Message::Request(shutdown_request(3))) - .unwrap(); - let _ = recv_response(&client_conn, 3); - client_conn - .sender - .send(Message::Notification(exit_notification())) - .unwrap(); - server_thread - .join() - .expect("Server thread should exit cleanly"); -} - -#[test] -fn test_execute_command_unknown_returns_invalid_params_error() { - let (client_conn, server_conn) = Connection::memory(); - let server_thread = run_server(server_conn); - - client_conn - .sender - .send(Message::Request(initialize_request(1))) - .unwrap(); - let _ = recv_response(&client_conn, 1); - client_conn - .sender - .send(Message::Notification(initialized_notification())) - .unwrap(); - - client_conn - .sender - .send(Message::Request(execute_command_request( - 2, - "jrsonnet.unknownCommand", - vec![], - ))) - .unwrap(); - let response = recv_response(&client_conn, 2); - assert_eq!(response.result, None); - let error = response - .error - .expect("unknown execute command should return an error"); - assert_eq!(error.code, lsp_server::ErrorCode::InvalidParams as i32); - assert_eq!( - error.message, - "Unknown execute command: jrsonnet.unknownCommand" - ); - assert_eq!(error.data, None); - - client_conn - .sender - .send(Message::Request(shutdown_request(3))) - .unwrap(); - let _ = recv_response(&client_conn, 
3); - client_conn - .sender - .send(Message::Notification(exit_notification())) - .unwrap(); - server_thread - .join() - .expect("Server thread should exit cleanly"); -} - -#[test] -fn test_eval_commands_use_tanka_import_roots() { - let tmp = TempDir::new().expect("tempdir should be created"); - let root = tmp.path(); - let env_dir = root.join("environments").join("dev"); - let vendor_dir = root.join("vendor"); - fs::create_dir_all(&env_dir).expect("environment directory should be created"); - fs::create_dir_all(&vendor_dir).expect("vendor directory should be created"); - fs::write(root.join("jsonnetfile.json"), "{}").expect("jsonnetfile should be written"); - - let lib_path = vendor_dir.join("lib.libsonnet"); - let main_path = env_dir.join("main.jsonnet"); - fs::write(&lib_path, "{ answer: 42 }").expect("vendor lib should be written"); - fs::write( - &main_path, - r#"local lib = import "lib.libsonnet"; lib.answer"#, - ) - .expect("main should be written"); - - let main_uri = file_uri(&main_path.canonicalize().expect("main should canonicalize")); - - let (client_conn, server_conn) = Connection::memory(); - let server_thread = run_server(server_conn); - - client_conn - .sender - .send(Message::Request(initialize_request_with_options( - 1, - serde_json::json!({ - "resolvePathsWithTanka": true - }), - ))) - .unwrap(); - let _ = recv_response(&client_conn, 1); - client_conn - .sender - .send(Message::Notification(initialized_notification())) - .unwrap(); - - client_conn - .sender - .send(Message::Request(execute_command_request( - 2, - "jrsonnet.evalFile", - vec![serde_json::Value::String(main_uri.clone())], - ))) - .unwrap(); - let eval_file_response = recv_response(&client_conn, 2); - assert!( - eval_file_response.error.is_none(), - "evalFile command should succeed" - ); - assert_eq!( - eval_file_response - .result - .expect("evalFile should return a result"), - serde_json::json!(42) - ); - - client_conn - .sender - .send(Message::Request(execute_command_request( - 
3, - "jrsonnet.evalExpression", - vec![ - serde_json::Value::String(r#"(import "lib.libsonnet").answer"#.to_string()), - serde_json::Value::String(main_uri), - ], - ))) - .unwrap(); - let eval_expression_response = recv_response(&client_conn, 3); - assert!( - eval_expression_response.error.is_none(), - "evalExpression command should succeed" - ); - assert_eq!( - eval_expression_response - .result - .expect("evalExpression should return a result"), - serde_json::json!(42) - ); - - client_conn - .sender - .send(Message::Request(shutdown_request(4))) - .unwrap(); - let _ = recv_response(&client_conn, 4); - client_conn - .sender - .send(Message::Notification(exit_notification())) - .unwrap(); - server_thread - .join() - .expect("Server thread should exit cleanly"); -} - -#[test] -fn test_semantic_tokens_range_request() { - let (client_conn, server_conn) = Connection::memory(); - let server_thread = run_server(server_conn); - - client_conn - .sender - .send(Message::Request(initialize_request(1))) - .unwrap(); - let _ = recv_response(&client_conn, 1); - client_conn - .sender - .send(Message::Notification(initialized_notification())) - .unwrap(); - - let uri = "file:///test/semantic-range.jsonnet"; - let text = "local first = 1\nlocal second = first + 1"; - client_conn - .sender - .send(Message::Notification(did_open_notification(uri, text))) - .unwrap(); - let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); - - client_conn - .sender - .send(Message::Request(semantic_tokens_range_request( - 2, uri, 1, 0, 1, 100, - ))) - .unwrap(); - - let response = recv_response(&client_conn, 2); - assert!( - response.error.is_none(), - "semantic tokens range request should succeed" - ); - let tokens: Option = - serde_json::from_value(response.result.expect("should have result")).unwrap(); - let tokens = tokens.expect("semantic tokens range should be returned"); - let expected = encode_semantic_tokens(vec![ - semantic_token(1, 0, 5, 
SemanticTokenTypeName::Keyword, &[]), - semantic_token(1, 6, 6, SemanticTokenTypeName::Variable, &[]), - semantic_token(1, 13, 1, SemanticTokenTypeName::Operator, &[]), - semantic_token(1, 21, 1, SemanticTokenTypeName::Operator, &[]), - semantic_token(1, 23, 1, SemanticTokenTypeName::Number, &[]), - ]); - assert_eq!(tokens, expected, "semantic tokens range mismatch"); - - client_conn - .sender - .send(Message::Request(shutdown_request(3))) - .unwrap(); - let _ = recv_response(&client_conn, 3); - client_conn - .sender - .send(Message::Notification(exit_notification())) - .unwrap(); - server_thread - .join() - .expect("Server thread should exit cleanly"); -} - -#[test] -fn test_code_lens_resolve_request() { - let (client_conn, server_conn) = Connection::memory(); - let server_thread = run_server(server_conn); - - client_conn - .sender - .send(Message::Request(initialize_request(1))) - .unwrap(); - let _ = recv_response(&client_conn, 1); - client_conn - .sender - .send(Message::Notification(initialized_notification())) - .unwrap(); - - let uri = "file:///test/code-lens-resolve.jsonnet"; - let text = "local x = 1; x + x"; - client_conn - .sender - .send(Message::Notification(did_open_notification(uri, text))) - .unwrap(); - let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); - - client_conn - .sender - .send(Message::Request(code_lens_request(2, uri))) - .unwrap(); - let response = recv_response(&client_conn, 2); - assert!(response.error.is_none(), "code lens request should succeed"); - let lenses: Vec = - serde_json::from_value(response.result.expect("should have result")).unwrap(); - assert!(!lenses.is_empty(), "expected code lenses for test document"); - let reference_lens = lenses - .into_iter() - .find(|lens| { - lens.command - .as_ref() - .is_some_and(|command| command.command == "jrsonnet.findReferences") - }) - .expect("expected reference count code lens"); - - client_conn - .sender - 
.send(Message::Request(code_lens_resolve_request( - 3, - reference_lens.clone(), - ))) - .unwrap(); - let response = recv_response(&client_conn, 3); - assert!( - response.error.is_none(), - "code lens resolve request should succeed" - ); - let resolved: lsp_types::CodeLens = - serde_json::from_value(response.result.expect("should have result")).unwrap(); - assert_eq!(resolved, reference_lens); - - client_conn - .sender - .send(Message::Request(shutdown_request(4))) - .unwrap(); - let _ = recv_response(&client_conn, 4); - client_conn - .sender - .send(Message::Notification(exit_notification())) - .unwrap(); - server_thread - .join() - .expect("Server thread should exit cleanly"); -} - -#[test] -fn test_cancel_request_returns_request_canceled_error() { - let (client_conn, server_conn) = Connection::memory(); - let server_thread = run_server(server_conn); - - client_conn - .sender - .send(Message::Request(initialize_request(1))) - .unwrap(); - let _ = recv_response(&client_conn, 1); - client_conn - .sender - .send(Message::Notification(initialized_notification())) - .unwrap(); - - let uri = "file:///test/cancel-code-lens.jsonnet"; - let mut text = String::new(); - for index in 0..20_000 { - writeln!(&mut text, "local value_{index} = {index};") - .expect("writing to String should succeed"); - } - text.push_str("value_19999\n"); - - client_conn - .sender - .send(Message::Notification(did_open_notification(uri, &text))) - .unwrap(); - - client_conn - .sender - .send(Message::Request(code_lens_request(2, uri))) - .unwrap(); - client_conn - .sender - .send(Message::Notification(cancel_request_notification(2))) - .unwrap(); - - let response = recv_response(&client_conn, 2); - assert_eq!(response.result, None); - let error = response - .error - .expect("cancelled request should return request-canceled error"); - assert_eq!(error.code, lsp_server::ErrorCode::RequestCanceled as i32); - assert_eq!(error.message, "Request canceled: textDocument/codeLens"); - 
assert_eq!(error.data, None); - - client_conn - .sender - .send(Message::Request(shutdown_request(3))) - .unwrap(); - let _ = recv_response(&client_conn, 3); - client_conn - .sender - .send(Message::Notification(exit_notification())) - .unwrap(); - server_thread - .join() - .expect("Server thread should exit cleanly"); -} - -#[test] -fn test_code_lens_resolve_invalid_params_returns_invalid_params_error() { - let (client_conn, server_conn) = Connection::memory(); - let server_thread = run_server(server_conn); - - client_conn - .sender - .send(Message::Request(initialize_request(1))) - .unwrap(); - let _ = recv_response(&client_conn, 1); - client_conn - .sender - .send(Message::Notification(initialized_notification())) - .unwrap(); - - client_conn - .sender - .send(Message::Request(Request::new( - 2.into(), - CodeLensResolve::METHOD.to_string(), - json!({"not": "a code lens"}), - ))) - .unwrap(); - - let response = recv_response(&client_conn, 2); - assert_eq!(response.result, None); - let error = response - .error - .expect("invalid code lens resolve params should return an error"); - assert_eq!(error.code, lsp_server::ErrorCode::InvalidParams as i32); - assert!( - error - .message - .starts_with("Invalid params for codeLens/resolve:"), - "unexpected error message: {}", - error.message - ); - assert_eq!(error.data, None); - - client_conn - .sender - .send(Message::Request(shutdown_request(3))) - .unwrap(); - let _ = recv_response(&client_conn, 3); - client_conn - .sender - .send(Message::Notification(exit_notification())) - .unwrap(); - server_thread - .join() - .expect("Server thread should exit cleanly"); -} - -#[test] -fn test_watched_file_refreshes_unopened_importers_for_references() { - let tmp = TempDir::new().expect("tempdir should be created"); - let lib1_path = tmp.path().join("lib1.jsonnet"); - let lib2_path = tmp.path().join("lib2.jsonnet"); - let main_path = tmp.path().join("main.jsonnet"); - - fs::write(&lib1_path, "local target = 1; 
target").expect("lib1 should be written"); - fs::write(&lib2_path, "local target = 2; target").expect("lib2 should be written"); - fs::write(&main_path, "local lib = import 'lib1.jsonnet'; lib.target") - .expect("main should be written"); - - let lib1_uri = file_uri(&lib1_path.canonicalize().expect("lib1 should canonicalize")); - let main_uri = file_uri(&main_path.canonicalize().expect("main should canonicalize")); - - let (client_conn, server_conn) = Connection::memory(); - let server_thread = run_server(server_conn); - - // Initialize - client_conn - .sender - .send(Message::Request(initialize_request(1))) - .unwrap(); - let _ = client_conn.receiver.recv().unwrap(); - client_conn - .sender - .send(Message::Notification(initialized_notification())) - .unwrap(); - - // Open lib1 (current document for references requests) - client_conn - .sender - .send(Message::Notification(did_open_notification( - &lib1_uri, - "local target = 1; target", - ))) - .unwrap(); - let _ = client_conn.receiver.recv().unwrap(); // initial diagnostics - - // Index unopened main file via watched-files notification - client_conn - .sender - .send(Message::Notification( - did_change_watched_files_notification(vec![FileEvent { - uri: main_uri.parse().unwrap(), - typ: FileChangeType::CREATED, - }]), - )) - .unwrap(); - - client_conn - .sender - .send(Message::Request(execute_command_request( - 20, - "jrsonnet.findTransitiveImporters", - vec![serde_json::Value::String(lib1_uri.clone())], - ))) - .unwrap(); - let response = recv_response(&client_conn, 20); - assert!(response.error.is_none(), "Command should succeed"); - let command_result = response.result.expect("command should return result"); - let importers = command_result["transitiveImporters"] - .as_array() - .expect("transitiveImporters should be an array") - .iter() - .filter_map(|value| value.as_str()) - .collect::>(); - assert!( - importers.iter().any(|uri| *uri == main_uri), - "Expected main to be indexed as lib1 importer, got: 
{importers:?}" - ); - - // Query references to `target` definition in lib1 (line 0, col 6) - client_conn - .sender - .send(Message::Request(references_request( - 2, &lib1_uri, 0, 6, false, - ))) - .unwrap(); - let response = recv_response(&client_conn, 2); - assert!(response.error.is_none(), "References should succeed"); - let refs: Option> = - serde_json::from_value(response.result.expect("should have result")).unwrap(); - let refs = refs.unwrap_or_default(); - assert!( - refs.iter() - .any(|location| location.uri.to_string() == main_uri), - "Expected cross-file reference from unopened main file, got: {refs:?}" - ); - - // Query references from a non-definition reference in lib1 (line 0, col 18) - client_conn - .sender - .send(Message::Request(references_request( - 21, &lib1_uri, 0, 18, false, - ))) - .unwrap(); - let response = recv_response(&client_conn, 21); - assert!(response.error.is_none(), "References should succeed"); - let refs: Option> = - serde_json::from_value(response.result.expect("should have result")).unwrap(); - let refs = refs.unwrap_or_default(); - assert!( - refs.iter() - .any(|location| location.uri.to_string() == main_uri), - "Expected cross-file reference from unopened main file when queried from a local reference, got: {refs:?}" - ); - - // Update main on disk to import lib2 instead of lib1 - fs::write(&main_path, "local lib = import 'lib2.jsonnet'; lib.target") - .expect("main should be rewritten"); - - client_conn - .sender - .send(Message::Notification( - did_change_watched_files_notification(vec![FileEvent { - uri: main_uri.parse().unwrap(), - typ: FileChangeType::CHANGED, - }]), - )) - .unwrap(); - - // References to lib1 target should no longer include main - client_conn - .sender - .send(Message::Request(references_request( - 3, &lib1_uri, 0, 6, false, - ))) - .unwrap(); - let response = recv_response(&client_conn, 3); - assert!(response.error.is_none(), "References should succeed"); - let refs: Option> = - 
serde_json::from_value(response.result.expect("should have result")).unwrap(); - let refs = refs.unwrap_or_default(); - assert!( - !refs - .iter() - .any(|location| location.uri.to_string() == main_uri), - "Main should no longer reference lib1 after watched-file update" - ); - - // Shutdown - client_conn - .sender - .send(Message::Request(shutdown_request(4))) - .unwrap(); - let _ = recv_response(&client_conn, 4); - client_conn - .sender - .send(Message::Notification(exit_notification())) - .unwrap(); - server_thread - .join() - .expect("Server thread should exit cleanly"); -} - -#[test] -fn test_initialize_bootstraps_workspace_import_graph() { - let tmp = TempDir::new().expect("tempdir should be created"); - let lib_path = tmp.path().join("lib.jsonnet"); - let main_path = tmp.path().join("main.jsonnet"); - fs::write(&lib_path, "{ value: 1 }").expect("lib should be written"); - fs::write(&main_path, "local lib = import 'lib.jsonnet'; lib.value") - .expect("main should be written"); - - let root_uri = file_uri(&tmp.path().canonicalize().expect("root should canonicalize")); - let lib_uri = file_uri(&lib_path.canonicalize().expect("lib should canonicalize")); - let main_uri = file_uri(&main_path.canonicalize().expect("main should canonicalize")); - - let (client_conn, server_conn) = Connection::memory(); - let server_thread = run_server(server_conn); - - client_conn - .sender - .send(Message::Request(initialize_request_with_root_uri( - 1, &root_uri, - ))) - .unwrap(); - let _ = recv_response(&client_conn, 1); - client_conn - .sender - .send(Message::Notification(initialized_notification())) - .unwrap(); - - let expected_result = json!({ - "file": lib_uri, - "transitiveImporters": [main_uri], - }); - let mut actual_result = serde_json::Value::Null; - for request_id in 2..=42 { - client_conn - .sender - .send(Message::Request(execute_command_request( - request_id, - "jrsonnet.findTransitiveImporters", - vec![serde_json::Value::String( - expected_result["file"] - 
.as_str() - .expect("expected file URI should be a string") - .to_string(), - )], - ))) - .unwrap(); - let response = recv_response(&client_conn, request_id); - assert!( - response.error.is_none(), - "findTransitiveImporters command should succeed" - ); - actual_result = response.result.expect("command should return result"); - if actual_result == expected_result { - break; - } - thread::sleep(Duration::from_millis(25)); - } - assert_eq!(actual_result, expected_result); - - client_conn - .sender - .send(Message::Request(shutdown_request(3))) - .unwrap(); - let _ = recv_response(&client_conn, 3); - client_conn - .sender - .send(Message::Notification(exit_notification())) - .unwrap(); - server_thread - .join() - .expect("Server thread should exit cleanly"); -} - -#[test] -fn test_workspace_symbol_includes_unopened_workspace_files() { - let tmp = TempDir::new().expect("tempdir should be created"); - let closed_path = tmp.path().join("closed.jsonnet"); - let closed_text = "local workspaceOnly=1;workspaceOnly"; - fs::write(&closed_path, closed_text).expect("closed file should be written"); - - let root_uri = file_uri(&tmp.path().canonicalize().expect("root should canonicalize")); - let closed_uri = file_uri( - &closed_path - .canonicalize() - .expect("closed should canonicalize"), - ); - - let (client_conn, server_conn) = Connection::memory(); - let server_thread = run_server(server_conn); - - client_conn - .sender - .send(Message::Request(initialize_request_with_root_uri( - 1, &root_uri, - ))) - .unwrap(); - let _ = recv_response(&client_conn, 1); - client_conn - .sender - .send(Message::Notification(initialized_notification())) - .unwrap(); - - let symbols = request_workspace_symbols(&client_conn, 2, "workspaceOnly"); - - let expected_doc = jrsonnet_lsp_document::Document::new( - closed_text.to_string(), - jrsonnet_lsp_document::DocVersion::new(0), - ); - let expected_uri: lsp_types::Uri = closed_uri.parse().unwrap(); - let expected_symbols = 
Some(jrsonnet_lsp_handlers::workspace_symbols_for_document( - &expected_doc, - &expected_uri, - "workspaceOnly", - )); - let mut actual_symbols = symbols; - if actual_symbols != expected_symbols { - for request_id in 3..=43 { - actual_symbols = request_workspace_symbols(&client_conn, request_id, "workspaceOnly"); - if actual_symbols == expected_symbols { - break; - } - thread::sleep(Duration::from_millis(25)); - } - } - assert_eq!(actual_symbols, expected_symbols); - - client_conn - .sender - .send(Message::Request(shutdown_request(3))) - .unwrap(); - let _ = recv_response(&client_conn, 3); - client_conn - .sender - .send(Message::Notification(exit_notification())) - .unwrap(); - server_thread - .join() - .expect("Server thread should exit cleanly"); -} - -#[test] -fn test_workspace_symbol_ranks_exact_prefix_then_substring() { - let (client_conn, server_conn) = Connection::memory(); - let server_thread = run_server(server_conn); - - client_conn - .sender - .send(Message::Request(initialize_request(1))) - .unwrap(); - let _ = recv_response(&client_conn, 1); - client_conn - .sender - .send(Message::Notification(initialized_notification())) - .unwrap(); - - let uri = "file:///test/workspace-symbol-ranking.jsonnet"; - let text = - "local needle = 1; local has_needle_inside = 2; local needlePrefix = 3; local zneedle = 4; needle"; - client_conn - .sender - .send(Message::Notification(did_open_notification(uri, text))) - .unwrap(); - let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); - - let symbols = request_workspace_symbols(&client_conn, 2, "needle"); - let expected_doc = jrsonnet_lsp_document::Document::new( - text.to_string(), - jrsonnet_lsp_document::DocVersion::new(1), - ); - let expected_uri: lsp_types::Uri = uri.parse().unwrap(); - let expected_all = jrsonnet_lsp_handlers::workspace_symbols_for_document( - &expected_doc, - &expected_uri, - "needle", - ); - let expected_symbols = vec![ - expected_all - .iter() - .find(|symbol| 
symbol.name == "needle") - .expect("expected exact match symbol") - .clone(), - expected_all - .iter() - .find(|symbol| symbol.name == "needlePrefix") - .expect("expected prefix match symbol") - .clone(), - expected_all - .iter() - .find(|symbol| symbol.name == "zneedle") - .expect("expected shorter substring symbol") - .clone(), - expected_all - .iter() - .find(|symbol| symbol.name == "has_needle_inside") - .expect("expected longer substring symbol") - .clone(), - ]; - assert_eq!(symbols, Some(expected_symbols)); - - client_conn - .sender - .send(Message::Request(shutdown_request(3))) - .unwrap(); - let _ = recv_response(&client_conn, 3); - client_conn - .sender - .send(Message::Notification(exit_notification())) - .unwrap(); - server_thread - .join() - .expect("Server thread should exit cleanly"); -} - -#[test] -fn test_workspace_symbol_caps_results_with_deterministic_order() { - let (client_conn, server_conn) = Connection::memory(); - let server_thread = run_server(server_conn); - - client_conn - .sender - .send(Message::Request(initialize_request(1))) - .unwrap(); - let _ = recv_response(&client_conn, 1); - client_conn - .sender - .send(Message::Notification(initialized_notification())) - .unwrap(); - - let uri = "file:///test/workspace-symbol-cap.jsonnet"; - let text = { - let locals = (0..140) - .rev() - .map(|idx| format!("local capsymbol{idx:03} = {idx};")) - .collect::>() - .join(" "); - format!("{locals} capsymbol000") - }; - client_conn - .sender - .send(Message::Notification(did_open_notification(uri, &text))) - .unwrap(); - let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); - - let symbols = request_workspace_symbols(&client_conn, 2, "capsymbol"); - let expected_doc = - jrsonnet_lsp_document::Document::new(text, jrsonnet_lsp_document::DocVersion::new(1)); - let expected_uri: lsp_types::Uri = uri.parse().unwrap(); - let expected_all = jrsonnet_lsp_handlers::workspace_symbols_for_document( - &expected_doc, - 
&expected_uri, - "capsymbol", - ); - let expected_symbols = (0..128) - .map(|idx| format!("capsymbol{idx:03}")) - .map(|name| { - expected_all - .iter() - .find(|symbol| symbol.name == name) - .expect("expected symbol to exist") - .clone() - }) - .collect::>(); - assert_eq!(symbols, Some(expected_symbols)); - - client_conn - .sender - .send(Message::Request(shutdown_request(3))) - .unwrap(); - let _ = recv_response(&client_conn, 3); - client_conn - .sender - .send(Message::Notification(exit_notification())) - .unwrap(); - server_thread - .join() - .expect("Server thread should exit cleanly"); -} - -#[test] -fn test_find_transitive_importers_returns_sorted_uris() { - let tmp = TempDir::new().expect("tempdir should be created"); - let lib_path = tmp.path().join("lib.jsonnet"); - let a_path = tmp.path().join("a.jsonnet"); - let b_path = tmp.path().join("b.jsonnet"); - fs::write(&lib_path, "{ target: 1 }").expect("lib should be written"); - fs::write(&a_path, "local lib = import 'lib.jsonnet'; lib.target") - .expect("a should be written"); - fs::write(&b_path, "local lib = import 'lib.jsonnet'; lib.target") - .expect("b should be written"); - - let lib_uri = file_uri(&lib_path.canonicalize().expect("lib should canonicalize")); - let a_uri = file_uri(&a_path.canonicalize().expect("a should canonicalize")); - let b_uri = file_uri(&b_path.canonicalize().expect("b should canonicalize")); - - let (client_conn, server_conn) = Connection::memory(); - let server_thread = run_server(server_conn); - - client_conn - .sender - .send(Message::Request(initialize_request(1))) - .unwrap(); - let _ = recv_response(&client_conn, 1); - client_conn - .sender - .send(Message::Notification(initialized_notification())) - .unwrap(); - - for (uri, text) in [ - (&lib_uri, "{ target: 1 }"), - (&a_uri, "local lib = import 'lib.jsonnet'; lib.target"), - (&b_uri, "local lib = import 'lib.jsonnet'; lib.target"), - ] { - client_conn - .sender - .send(Message::Notification(did_open_notification(uri, 
text))) - .unwrap(); - let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); - } - - client_conn - .sender - .send(Message::Request(execute_command_request( - 2, - "jrsonnet.findTransitiveImporters", - vec![serde_json::Value::String(lib_uri.clone())], - ))) - .unwrap(); - let response = recv_response(&client_conn, 2); - assert!(response.error.is_none(), "Command should succeed"); - assert_eq!( - response.result.expect("command should return result"), - json!({ - "file": lib_uri, - "transitiveImporters": [a_uri, b_uri], - }) - ); - - client_conn - .sender - .send(Message::Request(shutdown_request(3))) - .unwrap(); - let _ = recv_response(&client_conn, 3); - client_conn - .sender - .send(Message::Notification(exit_notification())) - .unwrap(); - server_thread - .join() - .expect("Server thread should exit cleanly"); -} - -#[test] -fn test_did_close_preserves_import_graph_for_references() { - let tmp = TempDir::new().expect("tempdir should be created"); - let lib_path = tmp.path().join("lib.jsonnet"); - let main_path = tmp.path().join("main.jsonnet"); - - let lib_text = "local target = 1; target"; - let main_text = "local lib = import 'lib.jsonnet'; lib.target"; - fs::write(&lib_path, lib_text).expect("lib should be written"); - fs::write(&main_path, main_text).expect("main should be written"); - - let lib_uri = file_uri(&lib_path.canonicalize().expect("lib should canonicalize")); - let main_uri = file_uri(&main_path.canonicalize().expect("main should canonicalize")); - - let (client_conn, server_conn) = Connection::memory(); - let server_thread = run_server(server_conn); - - client_conn - .sender - .send(Message::Request(initialize_request(1))) - .unwrap(); - let _ = recv_response(&client_conn, 1); - client_conn - .sender - .send(Message::Notification(initialized_notification())) - .unwrap(); - - client_conn - .sender - .send(Message::Notification(did_open_notification( - &lib_uri, lib_text, - ))) - .unwrap(); - let _ = 
recv_publish_diagnostics_for_uri(&client_conn, &lib_uri, Duration::from_secs(5)); - - client_conn - .sender - .send(Message::Notification(did_open_notification( - &main_uri, main_text, - ))) - .unwrap(); - let _ = recv_publish_diagnostics_for_uri(&client_conn, &main_uri, Duration::from_secs(5)); - - client_conn - .sender - .send(Message::Notification(did_close_notification(&main_uri))) - .unwrap(); - let closed_diagnostics = - recv_publish_diagnostics_for_uri(&client_conn, &main_uri, Duration::from_secs(5)); - assert_eq!( - closed_diagnostics, - lsp_types::PublishDiagnosticsParams { - uri: main_uri.parse().unwrap(), - diagnostics: Vec::new(), - version: None, - } - ); - - client_conn - .sender - .send(Message::Request(execute_command_request( - 2, - "jrsonnet.findTransitiveImporters", - vec![serde_json::Value::String(lib_uri.clone())], - ))) - .unwrap(); - let response = recv_response(&client_conn, 2); - assert!(response.error.is_none(), "Command should succeed"); - let transitive_importers = response.result.expect("command should return result"); - assert_eq!( - transitive_importers, - json!({ - "file": lib_uri, - "transitiveImporters": [main_uri], - }) - ); - - client_conn - .sender - .send(Message::Request(references_request( - 3, &lib_uri, 0, 6, false, - ))) - .unwrap(); - let response = recv_response(&client_conn, 3); - assert!(response.error.is_none(), "References should succeed"); - let references: Option> = - serde_json::from_value(response.result.expect("should have result")).unwrap(); - let references = references.unwrap_or_default(); - assert_eq!( - references, - vec![location(&lib_uri, 18, 24), location(&main_uri, 38, 44)] - ); - - client_conn - .sender - .send(Message::Request(shutdown_request(4))) - .unwrap(); - let _ = recv_response(&client_conn, 4); - client_conn - .sender - .send(Message::Notification(exit_notification())) - .unwrap(); - server_thread - .join() - .expect("Server thread should exit cleanly"); -} - -#[test] -fn 
test_cross_file_rename_updates_definition_and_importers() { - let tmp = TempDir::new().expect("tempdir should be created"); - let lib_path = tmp.path().join("lib.jsonnet"); - let main_path = tmp.path().join("main.jsonnet"); - - fs::write(&lib_path, "{ helper: function(x) x * 2 }").expect("lib should be written"); - fs::write( - &main_path, - "local lib = import 'lib.jsonnet'; lib.helper(1) + lib.helper(2)", - ) - .expect("main should be written"); - - let lib_uri = file_uri(&lib_path.canonicalize().expect("lib should canonicalize")); - let main_uri = file_uri(&main_path.canonicalize().expect("main should canonicalize")); - - let (client_conn, server_conn) = Connection::memory(); - let server_thread = run_server(server_conn); - - client_conn - .sender - .send(Message::Request(initialize_request(1))) - .unwrap(); - let _ = recv_response(&client_conn, 1); - client_conn - .sender - .send(Message::Notification(initialized_notification())) - .unwrap(); - - client_conn - .sender - .send(Message::Notification(did_open_notification( - &lib_uri, - "{ helper: function(x) x * 2 }", - ))) - .unwrap(); - - client_conn - .sender - .send(Message::Notification( - did_change_watched_files_notification(vec![FileEvent { - uri: main_uri.parse().unwrap(), - typ: FileChangeType::CREATED, - }]), - )) - .unwrap(); - - client_conn - .sender - .send(Message::Request(rename_request(2, &lib_uri, 0, 2, "util"))) - .unwrap(); - let response = recv_response(&client_conn, 2); - assert!(response.error.is_none(), "Rename should succeed"); - - let edit: Option = - serde_json::from_value(response.result.expect("should have result")).unwrap(); - let edit = edit.expect("rename should produce workspace edit"); - let changes = edit.changes.expect("workspace edit should include changes"); - - let lib_edits = changes - .iter() - .find_map(|(uri, edits)| (uri.as_str() == lib_uri).then_some(edits)) - .expect("lib file should be edited"); - assert_eq!(lib_edits.len(), 1, "lib should have one definition 
rename"); - assert_eq!(lib_edits[0].new_text, "util"); - - let main_edits = changes - .iter() - .find_map(|(uri, edits)| (uri.as_str() == main_uri).then_some(edits)) - .expect("main importer should be edited"); - assert_eq!( - main_edits.len(), - 2, - "main should rename both field references" - ); - assert!(main_edits.iter().all(|edit| edit.new_text == "util")); - - client_conn - .sender - .send(Message::Request(shutdown_request(3))) - .unwrap(); - let _ = recv_response(&client_conn, 3); - client_conn - .sender - .send(Message::Notification(exit_notification())) - .unwrap(); - server_thread - .join() - .expect("Server thread should exit cleanly"); -} - -#[test] -fn test_cross_file_references_resolve_jpath_importers() { - let tmp = TempDir::new().expect("tempdir should be created"); - let jpath_dir = tmp.path().join("jpath"); - let workspace_dir = tmp.path().join("workspace"); - fs::create_dir_all(&jpath_dir).expect("jpath directory should be created"); - fs::create_dir_all(&workspace_dir).expect("workspace directory should be created"); - - let lib_path = jpath_dir.join("lib.libsonnet"); - let main_path = workspace_dir.join("main.jsonnet"); - let lib_text = "local target = 1; target"; - let main_text = r#"local lib = import "lib.libsonnet"; lib.target"#; - fs::write(&lib_path, lib_text).expect("lib should be written"); - fs::write(&main_path, main_text).expect("main should be written"); - - let lib_uri = file_uri(&lib_path.canonicalize().expect("lib should canonicalize")); - let main_uri = file_uri(&main_path.canonicalize().expect("main should canonicalize")); - - let (client_conn, server_conn) = Connection::memory(); - let server_thread = run_server(server_conn); - - client_conn - .sender - .send(Message::Request(initialize_request_with_options( - 1, - json!({ - "jpath": [jpath_dir.to_string_lossy().to_string()], - }), - ))) - .unwrap(); - let _ = recv_response(&client_conn, 1); - client_conn - .sender - .send(Message::Notification(initialized_notification())) - 
.unwrap(); - - client_conn - .sender - .send(Message::Notification(did_open_notification( - &lib_uri, lib_text, - ))) - .unwrap(); - let _ = recv_publish_diagnostics_for_uri(&client_conn, &lib_uri, Duration::from_secs(5)); - - client_conn - .sender - .send(Message::Notification(did_open_notification( - &main_uri, main_text, - ))) - .unwrap(); - let _ = recv_publish_diagnostics_for_uri(&client_conn, &main_uri, Duration::from_secs(5)); - - client_conn - .sender - .send(Message::Request(references_request( - 2, &lib_uri, 0, 6, false, - ))) - .unwrap(); - let response = recv_response(&client_conn, 2); - assert!(response.error.is_none(), "References should succeed"); - let references: Option> = - serde_json::from_value(response.result.expect("should have result")).unwrap(); - assert_eq!( - references.unwrap_or_default(), - vec![location(&lib_uri, 18, 24), location(&main_uri, 40, 46)] - ); - - client_conn - .sender - .send(Message::Request(shutdown_request(3))) - .unwrap(); - let _ = recv_response(&client_conn, 3); - client_conn - .sender - .send(Message::Notification(exit_notification())) - .unwrap(); - server_thread - .join() - .expect("Server thread should exit cleanly"); -} - -#[test] -fn test_cross_file_rename_updates_jpath_importers() { - let tmp = TempDir::new().expect("tempdir should be created"); - let jpath_dir = tmp.path().join("jpath"); - let workspace_dir = tmp.path().join("workspace"); - fs::create_dir_all(&jpath_dir).expect("jpath directory should be created"); - fs::create_dir_all(&workspace_dir).expect("workspace directory should be created"); - - let lib_path = jpath_dir.join("lib.libsonnet"); - let main_path = workspace_dir.join("main.jsonnet"); - let lib_text = "{ helper: function(x) x * 2 }"; - let main_text = r#"local lib = import "lib.libsonnet"; lib.helper(1) + lib.helper(2)"#; - fs::write(&lib_path, lib_text).expect("lib should be written"); - fs::write(&main_path, main_text).expect("main should be written"); - - let lib_uri = 
file_uri(&lib_path.canonicalize().expect("lib should canonicalize")); - let main_uri = file_uri(&main_path.canonicalize().expect("main should canonicalize")); - - let (client_conn, server_conn) = Connection::memory(); - let server_thread = run_server(server_conn); - - client_conn - .sender - .send(Message::Request(initialize_request_with_options( - 1, - json!({ - "jpath": [jpath_dir.to_string_lossy().to_string()], - }), - ))) - .unwrap(); - let _ = recv_response(&client_conn, 1); - client_conn - .sender - .send(Message::Notification(initialized_notification())) - .unwrap(); - - client_conn - .sender - .send(Message::Notification(did_open_notification( - &lib_uri, lib_text, - ))) - .unwrap(); - let _ = recv_publish_diagnostics_for_uri(&client_conn, &lib_uri, Duration::from_secs(5)); - - client_conn - .sender - .send(Message::Notification(did_open_notification( - &main_uri, main_text, - ))) - .unwrap(); - let _ = recv_publish_diagnostics_for_uri(&client_conn, &main_uri, Duration::from_secs(5)); - - client_conn - .sender - .send(Message::Request(rename_request(2, &lib_uri, 0, 2, "util"))) - .unwrap(); - let response = recv_response(&client_conn, 2); - assert!(response.error.is_none(), "Rename should succeed"); - - let edit: Option = - serde_json::from_value(response.result.expect("should have result")).unwrap(); - let edit = edit.expect("rename should produce workspace edit"); - - let mut expected_changes = std::collections::HashMap::new(); - expected_changes.insert( - lib_uri.parse().unwrap(), - vec![lsp_types::TextEdit { - range: lsp_types::Range { - start: Position { - line: 0, - character: 2, - }, - end: Position { - line: 0, - character: 8, - }, - }, - new_text: "util".to_string(), - }], - ); - expected_changes.insert( - main_uri.parse().unwrap(), - vec![ - lsp_types::TextEdit { - range: lsp_types::Range { - start: Position { - line: 0, - character: 40, - }, - end: Position { - line: 0, - character: 46, - }, - }, - new_text: "util".to_string(), - }, - 
lsp_types::TextEdit { - range: lsp_types::Range { - start: Position { - line: 0, - character: 56, - }, - end: Position { - line: 0, - character: 62, - }, - }, - new_text: "util".to_string(), - }, - ], - ); - assert_eq!( - edit, - lsp_types::WorkspaceEdit { - changes: Some(expected_changes), - document_changes: None, - change_annotations: None, - } - ); - - client_conn - .sender - .send(Message::Request(shutdown_request(3))) - .unwrap(); - let _ = recv_response(&client_conn, 3); - client_conn - .sender - .send(Message::Notification(exit_notification())) - .unwrap(); - server_thread - .join() - .expect("Server thread should exit cleanly"); -} - -#[test] -fn test_navigation_resolves_jpath_imports_from_graph() { - let tmp = TempDir::new().expect("tempdir should be created"); - let jpath_dir = tmp.path().join("jpath"); - let workspace_dir = tmp.path().join("workspace"); - fs::create_dir_all(&jpath_dir).expect("jpath directory should be created"); - fs::create_dir_all(&workspace_dir).expect("workspace directory should be created"); - - let lib_path = jpath_dir.join("lib.libsonnet"); - let main_path = workspace_dir.join("main.jsonnet"); - let lib_text = "{ helper: 42 }"; - let main_text = r#"local lib = import "lib.libsonnet"; lib.helper"#; - fs::write(&lib_path, lib_text).expect("lib should be written"); - fs::write(&main_path, main_text).expect("main should be written"); - - let lib_uri = file_uri(&lib_path.canonicalize().expect("lib should canonicalize")); - let main_uri = file_uri(&main_path.canonicalize().expect("main should canonicalize")); - - let (client_conn, server_conn) = Connection::memory(); - let server_thread = run_server(server_conn); - - client_conn - .sender - .send(Message::Request(initialize_request_with_options( - 1, - json!({ - "jpath": [jpath_dir.to_string_lossy().to_string()], - }), - ))) - .unwrap(); - let _ = recv_response(&client_conn, 1); - client_conn - .sender - .send(Message::Notification(initialized_notification())) - .unwrap(); - - 
client_conn - .sender - .send(Message::Notification(did_open_notification( - &lib_uri, lib_text, - ))) - .unwrap(); - let _ = recv_publish_diagnostics_for_uri(&client_conn, &lib_uri, Duration::from_secs(5)); - - client_conn - .sender - .send(Message::Notification(did_open_notification( - &main_uri, main_text, - ))) - .unwrap(); - let _ = recv_publish_diagnostics_for_uri(&client_conn, &main_uri, Duration::from_secs(5)); - - client_conn - .sender - .send(Message::Request(goto_definition_request( - 2, &main_uri, 0, 22, - ))) - .unwrap(); - let import_definition_response = recv_response(&client_conn, 2); - assert!( - import_definition_response.error.is_none(), - "goto definition on import path should succeed" - ); - let import_definition: Option = serde_json::from_value( - import_definition_response - .result - .expect("should have goto definition result"), - ) - .unwrap(); - assert_eq!( - import_definition, - Some(GotoDefinitionResponse::Scalar(lsp_types::Location { - uri: lib_uri.parse().expect("lib URI should parse"), - range: lsp_types::Range::default(), - })) - ); - - client_conn - .sender - .send(Message::Request(goto_definition_request( - 3, &main_uri, 0, 40, - ))) - .unwrap(); - let definition_response = recv_response(&client_conn, 3); - assert!( - definition_response.error.is_none(), - "goto definition should succeed" - ); - let definition: Option = serde_json::from_value( - definition_response - .result - .expect("should have definition result"), - ) - .unwrap(); - assert_eq!( - definition, - Some(GotoDefinitionResponse::Scalar(location(&lib_uri, 2, 8))) - ); - - client_conn - .sender - .send(Message::Request(goto_declaration_request( - 4, &main_uri, 0, 40, - ))) - .unwrap(); - let declaration_response = recv_response(&client_conn, 4); - assert!( - declaration_response.error.is_none(), - "goto declaration should succeed" - ); - let declaration: Option = serde_json::from_value( - declaration_response - .result - .expect("should have declaration result"), - ) - 
.unwrap(); - assert_eq!( - declaration, - Some(GotoDefinitionResponse::Scalar(location(&lib_uri, 2, 8))) - ); - - client_conn - .sender - .send(Message::Request(goto_type_definition_request( - 5, &main_uri, 0, 40, - ))) - .unwrap(); - let type_definition_response = recv_response(&client_conn, 5); - assert!( - type_definition_response.error.is_none(), - "goto type definition should succeed" - ); - let type_definition: Option = serde_json::from_value( - type_definition_response - .result - .expect("should have type definition result"), - ) - .unwrap(); - assert_eq!( - type_definition, - Some(GotoDefinitionResponse::Scalar(location(&lib_uri, 2, 8))) - ); - - client_conn - .sender - .send(Message::Request(shutdown_request(6))) - .unwrap(); - let _ = recv_response(&client_conn, 6); - client_conn - .sender - .send(Message::Notification(exit_notification())) - .unwrap(); - server_thread - .join() - .expect("Server thread should exit cleanly"); -} +#[path = "integration_test/features.rs"] +mod features; +#[path = "integration_test/lifecycle.rs"] +mod lifecycle; +#[path = "integration_test/navigation.rs"] +mod navigation; +#[path = "integration_test/workspace_cross_file.rs"] +mod workspace_cross_file; diff --git a/crates/jrsonnet-lsp/tests/integration_test/features.rs b/crates/jrsonnet-lsp/tests/integration_test/features.rs new file mode 100644 index 00000000..07e47fb9 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/integration_test/features.rs @@ -0,0 +1,998 @@ +use super::*; + +#[test] +fn test_document_highlight() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + let uri = "file:///test/highlight.jsonnet"; + let text = "local x = 1; x + x"; + client_conn + .sender + 
.send(Message::Notification(did_open_notification(uri, text))) + .unwrap(); + + client_conn + .sender + .send(Message::Request(document_highlight_request(2, uri, 0, 13))) + .unwrap(); + let response = recv_response(&client_conn, 2); + assert!( + response.error.is_none(), + "Document highlight should succeed" + ); + + let highlights: Option> = + serde_json::from_value(response.result.expect("should have result")).unwrap(); + let highlights = highlights.unwrap_or_default(); + assert_eq!(highlights.len(), 3); + assert!( + highlights.iter().any(|highlight| { + highlight.range.start.character == 6 + && highlight.kind == Some(lsp_types::DocumentHighlightKind::WRITE) + }), + "Definition should be highlighted as WRITE" + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(3))) + .unwrap(); + let _ = recv_response(&client_conn, 3); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_inlay_hint() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + let uri = "file:///test/inlay.jsonnet"; + let text = "local x = 1; x"; + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text))) + .unwrap(); + + client_conn + .sender + .send(Message::Request(inlay_hint_request(2, uri, 0, 0, 0, 50))) + .unwrap(); + let response = recv_response(&client_conn, 2); + assert!(response.error.is_none(), "Inlay hint should succeed"); + + let hints: Option> = + serde_json::from_value(response.result.expect("should have result")).unwrap(); + let hints = hints.unwrap_or_default(); + let hints_json = serde_json::to_value(&hints).expect("hints should 
serialize"); + let expected_json = serde_json::json!([{ + "position": { "line": 0, "character": 7 }, + "label": ": number", + "kind": 1, + "paddingLeft": true + }]); + assert_eq!(hints_json, expected_json); + + client_conn + .sender + .send(Message::Request(shutdown_request(3))) + .unwrap(); + let _ = recv_response(&client_conn, 3); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_code_action_unused_variable_quickfix() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + let uri = "file:///test/code-action.jsonnet"; + let text = "local x = 1; 42"; + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text))) + .unwrap(); + + let diagnostic = unused_variable_diagnostic(); + let actions = request_code_actions(&client_conn, 2, uri, vec![diagnostic.clone()], None); + assert_eq!( + actions, + Some(expected_unused_variable_quickfix(uri, diagnostic.clone())) + ); + + // Requesting source fix-all actions should return the document-level fix-all action. 
+ let filtered_actions = request_code_actions( + &client_conn, + 3, + uri, + vec![diagnostic.clone()], + Some(vec![lsp_types::CodeActionKind::SOURCE_FIX_ALL]), + ); + assert_eq!( + filtered_actions, + Some(vec![lsp_types::CodeActionOrCommand::CodeAction( + lsp_types::CodeAction { + title: "Remove all unused bindings".to_string(), + kind: Some(lsp_types::CodeActionKind::SOURCE_FIX_ALL), + diagnostics: Some(vec![diagnostic]), + edit: Some(lsp_types::WorkspaceEdit { + changes: Some(std::collections::HashMap::from([( + uri.parse().unwrap(), + vec![lsp_types::TextEdit { + range: lsp_types::Range { + start: Position { + line: 0, + character: 0, + }, + end: Position { + line: 0, + character: 11, + }, + }, + new_text: String::new(), + }], + )])), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(false), + disabled: None, + data: None, + }, + )]) + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(4))) + .unwrap(); + let _ = recv_response(&client_conn, 4); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_code_action_policy_updates_via_configuration_change() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + let uri = "file:///test/code-action-policy.jsonnet"; + let text = "local x = import \"foo.libsonnet\"; 42"; + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text))) + .unwrap(); + + let diagnostic = unused_variable_diagnostic(); + let actions_before = request_code_actions(&client_conn, 2, uri, vec![diagnostic.clone()], None); + assert_eq!( + actions_before, 
+ Some(expected_unused_import_binding_actions( + uri, + diagnostic.clone(), + )) + ); + + client_conn + .sender + .send(Message::Notification( + did_change_configuration_notification(serde_json::json!({ + "jsonnet": { + "codeActions": { + "removeUnused": "nonImportBindings" + } + } + })), + )) + .unwrap(); + + let actions_after = request_code_actions(&client_conn, 3, uri, vec![diagnostic.clone()], None); + assert_eq!( + actions_after, + Some(vec![lsp_types::CodeActionOrCommand::CodeAction( + lsp_types::CodeAction { + title: "Prefix `x` with `_`".to_string(), + kind: Some(lsp_types::CodeActionKind::QUICKFIX), + diagnostics: Some(vec![diagnostic.clone()]), + edit: Some(lsp_types::WorkspaceEdit { + changes: Some(std::collections::HashMap::from([( + uri.parse().unwrap(), + vec![lsp_types::TextEdit { + range: lsp_types::Range { + start: Position { + line: 0, + character: 6, + }, + end: Position { + line: 0, + character: 7, + }, + }, + new_text: "_x".to_string(), + }], + )])), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(true), + disabled: None, + data: None, + }, + )]) + ); + + let fix_all_after = request_code_actions( + &client_conn, + 4, + uri, + vec![diagnostic], + Some(vec![lsp_types::CodeActionKind::SOURCE_FIX_ALL]), + ); + assert_eq!(fix_all_after, None); + + client_conn + .sender + .send(Message::Request(shutdown_request(5))) + .unwrap(); + let _ = recv_response(&client_conn, 5); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_code_action_comment_policy_updates_via_configuration_change() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + 
.send(Message::Notification(initialized_notification())) + .unwrap(); + + let uri = "file:///test/code-action-comment-policy.jsonnet"; + let text = "// heading\nlocal x = 1;\n42"; + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text))) + .unwrap(); + + let diagnostic = lsp_types::Diagnostic { + range: lsp_types::Range { + start: Position { + line: 1, + character: 6, + }, + end: Position { + line: 1, + character: 7, + }, + }, + severity: Some(lsp_types::DiagnosticSeverity::WARNING), + code: Some(lsp_types::NumberOrString::String( + "unused-variable".to_string(), + )), + code_description: None, + source: Some("jrsonnet-lint".to_string()), + message: "unused variable".to_string(), + related_information: None, + tags: None, + data: None, + }; + + let fix_all_before = request_code_actions( + &client_conn, + 2, + uri, + vec![diagnostic.clone()], + Some(vec![lsp_types::CodeActionKind::SOURCE_FIX_ALL]), + ); + assert_eq!( + fix_all_before, + Some(vec![lsp_types::CodeActionOrCommand::CodeAction( + lsp_types::CodeAction { + title: "Remove all unused bindings".to_string(), + kind: Some(lsp_types::CodeActionKind::SOURCE_FIX_ALL), + diagnostics: Some(vec![diagnostic.clone()]), + edit: Some(lsp_types::WorkspaceEdit { + changes: Some(std::collections::HashMap::from([( + uri.parse().unwrap(), + vec![lsp_types::TextEdit { + range: lsp_types::Range { + start: Position { + line: 1, + character: 0, + }, + end: Position { + line: 1, + character: 11, + }, + }, + new_text: String::new(), + }], + )])), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(false), + disabled: None, + data: None, + }, + )]) + ); + + client_conn + .sender + .send(Message::Notification( + did_change_configuration_notification(serde_json::json!({ + "jsonnet": { + "codeActions": { + "removeUnusedComments": "above" + } + } + })), + )) + .unwrap(); + + let fix_all_after = request_code_actions( + &client_conn, + 3, + uri, + 
vec![diagnostic], + Some(vec![lsp_types::CodeActionKind::SOURCE_FIX_ALL]), + ); + assert_eq!( + fix_all_after, + Some(vec![lsp_types::CodeActionOrCommand::CodeAction( + lsp_types::CodeAction { + title: "Remove all unused bindings".to_string(), + kind: Some(lsp_types::CodeActionKind::SOURCE_FIX_ALL), + diagnostics: Some(vec![lsp_types::Diagnostic { + range: lsp_types::Range { + start: Position { + line: 1, + character: 6, + }, + end: Position { + line: 1, + character: 7, + }, + }, + severity: Some(lsp_types::DiagnosticSeverity::WARNING), + code: Some(lsp_types::NumberOrString::String( + "unused-variable".to_string(), + )), + code_description: None, + source: Some("jrsonnet-lint".to_string()), + message: "unused variable".to_string(), + related_information: None, + tags: None, + data: None, + }]), + edit: Some(lsp_types::WorkspaceEdit { + changes: Some(std::collections::HashMap::from([( + uri.parse().unwrap(), + vec![lsp_types::TextEdit { + range: lsp_types::Range { + start: Position { + line: 0, + character: 0, + }, + end: Position { + line: 1, + character: 11, + }, + }, + new_text: String::new(), + }], + )])), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(false), + disabled: None, + data: None, + }, + )]) + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(4))) + .unwrap(); + let _ = recv_response(&client_conn, 4); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_execute_command_find_references() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + let uri = 
"file:///test/find-refs-command.jsonnet"; + let text = "local x = 1; x + x"; + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text))) + .unwrap(); + let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); + + let refs = request_find_references_command(&client_conn, 2, uri, false); + assert_eq!(refs, expected_find_references(uri, false)); + + let refs_with_declaration = request_find_references_command(&client_conn, 3, uri, true); + assert_eq!(refs_with_declaration, expected_find_references(uri, true)); + + client_conn + .sender + .send(Message::Request(shutdown_request(4))) + .unwrap(); + let _ = recv_response(&client_conn, 4); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_execute_command_show_errors() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + let uri = "file:///test/show-errors-command.jsonnet"; + let text = "{ hello: }"; + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text))) + .unwrap(); + let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); + + client_conn + .sender + .send(Message::Request(execute_command_request( + 2, + "jrsonnet.showErrors", + vec![serde_json::Value::String(uri.to_string())], + ))) + .unwrap(); + let response = recv_response(&client_conn, 2); + assert!( + response.error.is_none(), + "showErrors command should succeed" + ); + let diagnostics: lsp_types::PublishDiagnosticsParams = + serde_json::from_value(response.result.expect("showErrors should return a result")) + .expect("showErrors result 
should be publish diagnostics payload"); + + assert_eq!( + diagnostics, + lsp_types::PublishDiagnosticsParams { + uri: uri.parse().unwrap(), + diagnostics: vec![lsp_types::Diagnostic { + range: lsp_types::Range { + start: Position { + line: 0, + character: 9, + }, + end: Position { + line: 0, + character: 9, + }, + }, + severity: Some(lsp_types::DiagnosticSeverity::ERROR), + code: Some(lsp_types::NumberOrString::String( + "syntax-error".to_string() + )), + code_description: None, + source: Some("jrsonnet".to_string()), + message: "expected expression".to_string(), + related_information: None, + tags: None, + data: None, + }], + version: Some(1), + } + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(3))) + .unwrap(); + let _ = recv_response(&client_conn, 3); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_execute_command_unknown_returns_invalid_params_error() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + client_conn + .sender + .send(Message::Request(execute_command_request( + 2, + "jrsonnet.unknownCommand", + vec![], + ))) + .unwrap(); + let response = recv_response(&client_conn, 2); + assert_eq!(response.result, None); + let error = response + .error + .expect("unknown execute command should return an error"); + assert_eq!(error.code, lsp_server::ErrorCode::InvalidParams as i32); + assert_eq!( + error.message, + "Unknown execute command: jrsonnet.unknownCommand" + ); + assert_eq!(error.data, None); + + client_conn + .sender + .send(Message::Request(shutdown_request(3))) + .unwrap(); + let _ = recv_response(&client_conn, 
3); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_eval_commands_use_tanka_import_roots() { + let tmp = TempDir::new().expect("tempdir should be created"); + let root = tmp.path(); + let env_dir = root.join("environments").join("dev"); + let vendor_dir = root.join("vendor"); + fs::create_dir_all(&env_dir).expect("environment directory should be created"); + fs::create_dir_all(&vendor_dir).expect("vendor directory should be created"); + fs::write(root.join("jsonnetfile.json"), "{}").expect("jsonnetfile should be written"); + + let lib_path = vendor_dir.join("lib.libsonnet"); + let main_path = env_dir.join("main.jsonnet"); + fs::write(&lib_path, "{ answer: 42 }").expect("vendor lib should be written"); + fs::write( + &main_path, + r#"local lib = import "lib.libsonnet"; lib.answer"#, + ) + .expect("main should be written"); + + let main_uri = file_uri(&main_path.canonicalize().expect("main should canonicalize")); + + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request_with_options( + 1, + serde_json::json!({ + "resolvePathsWithTanka": true + }), + ))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + client_conn + .sender + .send(Message::Request(execute_command_request( + 2, + "jrsonnet.evalFile", + vec![serde_json::Value::String(main_uri.clone())], + ))) + .unwrap(); + let eval_file_response = recv_response(&client_conn, 2); + assert!( + eval_file_response.error.is_none(), + "evalFile command should succeed" + ); + assert_eq!( + eval_file_response + .result + .expect("evalFile should return a result"), + serde_json::json!(42) + ); + + client_conn + .sender + .send(Message::Request(execute_command_request( + 
3, + "jrsonnet.evalExpression", + vec![ + serde_json::Value::String(r#"(import "lib.libsonnet").answer"#.to_string()), + serde_json::Value::String(main_uri), + ], + ))) + .unwrap(); + let eval_expression_response = recv_response(&client_conn, 3); + assert!( + eval_expression_response.error.is_none(), + "evalExpression command should succeed" + ); + assert_eq!( + eval_expression_response + .result + .expect("evalExpression should return a result"), + serde_json::json!(42) + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(4))) + .unwrap(); + let _ = recv_response(&client_conn, 4); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_semantic_tokens_range_request() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + let uri = "file:///test/semantic-range.jsonnet"; + let text = "local first = 1\nlocal second = first + 1"; + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text))) + .unwrap(); + let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); + + client_conn + .sender + .send(Message::Request(semantic_tokens_range_request( + 2, uri, 1, 0, 1, 100, + ))) + .unwrap(); + + let response = recv_response(&client_conn, 2); + assert!( + response.error.is_none(), + "semantic tokens range request should succeed" + ); + let tokens: Option = + serde_json::from_value(response.result.expect("should have result")).unwrap(); + let tokens = tokens.expect("semantic tokens range should be returned"); + let expected = encode_semantic_tokens(vec![ + semantic_token(1, 0, 5, 
SemanticTokenTypeName::Keyword, &[]), + semantic_token(1, 6, 6, SemanticTokenTypeName::Variable, &[]), + semantic_token(1, 13, 1, SemanticTokenTypeName::Operator, &[]), + semantic_token(1, 21, 1, SemanticTokenTypeName::Operator, &[]), + semantic_token(1, 23, 1, SemanticTokenTypeName::Number, &[]), + ]); + assert_eq!(tokens, expected, "semantic tokens range mismatch"); + + client_conn + .sender + .send(Message::Request(shutdown_request(3))) + .unwrap(); + let _ = recv_response(&client_conn, 3); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_code_lens_resolve_request() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + let uri = "file:///test/code-lens-resolve.jsonnet"; + let text = "local x = 1; x + x"; + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text))) + .unwrap(); + let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); + + client_conn + .sender + .send(Message::Request(code_lens_request(2, uri))) + .unwrap(); + let response = recv_response(&client_conn, 2); + assert!(response.error.is_none(), "code lens request should succeed"); + let lenses: Vec = + serde_json::from_value(response.result.expect("should have result")).unwrap(); + assert!(!lenses.is_empty(), "expected code lenses for test document"); + let reference_lens = lenses + .into_iter() + .find(|lens| { + lens.command + .as_ref() + .is_some_and(|command| command.command == "jrsonnet.findReferences") + }) + .expect("expected reference count code lens"); + + client_conn + .sender + 
.send(Message::Request(code_lens_resolve_request( + 3, + reference_lens.clone(), + ))) + .unwrap(); + let response = recv_response(&client_conn, 3); + assert!( + response.error.is_none(), + "code lens resolve request should succeed" + ); + let resolved: lsp_types::CodeLens = + serde_json::from_value(response.result.expect("should have result")).unwrap(); + assert_eq!(resolved, reference_lens); + + client_conn + .sender + .send(Message::Request(shutdown_request(4))) + .unwrap(); + let _ = recv_response(&client_conn, 4); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_cancel_request_returns_request_canceled_error() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + let uri = "file:///test/cancel-code-lens.jsonnet"; + let mut text = String::new(); + for index in 0..20_000 { + writeln!(&mut text, "local value_{index} = {index};") + .expect("writing to String should succeed"); + } + text.push_str("value_19999\n"); + + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, &text))) + .unwrap(); + + client_conn + .sender + .send(Message::Request(code_lens_request(2, uri))) + .unwrap(); + client_conn + .sender + .send(Message::Notification(cancel_request_notification(2))) + .unwrap(); + + let response = recv_response(&client_conn, 2); + assert_eq!(response.result, None); + let error = response + .error + .expect("cancelled request should return request-canceled error"); + assert_eq!(error.code, lsp_server::ErrorCode::RequestCanceled as i32); + assert_eq!(error.message, "Request canceled: textDocument/codeLens"); + 
assert_eq!(error.data, None); + + client_conn + .sender + .send(Message::Request(shutdown_request(3))) + .unwrap(); + let _ = recv_response(&client_conn, 3); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_code_lens_resolve_invalid_params_returns_invalid_params_error() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + client_conn + .sender + .send(Message::Request(Request::new( + 2.into(), + CodeLensResolve::METHOD.to_string(), + json!({"not": "a code lens"}), + ))) + .unwrap(); + + let response = recv_response(&client_conn, 2); + assert_eq!(response.result, None); + let error = response + .error + .expect("invalid code lens resolve params should return an error"); + assert_eq!(error.code, lsp_server::ErrorCode::InvalidParams as i32); + assert!( + error + .message + .starts_with("Invalid params for codeLens/resolve:"), + "unexpected error message: {}", + error.message + ); + assert_eq!(error.data, None); + + client_conn + .sender + .send(Message::Request(shutdown_request(3))) + .unwrap(); + let _ = recv_response(&client_conn, 3); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} diff --git a/crates/jrsonnet-lsp/tests/integration_test/lifecycle.rs b/crates/jrsonnet-lsp/tests/integration_test/lifecycle.rs new file mode 100644 index 00000000..f4a0081a --- /dev/null +++ b/crates/jrsonnet-lsp/tests/integration_test/lifecycle.rs @@ -0,0 +1,657 @@ +use super::*; + +#[test] +fn test_initialize_shutdown() { + // Create an in-memory connection 
pair + let (client_conn, server_conn) = Connection::memory(); + + // Run the server in a background thread + let server_thread = run_server(server_conn); + + // Send initialize request + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + + // Receive initialize response + let response = client_conn.receiver.recv().unwrap(); + assert_matches!(response, Message::Response(resp) => { + assert_eq!(resp.id, 1.into()); + assert!(resp.error.is_none(), "Initialize should succeed"); + let result = resp.result.expect("should have result"); + assert!(result.get("capabilities").is_some(), "should have capabilities"); + assert_eq!( + result["capabilities"]["documentHighlightProvider"], + serde_json::Value::Bool(true), + "document highlight capability should be advertised", + ); + assert_eq!( + result["capabilities"]["inlayHintProvider"], + serde_json::Value::Bool(true), + "inlay hint capability should be advertised", + ); + assert_eq!( + result["capabilities"]["codeActionProvider"]["codeActionKinds"][0], + serde_json::Value::String("quickfix".to_string()), + "quickfix code action capability should be advertised", + ); + assert_eq!( + result["capabilities"]["codeActionProvider"]["codeActionKinds"][1], + serde_json::Value::String("source.fixAll".to_string()), + "source fix-all code action capability should be advertised", + ); + assert_eq!( + result["capabilities"]["executeCommandProvider"]["commands"], + serde_json::json!([ + "jrsonnet.evalFile", + "jrsonnet.evalExpression", + "jrsonnet.findTransitiveImporters", + "jrsonnet.findReferences", + "jrsonnet.showErrors" + ]), + "execute command capability should advertise all command IDs", + ); + assert_eq!( + result["capabilities"]["codeLensProvider"]["resolveProvider"], + serde_json::Value::Bool(true), + "code lens resolve capability should be advertised", + ); + assert_eq!( + result["capabilities"]["declarationProvider"], + serde_json::Value::Bool(true), + "declaration capability should be 
advertised", + ); + assert_eq!( + result["capabilities"]["implementationProvider"], + serde_json::Value::Bool(true), + "implementation capability should be advertised", + ); + let server_name = result + .get("serverInfo") + .and_then(|s| s.get("name")) + .and_then(|n| n.as_str()) + .expect("should have serverInfo.name"); + assert!(server_name.contains("jrsonnet")); + }); + + // Send initialized notification + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + // Send shutdown request + client_conn + .sender + .send(Message::Request(shutdown_request(2))) + .unwrap(); + + // Receive shutdown response + let response = client_conn.receiver.recv().unwrap(); + assert_matches!(response, Message::Response(resp) => { + assert_eq!(resp.id, 2.into()); + assert!(resp.error.is_none(), "Shutdown should succeed"); + }); + + // Send exit notification + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + + // Wait for server to exit + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_diagnostics_on_open() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + // Initialize + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = client_conn.receiver.recv().unwrap(); // ignore response + + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + // Open a document with a syntax error + let uri = "file:///test/error.jsonnet"; + let text = "{ a: }"; // Missing value - syntax error + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text))) + .unwrap(); + + // Should receive diagnostics notification + let notification = client_conn.receiver.recv().unwrap(); + assert_matches!(notification, Message::Notification(notif) => { + assert_eq!(notif.method, PublishDiagnostics::METHOD); + let params: 
lsp_types::PublishDiagnosticsParams = + serde_json::from_value(notif.params).unwrap(); + assert!( + !params.diagnostics.is_empty(), + "Should have diagnostics for syntax error" + ); + }); + + // Shutdown + client_conn + .sender + .send(Message::Request(shutdown_request(2))) + .unwrap(); + let _ = client_conn.receiver.recv().unwrap(); + + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_diagnostics_refresh_on_did_save_with_text() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + let uri = "file:///test/save-refresh.jsonnet"; + client_conn + .sender + .send(Message::Notification(did_open_notification( + uri, "{ a: 1 }", + ))) + .unwrap(); + let opened = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); + assert!(opened.diagnostics.is_empty()); + + client_conn + .sender + .send(Message::Notification(did_save_notification( + uri, + Some("{ a: }"), + ))) + .unwrap(); + let saved_invalid = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); + assert!( + !saved_invalid.diagnostics.is_empty(), + "saving invalid text should publish diagnostics" + ); + + client_conn + .sender + .send(Message::Notification(did_save_notification( + uri, + Some("{ a: 2 }"), + ))) + .unwrap(); + let saved_valid = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); + assert!(saved_valid.diagnostics.is_empty()); + + client_conn + .sender + .send(Message::Request(shutdown_request(2))) + .unwrap(); + let _ = recv_response(&client_conn, 2); + client_conn + .sender + 
.send(Message::Notification(exit_notification())) + .unwrap(); + + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_configuration_change_reconfigures_eval_diagnostics() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + // Initialize with eval diagnostics enabled. + client_conn + .sender + .send(Message::Request(initialize_request_with_options( + 1, + serde_json::json!({ + "enableEvalDiagnostics": true + }), + ))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + let uri = "file:///test/eval-config-change.jsonnet"; + let text = "error 'boom'"; + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text))) + .unwrap(); + + let initial_diagnostics = + recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); + assert!( + initial_diagnostics + .diagnostics + .iter() + .any(|diag| diag.source.as_deref() == Some("jrsonnet-eval")), + "expected eval diagnostics to be present before config change" + ); + + client_conn + .sender + .send(Message::Notification( + did_change_configuration_notification(serde_json::json!({ + "jsonnet": { + "enableEvalDiagnostics": false + } + })), + )) + .unwrap(); + + let updated_diagnostics = + recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); + assert!( + updated_diagnostics + .diagnostics + .iter() + .all(|diag| diag.source.as_deref() != Some("jrsonnet-eval")), + "expected eval diagnostics to be removed after config change" + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(2))) + .unwrap(); + let _ = recv_response(&client_conn, 2); + + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn 
test_configuration_change_reindexes_closed_import_graph_entries() { + let tmp = TempDir::new().expect("tempdir should be created"); + let jpath_a = tmp.path().join("jpath-a"); + let jpath_b = tmp.path().join("jpath-b"); + let workspace = tmp.path().join("workspace"); + fs::create_dir_all(&jpath_a).expect("jpath-a directory should be created"); + fs::create_dir_all(&jpath_b).expect("jpath-b directory should be created"); + fs::create_dir_all(&workspace).expect("workspace directory should be created"); + + let lib_a_path = jpath_a.join("lib.libsonnet"); + let lib_b_path = jpath_b.join("lib.libsonnet"); + let main_path = workspace.join("main.jsonnet"); + fs::write(&lib_a_path, "{ from: 'a' }").expect("jpath-a lib should be written"); + fs::write(&lib_b_path, "{ from: 'b' }").expect("jpath-b lib should be written"); + fs::write(&main_path, r#"local lib = import "lib.libsonnet"; lib"#) + .expect("main should be written"); + + let lib_a_uri = file_uri( + &lib_a_path + .canonicalize() + .expect("lib_a should canonicalize"), + ); + let lib_b_uri = file_uri( + &lib_b_path + .canonicalize() + .expect("lib_b should canonicalize"), + ); + let main_uri = file_uri(&main_path.canonicalize().expect("main should canonicalize")); + let main_text = fs::read_to_string(&main_path).expect("main text should be readable"); + + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request_with_options( + 1, + serde_json::json!({ + "jpath": [jpath_a.to_string_lossy().to_string()], + }), + ))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + client_conn + .sender + .send(Message::Notification(did_open_notification( + &main_uri, &main_text, + ))) + .unwrap(); + let _ = recv_publish_diagnostics_for_uri(&client_conn, &main_uri, Duration::from_secs(5)); + + client_conn + .sender + 
.send(Message::Notification(did_close_notification(&main_uri))) + .unwrap(); + + client_conn + .sender + .send(Message::Notification( + did_change_configuration_notification(serde_json::json!({ + "jsonnet": { + "jpath": [jpath_b.to_string_lossy().to_string()] + } + })), + )) + .unwrap(); + + client_conn + .sender + .send(Message::Request(execute_command_request( + 2, + "jrsonnet.findTransitiveImporters", + vec![serde_json::Value::String(lib_a_uri.clone())], + ))) + .unwrap(); + let old_target_response = recv_response(&client_conn, 2); + assert!( + old_target_response.error.is_none(), + "findTransitiveImporters for old jpath target should succeed" + ); + assert_eq!( + old_target_response + .result + .expect("should have old target command result"), + serde_json::json!({ + "file": lib_a_uri, + "transitiveImporters": [], + }) + ); + + client_conn + .sender + .send(Message::Request(execute_command_request( + 3, + "jrsonnet.findTransitiveImporters", + vec![serde_json::Value::String(lib_b_uri.clone())], + ))) + .unwrap(); + let new_target_response = recv_response(&client_conn, 3); + assert!( + new_target_response.error.is_none(), + "findTransitiveImporters for new jpath target should succeed" + ); + assert_eq!( + new_target_response + .result + .expect("should have new target command result"), + serde_json::json!({ + "file": lib_b_uri, + "transitiveImporters": [main_uri], + }) + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(4))) + .unwrap(); + let _ = recv_response(&client_conn, 4); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_valid_document_no_errors() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + // Initialize + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = client_conn.receiver.recv().unwrap(); + 
+ client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + // Open a valid document + let uri = "file:///test/valid.jsonnet"; + let text = r#"{ hello: "world", answer: 42 }"#; + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text))) + .unwrap(); + + // Should receive diagnostics notification with empty diagnostics + let notification = client_conn.receiver.recv().unwrap(); + assert_matches!(notification, Message::Notification(notif) => { + assert_eq!(notif.method, PublishDiagnostics::METHOD); + let params: lsp_types::PublishDiagnosticsParams = + serde_json::from_value(notif.params).unwrap(); + assert!( + params.diagnostics.is_empty(), + "Valid document should have no diagnostics" + ); + }); + + // Shutdown + client_conn + .sender + .send(Message::Request(shutdown_request(2))) + .unwrap(); + let _ = client_conn.receiver.recv().unwrap(); + + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_initialize_registers_did_change_watched_files_when_supported() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request( + initialize_request_with_dynamic_watched_files(1), + )) + .unwrap(); + let _ = recv_response(&client_conn, 1); + + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + let register_request = loop { + let message = client_conn + .receiver + .recv_timeout(Duration::from_secs(3)) + .expect("expected registerCapability request"); + if let Message::Request(request) = message { + break request; + } + }; + assert_eq!(register_request.method, RegisterCapability::METHOD); + + let actual_params: RegistrationParams = + serde_json::from_value(register_request.params).unwrap(); + let expected_options = 
DidChangeWatchedFilesRegistrationOptions { + watchers: vec![ + FileSystemWatcher { + glob_pattern: GlobPattern::String("**/*.jsonnet".to_owned()), + kind: None, + }, + FileSystemWatcher { + glob_pattern: GlobPattern::String("**/*.libsonnet".to_owned()), + kind: None, + }, + FileSystemWatcher { + glob_pattern: GlobPattern::String("**/*.json".to_owned()), + kind: None, + }, + ], + }; + let expected_params = RegistrationParams { + registrations: vec![Registration { + id: "jrsonnet-lsp.did-change-watched-files".to_owned(), + method: DidChangeWatchedFiles::METHOD.to_owned(), + register_options: Some(serde_json::to_value(expected_options).unwrap()), + }], + }; + assert_eq!(actual_params, expected_params); + + client_conn + .sender + .send(Message::Response(lsp_server::Response::new_ok( + register_request.id, + serde_json::Value::Null, + ))) + .unwrap(); + + client_conn + .sender + .send(Message::Request(shutdown_request(2))) + .unwrap(); + let _ = recv_response(&client_conn, 2); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_initialize_uses_relative_watch_patterns_when_supported() { + let tmp = TempDir::new().expect("tempdir should be created"); + let root_uri = file_uri(tmp.path()); + let parsed_root_uri: lsp_types::Uri = root_uri.parse().unwrap(); + + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request( + initialize_request_with_dynamic_watched_files_relative(1, &root_uri), + )) + .unwrap(); + let _ = recv_response(&client_conn, 1); + + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + let register_request = loop { + let message = client_conn + .receiver + .recv_timeout(Duration::from_secs(3)) + .expect("expected registerCapability request"); + if let Message::Request(request) = message 
{ + break request; + } + }; + assert_eq!(register_request.method, RegisterCapability::METHOD); + + let actual_params: RegistrationParams = + serde_json::from_value(register_request.params).unwrap(); + let expected_options = DidChangeWatchedFilesRegistrationOptions { + watchers: vec![ + FileSystemWatcher { + glob_pattern: GlobPattern::Relative(RelativePattern { + base_uri: OneOf::Right(parsed_root_uri.clone()), + pattern: "**/*.jsonnet".to_owned(), + }), + kind: None, + }, + FileSystemWatcher { + glob_pattern: GlobPattern::Relative(RelativePattern { + base_uri: OneOf::Right(parsed_root_uri.clone()), + pattern: "**/*.libsonnet".to_owned(), + }), + kind: None, + }, + FileSystemWatcher { + glob_pattern: GlobPattern::Relative(RelativePattern { + base_uri: OneOf::Right(parsed_root_uri), + pattern: "**/*.json".to_owned(), + }), + kind: None, + }, + ], + }; + let expected_params = RegistrationParams { + registrations: vec![Registration { + id: "jrsonnet-lsp.did-change-watched-files".to_owned(), + method: DidChangeWatchedFiles::METHOD.to_owned(), + register_options: Some(serde_json::to_value(expected_options).unwrap()), + }], + }; + assert_eq!(actual_params, expected_params); + + client_conn + .sender + .send(Message::Response(lsp_server::Response::new_ok( + register_request.id, + serde_json::Value::Null, + ))) + .unwrap(); + + client_conn + .sender + .send(Message::Request(shutdown_request(2))) + .unwrap(); + let _ = recv_response(&client_conn, 2); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} diff --git a/crates/jrsonnet-lsp/tests/integration_test/navigation.rs b/crates/jrsonnet-lsp/tests/integration_test/navigation.rs new file mode 100644 index 00000000..28f2d8e6 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/integration_test/navigation.rs @@ -0,0 +1,1080 @@ +use super::*; + +#[test] +fn test_goto_definition() { + let (client_conn, server_conn) = 
Connection::memory(); + let server_thread = run_server(server_conn); + + // Initialize + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = client_conn.receiver.recv().unwrap(); + + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + // Open a document with a local binding + let uri = "file:///test/definition.jsonnet"; + let text = r"local x = 1; x + 1"; + // ^^^^^^ def ^ use at position (0, 13) + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text))) + .unwrap(); + + // Receive diagnostics notification (discard) + let _ = client_conn.receiver.recv().unwrap(); + + // Send goto definition request for 'x' usage at position (0, 13) + client_conn + .sender + .send(Message::Request(goto_definition_request(2, uri, 0, 13))) + .unwrap(); + + // Should receive definition response + let response = client_conn.receiver.recv().unwrap(); + let response = assert_matches!(response, Message::Response(resp) => resp); + assert_eq!(response.id, 2.into()); + assert!(response.error.is_none(), "Goto definition should succeed"); + let result: Option = + serde_json::from_value(response.result.expect("should have result")).unwrap(); + assert_eq!( + result, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: uri.parse().unwrap(), + range: lsp_types::Range { + start: Position { + line: 0, + character: 6, + }, + end: Position { + line: 0, + character: 7, + }, + }, + })) + ); + + // Shutdown + client_conn + .sender + .send(Message::Request(shutdown_request(3))) + .unwrap(); + let _ = client_conn.receiver.recv().unwrap(); + + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_goto_type_definition() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender 
+ .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + let uri = "file:///test/type-definition.jsonnet"; + let text = "local x = 1; x + 1"; + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text))) + .unwrap(); + let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); + + client_conn + .sender + .send(Message::Request(goto_type_definition_request( + 2, uri, 0, 13, + ))) + .unwrap(); + let response = recv_response(&client_conn, 2); + assert!( + response.error.is_none(), + "Goto type definition request should succeed" + ); + let result: Option = + serde_json::from_value(response.result.expect("should have result")).unwrap(); + assert_eq!( + result, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: uri.parse().unwrap(), + range: lsp_types::Range { + start: Position { + line: 0, + character: 6, + }, + end: Position { + line: 0, + character: 7, + }, + }, + })) + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(3))) + .unwrap(); + let _ = recv_response(&client_conn, 3); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_goto_type_definition_matches_definition_for_local_alias() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + let uri = "file:///test/type-definition-local-alias.jsonnet"; + let text = "local x = 1;\nlocal y = x;\ny"; + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, 
text))) + .unwrap(); + let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); + + let result = assert_type_definition_matches_definition(&client_conn, 2, 3, uri, 2, 0); + assert_eq!( + result, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: uri.parse().unwrap(), + range: lsp_types::Range { + start: Position { + line: 0, + character: 6, + }, + end: Position { + line: 0, + character: 7, + }, + }, + })) + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(4))) + .unwrap(); + let _ = recv_response(&client_conn, 4); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_goto_type_definition_matches_definition_for_import_targets() { + let temp_dir = TempDir::new().expect("failed to create temp dir"); + let lib_path = temp_dir.path().join("lib.libsonnet"); + let main_path = temp_dir.path().join("main.jsonnet"); + + fs::write(&lib_path, "{ foo: 42 }").expect("failed to write lib file"); + fs::write( + &main_path, + r#"local lib = import "lib.libsonnet"; +local alias = lib.foo; +local plain = lib; +alias + std.length(plain)"#, + ) + .expect("failed to write main file"); + + let uri = file_uri(&main_path); + let lib_uri: lsp_types::Uri = file_uri(&lib_path).parse().unwrap(); + let text = fs::read_to_string(&main_path).expect("failed to read main file"); + + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + client_conn + .sender + .send(Message::Notification(did_open_notification(&uri, &text))) + .unwrap(); + let _ = recv_publish_diagnostics_for_uri(&client_conn, &uri, Duration::from_secs(5)); + + // 
`alias` usage at line 3, col 0 resolves to imported field `foo`. + let alias_result = assert_type_definition_matches_definition(&client_conn, 2, 3, &uri, 3, 0); + assert_eq!( + alias_result, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: lib_uri.clone(), + range: lsp_types::Range { + start: Position { + line: 0, + character: 2, + }, + end: Position { + line: 0, + character: 5, + }, + }, + })) + ); + + // `plain` usage at line 3, col 19 resolves to import file root. + let plain_result = assert_type_definition_matches_definition(&client_conn, 4, 5, &uri, 3, 19); + assert_eq!( + plain_result, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: lib_uri, + range: lsp_types::Range::default(), + })) + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(6))) + .unwrap(); + let _ = recv_response(&client_conn, 6); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_navigation_matrix_local_alias() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + let uri = "file:///test/navigation-matrix-local.jsonnet"; + let text = "local x = 1;\nlocal y = x;\ny"; + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text))) + .unwrap(); + let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); + + let declaration = send_goto_and_parse( + &client_conn, + 2, + "goto declaration", + goto_declaration_request(2, uri, 2, 0), + ); + let definition = send_goto_and_parse( + &client_conn, + 3, + "goto definition", + goto_definition_request(3, uri, 2, 0), + ); + let type_definition 
= send_goto_and_parse( + &client_conn, + 4, + "goto type definition", + goto_type_definition_request(4, uri, 2, 0), + ); + let implementation = send_goto_and_parse( + &client_conn, + 5, + "goto implementation", + goto_implementation_request(5, uri, 2, 0), + ); + + assert_eq!( + declaration, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: uri.parse().unwrap(), + range: lsp_types::Range { + start: Position { + line: 1, + character: 6, + }, + end: Position { + line: 1, + character: 7, + }, + }, + })) + ); + assert_eq!( + definition, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: uri.parse().unwrap(), + range: lsp_types::Range { + start: Position { + line: 0, + character: 6, + }, + end: Position { + line: 0, + character: 7, + }, + }, + })) + ); + assert_eq!(type_definition, definition); + assert_eq!( + implementation, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: uri.parse().unwrap(), + range: lsp_types::Range { + start: Position { + line: 1, + character: 10, + }, + end: Position { + line: 1, + character: 11, + }, + }, + })) + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(6))) + .unwrap(); + let _ = recv_response(&client_conn, 6); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_navigation_matrix_import_alias() { + let temp_dir = TempDir::new().expect("failed to create temp dir"); + let lib_path = temp_dir.path().join("lib.libsonnet"); + let main_path = temp_dir.path().join("main.jsonnet"); + + fs::write(&lib_path, "{ foo: 42 }").expect("failed to write lib file"); + fs::write( + &main_path, + r#"local lib = import "lib.libsonnet"; +local alias = lib.foo; +alias"#, + ) + .expect("failed to write main file"); + + let uri = file_uri(&main_path); + let lib_uri: lsp_types::Uri = file_uri(&lib_path).parse().unwrap(); + let text = 
fs::read_to_string(&main_path).expect("failed to read main file"); + + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + client_conn + .sender + .send(Message::Notification(did_open_notification(&uri, &text))) + .unwrap(); + let _ = recv_publish_diagnostics_for_uri(&client_conn, &uri, Duration::from_secs(5)); + + let declaration = send_goto_and_parse( + &client_conn, + 2, + "goto declaration", + goto_declaration_request(2, &uri, 2, 0), + ); + let definition = send_goto_and_parse( + &client_conn, + 3, + "goto definition", + goto_definition_request(3, &uri, 2, 0), + ); + let type_definition = send_goto_and_parse( + &client_conn, + 4, + "goto type definition", + goto_type_definition_request(4, &uri, 2, 0), + ); + let implementation = send_goto_and_parse( + &client_conn, + 5, + "goto implementation", + goto_implementation_request(5, &uri, 2, 0), + ); + + assert_eq!( + declaration, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: uri.parse().unwrap(), + range: lsp_types::Range { + start: Position { + line: 1, + character: 6, + }, + end: Position { + line: 1, + character: 11, + }, + }, + })) + ); + assert_eq!( + definition, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: lib_uri, + range: lsp_types::Range { + start: Position { + line: 0, + character: 2, + }, + end: Position { + line: 0, + character: 5, + }, + }, + })) + ); + assert_eq!(type_definition, definition); + assert_eq!( + implementation, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: uri.parse().unwrap(), + range: lsp_types::Range { + start: Position { + line: 1, + character: 14, + }, + end: Position { + line: 1, + character: 21, + }, + }, + })) + ); + + client_conn + .sender + 
.send(Message::Request(shutdown_request(6))) + .unwrap(); + let _ = recv_response(&client_conn, 6); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_goto_declaration() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + let uri = "file:///test/declaration.jsonnet"; + let text = "local x = 1; x + 1"; + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text))) + .unwrap(); + let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); + + client_conn + .sender + .send(Message::Request(goto_declaration_request(2, uri, 0, 13))) + .unwrap(); + + let response = recv_response(&client_conn, 2); + assert!( + response.error.is_none(), + "Goto declaration request should succeed" + ); + let result: Option = + serde_json::from_value(response.result.expect("should have result")).unwrap(); + assert_eq!( + result, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: uri.parse().unwrap(), + range: lsp_types::Range { + start: Position { + line: 0, + character: 6, + }, + end: Position { + line: 0, + character: 7, + }, + }, + })) + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(3))) + .unwrap(); + let _ = recv_response(&client_conn, 3); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_goto_implementation_local_binding() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + 
.sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + let uri = "file:///test/implementation-local.jsonnet"; + let text = "local x = 1; x + 1"; + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text))) + .unwrap(); + let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); + + client_conn + .sender + .send(Message::Request(goto_definition_request(2, uri, 0, 13))) + .unwrap(); + let definition_response = recv_response(&client_conn, 2); + let definition_result: Option = + serde_json::from_value(definition_response.result.expect("should have result")).unwrap(); + assert_eq!( + definition_result, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: uri.parse().unwrap(), + range: lsp_types::Range { + start: Position { + line: 0, + character: 6, + }, + end: Position { + line: 0, + character: 7, + }, + }, + })) + ); + + client_conn + .sender + .send(Message::Request(goto_implementation_request(3, uri, 0, 13))) + .unwrap(); + let implementation_response = recv_response(&client_conn, 3); + let implementation_result: Option = + serde_json::from_value(implementation_response.result.expect("should have result")) + .unwrap(); + assert_eq!( + implementation_result, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: uri.parse().unwrap(), + range: lsp_types::Range { + start: Position { + line: 0, + character: 10, + }, + end: Position { + line: 0, + character: 11, + }, + }, + })) + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(4))) + .unwrap(); + let _ = recv_response(&client_conn, 4); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn 
test_goto_definition_and_declaration_diverge_for_local_alias() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + let uri = "file:///test/alias-definition-vs-declaration.jsonnet"; + let text = "local x = 1;\nlocal y = x;\ny"; + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text))) + .unwrap(); + let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); + + client_conn + .sender + .send(Message::Request(goto_definition_request(2, uri, 2, 0))) + .unwrap(); + let definition_response = recv_response(&client_conn, 2); + let definition_result: Option = + serde_json::from_value(definition_response.result.expect("should have result")).unwrap(); + assert_eq!( + definition_result, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: uri.parse().unwrap(), + range: lsp_types::Range { + start: Position { + line: 0, + character: 6, + }, + end: Position { + line: 0, + character: 7, + }, + }, + })) + ); + + client_conn + .sender + .send(Message::Request(goto_declaration_request(3, uri, 2, 0))) + .unwrap(); + let declaration_response = recv_response(&client_conn, 3); + let declaration_result: Option = + serde_json::from_value(declaration_response.result.expect("should have result")).unwrap(); + assert_eq!( + declaration_result, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: uri.parse().unwrap(), + range: lsp_types::Range { + start: Position { + line: 1, + character: 6, + }, + end: Position { + line: 1, + character: 7, + }, + }, + })) + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(4))) + .unwrap(); + let _ = recv_response(&client_conn, 4); + client_conn + .sender + 
.send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_goto_implementation_import_field() { + let temp_dir = TempDir::new().expect("failed to create temp dir"); + let lib_path = temp_dir.path().join("lib.libsonnet"); + let main_path = temp_dir.path().join("main.jsonnet"); + + fs::write(&lib_path, "{ foo: 42 }").expect("failed to write lib file"); + fs::write(&main_path, r#"local lib = import "lib.libsonnet"; lib.foo"#) + .expect("failed to write main file"); + + let uri = file_uri(&main_path); + let lib_uri: lsp_types::Uri = file_uri(&lib_path).parse().unwrap(); + let text = fs::read_to_string(&main_path).expect("failed to read main file"); + + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + client_conn + .sender + .send(Message::Notification(did_open_notification(&uri, &text))) + .unwrap(); + + client_conn + .sender + .send(Message::Request(goto_definition_request(2, &uri, 0, 40))) + .unwrap(); + let definition_response = recv_response(&client_conn, 2); + let definition_result: Option = + serde_json::from_value(definition_response.result.expect("should have result")).unwrap(); + assert_eq!( + definition_result, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: lib_uri.clone(), + range: lsp_types::Range { + start: Position { + line: 0, + character: 2, + }, + end: Position { + line: 0, + character: 5, + }, + }, + })) + ); + + client_conn + .sender + .send(Message::Request(goto_declaration_request(3, &uri, 0, 40))) + .unwrap(); + let declaration_response = recv_response(&client_conn, 3); + let declaration_result: Option = + 
serde_json::from_value(declaration_response.result.expect("should have result")).unwrap(); + assert_eq!( + declaration_result, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: lib_uri.clone(), + range: lsp_types::Range { + start: Position { + line: 0, + character: 2, + }, + end: Position { + line: 0, + character: 5, + }, + }, + })) + ); + + client_conn + .sender + .send(Message::Request(goto_implementation_request( + 4, &uri, 0, 40, + ))) + .unwrap(); + let implementation_response = recv_response(&client_conn, 4); + let implementation_result: Option = + serde_json::from_value(implementation_response.result.expect("should have result")) + .unwrap(); + assert_eq!( + implementation_result, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: lib_uri, + range: lsp_types::Range { + start: Position { + line: 0, + character: 7, + }, + end: Position { + line: 0, + character: 9, + }, + }, + })) + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(5))) + .unwrap(); + let _ = recv_response(&client_conn, 5); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_goto_definition_alias_to_import_field_vs_declaration() { + let temp_dir = TempDir::new().expect("failed to create temp dir"); + let lib_path = temp_dir.path().join("lib.libsonnet"); + let main_path = temp_dir.path().join("main.jsonnet"); + + fs::write(&lib_path, "{ foo: 42 }").expect("failed to write lib file"); + fs::write( + &main_path, + r#"local lib = import "lib.libsonnet"; +local alias = lib.foo; +alias"#, + ) + .expect("failed to write main file"); + + let uri = file_uri(&main_path); + let lib_uri: lsp_types::Uri = file_uri(&lib_path).parse().unwrap(); + let text = fs::read_to_string(&main_path).expect("failed to read main file"); + + let (client_conn, server_conn) = Connection::memory(); + let server_thread = 
run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + client_conn + .sender + .send(Message::Notification(did_open_notification(&uri, &text))) + .unwrap(); + let _ = recv_publish_diagnostics_for_uri(&client_conn, &uri, Duration::from_secs(5)); + + client_conn + .sender + .send(Message::Request(goto_definition_request(2, &uri, 2, 0))) + .unwrap(); + let definition_response = recv_response(&client_conn, 2); + let definition_result: Option = + serde_json::from_value(definition_response.result.expect("should have result")).unwrap(); + assert_eq!( + definition_result, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: lib_uri, + range: lsp_types::Range { + start: Position { + line: 0, + character: 2, + }, + end: Position { + line: 0, + character: 5, + }, + }, + })) + ); + + client_conn + .sender + .send(Message::Request(goto_declaration_request(3, &uri, 2, 0))) + .unwrap(); + let declaration_response = recv_response(&client_conn, 3); + let declaration_result: Option = + serde_json::from_value(declaration_response.result.expect("should have result")).unwrap(); + assert_eq!( + declaration_result, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: uri.parse().unwrap(), + range: lsp_types::Range { + start: Position { + line: 1, + character: 6, + }, + end: Position { + line: 1, + character: 11, + }, + }, + })) + ); + + client_conn + .sender + .send(Message::Request(goto_implementation_request(4, &uri, 2, 0))) + .unwrap(); + let implementation_response = recv_response(&client_conn, 4); + let implementation_result: Option = + serde_json::from_value(implementation_response.result.expect("should have result")) + .unwrap(); + assert_eq!( + implementation_result, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: uri.parse().unwrap(), + 
range: lsp_types::Range { + start: Position { + line: 1, + character: 14, + }, + end: Position { + line: 1, + character: 21, + }, + }, + })) + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(5))) + .unwrap(); + let _ = recv_response(&client_conn, 5); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_diagnostics_import_file_and_definition_resolution() { + let temp_dir = TempDir::new().expect("failed to create temp dir"); + let lib_path = temp_dir.path().join("lib.libsonnet"); + let main_path = temp_dir.path().join("main.jsonnet"); + + fs::write(&lib_path, "{ foo: 42 }").expect("failed to write lib file"); + fs::write(&main_path, r#"local lib = import "lib.libsonnet"; lib.foo"#) + .expect("failed to write main file"); + + let uri = file_uri(&main_path); + let lib_uri: lsp_types::Uri = file_uri(&lib_path).parse().unwrap(); + let text = fs::read_to_string(&main_path).expect("failed to read main file"); + + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + client_conn + .sender + .send(Message::Notification(did_open_notification(&uri, &text))) + .unwrap(); + + let diagnostics = recv_publish_diagnostics_for_uri(&client_conn, &uri, Duration::from_secs(5)); + assert_eq!(diagnostics.uri.as_str(), uri); + assert!( + diagnostics.diagnostics.is_empty(), + "import-backed file should have no diagnostics" + ); + + client_conn + .sender + .send(Message::Request(goto_definition_request(2, &uri, 0, 40))) + .unwrap(); + let response = recv_response(&client_conn, 2); + assert!(response.error.is_none(), "Goto definition should succeed"); + let result: 
Option = + serde_json::from_value(response.result.expect("should have result")).unwrap(); + assert_eq!( + result, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: lib_uri, + range: lsp_types::Range { + start: Position { + line: 0, + character: 2, + }, + end: Position { + line: 0, + character: 5, + }, + }, + })) + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(3))) + .unwrap(); + let _ = recv_response(&client_conn, 3); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} diff --git a/crates/jrsonnet-lsp/tests/integration_test/workspace_cross_file.rs b/crates/jrsonnet-lsp/tests/integration_test/workspace_cross_file.rs new file mode 100644 index 00000000..b3454433 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/integration_test/workspace_cross_file.rs @@ -0,0 +1,1079 @@ +use super::*; + +#[test] +fn test_watched_file_refreshes_unopened_importers_for_references() { + let tmp = TempDir::new().expect("tempdir should be created"); + let lib1_path = tmp.path().join("lib1.jsonnet"); + let lib2_path = tmp.path().join("lib2.jsonnet"); + let main_path = tmp.path().join("main.jsonnet"); + + fs::write(&lib1_path, "local target = 1; target").expect("lib1 should be written"); + fs::write(&lib2_path, "local target = 2; target").expect("lib2 should be written"); + fs::write(&main_path, "local lib = import 'lib1.jsonnet'; lib.target") + .expect("main should be written"); + + let lib1_uri = file_uri(&lib1_path.canonicalize().expect("lib1 should canonicalize")); + let main_uri = file_uri(&main_path.canonicalize().expect("main should canonicalize")); + + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + // Initialize + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = client_conn.receiver.recv().unwrap(); + client_conn + .sender + 
.send(Message::Notification(initialized_notification())) + .unwrap(); + + // Open lib1 (current document for references requests) + client_conn + .sender + .send(Message::Notification(did_open_notification( + &lib1_uri, + "local target = 1; target", + ))) + .unwrap(); + let _ = client_conn.receiver.recv().unwrap(); // initial diagnostics + + // Index unopened main file via watched-files notification + client_conn + .sender + .send(Message::Notification( + did_change_watched_files_notification(vec![FileEvent { + uri: main_uri.parse().unwrap(), + typ: FileChangeType::CREATED, + }]), + )) + .unwrap(); + + client_conn + .sender + .send(Message::Request(execute_command_request( + 20, + "jrsonnet.findTransitiveImporters", + vec![serde_json::Value::String(lib1_uri.clone())], + ))) + .unwrap(); + let response = recv_response(&client_conn, 20); + assert!(response.error.is_none(), "Command should succeed"); + let command_result = response.result.expect("command should return result"); + let importers = command_result["transitiveImporters"] + .as_array() + .expect("transitiveImporters should be an array") + .iter() + .filter_map(|value| value.as_str()) + .collect::>(); + assert!( + importers.iter().any(|uri| *uri == main_uri), + "Expected main to be indexed as lib1 importer, got: {importers:?}" + ); + + // Query references to `target` definition in lib1 (line 0, col 6) + client_conn + .sender + .send(Message::Request(references_request( + 2, &lib1_uri, 0, 6, false, + ))) + .unwrap(); + let response = recv_response(&client_conn, 2); + assert!(response.error.is_none(), "References should succeed"); + let refs: Option> = + serde_json::from_value(response.result.expect("should have result")).unwrap(); + let refs = refs.unwrap_or_default(); + assert!( + refs.iter() + .any(|location| location.uri.to_string() == main_uri), + "Expected cross-file reference from unopened main file, got: {refs:?}" + ); + + // Query references from a non-definition reference in lib1 (line 0, col 18) + 
client_conn + .sender + .send(Message::Request(references_request( + 21, &lib1_uri, 0, 18, false, + ))) + .unwrap(); + let response = recv_response(&client_conn, 21); + assert!(response.error.is_none(), "References should succeed"); + let refs: Option> = + serde_json::from_value(response.result.expect("should have result")).unwrap(); + let refs = refs.unwrap_or_default(); + assert!( + refs.iter() + .any(|location| location.uri.to_string() == main_uri), + "Expected cross-file reference from unopened main file when queried from a local reference, got: {refs:?}" + ); + + // Update main on disk to import lib2 instead of lib1 + fs::write(&main_path, "local lib = import 'lib2.jsonnet'; lib.target") + .expect("main should be rewritten"); + + client_conn + .sender + .send(Message::Notification( + did_change_watched_files_notification(vec![FileEvent { + uri: main_uri.parse().unwrap(), + typ: FileChangeType::CHANGED, + }]), + )) + .unwrap(); + + // References to lib1 target should no longer include main + client_conn + .sender + .send(Message::Request(references_request( + 3, &lib1_uri, 0, 6, false, + ))) + .unwrap(); + let response = recv_response(&client_conn, 3); + assert!(response.error.is_none(), "References should succeed"); + let refs: Option> = + serde_json::from_value(response.result.expect("should have result")).unwrap(); + let refs = refs.unwrap_or_default(); + assert!( + !refs + .iter() + .any(|location| location.uri.to_string() == main_uri), + "Main should no longer reference lib1 after watched-file update" + ); + + // Shutdown + client_conn + .sender + .send(Message::Request(shutdown_request(4))) + .unwrap(); + let _ = recv_response(&client_conn, 4); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_initialize_bootstraps_workspace_import_graph() { + let tmp = TempDir::new().expect("tempdir should be created"); + let lib_path = 
tmp.path().join("lib.jsonnet"); + let main_path = tmp.path().join("main.jsonnet"); + fs::write(&lib_path, "{ value: 1 }").expect("lib should be written"); + fs::write(&main_path, "local lib = import 'lib.jsonnet'; lib.value") + .expect("main should be written"); + + let root_uri = file_uri(&tmp.path().canonicalize().expect("root should canonicalize")); + let lib_uri = file_uri(&lib_path.canonicalize().expect("lib should canonicalize")); + let main_uri = file_uri(&main_path.canonicalize().expect("main should canonicalize")); + + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request_with_root_uri( + 1, &root_uri, + ))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + let expected_result = json!({ + "file": lib_uri, + "transitiveImporters": [main_uri], + }); + let mut actual_result = serde_json::Value::Null; + for request_id in 2..=42 { + client_conn + .sender + .send(Message::Request(execute_command_request( + request_id, + "jrsonnet.findTransitiveImporters", + vec![serde_json::Value::String( + expected_result["file"] + .as_str() + .expect("expected file URI should be a string") + .to_string(), + )], + ))) + .unwrap(); + let response = recv_response(&client_conn, request_id); + assert!( + response.error.is_none(), + "findTransitiveImporters command should succeed" + ); + actual_result = response.result.expect("command should return result"); + if actual_result == expected_result { + break; + } + thread::sleep(Duration::from_millis(25)); + } + assert_eq!(actual_result, expected_result); + + client_conn + .sender + .send(Message::Request(shutdown_request(3))) + .unwrap(); + let _ = recv_response(&client_conn, 3); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server 
thread should exit cleanly"); +} + +#[test] +fn test_workspace_symbol_includes_unopened_workspace_files() { + let tmp = TempDir::new().expect("tempdir should be created"); + let closed_path = tmp.path().join("closed.jsonnet"); + let closed_text = "local workspaceOnly=1;workspaceOnly"; + fs::write(&closed_path, closed_text).expect("closed file should be written"); + + let root_uri = file_uri(&tmp.path().canonicalize().expect("root should canonicalize")); + let closed_uri = file_uri( + &closed_path + .canonicalize() + .expect("closed should canonicalize"), + ); + + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request_with_root_uri( + 1, &root_uri, + ))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + let symbols = request_workspace_symbols(&client_conn, 2, "workspaceOnly"); + + let expected_doc = jrsonnet_lsp_document::Document::new( + closed_text.to_string(), + jrsonnet_lsp_document::DocVersion::new(0), + ); + let expected_uri: lsp_types::Uri = closed_uri.parse().unwrap(); + let expected_symbols = Some(jrsonnet_lsp_handlers::workspace_symbols_for_document( + &expected_doc, + &expected_uri, + "workspaceOnly", + )); + let mut actual_symbols = symbols; + if actual_symbols != expected_symbols { + for request_id in 3..=43 { + actual_symbols = request_workspace_symbols(&client_conn, request_id, "workspaceOnly"); + if actual_symbols == expected_symbols { + break; + } + thread::sleep(Duration::from_millis(25)); + } + } + assert_eq!(actual_symbols, expected_symbols); + + client_conn + .sender + .send(Message::Request(shutdown_request(3))) + .unwrap(); + let _ = recv_response(&client_conn, 3); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); 
+} + +#[test] +fn test_workspace_symbol_ranks_exact_prefix_then_substring() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + let uri = "file:///test/workspace-symbol-ranking.jsonnet"; + let text = + "local needle = 1; local has_needle_inside = 2; local needlePrefix = 3; local zneedle = 4; needle"; + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text))) + .unwrap(); + let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); + + let symbols = request_workspace_symbols(&client_conn, 2, "needle"); + let expected_doc = jrsonnet_lsp_document::Document::new( + text.to_string(), + jrsonnet_lsp_document::DocVersion::new(1), + ); + let expected_uri: lsp_types::Uri = uri.parse().unwrap(); + let expected_all = jrsonnet_lsp_handlers::workspace_symbols_for_document( + &expected_doc, + &expected_uri, + "needle", + ); + let expected_symbols = vec![ + expected_all + .iter() + .find(|symbol| symbol.name == "needle") + .expect("expected exact match symbol") + .clone(), + expected_all + .iter() + .find(|symbol| symbol.name == "needlePrefix") + .expect("expected prefix match symbol") + .clone(), + expected_all + .iter() + .find(|symbol| symbol.name == "zneedle") + .expect("expected shorter substring symbol") + .clone(), + expected_all + .iter() + .find(|symbol| symbol.name == "has_needle_inside") + .expect("expected longer substring symbol") + .clone(), + ]; + assert_eq!(symbols, Some(expected_symbols)); + + client_conn + .sender + .send(Message::Request(shutdown_request(3))) + .unwrap(); + let _ = recv_response(&client_conn, 3); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + 
.expect("Server thread should exit cleanly"); +} + +#[test] +fn test_workspace_symbol_caps_results_with_deterministic_order() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + let uri = "file:///test/workspace-symbol-cap.jsonnet"; + let text = { + let locals = (0..140) + .rev() + .map(|idx| format!("local capsymbol{idx:03} = {idx};")) + .collect::>() + .join(" "); + format!("{locals} capsymbol000") + }; + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, &text))) + .unwrap(); + let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); + + let symbols = request_workspace_symbols(&client_conn, 2, "capsymbol"); + let expected_doc = + jrsonnet_lsp_document::Document::new(text, jrsonnet_lsp_document::DocVersion::new(1)); + let expected_uri: lsp_types::Uri = uri.parse().unwrap(); + let expected_all = jrsonnet_lsp_handlers::workspace_symbols_for_document( + &expected_doc, + &expected_uri, + "capsymbol", + ); + let expected_symbols = (0..128) + .map(|idx| format!("capsymbol{idx:03}")) + .map(|name| { + expected_all + .iter() + .find(|symbol| symbol.name == name) + .expect("expected symbol to exist") + .clone() + }) + .collect::>(); + assert_eq!(symbols, Some(expected_symbols)); + + client_conn + .sender + .send(Message::Request(shutdown_request(3))) + .unwrap(); + let _ = recv_response(&client_conn, 3); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_find_transitive_importers_returns_sorted_uris() { + let tmp = TempDir::new().expect("tempdir should be created"); + let lib_path = 
tmp.path().join("lib.jsonnet"); + let a_path = tmp.path().join("a.jsonnet"); + let b_path = tmp.path().join("b.jsonnet"); + fs::write(&lib_path, "{ target: 1 }").expect("lib should be written"); + fs::write(&a_path, "local lib = import 'lib.jsonnet'; lib.target") + .expect("a should be written"); + fs::write(&b_path, "local lib = import 'lib.jsonnet'; lib.target") + .expect("b should be written"); + + let lib_uri = file_uri(&lib_path.canonicalize().expect("lib should canonicalize")); + let a_uri = file_uri(&a_path.canonicalize().expect("a should canonicalize")); + let b_uri = file_uri(&b_path.canonicalize().expect("b should canonicalize")); + + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + for (uri, text) in [ + (&lib_uri, "{ target: 1 }"), + (&a_uri, "local lib = import 'lib.jsonnet'; lib.target"), + (&b_uri, "local lib = import 'lib.jsonnet'; lib.target"), + ] { + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text))) + .unwrap(); + let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); + } + + client_conn + .sender + .send(Message::Request(execute_command_request( + 2, + "jrsonnet.findTransitiveImporters", + vec![serde_json::Value::String(lib_uri.clone())], + ))) + .unwrap(); + let response = recv_response(&client_conn, 2); + assert!(response.error.is_none(), "Command should succeed"); + assert_eq!( + response.result.expect("command should return result"), + json!({ + "file": lib_uri, + "transitiveImporters": [a_uri, b_uri], + }) + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(3))) + .unwrap(); + let _ = recv_response(&client_conn, 3); + client_conn + .sender + 
.send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_did_close_preserves_import_graph_for_references() { + let tmp = TempDir::new().expect("tempdir should be created"); + let lib_path = tmp.path().join("lib.jsonnet"); + let main_path = tmp.path().join("main.jsonnet"); + + let lib_text = "local target = 1; target"; + let main_text = "local lib = import 'lib.jsonnet'; lib.target"; + fs::write(&lib_path, lib_text).expect("lib should be written"); + fs::write(&main_path, main_text).expect("main should be written"); + + let lib_uri = file_uri(&lib_path.canonicalize().expect("lib should canonicalize")); + let main_uri = file_uri(&main_path.canonicalize().expect("main should canonicalize")); + + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + client_conn + .sender + .send(Message::Notification(did_open_notification( + &lib_uri, lib_text, + ))) + .unwrap(); + let _ = recv_publish_diagnostics_for_uri(&client_conn, &lib_uri, Duration::from_secs(5)); + + client_conn + .sender + .send(Message::Notification(did_open_notification( + &main_uri, main_text, + ))) + .unwrap(); + let _ = recv_publish_diagnostics_for_uri(&client_conn, &main_uri, Duration::from_secs(5)); + + client_conn + .sender + .send(Message::Notification(did_close_notification(&main_uri))) + .unwrap(); + let closed_diagnostics = + recv_publish_diagnostics_for_uri(&client_conn, &main_uri, Duration::from_secs(5)); + assert_eq!( + closed_diagnostics, + lsp_types::PublishDiagnosticsParams { + uri: main_uri.parse().unwrap(), + diagnostics: Vec::new(), + version: None, + } + ); + + client_conn + .sender + 
.send(Message::Request(execute_command_request( + 2, + "jrsonnet.findTransitiveImporters", + vec![serde_json::Value::String(lib_uri.clone())], + ))) + .unwrap(); + let response = recv_response(&client_conn, 2); + assert!(response.error.is_none(), "Command should succeed"); + let transitive_importers = response.result.expect("command should return result"); + assert_eq!( + transitive_importers, + json!({ + "file": lib_uri, + "transitiveImporters": [main_uri], + }) + ); + + client_conn + .sender + .send(Message::Request(references_request( + 3, &lib_uri, 0, 6, false, + ))) + .unwrap(); + let response = recv_response(&client_conn, 3); + assert!(response.error.is_none(), "References should succeed"); + let references: Option> = + serde_json::from_value(response.result.expect("should have result")).unwrap(); + let references = references.unwrap_or_default(); + assert_eq!( + references, + vec![location(&lib_uri, 18, 24), location(&main_uri, 38, 44)] + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(4))) + .unwrap(); + let _ = recv_response(&client_conn, 4); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_cross_file_rename_updates_definition_and_importers() { + let tmp = TempDir::new().expect("tempdir should be created"); + let lib_path = tmp.path().join("lib.jsonnet"); + let main_path = tmp.path().join("main.jsonnet"); + + fs::write(&lib_path, "{ helper: function(x) x * 2 }").expect("lib should be written"); + fs::write( + &main_path, + "local lib = import 'lib.jsonnet'; lib.helper(1) + lib.helper(2)", + ) + .expect("main should be written"); + + let lib_uri = file_uri(&lib_path.canonicalize().expect("lib should canonicalize")); + let main_uri = file_uri(&main_path.canonicalize().expect("main should canonicalize")); + + let (client_conn, server_conn) = Connection::memory(); + let server_thread = 
run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + client_conn + .sender + .send(Message::Notification(did_open_notification( + &lib_uri, + "{ helper: function(x) x * 2 }", + ))) + .unwrap(); + + client_conn + .sender + .send(Message::Notification( + did_change_watched_files_notification(vec![FileEvent { + uri: main_uri.parse().unwrap(), + typ: FileChangeType::CREATED, + }]), + )) + .unwrap(); + + client_conn + .sender + .send(Message::Request(rename_request(2, &lib_uri, 0, 2, "util"))) + .unwrap(); + let response = recv_response(&client_conn, 2); + assert!(response.error.is_none(), "Rename should succeed"); + + let edit: Option = + serde_json::from_value(response.result.expect("should have result")).unwrap(); + let edit = edit.expect("rename should produce workspace edit"); + let changes = edit.changes.expect("workspace edit should include changes"); + + let lib_edits = changes + .iter() + .find_map(|(uri, edits)| (uri.as_str() == lib_uri).then_some(edits)) + .expect("lib file should be edited"); + assert_eq!(lib_edits.len(), 1, "lib should have one definition rename"); + assert_eq!(lib_edits[0].new_text, "util"); + + let main_edits = changes + .iter() + .find_map(|(uri, edits)| (uri.as_str() == main_uri).then_some(edits)) + .expect("main importer should be edited"); + assert_eq!( + main_edits.len(), + 2, + "main should rename both field references" + ); + assert!(main_edits.iter().all(|edit| edit.new_text == "util")); + + client_conn + .sender + .send(Message::Request(shutdown_request(3))) + .unwrap(); + let _ = recv_response(&client_conn, 3); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn 
test_cross_file_references_resolve_jpath_importers() { + let tmp = TempDir::new().expect("tempdir should be created"); + let jpath_dir = tmp.path().join("jpath"); + let workspace_dir = tmp.path().join("workspace"); + fs::create_dir_all(&jpath_dir).expect("jpath directory should be created"); + fs::create_dir_all(&workspace_dir).expect("workspace directory should be created"); + + let lib_path = jpath_dir.join("lib.libsonnet"); + let main_path = workspace_dir.join("main.jsonnet"); + let lib_text = "local target = 1; target"; + let main_text = r#"local lib = import "lib.libsonnet"; lib.target"#; + fs::write(&lib_path, lib_text).expect("lib should be written"); + fs::write(&main_path, main_text).expect("main should be written"); + + let lib_uri = file_uri(&lib_path.canonicalize().expect("lib should canonicalize")); + let main_uri = file_uri(&main_path.canonicalize().expect("main should canonicalize")); + + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request_with_options( + 1, + json!({ + "jpath": [jpath_dir.to_string_lossy().to_string()], + }), + ))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + client_conn + .sender + .send(Message::Notification(did_open_notification( + &lib_uri, lib_text, + ))) + .unwrap(); + let _ = recv_publish_diagnostics_for_uri(&client_conn, &lib_uri, Duration::from_secs(5)); + + client_conn + .sender + .send(Message::Notification(did_open_notification( + &main_uri, main_text, + ))) + .unwrap(); + let _ = recv_publish_diagnostics_for_uri(&client_conn, &main_uri, Duration::from_secs(5)); + + client_conn + .sender + .send(Message::Request(references_request( + 2, &lib_uri, 0, 6, false, + ))) + .unwrap(); + let response = recv_response(&client_conn, 2); + assert!(response.error.is_none(), "References should succeed"); + 
let references: Option> = + serde_json::from_value(response.result.expect("should have result")).unwrap(); + assert_eq!( + references.unwrap_or_default(), + vec![location(&lib_uri, 18, 24), location(&main_uri, 40, 46)] + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(3))) + .unwrap(); + let _ = recv_response(&client_conn, 3); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn test_cross_file_rename_updates_jpath_importers() { + let tmp = TempDir::new().expect("tempdir should be created"); + let jpath_dir = tmp.path().join("jpath"); + let workspace_dir = tmp.path().join("workspace"); + fs::create_dir_all(&jpath_dir).expect("jpath directory should be created"); + fs::create_dir_all(&workspace_dir).expect("workspace directory should be created"); + + let lib_path = jpath_dir.join("lib.libsonnet"); + let main_path = workspace_dir.join("main.jsonnet"); + let lib_text = "{ helper: function(x) x * 2 }"; + let main_text = r#"local lib = import "lib.libsonnet"; lib.helper(1) + lib.helper(2)"#; + fs::write(&lib_path, lib_text).expect("lib should be written"); + fs::write(&main_path, main_text).expect("main should be written"); + + let lib_uri = file_uri(&lib_path.canonicalize().expect("lib should canonicalize")); + let main_uri = file_uri(&main_path.canonicalize().expect("main should canonicalize")); + + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request_with_options( + 1, + json!({ + "jpath": [jpath_dir.to_string_lossy().to_string()], + }), + ))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + client_conn + .sender + .send(Message::Notification(did_open_notification( + &lib_uri, lib_text, + ))) + 
.unwrap(); + let _ = recv_publish_diagnostics_for_uri(&client_conn, &lib_uri, Duration::from_secs(5)); + + client_conn + .sender + .send(Message::Notification(did_open_notification( + &main_uri, main_text, + ))) + .unwrap(); + let _ = recv_publish_diagnostics_for_uri(&client_conn, &main_uri, Duration::from_secs(5)); + + client_conn + .sender + .send(Message::Request(rename_request(2, &lib_uri, 0, 2, "util"))) + .unwrap(); + let response = recv_response(&client_conn, 2); + assert!(response.error.is_none(), "Rename should succeed"); + + let edit: Option = + serde_json::from_value(response.result.expect("should have result")).unwrap(); + let edit = edit.expect("rename should produce workspace edit"); + + let mut expected_changes = std::collections::HashMap::new(); + expected_changes.insert( + lib_uri.parse().unwrap(), + vec![lsp_types::TextEdit { + range: lsp_types::Range { + start: Position { + line: 0, + character: 2, + }, + end: Position { + line: 0, + character: 8, + }, + }, + new_text: "util".to_string(), + }], + ); + expected_changes.insert( + main_uri.parse().unwrap(), + vec![ + lsp_types::TextEdit { + range: lsp_types::Range { + start: Position { + line: 0, + character: 40, + }, + end: Position { + line: 0, + character: 46, + }, + }, + new_text: "util".to_string(), + }, + lsp_types::TextEdit { + range: lsp_types::Range { + start: Position { + line: 0, + character: 56, + }, + end: Position { + line: 0, + character: 62, + }, + }, + new_text: "util".to_string(), + }, + ], + ); + assert_eq!( + edit, + lsp_types::WorkspaceEdit { + changes: Some(expected_changes), + document_changes: None, + change_annotations: None, + } + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(3))) + .unwrap(); + let _ = recv_response(&client_conn, 3); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + +#[test] +fn 
test_navigation_resolves_jpath_imports_from_graph() { + let tmp = TempDir::new().expect("tempdir should be created"); + let jpath_dir = tmp.path().join("jpath"); + let workspace_dir = tmp.path().join("workspace"); + fs::create_dir_all(&jpath_dir).expect("jpath directory should be created"); + fs::create_dir_all(&workspace_dir).expect("workspace directory should be created"); + + let lib_path = jpath_dir.join("lib.libsonnet"); + let main_path = workspace_dir.join("main.jsonnet"); + let lib_text = "{ helper: 42 }"; + let main_text = r#"local lib = import "lib.libsonnet"; lib.helper"#; + fs::write(&lib_path, lib_text).expect("lib should be written"); + fs::write(&main_path, main_text).expect("main should be written"); + + let lib_uri = file_uri(&lib_path.canonicalize().expect("lib should canonicalize")); + let main_uri = file_uri(&main_path.canonicalize().expect("main should canonicalize")); + + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request_with_options( + 1, + json!({ + "jpath": [jpath_dir.to_string_lossy().to_string()], + }), + ))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + client_conn + .sender + .send(Message::Notification(did_open_notification( + &lib_uri, lib_text, + ))) + .unwrap(); + let _ = recv_publish_diagnostics_for_uri(&client_conn, &lib_uri, Duration::from_secs(5)); + + client_conn + .sender + .send(Message::Notification(did_open_notification( + &main_uri, main_text, + ))) + .unwrap(); + let _ = recv_publish_diagnostics_for_uri(&client_conn, &main_uri, Duration::from_secs(5)); + + client_conn + .sender + .send(Message::Request(goto_definition_request( + 2, &main_uri, 0, 22, + ))) + .unwrap(); + let import_definition_response = recv_response(&client_conn, 2); + assert!( + import_definition_response.error.is_none(), + 
"goto definition on import path should succeed" + ); + let import_definition: Option = serde_json::from_value( + import_definition_response + .result + .expect("should have goto definition result"), + ) + .unwrap(); + assert_eq!( + import_definition, + Some(GotoDefinitionResponse::Scalar(lsp_types::Location { + uri: lib_uri.parse().expect("lib URI should parse"), + range: lsp_types::Range::default(), + })) + ); + + client_conn + .sender + .send(Message::Request(goto_definition_request( + 3, &main_uri, 0, 40, + ))) + .unwrap(); + let definition_response = recv_response(&client_conn, 3); + assert!( + definition_response.error.is_none(), + "goto definition should succeed" + ); + let definition: Option = serde_json::from_value( + definition_response + .result + .expect("should have definition result"), + ) + .unwrap(); + assert_eq!( + definition, + Some(GotoDefinitionResponse::Scalar(location(&lib_uri, 2, 8))) + ); + + client_conn + .sender + .send(Message::Request(goto_declaration_request( + 4, &main_uri, 0, 40, + ))) + .unwrap(); + let declaration_response = recv_response(&client_conn, 4); + assert!( + declaration_response.error.is_none(), + "goto declaration should succeed" + ); + let declaration: Option = serde_json::from_value( + declaration_response + .result + .expect("should have declaration result"), + ) + .unwrap(); + assert_eq!( + declaration, + Some(GotoDefinitionResponse::Scalar(location(&lib_uri, 2, 8))) + ); + + client_conn + .sender + .send(Message::Request(goto_type_definition_request( + 5, &main_uri, 0, 40, + ))) + .unwrap(); + let type_definition_response = recv_response(&client_conn, 5); + assert!( + type_definition_response.error.is_none(), + "goto type definition should succeed" + ); + let type_definition: Option = serde_json::from_value( + type_definition_response + .result + .expect("should have type definition result"), + ) + .unwrap(); + assert_eq!( + type_definition, + Some(GotoDefinitionResponse::Scalar(location(&lib_uri, 2, 8))) + ); + + 
client_conn + .sender + .send(Message::Request(shutdown_request(6))) + .unwrap(); + let _ = recv_response(&client_conn, 6); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} From 9de2fed9b629794a68797fa4a310bfd492d857fd Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Thu, 12 Feb 2026 17:09:05 +0000 Subject: [PATCH 090/210] chore(lint): ratchet lsp clippy checks in xtask - extend xtask lint to run strict cargo clippy (-D warnings) for the LSP crate set with --no-deps. - preserve fix mode by forwarding --fix/--allow-dirty/--allow-staged to clippy when requested. - clear new strict findings by converting recursive type helpers to associated fns and fixing scenario helper statement style. --- .../src/scenario_runner/helpers.rs | 2 +- .../jrsonnet-lsp/src/server/async_requests.rs | 17 ++++------ xtask/src/main.rs | 31 +++++++++++++++++++ 3 files changed, 38 insertions(+), 12 deletions(-) diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_runner/helpers.rs b/crates/jrsonnet-lsp-scenario/src/scenario_runner/helpers.rs index 5f709903..7e479399 100644 --- a/crates/jrsonnet-lsp-scenario/src/scenario_runner/helpers.rs +++ b/crates/jrsonnet-lsp-scenario/src/scenario_runner/helpers.rs @@ -204,7 +204,7 @@ fn collect_json_differences( let child_path = child_object_path(path, key); match (actual_object.get(key), expected_object.get(key)) { (Some(actual_value), Some(expected_value)) => { - collect_json_differences(&child_path, actual_value, expected_value, report) + collect_json_differences(&child_path, actual_value, expected_value, report); } (Some(actual_value), None) => push_difference( report, diff --git a/crates/jrsonnet-lsp/src/server/async_requests.rs b/crates/jrsonnet-lsp/src/server/async_requests.rs index 3970052f..cef4fccc 100644 --- a/crates/jrsonnet-lsp/src/server/async_requests.rs +++ b/crates/jrsonnet-lsp/src/server/async_requests.rs @@ -599,22 
+599,17 @@ impl AsyncRequestContext { let resolved = self.resolve_import_from_graph(from, import_path)?; let doc = self.load_document_for_path(&resolved)?; let analysis = self.analyze_document(&resolved, &doc); - let ty = self.type_for_field_path(&analysis, analysis.document_type(), fields)?; + let ty = Self::type_for_field_path(&analysis, analysis.document_type(), fields)?; Some(analysis.display(ty)) } - fn type_for_field_path( - &self, - analysis: &TypeAnalysis, - root_ty: Ty, - fields: &[String], - ) -> Option { + fn type_for_field_path(analysis: &TypeAnalysis, root_ty: Ty, fields: &[String]) -> Option { fields.iter().try_fold(root_ty, |ty, field| { - self.type_for_field(analysis, ty, field) + Self::type_for_field(analysis, ty, field) }) } - fn type_for_field(&self, analysis: &TypeAnalysis, ty: Ty, field: &str) -> Option { + fn type_for_field(analysis: &TypeAnalysis, ty: Ty, field: &str) -> Option { match analysis.get_data(ty) { TyData::Any => Some(Ty::ANY), TyData::Object(obj) => obj @@ -625,7 +620,7 @@ impl AsyncRequestContext { TyData::Union(types) => { let variants: Vec<_> = types .into_iter() - .filter_map(|variant| self.type_for_field(analysis, variant, field)) + .filter_map(|variant| Self::type_for_field(analysis, variant, field)) .collect(); if variants.is_empty() { None @@ -636,7 +631,7 @@ impl AsyncRequestContext { TyData::Sum(types) => { let variants: Vec<_> = types .into_iter() - .filter_map(|variant| self.type_for_field(analysis, variant, field)) + .filter_map(|variant| Self::type_for_field(analysis, variant, field)) .collect(); if variants.is_empty() { None diff --git a/xtask/src/main.rs b/xtask/src/main.rs index e21533c6..88c6cb82 100644 --- a/xtask/src/main.rs +++ b/xtask/src/main.rs @@ -4,6 +4,29 @@ use xshell::{cmd, Shell}; mod sourcegen; +const LSP_LINT_PACKAGES: &[&str] = &[ + "-p", + "jrsonnet-lsp", + "-p", + "jrsonnet-lsp-check", + "-p", + "jrsonnet-lsp-document", + "-p", + "jrsonnet-lsp-handlers", + "-p", + "jrsonnet-lsp-import", + "-p", + 
"jrsonnet-lsp-inference", + "-p", + "jrsonnet-lsp-scenario", + "-p", + "jrsonnet-lsp-scope", + "-p", + "jrsonnet-lsp-stdlib", + "-p", + "jrsonnet-lsp-types", +]; + #[derive(Parser)] enum Opts { /// Generate files for rowan parser @@ -80,6 +103,14 @@ fn main() -> Result<()> { Opts::Lint { fix } => { let fmt_check = if fix { None } else { Some("--check") }; cmd!(sh, "cargo fmt {fmt_check...}").run()?; + let clippy_fix = if fix { Some("--fix") } else { None }; + let allow_dirty = if fix { Some("--allow-dirty") } else { None }; + let allow_staged = if fix { Some("--allow-staged") } else { None }; + cmd!( + sh, + "cargo clippy {LSP_LINT_PACKAGES...} --all-targets --no-deps {clippy_fix...} {allow_dirty...} {allow_staged...} -- -D warnings" + ) + .run()?; Ok(()) } Opts::TestCBindings { From 43fd37856386ffbf8ec5025a59f45d509ab46971 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Thu, 12 Feb 2026 17:30:26 +0000 Subject: [PATCH 091/210] Improve hover type detail and import root resolution --- crates/jrsonnet-lsp-handlers/src/hover.rs | 22 +++++++++----- crates/jrsonnet-lsp-inference/src/analysis.rs | 30 +++++++++++++------ .../jrsonnet-lsp/src/server/async_requests.rs | 27 +++++++++++++++-- .../function_length_assert_narrows_arity.yaml | 2 +- ..._function_refines_impossible_branches.yaml | 2 +- ...known_function_allows_typed_call_site.yaml | 2 +- ...length_unknown_function_refines_arity.yaml | 2 +- ...negated_membership_or_is_conservative.yaml | 2 +- .../hover_new_import_invalid_syntax.yaml | 2 +- .../scenarios/hover/object_variable.yaml | 2 +- .../runner/hover_cjk_import_shape.yaml | 2 +- .../hover_import_field_method_type.yaml | 2 +- .../hover_non_identifier_field_names.yaml | 2 +- 13 files changed, 70 insertions(+), 29 deletions(-) diff --git a/crates/jrsonnet-lsp-handlers/src/hover.rs b/crates/jrsonnet-lsp-handlers/src/hover.rs index b41b40c5..1ab27804 100644 --- a/crates/jrsonnet-lsp-handlers/src/hover.rs +++ b/crates/jrsonnet-lsp-handlers/src/hover.rs @@ -87,7 +87,7 @@ 
fn inferred_type_markdown( ) -> Option { let ast = document.ast(); let ty = analysis.type_at_position(ast.syntax(), offset.into())?; - Some(format!("`{}`", analysis.display(ty))) + Some(format!("`{}`", analysis.display_for_hover(ty))) } fn merge_markdown_sections( @@ -119,7 +119,7 @@ fn check_local_hover( let ast = document.ast(); let mut inferred_type = analysis .type_at_position(ast.syntax(), offset.into()) - .map(|ty| analysis.display(ty)); + .map(|ty| analysis.display_for_hover(ty)); match &result { DefinitionResult::ImportField { path, fields } => { @@ -134,7 +134,13 @@ fn check_local_hover( inferred_type = definition_value_type(document, analysis, range); } } - DefinitionResult::Import(_) => {} + DefinitionResult::Import(path) => { + if let Some(resolver) = import_field_type_resolver { + if let Some(resolved_type) = resolver(path, &[]) { + inferred_type = Some(resolved_type); + } + } + } } let type_str = inferred_type @@ -254,7 +260,7 @@ fn definition_value_type( Bind::BindFunction(bind) => bind.value()?, }; let ty = analysis.type_for_range(value.syntax().text_range())?; - Some(analysis.display(ty)) + Some(analysis.display_for_hover(ty)) } /// Check if the token is a stdlib function call and return hover info. 
@@ -377,7 +383,7 @@ mod tests { Some(Hover { contents: HoverContents::Markup(MarkupContent { kind: MarkupKind::Markdown, value }), range: None - }) if value == "`function(x)`" + }) if value == "`function(x: any)`" ); } @@ -392,7 +398,7 @@ mod tests { "local add(a, b) = a + b; add(1, 2)", 0, 25, - "`function(a, b)`\n\n```jsonnet\nlocal add(a, b) = a + b; add(1, 2)\n```" + "`(a: any, b: any) -> number`\n\n```jsonnet\nlocal add(a, b) = a + b; add(1, 2)\n```" )] #[case( "local arr = [1, 2, 3]; arr", @@ -404,7 +410,7 @@ mod tests { "local obj = { a: 1 }; obj", 0, 6, - "`{ a }`\n\n```jsonnet\nlocal obj = { a: 1 }; obj\n```" + "`{ a: number }`\n\n```jsonnet\nlocal obj = { a: 1 }; obj\n```" )] fn test_local_hover( #[case] code: &str, @@ -477,7 +483,7 @@ mod tests { "local obj = { a: 1, b: \"hello\" }; obj", 0, 35, - "`{ a, b }`\n\n```jsonnet\nlocal obj = { a: 1, b: \"hello\" }; obj\n```" + "`{ a: number, b: string }`\n\n```jsonnet\nlocal obj = { a: 1, b: \"hello\" }; obj\n```" )] #[case("42", 0, 0, "`number`")] #[case("\"hello\"", 0, 1, "`string`")] diff --git a/crates/jrsonnet-lsp-inference/src/analysis.rs b/crates/jrsonnet-lsp-inference/src/analysis.rs index e13512c3..f8a7034e 100644 --- a/crates/jrsonnet-lsp-inference/src/analysis.rs +++ b/crates/jrsonnet-lsp-inference/src/analysis.rs @@ -8,7 +8,8 @@ use std::sync::Arc; use jrsonnet_lsp_document::Document; use jrsonnet_lsp_types::{ - is_subtype_ty, FunctionData, GlobalTy, GlobalTyStore, MutStore, ObjectData, Ty, TyData, TySubst, + is_subtype_ty, DisplayContext, FunctionData, GlobalTy, GlobalTyStore, MutStore, ObjectData, Ty, + TyData, TySubst, }; use jrsonnet_rowan_parser::SyntaxNode; use parking_lot::RwLock; @@ -87,11 +88,9 @@ impl TypeAnalysis { let mut expr_types = FxHashMap::default(); - let doc_ty = if let Some(expr) = ast.expr() { + let doc_ty = ast.expr().map_or(Ty::ANY, |expr| { analyze_and_record(&expr, &mut env, &mut expr_types) - } else { - Ty::ANY - }; + }); Self::finalize_analysis(global, env.into_store(), 
expr_types, doc_ty) } @@ -107,11 +106,9 @@ impl TypeAnalysis { let mut expr_types = FxHashMap::default(); - let doc_ty = if let Some(expr) = ast.expr() { + let doc_ty = ast.expr().map_or(Ty::ANY, |expr| { analyze_and_record(&expr, &mut env, &mut expr_types) - } else { - Ty::ANY - }; + }); Self::finalize_analysis(global, env.into_store(), expr_types, doc_ty) } @@ -194,6 +191,21 @@ impl TypeAnalysis { self.store.read().display(ty) } + /// Display a Ty in detailed form suitable for hover text. + pub fn display_for_hover(&self, ty: Ty) -> String { + let store = self.store.read(); + let rendered = { + let mut context = DisplayContext::detailed(&*store); + // Keep hover output readable for very large nested types. + context.max_depth = 6; + context.max_items = 8; + context.max_union_members = 8; + context.format(ty) + }; + drop(store); + rendered + } + /// Check if `subtype` is a subtype of `supertype`. pub fn is_subtype(&self, subtype: Ty, supertype: Ty) -> bool { is_subtype_ty(&*self.store.read(), subtype, supertype) diff --git a/crates/jrsonnet-lsp/src/server/async_requests.rs b/crates/jrsonnet-lsp/src/server/async_requests.rs index cef4fccc..049ab5d3 100644 --- a/crates/jrsonnet-lsp/src/server/async_requests.rs +++ b/crates/jrsonnet-lsp/src/server/async_requests.rs @@ -590,17 +590,40 @@ impl AsyncRequestContext { .and_then(|entry| entry.resolved_path.clone()) } + fn resolve_import_from_fs(from: &CanonicalPath, import: &str) -> Option { + let import_path = std::path::Path::new(import); + let candidate = if import_path.is_absolute() { + import_path.to_path_buf() + } else if import.starts_with("./") || import.starts_with("../") { + from.as_path().parent()?.join(import_path) + } else { + return None; + }; + + CanonicalPath::try_from_path(&candidate).ok() + } + + fn resolve_import_path(&self, from: &CanonicalPath, import: &str) -> Option { + if import.starts_with("./") || import.starts_with("../") || import.starts_with('/') { + return Self::resolve_import_from_fs(from, 
import) + .or_else(|| self.resolve_import_from_graph(from, import)); + } + + self.resolve_import_from_graph(from, import) + .or_else(|| Self::resolve_import_from_fs(from, import)) + } + fn resolve_import_field_type( &self, from: &CanonicalPath, import_path: &str, fields: &[String], ) -> Option { - let resolved = self.resolve_import_from_graph(from, import_path)?; + let resolved = self.resolve_import_path(from, import_path)?; let doc = self.load_document_for_path(&resolved)?; let analysis = self.analyze_document(&resolved, &doc); let ty = Self::type_for_field_path(&analysis, analysis.document_type(), fields)?; - Some(analysis.display(ty)) + Some(analysis.display_for_hover(ty)) } fn type_for_field_path(analysis: &TypeAnalysis, root_ty: Ty, fields: &[String]) -> Option { diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/function_length_assert_narrows_arity.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/function_length_assert_narrows_arity.yaml index fda80ed7..38f303cc 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/function_length_assert_narrows_arity.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/function_length_assert_narrows_arity.yaml @@ -15,4 +15,4 @@ steps: file: main.jsonnet checks: - at: m1 - type: 'function(arg0, arg1)' + type: 'function(arg0: any, arg1: any)' diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_known_function_refines_impossible_branches.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_known_function_refines_impossible_branches.yaml index aaea2023..e0e520e4 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_known_function_refines_impossible_branches.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_known_function_refines_impossible_branches.yaml @@ -21,5 +21,5 @@ steps: - at: m2 type: 'never' - at: m3 - type: 'function(x, 
y)' + type: '(x: any, y: any) -> number' match: exact diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_unknown_function_allows_typed_call_site.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_unknown_function_allows_typed_call_site.yaml index ca0918e8..1ca31e3d 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_unknown_function_allows_typed_call_site.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_unknown_function_allows_typed_call_site.yaml @@ -19,7 +19,7 @@ steps: file: main.jsonnet checks: - at: m1 - type: 'function(arg0, arg1)' + type: 'function(arg0: any, arg1: any)' match: exact - at: m2 type: 'function()' diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_unknown_function_refines_arity.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_unknown_function_refines_arity.yaml index 0ad6ae78..488d4a78 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_unknown_function_refines_arity.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_unknown_function_refines_arity.yaml @@ -19,6 +19,6 @@ steps: file: main.jsonnet checks: - at: m1 - type: 'function(arg0, arg1)' + type: 'function(arg0: any, arg1: any)' - at: m2 type: 'function()' diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/negated_membership_or_is_conservative.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/negated_membership_or_is_conservative.yaml index b534e9b1..93870d76 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/negated_membership_or_is_conservative.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/negated_membership_or_is_conservative.yaml @@ -16,4 +16,4 @@ steps: file: main.jsonnet checks: - at: m1 - type: 'object' + type: '{ bar: never, 
foo: never, ... }' diff --git a/crates/jrsonnet-lsp/tests/scenarios/hover/hover_new_import_invalid_syntax.yaml b/crates/jrsonnet-lsp/tests/scenarios/hover/hover_new_import_invalid_syntax.yaml index 5e7bae27..4ff30de3 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/hover/hover_new_import_invalid_syntax.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/hover/hover_new_import_invalid_syntax.yaml @@ -26,5 +26,5 @@ steps: at: m1 - step: expectHoverType request: hover_new - type: function(input) + type: '(input: any) -> { value: any }' match: exact diff --git a/crates/jrsonnet-lsp/tests/scenarios/hover/object_variable.yaml b/crates/jrsonnet-lsp/tests/scenarios/hover/object_variable.yaml index f89a2af3..694dac3d 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/hover/object_variable.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/hover/object_variable.yaml @@ -13,7 +13,7 @@ steps: contents: kind: markdown value: |- - `{ a }` + `{ a: number }` ```jsonnet local obj = { a: 1 }; diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_cjk_import_shape.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_cjk_import_shape.yaml index 6a3a7de3..668e5921 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_cjk_import_shape.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_cjk_import_shape.yaml @@ -17,5 +17,5 @@ steps: at: m1 - step: expectHoverType request: importedTypeHasCjkField - type: '{ æ°´ }' + type: '{ æ°´: number }' match: exact diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_import_field_method_type.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_import_field_method_type.yaml index 31ab4282..42275d06 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_import_field_method_type.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_import_field_method_type.yaml @@ -18,5 +18,5 @@ steps: at: m1 - step: expectHoverType request: importedMethodHover - type: function(x) + type: '(x: any) -> { y: number }' match: exact 
diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_non_identifier_field_names.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_non_identifier_field_names.yaml index 1765bad5..6ad2eaf0 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_non_identifier_field_names.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_non_identifier_field_names.yaml @@ -15,5 +15,5 @@ steps: at: m1 - step: expectHoverType request: objectTypeHasSpacedKey - type: '{ foo bar, the-field }' + type: '{ foo bar: number, the-field: number }' match: exact From c8fffc7a60a4262d895184ae69fdf1c2a78f2a89 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Thu, 12 Feb 2026 17:55:05 +0000 Subject: [PATCH 092/210] chore(lint): move lsp clippy ratchet to workspace lints --- Cargo.toml | 6 +- crates/jrsonnet-lsp-document/src/types.rs | 5 +- .../jrsonnet-lsp-handlers/src/code_action.rs | 18 ++- crates/jrsonnet-lsp-handlers/src/code_lens.rs | 2 +- .../jrsonnet-lsp-handlers/src/formatting.rs | 2 +- crates/jrsonnet-lsp-handlers/src/symbols.rs | 81 +++++------- crates/jrsonnet-lsp-import/src/graph.rs | 13 +- .../jrsonnet-lsp-inference/src/const_eval.rs | 2 +- crates/jrsonnet-lsp-inference/src/expr.rs | 123 ++++++++---------- crates/jrsonnet-lsp-inference/src/flow.rs | 13 +- crates/jrsonnet-lsp-inference/src/helpers.rs | 8 +- crates/jrsonnet-lsp-inference/src/manager.rs | 54 ++++---- crates/jrsonnet-lsp-inference/src/object.rs | 35 ++--- crates/jrsonnet-lsp-inference/src/poly.rs | 3 +- .../src/scenario_runner/transport.rs | 14 +- .../src/scenario_script/markers.rs | 26 ++-- crates/jrsonnet-lsp-scope/src/bindings.rs | 2 +- crates/jrsonnet-lsp-scope/src/resolver.rs | 6 +- crates/jrsonnet-lsp/src/config.rs | 7 +- crates/jrsonnet-lsp/src/server.rs | 9 +- .../jrsonnet-lsp/src/server/async_requests.rs | 26 ++-- 21 files changed, 216 insertions(+), 239 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 32a9628b..a8e1166b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -155,19 +155,19 @@ 
similar_names = "allow" # Pre-existing issues in jrsonnet crates unnecessary_semicolon = "allow" too_long_first_doc_paragraph = "allow" -map_unwrap_or = "allow" +map_unwrap_or = "warn" option_if_let_else = "allow" manual_repeat_n = "allow" mem_replace_option_with_some = "allow" unnecessary_literal_bound = "allow" -unnecessary_map_or = "allow" +unnecessary_map_or = "warn" collapsible_match = "allow" doc_markdown = "allow" items_after_statements = "allow" manual_ignore_case_cmp = "allow" manual_midpoint = "allow" needless_borrows_for_generic_args = "allow" -needless_continue = "allow" +needless_continue = "warn" ref_option = "allow" single_component_path_imports = "allow" too_many_lines = "allow" diff --git a/crates/jrsonnet-lsp-document/src/types.rs b/crates/jrsonnet-lsp-document/src/types.rs index b23ab53a..b8fe1581 100644 --- a/crates/jrsonnet-lsp-document/src/types.rs +++ b/crates/jrsonnet-lsp-document/src/types.rs @@ -147,10 +147,7 @@ impl CanonicalPath { let decoded = path_str.replace("%20", " "); let path = PathBuf::from(decoded); // Try to canonicalize, but if it fails (file doesn't exist yet), use as-is - match path.canonicalize() { - Ok(canonical) => Some(Self(canonical)), - Err(_) => Some(Self(path)), - } + Some(Self(path.canonicalize().unwrap_or(path))) } /// Convert to a file URI. 
diff --git a/crates/jrsonnet-lsp-handlers/src/code_action.rs b/crates/jrsonnet-lsp-handlers/src/code_action.rs index 3c142500..5ce9a528 100644 --- a/crates/jrsonnet-lsp-handlers/src/code_action.rs +++ b/crates/jrsonnet-lsp-handlers/src/code_action.rs @@ -545,22 +545,20 @@ fn range_overlaps(a: Range, b: Range) -> bool { } fn wants_quickfix(context: &CodeActionContext) -> bool { - match &context.only { - None => true, - Some(kinds) => kinds + context.only.as_ref().is_none_or(|kinds| { + kinds .iter() - .any(|kind| kind.as_str().starts_with(CodeActionKind::QUICKFIX.as_str())), - } + .any(|kind| kind.as_str().starts_with(CodeActionKind::QUICKFIX.as_str())) + }) } fn wants_fix_all(context: &CodeActionContext) -> bool { - match &context.only { - None => true, - Some(kinds) => kinds.iter().any(|kind| { + context.only.as_ref().is_none_or(|kinds| { + kinds.iter().any(|kind| { kind.as_str() .starts_with(CodeActionKind::SOURCE_FIX_ALL.as_str()) - }), - } + }) + }) } fn unused_variable_action( diff --git a/crates/jrsonnet-lsp-handlers/src/code_lens.rs b/crates/jrsonnet-lsp-handlers/src/code_lens.rs index fe8303d4..80ee4cb9 100644 --- a/crates/jrsonnet-lsp-handlers/src/code_lens.rs +++ b/crates/jrsonnet-lsp-handlers/src/code_lens.rs @@ -286,7 +286,7 @@ fn type_lenses(document: &Document, analysis: &TypeAnalysis) -> Vec { }; // Check if it's a "complex" expression (object, array, function call) - let is_complex = value.expr_base().map_or(false, |base| { + let is_complex = value.expr_base().is_some_and(|base| { matches!( base.syntax().kind(), SyntaxKind::EXPR_OBJECT diff --git a/crates/jrsonnet-lsp-handlers/src/formatting.rs b/crates/jrsonnet-lsp-handlers/src/formatting.rs index 5f5ab031..4a76fe4f 100644 --- a/crates/jrsonnet-lsp-handlers/src/formatting.rs +++ b/crates/jrsonnet-lsp-handlers/src/formatting.rs @@ -301,7 +301,7 @@ fn run_formatter( for name in ["jrsonnet-fmt", "jsonnetfmt"] { match try_run_formatter_binary(name, &args, None, input) { Ok(result) => return Ok(result), 
- Err(err) if err.is_not_found() => continue, + Err(err) if err.is_not_found() => {} Err(err) => { if first_error.is_none() { first_error = Some(err); diff --git a/crates/jrsonnet-lsp-handlers/src/symbols.rs b/crates/jrsonnet-lsp-handlers/src/symbols.rs index fac8da3a..e3206d1a 100644 --- a/crates/jrsonnet-lsp-handlers/src/symbols.rs +++ b/crates/jrsonnet-lsp-handlers/src/symbols.rs @@ -112,29 +112,27 @@ fn process_bind_destruct( let range = bind.syntax().text_range(); // Check if the value is a function - let (kind, children) = if let Some(value_expr) = bind.value() { - if let Some(base) = value_expr.expr_base() { - match &base { - ExprBase::ExprFunction(_) => (SymbolKind::FUNCTION, None), - ExprBase::ExprObject(obj) => { - let children = process_object(obj, text, line_index); - ( - SymbolKind::OBJECT, - if children.is_empty() { - None - } else { - Some(children) - }, - ) - } - _ => (SymbolKind::VARIABLE, None), - } - } else { - (SymbolKind::VARIABLE, None) - } - } else { - (SymbolKind::VARIABLE, None) - }; + let (kind, children) = bind + .value() + .map_or((SymbolKind::VARIABLE, None), |value_expr| { + value_expr + .expr_base() + .map_or((SymbolKind::VARIABLE, None), |base| match &base { + ExprBase::ExprFunction(_) => (SymbolKind::FUNCTION, None), + ExprBase::ExprObject(obj) => { + let children = process_object(obj, text, line_index); + ( + SymbolKind::OBJECT, + if children.is_empty() { + None + } else { + Some(children) + }, + ) + } + _ => (SymbolKind::VARIABLE, None), + }) + }); create_symbol(name, kind, range, range, line_index, text, children) } @@ -149,15 +147,13 @@ fn process_bind_function( let range = bind.syntax().text_range(); // Get parameter names for detail - let detail = if let Some(params) = bind.params() { + let detail = bind.params().map(|params| { let param_names: Vec<_> = params .params() .filter_map(|p| p.destruct().and_then(|d| get_destruct_name(&d))) .collect(); - Some(format!("({})", param_names.join(", "))) - } else { - None - }; + 
format!("({})", param_names.join(", ")) + }); build_document_symbol( name, @@ -246,7 +242,7 @@ fn process_field_normal( let range = field.syntax().text_range(); // Check if the value is an object (for nested symbols) - let children = if let Some(expr) = field.expr() { + let children = field.expr().and_then(|expr| { if let Some(ExprBase::ExprObject(obj)) = expr.expr_base() { let children = process_object(&obj, text, line_index); if children.is_empty() { @@ -257,25 +253,18 @@ fn process_field_normal( } else { None } - } else { - None - }; + }); // Determine kind based on value - let kind = if let Some(expr) = field.expr() { - if let Some(base) = expr.expr_base() { - match base { + let kind = field.expr().map_or(SymbolKind::FIELD, |expr| { + expr.expr_base() + .map_or(SymbolKind::FIELD, |base| match base { ExprBase::ExprFunction(_) => SymbolKind::FUNCTION, ExprBase::ExprObject(_) => SymbolKind::OBJECT, ExprBase::ExprArray(_) => SymbolKind::ARRAY, _ => SymbolKind::FIELD, - } - } else { - SymbolKind::FIELD - } - } else { - SymbolKind::FIELD - }; + }) + }); create_symbol(name, kind, range, range, line_index, text, children) } @@ -290,15 +279,13 @@ fn process_field_method( let range = method.syntax().text_range(); // Get parameter names for detail - let detail = if let Some(params) = method.params_desc() { + let detail = method.params_desc().map(|params| { let param_names: Vec<_> = params .params() .filter_map(|p| p.destruct().and_then(|d| get_destruct_name(&d))) .collect(); - Some(format!("({})", param_names.join(", "))) - } else { - None - }; + format!("({})", param_names.join(", ")) + }); build_document_symbol( name, diff --git a/crates/jrsonnet-lsp-import/src/graph.rs b/crates/jrsonnet-lsp-import/src/graph.rs index 57b01c42..312742b3 100644 --- a/crates/jrsonnet-lsp-import/src/graph.rs +++ b/crates/jrsonnet-lsp-import/src/graph.rs @@ -254,14 +254,13 @@ impl ImportGraph { continue; } // Check if all dependencies of importer are processed - let all_deps_processed = - 
self.imports.get(importer).map_or(true, |entries| { - entries.iter().all(|e| { - e.resolved_path.as_ref().map_or(true, |p| { - processed.contains(p) || !self.imports.contains_key(p) - }) + let all_deps_processed = self.imports.get(importer).is_none_or(|entries| { + entries.iter().all(|e| { + e.resolved_path.as_ref().is_none_or(|p| { + processed.contains(p) || !self.imports.contains_key(p) }) - }); + }) + }); if all_deps_processed && !next_level.contains(importer) { next_level.push(importer.clone()); diff --git a/crates/jrsonnet-lsp-inference/src/const_eval.rs b/crates/jrsonnet-lsp-inference/src/const_eval.rs index 99eae35f..f75b544b 100644 --- a/crates/jrsonnet-lsp-inference/src/const_eval.rs +++ b/crates/jrsonnet-lsp-inference/src/const_eval.rs @@ -387,7 +387,7 @@ fn find_field_in_member_list( range: method_range, })); } - Member::MemberBindStmt(_) | Member::MemberAssertStmt(_) => continue, + Member::MemberBindStmt(_) | Member::MemberAssertStmt(_) => {} } } None diff --git a/crates/jrsonnet-lsp-inference/src/expr.rs b/crates/jrsonnet-lsp-inference/src/expr.rs index 8ac3106a..fb5a2b98 100644 --- a/crates/jrsonnet-lsp-inference/src/expr.rs +++ b/crates/jrsonnet-lsp-inference/src/expr.rs @@ -74,11 +74,9 @@ pub fn infer_document_type_ty(document: &Document) -> (Ty, TypeEnv) { let ast = document.ast(); let mut env = TypeEnv::new_default(); - let ty = if let Some(expr) = ast.expr() { - infer_expr_ty(&expr, &mut env) - } else { - Ty::ANY - }; + let ty = ast + .expr() + .map_or(Ty::ANY, |expr| infer_expr_ty(&expr, &mut env)); (ty, env) } @@ -136,11 +134,9 @@ fn infer_expr_ty_impl( } // Get the base expression type - let ty = if let Some(base) = expr.expr_base() { + let ty = expr.expr_base().map_or(Ty::ANY, |base| { infer_base_ty(&base, env, expected, recorder) - } else { - Ty::ANY - }; + }); record_expr_and_base(recorder, expr, ty); ty } @@ -186,23 +182,25 @@ fn infer_bind_type_ty(bind: &Bind, env: &mut TypeEnv, recorder: env.define_ty(name.clone(), provisional_ty); let 
(return_ty, param_constraints) = if env.can_infer_function_body() { - if let Some(body) = bf.value() { - env.push_scope(); - let param_names: Vec = params.iter().map(|p| p.name.clone()).collect(); - for param in ¶ms { - env.define_ty(param.name.clone(), param.ty); - } + bf.value().map_or_else( + || (Ty::ANY, FxHashMap::default()), + |body| { + env.push_scope(); + let param_names: Vec = + params.iter().map(|p| p.name.clone()).collect(); + for param in ¶ms { + env.define_ty(param.name.clone(), param.ty); + } - env.start_constraint_tracking(¶m_names); - env.enter_function(); - let body_ty = infer_expr_ty_impl(&body, env, None, recorder); - env.exit_function(); - let constraints = env.stop_constraint_tracking_ty(); - env.pop_scope(); - (body_ty, constraints) - } else { - (Ty::ANY, FxHashMap::default()) - } + env.start_constraint_tracking(¶m_names); + env.enter_function(); + let body_ty = infer_expr_ty_impl(&body, env, None, recorder); + env.exit_function(); + let constraints = env.stop_constraint_tracking_ty(); + env.pop_scope(); + (body_ty, constraints) + }, + ) } else { (Ty::ANY, FxHashMap::default()) }; @@ -653,8 +651,8 @@ fn infer_if_then_else_expr_base_ty( Facts::new() }; - let then_ty = if let Some(then_clause) = if_expr.then() { - if let Some(then_expr) = then_clause.expr() { + let then_ty = if_expr.then().map_or(Ty::ANY, |then_clause| { + then_clause.expr().map_or(Ty::ANY, |then_expr| { env.push_scope(); for (var_name, fact) in facts.iter() { let current_ty = env.lookup(var_name).unwrap_or(Ty::ANY); @@ -664,15 +662,11 @@ fn infer_if_then_else_expr_base_ty( let ty = infer_expr_ty_impl(&then_expr, env, expected, recorder); env.pop_scope(); ty - } else { - Ty::ANY - } - } else { - Ty::ANY - }; + }) + }); - let else_ty = if let Some(else_clause) = if_expr.else_() { - if let Some(else_expr) = else_clause.expr() { + let else_ty = if_expr.else_().map_or(Ty::ANY, |else_clause| { + else_clause.expr().map_or(Ty::ANY, |else_expr| { env.push_scope(); for (var_name, fact) 
in facts.iter() { let current_ty = env.lookup(var_name).unwrap_or(Ty::ANY); @@ -682,12 +676,8 @@ fn infer_if_then_else_expr_base_ty( let ty = infer_expr_ty_impl(&else_expr, env, expected, recorder); env.pop_scope(); ty - } else { - Ty::ANY - } - } else { - Ty::ANY - }; + }) + }); env.store_mut().union(vec![then_ty, else_ty]) } @@ -945,16 +935,14 @@ fn infer_array_expr_base_ty( expected: Option, recorder: &mut R, ) -> Ty { - let (expected_array_elem, expected_tuple_elems) = if let Some(exp_ty) = expected { + let (expected_array_elem, expected_tuple_elems) = expected.map_or((None, None), |exp_ty| { let store = env.store(); match store.get(exp_ty) { TyData::Array { elem, .. } => (Some(elem), None), TyData::Tuple { elems } => (None, Some(elems)), _ => (None, None), } - } else { - (None, None) - }; + }); let elem_types: Vec = arr .exprs() @@ -1093,33 +1081,32 @@ fn infer_function_expr_base_ty( env.start_function_inference(func_range); - let params = if let Some(params_desc) = func.params_desc() { + let params = func.params_desc().map_or_else(Vec::new, |params_desc| { extract_params_with_default_types_ty(¶ms_desc, env) - } else { - Vec::new() - }; + }); let (return_ty, param_constraints) = if env.can_infer_function_body() { - if let Some(body) = func.expr() { - env.push_scope(); - let param_names: Vec = params.iter().map(|p| p.name.clone()).collect(); - for param in ¶ms { - env.define_ty(param.name.clone(), param.ty); - } + func.expr().map_or_else( + || (Ty::ANY, FxHashMap::default()), + |body| { + env.push_scope(); + let param_names: Vec = params.iter().map(|p| p.name.clone()).collect(); + for param in ¶ms { + env.define_ty(param.name.clone(), param.ty); + } - env.start_constraint_tracking(¶m_names); + env.start_constraint_tracking(¶m_names); - env.enter_function(); - let body_ty = infer_expr_ty_impl(&body, env, None, recorder); - env.exit_function(); + env.enter_function(); + let body_ty = infer_expr_ty_impl(&body, env, None, recorder); + env.exit_function(); - let 
constraints = env.stop_constraint_tracking_ty(); + let constraints = env.stop_constraint_tracking_ty(); - env.pop_scope(); - (body_ty, constraints) - } else { - (Ty::ANY, FxHashMap::default()) - } + env.pop_scope(); + (body_ty, constraints) + }, + ) } else { (Ty::ANY, FxHashMap::default()) }; @@ -1847,11 +1834,9 @@ assert std.all(std.map(function(x) x == null || std.isNumber(x), xs)); let doc = Document::new(code.to_string(), DocVersion::new(1)); let mut env = TypeEnv::new_default(); let expected = expected_builder(env.store_mut()); - let ty = if let Some(expr) = doc.ast().expr() { + let ty = doc.ast().expr().map_or(Ty::ANY, |expr| { infer_expr_ty_with_expected(&expr, &mut env, Some(expected)) - } else { - Ty::ANY - }; + }); (ty, env) } diff --git a/crates/jrsonnet-lsp-inference/src/flow.rs b/crates/jrsonnet-lsp-inference/src/flow.rs index b7b50777..3b04d578 100644 --- a/crates/jrsonnet-lsp-inference/src/flow.rs +++ b/crates/jrsonnet-lsp-inference/src/flow.rs @@ -455,13 +455,12 @@ impl FactRepr { } FactRepr::HasField { field, field_type } => { - let negated_field_ty = match field_type { - Some(inner) => match &inner.repr { - FactRepr::Prim(prim, Totality::Total) => prim.negated_any_ty(store), - _ => inner.apply_negated(Ty::ANY, store), - }, - None => Ty::NEVER, - }; + let negated_field_ty = field_type.as_ref().map_or(Ty::NEVER, |inner| match &inner + .repr + { + FactRepr::Prim(prim, Totality::Total) => prim.negated_any_ty(store), + _ => inner.apply_negated(Ty::ANY, store), + }); let constraint = store.object(ObjectData { fields: vec![( field.clone(), diff --git a/crates/jrsonnet-lsp-inference/src/helpers.rs b/crates/jrsonnet-lsp-inference/src/helpers.rs index 71cbb8c2..16a8c25d 100644 --- a/crates/jrsonnet-lsp-inference/src/helpers.rs +++ b/crates/jrsonnet-lsp-inference/src/helpers.rs @@ -132,11 +132,9 @@ pub fn extract_params_with_default_types_ty( let has_default = param.assign_token().is_some() || param.expr().is_some(); // Infer type from default value if 
present - let default_ty = if let Some(default_expr) = param.expr() { - infer_expr_ty(&default_expr, env) - } else { - Ty::ANY - }; + let default_ty = param + .expr() + .map_or(Ty::ANY, |default_expr| infer_expr_ty(&default_expr, env)); Some(ParamInterned { name, diff --git a/crates/jrsonnet-lsp-inference/src/manager.rs b/crates/jrsonnet-lsp-inference/src/manager.rs index 411dddc9..f61f03b1 100644 --- a/crates/jrsonnet-lsp-inference/src/manager.rs +++ b/crates/jrsonnet-lsp-inference/src/manager.rs @@ -85,12 +85,10 @@ impl DocumentManager { /// /// Returns true if the document was found and updated. pub fn update(&self, path: &CanonicalPath, text: String, version: DocVersion) -> bool { - if let Some(mut doc) = self.open.get_mut(path) { + self.open.get_mut(path).is_some_and(|mut doc| { doc.update(text, version); true - } else { - false - } + }) } /// Apply an incremental change to an open document. @@ -103,11 +101,9 @@ impl DocumentManager { new_text: &str, version: DocVersion, ) -> bool { - if let Some(mut doc) = self.open.get_mut(path) { - doc.apply_incremental_change(range, new_text, version) - } else { - false - } + self.open + .get_mut(path) + .is_some_and(|mut doc| doc.apply_incremental_change(range, new_text, version)) } /// Close a document (called on textDocument/didClose). @@ -190,9 +186,14 @@ impl DocumentManager { } } - // Try to read from disk and parse + // Read from disk once, then cache in `closed` for reuse. let text = std::fs::read_to_string(path.as_path()).ok()?; - Some(Document::new(text, DocVersion::new(0))) + let document = Document::new(text, DocVersion::new(0)); + { + let mut closed = self.closed.write(); + closed.put(path.clone(), document.clone()); + } + Some(document) } /// Check if a document is currently open. 
@@ -242,20 +243,30 @@ impl DocumentManager { let doc = self.get_document(path)?; let version = doc.version(); - // Check if we have a valid cached analysis + Some(self.get_or_compute_analysis(path, version, || { + TypeAnalysis::analyze_with_global(&doc, Arc::clone(&self.global_types)) + })) + } + + /// Get cached analysis for `path@version`, or compute and cache it. + pub fn get_or_compute_analysis( + &self, + path: &CanonicalPath, + version: DocVersion, + compute: F, + ) -> Arc + where + F: FnOnce() -> TypeAnalysis, + { if let Some(cached) = self.analysis_cache.get(path) { if cached.version == version { - return Some(Arc::clone(&cached.analysis)); + eprintln!("analysis_cache_hit {}", path.as_path().display()); + return Arc::clone(&cached.analysis); } } + eprintln!("analysis_cache_miss {}", path.as_path().display()); - // Compute new analysis using shared global store - let analysis = Arc::new(TypeAnalysis::analyze_with_global( - &doc, - Arc::clone(&self.global_types), - )); - - // Cache it + let analysis = Arc::new(compute()); self.analysis_cache.insert( path.clone(), CachedAnalysis { @@ -263,8 +274,7 @@ impl DocumentManager { analysis: Arc::clone(&analysis), }, ); - - Some(analysis) + analysis } /// Invalidate the analysis cache for a document. 
diff --git a/crates/jrsonnet-lsp-inference/src/object.rs b/crates/jrsonnet-lsp-inference/src/object.rs index 728e3211..1f1ccf72 100644 --- a/crates/jrsonnet-lsp-inference/src/object.rs +++ b/crates/jrsonnet-lsp-inference/src/object.rs @@ -130,23 +130,24 @@ pub fn infer_object_type_with_super_ty( .unwrap_or_default(); let (return_ty, param_constraints) = if env.can_infer_function_body() { - if let Some(body) = method.expr() { - env.push_scope(); - let param_names: Vec = - params.iter().map(|p| p.name.clone()).collect(); - for param in ¶ms { - env.define_ty(param.name.clone(), param.ty); - } - env.start_constraint_tracking(¶m_names); - env.enter_function(); - let body_ty = infer_expr(&body, env); - env.exit_function(); - let constraints = env.stop_constraint_tracking_ty(); - env.pop_scope(); - (body_ty, constraints) - } else { - (Ty::ANY, FxHashMap::default()) - } + method.expr().map_or_else( + || (Ty::ANY, FxHashMap::default()), + |body| { + env.push_scope(); + let param_names: Vec = + params.iter().map(|p| p.name.clone()).collect(); + for param in ¶ms { + env.define_ty(param.name.clone(), param.ty); + } + env.start_constraint_tracking(¶m_names); + env.enter_function(); + let body_ty = infer_expr(&body, env); + env.exit_function(); + let constraints = env.stop_constraint_tracking_ty(); + env.pop_scope(); + (body_ty, constraints) + }, + ) } else { (Ty::ANY, FxHashMap::default()) }; diff --git a/crates/jrsonnet-lsp-inference/src/poly.rs b/crates/jrsonnet-lsp-inference/src/poly.rs index 788f3c2a..8eeab153 100644 --- a/crates/jrsonnet-lsp-inference/src/poly.rs +++ b/crates/jrsonnet-lsp-inference/src/poly.rs @@ -58,8 +58,7 @@ fn resolve_return_spec_ty( arg_types .get(idx) .copied() - .map(|ty| store.apply_substitution(ty, substitution)) - .unwrap_or(Ty::ANY) + .map_or(Ty::ANY, |ty| store.apply_substitution(ty, substitution)) }; match return_spec { diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_runner/transport.rs 
b/crates/jrsonnet-lsp-scenario/src/scenario_runner/transport.rs index 8d8063a3..313e5968 100644 --- a/crates/jrsonnet-lsp-scenario/src/scenario_runner/transport.rs +++ b/crates/jrsonnet-lsp-scenario/src/scenario_runner/transport.rs @@ -114,12 +114,14 @@ impl ScenarioRunner { if let Some(error) = response.error { return Err(RpcError::ResponseReturnedError { method, id, error }.into()); } - match response.result { - Some(value) => serde_json::from_value(value) - .map_err(|source| SerdeError::DeserializeResponseResult { method, id, source }) - .map_err(Into::into), - None => Ok(None), - } + response.result.map_or_else( + || Ok(None), + |value| { + serde_json::from_value(value) + .map_err(|source| SerdeError::DeserializeResponseResult { method, id, source }) + .map_err(Into::into) + }, + ) } pub(super) fn wait_response(&mut self, id: i32, timeout: Duration) -> RunnerResult { diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_script/markers.rs b/crates/jrsonnet-lsp-scenario/src/scenario_script/markers.rs index 5abd1ebf..f3c0920b 100644 --- a/crates/jrsonnet-lsp-scenario/src/scenario_script/markers.rs +++ b/crates/jrsonnet-lsp-scenario/src/scenario_script/markers.rs @@ -118,12 +118,10 @@ impl PositionFieldInput { file: &str, context: &str, ) -> Result { - match self.position_of { - Some(name) => { - marker_store.resolve_position_spec(file, PositionSpec::Marker(name), context) - } - None => Err(format!("{context}: missing position, provide `positionOf`")), - } + self.position_of.map_or_else( + || Err(format!("{context}: missing position, provide `positionOf`")), + |name| marker_store.resolve_position_spec(file, PositionSpec::Marker(name), context), + ) } } @@ -143,10 +141,10 @@ impl RangeFieldInput { file: &str, context: &str, ) -> Result { - match self.range_of { - Some(marker) => marker_store.resolve_named_range(file, &marker, context), - None => Err(format!("{context}: missing range, provide `rangeOf`")), - } + self.range_of.map_or_else( + || 
Err(format!("{context}: missing range, provide `rangeOf`")), + |marker| marker_store.resolve_named_range(file, &marker, context), + ) } } @@ -294,10 +292,10 @@ impl MarkerStore { at: Option, context: &str, ) -> Result { - match at { - Some(spec) => self.resolve_position_spec(file, spec, context), - None => Err(format!("{context}: missing position, provide `at`")), - } + at.map_or_else( + || Err(format!("{context}: missing position, provide `at`")), + |spec| self.resolve_position_spec(file, spec, context), + ) } /// Resolve a marker-aware range input for one file. diff --git a/crates/jrsonnet-lsp-scope/src/bindings.rs b/crates/jrsonnet-lsp-scope/src/bindings.rs index 1dd2e398..6da9e1ef 100644 --- a/crates/jrsonnet-lsp-scope/src/bindings.rs +++ b/crates/jrsonnet-lsp-scope/src/bindings.rs @@ -82,7 +82,7 @@ pub fn is_at_file_scope(token: &SyntaxToken) -> bool { SyntaxKind::EXPR => { // Check if this is the root expression if n.parent() - .map_or(false, |p| p.kind() == SyntaxKind::SOURCE_FILE) + .is_some_and(|p| p.kind() == SyntaxKind::SOURCE_FILE) { // File-level locals are at depth 1 return depth <= 1; diff --git a/crates/jrsonnet-lsp-scope/src/resolver.rs b/crates/jrsonnet-lsp-scope/src/resolver.rs index 392326b1..ac3fe7c2 100644 --- a/crates/jrsonnet-lsp-scope/src/resolver.rs +++ b/crates/jrsonnet-lsp-scope/src/resolver.rs @@ -786,9 +786,9 @@ impl ScopeIndex { // Filter by visibility at the query position cached .into_iter() - .filter(|b| match b.visible_after { - Some(visible_after) => pos >= visible_after, - None => true, + .filter(|b| { + b.visible_after + .is_none_or(|visible_after| pos >= visible_after) }) .map(|b| (b.name, b.range)) .collect() diff --git a/crates/jrsonnet-lsp/src/config.rs b/crates/jrsonnet-lsp/src/config.rs index d324e4cd..1c4deb72 100644 --- a/crates/jrsonnet-lsp/src/config.rs +++ b/crates/jrsonnet-lsp/src/config.rs @@ -144,10 +144,9 @@ impl ServerConfig { /// Parse configuration from LSP initialization options. 
#[must_use] pub fn from_initialization_options(value: Option) -> Self { - match value { - Some(v) => serde_json::from_value(v).unwrap_or_default(), - None => Self::default(), - } + value.map_or_else(Self::default, |v| { + serde_json::from_value(v).unwrap_or_default() + }) } /// Update configuration from a didChangeConfiguration notification. diff --git a/crates/jrsonnet-lsp/src/server.rs b/crates/jrsonnet-lsp/src/server.rs index ef9a6bc2..f15bb877 100644 --- a/crates/jrsonnet-lsp/src/server.rs +++ b/crates/jrsonnet-lsp/src/server.rs @@ -175,7 +175,14 @@ impl Server { // Invalidate the changed file and all its dependents let mut cache = self.type_cache.write(); cache.invalidate(path); - cache.invalidate_many(dependents); + cache.invalidate_many(dependents.iter().cloned()); + drop(cache); + + // Keep analysis cache consistent with type cache invalidation. + self.documents.invalidate_analysis(path); + for dependent in dependents { + self.documents.invalidate_analysis(&dependent); + } } fn async_request_context(&self) -> AsyncRequestContext { diff --git a/crates/jrsonnet-lsp/src/server/async_requests.rs b/crates/jrsonnet-lsp/src/server/async_requests.rs index 049ab5d3..2c730e80 100644 --- a/crates/jrsonnet-lsp/src/server/async_requests.rs +++ b/crates/jrsonnet-lsp/src/server/async_requests.rs @@ -1,6 +1,6 @@ use std::sync::Arc; -use jrsonnet_lsp_document::{to_lsp_range, CanonicalPath, DocVersion, Document, SymbolName}; +use jrsonnet_lsp_document::{to_lsp_range, CanonicalPath, Document, SymbolName}; use jrsonnet_lsp_handlers as handlers; use jrsonnet_lsp_import::{ImportGraph, ImportResolution}; use jrsonnet_lsp_inference::{SharedDocumentManager, SharedTypeCache, TypeAnalysis, TypeProvider}; @@ -59,13 +59,16 @@ impl AsyncRequestContext { } /// Analyze a document with dependency-aware import resolution. 
- fn analyze_document(&self, path: &CanonicalPath, doc: &Document) -> TypeAnalysis { - let provider = TypeProvider::new( - Arc::clone(&self.type_cache), - Arc::clone(&self.import_graph), - Arc::clone(&self.global_types), - ); - provider.analyze(path, doc, self.documents.as_ref()) + fn analyze_document(&self, path: &CanonicalPath, doc: &Document) -> Arc { + let version = doc.version(); + self.documents.get_or_compute_analysis(path, version, || { + let provider = TypeProvider::new( + Arc::clone(&self.type_cache), + Arc::clone(&self.import_graph), + Arc::clone(&self.global_types), + ); + provider.analyze(path, doc, self.documents.as_ref()) + }) } pub(super) fn hover(&self, params: &HoverParams) -> Option { @@ -667,12 +670,7 @@ impl AsyncRequestContext { } fn load_document_for_path(&self, path: &CanonicalPath) -> Option { - if let Some(doc) = self.documents.get(path) { - return Some(doc.clone()); - } - - let content = std::fs::read_to_string(path.as_path()).ok()?; - Some(Document::new(content, DocVersion::new(0))) + self.documents.get_document(path) } fn document_root_expr_range(&self, path: &CanonicalPath) -> Option { From 1d34637529e7fa9ec31445a9dc2573a036a03d6a Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Thu, 12 Feb 2026 18:13:12 +0000 Subject: [PATCH 093/210] Speed up LSP request pipeline and reuse analysis artifacts --- .../jrsonnet-lsp-handlers/src/definition.rs | 30 ++++++++--- crates/jrsonnet-lsp-inference/src/analysis.rs | 51 ++++++++++++++++++- crates/jrsonnet-lsp-inference/src/manager.rs | 16 +++++- crates/jrsonnet-lsp/src/async_diagnostics.rs | 11 +++- crates/jrsonnet-lsp/src/server.rs | 17 +++++-- ...hover_import_binding_member_signature.yaml | 40 +++++++++++++++ 6 files changed, 147 insertions(+), 18 deletions(-) create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/hover_import_binding_member_signature.yaml diff --git a/crates/jrsonnet-lsp-handlers/src/definition.rs b/crates/jrsonnet-lsp-handlers/src/definition.rs index a1491a7c..3a494b03 
100644 --- a/crates/jrsonnet-lsp-handlers/src/definition.rs +++ b/crates/jrsonnet-lsp-handlers/src/definition.rs @@ -167,14 +167,28 @@ fn resolve_canonical_definition( } fn find_bind_by_definition_range(document: &Document, range: TextRange) -> Option { - document - .ast() - .syntax() - .descendants() - .filter_map(Bind::cast) - .find(|bind| { - bind_definition_range(bind).is_some_and(|definition_range| definition_range == range) - }) + let ast = document.ast(); + let root = ast.syntax(); + let offset = ByteOffset::from(u32::from(range.start())); + + if let Some(token) = token_at_offset(root, offset) { + if token.kind() == SyntaxKind::IDENT { + if let Some(bind) = token + .parent() + .and_then(|node| node.ancestors().find_map(Bind::cast)) + { + if bind_definition_range(&bind) + .is_some_and(|definition_range| definition_range == range) + { + return Some(bind); + } + } + } + } + + root.descendants().filter_map(Bind::cast).find(|bind| { + bind_definition_range(bind).is_some_and(|definition_range| definition_range == range) + }) } fn bind_definition_range(bind: &Bind) -> Option { diff --git a/crates/jrsonnet-lsp-inference/src/analysis.rs b/crates/jrsonnet-lsp-inference/src/analysis.rs index f8a7034e..95457408 100644 --- a/crates/jrsonnet-lsp-inference/src/analysis.rs +++ b/crates/jrsonnet-lsp-inference/src/analysis.rs @@ -160,8 +160,55 @@ impl TypeAnalysis { /// Get the type of an expression at a specific position. /// /// Finds the smallest expression containing the position and returns its type. - pub fn type_at_position(&self, _root: &SyntaxNode, offset: rowan::TextSize) -> Option { - self.find_type_at(offset) + pub fn type_at_position(&self, root: &SyntaxNode, offset: rowan::TextSize) -> Option { + self.find_type_at_in_syntax(root, offset) + .or_else(|| self.find_type_at(offset)) + } + + /// Find the smallest expression type containing `offset` by walking syntax ancestors. 
+ fn find_type_at_in_syntax(&self, root: &SyntaxNode, offset: rowan::TextSize) -> Option { + let candidate_for_token = |token: rowan::SyntaxToken<_>| { + let mut best: Option<(TextRange, Ty)> = self + .expr_types + .get(&token.text_range()) + .copied() + .map(|ty| (token.text_range(), ty)); + + let mut current = token.parent(); + while let Some(node) = current { + let range = node.text_range(); + if let Some(ty) = self.expr_types.get(&range).copied() { + match best { + None => best = Some((range, ty)), + Some((best_range, _)) if range.len() < best_range.len() => { + best = Some((range, ty)); + } + _ => {} + } + } + current = node.parent(); + } + + best + }; + + let best = match root.token_at_offset(offset) { + rowan::TokenAtOffset::None => None, + rowan::TokenAtOffset::Single(token) => candidate_for_token(token), + rowan::TokenAtOffset::Between(left, right) => { + match (candidate_for_token(left), candidate_for_token(right)) { + (None, None) => None, + (Some(candidate), None) | (None, Some(candidate)) => Some(candidate), + (Some(left), Some(right)) => Some(if left.0.len() <= right.0.len() { + left + } else { + right + }), + } + } + }; + + best.map(|(_, ty)| ty) } /// Find a type at the given offset. diff --git a/crates/jrsonnet-lsp-inference/src/manager.rs b/crates/jrsonnet-lsp-inference/src/manager.rs index f61f03b1..10f40878 100644 --- a/crates/jrsonnet-lsp-inference/src/manager.rs +++ b/crates/jrsonnet-lsp-inference/src/manager.rs @@ -260,11 +260,9 @@ impl DocumentManager { { if let Some(cached) = self.analysis_cache.get(path) { if cached.version == version { - eprintln!("analysis_cache_hit {}", path.as_path().display()); return Arc::clone(&cached.analysis); } } - eprintln!("analysis_cache_miss {}", path.as_path().display()); let analysis = Arc::new(compute()); self.analysis_cache.insert( @@ -277,6 +275,20 @@ impl DocumentManager { analysis } + /// Insert a precomputed analysis for `path@version` into the cache. 
+ /// + /// This is useful when background workers (for example diagnostics) already + /// computed analysis and want subsequent requests to reuse it. + pub fn cache_analysis( + &self, + path: CanonicalPath, + version: DocVersion, + analysis: Arc, + ) { + self.analysis_cache + .insert(path, CachedAnalysis { version, analysis }); + } + /// Invalidate the analysis cache for a document. /// /// Call this when a document's content changes to ensure the next diff --git a/crates/jrsonnet-lsp/src/async_diagnostics.rs b/crates/jrsonnet-lsp/src/async_diagnostics.rs index 756c8c6a..d82e1592 100644 --- a/crates/jrsonnet-lsp/src/async_diagnostics.rs +++ b/crates/jrsonnet-lsp/src/async_diagnostics.rs @@ -235,7 +235,7 @@ impl AsyncDiagnostics { Arc::clone(&config.import_graph), Arc::clone(&config.global_types), ); - let analysis = provider.analyze(&request.path, &document, &doc_source); + let analysis = Arc::new(provider.analyze(&request.path, &document, &doc_source)); let import_resolution = ImportResolution::new(&request.path, &request.import_roots); let import_occurrences = import_resolution.parse_occurrences(&document); @@ -244,7 +244,7 @@ impl AsyncDiagnostics { &document, request.enable_lint, config.evaluator.as_deref(), - &analysis, + analysis.as_ref(), &import_occurrences, ) else { debug!( @@ -270,6 +270,13 @@ impl AsyncDiagnostics { } } + // Reuse analysis work done for diagnostics in foreground requests. 
+ config.documents.cache_analysis( + request.path.clone(), + request.version, + Arc::clone(&analysis), + ); + // Send result let result = DiagnosticsResult { path: request.path, diff --git a/crates/jrsonnet-lsp/src/server.rs b/crates/jrsonnet-lsp/src/server.rs index f15bb877..055539f2 100644 --- a/crates/jrsonnet-lsp/src/server.rs +++ b/crates/jrsonnet-lsp/src/server.rs @@ -45,9 +45,9 @@ use lsp_types::{ DidOpenTextDocumentParams, DidSaveTextDocumentParams, DocumentFormattingParams, DocumentHighlight, DocumentHighlightParams, DocumentSymbolParams, DocumentSymbolResponse, ExecuteCommandOptions, ExecuteCommandParams, FileChangeType, FileSystemWatcher, GlobPattern, - HoverProviderCapability, InitializeParams, InitializeResult, NumberOrString, OneOf, - PrepareRenameResponse, Registration, RegistrationParams, RelativePattern, - SemanticTokensFullOptions, SemanticTokensOptions, SemanticTokensParams, + Hover, HoverParams, HoverProviderCapability, InitializeParams, InitializeResult, + NumberOrString, OneOf, PrepareRenameResponse, Registration, RegistrationParams, + RelativePattern, SemanticTokensFullOptions, SemanticTokensOptions, SemanticTokensParams, SemanticTokensRangeParams, SemanticTokensServerCapabilities, ServerCapabilities, ServerInfo, SignatureHelp, SignatureHelpOptions, SignatureHelpParams, TextDocumentPositionParams, TextDocumentSyncCapability, TextDocumentSyncKind, TextEdit, WorkDoneProgressOptions, @@ -643,7 +643,6 @@ impl Server { | GotoDeclaration::METHOD | GotoTypeDefinition::METHOD | GotoImplementation::METHOD - | HoverRequest::METHOD | InlayHintRequest::METHOD | Completion::METHOD | References::METHOD @@ -654,6 +653,7 @@ impl Server { DocumentSymbolRequest::METHOD | DocumentHighlightRequest::METHOD | CodeActionRequest::METHOD + | HoverRequest::METHOD | SignatureHelpRequest::METHOD | Formatting::METHOD | PrepareRenameRequest::METHOD @@ -700,6 +700,10 @@ impl Server { let request = self.inflight_requests.begin::(id); self.handle_sync_typed(request, params, 
Self::on_code_action) } + HoverRequest::METHOD => { + let request = self.inflight_requests.begin::(id); + self.handle_sync_typed(request, params, Self::on_hover) + } SignatureHelpRequest::METHOD => { let request = self.inflight_requests.begin::(id); self.handle_sync_typed(request, params, Self::on_signature_help) @@ -919,6 +923,11 @@ impl Server { Some(DocumentSymbolResponse::Nested(symbols)) } + /// Handle textDocument/hover request. + fn on_hover(&self, params: &HoverParams) -> Option { + self.async_request_context().hover(params) + } + /// Handle textDocument/documentHighlight request. fn on_document_highlight( &self, diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_import_binding_member_signature.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_import_binding_member_signature.yaml new file mode 100644 index 00000000..6d499840 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_import_binding_member_signature.yaml @@ -0,0 +1,40 @@ +# Hover on imported module bindings should include member type details, +# and member access should resolve to a callable signature. +steps: +- step: create + files: + main.libsonnet: | + local database = import "./database/main.libsonnet"; + + { + binding: ((mBinding:|))database, + methodValue: database.((mMethod:|))new("ns", { provider: "eks" }), + } + database/main.libsonnet: | + { + new(namespace, cluster):: { + definitions: { + enabled: cluster.provider == "eks", + }, + }, + } + +- step: diagnosticsSettled + +- step: requestHover + as: bindingHover + file: main.libsonnet + at: mBinding +- step: expectHoverType + request: bindingHover + type: 'new: (namespace: any, cluster: { provider: any, ... }) ->' + match: contains + +- step: requestHover + as: methodHover + file: main.libsonnet + at: mMethod +- step: expectHoverType + request: methodHover + type: '(namespace: any, cluster: { provider: any, ... 
}) -> { definitions: { enabled: boolean } }' + match: contains From 9999cddf1faae81b11f5c90420e32e2f6868aa83 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Thu, 12 Feb 2026 18:15:24 +0000 Subject: [PATCH 094/210] Avoid unnecessary cross-file import hover resolution --- crates/jrsonnet-lsp-handlers/src/hover.rs | 160 ++++++++++++++++++---- 1 file changed, 131 insertions(+), 29 deletions(-) diff --git a/crates/jrsonnet-lsp-handlers/src/hover.rs b/crates/jrsonnet-lsp-handlers/src/hover.rs index 1ab27804..832f49e6 100644 --- a/crates/jrsonnet-lsp-handlers/src/hover.rs +++ b/crates/jrsonnet-lsp-handlers/src/hover.rs @@ -8,6 +8,7 @@ use jrsonnet_lsp_document::{to_lsp_range, token_at_offset, ByteOffset, Document, use jrsonnet_lsp_inference::TypeAnalysis; use jrsonnet_lsp_scope::expr_resolves_to_builtin_std; use jrsonnet_lsp_stdlib as stdlib; +use jrsonnet_lsp_types::{Ty, TyData}; use jrsonnet_rowan_parser::{ nodes::{Bind, Destruct, ExprField}, AstNode, SyntaxKind, SyntaxToken, @@ -117,15 +118,19 @@ fn check_local_hover( // Get the inferred type at this position. If the local definition site only // reports `any`, fall back to the bound value expression type. 
let ast = document.ast(); - let mut inferred_type = analysis - .type_at_position(ast.syntax(), offset.into()) - .map(|ty| analysis.display_for_hover(ty)); + let inferred_ty = analysis.type_at_position(ast.syntax(), offset.into()); + let mut inferred_type = inferred_ty.map(|ty| analysis.display_for_hover(ty)); + let inferred_is_any = inferred_ty.is_none_or(|ty| ty == Ty::ANY); + let inferred_is_object = inferred_ty + .is_some_and(|ty| analysis.with_data(ty, |data| matches!(data, TyData::Object(_)))); match &result { DefinitionResult::ImportField { path, fields } => { - if let Some(resolver) = import_field_type_resolver { - if let Some(resolved_type) = resolver(path, fields) { - inferred_type = Some(resolved_type); + if inferred_is_any || inferred_is_object { + if let Some(resolver) = import_field_type_resolver { + if let Some(resolved_type) = resolver(path, fields) { + inferred_type = Some(resolved_type); + } } } } @@ -135,9 +140,11 @@ fn check_local_hover( } } DefinitionResult::Import(path) => { - if let Some(resolver) = import_field_type_resolver { - if let Some(resolved_type) = resolver(path, &[]) { - inferred_type = Some(resolved_type); + if inferred_is_any { + if let Some(resolver) = import_field_type_resolver { + if let Some(resolved_type) = resolver(path, &[]) { + inferred_type = Some(resolved_type); + } } } } @@ -303,7 +310,11 @@ mod tests { use assert_matches::assert_matches; use jrsonnet_lsp_document::DocVersion; - use jrsonnet_lsp_types::GlobalTyStore; + use jrsonnet_lsp_inference::ImportResolver; + use jrsonnet_lsp_types::{ + FieldDefInterned, FieldVis, FunctionData, GlobalTy, GlobalTyStore, ObjectData, + ParamInterned, ReturnSpec, Ty, TyData, + }; use rstest::rstest; use super::*; @@ -316,17 +327,54 @@ mod tests { hover(&doc, pos, &analysis) } - fn get_hover_with_import_field_type( - code: &str, - line: u32, - character: u32, - resolver: &ImportFieldTypeResolver<'_>, - ) -> Option { - let global_types = Arc::new(GlobalTyStore::new()); - let doc = 
Document::new(code.to_string(), DocVersion::new(1)); - let analysis = TypeAnalysis::analyze_with_global(&doc, global_types); - let pos = (line, character).into(); - hover_with_import_field_type(&doc, pos, &analysis, Some(resolver)) + #[derive(Debug)] + struct StaticImportResolver { + path: &'static str, + ty: GlobalTy, + } + + impl ImportResolver for StaticImportResolver { + fn resolve_import(&self, import_path: &str) -> Option { + (import_path == self.path).then_some(self.ty) + } + } + + fn function_type_x_to_number(global_types: &Arc) -> Ty { + global_types.intern(TyData::Function(FunctionData { + params: vec![ParamInterned { + name: "x".to_string(), + ty: Ty::ANY, + has_default: false, + }], + return_spec: ReturnSpec::Fixed(Ty::NUMBER), + variadic: false, + })) + } + + fn closed_object_type(global_types: &Arc, fields: Vec<(&str, Ty)>) -> Ty { + let mut object_fields: Vec<(String, FieldDefInterned)> = fields + .into_iter() + .map(|(name, ty)| { + ( + name.to_string(), + FieldDefInterned { + ty, + required: true, + visibility: FieldVis::Normal, + }, + ) + }) + .collect(); + object_fields.sort_by(|(left, _), (right, _)| left.cmp(right)); + global_types.intern(TyData::Object(ObjectData { + fields: object_fields, + has_unknown: false, + })) + } + + fn module_type_with_foo(global_types: &Arc, foo_ty: Ty) -> GlobalTy { + let module_ty = closed_object_type(global_types, vec![("foo", foo_ty)]); + GlobalTy::new(module_ty).expect("module type should be global") } #[rstest] @@ -455,20 +503,74 @@ mod tests { }); } - #[test] - fn test_import_field_hover_prefers_resolved_imported_type() { + #[rstest] + #[case(None, "`function(x)`\n\n`foo` from `lib.libsonnet`", "function(x)")] + #[case( + Some("function"), + "`(x: any) -> number`\n\n`foo` from `lib.libsonnet`", + "function(x)" + )] + #[case( + Some("object"), + "`{ resolved: string }`\n\n`foo` from `lib.libsonnet`", + "{ resolved: string }" + )] + fn test_import_field_hover_resolution_exact_shape( + #[case] 
inferred_import_kind: Option<&str>, + #[case] expected: &str, + #[case] resolved_type: &str, + ) { let code = r#"local lib = import "lib.libsonnet"; lib.foo"#; - let result = get_hover_with_import_field_type(code, 0, 40, &|path, fields| { - assert_eq!(path, "lib.libsonnet"); - assert_eq!(fields, &["foo".to_string()]); - Some("function(x)".to_string()) - }); + let global_types = Arc::new(GlobalTyStore::new()); + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let analysis = match inferred_import_kind { + None => TypeAnalysis::analyze_with_global(&doc, Arc::clone(&global_types)), + Some("function") => { + let module_ty = + module_type_with_foo(&global_types, function_type_x_to_number(&global_types)); + let import_resolver = Arc::new(StaticImportResolver { + path: "lib.libsonnet", + ty: module_ty, + }); + TypeAnalysis::analyze_with_resolver( + &doc, + Arc::clone(&global_types), + import_resolver, + ) + } + Some("object") => { + let local_object_ty = + closed_object_type(&global_types, vec![("localOnly", Ty::NUMBER)]); + let module_ty = module_type_with_foo(&global_types, local_object_ty); + let import_resolver = Arc::new(StaticImportResolver { + path: "lib.libsonnet", + ty: module_ty, + }); + TypeAnalysis::analyze_with_resolver( + &doc, + Arc::clone(&global_types), + import_resolver, + ) + } + Some(other) => panic!("unsupported inferred_import_kind test case: {other}"), + }; + + let result = hover_with_import_field_type( + &doc, + (0, 40).into(), + &analysis, + Some(&|path, fields| { + assert_eq!(path, "lib.libsonnet"); + assert_eq!(fields, &["foo".to_string()]); + Some(resolved_type.to_string()) + }), + ); assert_matches!(result, Some(Hover { contents: HoverContents::Markup(MarkupContent { kind: MarkupKind::Markdown, value }), range: None }) => { - assert_eq!(value, "`function(x)`\n\n`foo` from `lib.libsonnet`"); + assert_eq!(value, expected); }); } From ab90df6408fe2ed4ea898c1c60cf5779e7d1167c Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Fri, 13 
Feb 2026 11:42:40 +0000 Subject: [PATCH 095/210] refactor(lsp-scenario): replace script string errors with typed errors --- .../src/scenario_script/compile.rs | 152 ++++++------ .../src/scenario_script/inputs.rs | 192 ++++++++++------ .../src/scenario_script/markers.rs | 216 ++++++++++-------- .../src/scenario_script/registry.rs | 92 +++++--- 4 files changed, 383 insertions(+), 269 deletions(-) diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_script/compile.rs b/crates/jrsonnet-lsp-scenario/src/scenario_script/compile.rs index 2cedc6da..0547af76 100644 --- a/crates/jrsonnet-lsp-scenario/src/scenario_script/compile.rs +++ b/crates/jrsonnet-lsp-scenario/src/scenario_script/compile.rs @@ -22,12 +22,12 @@ use super::{ CodeActionOrCommandInput, CodeLensInput, DiagnosticInput, ExpectCompletionScriptStep, ExpectDocumentSymbolScriptStep, ExpectExecuteCommandScriptStep, ExpectFormattingScriptStep, ExpectHoverScriptStep, ExpectPrepareRenameScriptStep, ExpectSignatureHelpScriptStep, - GotoDefinitionResponseInput, InlayHintInput, LocationInput, SemanticTokensResultInput, - WorkspaceEditInput, WorkspaceSymbolResponseInput, + GotoDefinitionResponseInput, InlayHintInput, InputError, LocationInput, + SemanticTokensResultInput, WorkspaceEditInput, WorkspaceSymbolResponseInput, }, - markers::{MarkerStore, PositionSpec, RangeInput}, + markers::{MarkerError, MarkerStore, PositionSpec, RangeInput}, paths::{file_path, file_uri}, - registry::{RequestKind, RequestRegistry}, + registry::{RequestKind, RequestRegistry, RequestRegistryError}, }; use crate::scenario::{ ChangeFullStep, ChangeIncrementalStep, CloseStep, ConfigStep, DeleteFileStep, @@ -61,15 +61,26 @@ pub(super) struct ScenarioScript { /// (for example unknown marker names, alias mismatches, or missing request /// context for shorthand expectations). 
#[derive(Debug, Error)] -#[error("{message}")] -pub struct CompileScenarioError { - message: String, -} - -impl From for CompileScenarioError { - fn from(message: String) -> Self { - Self { message } - } +pub enum CompileScenarioError { + #[error(transparent)] + Registry(#[from] RequestRegistryError), + #[error(transparent)] + Markers(#[from] MarkerError), + #[error(transparent)] + Inputs(#[from] InputError), + #[error("create: `files` must include at least one file")] + CreateRequiresFiles, + #[error( + "create: `open` references unknown file '{relative_path}', expected one of: {expected:?}" + )] + CreateOpenReferencesUnknownFile { + relative_path: String, + expected: Vec, + }, + #[error("create: missing parsed text for '{relative_path}'")] + CreateMissingParsedText { relative_path: String }, + #[error("{step}: missing request file context for request id {request_id}")] + MissingRequestFileContext { step: &'static str, request_id: i32 }, } impl ScenarioScript { @@ -127,17 +138,14 @@ impl ScenarioScript { })] } ScenarioScriptStep::ChangeIncremental(step) => { - let range = marker_store - .resolve_range(&step.file, step.range, "changeIncremental") - .map_err(|error| format!("compile changeIncremental: {error}"))?; - let text = marker_store - .register_incremental_text( - &step.file, - range, - step.text, - "changeIncremental.text", - ) - .map_err(|error| format!("compile changeIncremental: {error}"))?; + let range = + marker_store.resolve_range(&step.file, step.range, "changeIncremental")?; + let text = marker_store.register_incremental_text( + &step.file, + range, + step.text, + "changeIncremental.text", + )?; vec![ScenarioStep::ChangeIncremental(ChangeIncrementalStep { uri: file_uri(base_dir, &step.file), range, @@ -207,9 +215,11 @@ impl ScenarioScript { vec![ScenarioStep::RequestCodeAction(RequestCodeActionStep { id: request_id, uri: file_uri(base_dir, &step.file), - range: marker_store - .resolve_range(&step.file, step.range, "requestCodeAction") - 
.map_err(|error| format!("compile requestCodeAction: {error}"))?, + range: marker_store.resolve_range( + &step.file, + step.range, + "requestCodeAction", + )?, diagnostics, only: step.only, })] @@ -369,13 +379,12 @@ impl ScenarioScript { ScenarioScriptStep::ExpectPrepareRename(step) => { let request_id = registry.claim(RequestKind::PrepareRename, step.request.as_deref())?; - let file = prepare_rename_request_files - .get(&request_id) - .ok_or_else(|| { - format!( - "expectPrepareRename: missing request file context for request id {request_id}" - ) - })?; + let file = prepare_rename_request_files.get(&request_id).ok_or( + CompileScenarioError::MissingRequestFileContext { + step: "expectPrepareRename", + request_id, + }, + )?; vec![ScenarioStep::ExpectPrepareRename(ExpectPrepareRenameStep { id: request_id, result: step.resolve_result( @@ -475,7 +484,7 @@ impl ScenarioScript { }); steps.push(request); steps.push(expect); - Ok::, String>(steps) + Ok::, CompileScenarioError>(steps) })? } ScenarioScriptStep::RequestCompletion(step) => { @@ -495,10 +504,11 @@ impl ScenarioScript { ScenarioScriptStep::ExpectCompletion(step) => { let request_id = registry.claim(RequestKind::Completion, step.request.as_deref())?; - let file = completion_request_files.get(&request_id).ok_or_else(|| { - format!( - "expectCompletion: missing request file context for request id {request_id}" - ) + let file = completion_request_files.get(&request_id).ok_or({ + CompileScenarioError::MissingRequestFileContext { + step: "expectCompletion", + request_id, + } })?; let labels = step.labels.clone(); let allow_extra = step.allow_extra; @@ -541,13 +551,12 @@ impl ScenarioScript { ScenarioScriptStep::ExpectSemanticTokensFull(step) => { let request_id = registry.claim(RequestKind::SemanticTokensFull, step.request.as_deref())?; - let file = semantic_tokens_full_request_files - .get(&request_id) - .ok_or_else(|| { - format!( - "expectSemanticTokensFull: missing request file context for request id 
{request_id}" - ) - })?; + let file = semantic_tokens_full_request_files.get(&request_id).ok_or( + CompileScenarioError::MissingRequestFileContext { + step: "expectSemanticTokensFull", + request_id, + }, + )?; vec![ScenarioStep::ExpectSemanticTokensFull( ExpectSemanticTokensFullStep { id: request_id, @@ -583,13 +592,12 @@ impl ScenarioScript { ScenarioScriptStep::ExpectSemanticTokensRange(step) => { let request_id = registry .claim(RequestKind::SemanticTokensRange, step.request.as_deref())?; - let file = semantic_tokens_range_request_files - .get(&request_id) - .ok_or_else(|| { - format!( - "expectSemanticTokensRange: missing request file context for request id {request_id}" - ) - })?; + let file = semantic_tokens_range_request_files.get(&request_id).ok_or( + CompileScenarioError::MissingRequestFileContext { + step: "expectSemanticTokensRange", + request_id, + }, + )?; vec![ScenarioStep::ExpectSemanticTokensRange( ExpectSemanticTokensRangeStep { id: request_id, @@ -623,10 +631,11 @@ impl ScenarioScript { ScenarioScriptStep::ExpectInlayHints(step) => { let request_id = registry.claim(RequestKind::InlayHints, step.request.as_deref())?; - let file = inlay_hint_request_files.get(&request_id).ok_or_else(|| { - format!( - "expectInlayHints: missing request file context for request id {request_id}" - ) + let file = inlay_hint_request_files.get(&request_id).ok_or({ + CompileScenarioError::MissingRequestFileContext { + step: "expectInlayHints", + request_id, + } })?; let result = step .result @@ -658,13 +667,12 @@ impl ScenarioScript { ScenarioScriptStep::ExpectDocumentSymbol(step) => { let request_id = registry.claim(RequestKind::DocumentSymbol, step.request.as_deref())?; - let file = document_symbol_request_files - .get(&request_id) - .ok_or_else(|| { - format!( - "expectDocumentSymbol: missing request file context for request id {request_id}" - ) - })?; + let file = document_symbol_request_files.get(&request_id).ok_or( + CompileScenarioError::MissingRequestFileContext { 
+ step: "expectDocumentSymbol", + request_id, + }, + )?; vec![ScenarioStep::ExpectDocumentSymbol( ExpectDocumentSymbolStep { id: request_id, @@ -793,9 +801,9 @@ fn compile_create_step( step: CreateScriptStep, base_dir: &Path, marker_store: &mut MarkerStore, -) -> Result, String> { +) -> Result, CompileScenarioError> { if step.files.is_empty() { - return Err("create: `files` must include at least one file".to_string()); + return Err(CompileScenarioError::CreateRequiresFiles); } let mut steps = Vec::with_capacity(step.files.len() * 2); @@ -813,14 +821,16 @@ fn compile_create_step( }; for relative_path in files_to_open { if !step.files.contains_key(&relative_path) { - return Err(format!( - "create: `open` references unknown file '{relative_path}', expected one of: {:?}", - step.files.keys().collect::>() - )); + return Err(CompileScenarioError::CreateOpenReferencesUnknownFile { + relative_path, + expected: step.files.keys().cloned().collect(), + }); } let text = marker_store .full_text(&relative_path) - .ok_or_else(|| format!("create: missing parsed text for '{relative_path}'"))? + .ok_or_else(|| CompileScenarioError::CreateMissingParsedText { + relative_path: relative_path.clone(), + })? 
.to_string(); steps.push(ScenarioStep::Open(OpenStep { uri: file_uri(base_dir, &relative_path), diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_script/inputs.rs b/crates/jrsonnet-lsp-scenario/src/scenario_script/inputs.rs index 9ad79483..c81d1dae 100644 --- a/crates/jrsonnet-lsp-scenario/src/scenario_script/inputs.rs +++ b/crates/jrsonnet-lsp-scenario/src/scenario_script/inputs.rs @@ -20,13 +20,44 @@ use lsp_types::{ WorkspaceSymbolResponse, }; use serde::{de::DeserializeOwned, Deserialize}; +use thiserror::Error; use super::{ - markers::{MarkerStore, PositionFieldInput, PositionSpec, RangeFieldInput, RangeInput}, + markers::{ + MarkerError, MarkerStore, PositionFieldInput, PositionSpec, RangeFieldInput, RangeInput, + }, paths::file_uri, }; use crate::semantic_tokens::{encode_semantic_tokens, semantic_modifiers, ExpectedSemanticToken}; +#[derive(Debug, Error)] +pub enum InputError { + #[error(transparent)] + Markers(#[from] MarkerError), + #[error("{message}")] + Message { message: String }, +} + +impl InputError { + fn message(message: impl Into) -> Self { + Self::Message { + message: message.into(), + } + } +} + +impl From for InputError { + fn from(message: String) -> Self { + Self::message(message) + } +} + +macro_rules! input_err { + ($($arg:tt)*) => { + InputError::message(format!($($arg)*)) + }; +} + /// Accept either full `Diagnostic` JSON or concise shorthand fields. #[derive(Debug, Clone, PartialEq, Deserialize)] #[serde(untagged)] @@ -42,7 +73,7 @@ impl DiagnosticInput { marker_store: &MarkerStore, file: &str, context: &str, - ) -> Result { + ) -> Result { match self { Self::Full(diagnostic) => Ok(diagnostic), Self::Shorthand(shorthand) => shorthand.resolve(marker_store, file, context), @@ -50,7 +81,7 @@ impl DiagnosticInput { } /// Resolve a diagnostic input without marker context. 
- pub(super) fn resolve_without_markers(self, context: &str) -> Result { + pub(super) fn resolve_without_markers(self, context: &str) -> Result { match self { Self::Full(diagnostic) => Ok(diagnostic), Self::Shorthand(shorthand) => shorthand.resolve_without_markers(context), @@ -62,7 +93,7 @@ impl DiagnosticInput { marker_store: &MarkerStore, file: Option<&str>, context: &str, - ) -> Result { + ) -> Result { match file { Some(file) => self.resolve_with_file(marker_store, file, context), None => self.resolve_without_markers(context), @@ -90,7 +121,7 @@ impl DiagnosticShorthandInput { marker_store: &MarkerStore, file: &str, context: &str, - ) -> Result { + ) -> Result { let range = marker_store.resolve_range(file, self.range, context)?; Ok(Diagnostic { range, @@ -105,7 +136,7 @@ impl DiagnosticShorthandInput { }) } - fn resolve_without_markers(self, context: &str) -> Result { + fn resolve_without_markers(self, context: &str) -> Result { let range = self.range.resolve_range(context)?; Ok(Diagnostic { range, @@ -173,7 +204,7 @@ impl CodeActionOrCommandInput { marker_store: &MarkerStore, default_file: Option<&str>, context: &str, - ) -> Result { + ) -> Result { match self { Self::Full(action) => Ok(*action), Self::Shorthand(action) => { @@ -204,7 +235,7 @@ impl CodeActionShorthandInput { marker_store: &MarkerStore, default_file: Option<&str>, context: &str, - ) -> Result { + ) -> Result { let diagnostics = self .diagnostics .into_iter() @@ -221,35 +252,37 @@ impl CodeActionShorthandInput { let edit = if self.edits.is_empty() { None } else { - let changes = self - .edits - .into_iter() - .map(|(relative_path, edits)| { - let uri: lsp_types::Uri = - file_uri(base_dir, &relative_path) + let changes = + self.edits + .into_iter() + .map(|(relative_path, edits)| { + let uri: lsp_types::Uri = file_uri(base_dir, &relative_path) .parse() .map_err(|error| { - format!( + input_err!( "{context}: parse edit URI for '{}': {error}", relative_path ) })?; - let edits = edits - 
.into_iter() - .map(|edit| { - edit.resolve_with_markers( - marker_store, - &relative_path, - "expectCodeAction.result.edits", - ) - .map_err(|error| { - format!("{context}: resolve edit for '{}': {error}", relative_path) + let edits = edits + .into_iter() + .map(|edit| { + edit.resolve_with_markers( + marker_store, + &relative_path, + "expectCodeAction.result.edits", + ) + .map_err(|error| { + input_err!( + "{context}: resolve edit for '{}': {error}", + relative_path + ) + }) }) - }) - .collect::, _>>()?; - Ok::<_, String>((uri, edits)) - }) - .collect::, _>>()?; + .collect::, _>>()?; + Ok::<_, InputError>((uri, edits)) + }) + .collect::, _>>()?; Some(WorkspaceEdit { changes: Some(changes), @@ -286,7 +319,7 @@ impl TextEditInput { marker_store: &MarkerStore, file: &str, context: &str, - ) -> Result { + ) -> Result { Ok(TextEdit { range: marker_store.resolve_range(file, self.range, context)?, new_text: self.replace, @@ -309,9 +342,9 @@ impl LocationInput { base_dir: &Path, marker_store: &MarkerStore, context: &str, - ) -> Result { + ) -> Result { let uri: lsp_types::Uri = file_uri(base_dir, &self.file).parse().map_err(|error| { - format!("{context}: parse location URI for '{}': {error}", self.file) + input_err!("{context}: parse location URI for '{}': {error}", self.file) })?; let range = marker_store.resolve_range(&self.file, self.range, context)?; Ok(Location { uri, range }) @@ -334,7 +367,7 @@ impl GotoDefinitionResponseInput { base_dir: &Path, marker_store: &MarkerStore, context: &str, - ) -> Result { + ) -> Result { match self { Self::Full(result) => Ok(result), Self::Single(location) => location @@ -364,7 +397,7 @@ impl WorkspaceEditInput { base_dir: &Path, marker_store: &MarkerStore, context: &str, - ) -> Result { + ) -> Result { match self { Self::Full(edit) => Ok(edit), Self::Shorthand(shorthand) => shorthand.resolve(base_dir, marker_store, context), @@ -385,7 +418,7 @@ impl WorkspaceEditShorthandInput { base_dir: &Path, marker_store: &MarkerStore, 
context: &str, - ) -> Result { + ) -> Result { let changes = self .edits .into_iter() @@ -394,13 +427,13 @@ impl WorkspaceEditShorthandInput { file_uri(base_dir, &relative_path) .parse() .map_err(|error| { - format!("{context}: parse edit URI for '{}': {error}", relative_path) + input_err!("{context}: parse edit URI for '{}': {error}", relative_path) })?; let edits = edits .into_iter() .map(|edit| edit.resolve_with_markers(marker_store, &relative_path, context)) .collect::, _>>()?; - Ok::<_, String>((uri, edits)) + Ok::<_, InputError>((uri, edits)) }) .collect::, _>>()?; @@ -427,7 +460,7 @@ impl WorkspaceSymbolResponseInput { base_dir: &Path, marker_store: &MarkerStore, context: &str, - ) -> Result { + ) -> Result { match self { Self::Full(response) => Ok(response), Self::Shorthand(shorthand) => shorthand.resolve(base_dir, marker_store, context), @@ -447,7 +480,7 @@ impl WorkspaceSymbolResponseShorthandInput { base_dir: &Path, marker_store: &MarkerStore, context: &str, - ) -> Result { + ) -> Result { let symbols = self .symbols .into_iter() @@ -475,9 +508,9 @@ impl WorkspaceSymbolInput { base_dir: &Path, marker_store: &MarkerStore, context: &str, - ) -> Result { + ) -> Result { let uri: lsp_types::Uri = file_uri(base_dir, &self.file).parse().map_err(|error| { - format!( + input_err!( "{context}: parse workspace symbol URI for '{}': {error}", self.file ) @@ -495,7 +528,9 @@ impl WorkspaceSymbolInput { "containerName": self.container_name, }); serde_json::from_value(value).map_err(|error| { - format!("{context}: decode workspace symbol shorthand into SymbolInformation: {error}") + input_err!( + "{context}: decode workspace symbol shorthand into SymbolInformation: {error}" + ) }) } } @@ -516,7 +551,7 @@ impl CodeLensInput { marker_store: &MarkerStore, default_file: Option<&str>, context: &str, - ) -> Result { + ) -> Result { match self { Self::Full(lens) => Ok(lens), Self::Shorthand(shorthand) => { @@ -542,7 +577,7 @@ impl CodeLensShorthandInput { marker_store: 
&MarkerStore, default_file: Option<&str>, context: &str, - ) -> Result { + ) -> Result { let range = match default_file { Some(file) => marker_store.resolve_range(file, self.range, context)?, None => self.range.resolve_range(context)?, @@ -569,7 +604,7 @@ struct CodeLensCommandInput { } impl CodeLensCommandInput { - fn resolve(self, base_dir: &Path, context: &str) -> Result { + fn resolve(self, base_dir: &Path, context: &str) -> Result { let arguments = self .arguments .into_iter() @@ -594,7 +629,7 @@ enum CommandArgumentInput { } impl CommandArgumentInput { - fn resolve(self, base_dir: &Path, context: &str) -> Result { + fn resolve(self, base_dir: &Path, context: &str) -> Result { match self { Self::File { file } => Ok(serde_json::Value::String(file_uri(base_dir, &file))), Self::Value(value) => { @@ -602,7 +637,7 @@ impl CommandArgumentInput { if let Some(file) = file.as_str() { return Ok(serde_json::Value::String(file_uri(base_dir, file))); } - return Err(format!( + return Err(input_err!( "{context}: command argument `file` must be a string" )); } @@ -627,7 +662,7 @@ impl SemanticTokensResultInput { marker_store: &MarkerStore, file: &str, context: &str, - ) -> Result { + ) -> Result { match self { Self::Full(result) => Ok(result), Self::Shorthand(shorthand) => Ok(SemanticTokensResult::Tokens(shorthand.resolve( @@ -644,7 +679,7 @@ impl SemanticTokensResultInput { marker_store: &MarkerStore, file: &str, context: &str, - ) -> Result { + ) -> Result { match self { Self::Full(SemanticTokensResult::Tokens(tokens)) => { Ok(SemanticTokensRangeResult::Tokens(tokens)) @@ -676,7 +711,7 @@ impl SemanticTokensShorthandInput { marker_store: &MarkerStore, file: &str, context: &str, - ) -> Result { + ) -> Result { let absolute_tokens = self .tokens_by_marker .into_iter() @@ -705,7 +740,7 @@ impl SemanticTokenByMarkerInput { marker_store: &MarkerStore, file: &str, index: usize, - ) -> Result { + ) -> Result { let token_type = self .token_type .resolve_at(context, 
&format!("tokensByMarker[{index}].type"))?; @@ -722,7 +757,7 @@ impl SemanticTokenByMarkerInput { .collect::, _>>()?; let range = marker_store.resolve_named_range(file, &self.marker, context)?; if range.start.line != range.end.line { - return Err(format!( + return Err(input_err!( "{context}: tokensByMarker[{index}] marker '{}' spans multiple lines", self.marker )); @@ -732,13 +767,13 @@ impl SemanticTokenByMarkerInput { .character .checked_sub(range.start.character) .ok_or_else(|| { - format!( + input_err!( "{context}: tokensByMarker[{index}] marker '{}' has invalid range", self.marker ) })?; if len == 0 { - return Err(format!( + return Err(input_err!( "{context}: tokensByMarker[{index}] marker '{}' resolves to an empty range", self.marker )); @@ -759,9 +794,13 @@ impl SemanticTokenByMarkerInput { struct SemanticTokenTypeInput(String); impl SemanticTokenTypeInput { - fn resolve_at(self, context: &str, location: &str) -> Result { + fn resolve_at( + self, + context: &str, + location: &str, + ) -> Result { SemanticTokenTypeName::from_str(&self.0).map_err(|_| { - format!( + input_err!( "{context}: unknown semantic token type '{}' at {location}", self.0 ) @@ -778,9 +817,9 @@ impl SemanticTokenModifierInput { self, context: &str, location: &str, - ) -> Result { + ) -> Result { SemanticTokenModifierName::from_str(&self.0).map_err(|_| { - format!( + input_err!( "{context}: unknown semantic token modifier '{}' at {location}", self.0 ) @@ -815,7 +854,7 @@ impl InlayHintInput { marker_store: &MarkerStore, file: &str, context: &str, - ) -> Result { + ) -> Result { let text_edits = self .text_edits .map(|edits| { @@ -869,7 +908,7 @@ impl InlayHintTextEditInput { marker_store: &MarkerStore, file: &str, context: &str, - ) -> Result { + ) -> Result { Ok(TextEdit { range: self.range.resolve(marker_store, file, context)?, new_text: self.new_text, @@ -883,13 +922,13 @@ fn resolve_marker_json_input( marker_store: &MarkerStore, file: &str, context: &str, -) -> Result +) -> Result where 
T: DeserializeOwned, { let resolved = resolve_marker_references_json(value, marker_store, file, context, "$")?; serde_json::from_value(resolved) - .map_err(|error| format!("{context}: decode marker-expanded value: {error}")) + .map_err(|error| input_err!("{context}: decode marker-expanded value: {error}")) } /// Recursively rewrite `{ positionOf: ... }` and `{ rangeOf: ... }` objects. @@ -903,7 +942,7 @@ fn resolve_marker_references_json( file: &str, context: &str, path: &str, -) -> Result { +) -> Result { match value { serde_json::Value::Object(mut object) => { if object.len() == 1 { @@ -916,15 +955,16 @@ fn resolve_marker_references_json( context, )?; return serde_json::to_value(position).map_err(|error| { - format!("{context}: serialize position at {path}: {error}") + input_err!("{context}: serialize position at {path}: {error}") }); } if let Some(marker) = object.remove("rangeOf") { let marker_name = marker_name_from_value(marker, context, &format!("{path}.rangeOf"))?; let range = marker_store.resolve_named_range(file, &marker_name, context)?; - return serde_json::to_value(range) - .map_err(|error| format!("{context}: serialize range at {path}: {error}")); + return serde_json::to_value(range).map_err(|error| { + input_err!("{context}: serialize range at {path}: {error}") + }); } } @@ -960,11 +1000,11 @@ fn marker_name_from_value( value: serde_json::Value, context: &str, path: &str, -) -> Result { +) -> Result { match value { serde_json::Value::String(name) if !name.is_empty() => Ok(name), - serde_json::Value::String(_) => Err(format!("{context}: {path} cannot be empty")), - other => Err(format!( + serde_json::Value::String(_) => Err(input_err!("{context}: {path} cannot be empty")), + other => Err(input_err!( "{context}: {path} must be a string marker name, got {other}" )), } @@ -984,7 +1024,7 @@ impl CompletionResponseInput { marker_store: &MarkerStore, file: &str, context: &str, - ) -> Result { + ) -> Result { match self { Self::Full(response) => Ok(response), 
Self::WithMarkers(value) => { @@ -1008,7 +1048,7 @@ impl DocumentSymbolResponseInput { marker_store: &MarkerStore, file: &str, context: &str, - ) -> Result { + ) -> Result { match self { Self::Full(response) => Ok(response), Self::WithMarkers(value) => { @@ -1032,7 +1072,7 @@ impl PrepareRenameResponseInput { marker_store: &MarkerStore, file: &str, context: &str, - ) -> Result { + ) -> Result { match self { Self::Full(response) => Ok(response), Self::WithMarkers(value) => { @@ -1081,7 +1121,7 @@ impl ExpectDocumentSymbolScriptStep { marker_store: &MarkerStore, file: &str, context: &str, - ) -> Result, String> { + ) -> Result, InputError> { self.result .map(|result| result.resolve(marker_store, file, context)) .transpose() @@ -1111,7 +1151,7 @@ impl ExpectPrepareRenameScriptStep { marker_store: &MarkerStore, file: &str, context: &str, - ) -> Result, String> { + ) -> Result, InputError> { self.result .map(|result| result.resolve(marker_store, file, context)) .transpose() @@ -1139,7 +1179,7 @@ impl ExpectCompletionScriptStep { marker_store: &MarkerStore, file: &str, context: &str, - ) -> Result, String> { + ) -> Result, InputError> { self.result .map(|result| result.resolve(marker_store, file, context)) .transpose() diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_script/markers.rs b/crates/jrsonnet-lsp-scenario/src/scenario_script/markers.rs index f3c0920b..8c2776cb 100644 --- a/crates/jrsonnet-lsp-scenario/src/scenario_script/markers.rs +++ b/crates/jrsonnet-lsp-scenario/src/scenario_script/markers.rs @@ -18,6 +18,33 @@ use std::collections::HashMap; use lsp_types::{Position, Range}; use rowan::{GreenNodeBuilder, Language, NodeOrToken}; use serde::Deserialize; +use thiserror::Error; + +#[derive(Debug, Error)] +#[error("{message}")] +pub struct MarkerError { + message: String, +} + +impl MarkerError { + fn message(message: impl Into) -> Self { + Self { + message: message.into(), + } + } +} + +impl From for MarkerError { + fn from(message: String) -> Self { + 
Self::message(message) + } +} + +macro_rules! marker_err { + ($($arg:tt)*) => { + MarkerError::message(format!($($arg)*)) + }; +} /// Position selector used by scenario script fields like `at`. /// @@ -60,34 +87,34 @@ impl RangeInput { /// /// This is only valid for non-marker data. Marker references require file /// context and must be resolved through [`MarkerStore`]. - pub(super) fn resolve_range(self, context: &str) -> Result { + pub(super) fn resolve_range(self, context: &str) -> Result { if let Some(range_spec) = self.range { if self.at.is_some() || self.text.is_some() || self.len.is_some() { - return Err(format!( + return Err(marker_err!( "{context}: specify either `range` or shorthand (`at` + `text`/`len`), not both" )); } return match range_spec { - RangeSpec::Marker(name) => Err(format!( + RangeSpec::Marker(name) => Err(marker_err!( "{context}: marker range '{name}' requires file context" )), }; } let Some(start_spec) = self.at else { - return Err(format!( + return Err(marker_err!( "{context}: missing range, provide `range` or shorthand (`at` + `text`/`len`)" )); }; match (self.text, self.len) { (Some(_), Some(_)) => { - return Err(format!( + return Err(marker_err!( "{context}: shorthand cannot include both `text` and `len`" )); } (None, None) => { - return Err(format!( + return Err(marker_err!( "{context}: shorthand requires one of `text` or `len`" )); } @@ -95,7 +122,7 @@ impl RangeInput { } match start_spec { - PositionSpec::Marker(name) => Err(format!( + PositionSpec::Marker(name) => Err(marker_err!( "{context}: marker position '{name}' requires file context" )), } @@ -117,9 +144,13 @@ impl PositionFieldInput { marker_store: &MarkerStore, file: &str, context: &str, - ) -> Result { + ) -> Result { self.position_of.map_or_else( - || Err(format!("{context}: missing position, provide `positionOf`")), + || { + Err(marker_err!( + "{context}: missing position, provide `positionOf`" + )) + }, |name| marker_store.resolve_position_spec(file, 
PositionSpec::Marker(name), context), ) } @@ -140,9 +171,9 @@ impl RangeFieldInput { marker_store: &MarkerStore, file: &str, context: &str, - ) -> Result { + ) -> Result { self.range_of.map_or_else( - || Err(format!("{context}: missing range, provide `rangeOf`")), + || Err(marker_err!("{context}: missing range, provide `rangeOf`")), |marker| marker_store.resolve_named_range(file, &marker, context), ) } @@ -188,7 +219,7 @@ impl MarkerStore { file: &str, raw_text: String, context: &str, - ) -> Result { + ) -> Result { let (text, markers) = parse_marked_text(&raw_text, context)?; self.documents.insert( file.to_string(), @@ -212,7 +243,7 @@ impl MarkerStore { range: Range, raw_text: String, context: &str, - ) -> Result { + ) -> Result { let (text, inserted_markers) = parse_marked_text(&raw_text, context)?; let Some(document) = self.documents.get_mut(file) else { return Ok(text); @@ -221,15 +252,15 @@ impl MarkerStore { let start_offset = position_to_offset(&document.text, range.start, context)?; let end_offset = position_to_offset(&document.text, range.end, context)?; if start_offset > end_offset { - return Err(format!( + return Err(marker_err!( "{context}: incremental range start is after end for file '{file}'" )); } let start_byte = char_offset_to_byte_offset(&document.text, start_offset) - .ok_or_else(|| format!("{context}: start offset out of bounds in '{file}'"))?; + .ok_or_else(|| marker_err!("{context}: start offset out of bounds in '{file}'"))?; let end_byte = char_offset_to_byte_offset(&document.text, end_offset) - .ok_or_else(|| format!("{context}: end offset out of bounds in '{file}'"))?; + .ok_or_else(|| marker_err!("{context}: end offset out of bounds in '{file}'"))?; let mut next_text = String::with_capacity( start_byte + text.len() + document.text.len().saturating_sub(end_byte), @@ -244,7 +275,7 @@ impl MarkerStore { .and_then(|inserted| { isize::try_from(replaced_width).map(|replaced| inserted - replaced) }) - .map_err(|_| format!("{context}: 
incremental text width overflow for '{file}'"))?; + .map_err(|_| marker_err!("{context}: incremental text width overflow for '{file}'"))?; let mut next_markers = HashMap::with_capacity(document.markers.len() + inserted_markers.len()); @@ -265,18 +296,18 @@ impl MarkerStore { for (name, marker) in inserted_markers { if next_markers.contains_key(&name) { - return Err(format!( + return Err(marker_err!( "{context}: duplicate marker name '{name}' in file '{file}'" )); } let start = marker .start .checked_add(start_offset) - .ok_or_else(|| format!("{context}: marker '{name}' start overflow"))?; + .ok_or_else(|| marker_err!("{context}: marker '{name}' start overflow"))?; let end = marker .end .checked_add(start_offset) - .ok_or_else(|| format!("{context}: marker '{name}' end overflow"))?; + .ok_or_else(|| marker_err!("{context}: marker '{name}' end overflow"))?; next_markers.insert(name, MarkerRangeOffsets::new(start, end)); } @@ -291,9 +322,9 @@ impl MarkerStore { file: &str, at: Option, context: &str, - ) -> Result { + ) -> Result { at.map_or_else( - || Err(format!("{context}: missing position, provide `at`")), + || Err(marker_err!("{context}: missing position, provide `at`")), |spec| self.resolve_position_spec(file, spec, context), ) } @@ -304,10 +335,10 @@ impl MarkerStore { file: &str, input: RangeInput, context: &str, - ) -> Result { + ) -> Result { if let Some(range_spec) = input.range { if input.at.is_some() || input.text.is_some() || input.len.is_some() { - return Err(format!( + return Err(marker_err!( "{context}: specify either `range` or shorthand (`at` + `text`/`len`), not both" )); } @@ -317,7 +348,7 @@ impl MarkerStore { } let Some(start_spec) = input.at else { - return Err(format!( + return Err(marker_err!( "{context}: missing range, provide `range` or shorthand (`at` + `text`/`len`)" )); }; @@ -325,13 +356,13 @@ impl MarkerStore { match (input.text, input.len) { (Some(text), None) => { let width = u32::try_from(text.chars().count()).map_err(|_| { - 
format!("{context}: shorthand `text` length does not fit in u32") + marker_err!("{context}: shorthand `text` length does not fit in u32") })?; let start = self.resolve_position_spec(file, start_spec, context)?; let end_character = start .character .checked_add(width) - .ok_or_else(|| format!("{context}: range end overflow"))?; + .ok_or_else(|| marker_err!("{context}: range end overflow"))?; Ok(Range { start, end: Position { @@ -345,7 +376,7 @@ impl MarkerStore { let end_character = start .character .checked_add(len) - .ok_or_else(|| format!("{context}: range end overflow"))?; + .ok_or_else(|| marker_err!("{context}: range end overflow"))?; Ok(Range { start, end: Position { @@ -354,7 +385,7 @@ impl MarkerStore { }, }) } - (Some(_), Some(_)) => Err(format!( + (Some(_), Some(_)) => Err(marker_err!( "{context}: shorthand cannot include both `text` and `len`" )), (None, None) => match start_spec { @@ -369,10 +400,10 @@ impl MarkerStore { file: &str, marker_name: &str, context: &str, - ) -> Result { + ) -> Result { let marker = self.lookup_marker(file, marker_name, context)?; let text = self.full_text(file).ok_or_else(|| { - format!( + marker_err!( "{context}: no tracked text for file '{file}' while resolving marker '{marker_name}'" ) })?; @@ -387,12 +418,12 @@ impl MarkerStore { file: &str, spec: PositionSpec, context: &str, - ) -> Result { + ) -> Result { match spec { PositionSpec::Marker(name) => { let marker = self.lookup_marker(file, &name, context)?; let text = self.full_text(file).ok_or_else(|| { - format!( + marker_err!( "{context}: no tracked text for file '{file}' while resolving marker '{name}'" ) })?; @@ -407,9 +438,9 @@ impl MarkerStore { file: &str, marker_name: &str, context: &str, - ) -> Result<&MarkerRangeOffsets, String> { + ) -> Result<&MarkerRangeOffsets, MarkerError> { let Some(document) = self.documents.get(file) else { - return Err(format!( + return Err(marker_err!( "{context}: file '{file}' has no parsed text/markers; define it in `create`, 
`open`, `writeFile`, or `changeFull` first" )); }; @@ -420,7 +451,7 @@ impl MarkerStore { .keys() .map(String::as_str) .collect::>(); - format!( + marker_err!( "{context}: unknown marker '{marker_name}' in file '{file}', available markers: {:?}", available ) @@ -444,7 +475,7 @@ impl MarkerStore { fn parse_marked_text( input: &str, context: &str, -) -> Result<(String, HashMap), String> { +) -> Result<(String, HashMap), MarkerError> { let root = MarkerSyntaxParser::new(input, context).parse()?; let mut segment = ParsedSegment::default(); append_marker_node_contents(&mut segment, &root, context)?; @@ -460,12 +491,12 @@ struct ParsedSegment { impl ParsedSegment { /// Append plain text and advance tracked character length. - fn push_text(&mut self, text: &str, context: &str) -> Result<(), String> { + fn push_text(&mut self, text: &str, context: &str) -> Result<(), MarkerError> { self.text.push_str(text); self.char_len = self .char_len .checked_add(text.chars().count()) - .ok_or_else(|| format!("{context}: text length overflow"))?; + .ok_or_else(|| marker_err!("{context}: text length overflow"))?; Ok(()) } @@ -476,9 +507,9 @@ impl ParsedSegment { start: usize, end: usize, context: &str, - ) -> Result<(), String> { + ) -> Result<(), MarkerError> { if self.markers.contains_key(&name) { - return Err(format!( + return Err(marker_err!( "{context}: duplicate marker name '{name}' in one text block" )); } @@ -563,7 +594,7 @@ fn append_marker_node_contents( target: &mut ParsedSegment, node: &MarkerSyntaxNode, context: &str, -) -> Result<(), String> { +) -> Result<(), MarkerError> { for child in node.children_with_tokens() { match child { NodeOrToken::Node(child_node) => append_marker_node(target, &child_node, context)?, @@ -578,7 +609,7 @@ fn append_marker_node( target: &mut ParsedSegment, node: &MarkerSyntaxNode, context: &str, -) -> Result<(), String> { +) -> Result<(), MarkerError> { match node.kind() { MarkerSyntaxKind::Root | MarkerSyntaxKind::RangeBody @@ -586,7 +617,7 @@ 
fn append_marker_node( | MarkerSyntaxKind::CursorAfter => append_marker_node_contents(target, node, context), MarkerSyntaxKind::RangeMarker => append_range_marker(target, node, context), MarkerSyntaxKind::CursorMarker => append_cursor_marker(target, node, context), - kind => Err(format!( + kind => Err(marker_err!( "{context}: unexpected marker syntax node {kind:?} while translating marker tree" )), } @@ -597,7 +628,7 @@ fn append_range_marker( target: &mut ParsedSegment, node: &MarkerSyntaxNode, context: &str, -) -> Result<(), String> { +) -> Result<(), MarkerError> { let marker_name = marker_name_for_node(node, context)?; let marker_start = target.char_len; let mut body_seen = false; @@ -610,7 +641,7 @@ fn append_range_marker( } if !body_seen { - return Err(format!( + return Err(marker_err!( "{context}: malformed range marker '{marker_name}', missing body" )); } @@ -624,7 +655,7 @@ fn append_cursor_marker( target: &mut ParsedSegment, node: &MarkerSyntaxNode, context: &str, -) -> Result<(), String> { +) -> Result<(), MarkerError> { let marker_name = marker_name_for_node(node, context)?; let mut before = None::; let mut after = None::; @@ -638,10 +669,10 @@ fn append_cursor_marker( } let before = before.ok_or_else(|| { - format!("{context}: malformed cursor marker '{marker_name}', missing before segment") + marker_err!("{context}: malformed cursor marker '{marker_name}', missing before segment") })?; let after = after.ok_or_else(|| { - format!("{context}: malformed cursor marker '{marker_name}', missing after segment") + marker_err!("{context}: malformed cursor marker '{marker_name}', missing after segment") })?; append_marker_node_contents(target, &before, context)?; @@ -655,7 +686,7 @@ fn append_marker_token( target: &mut ParsedSegment, token: &MarkerSyntaxToken, context: &str, -) -> Result<(), String> { +) -> Result<(), MarkerError> { match token.kind() { MarkerSyntaxKind::Text => target.push_text(token.text(), context), MarkerSyntaxKind::MarkerName @@ -665,14 
+696,14 @@ fn append_marker_token( | MarkerSyntaxKind::CloseCursor | MarkerSyntaxKind::Colon | MarkerSyntaxKind::Pipe => Ok(()), - kind => Err(format!( + kind => Err(marker_err!( "{context}: unexpected marker syntax token {kind:?} while translating marker tree" )), } } /// Extract marker name token from a marker node. -fn marker_name_for_node(node: &MarkerSyntaxNode, context: &str) -> Result { +fn marker_name_for_node(node: &MarkerSyntaxNode, context: &str) -> Result { node.children_with_tokens() .find_map(|element| match element { NodeOrToken::Node(_) => None, @@ -681,7 +712,7 @@ fn marker_name_for_node(node: &MarkerSyntaxNode, context: &str) -> Result MarkerSyntaxParser<'a> { } /// Parse full input into a syntax tree rooted at `Root`. - fn parse(mut self) -> Result { + fn parse(mut self) -> Result { self.start_node(MarkerSyntaxKind::Root); while !self.is_eof() { self.parse_item()?; @@ -714,7 +745,7 @@ impl<'a> MarkerSyntaxParser<'a> { } /// Parse one top-level item, preferring marker constructs over raw text. - fn parse_item(&mut self) -> Result<(), String> { + fn parse_item(&mut self) -> Result<(), MarkerError> { if self.try_parse_range_marker()? { return Ok(()); } @@ -728,7 +759,7 @@ impl<'a> MarkerSyntaxParser<'a> { /// Attempt to parse `[[name:...]]`. /// /// Returns `Ok(false)` if current cursor is not at a valid range marker. 
- fn try_parse_range_marker(&mut self) -> Result { + fn try_parse_range_marker(&mut self) -> Result { if !self.starts_with("[[") { return Ok(false); } @@ -737,7 +768,7 @@ impl<'a> MarkerSyntaxParser<'a> { let Some((name_width, marker_name)) = self.peek_marker_name_and_colon( start .checked_add(2) - .ok_or_else(|| format!("{}: marker parser index overflow", self.context))?, + .ok_or_else(|| marker_err!("{}: marker parser index overflow", self.context))?, ) else { return Ok(false); }; @@ -759,7 +790,7 @@ impl<'a> MarkerSyntaxParser<'a> { self.parse_item()?; } - Err(format!( + Err(marker_err!( "{}: unterminated range marker starting at byte {start}", self.context )) @@ -769,7 +800,7 @@ impl<'a> MarkerSyntaxParser<'a> { /// /// Cursor markers must contain exactly one top-level `|`. /// Returns `Ok(false)` if current cursor is not at a valid cursor marker. - fn try_parse_cursor_marker(&mut self) -> Result { + fn try_parse_cursor_marker(&mut self) -> Result { if !self.starts_with("((") { return Ok(false); } @@ -778,7 +809,7 @@ impl<'a> MarkerSyntaxParser<'a> { let Some((name_width, marker_name)) = self.peek_marker_name_and_colon( start .checked_add(2) - .ok_or_else(|| format!("{}: marker parser index overflow", self.context))?, + .ok_or_else(|| marker_err!("{}: marker parser index overflow", self.context))?, ) else { return Ok(false); }; @@ -793,9 +824,10 @@ impl<'a> MarkerSyntaxParser<'a> { self.start_node(MarkerSyntaxKind::CursorBefore); while !self.is_eof() { if self.starts_with("))") { - return Err(format!( + return Err(marker_err!( "{}: cursor marker '{}' must include exactly one top-level `|`", - self.context, marker_name + self.context, + marker_name )); } if self.peek_char() == Some('|') { @@ -808,7 +840,7 @@ impl<'a> MarkerSyntaxParser<'a> { } if self.is_eof() { - return Err(format!( + return Err(marker_err!( "{}: unterminated cursor marker starting at byte {start}", self.context )); @@ -822,30 +854,31 @@ impl<'a> MarkerSyntaxParser<'a> { return Ok(true); } if 
self.peek_char() == Some('|') { - return Err(format!( + return Err(marker_err!( "{}: cursor marker '{}' must include exactly one top-level `|`", - self.context, marker_name + self.context, + marker_name )); } self.parse_item()?; } - Err(format!( + Err(marker_err!( "{}: unterminated cursor marker starting at byte {start}", self.context )) } /// Emit one non-marker UTF-8 scalar as plain text. - fn parse_text_token(&mut self) -> Result<(), String> { - let ch = self - .peek_char() - .ok_or_else(|| format!("{}: marker parser unexpectedly reached EOF", self.context))?; + fn parse_text_token(&mut self) -> Result<(), MarkerError> { + let ch = self.peek_char().ok_or_else(|| { + marker_err!("{}: marker parser unexpectedly reached EOF", self.context) + })?; let width = ch.len_utf8(); let end = self .index .checked_add(width) - .ok_or_else(|| format!("{}: marker parser index overflow", self.context))?; + .ok_or_else(|| marker_err!("{}: marker parser index overflow", self.context))?; let text = &self.input[self.index..end]; self.emit_text_token(MarkerSyntaxKind::Text, text); self.index = end; @@ -889,7 +922,7 @@ impl<'a> MarkerSyntaxParser<'a> { self.builder.token(kind.into(), text); } - fn emit_fixed_token(&mut self, kind: MarkerSyntaxKind, text: &str) -> Result<(), String> { + fn emit_fixed_token(&mut self, kind: MarkerSyntaxKind, text: &str) -> Result<(), MarkerError> { self.emit_text_token(kind, text); self.advance_bytes(text.len()) } @@ -902,11 +935,11 @@ impl<'a> MarkerSyntaxParser<'a> { self.input[self.index..].chars().next() } - fn advance_bytes(&mut self, bytes: usize) -> Result<(), String> { + fn advance_bytes(&mut self, bytes: usize) -> Result<(), MarkerError> { self.index = self .index .checked_add(bytes) - .ok_or_else(|| format!("{}: marker parser index overflow", self.context))?; + .ok_or_else(|| marker_err!("{}: marker parser index overflow", self.context))?; Ok(()) } @@ -924,15 +957,15 @@ const fn is_marker_name_continue(ch: char) -> bool { } /// Validate marker 
name lexical constraints. -fn parse_marker_name<'a>(name: &'a str, context: &str) -> Result<&'a str, String> { +fn parse_marker_name<'a>(name: &'a str, context: &str) -> Result<&'a str, MarkerError> { if name.is_empty() { - return Err(format!("{context}: marker name cannot be empty")); + return Err(marker_err!("{context}: marker name cannot be empty")); } Ok(name) } /// Convert a character offset into an LSP `(line, character)` position. -fn offset_to_position(text: &str, offset: usize, context: &str) -> Result { +fn offset_to_position(text: &str, offset: usize, context: &str) -> Result { let mut line = 0u32; let mut character = 0u32; let mut consumed = 0usize; @@ -943,16 +976,16 @@ fn offset_to_position(text: &str, offset: usize, context: &str) -> Result Result Result { +fn position_to_offset(text: &str, position: Position, context: &str) -> Result { let mut line = 0u32; let mut character = 0u32; let mut offset = 0usize; @@ -977,16 +1010,16 @@ fn position_to_offset(text: &str, position: Position, context: &str) -> Result Result Option { } /// Shift an offset by signed delta with overflow/underflow checks. 
-fn shift_offset(value: usize, delta: isize, context: &str) -> Result { +fn shift_offset(value: usize, delta: isize, context: &str) -> Result { if delta.is_negative() { let amount = delta.unsigned_abs(); value .checked_sub(amount) - .ok_or_else(|| format!("{context}: marker offset underflow while shifting")) + .ok_or_else(|| marker_err!("{context}: marker offset underflow while shifting")) } else { let amount = - usize::try_from(delta).map_err(|_| format!("{context}: marker shift overflow"))?; + usize::try_from(delta).map_err(|_| marker_err!("{context}: marker shift overflow"))?; value .checked_add(amount) - .ok_or_else(|| format!("{context}: marker offset overflow while shifting")) + .ok_or_else(|| marker_err!("{context}: marker offset overflow while shifting")) } } diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_script/registry.rs b/crates/jrsonnet-lsp-scenario/src/scenario_script/registry.rs index 6704c5ea..60ff9007 100644 --- a/crates/jrsonnet-lsp-scenario/src/scenario_script/registry.rs +++ b/crates/jrsonnet-lsp-scenario/src/scenario_script/registry.rs @@ -5,11 +5,16 @@ //! aliases to generated request IDs and also maintains per-kind FIFO queues for //! unnamed request/expect pairs. -use std::collections::{HashMap, VecDeque}; +use std::{ + collections::{HashMap, VecDeque}, + fmt, +}; + +use thiserror::Error; /// LSP request kinds supported by the scenario DSL. 
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub(super) enum RequestKind { +pub enum RequestKind { CodeAction, References, Definition, @@ -30,6 +35,34 @@ pub(super) enum RequestKind { ExecuteCommand, } +impl fmt::Display for RequestKind { + fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { + formatter.write_str(self.label()) + } +} + +#[derive(Debug, Error)] +pub enum RequestRegistryError { + #[error("request id overflow")] + RequestIdOverflow, + #[error("duplicate request alias '{name}', request aliases must be unique")] + DuplicateAlias { name: String }, + #[error("unknown request alias '{name}' for {kind}, define it with `as`")] + UnknownAlias { name: String, kind: RequestKind }, + #[error("request alias '{name}' has kind {alias_kind}, cannot match {requested_kind}")] + AliasKindMismatch { + name: String, + alias_kind: RequestKind, + requested_kind: RequestKind, + }, + #[error("request alias '{name}' for {kind} was already matched")] + AliasAlreadyMatched { name: String, kind: RequestKind }, + #[error("failed to claim queued request alias '{name}' for {kind}")] + ClaimQueueCorrupted { name: String, kind: RequestKind }, + #[error("no pending {kind} to match; add the request first or reference it via `request`")] + NoPendingRequest { kind: RequestKind }, +} + impl RequestKind { /// Canonical DSL step label for diagnostics and error messages. 
const fn label(self) -> &'static str { @@ -82,18 +115,16 @@ impl RequestRegistry { &mut self, kind: RequestKind, name: Option, - ) -> Result { + ) -> Result { let id = self.next_id; self.next_id = self .next_id .checked_add(1) - .ok_or_else(|| "request id overflow".to_string())?; + .ok_or(RequestRegistryError::RequestIdOverflow)?; if let Some(name) = name { if self.named.contains_key(&name) { - return Err(format!( - "duplicate request alias '{name}', request aliases must be unique" - )); + return Err(RequestRegistryError::DuplicateAlias { name }); } self.named.insert(name, (kind, id)); } @@ -106,33 +137,37 @@ impl RequestRegistry { /// /// If `name` is provided, the claim is by alias and kind-checked. /// Otherwise this pops from the per-kind FIFO queue. - pub(super) fn claim(&mut self, kind: RequestKind, name: Option<&str>) -> Result { + pub(super) fn claim( + &mut self, + kind: RequestKind, + name: Option<&str>, + ) -> Result { if let Some(name) = name { let (named_kind, id) = self.named.get(name).copied().ok_or_else(|| { - format!( - "unknown request alias '{name}' for {}, define it with `as`", - kind.label() - ) + RequestRegistryError::UnknownAlias { + name: name.to_string(), + kind, + } })?; if named_kind != kind { - return Err(format!( - "request alias '{name}' has kind {}, cannot match {}", - named_kind.label(), - kind.label() - )); + return Err(RequestRegistryError::AliasKindMismatch { + name: name.to_string(), + alias_kind: named_kind, + requested_kind: kind, + }); } let queue = self.pending.entry(kind).or_default(); let Some(index) = queue.iter().position(|candidate| *candidate == id) else { - return Err(format!( - "request alias '{name}' for {} was already matched", - kind.label() - )); + return Err(RequestRegistryError::AliasAlreadyMatched { + name: name.to_string(), + kind, + }); }; let Some(claimed) = queue.remove(index) else { - return Err(format!( - "failed to claim queued request alias '{name}' for {}", - kind.label() - )); + return 
Err(RequestRegistryError::ClaimQueueCorrupted { + name: name.to_string(), + kind, + }); }; return Ok(claimed); } @@ -140,11 +175,6 @@ impl RequestRegistry { self.pending .get_mut(&kind) .and_then(VecDeque::pop_front) - .ok_or_else(|| { - format!( - "no pending {} to match; add the request first or reference it via `request`", - kind.label() - ) - }) + .ok_or(RequestRegistryError::NoPendingRequest { kind }) } } From db598ee8a6931aa6bf14f674d41fde3150daae22 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Fri, 13 Feb 2026 12:07:41 +0000 Subject: [PATCH 096/210] Add semantic artifact cache and eager request-path lookups --- .../src/completion/handler.rs | 28 +- .../src/completion/locals.rs | 10 +- .../src/completion/mod.rs | 4 +- .../jrsonnet-lsp-handlers/src/definition.rs | 87 ++++- crates/jrsonnet-lsp-handlers/src/lib.rs | 14 +- .../jrsonnet-lsp-handlers/src/references.rs | 96 ++++- crates/jrsonnet-lsp-inference/src/lib.rs | 5 + crates/jrsonnet-lsp-inference/src/manager.rs | 114 +++++- .../src/semantic_artifacts.rs | 340 ++++++++++++++++++ crates/jrsonnet-lsp-scope/src/resolver.rs | 36 +- .../jrsonnet-lsp/src/server/async_requests.rs | 33 +- .../jrsonnet-lsp/src/server/notifications.rs | 3 + 12 files changed, 710 insertions(+), 60 deletions(-) create mode 100644 crates/jrsonnet-lsp-inference/src/semantic_artifacts.rs diff --git a/crates/jrsonnet-lsp-handlers/src/completion/handler.rs b/crates/jrsonnet-lsp-handlers/src/completion/handler.rs index 225de439..fd0e8f20 100644 --- a/crates/jrsonnet-lsp-handlers/src/completion/handler.rs +++ b/crates/jrsonnet-lsp-handlers/src/completion/handler.rs @@ -9,7 +9,7 @@ use std::path::{Path, PathBuf}; use jrsonnet_lsp_document::{token_at_offset, Document, LspPosition}; -use jrsonnet_lsp_inference::TypeAnalysis; +use jrsonnet_lsp_inference::{SemanticArtifacts, TypeAnalysis}; use jrsonnet_rowan_parser::AstNode; use lsp_types::{CompletionItem, CompletionItemKind, CompletionList}; use tracing::debug; @@ -18,7 +18,7 @@ use super::{ 
fields::check_object_field_completion, helpers::{get_identifier_prefix, is_inside_object}, imports::check_import_completion, - locals::get_local_completions, + locals::get_local_completions_with_semantic, stdlib::check_stdlib_completion, }; @@ -33,7 +33,7 @@ pub fn completion( doc_path: Option<&Path>, analysis: &TypeAnalysis, ) -> Option { - completion_with_import_roots(document, position, doc_path, &[], analysis) + completion_with_import_roots_and_semantic(document, position, doc_path, &[], analysis, None) } /// Get completion items with explicit import search roots. @@ -46,6 +46,25 @@ pub fn completion_with_import_roots( doc_path: Option<&Path>, import_roots: &[PathBuf], analysis: &TypeAnalysis, +) -> Option { + completion_with_import_roots_and_semantic( + document, + position, + doc_path, + import_roots, + analysis, + None, + ) +} + +/// Get completion items with explicit import roots and semantic artifacts. +pub fn completion_with_import_roots_and_semantic( + document: &Document, + position: LspPosition, + doc_path: Option<&Path>, + import_roots: &[PathBuf], + analysis: &TypeAnalysis, + semantic: Option<&SemanticArtifacts>, ) -> Option { let text = document.text(); let line_index = document.line_index(); @@ -89,7 +108,8 @@ pub fn completion_with_import_roots( } // For general completion, provide local variables in scope - let mut items = get_local_completions(document, position, text, offset.into()); + let mut items = + get_local_completions_with_semantic(document, position, text, offset.into(), semantic); // Also include `std` as a completion option since it's always available items.push(CompletionItem { diff --git a/crates/jrsonnet-lsp-handlers/src/completion/locals.rs b/crates/jrsonnet-lsp-handlers/src/completion/locals.rs index e0cd5440..2b3b371b 100644 --- a/crates/jrsonnet-lsp-handlers/src/completion/locals.rs +++ b/crates/jrsonnet-lsp-handlers/src/completion/locals.rs @@ -1,23 +1,25 @@ //! Local variable completions. 
use jrsonnet_lsp_document::{Document, LspPosition}; +use jrsonnet_lsp_inference::SemanticArtifacts; use lsp_types::{CompletionItem, CompletionItemKind}; use super::helpers::get_identifier_prefix; -use crate::definition::{collect_visible_bindings, BindingKind}; +use crate::definition::{collect_visible_bindings_with_semantic, BindingKind}; -/// Get completions for local variables in scope. -pub fn get_local_completions( +/// Get completions for local variables in scope using semantic artifacts when available. +pub fn get_local_completions_with_semantic( document: &Document, position: LspPosition, text: &str, offset: u32, + semantic: Option<&SemanticArtifacts>, ) -> Vec { // Get the prefix the user is typing (if any) let prefix = get_identifier_prefix(text, offset as usize); // Collect all visible bindings - let bindings = collect_visible_bindings(document, position); + let bindings = collect_visible_bindings_with_semantic(document, position, semantic); bindings .into_iter() diff --git a/crates/jrsonnet-lsp-handlers/src/completion/mod.rs b/crates/jrsonnet-lsp-handlers/src/completion/mod.rs index a95a1a0d..901402be 100644 --- a/crates/jrsonnet-lsp-handlers/src/completion/mod.rs +++ b/crates/jrsonnet-lsp-handlers/src/completion/mod.rs @@ -13,5 +13,7 @@ mod imports; mod locals; mod stdlib; -pub use handler::{completion, completion_with_import_roots}; +pub use handler::{ + completion, completion_with_import_roots, completion_with_import_roots_and_semantic, +}; pub use imports::find_import_string_start; diff --git a/crates/jrsonnet-lsp-handlers/src/definition.rs b/crates/jrsonnet-lsp-handlers/src/definition.rs index 3a494b03..edcc7b16 100644 --- a/crates/jrsonnet-lsp-handlers/src/definition.rs +++ b/crates/jrsonnet-lsp-handlers/src/definition.rs @@ -10,7 +10,10 @@ use jrsonnet_lsp_document::{ find_node_at_offset, to_lsp_range, token_at_offset, ByteOffset, Document, LspPosition, }; use jrsonnet_lsp_import::{check_import_path, extract_import_path}; -use 
jrsonnet_lsp_inference::{trace_base, trace_expr, ConstEvalResult}; +use jrsonnet_lsp_inference::{ + trace_base, trace_expr, ConstEvalResult, SemanticArtifacts, SemanticBindingKind, + SemanticImportTarget, +}; use jrsonnet_lsp_scope::{find_definition_range, is_variable_reference}; use jrsonnet_rowan_parser::{ nodes::{ @@ -60,20 +63,39 @@ pub enum DefinitionResult { /// - An import path that needs to be resolved by the caller /// - A field in an imported file (path + field chain) pub fn goto_definition(document: &Document, position: LspPosition) -> Option { - goto_with_mode(document, position, DefinitionMode::Definition) + goto_with_mode(document, position, DefinitionMode::Definition, None) } /// Find the declaration site of the symbol at the given position. /// /// Unlike `goto_definition`, this does not follow local alias chains. pub fn goto_declaration(document: &Document, position: LspPosition) -> Option { - goto_with_mode(document, position, DefinitionMode::Declaration) + goto_with_mode(document, position, DefinitionMode::Declaration, None) +} + +/// Find the definition of the symbol at the given position using semantic artifacts when available. +pub fn goto_definition_with_semantic( + document: &Document, + position: LspPosition, + semantic: Option<&SemanticArtifacts>, +) -> Option { + goto_with_mode(document, position, DefinitionMode::Definition, semantic) +} + +/// Find the declaration site of the symbol at the given position using semantic artifacts when available. 
+pub fn goto_declaration_with_semantic( + document: &Document, + position: LspPosition, + semantic: Option<&SemanticArtifacts>, +) -> Option { + goto_with_mode(document, position, DefinitionMode::Declaration, semantic) } fn goto_with_mode( document: &Document, position: LspPosition, mode: DefinitionMode, + semantic: Option<&SemanticArtifacts>, ) -> Option { let text = document.text(); let line_index = document.line_index(); @@ -112,10 +134,12 @@ fn goto_with_mode( } // Walk up the scope chain to find the definition - let def_range = find_definition_range(&token, &name)?; + let def_range = semantic + .and_then(|artifacts| artifacts.definition_for_ident_token(&token)) + .or_else(|| find_definition_range(&token, &name))?; let result = if mode == DefinitionMode::Definition { - match resolve_canonical_definition(document, def_range) { + match resolve_canonical_definition(document, def_range, semantic) { CanonicalDefinition::Local(range) => { DefinitionResult::Local(to_lsp_range(range, line_index, text)) } @@ -135,6 +159,7 @@ fn goto_with_mode( fn resolve_canonical_definition( document: &Document, initial_def: TextRange, + semantic: Option<&SemanticArtifacts>, ) -> CanonicalDefinition { let mut visited = std::collections::HashSet::new(); let mut current = initial_def; @@ -144,6 +169,31 @@ fn resolve_canonical_definition( return CanonicalDefinition::Local(current); } + if let Some(artifacts) = semantic { + if let Some(binding) = artifacts.binding_info(current) { + if let Some(import_target) = &binding.import_target { + return match import_target { + SemanticImportTarget::Import { path } => { + CanonicalDefinition::Import(path.clone()) + } + SemanticImportTarget::ImportField { path, fields } => { + CanonicalDefinition::ImportField { + path: path.clone(), + fields: fields.clone(), + } + } + }; + } + + if let Some(next) = binding.alias_definition { + current = next; + continue; + } + + return CanonicalDefinition::Local(current); + } + } + let Some(bind) = 
find_bind_by_definition_range(document, current) else { return CanonicalDefinition::Local(current); }; @@ -347,6 +397,16 @@ pub enum BindingKind { /// Collect all visible bindings at the given byte offset. #[must_use] pub fn collect_visible_bindings(document: &Document, position: LspPosition) -> Vec { + collect_visible_bindings_with_semantic(document, position, None) +} + +/// Collect all visible bindings at the given byte offset using semantic artifacts when available. +#[must_use] +pub fn collect_visible_bindings_with_semantic( + document: &Document, + position: LspPosition, + semantic: Option<&SemanticArtifacts>, +) -> Vec { let text = document.text(); let line_index = document.line_index(); @@ -355,6 +415,23 @@ pub fn collect_visible_bindings(document: &Document, position: LspPosition) -> V return Vec::new(); }; + if let Some(artifacts) = semantic { + return artifacts + .visible_bindings_at(offset.into()) + .into_iter() + .map(|binding| VisibleBinding { + name: binding.name, + range: binding.range, + kind: match binding.kind { + SemanticBindingKind::LocalVariable => BindingKind::LocalVariable, + SemanticBindingKind::LocalFunction => BindingKind::LocalFunction, + SemanticBindingKind::Parameter => BindingKind::Parameter, + SemanticBindingKind::ForVariable => BindingKind::ForVariable, + }, + }) + .collect(); + } + // Get the AST root let ast = document.ast(); let root = ast.syntax(); diff --git a/crates/jrsonnet-lsp-handlers/src/lib.rs b/crates/jrsonnet-lsp-handlers/src/lib.rs index 4cc809f4..cb735bd5 100644 --- a/crates/jrsonnet-lsp-handlers/src/lib.rs +++ b/crates/jrsonnet-lsp-handlers/src/lib.rs @@ -16,10 +16,13 @@ pub mod symbols; pub use code_action::{code_actions, CodeActionConfig, RemoveUnusedCommentsMode, RemoveUnusedMode}; pub use code_lens::{code_lens, resolve_code_lens, CodeLensConfig}; -pub use completion::{completion, completion_with_import_roots}; +pub use completion::{ + completion, completion_with_import_roots, 
completion_with_import_roots_and_semantic, +}; pub use definition::{ - collect_visible_bindings, goto_declaration, goto_definition, BindingKind, DefinitionResult, - VisibleBinding, + collect_visible_bindings, collect_visible_bindings_with_semantic, goto_declaration, + goto_declaration_with_semantic, goto_definition, goto_definition_with_semantic, BindingKind, + DefinitionResult, VisibleBinding, }; pub use document_highlight::document_highlights; pub use formatting::{ @@ -28,7 +31,10 @@ pub use formatting::{ }; pub use hover::{hover, hover_with_import_field_type}; pub use inlay_hint::inlay_hints; -pub use references::{find_cross_file_references, find_references}; +pub use references::{ + find_cross_file_references, find_cross_file_references_with_semantic, find_references, + find_references_with_semantic, +}; pub use rename::{prepare_rename, rename, rename_cross_file}; pub use semantic_tokens::{ legend as semantic_tokens_legend, semantic_token_reference_markdown, semantic_tokens, diff --git a/crates/jrsonnet-lsp-handlers/src/references.rs b/crates/jrsonnet-lsp-handlers/src/references.rs index 6400bd7d..0953cdcc 100644 --- a/crates/jrsonnet-lsp-handlers/src/references.rs +++ b/crates/jrsonnet-lsp-handlers/src/references.rs @@ -5,6 +5,7 @@ use jrsonnet_lsp_document::{to_lsp_range, token_at_offset, CanonicalPath, Document, LspPosition}; use jrsonnet_lsp_import::ImportGraph; +use jrsonnet_lsp_inference::SemanticArtifacts; use jrsonnet_lsp_scope::{ find_definition_range, is_at_file_scope, is_definition_site, is_variable_reference, ScopeResolver, @@ -24,6 +25,17 @@ pub fn find_references( position: LspPosition, uri: &Uri, include_declaration: bool, +) -> Vec { + find_references_with_semantic(document, position, uri, include_declaration, None) +} + +/// Find all references to the symbol at the given position using semantic artifacts when available. 
+pub fn find_references_with_semantic( + document: &Document, + position: LspPosition, + uri: &Uri, + include_declaration: bool, + semantic: Option<&SemanticArtifacts>, ) -> Vec { let text = document.text(); let line_index = document.line_index(); @@ -44,21 +56,29 @@ pub fn find_references( let name = token.text(); - let definition_range = if is_definition_site(&token) { - token.parent().map(|p| p.text_range()) - } else if is_variable_reference(&token) { - find_definition_range(&token, name) - } else { - return Vec::new(); - }; + let definition_range = semantic + .and_then(|artifacts| artifacts.definition_for_ident_token(&token)) + .or_else(|| { + if is_definition_site(&token) { + token.parent().map(|p| p.text_range()) + } else if is_variable_reference(&token) { + find_definition_range(&token, name) + } else { + None + } + }); let Some(def_range) = definition_range else { return Vec::new(); }; - // Build scope resolver for O(1) lookups - let resolver = ScopeResolver::new(ast.syntax()); - let mut references = resolver.find_references(ast.syntax(), name, def_range); + let mut references = if let Some(artifacts) = semantic { + artifacts.references_for_definition(def_range).to_vec() + } else { + // Build scope resolver for O(1) lookups + let resolver = ScopeResolver::new(ast.syntax()); + resolver.find_references(ast.syntax(), name, def_range) + }; if !include_declaration { references.retain(|r| *r != def_range); @@ -91,6 +111,34 @@ pub fn find_cross_file_references<'a>( position: LspPosition, documents: &[(&'a CanonicalPath, &'a Document)], import_graph: &ImportGraph, +) -> Vec { + let docs_with_semantic: Vec<_> = documents + .iter() + .map(|(path, doc)| (*path, *doc, None)) + .collect(); + find_cross_file_references_with_semantic( + current_document, + current_path, + position, + None, + &docs_with_semantic, + import_graph, + ) +} + +/// Find cross-file references using semantic artifacts when available. 
+#[must_use] +pub fn find_cross_file_references_with_semantic<'a>( + current_document: &Document, + current_path: &CanonicalPath, + position: LspPosition, + current_semantic: Option<&SemanticArtifacts>, + documents: &[( + &'a CanonicalPath, + &'a Document, + Option<&'a SemanticArtifacts>, + )], + import_graph: &ImportGraph, ) -> Vec { let text = current_document.text(); let line_index = current_document.line_index(); @@ -112,15 +160,16 @@ pub fn find_cross_file_references<'a>( return Vec::new(); } - let Some(name) = resolve_exported_symbol_name(current_document, &token) else { + let Some(name) = resolve_exported_symbol_name(current_document, &token, current_semantic) + else { return Vec::new(); }; // Search all other documents for imports of this file (in parallel) let references: Vec = documents .par_iter() - .filter(|(doc_path, _)| *doc_path != current_path) - .flat_map(|(doc_path, doc)| { + .filter(|(doc_path, _, _)| *doc_path != current_path) + .flat_map(|(doc_path, doc, semantic)| { let import_bindings = import_binding_names(import_graph, doc_path, current_path); if import_bindings.is_empty() { return Vec::new(); @@ -134,7 +183,9 @@ pub fn find_cross_file_references<'a>( import_bindings .into_iter() - .flat_map(|binding_name| find_references_to_import(doc, &binding_name, &name)) + .flat_map(|binding_name| { + find_references_to_import(doc, &binding_name, &name, *semantic) + }) .map(|range| Location { uri: doc_uri.clone(), range: to_lsp_range(range, doc_line_index, doc_text), @@ -161,7 +212,11 @@ fn import_binding_names( bindings } -fn resolve_exported_symbol_name(document: &Document, token: &SyntaxToken) -> Option { +fn resolve_exported_symbol_name( + document: &Document, + token: &SyntaxToken, + semantic: Option<&SemanticArtifacts>, +) -> Option { if token.kind() != SyntaxKind::IDENT { return None; } @@ -175,7 +230,9 @@ fn resolve_exported_symbol_name(document: &Document, token: &SyntaxToken) -> Opt } let name = token.text(); - let definition_range = 
find_definition_range(token, name)?; + let definition_range = semantic + .and_then(|artifacts| artifacts.definition_for_ident_token(token)) + .or_else(|| find_definition_range(token, name))?; let definition_token = definition_token(document, definition_range, name)?; if !is_at_file_scope(&definition_token) { return None; @@ -205,7 +262,14 @@ fn find_references_to_import( doc: &Document, binding_name: &str, field_name: &str, + semantic: Option<&SemanticArtifacts>, ) -> Vec { + if let Some(artifacts) = semantic { + return artifacts + .import_field_references(binding_name, field_name) + .to_vec(); + } + let mut references = Vec::new(); let ast = doc.ast(); diff --git a/crates/jrsonnet-lsp-inference/src/lib.rs b/crates/jrsonnet-lsp-inference/src/lib.rs index f4da8189..22b0ca63 100644 --- a/crates/jrsonnet-lsp-inference/src/lib.rs +++ b/crates/jrsonnet-lsp-inference/src/lib.rs @@ -28,6 +28,7 @@ pub mod manager; pub mod object; pub mod poly; pub mod provider; +pub mod semantic_artifacts; pub mod suggestions; pub mod type_cache; @@ -41,5 +42,9 @@ pub use helpers::extract_params_with_defaults; pub use manager::{DocumentManager, SharedDocumentManager}; pub use poly::instantiate_function_call_ty; pub use provider::{DocumentSource, TypeProvider}; +pub use semantic_artifacts::{ + DefinitionBindingInfo, SemanticArtifacts, SemanticBindingKind, SemanticImportTarget, + SemanticVisibleBinding, +}; pub use suggestions::find_best_match; pub use type_cache::{analyze_and_cache, new_shared_cache, SharedTypeCache, TypeCache}; diff --git a/crates/jrsonnet-lsp-inference/src/manager.rs b/crates/jrsonnet-lsp-inference/src/manager.rs index 10f40878..ddd5050b 100644 --- a/crates/jrsonnet-lsp-inference/src/manager.rs +++ b/crates/jrsonnet-lsp-inference/src/manager.rs @@ -15,7 +15,9 @@ use lru::LruCache; use moka::sync::Cache as MokaCache; use parking_lot::RwLock; -use crate::{analysis::TypeAnalysis, provider::DocumentSource}; +use crate::{ + analysis::TypeAnalysis, provider::DocumentSource, 
semantic_artifacts::SemanticArtifacts, +}; /// Cached type analysis for a document. #[derive(Clone)] @@ -26,6 +28,15 @@ struct CachedAnalysis { analysis: Arc, } +/// Cached semantic artifacts for a document. +#[derive(Clone)] +struct CachedSemanticArtifacts { + /// Version of the document this artifact set was computed for. + version: DocVersion, + /// The cached artifacts (wrapped in Arc for sharing). + artifacts: Arc, +} + /// Document manager that tracks open documents and caches recently closed ones. /// /// This type is `Sync` and can be safely shared across threads. @@ -37,6 +48,8 @@ pub struct DocumentManager { /// Cached type analysis per document (keyed by path, validated by version). /// Uses moka for thread-safe concurrent caching with LRU-like eviction. analysis_cache: MokaCache, + /// Cached semantic artifacts per document (keyed by path, validated by version). + semantic_cache: MokaCache, /// Global type store shared across all analyses. global_types: Arc, } @@ -60,6 +73,7 @@ impl DocumentManager { open: DashMap::new(), closed: RwLock::new(LruCache::new(closed_capacity)), analysis_cache: MokaCache::new(DEFAULT_ANALYSIS_CACHE_CAPACITY as u64), + semantic_cache: MokaCache::new(DEFAULT_ANALYSIS_CACHE_CAPACITY as u64), global_types, } } @@ -77,6 +91,9 @@ impl DocumentManager { closed.pop(&path); } + self.invalidate_analysis(&path); + self.invalidate_semantic_artifacts(&path); + let document = Document::new(text, version); self.open.insert(path, document); } @@ -87,6 +104,8 @@ impl DocumentManager { pub fn update(&self, path: &CanonicalPath, text: String, version: DocVersion) -> bool { self.open.get_mut(path).is_some_and(|mut doc| { doc.update(text, version); + self.invalidate_analysis(path); + self.invalidate_semantic_artifacts(path); true }) } @@ -101,9 +120,15 @@ impl DocumentManager { new_text: &str, version: DocVersion, ) -> bool { - self.open - .get_mut(path) - .is_some_and(|mut doc| doc.apply_incremental_change(range, new_text, version)) + 
self.open.get_mut(path).is_some_and(|mut doc| { + if doc.apply_incremental_change(range, new_text, version) { + self.invalidate_analysis(path); + self.invalidate_semantic_artifacts(path); + true + } else { + false + } + }) } /// Close a document (called on textDocument/didClose). @@ -128,15 +153,19 @@ impl DocumentManager { return false; }; - let mut closed = self.closed.write(); - closed.put(path.clone(), Document::new(text, DocVersion::new(0))); + self.closed + .write() + .put(path.clone(), Document::new(text, DocVersion::new(0))); + self.invalidate_analysis(path); + self.invalidate_semantic_artifacts(path); true } /// Remove a closed document entry from the cache. pub fn remove_closed(&self, path: &CanonicalPath) { - let mut closed = self.closed.write(); - closed.pop(path); + self.closed.write().pop(path); + self.invalidate_analysis(path); + self.invalidate_semantic_artifacts(path); } /// Get a reference to an open document. @@ -289,6 +318,70 @@ impl DocumentManager { .insert(path, CachedAnalysis { version, analysis }); } + /// Get cached semantic artifacts for a document, computing them if needed. + /// + /// Returns `None` if the document is not found. + pub fn get_semantic_artifacts(&self, path: &CanonicalPath) -> Option> { + let doc = self.get_document(path)?; + let version = doc.version(); + Some( + self.get_or_compute_semantic_artifacts(path, version, || { + SemanticArtifacts::build(&doc) + }), + ) + } + + /// Get cached semantic artifacts for `path@version`, or compute and cache them. 
+ pub fn get_or_compute_semantic_artifacts( + &self, + path: &CanonicalPath, + version: DocVersion, + compute: F, + ) -> Arc + where + F: FnOnce() -> SemanticArtifacts, + { + if let Some(cached) = self.semantic_cache.get(path) { + if cached.version == version { + return Arc::clone(&cached.artifacts); + } + } + + let artifacts = Arc::new(compute()); + self.semantic_cache.insert( + path.clone(), + CachedSemanticArtifacts { + version, + artifacts: Arc::clone(&artifacts), + }, + ); + artifacts + } + + /// Insert precomputed semantic artifacts for `path@version` into the cache. + pub fn cache_semantic_artifacts( + &self, + path: CanonicalPath, + version: DocVersion, + artifacts: Arc, + ) { + self.semantic_cache + .insert(path, CachedSemanticArtifacts { version, artifacts }); + } + + /// Eagerly refresh semantic artifacts for a tracked document. + /// + /// Returns `true` when artifacts were refreshed. + pub fn refresh_semantic_artifacts(&self, path: &CanonicalPath) -> bool { + let Some(doc) = self.get_document(path) else { + return false; + }; + let version = doc.version(); + let artifacts = Arc::new(SemanticArtifacts::build(&doc)); + self.cache_semantic_artifacts(path.clone(), version, artifacts); + true + } + /// Invalidate the analysis cache for a document. /// /// Call this when a document's content changes to ensure the next @@ -296,6 +389,11 @@ impl DocumentManager { pub fn invalidate_analysis(&self, path: &CanonicalPath) { self.analysis_cache.invalidate(path); } + + /// Invalidate the semantic artifact cache for a document. + pub fn invalidate_semantic_artifacts(&self, path: &CanonicalPath) { + self.semantic_cache.invalidate(path); + } } impl DocumentSource for DocumentManager { diff --git a/crates/jrsonnet-lsp-inference/src/semantic_artifacts.rs b/crates/jrsonnet-lsp-inference/src/semantic_artifacts.rs new file mode 100644 index 00000000..c3a87632 --- /dev/null +++ b/crates/jrsonnet-lsp-inference/src/semantic_artifacts.rs @@ -0,0 +1,340 @@ +//! 
Per-document semantic artifacts for request-time lookups. +//! +//! These artifacts are computed once per document version and reused across +//! definition/completion/references requests. + +use jrsonnet_lsp_document::Document; +use jrsonnet_lsp_import::extract_import_path; +use jrsonnet_lsp_scope::{is_definition_site, is_variable_reference, ScopeIndex, ScopeResolver}; +use jrsonnet_rowan_parser::{ + nodes::{Bind, Destruct, Expr, ExprBase, ExprField, ForSpec, Param}, + AstNode, SyntaxKind, SyntaxToken, +}; +use rowan::{TextRange, TextSize}; +use rustc_hash::FxHashMap; + +use crate::{trace_expr, ConstEvalResult}; + +/// Binding kind used by semantic artifacts. +#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)] +pub enum SemanticBindingKind { + LocalVariable, + LocalFunction, + Parameter, + ForVariable, +} + +/// Canonical import target for a definition. +#[derive(Debug, Clone, Eq, PartialEq)] +pub enum SemanticImportTarget { + Import { path: String }, + ImportField { path: String, fields: Vec }, +} + +/// Cached info for a binding definition. +#[derive(Debug, Clone)] +pub struct DefinitionBindingInfo { + pub kind: SemanticBindingKind, + pub value_expr_range: Option, + pub alias_definition: Option, + pub import_target: Option, +} + +/// Visible binding at a position. +#[derive(Debug, Clone, Eq, PartialEq)] +pub struct SemanticVisibleBinding { + pub name: String, + pub kind: SemanticBindingKind, + pub range: TextRange, +} + +/// Per-file semantic index for fast request-time lookups. +pub struct SemanticArtifacts { + scope_index: ScopeIndex, + /// Map from identifier token start to definition name-range. + reference_to_definition: FxHashMap, + /// Map from definition name-range to all identifier token ranges (definition + refs). + references_by_definition: FxHashMap>, + /// Map from definition range to binding info. + definition_bindings: FxHashMap, + /// Map from definition range to binding kind. 
+ binding_kinds: FxHashMap, + /// Map from identifier token start to smallest enclosing expression range. + expr_at_token_start: FxHashMap, + /// Map keyed by `(import_binding_name, field_name)` to field identifier ranges. + import_field_references: FxHashMap<(String, String), Vec>, +} + +impl SemanticArtifacts { + /// Build semantic artifacts for `document`. + #[must_use] + pub fn build(document: &Document) -> Self { + let ast = document.ast(); + let root = ast.syntax(); + let scope_index = ScopeIndex::new(root); + let scope_resolver = ScopeResolver::new(root); + + let mut reference_to_definition = FxHashMap::default(); + let mut references_by_definition: FxHashMap> = + FxHashMap::default(); + let mut expr_at_token_start = FxHashMap::default(); + + for token in root + .descendants_with_tokens() + .filter_map(rowan::NodeOrToken::into_token) + { + if token.kind() != SyntaxKind::IDENT { + continue; + } + + let start = token.text_range().start(); + if let Some(expr_range) = smallest_expr_range_for_token(&token) { + expr_at_token_start.insert(start, expr_range); + } + + if is_definition_site(&token) { + if let Some(parent) = token.parent() { + references_by_definition + .entry(parent.text_range()) + .or_default() + .push(token.text_range()); + } + continue; + } + + if !is_variable_reference(&token) { + continue; + } + + if let Some(definition_range) = scope_resolver.get_definition(&token) { + reference_to_definition.insert(start, definition_range); + references_by_definition + .entry(definition_range) + .or_default() + .push(token.text_range()); + } + } + + for ranges in references_by_definition.values_mut() { + ranges.sort_by_key(|range| range.start()); + ranges.dedup(); + } + + let mut definition_bindings = FxHashMap::default(); + let mut binding_kinds = FxHashMap::default(); + + for bind in root.descendants().filter_map(Bind::cast) { + let Some((definition_range, kind, value_expr)) = bind_definition_data(&bind) else { + continue; + }; + + let value_expr_range = 
value_expr.as_ref().map(|expr| expr.syntax().text_range()); + let alias_definition = value_expr + .as_ref() + .and_then(|expr| alias_definition_for_expr(expr, &scope_index)); + let import_target = value_expr + .as_ref() + .and_then(|expr| import_target_for_expr(expr, document)); + + binding_kinds.insert(definition_range, kind); + definition_bindings.insert( + definition_range, + DefinitionBindingInfo { + kind, + value_expr_range, + alias_definition, + import_target, + }, + ); + } + + for param in root.descendants().filter_map(Param::cast) { + if let Some(range) = param_definition_range(¶m) { + binding_kinds.insert(range, SemanticBindingKind::Parameter); + } + } + + for for_spec in root.descendants().filter_map(ForSpec::cast) { + if let Some(range) = for_spec_definition_range(&for_spec) { + binding_kinds.insert(range, SemanticBindingKind::ForVariable); + } + } + + let mut import_field_references: FxHashMap<(String, String), Vec> = + FxHashMap::default(); + for field in root.descendants().filter_map(ExprField::cast) { + let Some(field_ident) = field.field().and_then(|name| name.ident_lit()) else { + continue; + }; + let Some(ExprBase::ExprVar(var)) = field.base().and_then(|expr| expr.expr_base()) + else { + continue; + }; + let Some(base_ident) = var.name().and_then(|name| name.ident_lit()) else { + continue; + }; + + import_field_references + .entry(( + base_ident.text().to_string(), + field_ident.text().to_string(), + )) + .or_default() + .push(field_ident.text_range()); + } + for ranges in import_field_references.values_mut() { + ranges.sort_by_key(|range| range.start()); + ranges.dedup(); + } + + Self { + scope_index, + reference_to_definition, + references_by_definition, + definition_bindings, + binding_kinds, + expr_at_token_start, + import_field_references, + } + } + + /// Resolve a definition range from an identifier token. 
+ pub fn definition_for_ident_token(&self, token: &SyntaxToken) -> Option { + if token.kind() != SyntaxKind::IDENT { + return None; + } + + if is_definition_site(token) { + return token.parent().map(|parent| parent.text_range()); + } + + if !is_variable_reference(token) { + return None; + } + + self.reference_to_definition + .get(&token.text_range().start()) + .copied() + .or_else(|| { + self.scope_index + .find_definition(token.text_range().start(), token.text()) + }) + } + + /// Return all identifier ranges for `definition_range`. + #[must_use] + pub fn references_for_definition(&self, definition_range: TextRange) -> &[TextRange] { + self.references_by_definition + .get(&definition_range) + .map_or(&[], Vec::as_slice) + } + + /// Return visible bindings at `position`. + pub fn visible_bindings_at(&self, position: TextSize) -> Vec { + self.scope_index + .bindings_at(position) + .into_iter() + .map(|(name, range)| SemanticVisibleBinding { + kind: self + .binding_kinds + .get(&range) + .copied() + .unwrap_or(SemanticBindingKind::LocalVariable), + name, + range, + }) + .collect() + } + + /// Return cached binding info for a definition range. + #[must_use] + pub fn binding_info(&self, definition_range: TextRange) -> Option<&DefinitionBindingInfo> { + self.definition_bindings.get(&definition_range) + } + + /// Return smallest cached expression range at token start position. + #[must_use] + pub fn expr_at_token_start(&self, token_start: TextSize) -> Option { + self.expr_at_token_start.get(&token_start).copied() + } + + /// Return cached `binding.field` identifier ranges in this file. 
+ #[must_use] + pub fn import_field_references(&self, binding_name: &str, field_name: &str) -> &[TextRange] { + self.import_field_references + .get(&(binding_name.to_string(), field_name.to_string())) + .map_or(&[], Vec::as_slice) + } +} + +fn smallest_expr_range_for_token(token: &SyntaxToken) -> Option { + token + .parent_ancestors() + .filter_map(Expr::cast) + .map(|expr| expr.syntax().text_range()) + .min_by_key(|range| range.len()) +} + +fn bind_definition_data(bind: &Bind) -> Option<(TextRange, SemanticBindingKind, Option)> { + match bind { + Bind::BindDestruct(bind) => { + let destruct = bind.into()?; + let Destruct::DestructFull(full) = destruct else { + return None; + }; + Some(( + full.name()?.syntax().text_range(), + SemanticBindingKind::LocalVariable, + bind.value(), + )) + } + Bind::BindFunction(bind) => Some(( + bind.name()?.syntax().text_range(), + SemanticBindingKind::LocalFunction, + bind.value(), + )), + } +} + +fn param_definition_range(param: &Param) -> Option { + let destruct = param.destruct()?; + let Destruct::DestructFull(full) = destruct else { + return None; + }; + Some(full.name()?.syntax().text_range()) +} + +fn for_spec_definition_range(for_spec: &ForSpec) -> Option { + let destruct = for_spec.bind()?; + let Destruct::DestructFull(full) = destruct else { + return None; + }; + Some(full.name()?.syntax().text_range()) +} + +fn alias_definition_for_expr(expr: &Expr, scope_index: &ScopeIndex) -> Option { + let ExprBase::ExprVar(var) = expr.expr_base()? else { + return None; + }; + let ident = var.name()?.ident_lit()?; + scope_index.find_definition(ident.text_range().start(), ident.text()) +} + +fn import_target_for_expr(expr: &Expr, document: &Document) -> Option { + if let ExprBase::ExprImport(import) = expr.expr_base()? { + return Some(SemanticImportTarget::Import { + path: extract_import_path(&import)?, + }); + } + + match trace_expr(expr, document)? 
{ + ConstEvalResult::Import { path, fields } => { + if fields.is_empty() { + Some(SemanticImportTarget::Import { path }) + } else { + Some(SemanticImportTarget::ImportField { path, fields }) + } + } + ConstEvalResult::Local { .. } | ConstEvalResult::Std { .. } => None, + } +} diff --git a/crates/jrsonnet-lsp-scope/src/resolver.rs b/crates/jrsonnet-lsp-scope/src/resolver.rs index ac3fe7c2..ba230ec9 100644 --- a/crates/jrsonnet-lsp-scope/src/resolver.rs +++ b/crates/jrsonnet-lsp-scope/src/resolver.rs @@ -6,7 +6,7 @@ //! The `ScopeResolver` struct precomputes a scope map for O(1) definition lookups. //! The `ScopeIndex` struct provides O(log n) lookups using binary search. -use std::cell::RefCell; +use std::sync::RwLock; use jrsonnet_rowan_parser::{ nodes::{ @@ -434,9 +434,9 @@ pub struct ScopeIndex { /// Map from scope start position to index for quick lookup. scope_starts: Vec<(TextSize, usize)>, /// Cached scope chains: `scope_index` -> chain of scope ranges (innermost first). - scope_chain_cache: RefCell>>, + scope_chain_cache: RwLock>>, /// Cached bindings per scope chain: `scope_index` -> all bindings in chain (with visibility info). - bindings_cache: RefCell>>, + bindings_cache: RwLock>>, } /// A cached binding with visibility information for filtering at query time. 
@@ -470,8 +470,8 @@ impl ScopeIndex { Self { scopes, scope_starts, - scope_chain_cache: RefCell::new(FxHashMap::default()), - bindings_cache: RefCell::new(FxHashMap::default()), + scope_chain_cache: RwLock::new(FxHashMap::default()), + bindings_cache: RwLock::new(FxHashMap::default()), } } @@ -737,8 +737,14 @@ impl ScopeIndex { }; // Check cache first - if let Some(cached) = self.scope_chain_cache.borrow().get(&scope_idx) { - return cached.clone(); + { + let cache = self + .scope_chain_cache + .read() + .unwrap_or_else(std::sync::PoisonError::into_inner); + if let Some(cached) = cache.get(&scope_idx) { + return cached.clone(); + } } // Compute the scope chain @@ -746,7 +752,8 @@ impl ScopeIndex { // Cache and return self.scope_chain_cache - .borrow_mut() + .write() + .unwrap_or_else(std::sync::PoisonError::into_inner) .insert(scope_idx, chain.clone()); chain } @@ -797,8 +804,14 @@ impl ScopeIndex { /// Get or compute cached bindings for a scope chain. fn get_or_compute_bindings(&self, scope_idx: usize) -> Vec { // Check cache first - if let Some(cached) = self.bindings_cache.borrow().get(&scope_idx) { - return cached.clone(); + { + let cache = self + .bindings_cache + .read() + .unwrap_or_else(std::sync::PoisonError::into_inner); + if let Some(cached) = cache.get(&scope_idx) { + return cached.clone(); + } } // Compute bindings for the entire scope chain @@ -806,7 +819,8 @@ impl ScopeIndex { // Cache and return self.bindings_cache - .borrow_mut() + .write() + .unwrap_or_else(std::sync::PoisonError::into_inner) .insert(scope_idx, bindings.clone()); bindings } diff --git a/crates/jrsonnet-lsp/src/server/async_requests.rs b/crates/jrsonnet-lsp/src/server/async_requests.rs index 2c730e80..110b9aaa 100644 --- a/crates/jrsonnet-lsp/src/server/async_requests.rs +++ b/crates/jrsonnet-lsp/src/server/async_requests.rs @@ -127,13 +127,14 @@ impl AsyncRequestContext { let path = CanonicalPath::from_uri(uri)?; let doc = self.documents.get(&path)?.clone(); let lsp_pos = 
position.into(); + let semantic = self.documents.get_semantic_artifacts(&path); let result = match target { GotoTarget::Definition | GotoTarget::TypeDefinition => { - handlers::goto_definition(&doc, lsp_pos)? + handlers::goto_definition_with_semantic(&doc, lsp_pos, semantic.as_deref())? } GotoTarget::Declaration | GotoTarget::Implementation => { - handlers::goto_declaration(&doc, lsp_pos)? + handlers::goto_declaration_with_semantic(&doc, lsp_pos, semantic.as_deref())? } }; match result { @@ -204,6 +205,7 @@ impl AsyncRequestContext { let position = params.text_document_position.position; let path = CanonicalPath::from_uri(uri)?; let doc = self.documents.get(&path)?.clone(); + let semantic = self.documents.get_semantic_artifacts(&path); let lsp_pos = position.into(); let analysis = self.analyze_document(&path, &doc); @@ -215,12 +217,13 @@ impl AsyncRequestContext { ); drop(config); - let list = handlers::completion_with_import_roots( + let list = handlers::completion_with_import_roots_and_semantic( &doc, lsp_pos, Some(path.as_path()), &import_roots, &analysis, + semantic.as_deref(), )?; Some(CompletionResponse::List(list)) } @@ -231,24 +234,40 @@ impl AsyncRequestContext { let path = CanonicalPath::from_uri(uri)?; let doc = self.documents.get_document(&path)?; let lsp_pos = position.into(); + let semantic = self.documents.get_semantic_artifacts(&path); let include_declaration = params.context.include_declaration; - let mut refs = handlers::find_references(&doc, lsp_pos, uri, include_declaration); + let mut refs = handlers::find_references_with_semantic( + &doc, + lsp_pos, + uri, + include_declaration, + semantic.as_deref(), + ); let importers = self.import_graph.read().transitive_importers(&path); let importer_docs: Vec<_> = importers .into_iter() - .filter_map(|p| self.documents.get_document(&p).map(|d| (p, d))) + .filter_map(|p| { + self.documents.get_document(&p).map(|d| { + let semantic = self.documents.get_semantic_artifacts(&p); + (p, d, semantic) + }) + }) + 
.collect(); + let importer_refs: Vec<_> = importer_docs + .iter() + .map(|(k, v, semantic)| (k, v, semantic.as_deref())) .collect(); - let importer_refs: Vec<_> = importer_docs.iter().map(|(k, v)| (k, v)).collect(); let cross_refs = { let import_graph = self.import_graph.read(); - handlers::find_cross_file_references( + handlers::find_cross_file_references_with_semantic( &doc, &path, lsp_pos, + semantic.as_deref(), &importer_refs, &import_graph, ) diff --git a/crates/jrsonnet-lsp/src/server/notifications.rs b/crates/jrsonnet-lsp/src/server/notifications.rs index 51aabe89..a81b4412 100644 --- a/crates/jrsonnet-lsp/src/server/notifications.rs +++ b/crates/jrsonnet-lsp/src/server/notifications.rs @@ -70,6 +70,7 @@ impl Server { let version = DocVersion::new(params.text_document.version); self.documents.open(path.clone(), text, version); + self.documents.refresh_semantic_artifacts(&path); // Invalidate type cache for this file and all files that depend on it self.invalidate_type_cache_with_dependents(&path); @@ -110,6 +111,8 @@ impl Server { } } + self.documents.refresh_semantic_artifacts(&path); + // Invalidate type cache for this file and all files that depend on it self.invalidate_type_cache_with_dependents(&path); From 9937850fdebfc0543e49928648908411f2300d3f Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Fri, 13 Feb 2026 12:29:57 +0000 Subject: [PATCH 097/210] chore(lint): remove option_if_let_else workspace allow - drop the workspace-level allow for clippy::option_if_let_else. - migrate remaining LSP call sites to Option::map_or_else so strict clippy passes. 
--- Cargo.toml | 1 - .../jrsonnet-lsp-handlers/src/references.rs | 15 ++++----- .../jrsonnet-lsp/src/server/async_requests.rs | 31 ++++++++++--------- 3 files changed, 24 insertions(+), 23 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index a8e1166b..8172d152 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -156,7 +156,6 @@ similar_names = "allow" unnecessary_semicolon = "allow" too_long_first_doc_paragraph = "allow" map_unwrap_or = "warn" -option_if_let_else = "allow" manual_repeat_n = "allow" mem_replace_option_with_some = "allow" unnecessary_literal_bound = "allow" diff --git a/crates/jrsonnet-lsp-handlers/src/references.rs b/crates/jrsonnet-lsp-handlers/src/references.rs index 0953cdcc..9465a7ce 100644 --- a/crates/jrsonnet-lsp-handlers/src/references.rs +++ b/crates/jrsonnet-lsp-handlers/src/references.rs @@ -72,13 +72,14 @@ pub fn find_references_with_semantic( return Vec::new(); }; - let mut references = if let Some(artifacts) = semantic { - artifacts.references_for_definition(def_range).to_vec() - } else { - // Build scope resolver for O(1) lookups - let resolver = ScopeResolver::new(ast.syntax()); - resolver.find_references(ast.syntax(), name, def_range) - }; + let mut references = semantic.map_or_else( + || { + // Build scope resolver for O(1) lookups + let resolver = ScopeResolver::new(ast.syntax()); + resolver.find_references(ast.syntax(), name, def_range) + }, + |artifacts| artifacts.references_for_definition(def_range).to_vec(), + ); if !include_declaration { references.retain(|r| *r != def_range); diff --git a/crates/jrsonnet-lsp/src/server/async_requests.rs b/crates/jrsonnet-lsp/src/server/async_requests.rs index 110b9aaa..98ab8d28 100644 --- a/crates/jrsonnet-lsp/src/server/async_requests.rs +++ b/crates/jrsonnet-lsp/src/server/async_requests.rs @@ -492,23 +492,24 @@ impl AsyncRequestContext { fn eval_command_jpath(&self, base_path: Option<&CanonicalPath>) -> Vec { let config = self.config.read(); - let jpath = if let Some(base_path) = base_path { - 
let mut roots = effective_import_roots( - base_path.as_path(), - &config.jpath, - config.resolve_paths_with_tanka, - ); - if !config.resolve_paths_with_tanka { - if let Some(dir) = base_path.as_path().parent() { - if !roots.iter().any(|entry| entry == dir) { - roots.push(dir.to_path_buf()); + let jpath = base_path.map_or_else( + || config.jpath.clone(), + |base_path| { + let mut roots = effective_import_roots( + base_path.as_path(), + &config.jpath, + config.resolve_paths_with_tanka, + ); + if !config.resolve_paths_with_tanka { + if let Some(dir) = base_path.as_path().parent() { + if !roots.iter().any(|entry| entry == dir) { + roots.push(dir.to_path_buf()); + } } } - } - roots - } else { - config.jpath.clone() - }; + roots + }, + ); drop(config); jpath } From 9021dcb2e07c947d9b3b71e5b516c2de1daac78f Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Fri, 13 Feb 2026 12:38:10 +0000 Subject: [PATCH 098/210] refactor(lsp-document): use typed URI/path conversion - add `url` as a workspace dependency and consume it from `jrsonnet-lsp-document`. - replace ad-hoc file URI parsing with typed conversions and explicit error variants. - add structural tests for non-file URI rejection, escaped path round-trips, and relative-path URI failures. - update LSP request/notification call-sites to handle fallible URI decoding without panics. 
--- Cargo.lock | 1 + Cargo.toml | 1 + crates/jrsonnet-lsp-document/Cargo.toml | 1 + crates/jrsonnet-lsp-document/src/error.rs | 12 +++ crates/jrsonnet-lsp-document/src/types.rs | 96 +++++++++++++++---- crates/jrsonnet-lsp/src/server.rs | 20 ++-- .../jrsonnet-lsp/src/server/async_requests.rs | 22 ++--- .../jrsonnet-lsp/src/server/notifications.rs | 10 +- 8 files changed, 119 insertions(+), 44 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a3b6a753..6fdaad37 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1872,6 +1872,7 @@ dependencies = [ "lsp-types", "rowan", "thiserror 1.0.69", + "url", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index 8172d152..fd01131b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -85,6 +85,7 @@ xshell = "0.2.6" lsp-server = "0.7.6" lsp-types = "0.97.0" +url = "2.5.4" regex = "1.10" lru = "0.16.0" diff --git a/crates/jrsonnet-lsp-document/Cargo.toml b/crates/jrsonnet-lsp-document/Cargo.toml index 7e460d29..43f074ca 100644 --- a/crates/jrsonnet-lsp-document/Cargo.toml +++ b/crates/jrsonnet-lsp-document/Cargo.toml @@ -13,6 +13,7 @@ jrsonnet-rowan-parser = { version = "0.5.0-pre97", path = "../jrsonnet-rowan-par lsp-types.workspace = true rowan.workspace = true thiserror.workspace = true +url.workspace = true [lints] workspace = true diff --git a/crates/jrsonnet-lsp-document/src/error.rs b/crates/jrsonnet-lsp-document/src/error.rs index 9c4cf3b3..3397760f 100644 --- a/crates/jrsonnet-lsp-document/src/error.rs +++ b/crates/jrsonnet-lsp-document/src/error.rs @@ -27,6 +27,18 @@ pub enum LspError { #[error("invalid URI: {0}")] InvalidUri(String), + /// URI is valid but is not a file URI. + #[error("URI is not a file URI: {0}")] + NonFileUri(String), + + /// File URI could not be converted to a filesystem path. + #[error("failed to convert URI to path: {0}")] + UriToPath(String), + + /// Filesystem path could not be converted to a URI. 
+ #[error("failed to convert path to URI: {0}")] + PathToUri(String), + /// Identifier is not valid for Jsonnet. #[error("invalid identifier: {0}")] InvalidIdentifier(String), diff --git a/crates/jrsonnet-lsp-document/src/types.rs b/crates/jrsonnet-lsp-document/src/types.rs index b8fe1581..4b89cc5f 100644 --- a/crates/jrsonnet-lsp-document/src/types.rs +++ b/crates/jrsonnet-lsp-document/src/types.rs @@ -6,6 +6,7 @@ use std::path::PathBuf; use derive_more::{AsRef, Deref, Display, From, Into}; +use url::Url; use crate::error::{validate_identifier, LspError, LspResult}; @@ -135,33 +136,35 @@ impl CanonicalPath { Ok(Self(path.canonicalize()?)) } - /// Create from a URI, returning None if the URI is not a file URI. - pub fn from_uri(uri: &lsp_types::Uri) -> Option { + /// Create from a URI. + /// + /// # Errors + /// Returns `Err(LspError::InvalidUri)` when URI parsing fails, + /// `Err(LspError::NonFileUri)` when URI scheme is not `file`, + /// or `Err(LspError::UriToPath)` when URI cannot be converted to a path. + pub fn from_uri(uri: &lsp_types::Uri) -> LspResult { let uri_str = uri.as_str(); - if !uri_str.starts_with("file://") { - return None; + let parsed = Url::parse(uri_str).map_err(|_| LspError::InvalidUri(uri_str.to_string()))?; + if parsed.scheme() != "file" { + return Err(LspError::NonFileUri(uri_str.to_string())); } - // Parse file:// URI to path - let path_str = uri_str.strip_prefix("file://")?; - // Simple percent decoding for common cases - let decoded = path_str.replace("%20", " "); - let path = PathBuf::from(decoded); - // Try to canonicalize, but if it fails (file doesn't exist yet), use as-is - Some(Self(path.canonicalize().unwrap_or(path))) + let path = parsed + .to_file_path() + .map_err(|()| LspError::UriToPath(uri_str.to_string()))?; + Ok(Self(path.canonicalize().unwrap_or(path))) } /// Convert to a file URI. /// /// # Errors - /// Returns `Err(LspError::InvalidUri)` if the canonical path cannot be - /// represented as a valid URI string. 
+ /// Returns `Err(LspError::PathToUri)` when the path cannot be converted + /// into a file URI, or `Err(LspError::InvalidUri)` if URI parsing fails. pub fn to_uri(&self) -> LspResult { - let path_str = self.0.to_string_lossy(); - // Create file:// URI - let uri_string = format!("file://{path_str}"); - uri_string + let url = Url::from_file_path(&self.0) + .map_err(|()| LspError::PathToUri(self.0.display().to_string()))?; + url.as_str() .parse() - .map_err(|_| LspError::InvalidUri(uri_string)) + .map_err(|_| LspError::InvalidUri(url.to_string())) } /// Get the inner path. @@ -219,6 +222,8 @@ impl SymbolName { #[cfg(test)] mod tests { + use std::time::{SystemTime, UNIX_EPOCH}; + use super::*; #[test] @@ -296,4 +301,59 @@ mod tests { let name = SymbolName::new("myVar").unwrap(); assert_eq!(format!("{name}"), "myVar"); } + + fn unique_test_dir(label: &str) -> PathBuf { + let nanos = SystemTime::now() + .duration_since(UNIX_EPOCH) + .expect("clock should be after unix epoch") + .as_nanos(); + std::env::temp_dir().join(format!( + "jrsonnet-lsp-document-{label}-{}-{nanos}", + std::process::id() + )) + } + + #[test] + fn test_canonical_path_from_uri_rejects_non_file_uri() { + use crate::error::LspError; + + let uri: lsp_types::Uri = "https://example.com/test.jsonnet".parse().unwrap(); + let err = CanonicalPath::from_uri(&uri).unwrap_err(); + match err { + LspError::NonFileUri(value) => assert_eq!(value, uri.as_str()), + other => panic!("unexpected error: {other:?}"), + } + } + + #[test] + fn test_canonical_path_uri_round_trip_with_escaped_chars() { + let dir = unique_test_dir("uri-roundtrip"); + std::fs::create_dir_all(&dir).expect("create test directory"); + let file_path = dir.join("a #b.jsonnet"); + std::fs::write(&file_path, "{}").expect("create test file"); + let canonical = file_path.canonicalize().expect("canonicalize test file"); + + let url = Url::from_file_path(&canonical).expect("build file URL"); + let uri: lsp_types::Uri = url.as_str().parse().expect("parse 
URI"); + let path = CanonicalPath::from_uri(&uri).expect("decode URI to path"); + assert_eq!(path.as_path(), canonical.as_path()); + + let roundtrip_uri = path.to_uri().expect("encode path to URI"); + let roundtrip_path = CanonicalPath::from_uri(&roundtrip_uri).expect("decode roundtrip URI"); + assert_eq!(roundtrip_path, path); + + std::fs::remove_dir_all(dir).expect("remove test directory"); + } + + #[test] + fn test_canonical_path_to_uri_rejects_relative_path() { + use crate::error::LspError; + + let path = CanonicalPath::new(PathBuf::from("relative.jsonnet")); + let err = path.to_uri().unwrap_err(); + match err { + LspError::PathToUri(value) => assert_eq!(value, "relative.jsonnet"), + other => panic!("unexpected error: {other:?}"), + } + } } diff --git a/crates/jrsonnet-lsp/src/server.rs b/crates/jrsonnet-lsp/src/server.rs index 055539f2..e9a033b6 100644 --- a/crates/jrsonnet-lsp/src/server.rs +++ b/crates/jrsonnet-lsp/src/server.rs @@ -348,14 +348,14 @@ impl Server { if let Some(workspace_folders) = &init_roots.workspace_folders { for folder in workspace_folders { - if let Some(path) = CanonicalPath::from_uri(&folder.uri) { + if let Ok(path) = CanonicalPath::from_uri(&folder.uri) { roots.push(path.as_path().to_path_buf()); } } } if let Some(root_uri) = &init_roots.root_uri { - if let Some(path) = CanonicalPath::from_uri(root_uri) { + if let Ok(path) = CanonicalPath::from_uri(root_uri) { roots.push(path.as_path().to_path_buf()); } } @@ -916,7 +916,7 @@ impl Server { /// Handle textDocument/documentSymbol request. 
fn on_document_symbol(&self, params: &DocumentSymbolParams) -> Option { let uri = &params.text_document.uri; - let path = CanonicalPath::from_uri(uri)?; + let path = CanonicalPath::from_uri(uri).ok()?; let doc = self.documents.get(&path)?.clone(); let symbols = handlers::document_symbols(&doc); @@ -935,7 +935,7 @@ ) -> Option> { let uri = &params.text_document_position_params.text_document.uri; let position = params.text_document_position_params.position; - let path = CanonicalPath::from_uri(uri)?; + let path = CanonicalPath::from_uri(uri).ok()?; let doc = self.documents.get(&path)?.clone(); let lsp_pos = position.into(); @@ -950,7 +950,7 @@ /// Handle textDocument/codeAction request. fn on_code_action(&self, params: &CodeActionParams) -> Option { let uri = &params.text_document.uri; - let path = CanonicalPath::from_uri(uri)?; + let path = CanonicalPath::from_uri(uri).ok()?; let actions = { let doc = self.documents.get(&path)?; let code_action_config = self.config.read().code_actions; @@ -973,7 +973,7 @@ fn on_signature_help(&self, params: &SignatureHelpParams) -> Option { let uri = &params.text_document_position_params.text_document.uri; let position = params.text_document_position_params.position; - let path = CanonicalPath::from_uri(uri)?; + let path = CanonicalPath::from_uri(uri).ok()?; let doc = self.documents.get(&path)?; let lsp_pos = position.into(); @@ -984,7 +984,7 @@ /// Handle textDocument/formatting request.
fn on_formatting(&self, params: &DocumentFormattingParams) -> Option> { let uri = ¶ms.text_document.uri; - let path = CanonicalPath::from_uri(uri)?; + let path = CanonicalPath::from_uri(uri).ok()?; let doc = self.documents.get(&path)?; // Get formatting config @@ -1002,7 +1002,7 @@ impl Server { ) -> Option { let uri = ¶ms.text_document.uri; let position = params.position; - let path = CanonicalPath::from_uri(uri)?; + let path = CanonicalPath::from_uri(uri).ok()?; let doc = self.documents.get(&path)?; let lsp_pos = position.into(); @@ -1016,7 +1016,7 @@ impl Server { params: &SemanticTokensParams, ) -> Option { let uri = ¶ms.text_document.uri; - let path = CanonicalPath::from_uri(uri)?; + let path = CanonicalPath::from_uri(uri).ok()?; let doc = self.documents.get(&path)?; Some(handlers::semantic_tokens(&doc).into()) @@ -1028,7 +1028,7 @@ impl Server { params: &SemanticTokensRangeParams, ) -> Option { let uri = ¶ms.text_document.uri; - let path = CanonicalPath::from_uri(uri)?; + let path = CanonicalPath::from_uri(uri).ok()?; let doc = self.documents.get(&path)?; Some(handlers::semantic_tokens_range(&doc, params.range).into()) diff --git a/crates/jrsonnet-lsp/src/server/async_requests.rs b/crates/jrsonnet-lsp/src/server/async_requests.rs index 98ab8d28..dd27fdf0 100644 --- a/crates/jrsonnet-lsp/src/server/async_requests.rs +++ b/crates/jrsonnet-lsp/src/server/async_requests.rs @@ -74,7 +74,7 @@ impl AsyncRequestContext { pub(super) fn hover(&self, params: &HoverParams) -> Option { let uri = ¶ms.text_document_position_params.text_document.uri; let position = params.text_document_position_params.position; - let path = CanonicalPath::from_uri(uri)?; + let path = CanonicalPath::from_uri(uri).ok()?; let doc = self.documents.get(&path)?.clone(); let lsp_pos = position.into(); let analysis = self.analyze_document(&path, &doc); @@ -124,7 +124,7 @@ impl AsyncRequestContext { ) -> Option { let uri = ¶ms.text_document_position_params.text_document.uri; let position = 
params.text_document_position_params.position; - let path = CanonicalPath::from_uri(uri)?; + let path = CanonicalPath::from_uri(uri).ok()?; let doc = self.documents.get(&path)?.clone(); let lsp_pos = position.into(); let semantic = self.documents.get_semantic_artifacts(&path); @@ -190,7 +190,7 @@ impl AsyncRequestContext { pub(super) fn inlay_hints(&self, params: &InlayHintParams) -> Option> { let uri = ¶ms.text_document.uri; - let path = CanonicalPath::from_uri(uri)?; + let path = CanonicalPath::from_uri(uri).ok()?; let doc = self.documents.get(&path)?.clone(); let analysis = self.analyze_document(&path, &doc); let hints = handlers::inlay_hints(&doc, &analysis, params.range); @@ -203,7 +203,7 @@ impl AsyncRequestContext { pub(super) fn completion(&self, params: &CompletionParams) -> Option { let uri = ¶ms.text_document_position.text_document.uri; let position = params.text_document_position.position; - let path = CanonicalPath::from_uri(uri)?; + let path = CanonicalPath::from_uri(uri).ok()?; let doc = self.documents.get(&path)?.clone(); let semantic = self.documents.get_semantic_artifacts(&path); @@ -231,7 +231,7 @@ impl AsyncRequestContext { pub(super) fn references(&self, params: &ReferenceParams) -> Option> { let uri = ¶ms.text_document_position.text_document.uri; let position = params.text_document_position.position; - let path = CanonicalPath::from_uri(uri)?; + let path = CanonicalPath::from_uri(uri).ok()?; let doc = self.documents.get_document(&path)?; let lsp_pos = position.into(); let semantic = self.documents.get_semantic_artifacts(&path); @@ -334,7 +334,7 @@ impl AsyncRequestContext { pub(super) fn rename(&self, params: &RenameParams) -> Option { let uri = ¶ms.text_document_position.text_document.uri; let position = params.text_document_position.position; - let path = CanonicalPath::from_uri(uri)?; + let path = CanonicalPath::from_uri(uri).ok()?; let doc = self.documents.get(&path)?; let new_name = match SymbolName::new(¶ms.new_name) { @@ -361,7 +361,7 
@@ impl AsyncRequestContext { pub(super) fn code_lens(&self, params: &CodeLensParams) -> Option<Vec<CodeLens>> { let uri = &params.text_document.uri; - let path = CanonicalPath::from_uri(uri)?; + let path = CanonicalPath::from_uri(uri).ok()?; let doc = self.documents.get(&path)?.clone(); let config = handlers::CodeLensConfig::all(); @@ -418,7 +418,7 @@ impl AsyncRequestContext { use jrsonnet_parser::{SourceFile, SourcePath}; let uri_parsed: lsp_types::Uri = uri.parse().ok()?; - let path = CanonicalPath::from_uri(&uri_parsed)?; + let path = CanonicalPath::from_uri(&uri_parsed).ok()?; let text = self.documents.get_text(&path)?; let jpath = self.eval_command_jpath(Some(&path)); @@ -460,7 +460,7 @@ impl AsyncRequestContext { let base_path = base_uri .and_then(|uri| uri.parse::<lsp_types::Uri>().ok()) - .and_then(|uri| CanonicalPath::from_uri(&uri)); + .and_then(|uri| CanonicalPath::from_uri(&uri).ok()); let jpath = self.eval_command_jpath(base_path.as_ref()); let state = create_state_with_jpath(&jpath); let source_name = base_path.map_or_else( @@ -516,7 +516,7 @@ impl AsyncRequestContext { fn execute_find_transitive_importers(&self, uri: &str) -> Option { let uri_parsed: lsp_types::Uri = uri.parse().ok()?; - let path = CanonicalPath::from_uri(&uri_parsed)?; + let path = CanonicalPath::from_uri(&uri_parsed).ok()?; let import_graph = self.import_graph.read(); let importers = import_graph.transitive_importers(&path); @@ -560,7 +560,7 @@ impl AsyncRequestContext { fn execute_show_errors(&self, uri: &str) -> Option { let uri_parsed: lsp_types::Uri = uri.parse().ok()?; - let path = CanonicalPath::from_uri(&uri_parsed)?; + let path = CanonicalPath::from_uri(&uri_parsed).ok()?; let doc = self.load_document_for_path(&path)?; let analysis = self.analyze_document(&path, &doc); let (enable_lint_diagnostics, evaluator, import_roots) = { diff --git a/crates/jrsonnet-lsp/src/server/notifications.rs b/crates/jrsonnet-lsp/src/server/notifications.rs index a81b4412..99173473 100644 ---
a/crates/jrsonnet-lsp/src/server/notifications.rs +++ b/crates/jrsonnet-lsp/src/server/notifications.rs @@ -61,7 +61,7 @@ impl Server { let uri = ¶ms.text_document.uri; info!("Document opened: {}", uri.as_str()); - let Some(path) = CanonicalPath::from_uri(uri) else { + let Ok(path) = CanonicalPath::from_uri(uri) else { warn!("Could not convert URI to path: {}", uri.as_str()); return; }; @@ -87,7 +87,7 @@ impl Server { let uri = ¶ms.text_document.uri; debug!("Document changed: {}", uri.as_str()); - let Some(path) = CanonicalPath::from_uri(uri) else { + let Ok(path) = CanonicalPath::from_uri(uri) else { warn!("Could not convert URI to path: {}", uri.as_str()); return; }; @@ -128,7 +128,7 @@ impl Server { let uri = ¶ms.text_document.uri; info!("Document closed: {}", uri.as_str()); - let Some(path) = CanonicalPath::from_uri(uri) else { + let Ok(path) = CanonicalPath::from_uri(uri) else { warn!("Could not convert URI to path: {}", uri.as_str()); return Ok(()); }; @@ -159,7 +159,7 @@ impl Server { let uri = ¶ms.text_document.uri; debug!("Document saved: {}", uri.as_str()); - let Some(path) = CanonicalPath::from_uri(uri) else { + let Ok(path) = CanonicalPath::from_uri(uri) else { warn!("Could not convert URI to path: {}", uri.as_str()); return; }; @@ -261,7 +261,7 @@ impl Server { let mut requires_global_reindex = false; for change in params.changes { - let Some(path) = CanonicalPath::from_uri(&change.uri) else { + let Ok(path) = CanonicalPath::from_uri(&change.uri) else { continue; }; From 53b91ebf03712f8e07bba5a62ce3dfa037706489 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Fri, 13 Feb 2026 12:44:46 +0000 Subject: [PATCH 099/210] refactor(lsp-import): intern graph file paths with FileId - add `FileId` and `PathInterner` utilities in `jrsonnet-lsp-document`. - switch `ImportGraph` internals to stable file ids for forward/reverse edges and dependency traversal. - keep public graph APIs path-based, resolving ids at the boundary. 
- add structural tests for equivalent-path lookups and missing-root dependency traversal behavior. --- crates/jrsonnet-lsp-document/src/file_ids.rs | 117 +++++++++ crates/jrsonnet-lsp-document/src/lib.rs | 2 + crates/jrsonnet-lsp-import/src/graph.rs | 252 ++++++++++++++----- 3 files changed, 312 insertions(+), 59 deletions(-) create mode 100644 crates/jrsonnet-lsp-document/src/file_ids.rs diff --git a/crates/jrsonnet-lsp-document/src/file_ids.rs b/crates/jrsonnet-lsp-document/src/file_ids.rs new file mode 100644 index 00000000..95c04025 --- /dev/null +++ b/crates/jrsonnet-lsp-document/src/file_ids.rs @@ -0,0 +1,117 @@ +//! File identity and path interning utilities. + +use std::collections::HashMap; + +use crate::CanonicalPath; + +/// Stable identifier for an interned file path. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] +pub struct FileId(u32); + +impl FileId { + /// Create a file id from a raw integer. + #[must_use] + pub fn from_raw(raw: u32) -> Self { + Self(raw) + } + + /// Get the underlying integer representation. + #[must_use] + pub fn as_raw(self) -> u32 { + self.0 + } + + /// Get this id as a vector index. + #[must_use] + pub fn as_usize(self) -> usize { + self.0 as usize + } +} + +/// Bidirectional interner between canonical paths and stable file identifiers. +#[derive(Debug, Clone, Default)] +pub struct PathInterner { + path_to_id: HashMap, + id_to_path: Vec, +} + +impl PathInterner { + /// Create an empty interner. + #[must_use] + pub fn new() -> Self { + Self::default() + } + + /// Get an interned id for `path`, inserting it if needed. + pub fn intern(&mut self, path: &CanonicalPath) -> FileId { + if let Some(&id) = self.path_to_id.get(path) { + return id; + } + + let raw = u32::try_from(self.id_to_path.len()).expect("too many interned file paths"); + let id = FileId::from_raw(raw); + self.path_to_id.insert(path.clone(), id); + self.id_to_path.push(path.clone()); + id + } + + /// Get an existing id for `path`. 
+ #[must_use] + pub fn get_id(&self, path: &CanonicalPath) -> Option { + self.path_to_id.get(path).copied() + } + + /// Resolve an interned id to its canonical path. + #[must_use] + pub fn get_path(&self, file_id: FileId) -> Option<&CanonicalPath> { + self.id_to_path.get(file_id.as_usize()) + } +} + +#[cfg(test)] +mod tests { + use std::path::PathBuf; + + use super::*; + + fn path(name: &str) -> CanonicalPath { + CanonicalPath::new(PathBuf::from(format!("/test/{name}.jsonnet"))) + } + + #[test] + fn test_path_interner_reuses_id_for_same_path() { + let mut interner = PathInterner::new(); + let alpha = path("alpha"); + + let first = interner.intern(&alpha); + let second = interner.intern(&alpha); + + assert_eq!(first, second); + assert_eq!(interner.get_id(&alpha), Some(first)); + assert_eq!(interner.get_path(first), Some(&alpha)); + } + + #[test] + fn test_path_interner_distinguishes_paths() { + let mut interner = PathInterner::new(); + let alpha = path("alpha"); + let beta = path("beta"); + + let alpha_id = interner.intern(&alpha); + let beta_id = interner.intern(&beta); + + assert_eq!(alpha_id, FileId::from_raw(0)); + assert_eq!(beta_id, FileId::from_raw(1)); + assert_eq!(interner.get_path(alpha_id), Some(&alpha)); + assert_eq!(interner.get_path(beta_id), Some(&beta)); + } + + #[test] + fn test_path_interner_unknown_lookup() { + let interner = PathInterner::new(); + let alpha = path("alpha"); + + assert_eq!(interner.get_id(&alpha), None); + assert_eq!(interner.get_path(FileId::from_raw(0)), None); + } +} diff --git a/crates/jrsonnet-lsp-document/src/lib.rs b/crates/jrsonnet-lsp-document/src/lib.rs index 52f703d5..94044580 100644 --- a/crates/jrsonnet-lsp-document/src/lib.rs +++ b/crates/jrsonnet-lsp-document/src/lib.rs @@ -12,6 +12,7 @@ pub mod ast_utils; pub mod config; pub mod document; pub mod error; +pub mod file_ids; pub mod position; pub mod types; @@ -25,6 +26,7 @@ pub use document::{Document, ParsedDocument, SharedDocument, SyntaxError}; pub use error::{ 
is_valid_jsonnet_identifier, validate_identifier, HandlerResult, LspError, LspResult, }; +pub use file_ids::{FileId, PathInterner}; pub use position::LineIndex; pub use types::{ ByteOffset, CanonicalPath, CharOffset, DocVersion, Line, LspPosition, LspRange, SymbolName, diff --git a/crates/jrsonnet-lsp-import/src/graph.rs b/crates/jrsonnet-lsp-import/src/graph.rs index 312742b3..e13e3935 100644 --- a/crates/jrsonnet-lsp-import/src/graph.rs +++ b/crates/jrsonnet-lsp-import/src/graph.rs @@ -5,7 +5,7 @@ use std::collections::{HashMap, HashSet, VecDeque}; -use jrsonnet_lsp_document::{strip_string_quotes, CanonicalPath, Document}; +use jrsonnet_lsp_document::{strip_string_quotes, CanonicalPath, Document, FileId, PathInterner}; use jrsonnet_rowan_parser::{ nodes::{Bind, Destruct, ExprImport, ImportKindKind, StmtLocal}, AstNode, AstToken, SyntaxKind, @@ -54,10 +54,12 @@ pub struct ImportOccurrence { /// - `imported_by`: file → list of files that import it (reverse index) #[derive(Debug, Default)] pub struct ImportGraph { + /// Interned mapping between canonical paths and stable file ids. + paths: PathInterner, /// Map of file → import entries in that file. - imports: HashMap>, + imports: HashMap>, /// Reverse index: file → files that import it. - imported_by: HashMap>, + imported_by: HashMap>, } impl ImportGraph { @@ -77,21 +79,23 @@ impl ImportGraph { /// Parse the imports first using [`parse_document_imports`], then call this /// method while holding the write lock. 
pub fn update_file_with_entries(&mut self, path: &CanonicalPath, entries: Vec) { + let file_id = self.paths.intern(path); // Remove old entries for this file self.remove_file(path); // Update imported_by reverse index for entry in &entries { if let Some(ref resolved) = entry.resolved_path { + let resolved_id = self.paths.intern(resolved); self.imported_by - .entry(resolved.clone()) + .entry(resolved_id) .or_default() - .insert(path.clone()); + .insert(file_id); } } // Store the import entries - self.imports.insert(path.clone(), entries); + self.imports.insert(file_id, entries); } /// Update a file's imports in the graph. @@ -111,15 +115,24 @@ impl ImportGraph { /// /// This removes the file's import entries and updates the reverse index. pub fn remove_file(&mut self, path: &CanonicalPath) { + let Some(file_id) = self.paths.get_id(path) else { + return; + }; + // Remove from imported_by reverse index - if let Some(old_entries) = self.imports.get(path) { + if let Some(old_entries) = self.imports.get(&file_id) { for entry in old_entries { if let Some(ref resolved) = entry.resolved_path { - if let Some(importers) = self.imported_by.get_mut(resolved) { - importers.remove(path); - // Clean up empty sets - if importers.is_empty() { - self.imported_by.remove(resolved); + if let Some(resolved_id) = self.paths.get_id(resolved) { + let should_remove_entry = self + .imported_by + .get_mut(&resolved_id) + .is_some_and(|importers| { + importers.remove(&file_id); + importers.is_empty() + }); + if should_remove_entry { + self.imported_by.remove(&resolved_id); } } } @@ -127,16 +140,45 @@ impl ImportGraph { } // Remove the import entries - self.imports.remove(path); + self.imports.remove(&file_id); + } + + fn direct_importers_by_id(&self, file_id: FileId) -> Vec { + self.imported_by + .get(&file_id) + .map(|s| s.iter().copied().collect()) + .unwrap_or_default() + } + + #[must_use] + fn path_for_id(&self, file_id: FileId) -> Option<&CanonicalPath> { + self.paths.get_path(file_id) + } 
+ + #[must_use] + fn id_for_path(&self, path: &CanonicalPath) -> Option { + self.paths.get_id(path) + } + + #[must_use] + fn resolved_entry_id(&self, entry: &ImportEntry) -> Option { + entry + .resolved_path + .as_ref() + .and_then(|path| self.paths.get_id(path)) } /// Get the files that directly import a given file. #[must_use] pub fn direct_importers(&self, path: &CanonicalPath) -> Vec { - self.imported_by - .get(path) - .map(|s| s.iter().cloned().collect()) - .unwrap_or_default() + let Some(file_id) = self.id_for_path(path) else { + return Vec::new(); + }; + + self.direct_importers_by_id(file_id) + .into_iter() + .filter_map(|importer_id| self.path_for_id(importer_id).cloned()) + .collect() } /// Get all files that transitively import a given file. @@ -145,23 +187,33 @@ impl ImportGraph { /// to find all files that depend on the given file, directly or indirectly. #[must_use] pub fn transitive_importers(&self, path: &CanonicalPath) -> HashSet { + let Some(root_id) = self.id_for_path(path) else { + return HashSet::new(); + }; + let mut result = HashSet::new(); - let mut queue = VecDeque::from([path.clone()]); + let mut queue = VecDeque::from([root_id]); while let Some(current) = queue.pop_front() { - for importer in self.direct_importers(¤t) { - if result.insert(importer.clone()) { + for importer in self.direct_importers_by_id(current) { + if result.insert(importer) { queue.push_back(importer); } } } result + .into_iter() + .filter_map(|importer_id| self.path_for_id(importer_id).cloned()) + .collect() } /// Get the import entries for a file. pub fn imports(&self, path: &CanonicalPath) -> &[ImportEntry] { - self.imports.get(path).map_or(&[], Vec::as_slice) + let Some(file_id) = self.id_for_path(path) else { + return &[]; + }; + self.imports.get(&file_id).map_or(&[], Vec::as_slice) } /// Find imports in a file that point to a specific target file. 
@@ -171,12 +223,19 @@ impl ImportGraph { file: &CanonicalPath, target: &CanonicalPath, ) -> Vec<&ImportEntry> { + let Some(file_id) = self.id_for_path(file) else { + return Vec::new(); + }; + let Some(target_id) = self.id_for_path(target) else { + return Vec::new(); + }; + self.imports - .get(file) + .get(&file_id) .map(|entries| { entries .iter() - .filter(|e| e.resolved_path.as_ref() == Some(target)) + .filter(|entry| self.resolved_entry_id(entry) == Some(target_id)) .collect() }) .unwrap_or_default() @@ -190,7 +249,9 @@ impl ImportGraph { /// Get all files tracked in the graph. pub fn all_files(&self) -> impl Iterator { - self.imports.keys() + self.imports + .keys() + .filter_map(|file_id| self.path_for_id(*file_id)) } /// Compute a topological ordering of files based on import dependencies. @@ -202,33 +263,30 @@ impl ImportGraph { /// Returns `None` if there's a cycle in the import graph. #[must_use] pub fn topological_order(&self) -> Option>> { - let mut in_degree: HashMap<&CanonicalPath, usize> = HashMap::new(); - let mut levels: Vec> = Vec::new(); + let mut in_degree: HashMap = HashMap::new(); + let mut levels: Vec> = Vec::new(); // Initialize in-degree for all files to 0 - for path in self.imports.keys() { - in_degree.insert(path, 0); + for &path_id in self.imports.keys() { + in_degree.insert(path_id, 0); } // Calculate in-degree: count how many dependencies each file has // (how many files it imports that are also in our graph) - for (path, entries) in &self.imports { + for (&path_id, entries) in &self.imports { let dep_count = entries .iter() - .filter(|e| { - e.resolved_path - .as_ref() - .is_some_and(|p| self.imports.contains_key(p)) - }) + .filter_map(|entry| self.resolved_entry_id(entry)) + .filter(|dep_id| self.imports.contains_key(dep_id)) .count(); - in_degree.insert(path, dep_count); + in_degree.insert(path_id, dep_count); } // Find all files with no dependencies (in-degree 0) - let mut current_level: Vec = in_degree + let mut current_level: Vec = 
in_degree .iter() .filter(|(_, &deg)| deg == 0) .map(|(&path_id, _)| path_id) .collect(); let mut processed = HashSet::new(); @@ -238,32 +296,33 @@ current_level.sort(); // Mark current level as processed - for path in &current_level { - processed.insert(path.clone()); + for path_id in &current_level { + processed.insert(*path_id); } levels.push(current_level.clone()); // Find next level: files whose dependencies are all now processed let mut next_level = Vec::new(); - for path in &current_level { + for path_id in &current_level { // For each file that imports this one - if let Some(importers) = self.imported_by.get(path) { + if let Some(importers) = self.imported_by.get(path_id) { for importer in importers { if processed.contains(importer) { continue; } // Check if all dependencies of importer are processed let all_deps_processed = self.imports.get(importer).is_none_or(|entries| { - entries.iter().all(|e| { - e.resolved_path.as_ref().is_none_or(|p| { - processed.contains(p) || !self.imports.contains_key(p) + entries.iter().all(|entry| { + self.resolved_entry_id(entry).is_none_or(|dep_id| { + processed.contains(&dep_id) + || !self.imports.contains_key(&dep_id) }) }) }); if all_deps_processed && !next_level.contains(importer) { - next_level.push(importer.clone()); + next_level.push(*importer); } } } @@ -274,7 +333,17 @@ // Check if all files were processed (no cycles) if processed.len() == self.imports.len() { - Some(levels) + Some( + levels + .into_iter() + .map(|level| { + level + .into_iter() + .filter_map(|file_id| self.path_for_id(file_id).cloned()) + .collect() + }) + .collect(), + ) } else { None // Cycle detected } @@ -356,23 +425,33 @@ F: Fn(&CanonicalPath) + Sync, P: Fn(&ImportEntry) -> bool + Sync, { + let Some(root_id) = self.id_for_path(root) else { + return; + }; + let mut work = WorkQueue::new(); - work.push(root.clone()); + work.push(root_id); -
let levels = work.run(|path_id, deps| { // Get dependencies from import graph - for entry in self.imports(path) { - if !include_dependency(entry) { - continue; - } - if let Some(ref resolved) = entry.resolved_path { - deps.push(resolved.clone()); + if let Some(entries) = self.imports.get(path_id) { + for entry in entries { + if !include_dependency(entry) { + continue; + } + if let Some(resolved_id) = self.resolved_entry_id(entry) { + deps.push(resolved_id); + } } } }); // Process levels in dependency order (leaves first) - levels.process_parallel(&f); + levels.process_parallel(|path_id| { + if let Some(path) = self.path_for_id(*path_id) { + f(path); + } + }); } /// Process a file and its transitive importers using a work queue. @@ -386,12 +465,16 @@ impl ImportGraph { where F: Fn(&CanonicalPath) + Sync, { + let Some(root_id) = self.id_for_path(root) else { + return; + }; + let mut work = WorkQueue::new(); - work.push(root.clone()); + work.push(root_id); - let mut levels = work.run(|path, deps| { + let mut levels = work.run(|path_id, deps| { // Get files that import this file - for importer in self.direct_importers(path) { + for importer in self.direct_importers_by_id(*path_id) { deps.push(importer); } }); @@ -401,7 +484,11 @@ impl ImportGraph { levels.reverse(); // Process levels (root first, then importers) - levels.process_parallel(&f); + levels.process_parallel(|path_id| { + if let Some(path) = self.path_for_id(*path_id) { + f(path); + } + }); } } @@ -829,6 +916,33 @@ lib1 + lib2 assert_eq!(importers, vec![main.clone()]); } + #[test] + fn test_import_graph_lookups_with_equivalent_paths() { + let mut graph = ImportGraph::new(); + + let main = test_path("main.jsonnet"); + let code = r#"local lib = import "lib.jsonnet"; lib"#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + graph.update_file(&main, &doc, simple_resolver); + + let main_lookup = test_path("main.jsonnet"); + let lib_lookup = test_path("lib.jsonnet"); + + assert_eq!( + 
graph.direct_importers(&lib_lookup), + vec![main_lookup.clone()] + ); + assert_eq!( + graph.imports_of_target(&main_lookup, &lib_lookup), + vec![&ImportEntry { + kind: ImportKind::Code, + binding_name: Some("lib".to_string()), + import_path: "lib.jsonnet".to_string(), + resolved_path: Some(lib_lookup), + }] + ); + } + #[test] fn test_import_graph_remove() { let mut graph = ImportGraph::new(); @@ -1149,4 +1263,24 @@ u1 + u2 // lib first, then utils (imports lib), then main (imports utils) assert_eq!(order, vec![lib, utils, main]); } + + #[test] + fn test_process_with_dependencies_unknown_root_is_noop() { + use std::sync::{Arc, Mutex}; + + let graph = ImportGraph::new(); + let missing = test_path("missing.jsonnet"); + let processed = Arc::new(Mutex::new(Vec::new())); + let processed_clone = Arc::clone(&processed); + + graph.process_with_dependencies( + &missing, + |_| true, + move |path| { + processed_clone.lock().unwrap().push(path.clone()); + }, + ); + + assert_eq!(*processed.lock().unwrap(), Vec::<CanonicalPath>::new()); + } } From 9920380e83648329733452434b497d0cd720bf01 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Fri, 13 Feb 2026 12:52:16 +0000 Subject: [PATCH 100/210] refactor(lsp-inference): key document state by FileId - migrate `DocumentManager` open/closed stores and analysis/semantic caches to interned `FileId` keys. - keep external APIs path-based by resolving through a shared `PathInterner` boundary. - add structural tests for equivalent-path lookups and missing-path no-op operations. - remove redundant request-path `Document` clones now that manager lookups return owned values. 
--- crates/jrsonnet-lsp-inference/src/manager.rs | 265 ++++++++++++------ crates/jrsonnet-lsp/src/server.rs | 4 +- .../jrsonnet-lsp/src/server/async_requests.rs | 10 +- 3 files changed, 187 insertions(+), 92 deletions(-) diff --git a/crates/jrsonnet-lsp-inference/src/manager.rs b/crates/jrsonnet-lsp-inference/src/manager.rs index ddd5050b..9f576bf2 100644 --- a/crates/jrsonnet-lsp-inference/src/manager.rs +++ b/crates/jrsonnet-lsp-inference/src/manager.rs @@ -7,7 +7,7 @@ use std::{num::NonZeroUsize, sync::Arc}; use dashmap::DashMap; use jrsonnet_lsp_document::{ - CanonicalPath, DocVersion, Document, DEFAULT_ANALYSIS_CACHE_CAPACITY, + CanonicalPath, DocVersion, Document, FileId, PathInterner, DEFAULT_ANALYSIS_CACHE_CAPACITY, DEFAULT_CLOSED_CACHE_CAPACITY, }; use jrsonnet_lsp_types::GlobalTyStore; @@ -41,15 +41,16 @@ struct CachedSemanticArtifacts { /// /// This type is `Sync` and can be safely shared across threads. pub struct DocumentManager { - /// Open documents - concurrent access without global lock. - open: DashMap<CanonicalPath, Document>, + /// Interned mapping between canonical paths and stable file ids. + paths: RwLock<PathInterner>, + /// Open documents keyed by interned file ids. + open: DashMap<FileId, Document>, /// LRU cache for recently closed documents. - closed: RwLock<LruCache<CanonicalPath, Document>>, - /// Cached type analysis per document (keyed by path, validated by version). - /// Uses moka for thread-safe concurrent caching with LRU-like eviction. - analysis_cache: MokaCache<CanonicalPath, CachedAnalysis>, - /// Cached semantic artifacts per document (keyed by path, validated by version). - semantic_cache: MokaCache<CanonicalPath, CachedSemanticArtifacts>, + closed: RwLock<LruCache<FileId, Document>>, + /// Cached type analysis per document id (validated by version). + analysis_cache: MokaCache<FileId, CachedAnalysis>, + /// Cached semantic artifacts per document id (validated by version). + semantic_cache: MokaCache<FileId, CachedSemanticArtifacts>, /// Global type store shared across all analyses. 
global_types: Arc<GlobalTyStore>, } @@ -70,6 +71,7 @@ impl DocumentManager { pub fn with_capacity(global_types: Arc<GlobalTyStore>, closed_capacity: usize) -> Self { let closed_capacity = NonZeroUsize::new(closed_capacity).unwrap_or(NonZeroUsize::MIN); Self { + paths: RwLock::new(PathInterner::new()), open: DashMap::new(), closed: RwLock::new(LruCache::new(closed_capacity)), analysis_cache: MokaCache::new(DEFAULT_ANALYSIS_CACHE_CAPACITY as u64), @@ -83,29 +85,55 @@ impl DocumentManager { &self.global_types } + fn intern_path(&self, path: &CanonicalPath) -> FileId { + self.paths.write().intern(path) + } + + fn file_id(&self, path: &CanonicalPath) -> Option<FileId> { + self.paths.read().get_id(path) + } + + fn path_for_id(&self, file_id: FileId) -> Option<CanonicalPath> { + self.paths.read().get_path(file_id).cloned() + } + + fn invalidate_analysis_by_id(&self, file_id: FileId) { + self.analysis_cache.invalidate(&file_id); + } + + fn invalidate_semantic_artifacts_by_id(&self, file_id: FileId) { + self.semantic_cache.invalidate(&file_id); + } + /// Open a document (called on textDocument/didOpen). pub fn open(&self, path: CanonicalPath, text: String, version: DocVersion) { + let file_id = self.intern_path(&path); + // Move from closed cache if present { let mut closed = self.closed.write(); - closed.pop(&path); + closed.pop(&file_id); } - self.invalidate_analysis(&path); - self.invalidate_semantic_artifacts(&path); + self.invalidate_analysis_by_id(file_id); + self.invalidate_semantic_artifacts_by_id(file_id); let document = Document::new(text, version); - self.open.insert(path, document); + self.open.insert(file_id, document); } /// Update an open document with full text (called on textDocument/didChange with full sync). /// /// Returns true if the document was found and updated. 
pub fn update(&self, path: &CanonicalPath, text: String, version: DocVersion) -> bool { - self.open.get_mut(path).is_some_and(|mut doc| { + let Some(file_id) = self.file_id(path) else { + return false; + }; + + self.open.get_mut(&file_id).is_some_and(|mut doc| { doc.update(text, version); - self.invalidate_analysis(path); - self.invalidate_semantic_artifacts(path); + self.invalidate_analysis_by_id(file_id); + self.invalidate_semantic_artifacts_by_id(file_id); true }) } @@ -120,10 +148,14 @@ impl DocumentManager { new_text: &str, version: DocVersion, ) -> bool { - self.open.get_mut(path).is_some_and(|mut doc| { + let Some(file_id) = self.file_id(path) else { + return false; + }; + + self.open.get_mut(&file_id).is_some_and(|mut doc| { if doc.apply_incremental_change(range, new_text, version) { - self.invalidate_analysis(path); - self.invalidate_semantic_artifacts(path); + self.invalidate_analysis_by_id(file_id); + self.invalidate_semantic_artifacts_by_id(file_id); true } else { false @@ -135,9 +167,13 @@ impl DocumentManager { /// /// Moves the document to the closed cache for potential reuse. pub fn close(&self, path: &CanonicalPath) { - if let Some((path, document)) = self.open.remove(path) { + let Some(file_id) = self.file_id(path) else { + return; + }; + + if let Some((file_id, document)) = self.open.remove(&file_id) { let mut closed = self.closed.write(); - closed.put(path, document); + closed.put(file_id, document); } } @@ -153,27 +189,29 @@ impl DocumentManager { return false; }; + let file_id = self.intern_path(path); self.closed .write() - .put(path.clone(), Document::new(text, DocVersion::new(0))); - self.invalidate_analysis(path); - self.invalidate_semantic_artifacts(path); + .put(file_id, Document::new(text, DocVersion::new(0))); + self.invalidate_analysis_by_id(file_id); + self.invalidate_semantic_artifacts_by_id(file_id); true } /// Remove a closed document entry from the cache. 
pub fn remove_closed(&self, path: &CanonicalPath) { - self.closed.write().pop(path); - self.invalidate_analysis(path); - self.invalidate_semantic_artifacts(path); + let Some(file_id) = self.file_id(path) else { + return; + }; + self.closed.write().pop(&file_id); + self.invalidate_analysis_by_id(file_id); + self.invalidate_semantic_artifacts_by_id(file_id); } - /// Get a reference to an open document. - pub fn get( - &self, - path: &CanonicalPath, - ) -> Option> { - self.open.get(path) + /// Get an open document. + pub fn get(&self, path: &CanonicalPath) -> Option { + let file_id = self.file_id(path)?; + self.open.get(&file_id).map(|doc| doc.clone()) } /// Get the document text for a path. @@ -181,17 +219,19 @@ impl DocumentManager { /// Checks open documents first, then the closed cache, /// and finally tries to read from disk. pub fn get_text(&self, path: &CanonicalPath) -> Option { - // Check open documents - if let Some(doc) = self.open.get(path) { - return Some(doc.text().to_string()); - } - - // Check closed cache (use peek to avoid write lock when just reading) - { - let closed = self.closed.read(); - if let Some(doc) = closed.peek(path) { + if let Some(file_id) = self.file_id(path) { + // Check open documents + if let Some(doc) = self.open.get(&file_id) { return Some(doc.text().to_string()); } + + // Check closed cache (use peek to avoid write lock when just reading) + { + let closed = self.closed.read(); + if let Some(doc) = closed.peek(&file_id) { + return Some(doc.text().to_string()); + } + } } // Try to read from disk @@ -202,32 +242,36 @@ impl DocumentManager { /// /// Returns a cloned Document which is cheap due to internal Arc usage. 
pub fn get_document(&self, path: &CanonicalPath) -> Option { - // Check open documents - if let Some(doc) = self.open.get(path) { - return Some(doc.clone()); - } - - // Check closed cache - { - let closed = self.closed.read(); - if let Some(doc) = closed.peek(path) { + if let Some(file_id) = self.file_id(path) { + // Check open documents + if let Some(doc) = self.open.get(&file_id) { return Some(doc.clone()); } + + // Check closed cache + { + let closed = self.closed.read(); + if let Some(doc) = closed.peek(&file_id) { + return Some(doc.clone()); + } + } } // Read from disk once, then cache in `closed` for reuse. let text = std::fs::read_to_string(path.as_path()).ok()?; let document = Document::new(text, DocVersion::new(0)); + let file_id = self.intern_path(path); { let mut closed = self.closed.write(); - closed.put(path.clone(), document.clone()); + closed.put(file_id, document.clone()); } Some(document) } /// Check if a document is currently open. pub fn is_open(&self, path: &CanonicalPath) -> bool { - self.open.contains_key(path) + self.file_id(path) + .is_some_and(|file_id| self.open.contains_key(&file_id)) } /// Get the number of open documents. @@ -241,26 +285,36 @@ impl DocumentManager { F: FnMut(&CanonicalPath, &Document), { for entry in &self.open { - f(entry.key(), entry.value()); + if let Some(path) = self.path_for_id(*entry.key()) { + f(&path, entry.value()); + } } } /// Get all open document paths. pub fn open_paths(&self) -> Vec { - self.open.iter().map(|e| e.key().clone()).collect() + self.open + .iter() + .filter_map(|entry| self.path_for_id(*entry.key())) + .collect() } /// Iterate over all open documents. 
- pub fn iter(&self) -> dashmap::iter::Iter<'_, CanonicalPath, Document> { - self.open.iter() + pub fn iter(&self) -> std::vec::IntoIter<(CanonicalPath, Document)> { + self.open + .iter() + .filter_map(|entry| { + self.path_for_id(*entry.key()) + .map(|path| (path, entry.value().clone())) + }) + .collect::>() + .into_iter() } /// Parallel iterate over all open documents. - pub fn par_iter( - &self, - ) -> rayon::iter::IterBridge> { + pub fn par_iter(&self) -> rayon::vec::IntoIter<(CanonicalPath, Document)> { use rayon::prelude::*; - self.open.iter().par_bridge() + self.iter().collect::>().into_par_iter() } /// Get cached type analysis for a document, computing it if needed. @@ -287,7 +341,9 @@ impl DocumentManager { where F: FnOnce() -> TypeAnalysis, { - if let Some(cached) = self.analysis_cache.get(path) { + let file_id = self.intern_path(path); + + if let Some(cached) = self.analysis_cache.get(&file_id) { if cached.version == version { return Arc::clone(&cached.analysis); } @@ -295,7 +351,7 @@ impl DocumentManager { let analysis = Arc::new(compute()); self.analysis_cache.insert( - path.clone(), + file_id, CachedAnalysis { version, analysis: Arc::clone(&analysis), @@ -314,8 +370,9 @@ impl DocumentManager { version: DocVersion, analysis: Arc, ) { + let file_id = self.intern_path(&path); self.analysis_cache - .insert(path, CachedAnalysis { version, analysis }); + .insert(file_id, CachedAnalysis { version, analysis }); } /// Get cached semantic artifacts for a document, computing them if needed. 
@@ -341,7 +398,9 @@ impl DocumentManager { where F: FnOnce() -> SemanticArtifacts, { - if let Some(cached) = self.semantic_cache.get(path) { + let file_id = self.intern_path(path); + + if let Some(cached) = self.semantic_cache.get(&file_id) { if cached.version == version { return Arc::clone(&cached.artifacts); } @@ -349,7 +408,7 @@ impl DocumentManager { let artifacts = Arc::new(compute()); self.semantic_cache.insert( - path.clone(), + file_id, CachedSemanticArtifacts { version, artifacts: Arc::clone(&artifacts), @@ -365,8 +424,9 @@ impl DocumentManager { version: DocVersion, artifacts: Arc, ) { + let file_id = self.intern_path(&path); self.semantic_cache - .insert(path, CachedSemanticArtifacts { version, artifacts }); + .insert(file_id, CachedSemanticArtifacts { version, artifacts }); } /// Eagerly refresh semantic artifacts for a tracked document. @@ -387,25 +447,29 @@ impl DocumentManager { /// Call this when a document's content changes to ensure the next /// `get_analysis` call recomputes the analysis. pub fn invalidate_analysis(&self, path: &CanonicalPath) { - self.analysis_cache.invalidate(path); + if let Some(file_id) = self.file_id(path) { + self.invalidate_analysis_by_id(file_id); + } } /// Invalidate the semantic artifact cache for a document. pub fn invalidate_semantic_artifacts(&self, path: &CanonicalPath) { - self.semantic_cache.invalidate(path); + if let Some(file_id) = self.file_id(path) { + self.invalidate_semantic_artifacts_by_id(file_id); + } } } impl DocumentSource for DocumentManager { fn get_document(&self, path: &CanonicalPath) -> Option { - // Delegate to the existing get_document method + // Delegate to the existing get_document method. 
DocumentManager::get_document(self, path) } } -impl<'a> IntoIterator for &'a DocumentManager { - type Item = dashmap::mapref::multiple::RefMulti<'a, CanonicalPath, Document>; - type IntoIter = dashmap::iter::Iter<'a, CanonicalPath, Document>; +impl IntoIterator for &DocumentManager { + type Item = (CanonicalPath, Document); + type IntoIter = std::vec::IntoIter<(CanonicalPath, Document)>; fn into_iter(self) -> Self::IntoIter { self.iter() @@ -436,7 +500,20 @@ mod tests { manager.open(path.clone(), "{ a: 1 }".to_string(), DocVersion::new(1)); - let doc = manager.get(&path).expect("document should exist").clone(); + let doc = manager.get(&path).expect("document should exist"); + assert_eq!(doc.text(), "{ a: 1 }"); + assert_eq!(doc.version(), DocVersion::new(1)); + } + + #[test] + fn test_open_and_get_with_equivalent_path() { + let manager = DocumentManager::new(test_global_store()); + let path = test_path("test"); + + manager.open(path, "{ a: 1 }".to_string(), DocVersion::new(1)); + + let lookup = test_path("test"); + let doc = manager.get(&lookup).expect("document should exist"); assert_eq!(doc.text(), "{ a: 1 }"); assert_eq!(doc.version(), DocVersion::new(1)); } @@ -449,7 +526,7 @@ mod tests { manager.open(path.clone(), "{ a: 1 }".to_string(), DocVersion::new(1)); assert!(manager.update(&path, "{ a: 2 }".to_string(), DocVersion::new(2))); - let doc = manager.get(&path).expect("document should exist").clone(); + let doc = manager.get(&path).expect("document should exist"); assert_eq!(doc.text(), "{ a: 2 }"); assert_eq!(doc.version(), DocVersion::new(2)); } @@ -463,7 +540,7 @@ mod tests { manager.close(&path); assert!(!manager.is_open(&path)); - // But the text should still be available from cache + // But the text should still be available from cache. 
assert_eq!(manager.get_text(&path), Some("{ a: 1 }".to_string())); } @@ -475,18 +552,36 @@ mod tests { manager.open(path.clone(), "{ a: 1 }".to_string(), DocVersion::new(1)); manager.close(&path); - // Reopen with new content + // Reopen with new content. manager.open(path.clone(), "{ a: 2 }".to_string(), DocVersion::new(2)); - let doc = manager.get(&path).expect("document should exist").clone(); + let doc = manager.get(&path).expect("document should exist"); assert_eq!(doc.text(), "{ a: 2 }"); } + #[test] + fn test_missing_path_operations_are_noop() { + let manager = DocumentManager::new(test_global_store()); + let path = test_path("missing"); + + assert_eq!(manager.get(&path).map(|doc| doc.text().to_string()), None); + assert!(!manager.update(&path, "{}".to_string(), DocVersion::new(1))); + assert!(!manager.apply_incremental_change( + &path, + lsp_types::Range { + start: lsp_types::Position::new(0, 0), + end: lsp_types::Position::new(0, 0), + }, + "{}", + DocVersion::new(1), + )); + } + #[test] fn test_multiple_documents() { let manager = DocumentManager::new(test_global_store()); - // Open multiple documents sequentially + // Open multiple documents sequentially. for i in 0..10 { let path = test_path(&format!("test{i}")); manager.open(path.clone(), format!("{{ a: {i} }}"), DocVersion::new(i)); @@ -507,10 +602,10 @@ mod tests { DocVersion::new(1), ); - // First call computes analysis + // First call computes analysis. let analysis1 = manager.get_analysis(&path).expect("analysis should exist"); - // Second call returns cached (same Arc pointer) + // Second call returns cached (same Arc pointer). let analysis2 = manager.get_analysis(&path).expect("analysis should exist"); assert!( Arc::ptr_eq(&analysis1, &analysis2), @@ -531,10 +626,10 @@ mod tests { let analysis1 = manager.get_analysis(&path).expect("analysis should exist"); - // Invalidate the cache + // Invalidate the cache. 
manager.invalidate_analysis(&path); - // Next call recomputes (different Arc pointer) + // Next call recomputes (different Arc pointer). let analysis2 = manager.get_analysis(&path).expect("analysis should exist"); assert!( !Arc::ptr_eq(&analysis1, &analysis2), @@ -555,10 +650,10 @@ mod tests { let analysis1 = manager.get_analysis(&path).expect("analysis should exist"); - // Update the document (changes version) + // Update the document (changes version). manager.update(&path, "local y = 2; y".to_string(), DocVersion::new(2)); - // Next call recomputes due to version mismatch + // Next call recomputes due to version mismatch. let analysis2 = manager.get_analysis(&path).expect("analysis should exist"); assert!( !Arc::ptr_eq(&analysis1, &analysis2), diff --git a/crates/jrsonnet-lsp/src/server.rs b/crates/jrsonnet-lsp/src/server.rs index e9a033b6..e5c23779 100644 --- a/crates/jrsonnet-lsp/src/server.rs +++ b/crates/jrsonnet-lsp/src/server.rs @@ -917,7 +917,7 @@ impl Server { fn on_document_symbol(&self, params: &DocumentSymbolParams) -> Option { let uri = ¶ms.text_document.uri; let path = CanonicalPath::from_uri(uri).ok()?; - let doc = self.documents.get(&path)?.clone(); + let doc = self.documents.get(&path)?; let symbols = handlers::document_symbols(&doc); Some(DocumentSymbolResponse::Nested(symbols)) @@ -936,7 +936,7 @@ impl Server { let uri = ¶ms.text_document_position_params.text_document.uri; let position = params.text_document_position_params.position; let path = CanonicalPath::from_uri(uri).ok()?; - let doc = self.documents.get(&path)?.clone(); + let doc = self.documents.get(&path)?; let lsp_pos = position.into(); let highlights = handlers::document_highlights(&doc, lsp_pos); diff --git a/crates/jrsonnet-lsp/src/server/async_requests.rs b/crates/jrsonnet-lsp/src/server/async_requests.rs index dd27fdf0..a09d4af3 100644 --- a/crates/jrsonnet-lsp/src/server/async_requests.rs +++ b/crates/jrsonnet-lsp/src/server/async_requests.rs @@ -75,7 +75,7 @@ impl 
AsyncRequestContext { let uri = ¶ms.text_document_position_params.text_document.uri; let position = params.text_document_position_params.position; let path = CanonicalPath::from_uri(uri).ok()?; - let doc = self.documents.get(&path)?.clone(); + let doc = self.documents.get(&path)?; let lsp_pos = position.into(); let analysis = self.analyze_document(&path, &doc); let import_field_type_resolver = |import_path: &str, fields: &[String]| { @@ -125,7 +125,7 @@ impl AsyncRequestContext { let uri = ¶ms.text_document_position_params.text_document.uri; let position = params.text_document_position_params.position; let path = CanonicalPath::from_uri(uri).ok()?; - let doc = self.documents.get(&path)?.clone(); + let doc = self.documents.get(&path)?; let lsp_pos = position.into(); let semantic = self.documents.get_semantic_artifacts(&path); @@ -191,7 +191,7 @@ impl AsyncRequestContext { pub(super) fn inlay_hints(&self, params: &InlayHintParams) -> Option> { let uri = ¶ms.text_document.uri; let path = CanonicalPath::from_uri(uri).ok()?; - let doc = self.documents.get(&path)?.clone(); + let doc = self.documents.get(&path)?; let analysis = self.analyze_document(&path, &doc); let hints = handlers::inlay_hints(&doc, &analysis, params.range); if hints.is_empty() { @@ -204,7 +204,7 @@ impl AsyncRequestContext { let uri = ¶ms.text_document_position.text_document.uri; let position = params.text_document_position.position; let path = CanonicalPath::from_uri(uri).ok()?; - let doc = self.documents.get(&path)?.clone(); + let doc = self.documents.get(&path)?; let semantic = self.documents.get_semantic_artifacts(&path); let lsp_pos = position.into(); @@ -362,7 +362,7 @@ impl AsyncRequestContext { pub(super) fn code_lens(&self, params: &CodeLensParams) -> Option> { let uri = ¶ms.text_document.uri; let path = CanonicalPath::from_uri(uri).ok()?; - let doc = self.documents.get(&path)?.clone(); + let doc = self.documents.get(&path)?; let config = handlers::CodeLensConfig::all(); let analysis = 
self.analyze_document(&path, &doc); From 2aa5be8e814e3e68193df99c599226ea1e44b83c Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Fri, 13 Feb 2026 13:16:38 +0000 Subject: [PATCH 101/210] refactor(lsp): intern remaining path-keyed state maps - key async diagnostics debounce sequences by `FileId` while keeping request/result APIs path-based. - key `TypeCache` internals by `FileId` through an internal `PathInterner` boundary. - add structural tests for equivalent-path debouncing and equivalent-path type-cache lookups. --- .../jrsonnet-lsp-inference/src/type_cache.rs | 49 +++++++++++++---- crates/jrsonnet-lsp/src/async_diagnostics.rs | 54 ++++++++++++++++--- 2 files changed, 88 insertions(+), 15 deletions(-) diff --git a/crates/jrsonnet-lsp-inference/src/type_cache.rs b/crates/jrsonnet-lsp-inference/src/type_cache.rs index 6f84084d..ae81f1ec 100644 --- a/crates/jrsonnet-lsp-inference/src/type_cache.rs +++ b/crates/jrsonnet-lsp-inference/src/type_cache.rs @@ -10,7 +10,9 @@ use std::{num::NonZeroUsize, sync::Arc}; -use jrsonnet_lsp_document::{CanonicalPath, Document, DEFAULT_TYPE_CACHE_CAPACITY}; +use jrsonnet_lsp_document::{ + CanonicalPath, Document, FileId, PathInterner, DEFAULT_TYPE_CACHE_CAPACITY, +}; use jrsonnet_lsp_types::{GlobalTy, GlobalTyStore}; use lru::LruCache; use parking_lot::RwLock; @@ -28,7 +30,9 @@ use crate::analysis::TypeAnalysis; #[derive(Debug)] pub struct TypeCache { /// LRU cache from file path to its cached type. - cache: LruCache, + cache: LruCache, + /// Interned mapping between canonical paths and stable file ids. + paths: PathInterner, /// Global type store for shared types. global_types: Arc, } @@ -53,6 +57,7 @@ impl TypeCache { let capacity = NonZeroUsize::new(capacity).unwrap_or(NonZeroUsize::MIN); Self { cache: LruCache::new(capacity), + paths: PathInterner::new(), global_types, } } @@ -68,39 +73,49 @@ impl TypeCache { /// Uses `peek` to avoid updating LRU order for read-only lookups. 
#[must_use] pub fn get(&self, path: &CanonicalPath) -> Option { - self.cache.peek(path).map(|c| c.ty) + let file_id = self.paths.get_id(path)?; + self.cache.peek(&file_id).map(|c| c.ty) } /// Get the cached type for a file and update LRU order. /// /// Use this when the lookup indicates actual usage of the cached type. pub fn get_and_touch(&mut self, path: &CanonicalPath) -> Option { - self.cache.get(path).map(|c| c.ty) + let file_id = self.paths.get_id(path)?; + self.cache.get(&file_id).map(|c| c.ty) } /// Update the cache for a file. /// /// The type must be global, preventing accidental cross-analysis leakage. pub fn update(&mut self, path: &CanonicalPath, ty: GlobalTy, version: i32) { - self.cache.put(path.clone(), CachedType { ty, version }); + let file_id = self.paths.intern(path); + self.cache.put(file_id, CachedType { ty, version }); } /// Invalidate the cache for a file. pub fn invalidate(&mut self, path: &CanonicalPath) { - self.cache.pop(path); + if let Some(file_id) = self.paths.get_id(path) { + self.cache.pop(&file_id); + } } /// Invalidate the cache for multiple files. pub fn invalidate_many(&mut self, paths: impl IntoIterator) { for path in paths { - self.cache.pop(&path); + if let Some(file_id) = self.paths.get_id(&path) { + self.cache.pop(&file_id); + } } } /// Check if a file's cache is up to date with the given version. #[must_use] pub fn is_up_to_date(&self, path: &CanonicalPath, version: i32) -> bool { - self.cache.peek(path).is_some_and(|c| c.version == version) + self.paths + .get_id(path) + .and_then(|file_id| self.cache.peek(&file_id)) + .is_some_and(|cached| cached.version == version) } /// Get the number of cached entries. 
@@ -247,7 +262,12 @@ mod tests { let actual: BTreeSet<_> = cache .cache .iter() - .map(|(k, v)| (k.as_path().to_string_lossy().to_string(), v.ty)) + .filter_map(|(k, v)| { + cache + .paths + .get_path(*k) + .map(|path| (path.as_path().to_string_lossy().to_string(), v.ty)) + }) .collect(); let expected: BTreeSet<_> = expected .iter() @@ -256,6 +276,17 @@ mod tests { assert_eq!(actual, expected, "Cache contents mismatch"); } + #[test] + fn test_equivalent_path_lookup() { + let mut cache = TypeCache::new(test_global_store()); + let path = test_path("main.jsonnet"); + cache.update(&path, GlobalTy::NUMBER, 1); + + let lookup = test_path("main.jsonnet"); + assert_eq!(cache.get(&lookup), Some(GlobalTy::NUMBER)); + assert!(cache.is_up_to_date(&lookup, 1)); + } + #[test] fn test_cache_basic_ty() { let mut cache = TypeCache::new(test_global_store()); diff --git a/crates/jrsonnet-lsp/src/async_diagnostics.rs b/crates/jrsonnet-lsp/src/async_diagnostics.rs index d82e1592..fc8b4c6c 100644 --- a/crates/jrsonnet-lsp/src/async_diagnostics.rs +++ b/crates/jrsonnet-lsp/src/async_diagnostics.rs @@ -14,7 +14,7 @@ use std::{ }; use crossbeam_channel::{Receiver, Sender}; -use jrsonnet_lsp_document::{CanonicalPath, DocVersion, Document}; +use jrsonnet_lsp_document::{CanonicalPath, DocVersion, Document, FileId, PathInterner}; use jrsonnet_lsp_import::{ImportGraph, ImportResolution}; use jrsonnet_lsp_inference::{ DocumentSource, SharedDocumentManager, SharedTypeCache, TypeProvider, @@ -35,6 +35,8 @@ const DEBOUNCE_DELAY_MS: u64 = 500; /// document state that triggered this request. #[derive(Debug, Clone)] struct DiagnosticsRequest { + /// Interned file identifier for sequence tracking. + file_id: FileId, /// Path of the document. path: CanonicalPath, /// The document text. 
@@ -82,7 +84,7 @@ struct WorkerDocumentSource { struct WorkerRuntime { requests: Receiver, results: Sender, - latest_sequences: Arc>>, + latest_sequences: Arc>>, config: DiagnosticsConfig, } @@ -106,7 +108,9 @@ pub struct AsyncDiagnostics { /// Sequence counter for detecting stale requests. sequence: AtomicU64, /// Latest requested sequence per file (for debouncing). - latest_sequences: Arc>>, + latest_sequences: Arc>>, + /// Interned mapping between canonical paths and stable file ids. + path_interner: Arc>, /// Background thread handle. _thread_handle: thread::JoinHandle<()>, } @@ -118,6 +122,7 @@ impl AsyncDiagnostics { let (request_sender, request_receiver) = crossbeam_channel::unbounded(); let (result_sender, result_receiver) = crossbeam_channel::unbounded(); let latest_sequences = Arc::new(RwLock::new(FxHashMap::default())); + let path_interner = Arc::new(RwLock::new(PathInterner::new())); let sequences_clone = Arc::clone(&latest_sequences); let thread_handle = thread::spawn(move || { @@ -134,6 +139,7 @@ impl AsyncDiagnostics { result_receiver, sequence: AtomicU64::new(0), latest_sequences, + path_interner, _thread_handle: thread_handle, } } @@ -151,11 +157,13 @@ impl AsyncDiagnostics { import_roots: Vec, ) { let sequence = self.sequence.fetch_add(1, Ordering::SeqCst); + let file_id = self.path_interner.write().intern(&path); // Record this as the latest sequence for this path - self.latest_sequences.write().insert(path.clone(), sequence); + self.latest_sequences.write().insert(file_id, sequence); let request = DiagnosticsRequest { + file_id, path, text, version, @@ -204,7 +212,7 @@ impl AsyncDiagnostics { // Check if this request is still the latest for this file { let sequences = latest_sequences.read(); - if let Some(&latest) = sequences.get(&request.path) { + if let Some(&latest) = sequences.get(&request.file_id) { if latest > request.sequence { trace!( "Diagnostics worker: skipping stale request for {} (seq={}, latest={})", @@ -257,7 +265,7 @@ impl 
AsyncDiagnostics { // Check again if still the latest (diagnostics computation may have taken time) { let sequences = latest_sequences.read(); - if let Some(&latest) = sequences.get(&request.path) { + if let Some(&latest) = sequences.get(&request.file_id) { if latest > request.sequence { trace!( "Diagnostics worker: discarding result for {} (seq={}, latest={})", @@ -371,6 +379,40 @@ mod tests { .expect_err("should not receive more results after debouncing"); } + #[test] + fn test_debouncing_with_equivalent_paths() { + let runner = AsyncDiagnostics::new(test_config()); + + // Use distinct CanonicalPath instances with the same value. + runner.schedule( + test_path("test"), + "{ a: 1 }".to_string(), + DocVersion::new(1), + false, + vec![], + ); + runner.schedule( + test_path("test"), + "{ a: 2 }".to_string(), + DocVersion::new(2), + false, + vec![], + ); + + let result = runner + .results() + .recv_timeout(Duration::from_secs(3)) + .expect("should receive result"); + + assert_eq!(result.path, test_path("test")); + + // Should not get more results immediately (first request was debounced). + runner + .results() + .recv_timeout(Duration::from_millis(200)) + .expect_err("should not receive more results after debouncing"); + } + #[test] fn test_syntax_errors() { let runner = AsyncDiagnostics::new(test_config()); From e8ec2f501a7fa6ada0e56708cabe3bb4ad25b270 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Fri, 13 Feb 2026 13:37:13 +0000 Subject: [PATCH 102/210] refactor(lsp): enforce shared FileId path identity - add PathStore in jrsonnet-lsp-document as the shared typed boundary for path/id interning, and make FileId construction opaque. - wire a single shared PathStore through server construction (DocumentManager, ImportGraph, TypeCache, async diagnostics) so subsystems use one id space. - move parking_lot to workspace dependencies and consume it from affected crates. 
- remove Hash/Ord from CanonicalPath, replace path-keyed test maps with FileId keys, and make importer/path normalization deterministic without relying on CanonicalPath ordering. - update structural tests and benchmarks for the new constructor shapes and path/id behavior. --- Cargo.lock | 1 + Cargo.toml | 1 + crates/jrsonnet-lsp-document/Cargo.toml | 1 + crates/jrsonnet-lsp-document/src/file_ids.rs | 71 ++++++++++++++---- crates/jrsonnet-lsp-document/src/lib.rs | 2 +- crates/jrsonnet-lsp-document/src/types.rs | 2 +- crates/jrsonnet-lsp-handlers/src/rename.rs | 5 +- crates/jrsonnet-lsp-import/src/graph.rs | 73 +++++++++++-------- crates/jrsonnet-lsp-inference/Cargo.toml | 2 +- crates/jrsonnet-lsp-inference/src/manager.rs | 54 +++++++++----- crates/jrsonnet-lsp-inference/src/provider.rs | 49 ++++++++----- .../jrsonnet-lsp-inference/src/type_cache.rs | 44 ++++++----- crates/jrsonnet-lsp/Cargo.toml | 2 +- crates/jrsonnet-lsp/benches/type_cache.rs | 16 ++-- crates/jrsonnet-lsp/src/async_diagnostics.rs | 30 +++++--- crates/jrsonnet-lsp/src/server.rs | 25 +++++-- .../jrsonnet-lsp/src/server/async_requests.rs | 7 +- .../jrsonnet-lsp/src/server/import_graph.rs | 5 +- .../jrsonnet-lsp/src/server/notifications.rs | 3 +- crates/jrsonnet-lsp/tests/cross_file_tests.rs | 40 +++++----- 20 files changed, 267 insertions(+), 166 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 6fdaad37..8880392c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1870,6 +1870,7 @@ dependencies = [ "derive_more 1.0.0", "jrsonnet-rowan-parser", "lsp-types", + "parking_lot", "rowan", "thiserror 1.0.69", "url", diff --git a/Cargo.toml b/Cargo.toml index fd01131b..3b58aaa4 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -85,6 +85,7 @@ xshell = "0.2.6" lsp-server = "0.7.6" lsp-types = "0.97.0" +parking_lot = "0.12.5" url = "2.5.4" regex = "1.10" diff --git a/crates/jrsonnet-lsp-document/Cargo.toml b/crates/jrsonnet-lsp-document/Cargo.toml index 43f074ca..047cba82 100644 --- 
a/crates/jrsonnet-lsp-document/Cargo.toml +++ b/crates/jrsonnet-lsp-document/Cargo.toml @@ -11,6 +11,7 @@ description = "Document parsing, position conversion, and AST utilities for jrso derive_more = { version = "1", features = ["full"] } jrsonnet-rowan-parser = { version = "0.5.0-pre97", path = "../jrsonnet-rowan-parser" } lsp-types.workspace = true +parking_lot.workspace = true rowan.workspace = true thiserror.workspace = true url.workspace = true diff --git a/crates/jrsonnet-lsp-document/src/file_ids.rs b/crates/jrsonnet-lsp-document/src/file_ids.rs index 95c04025..d4f047f9 100644 --- a/crates/jrsonnet-lsp-document/src/file_ids.rs +++ b/crates/jrsonnet-lsp-document/src/file_ids.rs @@ -1,6 +1,8 @@ //! File identity and path interning utilities. -use std::collections::HashMap; +use std::{collections::HashMap, path::PathBuf, sync::Arc}; + +use parking_lot::RwLock; use crate::CanonicalPath; @@ -11,7 +13,7 @@ pub struct FileId(u32); impl FileId { /// Create a file id from a raw integer. #[must_use] - pub fn from_raw(raw: u32) -> Self { + fn from_raw(raw: u32) -> Self { Self(raw) } @@ -30,27 +32,21 @@ /// Bidirectional interner between canonical paths and stable file identifiers. #[derive(Debug, Clone, Default)] -pub struct PathInterner { - path_to_id: HashMap<CanonicalPath, FileId>, +struct PathInterner { + path_to_id: HashMap<PathBuf, FileId>, id_to_path: Vec<CanonicalPath>, } impl PathInterner { - /// Create an empty interner. - #[must_use] - pub fn new() -> Self { - Self::default() - } - /// Get an interned id for `path`, inserting it if needed. 
pub fn intern(&mut self, path: &CanonicalPath) -> FileId { - if let Some(&id) = self.path_to_id.get(path) { + if let Some(&id) = self.path_to_id.get(path.as_path()) { return id; } let raw = u32::try_from(self.id_to_path.len()).expect("too many interned file paths"); let id = FileId::from_raw(raw); - self.path_to_id.insert(path.clone(), id); + self.path_to_id.insert(path.as_path().to_path_buf(), id); self.id_to_path.push(path.clone()); id } @@ -58,7 +54,7 @@ impl PathInterner { /// Get an existing id for `path`. #[must_use] pub fn get_id(&self, path: &CanonicalPath) -> Option<FileId> { - self.path_to_id.get(path).copied() + self.path_to_id.get(path.as_path()).copied() } /// Resolve an interned id to its canonical path. @@ -68,6 +64,38 @@ impl PathInterner { } } +/// Thread-safe shared store for canonical paths and stable file ids. +#[derive(Debug, Clone, Default)] +pub struct PathStore { + interner: Arc<RwLock<PathInterner>>, +} + +impl PathStore { + /// Create an empty shared store. + #[must_use] + pub fn new() -> Self { + Self::default() + } + + /// Get an interned id for `path`, inserting it if needed. + #[must_use] + pub fn intern(&self, path: &CanonicalPath) -> FileId { + self.interner.write().intern(path) + } + + /// Get an existing id for `path`. + #[must_use] + pub fn get_id(&self, path: &CanonicalPath) -> Option<FileId> { + self.interner.read().get_id(path) + } + + /// Resolve an interned id to its canonical path. 
+ #[must_use] + pub fn get_path(&self, file_id: FileId) -> Option<CanonicalPath> { + self.interner.read().get_path(file_id).cloned() + } +} + #[cfg(test)] mod tests { use std::path::PathBuf; @@ -80,7 +108,7 @@ mod tests { #[test] fn test_path_interner_reuses_id_for_same_path() { - let mut interner = PathInterner::new(); + let mut interner = PathInterner::default(); let alpha = path("alpha"); let first = interner.intern(&alpha); @@ -93,7 +121,7 @@ #[test] fn test_path_interner_distinguishes_paths() { - let mut interner = PathInterner::new(); + let mut interner = PathInterner::default(); let alpha = path("alpha"); let beta = path("beta"); @@ -108,10 +136,21 @@ #[test] fn test_path_interner_unknown_lookup() { - let interner = PathInterner::new(); + let interner = PathInterner::default(); let alpha = path("alpha"); assert_eq!(interner.get_id(&alpha), None); assert_eq!(interner.get_path(FileId::from_raw(0)), None); } + + #[test] + fn test_path_store_shares_interned_ids_across_clones() { + let store = PathStore::new(); + let other = store.clone(); + let alpha = path("alpha"); + + let id = store.intern(&alpha); + assert_eq!(other.get_id(&alpha), Some(id)); + assert_eq!(other.get_path(id), Some(alpha)); + } } diff --git a/crates/jrsonnet-lsp-document/src/lib.rs b/crates/jrsonnet-lsp-document/src/lib.rs index 94044580..04e2c77d 100644 --- a/crates/jrsonnet-lsp-document/src/lib.rs +++ b/crates/jrsonnet-lsp-document/src/lib.rs @@ -26,7 +26,7 @@ pub use document::{Document, ParsedDocument, SharedDocument, SyntaxError}; pub use error::{ is_valid_jsonnet_identifier, validate_identifier, HandlerResult, LspError, LspResult, }; -pub use file_ids::{FileId, PathInterner}; +pub use file_ids::{FileId, PathStore}; pub use position::LineIndex; pub use types::{ ByteOffset, CanonicalPath, CharOffset, DocVersion, Line, LspPosition, LspRange, SymbolName, diff --git a/crates/jrsonnet-lsp-document/src/types.rs b/crates/jrsonnet-lsp-document/src/types.rs index 4b89cc5f..0669cb02 100644 
--- a/crates/jrsonnet-lsp-document/src/types.rs +++ b/crates/jrsonnet-lsp-document/src/types.rs @@ -117,7 +117,7 @@ impl From for lsp_types::Range { } /// Normalized canonical path as cache key. -#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Display, Deref)] +#[derive(Debug, Clone, PartialEq, Eq, Display, Deref)] #[display("{}", _0.display())] pub struct CanonicalPath(PathBuf); diff --git a/crates/jrsonnet-lsp-handlers/src/rename.rs b/crates/jrsonnet-lsp-handlers/src/rename.rs index b186614a..1b8028a6 100644 --- a/crates/jrsonnet-lsp-handlers/src/rename.rs +++ b/crates/jrsonnet-lsp-handlers/src/rename.rs @@ -698,7 +698,8 @@ mod tests { // Create the manager and import graph let global = Arc::new(GlobalTyStore::new()); - let manager = Arc::new(DocumentManager::new(global)); + let path_store = jrsonnet_lsp_document::PathStore::new(); + let manager = Arc::new(DocumentManager::new(global, path_store.clone())); let lib_canon = CanonicalPath::new(lib_path); let main_canon = CanonicalPath::new(main_path); @@ -712,7 +713,7 @@ mod tests { ); // Build import graph - let mut import_graph = ImportGraph::new(); + let mut import_graph = ImportGraph::new(path_store); if let Some(main_doc) = manager.get_document(&main_canon) { let entries = jrsonnet_lsp_import::parse_document_imports(&main_doc, &|import_path| { let import_full = temp_dir.path().join(import_path); diff --git a/crates/jrsonnet-lsp-import/src/graph.rs b/crates/jrsonnet-lsp-import/src/graph.rs index e13e3935..828ee87a 100644 --- a/crates/jrsonnet-lsp-import/src/graph.rs +++ b/crates/jrsonnet-lsp-import/src/graph.rs @@ -5,7 +5,7 @@ use std::collections::{HashMap, HashSet, VecDeque}; -use jrsonnet_lsp_document::{strip_string_quotes, CanonicalPath, Document, FileId, PathInterner}; +use jrsonnet_lsp_document::{strip_string_quotes, CanonicalPath, Document, FileId, PathStore}; use jrsonnet_rowan_parser::{ nodes::{Bind, Destruct, ExprImport, ImportKindKind, StmtLocal}, AstNode, AstToken, SyntaxKind, @@ -52,10 
+52,10 @@ pub struct ImportOccurrence { /// This structure maintains two maps: /// - `imports`: file → list of files it imports /// - `imported_by`: file → list of files that import it (reverse index) -#[derive(Debug, Default)] +#[derive(Debug)] pub struct ImportGraph { /// Interned mapping between canonical paths and stable file ids. - paths: PathInterner, + paths: PathStore, /// Map of file → import entries in that file. imports: HashMap>, /// Reverse index: file → files that import it. @@ -65,8 +65,12 @@ pub struct ImportGraph { impl ImportGraph { /// Create a new empty import graph. #[must_use] - pub fn new() -> Self { - Self::default() + pub fn new(paths: PathStore) -> Self { + Self { + paths, + imports: HashMap::new(), + imported_by: HashMap::new(), + } } /// Update the import graph for a file. @@ -151,7 +155,7 @@ impl ImportGraph { } #[must_use] - fn path_for_id(&self, file_id: FileId) -> Option<&CanonicalPath> { + fn path_for_id(&self, file_id: FileId) -> Option { self.paths.get_path(file_id) } @@ -175,10 +179,13 @@ impl ImportGraph { return Vec::new(); }; - self.direct_importers_by_id(file_id) + let mut importers: Vec<_> = self + .direct_importers_by_id(file_id) .into_iter() - .filter_map(|importer_id| self.path_for_id(importer_id).cloned()) - .collect() + .filter_map(|importer_id| self.path_for_id(importer_id)) + .collect(); + importers.sort_by(|a, b| a.as_path().cmp(b.as_path())); + importers } /// Get all files that transitively import a given file. @@ -186,9 +193,9 @@ impl ImportGraph { /// This performs a breadth-first search through the import graph /// to find all files that depend on the given file, directly or indirectly. 
#[must_use] - pub fn transitive_importers(&self, path: &CanonicalPath) -> HashSet { + pub fn transitive_importers(&self, path: &CanonicalPath) -> Vec { let Some(root_id) = self.id_for_path(path) else { - return HashSet::new(); + return Vec::new(); }; let mut result = HashSet::new(); @@ -202,10 +209,12 @@ impl ImportGraph { } } - result + let mut importers: Vec<_> = result .into_iter() - .filter_map(|importer_id| self.path_for_id(importer_id).cloned()) - .collect() + .filter_map(|importer_id| self.path_for_id(importer_id)) + .collect(); + importers.sort_by(|a, b| a.as_path().cmp(b.as_path())); + importers } /// Get the import entries for a file. @@ -248,7 +257,7 @@ impl ImportGraph { } /// Get all files tracked in the graph. - pub fn all_files(&self) -> impl Iterator { + pub fn all_files(&self) -> impl Iterator + '_ { self.imports .keys() .filter_map(|file_id| self.path_for_id(*file_id)) @@ -339,7 +348,7 @@ impl ImportGraph { .map(|level| { level .into_iter() - .filter_map(|file_id| self.path_for_id(file_id).cloned()) + .filter_map(|file_id| self.path_for_id(file_id)) .collect() }) .collect(), @@ -449,7 +458,7 @@ impl ImportGraph { // Process levels in dependency order (leaves first) levels.process_parallel(|path_id| { if let Some(path) = self.path_for_id(*path_id) { - f(path); + f(&path); } }); } @@ -486,7 +495,7 @@ impl ImportGraph { // Process levels (root first, then importers) levels.process_parallel(|path_id| { if let Some(path) = self.path_for_id(*path_id) { - f(path); + f(&path); } }); } @@ -890,7 +899,7 @@ lib1 + lib2 #[test] fn test_import_graph_update() { - let mut graph = ImportGraph::new(); + let mut graph = ImportGraph::new(PathStore::new()); let main = test_path("main.jsonnet"); let lib = test_path("lib.jsonnet"); @@ -918,7 +927,7 @@ lib1 + lib2 #[test] fn test_import_graph_lookups_with_equivalent_paths() { - let mut graph = ImportGraph::new(); + let mut graph = ImportGraph::new(PathStore::new()); let main = test_path("main.jsonnet"); let code = 
r#"local lib = import "lib.jsonnet"; lib"#; @@ -945,7 +954,7 @@ lib1 + lib2 #[test] fn test_import_graph_remove() { - let mut graph = ImportGraph::new(); + let mut graph = ImportGraph::new(PathStore::new()); let main = test_path("main.jsonnet"); let code = r#"local lib = import "lib.jsonnet"; lib"#; @@ -966,7 +975,7 @@ lib1 + lib2 #[test] fn test_transitive_importers() { - let mut graph = ImportGraph::new(); + let mut graph = ImportGraph::new(PathStore::new()); // Setup: main.jsonnet -> utils.jsonnet -> lib.jsonnet let main = test_path("main.jsonnet"); @@ -985,12 +994,12 @@ lib1 + lib2 // Check transitive importers of lib let importers = graph.transitive_importers(&lib); - assert_eq!(importers, HashSet::from([utils, main])); + assert_eq!(importers, vec![main, utils]); } #[test] fn test_imports_of_target() { - let mut graph = ImportGraph::new(); + let mut graph = ImportGraph::new(PathStore::new()); let main = test_path("main.jsonnet"); let lib = test_path("lib.jsonnet"); @@ -1018,7 +1027,7 @@ lib + other #[test] fn test_topological_order_simple() { - let mut graph = ImportGraph::new(); + let mut graph = ImportGraph::new(PathStore::new()); // Setup: main -> utils -> lib (chain dependency) let main = test_path("main.jsonnet"); @@ -1050,7 +1059,7 @@ lib + other #[test] fn test_topological_order_parallel_files() { - let mut graph = ImportGraph::new(); + let mut graph = ImportGraph::new(PathStore::new()); // Setup: main imports both utils1 and utils2 (independent) let main = test_path("main.jsonnet"); @@ -1085,7 +1094,7 @@ u1 + u2 fn test_process_in_parallel() { use std::sync::atomic::{AtomicUsize, Ordering}; - let mut graph = ImportGraph::new(); + let mut graph = ImportGraph::new(PathStore::new()); // Setup: main -> lib (chain) let main = test_path("main.jsonnet"); @@ -1113,7 +1122,7 @@ u1 + u2 fn test_process_in_parallel_order() { use std::sync::{Arc, Mutex}; - let mut graph = ImportGraph::new(); + let mut graph = ImportGraph::new(PathStore::new()); // Setup: main -> 
lib (chain) let main = test_path("main.jsonnet"); @@ -1143,7 +1152,7 @@ u1 + u2 fn test_process_with_dependencies() { use std::sync::{Arc, Mutex}; - let mut graph = ImportGraph::new(); + let mut graph = ImportGraph::new(PathStore::new()); // Setup: main -> utils -> lib let main = test_path("main.jsonnet"); @@ -1186,7 +1195,7 @@ u1 + u2 fn test_process_with_dependencies_filtered_by_kind() { use std::sync::{Arc, Mutex}; - let mut graph = ImportGraph::new(); + let mut graph = ImportGraph::new(PathStore::new()); let main = test_path("main.jsonnet"); let data = test_path("data.jsonnet"); @@ -1229,7 +1238,7 @@ u1 + u2 fn test_process_importers_with_work_queue() { use std::sync::{Arc, Mutex}; - let mut graph = ImportGraph::new(); + let mut graph = ImportGraph::new(PathStore::new()); // Setup: main -> utils -> lib let main = test_path("main.jsonnet"); @@ -1268,7 +1277,7 @@ u1 + u2 fn test_process_with_dependencies_unknown_root_is_noop() { use std::sync::{Arc, Mutex}; - let graph = ImportGraph::new(); + let graph = ImportGraph::new(PathStore::new()); let missing = test_path("missing.jsonnet"); let processed = Arc::new(Mutex::new(Vec::new())); let processed_clone = Arc::clone(&processed); diff --git a/crates/jrsonnet-lsp-inference/Cargo.toml b/crates/jrsonnet-lsp-inference/Cargo.toml index c3d27228..9a066256 100644 --- a/crates/jrsonnet-lsp-inference/Cargo.toml +++ b/crates/jrsonnet-lsp-inference/Cargo.toml @@ -19,7 +19,7 @@ jrsonnet-std-sig = { version = "0.5.0-pre97", path = "../jrsonnet-std-sig" } lru.workspace = true lsp-types.workspace = true moka = { version = "0.12", features = ["sync"] } -parking_lot = "0.12" +parking_lot.workspace = true rayon = "1.11.0" rowan.workspace = true rustc-hash.workspace = true diff --git a/crates/jrsonnet-lsp-inference/src/manager.rs b/crates/jrsonnet-lsp-inference/src/manager.rs index 9f576bf2..4acd438e 100644 --- a/crates/jrsonnet-lsp-inference/src/manager.rs +++ b/crates/jrsonnet-lsp-inference/src/manager.rs @@ -7,7 +7,7 @@ use 
std::{num::NonZeroUsize, sync::Arc}; use dashmap::DashMap; use jrsonnet_lsp_document::{ - CanonicalPath, DocVersion, Document, FileId, PathInterner, DEFAULT_ANALYSIS_CACHE_CAPACITY, + CanonicalPath, DocVersion, Document, FileId, PathStore, DEFAULT_ANALYSIS_CACHE_CAPACITY, DEFAULT_CLOSED_CACHE_CAPACITY, }; use jrsonnet_lsp_types::GlobalTyStore; @@ -42,7 +42,7 @@ struct CachedSemanticArtifacts { /// This type is `Sync` and can be safely shared across threads. pub struct DocumentManager { /// Interned mapping between canonical paths and stable file ids. - paths: RwLock, + paths: PathStore, /// Open documents keyed by interned file ids. open: DashMap, /// LRU cache for recently closed documents. @@ -57,21 +57,25 @@ pub struct DocumentManager { impl Default for DocumentManager { fn default() -> Self { - Self::new(Arc::new(GlobalTyStore::new())) + Self::new(Arc::new(GlobalTyStore::new()), PathStore::new()) } } impl DocumentManager { /// Create a new document manager with a shared global type store. - pub fn new(global_types: Arc) -> Self { - Self::with_capacity(global_types, DEFAULT_CLOSED_CACHE_CAPACITY) + pub fn new(global_types: Arc, paths: PathStore) -> Self { + Self::with_capacity(global_types, DEFAULT_CLOSED_CACHE_CAPACITY, paths) } /// Create a new document manager with specific capacities. 
- pub fn with_capacity(global_types: Arc, closed_capacity: usize) -> Self { + pub fn with_capacity( + global_types: Arc, + closed_capacity: usize, + paths: PathStore, + ) -> Self { let closed_capacity = NonZeroUsize::new(closed_capacity).unwrap_or(NonZeroUsize::MIN); Self { - paths: RwLock::new(PathInterner::new()), + paths, open: DashMap::new(), closed: RwLock::new(LruCache::new(closed_capacity)), analysis_cache: MokaCache::new(DEFAULT_ANALYSIS_CACHE_CAPACITY as u64), @@ -86,15 +90,15 @@ impl DocumentManager { } fn intern_path(&self, path: &CanonicalPath) -> FileId { - self.paths.write().intern(path) + self.paths.intern(path) } fn file_id(&self, path: &CanonicalPath) -> Option { - self.paths.read().get_id(path) + self.paths.get_id(path) } fn path_for_id(&self, file_id: FileId) -> Option { - self.paths.read().get_path(file_id).cloned() + self.paths.get_path(file_id) } fn invalidate_analysis_by_id(&self, file_id: FileId) { @@ -495,7 +499,8 @@ mod tests { #[test] fn test_open_and_get() { - let manager = DocumentManager::new(test_global_store()); + let manager = + DocumentManager::new(test_global_store(), jrsonnet_lsp_document::PathStore::new()); let path = test_path("test"); manager.open(path.clone(), "{ a: 1 }".to_string(), DocVersion::new(1)); @@ -507,7 +512,8 @@ mod tests { #[test] fn test_open_and_get_with_equivalent_path() { - let manager = DocumentManager::new(test_global_store()); + let manager = + DocumentManager::new(test_global_store(), jrsonnet_lsp_document::PathStore::new()); let path = test_path("test"); manager.open(path, "{ a: 1 }".to_string(), DocVersion::new(1)); @@ -520,7 +526,8 @@ mod tests { #[test] fn test_update() { - let manager = DocumentManager::new(test_global_store()); + let manager = + DocumentManager::new(test_global_store(), jrsonnet_lsp_document::PathStore::new()); let path = test_path("test"); manager.open(path.clone(), "{ a: 1 }".to_string(), DocVersion::new(1)); @@ -533,7 +540,8 @@ mod tests { #[test] fn test_close_moves_to_cache() { 
- let manager = DocumentManager::new(test_global_store()); + let manager = + DocumentManager::new(test_global_store(), jrsonnet_lsp_document::PathStore::new()); let path = test_path("test"); manager.open(path.clone(), "{ a: 1 }".to_string(), DocVersion::new(1)); @@ -546,7 +554,8 @@ mod tests { #[test] fn test_reopen_clears_from_cache() { - let manager = DocumentManager::new(test_global_store()); + let manager = + DocumentManager::new(test_global_store(), jrsonnet_lsp_document::PathStore::new()); let path = test_path("test"); manager.open(path.clone(), "{ a: 1 }".to_string(), DocVersion::new(1)); @@ -561,7 +570,8 @@ mod tests { #[test] fn test_missing_path_operations_are_noop() { - let manager = DocumentManager::new(test_global_store()); + let manager = + DocumentManager::new(test_global_store(), jrsonnet_lsp_document::PathStore::new()); let path = test_path("missing"); assert_eq!(manager.get(&path).map(|doc| doc.text().to_string()), None); @@ -579,7 +589,8 @@ mod tests { #[test] fn test_multiple_documents() { - let manager = DocumentManager::new(test_global_store()); + let manager = + DocumentManager::new(test_global_store(), jrsonnet_lsp_document::PathStore::new()); // Open multiple documents sequentially. 
for i in 0..10 { @@ -593,7 +604,8 @@ mod tests { #[test] fn test_analysis_caching() { - let manager = DocumentManager::new(test_global_store()); + let manager = + DocumentManager::new(test_global_store(), jrsonnet_lsp_document::PathStore::new()); let path = test_path("test"); manager.open( @@ -615,7 +627,8 @@ mod tests { #[test] fn test_analysis_cache_invalidation() { - let manager = DocumentManager::new(test_global_store()); + let manager = + DocumentManager::new(test_global_store(), jrsonnet_lsp_document::PathStore::new()); let path = test_path("test"); manager.open( @@ -639,7 +652,8 @@ mod tests { #[test] fn test_analysis_cache_version_mismatch() { - let manager = DocumentManager::new(test_global_store()); + let manager = + DocumentManager::new(test_global_store(), jrsonnet_lsp_document::PathStore::new()); let path = test_path("test"); manager.open( diff --git a/crates/jrsonnet-lsp-inference/src/provider.rs b/crates/jrsonnet-lsp-inference/src/provider.rs index 0b5c4a63..815b79b3 100644 --- a/crates/jrsonnet-lsp-inference/src/provider.rs +++ b/crates/jrsonnet-lsp-inference/src/provider.rs @@ -149,7 +149,7 @@ fn resolved_imports_for( #[cfg(test)] mod tests { use dashmap::DashMap; - use jrsonnet_lsp_document::{CanonicalPath, DocVersion}; + use jrsonnet_lsp_document::{CanonicalPath, DocVersion, FileId, PathStore}; use jrsonnet_lsp_types::Ty; use super::*; @@ -157,24 +157,28 @@ mod tests { /// Test document source backed by a `DashMap`. 
struct TestDocSource { - docs: DashMap, + paths: PathStore, + docs: DashMap, } impl TestDocSource { - fn new() -> Self { + fn new(paths: PathStore) -> Self { Self { + paths, docs: DashMap::new(), } } fn insert(&self, path: CanonicalPath, doc: Document) { - self.docs.insert(path, doc); + let file_id = self.paths.intern(&path); + self.docs.insert(file_id, doc); } } impl DocumentSource for TestDocSource { fn get_document(&self, path: &CanonicalPath) -> Option { - self.docs.get(path).map(|r| r.clone()) + let file_id = self.paths.get_id(path)?; + self.docs.get(&file_id).map(|r| r.clone()) } } @@ -185,9 +189,10 @@ mod tests { #[test] fn test_provider_analyze_simple() { let global_types = Arc::new(GlobalTyStore::new()); - let type_cache = new_shared_cache(Arc::clone(&global_types)); - let import_graph = Arc::new(RwLock::new(ImportGraph::new())); - let doc_source = TestDocSource::new(); + let path_store = PathStore::new(); + let type_cache = new_shared_cache(Arc::clone(&global_types), path_store.clone()); + let import_graph = Arc::new(RwLock::new(ImportGraph::new(path_store))); + let doc_source = TestDocSource::new(PathStore::new()); let provider = TypeProvider::new(type_cache, import_graph, global_types); @@ -206,9 +211,10 @@ mod tests { #[test] fn test_provider_ensures_dependencies_analyzed() { let global_types = Arc::new(GlobalTyStore::new()); - let type_cache = new_shared_cache(Arc::clone(&global_types)); - let import_graph = Arc::new(RwLock::new(ImportGraph::new())); - let doc_source = TestDocSource::new(); + let path_store = PathStore::new(); + let type_cache = new_shared_cache(Arc::clone(&global_types), path_store.clone()); + let import_graph = Arc::new(RwLock::new(ImportGraph::new(path_store.clone()))); + let doc_source = TestDocSource::new(path_store); // Add imported file let dep_path = test_path("dep.jsonnet"); @@ -254,9 +260,10 @@ mod tests { #[test] fn test_provider_uses_graph_resolved_import_paths() { let global_types = Arc::new(GlobalTyStore::new()); - let 
type_cache = new_shared_cache(Arc::clone(&global_types)); - let import_graph = Arc::new(RwLock::new(ImportGraph::new())); - let doc_source = TestDocSource::new(); + let path_store = PathStore::new(); + let type_cache = new_shared_cache(Arc::clone(&global_types), path_store.clone()); + let import_graph = Arc::new(RwLock::new(ImportGraph::new(path_store.clone()))); + let doc_source = TestDocSource::new(path_store); let dep_path = test_path("deps/dep.jsonnet"); let dep_doc = Document::new("42".to_string(), DocVersion(1)); @@ -292,9 +299,10 @@ mod tests { #[test] fn test_provider_importstr_infers_string() { let global_types = Arc::new(GlobalTyStore::new()); - let type_cache = new_shared_cache(Arc::clone(&global_types)); - let import_graph = Arc::new(RwLock::new(ImportGraph::new())); - let doc_source = TestDocSource::new(); + let path_store = PathStore::new(); + let type_cache = new_shared_cache(Arc::clone(&global_types), path_store.clone()); + let import_graph = Arc::new(RwLock::new(ImportGraph::new(path_store.clone()))); + let doc_source = TestDocSource::new(path_store); let main_path = test_path("main.jsonnet"); let main_doc = Document::new(r#"importstr "./script.k""#.to_string(), DocVersion(1)); @@ -328,9 +336,10 @@ mod tests { #[test] fn test_provider_importbin_infers_bounded_byte_array() { let global_types = Arc::new(GlobalTyStore::new()); - let type_cache = new_shared_cache(Arc::clone(&global_types)); - let import_graph = Arc::new(RwLock::new(ImportGraph::new())); - let doc_source = TestDocSource::new(); + let path_store = PathStore::new(); + let type_cache = new_shared_cache(Arc::clone(&global_types), path_store.clone()); + let import_graph = Arc::new(RwLock::new(ImportGraph::new(path_store.clone()))); + let doc_source = TestDocSource::new(path_store); let main_path = test_path("main.jsonnet"); let main_doc = Document::new(r#"importbin "./script.k""#.to_string(), DocVersion(1)); diff --git a/crates/jrsonnet-lsp-inference/src/type_cache.rs 
b/crates/jrsonnet-lsp-inference/src/type_cache.rs index ae81f1ec..5a97f76d 100644 --- a/crates/jrsonnet-lsp-inference/src/type_cache.rs +++ b/crates/jrsonnet-lsp-inference/src/type_cache.rs @@ -11,7 +11,7 @@ use std::{num::NonZeroUsize, sync::Arc}; use jrsonnet_lsp_document::{ - CanonicalPath, Document, FileId, PathInterner, DEFAULT_TYPE_CACHE_CAPACITY, + CanonicalPath, Document, FileId, PathStore, DEFAULT_TYPE_CACHE_CAPACITY, }; use jrsonnet_lsp_types::{GlobalTy, GlobalTyStore}; use lru::LruCache; @@ -32,7 +32,7 @@ pub struct TypeCache { /// LRU cache from file path to its cached type. cache: LruCache<FileId, CachedType>, /// Interned mapping between canonical paths and stable file ids. - paths: PathInterner, + paths: PathStore, /// Global type store for shared types. global_types: Arc<GlobalTyStore>, } @@ -48,16 +48,20 @@ struct CachedType { impl TypeCache { /// Create a new empty type cache with default capacity. - pub fn new(global_types: Arc<GlobalTyStore>) -> Self { - Self::with_capacity(global_types, DEFAULT_TYPE_CACHE_CAPACITY) + pub fn new(global_types: Arc<GlobalTyStore>, paths: PathStore) -> Self { + Self::with_capacity(global_types, DEFAULT_TYPE_CACHE_CAPACITY, paths) } /// Create a new type cache with the specified capacity. - pub fn with_capacity(global_types: Arc<GlobalTyStore>, capacity: usize) -> Self { + pub fn with_capacity( + global_types: Arc<GlobalTyStore>, + capacity: usize, + paths: PathStore, + ) -> Self { let capacity = NonZeroUsize::new(capacity).unwrap_or(NonZeroUsize::MIN); Self { cache: LruCache::new(capacity), - paths: PathInterner::new(), + paths, global_types, } } @@ -140,8 +144,8 @@ impl TypeCache { pub type SharedTypeCache = Arc<RwLock<TypeCache>>; /// Create a new shared type cache with the given global type store. -pub fn new_shared_cache(global_types: Arc<GlobalTyStore>) -> SharedTypeCache { - Arc::new(RwLock::new(TypeCache::new(global_types))) +pub fn new_shared_cache(global_types: Arc<GlobalTyStore>, paths: PathStore) -> SharedTypeCache { + Arc::new(RwLock::new(TypeCache::new(global_types, paths))) } /// Analyze a document and update the type cache. 
@@ -257,6 +261,10 @@ mod tests { Arc::new(GlobalTyStore::new()) } + fn test_path_store() -> PathStore { + PathStore::new() + } + /// Assert that the cache contains exactly the specified global type entries. fn assert_cache_contents_ty(cache: &TypeCache, expected: &[(&str, GlobalTy)]) { let actual: BTreeSet<_> = cache @@ -278,7 +286,7 @@ mod tests { #[test] fn test_equivalent_path_lookup() { - let mut cache = TypeCache::new(test_global_store()); + let mut cache = TypeCache::new(test_global_store(), test_path_store()); let path = test_path("main.jsonnet"); cache.update(&path, GlobalTy::NUMBER, 1); @@ -289,7 +297,7 @@ mod tests { #[test] fn test_cache_basic_ty() { - let mut cache = TypeCache::new(test_global_store()); + let mut cache = TypeCache::new(test_global_store(), test_path_store()); let path = test_path("main.jsonnet"); // Initially empty @@ -314,7 +322,7 @@ mod tests { #[test] fn test_analyze_and_cache() { let global_types = test_global_store(); - let cache = new_shared_cache(global_types); + let cache = new_shared_cache(global_types, test_path_store()); let path = test_path("test.jsonnet"); let doc = Document::new("42".to_string(), DocVersion::new(1)); @@ -337,7 +345,7 @@ mod tests { #[test] fn test_multiple_files_ty() { - let mut cache = TypeCache::new(test_global_store()); + let mut cache = TypeCache::new(test_global_store(), test_path_store()); let path1 = test_path("file1.jsonnet"); let path2 = test_path("file2.jsonnet"); @@ -359,7 +367,7 @@ mod tests { #[test] fn test_invalidate_many_ty() { - let mut cache = TypeCache::new(test_global_store()); + let mut cache = TypeCache::new(test_global_store(), test_path_store()); let path1 = test_path("lib.jsonnet"); let path2 = test_path("utils.jsonnet"); @@ -391,7 +399,7 @@ mod tests { #[test] fn test_basic_get_update() { - let mut cache = TypeCache::new(test_global_store()); + let mut cache = TypeCache::new(test_global_store(), test_path_store()); let path = test_path("test.jsonnet"); // Update with a global type 
@@ -408,7 +416,7 @@ mod tests { #[test] fn test_global_store_access() { let global_types = test_global_store(); - let mut cache = TypeCache::new(Arc::clone(&global_types)); + let mut cache = TypeCache::new(Arc::clone(&global_types), test_path_store()); let path = test_path("test.jsonnet"); // Cache a type - types are stored in the shared global store @@ -424,7 +432,7 @@ mod tests { #[test] fn test_clear() { - let mut cache = TypeCache::new(test_global_store()); + let mut cache = TypeCache::new(test_global_store(), test_path_store()); let path1 = test_path("a.jsonnet"); let path2 = test_path("b.jsonnet"); @@ -444,7 +452,7 @@ mod tests { #[test] fn test_lru_eviction() { // Create a cache with capacity 3 - let mut cache = TypeCache::with_capacity(test_global_store(), 3); + let mut cache = TypeCache::with_capacity(test_global_store(), 3, test_path_store()); let path1 = test_path("file1.jsonnet"); let path2 = test_path("file2.jsonnet"); @@ -477,7 +485,7 @@ mod tests { #[test] fn test_capacity_zero_falls_back_to_one() { // Verify with_capacity(0) doesn't panic and has minimum capacity - let mut cache = TypeCache::with_capacity(test_global_store(), 0); + let mut cache = TypeCache::with_capacity(test_global_store(), 0, test_path_store()); let path = test_path("test.jsonnet"); cache.update(&path, GlobalTy::NUMBER, 1); assert_eq!(cache.get(&path), Some(GlobalTy::NUMBER)); diff --git a/crates/jrsonnet-lsp/Cargo.toml b/crates/jrsonnet-lsp/Cargo.toml index b1171384..a9e42fc3 100644 --- a/crates/jrsonnet-lsp/Cargo.toml +++ b/crates/jrsonnet-lsp/Cargo.toml @@ -21,7 +21,7 @@ jrsonnet-rowan-parser = { version = "0.5.0-pre97", path = "../jrsonnet-rowan-par jrsonnet-stdlib.workspace = true lsp-server.workspace = true lsp-types.workspace = true -parking_lot = "0.12" +parking_lot.workspace = true rayon = "1.11.0" rustc-hash.workspace = true serde = { workspace = true, features = ["derive"] } diff --git a/crates/jrsonnet-lsp/benches/type_cache.rs 
b/crates/jrsonnet-lsp/benches/type_cache.rs index 26afe1b2..0229ee65 100644 --- a/crates/jrsonnet-lsp/benches/type_cache.rs +++ b/crates/jrsonnet-lsp/benches/type_cache.rs @@ -5,7 +5,7 @@ use std::sync::Arc; use criterion::{black_box, criterion_group, criterion_main, BenchmarkId, Criterion}; -use jrsonnet_lsp_document::CanonicalPath; +use jrsonnet_lsp_document::{CanonicalPath, PathStore}; use jrsonnet_lsp_inference::{new_shared_cache, TypeCache}; use jrsonnet_lsp_types::{GlobalTy, GlobalTyStore}; @@ -19,7 +19,7 @@ fn bench_cache_operations(c: &mut Criterion) { // Benchmark cache updates for size in [100, 500, 1000] { let global = Arc::new(GlobalTyStore::new()); - let mut cache = TypeCache::new(Arc::clone(&global)); + let mut cache = TypeCache::new(Arc::clone(&global), PathStore::new()); group.bench_with_input(BenchmarkId::new("update", size), &size, |b, &size| { b.iter(|| { @@ -34,7 +34,7 @@ fn bench_cache_operations(c: &mut Criterion) { // Benchmark cache lookups (cache hit) for size in [100, 500, 1000] { let global = Arc::new(GlobalTyStore::new()); - let mut cache = TypeCache::new(Arc::clone(&global)); + let mut cache = TypeCache::new(Arc::clone(&global), PathStore::new()); // Pre-populate cache for i in 0..size { @@ -55,7 +55,7 @@ fn bench_cache_operations(c: &mut Criterion) { // Benchmark cache lookups (cache miss) for size in [100, 500, 1000] { let global = Arc::new(GlobalTyStore::new()); - let cache = TypeCache::new(Arc::clone(&global)); + let cache = TypeCache::new(Arc::clone(&global), PathStore::new()); group.bench_with_input(BenchmarkId::new("lookup_miss", size), &size, |b, &size| { b.iter(|| { @@ -75,7 +75,7 @@ fn bench_cache_operations(c: &mut Criterion) { b.iter_batched( || { // Setup: create and populate cache - let mut cache = TypeCache::new(Arc::clone(&global)); + let mut cache = TypeCache::new(Arc::clone(&global), PathStore::new()); for i in 0..size { let path = make_path(i); cache.update(&path, GlobalTy::NUMBER, 1); @@ -105,7 +105,7 @@ fn 
bench_cache_operations(c: &mut Criterion) { b.iter_batched( || { // Setup: create and populate cache - let mut cache = TypeCache::new(Arc::clone(&global)); + let mut cache = TypeCache::new(Arc::clone(&global), PathStore::new()); let paths: Vec<_> = (0..size).map(make_path).collect(); for path in &paths { cache.update(path, GlobalTy::NUMBER, 1); @@ -131,7 +131,7 @@ fn bench_shared_cache(c: &mut Criterion) { // Benchmark concurrent access patterns (simulated via sequential access with locking) for size in [100, 500] { let global = Arc::new(GlobalTyStore::new()); - let cache = new_shared_cache(Arc::clone(&global)); + let cache = new_shared_cache(Arc::clone(&global), PathStore::new()); // Pre-populate { @@ -170,7 +170,7 @@ fn bench_version_check(c: &mut Criterion) { let mut group = c.benchmark_group("version_check"); let global = Arc::new(GlobalTyStore::new()); - let mut cache = TypeCache::new(Arc::clone(&global)); + let mut cache = TypeCache::new(Arc::clone(&global), PathStore::new()); // Pre-populate with version 1 for i in 0..1000 { diff --git a/crates/jrsonnet-lsp/src/async_diagnostics.rs b/crates/jrsonnet-lsp/src/async_diagnostics.rs index fc8b4c6c..2c535eaf 100644 --- a/crates/jrsonnet-lsp/src/async_diagnostics.rs +++ b/crates/jrsonnet-lsp/src/async_diagnostics.rs @@ -14,7 +14,7 @@ use std::{ }; use crossbeam_channel::{Receiver, Sender}; -use jrsonnet_lsp_document::{CanonicalPath, DocVersion, Document, FileId, PathInterner}; +use jrsonnet_lsp_document::{CanonicalPath, DocVersion, Document, FileId, PathStore}; use jrsonnet_lsp_import::{ImportGraph, ImportResolution}; use jrsonnet_lsp_inference::{ DocumentSource, SharedDocumentManager, SharedTypeCache, TypeProvider, @@ -73,6 +73,8 @@ pub struct DiagnosticsConfig { pub type_cache: SharedTypeCache, /// Shared global type store. pub global_types: Arc, + /// Shared file id/path store. 
+ pub path_store: PathStore, } struct WorkerDocumentSource { @@ -109,8 +111,8 @@ pub struct AsyncDiagnostics { sequence: AtomicU64, /// Latest requested sequence per file (for debouncing). latest_sequences: Arc>>, - /// Interned mapping between canonical paths and stable file ids. - path_interner: Arc>, + /// Shared file id/path store. + path_store: PathStore, /// Background thread handle. _thread_handle: thread::JoinHandle<()>, } @@ -122,7 +124,7 @@ impl AsyncDiagnostics { let (request_sender, request_receiver) = crossbeam_channel::unbounded(); let (result_sender, result_receiver) = crossbeam_channel::unbounded(); let latest_sequences = Arc::new(RwLock::new(FxHashMap::default())); - let path_interner = Arc::new(RwLock::new(PathInterner::new())); + let path_store = config.path_store.clone(); let sequences_clone = Arc::clone(&latest_sequences); let thread_handle = thread::spawn(move || { @@ -139,7 +141,7 @@ impl AsyncDiagnostics { result_receiver, sequence: AtomicU64::new(0), latest_sequences, - path_interner, + path_store, _thread_handle: thread_handle, } } @@ -157,7 +159,7 @@ impl AsyncDiagnostics { import_roots: Vec, ) { let sequence = self.sequence.fetch_add(1, Ordering::SeqCst); - let file_id = self.path_interner.write().intern(&path); + let file_id = self.path_store.intern(&path); // Record this as the latest sequence for this path self.latest_sequences.write().insert(file_id, sequence); @@ -315,14 +317,20 @@ mod tests { fn test_config() -> DiagnosticsConfig { let global_types = Arc::new(GlobalTyStore::new()); + let path_store = PathStore::new(); DiagnosticsConfig { evaluator: None, - documents: Arc::new(jrsonnet_lsp_inference::DocumentManager::new(Arc::clone( - &global_types, - ))), - import_graph: Arc::new(RwLock::new(ImportGraph::new())), - type_cache: jrsonnet_lsp_inference::new_shared_cache(Arc::clone(&global_types)), + documents: Arc::new(jrsonnet_lsp_inference::DocumentManager::new( + Arc::clone(&global_types), + path_store.clone(), + )), + 
import_graph: Arc::new(RwLock::new(ImportGraph::new(path_store.clone()))), + type_cache: jrsonnet_lsp_inference::new_shared_cache( + Arc::clone(&global_types), + path_store.clone(), + ), global_types, + path_store, } } diff --git a/crates/jrsonnet-lsp/src/server.rs b/crates/jrsonnet-lsp/src/server.rs index e5c23779..a0a12d8f 100644 --- a/crates/jrsonnet-lsp/src/server.rs +++ b/crates/jrsonnet-lsp/src/server.rs @@ -16,7 +16,7 @@ use std::{ use anyhow::{Context, Result}; use crossbeam_channel::{select, Receiver, Sender}; -use jrsonnet_lsp_document::{CanonicalPath, DocVersion}; +use jrsonnet_lsp_document::{CanonicalPath, DocVersion, PathStore}; use jrsonnet_lsp_handlers as handlers; use jrsonnet_lsp_import::{ImportGraph, ImportResolution}; use jrsonnet_lsp_inference::{ @@ -75,6 +75,8 @@ pub struct Server { documents: SharedDocumentManager, /// Import graph for cross-file references. import_graph: Arc>, + /// Shared file id/path store. + path_store: PathStore, /// Global type store shared across all analyses. global_types: Arc, /// Cross-file type cache for import resolution. @@ -118,14 +120,23 @@ const SUPPORTED_EXECUTE_COMMANDS: [&str; 5] = [ const WATCHED_FILE_GLOB_PATTERNS: [&str; 3] = ["**/*.jsonnet", "**/*.libsonnet", "**/*.json"]; +pub(super) fn normalize_paths(paths: &mut Vec) { + paths.sort_by(|a, b| a.as_path().cmp(b.as_path())); + paths.dedup(); +} + impl Server { /// Create a new server with the given connection. 
#[must_use] pub fn new(connection: Connection) -> Self { let global_types = Arc::new(GlobalTyStore::new()); - let documents = Arc::new(DocumentManager::new(Arc::clone(&global_types))); - let import_graph = Arc::new(RwLock::new(ImportGraph::new())); - let type_cache = new_shared_cache(Arc::clone(&global_types)); + let path_store = PathStore::new(); + let documents = Arc::new(DocumentManager::new( + Arc::clone(&global_types), + path_store.clone(), + )); + let import_graph = Arc::new(RwLock::new(ImportGraph::new(path_store.clone()))); + let type_cache = new_shared_cache(Arc::clone(&global_types), path_store.clone()); let (request_response_sender, request_response_receiver) = crossbeam_channel::unbounded(); let inflight_requests = InflightRequests::new(connection.sender.clone()); let diagnostics = AsyncDiagnostics::new(DiagnosticsConfig { @@ -134,12 +145,14 @@ impl Server { import_graph: Arc::clone(&import_graph), type_cache: Arc::clone(&type_cache), global_types: Arc::clone(&global_types), + path_store: path_store.clone(), }); Self { connection, documents, import_graph, + path_store, type_cache, global_types, config: Arc::new(RwLock::new(ServerConfig::default())), @@ -212,6 +225,7 @@ impl Server { import_graph: Arc::clone(&self.import_graph), type_cache: Arc::clone(&self.type_cache), global_types: Arc::clone(&self.global_types), + path_store: self.path_store.clone(), }); } @@ -327,8 +341,7 @@ impl Server { for root in &roots { files.extend(Self::collect_workspace_files(root)); } - files.sort(); - files.dedup(); + normalize_paths(&mut files); let file_count = files.len(); for path in &files { diff --git a/crates/jrsonnet-lsp/src/server/async_requests.rs b/crates/jrsonnet-lsp/src/server/async_requests.rs index a09d4af3..c0cb640c 100644 --- a/crates/jrsonnet-lsp/src/server/async_requests.rs +++ b/crates/jrsonnet-lsp/src/server/async_requests.rs @@ -17,7 +17,7 @@ use parking_lot::RwLock; use rayon::prelude::*; use tracing::{info, warn}; -use super::SharedConfig; +use 
super::{normalize_paths, SharedConfig}; use crate::analysis::{ eval::create_state_with_jpath, tanka::effective_import_roots, EvalConfig, Evaluator, }; @@ -288,11 +288,10 @@ impl AsyncRequestContext { let mut paths = { let import_graph = self.import_graph.read(); - import_graph.all_files().cloned().collect::>() + import_graph.all_files().collect::>() }; paths.extend(self.documents.open_paths()); - paths.sort(); - paths.dedup(); + normalize_paths(&mut paths); let mut all_symbols: Vec = paths .into_par_iter() diff --git a/crates/jrsonnet-lsp/src/server/import_graph.rs b/crates/jrsonnet-lsp/src/server/import_graph.rs index eb6f25e7..a1830a7c 100644 --- a/crates/jrsonnet-lsp/src/server/import_graph.rs +++ b/crates/jrsonnet-lsp/src/server/import_graph.rs @@ -12,11 +12,10 @@ impl Server { pub(super) fn tracked_paths_for_reindex(&self) -> Vec { let mut paths = { let import_graph = self.import_graph.read(); - import_graph.all_files().cloned().collect::>() + import_graph.all_files().collect::>() }; paths.extend(self.documents.open_paths()); - paths.sort(); - paths.dedup(); + normalize_paths(&mut paths); paths } diff --git a/crates/jrsonnet-lsp/src/server/notifications.rs b/crates/jrsonnet-lsp/src/server/notifications.rs index 99173473..d0f3719d 100644 --- a/crates/jrsonnet-lsp/src/server/notifications.rs +++ b/crates/jrsonnet-lsp/src/server/notifications.rs @@ -294,8 +294,7 @@ impl Server { } } - changed_paths.sort(); - changed_paths.dedup(); + normalize_paths(&mut changed_paths); for path in changed_paths { if self.documents.is_open(&path) { self.schedule_diagnostics(&path); diff --git a/crates/jrsonnet-lsp/tests/cross_file_tests.rs b/crates/jrsonnet-lsp/tests/cross_file_tests.rs index 3053fdd6..b69f613e 100644 --- a/crates/jrsonnet-lsp/tests/cross_file_tests.rs +++ b/crates/jrsonnet-lsp/tests/cross_file_tests.rs @@ -9,7 +9,7 @@ use std::{ sync::Arc, }; -use jrsonnet_lsp_document::{CanonicalPath, DocVersion, Document}; +use jrsonnet_lsp_document::{CanonicalPath, DocVersion, 
Document, PathStore}; use jrsonnet_lsp_import::ImportGraph; use jrsonnet_lsp_inference::{analyze_and_cache, new_shared_cache, TypeAnalysis, TypeCache}; use jrsonnet_lsp_types::{GlobalTy, GlobalTyStore, Ty}; @@ -78,7 +78,7 @@ mod import_graph_tests { "local f2 = import 'file2.jsonnet'; { value: 1, next: f2 }", ); - let mut graph = ImportGraph::new(); + let mut graph = ImportGraph::new(PathStore::new()); let resolver = make_resolver(base_dir); // Parse all files @@ -139,7 +139,7 @@ mod import_graph_tests { ", ); - let mut graph = ImportGraph::new(); + let mut graph = ImportGraph::new(PathStore::new()); let resolver = make_resolver(base_dir); for file in [&file_d, &file_b, &file_c, &file_a] { @@ -181,7 +181,7 @@ mod import_graph_tests { "local lib = import 'lib.jsonnet'; lib.helper", ); - let mut graph = ImportGraph::new(); + let mut graph = ImportGraph::new(PathStore::new()); let resolver = make_resolver(base_dir); // Add both files @@ -230,7 +230,7 @@ mod import_graph_tests { "local s = import 'shared.jsonnet'; s.x * 2", ); - let mut graph = ImportGraph::new(); + let mut graph = ImportGraph::new(PathStore::new()); let resolver = make_resolver(base_dir); for file in [&shared, &user1, &user2, &user3] { @@ -253,7 +253,7 @@ mod type_cache_tests { #[test] fn test_cache_basic_types() { let global = Arc::new(GlobalTyStore::new()); - let mut cache = TypeCache::new(Arc::clone(&global)); + let mut cache = TypeCache::new(Arc::clone(&global), PathStore::new()); let tmp = TempDir::new().unwrap(); let file1 = write_file(&tmp, "number.jsonnet", "42"); @@ -278,7 +278,7 @@ mod type_cache_tests { #[test] fn test_cache_version_tracking() { let global = Arc::new(GlobalTyStore::new()); - let mut cache = TypeCache::new(Arc::clone(&global)); + let mut cache = TypeCache::new(Arc::clone(&global), PathStore::new()); let tmp = TempDir::new().unwrap(); let file = write_file(&tmp, "test.jsonnet", "1"); @@ -298,7 +298,7 @@ mod type_cache_tests { #[test] fn test_cache_invalidation() { let global 
= Arc::new(GlobalTyStore::new()); - let mut cache = TypeCache::new(Arc::clone(&global)); + let mut cache = TypeCache::new(Arc::clone(&global), PathStore::new()); let tmp = TempDir::new().unwrap(); let file1 = write_file(&tmp, "a.jsonnet", "1"); @@ -326,7 +326,7 @@ mod type_cache_tests { #[test] fn test_cache_invalidate_many() { let global = Arc::new(GlobalTyStore::new()); - let mut cache = TypeCache::new(Arc::clone(&global)); + let mut cache = TypeCache::new(Arc::clone(&global), PathStore::new()); let tmp = TempDir::new().unwrap(); let files: Vec<_> = (0..10) @@ -357,7 +357,7 @@ mod cross_file_type_tests { #[test] fn test_analyze_and_cache_basic() { let global = Arc::new(GlobalTyStore::new()); - let cache = new_shared_cache(Arc::clone(&global)); + let cache = new_shared_cache(Arc::clone(&global), PathStore::new()); let tmp = TempDir::new().unwrap(); let file = write_file(&tmp, "number.jsonnet", "42"); @@ -378,7 +378,7 @@ mod cross_file_type_tests { #[test] fn test_analyze_and_cache_different_types() { let global = Arc::new(GlobalTyStore::new()); - let cache = new_shared_cache(Arc::clone(&global)); + let cache = new_shared_cache(Arc::clone(&global), PathStore::new()); let tmp = TempDir::new().unwrap(); // Test various Jsonnet literal types @@ -402,7 +402,7 @@ mod cross_file_type_tests { #[test] fn test_cache_hit_on_same_version() { let global = Arc::new(GlobalTyStore::new()); - let cache = new_shared_cache(Arc::clone(&global)); + let cache = new_shared_cache(Arc::clone(&global), PathStore::new()); let tmp = TempDir::new().unwrap(); let file = write_file(&tmp, "test.jsonnet", "42"); @@ -422,7 +422,7 @@ mod cross_file_type_tests { #[test] fn test_cache_miss_on_new_version() { let global = Arc::new(GlobalTyStore::new()); - let cache = new_shared_cache(Arc::clone(&global)); + let cache = new_shared_cache(Arc::clone(&global), PathStore::new()); let tmp = TempDir::new().unwrap(); let file = write_file(&tmp, "test.jsonnet", "42"); @@ -442,7 +442,7 @@ mod 
cross_file_type_tests { #[test] fn test_shared_global_store() { let global = Arc::new(GlobalTyStore::new()); - let cache = new_shared_cache(Arc::clone(&global)); + let cache = new_shared_cache(Arc::clone(&global), PathStore::new()); let tmp = TempDir::new().unwrap(); // Analyze multiple files @@ -480,10 +480,10 @@ mod transitive_update_tests { let mid = write_file(&tmp, "mid.jsonnet", "local b = import 'base.jsonnet'; b"); let top_file = write_file(&tmp, "top.jsonnet", "local m = import 'mid.jsonnet'; m"); - let mut graph = ImportGraph::new(); + let mut graph = ImportGraph::new(PathStore::new()); let resolver = make_resolver(base_dir); let global = Arc::new(GlobalTyStore::new()); - let mut cache = TypeCache::new(Arc::clone(&global)); + let mut cache = TypeCache::new(Arc::clone(&global), PathStore::new()); // Build graph for file in [&base, &mid, &top_file] { @@ -524,10 +524,10 @@ mod transitive_update_tests { let lib2 = write_file(&tmp, "lib2.jsonnet", "{ b: 2 }"); let main = write_file(&tmp, "main.jsonnet", "local l1 = import 'lib1.jsonnet'; l1"); - let mut graph = ImportGraph::new(); + let mut graph = ImportGraph::new(PathStore::new()); let resolver = make_resolver(base_dir); let global = Arc::new(GlobalTyStore::new()); - let mut cache = TypeCache::new(Arc::clone(&global)); + let mut cache = TypeCache::new(Arc::clone(&global), PathStore::new()); // Build graph - main imports lib1, not lib2 for file in [&lib1, &lib2, &main] { @@ -571,10 +571,10 @@ mod transitive_update_tests { ", ); - let mut graph = ImportGraph::new(); + let mut graph = ImportGraph::new(PathStore::new()); let resolver = make_resolver(base_dir); let global = Arc::new(GlobalTyStore::new()); - let mut cache = TypeCache::new(Arc::clone(&global)); + let mut cache = TypeCache::new(Arc::clone(&global), PathStore::new()); for file in [&d, &b, &c, &a] { let content = fs::read_to_string(file).unwrap(); From 781405cc4c76b722223c1408c97fbb8662fd1d60 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Fri, 13 Feb 
2026 14:45:08 +0000 Subject: [PATCH 103/210] refactor(lsp): remove path access shims from interner APIs - split PathStore into an intern-only writer and shared PathResolver read handle, with interned paths stored as shared Arc values. - migrate ImportGraph, DocumentManager, TypeCache, server request/notification flows, and cross-file handlers to resolver/file-id APIs and remove the old with_path/get_path style accessors. - keep URI/path handling typed at boundaries while making internal graph/cache traversal file-id first, including deterministic traversal updates and helper cleanup. - update structural tests and benches to exercise equivalent-path behavior through the shared interner model and new resolver-based lookups. --- crates/jrsonnet-lsp-document/src/file_ids.rs | 74 ++-- crates/jrsonnet-lsp-document/src/lib.rs | 2 +- .../jrsonnet-lsp-handlers/src/references.rs | 18 +- crates/jrsonnet-lsp-handlers/src/rename.rs | 31 +- crates/jrsonnet-lsp-import/src/graph.rs | 318 +++++++++--------- crates/jrsonnet-lsp-inference/src/manager.rs | 69 +++- crates/jrsonnet-lsp-inference/src/provider.rs | 78 +++-- .../jrsonnet-lsp-inference/src/type_cache.rs | 228 ++++++++----- crates/jrsonnet-lsp/benches/type_cache.rs | 104 +++--- crates/jrsonnet-lsp/src/server.rs | 38 ++- .../jrsonnet-lsp/src/server/async_requests.rs | 50 +-- .../jrsonnet-lsp/src/server/import_graph.rs | 92 ++--- .../jrsonnet-lsp/src/server/notifications.rs | 64 ++-- crates/jrsonnet-lsp/tests/cross_file_tests.rs | 187 ++++++---- 14 files changed, 807 insertions(+), 546 deletions(-) diff --git a/crates/jrsonnet-lsp-document/src/file_ids.rs b/crates/jrsonnet-lsp-document/src/file_ids.rs index d4f047f9..54ad237e 100644 --- a/crates/jrsonnet-lsp-document/src/file_ids.rs +++ b/crates/jrsonnet-lsp-document/src/file_ids.rs @@ -34,7 +34,7 @@ impl FileId { #[derive(Debug, Clone, Default)] struct PathInterner { path_to_id: HashMap, - id_to_path: Vec, + id_to_path: Vec>, } impl PathInterner { @@ -47,20 +47,40 @@ impl 
PathInterner { let raw = u32::try_from(self.id_to_path.len()).expect("too many interned file paths"); let id = FileId::from_raw(raw); self.path_to_id.insert(path.as_path().to_path_buf(), id); - self.id_to_path.push(path.clone()); + self.id_to_path.push(Arc::new(path.clone())); id } /// Get an existing id for `path`. #[must_use] - pub fn get_id(&self, path: &CanonicalPath) -> Option { + pub fn file(&self, path: &CanonicalPath) -> Option { self.path_to_id.get(path.as_path()).copied() } /// Resolve an interned id to its canonical path. #[must_use] - pub fn get_path(&self, file_id: FileId) -> Option<&CanonicalPath> { - self.id_to_path.get(file_id.as_usize()) + pub fn path(&self, file: FileId) -> Option> { + self.id_to_path.get(file.as_usize()).cloned() + } +} + +/// Read-only resolver for a shared interned path store. +#[derive(Debug, Clone)] +pub struct PathResolver { + interner: Arc>, +} + +impl PathResolver { + /// Resolve a canonical path to an already interned file identifier. + #[must_use] + pub fn file(&self, path: &CanonicalPath) -> Option { + self.interner.read().file(path) + } + + /// Resolve an interned file identifier to its canonical path. + #[must_use] + pub fn path(&self, file: FileId) -> Option> { + self.interner.read().path(file) } } @@ -83,16 +103,12 @@ impl PathStore { self.interner.write().intern(path) } - /// Get an existing id for `path`. + /// Get a read-only resolver for already interned paths. #[must_use] - pub fn get_id(&self, path: &CanonicalPath) -> Option { - self.interner.read().get_id(path) - } - - /// Resolve an interned id to its canonical path. 
- #[must_use] - pub fn get_path(&self, file_id: FileId) -> Option { - self.interner.read().get_path(file_id).cloned() + pub fn resolver(&self) -> PathResolver { + PathResolver { + interner: Arc::clone(&self.interner), + } } } @@ -115,8 +131,8 @@ mod tests { let second = interner.intern(&alpha); assert_eq!(first, second); - assert_eq!(interner.get_id(&alpha), Some(first)); - assert_eq!(interner.get_path(first), Some(&alpha)); + assert_eq!(interner.file(&alpha), Some(first)); + assert_eq!(interner.path(first), Some(Arc::new(alpha))); } #[test] @@ -130,8 +146,8 @@ mod tests { assert_eq!(alpha_id, FileId::from_raw(0)); assert_eq!(beta_id, FileId::from_raw(1)); - assert_eq!(interner.get_path(alpha_id), Some(&alpha)); - assert_eq!(interner.get_path(beta_id), Some(&beta)); + assert_eq!(interner.path(alpha_id), Some(Arc::new(alpha))); + assert_eq!(interner.path(beta_id), Some(Arc::new(beta))); } #[test] @@ -139,8 +155,8 @@ mod tests { let interner = PathInterner::default(); let alpha = path("alpha"); - assert_eq!(interner.get_id(&alpha), None); - assert_eq!(interner.get_path(FileId::from_raw(0)), None); + assert_eq!(interner.file(&alpha), None); + assert_eq!(interner.path(FileId::from_raw(0)), None); } #[test] @@ -150,7 +166,21 @@ mod tests { let alpha = path("alpha"); let id = store.intern(&alpha); - assert_eq!(other.get_id(&alpha), Some(id)); - assert_eq!(other.get_path(id), Some(alpha)); + let resolver = other.resolver(); + assert_eq!(resolver.file(&alpha), Some(id)); + assert_eq!(resolver.path(id), Some(Arc::new(alpha))); + } + + #[test] + fn test_path_store_path_returns_shared_arc() { + let store = PathStore::new(); + let resolver = store.resolver(); + let alpha = path("alpha"); + let id = store.intern(&alpha); + + let first = resolver.path(id).expect("path should exist"); + let second = resolver.path(id).expect("path should exist"); + assert!(Arc::ptr_eq(&first, &second)); + assert_eq!(first.as_ref(), &alpha); } } diff --git a/crates/jrsonnet-lsp-document/src/lib.rs 
b/crates/jrsonnet-lsp-document/src/lib.rs index 04e2c77d..1724c460 100644 --- a/crates/jrsonnet-lsp-document/src/lib.rs +++ b/crates/jrsonnet-lsp-document/src/lib.rs @@ -26,7 +26,7 @@ pub use document::{Document, ParsedDocument, SharedDocument, SyntaxError}; pub use error::{ is_valid_jsonnet_identifier, validate_identifier, HandlerResult, LspError, LspResult, }; -pub use file_ids::{FileId, PathStore}; +pub use file_ids::{FileId, PathResolver, PathStore}; pub use position::LineIndex; pub use types::{ ByteOffset, CanonicalPath, CharOffset, DocVersion, Line, LspPosition, LspRange, SymbolName, diff --git a/crates/jrsonnet-lsp-handlers/src/references.rs b/crates/jrsonnet-lsp-handlers/src/references.rs index 9465a7ce..778165d0 100644 --- a/crates/jrsonnet-lsp-handlers/src/references.rs +++ b/crates/jrsonnet-lsp-handlers/src/references.rs @@ -3,7 +3,9 @@ //! Finds all references to a symbol within the current document and across //! all open/importing documents (cross-file references). -use jrsonnet_lsp_document::{to_lsp_range, token_at_offset, CanonicalPath, Document, LspPosition}; +use jrsonnet_lsp_document::{ + to_lsp_range, token_at_offset, CanonicalPath, Document, FileId, LspPosition, +}; use jrsonnet_lsp_import::ImportGraph; use jrsonnet_lsp_inference::SemanticArtifacts; use jrsonnet_lsp_scope::{ @@ -165,13 +167,19 @@ pub fn find_cross_file_references_with_semantic<'a>( else { return Vec::new(); }; + let Some(current_file) = import_graph.file(current_path) else { + return Vec::new(); + }; // Search all other documents for imports of this file (in parallel) let references: Vec = documents .par_iter() .filter(|(doc_path, _, _)| *doc_path != current_path) .flat_map(|(doc_path, doc, semantic)| { - let import_bindings = import_binding_names(import_graph, doc_path, current_path); + let Some(importer_file) = import_graph.file(doc_path) else { + return Vec::new(); + }; + let import_bindings = import_binding_names(import_graph, importer_file, current_file); if 
import_bindings.is_empty() { return Vec::new(); } @@ -200,11 +208,11 @@ pub fn find_cross_file_references_with_semantic<'a>( fn import_binding_names( import_graph: &ImportGraph, - importer_path: &CanonicalPath, - target_path: &CanonicalPath, + importer_file: FileId, + target_file: FileId, ) -> Vec { let mut bindings: Vec = import_graph - .imports_of_target(importer_path, target_path) + .imports_of_target(importer_file, target_file) .into_iter() .filter_map(|entry| entry.binding_name.clone()) .collect(); diff --git a/crates/jrsonnet-lsp-handlers/src/rename.rs b/crates/jrsonnet-lsp-handlers/src/rename.rs index 1b8028a6..cf12c041 100644 --- a/crates/jrsonnet-lsp-handlers/src/rename.rs +++ b/crates/jrsonnet-lsp-handlers/src/rename.rs @@ -20,7 +20,7 @@ use std::{ }; use jrsonnet_lsp_document::{ - to_lsp_range, token_at_offset, CanonicalPath, Document, LspPosition, SymbolName, + to_lsp_range, token_at_offset, CanonicalPath, Document, FileId, LspPosition, SymbolName, }; use jrsonnet_lsp_import::ImportGraph; use jrsonnet_lsp_inference::DocumentManager; @@ -212,15 +212,18 @@ pub fn rename_cross_file( } let old_name = token.text().to_string(); + let Some(current_file) = import_graph.file(current_path) else { + return workspace_edit_from_changes(all_changes); + }; // Find files that import this file - let importers = import_graph.transitive_importers(current_path); + let importers = import_graph.transitive_importers(current_file); // Find references in each importing file - for importer_path in &importers { + for importer_file in importers { if let Some((importer_uri, edits)) = find_references_in_importer( - importer_path, - current_path, + importer_file, + current_file, &old_name, new_name, manager, @@ -235,11 +238,11 @@ pub fn rename_cross_file( fn import_binding_names( import_graph: &ImportGraph, - importer_path: &CanonicalPath, - source_path: &CanonicalPath, + importer_file: FileId, + source_file: FileId, ) -> HashSet { import_graph - .imports_of_target(importer_path, 
source_path) + .imports_of_target(importer_file, source_file) .into_iter() .filter_map(|entry| entry.binding_name.clone()) .collect() @@ -253,8 +256,8 @@ fn import_binding_names( /// lib.field_name // This is a reference to field_name in source.jsonnet /// ``` fn find_references_in_importer( - importer_path: &CanonicalPath, - source_path: &CanonicalPath, + importer_file: FileId, + source_file: FileId, old_name: &str, new_name: &SymbolName, manager: &Arc, @@ -262,14 +265,15 @@ fn find_references_in_importer( ) -> Option<(Uri, Vec)> { use jrsonnet_rowan_parser::nodes::{ExprBase, ExprField}; - let doc = manager.get_document(importer_path)?; + let importer_path = import_graph.path(importer_file)?; + let doc = manager.get_document(importer_path.as_ref())?; let uri = importer_path.to_uri().ok()?; let text = doc.text(); let line_index = doc.line_index(); let ast = doc.ast(); let mut edits = Vec::new(); - let import_bindings = import_binding_names(import_graph, importer_path, source_path); + let import_bindings = import_binding_names(import_graph, importer_file, source_file); if import_bindings.is_empty() { return None; @@ -719,7 +723,8 @@ mod tests { let import_full = temp_dir.path().join(import_path); import_full.canonicalize().ok().map(CanonicalPath::new) }); - import_graph.update_file_with_entries(&main_canon, entries); + let main_file = import_graph.intern(&main_canon); + import_graph.update_file_with_entries(main_file, entries); } // Get the lib document diff --git a/crates/jrsonnet-lsp-import/src/graph.rs b/crates/jrsonnet-lsp-import/src/graph.rs index 828ee87a..7506c1f4 100644 --- a/crates/jrsonnet-lsp-import/src/graph.rs +++ b/crates/jrsonnet-lsp-import/src/graph.rs @@ -5,7 +5,9 @@ use std::collections::{HashMap, HashSet, VecDeque}; -use jrsonnet_lsp_document::{strip_string_quotes, CanonicalPath, Document, FileId, PathStore}; +use jrsonnet_lsp_document::{ + strip_string_quotes, CanonicalPath, Document, FileId, PathResolver, PathStore, +}; use 
jrsonnet_rowan_parser::{ nodes::{Bind, Destruct, ExprImport, ImportKindKind, StmtLocal}, AstNode, AstToken, SyntaxKind, @@ -56,6 +58,8 @@ pub struct ImportOccurrence { pub struct ImportGraph { /// Interned mapping between canonical paths and stable file ids. paths: PathStore, + /// Read-only resolver over interned mapping. + resolver: PathResolver, /// Map of file → import entries in that file. imports: HashMap>, /// Reverse index: file → files that import it. @@ -66,13 +70,33 @@ impl ImportGraph { /// Create a new empty import graph. #[must_use] pub fn new(paths: PathStore) -> Self { + let resolver = paths.resolver(); Self { paths, + resolver, imports: HashMap::new(), imported_by: HashMap::new(), } } + /// Get or create the interned file id for `path`. + #[must_use] + pub fn intern(&self, path: &CanonicalPath) -> FileId { + self.paths.intern(path) + } + + /// Resolve a file path to an interned file identifier. + #[must_use] + pub fn file(&self, path: &CanonicalPath) -> Option { + self.resolver.file(path) + } + + /// Resolve an interned file identifier to a path. + #[must_use] + pub fn path(&self, file: FileId) -> Option> { + self.resolver.path(file) + } + /// Update the import graph for a file. /// /// This parses the document to find all imports, resolves their paths, @@ -82,10 +106,9 @@ impl ImportGraph { /// This is the preferred method when you want to minimize lock hold time. /// Parse the imports first using [`parse_document_imports`], then call this /// method while holding the write lock. - pub fn update_file_with_entries(&mut self, path: &CanonicalPath, entries: Vec) { - let file_id = self.paths.intern(path); + pub fn update_file_with_entries(&mut self, file_id: FileId, entries: Vec) { // Remove old entries for this file - self.remove_file(path); + self.remove_file(file_id); // Update imported_by reverse index for entry in &entries { @@ -107,27 +130,23 @@ impl ImportGraph { /// This parses the document and updates the import graph atomically. 
/// For better performance when parsing is slow, use [`parse_document_imports`] /// followed by [`update_file_with_entries`] to parse outside the lock. - pub fn update_file(&mut self, path: &CanonicalPath, doc: &Document, resolve_import: F) + pub fn update_file(&mut self, file_id: FileId, doc: &Document, resolve_import: F) where F: Fn(&str) -> Option, { let entries = parse_document_imports(doc, &resolve_import); - self.update_file_with_entries(path, entries); + self.update_file_with_entries(file_id, entries); } /// Remove a file from the import graph. /// /// This removes the file's import entries and updates the reverse index. - pub fn remove_file(&mut self, path: &CanonicalPath) { - let Some(file_id) = self.paths.get_id(path) else { - return; - }; - + pub fn remove_file(&mut self, file_id: FileId) { // Remove from imported_by reverse index if let Some(old_entries) = self.imports.get(&file_id) { for entry in old_entries { if let Some(ref resolved) = entry.resolved_path { - if let Some(resolved_id) = self.paths.get_id(resolved) { + if let Some(resolved_id) = self.resolver.file(resolved) { let should_remove_entry = self .imported_by .get_mut(&resolved_id) @@ -156,12 +175,9 @@ impl ImportGraph { #[must_use] fn path_for_id(&self, file_id: FileId) -> Option { - self.paths.get_path(file_id) - } - - #[must_use] - fn id_for_path(&self, path: &CanonicalPath) -> Option { - self.paths.get_id(path) + self.resolver + .path(file_id) + .map(|path| path.as_ref().clone()) } #[must_use] @@ -169,22 +185,14 @@ impl ImportGraph { entry .resolved_path .as_ref() - .and_then(|path| self.paths.get_id(path)) + .and_then(|path| self.resolver.file(path)) } /// Get the files that directly import a given file. 
#[must_use] - pub fn direct_importers(&self, path: &CanonicalPath) -> Vec { - let Some(file_id) = self.id_for_path(path) else { - return Vec::new(); - }; - - let mut importers: Vec<_> = self - .direct_importers_by_id(file_id) - .into_iter() - .filter_map(|importer_id| self.path_for_id(importer_id)) - .collect(); - importers.sort_by(|a, b| a.as_path().cmp(b.as_path())); + pub fn direct_importers(&self, file: FileId) -> Vec { + let mut importers = self.direct_importers_by_id(file); + importers.sort_unstable(); importers } @@ -193,13 +201,9 @@ impl ImportGraph { /// This performs a breadth-first search through the import graph /// to find all files that depend on the given file, directly or indirectly. #[must_use] - pub fn transitive_importers(&self, path: &CanonicalPath) -> Vec { - let Some(root_id) = self.id_for_path(path) else { - return Vec::new(); - }; - + pub fn transitive_importers(&self, file: FileId) -> Vec { let mut result = HashSet::new(); - let mut queue = VecDeque::from([root_id]); + let mut queue = VecDeque::from([file]); while let Some(current) = queue.pop_front() { for importer in self.direct_importers_by_id(current) { @@ -209,36 +213,19 @@ impl ImportGraph { } } - let mut importers: Vec<_> = result - .into_iter() - .filter_map(|importer_id| self.path_for_id(importer_id)) - .collect(); - importers.sort_by(|a, b| a.as_path().cmp(b.as_path())); + let mut importers: Vec<_> = result.into_iter().collect(); + importers.sort_unstable(); importers } /// Get the import entries for a file. - pub fn imports(&self, path: &CanonicalPath) -> &[ImportEntry] { - let Some(file_id) = self.id_for_path(path) else { - return &[]; - }; - self.imports.get(&file_id).map_or(&[], Vec::as_slice) + pub fn imports(&self, file: FileId) -> &[ImportEntry] { + self.imports.get(&file).map_or(&[], Vec::as_slice) } /// Find imports in a file that point to a specific target file. 
#[must_use] - pub fn imports_of_target( - &self, - file: &CanonicalPath, - target: &CanonicalPath, - ) -> Vec<&ImportEntry> { - let Some(file_id) = self.id_for_path(file) else { - return Vec::new(); - }; - let Some(target_id) = self.id_for_path(target) else { - return Vec::new(); - }; - + pub fn imports_of_target(&self, file_id: FileId, target_id: FileId) -> Vec<&ImportEntry> { self.imports .get(&file_id) .map(|entries| { @@ -257,10 +244,14 @@ impl ImportGraph { } /// Get all files tracked in the graph. - pub fn all_files(&self) -> impl Iterator + '_ { - self.imports - .keys() - .filter_map(|file_id| self.path_for_id(*file_id)) + pub fn all_files(&self) -> impl Iterator + '_ { + self.imports.keys().copied() + } + + /// Get all tracked file paths. + pub fn all_paths(&self) -> impl Iterator + '_ { + self.all_files() + .filter_map(|file_id| self.path_for_id(file_id)) } /// Compute a topological ordering of files based on import dependencies. @@ -271,7 +262,7 @@ impl ImportGraph { /// /// Returns `None` if there's a cycle in the import graph. 
#[must_use] - pub fn topological_order(&self) -> Option>> { + pub fn topological_order(&self) -> Option>> { let mut in_degree: HashMap = HashMap::new(); let mut levels: Vec> = Vec::new(); @@ -342,17 +333,7 @@ impl ImportGraph { // Check if all files were processed (no cycles) if processed.len() == self.imports.len() { - Some( - levels - .into_iter() - .map(|level| { - level - .into_iter() - .filter_map(|file_id| self.path_for_id(file_id)) - .collect() - }) - .collect(), - ) + Some(levels) } else { None // Cycle detected } @@ -373,13 +354,13 @@ impl ImportGraph { /// /// # Example /// ```ignore - /// graph.process_in_parallel(|path| { - /// analyze_file(path); + /// graph.process_in_parallel(|file| { + /// analyze_file(file); /// }); /// ``` pub fn process_in_parallel(&self, f: F) -> Option<()> where - F: Fn(&CanonicalPath) + Sync, + F: Fn(FileId) + Sync, { use rayon::prelude::*; @@ -387,7 +368,7 @@ impl ImportGraph { // Process each level sequentially, but files within each level in parallel for level in levels { - level.par_iter().for_each(&f); + level.par_iter().copied().for_each(&f); } Some(()) @@ -400,7 +381,7 @@ impl ImportGraph { /// you need to process dependents before their dependencies. 
pub fn process_in_parallel_reverse(&self, f: F) -> Option<()> where - F: Fn(&CanonicalPath) + Sync, + F: Fn(FileId) + Sync, { use rayon::prelude::*; @@ -408,7 +389,7 @@ impl ImportGraph { // Process levels in reverse order for level in levels.into_iter().rev() { - level.par_iter().for_each(&f); + level.par_iter().copied().for_each(&f); } Some(()) @@ -425,21 +406,17 @@ impl ImportGraph { /// /// # Example /// ```ignore - /// graph.process_with_dependencies(&path, |_| true, |p| { - /// analyze_file(p); + /// graph.process_with_dependencies(file, |_| true, |dep| { + /// analyze_file(dep); /// }); /// ``` - pub fn process_with_dependencies(&self, root: &CanonicalPath, include_dependency: P, f: F) + pub fn process_with_dependencies(&self, root: FileId, include_dependency: P, f: F) where - F: Fn(&CanonicalPath) + Sync, + F: Fn(FileId) + Sync, P: Fn(&ImportEntry) -> bool + Sync, { - let Some(root_id) = self.id_for_path(root) else { - return; - }; - let mut work = WorkQueue::new(); - work.push(root_id); + work.push(root); let levels = work.run(|path_id, deps| { // Get dependencies from import graph @@ -456,11 +433,7 @@ impl ImportGraph { }); // Process levels in dependency order (leaves first) - levels.process_parallel(|path_id| { - if let Some(path) = self.path_for_id(*path_id) { - f(&path); - } - }); + levels.process_parallel(|path_id| f(*path_id)); } /// Process a file and its transitive importers using a work queue. @@ -470,16 +443,12 @@ impl ImportGraph { /// /// Useful for invalidation cascading: when a file changes, process it /// and all files that depend on it. 
- pub fn process_importers_with_work_queue(&self, root: &CanonicalPath, f: F) + pub fn process_importers_with_work_queue(&self, root: FileId, f: F) where - F: Fn(&CanonicalPath) + Sync, + F: Fn(FileId) + Sync, { - let Some(root_id) = self.id_for_path(root) else { - return; - }; - let mut work = WorkQueue::new(); - work.push(root_id); + work.push(root); let mut levels = work.run(|path_id, deps| { // Get files that import this file @@ -493,11 +462,7 @@ impl ImportGraph { levels.reverse(); // Process levels (root first, then importers) - levels.process_parallel(|path_id| { - if let Some(path) = self.path_for_id(*path_id) { - f(&path); - } - }); + levels.process_parallel(|path_id| f(*path_id)); } } @@ -779,6 +744,13 @@ mod tests { } } + fn graph_paths(graph: &ImportGraph, files: Vec) -> Vec { + files + .into_iter() + .filter_map(|file| graph.path(file).map(|path| path.as_ref().clone())) + .collect() + } + #[test] fn test_parse_local_import() { let code = r#"local lib = import "lib.jsonnet"; lib"#; @@ -906,10 +878,10 @@ lib1 + lib2 let code = r#"local lib = import "lib.jsonnet"; lib"#; let doc = Document::new(code.to_string(), DocVersion::new(1)); - graph.update_file(&main, &doc, simple_resolver); + graph.update_file(graph.intern(&main), &doc, simple_resolver); // Check that main imports lib - let imports = graph.imports(&main); + let imports = graph.imports(graph.intern(&main)); assert_eq!( imports, vec![ImportEntry { @@ -921,8 +893,8 @@ lib1 + lib2 ); // Check the reverse index - let importers = graph.direct_importers(&lib); - assert_eq!(importers, vec![main.clone()]); + let importers = graph_paths(&graph, graph.direct_importers(graph.intern(&lib))); + assert_eq!(importers, vec![main]); } #[test] @@ -932,17 +904,17 @@ lib1 + lib2 let main = test_path("main.jsonnet"); let code = r#"local lib = import "lib.jsonnet"; lib"#; let doc = Document::new(code.to_string(), DocVersion::new(1)); - graph.update_file(&main, &doc, simple_resolver); + 
graph.update_file(graph.intern(&main), &doc, simple_resolver); let main_lookup = test_path("main.jsonnet"); let lib_lookup = test_path("lib.jsonnet"); assert_eq!( - graph.direct_importers(&lib_lookup), + graph_paths(&graph, graph.direct_importers(graph.intern(&lib_lookup)),), vec![main_lookup.clone()] ); assert_eq!( - graph.imports_of_target(&main_lookup, &lib_lookup), + graph.imports_of_target(graph.intern(&main_lookup), graph.intern(&lib_lookup),), vec![&ImportEntry { kind: ImportKind::Code, binding_name: Some("lib".to_string()), @@ -960,17 +932,17 @@ lib1 + lib2 let code = r#"local lib = import "lib.jsonnet"; lib"#; let doc = Document::new(code.to_string(), DocVersion::new(1)); - graph.update_file(&main, &doc, simple_resolver); + graph.update_file(graph.intern(&main), &doc, simple_resolver); // Remove main - graph.remove_file(&main); + graph.remove_file(graph.intern(&main)); // Check that main no longer has imports - assert!(graph.imports(&main).is_empty()); + assert!(graph.imports(graph.intern(&main)).is_empty()); // Check the reverse index is updated let lib = test_path("lib.jsonnet"); - assert!(graph.direct_importers(&lib).is_empty()); + assert!(graph.direct_importers(graph.intern(&lib)).is_empty()); } #[test] @@ -985,15 +957,15 @@ lib1 + lib2 // main imports utils let main_code = r#"local utils = import "utils.jsonnet"; utils"#; let main_doc = Document::new(main_code.to_string(), DocVersion::new(1)); - graph.update_file(&main, &main_doc, simple_resolver); + graph.update_file(graph.intern(&main), &main_doc, simple_resolver); // utils imports lib let utils_code = r#"local lib = import "lib.jsonnet"; lib"#; let utils_doc = Document::new(utils_code.to_string(), DocVersion::new(1)); - graph.update_file(&utils, &utils_doc, simple_resolver); + graph.update_file(graph.intern(&utils), &utils_doc, simple_resolver); // Check transitive importers of lib - let importers = graph.transitive_importers(&lib); + let importers = graph_paths(&graph, 
graph.transitive_importers(graph.intern(&lib))); assert_eq!(importers, vec![main, utils]); } @@ -1010,10 +982,10 @@ local other = import "other.jsonnet"; lib + other "#; let doc = Document::new(code.to_string(), DocVersion::new(1)); - graph.update_file(&main, &doc, simple_resolver); + graph.update_file(graph.intern(&main), &doc, simple_resolver); // Get imports of lib.jsonnet from main - let imports = graph.imports_of_target(&main, &lib); + let imports = graph.imports_of_target(graph.intern(&main), graph.intern(&lib)); assert_eq!( imports, vec![&ImportEntry { @@ -1037,19 +1009,27 @@ lib + other // lib has no imports let lib_code = "{}"; let lib_doc = Document::new(lib_code.to_string(), DocVersion::new(1)); - graph.update_file(&lib, &lib_doc, simple_resolver); + graph.update_file(graph.intern(&lib), &lib_doc, simple_resolver); // utils imports lib let utils_code = r#"local lib = import "lib.jsonnet"; lib"#; let utils_doc = Document::new(utils_code.to_string(), DocVersion::new(1)); - graph.update_file(&utils, &utils_doc, simple_resolver); + graph.update_file(graph.intern(&utils), &utils_doc, simple_resolver); // main imports utils let main_code = r#"local utils = import "utils.jsonnet"; utils"#; let main_doc = Document::new(main_code.to_string(), DocVersion::new(1)); - graph.update_file(&main, &main_doc, simple_resolver); + graph.update_file(graph.intern(&main), &main_doc, simple_resolver); - let levels = graph.topological_order().expect("Should not have cycles"); + let levels = graph + .topological_order() + .map(|levels| { + levels + .into_iter() + .map(|level| graph_paths(&graph, level)) + .collect::>() + }) + .expect("Should not have cycles"); // lib should be in first level (no deps) // utils should be in second level (depends on lib) @@ -1068,11 +1048,11 @@ lib + other // utils1 has no imports let utils1_doc = Document::new("{}".to_string(), DocVersion::new(1)); - graph.update_file(&utils1, &utils1_doc, simple_resolver); + 
graph.update_file(graph.intern(&utils1), &utils1_doc, simple_resolver); // utils2 has no imports let utils2_doc = Document::new("{}".to_string(), DocVersion::new(1)); - graph.update_file(&utils2, &utils2_doc, simple_resolver); + graph.update_file(graph.intern(&utils2), &utils2_doc, simple_resolver); // main imports both let main_code = r#" @@ -1081,9 +1061,17 @@ local u2 = import "utils2.jsonnet"; u1 + u2 "#; let main_doc = Document::new(main_code.to_string(), DocVersion::new(1)); - graph.update_file(&main, &main_doc, simple_resolver); + graph.update_file(graph.intern(&main), &main_doc, simple_resolver); - let levels = graph.topological_order().expect("Should not have cycles"); + let levels = graph + .topological_order() + .map(|levels| { + levels + .into_iter() + .map(|level| graph_paths(&graph, level)) + .collect::>() + }) + .expect("Should not have cycles"); // utils1 and utils2 should be in first level (independent, can be parallel, sorted) // main should be in second level @@ -1102,16 +1090,16 @@ u1 + u2 // lib has no imports let lib_doc = Document::new("{}".to_string(), DocVersion::new(1)); - graph.update_file(&lib, &lib_doc, simple_resolver); + graph.update_file(graph.intern(&lib), &lib_doc, simple_resolver); // main imports lib let main_code = r#"local lib = import "lib.jsonnet"; lib"#; let main_doc = Document::new(main_code.to_string(), DocVersion::new(1)); - graph.update_file(&main, &main_doc, simple_resolver); + graph.update_file(graph.intern(&main), &main_doc, simple_resolver); let counter = AtomicUsize::new(0); graph - .process_in_parallel(|_path| { + .process_in_parallel(|_file| { counter.fetch_add(1, Ordering::SeqCst); }) .expect("should process files in parallel"); @@ -1130,20 +1118,20 @@ u1 + u2 // lib has no imports let lib_doc = Document::new("{}".to_string(), DocVersion::new(1)); - graph.update_file(&lib, &lib_doc, simple_resolver); + graph.update_file(graph.intern(&lib), &lib_doc, simple_resolver); // main imports lib let main_code = r#"local 
lib = import "lib.jsonnet"; lib"#; let main_doc = Document::new(main_code.to_string(), DocVersion::new(1)); - graph.update_file(&main, &main_doc, simple_resolver); + graph.update_file(graph.intern(&main), &main_doc, simple_resolver); let processed_order = Arc::new(Mutex::new(Vec::new())); let order_clone = Arc::clone(&processed_order); - graph.process_in_parallel(move |path| { - order_clone.lock().unwrap().push(path.clone()); + graph.process_in_parallel(move |file| { + order_clone.lock().unwrap().push(file); }); - let order: Vec = processed_order.lock().unwrap().clone(); + let order = graph_paths(&graph, processed_order.lock().unwrap().clone()); // lib should be processed before main (lib has no deps, main depends on lib) assert_eq!(order, vec![lib, main]); } @@ -1161,31 +1149,31 @@ u1 + u2 // lib has no imports let lib_doc = Document::new("{}".to_string(), DocVersion::new(1)); - graph.update_file(&lib, &lib_doc, simple_resolver); + graph.update_file(graph.intern(&lib), &lib_doc, simple_resolver); // utils imports lib let utils_code = r#"local lib = import "lib.jsonnet"; lib"#; let utils_doc = Document::new(utils_code.to_string(), DocVersion::new(1)); - graph.update_file(&utils, &utils_doc, simple_resolver); + graph.update_file(graph.intern(&utils), &utils_doc, simple_resolver); // main imports utils let main_code = r#"local utils = import "utils.jsonnet"; utils"#; let main_doc = Document::new(main_code.to_string(), DocVersion::new(1)); - graph.update_file(&main, &main_doc, simple_resolver); + graph.update_file(graph.intern(&main), &main_doc, simple_resolver); let processed = Arc::new(Mutex::new(Vec::new())); let processed_clone = Arc::clone(&processed); // Process main and its dependencies graph.process_with_dependencies( - &main, + graph.intern(&main), |_| true, - move |path| { - processed_clone.lock().unwrap().push(path.clone()); + move |file| { + processed_clone.lock().unwrap().push(file); }, ); - let order: Vec = processed.lock().unwrap().clone(); + let order 
= graph_paths(&graph, processed.lock().unwrap().clone()); // lib should be processed before utils, utils before main assert_eq!(order, vec![lib, utils, main]); @@ -1202,7 +1190,7 @@ u1 + u2 let script = test_path("script.k"); graph.update_file_with_entries( - &main, + graph.intern(&main), vec![ ImportEntry { kind: ImportKind::Code, @@ -1223,14 +1211,14 @@ u1 + u2 let processed_clone = Arc::clone(&processed); graph.process_with_dependencies( - &main, + graph.intern(&main), |entry| entry.kind == ImportKind::Code, - move |path| { - processed_clone.lock().unwrap().push(path.clone()); + move |file| { + processed_clone.lock().unwrap().push(file); }, ); - let order: Vec = processed.lock().unwrap().clone(); + let order = graph_paths(&graph, processed.lock().unwrap().clone()); assert_eq!(order, vec![data, main]); } @@ -1247,27 +1235,27 @@ u1 + u2 // lib has no imports let lib_doc = Document::new("{}".to_string(), DocVersion::new(1)); - graph.update_file(&lib, &lib_doc, simple_resolver); + graph.update_file(graph.intern(&lib), &lib_doc, simple_resolver); // utils imports lib let utils_code = r#"local lib = import "lib.jsonnet"; lib"#; let utils_doc = Document::new(utils_code.to_string(), DocVersion::new(1)); - graph.update_file(&utils, &utils_doc, simple_resolver); + graph.update_file(graph.intern(&utils), &utils_doc, simple_resolver); // main imports utils let main_code = r#"local utils = import "utils.jsonnet"; utils"#; let main_doc = Document::new(main_code.to_string(), DocVersion::new(1)); - graph.update_file(&main, &main_doc, simple_resolver); + graph.update_file(graph.intern(&main), &main_doc, simple_resolver); let processed = Arc::new(Mutex::new(Vec::new())); let processed_clone = Arc::clone(&processed); // Process lib and its importers (cascade) - graph.process_importers_with_work_queue(&lib, move |path| { - processed_clone.lock().unwrap().push(path.clone()); + graph.process_importers_with_work_queue(graph.intern(&lib), move |file| { + 
processed_clone.lock().unwrap().push(file); }); - let order: Vec = processed.lock().unwrap().clone(); + let order = graph_paths(&graph, processed.lock().unwrap().clone()); // lib first, then utils (imports lib), then main (imports utils) assert_eq!(order, vec![lib, utils, main]); @@ -1282,14 +1270,16 @@ u1 + u2 let processed = Arc::new(Mutex::new(Vec::new())); let processed_clone = Arc::clone(&processed); - graph.process_with_dependencies( - &missing, - |_| true, - move |path| { - processed_clone.lock().unwrap().push(path.clone()); - }, - ); + if let Some(root) = graph.file(&missing) { + graph.process_with_dependencies( + root, + |_| true, + move |file| { + processed_clone.lock().unwrap().push(file); + }, + ); + } - assert_eq!(*processed.lock().unwrap(), Vec::::new()); + assert_eq!(*processed.lock().unwrap(), Vec::::new()); } } diff --git a/crates/jrsonnet-lsp-inference/src/manager.rs b/crates/jrsonnet-lsp-inference/src/manager.rs index 4acd438e..38db132a 100644 --- a/crates/jrsonnet-lsp-inference/src/manager.rs +++ b/crates/jrsonnet-lsp-inference/src/manager.rs @@ -7,8 +7,8 @@ use std::{num::NonZeroUsize, sync::Arc}; use dashmap::DashMap; use jrsonnet_lsp_document::{ - CanonicalPath, DocVersion, Document, FileId, PathStore, DEFAULT_ANALYSIS_CACHE_CAPACITY, - DEFAULT_CLOSED_CACHE_CAPACITY, + CanonicalPath, DocVersion, Document, FileId, PathResolver, PathStore, + DEFAULT_ANALYSIS_CACHE_CAPACITY, DEFAULT_CLOSED_CACHE_CAPACITY, }; use jrsonnet_lsp_types::GlobalTyStore; use lru::LruCache; @@ -43,6 +43,8 @@ struct CachedSemanticArtifacts { pub struct DocumentManager { /// Interned mapping between canonical paths and stable file ids. paths: PathStore, + /// Read-only resolver over interned mapping. + resolver: PathResolver, /// Open documents keyed by interned file ids. open: DashMap, /// LRU cache for recently closed documents. 
@@ -74,8 +76,10 @@ impl DocumentManager { paths: PathStore, ) -> Self { let closed_capacity = NonZeroUsize::new(closed_capacity).unwrap_or(NonZeroUsize::MIN); + let resolver = paths.resolver(); Self { paths, + resolver, open: DashMap::new(), closed: RwLock::new(LruCache::new(closed_capacity)), analysis_cache: MokaCache::new(DEFAULT_ANALYSIS_CACHE_CAPACITY as u64), @@ -94,11 +98,28 @@ impl DocumentManager { } fn file_id(&self, path: &CanonicalPath) -> Option { - self.paths.get_id(path) + self.resolver.file(path) + } + + fn file_id_or_intern(&self, path: &CanonicalPath) -> FileId { + self.file_id(path).unwrap_or_else(|| self.intern_path(path)) + } + + /// Resolve a file path to an interned file identifier. + #[must_use] + pub fn file(&self, path: &CanonicalPath) -> Option { + self.file_id(path) } fn path_for_id(&self, file_id: FileId) -> Option { - self.paths.get_path(file_id) + self.resolver + .path(file_id) + .map(|path| path.as_ref().clone()) + } + + /// Resolve an interned file identifier to a path. + pub fn path(&self, file: FileId) -> Option> { + self.resolver.path(file) } fn invalidate_analysis_by_id(&self, file_id: FileId) { @@ -109,9 +130,19 @@ impl DocumentManager { self.semantic_cache.invalidate(&file_id); } + /// Invalidate analysis cache by file identifier. + pub fn invalidate_analysis_file(&self, file: FileId) { + self.invalidate_analysis_by_id(file); + } + + /// Invalidate semantic artifacts cache by file identifier. + pub fn invalidate_semantic_artifacts_file(&self, file: FileId) { + self.invalidate_semantic_artifacts_by_id(file); + } + /// Open a document (called on textDocument/didOpen). 
pub fn open(&self, path: CanonicalPath, text: String, version: DocVersion) { - let file_id = self.intern_path(&path); + let file_id = self.file_id_or_intern(&path); // Move from closed cache if present { @@ -193,7 +224,7 @@ impl DocumentManager { return false; }; - let file_id = self.intern_path(path); + let file_id = self.file_id_or_intern(path); self.closed .write() .put(file_id, Document::new(text, DocVersion::new(0))); @@ -264,7 +295,7 @@ impl DocumentManager { // Read from disk once, then cache in `closed` for reuse. let text = std::fs::read_to_string(path.as_path()).ok()?; let document = Document::new(text, DocVersion::new(0)); - let file_id = self.intern_path(path); + let file_id = self.file_id_or_intern(path); { let mut closed = self.closed.write(); closed.put(file_id, document.clone()); @@ -278,11 +309,21 @@ impl DocumentManager { .is_some_and(|file_id| self.open.contains_key(&file_id)) } + /// Check if a file is currently open. + pub fn is_open_file(&self, file: FileId) -> bool { + self.open.contains_key(&file) + } + /// Get the number of open documents. pub fn open_count(&self) -> usize { self.open.len() } + /// Get all open files. + pub fn open_files(&self) -> Vec { + self.open.iter().map(|entry| *entry.key()).collect() + } + /// Iterate over all open documents. pub fn for_each_open(&self, mut f: F) where @@ -297,9 +338,9 @@ impl DocumentManager { /// Get all open document paths. 
pub fn open_paths(&self) -> Vec { - self.open - .iter() - .filter_map(|entry| self.path_for_id(*entry.key())) + self.open_files() + .into_iter() + .filter_map(|file| self.path_for_id(file)) .collect() } @@ -345,7 +386,7 @@ impl DocumentManager { where F: FnOnce() -> TypeAnalysis, { - let file_id = self.intern_path(path); + let file_id = self.file_id_or_intern(path); if let Some(cached) = self.analysis_cache.get(&file_id) { if cached.version == version { @@ -374,7 +415,7 @@ impl DocumentManager { version: DocVersion, analysis: Arc, ) { - let file_id = self.intern_path(&path); + let file_id = self.file_id_or_intern(&path); self.analysis_cache .insert(file_id, CachedAnalysis { version, analysis }); } @@ -402,7 +443,7 @@ impl DocumentManager { where F: FnOnce() -> SemanticArtifacts, { - let file_id = self.intern_path(path); + let file_id = self.file_id_or_intern(path); if let Some(cached) = self.semantic_cache.get(&file_id) { if cached.version == version { @@ -428,7 +469,7 @@ impl DocumentManager { version: DocVersion, artifacts: Arc, ) { - let file_id = self.intern_path(&path); + let file_id = self.file_id_or_intern(&path); self.semantic_cache .insert(file_id, CachedSemanticArtifacts { version, artifacts }); } diff --git a/crates/jrsonnet-lsp-inference/src/provider.rs b/crates/jrsonnet-lsp-inference/src/provider.rs index 815b79b3..b0102398 100644 --- a/crates/jrsonnet-lsp-inference/src/provider.rs +++ b/crates/jrsonnet-lsp-inference/src/provider.rs @@ -5,7 +5,7 @@ use std::sync::Arc; -use jrsonnet_lsp_document::{CanonicalPath, Document}; +use jrsonnet_lsp_document::{CanonicalPath, Document, FileId}; use jrsonnet_lsp_import::{ImportGraph, ImportKind}; use jrsonnet_lsp_types::GlobalTyStore; use parking_lot::RwLock; @@ -78,7 +78,9 @@ impl TypeProvider { self.ensure_dependencies_analyzed(path, doc_source); let resolved_imports = { let graph = self.import_graph.read(); - resolved_imports_for(&graph, path) + graph.file(path).map_or_else(FxHashMap::default, |file| { + 
resolved_imports_for(&graph, file) + }) }; // Analyze with import resolution @@ -99,19 +101,26 @@ impl TypeProvider { doc_source: &D, ) { let graph = self.import_graph.read(); + let Some(root) = graph.file(path) else { + return; + }; graph.process_with_dependencies( - path, + root, |entry| entry.kind == ImportKind::Code, - |dep_path| { - if let Some(doc) = doc_source.get_document(dep_path) { - let resolved_imports = resolved_imports_for(&graph, dep_path); - analyze_and_cache_with_resolved_imports( - dep_path, - &doc, - &self.type_cache, - resolved_imports, - ); - } + |dep_file| { + let Some(dep_path) = graph.path(dep_file) else { + return; + }; + let Some(doc) = doc_source.get_document(dep_path.as_ref()) else { + return; + }; + let resolved_imports = resolved_imports_for(&graph, dep_file); + analyze_and_cache_with_resolved_imports( + dep_path.as_ref(), + &doc, + &self.type_cache, + resolved_imports, + ); }, ); } @@ -129,19 +138,15 @@ impl TypeProvider { } } -fn resolved_imports_for( - graph: &ImportGraph, - path: &CanonicalPath, -) -> FxHashMap { +fn resolved_imports_for(graph: &ImportGraph, file: FileId) -> FxHashMap { graph - .imports(path) + .imports(file) .iter() .filter(|entry| entry.kind == ImportKind::Code) .filter_map(|entry| { - entry - .resolved_path - .clone() - .map(|resolved_path| (entry.import_path.clone(), resolved_path)) + let resolved = entry.resolved_path.as_ref()?; + let file = graph.file(resolved)?; + Some((entry.import_path.clone(), file)) }) .collect() } @@ -149,7 +154,7 @@ fn resolved_imports_for( #[cfg(test)] mod tests { use dashmap::DashMap; - use jrsonnet_lsp_document::{CanonicalPath, DocVersion, FileId, PathStore}; + use jrsonnet_lsp_document::{CanonicalPath, DocVersion, FileId, PathResolver, PathStore}; use jrsonnet_lsp_types::Ty; use super::*; @@ -158,13 +163,16 @@ mod tests { /// Test document source backed by a `DashMap`. 
struct TestDocSource { paths: PathStore, + resolver: PathResolver, docs: DashMap, } impl TestDocSource { fn new(paths: PathStore) -> Self { + let resolver = paths.resolver(); Self { paths, + resolver, docs: DashMap::new(), } } @@ -177,7 +185,7 @@ mod tests { impl DocumentSource for TestDocSource { fn get_document(&self, path: &CanonicalPath) -> Option { - let file_id = self.paths.get_id(path)?; + let file_id = self.resolver.file(path)?; self.docs.get(&file_id).map(|r| r.clone()) } } @@ -214,7 +222,7 @@ mod tests { let path_store = PathStore::new(); let type_cache = new_shared_cache(Arc::clone(&global_types), path_store.clone()); let import_graph = Arc::new(RwLock::new(ImportGraph::new(path_store.clone()))); - let doc_source = TestDocSource::new(path_store); + let doc_source = TestDocSource::new(path_store.clone()); // Add imported file let dep_path = test_path("dep.jsonnet"); @@ -229,8 +237,9 @@ mod tests { // Update import graph to show main imports dep { let mut graph = import_graph.write(); + let main_file = graph.intern(&main_path); graph.update_file_with_entries( - &main_path, + main_file, vec![jrsonnet_lsp_import::ImportEntry { kind: jrsonnet_lsp_import::ImportKind::Code, import_path: "dep.jsonnet".to_string(), @@ -247,14 +256,18 @@ mod tests { ); // Before analyzing main, dep should not be in the cache - assert!(type_cache.read().get(&dep_path).is_none()); + let dep_file = path_store + .resolver() + .file(&dep_path) + .expect("dependency should be interned in path store"); + assert!(type_cache.read().get(dep_file).is_none()); // Analyze main - this should trigger dependency analysis let _analysis = provider.analyze(&main_path, &main_doc, &doc_source); // After analyzing main, dep should be in the cache // (because ensure_dependencies_analyzed processes it first) - assert!(type_cache.read().get(&dep_path).is_some()); + assert!(type_cache.read().get(dep_file).is_some()); } #[test] @@ -275,8 +288,9 @@ mod tests { { let mut graph = import_graph.write(); + let 
main_file = graph.intern(&main_path); graph.update_file_with_entries( - &main_path, + main_file, vec![jrsonnet_lsp_import::ImportEntry { kind: jrsonnet_lsp_import::ImportKind::Code, import_path: "vendor/dep.jsonnet".to_string(), @@ -312,8 +326,9 @@ mod tests { { let mut graph = import_graph.write(); + let main_file = graph.intern(&main_path); graph.update_file_with_entries( - &main_path, + main_file, vec![jrsonnet_lsp_import::ImportEntry { kind: jrsonnet_lsp_import::ImportKind::String, import_path: "./script.k".to_string(), @@ -349,8 +364,9 @@ mod tests { { let mut graph = import_graph.write(); + let main_file = graph.intern(&main_path); graph.update_file_with_entries( - &main_path, + main_file, vec![jrsonnet_lsp_import::ImportEntry { kind: jrsonnet_lsp_import::ImportKind::Binary, import_path: "./script.k".to_string(), diff --git a/crates/jrsonnet-lsp-inference/src/type_cache.rs b/crates/jrsonnet-lsp-inference/src/type_cache.rs index 5a97f76d..b92cc53b 100644 --- a/crates/jrsonnet-lsp-inference/src/type_cache.rs +++ b/crates/jrsonnet-lsp-inference/src/type_cache.rs @@ -11,7 +11,7 @@ use std::{num::NonZeroUsize, sync::Arc}; use jrsonnet_lsp_document::{ - CanonicalPath, Document, FileId, PathStore, DEFAULT_TYPE_CACHE_CAPACITY, + CanonicalPath, Document, FileId, PathResolver, PathStore, DEFAULT_TYPE_CACHE_CAPACITY, }; use jrsonnet_lsp_types::{GlobalTy, GlobalTyStore}; use lru::LruCache; @@ -33,6 +33,8 @@ pub struct TypeCache { cache: LruCache, /// Interned mapping between canonical paths and stable file ids. paths: PathStore, + /// Read-only resolver over interned mapping. + resolver: PathResolver, /// Global type store for shared types. 
global_types: Arc, } @@ -59,9 +61,11 @@ impl TypeCache { paths: PathStore, ) -> Self { let capacity = NonZeroUsize::new(capacity).unwrap_or(NonZeroUsize::MIN); + let resolver = paths.resolver(); Self { cache: LruCache::new(capacity), paths, + resolver, global_types, } } @@ -72,53 +76,63 @@ impl TypeCache { &self.global_types } + /// Get or create the interned file for `path`. + #[must_use] + pub fn intern(&self, path: &CanonicalPath) -> FileId { + self.paths.intern(path) + } + + /// Resolve a file path to an interned file identifier. + #[must_use] + pub fn file(&self, path: &CanonicalPath) -> Option { + self.resolver.file(path) + } + + /// Resolve an interned file identifier to a path. + #[must_use] + pub fn path(&self, file: FileId) -> Option> { + self.resolver.path(file) + } + /// Get the cached type for a file, if available. /// /// Uses `peek` to avoid updating LRU order for read-only lookups. #[must_use] - pub fn get(&self, path: &CanonicalPath) -> Option { - let file_id = self.paths.get_id(path)?; - self.cache.peek(&file_id).map(|c| c.ty) + pub fn get(&self, file: FileId) -> Option { + self.cache.peek(&file).map(|cached| cached.ty) } /// Get the cached type for a file and update LRU order. /// /// Use this when the lookup indicates actual usage of the cached type. - pub fn get_and_touch(&mut self, path: &CanonicalPath) -> Option { - let file_id = self.paths.get_id(path)?; - self.cache.get(&file_id).map(|c| c.ty) + pub fn get_and_touch(&mut self, file: FileId) -> Option { + self.cache.get(&file).map(|cached| cached.ty) } /// Update the cache for a file. /// /// The type must be global, preventing accidental cross-analysis leakage. 
- pub fn update(&mut self, path: &CanonicalPath, ty: GlobalTy, version: i32) { - let file_id = self.paths.intern(path); - self.cache.put(file_id, CachedType { ty, version }); + pub fn update(&mut self, file: FileId, ty: GlobalTy, version: i32) { + self.cache.put(file, CachedType { ty, version }); } /// Invalidate the cache for a file. - pub fn invalidate(&mut self, path: &CanonicalPath) { - if let Some(file_id) = self.paths.get_id(path) { - self.cache.pop(&file_id); - } + pub fn invalidate(&mut self, file: FileId) { + self.cache.pop(&file); } /// Invalidate the cache for multiple files. - pub fn invalidate_many(&mut self, paths: impl IntoIterator) { - for path in paths { - if let Some(file_id) = self.paths.get_id(&path) { - self.cache.pop(&file_id); - } + pub fn invalidate_many(&mut self, files: impl IntoIterator) { + for file in files { + self.invalidate(file); } } /// Check if a file's cache is up to date with the given version. #[must_use] - pub fn is_up_to_date(&self, path: &CanonicalPath, version: i32) -> bool { - self.paths - .get_id(path) - .and_then(|file_id| self.cache.peek(&file_id)) + pub fn is_up_to_date(&self, file: FileId, version: i32) -> bool { + self.cache + .peek(&file) .is_some_and(|cached| cached.version == version) } @@ -166,15 +180,20 @@ pub(crate) fn analyze_and_cache_with_resolved_imports( resolved_imports: I, ) -> GlobalTy where - I: IntoIterator, + I: IntoIterator, { let version = doc.version().0; + let file = { + let read_cache = cache.read(); + read_cache.file(path) + }; + let file = file.unwrap_or_else(|| cache.write().intern(path)); // Check if we already have a cached type for this version { let read_cache = cache.read(); - if read_cache.is_up_to_date(path, version) { - if let Some(ty) = read_cache.get(path) { + if read_cache.is_up_to_date(file, version) { + if let Some(ty) = read_cache.get(file) { return ty; } } @@ -199,7 +218,7 @@ where // Cache the type (it's already in the global store) { let mut write_cache = cache.write(); - 
write_cache.update(path, ty, version); + write_cache.update(file, ty, version); } ty @@ -210,8 +229,8 @@ where /// Uses import paths that were already resolved by the import graph. #[derive(Debug)] pub struct CachingImportResolver { - /// Import path -> resolved canonical path, built from import graph entries. - resolved_imports: FxHashMap, + /// Import path -> resolved file. + resolved_imports: FxHashMap, /// Type cache for looking up cached file types. cache: SharedTypeCache, } @@ -224,7 +243,7 @@ impl CachingImportResolver { /// * `cache` - Shared type cache for looking up cached types pub fn new(resolved_imports: I, cache: SharedTypeCache) -> Self where - I: IntoIterator, + I: IntoIterator, { Self { resolved_imports: resolved_imports.into_iter().collect(), @@ -232,16 +251,16 @@ impl CachingImportResolver { } } - fn resolved_path(&self, import_path: &str) -> Option<&CanonicalPath> { - self.resolved_imports.get(import_path) + fn resolved_file(&self, import_path: &str) -> Option { + self.resolved_imports.get(import_path).copied() } } impl crate::env::ImportResolver for CachingImportResolver { fn resolve_import(&self, import_path: &str) -> Option { - let canonical_path = self.resolved_path(import_path)?; + let file = self.resolved_file(import_path)?; let cache = self.cache.read(); - cache.get(canonical_path) + cache.get(file) } } @@ -265,6 +284,30 @@ mod tests { PathStore::new() } + fn cache_get(cache: &TypeCache, file: FileId) -> Option { + cache.get(file) + } + + fn cache_touch(cache: &mut TypeCache, file: FileId) -> Option { + cache.get_and_touch(file) + } + + fn cache_update(cache: &mut TypeCache, file: FileId, ty: GlobalTy, version: i32) { + cache.update(file, ty, version); + } + + fn cache_invalidate(cache: &mut TypeCache, file: FileId) { + cache.invalidate(file); + } + + fn cache_invalidate_many(cache: &mut TypeCache, files: impl IntoIterator) { + cache.invalidate_many(files); + } + + fn cache_is_up_to_date(cache: &TypeCache, file: FileId, version: i32) -> 
bool { + cache.is_up_to_date(file, version) + } + /// Assert that the cache contains exactly the specified global type entries. fn assert_cache_contents_ty(cache: &TypeCache, expected: &[(&str, GlobalTy)]) { let actual: BTreeSet<_> = cache @@ -272,8 +315,7 @@ mod tests { .iter() .filter_map(|(k, v)| { cache - .paths - .get_path(*k) + .path(*k) .map(|path| (path.as_path().to_string_lossy().to_string(), v.ty)) }) .collect(); @@ -288,11 +330,13 @@ mod tests { fn test_equivalent_path_lookup() { let mut cache = TypeCache::new(test_global_store(), test_path_store()); let path = test_path("main.jsonnet"); - cache.update(&path, GlobalTy::NUMBER, 1); + let file = cache.intern(&path); + cache_update(&mut cache, file, GlobalTy::NUMBER, 1); let lookup = test_path("main.jsonnet"); - assert_eq!(cache.get(&lookup), Some(GlobalTy::NUMBER)); - assert!(cache.is_up_to_date(&lookup, 1)); + let lookup_file = cache.file(&lookup).expect("lookup path should be interned"); + assert_eq!(cache_get(&cache, lookup_file), Some(GlobalTy::NUMBER)); + assert!(cache_is_up_to_date(&cache, lookup_file, 1)); } #[test] @@ -304,18 +348,19 @@ mod tests { assert_cache_contents_ty(&cache, &[]); // Add an entry - cache.update(&path, GlobalTy::NUMBER, 1); + let file = cache.intern(&path); + cache_update(&mut cache, file, GlobalTy::NUMBER, 1); assert_cache_contents_ty(&cache, &[("main.jsonnet", GlobalTy::NUMBER)]); - assert!(cache.is_up_to_date(&path, 1)); - assert!(!cache.is_up_to_date(&path, 2)); + assert!(cache_is_up_to_date(&cache, file, 1)); + assert!(!cache_is_up_to_date(&cache, file, 2)); // Update the entry - cache.update(&path, GlobalTy::STRING, 2); + cache_update(&mut cache, file, GlobalTy::STRING, 2); assert_cache_contents_ty(&cache, &[("main.jsonnet", GlobalTy::STRING)]); - assert!(cache.is_up_to_date(&path, 2)); + assert!(cache_is_up_to_date(&cache, file, 2)); // Invalidate - cache.invalidate(&path); + cache_invalidate(&mut cache, file); assert_cache_contents_ty(&cache, &[]); } @@ -330,7 +375,13 
@@ mod tests { let ty1 = analyze_and_cache(&path, &doc, &cache); assert_eq!(ty1, GlobalTy::NUMBER); // Verify cached value - assert_eq!(cache.read().get(&path), Some(GlobalTy::NUMBER)); + assert_eq!( + { + let read_cache = cache.read(); + read_cache.file(&path).and_then(|file| read_cache.get(file)) + }, + Some(GlobalTy::NUMBER) + ); // Second call should return cached value let ty2 = analyze_and_cache(&path, &doc, &cache); @@ -340,7 +391,13 @@ mod tests { let doc2 = Document::new("\"hello\"".to_string(), DocVersion::new(2)); let ty3 = analyze_and_cache(&path, &doc2, &cache); assert_eq!(ty3, GlobalTy::STRING); - assert_eq!(cache.read().get(&path), Some(GlobalTy::STRING)); + assert_eq!( + { + let read_cache = cache.read(); + read_cache.file(&path).and_then(|file| read_cache.get(file)) + }, + Some(GlobalTy::STRING) + ); } #[test] @@ -349,9 +406,11 @@ mod tests { let path1 = test_path("file1.jsonnet"); let path2 = test_path("file2.jsonnet"); + let file1 = cache.intern(&path1); + let file2 = cache.intern(&path2); - cache.update(&path1, GlobalTy::NUMBER, 1); - cache.update(&path2, GlobalTy::STRING, 1); + cache_update(&mut cache, file1, GlobalTy::NUMBER, 1); + cache_update(&mut cache, file2, GlobalTy::STRING, 1); assert_cache_contents_ty( &cache, @@ -361,7 +420,7 @@ mod tests { ], ); - cache.invalidate(&path1); + cache_invalidate(&mut cache, file1); assert_cache_contents_ty(&cache, &[("file2.jsonnet", GlobalTy::STRING)]); } @@ -373,12 +432,16 @@ mod tests { let path2 = test_path("utils.jsonnet"); let path3 = test_path("main.jsonnet"); let path4 = test_path("other.jsonnet"); + let file1 = cache.intern(&path1); + let file2 = cache.intern(&path2); + let file3 = cache.intern(&path3); + let file4 = cache.intern(&path4); // Cache all files - cache.update(&path1, GlobalTy::NUMBER, 1); - cache.update(&path2, GlobalTy::STRING, 1); - cache.update(&path3, GlobalTy::BOOL, 1); - cache.update(&path4, GlobalTy::NULL, 1); + cache_update(&mut cache, file1, GlobalTy::NUMBER, 1); + 
cache_update(&mut cache, file2, GlobalTy::STRING, 1); + cache_update(&mut cache, file3, GlobalTy::BOOL, 1); + cache_update(&mut cache, file4, GlobalTy::NULL, 1); assert_cache_contents_ty( &cache, @@ -391,7 +454,7 @@ mod tests { ); // Invalidate multiple files (simulating cascading invalidation) - cache.invalidate_many(vec![path1.clone(), path2.clone(), path3.clone()]); + cache_invalidate_many(&mut cache, vec![file1, file2, file3]); // Only path4 should remain assert_cache_contents_ty(&cache, &[("other.jsonnet", GlobalTy::NULL)]); @@ -401,16 +464,17 @@ mod tests { fn test_basic_get_update() { let mut cache = TypeCache::new(test_global_store(), test_path_store()); let path = test_path("test.jsonnet"); + let file = cache.intern(&path); // Update with a global type - cache.update(&path, GlobalTy::NUMBER, 1); + cache_update(&mut cache, file, GlobalTy::NUMBER, 1); // Get should return the same type - assert_eq!(cache.get(&path), Some(GlobalTy::NUMBER)); + assert_eq!(cache_get(&cache, file), Some(GlobalTy::NUMBER)); // Version check - assert!(cache.is_up_to_date(&path, 1)); - assert!(!cache.is_up_to_date(&path, 2)); + assert!(cache_is_up_to_date(&cache, file, 1)); + assert!(!cache_is_up_to_date(&cache, file, 2)); } #[test] @@ -418,12 +482,13 @@ mod tests { let global_types = test_global_store(); let mut cache = TypeCache::new(Arc::clone(&global_types), test_path_store()); let path = test_path("test.jsonnet"); + let file = cache.intern(&path); // Cache a type - types are stored in the shared global store - cache.update(&path, GlobalTy::NUMBER, 1); + cache_update(&mut cache, file, GlobalTy::NUMBER, 1); // Verify we can retrieve the type - let retrieved = cache.get(&path).unwrap(); + let retrieved = cache_get(&cache, file).unwrap(); assert_eq!(retrieved, GlobalTy::NUMBER); // The cache's global_types should be the same reference @@ -435,9 +500,11 @@ mod tests { let mut cache = TypeCache::new(test_global_store(), test_path_store()); let path1 = test_path("a.jsonnet"); let 
path2 = test_path("b.jsonnet"); + let file1 = cache.intern(&path1); + let file2 = cache.intern(&path2); - cache.update(&path1, GlobalTy::NUMBER, 1); - cache.update(&path2, GlobalTy::STRING, 1); + cache_update(&mut cache, file1, GlobalTy::NUMBER, 1); + cache_update(&mut cache, file2, GlobalTy::STRING, 1); assert_eq!(cache.len(), 2); assert!(!cache.is_empty()); @@ -446,7 +513,7 @@ mod tests { assert_eq!(cache.len(), 0); assert!(cache.is_empty()); - assert_eq!(cache.get(&path1), None); + assert_eq!(cache_get(&cache, file1), None); } #[test] @@ -458,28 +525,32 @@ mod tests { let path2 = test_path("file2.jsonnet"); let path3 = test_path("file3.jsonnet"); let path4 = test_path("file4.jsonnet"); + let file1 = cache.intern(&path1); + let file2 = cache.intern(&path2); + let file3 = cache.intern(&path3); + let file4 = cache.intern(&path4); // Fill the cache - cache.update(&path1, GlobalTy::NUMBER, 1); - cache.update(&path2, GlobalTy::STRING, 1); - cache.update(&path3, GlobalTy::BOOL, 1); + cache_update(&mut cache, file1, GlobalTy::NUMBER, 1); + cache_update(&mut cache, file2, GlobalTy::STRING, 1); + cache_update(&mut cache, file3, GlobalTy::BOOL, 1); assert_eq!(cache.len(), 3); - assert_eq!(cache.get(&path1), Some(GlobalTy::NUMBER)); - assert_eq!(cache.get(&path2), Some(GlobalTy::STRING)); - assert_eq!(cache.get(&path3), Some(GlobalTy::BOOL)); + assert_eq!(cache_get(&cache, file1), Some(GlobalTy::NUMBER)); + assert_eq!(cache_get(&cache, file2), Some(GlobalTy::STRING)); + assert_eq!(cache_get(&cache, file3), Some(GlobalTy::BOOL)); // Access path1 to make it recently used (path2 is now least recently used) - let _ = cache.get_and_touch(&path1); + let _ = cache_touch(&mut cache, file1); // Add a fourth entry - should evict path2 (LRU) - cache.update(&path4, GlobalTy::NULL, 1); + cache_update(&mut cache, file4, GlobalTy::NULL, 1); assert_eq!(cache.len(), 3); - assert_eq!(cache.get(&path1), Some(GlobalTy::NUMBER)); // Still present (was touched) - assert_eq!(cache.get(&path2), 
None); // Evicted (was LRU) - assert_eq!(cache.get(&path3), Some(GlobalTy::BOOL)); // Still present - assert_eq!(cache.get(&path4), Some(GlobalTy::NULL)); // Newly added + assert_eq!(cache_get(&cache, file1), Some(GlobalTy::NUMBER)); // Still present (was touched) + assert_eq!(cache_get(&cache, file2), None); // Evicted (was LRU) + assert_eq!(cache_get(&cache, file3), Some(GlobalTy::BOOL)); // Still present + assert_eq!(cache_get(&cache, file4), Some(GlobalTy::NULL)); // Newly added } #[test] @@ -487,7 +558,8 @@ mod tests { // Verify with_capacity(0) doesn't panic and has minimum capacity let mut cache = TypeCache::with_capacity(test_global_store(), 0, test_path_store()); let path = test_path("test.jsonnet"); - cache.update(&path, GlobalTy::NUMBER, 1); - assert_eq!(cache.get(&path), Some(GlobalTy::NUMBER)); + let file = cache.intern(&path); + cache_update(&mut cache, file, GlobalTy::NUMBER, 1); + assert_eq!(cache_get(&cache, file), Some(GlobalTy::NUMBER)); } } diff --git a/crates/jrsonnet-lsp/benches/type_cache.rs b/crates/jrsonnet-lsp/benches/type_cache.rs index 0229ee65..7b5b84b2 100644 --- a/crates/jrsonnet-lsp/benches/type_cache.rs +++ b/crates/jrsonnet-lsp/benches/type_cache.rs @@ -5,7 +5,7 @@ use std::sync::Arc; use criterion::{black_box, criterion_group, criterion_main, BenchmarkId, Criterion}; -use jrsonnet_lsp_document::{CanonicalPath, PathStore}; +use jrsonnet_lsp_document::{CanonicalPath, FileId, PathStore}; use jrsonnet_lsp_inference::{new_shared_cache, TypeCache}; use jrsonnet_lsp_types::{GlobalTy, GlobalTyStore}; @@ -13,6 +13,14 @@ fn make_path(i: usize) -> CanonicalPath { CanonicalPath::new(std::path::PathBuf::from(format!("/test/file{i}.jsonnet"))) } +fn make_paths(size: usize) -> Vec { + (0..size).map(make_path).collect() +} + +fn intern_files(cache: &TypeCache, paths: &[CanonicalPath]) -> Vec { + paths.iter().map(|path| cache.intern(path)).collect() +} + fn bench_cache_operations(c: &mut Criterion) { let mut group = 
c.benchmark_group("type_cache"); @@ -20,12 +28,13 @@ fn bench_cache_operations(c: &mut Criterion) { for size in [100, 500, 1000] { let global = Arc::new(GlobalTyStore::new()); let mut cache = TypeCache::new(Arc::clone(&global), PathStore::new()); + let paths = make_paths(size); + let files = intern_files(&cache, &paths); group.bench_with_input(BenchmarkId::new("update", size), &size, |b, &size| { b.iter(|| { - for i in 0..size { - let path = make_path(i); - cache.update(&path, GlobalTy::NUMBER, 1); + for file in files.iter().take(size) { + cache.update(*file, GlobalTy::NUMBER, 1); } }); }); @@ -35,18 +44,18 @@ fn bench_cache_operations(c: &mut Criterion) { for size in [100, 500, 1000] { let global = Arc::new(GlobalTyStore::new()); let mut cache = TypeCache::new(Arc::clone(&global), PathStore::new()); + let paths = make_paths(size); + let files = intern_files(&cache, &paths); // Pre-populate cache - for i in 0..size { - let path = make_path(i); - cache.update(&path, GlobalTy::NUMBER, 1); + for file in &files { + cache.update(*file, GlobalTy::NUMBER, 1); } group.bench_with_input(BenchmarkId::new("lookup_hit", size), &size, |b, &size| { b.iter(|| { - for i in 0..size { - let path = make_path(i); - black_box(cache.get(&path)); + for file in files.iter().take(size) { + black_box(cache.get(*file)); } }); }); @@ -56,12 +65,13 @@ fn bench_cache_operations(c: &mut Criterion) { for size in [100, 500, 1000] { let global = Arc::new(GlobalTyStore::new()); let cache = TypeCache::new(Arc::clone(&global), PathStore::new()); + let paths = make_paths(size); + let files = intern_files(&cache, &paths); group.bench_with_input(BenchmarkId::new("lookup_miss", size), &size, |b, &size| { b.iter(|| { - for i in 0..size { - let path = make_path(i); - black_box(cache.get(&path)); + for file in files.iter().take(size) { + black_box(cache.get(*file)); } }); }); @@ -76,17 +86,17 @@ fn bench_cache_operations(c: &mut Criterion) { || { // Setup: create and populate cache let mut cache = 
TypeCache::new(Arc::clone(&global), PathStore::new()); - for i in 0..size { - let path = make_path(i); - cache.update(&path, GlobalTy::NUMBER, 1); + let paths = make_paths(size); + let files = intern_files(&cache, &paths); + for file in &files { + cache.update(*file, GlobalTy::NUMBER, 1); } - cache + (cache, files) }, - |mut cache| { + |(mut cache, files)| { // Benchmark: invalidate all entries - for i in 0..size { - let path = make_path(i); - cache.invalidate(&path); + for file in files.iter().take(size) { + cache.invalidate(*file); } }, criterion::BatchSize::SmallInput, @@ -106,15 +116,16 @@ fn bench_cache_operations(c: &mut Criterion) { || { // Setup: create and populate cache let mut cache = TypeCache::new(Arc::clone(&global), PathStore::new()); - let paths: Vec<_> = (0..size).map(make_path).collect(); - for path in &paths { - cache.update(path, GlobalTy::NUMBER, 1); + let paths = make_paths(size); + let files = intern_files(&cache, &paths); + for file in &files { + cache.update(*file, GlobalTy::NUMBER, 1); } - (cache, paths) + (cache, files) }, - |(mut cache, paths)| { + |(mut cache, files)| { // Benchmark: invalidate all at once - cache.invalidate_many(paths); + cache.invalidate_many(files); }, criterion::BatchSize::SmallInput, ); @@ -134,30 +145,30 @@ fn bench_shared_cache(c: &mut Criterion) { let cache = new_shared_cache(Arc::clone(&global), PathStore::new()); // Pre-populate - { + let files = { let mut write_cache = cache.write(); - for i in 0..size { - let path = make_path(i); - write_cache.update(&path, GlobalTy::NUMBER, 1); + let paths = make_paths(size); + let files = intern_files(&write_cache, &paths); + for file in &files { + write_cache.update(*file, GlobalTy::NUMBER, 1); } - } + files + }; group.bench_with_input(BenchmarkId::new("read_lock", size), &size, |b, &size| { b.iter(|| { - for i in 0..size { - let path = make_path(i); + for file in files.iter().take(size) { let read_cache = cache.read(); - black_box(read_cache.get(&path)); + 
black_box(read_cache.get(*file)); } }); }); group.bench_with_input(BenchmarkId::new("write_lock", size), &size, |b, &size| { b.iter(|| { - for i in 0..size { - let path = make_path(i); + for file in files.iter().take(size) { let mut write_cache = cache.write(); - write_cache.update(&path, GlobalTy::STRING, 2); + write_cache.update(*file, GlobalTy::STRING, 2); } }); }); @@ -171,27 +182,26 @@ fn bench_version_check(c: &mut Criterion) { let global = Arc::new(GlobalTyStore::new()); let mut cache = TypeCache::new(Arc::clone(&global), PathStore::new()); + let paths = make_paths(1000); + let files = intern_files(&cache, &paths); // Pre-populate with version 1 - for i in 0..1000 { - let path = make_path(i); - cache.update(&path, GlobalTy::NUMBER, 1); + for file in &files { + cache.update(*file, GlobalTy::NUMBER, 1); } group.bench_function("is_up_to_date_hit", |b| { b.iter(|| { - for i in 0..1000 { - let path = make_path(i); - black_box(cache.is_up_to_date(&path, 1)); + for file in &files { + black_box(cache.is_up_to_date(*file, 1)); } }); }); group.bench_function("is_up_to_date_miss", |b| { b.iter(|| { - for i in 0..1000 { - let path = make_path(i); - black_box(cache.is_up_to_date(&path, 2)); // Different version + for file in &files { + black_box(cache.is_up_to_date(*file, 2)); // Different version } }); }); diff --git a/crates/jrsonnet-lsp/src/server.rs b/crates/jrsonnet-lsp/src/server.rs index a0a12d8f..cbd89466 100644 --- a/crates/jrsonnet-lsp/src/server.rs +++ b/crates/jrsonnet-lsp/src/server.rs @@ -9,6 +9,7 @@ mod notifications; mod watched_files; use std::{ + collections::BTreeSet, panic::{catch_unwind, AssertUnwindSafe}, path::{Path, PathBuf}, sync::Arc, @@ -16,7 +17,7 @@ use std::{ use anyhow::{Context, Result}; use crossbeam_channel::{select, Receiver, Sender}; -use jrsonnet_lsp_document::{CanonicalPath, DocVersion, PathStore}; +use jrsonnet_lsp_document::{CanonicalPath, DocVersion, FileId, PathStore}; use jrsonnet_lsp_handlers as handlers; use 
jrsonnet_lsp_import::{ImportGraph, ImportResolution}; use jrsonnet_lsp_inference::{ @@ -120,9 +121,10 @@ const SUPPORTED_EXECUTE_COMMANDS: [&str; 5] = [ const WATCHED_FILE_GLOB_PATTERNS: [&str; 3] = ["**/*.jsonnet", "**/*.libsonnet", "**/*.json"]; -pub(super) fn normalize_paths(paths: &mut Vec) { - paths.sort_by(|a, b| a.as_path().cmp(b.as_path())); - paths.dedup(); +pub(super) fn unique_files(files: impl IntoIterator) -> Vec { + let mut set = BTreeSet::new(); + set.extend(files); + set.into_iter().collect() } impl Server { @@ -181,20 +183,17 @@ impl Server { /// /// When a file changes, any cached types for files that depend on it may be stale, /// so we invalidate the entire dependency chain. - fn invalidate_type_cache_with_dependents(&self, path: &CanonicalPath) { - // Get all files that transitively import this file - let dependents = self.import_graph.read().transitive_importers(path); - - // Invalidate the changed file and all its dependents + fn invalidate_type_cache_with_dependents(&self, file: FileId) { + let dependents = self.import_graph.read().transitive_importers(file); let mut cache = self.type_cache.write(); - cache.invalidate(path); - cache.invalidate_many(dependents.iter().cloned()); + cache.invalidate(file); + cache.invalidate_many(dependents.iter().copied()); drop(cache); // Keep analysis cache consistent with type cache invalidation. 
- self.documents.invalidate_analysis(path); + self.documents.invalidate_analysis_file(file); for dependent in dependents { - self.documents.invalidate_analysis(&dependent); + self.documents.invalidate_analysis_file(dependent); } } @@ -336,16 +335,25 @@ impl Server { let documents = Arc::clone(&self.documents); let import_graph = Arc::clone(&self.import_graph); let config = Arc::clone(&self.config); + let path_store = self.path_store.clone(); rayon::spawn(move || { let mut files = Vec::new(); for root in &roots { files.extend(Self::collect_workspace_files(root)); } - normalize_paths(&mut files); + files.sort_by(|a, b| a.as_path().cmp(b.as_path())); + files.dedup(); let file_count = files.len(); for path in &files { - Self::update_import_graph_for_path(&documents, &import_graph, &config, path); + let file = path_store.intern(path); + Self::update_import_graph_for_file( + &documents, + &import_graph, + &config, + &path_store, + file, + ); } info!( diff --git a/crates/jrsonnet-lsp/src/server/async_requests.rs b/crates/jrsonnet-lsp/src/server/async_requests.rs index c0cb640c..02086a6f 100644 --- a/crates/jrsonnet-lsp/src/server/async_requests.rs +++ b/crates/jrsonnet-lsp/src/server/async_requests.rs @@ -17,7 +17,7 @@ use parking_lot::RwLock; use rayon::prelude::*; use tracing::{info, warn}; -use super::{normalize_paths, SharedConfig}; +use super::{unique_files, SharedConfig}; use crate::analysis::{ eval::create_state_with_jpath, tanka::effective_import_roots, EvalConfig, Evaluator, }; @@ -245,15 +245,20 @@ impl AsyncRequestContext { semantic.as_deref(), ); - let importers = self.import_graph.read().transitive_importers(&path); + let importers = { + let import_graph = self.import_graph.read(); + import_graph + .file(&path) + .map_or_else(Vec::new, |file| import_graph.transitive_importers(file)) + }; let importer_docs: Vec<_> = importers .into_iter() - .filter_map(|p| { - self.documents.get_document(&p).map(|d| { - let semantic = self.documents.get_semantic_artifacts(&p); 
- (p, d, semantic) - }) + .filter_map(|file| { + let path = self.documents.path(file)?; + let doc = self.documents.get_document(path.as_ref())?; + let semantic = self.documents.get_semantic_artifacts(path.as_ref()); + Some((path.as_ref().clone(), doc, semantic)) }) .collect(); let importer_refs: Vec<_> = importer_docs @@ -286,17 +291,18 @@ impl AsyncRequestContext { ) -> Option { let query = ¶ms.query; - let mut paths = { + let files = { let import_graph = self.import_graph.read(); - import_graph.all_files().collect::>() + unique_files(import_graph.all_files().chain(self.documents.open_files())) }; - paths.extend(self.documents.open_paths()); - normalize_paths(&mut paths); - let mut all_symbols: Vec = paths + let mut all_symbols: Vec = files .into_par_iter() - .flat_map(|path| { - let Some(doc) = self.documents.get_document(&path) else { + .flat_map(|file| { + let Some(path) = self.documents.path(file) else { + return Vec::new(); + }; + let Some(doc) = self.documents.get_document(path.as_ref()) else { return Vec::new(); }; let Ok(uri) = path.to_uri() else { @@ -518,13 +524,18 @@ impl AsyncRequestContext { let path = CanonicalPath::from_uri(&uri_parsed).ok()?; let import_graph = self.import_graph.read(); - let importers = import_graph.transitive_importers(&path); - drop(import_graph); - + let importers = import_graph + .file(&path) + .map_or_else(Vec::new, |file| import_graph.transitive_importers(file)); let mut importer_uris: Vec = importers .iter() - .filter_map(|p| p.to_uri().ok().map(|uri| uri.to_string())) + .filter_map(|file| { + import_graph + .path(*file) + .and_then(|path| path.to_uri().ok().map(|uri| uri.to_string())) + }) .collect(); + drop(import_graph); importer_uris.sort(); Some(serde_json::json!({ @@ -605,8 +616,9 @@ impl AsyncRequestContext { import: &str, ) -> Option { let import_graph = self.import_graph.read(); + let from_file = import_graph.file(from)?; import_graph - .imports(from) + .imports(from_file) .iter() .find(|entry| entry.import_path == 
import) .and_then(|entry| entry.resolved_path.clone()) diff --git a/crates/jrsonnet-lsp/src/server/import_graph.rs b/crates/jrsonnet-lsp/src/server/import_graph.rs index a1830a7c..1e3aacb9 100644 --- a/crates/jrsonnet-lsp/src/server/import_graph.rs +++ b/crates/jrsonnet-lsp/src/server/import_graph.rs @@ -5,29 +5,38 @@ impl Server { /// /// Parses the document's import statements and updates the graph /// so that cross-file references can be found efficiently. - pub(super) fn update_import_graph(&self, path: &CanonicalPath) { - Self::update_import_graph_for_path(&self.documents, &self.import_graph, &self.config, path); + pub(super) fn update_import_graph(&self, file: FileId) { + Self::update_import_graph_for_file( + &self.documents, + &self.import_graph, + &self.config, + &self.path_store, + file, + ); } - pub(super) fn tracked_paths_for_reindex(&self) -> Vec { - let mut paths = { + pub(super) fn tracked_files_for_reindex(&self) -> Vec { + let known_files = { let import_graph = self.import_graph.read(); import_graph.all_files().collect::>() }; - paths.extend(self.documents.open_paths()); - normalize_paths(&mut paths); - paths + unique_files(known_files.into_iter().chain(self.documents.open_files())) } - pub(super) fn update_import_graph_for_path( + pub(super) fn update_import_graph_for_file( documents: &SharedDocumentManager, import_graph: &Arc>, config: &SharedConfig, - path: &CanonicalPath, + path_store: &PathStore, + file: FileId, ) { - let Some(doc) = documents.get_document(path) else { + let resolver = path_store.resolver(); + let Some(path) = resolver.path(file) else { + return; + }; + let Some(doc) = documents.get_document(path.as_ref()) else { // File no longer exists or cannot be read. - import_graph.write().remove_file(path); + import_graph.write().remove_file(file); return; }; @@ -41,47 +50,52 @@ impl Server { // Parse imports OUTSIDE the lock to minimize lock hold time. // This is important for responsiveness when parsing large files. 
- let import_resolution = ImportResolution::new(path, &import_roots); + let import_resolution = ImportResolution::new(path.as_ref(), &import_roots); let entries = import_resolution.parse_entries(&doc); // Now acquire the write lock and do the quick data structure update - import_graph.write().update_file_with_entries(path, entries); + import_graph.write().update_file_with_entries(file, entries); } - /// Schedule diagnostics for currently-open files that import `path`. - pub(super) fn schedule_diagnostics_for_open_importers(&self, path: &CanonicalPath) { - let importers = self.import_graph.read().transitive_importers(path); + /// Schedule diagnostics for currently-open files that import `file`. + pub(super) fn schedule_diagnostics_for_open_importers(&self, file: FileId) { + let importers = self.import_graph.read().transitive_importers(file); for importer in importers { - if self.documents.is_open(&importer) { - self.schedule_diagnostics(&importer); + if self.documents.is_open_file(importer) { + self.schedule_diagnostics_file(importer); } } } - /// Schedule diagnostics computation for a document. - /// - /// Diagnostics are computed asynchronously with debouncing. - pub(super) fn schedule_diagnostics(&self, path: &CanonicalPath) { - let Some(doc) = self.documents.get(path) else { + /// Schedule diagnostics computation for a file. 
+ pub(super) fn schedule_diagnostics_file(&self, file: FileId) { + let resolver = self.path_store.resolver(); + let request = resolver.path(file).and_then(|path| { + let doc = self.documents.get(path.as_ref())?; + let (enable_lint, import_roots) = { + let config = self.config.read(); + ( + config.lint_diagnostics_enabled(), + effective_import_roots( + path.as_path(), + &config.jpath, + config.resolve_paths_with_tanka, + ), + ) + }; + Some(( + path.as_ref().clone(), + doc.text().to_string(), + doc.version(), + enable_lint, + import_roots, + )) + }); + let Some((path, text, version, enable_lint, import_roots)) = request else { return; }; - let (enable_lint, import_roots) = { - let config = self.config.read(); - ( - config.lint_diagnostics_enabled(), - effective_import_roots( - path.as_path(), - &config.jpath, - config.resolve_paths_with_tanka, - ), - ) - }; - let text = doc.text().to_string(); - let version = doc.version(); - drop(doc); // Release the borrow before scheduling - self.diagnostics - .schedule(path.clone(), text, version, enable_lint, import_roots); + .schedule(path, text, version, enable_lint, import_roots); } } diff --git a/crates/jrsonnet-lsp/src/server/notifications.rs b/crates/jrsonnet-lsp/src/server/notifications.rs index d0f3719d..4c802213 100644 --- a/crates/jrsonnet-lsp/src/server/notifications.rs +++ b/crates/jrsonnet-lsp/src/server/notifications.rs @@ -68,18 +68,19 @@ impl Server { let text = params.text_document.text; let version = DocVersion::new(params.text_document.version); + let file = self.path_store.intern(&path); self.documents.open(path.clone(), text, version); self.documents.refresh_semantic_artifacts(&path); // Invalidate type cache for this file and all files that depend on it - self.invalidate_type_cache_with_dependents(&path); + self.invalidate_type_cache_with_dependents(file); // Update import graph - self.update_import_graph(&path); + self.update_import_graph(file); // Publish diagnostics - self.schedule_diagnostics(&path); + 
self.schedule_diagnostics_file(file); } /// Handle textDocument/didChange notification. @@ -93,6 +94,7 @@ impl Server { }; let version = DocVersion::new(params.text_document.version); + let file = self.path_store.intern(&path); // Process each change (INCREMENTAL sync may send multiple changes) for change in params.content_changes { @@ -114,13 +116,13 @@ impl Server { self.documents.refresh_semantic_artifacts(&path); // Invalidate type cache for this file and all files that depend on it - self.invalidate_type_cache_with_dependents(&path); + self.invalidate_type_cache_with_dependents(file); // Update import graph (imports may have changed) - self.update_import_graph(&path); + self.update_import_graph(file); // Publish diagnostics - self.schedule_diagnostics(&path); + self.schedule_diagnostics_file(file); } /// Handle textDocument/didClose notification. @@ -132,17 +134,18 @@ impl Server { warn!("Could not convert URI to path: {}", uri.as_str()); return Ok(()); }; + let file = self.path_store.intern(&path); self.documents.close(&path); // Invalidate type cache for this file and all files that depend on it // (dependents may have cached types based on this file's exports) - self.invalidate_type_cache_with_dependents(&path); + self.invalidate_type_cache_with_dependents(file); // Keep import graph semantics for closed documents by re-indexing from // cached/disk content instead of dropping the file node. 
- self.update_import_graph(&path); - self.schedule_diagnostics_for_open_importers(&path); + self.update_import_graph(file); + self.schedule_diagnostics_for_open_importers(file); // Clear diagnostics for closed document self.send_notification::(lsp_types::PublishDiagnosticsParams { @@ -163,6 +166,7 @@ impl Server { warn!("Could not convert URI to path: {}", uri.as_str()); return; }; + let file = self.path_store.intern(&path); if let Some(text) = params.text { let Some(doc) = self.documents.get(&path) else { @@ -177,12 +181,12 @@ impl Server { } } - self.invalidate_type_cache_with_dependents(&path); - self.update_import_graph(&path); - if self.documents.is_open(&path) { - self.schedule_diagnostics(&path); + self.invalidate_type_cache_with_dependents(file); + self.update_import_graph(file); + if self.documents.is_open_file(file) { + self.schedule_diagnostics_file(file); } - self.schedule_diagnostics_for_open_importers(&path); + self.schedule_diagnostics_for_open_importers(file); } /// Handle workspace/didChangeConfiguration notification. @@ -232,14 +236,14 @@ impl Server { // Import resolution and cached file types depend on jpath/tanka settings. self.type_cache.write().clear(); - for path in self.tracked_paths_for_reindex() { - self.update_import_graph(&path); + for file in self.tracked_files_for_reindex() { + self.update_import_graph(file); } } if runtime_config_changed || diagnostics_config_changed { - for path in self.documents.open_paths() { - self.schedule_diagnostics(&path); + for file in self.documents.open_files() { + self.schedule_diagnostics_file(file); } } @@ -257,27 +261,28 @@ impl Server { /// This keeps import graph and type cache up to date for files that change on disk /// while not being open in the editor. 
pub(super) fn on_did_change_watched_files(&self, params: DidChangeWatchedFilesParams) { - let mut changed_paths = Vec::new(); + let mut changed_files = Vec::new(); let mut requires_global_reindex = false; for change in params.changes { let Ok(path) = CanonicalPath::from_uri(&change.uri) else { continue; }; + let file = self.path_store.intern(&path); - self.invalidate_type_cache_with_dependents(&path); + self.invalidate_type_cache_with_dependents(file); match change.typ { FileChangeType::DELETED => { self.documents.remove_closed(&path); - self.import_graph.write().remove_file(&path); + self.import_graph.write().remove_file(file); requires_global_reindex = true; } FileChangeType::CHANGED | FileChangeType::CREATED => { if !self.documents.is_open(&path) { self.documents.refresh_closed_from_disk(&path); } - self.update_import_graph(&path); + self.update_import_graph(file); if change.typ == FileChangeType::CREATED { requires_global_reindex = true; } @@ -285,21 +290,20 @@ impl Server { _ => {} } - changed_paths.push(path); + changed_files.push(file); } if requires_global_reindex { - for path in self.tracked_paths_for_reindex() { - self.update_import_graph(&path); + for file in self.tracked_files_for_reindex() { + self.update_import_graph(file); } } - normalize_paths(&mut changed_paths); - for path in changed_paths { - if self.documents.is_open(&path) { - self.schedule_diagnostics(&path); + for file in unique_files(changed_files) { + if self.documents.is_open_file(file) { + self.schedule_diagnostics_file(file); } - self.schedule_diagnostics_for_open_importers(&path); + self.schedule_diagnostics_for_open_importers(file); } } } diff --git a/crates/jrsonnet-lsp/tests/cross_file_tests.rs b/crates/jrsonnet-lsp/tests/cross_file_tests.rs index b69f613e..ebbe0486 100644 --- a/crates/jrsonnet-lsp/tests/cross_file_tests.rs +++ b/crates/jrsonnet-lsp/tests/cross_file_tests.rs @@ -9,7 +9,7 @@ use std::{ sync::Arc, }; -use jrsonnet_lsp_document::{CanonicalPath, DocVersion, Document, 
PathStore}; +use jrsonnet_lsp_document::{CanonicalPath, DocVersion, Document, FileId, PathStore}; use jrsonnet_lsp_import::ImportGraph; use jrsonnet_lsp_inference::{analyze_and_cache, new_shared_cache, TypeAnalysis, TypeCache}; use jrsonnet_lsp_types::{GlobalTy, GlobalTyStore, Ty}; @@ -42,6 +42,26 @@ fn make_resolver(base_dir: &Path) -> impl Fn(&str) -> Option<CanonicalPath> + '_ } } +fn cache_update(cache: &mut TypeCache, file: FileId, ty: GlobalTy, version: i32) { + cache.update(file, ty, version); +} + +fn cache_get(cache: &TypeCache, file: FileId) -> Option<GlobalTy> { + cache.get(file) +} + +fn cache_is_up_to_date(cache: &TypeCache, file: FileId, version: i32) -> bool { + cache.is_up_to_date(file, version) +} + +fn cache_invalidate(cache: &mut TypeCache, file: FileId) { + cache.invalidate(file); +} + +fn cache_invalidate_many(cache: &mut TypeCache, files: impl IntoIterator<Item = FileId>) { + cache.invalidate_many(files); +} + mod import_graph_tests { use super::*; @@ -86,21 +106,22 @@ let content = fs::read_to_string(file).unwrap(); let doc = Document::new(content, DocVersion::new(1)); let path = canonical_path(file); - graph.update_file(&path, &doc, &resolver); + graph.update_file(graph.intern(&path), &doc, &resolver); } // file1 should have direct imports: file2 let path1 = canonical_path(&file1); - let imports1 = graph.imports(&path1); + let imports1 = graph.imports(graph.intern(&path1)); assert_eq!(imports1.len(), 1, "file1 should import exactly one file"); // file6 should have no imports let path6 = canonical_path(&file6); - let imports6 = graph.imports(&path6); + let imports6 = graph.imports(graph.intern(&path6)); assert!(imports6.is_empty(), "file6 should have no imports"); // Transitive importers of file6 should include file1-5 - let importers = graph.transitive_importers(&path6); + let file6 = graph.file(&path6).expect("file6 should be interned"); + let importers = graph.transitive_importers(file6); assert!( importers.len() >= 5, "file6 should have at least 5 
transitive importers" @@ -146,12 +167,12 @@ mod import_graph_tests { let content = fs::read_to_string(file).unwrap(); let doc = Document::new(content, DocVersion::new(1)); let path = canonical_path(file); - graph.update_file(&path, &doc, &resolver); + graph.update_file(graph.intern(&path), &doc, &resolver); } // A should import B and C let path_a = canonical_path(&file_a); - let imports_a = graph.imports(&path_a); + let imports_a = graph.imports(graph.intern(&path_a)); assert_eq!( imports_a.len(), 2, @@ -160,7 +181,8 @@ mod import_graph_tests { // D should be imported by both B and C let path_d = canonical_path(&file_d); - let importers_d = graph.transitive_importers(&path_d); + let file_d = graph.file(&path_d).expect("d should be interned"); + let importers_d = graph.transitive_importers(file_d); // D's transitive importers: B, C, A assert_eq!( importers_d.len(), @@ -188,21 +210,21 @@ mod import_graph_tests { let lib_content = fs::read_to_string(&lib).unwrap(); let lib_doc = Document::new(lib_content, DocVersion::new(1)); let lib_path = canonical_path(&lib); - graph.update_file(&lib_path, &lib_doc, &resolver); + graph.update_file(graph.intern(&lib_path), &lib_doc, &resolver); let main_content = fs::read_to_string(&main).unwrap(); let main_doc = Document::new(main_content, DocVersion::new(1)); let main_path = canonical_path(&main); - graph.update_file(&main_path, &main_doc, &resolver); + graph.update_file(graph.intern(&main_path), &main_doc, &resolver); // Verify import relationship - assert_eq!(graph.imports(&main_path).len(), 1); + assert_eq!(graph.imports(graph.intern(&main_path)).len(), 1); // Remove lib - graph.remove_file(&lib_path); + graph.remove_file(graph.intern(&lib_path)); // Graph should handle missing targets gracefully - let imports = graph.imports(&main_path); + let imports = graph.imports(graph.intern(&main_path)); // main still imports lib (by path), even if lib is removed from graph assert_eq!(imports.len(), 1); } @@ -237,12 +259,13 @@ mod 
import_graph_tests { let content = fs::read_to_string(file).unwrap(); let doc = Document::new(content, DocVersion::new(1)); let path = canonical_path(file); - graph.update_file(&path, &doc, &resolver); + graph.update_file(graph.intern(&path), &doc, &resolver); } // shared should be imported by user1, user2, user3 let shared_path = canonical_path(&shared); - let importers = graph.transitive_importers(&shared_path); + let shared = graph.file(&shared_path).expect("shared should be interned"); + let importers = graph.transitive_importers(shared); assert_eq!(importers.len(), 3, "shared should have 3 importers"); } } @@ -263,16 +286,19 @@ mod type_cache_tests { let path1 = canonical_path(&file1); let path2 = canonical_path(&file2); let path3 = canonical_path(&file3); + let file1_id = cache.intern(&path1); + let file2_id = cache.intern(&path2); + let file3_id = cache.intern(&path3); // Cache different types - cache.update(&path1, GlobalTy::NUMBER, 1); - cache.update(&path2, GlobalTy::STRING, 1); - cache.update(&path3, GlobalTy::BOOL, 1); + cache_update(&mut cache, file1_id, GlobalTy::NUMBER, 1); + cache_update(&mut cache, file2_id, GlobalTy::STRING, 1); + cache_update(&mut cache, file3_id, GlobalTy::BOOL, 1); // Retrieve and verify - assert_eq!(cache.get(&path1), Some(GlobalTy::NUMBER)); - assert_eq!(cache.get(&path2), Some(GlobalTy::STRING)); - assert_eq!(cache.get(&path3), Some(GlobalTy::BOOL)); + assert_eq!(cache_get(&cache, file1_id), Some(GlobalTy::NUMBER)); + assert_eq!(cache_get(&cache, file2_id), Some(GlobalTy::STRING)); + assert_eq!(cache_get(&cache, file3_id), Some(GlobalTy::BOOL)); } #[test] @@ -283,16 +309,17 @@ mod type_cache_tests { let file = write_file(&tmp, "test.jsonnet", "1"); let path = canonical_path(&file); + let file_id = cache.intern(&path); // Initial version - cache.update(&path, GlobalTy::NUMBER, 1); - assert!(cache.is_up_to_date(&path, 1)); - assert!(!cache.is_up_to_date(&path, 2)); + cache_update(&mut cache, file_id, GlobalTy::NUMBER, 1); + 
assert!(cache_is_up_to_date(&cache, file_id, 1)); + assert!(!cache_is_up_to_date(&cache, file_id, 2)); // Update version - cache.update(&path, GlobalTy::STRING, 2); - assert!(!cache.is_up_to_date(&path, 1)); - assert!(cache.is_up_to_date(&path, 2)); + cache_update(&mut cache, file_id, GlobalTy::STRING, 2); + assert!(!cache_is_up_to_date(&cache, file_id, 1)); + assert!(cache_is_up_to_date(&cache, file_id, 2)); } #[test] @@ -308,19 +335,22 @@ mod type_cache_tests { let path1 = canonical_path(&file1); let path2 = canonical_path(&file2); let path3 = canonical_path(&file3); + let file1_id = cache.intern(&path1); + let file2_id = cache.intern(&path2); + let file3_id = cache.intern(&path3); - cache.update(&path1, GlobalTy::NUMBER, 1); - cache.update(&path2, GlobalTy::NUMBER, 1); - cache.update(&path3, GlobalTy::NUMBER, 1); + cache_update(&mut cache, file1_id, GlobalTy::NUMBER, 1); + cache_update(&mut cache, file2_id, GlobalTy::NUMBER, 1); + cache_update(&mut cache, file3_id, GlobalTy::NUMBER, 1); assert_eq!(cache.len(), 3); // Invalidate one - cache.invalidate(&path2); + cache_invalidate(&mut cache, file2_id); assert_eq!(cache.len(), 2); - cache.get(&path1).expect("path1 should still be cached"); - assert_eq!(cache.get(&path2), None); - cache.get(&path3).expect("path3 should still be cached"); + cache_get(&cache, file1_id).expect("path1 should still be cached"); + assert_eq!(cache_get(&cache, file2_id), None); + cache_get(&cache, file3_id).expect("path3 should still be cached"); } #[test] @@ -335,16 +365,17 @@ mod type_cache_tests { canonical_path(&file) }) .collect(); + let file_ids: Vec<_> = files.iter().map(|path| cache.intern(path)).collect(); // Cache all - for path in &files { - cache.update(path, GlobalTy::NUMBER, 1); + for file in &file_ids { + cache_update(&mut cache, *file, GlobalTy::NUMBER, 1); } assert_eq!(cache.len(), 10); // Invalidate half - let to_invalidate: Vec<_> = files.iter().step_by(2).cloned().collect(); - cache.invalidate_many(to_invalidate); + let 
to_invalidate: Vec<_> = file_ids.iter().step_by(2).copied().collect(); + cache_invalidate_many(&mut cache, to_invalidate); // Should have 5 remaining assert_eq!(cache.len(), 5); @@ -371,7 +402,11 @@ mod cross_file_type_tests { assert_eq!(ty, Ty::NUMBER); // Should be cached - let cached = cache.read().get(&path); + let cached = { + let read_cache = cache.read(); + let file = read_cache.file(&path).expect("path should be interned"); + read_cache.get(file) + }; assert_eq!(cached, Some(GlobalTy::NUMBER)); } @@ -490,28 +525,33 @@ mod transitive_update_tests { let content = fs::read_to_string(file).unwrap(); let doc = Document::new(content, DocVersion::new(1)); let path = canonical_path(file); - graph.update_file(&path, &doc, &resolver); - cache.update(&path, GlobalTy::ANY, 1); // Placeholder type + graph.update_file(graph.intern(&path), &doc, &resolver); + let file = cache.intern(&path); + cache_update(&mut cache, file, GlobalTy::ANY, 1); // Placeholder type } let base_path = canonical_path(&base); let mid_path = canonical_path(&mid); let top_path = canonical_path(&top_file); + let base_file = cache.intern(&base_path); + let mid_file = cache.intern(&mid_path); + let top_file = cache.intern(&top_path); // All should be cached - cache.get(&base_path).expect("base should be cached"); - cache.get(&mid_path).expect("mid should be cached"); - cache.get(&top_path).expect("top should be cached"); + cache_get(&cache, base_file).expect("base should be cached"); + cache_get(&cache, mid_file).expect("mid should be cached"); + cache_get(&cache, top_file).expect("top should be cached"); // Simulate base file change - need to invalidate transitive importers - let importers = graph.transitive_importers(&base_path); - cache.invalidate(&base_path); - cache.invalidate_many(importers); + let graph_base = graph.file(&base_path).expect("base should be interned"); + let importers = graph.transitive_importers(graph_base); + cache_invalidate(&mut cache, base_file); + cache_invalidate_many(&mut 
cache, importers); // All should be invalidated - assert_eq!(cache.get(&base_path), None); - assert_eq!(cache.get(&mid_path), None); - assert_eq!(cache.get(&top_path), None); + assert_eq!(cache_get(&cache, base_file), None); + assert_eq!(cache_get(&cache, mid_file), None); + assert_eq!(cache_get(&cache, top_file), None); } #[test] @@ -534,22 +574,27 @@ mod transitive_update_tests { let content = fs::read_to_string(file).unwrap(); let doc = Document::new(content, DocVersion::new(1)); let path = canonical_path(file); - graph.update_file(&path, &doc, &resolver); - cache.update(&path, GlobalTy::ANY, 1); + graph.update_file(graph.intern(&path), &doc, &resolver); + let file = cache.intern(&path); + cache_update(&mut cache, file, GlobalTy::ANY, 1); } let lib1_path = canonical_path(&lib1); let lib2_path = canonical_path(&lib2); let main_path = canonical_path(&main); + let lib1_file = cache.intern(&lib1_path); + let lib2_file = cache.intern(&lib2_path); + let main_file = cache.intern(&main_path); // Change lib1 - should invalidate lib1 and main, but not lib2 - let importers = graph.transitive_importers(&lib1_path); - cache.invalidate(&lib1_path); - cache.invalidate_many(importers); - - assert_eq!(cache.get(&lib1_path), None); - assert_eq!(cache.get(&main_path), None); - cache.get(&lib2_path).expect("lib2 should be unchanged"); + let graph_lib1 = graph.file(&lib1_path).expect("lib1 should be interned"); + let importers = graph.transitive_importers(graph_lib1); + cache_invalidate(&mut cache, lib1_file); + cache_invalidate_many(&mut cache, importers); + + assert_eq!(cache_get(&cache, lib1_file), None); + assert_eq!(cache_get(&cache, main_file), None); + cache_get(&cache, lib2_file).expect("lib2 should be unchanged"); } #[test] @@ -580,22 +625,28 @@ mod transitive_update_tests { let content = fs::read_to_string(file).unwrap(); let doc = Document::new(content, DocVersion::new(1)); let path = canonical_path(file); - graph.update_file(&path, &doc, &resolver); - cache.update(&path, 
GlobalTy::ANY, 1); + graph.update_file(graph.intern(&path), &doc, &resolver); + let file = cache.intern(&path); + cache_update(&mut cache, file, GlobalTy::ANY, 1); } let d_path = canonical_path(&d); + let d_file = cache.intern(&d_path); + let b_file = cache.intern(&canonical_path(&b)); + let c_file = cache.intern(&canonical_path(&c)); + let a_file = cache.intern(&canonical_path(&a)); // Change D - let importers = graph.transitive_importers(&d_path); - cache.invalidate(&d_path); - cache.invalidate_many(importers); + let graph_d = graph.file(&d_path).expect("d should be interned"); + let importers = graph.transitive_importers(graph_d); + cache_invalidate(&mut cache, d_file); + cache_invalidate_many(&mut cache, importers); // All should be invalidated (D is transitively imported by all) - assert!(cache.get(&canonical_path(&d)).is_none()); - assert!(cache.get(&canonical_path(&b)).is_none()); - assert!(cache.get(&canonical_path(&c)).is_none()); - assert!(cache.get(&canonical_path(&a)).is_none()); + assert!(cache_get(&cache, d_file).is_none()); + assert!(cache_get(&cache, b_file).is_none()); + assert!(cache_get(&cache, c_file).is_none()); + assert!(cache_get(&cache, a_file).is_none()); } } From 0311f936d1ffd12f59eeb35f3f1d870b558394f8 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Fri, 13 Feb 2026 15:11:12 +0000 Subject: [PATCH 104/210] refactor(lsp): borrow interned paths and remove display shims - switch path resolution to borrowed InternedPath guards, keeping path access zero-copy and FileId-backed. - remove server-owned PathStore and route interning/path resolution through DocumentManager so subsystems share one interner identity. - key async diagnostics scheduling and cache updates by FileId, resolving canonical paths only at request/response boundaries. - drop CanonicalPath Deref and remove manual .display() formatting in LSP crates/tests so typed Display handling is enforced. 
--- crates/jrsonnet-lsp-document/src/file_ids.rs | 103 ++++++++++++++--- crates/jrsonnet-lsp-document/src/lib.rs | 2 +- crates/jrsonnet-lsp-document/src/types.rs | 13 ++- .../jrsonnet-lsp-handlers/src/formatting.rs | 6 +- crates/jrsonnet-lsp-handlers/src/rename.rs | 6 +- crates/jrsonnet-lsp-import/src/graph.rs | 23 ++-- crates/jrsonnet-lsp-inference/src/manager.rs | 29 +++-- crates/jrsonnet-lsp-inference/src/provider.rs | 4 +- .../jrsonnet-lsp-inference/src/type_cache.rs | 4 +- .../src/scenario_script/parse.rs | 4 +- crates/jrsonnet-lsp/src/async_diagnostics.rs | 109 ++++++++++-------- crates/jrsonnet-lsp/src/server.rs | 20 +--- .../jrsonnet-lsp/src/server/async_requests.rs | 12 +- .../jrsonnet-lsp/src/server/import_graph.rs | 67 +++++------ .../jrsonnet-lsp/src/server/notifications.rs | 10 +- .../jrsonnet-lsp/tests/docs_lsp_examples.rs | 6 +- 16 files changed, 242 insertions(+), 176 deletions(-) diff --git a/crates/jrsonnet-lsp-document/src/file_ids.rs b/crates/jrsonnet-lsp-document/src/file_ids.rs index 54ad237e..7f05b9e0 100644 --- a/crates/jrsonnet-lsp-document/src/file_ids.rs +++ b/crates/jrsonnet-lsp-document/src/file_ids.rs @@ -1,8 +1,8 @@ //! File identity and path interning utilities. -use std::{collections::HashMap, path::PathBuf, sync::Arc}; +use std::{collections::HashMap, ops::Deref, path::PathBuf, sync::Arc}; -use parking_lot::RwLock; +use parking_lot::{RwLock, RwLockReadGuard}; use crate::CanonicalPath; @@ -30,11 +30,17 @@ impl FileId { } } +impl std::fmt::Display for FileId { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.0) + } +} + /// Bidirectional interner between canonical paths and stable file identifiers. 
#[derive(Debug, Clone, Default)] struct PathInterner { path_to_id: HashMap<PathBuf, FileId>, - id_to_path: Vec<Arc<CanonicalPath>>, + id_to_path: Vec<CanonicalPath>, } impl PathInterner { @@ -47,7 +53,7 @@ let raw = u32::try_from(self.id_to_path.len()).expect("too many interned file paths"); let id = FileId::from_raw(raw); self.path_to_id.insert(path.as_path().to_path_buf(), id); - self.id_to_path.push(Arc::new(path.clone())); + self.id_to_path.push(path.clone()); id } @@ -59,8 +65,37 @@ /// Resolve an interned id to its canonical path. #[must_use] - pub fn path(&self, file: FileId) -> Option<Arc<CanonicalPath>> { - self.id_to_path.get(file.as_usize()).cloned() + pub fn path(&self, file: FileId) -> Option<&CanonicalPath> { + self.id_to_path.get(file.as_usize()) + } +} + +/// Borrowed view of an interned canonical path. +pub struct InternedPath<'a> { + guard: RwLockReadGuard<'a, PathInterner>, + file: FileId, +} + +impl InternedPath<'_> { + #[must_use] + pub fn as_canonical_path(&self) -> &CanonicalPath { + self + } +} + +impl std::fmt::Debug for InternedPath<'_> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{} ({})", self.as_canonical_path(), self.file) + } +} + +impl Deref for InternedPath<'_> { + type Target = CanonicalPath; + + fn deref(&self) -> &Self::Target { + self.guard + .path(self.file) + .expect("interned path missing for interned file id") + } } @@ -77,10 +112,12 @@ self.interner.read().file(path) } - /// Resolve an interned file identifier to its canonical path. + /// Borrow an interned file identifier's canonical path. #[must_use] - pub fn path(&self, file: FileId) -> Option<Arc<CanonicalPath>> { - self.interner.read().path(file) + pub fn path(&self, file: FileId) -> Option<InternedPath<'_>> { + let guard = self.interner.read(); + guard.path(file)?; + Some(InternedPath { guard, file }) + } } @@ -110,6 +147,20 @@ interner: Arc::clone(&self.interner), } } + + /// Borrow an interned file identifier's canonical path. 
+ #[must_use] + pub fn path(&self, file: FileId) -> Option> { + let guard = self.interner.read(); + guard.path(file)?; + Some(InternedPath { guard, file }) + } + + /// Returns `true` when two stores share the same underlying interner. + #[must_use] + pub fn shares_interner_with(&self, other: &Self) -> bool { + Arc::ptr_eq(&self.interner, &other.interner) + } } #[cfg(test)] @@ -132,7 +183,7 @@ mod tests { assert_eq!(first, second); assert_eq!(interner.file(&alpha), Some(first)); - assert_eq!(interner.path(first), Some(Arc::new(alpha))); + assert_eq!(interner.path(first), Some(&alpha)); } #[test] @@ -146,8 +197,8 @@ mod tests { assert_eq!(alpha_id, FileId::from_raw(0)); assert_eq!(beta_id, FileId::from_raw(1)); - assert_eq!(interner.path(alpha_id), Some(Arc::new(alpha))); - assert_eq!(interner.path(beta_id), Some(Arc::new(beta))); + assert_eq!(interner.path(alpha_id), Some(&alpha)); + assert_eq!(interner.path(beta_id), Some(&beta)); } #[test] @@ -168,19 +219,35 @@ mod tests { let id = store.intern(&alpha); let resolver = other.resolver(); assert_eq!(resolver.file(&alpha), Some(id)); - assert_eq!(resolver.path(id), Some(Arc::new(alpha))); + assert_eq!(resolver.path(id).as_deref(), Some(&alpha)); } #[test] - fn test_path_store_path_returns_shared_arc() { + fn test_path_store_path_borrows_without_cloning() { let store = PathStore::new(); let resolver = store.resolver(); let alpha = path("alpha"); let id = store.intern(&alpha); - let first = resolver.path(id).expect("path should exist"); - let second = resolver.path(id).expect("path should exist"); - assert!(Arc::ptr_eq(&first, &second)); - assert_eq!(first.as_ref(), &alpha); + let first = resolver + .path(id) + .map(|path| std::ptr::from_ref(path.as_canonical_path())) + .expect("path should exist"); + let second = resolver + .path(id) + .map(|path| std::ptr::from_ref(path.as_canonical_path())) + .expect("path should exist"); + assert_eq!(first, second); + assert_eq!(resolver.path(id).as_deref(), Some(&alpha)); + } + + 
#[test] + fn test_path_store_shares_interner_identity() { + let store = PathStore::new(); + let clone = store.clone(); + let other = PathStore::new(); + + assert!(store.shares_interner_with(&clone)); + assert!(!store.shares_interner_with(&other)); } } diff --git a/crates/jrsonnet-lsp-document/src/lib.rs b/crates/jrsonnet-lsp-document/src/lib.rs index 1724c460..872f1427 100644 --- a/crates/jrsonnet-lsp-document/src/lib.rs +++ b/crates/jrsonnet-lsp-document/src/lib.rs @@ -26,7 +26,7 @@ pub use document::{Document, ParsedDocument, SharedDocument, SyntaxError}; pub use error::{ is_valid_jsonnet_identifier, validate_identifier, HandlerResult, LspError, LspResult, }; -pub use file_ids::{FileId, PathResolver, PathStore}; +pub use file_ids::{FileId, InternedPath, PathResolver, PathStore}; pub use position::LineIndex; pub use types::{ ByteOffset, CanonicalPath, CharOffset, DocVersion, Line, LspPosition, LspRange, SymbolName, diff --git a/crates/jrsonnet-lsp-document/src/types.rs b/crates/jrsonnet-lsp-document/src/types.rs index 0669cb02..8f8f46cc 100644 --- a/crates/jrsonnet-lsp-document/src/types.rs +++ b/crates/jrsonnet-lsp-document/src/types.rs @@ -117,10 +117,15 @@ impl From for lsp_types::Range { } /// Normalized canonical path as cache key. -#[derive(Debug, Clone, PartialEq, Eq, Display, Deref)] -#[display("{}", _0.display())] +#[derive(Debug, Clone, PartialEq, Eq)] pub struct CanonicalPath(PathBuf); +impl std::fmt::Display for CanonicalPath { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.write_str(&self.0.to_string_lossy()) + } +} + impl CanonicalPath { /// Create a new canonical path from an already-canonicalized path. #[must_use] @@ -160,8 +165,8 @@ impl CanonicalPath { /// Returns `Err(LspError::PathToUri)` when the path cannot be converted /// into a file URI, or `Err(LspError::InvalidUri)` if URI parsing fails. 
pub fn to_uri(&self) -> LspResult { - let url = Url::from_file_path(&self.0) - .map_err(|()| LspError::PathToUri(self.0.display().to_string()))?; + let url = + Url::from_file_path(&self.0).map_err(|()| LspError::PathToUri(self.to_string()))?; url.as_str() .parse() .map_err(|_| LspError::InvalidUri(url.to_string())) diff --git a/crates/jrsonnet-lsp-handlers/src/formatting.rs b/crates/jrsonnet-lsp-handlers/src/formatting.rs index 4a76fe4f..d4ff13e4 100644 --- a/crates/jrsonnet-lsp-handlers/src/formatting.rs +++ b/crates/jrsonnet-lsp-handlers/src/formatting.rs @@ -182,7 +182,7 @@ impl fmt::Display for FormatterError { Self::MissingWorkspaceRoot { document_path } => write!( formatter, "no workspace root available for document {}", - document_path.display() + document_path.to_string_lossy() ), Self::Spawn { program, source } => { write!(formatter, "failed to spawn formatter {program}: {source}") @@ -566,8 +566,8 @@ mod tests { let output = format!( "cwd={}\narg1=-stdio\narg2={}\n{input}", - root.display(), - document_path.display() + root.to_string_lossy(), + document_path.to_string_lossy() ); assert_eq!( format_document_with_config(input, &config, context), diff --git a/crates/jrsonnet-lsp-handlers/src/rename.rs b/crates/jrsonnet-lsp-handlers/src/rename.rs index cf12c041..07d4f459 100644 --- a/crates/jrsonnet-lsp-handlers/src/rename.rs +++ b/crates/jrsonnet-lsp-handlers/src/rename.rs @@ -266,8 +266,10 @@ fn find_references_in_importer( use jrsonnet_rowan_parser::nodes::{ExprBase, ExprField}; let importer_path = import_graph.path(importer_file)?; - let doc = manager.get_document(importer_path.as_ref())?; - let uri = importer_path.to_uri().ok()?; + let path = importer_path.as_canonical_path(); + let doc = manager.get_document(path)?; + let uri = path.to_uri().ok()?; + drop(importer_path); let text = doc.text(); let line_index = doc.line_index(); let ast = doc.ast(); diff --git a/crates/jrsonnet-lsp-import/src/graph.rs b/crates/jrsonnet-lsp-import/src/graph.rs index 
7506c1f4..b176a86c 100644 --- a/crates/jrsonnet-lsp-import/src/graph.rs +++ b/crates/jrsonnet-lsp-import/src/graph.rs @@ -91,9 +91,9 @@ self.resolver.file(path) } - /// Resolve an interned file identifier to a path. + /// Borrow an interned file identifier's canonical path. #[must_use] - pub fn path(&self, file: FileId) -> Option<Arc<CanonicalPath>> { + pub fn path(&self, file: FileId) -> Option<InternedPath<'_>> { self.resolver.path(file) } @@ -173,13 +173,6 @@ .unwrap_or_default() } - #[must_use] - fn path_for_id(&self, file_id: FileId) -> Option<CanonicalPath> { - self.resolver - .path(file_id) - .map(|path| path.as_ref().clone()) - } - #[must_use] fn resolved_entry_id(&self, entry: &ImportEntry) -> Option<FileId> { entry @@ -248,12 +241,6 @@ self.imports.keys().copied() } - /// Get all tracked file paths. - pub fn all_paths(&self) -> impl Iterator<Item = CanonicalPath> + '_ { - self.all_files() - .filter_map(|file_id| self.path_for_id(file_id)) - } - /// Compute a topological ordering of files based on import dependencies. /// /// Returns files in an order where each file comes after all files it imports. @@ -747,7 +734,11 @@ fn graph_paths(graph: &ImportGraph, files: Vec<FileId>) -> Vec<CanonicalPath> { files .into_iter() - .filter_map(|file| graph.path(file).map(|path| path.as_ref().clone())) + .filter_map(|file| { + graph + .path(file) + .map(|path| path.as_canonical_path().clone()) + }) .collect() } diff --git a/crates/jrsonnet-lsp-inference/src/manager.rs b/crates/jrsonnet-lsp-inference/src/manager.rs index 38db132a..5a375c4e 100644 --- a/crates/jrsonnet-lsp-inference/src/manager.rs +++ b/crates/jrsonnet-lsp-inference/src/manager.rs @@ -57,12 +57,6 @@ pub struct DocumentManager { global_types: Arc<GlobalTyStore>, } -impl Default for DocumentManager { - fn default() -> Self { - Self::new(Arc::new(GlobalTyStore::new()), PathStore::new()) - } -} - impl DocumentManager { /// Create a new document manager with a shared global type store. 
pub fn new(global_types: Arc, paths: PathStore) -> Self { @@ -105,6 +99,12 @@ impl DocumentManager { self.file_id(path).unwrap_or_else(|| self.intern_path(path)) } + /// Get or create the interned file id for `path`. + #[must_use] + pub fn intern(&self, path: &CanonicalPath) -> FileId { + self.file_id_or_intern(path) + } + /// Resolve a file path to an interned file identifier. #[must_use] pub fn file(&self, path: &CanonicalPath) -> Option { @@ -114,11 +114,11 @@ impl DocumentManager { fn path_for_id(&self, file_id: FileId) -> Option { self.resolver .path(file_id) - .map(|path| path.as_ref().clone()) + .map(|path| path.as_canonical_path().clone()) } - /// Resolve an interned file identifier to a path. - pub fn path(&self, file: FileId) -> Option> { + /// Borrow an interned file identifier's canonical path. + pub fn path(&self, file: FileId) -> Option> { self.resolver.path(file) } @@ -420,6 +420,17 @@ impl DocumentManager { .insert(file_id, CachedAnalysis { version, analysis }); } + /// Insert a precomputed analysis for `file@version` into the cache. + pub fn cache_analysis_file( + &self, + file: FileId, + version: DocVersion, + analysis: Arc, + ) { + self.analysis_cache + .insert(file, CachedAnalysis { version, analysis }); + } + /// Get cached semantic artifacts for a document, computing them if needed. /// /// Returns `None` if the document is not found. 
diff --git a/crates/jrsonnet-lsp-inference/src/provider.rs b/crates/jrsonnet-lsp-inference/src/provider.rs index b0102398..b37f93e7 100644 --- a/crates/jrsonnet-lsp-inference/src/provider.rs +++ b/crates/jrsonnet-lsp-inference/src/provider.rs @@ -111,12 +111,12 @@ impl TypeProvider { let Some(dep_path) = graph.path(dep_file) else { return; }; - let Some(doc) = doc_source.get_document(dep_path.as_ref()) else { + let Some(doc) = doc_source.get_document(dep_path.as_canonical_path()) else { return; }; let resolved_imports = resolved_imports_for(&graph, dep_file); analyze_and_cache_with_resolved_imports( - dep_path.as_ref(), + dep_path.as_canonical_path(), &doc, &self.type_cache, resolved_imports, diff --git a/crates/jrsonnet-lsp-inference/src/type_cache.rs b/crates/jrsonnet-lsp-inference/src/type_cache.rs index b92cc53b..7b996be9 100644 --- a/crates/jrsonnet-lsp-inference/src/type_cache.rs +++ b/crates/jrsonnet-lsp-inference/src/type_cache.rs @@ -88,9 +88,9 @@ impl TypeCache { self.resolver.file(path) } - /// Resolve an interned file identifier to a path. + /// Borrow an interned file identifier's canonical path. 
#[must_use] - pub fn path(&self, file: FileId) -> Option> { + pub fn path(&self, file: FileId) -> Option> { self.resolver.path(file) } diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_script/parse.rs b/crates/jrsonnet-lsp-scenario/src/scenario_script/parse.rs index bb65fe03..fc00ced1 100644 --- a/crates/jrsonnet-lsp-scenario/src/scenario_script/parse.rs +++ b/crates/jrsonnet-lsp-scenario/src/scenario_script/parse.rs @@ -69,7 +69,7 @@ mod tests { } fn main_uri(tmp: &tempfile::TempDir) -> String { - format!("file://{}", main_file(tmp).display()) + format!("file://{}", main_file(tmp).to_string_lossy()) } #[test] @@ -97,7 +97,7 @@ steps: let uri = main_uri(&tmp); let expected = Scenario::new(vec![ ScenarioStep::WriteFile(WriteFileStep { - path: main_file(&tmp).display().to_string(), + path: main_file(&tmp).to_string_lossy().into_owned(), text: "local name = 1;\nname".to_string(), }), ScenarioStep::Open(OpenStep { diff --git a/crates/jrsonnet-lsp/src/async_diagnostics.rs b/crates/jrsonnet-lsp/src/async_diagnostics.rs index 2c535eaf..ac8099ea 100644 --- a/crates/jrsonnet-lsp/src/async_diagnostics.rs +++ b/crates/jrsonnet-lsp/src/async_diagnostics.rs @@ -14,7 +14,7 @@ use std::{ }; use crossbeam_channel::{Receiver, Sender}; -use jrsonnet_lsp_document::{CanonicalPath, DocVersion, Document, FileId, PathStore}; +use jrsonnet_lsp_document::{CanonicalPath, DocVersion, Document, FileId}; use jrsonnet_lsp_import::{ImportGraph, ImportResolution}; use jrsonnet_lsp_inference::{ DocumentSource, SharedDocumentManager, SharedTypeCache, TypeProvider, @@ -37,8 +37,6 @@ const DEBOUNCE_DELAY_MS: u64 = 500; struct DiagnosticsRequest { /// Interned file identifier for sequence tracking. file_id: FileId, - /// Path of the document. - path: CanonicalPath, /// The document text. text: String, /// The document version. @@ -54,8 +52,6 @@ struct DiagnosticsRequest { /// Completed diagnostics result. #[derive(Debug)] pub struct DiagnosticsResult { - /// Path of the document. 
- pub path: CanonicalPath, /// The computed diagnostics params. pub params: lsp_types::PublishDiagnosticsParams, } @@ -73,12 +69,10 @@ pub struct DiagnosticsConfig { pub type_cache: SharedTypeCache, /// Shared global type store. pub global_types: Arc, - /// Shared file id/path store. - pub path_store: PathStore, } struct WorkerDocumentSource { - current_path: CanonicalPath, + current_file: FileId, current_doc: Document, documents: SharedDocumentManager, } @@ -92,7 +86,11 @@ struct WorkerRuntime { impl DocumentSource for WorkerDocumentSource { fn get_document(&self, path: &CanonicalPath) -> Option { - if path == &self.current_path { + if self + .documents + .file(path) + .is_some_and(|file| file == self.current_file) + { return Some(self.current_doc.clone()); } self.documents.get_document(path) @@ -111,8 +109,6 @@ pub struct AsyncDiagnostics { sequence: AtomicU64, /// Latest requested sequence per file (for debouncing). latest_sequences: Arc>>, - /// Shared file id/path store. - path_store: PathStore, /// Background thread handle. _thread_handle: thread::JoinHandle<()>, } @@ -124,7 +120,6 @@ impl AsyncDiagnostics { let (request_sender, request_receiver) = crossbeam_channel::unbounded(); let (result_sender, result_receiver) = crossbeam_channel::unbounded(); let latest_sequences = Arc::new(RwLock::new(FxHashMap::default())); - let path_store = config.path_store.clone(); let sequences_clone = Arc::clone(&latest_sequences); let thread_handle = thread::spawn(move || { @@ -141,7 +136,6 @@ impl AsyncDiagnostics { result_receiver, sequence: AtomicU64::new(0), latest_sequences, - path_store, _thread_handle: thread_handle, } } @@ -152,21 +146,19 @@ impl AsyncDiagnostics { /// within the debounce window, only the latest is processed. 
pub fn schedule( &self, - path: CanonicalPath, + file_id: FileId, text: String, version: DocVersion, enable_lint: bool, import_roots: Vec, ) { let sequence = self.sequence.fetch_add(1, Ordering::SeqCst); - let file_id = self.path_store.intern(&path); // Record this as the latest sequence for this path self.latest_sequences.write().insert(file_id, sequence); let request = DiagnosticsRequest { file_id, - path, text, version, enable_lint, @@ -203,8 +195,7 @@ impl AsyncDiagnostics { }; trace!( - "Diagnostics worker: received request for {} (seq={})", - request.path.as_path().display(), + "Diagnostics worker: received request (seq={})", request.sequence ); @@ -217,8 +208,7 @@ impl AsyncDiagnostics { if let Some(&latest) = sequences.get(&request.file_id) { if latest > request.sequence { trace!( - "Diagnostics worker: skipping stale request for {} (seq={}, latest={})", - request.path.as_path().display(), + "Diagnostics worker: skipping stale request (seq={}, latest={})", request.sequence, latest ); @@ -228,15 +218,18 @@ impl AsyncDiagnostics { } // Compute diagnostics + // Reconstruct the document in the worker thread + let document = Document::new(request.text, request.version); + let Some(path) = config.documents.path(request.file_id) else { + debug!("Diagnostics worker: path missing for interned file, skipping diagnostics"); + continue; + }; trace!( "Diagnostics worker: computing diagnostics for {}", - request.path.as_path().display() + path.as_canonical_path() ); - - // Reconstruct the document in the worker thread - let document = Document::new(request.text, request.version); let doc_source = WorkerDocumentSource { - current_path: request.path.clone(), + current_file: request.file_id, current_doc: document.clone(), documents: Arc::clone(&config.documents), }; @@ -245,12 +238,14 @@ impl AsyncDiagnostics { Arc::clone(&config.import_graph), Arc::clone(&config.global_types), ); - let analysis = Arc::new(provider.analyze(&request.path, &document, &doc_source)); - let 
import_resolution = ImportResolution::new(&request.path, &request.import_roots); + let analysis = + Arc::new(provider.analyze(path.as_canonical_path(), &document, &doc_source)); + let import_resolution = + ImportResolution::new(path.as_canonical_path(), &request.import_roots); let import_occurrences = import_resolution.parse_occurrences(&document); let Some(params) = handlers::publish_diagnostics_params( - &request.path, + path.as_canonical_path(), &document, request.enable_lint, config.evaluator.as_deref(), @@ -259,7 +254,7 @@ impl AsyncDiagnostics { ) else { debug!( "Diagnostics worker: failed to build URI for {}, skipping diagnostics", - request.path.as_path().display() + path.as_canonical_path() ); continue; }; @@ -270,8 +265,7 @@ impl AsyncDiagnostics { if let Some(&latest) = sequences.get(&request.file_id) { if latest > request.sequence { trace!( - "Diagnostics worker: discarding result for {} (seq={}, latest={})", - request.path.as_path().display(), + "Diagnostics worker: discarding stale result (seq={}, latest={})", request.sequence, latest ); @@ -281,17 +275,14 @@ impl AsyncDiagnostics { } // Reuse analysis work done for diagnostics in foreground requests. 
- config.documents.cache_analysis( - request.path.clone(), + config.documents.cache_analysis_file( + request.file_id, request.version, Arc::clone(&analysis), ); // Send result - let result = DiagnosticsResult { - path: request.path, - params, - }; + let result = DiagnosticsResult { params }; if results.send(result).is_err() { debug!("Diagnostics worker: result channel closed, exiting"); @@ -313,6 +304,8 @@ impl Drop for AsyncDiagnostics { mod tests { use std::time::Duration; + use jrsonnet_lsp_document::PathStore; + use super::*; fn test_config() -> DiagnosticsConfig { @@ -327,10 +320,9 @@ mod tests { import_graph: Arc::new(RwLock::new(ImportGraph::new(path_store.clone()))), type_cache: jrsonnet_lsp_inference::new_shared_cache( Arc::clone(&global_types), - path_store.clone(), + path_store, ), global_types, - path_store, } } @@ -340,13 +332,16 @@ mod tests { #[test] fn test_basic_diagnostics() { - let runner = AsyncDiagnostics::new(test_config()); + let config = test_config(); + let documents = Arc::clone(&config.documents); + let runner = AsyncDiagnostics::new(config); let path = test_path("test"); + let file = documents.intern(&path); let text = "{ a: 1 }".to_string(); let version = DocVersion::new(1); - runner.schedule(path.clone(), text, version, false, vec![]); + runner.schedule(file, text, version, false, vec![]); // Wait for result with timeout let result = runner @@ -354,21 +349,24 @@ mod tests { .recv_timeout(Duration::from_secs(2)) .expect("should receive result"); - assert_eq!(result.path, path); + assert_eq!(result.params.uri, path.to_uri().expect("valid URI")); assert!(result.params.diagnostics.is_empty()); } #[test] fn test_debouncing() { - let runner = AsyncDiagnostics::new(test_config()); + let config = test_config(); + let documents = Arc::clone(&config.documents); + let runner = AsyncDiagnostics::new(config); let path = test_path("test"); + let file = documents.intern(&path); // Schedule multiple requests rapidly for i in 0..5 { let text = 
format!("{{ a: {i} }}"); let version = DocVersion::new(i); - runner.schedule(path.clone(), text, version, false, vec![]); + runner.schedule(file, text, version, false, vec![]); } // Should only get one result (the last one) due to debouncing @@ -378,7 +376,7 @@ mod tests { .recv_timeout(Duration::from_secs(3)) .expect("should receive result"); - assert_eq!(result.path, path); + assert_eq!(result.params.uri, path.to_uri().expect("valid URI")); // Should not get more results immediately (other requests were debounced) runner @@ -389,18 +387,21 @@ mod tests { #[test] fn test_debouncing_with_equivalent_paths() { - let runner = AsyncDiagnostics::new(test_config()); + let config = test_config(); + let documents = Arc::clone(&config.documents); + let runner = AsyncDiagnostics::new(config); + let file = documents.intern(&test_path("test")); // Use distinct CanonicalPath instances with the same value. runner.schedule( - test_path("test"), + file, "{ a: 1 }".to_string(), DocVersion::new(1), false, vec![], ); runner.schedule( - test_path("test"), + file, "{ a: 2 }".to_string(), DocVersion::new(2), false, @@ -412,7 +413,10 @@ mod tests { .recv_timeout(Duration::from_secs(3)) .expect("should receive result"); - assert_eq!(result.path, test_path("test")); + assert_eq!( + result.params.uri, + test_path("test").to_uri().expect("valid URI") + ); // Should not get more results immediately (first request was debounced). 
runner @@ -423,13 +427,16 @@ mod tests { #[test] fn test_syntax_errors() { - let runner = AsyncDiagnostics::new(test_config()); + let config = test_config(); + let documents = Arc::clone(&config.documents); + let runner = AsyncDiagnostics::new(config); let path = test_path("test"); + let file = documents.intern(&path); let text = "{ a: }".to_string(); let version = DocVersion::new(1); - runner.schedule(path, text, version, false, vec![]); + runner.schedule(file, text, version, false, vec![]); let result = runner .results() diff --git a/crates/jrsonnet-lsp/src/server.rs b/crates/jrsonnet-lsp/src/server.rs index cbd89466..a029c23b 100644 --- a/crates/jrsonnet-lsp/src/server.rs +++ b/crates/jrsonnet-lsp/src/server.rs @@ -76,8 +76,6 @@ pub struct Server { documents: SharedDocumentManager, /// Import graph for cross-file references. import_graph: Arc>, - /// Shared file id/path store. - path_store: PathStore, /// Global type store shared across all analyses. global_types: Arc, /// Cross-file type cache for import resolution. 
@@ -138,7 +136,7 @@ impl Server { path_store.clone(), )); let import_graph = Arc::new(RwLock::new(ImportGraph::new(path_store.clone()))); - let type_cache = new_shared_cache(Arc::clone(&global_types), path_store.clone()); + let type_cache = new_shared_cache(Arc::clone(&global_types), path_store); let (request_response_sender, request_response_receiver) = crossbeam_channel::unbounded(); let inflight_requests = InflightRequests::new(connection.sender.clone()); let diagnostics = AsyncDiagnostics::new(DiagnosticsConfig { @@ -147,14 +145,12 @@ impl Server { import_graph: Arc::clone(&import_graph), type_cache: Arc::clone(&type_cache), global_types: Arc::clone(&global_types), - path_store: path_store.clone(), }); Self { connection, documents, import_graph, - path_store, type_cache, global_types, config: Arc::new(RwLock::new(ServerConfig::default())), @@ -224,7 +220,6 @@ impl Server { import_graph: Arc::clone(&self.import_graph), type_cache: Arc::clone(&self.type_cache), global_types: Arc::clone(&self.global_types), - path_store: self.path_store.clone(), }); } @@ -335,7 +330,6 @@ impl Server { let documents = Arc::clone(&self.documents); let import_graph = Arc::clone(&self.import_graph); let config = Arc::clone(&self.config); - let path_store = self.path_store.clone(); rayon::spawn(move || { let mut files = Vec::new(); for root in &roots { @@ -346,14 +340,8 @@ impl Server { let file_count = files.len(); for path in &files { - let file = path_store.intern(path); - Self::update_import_graph_for_file( - &documents, - &import_graph, - &config, - &path_store, - file, - ); + let file = documents.intern(path); + Self::update_import_graph_for_file(&documents, &import_graph, &config, file); } info!( @@ -434,7 +422,7 @@ impl Server { match CanonicalPath::try_from_path(&path) { Ok(path) => files.push(path), - Err(err) => warn!("Skipping workspace file {}: {}", path.display(), err), + Err(err) => warn!("Skipping workspace file {}: {err}", path.to_string_lossy()), } } } diff --git 
a/crates/jrsonnet-lsp/src/server/async_requests.rs b/crates/jrsonnet-lsp/src/server/async_requests.rs index 02086a6f..af21f6b2 100644 --- a/crates/jrsonnet-lsp/src/server/async_requests.rs +++ b/crates/jrsonnet-lsp/src/server/async_requests.rs @@ -256,9 +256,11 @@ impl AsyncRequestContext { .into_iter() .filter_map(|file| { let path = self.documents.path(file)?; - let doc = self.documents.get_document(path.as_ref())?; - let semantic = self.documents.get_semantic_artifacts(path.as_ref()); - Some((path.as_ref().clone(), doc, semantic)) + let doc = self.documents.get_document(path.as_canonical_path())?; + let semantic = self + .documents + .get_semantic_artifacts(path.as_canonical_path()); + Some((path.as_canonical_path().clone(), doc, semantic)) }) .collect(); let importer_refs: Vec<_> = importer_docs @@ -302,10 +304,10 @@ impl AsyncRequestContext { let Some(path) = self.documents.path(file) else { return Vec::new(); }; - let Some(doc) = self.documents.get_document(path.as_ref()) else { + let Some(doc) = self.documents.get_document(path.as_canonical_path()) else { return Vec::new(); }; - let Ok(uri) = path.to_uri() else { + let Ok(uri) = path.as_canonical_path().to_uri() else { return Vec::new(); }; handlers::workspace_symbols_for_document(&doc, &uri, query) diff --git a/crates/jrsonnet-lsp/src/server/import_graph.rs b/crates/jrsonnet-lsp/src/server/import_graph.rs index 1e3aacb9..2a407e49 100644 --- a/crates/jrsonnet-lsp/src/server/import_graph.rs +++ b/crates/jrsonnet-lsp/src/server/import_graph.rs @@ -6,13 +6,7 @@ impl Server { /// Parses the document's import statements and updates the graph /// so that cross-file references can be found efficiently. 
pub(super) fn update_import_graph(&self, file: FileId) { - Self::update_import_graph_for_file( - &self.documents, - &self.import_graph, - &self.config, - &self.path_store, - file, - ); + Self::update_import_graph_for_file(&self.documents, &self.import_graph, &self.config, file); } pub(super) fn tracked_files_for_reindex(&self) -> Vec { @@ -27,31 +21,30 @@ impl Server { documents: &SharedDocumentManager, import_graph: &Arc>, config: &SharedConfig, - path_store: &PathStore, file: FileId, ) { - let resolver = path_store.resolver(); - let Some(path) = resolver.path(file) else { - return; - }; - let Some(doc) = documents.get_document(path.as_ref()) else { - // File no longer exists or cannot be read. - import_graph.write().remove_file(file); - return; - }; + let entries = { + let Some(path) = documents.path(file) else { + return; + }; + let Some(doc) = documents.get_document(path.as_canonical_path()) else { + // File no longer exists or cannot be read. + import_graph.write().remove_file(file); + return; + }; - let config = config.read(); - let import_roots = effective_import_roots( - path.as_path(), - &config.jpath, - config.resolve_paths_with_tanka, - ); - drop(config); + let config = config.read(); + let import_roots = effective_import_roots( + path.as_canonical_path().as_path(), + &config.jpath, + config.resolve_paths_with_tanka, + ); + drop(config); - // Parse imports OUTSIDE the lock to minimize lock hold time. - // This is important for responsiveness when parsing large files. - let import_resolution = ImportResolution::new(path.as_ref(), &import_roots); - let entries = import_resolution.parse_entries(&doc); + // Parse imports OUTSIDE the graph lock to minimize lock hold time. 
+ let import_resolution = ImportResolution::new(path.as_canonical_path(), &import_roots); + import_resolution.parse_entries(&doc) + }; // Now acquire the write lock and do the quick data structure update import_graph.write().update_file_with_entries(file, entries); @@ -61,41 +54,41 @@ impl Server { pub(super) fn schedule_diagnostics_for_open_importers(&self, file: FileId) { let importers = self.import_graph.read().transitive_importers(file); for importer in importers { - if self.documents.is_open_file(importer) { - self.schedule_diagnostics_file(importer); + if !self.documents.is_open_file(importer) { + continue; } + + self.schedule_diagnostics_file(importer); } } /// Schedule diagnostics computation for a file. pub(super) fn schedule_diagnostics_file(&self, file: FileId) { - let resolver = self.path_store.resolver(); - let request = resolver.path(file).and_then(|path| { - let doc = self.documents.get(path.as_ref())?; + let request = self.documents.path(file).and_then(|path| { + let doc = self.documents.get(path.as_canonical_path())?; let (enable_lint, import_roots) = { let config = self.config.read(); ( config.lint_diagnostics_enabled(), effective_import_roots( - path.as_path(), + path.as_canonical_path().as_path(), &config.jpath, config.resolve_paths_with_tanka, ), ) }; Some(( - path.as_ref().clone(), doc.text().to_string(), doc.version(), enable_lint, import_roots, )) }); - let Some((path, text, version, enable_lint, import_roots)) = request else { + let Some((text, version, enable_lint, import_roots)) = request else { return; }; self.diagnostics - .schedule(path, text, version, enable_lint, import_roots); + .schedule(file, text, version, enable_lint, import_roots); } } diff --git a/crates/jrsonnet-lsp/src/server/notifications.rs b/crates/jrsonnet-lsp/src/server/notifications.rs index 4c802213..c76efd0e 100644 --- a/crates/jrsonnet-lsp/src/server/notifications.rs +++ b/crates/jrsonnet-lsp/src/server/notifications.rs @@ -68,7 +68,7 @@ impl Server { let text = 
params.text_document.text; let version = DocVersion::new(params.text_document.version); - let file = self.path_store.intern(&path); + let file = self.documents.intern(&path); self.documents.open(path.clone(), text, version); self.documents.refresh_semantic_artifacts(&path); @@ -94,7 +94,7 @@ impl Server { }; let version = DocVersion::new(params.text_document.version); - let file = self.path_store.intern(&path); + let file = self.documents.intern(&path); // Process each change (INCREMENTAL sync may send multiple changes) for change in params.content_changes { @@ -134,7 +134,7 @@ impl Server { warn!("Could not convert URI to path: {}", uri.as_str()); return Ok(()); }; - let file = self.path_store.intern(&path); + let file = self.documents.intern(&path); self.documents.close(&path); @@ -166,7 +166,7 @@ impl Server { warn!("Could not convert URI to path: {}", uri.as_str()); return; }; - let file = self.path_store.intern(&path); + let file = self.documents.intern(&path); if let Some(text) = params.text { let Some(doc) = self.documents.get(&path) else { @@ -268,7 +268,7 @@ impl Server { let Ok(path) = CanonicalPath::from_uri(&change.uri) else { continue; }; - let file = self.path_store.intern(&path); + let file = self.documents.intern(&path); self.invalidate_type_cache_with_dependents(file); diff --git a/crates/jrsonnet-lsp/tests/docs_lsp_examples.rs b/crates/jrsonnet-lsp/tests/docs_lsp_examples.rs index 09d4887f..c19df6f0 100644 --- a/crates/jrsonnet-lsp/tests/docs_lsp_examples.rs +++ b/crates/jrsonnet-lsp/tests/docs_lsp_examples.rs @@ -36,8 +36,8 @@ fn relative_path(path: &Path) -> String { let workspace_root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("../.."); let relative = path .strip_prefix(workspace_root) - .unwrap_or_else(|err| panic!("failed to relativize {}: {err}", path.display())); - relative.display().to_string() + .unwrap_or_else(|err| panic!("failed to relativize {}: {err}", path.to_string_lossy())); + relative.to_string_lossy().into_owned() } fn 
extract_markdown_fenced_blocks(markdown: &str) -> Vec { @@ -91,7 +91,7 @@ fn collect_jsonnet_doc_examples() -> Vec { let mut examples = Vec::new(); for path in docs_lsp_markdown_paths() { let text = fs::read_to_string(&path) - .unwrap_or_else(|err| panic!("failed to read {}: {err}", path.display())); + .unwrap_or_else(|err| panic!("failed to read {}: {err}", path.to_string_lossy())); let source_path = relative_path(&path); for block in extract_markdown_fenced_blocks(&text) { if block.language == "jsonnet" { From 9d7e6e216d758a9e8f822a176aab19aa9e26351d Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Fri, 13 Feb 2026 17:54:22 +0000 Subject: [PATCH 105/210] refactor(lsp): split server requests and large scope/scenario modules Break up the large LSP hotspots into cohesive modules without changing request behavior. Server/request routing: - Extract request dispatching out of `server.rs` into `server/request_dispatch.rs`. - Split async and sync request routing to one file per handler under `server/requests/{async_handlers,sync_handlers}`. - Split `async_requests.rs` into per-feature method modules plus shared helpers. Scope/import/scenario decomposition: - Split `jrsonnet-lsp-scope::resolver` into `definitions`, `references`, and `scope_index` modules with tests kept inline in owning files. - Split `jrsonnet-lsp-import` graph and resolve internals into focused modules. - Split `jrsonnet-lsp-scenario::scenario` into document/workspace/request/diagnostics/doctest modules and preserve existing public types via re-exports. Handlers/inference cleanup: - Continue decomposition of handler and inference hotspots (definition, semantic_tokens, completion handler, analysis internals) to reduce file size and improve navigation. Validation: - Ran `make fmt`, `nix fmt`, targeted `cargo check`, clippy for affected LSP crates, and targeted LSP integration tests for goto-definition and semantic tokens range. 
--- .../src/code_action/mod.rs | 112 ++ .../src/code_action/quickfix.rs | 154 ++ .../remove_unused.rs} | 282 +--- .../{handler.rs => handler/dispatch.rs} | 107 +- .../src/completion/handler/keywords.rs | 40 + .../src/completion/handler/mod.rs | 70 + .../jrsonnet-lsp-handlers/src/definition.rs | 1019 ------------- .../src/definition/field_path.rs | 155 ++ .../src/definition/import.rs | 72 + .../src/definition/local.rs | 188 +++ .../src/definition/mod.rs | 221 +++ .../src/definition/symbol.rs | 393 +++++ .../src/semantic_tokens.rs | 855 ----------- .../src/semantic_tokens/classification.rs | 143 ++ .../src/semantic_tokens/encode.rs | 411 ++++++ .../src/semantic_tokens/legend.rs | 191 +++ .../src/semantic_tokens/mod.rs | 14 + .../src/semantic_tokens/walk.rs | 117 ++ crates/jrsonnet-lsp-import/src/graph.rs | 1276 ----------------- crates/jrsonnet-lsp-import/src/graph/mod.rs | 8 + .../src/graph/operations.rs | 238 +++ crates/jrsonnet-lsp-import/src/graph/parse.rs | 265 ++++ crates/jrsonnet-lsp-import/src/graph/tests.rs | 561 ++++++++ .../src/graph/traversal.rs | 219 +++ crates/jrsonnet-lsp-import/src/resolve.rs | 241 ---- crates/jrsonnet-lsp-import/src/resolve/mod.rs | 32 + .../src/resolve/parse_adapter.rs | 116 ++ .../src/resolve/path_resolution.rs | 113 ++ .../src/analysis/build.rs | 94 ++ .../src/analysis/mod.rs | 82 ++ .../src/{analysis.rs => analysis/queries.rs} | 168 +-- .../src/scenario/diagnostics_steps.rs | 72 + .../src/scenario/doctest_assertions.rs | 81 ++ .../src/scenario/document_steps.rs | 186 +++ .../jrsonnet-lsp-scenario/src/scenario/mod.rs | 134 ++ .../request_steps.rs} | 597 +------- .../src/scenario/workspace_steps.rs | 148 ++ .../src/scenario_runner/helpers.rs | 6 +- .../src/scenario_runner/runner.rs | 10 +- .../src/resolver/definitions.rs | 260 ++++ crates/jrsonnet-lsp-scope/src/resolver/mod.rs | 20 + .../src/resolver/references.rs | 253 ++++ .../{resolver.rs => resolver/scope_index.rs} | 560 +------- crates/jrsonnet-lsp/src/server.rs | 447 +----- 
.../jrsonnet-lsp/src/server/async_requests.rs | 897 +----------- .../src/server/async_requests/code_lens.rs | 17 + .../src/server/async_requests/commands.rs | 248 ++++ .../src/server/async_requests/completion.rs | 36 + .../server/async_requests/goto_declaration.rs | 12 + .../server/async_requests/goto_definition.rs | 12 + .../async_requests/goto_implementation.rs | 12 + .../src/server/async_requests/goto_shared.rs | 86 ++ .../async_requests/goto_type_definition.rs | 12 + .../src/server/async_requests/hover.rs | 25 + .../server/async_requests/import_lookup.rs | 311 ++++ .../src/server/async_requests/inlay_hints.rs | 19 + .../src/server/async_requests/references.rs | 66 + .../src/server/async_requests/rename.rs | 36 + .../server/async_requests/workspace_symbol.rs | 73 + .../jrsonnet-lsp/src/server/notifications.rs | 10 + .../src/server/request_dispatch.rs | 371 +++++ .../requests/async_handlers/code_lens.rs | 10 + .../requests/async_handlers/completion.rs | 10 + .../async_handlers/execute_command.rs | 10 + .../async_handlers/goto_declaration.rs | 10 + .../async_handlers/goto_definition.rs | 10 + .../async_handlers/goto_implementation.rs | 10 + .../async_handlers/goto_type_definition.rs | 10 + .../server/requests/async_handlers/hover.rs | 7 + .../requests/async_handlers/inlay_hints.rs | 10 + .../src/server/requests/async_handlers/mod.rs | 12 + .../requests/async_handlers/references.rs | 10 + .../server/requests/async_handlers/rename.rs | 10 + .../async_handlers/workspace_symbol.rs | 10 + .../jrsonnet-lsp/src/server/requests/mod.rs | 7 + .../requests/sync_handlers/code_action.rs | 26 + .../sync_handlers/code_lens_resolve.rs | 8 + .../sync_handlers/document_highlight.rs | 23 + .../requests/sync_handlers/document_symbol.rs | 17 + .../requests/sync_handlers/formatting.rs | 17 + .../src/server/requests/sync_handlers/mod.rs | 9 + .../requests/sync_handlers/prepare_rename.rs | 19 + .../sync_handlers/semantic_tokens_full.rs | 16 + .../sync_handlers/semantic_tokens_range.rs | 
16 + .../requests/sync_handlers/signature_help.rs | 16 + .../jrsonnet-lsp/src/server/watched_files.rs | 7 + 86 files changed, 6935 insertions(+), 6349 deletions(-) create mode 100644 crates/jrsonnet-lsp-handlers/src/code_action/mod.rs create mode 100644 crates/jrsonnet-lsp-handlers/src/code_action/quickfix.rs rename crates/jrsonnet-lsp-handlers/src/{code_action.rs => code_action/remove_unused.rs} (81%) rename crates/jrsonnet-lsp-handlers/src/completion/{handler.rs => handler/dispatch.rs} (89%) create mode 100644 crates/jrsonnet-lsp-handlers/src/completion/handler/keywords.rs create mode 100644 crates/jrsonnet-lsp-handlers/src/completion/handler/mod.rs delete mode 100644 crates/jrsonnet-lsp-handlers/src/definition.rs create mode 100644 crates/jrsonnet-lsp-handlers/src/definition/field_path.rs create mode 100644 crates/jrsonnet-lsp-handlers/src/definition/import.rs create mode 100644 crates/jrsonnet-lsp-handlers/src/definition/local.rs create mode 100644 crates/jrsonnet-lsp-handlers/src/definition/mod.rs create mode 100644 crates/jrsonnet-lsp-handlers/src/definition/symbol.rs delete mode 100644 crates/jrsonnet-lsp-handlers/src/semantic_tokens.rs create mode 100644 crates/jrsonnet-lsp-handlers/src/semantic_tokens/classification.rs create mode 100644 crates/jrsonnet-lsp-handlers/src/semantic_tokens/encode.rs create mode 100644 crates/jrsonnet-lsp-handlers/src/semantic_tokens/legend.rs create mode 100644 crates/jrsonnet-lsp-handlers/src/semantic_tokens/mod.rs create mode 100644 crates/jrsonnet-lsp-handlers/src/semantic_tokens/walk.rs delete mode 100644 crates/jrsonnet-lsp-import/src/graph.rs create mode 100644 crates/jrsonnet-lsp-import/src/graph/mod.rs create mode 100644 crates/jrsonnet-lsp-import/src/graph/operations.rs create mode 100644 crates/jrsonnet-lsp-import/src/graph/parse.rs create mode 100644 crates/jrsonnet-lsp-import/src/graph/tests.rs create mode 100644 crates/jrsonnet-lsp-import/src/graph/traversal.rs delete mode 100644 
crates/jrsonnet-lsp-import/src/resolve.rs create mode 100644 crates/jrsonnet-lsp-import/src/resolve/mod.rs create mode 100644 crates/jrsonnet-lsp-import/src/resolve/parse_adapter.rs create mode 100644 crates/jrsonnet-lsp-import/src/resolve/path_resolution.rs create mode 100644 crates/jrsonnet-lsp-inference/src/analysis/build.rs create mode 100644 crates/jrsonnet-lsp-inference/src/analysis/mod.rs rename crates/jrsonnet-lsp-inference/src/{analysis.rs => analysis/queries.rs} (74%) create mode 100644 crates/jrsonnet-lsp-scenario/src/scenario/diagnostics_steps.rs create mode 100644 crates/jrsonnet-lsp-scenario/src/scenario/doctest_assertions.rs create mode 100644 crates/jrsonnet-lsp-scenario/src/scenario/document_steps.rs create mode 100644 crates/jrsonnet-lsp-scenario/src/scenario/mod.rs rename crates/jrsonnet-lsp-scenario/src/{scenario.rs => scenario/request_steps.rs} (65%) create mode 100644 crates/jrsonnet-lsp-scenario/src/scenario/workspace_steps.rs create mode 100644 crates/jrsonnet-lsp-scope/src/resolver/definitions.rs create mode 100644 crates/jrsonnet-lsp-scope/src/resolver/mod.rs create mode 100644 crates/jrsonnet-lsp-scope/src/resolver/references.rs rename crates/jrsonnet-lsp-scope/src/{resolver.rs => resolver/scope_index.rs} (55%) create mode 100644 crates/jrsonnet-lsp/src/server/async_requests/code_lens.rs create mode 100644 crates/jrsonnet-lsp/src/server/async_requests/commands.rs create mode 100644 crates/jrsonnet-lsp/src/server/async_requests/completion.rs create mode 100644 crates/jrsonnet-lsp/src/server/async_requests/goto_declaration.rs create mode 100644 crates/jrsonnet-lsp/src/server/async_requests/goto_definition.rs create mode 100644 crates/jrsonnet-lsp/src/server/async_requests/goto_implementation.rs create mode 100644 crates/jrsonnet-lsp/src/server/async_requests/goto_shared.rs create mode 100644 crates/jrsonnet-lsp/src/server/async_requests/goto_type_definition.rs create mode 100644 crates/jrsonnet-lsp/src/server/async_requests/hover.rs create 
mode 100644 crates/jrsonnet-lsp/src/server/async_requests/import_lookup.rs create mode 100644 crates/jrsonnet-lsp/src/server/async_requests/inlay_hints.rs create mode 100644 crates/jrsonnet-lsp/src/server/async_requests/references.rs create mode 100644 crates/jrsonnet-lsp/src/server/async_requests/rename.rs create mode 100644 crates/jrsonnet-lsp/src/server/async_requests/workspace_symbol.rs create mode 100644 crates/jrsonnet-lsp/src/server/request_dispatch.rs create mode 100644 crates/jrsonnet-lsp/src/server/requests/async_handlers/code_lens.rs create mode 100644 crates/jrsonnet-lsp/src/server/requests/async_handlers/completion.rs create mode 100644 crates/jrsonnet-lsp/src/server/requests/async_handlers/execute_command.rs create mode 100644 crates/jrsonnet-lsp/src/server/requests/async_handlers/goto_declaration.rs create mode 100644 crates/jrsonnet-lsp/src/server/requests/async_handlers/goto_definition.rs create mode 100644 crates/jrsonnet-lsp/src/server/requests/async_handlers/goto_implementation.rs create mode 100644 crates/jrsonnet-lsp/src/server/requests/async_handlers/goto_type_definition.rs create mode 100644 crates/jrsonnet-lsp/src/server/requests/async_handlers/hover.rs create mode 100644 crates/jrsonnet-lsp/src/server/requests/async_handlers/inlay_hints.rs create mode 100644 crates/jrsonnet-lsp/src/server/requests/async_handlers/mod.rs create mode 100644 crates/jrsonnet-lsp/src/server/requests/async_handlers/references.rs create mode 100644 crates/jrsonnet-lsp/src/server/requests/async_handlers/rename.rs create mode 100644 crates/jrsonnet-lsp/src/server/requests/async_handlers/workspace_symbol.rs create mode 100644 crates/jrsonnet-lsp/src/server/requests/mod.rs create mode 100644 crates/jrsonnet-lsp/src/server/requests/sync_handlers/code_action.rs create mode 100644 crates/jrsonnet-lsp/src/server/requests/sync_handlers/code_lens_resolve.rs create mode 100644 crates/jrsonnet-lsp/src/server/requests/sync_handlers/document_highlight.rs create mode 100644 
crates/jrsonnet-lsp/src/server/requests/sync_handlers/document_symbol.rs create mode 100644 crates/jrsonnet-lsp/src/server/requests/sync_handlers/formatting.rs create mode 100644 crates/jrsonnet-lsp/src/server/requests/sync_handlers/mod.rs create mode 100644 crates/jrsonnet-lsp/src/server/requests/sync_handlers/prepare_rename.rs create mode 100644 crates/jrsonnet-lsp/src/server/requests/sync_handlers/semantic_tokens_full.rs create mode 100644 crates/jrsonnet-lsp/src/server/requests/sync_handlers/semantic_tokens_range.rs create mode 100644 crates/jrsonnet-lsp/src/server/requests/sync_handlers/signature_help.rs diff --git a/crates/jrsonnet-lsp-handlers/src/code_action/mod.rs b/crates/jrsonnet-lsp-handlers/src/code_action/mod.rs new file mode 100644 index 00000000..babbe30d --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/code_action/mod.rs @@ -0,0 +1,112 @@ +//! Code action handler. +//! +//! Provides quick fixes for diagnostics. + +mod quickfix; +mod remove_unused; + +use jrsonnet_lsp_document::Document; +use lsp_types::{ + CodeActionContext, CodeActionKind, CodeActionOrCommand, Diagnostic, NumberOrString, Range, Uri, +}; +use remove_unused::{ + remove_all_unused_bindings_action_with_policy, remove_unused_binding_action_with_policy, + RemoveUnusedPolicy, +}; +use serde::{Deserialize, Serialize}; + +use self::quickfix::unused_variable_action; + +const UNUSED_VARIABLE_CODE: &str = "unused-variable"; + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Default, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub enum RemoveUnusedMode { + #[default] + All, + ImportBindings, + NonImportBindings, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Default, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub enum RemoveUnusedCommentsMode { + #[default] + None, + Above, + Below, + All, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Default, Serialize, Deserialize)] +#[serde(default, rename_all = "camelCase")] +pub struct CodeActionConfig { + pub 
remove_unused: RemoveUnusedMode, + pub remove_unused_comments: RemoveUnusedCommentsMode, +} + +fn is_unused_variable_diagnostic(diagnostic: &Diagnostic) -> bool { + matches!( + diagnostic.code.as_ref(), + Some(NumberOrString::String(code)) if code == UNUSED_VARIABLE_CODE + ) +} + +fn range_overlaps(a: Range, b: Range) -> bool { + (a.start.line, a.start.character) <= (b.end.line, b.end.character) + && (b.start.line, b.start.character) <= (a.end.line, a.end.character) +} + +fn wants_quickfix(context: &CodeActionContext) -> bool { + context.only.as_ref().is_none_or(|kinds| { + kinds + .iter() + .any(|kind| kind.as_str().starts_with(CodeActionKind::QUICKFIX.as_str())) + }) +} + +fn wants_fix_all(context: &CodeActionContext) -> bool { + context.only.as_ref().is_none_or(|kinds| { + kinds.iter().any(|kind| { + kind.as_str() + .starts_with(CodeActionKind::SOURCE_FIX_ALL.as_str()) + }) + }) +} + +/// Build code actions for a given range and context. +pub fn code_actions( + document: &Document, + uri: &Uri, + range: Range, + context: &CodeActionContext, + config: &CodeActionConfig, +) -> Vec { + let mut actions = Vec::new(); + let policy = RemoveUnusedPolicy::from_config(*config); + + if wants_quickfix(context) { + actions.extend( + context + .diagnostics + .iter() + .filter(|diagnostic| range_overlaps(diagnostic.range, range)) + .flat_map(|diagnostic| { + [ + unused_variable_action(document, uri, diagnostic), + remove_unused_binding_action_with_policy(document, uri, diagnostic, policy), + ] + .into_iter() + .flatten() + }), + ); + } + + if let Some(fix_all_action) = + remove_all_unused_bindings_action_with_policy(document, uri, context, policy) + { + actions.push(fix_all_action); + } + + actions +} diff --git a/crates/jrsonnet-lsp-handlers/src/code_action/quickfix.rs b/crates/jrsonnet-lsp-handlers/src/code_action/quickfix.rs new file mode 100644 index 00000000..9838955c --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/code_action/quickfix.rs @@ -0,0 +1,154 @@ +use 
std::collections::HashMap; + +use jrsonnet_lsp_document::{to_lsp_range, token_at_offset, Document}; +use jrsonnet_rowan_parser::{AstNode, SyntaxKind}; +use lsp_types::{ + CodeAction, CodeActionKind, CodeActionOrCommand, Diagnostic, TextEdit, Uri, WorkspaceEdit, +}; + +use super::is_unused_variable_diagnostic; + +pub(super) fn unused_variable_action( + document: &Document, + uri: &Uri, + diagnostic: &Diagnostic, +) -> Option { + if !is_unused_variable_diagnostic(diagnostic) { + return None; + } + + let text = document.text(); + let line_index = document.line_index(); + let offset = line_index.offset(diagnostic.range.start.into(), text)?; + let ast = document.ast(); + let token = token_at_offset(ast.syntax(), offset)?; + + if token.kind() != SyntaxKind::IDENT { + return None; + } + + let name = token.text(); + if name.starts_with('_') { + return None; + } + + let mut changes = HashMap::new(); + changes.insert( + uri.clone(), + vec![TextEdit { + range: to_lsp_range(token.text_range(), line_index, text), + new_text: format!("_{name}"), + }], + ); + + Some( + CodeAction { + title: format!("Prefix `{name}` with `_`"), + kind: Some(CodeActionKind::QUICKFIX), + diagnostics: Some(vec![diagnostic.clone()]), + edit: Some(WorkspaceEdit { + changes: Some(changes), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(true), + disabled: None, + data: None, + } + .into(), + ) +} + +#[cfg(test)] +mod tests { + use jrsonnet_lsp_document::{DocVersion, Document}; + use lsp_types::{ + CodeActionContext, CodeActionKind, CodeActionOrCommand, Diagnostic, DiagnosticSeverity, + NumberOrString, Position, Range, Uri, + }; + + use super::super::{code_actions, CodeActionConfig, UNUSED_VARIABLE_CODE}; + + fn diag_unused(range: Range) -> Diagnostic { + Diagnostic { + range, + severity: Some(DiagnosticSeverity::WARNING), + code: Some(NumberOrString::String(UNUSED_VARIABLE_CODE.to_string())), + code_description: None, + source: 
Some("jrsonnet-lint".to_string()), + message: "unused variable".to_string(), + related_information: None, + tags: None, + data: None, + } + } + + fn uri() -> Uri { + "file:///test/code_action.jsonnet".parse().unwrap() + } + + fn range(start: u32, end: u32) -> Range { + Range { + start: Position { + line: 0, + character: start, + }, + end: Position { + line: 0, + character: end, + }, + } + } + + #[test] + fn test_unused_variable_quickfix() { + let document = Document::new("local x = 1; 42".to_string(), DocVersion::new(1)); + let context = CodeActionContext { + diagnostics: vec![diag_unused(range(6, 7))], + only: None, + trigger_kind: None, + }; + + let actions = code_actions( + &document, + &uri(), + range(0, 20), + &context, + &CodeActionConfig::default(), + ); + let first = actions.first().expect("should include at least one action"); + let CodeActionOrCommand::CodeAction(action) = first else { + panic!("expected a code action") + }; + + assert_eq!(action.title, "Prefix `x` with `_`"); + assert_eq!(action.kind, Some(CodeActionKind::QUICKFIX)); + let changes = action + .edit + .as_ref() + .and_then(|edit| edit.changes.as_ref()) + .expect("quickfix should include edits"); + let edits = changes.get(&uri()).expect("edits should target test uri"); + assert_eq!(edits[0].new_text, "_x"); + } + + #[test] + fn test_code_action_skips_non_identifier_range() { + let document = Document::new("local x = 1; 42".to_string(), DocVersion::new(1)); + let context = CodeActionContext { + diagnostics: vec![diag_unused(range(8, 9))], // '=' character + only: None, + trigger_kind: None, + }; + + let actions = code_actions( + &document, + &uri(), + range(0, 20), + &context, + &CodeActionConfig::default(), + ); + assert!(actions.is_empty()); + } +} diff --git a/crates/jrsonnet-lsp-handlers/src/code_action.rs b/crates/jrsonnet-lsp-handlers/src/code_action/remove_unused.rs similarity index 81% rename from crates/jrsonnet-lsp-handlers/src/code_action.rs rename to 
crates/jrsonnet-lsp-handlers/src/code_action/remove_unused.rs index 5ce9a528..1c60a5f7 100644 --- a/crates/jrsonnet-lsp-handlers/src/code_action.rs +++ b/crates/jrsonnet-lsp-handlers/src/code_action/remove_unused.rs @@ -1,7 +1,3 @@ -//! Code action handler. -//! -//! Provides quick fixes for diagnostics. - use std::collections::{HashMap, HashSet}; use jrsonnet_lsp_document::{to_lsp_range, token_at_offset, Document}; @@ -14,38 +10,14 @@ use jrsonnet_rowan_parser::{ AstNode, SyntaxKind, SyntaxNode, SyntaxToken, }; use lsp_types::{ - CodeAction, CodeActionContext, CodeActionKind, CodeActionOrCommand, Diagnostic, NumberOrString, - Range, TextEdit, Uri, WorkspaceEdit, + CodeAction, CodeActionContext, CodeActionKind, CodeActionOrCommand, Diagnostic, TextEdit, Uri, + WorkspaceEdit, }; -use serde::{Deserialize, Serialize}; - -const UNUSED_VARIABLE_CODE: &str = "unused-variable"; - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Default, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub enum RemoveUnusedMode { - #[default] - All, - ImportBindings, - NonImportBindings, -} -#[derive(Debug, Clone, Copy, PartialEq, Eq, Default, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub enum RemoveUnusedCommentsMode { - #[default] - None, - Above, - Below, - All, -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Default, Serialize, Deserialize)] -#[serde(default, rename_all = "camelCase")] -pub struct CodeActionConfig { - pub remove_unused: RemoveUnusedMode, - pub remove_unused_comments: RemoveUnusedCommentsMode, -} +use super::{ + is_unused_variable_diagnostic, wants_fix_all, CodeActionConfig, RemoveUnusedCommentsMode, + RemoveUnusedMode, +}; #[derive(Debug, Clone, Copy, PartialEq, Eq)] enum RemovalFlavor { @@ -63,7 +35,7 @@ enum CommentPolicy { } #[derive(Debug, Clone, Copy, PartialEq, Eq)] -struct RemoveUnusedPolicy { +pub(super) struct RemoveUnusedPolicy { flavor: RemovalFlavor, comments: CommentPolicy, } @@ -89,7 +61,7 @@ impl CommentPolicy { } impl 
RemoveUnusedPolicy { - const fn from_config(config: CodeActionConfig) -> Self { + pub(super) const fn from_config(config: CodeActionConfig) -> Self { let flavor = match config.remove_unused { RemoveUnusedMode::All => RemovalFlavor::All, RemoveUnusedMode::ImportBindings => RemovalFlavor::ImportBindings, @@ -284,13 +256,6 @@ fn contiguous_runs(indices: &[usize]) -> Vec<(usize, usize)> { runs } -fn is_unused_variable_diagnostic(diagnostic: &Diagnostic) -> bool { - matches!( - diagnostic.code.as_ref(), - Some(NumberOrString::String(code)) if code == UNUSED_VARIABLE_CODE - ) -} - fn bind_name_range(bind: &Bind) -> Option { match bind { Bind::BindDestruct(bind_destruct) => { @@ -539,81 +504,7 @@ fn remove_unused_edit_for_diagnostic( ) } -fn range_overlaps(a: Range, b: Range) -> bool { - (a.start.line, a.start.character) <= (b.end.line, b.end.character) - && (b.start.line, b.start.character) <= (a.end.line, a.end.character) -} - -fn wants_quickfix(context: &CodeActionContext) -> bool { - context.only.as_ref().is_none_or(|kinds| { - kinds - .iter() - .any(|kind| kind.as_str().starts_with(CodeActionKind::QUICKFIX.as_str())) - }) -} - -fn wants_fix_all(context: &CodeActionContext) -> bool { - context.only.as_ref().is_none_or(|kinds| { - kinds.iter().any(|kind| { - kind.as_str() - .starts_with(CodeActionKind::SOURCE_FIX_ALL.as_str()) - }) - }) -} - -fn unused_variable_action( - document: &Document, - uri: &Uri, - diagnostic: &Diagnostic, -) -> Option { - if !is_unused_variable_diagnostic(diagnostic) { - return None; - } - - let text = document.text(); - let line_index = document.line_index(); - let offset = line_index.offset(diagnostic.range.start.into(), text)?; - let ast = document.ast(); - let token = token_at_offset(ast.syntax(), offset)?; - - if token.kind() != SyntaxKind::IDENT { - return None; - } - - let name = token.text(); - if name.starts_with('_') { - return None; - } - - let mut changes = HashMap::new(); - changes.insert( - uri.clone(), - vec![TextEdit { - range: 
to_lsp_range(token.text_range(), line_index, text), - new_text: format!("_{name}"), - }], - ); - - Some( - CodeAction { - title: format!("Prefix `{name}` with `_`"), - kind: Some(CodeActionKind::QUICKFIX), - diagnostics: Some(vec![diagnostic.clone()]), - edit: Some(WorkspaceEdit { - changes: Some(changes), - document_changes: None, - change_annotations: None, - }), - command: None, - is_preferred: Some(true), - disabled: None, - data: None, - } - .into(), - ) -} - -fn remove_unused_binding_action_with_policy( +pub(super) fn remove_unused_binding_action_with_policy( document: &Document, uri: &Uri, diagnostic: &Diagnostic, @@ -658,7 +549,7 @@ fn remove_unused_binding_action_with_policy( ) } -fn remove_all_unused_bindings_action_with_policy( +pub(super) fn remove_all_unused_bindings_action_with_policy( document: &Document, uri: &Uri, context: &CodeActionContext, @@ -721,49 +612,20 @@ fn remove_all_unused_bindings_action_with_policy( ) } -/// Build code actions for a given range and context. 
-pub fn code_actions( - document: &Document, - uri: &Uri, - range: Range, - context: &CodeActionContext, - config: &CodeActionConfig, -) -> Vec { - let mut actions = Vec::new(); - let policy = RemoveUnusedPolicy::from_config(*config); - - if wants_quickfix(context) { - actions.extend( - context - .diagnostics - .iter() - .filter(|diagnostic| range_overlaps(diagnostic.range, range)) - .flat_map(|diagnostic| { - [ - unused_variable_action(document, uri, diagnostic), - remove_unused_binding_action_with_policy(document, uri, diagnostic, policy), - ] - .into_iter() - .flatten() - }), - ); - } - - if let Some(fix_all_action) = - remove_all_unused_bindings_action_with_policy(document, uri, context, policy) - { - actions.push(fix_all_action); - } - - actions -} - #[cfg(test)] mod tests { - use jrsonnet_lsp_document::DocVersion; - use lsp_types::{CodeActionContext, CodeActionKind, DiagnosticSeverity, Position}; + use std::collections::HashMap; + + use jrsonnet_lsp_document::{DocVersion, Document}; + use lsp_types::{ + CodeAction, CodeActionContext, CodeActionKind, CodeActionOrCommand, Diagnostic, + DiagnosticSeverity, NumberOrString, Position, Range, TextEdit, Uri, WorkspaceEdit, + }; - use super::*; + use super::super::{ + code_actions, CodeActionConfig, RemoveUnusedCommentsMode, RemoveUnusedMode, + UNUSED_VARIABLE_CODE, + }; fn diag_unused(range: Range) -> Diagnostic { Diagnostic { @@ -809,89 +671,6 @@ mod tests { } } - #[test] - fn test_unused_variable_quickfix() { - let document = Document::new("local x = 1; 42".to_string(), DocVersion::new(1)); - let context = CodeActionContext { - diagnostics: vec![diag_unused(range(6, 7))], - only: None, - trigger_kind: None, - }; - - let actions = code_actions( - &document, - &uri(), - range(0, 20), - &context, - &CodeActionConfig::default(), - ); - assert_eq!( - actions, - vec![ - CodeActionOrCommand::CodeAction(CodeAction { - title: "Prefix `x` with `_`".to_string(), - kind: Some(CodeActionKind::QUICKFIX), - diagnostics: 
Some(vec![diag_unused(range(6, 7))]), - edit: Some(WorkspaceEdit { - changes: Some(HashMap::from([( - uri(), - vec![TextEdit { - range: range(6, 7), - new_text: "_x".to_string(), - }], - )])), - document_changes: None, - change_annotations: None, - }), - command: None, - is_preferred: Some(true), - disabled: None, - data: None, - }), - CodeActionOrCommand::CodeAction(CodeAction { - title: "Remove unused binding `x`".to_string(), - kind: Some(CodeActionKind::QUICKFIX), - diagnostics: Some(vec![diag_unused(range(6, 7))]), - edit: Some(WorkspaceEdit { - changes: Some(HashMap::from([( - uri(), - vec![TextEdit { - range: range(0, 11), - new_text: String::new(), - }], - )])), - document_changes: None, - change_annotations: None, - }), - command: None, - is_preferred: Some(false), - disabled: None, - data: None, - }), - CodeActionOrCommand::CodeAction(CodeAction { - title: "Remove all unused bindings".to_string(), - kind: Some(CodeActionKind::SOURCE_FIX_ALL), - diagnostics: Some(vec![diag_unused(range(6, 7))]), - edit: Some(WorkspaceEdit { - changes: Some(HashMap::from([( - uri(), - vec![TextEdit { - range: range(0, 11), - new_text: String::new(), - }], - )])), - document_changes: None, - change_annotations: None, - }), - command: None, - is_preferred: Some(false), - disabled: None, - data: None, - }), - ] - ); - } - #[test] fn test_code_action_returns_fix_all_for_source_fix_all_filter() { let document = Document::new("local x = 1; 42".to_string(), DocVersion::new(1)); @@ -933,25 +712,6 @@ mod tests { ); } - #[test] - fn test_code_action_skips_non_identifier_range() { - let document = Document::new("local x = 1; 42".to_string(), DocVersion::new(1)); - let context = CodeActionContext { - diagnostics: vec![diag_unused(range(8, 9))], // '=' character - only: None, - trigger_kind: None, - }; - - let actions = code_actions( - &document, - &uri(), - range(0, 20), - &context, - &CodeActionConfig::default(), - ); - assert!(actions.is_empty()); - } - #[test] fn 
test_remove_unused_binding_handles_multi_bind_statement() { let document = Document::new("local x = 1, y = 2; y".to_string(), DocVersion::new(1)); diff --git a/crates/jrsonnet-lsp-handlers/src/completion/handler.rs b/crates/jrsonnet-lsp-handlers/src/completion/handler/dispatch.rs similarity index 89% rename from crates/jrsonnet-lsp-handlers/src/completion/handler.rs rename to crates/jrsonnet-lsp-handlers/src/completion/handler/dispatch.rs index fd0e8f20..c026b409 100644 --- a/crates/jrsonnet-lsp-handlers/src/completion/handler.rs +++ b/crates/jrsonnet-lsp-handlers/src/completion/handler/dispatch.rs @@ -1,64 +1,23 @@ -//! Completion handler for providing code completions. -//! -//! Supports: -//! - Standard library functions (triggered by `std.`) -//! - Local variables in scope -//! - Object field completion (triggered by `obj.`) -//! - Import path completion (inside import strings) - use std::path::{Path, PathBuf}; use jrsonnet_lsp_document::{token_at_offset, Document, LspPosition}; use jrsonnet_lsp_inference::{SemanticArtifacts, TypeAnalysis}; use jrsonnet_rowan_parser::AstNode; -use lsp_types::{CompletionItem, CompletionItemKind, CompletionList}; +use lsp_types::CompletionList; use tracing::debug; use super::{ - fields::check_object_field_completion, - helpers::{get_identifier_prefix, is_inside_object}, - imports::check_import_completion, - locals::get_local_completions_with_semantic, - stdlib::check_stdlib_completion, + super::{ + fields::check_object_field_completion, + helpers::{get_identifier_prefix, is_inside_object}, + imports::check_import_completion, + locals::get_local_completions_with_semantic, + stdlib::check_stdlib_completion, + }, + keywords::{add_object_keyword_completions, add_std_completion}, }; -/// Get completion items for the given position. -/// -/// `doc_path` is the path to the current document, used for import path completion. 
-/// The `analysis` parameter should be pre-computed using `TypeProvider` to ensure -/// that import types are properly resolved. -pub fn completion( - document: &Document, - position: LspPosition, - doc_path: Option<&Path>, - analysis: &TypeAnalysis, -) -> Option { - completion_with_import_roots_and_semantic(document, position, doc_path, &[], analysis, None) -} - -/// Get completion items with explicit import search roots. -/// -/// `import_roots` is typically configured from server `jpath` entries and used -/// when completing inside import strings. -pub fn completion_with_import_roots( - document: &Document, - position: LspPosition, - doc_path: Option<&Path>, - import_roots: &[PathBuf], - analysis: &TypeAnalysis, -) -> Option { - completion_with_import_roots_and_semantic( - document, - position, - doc_path, - import_roots, - analysis, - None, - ) -} - -/// Get completion items with explicit import roots and semantic artifacts. -pub fn completion_with_import_roots_and_semantic( +pub(super) fn completion_dispatch( document: &Document, position: LspPosition, doc_path: Option<&Path>, @@ -107,48 +66,15 @@ pub fn completion_with_import_roots_and_semantic( }); } - // For general completion, provide local variables in scope + // For general completion, provide local variables in scope. 
let mut items = get_local_completions_with_semantic(document, position, text, offset.into(), semantic); - // Also include `std` as a completion option since it's always available - items.push(CompletionItem { - label: "std".to_string(), - kind: Some(CompletionItemKind::MODULE), - detail: Some("Jsonnet standard library".to_string()), - ..Default::default() - }); + add_std_completion(&mut items); - // Add `self`, `super`, and `$` if inside an object if is_inside_object(ast.syntax(), offset) { let prefix = get_identifier_prefix(text, offset.into()); - - if prefix.is_empty() || "$".starts_with(prefix) { - items.push(CompletionItem { - label: "$".to_string(), - kind: Some(CompletionItemKind::KEYWORD), - detail: Some("Reference to root object".to_string()), - ..Default::default() - }); - } - - if prefix.is_empty() || "self".starts_with(prefix) { - items.push(CompletionItem { - label: "self".to_string(), - kind: Some(CompletionItemKind::KEYWORD), - detail: Some("Reference to current object".to_string()), - ..Default::default() - }); - } - - if prefix.is_empty() || "super".starts_with(prefix) { - items.push(CompletionItem { - label: "super".to_string(), - kind: Some(CompletionItemKind::KEYWORD), - detail: Some("Reference to inherited object".to_string()), - ..Default::default() - }); - } + add_object_keyword_completions(&mut items, prefix); } Some(CompletionList { @@ -161,10 +87,13 @@ pub fn completion_with_import_roots_and_semantic( mod tests { use std::sync::Arc; - use jrsonnet_lsp_document::DocVersion; + use jrsonnet_lsp_document::{DocVersion, Document}; + use jrsonnet_lsp_inference::TypeAnalysis; use jrsonnet_lsp_types::GlobalTyStore; + use lsp_types::{CompletionItem, CompletionItemKind}; - use super::{super::imports::find_import_string_start, *}; + use super::super::completion; + use crate::completion::imports::find_import_string_start; /// Create a `TypeAnalysis` for test purposes. 
fn test_analysis(doc: &Document) -> TypeAnalysis { diff --git a/crates/jrsonnet-lsp-handlers/src/completion/handler/keywords.rs b/crates/jrsonnet-lsp-handlers/src/completion/handler/keywords.rs new file mode 100644 index 00000000..c837f67f --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/completion/handler/keywords.rs @@ -0,0 +1,40 @@ +use lsp_types::{CompletionItem, CompletionItemKind}; + +pub(super) fn add_std_completion(items: &mut Vec) { + // `std` is always available in scope. + items.push(CompletionItem { + label: "std".to_string(), + kind: Some(CompletionItemKind::MODULE), + detail: Some("Jsonnet standard library".to_string()), + ..Default::default() + }); +} + +pub(super) fn add_object_keyword_completions(items: &mut Vec, prefix: &str) { + if prefix.is_empty() || "$".starts_with(prefix) { + items.push(CompletionItem { + label: "$".to_string(), + kind: Some(CompletionItemKind::KEYWORD), + detail: Some("Reference to root object".to_string()), + ..Default::default() + }); + } + + if prefix.is_empty() || "self".starts_with(prefix) { + items.push(CompletionItem { + label: "self".to_string(), + kind: Some(CompletionItemKind::KEYWORD), + detail: Some("Reference to current object".to_string()), + ..Default::default() + }); + } + + if prefix.is_empty() || "super".starts_with(prefix) { + items.push(CompletionItem { + label: "super".to_string(), + kind: Some(CompletionItemKind::KEYWORD), + detail: Some("Reference to inherited object".to_string()), + ..Default::default() + }); + } +} diff --git a/crates/jrsonnet-lsp-handlers/src/completion/handler/mod.rs b/crates/jrsonnet-lsp-handlers/src/completion/handler/mod.rs new file mode 100644 index 00000000..b931825e --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/completion/handler/mod.rs @@ -0,0 +1,70 @@ +//! Completion handler for providing code completions. +//! +//! Supports: +//! - Standard library functions (triggered by `std.`) +//! - Local variables in scope +//! 
- Object field completion (triggered by `obj.`) +//! - Import path completion (inside import strings) + +mod dispatch; +mod keywords; + +use std::path::{Path, PathBuf}; + +use jrsonnet_lsp_document::{Document, LspPosition}; +use jrsonnet_lsp_inference::{SemanticArtifacts, TypeAnalysis}; +use lsp_types::CompletionList; + +/// Get completion items for the given position. +/// +/// `doc_path` is the path to the current document, used for import path completion. +/// The `analysis` parameter should be pre-computed using `TypeProvider` to ensure +/// that import types are properly resolved. +pub fn completion( + document: &Document, + position: LspPosition, + doc_path: Option<&Path>, + analysis: &TypeAnalysis, +) -> Option { + completion_with_import_roots_and_semantic(document, position, doc_path, &[], analysis, None) +} + +/// Get completion items with explicit import search roots. +/// +/// `import_roots` is typically configured from server `jpath` entries and used +/// when completing inside import strings. +pub fn completion_with_import_roots( + document: &Document, + position: LspPosition, + doc_path: Option<&Path>, + import_roots: &[PathBuf], + analysis: &TypeAnalysis, +) -> Option { + completion_with_import_roots_and_semantic( + document, + position, + doc_path, + import_roots, + analysis, + None, + ) +} + +/// Get completion items with explicit import roots and semantic artifacts. 
+pub fn completion_with_import_roots_and_semantic( + document: &Document, + position: LspPosition, + doc_path: Option<&Path>, + import_roots: &[PathBuf], + analysis: &TypeAnalysis, + semantic: Option<&SemanticArtifacts>, +) -> Option { + dispatch::completion_dispatch( + document, + position, + doc_path, + import_roots, + analysis, + semantic, + ) +} diff --git a/crates/jrsonnet-lsp-handlers/src/definition.rs b/crates/jrsonnet-lsp-handlers/src/definition.rs deleted file mode 100644 index edcc7b16..00000000 --- a/crates/jrsonnet-lsp-handlers/src/definition.rs +++ /dev/null @@ -1,1019 +0,0 @@ -//! Go-to-definition handler. -//! -//! Finds the definition of a symbol at a given position by: -//! 1. Finding the token at the cursor position -//! 2. If it's a variable reference, walking up the scope chain to find the binding -//! 3. If it's an import path, returning the import path for resolution -//! 4. If it's a field access on an import, returning the import path and field chain - -use jrsonnet_lsp_document::{ - find_node_at_offset, to_lsp_range, token_at_offset, ByteOffset, Document, LspPosition, -}; -use jrsonnet_lsp_import::{check_import_path, extract_import_path}; -use jrsonnet_lsp_inference::{ - trace_base, trace_expr, ConstEvalResult, SemanticArtifacts, SemanticBindingKind, - SemanticImportTarget, -}; -use jrsonnet_lsp_scope::{find_definition_range, is_variable_reference}; -use jrsonnet_rowan_parser::{ - nodes::{ - Bind, BindFunction, Destruct, Expr, ExprBase, ExprField, ExprFunction, ForSpec, - MemberBindStmt, Param, StmtLocal, - }, - AstNode, SyntaxKind, SyntaxNode, SyntaxToken, -}; -use lsp_types::Range; -use rowan::TextRange; -use tracing::debug; - -#[derive(Clone, Copy, Debug, Eq, PartialEq)] -enum DefinitionMode { - Declaration, - Definition, -} - -#[derive(Debug, Clone, PartialEq, Eq)] -enum CanonicalDefinition { - Local(TextRange), - Import(String), - ImportField { path: String, fields: Vec }, -} - -/// Result of a go-to-definition request. 
-#[derive(Debug, Clone, PartialEq, Eq)] -pub enum DefinitionResult { - /// A local definition within the same document. - Local(Range), - /// An import path that needs to be resolved relative to the document. - Import(String), - /// A field in an imported file. Contains the import path and the field chain. - /// For `lib.foo.bar`, this would be `("lib.libsonnet", ["foo", "bar"])`. - ImportField { - /// The import path to resolve. - path: String, - /// The chain of field names to navigate. - fields: Vec, - }, -} - -/// Find the definition of the symbol at the given position. -/// -/// Returns a `DefinitionResult` indicating either: -/// - A local range within the document -/// - An import path that needs to be resolved by the caller -/// - A field in an imported file (path + field chain) -pub fn goto_definition(document: &Document, position: LspPosition) -> Option { - goto_with_mode(document, position, DefinitionMode::Definition, None) -} - -/// Find the declaration site of the symbol at the given position. -/// -/// Unlike `goto_definition`, this does not follow local alias chains. -pub fn goto_declaration(document: &Document, position: LspPosition) -> Option { - goto_with_mode(document, position, DefinitionMode::Declaration, None) -} - -/// Find the definition of the symbol at the given position using semantic artifacts when available. -pub fn goto_definition_with_semantic( - document: &Document, - position: LspPosition, - semantic: Option<&SemanticArtifacts>, -) -> Option { - goto_with_mode(document, position, DefinitionMode::Definition, semantic) -} - -/// Find the declaration site of the symbol at the given position using semantic artifacts when available. 
-pub fn goto_declaration_with_semantic( - document: &Document, - position: LspPosition, - semantic: Option<&SemanticArtifacts>, -) -> Option { - goto_with_mode(document, position, DefinitionMode::Declaration, semantic) -} - -fn goto_with_mode( - document: &Document, - position: LspPosition, - mode: DefinitionMode, - semantic: Option<&SemanticArtifacts>, -) -> Option { - let text = document.text(); - let line_index = document.line_index(); - - // Convert LSP position to byte offset - let offset = line_index.offset(position, text)?; - - // Get the AST root - let ast = document.ast(); - - // Find the token at the offset - let token = token_at_offset(ast.syntax(), offset)?; - - // Check for import path first - if let Some(import_path) = check_import_path(&token) { - debug!(import_path = %import_path, "found import definition"); - return Some(DefinitionResult::Import(import_path)); - } - - // Check if this is an identifier that could be a variable reference - if token.kind() != SyntaxKind::IDENT { - return None; - } - - let name = token.text().to_string(); - - // Check if this identifier is a field access (e.g., `foo` in `lib.foo`) - // and if the base resolves to an import - if let Some(result) = check_field_access_on_import(&token, &name, document) { - return Some(result); - } - - // Check if this identifier is part of a variable reference (ExprVar) - if !is_variable_reference(&token) { - return None; - } - - // Walk up the scope chain to find the definition - let def_range = semantic - .and_then(|artifacts| artifacts.definition_for_ident_token(&token)) - .or_else(|| find_definition_range(&token, &name))?; - - let result = if mode == DefinitionMode::Definition { - match resolve_canonical_definition(document, def_range, semantic) { - CanonicalDefinition::Local(range) => { - DefinitionResult::Local(to_lsp_range(range, line_index, text)) - } - CanonicalDefinition::Import(path) => DefinitionResult::Import(path), - CanonicalDefinition::ImportField { path, fields } => { - 
DefinitionResult::ImportField { path, fields } - } - } - } else { - DefinitionResult::Local(to_lsp_range(def_range, line_index, text)) - }; - - debug!(name = %name, mode = ?mode, ?result, "resolved symbol target"); - Some(result) -} - -fn resolve_canonical_definition( - document: &Document, - initial_def: TextRange, - semantic: Option<&SemanticArtifacts>, -) -> CanonicalDefinition { - let mut visited = std::collections::HashSet::new(); - let mut current = initial_def; - - loop { - if !visited.insert(current) { - return CanonicalDefinition::Local(current); - } - - if let Some(artifacts) = semantic { - if let Some(binding) = artifacts.binding_info(current) { - if let Some(import_target) = &binding.import_target { - return match import_target { - SemanticImportTarget::Import { path } => { - CanonicalDefinition::Import(path.clone()) - } - SemanticImportTarget::ImportField { path, fields } => { - CanonicalDefinition::ImportField { - path: path.clone(), - fields: fields.clone(), - } - } - }; - } - - if let Some(next) = binding.alias_definition { - current = next; - continue; - } - - return CanonicalDefinition::Local(current); - } - } - - let Some(bind) = find_bind_by_definition_range(document, current) else { - return CanonicalDefinition::Local(current); - }; - let Some(value_expr) = bind_value_expr(&bind) else { - return CanonicalDefinition::Local(current); - }; - - if let Some((path, fields)) = resolve_expr_to_import(&value_expr, document) { - return if fields.is_empty() { - CanonicalDefinition::Import(path) - } else { - CanonicalDefinition::ImportField { path, fields } - }; - } - - let Some(next) = aliased_definition_range(&value_expr) else { - return CanonicalDefinition::Local(current); - }; - current = next; - } -} - -fn find_bind_by_definition_range(document: &Document, range: TextRange) -> Option { - let ast = document.ast(); - let root = ast.syntax(); - let offset = ByteOffset::from(u32::from(range.start())); - - if let Some(token) = token_at_offset(root, offset) 
{ - if token.kind() == SyntaxKind::IDENT { - if let Some(bind) = token - .parent() - .and_then(|node| node.ancestors().find_map(Bind::cast)) - { - if bind_definition_range(&bind) - .is_some_and(|definition_range| definition_range == range) - { - return Some(bind); - } - } - } - } - - root.descendants().filter_map(Bind::cast).find(|bind| { - bind_definition_range(bind).is_some_and(|definition_range| definition_range == range) - }) -} - -fn bind_definition_range(bind: &Bind) -> Option { - match bind { - Bind::BindDestruct(bind) => { - let destruct = bind.into()?; - let Destruct::DestructFull(full) = destruct else { - return None; - }; - Some(full.name()?.syntax().text_range()) - } - Bind::BindFunction(bind) => Some(bind.name()?.syntax().text_range()), - } -} - -fn bind_value_expr(bind: &Bind) -> Option { - match bind { - Bind::BindDestruct(bind) => bind.value(), - Bind::BindFunction(bind) => bind.value(), - } -} - -fn resolve_expr_to_import(value_expr: &Expr, document: &Document) -> Option<(String, Vec)> { - if let ExprBase::ExprImport(import) = value_expr.expr_base()? { - return Some((extract_import_path(&import)?, Vec::new())); - } - - match trace_expr(value_expr, document)? { - ConstEvalResult::Import { path, fields } => Some((path, fields)), - ConstEvalResult::Std { .. } | ConstEvalResult::Local { .. } => None, - } -} - -fn aliased_definition_range(value_expr: &Expr) -> Option { - let ExprBase::ExprVar(var) = value_expr.expr_base()? else { - return None; - }; - let ident = var.name()?.ident_lit()?; - find_definition_range(&ident, ident.text()) -} - -/// Check if the token is a field name in a field access expression where the base -/// is defined as an import. For example, `foo` in `lib.foo` where `lib = import "..."`. -/// -/// Returns `DefinitionResult::ImportField` if this is a field access on an import. 
-fn check_field_access_on_import( - token: &SyntaxToken, - _field_name: &str, - document: &Document, -) -> Option { - // Check if we're inside an ExprField (field access like .foo) - let expr_field = token.parent_ancestors().find_map(ExprField::cast)?; - - // Verify this token is the field name in the ExprField - let field_name_node = expr_field.field()?; - if field_name_node.syntax().text_range() != token.parent()?.text_range() { - return None; - } - - // Build the field chain by walking up the ExprField chain - // We collect fields from innermost to outermost, then reverse - let mut fields = Vec::new(); - let mut current_field = expr_field; - - // Add the current field name - if let Some(name) = current_field.field() { - if let Some(ident) = name.ident_lit() { - fields.push(ident.text().to_string()); - } - } - - // Walk up the ExprField chain (from outermost to innermost in the code) - // For `lib.foo.bar`, the current_field starts at `.bar`, its base is ExprField `.foo` - let base = loop { - // Get the base of the current field access - let base_expr = current_field.base()?; - - match base_expr.expr_base()? { - ExprBase::ExprField(inner_field) => { - // Another field access - collect its field name and continue - if let Some(name) = inner_field.field() { - if let Some(ident) = name.ident_lit() { - fields.push(ident.text().to_string()); - } - } - current_field = inner_field; - } - other => { - // Found the base (could be ExprVar, ExprImport, etc.) 
- break other; - } - } - }; - - // Reverse to get fields in order from base outward - fields.reverse(); - - // Now check if the base resolves to an import - let (import_path, mut base_fields) = resolve_base_to_import(&base, document)?; - - // Combine base fields (from tracing through bindings) with field chain - base_fields.extend(fields); - - debug!( - import_path = %import_path, - fields = ?base_fields, - "found import field definition" - ); - - Some(DefinitionResult::ImportField { - path: import_path, - fields: base_fields, - }) -} - -/// Try to resolve the base expression to an import path and any additional fields. -/// -/// This handles cases like: -/// - `lib.foo` where `lib = import "lib.libsonnet"` -/// - Direct import: `(import "lib.libsonnet").foo` -/// - Chained bindings: `local x = import "..."; local y = x; y.foo` -/// -/// Returns the import path and any fields that were traced through the base. -fn resolve_base_to_import(base: &ExprBase, document: &Document) -> Option<(String, Vec)> { - // Use const_eval to trace through bindings - match trace_base(base, document)? { - ConstEvalResult::Import { path, fields } => Some((path, fields)), - ConstEvalResult::Std { .. } | ConstEvalResult::Local { .. } => None, - } -} - -/// A binding visible at a given position. -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct VisibleBinding { - /// The name of the binding. - pub name: String, - /// The kind of binding. - pub kind: BindingKind, - /// The text range of the definition. - pub range: TextRange, -} - -/// The kind of a binding. -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub enum BindingKind { - /// A local variable binding (local x = ...). - LocalVariable, - /// A function binding (local f(x) = ...). - LocalFunction, - /// A function parameter. - Parameter, - /// A for-comprehension variable. - ForVariable, -} - -/// Collect all visible bindings at the given byte offset. 
-#[must_use] -pub fn collect_visible_bindings(document: &Document, position: LspPosition) -> Vec { - collect_visible_bindings_with_semantic(document, position, None) -} - -/// Collect all visible bindings at the given byte offset using semantic artifacts when available. -#[must_use] -pub fn collect_visible_bindings_with_semantic( - document: &Document, - position: LspPosition, - semantic: Option<&SemanticArtifacts>, -) -> Vec { - let text = document.text(); - let line_index = document.line_index(); - - // Convert LSP position to byte offset - let Some(offset) = line_index.offset(position, text) else { - return Vec::new(); - }; - - if let Some(artifacts) = semantic { - return artifacts - .visible_bindings_at(offset.into()) - .into_iter() - .map(|binding| VisibleBinding { - name: binding.name, - range: binding.range, - kind: match binding.kind { - SemanticBindingKind::LocalVariable => BindingKind::LocalVariable, - SemanticBindingKind::LocalFunction => BindingKind::LocalFunction, - SemanticBindingKind::Parameter => BindingKind::Parameter, - SemanticBindingKind::ForVariable => BindingKind::ForVariable, - }, - }) - .collect(); - } - - // Get the AST root - let ast = document.ast(); - let root = ast.syntax(); - - let Some(mut current) = start_node_for_offset(root, offset) else { - return Vec::new(); - }; - - // Walk up the scope chain and collect all bindings - let mut bindings = Vec::new(); - let mut seen_names = std::collections::HashSet::new(); - - // First collect from current node if it's a scope - if is_scope_node(¤t) { - // Create a dummy child at the end for scope checking - collect_scope_bindings_at_offset( - ¤t, - u32::from(offset), - &mut bindings, - &mut seen_names, - ); - } - - while let Some(parent) = current.parent() { - collect_scope_bindings_at_offset( - &parent, - u32::from(offset), - &mut bindings, - &mut seen_names, - ); - current = parent; - } - - bindings -} - -fn start_node_for_offset(root: &SyntaxNode, offset: ByteOffset) -> Option { - 
token_at_offset(root, offset) - .and_then(|token| token.parent()) - .or_else(|| { - let offset_u32 = u32::from(offset); - if offset_u32 == 0 { - return None; - } - token_at_offset(root, ByteOffset::from(offset_u32 - 1)).and_then(|token| token.parent()) - }) - .or_else(|| find_node_at_offset(root, offset)) -} - -/// Check if a node is a scope-introducing node. -fn is_scope_node(node: &SyntaxNode) -> bool { - matches!( - node.kind(), - SyntaxKind::EXPR - | SyntaxKind::EXPR_FUNCTION - | SyntaxKind::BIND_FUNCTION - | SyntaxKind::FOR_SPEC - | SyntaxKind::OBJ_BODY_MEMBER_LIST - | SyntaxKind::EXPR_ARRAY_COMP - | SyntaxKind::OBJ_BODY_COMP - ) -} - -/// Collect bindings from a scope using offset for visibility checking. -fn collect_scope_bindings_at_offset( - scope: &SyntaxNode, - offset: u32, - bindings: &mut Vec, - seen: &mut std::collections::HashSet, -) { - match scope.kind() { - SyntaxKind::EXPR => { - collect_expr_bindings_at_offset(scope, offset, bindings, seen); - } - SyntaxKind::EXPR_FUNCTION => { - collect_function_params(scope, bindings, seen); - } - SyntaxKind::BIND_FUNCTION => { - collect_bind_function_params(scope, bindings, seen); - } - SyntaxKind::FOR_SPEC => { - collect_for_binding(scope, bindings, seen); - } - SyntaxKind::OBJ_BODY_MEMBER_LIST => { - collect_object_locals(scope, bindings, seen); - } - SyntaxKind::EXPR_ARRAY_COMP | SyntaxKind::OBJ_BODY_COMP => { - collect_comprehension_bindings(scope, bindings, seen); - } - _ => {} - } -} - -/// Collect local bindings from an Expr using offset for visibility. 
-fn collect_expr_bindings_at_offset( - expr: &SyntaxNode, - offset: u32, - bindings: &mut Vec, - seen: &mut std::collections::HashSet, -) { - let text_size = rowan::TextSize::from(offset); - - for stmt_node in expr.children() { - if stmt_node.kind() == SyntaxKind::STMT_LOCAL { - // Only consider bindings that appear before our position - if stmt_node.text_range().end() > text_size { - continue; - } - - if let Some(stmt_local) = StmtLocal::cast(stmt_node) { - for bind in stmt_local.binds() { - if let Some(binding) = extract_binding(&bind) { - if !seen.contains(&binding.name) { - seen.insert(binding.name.clone()); - bindings.push(binding); - } - } - } - } - } - } -} - -/// Extract a binding from a Bind node. -fn extract_binding(bind: &Bind) -> Option { - match bind { - Bind::BindDestruct(bd) => { - let destruct = bd.into()?; - if let Destruct::DestructFull(full) = destruct { - let bind_name = full.name()?; - let ident = bind_name.ident_lit()?; - return Some(VisibleBinding { - name: ident.text().to_string(), - kind: BindingKind::LocalVariable, - range: bind_name.syntax().text_range(), - }); - } - None - } - Bind::BindFunction(bf) => { - let bind_name = bf.name()?; - let ident = bind_name.ident_lit()?; - Some(VisibleBinding { - name: ident.text().to_string(), - kind: BindingKind::LocalFunction, - range: bind_name.syntax().text_range(), - }) - } - } -} - -/// Collect function parameters. -fn collect_function_params( - func_node: &SyntaxNode, - bindings: &mut Vec, - seen: &mut std::collections::HashSet, -) { - let Some(func) = ExprFunction::cast(func_node.clone()) else { - return; - }; - let Some(params) = func.params_desc() else { - return; - }; - - for param in params.params() { - if let Some(binding) = extract_param_binding(¶m) { - if !seen.contains(&binding.name) { - seen.insert(binding.name.clone()); - bindings.push(binding); - } - } - } -} - -/// Collect `BindFunction` parameters. 
-fn collect_bind_function_params( - func_node: &SyntaxNode, - bindings: &mut Vec, - seen: &mut std::collections::HashSet, -) { - let Some(func) = BindFunction::cast(func_node.clone()) else { - return; - }; - let Some(params) = func.params() else { - return; - }; - - for param in params.params() { - if let Some(binding) = extract_param_binding(¶m) { - if !seen.contains(&binding.name) { - seen.insert(binding.name.clone()); - bindings.push(binding); - } - } - } -} - -/// Extract a binding from a Param node. -fn extract_param_binding(param: &Param) -> Option { - let destruct = param.destruct()?; - if let Destruct::DestructFull(full) = destruct { - let param_name = full.name()?; - let ident = param_name.ident_lit()?; - return Some(VisibleBinding { - name: ident.text().to_string(), - kind: BindingKind::Parameter, - range: param_name.syntax().text_range(), - }); - } - None -} - -/// Collect for-comprehension binding. -fn collect_for_binding( - for_node: &SyntaxNode, - bindings: &mut Vec, - seen: &mut std::collections::HashSet, -) { - let Some(for_spec) = ForSpec::cast(for_node.clone()) else { - return; - }; - let Some(destruct) = for_spec.bind() else { - return; - }; - - if let Destruct::DestructFull(full) = destruct { - if let Some(bind_name) = full.name() { - if let Some(ident) = bind_name.ident_lit() { - let name = ident.text().to_string(); - if !seen.contains(&name) { - seen.insert(name.clone()); - bindings.push(VisibleBinding { - name, - kind: BindingKind::ForVariable, - range: bind_name.syntax().text_range(), - }); - } - } - } - } -} - -/// Collect object local bindings. 
-fn collect_object_locals( - obj_body: &SyntaxNode, - bindings: &mut Vec, - seen: &mut std::collections::HashSet, -) { - for member_node in obj_body.children() { - if member_node.kind() != SyntaxKind::MEMBER_BIND_STMT { - continue; - } - let Some(member_bind) = MemberBindStmt::cast(member_node) else { - continue; - }; - let Some(obj_local) = member_bind.obj_local() else { - continue; - }; - let Some(bind) = obj_local.bind() else { - continue; - }; - let Some(binding) = extract_binding(&bind) else { - continue; - }; - if seen.contains(&binding.name) { - continue; - } - seen.insert(binding.name.clone()); - bindings.push(binding); - } -} - -/// Collect bindings from comprehension `FOR_SPEC` children. -fn collect_comprehension_bindings( - comp_node: &SyntaxNode, - bindings: &mut Vec, - seen: &mut std::collections::HashSet, -) { - for child in comp_node.children() { - if child.kind() != SyntaxKind::FOR_SPEC { - continue; - } - collect_for_binding(&child, bindings, seen); - } -} - -#[cfg(test)] -mod tests { - use jrsonnet_lsp_document::DocVersion; - - use super::*; - - /// Helper to extract local range from definition result. 
- fn expect_local(result: Option) -> Range { - match result { - Some(DefinitionResult::Local(r)) => r, - other => panic!("Expected Local definition, got {other:?}"), - } - } - - fn expect_import_field( - result: Option, - expected_path: &str, - expected_fields: &[&str], - ) { - match result { - Some(DefinitionResult::ImportField { path, fields }) => { - assert_eq!(path, expected_path); - assert_eq!(fields, expected_fields); - } - other => panic!("Expected ImportField definition, got {other:?}"), - } - } - - #[test] - fn test_local_variable_definition() { - let code = r"local x = 1; x + 1"; - // ^^^^^^ def ^ use at position 13 - let doc = Document::new(code.to_string(), DocVersion::new(1)); - - // Position at the 'x' in 'x + 1' (character 13) - let pos = (0, 13).into(); - - let result = goto_definition(&doc, pos); - let r = expect_local(result); - // Definition should be at position 6 (the 'x' in 'local x') - assert_eq!(r.start.line, 0); - assert_eq!(r.start.character, 6); - } - - #[test] - fn test_function_parameter_definition() { - let code = r"local f(x) = x * 2; f(3)"; - // ^ param ^ use - let doc = Document::new(code.to_string(), DocVersion::new(1)); - - // Position at the 'x' in 'x * 2' (character 13) - let pos = (0, 13).into(); - - let result = goto_definition(&doc, pos); - let r = expect_local(result); - // Definition should be at position 8 (the 'x' parameter) - assert_eq!(r.start.line, 0); - assert_eq!(r.start.character, 8); - } - - #[test] - fn test_nested_local_shadowing() { - let code = r"local x = 1; local x = 2; x"; - // ^def1 ^def2 ^use - let doc = Document::new(code.to_string(), DocVersion::new(1)); - - // Position at the final 'x' (character 26) - let pos = (0, 26).into(); - - let result = goto_definition(&doc, pos); - let r = expect_local(result); - // Should find the second (closer) definition at position 19 - assert_eq!(r.start.character, 19); - } - - #[test] - fn test_no_definition_for_undefined() { - let code = r"local x = 1; y + 1"; - // ^ 
undefined - let doc = Document::new(code.to_string(), DocVersion::new(1)); - - // Position at 'y' (character 13) - let pos = (0, 13).into(); - - let result = goto_definition(&doc, pos); - assert!( - result.is_none(), - "Should not find definition for undefined y" - ); - } - - #[test] - fn test_import_definition() { - let code = r#"import "lib/utils.libsonnet""#; - // ^ cursor in string - let doc = Document::new(code.to_string(), DocVersion::new(1)); - - // Position inside the import string (character 8) - let pos = (0, 8).into(); - - let result = goto_definition(&doc, pos); - match result { - Some(DefinitionResult::Import(path)) => { - assert_eq!(path, "lib/utils.libsonnet"); - } - other => panic!("Expected Import definition, got {other:?}"), - } - } - - #[test] - fn test_importstr_definition() { - let code = r#"importstr "data/config.txt""#; - let doc = Document::new(code.to_string(), DocVersion::new(1)); - - let pos = (0, 12).into(); - - let result = goto_definition(&doc, pos); - match result { - Some(DefinitionResult::Import(path)) => { - assert_eq!(path, "data/config.txt"); - } - other => panic!("Expected Import definition, got {other:?}"), - } - } - - #[test] - fn test_for_comprehension_variable() { - let code = r"[x * 2 for x in [1, 2, 3]]"; - // ^use ^def - let doc = Document::new(code.to_string(), DocVersion::new(1)); - - // Position at 'x' in 'x * 2' (character 1) - let pos = (0, 1).into(); - - let result = goto_definition(&doc, pos); - let r = expect_local(result); - // Definition should be at position 11 (the 'x' in 'for x') - assert_eq!(r.start.line, 0); - assert_eq!(r.start.character, 11); - } - - #[test] - fn test_object_local_binding() { - let code = r"{ local helper = 1, result: helper }"; - // ^def ^use - let doc = Document::new(code.to_string(), DocVersion::new(1)); - - // Position at 'helper' in 'result: helper' (character 28) - let pos = (0, 28).into(); - - let result = goto_definition(&doc, pos); - let r = expect_local(result); - // Definition 
should be at position 8 (the 'helper' in 'local helper') - assert_eq!(r.start.line, 0); - assert_eq!(r.start.character, 8); - } - - #[test] - fn test_field_access_on_import() { - let code = r#"local lib = import "lib.libsonnet"; lib.foo"#; - // ^field access - let doc = Document::new(code.to_string(), DocVersion::new(1)); - - // Position at 'foo' in 'lib.foo' (character 40) - let pos = (0, 40).into(); - - let result = goto_definition(&doc, pos); - match result { - Some(DefinitionResult::ImportField { path, fields }) => { - assert_eq!(path, "lib.libsonnet"); - assert_eq!(fields, vec!["foo"]); - } - other => panic!("Expected ImportField definition, got {other:?}"), - } - } - - #[test] - fn test_nested_field_access_on_import() { - let code = r#"local lib = import "lib.libsonnet"; lib.foo.bar"#; - // ^field access - let doc = Document::new(code.to_string(), DocVersion::new(1)); - - // Position at 'bar' in 'lib.foo.bar' (character 44) - let pos = (0, 44).into(); - - let result = goto_definition(&doc, pos); - match result { - Some(DefinitionResult::ImportField { path, fields }) => { - assert_eq!(path, "lib.libsonnet"); - assert_eq!(fields, vec!["foo", "bar"]); - } - other => panic!("Expected ImportField definition, got {other:?}"), - } - } - - #[test] - fn test_multiline_local_definition() { - let code = "local\n x\n =\n 1;\nx"; - // ^def(line 1, char 2) ^use(line 4, char 0) - let doc = Document::new(code.to_string(), DocVersion::new(1)); - - // Position at 'x' on the last line - let pos = (4, 0).into(); - - let result = goto_definition(&doc, pos); - let r = expect_local(result); - // Definition should be on line 1, character 2 - assert_eq!(r.start.line, 1); - assert_eq!(r.start.character, 2); - } - - #[test] - fn test_definition_follows_local_alias_chain() { - let code = "local x = 1;\nlocal y = x;\ny"; - let doc = Document::new(code.to_string(), DocVersion::new(1)); - - let definition = expect_local(goto_definition(&doc, (2, 0).into())); - let declaration = 
expect_local(goto_declaration(&doc, (2, 0).into())); - - assert_eq!( - definition, - Range { - start: lsp_types::Position { - line: 0, - character: 6, - }, - end: lsp_types::Position { - line: 0, - character: 7, - }, - } - ); - assert_eq!( - declaration, - Range { - start: lsp_types::Position { - line: 1, - character: 6, - }, - end: lsp_types::Position { - line: 1, - character: 7, - }, - } - ); - } - - #[test] - fn test_definition_resolves_alias_to_import_field() { - let code = r#"local lib = import "lib.libsonnet"; -local alias = lib.foo; -alias"#; - let doc = Document::new(code.to_string(), DocVersion::new(1)); - - let definition = goto_definition(&doc, (2, 1).into()); - expect_import_field(definition, "lib.libsonnet", &["foo"]); - - let declaration = expect_local(goto_declaration(&doc, (2, 1).into())); - assert_eq!( - declaration, - Range { - start: lsp_types::Position { - line: 1, - character: 6, - }, - end: lsp_types::Position { - line: 1, - character: 11, - }, - } - ); - } - - #[test] - fn test_collect_visible_bindings_at_eof() { - let code = "local x = 1;\n"; - let doc = Document::new(code.to_string(), DocVersion::new(1)); - - assert_eq!( - collect_visible_bindings(&doc, (1, 0).into()), - vec![VisibleBinding { - name: "x".to_string(), - kind: BindingKind::LocalVariable, - range: TextRange::new(6.into(), 7.into()), - }] - ); - } - - #[test] - fn test_collect_visible_bindings_out_of_bounds_position() { - let code = "local x = 1;"; - let doc = Document::new(code.to_string(), DocVersion::new(1)); - - assert_eq!( - collect_visible_bindings(&doc, (9, 0).into()), - Vec::::new() - ); - } -} diff --git a/crates/jrsonnet-lsp-handlers/src/definition/field_path.rs b/crates/jrsonnet-lsp-handlers/src/definition/field_path.rs new file mode 100644 index 00000000..906d87d7 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/definition/field_path.rs @@ -0,0 +1,155 @@ +use jrsonnet_lsp_document::Document; +use jrsonnet_rowan_parser::{ + nodes::{ExprBase, ExprField}, + 
AstNode, SyntaxToken, +}; +use tracing::debug; + +use super::{import::resolve_base_to_import, DefinitionResult}; + +/// Check if the token is a field name in a field access expression where the base +/// is defined as an import. For example, `foo` in `lib.foo` where `lib = import "..."`. +/// +/// Returns `DefinitionResult::ImportField` if this is a field access on an import. +pub(super) fn check_field_access_on_import( + token: &SyntaxToken, + _field_name: &str, + document: &Document, +) -> Option { + // Check if we're inside an ExprField (field access like .foo) + let expr_field = token.parent_ancestors().find_map(ExprField::cast)?; + + // Verify this token is the field name in the ExprField + let field_name_node = expr_field.field()?; + if field_name_node.syntax().text_range() != token.parent()?.text_range() { + return None; + } + + // Build the field chain by walking up the ExprField chain + // We collect fields from innermost to outermost, then reverse + let mut fields = Vec::new(); + let mut current_field = expr_field; + + // Add the current field name + if let Some(name) = current_field.field() { + if let Some(ident) = name.ident_lit() { + fields.push(ident.text().to_string()); + } + } + + // Walk up the ExprField chain (from outermost to innermost in the code) + // For `lib.foo.bar`, the current_field starts at `.bar`, its base is ExprField `.foo` + let base = loop { + // Get the base of the current field access + let base_expr = current_field.base()?; + + match base_expr.expr_base()? { + ExprBase::ExprField(inner_field) => { + // Another field access - collect its field name and continue + if let Some(name) = inner_field.field() { + if let Some(ident) = name.ident_lit() { + fields.push(ident.text().to_string()); + } + } + current_field = inner_field; + } + other => { + // Found the base (could be ExprVar, ExprImport, etc.) 
+ break other; + } + } + }; + + // Reverse to get fields in order from base outward + fields.reverse(); + + // Now check if the base resolves to an import + let (import_path, mut base_fields) = resolve_base_to_import(&base, document)?; + + // Combine base fields (from tracing through bindings) with field chain + base_fields.extend(fields); + + debug!( + import_path = %import_path, + fields = ?base_fields, + "found import field definition" + ); + + Some(DefinitionResult::ImportField { + path: import_path, + fields: base_fields, + }) +} + +#[cfg(test)] +mod tests { + use jrsonnet_lsp_document::{DocVersion, Document}; + use lsp_types::Range; + + use super::super::{goto_declaration, goto_definition, DefinitionResult}; + + fn expect_import_field( + result: Option, + expected_path: &str, + expected_fields: &[&str], + ) { + match result { + Some(DefinitionResult::ImportField { path, fields }) => { + assert_eq!(path, expected_path); + assert_eq!(fields, expected_fields); + } + other => panic!("Expected ImportField definition, got {other:?}"), + } + } + + fn expect_local(result: Option) -> Range { + match result { + Some(DefinitionResult::Local(range)) => range, + other => panic!("Expected Local definition, got {other:?}"), + } + } + + #[test] + fn test_field_access_on_import() { + let code = r#"local lib = import "lib.libsonnet"; lib.foo"#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + let result = goto_definition(&doc, (0, 40).into()); + expect_import_field(result, "lib.libsonnet", &["foo"]); + } + + #[test] + fn test_nested_field_access_on_import() { + let code = r#"local lib = import "lib.libsonnet"; lib.foo.bar"#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + let result = goto_definition(&doc, (0, 44).into()); + expect_import_field(result, "lib.libsonnet", &["foo", "bar"]); + } + + #[test] + fn test_definition_resolves_alias_to_import_field() { + let code = r#"local lib = import "lib.libsonnet"; +local alias = lib.foo; 
+alias"#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + let definition = goto_definition(&doc, (2, 1).into()); + expect_import_field(definition, "lib.libsonnet", &["foo"]); + + let declaration = expect_local(goto_declaration(&doc, (2, 1).into())); + assert_eq!( + declaration, + Range { + start: lsp_types::Position { + line: 1, + character: 6, + }, + end: lsp_types::Position { + line: 1, + character: 11, + }, + } + ); + } +} diff --git a/crates/jrsonnet-lsp-handlers/src/definition/import.rs b/crates/jrsonnet-lsp-handlers/src/definition/import.rs new file mode 100644 index 00000000..f69754c4 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/definition/import.rs @@ -0,0 +1,72 @@ +use jrsonnet_lsp_document::Document; +use jrsonnet_lsp_import::extract_import_path; +use jrsonnet_lsp_inference::{trace_base, trace_expr, ConstEvalResult}; +use jrsonnet_rowan_parser::nodes::{Expr, ExprBase}; + +pub(super) fn resolve_expr_to_import( + value_expr: &Expr, + document: &Document, +) -> Option<(String, Vec)> { + if let ExprBase::ExprImport(import) = value_expr.expr_base()? { + return Some((extract_import_path(&import)?, Vec::new())); + } + + match trace_expr(value_expr, document)? { + ConstEvalResult::Import { path, fields } => Some((path, fields)), + ConstEvalResult::Std { .. } | ConstEvalResult::Local { .. } => None, + } +} + +/// Try to resolve the base expression to an import path and any additional fields. +/// +/// This handles cases like: +/// - `lib.foo` where `lib = import "lib.libsonnet"` +/// - Direct import: `(import "lib.libsonnet").foo` +/// - Chained bindings: `local x = import "..."; local y = x; y.foo` +/// +/// Returns the import path and any fields that were traced through the base. +pub(super) fn resolve_base_to_import( + base: &ExprBase, + document: &Document, +) -> Option<(String, Vec)> { + // Use const_eval to trace through bindings + match trace_base(base, document)? 
{ + ConstEvalResult::Import { path, fields } => Some((path, fields)), + ConstEvalResult::Std { .. } | ConstEvalResult::Local { .. } => None, + } +} + +#[cfg(test)] +mod tests { + use jrsonnet_lsp_document::{DocVersion, Document}; + + use super::super::{goto_definition, DefinitionResult}; + + #[test] + fn test_import_definition() { + let code = r#"import "lib/utils.libsonnet""#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + let result = goto_definition(&doc, (0, 8).into()); + match result { + Some(DefinitionResult::Import(path)) => { + assert_eq!(path, "lib/utils.libsonnet"); + } + other => panic!("Expected Import definition, got {other:?}"), + } + } + + #[test] + fn test_importstr_definition() { + let code = r#"importstr "data/config.txt""#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + let result = goto_definition(&doc, (0, 12).into()); + match result { + Some(DefinitionResult::Import(path)) => { + assert_eq!(path, "data/config.txt"); + } + other => panic!("Expected Import definition, got {other:?}"), + } + } +} diff --git a/crates/jrsonnet-lsp-handlers/src/definition/local.rs b/crates/jrsonnet-lsp-handlers/src/definition/local.rs new file mode 100644 index 00000000..66cee3cf --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/definition/local.rs @@ -0,0 +1,188 @@ +use jrsonnet_lsp_document::{token_at_offset, ByteOffset, Document}; +use jrsonnet_lsp_scope::find_definition_range; +use jrsonnet_rowan_parser::{ + nodes::{Bind, Destruct, Expr, ExprBase}, + AstNode, SyntaxKind, +}; +use rowan::TextRange; + +pub(super) fn find_bind_by_definition_range(document: &Document, range: TextRange) -> Option { + let ast = document.ast(); + let root = ast.syntax(); + let offset = ByteOffset::from(u32::from(range.start())); + + if let Some(token) = token_at_offset(root, offset) { + if token.kind() == SyntaxKind::IDENT { + if let Some(bind) = token + .parent() + .and_then(|node| node.ancestors().find_map(Bind::cast)) + { + if 
bind_definition_range(&bind) + .is_some_and(|definition_range| definition_range == range) + { + return Some(bind); + } + } + } + } + + root.descendants().filter_map(Bind::cast).find(|bind| { + bind_definition_range(bind).is_some_and(|definition_range| definition_range == range) + }) +} + +fn bind_definition_range(bind: &Bind) -> Option { + match bind { + Bind::BindDestruct(bind) => { + let destruct = bind.into()?; + let Destruct::DestructFull(full) = destruct else { + return None; + }; + Some(full.name()?.syntax().text_range()) + } + Bind::BindFunction(bind) => Some(bind.name()?.syntax().text_range()), + } +} + +pub(super) fn bind_value_expr(bind: &Bind) -> Option { + match bind { + Bind::BindDestruct(bind) => bind.value(), + Bind::BindFunction(bind) => bind.value(), + } +} + +pub(super) fn aliased_definition_range(value_expr: &Expr) -> Option { + let ExprBase::ExprVar(var) = value_expr.expr_base()? else { + return None; + }; + let ident = var.name()?.ident_lit()?; + find_definition_range(&ident, ident.text()) +} + +#[cfg(test)] +mod tests { + use jrsonnet_lsp_document::{DocVersion, Document}; + use lsp_types::Range; + + use super::super::{goto_declaration, goto_definition, DefinitionResult}; + + fn expect_local(result: Option) -> Range { + match result { + Some(DefinitionResult::Local(range)) => range, + other => panic!("Expected Local definition, got {other:?}"), + } + } + + #[test] + fn test_local_variable_definition() { + let code = r"local x = 1; x + 1"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + let result = goto_definition(&doc, (0, 13).into()); + let range = expect_local(result); + assert_eq!(range.start.line, 0); + assert_eq!(range.start.character, 6); + } + + #[test] + fn test_function_parameter_definition() { + let code = r"local f(x) = x * 2; f(3)"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + let result = goto_definition(&doc, (0, 13).into()); + let range = expect_local(result); + 
assert_eq!(range.start.line, 0); + assert_eq!(range.start.character, 8); + } + + #[test] + fn test_nested_local_shadowing() { + let code = r"local x = 1; local x = 2; x"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + let result = goto_definition(&doc, (0, 26).into()); + let range = expect_local(result); + assert_eq!(range.start.character, 19); + } + + #[test] + fn test_no_definition_for_undefined() { + let code = r"local x = 1; y + 1"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + let result = goto_definition(&doc, (0, 13).into()); + assert!( + result.is_none(), + "Should not find definition for undefined y" + ); + } + + #[test] + fn test_for_comprehension_variable() { + let code = r"[x * 2 for x in [1, 2, 3]]"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + let result = goto_definition(&doc, (0, 1).into()); + let range = expect_local(result); + assert_eq!(range.start.line, 0); + assert_eq!(range.start.character, 11); + } + + #[test] + fn test_object_local_binding() { + let code = r"{ local helper = 1, result: helper }"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + let result = goto_definition(&doc, (0, 28).into()); + let range = expect_local(result); + assert_eq!(range.start.line, 0); + assert_eq!(range.start.character, 8); + } + + #[test] + fn test_multiline_local_definition() { + let code = "local\n x\n =\n 1;\nx"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + let result = goto_definition(&doc, (4, 0).into()); + let range = expect_local(result); + assert_eq!(range.start.line, 1); + assert_eq!(range.start.character, 2); + } + + #[test] + fn test_definition_follows_local_alias_chain() { + let code = "local x = 1;\nlocal y = x;\ny"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + let definition = expect_local(goto_definition(&doc, (2, 0).into())); + let declaration = expect_local(goto_declaration(&doc, (2, 0).into())); + + 
assert_eq!( + definition, + Range { + start: lsp_types::Position { + line: 0, + character: 6, + }, + end: lsp_types::Position { + line: 0, + character: 7, + }, + } + ); + assert_eq!( + declaration, + Range { + start: lsp_types::Position { + line: 1, + character: 6, + }, + end: lsp_types::Position { + line: 1, + character: 7, + }, + } + ); + } +} diff --git a/crates/jrsonnet-lsp-handlers/src/definition/mod.rs b/crates/jrsonnet-lsp-handlers/src/definition/mod.rs new file mode 100644 index 00000000..3272c7e5 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/definition/mod.rs @@ -0,0 +1,221 @@ +//! Go-to-definition handler. +//! +//! Finds the definition of a symbol at a given position by: +//! 1. Finding the token at the cursor position +//! 2. If it's a variable reference, walking up the scope chain to find the binding +//! 3. If it's an import path, returning the import path for resolution +//! 4. If it's a field access on an import, returning the import path and field chain + +mod field_path; +mod import; +mod local; +mod symbol; + +use jrsonnet_lsp_document::{to_lsp_range, token_at_offset, Document, LspPosition}; +use jrsonnet_lsp_import::check_import_path; +use jrsonnet_lsp_inference::{SemanticArtifacts, SemanticImportTarget}; +use jrsonnet_lsp_scope::{find_definition_range, is_variable_reference}; +use jrsonnet_rowan_parser::{AstNode, SyntaxKind}; +use lsp_types::Range; +use rowan::TextRange; +use tracing::debug; + +use self::{ + field_path::check_field_access_on_import, + import::resolve_expr_to_import, + local::{aliased_definition_range, bind_value_expr, find_bind_by_definition_range}, +}; + +#[derive(Clone, Copy, Debug, Eq, PartialEq)] +enum DefinitionMode { + Declaration, + Definition, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +enum CanonicalDefinition { + Local(TextRange), + Import(String), + ImportField { path: String, fields: Vec }, +} + +/// Result of a go-to-definition request. 
+#[derive(Debug, Clone, PartialEq, Eq)] +pub enum DefinitionResult { + /// A local definition within the same document. + Local(Range), + /// An import path that needs to be resolved relative to the document. + Import(String), + /// A field in an imported file. Contains the import path and the field chain. + /// For `lib.foo.bar`, this would be `("lib.libsonnet", ["foo", "bar"])`. + ImportField { + /// The import path to resolve. + path: String, + /// The chain of field names to navigate. + fields: Vec, + }, +} + +/// Find the definition of the symbol at the given position. +/// +/// Returns a `DefinitionResult` indicating either: +/// - A local range within the document +/// - An import path that needs to be resolved by the caller +/// - A field in an imported file (path + field chain) +pub fn goto_definition(document: &Document, position: LspPosition) -> Option { + goto_with_mode(document, position, DefinitionMode::Definition, None) +} + +/// Find the declaration site of the symbol at the given position. +/// +/// Unlike `goto_definition`, this does not follow local alias chains. +pub fn goto_declaration(document: &Document, position: LspPosition) -> Option { + goto_with_mode(document, position, DefinitionMode::Declaration, None) +} + +/// Find the definition of the symbol at the given position using semantic artifacts when available. +pub fn goto_definition_with_semantic( + document: &Document, + position: LspPosition, + semantic: Option<&SemanticArtifacts>, +) -> Option { + goto_with_mode(document, position, DefinitionMode::Definition, semantic) +} + +/// Find the declaration site of the symbol at the given position using semantic artifacts when available. 
+pub fn goto_declaration_with_semantic( + document: &Document, + position: LspPosition, + semantic: Option<&SemanticArtifacts>, +) -> Option { + goto_with_mode(document, position, DefinitionMode::Declaration, semantic) +} + +fn goto_with_mode( + document: &Document, + position: LspPosition, + mode: DefinitionMode, + semantic: Option<&SemanticArtifacts>, +) -> Option { + let text = document.text(); + let line_index = document.line_index(); + + // Convert LSP position to byte offset + let offset = line_index.offset(position, text)?; + + // Get the AST root + let ast = document.ast(); + + // Find the token at the offset + let token = token_at_offset(ast.syntax(), offset)?; + + // Check for import path first + if let Some(import_path) = check_import_path(&token) { + debug!(import_path = %import_path, "found import definition"); + return Some(DefinitionResult::Import(import_path)); + } + + // Check if this is an identifier that could be a variable reference + if token.kind() != SyntaxKind::IDENT { + return None; + } + + let name = token.text().to_string(); + + // Check if this identifier is a field access (e.g., `foo` in `lib.foo`) + // and if the base resolves to an import + if let Some(result) = check_field_access_on_import(&token, &name, document) { + return Some(result); + } + + // Check if this identifier is part of a variable reference (ExprVar) + if !is_variable_reference(&token) { + return None; + } + + // Walk up the scope chain to find the definition + let def_range = semantic + .and_then(|artifacts| artifacts.definition_for_ident_token(&token)) + .or_else(|| find_definition_range(&token, &name))?; + + let result = if mode == DefinitionMode::Definition { + match resolve_canonical_definition(document, def_range, semantic) { + CanonicalDefinition::Local(range) => { + DefinitionResult::Local(to_lsp_range(range, line_index, text)) + } + CanonicalDefinition::Import(path) => DefinitionResult::Import(path), + CanonicalDefinition::ImportField { path, fields } => { + 
DefinitionResult::ImportField { path, fields } + } + } + } else { + DefinitionResult::Local(to_lsp_range(def_range, line_index, text)) + }; + + debug!(name = %name, mode = ?mode, ?result, "resolved symbol target"); + Some(result) +} + +fn resolve_canonical_definition( + document: &Document, + initial_def: TextRange, + semantic: Option<&SemanticArtifacts>, +) -> CanonicalDefinition { + let mut visited = std::collections::HashSet::new(); + let mut current = initial_def; + + loop { + if !visited.insert(current) { + return CanonicalDefinition::Local(current); + } + + if let Some(artifacts) = semantic { + if let Some(binding) = artifacts.binding_info(current) { + if let Some(import_target) = &binding.import_target { + return match import_target { + SemanticImportTarget::Import { path } => { + CanonicalDefinition::Import(path.clone()) + } + SemanticImportTarget::ImportField { path, fields } => { + CanonicalDefinition::ImportField { + path: path.clone(), + fields: fields.clone(), + } + } + }; + } + + if let Some(next) = binding.alias_definition { + current = next; + continue; + } + + return CanonicalDefinition::Local(current); + } + } + + let Some(bind) = find_bind_by_definition_range(document, current) else { + return CanonicalDefinition::Local(current); + }; + let Some(value_expr) = bind_value_expr(&bind) else { + return CanonicalDefinition::Local(current); + }; + + if let Some((path, fields)) = resolve_expr_to_import(&value_expr, document) { + return if fields.is_empty() { + CanonicalDefinition::Import(path) + } else { + CanonicalDefinition::ImportField { path, fields } + }; + } + + let Some(next) = aliased_definition_range(&value_expr) else { + return CanonicalDefinition::Local(current); + }; + current = next; + } +} + +pub use symbol::{ + collect_visible_bindings, collect_visible_bindings_with_semantic, BindingKind, VisibleBinding, +}; diff --git a/crates/jrsonnet-lsp-handlers/src/definition/symbol.rs b/crates/jrsonnet-lsp-handlers/src/definition/symbol.rs new file 
mode 100644 index 00000000..318c5368 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/definition/symbol.rs @@ -0,0 +1,393 @@ +use jrsonnet_lsp_document::{ + find_node_at_offset, token_at_offset, ByteOffset, Document, LspPosition, +}; +use jrsonnet_lsp_inference::{SemanticArtifacts, SemanticBindingKind}; +use jrsonnet_rowan_parser::{ + nodes::{ + Bind, BindFunction, Destruct, ExprFunction, ForSpec, MemberBindStmt, Param, StmtLocal, + }, + AstNode, SyntaxKind, SyntaxNode, +}; +use rowan::TextRange; + +/// A binding visible at a given position. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct VisibleBinding { + /// The name of the binding. + pub name: String, + /// The kind of binding. + pub kind: BindingKind, + /// The text range of the definition. + pub range: TextRange, +} + +/// The kind of a binding. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum BindingKind { + /// A local variable binding (local x = ...). + LocalVariable, + /// A function binding (local f(x) = ...). + LocalFunction, + /// A function parameter. + Parameter, + /// A for-comprehension variable. + ForVariable, +} + +/// Collect all visible bindings at the given byte offset. +#[must_use] +pub fn collect_visible_bindings(document: &Document, position: LspPosition) -> Vec { + collect_visible_bindings_with_semantic(document, position, None) +} + +/// Collect all visible bindings at the given byte offset using semantic artifacts when available. 
+#[must_use] +pub fn collect_visible_bindings_with_semantic( + document: &Document, + position: LspPosition, + semantic: Option<&SemanticArtifacts>, +) -> Vec { + let text = document.text(); + let line_index = document.line_index(); + + // Convert LSP position to byte offset + let Some(offset) = line_index.offset(position, text) else { + return Vec::new(); + }; + + if let Some(artifacts) = semantic { + return artifacts + .visible_bindings_at(offset.into()) + .into_iter() + .map(|binding| VisibleBinding { + name: binding.name, + range: binding.range, + kind: match binding.kind { + SemanticBindingKind::LocalVariable => BindingKind::LocalVariable, + SemanticBindingKind::LocalFunction => BindingKind::LocalFunction, + SemanticBindingKind::Parameter => BindingKind::Parameter, + SemanticBindingKind::ForVariable => BindingKind::ForVariable, + }, + }) + .collect(); + } + + // Get the AST root + let ast = document.ast(); + let root = ast.syntax(); + + let Some(mut current) = start_node_for_offset(root, offset) else { + return Vec::new(); + }; + + // Walk up the scope chain and collect all bindings + let mut bindings = Vec::new(); + let mut seen_names = std::collections::HashSet::new(); + + // First collect from current node if it's a scope + if is_scope_node(¤t) { + // Create a dummy child at the end for scope checking + collect_scope_bindings_at_offset( + ¤t, + u32::from(offset), + &mut bindings, + &mut seen_names, + ); + } + + while let Some(parent) = current.parent() { + collect_scope_bindings_at_offset( + &parent, + u32::from(offset), + &mut bindings, + &mut seen_names, + ); + current = parent; + } + + bindings +} + +fn start_node_for_offset(root: &SyntaxNode, offset: ByteOffset) -> Option { + token_at_offset(root, offset) + .and_then(|token| token.parent()) + .or_else(|| { + let offset_u32 = u32::from(offset); + if offset_u32 == 0 { + return None; + } + token_at_offset(root, ByteOffset::from(offset_u32 - 1)).and_then(|token| token.parent()) + }) + .or_else(|| 
find_node_at_offset(root, offset)) +} + +/// Check if a node is a scope-introducing node. +fn is_scope_node(node: &SyntaxNode) -> bool { + matches!( + node.kind(), + SyntaxKind::EXPR + | SyntaxKind::EXPR_FUNCTION + | SyntaxKind::BIND_FUNCTION + | SyntaxKind::FOR_SPEC + | SyntaxKind::OBJ_BODY_MEMBER_LIST + | SyntaxKind::EXPR_ARRAY_COMP + | SyntaxKind::OBJ_BODY_COMP + ) +} + +/// Collect bindings from a scope using offset for visibility checking. +fn collect_scope_bindings_at_offset( + scope: &SyntaxNode, + offset: u32, + bindings: &mut Vec, + seen: &mut std::collections::HashSet, +) { + match scope.kind() { + SyntaxKind::EXPR => { + collect_expr_bindings_at_offset(scope, offset, bindings, seen); + } + SyntaxKind::EXPR_FUNCTION => { + collect_function_params(scope, bindings, seen); + } + SyntaxKind::BIND_FUNCTION => { + collect_bind_function_params(scope, bindings, seen); + } + SyntaxKind::FOR_SPEC => { + collect_for_binding(scope, bindings, seen); + } + SyntaxKind::OBJ_BODY_MEMBER_LIST => { + collect_object_locals(scope, bindings, seen); + } + SyntaxKind::EXPR_ARRAY_COMP | SyntaxKind::OBJ_BODY_COMP => { + collect_comprehension_bindings(scope, bindings, seen); + } + _ => {} + } +} + +/// Collect local bindings from an Expr using offset for visibility. +fn collect_expr_bindings_at_offset( + expr: &SyntaxNode, + offset: u32, + bindings: &mut Vec, + seen: &mut std::collections::HashSet, +) { + let text_size = rowan::TextSize::from(offset); + + for stmt_node in expr.children() { + if stmt_node.kind() == SyntaxKind::STMT_LOCAL { + // Only consider bindings that appear before our position + if stmt_node.text_range().end() > text_size { + continue; + } + + if let Some(stmt_local) = StmtLocal::cast(stmt_node) { + for bind in stmt_local.binds() { + if let Some(binding) = extract_binding(&bind) { + if !seen.contains(&binding.name) { + seen.insert(binding.name.clone()); + bindings.push(binding); + } + } + } + } + } + } +} + +/// Extract a binding from a Bind node. 
+fn extract_binding(bind: &Bind) -> Option { + match bind { + Bind::BindDestruct(bd) => { + let destruct = bd.into()?; + if let Destruct::DestructFull(full) = destruct { + let bind_name = full.name()?; + let ident = bind_name.ident_lit()?; + return Some(VisibleBinding { + name: ident.text().to_string(), + kind: BindingKind::LocalVariable, + range: bind_name.syntax().text_range(), + }); + } + None + } + Bind::BindFunction(bf) => { + let bind_name = bf.name()?; + let ident = bind_name.ident_lit()?; + Some(VisibleBinding { + name: ident.text().to_string(), + kind: BindingKind::LocalFunction, + range: bind_name.syntax().text_range(), + }) + } + } +} + +/// Collect function parameters. +fn collect_function_params( + func_node: &SyntaxNode, + bindings: &mut Vec, + seen: &mut std::collections::HashSet, +) { + let Some(func) = ExprFunction::cast(func_node.clone()) else { + return; + }; + let Some(params) = func.params_desc() else { + return; + }; + + for param in params.params() { + if let Some(binding) = extract_param_binding(¶m) { + if !seen.contains(&binding.name) { + seen.insert(binding.name.clone()); + bindings.push(binding); + } + } + } +} + +/// Collect `BindFunction` parameters. +fn collect_bind_function_params( + func_node: &SyntaxNode, + bindings: &mut Vec, + seen: &mut std::collections::HashSet, +) { + let Some(func) = BindFunction::cast(func_node.clone()) else { + return; + }; + let Some(params) = func.params() else { + return; + }; + + for param in params.params() { + if let Some(binding) = extract_param_binding(¶m) { + if !seen.contains(&binding.name) { + seen.insert(binding.name.clone()); + bindings.push(binding); + } + } + } +} + +/// Extract a binding from a Param node. 
+fn extract_param_binding(param: &Param) -> Option { + let destruct = param.destruct()?; + if let Destruct::DestructFull(full) = destruct { + let param_name = full.name()?; + let ident = param_name.ident_lit()?; + return Some(VisibleBinding { + name: ident.text().to_string(), + kind: BindingKind::Parameter, + range: param_name.syntax().text_range(), + }); + } + None +} + +/// Collect for-comprehension binding. +fn collect_for_binding( + for_node: &SyntaxNode, + bindings: &mut Vec, + seen: &mut std::collections::HashSet, +) { + let Some(for_spec) = ForSpec::cast(for_node.clone()) else { + return; + }; + let Some(destruct) = for_spec.bind() else { + return; + }; + + if let Destruct::DestructFull(full) = destruct { + if let Some(bind_name) = full.name() { + if let Some(ident) = bind_name.ident_lit() { + let name = ident.text().to_string(); + if !seen.contains(&name) { + seen.insert(name.clone()); + bindings.push(VisibleBinding { + name, + kind: BindingKind::ForVariable, + range: bind_name.syntax().text_range(), + }); + } + } + } + } +} + +/// Collect object local bindings. +fn collect_object_locals( + obj_body: &SyntaxNode, + bindings: &mut Vec, + seen: &mut std::collections::HashSet, +) { + for member_node in obj_body.children() { + if member_node.kind() != SyntaxKind::MEMBER_BIND_STMT { + continue; + } + let Some(member_bind) = MemberBindStmt::cast(member_node) else { + continue; + }; + let Some(obj_local) = member_bind.obj_local() else { + continue; + }; + let Some(bind) = obj_local.bind() else { + continue; + }; + let Some(binding) = extract_binding(&bind) else { + continue; + }; + if seen.contains(&binding.name) { + continue; + } + seen.insert(binding.name.clone()); + bindings.push(binding); + } +} + +/// Collect bindings from comprehension `FOR_SPEC` children. 
+fn collect_comprehension_bindings( + comp_node: &SyntaxNode, + bindings: &mut Vec, + seen: &mut std::collections::HashSet, +) { + for child in comp_node.children() { + if child.kind() != SyntaxKind::FOR_SPEC { + continue; + } + collect_for_binding(&child, bindings, seen); + } +} + +#[cfg(test)] +mod tests { + use jrsonnet_lsp_document::{DocVersion, Document}; + use rowan::TextRange; + + use super::super::{collect_visible_bindings, BindingKind, VisibleBinding}; + + #[test] + fn test_collect_visible_bindings_at_eof() { + let code = "local x = 1;\n"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + assert_eq!( + collect_visible_bindings(&doc, (1, 0).into()), + vec![VisibleBinding { + name: "x".to_string(), + kind: BindingKind::LocalVariable, + range: TextRange::new(6.into(), 7.into()), + }] + ); + } + + #[test] + fn test_collect_visible_bindings_out_of_bounds_position() { + let code = "local x = 1;"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + assert_eq!( + collect_visible_bindings(&doc, (9, 0).into()), + Vec::::new() + ); + } +} diff --git a/crates/jrsonnet-lsp-handlers/src/semantic_tokens.rs b/crates/jrsonnet-lsp-handlers/src/semantic_tokens.rs deleted file mode 100644 index 3e64ab93..00000000 --- a/crates/jrsonnet-lsp-handlers/src/semantic_tokens.rs +++ /dev/null @@ -1,855 +0,0 @@ -//! Semantic tokens handler. -//! -//! Provides semantic highlighting for Jsonnet code. - -use std::fmt::Write as _; - -use jrsonnet_lsp_document::{Document, LineIndex}; -use jrsonnet_lsp_scope::{expr_resolves_to_builtin_std, ident_resolves_to_builtin_std}; -use jrsonnet_rowan_parser::{ - nodes::{BindFunction, Destruct, ExprField, ParamsDesc, StmtLocal}, - AstNode, SyntaxKind, SyntaxNode, SyntaxToken, -}; -use lsp_types::{Range, SemanticToken, SemanticTokenType, SemanticTokens, SemanticTokensLegend}; -use strum_macros::{AsRefStr, EnumString, FromRepr}; - -/// Semantic token type with compile-time index. 
-/// -/// The enum values match the indices in `TOKEN_TYPES`. -#[derive(Debug, Clone, Copy, PartialEq, Eq, EnumString, FromRepr, AsRefStr)] -#[strum(ascii_case_insensitive, serialize_all = "snake_case")] -#[repr(u32)] -pub enum SemanticTokenTypeName { - Namespace = 0, - Parameter = 7, - Variable = 8, - Property = 9, - Function = 12, - Method = 13, - Keyword = 15, - Comment = 17, - String = 18, - Number = 19, - Operator = 21, -} - -impl SemanticTokenTypeName { - #[must_use] - pub const fn as_index(self) -> u32 { - self as u32 - } -} - -/// Semantic token modifier names with their corresponding LSP bit flags. -#[derive(Debug, Clone, Copy, PartialEq, Eq, EnumString, FromRepr, AsRefStr)] -#[strum(ascii_case_insensitive, serialize_all = "snake_case")] -#[repr(u32)] -pub enum SemanticTokenModifierName { - Declaration = 0, - Definition = 1, - Readonly = 2, - Static = 3, - Deprecated = 4, - Abstract = 5, - Async = 6, - Modification = 7, - Documentation = 8, - #[strum( - serialize = "default_library", - serialize = "default-library", - serialize = "defaultlibrary" - )] - DefaultLibrary = 9, -} - -impl SemanticTokenModifierName { - #[must_use] - pub const fn as_index(self) -> u32 { - self as u32 - } - - #[must_use] - pub const fn as_bitset(self) -> u32 { - 1 << self.as_index() - } -} - -/// Semantic token types we support. -/// -/// The indices in this array must match the `SemanticTokenTypeName` enum values. 
-pub const TOKEN_TYPES: &[SemanticTokenType] = &[ - SemanticTokenType::NAMESPACE, // 0: std - SemanticTokenType::TYPE, // 1: (unused) - SemanticTokenType::CLASS, // 2: (unused) - SemanticTokenType::ENUM, // 3: (unused) - SemanticTokenType::INTERFACE, // 4: (unused) - SemanticTokenType::STRUCT, // 5: (unused) - SemanticTokenType::TYPE_PARAMETER, // 6: (unused) - SemanticTokenType::PARAMETER, // 7: function parameters - SemanticTokenType::VARIABLE, // 8: local variables - SemanticTokenType::PROPERTY, // 9: object fields - SemanticTokenType::ENUM_MEMBER, // 10: (unused) - SemanticTokenType::EVENT, // 11: (unused) - SemanticTokenType::FUNCTION, // 12: function definitions - SemanticTokenType::METHOD, // 13: object methods - SemanticTokenType::MACRO, // 14: (unused) - SemanticTokenType::KEYWORD, // 15: keywords - SemanticTokenType::MODIFIER, // 16: (unused) - SemanticTokenType::COMMENT, // 17: comments - SemanticTokenType::STRING, // 18: strings - SemanticTokenType::NUMBER, // 19: numbers - SemanticTokenType::REGEXP, // 20: (unused) - SemanticTokenType::OPERATOR, // 21: operators -]; - -/// Semantic token modifiers (bit flags). -pub const TOKEN_MODIFIERS: &[lsp_types::SemanticTokenModifier] = &[ - lsp_types::SemanticTokenModifier::DECLARATION, - lsp_types::SemanticTokenModifier::DEFINITION, - lsp_types::SemanticTokenModifier::READONLY, - lsp_types::SemanticTokenModifier::STATIC, - lsp_types::SemanticTokenModifier::DEPRECATED, - lsp_types::SemanticTokenModifier::ABSTRACT, - lsp_types::SemanticTokenModifier::ASYNC, - lsp_types::SemanticTokenModifier::MODIFICATION, - lsp_types::SemanticTokenModifier::DOCUMENTATION, - lsp_types::SemanticTokenModifier::DEFAULT_LIBRARY, -]; - -/// Render a Markdown reference block for semantic token types and modifiers. -/// -/// This output is consumed by docs validation tests to keep docs in sync with -/// the actual semantic token legend. 
-#[must_use] -pub fn semantic_token_reference_markdown() -> String { - let mut markdown = String::new(); - markdown.push_str("#### Semantic Token Legend (Generated)\n\n"); - markdown.push_str("Token types (`index`: `lsp_name`, usage):\n"); - for (index, token_type) in TOKEN_TYPES.iter().enumerate() { - let used = SemanticTokenTypeName::from_repr(index as u32); - let usage = used.map_or_else(|| "unused".to_owned(), |token| token.as_ref().to_owned()); - let _ = writeln!(markdown, "- `{index}`: `{}` ({usage})", token_type.as_str()); - } - markdown.push('\n'); - markdown.push_str("Token modifiers (`bit`: `lsp_name`, usage):\n"); - for (index, modifier) in TOKEN_MODIFIERS.iter().enumerate() { - let used = SemanticTokenModifierName::from_repr(index as u32); - let usage = used.map_or_else(|| "unused".to_owned(), |token| token.as_ref().to_owned()); - let _ = writeln!( - markdown, - "- `1 << {index}`: `{}` ({usage})", - modifier.as_str() - ); - } - markdown -} - -fn to_u32(value: usize) -> u32 { - u32::try_from(value).unwrap_or(u32::MAX) -} - -/// Get the semantic tokens legend. -#[must_use] -pub fn legend() -> SemanticTokensLegend { - SemanticTokensLegend { - token_types: TOKEN_TYPES.to_vec(), - token_modifiers: TOKEN_MODIFIERS.to_vec(), - } -} - -/// Compute semantic tokens for a document. -#[must_use] -pub fn semantic_tokens(document: &Document) -> SemanticTokens { - let text = document.text(); - let line_index = document.line_index(); - let ast = document.ast(); - - let mut builder = SemanticTokenBuilder::new(line_index, text, None); - - // Walk all tokens in the document - for element in ast.syntax().descendants_with_tokens() { - if let Some(token) = element.into_token() { - builder.visit_token(&token); - } - } - - builder.build() -} - -/// Compute semantic tokens for a specific range in a document. 
-#[must_use] -pub fn semantic_tokens_range(document: &Document, range: Range) -> SemanticTokens { - let text = document.text(); - let line_index = document.line_index(); - let ast = document.ast(); - - let mut builder = SemanticTokenBuilder::new(line_index, text, Some(range)); - - // Walk all tokens in the document - for element in ast.syntax().descendants_with_tokens() { - if let Some(token) = element.into_token() { - builder.visit_token(&token); - } - } - - builder.build() -} - -/// Builder for semantic tokens. -struct SemanticTokenBuilder<'a> { - line_index: &'a LineIndex, - text: &'a str, - range: Option, - tokens: Vec, -} - -/// Raw token before delta encoding. -struct RawToken { - line: u32, - start_char: u32, - length: u32, - token_type: u32, - token_modifiers: u32, -} - -impl<'a> SemanticTokenBuilder<'a> { - fn new(line_index: &'a LineIndex, text: &'a str, range: Option) -> Self { - Self { - line_index, - text, - range, - tokens: Vec::new(), - } - } - - fn visit_token(&mut self, token: &SyntaxToken) { - let kind = token.kind(); - - if kind.is_semantic_keyword_token() { - self.add_token(token, SemanticTokenTypeName::Keyword, 0); - return; - } - if kind.is_semantic_comment_token() { - self.add_token(token, SemanticTokenTypeName::Comment, 0); - return; - } - if kind.is_semantic_string_token() { - self.add_token(token, SemanticTokenTypeName::String, 0); - return; - } - if kind.is_semantic_number_token() { - self.add_token(token, SemanticTokenTypeName::Number, 0); - return; - } - if kind == SyntaxKind::IDENT { - // Identifiers need AST context for precise token type. 
- self.visit_identifier(token); - return; - } - if kind.is_semantic_operator_token() { - self.add_token(token, SemanticTokenTypeName::Operator, 0); - } - } - - fn visit_identifier(&mut self, token: &SyntaxToken) { - let Some(parent) = token.parent() else { - return; - }; - - // Check if this is "std" - if token.text() == "std" && ident_resolves_to_builtin_std(token) { - self.add_token( - token, - SemanticTokenTypeName::Namespace, - SemanticTokenModifierName::DefaultLibrary.as_bitset(), - ); - return; - } - - // Check context based on parent/grandparent - if parent.kind() == SyntaxKind::NAME { - if let Some(grandparent) = parent.parent() { - match grandparent.kind() { - // Variable reference - SyntaxKind::EXPR_VAR => { - // Check if this references a parameter or variable - let token_type = classify_variable_reference(token); - self.add_token(token, token_type, 0); - } - - // Definition sites - SyntaxKind::DESTRUCT_FULL => { - // Could be parameter or local variable definition - let (token_type, modifiers) = classify_definition_site(&grandparent); - self.add_token(token, token_type, modifiers); - } - - SyntaxKind::BIND_FUNCTION => { - // Function definition - self.add_token( - token, - SemanticTokenTypeName::Function, - SemanticTokenModifierName::Declaration.as_bitset() - | SemanticTokenModifierName::Definition.as_bitset(), - ); - } - - // Field access (std.xyz or obj.field) - SyntaxKind::EXPR_FIELD => { - // Check if accessing std - if is_stdlib_access(&grandparent) { - self.add_token( - token, - SemanticTokenTypeName::Function, - SemanticTokenModifierName::DefaultLibrary.as_bitset(), - ); - } else { - self.add_token(token, SemanticTokenTypeName::Property, 0); - } - } - - _ => {} - } - } - } - - // Check for field name in object (ID node) - if parent.kind() == SyntaxKind::NAME { - if let Some(grandparent) = parent.parent() { - if grandparent.kind() == SyntaxKind::FIELD_NAME_FIXED { - // This is a field definition - if let Some(great_grandparent) = 
grandparent.parent() { - if great_grandparent.kind() == SyntaxKind::MEMBER_FIELD_METHOD { - self.add_token( - token, - SemanticTokenTypeName::Method, - SemanticTokenModifierName::Declaration.as_bitset() - | SemanticTokenModifierName::Definition.as_bitset(), - ); - } else if great_grandparent.kind() == SyntaxKind::MEMBER_FIELD_NORMAL { - self.add_token( - token, - SemanticTokenTypeName::Property, - SemanticTokenModifierName::Declaration.as_bitset() - | SemanticTokenModifierName::Definition.as_bitset(), - ); - } - } - } - } - } - } - - fn add_token( - &mut self, - token: &SyntaxToken, - token_type: SemanticTokenTypeName, - token_modifiers: u32, - ) { - let range = token.text_range(); - let start_pos = self - .line_index - .position(range.start().into(), self.text) - .unwrap_or_default(); - - // Handle multi-line tokens (like block strings/comments) - let token_text = token.text(); - let lines: Vec<&str> = token_text.lines().collect(); - - let token_type_u32 = token_type as u32; - - if lines.len() <= 1 { - // Single line token - self.push_token_if_in_range(RawToken { - line: start_pos.line.0, - start_char: start_pos.character.0, - length: to_u32(token_text.len()), - token_type: token_type_u32, - token_modifiers, - }); - } else { - // Multi-line token - emit one token per line - for (i, line) in lines.iter().enumerate() { - let line_num = start_pos.line.0.saturating_add(to_u32(i)); - let start_char = if i == 0 { start_pos.character.0 } else { 0 }; - let length = to_u32(line.len()); - - if length > 0 { - self.push_token_if_in_range(RawToken { - line: line_num, - start_char, - length, - token_type: token_type_u32, - token_modifiers, - }); - } - } - } - } - - fn push_token_if_in_range(&mut self, token: RawToken) { - let Some(range) = &self.range else { - self.tokens.push(token); - return; - }; - - let token_end = token.start_char.saturating_add(token.length); - if token.line < range.start.line || token.line > range.end.line { - return; - } - if token.line == 
range.start.line && token_end <= range.start.character { - return; - } - if token.line == range.end.line && token.start_char >= range.end.character { - return; - } - - self.tokens.push(token); - } - - fn build(mut self) -> SemanticTokens { - // Sort tokens by position - self.tokens - .sort_unstable_by(|a, b| (a.line, a.start_char).cmp(&(b.line, b.start_char))); - - // Convert to delta-encoded SemanticToken format - let mut data = Vec::with_capacity(self.tokens.len()); - let mut prev_line = 0u32; - let mut prev_char = 0u32; - - for token in &self.tokens { - let delta_line = token.line - prev_line; - let delta_start = if delta_line == 0 { - token.start_char - prev_char - } else { - token.start_char - }; - - data.push(SemanticToken { - delta_line, - delta_start, - length: token.length, - token_type: token.token_type, - token_modifiers_bitset: token.token_modifiers, - }); - - prev_line = token.line; - prev_char = token.start_char; - } - - SemanticTokens { - result_id: None, - data, - } - } -} - -/// Classify a variable reference to determine its token type. -fn classify_variable_reference(token: &SyntaxToken) -> SemanticTokenTypeName { - // Walk up the scope chain to find the definition - let Some(mut current) = token.parent() else { - return SemanticTokenTypeName::Variable; - }; - - let name = token.text(); - - while let Some(parent) = current.parent() { - if is_parameter_in_scope(&parent, name) { - return SemanticTokenTypeName::Parameter; - } - if is_function_in_scope(&parent, ¤t, name) { - return SemanticTokenTypeName::Function; - } - current = parent; - } - - SemanticTokenTypeName::Variable -} - -/// Check if a name is a parameter in the given scope. 
-fn is_parameter_in_scope(scope: &SyntaxNode, name: &str) -> bool { - match scope.kind() { - SyntaxKind::EXPR_FUNCTION => { - if let Some(func) = jrsonnet_rowan_parser::nodes::ExprFunction::cast(scope.clone()) { - if let Some(params) = func.params_desc() { - return params_contain_name(¶ms, name); - } - } - } - SyntaxKind::BIND_FUNCTION => { - if let Some(func) = BindFunction::cast(scope.clone()) { - if let Some(params) = func.params() { - return params_contain_name(¶ms, name); - } - } - } - _ => {} - } - false -} - -/// Check if params contain a given name. -fn params_contain_name(params: &ParamsDesc, name: &str) -> bool { - for param in params.params() { - if let Some(destruct) = param.destruct() { - if let Destruct::DestructFull(full) = destruct { - if let Some(param_name) = full.name() { - if let Some(ident) = param_name.ident_lit() { - if ident.text() == name { - return true; - } - } - } - } - } - } - false -} - -/// Check if a name is a function defined in the given scope. -fn is_function_in_scope(scope: &SyntaxNode, child: &SyntaxNode, name: &str) -> bool { - if scope.kind() != SyntaxKind::EXPR { - return false; - } - - for stmt_node in scope.children() { - if stmt_node.kind() == SyntaxKind::STMT_LOCAL { - if stmt_node.text_range().end() > child.text_range().start() { - continue; - } - - if let Some(stmt_local) = StmtLocal::cast(stmt_node) { - for bind in stmt_local.binds() { - if let jrsonnet_rowan_parser::nodes::Bind::BindFunction(bf) = bind { - if let Some(bind_name) = bf.name() { - if let Some(ident) = bind_name.ident_lit() { - if ident.text() == name { - return true; - } - } - } - } - } - } - } - } - false -} - -/// Classify a definition site. 
-fn classify_definition_site(destruct_node: &SyntaxNode) -> (SemanticTokenTypeName, u32) { - // Walk up to find if this is a parameter or local variable - let mut current = destruct_node.clone(); - - while let Some(parent) = current.parent() { - match parent.kind() { - SyntaxKind::PARAM => { - return ( - SemanticTokenTypeName::Parameter, - SemanticTokenModifierName::Declaration.as_bitset() - | SemanticTokenModifierName::Definition.as_bitset(), - ); - } - SyntaxKind::BIND_DESTRUCT | SyntaxKind::FOR_SPEC => { - return ( - SemanticTokenTypeName::Variable, - SemanticTokenModifierName::Declaration.as_bitset() - | SemanticTokenModifierName::Definition.as_bitset(), - ); - } - _ => {} - } - current = parent; - } - - ( - SemanticTokenTypeName::Variable, - SemanticTokenModifierName::Declaration.as_bitset() - | SemanticTokenModifierName::Definition.as_bitset(), - ) -} - -/// Check if a field access is on builtin std. -fn is_stdlib_access(expr_field: &SyntaxNode) -> bool { - let Some(field) = ExprField::cast(expr_field.clone()) else { - return false; - }; - let Some(base) = field.base() else { - return false; - }; - expr_resolves_to_builtin_std(&base) -} - -#[cfg(test)] -mod tests { - use jrsonnet_lsp_document::DocVersion; - - use super::*; - - #[derive(Debug, Clone, PartialEq, Eq)] - struct AbsoluteToken { - line: u32, - start_char: u32, - length: u32, - token_type: u32, - token_modifiers: u32, - } - - fn token( - line: u32, - start_char: u32, - length: u32, - token_type: SemanticTokenTypeName, - token_modifiers: u32, - ) -> AbsoluteToken { - AbsoluteToken { - line, - start_char, - length, - token_type: token_type as u32, - token_modifiers, - } - } - - fn decode_absolute(tokens: &SemanticTokens) -> Vec { - let mut line = 0_u32; - let mut start_char = 0_u32; - let mut out = Vec::with_capacity(tokens.data.len()); - - for token in &tokens.data { - line = line.saturating_add(token.delta_line); - start_char = if token.delta_line == 0 { - 
start_char.saturating_add(token.delta_start) - } else { - token.delta_start - }; - out.push(AbsoluteToken { - line, - start_char, - length: token.length, - token_type: token.token_type, - token_modifiers: token.token_modifiers_bitset, - }); - } - - out - } - - #[test] - fn test_semantic_tokens_keywords() { - let code = "local x = if true then 1 else 2; x"; - let doc = Document::new(code.to_string(), DocVersion::new(1)); - - let tokens = semantic_tokens(&doc); - assert_eq!( - decode_absolute(&tokens), - vec![ - token(0, 0, 5, SemanticTokenTypeName::Keyword, 0), - token( - 0, - 6, - 1, - SemanticTokenTypeName::Variable, - SemanticTokenModifierName::Declaration.as_bitset() - | SemanticTokenModifierName::Definition.as_bitset() - ), - token(0, 8, 1, SemanticTokenTypeName::Operator, 0), - token(0, 10, 2, SemanticTokenTypeName::Keyword, 0), - token(0, 13, 4, SemanticTokenTypeName::Keyword, 0), - token(0, 18, 4, SemanticTokenTypeName::Keyword, 0), - token(0, 23, 1, SemanticTokenTypeName::Number, 0), - token(0, 25, 4, SemanticTokenTypeName::Keyword, 0), - token(0, 30, 1, SemanticTokenTypeName::Number, 0), - token(0, 33, 1, SemanticTokenTypeName::Variable, 0), - ] - ); - } - - #[test] - fn test_semantic_tokens_function() { - let code = "local add(a, b) = a + b; add(1, 2)"; - let doc = Document::new(code.to_string(), DocVersion::new(1)); - - let tokens = semantic_tokens(&doc); - assert_eq!( - decode_absolute(&tokens), - vec![ - token(0, 0, 5, SemanticTokenTypeName::Keyword, 0), - token( - 0, - 6, - 3, - SemanticTokenTypeName::Function, - SemanticTokenModifierName::Declaration.as_bitset() - | SemanticTokenModifierName::Definition.as_bitset() - ), - token( - 0, - 10, - 1, - SemanticTokenTypeName::Parameter, - SemanticTokenModifierName::Declaration.as_bitset() - | SemanticTokenModifierName::Definition.as_bitset() - ), - token( - 0, - 13, - 1, - SemanticTokenTypeName::Parameter, - SemanticTokenModifierName::Declaration.as_bitset() - | 
SemanticTokenModifierName::Definition.as_bitset() - ), - token(0, 16, 1, SemanticTokenTypeName::Operator, 0), - token(0, 18, 1, SemanticTokenTypeName::Parameter, 0), - token(0, 20, 1, SemanticTokenTypeName::Operator, 0), - token(0, 22, 1, SemanticTokenTypeName::Parameter, 0), - token(0, 25, 3, SemanticTokenTypeName::Function, 0), - token(0, 29, 1, SemanticTokenTypeName::Number, 0), - token(0, 32, 1, SemanticTokenTypeName::Number, 0), - ] - ); - } - - #[test] - fn test_semantic_tokens_object() { - let code = r#"{ name: "test", greet(x): "Hello " + x }"#; - let doc = Document::new(code.to_string(), DocVersion::new(1)); - - let tokens = semantic_tokens(&doc); - assert_eq!( - decode_absolute(&tokens), - vec![ - token( - 0, - 2, - 4, - SemanticTokenTypeName::Property, - SemanticTokenModifierName::Declaration.as_bitset() - | SemanticTokenModifierName::Definition.as_bitset() - ), - token(0, 8, 6, SemanticTokenTypeName::String, 0), - token( - 0, - 16, - 5, - SemanticTokenTypeName::Method, - SemanticTokenModifierName::Declaration.as_bitset() - | SemanticTokenModifierName::Definition.as_bitset() - ), - token( - 0, - 22, - 1, - SemanticTokenTypeName::Parameter, - SemanticTokenModifierName::Declaration.as_bitset() - | SemanticTokenModifierName::Definition.as_bitset() - ), - token(0, 26, 8, SemanticTokenTypeName::String, 0), - token(0, 35, 1, SemanticTokenTypeName::Operator, 0), - token(0, 37, 1, SemanticTokenTypeName::Variable, 0), - ] - ); - } - - #[test] - fn test_semantic_tokens_stdlib() { - let code = "std.length([1, 2, 3])"; - let doc = Document::new(code.to_string(), DocVersion::new(1)); - - let tokens = semantic_tokens(&doc); - assert_eq!( - decode_absolute(&tokens), - vec![ - token( - 0, - 0, - 3, - SemanticTokenTypeName::Namespace, - SemanticTokenModifierName::DefaultLibrary.as_bitset() - ), - token( - 0, - 4, - 6, - SemanticTokenTypeName::Function, - SemanticTokenModifierName::DefaultLibrary.as_bitset() - ), - token(0, 12, 1, SemanticTokenTypeName::Number, 0), - 
token(0, 15, 1, SemanticTokenTypeName::Number, 0), - token(0, 18, 1, SemanticTokenTypeName::Number, 0), - ] - ); - } - - #[test] - fn test_legend() { - let leg = legend(); - assert_eq!(leg.token_types, TOKEN_TYPES.to_vec()); - assert_eq!(leg.token_modifiers, TOKEN_MODIFIERS.to_vec()); - } - - #[test] - fn test_semantic_token_legend_docs_are_in_sync() { - let docs_path = - std::path::Path::new(env!("CARGO_MANIFEST_DIR")).join("../../docs/lsp/HANDLERS.md"); - let docs = std::fs::read_to_string(&docs_path).expect("read HANDLERS.md"); - let start_marker = ""; - let end_marker = ""; - let start_index = docs - .find(start_marker) - .expect("semantic token legend start marker should exist"); - let end_marker_index = docs - .find(end_marker) - .expect("semantic token legend end marker should exist"); - let end_index = end_marker_index + end_marker.len(); - let actual = docs[start_index..end_index].trim_end(); - let expected = format!( - "{start_marker}\n{}\n{end_marker}", - semantic_token_reference_markdown().trim_end() - ); - let normalized_actual = actual - .lines() - .map(str::trim_end) - .filter(|line| !line.is_empty()) - .collect::>(); - let normalized_expected = expected - .lines() - .map(str::trim_end) - .filter(|line| !line.is_empty()) - .collect::>(); - assert_eq!( - normalized_actual, normalized_expected, - "semantic token docs drifted; update docs/lsp/HANDLERS.md legend block" - ); - } - - #[test] - fn test_semantic_tokens_range_filters_lines() { - let code = "local x = 1\nlocal y = x + 1"; - let doc = Document::new(code.to_string(), DocVersion::new(1)); - - let tokens = semantic_tokens_range( - &doc, - Range { - start: lsp_types::Position { - line: 1, - character: 0, - }, - end: lsp_types::Position { - line: 1, - character: 100, - }, - }, - ); - assert_eq!( - decode_absolute(&tokens), - vec![ - token(1, 0, 5, SemanticTokenTypeName::Keyword, 0), - token(1, 6, 1, SemanticTokenTypeName::Variable, 0), - token(1, 8, 1, SemanticTokenTypeName::Operator, 0), - token(1, 
12, 1, SemanticTokenTypeName::Operator, 0), - token(1, 14, 1, SemanticTokenTypeName::Number, 0), - ] - ); - } -} diff --git a/crates/jrsonnet-lsp-handlers/src/semantic_tokens/classification.rs b/crates/jrsonnet-lsp-handlers/src/semantic_tokens/classification.rs new file mode 100644 index 00000000..b318ebd4 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/semantic_tokens/classification.rs @@ -0,0 +1,143 @@ +use jrsonnet_lsp_scope::expr_resolves_to_builtin_std; +use jrsonnet_rowan_parser::{ + nodes::{BindFunction, Destruct, ExprField, ParamsDesc, StmtLocal}, + AstNode, SyntaxKind, SyntaxNode, SyntaxToken, +}; + +use super::legend::{SemanticTokenModifierName, SemanticTokenTypeName}; + +/// Classify a variable reference to determine its token type. +pub(super) fn classify_variable_reference(token: &SyntaxToken) -> SemanticTokenTypeName { + // Walk up the scope chain to find the definition. + let Some(mut current) = token.parent() else { + return SemanticTokenTypeName::Variable; + }; + + let name = token.text(); + + while let Some(parent) = current.parent() { + if is_parameter_in_scope(&parent, name) { + return SemanticTokenTypeName::Parameter; + } + if is_function_in_scope(&parent, ¤t, name) { + return SemanticTokenTypeName::Function; + } + current = parent; + } + + SemanticTokenTypeName::Variable +} + +/// Classify a definition site. +pub(super) fn classify_definition_site(destruct_node: &SyntaxNode) -> (SemanticTokenTypeName, u32) { + // Walk up to find if this is a parameter or local variable. 
+ let mut current = destruct_node.clone(); + + while let Some(parent) = current.parent() { + match parent.kind() { + SyntaxKind::PARAM => { + return ( + SemanticTokenTypeName::Parameter, + SemanticTokenModifierName::Declaration.as_bitset() + | SemanticTokenModifierName::Definition.as_bitset(), + ); + } + SyntaxKind::BIND_DESTRUCT | SyntaxKind::FOR_SPEC => { + return ( + SemanticTokenTypeName::Variable, + SemanticTokenModifierName::Declaration.as_bitset() + | SemanticTokenModifierName::Definition.as_bitset(), + ); + } + _ => {} + } + current = parent; + } + + ( + SemanticTokenTypeName::Variable, + SemanticTokenModifierName::Declaration.as_bitset() + | SemanticTokenModifierName::Definition.as_bitset(), + ) +} + +/// Check if a field access is on builtin std. +pub(super) fn is_stdlib_access(expr_field: &SyntaxNode) -> bool { + let Some(field) = ExprField::cast(expr_field.clone()) else { + return false; + }; + let Some(base) = field.base() else { + return false; + }; + expr_resolves_to_builtin_std(&base) +} + +/// Check if a name is a parameter in the given scope. +fn is_parameter_in_scope(scope: &SyntaxNode, name: &str) -> bool { + match scope.kind() { + SyntaxKind::EXPR_FUNCTION => { + if let Some(func) = jrsonnet_rowan_parser::nodes::ExprFunction::cast(scope.clone()) { + if let Some(params) = func.params_desc() { + return params_contain_name(¶ms, name); + } + } + } + SyntaxKind::BIND_FUNCTION => { + if let Some(func) = BindFunction::cast(scope.clone()) { + if let Some(params) = func.params() { + return params_contain_name(¶ms, name); + } + } + } + _ => {} + } + false +} + +/// Check if params contain a given name. 
+fn params_contain_name(params: &ParamsDesc, name: &str) -> bool { + for param in params.params() { + if let Some(destruct) = param.destruct() { + if let Destruct::DestructFull(full) = destruct { + if let Some(param_name) = full.name() { + if let Some(ident) = param_name.ident_lit() { + if ident.text() == name { + return true; + } + } + } + } + } + } + false +} + +/// Check if a name is a function defined in the given scope. +fn is_function_in_scope(scope: &SyntaxNode, child: &SyntaxNode, name: &str) -> bool { + if scope.kind() != SyntaxKind::EXPR { + return false; + } + + for stmt_node in scope.children() { + if stmt_node.kind() == SyntaxKind::STMT_LOCAL { + if stmt_node.text_range().end() > child.text_range().start() { + continue; + } + + if let Some(stmt_local) = StmtLocal::cast(stmt_node) { + for bind in stmt_local.binds() { + if let jrsonnet_rowan_parser::nodes::Bind::BindFunction(bf) = bind { + if let Some(bind_name) = bf.name() { + if let Some(ident) = bind_name.ident_lit() { + if ident.text() == name { + return true; + } + } + } + } + } + } + } + } + false +} diff --git a/crates/jrsonnet-lsp-handlers/src/semantic_tokens/encode.rs b/crates/jrsonnet-lsp-handlers/src/semantic_tokens/encode.rs new file mode 100644 index 00000000..f13dae18 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/semantic_tokens/encode.rs @@ -0,0 +1,411 @@ +use jrsonnet_lsp_document::{Document, LineIndex}; +use jrsonnet_rowan_parser::AstNode; +use lsp_types::{Range, SemanticToken, SemanticTokens}; + +use super::{legend::SemanticTokenTypeName, walk}; + +fn to_u32(value: usize) -> u32 { + u32::try_from(value).unwrap_or(u32::MAX) +} + +/// Compute semantic tokens for a document. +#[must_use] +pub fn semantic_tokens(document: &Document) -> SemanticTokens { + let text = document.text(); + let line_index = document.line_index(); + let ast = document.ast(); + + let mut builder = SemanticTokenBuilder::new(line_index, text, None); + + // Walk all tokens in the document. 
+ for element in ast.syntax().descendants_with_tokens() { + if let Some(token) = element.into_token() { + walk::visit_token(&mut builder, &token); + } + } + + builder.build() +} + +/// Compute semantic tokens for a specific range in a document. +#[must_use] +pub fn semantic_tokens_range(document: &Document, range: Range) -> SemanticTokens { + let text = document.text(); + let line_index = document.line_index(); + let ast = document.ast(); + + let mut builder = SemanticTokenBuilder::new(line_index, text, Some(range)); + + // Walk all tokens in the document. + for element in ast.syntax().descendants_with_tokens() { + if let Some(token) = element.into_token() { + walk::visit_token(&mut builder, &token); + } + } + + builder.build() +} + +/// Builder for semantic tokens. +pub(super) struct SemanticTokenBuilder<'a> { + line_index: &'a LineIndex, + text: &'a str, + range: Option, + tokens: Vec, +} + +/// Raw token before delta encoding. +struct RawToken { + line: u32, + start_char: u32, + length: u32, + token_type: u32, + token_modifiers: u32, +} + +impl<'a> SemanticTokenBuilder<'a> { + pub(super) fn new(line_index: &'a LineIndex, text: &'a str, range: Option) -> Self { + Self { + line_index, + text, + range, + tokens: Vec::new(), + } + } + + pub(super) fn add_token( + &mut self, + token: &jrsonnet_rowan_parser::SyntaxToken, + token_type: SemanticTokenTypeName, + token_modifiers: u32, + ) { + let range = token.text_range(); + let start_pos = self + .line_index + .position(range.start().into(), self.text) + .unwrap_or_default(); + + // Handle multi-line tokens (like block strings/comments). + let token_text = token.text(); + let lines: Vec<&str> = token_text.lines().collect(); + let token_type_u32 = token_type as u32; + + if lines.len() <= 1 { + // Single line token. 
+ self.push_token_if_in_range(RawToken { + line: start_pos.line.0, + start_char: start_pos.character.0, + length: to_u32(token_text.len()), + token_type: token_type_u32, + token_modifiers, + }); + } else { + // Multi-line token: emit one token per line. + for (i, line) in lines.iter().enumerate() { + let line_num = start_pos.line.0.saturating_add(to_u32(i)); + let start_char = if i == 0 { start_pos.character.0 } else { 0 }; + let length = to_u32(line.len()); + + if length > 0 { + self.push_token_if_in_range(RawToken { + line: line_num, + start_char, + length, + token_type: token_type_u32, + token_modifiers, + }); + } + } + } + } + + fn push_token_if_in_range(&mut self, token: RawToken) { + let Some(range) = &self.range else { + self.tokens.push(token); + return; + }; + + let token_end = token.start_char.saturating_add(token.length); + if token.line < range.start.line || token.line > range.end.line { + return; + } + if token.line == range.start.line && token_end <= range.start.character { + return; + } + if token.line == range.end.line && token.start_char >= range.end.character { + return; + } + + self.tokens.push(token); + } + + fn build(mut self) -> SemanticTokens { + // Sort tokens by position. + self.tokens + .sort_unstable_by(|a, b| (a.line, a.start_char).cmp(&(b.line, b.start_char))); + + // Convert to delta-encoded SemanticToken format. 
+ let mut data = Vec::with_capacity(self.tokens.len()); + let mut prev_line = 0u32; + let mut prev_char = 0u32; + + for token in &self.tokens { + let delta_line = token.line - prev_line; + let delta_start = if delta_line == 0 { + token.start_char - prev_char + } else { + token.start_char + }; + + data.push(SemanticToken { + delta_line, + delta_start, + length: token.length, + token_type: token.token_type, + token_modifiers_bitset: token.token_modifiers, + }); + + prev_line = token.line; + prev_char = token.start_char; + } + + SemanticTokens { + result_id: None, + data, + } + } +} + +#[cfg(test)] +mod tests { + use jrsonnet_lsp_document::{DocVersion, Document}; + use lsp_types::{Range, SemanticTokens}; + + use super::{semantic_tokens, semantic_tokens_range}; + use crate::semantic_tokens::{SemanticTokenModifierName, SemanticTokenTypeName}; + + #[derive(Debug, Clone, PartialEq, Eq)] + struct AbsoluteToken { + line: u32, + start_char: u32, + length: u32, + token_type: u32, + token_modifiers: u32, + } + + fn token( + line: u32, + start_char: u32, + length: u32, + token_type: SemanticTokenTypeName, + token_modifiers: u32, + ) -> AbsoluteToken { + AbsoluteToken { + line, + start_char, + length, + token_type: token_type as u32, + token_modifiers, + } + } + + fn decode_absolute(tokens: &SemanticTokens) -> Vec { + let mut line = 0_u32; + let mut start_char = 0_u32; + let mut out = Vec::with_capacity(tokens.data.len()); + + for token in &tokens.data { + line = line.saturating_add(token.delta_line); + start_char = if token.delta_line == 0 { + start_char.saturating_add(token.delta_start) + } else { + token.delta_start + }; + out.push(AbsoluteToken { + line, + start_char, + length: token.length, + token_type: token.token_type, + token_modifiers: token.token_modifiers_bitset, + }); + } + + out + } + + #[test] + fn test_semantic_tokens_keywords() { + let code = "local x = if true then 1 else 2; x"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + let tokens = 
semantic_tokens(&doc); + assert_eq!( + decode_absolute(&tokens), + vec![ + token(0, 0, 5, SemanticTokenTypeName::Keyword, 0), + token( + 0, + 6, + 1, + SemanticTokenTypeName::Variable, + SemanticTokenModifierName::Declaration.as_bitset() + | SemanticTokenModifierName::Definition.as_bitset() + ), + token(0, 8, 1, SemanticTokenTypeName::Operator, 0), + token(0, 10, 2, SemanticTokenTypeName::Keyword, 0), + token(0, 13, 4, SemanticTokenTypeName::Keyword, 0), + token(0, 18, 4, SemanticTokenTypeName::Keyword, 0), + token(0, 23, 1, SemanticTokenTypeName::Number, 0), + token(0, 25, 4, SemanticTokenTypeName::Keyword, 0), + token(0, 30, 1, SemanticTokenTypeName::Number, 0), + token(0, 33, 1, SemanticTokenTypeName::Variable, 0), + ] + ); + } + + #[test] + fn test_semantic_tokens_function() { + let code = "local add(a, b) = a + b; add(1, 2)"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + let tokens = semantic_tokens(&doc); + assert_eq!( + decode_absolute(&tokens), + vec![ + token(0, 0, 5, SemanticTokenTypeName::Keyword, 0), + token( + 0, + 6, + 3, + SemanticTokenTypeName::Function, + SemanticTokenModifierName::Declaration.as_bitset() + | SemanticTokenModifierName::Definition.as_bitset() + ), + token( + 0, + 10, + 1, + SemanticTokenTypeName::Parameter, + SemanticTokenModifierName::Declaration.as_bitset() + | SemanticTokenModifierName::Definition.as_bitset() + ), + token( + 0, + 13, + 1, + SemanticTokenTypeName::Parameter, + SemanticTokenModifierName::Declaration.as_bitset() + | SemanticTokenModifierName::Definition.as_bitset() + ), + token(0, 16, 1, SemanticTokenTypeName::Operator, 0), + token(0, 18, 1, SemanticTokenTypeName::Parameter, 0), + token(0, 20, 1, SemanticTokenTypeName::Operator, 0), + token(0, 22, 1, SemanticTokenTypeName::Parameter, 0), + token(0, 25, 3, SemanticTokenTypeName::Function, 0), + token(0, 29, 1, SemanticTokenTypeName::Number, 0), + token(0, 32, 1, SemanticTokenTypeName::Number, 0), + ] + ); + } + + #[test] + fn 
test_semantic_tokens_object() { + let code = r#"{ name: "test", greet(x): "Hello " + x }"#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + let tokens = semantic_tokens(&doc); + assert_eq!( + decode_absolute(&tokens), + vec![ + token( + 0, + 2, + 4, + SemanticTokenTypeName::Property, + SemanticTokenModifierName::Declaration.as_bitset() + | SemanticTokenModifierName::Definition.as_bitset() + ), + token(0, 8, 6, SemanticTokenTypeName::String, 0), + token( + 0, + 16, + 5, + SemanticTokenTypeName::Method, + SemanticTokenModifierName::Declaration.as_bitset() + | SemanticTokenModifierName::Definition.as_bitset() + ), + token( + 0, + 22, + 1, + SemanticTokenTypeName::Parameter, + SemanticTokenModifierName::Declaration.as_bitset() + | SemanticTokenModifierName::Definition.as_bitset() + ), + token(0, 26, 8, SemanticTokenTypeName::String, 0), + token(0, 35, 1, SemanticTokenTypeName::Operator, 0), + token(0, 37, 1, SemanticTokenTypeName::Variable, 0), + ] + ); + } + + #[test] + fn test_semantic_tokens_stdlib() { + let code = "std.length([1, 2, 3])"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + let tokens = semantic_tokens(&doc); + assert_eq!( + decode_absolute(&tokens), + vec![ + token( + 0, + 0, + 3, + SemanticTokenTypeName::Namespace, + SemanticTokenModifierName::DefaultLibrary.as_bitset() + ), + token( + 0, + 4, + 6, + SemanticTokenTypeName::Function, + SemanticTokenModifierName::DefaultLibrary.as_bitset() + ), + token(0, 12, 1, SemanticTokenTypeName::Number, 0), + token(0, 15, 1, SemanticTokenTypeName::Number, 0), + token(0, 18, 1, SemanticTokenTypeName::Number, 0), + ] + ); + } + + #[test] + fn test_semantic_tokens_range_filters_lines() { + let code = "local x = 1\nlocal y = x + 1"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + let tokens = semantic_tokens_range( + &doc, + Range { + start: lsp_types::Position { + line: 1, + character: 0, + }, + end: lsp_types::Position { + line: 1, + character: 100, + }, 
+ }, + ); + assert_eq!( + decode_absolute(&tokens), + vec![ + token(1, 0, 5, SemanticTokenTypeName::Keyword, 0), + token(1, 6, 1, SemanticTokenTypeName::Variable, 0), + token(1, 8, 1, SemanticTokenTypeName::Operator, 0), + token(1, 12, 1, SemanticTokenTypeName::Operator, 0), + token(1, 14, 1, SemanticTokenTypeName::Number, 0), + ] + ); + } +} diff --git a/crates/jrsonnet-lsp-handlers/src/semantic_tokens/legend.rs b/crates/jrsonnet-lsp-handlers/src/semantic_tokens/legend.rs new file mode 100644 index 00000000..465233f5 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/semantic_tokens/legend.rs @@ -0,0 +1,191 @@ +use std::fmt::Write as _; + +use lsp_types::{SemanticTokenType, SemanticTokensLegend}; +use strum_macros::{AsRefStr, EnumString, FromRepr}; + +/// Semantic token type with compile-time index. +/// +/// The enum values match the indices in `TOKEN_TYPES`. +#[derive(Debug, Clone, Copy, PartialEq, Eq, EnumString, FromRepr, AsRefStr)] +#[strum(ascii_case_insensitive, serialize_all = "snake_case")] +#[repr(u32)] +pub enum SemanticTokenTypeName { + Namespace = 0, + Parameter = 7, + Variable = 8, + Property = 9, + Function = 12, + Method = 13, + Keyword = 15, + Comment = 17, + String = 18, + Number = 19, + Operator = 21, +} + +impl SemanticTokenTypeName { + #[must_use] + pub const fn as_index(self) -> u32 { + self as u32 + } +} + +/// Semantic token modifier names with their corresponding LSP bit flags. 
+#[derive(Debug, Clone, Copy, PartialEq, Eq, EnumString, FromRepr, AsRefStr)] +#[strum(ascii_case_insensitive, serialize_all = "snake_case")] +#[repr(u32)] +pub enum SemanticTokenModifierName { + Declaration = 0, + Definition = 1, + Readonly = 2, + Static = 3, + Deprecated = 4, + Abstract = 5, + Async = 6, + Modification = 7, + Documentation = 8, + #[strum( + serialize = "default_library", + serialize = "default-library", + serialize = "defaultlibrary" + )] + DefaultLibrary = 9, +} + +impl SemanticTokenModifierName { + #[must_use] + pub const fn as_index(self) -> u32 { + self as u32 + } + + #[must_use] + pub const fn as_bitset(self) -> u32 { + 1 << self.as_index() + } +} + +/// Semantic token types we support. +/// +/// The indices in this array must match the `SemanticTokenTypeName` enum values. +pub const TOKEN_TYPES: &[SemanticTokenType] = &[ + SemanticTokenType::NAMESPACE, // 0: std + SemanticTokenType::TYPE, // 1: (unused) + SemanticTokenType::CLASS, // 2: (unused) + SemanticTokenType::ENUM, // 3: (unused) + SemanticTokenType::INTERFACE, // 4: (unused) + SemanticTokenType::STRUCT, // 5: (unused) + SemanticTokenType::TYPE_PARAMETER, // 6: (unused) + SemanticTokenType::PARAMETER, // 7: function parameters + SemanticTokenType::VARIABLE, // 8: local variables + SemanticTokenType::PROPERTY, // 9: object fields + SemanticTokenType::ENUM_MEMBER, // 10: (unused) + SemanticTokenType::EVENT, // 11: (unused) + SemanticTokenType::FUNCTION, // 12: function definitions + SemanticTokenType::METHOD, // 13: object methods + SemanticTokenType::MACRO, // 14: (unused) + SemanticTokenType::KEYWORD, // 15: keywords + SemanticTokenType::MODIFIER, // 16: (unused) + SemanticTokenType::COMMENT, // 17: comments + SemanticTokenType::STRING, // 18: strings + SemanticTokenType::NUMBER, // 19: numbers + SemanticTokenType::REGEXP, // 20: (unused) + SemanticTokenType::OPERATOR, // 21: operators +]; + +/// Semantic token modifiers (bit flags). 
+pub const TOKEN_MODIFIERS: &[lsp_types::SemanticTokenModifier] = &[ + lsp_types::SemanticTokenModifier::DECLARATION, + lsp_types::SemanticTokenModifier::DEFINITION, + lsp_types::SemanticTokenModifier::READONLY, + lsp_types::SemanticTokenModifier::STATIC, + lsp_types::SemanticTokenModifier::DEPRECATED, + lsp_types::SemanticTokenModifier::ABSTRACT, + lsp_types::SemanticTokenModifier::ASYNC, + lsp_types::SemanticTokenModifier::MODIFICATION, + lsp_types::SemanticTokenModifier::DOCUMENTATION, + lsp_types::SemanticTokenModifier::DEFAULT_LIBRARY, +]; + +/// Render a Markdown reference block for semantic token types and modifiers. +/// +/// This output is consumed by docs validation tests to keep docs in sync with +/// the actual semantic token legend. +#[must_use] +pub fn semantic_token_reference_markdown() -> String { + let mut markdown = String::new(); + markdown.push_str("#### Semantic Token Legend (Generated)\n\n"); + markdown.push_str("Token types (`index`: `lsp_name`, usage):\n"); + for (index, token_type) in TOKEN_TYPES.iter().enumerate() { + let used = SemanticTokenTypeName::from_repr(index as u32); + let usage = used.map_or_else(|| "unused".to_owned(), |token| token.as_ref().to_owned()); + let _ = writeln!(markdown, "- `{index}`: `{}` ({usage})", token_type.as_str()); + } + markdown.push('\n'); + markdown.push_str("Token modifiers (`bit`: `lsp_name`, usage):\n"); + for (index, modifier) in TOKEN_MODIFIERS.iter().enumerate() { + let used = SemanticTokenModifierName::from_repr(index as u32); + let usage = used.map_or_else(|| "unused".to_owned(), |token| token.as_ref().to_owned()); + let _ = writeln!( + markdown, + "- `1 << {index}`: `{}` ({usage})", + modifier.as_str() + ); + } + markdown +} + +/// Get the semantic tokens legend. 
+#[must_use] +pub fn legend() -> SemanticTokensLegend { + SemanticTokensLegend { + token_types: TOKEN_TYPES.to_vec(), + token_modifiers: TOKEN_MODIFIERS.to_vec(), + } +} + +#[cfg(test)] +mod tests { + use super::{legend, semantic_token_reference_markdown, TOKEN_MODIFIERS, TOKEN_TYPES}; + + #[test] + fn test_legend() { + let leg = legend(); + assert_eq!(leg.token_types, TOKEN_TYPES.to_vec()); + assert_eq!(leg.token_modifiers, TOKEN_MODIFIERS.to_vec()); + } + + #[test] + fn test_semantic_token_legend_docs_are_in_sync() { + let docs_path = + std::path::Path::new(env!("CARGO_MANIFEST_DIR")).join("../../docs/lsp/HANDLERS.md"); + let docs = std::fs::read_to_string(&docs_path).expect("read HANDLERS.md"); + let start_marker = ""; + let end_marker = ""; + let start_index = docs + .find(start_marker) + .expect("semantic token legend start marker should exist"); + let end_marker_index = docs + .find(end_marker) + .expect("semantic token legend end marker should exist"); + let end_index = end_marker_index + end_marker.len(); + let actual = docs[start_index..end_index].trim_end(); + let expected = format!( + "{start_marker}\n{}\n{end_marker}", + semantic_token_reference_markdown().trim_end() + ); + let normalized_actual = actual + .lines() + .map(str::trim_end) + .filter(|line| !line.is_empty()) + .collect::>(); + let normalized_expected = expected + .lines() + .map(str::trim_end) + .filter(|line| !line.is_empty()) + .collect::>(); + assert_eq!( + normalized_actual, normalized_expected, + "semantic token docs drifted; update docs/lsp/HANDLERS.md legend block" + ); + } +} diff --git a/crates/jrsonnet-lsp-handlers/src/semantic_tokens/mod.rs b/crates/jrsonnet-lsp-handlers/src/semantic_tokens/mod.rs new file mode 100644 index 00000000..d0ab748d --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/semantic_tokens/mod.rs @@ -0,0 +1,14 @@ +//! Semantic tokens handler. +//! +//! Provides semantic highlighting for Jsonnet code. 
+ +mod classification; +mod encode; +mod legend; +mod walk; + +pub use encode::{semantic_tokens, semantic_tokens_range}; +pub use legend::{ + legend, semantic_token_reference_markdown, SemanticTokenModifierName, SemanticTokenTypeName, + TOKEN_MODIFIERS, TOKEN_TYPES, +}; diff --git a/crates/jrsonnet-lsp-handlers/src/semantic_tokens/walk.rs b/crates/jrsonnet-lsp-handlers/src/semantic_tokens/walk.rs new file mode 100644 index 00000000..db1a8f6c --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/semantic_tokens/walk.rs @@ -0,0 +1,117 @@ +use jrsonnet_lsp_scope::ident_resolves_to_builtin_std; +use jrsonnet_rowan_parser::{SyntaxKind, SyntaxToken}; + +use super::{ + classification::{classify_definition_site, classify_variable_reference, is_stdlib_access}, + encode::SemanticTokenBuilder, + legend::{SemanticTokenModifierName, SemanticTokenTypeName}, +}; + +pub(super) fn visit_token(builder: &mut SemanticTokenBuilder<'_>, token: &SyntaxToken) { + let kind = token.kind(); + + if kind.is_semantic_keyword_token() { + builder.add_token(token, SemanticTokenTypeName::Keyword, 0); + return; + } + if kind.is_semantic_comment_token() { + builder.add_token(token, SemanticTokenTypeName::Comment, 0); + return; + } + if kind.is_semantic_string_token() { + builder.add_token(token, SemanticTokenTypeName::String, 0); + return; + } + if kind.is_semantic_number_token() { + builder.add_token(token, SemanticTokenTypeName::Number, 0); + return; + } + if kind == SyntaxKind::IDENT { + // Identifiers need AST context for precise token type. + visit_identifier(builder, token); + return; + } + if kind.is_semantic_operator_token() { + builder.add_token(token, SemanticTokenTypeName::Operator, 0); + } +} + +fn visit_identifier(builder: &mut SemanticTokenBuilder<'_>, token: &SyntaxToken) { + let Some(parent) = token.parent() else { + return; + }; + + // Check if this is "std". 
+ if token.text() == "std" && ident_resolves_to_builtin_std(token) { + builder.add_token( + token, + SemanticTokenTypeName::Namespace, + SemanticTokenModifierName::DefaultLibrary.as_bitset(), + ); + return; + } + + // Check context based on parent/grandparent. + if parent.kind() == SyntaxKind::NAME { + if let Some(grandparent) = parent.parent() { + match grandparent.kind() { + // Variable reference. + SyntaxKind::EXPR_VAR => { + let token_type = classify_variable_reference(token); + builder.add_token(token, token_type, 0); + } + // Definition sites. + SyntaxKind::DESTRUCT_FULL => { + let (token_type, modifiers) = classify_definition_site(&grandparent); + builder.add_token(token, token_type, modifiers); + } + SyntaxKind::BIND_FUNCTION => { + builder.add_token( + token, + SemanticTokenTypeName::Function, + SemanticTokenModifierName::Declaration.as_bitset() + | SemanticTokenModifierName::Definition.as_bitset(), + ); + } + // Field access (std.xyz or obj.field). + SyntaxKind::EXPR_FIELD => { + if is_stdlib_access(&grandparent) { + builder.add_token( + token, + SemanticTokenTypeName::Function, + SemanticTokenModifierName::DefaultLibrary.as_bitset(), + ); + } else { + builder.add_token(token, SemanticTokenTypeName::Property, 0); + } + } + _ => {} + } + } + } + + // Check for field name in object (ID node). 
+ if parent.kind() == SyntaxKind::NAME { + if let Some(grandparent) = parent.parent() { + if grandparent.kind() == SyntaxKind::FIELD_NAME_FIXED { + if let Some(great_grandparent) = grandparent.parent() { + if great_grandparent.kind() == SyntaxKind::MEMBER_FIELD_METHOD { + builder.add_token( + token, + SemanticTokenTypeName::Method, + SemanticTokenModifierName::Declaration.as_bitset() + | SemanticTokenModifierName::Definition.as_bitset(), + ); + } else if great_grandparent.kind() == SyntaxKind::MEMBER_FIELD_NORMAL { + builder.add_token( + token, + SemanticTokenTypeName::Property, + SemanticTokenModifierName::Declaration.as_bitset() + | SemanticTokenModifierName::Definition.as_bitset(), + ); + } + } + } + } + } +} diff --git a/crates/jrsonnet-lsp-import/src/graph.rs b/crates/jrsonnet-lsp-import/src/graph.rs deleted file mode 100644 index b176a86c..00000000 --- a/crates/jrsonnet-lsp-import/src/graph.rs +++ /dev/null @@ -1,1276 +0,0 @@ -//! Import graph for tracking file dependencies. -//! -//! Maintains a bidirectional graph of import relationships between files, -//! enabling efficient cross-file reference lookups. - -use std::collections::{HashMap, HashSet, VecDeque}; - -use jrsonnet_lsp_document::{ - strip_string_quotes, CanonicalPath, Document, FileId, PathResolver, PathStore, -}; -use jrsonnet_rowan_parser::{ - nodes::{Bind, Destruct, ExprImport, ImportKindKind, StmtLocal}, - AstNode, AstToken, SyntaxKind, -}; - -use crate::{ - parse::extract_import_path, - work_queue::{WorkQueue, WorkQueueExt}, -}; - -/// Information about an import in a file. -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct ImportEntry { - /// Import flavor (`import`, `importstr`, `importbin`). - pub kind: ImportKind, - /// The binding name if this import is bound to a variable. - /// e.g., "lib" in `local lib = import "lib.jsonnet"` - pub binding_name: Option, - /// The raw import path as written in the source. 
- pub import_path: String, - /// The resolved canonical path of the imported file. - pub resolved_path: Option, -} - -/// Jsonnet import flavor. -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub enum ImportKind { - Code, - String, - Binary, -} - -/// One import occurrence in source, including its location. -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct ImportOccurrence { - /// Semantic import entry data. - pub entry: ImportEntry, - /// Range of the import path token in source (e.g. `"foo.libsonnet"`). - pub import_range: rowan::TextRange, -} - -/// Import graph tracking dependencies between files. -/// -/// This structure maintains two maps: -/// - `imports`: file → list of files it imports -/// - `imported_by`: file → list of files that import it (reverse index) -#[derive(Debug)] -pub struct ImportGraph { - /// Interned mapping between canonical paths and stable file ids. - paths: PathStore, - /// Read-only resolver over interned mapping. - resolver: PathResolver, - /// Map of file → import entries in that file. - imports: HashMap>, - /// Reverse index: file → files that import it. - imported_by: HashMap>, -} - -impl ImportGraph { - /// Create a new empty import graph. - #[must_use] - pub fn new(paths: PathStore) -> Self { - let resolver = paths.resolver(); - Self { - paths, - resolver, - imports: HashMap::new(), - imported_by: HashMap::new(), - } - } - - /// Get or create the interned file id for `path`. - #[must_use] - pub fn intern(&self, path: &CanonicalPath) -> FileId { - self.paths.intern(path) - } - - /// Resolve a file path to an interned file identifier. - #[must_use] - pub fn file(&self, path: &CanonicalPath) -> Option { - self.resolver.file(path) - } - - /// Borrow an interned file identifier's canonical path. - #[must_use] - pub fn path(&self, file: FileId) -> Option> { - self.resolver.path(file) - } - - /// Update the import graph for a file. 
- /// - /// This parses the document to find all imports, resolves their paths, - /// and updates both the forward and reverse maps. - /// Update a file's imports in the graph with pre-parsed entries. - /// - /// This is the preferred method when you want to minimize lock hold time. - /// Parse the imports first using [`parse_document_imports`], then call this - /// method while holding the write lock. - pub fn update_file_with_entries(&mut self, file_id: FileId, entries: Vec) { - // Remove old entries for this file - self.remove_file(file_id); - - // Update imported_by reverse index - for entry in &entries { - if let Some(ref resolved) = entry.resolved_path { - let resolved_id = self.paths.intern(resolved); - self.imported_by - .entry(resolved_id) - .or_default() - .insert(file_id); - } - } - - // Store the import entries - self.imports.insert(file_id, entries); - } - - /// Update a file's imports in the graph. - /// - /// This parses the document and updates the import graph atomically. - /// For better performance when parsing is slow, use [`parse_document_imports`] - /// followed by [`update_file_with_entries`] to parse outside the lock. - pub fn update_file(&mut self, file_id: FileId, doc: &Document, resolve_import: F) - where - F: Fn(&str) -> Option, - { - let entries = parse_document_imports(doc, &resolve_import); - self.update_file_with_entries(file_id, entries); - } - - /// Remove a file from the import graph. - /// - /// This removes the file's import entries and updates the reverse index. 
- pub fn remove_file(&mut self, file_id: FileId) { - // Remove from imported_by reverse index - if let Some(old_entries) = self.imports.get(&file_id) { - for entry in old_entries { - if let Some(ref resolved) = entry.resolved_path { - if let Some(resolved_id) = self.resolver.file(resolved) { - let should_remove_entry = self - .imported_by - .get_mut(&resolved_id) - .is_some_and(|importers| { - importers.remove(&file_id); - importers.is_empty() - }); - if should_remove_entry { - self.imported_by.remove(&resolved_id); - } - } - } - } - } - - // Remove the import entries - self.imports.remove(&file_id); - } - - fn direct_importers_by_id(&self, file_id: FileId) -> Vec { - self.imported_by - .get(&file_id) - .map(|s| s.iter().copied().collect()) - .unwrap_or_default() - } - - #[must_use] - fn resolved_entry_id(&self, entry: &ImportEntry) -> Option { - entry - .resolved_path - .as_ref() - .and_then(|path| self.resolver.file(path)) - } - - /// Get the files that directly import a given file. - #[must_use] - pub fn direct_importers(&self, file: FileId) -> Vec { - let mut importers = self.direct_importers_by_id(file); - importers.sort_unstable(); - importers - } - - /// Get all files that transitively import a given file. - /// - /// This performs a breadth-first search through the import graph - /// to find all files that depend on the given file, directly or indirectly. - #[must_use] - pub fn transitive_importers(&self, file: FileId) -> Vec { - let mut result = HashSet::new(); - let mut queue = VecDeque::from([file]); - - while let Some(current) = queue.pop_front() { - for importer in self.direct_importers_by_id(current) { - if result.insert(importer) { - queue.push_back(importer); - } - } - } - - let mut importers: Vec<_> = result.into_iter().collect(); - importers.sort_unstable(); - importers - } - - /// Get the import entries for a file. 
- pub fn imports(&self, file: FileId) -> &[ImportEntry] { - self.imports.get(&file).map_or(&[], Vec::as_slice) - } - - /// Find imports in a file that point to a specific target file. - #[must_use] - pub fn imports_of_target(&self, file_id: FileId, target_id: FileId) -> Vec<&ImportEntry> { - self.imports - .get(&file_id) - .map(|entries| { - entries - .iter() - .filter(|entry| self.resolved_entry_id(entry) == Some(target_id)) - .collect() - }) - .unwrap_or_default() - } - - /// Get the number of files tracked in the graph. - #[must_use] - pub fn file_count(&self) -> usize { - self.imports.len() - } - - /// Get all files tracked in the graph. - pub fn all_files(&self) -> impl Iterator + '_ { - self.imports.keys().copied() - } - - /// Compute a topological ordering of files based on import dependencies. - /// - /// Returns files in an order where each file comes after all files it imports. - /// Files at the same "level" (no dependencies between them) can be processed - /// in parallel. - /// - /// Returns `None` if there's a cycle in the import graph. 
- #[must_use] - pub fn topological_order(&self) -> Option>> { - let mut in_degree: HashMap = HashMap::new(); - let mut levels: Vec> = Vec::new(); - - // Initialize in-degree for all files to 0 - for &path_id in self.imports.keys() { - in_degree.insert(path_id, 0); - } - - // Calculate in-degree: count how many dependencies each file has - // (how many files it imports that are also in our graph) - for (&path_id, entries) in &self.imports { - let dep_count = entries - .iter() - .filter_map(|entry| self.resolved_entry_id(entry)) - .filter(|dep_id| self.imports.contains_key(dep_id)) - .count(); - in_degree.insert(path_id, dep_count); - } - - // Find all files with no dependencies (in-degree 0) - let mut current_level: Vec = in_degree - .iter() - .filter(|(_, °)| deg == 0) - .map(|(&path_id, _)| path_id) - .collect(); - - let mut processed = HashSet::new(); - - while !current_level.is_empty() { - // Sort for deterministic ordering - current_level.sort(); - - // Mark current level as processed - for path_id in ¤t_level { - processed.insert(*path_id); - } - - levels.push(current_level.clone()); - - // Find next level: files whose dependencies are all now processed - let mut next_level = Vec::new(); - for path_id in ¤t_level { - // For each file that imports this one - if let Some(importers) = self.imported_by.get(path_id) { - for importer in importers { - if processed.contains(importer) { - continue; - } - // Check if all dependencies of importer are processed - let all_deps_processed = self.imports.get(importer).is_none_or(|entries| { - entries.iter().all(|entry| { - self.resolved_entry_id(entry).is_none_or(|dep_id| { - processed.contains(&dep_id) - || !self.imports.contains_key(&dep_id) - }) - }) - }); - - if all_deps_processed && !next_level.contains(importer) { - next_level.push(*importer); - } - } - } - } - - current_level = next_level; - } - - // Check if all files were processed (no cycles) - if processed.len() == self.imports.len() { - Some(levels) - } else { - 
None // Cycle detected - } - } - - /// Process files in topological order with parallel processing within each level. - /// - /// This computes a topological ordering of files based on import dependencies, - /// then processes each level in parallel. Files in the same level have no - /// dependencies on each other and can safely be processed concurrently. - /// - /// # Arguments - /// * `f` - Function to call for each file path. Must be `Sync` for parallel execution. - /// - /// # Returns - /// * `Some(())` if processing completed successfully - /// * `None` if there's a cycle in the import graph - /// - /// # Example - /// ```ignore - /// graph.process_in_parallel(|file| { - /// analyze_file(file); - /// }); - /// ``` - pub fn process_in_parallel(&self, f: F) -> Option<()> - where - F: Fn(FileId) + Sync, - { - use rayon::prelude::*; - - let levels = self.topological_order()?; - - // Process each level sequentially, but files within each level in parallel - for level in levels { - level.par_iter().copied().for_each(&f); - } - - Some(()) - } - - /// Process files in reverse topological order with parallel processing within each level. - /// - /// Similar to `process_in_parallel`, but processes files in reverse order - - /// files that are imported by others are processed last. This is useful when - /// you need to process dependents before their dependencies. - pub fn process_in_parallel_reverse(&self, f: F) -> Option<()> - where - F: Fn(FileId) + Sync, - { - use rayon::prelude::*; - - let levels = self.topological_order()?; - - // Process levels in reverse order - for level in levels.into_iter().rev() { - level.par_iter().copied().for_each(&f); - } - - Some(()) - } - - /// Process a file and its transitive dependencies using a work queue. - /// - /// This dynamically discovers dependencies during processing and ensures - /// dependencies are processed before dependents. Uses per-level parallelism. 
- /// - /// # Arguments - /// * `root` - The root file to process - /// * `f` - Function to call for each file path - /// - /// # Example - /// ```ignore - /// graph.process_with_dependencies(file, |_| true, |dep| { - /// analyze_file(dep); - /// }); - /// ``` - pub fn process_with_dependencies(&self, root: FileId, include_dependency: P, f: F) - where - F: Fn(FileId) + Sync, - P: Fn(&ImportEntry) -> bool + Sync, - { - let mut work = WorkQueue::new(); - work.push(root); - - let levels = work.run(|path_id, deps| { - // Get dependencies from import graph - if let Some(entries) = self.imports.get(path_id) { - for entry in entries { - if !include_dependency(entry) { - continue; - } - if let Some(resolved_id) = self.resolved_entry_id(entry) { - deps.push(resolved_id); - } - } - } - }); - - // Process levels in dependency order (leaves first) - levels.process_parallel(|path_id| f(*path_id)); - } - - /// Process a file and its transitive importers using a work queue. - /// - /// This processes files in reverse dependency order - the root file first, - /// then files that import it, and so on. Uses per-level parallelism. - /// - /// Useful for invalidation cascading: when a file changes, process it - /// and all files that depend on it. - pub fn process_importers_with_work_queue(&self, root: FileId, f: F) - where - F: Fn(FileId) + Sync, - { - let mut work = WorkQueue::new(); - work.push(root); - - let mut levels = work.run(|path_id, deps| { - // Get files that import this file - for importer in self.direct_importers_by_id(*path_id) { - deps.push(importer); - } - }); - - // Reverse levels: work queue puts leaves (files with no importers) at level 0, - // but we want root first, then progressively outward to importers - levels.reverse(); - - // Process levels (root first, then importers) - levels.process_parallel(|path_id| f(*path_id)); - } -} - -/// Parse import statements from a document. 
-/// -/// This extracts all import entries from the document without modifying -/// the import graph. Use this when you want to parse outside a lock, -/// then pass the results to [`ImportGraph::update_file_with_entries`]. -pub fn parse_document_imports(doc: &Document, resolve_import: &F) -> Vec -where - F: Fn(&str) -> Option, -{ - parse_document_import_occurrences(doc, resolve_import) - .into_iter() - .map(|occurrence| occurrence.entry) - .collect() -} - -/// Parse import occurrences from a document with source ranges. -/// -/// This is useful for diagnostics where callers need to point at the exact -/// import token in source when a path cannot be resolved. -pub fn parse_document_import_occurrences( - doc: &Document, - resolve_import: &F, -) -> Vec -where - F: Fn(&str) -> Option, -{ - let mut occurrences = Vec::new(); - let mut seen_expr_import_ranges = std::collections::HashSet::new(); - let mut seen_string_ranges = std::collections::HashSet::new(); - let ast = doc.ast(); - - // First pass: find imports in local statements (these have bindings) - for node in ast.syntax().descendants() { - if node.kind() == SyntaxKind::STMT_LOCAL { - if let Some(stmt_local) = StmtLocal::cast(node.clone()) { - for bind in stmt_local.binds() { - if let Some((occurrence, import_range)) = - parse_bind_import_with_range(&bind, resolve_import) - { - seen_expr_import_ranges.insert(import_range); - seen_string_ranges.insert(occurrence.import_range); - occurrences.push(occurrence); - } - } - } - } - } - - // Second pass: find bare import expressions that weren't part of a local statement - for node in ast.syntax().descendants() { - if node.kind() == SyntaxKind::EXPR_IMPORT { - let range = node.text_range(); - // Skip if we already captured this import in a local statement - if seen_expr_import_ranges.contains(&range) { - continue; - } - if let Some(import) = ExprImport::cast(node) { - if let Some(occurrence) = parse_import_occurrence(&import, None, resolve_import) { - 
seen_expr_import_ranges.insert(range); - seen_string_ranges.insert(occurrence.import_range); - occurrences.push(occurrence); - } - } - } - } - - // Third pass fallback: recover imports from token stream for syntax-broken files. - occurrences.extend(parse_token_fallback_import_occurrences( - doc, - resolve_import, - &mut seen_string_ranges, - )); - - occurrences -} - -/// Parse a bind to extract import information, returning the import's text range. -fn parse_bind_import_with_range( - bind: &Bind, - resolve_import: &F, -) -> Option<(ImportOccurrence, rowan::TextRange)> -where - F: Fn(&str) -> Option, -{ - let Bind::BindDestruct(bd) = bind else { - return None; - }; - - let destruct = bd.into()?; - let Destruct::DestructFull(full) = destruct else { - return None; - }; - - let bind_name = full.name()?.ident_lit()?.text().to_string(); - - // Check if the expression is an import - let expr = bd.value()?; - for node in expr.syntax().descendants() { - if node.kind() == SyntaxKind::EXPR_IMPORT { - let range = node.text_range(); - if let Some(import) = ExprImport::cast(node) { - if let Some(occurrence) = - parse_import_occurrence(&import, Some(bind_name.clone()), resolve_import) - { - return Some((occurrence, range)); - } - } - } - } - - None -} - -fn parse_import_occurrence( - import: &ExprImport, - binding_name: Option, - resolve_import: &F, -) -> Option -where - F: Fn(&str) -> Option, -{ - let kind = import_kind_from_expr(import)?; - let path = extract_import_path(import)?; - let resolved = resolve_import(&path); - let import_range = import.text()?.syntax().text_range(); - - Some(ImportOccurrence { - entry: ImportEntry { - kind, - binding_name, - import_path: path, - resolved_path: resolved, - }, - import_range, - }) -} - -fn parse_token_fallback_import_occurrences( - doc: &Document, - resolve_import: &F, - seen_string_ranges: &mut std::collections::HashSet, -) -> Vec -where - F: Fn(&str) -> Option, -{ - let tokens: Vec<_> = doc - .ast() - .syntax() - 
.descendants_with_tokens() - .filter_map(rowan::NodeOrToken::into_token) - .collect(); - - let mut occurrences = Vec::new(); - for (idx, token) in tokens.iter().enumerate() { - if !is_import_keyword(token.kind()) { - continue; - } - - let Some(import_text) = next_non_trivia_token(&tokens, idx + 1) else { - continue; - }; - if !is_import_string_token(import_text.kind()) { - continue; - } - - let import_range = import_text.text_range(); - if !seen_string_ranges.insert(import_range) { - continue; - } - - let import_path = strip_string_quotes(import_text.text()); - if import_path.is_empty() { - continue; - } - let Some(kind) = import_kind_from_keyword_token(token.kind()) else { - continue; - }; - - occurrences.push(ImportOccurrence { - entry: ImportEntry { - kind, - binding_name: binding_name_from_import_token(import_text), - resolved_path: resolve_import(&import_path), - import_path, - }, - import_range, - }); - } - occurrences -} - -fn next_non_trivia_token( - tokens: &[jrsonnet_rowan_parser::SyntaxToken], - start_idx: usize, -) -> Option<&jrsonnet_rowan_parser::SyntaxToken> { - tokens.get(start_idx..)?.iter().find(|token| { - !matches!( - token.kind(), - SyntaxKind::WHITESPACE - | SyntaxKind::MULTI_LINE_COMMENT - | SyntaxKind::SINGLE_LINE_HASH_COMMENT - | SyntaxKind::SINGLE_LINE_SLASH_COMMENT - ) - }) -} - -const fn is_import_keyword(kind: SyntaxKind) -> bool { - matches!( - kind, - SyntaxKind::IMPORT_KW | SyntaxKind::IMPORTSTR_KW | SyntaxKind::IMPORTBIN_KW - ) -} - -const fn is_import_string_token(kind: SyntaxKind) -> bool { - matches!( - kind, - SyntaxKind::STRING_DOUBLE - | SyntaxKind::STRING_SINGLE - | SyntaxKind::STRING_DOUBLE_VERBATIM - | SyntaxKind::STRING_SINGLE_VERBATIM - | SyntaxKind::ERROR_STRING_DOUBLE_UNTERMINATED - | SyntaxKind::ERROR_STRING_SINGLE_UNTERMINATED - | SyntaxKind::ERROR_STRING_DOUBLE_VERBATIM_UNTERMINATED - | SyntaxKind::ERROR_STRING_SINGLE_VERBATIM_UNTERMINATED - ) -} - -fn binding_name_from_import_token(token: 
&jrsonnet_rowan_parser::SyntaxToken) -> Option { - let bind = token.parent()?.ancestors().find_map(Bind::cast)?; - let Bind::BindDestruct(bind_destruct) = bind else { - return None; - }; - let destruct = jrsonnet_rowan_parser::nodes::BindDestruct::into(&bind_destruct)?; - let Destruct::DestructFull(full) = destruct else { - return None; - }; - Some(full.name()?.ident_lit()?.text().to_string()) -} - -fn import_kind_from_expr(import: &ExprImport) -> Option { - let token_kind = import.import_kind()?.kind(); - Some(match token_kind { - ImportKindKind::ImportKw => ImportKind::Code, - ImportKindKind::ImportstrKw => ImportKind::String, - ImportKindKind::ImportbinKw => ImportKind::Binary, - }) -} - -const fn import_kind_from_keyword_token(kind: SyntaxKind) -> Option { - match kind { - SyntaxKind::IMPORT_KW => Some(ImportKind::Code), - SyntaxKind::IMPORTSTR_KW => Some(ImportKind::String), - SyntaxKind::IMPORTBIN_KW => Some(ImportKind::Binary), - _ => None, - } -} - -#[cfg(test)] -mod tests { - use std::path::PathBuf; - - use jrsonnet_lsp_document::DocVersion; - - use super::*; - - fn test_path(name: &str) -> CanonicalPath { - CanonicalPath::new(PathBuf::from(format!("/test/{name}"))) - } - - /// A simple resolver that just appends the import path to /test/ - fn simple_resolver(import: &str) -> Option { - if import.is_empty() { - None - } else { - Some(test_path(import)) - } - } - - fn graph_paths(graph: &ImportGraph, files: Vec) -> Vec { - files - .into_iter() - .filter_map(|file| { - graph - .path(file) - .map(|path| path.as_canonical_path().clone()) - }) - .collect() - } - - #[test] - fn test_parse_local_import() { - let code = r#"local lib = import "lib.jsonnet"; lib"#; - let doc = Document::new(code.to_string(), DocVersion::new(1)); - - let entries = parse_document_imports(&doc, &simple_resolver); - - assert_eq!( - entries, - vec![ImportEntry { - kind: ImportKind::Code, - binding_name: Some("lib".to_string()), - import_path: "lib.jsonnet".to_string(), - resolved_path: 
Some(test_path("lib.jsonnet")), - }] - ); - } - - #[test] - fn test_parse_import_occurrences_include_string_token_range() { - let code = r#"local lib = import "lib.jsonnet"; lib"#; - let doc = Document::new(code.to_string(), DocVersion::new(1)); - let occurrences = parse_document_import_occurrences(&doc, &simple_resolver); - let start = u32::try_from( - code.find("\"lib.jsonnet\"") - .expect("import string should exist"), - ) - .unwrap(); - let end = start + u32::try_from("\"lib.jsonnet\"".len()).unwrap(); - - assert_eq!( - occurrences, - vec![ImportOccurrence { - entry: ImportEntry { - kind: ImportKind::Code, - binding_name: Some("lib".to_string()), - import_path: "lib.jsonnet".to_string(), - resolved_path: Some(test_path("lib.jsonnet")), - }, - import_range: rowan::TextRange::new(start.into(), end.into()), - }] - ); - } - - #[test] - fn test_parse_import_occurrences_fallback_unterminated_string() { - let code = r#"local lib = import "lib.jsonnet"#; - let doc = Document::new(code.to_string(), DocVersion::new(1)); - let occurrences = parse_document_import_occurrences(&doc, &simple_resolver); - let start = u32::try_from( - code.find("\"lib.jsonnet") - .expect("unterminated import string should exist"), - ) - .unwrap(); - let end = u32::try_from(code.len()).unwrap(); - - assert_eq!( - occurrences, - vec![ImportOccurrence { - entry: ImportEntry { - kind: ImportKind::Code, - binding_name: Some("lib".to_string()), - import_path: "lib.jsonnet".to_string(), - resolved_path: Some(test_path("lib.jsonnet")), - }, - import_range: rowan::TextRange::new(start.into(), end.into()), - }] - ); - } - - #[test] - fn test_parse_local_import_single_quote() { - let code = "local lib = import 'lib.jsonnet'; lib"; - let doc = Document::new(code.to_string(), DocVersion::new(1)); - - let entries = parse_document_imports(&doc, &simple_resolver); - - assert_eq!( - entries, - vec![ImportEntry { - kind: ImportKind::Code, - binding_name: Some("lib".to_string()), - import_path: 
"lib.jsonnet".to_string(), - resolved_path: Some(test_path("lib.jsonnet")), - }] - ); - } - - #[test] - fn test_parse_multiple_imports() { - let code = r#" -local lib1 = import "lib1.jsonnet"; -local lib2 = import "lib2.jsonnet"; -lib1 + lib2 -"#; - let doc = Document::new(code.to_string(), DocVersion::new(1)); - - let entries = parse_document_imports(&doc, &simple_resolver); - - assert_eq!( - entries, - vec![ - ImportEntry { - kind: ImportKind::Code, - binding_name: Some("lib1".to_string()), - import_path: "lib1.jsonnet".to_string(), - resolved_path: Some(test_path("lib1.jsonnet")), - }, - ImportEntry { - kind: ImportKind::Code, - binding_name: Some("lib2".to_string()), - import_path: "lib2.jsonnet".to_string(), - resolved_path: Some(test_path("lib2.jsonnet")), - }, - ] - ); - } - - #[test] - fn test_import_graph_update() { - let mut graph = ImportGraph::new(PathStore::new()); - - let main = test_path("main.jsonnet"); - let lib = test_path("lib.jsonnet"); - let code = r#"local lib = import "lib.jsonnet"; lib"#; - let doc = Document::new(code.to_string(), DocVersion::new(1)); - - graph.update_file(graph.intern(&main), &doc, simple_resolver); - - // Check that main imports lib - let imports = graph.imports(graph.intern(&main)); - assert_eq!( - imports, - vec![ImportEntry { - kind: ImportKind::Code, - binding_name: Some("lib".to_string()), - import_path: "lib.jsonnet".to_string(), - resolved_path: Some(lib.clone()), - }] - ); - - // Check the reverse index - let importers = graph_paths(&graph, graph.direct_importers(graph.intern(&lib))); - assert_eq!(importers, vec![main]); - } - - #[test] - fn test_import_graph_lookups_with_equivalent_paths() { - let mut graph = ImportGraph::new(PathStore::new()); - - let main = test_path("main.jsonnet"); - let code = r#"local lib = import "lib.jsonnet"; lib"#; - let doc = Document::new(code.to_string(), DocVersion::new(1)); - graph.update_file(graph.intern(&main), &doc, simple_resolver); - - let main_lookup = 
test_path("main.jsonnet"); - let lib_lookup = test_path("lib.jsonnet"); - - assert_eq!( - graph_paths(&graph, graph.direct_importers(graph.intern(&lib_lookup)),), - vec![main_lookup.clone()] - ); - assert_eq!( - graph.imports_of_target(graph.intern(&main_lookup), graph.intern(&lib_lookup),), - vec![&ImportEntry { - kind: ImportKind::Code, - binding_name: Some("lib".to_string()), - import_path: "lib.jsonnet".to_string(), - resolved_path: Some(lib_lookup), - }] - ); - } - - #[test] - fn test_import_graph_remove() { - let mut graph = ImportGraph::new(PathStore::new()); - - let main = test_path("main.jsonnet"); - let code = r#"local lib = import "lib.jsonnet"; lib"#; - let doc = Document::new(code.to_string(), DocVersion::new(1)); - - graph.update_file(graph.intern(&main), &doc, simple_resolver); - - // Remove main - graph.remove_file(graph.intern(&main)); - - // Check that main no longer has imports - assert!(graph.imports(graph.intern(&main)).is_empty()); - - // Check the reverse index is updated - let lib = test_path("lib.jsonnet"); - assert!(graph.direct_importers(graph.intern(&lib)).is_empty()); - } - - #[test] - fn test_transitive_importers() { - let mut graph = ImportGraph::new(PathStore::new()); - - // Setup: main.jsonnet -> utils.jsonnet -> lib.jsonnet - let main = test_path("main.jsonnet"); - let utils = test_path("utils.jsonnet"); - let lib = test_path("lib.jsonnet"); - - // main imports utils - let main_code = r#"local utils = import "utils.jsonnet"; utils"#; - let main_doc = Document::new(main_code.to_string(), DocVersion::new(1)); - graph.update_file(graph.intern(&main), &main_doc, simple_resolver); - - // utils imports lib - let utils_code = r#"local lib = import "lib.jsonnet"; lib"#; - let utils_doc = Document::new(utils_code.to_string(), DocVersion::new(1)); - graph.update_file(graph.intern(&utils), &utils_doc, simple_resolver); - - // Check transitive importers of lib - let importers = graph_paths(&graph, 
graph.transitive_importers(graph.intern(&lib))); - assert_eq!(importers, vec![main, utils]); - } - - #[test] - fn test_imports_of_target() { - let mut graph = ImportGraph::new(PathStore::new()); - - let main = test_path("main.jsonnet"); - let lib = test_path("lib.jsonnet"); - - let code = r#" -local lib = import "lib.jsonnet"; -local other = import "other.jsonnet"; -lib + other -"#; - let doc = Document::new(code.to_string(), DocVersion::new(1)); - graph.update_file(graph.intern(&main), &doc, simple_resolver); - - // Get imports of lib.jsonnet from main - let imports = graph.imports_of_target(graph.intern(&main), graph.intern(&lib)); - assert_eq!( - imports, - vec![&ImportEntry { - kind: ImportKind::Code, - binding_name: Some("lib".to_string()), - import_path: "lib.jsonnet".to_string(), - resolved_path: Some(lib), - }] - ); - } - - #[test] - fn test_topological_order_simple() { - let mut graph = ImportGraph::new(PathStore::new()); - - // Setup: main -> utils -> lib (chain dependency) - let main = test_path("main.jsonnet"); - let utils = test_path("utils.jsonnet"); - let lib = test_path("lib.jsonnet"); - - // lib has no imports - let lib_code = "{}"; - let lib_doc = Document::new(lib_code.to_string(), DocVersion::new(1)); - graph.update_file(graph.intern(&lib), &lib_doc, simple_resolver); - - // utils imports lib - let utils_code = r#"local lib = import "lib.jsonnet"; lib"#; - let utils_doc = Document::new(utils_code.to_string(), DocVersion::new(1)); - graph.update_file(graph.intern(&utils), &utils_doc, simple_resolver); - - // main imports utils - let main_code = r#"local utils = import "utils.jsonnet"; utils"#; - let main_doc = Document::new(main_code.to_string(), DocVersion::new(1)); - graph.update_file(graph.intern(&main), &main_doc, simple_resolver); - - let levels = graph - .topological_order() - .map(|levels| { - levels - .into_iter() - .map(|level| graph_paths(&graph, level)) - .collect::>() - }) - .expect("Should not have cycles"); - - // lib should be in 
first level (no deps) - // utils should be in second level (depends on lib) - // main should be in third level (depends on utils) - assert_eq!(levels, vec![vec![lib], vec![utils], vec![main]]); - } - - #[test] - fn test_topological_order_parallel_files() { - let mut graph = ImportGraph::new(PathStore::new()); - - // Setup: main imports both utils1 and utils2 (independent) - let main = test_path("main.jsonnet"); - let utils1 = test_path("utils1.jsonnet"); - let utils2 = test_path("utils2.jsonnet"); - - // utils1 has no imports - let utils1_doc = Document::new("{}".to_string(), DocVersion::new(1)); - graph.update_file(graph.intern(&utils1), &utils1_doc, simple_resolver); - - // utils2 has no imports - let utils2_doc = Document::new("{}".to_string(), DocVersion::new(1)); - graph.update_file(graph.intern(&utils2), &utils2_doc, simple_resolver); - - // main imports both - let main_code = r#" -local u1 = import "utils1.jsonnet"; -local u2 = import "utils2.jsonnet"; -u1 + u2 -"#; - let main_doc = Document::new(main_code.to_string(), DocVersion::new(1)); - graph.update_file(graph.intern(&main), &main_doc, simple_resolver); - - let levels = graph - .topological_order() - .map(|levels| { - levels - .into_iter() - .map(|level| graph_paths(&graph, level)) - .collect::>() - }) - .expect("Should not have cycles"); - - // utils1 and utils2 should be in first level (independent, can be parallel, sorted) - // main should be in second level - assert_eq!(levels, vec![vec![utils1, utils2], vec![main]]); - } - - #[test] - fn test_process_in_parallel() { - use std::sync::atomic::{AtomicUsize, Ordering}; - - let mut graph = ImportGraph::new(PathStore::new()); - - // Setup: main -> lib (chain) - let main = test_path("main.jsonnet"); - let lib = test_path("lib.jsonnet"); - - // lib has no imports - let lib_doc = Document::new("{}".to_string(), DocVersion::new(1)); - graph.update_file(graph.intern(&lib), &lib_doc, simple_resolver); - - // main imports lib - let main_code = r#"local lib = 
import "lib.jsonnet"; lib"#; - let main_doc = Document::new(main_code.to_string(), DocVersion::new(1)); - graph.update_file(graph.intern(&main), &main_doc, simple_resolver); - - let counter = AtomicUsize::new(0); - graph - .process_in_parallel(|_file| { - counter.fetch_add(1, Ordering::SeqCst); - }) - .expect("should process files in parallel"); - assert_eq!(counter.load(Ordering::SeqCst), 2); - } - - #[test] - fn test_process_in_parallel_order() { - use std::sync::{Arc, Mutex}; - - let mut graph = ImportGraph::new(PathStore::new()); - - // Setup: main -> lib (chain) - let main = test_path("main.jsonnet"); - let lib = test_path("lib.jsonnet"); - - // lib has no imports - let lib_doc = Document::new("{}".to_string(), DocVersion::new(1)); - graph.update_file(graph.intern(&lib), &lib_doc, simple_resolver); - - // main imports lib - let main_code = r#"local lib = import "lib.jsonnet"; lib"#; - let main_doc = Document::new(main_code.to_string(), DocVersion::new(1)); - graph.update_file(graph.intern(&main), &main_doc, simple_resolver); - - let processed_order = Arc::new(Mutex::new(Vec::new())); - let order_clone = Arc::clone(&processed_order); - graph.process_in_parallel(move |file| { - order_clone.lock().unwrap().push(file); - }); - - let order = graph_paths(&graph, processed_order.lock().unwrap().clone()); - // lib should be processed before main (lib has no deps, main depends on lib) - assert_eq!(order, vec![lib, main]); - } - - #[test] - fn test_process_with_dependencies() { - use std::sync::{Arc, Mutex}; - - let mut graph = ImportGraph::new(PathStore::new()); - - // Setup: main -> utils -> lib - let main = test_path("main.jsonnet"); - let utils = test_path("utils.jsonnet"); - let lib = test_path("lib.jsonnet"); - - // lib has no imports - let lib_doc = Document::new("{}".to_string(), DocVersion::new(1)); - graph.update_file(graph.intern(&lib), &lib_doc, simple_resolver); - - // utils imports lib - let utils_code = r#"local lib = import "lib.jsonnet"; lib"#; - let 
utils_doc = Document::new(utils_code.to_string(), DocVersion::new(1)); - graph.update_file(graph.intern(&utils), &utils_doc, simple_resolver); - - // main imports utils - let main_code = r#"local utils = import "utils.jsonnet"; utils"#; - let main_doc = Document::new(main_code.to_string(), DocVersion::new(1)); - graph.update_file(graph.intern(&main), &main_doc, simple_resolver); - - let processed = Arc::new(Mutex::new(Vec::new())); - let processed_clone = Arc::clone(&processed); - - // Process main and its dependencies - graph.process_with_dependencies( - graph.intern(&main), - |_| true, - move |file| { - processed_clone.lock().unwrap().push(file); - }, - ); - - let order = graph_paths(&graph, processed.lock().unwrap().clone()); - - // lib should be processed before utils, utils before main - assert_eq!(order, vec![lib, utils, main]); - } - - #[test] - fn test_process_with_dependencies_filtered_by_kind() { - use std::sync::{Arc, Mutex}; - - let mut graph = ImportGraph::new(PathStore::new()); - - let main = test_path("main.jsonnet"); - let data = test_path("data.jsonnet"); - let script = test_path("script.k"); - - graph.update_file_with_entries( - graph.intern(&main), - vec![ - ImportEntry { - kind: ImportKind::Code, - binding_name: Some("data".to_string()), - import_path: "data.jsonnet".to_string(), - resolved_path: Some(data.clone()), - }, - ImportEntry { - kind: ImportKind::String, - binding_name: Some("payload".to_string()), - import_path: "script.k".to_string(), - resolved_path: Some(script), - }, - ], - ); - - let processed = Arc::new(Mutex::new(Vec::new())); - let processed_clone = Arc::clone(&processed); - - graph.process_with_dependencies( - graph.intern(&main), - |entry| entry.kind == ImportKind::Code, - move |file| { - processed_clone.lock().unwrap().push(file); - }, - ); - - let order = graph_paths(&graph, processed.lock().unwrap().clone()); - assert_eq!(order, vec![data, main]); - } - - #[test] - fn test_process_importers_with_work_queue() { - use 
std::sync::{Arc, Mutex}; - - let mut graph = ImportGraph::new(PathStore::new()); - - // Setup: main -> utils -> lib - let main = test_path("main.jsonnet"); - let utils = test_path("utils.jsonnet"); - let lib = test_path("lib.jsonnet"); - - // lib has no imports - let lib_doc = Document::new("{}".to_string(), DocVersion::new(1)); - graph.update_file(graph.intern(&lib), &lib_doc, simple_resolver); - - // utils imports lib - let utils_code = r#"local lib = import "lib.jsonnet"; lib"#; - let utils_doc = Document::new(utils_code.to_string(), DocVersion::new(1)); - graph.update_file(graph.intern(&utils), &utils_doc, simple_resolver); - - // main imports utils - let main_code = r#"local utils = import "utils.jsonnet"; utils"#; - let main_doc = Document::new(main_code.to_string(), DocVersion::new(1)); - graph.update_file(graph.intern(&main), &main_doc, simple_resolver); - - let processed = Arc::new(Mutex::new(Vec::new())); - let processed_clone = Arc::clone(&processed); - - // Process lib and its importers (cascade) - graph.process_importers_with_work_queue(graph.intern(&lib), move |file| { - processed_clone.lock().unwrap().push(file); - }); - - let order = graph_paths(&graph, processed.lock().unwrap().clone()); - - // lib first, then utils (imports lib), then main (imports utils) - assert_eq!(order, vec![lib, utils, main]); - } - - #[test] - fn test_process_with_dependencies_unknown_root_is_noop() { - use std::sync::{Arc, Mutex}; - - let graph = ImportGraph::new(PathStore::new()); - let missing = test_path("missing.jsonnet"); - let processed = Arc::new(Mutex::new(Vec::new())); - let processed_clone = Arc::clone(&processed); - - if let Some(root) = graph.file(&missing) { - graph.process_with_dependencies( - root, - |_| true, - move |file| { - processed_clone.lock().unwrap().push(file); - }, - ); - } - - assert_eq!(*processed.lock().unwrap(), Vec::::new()); - } -} diff --git a/crates/jrsonnet-lsp-import/src/graph/mod.rs b/crates/jrsonnet-lsp-import/src/graph/mod.rs new file 
mode 100644 index 00000000..cd22709a --- /dev/null +++ b/crates/jrsonnet-lsp-import/src/graph/mod.rs @@ -0,0 +1,8 @@ +mod operations; +mod parse; +mod traversal; + +pub use operations::{ + parse_document_import_occurrences, parse_document_imports, ImportEntry, ImportGraph, + ImportKind, ImportOccurrence, +}; diff --git a/crates/jrsonnet-lsp-import/src/graph/operations.rs b/crates/jrsonnet-lsp-import/src/graph/operations.rs new file mode 100644 index 00000000..c64e21e0 --- /dev/null +++ b/crates/jrsonnet-lsp-import/src/graph/operations.rs @@ -0,0 +1,238 @@ +//! Import graph for tracking file dependencies. +//! +//! Maintains a bidirectional graph of import relationships between files, +//! enabling efficient cross-file reference lookups. + +use std::collections::{HashMap, HashSet, VecDeque}; + +use jrsonnet_lsp_document::{CanonicalPath, Document, FileId, PathResolver, PathStore}; + +pub use super::parse::{parse_document_import_occurrences, parse_document_imports}; + +/// Information about an import in a file. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ImportEntry { + /// Import flavor (`import`, `importstr`, `importbin`). + pub kind: ImportKind, + /// The binding name if this import is bound to a variable. + /// e.g., "lib" in `local lib = import "lib.jsonnet"` + pub binding_name: Option, + /// The raw import path as written in the source. + pub import_path: String, + /// The resolved canonical path of the imported file. + pub resolved_path: Option, +} + +/// Jsonnet import flavor. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum ImportKind { + Code, + String, + Binary, +} + +/// One import occurrence in source, including its location. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ImportOccurrence { + /// Semantic import entry data. + pub entry: ImportEntry, + /// Range of the import path token in source (e.g. `"foo.libsonnet"`). + pub import_range: rowan::TextRange, +} + +/// Import graph tracking dependencies between files. 
+/// +/// This structure maintains two maps: +/// - `imports`: file → list of files it imports +/// - `imported_by`: file → list of files that import it (reverse index) +#[derive(Debug)] +pub struct ImportGraph { + /// Interned mapping between canonical paths and stable file ids. + pub(super) paths: PathStore, + /// Read-only resolver over interned mapping. + pub(super) resolver: PathResolver, + /// Map of file → import entries in that file. + pub(super) imports: HashMap>, + /// Reverse index: file → files that import it. + pub(super) imported_by: HashMap>, +} + +impl ImportGraph { + /// Create a new empty import graph. + #[must_use] + pub fn new(paths: PathStore) -> Self { + let resolver = paths.resolver(); + Self { + paths, + resolver, + imports: HashMap::new(), + imported_by: HashMap::new(), + } + } + + /// Get or create the interned file id for `path`. + #[must_use] + pub fn intern(&self, path: &CanonicalPath) -> FileId { + self.paths.intern(path) + } + + /// Resolve a file path to an interned file identifier. + #[must_use] + pub fn file(&self, path: &CanonicalPath) -> Option { + self.resolver.file(path) + } + + /// Borrow an interned file identifier's canonical path. + #[must_use] + pub fn path(&self, file: FileId) -> Option> { + self.resolver.path(file) + } + + /// Update the import graph for a file. + /// + /// This parses the document to find all imports, resolves their paths, + /// and updates both the forward and reverse maps. + /// Update a file's imports in the graph with pre-parsed entries. + /// + /// This is the preferred method when you want to minimize lock hold time. + /// Parse the imports first using [`parse_document_imports`], then call this + /// method while holding the write lock. 
+ pub fn update_file_with_entries(&mut self, file_id: FileId, entries: Vec) { + // Remove old entries for this file + self.remove_file(file_id); + + // Update imported_by reverse index + for entry in &entries { + if let Some(ref resolved) = entry.resolved_path { + let resolved_id = self.paths.intern(resolved); + self.imported_by + .entry(resolved_id) + .or_default() + .insert(file_id); + } + } + + // Store the import entries + self.imports.insert(file_id, entries); + } + + /// Update a file's imports in the graph. + /// + /// This parses the document and updates the import graph atomically. + /// For better performance when parsing is slow, use [`parse_document_imports`] + /// followed by [`update_file_with_entries`] to parse outside the lock. + pub fn update_file(&mut self, file_id: FileId, doc: &Document, resolve_import: F) + where + F: Fn(&str) -> Option, + { + let entries = parse_document_imports(doc, &resolve_import); + self.update_file_with_entries(file_id, entries); + } + + /// Remove a file from the import graph. + /// + /// This removes the file's import entries and updates the reverse index. 
+ pub fn remove_file(&mut self, file_id: FileId) { + // Remove from imported_by reverse index + if let Some(old_entries) = self.imports.get(&file_id) { + for entry in old_entries { + if let Some(ref resolved) = entry.resolved_path { + if let Some(resolved_id) = self.resolver.file(resolved) { + let should_remove_entry = self + .imported_by + .get_mut(&resolved_id) + .is_some_and(|importers| { + importers.remove(&file_id); + importers.is_empty() + }); + if should_remove_entry { + self.imported_by.remove(&resolved_id); + } + } + } + } + } + + // Remove the import entries + self.imports.remove(&file_id); + } + + pub(super) fn direct_importers_by_id(&self, file_id: FileId) -> Vec { + self.imported_by + .get(&file_id) + .map(|s| s.iter().copied().collect()) + .unwrap_or_default() + } + + #[must_use] + pub(super) fn resolved_entry_id(&self, entry: &ImportEntry) -> Option { + entry + .resolved_path + .as_ref() + .and_then(|path| self.resolver.file(path)) + } + + /// Get the files that directly import a given file. + #[must_use] + pub fn direct_importers(&self, file: FileId) -> Vec { + let mut importers = self.direct_importers_by_id(file); + importers.sort_unstable(); + importers + } + + /// Get all files that transitively import a given file. + /// + /// This performs a breadth-first search through the import graph + /// to find all files that depend on the given file, directly or indirectly. + #[must_use] + pub fn transitive_importers(&self, file: FileId) -> Vec { + let mut result = HashSet::new(); + let mut queue = VecDeque::from([file]); + + while let Some(current) = queue.pop_front() { + for importer in self.direct_importers_by_id(current) { + if result.insert(importer) { + queue.push_back(importer); + } + } + } + + let mut importers: Vec<_> = result.into_iter().collect(); + importers.sort_unstable(); + importers + } + + /// Get the import entries for a file. 
+ pub fn imports(&self, file: FileId) -> &[ImportEntry] { + self.imports.get(&file).map_or(&[], Vec::as_slice) + } + + /// Find imports in a file that point to a specific target file. + #[must_use] + pub fn imports_of_target(&self, file_id: FileId, target_id: FileId) -> Vec<&ImportEntry> { + self.imports + .get(&file_id) + .map(|entries| { + entries + .iter() + .filter(|entry| self.resolved_entry_id(entry) == Some(target_id)) + .collect() + }) + .unwrap_or_default() + } + + /// Get the number of files tracked in the graph. + #[must_use] + pub fn file_count(&self) -> usize { + self.imports.len() + } + + /// Get all files tracked in the graph. + pub fn all_files(&self) -> impl Iterator + '_ { + self.imports.keys().copied() + } +} + +#[cfg(test)] +#[path = "tests.rs"] +mod tests; diff --git a/crates/jrsonnet-lsp-import/src/graph/parse.rs b/crates/jrsonnet-lsp-import/src/graph/parse.rs new file mode 100644 index 00000000..6934b676 --- /dev/null +++ b/crates/jrsonnet-lsp-import/src/graph/parse.rs @@ -0,0 +1,265 @@ +use jrsonnet_lsp_document::{strip_string_quotes, CanonicalPath, Document}; +use jrsonnet_rowan_parser::{ + nodes::{Bind, Destruct, ExprImport, ImportKindKind, StmtLocal}, + AstNode, AstToken, SyntaxKind, +}; + +use super::{ImportEntry, ImportKind, ImportOccurrence}; +use crate::parse::extract_import_path; + +/// Parse import statements from a document. +/// +/// This extracts all import entries from the document without modifying +/// the import graph. Use this when you want to parse outside a lock, +/// then pass the results to [`ImportGraph::update_file_with_entries`]. +pub fn parse_document_imports(doc: &Document, resolve_import: &F) -> Vec +where + F: Fn(&str) -> Option, +{ + parse_document_import_occurrences(doc, resolve_import) + .into_iter() + .map(|occurrence| occurrence.entry) + .collect() +} + +/// Parse import occurrences from a document with source ranges. 
+/// +/// This is useful for diagnostics where callers need to point at the exact +/// import token in source when a path cannot be resolved. +pub fn parse_document_import_occurrences( + doc: &Document, + resolve_import: &F, +) -> Vec +where + F: Fn(&str) -> Option, +{ + let mut occurrences = Vec::new(); + let mut seen_expr_import_ranges = std::collections::HashSet::new(); + let mut seen_string_ranges = std::collections::HashSet::new(); + let ast = doc.ast(); + + // First pass: find imports in local statements (these have bindings) + for node in ast.syntax().descendants() { + if node.kind() == SyntaxKind::STMT_LOCAL { + if let Some(stmt_local) = StmtLocal::cast(node.clone()) { + for bind in stmt_local.binds() { + if let Some((occurrence, import_range)) = + parse_bind_import_with_range(&bind, resolve_import) + { + seen_expr_import_ranges.insert(import_range); + seen_string_ranges.insert(occurrence.import_range); + occurrences.push(occurrence); + } + } + } + } + } + + // Second pass: find bare import expressions that weren't part of a local statement + for node in ast.syntax().descendants() { + if node.kind() == SyntaxKind::EXPR_IMPORT { + let range = node.text_range(); + // Skip if we already captured this import in a local statement + if seen_expr_import_ranges.contains(&range) { + continue; + } + if let Some(import) = ExprImport::cast(node) { + if let Some(occurrence) = parse_import_occurrence(&import, None, resolve_import) { + seen_expr_import_ranges.insert(range); + seen_string_ranges.insert(occurrence.import_range); + occurrences.push(occurrence); + } + } + } + } + + // Third pass fallback: recover imports from token stream for syntax-broken files. + occurrences.extend(parse_token_fallback_import_occurrences( + doc, + resolve_import, + &mut seen_string_ranges, + )); + + occurrences +} + +/// Parse a bind to extract import information, returning the import's text range. 
+fn parse_bind_import_with_range( + bind: &Bind, + resolve_import: &F, +) -> Option<(ImportOccurrence, rowan::TextRange)> +where + F: Fn(&str) -> Option, +{ + let Bind::BindDestruct(bd) = bind else { + return None; + }; + + let destruct = bd.into()?; + let Destruct::DestructFull(full) = destruct else { + return None; + }; + + let bind_name = full.name()?.ident_lit()?.text().to_string(); + + // Check if the expression is an import + let expr = bd.value()?; + for node in expr.syntax().descendants() { + if node.kind() == SyntaxKind::EXPR_IMPORT { + let range = node.text_range(); + if let Some(import) = ExprImport::cast(node) { + if let Some(occurrence) = + parse_import_occurrence(&import, Some(bind_name.clone()), resolve_import) + { + return Some((occurrence, range)); + } + } + } + } + + None +} + +fn parse_import_occurrence( + import: &ExprImport, + binding_name: Option, + resolve_import: &F, +) -> Option +where + F: Fn(&str) -> Option, +{ + let kind = import_kind_from_expr(import)?; + let path = extract_import_path(import)?; + let resolved = resolve_import(&path); + let import_range = import.text()?.syntax().text_range(); + + Some(ImportOccurrence { + entry: ImportEntry { + kind, + binding_name, + import_path: path, + resolved_path: resolved, + }, + import_range, + }) +} + +fn parse_token_fallback_import_occurrences( + doc: &Document, + resolve_import: &F, + seen_string_ranges: &mut std::collections::HashSet, +) -> Vec +where + F: Fn(&str) -> Option, +{ + let tokens: Vec<_> = doc + .ast() + .syntax() + .descendants_with_tokens() + .filter_map(rowan::NodeOrToken::into_token) + .collect(); + + let mut occurrences = Vec::new(); + for (idx, token) in tokens.iter().enumerate() { + if !is_import_keyword(token.kind()) { + continue; + } + + let Some(import_text) = next_non_trivia_token(&tokens, idx + 1) else { + continue; + }; + if !is_import_string_token(import_text.kind()) { + continue; + } + + let import_range = import_text.text_range(); + if 
!seen_string_ranges.insert(import_range) { + continue; + } + + let import_path = strip_string_quotes(import_text.text()); + if import_path.is_empty() { + continue; + } + let Some(kind) = import_kind_from_keyword_token(token.kind()) else { + continue; + }; + + occurrences.push(ImportOccurrence { + entry: ImportEntry { + kind, + binding_name: binding_name_from_import_token(import_text), + resolved_path: resolve_import(&import_path), + import_path, + }, + import_range, + }); + } + occurrences +} + +fn next_non_trivia_token( + tokens: &[jrsonnet_rowan_parser::SyntaxToken], + start_idx: usize, +) -> Option<&jrsonnet_rowan_parser::SyntaxToken> { + tokens.get(start_idx..)?.iter().find(|token| { + !matches!( + token.kind(), + SyntaxKind::WHITESPACE + | SyntaxKind::MULTI_LINE_COMMENT + | SyntaxKind::SINGLE_LINE_HASH_COMMENT + | SyntaxKind::SINGLE_LINE_SLASH_COMMENT + ) + }) +} + +const fn is_import_keyword(kind: SyntaxKind) -> bool { + matches!( + kind, + SyntaxKind::IMPORT_KW | SyntaxKind::IMPORTSTR_KW | SyntaxKind::IMPORTBIN_KW + ) +} + +const fn is_import_string_token(kind: SyntaxKind) -> bool { + matches!( + kind, + SyntaxKind::STRING_DOUBLE + | SyntaxKind::STRING_SINGLE + | SyntaxKind::STRING_DOUBLE_VERBATIM + | SyntaxKind::STRING_SINGLE_VERBATIM + | SyntaxKind::ERROR_STRING_DOUBLE_UNTERMINATED + | SyntaxKind::ERROR_STRING_SINGLE_UNTERMINATED + | SyntaxKind::ERROR_STRING_DOUBLE_VERBATIM_UNTERMINATED + | SyntaxKind::ERROR_STRING_SINGLE_VERBATIM_UNTERMINATED + ) +} + +fn binding_name_from_import_token(token: &jrsonnet_rowan_parser::SyntaxToken) -> Option { + let bind = token.parent()?.ancestors().find_map(Bind::cast)?; + let Bind::BindDestruct(bind_destruct) = bind else { + return None; + }; + let destruct = jrsonnet_rowan_parser::nodes::BindDestruct::into(&bind_destruct)?; + let Destruct::DestructFull(full) = destruct else { + return None; + }; + Some(full.name()?.ident_lit()?.text().to_string()) +} + +fn import_kind_from_expr(import: &ExprImport) -> Option { + let 
token_kind = import.import_kind()?.kind(); + Some(match token_kind { + ImportKindKind::ImportKw => ImportKind::Code, + ImportKindKind::ImportstrKw => ImportKind::String, + ImportKindKind::ImportbinKw => ImportKind::Binary, + }) +} + +const fn import_kind_from_keyword_token(kind: SyntaxKind) -> Option { + match kind { + SyntaxKind::IMPORT_KW => Some(ImportKind::Code), + SyntaxKind::IMPORTSTR_KW => Some(ImportKind::String), + SyntaxKind::IMPORTBIN_KW => Some(ImportKind::Binary), + _ => None, + } +} diff --git a/crates/jrsonnet-lsp-import/src/graph/tests.rs b/crates/jrsonnet-lsp-import/src/graph/tests.rs new file mode 100644 index 00000000..bf4a1971 --- /dev/null +++ b/crates/jrsonnet-lsp-import/src/graph/tests.rs @@ -0,0 +1,561 @@ +use std::path::PathBuf; + +use jrsonnet_lsp_document::DocVersion; + +use super::*; + +fn test_path(name: &str) -> CanonicalPath { + CanonicalPath::new(PathBuf::from(format!("/test/{name}"))) +} + +/// A simple resolver that just appends the import path to /test/ +fn simple_resolver(import: &str) -> Option { + if import.is_empty() { + None + } else { + Some(test_path(import)) + } +} + +fn graph_paths(graph: &ImportGraph, files: Vec) -> Vec { + files + .into_iter() + .filter_map(|file| { + graph + .path(file) + .map(|path| path.as_canonical_path().clone()) + }) + .collect() +} + +#[test] +fn test_parse_local_import() { + let code = r#"local lib = import "lib.jsonnet"; lib"#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + let entries = parse_document_imports(&doc, &simple_resolver); + + assert_eq!( + entries, + vec![ImportEntry { + kind: ImportKind::Code, + binding_name: Some("lib".to_string()), + import_path: "lib.jsonnet".to_string(), + resolved_path: Some(test_path("lib.jsonnet")), + }] + ); +} + +#[test] +fn test_parse_import_occurrences_include_string_token_range() { + let code = r#"local lib = import "lib.jsonnet"; lib"#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let occurrences = 
parse_document_import_occurrences(&doc, &simple_resolver); + let start = u32::try_from( + code.find("\"lib.jsonnet\"") + .expect("import string should exist"), + ) + .unwrap(); + let end = start + u32::try_from("\"lib.jsonnet\"".len()).unwrap(); + + assert_eq!( + occurrences, + vec![ImportOccurrence { + entry: ImportEntry { + kind: ImportKind::Code, + binding_name: Some("lib".to_string()), + import_path: "lib.jsonnet".to_string(), + resolved_path: Some(test_path("lib.jsonnet")), + }, + import_range: rowan::TextRange::new(start.into(), end.into()), + }] + ); +} + +#[test] +fn test_parse_import_occurrences_fallback_unterminated_string() { + let code = r#"local lib = import "lib.jsonnet"#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let occurrences = parse_document_import_occurrences(&doc, &simple_resolver); + let start = u32::try_from( + code.find("\"lib.jsonnet") + .expect("unterminated import string should exist"), + ) + .unwrap(); + let end = u32::try_from(code.len()).unwrap(); + + assert_eq!( + occurrences, + vec![ImportOccurrence { + entry: ImportEntry { + kind: ImportKind::Code, + binding_name: Some("lib".to_string()), + import_path: "lib.jsonnet".to_string(), + resolved_path: Some(test_path("lib.jsonnet")), + }, + import_range: rowan::TextRange::new(start.into(), end.into()), + }] + ); +} + +#[test] +fn test_parse_local_import_single_quote() { + let code = "local lib = import 'lib.jsonnet'; lib"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + let entries = parse_document_imports(&doc, &simple_resolver); + + assert_eq!( + entries, + vec![ImportEntry { + kind: ImportKind::Code, + binding_name: Some("lib".to_string()), + import_path: "lib.jsonnet".to_string(), + resolved_path: Some(test_path("lib.jsonnet")), + }] + ); +} + +#[test] +fn test_parse_multiple_imports() { + let code = r#" +local lib1 = import "lib1.jsonnet"; +local lib2 = import "lib2.jsonnet"; +lib1 + lib2 +"#; + let doc = Document::new(code.to_string(), 
DocVersion::new(1)); + + let entries = parse_document_imports(&doc, &simple_resolver); + + assert_eq!( + entries, + vec![ + ImportEntry { + kind: ImportKind::Code, + binding_name: Some("lib1".to_string()), + import_path: "lib1.jsonnet".to_string(), + resolved_path: Some(test_path("lib1.jsonnet")), + }, + ImportEntry { + kind: ImportKind::Code, + binding_name: Some("lib2".to_string()), + import_path: "lib2.jsonnet".to_string(), + resolved_path: Some(test_path("lib2.jsonnet")), + }, + ] + ); +} + +#[test] +fn test_import_graph_update() { + let mut graph = ImportGraph::new(PathStore::new()); + + let main = test_path("main.jsonnet"); + let lib = test_path("lib.jsonnet"); + let code = r#"local lib = import "lib.jsonnet"; lib"#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + graph.update_file(graph.intern(&main), &doc, simple_resolver); + + // Check that main imports lib + let imports = graph.imports(graph.intern(&main)); + assert_eq!( + imports, + vec![ImportEntry { + kind: ImportKind::Code, + binding_name: Some("lib".to_string()), + import_path: "lib.jsonnet".to_string(), + resolved_path: Some(lib.clone()), + }] + ); + + // Check the reverse index + let importers = graph_paths(&graph, graph.direct_importers(graph.intern(&lib))); + assert_eq!(importers, vec![main]); +} + +#[test] +fn test_import_graph_lookups_with_equivalent_paths() { + let mut graph = ImportGraph::new(PathStore::new()); + + let main = test_path("main.jsonnet"); + let code = r#"local lib = import "lib.jsonnet"; lib"#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + graph.update_file(graph.intern(&main), &doc, simple_resolver); + + let main_lookup = test_path("main.jsonnet"); + let lib_lookup = test_path("lib.jsonnet"); + + assert_eq!( + graph_paths(&graph, graph.direct_importers(graph.intern(&lib_lookup)),), + vec![main_lookup.clone()] + ); + assert_eq!( + graph.imports_of_target(graph.intern(&main_lookup), graph.intern(&lib_lookup),), + vec![&ImportEntry { + 
kind: ImportKind::Code, + binding_name: Some("lib".to_string()), + import_path: "lib.jsonnet".to_string(), + resolved_path: Some(lib_lookup), + }] + ); +} + +#[test] +fn test_import_graph_remove() { + let mut graph = ImportGraph::new(PathStore::new()); + + let main = test_path("main.jsonnet"); + let code = r#"local lib = import "lib.jsonnet"; lib"#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + + graph.update_file(graph.intern(&main), &doc, simple_resolver); + + // Remove main + graph.remove_file(graph.intern(&main)); + + // Check that main no longer has imports + assert!(graph.imports(graph.intern(&main)).is_empty()); + + // Check the reverse index is updated + let lib = test_path("lib.jsonnet"); + assert!(graph.direct_importers(graph.intern(&lib)).is_empty()); +} + +#[test] +fn test_transitive_importers() { + let mut graph = ImportGraph::new(PathStore::new()); + + // Setup: main.jsonnet -> utils.jsonnet -> lib.jsonnet + let main = test_path("main.jsonnet"); + let utils = test_path("utils.jsonnet"); + let lib = test_path("lib.jsonnet"); + + // main imports utils + let main_code = r#"local utils = import "utils.jsonnet"; utils"#; + let main_doc = Document::new(main_code.to_string(), DocVersion::new(1)); + graph.update_file(graph.intern(&main), &main_doc, simple_resolver); + + // utils imports lib + let utils_code = r#"local lib = import "lib.jsonnet"; lib"#; + let utils_doc = Document::new(utils_code.to_string(), DocVersion::new(1)); + graph.update_file(graph.intern(&utils), &utils_doc, simple_resolver); + + // Check transitive importers of lib + let importers = graph_paths(&graph, graph.transitive_importers(graph.intern(&lib))); + assert_eq!(importers, vec![main, utils]); +} + +#[test] +fn test_imports_of_target() { + let mut graph = ImportGraph::new(PathStore::new()); + + let main = test_path("main.jsonnet"); + let lib = test_path("lib.jsonnet"); + + let code = r#" +local lib = import "lib.jsonnet"; +local other = import "other.jsonnet"; +lib 
+ other +"#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + graph.update_file(graph.intern(&main), &doc, simple_resolver); + + // Get imports of lib.jsonnet from main + let imports = graph.imports_of_target(graph.intern(&main), graph.intern(&lib)); + assert_eq!( + imports, + vec![&ImportEntry { + kind: ImportKind::Code, + binding_name: Some("lib".to_string()), + import_path: "lib.jsonnet".to_string(), + resolved_path: Some(lib), + }] + ); +} + +#[test] +fn test_topological_order_simple() { + let mut graph = ImportGraph::new(PathStore::new()); + + // Setup: main -> utils -> lib (chain dependency) + let main = test_path("main.jsonnet"); + let utils = test_path("utils.jsonnet"); + let lib = test_path("lib.jsonnet"); + + // lib has no imports + let lib_code = "{}"; + let lib_doc = Document::new(lib_code.to_string(), DocVersion::new(1)); + graph.update_file(graph.intern(&lib), &lib_doc, simple_resolver); + + // utils imports lib + let utils_code = r#"local lib = import "lib.jsonnet"; lib"#; + let utils_doc = Document::new(utils_code.to_string(), DocVersion::new(1)); + graph.update_file(graph.intern(&utils), &utils_doc, simple_resolver); + + // main imports utils + let main_code = r#"local utils = import "utils.jsonnet"; utils"#; + let main_doc = Document::new(main_code.to_string(), DocVersion::new(1)); + graph.update_file(graph.intern(&main), &main_doc, simple_resolver); + + let levels = graph + .topological_order() + .map(|levels| { + levels + .into_iter() + .map(|level| graph_paths(&graph, level)) + .collect::>() + }) + .expect("Should not have cycles"); + + // lib should be in first level (no deps) + // utils should be in second level (depends on lib) + // main should be in third level (depends on utils) + assert_eq!(levels, vec![vec![lib], vec![utils], vec![main]]); +} + +#[test] +fn test_topological_order_parallel_files() { + let mut graph = ImportGraph::new(PathStore::new()); + + // Setup: main imports both utils1 and utils2 (independent) + let 
main = test_path("main.jsonnet"); + let utils1 = test_path("utils1.jsonnet"); + let utils2 = test_path("utils2.jsonnet"); + + // utils1 has no imports + let utils1_doc = Document::new("{}".to_string(), DocVersion::new(1)); + graph.update_file(graph.intern(&utils1), &utils1_doc, simple_resolver); + + // utils2 has no imports + let utils2_doc = Document::new("{}".to_string(), DocVersion::new(1)); + graph.update_file(graph.intern(&utils2), &utils2_doc, simple_resolver); + + // main imports both + let main_code = r#" +local u1 = import "utils1.jsonnet"; +local u2 = import "utils2.jsonnet"; +u1 + u2 +"#; + let main_doc = Document::new(main_code.to_string(), DocVersion::new(1)); + graph.update_file(graph.intern(&main), &main_doc, simple_resolver); + + let levels = graph + .topological_order() + .map(|levels| { + levels + .into_iter() + .map(|level| graph_paths(&graph, level)) + .collect::>() + }) + .expect("Should not have cycles"); + + // utils1 and utils2 should be in first level (independent, can be parallel, sorted) + // main should be in second level + assert_eq!(levels, vec![vec![utils1, utils2], vec![main]]); +} + +#[test] +fn test_process_in_parallel() { + use std::sync::atomic::{AtomicUsize, Ordering}; + + let mut graph = ImportGraph::new(PathStore::new()); + + // Setup: main -> lib (chain) + let main = test_path("main.jsonnet"); + let lib = test_path("lib.jsonnet"); + + // lib has no imports + let lib_doc = Document::new("{}".to_string(), DocVersion::new(1)); + graph.update_file(graph.intern(&lib), &lib_doc, simple_resolver); + + // main imports lib + let main_code = r#"local lib = import "lib.jsonnet"; lib"#; + let main_doc = Document::new(main_code.to_string(), DocVersion::new(1)); + graph.update_file(graph.intern(&main), &main_doc, simple_resolver); + + let counter = AtomicUsize::new(0); + graph + .process_in_parallel(|_file| { + counter.fetch_add(1, Ordering::SeqCst); + }) + .expect("should process files in parallel"); + 
assert_eq!(counter.load(Ordering::SeqCst), 2); +} + +#[test] +fn test_process_in_parallel_order() { + use std::sync::{Arc, Mutex}; + + let mut graph = ImportGraph::new(PathStore::new()); + + // Setup: main -> lib (chain) + let main = test_path("main.jsonnet"); + let lib = test_path("lib.jsonnet"); + + // lib has no imports + let lib_doc = Document::new("{}".to_string(), DocVersion::new(1)); + graph.update_file(graph.intern(&lib), &lib_doc, simple_resolver); + + // main imports lib + let main_code = r#"local lib = import "lib.jsonnet"; lib"#; + let main_doc = Document::new(main_code.to_string(), DocVersion::new(1)); + graph.update_file(graph.intern(&main), &main_doc, simple_resolver); + + let processed_order = Arc::new(Mutex::new(Vec::new())); + let order_clone = Arc::clone(&processed_order); + graph.process_in_parallel(move |file| { + order_clone.lock().unwrap().push(file); + }); + + let order = graph_paths(&graph, processed_order.lock().unwrap().clone()); + // lib should be processed before main (lib has no deps, main depends on lib) + assert_eq!(order, vec![lib, main]); +} + +#[test] +fn test_process_with_dependencies() { + use std::sync::{Arc, Mutex}; + + let mut graph = ImportGraph::new(PathStore::new()); + + // Setup: main -> utils -> lib + let main = test_path("main.jsonnet"); + let utils = test_path("utils.jsonnet"); + let lib = test_path("lib.jsonnet"); + + // lib has no imports + let lib_doc = Document::new("{}".to_string(), DocVersion::new(1)); + graph.update_file(graph.intern(&lib), &lib_doc, simple_resolver); + + // utils imports lib + let utils_code = r#"local lib = import "lib.jsonnet"; lib"#; + let utils_doc = Document::new(utils_code.to_string(), DocVersion::new(1)); + graph.update_file(graph.intern(&utils), &utils_doc, simple_resolver); + + // main imports utils + let main_code = r#"local utils = import "utils.jsonnet"; utils"#; + let main_doc = Document::new(main_code.to_string(), DocVersion::new(1)); + graph.update_file(graph.intern(&main), 
&main_doc, simple_resolver); + + let processed = Arc::new(Mutex::new(Vec::new())); + let processed_clone = Arc::clone(&processed); + + // Process main and its dependencies + graph.process_with_dependencies( + graph.intern(&main), + |_| true, + move |file| { + processed_clone.lock().unwrap().push(file); + }, + ); + + let order = graph_paths(&graph, processed.lock().unwrap().clone()); + + // lib should be processed before utils, utils before main + assert_eq!(order, vec![lib, utils, main]); +} + +#[test] +fn test_process_with_dependencies_filtered_by_kind() { + use std::sync::{Arc, Mutex}; + + let mut graph = ImportGraph::new(PathStore::new()); + + let main = test_path("main.jsonnet"); + let data = test_path("data.jsonnet"); + let script = test_path("script.k"); + + graph.update_file_with_entries( + graph.intern(&main), + vec![ + ImportEntry { + kind: ImportKind::Code, + binding_name: Some("data".to_string()), + import_path: "data.jsonnet".to_string(), + resolved_path: Some(data.clone()), + }, + ImportEntry { + kind: ImportKind::String, + binding_name: Some("payload".to_string()), + import_path: "script.k".to_string(), + resolved_path: Some(script), + }, + ], + ); + + let processed = Arc::new(Mutex::new(Vec::new())); + let processed_clone = Arc::clone(&processed); + + graph.process_with_dependencies( + graph.intern(&main), + |entry| entry.kind == ImportKind::Code, + move |file| { + processed_clone.lock().unwrap().push(file); + }, + ); + + let order = graph_paths(&graph, processed.lock().unwrap().clone()); + assert_eq!(order, vec![data, main]); +} + +#[test] +fn test_process_importers_with_work_queue() { + use std::sync::{Arc, Mutex}; + + let mut graph = ImportGraph::new(PathStore::new()); + + // Setup: main -> utils -> lib + let main = test_path("main.jsonnet"); + let utils = test_path("utils.jsonnet"); + let lib = test_path("lib.jsonnet"); + + // lib has no imports + let lib_doc = Document::new("{}".to_string(), DocVersion::new(1)); + 
graph.update_file(graph.intern(&lib), &lib_doc, simple_resolver); + + // utils imports lib + let utils_code = r#"local lib = import "lib.jsonnet"; lib"#; + let utils_doc = Document::new(utils_code.to_string(), DocVersion::new(1)); + graph.update_file(graph.intern(&utils), &utils_doc, simple_resolver); + + // main imports utils + let main_code = r#"local utils = import "utils.jsonnet"; utils"#; + let main_doc = Document::new(main_code.to_string(), DocVersion::new(1)); + graph.update_file(graph.intern(&main), &main_doc, simple_resolver); + + let processed = Arc::new(Mutex::new(Vec::new())); + let processed_clone = Arc::clone(&processed); + + // Process lib and its importers (cascade) + graph.process_importers_with_work_queue(graph.intern(&lib), move |file| { + processed_clone.lock().unwrap().push(file); + }); + + let order = graph_paths(&graph, processed.lock().unwrap().clone()); + + // lib first, then utils (imports lib), then main (imports utils) + assert_eq!(order, vec![lib, utils, main]); +} + +#[test] +fn test_process_with_dependencies_unknown_root_is_noop() { + use std::sync::{Arc, Mutex}; + + let graph = ImportGraph::new(PathStore::new()); + let missing = test_path("missing.jsonnet"); + let processed = Arc::new(Mutex::new(Vec::new())); + let processed_clone = Arc::clone(&processed); + + if let Some(root) = graph.file(&missing) { + graph.process_with_dependencies( + root, + |_| true, + move |file| { + processed_clone.lock().unwrap().push(file); + }, + ); + } + + assert_eq!(*processed.lock().unwrap(), Vec::<FileId>::new()); +} diff --git a/crates/jrsonnet-lsp-import/src/graph/traversal.rs b/crates/jrsonnet-lsp-import/src/graph/traversal.rs new file mode 100644 index 00000000..1378c790 --- /dev/null +++ b/crates/jrsonnet-lsp-import/src/graph/traversal.rs @@ -0,0 +1,219 @@ +use std::collections::{HashMap, HashSet}; + +use jrsonnet_lsp_document::FileId; + +use super::{ImportEntry, ImportGraph}; +use crate::work_queue::{WorkQueue, WorkQueueExt}; + +impl ImportGraph { + /// 
Compute a topological ordering of files based on import dependencies. + /// + /// Returns files in an order where each file comes after all files it imports. + /// Files at the same "level" (no dependencies between them) can be processed + /// in parallel. + /// + /// Returns `None` if there's a cycle in the import graph. + #[must_use] + pub fn topological_order(&self) -> Option<Vec<Vec<FileId>>> { + let mut in_degree: HashMap<FileId, usize> = HashMap::new(); + let mut levels: Vec<Vec<FileId>> = Vec::new(); + + // Initialize in-degree for all files to 0 + for &path_id in self.imports.keys() { + in_degree.insert(path_id, 0); + } + + // Calculate in-degree: count how many dependencies each file has + // (how many files it imports that are also in our graph) + for (&path_id, entries) in &self.imports { + let dep_count = entries + .iter() + .filter_map(|entry| self.resolved_entry_id(entry)) + .filter(|dep_id| self.imports.contains_key(dep_id)) + .count(); + in_degree.insert(path_id, dep_count); + } + + // Find all files with no dependencies (in-degree 0) + let mut current_level: Vec<FileId> = in_degree + .iter() + .filter(|(_, &deg)| deg == 0) + .map(|(&path_id, _)| path_id) + .collect(); + + let mut processed = HashSet::new(); + + while !current_level.is_empty() { + // Sort for deterministic ordering + current_level.sort(); + + // Mark current level as processed + for path_id in &current_level { + processed.insert(*path_id); + } + + levels.push(current_level.clone()); + + // Find next level: files whose dependencies are all now processed + let mut next_level = Vec::new(); + for path_id in &current_level { + // For each file that imports this one + if let Some(importers) = self.imported_by.get(path_id) { + for importer in importers { + if processed.contains(importer) { + continue; + } + // Check if all dependencies of importer are processed + let all_deps_processed = self.imports.get(importer).is_none_or(|entries| { + entries.iter().all(|entry| { + self.resolved_entry_id(entry).is_none_or(|dep_id| { + processed.contains(&dep_id) + || 
!self.imports.contains_key(&dep_id) + }) + }) + }); + + if all_deps_processed && !next_level.contains(importer) { + next_level.push(*importer); + } + } + } + } + + current_level = next_level; + } + + // Check if all files were processed (no cycles) + if processed.len() == self.imports.len() { + Some(levels) + } else { + None // Cycle detected + } + } + + /// Process files in topological order with parallel processing within each level. + /// + /// This computes a topological ordering of files based on import dependencies, + /// then processes each level in parallel. Files in the same level have no + /// dependencies on each other and can safely be processed concurrently. + /// + /// # Arguments + /// * `f` - Function to call for each file path. Must be `Sync` for parallel execution. + /// + /// # Returns + /// * `Some(())` if processing completed successfully + /// * `None` if there's a cycle in the import graph + /// + /// # Example + /// ```ignore + /// graph.process_in_parallel(|file| { + /// analyze_file(file); + /// }); + /// ``` + pub fn process_in_parallel<F>(&self, f: F) -> Option<()> + where + F: Fn(FileId) + Sync, + { + use rayon::prelude::*; + + let levels = self.topological_order()?; + + // Process each level sequentially, but files within each level in parallel + for level in levels { + level.par_iter().copied().for_each(&f); + } + + Some(()) + } + + /// Process files in reverse topological order with parallel processing within each level. + /// + /// Similar to `process_in_parallel`, but processes files in reverse order - + /// files that are imported by others are processed last. This is useful when + /// you need to process dependents before their dependencies. 
+ pub fn process_in_parallel_reverse<F>(&self, f: F) -> Option<()> + where + F: Fn(FileId) + Sync, + { + use rayon::prelude::*; + + let levels = self.topological_order()?; + + // Process levels in reverse order + for level in levels.into_iter().rev() { + level.par_iter().copied().for_each(&f); + } + + Some(()) + } + + /// Process a file and its transitive dependencies using a work queue. + /// + /// This dynamically discovers dependencies during processing and ensures + /// dependencies are processed before dependents. Uses per-level parallelism. + /// + /// # Arguments + /// * `root` - The root file to process + /// * `f` - Function to call for each file path + /// + /// # Example + /// ```ignore + /// graph.process_with_dependencies(file, |_| true, |dep| { + /// analyze_file(dep); + /// }); + /// ``` + pub fn process_with_dependencies<P, F>(&self, root: FileId, include_dependency: P, f: F) + where + F: Fn(FileId) + Sync, + P: Fn(&ImportEntry) -> bool + Sync, + { + let mut work = WorkQueue::new(); + work.push(root); + + let levels = work.run(|path_id, deps| { + // Get dependencies from import graph + if let Some(entries) = self.imports.get(path_id) { + for entry in entries { + if !include_dependency(entry) { + continue; + } + if let Some(resolved_id) = self.resolved_entry_id(entry) { + deps.push(resolved_id); + } + } + } + }); + + // Process levels in dependency order (leaves first) + levels.process_parallel(|path_id| f(*path_id)); + } + + /// Process a file and its transitive importers using a work queue. + /// + /// This processes files in reverse dependency order - the root file first, + /// then files that import it, and so on. Uses per-level parallelism. + /// + /// Useful for invalidation cascading: when a file changes, process it + /// and all files that depend on it. 
+ pub fn process_importers_with_work_queue<F>(&self, root: FileId, f: F) + where + F: Fn(FileId) + Sync, + { + let mut work = WorkQueue::new(); + work.push(root); + + let mut levels = work.run(|path_id, deps| { + // Get files that import this file + for importer in self.direct_importers_by_id(*path_id) { + deps.push(importer); + } + }); + + // Reverse levels: work queue puts leaves (files with no importers) at level 0, + // but we want root first, then progressively outward to importers + levels.reverse(); + + // Process levels (root first, then importers) + levels.process_parallel(|path_id| f(*path_id)); + } +} diff --git a/crates/jrsonnet-lsp-import/src/resolve.rs b/crates/jrsonnet-lsp-import/src/resolve.rs deleted file mode 100644 index c38856f6..00000000 --- a/crates/jrsonnet-lsp-import/src/resolve.rs +++ /dev/null @@ -1,241 +0,0 @@ -//! Shared import path resolution utilities. -//! -//! This module centralizes path resolution rules so all LSP subsystems resolve -//! imports consistently. - -use std::path::{Path, PathBuf}; - -use jrsonnet_lsp_document::{CanonicalPath, Document}; - -use crate::graph::{ - parse_document_import_occurrences, parse_document_imports, ImportEntry, ImportOccurrence, -}; - -/// Import-resolution boundary for one importing file. -/// -/// This captures the importing file path and effective import roots once, -/// then exposes a single API that callers can use for raw path resolution and -/// import parsing with consistent behavior. 
-#[derive(Debug, Clone, Copy)] -pub struct ImportResolution<'a> { - importer_file: &'a CanonicalPath, - import_roots: &'a [PathBuf], -} - -impl<'a> ImportResolution<'a> { - #[must_use] - pub fn new(importer_file: &'a CanonicalPath, import_roots: &'a [PathBuf]) -> Self { - Self { - importer_file, - import_roots, - } - } - - #[must_use] - pub fn resolve(self, import_path: &str) -> Option { - resolve_import_path(self.importer_file, import_path, self.import_roots) - } - - #[must_use] - pub fn parse_entries(self, doc: &Document) -> Vec { - parse_document_imports(doc, &|import_path| self.resolve(import_path)) - } - - #[must_use] - pub fn parse_occurrences(self, doc: &Document) -> Vec { - parse_document_import_occurrences(doc, &|import_path| self.resolve(import_path)) - } -} - -#[must_use] -fn resolve_import_path( - importer_file: &CanonicalPath, - import_path: &str, - import_roots: &[PathBuf], -) -> Option { - resolve_import_path_from_base(importer_file.as_path(), import_path, import_roots) -} - -#[must_use] -fn resolve_import_path_from_base( - base_file: &Path, - import_path: &str, - import_roots: &[PathBuf], -) -> Option { - let import = Path::new(import_path); - - if import.is_absolute() { - return canonical_if_exists(import); - } - - if let Some(parent) = base_file.parent() { - let relative = parent.join(import); - if let Some(canonical) = canonical_if_exists(&relative) { - return Some(canonical); - } - } - - for root in import_roots { - let candidate = root.join(import); - if let Some(canonical) = canonical_if_exists(&candidate) { - return Some(canonical); - } - } - - None -} - -fn canonical_if_exists(path: &Path) -> Option { - path.canonicalize().ok().map(CanonicalPath::new) -} - -#[cfg(test)] -mod tests { - use std::fs; - - use jrsonnet_lsp_document::DocVersion; - use tempfile::TempDir; - - use super::*; - - #[test] - fn test_resolve_relative_first() { - let tmp = TempDir::new().expect("tempdir should be created"); - let root = tmp.path(); - let importer = 
root.join("main.jsonnet"); - let local_lib = root.join("lib.jsonnet"); - fs::write(&importer, "import \"lib.jsonnet\"").expect("importer should be written"); - fs::write(&local_lib, "{}").expect("local lib should be written"); - - let importer = CanonicalPath::new( - importer - .canonicalize() - .expect("importer path should canonicalize"), - ); - let import_resolution = ImportResolution::new(&importer, &[]); - let resolved = import_resolution.resolve("lib.jsonnet"); - assert_eq!( - resolved, - Some(CanonicalPath::new( - local_lib - .canonicalize() - .expect("local lib path should canonicalize") - )) - ); - } - - #[test] - fn test_resolve_from_import_roots() { - let tmp = TempDir::new().expect("tempdir should be created"); - let root = tmp.path(); - let importer_dir = root.join("app"); - let jpath_dir = root.join("vendor"); - fs::create_dir_all(&importer_dir).expect("importer directory should be created"); - fs::create_dir_all(&jpath_dir).expect("jpath directory should be created"); - - let importer = importer_dir.join("main.jsonnet"); - let shared_lib = jpath_dir.join("shared.libsonnet"); - fs::write(&importer, "import \"shared.libsonnet\"").expect("importer should be written"); - fs::write(&shared_lib, "{}").expect("shared lib should be written"); - - let importer = CanonicalPath::new( - importer - .canonicalize() - .expect("importer path should canonicalize"), - ); - let import_roots = vec![jpath_dir]; - let import_resolution = ImportResolution::new(&importer, &import_roots); - let resolved = import_resolution.resolve("shared.libsonnet"); - assert_eq!( - resolved, - Some(CanonicalPath::new( - shared_lib - .canonicalize() - .expect("shared lib path should canonicalize") - )) - ); - } - - #[test] - fn test_import_resolution_parse_entries() { - let tmp = TempDir::new().expect("tempdir should be created"); - let root = tmp.path(); - let importer = root.join("main.jsonnet"); - let local_lib = root.join("lib.jsonnet"); - fs::write(&importer, "local lib = import 
\"lib.jsonnet\"; lib") - .expect("importer should be written"); - fs::write(&local_lib, "{}").expect("local lib should be written"); - - let importer = CanonicalPath::new( - importer - .canonicalize() - .expect("importer path should canonicalize"), - ); - let resolved_lib = CanonicalPath::new( - local_lib - .canonicalize() - .expect("local lib path should canonicalize"), - ); - let doc = Document::new( - "local lib = import \"lib.jsonnet\"; lib".to_string(), - DocVersion::new(1), - ); - let import_resolution = ImportResolution::new(&importer, &[]); - - assert_eq!( - import_resolution.parse_entries(&doc), - vec![ImportEntry { - kind: crate::graph::ImportKind::Code, - binding_name: Some("lib".to_string()), - import_path: "lib.jsonnet".to_string(), - resolved_path: Some(resolved_lib), - }] - ); - } - - #[test] - fn test_import_resolution_parse_occurrences() { - let tmp = TempDir::new().expect("tempdir should be created"); - let root = tmp.path(); - let importer = root.join("main.jsonnet"); - let local_lib = root.join("lib.jsonnet"); - let code = "local lib = import \"lib.jsonnet\"; lib"; - fs::write(&importer, code).expect("importer should be written"); - fs::write(&local_lib, "{}").expect("local lib should be written"); - - let importer = CanonicalPath::new( - importer - .canonicalize() - .expect("importer path should canonicalize"), - ); - let resolved_lib = CanonicalPath::new( - local_lib - .canonicalize() - .expect("local lib path should canonicalize"), - ); - let doc = Document::new(code.to_string(), DocVersion::new(1)); - let import_resolution = ImportResolution::new(&importer, &[]); - - let start = u32::try_from( - code.find("\"lib.jsonnet\"") - .expect("import path should exist in source"), - ) - .expect("start offset should fit into u32"); - let end = start - + u32::try_from("\"lib.jsonnet\"".len()) - .expect("import path length should fit into u32"); - - assert_eq!( - import_resolution.parse_occurrences(&doc), - vec![ImportOccurrence { - entry: ImportEntry { 
- kind: crate::graph::ImportKind::Code, - binding_name: Some("lib".to_string()), - import_path: "lib.jsonnet".to_string(), - resolved_path: Some(resolved_lib), - }, - import_range: rowan::TextRange::new(start.into(), end.into()), - }] - ); - } -} diff --git a/crates/jrsonnet-lsp-import/src/resolve/mod.rs b/crates/jrsonnet-lsp-import/src/resolve/mod.rs new file mode 100644 index 00000000..b7278bf1 --- /dev/null +++ b/crates/jrsonnet-lsp-import/src/resolve/mod.rs @@ -0,0 +1,32 @@ +//! Shared import path resolution utilities. +//! +//! This module centralizes path resolution rules so all LSP subsystems resolve +//! imports consistently. + +mod parse_adapter; +mod path_resolution; + +use std::path::PathBuf; + +use jrsonnet_lsp_document::CanonicalPath; + +/// Import-resolution boundary for one importing file. +/// +/// This captures the importing file path and effective import roots once, +/// then exposes a single API that callers can use for raw path resolution and +/// import parsing with consistent behavior. 
+#[derive(Debug, Clone, Copy)] +pub struct ImportResolution<'a> { + pub(super) importer_file: &'a CanonicalPath, + pub(super) import_roots: &'a [PathBuf], +} + +impl<'a> ImportResolution<'a> { + #[must_use] + pub fn new(importer_file: &'a CanonicalPath, import_roots: &'a [PathBuf]) -> Self { + Self { + importer_file, + import_roots, + } + } +} diff --git a/crates/jrsonnet-lsp-import/src/resolve/parse_adapter.rs b/crates/jrsonnet-lsp-import/src/resolve/parse_adapter.rs new file mode 100644 index 00000000..66eb43f3 --- /dev/null +++ b/crates/jrsonnet-lsp-import/src/resolve/parse_adapter.rs @@ -0,0 +1,116 @@ +use jrsonnet_lsp_document::{CanonicalPath, Document}; + +use super::{path_resolution::resolve_import_path, ImportResolution}; + +use crate::graph::{ + parse_document_import_occurrences, parse_document_imports, ImportEntry, ImportOccurrence, +}; + +impl ImportResolution<'_> { + #[must_use] + pub fn resolve(self, import_path: &str) -> Option<CanonicalPath> { + resolve_import_path(self.importer_file, import_path, self.import_roots) + } + + #[must_use] + pub fn parse_entries(self, doc: &Document) -> Vec<ImportEntry> { + parse_document_imports(doc, &|import_path| self.resolve(import_path)) + } + + #[must_use] + pub fn parse_occurrences(self, doc: &Document) -> Vec<ImportOccurrence> { + parse_document_import_occurrences(doc, &|import_path| self.resolve(import_path)) + } +} + +#[cfg(test)] +mod tests { + use std::fs; + + use jrsonnet_lsp_document::DocVersion; + use tempfile::TempDir; + + use super::*; + + #[test] + fn test_import_resolution_parse_entries() { + let tmp = TempDir::new().expect("tempdir should be created"); + let root = tmp.path(); + let importer = root.join("main.jsonnet"); + let local_lib = root.join("lib.jsonnet"); + fs::write(&importer, "local lib = import \"lib.jsonnet\"; lib") + .expect("importer should be written"); + fs::write(&local_lib, "{}").expect("local lib should be written"); + + let importer = CanonicalPath::new( + importer + .canonicalize() + .expect("importer path should 
canonicalize"), + ); + let resolved_lib = CanonicalPath::new( + local_lib + .canonicalize() + .expect("local lib path should canonicalize"), + ); + let doc = Document::new( + "local lib = import \"lib.jsonnet\"; lib".to_string(), + DocVersion::new(1), + ); + let import_resolution = ImportResolution::new(&importer, &[]); + + assert_eq!( + import_resolution.parse_entries(&doc), + vec![ImportEntry { + kind: crate::graph::ImportKind::Code, + binding_name: Some("lib".to_string()), + import_path: "lib.jsonnet".to_string(), + resolved_path: Some(resolved_lib), + }] + ); + } + + #[test] + fn test_import_resolution_parse_occurrences() { + let tmp = TempDir::new().expect("tempdir should be created"); + let root = tmp.path(); + let importer = root.join("main.jsonnet"); + let local_lib = root.join("lib.jsonnet"); + let code = "local lib = import \"lib.jsonnet\"; lib"; + fs::write(&importer, code).expect("importer should be written"); + fs::write(&local_lib, "{}").expect("local lib should be written"); + + let importer = CanonicalPath::new( + importer + .canonicalize() + .expect("importer path should canonicalize"), + ); + let resolved_lib = CanonicalPath::new( + local_lib + .canonicalize() + .expect("local lib path should canonicalize"), + ); + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let import_resolution = ImportResolution::new(&importer, &[]); + + let start = u32::try_from( + code.find("\"lib.jsonnet\"") + .expect("import path should exist in source"), + ) + .expect("start offset should fit into u32"); + let end = start + + u32::try_from("\"lib.jsonnet\"".len()) + .expect("import path length should fit into u32"); + + assert_eq!( + import_resolution.parse_occurrences(&doc), + vec![ImportOccurrence { + entry: ImportEntry { + kind: crate::graph::ImportKind::Code, + binding_name: Some("lib".to_string()), + import_path: "lib.jsonnet".to_string(), + resolved_path: Some(resolved_lib), + }, + import_range: rowan::TextRange::new(start.into(), end.into()), + 
}] + ); + } +} diff --git a/crates/jrsonnet-lsp-import/src/resolve/path_resolution.rs b/crates/jrsonnet-lsp-import/src/resolve/path_resolution.rs new file mode 100644 index 00000000..fce13929 --- /dev/null +++ b/crates/jrsonnet-lsp-import/src/resolve/path_resolution.rs @@ -0,0 +1,113 @@ +use std::path::Path; + +use jrsonnet_lsp_document::CanonicalPath; + +#[must_use] +pub(super) fn resolve_import_path( + importer_file: &CanonicalPath, + import_path: &str, + import_roots: &[std::path::PathBuf], +) -> Option<CanonicalPath> { + resolve_import_path_from_base(importer_file.as_path(), import_path, import_roots) +} + +#[must_use] +fn resolve_import_path_from_base( + base_file: &Path, + import_path: &str, + import_roots: &[std::path::PathBuf], +) -> Option<CanonicalPath> { + let import = Path::new(import_path); + + if import.is_absolute() { + return canonical_if_exists(import); + } + + if let Some(parent) = base_file.parent() { + let relative = parent.join(import); + if let Some(canonical) = canonical_if_exists(&relative) { + return Some(canonical); + } + } + + for root in import_roots { + let candidate = root.join(import); + if let Some(canonical) = canonical_if_exists(&candidate) { + return Some(canonical); + } + } + + None +} + +fn canonical_if_exists(path: &Path) -> Option<CanonicalPath> { + path.canonicalize().ok().map(CanonicalPath::new) +} + +#[cfg(test)] +mod tests { + use std::fs; + + use tempfile::TempDir; + + use super::*; + use crate::resolve::ImportResolution; + + #[test] + fn test_resolve_relative_first() { + let tmp = TempDir::new().expect("tempdir should be created"); + let root = tmp.path(); + let importer = root.join("main.jsonnet"); + let local_lib = root.join("lib.jsonnet"); + fs::write(&importer, "import \"lib.jsonnet\"").expect("importer should be written"); + fs::write(&local_lib, "{}").expect("local lib should be written"); + + let importer = CanonicalPath::new( + importer + .canonicalize() + .expect("importer path should canonicalize"), + ); + let import_resolution = 
ImportResolution::new(&importer, &[]); + let resolved = import_resolution.resolve("lib.jsonnet"); + assert_eq!( + resolved, + Some(CanonicalPath::new( + local_lib + .canonicalize() + .expect("local lib path should canonicalize") + )) + ); + } + + #[test] + fn test_resolve_from_import_roots() { + let tmp = TempDir::new().expect("tempdir should be created"); + let root = tmp.path(); + let importer_dir = root.join("app"); + let jpath_dir = root.join("vendor"); + fs::create_dir_all(&importer_dir).expect("importer directory should be created"); + fs::create_dir_all(&jpath_dir).expect("jpath directory should be created"); + + let importer = importer_dir.join("main.jsonnet"); + let shared_lib = jpath_dir.join("shared.libsonnet"); + fs::write(&importer, "import \"shared.libsonnet\"").expect("importer should be written"); + fs::write(&shared_lib, "{}").expect("shared lib should be written"); + + let importer = CanonicalPath::new( + importer + .canonicalize() + .expect("importer path should canonicalize"), + ); + let import_roots = vec![jpath_dir]; + let import_resolution = ImportResolution::new(&importer, &import_roots); + let resolved = import_resolution.resolve("shared.libsonnet"); + assert_eq!( + resolved, + Some(CanonicalPath::new( + shared_lib + .canonicalize() + .expect("shared lib path should canonicalize") + )) + ); + } +} diff --git a/crates/jrsonnet-lsp-inference/src/analysis/build.rs b/crates/jrsonnet-lsp-inference/src/analysis/build.rs new file mode 100644 index 00000000..caa6373e --- /dev/null +++ b/crates/jrsonnet-lsp-inference/src/analysis/build.rs @@ -0,0 +1,94 @@ +use std::sync::Arc; + +use jrsonnet_lsp_document::Document; +use jrsonnet_lsp_types::{GlobalTy, GlobalTyStore, MutStore, Ty, TySubst}; +use parking_lot::RwLock; +use rowan::TextRange; +use rustc_hash::FxHashMap; + +use super::TypeAnalysis; +use crate::{ + env::{ImportResolver, TypeEnv}, + expr::infer_expr_ty_and_record, +}; + +impl TypeAnalysis { + /// Analyze a document and return the type 
analysis results. + #[must_use] + pub fn analyze(document: &Document) -> Self { + Self::analyze_with_global(document, Arc::new(GlobalTyStore::new())) + } + + /// Analyze a document with a shared global store. + pub fn analyze_with_global(document: &Document, global: Arc<GlobalTyStore>) -> Self { + let mut env = TypeEnv::new(Arc::clone(&global)); + let ast = document.ast(); + + let mut expr_types = FxHashMap::default(); + + let doc_ty = ast.expr().map_or(Ty::ANY, |expr| { + analyze_and_record(&expr, &mut env, &mut expr_types) + }); + + Self::finalize_analysis(global, env.into_store(), expr_types, doc_ty) + } + + /// Analyze a document with a shared global store and import resolver. + pub fn analyze_with_resolver( + document: &Document, + global: Arc<GlobalTyStore>, + import_resolver: Arc<dyn ImportResolver>, + ) -> Self { + let mut env = TypeEnv::with_import_resolver(Arc::clone(&global), import_resolver); + let ast = document.ast(); + + let mut expr_types = FxHashMap::default(); + + let doc_ty = ast.expr().map_or(Ty::ANY, |expr| { + analyze_and_record(&expr, &mut env, &mut expr_types) + }); + + Self::finalize_analysis(global, env.into_store(), expr_types, doc_ty) + } + + fn finalize_analysis( + global: Arc<GlobalTyStore>, + store: MutStore, + mut expr_types: FxHashMap<TextRange, Ty>, + document_type: Ty, + ) -> Self { + let local = store.into_local(); + let subst = TySubst::merge(global.as_ref(), &local); + + let map_ty = |ty: Ty| { + let mapped = subst.apply(ty); + if mapped.is_local() { + Ty::ANY + } else { + mapped + } + }; + + for ty in expr_types.values_mut() { + *ty = map_ty(*ty); + } + + let document_type = map_ty(document_type); + let document_type = GlobalTy::new(document_type).unwrap_or(GlobalTy::ANY); + + Self { + store: RwLock::new(MutStore::new(global)), + expr_types, + document_type, + } + } +} + +/// Analyze an expression and record types for it and all sub-expressions. 
+fn analyze_and_record( + expr: &jrsonnet_rowan_parser::nodes::Expr, + env: &mut TypeEnv, + expr_types: &mut FxHashMap<TextRange, Ty>, +) -> Ty { + infer_expr_ty_and_record(expr, env, None, expr_types) +} diff --git a/crates/jrsonnet-lsp-inference/src/analysis/mod.rs b/crates/jrsonnet-lsp-inference/src/analysis/mod.rs new file mode 100644 index 00000000..73e81615 --- /dev/null +++ b/crates/jrsonnet-lsp-inference/src/analysis/mod.rs @@ -0,0 +1,82 @@ +//! Type analysis storage and queries. +//! +//! Provides eager type analysis for Jsonnet documents. Analysis is computed +//! once during construction and results are immutable, making `TypeAnalysis` +//! safe to share across threads and cache in concurrent data structures. + +mod build; +mod queries; + +use std::sync::Arc; + +use jrsonnet_lsp_types::{GlobalTy, GlobalTyStore, MutStore, Ty}; +use parking_lot::RwLock; +use rowan::TextRange; +use rustc_hash::FxHashMap; + +/// Stores inferred types for all expressions, allowing queries by position. +/// +/// Internally uses interned `Ty` references for memory efficiency. +/// Analysis is computed eagerly during construction. +/// +/// This type is `Send + Sync` and can be safely shared across threads +/// and cached in concurrent data structures like moka. +pub struct TypeAnalysis { + /// Type store for interning and looking up types. + /// Uses `RwLock` because some query operations (like union) may intern new types. + store: RwLock<MutStore>, + /// Map from expression text range to interned type. + /// Immutable after construction. + expr_types: FxHashMap<TextRange, Ty>, + /// The inferred type of the document's root expression. 
+ document_type: GlobalTy, +} + +impl std::fmt::Debug for TypeAnalysis { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("TypeAnalysis") + .field("store", &"<RwLock<MutStore>>") + .field("expr_types_count", &self.expr_types.len()) + .field("document_type", &self.document_type) + .finish() + } +} + +impl Default for TypeAnalysis { + fn default() -> Self { + Self::new() + } +} + +impl TypeAnalysis { + /// Create a new empty type analysis with a default global store. + #[must_use] + pub fn new() -> Self { + Self { + store: RwLock::new(MutStore::new(Arc::new(GlobalTyStore::new()))), + expr_types: FxHashMap::default(), + document_type: GlobalTy::ANY, + } + } + + /// Create a new empty type analysis with a specific global store. + pub fn with_global(global: Arc<GlobalTyStore>) -> Self { + Self { + store: RwLock::new(MutStore::new(global)), + expr_types: FxHashMap::default(), + document_type: GlobalTy::ANY, + } + } + + /// Get the inferred type of the document's root expression. + #[inline] + pub fn document_type(&self) -> Ty { + self.document_type.into() + } + + /// Get the inferred type of the document's root expression as a global type. + #[inline] + pub fn document_type_global(&self) -> GlobalTy { + self.document_type + } +} diff --git a/crates/jrsonnet-lsp-inference/src/analysis.rs b/crates/jrsonnet-lsp-inference/src/analysis/queries.rs similarity index 74% rename from crates/jrsonnet-lsp-inference/src/analysis.rs rename to crates/jrsonnet-lsp-inference/src/analysis/queries.rs index 95457408..7ebce7e4 100644 --- a/crates/jrsonnet-lsp-inference/src/analysis.rs +++ b/crates/jrsonnet-lsp-inference/src/analysis/queries.rs @@ -1,162 +1,13 @@ -//! Type analysis storage and queries. -//! -//! Provides eager type analysis for Jsonnet documents. Analysis is computed -//! once during construction and results are immutable, making `TypeAnalysis` -//! safe to share across threads and cache in concurrent data structures. 
- -use std::sync::Arc; - -use jrsonnet_lsp_document::Document; use jrsonnet_lsp_types::{ - is_subtype_ty, DisplayContext, FunctionData, GlobalTy, GlobalTyStore, MutStore, ObjectData, Ty, - TyData, TySubst, + is_subtype_ty, DisplayContext, FunctionData, MutStore, ObjectData, Ty, TyData, }; use jrsonnet_rowan_parser::SyntaxNode; -use parking_lot::RwLock; use rowan::TextRange; use rustc_hash::FxHashMap; -use crate::{ - env::{ImportResolver, TypeEnv}, - expr::infer_expr_ty_and_record, -}; - -/// Stores inferred types for all expressions, allowing queries by position. -/// -/// Internally uses interned `Ty` references for memory efficiency. -/// Analysis is computed eagerly during construction. -/// -/// This type is `Send + Sync` and can be safely shared across threads -/// and cached in concurrent data structures like moka. -pub struct TypeAnalysis { - /// Type store for interning and looking up types. - /// Uses `RwLock` because some query operations (like union) may intern new types. - store: RwLock, - /// Map from expression text range to interned type. - /// Immutable after construction. - expr_types: FxHashMap, - /// The inferred type of the document's root expression. - document_type: GlobalTy, -} - -impl std::fmt::Debug for TypeAnalysis { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - f.debug_struct("TypeAnalysis") - .field("store", &">") - .field("expr_types_count", &self.expr_types.len()) - .field("document_type", &self.document_type) - .finish() - } -} - -impl Default for TypeAnalysis { - fn default() -> Self { - Self::new() - } -} +use super::TypeAnalysis; impl TypeAnalysis { - /// Create a new empty type analysis with a default global store. - #[must_use] - pub fn new() -> Self { - Self { - store: RwLock::new(MutStore::new(Arc::new(GlobalTyStore::new()))), - expr_types: FxHashMap::default(), - document_type: GlobalTy::ANY, - } - } - - /// Create a new empty type analysis with a specific global store. 
- pub fn with_global(global: Arc) -> Self { - Self { - store: RwLock::new(MutStore::new(global)), - expr_types: FxHashMap::default(), - document_type: GlobalTy::ANY, - } - } - - /// Analyze a document and return the type analysis results. - #[must_use] - pub fn analyze(document: &Document) -> Self { - Self::analyze_with_global(document, Arc::new(GlobalTyStore::new())) - } - - /// Analyze a document with a shared global store. - pub fn analyze_with_global(document: &Document, global: Arc) -> Self { - let mut env = TypeEnv::new(Arc::clone(&global)); - let ast = document.ast(); - - let mut expr_types = FxHashMap::default(); - - let doc_ty = ast.expr().map_or(Ty::ANY, |expr| { - analyze_and_record(&expr, &mut env, &mut expr_types) - }); - - Self::finalize_analysis(global, env.into_store(), expr_types, doc_ty) - } - - /// Analyze a document with a shared global store and import resolver. - pub fn analyze_with_resolver( - document: &Document, - global: Arc, - import_resolver: Arc, - ) -> Self { - let mut env = TypeEnv::with_import_resolver(Arc::clone(&global), import_resolver); - let ast = document.ast(); - - let mut expr_types = FxHashMap::default(); - - let doc_ty = ast.expr().map_or(Ty::ANY, |expr| { - analyze_and_record(&expr, &mut env, &mut expr_types) - }); - - Self::finalize_analysis(global, env.into_store(), expr_types, doc_ty) - } - - fn finalize_analysis( - global: Arc, - store: MutStore, - mut expr_types: FxHashMap, - document_type: Ty, - ) -> Self { - let local = store.into_local(); - let subst = TySubst::merge(global.as_ref(), &local); - - let map_ty = |ty: Ty| { - let mapped = subst.apply(ty); - if mapped.is_local() { - Ty::ANY - } else { - mapped - } - }; - - for ty in expr_types.values_mut() { - *ty = map_ty(*ty); - } - - let document_type = map_ty(document_type); - let document_type = GlobalTy::new(document_type).unwrap_or(GlobalTy::ANY); - - Self { - store: RwLock::new(MutStore::new(global)), - expr_types, - document_type, - } - } - - /// Get the 
inferred type of the document's root expression. - #[inline] - pub fn document_type(&self) -> Ty { - self.document_type.into() - } - - /// Get the inferred type of the document's root expression as a global type. - #[inline] - pub fn document_type_global(&self) -> GlobalTy { - self.document_type - } - /// Get the type of an expression at a specific position. /// /// Finds the smallest expression containing the position and returns its type. @@ -482,22 +333,9 @@ impl TypeAnalysis { } } -// ============================================================================ -// Analysis helpers (used during construction) -// ============================================================================ - -/// Analyze an expression and record types for it and all sub-expressions. -fn analyze_and_record( - expr: &jrsonnet_rowan_parser::nodes::Expr, - env: &mut TypeEnv, - expr_types: &mut FxHashMap, -) -> Ty { - infer_expr_ty_and_record(expr, env, None, expr_types) -} - #[cfg(test)] mod tests { - use jrsonnet_lsp_document::DocVersion; + use jrsonnet_lsp_document::{DocVersion, Document}; use jrsonnet_rowan_parser::AstNode; use super::*; diff --git a/crates/jrsonnet-lsp-scenario/src/scenario/diagnostics_steps.rs b/crates/jrsonnet-lsp-scenario/src/scenario/diagnostics_steps.rs new file mode 100644 index 00000000..77e9344b --- /dev/null +++ b/crates/jrsonnet-lsp-scenario/src/scenario/diagnostics_steps.rs @@ -0,0 +1,72 @@ +use super::*; + +/// Expected diagnostics notification for a URI. +/// +/// Asserts the full diagnostics payload for a file. 
+/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "1" +/// - step: diagnosticsSettled +/// - step: expectDiagnostics +/// file: main.jsonnet +/// diagnostics: [] +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ExpectDiagnosticsStep { + pub uri: String, + pub diagnostics: Vec, +} + +/// Barrier for "no new diagnostics arrive for idle_ms before timeout_ms". +/// +/// Waits until diagnostics traffic becomes idle. +/// +/// Example with defaults: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "1" +/// - step: diagnosticsSettled +/// - step: expectDiagnostics +/// file: main.jsonnet +/// diagnostics: [] +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +/// +/// Optional custom timing: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "1" +/// - step: diagnosticsSettled +/// timeout_ms: 2000 +/// idle_ms: 100 +/// - step: expectDiagnostics +/// file: main.jsonnet +/// diagnostics: [] +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub struct DiagnosticsSettledStep { + pub timeout_ms: u64, + pub idle_ms: u64, +} diff --git a/crates/jrsonnet-lsp-scenario/src/scenario/doctest_assertions.rs b/crates/jrsonnet-lsp-scenario/src/scenario/doctest_assertions.rs new file mode 100644 index 00000000..545dceb3 --- /dev/null +++ b/crates/jrsonnet-lsp-scenario/src/scenario/doctest_assertions.rs @@ -0,0 +1,81 @@ +use lsp_server::{Message, 
Notification, Response}; +use lsp_types::{ + notification::{DidOpenTextDocument, Notification as _, PublishDiagnostics}, + request::{HoverRequest, Request as _}, + Hover, HoverContents, MarkupContent, MarkupKind, PublishDiagnosticsParams, +}; + +use super::Scenario; + +pub fn assert_yaml_scenario_runs_without_error(yaml: &str) { + let base_dir = tempfile::tempdir().expect("create temp directory for scenario"); + let scenario = crate::parse_scenario_yaml(yaml, base_dir.path()).expect("parse scenario yaml"); + assert_scenario_runs_without_error(&scenario); +} + +pub fn assert_scenario_runs_without_error(scenario: &Scenario) { + let result = crate::run_scenario(scenario, |connection| loop { + let Ok(message) = connection.receiver.recv() else { + break; + }; + match message { + Message::Request(request) => { + let response = match request.method.as_str() { + HoverRequest::METHOD => { + let hover = Hover { + contents: HoverContents::Markup(MarkupContent { + kind: MarkupKind::Markdown, + value: "`number`".to_string(), + }), + range: None, + }; + match serde_json::to_value(hover) { + Ok(result) => Response::new_ok(request.id, result), + Err(_) => break, + } + } + _ => Response { + id: request.id, + result: None, + error: None, + }, + }; + if connection.sender.send(Message::Response(response)).is_err() { + break; + } + } + Message::Notification(notification) + if notification.method == DidOpenTextDocument::METHOD => + { + let Ok(params) = serde_json::from_value::( + notification.params, + ) else { + break; + }; + let publish = PublishDiagnosticsParams { + uri: params.text_document.uri, + version: Some(params.text_document.version), + diagnostics: vec![], + }; + let Ok(payload) = serde_json::to_value(publish) else { + break; + }; + let publish_notification = + Notification::new(PublishDiagnostics::METHOD.to_string(), payload); + if connection + .sender + .send(Message::Notification(publish_notification)) + .is_err() + { + break; + } + } + Message::Notification(notification) 
if notification.method == "exit" => break, + Message::Notification(_) | Message::Response(_) => {} + } + }); + assert!( + result.is_ok(), + "scenario should run without error: {result:?}" + ); +} diff --git a/crates/jrsonnet-lsp-scenario/src/scenario/document_steps.rs b/crates/jrsonnet-lsp-scenario/src/scenario/document_steps.rs new file mode 100644 index 00000000..7394971f --- /dev/null +++ b/crates/jrsonnet-lsp-scenario/src/scenario/document_steps.rs @@ -0,0 +1,186 @@ +use super::*; + +/// `textDocument/didOpen`. +/// +/// Opens a document in the scenario session. +/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: | +/// local x = 1; +/// x +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +/// +/// Optional fields: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "1" +/// language_id: jsonnet +/// version: 3 +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct OpenStep { + pub uri: String, + pub text: String, + pub language_id: String, + pub version: i32, +} + +/// `textDocument/didChange` full-document replacement. +/// +/// Replaces the full contents of an already-open document. 
+/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "1" +/// - step: changeFull +/// file: main.jsonnet +/// text: "2" +/// version: 2 +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ChangeFullStep { + pub uri: String, + pub text: String, + pub version: i32, +} + +impl ChangeFullStep { + #[must_use] + pub fn as_change_event(&self) -> TextDocumentContentChangeEvent { + TextDocumentContentChangeEvent { + range: None, + range_length: None, + text: self.text.clone(), + } + } +} + +/// `textDocument/didChange` incremental edit. +/// +/// Applies a range edit to an already-open document. +/// +/// Example using marker shorthand (`at` + `len`): +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: | +/// local [[target:1]] = 1; +/// target +/// - step: changeIncremental +/// file: main.jsonnet +/// at: target +/// len: 1 +/// text: "2" +/// version: 2 +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ChangeIncrementalStep { + pub uri: String, + pub range: Range, + pub text: String, + pub version: i32, +} + +impl ChangeIncrementalStep { + #[must_use] + pub fn as_change_event(&self) -> TextDocumentContentChangeEvent { + TextDocumentContentChangeEvent { + range: Some(self.range), + range_length: None, + text: self.text.clone(), + } + } +} + +/// `textDocument/didSave`. +/// +/// Emits a save notification for an open document. 
+/// +/// Example without text payload: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "1" +/// - step: save +/// file: main.jsonnet +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +/// +/// Optional `text` payload: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "1" +/// - step: save +/// file: main.jsonnet +/// text: "{ answer: 42 }" +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct SaveStep { + pub uri: String, + pub text: Option, +} + +/// `textDocument/didClose`. +/// +/// Closes a previously-open document. +/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "1" +/// - step: close +/// file: main.jsonnet +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct CloseStep { + pub uri: String, +} diff --git a/crates/jrsonnet-lsp-scenario/src/scenario/mod.rs b/crates/jrsonnet-lsp-scenario/src/scenario/mod.rs new file mode 100644 index 00000000..2c893d92 --- /dev/null +++ b/crates/jrsonnet-lsp-scenario/src/scenario/mod.rs @@ -0,0 +1,134 @@ +//! Canonical scenario model for multi-file, multi-step LSP timeline tests. +//! +//! YAML scenarios are parsed by [`crate::scenario_script::parse_scenario_yaml`] +//! and compiled into these strongly typed structures. +//! +//! This module is the compiled execution model; author scenarios in YAML. +//! +//! Example: +//! ```rust +//! use jrsonnet_lsp_scenario::{ +//! 
parse_scenario_yaml, +//! scenario::doctest_assertions::assert_scenario_runs_without_error, +//! }; +//! +//! let base_dir = tempfile::tempdir().expect("tempdir"); +//! +//! let yaml = r#" +//! steps: +//! - step: create +//! files: +//! main.jsonnet: |- +//! { answer: 42 } +//! open: [main.jsonnet] +//! - step: diagnosticsSettled +//! - step: expectDiagnostics +//! file: main.jsonnet +//! diagnostics: [] +//! "#; +//! +//! let actual = parse_scenario_yaml(yaml, base_dir.path()).expect("parse scenario"); +//! assert_scenario_runs_without_error(&actual); +//! ``` + +mod diagnostics_steps; +mod document_steps; +mod request_steps; +mod workspace_steps; + +pub use diagnostics_steps::{DiagnosticsSettledStep, ExpectDiagnosticsStep}; +pub use document_steps::{ChangeFullStep, ChangeIncrementalStep, CloseStep, OpenStep, SaveStep}; +use lsp_types::{ + CodeActionKind, CodeActionOrCommand, CodeLens, CompletionResponse, Diagnostic, + DocumentSymbolResponse, FileChangeType, GotoDefinitionResponse, Hover, InlayHint, Location, + Position, PrepareRenameResponse, Range, SemanticTokensRangeResult, SemanticTokensResult, + SignatureHelp, TextDocumentContentChangeEvent, TextEdit, WorkspaceEdit, + WorkspaceSymbolResponse, +}; +pub use request_steps::{ + ExpectCodeActionStep, ExpectCodeLensStep, ExpectCompletionStep, ExpectDeclarationStep, + ExpectDefinitionStep, ExpectDocumentSymbolStep, ExpectExecuteCommandStep, ExpectFormattingStep, + ExpectHoverStep, ExpectHoverTypeStep, ExpectInlayHintsStep, ExpectPrepareRenameStep, + ExpectReferencesStep, ExpectRenameStep, ExpectSemanticTokensFullStep, + ExpectSemanticTokensRangeStep, ExpectSignatureHelpStep, ExpectTypeDefinitionStep, + ExpectWorkspaceSymbolStep, RequestCodeActionStep, RequestCodeLensStep, RequestCompletionStep, + RequestDeclarationStep, RequestDefinitionStep, RequestDocumentSymbolStep, + RequestExecuteCommandStep, RequestFormattingStep, RequestHoverStep, RequestInlayHintsStep, + RequestPrepareRenameStep, RequestReferencesStep, 
RequestRenameStep, + RequestSemanticTokensFullStep, RequestSemanticTokensRangeStep, RequestSignatureHelpStep, + RequestTypeDefinitionStep, RequestWorkspaceSymbolStep, TypeMatchMode, +}; +use serde::Deserialize; +pub use workspace_steps::{ + ConfigStep, DeleteFileStep, NotifyWatchedFilesStep, ScenarioFileChangeType, + WatchedFileChangeStep, WriteFileStep, +}; + +/// A full timeline scenario. +#[derive(Debug, Clone, PartialEq)] +pub struct Scenario { + pub steps: Vec, +} + +impl Scenario { + #[must_use] + pub fn new(steps: Vec) -> Self { + Self { steps } + } +} + +/// One timeline step. +#[derive(Debug, Clone, PartialEq)] +pub enum ScenarioStep { + Open(OpenStep), + ChangeFull(ChangeFullStep), + ChangeIncremental(ChangeIncrementalStep), + Save(SaveStep), + Close(CloseStep), + Config(ConfigStep), + WriteFile(WriteFileStep), + DeleteFile(DeleteFileStep), + NotifyWatchedFiles(NotifyWatchedFilesStep), + RequestCodeAction(RequestCodeActionStep), + ExpectCodeAction(ExpectCodeActionStep), + RequestReferences(RequestReferencesStep), + ExpectReferences(ExpectReferencesStep), + RequestDefinition(RequestDefinitionStep), + ExpectDefinition(ExpectDefinitionStep), + RequestDeclaration(RequestDeclarationStep), + ExpectDeclaration(ExpectDeclarationStep), + RequestTypeDefinition(RequestTypeDefinitionStep), + ExpectTypeDefinition(ExpectTypeDefinitionStep), + RequestPrepareRename(RequestPrepareRenameStep), + ExpectPrepareRename(ExpectPrepareRenameStep), + RequestRename(RequestRenameStep), + ExpectRename(ExpectRenameStep), + RequestHover(RequestHoverStep), + ExpectHover(ExpectHoverStep), + ExpectHoverType(ExpectHoverTypeStep), + RequestSignatureHelp(RequestSignatureHelpStep), + ExpectSignatureHelp(ExpectSignatureHelpStep), + RequestCompletion(RequestCompletionStep), + ExpectCompletion(ExpectCompletionStep), + RequestFormatting(RequestFormattingStep), + ExpectFormatting(ExpectFormattingStep), + RequestSemanticTokensFull(RequestSemanticTokensFullStep), + 
ExpectSemanticTokensFull(ExpectSemanticTokensFullStep), + RequestSemanticTokensRange(RequestSemanticTokensRangeStep), + ExpectSemanticTokensRange(ExpectSemanticTokensRangeStep), + RequestInlayHints(RequestInlayHintsStep), + ExpectInlayHints(ExpectInlayHintsStep), + RequestDocumentSymbol(RequestDocumentSymbolStep), + ExpectDocumentSymbol(ExpectDocumentSymbolStep), + RequestWorkspaceSymbol(RequestWorkspaceSymbolStep), + ExpectWorkspaceSymbol(ExpectWorkspaceSymbolStep), + RequestCodeLens(RequestCodeLensStep), + ExpectCodeLens(ExpectCodeLensStep), + RequestExecuteCommand(RequestExecuteCommandStep), + ExpectExecuteCommand(ExpectExecuteCommandStep), + ExpectDiagnostics(ExpectDiagnosticsStep), + DiagnosticsSettled(DiagnosticsSettledStep), +} + +#[doc(hidden)] +pub mod doctest_assertions; diff --git a/crates/jrsonnet-lsp-scenario/src/scenario.rs b/crates/jrsonnet-lsp-scenario/src/scenario/request_steps.rs similarity index 65% rename from crates/jrsonnet-lsp-scenario/src/scenario.rs rename to crates/jrsonnet-lsp-scenario/src/scenario/request_steps.rs index bdbca233..fb6c4640 100644 --- a/crates/jrsonnet-lsp-scenario/src/scenario.rs +++ b/crates/jrsonnet-lsp-scenario/src/scenario/request_steps.rs @@ -1,442 +1,4 @@ -//! Canonical scenario model for multi-file, multi-step LSP timeline tests. -//! -//! YAML scenarios are parsed by [`crate::scenario_script::parse_scenario_yaml`] -//! and compiled into these strongly typed structures. -//! -//! This module is the compiled execution model; author scenarios in YAML. -//! -//! Example: -//! ```rust -//! use jrsonnet_lsp_scenario::{ -//! parse_scenario_yaml, -//! scenario::doctest_assertions::assert_scenario_runs_without_error, -//! }; -//! -//! let base_dir = tempfile::tempdir().expect("tempdir"); -//! -//! let yaml = r#" -//! steps: -//! - step: create -//! files: -//! main.jsonnet: |- -//! { answer: 42 } -//! open: [main.jsonnet] -//! - step: diagnosticsSettled -//! - step: expectDiagnostics -//! file: main.jsonnet -//! 
diagnostics: [] -//! "#; -//! -//! let actual = parse_scenario_yaml(yaml, base_dir.path()).expect("parse scenario"); -//! assert_scenario_runs_without_error(&actual); -//! ``` - -use lsp_types::{ - CodeActionKind, CodeActionOrCommand, CodeLens, CompletionResponse, Diagnostic, - DocumentSymbolResponse, FileChangeType, GotoDefinitionResponse, Hover, InlayHint, Location, - Position, PrepareRenameResponse, Range, SemanticTokensRangeResult, SemanticTokensResult, - SignatureHelp, TextDocumentContentChangeEvent, TextEdit, WorkspaceEdit, - WorkspaceSymbolResponse, -}; -use serde::Deserialize; - -/// A full timeline scenario. -#[derive(Debug, Clone, PartialEq)] -pub struct Scenario { - pub steps: Vec, -} - -impl Scenario { - #[must_use] - pub fn new(steps: Vec) -> Self { - Self { steps } - } -} - -/// One timeline step. -#[derive(Debug, Clone, PartialEq)] -pub enum ScenarioStep { - Open(OpenStep), - ChangeFull(ChangeFullStep), - ChangeIncremental(ChangeIncrementalStep), - Save(SaveStep), - Close(CloseStep), - Config(ConfigStep), - WriteFile(WriteFileStep), - DeleteFile(DeleteFileStep), - NotifyWatchedFiles(NotifyWatchedFilesStep), - RequestCodeAction(RequestCodeActionStep), - ExpectCodeAction(ExpectCodeActionStep), - RequestReferences(RequestReferencesStep), - ExpectReferences(ExpectReferencesStep), - RequestDefinition(RequestDefinitionStep), - ExpectDefinition(ExpectDefinitionStep), - RequestDeclaration(RequestDeclarationStep), - ExpectDeclaration(ExpectDeclarationStep), - RequestTypeDefinition(RequestTypeDefinitionStep), - ExpectTypeDefinition(ExpectTypeDefinitionStep), - RequestPrepareRename(RequestPrepareRenameStep), - ExpectPrepareRename(ExpectPrepareRenameStep), - RequestRename(RequestRenameStep), - ExpectRename(ExpectRenameStep), - RequestHover(RequestHoverStep), - ExpectHover(ExpectHoverStep), - ExpectHoverType(ExpectHoverTypeStep), - RequestSignatureHelp(RequestSignatureHelpStep), - ExpectSignatureHelp(ExpectSignatureHelpStep), - 
RequestCompletion(RequestCompletionStep), - ExpectCompletion(ExpectCompletionStep), - RequestFormatting(RequestFormattingStep), - ExpectFormatting(ExpectFormattingStep), - RequestSemanticTokensFull(RequestSemanticTokensFullStep), - ExpectSemanticTokensFull(ExpectSemanticTokensFullStep), - RequestSemanticTokensRange(RequestSemanticTokensRangeStep), - ExpectSemanticTokensRange(ExpectSemanticTokensRangeStep), - RequestInlayHints(RequestInlayHintsStep), - ExpectInlayHints(ExpectInlayHintsStep), - RequestDocumentSymbol(RequestDocumentSymbolStep), - ExpectDocumentSymbol(ExpectDocumentSymbolStep), - RequestWorkspaceSymbol(RequestWorkspaceSymbolStep), - ExpectWorkspaceSymbol(ExpectWorkspaceSymbolStep), - RequestCodeLens(RequestCodeLensStep), - ExpectCodeLens(ExpectCodeLensStep), - RequestExecuteCommand(RequestExecuteCommandStep), - ExpectExecuteCommand(ExpectExecuteCommandStep), - ExpectDiagnostics(ExpectDiagnosticsStep), - DiagnosticsSettled(DiagnosticsSettledStep), -} - -/// `textDocument/didOpen`. -/// -/// Opens a document in the scenario session. -/// -/// Example: -/// ```rust -/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; -/// let yaml = r#" -/// steps: -/// - step: open -/// file: main.jsonnet -/// text: | -/// local x = 1; -/// x -/// "#; -/// -/// assert_yaml_scenario_runs_without_error(yaml); -/// ``` -/// -/// Optional fields: -/// ```rust -/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; -/// let yaml = r#" -/// steps: -/// - step: open -/// file: main.jsonnet -/// text: "1" -/// language_id: jsonnet -/// version: 3 -/// "#; -/// -/// assert_yaml_scenario_runs_without_error(yaml); -/// ``` -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct OpenStep { - pub uri: String, - pub text: String, - pub language_id: String, - pub version: i32, -} - -/// `textDocument/didChange` full-document replacement. 
-/// -/// Replaces the full contents of an already-open document. -/// -/// Example: -/// ```rust -/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; -/// let yaml = r#" -/// steps: -/// - step: open -/// file: main.jsonnet -/// text: "1" -/// - step: changeFull -/// file: main.jsonnet -/// text: "2" -/// version: 2 -/// "#; -/// -/// assert_yaml_scenario_runs_without_error(yaml); -/// ``` -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct ChangeFullStep { - pub uri: String, - pub text: String, - pub version: i32, -} - -impl ChangeFullStep { - #[must_use] - pub fn as_change_event(&self) -> TextDocumentContentChangeEvent { - TextDocumentContentChangeEvent { - range: None, - range_length: None, - text: self.text.clone(), - } - } -} - -/// `textDocument/didChange` incremental edit. -/// -/// Applies a range edit to an already-open document. -/// -/// Example using marker shorthand (`at` + `len`): -/// ```rust -/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; -/// let yaml = r#" -/// steps: -/// - step: open -/// file: main.jsonnet -/// text: | -/// local [[target:1]] = 1; -/// target -/// - step: changeIncremental -/// file: main.jsonnet -/// at: target -/// len: 1 -/// text: "2" -/// version: 2 -/// "#; -/// -/// assert_yaml_scenario_runs_without_error(yaml); -/// ``` -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct ChangeIncrementalStep { - pub uri: String, - pub range: Range, - pub text: String, - pub version: i32, -} - -impl ChangeIncrementalStep { - #[must_use] - pub fn as_change_event(&self) -> TextDocumentContentChangeEvent { - TextDocumentContentChangeEvent { - range: Some(self.range), - range_length: None, - text: self.text.clone(), - } - } -} - -/// `textDocument/didSave`. -/// -/// Emits a save notification for an open document. 
-/// -/// Example without text payload: -/// ```rust -/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; -/// let yaml = r#" -/// steps: -/// - step: open -/// file: main.jsonnet -/// text: "1" -/// - step: save -/// file: main.jsonnet -/// "#; -/// -/// assert_yaml_scenario_runs_without_error(yaml); -/// ``` -/// -/// Optional `text` payload: -/// ```rust -/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; -/// let yaml = r#" -/// steps: -/// - step: open -/// file: main.jsonnet -/// text: "1" -/// - step: save -/// file: main.jsonnet -/// text: "{ answer: 42 }" -/// "#; -/// -/// assert_yaml_scenario_runs_without_error(yaml); -/// ``` -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct SaveStep { - pub uri: String, - pub text: Option, -} - -/// `textDocument/didClose`. -/// -/// Closes a previously-open document. -/// -/// Example: -/// ```rust -/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; -/// let yaml = r#" -/// steps: -/// - step: open -/// file: main.jsonnet -/// text: "1" -/// - step: close -/// file: main.jsonnet -/// "#; -/// -/// assert_yaml_scenario_runs_without_error(yaml); -/// ``` -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct CloseStep { - pub uri: String, -} - -/// `workspace/didChangeConfiguration`. -/// -/// Pushes configuration updates to the server. -/// -/// Example: -/// ```rust -/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; -/// let yaml = r#" -/// steps: -/// - step: config -/// settings: -/// jsonnet: -/// diagnostics: -/// maxProblems: 200 -/// "#; -/// -/// assert_yaml_scenario_runs_without_error(yaml); -/// ``` -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct ConfigStep { - pub settings: serde_json::Value, -} - -/// Writes text to a file on disk. 
-/// -/// Example: -/// ```rust -/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; -/// let yaml = r#" -/// steps: -/// - step: writeFile -/// path: libs/util.jsonnet -/// text: "{ x: 1 }" -/// "#; -/// -/// assert_yaml_scenario_runs_without_error(yaml); -/// ``` -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct WriteFileStep { - pub path: String, - pub text: String, -} - -/// Deletes a file from disk. -/// -/// Example: -/// ```rust -/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; -/// let yaml = r#" -/// steps: -/// - step: writeFile -/// path: libs/util.jsonnet -/// text: "{ x: 1 }" -/// - step: deleteFile -/// path: libs/util.jsonnet -/// "#; -/// -/// assert_yaml_scenario_runs_without_error(yaml); -/// ``` -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct DeleteFileStep { - pub path: String, -} - -/// File change kind for watched-files notifications. -/// -/// Example values: -/// ```rust -/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; -/// let yaml = r#" -/// steps: -/// - step: notifyWatchedFiles -/// changes: -/// - path: vendor/new.jsonnet -/// type: created -/// - path: vendor/existing.jsonnet -/// type: changed -/// - path: vendor/old.jsonnet -/// type: deleted -/// "#; -/// -/// assert_yaml_scenario_runs_without_error(yaml); -/// ``` -#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize)] -#[serde(rename_all = "camelCase")] -pub enum ScenarioFileChangeType { - Created, - Changed, - Deleted, -} - -impl ScenarioFileChangeType { - #[must_use] - pub const fn as_lsp(self) -> FileChangeType { - match self { - Self::Created => FileChangeType::CREATED, - Self::Changed => FileChangeType::CHANGED, - Self::Deleted => FileChangeType::DELETED, - } - } -} - -/// One watched-file change event. 
-/// -/// Example: -/// ```rust -/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; -/// let yaml = r#" -/// steps: -/// - step: notifyWatchedFiles -/// changes: -/// - path: vendor/lib.jsonnet -/// type: changed -/// "#; -/// -/// assert_yaml_scenario_runs_without_error(yaml); -/// ``` -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct WatchedFileChangeStep { - pub uri: String, - pub change_type: ScenarioFileChangeType, -} - -/// `workspace/didChangeWatchedFiles`. -/// -/// Emits a watched-files change notification. -/// -/// Example: -/// ```rust -/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; -/// let yaml = r#" -/// steps: -/// - step: notifyWatchedFiles -/// changes: -/// - path: vendor/lib.jsonnet -/// type: created -/// - path: vendor/old.jsonnet -/// type: deleted -/// "#; -/// -/// assert_yaml_scenario_runs_without_error(yaml); -/// ``` -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct NotifyWatchedFilesStep { - pub changes: Vec, -} +use super::*; /// `textDocument/codeAction` request. /// @@ -1667,160 +1229,3 @@ pub struct ExpectExecuteCommandStep { pub(crate) id: i32, pub result: Option, } - -/// Expected diagnostics notification for a URI. -/// -/// Asserts the full diagnostics payload for a file. -/// -/// Example: -/// ```rust -/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; -/// let yaml = r#" -/// steps: -/// - step: open -/// file: main.jsonnet -/// text: "1" -/// - step: diagnosticsSettled -/// - step: expectDiagnostics -/// file: main.jsonnet -/// diagnostics: [] -/// "#; -/// -/// assert_yaml_scenario_runs_without_error(yaml); -/// ``` -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct ExpectDiagnosticsStep { - pub uri: String, - pub diagnostics: Vec, -} - -/// Barrier for "no new diagnostics arrive for idle_ms before timeout_ms". 
-/// -/// Waits until diagnostics traffic becomes idle. -/// -/// Example with defaults: -/// ```rust -/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; -/// let yaml = r#" -/// steps: -/// - step: open -/// file: main.jsonnet -/// text: "1" -/// - step: diagnosticsSettled -/// - step: expectDiagnostics -/// file: main.jsonnet -/// diagnostics: [] -/// "#; -/// -/// assert_yaml_scenario_runs_without_error(yaml); -/// ``` -/// -/// Optional custom timing: -/// ```rust -/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; -/// let yaml = r#" -/// steps: -/// - step: open -/// file: main.jsonnet -/// text: "1" -/// - step: diagnosticsSettled -/// timeout_ms: 2000 -/// idle_ms: 100 -/// - step: expectDiagnostics -/// file: main.jsonnet -/// diagnostics: [] -/// "#; -/// -/// assert_yaml_scenario_runs_without_error(yaml); -/// ``` -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub struct DiagnosticsSettledStep { - pub timeout_ms: u64, - pub idle_ms: u64, -} - -#[doc(hidden)] -pub mod doctest_assertions { - use lsp_server::{Message, Notification, Response}; - use lsp_types::{ - notification::{DidOpenTextDocument, Notification as _, PublishDiagnostics}, - request::{HoverRequest, Request as _}, - Hover, HoverContents, MarkupContent, MarkupKind, PublishDiagnosticsParams, - }; - - use super::Scenario; - - pub fn assert_yaml_scenario_runs_without_error(yaml: &str) { - let base_dir = tempfile::tempdir().expect("create temp directory for scenario"); - let scenario = - crate::parse_scenario_yaml(yaml, base_dir.path()).expect("parse scenario yaml"); - assert_scenario_runs_without_error(&scenario); - } - - pub fn assert_scenario_runs_without_error(scenario: &Scenario) { - let result = crate::run_scenario(scenario, |connection| loop { - let Ok(message) = connection.receiver.recv() else { - break; - }; - match message { - Message::Request(request) => { - let response = match 
request.method.as_str() { - HoverRequest::METHOD => { - let hover = Hover { - contents: HoverContents::Markup(MarkupContent { - kind: MarkupKind::Markdown, - value: "`number`".to_string(), - }), - range: None, - }; - match serde_json::to_value(hover) { - Ok(result) => Response::new_ok(request.id, result), - Err(_) => break, - } - } - _ => Response { - id: request.id, - result: None, - error: None, - }, - }; - if connection.sender.send(Message::Response(response)).is_err() { - break; - } - } - Message::Notification(notification) - if notification.method == DidOpenTextDocument::METHOD => - { - let Ok(params) = serde_json::from_value::( - notification.params, - ) else { - break; - }; - let publish = PublishDiagnosticsParams { - uri: params.text_document.uri, - version: Some(params.text_document.version), - diagnostics: vec![], - }; - let Ok(payload) = serde_json::to_value(publish) else { - break; - }; - let publish_notification = - Notification::new(PublishDiagnostics::METHOD.to_string(), payload); - if connection - .sender - .send(Message::Notification(publish_notification)) - .is_err() - { - break; - } - } - Message::Notification(notification) if notification.method == "exit" => break, - Message::Notification(_) | Message::Response(_) => {} - } - }); - assert!( - result.is_ok(), - "scenario should run without error: {result:?}" - ); - } -} diff --git a/crates/jrsonnet-lsp-scenario/src/scenario/workspace_steps.rs b/crates/jrsonnet-lsp-scenario/src/scenario/workspace_steps.rs new file mode 100644 index 00000000..226c659a --- /dev/null +++ b/crates/jrsonnet-lsp-scenario/src/scenario/workspace_steps.rs @@ -0,0 +1,148 @@ +use super::*; + +/// `workspace/didChangeConfiguration`. +/// +/// Pushes configuration updates to the server. 
+/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: config +/// settings: +/// jsonnet: +/// diagnostics: +/// maxProblems: 200 +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ConfigStep { + pub settings: serde_json::Value, +} + +/// Writes text to a file on disk. +/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: writeFile +/// path: libs/util.jsonnet +/// text: "{ x: 1 }" +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct WriteFileStep { + pub path: String, + pub text: String, +} + +/// Deletes a file from disk. +/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: writeFile +/// path: libs/util.jsonnet +/// text: "{ x: 1 }" +/// - step: deleteFile +/// path: libs/util.jsonnet +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct DeleteFileStep { + pub path: String, +} + +/// File change kind for watched-files notifications. 
+/// +/// Example values: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: notifyWatchedFiles +/// changes: +/// - path: vendor/new.jsonnet +/// type: created +/// - path: vendor/existing.jsonnet +/// type: changed +/// - path: vendor/old.jsonnet +/// type: deleted +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize)] +#[serde(rename_all = "camelCase")] +pub enum ScenarioFileChangeType { + Created, + Changed, + Deleted, +} + +impl ScenarioFileChangeType { + #[must_use] + pub const fn as_lsp(self) -> FileChangeType { + match self { + Self::Created => FileChangeType::CREATED, + Self::Changed => FileChangeType::CHANGED, + Self::Deleted => FileChangeType::DELETED, + } + } +} + +/// One watched-file change event. +/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: notifyWatchedFiles +/// changes: +/// - path: vendor/lib.jsonnet +/// type: changed +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct WatchedFileChangeStep { + pub uri: String, + pub change_type: ScenarioFileChangeType, +} + +/// `workspace/didChangeWatchedFiles`. +/// +/// Emits a watched-files change notification. 
+/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: notifyWatchedFiles +/// changes: +/// - path: vendor/lib.jsonnet +/// type: created +/// - path: vendor/old.jsonnet +/// type: deleted +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct NotifyWatchedFilesStep { + pub changes: Vec, +} diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_runner/helpers.rs b/crates/jrsonnet-lsp-scenario/src/scenario_runner/helpers.rs index 7e479399..027da775 100644 --- a/crates/jrsonnet-lsp-scenario/src/scenario_runner/helpers.rs +++ b/crates/jrsonnet-lsp-scenario/src/scenario_runner/helpers.rs @@ -1,5 +1,7 @@ -use std::collections::{BTreeSet, HashMap}; -use std::fmt; +use std::{ + collections::{BTreeSet, HashMap}, + fmt, +}; use serde_json::Value; use thiserror::Error; diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_runner/runner.rs b/crates/jrsonnet-lsp-scenario/src/scenario_runner/runner.rs index 51bbb230..bfc374b3 100644 --- a/crates/jrsonnet-lsp-scenario/src/scenario_runner/runner.rs +++ b/crates/jrsonnet-lsp-scenario/src/scenario_runner/runner.rs @@ -215,11 +215,13 @@ mod tests { }; use super::{super::transport::RpcError, run_scenario}; - use crate::scenario::{ - DiagnosticsSettledStep, ExpectDiagnosticsStep, ExpectHoverStep, OpenStep, RequestHoverStep, - Scenario, ScenarioStep, + use crate::{ + scenario::{ + DiagnosticsSettledStep, ExpectDiagnosticsStep, ExpectHoverStep, OpenStep, + RequestHoverStep, Scenario, ScenarioStep, + }, + scenario_runner::{helpers::JsonDifference, RunnerError}, }; - use crate::scenario_runner::{helpers::JsonDifference, RunnerError}; fn test_diagnostic() -> Diagnostic { Diagnostic { diff --git a/crates/jrsonnet-lsp-scope/src/resolver/definitions.rs b/crates/jrsonnet-lsp-scope/src/resolver/definitions.rs new file mode 100644 index 
00000000..102c79ac --- /dev/null +++ b/crates/jrsonnet-lsp-scope/src/resolver/definitions.rs @@ -0,0 +1,260 @@ +use jrsonnet_rowan_parser::{ + nodes::{ + Bind, BindFunction, Destruct, ExprFunction, ForSpec, MemberBindStmt, Param, StmtLocal, + }, + AstNode, SyntaxKind, SyntaxNode, SyntaxToken, +}; +use rowan::TextRange; + +/// Find the definition range of a symbol by walking up the scope chain. +/// +/// Starting from a token that references a variable, this walks up the AST +/// looking for the binding that defines the variable. +#[must_use] +pub fn find_definition_range(token: &SyntaxToken, name: &str) -> Option { + let mut current = token.parent()?; + + while let Some(parent) = current.parent() { + if let Some(range) = check_scope_for_definition(&parent, ¤t, name) { + return Some(range); + } + current = parent; + } + + None +} + +/// Check if a scope contains a definition for the given name. +/// +/// `child` is the node we came from (used for visibility checking). +#[must_use] +pub fn check_scope_for_definition( + scope: &SyntaxNode, + child: &SyntaxNode, + name: &str, +) -> Option { + match scope.kind() { + SyntaxKind::EXPR => check_expr_for_definition(scope, child, name), + SyntaxKind::EXPR_FUNCTION => check_function_for_definition(scope, name), + SyntaxKind::BIND_FUNCTION => check_bind_function_for_definition(scope, name), + SyntaxKind::FOR_SPEC => check_for_spec_for_definition(scope, name), + SyntaxKind::OBJ_BODY_MEMBER_LIST => check_object_for_definition(scope, name), + // Array/object comprehensions: the FOR_SPEC bindings are visible to the expression + SyntaxKind::EXPR_ARRAY_COMP | SyntaxKind::OBJ_BODY_COMP => { + check_comprehension_for_definition(scope, name) + } + _ => None, + } +} + +/// Check an Expr for local definitions. +/// +/// Local definitions are only visible after their declaration point, +/// so we only check bindings that appear before the reference. 
+fn check_expr_for_definition( + expr: &SyntaxNode, + child: &SyntaxNode, + name: &str, +) -> Option { + let mut last_match = None; + + for stmt_node in expr.children() { + if stmt_node.kind() != SyntaxKind::STMT_LOCAL { + continue; + } + + // Only consider bindings that appear before our reference. + if stmt_node.text_range().end() > child.text_range().start() { + continue; + } + + if let Some(stmt_local) = StmtLocal::cast(stmt_node) { + for bind in stmt_local.binds() { + if let Some(range) = check_bind_for_name(&bind, name) { + // Keep track of the last (nearest) match for shadowing. + last_match = Some(range); + } + } + } + } + + last_match +} + +/// Check a Bind for a name. +#[must_use] +pub fn check_bind_for_name(bind: &Bind, name: &str) -> Option { + match bind { + Bind::BindDestruct(bd) => { + let destruct = bd.into()?; + if let Destruct::DestructFull(full) = destruct { + let bind_name = full.name()?; + let ident = bind_name.ident_lit()?; + if ident.text() == name { + return Some(bind_name.syntax().text_range()); + } + } + None + } + Bind::BindFunction(bf) => { + let bind_name = bf.name()?; + let ident = bind_name.ident_lit()?; + if ident.text() == name { + return Some(bind_name.syntax().text_range()); + } + None + } + } +} + +/// Check function parameters for a definition. +fn check_function_for_definition(func_node: &SyntaxNode, name: &str) -> Option { + let func = ExprFunction::cast(func_node.clone())?; + let params = func.params_desc()?; + + for param in params.params() { + if let Some(range) = check_param_for_name(¶m, name) { + return Some(range); + } + } + None +} + +/// Check `BindFunction` parameters for a definition. +fn check_bind_function_for_definition(func_node: &SyntaxNode, name: &str) -> Option { + let func = BindFunction::cast(func_node.clone())?; + let params = func.params()?; + + for param in params.params() { + if let Some(range) = check_param_for_name(¶m, name) { + return Some(range); + } + } + None +} + +/// Check a parameter for a name. 
+#[must_use] +pub fn check_param_for_name(param: &Param, name: &str) -> Option { + let destruct = param.destruct()?; + if let Destruct::DestructFull(full) = destruct { + let param_name = full.name()?; + let ident = param_name.ident_lit()?; + if ident.text() == name { + return Some(param_name.syntax().text_range()); + } + } + None +} + +/// Check `ForSpec` for a definition. +fn check_for_spec_for_definition(for_node: &SyntaxNode, name: &str) -> Option { + let for_spec = ForSpec::cast(for_node.clone())?; + let destruct = for_spec.bind()?; + + if let Destruct::DestructFull(full) = destruct { + let bind_name = full.name()?; + let ident = bind_name.ident_lit()?; + if ident.text() == name { + return Some(bind_name.syntax().text_range()); + } + } + None +} + +/// Check object locals for a definition. +fn check_object_for_definition(obj_body: &SyntaxNode, name: &str) -> Option { + for member_node in obj_body.children() { + if member_node.kind() != SyntaxKind::MEMBER_BIND_STMT { + continue; + } + if let Some(member_bind) = MemberBindStmt::cast(member_node) { + if let Some(obj_local) = member_bind.obj_local() { + if let Some(bind) = obj_local.bind() { + if let Some(range) = check_bind_for_name(&bind, name) { + return Some(range); + } + } + } + } + } + None +} + +/// Check comprehension (array or object) for `FOR_SPEC` definitions. +/// +/// In `[x for x in arr]`, the `FOR_SPEC` binding is visible to the expression. 
+fn check_comprehension_for_definition(comp_node: &SyntaxNode, name: &str) -> Option { + for child in comp_node.children() { + if child.kind() != SyntaxKind::FOR_SPEC { + continue; + } + if let Some(range) = check_for_spec_for_definition(&child, name) { + return Some(range); + } + } + None +} + +#[cfg(test)] +mod tests { + use jrsonnet_lsp_document::{token_at_offset, ByteOffset, DocVersion, Document}; + use jrsonnet_rowan_parser::AstNode; + + use super::find_definition_range; + + #[test] + fn test_find_definition_range_local_variable() { + let code = "local x = 1; x + 1"; + // ^def ^ref + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let ast = doc.ast(); + + // Find the reference 'x' at position 13 + let token = token_at_offset(ast.syntax(), ByteOffset::from(13u32)) + .expect("should find token at position 13"); + assert_eq!(token.text(), "x"); + + let range = find_definition_range(&token, "x").expect("should find definition range"); + + // Definition is at position 6 + assert_eq!(range.start(), 6.into()); + } + + #[test] + fn test_find_definition_range_function_param() { + let code = "local f(x) = x * 2; f(3)"; + // ^param ^ref + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let ast = doc.ast(); + + // Find the reference 'x' at position 13 + let token = token_at_offset(ast.syntax(), ByteOffset::from(13u32)) + .expect("should find token at position 13"); + assert_eq!(token.text(), "x"); + + let range = + find_definition_range(&token, "x").expect("should find definition range for parameter"); + + // Parameter is at position 8 + assert_eq!(range.start(), 8.into()); + } + + #[test] + fn test_shadowing() { + let code = "local x = 1; local x = 2; x"; + // ^def1 ^def2 ^ref + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let ast = doc.ast(); + + // Find the final 'x' reference + let token = token_at_offset(ast.syntax(), ByteOffset::from(26u32)) + .expect("should find token at position 26"); + 
assert_eq!(token.text(), "x"); + + let range = find_definition_range(&token, "x").expect("should find definition range"); + + // Should resolve to the second (closer) definition at position 19 + assert_eq!(range.start(), 19.into()); + } +} diff --git a/crates/jrsonnet-lsp-scope/src/resolver/mod.rs b/crates/jrsonnet-lsp-scope/src/resolver/mod.rs new file mode 100644 index 00000000..fa1b4200 --- /dev/null +++ b/crates/jrsonnet-lsp-scope/src/resolver/mod.rs @@ -0,0 +1,20 @@ +//! Scope resolution for Jsonnet AST. +//! +//! This module provides shared functionality for resolving symbol definitions +//! and finding references within Jsonnet code. +//! +//! - `definitions`: linear scope walking definition lookup. +//! - `references`: reference collection and cached `ScopeResolver`. +//! - `scope_index`: indexed scope tree for O(log n) lookups. + +mod definitions; +mod references; +mod scope_index; + +pub use definitions::{ + check_bind_for_name, check_param_for_name, check_scope_for_definition, find_definition_range, +}; +pub use references::{ + find_all_references, find_all_references_for_rename, references_definition, ScopeResolver, +}; +pub use scope_index::ScopeIndex; diff --git a/crates/jrsonnet-lsp-scope/src/resolver/references.rs b/crates/jrsonnet-lsp-scope/src/resolver/references.rs new file mode 100644 index 00000000..1fc965c1 --- /dev/null +++ b/crates/jrsonnet-lsp-scope/src/resolver/references.rs @@ -0,0 +1,253 @@ +use jrsonnet_rowan_parser::{SyntaxKind, SyntaxNode, SyntaxToken}; +use rowan::{TextRange, TextSize}; +use rustc_hash::FxHashMap; + +use super::definitions::{check_scope_for_definition, find_definition_range}; +use crate::bindings::{is_definition_site, is_variable_reference}; + +/// Check if a reference resolves to a specific definition. +/// +/// Walks up the scope chain from the token to find its definition, +/// then checks if it matches the expected definition range. 
+#[must_use] +pub fn references_definition(token: &SyntaxToken, name: &str, def_range: TextRange) -> bool { + let Some(mut current) = token.parent() else { + return false; + }; + + while let Some(parent) = current.parent() { + if let Some(found_range) = check_scope_for_definition(&parent, ¤t, name) { + return found_range == def_range; + } + current = parent; + } + + false +} + +/// Find all references to a name in the AST. +/// +/// This function walks the entire AST looking for identifiers that: +/// 1. Match the given name +/// 2. Are either the definition or references that resolve to the definition +/// +/// The `definition_range` should be the range of the Name node at the definition site. +pub fn find_all_references( + root: &SyntaxNode, + name: &str, + definition_range: TextRange, +) -> Vec { + let mut references = Vec::new(); + + // Walk all tokens looking for identifiers matching the name. + for token in root + .descendants_with_tokens() + .filter_map(rowan::NodeOrToken::into_token) + { + if token.kind() == SyntaxKind::IDENT && token.text() == name { + // Check if this is a reference (ExprVar). + if is_variable_reference(&token) { + // It's a reference - check if it resolves to our definition. + if references_definition(&token, name, definition_range) { + references.push(token.text_range()); + } + } else if is_definition_site(&token) { + // It's a definition - check if it matches our target definition. + if let Some(parent) = token.parent() { + if parent.text_range() == definition_range { + references.push(parent.text_range()); + } + } + } + } + } + + references +} + +/// Find all references including both definition and uses, returning identifier ranges. +/// +/// This is a variant of `find_all_references` that returns the identifier token ranges +/// instead of the Name node ranges. This is useful for rename operations where +/// we want to replace just the identifier text. 
+pub fn find_all_references_for_rename( + root: &SyntaxNode, + name: &str, + definition_range: TextRange, +) -> Vec { + let mut references = Vec::new(); + + // Walk all tokens looking for identifiers matching the name. + for token in root + .descendants_with_tokens() + .filter_map(rowan::NodeOrToken::into_token) + { + if token.kind() == SyntaxKind::IDENT && token.text() == name { + // Check if this is a reference (ExprVar). + if is_variable_reference(&token) { + // It's a reference - check if it resolves to our definition. + if references_definition(&token, name, definition_range) { + // For rename, we want just the identifier range, not the Name node. + references.push(token.text_range()); + } + } else if is_definition_site(&token) { + // It's a definition - check if it matches our target definition. + if let Some(parent) = token.parent() { + if parent.text_range() == definition_range { + // Return the identifier range, not the Name node. + references.push(token.text_range()); + } + } + } + } + } + + references +} + +/// Cached scope resolver for efficient repeated lookups. +/// +/// Precomputes a mapping from each variable reference to its definition. +pub struct ScopeResolver { + /// Maps reference token start position to definition's `TextRange`. + reference_to_def: FxHashMap, +} + +impl ScopeResolver { + /// Build a scope resolver for the given AST root. + /// + /// Walks the AST once to build the scope map. + pub fn new(root: &SyntaxNode) -> Self { + let mut reference_to_def = FxHashMap::default(); + + for token in root + .descendants_with_tokens() + .filter_map(rowan::NodeOrToken::into_token) + { + if token.kind() != SyntaxKind::IDENT { + continue; + } + + if !is_variable_reference(&token) { + continue; + } + + if let Some(def_range) = find_definition_range(&token, token.text()) { + reference_to_def.insert(token.text_range().start(), def_range); + } + } + + Self { reference_to_def } + } + + /// Get the definition range for a reference token. 
+ /// + /// Returns the `TextRange` of the Name node at the definition site, + /// or None if the token is not a reference or has no definition. + #[must_use] + pub fn get_definition(&self, token: &SyntaxToken) -> Option { + self.reference_to_def + .get(&token.text_range().start()) + .copied() + } + + /// Check if a reference resolves to a specific definition. + #[must_use] + pub fn references_definition(&self, token: &SyntaxToken, def_range: TextRange) -> bool { + self.get_definition(token) == Some(def_range) + } + + /// Find all references to a definition, returning identifier token ranges. + pub fn find_references( + &self, + root: &SyntaxNode, + name: &str, + definition_range: TextRange, + ) -> Vec { + let mut references = Vec::new(); + + for token in root + .descendants_with_tokens() + .filter_map(rowan::NodeOrToken::into_token) + { + if token.kind() != SyntaxKind::IDENT || token.text() != name { + continue; + } + + if is_variable_reference(&token) { + if self.references_definition(&token, definition_range) { + references.push(token.text_range()); + } + continue; + } + + if !is_definition_site(&token) { + continue; + } + + let Some(parent) = token.parent() else { + continue; + }; + + if parent.text_range() == definition_range { + references.push(token.text_range()); + } + } + + references + } +} + +#[cfg(test)] +mod tests { + use jrsonnet_lsp_document::{DocVersion, Document}; + use jrsonnet_rowan_parser::AstNode; + use rowan::TextRange; + + use super::find_all_references; + + #[test] + fn test_find_all_references() { + let code = "local x = 1; x + x"; + // ^def ^ref ^ref + // 0123456789... 
+ let doc = Document::new(code.to_string(), DocVersion::new(1)); + let ast = doc.ast(); + + // Get the definition range (Name node at position 6) + let def_range = TextRange::new(6.into(), 7.into()); + + let refs = find_all_references(ast.syntax(), "x", def_range); + // def at 6, refs at 13 and 17 + assert_eq!( + refs, + vec![ + TextRange::new(6.into(), 7.into()), // definition + TextRange::new(13.into(), 14.into()), // first use + TextRange::new(17.into(), 18.into()), // second use + ] + ); + } + + #[test] + fn test_references_respects_scope() { + let code = "local x = 1; local f(x) = x; x"; + // ^def1 ^def2 ^ref2 ^ref1 + // 0123456789... + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let ast = doc.ast(); + + // Get the definition range for outer x (Name node at position 6) + let def_range = TextRange::new(6.into(), 7.into()); + + let refs = find_all_references(ast.syntax(), "x", def_range); + // Should find: the definition (6) and the last reference (29), not the inner x. + assert_eq!( + refs, + vec![ + TextRange::new(6.into(), 7.into()), // outer x definition + TextRange::new(29.into(), 30.into()), // final reference to outer x + ] + ); + } +} diff --git a/crates/jrsonnet-lsp-scope/src/resolver.rs b/crates/jrsonnet-lsp-scope/src/resolver/scope_index.rs similarity index 55% rename from crates/jrsonnet-lsp-scope/src/resolver.rs rename to crates/jrsonnet-lsp-scope/src/resolver/scope_index.rs index ba230ec9..c6acf59a 100644 --- a/crates/jrsonnet-lsp-scope/src/resolver.rs +++ b/crates/jrsonnet-lsp-scope/src/resolver/scope_index.rs @@ -1,404 +1,14 @@ -//! Scope resolution for Jsonnet AST. -//! -//! This module provides shared functionality for resolving symbol definitions -//! and finding references within Jsonnet code. -//! -//! The `ScopeResolver` struct precomputes a scope map for O(1) definition lookups. -//! The `ScopeIndex` struct provides O(log n) lookups using binary search. 
- use std::sync::RwLock; use jrsonnet_rowan_parser::{ nodes::{ Bind, BindFunction, Destruct, ExprFunction, ForSpec, MemberBindStmt, Param, StmtLocal, }, - AstNode, SyntaxKind, SyntaxNode, SyntaxToken, + AstNode, SyntaxKind, SyntaxNode, }; use rowan::{TextRange, TextSize}; use rustc_hash::FxHashMap; -use crate::bindings::{is_definition_site, is_variable_reference}; - -/// Find the definition range of a symbol by walking up the scope chain. -/// -/// Starting from a token that references a variable, this walks up the AST -/// looking for the binding that defines the variable. -#[must_use] -pub fn find_definition_range(token: &SyntaxToken, name: &str) -> Option { - let mut current = token.parent()?; - - while let Some(parent) = current.parent() { - if let Some(range) = check_scope_for_definition(&parent, ¤t, name) { - return Some(range); - } - current = parent; - } - - None -} - -/// Check if a scope contains a definition for the given name. -/// -/// `child` is the node we came from (used for visibility checking). -#[must_use] -pub fn check_scope_for_definition( - scope: &SyntaxNode, - child: &SyntaxNode, - name: &str, -) -> Option { - match scope.kind() { - SyntaxKind::EXPR => check_expr_for_definition(scope, child, name), - SyntaxKind::EXPR_FUNCTION => check_function_for_definition(scope, name), - SyntaxKind::BIND_FUNCTION => check_bind_function_for_definition(scope, name), - SyntaxKind::FOR_SPEC => check_for_spec_for_definition(scope, name), - SyntaxKind::OBJ_BODY_MEMBER_LIST => check_object_for_definition(scope, name), - // Array/object comprehensions: the FOR_SPEC bindings are visible to the expression - SyntaxKind::EXPR_ARRAY_COMP | SyntaxKind::OBJ_BODY_COMP => { - check_comprehension_for_definition(scope, name) - } - _ => None, - } -} - -/// Check an Expr for local definitions. -/// -/// Local definitions are only visible after their declaration point, -/// so we only check bindings that appear before the reference. 
-fn check_expr_for_definition( - expr: &SyntaxNode, - child: &SyntaxNode, - name: &str, -) -> Option { - let mut last_match = None; - - for stmt_node in expr.children() { - if stmt_node.kind() == SyntaxKind::STMT_LOCAL { - // Only consider bindings that appear before our reference - if stmt_node.text_range().end() > child.text_range().start() { - continue; - } - - if let Some(stmt_local) = StmtLocal::cast(stmt_node) { - for bind in stmt_local.binds() { - if let Some(range) = check_bind_for_name(&bind, name) { - // Keep track of the last (nearest) match for shadowing - last_match = Some(range); - } - } - } - } - } - - last_match -} - -/// Check a Bind for a name. -#[must_use] -pub fn check_bind_for_name(bind: &Bind, name: &str) -> Option { - match bind { - Bind::BindDestruct(bd) => { - let destruct = bd.into()?; - if let Destruct::DestructFull(full) = destruct { - let bind_name = full.name()?; - let ident = bind_name.ident_lit()?; - if ident.text() == name { - return Some(bind_name.syntax().text_range()); - } - } - None - } - Bind::BindFunction(bf) => { - let bind_name = bf.name()?; - let ident = bind_name.ident_lit()?; - if ident.text() == name { - return Some(bind_name.syntax().text_range()); - } - None - } - } -} - -/// Check function parameters for a definition. -fn check_function_for_definition(func_node: &SyntaxNode, name: &str) -> Option { - let func = ExprFunction::cast(func_node.clone())?; - let params = func.params_desc()?; - - for param in params.params() { - if let Some(range) = check_param_for_name(¶m, name) { - return Some(range); - } - } - None -} - -/// Check `BindFunction` parameters for a definition. -fn check_bind_function_for_definition(func_node: &SyntaxNode, name: &str) -> Option { - let func = BindFunction::cast(func_node.clone())?; - let params = func.params()?; - - for param in params.params() { - if let Some(range) = check_param_for_name(¶m, name) { - return Some(range); - } - } - None -} - -/// Check a parameter for a name. 
-#[must_use] -pub fn check_param_for_name(param: &Param, name: &str) -> Option { - let destruct = param.destruct()?; - if let Destruct::DestructFull(full) = destruct { - let param_name = full.name()?; - let ident = param_name.ident_lit()?; - if ident.text() == name { - return Some(param_name.syntax().text_range()); - } - } - None -} - -/// Check `ForSpec` for a definition. -fn check_for_spec_for_definition(for_node: &SyntaxNode, name: &str) -> Option { - let for_spec = ForSpec::cast(for_node.clone())?; - let destruct = for_spec.bind()?; - - if let Destruct::DestructFull(full) = destruct { - let bind_name = full.name()?; - let ident = bind_name.ident_lit()?; - if ident.text() == name { - return Some(bind_name.syntax().text_range()); - } - } - None -} - -/// Check object locals for a definition. -fn check_object_for_definition(obj_body: &SyntaxNode, name: &str) -> Option { - for member_node in obj_body.children() { - if member_node.kind() == SyntaxKind::MEMBER_BIND_STMT { - if let Some(member_bind) = MemberBindStmt::cast(member_node) { - if let Some(obj_local) = member_bind.obj_local() { - if let Some(bind) = obj_local.bind() { - if let Some(range) = check_bind_for_name(&bind, name) { - return Some(range); - } - } - } - } - } - } - None -} - -/// Check comprehension (array or object) for `FOR_SPEC` definitions. -/// -/// In `[x for x in arr]`, the `FOR_SPEC` binding is visible to the expression. -fn check_comprehension_for_definition(comp_node: &SyntaxNode, name: &str) -> Option { - for child in comp_node.children() { - if child.kind() != SyntaxKind::FOR_SPEC { - continue; - } - if let Some(range) = check_for_spec_for_definition(&child, name) { - return Some(range); - } - } - None -} - -/// Check if a reference resolves to a specific definition. -/// -/// Walks up the scope chain from the token to find its definition, -/// then checks if it matches the expected definition range. 
-#[must_use] -pub fn references_definition(token: &SyntaxToken, name: &str, def_range: TextRange) -> bool { - let Some(mut current) = token.parent() else { - return false; - }; - - while let Some(parent) = current.parent() { - if let Some(found_range) = check_scope_for_definition(&parent, ¤t, name) { - return found_range == def_range; - } - current = parent; - } - - false -} - -/// Find all references to a name in the AST. -/// -/// This function walks the entire AST looking for identifiers that: -/// 1. Match the given name -/// 2. Are either the definition or references that resolve to the definition -/// -/// The `definition_range` should be the range of the Name node at the definition site. -pub fn find_all_references( - root: &SyntaxNode, - name: &str, - definition_range: TextRange, -) -> Vec { - let mut references = Vec::new(); - - // Walk all tokens looking for identifiers matching the name - for token in root - .descendants_with_tokens() - .filter_map(rowan::NodeOrToken::into_token) - { - if token.kind() == SyntaxKind::IDENT && token.text() == name { - // Check if this is a reference (ExprVar) - if is_variable_reference(&token) { - // It's a reference - check if it resolves to our definition - if references_definition(&token, name, definition_range) { - references.push(token.text_range()); - } - } else if is_definition_site(&token) { - // It's a definition - check if it matches our target definition - if let Some(parent) = token.parent() { - if parent.text_range() == definition_range { - references.push(parent.text_range()); - } - } - } - } - } - - references -} - -/// Find all references including both definition and uses, returning identifier ranges. -/// -/// This is a variant of `find_all_references` that returns the identifier token ranges -/// instead of the Name node ranges. This is useful for rename operations where -/// we want to replace just the identifier text. 
-pub fn find_all_references_for_rename( - root: &SyntaxNode, - name: &str, - definition_range: TextRange, -) -> Vec { - let mut references = Vec::new(); - - // Walk all tokens looking for identifiers matching the name - for token in root - .descendants_with_tokens() - .filter_map(rowan::NodeOrToken::into_token) - { - if token.kind() == SyntaxKind::IDENT && token.text() == name { - // Check if this is a reference (ExprVar) - if is_variable_reference(&token) { - // It's a reference - check if it resolves to our definition - if references_definition(&token, name, definition_range) { - // For rename, we want just the identifier range, not the Name node - references.push(token.text_range()); - } - } else if is_definition_site(&token) { - // It's a definition - check if it matches our target definition - if let Some(parent) = token.parent() { - if parent.text_range() == definition_range { - // Return the identifier range, not the Name node - references.push(token.text_range()); - } - } - } - } - } - - references -} - -/// Cached scope resolver for efficient repeated lookups. -/// -/// Precomputes a mapping from each variable reference to its definition. -pub struct ScopeResolver { - /// Maps reference token start position to definition's `TextRange`. - reference_to_def: FxHashMap, -} - -impl ScopeResolver { - /// Build a scope resolver for the given AST root. - /// - /// Walks the AST once to build the scope map. - pub fn new(root: &SyntaxNode) -> Self { - let mut reference_to_def = FxHashMap::default(); - - for token in root - .descendants_with_tokens() - .filter_map(rowan::NodeOrToken::into_token) - { - if token.kind() != SyntaxKind::IDENT { - continue; - } - - if !is_variable_reference(&token) { - continue; - } - - if let Some(def_range) = find_definition_range(&token, token.text()) { - reference_to_def.insert(token.text_range().start(), def_range); - } - } - - Self { reference_to_def } - } - - /// Get the definition range for a reference token. 
- /// - /// Returns the `TextRange` of the Name node at the definition site, - /// or None if the token is not a reference or has no definition. - #[must_use] - pub fn get_definition(&self, token: &SyntaxToken) -> Option { - self.reference_to_def - .get(&token.text_range().start()) - .copied() - } - - /// Check if a reference resolves to a specific definition. - #[must_use] - pub fn references_definition(&self, token: &SyntaxToken, def_range: TextRange) -> bool { - self.get_definition(token) == Some(def_range) - } - - /// Find all references to a definition, returning identifier token ranges. - pub fn find_references( - &self, - root: &SyntaxNode, - name: &str, - definition_range: TextRange, - ) -> Vec { - let mut references = Vec::new(); - - for token in root - .descendants_with_tokens() - .filter_map(rowan::NodeOrToken::into_token) - { - if token.kind() != SyntaxKind::IDENT || token.text() != name { - continue; - } - - if is_variable_reference(&token) { - if self.references_definition(&token, definition_range) { - references.push(token.text_range()); - } - continue; - } - - if !is_definition_site(&token) { - continue; - } - - let Some(parent) = token.parent() else { - continue; - }; - - if parent.text_range() == definition_range { - references.push(token.text_range()); - } - } - - references - } -} - /// A binding in a scope - maps a name to its definition range. #[derive(Debug, Clone)] struct ScopeBinding { @@ -661,12 +271,12 @@ impl ScopeIndex { /// Find the innermost scope containing a position. fn find_innermost_scope(&self, pos: TextSize) -> Option { - // Binary search to find candidate scopes + // Binary search to find candidate scopes. let search_idx = self .scope_starts .partition_point(|(start, _)| *start <= pos); - // Check scopes from the found position backwards + // Check scopes from the found position backwards. 
let mut best: Option = None; let mut best_size = u32::MAX; @@ -701,13 +311,13 @@ impl ScopeIndex { loop { let scope = self.scopes.get(scope_idx)?; - // Search bindings in reverse order for shadowing (last match wins) + // Search bindings in reverse order for shadowing (last match wins). for binding in scope.bindings.iter().rev() { if binding.name != name { continue; } - // Check visibility + // Check visibility. if let Some(visible_after) = binding.visible_after { if pos < visible_after { continue; @@ -717,7 +327,7 @@ impl ScopeIndex { return Some(binding.range); } - // Move to parent scope + // Move to parent scope. scope_idx = scope.parent?; } } @@ -736,7 +346,7 @@ impl ScopeIndex { return Vec::new(); }; - // Check cache first + // Check cache first. { let cache = self .scope_chain_cache @@ -747,10 +357,10 @@ impl ScopeIndex { } } - // Compute the scope chain + // Compute the scope chain. let chain = self.compute_scope_chain(scope_idx); - // Cache and return + // Cache and return. self.scope_chain_cache .write() .unwrap_or_else(std::sync::PoisonError::into_inner) @@ -787,10 +397,10 @@ impl ScopeIndex { return Vec::new(); }; - // Get or compute cached bindings for this scope chain + // Get or compute cached bindings for this scope chain. let cached = self.get_or_compute_bindings(scope_idx); - // Filter by visibility at the query position + // Filter by visibility at the query position. cached .into_iter() .filter(|b| { @@ -803,7 +413,7 @@ impl ScopeIndex { /// Get or compute cached bindings for a scope chain. fn get_or_compute_bindings(&self, scope_idx: usize) -> Vec { - // Check cache first + // Check cache first. { let cache = self .bindings_cache @@ -814,10 +424,10 @@ impl ScopeIndex { } } - // Compute bindings for the entire scope chain + // Compute bindings for the entire scope chain. let bindings = self.compute_bindings(scope_idx); - // Cache and return + // Cache and return. 
self.bindings_cache .write() .unwrap_or_else(std::sync::PoisonError::into_inner) @@ -855,112 +465,12 @@ impl ScopeIndex { #[cfg(test)] mod tests { - use jrsonnet_lsp_document::{token_at_offset, ByteOffset, DocVersion, Document}; - use jrsonnet_rowan_parser::AstNode; - - use super::*; - - #[test] - fn test_find_definition_range_local_variable() { - let code = "local x = 1; x + 1"; - // ^def ^ref - let doc = Document::new(code.to_string(), DocVersion::new(1)); - let ast = doc.ast(); - - // Find the reference 'x' at position 13 - let token = token_at_offset(ast.syntax(), ByteOffset::from(13u32)) - .expect("should find token at position 13"); - assert_eq!(token.text(), "x"); - - let range = find_definition_range(&token, "x").expect("should find definition range"); - - // Definition is at position 6 - assert_eq!(range.start(), 6.into()); - } - - #[test] - fn test_find_definition_range_function_param() { - let code = "local f(x) = x * 2; f(3)"; - // ^param ^ref - let doc = Document::new(code.to_string(), DocVersion::new(1)); - let ast = doc.ast(); - - // Find the reference 'x' at position 13 - let token = token_at_offset(ast.syntax(), ByteOffset::from(13u32)) - .expect("should find token at position 13"); - assert_eq!(token.text(), "x"); - - let range = - find_definition_range(&token, "x").expect("should find definition range for parameter"); - - // Parameter is at position 8 - assert_eq!(range.start(), 8.into()); - } - - #[test] - fn test_find_all_references() { - let code = "local x = 1; x + x"; - // ^def ^ref ^ref - // 0123456789... 
- let doc = Document::new(code.to_string(), DocVersion::new(1)); - let ast = doc.ast(); - - // Get the definition range (Name node at position 6) - let def_range = TextRange::new(6.into(), 7.into()); - - let refs = find_all_references(ast.syntax(), "x", def_range); - // def at 6, refs at 13 and 17 - assert_eq!( - refs, - vec![ - TextRange::new(6.into(), 7.into()), // definition - TextRange::new(13.into(), 14.into()), // first use - TextRange::new(17.into(), 18.into()), // second use - ] - ); - } - - #[test] - fn test_shadowing() { - let code = "local x = 1; local x = 2; x"; - // ^def1 ^def2 ^ref - let doc = Document::new(code.to_string(), DocVersion::new(1)); - let ast = doc.ast(); - - // Find the final 'x' reference - let token = token_at_offset(ast.syntax(), ByteOffset::from(26u32)) - .expect("should find token at position 26"); - assert_eq!(token.text(), "x"); - - let range = find_definition_range(&token, "x").expect("should find definition range"); - - // Should resolve to the second (closer) definition at position 19 - assert_eq!(range.start(), 19.into()); - } - - #[test] - fn test_references_respects_scope() { - let code = "local x = 1; local f(x) = x; x"; - // ^def1 ^def2 ^ref2 ^ref1 - // 0123456789... 
- let doc = Document::new(code.to_string(), DocVersion::new(1)); - let ast = doc.ast(); - - // Get the definition range for outer x (Name node at position 6) - let def_range = TextRange::new(6.into(), 7.into()); - - let refs = find_all_references(ast.syntax(), "x", def_range); - // Should find: the definition (6) and the last reference (29), not the inner x - assert_eq!( - refs, - vec![ - TextRange::new(6.into(), 7.into()), // outer x definition - TextRange::new(29.into(), 30.into()), // final reference to outer x - ] - ); - } + use jrsonnet_lsp_document::{DocVersion, Document}; + use jrsonnet_rowan_parser::{AstNode, SyntaxKind}; + use rowan::TextRange; - // ScopeIndex tests + use super::ScopeIndex; + use crate::{bindings::is_variable_reference, resolver::find_definition_range}; #[test] fn test_scope_index_local_variable() { @@ -1028,14 +538,14 @@ mod tests { #[test] fn test_scope_index_matches_linear_search() { - // Verify that ScopeIndex produces the same results as the linear search + // Verify that ScopeIndex produces the same results as the linear search. let code = "local a = 1; local f(x, y) = x + y; local b = f(a, 2); b"; let doc = Document::new(code.to_string(), DocVersion::new(1)); let ast = doc.ast(); let index = ScopeIndex::new(ast.syntax()); - // Test various positions + // Test various positions. for token in ast .syntax() .descendants_with_tokens() @@ -1070,9 +580,9 @@ mod tests { let index = ScopeIndex::new(ast.syntax()); - // Position 13 (inside function body) should have multiple scopes + // Position 13 (inside function body) should have multiple scopes. let chain = index.scope_chain(13.into()); - // Function body is nested within multiple syntax nodes + // Function body is nested within multiple syntax nodes. 
assert_eq!(chain.len(), 4); } @@ -1085,7 +595,7 @@ mod tests { let index = ScopeIndex::new(ast.syntax()); - // At position 30 (after both bindings), both 'a' and 'b' should be visible + // At position 30 (after both bindings), both 'a' and 'b' should be visible. let bindings = index.bindings_at(30.into()); let mut names: Vec<_> = bindings.iter().map(|(n, _)| n.as_str()).collect(); names.sort_unstable(); @@ -1094,14 +604,14 @@ mod tests { #[test] fn test_scope_chain_cache_consistency() { - // Test that multiple calls to scope_chain return consistent results + // Test that multiple calls to scope_chain return consistent results. let code = "local f(x) = x * 2; f(3)"; let doc = Document::new(code.to_string(), DocVersion::new(1)); let ast = doc.ast(); let index = ScopeIndex::new(ast.syntax()); - // Call multiple times at the same position - should return identical results + // Call multiple times at the same position - should return identical results. let chain1 = index.scope_chain(13.into()); let chain2 = index.scope_chain(13.into()); let chain3 = index.scope_chain(13.into()); @@ -1109,13 +619,13 @@ mod tests { assert_eq!(chain1, chain2, "Repeated calls should return same result"); assert_eq!(chain2, chain3, "Repeated calls should return same result"); - // Verify the cache is populated (we get results, proving the mechanism works) + // Verify the cache is populated (we get results, proving the mechanism works). assert!(!chain1.is_empty(), "Should have at least one scope"); } #[test] fn test_bindings_cache_with_visibility() { - // Test that bindings cache correctly handles visibility filtering + // Test that bindings cache correctly handles visibility filtering. 
let code = "local a = 1; local b = 2; local c = 3; a + b + c"; // 0 1 2 3 4 // 0123456789012345678901234567890123456789012345678 @@ -1124,24 +634,24 @@ mod tests { let index = ScopeIndex::new(ast.syntax()); - // At position 15 (between 'local a' and 'local b'), only 'a' should be visible + // At position 15 (between 'local a' and 'local b'), only 'a' should be visible. let bindings_15 = index.bindings_at(15.into()); let names_15: Vec<_> = bindings_15.iter().map(|(n, _)| n.as_str()).collect(); assert_eq!(names_15, vec!["a"]); - // At position 28 (between 'local b' and 'local c'), 'a' and 'b' should be visible + // At position 28 (between 'local b' and 'local c'), 'a' and 'b' should be visible. let bindings_28 = index.bindings_at(28.into()); let mut names_28: Vec<_> = bindings_28.iter().map(|(n, _)| n.as_str()).collect(); names_28.sort_unstable(); assert_eq!(names_28, vec!["a", "b"]); - // At position 45 (after all locals), all should be visible + // At position 45 (after all locals), all should be visible. let bindings_45 = index.bindings_at(45.into()); let mut names_45: Vec<_> = bindings_45.iter().map(|(n, _)| n.as_str()).collect(); names_45.sort_unstable(); assert_eq!(names_45, vec!["a", "b", "c"]); - // Repeated call should give same result (using cache) + // Repeated call should give same result (using cache). let bindings_45_again = index.bindings_at(45.into()); let mut names_45_again: Vec<_> = bindings_45_again.iter().map(|(n, _)| n.as_str()).collect(); @@ -1151,7 +661,7 @@ mod tests { #[test] fn test_cache_handles_different_scopes() { - // Test that caching works correctly across different scopes + // Test that caching works correctly across different scopes. 
let code = "local outer = 1; local f(inner) = inner + outer; outer + f(2)"; // 0 1 2 3 4 5 6 // 01234567890123456789012345678901234567890123456789012345678901234 @@ -1160,7 +670,7 @@ mod tests { let index = ScopeIndex::new(ast.syntax()); - // Inside function (position 35, the 'inner' reference) + // Inside function (position 35, the 'inner' reference). let bindings_in_func = index.bindings_at(35.into()); let mut names_in_func: Vec<_> = bindings_in_func.iter().map(|(n, _)| n.as_str()).collect(); names_in_func.sort_unstable(); @@ -1173,7 +683,7 @@ mod tests { "Should see 'outer' inside function" ); - // Outside function (position 58, after function definition) + // Outside function (position 58, after function definition). let bindings_outside = index.bindings_at(58.into()); let names_outside: Vec<_> = bindings_outside.iter().map(|(n, _)| n.as_str()).collect(); assert!( diff --git a/crates/jrsonnet-lsp/src/server.rs b/crates/jrsonnet-lsp/src/server.rs index a029c23b..762f97c8 100644 --- a/crates/jrsonnet-lsp/src/server.rs +++ b/crates/jrsonnet-lsp/src/server.rs @@ -6,6 +6,8 @@ mod async_requests; mod import_graph; mod notifications; +mod request_dispatch; +mod requests; mod watched_files; use std::{ @@ -26,35 +28,14 @@ use jrsonnet_lsp_inference::{ use jrsonnet_lsp_types::GlobalTyStore; use lsp_server::{Connection, Message, Notification, Request, RequestId, Response}; use lsp_types::{ - notification::{ - Cancel, DidChangeConfiguration, DidChangeTextDocument, DidChangeWatchedFiles, - DidCloseTextDocument, DidOpenTextDocument, DidSaveTextDocument, Notification as _, - PublishDiagnostics, - }, - request::{ - CodeActionRequest, CodeLensRequest, CodeLensResolve, Completion, DocumentHighlightRequest, - DocumentSymbolRequest, ExecuteCommand, Formatting, GotoDeclaration, GotoDefinition, - GotoImplementation, GotoTypeDefinition, HoverRequest, InlayHintRequest, - PrepareRenameRequest, References, RegisterCapability, Rename, Request as _, - SemanticTokensFullRequest, 
SemanticTokensRangeRequest, Shutdown, SignatureHelpRequest, - WorkspaceSymbolRequest, - }, - CodeActionKind, CodeActionOptions, CodeActionParams, CodeActionProviderCapability, - CodeActionResponse, CodeLens, CodeLensOptions, CompletionOptions, DidChangeConfigurationParams, - DidChangeTextDocumentParams, DidChangeWatchedFilesParams, - DidChangeWatchedFilesRegistrationOptions, DidCloseTextDocumentParams, - DidOpenTextDocumentParams, DidSaveTextDocumentParams, DocumentFormattingParams, - DocumentHighlight, DocumentHighlightParams, DocumentSymbolParams, DocumentSymbolResponse, - ExecuteCommandOptions, ExecuteCommandParams, FileChangeType, FileSystemWatcher, GlobPattern, - Hover, HoverParams, HoverProviderCapability, InitializeParams, InitializeResult, - NumberOrString, OneOf, PrepareRenameResponse, Registration, RegistrationParams, - RelativePattern, SemanticTokensFullOptions, SemanticTokensOptions, SemanticTokensParams, - SemanticTokensRangeParams, SemanticTokensServerCapabilities, ServerCapabilities, ServerInfo, - SignatureHelp, SignatureHelpOptions, SignatureHelpParams, TextDocumentPositionParams, - TextDocumentSyncCapability, TextDocumentSyncKind, TextEdit, WorkDoneProgressOptions, + notification::PublishDiagnostics, CodeActionKind, CodeActionOptions, + CodeActionProviderCapability, CodeLensOptions, CompletionOptions, ExecuteCommandOptions, + HoverProviderCapability, InitializeParams, InitializeResult, OneOf, SemanticTokensFullOptions, + SemanticTokensOptions, SemanticTokensServerCapabilities, ServerCapabilities, ServerInfo, + SignatureHelpOptions, TextDocumentSyncCapability, TextDocumentSyncKind, + WorkDoneProgressOptions, }; use parking_lot::RwLock; -use serde::{de::DeserializeOwned, Serialize}; use tracing::{debug, error, info, warn}; use self::async_requests::AsyncRequestContext; @@ -62,7 +43,7 @@ use crate::{ analysis::{tanka::effective_import_roots, EvalConfig, Evaluator}, async_diagnostics::{AsyncDiagnostics, DiagnosticsConfig}, config::ServerConfig, - 
protocol::inflight_requests::{IncomingRequest, InflightRequests}, + protocol::inflight_requests::InflightRequests, }; /// Shared server configuration. @@ -637,416 +618,6 @@ impl Server { Message::Notification(notif) => self.handle_notification(notif), } } - - /// Handle an incoming request. - fn handle_request(&mut self, req: Request) -> Result<()> { - debug!("Handling request: {} (id={})", req.method, req.id); - - let Request { id, method, params } = req; - match method.as_str() { - Shutdown::METHOD => { - let request = self.inflight_requests.begin::(id); - self.handle_shutdown_request(request) - } - GotoDefinition::METHOD - | GotoDeclaration::METHOD - | GotoTypeDefinition::METHOD - | GotoImplementation::METHOD - | InlayHintRequest::METHOD - | Completion::METHOD - | References::METHOD - | WorkspaceSymbolRequest::METHOD - | Rename::METHOD - | CodeLensRequest::METHOD - | ExecuteCommand::METHOD => self.handle_async_request(id, method.as_str(), params), - DocumentSymbolRequest::METHOD - | DocumentHighlightRequest::METHOD - | CodeActionRequest::METHOD - | HoverRequest::METHOD - | SignatureHelpRequest::METHOD - | Formatting::METHOD - | PrepareRenameRequest::METHOD - | SemanticTokensFullRequest::METHOD - | SemanticTokensRangeRequest::METHOD - | CodeLensResolve::METHOD => self.handle_sync_request(id, method.as_str(), params), - _ => { - let request = self.inflight_requests.begin_unknown(id, method.as_str()); - warn!("Unhandled request: {}", request.method()); - let message = format!("Method not found: {}", request.method()); - let _ = self.inflight_requests.send_unknown_err( - request, - lsp_server::ErrorCode::MethodNotFound, - message, - )?; - Ok(()) - } - } - } - - fn handle_shutdown_request(&mut self, request: IncomingRequest) -> Result<()> { - info!("Shutdown request received"); - self.shutdown_requested = true; - let _ = self.inflight_requests.send_ok(request, ())?; - Ok(()) - } - - fn handle_sync_request( - &mut self, - id: RequestId, - method: &str, - params: 
serde_json::Value, - ) -> Result<()> { - match method { - DocumentSymbolRequest::METHOD => { - let request = self.inflight_requests.begin::(id); - self.handle_sync_typed(request, params, Self::on_document_symbol) - } - DocumentHighlightRequest::METHOD => { - let request = self.inflight_requests.begin::(id); - self.handle_sync_typed(request, params, Self::on_document_highlight) - } - CodeActionRequest::METHOD => { - let request = self.inflight_requests.begin::(id); - self.handle_sync_typed(request, params, Self::on_code_action) - } - HoverRequest::METHOD => { - let request = self.inflight_requests.begin::(id); - self.handle_sync_typed(request, params, Self::on_hover) - } - SignatureHelpRequest::METHOD => { - let request = self.inflight_requests.begin::(id); - self.handle_sync_typed(request, params, Self::on_signature_help) - } - Formatting::METHOD => { - let request = self.inflight_requests.begin::(id); - self.handle_sync_typed(request, params, Self::on_formatting) - } - PrepareRenameRequest::METHOD => { - let request = self.inflight_requests.begin::(id); - self.handle_sync_typed(request, params, Self::on_prepare_rename) - } - SemanticTokensFullRequest::METHOD => { - let request = self - .inflight_requests - .begin::(id); - self.handle_sync_typed(request, params, Self::on_semantic_tokens_full) - } - SemanticTokensRangeRequest::METHOD => { - let request = self - .inflight_requests - .begin::(id); - self.handle_sync_typed(request, params, Self::on_semantic_tokens_range) - } - CodeLensResolve::METHOD => { - let request = self.inflight_requests.begin::(id); - self.handle_sync_typed(request, params, Self::resolve_code_lens) - } - _ => { - let request = self.inflight_requests.begin_unknown(id, method); - warn!("Unhandled request: {}", request.method()); - let message = format!("Method not found: {}", request.method()); - let _ = self.inflight_requests.send_unknown_err( - request, - lsp_server::ErrorCode::MethodNotFound, - message, - )?; - Ok(()) - } - } - } - - fn 
handle_sync_typed( - &mut self, - request: IncomingRequest, - params: serde_json::Value, - handler: fn(&Self, &R::Params) -> R::Result, - ) -> Result<()> - where - R: lsp_types::request::Request, - R::Params: DeserializeOwned, - R::Result: Serialize, - { - let params: R::Params = match serde_json::from_value(params) { - Ok(params) => params, - Err(err) => { - let _ = self.inflight_requests.send_err( - request, - lsp_server::ErrorCode::InvalidParams, - format!("Invalid params for {}: {err}", R::METHOD), - )?; - return Ok(()); - } - }; - - let _ = self - .inflight_requests - .send_ok(request, handler(self, ¶ms))?; - Ok(()) - } - - fn spawn_typed_json_response(&self, request: IncomingRequest, compute: F) - where - R: lsp_types::request::Request, - R::Result: Serialize + Send + 'static, - F: FnOnce() -> R::Result + Send + 'static, - { - let id = request.into_id(); - self.spawn_async_response(id, R::METHOD, move || { - serde_json::to_value(compute()).map_err(Into::into) - }); - } - - fn handle_async_request( - &mut self, - id: RequestId, - method: &str, - params: serde_json::Value, - ) -> Result<()> { - match method { - GotoDefinition::METHOD => { - let request = self.inflight_requests.begin::(id); - self.handle_async_typed(request, params, AsyncRequestContext::goto_definition) - } - GotoDeclaration::METHOD => { - let request = self.inflight_requests.begin::(id); - self.handle_async_typed(request, params, AsyncRequestContext::goto_declaration) - } - GotoImplementation::METHOD => { - let request = self.inflight_requests.begin::(id); - self.handle_async_typed(request, params, AsyncRequestContext::goto_implementation) - } - GotoTypeDefinition::METHOD => { - let request = self.inflight_requests.begin::(id); - self.handle_async_typed(request, params, AsyncRequestContext::goto_type_definition) - } - HoverRequest::METHOD => { - let request = self.inflight_requests.begin::(id); - self.handle_async_typed(request, params, AsyncRequestContext::hover) - } - InlayHintRequest::METHOD 
=> { - let request = self.inflight_requests.begin::(id); - self.handle_async_typed(request, params, AsyncRequestContext::inlay_hints) - } - Completion::METHOD => { - let request = self.inflight_requests.begin::(id); - self.handle_async_typed(request, params, AsyncRequestContext::completion) - } - References::METHOD => { - let request = self.inflight_requests.begin::(id); - self.handle_async_typed(request, params, AsyncRequestContext::references) - } - WorkspaceSymbolRequest::METHOD => { - let request = self.inflight_requests.begin::(id); - self.handle_async_typed(request, params, AsyncRequestContext::workspace_symbol) - } - Rename::METHOD => { - let request = self.inflight_requests.begin::(id); - self.handle_async_typed(request, params, AsyncRequestContext::rename) - } - CodeLensRequest::METHOD => { - let request = self.inflight_requests.begin::(id); - self.handle_async_typed(request, params, AsyncRequestContext::code_lens) - } - ExecuteCommand::METHOD => { - let request = self.inflight_requests.begin::(id); - self.handle_async_execute_command(request, params) - } - _ => { - let request = self.inflight_requests.begin_unknown(id, method); - warn!("Unhandled request: {}", request.method()); - let message = format!("Method not found: {}", request.method()); - let _ = self.inflight_requests.send_unknown_err( - request, - lsp_server::ErrorCode::MethodNotFound, - message, - )?; - Ok(()) - } - } - } - - fn handle_async_typed( - &mut self, - request: IncomingRequest, - params: serde_json::Value, - handler: fn(&AsyncRequestContext, &R::Params) -> R::Result, - ) -> Result<()> - where - R: lsp_types::request::Request, - R::Params: DeserializeOwned + Send + 'static, - R::Result: Serialize + Send + 'static, - { - let params: R::Params = match serde_json::from_value(params) { - Ok(params) => params, - Err(err) => { - let _ = self.inflight_requests.send_err( - request, - lsp_server::ErrorCode::InvalidParams, - format!("Invalid params for {}: {err}", R::METHOD), - )?; - return 
Ok(()); - } - }; - let context = self.async_request_context(); - self.spawn_typed_json_response(request, move || handler(&context, ¶ms)); - Ok(()) - } - - fn handle_async_execute_command( - &mut self, - request: IncomingRequest, - params: serde_json::Value, - ) -> Result<()> { - let params: ExecuteCommandParams = match serde_json::from_value(params) { - Ok(params) => params, - Err(err) => { - let _ = self.inflight_requests.send_err( - request, - lsp_server::ErrorCode::InvalidParams, - format!("Invalid params for {}: {err}", ExecuteCommand::METHOD), - )?; - return Ok(()); - } - }; - if !Self::is_supported_execute_command(¶ms.command) { - let _ = self.inflight_requests.send_err( - request, - lsp_server::ErrorCode::InvalidParams, - format!("Unknown execute command: {}", params.command), - )?; - return Ok(()); - } - - let context = self.async_request_context(); - self.spawn_typed_json_response(request, move || context.execute_command(¶ms)); - Ok(()) - } - - /// Handle textDocument/documentSymbol request. - fn on_document_symbol(&self, params: &DocumentSymbolParams) -> Option { - let uri = ¶ms.text_document.uri; - let path = CanonicalPath::from_uri(uri).ok()?; - let doc = self.documents.get(&path)?; - - let symbols = handlers::document_symbols(&doc); - Some(DocumentSymbolResponse::Nested(symbols)) - } - - /// Handle textDocument/hover request. - fn on_hover(&self, params: &HoverParams) -> Option { - self.async_request_context().hover(params) - } - - /// Handle textDocument/documentHighlight request. 
- fn on_document_highlight( - &self, - params: &DocumentHighlightParams, - ) -> Option> { - let uri = ¶ms.text_document_position_params.text_document.uri; - let position = params.text_document_position_params.position; - let path = CanonicalPath::from_uri(uri).ok()?; - let doc = self.documents.get(&path)?; - let lsp_pos = position.into(); - - let highlights = handlers::document_highlights(&doc, lsp_pos); - if highlights.is_empty() { - return None; - } - - Some(highlights) - } - - /// Handle textDocument/codeAction request. - fn on_code_action(&self, params: &CodeActionParams) -> Option { - let uri = ¶ms.text_document.uri; - let path = CanonicalPath::from_uri(uri).ok()?; - let actions = { - let doc = self.documents.get(&path)?; - let code_action_config = self.config.read().code_actions; - handlers::code_actions( - &doc, - uri, - params.range, - ¶ms.context, - &code_action_config, - ) - }; - if actions.is_empty() { - return None; - } - - Some(actions) - } - - /// Handle textDocument/signatureHelp request. - fn on_signature_help(&self, params: &SignatureHelpParams) -> Option { - let uri = ¶ms.text_document_position_params.text_document.uri; - let position = params.text_document_position_params.position; - let path = CanonicalPath::from_uri(uri).ok()?; - let doc = self.documents.get(&path)?; - - let lsp_pos = position.into(); - - handlers::signature_help(&doc, lsp_pos) - } - - /// Handle textDocument/formatting request. - fn on_formatting(&self, params: &DocumentFormattingParams) -> Option> { - let uri = ¶ms.text_document.uri; - let path = CanonicalPath::from_uri(uri).ok()?; - let doc = self.documents.get(&path)?; - - // Get formatting config - let config = self.config.read().formatting.clone(); - let context = - handlers::FormattingContext::for_document(path.as_path(), &self.workspace_roots); - - handlers::format_document_with_config(doc.text(), &config, context) - } - - /// Handle textDocument/prepareRename request. 
- fn on_prepare_rename( - &self, - params: &TextDocumentPositionParams, - ) -> Option { - let uri = ¶ms.text_document.uri; - let position = params.position; - let path = CanonicalPath::from_uri(uri).ok()?; - let doc = self.documents.get(&path)?; - - let lsp_pos = position.into(); - - handlers::prepare_rename(&doc, lsp_pos) - } - - /// Handle textDocument/semanticTokens/full request. - fn on_semantic_tokens_full( - &self, - params: &SemanticTokensParams, - ) -> Option { - let uri = ¶ms.text_document.uri; - let path = CanonicalPath::from_uri(uri).ok()?; - let doc = self.documents.get(&path)?; - - Some(handlers::semantic_tokens(&doc).into()) - } - - /// Handle textDocument/semanticTokens/range request. - fn on_semantic_tokens_range( - &self, - params: &SemanticTokensRangeParams, - ) -> Option { - let uri = ¶ms.text_document.uri; - let path = CanonicalPath::from_uri(uri).ok()?; - let doc = self.documents.get(&path)?; - - Some(handlers::semantic_tokens_range(&doc, params.range).into()) - } - - /// Handle codeLens/resolve request. 
- fn resolve_code_lens(_server: &Self, params: &CodeLens) -> CodeLens { - handlers::resolve_code_lens(params.clone()) - } } impl Server { diff --git a/crates/jrsonnet-lsp/src/server/async_requests.rs b/crates/jrsonnet-lsp/src/server/async_requests.rs index af21f6b2..a622b686 100644 --- a/crates/jrsonnet-lsp/src/server/async_requests.rs +++ b/crates/jrsonnet-lsp/src/server/async_requests.rs @@ -1,28 +1,27 @@ +mod code_lens; +mod commands; +mod completion; +mod goto_declaration; +mod goto_definition; +mod goto_implementation; +mod goto_shared; +mod goto_type_definition; +mod hover; +mod import_lookup; +mod inlay_hints; +mod references; +mod rename; +mod workspace_symbol; + use std::sync::Arc; -use jrsonnet_lsp_document::{to_lsp_range, CanonicalPath, Document, SymbolName}; -use jrsonnet_lsp_handlers as handlers; -use jrsonnet_lsp_import::{ImportGraph, ImportResolution}; +use jrsonnet_lsp_document::{CanonicalPath, Document}; +use jrsonnet_lsp_import::ImportGraph; use jrsonnet_lsp_inference::{SharedDocumentManager, SharedTypeCache, TypeAnalysis, TypeProvider}; -use jrsonnet_lsp_types::{GlobalTyStore, Ty, TyData}; -use jrsonnet_rowan_parser::{AstNode, SyntaxKind}; -use lsp_types::{ - CodeLens, CodeLensParams, CompletionParams, CompletionResponse, ExecuteCommandParams, - GotoDefinitionParams, GotoDefinitionResponse, Hover, HoverParams, InlayHint, InlayHintParams, - Location, PartialResultParams, Position, ReferenceContext, ReferenceParams, RenameParams, - SymbolInformation, TextDocumentIdentifier, TextDocumentPositionParams, WorkDoneProgressParams, - WorkspaceEdit, WorkspaceSymbolParams, WorkspaceSymbolResponse, -}; +use jrsonnet_lsp_types::GlobalTyStore; use parking_lot::RwLock; -use rayon::prelude::*; -use tracing::{info, warn}; - -use super::{unique_files, SharedConfig}; -use crate::analysis::{ - eval::create_state_with_jpath, tanka::effective_import_roots, EvalConfig, Evaluator, -}; -const MAX_WORKSPACE_SYMBOL_RESULTS: usize = 128; +use super::SharedConfig; 
#[derive(Clone)] pub(super) struct AsyncRequestContext { @@ -33,14 +32,6 @@ pub(super) struct AsyncRequestContext { config: SharedConfig, } -#[derive(Clone, Copy, Debug, Eq, PartialEq)] -enum GotoTarget { - Definition, - TypeDefinition, - Declaration, - Implementation, -} - impl AsyncRequestContext { pub(super) fn new( documents: SharedDocumentManager, @@ -70,854 +61,4 @@ impl AsyncRequestContext { provider.analyze(path, doc, self.documents.as_ref()) }) } - - pub(super) fn hover(&self, params: &HoverParams) -> Option { - let uri = ¶ms.text_document_position_params.text_document.uri; - let position = params.text_document_position_params.position; - let path = CanonicalPath::from_uri(uri).ok()?; - let doc = self.documents.get(&path)?; - let lsp_pos = position.into(); - let analysis = self.analyze_document(&path, &doc); - let import_field_type_resolver = |import_path: &str, fields: &[String]| { - self.resolve_import_field_type(&path, import_path, fields) - }; - handlers::hover_with_import_field_type( - &doc, - lsp_pos, - &analysis, - Some(&import_field_type_resolver), - ) - } - - pub(super) fn goto_definition( - &self, - params: &GotoDefinitionParams, - ) -> Option { - self.goto_target(params, GotoTarget::Definition) - } - - pub(super) fn goto_declaration( - &self, - params: &GotoDefinitionParams, - ) -> Option { - self.goto_target(params, GotoTarget::Declaration) - } - - pub(super) fn goto_type_definition( - &self, - params: &GotoDefinitionParams, - ) -> Option { - self.goto_target(params, GotoTarget::TypeDefinition) - } - - pub(super) fn goto_implementation( - &self, - params: &GotoDefinitionParams, - ) -> Option { - self.goto_target(params, GotoTarget::Implementation) - } - - fn goto_target( - &self, - params: &GotoDefinitionParams, - target: GotoTarget, - ) -> Option { - let uri = ¶ms.text_document_position_params.text_document.uri; - let position = params.text_document_position_params.position; - let path = CanonicalPath::from_uri(uri).ok()?; - let doc = 
self.documents.get(&path)?; - let lsp_pos = position.into(); - let semantic = self.documents.get_semantic_artifacts(&path); - - let result = match target { - GotoTarget::Definition | GotoTarget::TypeDefinition => { - handlers::goto_definition_with_semantic(&doc, lsp_pos, semantic.as_deref())? - } - GotoTarget::Declaration | GotoTarget::Implementation => { - handlers::goto_declaration_with_semantic(&doc, lsp_pos, semantic.as_deref())? - } - }; - match result { - handlers::DefinitionResult::Local(range) => { - let range = if target == GotoTarget::Implementation { - Self::local_implementation_range(&doc, range).unwrap_or(range) - } else { - range - }; - Some(GotoDefinitionResponse::Scalar(Location { - uri: uri.clone(), - range, - })) - } - handlers::DefinitionResult::Import(import_path) => { - let resolved = self.resolve_import_from_graph(&path, &import_path)?; - let resolved_uri = resolved.to_uri().ok()?; - let range = if target == GotoTarget::Implementation { - self.document_root_expr_range(&resolved).unwrap_or_default() - } else { - lsp_types::Range::default() - }; - Some(GotoDefinitionResponse::Scalar(Location { - uri: resolved_uri, - range, - })) - } - handlers::DefinitionResult::ImportField { - path: import_path, - fields, - } => { - let resolved = self.resolve_import_from_graph(&path, &import_path)?; - let resolved_uri = resolved.to_uri().ok()?; - let locations = self.find_field_in_file(&resolved, &fields); - let range = if target == GotoTarget::Implementation { - locations - .map(|location| location.implementation) - .or_else(|| self.find_export_binding_in_file(&resolved, &fields)) - .or_else(|| self.document_root_expr_range(&resolved)) - .unwrap_or_default() - } else { - locations - .map(|location| location.declaration) - .or_else(|| self.find_export_binding_in_file(&resolved, &fields))? 
- }; - Some(GotoDefinitionResponse::Scalar(Location { - uri: resolved_uri, - range, - })) - } - } - } - - pub(super) fn inlay_hints(&self, params: &InlayHintParams) -> Option> { - let uri = ¶ms.text_document.uri; - let path = CanonicalPath::from_uri(uri).ok()?; - let doc = self.documents.get(&path)?; - let analysis = self.analyze_document(&path, &doc); - let hints = handlers::inlay_hints(&doc, &analysis, params.range); - if hints.is_empty() { - return None; - } - Some(hints) - } - - pub(super) fn completion(&self, params: &CompletionParams) -> Option { - let uri = ¶ms.text_document_position.text_document.uri; - let position = params.text_document_position.position; - let path = CanonicalPath::from_uri(uri).ok()?; - let doc = self.documents.get(&path)?; - let semantic = self.documents.get_semantic_artifacts(&path); - - let lsp_pos = position.into(); - let analysis = self.analyze_document(&path, &doc); - let config = self.config.read(); - let import_roots = effective_import_roots( - path.as_path(), - &config.jpath, - config.resolve_paths_with_tanka, - ); - drop(config); - - let list = handlers::completion_with_import_roots_and_semantic( - &doc, - lsp_pos, - Some(path.as_path()), - &import_roots, - &analysis, - semantic.as_deref(), - )?; - Some(CompletionResponse::List(list)) - } - - pub(super) fn references(&self, params: &ReferenceParams) -> Option> { - let uri = ¶ms.text_document_position.text_document.uri; - let position = params.text_document_position.position; - let path = CanonicalPath::from_uri(uri).ok()?; - let doc = self.documents.get_document(&path)?; - let lsp_pos = position.into(); - let semantic = self.documents.get_semantic_artifacts(&path); - - let include_declaration = params.context.include_declaration; - let mut refs = handlers::find_references_with_semantic( - &doc, - lsp_pos, - uri, - include_declaration, - semantic.as_deref(), - ); - - let importers = { - let import_graph = self.import_graph.read(); - import_graph - .file(&path) - 
.map_or_else(Vec::new, |file| import_graph.transitive_importers(file)) - }; - - let importer_docs: Vec<_> = importers - .into_iter() - .filter_map(|file| { - let path = self.documents.path(file)?; - let doc = self.documents.get_document(path.as_canonical_path())?; - let semantic = self - .documents - .get_semantic_artifacts(path.as_canonical_path()); - Some((path.as_canonical_path().clone(), doc, semantic)) - }) - .collect(); - let importer_refs: Vec<_> = importer_docs - .iter() - .map(|(k, v, semantic)| (k, v, semantic.as_deref())) - .collect(); - - let cross_refs = { - let import_graph = self.import_graph.read(); - handlers::find_cross_file_references_with_semantic( - &doc, - &path, - lsp_pos, - semantic.as_deref(), - &importer_refs, - &import_graph, - ) - }; - refs.extend(cross_refs); - - if refs.is_empty() { - return None; - } - Some(refs) - } - - pub(super) fn workspace_symbol( - &self, - params: &WorkspaceSymbolParams, - ) -> Option { - let query = ¶ms.query; - - let files = { - let import_graph = self.import_graph.read(); - unique_files(import_graph.all_files().chain(self.documents.open_files())) - }; - - let mut all_symbols: Vec = files - .into_par_iter() - .flat_map(|file| { - let Some(path) = self.documents.path(file) else { - return Vec::new(); - }; - let Some(doc) = self.documents.get_document(path.as_canonical_path()) else { - return Vec::new(); - }; - let Ok(uri) = path.as_canonical_path().to_uri() else { - return Vec::new(); - }; - handlers::workspace_symbols_for_document(&doc, &uri, query) - }) - .collect(); - - let query_lower = query.to_lowercase(); - all_symbols.sort_by_cached_key(|symbol| { - let name_lower = symbol.name.to_lowercase(); - ( - workspace_symbol_match_rank(&name_lower, &query_lower), - name_lower.len(), - name_lower, - symbol.location.uri.as_str().to_string(), - symbol.location.range.start.line, - symbol.location.range.start.character, - symbol.location.range.end.line, - symbol.location.range.end.character, - ) - }); - if 
all_symbols.len() > MAX_WORKSPACE_SYMBOL_RESULTS { - all_symbols.truncate(MAX_WORKSPACE_SYMBOL_RESULTS); - } - - if all_symbols.is_empty() { - return None; - } - Some(WorkspaceSymbolResponse::Flat(all_symbols)) - } - - pub(super) fn rename(&self, params: &RenameParams) -> Option { - let uri = ¶ms.text_document_position.text_document.uri; - let position = params.text_document_position.position; - let path = CanonicalPath::from_uri(uri).ok()?; - let doc = self.documents.get(&path)?; - - let new_name = match SymbolName::new(¶ms.new_name) { - Ok(name) => name, - Err(e) => { - warn!("rename rejected: {}", e); - return None; - } - }; - - let lsp_pos = position.into(); - let import_graph = self.import_graph.read(); - - handlers::rename_cross_file( - &doc, - lsp_pos, - &new_name, - uri, - &path, - &self.documents, - &import_graph, - ) - } - - pub(super) fn code_lens(&self, params: &CodeLensParams) -> Option> { - let uri = ¶ms.text_document.uri; - let path = CanonicalPath::from_uri(uri).ok()?; - let doc = self.documents.get(&path)?; - - let config = handlers::CodeLensConfig::all(); - let analysis = self.analyze_document(&path, &doc); - Some(handlers::code_lens(&doc, uri, &config, Some(&analysis))) - } - - pub(super) fn execute_command( - &self, - params: &ExecuteCommandParams, - ) -> Option { - info!("Execute command: {}", params.command); - - match params.command.as_str() { - "jrsonnet.evalFile" => { - let uri = params.arguments.first()?.as_str()?; - self.execute_eval_file(uri) - } - "jrsonnet.evalExpression" => { - let expr = params.arguments.first()?.as_str()?; - let base_uri = params.arguments.get(1).and_then(|v| v.as_str()); - Some(self.execute_eval_expression(expr, base_uri)) - } - "jrsonnet.findTransitiveImporters" => { - let uri = params.arguments.first()?.as_str()?; - self.execute_find_transitive_importers(uri) - } - "jrsonnet.findReferences" => { - let uri = params.arguments.first()?.as_str()?; - let line = params.arguments.get(1)?.as_u64()?; - let line = 
u32::try_from(line).ok()?; - let character = params.arguments.get(2)?.as_u64()?; - let character = u32::try_from(character).ok()?; - let include_declaration = params - .arguments - .get(3) - .and_then(serde_json::Value::as_bool) - .unwrap_or(false); - self.execute_find_references(uri, line, character, include_declaration) - } - "jrsonnet.showErrors" => { - let uri = params.arguments.first()?.as_str()?; - self.execute_show_errors(uri) - } - _ => { - warn!("Unknown command: {}", params.command); - None - } - } - } - - fn execute_eval_file(&self, uri: &str) -> Option { - use jrsonnet_evaluator::manifest::JsonFormat; - use jrsonnet_parser::{SourceFile, SourcePath}; - - let uri_parsed: lsp_types::Uri = uri.parse().ok()?; - let path = CanonicalPath::from_uri(&uri_parsed).ok()?; - let text = self.documents.get_text(&path)?; - - let jpath = self.eval_command_jpath(Some(&path)); - let state = create_state_with_jpath(&jpath); - - let source_path = SourcePath::new(SourceFile::new(path.as_path().to_path_buf())); - - match state.evaluate_snippet(source_path.to_string(), &text) { - Ok(val) => { - let json_format = JsonFormat::default(); - match val.manifest(json_format) { - Ok(json_str) => match serde_json::from_str::(&json_str) { - Ok(json) => Some(json), - Err(e) => { - warn!("Failed to parse manifest result as JSON: {}", e); - Some(serde_json::Value::String(json_str)) - } - }, - Err(e) => { - warn!("Failed to manifest: {}", e); - Some(serde_json::json!({ - "error": format!("Manifest error: {}", e.error()) - })) - } - } - } - Err(e) => { - warn!("Evaluation failed: {}", e); - Some(serde_json::json!({ - "error": format!("Evaluation error: {}", e.error()) - })) - } - } - } - - fn execute_eval_expression(&self, expr: &str, base_uri: Option<&str>) -> serde_json::Value { - use jrsonnet_evaluator::manifest::JsonFormat; - use jrsonnet_parser::{SourceFile, SourcePath}; - - let base_path = base_uri - .and_then(|uri| uri.parse::().ok()) - .and_then(|uri| 
CanonicalPath::from_uri(&uri).ok()); - let jpath = self.eval_command_jpath(base_path.as_ref()); - let state = create_state_with_jpath(&jpath); - let source_name = base_path.map_or_else( - || "".to_string(), - |path| SourcePath::new(SourceFile::new(path.as_path().to_path_buf())).to_string(), - ); - - match state.evaluate_snippet(source_name, expr) { - Ok(val) => { - let json_format = JsonFormat::default(); - match val.manifest(json_format) { - Ok(json_str) => match serde_json::from_str::(&json_str) { - Ok(json) => json, - Err(e) => { - warn!("Failed to parse manifest result as JSON: {}", e); - serde_json::Value::String(json_str) - } - }, - Err(e) => serde_json::json!({ - "error": format!("Manifest error: {}", e.error()) - }), - } - } - Err(e) => serde_json::json!({ - "error": format!("Evaluation error: {}", e.error()) - }), - } - } - - fn eval_command_jpath(&self, base_path: Option<&CanonicalPath>) -> Vec { - let config = self.config.read(); - let jpath = base_path.map_or_else( - || config.jpath.clone(), - |base_path| { - let mut roots = effective_import_roots( - base_path.as_path(), - &config.jpath, - config.resolve_paths_with_tanka, - ); - if !config.resolve_paths_with_tanka { - if let Some(dir) = base_path.as_path().parent() { - if !roots.iter().any(|entry| entry == dir) { - roots.push(dir.to_path_buf()); - } - } - } - roots - }, - ); - drop(config); - jpath - } - - fn execute_find_transitive_importers(&self, uri: &str) -> Option { - let uri_parsed: lsp_types::Uri = uri.parse().ok()?; - let path = CanonicalPath::from_uri(&uri_parsed).ok()?; - - let import_graph = self.import_graph.read(); - let importers = import_graph - .file(&path) - .map_or_else(Vec::new, |file| import_graph.transitive_importers(file)); - let mut importer_uris: Vec = importers - .iter() - .filter_map(|file| { - import_graph - .path(*file) - .and_then(|path| path.to_uri().ok().map(|uri| uri.to_string())) - }) - .collect(); - drop(import_graph); - importer_uris.sort(); - - 
Some(serde_json::json!({ - "file": uri, - "transitiveImporters": importer_uris - })) - } - - fn execute_find_references( - &self, - uri: &str, - line: u32, - character: u32, - include_declaration: bool, - ) -> Option { - let uri_parsed: lsp_types::Uri = uri.parse().ok()?; - let params = ReferenceParams { - text_document_position: TextDocumentPositionParams { - text_document: TextDocumentIdentifier { uri: uri_parsed }, - position: Position { line, character }, - }, - context: ReferenceContext { - include_declaration, - }, - work_done_progress_params: WorkDoneProgressParams::default(), - partial_result_params: PartialResultParams::default(), - }; - let refs = self.references(¶ms).unwrap_or_default(); - - serde_json::to_value(refs).ok() - } - - fn execute_show_errors(&self, uri: &str) -> Option { - let uri_parsed: lsp_types::Uri = uri.parse().ok()?; - let path = CanonicalPath::from_uri(&uri_parsed).ok()?; - let doc = self.load_document_for_path(&path)?; - let analysis = self.analyze_document(&path, &doc); - let (enable_lint_diagnostics, evaluator, import_roots) = { - let config = self.config.read(); - let evaluator = config.enable_eval_diagnostics.then(|| { - let eval_config = EvalConfig { - jpath: config.jpath.clone(), - resolve_paths_with_tanka: config.resolve_paths_with_tanka, - }; - Evaluator::new(&eval_config) - }); - let import_roots = effective_import_roots( - path.as_path(), - &config.jpath, - config.resolve_paths_with_tanka, - ); - (config.enable_lint_diagnostics, evaluator, import_roots) - }; - let import_resolution = ImportResolution::new(&path, &import_roots); - let import_occurrences = import_resolution.parse_occurrences(&doc); - - let diagnostics = crate::handlers::compute_diagnostics( - &doc, - &path, - enable_lint_diagnostics, - evaluator.as_ref(), - &uri_parsed, - &analysis, - &import_occurrences, - ); - - let response = lsp_types::PublishDiagnosticsParams { - uri: uri_parsed, - diagnostics, - version: Some(doc.version().0), - }; - 
serde_json::to_value(response).ok() - } - - fn resolve_import_from_graph( - &self, - from: &CanonicalPath, - import: &str, - ) -> Option { - let import_graph = self.import_graph.read(); - let from_file = import_graph.file(from)?; - import_graph - .imports(from_file) - .iter() - .find(|entry| entry.import_path == import) - .and_then(|entry| entry.resolved_path.clone()) - } - - fn resolve_import_from_fs(from: &CanonicalPath, import: &str) -> Option { - let import_path = std::path::Path::new(import); - let candidate = if import_path.is_absolute() { - import_path.to_path_buf() - } else if import.starts_with("./") || import.starts_with("../") { - from.as_path().parent()?.join(import_path) - } else { - return None; - }; - - CanonicalPath::try_from_path(&candidate).ok() - } - - fn resolve_import_path(&self, from: &CanonicalPath, import: &str) -> Option { - if import.starts_with("./") || import.starts_with("../") || import.starts_with('/') { - return Self::resolve_import_from_fs(from, import) - .or_else(|| self.resolve_import_from_graph(from, import)); - } - - self.resolve_import_from_graph(from, import) - .or_else(|| Self::resolve_import_from_fs(from, import)) - } - - fn resolve_import_field_type( - &self, - from: &CanonicalPath, - import_path: &str, - fields: &[String], - ) -> Option { - let resolved = self.resolve_import_path(from, import_path)?; - let doc = self.load_document_for_path(&resolved)?; - let analysis = self.analyze_document(&resolved, &doc); - let ty = Self::type_for_field_path(&analysis, analysis.document_type(), fields)?; - Some(analysis.display_for_hover(ty)) - } - - fn type_for_field_path(analysis: &TypeAnalysis, root_ty: Ty, fields: &[String]) -> Option { - fields.iter().try_fold(root_ty, |ty, field| { - Self::type_for_field(analysis, ty, field) - }) - } - - fn type_for_field(analysis: &TypeAnalysis, ty: Ty, field: &str) -> Option { - match analysis.get_data(ty) { - TyData::Any => Some(Ty::ANY), - TyData::Object(obj) => obj - .get_field(field) - 
.map(|field_def| field_def.ty) - .or_else(|| obj.has_unknown.then_some(Ty::ANY)), - TyData::AttrsOf { value } => Some(value), - TyData::Union(types) => { - let variants: Vec<_> = types - .into_iter() - .filter_map(|variant| Self::type_for_field(analysis, variant, field)) - .collect(); - if variants.is_empty() { - None - } else { - Some(analysis.union(variants)) - } - } - TyData::Sum(types) => { - let variants: Vec<_> = types - .into_iter() - .filter_map(|variant| Self::type_for_field(analysis, variant, field)) - .collect(); - if variants.is_empty() { - None - } else { - Some(analysis.union(variants)) - } - } - _ => None, - } - } - - fn load_document_for_path(&self, path: &CanonicalPath) -> Option { - self.documents.get_document(path) - } - - fn document_root_expr_range(&self, path: &CanonicalPath) -> Option { - let doc = self.load_document_for_path(path)?; - let expr = doc.ast().expr()?; - Some(to_lsp_range( - expr.syntax().text_range(), - doc.line_index(), - doc.text(), - )) - } - - fn find_export_binding_in_file( - &self, - path: &CanonicalPath, - fields: &[String], - ) -> Option { - let [field_name] = fields else { - return None; - }; - - let doc = self.load_document_for_path(path)?; - let text = doc.text(); - let line_index = doc.line_index(); - - doc.ast() - .syntax() - .descendants_with_tokens() - .filter_map(jrsonnet_rowan_parser::rowan::NodeOrToken::into_token) - .filter(|token| token.kind() == SyntaxKind::IDENT && token.text() == field_name) - .find_map(|token| { - let position = line_index.position(token.text_range().start().into(), text)?; - match handlers::goto_definition(&doc, position) { - Some(handlers::DefinitionResult::Local(range)) => Some(range), - Some( - handlers::DefinitionResult::Import(_) - | handlers::DefinitionResult::ImportField { .. 
}, - ) - | None => None, - } - }) - } - - fn local_implementation_range( - document: &Document, - declaration: lsp_types::Range, - ) -> Option { - use jrsonnet_lsp_document::LspRange; - use jrsonnet_rowan_parser::{ - nodes::{Bind, ForSpec, Param}, - AstNode, - }; - - let text = document.text(); - let line_index = document.line_index(); - let declaration_range = line_index.text_range(LspRange::from(declaration), text)?; - let ast = document.ast(); - let node = ast - .syntax() - .descendants() - .find(|candidate| candidate.text_range() == declaration_range)?; - - if let Some(bind) = node.ancestors().find_map(Bind::cast) { - let value_range = match bind { - Bind::BindDestruct(bind) => bind.value()?.syntax().text_range(), - Bind::BindFunction(bind) => bind.value()?.syntax().text_range(), - }; - return Some(to_lsp_range(value_range, line_index, text)); - } - - if let Some(param) = node.ancestors().find_map(Param::cast) { - let default_value = param.expr()?; - return Some(to_lsp_range( - default_value.syntax().text_range(), - line_index, - text, - )); - } - - if let Some(for_spec) = node.ancestors().find_map(ForSpec::cast) { - let source_expr = for_spec.expr()?; - return Some(to_lsp_range( - source_expr.syntax().text_range(), - line_index, - text, - )); - } - - None - } - - /// For a field chain like `foo.bar`, this finds the `bar` field - /// inside the `foo` field of the top-level object. 
- fn find_field_in_file( - &self, - path: &CanonicalPath, - fields: &[String], - ) -> Option { - use jrsonnet_rowan_parser::{ - nodes::{ExprBase, Member, ObjBody}, - AstNode, - }; - - let doc = self.load_document_for_path(path)?; - - let ast = doc.ast(); - let text = doc.text(); - let line_index = doc.line_index(); - let expr = ast.expr()?; - - let expr_base = expr.expr_base()?; - let ExprBase::ExprObject(obj) = expr_base else { - return None; - }; - let mut current_obj_body = obj.obj_body()?; - - for (i, field_name) in fields.iter().enumerate() { - let is_last = i == fields.len() - 1; - let ObjBody::ObjBodyMemberList(members) = ¤t_obj_body else { - return None; - }; - - let field_target = members.members().find_map(|member| match member { - Member::MemberFieldNormal(field) => { - let name_node = field.field_name()?; - let name = extract_field_name_string(&name_node)?; - if name != *field_name { - return None; - } - - let declaration = name_node.syntax().text_range(); - let value = field.expr()?; - let implementation = value.syntax().text_range(); - let next_body = value.expr_base().and_then(|base| { - let ExprBase::ExprObject(obj) = base else { - return None; - }; - obj.obj_body() - }); - - Some((declaration, implementation, next_body)) - } - Member::MemberFieldMethod(method) => { - let name_node = method.field_name()?; - let name = extract_field_name_string(&name_node)?; - if name != *field_name { - return None; - } - - let declaration = name_node.syntax().text_range(); - let implementation = method - .expr() - .map_or(declaration, |expr| expr.syntax().text_range()); - Some((declaration, implementation, None)) - } - Member::MemberBindStmt(_) | Member::MemberAssertStmt(_) => None, - })?; - - if is_last { - let declaration = to_lsp_range(field_target.0, line_index, text); - let implementation = to_lsp_range(field_target.1, line_index, text); - return Some(ImportedFieldLocations { - declaration, - implementation, - }); - } - - current_obj_body = field_target.2?; - } 
- - None - } -} - -fn workspace_symbol_match_rank(name_lower: &str, query_lower: &str) -> u8 { - if query_lower.is_empty() { - return 0; - } - if name_lower == query_lower { - return 0; - } - if name_lower.starts_with(query_lower) { - return 1; - } - 2 -} - -#[derive(Debug, Clone, Copy)] -struct ImportedFieldLocations { - declaration: lsp_types::Range, - implementation: lsp_types::Range, -} - -fn extract_field_name_string(name: &jrsonnet_rowan_parser::nodes::FieldName) -> Option { - use jrsonnet_rowan_parser::{nodes::FieldName, AstToken}; - - match name { - FieldName::FieldNameFixed(fixed) => { - if let Some(name_node) = fixed.id() { - if let Some(ident) = name_node.ident_lit() { - return Some(ident.text().to_string()); - } - } - if let Some(text) = fixed.text() { - let s = text.syntax().text(); - let name = s - .trim_start_matches('"') - .trim_start_matches('\'') - .trim_end_matches('"') - .trim_end_matches('\''); - return Some(name.to_string()); - } - None - } - FieldName::FieldNameDynamic(_) => None, - } } diff --git a/crates/jrsonnet-lsp/src/server/async_requests/code_lens.rs b/crates/jrsonnet-lsp/src/server/async_requests/code_lens.rs new file mode 100644 index 00000000..6d3b57ee --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/async_requests/code_lens.rs @@ -0,0 +1,17 @@ +use jrsonnet_lsp_document::CanonicalPath; +use jrsonnet_lsp_handlers as handlers; +use lsp_types::{CodeLens, CodeLensParams}; + +use super::AsyncRequestContext; + +impl AsyncRequestContext { + pub(crate) fn code_lens(&self, params: &CodeLensParams) -> Option> { + let uri = ¶ms.text_document.uri; + let path = CanonicalPath::from_uri(uri).ok()?; + let doc = self.documents.get(&path)?; + + let config = handlers::CodeLensConfig::all(); + let analysis = self.analyze_document(&path, &doc); + Some(handlers::code_lens(&doc, uri, &config, Some(&analysis))) + } +} diff --git a/crates/jrsonnet-lsp/src/server/async_requests/commands.rs b/crates/jrsonnet-lsp/src/server/async_requests/commands.rs new 
file mode 100644 index 00000000..175e2781 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/async_requests/commands.rs @@ -0,0 +1,248 @@ +use jrsonnet_lsp_document::CanonicalPath; +use jrsonnet_lsp_import::ImportResolution; +use lsp_types::{ + ExecuteCommandParams, PartialResultParams, Position, ReferenceContext, ReferenceParams, + TextDocumentIdentifier, TextDocumentPositionParams, WorkDoneProgressParams, +}; +use tracing::{info, warn}; + +use super::AsyncRequestContext; +use crate::analysis::{tanka::effective_import_roots, EvalConfig, Evaluator}; + +impl AsyncRequestContext { + pub(crate) fn execute_command( + &self, + params: &ExecuteCommandParams, + ) -> Option { + info!("Execute command: {}", params.command); + + match params.command.as_str() { + "jrsonnet.evalFile" => { + let uri = params.arguments.first()?.as_str()?; + self.execute_eval_file(uri) + } + "jrsonnet.evalExpression" => { + let expr = params.arguments.first()?.as_str()?; + let base_uri = params.arguments.get(1).and_then(|v| v.as_str()); + Some(self.execute_eval_expression(expr, base_uri)) + } + "jrsonnet.findTransitiveImporters" => { + let uri = params.arguments.first()?.as_str()?; + self.execute_find_transitive_importers(uri) + } + "jrsonnet.findReferences" => { + let uri = params.arguments.first()?.as_str()?; + let line = params.arguments.get(1)?.as_u64()?; + let line = u32::try_from(line).ok()?; + let character = params.arguments.get(2)?.as_u64()?; + let character = u32::try_from(character).ok()?; + let include_declaration = params + .arguments + .get(3) + .and_then(serde_json::Value::as_bool) + .unwrap_or(false); + self.execute_find_references(uri, line, character, include_declaration) + } + "jrsonnet.showErrors" => { + let uri = params.arguments.first()?.as_str()?; + self.execute_show_errors(uri) + } + _ => { + warn!("Unknown command: {}", params.command); + None + } + } + } + + fn execute_eval_file(&self, uri: &str) -> Option { + use jrsonnet_evaluator::manifest::JsonFormat; + use 
jrsonnet_parser::{SourceFile, SourcePath}; + + let uri_parsed: lsp_types::Uri = uri.parse().ok()?; + let path = CanonicalPath::from_uri(&uri_parsed).ok()?; + let text = self.documents.get_text(&path)?; + + let jpath = self.eval_command_jpath(Some(&path)); + let state = crate::analysis::eval::create_state_with_jpath(&jpath); + + let source_path = SourcePath::new(SourceFile::new(path.as_path().to_path_buf())); + + match state.evaluate_snippet(source_path.to_string(), &text) { + Ok(val) => { + let json_format = JsonFormat::default(); + match val.manifest(json_format) { + Ok(json_str) => match serde_json::from_str::(&json_str) { + Ok(json) => Some(json), + Err(e) => { + warn!("Failed to parse manifest result as JSON: {}", e); + Some(serde_json::Value::String(json_str)) + } + }, + Err(e) => { + warn!("Failed to manifest: {}", e); + Some(serde_json::json!({ + "error": format!("Manifest error: {}", e.error()) + })) + } + } + } + Err(e) => { + warn!("Evaluation failed: {}", e); + Some(serde_json::json!({ + "error": format!("Evaluation error: {}", e.error()) + })) + } + } + } + + fn execute_eval_expression(&self, expr: &str, base_uri: Option<&str>) -> serde_json::Value { + use jrsonnet_evaluator::manifest::JsonFormat; + use jrsonnet_parser::{SourceFile, SourcePath}; + + let base_path = base_uri + .and_then(|uri| uri.parse::().ok()) + .and_then(|uri| CanonicalPath::from_uri(&uri).ok()); + let jpath = self.eval_command_jpath(base_path.as_ref()); + let state = crate::analysis::eval::create_state_with_jpath(&jpath); + let source_name = base_path.map_or_else( + || "".to_string(), + |path| SourcePath::new(SourceFile::new(path.as_path().to_path_buf())).to_string(), + ); + + match state.evaluate_snippet(source_name, expr) { + Ok(val) => { + let json_format = JsonFormat::default(); + match val.manifest(json_format) { + Ok(json_str) => match serde_json::from_str::(&json_str) { + Ok(json) => json, + Err(e) => { + warn!("Failed to parse manifest result as JSON: {}", e); + 
serde_json::Value::String(json_str) + } + }, + Err(e) => serde_json::json!({ + "error": format!("Manifest error: {}", e.error()) + }), + } + } + Err(e) => serde_json::json!({ + "error": format!("Evaluation error: {}", e.error()) + }), + } + } + + fn eval_command_jpath(&self, base_path: Option<&CanonicalPath>) -> Vec { + let config = self.config.read(); + let jpath = base_path.map_or_else( + || config.jpath.clone(), + |base_path| { + let mut roots = effective_import_roots( + base_path.as_path(), + &config.jpath, + config.resolve_paths_with_tanka, + ); + if !config.resolve_paths_with_tanka { + if let Some(dir) = base_path.as_path().parent() { + if !roots.iter().any(|entry| entry == dir) { + roots.push(dir.to_path_buf()); + } + } + } + roots + }, + ); + drop(config); + jpath + } + + fn execute_find_transitive_importers(&self, uri: &str) -> Option { + let uri_parsed: lsp_types::Uri = uri.parse().ok()?; + let path = CanonicalPath::from_uri(&uri_parsed).ok()?; + + let import_graph = self.import_graph.read(); + let importers = import_graph + .file(&path) + .map_or_else(Vec::new, |file| import_graph.transitive_importers(file)); + let mut importer_uris: Vec = importers + .iter() + .filter_map(|file| { + import_graph + .path(*file) + .and_then(|path| path.to_uri().ok().map(|uri| uri.to_string())) + }) + .collect(); + drop(import_graph); + importer_uris.sort(); + + Some(serde_json::json!({ + "file": uri, + "transitiveImporters": importer_uris + })) + } + + fn execute_find_references( + &self, + uri: &str, + line: u32, + character: u32, + include_declaration: bool, + ) -> Option { + let uri_parsed: lsp_types::Uri = uri.parse().ok()?; + let params = ReferenceParams { + text_document_position: TextDocumentPositionParams { + text_document: TextDocumentIdentifier { uri: uri_parsed }, + position: Position { line, character }, + }, + context: ReferenceContext { + include_declaration, + }, + work_done_progress_params: WorkDoneProgressParams::default(), + partial_result_params: 
PartialResultParams::default(), + }; + let refs = self.references(¶ms).unwrap_or_default(); + + serde_json::to_value(refs).ok() + } + + fn execute_show_errors(&self, uri: &str) -> Option { + let uri_parsed: lsp_types::Uri = uri.parse().ok()?; + let path = CanonicalPath::from_uri(&uri_parsed).ok()?; + let doc = self.load_document_for_path(&path)?; + let analysis = self.analyze_document(&path, &doc); + let (enable_lint_diagnostics, evaluator, import_roots) = { + let config = self.config.read(); + let evaluator = config.enable_eval_diagnostics.then(|| { + let eval_config = EvalConfig { + jpath: config.jpath.clone(), + resolve_paths_with_tanka: config.resolve_paths_with_tanka, + }; + Evaluator::new(&eval_config) + }); + let import_roots = effective_import_roots( + path.as_path(), + &config.jpath, + config.resolve_paths_with_tanka, + ); + (config.enable_lint_diagnostics, evaluator, import_roots) + }; + let import_resolution = ImportResolution::new(&path, &import_roots); + let import_occurrences = import_resolution.parse_occurrences(&doc); + + let diagnostics = crate::handlers::compute_diagnostics( + &doc, + &path, + enable_lint_diagnostics, + evaluator.as_ref(), + &uri_parsed, + &analysis, + &import_occurrences, + ); + + let response = lsp_types::PublishDiagnosticsParams { + uri: uri_parsed, + diagnostics, + version: Some(doc.version().0), + }; + serde_json::to_value(response).ok() + } +} diff --git a/crates/jrsonnet-lsp/src/server/async_requests/completion.rs b/crates/jrsonnet-lsp/src/server/async_requests/completion.rs new file mode 100644 index 00000000..b8b54963 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/async_requests/completion.rs @@ -0,0 +1,36 @@ +use jrsonnet_lsp_document::CanonicalPath; +use jrsonnet_lsp_handlers as handlers; +use lsp_types::{CompletionParams, CompletionResponse}; + +use super::AsyncRequestContext; +use crate::analysis::tanka::effective_import_roots; + +impl AsyncRequestContext { + pub(crate) fn completion(&self, params: 
&CompletionParams) -> Option { + let uri = ¶ms.text_document_position.text_document.uri; + let position = params.text_document_position.position; + let path = CanonicalPath::from_uri(uri).ok()?; + let doc = self.documents.get(&path)?; + let semantic = self.documents.get_semantic_artifacts(&path); + + let lsp_pos = position.into(); + let analysis = self.analyze_document(&path, &doc); + let config = self.config.read(); + let import_roots = effective_import_roots( + path.as_path(), + &config.jpath, + config.resolve_paths_with_tanka, + ); + drop(config); + + let list = handlers::completion_with_import_roots_and_semantic( + &doc, + lsp_pos, + Some(path.as_path()), + &import_roots, + &analysis, + semantic.as_deref(), + )?; + Some(CompletionResponse::List(list)) + } +} diff --git a/crates/jrsonnet-lsp/src/server/async_requests/goto_declaration.rs b/crates/jrsonnet-lsp/src/server/async_requests/goto_declaration.rs new file mode 100644 index 00000000..02603d02 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/async_requests/goto_declaration.rs @@ -0,0 +1,12 @@ +use lsp_types::{GotoDefinitionParams, GotoDefinitionResponse}; + +use super::{goto_shared::GotoTarget, AsyncRequestContext}; + +impl AsyncRequestContext { + pub(crate) fn goto_declaration( + &self, + params: &GotoDefinitionParams, + ) -> Option { + self.goto_target(params, GotoTarget::Declaration) + } +} diff --git a/crates/jrsonnet-lsp/src/server/async_requests/goto_definition.rs b/crates/jrsonnet-lsp/src/server/async_requests/goto_definition.rs new file mode 100644 index 00000000..78e388bf --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/async_requests/goto_definition.rs @@ -0,0 +1,12 @@ +use lsp_types::{GotoDefinitionParams, GotoDefinitionResponse}; + +use super::{goto_shared::GotoTarget, AsyncRequestContext}; + +impl AsyncRequestContext { + pub(crate) fn goto_definition( + &self, + params: &GotoDefinitionParams, + ) -> Option { + self.goto_target(params, GotoTarget::Definition) + } +} diff --git 
a/crates/jrsonnet-lsp/src/server/async_requests/goto_implementation.rs b/crates/jrsonnet-lsp/src/server/async_requests/goto_implementation.rs new file mode 100644 index 00000000..16dd2591 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/async_requests/goto_implementation.rs @@ -0,0 +1,12 @@ +use lsp_types::{GotoDefinitionParams, GotoDefinitionResponse}; + +use super::{goto_shared::GotoTarget, AsyncRequestContext}; + +impl AsyncRequestContext { + pub(crate) fn goto_implementation( + &self, + params: &GotoDefinitionParams, + ) -> Option { + self.goto_target(params, GotoTarget::Implementation) + } +} diff --git a/crates/jrsonnet-lsp/src/server/async_requests/goto_shared.rs b/crates/jrsonnet-lsp/src/server/async_requests/goto_shared.rs new file mode 100644 index 00000000..1e2c9c61 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/async_requests/goto_shared.rs @@ -0,0 +1,86 @@ +use jrsonnet_lsp_document::CanonicalPath; +use jrsonnet_lsp_handlers as handlers; +use lsp_types::{GotoDefinitionParams, GotoDefinitionResponse, Location}; + +use super::AsyncRequestContext; + +#[derive(Clone, Copy, Debug, Eq, PartialEq)] +pub(super) enum GotoTarget { + Definition, + TypeDefinition, + Declaration, + Implementation, +} + +impl AsyncRequestContext { + pub(super) fn goto_target( + &self, + params: &GotoDefinitionParams, + target: GotoTarget, + ) -> Option { + let uri = ¶ms.text_document_position_params.text_document.uri; + let position = params.text_document_position_params.position; + let path = CanonicalPath::from_uri(uri).ok()?; + let doc = self.documents.get(&path)?; + let lsp_pos = position.into(); + let semantic = self.documents.get_semantic_artifacts(&path); + + let result = match target { + GotoTarget::Definition | GotoTarget::TypeDefinition => { + handlers::goto_definition_with_semantic(&doc, lsp_pos, semantic.as_deref())? + } + GotoTarget::Declaration | GotoTarget::Implementation => { + handlers::goto_declaration_with_semantic(&doc, lsp_pos, semantic.as_deref())? 
+ } + }; + match result { + handlers::DefinitionResult::Local(range) => { + let range = if target == GotoTarget::Implementation { + Self::local_implementation_range(&doc, range).unwrap_or(range) + } else { + range + }; + Some(GotoDefinitionResponse::Scalar(Location { + uri: uri.clone(), + range, + })) + } + handlers::DefinitionResult::Import(import_path) => { + let resolved = self.resolve_import_from_graph(&path, &import_path)?; + let resolved_uri = resolved.to_uri().ok()?; + let range = if target == GotoTarget::Implementation { + self.document_root_expr_range(&resolved).unwrap_or_default() + } else { + lsp_types::Range::default() + }; + Some(GotoDefinitionResponse::Scalar(Location { + uri: resolved_uri, + range, + })) + } + handlers::DefinitionResult::ImportField { + path: import_path, + fields, + } => { + let resolved = self.resolve_import_from_graph(&path, &import_path)?; + let resolved_uri = resolved.to_uri().ok()?; + let locations = self.find_field_in_file(&resolved, &fields); + let range = if target == GotoTarget::Implementation { + locations + .map(|location| location.implementation) + .or_else(|| self.find_export_binding_in_file(&resolved, &fields)) + .or_else(|| self.document_root_expr_range(&resolved)) + .unwrap_or_default() + } else { + locations + .map(|location| location.declaration) + .or_else(|| self.find_export_binding_in_file(&resolved, &fields))? 
+ }; + Some(GotoDefinitionResponse::Scalar(Location { + uri: resolved_uri, + range, + })) + } + } + } +} diff --git a/crates/jrsonnet-lsp/src/server/async_requests/goto_type_definition.rs b/crates/jrsonnet-lsp/src/server/async_requests/goto_type_definition.rs new file mode 100644 index 00000000..2867af79 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/async_requests/goto_type_definition.rs @@ -0,0 +1,12 @@ +use lsp_types::{GotoDefinitionParams, GotoDefinitionResponse}; + +use super::{goto_shared::GotoTarget, AsyncRequestContext}; + +impl AsyncRequestContext { + pub(crate) fn goto_type_definition( + &self, + params: &GotoDefinitionParams, + ) -> Option { + self.goto_target(params, GotoTarget::TypeDefinition) + } +} diff --git a/crates/jrsonnet-lsp/src/server/async_requests/hover.rs b/crates/jrsonnet-lsp/src/server/async_requests/hover.rs new file mode 100644 index 00000000..6e697a59 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/async_requests/hover.rs @@ -0,0 +1,25 @@ +use jrsonnet_lsp_document::CanonicalPath; +use jrsonnet_lsp_handlers as handlers; +use lsp_types::{Hover, HoverParams}; + +use super::AsyncRequestContext; + +impl AsyncRequestContext { + pub(crate) fn hover(&self, params: &HoverParams) -> Option { + let uri = ¶ms.text_document_position_params.text_document.uri; + let position = params.text_document_position_params.position; + let path = CanonicalPath::from_uri(uri).ok()?; + let doc = self.documents.get(&path)?; + let lsp_pos = position.into(); + let analysis = self.analyze_document(&path, &doc); + let import_field_type_resolver = |import_path: &str, fields: &[String]| { + self.resolve_import_field_type(&path, import_path, fields) + }; + handlers::hover_with_import_field_type( + &doc, + lsp_pos, + &analysis, + Some(&import_field_type_resolver), + ) + } +} diff --git a/crates/jrsonnet-lsp/src/server/async_requests/import_lookup.rs b/crates/jrsonnet-lsp/src/server/async_requests/import_lookup.rs new file mode 100644 index 00000000..206da350 --- 
/dev/null +++ b/crates/jrsonnet-lsp/src/server/async_requests/import_lookup.rs @@ -0,0 +1,311 @@ +use jrsonnet_lsp_document::{to_lsp_range, CanonicalPath, Document, LspRange}; +use jrsonnet_lsp_handlers as handlers; +use jrsonnet_lsp_types::{Ty, TyData}; +use jrsonnet_rowan_parser::{ + nodes::{Bind, FieldName, ForSpec, Member, ObjBody, Param}, + AstNode, AstToken, SyntaxKind, +}; + +use super::AsyncRequestContext; + +#[derive(Debug, Clone, Copy)] +pub(super) struct ImportedFieldLocations { + pub(super) declaration: lsp_types::Range, + pub(super) implementation: lsp_types::Range, +} + +impl AsyncRequestContext { + pub(super) fn resolve_import_from_graph( + &self, + from: &CanonicalPath, + import: &str, + ) -> Option { + let import_graph = self.import_graph.read(); + let from_file = import_graph.file(from)?; + import_graph + .imports(from_file) + .iter() + .find(|entry| entry.import_path == import) + .and_then(|entry| entry.resolved_path.clone()) + } + + fn resolve_import_from_fs(from: &CanonicalPath, import: &str) -> Option { + let import_path = std::path::Path::new(import); + let candidate = if import_path.is_absolute() { + import_path.to_path_buf() + } else if import.starts_with("./") || import.starts_with("../") { + from.as_path().parent()?.join(import_path) + } else { + return None; + }; + + CanonicalPath::try_from_path(&candidate).ok() + } + + pub(super) fn resolve_import_path( + &self, + from: &CanonicalPath, + import: &str, + ) -> Option { + if import.starts_with("./") || import.starts_with("../") || import.starts_with('/') { + return Self::resolve_import_from_fs(from, import) + .or_else(|| self.resolve_import_from_graph(from, import)); + } + + self.resolve_import_from_graph(from, import) + .or_else(|| Self::resolve_import_from_fs(from, import)) + } + + pub(super) fn resolve_import_field_type( + &self, + from: &CanonicalPath, + import_path: &str, + fields: &[String], + ) -> Option { + let resolved = self.resolve_import_path(from, import_path)?; + let doc = 
self.load_document_for_path(&resolved)?; + let analysis = self.analyze_document(&resolved, &doc); + let ty = Self::type_for_field_path(&analysis, analysis.document_type(), fields)?; + Some(analysis.display_for_hover(ty)) + } + + fn type_for_field_path( + analysis: &jrsonnet_lsp_inference::TypeAnalysis, + root_ty: Ty, + fields: &[String], + ) -> Option { + fields.iter().try_fold(root_ty, |ty, field| { + Self::type_for_field(analysis, ty, field) + }) + } + + fn type_for_field( + analysis: &jrsonnet_lsp_inference::TypeAnalysis, + ty: Ty, + field: &str, + ) -> Option { + match analysis.get_data(ty) { + TyData::Any => Some(Ty::ANY), + TyData::Object(obj) => obj + .get_field(field) + .map(|field_def| field_def.ty) + .or_else(|| obj.has_unknown.then_some(Ty::ANY)), + TyData::AttrsOf { value } => Some(value), + TyData::Union(types) => { + let variants: Vec<_> = types + .into_iter() + .filter_map(|variant| Self::type_for_field(analysis, variant, field)) + .collect(); + if variants.is_empty() { + None + } else { + Some(analysis.union(variants)) + } + } + TyData::Sum(types) => { + let variants: Vec<_> = types + .into_iter() + .filter_map(|variant| Self::type_for_field(analysis, variant, field)) + .collect(); + if variants.is_empty() { + None + } else { + Some(analysis.union(variants)) + } + } + _ => None, + } + } + + pub(super) fn load_document_for_path(&self, path: &CanonicalPath) -> Option { + self.documents.get_document(path) + } + + pub(super) fn document_root_expr_range( + &self, + path: &CanonicalPath, + ) -> Option { + let doc = self.load_document_for_path(path)?; + let expr = doc.ast().expr()?; + Some(to_lsp_range( + expr.syntax().text_range(), + doc.line_index(), + doc.text(), + )) + } + + pub(super) fn find_export_binding_in_file( + &self, + path: &CanonicalPath, + fields: &[String], + ) -> Option { + let [field_name] = fields else { + return None; + }; + + let doc = self.load_document_for_path(path)?; + let text = doc.text(); + let line_index = doc.line_index(); + 
+ doc.ast() + .syntax() + .descendants_with_tokens() + .filter_map(jrsonnet_rowan_parser::rowan::NodeOrToken::into_token) + .filter(|token| token.kind() == SyntaxKind::IDENT && token.text() == field_name) + .find_map(|token| { + let position = line_index.position(token.text_range().start().into(), text)?; + match handlers::goto_definition(&doc, position) { + Some(handlers::DefinitionResult::Local(range)) => Some(range), + Some( + handlers::DefinitionResult::Import(_) + | handlers::DefinitionResult::ImportField { .. }, + ) + | None => None, + } + }) + } + + pub(super) fn local_implementation_range( + document: &Document, + declaration: lsp_types::Range, + ) -> Option { + let text = document.text(); + let line_index = document.line_index(); + let declaration_range = line_index.text_range(LspRange::from(declaration), text)?; + let ast = document.ast(); + let node = ast + .syntax() + .descendants() + .find(|candidate| candidate.text_range() == declaration_range)?; + + if let Some(bind) = node.ancestors().find_map(Bind::cast) { + let value_range = match bind { + Bind::BindDestruct(bind) => bind.value()?.syntax().text_range(), + Bind::BindFunction(bind) => bind.value()?.syntax().text_range(), + }; + return Some(to_lsp_range(value_range, line_index, text)); + } + + if let Some(param) = node.ancestors().find_map(Param::cast) { + let default_value = param.expr()?; + return Some(to_lsp_range( + default_value.syntax().text_range(), + line_index, + text, + )); + } + + if let Some(for_spec) = node.ancestors().find_map(ForSpec::cast) { + let source_expr = for_spec.expr()?; + return Some(to_lsp_range( + source_expr.syntax().text_range(), + line_index, + text, + )); + } + + None + } + + /// For a field chain like `foo.bar`, this finds the `bar` field + /// inside the `foo` field of the top-level object. 
+ pub(super) fn find_field_in_file( + &self, + path: &CanonicalPath, + fields: &[String], + ) -> Option { + use jrsonnet_rowan_parser::nodes::ExprBase; + + let doc = self.load_document_for_path(path)?; + + let ast = doc.ast(); + let text = doc.text(); + let line_index = doc.line_index(); + let expr = ast.expr()?; + + let expr_base = expr.expr_base()?; + let ExprBase::ExprObject(obj) = expr_base else { + return None; + }; + let mut current_obj_body = obj.obj_body()?; + + for (i, field_name) in fields.iter().enumerate() { + let is_last = i == fields.len() - 1; + let ObjBody::ObjBodyMemberList(members) = ¤t_obj_body else { + return None; + }; + + let field_target = members.members().find_map(|member| match member { + Member::MemberFieldNormal(field) => { + let name_node = field.field_name()?; + let name = extract_field_name_string(&name_node)?; + if name != *field_name { + return None; + } + + let declaration = name_node.syntax().text_range(); + let value = field.expr()?; + let implementation = value.syntax().text_range(); + let next_body = value.expr_base().and_then(|base| { + let ExprBase::ExprObject(obj) = base else { + return None; + }; + obj.obj_body() + }); + + Some((declaration, implementation, next_body)) + } + Member::MemberFieldMethod(method) => { + let name_node = method.field_name()?; + let name = extract_field_name_string(&name_node)?; + if name != *field_name { + return None; + } + + let declaration = name_node.syntax().text_range(); + let implementation = method + .expr() + .map_or(declaration, |expr| expr.syntax().text_range()); + Some((declaration, implementation, None)) + } + Member::MemberBindStmt(_) | Member::MemberAssertStmt(_) => None, + })?; + + if is_last { + let declaration = to_lsp_range(field_target.0, line_index, text); + let implementation = to_lsp_range(field_target.1, line_index, text); + return Some(ImportedFieldLocations { + declaration, + implementation, + }); + } + + current_obj_body = field_target.2?; + } + + None + } +} + +fn 
extract_field_name_string(name: &FieldName) -> Option { + match name { + FieldName::FieldNameFixed(fixed) => { + if let Some(name_node) = fixed.id() { + if let Some(ident) = name_node.ident_lit() { + return Some(ident.text().to_string()); + } + } + if let Some(text) = fixed.text() { + let s = text.syntax().text(); + let name = s + .trim_start_matches('"') + .trim_start_matches('\'') + .trim_end_matches('"') + .trim_end_matches('\''); + return Some(name.to_string()); + } + None + } + FieldName::FieldNameDynamic(_) => None, + } +} diff --git a/crates/jrsonnet-lsp/src/server/async_requests/inlay_hints.rs b/crates/jrsonnet-lsp/src/server/async_requests/inlay_hints.rs new file mode 100644 index 00000000..d883528b --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/async_requests/inlay_hints.rs @@ -0,0 +1,19 @@ +use jrsonnet_lsp_document::CanonicalPath; +use jrsonnet_lsp_handlers as handlers; +use lsp_types::{InlayHint, InlayHintParams}; + +use super::AsyncRequestContext; + +impl AsyncRequestContext { + pub(crate) fn inlay_hints(&self, params: &InlayHintParams) -> Option> { + let uri = ¶ms.text_document.uri; + let path = CanonicalPath::from_uri(uri).ok()?; + let doc = self.documents.get(&path)?; + let analysis = self.analyze_document(&path, &doc); + let hints = handlers::inlay_hints(&doc, &analysis, params.range); + if hints.is_empty() { + return None; + } + Some(hints) + } +} diff --git a/crates/jrsonnet-lsp/src/server/async_requests/references.rs b/crates/jrsonnet-lsp/src/server/async_requests/references.rs new file mode 100644 index 00000000..1ab19b34 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/async_requests/references.rs @@ -0,0 +1,66 @@ +use jrsonnet_lsp_document::CanonicalPath; +use jrsonnet_lsp_handlers as handlers; +use lsp_types::{Location, ReferenceParams}; + +use super::AsyncRequestContext; + +impl AsyncRequestContext { + pub(crate) fn references(&self, params: &ReferenceParams) -> Option> { + let uri = ¶ms.text_document_position.text_document.uri; + 
let position = params.text_document_position.position; + let path = CanonicalPath::from_uri(uri).ok()?; + let doc = self.documents.get_document(&path)?; + let lsp_pos = position.into(); + let semantic = self.documents.get_semantic_artifacts(&path); + + let include_declaration = params.context.include_declaration; + let mut refs = handlers::find_references_with_semantic( + &doc, + lsp_pos, + uri, + include_declaration, + semantic.as_deref(), + ); + + let importers = { + let import_graph = self.import_graph.read(); + import_graph + .file(&path) + .map_or_else(Vec::new, |file| import_graph.transitive_importers(file)) + }; + + let importer_docs: Vec<_> = importers + .into_iter() + .filter_map(|file| { + let path = self.documents.path(file)?; + let doc = self.documents.get_document(path.as_canonical_path())?; + let semantic = self + .documents + .get_semantic_artifacts(path.as_canonical_path()); + Some((path.as_canonical_path().clone(), doc, semantic)) + }) + .collect(); + let importer_refs: Vec<_> = importer_docs + .iter() + .map(|(k, v, semantic)| (k, v, semantic.as_deref())) + .collect(); + + let cross_refs = { + let import_graph = self.import_graph.read(); + handlers::find_cross_file_references_with_semantic( + &doc, + &path, + lsp_pos, + semantic.as_deref(), + &importer_refs, + &import_graph, + ) + }; + refs.extend(cross_refs); + + if refs.is_empty() { + return None; + } + Some(refs) + } +} diff --git a/crates/jrsonnet-lsp/src/server/async_requests/rename.rs b/crates/jrsonnet-lsp/src/server/async_requests/rename.rs new file mode 100644 index 00000000..735a0c27 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/async_requests/rename.rs @@ -0,0 +1,36 @@ +use jrsonnet_lsp_document::{CanonicalPath, SymbolName}; +use jrsonnet_lsp_handlers as handlers; +use lsp_types::{RenameParams, WorkspaceEdit}; +use tracing::warn; + +use super::AsyncRequestContext; + +impl AsyncRequestContext { + pub(crate) fn rename(&self, params: &RenameParams) -> Option { + let uri = 
¶ms.text_document_position.text_document.uri; + let position = params.text_document_position.position; + let path = CanonicalPath::from_uri(uri).ok()?; + let doc = self.documents.get(&path)?; + + let new_name = match SymbolName::new(¶ms.new_name) { + Ok(name) => name, + Err(err) => { + warn!("rename rejected: {}", err); + return None; + } + }; + + let lsp_pos = position.into(); + let import_graph = self.import_graph.read(); + + handlers::rename_cross_file( + &doc, + lsp_pos, + &new_name, + uri, + &path, + &self.documents, + &import_graph, + ) + } +} diff --git a/crates/jrsonnet-lsp/src/server/async_requests/workspace_symbol.rs b/crates/jrsonnet-lsp/src/server/async_requests/workspace_symbol.rs new file mode 100644 index 00000000..078016f2 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/async_requests/workspace_symbol.rs @@ -0,0 +1,73 @@ +use jrsonnet_lsp_handlers as handlers; +use lsp_types::{SymbolInformation, WorkspaceSymbolParams, WorkspaceSymbolResponse}; +use rayon::prelude::*; + +use super::{super::unique_files, AsyncRequestContext}; + +const MAX_WORKSPACE_SYMBOL_RESULTS: usize = 128; + +impl AsyncRequestContext { + pub(crate) fn workspace_symbol( + &self, + params: &WorkspaceSymbolParams, + ) -> Option { + let query = ¶ms.query; + + let files = { + let import_graph = self.import_graph.read(); + unique_files(import_graph.all_files().chain(self.documents.open_files())) + }; + + let mut all_symbols: Vec = files + .into_par_iter() + .flat_map(|file| { + let Some(path) = self.documents.path(file) else { + return Vec::new(); + }; + let Some(doc) = self.documents.get_document(path.as_canonical_path()) else { + return Vec::new(); + }; + let Ok(uri) = path.as_canonical_path().to_uri() else { + return Vec::new(); + }; + handlers::workspace_symbols_for_document(&doc, &uri, query) + }) + .collect(); + + let query_lower = query.to_lowercase(); + all_symbols.sort_by_cached_key(|symbol| { + let name_lower = symbol.name.to_lowercase(); + ( + 
workspace_symbol_match_rank(&name_lower, &query_lower), + name_lower.len(), + name_lower, + symbol.location.uri.as_str().to_string(), + symbol.location.range.start.line, + symbol.location.range.start.character, + symbol.location.range.end.line, + symbol.location.range.end.character, + ) + }); + if all_symbols.len() > MAX_WORKSPACE_SYMBOL_RESULTS { + all_symbols.truncate(MAX_WORKSPACE_SYMBOL_RESULTS); + } + + if all_symbols.is_empty() { + return None; + } + Some(WorkspaceSymbolResponse::Flat(all_symbols)) + } +} + +fn workspace_symbol_match_rank(name_lower: &str, query_lower: &str) -> u8 { + if query_lower.is_empty() { + return 0; + } + if name_lower == query_lower { + return 0; + } + if name_lower.starts_with(query_lower) { + return 1; + } + 2 +} diff --git a/crates/jrsonnet-lsp/src/server/notifications.rs b/crates/jrsonnet-lsp/src/server/notifications.rs index c76efd0e..c9c17a23 100644 --- a/crates/jrsonnet-lsp/src/server/notifications.rs +++ b/crates/jrsonnet-lsp/src/server/notifications.rs @@ -1,3 +1,13 @@ +use lsp_types::{ + notification::{ + Cancel, DidChangeConfiguration, DidChangeTextDocument, DidChangeWatchedFiles, + DidCloseTextDocument, DidOpenTextDocument, DidSaveTextDocument, Notification as _, + }, + DidChangeConfigurationParams, DidChangeTextDocumentParams, DidChangeWatchedFilesParams, + DidCloseTextDocumentParams, DidOpenTextDocumentParams, DidSaveTextDocumentParams, + FileChangeType, +}; + use super::*; impl Server { diff --git a/crates/jrsonnet-lsp/src/server/request_dispatch.rs b/crates/jrsonnet-lsp/src/server/request_dispatch.rs new file mode 100644 index 00000000..2ff6f843 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/request_dispatch.rs @@ -0,0 +1,371 @@ +use lsp_server::RequestId; +use lsp_types::{ + request::{ + CodeActionRequest, CodeLensRequest, CodeLensResolve, Completion, DocumentHighlightRequest, + DocumentSymbolRequest, ExecuteCommand, Formatting, GotoDeclaration, GotoDefinition, + GotoImplementation, GotoTypeDefinition, 
HoverRequest, InlayHintRequest, + PrepareRenameRequest, References, Rename, Request as _, SemanticTokensFullRequest, + SemanticTokensRangeRequest, Shutdown, SignatureHelpRequest, WorkspaceSymbolRequest, + }, + ExecuteCommandParams, +}; +use serde::{de::DeserializeOwned, Serialize}; +use tracing::{debug, info, warn}; + +use super::{async_requests::AsyncRequestContext, requests, Server}; +use crate::protocol::inflight_requests::IncomingRequest; + +impl Server { + /// Handle an incoming request. + pub(super) fn handle_request(&mut self, req: lsp_server::Request) -> anyhow::Result<()> { + debug!("Handling request: {} (id={})", req.method, req.id); + + let lsp_server::Request { id, method, params } = req; + match method.as_str() { + Shutdown::METHOD => { + let request = self.inflight_requests.begin::(id); + self.handle_shutdown_request(request) + } + GotoDefinition::METHOD + | GotoDeclaration::METHOD + | GotoTypeDefinition::METHOD + | GotoImplementation::METHOD + | HoverRequest::METHOD + | InlayHintRequest::METHOD + | Completion::METHOD + | References::METHOD + | WorkspaceSymbolRequest::METHOD + | Rename::METHOD + | CodeLensRequest::METHOD + | ExecuteCommand::METHOD => self.handle_async_request(id, method.as_str(), params), + DocumentSymbolRequest::METHOD + | DocumentHighlightRequest::METHOD + | CodeActionRequest::METHOD + | SignatureHelpRequest::METHOD + | Formatting::METHOD + | PrepareRenameRequest::METHOD + | SemanticTokensFullRequest::METHOD + | SemanticTokensRangeRequest::METHOD + | CodeLensResolve::METHOD => self.handle_sync_request(id, method.as_str(), params), + _ => { + let request = self.inflight_requests.begin_unknown(id, method.as_str()); + warn!("Unhandled request: {}", request.method()); + let message = format!("Method not found: {}", request.method()); + let _ = self.inflight_requests.send_unknown_err( + request, + lsp_server::ErrorCode::MethodNotFound, + message, + )?; + Ok(()) + } + } + } + + pub(super) fn handle_shutdown_request( + &mut self, + 
request: IncomingRequest, + ) -> anyhow::Result<()> { + info!("Shutdown request received"); + self.shutdown_requested = true; + let _ = self.inflight_requests.send_ok(request, ())?; + Ok(()) + } + + fn handle_sync_request( + &mut self, + id: RequestId, + method: &str, + params: serde_json::Value, + ) -> anyhow::Result<()> { + match method { + DocumentSymbolRequest::METHOD => { + let request = self.inflight_requests.begin::(id); + self.handle_sync_typed( + request, + params, + requests::sync_handlers::document_symbol::handle, + ) + } + DocumentHighlightRequest::METHOD => { + let request = self.inflight_requests.begin::(id); + self.handle_sync_typed( + request, + params, + requests::sync_handlers::document_highlight::handle, + ) + } + CodeActionRequest::METHOD => { + let request = self.inflight_requests.begin::(id); + self.handle_sync_typed( + request, + params, + requests::sync_handlers::code_action::handle, + ) + } + SignatureHelpRequest::METHOD => { + let request = self.inflight_requests.begin::(id); + self.handle_sync_typed( + request, + params, + requests::sync_handlers::signature_help::handle, + ) + } + Formatting::METHOD => { + let request = self.inflight_requests.begin::(id); + self.handle_sync_typed(request, params, requests::sync_handlers::formatting::handle) + } + PrepareRenameRequest::METHOD => { + let request = self.inflight_requests.begin::(id); + self.handle_sync_typed( + request, + params, + requests::sync_handlers::prepare_rename::handle, + ) + } + SemanticTokensFullRequest::METHOD => { + let request = self + .inflight_requests + .begin::(id); + self.handle_sync_typed( + request, + params, + requests::sync_handlers::semantic_tokens_full::handle, + ) + } + SemanticTokensRangeRequest::METHOD => { + let request = self + .inflight_requests + .begin::(id); + self.handle_sync_typed( + request, + params, + requests::sync_handlers::semantic_tokens_range::handle, + ) + } + CodeLensResolve::METHOD => { + let request = self.inflight_requests.begin::(id); + 
self.handle_sync_typed( + request, + params, + requests::sync_handlers::code_lens_resolve::handle, + ) + } + _ => { + let request = self.inflight_requests.begin_unknown(id, method); + warn!("Unhandled request: {}", request.method()); + let message = format!("Method not found: {}", request.method()); + let _ = self.inflight_requests.send_unknown_err( + request, + lsp_server::ErrorCode::MethodNotFound, + message, + )?; + Ok(()) + } + } + } + + fn handle_sync_typed( + &mut self, + request: IncomingRequest, + params: serde_json::Value, + handler: fn(&Self, &R::Params) -> R::Result, + ) -> anyhow::Result<()> + where + R: lsp_types::request::Request, + R::Params: DeserializeOwned, + R::Result: Serialize, + { + let params: R::Params = match serde_json::from_value(params) { + Ok(params) => params, + Err(err) => { + let _ = self.inflight_requests.send_err( + request, + lsp_server::ErrorCode::InvalidParams, + format!("Invalid params for {}: {err}", R::METHOD), + )?; + return Ok(()); + } + }; + + let _ = self + .inflight_requests + .send_ok(request, handler(self, ¶ms))?; + Ok(()) + } + + fn spawn_typed_json_response(&self, request: IncomingRequest, compute: F) + where + R: lsp_types::request::Request, + R::Result: Serialize + Send + 'static, + F: FnOnce() -> R::Result + Send + 'static, + { + let id = request.into_id(); + self.spawn_async_response(id, R::METHOD, move || { + serde_json::to_value(compute()).map_err(Into::into) + }); + } + + fn handle_async_request( + &mut self, + id: RequestId, + method: &str, + params: serde_json::Value, + ) -> anyhow::Result<()> { + match method { + GotoDefinition::METHOD => { + let request = self.inflight_requests.begin::(id); + self.handle_async_typed( + request, + params, + requests::async_handlers::goto_definition::handle, + ) + } + GotoDeclaration::METHOD => { + let request = self.inflight_requests.begin::(id); + self.handle_async_typed( + request, + params, + requests::async_handlers::goto_declaration::handle, + ) + } + 
GotoImplementation::METHOD => { + let request = self.inflight_requests.begin::(id); + self.handle_async_typed( + request, + params, + requests::async_handlers::goto_implementation::handle, + ) + } + GotoTypeDefinition::METHOD => { + let request = self.inflight_requests.begin::(id); + self.handle_async_typed( + request, + params, + requests::async_handlers::goto_type_definition::handle, + ) + } + HoverRequest::METHOD => { + let request = self.inflight_requests.begin::(id); + self.handle_async_typed(request, params, requests::async_handlers::hover::handle) + } + InlayHintRequest::METHOD => { + let request = self.inflight_requests.begin::(id); + self.handle_async_typed( + request, + params, + requests::async_handlers::inlay_hints::handle, + ) + } + Completion::METHOD => { + let request = self.inflight_requests.begin::(id); + self.handle_async_typed( + request, + params, + requests::async_handlers::completion::handle, + ) + } + References::METHOD => { + let request = self.inflight_requests.begin::(id); + self.handle_async_typed( + request, + params, + requests::async_handlers::references::handle, + ) + } + WorkspaceSymbolRequest::METHOD => { + let request = self.inflight_requests.begin::(id); + self.handle_async_typed( + request, + params, + requests::async_handlers::workspace_symbol::handle, + ) + } + Rename::METHOD => { + let request = self.inflight_requests.begin::(id); + self.handle_async_typed(request, params, requests::async_handlers::rename::handle) + } + CodeLensRequest::METHOD => { + let request = self.inflight_requests.begin::(id); + self.handle_async_typed( + request, + params, + requests::async_handlers::code_lens::handle, + ) + } + ExecuteCommand::METHOD => { + let request = self.inflight_requests.begin::(id); + self.handle_async_execute_command(request, params) + } + _ => { + let request = self.inflight_requests.begin_unknown(id, method); + warn!("Unhandled request: {}", request.method()); + let message = format!("Method not found: {}", request.method()); 
+ let _ = self.inflight_requests.send_unknown_err( + request, + lsp_server::ErrorCode::MethodNotFound, + message, + )?; + Ok(()) + } + } + } + + fn handle_async_typed( + &mut self, + request: IncomingRequest, + params: serde_json::Value, + handler: fn(&AsyncRequestContext, &R::Params) -> R::Result, + ) -> anyhow::Result<()> + where + R: lsp_types::request::Request, + R::Params: DeserializeOwned + Send + 'static, + R::Result: Serialize + Send + 'static, + { + let params: R::Params = match serde_json::from_value(params) { + Ok(params) => params, + Err(err) => { + let _ = self.inflight_requests.send_err( + request, + lsp_server::ErrorCode::InvalidParams, + format!("Invalid params for {}: {err}", R::METHOD), + )?; + return Ok(()); + } + }; + let context = self.async_request_context(); + self.spawn_typed_json_response(request, move || handler(&context, ¶ms)); + Ok(()) + } + + fn handle_async_execute_command( + &mut self, + request: IncomingRequest, + params: serde_json::Value, + ) -> anyhow::Result<()> { + let params: ExecuteCommandParams = match serde_json::from_value(params) { + Ok(params) => params, + Err(err) => { + let _ = self.inflight_requests.send_err( + request, + lsp_server::ErrorCode::InvalidParams, + format!("Invalid params for {}: {err}", ExecuteCommand::METHOD), + )?; + return Ok(()); + } + }; + if !Self::is_supported_execute_command(¶ms.command) { + let _ = self.inflight_requests.send_err( + request, + lsp_server::ErrorCode::InvalidParams, + format!("Unknown execute command: {}", params.command), + )?; + return Ok(()); + } + + let context = self.async_request_context(); + self.spawn_typed_json_response(request, move || { + requests::async_handlers::execute_command::handle(&context, ¶ms) + }); + Ok(()) + } +} diff --git a/crates/jrsonnet-lsp/src/server/requests/async_handlers/code_lens.rs b/crates/jrsonnet-lsp/src/server/requests/async_handlers/code_lens.rs new file mode 100644 index 00000000..99848c7e --- /dev/null +++ 
b/crates/jrsonnet-lsp/src/server/requests/async_handlers/code_lens.rs @@ -0,0 +1,10 @@ +use lsp_types::{CodeLens, CodeLensParams}; + +use crate::server::async_requests::AsyncRequestContext; + +pub(crate) fn handle( + context: &AsyncRequestContext, + params: &CodeLensParams, +) -> Option<Vec<CodeLens>> { + context.code_lens(params) +} diff --git a/crates/jrsonnet-lsp/src/server/requests/async_handlers/completion.rs b/crates/jrsonnet-lsp/src/server/requests/async_handlers/completion.rs new file mode 100644 index 00000000..40db9230 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/requests/async_handlers/completion.rs @@ -0,0 +1,10 @@ +use lsp_types::{CompletionParams, CompletionResponse}; + +use crate::server::async_requests::AsyncRequestContext; + +pub(crate) fn handle( + context: &AsyncRequestContext, + params: &CompletionParams, +) -> Option<CompletionResponse> { + context.completion(params) +} diff --git a/crates/jrsonnet-lsp/src/server/requests/async_handlers/execute_command.rs b/crates/jrsonnet-lsp/src/server/requests/async_handlers/execute_command.rs new file mode 100644 index 00000000..29c15e30 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/requests/async_handlers/execute_command.rs @@ -0,0 +1,10 @@ +use lsp_types::ExecuteCommandParams; + +use crate::server::async_requests::AsyncRequestContext; + +pub(crate) fn handle( + context: &AsyncRequestContext, + params: &ExecuteCommandParams, +) -> Option<serde_json::Value> { + context.execute_command(params) +} diff --git a/crates/jrsonnet-lsp/src/server/requests/async_handlers/goto_declaration.rs b/crates/jrsonnet-lsp/src/server/requests/async_handlers/goto_declaration.rs new file mode 100644 index 00000000..dc777900 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/requests/async_handlers/goto_declaration.rs @@ -0,0 +1,10 @@ +use lsp_types::{GotoDefinitionParams, GotoDefinitionResponse}; + +use crate::server::async_requests::AsyncRequestContext; + +pub(crate) fn handle( + context: &AsyncRequestContext, + params: &GotoDefinitionParams, +) -> Option<GotoDefinitionResponse> { + 
context.goto_declaration(params) +} diff --git a/crates/jrsonnet-lsp/src/server/requests/async_handlers/goto_definition.rs b/crates/jrsonnet-lsp/src/server/requests/async_handlers/goto_definition.rs new file mode 100644 index 00000000..9f75d14d --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/requests/async_handlers/goto_definition.rs @@ -0,0 +1,10 @@ +use lsp_types::{GotoDefinitionParams, GotoDefinitionResponse}; + +use crate::server::async_requests::AsyncRequestContext; + +pub(crate) fn handle( + context: &AsyncRequestContext, + params: &GotoDefinitionParams, +) -> Option<GotoDefinitionResponse> { + context.goto_definition(params) +} diff --git a/crates/jrsonnet-lsp/src/server/requests/async_handlers/goto_implementation.rs b/crates/jrsonnet-lsp/src/server/requests/async_handlers/goto_implementation.rs new file mode 100644 index 00000000..d605f538 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/requests/async_handlers/goto_implementation.rs @@ -0,0 +1,10 @@ +use lsp_types::{GotoDefinitionParams, GotoDefinitionResponse}; + +use crate::server::async_requests::AsyncRequestContext; + +pub(crate) fn handle( + context: &AsyncRequestContext, + params: &GotoDefinitionParams, +) -> Option<GotoDefinitionResponse> { + context.goto_implementation(params) +} diff --git a/crates/jrsonnet-lsp/src/server/requests/async_handlers/goto_type_definition.rs b/crates/jrsonnet-lsp/src/server/requests/async_handlers/goto_type_definition.rs new file mode 100644 index 00000000..d3a2a82f --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/requests/async_handlers/goto_type_definition.rs @@ -0,0 +1,10 @@ +use lsp_types::{GotoDefinitionParams, GotoDefinitionResponse}; + +use crate::server::async_requests::AsyncRequestContext; + +pub(crate) fn handle( + context: &AsyncRequestContext, + params: &GotoDefinitionParams, +) -> Option<GotoDefinitionResponse> { + context.goto_type_definition(params) +} diff --git a/crates/jrsonnet-lsp/src/server/requests/async_handlers/hover.rs b/crates/jrsonnet-lsp/src/server/requests/async_handlers/hover.rs new file mode 100644 index 
00000000..459ac12f --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/requests/async_handlers/hover.rs @@ -0,0 +1,7 @@ +use lsp_types::{Hover, HoverParams}; + +use crate::server::async_requests::AsyncRequestContext; + +pub(crate) fn handle(context: &AsyncRequestContext, params: &HoverParams) -> Option<Hover> { + context.hover(params) +} diff --git a/crates/jrsonnet-lsp/src/server/requests/async_handlers/inlay_hints.rs b/crates/jrsonnet-lsp/src/server/requests/async_handlers/inlay_hints.rs new file mode 100644 index 00000000..6fbcbcdc --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/requests/async_handlers/inlay_hints.rs @@ -0,0 +1,10 @@ +use lsp_types::{InlayHint, InlayHintParams}; + +use crate::server::async_requests::AsyncRequestContext; + +pub(crate) fn handle( + context: &AsyncRequestContext, + params: &InlayHintParams, +) -> Option<Vec<InlayHint>> { + context.inlay_hints(params) +} diff --git a/crates/jrsonnet-lsp/src/server/requests/async_handlers/mod.rs b/crates/jrsonnet-lsp/src/server/requests/async_handlers/mod.rs new file mode 100644 index 00000000..e80540a6 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/requests/async_handlers/mod.rs @@ -0,0 +1,12 @@ +pub(crate) mod code_lens; +pub(crate) mod completion; +pub(crate) mod execute_command; +pub(crate) mod goto_declaration; +pub(crate) mod goto_definition; +pub(crate) mod goto_implementation; +pub(crate) mod goto_type_definition; +pub(crate) mod hover; +pub(crate) mod inlay_hints; +pub(crate) mod references; +pub(crate) mod rename; +pub(crate) mod workspace_symbol; diff --git a/crates/jrsonnet-lsp/src/server/requests/async_handlers/references.rs b/crates/jrsonnet-lsp/src/server/requests/async_handlers/references.rs new file mode 100644 index 00000000..f31b1f74 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/requests/async_handlers/references.rs @@ -0,0 +1,10 @@ +use lsp_types::{Location, ReferenceParams}; + +use crate::server::async_requests::AsyncRequestContext; + +pub(crate) fn handle( + context: &AsyncRequestContext, 
+ params: &ReferenceParams, +) -> Option<Vec<Location>> { + context.references(params) +} diff --git a/crates/jrsonnet-lsp/src/server/requests/async_handlers/rename.rs b/crates/jrsonnet-lsp/src/server/requests/async_handlers/rename.rs new file mode 100644 index 00000000..400ee228 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/requests/async_handlers/rename.rs @@ -0,0 +1,10 @@ +use lsp_types::{RenameParams, WorkspaceEdit}; + +use crate::server::async_requests::AsyncRequestContext; + +pub(crate) fn handle( + context: &AsyncRequestContext, + params: &RenameParams, +) -> Option<WorkspaceEdit> { + context.rename(params) +} diff --git a/crates/jrsonnet-lsp/src/server/requests/async_handlers/workspace_symbol.rs b/crates/jrsonnet-lsp/src/server/requests/async_handlers/workspace_symbol.rs new file mode 100644 index 00000000..cb552354 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/requests/async_handlers/workspace_symbol.rs @@ -0,0 +1,10 @@ +use lsp_types::{WorkspaceSymbolParams, WorkspaceSymbolResponse}; + +use crate::server::async_requests::AsyncRequestContext; + +pub(crate) fn handle( + context: &AsyncRequestContext, + params: &WorkspaceSymbolParams, +) -> Option<WorkspaceSymbolResponse> { + context.workspace_symbol(params) +} diff --git a/crates/jrsonnet-lsp/src/server/requests/mod.rs b/crates/jrsonnet-lsp/src/server/requests/mod.rs new file mode 100644 index 00000000..77c1357e --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/requests/mod.rs @@ -0,0 +1,7 @@ +//! Request handler routing modules. +//! +//! Async policy: request handlers should default to async execution unless +//! they are cheap, local lookups with predictable low latency. 
+ +pub(crate) mod async_handlers; +pub(crate) mod sync_handlers; diff --git a/crates/jrsonnet-lsp/src/server/requests/sync_handlers/code_action.rs b/crates/jrsonnet-lsp/src/server/requests/sync_handlers/code_action.rs new file mode 100644 index 00000000..7c0ae2df --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/requests/sync_handlers/code_action.rs @@ -0,0 +1,26 @@ +use jrsonnet_lsp_document::CanonicalPath; +use jrsonnet_lsp_handlers as handlers; +use lsp_types::{CodeActionParams, CodeActionResponse}; + +use crate::server::Server; + +pub(crate) fn handle(server: &Server, params: &CodeActionParams) -> Option<CodeActionResponse> { + let uri = &params.text_document.uri; + let path = CanonicalPath::from_uri(uri).ok()?; + let actions = { + let doc = server.documents.get(&path)?; + let code_action_config = server.config.read().code_actions; + handlers::code_actions( + &doc, + uri, + params.range, + &params.context, + &code_action_config, + ) + }; + if actions.is_empty() { + return None; + } + + Some(actions) +} diff --git a/crates/jrsonnet-lsp/src/server/requests/sync_handlers/code_lens_resolve.rs b/crates/jrsonnet-lsp/src/server/requests/sync_handlers/code_lens_resolve.rs new file mode 100644 index 00000000..14e2ed69 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/requests/sync_handlers/code_lens_resolve.rs @@ -0,0 +1,8 @@ +use jrsonnet_lsp_handlers as handlers; +use lsp_types::CodeLens; + +use crate::server::Server; + +pub(crate) fn handle(_server: &Server, params: &CodeLens) -> CodeLens { + handlers::resolve_code_lens(params.clone()) +} diff --git a/crates/jrsonnet-lsp/src/server/requests/sync_handlers/document_highlight.rs b/crates/jrsonnet-lsp/src/server/requests/sync_handlers/document_highlight.rs new file mode 100644 index 00000000..09022218 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/requests/sync_handlers/document_highlight.rs @@ -0,0 +1,23 @@ +use jrsonnet_lsp_document::CanonicalPath; +use jrsonnet_lsp_handlers as handlers; +use lsp_types::{DocumentHighlight, 
DocumentHighlightParams}; + +use crate::server::Server; + +pub(crate) fn handle( + server: &Server, + params: &DocumentHighlightParams, +) -> Option<Vec<DocumentHighlight>> { + let uri = &params.text_document_position_params.text_document.uri; + let position = params.text_document_position_params.position; + let path = CanonicalPath::from_uri(uri).ok()?; + let doc = server.documents.get(&path)?; + let lsp_pos = position.into(); + + let highlights = handlers::document_highlights(&doc, lsp_pos); + if highlights.is_empty() { + return None; + } + + Some(highlights) +} diff --git a/crates/jrsonnet-lsp/src/server/requests/sync_handlers/document_symbol.rs b/crates/jrsonnet-lsp/src/server/requests/sync_handlers/document_symbol.rs new file mode 100644 index 00000000..1afb4040 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/requests/sync_handlers/document_symbol.rs @@ -0,0 +1,17 @@ +use jrsonnet_lsp_document::CanonicalPath; +use jrsonnet_lsp_handlers as handlers; +use lsp_types::{DocumentSymbolParams, DocumentSymbolResponse}; + +use crate::server::Server; + +pub(crate) fn handle( + server: &Server, + params: &DocumentSymbolParams, +) -> Option<DocumentSymbolResponse> { + let uri = &params.text_document.uri; + let path = CanonicalPath::from_uri(uri).ok()?; + let doc = server.documents.get(&path)?; + + let symbols = handlers::document_symbols(&doc); + Some(DocumentSymbolResponse::Nested(symbols)) +} diff --git a/crates/jrsonnet-lsp/src/server/requests/sync_handlers/formatting.rs b/crates/jrsonnet-lsp/src/server/requests/sync_handlers/formatting.rs new file mode 100644 index 00000000..0a782bbe --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/requests/sync_handlers/formatting.rs @@ -0,0 +1,17 @@ +use jrsonnet_lsp_document::CanonicalPath; +use jrsonnet_lsp_handlers as handlers; +use lsp_types::{DocumentFormattingParams, TextEdit}; + +use crate::server::Server; + +pub(crate) fn handle(server: &Server, params: &DocumentFormattingParams) -> Option<Vec<TextEdit>> { + let uri = &params.text_document.uri; + let path = 
CanonicalPath::from_uri(uri).ok()?; + let doc = server.documents.get(&path)?; + + let config = server.config.read().formatting.clone(); + let context = + handlers::FormattingContext::for_document(path.as_path(), &server.workspace_roots); + + handlers::format_document_with_config(doc.text(), &config, context) +} diff --git a/crates/jrsonnet-lsp/src/server/requests/sync_handlers/mod.rs b/crates/jrsonnet-lsp/src/server/requests/sync_handlers/mod.rs new file mode 100644 index 00000000..72625162 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/requests/sync_handlers/mod.rs @@ -0,0 +1,9 @@ +pub(crate) mod code_action; +pub(crate) mod code_lens_resolve; +pub(crate) mod document_highlight; +pub(crate) mod document_symbol; +pub(crate) mod formatting; +pub(crate) mod prepare_rename; +pub(crate) mod semantic_tokens_full; +pub(crate) mod semantic_tokens_range; +pub(crate) mod signature_help; diff --git a/crates/jrsonnet-lsp/src/server/requests/sync_handlers/prepare_rename.rs b/crates/jrsonnet-lsp/src/server/requests/sync_handlers/prepare_rename.rs new file mode 100644 index 00000000..c2de3f8c --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/requests/sync_handlers/prepare_rename.rs @@ -0,0 +1,19 @@ +use jrsonnet_lsp_document::CanonicalPath; +use jrsonnet_lsp_handlers as handlers; +use lsp_types::{PrepareRenameResponse, TextDocumentPositionParams}; + +use crate::server::Server; + +pub(crate) fn handle( + server: &Server, + params: &TextDocumentPositionParams, +) -> Option<PrepareRenameResponse> { + let uri = &params.text_document.uri; + let position = params.position; + let path = CanonicalPath::from_uri(uri).ok()?; + let doc = server.documents.get(&path)?; + + let lsp_pos = position.into(); + + handlers::prepare_rename(&doc, lsp_pos) +} diff --git a/crates/jrsonnet-lsp/src/server/requests/sync_handlers/semantic_tokens_full.rs b/crates/jrsonnet-lsp/src/server/requests/sync_handlers/semantic_tokens_full.rs new file mode 100644 index 00000000..6eed56ab --- /dev/null +++ 
b/crates/jrsonnet-lsp/src/server/requests/sync_handlers/semantic_tokens_full.rs @@ -0,0 +1,16 @@ +use jrsonnet_lsp_document::CanonicalPath; +use jrsonnet_lsp_handlers as handlers; +use lsp_types::SemanticTokensParams; + +use crate::server::Server; + +pub(crate) fn handle( + server: &Server, + params: &SemanticTokensParams, +) -> Option<lsp_types::SemanticTokensResult> { + let uri = &params.text_document.uri; + let path = CanonicalPath::from_uri(uri).ok()?; + let doc = server.documents.get(&path)?; + + Some(handlers::semantic_tokens(&doc).into()) +} diff --git a/crates/jrsonnet-lsp/src/server/requests/sync_handlers/semantic_tokens_range.rs b/crates/jrsonnet-lsp/src/server/requests/sync_handlers/semantic_tokens_range.rs new file mode 100644 index 00000000..de8f773c --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/requests/sync_handlers/semantic_tokens_range.rs @@ -0,0 +1,16 @@ +use jrsonnet_lsp_document::CanonicalPath; +use jrsonnet_lsp_handlers as handlers; +use lsp_types::SemanticTokensRangeParams; + +use crate::server::Server; + +pub(crate) fn handle( + server: &Server, + params: &SemanticTokensRangeParams, +) -> Option<lsp_types::SemanticTokensRangeResult> { + let uri = &params.text_document.uri; + let path = CanonicalPath::from_uri(uri).ok()?; + let doc = server.documents.get(&path)?; + + Some(handlers::semantic_tokens_range(&doc, params.range).into()) +} diff --git a/crates/jrsonnet-lsp/src/server/requests/sync_handlers/signature_help.rs b/crates/jrsonnet-lsp/src/server/requests/sync_handlers/signature_help.rs new file mode 100644 index 00000000..cf5ce9ab --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/requests/sync_handlers/signature_help.rs @@ -0,0 +1,16 @@ +use jrsonnet_lsp_document::CanonicalPath; +use jrsonnet_lsp_handlers as handlers; +use lsp_types::{SignatureHelp, SignatureHelpParams}; + +use crate::server::Server; + +pub(crate) fn handle(server: &Server, params: &SignatureHelpParams) -> Option<SignatureHelp> { + let uri = &params.text_document_position_params.text_document.uri; + let position = params.text_document_position_params.position; + 
let path = CanonicalPath::from_uri(uri).ok()?; + let doc = server.documents.get(&path)?; + + let lsp_pos = position.into(); + + handlers::signature_help(&doc, lsp_pos) +} diff --git a/crates/jrsonnet-lsp/src/server/watched_files.rs b/crates/jrsonnet-lsp/src/server/watched_files.rs index 6ffff8ab..f68de860 100644 --- a/crates/jrsonnet-lsp/src/server/watched_files.rs +++ b/crates/jrsonnet-lsp/src/server/watched_files.rs @@ -1,3 +1,10 @@ +use lsp_types::{ + notification::{DidChangeWatchedFiles, Notification as _}, + request::RegisterCapability, + DidChangeWatchedFilesRegistrationOptions, FileSystemWatcher, GlobPattern, NumberOrString, + Registration, RegistrationParams, RelativePattern, +}; + use super::*; impl Server { From 93152860dabe8e09618ff570e876ce9756fb8acd Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Fri, 13 Feb 2026 18:00:57 +0000 Subject: [PATCH 106/210] refactor(lsp): route formatting and tokens through async handlers Move high-cost request paths off the sync request loop and onto async request execution. What changed: - Reclassify these requests from sync to async dispatch in `server/request_dispatch.rs`: - `textDocument/formatting` - `textDocument/semanticTokens/full` - `textDocument/semanticTokens/range` - Add async handler files for each request under `server/requests/async_handlers/`. - Add `AsyncRequestContext` methods in new per-feature modules: - `async_requests/formatting.rs` - `async_requests/semantic_tokens_full.rs` - `async_requests/semantic_tokens_range.rs` - Extend `AsyncRequestContext` with `workspace_roots` so formatting keeps existing workspace-aware formatter resolution behavior. - Remove the old sync-handler files for formatting and semantic tokens, and drop them from `sync_handlers/mod.rs`. Why: - Formatting shells out to external formatter binaries and can block. - Semantic token computation walks full AST token streams and can be expensive on large files. - Async routing keeps these operations off the main request path. 
Validation: - `make fmt` - `nix fmt` - `cargo check -p jrsonnet-lsp` - `cargo clippy -p jrsonnet-lsp -p jrsonnet-lsp-check -p jrsonnet-lsp-document -p jrsonnet-lsp-handlers -p jrsonnet-lsp-import -p jrsonnet-lsp-inference -p jrsonnet-lsp-scenario -p jrsonnet-lsp-scope -p jrsonnet-lsp-stdlib -p jrsonnet-lsp-types --all-targets --no-deps -- -D warnings` - `cargo test -p jrsonnet-lsp --test integration_test features::test_semantic_tokens_range_request` - `cargo test -p jrsonnet-lsp --test e2e_scenario_tests fixture_73_tests_scenarios_runner_missing_step_coverage_yaml` - `cargo test -p jrsonnet-lsp --test e2e_scenario_tests` for semantic token stability after incremental edits. --- crates/jrsonnet-lsp/src/server.rs | 1 + .../jrsonnet-lsp/src/server/async_requests.rs | 8 ++- .../src/server/async_requests/formatting.rs | 19 ++++++ .../async_requests/semantic_tokens_full.rs | 18 ++++++ .../async_requests/semantic_tokens_range.rs | 18 ++++++ .../src/server/request_dispatch.rs | 58 ++++++++++--------- .../requests/async_handlers/formatting.rs | 10 ++++ .../src/server/requests/async_handlers/mod.rs | 3 + .../async_handlers/semantic_tokens_full.rs | 10 ++++ .../async_handlers/semantic_tokens_range.rs | 10 ++++ .../requests/sync_handlers/formatting.rs | 17 ------ .../src/server/requests/sync_handlers/mod.rs | 3 - .../sync_handlers/semantic_tokens_full.rs | 16 ----- .../sync_handlers/semantic_tokens_range.rs | 16 ----- 14 files changed, 127 insertions(+), 80 deletions(-) create mode 100644 crates/jrsonnet-lsp/src/server/async_requests/formatting.rs create mode 100644 crates/jrsonnet-lsp/src/server/async_requests/semantic_tokens_full.rs create mode 100644 crates/jrsonnet-lsp/src/server/async_requests/semantic_tokens_range.rs create mode 100644 crates/jrsonnet-lsp/src/server/requests/async_handlers/formatting.rs create mode 100644 crates/jrsonnet-lsp/src/server/requests/async_handlers/semantic_tokens_full.rs create mode 100644 
crates/jrsonnet-lsp/src/server/requests/async_handlers/semantic_tokens_range.rs delete mode 100644 crates/jrsonnet-lsp/src/server/requests/sync_handlers/formatting.rs delete mode 100644 crates/jrsonnet-lsp/src/server/requests/sync_handlers/semantic_tokens_full.rs delete mode 100644 crates/jrsonnet-lsp/src/server/requests/sync_handlers/semantic_tokens_range.rs diff --git a/crates/jrsonnet-lsp/src/server.rs b/crates/jrsonnet-lsp/src/server.rs index 762f97c8..619e1d50 100644 --- a/crates/jrsonnet-lsp/src/server.rs +++ b/crates/jrsonnet-lsp/src/server.rs @@ -181,6 +181,7 @@ impl Server { Arc::clone(&self.global_types), Arc::clone(&self.type_cache), Arc::clone(&self.config), + self.workspace_roots.clone(), ) } diff --git a/crates/jrsonnet-lsp/src/server/async_requests.rs b/crates/jrsonnet-lsp/src/server/async_requests.rs index a622b686..10b619a4 100644 --- a/crates/jrsonnet-lsp/src/server/async_requests.rs +++ b/crates/jrsonnet-lsp/src/server/async_requests.rs @@ -1,6 +1,7 @@ mod code_lens; mod commands; mod completion; +mod formatting; mod goto_declaration; mod goto_definition; mod goto_implementation; @@ -11,9 +12,11 @@ mod import_lookup; mod inlay_hints; mod references; mod rename; +mod semantic_tokens_full; +mod semantic_tokens_range; mod workspace_symbol; -use std::sync::Arc; +use std::{path::PathBuf, sync::Arc}; use jrsonnet_lsp_document::{CanonicalPath, Document}; use jrsonnet_lsp_import::ImportGraph; @@ -30,6 +33,7 @@ pub(super) struct AsyncRequestContext { global_types: Arc, type_cache: SharedTypeCache, config: SharedConfig, + workspace_roots: Vec, } impl AsyncRequestContext { @@ -39,6 +43,7 @@ impl AsyncRequestContext { global_types: Arc, type_cache: SharedTypeCache, config: SharedConfig, + workspace_roots: Vec, ) -> Self { Self { documents, @@ -46,6 +51,7 @@ impl AsyncRequestContext { global_types, type_cache, config, + workspace_roots, } } diff --git a/crates/jrsonnet-lsp/src/server/async_requests/formatting.rs 
b/crates/jrsonnet-lsp/src/server/async_requests/formatting.rs new file mode 100644 index 00000000..df0ec001 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/async_requests/formatting.rs @@ -0,0 +1,19 @@ +use jrsonnet_lsp_document::CanonicalPath; +use jrsonnet_lsp_handlers as handlers; +use lsp_types::{DocumentFormattingParams, TextEdit}; + +use super::AsyncRequestContext; + +impl AsyncRequestContext { + pub(crate) fn formatting(&self, params: &DocumentFormattingParams) -> Option> { + let uri = ¶ms.text_document.uri; + let path = CanonicalPath::from_uri(uri).ok()?; + let doc = self.documents.get(&path)?; + + let config = self.config.read().formatting.clone(); + let context = + handlers::FormattingContext::for_document(path.as_path(), &self.workspace_roots); + + handlers::format_document_with_config(doc.text(), &config, context) + } +} diff --git a/crates/jrsonnet-lsp/src/server/async_requests/semantic_tokens_full.rs b/crates/jrsonnet-lsp/src/server/async_requests/semantic_tokens_full.rs new file mode 100644 index 00000000..0ce12d58 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/async_requests/semantic_tokens_full.rs @@ -0,0 +1,18 @@ +use jrsonnet_lsp_document::CanonicalPath; +use jrsonnet_lsp_handlers as handlers; +use lsp_types::{SemanticTokensParams, SemanticTokensResult}; + +use super::AsyncRequestContext; + +impl AsyncRequestContext { + pub(crate) fn semantic_tokens_full( + &self, + params: &SemanticTokensParams, + ) -> Option { + let uri = ¶ms.text_document.uri; + let path = CanonicalPath::from_uri(uri).ok()?; + let doc = self.documents.get(&path)?; + + Some(handlers::semantic_tokens(&doc).into()) + } +} diff --git a/crates/jrsonnet-lsp/src/server/async_requests/semantic_tokens_range.rs b/crates/jrsonnet-lsp/src/server/async_requests/semantic_tokens_range.rs new file mode 100644 index 00000000..cc963ace --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/async_requests/semantic_tokens_range.rs @@ -0,0 +1,18 @@ +use jrsonnet_lsp_document::CanonicalPath; 
+use jrsonnet_lsp_handlers as handlers; +use lsp_types::{SemanticTokensRangeParams, SemanticTokensRangeResult}; + +use super::AsyncRequestContext; + +impl AsyncRequestContext { + pub(crate) fn semantic_tokens_range( + &self, + params: &SemanticTokensRangeParams, + ) -> Option { + let uri = ¶ms.text_document.uri; + let path = CanonicalPath::from_uri(uri).ok()?; + let doc = self.documents.get(&path)?; + + Some(handlers::semantic_tokens_range(&doc, params.range).into()) + } +} diff --git a/crates/jrsonnet-lsp/src/server/request_dispatch.rs b/crates/jrsonnet-lsp/src/server/request_dispatch.rs index 2ff6f843..c8fa2b41 100644 --- a/crates/jrsonnet-lsp/src/server/request_dispatch.rs +++ b/crates/jrsonnet-lsp/src/server/request_dispatch.rs @@ -37,15 +37,15 @@ impl Server { | WorkspaceSymbolRequest::METHOD | Rename::METHOD | CodeLensRequest::METHOD + | Formatting::METHOD + | SemanticTokensFullRequest::METHOD + | SemanticTokensRangeRequest::METHOD | ExecuteCommand::METHOD => self.handle_async_request(id, method.as_str(), params), DocumentSymbolRequest::METHOD | DocumentHighlightRequest::METHOD | CodeActionRequest::METHOD | SignatureHelpRequest::METHOD - | Formatting::METHOD | PrepareRenameRequest::METHOD - | SemanticTokensFullRequest::METHOD - | SemanticTokensRangeRequest::METHOD | CodeLensResolve::METHOD => self.handle_sync_request(id, method.as_str(), params), _ => { let request = self.inflight_requests.begin_unknown(id, method.as_str()); @@ -110,10 +110,6 @@ impl Server { requests::sync_handlers::signature_help::handle, ) } - Formatting::METHOD => { - let request = self.inflight_requests.begin::(id); - self.handle_sync_typed(request, params, requests::sync_handlers::formatting::handle) - } PrepareRenameRequest::METHOD => { let request = self.inflight_requests.begin::(id); self.handle_sync_typed( @@ -122,26 +118,6 @@ impl Server { requests::sync_handlers::prepare_rename::handle, ) } - SemanticTokensFullRequest::METHOD => { - let request = self - .inflight_requests - 
.begin::(id); - self.handle_sync_typed( - request, - params, - requests::sync_handlers::semantic_tokens_full::handle, - ) - } - SemanticTokensRangeRequest::METHOD => { - let request = self - .inflight_requests - .begin::(id); - self.handle_sync_typed( - request, - params, - requests::sync_handlers::semantic_tokens_range::handle, - ) - } CodeLensResolve::METHOD => { let request = self.inflight_requests.begin::(id); self.handle_sync_typed( @@ -292,6 +268,34 @@ impl Server { requests::async_handlers::code_lens::handle, ) } + Formatting::METHOD => { + let request = self.inflight_requests.begin::(id); + self.handle_async_typed( + request, + params, + requests::async_handlers::formatting::handle, + ) + } + SemanticTokensFullRequest::METHOD => { + let request = self + .inflight_requests + .begin::(id); + self.handle_async_typed( + request, + params, + requests::async_handlers::semantic_tokens_full::handle, + ) + } + SemanticTokensRangeRequest::METHOD => { + let request = self + .inflight_requests + .begin::(id); + self.handle_async_typed( + request, + params, + requests::async_handlers::semantic_tokens_range::handle, + ) + } ExecuteCommand::METHOD => { let request = self.inflight_requests.begin::(id); self.handle_async_execute_command(request, params) diff --git a/crates/jrsonnet-lsp/src/server/requests/async_handlers/formatting.rs b/crates/jrsonnet-lsp/src/server/requests/async_handlers/formatting.rs new file mode 100644 index 00000000..5e3832c8 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/requests/async_handlers/formatting.rs @@ -0,0 +1,10 @@ +use lsp_types::{DocumentFormattingParams, TextEdit}; + +use crate::server::async_requests::AsyncRequestContext; + +pub(crate) fn handle( + context: &AsyncRequestContext, + params: &DocumentFormattingParams, +) -> Option> { + context.formatting(params) +} diff --git a/crates/jrsonnet-lsp/src/server/requests/async_handlers/mod.rs b/crates/jrsonnet-lsp/src/server/requests/async_handlers/mod.rs index e80540a6..7b5cd389 100644 
--- a/crates/jrsonnet-lsp/src/server/requests/async_handlers/mod.rs +++ b/crates/jrsonnet-lsp/src/server/requests/async_handlers/mod.rs @@ -1,6 +1,7 @@ pub(crate) mod code_lens; pub(crate) mod completion; pub(crate) mod execute_command; +pub(crate) mod formatting; pub(crate) mod goto_declaration; pub(crate) mod goto_definition; pub(crate) mod goto_implementation; @@ -9,4 +10,6 @@ pub(crate) mod hover; pub(crate) mod inlay_hints; pub(crate) mod references; pub(crate) mod rename; +pub(crate) mod semantic_tokens_full; +pub(crate) mod semantic_tokens_range; pub(crate) mod workspace_symbol; diff --git a/crates/jrsonnet-lsp/src/server/requests/async_handlers/semantic_tokens_full.rs b/crates/jrsonnet-lsp/src/server/requests/async_handlers/semantic_tokens_full.rs new file mode 100644 index 00000000..5f7608b7 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/requests/async_handlers/semantic_tokens_full.rs @@ -0,0 +1,10 @@ +use lsp_types::{SemanticTokensParams, SemanticTokensResult}; + +use crate::server::async_requests::AsyncRequestContext; + +pub(crate) fn handle( + context: &AsyncRequestContext, + params: &SemanticTokensParams, +) -> Option { + context.semantic_tokens_full(params) +} diff --git a/crates/jrsonnet-lsp/src/server/requests/async_handlers/semantic_tokens_range.rs b/crates/jrsonnet-lsp/src/server/requests/async_handlers/semantic_tokens_range.rs new file mode 100644 index 00000000..800cdd00 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/requests/async_handlers/semantic_tokens_range.rs @@ -0,0 +1,10 @@ +use lsp_types::{SemanticTokensRangeParams, SemanticTokensRangeResult}; + +use crate::server::async_requests::AsyncRequestContext; + +pub(crate) fn handle( + context: &AsyncRequestContext, + params: &SemanticTokensRangeParams, +) -> Option { + context.semantic_tokens_range(params) +} diff --git a/crates/jrsonnet-lsp/src/server/requests/sync_handlers/formatting.rs b/crates/jrsonnet-lsp/src/server/requests/sync_handlers/formatting.rs deleted file mode 100644 
index 0a782bbe..00000000 --- a/crates/jrsonnet-lsp/src/server/requests/sync_handlers/formatting.rs +++ /dev/null @@ -1,17 +0,0 @@ -use jrsonnet_lsp_document::CanonicalPath; -use jrsonnet_lsp_handlers as handlers; -use lsp_types::{DocumentFormattingParams, TextEdit}; - -use crate::server::Server; - -pub(crate) fn handle(server: &Server, params: &DocumentFormattingParams) -> Option> { - let uri = ¶ms.text_document.uri; - let path = CanonicalPath::from_uri(uri).ok()?; - let doc = server.documents.get(&path)?; - - let config = server.config.read().formatting.clone(); - let context = - handlers::FormattingContext::for_document(path.as_path(), &server.workspace_roots); - - handlers::format_document_with_config(doc.text(), &config, context) -} diff --git a/crates/jrsonnet-lsp/src/server/requests/sync_handlers/mod.rs b/crates/jrsonnet-lsp/src/server/requests/sync_handlers/mod.rs index 72625162..d9402e79 100644 --- a/crates/jrsonnet-lsp/src/server/requests/sync_handlers/mod.rs +++ b/crates/jrsonnet-lsp/src/server/requests/sync_handlers/mod.rs @@ -2,8 +2,5 @@ pub(crate) mod code_action; pub(crate) mod code_lens_resolve; pub(crate) mod document_highlight; pub(crate) mod document_symbol; -pub(crate) mod formatting; pub(crate) mod prepare_rename; -pub(crate) mod semantic_tokens_full; -pub(crate) mod semantic_tokens_range; pub(crate) mod signature_help; diff --git a/crates/jrsonnet-lsp/src/server/requests/sync_handlers/semantic_tokens_full.rs b/crates/jrsonnet-lsp/src/server/requests/sync_handlers/semantic_tokens_full.rs deleted file mode 100644 index 6eed56ab..00000000 --- a/crates/jrsonnet-lsp/src/server/requests/sync_handlers/semantic_tokens_full.rs +++ /dev/null @@ -1,16 +0,0 @@ -use jrsonnet_lsp_document::CanonicalPath; -use jrsonnet_lsp_handlers as handlers; -use lsp_types::SemanticTokensParams; - -use crate::server::Server; - -pub(crate) fn handle( - server: &Server, - params: &SemanticTokensParams, -) -> Option { - let uri = ¶ms.text_document.uri; - let path = 
CanonicalPath::from_uri(uri).ok()?; - let doc = server.documents.get(&path)?; - - Some(handlers::semantic_tokens(&doc).into()) -} diff --git a/crates/jrsonnet-lsp/src/server/requests/sync_handlers/semantic_tokens_range.rs b/crates/jrsonnet-lsp/src/server/requests/sync_handlers/semantic_tokens_range.rs deleted file mode 100644 index de8f773c..00000000 --- a/crates/jrsonnet-lsp/src/server/requests/sync_handlers/semantic_tokens_range.rs +++ /dev/null @@ -1,16 +0,0 @@ -use jrsonnet_lsp_document::CanonicalPath; -use jrsonnet_lsp_handlers as handlers; -use lsp_types::SemanticTokensRangeParams; - -use crate::server::Server; - -pub(crate) fn handle( - server: &Server, - params: &SemanticTokensRangeParams, -) -> Option { - let uri = ¶ms.text_document.uri; - let path = CanonicalPath::from_uri(uri).ok()?; - let doc = server.documents.get(&path)?; - - Some(handlers::semantic_tokens_range(&doc, params.range).into()) -} From 7d25056dafc5b7b14774f2688aa84ef6f0608de3 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Mon, 16 Feb 2026 13:10:50 +0000 Subject: [PATCH 107/210] refactor(lsp): share semantic token enums across types/handlers/scenario Move SemanticTokenTypeName and SemanticTokenModifierName into jrsonnet-lsp-types and re-export them for cross-crate consumers. What changed: - Added shared semantic token enums in crates/jrsonnet-lsp-types/src/semantic_tokens.rs. - Re-exported shared enums from crates/jrsonnet-lsp-types/src/lib.rs. - Updated handlers semantic token modules to consume shared enums from jrsonnet-lsp-types. - Removed now-unused strum/strum_macros dependencies from jrsonnet-lsp-handlers. - Updated jrsonnet-lsp-scenario to depend on jrsonnet-lsp-types (instead of jrsonnet-lsp-handlers) for semantic token name parsing. - Updated docs/lsp/ARCHITECTURE.md request-routing docs to reflect current async/sync handler assignments and per-handler file structure. 
Validation: - make fmt - cargo check -p jrsonnet-lsp-types -p jrsonnet-lsp-handlers -p jrsonnet-lsp-scenario -p jrsonnet-lsp - cargo test -p jrsonnet-lsp-handlers semantic_tokens:: - cargo test -p jrsonnet-lsp-scenario --- crates/jrsonnet-lsp-handlers/Cargo.toml | 2 - .../src/semantic_tokens/classification.rs | 2 +- .../src/semantic_tokens/encode.rs | 2 +- .../src/semantic_tokens/legend.rs | 67 +------ .../src/semantic_tokens/mod.rs | 6 +- .../src/semantic_tokens/walk.rs | 2 +- crates/jrsonnet-lsp-scenario/Cargo.toml | 2 +- .../src/scenario_script/inputs.rs | 6 +- .../src/scenario_script/parse.rs | 2 +- .../src/semantic_tokens.rs | 2 +- crates/jrsonnet-lsp-types/src/lib.rs | 2 + .../jrsonnet-lsp-types/src/semantic_tokens.rs | 167 ++++++++++++++++++ docs/lsp/ARCHITECTURE.md | 14 +- 13 files changed, 194 insertions(+), 82 deletions(-) create mode 100644 crates/jrsonnet-lsp-types/src/semantic_tokens.rs diff --git a/crates/jrsonnet-lsp-handlers/Cargo.toml b/crates/jrsonnet-lsp-handlers/Cargo.toml index 98059cf4..e9b6f01d 100644 --- a/crates/jrsonnet-lsp-handlers/Cargo.toml +++ b/crates/jrsonnet-lsp-handlers/Cargo.toml @@ -20,8 +20,6 @@ rayon = "1.11.0" serde = { workspace = true, features = ["derive"] } rowan.workspace = true serde_json.workspace = true -strum = "0.26.3" -strum_macros = "0.26.4" tracing = "0.1.44" [lints] diff --git a/crates/jrsonnet-lsp-handlers/src/semantic_tokens/classification.rs b/crates/jrsonnet-lsp-handlers/src/semantic_tokens/classification.rs index b318ebd4..364661a3 100644 --- a/crates/jrsonnet-lsp-handlers/src/semantic_tokens/classification.rs +++ b/crates/jrsonnet-lsp-handlers/src/semantic_tokens/classification.rs @@ -4,7 +4,7 @@ use jrsonnet_rowan_parser::{ AstNode, SyntaxKind, SyntaxNode, SyntaxToken, }; -use super::legend::{SemanticTokenModifierName, SemanticTokenTypeName}; +use super::{SemanticTokenModifierName, SemanticTokenTypeName}; /// Classify a variable reference to determine its token type. 
pub(super) fn classify_variable_reference(token: &SyntaxToken) -> SemanticTokenTypeName { diff --git a/crates/jrsonnet-lsp-handlers/src/semantic_tokens/encode.rs b/crates/jrsonnet-lsp-handlers/src/semantic_tokens/encode.rs index f13dae18..3b83eac4 100644 --- a/crates/jrsonnet-lsp-handlers/src/semantic_tokens/encode.rs +++ b/crates/jrsonnet-lsp-handlers/src/semantic_tokens/encode.rs @@ -2,7 +2,7 @@ use jrsonnet_lsp_document::{Document, LineIndex}; use jrsonnet_rowan_parser::AstNode; use lsp_types::{Range, SemanticToken, SemanticTokens}; -use super::{legend::SemanticTokenTypeName, walk}; +use super::{walk, SemanticTokenTypeName}; fn to_u32(value: usize) -> u32 { u32::try_from(value).unwrap_or(u32::MAX) diff --git a/crates/jrsonnet-lsp-handlers/src/semantic_tokens/legend.rs b/crates/jrsonnet-lsp-handlers/src/semantic_tokens/legend.rs index 465233f5..bd33de11 100644 --- a/crates/jrsonnet-lsp-handlers/src/semantic_tokens/legend.rs +++ b/crates/jrsonnet-lsp-handlers/src/semantic_tokens/legend.rs @@ -1,68 +1,7 @@ use std::fmt::Write as _; +use jrsonnet_lsp_types::{SemanticTokenModifierName, SemanticTokenTypeName}; use lsp_types::{SemanticTokenType, SemanticTokensLegend}; -use strum_macros::{AsRefStr, EnumString, FromRepr}; - -/// Semantic token type with compile-time index. -/// -/// The enum values match the indices in `TOKEN_TYPES`. -#[derive(Debug, Clone, Copy, PartialEq, Eq, EnumString, FromRepr, AsRefStr)] -#[strum(ascii_case_insensitive, serialize_all = "snake_case")] -#[repr(u32)] -pub enum SemanticTokenTypeName { - Namespace = 0, - Parameter = 7, - Variable = 8, - Property = 9, - Function = 12, - Method = 13, - Keyword = 15, - Comment = 17, - String = 18, - Number = 19, - Operator = 21, -} - -impl SemanticTokenTypeName { - #[must_use] - pub const fn as_index(self) -> u32 { - self as u32 - } -} - -/// Semantic token modifier names with their corresponding LSP bit flags. 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, EnumString, FromRepr, AsRefStr)] -#[strum(ascii_case_insensitive, serialize_all = "snake_case")] -#[repr(u32)] -pub enum SemanticTokenModifierName { - Declaration = 0, - Definition = 1, - Readonly = 2, - Static = 3, - Deprecated = 4, - Abstract = 5, - Async = 6, - Modification = 7, - Documentation = 8, - #[strum( - serialize = "default_library", - serialize = "default-library", - serialize = "defaultlibrary" - )] - DefaultLibrary = 9, -} - -impl SemanticTokenModifierName { - #[must_use] - pub const fn as_index(self) -> u32 { - self as u32 - } - - #[must_use] - pub const fn as_bitset(self) -> u32 { - 1 << self.as_index() - } -} /// Semantic token types we support. /// @@ -117,14 +56,14 @@ pub fn semantic_token_reference_markdown() -> String { markdown.push_str("Token types (`index`: `lsp_name`, usage):\n"); for (index, token_type) in TOKEN_TYPES.iter().enumerate() { let used = SemanticTokenTypeName::from_repr(index as u32); - let usage = used.map_or_else(|| "unused".to_owned(), |token| token.as_ref().to_owned()); + let usage = used.map_or_else(|| "unused".to_owned(), |token| token.as_str().to_owned()); let _ = writeln!(markdown, "- `{index}`: `{}` ({usage})", token_type.as_str()); } markdown.push('\n'); markdown.push_str("Token modifiers (`bit`: `lsp_name`, usage):\n"); for (index, modifier) in TOKEN_MODIFIERS.iter().enumerate() { let used = SemanticTokenModifierName::from_repr(index as u32); - let usage = used.map_or_else(|| "unused".to_owned(), |token| token.as_ref().to_owned()); + let usage = used.map_or_else(|| "unused".to_owned(), |token| token.as_str().to_owned()); let _ = writeln!( markdown, "- `1 << {index}`: `{}` ({usage})", diff --git a/crates/jrsonnet-lsp-handlers/src/semantic_tokens/mod.rs b/crates/jrsonnet-lsp-handlers/src/semantic_tokens/mod.rs index d0ab748d..f8ea8e0f 100644 --- a/crates/jrsonnet-lsp-handlers/src/semantic_tokens/mod.rs +++ b/crates/jrsonnet-lsp-handlers/src/semantic_tokens/mod.rs @@ -8,7 
+8,5 @@ mod legend; mod walk; pub use encode::{semantic_tokens, semantic_tokens_range}; -pub use legend::{ - legend, semantic_token_reference_markdown, SemanticTokenModifierName, SemanticTokenTypeName, - TOKEN_MODIFIERS, TOKEN_TYPES, -}; +pub use jrsonnet_lsp_types::{SemanticTokenModifierName, SemanticTokenTypeName}; +pub use legend::{legend, semantic_token_reference_markdown, TOKEN_MODIFIERS, TOKEN_TYPES}; diff --git a/crates/jrsonnet-lsp-handlers/src/semantic_tokens/walk.rs b/crates/jrsonnet-lsp-handlers/src/semantic_tokens/walk.rs index db1a8f6c..d769dd7c 100644 --- a/crates/jrsonnet-lsp-handlers/src/semantic_tokens/walk.rs +++ b/crates/jrsonnet-lsp-handlers/src/semantic_tokens/walk.rs @@ -4,7 +4,7 @@ use jrsonnet_rowan_parser::{SyntaxKind, SyntaxToken}; use super::{ classification::{classify_definition_site, classify_variable_reference, is_stdlib_access}, encode::SemanticTokenBuilder, - legend::{SemanticTokenModifierName, SemanticTokenTypeName}, + SemanticTokenModifierName, SemanticTokenTypeName, }; pub(super) fn visit_token(builder: &mut SemanticTokenBuilder<'_>, token: &SyntaxToken) { diff --git a/crates/jrsonnet-lsp-scenario/Cargo.toml b/crates/jrsonnet-lsp-scenario/Cargo.toml index fe5a326d..3cade743 100644 --- a/crates/jrsonnet-lsp-scenario/Cargo.toml +++ b/crates/jrsonnet-lsp-scenario/Cargo.toml @@ -8,7 +8,7 @@ version.workspace = true [dependencies] crossbeam-channel = "0.5" -jrsonnet-lsp-handlers = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-handlers" } +jrsonnet-lsp-types = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-types" } lsp-server.workspace = true lsp-types.workspace = true serde = { workspace = true, features = ["derive"] } diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_script/inputs.rs b/crates/jrsonnet-lsp-scenario/src/scenario_script/inputs.rs index c81d1dae..08d975a2 100644 --- a/crates/jrsonnet-lsp-scenario/src/scenario_script/inputs.rs +++ b/crates/jrsonnet-lsp-scenario/src/scenario_script/inputs.rs @@ -11,7 +11,7 @@ 
use std::{ str::FromStr, }; -use jrsonnet_lsp_handlers::{SemanticTokenModifierName, SemanticTokenTypeName}; +use jrsonnet_lsp_types::{SemanticTokenModifierName, SemanticTokenTypeName}; use lsp_types::{ CodeAction, CodeActionKind, CodeActionOrCommand, CodeLens, CompletionResponse, Diagnostic, DiagnosticSeverity, DocumentSymbolResponse, GotoDefinitionResponse, Hover, InlayHint, Location, @@ -799,7 +799,7 @@ impl SemanticTokenTypeInput { context: &str, location: &str, ) -> Result { - SemanticTokenTypeName::from_str(&self.0).map_err(|_| { + SemanticTokenTypeName::from_str(&self.0).map_err(|()| { input_err!( "{context}: unknown semantic token type '{}' at {location}", self.0 @@ -818,7 +818,7 @@ impl SemanticTokenModifierInput { context: &str, location: &str, ) -> Result { - SemanticTokenModifierName::from_str(&self.0).map_err(|_| { + SemanticTokenModifierName::from_str(&self.0).map_err(|()| { input_err!( "{context}: unknown semantic token modifier '{}' at {location}", self.0 diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_script/parse.rs b/crates/jrsonnet-lsp-scenario/src/scenario_script/parse.rs index fc00ced1..d49f7593 100644 --- a/crates/jrsonnet-lsp-scenario/src/scenario_script/parse.rs +++ b/crates/jrsonnet-lsp-scenario/src/scenario_script/parse.rs @@ -53,7 +53,7 @@ mod tests { use std::path::PathBuf; use assert_matches::assert_matches; - use jrsonnet_lsp_handlers::SemanticTokenTypeName; + use jrsonnet_lsp_types::SemanticTokenTypeName; use super::{parse_scenario_yaml, ParseScenarioError}; use crate::{ diff --git a/crates/jrsonnet-lsp-scenario/src/semantic_tokens.rs b/crates/jrsonnet-lsp-scenario/src/semantic_tokens.rs index 6efe9172..249b9591 100644 --- a/crates/jrsonnet-lsp-scenario/src/semantic_tokens.rs +++ b/crates/jrsonnet-lsp-scenario/src/semantic_tokens.rs @@ -1,4 +1,4 @@ -use jrsonnet_lsp_handlers::{SemanticTokenModifierName, SemanticTokenTypeName}; +use jrsonnet_lsp_types::{SemanticTokenModifierName, SemanticTokenTypeName}; use 
lsp_types::{SemanticToken, SemanticTokens}; #[derive(Debug, Clone, Copy, PartialEq, Eq)] diff --git a/crates/jrsonnet-lsp-types/src/lib.rs b/crates/jrsonnet-lsp-types/src/lib.rs index 9cc50ef5..09a45ec8 100644 --- a/crates/jrsonnet-lsp-types/src/lib.rs +++ b/crates/jrsonnet-lsp-types/src/lib.rs @@ -23,6 +23,7 @@ pub mod global_store; pub mod local_store; pub mod mut_store; mod operations; +pub mod semantic_tokens; pub mod store; pub mod subst; pub mod unification; @@ -32,6 +33,7 @@ pub use global_store::GlobalTyStore; pub use local_store::LocalTyStore; pub use mut_store::MutStore; pub use operations::*; +pub use semantic_tokens::{SemanticTokenModifierName, SemanticTokenTypeName}; pub use store::{ reset_store, with_store, FieldDefInterned, FieldVis, FunctionData, GlobalTy, NotGlobalTy, NumBounds, ObjectData, ParamInterned, ReturnSpec, Ty, TyConstraints, TyData, TyStore, diff --git a/crates/jrsonnet-lsp-types/src/semantic_tokens.rs b/crates/jrsonnet-lsp-types/src/semantic_tokens.rs new file mode 100644 index 00000000..b0e47c68 --- /dev/null +++ b/crates/jrsonnet-lsp-types/src/semantic_tokens.rs @@ -0,0 +1,167 @@ +//! Shared semantic token type/modifier names used by the LSP stack. + +use std::str::FromStr; + +/// Semantic token type with compile-time index. +/// +/// The enum values match indices in the semantic token legend type list. 
+#[derive(Debug, Clone, Copy, PartialEq, Eq)] +#[repr(u32)] +pub enum SemanticTokenTypeName { + Namespace = 0, + Parameter = 7, + Variable = 8, + Property = 9, + Function = 12, + Method = 13, + Keyword = 15, + Comment = 17, + String = 18, + Number = 19, + Operator = 21, +} + +impl SemanticTokenTypeName { + #[must_use] + pub const fn as_index(self) -> u32 { + self as u32 + } + + #[must_use] + pub const fn from_repr(value: u32) -> Option { + match value { + 0 => Some(Self::Namespace), + 7 => Some(Self::Parameter), + 8 => Some(Self::Variable), + 9 => Some(Self::Property), + 12 => Some(Self::Function), + 13 => Some(Self::Method), + 15 => Some(Self::Keyword), + 17 => Some(Self::Comment), + 18 => Some(Self::String), + 19 => Some(Self::Number), + 21 => Some(Self::Operator), + _ => None, + } + } + + #[must_use] + pub const fn as_str(self) -> &'static str { + match self { + Self::Namespace => "namespace", + Self::Parameter => "parameter", + Self::Variable => "variable", + Self::Property => "property", + Self::Function => "function", + Self::Method => "method", + Self::Keyword => "keyword", + Self::Comment => "comment", + Self::String => "string", + Self::Number => "number", + Self::Operator => "operator", + } + } +} + +impl FromStr for SemanticTokenTypeName { + type Err = (); + + fn from_str(value: &str) -> Result { + match value.to_ascii_lowercase().as_str() { + "namespace" => Ok(Self::Namespace), + "parameter" => Ok(Self::Parameter), + "variable" => Ok(Self::Variable), + "property" => Ok(Self::Property), + "function" => Ok(Self::Function), + "method" => Ok(Self::Method), + "keyword" => Ok(Self::Keyword), + "comment" => Ok(Self::Comment), + "string" => Ok(Self::String), + "number" => Ok(Self::Number), + "operator" => Ok(Self::Operator), + _ => Err(()), + } + } +} + +/// Semantic token modifier names with their corresponding LSP bit flags. 
+#[derive(Debug, Clone, Copy, PartialEq, Eq)] +#[repr(u32)] +pub enum SemanticTokenModifierName { + Declaration = 0, + Definition = 1, + Readonly = 2, + Static = 3, + Deprecated = 4, + Abstract = 5, + Async = 6, + Modification = 7, + Documentation = 8, + DefaultLibrary = 9, +} + +impl SemanticTokenModifierName { + #[must_use] + pub const fn as_index(self) -> u32 { + self as u32 + } + + #[must_use] + pub const fn as_bitset(self) -> u32 { + 1 << self.as_index() + } + + #[must_use] + pub const fn from_repr(value: u32) -> Option { + match value { + 0 => Some(Self::Declaration), + 1 => Some(Self::Definition), + 2 => Some(Self::Readonly), + 3 => Some(Self::Static), + 4 => Some(Self::Deprecated), + 5 => Some(Self::Abstract), + 6 => Some(Self::Async), + 7 => Some(Self::Modification), + 8 => Some(Self::Documentation), + 9 => Some(Self::DefaultLibrary), + _ => None, + } + } + + #[must_use] + pub const fn as_str(self) -> &'static str { + match self { + Self::Declaration => "declaration", + Self::Definition => "definition", + Self::Readonly => "readonly", + Self::Static => "static", + Self::Deprecated => "deprecated", + Self::Abstract => "abstract", + Self::Async => "async", + Self::Modification => "modification", + Self::Documentation => "documentation", + Self::DefaultLibrary => "default-library", + } + } +} + +impl FromStr for SemanticTokenModifierName { + type Err = (); + + fn from_str(value: &str) -> Result { + let normalized = value.to_ascii_lowercase().replace('-', "_"); + match normalized.as_str() { + "declaration" => Ok(Self::Declaration), + "definition" => Ok(Self::Definition), + "readonly" => Ok(Self::Readonly), + "static" => Ok(Self::Static), + "deprecated" => Ok(Self::Deprecated), + "abstract" => Ok(Self::Abstract), + "async" => Ok(Self::Async), + "modification" => Ok(Self::Modification), + "documentation" => Ok(Self::Documentation), + "default_library" | "defaultlibrary" => Ok(Self::DefaultLibrary), + _ => Err(()), + } + } +} diff --git a/docs/lsp/ARCHITECTURE.md 
b/docs/lsp/ARCHITECTURE.md index 197fc8d4..c1a0f11f 100644 --- a/docs/lsp/ARCHITECTURE.md +++ b/docs/lsp/ARCHITECTURE.md @@ -101,10 +101,7 @@ Handled directly on the server thread: - `textDocument/documentHighlight` - `textDocument/codeAction` - `textDocument/signatureHelp` -- `textDocument/formatting` - `textDocument/prepareRename` -- `textDocument/semanticTokens/full` -- `textDocument/semanticTokens/range` - `codeLens/resolve` - `shutdown` @@ -122,6 +119,9 @@ Dispatched via `spawn_async_response` (Rayon): - `workspace/symbol` - `textDocument/rename` - `textDocument/codeLens` +- `textDocument/formatting` +- `textDocument/semanticTokens/full` +- `textDocument/semanticTokens/range` - `workspace/executeCommand` Async handlers run through `AsyncRequestContext` @@ -131,6 +131,14 @@ to documents, import graph, type cache, config, and dependency-aware analysis. import graph, not just currently open buffers. Results are ranked by match quality (exact, then prefix, then substring) and capped to 128 entries. +Both sync and async request routing are organized one file per handler under: + +- `crates/jrsonnet-lsp/src/server/requests/sync_handlers/` +- `crates/jrsonnet-lsp/src/server/requests/async_handlers/` + +`AsyncRequestContext` implementation details are split one file per feature +under `crates/jrsonnet-lsp/src/server/async_requests/`. + ### In-Flight Request Boundary `Server` routes request lifecycle through From 30b2f7deeb0eae19a606d8862604c3813c5c61aa Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Mon, 16 Feb 2026 14:00:53 +0000 Subject: [PATCH 108/210] chore(deps): regenerate Cargo.lock Regenerate Cargo.lock with cargo generate-lockfile to ensure the lock state is up to date and reproducible after recent dependency changes. Verification: - Ran cargo generate-lockfile twice and confirmed no changes on the second run. - Ran make fmt after lockfile refresh. 
--- Cargo.lock | 23 +---------------------- 1 file changed, 1 insertion(+), 22 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 8880392c..0c3a9ce5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1894,8 +1894,6 @@ dependencies = [ "rstest 0.23.0", "serde", "serde_json", - "strum", - "strum_macros", "tempfile", "tracing", ] @@ -1942,7 +1940,7 @@ version = "0.5.0-pre97" dependencies = [ "assert_matches", "crossbeam-channel", - "jrsonnet-lsp-handlers", + "jrsonnet-lsp-types", "lsp-server", "lsp-types", "rowan", @@ -3906,25 +3904,6 @@ version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" -[[package]] -name = "strum" -version = "0.26.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fec0f0aef304996cf250b31b5a10dee7980c85da9d759361292b8bca5a18f06" - -[[package]] -name = "strum_macros" -version = "0.26.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c6bee85a5a24955dc440386795aa378cd9cf82acd5f764469152d2270e581be" -dependencies = [ - "heck", - "proc-macro2", - "quote", - "rustversion", - "syn", -] - [[package]] name = "subtle" version = "2.6.1" From e3903628ab4a24308cf5fa3623b460f36e3dba6b Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Mon, 16 Feb 2026 14:02:18 +0000 Subject: [PATCH 109/210] chore(fmt): apply formatting cleanups Apply formatting-only and import-order cleanups across rtk command modules, evaluator, and benchmark scripts. No behavior changes intended; this is a mechanical tidy-up commit to keep follow-up diffs focused on semantic changes. 
Validation: - make fmt --- cmds/rtk/src/commands/apply.rs | 15 ++++++++------- cmds/rtk/src/commands/prune.rs | 15 ++++++++------- 2 files changed, 16 insertions(+), 14 deletions(-) diff --git a/cmds/rtk/src/commands/apply.rs b/cmds/rtk/src/commands/apply.rs index 4e85c2b0..4abf7cb2 100644 --- a/cmds/rtk/src/commands/apply.rs +++ b/cmds/rtk/src/commands/apply.rs @@ -10,15 +10,16 @@ use clap::{Args, ValueEnum}; use serde::{Deserialize, Serialize}; use tracing::instrument; -use super::diff::ColorMode; -use super::util::{ - build_eval_opts, create_tokio_runtime, extract_manifests, get_or_create_connection, - process_manifests, prompt_confirmation, setup_diff_engine, validate_dry_run, DiffEngineConfig, - UnimplementedArgs, -}; - // Re-export AutoApprove for backwards compatibility pub use super::util::AutoApprove; +use super::{ + diff::ColorMode, + util::{ + build_eval_opts, create_tokio_runtime, extract_manifests, get_or_create_connection, + process_manifests, prompt_confirmation, setup_diff_engine, validate_dry_run, + DiffEngineConfig, UnimplementedArgs, + }, +}; use crate::{ eval::EvalOpts, k8s::{ diff --git a/cmds/rtk/src/commands/prune.rs b/cmds/rtk/src/commands/prune.rs index f1b1a33d..d00e4581 100644 --- a/cmds/rtk/src/commands/prune.rs +++ b/cmds/rtk/src/commands/prune.rs @@ -9,15 +9,16 @@ use anyhow::{Context, Result}; use clap::Args; use tracing::instrument; -use super::diff::ColorMode; -use super::util::{ - build_eval_opts, create_tokio_runtime, extract_manifests, get_or_create_connection, - process_manifests, prompt_confirmation, setup_diff_engine, validate_dry_run, DiffEngineConfig, - UnimplementedArgs, -}; - // Re-export AutoApprove for backwards compatibility pub use super::util::AutoApprove; +use super::{ + diff::ColorMode, + util::{ + build_eval_opts, create_tokio_runtime, extract_manifests, get_or_create_connection, + process_manifests, prompt_confirmation, setup_diff_engine, validate_dry_run, + DiffEngineConfig, UnimplementedArgs, + }, +}; use 
crate::{ eval::EvalOpts, k8s::{ From a23cdc516bdac58d592865f6057193f5e350e660 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Mon, 16 Feb 2026 14:09:51 +0000 Subject: [PATCH 110/210] refactor(lsp-types): split operations into submodules Split the large operations module into focused submodules. Move operator type-checking/result logic and its inline tests into operations/operators.rs. Move type narrowing logic (ty_and/ty_minus/length/field) and its inline tests into operations/logic.rs. Keep operations.rs as a thin entrypoint that re-exports public APIs. No behavior changes intended; verified with: cargo test -p jrsonnet-lsp-types --lib operations --- crates/jrsonnet-lsp-types/src/operations.rs | 1567 +---------------- .../src/operations/logic.rs | 1075 +++++++++++ .../src/operations/operators.rs | 490 ++++++ 3 files changed, 1569 insertions(+), 1563 deletions(-) create mode 100644 crates/jrsonnet-lsp-types/src/operations/logic.rs create mode 100644 crates/jrsonnet-lsp-types/src/operations/operators.rs diff --git a/crates/jrsonnet-lsp-types/src/operations.rs b/crates/jrsonnet-lsp-types/src/operations.rs index 2ebc09c5..5ef5a2c3 100644 --- a/crates/jrsonnet-lsp-types/src/operations.rs +++ b/crates/jrsonnet-lsp-types/src/operations.rs @@ -16,1567 +16,8 @@ //! These operations distribute over unions, following the rule: //! `(A | B) & C = (A & C) | (B & C)` -use jrsonnet_rowan_parser::nodes::{BinaryOperatorKind, UnaryOperatorKind}; +mod logic; +mod operators; -use crate::store::{FieldDefInterned, FieldVis, ObjectData, Ty, TyData, TypeStoreOps}; - -/// Check if a binary operation is valid and return the result type. -/// -/// Returns `Ok(result_ty)` if the operation is valid for the given operand types, -/// or `Err(error_message)` if the operation is invalid. -/// -/// # Errors -/// Returns `Err` when the operand types do not support the requested operator. 
-pub fn binary_op_result_ty( - op: BinaryOperatorKind, - lhs: Ty, - rhs: Ty, - store: &mut S, -) -> Result { - // Any, Never short-circuit - if lhs.is_any() || rhs.is_any() { - return Ok(Ty::ANY); - } - if lhs.is_never() { - return Ok(Ty::NEVER); - } - if rhs.is_never() { - return Ok(Ty::NEVER); - } - - // Check for TypeVar - if let TyData::TypeVar { .. } = store.get_data(lhs) { - return Ok(Ty::ANY); - } - if let TyData::TypeVar { .. } = store.get_data(rhs) { - return Ok(Ty::ANY); - } - - // Handle union types by checking all combinations - if let TyData::Union(lhs_types) = store.get_data(lhs) { - let mut results = Vec::new(); - for lt in lhs_types { - match binary_op_result_ty(op, lt, rhs, store) { - Ok(t) => results.push(t), - Err(e) => return Err(e), - } - } - return Ok(store.union(results)); - } - if let TyData::Union(rhs_types) = store.get_data(rhs) { - let mut results = Vec::new(); - for rt in rhs_types { - match binary_op_result_ty(op, lhs, rt, store) { - Ok(t) => results.push(t), - Err(e) => return Err(e), - } - } - return Ok(store.union(results)); - } - - // Handle Sum (intersection) types - all variants must support the operation - if let TyData::Sum(lhs_types) = store.get_data(lhs) { - let mut results = Vec::new(); - for lt in lhs_types { - match binary_op_result_ty(op, lt, rhs, store) { - Ok(t) => results.push(t), - Err(e) => return Err(e), - } - } - return Ok(store.union(results)); - } - if let TyData::Sum(rhs_types) = store.get_data(rhs) { - let mut results = Vec::new(); - for rt in rhs_types { - match binary_op_result_ty(op, lhs, rt, store) { - Ok(t) => results.push(t), - Err(e) => return Err(e), - } - } - return Ok(store.union(results)); - } - - // Get type data for matching - let lhs_data = store.get_data(lhs); - let rhs_data = store.get_data(rhs); - - match op { - // Arithmetic: (Number, Number) -> Number - BinaryOperatorKind::Minus - | BinaryOperatorKind::Mul - | BinaryOperatorKind::Div - | BinaryOperatorKind::Modulo => { - if 
is_number_ty(&lhs_data) && is_number_ty(&rhs_data) { - Ok(Ty::NUMBER) - } else { - Err(format!( - "operator requires (number, number), got ({}, {})", - store.display(lhs), - store.display(rhs) - )) - } - } - - // Plus: overloaded for number, string, char, array, tuple, object - BinaryOperatorKind::Plus => match (&lhs_data, &rhs_data) { - (d1, d2) if is_number_ty(d1) && is_number_ty(d2) => Ok(Ty::NUMBER), - (TyData::String | TyData::Char | TyData::LiteralString(_), -TyData::String | TyData::Char | TyData::LiteralString(_)) => Ok(Ty::STRING), - (TyData::Array { elem: l, .. }, TyData::Array { elem: r, .. }) => { - let elem = store.union(vec![*l, *r]); - Ok(store.array(elem)) - } - (TyData::Tuple { elems: l }, TyData::Tuple { elems: r }) => { - // Concatenate tuple element types - let mut elements = l.clone(); - elements.extend(r.iter().copied()); - Ok(store.tuple(elements)) - } - (TyData::Array { elem: a, .. }, TyData::Tuple { elems: t }) - | (TyData::Tuple { elems: t }, TyData::Array { elem: a, .. 
}) => { - // Mixed array/tuple concatenation - result is array - let mut types = t.clone(); - types.push(*a); - let elem = store.union(types); - Ok(store.array(elem)) - } - (TyData::Object(l), TyData::Object(r)) => Ok(store.object(ObjectData::merge(l, r))), - (TyData::AttrsOf { value: l }, TyData::AttrsOf { value: r }) => { - let value = store.union(vec![*l, *r]); - Ok(store.attrs_of(value)) - } - (TyData::Object(obj), TyData::AttrsOf { value }) - | (TyData::AttrsOf { value }, TyData::Object(obj)) => { - // Merge object with attrs-of: result is object with fields widened - let fields: Vec<_> = obj - .fields - .iter() - .map(|(name, field)| { - let widened_ty = store.union(vec![field.ty, *value]); - ( - name.clone(), - FieldDefInterned { - ty: widened_ty, - required: field.required, - visibility: field.visibility, - }, - ) - }) - .collect(); - Ok(store.object(ObjectData { - fields, - has_unknown: true, // AttrsOf adds unknown fields - })) - } - _ => Err(format!( - "operator `+` requires matching types (number+number, string+string, array+array, or object+object), got ({}, {})", - store.display(lhs), store.display(rhs) - )), - }, - - // Logical: (Bool, Bool) -> Bool (though Jsonnet actually allows any types) - BinaryOperatorKind::And | BinaryOperatorKind::Or => { - // In Jsonnet, && and || work on any types (short-circuit) - // But we can warn if operands aren't boolean - if is_bool_ty(&lhs_data) && is_bool_ty(&rhs_data) { - Ok(Ty::BOOL) - } else { - // Jsonnet allows this but returns one of the operands - Ok(store.union(vec![lhs, rhs])) - } - } - - // Bitwise: (Number, Number) -> Number - BinaryOperatorKind::BitAnd - | BinaryOperatorKind::BitOr - | BinaryOperatorKind::BitXor - | BinaryOperatorKind::Lhs - | BinaryOperatorKind::Rhs => { - if is_number_ty(&lhs_data) && is_number_ty(&rhs_data) { - Ok(Ty::NUMBER) - } else { - Err(format!( - "bitwise operator requires (number, number), got ({}, {})", - store.display(lhs), - store.display(rhs) - )) - } - } - - // 
Comparison: any types are valid, returns Bool - BinaryOperatorKind::Eq - | BinaryOperatorKind::Ne - | BinaryOperatorKind::Lt - | BinaryOperatorKind::Gt - | BinaryOperatorKind::Le - | BinaryOperatorKind::Ge => Ok(Ty::BOOL), - - // In: (String, Object) -> Bool - BinaryOperatorKind::InKw => { - let lhs_is_string = - matches!(lhs_data, TyData::String | TyData::Char | TyData::LiteralString(_)); - let rhs_is_object = matches!(rhs_data, TyData::Object(_) | TyData::AttrsOf { .. }); - if lhs_is_string && rhs_is_object { - Ok(Ty::BOOL) - } else { - Err(format!( - "operator `in` requires (string, object), got ({}, {})", - store.display(lhs), - store.display(rhs) - )) - } - } - - // Null coalesce: any types, returns union - BinaryOperatorKind::NullCoaelse => Ok(store.union(vec![lhs, rhs])), - - // Internal/error operators - treat as Any - BinaryOperatorKind::MetaObjectApply | BinaryOperatorKind::ErrorNoOperator => Ok(Ty::ANY), - } -} - -/// Check if a unary operation is valid and return the result type. -/// -/// Returns `Ok(result_ty)` if the operation is valid for the given operand type, -/// or `Err(error_message)` if the operation is invalid. -/// -/// # Errors -/// Returns `Err` when the operand type does not support the requested operator. -pub fn unary_op_result_ty( - op: UnaryOperatorKind, - operand: Ty, - store: &mut S, -) -> Result { - // Any, Never short-circuit - if operand.is_any() { - return Ok(Ty::ANY); - } - if operand.is_never() { - return Ok(Ty::NEVER); - } - - // Check for TypeVar - if let TyData::TypeVar { .. 
} = store.get_data(operand) { - return Ok(Ty::ANY); - } - - // Handle union types - if let TyData::Union(types) = store.get_data(operand) { - let mut results = Vec::new(); - for t in types { - match unary_op_result_ty(op, t, store) { - Ok(r) => results.push(r), - Err(e) => return Err(e), - } - } - return Ok(store.union(results)); - } - - let operand_data = store.get_data(operand); - - match op { - UnaryOperatorKind::Not => { - if is_bool_ty(&operand_data) { - Ok(Ty::BOOL) - } else { - Err(format!( - "operator `!` requires boolean, got {}", - store.display(operand) - )) - } - } - UnaryOperatorKind::Minus => { - if is_number_ty(&operand_data) { - Ok(Ty::NUMBER) - } else { - Err(format!( - "operator `-` requires number, got {}", - store.display(operand) - )) - } - } - UnaryOperatorKind::BitNot => { - if is_number_ty(&operand_data) { - Ok(Ty::NUMBER) - } else { - Err(format!( - "operator `~` requires number, got {}", - store.display(operand) - )) - } - } - } -} - -/// Helper to check if `TyData` represents a number type. -fn is_number_ty(data: &TyData) -> bool { - matches!(data, TyData::Number | TyData::BoundedNumber(_)) -} - -/// Helper to check if `TyData` represents a boolean type. -fn is_bool_ty(data: &TyData) -> bool { - matches!(data, TyData::Bool | TyData::True | TyData::False) -} - -/// Concatenate two arrays or tuples. -/// -/// Returns the type of the concatenated result. -pub fn array_concat_ty(left: Ty, right: Ty, store: &mut S) -> Ty { - let left_data = store.get_data(left); - let right_data = store.get_data(right); - - match (left_data, right_data) { - (TyData::Tuple { elems: a }, TyData::Tuple { elems: b }) => { - let mut elements = a; - elements.extend(b); - store.tuple(elements) - } - (TyData::Array { elem: a, .. }, TyData::Array { elem: b, .. }) => { - let elem = store.union(vec![a, b]); - store.array(elem) - } - (TyData::Tuple { elems: t }, TyData::Array { elem: a, .. }) - | (TyData::Array { elem: a, .. 
}, TyData::Tuple { elems: t }) => { - let mut types = t; - types.push(a); - let elem = store.union(types); - store.array(elem) - } - _ => store.array(Ty::ANY), - } -} - -// ============================================================================= -// Type Logic Operations -// ============================================================================= - -/// Compute the intersection of two types (type narrowing). -/// -/// Returns the most specific type that satisfies both constraints. -/// This is the logical AND of types - values must satisfy both. -/// -/// # Examples -/// -/// - `ty_and(Any, Number)` → `Number` -/// - `ty_and(Number, String)` → `Never` (no value is both) -/// - `ty_and(Bool, True)` → `True` -/// - `ty_and(Number | String, Number)` → `Number` -/// -/// # Distribution over Unions -/// -/// This operation distributes over unions: -/// `(A | B) & C = (A & C) | (B & C)` -pub fn ty_and(lhs: Ty, rhs: Ty, store: &mut S) -> Ty { - // Fast paths for special types - if lhs == Ty::NEVER || rhs == Ty::NEVER { - return Ty::NEVER; - } - if lhs == Ty::ANY { - return rhs; - } - if rhs == Ty::ANY { - return lhs; - } - if lhs == rhs { - return lhs; - } - - let lhs_data = store.get_data(lhs); - let rhs_data = store.get_data(rhs); - - // Handle unions: distribute (A | B) & C = (A & C) | (B & C) - if let TyData::Union(types) = lhs_data { - let narrowed: Vec = types - .iter() - .map(|&t| ty_and(t, rhs, store)) - .filter(|&t| t != Ty::NEVER) - .collect(); - return store.union(narrowed); - } - if let TyData::Union(types) = rhs_data { - let narrowed: Vec = types - .iter() - .map(|&t| ty_and(lhs, t, store)) - .filter(|&t| t != Ty::NEVER) - .collect(); - return store.union(narrowed); - } - - // Refresh data after potential recursion - let lhs_data = store.get_data(lhs); - let rhs_data = store.get_data(rhs); - - // Handle literal/subtype relationships - match (&lhs_data, &rhs_data) { - // Bool and its literals - (TyData::Bool, TyData::True) | (TyData::True, 
TyData::Bool) => return Ty::TRUE, - (TyData::Bool, TyData::False) | (TyData::False, TyData::Bool) => return Ty::FALSE, - - // String and Char - (TyData::String, TyData::Char) | (TyData::Char, TyData::String) => return Ty::CHAR, - - // String and LiteralString - (TyData::String, TyData::LiteralString(s)) | (TyData::LiteralString(s), TyData::String) => { - return store.literal_string(s.clone()); - } - - // LiteralString with same value - (TyData::LiteralString(s1), TyData::LiteralString(s2)) => { - if s1 == s2 { - return store.literal_string(s1.clone()); - } - return Ty::NEVER; - } - - // Char and LiteralString of length 1 - (TyData::Char, TyData::LiteralString(s)) | (TyData::LiteralString(s), TyData::Char) => { - if s.chars().count() == 1 { - return store.literal_string(s.clone()); - } - return Ty::NEVER; - } - - _ => {} - } - - // Handle arrays (preserve is_set if both are sets) - if let ( - TyData::Array { - elem: e1, - is_set: s1, - }, - TyData::Array { - elem: e2, - is_set: s2, - }, - ) = (&lhs_data, &rhs_data) - { - let elem = ty_and(*e1, *e2, store); - if elem == Ty::NEVER { - return Ty::NEVER; - } - // Result is a set only if both inputs are sets - if *s1 && *s2 { - return store.array_set(elem); - } - return store.array(elem); - } - - // Handle tuples with arrays - if let (TyData::Tuple { elems }, TyData::Array { elem: arr_elem, .. }) = (&lhs_data, &rhs_data) - { - let narrowed: Vec = elems.iter().map(|&e| ty_and(e, *arr_elem, store)).collect(); - if narrowed.contains(&Ty::NEVER) { - return Ty::NEVER; - } - return store.tuple(narrowed); - } - if let (TyData::Array { elem: arr_elem, .. 
}, TyData::Tuple { elems }) = (&lhs_data, &rhs_data) - { - let narrowed: Vec = elems.iter().map(|&e| ty_and(*arr_elem, e, store)).collect(); - if narrowed.contains(&Ty::NEVER) { - return Ty::NEVER; - } - return store.tuple(narrowed); - } - - // Handle tuples with tuples - if let (TyData::Tuple { elems: e1 }, TyData::Tuple { elems: e2 }) = (&lhs_data, &rhs_data) { - if e1.len() != e2.len() { - return Ty::NEVER; - } - let narrowed: Vec = e1 - .iter() - .zip(e2.iter()) - .map(|(&a, &b)| ty_and(a, b, store)) - .collect(); - if narrowed.contains(&Ty::NEVER) { - return Ty::NEVER; - } - return store.tuple(narrowed); - } - - // Handle objects - if let (TyData::Object(obj1), TyData::Object(obj2)) = (&lhs_data, &rhs_data) { - let mut fields = obj1.fields.clone(); - for (name, def2) in &obj2.fields { - if let Some((_, def1)) = fields.iter_mut().find(|(n, _)| n == name) { - let narrowed_ty = ty_and(def1.ty, def2.ty, store); - *def1 = FieldDefInterned { - ty: narrowed_ty, - required: def1.required || def2.required, - visibility: def1.visibility, - }; - } else { - fields.push((name.clone(), def2.clone())); - } - } - let has_unknown = obj1.has_unknown && obj2.has_unknown; - return store.object(ObjectData { - fields, - has_unknown, - }); - } - - // Handle AttrsOf - if let (TyData::AttrsOf { value: v1 }, TyData::AttrsOf { value: v2 }) = (&lhs_data, &rhs_data) { - let elem = ty_and(*v1, *v2, store); - if elem == Ty::NEVER { - return Ty::NEVER; - } - return store.attrs_of(elem); - } - - // Handle object + AttrsOf - if let (TyData::Object(obj), TyData::AttrsOf { value }) = (&lhs_data, &rhs_data) { - let fields: Vec<_> = obj - .fields - .iter() - .map(|(name, field)| { - let narrowed_ty = ty_and(field.ty, *value, store); - ( - name.clone(), - FieldDefInterned { - ty: narrowed_ty, - required: field.required, - visibility: field.visibility, - }, - ) - }) - .collect(); - return store.object(ObjectData { - fields, - has_unknown: obj.has_unknown, - }); - } - if let (TyData::AttrsOf { value 
}, TyData::Object(obj)) = (&lhs_data, &rhs_data) { - let fields: Vec<_> = obj - .fields - .iter() - .map(|(name, field)| { - let narrowed_ty = ty_and(*value, field.ty, store); - ( - name.clone(), - FieldDefInterned { - ty: narrowed_ty, - required: field.required, - visibility: field.visibility, - }, - ) - }) - .collect(); - return store.object(ObjectData { - fields, - has_unknown: obj.has_unknown, - }); - } - - // Handle BoundedNumber - if let (TyData::Number, TyData::BoundedNumber(bounds)) - | (TyData::BoundedNumber(bounds), TyData::Number) = (&lhs_data, &rhs_data) - { - return store.bounded_number(*bounds); - } - if let (TyData::BoundedNumber(b1), TyData::BoundedNumber(b2)) = (&lhs_data, &rhs_data) { - // Intersection of bounds: take stricter bounds - let min = match (b1.min_f64(), b2.min_f64()) { - (Some(a), Some(b)) => Some(a.max(b)), - (Some(a), None) => Some(a), - (None, Some(b)) => Some(b), - (None, None) => None, - }; - let max = match (b1.max_f64(), b2.max_f64()) { - (Some(a), Some(b)) => Some(a.min(b)), - (Some(a), None) => Some(a), - (None, Some(b)) => Some(b), - (None, None) => None, - }; - // Check for empty range - if let (Some(lo), Some(hi)) = (min, max) { - if lo > hi { - return Ty::NEVER; - } - } - return store.bounded_number(crate::store::NumBounds { - min: min.map(f64::to_bits), - max: max.map(f64::to_bits), - }); - } - - // Handle Sum (intersection) types - if let TyData::Sum(types) = lhs_data { - let narrowed: Vec = types.iter().map(|&t| ty_and(t, rhs, store)).collect(); - if narrowed.contains(&Ty::NEVER) { - return Ty::NEVER; - } - return store.sum(narrowed); - } - if let TyData::Sum(types) = rhs_data { - let narrowed: Vec = types.iter().map(|&t| ty_and(lhs, t, store)).collect(); - if narrowed.contains(&Ty::NEVER) { - return Ty::NEVER; - } - return store.sum(narrowed); - } - - // Handle TypeVar - keep it, may be resolved later - if matches!(lhs_data, TyData::TypeVar { .. }) || matches!(rhs_data, TyData::TypeVar { .. 
}) { - return store.sum(vec![lhs, rhs]); - } - - // Different incompatible concrete types have no intersection - Ty::NEVER -} - -/// Compute the exclusion of one type from another. -/// -/// Returns the type with the constraint removed (difference/minus). -/// This removes values that match `remove` from `base`. -/// -/// # Examples -/// -/// - `ty_minus(Number | String, Number)` → `String` -/// - `ty_minus(Bool, True)` → `False` -/// - `ty_minus(Any, Number)` → `Any` (Any is too general) -/// - `ty_minus(Number, Number)` → `Never` -/// -/// # Distribution over Unions -/// -/// This operation distributes over unions: -/// `(A | B) - C = (A - C) | (B - C)` -pub fn ty_minus(base: Ty, remove: Ty, store: &mut S) -> Ty { - // Fast paths - if base == Ty::NEVER { - return Ty::NEVER; - } - if remove == Ty::NEVER { - return base; - } - if remove == Ty::ANY { - return Ty::NEVER; - } - if base == Ty::ANY { - // Can't remove anything meaningful from Any - return Ty::ANY; - } - if base == remove { - return Ty::NEVER; - } - - let base_data = store.get_data(base); - - // Handle unions: distribute (A | B) - C = (A - C) | (B - C) - if let TyData::Union(types) = base_data { - let remaining: Vec = types - .iter() - .map(|&t| ty_minus(t, remove, store)) - .filter(|&t| t != Ty::NEVER) - .collect(); - return store.union(remaining); - } - - // Special case: Bool minus True/False - let base_data = store.get_data(base); - let remove_data = store.get_data(remove); - match (&base_data, &remove_data) { - (TyData::Bool, TyData::True) => return Ty::FALSE, - (TyData::Bool, TyData::False) => return Ty::TRUE, - (TyData::True | TyData::False, TyData::Bool) => return Ty::NEVER, - _ => {} - } - - // For non-union types, if they don't match the remove type, return unchanged - base -} - -/// Narrow a type to one with a specific length. -/// -/// This is useful for narrowing based on `std.length(x) == n` conditions. 
-/// -/// # Behavior -/// -/// - Arrays become tuples with `n` elements of the same element type -/// - Tuples must have exactly `n` elements (otherwise `Never`) -/// - Strings with length 1 become `Char` -/// - Objects must have exactly `n` fields (if closed) or at least `n` (if open) -/// - Primitives like Number/Bool return `Never` (they don't have length) -/// -/// # Examples -/// -/// - `ty_with_len(Array, 3)` → `[Number, Number, Number]` -/// - `ty_with_len(String, 1)` → `Char` -/// - `ty_with_len([Number, String], 2)` → `[Number, String]` -/// - `ty_with_len([Number, String], 3)` → `Never` -pub fn ty_with_len(ty: Ty, len: usize, store: &mut S) -> Ty { - let data = store.get_data(ty); - match data { - TyData::Any => Ty::ANY, - - TyData::Array { elem, .. } => { - let elems = vec![elem; len]; - store.tuple(elems) - } - - TyData::Tuple { elems } => { - if elems.len() == len { - ty - } else { - Ty::NEVER - } - } - - TyData::Object(obj) => { - match (obj.fields.len().cmp(&len), obj.has_unknown) { - // Exactly right number of fields - (std::cmp::Ordering::Equal, false) => ty, - // Open object with fewer fields - close it at this length - (std::cmp::Ordering::Less | std::cmp::Ordering::Equal, true) => { - if obj.fields.len() == len { - store.object(ObjectData { - fields: obj.fields, - has_unknown: false, - }) - } else { - ty // Can have unknown fields to reach the length - } - } - // Too few fields in closed object, or too many fields - (std::cmp::Ordering::Less, false) | (std::cmp::Ordering::Greater, _) => Ty::NEVER, - } - } - - TyData::Function(func) => { - if func.variadic && func.params.is_empty() { - let params = (0..len) - .map(|idx| crate::store::ParamInterned { - name: format!("arg{idx}"), - ty: Ty::ANY, - has_default: false, - }) - .collect(); - store.function(crate::store::FunctionData { - params, - return_spec: crate::store::ReturnSpec::Fixed(Ty::ANY), - variadic: false, - }) - } else if func.required_count() == len { - ty - } else { - Ty::NEVER - } - } - 
- TyData::AttrsOf { .. } | TyData::TypeVar { .. } => ty, - - TyData::String => { - if len == 1 { - Ty::CHAR - } else { - ty // String can be any length - } - } - - TyData::LiteralString(s) => { - if s.chars().count() == len { - ty - } else { - Ty::NEVER - } - } - - TyData::Char => { - if len == 1 { - ty - } else { - Ty::NEVER - } - } - - TyData::Never - | TyData::Null - | TyData::Bool - | TyData::True - | TyData::False - | TyData::Number - | TyData::BoundedNumber(_) => Ty::NEVER, - - TyData::Union(types) => { - let narrowed: Vec = types - .iter() - .map(|&t| ty_with_len(t, len, store)) - .filter(|&t| t != Ty::NEVER) - .collect(); - store.union(narrowed) - } - - TyData::Sum(types) => { - let narrowed: Vec = types.iter().map(|&t| ty_with_len(t, len, store)).collect(); - if narrowed.contains(&Ty::NEVER) { - Ty::NEVER - } else { - store.sum(narrowed) - } - } - } -} - -/// Narrow a type to one with at least a minimum length. -/// -/// This is useful for narrowing based on `std.length(x) >= n` conditions. -/// -/// # Behavior -/// -/// - Arrays stay arrays (can have any length) -/// - Tuples must have at least `n` elements -/// - Strings stay strings (can have any length) -/// - Literal strings are checked exactly against `n` -/// - Char requires `min <= 1` -/// - Objects with unknown fields stay as-is -/// -/// # Examples -/// -/// - `ty_with_min_len(Array, 3)` → `Array` -/// - `ty_with_min_len([Number, String], 1)` → `[Number, String]` -/// - `ty_with_min_len([Number], 2)` → `Never` -/// - `ty_with_min_len("ok", 3)` → `Never` -/// - `ty_with_min_len(Char, 2)` → `Never` -pub fn ty_with_min_len(ty: Ty, min: usize, store: &mut S) -> Ty { - let data = store.get_data(ty); - match data { - TyData::Any => Ty::ANY, - TyData::Never => Ty::NEVER, - - TyData::Array { .. } - | TyData::Object(_) - | TyData::AttrsOf { .. } - | TyData::String - | TyData::Function(_) - | TyData::TypeVar { .. 
} => ty, - - TyData::LiteralString(s) => { - if s.chars().count() >= min { - ty - } else { - Ty::NEVER - } - } - - TyData::Tuple { elems } => { - if elems.len() >= min { - ty - } else { - Ty::NEVER - } - } - - TyData::Char => { - if min <= 1 { - ty - } else { - Ty::NEVER - } - } - - TyData::Null - | TyData::Bool - | TyData::True - | TyData::False - | TyData::Number - | TyData::BoundedNumber(_) => { - if min == 0 { - ty // Everything has "length >= 0" - } else { - Ty::NEVER - } - } - - TyData::Union(types) => { - let narrowed: Vec = types - .iter() - .map(|&t| ty_with_min_len(t, min, store)) - .filter(|&t| t != Ty::NEVER) - .collect(); - store.union(narrowed) - } - - TyData::Sum(types) => { - let narrowed: Vec = types - .iter() - .map(|&t| ty_with_min_len(t, min, store)) - .collect(); - if narrowed.contains(&Ty::NEVER) { - Ty::NEVER - } else { - store.sum(narrowed) - } - } - } -} - -/// Add a required field to an object type. -/// -/// Returns a new object type with the specified field added. -/// If the field already exists, its type is narrowed with the new type. 
-pub fn ty_with_field(ty: Ty, field: &str, field_ty: Ty, store: &mut S) -> Ty { - let data = store.get_data(ty); - match data { - TyData::Any => { - // Create an open object with this field - store.object(ObjectData { - fields: vec![( - field.to_string(), - FieldDefInterned { - ty: field_ty, - required: true, - visibility: FieldVis::Normal, - }, - )], - has_unknown: true, - }) - } - - TyData::Object(mut obj) => { - if let Some((_, existing)) = obj.fields.iter_mut().find(|(n, _)| n == field) { - let narrowed = ty_and(existing.ty, field_ty, store); - *existing = FieldDefInterned { - ty: narrowed, - required: true, - visibility: existing.visibility, - }; - } else { - obj.fields.push(( - field.to_string(), - FieldDefInterned { - ty: field_ty, - required: true, - visibility: FieldVis::Normal, - }, - )); - } - store.object(obj) - } - - TyData::AttrsOf { value } => { - // AttrsOf with a specific field becomes object with that field - let narrowed = ty_and(value, field_ty, store); - store.object(ObjectData { - fields: vec![( - field.to_string(), - FieldDefInterned { - ty: narrowed, - required: true, - visibility: FieldVis::Normal, - }, - )], - has_unknown: true, - }) - } - - TyData::Union(types) => { - let narrowed: Vec = types - .iter() - .map(|&t| ty_with_field(t, field, field_ty, store)) - .filter(|&t| t != Ty::NEVER) - .collect(); - store.union(narrowed) - } - - // Non-object types can't have fields - _ => Ty::NEVER, - } -} - -#[cfg(test)] -mod tests { - use assert_matches::assert_matches; - - use super::{super::TyStore, *}; - - #[test] - fn test_binary_op_valid_number_plus_ty() { - let mut store = TyStore::new(); - let result = - binary_op_result_ty(BinaryOperatorKind::Plus, Ty::NUMBER, Ty::NUMBER, &mut store); - assert_eq!(result, Ok(Ty::NUMBER)); - } - - #[test] - fn test_binary_op_valid_string_plus_ty() { - let mut store = TyStore::new(); - let result = - binary_op_result_ty(BinaryOperatorKind::Plus, Ty::STRING, Ty::STRING, &mut store); - assert_eq!(result, 
Ok(Ty::STRING)); - } - - #[test] - fn test_binary_op_invalid_string_plus_number_ty() { - let mut store = TyStore::new(); - binary_op_result_ty(BinaryOperatorKind::Plus, Ty::STRING, Ty::NUMBER, &mut store) - .expect_err("String + Number is invalid"); - } - - #[test] - fn test_binary_op_comparison_returns_bool_ty() { - let mut store = TyStore::new(); - let result = - binary_op_result_ty(BinaryOperatorKind::Lt, Ty::NUMBER, Ty::NUMBER, &mut store); - assert_eq!(result, Ok(Ty::BOOL)); - } - - #[test] - fn test_binary_op_with_any_ty() { - let mut store = TyStore::new(); - let result = binary_op_result_ty(BinaryOperatorKind::Plus, Ty::ANY, Ty::NUMBER, &mut store); - assert_eq!(result, Ok(Ty::ANY)); - } - - #[test] - fn test_binary_op_with_never_ty() { - let mut store = TyStore::new(); - let result = - binary_op_result_ty(BinaryOperatorKind::Plus, Ty::NEVER, Ty::NUMBER, &mut store); - assert_eq!(result, Ok(Ty::NEVER)); - } - - #[test] - fn test_unary_op_valid_not_bool_ty() { - let mut store = TyStore::new(); - let result = unary_op_result_ty(UnaryOperatorKind::Not, Ty::BOOL, &mut store); - assert_eq!(result, Ok(Ty::BOOL)); - } - - #[test] - fn test_unary_op_invalid_not_number_ty() { - let mut store = TyStore::new(); - unary_op_result_ty(UnaryOperatorKind::Not, Ty::NUMBER, &mut store) - .expect_err("!Number is invalid"); - } - - #[test] - fn test_unary_op_minus_number_ty() { - let mut store = TyStore::new(); - let result = unary_op_result_ty(UnaryOperatorKind::Minus, Ty::NUMBER, &mut store); - assert_eq!(result, Ok(Ty::NUMBER)); - } - - #[test] - fn test_unary_op_bitnot_number_ty() { - let mut store = TyStore::new(); - let result = unary_op_result_ty(UnaryOperatorKind::BitNot, Ty::NUMBER, &mut store); - assert_eq!(result, Ok(Ty::NUMBER)); - } - - #[test] - fn test_array_concat_tuples_ty() { - let mut store = TyStore::new(); - let left = store.tuple(vec![Ty::NUMBER]); - let right = store.tuple(vec![Ty::STRING]); - let result = array_concat_ty(left, right, &mut store); - 
assert_matches!(store.get_data(result), TyData::Tuple { elems } => { - assert_eq!(elems, vec![Ty::NUMBER, Ty::STRING]); - }); - } - - #[test] - fn test_array_concat_arrays_ty() { - let mut store = TyStore::new(); - let left = store.array(Ty::NUMBER); - let right = store.array(Ty::STRING); - let result = array_concat_ty(left, right, &mut store); - assert_matches!(store.get_data(result), TyData::Array { .. }); - } - - #[test] - fn test_binary_op_union_lhs_ty() { - let mut store = TyStore::new(); - // (Number | String) + Number should fail (String + Number invalid) - let union_ty = store.union(vec![Ty::NUMBER, Ty::STRING]); - binary_op_result_ty(BinaryOperatorKind::Plus, union_ty, Ty::NUMBER, &mut store) - .expect_err("(Number|String) + Number is invalid because String+Number fails"); - } - - #[test] - fn test_binary_op_union_valid_ty() { - let mut store = TyStore::new(); - // (Number | Number) + Number should succeed - let union_ty = store.union(vec![Ty::NUMBER, Ty::NUMBER]); - binary_op_result_ty(BinaryOperatorKind::Plus, union_ty, Ty::NUMBER, &mut store) - .expect("(Number|Number) + Number should succeed"); - } - - #[test] - fn test_binary_op_in_ty() { - let mut store = TyStore::new(); - let obj_ty = store.object(ObjectData { - fields: vec![], - has_unknown: true, - }); - let result = binary_op_result_ty(BinaryOperatorKind::InKw, Ty::STRING, obj_ty, &mut store); - assert_eq!(result, Ok(Ty::BOOL)); - } - - #[test] - fn test_binary_op_bitwise_ty() { - let mut store = TyStore::new(); - let result = binary_op_result_ty( - BinaryOperatorKind::BitAnd, - Ty::NUMBER, - Ty::NUMBER, - &mut store, - ); - assert_eq!(result, Ok(Ty::NUMBER)); - } - - #[test] - fn test_binary_op_logical_bool_ty() { - let mut store = TyStore::new(); - let result = binary_op_result_ty(BinaryOperatorKind::And, Ty::BOOL, Ty::BOOL, &mut store); - assert_eq!(result, Ok(Ty::BOOL)); - } - - #[test] - fn test_binary_op_null_coalesce_ty() { - let mut store = TyStore::new(); - let result_ty = 
binary_op_result_ty( - BinaryOperatorKind::NullCoaelse, - Ty::NUMBER, - Ty::STRING, - &mut store, - ) - .expect("null coalesce should succeed"); - // Result should be union of both types - assert_matches!(store.get_data(result_ty), TyData::Union(_)); - } - - // ============================================================================= - // Type Logic Operations Tests - // ============================================================================= - - mod ty_and_tests { - use super::*; - - #[test] - fn test_any_narrows_to_constraint() { - let mut store = TyStore::new(); - assert_eq!(ty_and(Ty::ANY, Ty::NUMBER, &mut store), Ty::NUMBER); - assert_eq!(ty_and(Ty::NUMBER, Ty::ANY, &mut store), Ty::NUMBER); - } - - #[test] - fn test_never_always_never() { - let mut store = TyStore::new(); - assert_eq!(ty_and(Ty::NEVER, Ty::NUMBER, &mut store), Ty::NEVER); - assert_eq!(ty_and(Ty::NUMBER, Ty::NEVER, &mut store), Ty::NEVER); - } - - #[test] - fn test_same_type_returns_same() { - let mut store = TyStore::new(); - assert_eq!(ty_and(Ty::NUMBER, Ty::NUMBER, &mut store), Ty::NUMBER); - assert_eq!(ty_and(Ty::STRING, Ty::STRING, &mut store), Ty::STRING); - } - - #[test] - fn test_incompatible_types_never() { - let mut store = TyStore::new(); - assert_eq!(ty_and(Ty::NUMBER, Ty::STRING, &mut store), Ty::NEVER); - assert_eq!(ty_and(Ty::BOOL, Ty::NUMBER, &mut store), Ty::NEVER); - } - - #[test] - fn test_bool_narrows_to_literal() { - let mut store = TyStore::new(); - assert_eq!(ty_and(Ty::BOOL, Ty::TRUE, &mut store), Ty::TRUE); - assert_eq!(ty_and(Ty::BOOL, Ty::FALSE, &mut store), Ty::FALSE); - assert_eq!(ty_and(Ty::TRUE, Ty::BOOL, &mut store), Ty::TRUE); - assert_eq!(ty_and(Ty::FALSE, Ty::BOOL, &mut store), Ty::FALSE); - } - - #[test] - fn test_string_narrows_to_char() { - let mut store = TyStore::new(); - assert_eq!(ty_and(Ty::STRING, Ty::CHAR, &mut store), Ty::CHAR); - assert_eq!(ty_and(Ty::CHAR, Ty::STRING, &mut store), Ty::CHAR); - } - - #[test] - fn test_union_distributes() 
{ - let mut store = TyStore::new(); - let union = store.union(vec![Ty::NUMBER, Ty::STRING]); - // (Number | String) & Number = Number - assert_eq!(ty_and(union, Ty::NUMBER, &mut store), Ty::NUMBER); - // Number & (Number | String) = Number - assert_eq!(ty_and(Ty::NUMBER, union, &mut store), Ty::NUMBER); - } - - #[test] - fn test_union_with_incompatible_gives_partial() { - let mut store = TyStore::new(); - let union = store.union(vec![Ty::NUMBER, Ty::STRING]); - // (Number | String) & Bool = Never (both incompatible) - assert_eq!(ty_and(union, Ty::BOOL, &mut store), Ty::NEVER); - } - - #[test] - fn test_array_intersection() { - let mut store = TyStore::new(); - let arr_num = store.array(Ty::NUMBER); - let arr_any = store.array(Ty::ANY); - // Array & Array = Array - let result = ty_and(arr_num, arr_any, &mut store); - assert!( - matches!(store.get_data(result), TyData::Array { elem, .. } if elem == Ty::NUMBER) - ); - } - - #[test] - fn test_tuple_intersection_same_length() { - let mut store = TyStore::new(); - let tuple1 = store.tuple(vec![Ty::NUMBER, Ty::ANY]); - let tuple2 = store.tuple(vec![Ty::ANY, Ty::STRING]); - let result = ty_and(tuple1, tuple2, &mut store); - assert_matches!(store.get_data(result), TyData::Tuple { elems } => { - assert_eq!(elems, vec![Ty::NUMBER, Ty::STRING]); - }); - } - - #[test] - fn test_tuple_intersection_different_length_never() { - let mut store = TyStore::new(); - let tuple1 = store.tuple(vec![Ty::NUMBER, Ty::STRING]); - let tuple2 = store.tuple(vec![Ty::NUMBER]); - assert_eq!(ty_and(tuple1, tuple2, &mut store), Ty::NEVER); - } - - #[test] - fn test_object_intersection_merges_fields() { - let mut store = TyStore::new(); - let obj1 = store.object(ObjectData { - fields: vec![( - "a".to_string(), - FieldDefInterned { - ty: Ty::NUMBER, - required: true, - visibility: FieldVis::Normal, - }, - )], - has_unknown: false, - }); - let obj2 = store.object(ObjectData { - fields: vec![( - "b".to_string(), - FieldDefInterned { - ty: Ty::STRING, - 
required: true, - visibility: FieldVis::Normal, - }, - )], - has_unknown: false, - }); - let result = ty_and(obj1, obj2, &mut store); - assert_matches!(store.get_data(result), TyData::Object(obj) => { - // Should have both fields "a" and "b" - let mut field_names: Vec<_> = obj.fields.iter().map(|(n, _)| n.as_str()).collect(); - field_names.sort_unstable(); - assert_eq!(field_names, vec!["a", "b"]); - }); - } - - #[test] - fn test_bounded_number_intersection() { - let mut store = TyStore::new(); - let bounded1 = store.bounded_number(crate::store::NumBounds::at_least(0.0)); - let bounded2 = store.bounded_number(crate::store::NumBounds::between(-10.0, 10.0)); - let result = ty_and(bounded1, bounded2, &mut store); - // Should get [0..10] - assert_matches!(store.get_data(result), TyData::BoundedNumber(bounds) => { - assert_eq!(bounds.min_f64(), Some(0.0)); - assert_eq!(bounds.max_f64(), Some(10.0)); - }); - } - } - - mod ty_minus_tests { - use super::*; - - #[test] - fn test_same_type_gives_never() { - let mut store = TyStore::new(); - assert_eq!(ty_minus(Ty::NUMBER, Ty::NUMBER, &mut store), Ty::NEVER); - } - - #[test] - fn test_different_type_unchanged() { - let mut store = TyStore::new(); - assert_eq!(ty_minus(Ty::NUMBER, Ty::STRING, &mut store), Ty::NUMBER); - } - - #[test] - fn test_any_stays_any() { - let mut store = TyStore::new(); - assert_eq!(ty_minus(Ty::ANY, Ty::NUMBER, &mut store), Ty::ANY); - } - - #[test] - fn test_minus_any_gives_never() { - let mut store = TyStore::new(); - assert_eq!(ty_minus(Ty::NUMBER, Ty::ANY, &mut store), Ty::NEVER); - } - - #[test] - fn test_union_removes_matching() { - let mut store = TyStore::new(); - let union = store.union(vec![Ty::NUMBER, Ty::STRING]); - assert_eq!(ty_minus(union, Ty::NUMBER, &mut store), Ty::STRING); - } - - #[test] - fn test_bool_minus_true_gives_false() { - let mut store = TyStore::new(); - assert_eq!(ty_minus(Ty::BOOL, Ty::TRUE, &mut store), Ty::FALSE); - assert_eq!(ty_minus(Ty::BOOL, Ty::FALSE, &mut 
store), Ty::TRUE); - } - - #[test] - fn test_never_stays_never() { - let mut store = TyStore::new(); - assert_eq!(ty_minus(Ty::NEVER, Ty::NUMBER, &mut store), Ty::NEVER); - } - } - - mod ty_with_len_tests { - use super::*; - - #[test] - fn test_array_to_tuple() { - let mut store = TyStore::new(); - let arr = store.array(Ty::NUMBER); - let result = ty_with_len(arr, 3, &mut store); - assert_matches!(store.get_data(result), TyData::Tuple { elems } => { - assert_eq!(elems, vec![Ty::NUMBER, Ty::NUMBER, Ty::NUMBER]); - }); - } - - #[test] - fn test_tuple_matching_length() { - let mut store = TyStore::new(); - let tuple = store.tuple(vec![Ty::NUMBER, Ty::STRING]); - assert_eq!(ty_with_len(tuple, 2, &mut store), tuple); - } - - #[test] - fn test_tuple_wrong_length_never() { - let mut store = TyStore::new(); - let tuple = store.tuple(vec![Ty::NUMBER, Ty::STRING]); - assert_eq!(ty_with_len(tuple, 3, &mut store), Ty::NEVER); - } - - #[test] - fn test_string_len_1_to_char() { - let mut store = TyStore::new(); - assert_eq!(ty_with_len(Ty::STRING, 1, &mut store), Ty::CHAR); - } - - #[test] - fn test_char_len_1_ok() { - let mut store = TyStore::new(); - assert_eq!(ty_with_len(Ty::CHAR, 1, &mut store), Ty::CHAR); - } - - #[test] - fn test_char_len_not_1_never() { - let mut store = TyStore::new(); - assert_eq!(ty_with_len(Ty::CHAR, 0, &mut store), Ty::NEVER); - assert_eq!(ty_with_len(Ty::CHAR, 2, &mut store), Ty::NEVER); - } - - #[test] - fn test_number_never() { - let mut store = TyStore::new(); - assert_eq!(ty_with_len(Ty::NUMBER, 5, &mut store), Ty::NEVER); - } - - #[test] - fn test_literal_string_matching_len() { - let mut store = TyStore::new(); - let lit = store.literal_string("hello".to_string()); - assert_eq!(ty_with_len(lit, 5, &mut store), lit); - } - - #[test] - fn test_literal_string_wrong_len_never() { - let mut store = TyStore::new(); - let lit = store.literal_string("hello".to_string()); - assert_eq!(ty_with_len(lit, 3, &mut store), Ty::NEVER); - } - - #[test] - fn 
test_union_filters() { - let mut store = TyStore::new(); - let tuple2 = store.tuple(vec![Ty::NUMBER, Ty::NUMBER]); - let tuple3 = store.tuple(vec![Ty::NUMBER, Ty::NUMBER, Ty::NUMBER]); - let union = store.union(vec![tuple2, tuple3]); - assert_eq!(ty_with_len(union, 2, &mut store), tuple2); - } - - #[test] - fn test_function_matches_required_arity() { - let mut store = TyStore::new(); - let func = store.function(crate::store::FunctionData { - params: vec![ - crate::store::ParamInterned { - name: "x".to_string(), - ty: Ty::ANY, - has_default: false, - }, - crate::store::ParamInterned { - name: "y".to_string(), - ty: Ty::ANY, - has_default: true, - }, - ], - return_spec: crate::store::ReturnSpec::Fixed(Ty::NUMBER), - variadic: false, - }); - assert_eq!(ty_with_len(func, 1, &mut store), func); - assert_eq!(ty_with_len(func, 2, &mut store), Ty::NEVER); - } - - #[test] - fn test_function_any_narrows_to_exact_arity() { - let mut store = TyStore::new(); - let func = store.function_any(); - let narrowed = ty_with_len(func, 2, &mut store); - assert_matches!(store.get_data(narrowed), TyData::Function(func_data) => { - assert!(!func_data.variadic); - assert_eq!(func_data.params.len(), 2); - assert_eq!(func_data.params[0].name, "arg0"); - assert_eq!(func_data.params[1].name, "arg1"); - assert!(func_data.params.iter().all(|p| p.ty == Ty::ANY)); - assert!(func_data.params.iter().all(|p| !p.has_default)); - assert_eq!(func_data.return_spec, crate::store::ReturnSpec::Fixed(Ty::ANY)); - }); - } - } - - mod ty_with_min_len_tests { - use super::*; - - #[test] - fn test_array_unchanged() { - let mut store = TyStore::new(); - let arr = store.array(Ty::NUMBER); - assert_eq!(ty_with_min_len(arr, 5, &mut store), arr); - } - - #[test] - fn test_tuple_satisfies_min() { - let mut store = TyStore::new(); - let tuple = store.tuple(vec![Ty::NUMBER, Ty::STRING, Ty::BOOL]); - assert_eq!(ty_with_min_len(tuple, 2, &mut store), tuple); - } - - #[test] - fn test_tuple_too_short_never() { - let mut 
store = TyStore::new(); - let tuple = store.tuple(vec![Ty::NUMBER]); - assert_eq!(ty_with_min_len(tuple, 2, &mut store), Ty::NEVER); - } - - #[test] - fn test_char_min_1_ok() { - let mut store = TyStore::new(); - assert_eq!(ty_with_min_len(Ty::CHAR, 1, &mut store), Ty::CHAR); - } - - #[test] - fn test_char_min_2_never() { - let mut store = TyStore::new(); - assert_eq!(ty_with_min_len(Ty::CHAR, 2, &mut store), Ty::NEVER); - } - - #[test] - fn test_literal_string_meets_min() { - let mut store = TyStore::new(); - let literal = store.literal_string("hello".to_string()); - assert_eq!(ty_with_min_len(literal, 3, &mut store), literal); - } - - #[test] - fn test_literal_string_too_short() { - let mut store = TyStore::new(); - let literal = store.literal_string("hi".to_string()); - assert_eq!(ty_with_min_len(literal, 3, &mut store), Ty::NEVER); - } - } - - mod ty_with_field_tests { - use super::*; - - #[test] - fn test_any_to_object() { - let mut store = TyStore::new(); - let result = ty_with_field(Ty::ANY, "foo", Ty::NUMBER, &mut store); - assert_matches!(store.get_data(result), TyData::Object(obj) => { - assert_eq!(obj.fields, vec![("foo".to_string(), FieldDefInterned { - ty: Ty::NUMBER, - required: true, - visibility: FieldVis::Normal, - })]); - assert!(obj.has_unknown); - }); - } - - #[test] - fn test_object_adds_field() { - let mut store = TyStore::new(); - let obj = store.object(ObjectData { - fields: vec![( - "a".to_string(), - FieldDefInterned { - ty: Ty::STRING, - required: true, - visibility: FieldVis::Normal, - }, - )], - has_unknown: false, - }); - let result = ty_with_field(obj, "b", Ty::NUMBER, &mut store); - assert_matches!(store.get_data(result), TyData::Object(obj) => { - let mut field_names: Vec<_> = obj.fields.iter().map(|(n, _)| n.as_str()).collect(); - field_names.sort_unstable(); - assert_eq!(field_names, vec!["a", "b"]); - }); - } - - #[test] - fn test_object_narrows_existing_field() { - let mut store = TyStore::new(); - let obj = 
store.object(ObjectData { - fields: vec![( - "a".to_string(), - FieldDefInterned { - ty: Ty::ANY, - required: false, - visibility: FieldVis::Normal, - }, - )], - has_unknown: false, - }); - let result = ty_with_field(obj, "a", Ty::NUMBER, &mut store); - assert_matches!(store.get_data(result), TyData::Object(obj) => { - assert_eq!(obj.fields[0].1.ty, Ty::NUMBER); - assert!(obj.fields[0].1.required); - }); - } - - #[test] - fn test_number_never() { - let mut store = TyStore::new(); - assert_eq!( - ty_with_field(Ty::NUMBER, "foo", Ty::STRING, &mut store), - Ty::NEVER - ); - } - } -} +pub use logic::{ty_and, ty_minus, ty_with_field, ty_with_len, ty_with_min_len}; +pub use operators::{array_concat_ty, binary_op_result_ty, unary_op_result_ty}; diff --git a/crates/jrsonnet-lsp-types/src/operations/logic.rs b/crates/jrsonnet-lsp-types/src/operations/logic.rs new file mode 100644 index 00000000..0a4aaa98 --- /dev/null +++ b/crates/jrsonnet-lsp-types/src/operations/logic.rs @@ -0,0 +1,1075 @@ +//! Core type logic and narrowing operations. + +use crate::store::{FieldDefInterned, FieldVis, ObjectData, Ty, TyData, TypeStoreOps}; + +/// Compute the intersection of two types (type narrowing). +/// +/// Returns the most specific type that satisfies both constraints. +/// This is the logical AND of types - values must satisfy both. 
+/// +/// # Examples +/// +/// - `ty_and(Any, Number)` → `Number` +/// - `ty_and(Number, String)` → `Never` (no value is both) +/// - `ty_and(Bool, True)` → `True` +/// - `ty_and(Number | String, Number)` → `Number` +/// +/// # Distribution over Unions +/// +/// This operation distributes over unions: +/// `(A | B) & C = (A & C) | (B & C)` +pub fn ty_and(lhs: Ty, rhs: Ty, store: &mut S) -> Ty { + // Fast paths for special types + if lhs == Ty::NEVER || rhs == Ty::NEVER { + return Ty::NEVER; + } + if lhs == Ty::ANY { + return rhs; + } + if rhs == Ty::ANY { + return lhs; + } + if lhs == rhs { + return lhs; + } + + let lhs_data = store.get_data(lhs); + let rhs_data = store.get_data(rhs); + + // Handle unions: distribute (A | B) & C = (A & C) | (B & C) + if let TyData::Union(types) = lhs_data { + let narrowed: Vec = types + .iter() + .map(|&t| ty_and(t, rhs, store)) + .filter(|&t| t != Ty::NEVER) + .collect(); + return store.union(narrowed); + } + if let TyData::Union(types) = rhs_data { + let narrowed: Vec = types + .iter() + .map(|&t| ty_and(lhs, t, store)) + .filter(|&t| t != Ty::NEVER) + .collect(); + return store.union(narrowed); + } + + // Refresh data after potential recursion + let lhs_data = store.get_data(lhs); + let rhs_data = store.get_data(rhs); + + // Handle literal/subtype relationships + match (&lhs_data, &rhs_data) { + // Bool and its literals + (TyData::Bool, TyData::True) | (TyData::True, TyData::Bool) => return Ty::TRUE, + (TyData::Bool, TyData::False) | (TyData::False, TyData::Bool) => return Ty::FALSE, + + // String and Char + (TyData::String, TyData::Char) | (TyData::Char, TyData::String) => return Ty::CHAR, + + // String and LiteralString + (TyData::String, TyData::LiteralString(s)) | (TyData::LiteralString(s), TyData::String) => { + return store.literal_string(s.clone()); + } + + // LiteralString with same value + (TyData::LiteralString(s1), TyData::LiteralString(s2)) => { + if s1 == s2 { + return store.literal_string(s1.clone()); + } + return 
Ty::NEVER; + } + + // Char and LiteralString of length 1 + (TyData::Char, TyData::LiteralString(s)) | (TyData::LiteralString(s), TyData::Char) => { + if s.chars().count() == 1 { + return store.literal_string(s.clone()); + } + return Ty::NEVER; + } + + _ => {} + } + + // Handle arrays (preserve is_set if both are sets) + if let ( + TyData::Array { + elem: e1, + is_set: s1, + }, + TyData::Array { + elem: e2, + is_set: s2, + }, + ) = (&lhs_data, &rhs_data) + { + let elem = ty_and(*e1, *e2, store); + if elem == Ty::NEVER { + return Ty::NEVER; + } + // Result is a set only if both inputs are sets + if *s1 && *s2 { + return store.array_set(elem); + } + return store.array(elem); + } + + // Handle tuples with arrays + if let (TyData::Tuple { elems }, TyData::Array { elem: arr_elem, .. }) = (&lhs_data, &rhs_data) + { + let narrowed: Vec = elems.iter().map(|&e| ty_and(e, *arr_elem, store)).collect(); + if narrowed.contains(&Ty::NEVER) { + return Ty::NEVER; + } + return store.tuple(narrowed); + } + if let (TyData::Array { elem: arr_elem, .. 
}, TyData::Tuple { elems }) = (&lhs_data, &rhs_data) + { + let narrowed: Vec = elems.iter().map(|&e| ty_and(*arr_elem, e, store)).collect(); + if narrowed.contains(&Ty::NEVER) { + return Ty::NEVER; + } + return store.tuple(narrowed); + } + + // Handle tuples with tuples + if let (TyData::Tuple { elems: e1 }, TyData::Tuple { elems: e2 }) = (&lhs_data, &rhs_data) { + if e1.len() != e2.len() { + return Ty::NEVER; + } + let narrowed: Vec = e1 + .iter() + .zip(e2.iter()) + .map(|(&a, &b)| ty_and(a, b, store)) + .collect(); + if narrowed.contains(&Ty::NEVER) { + return Ty::NEVER; + } + return store.tuple(narrowed); + } + + // Handle objects + if let (TyData::Object(obj1), TyData::Object(obj2)) = (&lhs_data, &rhs_data) { + let mut fields = obj1.fields.clone(); + for (name, def2) in &obj2.fields { + if let Some((_, def1)) = fields.iter_mut().find(|(n, _)| n == name) { + let narrowed_ty = ty_and(def1.ty, def2.ty, store); + *def1 = FieldDefInterned { + ty: narrowed_ty, + required: def1.required || def2.required, + visibility: def1.visibility, + }; + } else { + fields.push((name.clone(), def2.clone())); + } + } + let has_unknown = obj1.has_unknown && obj2.has_unknown; + return store.object(ObjectData { + fields, + has_unknown, + }); + } + + // Handle AttrsOf + if let (TyData::AttrsOf { value: v1 }, TyData::AttrsOf { value: v2 }) = (&lhs_data, &rhs_data) { + let elem = ty_and(*v1, *v2, store); + if elem == Ty::NEVER { + return Ty::NEVER; + } + return store.attrs_of(elem); + } + + // Handle object + AttrsOf + if let (TyData::Object(obj), TyData::AttrsOf { value }) = (&lhs_data, &rhs_data) { + let fields: Vec<_> = obj + .fields + .iter() + .map(|(name, field)| { + let narrowed_ty = ty_and(field.ty, *value, store); + ( + name.clone(), + FieldDefInterned { + ty: narrowed_ty, + required: field.required, + visibility: field.visibility, + }, + ) + }) + .collect(); + return store.object(ObjectData { + fields, + has_unknown: obj.has_unknown, + }); + } + if let (TyData::AttrsOf { value 
}, TyData::Object(obj)) = (&lhs_data, &rhs_data) { + let fields: Vec<_> = obj + .fields + .iter() + .map(|(name, field)| { + let narrowed_ty = ty_and(*value, field.ty, store); + ( + name.clone(), + FieldDefInterned { + ty: narrowed_ty, + required: field.required, + visibility: field.visibility, + }, + ) + }) + .collect(); + return store.object(ObjectData { + fields, + has_unknown: obj.has_unknown, + }); + } + + // Handle BoundedNumber + if let (TyData::Number, TyData::BoundedNumber(bounds)) + | (TyData::BoundedNumber(bounds), TyData::Number) = (&lhs_data, &rhs_data) + { + return store.bounded_number(*bounds); + } + if let (TyData::BoundedNumber(b1), TyData::BoundedNumber(b2)) = (&lhs_data, &rhs_data) { + // Intersection of bounds: take stricter bounds + let min = match (b1.min_f64(), b2.min_f64()) { + (Some(a), Some(b)) => Some(a.max(b)), + (Some(a), None) => Some(a), + (None, Some(b)) => Some(b), + (None, None) => None, + }; + let max = match (b1.max_f64(), b2.max_f64()) { + (Some(a), Some(b)) => Some(a.min(b)), + (Some(a), None) => Some(a), + (None, Some(b)) => Some(b), + (None, None) => None, + }; + // Check for empty range + if let (Some(lo), Some(hi)) = (min, max) { + if lo > hi { + return Ty::NEVER; + } + } + return store.bounded_number(crate::store::NumBounds { + min: min.map(f64::to_bits), + max: max.map(f64::to_bits), + }); + } + + // Handle Sum (intersection) types + if let TyData::Sum(types) = lhs_data { + let narrowed: Vec = types.iter().map(|&t| ty_and(t, rhs, store)).collect(); + if narrowed.contains(&Ty::NEVER) { + return Ty::NEVER; + } + return store.sum(narrowed); + } + if let TyData::Sum(types) = rhs_data { + let narrowed: Vec = types.iter().map(|&t| ty_and(lhs, t, store)).collect(); + if narrowed.contains(&Ty::NEVER) { + return Ty::NEVER; + } + return store.sum(narrowed); + } + + // Handle TypeVar - keep it, may be resolved later + if matches!(lhs_data, TyData::TypeVar { .. }) || matches!(rhs_data, TyData::TypeVar { .. 
}) { + return store.sum(vec![lhs, rhs]); + } + + // Different incompatible concrete types have no intersection + Ty::NEVER +} + +/// Compute the exclusion of one type from another. +/// +/// Returns the type with the constraint removed (difference/minus). +/// This removes values that match `remove` from `base`. +/// +/// # Examples +/// +/// - `ty_minus(Number | String, Number)` → `String` +/// - `ty_minus(Bool, True)` → `False` +/// - `ty_minus(Any, Number)` → `Any` (Any is too general) +/// - `ty_minus(Number, Number)` → `Never` +/// +/// # Distribution over Unions +/// +/// This operation distributes over unions: +/// `(A | B) - C = (A - C) | (B - C)` +pub fn ty_minus(base: Ty, remove: Ty, store: &mut S) -> Ty { + // Fast paths + if base == Ty::NEVER { + return Ty::NEVER; + } + if remove == Ty::NEVER { + return base; + } + if remove == Ty::ANY { + return Ty::NEVER; + } + if base == Ty::ANY { + // Can't remove anything meaningful from Any + return Ty::ANY; + } + if base == remove { + return Ty::NEVER; + } + + let base_data = store.get_data(base); + + // Handle unions: distribute (A | B) - C = (A - C) | (B - C) + if let TyData::Union(types) = base_data { + let remaining: Vec = types + .iter() + .map(|&t| ty_minus(t, remove, store)) + .filter(|&t| t != Ty::NEVER) + .collect(); + return store.union(remaining); + } + + // Special case: Bool minus True/False + let base_data = store.get_data(base); + let remove_data = store.get_data(remove); + match (&base_data, &remove_data) { + (TyData::Bool, TyData::True) => return Ty::FALSE, + (TyData::Bool, TyData::False) => return Ty::TRUE, + (TyData::True | TyData::False, TyData::Bool) => return Ty::NEVER, + _ => {} + } + + // For non-union types, if they don't match the remove type, return unchanged + base +} + +/// Narrow a type to one with a specific length. +/// +/// This is useful for narrowing based on `std.length(x) == n` conditions. 
+/// +/// # Behavior +/// +/// - Arrays become tuples with `n` elements of the same element type +/// - Tuples must have exactly `n` elements (otherwise `Never`) +/// - Strings with length 1 become `Char` +/// - Objects must have exactly `n` fields (if closed) or at least `n` (if open) +/// - Primitives like Number/Bool return `Never` (they don't have length) +/// +/// # Examples +/// +/// - `ty_with_len(Array, 3)` → `[Number, Number, Number]` +/// - `ty_with_len(String, 1)` → `Char` +/// - `ty_with_len([Number, String], 2)` → `[Number, String]` +/// - `ty_with_len([Number, String], 3)` → `Never` +pub fn ty_with_len(ty: Ty, len: usize, store: &mut S) -> Ty { + let data = store.get_data(ty); + match data { + TyData::Any => Ty::ANY, + + TyData::Array { elem, .. } => { + let elems = vec![elem; len]; + store.tuple(elems) + } + + TyData::Tuple { elems } => { + if elems.len() == len { + ty + } else { + Ty::NEVER + } + } + + TyData::Object(obj) => { + match (obj.fields.len().cmp(&len), obj.has_unknown) { + // Exactly right number of fields + (std::cmp::Ordering::Equal, false) => ty, + // Open object with fewer fields - close it at this length + (std::cmp::Ordering::Less | std::cmp::Ordering::Equal, true) => { + if obj.fields.len() == len { + store.object(ObjectData { + fields: obj.fields, + has_unknown: false, + }) + } else { + ty // Can have unknown fields to reach the length + } + } + // Too few fields in closed object, or too many fields + (std::cmp::Ordering::Less, false) | (std::cmp::Ordering::Greater, _) => Ty::NEVER, + } + } + + TyData::Function(func) => { + if func.variadic && func.params.is_empty() { + let params = (0..len) + .map(|idx| crate::store::ParamInterned { + name: format!("arg{idx}"), + ty: Ty::ANY, + has_default: false, + }) + .collect(); + store.function(crate::store::FunctionData { + params, + return_spec: crate::store::ReturnSpec::Fixed(Ty::ANY), + variadic: false, + }) + } else if func.required_count() == len { + ty + } else { + Ty::NEVER + } + } + 
+ TyData::AttrsOf { .. } | TyData::TypeVar { .. } => ty, + + TyData::String => { + if len == 1 { + Ty::CHAR + } else { + ty // String can be any length + } + } + + TyData::LiteralString(s) => { + if s.chars().count() == len { + ty + } else { + Ty::NEVER + } + } + + TyData::Char => { + if len == 1 { + ty + } else { + Ty::NEVER + } + } + + TyData::Never + | TyData::Null + | TyData::Bool + | TyData::True + | TyData::False + | TyData::Number + | TyData::BoundedNumber(_) => Ty::NEVER, + + TyData::Union(types) => { + let narrowed: Vec = types + .iter() + .map(|&t| ty_with_len(t, len, store)) + .filter(|&t| t != Ty::NEVER) + .collect(); + store.union(narrowed) + } + + TyData::Sum(types) => { + let narrowed: Vec = types.iter().map(|&t| ty_with_len(t, len, store)).collect(); + if narrowed.contains(&Ty::NEVER) { + Ty::NEVER + } else { + store.sum(narrowed) + } + } + } +} + +/// Narrow a type to one with at least a minimum length. +/// +/// This is useful for narrowing based on `std.length(x) >= n` conditions. +/// +/// # Behavior +/// +/// - Arrays stay arrays (can have any length) +/// - Tuples must have at least `n` elements +/// - Strings stay strings (can have any length) +/// - Literal strings are checked exactly against `n` +/// - Char requires `min <= 1` +/// - Objects with unknown fields stay as-is +/// +/// # Examples +/// +/// - `ty_with_min_len(Array, 3)` → `Array` +/// - `ty_with_min_len([Number, String], 1)` → `[Number, String]` +/// - `ty_with_min_len([Number], 2)` → `Never` +/// - `ty_with_min_len("ok", 3)` → `Never` +/// - `ty_with_min_len(Char, 2)` → `Never` +pub fn ty_with_min_len(ty: Ty, min: usize, store: &mut S) -> Ty { + let data = store.get_data(ty); + match data { + TyData::Any => Ty::ANY, + TyData::Never => Ty::NEVER, + + TyData::Array { .. } + | TyData::Object(_) + | TyData::AttrsOf { .. } + | TyData::String + | TyData::Function(_) + | TyData::TypeVar { .. 
} => ty, + + TyData::LiteralString(s) => { + if s.chars().count() >= min { + ty + } else { + Ty::NEVER + } + } + + TyData::Tuple { elems } => { + if elems.len() >= min { + ty + } else { + Ty::NEVER + } + } + + TyData::Char => { + if min <= 1 { + ty + } else { + Ty::NEVER + } + } + + TyData::Null + | TyData::Bool + | TyData::True + | TyData::False + | TyData::Number + | TyData::BoundedNumber(_) => { + if min == 0 { + ty // Everything has "length >= 0" + } else { + Ty::NEVER + } + } + + TyData::Union(types) => { + let narrowed: Vec = types + .iter() + .map(|&t| ty_with_min_len(t, min, store)) + .filter(|&t| t != Ty::NEVER) + .collect(); + store.union(narrowed) + } + + TyData::Sum(types) => { + let narrowed: Vec = types + .iter() + .map(|&t| ty_with_min_len(t, min, store)) + .collect(); + if narrowed.contains(&Ty::NEVER) { + Ty::NEVER + } else { + store.sum(narrowed) + } + } + } +} + +/// Add a required field to an object type. +/// +/// Returns a new object type with the specified field added. +/// If the field already exists, its type is narrowed with the new type. 
+pub fn ty_with_field(ty: Ty, field: &str, field_ty: Ty, store: &mut S) -> Ty { + let data = store.get_data(ty); + match data { + TyData::Any => { + // Create an open object with this field + store.object(ObjectData { + fields: vec![( + field.to_string(), + FieldDefInterned { + ty: field_ty, + required: true, + visibility: FieldVis::Normal, + }, + )], + has_unknown: true, + }) + } + + TyData::Object(mut obj) => { + if let Some((_, existing)) = obj.fields.iter_mut().find(|(n, _)| n == field) { + let narrowed = ty_and(existing.ty, field_ty, store); + *existing = FieldDefInterned { + ty: narrowed, + required: true, + visibility: existing.visibility, + }; + } else { + obj.fields.push(( + field.to_string(), + FieldDefInterned { + ty: field_ty, + required: true, + visibility: FieldVis::Normal, + }, + )); + } + store.object(obj) + } + + TyData::AttrsOf { value } => { + // AttrsOf with a specific field becomes object with that field + let narrowed = ty_and(value, field_ty, store); + store.object(ObjectData { + fields: vec![( + field.to_string(), + FieldDefInterned { + ty: narrowed, + required: true, + visibility: FieldVis::Normal, + }, + )], + has_unknown: true, + }) + } + + TyData::Union(types) => { + let narrowed: Vec = types + .iter() + .map(|&t| ty_with_field(t, field, field_ty, store)) + .filter(|&t| t != Ty::NEVER) + .collect(); + store.union(narrowed) + } + + // Non-object types can't have fields + _ => Ty::NEVER, + } +} + +#[cfg(test)] +mod tests { + use assert_matches::assert_matches; + + use super::*; + use crate::TyStore; + + mod ty_and_tests { + use super::*; + + #[test] + fn test_any_narrows_to_constraint() { + let mut store = TyStore::new(); + assert_eq!(ty_and(Ty::ANY, Ty::NUMBER, &mut store), Ty::NUMBER); + assert_eq!(ty_and(Ty::NUMBER, Ty::ANY, &mut store), Ty::NUMBER); + } + + #[test] + fn test_never_always_never() { + let mut store = TyStore::new(); + assert_eq!(ty_and(Ty::NEVER, Ty::NUMBER, &mut store), Ty::NEVER); + assert_eq!(ty_and(Ty::NUMBER, 
Ty::NEVER, &mut store), Ty::NEVER); + } + + #[test] + fn test_same_type_returns_same() { + let mut store = TyStore::new(); + assert_eq!(ty_and(Ty::NUMBER, Ty::NUMBER, &mut store), Ty::NUMBER); + assert_eq!(ty_and(Ty::STRING, Ty::STRING, &mut store), Ty::STRING); + } + + #[test] + fn test_incompatible_types_never() { + let mut store = TyStore::new(); + assert_eq!(ty_and(Ty::NUMBER, Ty::STRING, &mut store), Ty::NEVER); + assert_eq!(ty_and(Ty::BOOL, Ty::NUMBER, &mut store), Ty::NEVER); + } + + #[test] + fn test_bool_narrows_to_literal() { + let mut store = TyStore::new(); + assert_eq!(ty_and(Ty::BOOL, Ty::TRUE, &mut store), Ty::TRUE); + assert_eq!(ty_and(Ty::BOOL, Ty::FALSE, &mut store), Ty::FALSE); + assert_eq!(ty_and(Ty::TRUE, Ty::BOOL, &mut store), Ty::TRUE); + assert_eq!(ty_and(Ty::FALSE, Ty::BOOL, &mut store), Ty::FALSE); + } + + #[test] + fn test_string_narrows_to_char() { + let mut store = TyStore::new(); + assert_eq!(ty_and(Ty::STRING, Ty::CHAR, &mut store), Ty::CHAR); + assert_eq!(ty_and(Ty::CHAR, Ty::STRING, &mut store), Ty::CHAR); + } + + #[test] + fn test_union_distributes() { + let mut store = TyStore::new(); + let union = store.union(vec![Ty::NUMBER, Ty::STRING]); + // (Number | String) & Number = Number + assert_eq!(ty_and(union, Ty::NUMBER, &mut store), Ty::NUMBER); + // Number & (Number | String) = Number + assert_eq!(ty_and(Ty::NUMBER, union, &mut store), Ty::NUMBER); + } + + #[test] + fn test_union_with_incompatible_gives_partial() { + let mut store = TyStore::new(); + let union = store.union(vec![Ty::NUMBER, Ty::STRING]); + // (Number | String) & Bool = Never (both incompatible) + assert_eq!(ty_and(union, Ty::BOOL, &mut store), Ty::NEVER); + } + + #[test] + fn test_array_intersection() { + let mut store = TyStore::new(); + let arr_num = store.array(Ty::NUMBER); + let arr_any = store.array(Ty::ANY); + // Array & Array = Array + let result = ty_and(arr_num, arr_any, &mut store); + assert!( + matches!(store.get_data(result), TyData::Array { elem, .. 
} if elem == Ty::NUMBER) + ); + } + + #[test] + fn test_tuple_intersection_same_length() { + let mut store = TyStore::new(); + let tuple1 = store.tuple(vec![Ty::NUMBER, Ty::ANY]); + let tuple2 = store.tuple(vec![Ty::ANY, Ty::STRING]); + let result = ty_and(tuple1, tuple2, &mut store); + assert_matches!(store.get_data(result), TyData::Tuple { elems } => { + assert_eq!(elems, vec![Ty::NUMBER, Ty::STRING]); + }); + } + + #[test] + fn test_tuple_intersection_different_length_never() { + let mut store = TyStore::new(); + let tuple1 = store.tuple(vec![Ty::NUMBER, Ty::STRING]); + let tuple2 = store.tuple(vec![Ty::NUMBER]); + assert_eq!(ty_and(tuple1, tuple2, &mut store), Ty::NEVER); + } + + #[test] + fn test_object_intersection_merges_fields() { + let mut store = TyStore::new(); + let obj1 = store.object(ObjectData { + fields: vec![( + "a".to_string(), + FieldDefInterned { + ty: Ty::NUMBER, + required: true, + visibility: FieldVis::Normal, + }, + )], + has_unknown: false, + }); + let obj2 = store.object(ObjectData { + fields: vec![( + "b".to_string(), + FieldDefInterned { + ty: Ty::STRING, + required: true, + visibility: FieldVis::Normal, + }, + )], + has_unknown: false, + }); + let result = ty_and(obj1, obj2, &mut store); + assert_matches!(store.get_data(result), TyData::Object(obj) => { + // Should have both fields "a" and "b" + let mut field_names: Vec<_> = obj.fields.iter().map(|(n, _)| n.as_str()).collect(); + field_names.sort_unstable(); + assert_eq!(field_names, vec!["a", "b"]); + }); + } + + #[test] + fn test_bounded_number_intersection() { + let mut store = TyStore::new(); + let bounded1 = store.bounded_number(crate::store::NumBounds::at_least(0.0)); + let bounded2 = store.bounded_number(crate::store::NumBounds::between(-10.0, 10.0)); + let result = ty_and(bounded1, bounded2, &mut store); + // Should get [0..10] + assert_matches!(store.get_data(result), TyData::BoundedNumber(bounds) => { + assert_eq!(bounds.min_f64(), Some(0.0)); + assert_eq!(bounds.max_f64(), 
Some(10.0)); + }); + } + } + + mod ty_minus_tests { + use super::*; + + #[test] + fn test_same_type_gives_never() { + let mut store = TyStore::new(); + assert_eq!(ty_minus(Ty::NUMBER, Ty::NUMBER, &mut store), Ty::NEVER); + } + + #[test] + fn test_different_type_unchanged() { + let mut store = TyStore::new(); + assert_eq!(ty_minus(Ty::NUMBER, Ty::STRING, &mut store), Ty::NUMBER); + } + + #[test] + fn test_any_stays_any() { + let mut store = TyStore::new(); + assert_eq!(ty_minus(Ty::ANY, Ty::NUMBER, &mut store), Ty::ANY); + } + + #[test] + fn test_minus_any_gives_never() { + let mut store = TyStore::new(); + assert_eq!(ty_minus(Ty::NUMBER, Ty::ANY, &mut store), Ty::NEVER); + } + + #[test] + fn test_union_removes_matching() { + let mut store = TyStore::new(); + let union = store.union(vec![Ty::NUMBER, Ty::STRING]); + assert_eq!(ty_minus(union, Ty::NUMBER, &mut store), Ty::STRING); + } + + #[test] + fn test_bool_minus_true_gives_false() { + let mut store = TyStore::new(); + assert_eq!(ty_minus(Ty::BOOL, Ty::TRUE, &mut store), Ty::FALSE); + assert_eq!(ty_minus(Ty::BOOL, Ty::FALSE, &mut store), Ty::TRUE); + } + + #[test] + fn test_never_stays_never() { + let mut store = TyStore::new(); + assert_eq!(ty_minus(Ty::NEVER, Ty::NUMBER, &mut store), Ty::NEVER); + } + } + + mod ty_with_len_tests { + use super::*; + + #[test] + fn test_array_to_tuple() { + let mut store = TyStore::new(); + let arr = store.array(Ty::NUMBER); + let result = ty_with_len(arr, 3, &mut store); + assert_matches!(store.get_data(result), TyData::Tuple { elems } => { + assert_eq!(elems, vec![Ty::NUMBER, Ty::NUMBER, Ty::NUMBER]); + }); + } + + #[test] + fn test_tuple_matching_length() { + let mut store = TyStore::new(); + let tuple = store.tuple(vec![Ty::NUMBER, Ty::STRING]); + assert_eq!(ty_with_len(tuple, 2, &mut store), tuple); + } + + #[test] + fn test_tuple_wrong_length_never() { + let mut store = TyStore::new(); + let tuple = store.tuple(vec![Ty::NUMBER, Ty::STRING]); + assert_eq!(ty_with_len(tuple, 
3, &mut store), Ty::NEVER); + } + + #[test] + fn test_string_len_1_to_char() { + let mut store = TyStore::new(); + assert_eq!(ty_with_len(Ty::STRING, 1, &mut store), Ty::CHAR); + } + + #[test] + fn test_char_len_1_ok() { + let mut store = TyStore::new(); + assert_eq!(ty_with_len(Ty::CHAR, 1, &mut store), Ty::CHAR); + } + + #[test] + fn test_char_len_not_1_never() { + let mut store = TyStore::new(); + assert_eq!(ty_with_len(Ty::CHAR, 0, &mut store), Ty::NEVER); + assert_eq!(ty_with_len(Ty::CHAR, 2, &mut store), Ty::NEVER); + } + + #[test] + fn test_number_never() { + let mut store = TyStore::new(); + assert_eq!(ty_with_len(Ty::NUMBER, 5, &mut store), Ty::NEVER); + } + + #[test] + fn test_literal_string_matching_len() { + let mut store = TyStore::new(); + let lit = store.literal_string("hello".to_string()); + assert_eq!(ty_with_len(lit, 5, &mut store), lit); + } + + #[test] + fn test_literal_string_wrong_len_never() { + let mut store = TyStore::new(); + let lit = store.literal_string("hello".to_string()); + assert_eq!(ty_with_len(lit, 3, &mut store), Ty::NEVER); + } + + #[test] + fn test_union_filters() { + let mut store = TyStore::new(); + let tuple2 = store.tuple(vec![Ty::NUMBER, Ty::NUMBER]); + let tuple3 = store.tuple(vec![Ty::NUMBER, Ty::NUMBER, Ty::NUMBER]); + let union = store.union(vec![tuple2, tuple3]); + assert_eq!(ty_with_len(union, 2, &mut store), tuple2); + } + + #[test] + fn test_function_matches_required_arity() { + let mut store = TyStore::new(); + let func = store.function(crate::store::FunctionData { + params: vec![ + crate::store::ParamInterned { + name: "x".to_string(), + ty: Ty::ANY, + has_default: false, + }, + crate::store::ParamInterned { + name: "y".to_string(), + ty: Ty::ANY, + has_default: true, + }, + ], + return_spec: crate::store::ReturnSpec::Fixed(Ty::NUMBER), + variadic: false, + }); + assert_eq!(ty_with_len(func, 1, &mut store), func); + assert_eq!(ty_with_len(func, 2, &mut store), Ty::NEVER); + } + + #[test] + fn 
test_function_any_narrows_to_exact_arity() { + let mut store = TyStore::new(); + let func = store.function_any(); + let narrowed = ty_with_len(func, 2, &mut store); + assert_matches!(store.get_data(narrowed), TyData::Function(func_data) => { + assert!(!func_data.variadic); + assert_eq!(func_data.params.len(), 2); + assert_eq!(func_data.params[0].name, "arg0"); + assert_eq!(func_data.params[1].name, "arg1"); + assert!(func_data.params.iter().all(|p| p.ty == Ty::ANY)); + assert!(func_data.params.iter().all(|p| !p.has_default)); + assert_eq!(func_data.return_spec, crate::store::ReturnSpec::Fixed(Ty::ANY)); + }); + } + } + + mod ty_with_min_len_tests { + use super::*; + + #[test] + fn test_array_unchanged() { + let mut store = TyStore::new(); + let arr = store.array(Ty::NUMBER); + assert_eq!(ty_with_min_len(arr, 5, &mut store), arr); + } + + #[test] + fn test_tuple_satisfies_min() { + let mut store = TyStore::new(); + let tuple = store.tuple(vec![Ty::NUMBER, Ty::STRING, Ty::BOOL]); + assert_eq!(ty_with_min_len(tuple, 2, &mut store), tuple); + } + + #[test] + fn test_tuple_too_short_never() { + let mut store = TyStore::new(); + let tuple = store.tuple(vec![Ty::NUMBER]); + assert_eq!(ty_with_min_len(tuple, 2, &mut store), Ty::NEVER); + } + + #[test] + fn test_char_min_1_ok() { + let mut store = TyStore::new(); + assert_eq!(ty_with_min_len(Ty::CHAR, 1, &mut store), Ty::CHAR); + } + + #[test] + fn test_char_min_2_never() { + let mut store = TyStore::new(); + assert_eq!(ty_with_min_len(Ty::CHAR, 2, &mut store), Ty::NEVER); + } + + #[test] + fn test_literal_string_meets_min() { + let mut store = TyStore::new(); + let literal = store.literal_string("hello".to_string()); + assert_eq!(ty_with_min_len(literal, 3, &mut store), literal); + } + + #[test] + fn test_literal_string_too_short() { + let mut store = TyStore::new(); + let literal = store.literal_string("hi".to_string()); + assert_eq!(ty_with_min_len(literal, 3, &mut store), Ty::NEVER); + } + } + + mod ty_with_field_tests 
{ + use super::*; + + #[test] + fn test_any_to_object() { + let mut store = TyStore::new(); + let result = ty_with_field(Ty::ANY, "foo", Ty::NUMBER, &mut store); + assert_matches!(store.get_data(result), TyData::Object(obj) => { + assert_eq!(obj.fields, vec![("foo".to_string(), FieldDefInterned { + ty: Ty::NUMBER, + required: true, + visibility: FieldVis::Normal, + })]); + assert!(obj.has_unknown); + }); + } + + #[test] + fn test_object_adds_field() { + let mut store = TyStore::new(); + let obj = store.object(ObjectData { + fields: vec![( + "a".to_string(), + FieldDefInterned { + ty: Ty::STRING, + required: true, + visibility: FieldVis::Normal, + }, + )], + has_unknown: false, + }); + let result = ty_with_field(obj, "b", Ty::NUMBER, &mut store); + assert_matches!(store.get_data(result), TyData::Object(obj) => { + let mut field_names: Vec<_> = obj.fields.iter().map(|(n, _)| n.as_str()).collect(); + field_names.sort_unstable(); + assert_eq!(field_names, vec!["a", "b"]); + }); + } + + #[test] + fn test_object_narrows_existing_field() { + let mut store = TyStore::new(); + let obj = store.object(ObjectData { + fields: vec![( + "a".to_string(), + FieldDefInterned { + ty: Ty::ANY, + required: false, + visibility: FieldVis::Normal, + }, + )], + has_unknown: false, + }); + let result = ty_with_field(obj, "a", Ty::NUMBER, &mut store); + assert_matches!(store.get_data(result), TyData::Object(obj) => { + assert_eq!(obj.fields[0].1.ty, Ty::NUMBER); + assert!(obj.fields[0].1.required); + }); + } + + #[test] + fn test_number_never() { + let mut store = TyStore::new(); + assert_eq!( + ty_with_field(Ty::NUMBER, "foo", Ty::STRING, &mut store), + Ty::NEVER + ); + } + } +} diff --git a/crates/jrsonnet-lsp-types/src/operations/operators.rs b/crates/jrsonnet-lsp-types/src/operations/operators.rs new file mode 100644 index 00000000..5935b21f --- /dev/null +++ b/crates/jrsonnet-lsp-types/src/operations/operators.rs @@ -0,0 +1,490 @@ +//! 
Type checking and result-type computation for Jsonnet operators. + +use jrsonnet_rowan_parser::nodes::{BinaryOperatorKind, UnaryOperatorKind}; + +use crate::store::{FieldDefInterned, ObjectData, Ty, TyData, TypeStoreOps}; + +pub fn binary_op_result_ty( + op: BinaryOperatorKind, + lhs: Ty, + rhs: Ty, + store: &mut S, +) -> Result { + // Any, Never short-circuit + if lhs.is_any() || rhs.is_any() { + return Ok(Ty::ANY); + } + if lhs.is_never() { + return Ok(Ty::NEVER); + } + if rhs.is_never() { + return Ok(Ty::NEVER); + } + + // Check for TypeVar + if let TyData::TypeVar { .. } = store.get_data(lhs) { + return Ok(Ty::ANY); + } + if let TyData::TypeVar { .. } = store.get_data(rhs) { + return Ok(Ty::ANY); + } + + // Handle union types by checking all combinations + if let TyData::Union(lhs_types) = store.get_data(lhs) { + let mut results = Vec::new(); + for lt in lhs_types { + match binary_op_result_ty(op, lt, rhs, store) { + Ok(t) => results.push(t), + Err(e) => return Err(e), + } + } + return Ok(store.union(results)); + } + if let TyData::Union(rhs_types) = store.get_data(rhs) { + let mut results = Vec::new(); + for rt in rhs_types { + match binary_op_result_ty(op, lhs, rt, store) { + Ok(t) => results.push(t), + Err(e) => return Err(e), + } + } + return Ok(store.union(results)); + } + + // Handle Sum (intersection) types - all variants must support the operation + if let TyData::Sum(lhs_types) = store.get_data(lhs) { + let mut results = Vec::new(); + for lt in lhs_types { + match binary_op_result_ty(op, lt, rhs, store) { + Ok(t) => results.push(t), + Err(e) => return Err(e), + } + } + return Ok(store.union(results)); + } + if let TyData::Sum(rhs_types) = store.get_data(rhs) { + let mut results = Vec::new(); + for rt in rhs_types { + match binary_op_result_ty(op, lhs, rt, store) { + Ok(t) => results.push(t), + Err(e) => return Err(e), + } + } + return Ok(store.union(results)); + } + + // Get type data for matching + let lhs_data = store.get_data(lhs); + let rhs_data = 
store.get_data(rhs); + + match op { + // Arithmetic: (Number, Number) -> Number + BinaryOperatorKind::Minus + | BinaryOperatorKind::Mul + | BinaryOperatorKind::Div + | BinaryOperatorKind::Modulo => { + if is_number_ty(&lhs_data) && is_number_ty(&rhs_data) { + Ok(Ty::NUMBER) + } else { + Err(format!( + "operator requires (number, number), got ({}, {})", + store.display(lhs), + store.display(rhs) + )) + } + } + + // Plus: overloaded for number, string, char, array, tuple, object + BinaryOperatorKind::Plus => match (&lhs_data, &rhs_data) { + (d1, d2) if is_number_ty(d1) && is_number_ty(d2) => Ok(Ty::NUMBER), + (TyData::String | TyData::Char | TyData::LiteralString(_), +TyData::String | TyData::Char | TyData::LiteralString(_)) => Ok(Ty::STRING), + (TyData::Array { elem: l, .. }, TyData::Array { elem: r, .. }) => { + let elem = store.union(vec![*l, *r]); + Ok(store.array(elem)) + } + (TyData::Tuple { elems: l }, TyData::Tuple { elems: r }) => { + // Concatenate tuple element types + let mut elements = l.clone(); + elements.extend(r.iter().copied()); + Ok(store.tuple(elements)) + } + (TyData::Array { elem: a, .. }, TyData::Tuple { elems: t }) + | (TyData::Tuple { elems: t }, TyData::Array { elem: a, .. 
}) => { + // Mixed array/tuple concatenation - result is array + let mut types = t.clone(); + types.push(*a); + let elem = store.union(types); + Ok(store.array(elem)) + } + (TyData::Object(l), TyData::Object(r)) => Ok(store.object(ObjectData::merge(l, r))), + (TyData::AttrsOf { value: l }, TyData::AttrsOf { value: r }) => { + let value = store.union(vec![*l, *r]); + Ok(store.attrs_of(value)) + } + (TyData::Object(obj), TyData::AttrsOf { value }) + | (TyData::AttrsOf { value }, TyData::Object(obj)) => { + // Merge object with attrs-of: result is object with fields widened + let fields: Vec<_> = obj + .fields + .iter() + .map(|(name, field)| { + let widened_ty = store.union(vec![field.ty, *value]); + ( + name.clone(), + FieldDefInterned { + ty: widened_ty, + required: field.required, + visibility: field.visibility, + }, + ) + }) + .collect(); + Ok(store.object(ObjectData { + fields, + has_unknown: true, // AttrsOf adds unknown fields + })) + } + _ => Err(format!( + "operator `+` requires matching types (number+number, string+string, array+array, or object+object), got ({}, {})", + store.display(lhs), store.display(rhs) + )), + }, + + // Logical: (Bool, Bool) -> Bool (though Jsonnet actually allows any types) + BinaryOperatorKind::And | BinaryOperatorKind::Or => { + // In Jsonnet, && and || work on any types (short-circuit) + // But we can warn if operands aren't boolean + if is_bool_ty(&lhs_data) && is_bool_ty(&rhs_data) { + Ok(Ty::BOOL) + } else { + // Jsonnet allows this but returns one of the operands + Ok(store.union(vec![lhs, rhs])) + } + } + + // Bitwise: (Number, Number) -> Number + BinaryOperatorKind::BitAnd + | BinaryOperatorKind::BitOr + | BinaryOperatorKind::BitXor + | BinaryOperatorKind::Lhs + | BinaryOperatorKind::Rhs => { + if is_number_ty(&lhs_data) && is_number_ty(&rhs_data) { + Ok(Ty::NUMBER) + } else { + Err(format!( + "bitwise operator requires (number, number), got ({}, {})", + store.display(lhs), + store.display(rhs) + )) + } + } + + // 
Comparison: any types are valid, returns Bool + BinaryOperatorKind::Eq + | BinaryOperatorKind::Ne + | BinaryOperatorKind::Lt + | BinaryOperatorKind::Gt + | BinaryOperatorKind::Le + | BinaryOperatorKind::Ge => Ok(Ty::BOOL), + + // In: (String, Object) -> Bool + BinaryOperatorKind::InKw => { + let lhs_is_string = + matches!(lhs_data, TyData::String | TyData::Char | TyData::LiteralString(_)); + let rhs_is_object = matches!(rhs_data, TyData::Object(_) | TyData::AttrsOf { .. }); + if lhs_is_string && rhs_is_object { + Ok(Ty::BOOL) + } else { + Err(format!( + "operator `in` requires (string, object), got ({}, {})", + store.display(lhs), + store.display(rhs) + )) + } + } + + // Null coalesce: any types, returns union + BinaryOperatorKind::NullCoaelse => Ok(store.union(vec![lhs, rhs])), + + // Internal/error operators - treat as Any + BinaryOperatorKind::MetaObjectApply | BinaryOperatorKind::ErrorNoOperator => Ok(Ty::ANY), + } +} + +/// Check if a unary operation is valid and return the result type. +/// +/// Returns `Ok(result_ty)` if the operation is valid for the given operand type, +/// or `Err(error_message)` if the operation is invalid. +/// +/// # Errors +/// Returns `Err` when the operand type does not support the requested operator. +pub fn unary_op_result_ty( + op: UnaryOperatorKind, + operand: Ty, + store: &mut S, +) -> Result { + // Any, Never short-circuit + if operand.is_any() { + return Ok(Ty::ANY); + } + if operand.is_never() { + return Ok(Ty::NEVER); + } + + // Check for TypeVar + if let TyData::TypeVar { .. 
} = store.get_data(operand) { + return Ok(Ty::ANY); + } + + // Handle union types + if let TyData::Union(types) = store.get_data(operand) { + let mut results = Vec::new(); + for t in types { + match unary_op_result_ty(op, t, store) { + Ok(r) => results.push(r), + Err(e) => return Err(e), + } + } + return Ok(store.union(results)); + } + + let operand_data = store.get_data(operand); + + match op { + UnaryOperatorKind::Not => { + if is_bool_ty(&operand_data) { + Ok(Ty::BOOL) + } else { + Err(format!( + "operator `!` requires boolean, got {}", + store.display(operand) + )) + } + } + UnaryOperatorKind::Minus => { + if is_number_ty(&operand_data) { + Ok(Ty::NUMBER) + } else { + Err(format!( + "operator `-` requires number, got {}", + store.display(operand) + )) + } + } + UnaryOperatorKind::BitNot => { + if is_number_ty(&operand_data) { + Ok(Ty::NUMBER) + } else { + Err(format!( + "operator `~` requires number, got {}", + store.display(operand) + )) + } + } + } +} + +/// Helper to check if `TyData` represents a number type. +fn is_number_ty(data: &TyData) -> bool { + matches!(data, TyData::Number | TyData::BoundedNumber(_)) +} + +/// Helper to check if `TyData` represents a boolean type. +fn is_bool_ty(data: &TyData) -> bool { + matches!(data, TyData::Bool | TyData::True | TyData::False) +} + +/// Concatenate two arrays or tuples. +/// +/// Returns the type of the concatenated result. +pub fn array_concat_ty(left: Ty, right: Ty, store: &mut S) -> Ty { + let left_data = store.get_data(left); + let right_data = store.get_data(right); + + match (left_data, right_data) { + (TyData::Tuple { elems: a }, TyData::Tuple { elems: b }) => { + let mut elements = a; + elements.extend(b); + store.tuple(elements) + } + (TyData::Array { elem: a, .. }, TyData::Array { elem: b, .. }) => { + let elem = store.union(vec![a, b]); + store.array(elem) + } + (TyData::Tuple { elems: t }, TyData::Array { elem: a, .. }) + | (TyData::Array { elem: a, .. 
}, TyData::Tuple { elems: t }) => { + let mut types = t; + types.push(a); + let elem = store.union(types); + store.array(elem) + } + _ => store.array(Ty::ANY), + } +} + +// ============================================================================= + +#[cfg(test)] +mod tests { + use assert_matches::assert_matches; + + use super::*; + use crate::TyStore; + + #[test] + fn test_binary_op_valid_number_plus_ty() { + let mut store = TyStore::new(); + let result = + binary_op_result_ty(BinaryOperatorKind::Plus, Ty::NUMBER, Ty::NUMBER, &mut store); + assert_eq!(result, Ok(Ty::NUMBER)); + } + + #[test] + fn test_binary_op_valid_string_plus_ty() { + let mut store = TyStore::new(); + let result = + binary_op_result_ty(BinaryOperatorKind::Plus, Ty::STRING, Ty::STRING, &mut store); + assert_eq!(result, Ok(Ty::STRING)); + } + + #[test] + fn test_binary_op_invalid_string_plus_number_ty() { + let mut store = TyStore::new(); + binary_op_result_ty(BinaryOperatorKind::Plus, Ty::STRING, Ty::NUMBER, &mut store) + .expect_err("String + Number is invalid"); + } + + #[test] + fn test_binary_op_comparison_returns_bool_ty() { + let mut store = TyStore::new(); + let result = + binary_op_result_ty(BinaryOperatorKind::Lt, Ty::NUMBER, Ty::NUMBER, &mut store); + assert_eq!(result, Ok(Ty::BOOL)); + } + + #[test] + fn test_binary_op_with_any_ty() { + let mut store = TyStore::new(); + let result = binary_op_result_ty(BinaryOperatorKind::Plus, Ty::ANY, Ty::NUMBER, &mut store); + assert_eq!(result, Ok(Ty::ANY)); + } + + #[test] + fn test_binary_op_with_never_ty() { + let mut store = TyStore::new(); + let result = + binary_op_result_ty(BinaryOperatorKind::Plus, Ty::NEVER, Ty::NUMBER, &mut store); + assert_eq!(result, Ok(Ty::NEVER)); + } + + #[test] + fn test_unary_op_valid_not_bool_ty() { + let mut store = TyStore::new(); + let result = unary_op_result_ty(UnaryOperatorKind::Not, Ty::BOOL, &mut store); + assert_eq!(result, Ok(Ty::BOOL)); + } + + #[test] + fn test_unary_op_invalid_not_number_ty() { + 
let mut store = TyStore::new(); + unary_op_result_ty(UnaryOperatorKind::Not, Ty::NUMBER, &mut store) + .expect_err("!Number is invalid"); + } + + #[test] + fn test_unary_op_minus_number_ty() { + let mut store = TyStore::new(); + let result = unary_op_result_ty(UnaryOperatorKind::Minus, Ty::NUMBER, &mut store); + assert_eq!(result, Ok(Ty::NUMBER)); + } + + #[test] + fn test_unary_op_bitnot_number_ty() { + let mut store = TyStore::new(); + let result = unary_op_result_ty(UnaryOperatorKind::BitNot, Ty::NUMBER, &mut store); + assert_eq!(result, Ok(Ty::NUMBER)); + } + + #[test] + fn test_array_concat_tuples_ty() { + let mut store = TyStore::new(); + let left = store.tuple(vec![Ty::NUMBER]); + let right = store.tuple(vec![Ty::STRING]); + let result = array_concat_ty(left, right, &mut store); + assert_matches!(store.get_data(result), TyData::Tuple { elems } => { + assert_eq!(elems, vec![Ty::NUMBER, Ty::STRING]); + }); + } + + #[test] + fn test_array_concat_arrays_ty() { + let mut store = TyStore::new(); + let left = store.array(Ty::NUMBER); + let right = store.array(Ty::STRING); + let result = array_concat_ty(left, right, &mut store); + assert_matches!(store.get_data(result), TyData::Array { .. 
}); + } + + #[test] + fn test_binary_op_union_lhs_ty() { + let mut store = TyStore::new(); + // (Number | String) + Number should fail (String + Number invalid) + let union_ty = store.union(vec![Ty::NUMBER, Ty::STRING]); + binary_op_result_ty(BinaryOperatorKind::Plus, union_ty, Ty::NUMBER, &mut store) + .expect_err("(Number|String) + Number is invalid because String+Number fails"); + } + + #[test] + fn test_binary_op_union_valid_ty() { + let mut store = TyStore::new(); + // (Number | Number) + Number should succeed + let union_ty = store.union(vec![Ty::NUMBER, Ty::NUMBER]); + binary_op_result_ty(BinaryOperatorKind::Plus, union_ty, Ty::NUMBER, &mut store) + .expect("(Number|Number) + Number should succeed"); + } + + #[test] + fn test_binary_op_in_ty() { + let mut store = TyStore::new(); + let obj_ty = store.object(ObjectData { + fields: vec![], + has_unknown: true, + }); + let result = binary_op_result_ty(BinaryOperatorKind::InKw, Ty::STRING, obj_ty, &mut store); + assert_eq!(result, Ok(Ty::BOOL)); + } + + #[test] + fn test_binary_op_bitwise_ty() { + let mut store = TyStore::new(); + let result = binary_op_result_ty( + BinaryOperatorKind::BitAnd, + Ty::NUMBER, + Ty::NUMBER, + &mut store, + ); + assert_eq!(result, Ok(Ty::NUMBER)); + } + + #[test] + fn test_binary_op_logical_bool_ty() { + let mut store = TyStore::new(); + let result = binary_op_result_ty(BinaryOperatorKind::And, Ty::BOOL, Ty::BOOL, &mut store); + assert_eq!(result, Ok(Ty::BOOL)); + } + + #[test] + fn test_binary_op_null_coalesce_ty() { + let mut store = TyStore::new(); + let result_ty = binary_op_result_ty( + BinaryOperatorKind::NullCoaelse, + Ty::NUMBER, + Ty::STRING, + &mut store, + ) + .expect("null coalesce should succeed"); + // Result should be union of both types + assert_matches!(store.get_data(result_ty), TyData::Union(_)); + } +} From 03a9d307a747b8534660b742e054827978ab054f Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Mon, 16 Feb 2026 14:13:29 +0000 Subject: [PATCH 111/210] 
refactor(lsp-types): split store methods by feature Split the large TyStore implementation into focused submodules. Move core interning/constructor methods into store/impl_core.rs. Move narrowing/display/substitution logic and inline tests into store/impl_analysis.rs. Move cross-store import logic into store/impl_transform.rs. Move TypeStoreOps/Default impls into store/ops_impl.rs and thread- local helpers into store/thread_local_store.rs, re-exported from store.rs to preserve the public API. No behavior changes intended; verified with: cargo test -p jrsonnet-lsp-types --lib store --- crates/jrsonnet-lsp-types/src/store.rs | 1384 +---------------- .../src/store/impl_analysis.rs | 949 +++++++++++ .../jrsonnet-lsp-types/src/store/impl_core.rs | 255 +++ .../src/store/impl_transform.rs | 107 ++ .../jrsonnet-lsp-types/src/store/ops_impl.rs | 57 + .../src/store/thread_local_store.rs | 18 + 6 files changed, 1393 insertions(+), 1377 deletions(-) create mode 100644 crates/jrsonnet-lsp-types/src/store/impl_analysis.rs create mode 100644 crates/jrsonnet-lsp-types/src/store/impl_core.rs create mode 100644 crates/jrsonnet-lsp-types/src/store/impl_transform.rs create mode 100644 crates/jrsonnet-lsp-types/src/store/ops_impl.rs create mode 100644 crates/jrsonnet-lsp-types/src/store/thread_local_store.rs diff --git a/crates/jrsonnet-lsp-types/src/store.rs b/crates/jrsonnet-lsp-types/src/store.rs index 7eac2e65..18581399 100644 --- a/crates/jrsonnet-lsp-types/src/store.rs +++ b/crates/jrsonnet-lsp-types/src/store.rs @@ -34,10 +34,7 @@ //! assert!(matches!(data, TyData::Array { .. })); //! ``` -use std::{ - cell::RefCell, - sync::atomic::{AtomicU32, Ordering}, -}; +use std::sync::atomic::{AtomicU32, Ordering}; use rustc_hash::FxHashMap; @@ -874,1377 +871,10 @@ pub struct TyStore { dedup: FxHashMap, } -impl TyStore { - /// Create a new type store with well-known types pre-populated. 
- #[must_use] - pub fn new() -> Self { - let mut store = Self { - data: Vec::with_capacity(64), - dedup: FxHashMap::default(), - }; - store.init_builtins(); - store - } - - /// Initialize built-in well-known types. - fn init_builtins(&mut self) { - // Must match the order of Ty constants! - let builtins = [ - TyData::Any, // 0 = ANY - TyData::Never, // 1 = NEVER - TyData::Null, // 2 = NULL - TyData::Bool, // 3 = BOOL - TyData::True, // 4 = TRUE - TyData::False, // 5 = FALSE - TyData::Number, // 6 = NUMBER - TyData::String, // 7 = STRING - TyData::Char, // 8 = CHAR - // Padding to RESERVED_COUNT - TyData::Any, // 9 - reserved - TyData::Any, // 10 - reserved - TyData::Any, // 11 - reserved - TyData::Any, // 12 - reserved - TyData::Any, // 13 - reserved - TyData::Any, // 14 - reserved - TyData::Any, // 15 - reserved - ]; - - for (i, data) in builtins.into_iter().enumerate() { - let Some(raw_id) = to_u32(i) else { - return; - }; - let ty = Ty::from_raw(raw_id); - self.data.push(data.clone()); - // Only dedup the non-padding entries - if i < 9 { - self.dedup.insert(data, ty); - } - } - - debug_assert_eq!(self.data.len(), Ty::RESERVED_COUNT as usize); - } - - /// Intern a type, returning existing ID if already present. - /// - pub fn intern(&mut self, data: TyData) -> Ty { - // Fast path for well-known types - if let Some(ty) = Ty::well_known_for_data(&data) { - return ty; - } - - // Check if already interned - if let Some(&existing) = self.dedup.get(&data) { - return existing; - } - - // Intern new type - let Some(raw_id) = to_u32(self.data.len()) else { - return Ty::ANY; - }; - let id = Ty::from_raw(raw_id); - self.data.push(data.clone()); - self.dedup.insert(data, id); - id - } - - /// Get a reference to type data with display capability. - /// - /// Returns a `TyRef` that derefs to `&TyData` and implements `Display`. - /// Use `*store.get(ty)` to pattern match on the underlying `TyData`. 
- #[inline] - #[must_use] - pub fn get(&self, ty: Ty) -> TyRef<'_> { - TyRef { store: self, ty } - } - - /// Get the number of interned types. - #[must_use] - pub fn len(&self) -> usize { - self.data.len() - } - - /// Check if empty (never true after init). - #[must_use] - pub fn is_empty(&self) -> bool { - self.data.is_empty() - } - - /// Create an array type. - pub fn array(&mut self, elem: Ty) -> Ty { - self.intern(TyData::Array { - elem, - is_set: false, - }) - } - - /// Create a set type (array with sorted, unique elements). - pub fn array_set(&mut self, elem: Ty) -> Ty { - self.intern(TyData::Array { elem, is_set: true }) - } - - /// Create a tuple type. - pub fn tuple(&mut self, elems: Vec) -> Ty { - if elems.is_empty() { - // Empty tuple is a closed empty array - return self.intern(TyData::Tuple { elems: vec![] }); - } - self.intern(TyData::Tuple { elems }) - } - - /// Create an object type. - pub fn object(&mut self, data: ObjectData) -> Ty { - self.intern(TyData::Object(data)) - } - - /// Create an open object (unknown fields). - pub fn object_any(&mut self) -> Ty { - self.object(ObjectData::open()) - } - - /// Create a generic function type (accepts any args, returns any). - pub fn function_any(&mut self) -> Ty { - self.function(FunctionData { - params: vec![], - return_spec: ReturnSpec::Fixed(Ty::ANY), - variadic: true, - }) - } - - /// Create an `AttrsOf` type (object with uniform value type). - pub fn attrs_of(&mut self, value: Ty) -> Ty { - self.intern(TyData::AttrsOf { value }) - } - - /// Create a function type. - pub fn function(&mut self, data: FunctionData) -> Ty { - self.intern(TyData::Function(data)) - } - - /// Create a function with simple params and fixed return. 
- pub fn function_simple(&mut self, param_names: Vec<&str>, return_ty: Ty) -> Ty { - let params = param_names - .into_iter() - .map(|name| ParamInterned { - name: name.to_string(), - ty: Ty::ANY, - has_default: false, - }) - .collect(); - self.function(FunctionData { - params, - return_spec: ReturnSpec::Fixed(return_ty), - variadic: false, - }) - } - - /// Create a union type. - pub fn union(&mut self, mut types: Vec) -> Ty { - // Simplification rules - match types.as_slice() { - [] => return Ty::NEVER, - [only] => return *only, - _ => {} - } - - // Flatten nested unions and remove duplicates - let mut flattened = Vec::with_capacity(types.len()); - for ty in types.drain(..) { - if ty == Ty::ANY { - return Ty::ANY; // Any absorbs everything - } - if ty == Ty::NEVER { - continue; // Never is identity for union - } - if let TyData::Union(ref inner) = *self.get(ty) { - flattened.extend(inner.iter().copied()); - } else if !flattened.contains(&ty) { - flattened.push(ty); - } - } - - // Sort for canonical form - flattened.sort_by_key(|t| t.0); - flattened.dedup(); - - match flattened.as_slice() { - [] => Ty::NEVER, - [only] => *only, - _ => self.intern(TyData::Union(flattened)), - } - } - - /// Create a sum (intersection) type. - pub fn sum(&mut self, mut types: Vec) -> Ty { - match types.as_slice() { - [] => return Ty::ANY, - [only] => return *only, - _ => {} - } - - // Flatten and simplify - let mut flattened = Vec::with_capacity(types.len()); - for ty in types.drain(..) 
{ - if ty == Ty::NEVER { - return Ty::NEVER; // Never absorbs everything in intersection - } - if ty == Ty::ANY { - continue; // Any is identity for intersection - } - if let TyData::Sum(ref inner) = *self.get(ty) { - flattened.extend(inner.iter().copied()); - } else if !flattened.contains(&ty) { - flattened.push(ty); - } - } - - flattened.sort_by_key(|t| t.0); - flattened.dedup(); - - match flattened.as_slice() { - [] => Ty::ANY, - [only] => *only, - _ => self.intern(TyData::Sum(flattened)), - } - } - - /// Create a bounded number type. - pub fn bounded_number(&mut self, bounds: NumBounds) -> Ty { - self.intern(TyData::BoundedNumber(bounds)) - } - - /// Create a literal string type. - pub fn literal_string(&mut self, s: String) -> Ty { - self.intern(TyData::LiteralString(s)) - } +mod impl_analysis; +mod impl_core; +mod impl_transform; +mod ops_impl; +mod thread_local_store; - /// Create a type variable. - pub fn type_var(&mut self, id: TyVarId, constraints: TyConstraints) -> Ty { - self.intern(TyData::TypeVar { id, constraints }) - } - - /// Create a fresh type variable with no constraints. - pub fn fresh_var(&mut self) -> Ty { - self.type_var(TyVarId::fresh(), TyConstraints::none()) - } - - /// Narrow a type by intersecting with a constraint. - /// - /// Returns the most specific type that satisfies both. - /// For example, `narrow(Any, Number)` returns `Number`. - pub fn narrow(&mut self, ty: Ty, constraint: Ty) -> Ty { - crate::operations::ty_and(ty, constraint, self) - } - - /// Widen a type by removing a constraint. - /// - /// Returns the type with the constraint removed. - /// For example, `widen(Union(Number, String), Number)` returns `String`. - pub fn widen(&mut self, base: Ty, remove: Ty) -> Ty { - crate::operations::ty_minus(base, remove, self) - } - - /// Narrow a type to one with a specific length. 
- /// - /// - Arrays become tuples with n elements - /// - Strings with length 1 become Char - /// - Tuples must have matching length - pub fn with_len(&mut self, ty: Ty, len: usize) -> Ty { - crate::operations::ty_with_len(ty, len, self) - } - - /// Narrow a type to one with at least a minimum length. - pub fn with_min_len(&mut self, ty: Ty, min: usize) -> Ty { - crate::operations::ty_with_min_len(ty, min, self) - } - - /// Check if type is indexable. - #[must_use] - pub fn is_indexable(&self, ty: Ty) -> bool { - match *self.get(ty) { - TyData::Any - | TyData::String - | TyData::Char - | TyData::Array { .. } - | TyData::Tuple { .. } - | TyData::Object(_) - | TyData::AttrsOf { .. } => true, - TyData::Union(ref types) | TyData::Sum(ref types) => { - types.iter().all(|&t| self.is_indexable(t)) - } - TyData::TypeVar { - ref constraints, .. - } => constraints.must_be_indexable, - _ => false, - } - } - - /// Check if type supports field access. - #[must_use] - pub fn supports_field_access(&self, ty: Ty) -> bool { - match *self.get(ty) { - TyData::Any | TyData::Object(_) | TyData::AttrsOf { .. } => true, - TyData::Union(ref types) | TyData::Sum(ref types) => { - types.iter().all(|&t| self.supports_field_access(t)) - } - TyData::TypeVar { - ref constraints, .. - } => constraints.must_support_fields, - _ => false, - } - } - - /// Check if type is callable. - #[must_use] - pub fn is_callable(&self, ty: Ty) -> bool { - match *self.get(ty) { - TyData::Any | TyData::Function(_) => true, - TyData::Union(ref types) | TyData::Sum(ref types) => { - types.iter().all(|&t| self.is_callable(t)) - } - TyData::TypeVar { - ref constraints, .. - } => constraints.must_be_callable, - _ => false, - } - } - - /// Simplified subtype check for constraint satisfaction. - /// - /// Checks if `subtype` is a subtype of `supertype`. This is a simplified - /// version that handles the most common cases; for full subtype checking - /// use the unification module. 
- #[must_use] - pub fn is_subtype_of(&self, subtype: Ty, supertype: Ty) -> bool { - // Fast paths - if subtype == supertype { - return true; - } - if subtype == Ty::NEVER { - return true; // Never is subtype of everything - } - if supertype == Ty::ANY { - return true; // Everything is subtype of Any - } - if subtype == Ty::ANY { - return false; // Any is only subtype of Any (already checked) - } - - match (&*self.get(subtype), &*self.get(supertype)) { - // Char <: String - // LiteralString <: Char (if single char) - (TyData::LiteralString(s), TyData::Char) => s.chars().count() == 1, - // Char <: String - // LiteralString <: String - // True <: Bool, False <: Bool - // BoundedNumber <: Number - (TyData::Char | TyData::LiteralString(_), TyData::String) - | (TyData::True | TyData::False, TyData::Bool) - | (TyData::BoundedNumber(_), TyData::Number) => true, - // Array covariance: Array <: Array if A <: B - ( - TyData::Array { elem: sub_elem, .. }, - TyData::Array { - elem: super_elem, .. - }, - ) => self.is_subtype_of(*sub_elem, *super_elem), - // Tuple <: Array if all elements are subtypes - (TyData::Tuple { elems }, TyData::Array { elem: arr_elem, .. }) => { - elems.iter().all(|&e| self.is_subtype_of(e, *arr_elem)) - } - // Union subtyping: all variants must be subtypes - (TyData::Union(variants), _) => { - let variants = variants.clone(); - variants.iter().all(|&v| self.is_subtype_of(v, supertype)) - } - // Subtype of union: must be subtype of some variant - (_, TyData::Union(variants)) => { - let variants = variants.clone(); - variants.iter().any(|&v| self.is_subtype_of(subtype, v)) - } - // Default: not a subtype - _ => false, - } - } - - /// Apply a substitution to a type, replacing all type variables. - pub fn apply_substitution(&mut self, ty: Ty, sub: &TySubstitution) -> Ty { - // Clone the data to avoid borrow issues - let data = self.get(ty).clone(); - match data { - TyData::TypeVar { id, .. 
} => sub.get(id).unwrap_or(ty), - TyData::Array { elem, is_set } => { - let new_elem = self.apply_substitution(elem, sub); - if is_set { - self.array_set(new_elem) - } else { - self.array(new_elem) - } - } - TyData::Tuple { elems } => { - // Collect elements first to avoid closure borrowing issues - let elems_vec: Vec = elems; - let mut new_elems = Vec::with_capacity(elems_vec.len()); - for e in elems_vec { - new_elems.push(self.apply_substitution(e, sub)); - } - self.tuple(new_elems) - } - TyData::Union(variants) => { - let variants_vec: Vec = variants; - let mut new_variants = Vec::with_capacity(variants_vec.len()); - for v in variants_vec { - new_variants.push(self.apply_substitution(v, sub)); - } - self.union(new_variants) - } - TyData::Sum(variants) => { - let variants_vec: Vec = variants; - let mut new_variants = Vec::with_capacity(variants_vec.len()); - for v in variants_vec { - new_variants.push(self.apply_substitution(v, sub)); - } - self.sum(new_variants) - } - TyData::Object(obj) => { - // Extract field info first - let field_info: Vec<_> = obj - .fields - .iter() - .map(|(name, fd)| (name.clone(), fd.ty, fd.required, fd.visibility)) - .collect(); - let has_unknown = obj.has_unknown; - // Now apply substitutions - let mut new_fields = Vec::with_capacity(field_info.len()); - for (name, ty, required, visibility) in field_info { - new_fields.push(( - name, - FieldDefInterned { - ty: self.apply_substitution(ty, sub), - required, - visibility, - }, - )); - } - self.object(ObjectData { - fields: new_fields, - has_unknown, - }) - } - TyData::AttrsOf { value } => { - let new_value = self.apply_substitution(value, sub); - self.attrs_of(new_value) - } - TyData::Function(func) => { - // Extract param info first - let param_info: Vec<_> = func - .params - .iter() - .map(|p| (p.name.clone(), p.ty, p.has_default)) - .collect(); - let (old_return_spec, variadic) = (func.return_spec.clone(), func.variadic); - // Now apply substitutions - let mut new_params = 
Vec::with_capacity(param_info.len()); - for (name, ty, has_default) in param_info { - new_params.push(ParamInterned { - name, - ty: self.apply_substitution(ty, sub), - has_default, - }); - } - let new_return_spec = match old_return_spec { - ReturnSpec::Fixed(ret) => ReturnSpec::Fixed(self.apply_substitution(ret, sub)), - other => other, - }; - self.intern(TyData::Function(FunctionData { - params: new_params, - return_spec: new_return_spec, - variadic, - })) - } - // Primitives and other types don't contain type variables - _ => ty, - } - } - - /// Check if a type has any type variables. - #[must_use] - pub fn has_type_vars(&self, ty: Ty) -> bool { - match *self.get(ty) { - TyData::TypeVar { .. } => true, - TyData::Array { elem, .. } => self.has_type_vars(elem), - TyData::Tuple { ref elems } => elems.iter().any(|&e| self.has_type_vars(e)), - TyData::Union(ref variants) | TyData::Sum(ref variants) => { - variants.iter().any(|&v| self.has_type_vars(v)) - } - TyData::Object(ref obj) => obj.fields.iter().any(|(_, fd)| self.has_type_vars(fd.ty)), - TyData::AttrsOf { value } => self.has_type_vars(value), - TyData::Function(ref func) => { - func.params.iter().any(|p| self.has_type_vars(p.ty)) - || matches!(&func.return_spec, ReturnSpec::Fixed(ret) if self.has_type_vars(*ret)) - } - _ => false, - } - } - - /// Format a type for display. 
- #[must_use] - pub fn display(&self, ty: Ty) -> String { - match *self.get(ty) { - TyData::Any => "any".to_string(), - TyData::Never => "never".to_string(), - TyData::Null => "null".to_string(), - TyData::Bool => "boolean".to_string(), - TyData::True => "true".to_string(), - TyData::False => "false".to_string(), - TyData::Number => "number".to_string(), - TyData::BoundedNumber(bounds) => match (bounds.min_f64(), bounds.max_f64()) { - (None, None) => "number".to_string(), - (Some(lo), Some(hi)) if (lo - hi).abs() < f64::EPSILON => { - if lo.fract() == 0.0 { - format!("{lo:.0}") - } else { - format!("{lo}") - } - } - (Some(lo), Some(hi)) => format!("number[{lo}..{hi}]"), - (Some(lo), None) => format!("number[{lo}..]"), - (None, Some(hi)) => format!("number[..{hi}]"), - }, - TyData::String => "string".to_string(), - TyData::Char => "char".to_string(), - TyData::LiteralString(ref s) => format!("\"{s}\""), - TyData::Array { elem, is_set } => { - if is_set { - format!("set<{}>", self.display(elem)) - } else { - format!("array<{}>", self.display(elem)) - } - } - TyData::Tuple { ref elems } => { - let types: Vec<_> = elems.iter().map(|&t| self.display(t)).collect(); - format!("[{}]", types.join(", ")) - } - TyData::Object(ref obj) => { - if obj.fields.is_empty() && !obj.has_unknown { - "{}".to_string() - } else if obj.has_unknown { - "object".to_string() - } else { - let mut fields: Vec<_> = obj.fields.iter().map(|(n, _)| n.as_str()).collect(); - fields.sort_unstable(); - format!("{{ {} }}", fields.join(", ")) - } - } - TyData::AttrsOf { value } => format!("object<{}>", self.display(value)), - TyData::Function(ref func) => { - let params: Vec<_> = func.params.iter().map(|p| p.name.as_str()).collect(); - format!("function({})", params.join(", ")) - } - TyData::Union(ref types) => { - let parts: Vec<_> = types.iter().map(|&t| self.display(t)).collect(); - parts.join(" | ") - } - TyData::Sum(ref types) => { - let parts: Vec<_> = types.iter().map(|&t| 
self.display(t)).collect(); - parts.join(" & ") - } - TyData::TypeVar { - id, - ref constraints, - } => { - let mut s = id.to_string(); - if !constraints.is_empty() { - let mut parts: Vec = Vec::new(); - if constraints.must_be_indexable { - parts.push("indexable".to_string()); - } - if constraints.must_support_fields { - parts.push("object-like".to_string()); - } - if constraints.must_be_callable { - parts.push("callable".to_string()); - } - if let Some(bound) = constraints.upper_bound { - parts.push(format!("<: {}", self.display(bound))); - } - if !parts.is_empty() { - s.push_str(" where "); - s.push_str(&parts.join(", ")); - } - } - s - } - } - } - - /// Import a type from another store into this store. - /// - /// Well-known constants (ANY, NEVER, NULL, BOOL, TRUE, FALSE, NUMBER, STRING, CHAR) - /// are returned as-is since they have the same value across all stores. - /// Complex types are recursively imported and re-interned. - pub fn import_from(&mut self, ty: Ty, source: &S) -> Ty { - // Well-known constants are the same in all stores - if ty.is_well_known() { - return ty; - } - - // Complex types need re-interning - match source.get_data(ty) { - TyData::Array { elem, is_set } => { - let imported_elem = self.import_from(elem, source); - if is_set { - self.array_set(imported_elem) - } else { - self.array(imported_elem) - } - } - TyData::Tuple { elems } => { - let imported_elems: Vec<_> = - elems.iter().map(|&e| self.import_from(e, source)).collect(); - self.tuple(imported_elems) - } - TyData::Union(variants) => { - let imported_variants: Vec<_> = variants - .iter() - .map(|&v| self.import_from(v, source)) - .collect(); - self.union(imported_variants) - } - TyData::Object(obj) => { - let imported_fields: Vec<_> = obj - .fields - .iter() - .map(|(name, def)| { - ( - name.clone(), - FieldDefInterned { - ty: self.import_from(def.ty, source), - required: def.required, - visibility: def.visibility, - }, - ) - }) - .collect(); - self.object(ObjectData { - fields: 
imported_fields, - has_unknown: obj.has_unknown, - }) - } - TyData::Function(func) => { - let imported_params: Vec<_> = func - .params - .iter() - .map(|p| ParamInterned { - name: p.name.clone(), - ty: self.import_from(p.ty, source), - has_default: p.has_default, - }) - .collect(); - let imported_return_spec = match &func.return_spec { - ReturnSpec::Fixed(ret_ty) => { - ReturnSpec::Fixed(self.import_from(*ret_ty, source)) - } - other => other.clone(), - }; - self.intern(TyData::Function(FunctionData { - params: imported_params, - return_spec: imported_return_spec, - variadic: func.variadic, - })) - } - TyData::AttrsOf { value } => { - let imported_value = self.import_from(value, source); - self.intern(TyData::AttrsOf { - value: imported_value, - }) - } - TyData::BoundedNumber(bounds) => self.intern(TyData::BoundedNumber(bounds)), - TyData::LiteralString(s) => self.intern(TyData::LiteralString(s)), - TyData::TypeVar { id, constraints } => self.intern(TyData::TypeVar { id, constraints }), - TyData::Sum(variants) => { - let imported_variants: Vec<_> = variants - .iter() - .map(|&v| self.import_from(v, source)) - .collect(); - self.intern(TyData::Sum(imported_variants)) - } - // Primitives should have been caught by is_well_known(), but handle anyway - TyData::Any => Ty::ANY, - TyData::Never => Ty::NEVER, - TyData::Null => Ty::NULL, - TyData::Bool => Ty::BOOL, - TyData::True => Ty::TRUE, - TyData::False => Ty::FALSE, - TyData::Number => Ty::NUMBER, - TyData::String => Ty::STRING, - TyData::Char => Ty::CHAR, - } - } -} - -impl TypeStoreOps for TyStore { - fn get_data(&self, ty: Ty) -> TyData { - self.get(ty).clone() - } - - fn display(&self, ty: Ty) -> String { - TyStore::display(self, ty) - } - - fn array(&mut self, elem: Ty) -> Ty { - TyStore::array(self, elem) - } - - fn array_set(&mut self, elem: Ty) -> Ty { - TyStore::array_set(self, elem) - } - - fn tuple(&mut self, elems: Vec) -> Ty { - TyStore::tuple(self, elems) - } - - fn object(&mut self, data: ObjectData) -> 
Ty { - TyStore::object(self, data) - } - - fn attrs_of(&mut self, value: Ty) -> Ty { - TyStore::attrs_of(self, value) - } - - fn function(&mut self, data: FunctionData) -> Ty { - TyStore::function(self, data) - } - - fn union(&mut self, types: Vec) -> Ty { - TyStore::union(self, types) - } - - fn sum(&mut self, types: Vec) -> Ty { - TyStore::sum(self, types) - } - - fn bounded_number(&mut self, bounds: NumBounds) -> Ty { - TyStore::bounded_number(self, bounds) - } - - fn literal_string(&mut self, s: String) -> Ty { - TyStore::literal_string(self, s) - } -} - -impl Default for TyStore { - fn default() -> Self { - Self::new() - } -} - -// Thread-local store for convenient access during analysis -thread_local! { - static STORE: RefCell = RefCell::new(TyStore::new()); -} - -/// Execute a function with access to the thread-local type store. -pub fn with_store(f: impl FnOnce(&mut TyStore) -> R) -> R { - STORE.with(|s| f(&mut s.borrow_mut())) -} - -/// Reset the thread-local store (useful for tests). 
-pub fn reset_store() { - STORE.with(|s| *s.borrow_mut() = TyStore::new()); -} - -#[cfg(test)] -mod tests { - use assert_matches::assert_matches; - - use super::*; - - #[test] - fn test_well_known_types() { - let store = TyStore::new(); - - // Check that well-known types have correct data - assert!(matches!(*store.get(Ty::ANY), TyData::Any)); - assert!(matches!(*store.get(Ty::NEVER), TyData::Never)); - assert!(matches!(*store.get(Ty::NULL), TyData::Null)); - assert!(matches!(*store.get(Ty::BOOL), TyData::Bool)); - assert!(matches!(*store.get(Ty::TRUE), TyData::True)); - assert!(matches!(*store.get(Ty::FALSE), TyData::False)); - assert!(matches!(*store.get(Ty::NUMBER), TyData::Number)); - assert!(matches!(*store.get(Ty::STRING), TyData::String)); - assert!(matches!(*store.get(Ty::CHAR), TyData::Char)); - } - - #[test] - fn test_global_ty_accepts_global() { - let global = GlobalTy::new(Ty::NUMBER); - assert_eq!(global, Some(GlobalTy::NUMBER)); - } - - #[test] - fn test_global_ty_rejects_local() { - let local = Ty::from_raw_local(123); - assert!(GlobalTy::new(local).is_none()); - assert_eq!(GlobalTy::try_from(local), Err(NotGlobalTy(local))); - } - - #[test] - fn test_intern_deduplication() { - let mut store = TyStore::new(); - - // Same type data should return same ID - let arr1 = store.array(Ty::NUMBER); - let arr2 = store.array(Ty::NUMBER); - assert_eq!(arr1, arr2); - - // Different element type should be different - let arr3 = store.array(Ty::STRING); - assert_ne!(arr1, arr3); - } - - #[test] - fn test_array_type() { - let mut store = TyStore::new(); - - let arr = store.array(Ty::NUMBER); - assert!( - matches!(*store.get(arr), TyData::Array { elem, is_set: false } if elem == Ty::NUMBER) - ); - } - - #[test] - fn test_array_set_type() { - let mut store = TyStore::new(); - - let arr_set = store.array_set(Ty::NUMBER); - assert!( - matches!(*store.get(arr_set), TyData::Array { elem, is_set: true } if elem == Ty::NUMBER) - ); - - // Sets and arrays are distinct types - 
let arr = store.array(Ty::NUMBER); - assert_ne!(arr, arr_set); - } - - #[test] - fn test_tuple_type() { - let mut store = TyStore::new(); - - let tuple = store.tuple(vec![Ty::NUMBER, Ty::STRING, Ty::BOOL]); - assert_matches!(*store.get(tuple), TyData::Tuple { ref elems } => { - assert_eq!(elems, &vec![Ty::NUMBER, Ty::STRING, Ty::BOOL]); - }); - } - - #[test] - fn test_union_simplification() { - let mut store = TyStore::new(); - - // Empty union is Never - let empty = store.union(vec![]); - assert_eq!(empty, Ty::NEVER); - - // Single element union is just the element - let single = store.union(vec![Ty::NUMBER]); - assert_eq!(single, Ty::NUMBER); - - // Union with Any is Any - let with_any = store.union(vec![Ty::NUMBER, Ty::ANY, Ty::STRING]); - assert_eq!(with_any, Ty::ANY); - - // Union without Never removes it - let with_never = store.union(vec![Ty::NUMBER, Ty::NEVER, Ty::STRING]); - if let TyData::Union(ref types) = *store.get(with_never) { - assert!(!types.contains(&Ty::NEVER)); - } - } - - #[test] - fn test_union_flattening() { - let mut store = TyStore::new(); - - // Create nested union - let inner = store.union(vec![Ty::NUMBER, Ty::STRING]); - let outer = store.union(vec![inner, Ty::BOOL]); - - // Should be flattened - union types are sorted for determinism - assert_matches!(*store.get(outer), TyData::Union(ref types) => { - let mut expected = vec![Ty::NUMBER, Ty::STRING, Ty::BOOL]; - expected.sort(); - let mut actual = types.clone(); - actual.sort(); - assert_eq!(actual, expected); - }); - } - - #[test] - fn test_sum_simplification() { - let mut store = TyStore::new(); - - // Empty sum is Any - let empty = store.sum(vec![]); - assert_eq!(empty, Ty::ANY); - - // Sum with Never is Never - let with_never = store.sum(vec![Ty::NUMBER, Ty::NEVER, Ty::STRING]); - assert_eq!(with_never, Ty::NEVER); - - // Sum with Any removes it (Any is identity) - let with_any = store.sum(vec![Ty::NUMBER, Ty::ANY]); - assert_eq!(with_any, Ty::NUMBER); - } - - #[test] - fn 
test_is_indexable() { - let mut store = TyStore::new(); - - assert!(store.is_indexable(Ty::ANY)); - assert!(store.is_indexable(Ty::STRING)); - assert!(store.is_indexable(Ty::CHAR)); - - let arr = store.array(Ty::NUMBER); - assert!(store.is_indexable(arr)); - - let obj = store.object_any(); - assert!(store.is_indexable(obj)); - - assert!(!store.is_indexable(Ty::NUMBER)); - assert!(!store.is_indexable(Ty::BOOL)); - } - - #[test] - fn test_supports_field_access() { - let mut store = TyStore::new(); - - assert!(store.supports_field_access(Ty::ANY)); - - let obj = store.object_any(); - assert!(store.supports_field_access(obj)); - - let attrs = store.attrs_of(Ty::NUMBER); - assert!(store.supports_field_access(attrs)); - - assert!(!store.supports_field_access(Ty::NUMBER)); - assert!(!store.supports_field_access(Ty::STRING)); - } - - #[test] - fn test_is_callable() { - let mut store = TyStore::new(); - - assert!(store.is_callable(Ty::ANY)); - - let func = store.function_simple(vec!["x"], Ty::NUMBER); - assert!(store.is_callable(func)); - - assert!(!store.is_callable(Ty::NUMBER)); - assert!(!store.is_callable(Ty::STRING)); - } - - #[test] - fn test_display() { - let mut store = TyStore::new(); - - assert_eq!(store.display(Ty::ANY), "any"); - assert_eq!(store.display(Ty::NEVER), "never"); - assert_eq!(store.display(Ty::NULL), "null"); - assert_eq!(store.display(Ty::BOOL), "boolean"); - assert_eq!(store.display(Ty::NUMBER), "number"); - assert_eq!(store.display(Ty::STRING), "string"); - - let arr = store.array(Ty::NUMBER); - assert_eq!(store.display(arr), "array"); - - let union = store.union(vec![Ty::NUMBER, Ty::STRING]); - assert_eq!(store.display(union), "number | string"); - } - - #[test] - fn test_ty_copy() { - // Ty should be Copy - let ty = Ty::NUMBER; - let ty2 = ty; // Copy, not move - assert_eq!(ty, ty2); - } - - #[test] - fn test_ty_size() { - // Ty should be 4 bytes - assert_eq!(std::mem::size_of::(), 4); - } - - #[test] - fn test_type_var() { - let mut store = 
TyStore::new(); - - let var = store.fresh_var(); - assert!(matches!(*store.get(var), TyData::TypeVar { .. })); - } - - #[test] - fn test_bounded_number() { - let mut store = TyStore::new(); - - let bounded = store.bounded_number(NumBounds::non_negative()); - assert_eq!(store.display(bounded), "number[0..]"); - } - - #[test] - fn test_literal_string() { - let mut store = TyStore::new(); - - let lit = store.literal_string("hello".to_string()); - assert_eq!(store.display(lit), "\"hello\""); - } - - #[test] - fn test_object_with_fields() { - let mut store = TyStore::new(); - - let obj = store.object(ObjectData { - fields: vec![ - ( - "name".to_string(), - FieldDefInterned { - ty: Ty::STRING, - required: true, - visibility: FieldVis::Normal, - }, - ), - ( - "age".to_string(), - FieldDefInterned { - ty: Ty::NUMBER, - required: true, - visibility: FieldVis::Normal, - }, - ), - ], - has_unknown: false, - }); - - assert_matches!(*store.get(obj), TyData::Object(ref data) => { - assert_eq!( - data.fields, - vec![ - ( - "name".to_string(), - FieldDefInterned { - ty: Ty::STRING, - required: true, - visibility: FieldVis::Normal, - }, - ), - ( - "age".to_string(), - FieldDefInterned { - ty: Ty::NUMBER, - required: true, - visibility: FieldVis::Normal, - }, - ), - ] - ); - }); - } - - #[test] - fn test_function_type() { - let mut store = TyStore::new(); - - let func = store.function(FunctionData { - params: vec![ - ParamInterned { - name: "x".to_string(), - ty: Ty::NUMBER, - has_default: false, - }, - ParamInterned { - name: "y".to_string(), - ty: Ty::STRING, - has_default: true, - }, - ], - return_spec: ReturnSpec::Fixed(Ty::BOOL), - variadic: false, - }); - - assert_matches!(*store.get(func), TyData::Function(ref data) => { - assert_eq!(data.required_count(), 1); - assert_eq!(data.total_count(), 2); - }); - } - - use rstest::rstest; - - #[rstest] - #[case::any_to_number(Ty::ANY, Ty::NUMBER, Ty::NUMBER)] - #[case::number_to_number(Ty::NUMBER, Ty::NUMBER, Ty::NUMBER)] - 
#[case::number_to_string_never(Ty::NUMBER, Ty::STRING, Ty::NEVER)] - #[case::never_stays_never(Ty::NEVER, Ty::NUMBER, Ty::NEVER)] - #[case::constraint_never_is_never(Ty::NUMBER, Ty::NEVER, Ty::NEVER)] - #[case::bool_to_true(Ty::BOOL, Ty::TRUE, Ty::TRUE)] - #[case::bool_to_false(Ty::BOOL, Ty::FALSE, Ty::FALSE)] - fn test_narrow(#[case] ty: Ty, #[case] constraint: Ty, #[case] expected: Ty) { - let mut store = TyStore::new(); - assert_eq!(store.narrow(ty, constraint), expected); - } - - #[test] - fn test_narrow_union() { - let mut store = TyStore::new(); - let union = store.union(vec![Ty::NUMBER, Ty::STRING]); - assert_eq!(store.narrow(union, Ty::NUMBER), Ty::NUMBER); - } - - #[test] - fn test_narrow_number_with_bounded() { - let mut store = TyStore::new(); - let bounded = store.bounded_number(NumBounds::non_negative()); - let result = store.narrow(Ty::NUMBER, bounded); - assert_eq!(result, bounded); - } - - #[test] - fn test_narrow_bounded_with_number() { - let mut store = TyStore::new(); - let bounded = store.bounded_number(NumBounds::non_negative()); - let result = store.narrow(bounded, Ty::NUMBER); - assert_eq!(result, bounded); - } - - #[test] - fn test_narrow_bounded_intersect() { - let mut store = TyStore::new(); - // [0, inf) intersected with [-inf, 10] = [0, 10] - let non_neg = store.bounded_number(NumBounds::non_negative()); - let at_most_10 = store.bounded_number(NumBounds { - min: None, - max: Some(10.0_f64.to_bits()), - }); - let result = store.narrow(non_neg, at_most_10); - let expected = store.bounded_number(NumBounds::between(0.0, 10.0)); - assert_eq!(result, expected); - } - - #[test] - fn test_narrow_bounded_empty_intersection() { - let mut store = TyStore::new(); - // [10, inf) intersected with [-inf, 5] = empty - let at_least_10 = store.bounded_number(NumBounds::at_least(10.0)); - let at_most_5 = store.bounded_number(NumBounds { - min: None, - max: Some(5.0_f64.to_bits()), - }); - let result = store.narrow(at_least_10, at_most_5); - 
assert_eq!(result, Ty::NEVER); - } - - #[test] - fn test_num_bounds_intersect_both_bounded() { - let b1 = NumBounds::between(0.0, 100.0); - let b2 = NumBounds::between(50.0, 200.0); - let result = b1.intersect(&b2).expect("should intersect"); - assert_eq!(result.min_f64(), Some(50.0)); - assert_eq!(result.max_f64(), Some(100.0)); - } - - #[test] - fn test_num_bounds_intersect_empty() { - let b1 = NumBounds::between(0.0, 10.0); - let b2 = NumBounds::between(20.0, 30.0); - assert!(b1.intersect(&b2).is_none()); - } - - #[test] - fn test_num_bounds_intersect_one_unbounded() { - let bounded = NumBounds::between(5.0, 15.0); - let unbounded = NumBounds::unbounded(); - let result = bounded.intersect(&unbounded).expect("should intersect"); - assert_eq!(result.min_f64(), Some(5.0)); - assert_eq!(result.max_f64(), Some(15.0)); - } - - #[rstest] - #[case::same_type_is_never(Ty::NUMBER, Ty::NUMBER, Ty::NEVER)] - #[case::different_type_unchanged(Ty::NUMBER, Ty::STRING, Ty::NUMBER)] - #[case::any_stays_any(Ty::ANY, Ty::NUMBER, Ty::ANY)] - #[case::never_stays_never(Ty::NEVER, Ty::NUMBER, Ty::NEVER)] - #[case::remove_never_unchanged(Ty::NUMBER, Ty::NEVER, Ty::NUMBER)] - fn test_widen(#[case] base: Ty, #[case] remove: Ty, #[case] expected: Ty) { - let mut store = TyStore::new(); - assert_eq!(store.widen(base, remove), expected); - } - - #[test] - fn test_widen_union_removes_type() { - let mut store = TyStore::new(); - let union = store.union(vec![Ty::NUMBER, Ty::STRING]); - assert_eq!(store.widen(union, Ty::NUMBER), Ty::STRING); - } - - #[test] - fn test_with_len_array_to_tuple() { - let mut store = TyStore::new(); - let arr = store.array(Ty::NUMBER); - let result = store.with_len(arr, 3); - let expected = store.tuple(vec![Ty::NUMBER, Ty::NUMBER, Ty::NUMBER]); - assert_eq!(result, expected); - } - - #[rstest] - #[case::string_len_1_to_char(Ty::STRING, 1, Ty::CHAR)] - #[case::char_len_1_stays(Ty::CHAR, 1, Ty::CHAR)] - #[case::char_len_0_never(Ty::CHAR, 0, Ty::NEVER)] - 
#[case::char_len_2_never(Ty::CHAR, 2, Ty::NEVER)] - fn test_with_len(#[case] ty: Ty, #[case] len: usize, #[case] expected: Ty) { - let mut store = TyStore::new(); - assert_eq!(store.with_len(ty, len), expected); - } - - #[test] - fn test_with_len_function_uses_required_arity() { - let mut store = TyStore::new(); - let func = store.function(FunctionData { - params: vec![ - ParamInterned { - name: "x".to_string(), - ty: Ty::ANY, - has_default: false, - }, - ParamInterned { - name: "y".to_string(), - ty: Ty::ANY, - has_default: true, - }, - ], - return_spec: ReturnSpec::Fixed(Ty::NUMBER), - variadic: false, - }); - assert_eq!(store.with_len(func, 1), func); - assert_eq!(store.with_len(func, 2), Ty::NEVER); - } - - #[test] - fn test_with_len_function_any_narrows_to_exact_arity() { - let mut store = TyStore::new(); - let func_any = store.function_any(); - let narrowed = store.with_len(func_any, 2); - let TyData::Function(func) = store.get(narrowed).clone() else { - panic!("expected function"); - }; - assert!(!func.variadic); - assert_eq!( - func.params - .iter() - .map(|p| p.name.as_str()) - .collect::>(), - vec!["arg0", "arg1"] - ); - assert!(func.params.iter().all(|p| p.ty == Ty::ANY)); - assert!(func.params.iter().all(|p| !p.has_default)); - assert_eq!(func.return_spec, ReturnSpec::Fixed(Ty::ANY)); - } - - mod test_is_subtype_of { - use super::*; - - #[rstest] - #[case::same_type(Ty::NUMBER, Ty::NUMBER, true)] - #[case::never_to_any(Ty::NEVER, Ty::ANY, true)] - #[case::never_to_number(Ty::NEVER, Ty::NUMBER, true)] - #[case::any_to_any(Ty::ANY, Ty::ANY, true)] - #[case::number_to_any(Ty::NUMBER, Ty::ANY, true)] - #[case::any_to_number(Ty::ANY, Ty::NUMBER, false)] - #[case::char_to_string(Ty::CHAR, Ty::STRING, true)] - #[case::string_to_char(Ty::STRING, Ty::CHAR, false)] - #[case::true_to_bool(Ty::TRUE, Ty::BOOL, true)] - #[case::false_to_bool(Ty::FALSE, Ty::BOOL, true)] - #[case::bool_to_true(Ty::BOOL, Ty::TRUE, false)] - #[case::number_to_string(Ty::NUMBER, 
Ty::STRING, false)] - fn test_well_known(#[case] sub: Ty, #[case] sup: Ty, #[case] expected: bool) { - let store = TyStore::new(); - assert_eq!(store.is_subtype_of(sub, sup), expected); - } - - #[test] - fn test_array_covariance() { - let mut store = TyStore::new(); - let arr_char = store.array(Ty::CHAR); - let arr_string = store.array(Ty::STRING); - // Array <: Array because Char <: String - assert!(store.is_subtype_of(arr_char, arr_string)); - // Array NOT <: Array - assert!(!store.is_subtype_of(arr_string, arr_char)); - } - - #[test] - fn test_tuple_to_array() { - let mut store = TyStore::new(); - let tuple = store.tuple(vec![Ty::CHAR, Ty::CHAR]); - let arr_string = store.array(Ty::STRING); - // [Char, Char] <: Array because Char <: String - assert!(store.is_subtype_of(tuple, arr_string)); - } - - #[test] - fn test_union_subtype() { - let mut store = TyStore::new(); - let union = store.union(vec![Ty::TRUE, Ty::FALSE]); - // (True | False) <: Bool - assert!(store.is_subtype_of(union, Ty::BOOL)); - } - - #[test] - fn test_subtype_of_union() { - let mut store = TyStore::new(); - let union = store.union(vec![Ty::NUMBER, Ty::STRING]); - // Number <: (Number | String) - assert!(store.is_subtype_of(Ty::NUMBER, union)); - } - } - - mod test_constraints_satisfied_by { - use super::*; - - #[test] - fn test_no_constraints() { - let store = TyStore::new(); - let constraints = TyConstraints::none(); - assert!(constraints.satisfied_by(Ty::NUMBER, &store)); - assert!(constraints.satisfied_by(Ty::STRING, &store)); - assert!(constraints.satisfied_by(Ty::ANY, &store)); - } - - #[test] - fn test_must_be_indexable() { - let mut store = TyStore::new(); - let constraints = TyConstraints { - must_be_indexable: true, - ..Default::default() - }; - // Arrays are indexable - let arr = store.array(Ty::NUMBER); - assert!(constraints.satisfied_by(arr, &store)); - // Strings are indexable - assert!(constraints.satisfied_by(Ty::STRING, &store)); - // Numbers are not indexable - 
assert!(!constraints.satisfied_by(Ty::NUMBER, &store)); - } - - #[test] - fn test_upper_bound() { - let store = TyStore::new(); - let constraints = TyConstraints { - upper_bound: Some(Ty::STRING), - ..Default::default() - }; - // Char <: String - assert!(constraints.satisfied_by(Ty::CHAR, &store)); - // String <: String - assert!(constraints.satisfied_by(Ty::STRING, &store)); - // Number NOT <: String - assert!(!constraints.satisfied_by(Ty::NUMBER, &store)); - } - } -} +pub use thread_local_store::{reset_store, with_store}; diff --git a/crates/jrsonnet-lsp-types/src/store/impl_analysis.rs b/crates/jrsonnet-lsp-types/src/store/impl_analysis.rs new file mode 100644 index 00000000..4f766df6 --- /dev/null +++ b/crates/jrsonnet-lsp-types/src/store/impl_analysis.rs @@ -0,0 +1,949 @@ +use super::*; + +impl TyStore { + /// Narrow a type by intersecting with a constraint. + /// + /// Returns the most specific type that satisfies both. + /// For example, `narrow(Any, Number)` returns `Number`. + pub fn narrow(&mut self, ty: Ty, constraint: Ty) -> Ty { + crate::operations::ty_and(ty, constraint, self) + } + + /// Widen a type by removing a constraint. + /// + /// Returns the type with the constraint removed. + /// For example, `widen(Union(Number, String), Number)` returns `String`. + pub fn widen(&mut self, base: Ty, remove: Ty) -> Ty { + crate::operations::ty_minus(base, remove, self) + } + + /// Narrow a type to one with a specific length. + /// + /// - Arrays become tuples with n elements + /// - Strings with length 1 become Char + /// - Tuples must have matching length + pub fn with_len(&mut self, ty: Ty, len: usize) -> Ty { + crate::operations::ty_with_len(ty, len, self) + } + + /// Narrow a type to one with at least a minimum length. + pub fn with_min_len(&mut self, ty: Ty, min: usize) -> Ty { + crate::operations::ty_with_min_len(ty, min, self) + } + + /// Check if type is indexable. 
+ #[must_use] + pub fn is_indexable(&self, ty: Ty) -> bool { + match *self.get(ty) { + TyData::Any + | TyData::String + | TyData::Char + | TyData::Array { .. } + | TyData::Tuple { .. } + | TyData::Object(_) + | TyData::AttrsOf { .. } => true, + TyData::Union(ref types) | TyData::Sum(ref types) => { + types.iter().all(|&t| self.is_indexable(t)) + } + TyData::TypeVar { + ref constraints, .. + } => constraints.must_be_indexable, + _ => false, + } + } + + /// Check if type supports field access. + #[must_use] + pub fn supports_field_access(&self, ty: Ty) -> bool { + match *self.get(ty) { + TyData::Any | TyData::Object(_) | TyData::AttrsOf { .. } => true, + TyData::Union(ref types) | TyData::Sum(ref types) => { + types.iter().all(|&t| self.supports_field_access(t)) + } + TyData::TypeVar { + ref constraints, .. + } => constraints.must_support_fields, + _ => false, + } + } + + /// Check if type is callable. + #[must_use] + pub fn is_callable(&self, ty: Ty) -> bool { + match *self.get(ty) { + TyData::Any | TyData::Function(_) => true, + TyData::Union(ref types) | TyData::Sum(ref types) => { + types.iter().all(|&t| self.is_callable(t)) + } + TyData::TypeVar { + ref constraints, .. + } => constraints.must_be_callable, + _ => false, + } + } + + /// Simplified subtype check for constraint satisfaction. + /// + /// Checks if `subtype` is a subtype of `supertype`. This is a simplified + /// version that handles the most common cases; for full subtype checking + /// use the unification module. 
+ #[must_use] + pub fn is_subtype_of(&self, subtype: Ty, supertype: Ty) -> bool { + // Fast paths + if subtype == supertype { + return true; + } + if subtype == Ty::NEVER { + return true; // Never is subtype of everything + } + if supertype == Ty::ANY { + return true; // Everything is subtype of Any + } + if subtype == Ty::ANY { + return false; // Any is only subtype of Any (already checked) + } + + match (&*self.get(subtype), &*self.get(supertype)) { + // Char <: String + // LiteralString <: Char (if single char) + (TyData::LiteralString(s), TyData::Char) => s.chars().count() == 1, + // Char <: String + // LiteralString <: String + // True <: Bool, False <: Bool + // BoundedNumber <: Number + (TyData::Char | TyData::LiteralString(_), TyData::String) + | (TyData::True | TyData::False, TyData::Bool) + | (TyData::BoundedNumber(_), TyData::Number) => true, + // Array covariance: Array <: Array if A <: B + ( + TyData::Array { elem: sub_elem, .. }, + TyData::Array { + elem: super_elem, .. + }, + ) => self.is_subtype_of(*sub_elem, *super_elem), + // Tuple <: Array if all elements are subtypes + (TyData::Tuple { elems }, TyData::Array { elem: arr_elem, .. }) => { + elems.iter().all(|&e| self.is_subtype_of(e, *arr_elem)) + } + // Union subtyping: all variants must be subtypes + (TyData::Union(variants), _) => { + let variants = variants.clone(); + variants.iter().all(|&v| self.is_subtype_of(v, supertype)) + } + // Subtype of union: must be subtype of some variant + (_, TyData::Union(variants)) => { + let variants = variants.clone(); + variants.iter().any(|&v| self.is_subtype_of(subtype, v)) + } + // Default: not a subtype + _ => false, + } + } + + /// Apply a substitution to a type, replacing all type variables. + pub fn apply_substitution(&mut self, ty: Ty, sub: &TySubstitution) -> Ty { + // Clone the data to avoid borrow issues + let data = self.get(ty).clone(); + match data { + TyData::TypeVar { id, .. 
} => sub.get(id).unwrap_or(ty), + TyData::Array { elem, is_set } => { + let new_elem = self.apply_substitution(elem, sub); + if is_set { + self.array_set(new_elem) + } else { + self.array(new_elem) + } + } + TyData::Tuple { elems } => { + // Collect elements first to avoid closure borrowing issues + let elems_vec: Vec = elems; + let mut new_elems = Vec::with_capacity(elems_vec.len()); + for e in elems_vec { + new_elems.push(self.apply_substitution(e, sub)); + } + self.tuple(new_elems) + } + TyData::Union(variants) => { + let variants_vec: Vec = variants; + let mut new_variants = Vec::with_capacity(variants_vec.len()); + for v in variants_vec { + new_variants.push(self.apply_substitution(v, sub)); + } + self.union(new_variants) + } + TyData::Sum(variants) => { + let variants_vec: Vec = variants; + let mut new_variants = Vec::with_capacity(variants_vec.len()); + for v in variants_vec { + new_variants.push(self.apply_substitution(v, sub)); + } + self.sum(new_variants) + } + TyData::Object(obj) => { + // Extract field info first + let field_info: Vec<_> = obj + .fields + .iter() + .map(|(name, fd)| (name.clone(), fd.ty, fd.required, fd.visibility)) + .collect(); + let has_unknown = obj.has_unknown; + // Now apply substitutions + let mut new_fields = Vec::with_capacity(field_info.len()); + for (name, ty, required, visibility) in field_info { + new_fields.push(( + name, + FieldDefInterned { + ty: self.apply_substitution(ty, sub), + required, + visibility, + }, + )); + } + self.object(ObjectData { + fields: new_fields, + has_unknown, + }) + } + TyData::AttrsOf { value } => { + let new_value = self.apply_substitution(value, sub); + self.attrs_of(new_value) + } + TyData::Function(func) => { + // Extract param info first + let param_info: Vec<_> = func + .params + .iter() + .map(|p| (p.name.clone(), p.ty, p.has_default)) + .collect(); + let (old_return_spec, variadic) = (func.return_spec.clone(), func.variadic); + // Now apply substitutions + let mut new_params = 
Vec::with_capacity(param_info.len()); + for (name, ty, has_default) in param_info { + new_params.push(ParamInterned { + name, + ty: self.apply_substitution(ty, sub), + has_default, + }); + } + let new_return_spec = match old_return_spec { + ReturnSpec::Fixed(ret) => ReturnSpec::Fixed(self.apply_substitution(ret, sub)), + other => other, + }; + self.intern(TyData::Function(FunctionData { + params: new_params, + return_spec: new_return_spec, + variadic, + })) + } + // Primitives and other types don't contain type variables + _ => ty, + } + } + + /// Check if a type has any type variables. + #[must_use] + pub fn has_type_vars(&self, ty: Ty) -> bool { + match *self.get(ty) { + TyData::TypeVar { .. } => true, + TyData::Array { elem, .. } => self.has_type_vars(elem), + TyData::Tuple { ref elems } => elems.iter().any(|&e| self.has_type_vars(e)), + TyData::Union(ref variants) | TyData::Sum(ref variants) => { + variants.iter().any(|&v| self.has_type_vars(v)) + } + TyData::Object(ref obj) => obj.fields.iter().any(|(_, fd)| self.has_type_vars(fd.ty)), + TyData::AttrsOf { value } => self.has_type_vars(value), + TyData::Function(ref func) => { + func.params.iter().any(|p| self.has_type_vars(p.ty)) + || matches!(&func.return_spec, ReturnSpec::Fixed(ret) if self.has_type_vars(*ret)) + } + _ => false, + } + } + + /// Format a type for display. 
+ #[must_use] + pub fn display(&self, ty: Ty) -> String { + match *self.get(ty) { + TyData::Any => "any".to_string(), + TyData::Never => "never".to_string(), + TyData::Null => "null".to_string(), + TyData::Bool => "boolean".to_string(), + TyData::True => "true".to_string(), + TyData::False => "false".to_string(), + TyData::Number => "number".to_string(), + TyData::BoundedNumber(bounds) => match (bounds.min_f64(), bounds.max_f64()) { + (None, None) => "number".to_string(), + (Some(lo), Some(hi)) if (lo - hi).abs() < f64::EPSILON => { + if lo.fract() == 0.0 { + format!("{lo:.0}") + } else { + format!("{lo}") + } + } + (Some(lo), Some(hi)) => format!("number[{lo}..{hi}]"), + (Some(lo), None) => format!("number[{lo}..]"), + (None, Some(hi)) => format!("number[..{hi}]"), + }, + TyData::String => "string".to_string(), + TyData::Char => "char".to_string(), + TyData::LiteralString(ref s) => format!("\"{s}\""), + TyData::Array { elem, is_set } => { + if is_set { + format!("set<{}>", self.display(elem)) + } else { + format!("array<{}>", self.display(elem)) + } + } + TyData::Tuple { ref elems } => { + let types: Vec<_> = elems.iter().map(|&t| self.display(t)).collect(); + format!("[{}]", types.join(", ")) + } + TyData::Object(ref obj) => { + if obj.fields.is_empty() && !obj.has_unknown { + "{}".to_string() + } else if obj.has_unknown { + "object".to_string() + } else { + let mut fields: Vec<_> = obj.fields.iter().map(|(n, _)| n.as_str()).collect(); + fields.sort_unstable(); + format!("{{ {} }}", fields.join(", ")) + } + } + TyData::AttrsOf { value } => format!("object<{}>", self.display(value)), + TyData::Function(ref func) => { + let params: Vec<_> = func.params.iter().map(|p| p.name.as_str()).collect(); + format!("function({})", params.join(", ")) + } + TyData::Union(ref types) => { + let parts: Vec<_> = types.iter().map(|&t| self.display(t)).collect(); + parts.join(" | ") + } + TyData::Sum(ref types) => { + let parts: Vec<_> = types.iter().map(|&t| 
self.display(t)).collect(); + parts.join(" & ") + } + TyData::TypeVar { + id, + ref constraints, + } => { + let mut s = id.to_string(); + if !constraints.is_empty() { + let mut parts: Vec = Vec::new(); + if constraints.must_be_indexable { + parts.push("indexable".to_string()); + } + if constraints.must_support_fields { + parts.push("object-like".to_string()); + } + if constraints.must_be_callable { + parts.push("callable".to_string()); + } + if let Some(bound) = constraints.upper_bound { + parts.push(format!("<: {}", self.display(bound))); + } + if !parts.is_empty() { + s.push_str(" where "); + s.push_str(&parts.join(", ")); + } + } + s + } + } + } +} + +#[cfg(test)] +mod tests { + use assert_matches::assert_matches; + + use super::*; + + #[test] + fn test_well_known_types() { + let store = TyStore::new(); + + // Check that well-known types have correct data + assert!(matches!(*store.get(Ty::ANY), TyData::Any)); + assert!(matches!(*store.get(Ty::NEVER), TyData::Never)); + assert!(matches!(*store.get(Ty::NULL), TyData::Null)); + assert!(matches!(*store.get(Ty::BOOL), TyData::Bool)); + assert!(matches!(*store.get(Ty::TRUE), TyData::True)); + assert!(matches!(*store.get(Ty::FALSE), TyData::False)); + assert!(matches!(*store.get(Ty::NUMBER), TyData::Number)); + assert!(matches!(*store.get(Ty::STRING), TyData::String)); + assert!(matches!(*store.get(Ty::CHAR), TyData::Char)); + } + + #[test] + fn test_global_ty_accepts_global() { + let global = GlobalTy::new(Ty::NUMBER); + assert_eq!(global, Some(GlobalTy::NUMBER)); + } + + #[test] + fn test_global_ty_rejects_local() { + let local = Ty::from_raw_local(123); + assert!(GlobalTy::new(local).is_none()); + assert_eq!(GlobalTy::try_from(local), Err(NotGlobalTy(local))); + } + + #[test] + fn test_intern_deduplication() { + let mut store = TyStore::new(); + + // Same type data should return same ID + let arr1 = store.array(Ty::NUMBER); + let arr2 = store.array(Ty::NUMBER); + assert_eq!(arr1, arr2); + + // Different element type 
should be different + let arr3 = store.array(Ty::STRING); + assert_ne!(arr1, arr3); + } + + #[test] + fn test_array_type() { + let mut store = TyStore::new(); + + let arr = store.array(Ty::NUMBER); + assert!( + matches!(*store.get(arr), TyData::Array { elem, is_set: false } if elem == Ty::NUMBER) + ); + } + + #[test] + fn test_array_set_type() { + let mut store = TyStore::new(); + + let arr_set = store.array_set(Ty::NUMBER); + assert!( + matches!(*store.get(arr_set), TyData::Array { elem, is_set: true } if elem == Ty::NUMBER) + ); + + // Sets and arrays are distinct types + let arr = store.array(Ty::NUMBER); + assert_ne!(arr, arr_set); + } + + #[test] + fn test_tuple_type() { + let mut store = TyStore::new(); + + let tuple = store.tuple(vec![Ty::NUMBER, Ty::STRING, Ty::BOOL]); + assert_matches!(*store.get(tuple), TyData::Tuple { ref elems } => { + assert_eq!(elems, &vec![Ty::NUMBER, Ty::STRING, Ty::BOOL]); + }); + } + + #[test] + fn test_union_simplification() { + let mut store = TyStore::new(); + + // Empty union is Never + let empty = store.union(vec![]); + assert_eq!(empty, Ty::NEVER); + + // Single element union is just the element + let single = store.union(vec![Ty::NUMBER]); + assert_eq!(single, Ty::NUMBER); + + // Union with Any is Any + let with_any = store.union(vec![Ty::NUMBER, Ty::ANY, Ty::STRING]); + assert_eq!(with_any, Ty::ANY); + + // Union without Never removes it + let with_never = store.union(vec![Ty::NUMBER, Ty::NEVER, Ty::STRING]); + if let TyData::Union(ref types) = *store.get(with_never) { + assert!(!types.contains(&Ty::NEVER)); + } + } + + #[test] + fn test_union_flattening() { + let mut store = TyStore::new(); + + // Create nested union + let inner = store.union(vec![Ty::NUMBER, Ty::STRING]); + let outer = store.union(vec![inner, Ty::BOOL]); + + // Should be flattened - union types are sorted for determinism + assert_matches!(*store.get(outer), TyData::Union(ref types) => { + let mut expected = vec![Ty::NUMBER, Ty::STRING, Ty::BOOL]; + 
expected.sort(); + let mut actual = types.clone(); + actual.sort(); + assert_eq!(actual, expected); + }); + } + + #[test] + fn test_sum_simplification() { + let mut store = TyStore::new(); + + // Empty sum is Any + let empty = store.sum(vec![]); + assert_eq!(empty, Ty::ANY); + + // Sum with Never is Never + let with_never = store.sum(vec![Ty::NUMBER, Ty::NEVER, Ty::STRING]); + assert_eq!(with_never, Ty::NEVER); + + // Sum with Any removes it (Any is identity) + let with_any = store.sum(vec![Ty::NUMBER, Ty::ANY]); + assert_eq!(with_any, Ty::NUMBER); + } + + #[test] + fn test_is_indexable() { + let mut store = TyStore::new(); + + assert!(store.is_indexable(Ty::ANY)); + assert!(store.is_indexable(Ty::STRING)); + assert!(store.is_indexable(Ty::CHAR)); + + let arr = store.array(Ty::NUMBER); + assert!(store.is_indexable(arr)); + + let obj = store.object_any(); + assert!(store.is_indexable(obj)); + + assert!(!store.is_indexable(Ty::NUMBER)); + assert!(!store.is_indexable(Ty::BOOL)); + } + + #[test] + fn test_supports_field_access() { + let mut store = TyStore::new(); + + assert!(store.supports_field_access(Ty::ANY)); + + let obj = store.object_any(); + assert!(store.supports_field_access(obj)); + + let attrs = store.attrs_of(Ty::NUMBER); + assert!(store.supports_field_access(attrs)); + + assert!(!store.supports_field_access(Ty::NUMBER)); + assert!(!store.supports_field_access(Ty::STRING)); + } + + #[test] + fn test_is_callable() { + let mut store = TyStore::new(); + + assert!(store.is_callable(Ty::ANY)); + + let func = store.function_simple(vec!["x"], Ty::NUMBER); + assert!(store.is_callable(func)); + + assert!(!store.is_callable(Ty::NUMBER)); + assert!(!store.is_callable(Ty::STRING)); + } + + #[test] + fn test_display() { + let mut store = TyStore::new(); + + assert_eq!(store.display(Ty::ANY), "any"); + assert_eq!(store.display(Ty::NEVER), "never"); + assert_eq!(store.display(Ty::NULL), "null"); + assert_eq!(store.display(Ty::BOOL), "boolean"); + 
assert_eq!(store.display(Ty::NUMBER), "number"); + assert_eq!(store.display(Ty::STRING), "string"); + + let arr = store.array(Ty::NUMBER); + assert_eq!(store.display(arr), "array"); + + let union = store.union(vec![Ty::NUMBER, Ty::STRING]); + assert_eq!(store.display(union), "number | string"); + } + + #[test] + fn test_ty_copy() { + // Ty should be Copy + let ty = Ty::NUMBER; + let ty2 = ty; // Copy, not move + assert_eq!(ty, ty2); + } + + #[test] + fn test_ty_size() { + // Ty should be 4 bytes + assert_eq!(std::mem::size_of::(), 4); + } + + #[test] + fn test_type_var() { + let mut store = TyStore::new(); + + let var = store.fresh_var(); + assert!(matches!(*store.get(var), TyData::TypeVar { .. })); + } + + #[test] + fn test_bounded_number() { + let mut store = TyStore::new(); + + let bounded = store.bounded_number(NumBounds::non_negative()); + assert_eq!(store.display(bounded), "number[0..]"); + } + + #[test] + fn test_literal_string() { + let mut store = TyStore::new(); + + let lit = store.literal_string("hello".to_string()); + assert_eq!(store.display(lit), "\"hello\""); + } + + #[test] + fn test_object_with_fields() { + let mut store = TyStore::new(); + + let obj = store.object(ObjectData { + fields: vec![ + ( + "name".to_string(), + FieldDefInterned { + ty: Ty::STRING, + required: true, + visibility: FieldVis::Normal, + }, + ), + ( + "age".to_string(), + FieldDefInterned { + ty: Ty::NUMBER, + required: true, + visibility: FieldVis::Normal, + }, + ), + ], + has_unknown: false, + }); + + assert_matches!(*store.get(obj), TyData::Object(ref data) => { + assert_eq!( + data.fields, + vec![ + ( + "name".to_string(), + FieldDefInterned { + ty: Ty::STRING, + required: true, + visibility: FieldVis::Normal, + }, + ), + ( + "age".to_string(), + FieldDefInterned { + ty: Ty::NUMBER, + required: true, + visibility: FieldVis::Normal, + }, + ), + ] + ); + }); + } + + #[test] + fn test_function_type() { + let mut store = TyStore::new(); + + let func = 
store.function(FunctionData { + params: vec![ + ParamInterned { + name: "x".to_string(), + ty: Ty::NUMBER, + has_default: false, + }, + ParamInterned { + name: "y".to_string(), + ty: Ty::STRING, + has_default: true, + }, + ], + return_spec: ReturnSpec::Fixed(Ty::BOOL), + variadic: false, + }); + + assert_matches!(*store.get(func), TyData::Function(ref data) => { + assert_eq!(data.required_count(), 1); + assert_eq!(data.total_count(), 2); + }); + } + + use rstest::rstest; + + #[rstest] + #[case::any_to_number(Ty::ANY, Ty::NUMBER, Ty::NUMBER)] + #[case::number_to_number(Ty::NUMBER, Ty::NUMBER, Ty::NUMBER)] + #[case::number_to_string_never(Ty::NUMBER, Ty::STRING, Ty::NEVER)] + #[case::never_stays_never(Ty::NEVER, Ty::NUMBER, Ty::NEVER)] + #[case::constraint_never_is_never(Ty::NUMBER, Ty::NEVER, Ty::NEVER)] + #[case::bool_to_true(Ty::BOOL, Ty::TRUE, Ty::TRUE)] + #[case::bool_to_false(Ty::BOOL, Ty::FALSE, Ty::FALSE)] + fn test_narrow(#[case] ty: Ty, #[case] constraint: Ty, #[case] expected: Ty) { + let mut store = TyStore::new(); + assert_eq!(store.narrow(ty, constraint), expected); + } + + #[test] + fn test_narrow_union() { + let mut store = TyStore::new(); + let union = store.union(vec![Ty::NUMBER, Ty::STRING]); + assert_eq!(store.narrow(union, Ty::NUMBER), Ty::NUMBER); + } + + #[test] + fn test_narrow_number_with_bounded() { + let mut store = TyStore::new(); + let bounded = store.bounded_number(NumBounds::non_negative()); + let result = store.narrow(Ty::NUMBER, bounded); + assert_eq!(result, bounded); + } + + #[test] + fn test_narrow_bounded_with_number() { + let mut store = TyStore::new(); + let bounded = store.bounded_number(NumBounds::non_negative()); + let result = store.narrow(bounded, Ty::NUMBER); + assert_eq!(result, bounded); + } + + #[test] + fn test_narrow_bounded_intersect() { + let mut store = TyStore::new(); + // [0, inf) intersected with [-inf, 10] = [0, 10] + let non_neg = store.bounded_number(NumBounds::non_negative()); + let at_most_10 = 
store.bounded_number(NumBounds { + min: None, + max: Some(10.0_f64.to_bits()), + }); + let result = store.narrow(non_neg, at_most_10); + let expected = store.bounded_number(NumBounds::between(0.0, 10.0)); + assert_eq!(result, expected); + } + + #[test] + fn test_narrow_bounded_empty_intersection() { + let mut store = TyStore::new(); + // [10, inf) intersected with [-inf, 5] = empty + let at_least_10 = store.bounded_number(NumBounds::at_least(10.0)); + let at_most_5 = store.bounded_number(NumBounds { + min: None, + max: Some(5.0_f64.to_bits()), + }); + let result = store.narrow(at_least_10, at_most_5); + assert_eq!(result, Ty::NEVER); + } + + #[test] + fn test_num_bounds_intersect_both_bounded() { + let b1 = NumBounds::between(0.0, 100.0); + let b2 = NumBounds::between(50.0, 200.0); + let result = b1.intersect(&b2).expect("should intersect"); + assert_eq!(result.min_f64(), Some(50.0)); + assert_eq!(result.max_f64(), Some(100.0)); + } + + #[test] + fn test_num_bounds_intersect_empty() { + let b1 = NumBounds::between(0.0, 10.0); + let b2 = NumBounds::between(20.0, 30.0); + assert!(b1.intersect(&b2).is_none()); + } + + #[test] + fn test_num_bounds_intersect_one_unbounded() { + let bounded = NumBounds::between(5.0, 15.0); + let unbounded = NumBounds::unbounded(); + let result = bounded.intersect(&unbounded).expect("should intersect"); + assert_eq!(result.min_f64(), Some(5.0)); + assert_eq!(result.max_f64(), Some(15.0)); + } + + #[rstest] + #[case::same_type_is_never(Ty::NUMBER, Ty::NUMBER, Ty::NEVER)] + #[case::different_type_unchanged(Ty::NUMBER, Ty::STRING, Ty::NUMBER)] + #[case::any_stays_any(Ty::ANY, Ty::NUMBER, Ty::ANY)] + #[case::never_stays_never(Ty::NEVER, Ty::NUMBER, Ty::NEVER)] + #[case::remove_never_unchanged(Ty::NUMBER, Ty::NEVER, Ty::NUMBER)] + fn test_widen(#[case] base: Ty, #[case] remove: Ty, #[case] expected: Ty) { + let mut store = TyStore::new(); + assert_eq!(store.widen(base, remove), expected); + } + + #[test] + fn test_widen_union_removes_type() { 
+ let mut store = TyStore::new(); + let union = store.union(vec![Ty::NUMBER, Ty::STRING]); + assert_eq!(store.widen(union, Ty::NUMBER), Ty::STRING); + } + + #[test] + fn test_with_len_array_to_tuple() { + let mut store = TyStore::new(); + let arr = store.array(Ty::NUMBER); + let result = store.with_len(arr, 3); + let expected = store.tuple(vec![Ty::NUMBER, Ty::NUMBER, Ty::NUMBER]); + assert_eq!(result, expected); + } + + #[rstest] + #[case::string_len_1_to_char(Ty::STRING, 1, Ty::CHAR)] + #[case::char_len_1_stays(Ty::CHAR, 1, Ty::CHAR)] + #[case::char_len_0_never(Ty::CHAR, 0, Ty::NEVER)] + #[case::char_len_2_never(Ty::CHAR, 2, Ty::NEVER)] + fn test_with_len(#[case] ty: Ty, #[case] len: usize, #[case] expected: Ty) { + let mut store = TyStore::new(); + assert_eq!(store.with_len(ty, len), expected); + } + + #[test] + fn test_with_len_function_uses_required_arity() { + let mut store = TyStore::new(); + let func = store.function(FunctionData { + params: vec![ + ParamInterned { + name: "x".to_string(), + ty: Ty::ANY, + has_default: false, + }, + ParamInterned { + name: "y".to_string(), + ty: Ty::ANY, + has_default: true, + }, + ], + return_spec: ReturnSpec::Fixed(Ty::NUMBER), + variadic: false, + }); + assert_eq!(store.with_len(func, 1), func); + assert_eq!(store.with_len(func, 2), Ty::NEVER); + } + + #[test] + fn test_with_len_function_any_narrows_to_exact_arity() { + let mut store = TyStore::new(); + let func_any = store.function_any(); + let narrowed = store.with_len(func_any, 2); + let TyData::Function(func) = store.get(narrowed).clone() else { + panic!("expected function"); + }; + assert!(!func.variadic); + assert_eq!( + func.params + .iter() + .map(|p| p.name.as_str()) + .collect::>(), + vec!["arg0", "arg1"] + ); + assert!(func.params.iter().all(|p| p.ty == Ty::ANY)); + assert!(func.params.iter().all(|p| !p.has_default)); + assert_eq!(func.return_spec, ReturnSpec::Fixed(Ty::ANY)); + } + + mod test_is_subtype_of { + use super::*; + + #[rstest] + 
#[case::same_type(Ty::NUMBER, Ty::NUMBER, true)] + #[case::never_to_any(Ty::NEVER, Ty::ANY, true)] + #[case::never_to_number(Ty::NEVER, Ty::NUMBER, true)] + #[case::any_to_any(Ty::ANY, Ty::ANY, true)] + #[case::number_to_any(Ty::NUMBER, Ty::ANY, true)] + #[case::any_to_number(Ty::ANY, Ty::NUMBER, false)] + #[case::char_to_string(Ty::CHAR, Ty::STRING, true)] + #[case::string_to_char(Ty::STRING, Ty::CHAR, false)] + #[case::true_to_bool(Ty::TRUE, Ty::BOOL, true)] + #[case::false_to_bool(Ty::FALSE, Ty::BOOL, true)] + #[case::bool_to_true(Ty::BOOL, Ty::TRUE, false)] + #[case::number_to_string(Ty::NUMBER, Ty::STRING, false)] + fn test_well_known(#[case] sub: Ty, #[case] sup: Ty, #[case] expected: bool) { + let store = TyStore::new(); + assert_eq!(store.is_subtype_of(sub, sup), expected); + } + + #[test] + fn test_array_covariance() { + let mut store = TyStore::new(); + let arr_char = store.array(Ty::CHAR); + let arr_string = store.array(Ty::STRING); + // Array <: Array because Char <: String + assert!(store.is_subtype_of(arr_char, arr_string)); + // Array NOT <: Array + assert!(!store.is_subtype_of(arr_string, arr_char)); + } + + #[test] + fn test_tuple_to_array() { + let mut store = TyStore::new(); + let tuple = store.tuple(vec![Ty::CHAR, Ty::CHAR]); + let arr_string = store.array(Ty::STRING); + // [Char, Char] <: Array because Char <: String + assert!(store.is_subtype_of(tuple, arr_string)); + } + + #[test] + fn test_union_subtype() { + let mut store = TyStore::new(); + let union = store.union(vec![Ty::TRUE, Ty::FALSE]); + // (True | False) <: Bool + assert!(store.is_subtype_of(union, Ty::BOOL)); + } + + #[test] + fn test_subtype_of_union() { + let mut store = TyStore::new(); + let union = store.union(vec![Ty::NUMBER, Ty::STRING]); + // Number <: (Number | String) + assert!(store.is_subtype_of(Ty::NUMBER, union)); + } + } + + mod test_constraints_satisfied_by { + use super::*; + + #[test] + fn test_no_constraints() { + let store = TyStore::new(); + let constraints = 
TyConstraints::none(); + assert!(constraints.satisfied_by(Ty::NUMBER, &store)); + assert!(constraints.satisfied_by(Ty::STRING, &store)); + assert!(constraints.satisfied_by(Ty::ANY, &store)); + } + + #[test] + fn test_must_be_indexable() { + let mut store = TyStore::new(); + let constraints = TyConstraints { + must_be_indexable: true, + ..Default::default() + }; + // Arrays are indexable + let arr = store.array(Ty::NUMBER); + assert!(constraints.satisfied_by(arr, &store)); + // Strings are indexable + assert!(constraints.satisfied_by(Ty::STRING, &store)); + // Numbers are not indexable + assert!(!constraints.satisfied_by(Ty::NUMBER, &store)); + } + + #[test] + fn test_upper_bound() { + let store = TyStore::new(); + let constraints = TyConstraints { + upper_bound: Some(Ty::STRING), + ..Default::default() + }; + // Char <: String + assert!(constraints.satisfied_by(Ty::CHAR, &store)); + // String <: String + assert!(constraints.satisfied_by(Ty::STRING, &store)); + // Number NOT <: String + assert!(!constraints.satisfied_by(Ty::NUMBER, &store)); + } + } +} diff --git a/crates/jrsonnet-lsp-types/src/store/impl_core.rs b/crates/jrsonnet-lsp-types/src/store/impl_core.rs new file mode 100644 index 00000000..e7e49da0 --- /dev/null +++ b/crates/jrsonnet-lsp-types/src/store/impl_core.rs @@ -0,0 +1,255 @@ +use super::*; + +impl TyStore { + /// Create a new type store with well-known types pre-populated. + #[must_use] + pub fn new() -> Self { + let mut store = Self { + data: Vec::with_capacity(64), + dedup: FxHashMap::default(), + }; + store.init_builtins(); + store + } + + /// Initialize built-in well-known types. + fn init_builtins(&mut self) { + // Must match the order of Ty constants! 
+ let builtins = [ + TyData::Any, // 0 = ANY + TyData::Never, // 1 = NEVER + TyData::Null, // 2 = NULL + TyData::Bool, // 3 = BOOL + TyData::True, // 4 = TRUE + TyData::False, // 5 = FALSE + TyData::Number, // 6 = NUMBER + TyData::String, // 7 = STRING + TyData::Char, // 8 = CHAR + // Padding to RESERVED_COUNT + TyData::Any, // 9 - reserved + TyData::Any, // 10 - reserved + TyData::Any, // 11 - reserved + TyData::Any, // 12 - reserved + TyData::Any, // 13 - reserved + TyData::Any, // 14 - reserved + TyData::Any, // 15 - reserved + ]; + + for (i, data) in builtins.into_iter().enumerate() { + let Some(raw_id) = to_u32(i) else { + return; + }; + let ty = Ty::from_raw(raw_id); + self.data.push(data.clone()); + // Only dedup the non-padding entries + if i < 9 { + self.dedup.insert(data, ty); + } + } + + debug_assert_eq!(self.data.len(), Ty::RESERVED_COUNT as usize); + } + + /// Intern a type, returning existing ID if already present. + /// + pub fn intern(&mut self, data: TyData) -> Ty { + // Fast path for well-known types + if let Some(ty) = Ty::well_known_for_data(&data) { + return ty; + } + + // Check if already interned + if let Some(&existing) = self.dedup.get(&data) { + return existing; + } + + // Intern new type + let Some(raw_id) = to_u32(self.data.len()) else { + return Ty::ANY; + }; + let id = Ty::from_raw(raw_id); + self.data.push(data.clone()); + self.dedup.insert(data, id); + id + } + + /// Get a reference to type data with display capability. + /// + /// Returns a `TyRef` that derefs to `&TyData` and implements `Display`. + /// Use `*store.get(ty)` to pattern match on the underlying `TyData`. + #[inline] + #[must_use] + pub fn get(&self, ty: Ty) -> TyRef<'_> { + TyRef { store: self, ty } + } + + /// Get the number of interned types. + #[must_use] + pub fn len(&self) -> usize { + self.data.len() + } + + /// Check if empty (never true after init). + #[must_use] + pub fn is_empty(&self) -> bool { + self.data.is_empty() + } + + /// Create an array type. 
+ pub fn array(&mut self, elem: Ty) -> Ty { + self.intern(TyData::Array { + elem, + is_set: false, + }) + } + + /// Create a set type (array with sorted, unique elements). + pub fn array_set(&mut self, elem: Ty) -> Ty { + self.intern(TyData::Array { elem, is_set: true }) + } + + /// Create a tuple type. + pub fn tuple(&mut self, elems: Vec) -> Ty { + if elems.is_empty() { + // Empty tuple is a closed empty array + return self.intern(TyData::Tuple { elems: vec![] }); + } + self.intern(TyData::Tuple { elems }) + } + + /// Create an object type. + pub fn object(&mut self, data: ObjectData) -> Ty { + self.intern(TyData::Object(data)) + } + + /// Create an open object (unknown fields). + pub fn object_any(&mut self) -> Ty { + self.object(ObjectData::open()) + } + + /// Create a generic function type (accepts any args, returns any). + pub fn function_any(&mut self) -> Ty { + self.function(FunctionData { + params: vec![], + return_spec: ReturnSpec::Fixed(Ty::ANY), + variadic: true, + }) + } + + /// Create an `AttrsOf` type (object with uniform value type). + pub fn attrs_of(&mut self, value: Ty) -> Ty { + self.intern(TyData::AttrsOf { value }) + } + + /// Create a function type. + pub fn function(&mut self, data: FunctionData) -> Ty { + self.intern(TyData::Function(data)) + } + + /// Create a function with simple params and fixed return. + pub fn function_simple(&mut self, param_names: Vec<&str>, return_ty: Ty) -> Ty { + let params = param_names + .into_iter() + .map(|name| ParamInterned { + name: name.to_string(), + ty: Ty::ANY, + has_default: false, + }) + .collect(); + self.function(FunctionData { + params, + return_spec: ReturnSpec::Fixed(return_ty), + variadic: false, + }) + } + + /// Create a union type. 
+ pub fn union(&mut self, mut types: Vec) -> Ty { + // Simplification rules + match types.as_slice() { + [] => return Ty::NEVER, + [only] => return *only, + _ => {} + } + + // Flatten nested unions and remove duplicates + let mut flattened = Vec::with_capacity(types.len()); + for ty in types.drain(..) { + if ty == Ty::ANY { + return Ty::ANY; // Any absorbs everything + } + if ty == Ty::NEVER { + continue; // Never is identity for union + } + if let TyData::Union(ref inner) = *self.get(ty) { + flattened.extend(inner.iter().copied()); + } else if !flattened.contains(&ty) { + flattened.push(ty); + } + } + + // Sort for canonical form + flattened.sort_by_key(|t| t.0); + flattened.dedup(); + + match flattened.as_slice() { + [] => Ty::NEVER, + [only] => *only, + _ => self.intern(TyData::Union(flattened)), + } + } + + /// Create a sum (intersection) type. + pub fn sum(&mut self, mut types: Vec) -> Ty { + match types.as_slice() { + [] => return Ty::ANY, + [only] => return *only, + _ => {} + } + + // Flatten and simplify + let mut flattened = Vec::with_capacity(types.len()); + for ty in types.drain(..) { + if ty == Ty::NEVER { + return Ty::NEVER; // Never absorbs everything in intersection + } + if ty == Ty::ANY { + continue; // Any is identity for intersection + } + if let TyData::Sum(ref inner) = *self.get(ty) { + flattened.extend(inner.iter().copied()); + } else if !flattened.contains(&ty) { + flattened.push(ty); + } + } + + flattened.sort_by_key(|t| t.0); + flattened.dedup(); + + match flattened.as_slice() { + [] => Ty::ANY, + [only] => *only, + _ => self.intern(TyData::Sum(flattened)), + } + } + + /// Create a bounded number type. + pub fn bounded_number(&mut self, bounds: NumBounds) -> Ty { + self.intern(TyData::BoundedNumber(bounds)) + } + + /// Create a literal string type. + pub fn literal_string(&mut self, s: String) -> Ty { + self.intern(TyData::LiteralString(s)) + } + + /// Create a type variable. 
+ pub fn type_var(&mut self, id: TyVarId, constraints: TyConstraints) -> Ty { + self.intern(TyData::TypeVar { id, constraints }) + } + + /// Create a fresh type variable with no constraints. + pub fn fresh_var(&mut self) -> Ty { + self.type_var(TyVarId::fresh(), TyConstraints::none()) + } +} diff --git a/crates/jrsonnet-lsp-types/src/store/impl_transform.rs b/crates/jrsonnet-lsp-types/src/store/impl_transform.rs new file mode 100644 index 00000000..a0bb9fc1 --- /dev/null +++ b/crates/jrsonnet-lsp-types/src/store/impl_transform.rs @@ -0,0 +1,107 @@ +use super::*; + +impl TyStore { + /// Import a type from another store into this store. + /// + /// Well-known constants (ANY, NEVER, NULL, BOOL, TRUE, FALSE, NUMBER, STRING, CHAR) + /// are returned as-is since they have the same value across all stores. + /// Complex types are recursively imported and re-interned. + pub fn import_from(&mut self, ty: Ty, source: &S) -> Ty { + // Well-known constants are the same in all stores + if ty.is_well_known() { + return ty; + } + + // Complex types need re-interning + match source.get_data(ty) { + TyData::Array { elem, is_set } => { + let imported_elem = self.import_from(elem, source); + if is_set { + self.array_set(imported_elem) + } else { + self.array(imported_elem) + } + } + TyData::Tuple { elems } => { + let imported_elems: Vec<_> = + elems.iter().map(|&e| self.import_from(e, source)).collect(); + self.tuple(imported_elems) + } + TyData::Union(variants) => { + let imported_variants: Vec<_> = variants + .iter() + .map(|&v| self.import_from(v, source)) + .collect(); + self.union(imported_variants) + } + TyData::Object(obj) => { + let imported_fields: Vec<_> = obj + .fields + .iter() + .map(|(name, def)| { + ( + name.clone(), + FieldDefInterned { + ty: self.import_from(def.ty, source), + required: def.required, + visibility: def.visibility, + }, + ) + }) + .collect(); + self.object(ObjectData { + fields: imported_fields, + has_unknown: obj.has_unknown, + }) + } + 
TyData::Function(func) => { + let imported_params: Vec<_> = func + .params + .iter() + .map(|p| ParamInterned { + name: p.name.clone(), + ty: self.import_from(p.ty, source), + has_default: p.has_default, + }) + .collect(); + let imported_return_spec = match &func.return_spec { + ReturnSpec::Fixed(ret_ty) => { + ReturnSpec::Fixed(self.import_from(*ret_ty, source)) + } + other => other.clone(), + }; + self.intern(TyData::Function(FunctionData { + params: imported_params, + return_spec: imported_return_spec, + variadic: func.variadic, + })) + } + TyData::AttrsOf { value } => { + let imported_value = self.import_from(value, source); + self.intern(TyData::AttrsOf { + value: imported_value, + }) + } + TyData::BoundedNumber(bounds) => self.intern(TyData::BoundedNumber(bounds)), + TyData::LiteralString(s) => self.intern(TyData::LiteralString(s)), + TyData::TypeVar { id, constraints } => self.intern(TyData::TypeVar { id, constraints }), + TyData::Sum(variants) => { + let imported_variants: Vec<_> = variants + .iter() + .map(|&v| self.import_from(v, source)) + .collect(); + self.intern(TyData::Sum(imported_variants)) + } + // Primitives should have been caught by is_well_known(), but handle anyway + TyData::Any => Ty::ANY, + TyData::Never => Ty::NEVER, + TyData::Null => Ty::NULL, + TyData::Bool => Ty::BOOL, + TyData::True => Ty::TRUE, + TyData::False => Ty::FALSE, + TyData::Number => Ty::NUMBER, + TyData::String => Ty::STRING, + TyData::Char => Ty::CHAR, + } + } +} diff --git a/crates/jrsonnet-lsp-types/src/store/ops_impl.rs b/crates/jrsonnet-lsp-types/src/store/ops_impl.rs new file mode 100644 index 00000000..310bbcf2 --- /dev/null +++ b/crates/jrsonnet-lsp-types/src/store/ops_impl.rs @@ -0,0 +1,57 @@ +use super::*; + +impl TypeStoreOps for TyStore { + fn get_data(&self, ty: Ty) -> TyData { + self.get(ty).clone() + } + + fn display(&self, ty: Ty) -> String { + TyStore::display(self, ty) + } + + fn array(&mut self, elem: Ty) -> Ty { + TyStore::array(self, elem) + } + + fn 
array_set(&mut self, elem: Ty) -> Ty { + TyStore::array_set(self, elem) + } + + fn tuple(&mut self, elems: Vec) -> Ty { + TyStore::tuple(self, elems) + } + + fn object(&mut self, data: ObjectData) -> Ty { + TyStore::object(self, data) + } + + fn attrs_of(&mut self, value: Ty) -> Ty { + TyStore::attrs_of(self, value) + } + + fn function(&mut self, data: FunctionData) -> Ty { + TyStore::function(self, data) + } + + fn union(&mut self, types: Vec) -> Ty { + TyStore::union(self, types) + } + + fn sum(&mut self, types: Vec) -> Ty { + TyStore::sum(self, types) + } + + fn bounded_number(&mut self, bounds: NumBounds) -> Ty { + TyStore::bounded_number(self, bounds) + } + + fn literal_string(&mut self, s: String) -> Ty { + TyStore::literal_string(self, s) + } +} + +impl Default for TyStore { + fn default() -> Self { + Self::new() + } +} diff --git a/crates/jrsonnet-lsp-types/src/store/thread_local_store.rs b/crates/jrsonnet-lsp-types/src/store/thread_local_store.rs new file mode 100644 index 00000000..612e345c --- /dev/null +++ b/crates/jrsonnet-lsp-types/src/store/thread_local_store.rs @@ -0,0 +1,18 @@ +use std::cell::RefCell; + +use super::*; + +// Thread-local store for convenient access during analysis +thread_local! { + static STORE: RefCell = RefCell::new(TyStore::new()); +} + +/// Execute a function with access to the thread-local type store. +pub fn with_store(f: impl FnOnce(&mut TyStore) -> R) -> R { + STORE.with(|s| f(&mut s.borrow_mut())) +} + +/// Reset the thread-local store (useful for tests). +pub fn reset_store() { + STORE.with(|s| *s.borrow_mut() = TyStore::new()); +} From 92891987d06056ea3b40215fe78d3177fd6b16f6 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Mon, 16 Feb 2026 14:15:57 +0000 Subject: [PATCH 112/210] refactor(lsp-types): split substitution module by concern Split the monolithic substitution module into focused submodules. Move mapping helpers (new/apply/contains/len/is_empty) into subst/core.rs. 
Move merge/topological ordering/data-rewrite logic and inline tests into subst/merge.rs. Keep subst.rs as a thin module defining TySubst and wiring submodules. No behavior changes intended; verified with: cargo test -p jrsonnet-lsp-types --lib subst --- crates/jrsonnet-lsp-types/src/subst.rs | 582 +------------------ crates/jrsonnet-lsp-types/src/subst/core.rs | 39 ++ crates/jrsonnet-lsp-types/src/subst/merge.rs | 545 +++++++++++++++++ 3 files changed, 587 insertions(+), 579 deletions(-) create mode 100644 crates/jrsonnet-lsp-types/src/subst/core.rs create mode 100644 crates/jrsonnet-lsp-types/src/subst/merge.rs diff --git a/crates/jrsonnet-lsp-types/src/subst.rs b/crates/jrsonnet-lsp-types/src/subst.rs index d372d828..f00d5e53 100644 --- a/crates/jrsonnet-lsp-types/src/subst.rs +++ b/crates/jrsonnet-lsp-types/src/subst.rs @@ -11,14 +11,7 @@ use rustc_hash::FxHashMap; -use crate::{ - global_store::GlobalTyStore, - local_store::LocalTyStore, - store::{ - FieldDefInterned, FunctionData, ObjectData, ParamInterned, ReturnSpec, Ty, TyConstraints, - TyData, - }, -}; +use crate::store::Ty; /// Substitution mapping from local types to global types. /// @@ -30,574 +23,5 @@ pub struct TySubst { mapping: FxHashMap, } -impl TySubst { - /// Create an empty substitution. - #[must_use] - pub fn new() -> Self { - Self::default() - } - - /// Merge local types into the global store, building the substitution map. - /// - /// This is the main entry point. It: - /// 1. Topologically sorts local types by dependency order - /// 2. Interns each into the global store (applying current substitutions) - /// 3. Records the local→global mapping - /// - /// # Arguments - /// - `global`: The global store to merge into (mutably borrowed) - /// - `local`: The local store to merge from - /// - /// # Returns - /// A substitution that can be used to rewrite local Ty references. 
- pub fn merge(global: &GlobalTyStore, local: &LocalTyStore) -> Self { - let mut subst = Self::new(); - - if local.is_empty() { - return subst; - } - - // Get topological ordering of local types - let order = Self::topological_sort(local); - - // Process each local type in dependency order - for local_ty in order { - let local_data = local.get_data(local_ty); - - // Apply current substitution to the type data - let substituted_data = subst.apply_to_data(local_data); - - // Intern into global store (handles deduplication) - let global_ty = global.intern(substituted_data); - - // Record the mapping - subst.mapping.insert(local_ty, global_ty); - } - - subst - } - - /// Apply the substitution to a Ty. - /// - /// - Global types are returned unchanged - /// - Local types are looked up in the mapping - /// - Unknown local types return the original (shouldn't happen after merge) - #[must_use] - pub fn apply(&self, ty: Ty) -> Ty { - if ty.is_global() { - return ty; - } - self.mapping.get(&ty).copied().unwrap_or(ty) - } - - /// Check if this substitution contains a mapping for a type. - #[must_use] - pub fn contains(&self, ty: Ty) -> bool { - self.mapping.contains_key(&ty) - } - - /// Get the number of mappings. - #[must_use] - pub fn len(&self) -> usize { - self.mapping.len() - } - - /// Check if empty. - #[must_use] - pub fn is_empty(&self) -> bool { - self.mapping.is_empty() - } - - /// Apply the substitution to `TyData`, rewriting all Ty references. 
- fn apply_to_data(&self, data: &TyData) -> TyData { - match data { - // Primitives have no Ty references - TyData::Any => TyData::Any, - TyData::Never => TyData::Never, - TyData::Null => TyData::Null, - TyData::Bool => TyData::Bool, - TyData::True => TyData::True, - TyData::False => TyData::False, - TyData::Number => TyData::Number, - TyData::String => TyData::String, - TyData::Char => TyData::Char, - TyData::BoundedNumber(bounds) => TyData::BoundedNumber(*bounds), - TyData::LiteralString(s) => TyData::LiteralString(s.clone()), - - // Compound types - recursively apply substitution - TyData::Array { elem, is_set } => TyData::Array { - elem: self.apply_for_merge(*elem), - is_set: *is_set, - }, - - TyData::Tuple { elems } => TyData::Tuple { - elems: elems.iter().map(|&e| self.apply_for_merge(e)).collect(), - }, - - TyData::Union(variants) => { - TyData::Union(variants.iter().map(|&v| self.apply_for_merge(v)).collect()) - } - - TyData::Sum(variants) => { - TyData::Sum(variants.iter().map(|&v| self.apply_for_merge(v)).collect()) - } - - TyData::Object(obj) => TyData::Object(ObjectData { - fields: obj - .fields - .iter() - .map(|(name, def)| { - ( - name.clone(), - FieldDefInterned { - ty: self.apply_for_merge(def.ty), - required: def.required, - visibility: def.visibility, - }, - ) - }) - .collect(), - has_unknown: obj.has_unknown, - }), - - TyData::AttrsOf { value } => TyData::AttrsOf { - value: self.apply_for_merge(*value), - }, - - TyData::Function(func) => TyData::Function(FunctionData { - params: func - .params - .iter() - .map(|p| ParamInterned { - name: p.name.clone(), - ty: self.apply_for_merge(p.ty), - has_default: p.has_default, - }) - .collect(), - return_spec: match &func.return_spec { - ReturnSpec::Fixed(ret) => ReturnSpec::Fixed(self.apply_for_merge(*ret)), - other => other.clone(), - }, - variadic: func.variadic, - }), - - TyData::TypeVar { id, constraints } => TyData::TypeVar { - id: *id, - constraints: TyConstraints { - must_be_indexable: 
constraints.must_be_indexable, - must_support_fields: constraints.must_support_fields, - must_be_callable: constraints.must_be_callable, - upper_bound: constraints.upper_bound.map(|b| self.apply_for_merge(b)), - }, - }, - } - } - - /// Apply substitution during merge. - /// - /// Any unresolved local reference is converted to `any` instead of leaking a - /// local `Ty` into the global store. - fn apply_for_merge(&self, ty: Ty) -> Ty { - if ty.is_global() { - return ty; - } - self.mapping.get(&ty).copied().unwrap_or(Ty::ANY) - } - - /// Topologically sort local types by dependency order. - /// - /// Types that don't depend on other local types come first. - /// This ensures that when we process a type, all its dependencies - /// have already been mapped to global types. - fn topological_sort(local: &LocalTyStore) -> Vec { - let types: Vec<_> = local.iter().collect(); - let n = types.len(); - - if n == 0 { - return vec![]; - } - - // Build adjacency list: edges[i] = types that type i depends on - let mut in_degree: FxHashMap = FxHashMap::default(); - let mut dependents: FxHashMap> = FxHashMap::default(); - - for &(ty, _) in &types { - in_degree.insert(ty, 0); - dependents.insert(ty, Vec::new()); - } - - // Count dependencies (only local ones matter) - for &(ty, data) in &types { - let deps = Self::get_local_dependencies(data); - let Some(in_degree_entry) = in_degree.get_mut(&ty) else { - continue; - }; - *in_degree_entry = deps.len(); - for dep in deps { - if let Some(dep_list) = dependents.get_mut(&dep) { - dep_list.push(ty); - } - } - } - - // Kahn's algorithm for topological sort - let mut result = Vec::with_capacity(n); - let mut queue: Vec = in_degree - .iter() - .filter(|(_, °)| deg == 0) - .map(|(&ty, _)| ty) - .collect(); - - while let Some(ty) = queue.pop() { - result.push(ty); - - if let Some(deps) = dependents.get(&ty) { - for &dependent in deps { - if let Some(deg) = in_degree.get_mut(&dependent) { - *deg -= 1; - if *deg == 0 { - queue.push(dependent); - 
} - } - } - } - } - - // If we didn't process all types, there's a cycle. - // This shouldn't happen with well-formed types, but handle gracefully. - if result.len() < n { - // Add remaining types in arbitrary order - for &(ty, _) in &types { - if !result.contains(&ty) { - result.push(ty); - } - } - } - - result - } - - /// Get local Ty references in a `TyData`. - fn get_local_dependencies(data: &TyData) -> Vec { - let mut deps = Vec::new(); - Self::collect_local_refs(data, &mut deps); - deps - } - - /// Recursively collect local Ty references. - fn collect_local_refs(data: &TyData, deps: &mut Vec) { - match data { - TyData::Array { elem, .. } => { - if elem.is_local() { - deps.push(*elem); - } - } - TyData::Tuple { elems } => { - for &e in elems { - if e.is_local() { - deps.push(e); - } - } - } - TyData::Union(variants) | TyData::Sum(variants) => { - for &v in variants { - if v.is_local() { - deps.push(v); - } - } - } - TyData::Object(obj) => { - for (_, def) in &obj.fields { - if def.ty.is_local() { - deps.push(def.ty); - } - } - } - TyData::AttrsOf { value } => { - if value.is_local() { - deps.push(*value); - } - } - TyData::Function(func) => { - for p in &func.params { - if p.ty.is_local() { - deps.push(p.ty); - } - } - if let ReturnSpec::Fixed(ret) = &func.return_spec { - if ret.is_local() { - deps.push(*ret); - } - } - } - TyData::TypeVar { constraints, .. 
} => { - if let Some(bound) = constraints.upper_bound { - if bound.is_local() { - deps.push(bound); - } - } - } - // Primitives have no references - _ => {} - } - } -} - -#[cfg(test)] -mod tests { - use assert_matches::assert_matches; - - use super::*; - - #[test] - fn test_subst_empty_local() { - let global = GlobalTyStore::new(); - let local = LocalTyStore::new(); - - let subst = TySubst::merge(&global, &local); - assert!(subst.is_empty()); - } - - #[test] - fn test_subst_simple_type() { - let global = GlobalTyStore::new(); - let mut local = LocalTyStore::new(); - - // Create a local array type - let arr = local.intern(TyData::Array { - elem: Ty::NUMBER, - is_set: false, - }); - assert!(arr.is_local()); - - let subst = TySubst::merge(&global, &local); - assert_eq!(subst.len(), 1); - - // The mapped type should be global - let global_arr = subst.apply(arr); - assert!(global_arr.is_global()); - - // Verify the data is correct - let data = global.get_data(global_arr); - assert_matches!(data, TyData::Array { elem, .. } if elem == Ty::NUMBER); - } - - #[test] - fn test_subst_nested_types() { - let global = GlobalTyStore::new(); - let mut local = LocalTyStore::new(); - - // Create nested local types: Array> - let inner = local.intern(TyData::Array { - elem: Ty::NUMBER, - is_set: false, - }); - let outer = local.intern(TyData::Array { - elem: inner, - is_set: false, - }); - - assert!(inner.is_local()); - assert!(outer.is_local()); - - let subst = TySubst::merge(&global, &local); - assert_eq!(subst.len(), 2); - - // Both should be mapped to global - let global_inner = subst.apply(inner); - let global_outer = subst.apply(outer); - assert!(global_inner.is_global()); - assert!(global_outer.is_global()); - - // Verify the outer type references the global inner - let outer_data = global.get_data(global_outer); - assert_matches!(outer_data, TyData::Array { elem, .. 
} if elem == global_inner); - } - - #[test] - fn test_subst_global_types_unchanged() { - let subst = TySubst::new(); - - // Global types should pass through unchanged - assert_eq!(subst.apply(Ty::NUMBER), Ty::NUMBER); - assert_eq!(subst.apply(Ty::STRING), Ty::STRING); - assert_eq!(subst.apply(Ty::ANY), Ty::ANY); - } - - #[test] - fn test_subst_deduplication() { - let global = GlobalTyStore::new(); - - // Pre-intern a type in global - let global_arr = global.intern(TyData::Array { - elem: Ty::NUMBER, - is_set: false, - }); - - // Create the same type locally - let mut local = LocalTyStore::new(); - let local_arr = local.intern(TyData::Array { - elem: Ty::NUMBER, - is_set: false, - }); - - let subst = TySubst::merge(&global, &local); - - // Should map to the existing global type - assert_eq!(subst.apply(local_arr), global_arr); - } - - #[test] - fn test_subst_union_type() { - let global = GlobalTyStore::new(); - let mut local = LocalTyStore::new(); - - // Create a local union type - let union = local.intern(TyData::Union(vec![Ty::NUMBER, Ty::STRING])); - - let subst = TySubst::merge(&global, &local); - - let global_union = subst.apply(union); - assert!(global_union.is_global()); - - let data = global.get_data(global_union); - assert_matches!(data, TyData::Union(variants) if variants == vec![Ty::NUMBER, Ty::STRING]); - } - - #[test] - fn test_subst_object_type() { - let global = GlobalTyStore::new(); - let mut local = LocalTyStore::new(); - - // Create a local object type - let obj = local.intern(TyData::Object(ObjectData { - fields: vec![( - "name".to_string(), - FieldDefInterned { - ty: Ty::STRING, - required: true, - visibility: super::super::store::FieldVis::Normal, - }, - )], - has_unknown: false, - })); - - let subst = TySubst::merge(&global, &local); - - let global_obj = subst.apply(obj); - assert!(global_obj.is_global()); - } - - #[test] - fn test_topological_sort_independent() { - let mut local = LocalTyStore::new(); - - // Create independent types (no 
dependencies between them) - let _arr1 = local.intern(TyData::Array { - elem: Ty::NUMBER, - is_set: false, - }); - let _arr2 = local.intern(TyData::Array { - elem: Ty::STRING, - is_set: false, - }); - - let order = TySubst::topological_sort(&local); - assert_eq!(order.len(), 2); - } - - #[test] - fn test_topological_sort_dependent() { - let mut local = LocalTyStore::new(); - - // Create dependent types - let inner = local.intern(TyData::Array { - elem: Ty::NUMBER, - is_set: false, - }); - let outer = local.intern(TyData::Array { - elem: inner, - is_set: false, - }); - - let order = TySubst::topological_sort(&local); - // inner should come before outer (inner has no deps, outer depends on inner) - assert_eq!(order, vec![inner, outer]); - } - - #[test] - fn test_merge_cycle_does_not_leave_local_refs_in_global() { - let global = GlobalTyStore::new(); - let mut local = LocalTyStore::new(); - - // Construct a direct cycle between local types: - // L0 = array, L1 = array - let local_0 = local.intern(TyData::Array { - elem: Ty::from_raw_local(1), - is_set: false, - }); - let local_1 = local.intern(TyData::Array { - elem: Ty::from_raw_local(0), - is_set: false, - }); - - let subst = TySubst::merge(&global, &local); - - let global_0 = subst.apply(local_0); - let global_1 = subst.apply(local_1); - assert!(global_0.is_global()); - assert!(global_1.is_global()); - - let data_0 = global.get_data(global_0); - let data_1 = global.get_data(global_1); - assert!( - !type_data_contains_local_refs(&data_0), - "first merged global type still contains local refs: {data_0:?}" - ); - assert!( - !type_data_contains_local_refs(&data_1), - "second merged global type still contains local refs: {data_1:?}" - ); - } - - fn type_data_contains_local_refs(data: &TyData) -> bool { - match data { - TyData::Array { elem, .. 
} => elem.is_local(), - TyData::Tuple { elems } => elems.iter().any(|ty| ty.is_local()), - TyData::Union(variants) | TyData::Sum(variants) => { - variants.iter().any(|ty| ty.is_local()) - } - TyData::Object(obj) => obj.fields.iter().any(|(_, field)| field.ty.is_local()), - TyData::AttrsOf { value } => value.is_local(), - TyData::Function(func) => { - func.params.iter().any(|param| param.ty.is_local()) - || match &func.return_spec { - ReturnSpec::Fixed(ty) => ty.is_local(), - ReturnSpec::SameAsArg(_) - | ReturnSpec::ArrayOfArg(_) - | ReturnSpec::ArrayWithSameElements(_) - | ReturnSpec::SetWithSameElements(_) - | ReturnSpec::FuncReturnType(_) - | ReturnSpec::ArrayOfFuncReturn(_) - | ReturnSpec::FlatMapResult(_) - | ReturnSpec::NonNegative - | ReturnSpec::ObjectValuesType(_) => false, - } - } - TyData::TypeVar { constraints, .. } => { - constraints.upper_bound.is_some_and(Ty::is_local) - } - TyData::Any - | TyData::Never - | TyData::Null - | TyData::Bool - | TyData::True - | TyData::False - | TyData::Number - | TyData::String - | TyData::Char - | TyData::BoundedNumber(_) - | TyData::LiteralString(_) => false, - } - } -} +mod core; +mod merge; diff --git a/crates/jrsonnet-lsp-types/src/subst/core.rs b/crates/jrsonnet-lsp-types/src/subst/core.rs new file mode 100644 index 00000000..ad665eb3 --- /dev/null +++ b/crates/jrsonnet-lsp-types/src/subst/core.rs @@ -0,0 +1,39 @@ +use super::*; + +impl TySubst { + /// Create an empty substitution. + #[must_use] + pub fn new() -> Self { + Self::default() + } + /// Apply the substitution to a Ty. + /// + /// - Global types are returned unchanged + /// - Local types are looked up in the mapping + /// - Unknown local types return the original (shouldn't happen after merge) + #[must_use] + pub fn apply(&self, ty: Ty) -> Ty { + if ty.is_global() { + return ty; + } + self.mapping.get(&ty).copied().unwrap_or(ty) + } + + /// Check if this substitution contains a mapping for a type. 
+ #[must_use] + pub fn contains(&self, ty: Ty) -> bool { + self.mapping.contains_key(&ty) + } + + /// Get the number of mappings. + #[must_use] + pub fn len(&self) -> usize { + self.mapping.len() + } + + /// Check if empty. + #[must_use] + pub fn is_empty(&self) -> bool { + self.mapping.is_empty() + } +} diff --git a/crates/jrsonnet-lsp-types/src/subst/merge.rs b/crates/jrsonnet-lsp-types/src/subst/merge.rs new file mode 100644 index 00000000..f543470b --- /dev/null +++ b/crates/jrsonnet-lsp-types/src/subst/merge.rs @@ -0,0 +1,545 @@ +use rustc_hash::FxHashMap; + +use super::*; +use crate::{ + global_store::GlobalTyStore, + local_store::LocalTyStore, + store::{ + FieldDefInterned, FunctionData, ObjectData, ParamInterned, ReturnSpec, Ty, TyConstraints, + TyData, + }, +}; + +impl TySubst { + /// Merge local types into the global store, building the substitution map. + /// + /// This is the main entry point. It: + /// 1. Topologically sorts local types by dependency order + /// 2. Interns each into the global store (applying current substitutions) + /// 3. Records the local→global mapping + /// + /// # Arguments + /// - `global`: The global store to merge into (mutably borrowed) + /// - `local`: The local store to merge from + /// + /// # Returns + /// A substitution that can be used to rewrite local Ty references. 
+ pub fn merge(global: &GlobalTyStore, local: &LocalTyStore) -> Self { + let mut subst = Self::new(); + + if local.is_empty() { + return subst; + } + + // Get topological ordering of local types + let order = Self::topological_sort(local); + + // Process each local type in dependency order + for local_ty in order { + let local_data = local.get_data(local_ty); + + // Apply current substitution to the type data + let substituted_data = subst.apply_to_data(local_data); + + // Intern into global store (handles deduplication) + let global_ty = global.intern(substituted_data); + + // Record the mapping + subst.mapping.insert(local_ty, global_ty); + } + + subst + } + /// Apply the substitution to `TyData`, rewriting all Ty references. + fn apply_to_data(&self, data: &TyData) -> TyData { + match data { + // Primitives have no Ty references + TyData::Any => TyData::Any, + TyData::Never => TyData::Never, + TyData::Null => TyData::Null, + TyData::Bool => TyData::Bool, + TyData::True => TyData::True, + TyData::False => TyData::False, + TyData::Number => TyData::Number, + TyData::String => TyData::String, + TyData::Char => TyData::Char, + TyData::BoundedNumber(bounds) => TyData::BoundedNumber(*bounds), + TyData::LiteralString(s) => TyData::LiteralString(s.clone()), + + // Compound types - recursively apply substitution + TyData::Array { elem, is_set } => TyData::Array { + elem: self.apply_for_merge(*elem), + is_set: *is_set, + }, + + TyData::Tuple { elems } => TyData::Tuple { + elems: elems.iter().map(|&e| self.apply_for_merge(e)).collect(), + }, + + TyData::Union(variants) => { + TyData::Union(variants.iter().map(|&v| self.apply_for_merge(v)).collect()) + } + + TyData::Sum(variants) => { + TyData::Sum(variants.iter().map(|&v| self.apply_for_merge(v)).collect()) + } + + TyData::Object(obj) => TyData::Object(ObjectData { + fields: obj + .fields + .iter() + .map(|(name, def)| { + ( + name.clone(), + FieldDefInterned { + ty: self.apply_for_merge(def.ty), + required: def.required, 
+ visibility: def.visibility, + }, + ) + }) + .collect(), + has_unknown: obj.has_unknown, + }), + + TyData::AttrsOf { value } => TyData::AttrsOf { + value: self.apply_for_merge(*value), + }, + + TyData::Function(func) => TyData::Function(FunctionData { + params: func + .params + .iter() + .map(|p| ParamInterned { + name: p.name.clone(), + ty: self.apply_for_merge(p.ty), + has_default: p.has_default, + }) + .collect(), + return_spec: match &func.return_spec { + ReturnSpec::Fixed(ret) => ReturnSpec::Fixed(self.apply_for_merge(*ret)), + other => other.clone(), + }, + variadic: func.variadic, + }), + + TyData::TypeVar { id, constraints } => TyData::TypeVar { + id: *id, + constraints: TyConstraints { + must_be_indexable: constraints.must_be_indexable, + must_support_fields: constraints.must_support_fields, + must_be_callable: constraints.must_be_callable, + upper_bound: constraints.upper_bound.map(|b| self.apply_for_merge(b)), + }, + }, + } + } + + /// Apply substitution during merge. + /// + /// Any unresolved local reference is converted to `any` instead of leaking a + /// local `Ty` into the global store. + fn apply_for_merge(&self, ty: Ty) -> Ty { + if ty.is_global() { + return ty; + } + self.mapping.get(&ty).copied().unwrap_or(Ty::ANY) + } + + /// Topologically sort local types by dependency order. + /// + /// Types that don't depend on other local types come first. + /// This ensures that when we process a type, all its dependencies + /// have already been mapped to global types. 
+ fn topological_sort(local: &LocalTyStore) -> Vec { + let types: Vec<_> = local.iter().collect(); + let n = types.len(); + + if n == 0 { + return vec![]; + } + + // Build adjacency list: edges[i] = types that type i depends on + let mut in_degree: FxHashMap = FxHashMap::default(); + let mut dependents: FxHashMap> = FxHashMap::default(); + + for &(ty, _) in &types { + in_degree.insert(ty, 0); + dependents.insert(ty, Vec::new()); + } + + // Count dependencies (only local ones matter) + for &(ty, data) in &types { + let deps = Self::get_local_dependencies(data); + let Some(in_degree_entry) = in_degree.get_mut(&ty) else { + continue; + }; + *in_degree_entry = deps.len(); + for dep in deps { + if let Some(dep_list) = dependents.get_mut(&dep) { + dep_list.push(ty); + } + } + } + + // Kahn's algorithm for topological sort + let mut result = Vec::with_capacity(n); + let mut queue: Vec = in_degree + .iter() + .filter(|(_, °)| deg == 0) + .map(|(&ty, _)| ty) + .collect(); + + while let Some(ty) = queue.pop() { + result.push(ty); + + if let Some(deps) = dependents.get(&ty) { + for &dependent in deps { + if let Some(deg) = in_degree.get_mut(&dependent) { + *deg -= 1; + if *deg == 0 { + queue.push(dependent); + } + } + } + } + } + + // If we didn't process all types, there's a cycle. + // This shouldn't happen with well-formed types, but handle gracefully. + if result.len() < n { + // Add remaining types in arbitrary order + for &(ty, _) in &types { + if !result.contains(&ty) { + result.push(ty); + } + } + } + + result + } + + /// Get local Ty references in a `TyData`. + fn get_local_dependencies(data: &TyData) -> Vec { + let mut deps = Vec::new(); + Self::collect_local_refs(data, &mut deps); + deps + } + + /// Recursively collect local Ty references. + fn collect_local_refs(data: &TyData, deps: &mut Vec) { + match data { + TyData::Array { elem, .. 
} => { + if elem.is_local() { + deps.push(*elem); + } + } + TyData::Tuple { elems } => { + for &e in elems { + if e.is_local() { + deps.push(e); + } + } + } + TyData::Union(variants) | TyData::Sum(variants) => { + for &v in variants { + if v.is_local() { + deps.push(v); + } + } + } + TyData::Object(obj) => { + for (_, def) in &obj.fields { + if def.ty.is_local() { + deps.push(def.ty); + } + } + } + TyData::AttrsOf { value } => { + if value.is_local() { + deps.push(*value); + } + } + TyData::Function(func) => { + for p in &func.params { + if p.ty.is_local() { + deps.push(p.ty); + } + } + if let ReturnSpec::Fixed(ret) = &func.return_spec { + if ret.is_local() { + deps.push(*ret); + } + } + } + TyData::TypeVar { constraints, .. } => { + if let Some(bound) = constraints.upper_bound { + if bound.is_local() { + deps.push(bound); + } + } + } + // Primitives have no references + _ => {} + } + } +} + +#[cfg(test)] +mod tests { + use assert_matches::assert_matches; + + use super::*; + + #[test] + fn test_subst_empty_local() { + let global = GlobalTyStore::new(); + let local = LocalTyStore::new(); + + let subst = TySubst::merge(&global, &local); + assert!(subst.is_empty()); + } + + #[test] + fn test_subst_simple_type() { + let global = GlobalTyStore::new(); + let mut local = LocalTyStore::new(); + + // Create a local array type + let arr = local.intern(TyData::Array { + elem: Ty::NUMBER, + is_set: false, + }); + assert!(arr.is_local()); + + let subst = TySubst::merge(&global, &local); + assert_eq!(subst.len(), 1); + + // The mapped type should be global + let global_arr = subst.apply(arr); + assert!(global_arr.is_global()); + + // Verify the data is correct + let data = global.get_data(global_arr); + assert_matches!(data, TyData::Array { elem, .. 
} if elem == Ty::NUMBER); + } + + #[test] + fn test_subst_nested_types() { + let global = GlobalTyStore::new(); + let mut local = LocalTyStore::new(); + + // Create nested local types: Array> + let inner = local.intern(TyData::Array { + elem: Ty::NUMBER, + is_set: false, + }); + let outer = local.intern(TyData::Array { + elem: inner, + is_set: false, + }); + + assert!(inner.is_local()); + assert!(outer.is_local()); + + let subst = TySubst::merge(&global, &local); + assert_eq!(subst.len(), 2); + + // Both should be mapped to global + let global_inner = subst.apply(inner); + let global_outer = subst.apply(outer); + assert!(global_inner.is_global()); + assert!(global_outer.is_global()); + + // Verify the outer type references the global inner + let outer_data = global.get_data(global_outer); + assert_matches!(outer_data, TyData::Array { elem, .. } if elem == global_inner); + } + + #[test] + fn test_subst_global_types_unchanged() { + let subst = TySubst::new(); + + // Global types should pass through unchanged + assert_eq!(subst.apply(Ty::NUMBER), Ty::NUMBER); + assert_eq!(subst.apply(Ty::STRING), Ty::STRING); + assert_eq!(subst.apply(Ty::ANY), Ty::ANY); + } + + #[test] + fn test_subst_deduplication() { + let global = GlobalTyStore::new(); + + // Pre-intern a type in global + let global_arr = global.intern(TyData::Array { + elem: Ty::NUMBER, + is_set: false, + }); + + // Create the same type locally + let mut local = LocalTyStore::new(); + let local_arr = local.intern(TyData::Array { + elem: Ty::NUMBER, + is_set: false, + }); + + let subst = TySubst::merge(&global, &local); + + // Should map to the existing global type + assert_eq!(subst.apply(local_arr), global_arr); + } + + #[test] + fn test_subst_union_type() { + let global = GlobalTyStore::new(); + let mut local = LocalTyStore::new(); + + // Create a local union type + let union = local.intern(TyData::Union(vec![Ty::NUMBER, Ty::STRING])); + + let subst = TySubst::merge(&global, &local); + + let global_union = 
subst.apply(union); + assert!(global_union.is_global()); + + let data = global.get_data(global_union); + assert_matches!(data, TyData::Union(variants) if variants == vec![Ty::NUMBER, Ty::STRING]); + } + + #[test] + fn test_subst_object_type() { + let global = GlobalTyStore::new(); + let mut local = LocalTyStore::new(); + + // Create a local object type + let obj = local.intern(TyData::Object(ObjectData { + fields: vec![( + "name".to_string(), + FieldDefInterned { + ty: Ty::STRING, + required: true, + visibility: crate::store::FieldVis::Normal, + }, + )], + has_unknown: false, + })); + + let subst = TySubst::merge(&global, &local); + + let global_obj = subst.apply(obj); + assert!(global_obj.is_global()); + } + + #[test] + fn test_topological_sort_independent() { + let mut local = LocalTyStore::new(); + + // Create independent types (no dependencies between them) + let _arr1 = local.intern(TyData::Array { + elem: Ty::NUMBER, + is_set: false, + }); + let _arr2 = local.intern(TyData::Array { + elem: Ty::STRING, + is_set: false, + }); + + let order = TySubst::topological_sort(&local); + assert_eq!(order.len(), 2); + } + + #[test] + fn test_topological_sort_dependent() { + let mut local = LocalTyStore::new(); + + // Create dependent types + let inner = local.intern(TyData::Array { + elem: Ty::NUMBER, + is_set: false, + }); + let outer = local.intern(TyData::Array { + elem: inner, + is_set: false, + }); + + let order = TySubst::topological_sort(&local); + // inner should come before outer (inner has no deps, outer depends on inner) + assert_eq!(order, vec![inner, outer]); + } + + #[test] + fn test_merge_cycle_does_not_leave_local_refs_in_global() { + let global = GlobalTyStore::new(); + let mut local = LocalTyStore::new(); + + // Construct a direct cycle between local types: + // L0 = array, L1 = array + let local_0 = local.intern(TyData::Array { + elem: Ty::from_raw_local(1), + is_set: false, + }); + let local_1 = local.intern(TyData::Array { + elem: 
Ty::from_raw_local(0), + is_set: false, + }); + + let subst = TySubst::merge(&global, &local); + + let global_0 = subst.apply(local_0); + let global_1 = subst.apply(local_1); + assert!(global_0.is_global()); + assert!(global_1.is_global()); + + let data_0 = global.get_data(global_0); + let data_1 = global.get_data(global_1); + assert!( + !type_data_contains_local_refs(&data_0), + "first merged global type still contains local refs: {data_0:?}" + ); + assert!( + !type_data_contains_local_refs(&data_1), + "second merged global type still contains local refs: {data_1:?}" + ); + } + + fn type_data_contains_local_refs(data: &TyData) -> bool { + match data { + TyData::Array { elem, .. } => elem.is_local(), + TyData::Tuple { elems } => elems.iter().any(|ty| ty.is_local()), + TyData::Union(variants) | TyData::Sum(variants) => { + variants.iter().any(|ty| ty.is_local()) + } + TyData::Object(obj) => obj.fields.iter().any(|(_, field)| field.ty.is_local()), + TyData::AttrsOf { value } => value.is_local(), + TyData::Function(func) => { + func.params.iter().any(|param| param.ty.is_local()) + || match &func.return_spec { + ReturnSpec::Fixed(ty) => ty.is_local(), + ReturnSpec::SameAsArg(_) + | ReturnSpec::ArrayOfArg(_) + | ReturnSpec::ArrayWithSameElements(_) + | ReturnSpec::SetWithSameElements(_) + | ReturnSpec::FuncReturnType(_) + | ReturnSpec::ArrayOfFuncReturn(_) + | ReturnSpec::FlatMapResult(_) + | ReturnSpec::NonNegative + | ReturnSpec::ObjectValuesType(_) => false, + } + } + TyData::TypeVar { constraints, .. 
} => { + constraints.upper_bound.is_some_and(Ty::is_local) + } + TyData::Any + | TyData::Never + | TyData::Null + | TyData::Bool + | TyData::True + | TyData::False + | TyData::Number + | TyData::String + | TyData::Char + | TyData::BoundedNumber(_) + | TyData::LiteralString(_) => false, + } + } +} From 94695a1d71f4ca7fd4c93e6eb5b6aaf73d5e3266 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Mon, 16 Feb 2026 14:19:53 +0000 Subject: [PATCH 113/210] refactor(lsp-types): split mut store methods by concern Split the large MutStore module into focused submodules. Move constructors/interning helpers into mut_store/core.rs. Move type-query/display/operation helpers and inline tests into mut_store/analysis.rs. Move the TypeStoreOps adapter impl into mut_store/type_store_ops_impl.rs and keep mut_store.rs as a thin struct/module entrypoint. No behavior changes intended; verified with: cargo test -p jrsonnet-lsp-types --lib mut_store --- crates/jrsonnet-lsp-types/src/mut_store.rs | 574 +----------------- .../src/mut_store/analysis.rs | 286 +++++++++ .../jrsonnet-lsp-types/src/mut_store/core.rs | 245 ++++++++ .../src/mut_store/type_store_ops_impl.rs | 52 ++ 4 files changed, 587 insertions(+), 570 deletions(-) create mode 100644 crates/jrsonnet-lsp-types/src/mut_store/analysis.rs create mode 100644 crates/jrsonnet-lsp-types/src/mut_store/core.rs create mode 100644 crates/jrsonnet-lsp-types/src/mut_store/type_store_ops_impl.rs diff --git a/crates/jrsonnet-lsp-types/src/mut_store.rs b/crates/jrsonnet-lsp-types/src/mut_store.rs index 31930ea1..c95da337 100644 --- a/crates/jrsonnet-lsp-types/src/mut_store.rs +++ b/crates/jrsonnet-lsp-types/src/mut_store.rs @@ -6,14 +6,7 @@ use std::sync::Arc; -use crate::{ - global_store::GlobalTyStore, - local_store::LocalTyStore, - store::{ - FunctionData, NumBounds, ObjectData, ParamInterned, ReturnSpec, Ty, TyConstraints, TyData, - TyVarId, TypeStoreOps, - }, -}; +use crate::{global_store::GlobalTyStore, local_store::LocalTyStore}; /// Mutable 
store for type analysis - combines global and local stores. /// @@ -35,565 +28,6 @@ pub struct MutStore { local: LocalTyStore, } -impl MutStore { - /// Create a new mutable store wrapping a global store. - pub fn new(global: Arc) -> Self { - Self { - global, - local: LocalTyStore::new(), - } - } - - /// Create a new mutable store from a global store reference. - /// - /// Convenience constructor that clones the Arc. - pub fn from_ref(global: &Arc) -> Self { - Self::new(Arc::clone(global)) - } - - /// Get type data for any Ty (global or local). - #[must_use] - pub fn get_data(&self, ty: Ty) -> TyData { - if ty.is_local() { - self.local.get_data(ty).clone() - } else { - self.global.get_data(ty) - } - } - - /// Intern a type, checking global first, then local. - /// - /// - Well-known types return immediately - /// - Types already in global store return the global Ty - /// - Types already in local store return the local Ty - /// - New types are created in local store - pub fn intern(&mut self, data: TyData) -> Ty { - // Fast path for well-known types - if let Some(ty) = Ty::well_known_for_data(&data) { - return ty; - } - - // Check global store first (read-only) - { - let global_inner = self.global.read(); - if let Some(&existing) = global_inner.dedup.get(&data) { - return existing; - } - } - - // Check/create in local store - self.local.intern(data) - } - - /// Consume and return the local store for merging. - #[must_use] - pub fn into_local(self) -> LocalTyStore { - self.local - } - - /// Get reference to the global store. - #[must_use] - pub fn global(&self) -> &GlobalTyStore { - &self.global - } - - /// Get the Arc to the global store. - #[must_use] - pub fn global_arc(&self) -> &Arc { - &self.global - } - - /// Get reference to the local store. - #[must_use] - pub fn local(&self) -> &LocalTyStore { - &self.local - } - - // ========== Type constructors ========== - - /// Create an array type. 
- pub fn array(&mut self, elem: Ty) -> Ty { - self.intern(TyData::Array { - elem, - is_set: false, - }) - } - - /// Create a set type (array with sorted, unique elements). - pub fn array_set(&mut self, elem: Ty) -> Ty { - self.intern(TyData::Array { elem, is_set: true }) - } - - /// Create a tuple type. - pub fn tuple(&mut self, elems: Vec) -> Ty { - if elems.is_empty() { - return self.intern(TyData::Tuple { elems: vec![] }); - } - self.intern(TyData::Tuple { elems }) - } - - /// Create an object type. - pub fn object(&mut self, data: ObjectData) -> Ty { - self.intern(TyData::Object(data)) - } - - /// Create an open object (unknown fields). - pub fn object_any(&mut self) -> Ty { - self.object(ObjectData::open()) - } - - /// Create a generic function type (accepts any args, returns any). - pub fn function_any(&mut self) -> Ty { - self.function(FunctionData { - params: vec![], - return_spec: ReturnSpec::Fixed(Ty::ANY), - variadic: true, - }) - } - - /// Create an `AttrsOf` type (object with uniform value type). - pub fn attrs_of(&mut self, value: Ty) -> Ty { - self.intern(TyData::AttrsOf { value }) - } - - /// Create a function type. - pub fn function(&mut self, data: FunctionData) -> Ty { - self.intern(TyData::Function(data)) - } - - /// Create a function with simple params and fixed return. - pub fn function_simple(&mut self, param_names: Vec<&str>, return_ty: Ty) -> Ty { - let params = param_names - .into_iter() - .map(|name| ParamInterned { - name: name.to_string(), - ty: Ty::ANY, - has_default: false, - }) - .collect(); - self.function(FunctionData { - params, - return_spec: ReturnSpec::Fixed(return_ty), - variadic: false, - }) - } - - /// Create a union type. - pub fn union(&mut self, mut types: Vec) -> Ty { - // Simplification rules - match types.as_slice() { - [] => return Ty::NEVER, - [only] => return *only, - _ => {} - } - - // Flatten nested unions and remove duplicates - let mut flattened = Vec::with_capacity(types.len()); - for ty in types.drain(..) 
{ - if ty == Ty::ANY { - return Ty::ANY; // Any absorbs everything - } - if ty == Ty::NEVER { - continue; // Never is identity for union - } - if let TyData::Union(inner) = self.get_data(ty) { - flattened.extend(inner.iter().copied()); - } else if !flattened.contains(&ty) { - flattened.push(ty); - } - } - - // Sort for canonical form - flattened.sort_by_key(|t| t.id()); - flattened.dedup(); - - match flattened.as_slice() { - [] => Ty::NEVER, - [only] => *only, - _ => self.intern(TyData::Union(flattened)), - } - } - - /// Create a sum (intersection) type. - pub fn sum(&mut self, mut types: Vec) -> Ty { - match types.as_slice() { - [] => return Ty::ANY, - [only] => return *only, - _ => {} - } - - // Flatten and simplify - let mut flattened = Vec::with_capacity(types.len()); - for ty in types.drain(..) { - if ty == Ty::NEVER { - return Ty::NEVER; // Never absorbs everything in intersection - } - if ty == Ty::ANY { - continue; // Any is identity for intersection - } - if let TyData::Sum(inner) = self.get_data(ty) { - flattened.extend(inner.iter().copied()); - } else if !flattened.contains(&ty) { - flattened.push(ty); - } - } - - flattened.sort_by_key(|t| t.id()); - flattened.dedup(); - - match flattened.as_slice() { - [] => Ty::ANY, - [only] => *only, - _ => self.intern(TyData::Sum(flattened)), - } - } - - /// Create a bounded number type. - pub fn bounded_number(&mut self, bounds: NumBounds) -> Ty { - self.intern(TyData::BoundedNumber(bounds)) - } - - /// Create a literal string type. - pub fn literal_string(&mut self, s: String) -> Ty { - self.intern(TyData::LiteralString(s)) - } - - /// Create a type variable. - pub fn type_var(&mut self, id: TyVarId, constraints: TyConstraints) -> Ty { - self.intern(TyData::TypeVar { id, constraints }) - } - - /// Create a fresh type variable with no constraints. 
- pub fn fresh_var(&mut self) -> Ty { - self.type_var(TyVarId::fresh(), TyConstraints::none()) - } - - // ========== Type queries ========== - - /// Check if type is indexable. - #[must_use] - pub fn is_indexable(&self, ty: Ty) -> bool { - match self.get_data(ty) { - TyData::Any - | TyData::String - | TyData::Char - | TyData::Array { .. } - | TyData::Tuple { .. } - | TyData::Object(_) - | TyData::AttrsOf { .. } => true, - TyData::Union(types) | TyData::Sum(types) => { - types.iter().all(|&t| self.is_indexable(t)) - } - TyData::TypeVar { constraints, .. } => constraints.must_be_indexable, - _ => false, - } - } - - /// Check if type supports field access. - #[must_use] - pub fn supports_field_access(&self, ty: Ty) -> bool { - match self.get_data(ty) { - TyData::Any | TyData::Object(_) | TyData::AttrsOf { .. } => true, - TyData::Union(types) | TyData::Sum(types) => { - types.iter().all(|&t| self.supports_field_access(t)) - } - TyData::TypeVar { constraints, .. } => constraints.must_support_fields, - _ => false, - } - } - - /// Check if type is callable. - #[must_use] - pub fn is_callable(&self, ty: Ty) -> bool { - match self.get_data(ty) { - TyData::Any | TyData::Function(_) => true, - TyData::Union(types) | TyData::Sum(types) => types.iter().all(|&t| self.is_callable(t)), - TyData::TypeVar { constraints, .. } => constraints.must_be_callable, - _ => false, - } - } - - /// Check if a type has any type variables. - #[must_use] - pub fn has_type_vars(&self, ty: Ty) -> bool { - match self.get_data(ty) { - TyData::TypeVar { .. } => true, - TyData::Array { elem, .. 
} => self.has_type_vars(elem), - TyData::Tuple { elems } => elems.iter().any(|&e| self.has_type_vars(e)), - TyData::Union(variants) | TyData::Sum(variants) => { - variants.iter().any(|&v| self.has_type_vars(v)) - } - TyData::Object(obj) => obj.fields.iter().any(|(_, fd)| self.has_type_vars(fd.ty)), - TyData::AttrsOf { value } => self.has_type_vars(value), - TyData::Function(func) => { - func.params.iter().any(|p| self.has_type_vars(p.ty)) - || matches!(&func.return_spec, ReturnSpec::Fixed(ret) if self.has_type_vars(*ret)) - } - _ => false, - } - } - - /// Format a type for display. - #[must_use] - pub fn display(&self, ty: Ty) -> String { - match self.get_data(ty) { - TyData::Any => "any".to_string(), - TyData::Never => "never".to_string(), - TyData::Null => "null".to_string(), - TyData::Bool => "boolean".to_string(), - TyData::True => "true".to_string(), - TyData::False => "false".to_string(), - TyData::Number => "number".to_string(), - TyData::BoundedNumber(bounds) => match (bounds.min_f64(), bounds.max_f64()) { - (None, None) => "number".to_string(), - (Some(lo), Some(hi)) if (lo - hi).abs() < f64::EPSILON => { - if lo.fract() == 0.0 { - format!("{lo:.0}") - } else { - format!("{lo}") - } - } - (Some(lo), Some(hi)) => format!("number[{lo}..{hi}]"), - (Some(lo), None) => format!("number[{lo}..]"), - (None, Some(hi)) => format!("number[..{hi}]"), - }, - TyData::String => "string".to_string(), - TyData::Char => "char".to_string(), - TyData::LiteralString(s) => format!("\"{s}\""), - TyData::Array { elem, is_set } => { - let base = format!("array<{}>", self.display(elem)); - if is_set { - format!("set<{}>", self.display(elem)) - } else { - base - } - } - TyData::Tuple { elems } => { - let types: Vec<_> = elems.iter().map(|&t| self.display(t)).collect(); - format!("[{}]", types.join(", ")) - } - TyData::Object(obj) => { - if obj.fields.is_empty() && !obj.has_unknown { - "{}".to_string() - } else if obj.has_unknown { - "object".to_string() - } else { - let mut fields: 
Vec<_> = obj.fields.iter().map(|(n, _)| n.as_str()).collect(); - fields.sort_unstable(); - format!("{{ {} }}", fields.join(", ")) - } - } - TyData::AttrsOf { value } => format!("object<{}>", self.display(value)), - TyData::Function(func) => { - let params: Vec<_> = func.params.iter().map(|p| p.name.as_str()).collect(); - format!("function({})", params.join(", ")) - } - TyData::Union(types) => { - let parts: Vec<_> = types.iter().map(|&t| self.display(t)).collect(); - parts.join(" | ") - } - TyData::Sum(types) => { - let parts: Vec<_> = types.iter().map(|&t| self.display(t)).collect(); - parts.join(" & ") - } - TyData::TypeVar { id, constraints } => { - let mut s = id.to_string(); - if !constraints.is_empty() { - let mut parts: Vec = Vec::new(); - if constraints.must_be_indexable { - parts.push("indexable".to_string()); - } - if constraints.must_support_fields { - parts.push("object-like".to_string()); - } - if constraints.must_be_callable { - parts.push("callable".to_string()); - } - if let Some(bound) = constraints.upper_bound { - parts.push(format!("<: {}", self.display(bound))); - } - if !parts.is_empty() { - s.push_str(" where "); - s.push_str(&parts.join(", ")); - } - } - s - } - } - } - - /// Alias for `get_data()` to ease migration from `TyStore`. - /// - /// `TyStore::get()` returns `TyRef` which derefs to `TyData`. This returns - /// `TyData` directly. Callers using `*store.get(ty)` should use `store.get(ty)`. - #[inline] - #[must_use] - pub fn get(&self, ty: Ty) -> TyData { - self.get_data(ty) - } - - // ========== Type operations ========== - - /// Narrow a type by intersecting with a constraint. - pub fn narrow(&mut self, ty: Ty, constraint: Ty) -> Ty { - crate::operations::ty_and(ty, constraint, self) - } - - /// Narrow a type to one with an exact length. 
- /// - /// - Arrays become tuples with that length - /// - Strings with length 1 become Char - /// - Tuples must have matching length - pub fn with_len(&mut self, ty: Ty, len: usize) -> Ty { - crate::operations::ty_with_len(ty, len, self) - } - - /// Narrow a type to one with at least a minimum length. - pub fn with_min_len(&mut self, ty: Ty, min: usize) -> Ty { - crate::operations::ty_with_min_len(ty, min, self) - } - - /// Widen a type by removing a constraint. - pub fn widen(&mut self, base: Ty, remove: Ty) -> Ty { - crate::operations::ty_minus(base, remove, self) - } -} - -impl TypeStoreOps for MutStore { - fn get_data(&self, ty: Ty) -> TyData { - MutStore::get_data(self, ty) - } - - fn display(&self, ty: Ty) -> String { - MutStore::display(self, ty) - } - - fn array(&mut self, elem: Ty) -> Ty { - MutStore::array(self, elem) - } - - fn array_set(&mut self, elem: Ty) -> Ty { - MutStore::array_set(self, elem) - } - - fn tuple(&mut self, elems: Vec) -> Ty { - MutStore::tuple(self, elems) - } - - fn object(&mut self, data: ObjectData) -> Ty { - MutStore::object(self, data) - } - - fn attrs_of(&mut self, value: Ty) -> Ty { - MutStore::attrs_of(self, value) - } - - fn function(&mut self, data: FunctionData) -> Ty { - MutStore::function(self, data) - } - - fn union(&mut self, types: Vec) -> Ty { - MutStore::union(self, types) - } - - fn sum(&mut self, types: Vec) -> Ty { - MutStore::sum(self, types) - } - - fn bounded_number(&mut self, bounds: NumBounds) -> Ty { - MutStore::bounded_number(self, bounds) - } - - fn literal_string(&mut self, s: String) -> Ty { - MutStore::literal_string(self, s) - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_mut_store_intern_global_first() { - let global = Arc::new(GlobalTyStore::new()); - - // Pre-intern a type in global - let arr_global = global.intern(TyData::Array { - elem: Ty::NUMBER, - is_set: false, - }); - - // MutStore should find it in global - let mut store = MutStore::new(Arc::clone(&global)); - 
let arr = store.intern(TyData::Array { - elem: Ty::NUMBER, - is_set: false, - }); - - assert_eq!(arr, arr_global); - assert!(arr.is_global()); - assert!(store.local.is_empty()); - } - - #[test] - fn test_mut_store_intern_local() { - let global = Arc::new(GlobalTyStore::new()); - let mut store = MutStore::new(global); - - // Intern a new type not in global - let arr = store.intern(TyData::Array { - elem: Ty::STRING, - is_set: false, - }); - - assert!(arr.is_local()); - assert_eq!(store.local.len(), 1); - } - - #[test] - fn test_mut_store_get_data() { - let global = Arc::new(GlobalTyStore::new()); - let arr_global = global.intern(TyData::Array { - elem: Ty::NUMBER, - is_set: false, - }); - - let mut store = MutStore::new(Arc::clone(&global)); - let arr_local = store.intern(TyData::Array { - elem: Ty::STRING, - is_set: false, - }); - - // Should get data from both stores - assert!( - matches!(store.get_data(arr_global), TyData::Array { elem, .. } if elem == Ty::NUMBER) - ); - assert!( - matches!(store.get_data(arr_local), TyData::Array { elem, .. 
} if elem == Ty::STRING) - ); - } - - #[test] - fn test_mut_store_union() { - let global = Arc::new(GlobalTyStore::new()); - let mut store = MutStore::new(global); - - let union = store.union(vec![Ty::NUMBER, Ty::STRING]); - assert!(!union.is_well_known()); - - // Union with ANY is ANY - let with_any = store.union(vec![Ty::NUMBER, Ty::ANY]); - assert_eq!(with_any, Ty::ANY); - } - - #[test] - fn test_mut_store_display() { - let global = Arc::new(GlobalTyStore::new()); - let mut store = MutStore::new(global); - - assert_eq!(store.display(Ty::NUMBER), "number"); - - let arr = store.array(Ty::STRING); - assert_eq!(store.display(arr), "array"); - } -} +mod analysis; +mod core; +mod type_store_ops_impl; diff --git a/crates/jrsonnet-lsp-types/src/mut_store/analysis.rs b/crates/jrsonnet-lsp-types/src/mut_store/analysis.rs new file mode 100644 index 00000000..ef0f081d --- /dev/null +++ b/crates/jrsonnet-lsp-types/src/mut_store/analysis.rs @@ -0,0 +1,286 @@ +use super::*; +use crate::store::{ReturnSpec, Ty, TyData}; + +impl MutStore { + // ========== Type queries ========== + + /// Check if type is indexable. + #[must_use] + pub fn is_indexable(&self, ty: Ty) -> bool { + match self.get_data(ty) { + TyData::Any + | TyData::String + | TyData::Char + | TyData::Array { .. } + | TyData::Tuple { .. } + | TyData::Object(_) + | TyData::AttrsOf { .. } => true, + TyData::Union(types) | TyData::Sum(types) => { + types.iter().all(|&t| self.is_indexable(t)) + } + TyData::TypeVar { constraints, .. } => constraints.must_be_indexable, + _ => false, + } + } + + /// Check if type supports field access. + #[must_use] + pub fn supports_field_access(&self, ty: Ty) -> bool { + match self.get_data(ty) { + TyData::Any | TyData::Object(_) | TyData::AttrsOf { .. } => true, + TyData::Union(types) | TyData::Sum(types) => { + types.iter().all(|&t| self.supports_field_access(t)) + } + TyData::TypeVar { constraints, .. 
} => constraints.must_support_fields, + _ => false, + } + } + + /// Check if type is callable. + #[must_use] + pub fn is_callable(&self, ty: Ty) -> bool { + match self.get_data(ty) { + TyData::Any | TyData::Function(_) => true, + TyData::Union(types) | TyData::Sum(types) => types.iter().all(|&t| self.is_callable(t)), + TyData::TypeVar { constraints, .. } => constraints.must_be_callable, + _ => false, + } + } + + /// Check if a type has any type variables. + #[must_use] + pub fn has_type_vars(&self, ty: Ty) -> bool { + match self.get_data(ty) { + TyData::TypeVar { .. } => true, + TyData::Array { elem, .. } => self.has_type_vars(elem), + TyData::Tuple { elems } => elems.iter().any(|&e| self.has_type_vars(e)), + TyData::Union(variants) | TyData::Sum(variants) => { + variants.iter().any(|&v| self.has_type_vars(v)) + } + TyData::Object(obj) => obj.fields.iter().any(|(_, fd)| self.has_type_vars(fd.ty)), + TyData::AttrsOf { value } => self.has_type_vars(value), + TyData::Function(func) => { + func.params.iter().any(|p| self.has_type_vars(p.ty)) + || matches!(&func.return_spec, ReturnSpec::Fixed(ret) if self.has_type_vars(*ret)) + } + _ => false, + } + } + + /// Format a type for display. 
+ #[must_use] + pub fn display(&self, ty: Ty) -> String { + match self.get_data(ty) { + TyData::Any => "any".to_string(), + TyData::Never => "never".to_string(), + TyData::Null => "null".to_string(), + TyData::Bool => "boolean".to_string(), + TyData::True => "true".to_string(), + TyData::False => "false".to_string(), + TyData::Number => "number".to_string(), + TyData::BoundedNumber(bounds) => match (bounds.min_f64(), bounds.max_f64()) { + (None, None) => "number".to_string(), + (Some(lo), Some(hi)) if (lo - hi).abs() < f64::EPSILON => { + if lo.fract() == 0.0 { + format!("{lo:.0}") + } else { + format!("{lo}") + } + } + (Some(lo), Some(hi)) => format!("number[{lo}..{hi}]"), + (Some(lo), None) => format!("number[{lo}..]"), + (None, Some(hi)) => format!("number[..{hi}]"), + }, + TyData::String => "string".to_string(), + TyData::Char => "char".to_string(), + TyData::LiteralString(s) => format!("\"{s}\""), + TyData::Array { elem, is_set } => { + let base = format!("array<{}>", self.display(elem)); + if is_set { + format!("set<{}>", self.display(elem)) + } else { + base + } + } + TyData::Tuple { elems } => { + let types: Vec<_> = elems.iter().map(|&t| self.display(t)).collect(); + format!("[{}]", types.join(", ")) + } + TyData::Object(obj) => { + if obj.fields.is_empty() && !obj.has_unknown { + "{}".to_string() + } else if obj.has_unknown { + "object".to_string() + } else { + let mut fields: Vec<_> = obj.fields.iter().map(|(n, _)| n.as_str()).collect(); + fields.sort_unstable(); + format!("{{ {} }}", fields.join(", ")) + } + } + TyData::AttrsOf { value } => format!("object<{}>", self.display(value)), + TyData::Function(func) => { + let params: Vec<_> = func.params.iter().map(|p| p.name.as_str()).collect(); + format!("function({})", params.join(", ")) + } + TyData::Union(types) => { + let parts: Vec<_> = types.iter().map(|&t| self.display(t)).collect(); + parts.join(" | ") + } + TyData::Sum(types) => { + let parts: Vec<_> = types.iter().map(|&t| 
self.display(t)).collect(); + parts.join(" & ") + } + TyData::TypeVar { id, constraints } => { + let mut s = id.to_string(); + if !constraints.is_empty() { + let mut parts: Vec = Vec::new(); + if constraints.must_be_indexable { + parts.push("indexable".to_string()); + } + if constraints.must_support_fields { + parts.push("object-like".to_string()); + } + if constraints.must_be_callable { + parts.push("callable".to_string()); + } + if let Some(bound) = constraints.upper_bound { + parts.push(format!("<: {}", self.display(bound))); + } + if !parts.is_empty() { + s.push_str(" where "); + s.push_str(&parts.join(", ")); + } + } + s + } + } + } + + /// Alias for `get_data()` to ease migration from `TyStore`. + /// + /// `TyStore::get()` returns `TyRef` which derefs to `TyData`. This returns + /// `TyData` directly. Callers using `*store.get(ty)` should use `store.get(ty)`. + #[inline] + #[must_use] + pub fn get(&self, ty: Ty) -> TyData { + self.get_data(ty) + } + + // ========== Type operations ========== + + /// Narrow a type by intersecting with a constraint. + pub fn narrow(&mut self, ty: Ty, constraint: Ty) -> Ty { + crate::operations::ty_and(ty, constraint, self) + } + + /// Narrow a type to one with an exact length. + /// + /// - Arrays become tuples with that length + /// - Strings with length 1 become Char + /// - Tuples must have matching length + pub fn with_len(&mut self, ty: Ty, len: usize) -> Ty { + crate::operations::ty_with_len(ty, len, self) + } + + /// Narrow a type to one with at least a minimum length. + pub fn with_min_len(&mut self, ty: Ty, min: usize) -> Ty { + crate::operations::ty_with_min_len(ty, min, self) + } + + /// Widen a type by removing a constraint. 
+ pub fn widen(&mut self, base: Ty, remove: Ty) -> Ty { + crate::operations::ty_minus(base, remove, self) + } +} + +#[cfg(test)] +mod tests { + use std::sync::Arc; + + use super::*; + use crate::global_store::GlobalTyStore; + + #[test] + fn test_mut_store_intern_global_first() { + let global = Arc::new(GlobalTyStore::new()); + + // Pre-intern a type in global + let arr_global = global.intern(TyData::Array { + elem: Ty::NUMBER, + is_set: false, + }); + + // MutStore should find it in global + let mut store = MutStore::new(Arc::clone(&global)); + let arr = store.intern(TyData::Array { + elem: Ty::NUMBER, + is_set: false, + }); + + assert_eq!(arr, arr_global); + assert!(arr.is_global()); + assert!(store.local.is_empty()); + } + + #[test] + fn test_mut_store_intern_local() { + let global = Arc::new(GlobalTyStore::new()); + let mut store = MutStore::new(global); + + // Intern a new type not in global + let arr = store.intern(TyData::Array { + elem: Ty::STRING, + is_set: false, + }); + + assert!(arr.is_local()); + assert_eq!(store.local.len(), 1); + } + + #[test] + fn test_mut_store_get_data() { + let global = Arc::new(GlobalTyStore::new()); + let arr_global = global.intern(TyData::Array { + elem: Ty::NUMBER, + is_set: false, + }); + + let mut store = MutStore::new(Arc::clone(&global)); + let arr_local = store.intern(TyData::Array { + elem: Ty::STRING, + is_set: false, + }); + + // Should get data from both stores + assert!( + matches!(store.get_data(arr_global), TyData::Array { elem, .. } if elem == Ty::NUMBER) + ); + assert!( + matches!(store.get_data(arr_local), TyData::Array { elem, .. 
} if elem == Ty::STRING) + ); + } + + #[test] + fn test_mut_store_union() { + let global = Arc::new(GlobalTyStore::new()); + let mut store = MutStore::new(global); + + let union = store.union(vec![Ty::NUMBER, Ty::STRING]); + assert!(!union.is_well_known()); + + // Union with ANY is ANY + let with_any = store.union(vec![Ty::NUMBER, Ty::ANY]); + assert_eq!(with_any, Ty::ANY); + } + + #[test] + fn test_mut_store_display() { + let global = Arc::new(GlobalTyStore::new()); + let mut store = MutStore::new(global); + + assert_eq!(store.display(Ty::NUMBER), "number"); + + let arr = store.array(Ty::STRING); + assert_eq!(store.display(arr), "array"); + } +} diff --git a/crates/jrsonnet-lsp-types/src/mut_store/core.rs b/crates/jrsonnet-lsp-types/src/mut_store/core.rs new file mode 100644 index 00000000..edd21e36 --- /dev/null +++ b/crates/jrsonnet-lsp-types/src/mut_store/core.rs @@ -0,0 +1,245 @@ +use std::sync::Arc; + +use super::*; +use crate::{ + global_store::GlobalTyStore, + local_store::LocalTyStore, + store::{ + FunctionData, NumBounds, ObjectData, ParamInterned, ReturnSpec, Ty, TyConstraints, TyData, + TyVarId, + }, +}; + +impl MutStore { + /// Create a new mutable store wrapping a global store. + pub fn new(global: Arc) -> Self { + Self { + global, + local: LocalTyStore::new(), + } + } + + /// Create a new mutable store from a global store reference. + /// + /// Convenience constructor that clones the Arc. + pub fn from_ref(global: &Arc) -> Self { + Self::new(Arc::clone(global)) + } + + /// Get type data for any Ty (global or local). + #[must_use] + pub fn get_data(&self, ty: Ty) -> TyData { + if ty.is_local() { + self.local.get_data(ty).clone() + } else { + self.global.get_data(ty) + } + } + + /// Intern a type, checking global first, then local. 
+ /// + /// - Well-known types return immediately + /// - Types already in global store return the global Ty + /// - Types already in local store return the local Ty + /// - New types are created in local store + pub fn intern(&mut self, data: TyData) -> Ty { + // Fast path for well-known types + if let Some(ty) = Ty::well_known_for_data(&data) { + return ty; + } + + // Check global store first (read-only) + { + let global_inner = self.global.read(); + if let Some(&existing) = global_inner.dedup.get(&data) { + return existing; + } + } + + // Check/create in local store + self.local.intern(data) + } + + /// Consume and return the local store for merging. + #[must_use] + pub fn into_local(self) -> LocalTyStore { + self.local + } + + /// Get reference to the global store. + #[must_use] + pub fn global(&self) -> &GlobalTyStore { + &self.global + } + + /// Get the Arc to the global store. + #[must_use] + pub fn global_arc(&self) -> &Arc { + &self.global + } + + /// Get reference to the local store. + #[must_use] + pub fn local(&self) -> &LocalTyStore { + &self.local + } + + // ========== Type constructors ========== + + /// Create an array type. + pub fn array(&mut self, elem: Ty) -> Ty { + self.intern(TyData::Array { + elem, + is_set: false, + }) + } + + /// Create a set type (array with sorted, unique elements). + pub fn array_set(&mut self, elem: Ty) -> Ty { + self.intern(TyData::Array { elem, is_set: true }) + } + + /// Create a tuple type. + pub fn tuple(&mut self, elems: Vec) -> Ty { + if elems.is_empty() { + return self.intern(TyData::Tuple { elems: vec![] }); + } + self.intern(TyData::Tuple { elems }) + } + + /// Create an object type. + pub fn object(&mut self, data: ObjectData) -> Ty { + self.intern(TyData::Object(data)) + } + + /// Create an open object (unknown fields). + pub fn object_any(&mut self) -> Ty { + self.object(ObjectData::open()) + } + + /// Create a generic function type (accepts any args, returns any). 
+ pub fn function_any(&mut self) -> Ty { + self.function(FunctionData { + params: vec![], + return_spec: ReturnSpec::Fixed(Ty::ANY), + variadic: true, + }) + } + + /// Create an `AttrsOf` type (object with uniform value type). + pub fn attrs_of(&mut self, value: Ty) -> Ty { + self.intern(TyData::AttrsOf { value }) + } + + /// Create a function type. + pub fn function(&mut self, data: FunctionData) -> Ty { + self.intern(TyData::Function(data)) + } + + /// Create a function with simple params and fixed return. + pub fn function_simple(&mut self, param_names: Vec<&str>, return_ty: Ty) -> Ty { + let params = param_names + .into_iter() + .map(|name| ParamInterned { + name: name.to_string(), + ty: Ty::ANY, + has_default: false, + }) + .collect(); + self.function(FunctionData { + params, + return_spec: ReturnSpec::Fixed(return_ty), + variadic: false, + }) + } + + /// Create a union type. + pub fn union(&mut self, mut types: Vec) -> Ty { + // Simplification rules + match types.as_slice() { + [] => return Ty::NEVER, + [only] => return *only, + _ => {} + } + + // Flatten nested unions and remove duplicates + let mut flattened = Vec::with_capacity(types.len()); + for ty in types.drain(..) { + if ty == Ty::ANY { + return Ty::ANY; // Any absorbs everything + } + if ty == Ty::NEVER { + continue; // Never is identity for union + } + if let TyData::Union(inner) = self.get_data(ty) { + flattened.extend(inner.iter().copied()); + } else if !flattened.contains(&ty) { + flattened.push(ty); + } + } + + // Sort for canonical form + flattened.sort_by_key(|t| t.id()); + flattened.dedup(); + + match flattened.as_slice() { + [] => Ty::NEVER, + [only] => *only, + _ => self.intern(TyData::Union(flattened)), + } + } + + /// Create a sum (intersection) type. 
+ pub fn sum(&mut self, mut types: Vec) -> Ty { + match types.as_slice() { + [] => return Ty::ANY, + [only] => return *only, + _ => {} + } + + // Flatten and simplify + let mut flattened = Vec::with_capacity(types.len()); + for ty in types.drain(..) { + if ty == Ty::NEVER { + return Ty::NEVER; // Never absorbs everything in intersection + } + if ty == Ty::ANY { + continue; // Any is identity for intersection + } + if let TyData::Sum(inner) = self.get_data(ty) { + flattened.extend(inner.iter().copied()); + } else if !flattened.contains(&ty) { + flattened.push(ty); + } + } + + flattened.sort_by_key(|t| t.id()); + flattened.dedup(); + + match flattened.as_slice() { + [] => Ty::ANY, + [only] => *only, + _ => self.intern(TyData::Sum(flattened)), + } + } + + /// Create a bounded number type. + pub fn bounded_number(&mut self, bounds: NumBounds) -> Ty { + self.intern(TyData::BoundedNumber(bounds)) + } + + /// Create a literal string type. + pub fn literal_string(&mut self, s: String) -> Ty { + self.intern(TyData::LiteralString(s)) + } + + /// Create a type variable. + pub fn type_var(&mut self, id: TyVarId, constraints: TyConstraints) -> Ty { + self.intern(TyData::TypeVar { id, constraints }) + } + + /// Create a fresh type variable with no constraints. 
+ pub fn fresh_var(&mut self) -> Ty { + self.type_var(TyVarId::fresh(), TyConstraints::none()) + } +} diff --git a/crates/jrsonnet-lsp-types/src/mut_store/type_store_ops_impl.rs b/crates/jrsonnet-lsp-types/src/mut_store/type_store_ops_impl.rs new file mode 100644 index 00000000..f5073c39 --- /dev/null +++ b/crates/jrsonnet-lsp-types/src/mut_store/type_store_ops_impl.rs @@ -0,0 +1,52 @@ +use super::*; +use crate::store::{FunctionData, NumBounds, ObjectData, Ty, TyData, TypeStoreOps}; + +impl TypeStoreOps for MutStore { + fn get_data(&self, ty: Ty) -> TyData { + MutStore::get_data(self, ty) + } + + fn display(&self, ty: Ty) -> String { + MutStore::display(self, ty) + } + + fn array(&mut self, elem: Ty) -> Ty { + MutStore::array(self, elem) + } + + fn array_set(&mut self, elem: Ty) -> Ty { + MutStore::array_set(self, elem) + } + + fn tuple(&mut self, elems: Vec) -> Ty { + MutStore::tuple(self, elems) + } + + fn object(&mut self, data: ObjectData) -> Ty { + MutStore::object(self, data) + } + + fn attrs_of(&mut self, value: Ty) -> Ty { + MutStore::attrs_of(self, value) + } + + fn function(&mut self, data: FunctionData) -> Ty { + MutStore::function(self, data) + } + + fn union(&mut self, types: Vec) -> Ty { + MutStore::union(self, types) + } + + fn sum(&mut self, types: Vec) -> Ty { + MutStore::sum(self, types) + } + + fn bounded_number(&mut self, bounds: NumBounds) -> Ty { + MutStore::bounded_number(self, bounds) + } + + fn literal_string(&mut self, s: String) -> Ty { + MutStore::literal_string(self, s) + } +} From 8365213200dd586ad66d7af50edeee27b7429b95 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Mon, 16 Feb 2026 14:23:40 +0000 Subject: [PATCH 114/210] refactor(lsp-types): split unification module by role Split the unification module into focused submodules. Move public unification data/types (Variance, PathElement, UnifyError/Reason/Result) into unification/types.rs. 
Move the unification algorithm, subtype/equivalence helpers, and inline tests into unification/algorithm.rs. Keep unification.rs as a thin entrypoint that re-exports the public API. No behavior changes intended; verified with: cargo test -p jrsonnet-lsp-types --lib unification --- crates/jrsonnet-lsp-types/src/unification.rs | 850 +----------------- .../src/unification/algorithm.rs | 678 ++++++++++++++ .../src/unification/types.rs | 161 ++++ 3 files changed, 843 insertions(+), 846 deletions(-) create mode 100644 crates/jrsonnet-lsp-types/src/unification/algorithm.rs create mode 100644 crates/jrsonnet-lsp-types/src/unification/types.rs diff --git a/crates/jrsonnet-lsp-types/src/unification.rs b/crates/jrsonnet-lsp-types/src/unification.rs index 81a8e23b..17b11119 100644 --- a/crates/jrsonnet-lsp-types/src/unification.rs +++ b/crates/jrsonnet-lsp-types/src/unification.rs @@ -27,850 +27,8 @@ //! unify(fn(Number) -> String, fn(Any) -> String, Covariant) // Err //! ``` -use std::fmt::Write as _; +mod algorithm; +mod types; -use crate::store::{Ty, TyData, TypeStoreOps}; - -/// Variance determines how subtyping propagates through type constructors. -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub enum Variance { - /// Normal subtyping direction. More specific types can substitute for general ones. - Covariant, - /// Reversed subtyping direction. Used for function parameters. - Contravariant, -} - -impl Variance { - /// Flip variance (used when descending into contravariant positions). - #[must_use] - pub fn flip(self) -> Self { - match self { - Variance::Covariant => Variance::Contravariant, - Variance::Contravariant => Variance::Covariant, - } - } -} - -/// Path element describing where in the type structure an error occurred. -#[derive(Debug, Clone, PartialEq, Eq)] -pub enum PathElement { - /// In a function parameter at the given index (0-based). - Parameter(usize), - /// In the return type of a function. - ReturnType, - /// In an object field with the given name. 
- Field(String), - /// In an array element type. - ArrayElement, - /// In a tuple element at the given index. - TupleElement(usize), - /// In a union variant. - UnionVariant, -} - -impl std::fmt::Display for PathElement { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - PathElement::Parameter(i) => write!(f, "parameter {}", i + 1), - PathElement::ReturnType => write!(f, "return type"), - PathElement::Field(name) => write!(f, "field '{name}'"), - PathElement::ArrayElement => write!(f, "array element"), - PathElement::TupleElement(i) => write!(f, "element {i}"), - PathElement::UnionVariant => write!(f, "union variant"), - } - } -} - -/// Reason why unification failed. -#[derive(Debug, Clone, PartialEq, Eq)] -pub enum UnifyReason { - /// Incompatible base types (e.g., Number vs String). - TypeMismatch, - /// Object is missing a required field. - MissingField(String), - /// Closed object has an unexpected field. - ExtraField(String), - /// Function parameter count doesn't match. - ParamCountMismatch { got: usize, expected: usize }, - /// Nested unification error (for recursive structures). - Nested(Box), - /// Failed to unify with any variant of a union type. - /// Contains the errors from attempting each variant. - UnionMismatch(Vec), -} - -/// Detailed error explaining why unification failed. -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct UnifyError { - /// The type that was provided (interned). - pub got: Ty, - /// The type that was expected (interned). - pub expected: Ty, - /// Why unification failed. - pub reason: UnifyReason, - /// Path to where the error occurred (innermost first). - pub path: Vec, -} - -impl UnifyError { - /// Create a new unification error. - #[must_use] - pub fn new(got: Ty, expected: Ty, reason: UnifyReason) -> Self { - Self { - got, - expected, - reason, - path: Vec::new(), - } - } - - /// Add a path element to track where in the type structure the error occurred. 
- #[must_use] - pub fn with_path(mut self, element: PathElement) -> Self { - self.path.push(element); - self - } - - /// Format the error for display. - pub fn format(&self, store: &S) -> String { - let mut msg = format!( - "type mismatch: expected `{}`, got `{}`", - store.display(self.expected), - store.display(self.got) - ); - - if !self.path.is_empty() { - msg.push_str(" in "); - for (i, element) in self.path.iter().rev().enumerate() { - if i > 0 { - msg.push_str(" -> "); - } - msg.push_str(&element.to_string()); - } - } - - match &self.reason { - UnifyReason::TypeMismatch => {} - UnifyReason::MissingField(name) => { - let _ = write!(msg, " (missing required field '{name}')"); - } - UnifyReason::ExtraField(name) => { - let _ = write!(msg, " (unexpected field '{name}')"); - } - UnifyReason::ParamCountMismatch { got, expected } => { - let _ = write!(msg, " (expected {expected} parameters, got {got})"); - } - UnifyReason::Nested(inner) => { - let _ = write!(msg, " caused by: {}", inner.format(store)); - } - UnifyReason::UnionMismatch(variant_errors) => { - msg.push_str(" (no matching union variant)"); - for err in variant_errors { - // Show the variant type that was attempted - let reason = match &err.reason { - UnifyReason::TypeMismatch => "type mismatch".to_string(), - UnifyReason::MissingField(name) => format!("missing field '{name}'"), - UnifyReason::ExtraField(name) => format!("unexpected field '{name}'"), - UnifyReason::ParamCountMismatch { got, expected } => { - format!("expected {expected} params, got {got}") - } - UnifyReason::Nested(inner) => inner.format(store), - UnifyReason::UnionMismatch(_) => "nested union mismatch".to_string(), - }; - let _ = write!(msg, "\n tried `{}`: {reason}", store.display(err.expected)); - } - } - } - - msg - } -} - -/// Result of unification. -pub type UnifyResult = Result<(), UnifyError>; - -/// Check if `got` type can be used where `expected` type is required. 
-/// -/// This is the primary API for type unification with interned types. -/// Works directly with `TyData`, avoiding allocation from `export()`. -/// -/// # Errors -/// Returns `Err(UnifyError)` when the provided types are incompatible under the -/// requested variance. -pub fn unify_ty( - store: &S, - got: Ty, - expected: Ty, - variance: Variance, -) -> UnifyResult { - // Fast path: identical types always unify - if got == expected { - return Ok(()); - } - - // Fast paths for well-known types - if got == Ty::NEVER { - return Ok(()); // Never is bottom, unifies with anything - } - if expected == Ty::ANY { - return Ok(()); // Any accepts everything in covariant position - } - if got == Ty::ANY && variance == Variance::Contravariant { - return Ok(()); // Any in contravariant position - } - - // Delegate to the full implementation - unify_ty_impl(store, got, expected, variance) -} - -/// Internal unification implementation. -fn unify_ty_impl( - store: &S, - got: Ty, - expected: Ty, - variance: Variance, -) -> UnifyResult { - let got_data = store.get_data(got); - let expected_data = store.get_data(expected); - - // Handle top and bottom types - match (&got_data, &expected_data) { - // Never is bottom type - unifies with anything - // Any = Any regardless of variance - (TyData::Never, _) | (TyData::Any, TyData::Any) => return Ok(()), - - // When expected is Any (and got is not Any) - (_, TyData::Any) => { - return match variance { - Variance::Covariant => Ok(()), - Variance::Contravariant => Err(make_error(got, expected)), - }; - } - - // When got is Any (and expected is not Any) - (TyData::Any, _) => { - return match variance { - Variance::Contravariant => Ok(()), - Variance::Covariant => Err(make_error(got, expected)), - }; - } - - // Expecting Never means unreachable code - only Never satisfies it - (_, TyData::Never) => return Err(make_error(got, expected)), - - // Type variables unify with any type - (TyData::TypeVar { constraints, .. 
}, _) => { - if let Some(bound) = &constraints.upper_bound { - return unify_ty(store, *bound, expected, variance); - } - return Ok(()); - } - (_, TyData::TypeVar { constraints, .. }) => { - if let Some(bound) = &constraints.upper_bound { - return unify_ty(store, got, *bound, variance); - } - return Ok(()); - } - - _ => {} - } - - // Handle unions - if let TyData::Union(got_variants) = store.get_data(got) { - // All variants of got must unify with expected - for variant in got_variants { - unify_ty(store, variant, expected, variance)?; - } - return Ok(()); - } - - if let TyData::Union(expected_variants) = store.get_data(expected) { - // Got must unify with at least one expected variant - // Collect all errors for comprehensive reporting - let mut variant_errors = Vec::new(); - for variant in expected_variants { - match unify_ty(store, got, variant, variance) { - Ok(()) => return Ok(()), - Err(e) => variant_errors.push(e), - } - } - // None of the variants matched - report all errors - return Err(UnifyError::new( - got, - expected, - UnifyReason::UnionMismatch(variant_errors), - )); - } - - // Re-fetch data for the concrete type matching - let got_data = store.get_data(got); - let expected_data = store.get_data(expected); - - // Now handle concrete type pairs - match (&got_data, &expected_data) { - // Primitives must match exactly - (TyData::Null, TyData::Null) - | (TyData::Bool | TyData::True | TyData::False, TyData::Bool) - | (TyData::Number | TyData::BoundedNumber(_), TyData::Number) - | (TyData::String | TyData::Char | TyData::LiteralString(_), TyData::String) - | (TyData::Char, TyData::Char) - | (TyData::True, TyData::True) - | (TyData::False, TyData::False) => Ok(()), - (TyData::LiteralString(a), TyData::LiteralString(b)) if a == b => Ok(()), - - // BoundedNumber with narrower bounds is subtype of wider - (TyData::BoundedNumber(got_bounds), TyData::BoundedNumber(expected_bounds)) => { - if got_bounds.is_subset_of(expected_bounds) { - Ok(()) - } else { - 
Err(make_error(got, expected)) - } - } - - // Arrays are covariant in element type - ( - TyData::Array { elem: got_elem, .. }, - TyData::Array { - elem: expected_elem, - .. - }, - ) => unify_ty(store, *got_elem, *expected_elem, variance) - .map_err(|e| e.with_path(PathElement::ArrayElement)), - - // Tuple to Array: all tuple elements must unify with array element - ( - TyData::Tuple { elems }, - TyData::Array { - elem: expected_elem, - .. - }, - ) => { - for (i, elem) in elems.iter().enumerate() { - unify_ty(store, *elem, *expected_elem, variance) - .map_err(|e| e.with_path(PathElement::TupleElement(i)))?; - } - Ok(()) - } - - // Tuples must have same length, elements unify positionally - ( - TyData::Tuple { elems: got_elems }, - TyData::Tuple { - elems: expected_elems, - }, - ) => { - if got_elems.len() != expected_elems.len() { - return Err(make_error(got, expected)); - } - for (i, (g, e)) in got_elems.iter().zip(expected_elems.iter()).enumerate() { - unify_ty(store, *g, *e, variance) - .map_err(|err| err.with_path(PathElement::TupleElement(i)))?; - } - Ok(()) - } - - // Objects use structural subtyping - (TyData::Object(got_obj), TyData::Object(expected_obj)) => { - unify_objects_ty(store, got_obj, expected_obj, variance, got, expected) - } - - // AttrsOf is covariant in T - ( - TyData::AttrsOf { value: got_value }, - TyData::AttrsOf { - value: expected_value, - }, - ) => unify_ty(store, *got_value, *expected_value, variance) - .map_err(|e| e.with_path(PathElement::Field("[*]".to_string()))), - - // Object with known fields can be used where AttrsOf is expected - ( - TyData::Object(got_obj), - TyData::AttrsOf { - value: expected_value, - }, - ) => { - for (field_name, field_def) in &got_obj.fields { - unify_ty(store, field_def.ty, *expected_value, variance) - .map_err(|e| e.with_path(PathElement::Field(field_name.clone())))?; - } - Ok(()) - } - - // AttrsOf can be used where open Object is expected - (TyData::AttrsOf { .. 
}, TyData::Object(expected_obj)) => { - if expected_obj.has_unknown && expected_obj.fields.is_empty() { - Ok(()) - } else { - Err(make_error(got, expected)) - } - } - - // Functions have contravariant parameters, covariant return - (TyData::Function(got_fn), TyData::Function(expected_fn)) => { - unify_functions_ty(store, got_fn, expected_fn, variance, got, expected) - } - - // All other combinations are mismatches - _ => Err(make_error(got, expected)), - } -} - -/// Unify two object types structurally. -fn unify_objects_ty( - store: &S, - got: &super::store::ObjectData, - expected: &super::store::ObjectData, - variance: Variance, - got_ty: Ty, - expected_ty: Ty, -) -> UnifyResult { - // Check that got has all required fields from expected - for (field_name, expected_field) in &expected.fields { - match got.fields.iter().find(|(n, _)| n == field_name) { - Some((_, got_field)) => { - // Field exists - unify the types - unify_ty(store, got_field.ty, expected_field.ty, variance) - .map_err(|e| e.with_path(PathElement::Field(field_name.clone())))?; - } - None => { - // Field missing - error if required and got is closed - if expected_field.required && !got.has_unknown { - return Err(UnifyError::new( - got_ty, - expected_ty, - UnifyReason::MissingField(field_name.clone()), - )); - } - } - } - } - - // If expected is closed, got cannot have extra fields - if !expected.has_unknown { - for (field_name, _) in &got.fields { - if !expected.fields.iter().any(|(n, _)| n == field_name) { - return Err(UnifyError::new( - got_ty, - expected_ty, - UnifyReason::ExtraField(field_name.clone()), - )); - } - } - } - - Ok(()) -} - -/// Unify two function types with proper variance handling. 
-fn unify_functions_ty( - store: &S, - got: &super::store::FunctionData, - expected: &super::store::FunctionData, - variance: Variance, - got_ty: Ty, - expected_ty: Ty, -) -> UnifyResult { - // Check parameter count compatibility - let got_required = got.params.iter().filter(|p| !p.has_default).count(); - let expected_required = expected.params.iter().filter(|p| !p.has_default).count(); - - if got_required > expected.params.len() || got.params.len() < expected_required { - return Err(UnifyError::new( - got_ty, - expected_ty, - UnifyReason::ParamCountMismatch { - got: got.params.len(), - expected: expected.params.len(), - }, - )); - } - - // Parameters are CONTRAVARIANT: swap argument order - for (i, (got_param, expected_param)) in - got.params.iter().zip(expected.params.iter()).enumerate() - { - let got_param_ty = got_param.ty; - let expected_param_ty = expected_param.ty; - - // Swap argument order for contravariant position - unify_ty(store, expected_param_ty, got_param_ty, variance) - .map_err(|e| e.with_path(PathElement::Parameter(i)))?; - } - - // Return type is COVARIANT - // Extract fixed return type or default to Any for dynamic specs - let got_return = match &got.return_spec { - super::store::ReturnSpec::Fixed(ty) => *ty, - _ => Ty::ANY, // Dynamic return specs can't be checked statically - }; - let expected_return = match &expected.return_spec { - super::store::ReturnSpec::Fixed(ty) => *ty, - _ => Ty::ANY, - }; - - unify_ty(store, got_return, expected_return, variance) - .map_err(|e| e.with_path(PathElement::ReturnType)) -} - -/// Create a `UnifyError` for type mismatch. -fn make_error(got: Ty, expected: Ty) -> UnifyError { - UnifyError::new(got, expected, UnifyReason::TypeMismatch) -} - -/// Check if `subtype` is a subtype of `supertype`. 
-pub fn is_subtype_ty(store: &S, subtype: Ty, supertype: Ty) -> bool { - // Fast path: identical types - if subtype == supertype { - return true; - } - - // Fast paths for well-known types - if subtype == Ty::NEVER { - return true; // Never <: everything - } - if supertype == Ty::ANY { - return true; // everything <: Any - } - - unify_ty(store, subtype, supertype, Variance::Covariant).is_ok() -} - -/// Check if two types are equivalent. -pub fn types_equivalent_ty(store: &S, a: Ty, b: Ty) -> bool { - if a == b { - return true; - } - is_subtype_ty(store, a, b) && is_subtype_ty(store, b, a) -} - -#[cfg(test)] -mod tests { - use assert_matches::assert_matches; - - use super::{ - super::store::{ - FieldDefInterned, FieldVis, FunctionData, NumBounds, ObjectData, ParamInterned, - ReturnSpec, Ty, TyStore, - }, - *, - }; - - /// Helper to create a simple function type - fn func_ty(store: &mut TyStore, params: Vec, ret: Ty) -> Ty { - let params: Vec = params - .into_iter() - .enumerate() - .map(|(i, ty)| ParamInterned { - name: format!("p{i}"), - ty, - has_default: false, - }) - .collect(); - store.function(FunctionData { - params, - return_spec: ReturnSpec::Fixed(ret), - variadic: false, - }) - } - - /// Helper to create an object type - fn obj_ty(store: &mut TyStore, fields: Vec<(&str, Ty, bool)>, has_unknown: bool) -> Ty { - let fields = fields - .into_iter() - .map(|(name, ty, required)| { - ( - name.to_string(), - FieldDefInterned { - ty, - required, - visibility: FieldVis::Normal, - }, - ) - }) - .collect(); - store.object(ObjectData { - fields, - has_unknown, - }) - } - - #[test] - fn test_primitives_unify() { - let store = TyStore::new(); - unify_ty(&store, Ty::NUMBER, Ty::NUMBER, Variance::Covariant) - .expect("Number unifies with Number"); - unify_ty(&store, Ty::STRING, Ty::STRING, Variance::Covariant) - .expect("String unifies with String"); - unify_ty(&store, Ty::BOOL, Ty::BOOL, Variance::Covariant).expect("Bool unifies with Bool"); - unify_ty(&store, Ty::NULL, 
Ty::NULL, Variance::Covariant).expect("Null unifies with Null"); - } - - #[test] - fn test_any_accepts_all() { - let store = TyStore::new(); - unify_ty(&store, Ty::NUMBER, Ty::ANY, Variance::Covariant).expect("Number subtype of Any"); - unify_ty(&store, Ty::STRING, Ty::ANY, Variance::Covariant).expect("String subtype of Any"); - unify_ty(&store, Ty::BOOL, Ty::ANY, Variance::Covariant).expect("Bool subtype of Any"); - } - - #[test] - fn test_never_is_bottom() { - let store = TyStore::new(); - unify_ty(&store, Ty::NEVER, Ty::NUMBER, Variance::Covariant) - .expect("Never subtype of Number"); - unify_ty(&store, Ty::NEVER, Ty::STRING, Variance::Covariant) - .expect("Never subtype of String"); - unify_ty(&store, Ty::NEVER, Ty::ANY, Variance::Covariant).expect("Never subtype of Any"); - } - - #[test] - fn test_primitive_mismatch() { - let store = TyStore::new(); - let err1 = unify_ty(&store, Ty::NUMBER, Ty::STRING, Variance::Covariant) - .expect_err("Number != String"); - assert_matches!(err1.reason, UnifyReason::TypeMismatch); - let err2 = unify_ty(&store, Ty::BOOL, Ty::NUMBER, Variance::Covariant) - .expect_err("Bool != Number"); - assert_matches!(err2.reason, UnifyReason::TypeMismatch); - } - - #[test] - fn test_char_subtype_of_string() { - let store = TyStore::new(); - unify_ty(&store, Ty::CHAR, Ty::STRING, Variance::Covariant) - .expect("Char subtype of String"); - let err = unify_ty(&store, Ty::STRING, Ty::CHAR, Variance::Covariant) - .expect_err("String not subtype of Char"); - assert_matches!(err.reason, UnifyReason::TypeMismatch); - } - - #[test] - fn test_bool_literals() { - let store = TyStore::new(); - unify_ty(&store, Ty::TRUE, Ty::BOOL, Variance::Covariant).expect("True subtype of Bool"); - unify_ty(&store, Ty::FALSE, Ty::BOOL, Variance::Covariant).expect("False subtype of Bool"); - let err = unify_ty(&store, Ty::BOOL, Ty::TRUE, Variance::Covariant) - .expect_err("Bool not subtype of True"); - assert_matches!(err.reason, UnifyReason::TypeMismatch); - } - - 
#[test] - fn test_array_covariance() { - let mut store = TyStore::new(); - let arr_num = store.array(Ty::NUMBER); - let arr_any = store.array(Ty::ANY); - - unify_ty(&store, arr_num, arr_any, Variance::Covariant) - .expect("Array subtype of Array"); - let err = unify_ty(&store, arr_any, arr_num, Variance::Covariant) - .expect_err("Array not subtype of Array"); - assert_matches!(err.reason, UnifyReason::TypeMismatch); - } - - #[test] - fn test_tuple_to_array() { - let mut store = TyStore::new(); - let tuple = store.tuple(vec![Ty::NUMBER, Ty::NUMBER]); - let arr_num = store.array(Ty::NUMBER); - - unify_ty(&store, tuple, arr_num, Variance::Covariant) - .expect("Tuple[Number, Number] subtype of Array"); - } - - #[test] - fn test_function_param_contravariance() { - let mut store = TyStore::new(); - let fn_any_to_num = func_ty(&mut store, vec![Ty::ANY], Ty::NUMBER); - let fn_num_to_num = func_ty(&mut store, vec![Ty::NUMBER], Ty::NUMBER); - - // fn(Any) -> Number can substitute for fn(Number) -> Number - unify_ty(&store, fn_any_to_num, fn_num_to_num, Variance::Covariant) - .expect("fn(Any)->Number subtype of fn(Number)->Number"); - - // fn(Number) -> Number cannot substitute for fn(Any) -> Number - let err = unify_ty(&store, fn_num_to_num, fn_any_to_num, Variance::Covariant) - .expect_err("fn(Number)->Number not subtype of fn(Any)->Number"); - assert_matches!(err.reason, UnifyReason::TypeMismatch); - } - - #[test] - fn test_function_return_covariance() { - let mut store = TyStore::new(); - let fn_to_num = func_ty(&mut store, vec![], Ty::NUMBER); - let fn_to_any = func_ty(&mut store, vec![], Ty::ANY); - - // fn() -> Number can substitute for fn() -> Any - unify_ty(&store, fn_to_num, fn_to_any, Variance::Covariant) - .expect("fn()->Number subtype of fn()->Any"); - - // fn() -> Any cannot substitute for fn() -> Number - let err = unify_ty(&store, fn_to_any, fn_to_num, Variance::Covariant) - .expect_err("fn()->Any not subtype of fn()->Number"); - assert_matches!(err.reason, 
UnifyReason::TypeMismatch); - } - - #[test] - fn test_object_structural_subtyping() { - let mut store = TyStore::new(); - let obj_ab = obj_ty( - &mut store, - vec![("a", Ty::NUMBER, true), ("b", Ty::STRING, true)], - false, - ); - let obj_a = obj_ty(&mut store, vec![("a", Ty::NUMBER, true)], true); - - // Object with more fields can be used where fewer are required (if open) - unify_ty(&store, obj_ab, obj_a, Variance::Covariant).expect("{a,b} subtype of {a,...}"); - } - - #[test] - fn test_object_missing_field() { - let mut store = TyStore::new(); - let obj_a = obj_ty(&mut store, vec![("a", Ty::NUMBER, true)], false); - let obj_ab = obj_ty( - &mut store, - vec![("a", Ty::NUMBER, true), ("b", Ty::STRING, true)], - false, - ); - - // Closed object with fewer fields cannot match one requiring more - let err = unify_ty(&store, obj_a, obj_ab, Variance::Covariant) - .expect_err("{a} not subtype of {a,b}"); - assert_matches!(err.reason, UnifyReason::MissingField { .. }); - } - - #[test] - fn test_bounded_number() { - let mut store = TyStore::new(); - let narrow = store.bounded_number(NumBounds::between(0.0, 10.0)); - let wide = store.bounded_number(NumBounds::between(-100.0, 100.0)); - - // Narrow bounds are subtype of wider bounds - unify_ty(&store, narrow, wide, Variance::Covariant).expect("[0,10] subtype of [-100,100]"); - let err1 = unify_ty(&store, wide, narrow, Variance::Covariant) - .expect_err("[-100,100] not subtype of [0,10]"); - assert_matches!(err1.reason, UnifyReason::TypeMismatch); - - // BoundedNumber is subtype of Number - unify_ty(&store, narrow, Ty::NUMBER, Variance::Covariant) - .expect("[0,10] subtype of Number"); - let err2 = unify_ty(&store, Ty::NUMBER, narrow, Variance::Covariant) - .expect_err("Number not subtype of [0,10]"); - assert_matches!(err2.reason, UnifyReason::TypeMismatch); - } - - #[test] - fn test_union_subtyping() { - let mut store = TyStore::new(); - let num_or_str = store.union(vec![Ty::NUMBER, Ty::STRING]); - - // Number is subtype 
of (Number | String) - unify_ty(&store, Ty::NUMBER, num_or_str, Variance::Covariant) - .expect("Number subtype of (Number|String)"); - unify_ty(&store, Ty::STRING, num_or_str, Variance::Covariant) - .expect("String subtype of (Number|String)"); - - // (Number | String) is NOT subtype of Number - let err = unify_ty(&store, num_or_str, Ty::NUMBER, Variance::Covariant) - .expect_err("(Number|String) not subtype of Number"); - assert_matches!(err.reason, UnifyReason::TypeMismatch); - } - - #[test] - fn test_is_subtype() { - let store = TyStore::new(); - assert!(is_subtype_ty(&store, Ty::NUMBER, Ty::ANY)); - assert!(is_subtype_ty(&store, Ty::NEVER, Ty::NUMBER)); - assert!(!is_subtype_ty(&store, Ty::NUMBER, Ty::STRING)); - } - - #[test] - fn test_types_equivalent() { - let store = TyStore::new(); - assert!(types_equivalent_ty(&store, Ty::NUMBER, Ty::NUMBER)); - assert!(!types_equivalent_ty(&store, Ty::NUMBER, Ty::ANY)); - } - - #[test] - fn test_union_mismatch_error() { - let mut store = TyStore::new(); - let num_or_str = store.union(vec![Ty::NUMBER, Ty::STRING]); - - // Bool cannot unify with (Number | String), should get comprehensive error - let result = unify_ty(&store, Ty::BOOL, num_or_str, Variance::Covariant); - - // Should be a UnionMismatch with errors for each variant - assert_matches!(result, Err(UnifyError { reason: UnifyReason::UnionMismatch(ref errs), .. 
}) => { - // Should have two errors - one for each union variant - assert_eq!( - errs.iter().map(|e| &e.reason).collect::>(), - vec![ - &UnifyReason::TypeMismatch, - &UnifyReason::TypeMismatch, - ] - ); - }); - } - - #[test] - fn test_union_mismatch_error_format() { - let mut store = TyStore::new(); - let num_or_str = store.union(vec![Ty::NUMBER, Ty::STRING]); - - let result = unify_ty(&store, Ty::BOOL, num_or_str, Variance::Covariant); - let err = result.expect_err("should fail"); - let formatted = err.format(&store); - - // Should show each variant type that was tried - assert!( - formatted.contains("tried `number`: type mismatch"), - "Should show number variant: {formatted}" - ); - assert!( - formatted.contains("tried `string`: type mismatch"), - "Should show string variant: {formatted}" - ); - } - - #[test] - fn test_union_mismatch_with_object_error() { - let mut store = TyStore::new(); - - // Create an object type with required field 'a' - let obj_with_a = store.object(ObjectData { - fields: vec![( - "a".to_string(), - FieldDefInterned { - ty: Ty::NUMBER, - required: true, - visibility: FieldVis::Normal, - }, - )], - has_unknown: false, - }); - - // Create union: Number | {a: number} - let union = store.union(vec![Ty::NUMBER, obj_with_a]); - - // Empty object should fail with specific error for each variant - let empty_obj = store.object(ObjectData { - fields: vec![], - has_unknown: false, - }); - - let result = unify_ty(&store, empty_obj, union, Variance::Covariant); - let err = result.expect_err("should fail"); - let formatted = err.format(&store); - - // Should show type mismatch for number and missing field for object - assert!( - formatted.contains("tried `number`"), - "Should show number variant: {formatted}" - ); - assert!( - formatted.contains("missing field 'a'"), - "Should show missing field error: {formatted}" - ); - } -} +pub use algorithm::{is_subtype_ty, types_equivalent_ty, unify_ty}; +pub use types::{PathElement, UnifyError, UnifyReason, 
UnifyResult, Variance}; diff --git a/crates/jrsonnet-lsp-types/src/unification/algorithm.rs b/crates/jrsonnet-lsp-types/src/unification/algorithm.rs new file mode 100644 index 00000000..288d9866 --- /dev/null +++ b/crates/jrsonnet-lsp-types/src/unification/algorithm.rs @@ -0,0 +1,678 @@ +use super::types::{PathElement, UnifyError, UnifyReason, UnifyResult, Variance}; +use crate::store::{FunctionData, ObjectData, ReturnSpec, Ty, TyData, TypeStoreOps}; + +pub fn unify_ty( + store: &S, + got: Ty, + expected: Ty, + variance: Variance, +) -> UnifyResult { + // Fast path: identical types always unify + if got == expected { + return Ok(()); + } + + // Fast paths for well-known types + if got == Ty::NEVER { + return Ok(()); // Never is bottom, unifies with anything + } + if expected == Ty::ANY { + return Ok(()); // Any accepts everything in covariant position + } + if got == Ty::ANY && variance == Variance::Contravariant { + return Ok(()); // Any in contravariant position + } + + // Delegate to the full implementation + unify_ty_impl(store, got, expected, variance) +} + +/// Internal unification implementation. 
+fn unify_ty_impl( + store: &S, + got: Ty, + expected: Ty, + variance: Variance, +) -> UnifyResult { + let got_data = store.get_data(got); + let expected_data = store.get_data(expected); + + // Handle top and bottom types + match (&got_data, &expected_data) { + // Never is bottom type - unifies with anything + // Any = Any regardless of variance + (TyData::Never, _) | (TyData::Any, TyData::Any) => return Ok(()), + + // When expected is Any (and got is not Any) + (_, TyData::Any) => { + return match variance { + Variance::Covariant => Ok(()), + Variance::Contravariant => Err(make_error(got, expected)), + }; + } + + // When got is Any (and expected is not Any) + (TyData::Any, _) => { + return match variance { + Variance::Contravariant => Ok(()), + Variance::Covariant => Err(make_error(got, expected)), + }; + } + + // Expecting Never means unreachable code - only Never satisfies it + (_, TyData::Never) => return Err(make_error(got, expected)), + + // Type variables unify with any type + (TyData::TypeVar { constraints, .. }, _) => { + if let Some(bound) = &constraints.upper_bound { + return unify_ty(store, *bound, expected, variance); + } + return Ok(()); + } + (_, TyData::TypeVar { constraints, .. 
}) => { + if let Some(bound) = &constraints.upper_bound { + return unify_ty(store, got, *bound, variance); + } + return Ok(()); + } + + _ => {} + } + + // Handle unions + if let TyData::Union(got_variants) = store.get_data(got) { + // All variants of got must unify with expected + for variant in got_variants { + unify_ty(store, variant, expected, variance)?; + } + return Ok(()); + } + + if let TyData::Union(expected_variants) = store.get_data(expected) { + // Got must unify with at least one expected variant + // Collect all errors for comprehensive reporting + let mut variant_errors = Vec::new(); + for variant in expected_variants { + match unify_ty(store, got, variant, variance) { + Ok(()) => return Ok(()), + Err(e) => variant_errors.push(e), + } + } + // None of the variants matched - report all errors + return Err(UnifyError::new( + got, + expected, + UnifyReason::UnionMismatch(variant_errors), + )); + } + + // Re-fetch data for the concrete type matching + let got_data = store.get_data(got); + let expected_data = store.get_data(expected); + + // Now handle concrete type pairs + match (&got_data, &expected_data) { + // Primitives must match exactly + (TyData::Null, TyData::Null) + | (TyData::Bool | TyData::True | TyData::False, TyData::Bool) + | (TyData::Number | TyData::BoundedNumber(_), TyData::Number) + | (TyData::String | TyData::Char | TyData::LiteralString(_), TyData::String) + | (TyData::Char, TyData::Char) + | (TyData::True, TyData::True) + | (TyData::False, TyData::False) => Ok(()), + (TyData::LiteralString(a), TyData::LiteralString(b)) if a == b => Ok(()), + + // BoundedNumber with narrower bounds is subtype of wider + (TyData::BoundedNumber(got_bounds), TyData::BoundedNumber(expected_bounds)) => { + if got_bounds.is_subset_of(expected_bounds) { + Ok(()) + } else { + Err(make_error(got, expected)) + } + } + + // Arrays are covariant in element type + ( + TyData::Array { elem: got_elem, .. }, + TyData::Array { + elem: expected_elem, + .. 
+ }, + ) => unify_ty(store, *got_elem, *expected_elem, variance) + .map_err(|e| e.with_path(PathElement::ArrayElement)), + + // Tuple to Array: all tuple elements must unify with array element + ( + TyData::Tuple { elems }, + TyData::Array { + elem: expected_elem, + .. + }, + ) => { + for (i, elem) in elems.iter().enumerate() { + unify_ty(store, *elem, *expected_elem, variance) + .map_err(|e| e.with_path(PathElement::TupleElement(i)))?; + } + Ok(()) + } + + // Tuples must have same length, elements unify positionally + ( + TyData::Tuple { elems: got_elems }, + TyData::Tuple { + elems: expected_elems, + }, + ) => { + if got_elems.len() != expected_elems.len() { + return Err(make_error(got, expected)); + } + for (i, (g, e)) in got_elems.iter().zip(expected_elems.iter()).enumerate() { + unify_ty(store, *g, *e, variance) + .map_err(|err| err.with_path(PathElement::TupleElement(i)))?; + } + Ok(()) + } + + // Objects use structural subtyping + (TyData::Object(got_obj), TyData::Object(expected_obj)) => { + unify_objects_ty(store, got_obj, expected_obj, variance, got, expected) + } + + // AttrsOf is covariant in T + ( + TyData::AttrsOf { value: got_value }, + TyData::AttrsOf { + value: expected_value, + }, + ) => unify_ty(store, *got_value, *expected_value, variance) + .map_err(|e| e.with_path(PathElement::Field("[*]".to_string()))), + + // Object with known fields can be used where AttrsOf is expected + ( + TyData::Object(got_obj), + TyData::AttrsOf { + value: expected_value, + }, + ) => { + for (field_name, field_def) in &got_obj.fields { + unify_ty(store, field_def.ty, *expected_value, variance) + .map_err(|e| e.with_path(PathElement::Field(field_name.clone())))?; + } + Ok(()) + } + + // AttrsOf can be used where open Object is expected + (TyData::AttrsOf { .. 
}, TyData::Object(expected_obj)) => { + if expected_obj.has_unknown && expected_obj.fields.is_empty() { + Ok(()) + } else { + Err(make_error(got, expected)) + } + } + + // Functions have contravariant parameters, covariant return + (TyData::Function(got_fn), TyData::Function(expected_fn)) => { + unify_functions_ty(store, got_fn, expected_fn, variance, got, expected) + } + + // All other combinations are mismatches + _ => Err(make_error(got, expected)), + } +} + +/// Unify two object types structurally. +fn unify_objects_ty( + store: &S, + got: &ObjectData, + expected: &ObjectData, + variance: Variance, + got_ty: Ty, + expected_ty: Ty, +) -> UnifyResult { + // Check that got has all required fields from expected + for (field_name, expected_field) in &expected.fields { + match got.fields.iter().find(|(n, _)| n == field_name) { + Some((_, got_field)) => { + // Field exists - unify the types + unify_ty(store, got_field.ty, expected_field.ty, variance) + .map_err(|e| e.with_path(PathElement::Field(field_name.clone())))?; + } + None => { + // Field missing - error if required and got is closed + if expected_field.required && !got.has_unknown { + return Err(UnifyError::new( + got_ty, + expected_ty, + UnifyReason::MissingField(field_name.clone()), + )); + } + } + } + } + + // If expected is closed, got cannot have extra fields + if !expected.has_unknown { + for (field_name, _) in &got.fields { + if !expected.fields.iter().any(|(n, _)| n == field_name) { + return Err(UnifyError::new( + got_ty, + expected_ty, + UnifyReason::ExtraField(field_name.clone()), + )); + } + } + } + + Ok(()) +} + +/// Unify two function types with proper variance handling. 
+fn unify_functions_ty( + store: &S, + got: &FunctionData, + expected: &FunctionData, + variance: Variance, + got_ty: Ty, + expected_ty: Ty, +) -> UnifyResult { + // Check parameter count compatibility + let got_required = got.params.iter().filter(|p| !p.has_default).count(); + let expected_required = expected.params.iter().filter(|p| !p.has_default).count(); + + if got_required > expected.params.len() || got.params.len() < expected_required { + return Err(UnifyError::new( + got_ty, + expected_ty, + UnifyReason::ParamCountMismatch { + got: got.params.len(), + expected: expected.params.len(), + }, + )); + } + + // Parameters are CONTRAVARIANT: swap argument order + for (i, (got_param, expected_param)) in + got.params.iter().zip(expected.params.iter()).enumerate() + { + let got_param_ty = got_param.ty; + let expected_param_ty = expected_param.ty; + + // Swap argument order for contravariant position + unify_ty(store, expected_param_ty, got_param_ty, variance) + .map_err(|e| e.with_path(PathElement::Parameter(i)))?; + } + + // Return type is COVARIANT + // Extract fixed return type or default to Any for dynamic specs + let got_return = match &got.return_spec { + ReturnSpec::Fixed(ty) => *ty, + _ => Ty::ANY, // Dynamic return specs can't be checked statically + }; + let expected_return = match &expected.return_spec { + ReturnSpec::Fixed(ty) => *ty, + _ => Ty::ANY, + }; + + unify_ty(store, got_return, expected_return, variance) + .map_err(|e| e.with_path(PathElement::ReturnType)) +} + +/// Create a `UnifyError` for type mismatch. +fn make_error(got: Ty, expected: Ty) -> UnifyError { + UnifyError::new(got, expected, UnifyReason::TypeMismatch) +} + +/// Check if `subtype` is a subtype of `supertype`. 
+pub fn is_subtype_ty(store: &S, subtype: Ty, supertype: Ty) -> bool { + // Fast path: identical types + if subtype == supertype { + return true; + } + + // Fast paths for well-known types + if subtype == Ty::NEVER { + return true; // Never <: everything + } + if supertype == Ty::ANY { + return true; // everything <: Any + } + + unify_ty(store, subtype, supertype, Variance::Covariant).is_ok() +} + +/// Check if two types are equivalent. +pub fn types_equivalent_ty(store: &S, a: Ty, b: Ty) -> bool { + if a == b { + return true; + } + is_subtype_ty(store, a, b) && is_subtype_ty(store, b, a) +} + +#[cfg(test)] +mod tests { + use assert_matches::assert_matches; + + use super::*; + use crate::store::{ + FieldDefInterned, FieldVis, FunctionData, NumBounds, ObjectData, ParamInterned, ReturnSpec, + Ty, TyStore, + }; + + /// Helper to create a simple function type + fn func_ty(store: &mut TyStore, params: Vec, ret: Ty) -> Ty { + let params: Vec = params + .into_iter() + .enumerate() + .map(|(i, ty)| ParamInterned { + name: format!("p{i}"), + ty, + has_default: false, + }) + .collect(); + store.function(FunctionData { + params, + return_spec: ReturnSpec::Fixed(ret), + variadic: false, + }) + } + + /// Helper to create an object type + fn obj_ty(store: &mut TyStore, fields: Vec<(&str, Ty, bool)>, has_unknown: bool) -> Ty { + let fields = fields + .into_iter() + .map(|(name, ty, required)| { + ( + name.to_string(), + FieldDefInterned { + ty, + required, + visibility: FieldVis::Normal, + }, + ) + }) + .collect(); + store.object(ObjectData { + fields, + has_unknown, + }) + } + + #[test] + fn test_primitives_unify() { + let store = TyStore::new(); + unify_ty(&store, Ty::NUMBER, Ty::NUMBER, Variance::Covariant) + .expect("Number unifies with Number"); + unify_ty(&store, Ty::STRING, Ty::STRING, Variance::Covariant) + .expect("String unifies with String"); + unify_ty(&store, Ty::BOOL, Ty::BOOL, Variance::Covariant).expect("Bool unifies with Bool"); + unify_ty(&store, Ty::NULL, 
Ty::NULL, Variance::Covariant).expect("Null unifies with Null"); + } + + #[test] + fn test_any_accepts_all() { + let store = TyStore::new(); + unify_ty(&store, Ty::NUMBER, Ty::ANY, Variance::Covariant).expect("Number subtype of Any"); + unify_ty(&store, Ty::STRING, Ty::ANY, Variance::Covariant).expect("String subtype of Any"); + unify_ty(&store, Ty::BOOL, Ty::ANY, Variance::Covariant).expect("Bool subtype of Any"); + } + + #[test] + fn test_never_is_bottom() { + let store = TyStore::new(); + unify_ty(&store, Ty::NEVER, Ty::NUMBER, Variance::Covariant) + .expect("Never subtype of Number"); + unify_ty(&store, Ty::NEVER, Ty::STRING, Variance::Covariant) + .expect("Never subtype of String"); + unify_ty(&store, Ty::NEVER, Ty::ANY, Variance::Covariant).expect("Never subtype of Any"); + } + + #[test] + fn test_primitive_mismatch() { + let store = TyStore::new(); + let err1 = unify_ty(&store, Ty::NUMBER, Ty::STRING, Variance::Covariant) + .expect_err("Number != String"); + assert_matches!(err1.reason, UnifyReason::TypeMismatch); + let err2 = unify_ty(&store, Ty::BOOL, Ty::NUMBER, Variance::Covariant) + .expect_err("Bool != Number"); + assert_matches!(err2.reason, UnifyReason::TypeMismatch); + } + + #[test] + fn test_char_subtype_of_string() { + let store = TyStore::new(); + unify_ty(&store, Ty::CHAR, Ty::STRING, Variance::Covariant) + .expect("Char subtype of String"); + let err = unify_ty(&store, Ty::STRING, Ty::CHAR, Variance::Covariant) + .expect_err("String not subtype of Char"); + assert_matches!(err.reason, UnifyReason::TypeMismatch); + } + + #[test] + fn test_bool_literals() { + let store = TyStore::new(); + unify_ty(&store, Ty::TRUE, Ty::BOOL, Variance::Covariant).expect("True subtype of Bool"); + unify_ty(&store, Ty::FALSE, Ty::BOOL, Variance::Covariant).expect("False subtype of Bool"); + let err = unify_ty(&store, Ty::BOOL, Ty::TRUE, Variance::Covariant) + .expect_err("Bool not subtype of True"); + assert_matches!(err.reason, UnifyReason::TypeMismatch); + } + + 
#[test] + fn test_array_covariance() { + let mut store = TyStore::new(); + let arr_num = store.array(Ty::NUMBER); + let arr_any = store.array(Ty::ANY); + + unify_ty(&store, arr_num, arr_any, Variance::Covariant) + .expect("Array subtype of Array"); + let err = unify_ty(&store, arr_any, arr_num, Variance::Covariant) + .expect_err("Array not subtype of Array"); + assert_matches!(err.reason, UnifyReason::TypeMismatch); + } + + #[test] + fn test_tuple_to_array() { + let mut store = TyStore::new(); + let tuple = store.tuple(vec![Ty::NUMBER, Ty::NUMBER]); + let arr_num = store.array(Ty::NUMBER); + + unify_ty(&store, tuple, arr_num, Variance::Covariant) + .expect("Tuple[Number, Number] subtype of Array"); + } + + #[test] + fn test_function_param_contravariance() { + let mut store = TyStore::new(); + let fn_any_to_num = func_ty(&mut store, vec![Ty::ANY], Ty::NUMBER); + let fn_num_to_num = func_ty(&mut store, vec![Ty::NUMBER], Ty::NUMBER); + + // fn(Any) -> Number can substitute for fn(Number) -> Number + unify_ty(&store, fn_any_to_num, fn_num_to_num, Variance::Covariant) + .expect("fn(Any)->Number subtype of fn(Number)->Number"); + + // fn(Number) -> Number cannot substitute for fn(Any) -> Number + let err = unify_ty(&store, fn_num_to_num, fn_any_to_num, Variance::Covariant) + .expect_err("fn(Number)->Number not subtype of fn(Any)->Number"); + assert_matches!(err.reason, UnifyReason::TypeMismatch); + } + + #[test] + fn test_function_return_covariance() { + let mut store = TyStore::new(); + let fn_to_num = func_ty(&mut store, vec![], Ty::NUMBER); + let fn_to_any = func_ty(&mut store, vec![], Ty::ANY); + + // fn() -> Number can substitute for fn() -> Any + unify_ty(&store, fn_to_num, fn_to_any, Variance::Covariant) + .expect("fn()->Number subtype of fn()->Any"); + + // fn() -> Any cannot substitute for fn() -> Number + let err = unify_ty(&store, fn_to_any, fn_to_num, Variance::Covariant) + .expect_err("fn()->Any not subtype of fn()->Number"); + assert_matches!(err.reason, 
UnifyReason::TypeMismatch); + } + + #[test] + fn test_object_structural_subtyping() { + let mut store = TyStore::new(); + let obj_ab = obj_ty( + &mut store, + vec![("a", Ty::NUMBER, true), ("b", Ty::STRING, true)], + false, + ); + let obj_a = obj_ty(&mut store, vec![("a", Ty::NUMBER, true)], true); + + // Object with more fields can be used where fewer are required (if open) + unify_ty(&store, obj_ab, obj_a, Variance::Covariant).expect("{a,b} subtype of {a,...}"); + } + + #[test] + fn test_object_missing_field() { + let mut store = TyStore::new(); + let obj_a = obj_ty(&mut store, vec![("a", Ty::NUMBER, true)], false); + let obj_ab = obj_ty( + &mut store, + vec![("a", Ty::NUMBER, true), ("b", Ty::STRING, true)], + false, + ); + + // Closed object with fewer fields cannot match one requiring more + let err = unify_ty(&store, obj_a, obj_ab, Variance::Covariant) + .expect_err("{a} not subtype of {a,b}"); + assert_matches!(err.reason, UnifyReason::MissingField { .. }); + } + + #[test] + fn test_bounded_number() { + let mut store = TyStore::new(); + let narrow = store.bounded_number(NumBounds::between(0.0, 10.0)); + let wide = store.bounded_number(NumBounds::between(-100.0, 100.0)); + + // Narrow bounds are subtype of wider bounds + unify_ty(&store, narrow, wide, Variance::Covariant).expect("[0,10] subtype of [-100,100]"); + let err1 = unify_ty(&store, wide, narrow, Variance::Covariant) + .expect_err("[-100,100] not subtype of [0,10]"); + assert_matches!(err1.reason, UnifyReason::TypeMismatch); + + // BoundedNumber is subtype of Number + unify_ty(&store, narrow, Ty::NUMBER, Variance::Covariant) + .expect("[0,10] subtype of Number"); + let err2 = unify_ty(&store, Ty::NUMBER, narrow, Variance::Covariant) + .expect_err("Number not subtype of [0,10]"); + assert_matches!(err2.reason, UnifyReason::TypeMismatch); + } + + #[test] + fn test_union_subtyping() { + let mut store = TyStore::new(); + let num_or_str = store.union(vec![Ty::NUMBER, Ty::STRING]); + + // Number is subtype 
of (Number | String) + unify_ty(&store, Ty::NUMBER, num_or_str, Variance::Covariant) + .expect("Number subtype of (Number|String)"); + unify_ty(&store, Ty::STRING, num_or_str, Variance::Covariant) + .expect("String subtype of (Number|String)"); + + // (Number | String) is NOT subtype of Number + let err = unify_ty(&store, num_or_str, Ty::NUMBER, Variance::Covariant) + .expect_err("(Number|String) not subtype of Number"); + assert_matches!(err.reason, UnifyReason::TypeMismatch); + } + + #[test] + fn test_is_subtype() { + let store = TyStore::new(); + assert!(is_subtype_ty(&store, Ty::NUMBER, Ty::ANY)); + assert!(is_subtype_ty(&store, Ty::NEVER, Ty::NUMBER)); + assert!(!is_subtype_ty(&store, Ty::NUMBER, Ty::STRING)); + } + + #[test] + fn test_types_equivalent() { + let store = TyStore::new(); + assert!(types_equivalent_ty(&store, Ty::NUMBER, Ty::NUMBER)); + assert!(!types_equivalent_ty(&store, Ty::NUMBER, Ty::ANY)); + } + + #[test] + fn test_union_mismatch_error() { + let mut store = TyStore::new(); + let num_or_str = store.union(vec![Ty::NUMBER, Ty::STRING]); + + // Bool cannot unify with (Number | String), should get comprehensive error + let result = unify_ty(&store, Ty::BOOL, num_or_str, Variance::Covariant); + + // Should be a UnionMismatch with errors for each variant + assert_matches!(result, Err(UnifyError { reason: UnifyReason::UnionMismatch(ref errs), .. 
}) => { + // Should have two errors - one for each union variant + assert_eq!( + errs.iter().map(|e| &e.reason).collect::>(), + vec![ + &UnifyReason::TypeMismatch, + &UnifyReason::TypeMismatch, + ] + ); + }); + } + + #[test] + fn test_union_mismatch_error_format() { + let mut store = TyStore::new(); + let num_or_str = store.union(vec![Ty::NUMBER, Ty::STRING]); + + let result = unify_ty(&store, Ty::BOOL, num_or_str, Variance::Covariant); + let err = result.expect_err("should fail"); + let formatted = err.format(&store); + + // Should show each variant type that was tried + assert!( + formatted.contains("tried `number`: type mismatch"), + "Should show number variant: {formatted}" + ); + assert!( + formatted.contains("tried `string`: type mismatch"), + "Should show string variant: {formatted}" + ); + } + + #[test] + fn test_union_mismatch_with_object_error() { + let mut store = TyStore::new(); + + // Create an object type with required field 'a' + let obj_with_a = store.object(ObjectData { + fields: vec![( + "a".to_string(), + FieldDefInterned { + ty: Ty::NUMBER, + required: true, + visibility: FieldVis::Normal, + }, + )], + has_unknown: false, + }); + + // Create union: Number | {a: number} + let union = store.union(vec![Ty::NUMBER, obj_with_a]); + + // Empty object should fail with specific error for each variant + let empty_obj = store.object(ObjectData { + fields: vec![], + has_unknown: false, + }); + + let result = unify_ty(&store, empty_obj, union, Variance::Covariant); + let err = result.expect_err("should fail"); + let formatted = err.format(&store); + + // Should show type mismatch for number and missing field for object + assert!( + formatted.contains("tried `number`"), + "Should show number variant: {formatted}" + ); + assert!( + formatted.contains("missing field 'a'"), + "Should show missing field error: {formatted}" + ); + } +} diff --git a/crates/jrsonnet-lsp-types/src/unification/types.rs b/crates/jrsonnet-lsp-types/src/unification/types.rs new file 
mode 100644 index 00000000..ecc5d268 --- /dev/null +++ b/crates/jrsonnet-lsp-types/src/unification/types.rs @@ -0,0 +1,161 @@ +use std::fmt::Write as _; + +use crate::store::{Ty, TypeStoreOps}; + +/// Variance determines how subtyping propagates through type constructors. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum Variance { + /// Normal subtyping direction. More specific types can substitute for general ones. + Covariant, + /// Reversed subtyping direction. Used for function parameters. + Contravariant, +} + +impl Variance { + /// Flip variance (used when descending into contravariant positions). + #[must_use] + pub fn flip(self) -> Self { + match self { + Variance::Covariant => Variance::Contravariant, + Variance::Contravariant => Variance::Covariant, + } + } +} + +/// Path element describing where in the type structure an error occurred. +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum PathElement { + /// In a function parameter at the given index (0-based). + Parameter(usize), + /// In the return type of a function. + ReturnType, + /// In an object field with the given name. + Field(String), + /// In an array element type. + ArrayElement, + /// In a tuple element at the given index. + TupleElement(usize), + /// In a union variant. + UnionVariant, +} + +impl std::fmt::Display for PathElement { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + PathElement::Parameter(i) => write!(f, "parameter {}", i + 1), + PathElement::ReturnType => write!(f, "return type"), + PathElement::Field(name) => write!(f, "field '{name}'"), + PathElement::ArrayElement => write!(f, "array element"), + PathElement::TupleElement(i) => write!(f, "element {i}"), + PathElement::UnionVariant => write!(f, "union variant"), + } + } +} + +/// Reason why unification failed. +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum UnifyReason { + /// Incompatible base types (e.g., Number vs String). 
+ TypeMismatch, + /// Object is missing a required field. + MissingField(String), + /// Closed object has an unexpected field. + ExtraField(String), + /// Function parameter count doesn't match. + ParamCountMismatch { got: usize, expected: usize }, + /// Nested unification error (for recursive structures). + Nested(Box), + /// Failed to unify with any variant of a union type. + /// Contains the errors from attempting each variant. + UnionMismatch(Vec), +} + +/// Detailed error explaining why unification failed. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct UnifyError { + /// The type that was provided (interned). + pub got: Ty, + /// The type that was expected (interned). + pub expected: Ty, + /// Why unification failed. + pub reason: UnifyReason, + /// Path to where the error occurred (innermost first). + pub path: Vec, +} + +impl UnifyError { + /// Create a new unification error. + #[must_use] + pub fn new(got: Ty, expected: Ty, reason: UnifyReason) -> Self { + Self { + got, + expected, + reason, + path: Vec::new(), + } + } + + /// Add a path element to track where in the type structure the error occurred. + #[must_use] + pub fn with_path(mut self, element: PathElement) -> Self { + self.path.push(element); + self + } + + /// Format the error for display. 
+ pub fn format(&self, store: &S) -> String { + let mut msg = format!( + "type mismatch: expected `{}`, got `{}`", + store.display(self.expected), + store.display(self.got) + ); + + if !self.path.is_empty() { + msg.push_str(" in "); + for (i, element) in self.path.iter().rev().enumerate() { + if i > 0 { + msg.push_str(" -> "); + } + msg.push_str(&element.to_string()); + } + } + + match &self.reason { + UnifyReason::TypeMismatch => {} + UnifyReason::MissingField(name) => { + let _ = write!(msg, " (missing required field '{name}')"); + } + UnifyReason::ExtraField(name) => { + let _ = write!(msg, " (unexpected field '{name}')"); + } + UnifyReason::ParamCountMismatch { got, expected } => { + let _ = write!(msg, " (expected {expected} parameters, got {got})"); + } + UnifyReason::Nested(inner) => { + let _ = write!(msg, " caused by: {}", inner.format(store)); + } + UnifyReason::UnionMismatch(variant_errors) => { + msg.push_str(" (no matching union variant)"); + for err in variant_errors { + // Show the variant type that was attempted + let reason = match &err.reason { + UnifyReason::TypeMismatch => "type mismatch".to_string(), + UnifyReason::MissingField(name) => format!("missing field '{name}'"), + UnifyReason::ExtraField(name) => format!("unexpected field '{name}'"), + UnifyReason::ParamCountMismatch { got, expected } => { + format!("expected {expected} params, got {got}") + } + UnifyReason::Nested(inner) => inner.format(store), + UnifyReason::UnionMismatch(_) => "nested union mismatch".to_string(), + }; + let _ = write!(msg, "\n tried `{}`: {reason}", store.display(err.expected)); + } + } + } + + msg + } +} + +/// Result of unification. +pub type UnifyResult = Result<(), UnifyError>; From d557ca4bc39310d52f19a9c42f56b13ddae133ff Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Mon, 16 Feb 2026 14:25:58 +0000 Subject: [PATCH 115/210] docs(lsp): document why remaining handlers stay sync Document why the remaining sync handlers are intentionally sync instead of async. 
These handlers do cheap in-memory reads and avoid blocking I/O or heavy graph traversal. Keeping them sync minimizes scheduling overhead for latency-sensitive requests. Also captures the promotion rule: move a sync handler to async when it grows into heavier cross-file or blocking work. --- .../src/server/requests/sync_handlers/mod.rs | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/crates/jrsonnet-lsp/src/server/requests/sync_handlers/mod.rs b/crates/jrsonnet-lsp/src/server/requests/sync_handlers/mod.rs index d9402e79..418956dd 100644 --- a/crates/jrsonnet-lsp/src/server/requests/sync_handlers/mod.rs +++ b/crates/jrsonnet-lsp/src/server/requests/sync_handlers/mod.rs @@ -1,3 +1,12 @@ +//! Request handlers intentionally kept synchronous. +//! +//! These handlers only read already-indexed, in-memory document state and do +//! not perform cross-file graph traversal or blocking I/O. Keeping them sync +//! avoids async scheduling overhead for latency-critical, cheap requests. +//! +//! If a handler here grows into heavier work, promote it to +//! `requests::async_handlers`. + pub(crate) mod code_action; pub(crate) mod code_lens_resolve; pub(crate) mod document_highlight; From 343a8b2043590483c6d335e7f673045dd72d257e Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Mon, 16 Feb 2026 14:31:47 +0000 Subject: [PATCH 116/210] refactor(lsp): split async import lookup by concern Split server async import lookup into focused submodules for path resolution, type lookup, document helpers, and symbol navigation. This keeps `AsyncRequestContext` extension methods grouped by feature while preserving the same behavior and call sites. 
--- .../server/async_requests/import_lookup.rs | 311 ------------------ .../async_requests/import_lookup/document.rs | 73 ++++ .../async_requests/import_lookup/mod.rs | 10 + .../async_requests/import_lookup/resolve.rs | 46 +++ .../async_requests/import_lookup/symbols.rs | 143 ++++++++ .../import_lookup/type_lookup.rs | 67 ++++ 6 files changed, 339 insertions(+), 311 deletions(-) delete mode 100644 crates/jrsonnet-lsp/src/server/async_requests/import_lookup.rs create mode 100644 crates/jrsonnet-lsp/src/server/async_requests/import_lookup/document.rs create mode 100644 crates/jrsonnet-lsp/src/server/async_requests/import_lookup/mod.rs create mode 100644 crates/jrsonnet-lsp/src/server/async_requests/import_lookup/resolve.rs create mode 100644 crates/jrsonnet-lsp/src/server/async_requests/import_lookup/symbols.rs create mode 100644 crates/jrsonnet-lsp/src/server/async_requests/import_lookup/type_lookup.rs diff --git a/crates/jrsonnet-lsp/src/server/async_requests/import_lookup.rs b/crates/jrsonnet-lsp/src/server/async_requests/import_lookup.rs deleted file mode 100644 index 206da350..00000000 --- a/crates/jrsonnet-lsp/src/server/async_requests/import_lookup.rs +++ /dev/null @@ -1,311 +0,0 @@ -use jrsonnet_lsp_document::{to_lsp_range, CanonicalPath, Document, LspRange}; -use jrsonnet_lsp_handlers as handlers; -use jrsonnet_lsp_types::{Ty, TyData}; -use jrsonnet_rowan_parser::{ - nodes::{Bind, FieldName, ForSpec, Member, ObjBody, Param}, - AstNode, AstToken, SyntaxKind, -}; - -use super::AsyncRequestContext; - -#[derive(Debug, Clone, Copy)] -pub(super) struct ImportedFieldLocations { - pub(super) declaration: lsp_types::Range, - pub(super) implementation: lsp_types::Range, -} - -impl AsyncRequestContext { - pub(super) fn resolve_import_from_graph( - &self, - from: &CanonicalPath, - import: &str, - ) -> Option { - let import_graph = self.import_graph.read(); - let from_file = import_graph.file(from)?; - import_graph - .imports(from_file) - .iter() - .find(|entry| 
entry.import_path == import) - .and_then(|entry| entry.resolved_path.clone()) - } - - fn resolve_import_from_fs(from: &CanonicalPath, import: &str) -> Option { - let import_path = std::path::Path::new(import); - let candidate = if import_path.is_absolute() { - import_path.to_path_buf() - } else if import.starts_with("./") || import.starts_with("../") { - from.as_path().parent()?.join(import_path) - } else { - return None; - }; - - CanonicalPath::try_from_path(&candidate).ok() - } - - pub(super) fn resolve_import_path( - &self, - from: &CanonicalPath, - import: &str, - ) -> Option { - if import.starts_with("./") || import.starts_with("../") || import.starts_with('/') { - return Self::resolve_import_from_fs(from, import) - .or_else(|| self.resolve_import_from_graph(from, import)); - } - - self.resolve_import_from_graph(from, import) - .or_else(|| Self::resolve_import_from_fs(from, import)) - } - - pub(super) fn resolve_import_field_type( - &self, - from: &CanonicalPath, - import_path: &str, - fields: &[String], - ) -> Option { - let resolved = self.resolve_import_path(from, import_path)?; - let doc = self.load_document_for_path(&resolved)?; - let analysis = self.analyze_document(&resolved, &doc); - let ty = Self::type_for_field_path(&analysis, analysis.document_type(), fields)?; - Some(analysis.display_for_hover(ty)) - } - - fn type_for_field_path( - analysis: &jrsonnet_lsp_inference::TypeAnalysis, - root_ty: Ty, - fields: &[String], - ) -> Option { - fields.iter().try_fold(root_ty, |ty, field| { - Self::type_for_field(analysis, ty, field) - }) - } - - fn type_for_field( - analysis: &jrsonnet_lsp_inference::TypeAnalysis, - ty: Ty, - field: &str, - ) -> Option { - match analysis.get_data(ty) { - TyData::Any => Some(Ty::ANY), - TyData::Object(obj) => obj - .get_field(field) - .map(|field_def| field_def.ty) - .or_else(|| obj.has_unknown.then_some(Ty::ANY)), - TyData::AttrsOf { value } => Some(value), - TyData::Union(types) => { - let variants: Vec<_> = types - 
.into_iter() - .filter_map(|variant| Self::type_for_field(analysis, variant, field)) - .collect(); - if variants.is_empty() { - None - } else { - Some(analysis.union(variants)) - } - } - TyData::Sum(types) => { - let variants: Vec<_> = types - .into_iter() - .filter_map(|variant| Self::type_for_field(analysis, variant, field)) - .collect(); - if variants.is_empty() { - None - } else { - Some(analysis.union(variants)) - } - } - _ => None, - } - } - - pub(super) fn load_document_for_path(&self, path: &CanonicalPath) -> Option { - self.documents.get_document(path) - } - - pub(super) fn document_root_expr_range( - &self, - path: &CanonicalPath, - ) -> Option { - let doc = self.load_document_for_path(path)?; - let expr = doc.ast().expr()?; - Some(to_lsp_range( - expr.syntax().text_range(), - doc.line_index(), - doc.text(), - )) - } - - pub(super) fn find_export_binding_in_file( - &self, - path: &CanonicalPath, - fields: &[String], - ) -> Option { - let [field_name] = fields else { - return None; - }; - - let doc = self.load_document_for_path(path)?; - let text = doc.text(); - let line_index = doc.line_index(); - - doc.ast() - .syntax() - .descendants_with_tokens() - .filter_map(jrsonnet_rowan_parser::rowan::NodeOrToken::into_token) - .filter(|token| token.kind() == SyntaxKind::IDENT && token.text() == field_name) - .find_map(|token| { - let position = line_index.position(token.text_range().start().into(), text)?; - match handlers::goto_definition(&doc, position) { - Some(handlers::DefinitionResult::Local(range)) => Some(range), - Some( - handlers::DefinitionResult::Import(_) - | handlers::DefinitionResult::ImportField { .. 
}, - ) - | None => None, - } - }) - } - - pub(super) fn local_implementation_range( - document: &Document, - declaration: lsp_types::Range, - ) -> Option { - let text = document.text(); - let line_index = document.line_index(); - let declaration_range = line_index.text_range(LspRange::from(declaration), text)?; - let ast = document.ast(); - let node = ast - .syntax() - .descendants() - .find(|candidate| candidate.text_range() == declaration_range)?; - - if let Some(bind) = node.ancestors().find_map(Bind::cast) { - let value_range = match bind { - Bind::BindDestruct(bind) => bind.value()?.syntax().text_range(), - Bind::BindFunction(bind) => bind.value()?.syntax().text_range(), - }; - return Some(to_lsp_range(value_range, line_index, text)); - } - - if let Some(param) = node.ancestors().find_map(Param::cast) { - let default_value = param.expr()?; - return Some(to_lsp_range( - default_value.syntax().text_range(), - line_index, - text, - )); - } - - if let Some(for_spec) = node.ancestors().find_map(ForSpec::cast) { - let source_expr = for_spec.expr()?; - return Some(to_lsp_range( - source_expr.syntax().text_range(), - line_index, - text, - )); - } - - None - } - - /// For a field chain like `foo.bar`, this finds the `bar` field - /// inside the `foo` field of the top-level object. 
- pub(super) fn find_field_in_file( - &self, - path: &CanonicalPath, - fields: &[String], - ) -> Option { - use jrsonnet_rowan_parser::nodes::ExprBase; - - let doc = self.load_document_for_path(path)?; - - let ast = doc.ast(); - let text = doc.text(); - let line_index = doc.line_index(); - let expr = ast.expr()?; - - let expr_base = expr.expr_base()?; - let ExprBase::ExprObject(obj) = expr_base else { - return None; - }; - let mut current_obj_body = obj.obj_body()?; - - for (i, field_name) in fields.iter().enumerate() { - let is_last = i == fields.len() - 1; - let ObjBody::ObjBodyMemberList(members) = ¤t_obj_body else { - return None; - }; - - let field_target = members.members().find_map(|member| match member { - Member::MemberFieldNormal(field) => { - let name_node = field.field_name()?; - let name = extract_field_name_string(&name_node)?; - if name != *field_name { - return None; - } - - let declaration = name_node.syntax().text_range(); - let value = field.expr()?; - let implementation = value.syntax().text_range(); - let next_body = value.expr_base().and_then(|base| { - let ExprBase::ExprObject(obj) = base else { - return None; - }; - obj.obj_body() - }); - - Some((declaration, implementation, next_body)) - } - Member::MemberFieldMethod(method) => { - let name_node = method.field_name()?; - let name = extract_field_name_string(&name_node)?; - if name != *field_name { - return None; - } - - let declaration = name_node.syntax().text_range(); - let implementation = method - .expr() - .map_or(declaration, |expr| expr.syntax().text_range()); - Some((declaration, implementation, None)) - } - Member::MemberBindStmt(_) | Member::MemberAssertStmt(_) => None, - })?; - - if is_last { - let declaration = to_lsp_range(field_target.0, line_index, text); - let implementation = to_lsp_range(field_target.1, line_index, text); - return Some(ImportedFieldLocations { - declaration, - implementation, - }); - } - - current_obj_body = field_target.2?; - } - - None - } -} - -fn 
extract_field_name_string(name: &FieldName) -> Option { - match name { - FieldName::FieldNameFixed(fixed) => { - if let Some(name_node) = fixed.id() { - if let Some(ident) = name_node.ident_lit() { - return Some(ident.text().to_string()); - } - } - if let Some(text) = fixed.text() { - let s = text.syntax().text(); - let name = s - .trim_start_matches('"') - .trim_start_matches('\'') - .trim_end_matches('"') - .trim_end_matches('\''); - return Some(name.to_string()); - } - None - } - FieldName::FieldNameDynamic(_) => None, - } -} diff --git a/crates/jrsonnet-lsp/src/server/async_requests/import_lookup/document.rs b/crates/jrsonnet-lsp/src/server/async_requests/import_lookup/document.rs new file mode 100644 index 00000000..1c428a91 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/async_requests/import_lookup/document.rs @@ -0,0 +1,73 @@ +use jrsonnet_lsp_document::{to_lsp_range, CanonicalPath, Document, LspRange}; +use jrsonnet_rowan_parser::AstNode; + +use super::super::AsyncRequestContext; + +impl AsyncRequestContext { + pub(in crate::server::async_requests) fn load_document_for_path( + &self, + path: &CanonicalPath, + ) -> Option { + self.documents.get_document(path) + } + + pub(in crate::server::async_requests) fn document_root_expr_range( + &self, + path: &CanonicalPath, + ) -> Option { + let doc = self.load_document_for_path(path)?; + let expr = doc.ast().expr()?; + Some(to_lsp_range( + expr.syntax().text_range(), + doc.line_index(), + doc.text(), + )) + } + + pub(in crate::server::async_requests) fn local_implementation_range( + document: &Document, + declaration: lsp_types::Range, + ) -> Option { + use jrsonnet_rowan_parser::{ + nodes::{Bind, ForSpec, Param}, + AstNode, + }; + + let text = document.text(); + let line_index = document.line_index(); + let declaration_range = line_index.text_range(LspRange::from(declaration), text)?; + let ast = document.ast(); + let node = ast + .syntax() + .descendants() + .find(|candidate| candidate.text_range() == 
declaration_range)?; + + if let Some(bind) = node.ancestors().find_map(Bind::cast) { + let value_range = match bind { + Bind::BindDestruct(bind) => bind.value()?.syntax().text_range(), + Bind::BindFunction(bind) => bind.value()?.syntax().text_range(), + }; + return Some(to_lsp_range(value_range, line_index, text)); + } + + if let Some(param) = node.ancestors().find_map(Param::cast) { + let default_value = param.expr()?; + return Some(to_lsp_range( + default_value.syntax().text_range(), + line_index, + text, + )); + } + + if let Some(for_spec) = node.ancestors().find_map(ForSpec::cast) { + let source_expr = for_spec.expr()?; + return Some(to_lsp_range( + source_expr.syntax().text_range(), + line_index, + text, + )); + } + + None + } +} diff --git a/crates/jrsonnet-lsp/src/server/async_requests/import_lookup/mod.rs b/crates/jrsonnet-lsp/src/server/async_requests/import_lookup/mod.rs new file mode 100644 index 00000000..40164bf5 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/async_requests/import_lookup/mod.rs @@ -0,0 +1,10 @@ +mod document; +mod resolve; +mod symbols; +mod type_lookup; + +#[derive(Debug, Clone, Copy)] +pub(super) struct ImportedFieldLocations { + pub(super) declaration: lsp_types::Range, + pub(super) implementation: lsp_types::Range, +} diff --git a/crates/jrsonnet-lsp/src/server/async_requests/import_lookup/resolve.rs b/crates/jrsonnet-lsp/src/server/async_requests/import_lookup/resolve.rs new file mode 100644 index 00000000..a7f87393 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/async_requests/import_lookup/resolve.rs @@ -0,0 +1,46 @@ +use jrsonnet_lsp_document::CanonicalPath; + +use super::super::AsyncRequestContext; + +impl AsyncRequestContext { + pub(in crate::server::async_requests) fn resolve_import_from_graph( + &self, + from: &CanonicalPath, + import: &str, + ) -> Option { + let import_graph = self.import_graph.read(); + let from_file = import_graph.file(from)?; + import_graph + .imports(from_file) + .iter() + .find(|entry| 
entry.import_path == import) + .and_then(|entry| entry.resolved_path.clone()) + } + + fn resolve_import_from_fs(from: &CanonicalPath, import: &str) -> Option { + let import_path = std::path::Path::new(import); + let candidate = if import_path.is_absolute() { + import_path.to_path_buf() + } else if import.starts_with("./") || import.starts_with("../") { + from.as_path().parent()?.join(import_path) + } else { + return None; + }; + + CanonicalPath::try_from_path(&candidate).ok() + } + + pub(in crate::server::async_requests) fn resolve_import_path( + &self, + from: &CanonicalPath, + import: &str, + ) -> Option { + if import.starts_with("./") || import.starts_with("../") || import.starts_with('/') { + return Self::resolve_import_from_fs(from, import) + .or_else(|| self.resolve_import_from_graph(from, import)); + } + + self.resolve_import_from_graph(from, import) + .or_else(|| Self::resolve_import_from_fs(from, import)) + } +} diff --git a/crates/jrsonnet-lsp/src/server/async_requests/import_lookup/symbols.rs b/crates/jrsonnet-lsp/src/server/async_requests/import_lookup/symbols.rs new file mode 100644 index 00000000..27a0e9c0 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/async_requests/import_lookup/symbols.rs @@ -0,0 +1,143 @@ +use jrsonnet_lsp_document::{to_lsp_range, CanonicalPath}; +use jrsonnet_lsp_handlers as handlers; +use jrsonnet_rowan_parser::{ + nodes::{FieldName, Member, ObjBody}, + AstNode, AstToken, SyntaxKind, +}; + +use super::{super::AsyncRequestContext, ImportedFieldLocations}; + +impl AsyncRequestContext { + pub(in crate::server::async_requests) fn find_export_binding_in_file( + &self, + path: &CanonicalPath, + fields: &[String], + ) -> Option { + let [field_name] = fields else { + return None; + }; + + let doc = self.load_document_for_path(path)?; + let text = doc.text(); + let line_index = doc.line_index(); + + doc.ast() + .syntax() + .descendants_with_tokens() + .filter_map(jrsonnet_rowan_parser::rowan::NodeOrToken::into_token) + 
.filter(|token| token.kind() == SyntaxKind::IDENT && token.text() == field_name) + .find_map(|token| { + let position = line_index.position(token.text_range().start().into(), text)?; + match handlers::goto_definition(&doc, position) { + Some(handlers::DefinitionResult::Local(range)) => Some(range), + Some( + handlers::DefinitionResult::Import(_) + | handlers::DefinitionResult::ImportField { .. }, + ) + | None => None, + } + }) + } + + /// For a field chain like `foo.bar`, this finds the `bar` field + /// inside the `foo` field of the top-level object. + pub(in crate::server::async_requests) fn find_field_in_file( + &self, + path: &CanonicalPath, + fields: &[String], + ) -> Option { + use jrsonnet_rowan_parser::nodes::ExprBase; + + let doc = self.load_document_for_path(path)?; + + let ast = doc.ast(); + let text = doc.text(); + let line_index = doc.line_index(); + let expr = ast.expr()?; + + let expr_base = expr.expr_base()?; + let ExprBase::ExprObject(obj) = expr_base else { + return None; + }; + let mut current_obj_body = obj.obj_body()?; + + for (i, field_name) in fields.iter().enumerate() { + let is_last = i == fields.len() - 1; + let ObjBody::ObjBodyMemberList(members) = ¤t_obj_body else { + return None; + }; + + let field_target = members.members().find_map(|member| match member { + Member::MemberFieldNormal(field) => { + let name_node = field.field_name()?; + let name = extract_field_name_string(&name_node)?; + if name != *field_name { + return None; + } + + let declaration = name_node.syntax().text_range(); + let value = field.expr()?; + let implementation = value.syntax().text_range(); + let next_body = value.expr_base().and_then(|base| { + let ExprBase::ExprObject(obj) = base else { + return None; + }; + obj.obj_body() + }); + + Some((declaration, implementation, next_body)) + } + Member::MemberFieldMethod(method) => { + let name_node = method.field_name()?; + let name = extract_field_name_string(&name_node)?; + if name != *field_name { + return None; + } 
+ + let declaration = name_node.syntax().text_range(); + let implementation = method + .expr() + .map_or(declaration, |expr| expr.syntax().text_range()); + Some((declaration, implementation, None)) + } + Member::MemberBindStmt(_) | Member::MemberAssertStmt(_) => None, + })?; + + if is_last { + let declaration = to_lsp_range(field_target.0, line_index, text); + let implementation = to_lsp_range(field_target.1, line_index, text); + return Some(ImportedFieldLocations { + declaration, + implementation, + }); + } + + current_obj_body = field_target.2?; + } + + None + } +} + +fn extract_field_name_string(name: &FieldName) -> Option { + match name { + FieldName::FieldNameFixed(fixed) => { + if let Some(name_node) = fixed.id() { + if let Some(ident) = name_node.ident_lit() { + return Some(ident.text().to_string()); + } + } + if let Some(text) = fixed.text() { + let s = text.syntax().text(); + let name = s + .trim_start_matches('"') + .trim_start_matches('\'') + .trim_end_matches('"') + .trim_end_matches('\''); + return Some(name.to_string()); + } + None + } + FieldName::FieldNameDynamic(_) => None, + } +} diff --git a/crates/jrsonnet-lsp/src/server/async_requests/import_lookup/type_lookup.rs b/crates/jrsonnet-lsp/src/server/async_requests/import_lookup/type_lookup.rs new file mode 100644 index 00000000..6e971c7f --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/async_requests/import_lookup/type_lookup.rs @@ -0,0 +1,67 @@ +use jrsonnet_lsp_document::CanonicalPath; +use jrsonnet_lsp_types::{Ty, TyData}; + +use super::super::AsyncRequestContext; + +impl AsyncRequestContext { + pub(in crate::server::async_requests) fn resolve_import_field_type( + &self, + from: &CanonicalPath, + import_path: &str, + fields: &[String], + ) -> Option { + let resolved = self.resolve_import_path(from, import_path)?; + let doc = self.load_document_for_path(&resolved)?; + let analysis = self.analyze_document(&resolved, &doc); + let ty = Self::type_for_field_path(&analysis, analysis.document_type(), 
fields)?; + Some(analysis.display_for_hover(ty)) + } + + fn type_for_field_path( + analysis: &jrsonnet_lsp_inference::TypeAnalysis, + root_ty: Ty, + fields: &[String], + ) -> Option { + fields.iter().try_fold(root_ty, |ty, field| { + Self::type_for_field(analysis, ty, field) + }) + } + + fn type_for_field( + analysis: &jrsonnet_lsp_inference::TypeAnalysis, + ty: Ty, + field: &str, + ) -> Option { + match analysis.get_data(ty) { + TyData::Any => Some(Ty::ANY), + TyData::Object(obj) => obj + .get_field(field) + .map(|field_def| field_def.ty) + .or_else(|| obj.has_unknown.then_some(Ty::ANY)), + TyData::AttrsOf { value } => Some(value), + TyData::Union(types) => { + let variants: Vec<_> = types + .into_iter() + .filter_map(|variant| Self::type_for_field(analysis, variant, field)) + .collect(); + if variants.is_empty() { + None + } else { + Some(analysis.union(variants)) + } + } + TyData::Sum(types) => { + let variants: Vec<_> = types + .into_iter() + .filter_map(|variant| Self::type_for_field(analysis, variant, field)) + .collect(); + if variants.is_empty() { + None + } else { + Some(analysis.union(variants)) + } + } + _ => None, + } + } +} From 48793d01ca88568eab5a8c754f094c700931df6a Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Mon, 16 Feb 2026 14:34:16 +0000 Subject: [PATCH 117/210] refactor(lsp): split async command handlers by feature Split server async command handling into dedicated modules for dispatch, eval, graph traversal, references, and diagnostics. This keeps each command flow isolated and easier to evolve while preserving existing behavior and request wiring. 
--- .../src/server/async_requests/commands.rs | 248 ------------------ .../async_requests/commands/diagnostics.rs | 49 ++++ .../async_requests/commands/dispatch.rs | 50 ++++ .../server/async_requests/commands/eval.rs | 115 ++++++++ .../server/async_requests/commands/graph.rs | 30 +++ .../src/server/async_requests/commands/mod.rs | 5 + .../async_requests/commands/references.rs | 32 +++ 7 files changed, 281 insertions(+), 248 deletions(-) delete mode 100644 crates/jrsonnet-lsp/src/server/async_requests/commands.rs create mode 100644 crates/jrsonnet-lsp/src/server/async_requests/commands/diagnostics.rs create mode 100644 crates/jrsonnet-lsp/src/server/async_requests/commands/dispatch.rs create mode 100644 crates/jrsonnet-lsp/src/server/async_requests/commands/eval.rs create mode 100644 crates/jrsonnet-lsp/src/server/async_requests/commands/graph.rs create mode 100644 crates/jrsonnet-lsp/src/server/async_requests/commands/mod.rs create mode 100644 crates/jrsonnet-lsp/src/server/async_requests/commands/references.rs diff --git a/crates/jrsonnet-lsp/src/server/async_requests/commands.rs b/crates/jrsonnet-lsp/src/server/async_requests/commands.rs deleted file mode 100644 index 175e2781..00000000 --- a/crates/jrsonnet-lsp/src/server/async_requests/commands.rs +++ /dev/null @@ -1,248 +0,0 @@ -use jrsonnet_lsp_document::CanonicalPath; -use jrsonnet_lsp_import::ImportResolution; -use lsp_types::{ - ExecuteCommandParams, PartialResultParams, Position, ReferenceContext, ReferenceParams, - TextDocumentIdentifier, TextDocumentPositionParams, WorkDoneProgressParams, -}; -use tracing::{info, warn}; - -use super::AsyncRequestContext; -use crate::analysis::{tanka::effective_import_roots, EvalConfig, Evaluator}; - -impl AsyncRequestContext { - pub(crate) fn execute_command( - &self, - params: &ExecuteCommandParams, - ) -> Option { - info!("Execute command: {}", params.command); - - match params.command.as_str() { - "jrsonnet.evalFile" => { - let uri = 
params.arguments.first()?.as_str()?; - self.execute_eval_file(uri) - } - "jrsonnet.evalExpression" => { - let expr = params.arguments.first()?.as_str()?; - let base_uri = params.arguments.get(1).and_then(|v| v.as_str()); - Some(self.execute_eval_expression(expr, base_uri)) - } - "jrsonnet.findTransitiveImporters" => { - let uri = params.arguments.first()?.as_str()?; - self.execute_find_transitive_importers(uri) - } - "jrsonnet.findReferences" => { - let uri = params.arguments.first()?.as_str()?; - let line = params.arguments.get(1)?.as_u64()?; - let line = u32::try_from(line).ok()?; - let character = params.arguments.get(2)?.as_u64()?; - let character = u32::try_from(character).ok()?; - let include_declaration = params - .arguments - .get(3) - .and_then(serde_json::Value::as_bool) - .unwrap_or(false); - self.execute_find_references(uri, line, character, include_declaration) - } - "jrsonnet.showErrors" => { - let uri = params.arguments.first()?.as_str()?; - self.execute_show_errors(uri) - } - _ => { - warn!("Unknown command: {}", params.command); - None - } - } - } - - fn execute_eval_file(&self, uri: &str) -> Option { - use jrsonnet_evaluator::manifest::JsonFormat; - use jrsonnet_parser::{SourceFile, SourcePath}; - - let uri_parsed: lsp_types::Uri = uri.parse().ok()?; - let path = CanonicalPath::from_uri(&uri_parsed).ok()?; - let text = self.documents.get_text(&path)?; - - let jpath = self.eval_command_jpath(Some(&path)); - let state = crate::analysis::eval::create_state_with_jpath(&jpath); - - let source_path = SourcePath::new(SourceFile::new(path.as_path().to_path_buf())); - - match state.evaluate_snippet(source_path.to_string(), &text) { - Ok(val) => { - let json_format = JsonFormat::default(); - match val.manifest(json_format) { - Ok(json_str) => match serde_json::from_str::(&json_str) { - Ok(json) => Some(json), - Err(e) => { - warn!("Failed to parse manifest result as JSON: {}", e); - Some(serde_json::Value::String(json_str)) - } - }, - Err(e) => { - 
warn!("Failed to manifest: {}", e); - Some(serde_json::json!({ - "error": format!("Manifest error: {}", e.error()) - })) - } - } - } - Err(e) => { - warn!("Evaluation failed: {}", e); - Some(serde_json::json!({ - "error": format!("Evaluation error: {}", e.error()) - })) - } - } - } - - fn execute_eval_expression(&self, expr: &str, base_uri: Option<&str>) -> serde_json::Value { - use jrsonnet_evaluator::manifest::JsonFormat; - use jrsonnet_parser::{SourceFile, SourcePath}; - - let base_path = base_uri - .and_then(|uri| uri.parse::().ok()) - .and_then(|uri| CanonicalPath::from_uri(&uri).ok()); - let jpath = self.eval_command_jpath(base_path.as_ref()); - let state = crate::analysis::eval::create_state_with_jpath(&jpath); - let source_name = base_path.map_or_else( - || "".to_string(), - |path| SourcePath::new(SourceFile::new(path.as_path().to_path_buf())).to_string(), - ); - - match state.evaluate_snippet(source_name, expr) { - Ok(val) => { - let json_format = JsonFormat::default(); - match val.manifest(json_format) { - Ok(json_str) => match serde_json::from_str::(&json_str) { - Ok(json) => json, - Err(e) => { - warn!("Failed to parse manifest result as JSON: {}", e); - serde_json::Value::String(json_str) - } - }, - Err(e) => serde_json::json!({ - "error": format!("Manifest error: {}", e.error()) - }), - } - } - Err(e) => serde_json::json!({ - "error": format!("Evaluation error: {}", e.error()) - }), - } - } - - fn eval_command_jpath(&self, base_path: Option<&CanonicalPath>) -> Vec { - let config = self.config.read(); - let jpath = base_path.map_or_else( - || config.jpath.clone(), - |base_path| { - let mut roots = effective_import_roots( - base_path.as_path(), - &config.jpath, - config.resolve_paths_with_tanka, - ); - if !config.resolve_paths_with_tanka { - if let Some(dir) = base_path.as_path().parent() { - if !roots.iter().any(|entry| entry == dir) { - roots.push(dir.to_path_buf()); - } - } - } - roots - }, - ); - drop(config); - jpath - } - - fn 
execute_find_transitive_importers(&self, uri: &str) -> Option { - let uri_parsed: lsp_types::Uri = uri.parse().ok()?; - let path = CanonicalPath::from_uri(&uri_parsed).ok()?; - - let import_graph = self.import_graph.read(); - let importers = import_graph - .file(&path) - .map_or_else(Vec::new, |file| import_graph.transitive_importers(file)); - let mut importer_uris: Vec = importers - .iter() - .filter_map(|file| { - import_graph - .path(*file) - .and_then(|path| path.to_uri().ok().map(|uri| uri.to_string())) - }) - .collect(); - drop(import_graph); - importer_uris.sort(); - - Some(serde_json::json!({ - "file": uri, - "transitiveImporters": importer_uris - })) - } - - fn execute_find_references( - &self, - uri: &str, - line: u32, - character: u32, - include_declaration: bool, - ) -> Option { - let uri_parsed: lsp_types::Uri = uri.parse().ok()?; - let params = ReferenceParams { - text_document_position: TextDocumentPositionParams { - text_document: TextDocumentIdentifier { uri: uri_parsed }, - position: Position { line, character }, - }, - context: ReferenceContext { - include_declaration, - }, - work_done_progress_params: WorkDoneProgressParams::default(), - partial_result_params: PartialResultParams::default(), - }; - let refs = self.references(¶ms).unwrap_or_default(); - - serde_json::to_value(refs).ok() - } - - fn execute_show_errors(&self, uri: &str) -> Option { - let uri_parsed: lsp_types::Uri = uri.parse().ok()?; - let path = CanonicalPath::from_uri(&uri_parsed).ok()?; - let doc = self.load_document_for_path(&path)?; - let analysis = self.analyze_document(&path, &doc); - let (enable_lint_diagnostics, evaluator, import_roots) = { - let config = self.config.read(); - let evaluator = config.enable_eval_diagnostics.then(|| { - let eval_config = EvalConfig { - jpath: config.jpath.clone(), - resolve_paths_with_tanka: config.resolve_paths_with_tanka, - }; - Evaluator::new(&eval_config) - }); - let import_roots = effective_import_roots( - path.as_path(), - 
&config.jpath, - config.resolve_paths_with_tanka, - ); - (config.enable_lint_diagnostics, evaluator, import_roots) - }; - let import_resolution = ImportResolution::new(&path, &import_roots); - let import_occurrences = import_resolution.parse_occurrences(&doc); - - let diagnostics = crate::handlers::compute_diagnostics( - &doc, - &path, - enable_lint_diagnostics, - evaluator.as_ref(), - &uri_parsed, - &analysis, - &import_occurrences, - ); - - let response = lsp_types::PublishDiagnosticsParams { - uri: uri_parsed, - diagnostics, - version: Some(doc.version().0), - }; - serde_json::to_value(response).ok() - } -} diff --git a/crates/jrsonnet-lsp/src/server/async_requests/commands/diagnostics.rs b/crates/jrsonnet-lsp/src/server/async_requests/commands/diagnostics.rs new file mode 100644 index 00000000..d508be85 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/async_requests/commands/diagnostics.rs @@ -0,0 +1,49 @@ +use jrsonnet_lsp_document::CanonicalPath; +use jrsonnet_lsp_import::ImportResolution; + +use super::super::AsyncRequestContext; +use crate::analysis::{tanka::effective_import_roots, EvalConfig, Evaluator}; + +impl AsyncRequestContext { + pub(super) fn execute_show_errors(&self, uri: &str) -> Option { + let uri_parsed: lsp_types::Uri = uri.parse().ok()?; + let path = CanonicalPath::from_uri(&uri_parsed).ok()?; + let doc = self.load_document_for_path(&path)?; + let analysis = self.analyze_document(&path, &doc); + let (enable_lint_diagnostics, evaluator, import_roots) = { + let config = self.config.read(); + let evaluator = config.enable_eval_diagnostics.then(|| { + let eval_config = EvalConfig { + jpath: config.jpath.clone(), + resolve_paths_with_tanka: config.resolve_paths_with_tanka, + }; + Evaluator::new(&eval_config) + }); + let import_roots = effective_import_roots( + path.as_path(), + &config.jpath, + config.resolve_paths_with_tanka, + ); + (config.enable_lint_diagnostics, evaluator, import_roots) + }; + let import_resolution = 
ImportResolution::new(&path, &import_roots); + let import_occurrences = import_resolution.parse_occurrences(&doc); + + let diagnostics = crate::handlers::compute_diagnostics( + &doc, + &path, + enable_lint_diagnostics, + evaluator.as_ref(), + &uri_parsed, + &analysis, + &import_occurrences, + ); + + let response = lsp_types::PublishDiagnosticsParams { + uri: uri_parsed, + diagnostics, + version: Some(doc.version().0), + }; + serde_json::to_value(response).ok() + } +} diff --git a/crates/jrsonnet-lsp/src/server/async_requests/commands/dispatch.rs b/crates/jrsonnet-lsp/src/server/async_requests/commands/dispatch.rs new file mode 100644 index 00000000..6862edf7 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/async_requests/commands/dispatch.rs @@ -0,0 +1,50 @@ +use lsp_types::ExecuteCommandParams; +use tracing::{info, warn}; + +use super::super::AsyncRequestContext; + +impl AsyncRequestContext { + pub(crate) fn execute_command( + &self, + params: &ExecuteCommandParams, + ) -> Option { + info!("Execute command: {}", params.command); + + match params.command.as_str() { + "jrsonnet.evalFile" => { + let uri = params.arguments.first()?.as_str()?; + self.execute_eval_file(uri) + } + "jrsonnet.evalExpression" => { + let expr = params.arguments.first()?.as_str()?; + let base_uri = params.arguments.get(1).and_then(|v| v.as_str()); + Some(self.execute_eval_expression(expr, base_uri)) + } + "jrsonnet.findTransitiveImporters" => { + let uri = params.arguments.first()?.as_str()?; + self.execute_find_transitive_importers(uri) + } + "jrsonnet.findReferences" => { + let uri = params.arguments.first()?.as_str()?; + let line = params.arguments.get(1)?.as_u64()?; + let line = u32::try_from(line).ok()?; + let character = params.arguments.get(2)?.as_u64()?; + let character = u32::try_from(character).ok()?; + let include_declaration = params + .arguments + .get(3) + .and_then(serde_json::Value::as_bool) + .unwrap_or(false); + self.execute_find_references(uri, line, character, 
include_declaration) + } + "jrsonnet.showErrors" => { + let uri = params.arguments.first()?.as_str()?; + self.execute_show_errors(uri) + } + _ => { + warn!("Unknown command: {}", params.command); + None + } + } + } +} diff --git a/crates/jrsonnet-lsp/src/server/async_requests/commands/eval.rs b/crates/jrsonnet-lsp/src/server/async_requests/commands/eval.rs new file mode 100644 index 00000000..f56fb2f9 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/async_requests/commands/eval.rs @@ -0,0 +1,115 @@ +use jrsonnet_lsp_document::CanonicalPath; +use tracing::warn; + +use super::super::AsyncRequestContext; +use crate::analysis::tanka::effective_import_roots; + +impl AsyncRequestContext { + pub(super) fn execute_eval_file(&self, uri: &str) -> Option { + use jrsonnet_evaluator::manifest::JsonFormat; + use jrsonnet_parser::{SourceFile, SourcePath}; + + let uri_parsed: lsp_types::Uri = uri.parse().ok()?; + let path = CanonicalPath::from_uri(&uri_parsed).ok()?; + let text = self.documents.get_text(&path)?; + + let jpath = self.eval_command_jpath(Some(&path)); + let state = crate::analysis::eval::create_state_with_jpath(&jpath); + + let source_path = SourcePath::new(SourceFile::new(path.as_path().to_path_buf())); + + match state.evaluate_snippet(source_path.to_string(), &text) { + Ok(val) => { + let json_format = JsonFormat::default(); + match val.manifest(json_format) { + Ok(json_str) => match serde_json::from_str::(&json_str) { + Ok(json) => Some(json), + Err(e) => { + warn!("Failed to parse manifest result as JSON: {}", e); + Some(serde_json::Value::String(json_str)) + } + }, + Err(e) => { + warn!("Failed to manifest: {}", e); + Some(serde_json::json!({ + "error": format!("Manifest error: {}", e.error()) + })) + } + } + } + Err(e) => { + warn!("Evaluation failed: {}", e); + Some(serde_json::json!({ + "error": format!("Evaluation error: {}", e.error()) + })) + } + } + } + + pub(super) fn execute_eval_expression( + &self, + expr: &str, + base_uri: Option<&str>, + ) -> 
serde_json::Value { + use jrsonnet_evaluator::manifest::JsonFormat; + use jrsonnet_parser::{SourceFile, SourcePath}; + + let base_path = base_uri + .and_then(|uri| uri.parse::().ok()) + .and_then(|uri| CanonicalPath::from_uri(&uri).ok()); + let jpath = self.eval_command_jpath(base_path.as_ref()); + let state = crate::analysis::eval::create_state_with_jpath(&jpath); + let source_name = base_path.map_or_else( + || "".to_string(), + |path| SourcePath::new(SourceFile::new(path.as_path().to_path_buf())).to_string(), + ); + + match state.evaluate_snippet(source_name, expr) { + Ok(val) => { + let json_format = JsonFormat::default(); + match val.manifest(json_format) { + Ok(json_str) => match serde_json::from_str::(&json_str) { + Ok(json) => json, + Err(e) => { + warn!("Failed to parse manifest result as JSON: {}", e); + serde_json::Value::String(json_str) + } + }, + Err(e) => serde_json::json!({ + "error": format!("Manifest error: {}", e.error()) + }), + } + } + Err(e) => serde_json::json!({ + "error": format!("Evaluation error: {}", e.error()) + }), + } + } + + pub(super) fn eval_command_jpath( + &self, + base_path: Option<&CanonicalPath>, + ) -> Vec { + let config = self.config.read(); + let jpath = base_path.map_or_else( + || config.jpath.clone(), + |base_path| { + let mut roots = effective_import_roots( + base_path.as_path(), + &config.jpath, + config.resolve_paths_with_tanka, + ); + if !config.resolve_paths_with_tanka { + if let Some(dir) = base_path.as_path().parent() { + if !roots.iter().any(|entry| entry == dir) { + roots.push(dir.to_path_buf()); + } + } + } + roots + }, + ); + drop(config); + jpath + } +} diff --git a/crates/jrsonnet-lsp/src/server/async_requests/commands/graph.rs b/crates/jrsonnet-lsp/src/server/async_requests/commands/graph.rs new file mode 100644 index 00000000..cb9104de --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/async_requests/commands/graph.rs @@ -0,0 +1,30 @@ +use jrsonnet_lsp_document::CanonicalPath; + +use 
super::super::AsyncRequestContext; + +impl AsyncRequestContext { + pub(super) fn execute_find_transitive_importers(&self, uri: &str) -> Option { + let uri_parsed: lsp_types::Uri = uri.parse().ok()?; + let path = CanonicalPath::from_uri(&uri_parsed).ok()?; + + let import_graph = self.import_graph.read(); + let importers = import_graph + .file(&path) + .map_or_else(Vec::new, |file| import_graph.transitive_importers(file)); + let mut importer_uris: Vec = importers + .iter() + .filter_map(|file| { + import_graph + .path(*file) + .and_then(|path| path.to_uri().ok().map(|uri| uri.to_string())) + }) + .collect(); + drop(import_graph); + importer_uris.sort(); + + Some(serde_json::json!({ + "file": uri, + "transitiveImporters": importer_uris + })) + } +} diff --git a/crates/jrsonnet-lsp/src/server/async_requests/commands/mod.rs b/crates/jrsonnet-lsp/src/server/async_requests/commands/mod.rs new file mode 100644 index 00000000..fc117fd6 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/async_requests/commands/mod.rs @@ -0,0 +1,5 @@ +mod diagnostics; +mod dispatch; +mod eval; +mod graph; +mod references; diff --git a/crates/jrsonnet-lsp/src/server/async_requests/commands/references.rs b/crates/jrsonnet-lsp/src/server/async_requests/commands/references.rs new file mode 100644 index 00000000..14d385d3 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/async_requests/commands/references.rs @@ -0,0 +1,32 @@ +use lsp_types::{ + PartialResultParams, Position, ReferenceContext, ReferenceParams, TextDocumentIdentifier, + TextDocumentPositionParams, WorkDoneProgressParams, +}; + +use super::super::AsyncRequestContext; + +impl AsyncRequestContext { + pub(super) fn execute_find_references( + &self, + uri: &str, + line: u32, + character: u32, + include_declaration: bool, + ) -> Option { + let uri_parsed: lsp_types::Uri = uri.parse().ok()?; + let params = ReferenceParams { + text_document_position: TextDocumentPositionParams { + text_document: TextDocumentIdentifier { uri: uri_parsed }, 
+ position: Position { line, character }, + }, + context: ReferenceContext { + include_declaration, + }, + work_done_progress_params: WorkDoneProgressParams::default(), + partial_result_params: PartialResultParams::default(), + }; + let refs = self.references(¶ms).unwrap_or_default(); + + serde_json::to_value(refs).ok() + } +} From a758120687ce423f79f8c74d044dff8a9d5e8e19 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Mon, 16 Feb 2026 14:37:39 +0000 Subject: [PATCH 118/210] refactor(lsp-types): split logic ops by concern Split operations logic into dedicated modules for intersection, exclusion, length narrowing, and field narrowing. Each operation now carries its own inline tests, while `operations/logic.rs` is a thin re-export entrypoint. --- .../src/operations/logic.rs | 1082 +---------------- .../src/operations/logic/exclusion.rs | 116 ++ .../src/operations/logic/field.rs | 153 +++ .../src/operations/logic/intersection.rs | 431 +++++++ .../src/operations/logic/length.rs | 399 ++++++ 5 files changed, 1108 insertions(+), 1073 deletions(-) create mode 100644 crates/jrsonnet-lsp-types/src/operations/logic/exclusion.rs create mode 100644 crates/jrsonnet-lsp-types/src/operations/logic/field.rs create mode 100644 crates/jrsonnet-lsp-types/src/operations/logic/intersection.rs create mode 100644 crates/jrsonnet-lsp-types/src/operations/logic/length.rs diff --git a/crates/jrsonnet-lsp-types/src/operations/logic.rs b/crates/jrsonnet-lsp-types/src/operations/logic.rs index 0a4aaa98..b96645c1 100644 --- a/crates/jrsonnet-lsp-types/src/operations/logic.rs +++ b/crates/jrsonnet-lsp-types/src/operations/logic.rs @@ -1,1075 +1,11 @@ //! Core type logic and narrowing operations. -use crate::store::{FieldDefInterned, FieldVis, ObjectData, Ty, TyData, TypeStoreOps}; - -/// Compute the intersection of two types (type narrowing). -/// -/// Returns the most specific type that satisfies both constraints. -/// This is the logical AND of types - values must satisfy both. 
-/// -/// # Examples -/// -/// - `ty_and(Any, Number)` → `Number` -/// - `ty_and(Number, String)` → `Never` (no value is both) -/// - `ty_and(Bool, True)` → `True` -/// - `ty_and(Number | String, Number)` → `Number` -/// -/// # Distribution over Unions -/// -/// This operation distributes over unions: -/// `(A | B) & C = (A & C) | (B & C)` -pub fn ty_and(lhs: Ty, rhs: Ty, store: &mut S) -> Ty { - // Fast paths for special types - if lhs == Ty::NEVER || rhs == Ty::NEVER { - return Ty::NEVER; - } - if lhs == Ty::ANY { - return rhs; - } - if rhs == Ty::ANY { - return lhs; - } - if lhs == rhs { - return lhs; - } - - let lhs_data = store.get_data(lhs); - let rhs_data = store.get_data(rhs); - - // Handle unions: distribute (A | B) & C = (A & C) | (B & C) - if let TyData::Union(types) = lhs_data { - let narrowed: Vec = types - .iter() - .map(|&t| ty_and(t, rhs, store)) - .filter(|&t| t != Ty::NEVER) - .collect(); - return store.union(narrowed); - } - if let TyData::Union(types) = rhs_data { - let narrowed: Vec = types - .iter() - .map(|&t| ty_and(lhs, t, store)) - .filter(|&t| t != Ty::NEVER) - .collect(); - return store.union(narrowed); - } - - // Refresh data after potential recursion - let lhs_data = store.get_data(lhs); - let rhs_data = store.get_data(rhs); - - // Handle literal/subtype relationships - match (&lhs_data, &rhs_data) { - // Bool and its literals - (TyData::Bool, TyData::True) | (TyData::True, TyData::Bool) => return Ty::TRUE, - (TyData::Bool, TyData::False) | (TyData::False, TyData::Bool) => return Ty::FALSE, - - // String and Char - (TyData::String, TyData::Char) | (TyData::Char, TyData::String) => return Ty::CHAR, - - // String and LiteralString - (TyData::String, TyData::LiteralString(s)) | (TyData::LiteralString(s), TyData::String) => { - return store.literal_string(s.clone()); - } - - // LiteralString with same value - (TyData::LiteralString(s1), TyData::LiteralString(s2)) => { - if s1 == s2 { - return store.literal_string(s1.clone()); - } - return 
Ty::NEVER; - } - - // Char and LiteralString of length 1 - (TyData::Char, TyData::LiteralString(s)) | (TyData::LiteralString(s), TyData::Char) => { - if s.chars().count() == 1 { - return store.literal_string(s.clone()); - } - return Ty::NEVER; - } - - _ => {} - } - - // Handle arrays (preserve is_set if both are sets) - if let ( - TyData::Array { - elem: e1, - is_set: s1, - }, - TyData::Array { - elem: e2, - is_set: s2, - }, - ) = (&lhs_data, &rhs_data) - { - let elem = ty_and(*e1, *e2, store); - if elem == Ty::NEVER { - return Ty::NEVER; - } - // Result is a set only if both inputs are sets - if *s1 && *s2 { - return store.array_set(elem); - } - return store.array(elem); - } - - // Handle tuples with arrays - if let (TyData::Tuple { elems }, TyData::Array { elem: arr_elem, .. }) = (&lhs_data, &rhs_data) - { - let narrowed: Vec = elems.iter().map(|&e| ty_and(e, *arr_elem, store)).collect(); - if narrowed.contains(&Ty::NEVER) { - return Ty::NEVER; - } - return store.tuple(narrowed); - } - if let (TyData::Array { elem: arr_elem, .. 
}, TyData::Tuple { elems }) = (&lhs_data, &rhs_data) - { - let narrowed: Vec = elems.iter().map(|&e| ty_and(*arr_elem, e, store)).collect(); - if narrowed.contains(&Ty::NEVER) { - return Ty::NEVER; - } - return store.tuple(narrowed); - } - - // Handle tuples with tuples - if let (TyData::Tuple { elems: e1 }, TyData::Tuple { elems: e2 }) = (&lhs_data, &rhs_data) { - if e1.len() != e2.len() { - return Ty::NEVER; - } - let narrowed: Vec = e1 - .iter() - .zip(e2.iter()) - .map(|(&a, &b)| ty_and(a, b, store)) - .collect(); - if narrowed.contains(&Ty::NEVER) { - return Ty::NEVER; - } - return store.tuple(narrowed); - } - - // Handle objects - if let (TyData::Object(obj1), TyData::Object(obj2)) = (&lhs_data, &rhs_data) { - let mut fields = obj1.fields.clone(); - for (name, def2) in &obj2.fields { - if let Some((_, def1)) = fields.iter_mut().find(|(n, _)| n == name) { - let narrowed_ty = ty_and(def1.ty, def2.ty, store); - *def1 = FieldDefInterned { - ty: narrowed_ty, - required: def1.required || def2.required, - visibility: def1.visibility, - }; - } else { - fields.push((name.clone(), def2.clone())); - } - } - let has_unknown = obj1.has_unknown && obj2.has_unknown; - return store.object(ObjectData { - fields, - has_unknown, - }); - } - - // Handle AttrsOf - if let (TyData::AttrsOf { value: v1 }, TyData::AttrsOf { value: v2 }) = (&lhs_data, &rhs_data) { - let elem = ty_and(*v1, *v2, store); - if elem == Ty::NEVER { - return Ty::NEVER; - } - return store.attrs_of(elem); - } - - // Handle object + AttrsOf - if let (TyData::Object(obj), TyData::AttrsOf { value }) = (&lhs_data, &rhs_data) { - let fields: Vec<_> = obj - .fields - .iter() - .map(|(name, field)| { - let narrowed_ty = ty_and(field.ty, *value, store); - ( - name.clone(), - FieldDefInterned { - ty: narrowed_ty, - required: field.required, - visibility: field.visibility, - }, - ) - }) - .collect(); - return store.object(ObjectData { - fields, - has_unknown: obj.has_unknown, - }); - } - if let (TyData::AttrsOf { value 
}, TyData::Object(obj)) = (&lhs_data, &rhs_data) { - let fields: Vec<_> = obj - .fields - .iter() - .map(|(name, field)| { - let narrowed_ty = ty_and(*value, field.ty, store); - ( - name.clone(), - FieldDefInterned { - ty: narrowed_ty, - required: field.required, - visibility: field.visibility, - }, - ) - }) - .collect(); - return store.object(ObjectData { - fields, - has_unknown: obj.has_unknown, - }); - } - - // Handle BoundedNumber - if let (TyData::Number, TyData::BoundedNumber(bounds)) - | (TyData::BoundedNumber(bounds), TyData::Number) = (&lhs_data, &rhs_data) - { - return store.bounded_number(*bounds); - } - if let (TyData::BoundedNumber(b1), TyData::BoundedNumber(b2)) = (&lhs_data, &rhs_data) { - // Intersection of bounds: take stricter bounds - let min = match (b1.min_f64(), b2.min_f64()) { - (Some(a), Some(b)) => Some(a.max(b)), - (Some(a), None) => Some(a), - (None, Some(b)) => Some(b), - (None, None) => None, - }; - let max = match (b1.max_f64(), b2.max_f64()) { - (Some(a), Some(b)) => Some(a.min(b)), - (Some(a), None) => Some(a), - (None, Some(b)) => Some(b), - (None, None) => None, - }; - // Check for empty range - if let (Some(lo), Some(hi)) = (min, max) { - if lo > hi { - return Ty::NEVER; - } - } - return store.bounded_number(crate::store::NumBounds { - min: min.map(f64::to_bits), - max: max.map(f64::to_bits), - }); - } - - // Handle Sum (intersection) types - if let TyData::Sum(types) = lhs_data { - let narrowed: Vec = types.iter().map(|&t| ty_and(t, rhs, store)).collect(); - if narrowed.contains(&Ty::NEVER) { - return Ty::NEVER; - } - return store.sum(narrowed); - } - if let TyData::Sum(types) = rhs_data { - let narrowed: Vec = types.iter().map(|&t| ty_and(lhs, t, store)).collect(); - if narrowed.contains(&Ty::NEVER) { - return Ty::NEVER; - } - return store.sum(narrowed); - } - - // Handle TypeVar - keep it, may be resolved later - if matches!(lhs_data, TyData::TypeVar { .. }) || matches!(rhs_data, TyData::TypeVar { .. 
}) { - return store.sum(vec![lhs, rhs]); - } - - // Different incompatible concrete types have no intersection - Ty::NEVER -} - -/// Compute the exclusion of one type from another. -/// -/// Returns the type with the constraint removed (difference/minus). -/// This removes values that match `remove` from `base`. -/// -/// # Examples -/// -/// - `ty_minus(Number | String, Number)` → `String` -/// - `ty_minus(Bool, True)` → `False` -/// - `ty_minus(Any, Number)` → `Any` (Any is too general) -/// - `ty_minus(Number, Number)` → `Never` -/// -/// # Distribution over Unions -/// -/// This operation distributes over unions: -/// `(A | B) - C = (A - C) | (B - C)` -pub fn ty_minus(base: Ty, remove: Ty, store: &mut S) -> Ty { - // Fast paths - if base == Ty::NEVER { - return Ty::NEVER; - } - if remove == Ty::NEVER { - return base; - } - if remove == Ty::ANY { - return Ty::NEVER; - } - if base == Ty::ANY { - // Can't remove anything meaningful from Any - return Ty::ANY; - } - if base == remove { - return Ty::NEVER; - } - - let base_data = store.get_data(base); - - // Handle unions: distribute (A | B) - C = (A - C) | (B - C) - if let TyData::Union(types) = base_data { - let remaining: Vec = types - .iter() - .map(|&t| ty_minus(t, remove, store)) - .filter(|&t| t != Ty::NEVER) - .collect(); - return store.union(remaining); - } - - // Special case: Bool minus True/False - let base_data = store.get_data(base); - let remove_data = store.get_data(remove); - match (&base_data, &remove_data) { - (TyData::Bool, TyData::True) => return Ty::FALSE, - (TyData::Bool, TyData::False) => return Ty::TRUE, - (TyData::True | TyData::False, TyData::Bool) => return Ty::NEVER, - _ => {} - } - - // For non-union types, if they don't match the remove type, return unchanged - base -} - -/// Narrow a type to one with a specific length. -/// -/// This is useful for narrowing based on `std.length(x) == n` conditions. 
-/// -/// # Behavior -/// -/// - Arrays become tuples with `n` elements of the same element type -/// - Tuples must have exactly `n` elements (otherwise `Never`) -/// - Strings with length 1 become `Char` -/// - Objects must have exactly `n` fields (if closed) or at least `n` (if open) -/// - Primitives like Number/Bool return `Never` (they don't have length) -/// -/// # Examples -/// -/// - `ty_with_len(Array, 3)` → `[Number, Number, Number]` -/// - `ty_with_len(String, 1)` → `Char` -/// - `ty_with_len([Number, String], 2)` → `[Number, String]` -/// - `ty_with_len([Number, String], 3)` → `Never` -pub fn ty_with_len(ty: Ty, len: usize, store: &mut S) -> Ty { - let data = store.get_data(ty); - match data { - TyData::Any => Ty::ANY, - - TyData::Array { elem, .. } => { - let elems = vec![elem; len]; - store.tuple(elems) - } - - TyData::Tuple { elems } => { - if elems.len() == len { - ty - } else { - Ty::NEVER - } - } - - TyData::Object(obj) => { - match (obj.fields.len().cmp(&len), obj.has_unknown) { - // Exactly right number of fields - (std::cmp::Ordering::Equal, false) => ty, - // Open object with fewer fields - close it at this length - (std::cmp::Ordering::Less | std::cmp::Ordering::Equal, true) => { - if obj.fields.len() == len { - store.object(ObjectData { - fields: obj.fields, - has_unknown: false, - }) - } else { - ty // Can have unknown fields to reach the length - } - } - // Too few fields in closed object, or too many fields - (std::cmp::Ordering::Less, false) | (std::cmp::Ordering::Greater, _) => Ty::NEVER, - } - } - - TyData::Function(func) => { - if func.variadic && func.params.is_empty() { - let params = (0..len) - .map(|idx| crate::store::ParamInterned { - name: format!("arg{idx}"), - ty: Ty::ANY, - has_default: false, - }) - .collect(); - store.function(crate::store::FunctionData { - params, - return_spec: crate::store::ReturnSpec::Fixed(Ty::ANY), - variadic: false, - }) - } else if func.required_count() == len { - ty - } else { - Ty::NEVER - } - } - 
- TyData::AttrsOf { .. } | TyData::TypeVar { .. } => ty, - - TyData::String => { - if len == 1 { - Ty::CHAR - } else { - ty // String can be any length - } - } - - TyData::LiteralString(s) => { - if s.chars().count() == len { - ty - } else { - Ty::NEVER - } - } - - TyData::Char => { - if len == 1 { - ty - } else { - Ty::NEVER - } - } - - TyData::Never - | TyData::Null - | TyData::Bool - | TyData::True - | TyData::False - | TyData::Number - | TyData::BoundedNumber(_) => Ty::NEVER, - - TyData::Union(types) => { - let narrowed: Vec = types - .iter() - .map(|&t| ty_with_len(t, len, store)) - .filter(|&t| t != Ty::NEVER) - .collect(); - store.union(narrowed) - } - - TyData::Sum(types) => { - let narrowed: Vec = types.iter().map(|&t| ty_with_len(t, len, store)).collect(); - if narrowed.contains(&Ty::NEVER) { - Ty::NEVER - } else { - store.sum(narrowed) - } - } - } -} - -/// Narrow a type to one with at least a minimum length. -/// -/// This is useful for narrowing based on `std.length(x) >= n` conditions. -/// -/// # Behavior -/// -/// - Arrays stay arrays (can have any length) -/// - Tuples must have at least `n` elements -/// - Strings stay strings (can have any length) -/// - Literal strings are checked exactly against `n` -/// - Char requires `min <= 1` -/// - Objects with unknown fields stay as-is -/// -/// # Examples -/// -/// - `ty_with_min_len(Array, 3)` → `Array` -/// - `ty_with_min_len([Number, String], 1)` → `[Number, String]` -/// - `ty_with_min_len([Number], 2)` → `Never` -/// - `ty_with_min_len("ok", 3)` → `Never` -/// - `ty_with_min_len(Char, 2)` → `Never` -pub fn ty_with_min_len(ty: Ty, min: usize, store: &mut S) -> Ty { - let data = store.get_data(ty); - match data { - TyData::Any => Ty::ANY, - TyData::Never => Ty::NEVER, - - TyData::Array { .. } - | TyData::Object(_) - | TyData::AttrsOf { .. } - | TyData::String - | TyData::Function(_) - | TyData::TypeVar { .. 
} => ty, - - TyData::LiteralString(s) => { - if s.chars().count() >= min { - ty - } else { - Ty::NEVER - } - } - - TyData::Tuple { elems } => { - if elems.len() >= min { - ty - } else { - Ty::NEVER - } - } - - TyData::Char => { - if min <= 1 { - ty - } else { - Ty::NEVER - } - } - - TyData::Null - | TyData::Bool - | TyData::True - | TyData::False - | TyData::Number - | TyData::BoundedNumber(_) => { - if min == 0 { - ty // Everything has "length >= 0" - } else { - Ty::NEVER - } - } - - TyData::Union(types) => { - let narrowed: Vec = types - .iter() - .map(|&t| ty_with_min_len(t, min, store)) - .filter(|&t| t != Ty::NEVER) - .collect(); - store.union(narrowed) - } - - TyData::Sum(types) => { - let narrowed: Vec = types - .iter() - .map(|&t| ty_with_min_len(t, min, store)) - .collect(); - if narrowed.contains(&Ty::NEVER) { - Ty::NEVER - } else { - store.sum(narrowed) - } - } - } -} - -/// Add a required field to an object type. -/// -/// Returns a new object type with the specified field added. -/// If the field already exists, its type is narrowed with the new type. 
-pub fn ty_with_field(ty: Ty, field: &str, field_ty: Ty, store: &mut S) -> Ty { - let data = store.get_data(ty); - match data { - TyData::Any => { - // Create an open object with this field - store.object(ObjectData { - fields: vec![( - field.to_string(), - FieldDefInterned { - ty: field_ty, - required: true, - visibility: FieldVis::Normal, - }, - )], - has_unknown: true, - }) - } - - TyData::Object(mut obj) => { - if let Some((_, existing)) = obj.fields.iter_mut().find(|(n, _)| n == field) { - let narrowed = ty_and(existing.ty, field_ty, store); - *existing = FieldDefInterned { - ty: narrowed, - required: true, - visibility: existing.visibility, - }; - } else { - obj.fields.push(( - field.to_string(), - FieldDefInterned { - ty: field_ty, - required: true, - visibility: FieldVis::Normal, - }, - )); - } - store.object(obj) - } - - TyData::AttrsOf { value } => { - // AttrsOf with a specific field becomes object with that field - let narrowed = ty_and(value, field_ty, store); - store.object(ObjectData { - fields: vec![( - field.to_string(), - FieldDefInterned { - ty: narrowed, - required: true, - visibility: FieldVis::Normal, - }, - )], - has_unknown: true, - }) - } - - TyData::Union(types) => { - let narrowed: Vec = types - .iter() - .map(|&t| ty_with_field(t, field, field_ty, store)) - .filter(|&t| t != Ty::NEVER) - .collect(); - store.union(narrowed) - } - - // Non-object types can't have fields - _ => Ty::NEVER, - } -} - -#[cfg(test)] -mod tests { - use assert_matches::assert_matches; - - use super::*; - use crate::TyStore; - - mod ty_and_tests { - use super::*; - - #[test] - fn test_any_narrows_to_constraint() { - let mut store = TyStore::new(); - assert_eq!(ty_and(Ty::ANY, Ty::NUMBER, &mut store), Ty::NUMBER); - assert_eq!(ty_and(Ty::NUMBER, Ty::ANY, &mut store), Ty::NUMBER); - } - - #[test] - fn test_never_always_never() { - let mut store = TyStore::new(); - assert_eq!(ty_and(Ty::NEVER, Ty::NUMBER, &mut store), Ty::NEVER); - assert_eq!(ty_and(Ty::NUMBER, 
Ty::NEVER, &mut store), Ty::NEVER); - } - - #[test] - fn test_same_type_returns_same() { - let mut store = TyStore::new(); - assert_eq!(ty_and(Ty::NUMBER, Ty::NUMBER, &mut store), Ty::NUMBER); - assert_eq!(ty_and(Ty::STRING, Ty::STRING, &mut store), Ty::STRING); - } - - #[test] - fn test_incompatible_types_never() { - let mut store = TyStore::new(); - assert_eq!(ty_and(Ty::NUMBER, Ty::STRING, &mut store), Ty::NEVER); - assert_eq!(ty_and(Ty::BOOL, Ty::NUMBER, &mut store), Ty::NEVER); - } - - #[test] - fn test_bool_narrows_to_literal() { - let mut store = TyStore::new(); - assert_eq!(ty_and(Ty::BOOL, Ty::TRUE, &mut store), Ty::TRUE); - assert_eq!(ty_and(Ty::BOOL, Ty::FALSE, &mut store), Ty::FALSE); - assert_eq!(ty_and(Ty::TRUE, Ty::BOOL, &mut store), Ty::TRUE); - assert_eq!(ty_and(Ty::FALSE, Ty::BOOL, &mut store), Ty::FALSE); - } - - #[test] - fn test_string_narrows_to_char() { - let mut store = TyStore::new(); - assert_eq!(ty_and(Ty::STRING, Ty::CHAR, &mut store), Ty::CHAR); - assert_eq!(ty_and(Ty::CHAR, Ty::STRING, &mut store), Ty::CHAR); - } - - #[test] - fn test_union_distributes() { - let mut store = TyStore::new(); - let union = store.union(vec![Ty::NUMBER, Ty::STRING]); - // (Number | String) & Number = Number - assert_eq!(ty_and(union, Ty::NUMBER, &mut store), Ty::NUMBER); - // Number & (Number | String) = Number - assert_eq!(ty_and(Ty::NUMBER, union, &mut store), Ty::NUMBER); - } - - #[test] - fn test_union_with_incompatible_gives_partial() { - let mut store = TyStore::new(); - let union = store.union(vec![Ty::NUMBER, Ty::STRING]); - // (Number | String) & Bool = Never (both incompatible) - assert_eq!(ty_and(union, Ty::BOOL, &mut store), Ty::NEVER); - } - - #[test] - fn test_array_intersection() { - let mut store = TyStore::new(); - let arr_num = store.array(Ty::NUMBER); - let arr_any = store.array(Ty::ANY); - // Array & Array = Array - let result = ty_and(arr_num, arr_any, &mut store); - assert!( - matches!(store.get_data(result), TyData::Array { elem, .. 
} if elem == Ty::NUMBER) - ); - } - - #[test] - fn test_tuple_intersection_same_length() { - let mut store = TyStore::new(); - let tuple1 = store.tuple(vec![Ty::NUMBER, Ty::ANY]); - let tuple2 = store.tuple(vec![Ty::ANY, Ty::STRING]); - let result = ty_and(tuple1, tuple2, &mut store); - assert_matches!(store.get_data(result), TyData::Tuple { elems } => { - assert_eq!(elems, vec![Ty::NUMBER, Ty::STRING]); - }); - } - - #[test] - fn test_tuple_intersection_different_length_never() { - let mut store = TyStore::new(); - let tuple1 = store.tuple(vec![Ty::NUMBER, Ty::STRING]); - let tuple2 = store.tuple(vec![Ty::NUMBER]); - assert_eq!(ty_and(tuple1, tuple2, &mut store), Ty::NEVER); - } - - #[test] - fn test_object_intersection_merges_fields() { - let mut store = TyStore::new(); - let obj1 = store.object(ObjectData { - fields: vec![( - "a".to_string(), - FieldDefInterned { - ty: Ty::NUMBER, - required: true, - visibility: FieldVis::Normal, - }, - )], - has_unknown: false, - }); - let obj2 = store.object(ObjectData { - fields: vec![( - "b".to_string(), - FieldDefInterned { - ty: Ty::STRING, - required: true, - visibility: FieldVis::Normal, - }, - )], - has_unknown: false, - }); - let result = ty_and(obj1, obj2, &mut store); - assert_matches!(store.get_data(result), TyData::Object(obj) => { - // Should have both fields "a" and "b" - let mut field_names: Vec<_> = obj.fields.iter().map(|(n, _)| n.as_str()).collect(); - field_names.sort_unstable(); - assert_eq!(field_names, vec!["a", "b"]); - }); - } - - #[test] - fn test_bounded_number_intersection() { - let mut store = TyStore::new(); - let bounded1 = store.bounded_number(crate::store::NumBounds::at_least(0.0)); - let bounded2 = store.bounded_number(crate::store::NumBounds::between(-10.0, 10.0)); - let result = ty_and(bounded1, bounded2, &mut store); - // Should get [0..10] - assert_matches!(store.get_data(result), TyData::BoundedNumber(bounds) => { - assert_eq!(bounds.min_f64(), Some(0.0)); - assert_eq!(bounds.max_f64(), 
Some(10.0)); - }); - } - } - - mod ty_minus_tests { - use super::*; - - #[test] - fn test_same_type_gives_never() { - let mut store = TyStore::new(); - assert_eq!(ty_minus(Ty::NUMBER, Ty::NUMBER, &mut store), Ty::NEVER); - } - - #[test] - fn test_different_type_unchanged() { - let mut store = TyStore::new(); - assert_eq!(ty_minus(Ty::NUMBER, Ty::STRING, &mut store), Ty::NUMBER); - } - - #[test] - fn test_any_stays_any() { - let mut store = TyStore::new(); - assert_eq!(ty_minus(Ty::ANY, Ty::NUMBER, &mut store), Ty::ANY); - } - - #[test] - fn test_minus_any_gives_never() { - let mut store = TyStore::new(); - assert_eq!(ty_minus(Ty::NUMBER, Ty::ANY, &mut store), Ty::NEVER); - } - - #[test] - fn test_union_removes_matching() { - let mut store = TyStore::new(); - let union = store.union(vec![Ty::NUMBER, Ty::STRING]); - assert_eq!(ty_minus(union, Ty::NUMBER, &mut store), Ty::STRING); - } - - #[test] - fn test_bool_minus_true_gives_false() { - let mut store = TyStore::new(); - assert_eq!(ty_minus(Ty::BOOL, Ty::TRUE, &mut store), Ty::FALSE); - assert_eq!(ty_minus(Ty::BOOL, Ty::FALSE, &mut store), Ty::TRUE); - } - - #[test] - fn test_never_stays_never() { - let mut store = TyStore::new(); - assert_eq!(ty_minus(Ty::NEVER, Ty::NUMBER, &mut store), Ty::NEVER); - } - } - - mod ty_with_len_tests { - use super::*; - - #[test] - fn test_array_to_tuple() { - let mut store = TyStore::new(); - let arr = store.array(Ty::NUMBER); - let result = ty_with_len(arr, 3, &mut store); - assert_matches!(store.get_data(result), TyData::Tuple { elems } => { - assert_eq!(elems, vec![Ty::NUMBER, Ty::NUMBER, Ty::NUMBER]); - }); - } - - #[test] - fn test_tuple_matching_length() { - let mut store = TyStore::new(); - let tuple = store.tuple(vec![Ty::NUMBER, Ty::STRING]); - assert_eq!(ty_with_len(tuple, 2, &mut store), tuple); - } - - #[test] - fn test_tuple_wrong_length_never() { - let mut store = TyStore::new(); - let tuple = store.tuple(vec![Ty::NUMBER, Ty::STRING]); - assert_eq!(ty_with_len(tuple, 
3, &mut store), Ty::NEVER); - } - - #[test] - fn test_string_len_1_to_char() { - let mut store = TyStore::new(); - assert_eq!(ty_with_len(Ty::STRING, 1, &mut store), Ty::CHAR); - } - - #[test] - fn test_char_len_1_ok() { - let mut store = TyStore::new(); - assert_eq!(ty_with_len(Ty::CHAR, 1, &mut store), Ty::CHAR); - } - - #[test] - fn test_char_len_not_1_never() { - let mut store = TyStore::new(); - assert_eq!(ty_with_len(Ty::CHAR, 0, &mut store), Ty::NEVER); - assert_eq!(ty_with_len(Ty::CHAR, 2, &mut store), Ty::NEVER); - } - - #[test] - fn test_number_never() { - let mut store = TyStore::new(); - assert_eq!(ty_with_len(Ty::NUMBER, 5, &mut store), Ty::NEVER); - } - - #[test] - fn test_literal_string_matching_len() { - let mut store = TyStore::new(); - let lit = store.literal_string("hello".to_string()); - assert_eq!(ty_with_len(lit, 5, &mut store), lit); - } - - #[test] - fn test_literal_string_wrong_len_never() { - let mut store = TyStore::new(); - let lit = store.literal_string("hello".to_string()); - assert_eq!(ty_with_len(lit, 3, &mut store), Ty::NEVER); - } - - #[test] - fn test_union_filters() { - let mut store = TyStore::new(); - let tuple2 = store.tuple(vec![Ty::NUMBER, Ty::NUMBER]); - let tuple3 = store.tuple(vec![Ty::NUMBER, Ty::NUMBER, Ty::NUMBER]); - let union = store.union(vec![tuple2, tuple3]); - assert_eq!(ty_with_len(union, 2, &mut store), tuple2); - } - - #[test] - fn test_function_matches_required_arity() { - let mut store = TyStore::new(); - let func = store.function(crate::store::FunctionData { - params: vec![ - crate::store::ParamInterned { - name: "x".to_string(), - ty: Ty::ANY, - has_default: false, - }, - crate::store::ParamInterned { - name: "y".to_string(), - ty: Ty::ANY, - has_default: true, - }, - ], - return_spec: crate::store::ReturnSpec::Fixed(Ty::NUMBER), - variadic: false, - }); - assert_eq!(ty_with_len(func, 1, &mut store), func); - assert_eq!(ty_with_len(func, 2, &mut store), Ty::NEVER); - } - - #[test] - fn 
test_function_any_narrows_to_exact_arity() { - let mut store = TyStore::new(); - let func = store.function_any(); - let narrowed = ty_with_len(func, 2, &mut store); - assert_matches!(store.get_data(narrowed), TyData::Function(func_data) => { - assert!(!func_data.variadic); - assert_eq!(func_data.params.len(), 2); - assert_eq!(func_data.params[0].name, "arg0"); - assert_eq!(func_data.params[1].name, "arg1"); - assert!(func_data.params.iter().all(|p| p.ty == Ty::ANY)); - assert!(func_data.params.iter().all(|p| !p.has_default)); - assert_eq!(func_data.return_spec, crate::store::ReturnSpec::Fixed(Ty::ANY)); - }); - } - } - - mod ty_with_min_len_tests { - use super::*; - - #[test] - fn test_array_unchanged() { - let mut store = TyStore::new(); - let arr = store.array(Ty::NUMBER); - assert_eq!(ty_with_min_len(arr, 5, &mut store), arr); - } - - #[test] - fn test_tuple_satisfies_min() { - let mut store = TyStore::new(); - let tuple = store.tuple(vec![Ty::NUMBER, Ty::STRING, Ty::BOOL]); - assert_eq!(ty_with_min_len(tuple, 2, &mut store), tuple); - } - - #[test] - fn test_tuple_too_short_never() { - let mut store = TyStore::new(); - let tuple = store.tuple(vec![Ty::NUMBER]); - assert_eq!(ty_with_min_len(tuple, 2, &mut store), Ty::NEVER); - } - - #[test] - fn test_char_min_1_ok() { - let mut store = TyStore::new(); - assert_eq!(ty_with_min_len(Ty::CHAR, 1, &mut store), Ty::CHAR); - } - - #[test] - fn test_char_min_2_never() { - let mut store = TyStore::new(); - assert_eq!(ty_with_min_len(Ty::CHAR, 2, &mut store), Ty::NEVER); - } - - #[test] - fn test_literal_string_meets_min() { - let mut store = TyStore::new(); - let literal = store.literal_string("hello".to_string()); - assert_eq!(ty_with_min_len(literal, 3, &mut store), literal); - } - - #[test] - fn test_literal_string_too_short() { - let mut store = TyStore::new(); - let literal = store.literal_string("hi".to_string()); - assert_eq!(ty_with_min_len(literal, 3, &mut store), Ty::NEVER); - } - } - - mod ty_with_field_tests 
{ - use super::*; - - #[test] - fn test_any_to_object() { - let mut store = TyStore::new(); - let result = ty_with_field(Ty::ANY, "foo", Ty::NUMBER, &mut store); - assert_matches!(store.get_data(result), TyData::Object(obj) => { - assert_eq!(obj.fields, vec![("foo".to_string(), FieldDefInterned { - ty: Ty::NUMBER, - required: true, - visibility: FieldVis::Normal, - })]); - assert!(obj.has_unknown); - }); - } - - #[test] - fn test_object_adds_field() { - let mut store = TyStore::new(); - let obj = store.object(ObjectData { - fields: vec![( - "a".to_string(), - FieldDefInterned { - ty: Ty::STRING, - required: true, - visibility: FieldVis::Normal, - }, - )], - has_unknown: false, - }); - let result = ty_with_field(obj, "b", Ty::NUMBER, &mut store); - assert_matches!(store.get_data(result), TyData::Object(obj) => { - let mut field_names: Vec<_> = obj.fields.iter().map(|(n, _)| n.as_str()).collect(); - field_names.sort_unstable(); - assert_eq!(field_names, vec!["a", "b"]); - }); - } - - #[test] - fn test_object_narrows_existing_field() { - let mut store = TyStore::new(); - let obj = store.object(ObjectData { - fields: vec![( - "a".to_string(), - FieldDefInterned { - ty: Ty::ANY, - required: false, - visibility: FieldVis::Normal, - }, - )], - has_unknown: false, - }); - let result = ty_with_field(obj, "a", Ty::NUMBER, &mut store); - assert_matches!(store.get_data(result), TyData::Object(obj) => { - assert_eq!(obj.fields[0].1.ty, Ty::NUMBER); - assert!(obj.fields[0].1.required); - }); - } - - #[test] - fn test_number_never() { - let mut store = TyStore::new(); - assert_eq!( - ty_with_field(Ty::NUMBER, "foo", Ty::STRING, &mut store), - Ty::NEVER - ); - } - } -} +mod exclusion; +mod field; +mod intersection; +mod length; + +pub use exclusion::ty_minus; +pub use field::ty_with_field; +pub use intersection::ty_and; +pub use length::{ty_with_len, ty_with_min_len}; diff --git a/crates/jrsonnet-lsp-types/src/operations/logic/exclusion.rs 
b/crates/jrsonnet-lsp-types/src/operations/logic/exclusion.rs new file mode 100644 index 00000000..df0b0e51 --- /dev/null +++ b/crates/jrsonnet-lsp-types/src/operations/logic/exclusion.rs @@ -0,0 +1,116 @@ +use crate::store::{Ty, TyData, TypeStoreOps}; + +/// Compute the exclusion of one type from another. +/// +/// Returns the type with the constraint removed (difference/minus). +/// This removes values that match `remove` from `base`. +/// +/// # Examples +/// +/// - `ty_minus(Number | String, Number)` → `String` +/// - `ty_minus(Bool, True)` → `False` +/// - `ty_minus(Any, Number)` → `Any` (Any is too general) +/// - `ty_minus(Number, Number)` → `Never` +/// +/// # Distribution over Unions +/// +/// This operation distributes over unions: +/// `(A | B) - C = (A - C) | (B - C)` +pub fn ty_minus(base: Ty, remove: Ty, store: &mut S) -> Ty { + // Fast paths + if base == Ty::NEVER { + return Ty::NEVER; + } + if remove == Ty::NEVER { + return base; + } + if remove == Ty::ANY { + return Ty::NEVER; + } + if base == Ty::ANY { + // Can't remove anything meaningful from Any + return Ty::ANY; + } + if base == remove { + return Ty::NEVER; + } + + let base_data = store.get_data(base); + + // Handle unions: distribute (A | B) - C = (A - C) | (B - C) + if let TyData::Union(types) = base_data { + let remaining: Vec = types + .iter() + .map(|&t| ty_minus(t, remove, store)) + .filter(|&t| t != Ty::NEVER) + .collect(); + return store.union(remaining); + } + + // Special case: Bool minus True/False + let base_data = store.get_data(base); + let remove_data = store.get_data(remove); + match (&base_data, &remove_data) { + (TyData::Bool, TyData::True) => return Ty::FALSE, + (TyData::Bool, TyData::False) => return Ty::TRUE, + (TyData::True | TyData::False, TyData::Bool) => return Ty::NEVER, + _ => {} + } + + // For non-union types, if they don't match the remove type, return unchanged + base +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::TyStore; + + mod ty_minus_tests { + 
use super::*; + + #[test] + fn test_same_type_gives_never() { + let mut store = TyStore::new(); + assert_eq!(ty_minus(Ty::NUMBER, Ty::NUMBER, &mut store), Ty::NEVER); + } + + #[test] + fn test_different_type_unchanged() { + let mut store = TyStore::new(); + assert_eq!(ty_minus(Ty::NUMBER, Ty::STRING, &mut store), Ty::NUMBER); + } + + #[test] + fn test_any_stays_any() { + let mut store = TyStore::new(); + assert_eq!(ty_minus(Ty::ANY, Ty::NUMBER, &mut store), Ty::ANY); + } + + #[test] + fn test_minus_any_gives_never() { + let mut store = TyStore::new(); + assert_eq!(ty_minus(Ty::NUMBER, Ty::ANY, &mut store), Ty::NEVER); + } + + #[test] + fn test_union_removes_matching() { + let mut store = TyStore::new(); + let union = store.union(vec![Ty::NUMBER, Ty::STRING]); + assert_eq!(ty_minus(union, Ty::NUMBER, &mut store), Ty::STRING); + } + + #[test] + fn test_bool_minus_true_gives_false() { + let mut store = TyStore::new(); + assert_eq!(ty_minus(Ty::BOOL, Ty::TRUE, &mut store), Ty::FALSE); + assert_eq!(ty_minus(Ty::BOOL, Ty::FALSE, &mut store), Ty::TRUE); + } + + #[test] + fn test_never_stays_never() { + let mut store = TyStore::new(); + assert_eq!(ty_minus(Ty::NEVER, Ty::NUMBER, &mut store), Ty::NEVER); + } + } +} diff --git a/crates/jrsonnet-lsp-types/src/operations/logic/field.rs b/crates/jrsonnet-lsp-types/src/operations/logic/field.rs new file mode 100644 index 00000000..6f9501e6 --- /dev/null +++ b/crates/jrsonnet-lsp-types/src/operations/logic/field.rs @@ -0,0 +1,153 @@ +use super::intersection::ty_and; +use crate::store::{FieldDefInterned, FieldVis, ObjectData, Ty, TyData, TypeStoreOps}; + +/// Add a required field to an object type. +/// +/// Returns a new object type with the specified field added. +/// If the field already exists, its type is narrowed with the new type. 
+pub fn ty_with_field(ty: Ty, field: &str, field_ty: Ty, store: &mut S) -> Ty { + let data = store.get_data(ty); + match data { + TyData::Any => { + // Create an open object with this field + store.object(ObjectData { + fields: vec![( + field.to_string(), + FieldDefInterned { + ty: field_ty, + required: true, + visibility: FieldVis::Normal, + }, + )], + has_unknown: true, + }) + } + + TyData::Object(mut obj) => { + if let Some((_, existing)) = obj.fields.iter_mut().find(|(n, _)| n == field) { + let narrowed = ty_and(existing.ty, field_ty, store); + *existing = FieldDefInterned { + ty: narrowed, + required: true, + visibility: existing.visibility, + }; + } else { + obj.fields.push(( + field.to_string(), + FieldDefInterned { + ty: field_ty, + required: true, + visibility: FieldVis::Normal, + }, + )); + } + store.object(obj) + } + + TyData::AttrsOf { value } => { + // AttrsOf with a specific field becomes object with that field + let narrowed = ty_and(value, field_ty, store); + store.object(ObjectData { + fields: vec![( + field.to_string(), + FieldDefInterned { + ty: narrowed, + required: true, + visibility: FieldVis::Normal, + }, + )], + has_unknown: true, + }) + } + + TyData::Union(types) => { + let narrowed: Vec = types + .iter() + .map(|&t| ty_with_field(t, field, field_ty, store)) + .filter(|&t| t != Ty::NEVER) + .collect(); + store.union(narrowed) + } + + // Non-object types can't have fields + _ => Ty::NEVER, + } +} + +#[cfg(test)] +mod tests { + use assert_matches::assert_matches; + + use super::*; + use crate::TyStore; + + mod ty_with_field_tests { + use super::*; + + #[test] + fn test_any_to_object() { + let mut store = TyStore::new(); + let result = ty_with_field(Ty::ANY, "foo", Ty::NUMBER, &mut store); + assert_matches!(store.get_data(result), TyData::Object(obj) => { + assert_eq!(obj.fields, vec![("foo".to_string(), FieldDefInterned { + ty: Ty::NUMBER, + required: true, + visibility: FieldVis::Normal, + })]); + assert!(obj.has_unknown); + }); + } + + 
#[test] + fn test_object_adds_field() { + let mut store = TyStore::new(); + let obj = store.object(ObjectData { + fields: vec![( + "a".to_string(), + FieldDefInterned { + ty: Ty::STRING, + required: true, + visibility: FieldVis::Normal, + }, + )], + has_unknown: false, + }); + let result = ty_with_field(obj, "b", Ty::NUMBER, &mut store); + assert_matches!(store.get_data(result), TyData::Object(obj) => { + let mut field_names: Vec<_> = obj.fields.iter().map(|(n, _)| n.as_str()).collect(); + field_names.sort_unstable(); + assert_eq!(field_names, vec!["a", "b"]); + }); + } + + #[test] + fn test_object_narrows_existing_field() { + let mut store = TyStore::new(); + let obj = store.object(ObjectData { + fields: vec![( + "a".to_string(), + FieldDefInterned { + ty: Ty::ANY, + required: false, + visibility: FieldVis::Normal, + }, + )], + has_unknown: false, + }); + let result = ty_with_field(obj, "a", Ty::NUMBER, &mut store); + assert_matches!(store.get_data(result), TyData::Object(obj) => { + assert_eq!(obj.fields[0].1.ty, Ty::NUMBER); + assert!(obj.fields[0].1.required); + }); + } + + #[test] + fn test_number_never() { + let mut store = TyStore::new(); + assert_eq!( + ty_with_field(Ty::NUMBER, "foo", Ty::STRING, &mut store), + Ty::NEVER + ); + } + } +} diff --git a/crates/jrsonnet-lsp-types/src/operations/logic/intersection.rs b/crates/jrsonnet-lsp-types/src/operations/logic/intersection.rs new file mode 100644 index 00000000..5269c6d0 --- /dev/null +++ b/crates/jrsonnet-lsp-types/src/operations/logic/intersection.rs @@ -0,0 +1,431 @@ +use crate::store::{FieldDefInterned, ObjectData, Ty, TyData, TypeStoreOps}; + +/// Compute the intersection of two types (type narrowing). +/// +/// Returns the most specific type that satisfies both constraints. +/// This is the logical AND of types - values must satisfy both. 
+/// +/// # Examples +/// +/// - `ty_and(Any, Number)` → `Number` +/// - `ty_and(Number, String)` → `Never` (no value is both) +/// - `ty_and(Bool, True)` → `True` +/// - `ty_and(Number | String, Number)` → `Number` +/// +/// # Distribution over Unions +/// +/// This operation distributes over unions: +/// `(A | B) & C = (A & C) | (B & C)` +pub fn ty_and(lhs: Ty, rhs: Ty, store: &mut S) -> Ty { + // Fast paths for special types + if lhs == Ty::NEVER || rhs == Ty::NEVER { + return Ty::NEVER; + } + if lhs == Ty::ANY { + return rhs; + } + if rhs == Ty::ANY { + return lhs; + } + if lhs == rhs { + return lhs; + } + + let lhs_data = store.get_data(lhs); + let rhs_data = store.get_data(rhs); + + // Handle unions: distribute (A | B) & C = (A & C) | (B & C) + if let TyData::Union(types) = lhs_data { + let narrowed: Vec = types + .iter() + .map(|&t| ty_and(t, rhs, store)) + .filter(|&t| t != Ty::NEVER) + .collect(); + return store.union(narrowed); + } + if let TyData::Union(types) = rhs_data { + let narrowed: Vec = types + .iter() + .map(|&t| ty_and(lhs, t, store)) + .filter(|&t| t != Ty::NEVER) + .collect(); + return store.union(narrowed); + } + + // Refresh data after potential recursion + let lhs_data = store.get_data(lhs); + let rhs_data = store.get_data(rhs); + + // Handle literal/subtype relationships + match (&lhs_data, &rhs_data) { + // Bool and its literals + (TyData::Bool, TyData::True) | (TyData::True, TyData::Bool) => return Ty::TRUE, + (TyData::Bool, TyData::False) | (TyData::False, TyData::Bool) => return Ty::FALSE, + + // String and Char + (TyData::String, TyData::Char) | (TyData::Char, TyData::String) => return Ty::CHAR, + + // String and LiteralString + (TyData::String, TyData::LiteralString(s)) | (TyData::LiteralString(s), TyData::String) => { + return store.literal_string(s.clone()); + } + + // LiteralString with same value + (TyData::LiteralString(s1), TyData::LiteralString(s2)) => { + if s1 == s2 { + return store.literal_string(s1.clone()); + } + return 
Ty::NEVER; + } + + // Char and LiteralString of length 1 + (TyData::Char, TyData::LiteralString(s)) | (TyData::LiteralString(s), TyData::Char) => { + if s.chars().count() == 1 { + return store.literal_string(s.clone()); + } + return Ty::NEVER; + } + + _ => {} + } + + // Handle arrays (preserve is_set if both are sets) + if let ( + TyData::Array { + elem: e1, + is_set: s1, + }, + TyData::Array { + elem: e2, + is_set: s2, + }, + ) = (&lhs_data, &rhs_data) + { + let elem = ty_and(*e1, *e2, store); + if elem == Ty::NEVER { + return Ty::NEVER; + } + // Result is a set only if both inputs are sets + if *s1 && *s2 { + return store.array_set(elem); + } + return store.array(elem); + } + + // Handle tuples with arrays + if let (TyData::Tuple { elems }, TyData::Array { elem: arr_elem, .. }) = (&lhs_data, &rhs_data) + { + let narrowed: Vec = elems.iter().map(|&e| ty_and(e, *arr_elem, store)).collect(); + if narrowed.contains(&Ty::NEVER) { + return Ty::NEVER; + } + return store.tuple(narrowed); + } + if let (TyData::Array { elem: arr_elem, .. 
}, TyData::Tuple { elems }) = (&lhs_data, &rhs_data) + { + let narrowed: Vec = elems.iter().map(|&e| ty_and(*arr_elem, e, store)).collect(); + if narrowed.contains(&Ty::NEVER) { + return Ty::NEVER; + } + return store.tuple(narrowed); + } + + // Handle tuples with tuples + if let (TyData::Tuple { elems: e1 }, TyData::Tuple { elems: e2 }) = (&lhs_data, &rhs_data) { + if e1.len() != e2.len() { + return Ty::NEVER; + } + let narrowed: Vec = e1 + .iter() + .zip(e2.iter()) + .map(|(&a, &b)| ty_and(a, b, store)) + .collect(); + if narrowed.contains(&Ty::NEVER) { + return Ty::NEVER; + } + return store.tuple(narrowed); + } + + // Handle objects + if let (TyData::Object(obj1), TyData::Object(obj2)) = (&lhs_data, &rhs_data) { + let mut fields = obj1.fields.clone(); + for (name, def2) in &obj2.fields { + if let Some((_, def1)) = fields.iter_mut().find(|(n, _)| n == name) { + let narrowed_ty = ty_and(def1.ty, def2.ty, store); + *def1 = FieldDefInterned { + ty: narrowed_ty, + required: def1.required || def2.required, + visibility: def1.visibility, + }; + } else { + fields.push((name.clone(), def2.clone())); + } + } + let has_unknown = obj1.has_unknown && obj2.has_unknown; + return store.object(ObjectData { + fields, + has_unknown, + }); + } + + // Handle AttrsOf + if let (TyData::AttrsOf { value: v1 }, TyData::AttrsOf { value: v2 }) = (&lhs_data, &rhs_data) { + let elem = ty_and(*v1, *v2, store); + if elem == Ty::NEVER { + return Ty::NEVER; + } + return store.attrs_of(elem); + } + + // Handle object + AttrsOf + if let (TyData::Object(obj), TyData::AttrsOf { value }) = (&lhs_data, &rhs_data) { + let fields: Vec<_> = obj + .fields + .iter() + .map(|(name, field)| { + let narrowed_ty = ty_and(field.ty, *value, store); + ( + name.clone(), + FieldDefInterned { + ty: narrowed_ty, + required: field.required, + visibility: field.visibility, + }, + ) + }) + .collect(); + return store.object(ObjectData { + fields, + has_unknown: obj.has_unknown, + }); + } + if let (TyData::AttrsOf { value 
}, TyData::Object(obj)) = (&lhs_data, &rhs_data) { + let fields: Vec<_> = obj + .fields + .iter() + .map(|(name, field)| { + let narrowed_ty = ty_and(*value, field.ty, store); + ( + name.clone(), + FieldDefInterned { + ty: narrowed_ty, + required: field.required, + visibility: field.visibility, + }, + ) + }) + .collect(); + return store.object(ObjectData { + fields, + has_unknown: obj.has_unknown, + }); + } + + // Handle BoundedNumber + if let (TyData::Number, TyData::BoundedNumber(bounds)) + | (TyData::BoundedNumber(bounds), TyData::Number) = (&lhs_data, &rhs_data) + { + return store.bounded_number(*bounds); + } + if let (TyData::BoundedNumber(b1), TyData::BoundedNumber(b2)) = (&lhs_data, &rhs_data) { + // Intersection of bounds: take stricter bounds + let min = match (b1.min_f64(), b2.min_f64()) { + (Some(a), Some(b)) => Some(a.max(b)), + (Some(a), None) => Some(a), + (None, Some(b)) => Some(b), + (None, None) => None, + }; + let max = match (b1.max_f64(), b2.max_f64()) { + (Some(a), Some(b)) => Some(a.min(b)), + (Some(a), None) => Some(a), + (None, Some(b)) => Some(b), + (None, None) => None, + }; + // Check for empty range + if let (Some(lo), Some(hi)) = (min, max) { + if lo > hi { + return Ty::NEVER; + } + } + return store.bounded_number(crate::store::NumBounds { + min: min.map(f64::to_bits), + max: max.map(f64::to_bits), + }); + } + + // Handle Sum (intersection) types + if let TyData::Sum(types) = lhs_data { + let narrowed: Vec = types.iter().map(|&t| ty_and(t, rhs, store)).collect(); + if narrowed.contains(&Ty::NEVER) { + return Ty::NEVER; + } + return store.sum(narrowed); + } + if let TyData::Sum(types) = rhs_data { + let narrowed: Vec = types.iter().map(|&t| ty_and(lhs, t, store)).collect(); + if narrowed.contains(&Ty::NEVER) { + return Ty::NEVER; + } + return store.sum(narrowed); + } + + // Handle TypeVar - keep it, may be resolved later + if matches!(lhs_data, TyData::TypeVar { .. }) || matches!(rhs_data, TyData::TypeVar { .. 
}) { + return store.sum(vec![lhs, rhs]); + } + + // Different incompatible concrete types have no intersection + Ty::NEVER +} + +#[cfg(test)] +mod tests { + use assert_matches::assert_matches; + + use super::*; + use crate::{store::FieldVis, TyStore}; + + mod ty_and_tests { + use super::*; + + #[test] + fn test_any_narrows_to_constraint() { + let mut store = TyStore::new(); + assert_eq!(ty_and(Ty::ANY, Ty::NUMBER, &mut store), Ty::NUMBER); + assert_eq!(ty_and(Ty::NUMBER, Ty::ANY, &mut store), Ty::NUMBER); + } + + #[test] + fn test_never_always_never() { + let mut store = TyStore::new(); + assert_eq!(ty_and(Ty::NEVER, Ty::NUMBER, &mut store), Ty::NEVER); + assert_eq!(ty_and(Ty::NUMBER, Ty::NEVER, &mut store), Ty::NEVER); + } + + #[test] + fn test_same_type_returns_same() { + let mut store = TyStore::new(); + assert_eq!(ty_and(Ty::NUMBER, Ty::NUMBER, &mut store), Ty::NUMBER); + assert_eq!(ty_and(Ty::STRING, Ty::STRING, &mut store), Ty::STRING); + } + + #[test] + fn test_incompatible_types_never() { + let mut store = TyStore::new(); + assert_eq!(ty_and(Ty::NUMBER, Ty::STRING, &mut store), Ty::NEVER); + assert_eq!(ty_and(Ty::BOOL, Ty::NUMBER, &mut store), Ty::NEVER); + } + + #[test] + fn test_bool_narrows_to_literal() { + let mut store = TyStore::new(); + assert_eq!(ty_and(Ty::BOOL, Ty::TRUE, &mut store), Ty::TRUE); + assert_eq!(ty_and(Ty::BOOL, Ty::FALSE, &mut store), Ty::FALSE); + assert_eq!(ty_and(Ty::TRUE, Ty::BOOL, &mut store), Ty::TRUE); + assert_eq!(ty_and(Ty::FALSE, Ty::BOOL, &mut store), Ty::FALSE); + } + + #[test] + fn test_string_narrows_to_char() { + let mut store = TyStore::new(); + assert_eq!(ty_and(Ty::STRING, Ty::CHAR, &mut store), Ty::CHAR); + assert_eq!(ty_and(Ty::CHAR, Ty::STRING, &mut store), Ty::CHAR); + } + + #[test] + fn test_union_distributes() { + let mut store = TyStore::new(); + let union = store.union(vec![Ty::NUMBER, Ty::STRING]); + // (Number | String) & Number = Number + assert_eq!(ty_and(union, Ty::NUMBER, &mut store), Ty::NUMBER); + // 
Number & (Number | String) = Number + assert_eq!(ty_and(Ty::NUMBER, union, &mut store), Ty::NUMBER); + } + + #[test] + fn test_union_with_incompatible_gives_partial() { + let mut store = TyStore::new(); + let union = store.union(vec![Ty::NUMBER, Ty::STRING]); + // (Number | String) & Bool = Never (both incompatible) + assert_eq!(ty_and(union, Ty::BOOL, &mut store), Ty::NEVER); + } + + #[test] + fn test_array_intersection() { + let mut store = TyStore::new(); + let arr_num = store.array(Ty::NUMBER); + let arr_any = store.array(Ty::ANY); + // Array & Array = Array + let result = ty_and(arr_num, arr_any, &mut store); + assert!( + matches!(store.get_data(result), TyData::Array { elem, .. } if elem == Ty::NUMBER) + ); + } + + #[test] + fn test_tuple_intersection_same_length() { + let mut store = TyStore::new(); + let tuple1 = store.tuple(vec![Ty::NUMBER, Ty::ANY]); + let tuple2 = store.tuple(vec![Ty::ANY, Ty::STRING]); + let result = ty_and(tuple1, tuple2, &mut store); + assert_matches!(store.get_data(result), TyData::Tuple { elems } => { + assert_eq!(elems, vec![Ty::NUMBER, Ty::STRING]); + }); + } + + #[test] + fn test_tuple_intersection_different_length_never() { + let mut store = TyStore::new(); + let tuple1 = store.tuple(vec![Ty::NUMBER, Ty::STRING]); + let tuple2 = store.tuple(vec![Ty::NUMBER]); + assert_eq!(ty_and(tuple1, tuple2, &mut store), Ty::NEVER); + } + + #[test] + fn test_object_intersection_merges_fields() { + let mut store = TyStore::new(); + let obj1 = store.object(ObjectData { + fields: vec![( + "a".to_string(), + FieldDefInterned { + ty: Ty::NUMBER, + required: true, + visibility: FieldVis::Normal, + }, + )], + has_unknown: false, + }); + let obj2 = store.object(ObjectData { + fields: vec![( + "b".to_string(), + FieldDefInterned { + ty: Ty::STRING, + required: true, + visibility: FieldVis::Normal, + }, + )], + has_unknown: false, + }); + let result = ty_and(obj1, obj2, &mut store); + assert_matches!(store.get_data(result), TyData::Object(obj) => { + 
// Should have both fields "a" and "b" + let mut field_names: Vec<_> = obj.fields.iter().map(|(n, _)| n.as_str()).collect(); + field_names.sort_unstable(); + assert_eq!(field_names, vec!["a", "b"]); + }); + } + + #[test] + fn test_bounded_number_intersection() { + let mut store = TyStore::new(); + let bounded1 = store.bounded_number(crate::store::NumBounds::at_least(0.0)); + let bounded2 = store.bounded_number(crate::store::NumBounds::between(-10.0, 10.0)); + let result = ty_and(bounded1, bounded2, &mut store); + // Should get [0..10] + assert_matches!(store.get_data(result), TyData::BoundedNumber(bounds) => { + assert_eq!(bounds.min_f64(), Some(0.0)); + assert_eq!(bounds.max_f64(), Some(10.0)); + }); + } + } +} diff --git a/crates/jrsonnet-lsp-types/src/operations/logic/length.rs b/crates/jrsonnet-lsp-types/src/operations/logic/length.rs new file mode 100644 index 00000000..12fc8021 --- /dev/null +++ b/crates/jrsonnet-lsp-types/src/operations/logic/length.rs @@ -0,0 +1,399 @@ +use crate::store::{ObjectData, Ty, TyData, TypeStoreOps}; + +/// Narrow a type to one with a specific length. +/// +/// This is useful for narrowing based on `std.length(x) == n` conditions. +/// +/// # Behavior +/// +/// - Arrays become tuples with `n` elements of the same element type +/// - Tuples must have exactly `n` elements (otherwise `Never`) +/// - Strings with length 1 become `Char` +/// - Objects must have exactly `n` fields (if closed) or at least `n` (if open) +/// - Primitives like Number/Bool return `Never` (they don't have length) +/// +/// # Examples +/// +/// - `ty_with_len(Array, 3)` → `[Number, Number, Number]` +/// - `ty_with_len(String, 1)` → `Char` +/// - `ty_with_len([Number, String], 2)` → `[Number, String]` +/// - `ty_with_len([Number, String], 3)` → `Never` +pub fn ty_with_len(ty: Ty, len: usize, store: &mut S) -> Ty { + let data = store.get_data(ty); + match data { + TyData::Any => Ty::ANY, + + TyData::Array { elem, .. 
} => { + let elems = vec![elem; len]; + store.tuple(elems) + } + + TyData::Tuple { elems } => { + if elems.len() == len { + ty + } else { + Ty::NEVER + } + } + + TyData::Object(obj) => { + match (obj.fields.len().cmp(&len), obj.has_unknown) { + // Exactly right number of fields + (std::cmp::Ordering::Equal, false) => ty, + // Open object with fewer fields - close it at this length + (std::cmp::Ordering::Less | std::cmp::Ordering::Equal, true) => { + if obj.fields.len() == len { + store.object(ObjectData { + fields: obj.fields, + has_unknown: false, + }) + } else { + ty // Can have unknown fields to reach the length + } + } + // Too few fields in closed object, or too many fields + (std::cmp::Ordering::Less, false) | (std::cmp::Ordering::Greater, _) => Ty::NEVER, + } + } + + TyData::Function(func) => { + if func.variadic && func.params.is_empty() { + let params = (0..len) + .map(|idx| crate::store::ParamInterned { + name: format!("arg{idx}"), + ty: Ty::ANY, + has_default: false, + }) + .collect(); + store.function(crate::store::FunctionData { + params, + return_spec: crate::store::ReturnSpec::Fixed(Ty::ANY), + variadic: false, + }) + } else if func.required_count() == len { + ty + } else { + Ty::NEVER + } + } + + TyData::AttrsOf { .. } | TyData::TypeVar { .. 
} => ty, + + TyData::String => { + if len == 1 { + Ty::CHAR + } else { + ty // String can be any length + } + } + + TyData::LiteralString(s) => { + if s.chars().count() == len { + ty + } else { + Ty::NEVER + } + } + + TyData::Char => { + if len == 1 { + ty + } else { + Ty::NEVER + } + } + + TyData::Never + | TyData::Null + | TyData::Bool + | TyData::True + | TyData::False + | TyData::Number + | TyData::BoundedNumber(_) => Ty::NEVER, + + TyData::Union(types) => { + let narrowed: Vec = types + .iter() + .map(|&t| ty_with_len(t, len, store)) + .filter(|&t| t != Ty::NEVER) + .collect(); + store.union(narrowed) + } + + TyData::Sum(types) => { + let narrowed: Vec = types.iter().map(|&t| ty_with_len(t, len, store)).collect(); + if narrowed.contains(&Ty::NEVER) { + Ty::NEVER + } else { + store.sum(narrowed) + } + } + } +} + +/// Narrow a type to one with at least a minimum length. +/// +/// This is useful for narrowing based on `std.length(x) >= n` conditions. +/// +/// # Behavior +/// +/// - Arrays stay arrays (can have any length) +/// - Tuples must have at least `n` elements +/// - Strings stay strings (can have any length) +/// - Literal strings are checked exactly against `n` +/// - Char requires `min <= 1` +/// - Objects with unknown fields stay as-is +/// +/// # Examples +/// +/// - `ty_with_min_len(Array, 3)` → `Array` +/// - `ty_with_min_len([Number, String], 1)` → `[Number, String]` +/// - `ty_with_min_len([Number], 2)` → `Never` +/// - `ty_with_min_len("ok", 3)` → `Never` +/// - `ty_with_min_len(Char, 2)` → `Never` +pub fn ty_with_min_len(ty: Ty, min: usize, store: &mut S) -> Ty { + let data = store.get_data(ty); + match data { + TyData::Any => Ty::ANY, + TyData::Never => Ty::NEVER, + + TyData::Array { .. } + | TyData::Object(_) + | TyData::AttrsOf { .. } + | TyData::String + | TyData::Function(_) + | TyData::TypeVar { .. 
} => ty, + + TyData::LiteralString(s) => { + if s.chars().count() >= min { + ty + } else { + Ty::NEVER + } + } + + TyData::Tuple { elems } => { + if elems.len() >= min { + ty + } else { + Ty::NEVER + } + } + + TyData::Char => { + if min <= 1 { + ty + } else { + Ty::NEVER + } + } + + TyData::Null + | TyData::Bool + | TyData::True + | TyData::False + | TyData::Number + | TyData::BoundedNumber(_) => { + if min == 0 { + ty // Everything has "length >= 0" + } else { + Ty::NEVER + } + } + + TyData::Union(types) => { + let narrowed: Vec = types + .iter() + .map(|&t| ty_with_min_len(t, min, store)) + .filter(|&t| t != Ty::NEVER) + .collect(); + store.union(narrowed) + } + + TyData::Sum(types) => { + let narrowed: Vec = types + .iter() + .map(|&t| ty_with_min_len(t, min, store)) + .collect(); + if narrowed.contains(&Ty::NEVER) { + Ty::NEVER + } else { + store.sum(narrowed) + } + } + } +} + +#[cfg(test)] +mod tests { + use assert_matches::assert_matches; + + use super::*; + use crate::TyStore; + + mod ty_with_len_tests { + use super::*; + + #[test] + fn test_array_to_tuple() { + let mut store = TyStore::new(); + let arr = store.array(Ty::NUMBER); + let result = ty_with_len(arr, 3, &mut store); + assert_matches!(store.get_data(result), TyData::Tuple { elems } => { + assert_eq!(elems, vec![Ty::NUMBER, Ty::NUMBER, Ty::NUMBER]); + }); + } + + #[test] + fn test_tuple_matching_length() { + let mut store = TyStore::new(); + let tuple = store.tuple(vec![Ty::NUMBER, Ty::STRING]); + assert_eq!(ty_with_len(tuple, 2, &mut store), tuple); + } + + #[test] + fn test_tuple_wrong_length_never() { + let mut store = TyStore::new(); + let tuple = store.tuple(vec![Ty::NUMBER, Ty::STRING]); + assert_eq!(ty_with_len(tuple, 3, &mut store), Ty::NEVER); + } + + #[test] + fn test_string_len_1_to_char() { + let mut store = TyStore::new(); + assert_eq!(ty_with_len(Ty::STRING, 1, &mut store), Ty::CHAR); + } + + #[test] + fn test_char_len_1_ok() { + let mut store = TyStore::new(); + 
assert_eq!(ty_with_len(Ty::CHAR, 1, &mut store), Ty::CHAR); + } + + #[test] + fn test_char_len_not_1_never() { + let mut store = TyStore::new(); + assert_eq!(ty_with_len(Ty::CHAR, 0, &mut store), Ty::NEVER); + assert_eq!(ty_with_len(Ty::CHAR, 2, &mut store), Ty::NEVER); + } + + #[test] + fn test_number_never() { + let mut store = TyStore::new(); + assert_eq!(ty_with_len(Ty::NUMBER, 5, &mut store), Ty::NEVER); + } + + #[test] + fn test_literal_string_matching_len() { + let mut store = TyStore::new(); + let lit = store.literal_string("hello".to_string()); + assert_eq!(ty_with_len(lit, 5, &mut store), lit); + } + + #[test] + fn test_literal_string_wrong_len_never() { + let mut store = TyStore::new(); + let lit = store.literal_string("hello".to_string()); + assert_eq!(ty_with_len(lit, 3, &mut store), Ty::NEVER); + } + + #[test] + fn test_union_filters() { + let mut store = TyStore::new(); + let tuple2 = store.tuple(vec![Ty::NUMBER, Ty::NUMBER]); + let tuple3 = store.tuple(vec![Ty::NUMBER, Ty::NUMBER, Ty::NUMBER]); + let union = store.union(vec![tuple2, tuple3]); + assert_eq!(ty_with_len(union, 2, &mut store), tuple2); + } + + #[test] + fn test_function_matches_required_arity() { + let mut store = TyStore::new(); + let func = store.function(crate::store::FunctionData { + params: vec![ + crate::store::ParamInterned { + name: "x".to_string(), + ty: Ty::ANY, + has_default: false, + }, + crate::store::ParamInterned { + name: "y".to_string(), + ty: Ty::ANY, + has_default: true, + }, + ], + return_spec: crate::store::ReturnSpec::Fixed(Ty::NUMBER), + variadic: false, + }); + assert_eq!(ty_with_len(func, 1, &mut store), func); + assert_eq!(ty_with_len(func, 2, &mut store), Ty::NEVER); + } + + #[test] + fn test_function_any_narrows_to_exact_arity() { + let mut store = TyStore::new(); + let func = store.function_any(); + let narrowed = ty_with_len(func, 2, &mut store); + assert_matches!(store.get_data(narrowed), TyData::Function(func_data) => { + assert!(!func_data.variadic); + 
assert_eq!(func_data.params.len(), 2); + assert_eq!(func_data.params[0].name, "arg0"); + assert_eq!(func_data.params[1].name, "arg1"); + assert!(func_data.params.iter().all(|p| p.ty == Ty::ANY)); + assert!(func_data.params.iter().all(|p| !p.has_default)); + assert_eq!(func_data.return_spec, crate::store::ReturnSpec::Fixed(Ty::ANY)); + }); + } + } + + mod ty_with_min_len_tests { + use super::*; + + #[test] + fn test_array_unchanged() { + let mut store = TyStore::new(); + let arr = store.array(Ty::NUMBER); + assert_eq!(ty_with_min_len(arr, 5, &mut store), arr); + } + + #[test] + fn test_tuple_satisfies_min() { + let mut store = TyStore::new(); + let tuple = store.tuple(vec![Ty::NUMBER, Ty::STRING, Ty::BOOL]); + assert_eq!(ty_with_min_len(tuple, 2, &mut store), tuple); + } + + #[test] + fn test_tuple_too_short_never() { + let mut store = TyStore::new(); + let tuple = store.tuple(vec![Ty::NUMBER]); + assert_eq!(ty_with_min_len(tuple, 2, &mut store), Ty::NEVER); + } + + #[test] + fn test_char_min_1_ok() { + let mut store = TyStore::new(); + assert_eq!(ty_with_min_len(Ty::CHAR, 1, &mut store), Ty::CHAR); + } + + #[test] + fn test_char_min_2_never() { + let mut store = TyStore::new(); + assert_eq!(ty_with_min_len(Ty::CHAR, 2, &mut store), Ty::NEVER); + } + + #[test] + fn test_literal_string_meets_min() { + let mut store = TyStore::new(); + let literal = store.literal_string("hello".to_string()); + assert_eq!(ty_with_min_len(literal, 3, &mut store), literal); + } + + #[test] + fn test_literal_string_too_short() { + let mut store = TyStore::new(); + let literal = store.literal_string("hi".to_string()); + assert_eq!(ty_with_min_len(literal, 3, &mut store), Ty::NEVER); + } + } +} From 1c9aed6ec75b80fefdc9e6536ccedcc362f210bb Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Mon, 16 Feb 2026 14:39:37 +0000 Subject: [PATCH 119/210] refactor(lsp): extract server workspace indexing module Move workspace root normalization and startup indexing helpers out of `server.rs` into 
`server/workspace_index.rs`. This narrows `server.rs` responsibility while preserving behavior and keeping initialization flow unchanged. --- crates/jrsonnet-lsp/src/server.rs | 126 +--------------- .../src/server/workspace_index.rs | 134 ++++++++++++++++++ 2 files changed, 136 insertions(+), 124 deletions(-) create mode 100644 crates/jrsonnet-lsp/src/server/workspace_index.rs diff --git a/crates/jrsonnet-lsp/src/server.rs b/crates/jrsonnet-lsp/src/server.rs index 619e1d50..2dc43d51 100644 --- a/crates/jrsonnet-lsp/src/server.rs +++ b/crates/jrsonnet-lsp/src/server.rs @@ -9,11 +9,12 @@ mod notifications; mod request_dispatch; mod requests; mod watched_files; +mod workspace_index; use std::{ collections::BTreeSet, panic::{catch_unwind, AssertUnwindSafe}, - path::{Path, PathBuf}, + path::PathBuf, sync::Arc, }; @@ -303,129 +304,6 @@ impl Server { Ok(()) } - fn schedule_workspace_index_bootstrap(&self, roots: Vec) { - if roots.is_empty() { - debug!("No workspace roots provided; skipping startup index bootstrap"); - return; - } - - let documents = Arc::clone(&self.documents); - let import_graph = Arc::clone(&self.import_graph); - let config = Arc::clone(&self.config); - rayon::spawn(move || { - let mut files = Vec::new(); - for root in &roots { - files.extend(Self::collect_workspace_files(root)); - } - files.sort_by(|a, b| a.as_path().cmp(b.as_path())); - files.dedup(); - - let file_count = files.len(); - for path in &files { - let file = documents.intern(path); - Self::update_import_graph_for_file(&documents, &import_graph, &config, file); - } - - info!( - "Startup workspace index bootstrap complete: indexed {} files across {} roots", - file_count, - roots.len() - ); - }); - } - - fn workspace_root_paths(init_roots: &InitializeRoots) -> Vec { - let mut roots = Vec::new(); - - if let Some(workspace_folders) = &init_roots.workspace_folders { - for folder in workspace_folders { - if let Ok(path) = CanonicalPath::from_uri(&folder.uri) { - 
roots.push(path.as_path().to_path_buf()); - } - } - } - - if let Some(root_uri) = &init_roots.root_uri { - if let Ok(path) = CanonicalPath::from_uri(root_uri) { - roots.push(path.as_path().to_path_buf()); - } - } - - if let Some(root_path) = &init_roots.root_path { - roots.push(PathBuf::from(root_path)); - } - - let mut normalized_roots = roots - .into_iter() - .filter_map(Self::normalize_workspace_root) - .collect::>(); - normalized_roots.sort(); - normalized_roots.dedup(); - normalized_roots - } - - fn normalize_workspace_root(path: PathBuf) -> Option { - let path = path.canonicalize().unwrap_or(path); - if path.is_dir() { - return Some(path); - } - if path.is_file() { - return path.parent().map(Path::to_path_buf); - } - None - } - - fn collect_workspace_files(root: &Path) -> Vec { - let mut files = Vec::new(); - let mut to_visit = vec![root.to_path_buf()]; - - while let Some(dir) = to_visit.pop() { - let Ok(entries) = std::fs::read_dir(&dir) else { - continue; - }; - - for entry in entries.flatten() { - let path = entry.path(); - let Ok(file_type) = entry.file_type() else { - continue; - }; - - if file_type.is_dir() { - if Self::should_skip_workspace_dir(&path) { - continue; - } - to_visit.push(path); - continue; - } - - if !file_type.is_file() || !Self::is_indexed_workspace_file(&path) { - continue; - } - - match CanonicalPath::try_from_path(&path) { - Ok(path) => files.push(path), - Err(err) => warn!("Skipping workspace file {}: {err}", path.to_string_lossy()), - } - } - } - - files - } - - fn should_skip_workspace_dir(path: &Path) -> bool { - let Some(name) = path.file_name().and_then(|name| name.to_str()) else { - return false; - }; - matches!(name, ".git" | ".jj" | ".svn" | "node_modules" | "target") - } - - fn is_indexed_workspace_file(path: &Path) -> bool { - let Some(extension) = path.extension().and_then(|extension| extension.to_str()) else { - return false; - }; - matches!(extension, "jsonnet" | "libsonnet" | "json") - } - /// Handle the initialize 
request. fn initialize(&self) -> Result<(RequestId, InitializeParams, InitializeRoots)> { let msg = self diff --git a/crates/jrsonnet-lsp/src/server/workspace_index.rs b/crates/jrsonnet-lsp/src/server/workspace_index.rs new file mode 100644 index 00000000..7ff24771 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/workspace_index.rs @@ -0,0 +1,134 @@ +use std::{ + path::{Path, PathBuf}, + sync::Arc, +}; + +use jrsonnet_lsp_document::CanonicalPath; +use tracing::{debug, info, warn}; + +use super::{InitializeRoots, Server}; + +impl Server { + pub(super) fn schedule_workspace_index_bootstrap(&self, roots: Vec) { + if roots.is_empty() { + debug!("No workspace roots provided; skipping startup index bootstrap"); + return; + } + + let documents = Arc::clone(&self.documents); + let import_graph = Arc::clone(&self.import_graph); + let config = Arc::clone(&self.config); + rayon::spawn(move || { + let mut files = Vec::new(); + for root in &roots { + files.extend(Self::collect_workspace_files(root)); + } + files.sort_by(|a, b| a.as_path().cmp(b.as_path())); + files.dedup(); + + let file_count = files.len(); + for path in &files { + let file = documents.intern(path); + Self::update_import_graph_for_file(&documents, &import_graph, &config, file); + } + + info!( + "Startup workspace index bootstrap complete: indexed {} files across {} roots", + file_count, + roots.len() + ); + }); + } + + pub(super) fn workspace_root_paths(init_roots: &InitializeRoots) -> Vec { + let mut roots = Vec::new(); + + if let Some(workspace_folders) = &init_roots.workspace_folders { + for folder in workspace_folders { + if let Ok(path) = CanonicalPath::from_uri(&folder.uri) { + roots.push(path.as_path().to_path_buf()); + } + } + } + + if let Some(root_uri) = &init_roots.root_uri { + if let Ok(path) = CanonicalPath::from_uri(root_uri) { + roots.push(path.as_path().to_path_buf()); + } + } + + if let Some(root_path) = &init_roots.root_path { + roots.push(PathBuf::from(root_path)); + } + + let mut 
normalized_roots = roots + .into_iter() + .filter_map(Self::normalize_workspace_root) + .collect::>(); + normalized_roots.sort(); + normalized_roots.dedup(); + normalized_roots + } + + fn normalize_workspace_root(path: PathBuf) -> Option { + let path = path.canonicalize().unwrap_or(path); + if path.is_dir() { + return Some(path); + } + if path.is_file() { + return path.parent().map(Path::to_path_buf); + } + None + } + + fn collect_workspace_files(root: &Path) -> Vec { + let mut files = Vec::new(); + let mut to_visit = vec![root.to_path_buf()]; + + while let Some(dir) = to_visit.pop() { + let Ok(entries) = std::fs::read_dir(&dir) else { + continue; + }; + + for entry in entries.flatten() { + let path = entry.path(); + let Ok(file_type) = entry.file_type() else { + continue; + }; + + if file_type.is_dir() { + if Self::should_skip_workspace_dir(&path) { + continue; + } + to_visit.push(path); + continue; + } + + if !file_type.is_file() || !Self::is_indexed_workspace_file(&path) { + continue; + } + + match CanonicalPath::try_from_path(&path) { + Ok(path) => files.push(path), + Err(err) => warn!("Skipping workspace file {}: {err}", path.to_string_lossy()), + } + } + } + + files + } + + fn should_skip_workspace_dir(path: &Path) -> bool { + let Some(name) = path.file_name().and_then(|name| name.to_str()) else { + return false; + }; + matches!(name, ".git" | ".jj" | ".svn" | "node_modules" | "target") + } + + fn is_indexed_workspace_file(path: &Path) -> bool { + let Some(extension) = path.extension().and_then(|extension| extension.to_str()) else { + return false; + }; + matches!(extension, "jsonnet" | "libsonnet" | "json") + } +} From ba19c476e967c41871a3e5edb2a431e152e58a9c Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Mon, 16 Feb 2026 14:41:54 +0000 Subject: [PATCH 120/210] refactor(lsp): extract server initialization module Move initialize request parsing and capability construction out of `server.rs` into `server/initialization.rs`. 
This keeps startup concerns localized and reduces top-level server module size without changing runtime behavior. --- crates/jrsonnet-lsp/src/server.rs | 111 +---------------- .../jrsonnet-lsp/src/server/initialization.rs | 117 ++++++++++++++++++ 2 files changed, 121 insertions(+), 107 deletions(-) create mode 100644 crates/jrsonnet-lsp/src/server/initialization.rs diff --git a/crates/jrsonnet-lsp/src/server.rs b/crates/jrsonnet-lsp/src/server.rs index 2dc43d51..8b55b423 100644 --- a/crates/jrsonnet-lsp/src/server.rs +++ b/crates/jrsonnet-lsp/src/server.rs @@ -5,6 +5,7 @@ mod async_requests; mod import_graph; +mod initialization; mod notifications; mod request_dispatch; mod requests; @@ -18,24 +19,16 @@ use std::{ sync::Arc, }; -use anyhow::{Context, Result}; +use anyhow::Result; use crossbeam_channel::{select, Receiver, Sender}; use jrsonnet_lsp_document::{CanonicalPath, DocVersion, FileId, PathStore}; -use jrsonnet_lsp_handlers as handlers; use jrsonnet_lsp_import::{ImportGraph, ImportResolution}; use jrsonnet_lsp_inference::{ new_shared_cache, DocumentManager, SharedDocumentManager, SharedTypeCache, }; use jrsonnet_lsp_types::GlobalTyStore; use lsp_server::{Connection, Message, Notification, Request, RequestId, Response}; -use lsp_types::{ - notification::PublishDiagnostics, CodeActionKind, CodeActionOptions, - CodeActionProviderCapability, CodeLensOptions, CompletionOptions, ExecuteCommandOptions, - HoverProviderCapability, InitializeParams, InitializeResult, OneOf, SemanticTokensFullOptions, - SemanticTokensOptions, SemanticTokensServerCapabilities, ServerCapabilities, ServerInfo, - SignatureHelpOptions, TextDocumentSyncCapability, TextDocumentSyncKind, - WorkDoneProgressOptions, -}; +use lsp_types::{notification::PublishDiagnostics, InitializeParams, OneOf}; use parking_lot::RwLock; use tracing::{debug, error, info, warn}; @@ -263,13 +256,7 @@ impl Server { debug!("Runtime components initialized"); // Send initialize result - let result = InitializeResult { - 
capabilities: Self::server_capabilities(), - server_info: Some(ServerInfo { - name: "jrsonnet-lsp".to_string(), - version: Some(env!("CARGO_PKG_VERSION").to_string()), - }), - }; + let result = Self::initialize_result(); let result = serde_json::to_value(result)?; self.connection @@ -304,96 +291,6 @@ impl Server { Ok(()) } - /// Handle the initialize request. - fn initialize(&self) -> Result<(RequestId, InitializeParams, InitializeRoots)> { - let msg = self - .connection - .receiver - .recv() - .context("Failed to receive initialize request")?; - - match msg { - Message::Request(req) if req.method == "initialize" => { - let init_roots: InitializeRoots = - serde_json::from_value(req.params.clone()).unwrap_or_default(); - let params: InitializeParams = serde_json::from_value(req.params)?; - info!("Initialize request from: {:?}", params.client_info); - Ok((req.id, params, init_roots)) - } - _ => anyhow::bail!("Expected initialize request, got: {msg:?}"), - } - } - - /// Get the server capabilities. 
- fn server_capabilities() -> ServerCapabilities { - ServerCapabilities { - text_document_sync: Some(TextDocumentSyncCapability::Options( - lsp_types::TextDocumentSyncOptions { - open_close: Some(true), - change: Some(TextDocumentSyncKind::INCREMENTAL), - will_save: None, - will_save_wait_until: None, - save: Some(lsp_types::TextDocumentSyncSaveOptions::Supported(true)), - }, - )), - document_symbol_provider: Some(OneOf::Left(true)), - definition_provider: Some(OneOf::Left(true)), - declaration_provider: Some(lsp_types::DeclarationCapability::Simple(true)), - implementation_provider: Some(lsp_types::ImplementationProviderCapability::Simple( - true, - )), - type_definition_provider: Some(lsp_types::TypeDefinitionProviderCapability::Simple( - true, - )), - hover_provider: Some(HoverProviderCapability::Simple(true)), - document_highlight_provider: Some(OneOf::Left(true)), - inlay_hint_provider: Some(OneOf::Left(true)), - code_action_provider: Some(CodeActionProviderCapability::Options(CodeActionOptions { - code_action_kinds: Some(vec![ - CodeActionKind::QUICKFIX, - CodeActionKind::SOURCE_FIX_ALL, - ]), - work_done_progress_options: WorkDoneProgressOptions::default(), - resolve_provider: Some(false), - })), - completion_provider: Some(CompletionOptions { - trigger_characters: Some(vec![".".to_string()]), - ..Default::default() - }), - signature_help_provider: Some(SignatureHelpOptions { - trigger_characters: Some(vec!["(".to_string(), ",".to_string()]), - retrigger_characters: None, - work_done_progress_options: WorkDoneProgressOptions::default(), - }), - document_formatting_provider: Some(OneOf::Left(true)), - references_provider: Some(OneOf::Left(true)), - workspace_symbol_provider: Some(OneOf::Left(true)), - rename_provider: Some(OneOf::Right(lsp_types::RenameOptions { - prepare_provider: Some(true), - work_done_progress_options: WorkDoneProgressOptions::default(), - })), - semantic_tokens_provider: Some( - 
SemanticTokensServerCapabilities::SemanticTokensOptions(SemanticTokensOptions { - legend: handlers::semantic_tokens_legend(), - full: Some(SemanticTokensFullOptions::Bool(true)), - range: Some(true), - work_done_progress_options: WorkDoneProgressOptions::default(), - }), - ), - execute_command_provider: Some(ExecuteCommandOptions { - commands: SUPPORTED_EXECUTE_COMMANDS - .into_iter() - .map(ToString::to_string) - .collect(), - work_done_progress_options: WorkDoneProgressOptions::default(), - }), - code_lens_provider: Some(CodeLensOptions { - resolve_provider: Some(true), - }), - ..Default::default() - } - } - /// Main message loop. /// /// Uses crossbeam select! to handle both LSP messages and async diagnostics results. diff --git a/crates/jrsonnet-lsp/src/server/initialization.rs b/crates/jrsonnet-lsp/src/server/initialization.rs new file mode 100644 index 00000000..1ec28ffe --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/initialization.rs @@ -0,0 +1,117 @@ +use anyhow::Context; +use jrsonnet_lsp_handlers as handlers; +use lsp_server::{Message, RequestId}; +use lsp_types::{ + CodeActionKind, CodeActionOptions, CodeActionProviderCapability, CodeLensOptions, + CompletionOptions, ExecuteCommandOptions, HoverProviderCapability, InitializeParams, + InitializeResult, OneOf, SemanticTokensFullOptions, SemanticTokensOptions, + SemanticTokensServerCapabilities, ServerCapabilities, ServerInfo, SignatureHelpOptions, + TextDocumentSyncCapability, TextDocumentSyncKind, WorkDoneProgressOptions, +}; +use tracing::info; + +use super::{InitializeRoots, Server, SUPPORTED_EXECUTE_COMMANDS}; + +impl Server { + /// Handle the initialize request. 
+ pub(super) fn initialize( + &self, + ) -> anyhow::Result<(RequestId, InitializeParams, InitializeRoots)> { + let msg = self + .connection + .receiver + .recv() + .context("Failed to receive initialize request")?; + + match msg { + Message::Request(req) if req.method == "initialize" => { + let init_roots: InitializeRoots = + serde_json::from_value(req.params.clone()).unwrap_or_default(); + let params: InitializeParams = serde_json::from_value(req.params)?; + info!("Initialize request from: {:?}", params.client_info); + Ok((req.id, params, init_roots)) + } + _ => anyhow::bail!("Expected initialize request, got: {msg:?}"), + } + } + + /// Get the server capabilities. + pub(super) fn server_capabilities() -> ServerCapabilities { + ServerCapabilities { + text_document_sync: Some(TextDocumentSyncCapability::Options( + lsp_types::TextDocumentSyncOptions { + open_close: Some(true), + change: Some(TextDocumentSyncKind::INCREMENTAL), + will_save: None, + will_save_wait_until: None, + save: Some(lsp_types::TextDocumentSyncSaveOptions::Supported(true)), + }, + )), + document_symbol_provider: Some(OneOf::Left(true)), + definition_provider: Some(OneOf::Left(true)), + declaration_provider: Some(lsp_types::DeclarationCapability::Simple(true)), + implementation_provider: Some(lsp_types::ImplementationProviderCapability::Simple( + true, + )), + type_definition_provider: Some(lsp_types::TypeDefinitionProviderCapability::Simple( + true, + )), + hover_provider: Some(HoverProviderCapability::Simple(true)), + document_highlight_provider: Some(OneOf::Left(true)), + inlay_hint_provider: Some(OneOf::Left(true)), + code_action_provider: Some(CodeActionProviderCapability::Options(CodeActionOptions { + code_action_kinds: Some(vec![ + CodeActionKind::QUICKFIX, + CodeActionKind::SOURCE_FIX_ALL, + ]), + work_done_progress_options: WorkDoneProgressOptions::default(), + resolve_provider: Some(false), + })), + completion_provider: Some(CompletionOptions { + trigger_characters: 
Some(vec![".".to_string()]), + ..Default::default() + }), + signature_help_provider: Some(SignatureHelpOptions { + trigger_characters: Some(vec!["(".to_string(), ",".to_string()]), + retrigger_characters: None, + work_done_progress_options: WorkDoneProgressOptions::default(), + }), + document_formatting_provider: Some(OneOf::Left(true)), + references_provider: Some(OneOf::Left(true)), + workspace_symbol_provider: Some(OneOf::Left(true)), + rename_provider: Some(OneOf::Right(lsp_types::RenameOptions { + prepare_provider: Some(true), + work_done_progress_options: WorkDoneProgressOptions::default(), + })), + semantic_tokens_provider: Some( + SemanticTokensServerCapabilities::SemanticTokensOptions(SemanticTokensOptions { + legend: handlers::semantic_tokens_legend(), + full: Some(SemanticTokensFullOptions::Bool(true)), + range: Some(true), + work_done_progress_options: WorkDoneProgressOptions::default(), + }), + ), + execute_command_provider: Some(ExecuteCommandOptions { + commands: SUPPORTED_EXECUTE_COMMANDS + .into_iter() + .map(ToString::to_string) + .collect(), + work_done_progress_options: WorkDoneProgressOptions::default(), + }), + code_lens_provider: Some(CodeLensOptions { + resolve_provider: Some(true), + }), + ..Default::default() + } + } + + pub(super) fn initialize_result() -> InitializeResult { + InitializeResult { + capabilities: Self::server_capabilities(), + server_info: Some(ServerInfo { + name: "jrsonnet-lsp".to_string(), + version: Some(env!("CARGO_PKG_VERSION").to_string()), + }), + } + } +} From 188de2b46c6d79d6a3429dbfca5a83b7b2cbfd30 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Mon, 16 Feb 2026 14:44:00 +0000 Subject: [PATCH 121/210] refactor(lsp): extract server event loop module Move main message loop, message dispatch, and notification sending out of `server.rs` into `server/event_loop.rs`. This keeps core server setup focused while preserving existing request and diagnostics flow behavior. 
--- crates/jrsonnet-lsp/src/server.rs | 122 +----------------- crates/jrsonnet-lsp/src/server/event_loop.rs | 126 +++++++++++++++++++ 2 files changed, 129 insertions(+), 119 deletions(-) create mode 100644 crates/jrsonnet-lsp/src/server/event_loop.rs diff --git a/crates/jrsonnet-lsp/src/server.rs b/crates/jrsonnet-lsp/src/server.rs index 8b55b423..42477621 100644 --- a/crates/jrsonnet-lsp/src/server.rs +++ b/crates/jrsonnet-lsp/src/server.rs @@ -4,6 +4,7 @@ //! Diagnostics are computed asynchronously with debouncing to avoid blocking the event loop. mod async_requests; +mod event_loop; mod import_graph; mod initialization; mod notifications; @@ -20,14 +21,14 @@ use std::{ }; use anyhow::Result; -use crossbeam_channel::{select, Receiver, Sender}; +use crossbeam_channel::{Receiver, Sender}; use jrsonnet_lsp_document::{CanonicalPath, DocVersion, FileId, PathStore}; use jrsonnet_lsp_import::{ImportGraph, ImportResolution}; use jrsonnet_lsp_inference::{ new_shared_cache, DocumentManager, SharedDocumentManager, SharedTypeCache, }; use jrsonnet_lsp_types::GlobalTyStore; -use lsp_server::{Connection, Message, Notification, Request, RequestId, Response}; +use lsp_server::{Connection, Message, Notification, RequestId, Response}; use lsp_types::{notification::PublishDiagnostics, InitializeParams, OneOf}; use parking_lot::RwLock; use tracing::{debug, error, info, warn}; @@ -290,123 +291,6 @@ impl Server { info!("Server shutting down"); Ok(()) } - - /// Main message loop. - /// - /// Uses crossbeam select! to handle both LSP messages and async diagnostics results. - fn main_loop(&mut self) -> Result<()> { - loop { - // Use select! to wait on either LSP messages or diagnostics results. - // We receive first and then process to avoid borrow conflicts. 
- enum SelectResult { - LspMessage(Result), - DiagnosticsResult( - Result< - crate::async_diagnostics::DiagnosticsResult, - crossbeam_channel::RecvError, - >, - ), - AsyncRequestResponse(Result), - } - - let result = { - let lsp_receiver = &self.connection.receiver; - let diag_receiver = self.diagnostics.results(); - let request_receiver = &self.request_response_receiver; - - select! { - recv(lsp_receiver) -> msg => SelectResult::LspMessage(msg), - recv(diag_receiver) -> result => SelectResult::DiagnosticsResult(result), - recv(request_receiver) -> response => SelectResult::AsyncRequestResponse(response), - } - }; - - match result { - SelectResult::LspMessage(Ok(msg)) => { - if self.handle_message(msg)? { - break; // Exit notification received - } - } - SelectResult::LspMessage(Err(e)) => { - error!("Error receiving message: {}", e); - break; - } - SelectResult::DiagnosticsResult(Ok(result)) => { - // Send the completed diagnostics to the client - self.send_notification::(result.params)?; - } - SelectResult::DiagnosticsResult(Err(_)) => { - // Diagnostics channel closed, that's fine - debug!("Diagnostics channel closed"); - } - SelectResult::AsyncRequestResponse(Ok(response)) => { - if !self.inflight_requests.send_inflight_response(response)? { - debug!("Dropping async response for non-pending request"); - } - } - SelectResult::AsyncRequestResponse(Err(_)) => { - debug!("Async request response channel closed"); - } - } - } - - Ok(()) - } - - /// Handle a single LSP message. - /// - /// Returns true if exit notification was received. - fn handle_message(&mut self, msg: Message) -> Result { - match msg { - Message::Request(req) => { - if self.shutdown_requested { - // After shutdown, only respond with errors. - let Request { id, method, .. 
} = req; - let request = self.inflight_requests.begin_unknown(id, method.as_str()); - let _ = self.inflight_requests.send_unknown_err( - request, - lsp_server::ErrorCode::InvalidRequest, - "Server is shutting down", - )?; - } else { - self.handle_request(req)?; - } - Ok(false) - } - Message::Response(resp) => { - if let Some(meta) = self.inflight_requests.complete_outgoing(resp.id.clone()) { - if let Some(error) = &resp.error { - warn!( - "Outgoing request {} ({}) failed: {} ({})", - resp.id, meta.method, error.message, error.code - ); - } else { - debug!( - "Received response for outgoing request {} ({})", - resp.id, meta.method - ); - } - } else { - debug!("Received untracked response: {:?}", resp.id); - } - Ok(false) - } - Message::Notification(notif) => self.handle_notification(notif), - } - } -} - -impl Server { - /// Send a notification to the client. - fn send_notification( - &self, - params: N::Params, - ) -> Result<()> { - let params = serde_json::to_value(params)?; - let notif = Notification::new(N::METHOD.to_string(), params); - self.connection.sender.send(Message::Notification(notif))?; - Ok(()) - } } /// Run the LSP server over stdio. diff --git a/crates/jrsonnet-lsp/src/server/event_loop.rs b/crates/jrsonnet-lsp/src/server/event_loop.rs new file mode 100644 index 00000000..8c213dd6 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/event_loop.rs @@ -0,0 +1,126 @@ +use anyhow::Result; +use crossbeam_channel::select; +use lsp_server::{Message, Notification, Request}; +use lsp_types::notification::PublishDiagnostics; +use tracing::{debug, error, warn}; + +use super::Server; + +impl Server { + /// Main message loop. + /// + /// Uses crossbeam select! to handle both LSP messages and async diagnostics results. + pub(super) fn main_loop(&mut self) -> Result<()> { + loop { + // Use select! to wait on either LSP messages or diagnostics results. + // We receive first and then process to avoid borrow conflicts. 
+ enum SelectResult { + LspMessage(Result), + DiagnosticsResult( + Result< + crate::async_diagnostics::DiagnosticsResult, + crossbeam_channel::RecvError, + >, + ), + AsyncRequestResponse(Result), + } + + let result = { + let lsp_receiver = &self.connection.receiver; + let diag_receiver = self.diagnostics.results(); + let request_receiver = &self.request_response_receiver; + + select! { + recv(lsp_receiver) -> msg => SelectResult::LspMessage(msg), + recv(diag_receiver) -> result => SelectResult::DiagnosticsResult(result), + recv(request_receiver) -> response => SelectResult::AsyncRequestResponse(response), + } + }; + + match result { + SelectResult::LspMessage(Ok(msg)) => { + if self.handle_message(msg)? { + break; // Exit notification received + } + } + SelectResult::LspMessage(Err(e)) => { + error!("Error receiving message: {}", e); + break; + } + SelectResult::DiagnosticsResult(Ok(result)) => { + // Send the completed diagnostics to the client + self.send_notification::(result.params)?; + } + SelectResult::DiagnosticsResult(Err(_)) => { + // Diagnostics channel closed, that's fine + debug!("Diagnostics channel closed"); + } + SelectResult::AsyncRequestResponse(Ok(response)) => { + if !self.inflight_requests.send_inflight_response(response)? { + debug!("Dropping async response for non-pending request"); + } + } + SelectResult::AsyncRequestResponse(Err(_)) => { + debug!("Async request response channel closed"); + } + } + } + + Ok(()) + } + + /// Handle a single LSP message. + /// + /// Returns true if exit notification was received. + fn handle_message(&mut self, msg: Message) -> Result { + match msg { + Message::Request(req) => { + if self.shutdown_requested { + // After shutdown, only respond with errors. + let Request { id, method, .. 
} = req; + let request = self.inflight_requests.begin_unknown(id, method.as_str()); + let _ = self.inflight_requests.send_unknown_err( + request, + lsp_server::ErrorCode::InvalidRequest, + "Server is shutting down", + )?; + } else { + self.handle_request(req)?; + } + Ok(false) + } + Message::Response(resp) => { + if let Some(meta) = self.inflight_requests.complete_outgoing(resp.id.clone()) { + if let Some(error) = &resp.error { + warn!( + "Outgoing request {} ({}) failed: {} ({})", + resp.id, meta.method, error.message, error.code + ); + } else { + debug!( + "Received response for outgoing request {} ({})", + resp.id, meta.method + ); + } + } else { + debug!("Received untracked response: {:?}", resp.id); + } + Ok(false) + } + Message::Notification(notif) => self.handle_notification(notif), + } + } +} + +impl Server { + /// Send a notification to the client. + pub(super) fn send_notification( + &self, + params: N::Params, + ) -> Result<()> { + let params = serde_json::to_value(params)?; + let notif = Notification::new(N::METHOD.to_string(), params); + self.connection.sender.send(Message::Notification(notif))?; + Ok(()) + } +} From 17cbf6f270713da744474404f22410ec75ec3e28 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Mon, 16 Feb 2026 15:03:38 +0000 Subject: [PATCH 122/210] refactor(lsp-check): split `type_check` into focused modules Break up the large `type_check.rs` file into smaller modules with clear responsibilities: - `type_check/types.rs` holds `TypeError` and check configuration - `type_check/core.rs` keeps traversal and structural checks - `type_check/calls.rs` handles stdlib/user call validation Preserve the public API through `type_check/mod.rs` and keep existing behavior and test coverage intact. 
--- .../src/type_check/calls.rs | 574 +++++++++++ .../src/{type_check.rs => type_check/core.rs} | 953 +----------------- .../jrsonnet-lsp-check/src/type_check/mod.rs | 13 + .../src/type_check/types.rs | 379 +++++++ 4 files changed, 971 insertions(+), 948 deletions(-) create mode 100644 crates/jrsonnet-lsp-check/src/type_check/calls.rs rename crates/jrsonnet-lsp-check/src/{type_check.rs => type_check/core.rs} (60%) create mode 100644 crates/jrsonnet-lsp-check/src/type_check/mod.rs create mode 100644 crates/jrsonnet-lsp-check/src/type_check/types.rs diff --git a/crates/jrsonnet-lsp-check/src/type_check/calls.rs b/crates/jrsonnet-lsp-check/src/type_check/calls.rs new file mode 100644 index 00000000..1b73061c --- /dev/null +++ b/crates/jrsonnet-lsp-check/src/type_check/calls.rs @@ -0,0 +1,574 @@ +use jrsonnet_lsp_inference::TypeAnalysis; +use jrsonnet_lsp_scope::{expr_resolves_to_builtin_std, ident_resolves_to_builtin_std}; +use jrsonnet_lsp_stdlib::get_stdlib_signature; +use jrsonnet_lsp_types::{FunctionData, TyData}; +use jrsonnet_rowan_parser::{ + nodes::{Expr, ExprBase, ExprCall}, + AstNode, +}; +use rowan::TextRange; + +use super::{TypeError, TypeErrorKind}; +use crate::format_check::{ + parse_format_string, FormatParseError, FormatPlaceholder, FormatTypeKind, +}; + +/// Validate a function call using `FunctionData` (Ty-native version). 
+pub(super) fn validate_function_call_ty( + func_data: &FunctionData, + function_name: String, + arg_count: usize, + range: TextRange, +) -> Option { + // Count required parameters (those without defaults) + let required = func_data.params.iter().filter(|p| !p.has_default).count(); + let total = func_data.params.len(); + + if arg_count < required { + Some(TypeError { + kind: TypeErrorKind::TooFewArguments { + function_name, + required, + provided: arg_count, + }, + range, + }) + } else if arg_count > total && !func_data.variadic { + Some(TypeError { + kind: TypeErrorKind::TooManyArguments { + function_name, + max_allowed: total, + provided: arg_count, + }, + range, + }) + } else { + None + } +} + +/// Check if an `ExprCall` is a stdlib function call and validate argument count and types. +/// +/// Matches the pattern: `std.functionName(args...)`, including aliases that +/// resolve to the builtin std object. +pub(super) fn check_stdlib_call_expr( + call: &ExprCall, + analysis: &TypeAnalysis, + errors: &mut Vec, +) { + // Get the callee - should be std.functionName (ExprField) + let Some(callee_expr) = call.callee() else { + return; + }; + let Some(ExprBase::ExprField(field)) = callee_expr.expr_base() else { + return; + }; + + // Check if base resolves to builtin std. 
+ let Some(base_expr) = field.base() else { + return; + }; + if !expr_resolves_to_builtin_std(&base_expr) { + return; + } + + // Extract function name from the field + let Some(fn_name) = field + .field() + .and_then(|n| n.ident_lit()) + .map(|t| t.text().to_string()) + else { + return; + }; + + // Look up signature + let Some(sig) = get_stdlib_signature(&fn_name) else { + return; + }; + + // Count arguments + let arg_count = call.args_desc().map_or(0, |args| args.args().count()); + + // Validate using the unified function + let qualified_name = format!("std.{fn_name}"); + let Some(func_data) = sig.func_data() else { + return; + }; + if let Some(error) = validate_function_call_ty( + &func_data, + qualified_name.clone(), + arg_count, + call.syntax().text_range(), + ) { + errors.push(error); + } + + // Check argument types + if let Some(args_desc) = call.args_desc() { + for (i, arg) in args_desc.args().enumerate() { + if i >= func_data.params.len() { + break; // Variadic or too many args - handled elsewhere + } + + let Some(param) = func_data.params.get(i) else { + break; + }; + let stdlib_expected_ty = param.ty; + + // Skip if expected type is Any (no constraint) + if stdlib_expected_ty.is_any() { + continue; + } + + // Get the argument's inferred type + let Some(arg_expr) = arg.expr() else { + continue; + }; + let Some(actual_ty) = analysis.type_for_range(arg_expr.syntax().text_range()) else { + continue; + }; + + // Skip if actual type is Any or Never + if actual_ty.is_any() || actual_ty.is_never() { + continue; + } + + // Import expected type from stdlib store into analysis store + let expected_ty = analysis.import_from_stdlib(stdlib_expected_ty); + + // Check if actual type is subtype of expected type + // Special case: if expected is function_any() (no params), accept any function + // This handles higher-order functions like std.map where we accept any callable + let is_function_wildcard = { + let stdlib_store = jrsonnet_lsp_stdlib::stdlib_store(); + match 
*stdlib_store.get(stdlib_expected_ty) { + TyData::Function(ref f) => f.params.is_empty(), + _ => false, + } + }; + let type_matches = if is_function_wildcard { + analysis.is_function(actual_ty) + } else { + analysis.is_subtype(actual_ty, expected_ty) + }; + + if !type_matches { + errors.push(TypeError { + kind: TypeErrorKind::ArgumentTypeMismatch { + function_name: qualified_name.clone(), + param_name: param.name.clone(), + param_index: i, + expected: expected_ty, + actual: actual_ty, + }, + range: arg_expr.syntax().text_range(), + }); + } + } + } + + // Special validation for std.format + if fn_name == "format" { + check_format_call(call, analysis, errors); + } + + // Higher-order function validation + check_higher_order_call(&fn_name, call, analysis, errors); +} + +/// Configuration for higher-order function validation. +struct HigherOrderConfig { + /// Name of the callback parameter. + callback_param_name: &'static str, + /// Index of the callback argument (0-based). + callback_arg_index: usize, + /// Index of the array argument (0-based). + array_arg_index: usize, +} + +/// Validate higher-order function calls. +/// +/// Checks that callback function parameters are compatible with array element types. +/// For example, in `std.map(func, arr)`, we verify that `func` can accept elements of `arr`. 
+fn check_higher_order_call( + fn_name: &str, + call: &ExprCall, + analysis: &TypeAnalysis, + errors: &mut Vec, +) { + // Configuration for higher-order functions + // (callback_param_name, callback_arg_index, array_arg_index) + let config: Option = match fn_name { + "map" | "filter" | "flatMap" => Some(HigherOrderConfig { + callback_param_name: "func", + callback_arg_index: 0, + array_arg_index: 1, + }), + "find" | "findIndex" => Some(HigherOrderConfig { + callback_param_name: "func", + callback_arg_index: 1, + array_arg_index: 0, + }), + "sort" | "uniq" => Some(HigherOrderConfig { + callback_param_name: "keyF", + callback_arg_index: 1, + array_arg_index: 0, + }), + // foldl/foldr are more complex (accumulator + element), handle separately if needed + _ => None, + }; + + let Some(config) = config else { + return; + }; + + let Some(args_desc) = call.args_desc() else { + return; + }; + let args: Vec<_> = args_desc.args().collect(); + + // Get the callback and array arguments + let Some(callback_arg) = args.get(config.callback_arg_index) else { + return; + }; + let Some(array_arg) = args.get(config.array_arg_index) else { + return; + }; + + // Get the callback's type + let Some(callback_expr) = callback_arg.expr() else { + return; + }; + let Some(callback_ty) = analysis.type_for_range(callback_expr.syntax().text_range()) else { + return; + }; + + // Get the array's type + let Some(array_expr) = array_arg.expr() else { + return; + }; + let Some(array_ty) = analysis.type_for_range(array_expr.syntax().text_range()) else { + return; + }; + + // Skip if types are Any or Never + if array_ty.is_any() || array_ty.is_never() || callback_ty.is_any() || callback_ty.is_never() { + return; + } + + // Extract element type from array + // First get the data, then create union outside the borrow + let element_info = analysis.with_data(array_ty, |data| match data { + TyData::Array { elem, .. 
} => Some(Ok(*elem)), + TyData::Tuple { elems } => { + if elems.is_empty() { + None + } else { + Some(Err(elems.clone())) // Need to create union outside borrow + } + } + _ => None, + }); + let element_ty = match element_info { + Some(Ok(ty)) => ty, + Some(Err(elems)) => analysis.union(elems), + None => return, + }; + + // Skip if element type is Any + if element_ty.is_any() { + return; + } + + // Extract the callback's first parameter type + let callback_param_ty = analysis.with_data(callback_ty, |data| match data { + TyData::Function(ft) => ft.params.first().map(|param| param.ty), + _ => None, + }); + let Some(callback_param_ty) = callback_param_ty else { + return; + }; + + // Skip if callback param type is Any + if callback_param_ty.is_any() { + return; + } + + // Check if element type is compatible with callback param type + if !analysis.is_subtype(element_ty, callback_param_ty) { + errors.push(TypeError { + kind: TypeErrorKind::CallbackTypeMismatch { + function_name: format!("std.{fn_name}"), + callback_param: config.callback_param_name.to_string(), + element_type: element_ty, + callback_param_type: callback_param_ty, + }, + range: callback_expr.syntax().text_range(), + }); + } +} + +/// Validate a `std.format()` call. 
+/// +/// Checks: +/// - Format string is valid +/// - Argument count matches placeholders +/// - Argument types match expected types (when inferrable) +fn check_format_call(call: &ExprCall, analysis: &TypeAnalysis, errors: &mut Vec) { + let Some(args_desc) = call.args_desc() else { + return; + }; + let args: Vec<_> = args_desc.args().collect(); + if args.is_empty() { + return; + } + + // Get the format string from the first argument + let Some(fmt_arg) = args.first() else { + return; + }; + let Some(fmt_expr) = fmt_arg.expr() else { + return; + }; + + // Try to extract a literal string value + let Some(fmt_string) = get_string_literal(&fmt_expr) else { + return; // Can't validate non-literal format strings + }; + + // Parse the format string + let format_spec = match parse_format_string(&fmt_string) { + Ok(spec) => spec, + Err(e) => { + let message = match e { + FormatParseError::IncompleteSpecifier => "incomplete format specifier".to_string(), + FormatParseError::UnknownSpecifier(c) => format!("unknown specifier '%{c}'"), + FormatParseError::UnclosedNamedPlaceholder => { + "unclosed named placeholder".to_string() + } + FormatParseError::EmptyName => "empty name in named placeholder".to_string(), + FormatParseError::MixedPositionalAndNamed => { + "cannot mix positional and named placeholders".to_string() + } + }; + errors.push(TypeError { + kind: TypeErrorKind::FormatStringError { message }, + range: fmt_expr.syntax().text_range(), + }); + return; + } + }; + + // For positional placeholders, check argument count + // This includes extra args consumed by dynamic width (*) and precision (.*) + if format_spec.uses_positional { + let positional_count = format_spec.positional_arg_count(); + let provided = args.len() - 1; // Exclude format string itself + + if provided != positional_count { + errors.push(TypeError { + kind: TypeErrorKind::FormatArgCount { + expected: positional_count, + provided, + }, + range: call.syntax().text_range(), + }); + return; // Skip type 
checking if count is wrong + } + + // Check argument types (skip format string, check remaining args) + for (i, placeholder) in format_spec.placeholders.iter().enumerate() { + if let FormatPlaceholder::Positional { + expected_type, + specifier, + .. + } = placeholder + { + // Get the corresponding argument (offset by 1 for format string) + if let Some(arg) = args.get(i + 1) { + if let Some(arg_expr) = arg.expr() { + let Some(actual_ty) = + analysis.type_for_range(arg_expr.syntax().text_range()) + else { + continue; + }; + + // Skip Any types (unknown) + if actual_ty.is_any() || *expected_type == FormatTypeKind::Any { + continue; + } + + // Check type compatibility using FormatTypeKind method + let is_compatible = analysis + .with_store(|store| expected_type.is_compatible_with(actual_ty, store)); + if !is_compatible { + let expected_ty = + analysis.with_store_mut(|store| expected_type.to_ty(store)); + errors.push(TypeError { + kind: TypeErrorKind::FormatArgTypeMismatch { + index: i, + expected: expected_ty, + actual: actual_ty, + specifier: *specifier, + }, + range: arg_expr.syntax().text_range(), + }); + } + } + } + } + } + } +} + +/// Extract a string literal value from an expression. +fn get_string_literal(expr: &Expr) -> Option { + let base = expr.expr_base()?; + match base { + ExprBase::ExprString(s) => { + let text = s.syntax().text().to_string(); + if text.starts_with("|||") { + return parse_text_block_literal(&text); + } + + if let Some(inner) = text + .strip_prefix("@\"") + .and_then(|value| value.strip_suffix('"')) + .or_else(|| { + text.strip_prefix("@'") + .and_then(|value| value.strip_suffix('\'')) + }) { + return Some(inner.to_string()); + } + + text.strip_prefix('"') + .and_then(|value| value.strip_suffix('"')) + .or_else(|| { + text.strip_prefix('\'') + .and_then(|value| value.strip_suffix('\'')) + }) + .map(unescape_string) + } + _ => None, + } +} + +/// Parse Jsonnet text block syntax (`||| ... |||`) into its string content. 
+fn parse_text_block_literal(text: &str) -> Option { + let after_open = text.strip_prefix("|||")?; + let (_, body_with_terminator) = after_open.split_once('\n')?; + + let mut raw_lines = Vec::new(); + let mut found_terminator = false; + for line in body_with_terminator.split('\n') { + if is_text_block_terminator(line) { + found_terminator = true; + break; + } + raw_lines.push(line); + } + if !found_terminator { + return None; + } + + let indent = raw_lines + .iter() + .find(|line| !line.is_empty()) + .map(|line| { + line.chars() + .take_while(|ch| *ch == ' ' || *ch == '\t') + .collect::() + }) + .unwrap_or_default(); + + let normalized = raw_lines + .into_iter() + .map(|line| { + if indent.is_empty() || line.is_empty() { + line.to_string() + } else { + line.strip_prefix(&indent).unwrap_or(line).to_string() + } + }) + .collect::>(); + + Some(normalized.join("\n")) +} + +fn is_text_block_terminator(line: &str) -> bool { + line.trim_start_matches([' ', '\t']) == "|||" +} + +/// Unescape a string literal (simplified version). +fn unescape_string(s: &str) -> String { + let mut result = String::with_capacity(s.len()); + let mut chars = s.chars().peekable(); + + while let Some(c) = chars.next() { + if c == '\\' { + match chars.next() { + Some('n') => result.push('\n'), + Some('t') => result.push('\t'), + Some('r') => result.push('\r'), + Some('\\') | None => result.push('\\'), + Some('"') => result.push('"'), + Some('\'') => result.push('\''), + Some(other) => { + result.push('\\'); + result.push(other); + } + } + } else { + result.push(c); + } + } + + result +} + +/// Check if an `ExprCall` is a user function call and validate argument count. +/// +/// Matches the pattern: `varName(args...)` where varName is a known function. 
+pub(super) fn check_user_function_call_expr( + call: &ExprCall, + analysis: &TypeAnalysis, + errors: &mut Vec, +) { + // Get the callee - should be a variable (ExprVar) + let Some(callee_expr) = call.callee() else { + return; + }; + let Some(ExprBase::ExprVar(var)) = callee_expr.expr_base() else { + return; + }; + + let Some(var_ident) = var.name().and_then(|n| n.ident_lit()) else { + return; + }; + let var_name = var_ident.text().to_string(); + + // Skip builtin std - handled by check_stdlib_call_expr. + if ident_resolves_to_builtin_std(&var_ident) { + return; + } + + // Look up the type of the variable + let Some(var_ty) = analysis.type_for_range(var.syntax().text_range()) else { + return; + }; + + // Get function data using Ty-native method + let Some(func_data) = analysis.get_function(var_ty) else { + return; + }; + + // Count arguments + let arg_count = call.args_desc().map_or(0, |args| args.args().count()); + + // Validate using the Ty-native function + if let Some(error) = + validate_function_call_ty(&func_data, var_name, arg_count, call.syntax().text_range()) + { + errors.push(error); + } +} diff --git a/crates/jrsonnet-lsp-check/src/type_check.rs b/crates/jrsonnet-lsp-check/src/type_check/core.rs similarity index 60% rename from crates/jrsonnet-lsp-check/src/type_check.rs rename to crates/jrsonnet-lsp-check/src/type_check/core.rs index 0e6bf0b1..72c76fdc 100644 --- a/crates/jrsonnet-lsp-check/src/type_check.rs +++ b/crates/jrsonnet-lsp-check/src/type_check/core.rs @@ -1,18 +1,6 @@ -//! Static type checking for Jsonnet expressions. -//! -//! Provides type error diagnostics that detect type mismatches before evaluation: -//! - Binary operator type mismatches -//! - Unary operator type mismatches -//! - Field access on non-objects -//! - Index access on non-indexable types -//! - Function calls on non-callables -//! 
- Wrong argument counts for function calls - -use jrsonnet_lsp_document::{to_lsp_range, Document, LineIndex}; +use jrsonnet_lsp_document::Document; use jrsonnet_lsp_inference::{find_best_match, TypeAnalysis, TypeEnv}; -use jrsonnet_lsp_scope::{expr_resolves_to_builtin_std, ident_resolves_to_builtin_std}; -use jrsonnet_lsp_stdlib::get_stdlib_signature; -use jrsonnet_lsp_types::{binary_op_result_ty, unary_op_result_ty, FunctionData, Ty, TyData}; +use jrsonnet_lsp_types::{binary_op_result_ty, unary_op_result_ty, Ty}; use jrsonnet_rowan_parser::{ nodes::{ BinaryOperatorKind, Expr, ExprArray, ExprArrayComp, ExprBase, ExprBinary, ExprCall, @@ -21,387 +9,12 @@ use jrsonnet_rowan_parser::{ }, AstNode, }; -use lsp_types::{Diagnostic, DiagnosticSeverity, NumberOrString}; -use rowan::TextRange; -use crate::format_check::{ - parse_format_string, FormatParseError, FormatPlaceholder, FormatTypeKind, +use super::{ + calls::{check_stdlib_call_expr, check_user_function_call_expr}, + TypeCheckConfig, TypeCheckRule, TypeError, TypeErrorKind, }; -/// A type error detected during static analysis. -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct TypeError { - /// The kind of type error. - pub kind: TypeErrorKind, - /// The source location of the error. - pub range: TextRange, -} - -/// The kind of type error. -#[derive(Debug, Clone, PartialEq, Eq)] -pub enum TypeErrorKind { - /// Binary operator applied to incompatible types. - BinaryOpMismatch { - lhs: Ty, - rhs: Ty, - op: &'static str, - message: String, - }, - /// Unary operator applied to incompatible type. - UnaryOpMismatch { - operand: Ty, - op: &'static str, - message: String, - }, - /// Field access (`.field`) on a non-object type. - FieldAccessOnNonObject { actual: Ty }, - /// Index access (`[i]`) on a non-indexable type. - IndexOnNonIndexable { actual: Ty }, - /// Function call on a non-callable type. - CallOnNonFunction { actual: Ty }, - /// Wrong number of arguments to function. 
- WrongArgCount { expected: usize, actual: usize }, - /// Too few arguments to function. - TooFewArguments { - function_name: String, - required: usize, - provided: usize, - }, - /// Too many arguments to function. - TooManyArguments { - function_name: String, - max_allowed: usize, - provided: usize, - }, - /// Access to non-existent field on object with known structure. - NoSuchField { - field: String, - available: Vec, - suggestion: Option, - }, - /// Index out of bounds on a tuple with known length. - TupleIndexOutOfBounds { tuple_len: usize, index: usize }, - /// Format string parse error. - FormatStringError { message: String }, - /// Wrong number of format arguments. - FormatArgCount { expected: usize, provided: usize }, - /// Format argument type mismatch. - FormatArgTypeMismatch { - index: usize, - expected: Ty, - actual: Ty, - specifier: char, - }, - /// Function argument type mismatch. - ArgumentTypeMismatch { - function_name: String, - param_name: String, - param_index: usize, - expected: Ty, - actual: Ty, - }, - /// Callback function parameter type mismatch with collection element type. - CallbackTypeMismatch { - function_name: String, - callback_param: String, - element_type: Ty, - callback_param_type: Ty, - }, -} - -impl TypeErrorKind { - /// Apply a type substitution to all `Ty` references in this error kind. - /// - /// This is used when merging local types into the global store - the substitution - /// maps local `Ty` values to their global equivalents. 
- #[must_use] - pub fn apply_substitution(&self, subst: &jrsonnet_lsp_types::TySubst) -> Self { - match self { - TypeErrorKind::BinaryOpMismatch { - lhs, - rhs, - op, - message, - } => TypeErrorKind::BinaryOpMismatch { - lhs: subst.apply(*lhs), - rhs: subst.apply(*rhs), - op, - message: message.clone(), - }, - TypeErrorKind::UnaryOpMismatch { - operand, - op, - message, - } => TypeErrorKind::UnaryOpMismatch { - operand: subst.apply(*operand), - op, - message: message.clone(), - }, - TypeErrorKind::FieldAccessOnNonObject { actual } => { - TypeErrorKind::FieldAccessOnNonObject { - actual: subst.apply(*actual), - } - } - TypeErrorKind::IndexOnNonIndexable { actual } => TypeErrorKind::IndexOnNonIndexable { - actual: subst.apply(*actual), - }, - TypeErrorKind::CallOnNonFunction { actual } => TypeErrorKind::CallOnNonFunction { - actual: subst.apply(*actual), - }, - // These variants have no Ty references - TypeErrorKind::WrongArgCount { .. } - | TypeErrorKind::TooFewArguments { .. } - | TypeErrorKind::TooManyArguments { .. } - | TypeErrorKind::NoSuchField { .. } - | TypeErrorKind::TupleIndexOutOfBounds { .. } - | TypeErrorKind::FormatStringError { .. } - | TypeErrorKind::FormatArgCount { .. 
} => self.clone(), - TypeErrorKind::FormatArgTypeMismatch { - index, - expected, - actual, - specifier, - } => TypeErrorKind::FormatArgTypeMismatch { - index: *index, - expected: subst.apply(*expected), - actual: subst.apply(*actual), - specifier: *specifier, - }, - TypeErrorKind::ArgumentTypeMismatch { - function_name, - param_name, - param_index, - expected, - actual, - } => TypeErrorKind::ArgumentTypeMismatch { - function_name: function_name.clone(), - param_name: param_name.clone(), - param_index: *param_index, - expected: subst.apply(*expected), - actual: subst.apply(*actual), - }, - TypeErrorKind::CallbackTypeMismatch { - function_name, - callback_param, - element_type, - callback_param_type, - } => TypeErrorKind::CallbackTypeMismatch { - function_name: function_name.clone(), - callback_param: callback_param.clone(), - element_type: subst.apply(*element_type), - callback_param_type: subst.apply(*callback_param_type), - }, - } - } -} - -impl TypeError { - /// Apply a type substitution to all `Ty` references in this error. - /// - /// This is used when merging local types into the global store - the substitution - /// maps local `Ty` values to their global equivalents. - #[must_use] - pub fn apply_substitution(&self, subst: &jrsonnet_lsp_types::TySubst) -> Self { - Self { - kind: self.kind.apply_substitution(subst), - range: self.range, - } - } - - /// Convert the type error to an LSP diagnostic. - pub fn to_diagnostic( - &self, - line_index: &LineIndex, - text: &str, - analysis: &TypeAnalysis, - ) -> Diagnostic { - let message = match &self.kind { - TypeErrorKind::BinaryOpMismatch { message, .. } - | TypeErrorKind::UnaryOpMismatch { message, .. 
} => message.clone(), - TypeErrorKind::FieldAccessOnNonObject { actual } => { - format!( - "field access on non-object type `{}`", - analysis.display(*actual) - ) - } - TypeErrorKind::IndexOnNonIndexable { actual } => { - format!( - "index access on non-indexable type `{}`", - analysis.display(*actual) - ) - } - TypeErrorKind::CallOnNonFunction { actual } => { - format!( - "cannot call non-function type `{}`", - analysis.display(*actual) - ) - } - TypeErrorKind::WrongArgCount { expected, actual } => { - format!("function expects {expected} argument(s), but {actual} provided") - } - TypeErrorKind::TooFewArguments { - function_name, - required, - provided, - } => { - format!( - "`{function_name}` requires at least {required} argument(s), but {provided} provided" - ) - } - TypeErrorKind::TooManyArguments { - function_name, - max_allowed, - provided, - } => { - format!( - "`{function_name}` accepts at most {max_allowed} argument(s), but {provided} provided" - ) - } - TypeErrorKind::NoSuchField { - field, - available, - suggestion, - } => { - let mut msg = format!("no such field `{field}`"); - if let Some(suggested) = suggestion { - msg.push_str("; did you mean `"); - msg.push_str(suggested); - msg.push_str("`?"); - } else if !available.is_empty() { - let available_str = available.join(", "); - msg.push_str("; available fields: "); - msg.push_str(&available_str); - } - msg - } - TypeErrorKind::TupleIndexOutOfBounds { tuple_len, index } => { - format!("index {index} is out of bounds for tuple of length {tuple_len}") - } - TypeErrorKind::FormatStringError { message } => { - format!("invalid format string: {message}") - } - TypeErrorKind::FormatArgCount { expected, provided } => { - format!("format string expects {expected} argument(s), but {provided} provided") - } - TypeErrorKind::FormatArgTypeMismatch { - index, - expected, - actual, - specifier, - } => { - format!( - "format argument {} (specifier %{}) expects `{}`, got `{}`", - index + 1, - specifier, - 
analysis.display(*expected), - analysis.display(*actual) - ) - } - TypeErrorKind::ArgumentTypeMismatch { - function_name, - param_name, - param_index, - expected, - actual, - } => { - format!( - "`{}` argument {} (`{}`) expects `{}`, got `{}`", - function_name, - param_index + 1, - param_name, - analysis.display(*expected), - analysis.display(*actual) - ) - } - TypeErrorKind::CallbackTypeMismatch { - function_name, - callback_param, - element_type, - callback_param_type, - } => { - format!( - "`{}` callback parameter `{}` has type `{}`, but array elements have type `{}`", - function_name, - callback_param, - analysis.display(*callback_param_type), - analysis.display(*element_type) - ) - } - }; - - Diagnostic { - range: to_lsp_range(self.range, line_index, text), - severity: Some(DiagnosticSeverity::WARNING), - code: Some(NumberOrString::String("type-error".to_string())), - code_description: None, - source: Some("jrsonnet-lint".to_string()), - message, - related_information: None, - tags: None, - data: None, - } - } -} - -/// Configuration for type checking. -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub enum TypeCheckRule { - BinaryOps, - UnaryOps, - FieldAccess, - IndexAccess, - CallChecks, -} - -impl TypeCheckRule { - const fn bit(self) -> u8 { - match self { - Self::BinaryOps => 1 << 0, - Self::UnaryOps => 1 << 1, - Self::FieldAccess => 1 << 2, - Self::IndexAccess => 1 << 3, - Self::CallChecks => 1 << 4, - } - } -} - -#[derive(Debug, Clone, Default, PartialEq, Eq)] -pub struct TypeCheckConfig { - enabled: u8, -} - -impl TypeCheckConfig { - /// Enable a single type-check rule in this config. - #[must_use] - pub fn with_enabled(mut self, rule: TypeCheckRule) -> Self { - self.enable(rule); - self - } - - /// Enable a single type-check rule in this config. - pub fn enable(&mut self, rule: TypeCheckRule) { - self.enabled |= rule.bit(); - } - - /// Check whether a type-check rule is enabled. 
- #[must_use] - pub fn is_enabled(&self, rule: TypeCheckRule) -> bool { - self.enabled & rule.bit() != 0 - } - - /// Create a config with all checks enabled. - #[must_use] - pub fn all() -> Self { - Self { - enabled: TypeCheckRule::BinaryOps.bit() - | TypeCheckRule::UnaryOps.bit() - | TypeCheckRule::FieldAccess.bit() - | TypeCheckRule::IndexAccess.bit() - | TypeCheckRule::CallChecks.bit(), - } - } -} - /// Check types in a document and return any type errors. pub fn check_types( document: &Document, @@ -886,562 +499,6 @@ fn check_obj_body( } } -/// Validate a function call using `FunctionData` (Ty-native version). -fn validate_function_call_ty( - func_data: &FunctionData, - function_name: String, - arg_count: usize, - range: TextRange, -) -> Option { - // Count required parameters (those without defaults) - let required = func_data.params.iter().filter(|p| !p.has_default).count(); - let total = func_data.params.len(); - - if arg_count < required { - Some(TypeError { - kind: TypeErrorKind::TooFewArguments { - function_name, - required, - provided: arg_count, - }, - range, - }) - } else if arg_count > total && !func_data.variadic { - Some(TypeError { - kind: TypeErrorKind::TooManyArguments { - function_name, - max_allowed: total, - provided: arg_count, - }, - range, - }) - } else { - None - } -} - -/// Check if an `ExprCall` is a stdlib function call and validate argument count and types. -/// -/// Matches the pattern: `std.functionName(args...)`, including aliases that -/// resolve to the builtin std object. -fn check_stdlib_call_expr(call: &ExprCall, analysis: &TypeAnalysis, errors: &mut Vec) { - // Get the callee - should be std.functionName (ExprField) - let Some(callee_expr) = call.callee() else { - return; - }; - let Some(ExprBase::ExprField(field)) = callee_expr.expr_base() else { - return; - }; - - // Check if base resolves to builtin std. 
- let Some(base_expr) = field.base() else { - return; - }; - if !expr_resolves_to_builtin_std(&base_expr) { - return; - } - - // Extract function name from the field - let Some(fn_name) = field - .field() - .and_then(|n| n.ident_lit()) - .map(|t| t.text().to_string()) - else { - return; - }; - - // Look up signature - let Some(sig) = get_stdlib_signature(&fn_name) else { - return; - }; - - // Count arguments - let arg_count = call.args_desc().map_or(0, |args| args.args().count()); - - // Validate using the unified function - let qualified_name = format!("std.{fn_name}"); - let Some(func_data) = sig.func_data() else { - return; - }; - if let Some(error) = validate_function_call_ty( - &func_data, - qualified_name.clone(), - arg_count, - call.syntax().text_range(), - ) { - errors.push(error); - } - - // Check argument types - if let Some(args_desc) = call.args_desc() { - for (i, arg) in args_desc.args().enumerate() { - if i >= func_data.params.len() { - break; // Variadic or too many args - handled elsewhere - } - - let Some(param) = func_data.params.get(i) else { - break; - }; - let stdlib_expected_ty = param.ty; - - // Skip if expected type is Any (no constraint) - if stdlib_expected_ty.is_any() { - continue; - } - - // Get the argument's inferred type - let Some(arg_expr) = arg.expr() else { - continue; - }; - let Some(actual_ty) = analysis.type_for_range(arg_expr.syntax().text_range()) else { - continue; - }; - - // Skip if actual type is Any or Never - if actual_ty.is_any() || actual_ty.is_never() { - continue; - } - - // Import expected type from stdlib store into analysis store - let expected_ty = analysis.import_from_stdlib(stdlib_expected_ty); - - // Check if actual type is subtype of expected type - // Special case: if expected is function_any() (no params), accept any function - // This handles higher-order functions like std.map where we accept any callable - let is_function_wildcard = { - let stdlib_store = jrsonnet_lsp_stdlib::stdlib_store(); - match 
*stdlib_store.get(stdlib_expected_ty) { - TyData::Function(ref f) => f.params.is_empty(), - _ => false, - } - }; - let type_matches = if is_function_wildcard { - analysis.is_function(actual_ty) - } else { - analysis.is_subtype(actual_ty, expected_ty) - }; - - if !type_matches { - errors.push(TypeError { - kind: TypeErrorKind::ArgumentTypeMismatch { - function_name: qualified_name.clone(), - param_name: param.name.clone(), - param_index: i, - expected: expected_ty, - actual: actual_ty, - }, - range: arg_expr.syntax().text_range(), - }); - } - } - } - - // Special validation for std.format - if fn_name == "format" { - check_format_call(call, analysis, errors); - } - - // Higher-order function validation - check_higher_order_call(&fn_name, call, analysis, errors); -} - -/// Configuration for higher-order function validation. -struct HigherOrderConfig { - /// Name of the callback parameter. - callback_param_name: &'static str, - /// Index of the callback argument (0-based). - callback_arg_index: usize, - /// Index of the array argument (0-based). - array_arg_index: usize, -} - -/// Validate higher-order function calls. -/// -/// Checks that callback function parameters are compatible with array element types. -/// For example, in `std.map(func, arr)`, we verify that `func` can accept elements of `arr`. 
-fn check_higher_order_call( - fn_name: &str, - call: &ExprCall, - analysis: &TypeAnalysis, - errors: &mut Vec, -) { - // Configuration for higher-order functions - // (callback_param_name, callback_arg_index, array_arg_index) - let config: Option = match fn_name { - "map" | "filter" | "flatMap" => Some(HigherOrderConfig { - callback_param_name: "func", - callback_arg_index: 0, - array_arg_index: 1, - }), - "find" | "findIndex" => Some(HigherOrderConfig { - callback_param_name: "func", - callback_arg_index: 1, - array_arg_index: 0, - }), - "sort" | "uniq" => Some(HigherOrderConfig { - callback_param_name: "keyF", - callback_arg_index: 1, - array_arg_index: 0, - }), - // foldl/foldr are more complex (accumulator + element), handle separately if needed - _ => None, - }; - - let Some(config) = config else { - return; - }; - - let Some(args_desc) = call.args_desc() else { - return; - }; - let args: Vec<_> = args_desc.args().collect(); - - // Get the callback and array arguments - let Some(callback_arg) = args.get(config.callback_arg_index) else { - return; - }; - let Some(array_arg) = args.get(config.array_arg_index) else { - return; - }; - - // Get the callback's type - let Some(callback_expr) = callback_arg.expr() else { - return; - }; - let Some(callback_ty) = analysis.type_for_range(callback_expr.syntax().text_range()) else { - return; - }; - - // Get the array's type - let Some(array_expr) = array_arg.expr() else { - return; - }; - let Some(array_ty) = analysis.type_for_range(array_expr.syntax().text_range()) else { - return; - }; - - // Skip if types are Any or Never - if array_ty.is_any() || array_ty.is_never() || callback_ty.is_any() || callback_ty.is_never() { - return; - } - - // Extract element type from array - // First get the data, then create union outside the borrow - let element_info = analysis.with_data(array_ty, |data| match data { - TyData::Array { elem, .. 
} => Some(Ok(*elem)), - TyData::Tuple { elems } => { - if elems.is_empty() { - None - } else { - Some(Err(elems.clone())) // Need to create union outside borrow - } - } - _ => None, - }); - let element_ty = match element_info { - Some(Ok(ty)) => ty, - Some(Err(elems)) => analysis.union(elems), - None => return, - }; - - // Skip if element type is Any - if element_ty.is_any() { - return; - } - - // Extract the callback's first parameter type - let callback_param_ty = analysis.with_data(callback_ty, |data| match data { - TyData::Function(ft) => ft.params.first().map(|param| param.ty), - _ => None, - }); - let Some(callback_param_ty) = callback_param_ty else { - return; - }; - - // Skip if callback param type is Any - if callback_param_ty.is_any() { - return; - } - - // Check if element type is compatible with callback param type - if !analysis.is_subtype(element_ty, callback_param_ty) { - errors.push(TypeError { - kind: TypeErrorKind::CallbackTypeMismatch { - function_name: format!("std.{fn_name}"), - callback_param: config.callback_param_name.to_string(), - element_type: element_ty, - callback_param_type: callback_param_ty, - }, - range: callback_expr.syntax().text_range(), - }); - } -} - -/// Validate a `std.format()` call. 
-/// -/// Checks: -/// - Format string is valid -/// - Argument count matches placeholders -/// - Argument types match expected types (when inferrable) -fn check_format_call(call: &ExprCall, analysis: &TypeAnalysis, errors: &mut Vec) { - let Some(args_desc) = call.args_desc() else { - return; - }; - let args: Vec<_> = args_desc.args().collect(); - if args.is_empty() { - return; - } - - // Get the format string from the first argument - let Some(fmt_arg) = args.first() else { - return; - }; - let Some(fmt_expr) = fmt_arg.expr() else { - return; - }; - - // Try to extract a literal string value - let Some(fmt_string) = get_string_literal(&fmt_expr) else { - return; // Can't validate non-literal format strings - }; - - // Parse the format string - let format_spec = match parse_format_string(&fmt_string) { - Ok(spec) => spec, - Err(e) => { - let message = match e { - FormatParseError::IncompleteSpecifier => "incomplete format specifier".to_string(), - FormatParseError::UnknownSpecifier(c) => format!("unknown specifier '%{c}'"), - FormatParseError::UnclosedNamedPlaceholder => { - "unclosed named placeholder".to_string() - } - FormatParseError::EmptyName => "empty name in named placeholder".to_string(), - FormatParseError::MixedPositionalAndNamed => { - "cannot mix positional and named placeholders".to_string() - } - }; - errors.push(TypeError { - kind: TypeErrorKind::FormatStringError { message }, - range: fmt_expr.syntax().text_range(), - }); - return; - } - }; - - // For positional placeholders, check argument count - // This includes extra args consumed by dynamic width (*) and precision (.*) - if format_spec.uses_positional { - let positional_count = format_spec.positional_arg_count(); - let provided = args.len() - 1; // Exclude format string itself - - if provided != positional_count { - errors.push(TypeError { - kind: TypeErrorKind::FormatArgCount { - expected: positional_count, - provided, - }, - range: call.syntax().text_range(), - }); - return; // Skip type 
checking if count is wrong - } - - // Check argument types (skip format string, check remaining args) - for (i, placeholder) in format_spec.placeholders.iter().enumerate() { - if let FormatPlaceholder::Positional { - expected_type, - specifier, - .. - } = placeholder - { - // Get the corresponding argument (offset by 1 for format string) - if let Some(arg) = args.get(i + 1) { - if let Some(arg_expr) = arg.expr() { - let Some(actual_ty) = - analysis.type_for_range(arg_expr.syntax().text_range()) - else { - continue; - }; - - // Skip Any types (unknown) - if actual_ty.is_any() || *expected_type == FormatTypeKind::Any { - continue; - } - - // Check type compatibility using FormatTypeKind method - let is_compatible = analysis - .with_store(|store| expected_type.is_compatible_with(actual_ty, store)); - if !is_compatible { - let expected_ty = - analysis.with_store_mut(|store| expected_type.to_ty(store)); - errors.push(TypeError { - kind: TypeErrorKind::FormatArgTypeMismatch { - index: i, - expected: expected_ty, - actual: actual_ty, - specifier: *specifier, - }, - range: arg_expr.syntax().text_range(), - }); - } - } - } - } - } - } -} - -/// Extract a string literal value from an expression. -fn get_string_literal(expr: &Expr) -> Option { - let base = expr.expr_base()?; - match base { - ExprBase::ExprString(s) => { - let text = s.syntax().text().to_string(); - if text.starts_with("|||") { - return parse_text_block_literal(&text); - } - - if let Some(inner) = text - .strip_prefix("@\"") - .and_then(|value| value.strip_suffix('"')) - .or_else(|| { - text.strip_prefix("@'") - .and_then(|value| value.strip_suffix('\'')) - }) { - return Some(inner.to_string()); - } - - text.strip_prefix('"') - .and_then(|value| value.strip_suffix('"')) - .or_else(|| { - text.strip_prefix('\'') - .and_then(|value| value.strip_suffix('\'')) - }) - .map(unescape_string) - } - _ => None, - } -} - -/// Parse Jsonnet text block syntax (`||| ... |||`) into its string content. 
-fn parse_text_block_literal(text: &str) -> Option { - let after_open = text.strip_prefix("|||")?; - let (_, body_with_terminator) = after_open.split_once('\n')?; - - let mut raw_lines = Vec::new(); - let mut found_terminator = false; - for line in body_with_terminator.split('\n') { - if is_text_block_terminator(line) { - found_terminator = true; - break; - } - raw_lines.push(line); - } - if !found_terminator { - return None; - } - - let indent = raw_lines - .iter() - .find(|line| !line.is_empty()) - .map(|line| { - line.chars() - .take_while(|ch| *ch == ' ' || *ch == '\t') - .collect::() - }) - .unwrap_or_default(); - - let normalized = raw_lines - .into_iter() - .map(|line| { - if indent.is_empty() || line.is_empty() { - line.to_string() - } else { - line.strip_prefix(&indent).unwrap_or(line).to_string() - } - }) - .collect::>(); - - Some(normalized.join("\n")) -} - -fn is_text_block_terminator(line: &str) -> bool { - line.trim_start_matches([' ', '\t']) == "|||" -} - -/// Unescape a string literal (simplified version). -fn unescape_string(s: &str) -> String { - let mut result = String::with_capacity(s.len()); - let mut chars = s.chars().peekable(); - - while let Some(c) = chars.next() { - if c == '\\' { - match chars.next() { - Some('n') => result.push('\n'), - Some('t') => result.push('\t'), - Some('r') => result.push('\r'), - Some('\\') | None => result.push('\\'), - Some('"') => result.push('"'), - Some('\'') => result.push('\''), - Some(other) => { - result.push('\\'); - result.push(other); - } - } - } else { - result.push(c); - } - } - - result -} - -/// Check if an `ExprCall` is a user function call and validate argument count. -/// -/// Matches the pattern: `varName(args...)` where varName is a known function. 
-fn check_user_function_call_expr( - call: &ExprCall, - analysis: &TypeAnalysis, - errors: &mut Vec, -) { - // Get the callee - should be a variable (ExprVar) - let Some(callee_expr) = call.callee() else { - return; - }; - let Some(ExprBase::ExprVar(var)) = callee_expr.expr_base() else { - return; - }; - - let Some(var_ident) = var.name().and_then(|n| n.ident_lit()) else { - return; - }; - let var_name = var_ident.text().to_string(); - - // Skip builtin std - handled by check_stdlib_call_expr. - if ident_resolves_to_builtin_std(&var_ident) { - return; - } - - // Look up the type of the variable - let Some(var_ty) = analysis.type_for_range(var.syntax().text_range()) else { - return; - }; - - // Get function data using Ty-native method - let Some(func_data) = analysis.get_function(var_ty) else { - return; - }; - - // Count arguments - let arg_count = call.args_desc().map_or(0, |args| args.args().count()); - - // Validate using the Ty-native function - if let Some(error) = - validate_function_call_ty(&func_data, var_name, arg_count, call.syntax().text_range()) - { - errors.push(error); - } -} - /// Get the string representation of a binary operator. fn binary_op_str(op: BinaryOperatorKind) -> &'static str { match op { diff --git a/crates/jrsonnet-lsp-check/src/type_check/mod.rs b/crates/jrsonnet-lsp-check/src/type_check/mod.rs new file mode 100644 index 00000000..c1bb3e02 --- /dev/null +++ b/crates/jrsonnet-lsp-check/src/type_check/mod.rs @@ -0,0 +1,13 @@ +//! Static type checking for Jsonnet expressions. +//! +//! Provides type error diagnostics that detect type mismatches before +//! evaluation: operator mismatches, invalid field/index access, and +//! function call validation. 
+ +mod calls; +mod core; +mod types; + +pub use core::check_types; + +pub use types::{TypeCheckConfig, TypeCheckRule, TypeError, TypeErrorKind}; diff --git a/crates/jrsonnet-lsp-check/src/type_check/types.rs b/crates/jrsonnet-lsp-check/src/type_check/types.rs new file mode 100644 index 00000000..fd4e4e48 --- /dev/null +++ b/crates/jrsonnet-lsp-check/src/type_check/types.rs @@ -0,0 +1,379 @@ +use jrsonnet_lsp_document::{to_lsp_range, LineIndex}; +use jrsonnet_lsp_inference::TypeAnalysis; +use jrsonnet_lsp_types::Ty; +use lsp_types::{Diagnostic, DiagnosticSeverity, NumberOrString}; +use rowan::TextRange; + +/// A type error detected during static analysis. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct TypeError { + /// The kind of type error. + pub kind: TypeErrorKind, + /// The source location of the error. + pub range: TextRange, +} + +/// The kind of type error. +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum TypeErrorKind { + /// Binary operator applied to incompatible types. + BinaryOpMismatch { + lhs: Ty, + rhs: Ty, + op: &'static str, + message: String, + }, + /// Unary operator applied to incompatible type. + UnaryOpMismatch { + operand: Ty, + op: &'static str, + message: String, + }, + /// Field access (`.field`) on a non-object type. + FieldAccessOnNonObject { actual: Ty }, + /// Index access (`[i]`) on a non-indexable type. + IndexOnNonIndexable { actual: Ty }, + /// Function call on a non-callable type. + CallOnNonFunction { actual: Ty }, + /// Wrong number of arguments to function. + WrongArgCount { expected: usize, actual: usize }, + /// Too few arguments to function. + TooFewArguments { + function_name: String, + required: usize, + provided: usize, + }, + /// Too many arguments to function. + TooManyArguments { + function_name: String, + max_allowed: usize, + provided: usize, + }, + /// Access to non-existent field on object with known structure. 
+ NoSuchField { + field: String, + available: Vec, + suggestion: Option, + }, + /// Index out of bounds on a tuple with known length. + TupleIndexOutOfBounds { tuple_len: usize, index: usize }, + /// Format string parse error. + FormatStringError { message: String }, + /// Wrong number of format arguments. + FormatArgCount { expected: usize, provided: usize }, + /// Format argument type mismatch. + FormatArgTypeMismatch { + index: usize, + expected: Ty, + actual: Ty, + specifier: char, + }, + /// Function argument type mismatch. + ArgumentTypeMismatch { + function_name: String, + param_name: String, + param_index: usize, + expected: Ty, + actual: Ty, + }, + /// Callback function parameter type mismatch with collection element type. + CallbackTypeMismatch { + function_name: String, + callback_param: String, + element_type: Ty, + callback_param_type: Ty, + }, +} + +impl TypeErrorKind { + /// Apply a type substitution to all `Ty` references in this error kind. + /// + /// This is used when merging local types into the global store - the substitution + /// maps local `Ty` values to their global equivalents. 
+ #[must_use] + pub fn apply_substitution(&self, subst: &jrsonnet_lsp_types::TySubst) -> Self { + match self { + TypeErrorKind::BinaryOpMismatch { + lhs, + rhs, + op, + message, + } => TypeErrorKind::BinaryOpMismatch { + lhs: subst.apply(*lhs), + rhs: subst.apply(*rhs), + op, + message: message.clone(), + }, + TypeErrorKind::UnaryOpMismatch { + operand, + op, + message, + } => TypeErrorKind::UnaryOpMismatch { + operand: subst.apply(*operand), + op, + message: message.clone(), + }, + TypeErrorKind::FieldAccessOnNonObject { actual } => { + TypeErrorKind::FieldAccessOnNonObject { + actual: subst.apply(*actual), + } + } + TypeErrorKind::IndexOnNonIndexable { actual } => TypeErrorKind::IndexOnNonIndexable { + actual: subst.apply(*actual), + }, + TypeErrorKind::CallOnNonFunction { actual } => TypeErrorKind::CallOnNonFunction { + actual: subst.apply(*actual), + }, + // These variants have no Ty references + TypeErrorKind::WrongArgCount { .. } + | TypeErrorKind::TooFewArguments { .. } + | TypeErrorKind::TooManyArguments { .. } + | TypeErrorKind::NoSuchField { .. } + | TypeErrorKind::TupleIndexOutOfBounds { .. } + | TypeErrorKind::FormatStringError { .. } + | TypeErrorKind::FormatArgCount { .. 
} => self.clone(), + TypeErrorKind::FormatArgTypeMismatch { + index, + expected, + actual, + specifier, + } => TypeErrorKind::FormatArgTypeMismatch { + index: *index, + expected: subst.apply(*expected), + actual: subst.apply(*actual), + specifier: *specifier, + }, + TypeErrorKind::ArgumentTypeMismatch { + function_name, + param_name, + param_index, + expected, + actual, + } => TypeErrorKind::ArgumentTypeMismatch { + function_name: function_name.clone(), + param_name: param_name.clone(), + param_index: *param_index, + expected: subst.apply(*expected), + actual: subst.apply(*actual), + }, + TypeErrorKind::CallbackTypeMismatch { + function_name, + callback_param, + element_type, + callback_param_type, + } => TypeErrorKind::CallbackTypeMismatch { + function_name: function_name.clone(), + callback_param: callback_param.clone(), + element_type: subst.apply(*element_type), + callback_param_type: subst.apply(*callback_param_type), + }, + } + } +} + +impl TypeError { + /// Apply a type substitution to all `Ty` references in this error. + /// + /// This is used when merging local types into the global store - the substitution + /// maps local `Ty` values to their global equivalents. + #[must_use] + pub fn apply_substitution(&self, subst: &jrsonnet_lsp_types::TySubst) -> Self { + Self { + kind: self.kind.apply_substitution(subst), + range: self.range, + } + } + + /// Convert the type error to an LSP diagnostic. + pub fn to_diagnostic( + &self, + line_index: &LineIndex, + text: &str, + analysis: &TypeAnalysis, + ) -> Diagnostic { + let message = match &self.kind { + TypeErrorKind::BinaryOpMismatch { message, .. } + | TypeErrorKind::UnaryOpMismatch { message, .. 
} => message.clone(), + TypeErrorKind::FieldAccessOnNonObject { actual } => { + format!( + "field access on non-object type `{}`", + analysis.display(*actual) + ) + } + TypeErrorKind::IndexOnNonIndexable { actual } => { + format!( + "index access on non-indexable type `{}`", + analysis.display(*actual) + ) + } + TypeErrorKind::CallOnNonFunction { actual } => { + format!( + "cannot call non-function type `{}`", + analysis.display(*actual) + ) + } + TypeErrorKind::WrongArgCount { expected, actual } => { + format!("function expects {expected} argument(s), but {actual} provided") + } + TypeErrorKind::TooFewArguments { + function_name, + required, + provided, + } => { + format!( + "`{function_name}` requires at least {required} argument(s), but {provided} provided" + ) + } + TypeErrorKind::TooManyArguments { + function_name, + max_allowed, + provided, + } => { + format!( + "`{function_name}` accepts at most {max_allowed} argument(s), but {provided} provided" + ) + } + TypeErrorKind::NoSuchField { + field, + available, + suggestion, + } => { + let mut msg = format!("no such field `{field}`"); + if let Some(suggested) = suggestion { + msg.push_str("; did you mean `"); + msg.push_str(suggested); + msg.push_str("`?"); + } else if !available.is_empty() { + let available_str = available.join(", "); + msg.push_str("; available fields: "); + msg.push_str(&available_str); + } + msg + } + TypeErrorKind::TupleIndexOutOfBounds { tuple_len, index } => { + format!("index {index} is out of bounds for tuple of length {tuple_len}") + } + TypeErrorKind::FormatStringError { message } => { + format!("invalid format string: {message}") + } + TypeErrorKind::FormatArgCount { expected, provided } => { + format!("format string expects {expected} argument(s), but {provided} provided") + } + TypeErrorKind::FormatArgTypeMismatch { + index, + expected, + actual, + specifier, + } => { + format!( + "format argument {} (specifier %{}) expects `{}`, got `{}`", + index + 1, + specifier, + 
analysis.display(*expected), + analysis.display(*actual) + ) + } + TypeErrorKind::ArgumentTypeMismatch { + function_name, + param_name, + param_index, + expected, + actual, + } => { + format!( + "`{}` argument {} (`{}`) expects `{}`, got `{}`", + function_name, + param_index + 1, + param_name, + analysis.display(*expected), + analysis.display(*actual) + ) + } + TypeErrorKind::CallbackTypeMismatch { + function_name, + callback_param, + element_type, + callback_param_type, + } => { + format!( + "`{}` callback parameter `{}` has type `{}`, but array elements have type `{}`", + function_name, + callback_param, + analysis.display(*callback_param_type), + analysis.display(*element_type) + ) + } + }; + + Diagnostic { + range: to_lsp_range(self.range, line_index, text), + severity: Some(DiagnosticSeverity::WARNING), + code: Some(NumberOrString::String("type-error".to_string())), + code_description: None, + source: Some("jrsonnet-lint".to_string()), + message, + related_information: None, + tags: None, + data: None, + } + } +} + +/// Configuration for type checking. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum TypeCheckRule { + BinaryOps, + UnaryOps, + FieldAccess, + IndexAccess, + CallChecks, +} + +impl TypeCheckRule { + const fn bit(self) -> u8 { + match self { + Self::BinaryOps => 1 << 0, + Self::UnaryOps => 1 << 1, + Self::FieldAccess => 1 << 2, + Self::IndexAccess => 1 << 3, + Self::CallChecks => 1 << 4, + } + } +} + +#[derive(Debug, Clone, Default, PartialEq, Eq)] +pub struct TypeCheckConfig { + enabled: u8, +} + +impl TypeCheckConfig { + /// Enable a single type-check rule in this config. + #[must_use] + pub fn with_enabled(mut self, rule: TypeCheckRule) -> Self { + self.enable(rule); + self + } + + /// Enable a single type-check rule in this config. + pub fn enable(&mut self, rule: TypeCheckRule) { + self.enabled |= rule.bit(); + } + + /// Check whether a type-check rule is enabled. 
+ #[must_use] + pub fn is_enabled(&self, rule: TypeCheckRule) -> bool { + self.enabled & rule.bit() != 0 + } + + /// Create a config with all checks enabled. + #[must_use] + pub fn all() -> Self { + Self { + enabled: TypeCheckRule::BinaryOps.bit() + | TypeCheckRule::UnaryOps.bit() + | TypeCheckRule::FieldAccess.bit() + | TypeCheckRule::IndexAccess.bit() + | TypeCheckRule::CallChecks.bit(), + } + } +} From 21b83c5f8892cf69fd056afdd9afd3a5d50a1d1f Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Mon, 16 Feb 2026 15:07:01 +0000 Subject: [PATCH 123/210] refactor(lsp-check): split lint passes into modules Decompose `lint.rs` into pass-oriented modules to make ownership and navigation clearer: - `lint/unused_shadow.rs` for unused-variable and shadow checks - `lint/unreachable.rs` for control-flow reachability checks - `lint/duplicates.rs` for duplicate field/param checks - `lint/mod.rs` as the entrypoint and shared context Keep lint behavior unchanged and preserve inline tests under the pass implementation module. 
--- .../jrsonnet-lsp-check/src/lint/duplicates.rs | 185 +++++ crates/jrsonnet-lsp-check/src/lint/mod.rs | 207 ++++++ .../src/lint/unreachable.rs | 261 +++++++ .../src/{lint.rs => lint/unused_shadow.rs} | 656 +----------------- 4 files changed, 680 insertions(+), 629 deletions(-) create mode 100644 crates/jrsonnet-lsp-check/src/lint/duplicates.rs create mode 100644 crates/jrsonnet-lsp-check/src/lint/mod.rs create mode 100644 crates/jrsonnet-lsp-check/src/lint/unreachable.rs rename crates/jrsonnet-lsp-check/src/{lint.rs => lint/unused_shadow.rs} (58%) diff --git a/crates/jrsonnet-lsp-check/src/lint/duplicates.rs b/crates/jrsonnet-lsp-check/src/lint/duplicates.rs new file mode 100644 index 00000000..f32f4849 --- /dev/null +++ b/crates/jrsonnet-lsp-check/src/lint/duplicates.rs @@ -0,0 +1,185 @@ +use jrsonnet_rowan_parser::{ + nodes::{Bind, Destruct, ExprFunction, ExprObject, FieldName, Member, ObjBody}, + AstNode, SyntaxKind, SyntaxNode, +}; +use lsp_types::{ + Diagnostic, DiagnosticRelatedInformation, DiagnosticSeverity, Location, NumberOrString, +}; +use rowan::TextRange; +use rustc_hash::FxHashMap; + +use super::LintContext; + +pub(super) fn check_duplicate_fields( + node: &SyntaxNode, + ctx: &LintContext, + diagnostics: &mut Vec, +) { + // Find all object expressions + for child in node.descendants() { + if child.kind() == SyntaxKind::EXPR_OBJECT { + if let Some(obj) = ExprObject::cast(child) { + check_object_for_duplicate_fields(&obj, ctx, diagnostics); + } + } + } +} + +/// Check a single object expression for duplicate fields. 
+fn check_object_for_duplicate_fields( + obj: &ExprObject, + ctx: &LintContext, + diagnostics: &mut Vec, +) { + let mut seen: FxHashMap = FxHashMap::default(); + + let Some(obj_body) = obj.obj_body() else { + return; + }; + + let ObjBody::ObjBodyMemberList(members) = obj_body else { + // Object comprehension - can't have static duplicate fields + return; + }; + + for member in members.members() { + let field_name = match &member { + Member::MemberBindStmt(bind_stmt) => { + // { local x = value } - object-local binding + extract_bind_name(bind_stmt.obj_local().and_then(|ol| ol.bind())) + } + Member::MemberFieldNormal(field) => { + // { field: value } or { field:: value } + field.field_name().and_then(extract_static_field_name) + } + Member::MemberFieldMethod(method) => { + // { method(...): value } + method.field_name().and_then(extract_static_field_name) + } + Member::MemberAssertStmt(_) => None, // assert doesn't define a field + }; + + if let Some(name) = field_name { + let range = member.syntax().text_range(); + if let Some(first_range) = seen.get(&name) { + diagnostics.push(Diagnostic { + range: ctx.to_lsp_range(range), + severity: Some(DiagnosticSeverity::WARNING), + code: Some(NumberOrString::String("duplicate-field".to_string())), + source: Some("jrsonnet-lsp".to_string()), + message: format!("duplicate field `{name}`"), + related_information: Some(vec![DiagnosticRelatedInformation { + location: Location { + uri: ctx.uri.clone(), + range: ctx.to_lsp_range(*first_range), + }, + message: "first definition here".to_string(), + }]), + ..Default::default() + }); + } else { + seen.insert(name, range); + } + } + } +} + +/// Extract a name from an optional Bind node. 
+fn extract_bind_name(bind: Option) -> Option { + let bind = bind?; + match bind { + Bind::BindDestruct(bd) => { + // Use BindDestruct::into to get Option + // (note: calling bd.into() directly can be ambiguous with Into trait) + let destruct = jrsonnet_rowan_parser::nodes::BindDestruct::into(&bd)?; + if let Destruct::DestructFull(full) = destruct { + full.name() + .and_then(|n| n.ident_lit()) + .map(|t| t.text().to_string()) + } else { + None + } + } + Bind::BindFunction(bf) => bf + .name() + .and_then(|n| n.ident_lit()) + .map(|t| t.text().to_string()), + } +} + +/// Extract a static field name from a `FieldName` node. +fn extract_static_field_name(field_name: FieldName) -> Option { + match field_name { + FieldName::FieldNameFixed(fixed) => { + // FieldNameFixed has id() for identifier and text() for string literals + fixed + .id() + .and_then(|n| n.ident_lit()) + .map(|t| t.text().to_string()) + } + FieldName::FieldNameDynamic(_) => None, // Dynamic field names can't be statically checked + } +} + +/// Check for duplicate function parameters in the entire AST. +pub(super) fn check_duplicate_params( + node: &SyntaxNode, + ctx: &LintContext, + diagnostics: &mut Vec, +) { + // Find all function expressions + for child in node.descendants() { + if child.kind() == SyntaxKind::EXPR_FUNCTION { + if let Some(func) = ExprFunction::cast(child) { + check_function_for_duplicate_params(&func, ctx, diagnostics); + } + } + } +} + +/// Check a single function expression for duplicate parameters. 
+fn check_function_for_duplicate_params( + func: &ExprFunction, + ctx: &LintContext, + diagnostics: &mut Vec, +) { + let Some(params_desc) = func.params_desc() else { + return; + }; + + let mut seen: FxHashMap = FxHashMap::default(); + + for param in params_desc.params() { + // Extract parameter name from destruct + let param_name = param.destruct().and_then(|d| match d { + Destruct::DestructFull(full) => full + .name() + .and_then(|n| n.ident_lit()) + .map(|t| t.text().to_string()), + _ => None, // Array/object destructuring is more complex + }); + + if let Some(name) = param_name { + let range = param.syntax().text_range(); + if let Some(first_range) = seen.get(&name) { + diagnostics.push(Diagnostic { + range: ctx.to_lsp_range(range), + severity: Some(DiagnosticSeverity::ERROR), + code: Some(NumberOrString::String("duplicate-param".to_string())), + source: Some("jrsonnet-lsp".to_string()), + message: format!("duplicate parameter `{name}`"), + related_information: Some(vec![DiagnosticRelatedInformation { + location: Location { + uri: ctx.uri.clone(), + range: ctx.to_lsp_range(*first_range), + }, + message: "first definition here".to_string(), + }]), + ..Default::default() + }); + } else { + seen.insert(name, range); + } + } + } +} diff --git a/crates/jrsonnet-lsp-check/src/lint/mod.rs b/crates/jrsonnet-lsp-check/src/lint/mod.rs new file mode 100644 index 00000000..ac90774c --- /dev/null +++ b/crates/jrsonnet-lsp-check/src/lint/mod.rs @@ -0,0 +1,207 @@ +//! Lint diagnostics for Jsonnet code. +//! +//! Provides static analysis warnings that don't require evaluation: +//! - Unused variables +//! - Shadowed variables +//! 
- Unreachable code (via type inference) + +mod duplicates; +mod unreachable; +mod unused_shadow; + +use jrsonnet_lsp_document::{to_lsp_range, Document, LineIndex}; +use jrsonnet_lsp_inference::{TypeAnalysis, TypeEnv}; +use jrsonnet_rowan_parser::AstNode; +use lsp_types::{ + Diagnostic, DiagnosticRelatedInformation, DiagnosticSeverity, Location, NumberOrString, Range, +}; +use rowan::TextRange; + +use self::{ + duplicates::{check_duplicate_fields, check_duplicate_params}, + unreachable::check_unreachable_code, + unused_shadow::{check_shadowed_variables, check_unused_variables}, +}; + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum LintRule { + UnusedVariables, + UnreachableCode, + ShadowedVariables, + TypeErrors, + DuplicateFields, + DuplicateParams, +} + +impl LintRule { + const fn bit(self) -> u8 { + match self { + Self::UnusedVariables => 1 << 0, + Self::UnreachableCode => 1 << 1, + Self::ShadowedVariables => 1 << 2, + Self::TypeErrors => 1 << 3, + Self::DuplicateFields => 1 << 4, + Self::DuplicateParams => 1 << 5, + } + } +} + +#[derive(Debug, Clone, Copy, Default, PartialEq, Eq)] +pub struct LintConfig { + enabled: u8, +} + +impl LintConfig { + /// Enable a single lint rule in this config. + #[must_use] + pub fn with_enabled(mut self, rule: LintRule) -> Self { + self.enable(rule); + self + } + + /// Enable a single lint rule in this config. + pub fn enable(&mut self, rule: LintRule) { + self.enabled |= rule.bit(); + } + + /// Check whether a lint rule is enabled. + #[must_use] + pub fn is_enabled(self, rule: LintRule) -> bool { + self.enabled & rule.bit() != 0 + } + + /// Create a config with all lints enabled. + #[must_use] + pub fn all() -> Self { + Self { + enabled: LintRule::UnusedVariables.bit() + | LintRule::UnreachableCode.bit() + | LintRule::ShadowedVariables.bit() + | LintRule::TypeErrors.bit() + | LintRule::DuplicateFields.bit() + | LintRule::DuplicateParams.bit(), + } + } +} + +/// Run lint checks on a document. 
+/// +/// The `uri` parameter is used to create related location information in diagnostics. +pub fn lint( + document: &Document, + analysis: &TypeAnalysis, + config: &LintConfig, + uri: &lsp_types::Uri, +) -> Vec { + let mut diagnostics = Vec::new(); + let text = document.text(); + let line_index = document.line_index(); + let ast = document.ast(); + + if let Some(expr) = ast.expr() { + let ctx = LintContext::new(text, line_index, uri); + + if config.is_enabled(LintRule::UnusedVariables) { + check_unused_variables(&expr, &ctx, &mut diagnostics); + } + + if config.is_enabled(LintRule::ShadowedVariables) { + check_shadowed_variables(expr.syntax(), &ctx, &mut diagnostics); + } + + if config.is_enabled(LintRule::UnreachableCode) { + let mut env = TypeEnv::new_default(); + check_unreachable_code(&expr, &ctx, &mut env, &mut diagnostics); + } + + if config.is_enabled(LintRule::TypeErrors) { + let type_check_config = super::type_check::TypeCheckConfig::all(); + let type_errors = + super::type_check::check_types(document, analysis, &type_check_config); + diagnostics.extend( + type_errors + .into_iter() + .map(|e| e.to_diagnostic(line_index, text, analysis)), + ); + } + + if config.is_enabled(LintRule::DuplicateFields) { + check_duplicate_fields(expr.syntax(), &ctx, &mut diagnostics); + } + + if config.is_enabled(LintRule::DuplicateParams) { + check_duplicate_params(expr.syntax(), &ctx, &mut diagnostics); + } + } + + diagnostics +} + +/// Context for lint checking. 
+struct LintContext<'a> { + text: &'a str, + line_index: &'a LineIndex, + uri: &'a lsp_types::Uri, +} + +impl<'a> LintContext<'a> { + fn new(text: &'a str, line_index: &'a LineIndex, uri: &'a lsp_types::Uri) -> Self { + Self { + text, + line_index, + uri, + } + } + + fn to_lsp_range(&self, range: TextRange) -> Range { + to_lsp_range(range, self.line_index, self.text) + } + + fn make_diagnostic( + &self, + range: TextRange, + message: String, + severity: DiagnosticSeverity, + code: &str, + ) -> Diagnostic { + Diagnostic { + range: self.to_lsp_range(range), + severity: Some(severity), + code: Some(NumberOrString::String(code.to_string())), + code_description: None, + source: Some("jrsonnet-lint".to_string()), + message, + related_information: None, + tags: None, + data: None, + } + } + + fn make_diagnostic_with_related( + &self, + range: TextRange, + message: String, + severity: DiagnosticSeverity, + code: &str, + related_range: TextRange, + related_message: String, + ) -> Diagnostic { + Diagnostic { + range: self.to_lsp_range(range), + severity: Some(severity), + code: Some(NumberOrString::String(code.to_string())), + code_description: None, + source: Some("jrsonnet-lint".to_string()), + message, + related_information: Some(vec![DiagnosticRelatedInformation { + location: Location { + uri: self.uri.clone(), + range: self.to_lsp_range(related_range), + }, + message: related_message, + }]), + tags: None, + data: None, + } + } +} diff --git a/crates/jrsonnet-lsp-check/src/lint/unreachable.rs b/crates/jrsonnet-lsp-check/src/lint/unreachable.rs new file mode 100644 index 00000000..ec775f54 --- /dev/null +++ b/crates/jrsonnet-lsp-check/src/lint/unreachable.rs @@ -0,0 +1,261 @@ +use jrsonnet_lsp_inference::{infer_expr_ty, TypeEnv}; +use jrsonnet_lsp_types::Ty; +use jrsonnet_rowan_parser::{ + nodes::{Bind, Destruct, Expr, ExprBase, Stmt}, + AstNode, +}; +use lsp_types::{Diagnostic, DiagnosticSeverity}; +use rowan::TextRange; + +use super::LintContext; + +fn 
extract_simple_bind_name( + bind: &Bind, +) -> Option<(String, &jrsonnet_rowan_parser::nodes::BindDestruct)> { + let Bind::BindDestruct(bd) = bind else { + return None; + }; + let destruct: Destruct = bd.into()?; + let Destruct::DestructFull(full) = destruct else { + return None; + }; + let name = full.name()?.ident_lit()?.text().to_string(); + Some((name, bd)) +} + +/// Check for unreachable code in an expression. +/// +/// Unreachable code patterns detected: +/// - Code after an `assert` whose condition is a divergent expression (type Never) +/// - Code after an `assert false` +pub(super) fn check_unreachable_code( + expr: &Expr, + ctx: &LintContext, + env: &mut TypeEnv, + diagnostics: &mut Vec, +) { + // Check statements (locals and asserts) that precede the body + for stmt in expr.stmts() { + match &stmt { + Stmt::StmtAssert(assert_stmt) => { + // Check if the assert condition diverges + if let Some(assertion) = assert_stmt.assertion() { + if let Some(cond) = assertion.condition() { + let cond_ty = infer_expr_ty(&cond, env); + if cond_ty.is_never() { + // The assert condition itself diverges, so everything after is unreachable + // Find the range of everything after this assert + if let Some(body_range) = find_code_after_stmt(&stmt, expr) { + diagnostics.push( + ctx.make_diagnostic_with_related( + body_range, + "unreachable code: assert condition always diverges" + .to_string(), + DiagnosticSeverity::WARNING, + "unreachable-code", + cond.syntax().text_range(), + "divergent expression here".to_string(), + ), + ); + return; // Don't report more unreachable code + } + } + // Also check if condition is statically `false` + if is_statically_false(&cond) { + if let Some(body_range) = find_code_after_stmt(&stmt, expr) { + diagnostics.push( + ctx.make_diagnostic_with_related( + body_range, + "unreachable code: assert condition is always false" + .to_string(), + DiagnosticSeverity::WARNING, + "unreachable-code", + cond.syntax().text_range(), + "condition is 
`false`".to_string(), + ), + ); + return; + } + } + } + } + } + Stmt::StmtLocal(local_stmt) => { + // Process local bindings for the type environment + for bind in local_stmt.binds() { + if let Some((name, bd)) = extract_simple_bind_name(&bind) { + let ty = bd.value().map_or(Ty::ANY, |v| infer_expr_ty(&v, env)); + env.define_ty(name, ty); + } + } + } + } + } + + // Recursively check the body expression and nested expressions + if let Some(base) = expr.expr_base() { + check_unreachable_in_base(&base, ctx, env, diagnostics); + } +} + +/// Check if an expression is statically `false`. +fn is_statically_false(expr: &Expr) -> bool { + if let Some(base) = expr.expr_base() { + if let ExprBase::ExprLiteral(lit) = base { + if let Some(literal) = lit.literal() { + return matches!( + literal.kind(), + jrsonnet_rowan_parser::nodes::LiteralKind::FalseKw + ); + } + } + } + false +} + +/// Find the range of code after a statement in an expression. +fn find_code_after_stmt(stmt: &Stmt, expr: &Expr) -> Option { + let stmt_end = stmt.syntax().text_range().end(); + let expr_end = expr.syntax().text_range().end(); + + // Check if there's anything after this statement + if stmt_end < expr_end { + // Find the start of the next significant content + let next_start = stmt_end; + Some(TextRange::new(next_start, expr_end)) + } else { + None + } +} + +/// Check for unreachable code in a base expression. 
+fn check_unreachable_in_base( + base: &ExprBase, + ctx: &LintContext, + env: &mut TypeEnv, + diagnostics: &mut Vec, +) { + match base { + ExprBase::ExprBinary(binary) => { + // Check if left operand is divergent + if let Some(lhs) = binary.lhs() { + let lhs_ty = infer_expr_ty(&lhs, env); + if lhs_ty.is_never() { + // Right operand is unreachable + if let Some(rhs) = binary.rhs() { + diagnostics.push(ctx.make_diagnostic_with_related( + rhs.syntax().text_range(), + "unreachable code: left operand always diverges".to_string(), + DiagnosticSeverity::WARNING, + "unreachable-code", + lhs.syntax().text_range(), + "divergent expression here".to_string(), + )); + return; + } + } + // Recursively check left operand + check_unreachable_code(&lhs, ctx, env, diagnostics); + } + // Recursively check right operand + if let Some(rhs) = binary.rhs() { + check_unreachable_code(&rhs, ctx, env, diagnostics); + } + } + ExprBase::ExprIfThenElse(if_expr) => { + // Check condition for divergence + if let Some(cond) = if_expr.cond() { + let cond_ty = infer_expr_ty(&cond, env); + if cond_ty.is_never() { + // Both branches are unreachable + if let Some(then_clause) = if_expr.then() { + if let Some(then_expr) = then_clause.expr() { + diagnostics.push(ctx.make_diagnostic_with_related( + then_expr.syntax().text_range(), + "unreachable code: condition always diverges".to_string(), + DiagnosticSeverity::WARNING, + "unreachable-code", + cond.syntax().text_range(), + "divergent expression here".to_string(), + )); + } + } + return; + } + // Recursively check condition + check_unreachable_code(&cond, ctx, env, diagnostics); + } + // Recursively check branches + if let Some(then_clause) = if_expr.then() { + if let Some(then_expr) = then_clause.expr() { + check_unreachable_code(&then_expr, ctx, env, diagnostics); + } + } + if let Some(else_clause) = if_expr.else_() { + if let Some(else_expr) = else_clause.expr() { + check_unreachable_code(&else_expr, ctx, env, diagnostics); + } + } + } + 
ExprBase::ExprParened(parens) => { + if let Some(inner) = parens.expr() { + check_unreachable_code(&inner, ctx, env, diagnostics); + } + } + ExprBase::ExprFunction(func) => { + // Check function body + if let Some(body) = func.expr() { + env.push_scope(); + check_unreachable_code(&body, ctx, env, diagnostics); + env.pop_scope(); + } + } + ExprBase::ExprArray(arr) => { + for elem in arr.exprs() { + check_unreachable_code(&elem, ctx, env, diagnostics); + } + } + ExprBase::ExprObject(obj) => { + if let Some(body) = obj.obj_body() { + check_unreachable_in_obj_body(&body, ctx, env, diagnostics); + } + } + _ => {} + } +} + +/// Check for unreachable code in an object body. +fn check_unreachable_in_obj_body( + body: &jrsonnet_rowan_parser::nodes::ObjBody, + ctx: &LintContext, + env: &mut TypeEnv, + diagnostics: &mut Vec, +) { + use jrsonnet_rowan_parser::nodes::{Member, ObjBody}; + if let ObjBody::ObjBodyMemberList(members) = body { + for member in members.members() { + match member { + Member::MemberFieldNormal(field) => { + if let Some(expr) = field.expr() { + check_unreachable_code(&expr, ctx, env, diagnostics); + } + } + Member::MemberFieldMethod(method) => { + if let Some(expr) = method.expr() { + env.push_scope(); + check_unreachable_code(&expr, ctx, env, diagnostics); + env.pop_scope(); + } + } + Member::MemberAssertStmt(assert_member) => { + if let Some(assertion) = assert_member.assertion() { + if let Some(cond) = assertion.condition() { + check_unreachable_code(&cond, ctx, env, diagnostics); + } + } + } + Member::MemberBindStmt(_) => {} + } + } + } +} diff --git a/crates/jrsonnet-lsp-check/src/lint.rs b/crates/jrsonnet-lsp-check/src/lint/unused_shadow.rs similarity index 58% rename from crates/jrsonnet-lsp-check/src/lint.rs rename to crates/jrsonnet-lsp-check/src/lint/unused_shadow.rs index e4bd3c96..d23eca1b 100644 --- a/crates/jrsonnet-lsp-check/src/lint.rs +++ b/crates/jrsonnet-lsp-check/src/lint/unused_shadow.rs @@ -1,28 +1,14 @@ -//! 
Lint diagnostics for Jsonnet code. -//! -//! Provides static analysis warnings that don't require evaluation: -//! - Unused variables -//! - Shadowed variables -//! - Unreachable code (via type inference) - -use jrsonnet_lsp_document::{to_lsp_range, Document, LineIndex}; -use jrsonnet_lsp_inference::{infer_expr_ty, TypeAnalysis, TypeEnv}; use jrsonnet_lsp_scope::ident_resolves_to_builtin_std; -use jrsonnet_lsp_types::Ty; use jrsonnet_rowan_parser::{ - nodes::{ - Bind, Destruct, DestructArrayPart, Expr, ExprBase, ExprFunction, ExprObject, ExprVar, - FieldName, ForSpec, Member, ObjBody, Param, Stmt, StmtLocal, - }, + nodes::{Bind, Destruct, DestructArrayPart, Expr, ExprVar, ForSpec, Param, StmtLocal}, AstNode, SyntaxKind, SyntaxNode, }; -use lsp_types::{ - Diagnostic, DiagnosticRelatedInformation, DiagnosticSeverity, Location, NumberOrString, Range, -}; +use lsp_types::{Diagnostic, DiagnosticSeverity}; use rowan::TextRange; use rustc_hash::{FxHashMap, FxHashSet}; -/// Information about a variable in scope, used for related diagnostics. +use super::LintContext; + #[derive(Debug, Clone)] struct ScopeVar { range: TextRange, @@ -37,193 +23,11 @@ struct VarDef { is_underscore_prefixed: bool, } -/// Lint configuration options. -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub enum LintRule { - UnusedVariables, - UnreachableCode, - ShadowedVariables, - TypeErrors, - DuplicateFields, - DuplicateParams, -} - -impl LintRule { - const fn bit(self) -> u8 { - match self { - Self::UnusedVariables => 1 << 0, - Self::UnreachableCode => 1 << 1, - Self::ShadowedVariables => 1 << 2, - Self::TypeErrors => 1 << 3, - Self::DuplicateFields => 1 << 4, - Self::DuplicateParams => 1 << 5, - } - } -} - -#[derive(Debug, Clone, Copy, Default, PartialEq, Eq)] -pub struct LintConfig { - enabled: u8, -} - -impl LintConfig { - /// Enable a single lint rule in this config. 
- #[must_use] - pub fn with_enabled(mut self, rule: LintRule) -> Self { - self.enable(rule); - self - } - - /// Enable a single lint rule in this config. - pub fn enable(&mut self, rule: LintRule) { - self.enabled |= rule.bit(); - } - - /// Check whether a lint rule is enabled. - #[must_use] - pub fn is_enabled(self, rule: LintRule) -> bool { - self.enabled & rule.bit() != 0 - } - - /// Create a config with all lints enabled. - #[must_use] - pub fn all() -> Self { - Self { - enabled: LintRule::UnusedVariables.bit() - | LintRule::UnreachableCode.bit() - | LintRule::ShadowedVariables.bit() - | LintRule::TypeErrors.bit() - | LintRule::DuplicateFields.bit() - | LintRule::DuplicateParams.bit(), - } - } -} - -/// Run lint checks on a document. -/// -/// The `uri` parameter is used to create related location information in diagnostics. -pub fn lint( - document: &Document, - analysis: &TypeAnalysis, - config: &LintConfig, - uri: &lsp_types::Uri, -) -> Vec { - let mut diagnostics = Vec::new(); - let text = document.text(); - let line_index = document.line_index(); - let ast = document.ast(); - - if let Some(expr) = ast.expr() { - let ctx = LintContext::new(text, line_index, uri); - - if config.is_enabled(LintRule::UnusedVariables) { - check_unused_variables(&expr, &ctx, &mut diagnostics); - } - - if config.is_enabled(LintRule::ShadowedVariables) { - let mut scope_stack: Vec> = vec![FxHashMap::default()]; - check_shadowed_variables(expr.syntax(), &ctx, &mut scope_stack, &mut diagnostics); - } - - if config.is_enabled(LintRule::UnreachableCode) { - let mut env = TypeEnv::new_default(); - check_unreachable_code(&expr, &ctx, &mut env, &mut diagnostics); - } - - if config.is_enabled(LintRule::TypeErrors) { - let type_check_config = super::type_check::TypeCheckConfig::all(); - let type_errors = - super::type_check::check_types(document, analysis, &type_check_config); - diagnostics.extend( - type_errors - .into_iter() - .map(|e| e.to_diagnostic(line_index, text, analysis)), - ); - 
} - - if config.is_enabled(LintRule::DuplicateFields) { - check_duplicate_fields(expr.syntax(), &ctx, &mut diagnostics); - } - - if config.is_enabled(LintRule::DuplicateParams) { - check_duplicate_params(expr.syntax(), &ctx, &mut diagnostics); - } - } - - diagnostics -} - -/// Context for lint checking. -struct LintContext<'a> { - text: &'a str, - line_index: &'a LineIndex, - uri: &'a lsp_types::Uri, -} - -impl<'a> LintContext<'a> { - fn new(text: &'a str, line_index: &'a LineIndex, uri: &'a lsp_types::Uri) -> Self { - Self { - text, - line_index, - uri, - } - } - - fn to_lsp_range(&self, range: TextRange) -> Range { - to_lsp_range(range, self.line_index, self.text) - } - - fn make_diagnostic( - &self, - range: TextRange, - message: String, - severity: DiagnosticSeverity, - code: &str, - ) -> Diagnostic { - Diagnostic { - range: self.to_lsp_range(range), - severity: Some(severity), - code: Some(NumberOrString::String(code.to_string())), - code_description: None, - source: Some("jrsonnet-lint".to_string()), - message, - related_information: None, - tags: None, - data: None, - } - } - - fn make_diagnostic_with_related( - &self, - range: TextRange, - message: String, - severity: DiagnosticSeverity, - code: &str, - related_range: TextRange, - related_message: String, - ) -> Diagnostic { - Diagnostic { - range: self.to_lsp_range(range), - severity: Some(severity), - code: Some(NumberOrString::String(code.to_string())), - code_description: None, - source: Some("jrsonnet-lint".to_string()), - message, - related_information: Some(vec![DiagnosticRelatedInformation { - location: Location { - uri: self.uri.clone(), - range: self.to_lsp_range(related_range), - }, - message: related_message, - }]), - tags: None, - data: None, - } - } -} - -/// Check for unused variables in an expression. 
-fn check_unused_variables(expr: &Expr, ctx: &LintContext, diagnostics: &mut Vec) { +pub(super) fn check_unused_variables( + expr: &Expr, + ctx: &LintContext, + diagnostics: &mut Vec, +) { let mut definitions: FxHashMap> = FxHashMap::default(); let mut references: FxHashSet = FxHashSet::default(); @@ -257,7 +61,16 @@ fn check_unused_variables(expr: &Expr, ctx: &LintContext, diagnostics: &mut Vec< } /// Check for shadowed variables in nested scopes. -fn check_shadowed_variables( +pub(super) fn check_shadowed_variables( + node: &SyntaxNode, + ctx: &LintContext, + diagnostics: &mut Vec, +) { + let mut scope_stack: Vec> = vec![FxHashMap::default()]; + check_shadowed_variables_inner(node, ctx, &mut scope_stack, diagnostics); +} + +fn check_shadowed_variables_inner( node: &SyntaxNode, ctx: &LintContext, scope_stack: &mut Vec>, @@ -321,7 +134,7 @@ fn check_shadowed_variables( // Recurse into children for child in node.children() { - check_shadowed_variables(&child, ctx, scope_stack, diagnostics); + check_shadowed_variables_inner(&child, ctx, scope_stack, diagnostics); } if introduces_scope { @@ -454,258 +267,6 @@ fn check_param_for_shadow( /// Extract variable name from a simple `BindDestruct` (not array/object destructuring). /// /// Returns the variable name and a reference to the `BindDestruct` for value access. -fn extract_simple_bind_name( - bind: &Bind, -) -> Option<(String, &jrsonnet_rowan_parser::nodes::BindDestruct)> { - let Bind::BindDestruct(bd) = bind else { - return None; - }; - let destruct: Destruct = bd.into()?; - let Destruct::DestructFull(full) = destruct else { - return None; - }; - let name = full.name()?.ident_lit()?.text().to_string(); - Some((name, bd)) -} - -/// Check for unreachable code in an expression. 
-/// -/// Unreachable code patterns detected: -/// - Code after an `assert` whose condition is a divergent expression (type Never) -/// - Code after an `assert false` -fn check_unreachable_code( - expr: &Expr, - ctx: &LintContext, - env: &mut TypeEnv, - diagnostics: &mut Vec, -) { - // Check statements (locals and asserts) that precede the body - for stmt in expr.stmts() { - match &stmt { - Stmt::StmtAssert(assert_stmt) => { - // Check if the assert condition diverges - if let Some(assertion) = assert_stmt.assertion() { - if let Some(cond) = assertion.condition() { - let cond_ty = infer_expr_ty(&cond, env); - if cond_ty.is_never() { - // The assert condition itself diverges, so everything after is unreachable - // Find the range of everything after this assert - if let Some(body_range) = find_code_after_stmt(&stmt, expr) { - diagnostics.push( - ctx.make_diagnostic_with_related( - body_range, - "unreachable code: assert condition always diverges" - .to_string(), - DiagnosticSeverity::WARNING, - "unreachable-code", - cond.syntax().text_range(), - "divergent expression here".to_string(), - ), - ); - return; // Don't report more unreachable code - } - } - // Also check if condition is statically `false` - if is_statically_false(&cond) { - if let Some(body_range) = find_code_after_stmt(&stmt, expr) { - diagnostics.push( - ctx.make_diagnostic_with_related( - body_range, - "unreachable code: assert condition is always false" - .to_string(), - DiagnosticSeverity::WARNING, - "unreachable-code", - cond.syntax().text_range(), - "condition is `false`".to_string(), - ), - ); - return; - } - } - } - } - } - Stmt::StmtLocal(local_stmt) => { - // Process local bindings for the type environment - for bind in local_stmt.binds() { - if let Some((name, bd)) = extract_simple_bind_name(&bind) { - let ty = bd.value().map_or(Ty::ANY, |v| infer_expr_ty(&v, env)); - env.define_ty(name, ty); - } - } - } - } - } - - // Recursively check the body expression and nested expressions - if let 
Some(base) = expr.expr_base() { - check_unreachable_in_base(&base, ctx, env, diagnostics); - } -} - -/// Check if an expression is statically `false`. -fn is_statically_false(expr: &Expr) -> bool { - if let Some(base) = expr.expr_base() { - if let ExprBase::ExprLiteral(lit) = base { - if let Some(literal) = lit.literal() { - return matches!( - literal.kind(), - jrsonnet_rowan_parser::nodes::LiteralKind::FalseKw - ); - } - } - } - false -} - -/// Find the range of code after a statement in an expression. -fn find_code_after_stmt(stmt: &Stmt, expr: &Expr) -> Option { - let stmt_end = stmt.syntax().text_range().end(); - let expr_end = expr.syntax().text_range().end(); - - // Check if there's anything after this statement - if stmt_end < expr_end { - // Find the start of the next significant content - let next_start = stmt_end; - Some(TextRange::new(next_start, expr_end)) - } else { - None - } -} - -/// Check for unreachable code in a base expression. -fn check_unreachable_in_base( - base: &ExprBase, - ctx: &LintContext, - env: &mut TypeEnv, - diagnostics: &mut Vec, -) { - match base { - ExprBase::ExprBinary(binary) => { - // Check if left operand is divergent - if let Some(lhs) = binary.lhs() { - let lhs_ty = infer_expr_ty(&lhs, env); - if lhs_ty.is_never() { - // Right operand is unreachable - if let Some(rhs) = binary.rhs() { - diagnostics.push(ctx.make_diagnostic_with_related( - rhs.syntax().text_range(), - "unreachable code: left operand always diverges".to_string(), - DiagnosticSeverity::WARNING, - "unreachable-code", - lhs.syntax().text_range(), - "divergent expression here".to_string(), - )); - return; - } - } - // Recursively check left operand - check_unreachable_code(&lhs, ctx, env, diagnostics); - } - // Recursively check right operand - if let Some(rhs) = binary.rhs() { - check_unreachable_code(&rhs, ctx, env, diagnostics); - } - } - ExprBase::ExprIfThenElse(if_expr) => { - // Check condition for divergence - if let Some(cond) = if_expr.cond() { - let 
cond_ty = infer_expr_ty(&cond, env); - if cond_ty.is_never() { - // Both branches are unreachable - if let Some(then_clause) = if_expr.then() { - if let Some(then_expr) = then_clause.expr() { - diagnostics.push(ctx.make_diagnostic_with_related( - then_expr.syntax().text_range(), - "unreachable code: condition always diverges".to_string(), - DiagnosticSeverity::WARNING, - "unreachable-code", - cond.syntax().text_range(), - "divergent expression here".to_string(), - )); - } - } - return; - } - // Recursively check condition - check_unreachable_code(&cond, ctx, env, diagnostics); - } - // Recursively check branches - if let Some(then_clause) = if_expr.then() { - if let Some(then_expr) = then_clause.expr() { - check_unreachable_code(&then_expr, ctx, env, diagnostics); - } - } - if let Some(else_clause) = if_expr.else_() { - if let Some(else_expr) = else_clause.expr() { - check_unreachable_code(&else_expr, ctx, env, diagnostics); - } - } - } - ExprBase::ExprParened(parens) => { - if let Some(inner) = parens.expr() { - check_unreachable_code(&inner, ctx, env, diagnostics); - } - } - ExprBase::ExprFunction(func) => { - // Check function body - if let Some(body) = func.expr() { - env.push_scope(); - check_unreachable_code(&body, ctx, env, diagnostics); - env.pop_scope(); - } - } - ExprBase::ExprArray(arr) => { - for elem in arr.exprs() { - check_unreachable_code(&elem, ctx, env, diagnostics); - } - } - ExprBase::ExprObject(obj) => { - if let Some(body) = obj.obj_body() { - check_unreachable_in_obj_body(&body, ctx, env, diagnostics); - } - } - _ => {} - } -} - -/// Check for unreachable code in an object body. 
-fn check_unreachable_in_obj_body( - body: &jrsonnet_rowan_parser::nodes::ObjBody, - ctx: &LintContext, - env: &mut TypeEnv, - diagnostics: &mut Vec, -) { - use jrsonnet_rowan_parser::nodes::{Member, ObjBody}; - if let ObjBody::ObjBodyMemberList(members) = body { - for member in members.members() { - match member { - Member::MemberFieldNormal(field) => { - if let Some(expr) = field.expr() { - check_unreachable_code(&expr, ctx, env, diagnostics); - } - } - Member::MemberFieldMethod(method) => { - if let Some(expr) = method.expr() { - env.push_scope(); - check_unreachable_code(&expr, ctx, env, diagnostics); - env.pop_scope(); - } - } - Member::MemberAssertStmt(assert_member) => { - if let Some(assertion) = assert_member.assertion() { - if let Some(cond) = assertion.condition() { - check_unreachable_code(&cond, ctx, env, diagnostics); - } - } - } - Member::MemberBindStmt(_) => {} - } - } - } -} - -/// Collect variable definitions and references from the AST. fn collect_definitions_and_references( node: &SyntaxNode, definitions: &mut FxHashMap>, @@ -841,179 +402,16 @@ fn collect_param_definition(param: &Param, definitions: &mut FxHashMap) { - // Find all object expressions - for child in node.descendants() { - if child.kind() == SyntaxKind::EXPR_OBJECT { - if let Some(obj) = ExprObject::cast(child) { - check_object_for_duplicate_fields(&obj, ctx, diagnostics); - } - } - } -} - -/// Check a single object expression for duplicate fields. 
-fn check_object_for_duplicate_fields( - obj: &ExprObject, - ctx: &LintContext, - diagnostics: &mut Vec, -) { - let mut seen: FxHashMap = FxHashMap::default(); - - let Some(obj_body) = obj.obj_body() else { - return; - }; - - let ObjBody::ObjBodyMemberList(members) = obj_body else { - // Object comprehension - can't have static duplicate fields - return; - }; - - for member in members.members() { - let field_name = match &member { - Member::MemberBindStmt(bind_stmt) => { - // { local x = value } - object-local binding - extract_bind_name(bind_stmt.obj_local().and_then(|ol| ol.bind())) - } - Member::MemberFieldNormal(field) => { - // { field: value } or { field:: value } - field.field_name().and_then(extract_static_field_name) - } - Member::MemberFieldMethod(method) => { - // { method(...): value } - method.field_name().and_then(extract_static_field_name) - } - Member::MemberAssertStmt(_) => None, // assert doesn't define a field - }; - - if let Some(name) = field_name { - let range = member.syntax().text_range(); - if let Some(first_range) = seen.get(&name) { - diagnostics.push(Diagnostic { - range: ctx.to_lsp_range(range), - severity: Some(DiagnosticSeverity::WARNING), - code: Some(NumberOrString::String("duplicate-field".to_string())), - source: Some("jrsonnet-lsp".to_string()), - message: format!("duplicate field `{name}`"), - related_information: Some(vec![DiagnosticRelatedInformation { - location: Location { - uri: ctx.uri.clone(), - range: ctx.to_lsp_range(*first_range), - }, - message: "first definition here".to_string(), - }]), - ..Default::default() - }); - } else { - seen.insert(name, range); - } - } - } -} - -/// Extract a name from an optional Bind node. 
-fn extract_bind_name(bind: Option) -> Option { - let bind = bind?; - match bind { - Bind::BindDestruct(bd) => { - // Use BindDestruct::into to get Option - // (note: calling bd.into() directly can be ambiguous with Into trait) - let destruct = jrsonnet_rowan_parser::nodes::BindDestruct::into(&bd)?; - if let Destruct::DestructFull(full) = destruct { - full.name() - .and_then(|n| n.ident_lit()) - .map(|t| t.text().to_string()) - } else { - None - } - } - Bind::BindFunction(bf) => bf - .name() - .and_then(|n| n.ident_lit()) - .map(|t| t.text().to_string()), - } -} - -/// Extract a static field name from a `FieldName` node. -fn extract_static_field_name(field_name: FieldName) -> Option { - match field_name { - FieldName::FieldNameFixed(fixed) => { - // FieldNameFixed has id() for identifier and text() for string literals - fixed - .id() - .and_then(|n| n.ident_lit()) - .map(|t| t.text().to_string()) - } - FieldName::FieldNameDynamic(_) => None, // Dynamic field names can't be statically checked - } -} - -/// Check for duplicate function parameters in the entire AST. -fn check_duplicate_params(node: &SyntaxNode, ctx: &LintContext, diagnostics: &mut Vec) { - // Find all function expressions - for child in node.descendants() { - if child.kind() == SyntaxKind::EXPR_FUNCTION { - if let Some(func) = ExprFunction::cast(child) { - check_function_for_duplicate_params(&func, ctx, diagnostics); - } - } - } -} - -/// Check a single function expression for duplicate parameters. 
-fn check_function_for_duplicate_params( - func: &ExprFunction, - ctx: &LintContext, - diagnostics: &mut Vec, -) { - let Some(params_desc) = func.params_desc() else { - return; - }; - - let mut seen: FxHashMap = FxHashMap::default(); - - for param in params_desc.params() { - // Extract parameter name from destruct - let param_name = param.destruct().and_then(|d| match d { - Destruct::DestructFull(full) => full - .name() - .and_then(|n| n.ident_lit()) - .map(|t| t.text().to_string()), - _ => None, // Array/object destructuring is more complex - }); - - if let Some(name) = param_name { - let range = param.syntax().text_range(); - if let Some(first_range) = seen.get(&name) { - diagnostics.push(Diagnostic { - range: ctx.to_lsp_range(range), - severity: Some(DiagnosticSeverity::ERROR), - code: Some(NumberOrString::String("duplicate-param".to_string())), - source: Some("jrsonnet-lsp".to_string()), - message: format!("duplicate parameter `{name}`"), - related_information: Some(vec![DiagnosticRelatedInformation { - location: Location { - uri: ctx.uri.clone(), - range: ctx.to_lsp_range(*first_range), - }, - message: "first definition here".to_string(), - }]), - ..Default::default() - }); - } else { - seen.insert(name, range); - } - } - } -} - #[cfg(test)] mod tests { - use jrsonnet_lsp_document::DocVersion; - use lsp_types::{Position, Range, Uri}; + use jrsonnet_lsp_document::{DocVersion, Document}; + use jrsonnet_lsp_inference::TypeAnalysis; + use lsp_types::{ + Diagnostic, DiagnosticRelatedInformation, DiagnosticSeverity, Location, NumberOrString, + Position, Range, Uri, + }; - use super::*; + use crate::lint::{lint, LintConfig, LintRule}; fn test_uri() -> Uri { "file:///test.jsonnet".parse().unwrap() From d21a8220b577822105a7e0176c010ac435346bc1 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Mon, 16 Feb 2026 15:10:36 +0000 Subject: [PATCH 124/210] refactor(lsp-handlers): split remove_unused internals Break the remove-unused code action implementation into focused 
submodules: - `remove_unused/mod.rs` keeps policy and action entrypoints - `remove_unused/ranges.rs` owns range expansion and fix-all math - `remove_unused/edits.rs` resolves binding edits from diagnostics Preserve external behavior and keep existing inline code-action tests in the implementation module. --- .../edits.rs} | 537 +----------------- .../src/code_action/remove_unused/mod.rs | 181 ++++++ .../src/code_action/remove_unused/ranges.rs | 348 ++++++++++++ 3 files changed, 541 insertions(+), 525 deletions(-) rename crates/jrsonnet-lsp-handlers/src/code_action/{remove_unused.rs => remove_unused/edits.rs} (53%) create mode 100644 crates/jrsonnet-lsp-handlers/src/code_action/remove_unused/mod.rs create mode 100644 crates/jrsonnet-lsp-handlers/src/code_action/remove_unused/ranges.rs diff --git a/crates/jrsonnet-lsp-handlers/src/code_action/remove_unused.rs b/crates/jrsonnet-lsp-handlers/src/code_action/remove_unused/edits.rs similarity index 53% rename from crates/jrsonnet-lsp-handlers/src/code_action/remove_unused.rs rename to crates/jrsonnet-lsp-handlers/src/code_action/remove_unused/edits.rs index 1c60a5f7..8f6a917b 100644 --- a/crates/jrsonnet-lsp-handlers/src/code_action/remove_unused.rs +++ b/crates/jrsonnet-lsp-handlers/src/code_action/remove_unused/edits.rs @@ -1,425 +1,21 @@ -use std::collections::{HashMap, HashSet}; - -use jrsonnet_lsp_document::{to_lsp_range, token_at_offset, Document}; +use jrsonnet_lsp_document::{token_at_offset, Document}; use jrsonnet_rowan_parser::{ - nodes::{ - Bind, BindDestruct, BindFunction, Destruct, Expr, ExprBase, Member, MemberBindStmt, - ObjBodyMemberList, StmtLocal, - }, - rowan::{TextRange, TextSize, TokenAtOffset}, - AstNode, SyntaxKind, SyntaxNode, SyntaxToken, -}; -use lsp_types::{ - CodeAction, CodeActionContext, CodeActionKind, CodeActionOrCommand, Diagnostic, TextEdit, Uri, - WorkspaceEdit, + nodes::{BindDestruct, BindFunction, Destruct, Expr, ExprBase, MemberBindStmt, StmtLocal}, + AstNode, SyntaxKind, SyntaxNode, 
}; +use lsp_types::Diagnostic; +use rowan::TextRange; use super::{ - is_unused_variable_diagnostic, wants_fix_all, CodeActionConfig, RemoveUnusedCommentsMode, - RemoveUnusedMode, + ranges::{expand_range_with_policy, remove_range_for_list_entry}, + RemoveUnusedPolicy, }; -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -enum RemovalFlavor { - All, - ImportBindings, - NonImportBindings, -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -enum CommentPolicy { - None, - Above, - Below, - All, -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub(super) struct RemoveUnusedPolicy { - flavor: RemovalFlavor, - comments: CommentPolicy, -} - -impl RemovalFlavor { - const fn allows(self, import_binding: bool) -> bool { - match self { - Self::All => true, - Self::ImportBindings => import_binding, - Self::NonImportBindings => !import_binding, - } - } -} - -impl CommentPolicy { - const fn keeps_above_comments(self) -> bool { - matches!(self, Self::Above | Self::All) - } - - const fn keeps_below_comments(self) -> bool { - matches!(self, Self::Below | Self::All) - } -} - -impl RemoveUnusedPolicy { - pub(super) const fn from_config(config: CodeActionConfig) -> Self { - let flavor = match config.remove_unused { - RemoveUnusedMode::All => RemovalFlavor::All, - RemoveUnusedMode::ImportBindings => RemovalFlavor::ImportBindings, - RemoveUnusedMode::NonImportBindings => RemovalFlavor::NonImportBindings, - }; - let comments = match config.remove_unused_comments { - RemoveUnusedCommentsMode::None => CommentPolicy::None, - RemoveUnusedCommentsMode::Above => CommentPolicy::Above, - RemoveUnusedCommentsMode::Below => CommentPolicy::Below, - RemoveUnusedCommentsMode::All => CommentPolicy::All, - }; - Self { flavor, comments } - } -} - #[derive(Debug, Clone, PartialEq, Eq)] -struct RemoveUnusedEdit { - binding_name: String, - range: TextRange, -} - -const fn is_trivia_kind(kind: SyntaxKind) -> bool { - matches!( - kind, - SyntaxKind::WHITESPACE - | SyntaxKind::MULTI_LINE_COMMENT - | 
SyntaxKind::SINGLE_LINE_HASH_COMMENT - | SyntaxKind::SINGLE_LINE_SLASH_COMMENT - ) -} - -const fn is_comment_kind(kind: SyntaxKind) -> bool { - matches!( - kind, - SyntaxKind::MULTI_LINE_COMMENT - | SyntaxKind::SINGLE_LINE_HASH_COMMENT - | SyntaxKind::SINGLE_LINE_SLASH_COMMENT - ) -} - -const fn is_whitespace_kind(kind: SyntaxKind) -> bool { - matches!(kind, SyntaxKind::WHITESPACE) -} - -fn can_absorb_trivia(token: &SyntaxToken, keep_comments: bool) -> bool { - if is_whitespace_kind(token.kind()) { - return !token.text().contains("\n\n"); - } - - is_comment_kind(token.kind()) && keep_comments -} - -fn previous_significant_token(token: &SyntaxToken) -> Option { - std::iter::successors(token.prev_token(), SyntaxToken::prev_token) - .find(|candidate| !is_trivia_kind(candidate.kind())) -} - -fn next_significant_token(token: &SyntaxToken) -> Option { - std::iter::successors(token.next_token(), SyntaxToken::next_token) - .find(|candidate| !is_trivia_kind(candidate.kind())) +pub(super) struct RemoveUnusedEdit { + pub(super) binding_name: String, + pub(super) range: TextRange, } - -fn single_line_trivia_end_after(token: &SyntaxToken) -> Option { - let trivia = token.next_token()?; - if !is_trivia_kind(trivia.kind()) || trivia.text().contains('\n') { - return None; - } - Some(trivia.text_range().end()) -} - -fn token_at_range_start(syntax: &SyntaxNode, range: TextRange) -> Option { - match syntax.token_at_offset(range.start()) { - TokenAtOffset::None => None, - TokenAtOffset::Single(token) => Some(token), - TokenAtOffset::Between(_, right) => Some(right), - } -} - -fn token_at_range_end(syntax: &SyntaxNode, range: TextRange) -> Option { - let end = range.end().checked_sub(TextSize::new(1))?; - match syntax.token_at_offset(end) { - TokenAtOffset::None => None, - TokenAtOffset::Single(token) => Some(token), - TokenAtOffset::Between(left, _) => Some(left), - } -} - -fn expand_range_with_policy( - syntax: &SyntaxNode, - range: TextRange, - comments: CommentPolicy, -) -> Option 
{ - let mut start = token_at_range_start(syntax, range)?; - while let Some(previous) = start.prev_token() { - if can_absorb_trivia(&previous, comments.keeps_above_comments()) { - start = previous; - continue; - } - break; - } - - let mut end = token_at_range_end(syntax, range)?; - while let Some(next) = end.next_token() { - if can_absorb_trivia(&next, comments.keeps_below_comments()) { - end = next; - continue; - } - break; - } - - Some(TextRange::new( - start.text_range().start(), - end.text_range().end(), - )) -} - -fn remove_range_for_list_entry(entry: &SyntaxNode) -> Option { - let first = entry.first_token()?; - let last = entry.last_token()?; - - if let Some(next) = next_significant_token(&last) { - if next.kind() == SyntaxKind::COMMA { - let end = - single_line_trivia_end_after(&next).unwrap_or_else(|| next.text_range().end()); - return Some(TextRange::new(first.text_range().start(), end)); - } - } - - if let Some(previous) = previous_significant_token(&first) { - if previous.kind() == SyntaxKind::COMMA { - return Some(TextRange::new( - previous.text_range().start(), - last.text_range().end(), - )); - } - } - - Some(entry.text_range()) -} - -fn remove_range_for_entry_run( - entries: &[SyntaxNode], - run_start: usize, - run_end: usize, -) -> Option { - let first = entries.get(run_start)?.first_token()?; - let last = entries.get(run_end)?.last_token()?; - - let start = if run_start == 0 { - first.text_range().start() - } else { - let previous = previous_significant_token(&first)?; - if previous.kind() == SyntaxKind::COMMA { - previous.text_range().start() - } else { - first.text_range().start() - } - }; - - let end = if run_start == 0 && run_end + 1 < entries.len() { - match next_significant_token(&last) { - Some(next) if next.kind() == SyntaxKind::COMMA => { - single_line_trivia_end_after(&next).unwrap_or_else(|| next.text_range().end()) - } - _ => last.text_range().end(), - } - } else { - last.text_range().end() - }; - - Some(TextRange::new(start, end)) -} - 
-fn contiguous_runs(indices: &[usize]) -> Vec<(usize, usize)> { - if indices.is_empty() { - return Vec::new(); - } - - let mut runs = Vec::new(); - let mut run_start = indices[0]; - let mut previous = indices[0]; - for &index in indices.iter().skip(1) { - if index == previous + 1 { - previous = index; - continue; - } - runs.push((run_start, previous)); - run_start = index; - previous = index; - } - runs.push((run_start, previous)); - runs -} - -fn bind_name_range(bind: &Bind) -> Option { - match bind { - Bind::BindDestruct(bind_destruct) => { - let destruct = BindDestruct::into(bind_destruct)?; - let Destruct::DestructFull(full) = destruct else { - return None; - }; - Some(full.name()?.syntax().text_range()) - } - Bind::BindFunction(bind_function) => Some(bind_function.name()?.syntax().text_range()), - } -} - -fn binding_name_range_for_diagnostic( - document: &Document, - diagnostic: &Diagnostic, -) -> Option { - if !is_unused_variable_diagnostic(diagnostic) { - return None; - } - - let text = document.text(); - let line_index = document.line_index(); - let offset = line_index.offset(diagnostic.range.start.into(), text)?; - let ast = document.ast(); - let token = token_at_offset(ast.syntax(), offset)?; - if token.kind() != SyntaxKind::IDENT { - return None; - } - Some(token.text_range()) -} - -fn unused_binding_name_ranges( - document: &Document, - diagnostics: &[Diagnostic], -) -> HashSet { - diagnostics - .iter() - .filter_map(|diagnostic| binding_name_range_for_diagnostic(document, diagnostic)) - .collect() -} - -fn removal_ranges_for_stmt_local( - stmt_local: &StmtLocal, - unused_name_ranges: &HashSet, -) -> Vec { - let binds: Vec = stmt_local.binds().collect(); - let unused_indices: Vec = binds - .iter() - .enumerate() - .filter_map(|(idx, bind)| { - let name_range = bind_name_range(bind)?; - unused_name_ranges.contains(&name_range).then_some(idx) - }) - .collect(); - if unused_indices.is_empty() { - return Vec::new(); - } - if unused_indices.len() == 
binds.len() { - return vec![stmt_local.syntax().text_range()]; - } - - let entries: Vec = binds.iter().map(|bind| bind.syntax().clone()).collect(); - contiguous_runs(&unused_indices) - .into_iter() - .filter_map(|(run_start, run_end)| remove_range_for_entry_run(&entries, run_start, run_end)) - .collect() -} - -fn removal_ranges_for_member_list( - member_list: &ObjBodyMemberList, - unused_name_ranges: &HashSet, -) -> Vec { - let members: Vec = member_list.members().collect(); - let unused_indices: Vec = members - .iter() - .enumerate() - .filter_map(|(idx, member)| { - let Member::MemberBindStmt(bind_stmt) = member else { - return None; - }; - let bind = bind_stmt.obj_local()?.bind()?; - let name_range = bind_name_range(&bind)?; - unused_name_ranges.contains(&name_range).then_some(idx) - }) - .collect(); - if unused_indices.is_empty() { - return Vec::new(); - } - - let entries: Vec = members - .iter() - .map(|member| member.syntax().clone()) - .collect(); - contiguous_runs(&unused_indices) - .into_iter() - .filter_map(|(run_start, run_end)| remove_range_for_entry_run(&entries, run_start, run_end)) - .collect() -} - -fn merge_overlapping_ranges(mut ranges: Vec) -> Vec { - if ranges.is_empty() { - return ranges; - } - ranges.sort_unstable_by_key(|range| (range.start(), range.end())); - - let mut merged = Vec::with_capacity(ranges.len()); - for range in ranges { - let Some(last) = merged.last_mut() else { - merged.push(range); - continue; - }; - if range.start() <= last.end() { - let end = if range.end() > last.end() { - range.end() - } else { - last.end() - }; - *last = TextRange::new(last.start(), end); - continue; - } - merged.push(range); - } - - merged -} - -fn removal_ranges_for_fix_all( - document: &Document, - diagnostics: &[Diagnostic], - policy: RemoveUnusedPolicy, -) -> Vec { - let unused_name_ranges = unused_binding_name_ranges(document, diagnostics); - if unused_name_ranges.is_empty() { - return Vec::new(); - } - - let mut ranges = Vec::new(); - let ast = 
document.ast(); - for stmt_local in ast.syntax().descendants().filter_map(StmtLocal::cast) { - ranges.extend(removal_ranges_for_stmt_local( - &stmt_local, - &unused_name_ranges, - )); - } - for member_list in ast - .syntax() - .descendants() - .filter_map(ObjBodyMemberList::cast) - { - ranges.extend(removal_ranges_for_member_list( - &member_list, - &unused_name_ranges, - )); - } - - let syntax = document.ast().syntax().clone(); - let expanded: Vec = ranges - .into_iter() - .filter_map(|range| expand_range_with_policy(&syntax, range, policy.comments)) - .collect(); - merge_overlapping_ranges(expanded) -} - fn is_import_expression(expr: Expr) -> bool { match expr.expr_base() { Some(ExprBase::ExprImport(_)) => true, @@ -464,7 +60,7 @@ fn remove_edit_for_bind( )) } -fn remove_unused_edit_for_diagnostic( +pub(super) fn remove_unused_edit_for_diagnostic( document: &Document, diagnostic: &Diagnostic, policy: RemoveUnusedPolicy, @@ -503,115 +99,6 @@ fn remove_unused_edit_for_diagnostic( bind_function.value(), ) } - -pub(super) fn remove_unused_binding_action_with_policy( - document: &Document, - uri: &Uri, - diagnostic: &Diagnostic, - policy: RemoveUnusedPolicy, -) -> Option { - if !is_unused_variable_diagnostic(diagnostic) { - return None; - } - - let text = document.text(); - let line_index = document.line_index(); - let (edit, import_binding) = remove_unused_edit_for_diagnostic(document, diagnostic, policy)?; - if !policy.flavor.allows(import_binding) { - return None; - } - - let mut changes = HashMap::new(); - changes.insert( - uri.clone(), - vec![TextEdit { - range: to_lsp_range(edit.range, line_index, text), - new_text: String::new(), - }], - ); - - Some( - CodeAction { - title: format!("Remove unused binding `{}`", edit.binding_name), - kind: Some(CodeActionKind::QUICKFIX), - diagnostics: Some(vec![diagnostic.clone()]), - edit: Some(WorkspaceEdit { - changes: Some(changes), - document_changes: None, - change_annotations: None, - }), - command: None, - is_preferred: 
Some(false), - disabled: None, - data: None, - } - .into(), - ) -} - -pub(super) fn remove_all_unused_bindings_action_with_policy( - document: &Document, - uri: &Uri, - context: &CodeActionContext, - policy: RemoveUnusedPolicy, -) -> Option { - if !wants_fix_all(context) { - return None; - } - - let diagnostics: Vec = context - .diagnostics - .iter() - .filter(|diagnostic| is_unused_variable_diagnostic(diagnostic)) - .filter(|diagnostic| { - remove_unused_edit_for_diagnostic(document, diagnostic, policy) - .is_some_and(|(_, import_binding)| policy.flavor.allows(import_binding)) - }) - .cloned() - .collect(); - if diagnostics.is_empty() { - return None; - } - - let mut ranges = removal_ranges_for_fix_all(document, &diagnostics, policy); - if ranges.is_empty() { - return None; - } - ranges.sort_unstable_by_key(|range| (range.start(), range.end())); - let text = document.text(); - let line_index = document.line_index(); - - let edits: Vec = ranges - .into_iter() - .rev() - .map(|range| TextEdit { - range: to_lsp_range(range, line_index, text), - new_text: String::new(), - }) - .collect(); - - let mut changes = HashMap::new(); - changes.insert(uri.clone(), edits); - - Some( - CodeAction { - title: "Remove all unused bindings".to_string(), - kind: Some(CodeActionKind::SOURCE_FIX_ALL), - diagnostics: Some(diagnostics), - edit: Some(WorkspaceEdit { - changes: Some(changes), - document_changes: None, - change_annotations: None, - }), - command: None, - is_preferred: Some(false), - disabled: None, - data: None, - } - .into(), - ) -} - #[cfg(test)] mod tests { use std::collections::HashMap; @@ -622,7 +109,7 @@ mod tests { DiagnosticSeverity, NumberOrString, Position, Range, TextEdit, Uri, WorkspaceEdit, }; - use super::super::{ + use super::super::super::{ code_actions, CodeActionConfig, RemoveUnusedCommentsMode, RemoveUnusedMode, UNUSED_VARIABLE_CODE, }; diff --git a/crates/jrsonnet-lsp-handlers/src/code_action/remove_unused/mod.rs 
b/crates/jrsonnet-lsp-handlers/src/code_action/remove_unused/mod.rs new file mode 100644 index 00000000..7e4a7119 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/code_action/remove_unused/mod.rs @@ -0,0 +1,181 @@ +mod edits; +mod ranges; + +use std::collections::HashMap; + +use jrsonnet_lsp_document::{to_lsp_range, Document}; +use lsp_types::{ + CodeAction, CodeActionContext, CodeActionKind, CodeActionOrCommand, Diagnostic, TextEdit, Uri, + WorkspaceEdit, +}; + +use self::{edits::remove_unused_edit_for_diagnostic, ranges::removal_ranges_for_fix_all}; +use super::{ + is_unused_variable_diagnostic, wants_fix_all, CodeActionConfig, RemoveUnusedCommentsMode, + RemoveUnusedMode, +}; + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +enum RemovalFlavor { + All, + ImportBindings, + NonImportBindings, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +enum CommentPolicy { + None, + Above, + Below, + All, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub(super) struct RemoveUnusedPolicy { + flavor: RemovalFlavor, + comments: CommentPolicy, +} + +impl RemovalFlavor { + const fn allows(self, import_binding: bool) -> bool { + match self { + Self::All => true, + Self::ImportBindings => import_binding, + Self::NonImportBindings => !import_binding, + } + } +} + +impl CommentPolicy { + const fn keeps_above_comments(self) -> bool { + matches!(self, Self::Above | Self::All) + } + + const fn keeps_below_comments(self) -> bool { + matches!(self, Self::Below | Self::All) + } +} + +impl RemoveUnusedPolicy { + pub(super) const fn from_config(config: CodeActionConfig) -> Self { + let flavor = match config.remove_unused { + RemoveUnusedMode::All => RemovalFlavor::All, + RemoveUnusedMode::ImportBindings => RemovalFlavor::ImportBindings, + RemoveUnusedMode::NonImportBindings => RemovalFlavor::NonImportBindings, + }; + let comments = match config.remove_unused_comments { + RemoveUnusedCommentsMode::None => CommentPolicy::None, + RemoveUnusedCommentsMode::Above => 
CommentPolicy::Above, + RemoveUnusedCommentsMode::Below => CommentPolicy::Below, + RemoveUnusedCommentsMode::All => CommentPolicy::All, + }; + Self { flavor, comments } + } +} +pub(super) fn remove_unused_binding_action_with_policy( + document: &Document, + uri: &Uri, + diagnostic: &Diagnostic, + policy: RemoveUnusedPolicy, +) -> Option { + if !is_unused_variable_diagnostic(diagnostic) { + return None; + } + + let text = document.text(); + let line_index = document.line_index(); + let (edit, import_binding) = remove_unused_edit_for_diagnostic(document, diagnostic, policy)?; + if !policy.flavor.allows(import_binding) { + return None; + } + + let mut changes = HashMap::new(); + changes.insert( + uri.clone(), + vec![TextEdit { + range: to_lsp_range(edit.range, line_index, text), + new_text: String::new(), + }], + ); + + Some( + CodeAction { + title: format!("Remove unused binding `{}`", edit.binding_name), + kind: Some(CodeActionKind::QUICKFIX), + diagnostics: Some(vec![diagnostic.clone()]), + edit: Some(WorkspaceEdit { + changes: Some(changes), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(false), + disabled: None, + data: None, + } + .into(), + ) +} + +pub(super) fn remove_all_unused_bindings_action_with_policy( + document: &Document, + uri: &Uri, + context: &CodeActionContext, + policy: RemoveUnusedPolicy, +) -> Option { + if !wants_fix_all(context) { + return None; + } + + let diagnostics: Vec = context + .diagnostics + .iter() + .filter(|diagnostic| is_unused_variable_diagnostic(diagnostic)) + .filter(|diagnostic| { + remove_unused_edit_for_diagnostic(document, diagnostic, policy) + .is_some_and(|(_, import_binding)| policy.flavor.allows(import_binding)) + }) + .cloned() + .collect(); + if diagnostics.is_empty() { + return None; + } + + let mut ranges = removal_ranges_for_fix_all(document, &diagnostics, policy); + if ranges.is_empty() { + return None; + } + ranges.sort_unstable_by_key(|range| (range.start(), 
range.end())); + let text = document.text(); + let line_index = document.line_index(); + + let edits: Vec = ranges + .into_iter() + .rev() + .map(|range| TextEdit { + range: to_lsp_range(range, line_index, text), + new_text: String::new(), + }) + .collect(); + + let mut changes = HashMap::new(); + changes.insert(uri.clone(), edits); + + Some( + CodeAction { + title: "Remove all unused bindings".to_string(), + kind: Some(CodeActionKind::SOURCE_FIX_ALL), + diagnostics: Some(diagnostics), + edit: Some(WorkspaceEdit { + changes: Some(changes), + document_changes: None, + change_annotations: None, + }), + command: None, + is_preferred: Some(false), + disabled: None, + data: None, + } + .into(), + ) +} diff --git a/crates/jrsonnet-lsp-handlers/src/code_action/remove_unused/ranges.rs b/crates/jrsonnet-lsp-handlers/src/code_action/remove_unused/ranges.rs new file mode 100644 index 00000000..91ca871f --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/code_action/remove_unused/ranges.rs @@ -0,0 +1,348 @@ +use std::collections::HashSet; + +use jrsonnet_lsp_document::{token_at_offset, Document}; +use jrsonnet_rowan_parser::{ + nodes::{Bind, BindDestruct, Destruct, Member, ObjBodyMemberList, StmtLocal}, + rowan::{TextRange, TextSize, TokenAtOffset}, + AstNode, SyntaxKind, SyntaxNode, SyntaxToken, +}; +use lsp_types::Diagnostic; + +use super::{is_unused_variable_diagnostic, CommentPolicy, RemoveUnusedPolicy}; + +const fn is_trivia_kind(kind: SyntaxKind) -> bool { + matches!( + kind, + SyntaxKind::WHITESPACE + | SyntaxKind::MULTI_LINE_COMMENT + | SyntaxKind::SINGLE_LINE_HASH_COMMENT + | SyntaxKind::SINGLE_LINE_SLASH_COMMENT + ) +} + +const fn is_comment_kind(kind: SyntaxKind) -> bool { + matches!( + kind, + SyntaxKind::MULTI_LINE_COMMENT + | SyntaxKind::SINGLE_LINE_HASH_COMMENT + | SyntaxKind::SINGLE_LINE_SLASH_COMMENT + ) +} + +const fn is_whitespace_kind(kind: SyntaxKind) -> bool { + matches!(kind, SyntaxKind::WHITESPACE) +} + +fn can_absorb_trivia(token: &SyntaxToken, 
keep_comments: bool) -> bool { + if is_whitespace_kind(token.kind()) { + return !token.text().contains("\n\n"); + } + + is_comment_kind(token.kind()) && keep_comments +} + +fn previous_significant_token(token: &SyntaxToken) -> Option { + std::iter::successors(token.prev_token(), SyntaxToken::prev_token) + .find(|candidate| !is_trivia_kind(candidate.kind())) +} + +fn next_significant_token(token: &SyntaxToken) -> Option { + std::iter::successors(token.next_token(), SyntaxToken::next_token) + .find(|candidate| !is_trivia_kind(candidate.kind())) +} + +fn single_line_trivia_end_after(token: &SyntaxToken) -> Option { + let trivia = token.next_token()?; + if !is_trivia_kind(trivia.kind()) || trivia.text().contains('\n') { + return None; + } + Some(trivia.text_range().end()) +} + +fn token_at_range_start(syntax: &SyntaxNode, range: TextRange) -> Option { + match syntax.token_at_offset(range.start()) { + TokenAtOffset::None => None, + TokenAtOffset::Single(token) => Some(token), + TokenAtOffset::Between(_, right) => Some(right), + } +} + +fn token_at_range_end(syntax: &SyntaxNode, range: TextRange) -> Option { + let end = range.end().checked_sub(TextSize::new(1))?; + match syntax.token_at_offset(end) { + TokenAtOffset::None => None, + TokenAtOffset::Single(token) => Some(token), + TokenAtOffset::Between(left, _) => Some(left), + } +} + +pub(super) fn expand_range_with_policy( + syntax: &SyntaxNode, + range: TextRange, + comments: CommentPolicy, +) -> Option { + let mut start = token_at_range_start(syntax, range)?; + while let Some(previous) = start.prev_token() { + if can_absorb_trivia(&previous, comments.keeps_above_comments()) { + start = previous; + continue; + } + break; + } + + let mut end = token_at_range_end(syntax, range)?; + while let Some(next) = end.next_token() { + if can_absorb_trivia(&next, comments.keeps_below_comments()) { + end = next; + continue; + } + break; + } + + Some(TextRange::new( + start.text_range().start(), + end.text_range().end(), + )) +} + 
+pub(super) fn remove_range_for_list_entry(entry: &SyntaxNode) -> Option { + let first = entry.first_token()?; + let last = entry.last_token()?; + + if let Some(next) = next_significant_token(&last) { + if next.kind() == SyntaxKind::COMMA { + let end = + single_line_trivia_end_after(&next).unwrap_or_else(|| next.text_range().end()); + return Some(TextRange::new(first.text_range().start(), end)); + } + } + + if let Some(previous) = previous_significant_token(&first) { + if previous.kind() == SyntaxKind::COMMA { + return Some(TextRange::new( + previous.text_range().start(), + last.text_range().end(), + )); + } + } + + Some(entry.text_range()) +} + +fn remove_range_for_entry_run( + entries: &[SyntaxNode], + run_start: usize, + run_end: usize, +) -> Option { + let first = entries.get(run_start)?.first_token()?; + let last = entries.get(run_end)?.last_token()?; + + let start = if run_start == 0 { + first.text_range().start() + } else { + let previous = previous_significant_token(&first)?; + if previous.kind() == SyntaxKind::COMMA { + previous.text_range().start() + } else { + first.text_range().start() + } + }; + + let end = if run_start == 0 && run_end + 1 < entries.len() { + match next_significant_token(&last) { + Some(next) if next.kind() == SyntaxKind::COMMA => { + single_line_trivia_end_after(&next).unwrap_or_else(|| next.text_range().end()) + } + _ => last.text_range().end(), + } + } else { + last.text_range().end() + }; + + Some(TextRange::new(start, end)) +} + +fn contiguous_runs(indices: &[usize]) -> Vec<(usize, usize)> { + if indices.is_empty() { + return Vec::new(); + } + + let mut runs = Vec::new(); + let mut run_start = indices[0]; + let mut previous = indices[0]; + for &index in indices.iter().skip(1) { + if index == previous + 1 { + previous = index; + continue; + } + runs.push((run_start, previous)); + run_start = index; + previous = index; + } + runs.push((run_start, previous)); + runs +} + +fn bind_name_range(bind: &Bind) -> Option { + match bind { + 
Bind::BindDestruct(bind_destruct) => { + let destruct = BindDestruct::into(bind_destruct)?; + let Destruct::DestructFull(full) = destruct else { + return None; + }; + Some(full.name()?.syntax().text_range()) + } + Bind::BindFunction(bind_function) => Some(bind_function.name()?.syntax().text_range()), + } +} + +fn binding_name_range_for_diagnostic( + document: &Document, + diagnostic: &Diagnostic, +) -> Option { + if !is_unused_variable_diagnostic(diagnostic) { + return None; + } + + let text = document.text(); + let line_index = document.line_index(); + let offset = line_index.offset(diagnostic.range.start.into(), text)?; + let ast = document.ast(); + let token = token_at_offset(ast.syntax(), offset)?; + if token.kind() != SyntaxKind::IDENT { + return None; + } + Some(token.text_range()) +} + +fn unused_binding_name_ranges( + document: &Document, + diagnostics: &[Diagnostic], +) -> HashSet { + diagnostics + .iter() + .filter_map(|diagnostic| binding_name_range_for_diagnostic(document, diagnostic)) + .collect() +} + +fn removal_ranges_for_stmt_local( + stmt_local: &StmtLocal, + unused_name_ranges: &HashSet, +) -> Vec { + let binds: Vec = stmt_local.binds().collect(); + let unused_indices: Vec = binds + .iter() + .enumerate() + .filter_map(|(idx, bind)| { + let name_range = bind_name_range(bind)?; + unused_name_ranges.contains(&name_range).then_some(idx) + }) + .collect(); + if unused_indices.is_empty() { + return Vec::new(); + } + if unused_indices.len() == binds.len() { + return vec![stmt_local.syntax().text_range()]; + } + + let entries: Vec = binds.iter().map(|bind| bind.syntax().clone()).collect(); + contiguous_runs(&unused_indices) + .into_iter() + .filter_map(|(run_start, run_end)| remove_range_for_entry_run(&entries, run_start, run_end)) + .collect() +} + +fn removal_ranges_for_member_list( + member_list: &ObjBodyMemberList, + unused_name_ranges: &HashSet, +) -> Vec { + let members: Vec = member_list.members().collect(); + let unused_indices: Vec = members + 
.iter() + .enumerate() + .filter_map(|(idx, member)| { + let Member::MemberBindStmt(bind_stmt) = member else { + return None; + }; + let bind = bind_stmt.obj_local()?.bind()?; + let name_range = bind_name_range(&bind)?; + unused_name_ranges.contains(&name_range).then_some(idx) + }) + .collect(); + if unused_indices.is_empty() { + return Vec::new(); + } + + let entries: Vec = members + .iter() + .map(|member| member.syntax().clone()) + .collect(); + contiguous_runs(&unused_indices) + .into_iter() + .filter_map(|(run_start, run_end)| remove_range_for_entry_run(&entries, run_start, run_end)) + .collect() +} + +fn merge_overlapping_ranges(mut ranges: Vec) -> Vec { + if ranges.is_empty() { + return ranges; + } + ranges.sort_unstable_by_key(|range| (range.start(), range.end())); + + let mut merged = Vec::with_capacity(ranges.len()); + for range in ranges { + let Some(last) = merged.last_mut() else { + merged.push(range); + continue; + }; + if range.start() <= last.end() { + let end = if range.end() > last.end() { + range.end() + } else { + last.end() + }; + *last = TextRange::new(last.start(), end); + continue; + } + merged.push(range); + } + + merged +} + +pub(super) fn removal_ranges_for_fix_all( + document: &Document, + diagnostics: &[Diagnostic], + policy: RemoveUnusedPolicy, +) -> Vec { + let unused_name_ranges = unused_binding_name_ranges(document, diagnostics); + if unused_name_ranges.is_empty() { + return Vec::new(); + } + + let mut ranges = Vec::new(); + let ast = document.ast(); + for stmt_local in ast.syntax().descendants().filter_map(StmtLocal::cast) { + ranges.extend(removal_ranges_for_stmt_local( + &stmt_local, + &unused_name_ranges, + )); + } + for member_list in ast + .syntax() + .descendants() + .filter_map(ObjBodyMemberList::cast) + { + ranges.extend(removal_ranges_for_member_list( + &member_list, + &unused_name_ranges, + )); + } + + let syntax = document.ast().syntax().clone(); + let expanded: Vec = ranges + .into_iter() + .filter_map(|range| 
expand_range_with_policy(&syntax, range, policy.comments)) + .collect(); + merge_overlapping_ranges(expanded) +} From dad20cc946730c61510983a77715f874e1635471 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Mon, 16 Feb 2026 15:13:53 +0000 Subject: [PATCH 125/210] refactor(lsp-inference): split expr inference module Decompose `expr.rs` into smaller modules with clear boundaries: - `expr/mod.rs` keeps public entrypoints and bind/env plumbing - `expr/base.rs` handles base-expression dispatch and primitives - `expr/advanced.rs` covers call/collection/function inference Preserve behavior and keep existing inline tests under the advanced implementation module. --- .../src/{expr.rs => expr/advanced.rs} | 762 +----------------- .../jrsonnet-lsp-inference/src/expr/base.rs | 453 +++++++++++ crates/jrsonnet-lsp-inference/src/expr/mod.rs | 310 +++++++ 3 files changed, 775 insertions(+), 750 deletions(-) rename crates/jrsonnet-lsp-inference/src/{expr.rs => expr/advanced.rs} (68%) create mode 100644 crates/jrsonnet-lsp-inference/src/expr/base.rs create mode 100644 crates/jrsonnet-lsp-inference/src/expr/mod.rs diff --git a/crates/jrsonnet-lsp-inference/src/expr.rs b/crates/jrsonnet-lsp-inference/src/expr/advanced.rs similarity index 68% rename from crates/jrsonnet-lsp-inference/src/expr.rs rename to crates/jrsonnet-lsp-inference/src/expr/advanced.rs index fb5a2b98..c7d3c20f 100644 --- a/crates/jrsonnet-lsp-inference/src/expr.rs +++ b/crates/jrsonnet-lsp-inference/src/expr/advanced.rs @@ -1,755 +1,17 @@ -//! Expression type inference. 
- -use jrsonnet_lsp_document::Document; -use jrsonnet_lsp_import::extract_import_path; -use jrsonnet_lsp_scope::{expr_resolves_to_builtin_std, var_resolves_to_builtin_std}; +use jrsonnet_lsp_scope::expr_resolves_to_builtin_std; use jrsonnet_lsp_types::{ - FieldDefInterned, FieldVis, FunctionData, NumBounds, ObjectData, ParamInterned, ReturnSpec, Ty, - TyData, -}; -use jrsonnet_rowan_parser::{ - nodes::{BinaryOperatorKind, Bind, ExprBase, ImportKindKind, LiteralKind}, - AstNode, + FunctionData, NumBounds, ObjectData, ParamInterned, ReturnSpec, Ty, TyData, }; -use rowan::TextRange; +use jrsonnet_rowan_parser::{nodes::ExprBase, AstNode}; use rustc_hash::FxHashMap; +use super::{bind_destruct_with_type_ty, infer_expr_ty_impl, TypeRecorder}; use crate::{ - env::TypeEnv, - flow::{self, Facts}, - helpers::{ - extract_params_with_default_types_ty, extract_var_name_from_expr, - infer_stdlib_field_access_ty, - }, - object::{infer_object_type_ty, infer_object_type_with_super_ty}, + env::TypeEnv, flow, helpers::extract_params_with_default_types_ty, + object::infer_object_type_with_super_ty, }; -/// Apply type facts to the environment, narrowing variable types. -/// -/// This is used to apply facts extracted from assert statements and -/// type guard conditions to narrow types for subsequent code. 
-fn apply_facts_to_env(facts: &Facts, env: &mut TypeEnv) { - for (var_name, fact) in facts.iter() { - // Get the current type of the variable - let current_ty = env.lookup(var_name).unwrap_or(Ty::ANY); - // Apply the fact to narrow the type - let narrowed_ty = fact.apply_to(current_ty, env.store_mut()); - // Update the environment with the narrowed type - env.define_ty(var_name.clone(), narrowed_ty); - } -} - -trait TypeRecorder { - fn record(&mut self, range: TextRange, ty: Ty); -} - -struct NoopRecorder; - -impl TypeRecorder for NoopRecorder { - fn record(&mut self, _range: TextRange, _ty: Ty) {} -} - -impl TypeRecorder for FxHashMap { - fn record(&mut self, range: TextRange, ty: Ty) { - self.insert(range, ty); - } -} - -fn record_expr_and_base( - recorder: &mut R, - expr: &jrsonnet_rowan_parser::nodes::Expr, - ty: Ty, -) { - recorder.record(expr.syntax().text_range(), ty); - if let Some(base) = expr.expr_base() { - recorder.record(base.syntax().text_range(), ty); - } -} - -/// Infer the type of a document's root expression, returning an interned `Ty` and the environment. -/// -/// This is useful for tests that need to inspect the type structure using `TyData`. -#[must_use] -pub fn infer_document_type_ty(document: &Document) -> (Ty, TypeEnv) { - let ast = document.ast(); - let mut env = TypeEnv::new_default(); - - let ty = ast - .expr() - .map_or(Ty::ANY, |expr| infer_expr_ty(&expr, &mut env)); - - (ty, env) -} - -/// Infer the type of an expression, returning an interned `Ty`. -pub fn infer_expr_ty(expr: &jrsonnet_rowan_parser::nodes::Expr, env: &mut TypeEnv) -> Ty { - infer_expr_ty_with_expected(expr, env, None) -} - -pub(crate) fn infer_expr_ty_and_record( - expr: &jrsonnet_rowan_parser::nodes::Expr, - env: &mut TypeEnv, - expected: Option, - recorder: &mut FxHashMap, -) -> Ty { - infer_expr_ty_impl(expr, env, expected, recorder) -} - -/// Infer the type of an expression with an optional expected type, returning `Ty`. 
-/// -/// This is the efficient internal version that works with interned types throughout. -pub fn infer_expr_ty_with_expected( - expr: &jrsonnet_rowan_parser::nodes::Expr, - env: &mut TypeEnv, - expected: Option, -) -> Ty { - let mut recorder = NoopRecorder; - infer_expr_ty_impl(expr, env, expected, &mut recorder) -} - -fn infer_expr_ty_impl( - expr: &jrsonnet_rowan_parser::nodes::Expr, - env: &mut TypeEnv, - expected: Option, - recorder: &mut R, -) -> Ty { - // First, handle local bindings and assert statements that may precede the expression - for stmt in expr.stmts() { - match stmt { - jrsonnet_rowan_parser::nodes::Stmt::StmtLocal(stmt_local) => { - for bind in stmt_local.binds() { - infer_bind_type_ty(&bind, env, recorder); - } - } - jrsonnet_rowan_parser::nodes::Stmt::StmtAssert(stmt_assert) => { - // Extract type facts from assert conditions and apply them - if let Some(assertion) = stmt_assert.assertion() { - if let Some(cond) = assertion.condition() { - let facts = flow::extract_facts(&cond); - apply_facts_to_env(&facts, env); - } - } - } - } - } - - // Get the base expression type - let ty = expr.expr_base().map_or(Ty::ANY, |base| { - infer_base_ty(&base, env, expected, recorder) - }); - record_expr_and_base(recorder, expr, ty); - ty -} - -/// Infer types from a bind (local variable definition) using interned types. 
-fn infer_bind_type_ty(bind: &Bind, env: &mut TypeEnv, recorder: &mut R) { - match bind { - Bind::BindDestruct(bd) => { - if let Some(destruct) = bd.into() { - if let jrsonnet_rowan_parser::nodes::Destruct::DestructFull(full) = destruct { - if let Some(name_node) = full.name() { - if let Some(ident) = name_node.ident_lit() { - let name = ident.text().to_string(); - let ty = bd - .value() - .map_or(Ty::ANY, |v| infer_expr_ty_impl(&v, env, None, recorder)); - env.define_ty(name, ty); - } - } - } - } - } - Bind::BindFunction(bf) => { - let Some(name_node) = bf.name() else { - return; - }; - let Some(ident) = name_node.ident_lit() else { - return; - }; - let name = ident.text().to_string(); - let params = bf - .params() - .map(|p| extract_params_with_default_types_ty(&p, env)) - .unwrap_or_default(); - - // Install a provisional function first so recursive self-calls can resolve. - let provisional_func = FunctionData { - params: params.clone(), - return_spec: ReturnSpec::Fixed(Ty::ANY), - variadic: false, - }; - let provisional_ty = env.store_mut().intern(TyData::Function(provisional_func)); - env.define_ty(name.clone(), provisional_ty); - - let (return_ty, param_constraints) = if env.can_infer_function_body() { - bf.value().map_or_else( - || (Ty::ANY, FxHashMap::default()), - |body| { - env.push_scope(); - let param_names: Vec = - params.iter().map(|p| p.name.clone()).collect(); - for param in ¶ms { - env.define_ty(param.name.clone(), param.ty); - } - - env.start_constraint_tracking(¶m_names); - env.enter_function(); - let body_ty = infer_expr_ty_impl(&body, env, None, recorder); - env.exit_function(); - let constraints = env.stop_constraint_tracking_ty(); - env.pop_scope(); - (body_ty, constraints) - }, - ) - } else { - (Ty::ANY, FxHashMap::default()) - }; - - let final_params: Vec = params - .into_iter() - .map(|param| { - let mut narrowed_ty = param.ty; - if let Some(constraints) = param_constraints.get(¶m.name) { - for constraint_ty in constraints { - narrowed_ty 
= env.store_mut().narrow(narrowed_ty, *constraint_ty); - } - } - ParamInterned { - name: param.name, - ty: narrowed_ty, - has_default: param.has_default, - } - }) - .collect(); - - let final_func = FunctionData { - params: final_params, - return_spec: ReturnSpec::Fixed(return_ty), - variadic: false, - }; - let final_ty = env.store_mut().intern(TyData::Function(final_func)); - env.define_ty(name, final_ty); - } - } -} - -/// Bind a destructuring pattern with an interned type. -/// -/// This is used for comprehension variables where we know the element type -/// from the iterator expression. -pub(super) fn bind_destruct_with_type_ty( - destruct: &jrsonnet_rowan_parser::nodes::Destruct, - ty: Ty, - env: &mut TypeEnv, -) { - use jrsonnet_rowan_parser::nodes::{Destruct, DestructArrayPart}; - - match destruct { - Destruct::DestructFull(full) => { - let Some(ident) = full.name().and_then(|n| n.ident_lit()) else { - return; - }; - env.define_ty(ident.text().to_string(), ty); - } - Destruct::DestructArray(arr) => { - let elem_types = extract_array_element_types_ty(ty, env); - for (i, part) in arr.destruct_array_parts().enumerate() { - let DestructArrayPart::DestructArrayElement(elem) = part else { - continue; - }; - let Some(inner) = elem.destruct() else { - continue; - }; - let elem_ty = elem_types.get(i).copied().unwrap_or(Ty::ANY); - bind_destruct_with_type_ty(&inner, elem_ty, env); - } - } - Destruct::DestructObject(obj) => { - for field in obj.destruct_object_fields() { - let Some(inner) = field.destruct() else { - continue; - }; - let field_ty = lookup_destruct_field_type_ty(&inner, ty, env); - bind_destruct_with_type_ty(&inner, field_ty, env); - } - } - Destruct::DestructSkip(_) => {} - } -} - -/// Extract element types from an array or tuple type (Ty version). -fn extract_array_element_types_ty(ty: Ty, env: &TypeEnv) -> Vec { - let store = env.store(); - match store.get(ty) { - TyData::Tuple { ref elems } => elems.clone(), - TyData::Array { elem, .. 
} => vec![elem], - _ => vec![], - } -} - -/// Look up the type for a destructured field from an object type (Ty version). -fn lookup_destruct_field_type_ty( - destruct: &jrsonnet_rowan_parser::nodes::Destruct, - ty: Ty, - env: &TypeEnv, -) -> Ty { - let store = env.store(); - let TyData::Object(ref obj_data) = store.get(ty) else { - return Ty::ANY; - }; - - let jrsonnet_rowan_parser::nodes::Destruct::DestructFull(full) = destruct else { - return Ty::ANY; - }; - - let Some(ident) = full.name().and_then(|n| n.ident_lit()) else { - return Ty::ANY; - }; - - obj_data.get_field(ident.text()).map_or(Ty::ANY, |fd| fd.ty) -} - -/// Check if an expression is guaranteed to diverge (never return). -/// -/// An expression diverges if it has type `Never` - meaning it always -/// throws an error. This is used for unreachable code detection. -pub fn is_divergent(expr: &jrsonnet_rowan_parser::nodes::Expr, env: &mut TypeEnv) -> bool { - infer_expr_ty(expr, env).is_never() -} - -/// Infer the type of a base expression with an optional expected type, returning `Ty`. -/// -/// This is the efficient internal version that works with interned types throughout. 
-fn infer_base_ty( - base: &ExprBase, - env: &mut TypeEnv, - expected: Option, - recorder: &mut R, -) -> Ty { - match base { - // Primitives - return constants directly - ExprBase::ExprLiteral(lit) => { - if let Some(literal) = lit.literal() { - match literal.kind() { - LiteralKind::NullKw => return Ty::NULL, - LiteralKind::TrueKw => return Ty::TRUE, - LiteralKind::FalseKw => return Ty::FALSE, - LiteralKind::SelfKw => { - // `self` refers to the innermost object - return env - .self_ty() - .unwrap_or_else(|| env.store_mut().object_any()); - } - LiteralKind::Dollar => { - // `$` refers to the outermost (root) object - return env - .root_ty() - .unwrap_or_else(|| env.store_mut().object_any()); - } - LiteralKind::SuperKw => { - // `super` refers to the base object being extended - return env - .super_ty() - .unwrap_or_else(|| env.store_mut().object_any()); - } - } - } - Ty::ANY - } - ExprBase::ExprNumber(_) => Ty::NUMBER, - ExprBase::ExprString(_) => Ty::STRING, - ExprBase::ExprError(_) => Ty::NEVER, - - // Variable lookup - use Ty-native lookup - ExprBase::ExprVar(var) => { - if var_resolves_to_builtin_std(var) { - return env.store_mut().object_any(); - } - - var.name() - .and_then(|name| name.ident_lit()) - .and_then(|ident| env.lookup(ident.text())) - .unwrap_or(Ty::ANY) - } - - // Parenthesized expression - recurse with expected type - ExprBase::ExprParened(parens) => { - if let Some(inner) = parens.expr() { - return infer_expr_ty_impl(&inner, env, expected, recorder); - } - Ty::ANY - } - - // Import - try to resolve the type from the import cache - ExprBase::ExprImport(import) => { - let Some(kind) = import.import_kind().map(|token| token.kind()) else { - return Ty::ANY; - }; - match kind { - ImportKindKind::ImportKw => { - let Some(path) = extract_import_path(import) else { - return Ty::ANY; - }; - if path.is_empty() { - return Ty::ANY; - } - // Try to resolve the import type, fall back to ANY - env.resolve_import(&path).map_or(Ty::ANY, Ty::from) - } - 
ImportKindKind::ImportstrKw => Ty::STRING, - ImportKindKind::ImportbinKw => { - let byte_ty = env - .store_mut() - .bounded_number(NumBounds::between(0.0, 255.0)); - env.store_mut().array(byte_ty) - } - } - } - - // Unary operators - ExprBase::ExprUnary(unary) => { - let rhs_ty = unary - .rhs() - .map_or(Ty::ANY, |rhs| infer_expr_ty_impl(&rhs, env, None, recorder)); - if rhs_ty == Ty::NEVER { - return Ty::NEVER; - } - let Some(op_kind) = unary.unary_operator().map(|op| op.kind()) else { - return Ty::ANY; - }; - if op_kind.returns_boolean() { - return Ty::BOOL; - } - if op_kind.returns_number() { - return Ty::NUMBER; - } - Ty::ANY - } - - // Binary operators - handle simple cases directly - ExprBase::ExprBinary(binary) => infer_binary_expr_base_ty(binary, env, recorder), - - // Index access: arr[0], obj["field"], str[0] - ExprBase::ExprIndex(idx) => infer_index_expr_base_ty(idx, env, recorder), - - // Slice: arr[1:3], str[::2] - ExprBase::ExprSlice(slice) => infer_slice_expr_base_ty(slice, env, recorder), - - // If-then-else with flow typing - ExprBase::ExprIfThenElse(if_expr) => { - infer_if_then_else_expr_base_ty(if_expr, env, expected, recorder) - } - - // Field access: obj.field or std.fn - ExprBase::ExprField(field) => infer_field_expr_base_ty(field, env, recorder), - - // Function call: fn(args) - ExprBase::ExprCall(call) => infer_call_expr_base_ty(call, env, recorder), - - // Array literal: [a, b, c] - ExprBase::ExprArray(arr) => infer_array_expr_base_ty(arr, env, expected, recorder), - - // Array comprehension: [expr for x in arr] - ExprBase::ExprArrayComp(comp) => infer_array_comp_expr_base_ty(comp, env, recorder), - - // Object literal: { field: value } - ExprBase::ExprObject(obj) => { - infer_object_type_ty(obj.obj_body().as_ref(), env, &mut |expr, env| { - infer_expr_ty_impl(expr, env, None, recorder) - }) - } - - // Function definition: function(x) body - ExprBase::ExprFunction(func) => infer_function_expr_base_ty(func, env, recorder), - - // Object 
extension: base { ... } - ExprBase::ExprObjExtend(extend) => infer_obj_extend_expr_base_ty(extend, env, recorder), - } -} - -fn infer_binary_expr_base_ty( - binary: &jrsonnet_rowan_parser::nodes::ExprBinary, - env: &mut TypeEnv, - recorder: &mut R, -) -> Ty { - let lhs_ty = binary - .lhs() - .map_or(Ty::ANY, |e| infer_expr_ty_impl(&e, env, None, recorder)); - if lhs_ty == Ty::NEVER { - return Ty::NEVER; - } - let rhs_ty = binary - .rhs() - .map_or(Ty::ANY, |e| infer_expr_ty_impl(&e, env, None, recorder)); - if rhs_ty == Ty::NEVER { - return Ty::NEVER; - } - - let Some(op_kind) = binary.binary_operator().map(|op| op.kind()) else { - return Ty::ANY; - }; - - if op_kind == BinaryOperatorKind::Modulo && lhs_ty == Ty::STRING { - return Ty::STRING; - } - if op_kind.returns_number() { - return Ty::NUMBER; - } - if op_kind.returns_boolean() { - return Ty::BOOL; - } - if op_kind == BinaryOperatorKind::Plus { - if lhs_ty == Ty::STRING && rhs_ty == Ty::STRING { - return Ty::STRING; - } - if lhs_ty == Ty::NUMBER && rhs_ty == Ty::NUMBER { - return Ty::NUMBER; - } - let store = env.store_mut(); - let lhs_data = store.get(lhs_ty); - let rhs_data = store.get(rhs_ty); - return match (&lhs_data, &rhs_data) { - (TyData::Array { elem: l, .. }, TyData::Array { elem: r, .. 
}) => { - let elem_union = store.union(vec![*l, *r]); - store.array(elem_union) - } - (TyData::Tuple { elems: l }, TyData::Tuple { elems: r }) => { - let mut elems = l.clone(); - elems.extend(r.iter().copied()); - store.tuple(elems) - } - (TyData::Object(left_obj), TyData::Object(right_obj)) => { - let merged = ObjectData::merge(left_obj, right_obj); - store.object(merged) - } - _ => Ty::NUMBER, - }; - } - if op_kind.is_logical_short_circuit() { - return env.store_mut().union(vec![lhs_ty, rhs_ty]); - } - Ty::ANY -} - -fn infer_index_expr_base_ty( - idx: &jrsonnet_rowan_parser::nodes::ExprIndex, - env: &mut TypeEnv, - recorder: &mut R, -) -> Ty { - let base_ty = idx - .base() - .map_or(Ty::ANY, |e| infer_expr_ty_impl(&e, env, None, recorder)); - if base_ty == Ty::NEVER { - return Ty::NEVER; - } - let mut index_literal = None; - if let Some(index_expr) = idx.index() { - let idx_ty = infer_expr_ty_impl(&index_expr, env, None, recorder); - if idx_ty == Ty::NEVER { - return Ty::NEVER; - } - index_literal = extract_string_literal(&index_expr); - } - if base_ty == Ty::STRING { - return Ty::STRING; - } - let store = env.store_mut(); - match store.get(base_ty) { - TyData::Array { elem, .. 
} => elem, - TyData::Tuple { ref elems } => { - let elems_copy: Vec = elems.clone(); - store.union(elems_copy) - } - TyData::Object(_) | TyData::Union(_) => { - if let Some(field_name) = index_literal.as_deref() { - return object_field_ty(base_ty, field_name, store).unwrap_or(Ty::ANY); - } - Ty::ANY - } - _ => Ty::ANY, - } -} - -fn object_field_ty( - ty: Ty, - field_name: &str, - store: &mut jrsonnet_lsp_types::MutStore, -) -> Option { - match store.get(ty) { - TyData::Object(obj) => obj - .fields - .iter() - .find(|(name, _)| name == field_name) - .map(|(_, field)| field.ty) - .or_else(|| obj.has_unknown.then_some(Ty::ANY)), - TyData::Union(types) => { - let field_types = types - .into_iter() - .filter_map(|variant| object_field_ty(variant, field_name, store)) - .collect::>(); - if field_types.is_empty() { - None - } else { - Some(store.union(field_types)) - } - } - _ => None, - } -} - -fn extract_string_literal(expr: &jrsonnet_rowan_parser::nodes::Expr) -> Option { - let base = expr.expr_base()?; - let ExprBase::ExprString(s) = base else { - return None; - }; - let text = s.syntax().first_token()?.text().to_string(); - if (text.starts_with('"') && text.ends_with('"')) - || (text.starts_with('\'') && text.ends_with('\'')) - { - return Some(text[1..text.len() - 1].to_string()); - } - None -} - -fn infer_slice_expr_base_ty( - slice: &jrsonnet_rowan_parser::nodes::ExprSlice, - env: &mut TypeEnv, - recorder: &mut R, -) -> Ty { - let base_ty = slice - .base() - .map_or(Ty::ANY, |e| infer_expr_ty_impl(&e, env, None, recorder)); - if base_ty == Ty::NEVER { - return Ty::NEVER; - } - let store = env.store_mut(); - match store.get(base_ty) { - TyData::Array { .. 
} => base_ty, - TyData::Tuple { ref elems } => { - let elems_copy: Vec = elems.clone(); - let elem_union = store.union(elems_copy); - store.array(elem_union) - } - _ if base_ty == Ty::STRING => Ty::STRING, - _ => Ty::ANY, - } -} - -fn infer_if_then_else_expr_base_ty( - if_expr: &jrsonnet_rowan_parser::nodes::ExprIfThenElse, - env: &mut TypeEnv, - expected: Option, - recorder: &mut R, -) -> Ty { - let facts = if let Some(cond) = if_expr.cond() { - let cond_ty = infer_expr_ty_impl(&cond, env, None, recorder); - if cond_ty == Ty::NEVER { - return Ty::NEVER; - } - flow::extract_facts(&cond) - } else { - Facts::new() - }; - - let then_ty = if_expr.then().map_or(Ty::ANY, |then_clause| { - then_clause.expr().map_or(Ty::ANY, |then_expr| { - env.push_scope(); - for (var_name, fact) in facts.iter() { - let current_ty = env.lookup(var_name).unwrap_or(Ty::ANY); - let narrowed_ty = fact.apply_to(current_ty, env.store_mut()); - env.define_ty(var_name.clone(), narrowed_ty); - } - let ty = infer_expr_ty_impl(&then_expr, env, expected, recorder); - env.pop_scope(); - ty - }) - }); - - let else_ty = if_expr.else_().map_or(Ty::ANY, |else_clause| { - else_clause.expr().map_or(Ty::ANY, |else_expr| { - env.push_scope(); - for (var_name, fact) in facts.iter() { - let current_ty = env.lookup(var_name).unwrap_or(Ty::ANY); - let widened_ty = fact.apply_negated(current_ty, env.store_mut()); - env.define_ty(var_name.clone(), widened_ty); - } - let ty = infer_expr_ty_impl(&else_expr, env, expected, recorder); - env.pop_scope(); - ty - }) - }); - - env.store_mut().union(vec![then_ty, else_ty]) -} - -fn infer_field_expr_base_ty( - field: &jrsonnet_rowan_parser::nodes::ExprField, - env: &mut TypeEnv, - recorder: &mut R, -) -> Ty { - if let Some(ty) = infer_stdlib_field_access_ty(field, env) { - if let Some(base_expr) = field.base() { - let _ = infer_expr_ty_impl(&base_expr, env, None, recorder); - } - return ty; - } - - if let Some(base_expr) = field.base() { - if let Some(var_name) = 
extract_var_name_from_expr(&base_expr) { - if env.is_tracked_param(&var_name) { - let field_name = field - .field() - .and_then(|n| n.ident_lit()) - .map(|t| t.text().to_string()); - if let Some(fn_name) = field_name { - let obj_data = ObjectData { - fields: vec![( - fn_name, - FieldDefInterned { - ty: Ty::ANY, - required: true, - visibility: FieldVis::Normal, - }, - )], - has_unknown: true, - }; - let constraint = env.store_mut().object(obj_data); - env.add_constraint_ty(&var_name, constraint); - } else { - let constraint = env.store_mut().object_any(); - env.add_constraint_ty(&var_name, constraint); - } - } - } - } - - let base_ty = field - .base() - .map_or(Ty::ANY, |e| infer_expr_ty_impl(&e, env, None, recorder)); - if base_ty == Ty::NEVER { - return Ty::NEVER; - } - if base_ty == Ty::STRING { - return Ty::STRING; - } - - let field_name = field - .field() - .and_then(|n| n.ident_lit()) - .map(|t| t.text().to_string()); - - let Some(field_name) = field_name.as_deref() else { - return Ty::ANY; - }; - let store = env.store_mut(); - if let Some(ty) = object_field_ty(base_ty, field_name, store) { - return ty; - } - - Ty::ANY -} - -fn infer_call_expr_base_ty( +pub(super) fn infer_call_expr_base_ty( call: &jrsonnet_rowan_parser::nodes::ExprCall, env: &mut TypeEnv, recorder: &mut R, @@ -929,7 +191,7 @@ fn collection_to_array_ty(ty: Ty, store: &mut jrsonnet_lsp_types::MutStore) -> T } } -fn infer_array_expr_base_ty( +pub(super) fn infer_array_expr_base_ty( arr: &jrsonnet_rowan_parser::nodes::ExprArray, env: &mut TypeEnv, expected: Option, @@ -972,7 +234,7 @@ fn infer_array_expr_base_ty( env.store_mut().array(elem_ty) } -fn infer_array_comp_expr_base_ty( +pub(super) fn infer_array_comp_expr_base_ty( comp: &jrsonnet_rowan_parser::nodes::ExprArrayComp, env: &mut TypeEnv, recorder: &mut R, @@ -1047,7 +309,7 @@ fn infer_array_comp_expr_base_ty( env.store_mut().array(body_ty) } -fn infer_function_expr_base_ty( +pub(super) fn infer_function_expr_base_ty( func: 
&jrsonnet_rowan_parser::nodes::ExprFunction, env: &mut TypeEnv, recorder: &mut R, @@ -1140,7 +402,7 @@ fn infer_function_expr_base_ty( result } -fn infer_obj_extend_expr_base_ty( +pub(super) fn infer_obj_extend_expr_base_ty( extend: &jrsonnet_rowan_parser::nodes::ExprObjExtend, env: &mut TypeEnv, recorder: &mut R, @@ -1207,7 +469,7 @@ mod tests { }; use rstest::rstest; - use super::*; + use super::{super::*, *}; /// Assert that an `ObjectData` has exactly the specified field names. fn assert_fields_ty(obj: &ObjectData, expected: &[&str]) { diff --git a/crates/jrsonnet-lsp-inference/src/expr/base.rs b/crates/jrsonnet-lsp-inference/src/expr/base.rs new file mode 100644 index 00000000..07893d17 --- /dev/null +++ b/crates/jrsonnet-lsp-inference/src/expr/base.rs @@ -0,0 +1,453 @@ +use jrsonnet_lsp_import::extract_import_path; +use jrsonnet_lsp_scope::var_resolves_to_builtin_std; +use jrsonnet_lsp_types::{FieldDefInterned, FieldVis, NumBounds, ObjectData, Ty, TyData}; +use jrsonnet_rowan_parser::{ + nodes::{BinaryOperatorKind, ExprBase, ImportKindKind, LiteralKind}, + AstNode, +}; + +use super::{ + advanced::{ + infer_array_comp_expr_base_ty, infer_array_expr_base_ty, infer_call_expr_base_ty, + infer_function_expr_base_ty, infer_obj_extend_expr_base_ty, + }, + infer_expr_ty_impl, TypeRecorder, +}; +use crate::{ + env::TypeEnv, + flow::{self, Facts}, + helpers::{extract_var_name_from_expr, infer_stdlib_field_access_ty}, + object::infer_object_type_ty, +}; + +/// Infer the type of a base expression with an optional expected type, returning `Ty`. +/// +/// This is the efficient internal version that works with interned types throughout. 
+pub(super) fn infer_base_ty( + base: &ExprBase, + env: &mut TypeEnv, + expected: Option, + recorder: &mut R, +) -> Ty { + match base { + // Primitives - return constants directly + ExprBase::ExprLiteral(lit) => { + if let Some(literal) = lit.literal() { + match literal.kind() { + LiteralKind::NullKw => return Ty::NULL, + LiteralKind::TrueKw => return Ty::TRUE, + LiteralKind::FalseKw => return Ty::FALSE, + LiteralKind::SelfKw => { + // `self` refers to the innermost object + return env + .self_ty() + .unwrap_or_else(|| env.store_mut().object_any()); + } + LiteralKind::Dollar => { + // `$` refers to the outermost (root) object + return env + .root_ty() + .unwrap_or_else(|| env.store_mut().object_any()); + } + LiteralKind::SuperKw => { + // `super` refers to the base object being extended + return env + .super_ty() + .unwrap_or_else(|| env.store_mut().object_any()); + } + } + } + Ty::ANY + } + ExprBase::ExprNumber(_) => Ty::NUMBER, + ExprBase::ExprString(_) => Ty::STRING, + ExprBase::ExprError(_) => Ty::NEVER, + + // Variable lookup - use Ty-native lookup + ExprBase::ExprVar(var) => { + if var_resolves_to_builtin_std(var) { + return env.store_mut().object_any(); + } + + var.name() + .and_then(|name| name.ident_lit()) + .and_then(|ident| env.lookup(ident.text())) + .unwrap_or(Ty::ANY) + } + + // Parenthesized expression - recurse with expected type + ExprBase::ExprParened(parens) => { + if let Some(inner) = parens.expr() { + return infer_expr_ty_impl(&inner, env, expected, recorder); + } + Ty::ANY + } + + // Import - try to resolve the type from the import cache + ExprBase::ExprImport(import) => { + let Some(kind) = import.import_kind().map(|token| token.kind()) else { + return Ty::ANY; + }; + match kind { + ImportKindKind::ImportKw => { + let Some(path) = extract_import_path(import) else { + return Ty::ANY; + }; + if path.is_empty() { + return Ty::ANY; + } + // Try to resolve the import type, fall back to ANY + env.resolve_import(&path).map_or(Ty::ANY, Ty::from) + } 
+ ImportKindKind::ImportstrKw => Ty::STRING, + ImportKindKind::ImportbinKw => { + let byte_ty = env + .store_mut() + .bounded_number(NumBounds::between(0.0, 255.0)); + env.store_mut().array(byte_ty) + } + } + } + + // Unary operators + ExprBase::ExprUnary(unary) => { + let rhs_ty = unary + .rhs() + .map_or(Ty::ANY, |rhs| infer_expr_ty_impl(&rhs, env, None, recorder)); + if rhs_ty == Ty::NEVER { + return Ty::NEVER; + } + let Some(op_kind) = unary.unary_operator().map(|op| op.kind()) else { + return Ty::ANY; + }; + if op_kind.returns_boolean() { + return Ty::BOOL; + } + if op_kind.returns_number() { + return Ty::NUMBER; + } + Ty::ANY + } + + // Binary operators - handle simple cases directly + ExprBase::ExprBinary(binary) => infer_binary_expr_base_ty(binary, env, recorder), + + // Index access: arr[0], obj["field"], str[0] + ExprBase::ExprIndex(idx) => infer_index_expr_base_ty(idx, env, recorder), + + // Slice: arr[1:3], str[::2] + ExprBase::ExprSlice(slice) => infer_slice_expr_base_ty(slice, env, recorder), + + // If-then-else with flow typing + ExprBase::ExprIfThenElse(if_expr) => { + infer_if_then_else_expr_base_ty(if_expr, env, expected, recorder) + } + + // Field access: obj.field or std.fn + ExprBase::ExprField(field) => infer_field_expr_base_ty(field, env, recorder), + + // Function call: fn(args) + ExprBase::ExprCall(call) => infer_call_expr_base_ty(call, env, recorder), + + // Array literal: [a, b, c] + ExprBase::ExprArray(arr) => infer_array_expr_base_ty(arr, env, expected, recorder), + + // Array comprehension: [expr for x in arr] + ExprBase::ExprArrayComp(comp) => infer_array_comp_expr_base_ty(comp, env, recorder), + + // Object literal: { field: value } + ExprBase::ExprObject(obj) => { + infer_object_type_ty(obj.obj_body().as_ref(), env, &mut |expr, env| { + infer_expr_ty_impl(expr, env, None, recorder) + }) + } + + // Function definition: function(x) body + ExprBase::ExprFunction(func) => infer_function_expr_base_ty(func, env, recorder), + + // Object 
extension: base { ... } + ExprBase::ExprObjExtend(extend) => infer_obj_extend_expr_base_ty(extend, env, recorder), + } +} + +fn infer_binary_expr_base_ty( + binary: &jrsonnet_rowan_parser::nodes::ExprBinary, + env: &mut TypeEnv, + recorder: &mut R, +) -> Ty { + let lhs_ty = binary + .lhs() + .map_or(Ty::ANY, |e| infer_expr_ty_impl(&e, env, None, recorder)); + if lhs_ty == Ty::NEVER { + return Ty::NEVER; + } + let rhs_ty = binary + .rhs() + .map_or(Ty::ANY, |e| infer_expr_ty_impl(&e, env, None, recorder)); + if rhs_ty == Ty::NEVER { + return Ty::NEVER; + } + + let Some(op_kind) = binary.binary_operator().map(|op| op.kind()) else { + return Ty::ANY; + }; + + if op_kind == BinaryOperatorKind::Modulo && lhs_ty == Ty::STRING { + return Ty::STRING; + } + if op_kind.returns_number() { + return Ty::NUMBER; + } + if op_kind.returns_boolean() { + return Ty::BOOL; + } + if op_kind == BinaryOperatorKind::Plus { + if lhs_ty == Ty::STRING && rhs_ty == Ty::STRING { + return Ty::STRING; + } + if lhs_ty == Ty::NUMBER && rhs_ty == Ty::NUMBER { + return Ty::NUMBER; + } + let store = env.store_mut(); + let lhs_data = store.get(lhs_ty); + let rhs_data = store.get(rhs_ty); + return match (&lhs_data, &rhs_data) { + (TyData::Array { elem: l, .. }, TyData::Array { elem: r, .. 
}) => { + let elem_union = store.union(vec![*l, *r]); + store.array(elem_union) + } + (TyData::Tuple { elems: l }, TyData::Tuple { elems: r }) => { + let mut elems = l.clone(); + elems.extend(r.iter().copied()); + store.tuple(elems) + } + (TyData::Object(left_obj), TyData::Object(right_obj)) => { + let merged = ObjectData::merge(left_obj, right_obj); + store.object(merged) + } + _ => Ty::NUMBER, + }; + } + if op_kind.is_logical_short_circuit() { + return env.store_mut().union(vec![lhs_ty, rhs_ty]); + } + Ty::ANY +} + +fn infer_index_expr_base_ty( + idx: &jrsonnet_rowan_parser::nodes::ExprIndex, + env: &mut TypeEnv, + recorder: &mut R, +) -> Ty { + let base_ty = idx + .base() + .map_or(Ty::ANY, |e| infer_expr_ty_impl(&e, env, None, recorder)); + if base_ty == Ty::NEVER { + return Ty::NEVER; + } + let mut index_literal = None; + if let Some(index_expr) = idx.index() { + let idx_ty = infer_expr_ty_impl(&index_expr, env, None, recorder); + if idx_ty == Ty::NEVER { + return Ty::NEVER; + } + index_literal = extract_string_literal(&index_expr); + } + if base_ty == Ty::STRING { + return Ty::STRING; + } + let store = env.store_mut(); + match store.get(base_ty) { + TyData::Array { elem, .. 
} => elem, + TyData::Tuple { ref elems } => { + let elems_copy: Vec = elems.clone(); + store.union(elems_copy) + } + TyData::Object(_) | TyData::Union(_) => { + if let Some(field_name) = index_literal.as_deref() { + return object_field_ty(base_ty, field_name, store).unwrap_or(Ty::ANY); + } + Ty::ANY + } + _ => Ty::ANY, + } +} + +fn object_field_ty( + ty: Ty, + field_name: &str, + store: &mut jrsonnet_lsp_types::MutStore, +) -> Option { + match store.get(ty) { + TyData::Object(obj) => obj + .fields + .iter() + .find(|(name, _)| name == field_name) + .map(|(_, field)| field.ty) + .or_else(|| obj.has_unknown.then_some(Ty::ANY)), + TyData::Union(types) => { + let field_types = types + .into_iter() + .filter_map(|variant| object_field_ty(variant, field_name, store)) + .collect::>(); + if field_types.is_empty() { + None + } else { + Some(store.union(field_types)) + } + } + _ => None, + } +} + +fn extract_string_literal(expr: &jrsonnet_rowan_parser::nodes::Expr) -> Option { + let base = expr.expr_base()?; + let ExprBase::ExprString(s) = base else { + return None; + }; + let text = s.syntax().first_token()?.text().to_string(); + if (text.starts_with('"') && text.ends_with('"')) + || (text.starts_with('\'') && text.ends_with('\'')) + { + return Some(text[1..text.len() - 1].to_string()); + } + None +} + +fn infer_slice_expr_base_ty( + slice: &jrsonnet_rowan_parser::nodes::ExprSlice, + env: &mut TypeEnv, + recorder: &mut R, +) -> Ty { + let base_ty = slice + .base() + .map_or(Ty::ANY, |e| infer_expr_ty_impl(&e, env, None, recorder)); + if base_ty == Ty::NEVER { + return Ty::NEVER; + } + let store = env.store_mut(); + match store.get(base_ty) { + TyData::Array { .. 
} => base_ty, + TyData::Tuple { ref elems } => { + let elems_copy: Vec = elems.clone(); + let elem_union = store.union(elems_copy); + store.array(elem_union) + } + _ if base_ty == Ty::STRING => Ty::STRING, + _ => Ty::ANY, + } +} + +fn infer_if_then_else_expr_base_ty( + if_expr: &jrsonnet_rowan_parser::nodes::ExprIfThenElse, + env: &mut TypeEnv, + expected: Option, + recorder: &mut R, +) -> Ty { + let facts = if let Some(cond) = if_expr.cond() { + let cond_ty = infer_expr_ty_impl(&cond, env, None, recorder); + if cond_ty == Ty::NEVER { + return Ty::NEVER; + } + flow::extract_facts(&cond) + } else { + Facts::new() + }; + + let then_ty = if_expr.then().map_or(Ty::ANY, |then_clause| { + then_clause.expr().map_or(Ty::ANY, |then_expr| { + env.push_scope(); + for (var_name, fact) in facts.iter() { + let current_ty = env.lookup(var_name).unwrap_or(Ty::ANY); + let narrowed_ty = fact.apply_to(current_ty, env.store_mut()); + env.define_ty(var_name.clone(), narrowed_ty); + } + let ty = infer_expr_ty_impl(&then_expr, env, expected, recorder); + env.pop_scope(); + ty + }) + }); + + let else_ty = if_expr.else_().map_or(Ty::ANY, |else_clause| { + else_clause.expr().map_or(Ty::ANY, |else_expr| { + env.push_scope(); + for (var_name, fact) in facts.iter() { + let current_ty = env.lookup(var_name).unwrap_or(Ty::ANY); + let widened_ty = fact.apply_negated(current_ty, env.store_mut()); + env.define_ty(var_name.clone(), widened_ty); + } + let ty = infer_expr_ty_impl(&else_expr, env, expected, recorder); + env.pop_scope(); + ty + }) + }); + + env.store_mut().union(vec![then_ty, else_ty]) +} + +fn infer_field_expr_base_ty( + field: &jrsonnet_rowan_parser::nodes::ExprField, + env: &mut TypeEnv, + recorder: &mut R, +) -> Ty { + if let Some(ty) = infer_stdlib_field_access_ty(field, env) { + if let Some(base_expr) = field.base() { + let _ = infer_expr_ty_impl(&base_expr, env, None, recorder); + } + return ty; + } + + if let Some(base_expr) = field.base() { + if let Some(var_name) = 
extract_var_name_from_expr(&base_expr) { + if env.is_tracked_param(&var_name) { + let field_name = field + .field() + .and_then(|n| n.ident_lit()) + .map(|t| t.text().to_string()); + if let Some(fn_name) = field_name { + let obj_data = ObjectData { + fields: vec![( + fn_name, + FieldDefInterned { + ty: Ty::ANY, + required: true, + visibility: FieldVis::Normal, + }, + )], + has_unknown: true, + }; + let constraint = env.store_mut().object(obj_data); + env.add_constraint_ty(&var_name, constraint); + } else { + let constraint = env.store_mut().object_any(); + env.add_constraint_ty(&var_name, constraint); + } + } + } + } + + let base_ty = field + .base() + .map_or(Ty::ANY, |e| infer_expr_ty_impl(&e, env, None, recorder)); + if base_ty == Ty::NEVER { + return Ty::NEVER; + } + if base_ty == Ty::STRING { + return Ty::STRING; + } + + let field_name = field + .field() + .and_then(|n| n.ident_lit()) + .map(|t| t.text().to_string()); + + let Some(field_name) = field_name.as_deref() else { + return Ty::ANY; + }; + let store = env.store_mut(); + if let Some(ty) = object_field_ty(base_ty, field_name, store) { + return ty; + } + + Ty::ANY +} diff --git a/crates/jrsonnet-lsp-inference/src/expr/mod.rs b/crates/jrsonnet-lsp-inference/src/expr/mod.rs new file mode 100644 index 00000000..d4dd4d30 --- /dev/null +++ b/crates/jrsonnet-lsp-inference/src/expr/mod.rs @@ -0,0 +1,310 @@ +//! Expression type inference. + +mod advanced; +mod base; + +use jrsonnet_lsp_document::Document; +use jrsonnet_lsp_types::{FunctionData, ParamInterned, ReturnSpec, Ty, TyData}; +use jrsonnet_rowan_parser::{nodes::Bind, AstNode}; +use rowan::TextRange; +use rustc_hash::FxHashMap; + +use self::base::infer_base_ty; +use crate::{ + env::TypeEnv, + flow::{self, Facts}, + helpers::extract_params_with_default_types_ty, +}; + +/// Apply type facts to the environment, narrowing variable types. 
+/// +/// This is used to apply facts extracted from assert statements and +/// type guard conditions to narrow types for subsequent code. +fn apply_facts_to_env(facts: &Facts, env: &mut TypeEnv) { + for (var_name, fact) in facts.iter() { + // Get the current type of the variable + let current_ty = env.lookup(var_name).unwrap_or(Ty::ANY); + // Apply the fact to narrow the type + let narrowed_ty = fact.apply_to(current_ty, env.store_mut()); + // Update the environment with the narrowed type + env.define_ty(var_name.clone(), narrowed_ty); + } +} + +pub(super) trait TypeRecorder { + fn record(&mut self, range: TextRange, ty: Ty); +} + +struct NoopRecorder; + +impl TypeRecorder for NoopRecorder { + fn record(&mut self, _range: TextRange, _ty: Ty) {} +} + +impl TypeRecorder for FxHashMap { + fn record(&mut self, range: TextRange, ty: Ty) { + self.insert(range, ty); + } +} + +fn record_expr_and_base( + recorder: &mut R, + expr: &jrsonnet_rowan_parser::nodes::Expr, + ty: Ty, +) { + recorder.record(expr.syntax().text_range(), ty); + if let Some(base) = expr.expr_base() { + recorder.record(base.syntax().text_range(), ty); + } +} + +/// Infer the type of a document's root expression, returning an interned `Ty` and the environment. +/// +/// This is useful for tests that need to inspect the type structure using `TyData`. +#[must_use] +pub fn infer_document_type_ty(document: &Document) -> (Ty, TypeEnv) { + let ast = document.ast(); + let mut env = TypeEnv::new_default(); + + let ty = ast + .expr() + .map_or(Ty::ANY, |expr| infer_expr_ty(&expr, &mut env)); + + (ty, env) +} + +/// Infer the type of an expression, returning an interned `Ty`. 
+pub fn infer_expr_ty(expr: &jrsonnet_rowan_parser::nodes::Expr, env: &mut TypeEnv) -> Ty { + infer_expr_ty_with_expected(expr, env, None) +} + +pub(crate) fn infer_expr_ty_and_record( + expr: &jrsonnet_rowan_parser::nodes::Expr, + env: &mut TypeEnv, + expected: Option, + recorder: &mut FxHashMap, +) -> Ty { + infer_expr_ty_impl(expr, env, expected, recorder) +} + +/// Infer the type of an expression with an optional expected type, returning `Ty`. +/// +/// This is the efficient internal version that works with interned types throughout. +pub fn infer_expr_ty_with_expected( + expr: &jrsonnet_rowan_parser::nodes::Expr, + env: &mut TypeEnv, + expected: Option, +) -> Ty { + let mut recorder = NoopRecorder; + infer_expr_ty_impl(expr, env, expected, &mut recorder) +} + +pub(super) fn infer_expr_ty_impl( + expr: &jrsonnet_rowan_parser::nodes::Expr, + env: &mut TypeEnv, + expected: Option, + recorder: &mut R, +) -> Ty { + // First, handle local bindings and assert statements that may precede the expression + for stmt in expr.stmts() { + match stmt { + jrsonnet_rowan_parser::nodes::Stmt::StmtLocal(stmt_local) => { + for bind in stmt_local.binds() { + infer_bind_type_ty(&bind, env, recorder); + } + } + jrsonnet_rowan_parser::nodes::Stmt::StmtAssert(stmt_assert) => { + // Extract type facts from assert conditions and apply them + if let Some(assertion) = stmt_assert.assertion() { + if let Some(cond) = assertion.condition() { + let facts = flow::extract_facts(&cond); + apply_facts_to_env(&facts, env); + } + } + } + } + } + + // Get the base expression type + let ty = expr.expr_base().map_or(Ty::ANY, |base| { + infer_base_ty(&base, env, expected, recorder) + }); + record_expr_and_base(recorder, expr, ty); + ty +} + +/// Infer types from a bind (local variable definition) using interned types. 
+fn infer_bind_type_ty(bind: &Bind, env: &mut TypeEnv, recorder: &mut R) { + match bind { + Bind::BindDestruct(bd) => { + if let Some(destruct) = bd.into() { + if let jrsonnet_rowan_parser::nodes::Destruct::DestructFull(full) = destruct { + if let Some(name_node) = full.name() { + if let Some(ident) = name_node.ident_lit() { + let name = ident.text().to_string(); + let ty = bd + .value() + .map_or(Ty::ANY, |v| infer_expr_ty_impl(&v, env, None, recorder)); + env.define_ty(name, ty); + } + } + } + } + } + Bind::BindFunction(bf) => { + let Some(name_node) = bf.name() else { + return; + }; + let Some(ident) = name_node.ident_lit() else { + return; + }; + let name = ident.text().to_string(); + let params = bf + .params() + .map(|p| extract_params_with_default_types_ty(&p, env)) + .unwrap_or_default(); + + // Install a provisional function first so recursive self-calls can resolve. + let provisional_func = FunctionData { + params: params.clone(), + return_spec: ReturnSpec::Fixed(Ty::ANY), + variadic: false, + }; + let provisional_ty = env.store_mut().intern(TyData::Function(provisional_func)); + env.define_ty(name.clone(), provisional_ty); + + let (return_ty, param_constraints) = if env.can_infer_function_body() { + bf.value().map_or_else( + || (Ty::ANY, FxHashMap::default()), + |body| { + env.push_scope(); + let param_names: Vec = + params.iter().map(|p| p.name.clone()).collect(); + for param in ¶ms { + env.define_ty(param.name.clone(), param.ty); + } + + env.start_constraint_tracking(¶m_names); + env.enter_function(); + let body_ty = infer_expr_ty_impl(&body, env, None, recorder); + env.exit_function(); + let constraints = env.stop_constraint_tracking_ty(); + env.pop_scope(); + (body_ty, constraints) + }, + ) + } else { + (Ty::ANY, FxHashMap::default()) + }; + + let final_params: Vec = params + .into_iter() + .map(|param| { + let mut narrowed_ty = param.ty; + if let Some(constraints) = param_constraints.get(¶m.name) { + for constraint_ty in constraints { + narrowed_ty 
= env.store_mut().narrow(narrowed_ty, *constraint_ty); + } + } + ParamInterned { + name: param.name, + ty: narrowed_ty, + has_default: param.has_default, + } + }) + .collect(); + + let final_func = FunctionData { + params: final_params, + return_spec: ReturnSpec::Fixed(return_ty), + variadic: false, + }; + let final_ty = env.store_mut().intern(TyData::Function(final_func)); + env.define_ty(name, final_ty); + } + } +} + +/// Bind a destructuring pattern with an interned type. +/// +/// This is used for comprehension variables where we know the element type +/// from the iterator expression. +pub(super) fn bind_destruct_with_type_ty( + destruct: &jrsonnet_rowan_parser::nodes::Destruct, + ty: Ty, + env: &mut TypeEnv, +) { + use jrsonnet_rowan_parser::nodes::{Destruct, DestructArrayPart}; + + match destruct { + Destruct::DestructFull(full) => { + let Some(ident) = full.name().and_then(|n| n.ident_lit()) else { + return; + }; + env.define_ty(ident.text().to_string(), ty); + } + Destruct::DestructArray(arr) => { + let elem_types = extract_array_element_types_ty(ty, env); + for (i, part) in arr.destruct_array_parts().enumerate() { + let DestructArrayPart::DestructArrayElement(elem) = part else { + continue; + }; + let Some(inner) = elem.destruct() else { + continue; + }; + let elem_ty = elem_types.get(i).copied().unwrap_or(Ty::ANY); + bind_destruct_with_type_ty(&inner, elem_ty, env); + } + } + Destruct::DestructObject(obj) => { + for field in obj.destruct_object_fields() { + let Some(inner) = field.destruct() else { + continue; + }; + let field_ty = lookup_destruct_field_type_ty(&inner, ty, env); + bind_destruct_with_type_ty(&inner, field_ty, env); + } + } + Destruct::DestructSkip(_) => {} + } +} + +/// Extract element types from an array or tuple type (Ty version). +fn extract_array_element_types_ty(ty: Ty, env: &TypeEnv) -> Vec { + let store = env.store(); + match store.get(ty) { + TyData::Tuple { ref elems } => elems.clone(), + TyData::Array { elem, .. 
} => vec![elem], + _ => vec![], + } +} + +/// Look up the type for a destructured field from an object type (Ty version). +fn lookup_destruct_field_type_ty( + destruct: &jrsonnet_rowan_parser::nodes::Destruct, + ty: Ty, + env: &TypeEnv, +) -> Ty { + let store = env.store(); + let TyData::Object(ref obj_data) = store.get(ty) else { + return Ty::ANY; + }; + + let jrsonnet_rowan_parser::nodes::Destruct::DestructFull(full) = destruct else { + return Ty::ANY; + }; + + let Some(ident) = full.name().and_then(|n| n.ident_lit()) else { + return Ty::ANY; + }; + + obj_data.get_field(ident.text()).map_or(Ty::ANY, |fd| fd.ty) +} + +/// Check if an expression is guaranteed to diverge (never return). +/// +/// An expression diverges if it has type `Never` - meaning it always +/// throws an error. This is used for unreachable code detection. +pub fn is_divergent(expr: &jrsonnet_rowan_parser::nodes::Expr, env: &mut TypeEnv) -> bool { + infer_expr_ty(expr, env).is_never() +} From 191f6521db1cb9723e31fdbe92d253b5e1449f34 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Mon, 16 Feb 2026 15:17:26 +0000 Subject: [PATCH 126/210] refactor(lsp-inference): split flow module by concern Split flow typing into module files with a clear responsibility split between fact representation/application and AST fact extraction. Move extraction and parsing logic with its inline tests into `flow/extract.rs`, and keep flow fact structures and narrowing logic in `flow/mod.rs`. This keeps behavior and tests intact while making future incremental flow-handler work easier. 
--- .../src/{flow.rs => flow/extract.rs} | 747 +----------------- crates/jrsonnet-lsp-inference/src/flow/mod.rs | 721 +++++++++++++++++ 2 files changed, 726 insertions(+), 742 deletions(-) rename crates/jrsonnet-lsp-inference/src/{flow.rs => flow/extract.rs} (69%) create mode 100644 crates/jrsonnet-lsp-inference/src/flow/mod.rs diff --git a/crates/jrsonnet-lsp-inference/src/flow.rs b/crates/jrsonnet-lsp-inference/src/flow/extract.rs similarity index 69% rename from crates/jrsonnet-lsp-inference/src/flow.rs rename to crates/jrsonnet-lsp-inference/src/flow/extract.rs index 3b04d578..ec7a0c5b 100644 --- a/crates/jrsonnet-lsp-inference/src/flow.rs +++ b/crates/jrsonnet-lsp-inference/src/flow/extract.rs @@ -1,750 +1,11 @@ -//! Flow typing: extracting and applying type facts from conditions. -//! -//! This module implements flow-sensitive type narrowing based on conditions. -//! When a condition like `std.isNumber(x)` is true, we can narrow the type of `x` -//! to `Number` in the then-branch, and to "not Number" in the else-branch. -//! -//! The design follows the approach from rjsonnet, with a fact-based system that -//! supports logical combinations (and, or, not) and proper totality tracking. - -use jrsonnet_lsp_types::{ - FieldDefInterned, FieldVis, FunctionData, MutStore, ObjectData, ReturnSpec, Ty, TyData, -}; -use rustc_hash::FxHashMap; - -/// Totality indicates whether a fact can be negated. -/// -/// - `Total`: The fact fully classifies the value. For example, `std.isNumber(x)` -/// is total because if it's false, we know `x` is definitely NOT a number. -/// - `Partial`: The fact only partially classifies the value. For example, -/// `std.isInteger(x)` is partial because if it's false, `x` might still be -/// a decimal number. -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub enum Totality { - /// Fact can be negated - if false, the negation applies. - Total, - /// Fact cannot be negated - if false, we learn nothing. 
- Partial, -} - -/// A type fact about a single variable. -/// -/// Facts represent what we know about a variable's type based on a condition. -/// They can be combined using logical operators and applied to narrow types. -#[derive(Debug, Clone, PartialEq)] -pub struct Fact { - repr: FactRepr, -} - -/// Internal representation of a fact. -#[derive(Debug, Clone, PartialEq)] -enum FactRepr { - /// Narrows to a primitive type (number, string, bool, null, array, object, function). - Prim(PrimFact, Totality), - /// Object has a field with optional type constraint. - HasField { - field: String, - /// Optional constraint on the field's type. - field_type: Option>, - }, - /// Value has a specific length. - /// For arrays: converts to tuple with that many elements. - /// For objects: closes the object if field count matches. - HasLen(usize), - /// Value has at least this length (non-empty check). - /// Useful for `std.length(x) > 0` patterns. - MinLen(usize), - /// Array elements are all of a specific type. - /// Used for patterns like `std.all(std.map(std.isNumber, arr))`. - ArrayElemType(PrimFact, Totality), - /// Value equals a literal boolean (true or false). - /// Used for `x == true` or `x == false` patterns. - LiteralBool(bool), - /// Value equals a literal string. - /// Used for `x == "literal"` patterns. - LiteralString(String), - /// Logical AND of two facts. - And(Box, Box), - /// Logical OR of two facts. - Or(Box, Box), - /// Logical NOT of a fact. - Not(Box), -} - -/// Primitive type facts. -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -enum PrimFact { - Null, - Bool, - Number, - String, - Array, - Object, - Function, -} - -impl PrimFact { - /// Convert to an interned Ty. 
- fn as_ty(self, store: &mut MutStore) -> Ty { - match self { - PrimFact::Null => Ty::NULL, - PrimFact::Bool => Ty::BOOL, - PrimFact::Number => Ty::NUMBER, - PrimFact::String => Ty::STRING, - PrimFact::Array => store.array(Ty::ANY), - PrimFact::Object => store.object(ObjectData::open()), - PrimFact::Function => store.function(FunctionData { - params: vec![], - return_spec: ReturnSpec::Fixed(Ty::ANY), - variadic: true, - }), - } - } - - /// Approximate the complement of this primitive type when starting from `any`. - fn negated_any_ty(self, store: &mut MutStore) -> Ty { - let array_any = store.array(Ty::ANY); - let object_any = store.object(ObjectData::open()); - let function_any = store.function(FunctionData { - params: vec![], - return_spec: ReturnSpec::Fixed(Ty::ANY), - variadic: true, - }); - match self { - PrimFact::Null => store.union(vec![ - Ty::BOOL, - Ty::NUMBER, - Ty::STRING, - array_any, - object_any, - function_any, - ]), - PrimFact::Bool => store.union(vec![ - Ty::NULL, - Ty::NUMBER, - Ty::STRING, - array_any, - object_any, - function_any, - ]), - PrimFact::Number => store.union(vec![ - Ty::NULL, - Ty::BOOL, - Ty::STRING, - array_any, - object_any, - function_any, - ]), - PrimFact::String => store.union(vec![ - Ty::NULL, - Ty::BOOL, - Ty::NUMBER, - array_any, - object_any, - function_any, - ]), - PrimFact::Array => store.union(vec![ - Ty::NULL, - Ty::BOOL, - Ty::NUMBER, - Ty::STRING, - object_any, - function_any, - ]), - PrimFact::Object => store.union(vec![ - Ty::NULL, - Ty::BOOL, - Ty::NUMBER, - Ty::STRING, - array_any, - function_any, - ]), - PrimFact::Function => store.union(vec![ - Ty::NULL, - Ty::BOOL, - Ty::NUMBER, - Ty::STRING, - array_any, - object_any, - ]), - } - } -} - -impl Fact { - /// Create a null fact. - #[must_use] - pub fn null() -> Self { - Self { - repr: FactRepr::Prim(PrimFact::Null, Totality::Total), - } - } - - /// Create a number fact with given totality. 
- #[must_use] - pub fn number(totality: Totality) -> Self { - Self { - repr: FactRepr::Prim(PrimFact::Number, totality), - } - } - - /// Create a string fact with given totality. - #[must_use] - pub fn string(totality: Totality) -> Self { - Self { - repr: FactRepr::Prim(PrimFact::String, totality), - } - } - - /// Create an array fact with given totality. - #[must_use] - pub fn array(totality: Totality) -> Self { - Self { - repr: FactRepr::Prim(PrimFact::Array, totality), - } - } - - /// Create an object fact with given totality. - #[must_use] - pub fn object(totality: Totality) -> Self { - Self { - repr: FactRepr::Prim(PrimFact::Object, totality), - } - } - - /// Create a function fact. - #[must_use] - pub fn function() -> Self { - Self { - repr: FactRepr::Prim(PrimFact::Function, Totality::Total), - } - } - - /// Create a boolean fact. - #[must_use] - pub fn boolean() -> Self { - Self { - repr: FactRepr::Prim(PrimFact::Bool, Totality::Total), - } - } - - /// Create a fact that an object has a field. - #[must_use] - pub fn has_field(field: String) -> Self { - Self { - repr: FactRepr::HasField { - field, - field_type: None, - }, - } - } - - /// Create a fact that an object has a field with a specific type. - #[must_use] - pub fn has_field_typed(field: String, field_fact: Fact) -> Self { - Self { - repr: FactRepr::HasField { - field, - field_type: Some(Box::new(field_fact)), - }, - } - } - - /// Create a fact that a value has a specific length. - /// Applies to arrays, strings, and objects. - #[must_use] - pub fn has_len(len: usize) -> Self { - Self { - repr: FactRepr::HasLen(len), - } - } - - /// Create a fact that a value has at least a minimum length. - /// Useful for non-empty checks like `std.length(x) > 0`. - #[must_use] - pub fn min_len(min: usize) -> Self { - Self { - repr: FactRepr::MinLen(min), - } - } - - /// Create a fact that an array's elements are all of a specific type. 
- /// Used for higher-order predicates like `std.all(std.map(std.isNumber, arr))`. - fn array_elem_number(totality: Totality) -> Self { - Self { - repr: FactRepr::ArrayElemType(PrimFact::Number, totality), - } - } - - fn array_elem_string(totality: Totality) -> Self { - Self { - repr: FactRepr::ArrayElemType(PrimFact::String, totality), - } - } - - fn array_elem_bool(totality: Totality) -> Self { - Self { - repr: FactRepr::ArrayElemType(PrimFact::Bool, totality), - } - } - - fn array_elem_array(totality: Totality) -> Self { - Self { - repr: FactRepr::ArrayElemType(PrimFact::Array, totality), - } - } - - fn array_elem_object(totality: Totality) -> Self { - Self { - repr: FactRepr::ArrayElemType(PrimFact::Object, totality), - } - } - - fn array_elem_function(totality: Totality) -> Self { - Self { - repr: FactRepr::ArrayElemType(PrimFact::Function, totality), - } - } - - /// Create a fact that a value equals a specific boolean literal. - /// Used for `x == true` or `x == false` patterns. - #[must_use] - pub fn literal_bool(value: bool) -> Self { - Self { - repr: FactRepr::LiteralBool(value), - } - } - - /// Create a fact that a value equals a specific string literal. - /// Used for `x == "literal"` patterns. - #[must_use] - pub fn literal_string(value: String) -> Self { - Self { - repr: FactRepr::LiteralString(value), - } - } - - /// Logical AND of two facts. - #[must_use] - pub fn and(self, other: Self) -> Self { - Self { - repr: FactRepr::And(Box::new(self.repr), Box::new(other.repr)), - } - } - - /// Logical OR of two facts. - #[must_use] - pub fn or(self, other: Self) -> Self { - Self { - repr: FactRepr::Or(Box::new(self.repr), Box::new(other.repr)), - } - } - - /// Return the logical negation of this fact. - #[must_use] - pub fn negated(self) -> Self { - Self { - repr: FactRepr::Not(Box::new(self.repr)), - } - } - - /// Apply this fact to narrow a type. - /// - /// Returns the narrowed type when the fact is known to be true. 
- pub fn apply_to(&self, ty: Ty, store: &mut MutStore) -> Ty { - self.repr.apply_to(ty, store) - } - - /// Apply the negation of this fact to narrow a type. - /// - /// Returns the narrowed type when the fact is known to be false. - pub fn apply_negated(&self, ty: Ty, store: &mut MutStore) -> Ty { - self.repr.apply_negated(ty, store) - } -} - -impl std::ops::Not for Fact { - type Output = Self; - - fn not(self) -> Self::Output { - self.negated() - } -} - -impl FactRepr { - /// Apply this fact to narrow a type (when fact is true). - fn apply_to(&self, ty: Ty, store: &mut MutStore) -> Ty { - match self { - FactRepr::Prim(prim, _) => { - let constraint = prim.as_ty(store); - store.narrow(ty, constraint) - } - - FactRepr::HasField { field, field_type } => { - apply_has_field_fact(ty, field, field_type.as_deref(), store) - } - - FactRepr::HasLen(len) => store.with_len(ty, *len), - - FactRepr::MinLen(min) => store.with_min_len(ty, *min), - - FactRepr::ArrayElemType(prim, _) => { - let prim_ty = prim.as_ty(store); - apply_array_elem_constraint(ty, prim_ty, store) - } - - FactRepr::LiteralBool(value) => { - // Narrow to the specific boolean literal type - let constraint = if *value { Ty::TRUE } else { Ty::FALSE }; - store.narrow(ty, constraint) - } - - FactRepr::LiteralString(value) => { - // Narrow to the specific string literal type - let constraint = store.literal_string(value.clone()); - store.narrow(ty, constraint) - } - - FactRepr::And(lhs, rhs) => { - // Apply both facts sequentially - let narrowed = lhs.apply_to(ty, store); - rhs.apply_to(narrowed, store) - } - - FactRepr::Or(lhs, rhs) => { - if let ( - FactRepr::ArrayElemType(lhs_prim, _), - FactRepr::ArrayElemType(rhs_prim, _), - ) = (&**lhs, &**rhs) - { - let lhs_ty = lhs_prim.as_ty(store); - let rhs_ty = rhs_prim.as_ty(store); - let union_ty = store.union(vec![lhs_ty, rhs_ty]); - return apply_array_elem_constraint(ty, union_ty, store); - } - // Apply each fact and union the results - // (a || b) means: either a 
is true OR b is true - // So the type is: (ty narrowed by a) | (ty narrowed by b) - let t1 = lhs.apply_to(ty, store); - let t2 = rhs.apply_to(ty, store); - store.union(vec![t1, t2]) - } - - FactRepr::Not(inner) => { - // Apply negated inner fact - inner.apply_negated(ty, store) - } - } - } - - /// Apply the negation of this fact (when fact is false). - fn apply_negated(&self, ty: Ty, store: &mut MutStore) -> Ty { - match self { - FactRepr::Prim(prim, totality) => { - match totality { - Totality::Total => { - // Can negate: widen by removing this type - let remove = prim.as_ty(store); - store.widen(ty, remove) - } - Totality::Partial => { - // Cannot negate: type unchanged - ty - } - } - } - - FactRepr::HasField { field, field_type } => { - let negated_field_ty = field_type.as_ref().map_or(Ty::NEVER, |inner| match &inner - .repr - { - FactRepr::Prim(prim, Totality::Total) => prim.negated_any_ty(store), - _ => inner.apply_negated(Ty::ANY, store), - }); - let constraint = store.object(ObjectData { - fields: vec![( - field.clone(), - FieldDefInterned { - ty: negated_field_ty, - required: true, - visibility: FieldVis::Normal, - }, - )], - has_unknown: true, - }); - store.narrow(ty, constraint) - } - - FactRepr::HasLen(_) => { - // Negating "has length n" doesn't narrow the type in a useful way - // (it just has a different length) - ty - } - - FactRepr::MinLen(min) => { - // Negating "min length n" means length < n - // For min=1 (non-empty), negation means empty (length 0) - if *min == 1 { - store.with_len(ty, 0) - } else { - ty - } - } - - FactRepr::ArrayElemType(_prim, totality) => { - // Negating "all elements are type T" - match totality { - Totality::Total => { - // Total: we know at least one element is NOT T - // This doesn't narrow the type in a useful way - ty - } - Totality::Partial => { - // Partial: can't negate - ty - } - } - } - - FactRepr::LiteralBool(value) => { - // Negating "x == true" means x is false (and vice versa) - // This is total: if x != true 
and x is a bool, then x == false - let constraint = if *value { Ty::FALSE } else { Ty::TRUE }; - store.narrow(ty, constraint) - } - - FactRepr::LiteralString(value) => { - // Negating "x == literal" means x != literal - // Widen by removing the literal type - let remove = store.literal_string(value.clone()); - store.widen(ty, remove) - } - - // De Morgan's laws: - // !(a && b) = !a || !b - FactRepr::And(lhs, rhs) => { - let t1 = lhs.apply_negated(ty, store); - let t2 = rhs.apply_negated(ty, store); - store.union(vec![t1, t2]) - } - - // !(a || b) = !a && !b - FactRepr::Or(lhs, rhs) => { - let narrowed = lhs.apply_negated(ty, store); - rhs.apply_negated(narrowed, store) - } - - // Double negation: !!a = a - FactRepr::Not(inner) => inner.apply_to(ty, store), - } - } -} - -fn apply_array_elem_constraint(ty: Ty, elem_constraint: Ty, store: &mut MutStore) -> Ty { - match store.get(ty) { - TyData::Array { .. } | TyData::Any => store.array(elem_constraint), - TyData::Tuple { elems } => { - let narrowed: Vec<_> = elems - .iter() - .map(|&e| store.narrow(e, elem_constraint)) - .filter(|&e| e != Ty::NEVER) - .collect(); - if narrowed.is_empty() { - Ty::NEVER - } else { - store.tuple(narrowed) - } - } - TyData::Union(types) => { - let narrowed: Vec<_> = types - .iter() - .map(|&variant| apply_array_elem_constraint(variant, elem_constraint, store)) - .filter(|&variant| variant != Ty::NEVER) - .collect(); - store.union(narrowed) - } - _ => ty, - } -} - -fn apply_has_field_fact( - ty: Ty, - field: &str, - field_fact: Option<&Fact>, - store: &mut MutStore, -) -> Ty { - let required_field_ty = field_fact.map_or(Ty::ANY, |fact| fact.apply_to(Ty::ANY, store)); - - match store.get(ty) { - TyData::Object(mut obj_data) => { - if let Some((_, existing_field)) = - obj_data.fields.iter_mut().find(|(name, _)| name == field) - { - let next_field_ty = field_fact.map_or(existing_field.ty, |fact| { - fact.apply_to(existing_field.ty, store) - }); - if next_field_ty == Ty::NEVER { - return 
Ty::NEVER; - } - existing_field.ty = next_field_ty; - existing_field.required = true; - return store.object(obj_data); - } - - if obj_data.has_unknown { - obj_data.fields.push(( - field.to_string(), - FieldDefInterned { - ty: required_field_ty, - required: true, - visibility: FieldVis::Normal, - }, - )); - return store.object(obj_data); - } - - Ty::NEVER - } - TyData::Union(types) => { - let narrowed: Vec<_> = types - .iter() - .map(|&variant| apply_has_field_fact(variant, field, field_fact, store)) - .filter(|&variant| variant != Ty::NEVER) - .collect(); - store.union(narrowed) - } - TyData::Any => store.object(ObjectData { - fields: vec![( - field.to_string(), - FieldDefInterned { - ty: required_field_ty, - required: true, - visibility: FieldVis::Normal, - }, - )], - has_unknown: true, - }), - _ => { - let constraint = store.object(ObjectData { - fields: vec![( - field.to_string(), - FieldDefInterned { - ty: required_field_ty, - required: true, - visibility: FieldVis::Normal, - }, - )], - has_unknown: true, - }); - store.narrow(ty, constraint) - } - } -} - -/// A collection of facts about multiple variables. -#[derive(Debug, Clone, Default)] -pub struct Facts { - /// Map from variable name to fact about that variable. - facts: FxHashMap, -} - -impl Facts { - /// Create an empty facts collection. - #[must_use] - pub fn new() -> Self { - Self::default() - } - - /// Add a fact for a variable. - /// - /// If a fact already exists for this variable, they are `ANDed` together. - pub fn add(&mut self, var_name: String, fact: Fact) { - if let Some(existing) = self.facts.remove(&var_name) { - self.facts.insert(var_name, existing.and(fact)); - } else { - self.facts.insert(var_name, fact); - } - } - - /// Get the fact for a variable, if any. - #[must_use] - pub fn get(&self, var_name: &str) -> Option<&Fact> { - self.facts.get(var_name) - } - - /// Check if there are any facts. 
- #[must_use] - pub fn is_empty(&self) -> bool { - self.facts.is_empty() - } - - /// Iterate over all facts. - pub fn iter(&self) -> impl Iterator { - self.facts.iter() - } - - /// Combine two fact sets with OR. - /// - /// Only keeps facts that exist in both sets, combining them with OR. - /// This is used for || conditions. - #[must_use] - pub fn or_combine(mut self, mut other: Self) -> Self { - let mut result = Facts::new(); - for (var_name, fact) in self.facts.drain() { - if let Some(other_fact) = other.facts.remove(&var_name) { - result.facts.insert(var_name, fact.or(other_fact)); - } - // If not in both, we learn nothing - } - result - } - - /// Combine two fact sets with AND (merge). - /// - /// Combines all facts from both sets. - #[must_use] - pub fn and_combine(mut self, other: Self) -> Self { - for (var_name, fact) in other.facts { - self.add(var_name, fact); - } - self - } - - /// Negate all facts in this collection. - #[must_use] - pub fn negate(self) -> Self { - let mut result = Facts::new(); - for (var_name, fact) in self.facts { - result.facts.insert(var_name, !fact); - } - result - } -} - use jrsonnet_lsp_scope::{expr_resolves_to_builtin_std, var_resolves_to_builtin_std}; use jrsonnet_rowan_parser::{ nodes::{ArgsDesc, BinaryOperatorKind, Expr, ExprBase, ExprCall, LiteralKind}, AstNode, AstToken, }; -/// Extract type facts from a condition expression. 
-/// -/// Recognizes patterns like: -/// - `std.isNumber(x)` → x: Number -/// - `std.isString(x)` → x: String -/// - `std.isBoolean(x)` → x: Bool -/// - `std.isArray(x)` → x: Array -/// - `std.isObject(x)` → x: Object -/// - `std.isFunction(x)` → x: Function -/// - `std.objectHas(x, "field")` → x has field "field" -/// - `"field" in x` → x has field "field" -/// - `x == null` → x: Null -/// - `x != null` → x: NOT Null -/// - `std.type(x) == "number"` → x: Number -/// - `a && b` → facts from a AND facts from b -/// - `a || b` → facts from a OR facts from b (intersection) -/// - `!a` → negated facts from a -#[must_use] +use super::{Fact, FactRepr, Facts, PrimFact, Totality}; + pub fn extract_facts(cond: &Expr) -> Facts { let mut facts = Facts::new(); extract_facts_into(cond, &mut facts); @@ -1415,7 +676,9 @@ fn extract_bool_literal(expr: &Expr) -> Option { #[cfg(test)] mod tests { use jrsonnet_lsp_document::{DocVersion, Document}; - use jrsonnet_lsp_types::{FieldDefInterned, FieldVis, GlobalTyStore, ObjectData, TyData}; + use jrsonnet_lsp_types::{ + FieldDefInterned, FieldVis, GlobalTyStore, MutStore, ObjectData, Ty, TyData, + }; use rstest::rstest; use super::*; diff --git a/crates/jrsonnet-lsp-inference/src/flow/mod.rs b/crates/jrsonnet-lsp-inference/src/flow/mod.rs new file mode 100644 index 00000000..2c664be3 --- /dev/null +++ b/crates/jrsonnet-lsp-inference/src/flow/mod.rs @@ -0,0 +1,721 @@ +//! Flow typing: extracting and applying type facts from conditions. +//! +//! This module implements flow-sensitive type narrowing based on +//! conditions. + +mod extract; + +pub use extract::{extract_array_predicate_fact, extract_facts}; +use jrsonnet_lsp_types::{ + FieldDefInterned, FieldVis, FunctionData, MutStore, ObjectData, ReturnSpec, Ty, TyData, +}; +use rustc_hash::FxHashMap; + +/// Totality indicates whether a fact can be negated. +/// +/// - `Total`: The fact fully classifies the value. 
For example, `std.isNumber(x)` +/// is total because if it's false, we know `x` is definitely NOT a number. +/// - `Partial`: The fact only partially classifies the value. For example, +/// `std.isInteger(x)` is partial because if it's false, `x` might still be +/// a decimal number. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum Totality { + /// Fact can be negated - if false, the negation applies. + Total, + /// Fact cannot be negated - if false, we learn nothing. + Partial, +} + +/// A type fact about a single variable. +/// +/// Facts represent what we know about a variable's type based on a condition. +/// They can be combined using logical operators and applied to narrow types. +#[derive(Debug, Clone, PartialEq)] +pub struct Fact { + repr: FactRepr, +} + +/// Internal representation of a fact. +#[derive(Debug, Clone, PartialEq)] +enum FactRepr { + /// Narrows to a primitive type (number, string, bool, null, array, object, function). + Prim(PrimFact, Totality), + /// Object has a field with optional type constraint. + HasField { + field: String, + /// Optional constraint on the field's type. + field_type: Option>, + }, + /// Value has a specific length. + /// For arrays: converts to tuple with that many elements. + /// For objects: closes the object if field count matches. + HasLen(usize), + /// Value has at least this length (non-empty check). + /// Useful for `std.length(x) > 0` patterns. + MinLen(usize), + /// Array elements are all of a specific type. + /// Used for patterns like `std.all(std.map(std.isNumber, arr))`. + ArrayElemType(PrimFact, Totality), + /// Value equals a literal boolean (true or false). + /// Used for `x == true` or `x == false` patterns. + LiteralBool(bool), + /// Value equals a literal string. + /// Used for `x == "literal"` patterns. + LiteralString(String), + /// Logical AND of two facts. + And(Box, Box), + /// Logical OR of two facts. + Or(Box, Box), + /// Logical NOT of a fact. + Not(Box), +} + +/// Primitive type facts. 
+#[derive(Debug, Clone, Copy, PartialEq, Eq)] +enum PrimFact { + Null, + Bool, + Number, + String, + Array, + Object, + Function, +} + +impl PrimFact { + /// Convert to an interned Ty. + fn as_ty(self, store: &mut MutStore) -> Ty { + match self { + PrimFact::Null => Ty::NULL, + PrimFact::Bool => Ty::BOOL, + PrimFact::Number => Ty::NUMBER, + PrimFact::String => Ty::STRING, + PrimFact::Array => store.array(Ty::ANY), + PrimFact::Object => store.object(ObjectData::open()), + PrimFact::Function => store.function(FunctionData { + params: vec![], + return_spec: ReturnSpec::Fixed(Ty::ANY), + variadic: true, + }), + } + } + + /// Approximate the complement of this primitive type when starting from `any`. + fn negated_any_ty(self, store: &mut MutStore) -> Ty { + let array_any = store.array(Ty::ANY); + let object_any = store.object(ObjectData::open()); + let function_any = store.function(FunctionData { + params: vec![], + return_spec: ReturnSpec::Fixed(Ty::ANY), + variadic: true, + }); + match self { + PrimFact::Null => store.union(vec![ + Ty::BOOL, + Ty::NUMBER, + Ty::STRING, + array_any, + object_any, + function_any, + ]), + PrimFact::Bool => store.union(vec![ + Ty::NULL, + Ty::NUMBER, + Ty::STRING, + array_any, + object_any, + function_any, + ]), + PrimFact::Number => store.union(vec![ + Ty::NULL, + Ty::BOOL, + Ty::STRING, + array_any, + object_any, + function_any, + ]), + PrimFact::String => store.union(vec![ + Ty::NULL, + Ty::BOOL, + Ty::NUMBER, + array_any, + object_any, + function_any, + ]), + PrimFact::Array => store.union(vec![ + Ty::NULL, + Ty::BOOL, + Ty::NUMBER, + Ty::STRING, + object_any, + function_any, + ]), + PrimFact::Object => store.union(vec![ + Ty::NULL, + Ty::BOOL, + Ty::NUMBER, + Ty::STRING, + array_any, + function_any, + ]), + PrimFact::Function => store.union(vec![ + Ty::NULL, + Ty::BOOL, + Ty::NUMBER, + Ty::STRING, + array_any, + object_any, + ]), + } + } +} + +impl Fact { + /// Create a null fact. 
+ #[must_use] + pub fn null() -> Self { + Self { + repr: FactRepr::Prim(PrimFact::Null, Totality::Total), + } + } + + /// Create a number fact with given totality. + #[must_use] + pub fn number(totality: Totality) -> Self { + Self { + repr: FactRepr::Prim(PrimFact::Number, totality), + } + } + + /// Create a string fact with given totality. + #[must_use] + pub fn string(totality: Totality) -> Self { + Self { + repr: FactRepr::Prim(PrimFact::String, totality), + } + } + + /// Create an array fact with given totality. + #[must_use] + pub fn array(totality: Totality) -> Self { + Self { + repr: FactRepr::Prim(PrimFact::Array, totality), + } + } + + /// Create an object fact with given totality. + #[must_use] + pub fn object(totality: Totality) -> Self { + Self { + repr: FactRepr::Prim(PrimFact::Object, totality), + } + } + + /// Create a function fact. + #[must_use] + pub fn function() -> Self { + Self { + repr: FactRepr::Prim(PrimFact::Function, Totality::Total), + } + } + + /// Create a boolean fact. + #[must_use] + pub fn boolean() -> Self { + Self { + repr: FactRepr::Prim(PrimFact::Bool, Totality::Total), + } + } + + /// Create a fact that an object has a field. + #[must_use] + pub fn has_field(field: String) -> Self { + Self { + repr: FactRepr::HasField { + field, + field_type: None, + }, + } + } + + /// Create a fact that an object has a field with a specific type. + #[must_use] + pub fn has_field_typed(field: String, field_fact: Fact) -> Self { + Self { + repr: FactRepr::HasField { + field, + field_type: Some(Box::new(field_fact)), + }, + } + } + + /// Create a fact that a value has a specific length. + /// Applies to arrays, strings, and objects. + #[must_use] + pub fn has_len(len: usize) -> Self { + Self { + repr: FactRepr::HasLen(len), + } + } + + /// Create a fact that a value has at least a minimum length. + /// Useful for non-empty checks like `std.length(x) > 0`. 
+ #[must_use] + pub fn min_len(min: usize) -> Self { + Self { + repr: FactRepr::MinLen(min), + } + } + + /// Create a fact that an array's elements are all of a specific type. + /// Used for higher-order predicates like `std.all(std.map(std.isNumber, arr))`. + fn array_elem_number(totality: Totality) -> Self { + Self { + repr: FactRepr::ArrayElemType(PrimFact::Number, totality), + } + } + + fn array_elem_string(totality: Totality) -> Self { + Self { + repr: FactRepr::ArrayElemType(PrimFact::String, totality), + } + } + + fn array_elem_bool(totality: Totality) -> Self { + Self { + repr: FactRepr::ArrayElemType(PrimFact::Bool, totality), + } + } + + fn array_elem_array(totality: Totality) -> Self { + Self { + repr: FactRepr::ArrayElemType(PrimFact::Array, totality), + } + } + + fn array_elem_object(totality: Totality) -> Self { + Self { + repr: FactRepr::ArrayElemType(PrimFact::Object, totality), + } + } + + fn array_elem_function(totality: Totality) -> Self { + Self { + repr: FactRepr::ArrayElemType(PrimFact::Function, totality), + } + } + + /// Create a fact that a value equals a specific boolean literal. + /// Used for `x == true` or `x == false` patterns. + #[must_use] + pub fn literal_bool(value: bool) -> Self { + Self { + repr: FactRepr::LiteralBool(value), + } + } + + /// Create a fact that a value equals a specific string literal. + /// Used for `x == "literal"` patterns. + #[must_use] + pub fn literal_string(value: String) -> Self { + Self { + repr: FactRepr::LiteralString(value), + } + } + + /// Logical AND of two facts. + #[must_use] + pub fn and(self, other: Self) -> Self { + Self { + repr: FactRepr::And(Box::new(self.repr), Box::new(other.repr)), + } + } + + /// Logical OR of two facts. + #[must_use] + pub fn or(self, other: Self) -> Self { + Self { + repr: FactRepr::Or(Box::new(self.repr), Box::new(other.repr)), + } + } + + /// Return the logical negation of this fact. 
	/// Return the logical negation of this fact (wraps it in [`FactRepr::Not`]).
	#[must_use]
	pub fn negated(self) -> Self {
		Self {
			repr: FactRepr::Not(Box::new(self.repr)),
		}
	}

	/// Apply this fact to narrow a type.
	///
	/// Returns the narrowed type when the fact is known to be true.
	pub fn apply_to(&self, ty: Ty, store: &mut MutStore) -> Ty {
		self.repr.apply_to(ty, store)
	}

	/// Apply the negation of this fact to narrow a type.
	///
	/// Returns the narrowed type when the fact is known to be false.
	pub fn apply_negated(&self, ty: Ty, store: &mut MutStore) -> Ty {
		self.repr.apply_negated(ty, store)
	}
}

/// `!fact` is shorthand for [`Fact::negated`].
impl std::ops::Not for Fact {
	type Output = Self;

	fn not(self) -> Self::Output {
		self.negated()
	}
}

impl FactRepr {
	/// Apply this fact to narrow a type (when fact is true).
	fn apply_to(&self, ty: Ty, store: &mut MutStore) -> Ty {
		match self {
			// Primitive predicate (e.g. `std.isString(x)`): intersect with
			// the primitive's type. Totality is irrelevant for the true branch.
			FactRepr::Prim(prim, _) => {
				let constraint = prim.as_ty(store);
				store.narrow(ty, constraint)
			}

			FactRepr::HasField { field, field_type } => {
				apply_has_field_fact(ty, field, field_type.as_deref(), store)
			}

			FactRepr::HasLen(len) => store.with_len(ty, *len),

			FactRepr::MinLen(min) => store.with_min_len(ty, *min),

			FactRepr::ArrayElemType(prim, _) => {
				let prim_ty = prim.as_ty(store);
				apply_array_elem_constraint(ty, prim_ty, store)
			}

			FactRepr::LiteralBool(value) => {
				// Narrow to the specific boolean literal type
				let constraint = if *value { Ty::TRUE } else { Ty::FALSE };
				store.narrow(ty, constraint)
			}

			FactRepr::LiteralString(value) => {
				// Narrow to the specific string literal type
				let constraint = store.literal_string(value.clone());
				store.narrow(ty, constraint)
			}

			FactRepr::And(lhs, rhs) => {
				// Apply both facts sequentially
				let narrowed = lhs.apply_to(ty, store);
				rhs.apply_to(narrowed, store)
			}

			FactRepr::Or(lhs, rhs) => {
				// Special case: "elements are T" OR "elements are U" narrows to
				// an array of `T | U` rather than a union of two array types.
				if let (
					FactRepr::ArrayElemType(lhs_prim, _),
					FactRepr::ArrayElemType(rhs_prim, _),
				) = (&**lhs, &**rhs)
				{
					let lhs_ty = lhs_prim.as_ty(store);
					let rhs_ty = rhs_prim.as_ty(store);
					let union_ty = store.union(vec![lhs_ty, rhs_ty]);
					return apply_array_elem_constraint(ty, union_ty, store);
				}
				// Apply each fact and union the results
				// (a || b) means: either a is true OR b is true
				// So the type is: (ty narrowed by a) | (ty narrowed by b)
				let t1 = lhs.apply_to(ty, store);
				let t2 = rhs.apply_to(ty, store);
				store.union(vec![t1, t2])
			}

			FactRepr::Not(inner) => {
				// Apply negated inner fact
				inner.apply_negated(ty, store)
			}
		}
	}

	/// Apply the negation of this fact (when fact is false).
	fn apply_negated(&self, ty: Ty, store: &mut MutStore) -> Ty {
		match self {
			FactRepr::Prim(prim, totality) => {
				match totality {
					Totality::Total => {
						// Can negate: widen by removing this type
						let remove = prim.as_ty(store);
						store.widen(ty, remove)
					}
					Totality::Partial => {
						// Cannot negate: type unchanged.
						// Partial predicates (e.g. `std.isInteger`) only
						// narrow the true branch; their false branch is not
						// the strict complement.
						ty
					}
				}
			}

			FactRepr::HasField { field, field_type } => {
				// If the field fact was a total primitive check, a failed
				// check means the field (when present) has the complement
				// type; otherwise negate the inner fact against ANY.
				let negated_field_ty = field_type.as_ref().map_or(Ty::NEVER, |inner| {
					match &inner.repr {
						FactRepr::Prim(prim, Totality::Total) => prim.negated_any_ty(store),
						_ => inner.apply_negated(Ty::ANY, store),
					}
				});
				let constraint = store.object(ObjectData {
					fields: vec![(
						field.clone(),
						FieldDefInterned {
							ty: negated_field_ty,
							required: true,
							visibility: FieldVis::Normal,
						},
					)],
					has_unknown: true,
				});
				store.narrow(ty, constraint)
			}

			FactRepr::HasLen(_) => {
				// Negating "has length n" doesn't narrow the type in a useful way
				// (it just has a different length)
				ty
			}

			FactRepr::MinLen(min) => {
				// Negating "min length n" means length < n
				// For min=1 (non-empty), negation means empty (length 0)
				if *min == 1 {
					store.with_len(ty, 0)
				} else {
					ty
				}
			}

			FactRepr::ArrayElemType(_prim, totality) => {
				// Negating "all elements are type T"
				match totality {
					Totality::Total => {
						// Total: we know at least one element is NOT T
						// This doesn't narrow the type in a useful way
						ty
					}
					Totality::Partial => {
						// Partial: can't negate
						ty
					}
				}
			}

			FactRepr::LiteralBool(value) => {
				// Negating "x == true" means x is false (and vice versa)
				// This is total: if x != true and x is a bool, then x == false
				let constraint = if *value { Ty::FALSE } else { Ty::TRUE };
				store.narrow(ty, constraint)
			}

			FactRepr::LiteralString(value) => {
				// Negating "x == literal" means x != literal
				// Widen by removing the literal type
				let remove = store.literal_string(value.clone());
				store.widen(ty, remove)
			}

			// De Morgan's laws:
			// !(a && b) = !a || !b
			FactRepr::And(lhs, rhs) => {
				let t1 = lhs.apply_negated(ty, store);
				let t2 = rhs.apply_negated(ty, store);
				store.union(vec![t1, t2])
			}

			// !(a || b) = !a && !b
			FactRepr::Or(lhs, rhs) => {
				let narrowed = lhs.apply_negated(ty, store);
				rhs.apply_negated(narrowed, store)
			}

			// Double negation: !!a = a
			FactRepr::Not(inner) => inner.apply_to(ty, store),
		}
	}
}

/// Narrow `ty` with the knowledge that all of its elements satisfy
/// `elem_constraint`.
///
/// Arrays and `any` become an array of the constraint type; tuples narrow
/// each element; unions narrow each variant and drop impossible ones.
/// Any other type is returned unchanged.
fn apply_array_elem_constraint(ty: Ty, elem_constraint: Ty, store: &mut MutStore) -> Ty {
	match store.get(ty) {
		TyData::Array { .. } | TyData::Any => store.array(elem_constraint),
		TyData::Tuple { elems } => {
			// NOTE(review): dropping `never` elements changes the tuple's
			// arity; arguably the whole tuple should become `never` if any
			// element is impossible — confirm this is intended.
			let narrowed: Vec<_> = elems
				.iter()
				.map(|&e| store.narrow(e, elem_constraint))
				.filter(|&e| e != Ty::NEVER)
				.collect();
			if narrowed.is_empty() {
				Ty::NEVER
			} else {
				store.tuple(narrowed)
			}
		}
		TyData::Union(types) => {
			let narrowed: Vec<_> = types
				.iter()
				.map(|&variant| apply_array_elem_constraint(variant, elem_constraint, store))
				.filter(|&variant| variant != Ty::NEVER)
				.collect();
			store.union(narrowed)
		}
		_ => ty,
	}
}

/// Narrow `ty` with the knowledge that it has field `field`, optionally
/// constrained by `field_fact` (a fact about the field's own type).
///
/// Objects gain/refine the field (returning `NEVER` when the field cannot
/// exist or its narrowed type is `NEVER`); unions narrow each variant;
/// `any` becomes an open object with the required field; everything else is
/// intersected with such an object.
fn apply_has_field_fact(
	ty: Ty,
	field: &str,
	field_fact: Option<&Fact>,
	store: &mut MutStore,
) -> Ty {
	// Type the field must have when we only know the fact holds.
	let required_field_ty = field_fact.map_or(Ty::ANY, |fact| fact.apply_to(Ty::ANY, store));

	match store.get(ty) {
		TyData::Object(mut obj_data) => {
			// Known field: narrow its declared type by the field fact.
			if let Some((_, existing_field)) =
				obj_data.fields.iter_mut().find(|(name, _)| name == field)
			{
				let next_field_ty = field_fact.map_or(existing_field.ty, |fact| {
					fact.apply_to(existing_field.ty, store)
				});
				if next_field_ty == Ty::NEVER {
					return Ty::NEVER;
				}
				existing_field.ty = next_field_ty;
				existing_field.required = true;
				return store.object(obj_data);
			}

			// Unknown field on an open object: add it as required.
			if obj_data.has_unknown {
				obj_data.fields.push((
					field.to_string(),
					FieldDefInterned {
						ty: required_field_ty,
						required: true,
						visibility: FieldVis::Normal,
					},
				));
				return store.object(obj_data);
			}

			// Closed object without the field: the fact cannot hold.
			Ty::NEVER
		}
		TyData::Union(types) => {
			let narrowed: Vec<_> = types
				.iter()
				.map(|&variant| apply_has_field_fact(variant, field, field_fact, store))
				.filter(|&variant| variant != Ty::NEVER)
				.collect();
			store.union(narrowed)
		}
		TyData::Any => store.object(ObjectData {
			fields: vec![(
				field.to_string(),
				FieldDefInterned {
					ty: required_field_ty,
					required: true,
					visibility: FieldVis::Normal,
				},
			)],
			has_unknown: true,
		}),
		_ => {
			let constraint = store.object(ObjectData {
				fields: vec![(
					field.to_string(),
					FieldDefInterned {
						ty: required_field_ty,
						required: true,
						visibility: FieldVis::Normal,
					},
				)],
				has_unknown: true,
			});
			store.narrow(ty, constraint)
		}
	}
}

/// A collection of facts about multiple variables.
#[derive(Debug, Clone, Default)]
pub struct Facts {
	/// Map from variable name to fact about that variable.
	facts: FxHashMap<String, Fact>,
}

impl Facts {
	/// Create an empty facts collection.
	#[must_use]
	pub fn new() -> Self {
		Self::default()
	}

	/// Add a fact for a variable.
	///
	/// If a fact already exists for this variable, they are `ANDed` together.
	pub fn add(&mut self, var_name: String, fact: Fact) {
		if let Some(existing) = self.facts.remove(&var_name) {
			self.facts.insert(var_name, existing.and(fact));
		} else {
			self.facts.insert(var_name, fact);
		}
	}

	/// Get the fact for a variable, if any.
	#[must_use]
	pub fn get(&self, var_name: &str) -> Option<&Fact> {
		self.facts.get(var_name)
	}

	/// Check if there are any facts.
	#[must_use]
	pub fn is_empty(&self) -> bool {
		self.facts.is_empty()
	}

	/// Iterate over all facts.
	pub fn iter(&self) -> impl Iterator<Item = (&String, &Fact)> {
		self.facts.iter()
	}

	/// Combine two fact sets with OR.
	///
	/// Only keeps facts that exist in both sets, combining them with OR.
	/// This is used for || conditions.
	#[must_use]
	pub fn or_combine(mut self, mut other: Self) -> Self {
		let mut result = Facts::new();
		for (var_name, fact) in self.facts.drain() {
			if let Some(other_fact) = other.facts.remove(&var_name) {
				result.facts.insert(var_name, fact.or(other_fact));
			}
			// If not in both, we learn nothing
		}
		result
	}

	/// Combine two fact sets with AND (merge).
	///
	/// Combines all facts from both sets.
	#[must_use]
	pub fn and_combine(mut self, other: Self) -> Self {
		for (var_name, fact) in other.facts {
			self.add(var_name, fact);
		}
		self
	}

	/// Negate all facts in this collection.
+ #[must_use] + pub fn negate(self) -> Self { + let mut result = Facts::new(); + for (var_name, fact) in self.facts { + result.facts.insert(var_name, !fact); + } + result + } +} From 245f76a0438db6a3ed6c9d0a6f574fe32d7e01bc Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Mon, 16 Feb 2026 15:22:06 +0000 Subject: [PATCH 127/210] refactor(lsp-handlers): split rename into focused modules Split rename internals into a directory module with separate files for local rename flow, cross-file importer scanning, and shared edit helpers. Keep public APIs unchanged, while reducing file size and making each concern more isolated for follow-up changes. Move tests inline with the logic they verify in `local.rs` and `cross_file.rs`, with no tests in `mod.rs`. --- .../src/rename/common.rs | 44 +++ .../src/rename/cross_file.rs | 332 ++++++++++++++++ .../src/{rename.rs => rename/local.rs} | 371 +----------------- .../jrsonnet-lsp-handlers/src/rename/mod.rs | 10 + 4 files changed, 389 insertions(+), 368 deletions(-) create mode 100644 crates/jrsonnet-lsp-handlers/src/rename/common.rs create mode 100644 crates/jrsonnet-lsp-handlers/src/rename/cross_file.rs rename crates/jrsonnet-lsp-handlers/src/{rename.rs => rename/local.rs} (50%) create mode 100644 crates/jrsonnet-lsp-handlers/src/rename/mod.rs diff --git a/crates/jrsonnet-lsp-handlers/src/rename/common.rs b/crates/jrsonnet-lsp-handlers/src/rename/common.rs new file mode 100644 index 00000000..17f3158a --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/rename/common.rs @@ -0,0 +1,44 @@ +use std::collections::HashMap; + +use lsp_types::{TextEdit, Uri, WorkspaceEdit}; + +pub(super) fn workspace_edit_from_changes( + changes: HashMap>, +) -> Option { + if changes.is_empty() { + return None; + } + + Some(WorkspaceEdit { + changes: Some(changes), + document_changes: None, + change_annotations: None, + }) +} + +pub(super) fn field_definition_range( + token: &jrsonnet_rowan_parser::SyntaxToken, +) -> Option { + use 
jrsonnet_rowan_parser::SyntaxKind; + + if token.kind() != SyntaxKind::IDENT { + return None; + } + + let mut node = token.parent(); + while let Some(current) = node { + if current.kind() == SyntaxKind::FIELD_NAME_FIXED { + let parent = current.parent()?; + if matches!( + parent.kind(), + SyntaxKind::MEMBER_FIELD_NORMAL | SyntaxKind::MEMBER_FIELD_METHOD + ) { + return Some(token.text_range()); + } + return None; + } + node = current.parent(); + } + + None +} diff --git a/crates/jrsonnet-lsp-handlers/src/rename/cross_file.rs b/crates/jrsonnet-lsp-handlers/src/rename/cross_file.rs new file mode 100644 index 00000000..e4b0d393 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/rename/cross_file.rs @@ -0,0 +1,332 @@ +use std::{ + collections::{HashMap, HashSet}, + sync::Arc, +}; + +use jrsonnet_lsp_document::{ + to_lsp_range, token_at_offset, CanonicalPath, Document, FileId, LspPosition, SymbolName, +}; +use jrsonnet_lsp_import::ImportGraph; +use jrsonnet_lsp_inference::DocumentManager; +use jrsonnet_rowan_parser::{AstNode, SyntaxKind}; +use lsp_types::{TextEdit, Uri, WorkspaceEdit}; + +use super::{ + common::{field_definition_range, workspace_edit_from_changes}, + local::rename, +}; + +/// Perform cross-file rename operation. +/// +/// This extends the basic rename to also find references in files that import +/// the current file. For top-level object fields, it finds field accesses +/// in importing files. +/// +/// # Arguments +/// - `document`: The current document +/// - `position`: Cursor position +/// - `new_name`: The new name for the symbol +/// - `uri`: URI of the current document +/// - `current_path`: Canonical path of the current file +/// - `manager`: Document manager for accessing other files +/// - `import_graph`: Import graph for finding importing files +/// +/// # Returns +/// A `WorkspaceEdit` with changes across all affected files. 
pub fn rename_cross_file(
	document: &Document,
	position: LspPosition,
	new_name: &SymbolName,
	uri: &Uri,
	current_path: &CanonicalPath,
	manager: &Arc<DocumentManager>,
	import_graph: &ImportGraph,
) -> Option<WorkspaceEdit> {
	// First, do the local rename to get edits for the current file
	let mut all_changes: HashMap<Uri, Vec<TextEdit>> = HashMap::new();

	// Get local edits
	if let Some(local_edit) = rename(document, position, new_name, uri) {
		if let Some(changes) = local_edit.changes {
			all_changes.extend(changes);
		}
	}

	// Find the symbol name being renamed
	let text = document.text();
	let line_index = document.line_index();
	let offset = line_index.offset(position, text)?;
	let ast = document.ast();
	let token = token_at_offset(ast.syntax(), offset)?;

	// If the cursor is on an object-field definition, edit the definition
	// itself too (the local `rename` flow covers variable bindings).
	if let Some(range) = field_definition_range(&token) {
		all_changes.entry(uri.clone()).or_default().push(TextEdit {
			range: to_lsp_range(range, line_index, text),
			new_text: new_name.to_string(),
		});
	}

	// Cross-file lookups below key off the identifier text; bail out early
	// (returning whatever local edits we have) for non-identifier tokens.
	if token.kind() != SyntaxKind::IDENT {
		return workspace_edit_from_changes(all_changes);
	}

	let old_name = token.text().to_string();
	let Some(current_file) = import_graph.file(current_path) else {
		return workspace_edit_from_changes(all_changes);
	};

	// Find files that import this file
	let importers = import_graph.transitive_importers(current_file);

	// Find references in each importing file
	for importer_file in importers {
		if let Some((importer_uri, edits)) = find_references_in_importer(
			importer_file,
			current_file,
			&old_name,
			new_name,
			manager,
			import_graph,
		) {
			all_changes.entry(importer_uri).or_default().extend(edits);
		}
	}

	workspace_edit_from_changes(all_changes)
}

/// Names that `importer_file` binds its imports of `source_file` to,
/// e.g. `lib` in `local lib = import 'source.jsonnet'`.
fn import_binding_names(
	import_graph: &ImportGraph,
	importer_file: FileId,
	source_file: FileId,
) -> HashSet<String> {
	import_graph
		.imports_of_target(importer_file, source_file)
		.into_iter()
		.filter_map(|entry| entry.binding_name.clone())
		.collect()
}

/// Find references to a symbol in a file that imports the source file.
///
/// This looks for patterns like:
/// ```jsonnet
/// local lib = import 'source.jsonnet';
/// lib.field_name // This is a reference to field_name in source.jsonnet
/// ```
fn find_references_in_importer(
	importer_file: FileId,
	source_file: FileId,
	old_name: &str,
	new_name: &SymbolName,
	manager: &Arc<DocumentManager>,
	import_graph: &ImportGraph,
) -> Option<(Uri, Vec<TextEdit>)> {
	use jrsonnet_rowan_parser::nodes::{ExprBase, ExprField};

	let importer_path = import_graph.path(importer_file)?;
	let path = importer_path.as_canonical_path();
	let doc = manager.get_document(path)?;
	let uri = path.to_uri().ok()?;
	// `path` borrows from `importer_path`; release it explicitly once we no
	// longer need path data (presumably an import-graph guard — TODO confirm).
	drop(importer_path);
	let text = doc.text();
	let line_index = doc.line_index();
	let ast = doc.ast();

	let mut edits = Vec::new();
	let import_bindings = import_binding_names(import_graph, importer_file, source_file);

	if import_bindings.is_empty() {
		return None;
	}

	// Find field accesses on the import bindings that match old_name
	// ExprField represents obj.field syntax
	for node in ast.syntax().descendants() {
		if node.kind() != SyntaxKind::EXPR_FIELD {
			continue;
		}

		let Some(field_expr) = ExprField::cast(node) else {
			continue;
		};

		// Check if the field name matches
		let Some(field_name) = field_expr.field() else {
			continue;
		};

		let Some(field_ident) = field_name.ident_lit() else {
			continue;
		};

		if field_ident.text() != old_name {
			continue;
		}

		// Check if the base expression is one of our import bindings
		let Some(base_expr) = field_expr.base() else {
			continue;
		};

		let Some(base) = base_expr.expr_base() else {
			continue;
		};

		let ExprBase::ExprVar(var) = base else {
			continue;
		};

		let Some(var_name) = var.name() else {
			continue;
		};

		let Some(var_ident) = var_name.ident_lit() else {
			continue;
		};

		if !import_bindings.contains(var_ident.text()) {
			continue;
		}

		// Found a field access on an import binding matching old_name
		edits.push(TextEdit {
			range: to_lsp_range(field_ident.text_range(), line_index, text),
			new_text: new_name.to_string(),
		});
	}

	if edits.is_empty() {
		None
	} else {
		Some((uri, edits))
	}
}

#[cfg(test)]
mod tests {
	use std::io::Write;

	use jrsonnet_lsp_document::DocVersion;
	use jrsonnet_lsp_types::GlobalTyStore;
	use tempfile::TempDir;

	use super::*;

	#[test]
	fn test_cross_file_rename_field_access() {
		// Create a temp directory with two files
		let temp_dir = TempDir::new().unwrap();
		let lib_path = temp_dir.path().join("lib.jsonnet");
		let main_path = temp_dir.path().join("main.jsonnet");

		// lib.jsonnet: exports an object with a `helper` field
		let lib_code = r"{ helper: function(x) x * 2 }";
		std::fs::File::create(&lib_path)
			.unwrap()
			.write_all(lib_code.as_bytes())
			.unwrap();

		// main.jsonnet: imports lib and uses lib.helper
		let main_code = r#"local lib = import "lib.jsonnet"; lib.helper(5)"#;
		std::fs::File::create(&main_path)
			.unwrap()
			.write_all(main_code.as_bytes())
			.unwrap();

		// Create the manager and import graph
		let global = Arc::new(GlobalTyStore::new());
		let path_store = jrsonnet_lsp_document::PathStore::new();
		let manager = Arc::new(DocumentManager::new(global, path_store.clone()));

		let lib_canon = CanonicalPath::new(lib_path);
		let main_canon = CanonicalPath::new(main_path);

		// Open both documents
		manager.open(lib_canon.clone(), lib_code.to_string(), DocVersion::new(1));
		manager.open(
			main_canon.clone(),
			main_code.to_string(),
			DocVersion::new(1),
		);

		// Build import graph
		let mut import_graph = ImportGraph::new(path_store);
		if let Some(main_doc) = manager.get_document(&main_canon) {
			let entries = jrsonnet_lsp_import::parse_document_imports(&main_doc, &|import_path| {
				let import_full = temp_dir.path().join(import_path);
				import_full.canonicalize().ok().map(CanonicalPath::new)
			});
			let main_file = import_graph.intern(&main_canon);
import_graph.update_file_with_entries(main_file, entries); + } + + // Get the lib document + let lib_doc = manager.get_document(&lib_canon).unwrap(); + let lib_uri = lib_canon.to_uri().expect("lib URI should be valid"); + + // Rename 'helper' in lib.jsonnet (position 2 is the 'h' in 'helper') + // This is an object field, not a local variable, so local rename won't work + // but cross-file rename should find `lib.helper` in main.jsonnet + let pos = (0, 2).into(); + let new_name = SymbolName::new("util").unwrap(); + + let result = rename_cross_file( + &lib_doc, + pos, + &new_name, + &lib_uri, + &lib_canon, + &manager, + &import_graph, + ); + + // Cross-file rename should find `lib.helper` in main.jsonnet + let edit = result.expect("should produce workspace edit"); + let changes = edit.changes.expect("should have changes"); + + // Should rename the source field definition and importer field access. + // lib_code: { helper: function(x) x * 2 } + // ^^^^^^ + // position: 2 8 + // main_code: local lib = import "lib.jsonnet"; lib.helper(5) + // ^^^^^^ + // position: 38 44 + let mut expected_changes: HashMap> = HashMap::new(); + expected_changes.insert( + lib_uri, + vec![TextEdit { + range: lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 2, + }, + end: lsp_types::Position { + line: 0, + character: 8, + }, + }, + new_text: "util".to_string(), + }], + ); + + let main_uri = main_canon.to_uri().expect("main URI should be valid"); + expected_changes.insert( + main_uri, + vec![TextEdit { + range: lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 38, + }, + end: lsp_types::Position { + line: 0, + character: 44, + }, + }, + new_text: "util".to_string(), + }], + ); + assert_eq!(changes, expected_changes); + } +} diff --git a/crates/jrsonnet-lsp-handlers/src/rename.rs b/crates/jrsonnet-lsp-handlers/src/rename/local.rs similarity index 50% rename from crates/jrsonnet-lsp-handlers/src/rename.rs rename to 
crates/jrsonnet-lsp-handlers/src/rename/local.rs index 07d4f459..bd259b64 100644 --- a/crates/jrsonnet-lsp-handlers/src/rename.rs +++ b/crates/jrsonnet-lsp-handlers/src/rename/local.rs @@ -1,69 +1,13 @@ -//! Rename handler. -//! -//! Provides rename functionality for local symbols. -//! -//! # Cross-File Rename -//! -//! For symbols that are exported from a file, `rename_cross_file` can be used -//! to rename references in importing files. This requires: -//! - A `DocumentManager` to access documents -//! - An `ImportGraph` to find importing files -//! -//! Cross-file rename finds references by: -//! 1. Finding files that import the current file -//! 2. Looking for field accesses that match the renamed symbol -//! 3. Returning a `WorkspaceEdit` with changes across all files - -use std::{ - collections::{HashMap, HashSet}, - sync::Arc, -}; +use std::collections::HashMap; -use jrsonnet_lsp_document::{ - to_lsp_range, token_at_offset, CanonicalPath, Document, FileId, LspPosition, SymbolName, -}; -use jrsonnet_lsp_import::ImportGraph; -use jrsonnet_lsp_inference::DocumentManager; +use jrsonnet_lsp_document::{to_lsp_range, token_at_offset, Document, LspPosition, SymbolName}; use jrsonnet_lsp_scope::{ find_definition_range, is_definition_site, is_renameable, is_variable_reference, ScopeResolver, }; use jrsonnet_rowan_parser::{AstNode, SyntaxKind}; use lsp_types::{PrepareRenameResponse, TextEdit, Uri, WorkspaceEdit}; -fn workspace_edit_from_changes(changes: HashMap>) -> Option { - if changes.is_empty() { - return None; - } - - Some(WorkspaceEdit { - changes: Some(changes), - document_changes: None, - change_annotations: None, - }) -} - -fn field_definition_range(token: &jrsonnet_rowan_parser::SyntaxToken) -> Option { - if token.kind() != SyntaxKind::IDENT { - return None; - } - - let mut node = token.parent(); - while let Some(current) = node { - if current.kind() == SyntaxKind::FIELD_NAME_FIXED { - let parent = current.parent()?; - if matches!( - parent.kind(), - 
SyntaxKind::MEMBER_FIELD_NORMAL | SyntaxKind::MEMBER_FIELD_METHOD - ) { - return Some(token.text_range()); - } - return None; - } - node = current.parent(); - } - - None -} +use super::common::{field_definition_range, workspace_edit_from_changes}; /// Prepare rename response. /// Returns the range of the symbol to be renamed and its current name. @@ -157,193 +101,6 @@ pub fn rename( workspace_edit_from_changes(changes) } -/// Perform cross-file rename operation. -/// -/// This extends the basic rename to also find references in files that import -/// the current file. For top-level object fields, it finds field accesses -/// in importing files. -/// -/// # Arguments -/// - `document`: The current document -/// - `position`: Cursor position -/// - `new_name`: The new name for the symbol -/// - `uri`: URI of the current document -/// - `current_path`: Canonical path of the current file -/// - `manager`: Document manager for accessing other files -/// - `import_graph`: Import graph for finding importing files -/// -/// # Returns -/// A `WorkspaceEdit` with changes across all affected files. 
-pub fn rename_cross_file( - document: &Document, - position: LspPosition, - new_name: &SymbolName, - uri: &Uri, - current_path: &CanonicalPath, - manager: &Arc, - import_graph: &ImportGraph, -) -> Option { - // First, do the local rename to get edits for the current file - let mut all_changes: HashMap> = HashMap::new(); - - // Get local edits - if let Some(local_edit) = rename(document, position, new_name, uri) { - if let Some(changes) = local_edit.changes { - all_changes.extend(changes); - } - } - - // Find the symbol name being renamed - let text = document.text(); - let line_index = document.line_index(); - let offset = line_index.offset(position, text)?; - let ast = document.ast(); - let token = token_at_offset(ast.syntax(), offset)?; - - if let Some(range) = field_definition_range(&token) { - all_changes.entry(uri.clone()).or_default().push(TextEdit { - range: to_lsp_range(range, line_index, text), - new_text: new_name.to_string(), - }); - } - - if token.kind() != SyntaxKind::IDENT { - return workspace_edit_from_changes(all_changes); - } - - let old_name = token.text().to_string(); - let Some(current_file) = import_graph.file(current_path) else { - return workspace_edit_from_changes(all_changes); - }; - - // Find files that import this file - let importers = import_graph.transitive_importers(current_file); - - // Find references in each importing file - for importer_file in importers { - if let Some((importer_uri, edits)) = find_references_in_importer( - importer_file, - current_file, - &old_name, - new_name, - manager, - import_graph, - ) { - all_changes.entry(importer_uri).or_default().extend(edits); - } - } - - workspace_edit_from_changes(all_changes) -} - -fn import_binding_names( - import_graph: &ImportGraph, - importer_file: FileId, - source_file: FileId, -) -> HashSet { - import_graph - .imports_of_target(importer_file, source_file) - .into_iter() - .filter_map(|entry| entry.binding_name.clone()) - .collect() -} - -/// Find references to a symbol in a 
file that imports the source file. -/// -/// This looks for patterns like: -/// ```jsonnet -/// local lib = import 'source.jsonnet'; -/// lib.field_name // This is a reference to field_name in source.jsonnet -/// ``` -fn find_references_in_importer( - importer_file: FileId, - source_file: FileId, - old_name: &str, - new_name: &SymbolName, - manager: &Arc, - import_graph: &ImportGraph, -) -> Option<(Uri, Vec)> { - use jrsonnet_rowan_parser::nodes::{ExprBase, ExprField}; - - let importer_path = import_graph.path(importer_file)?; - let path = importer_path.as_canonical_path(); - let doc = manager.get_document(path)?; - let uri = path.to_uri().ok()?; - drop(importer_path); - let text = doc.text(); - let line_index = doc.line_index(); - let ast = doc.ast(); - - let mut edits = Vec::new(); - let import_bindings = import_binding_names(import_graph, importer_file, source_file); - - if import_bindings.is_empty() { - return None; - } - - // Find field accesses on the import bindings that match old_name - // ExprField represents obj.field syntax - for node in ast.syntax().descendants() { - if node.kind() != SyntaxKind::EXPR_FIELD { - continue; - } - - let Some(field_expr) = ExprField::cast(node) else { - continue; - }; - - // Check if the field name matches - let Some(field_name) = field_expr.field() else { - continue; - }; - - let Some(field_ident) = field_name.ident_lit() else { - continue; - }; - - if field_ident.text() != old_name { - continue; - } - - // Check if the base expression is one of our import bindings - let Some(base_expr) = field_expr.base() else { - continue; - }; - - let Some(base) = base_expr.expr_base() else { - continue; - }; - - let ExprBase::ExprVar(var) = base else { - continue; - }; - - let Some(var_name) = var.name() else { - continue; - }; - - let Some(var_ident) = var_name.ident_lit() else { - continue; - }; - - if !import_bindings.contains(var_ident.text()) { - continue; - } - - // Found a field access on an import binding matching old_name - 
edits.push(TextEdit { - range: to_lsp_range(field_ident.text_range(), line_index, text), - new_text: new_name.to_string(), - }); - } - - if edits.is_empty() { - None - } else { - Some((uri, edits)) - } -} - #[cfg(test)] mod tests { use jrsonnet_lsp_document::DocVersion; @@ -675,126 +432,4 @@ mod tests { ] ); } - - #[test] - fn test_cross_file_rename_field_access() { - use std::io::Write; - - use jrsonnet_lsp_types::GlobalTyStore; - use tempfile::TempDir; - - // Create a temp directory with two files - let temp_dir = TempDir::new().unwrap(); - let lib_path = temp_dir.path().join("lib.jsonnet"); - let main_path = temp_dir.path().join("main.jsonnet"); - - // lib.jsonnet: exports an object with a `helper` field - let lib_code = r"{ helper: function(x) x * 2 }"; - std::fs::File::create(&lib_path) - .unwrap() - .write_all(lib_code.as_bytes()) - .unwrap(); - - // main.jsonnet: imports lib and uses lib.helper - let main_code = r#"local lib = import "lib.jsonnet"; lib.helper(5)"#; - std::fs::File::create(&main_path) - .unwrap() - .write_all(main_code.as_bytes()) - .unwrap(); - - // Create the manager and import graph - let global = Arc::new(GlobalTyStore::new()); - let path_store = jrsonnet_lsp_document::PathStore::new(); - let manager = Arc::new(DocumentManager::new(global, path_store.clone())); - - let lib_canon = CanonicalPath::new(lib_path); - let main_canon = CanonicalPath::new(main_path); - - // Open both documents - manager.open(lib_canon.clone(), lib_code.to_string(), DocVersion::new(1)); - manager.open( - main_canon.clone(), - main_code.to_string(), - DocVersion::new(1), - ); - - // Build import graph - let mut import_graph = ImportGraph::new(path_store); - if let Some(main_doc) = manager.get_document(&main_canon) { - let entries = jrsonnet_lsp_import::parse_document_imports(&main_doc, &|import_path| { - let import_full = temp_dir.path().join(import_path); - import_full.canonicalize().ok().map(CanonicalPath::new) - }); - let main_file = 
import_graph.intern(&main_canon); - import_graph.update_file_with_entries(main_file, entries); - } - - // Get the lib document - let lib_doc = manager.get_document(&lib_canon).unwrap(); - let lib_uri = lib_canon.to_uri().expect("lib URI should be valid"); - - // Rename 'helper' in lib.jsonnet (position 2 is the 'h' in 'helper') - // This is an object field, not a local variable, so local rename won't work - // but cross-file rename should find `lib.helper` in main.jsonnet - let pos = (0, 2).into(); - let new_name = SymbolName::new("util").unwrap(); - - let result = rename_cross_file( - &lib_doc, - pos, - &new_name, - &lib_uri, - &lib_canon, - &manager, - &import_graph, - ); - - // Cross-file rename should find `lib.helper` in main.jsonnet - let edit = result.expect("should produce workspace edit"); - let changes = edit.changes.expect("should have changes"); - - // Should rename the source field definition and importer field access. - // lib_code: { helper: function(x) x * 2 } - // ^^^^^^ - // position: 2 8 - // main_code: local lib = import "lib.jsonnet"; lib.helper(5) - // ^^^^^^ - // position: 38 44 - let mut expected_changes: HashMap> = HashMap::new(); - expected_changes.insert( - lib_uri, - vec![TextEdit { - range: lsp_types::Range { - start: lsp_types::Position { - line: 0, - character: 2, - }, - end: lsp_types::Position { - line: 0, - character: 8, - }, - }, - new_text: "util".to_string(), - }], - ); - - let main_uri = main_canon.to_uri().expect("main URI should be valid"); - expected_changes.insert( - main_uri, - vec![TextEdit { - range: lsp_types::Range { - start: lsp_types::Position { - line: 0, - character: 38, - }, - end: lsp_types::Position { - line: 0, - character: 44, - }, - }, - new_text: "util".to_string(), - }], - ); - assert_eq!(changes, expected_changes); - } } diff --git a/crates/jrsonnet-lsp-handlers/src/rename/mod.rs b/crates/jrsonnet-lsp-handlers/src/rename/mod.rs new file mode 100644 index 00000000..bf5ee0a5 --- /dev/null +++ 
b/crates/jrsonnet-lsp-handlers/src/rename/mod.rs @@ -0,0 +1,10 @@ +//! Rename handlers. +//! +//! Provides local and cross-file rename functionality. + +mod common; +mod cross_file; +mod local; + +pub use cross_file::rename_cross_file; +pub use local::{prepare_rename, rename}; From 6811a31b2454ecde7f8bcf578fadd60924f3a5c2 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Mon, 16 Feb 2026 15:25:03 +0000 Subject: [PATCH 128/210] refactor(lsp-handlers): split code lens internals Break the code lens implementation into focused modules for dispatch, reference counting, command/action lenses, and type lenses. Keep the public API unchanged while isolating logic by concern and reducing per-file size. Keep tests inline in `dispatch.rs` and keep `mod.rs` test-free. --- .../src/code_lens/actions.rs | 75 +++++ .../{code_lens.rs => code_lens/dispatch.rs} | 269 +----------------- .../src/code_lens/mod.rs | 14 + .../src/code_lens/refs.rs | 70 +++++ .../src/code_lens/type_lenses.rs | 107 +++++++ 5 files changed, 281 insertions(+), 254 deletions(-) create mode 100644 crates/jrsonnet-lsp-handlers/src/code_lens/actions.rs rename crates/jrsonnet-lsp-handlers/src/{code_lens.rs => code_lens/dispatch.rs} (64%) create mode 100644 crates/jrsonnet-lsp-handlers/src/code_lens/mod.rs create mode 100644 crates/jrsonnet-lsp-handlers/src/code_lens/refs.rs create mode 100644 crates/jrsonnet-lsp-handlers/src/code_lens/type_lenses.rs diff --git a/crates/jrsonnet-lsp-handlers/src/code_lens/actions.rs b/crates/jrsonnet-lsp-handlers/src/code_lens/actions.rs new file mode 100644 index 00000000..cd16ea06 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/code_lens/actions.rs @@ -0,0 +1,75 @@ +use jrsonnet_lsp_document::Document; +use lsp_types::{CodeLens, Command, Range, Uri}; + +/// Generate "Evaluate" code lens for the document root. 
+pub(super) fn evaluate_lens(document: &Document, uri: &Uri) -> Option { + let ast = document.ast(); + + // Only show evaluate lens if document has a root expression + ast.expr()?; + + // Place the lens at line 0 + let range = Range { + start: lsp_types::Position { + line: 0, + character: 0, + }, + end: lsp_types::Position { + line: 0, + character: 0, + }, + }; + + Some(CodeLens { + range, + command: Some(Command { + title: "Evaluate".to_string(), + command: "jrsonnet.evalFile".to_string(), + arguments: Some(vec![serde_json::json!(uri.to_string())]), + }), + data: None, + }) +} + +/// Generate error status code lens for the document. +/// +/// Shows the number of syntax errors at the top of the file. +/// Only shown when there are errors (no lens for clean files to reduce clutter). +pub(super) fn error_status_lens(document: &Document, uri: &Uri) -> Option { + let errors = document.errors(); + + // Only show lens if there are errors + if errors.is_empty() { + return None; + } + + let error_count = errors.len(); + + // Place the lens at line 0 + let range = Range { + start: lsp_types::Position { + line: 0, + character: 0, + }, + end: lsp_types::Position { + line: 0, + character: 0, + }, + }; + + let title = if error_count == 1 { + "1 syntax error".to_string() + } else { + format!("{error_count} syntax errors") + }; + + Some(CodeLens { + range, + command: Some(Command { + title, + command: "jrsonnet.showErrors".to_string(), + arguments: Some(vec![serde_json::json!(uri.to_string())]), + }), + data: None, + }) +} diff --git a/crates/jrsonnet-lsp-handlers/src/code_lens.rs b/crates/jrsonnet-lsp-handlers/src/code_lens/dispatch.rs similarity index 64% rename from crates/jrsonnet-lsp-handlers/src/code_lens.rs rename to crates/jrsonnet-lsp-handlers/src/code_lens/dispatch.rs index 80ee4cb9..1894f299 100644 --- a/crates/jrsonnet-lsp-handlers/src/code_lens.rs +++ b/crates/jrsonnet-lsp-handlers/src/code_lens/dispatch.rs @@ -1,19 +1,12 @@ -//! 
Code lens handler for showing reference counts and actions. -//! -//! Provides: -//! - Reference counts for definitions (functions, variables) -//! - "Evaluate" action for executable Jsonnet files -//! - Type annotations for function definitions -//! - Error status indicator for the file - -use jrsonnet_lsp_document::{to_lsp_range, Document}; +use jrsonnet_lsp_document::Document; use jrsonnet_lsp_inference::TypeAnalysis; -use jrsonnet_lsp_scope::{is_definition_site, ScopeResolver}; -use jrsonnet_rowan_parser::{ - nodes::{Bind, BindFunction, StmtLocal}, - AstNode, SyntaxKind, +use lsp_types::{CodeLens, Uri}; + +use super::{ + actions::{error_status_lens, evaluate_lens}, + refs::reference_count_lenses, + type_lenses::type_lenses, }; -use lsp_types::{CodeLens, Command, Range, Uri}; /// Configuration for code lens generation. #[derive(Debug, Clone, Default)] @@ -92,245 +85,6 @@ pub fn code_lens( lenses } -/// Generate reference count code lenses for all definitions. -fn reference_count_lenses(document: &Document, uri: &Uri) -> Vec { - let mut lenses = Vec::new(); - let ast = document.ast(); - let text = document.text(); - let line_index = document.line_index(); - - // Build scope resolver for reference counting - let resolver = ScopeResolver::new(ast.syntax()); - - // Find all definitions and count their references - for token in ast - .syntax() - .descendants_with_tokens() - .filter_map(rowan::NodeOrToken::into_token) - { - if token.kind() != SyntaxKind::IDENT { - continue; - } - - if !is_definition_site(&token) { - continue; - } - - let Some(parent) = token.parent() else { - continue; - }; - - let def_range = parent.text_range(); - let name = token.text(); - - // Count references (excluding the definition itself) - let references = resolver.find_references(ast.syntax(), name, def_range); - let ref_count = references.len().saturating_sub(1); // Exclude definition - - // Skip if no references (to avoid clutter) - if ref_count == 0 { - continue; - } - - let range = 
to_lsp_range(def_range, line_index, text); - - let title = if ref_count == 1 { - "1 reference".to_string() - } else { - format!("{ref_count} references") - }; - - lenses.push(CodeLens { - range, - command: Some(Command { - title, - command: "jrsonnet.findReferences".to_string(), - arguments: Some(vec![ - serde_json::json!(uri.to_string()), - serde_json::json!(range.start.line), - serde_json::json!(range.start.character), - ]), - }), - data: None, - }); - } - - lenses -} - -/// Generate "Evaluate" code lens for the document root. -fn evaluate_lens(document: &Document, uri: &Uri) -> Option { - let ast = document.ast(); - - // Only show evaluate lens if document has a root expression - ast.expr()?; - - // Place the lens at line 0 - let range = Range { - start: lsp_types::Position { - line: 0, - character: 0, - }, - end: lsp_types::Position { - line: 0, - character: 0, - }, - }; - - Some(CodeLens { - range, - command: Some(Command { - title: "Evaluate".to_string(), - command: "jrsonnet.evalFile".to_string(), - arguments: Some(vec![serde_json::json!(uri.to_string())]), - }), - data: None, - }) -} - -/// Generate error status code lens for the document. -/// -/// Shows the number of syntax errors at the top of the file. -/// Only shown when there are errors (no lens for clean files to reduce clutter). 
-fn error_status_lens(document: &Document, uri: &Uri) -> Option { - let errors = document.errors(); - - // Only show lens if there are errors - if errors.is_empty() { - return None; - } - - let error_count = errors.len(); - - // Place the lens at line 0 - let range = Range { - start: lsp_types::Position { - line: 0, - character: 0, - }, - end: lsp_types::Position { - line: 0, - character: 0, - }, - }; - - let title = if error_count == 1 { - "1 syntax error".to_string() - } else { - format!("{error_count} syntax errors") - }; - - Some(CodeLens { - range, - command: Some(Command { - title, - command: "jrsonnet.showErrors".to_string(), - arguments: Some(vec![serde_json::json!(uri.to_string())]), - }), - data: None, - }) -} - -/// Generate type annotation code lenses for function definitions. -fn type_lenses(document: &Document, analysis: &TypeAnalysis) -> Vec { - let mut lenses = Vec::new(); - let ast = document.ast(); - let text = document.text(); - let line_index = document.line_index(); - - // Find all function definitions (local f(x) = ...) 
- for node in ast.syntax().descendants() { - // Look for BindFunction nodes (function definitions) - if let Some(bind_func) = BindFunction::cast(node.clone()) { - let Some(name_node) = bind_func.name() else { - continue; - }; - - // Get the type for the function body (the expression) - let Some(body) = bind_func.value() else { - continue; - }; - let body_range = body.syntax().text_range(); - let Some(ty) = analysis.type_for_range(body_range) else { - continue; - }; - - // Format the type - let type_str = analysis.display(ty); - - // Skip if it's just "any" or "function" - not informative - if type_str == "any" || type_str == "function" { - continue; - } - - let range = to_lsp_range(name_node.syntax().text_range(), line_index, text); - - lenses.push(CodeLens { - range, - command: Some(Command { - title: format!(":: {type_str}"), - command: String::new(), // No action, just informational - arguments: None, - }), - data: None, - }); - } - - // Also show types for complex local bindings (local x = { ... 
}) - if let Some(stmt_local) = StmtLocal::cast(node.clone()) { - for bind in stmt_local.binds() { - if let Bind::BindDestruct(bd) = bind { - // Skip simple literals - only show for complex expressions - let Some(value) = bd.value() else { - continue; - }; - - // Check if it's a "complex" expression (object, array, function call) - let is_complex = value.expr_base().is_some_and(|base| { - matches!( - base.syntax().kind(), - SyntaxKind::EXPR_OBJECT - | SyntaxKind::EXPR_ARRAY | SyntaxKind::EXPR_CALL - | SyntaxKind::EXPR_IF_THEN_ELSE - ) - }); - - if !is_complex { - continue; - } - - // Get type for the binding value - let value_range = value.syntax().text_range(); - let Some(ty) = analysis.type_for_range(value_range) else { - continue; - }; - - let type_str = analysis.display(ty); - - // Skip uninformative types - if type_str == "any" { - continue; - } - - let range = to_lsp_range(bd.syntax().text_range(), line_index, text); - - lenses.push(CodeLens { - range, - command: Some(Command { - title: format!(":: {type_str}"), - command: String::new(), - arguments: None, - }), - data: None, - }); - } - } - } - } - - lenses -} - /// Resolve a code lens (add command if not present). 
/// /// This is called when the client requests resolution of a code lens @@ -345,8 +99,15 @@ pub fn resolve_code_lens(lens: CodeLens) -> CodeLens { mod tests { use std::sync::Arc; - use jrsonnet_lsp_document::DocVersion; + use jrsonnet_lsp_document::{to_lsp_range, DocVersion, Document}; + use jrsonnet_lsp_inference::TypeAnalysis; + use jrsonnet_lsp_scope::{is_definition_site, ScopeResolver}; use jrsonnet_lsp_types::GlobalTyStore; + use jrsonnet_rowan_parser::{ + nodes::{Bind, BindFunction, StmtLocal}, + AstNode, SyntaxKind, + }; + use lsp_types::{CodeLens, Command, Range, Uri}; use rowan::NodeOrToken; use super::*; diff --git a/crates/jrsonnet-lsp-handlers/src/code_lens/mod.rs b/crates/jrsonnet-lsp-handlers/src/code_lens/mod.rs new file mode 100644 index 00000000..dbb53540 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/code_lens/mod.rs @@ -0,0 +1,14 @@ +//! Code lens handler for showing reference counts and actions. +//! +//! Provides: +//! - Reference counts for definitions (functions, variables) +//! - "Evaluate" action for executable Jsonnet files +//! - Type annotations for function definitions +//! - Error status indicator for the file + +mod actions; +mod dispatch; +mod refs; +mod type_lenses; + +pub use dispatch::{code_lens, resolve_code_lens, CodeLensConfig, ErrorLensVisibility}; diff --git a/crates/jrsonnet-lsp-handlers/src/code_lens/refs.rs b/crates/jrsonnet-lsp-handlers/src/code_lens/refs.rs new file mode 100644 index 00000000..5359af1f --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/code_lens/refs.rs @@ -0,0 +1,70 @@ +use jrsonnet_lsp_document::{to_lsp_range, Document}; +use jrsonnet_lsp_scope::{is_definition_site, ScopeResolver}; +use jrsonnet_rowan_parser::{AstNode, SyntaxKind}; +use lsp_types::{CodeLens, Command, Uri}; + +/// Generate reference count code lenses for all definitions. 
+pub(super) fn reference_count_lenses(document: &Document, uri: &Uri) -> Vec { + let mut lenses = Vec::new(); + let ast = document.ast(); + let text = document.text(); + let line_index = document.line_index(); + + // Build scope resolver for reference counting + let resolver = ScopeResolver::new(ast.syntax()); + + // Find all definitions and count their references + for token in ast + .syntax() + .descendants_with_tokens() + .filter_map(rowan::NodeOrToken::into_token) + { + if token.kind() != SyntaxKind::IDENT { + continue; + } + + if !is_definition_site(&token) { + continue; + } + + let Some(parent) = token.parent() else { + continue; + }; + + let def_range = parent.text_range(); + let name = token.text(); + + // Count references (excluding the definition itself) + let references = resolver.find_references(ast.syntax(), name, def_range); + let ref_count = references.len().saturating_sub(1); // Exclude definition + + // Skip if no references (to avoid clutter) + if ref_count == 0 { + continue; + } + + let range = to_lsp_range(def_range, line_index, text); + + let title = if ref_count == 1 { + "1 reference".to_string() + } else { + format!("{ref_count} references") + }; + + lenses.push(CodeLens { + range, + command: Some(Command { + title, + command: "jrsonnet.findReferences".to_string(), + arguments: Some(vec![ + serde_json::json!(uri.to_string()), + serde_json::json!(range.start.line), + serde_json::json!(range.start.character), + ]), + }), + data: None, + }); + } + + lenses +} diff --git a/crates/jrsonnet-lsp-handlers/src/code_lens/type_lenses.rs b/crates/jrsonnet-lsp-handlers/src/code_lens/type_lenses.rs new file mode 100644 index 00000000..a9921f92 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/code_lens/type_lenses.rs @@ -0,0 +1,107 @@ +use jrsonnet_lsp_document::{to_lsp_range, Document}; +use jrsonnet_lsp_inference::TypeAnalysis; +use jrsonnet_rowan_parser::{ + nodes::{Bind, BindFunction, StmtLocal}, + AstNode, SyntaxKind, +}; +use lsp_types::{CodeLens, 
Command}; + +/// Generate type annotation code lenses for function definitions. +pub(super) fn type_lenses(document: &Document, analysis: &TypeAnalysis) -> Vec { + let mut lenses = Vec::new(); + let ast = document.ast(); + let text = document.text(); + let line_index = document.line_index(); + + // Find all function definitions (local f(x) = ...) + for node in ast.syntax().descendants() { + // Look for BindFunction nodes (function definitions) + if let Some(bind_func) = BindFunction::cast(node.clone()) { + let Some(name_node) = bind_func.name() else { + continue; + }; + + // Get the type for the function body (the expression) + let Some(body) = bind_func.value() else { + continue; + }; + let body_range = body.syntax().text_range(); + let Some(ty) = analysis.type_for_range(body_range) else { + continue; + }; + + // Format the type + let type_str = analysis.display(ty); + + // Skip if it's just "any" or "function" - not informative + if type_str == "any" || type_str == "function" { + continue; + } + + let range = to_lsp_range(name_node.syntax().text_range(), line_index, text); + + lenses.push(CodeLens { + range, + command: Some(Command { + title: format!(":: {type_str}"), + command: String::new(), // No action, just informational + arguments: None, + }), + data: None, + }); + } + + // Also show types for complex local bindings (local x = { ... 
}) + if let Some(stmt_local) = StmtLocal::cast(node.clone()) { + for bind in stmt_local.binds() { + if let Bind::BindDestruct(bd) = bind { + // Skip simple literals - only show for complex expressions + let Some(value) = bd.value() else { + continue; + }; + + // Check if it's a "complex" expression (object, array, function call) + let is_complex = value.expr_base().is_some_and(|base| { + matches!( + base.syntax().kind(), + SyntaxKind::EXPR_OBJECT + | SyntaxKind::EXPR_ARRAY | SyntaxKind::EXPR_CALL + | SyntaxKind::EXPR_IF_THEN_ELSE + ) + }); + + if !is_complex { + continue; + } + + // Get type for the binding value + let value_range = value.syntax().text_range(); + let Some(ty) = analysis.type_for_range(value_range) else { + continue; + }; + + let type_str = analysis.display(ty); + + // Skip uninformative types + if type_str == "any" { + continue; + } + + let range = to_lsp_range(bd.syntax().text_range(), line_index, text); + + lenses.push(CodeLens { + range, + command: Some(Command { + title: format!(":: {type_str}"), + command: String::new(), + arguments: None, + }), + data: None, + }); + } + } + } + } + + lenses +} From d23e2d2525616eea3ca745ef473a80ddf355489f Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Mon, 16 Feb 2026 15:27:54 +0000 Subject: [PATCH 129/210] refactor(lsp-handlers): split hover internals Split hover behavior into focused modules for top-level dispatch, local-definition rendering, and stdlib symbol docs. Keep public hover APIs unchanged and keep tests inline in `handler.rs` while keeping `mod.rs` test-free. This reduces file size and isolates import/type-resolution paths for future changes. 
--- .../src/{hover.rs => hover/handler.rs} | 223 +----------------- .../jrsonnet-lsp-handlers/src/hover/local.rs | 178 ++++++++++++++ crates/jrsonnet-lsp-handlers/src/hover/mod.rs | 15 ++ .../jrsonnet-lsp-handlers/src/hover/stdlib.rs | 38 +++ 4 files changed, 234 insertions(+), 220 deletions(-) rename crates/jrsonnet-lsp-handlers/src/{hover.rs => hover/handler.rs} (66%) create mode 100644 crates/jrsonnet-lsp-handlers/src/hover/local.rs create mode 100644 crates/jrsonnet-lsp-handlers/src/hover/mod.rs create mode 100644 crates/jrsonnet-lsp-handlers/src/hover/stdlib.rs diff --git a/crates/jrsonnet-lsp-handlers/src/hover.rs b/crates/jrsonnet-lsp-handlers/src/hover/handler.rs similarity index 66% rename from crates/jrsonnet-lsp-handlers/src/hover.rs rename to crates/jrsonnet-lsp-handlers/src/hover/handler.rs index 832f49e6..777c1c0d 100644 --- a/crates/jrsonnet-lsp-handlers/src/hover.rs +++ b/crates/jrsonnet-lsp-handlers/src/hover/handler.rs @@ -1,25 +1,9 @@ -//! Hover handler for providing documentation on hover. -//! -//! Supports: -//! - Standard library functions (std.*) -//! - Local variable definitions (shows first few lines) - -use jrsonnet_lsp_document::{to_lsp_range, token_at_offset, ByteOffset, Document, LspPosition}; +use jrsonnet_lsp_document::{token_at_offset, ByteOffset, Document, LspPosition}; use jrsonnet_lsp_inference::TypeAnalysis; -use jrsonnet_lsp_scope::expr_resolves_to_builtin_std; -use jrsonnet_lsp_stdlib as stdlib; -use jrsonnet_lsp_types::{Ty, TyData}; -use jrsonnet_rowan_parser::{ - nodes::{Bind, Destruct, ExprField}, - AstNode, SyntaxKind, SyntaxToken, -}; +use jrsonnet_rowan_parser::AstNode; use lsp_types::{Hover, HoverContents, MarkupContent, MarkupKind}; -use crate::definition::{goto_definition, DefinitionResult}; - -/// Maximum number of lines to show in hover for local definitions. 
-const MAX_HOVER_LINES: usize = 5; -type ImportFieldTypeResolver<'a> = dyn Fn(&str, &[String]) -> Option + 'a; +use super::{local::check_local_hover, stdlib::check_stdlib_hover, ImportFieldTypeResolver}; /// Get hover information for the given position. /// @@ -103,207 +87,6 @@ fn merge_markdown_sections( } } -/// Check for hover on a local variable reference. -fn check_local_hover( - document: &Document, - analysis: &TypeAnalysis, - position: LspPosition, - text: &str, - offset: ByteOffset, - import_field_type_resolver: Option<&ImportFieldTypeResolver<'_>>, -) -> Option { - let result = goto_definition(document, position) - .or_else(|| local_definition_at_offset(document, offset))?; - - // Get the inferred type at this position. If the local definition site only - // reports `any`, fall back to the bound value expression type. - let ast = document.ast(); - let inferred_ty = analysis.type_at_position(ast.syntax(), offset.into()); - let mut inferred_type = inferred_ty.map(|ty| analysis.display_for_hover(ty)); - let inferred_is_any = inferred_ty.is_none_or(|ty| ty == Ty::ANY); - let inferred_is_object = inferred_ty - .is_some_and(|ty| analysis.with_data(ty, |data| matches!(data, TyData::Object(_)))); - - match &result { - DefinitionResult::ImportField { path, fields } => { - if inferred_is_any || inferred_is_object { - if let Some(resolver) = import_field_type_resolver { - if let Some(resolved_type) = resolver(path, fields) { - inferred_type = Some(resolved_type); - } - } - } - } - DefinitionResult::Local(range) => { - if matches!(inferred_type.as_deref(), None | Some("any")) { - inferred_type = definition_value_type(document, analysis, range); - } - } - DefinitionResult::Import(path) => { - if inferred_is_any { - if let Some(resolver) = import_field_type_resolver { - if let Some(resolved_type) = resolver(path, &[]) { - inferred_type = Some(resolved_type); - } - } - } - } - } - - let type_str = inferred_type - .map(|ty| format!("`{ty}`\n\n")) - .unwrap_or_default(); 
- - match result { - DefinitionResult::Local(range) => { - let start_line = range.start.line as usize; - let lines: Vec<&str> = text.lines().collect(); - let num_lines = lines.len(); - - if start_line >= num_lines { - return None; - } - - let mut def_end_line = start_line; - for (i, line) in lines - .iter() - .enumerate() - .skip(start_line) - .take(MAX_HOVER_LINES) - { - def_end_line = i; - if line.contains(';') { - break; - } - } - - let preview_slice = lines.get(start_line..=def_end_line)?; - let preview_lines: Vec<&str> = preview_slice - .iter() - .take(MAX_HOVER_LINES) - .copied() - .collect(); - - let mut preview = preview_lines.join("\n"); - if def_end_line - start_line >= MAX_HOVER_LINES { - preview.push_str("\n..."); - } - - Some(Hover { - contents: HoverContents::Markup(MarkupContent { - kind: MarkupKind::Markdown, - value: format!("{type_str}```jsonnet\n{preview}\n```"), - }), - range: None, - }) - } - DefinitionResult::Import(path) => Some(Hover { - contents: HoverContents::Markup(MarkupContent { - kind: MarkupKind::Markdown, - value: format!("{type_str}`{path}`"), - }), - range: None, - }), - DefinitionResult::ImportField { path, fields } => { - let field_chain = fields.join("."); - Some(Hover { - contents: HoverContents::Markup(MarkupContent { - kind: MarkupKind::Markdown, - value: format!("{type_str}`{field_chain}` from `{path}`"), - }), - range: None, - }) - } - } -} - -fn local_definition_at_offset(document: &Document, offset: ByteOffset) -> Option { - let ast = document.ast(); - let token = token_at_offset(ast.syntax(), offset)?; - if token.kind() != SyntaxKind::IDENT { - return None; - } - - let name_node = token.parent()?; - let bind = name_node.ancestors().find_map(Bind::cast)?; - let definition_range = match bind { - Bind::BindDestruct(bind) => { - let destruct = jrsonnet_rowan_parser::nodes::BindDestruct::into(&bind)?; - let Destruct::DestructFull(full) = destruct else { - return None; - }; - full.name()?.syntax().text_range() - } - 
Bind::BindFunction(bind) => bind.name()?.syntax().text_range(), - }; - - if name_node.text_range() != definition_range { - return None; - } - - Some(DefinitionResult::Local(to_lsp_range( - definition_range, - document.line_index(), - document.text(), - ))) -} - -fn definition_value_type( - document: &Document, - analysis: &TypeAnalysis, - range: &lsp_types::Range, -) -> Option { - let text = document.text(); - let line_index = document.line_index(); - let ast = document.ast(); - - let def_pos: LspPosition = (range.start.line, range.start.character).into(); - let def_offset = line_index.offset(def_pos, text)?; - let token = token_at_offset(ast.syntax(), def_offset)?; - - let bind = token.parent()?.ancestors().find_map(Bind::cast)?; - let value = match bind { - Bind::BindDestruct(bind) => bind.value()?, - Bind::BindFunction(bind) => bind.value()?, - }; - let ty = analysis.type_for_range(value.syntax().text_range())?; - Some(analysis.display_for_hover(ty)) -} - -/// Check if the token is a stdlib function call and return hover info. -fn check_stdlib_hover(token: &SyntaxToken) -> Option { - if token.kind() != SyntaxKind::IDENT { - return None; - } - - let name = token.text(); - - // Walk up: IDENT -> NAME -> EXPR_FIELD - let parent = token.parent()?; - if parent.kind() != SyntaxKind::NAME { - return None; - } - - let field = ExprField::cast(parent.parent()?)?; - - // Check if base resolves to builtin std. 
- let base = field.base()?; - if !expr_resolves_to_builtin_std(&base) { - return None; - } - - stdlib::ensure_initialized(); - let doc = stdlib::get_stdlib_doc(name)?; - - Some(Hover { - contents: HoverContents::Markup(MarkupContent { - kind: MarkupKind::Markdown, - value: doc.to_markdown(), - }), - range: None, - }) -} - #[cfg(test)] mod tests { use std::sync::Arc; diff --git a/crates/jrsonnet-lsp-handlers/src/hover/local.rs b/crates/jrsonnet-lsp-handlers/src/hover/local.rs new file mode 100644 index 00000000..2fe38e1e --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/hover/local.rs @@ -0,0 +1,178 @@ +use jrsonnet_lsp_document::{to_lsp_range, token_at_offset, ByteOffset, Document, LspPosition}; +use jrsonnet_lsp_inference::TypeAnalysis; +use jrsonnet_lsp_types::{Ty, TyData}; +use jrsonnet_rowan_parser::{ + nodes::{Bind, Destruct}, + AstNode, SyntaxKind, +}; +use lsp_types::{Hover, HoverContents, MarkupContent, MarkupKind}; + +use super::{ImportFieldTypeResolver, MAX_HOVER_LINES}; +use crate::definition::{goto_definition, DefinitionResult}; + +/// Check for hover on a local variable reference. +pub(super) fn check_local_hover( + document: &Document, + analysis: &TypeAnalysis, + position: LspPosition, + text: &str, + offset: ByteOffset, + import_field_type_resolver: Option<&ImportFieldTypeResolver<'_>>, +) -> Option { + let result = goto_definition(document, position) + .or_else(|| local_definition_at_offset(document, offset))?; + + // Get the inferred type at this position. If the local definition site only + // reports `any`, fall back to the bound value expression type. 
+ let ast = document.ast(); + let inferred_ty = analysis.type_at_position(ast.syntax(), offset.into()); + let mut inferred_type = inferred_ty.map(|ty| analysis.display_for_hover(ty)); + let inferred_is_any = inferred_ty.is_none_or(|ty| ty == Ty::ANY); + let inferred_is_object = inferred_ty + .is_some_and(|ty| analysis.with_data(ty, |data| matches!(data, TyData::Object(_)))); + + match &result { + DefinitionResult::ImportField { path, fields } => { + if inferred_is_any || inferred_is_object { + if let Some(resolver) = import_field_type_resolver { + if let Some(resolved_type) = resolver(path, fields) { + inferred_type = Some(resolved_type); + } + } + } + } + DefinitionResult::Local(range) => { + if matches!(inferred_type.as_deref(), None | Some("any")) { + inferred_type = definition_value_type(document, analysis, range); + } + } + DefinitionResult::Import(path) => { + if inferred_is_any { + if let Some(resolver) = import_field_type_resolver { + if let Some(resolved_type) = resolver(path, &[]) { + inferred_type = Some(resolved_type); + } + } + } + } + } + + let type_str = inferred_type + .map(|ty| format!("`{ty}`\n\n")) + .unwrap_or_default(); + + match result { + DefinitionResult::Local(range) => { + let start_line = range.start.line as usize; + let lines: Vec<&str> = text.lines().collect(); + let num_lines = lines.len(); + + if start_line >= num_lines { + return None; + } + + let mut def_end_line = start_line; + for (i, line) in lines + .iter() + .enumerate() + .skip(start_line) + .take(MAX_HOVER_LINES) + { + def_end_line = i; + if line.contains(';') { + break; + } + } + + let preview_slice = lines.get(start_line..=def_end_line)?; + let preview_lines: Vec<&str> = preview_slice + .iter() + .take(MAX_HOVER_LINES) + .copied() + .collect(); + + let mut preview = preview_lines.join("\n"); + if def_end_line - start_line >= MAX_HOVER_LINES { + preview.push_str("\n..."); + } + + Some(Hover { + contents: HoverContents::Markup(MarkupContent { + kind: MarkupKind::Markdown, + 
value: format!("{type_str}```jsonnet\n{preview}\n```"), + }), + range: None, + }) + } + DefinitionResult::Import(path) => Some(Hover { + contents: HoverContents::Markup(MarkupContent { + kind: MarkupKind::Markdown, + value: format!("{type_str}`{path}`"), + }), + range: None, + }), + DefinitionResult::ImportField { path, fields } => { + let field_chain = fields.join("."); + Some(Hover { + contents: HoverContents::Markup(MarkupContent { + kind: MarkupKind::Markdown, + value: format!("{type_str}`{field_chain}` from `{path}`"), + }), + range: None, + }) + } + } +} + +fn local_definition_at_offset(document: &Document, offset: ByteOffset) -> Option { + let ast = document.ast(); + let token = token_at_offset(ast.syntax(), offset)?; + if token.kind() != SyntaxKind::IDENT { + return None; + } + + let name_node = token.parent()?; + let bind = name_node.ancestors().find_map(Bind::cast)?; + let definition_range = match bind { + Bind::BindDestruct(bind) => { + let destruct = jrsonnet_rowan_parser::nodes::BindDestruct::into(&bind)?; + let Destruct::DestructFull(full) = destruct else { + return None; + }; + full.name()?.syntax().text_range() + } + Bind::BindFunction(bind) => bind.name()?.syntax().text_range(), + }; + + if name_node.text_range() != definition_range { + return None; + } + + Some(DefinitionResult::Local(to_lsp_range( + definition_range, + document.line_index(), + document.text(), + ))) +} + +fn definition_value_type( + document: &Document, + analysis: &TypeAnalysis, + range: &lsp_types::Range, +) -> Option { + let text = document.text(); + let line_index = document.line_index(); + let ast = document.ast(); + + let def_pos: LspPosition = (range.start.line, range.start.character).into(); + let def_offset = line_index.offset(def_pos, text)?; + let token = token_at_offset(ast.syntax(), def_offset)?; + + let bind = token.parent()?.ancestors().find_map(Bind::cast)?; + let value = match bind { + Bind::BindDestruct(bind) => bind.value()?, + Bind::BindFunction(bind) => 
bind.value()?, + }; + let ty = analysis.type_for_range(value.syntax().text_range())?; + Some(analysis.display_for_hover(ty)) +} diff --git a/crates/jrsonnet-lsp-handlers/src/hover/mod.rs b/crates/jrsonnet-lsp-handlers/src/hover/mod.rs new file mode 100644 index 00000000..054014ff --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/hover/mod.rs @@ -0,0 +1,15 @@ +//! Hover handler for providing documentation on hover. +//! +//! Supports: +//! - Standard library functions (std.*) +//! - Local variable definitions (shows first few lines) + +mod handler; +mod local; +mod stdlib; + +pub use handler::{hover, hover_with_import_field_type}; + +/// Maximum number of lines to show in hover for local definitions. +pub(super) const MAX_HOVER_LINES: usize = 5; +pub(super) type ImportFieldTypeResolver<'a> = dyn Fn(&str, &[String]) -> Option + 'a; diff --git a/crates/jrsonnet-lsp-handlers/src/hover/stdlib.rs b/crates/jrsonnet-lsp-handlers/src/hover/stdlib.rs new file mode 100644 index 00000000..be8a9560 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/hover/stdlib.rs @@ -0,0 +1,38 @@ +use jrsonnet_lsp_scope::expr_resolves_to_builtin_std; +use jrsonnet_lsp_stdlib as stdlib; +use jrsonnet_rowan_parser::{nodes::ExprField, AstNode, SyntaxKind, SyntaxToken}; +use lsp_types::{Hover, HoverContents, MarkupContent, MarkupKind}; + +/// Check if the token is a stdlib function call and return hover info. +pub(super) fn check_stdlib_hover(token: &SyntaxToken) -> Option { + if token.kind() != SyntaxKind::IDENT { + return None; + } + + let name = token.text(); + + // Walk up: IDENT -> NAME -> EXPR_FIELD + let parent = token.parent()?; + if parent.kind() != SyntaxKind::NAME { + return None; + } + + let field = ExprField::cast(parent.parent()?)?; + + // Check if base resolves to builtin std. 
+ let base = field.base()?; + if !expr_resolves_to_builtin_std(&base) { + return None; + } + + stdlib::ensure_initialized(); + let doc = stdlib::get_stdlib_doc(name)?; + + Some(Hover { + contents: HoverContents::Markup(MarkupContent { + kind: MarkupKind::Markdown, + value: doc.to_markdown(), + }), + range: None, + }) +} From 0bd5f3fd42a0365ac96455e2ace0454c79258d40 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Mon, 16 Feb 2026 15:30:56 +0000 Subject: [PATCH 130/210] refactor(lsp-handlers): split formatting internals Split formatting into focused modules for config types, CLI argument construction, formatter execution, and response dispatch. Keep formatting APIs unchanged while reducing file size and isolating engine-specific behavior. Keep tests inline in `engine.rs` and keep `mod.rs` test-free. --- .../src/formatting/args.rs | 76 +++++ .../src/formatting/dispatch.rs | 68 +++++ .../{formatting.rs => formatting/engine.rs} | 271 +----------------- .../src/formatting/mod.rs | 11 + .../src/formatting/types.rs | 115 ++++++++ 5 files changed, 283 insertions(+), 258 deletions(-) create mode 100644 crates/jrsonnet-lsp-handlers/src/formatting/args.rs create mode 100644 crates/jrsonnet-lsp-handlers/src/formatting/dispatch.rs rename crates/jrsonnet-lsp-handlers/src/{formatting.rs => formatting/engine.rs} (56%) create mode 100644 crates/jrsonnet-lsp-handlers/src/formatting/mod.rs create mode 100644 crates/jrsonnet-lsp-handlers/src/formatting/types.rs diff --git a/crates/jrsonnet-lsp-handlers/src/formatting/args.rs b/crates/jrsonnet-lsp-handlers/src/formatting/args.rs new file mode 100644 index 00000000..f1b2dc8b --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/formatting/args.rs @@ -0,0 +1,76 @@ +use super::types::FormattingConfig; + +/// Build CLI arguments for the formatter based on config. 
+/// +/// Different formatters support different options: +/// - jrsonnet-fmt: only --indent +/// - jsonnetfmt: all options +pub(super) fn build_formatter_args(config: &FormattingConfig) -> Vec { + let mut args = vec!["-".to_string()]; // Read from stdin + + if let Some(indent) = config.indent { + args.push("--indent".to_string()); + args.push(indent.to_string()); + } + + if let Some(max_blank_lines) = config.max_blank_lines { + args.push("--max-blank-lines".to_string()); + args.push(max_blank_lines.to_string()); + } + + if let Some(ref style) = config.string_style { + // jsonnetfmt uses single-letter codes: d=double, s=single, l=leave + let code = match style.to_lowercase().as_str() { + "double" | "d" => "d", + "single" | "s" => "s", + _ => "l", + }; + args.push("--string-style".to_string()); + args.push(code.to_string()); + } + + if let Some(ref style) = config.comment_style { + // jsonnetfmt uses single-letter codes: h=hash, s=slash, l=leave + let code = match style.to_lowercase().as_str() { + "hash" | "h" => "h", + "slash" | "s" => "s", + _ => "l", + }; + args.push("--comment-style".to_string()); + args.push(code.to_string()); + } + + if config.pad_arrays == Some(true) { + args.push("--pad-arrays".to_string()); + } + + if config.pad_objects == Some(false) { + args.push("--no-pad-objects".to_string()); + } + + if config.pretty_field_names == Some(false) { + args.push("--no-pretty-field-names".to_string()); + } + + if config.sort_imports == Some(false) { + args.push("--no-sort-imports".to_string()); + } + + if config.use_implicit_plus == Some(false) { + args.push("--no-use-implicit-plus".to_string()); + } + + if config.strip_everything == Some(true) { + args.push("--strip-everything".to_string()); + } + + if config.strip_comments == Some(true) { + args.push("--strip-comments".to_string()); + } + + if config.strip_all_but_comments == Some(true) { + args.push("--strip-all-but-comments".to_string()); + } + + args +} diff --git 
a/crates/jrsonnet-lsp-handlers/src/formatting/dispatch.rs b/crates/jrsonnet-lsp-handlers/src/formatting/dispatch.rs new file mode 100644 index 00000000..ea23adc6 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/formatting/dispatch.rs @@ -0,0 +1,68 @@ +use lsp_types::{Position, Range, TextEdit}; +use tracing::debug; + +use super::{ + engine::run_formatter, + types::{FormattingConfig, FormattingContext}, +}; + +fn to_u32(value: usize) -> u32 { + u32::try_from(value).unwrap_or(u32::MAX) +} + +/// Format a Jsonnet document with default configuration. +/// +/// Returns a list of text edits to apply to the document. +/// On error, returns None. +#[must_use] +pub fn format_document(text: &str) -> Option> { + format_document_with_config( + text, + &FormattingConfig::default(), + FormattingContext::detached(), + ) +} + +/// Format a Jsonnet document with the given configuration. +/// +/// Returns a list of text edits to apply to the document. +/// On error, returns None. +#[must_use] +pub fn format_document_with_config( + text: &str, + config: &FormattingConfig, + context: FormattingContext<'_>, +) -> Option> { + // Try to run the formatter + let formatted = match run_formatter(text, config, context) { + Ok(formatted) => formatted, + Err(err) => { + debug!("Formatting unavailable: {err}"); + return None; + } + }; + + if formatted == text { + // No changes needed + return Some(Vec::new()); + } + + // Return a single edit that replaces the entire document + let lines: Vec<&str> = text.lines().collect(); + let last_line = to_u32(lines.len().saturating_sub(1)); + let last_col = to_u32(lines.last().map_or(0, |l| l.len())); + + Some(vec![TextEdit { + range: Range { + start: Position { + line: 0, + character: 0, + }, + end: Position { + line: last_line, + character: last_col, + }, + }, + new_text: formatted, + }]) +} diff --git a/crates/jrsonnet-lsp-handlers/src/formatting.rs b/crates/jrsonnet-lsp-handlers/src/formatting/engine.rs similarity index 56% rename from 
crates/jrsonnet-lsp-handlers/src/formatting.rs rename to crates/jrsonnet-lsp-handlers/src/formatting/engine.rs index d4ff13e4..b5cf1f27 100644 --- a/crates/jrsonnet-lsp-handlers/src/formatting.rs +++ b/crates/jrsonnet-lsp-handlers/src/formatting/engine.rs @@ -1,7 +1,3 @@ -//! Document formatting handler. -//! -//! Formats Jsonnet code using an external formatter (jrsonnet-fmt, jsonnetfmt, etc.). - use std::{ ffi::OsStr, fmt, io, @@ -9,128 +5,13 @@ use std::{ process::{Command, Stdio}, }; -use lsp_types::{Position, Range, TextEdit}; -use serde::{Deserialize, Serialize}; -use tracing::debug; - -fn to_u32(value: usize) -> u32 { - u32::try_from(value).unwrap_or(u32::MAX) -} - -/// Formatter execution mode. -#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] -pub enum FormatterEngine { - /// Resolve formatter via PATH (auto mode). - #[serde(rename = "path", alias = "auto", alias = "auto-path")] - Path, - /// Use workspace-local contract: `{workspace}/bin/jsonnetfmt -stdio `. - #[serde( - rename = "bin-jsonnetfmt-stdio", - alias = "workspace-jsonnetfmt-stdio", - alias = "workspaceBinJsonnetfmtStdio" - )] - BinJsonnetfmtStdio, -} - -impl Default for FormatterEngine { - fn default() -> Self { - Self::Path - } -} - -/// Formatting configuration options. -/// -/// These options correspond to the go-jsonnet formatter (jsonnetfmt) options. -/// Not all formatters support all options - jrsonnet-fmt only supports `indent`. -#[derive(Debug, Clone, Default, Serialize, Deserialize)] -#[serde(default)] -pub struct FormattingConfig { - /// Number of spaces per indentation level (default: 2). - #[serde(alias = "Indent")] - pub indent: Option, - - /// Maximum blank lines allowed (default: 2). - #[serde(alias = "MaxBlankLines")] - pub max_blank_lines: Option, - - /// String style: "double", "single", or "leave" (default: "leave"). - #[serde(alias = "StringStyle")] - pub string_style: Option, - - /// Comment style: "hash", "slash", or "leave" (default: "leave"). 
- #[serde(alias = "CommentStyle")] - pub comment_style: Option, - - /// Add padding inside arrays (default: false). - #[serde(alias = "PadArrays")] - pub pad_arrays: Option, - - /// Add padding inside objects (default: true). - #[serde(alias = "PadObjects")] - pub pad_objects: Option, - - /// Pretty print fields (one per line) (default: true). - #[serde(alias = "PrettyFieldNames")] - pub pretty_field_names: Option, - - /// Sort imports (default: true). - #[serde(alias = "SortImports")] - pub sort_imports: Option, - - /// Use implicit plus for object inheritance (default: true). - #[serde(alias = "UseImplicitPlus")] - pub use_implicit_plus: Option, - - /// Strip everything after // (default: false). - #[serde(alias = "StripEverything")] - pub strip_everything: Option, - - /// Strip all comments (default: false). - #[serde(alias = "StripComments")] - pub strip_comments: Option, - - /// Strip all comments except those at the start (default: false). - #[serde(alias = "StripAllButComments")] - pub strip_all_but_comments: Option, - - /// Path to the formatter binary (default: searches PATH for jrsonnet-fmt or jsonnetfmt). - #[serde(alias = "FormatterPath")] - pub formatter_path: Option, - - /// Formatter engine mode. - /// - /// - `None` or `path`: try `jrsonnet-fmt` then `jsonnetfmt` in `PATH` - /// - `bin-jsonnetfmt-stdio`: run `{workspaceRoot}/bin/jsonnetfmt -stdio ` - #[serde(alias = "FormatterEngine")] - pub formatter_engine: Option, -} - -/// Context required for formatter resolution. -#[derive(Debug, Clone, Copy, Default)] -pub struct FormattingContext<'a> { - /// Absolute path to the document being formatted. - pub document_path: Option<&'a Path>, - /// Known workspace roots from initialization. 
- pub workspace_roots: &'a [PathBuf], -} - -impl<'a> FormattingContext<'a> { - #[must_use] - pub fn detached() -> Self { - Self::default() - } - - #[must_use] - pub fn for_document(document_path: &'a Path, workspace_roots: &'a [PathBuf]) -> Self { - Self { - document_path: Some(document_path), - workspace_roots, - } - } -} +use super::{ + args::build_formatter_args, + types::{FormatterEngine, FormattingConfig, FormattingContext}, +}; #[derive(Debug)] -enum FormatterError { +pub(super) enum FormatterError { NoFormatterInPath, MissingDocumentPath, MissingWorkspaceRoot { @@ -222,68 +103,11 @@ impl fmt::Display for FormatterError { } } -/// Format a Jsonnet document with default configuration. -/// -/// Returns a list of text edits to apply to the document. -/// On error, returns None. -#[must_use] -pub fn format_document(text: &str) -> Option> { - format_document_with_config( - text, - &FormattingConfig::default(), - FormattingContext::detached(), - ) -} - -/// Format a Jsonnet document with the given configuration. -/// -/// Returns a list of text edits to apply to the document. -/// On error, returns None. 
-#[must_use] -pub fn format_document_with_config( - text: &str, - config: &FormattingConfig, - context: FormattingContext<'_>, -) -> Option> { - // Try to run the formatter - let formatted = match run_formatter(text, config, context) { - Ok(formatted) => formatted, - Err(err) => { - debug!("Formatting unavailable: {err}"); - return None; - } - }; - - if formatted == text { - // No changes needed - return Some(Vec::new()); - } - - // Return a single edit that replaces the entire document - let lines: Vec<&str> = text.lines().collect(); - let last_line = to_u32(lines.len().saturating_sub(1)); - let last_col = to_u32(lines.last().map_or(0, |l| l.len())); - - Some(vec![TextEdit { - range: Range { - start: Position { - line: 0, - character: 0, - }, - end: Position { - line: last_line, - character: last_col, - }, - }, - new_text: formatted, - }]) -} - /// Run the formatter on the input text. /// /// If `config.formatter_path` is set, that formatter is used directly. /// Otherwise, formatter behavior is selected by `formatter_engine`. -fn run_formatter( +pub(super) fn run_formatter( input: &str, config: &FormattingConfig, context: FormattingContext<'_>, @@ -330,81 +154,6 @@ fn run_formatter( } } -/// Build CLI arguments for the formatter based on config. 
-/// -/// Different formatters support different options: -/// - jrsonnet-fmt: only --indent -/// - jsonnetfmt: all options -fn build_formatter_args(config: &FormattingConfig) -> Vec { - let mut args = vec!["-".to_string()]; // Read from stdin - - if let Some(indent) = config.indent { - args.push("--indent".to_string()); - args.push(indent.to_string()); - } - - if let Some(max_blank_lines) = config.max_blank_lines { - args.push("--max-blank-lines".to_string()); - args.push(max_blank_lines.to_string()); - } - - if let Some(ref style) = config.string_style { - // jsonnetfmt uses single-letter codes: d=double, s=single, l=leave - let code = match style.to_lowercase().as_str() { - "double" | "d" => "d", - "single" | "s" => "s", - _ => "l", - }; - args.push("--string-style".to_string()); - args.push(code.to_string()); - } - - if let Some(ref style) = config.comment_style { - // jsonnetfmt uses single-letter codes: h=hash, s=slash, l=leave - let code = match style.to_lowercase().as_str() { - "hash" | "h" => "h", - "slash" | "s" => "s", - _ => "l", - }; - args.push("--comment-style".to_string()); - args.push(code.to_string()); - } - - if config.pad_arrays == Some(true) { - args.push("--pad-arrays".to_string()); - } - - if config.pad_objects == Some(false) { - args.push("--no-pad-objects".to_string()); - } - - if config.pretty_field_names == Some(false) { - args.push("--no-pretty-field-names".to_string()); - } - - if config.sort_imports == Some(false) { - args.push("--no-sort-imports".to_string()); - } - - if config.use_implicit_plus == Some(false) { - args.push("--no-use-implicit-plus".to_string()); - } - - if config.strip_everything == Some(true) { - args.push("--strip-everything".to_string()); - } - - if config.strip_comments == Some(true) { - args.push("--strip-comments".to_string()); - } - - if config.strip_all_but_comments == Some(true) { - args.push("--strip-all-but-comments".to_string()); - } - - args -} - /// Try to run a specific formatter binary. 
fn try_run_formatter_binary( program: S, @@ -485,7 +234,9 @@ fn select_workspace_root<'a>( mod tests { use std::{fs, path::Path}; - use super::*; + use lsp_types::{Position, Range, TextEdit}; + + use super::{super::dispatch::format_document_with_config, *}; #[test] fn test_select_workspace_root_prefers_deepest_match() { @@ -523,6 +274,10 @@ mod tests { fs::set_permissions(path, permissions).unwrap(); } + fn to_u32(value: usize) -> u32 { + u32::try_from(value).unwrap_or(u32::MAX) + } + fn full_replacement_edit(old: &str, new_text: String) -> Vec { vec![TextEdit { range: Range { diff --git a/crates/jrsonnet-lsp-handlers/src/formatting/mod.rs b/crates/jrsonnet-lsp-handlers/src/formatting/mod.rs new file mode 100644 index 00000000..20d18a8a --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/formatting/mod.rs @@ -0,0 +1,11 @@ +//! Document formatting handler. +//! +//! Formats Jsonnet code using an external formatter (jrsonnet-fmt, jsonnetfmt, etc.). + +mod args; +mod dispatch; +mod engine; +mod types; + +pub use dispatch::{format_document, format_document_with_config}; +pub use types::{FormatterEngine, FormattingConfig, FormattingContext}; diff --git a/crates/jrsonnet-lsp-handlers/src/formatting/types.rs b/crates/jrsonnet-lsp-handlers/src/formatting/types.rs new file mode 100644 index 00000000..0a1661c8 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/formatting/types.rs @@ -0,0 +1,115 @@ +use std::path::{Path, PathBuf}; + +use serde::{Deserialize, Serialize}; + +/// Formatter execution mode. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] +pub enum FormatterEngine { + /// Resolve formatter via PATH (auto mode). + #[serde(rename = "path", alias = "auto", alias = "auto-path")] + Path, + /// Use workspace-local contract: `{workspace}/bin/jsonnetfmt -stdio `. 
+ #[serde( + rename = "bin-jsonnetfmt-stdio", + alias = "workspace-jsonnetfmt-stdio", + alias = "workspaceBinJsonnetfmtStdio" + )] + BinJsonnetfmtStdio, +} + +impl Default for FormatterEngine { + fn default() -> Self { + Self::Path + } +} + +/// Formatting configuration options. +/// +/// These options correspond to the go-jsonnet formatter (jsonnetfmt) options. +/// Not all formatters support all options - jrsonnet-fmt only supports `indent`. +#[derive(Debug, Clone, Default, Serialize, Deserialize)] +#[serde(default)] +pub struct FormattingConfig { + /// Number of spaces per indentation level (default: 2). + #[serde(alias = "Indent")] + pub indent: Option, + + /// Maximum blank lines allowed (default: 2). + #[serde(alias = "MaxBlankLines")] + pub max_blank_lines: Option, + + /// String style: "double", "single", or "leave" (default: "leave"). + #[serde(alias = "StringStyle")] + pub string_style: Option, + + /// Comment style: "hash", "slash", or "leave" (default: "leave"). + #[serde(alias = "CommentStyle")] + pub comment_style: Option, + + /// Add padding inside arrays (default: false). + #[serde(alias = "PadArrays")] + pub pad_arrays: Option, + + /// Add padding inside objects (default: true). + #[serde(alias = "PadObjects")] + pub pad_objects: Option, + + /// Pretty print fields (one per line) (default: true). + #[serde(alias = "PrettyFieldNames")] + pub pretty_field_names: Option, + + /// Sort imports (default: true). + #[serde(alias = "SortImports")] + pub sort_imports: Option, + + /// Use implicit plus for object inheritance (default: true). + #[serde(alias = "UseImplicitPlus")] + pub use_implicit_plus: Option, + + /// Strip everything after // (default: false). + #[serde(alias = "StripEverything")] + pub strip_everything: Option, + + /// Strip all comments (default: false). + #[serde(alias = "StripComments")] + pub strip_comments: Option, + + /// Strip all comments except those at the start (default: false). 
+ #[serde(alias = "StripAllButComments")] + pub strip_all_but_comments: Option, + + /// Path to the formatter binary (default: searches PATH for jrsonnet-fmt or jsonnetfmt). + #[serde(alias = "FormatterPath")] + pub formatter_path: Option, + + /// Formatter engine mode. + /// + /// - `None` or `path`: try `jrsonnet-fmt` then `jsonnetfmt` in `PATH` + /// - `bin-jsonnetfmt-stdio`: run `{workspaceRoot}/bin/jsonnetfmt -stdio ` + #[serde(alias = "FormatterEngine")] + pub formatter_engine: Option, +} + +/// Context required for formatter resolution. +#[derive(Debug, Clone, Copy, Default)] +pub struct FormattingContext<'a> { + /// Absolute path to the document being formatted. + pub document_path: Option<&'a Path>, + /// Known workspace roots from initialization. + pub workspace_roots: &'a [PathBuf], +} + +impl<'a> FormattingContext<'a> { + #[must_use] + pub fn detached() -> Self { + Self::default() + } + + #[must_use] + pub fn for_document(document_path: &'a Path, workspace_roots: &'a [PathBuf]) -> Self { + Self { + document_path: Some(document_path), + workspace_roots, + } + } +} From 9b7966083e93ff8abb4fdaefabdfce220bff7aa7 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Mon, 16 Feb 2026 15:33:48 +0000 Subject: [PATCH 131/210] refactor(lsp-handlers): split references internals Split references handling into focused local and cross-file modules while keeping public APIs unchanged. Move local scope/token resolution and tests into `local.rs`, and isolate import-graph cross-file scanning in `cross_file.rs`. Keep `mod.rs` as wiring-only and test-free. 
--- .../src/references/cross_file.rs | 236 +++++++++++++++++ .../{references.rs => references/local.rs} | 242 +----------------- .../src/references/mod.rs | 10 + 3 files changed, 250 insertions(+), 238 deletions(-) create mode 100644 crates/jrsonnet-lsp-handlers/src/references/cross_file.rs rename crates/jrsonnet-lsp-handlers/src/{references.rs => references/local.rs} (56%) create mode 100644 crates/jrsonnet-lsp-handlers/src/references/mod.rs diff --git a/crates/jrsonnet-lsp-handlers/src/references/cross_file.rs b/crates/jrsonnet-lsp-handlers/src/references/cross_file.rs new file mode 100644 index 00000000..398d4a3e --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/references/cross_file.rs @@ -0,0 +1,236 @@ +use jrsonnet_lsp_document::{ + to_lsp_range, token_at_offset, CanonicalPath, Document, FileId, LspPosition, +}; +use jrsonnet_lsp_import::ImportGraph; +use jrsonnet_lsp_inference::SemanticArtifacts; +use jrsonnet_lsp_scope::{ + find_definition_range, is_at_file_scope, is_definition_site, is_variable_reference, +}; +use jrsonnet_rowan_parser::{ + nodes::{ExprBase, ExprField}, + AstNode, SyntaxKind, SyntaxNode, SyntaxToken, +}; +use lsp_types::Location; +use rayon::prelude::*; +use rowan::TextRange; + +/// Find cross-file references to a symbol. +/// +/// This function searches all provided documents for references to a symbol +/// that is exported from the current document (i.e., accessible via import). +/// +/// Returns references from other documents that import this file and use the symbol. +/// +/// The `documents` parameter is a slice of (path, document reference) pairs representing +/// all open documents to search. 
+#[must_use] +pub fn find_cross_file_references<'a>( + current_document: &Document, + current_path: &CanonicalPath, + position: LspPosition, + documents: &[(&'a CanonicalPath, &'a Document)], + import_graph: &ImportGraph, +) -> Vec { + let docs_with_semantic: Vec<_> = documents + .iter() + .map(|(path, doc)| (*path, *doc, None)) + .collect(); + find_cross_file_references_with_semantic( + current_document, + current_path, + position, + None, + &docs_with_semantic, + import_graph, + ) +} + +/// Find cross-file references using semantic artifacts when available. +#[must_use] +pub fn find_cross_file_references_with_semantic<'a>( + current_document: &Document, + current_path: &CanonicalPath, + position: LspPosition, + current_semantic: Option<&SemanticArtifacts>, + documents: &[( + &'a CanonicalPath, + &'a Document, + Option<&'a SemanticArtifacts>, + )], + import_graph: &ImportGraph, +) -> Vec { + let text = current_document.text(); + let line_index = current_document.line_index(); + + // Convert LSP position to byte offset + let Some(offset) = line_index.offset(position, text) else { + return Vec::new(); + }; + + let ast = current_document.ast(); + + // Find the token at the offset + let Some(token) = token_at_offset(ast.syntax(), offset) else { + return Vec::new(); + }; + + // Must be an identifier + if token.kind() != SyntaxKind::IDENT { + return Vec::new(); + } + + let Some(name) = resolve_exported_symbol_name(current_document, &token, current_semantic) + else { + return Vec::new(); + }; + let Some(current_file) = import_graph.file(current_path) else { + return Vec::new(); + }; + + // Search all other documents for imports of this file (in parallel) + let references: Vec = documents + .par_iter() + .filter(|(doc_path, _, _)| *doc_path != current_path) + .flat_map(|(doc_path, doc, semantic)| { + let Some(importer_file) = import_graph.file(doc_path) else { + return Vec::new(); + }; + let import_bindings = import_binding_names(import_graph, importer_file, 
current_file); + if import_bindings.is_empty() { + return Vec::new(); + } + + let Ok(doc_uri) = doc_path.to_uri() else { + return Vec::new(); + }; + let doc_text = doc.text(); + let doc_line_index = doc.line_index(); + + import_bindings + .into_iter() + .flat_map(|binding_name| { + find_references_to_import(doc, &binding_name, &name, *semantic) + }) + .map(|range| Location { + uri: doc_uri.clone(), + range: to_lsp_range(range, doc_line_index, doc_text), + }) + .collect::>() + }) + .collect(); + + references +} + +fn import_binding_names( + import_graph: &ImportGraph, + importer_file: FileId, + target_file: FileId, +) -> Vec { + let mut bindings: Vec = import_graph + .imports_of_target(importer_file, target_file) + .into_iter() + .filter_map(|entry| entry.binding_name.clone()) + .collect(); + bindings.sort(); + bindings.dedup(); + bindings +} + +fn resolve_exported_symbol_name( + document: &Document, + token: &SyntaxToken, + semantic: Option<&SemanticArtifacts>, +) -> Option { + if token.kind() != SyntaxKind::IDENT { + return None; + } + + if is_definition_site(token) && is_at_file_scope(token) { + return Some(token.text().to_string()); + } + + if !is_variable_reference(token) { + return None; + } + + let name = token.text(); + let definition_range = semantic + .and_then(|artifacts| artifacts.definition_for_ident_token(token)) + .or_else(|| find_definition_range(token, name))?; + let definition_token = definition_token(document, definition_range, name)?; + if !is_at_file_scope(&definition_token) { + return None; + } + + Some(name.to_string()) +} + +fn definition_token(document: &Document, range: TextRange, name: &str) -> Option { + document + .ast() + .syntax() + .descendants_with_tokens() + .filter_map(rowan::NodeOrToken::into_token) + .find(|token| { + token.kind() == SyntaxKind::IDENT + && token.text() == name + && is_definition_site(token) + && token + .parent() + .is_some_and(|parent| parent.text_range() == range) + }) +} + +/// Find references to an imported 
symbol in a document. +fn find_references_to_import( + doc: &Document, + binding_name: &str, + field_name: &str, + semantic: Option<&SemanticArtifacts>, +) -> Vec { + if let Some(artifacts) = semantic { + return artifacts + .import_field_references(binding_name, field_name) + .to_vec(); + } + + let mut references = Vec::new(); + let ast = doc.ast(); + + // Walk all tokens looking for field accesses on the imported name + for node in ast.syntax().descendants() { + // Look for field accesses: importName.fieldName + if node.kind() == SyntaxKind::EXPR_FIELD { + // Check if this is accessing the imported binding + if let Some(range) = check_field_access(&node, binding_name, field_name) { + references.push(range); + } + } + } + + references +} + +/// Check if a field access is accessing a specific field on a specific binding. +fn check_field_access( + node: &SyntaxNode, + binding_name: &str, + field_name: &str, +) -> Option { + let field = ExprField::cast(node.clone())?; + let field_ident = field.field()?.ident_lit()?; + if field_ident.text() != field_name { + return None; + } + + let base = field.base()?.expr_base()?; + let ExprBase::ExprVar(var) = base else { + return None; + }; + if var.name()?.ident_lit()?.text() != binding_name { + return None; + } + + Some(field_ident.text_range()) +} diff --git a/crates/jrsonnet-lsp-handlers/src/references.rs b/crates/jrsonnet-lsp-handlers/src/references/local.rs similarity index 56% rename from crates/jrsonnet-lsp-handlers/src/references.rs rename to crates/jrsonnet-lsp-handlers/src/references/local.rs index 778165d0..b79663ba 100644 --- a/crates/jrsonnet-lsp-handlers/src/references.rs +++ b/crates/jrsonnet-lsp-handlers/src/references/local.rs @@ -1,24 +1,10 @@ -//! Find references handler. -//! -//! Finds all references to a symbol within the current document and across -//! all open/importing documents (cross-file references). 
- -use jrsonnet_lsp_document::{ - to_lsp_range, token_at_offset, CanonicalPath, Document, FileId, LspPosition, -}; -use jrsonnet_lsp_import::ImportGraph; +use jrsonnet_lsp_document::{to_lsp_range, token_at_offset, Document, LspPosition}; use jrsonnet_lsp_inference::SemanticArtifacts; use jrsonnet_lsp_scope::{ - find_definition_range, is_at_file_scope, is_definition_site, is_variable_reference, - ScopeResolver, -}; -use jrsonnet_rowan_parser::{ - nodes::{ExprBase, ExprField}, - AstNode, SyntaxKind, SyntaxNode, SyntaxToken, + find_definition_range, is_definition_site, is_variable_reference, ScopeResolver, }; +use jrsonnet_rowan_parser::{AstNode, SyntaxKind}; use lsp_types::{Location, Uri}; -use rayon::prelude::*; -use rowan::TextRange; use tracing::debug; /// Find all references to the symbol at the given position. @@ -98,230 +84,10 @@ pub fn find_references_with_semantic( .collect() } -/// Find cross-file references to a symbol. -/// -/// This function searches all provided documents for references to a symbol -/// that is exported from the current document (i.e., accessible via import). -/// -/// Returns references from other documents that import this file and use the symbol. -/// -/// The `documents` parameter is a slice of (path, document reference) pairs representing -/// all open documents to search. -#[must_use] -pub fn find_cross_file_references<'a>( - current_document: &Document, - current_path: &CanonicalPath, - position: LspPosition, - documents: &[(&'a CanonicalPath, &'a Document)], - import_graph: &ImportGraph, -) -> Vec { - let docs_with_semantic: Vec<_> = documents - .iter() - .map(|(path, doc)| (*path, *doc, None)) - .collect(); - find_cross_file_references_with_semantic( - current_document, - current_path, - position, - None, - &docs_with_semantic, - import_graph, - ) -} - -/// Find cross-file references using semantic artifacts when available. 
-#[must_use] -pub fn find_cross_file_references_with_semantic<'a>( - current_document: &Document, - current_path: &CanonicalPath, - position: LspPosition, - current_semantic: Option<&SemanticArtifacts>, - documents: &[( - &'a CanonicalPath, - &'a Document, - Option<&'a SemanticArtifacts>, - )], - import_graph: &ImportGraph, -) -> Vec { - let text = current_document.text(); - let line_index = current_document.line_index(); - - // Convert LSP position to byte offset - let Some(offset) = line_index.offset(position, text) else { - return Vec::new(); - }; - - let ast = current_document.ast(); - - // Find the token at the offset - let Some(token) = token_at_offset(ast.syntax(), offset) else { - return Vec::new(); - }; - - // Must be an identifier - if token.kind() != SyntaxKind::IDENT { - return Vec::new(); - } - - let Some(name) = resolve_exported_symbol_name(current_document, &token, current_semantic) - else { - return Vec::new(); - }; - let Some(current_file) = import_graph.file(current_path) else { - return Vec::new(); - }; - - // Search all other documents for imports of this file (in parallel) - let references: Vec = documents - .par_iter() - .filter(|(doc_path, _, _)| *doc_path != current_path) - .flat_map(|(doc_path, doc, semantic)| { - let Some(importer_file) = import_graph.file(doc_path) else { - return Vec::new(); - }; - let import_bindings = import_binding_names(import_graph, importer_file, current_file); - if import_bindings.is_empty() { - return Vec::new(); - } - - let Ok(doc_uri) = doc_path.to_uri() else { - return Vec::new(); - }; - let doc_text = doc.text(); - let doc_line_index = doc.line_index(); - - import_bindings - .into_iter() - .flat_map(|binding_name| { - find_references_to_import(doc, &binding_name, &name, *semantic) - }) - .map(|range| Location { - uri: doc_uri.clone(), - range: to_lsp_range(range, doc_line_index, doc_text), - }) - .collect::>() - }) - .collect(); - - references -} - -fn import_binding_names( - import_graph: &ImportGraph, - 
importer_file: FileId, - target_file: FileId, -) -> Vec { - let mut bindings: Vec = import_graph - .imports_of_target(importer_file, target_file) - .into_iter() - .filter_map(|entry| entry.binding_name.clone()) - .collect(); - bindings.sort(); - bindings.dedup(); - bindings -} - -fn resolve_exported_symbol_name( - document: &Document, - token: &SyntaxToken, - semantic: Option<&SemanticArtifacts>, -) -> Option { - if token.kind() != SyntaxKind::IDENT { - return None; - } - - if is_definition_site(token) && is_at_file_scope(token) { - return Some(token.text().to_string()); - } - - if !is_variable_reference(token) { - return None; - } - - let name = token.text(); - let definition_range = semantic - .and_then(|artifacts| artifacts.definition_for_ident_token(token)) - .or_else(|| find_definition_range(token, name))?; - let definition_token = definition_token(document, definition_range, name)?; - if !is_at_file_scope(&definition_token) { - return None; - } - - Some(name.to_string()) -} - -fn definition_token(document: &Document, range: TextRange, name: &str) -> Option { - document - .ast() - .syntax() - .descendants_with_tokens() - .filter_map(rowan::NodeOrToken::into_token) - .find(|token| { - token.kind() == SyntaxKind::IDENT - && token.text() == name - && is_definition_site(token) - && token - .parent() - .is_some_and(|parent| parent.text_range() == range) - }) -} - -/// Find references to an imported symbol in a document. 
-fn find_references_to_import( - doc: &Document, - binding_name: &str, - field_name: &str, - semantic: Option<&SemanticArtifacts>, -) -> Vec { - if let Some(artifacts) = semantic { - return artifacts - .import_field_references(binding_name, field_name) - .to_vec(); - } - - let mut references = Vec::new(); - let ast = doc.ast(); - - // Walk all tokens looking for field accesses on the imported name - for node in ast.syntax().descendants() { - // Look for field accesses: importName.fieldName - if node.kind() == SyntaxKind::EXPR_FIELD { - // Check if this is accessing the imported binding - if let Some(range) = check_field_access(&node, binding_name, field_name) { - references.push(range); - } - } - } - - references -} - -/// Check if a field access is accessing a specific field on a specific binding. -fn check_field_access( - node: &SyntaxNode, - binding_name: &str, - field_name: &str, -) -> Option { - let field = ExprField::cast(node.clone())?; - let field_ident = field.field()?.ident_lit()?; - if field_ident.text() != field_name { - return None; - } - - let base = field.base()?.expr_base()?; - let ExprBase::ExprVar(var) = base else { - return None; - }; - if var.name()?.ident_lit()?.text() != binding_name { - return None; - } - - Some(field_ident.text_range()) -} - #[cfg(test)] mod tests { use jrsonnet_lsp_document::DocVersion; + use jrsonnet_lsp_scope::is_at_file_scope; use super::*; diff --git a/crates/jrsonnet-lsp-handlers/src/references/mod.rs b/crates/jrsonnet-lsp-handlers/src/references/mod.rs new file mode 100644 index 00000000..fb788b70 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/references/mod.rs @@ -0,0 +1,10 @@ +//! Find references handler. +//! +//! Finds all references to a symbol within the current document and across +//! all open/importing documents (cross-file references). 
+
+mod cross_file;
+mod local;
+
+pub use cross_file::{find_cross_file_references, find_cross_file_references_with_semantic};
+pub use local::{find_references, find_references_with_semantic};

From df490116ccece9f69ef0272cee8884ba44231c61 Mon Sep 17 00:00:00 2001
From: Iain Lane
Date: Mon, 16 Feb 2026 15:42:38 +0000
Subject: [PATCH 132/210] refactor(lsp): move request handlers from sync to
 async path

Move document symbol, document highlight, code action, signature help,
and prepare rename onto the async request dispatch path. Introduce
dedicated async request context modules and async handler wrappers for
each request, and route these methods through `handle_async_typed` in
request dispatch, since these requests are latency-sensitive.
---
 .../jrsonnet-lsp/src/server/async_requests.rs |   5 +
 .../src/server/async_requests/code_action.rs  |  28 ++++
 .../async_requests/document_highlight.rs      |  25 ++++
 .../server/async_requests/document_symbol.rs  |  19 +++
 .../server/async_requests/prepare_rename.rs   |  21 +++
 .../server/async_requests/signature_help.rs   |  18 +++
 .../src/server/request_dispatch.rs            | 127 +++++++++---------
 .../requests/async_handlers/code_action.rs    |  10 ++
 .../async_handlers/document_highlight.rs      |  10 ++
 .../async_handlers/document_symbol.rs         |  10 ++
 .../src/server/requests/async_handlers/mod.rs |   5 +
 .../requests/async_handlers/prepare_rename.rs |  10 ++
 .../requests/async_handlers/signature_help.rs |  10 ++
 .../requests/sync_handlers/code_action.rs     |  26 ----
 .../sync_handlers/document_highlight.rs       |  23 ----
 .../requests/sync_handlers/document_symbol.rs |  17 ---
 .../src/server/requests/sync_handlers/mod.rs  |  10 +-
 .../requests/sync_handlers/prepare_rename.rs  |  19 ---
 .../requests/sync_handlers/signature_help.rs  |  16 ---
 19 files changed, 235 insertions(+), 174 deletions(-)
 create mode 100644 crates/jrsonnet-lsp/src/server/async_requests/code_action.rs
 create mode 100644 crates/jrsonnet-lsp/src/server/async_requests/document_highlight.rs
 create mode 100644
crates/jrsonnet-lsp/src/server/async_requests/document_symbol.rs create mode 100644 crates/jrsonnet-lsp/src/server/async_requests/prepare_rename.rs create mode 100644 crates/jrsonnet-lsp/src/server/async_requests/signature_help.rs create mode 100644 crates/jrsonnet-lsp/src/server/requests/async_handlers/code_action.rs create mode 100644 crates/jrsonnet-lsp/src/server/requests/async_handlers/document_highlight.rs create mode 100644 crates/jrsonnet-lsp/src/server/requests/async_handlers/document_symbol.rs create mode 100644 crates/jrsonnet-lsp/src/server/requests/async_handlers/prepare_rename.rs create mode 100644 crates/jrsonnet-lsp/src/server/requests/async_handlers/signature_help.rs delete mode 100644 crates/jrsonnet-lsp/src/server/requests/sync_handlers/code_action.rs delete mode 100644 crates/jrsonnet-lsp/src/server/requests/sync_handlers/document_highlight.rs delete mode 100644 crates/jrsonnet-lsp/src/server/requests/sync_handlers/document_symbol.rs delete mode 100644 crates/jrsonnet-lsp/src/server/requests/sync_handlers/prepare_rename.rs delete mode 100644 crates/jrsonnet-lsp/src/server/requests/sync_handlers/signature_help.rs diff --git a/crates/jrsonnet-lsp/src/server/async_requests.rs b/crates/jrsonnet-lsp/src/server/async_requests.rs index 10b619a4..29a7a49e 100644 --- a/crates/jrsonnet-lsp/src/server/async_requests.rs +++ b/crates/jrsonnet-lsp/src/server/async_requests.rs @@ -1,6 +1,9 @@ +mod code_action; mod code_lens; mod commands; mod completion; +mod document_highlight; +mod document_symbol; mod formatting; mod goto_declaration; mod goto_definition; @@ -10,10 +13,12 @@ mod goto_type_definition; mod hover; mod import_lookup; mod inlay_hints; +mod prepare_rename; mod references; mod rename; mod semantic_tokens_full; mod semantic_tokens_range; +mod signature_help; mod workspace_symbol; use std::{path::PathBuf, sync::Arc}; diff --git a/crates/jrsonnet-lsp/src/server/async_requests/code_action.rs 
b/crates/jrsonnet-lsp/src/server/async_requests/code_action.rs new file mode 100644 index 00000000..a715b676 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/async_requests/code_action.rs @@ -0,0 +1,28 @@ +use jrsonnet_lsp_document::CanonicalPath; +use jrsonnet_lsp_handlers as handlers; +use lsp_types::{CodeActionParams, CodeActionResponse}; + +use super::AsyncRequestContext; + +impl AsyncRequestContext { + pub(crate) fn code_action(&self, params: &CodeActionParams) -> Option { + let uri = ¶ms.text_document.uri; + let path = CanonicalPath::from_uri(uri).ok()?; + let actions = { + let doc = self.documents.get(&path)?; + let code_action_config = self.config.read().code_actions; + handlers::code_actions( + &doc, + uri, + params.range, + ¶ms.context, + &code_action_config, + ) + }; + if actions.is_empty() { + return None; + } + + Some(actions) + } +} diff --git a/crates/jrsonnet-lsp/src/server/async_requests/document_highlight.rs b/crates/jrsonnet-lsp/src/server/async_requests/document_highlight.rs new file mode 100644 index 00000000..324850ed --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/async_requests/document_highlight.rs @@ -0,0 +1,25 @@ +use jrsonnet_lsp_document::CanonicalPath; +use jrsonnet_lsp_handlers as handlers; +use lsp_types::{DocumentHighlight, DocumentHighlightParams}; + +use super::AsyncRequestContext; + +impl AsyncRequestContext { + pub(crate) fn document_highlight( + &self, + params: &DocumentHighlightParams, + ) -> Option> { + let uri = ¶ms.text_document_position_params.text_document.uri; + let position = params.text_document_position_params.position; + let path = CanonicalPath::from_uri(uri).ok()?; + let doc = self.documents.get(&path)?; + let lsp_pos = position.into(); + + let highlights = handlers::document_highlights(&doc, lsp_pos); + if highlights.is_empty() { + return None; + } + + Some(highlights) + } +} diff --git a/crates/jrsonnet-lsp/src/server/async_requests/document_symbol.rs 
b/crates/jrsonnet-lsp/src/server/async_requests/document_symbol.rs new file mode 100644 index 00000000..9dae8aba --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/async_requests/document_symbol.rs @@ -0,0 +1,19 @@ +use jrsonnet_lsp_document::CanonicalPath; +use jrsonnet_lsp_handlers as handlers; +use lsp_types::{DocumentSymbolParams, DocumentSymbolResponse}; + +use super::AsyncRequestContext; + +impl AsyncRequestContext { + pub(crate) fn document_symbol( + &self, + params: &DocumentSymbolParams, + ) -> Option { + let uri = ¶ms.text_document.uri; + let path = CanonicalPath::from_uri(uri).ok()?; + let doc = self.documents.get(&path)?; + + let symbols = handlers::document_symbols(&doc); + Some(DocumentSymbolResponse::Nested(symbols)) + } +} diff --git a/crates/jrsonnet-lsp/src/server/async_requests/prepare_rename.rs b/crates/jrsonnet-lsp/src/server/async_requests/prepare_rename.rs new file mode 100644 index 00000000..06b251ed --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/async_requests/prepare_rename.rs @@ -0,0 +1,21 @@ +use jrsonnet_lsp_document::CanonicalPath; +use jrsonnet_lsp_handlers as handlers; +use lsp_types::{PrepareRenameResponse, TextDocumentPositionParams}; + +use super::AsyncRequestContext; + +impl AsyncRequestContext { + pub(crate) fn prepare_rename( + &self, + params: &TextDocumentPositionParams, + ) -> Option { + let uri = ¶ms.text_document.uri; + let position = params.position; + let path = CanonicalPath::from_uri(uri).ok()?; + let doc = self.documents.get(&path)?; + + let lsp_pos = position.into(); + + handlers::prepare_rename(&doc, lsp_pos) + } +} diff --git a/crates/jrsonnet-lsp/src/server/async_requests/signature_help.rs b/crates/jrsonnet-lsp/src/server/async_requests/signature_help.rs new file mode 100644 index 00000000..afb4939a --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/async_requests/signature_help.rs @@ -0,0 +1,18 @@ +use jrsonnet_lsp_document::CanonicalPath; +use jrsonnet_lsp_handlers as handlers; +use 
lsp_types::{SignatureHelp, SignatureHelpParams}; + +use super::AsyncRequestContext; + +impl AsyncRequestContext { + pub(crate) fn signature_help(&self, params: &SignatureHelpParams) -> Option { + let uri = ¶ms.text_document_position_params.text_document.uri; + let position = params.text_document_position_params.position; + let path = CanonicalPath::from_uri(uri).ok()?; + let doc = self.documents.get(&path)?; + + let lsp_pos = position.into(); + + handlers::signature_help(&doc, lsp_pos) + } +} diff --git a/crates/jrsonnet-lsp/src/server/request_dispatch.rs b/crates/jrsonnet-lsp/src/server/request_dispatch.rs index c8fa2b41..cfcbc1fb 100644 --- a/crates/jrsonnet-lsp/src/server/request_dispatch.rs +++ b/crates/jrsonnet-lsp/src/server/request_dispatch.rs @@ -38,15 +38,15 @@ impl Server { | Rename::METHOD | CodeLensRequest::METHOD | Formatting::METHOD - | SemanticTokensFullRequest::METHOD - | SemanticTokensRangeRequest::METHOD - | ExecuteCommand::METHOD => self.handle_async_request(id, method.as_str(), params), - DocumentSymbolRequest::METHOD + | DocumentSymbolRequest::METHOD | DocumentHighlightRequest::METHOD | CodeActionRequest::METHOD | SignatureHelpRequest::METHOD | PrepareRenameRequest::METHOD - | CodeLensResolve::METHOD => self.handle_sync_request(id, method.as_str(), params), + | SemanticTokensFullRequest::METHOD + | SemanticTokensRangeRequest::METHOD + | ExecuteCommand::METHOD => self.handle_async_request(id, method.as_str(), params), + CodeLensResolve::METHOD => self.handle_sync_request(id, method.as_str(), params), _ => { let request = self.inflight_requests.begin_unknown(id, method.as_str()); warn!("Unhandled request: {}", request.method()); @@ -77,67 +77,24 @@ impl Server { method: &str, params: serde_json::Value, ) -> anyhow::Result<()> { - match method { - DocumentSymbolRequest::METHOD => { - let request = self.inflight_requests.begin::(id); - self.handle_sync_typed( - request, - params, - requests::sync_handlers::document_symbol::handle, - ) - } - 
DocumentHighlightRequest::METHOD => { - let request = self.inflight_requests.begin::(id); - self.handle_sync_typed( - request, - params, - requests::sync_handlers::document_highlight::handle, - ) - } - CodeActionRequest::METHOD => { - let request = self.inflight_requests.begin::(id); - self.handle_sync_typed( - request, - params, - requests::sync_handlers::code_action::handle, - ) - } - SignatureHelpRequest::METHOD => { - let request = self.inflight_requests.begin::(id); - self.handle_sync_typed( - request, - params, - requests::sync_handlers::signature_help::handle, - ) - } - PrepareRenameRequest::METHOD => { - let request = self.inflight_requests.begin::(id); - self.handle_sync_typed( - request, - params, - requests::sync_handlers::prepare_rename::handle, - ) - } - CodeLensResolve::METHOD => { - let request = self.inflight_requests.begin::(id); - self.handle_sync_typed( - request, - params, - requests::sync_handlers::code_lens_resolve::handle, - ) - } - _ => { - let request = self.inflight_requests.begin_unknown(id, method); - warn!("Unhandled request: {}", request.method()); - let message = format!("Method not found: {}", request.method()); - let _ = self.inflight_requests.send_unknown_err( - request, - lsp_server::ErrorCode::MethodNotFound, - message, - )?; - Ok(()) - } + if method == CodeLensResolve::METHOD { + let request = self.inflight_requests.begin::(id); + return self.handle_sync_typed( + request, + params, + requests::sync_handlers::code_lens_resolve::handle, + ); } + + let request = self.inflight_requests.begin_unknown(id, method); + warn!("Unhandled request: {}", request.method()); + let message = format!("Method not found: {}", request.method()); + let _ = self.inflight_requests.send_unknown_err( + request, + lsp_server::ErrorCode::MethodNotFound, + message, + )?; + Ok(()) } fn handle_sync_typed( @@ -276,6 +233,46 @@ impl Server { requests::async_handlers::formatting::handle, ) } + DocumentSymbolRequest::METHOD => { + let request = 
self.inflight_requests.begin::(id); + self.handle_async_typed( + request, + params, + requests::async_handlers::document_symbol::handle, + ) + } + DocumentHighlightRequest::METHOD => { + let request = self.inflight_requests.begin::(id); + self.handle_async_typed( + request, + params, + requests::async_handlers::document_highlight::handle, + ) + } + CodeActionRequest::METHOD => { + let request = self.inflight_requests.begin::(id); + self.handle_async_typed( + request, + params, + requests::async_handlers::code_action::handle, + ) + } + SignatureHelpRequest::METHOD => { + let request = self.inflight_requests.begin::(id); + self.handle_async_typed( + request, + params, + requests::async_handlers::signature_help::handle, + ) + } + PrepareRenameRequest::METHOD => { + let request = self.inflight_requests.begin::(id); + self.handle_async_typed( + request, + params, + requests::async_handlers::prepare_rename::handle, + ) + } SemanticTokensFullRequest::METHOD => { let request = self .inflight_requests diff --git a/crates/jrsonnet-lsp/src/server/requests/async_handlers/code_action.rs b/crates/jrsonnet-lsp/src/server/requests/async_handlers/code_action.rs new file mode 100644 index 00000000..f967b5ac --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/requests/async_handlers/code_action.rs @@ -0,0 +1,10 @@ +use lsp_types::{CodeActionParams, CodeActionResponse}; + +use crate::server::async_requests::AsyncRequestContext; + +pub(crate) fn handle( + context: &AsyncRequestContext, + params: &CodeActionParams, +) -> Option { + context.code_action(params) +} diff --git a/crates/jrsonnet-lsp/src/server/requests/async_handlers/document_highlight.rs b/crates/jrsonnet-lsp/src/server/requests/async_handlers/document_highlight.rs new file mode 100644 index 00000000..b93f11f8 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/requests/async_handlers/document_highlight.rs @@ -0,0 +1,10 @@ +use lsp_types::{DocumentHighlight, DocumentHighlightParams}; + +use 
crate::server::async_requests::AsyncRequestContext; + +pub(crate) fn handle( + context: &AsyncRequestContext, + params: &DocumentHighlightParams, +) -> Option> { + context.document_highlight(params) +} diff --git a/crates/jrsonnet-lsp/src/server/requests/async_handlers/document_symbol.rs b/crates/jrsonnet-lsp/src/server/requests/async_handlers/document_symbol.rs new file mode 100644 index 00000000..1bb0b8e0 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/requests/async_handlers/document_symbol.rs @@ -0,0 +1,10 @@ +use lsp_types::{DocumentSymbolParams, DocumentSymbolResponse}; + +use crate::server::async_requests::AsyncRequestContext; + +pub(crate) fn handle( + context: &AsyncRequestContext, + params: &DocumentSymbolParams, +) -> Option { + context.document_symbol(params) +} diff --git a/crates/jrsonnet-lsp/src/server/requests/async_handlers/mod.rs b/crates/jrsonnet-lsp/src/server/requests/async_handlers/mod.rs index 7b5cd389..be2c838a 100644 --- a/crates/jrsonnet-lsp/src/server/requests/async_handlers/mod.rs +++ b/crates/jrsonnet-lsp/src/server/requests/async_handlers/mod.rs @@ -1,5 +1,8 @@ +pub(crate) mod code_action; pub(crate) mod code_lens; pub(crate) mod completion; +pub(crate) mod document_highlight; +pub(crate) mod document_symbol; pub(crate) mod execute_command; pub(crate) mod formatting; pub(crate) mod goto_declaration; @@ -8,8 +11,10 @@ pub(crate) mod goto_implementation; pub(crate) mod goto_type_definition; pub(crate) mod hover; pub(crate) mod inlay_hints; +pub(crate) mod prepare_rename; pub(crate) mod references; pub(crate) mod rename; pub(crate) mod semantic_tokens_full; pub(crate) mod semantic_tokens_range; +pub(crate) mod signature_help; pub(crate) mod workspace_symbol; diff --git a/crates/jrsonnet-lsp/src/server/requests/async_handlers/prepare_rename.rs b/crates/jrsonnet-lsp/src/server/requests/async_handlers/prepare_rename.rs new file mode 100644 index 00000000..92642917 --- /dev/null +++ 
b/crates/jrsonnet-lsp/src/server/requests/async_handlers/prepare_rename.rs @@ -0,0 +1,10 @@ +use lsp_types::{PrepareRenameResponse, TextDocumentPositionParams}; + +use crate::server::async_requests::AsyncRequestContext; + +pub(crate) fn handle( + context: &AsyncRequestContext, + params: &TextDocumentPositionParams, +) -> Option { + context.prepare_rename(params) +} diff --git a/crates/jrsonnet-lsp/src/server/requests/async_handlers/signature_help.rs b/crates/jrsonnet-lsp/src/server/requests/async_handlers/signature_help.rs new file mode 100644 index 00000000..70b8a200 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/requests/async_handlers/signature_help.rs @@ -0,0 +1,10 @@ +use lsp_types::{SignatureHelp, SignatureHelpParams}; + +use crate::server::async_requests::AsyncRequestContext; + +pub(crate) fn handle( + context: &AsyncRequestContext, + params: &SignatureHelpParams, +) -> Option { + context.signature_help(params) +} diff --git a/crates/jrsonnet-lsp/src/server/requests/sync_handlers/code_action.rs b/crates/jrsonnet-lsp/src/server/requests/sync_handlers/code_action.rs deleted file mode 100644 index 7c0ae2df..00000000 --- a/crates/jrsonnet-lsp/src/server/requests/sync_handlers/code_action.rs +++ /dev/null @@ -1,26 +0,0 @@ -use jrsonnet_lsp_document::CanonicalPath; -use jrsonnet_lsp_handlers as handlers; -use lsp_types::{CodeActionParams, CodeActionResponse}; - -use crate::server::Server; - -pub(crate) fn handle(server: &Server, params: &CodeActionParams) -> Option { - let uri = ¶ms.text_document.uri; - let path = CanonicalPath::from_uri(uri).ok()?; - let actions = { - let doc = server.documents.get(&path)?; - let code_action_config = server.config.read().code_actions; - handlers::code_actions( - &doc, - uri, - params.range, - ¶ms.context, - &code_action_config, - ) - }; - if actions.is_empty() { - return None; - } - - Some(actions) -} diff --git a/crates/jrsonnet-lsp/src/server/requests/sync_handlers/document_highlight.rs 
b/crates/jrsonnet-lsp/src/server/requests/sync_handlers/document_highlight.rs deleted file mode 100644 index 09022218..00000000 --- a/crates/jrsonnet-lsp/src/server/requests/sync_handlers/document_highlight.rs +++ /dev/null @@ -1,23 +0,0 @@ -use jrsonnet_lsp_document::CanonicalPath; -use jrsonnet_lsp_handlers as handlers; -use lsp_types::{DocumentHighlight, DocumentHighlightParams}; - -use crate::server::Server; - -pub(crate) fn handle( - server: &Server, - params: &DocumentHighlightParams, -) -> Option> { - let uri = ¶ms.text_document_position_params.text_document.uri; - let position = params.text_document_position_params.position; - let path = CanonicalPath::from_uri(uri).ok()?; - let doc = server.documents.get(&path)?; - let lsp_pos = position.into(); - - let highlights = handlers::document_highlights(&doc, lsp_pos); - if highlights.is_empty() { - return None; - } - - Some(highlights) -} diff --git a/crates/jrsonnet-lsp/src/server/requests/sync_handlers/document_symbol.rs b/crates/jrsonnet-lsp/src/server/requests/sync_handlers/document_symbol.rs deleted file mode 100644 index 1afb4040..00000000 --- a/crates/jrsonnet-lsp/src/server/requests/sync_handlers/document_symbol.rs +++ /dev/null @@ -1,17 +0,0 @@ -use jrsonnet_lsp_document::CanonicalPath; -use jrsonnet_lsp_handlers as handlers; -use lsp_types::{DocumentSymbolParams, DocumentSymbolResponse}; - -use crate::server::Server; - -pub(crate) fn handle( - server: &Server, - params: &DocumentSymbolParams, -) -> Option { - let uri = ¶ms.text_document.uri; - let path = CanonicalPath::from_uri(uri).ok()?; - let doc = server.documents.get(&path)?; - - let symbols = handlers::document_symbols(&doc); - Some(DocumentSymbolResponse::Nested(symbols)) -} diff --git a/crates/jrsonnet-lsp/src/server/requests/sync_handlers/mod.rs b/crates/jrsonnet-lsp/src/server/requests/sync_handlers/mod.rs index 418956dd..a1ab4419 100644 --- a/crates/jrsonnet-lsp/src/server/requests/sync_handlers/mod.rs +++ 
b/crates/jrsonnet-lsp/src/server/requests/sync_handlers/mod.rs @@ -1,15 +1,9 @@ //! Request handlers intentionally kept synchronous. //! -//! These handlers only read already-indexed, in-memory document state and do -//! not perform cross-file graph traversal or blocking I/O. Keeping them sync -//! avoids async scheduling overhead for latency-critical, cheap requests. +//! These handlers are intentionally tiny and latency-critical, so they stay +//! on the synchronous path. //! //! If a handler here grows into heavier work, promote it to //! `requests::async_handlers`. -pub(crate) mod code_action; pub(crate) mod code_lens_resolve; -pub(crate) mod document_highlight; -pub(crate) mod document_symbol; -pub(crate) mod prepare_rename; -pub(crate) mod signature_help; diff --git a/crates/jrsonnet-lsp/src/server/requests/sync_handlers/prepare_rename.rs b/crates/jrsonnet-lsp/src/server/requests/sync_handlers/prepare_rename.rs deleted file mode 100644 index c2de3f8c..00000000 --- a/crates/jrsonnet-lsp/src/server/requests/sync_handlers/prepare_rename.rs +++ /dev/null @@ -1,19 +0,0 @@ -use jrsonnet_lsp_document::CanonicalPath; -use jrsonnet_lsp_handlers as handlers; -use lsp_types::{PrepareRenameResponse, TextDocumentPositionParams}; - -use crate::server::Server; - -pub(crate) fn handle( - server: &Server, - params: &TextDocumentPositionParams, -) -> Option { - let uri = ¶ms.text_document.uri; - let position = params.position; - let path = CanonicalPath::from_uri(uri).ok()?; - let doc = server.documents.get(&path)?; - - let lsp_pos = position.into(); - - handlers::prepare_rename(&doc, lsp_pos) -} diff --git a/crates/jrsonnet-lsp/src/server/requests/sync_handlers/signature_help.rs b/crates/jrsonnet-lsp/src/server/requests/sync_handlers/signature_help.rs deleted file mode 100644 index cf5ce9ab..00000000 --- a/crates/jrsonnet-lsp/src/server/requests/sync_handlers/signature_help.rs +++ /dev/null @@ -1,16 +0,0 @@ -use jrsonnet_lsp_document::CanonicalPath; -use jrsonnet_lsp_handlers 
as handlers; -use lsp_types::{SignatureHelp, SignatureHelpParams}; - -use crate::server::Server; - -pub(crate) fn handle(server: &Server, params: &SignatureHelpParams) -> Option { - let uri = ¶ms.text_document_position_params.text_document.uri; - let position = params.text_document_position_params.position; - let path = CanonicalPath::from_uri(uri).ok()?; - let doc = server.documents.get(&path)?; - - let lsp_pos = position.into(); - - handlers::signature_help(&doc, lsp_pos) -} From c70d20c75bfed6bf9a2b11a6e781c9d4fe42c211 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Mon, 16 Feb 2026 15:47:22 +0000 Subject: [PATCH 133/210] refactor(lsp-handlers): split symbols into focused modules Split symbols into extract, names, and workspace modules with a small `mod.rs` surface. Keep document symbol extraction and workspace symbol flattening separate so traversal and matching logic evolve independently. Move tests inline to `extract.rs` and `workspace.rs` and keep `mod.rs` free of tests. --- .../src/{symbols.rs => symbols/extract.rs} | 258 +----------------- .../jrsonnet-lsp-handlers/src/symbols/mod.rs | 11 + .../src/symbols/names.rs | 75 +++++ .../src/symbols/workspace.rs | 129 +++++++++ 4 files changed, 225 insertions(+), 248 deletions(-) rename crates/jrsonnet-lsp-handlers/src/{symbols.rs => symbols/extract.rs} (51%) create mode 100644 crates/jrsonnet-lsp-handlers/src/symbols/mod.rs create mode 100644 crates/jrsonnet-lsp-handlers/src/symbols/names.rs create mode 100644 crates/jrsonnet-lsp-handlers/src/symbols/workspace.rs diff --git a/crates/jrsonnet-lsp-handlers/src/symbols.rs b/crates/jrsonnet-lsp-handlers/src/symbols/extract.rs similarity index 51% rename from crates/jrsonnet-lsp-handlers/src/symbols.rs rename to crates/jrsonnet-lsp-handlers/src/symbols/extract.rs index e3206d1a..dc7e3f5c 100644 --- a/crates/jrsonnet-lsp-handlers/src/symbols.rs +++ b/crates/jrsonnet-lsp-handlers/src/symbols/extract.rs @@ -1,23 +1,16 @@ -//! 
Document symbols handler for providing outline view. -//! -//! Extracts symbols from Jsonnet AST including: -//! - Local bindings (local x = ...) -//! - Object fields -//! - Function definitions - use jrsonnet_lsp_document::{to_lsp_range, Document, LineIndex}; use jrsonnet_rowan_parser::{ nodes::{ - Bind, BindDestruct, BindFunction, ExprBase, ExprObject, FieldName, FieldNameFixed, Member, - MemberBindStmt, MemberFieldMethod, MemberFieldNormal, ObjBody, ObjBodyMemberList, ObjLocal, - Stmt, StmtLocal, + Bind, BindDestruct, BindFunction, ExprBase, ExprObject, Member, MemberBindStmt, + MemberFieldMethod, MemberFieldNormal, ObjBody, ObjBodyMemberList, ObjLocal, Stmt, + StmtLocal, }, - AstNode, AstToken, + AstNode, }; -use lsp_types::{DocumentSymbol, Location, SymbolInformation, SymbolKind, Uri}; -use rowan::TextRange; +use lsp_types::{DocumentSymbol, SymbolKind}; + +use super::names::{build_document_symbol, create_symbol, get_destruct_name, get_field_name}; -/// Extract document symbols from a parsed document. #[must_use] pub fn document_symbols(document: &Document) -> Vec { let ast = document.ast(); @@ -27,14 +20,12 @@ pub fn document_symbols(document: &Document) -> Vec { let mut symbols = Vec::new(); if let Some(expr) = ast.expr() { - // Process top-level statements (local bindings) for stmt in expr.stmts() { if let Some(sym) = process_stmt(&stmt, text, line_index) { symbols.push(sym); } } - // Process the main expression if let Some(base) = expr.expr_base() { symbols.extend(process_expr_base(&base, text, line_index)); } @@ -43,30 +34,25 @@ pub fn document_symbols(document: &Document) -> Vec { symbols } -/// Process a statement and extract symbols. fn process_stmt(stmt: &Stmt, text: &str, line_index: &LineIndex) -> Option { match stmt { Stmt::StmtLocal(local) => process_local_stmt(local, text, line_index), - Stmt::StmtAssert(_) => None, // Asserts don't produce symbols + Stmt::StmtAssert(_) => None, } } -/// Process a local statement. 
fn process_local_stmt( local: &StmtLocal, text: &str, line_index: &LineIndex, ) -> Option { - // Local statements can have multiple bindings let binds: Vec<_> = local.binds().collect(); if binds.len() == 1 { - // Single binding - return it directly binds .first() .and_then(|bind| process_bind(bind, text, line_index)) } else if !binds.is_empty() { - // Multiple bindings - create a container let range = local.syntax().text_range(); let children: Vec<_> = binds .iter() @@ -91,7 +77,6 @@ fn process_local_stmt( } } -/// Process a binding and extract symbols. fn process_bind(bind: &Bind, text: &str, line_index: &LineIndex) -> Option { match bind { Bind::BindDestruct(bd) => process_bind_destruct(bd, text, line_index), @@ -99,19 +84,15 @@ fn process_bind(bind: &Bind, text: &str, line_index: &LineIndex) -> Option Option { let destruct = bind.into()?; - - // Get the name from the destruct pattern let name = get_destruct_name(&destruct)?; let range = bind.syntax().text_range(); - // Check if the value is a function let (kind, children) = bind .value() .map_or((SymbolKind::VARIABLE, None), |value_expr| { @@ -137,7 +118,6 @@ fn process_bind_destruct( create_symbol(name, kind, range, range, line_index, text, children) } -/// Process a function binding. fn process_bind_function( bind: &BindFunction, text: &str, @@ -146,7 +126,6 @@ fn process_bind_function( let name = bind.name()?.ident_lit()?.text().to_string(); let range = bind.syntax().text_range(); - // Get parameter names for detail let detail = bind.params().map(|params| { let param_names: Vec<_> = params .params() @@ -165,7 +144,6 @@ fn process_bind_function( ) } -/// Process an expression base and extract symbols. fn process_expr_base(base: &ExprBase, text: &str, line_index: &LineIndex) -> Vec { match base { ExprBase::ExprObject(obj) => process_object(obj, text, line_index), @@ -173,7 +151,6 @@ fn process_expr_base(base: &ExprBase, text: &str, line_index: &LineIndex) -> Vec } } -/// Process an object expression. 
fn process_object(obj: &ExprObject, text: &str, line_index: &LineIndex) -> Vec { let Some(body) = obj.obj_body() else { return Vec::new(); @@ -181,11 +158,10 @@ fn process_object(obj: &ExprObject, text: &str, line_index: &LineIndex) -> Vec process_member_list(&list, text, line_index), - ObjBody::ObjBodyComp(_) => Vec::new(), // Object comprehensions don't have static fields + ObjBody::ObjBodyComp(_) => Vec::new(), } } -/// Process a member list (object body). fn process_member_list( list: &ObjBodyMemberList, text: &str, @@ -202,17 +178,15 @@ fn process_member_list( symbols } -/// Process a single member. fn process_member(member: &Member, text: &str, line_index: &LineIndex) -> Option { match member { Member::MemberBindStmt(bind_stmt) => process_member_bind(bind_stmt, text, line_index), Member::MemberFieldNormal(field) => process_field_normal(field, text, line_index), Member::MemberFieldMethod(method) => process_field_method(method, text, line_index), - Member::MemberAssertStmt(_) => None, // Asserts don't produce symbols + Member::MemberAssertStmt(_) => None, } } -/// Process a member bind statement (local inside object). fn process_member_bind( bind_stmt: &MemberBindStmt, text: &str, @@ -222,7 +196,6 @@ fn process_member_bind( process_obj_local(&obj_local, text, line_index) } -/// Process an object-local binding. fn process_obj_local( obj_local: &ObjLocal, text: &str, @@ -232,7 +205,6 @@ fn process_obj_local( process_bind(&bind, text, line_index) } -/// Process a normal field. 
fn process_field_normal( field: &MemberFieldNormal, text: &str, @@ -241,7 +213,6 @@ fn process_field_normal( let name = get_field_name(&field.field_name()?)?; let range = field.syntax().text_range(); - // Check if the value is an object (for nested symbols) let children = field.expr().and_then(|expr| { if let Some(ExprBase::ExprObject(obj)) = expr.expr_base() { let children = process_object(&obj, text, line_index); @@ -255,7 +226,6 @@ fn process_field_normal( } }); - // Determine kind based on value let kind = field.expr().map_or(SymbolKind::FIELD, |expr| { expr.expr_base() .map_or(SymbolKind::FIELD, |base| match base { @@ -269,7 +239,6 @@ fn process_field_normal( create_symbol(name, kind, range, range, line_index, text, children) } -/// Process a method field. fn process_field_method( method: &MemberFieldMethod, text: &str, @@ -278,7 +247,6 @@ fn process_field_method( let name = get_field_name(&method.field_name()?)?; let range = method.syntax().text_range(); - // Get parameter names for detail let detail = method.params_desc().map(|params| { let param_names: Vec<_> = params .params() @@ -297,147 +265,6 @@ fn process_field_method( ) } -/// Get the name from a field name node. -fn get_field_name(field_name: &FieldName) -> Option { - match field_name { - FieldName::FieldNameFixed(fixed) => get_fixed_field_name(fixed), - FieldName::FieldNameDynamic(_) => Some("[computed]".to_string()), - } -} - -/// Get the name from a fixed field name. -fn get_fixed_field_name(fixed: &FieldNameFixed) -> Option { - if let Some(name) = fixed.id() { - Some(name.ident_lit()?.text().to_string()) - } else { - fixed - .text() - .map(|text| text.text().trim_matches('"').trim_matches('\'').to_string()) - } -} - -/// Get the name from a destruct pattern. 
-fn get_destruct_name(destruct: &jrsonnet_rowan_parser::nodes::Destruct) -> Option { - use jrsonnet_rowan_parser::nodes::Destruct; - match destruct { - Destruct::DestructFull(full) => Some(full.name()?.ident_lit()?.text().to_string()), - Destruct::DestructSkip(_) => None, - Destruct::DestructArray(_) => Some("[array]".to_string()), - Destruct::DestructObject(_) => Some("{object}".to_string()), - } -} - -/// Create a `DocumentSymbol` with the given properties. -fn create_symbol( - name: String, - kind: SymbolKind, - range: TextRange, - selection_range: TextRange, - line_index: &LineIndex, - text: &str, - children: Option>, -) -> Option { - build_document_symbol( - name, - None, - kind, - to_lsp_range(range, line_index, text), - to_lsp_range(selection_range, line_index, text), - children, - ) -} - -fn build_document_symbol( - name: String, - detail: Option, - kind: SymbolKind, - range: lsp_types::Range, - selection_range: lsp_types::Range, - children: Option>, -) -> Option { - serde_json::from_value(serde_json::json!({ - "name": name, - "detail": detail, - "kind": kind, - "tags": Option::>::None, - "range": range, - "selectionRange": selection_range, - "children": children, - })) - .ok() -} - -fn build_symbol_information( - name: String, - kind: SymbolKind, - tags: Option>, - location: Location, - container_name: Option, -) -> Option { - serde_json::from_value(serde_json::json!({ - "name": name, - "kind": kind, - "tags": tags, - "location": location, - "containerName": container_name, - })) - .ok() -} - -/// Search for symbols matching a query across a document. -/// Returns a flat list of `SymbolInformation`. 
-pub fn workspace_symbols_for_document( - document: &Document, - uri: &Uri, - query: &str, -) -> Vec { - let doc_symbols = document_symbols(document); - let mut results = Vec::new(); - - // Flatten and filter document symbols - flatten_symbols(&doc_symbols, uri, query, None, &mut results); - - results -} - -/// Recursively flatten `DocumentSymbol` tree into `SymbolInformation` list. -fn flatten_symbols( - symbols: &[DocumentSymbol], - uri: &Uri, - query: &str, - container_name: Option<&str>, - results: &mut Vec, -) { - let query_lower = query.to_lowercase(); - - for symbol in symbols { - // Check if symbol name matches query (case-insensitive substring match) - let matches = query.is_empty() || symbol.name.to_lowercase().contains(&query_lower); - - if matches { - let location = Location { - uri: uri.clone(), - range: symbol.range, - }; - let symbol_info = build_symbol_information( - symbol.name.clone(), - symbol.kind, - symbol.tags.clone(), - location, - container_name.map(String::from), - ); - if let Some(symbol_info) = symbol_info { - results.push(symbol_info); - } - } - - // Recursively process children - if let Some(children) = &symbol.children { - flatten_symbols(children, uri, query, Some(&symbol.name), results); - } - } -} - #[cfg(test)] mod tests { use jrsonnet_lsp_document::DocVersion; @@ -459,11 +286,9 @@ mod tests { ); let symbols = document_symbols(&doc); - // Check symbol names let names: Vec<_> = symbols.iter().map(|s| s.name.as_str()).collect(); assert_eq!(names, vec!["name", "value", "nested"]); - // Check nested children let nested_children = symbols[2] .children .as_ref() @@ -480,7 +305,6 @@ mod tests { ); let symbols = document_symbols(&doc); - // Should have local x, local y, and object fields a, b let names: Vec<_> = symbols.iter().map(|s| s.name.as_str()).collect(); assert_eq!(names, vec!["x", "y", "a", "b"]); } @@ -493,7 +317,6 @@ mod tests { ); let symbols = document_symbols(&doc); - // Assert full structure of symbols let symbol_info: 
Vec<(&str, SymbolKind)> = symbols.iter().map(|s| (s.name.as_str(), s.kind)).collect(); assert_eq!( @@ -514,65 +337,4 @@ mod tests { assert_eq!(names, vec!["greet"]); assert_eq!(symbols[0].kind, SymbolKind::METHOD); } - - #[test] - fn test_workspace_symbols_empty_query() { - let doc = Document::new( - r#"local x = 1; { name: "test", value: x }"#.to_string(), - DocVersion::new(1), - ); - let uri: Uri = "file:///test.jsonnet".parse().unwrap(); - - let symbols = workspace_symbols_for_document(&doc, &uri, ""); - - // Should return all symbols: x, name, value - let names: Vec<_> = symbols.iter().map(|s| s.name.as_str()).collect(); - assert_eq!(names, vec!["x", "name", "value"]); - } - - #[test] - fn test_workspace_symbols_with_query() { - let doc = Document::new( - r"{ myField: 1, otherField: 2, myMethod(x): x }".to_string(), - DocVersion::new(1), - ); - let uri: Uri = "file:///test.jsonnet".parse().unwrap(); - - let symbols = workspace_symbols_for_document(&doc, &uri, "my"); - - // Should match myField and myMethod - let names: Vec<_> = symbols.iter().map(|s| s.name.as_str()).collect(); - assert_eq!(names, vec!["myField", "myMethod"]); - } - - #[test] - fn test_workspace_symbols_case_insensitive() { - let doc = Document::new( - r"{ MyField: 1, myfield: 2, MYFIELD: 3 }".to_string(), - DocVersion::new(1), - ); - let uri: Uri = "file:///test.jsonnet".parse().unwrap(); - - let symbols = workspace_symbols_for_document(&doc, &uri, "myfield"); - - // Should match all three (case insensitive) - let names: Vec<_> = symbols.iter().map(|s| s.name.as_str()).collect(); - assert_eq!(names, vec!["MyField", "myfield", "MYFIELD"]); - } - - #[test] - fn test_workspace_symbols_nested() { - let doc = Document::new( - r"{ outer: { innerField: 1 } }".to_string(), - DocVersion::new(1), - ); - let uri: Uri = "file:///test.jsonnet".parse().unwrap(); - - let symbols = workspace_symbols_for_document(&doc, &uri, "inner"); - - // Should find innerField with container_name "outer" - let names: 
Vec<_> = symbols.iter().map(|s| s.name.as_str()).collect(); - assert_eq!(names, vec!["innerField"]); - assert_eq!(symbols[0].container_name, Some("outer".to_string())); - } } diff --git a/crates/jrsonnet-lsp-handlers/src/symbols/mod.rs b/crates/jrsonnet-lsp-handlers/src/symbols/mod.rs new file mode 100644 index 00000000..daba2776 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/symbols/mod.rs @@ -0,0 +1,11 @@ +//! Document and workspace symbol handlers. +//! +//! Extracts symbols from Jsonnet AST nodes and exposes both tree-style +//! document symbols and flat workspace symbol views. + +mod extract; +mod names; +mod workspace; + +pub use extract::document_symbols; +pub use workspace::workspace_symbols_for_document; diff --git a/crates/jrsonnet-lsp-handlers/src/symbols/names.rs b/crates/jrsonnet-lsp-handlers/src/symbols/names.rs new file mode 100644 index 00000000..7341b730 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/symbols/names.rs @@ -0,0 +1,75 @@ +use jrsonnet_lsp_document::{to_lsp_range, LineIndex}; +use jrsonnet_rowan_parser::{ + nodes::{FieldName, FieldNameFixed}, + AstToken, +}; +use lsp_types::{DocumentSymbol, SymbolKind}; +use rowan::TextRange; + +pub(super) fn get_field_name(field_name: &FieldName) -> Option { + match field_name { + FieldName::FieldNameFixed(fixed) => get_fixed_field_name(fixed), + FieldName::FieldNameDynamic(_) => Some("[computed]".to_string()), + } +} + +fn get_fixed_field_name(fixed: &FieldNameFixed) -> Option { + if let Some(name) = fixed.id() { + Some(name.ident_lit()?.text().to_string()) + } else { + fixed + .text() + .map(|text| text.text().trim_matches('"').trim_matches('\'').to_string()) + } +} + +pub(super) fn get_destruct_name( + destruct: &jrsonnet_rowan_parser::nodes::Destruct, +) -> Option { + use jrsonnet_rowan_parser::nodes::Destruct; + match destruct { + Destruct::DestructFull(full) => Some(full.name()?.ident_lit()?.text().to_string()), + Destruct::DestructSkip(_) => None, + Destruct::DestructArray(_) => 
Some("[array]".to_string()), + Destruct::DestructObject(_) => Some("{object}".to_string()), + } +} + +pub(super) fn create_symbol( + name: String, + kind: SymbolKind, + range: TextRange, + selection_range: TextRange, + line_index: &LineIndex, + text: &str, + children: Option>, +) -> Option { + build_document_symbol( + name, + None, + kind, + to_lsp_range(range, line_index, text), + to_lsp_range(selection_range, line_index, text), + children, + ) +} + +pub(super) fn build_document_symbol( + name: String, + detail: Option, + kind: SymbolKind, + range: lsp_types::Range, + selection_range: lsp_types::Range, + children: Option>, +) -> Option { + serde_json::from_value(serde_json::json!({ + "name": name, + "detail": detail, + "kind": kind, + "tags": Option::>::None, + "range": range, + "selectionRange": selection_range, + "children": children, + })) + .ok() +} diff --git a/crates/jrsonnet-lsp-handlers/src/symbols/workspace.rs b/crates/jrsonnet-lsp-handlers/src/symbols/workspace.rs new file mode 100644 index 00000000..b3f59563 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/symbols/workspace.rs @@ -0,0 +1,129 @@ +use jrsonnet_lsp_document::Document; +use lsp_types::{Location, SymbolInformation, Uri}; + +use super::extract::document_symbols; + +fn build_symbol_information( + name: String, + kind: lsp_types::SymbolKind, + tags: Option>, + location: Location, + container_name: Option, +) -> Option { + serde_json::from_value(serde_json::json!({ + "name": name, + "kind": kind, + "tags": tags, + "location": location, + "containerName": container_name, + })) + .ok() +} + +pub fn workspace_symbols_for_document( + document: &Document, + uri: &Uri, + query: &str, +) -> Vec { + let doc_symbols = document_symbols(document); + let mut results = Vec::new(); + + flatten_symbols(&doc_symbols, uri, query, None, &mut results); + + results +} + +fn flatten_symbols( + symbols: &[lsp_types::DocumentSymbol], + uri: &Uri, + query: &str, + container_name: Option<&str>, + results: &mut Vec, 
+) { + let query_lower = query.to_lowercase(); + + for symbol in symbols { + let matches = query.is_empty() || symbol.name.to_lowercase().contains(&query_lower); + + if matches { + let location = Location { + uri: uri.clone(), + range: symbol.range, + }; + let symbol_info = build_symbol_information( + symbol.name.clone(), + symbol.kind, + symbol.tags.clone(), + location, + container_name.map(String::from), + ); + if let Some(symbol_info) = symbol_info { + results.push(symbol_info); + } + } + + if let Some(children) = &symbol.children { + flatten_symbols(children, uri, query, Some(&symbol.name), results); + } + } +} + +#[cfg(test)] +mod tests { + use jrsonnet_lsp_document::DocVersion; + + use super::*; + + #[test] + fn test_workspace_symbols_empty_query() { + let doc = Document::new( + r#"local x = 1; { name: "test", value: x }"#.to_string(), + DocVersion::new(1), + ); + let uri: Uri = "file:///test.jsonnet".parse().unwrap(); + + let symbols = workspace_symbols_for_document(&doc, &uri, ""); + let names: Vec<_> = symbols.iter().map(|s| s.name.as_str()).collect(); + assert_eq!(names, vec!["x", "name", "value"]); + } + + #[test] + fn test_workspace_symbols_with_query() { + let doc = Document::new( + r"{ myField: 1, otherField: 2, myMethod(x): x }".to_string(), + DocVersion::new(1), + ); + let uri: Uri = "file:///test.jsonnet".parse().unwrap(); + + let symbols = workspace_symbols_for_document(&doc, &uri, "my"); + let names: Vec<_> = symbols.iter().map(|s| s.name.as_str()).collect(); + assert_eq!(names, vec!["myField", "myMethod"]); + } + + #[test] + fn test_workspace_symbols_case_insensitive() { + let doc = Document::new( + r"{ MyField: 1, myfield: 2, MYFIELD: 3 }".to_string(), + DocVersion::new(1), + ); + let uri: Uri = "file:///test.jsonnet".parse().unwrap(); + + let symbols = workspace_symbols_for_document(&doc, &uri, "myfield"); + let names: Vec<_> = symbols.iter().map(|s| s.name.as_str()).collect(); + assert_eq!(names, vec!["MyField", "myfield", "MYFIELD"]); + } + 
+ #[test] + fn test_workspace_symbols_nested() { + let doc = Document::new( + r"{ outer: { innerField: 1 } }".to_string(), + DocVersion::new(1), + ); + let uri: Uri = "file:///test.jsonnet".parse().unwrap(); + + let symbols = workspace_symbols_for_document(&doc, &uri, "inner"); + let names: Vec<_> = symbols.iter().map(|s| s.name.as_str()).collect(); + assert_eq!(names, vec!["innerField"]); + assert_eq!(symbols[0].container_name, Some("outer".to_string())); + } +} From 29335844657a204c5f3a9f4db2d5b7d801897e31 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Mon, 16 Feb 2026 15:50:56 +0000 Subject: [PATCH 134/210] refactor(lsp-handlers): split signature help internals Split signature help into focused modules for call-context parsing, local signature lookup, and signature rendering. Keep the public entrypoint in `handler.rs` and move shared models into a dedicated model module to reduce cross-cutting helpers. Keep tests inline in `handler.rs` and `render.rs`, with no tests in `mod.rs`. --- .../src/signature_help.rs | 575 ------------------ .../src/signature_help/context.rs | 95 +++ .../src/signature_help/handler.rs | 153 +++++ .../src/signature_help/local.rs | 130 ++++ .../src/signature_help/mod.rs | 11 + .../src/signature_help/model.rs | 19 + .../src/signature_help/render.rs | 168 +++++ 7 files changed, 576 insertions(+), 575 deletions(-) delete mode 100644 crates/jrsonnet-lsp-handlers/src/signature_help.rs create mode 100644 crates/jrsonnet-lsp-handlers/src/signature_help/context.rs create mode 100644 crates/jrsonnet-lsp-handlers/src/signature_help/handler.rs create mode 100644 crates/jrsonnet-lsp-handlers/src/signature_help/local.rs create mode 100644 crates/jrsonnet-lsp-handlers/src/signature_help/mod.rs create mode 100644 crates/jrsonnet-lsp-handlers/src/signature_help/model.rs create mode 100644 crates/jrsonnet-lsp-handlers/src/signature_help/render.rs diff --git a/crates/jrsonnet-lsp-handlers/src/signature_help.rs 
b/crates/jrsonnet-lsp-handlers/src/signature_help.rs deleted file mode 100644 index cdb4e3e5..00000000 --- a/crates/jrsonnet-lsp-handlers/src/signature_help.rs +++ /dev/null @@ -1,575 +0,0 @@ -//! Signature help handler. -//! -//! Provides parameter information when the user is inside a function call. - -use jrsonnet_lsp_document::{Document, LspPosition}; -use jrsonnet_lsp_stdlib as stdlib; -use jrsonnet_rowan_parser::{ - nodes::{ - Arg, ArgsDesc, Bind, BindFunction, Destruct, ExprBase, ExprCall, ExprField, Param, - StmtLocal, - }, - AstNode, SyntaxKind, SyntaxNode, SyntaxToken, -}; -use lsp_types::{ - Documentation, MarkupContent, MarkupKind, ParameterInformation, ParameterLabel, SignatureHelp, - SignatureInformation, -}; - -#[derive(Debug, Default, Clone)] -struct ActiveArg { - positional_index: u32, - named_arg: Option, -} - -#[derive(Debug, Clone)] -struct SignatureParamInfo { - label: String, - name: String, -} - -fn to_u32(value: usize) -> u32 { - u32::try_from(value).unwrap_or(u32::MAX) -} - -/// Get signature help at the given position. -#[must_use] -pub fn signature_help(document: &Document, position: LspPosition) -> Option { - let text = document.text(); - let line_index = document.line_index(); - - let offset = line_index.offset(position, text)?; - let cursor_offset: rowan::TextSize = offset.into(); - - let ast = document.ast(); - let root = ast.syntax(); - - let token = token_at_offset(root, offset.into())?; - let (func_name, active_arg) = find_call_context(&token, cursor_offset)?; - - get_signature_for_function(&func_name, &token, &active_arg) -} - -/// Find the function call context around the cursor. -/// Returns the function name and active argument info. 
-fn find_call_context( - token: &SyntaxToken, - cursor_offset: rowan::TextSize, -) -> Option<(String, ActiveArg)> { - let mut current = token.parent()?; - - loop { - if let Some(call) = ExprCall::cast(current.clone()) { - return extract_call_info(&call, cursor_offset); - } - - if current.kind() == SyntaxKind::ARGS_DESC { - if let Some(call) = current.parent().and_then(ExprCall::cast) { - return extract_call_info(&call, cursor_offset); - } - } - - current = current.parent()?; - } -} - -/// Extract call information from an `ExprCall` node. -fn extract_call_info( - call: &ExprCall, - cursor_offset: rowan::TextSize, -) -> Option<(String, ActiveArg)> { - let func_name = extract_callee_name(call)?; - let active_arg = active_arg_for_call(call, cursor_offset); - Some((func_name, active_arg)) -} - -/// Extract the function name from the callee of an `ExprCall`. -fn extract_callee_name(call: &ExprCall) -> Option { - let callee = call.callee()?; - match callee.expr_base()? { - ExprBase::ExprVar(var) => Some(var.name()?.ident_lit()?.text().to_string()), - ExprBase::ExprField(field) => extract_field_name(&field), - _ => None, - } -} - -/// Extract the field name from an `ExprField` (returns just the field name, e.g., "length" from std.length). -fn extract_field_name(field: &ExprField) -> Option { - Some(field.field()?.ident_lit()?.text().to_string()) -} - -fn active_arg_for_call(call: &ExprCall, cursor_offset: rowan::TextSize) -> ActiveArg { - let Some(args_desc) = call.args_desc() else { - return ActiveArg::default(); - }; - let positional_index = positional_arg_index(&args_desc, cursor_offset); - let named_arg = args_desc - .args() - .nth(usize::try_from(positional_index).unwrap_or(usize::MAX)) - .and_then(|arg| arg_name(&arg)); - - ActiveArg { - positional_index, - named_arg, - } -} - -/// Compute the currently active positional argument index. 
-fn positional_arg_index(args_desc: &ArgsDesc, cursor_offset: rowan::TextSize) -> u32 { - let args: Vec<_> = args_desc.args().collect(); - if args.is_empty() { - return count_preceding_commas(args_desc, cursor_offset); - } - - for (index, arg) in args.iter().enumerate() { - if cursor_offset <= arg.syntax().text_range().end() { - return to_u32(index); - } - } - - count_preceding_commas(args_desc, cursor_offset) -} - -fn arg_name(arg: &Arg) -> Option { - Some(arg.name()?.ident_lit()?.text().to_string()) -} - -/// Count top-level commas before the cursor inside an argument list. -fn count_preceding_commas(args_desc: &ArgsDesc, cursor_offset: rowan::TextSize) -> u32 { - to_u32( - args_desc - .syntax() - .children_with_tokens() - .filter_map(rowan::NodeOrToken::into_token) - .filter(|t| t.kind() == SyntaxKind::COMMA && t.text_range().end() <= cursor_offset) - .count(), - ) -} - -/// Get signature information for a function. -fn get_signature_for_function( - name: &str, - token: &SyntaxToken, - active_arg: &ActiveArg, -) -> Option { - // First check stdlib - if let Some(doc) = stdlib::get_stdlib_doc(name) { - let (params_info, variadic) = stdlib_params(name, &doc.signature); - let active_param = resolve_active_parameter(¶ms_info, variadic, active_arg); - let signature_name = format!("std.{name}"); - let (label, param_offsets) = signature_label_with_offsets(&signature_name, ¶ms_info); - let params = to_lsp_params(¶m_offsets); - - return Some(SignatureHelp { - signatures: vec![SignatureInformation { - label, - documentation: Some(Documentation::MarkupContent(MarkupContent { - kind: MarkupKind::Markdown, - value: doc.description.to_string(), - })), - parameters: Some(params), - active_parameter: Some(active_param), - }], - active_signature: Some(0), - active_parameter: Some(active_param), - }); - } - - // Check for local function definitions - if let Some(sig) = find_local_function_signature(token, name) { - let active_param = resolve_active_parameter(&sig.params, false, 
active_arg); - let (label, param_offsets) = signature_label_with_offsets(name, &sig.params); - let params = to_lsp_params(¶m_offsets); - - return Some(SignatureHelp { - signatures: vec![SignatureInformation { - label, - documentation: None, - parameters: Some(params), - active_parameter: Some(active_param), - }], - active_signature: Some(0), - active_parameter: Some(active_param), - }); - } - - None -} - -fn signature_label_with_offsets( - name: &str, - params: &[SignatureParamInfo], -) -> (String, Vec<[u32; 2]>) { - let mut label = String::new(); - let mut offsets = Vec::with_capacity(params.len()); - label.push_str(name); - label.push('('); - - for (index, param) in params.iter().enumerate() { - if index > 0 { - label.push_str(", "); - } - let start = to_u32(label.len()); - label.push_str(¶m.label); - let end = to_u32(label.len()); - offsets.push([start, end]); - } - - label.push(')'); - (label, offsets) -} - -fn to_lsp_params(offsets: &[[u32; 2]]) -> Vec { - offsets - .iter() - .map(|offset| ParameterInformation { - label: ParameterLabel::LabelOffsets(*offset), - documentation: None, - }) - .collect() -} - -fn resolve_active_parameter( - params: &[SignatureParamInfo], - _variadic: bool, - active_arg: &ActiveArg, -) -> u32 { - if params.is_empty() { - return active_arg.positional_index; - } - - if let Some(named_arg) = active_arg.named_arg.as_deref() { - if let Some(index) = params.iter().position(|param| param.name == named_arg) { - return to_u32(index); - } - } - - let max_index = to_u32(params.len().saturating_sub(1)); - active_arg.positional_index.min(max_index) -} - -fn stdlib_params(name: &str, fallback_signature: &str) -> (Vec, bool) { - if let Some(func_data) = stdlib::get_stdlib_func_data(name) { - let params = func_data - .params - .into_iter() - .map(|param| { - let label = if param.has_default { - format!("{}=...", param.name) - } else { - param.name.clone() - }; - SignatureParamInfo { - label, - name: param.name, - } - }) - .collect(); - return 
(params, func_data.variadic); - } - - let params = parse_signature_params(fallback_signature) - .into_iter() - .map(|label| { - let name = label - .split_once('=') - .map_or_else(|| label.clone(), |(name, _)| name.to_string()); - SignatureParamInfo { label, name } - }) - .collect(); - (params, false) -} - -/// Parse parameter names from a signature string like "(func, arr". -fn parse_signature_params(signature: &str) -> Vec { - // Remove leading '(' if present - let s = signature.trim_start_matches('('); - // Split by comma and trim whitespace - s.split(',') - .map(|p| p.trim().to_string()) - .filter(|p| !p.is_empty()) - .collect() -} - -/// Local function signature info. -struct LocalFunctionSignature { - params: Vec, -} - -/// Find a local function definition and extract its signature. -fn find_local_function_signature( - token: &SyntaxToken, - name: &str, -) -> Option { - let mut current = token.parent()?; - - while let Some(parent) = current.parent() { - if let Some(sig) = check_scope_for_function(&parent, ¤t, name) { - return Some(sig); - } - current = parent; - } - - None -} - -/// Check a scope for a function definition. -fn check_scope_for_function( - scope: &SyntaxNode, - child: &SyntaxNode, - name: &str, -) -> Option { - match scope.kind() { - SyntaxKind::EXPR => check_expr_for_function(scope, child, name), - _ => None, - } -} - -/// Check an Expr for local function definitions. -fn check_expr_for_function( - expr: &SyntaxNode, - child: &SyntaxNode, - name: &str, -) -> Option { - for stmt_node in expr.children() { - if stmt_node.kind() == SyntaxKind::STMT_LOCAL { - // Only look at locals that appear before the child - if stmt_node.text_range().end() > child.text_range().start() { - continue; - } - - if let Some(stmt_local) = StmtLocal::cast(stmt_node) { - for bind in stmt_local.binds() { - if let Some(sig) = check_bind_for_function(&bind, name) { - return Some(sig); - } - } - } - } - } - None -} - -/// Check a bind for a function definition. 
-fn check_bind_for_function(bind: &Bind, name: &str) -> Option { - match bind { - Bind::BindDestruct(bd) => { - // Check if this is a function value - let destruct = bd.into()?; - if let Destruct::DestructFull(full) = destruct { - let bind_name = full.name()?; - let ident = bind_name.ident_lit()?; - if ident.text() != name { - return None; - } - - // Check if the value is a function - let value = bd.value()?; - if let Some(base) = value.expr_base() { - if let ExprBase::ExprFunction(func) = base { - return extract_params_from_function_expr(&func); - } - } - } - None - } - Bind::BindFunction(bf) => { - let bind_name = bf.name()?; - let ident = bind_name.ident_lit()?; - if ident.text() != name { - return None; - } - - extract_params_from_bind_function(bf) - } - } -} - -/// Extract parameter names from a `BindFunction`. -fn extract_params_from_bind_function(func: &BindFunction) -> Option { - let params_desc = func.params()?; - let params: Vec = params_desc - .params() - .filter_map(|p| extract_param_info(&p)) - .collect(); - - Some(LocalFunctionSignature { params }) -} - -/// Extract parameter names from an `ExprFunction`. -fn extract_params_from_function_expr( - func: &jrsonnet_rowan_parser::nodes::ExprFunction, -) -> Option { - let params_desc = func.params_desc()?; - let params: Vec = params_desc - .params() - .filter_map(|p| extract_param_info(&p)) - .collect(); - - Some(LocalFunctionSignature { params }) -} - -/// Extract parameter label and matching name. 
-fn extract_param_info(param: &Param) -> Option { - let destruct = param.destruct()?; - let name = match destruct { - Destruct::DestructFull(full) => { - let name = full.name()?; - name.ident_lit()?.text().to_string() - } - Destruct::DestructArray(_) => "[array]".to_string(), - Destruct::DestructObject(_) => "{object}".to_string(), - Destruct::DestructSkip(_) => return None, - }; - - let label = if param.assign_token().is_some() { - format!("{name}=...") - } else { - name.clone() - }; - Some(SignatureParamInfo { label, name }) -} - -/// Find the token at the given byte offset. -fn token_at_offset(root: &SyntaxNode, offset: u32) -> Option { - root.token_at_offset(rowan::TextSize::from(offset)) - .right_biased() -} - -#[cfg(test)] -mod tests { - use jrsonnet_lsp_document::DocVersion; - - use super::*; - - fn document_with_cursor(code_with_cursor: &str) -> (Document, LspPosition) { - let cursor = code_with_cursor - .find('|') - .expect("test source should include `|` cursor marker"); - let mut source = code_with_cursor.to_string(); - source.remove(cursor); - - let before = &code_with_cursor[..cursor]; - let line = to_u32(before.bytes().filter(|&b| b == b'\n').count()); - let column = to_u32( - before - .rsplit_once('\n') - .map_or(before.len(), |(_, suffix)| suffix.len()), - ); - - ( - Document::new(source, DocVersion::new(1)), - (line, column).into(), - ) - } - - #[test] - fn test_parse_signature_params() { - assert_eq!( - parse_signature_params("(func, arr"), - vec!["func".to_string(), "arr".to_string()] - ); - assert_eq!(parse_signature_params("(x"), vec!["x".to_string()]); - assert_eq!( - parse_signature_params("(a, b, c"), - vec!["a".to_string(), "b".to_string(), "c".to_string()] - ); - } - - #[test] - fn test_stdlib_signature_help() { - let code = "std.filter(|"; - // ^ cursor here (position 11) - let doc = Document::new(code.to_string(), DocVersion::new(1)); - - let pos = (0, 11).into(); - - let help = signature_help(&doc, pos); - assert_eq!( - help, - 
Some(SignatureHelp { - signatures: vec![SignatureInformation { - label: "std.filter(func, arr)".to_string(), - documentation: Some(Documentation::MarkupContent(MarkupContent { - kind: MarkupKind::Markdown, - value: "Returns elements of `arr` where `func(x)` is true.".to_string(), - })), - parameters: Some(vec![ - ParameterInformation { - label: ParameterLabel::LabelOffsets([11, 15]), - documentation: None, - }, - ParameterInformation { - label: ParameterLabel::LabelOffsets([17, 20]), - documentation: None, - }, - ]), - active_parameter: Some(0), - }], - active_signature: Some(0), - active_parameter: Some(0), - }) - ); - } - - #[test] - fn test_local_function_signature_help() { - let code = r"local add(a, b) = a + b; add(1|"; - // ^ cursor here - let doc = Document::new(code.to_string(), DocVersion::new(1)); - - let pos = (0, 30).into(); - - let help = signature_help(&doc, pos); - assert_eq!( - help, - Some(SignatureHelp { - signatures: vec![SignatureInformation { - label: "add(a, b)".to_string(), - documentation: None, - parameters: Some(vec![ - ParameterInformation { - label: ParameterLabel::LabelOffsets([4, 5]), - documentation: None, - }, - ParameterInformation { - label: ParameterLabel::LabelOffsets([7, 8]), - documentation: None, - }, - ]), - active_parameter: Some(0), - }], - active_signature: Some(0), - active_parameter: Some(0), - }) - ); - } - - #[test] - fn test_no_signature_help_outside_call() { - let code = "local x = 1; x"; - let doc = Document::new(code.to_string(), DocVersion::new(1)); - - let pos = (0, 13).into(); - - let help = signature_help(&doc, pos); - assert_eq!(help, None); - } - - #[test] - fn test_stdlib_named_argument_active_parameter() { - let (doc, pos) = document_with_cursor(r#"std.substr(str="abc", from=1|, len=1)"#); - let help = signature_help(&doc, pos).expect("signature help should be available"); - assert_eq!(help.active_parameter, Some(1)); - assert_eq!(help.signatures[0].active_parameter, Some(1)); - } - - #[test] - fn 
test_local_named_argument_active_parameter() { - let (doc, pos) = - document_with_cursor(r"local add(a, b, c) = a + b + c; add(c=3, a=1, b=2|)"); - let help = signature_help(&doc, pos).expect("signature help should be available"); - assert_eq!(help.active_parameter, Some(1)); - assert_eq!(help.signatures[0].active_parameter, Some(1)); - } -} diff --git a/crates/jrsonnet-lsp-handlers/src/signature_help/context.rs b/crates/jrsonnet-lsp-handlers/src/signature_help/context.rs new file mode 100644 index 00000000..3e96333e --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/signature_help/context.rs @@ -0,0 +1,95 @@ +use jrsonnet_rowan_parser::{ + nodes::{Arg, ArgsDesc, ExprBase, ExprCall, ExprField}, + AstNode, SyntaxKind, SyntaxToken, +}; + +use super::model::{to_u32, ActiveArg}; + +pub(super) fn find_call_context( + token: &SyntaxToken, + cursor_offset: rowan::TextSize, +) -> Option<(String, ActiveArg)> { + let mut current = token.parent()?; + + loop { + if let Some(call) = ExprCall::cast(current.clone()) { + return extract_call_info(&call, cursor_offset); + } + + if current.kind() == SyntaxKind::ARGS_DESC { + if let Some(call) = current.parent().and_then(ExprCall::cast) { + return extract_call_info(&call, cursor_offset); + } + } + + current = current.parent()?; + } +} + +fn extract_call_info( + call: &ExprCall, + cursor_offset: rowan::TextSize, +) -> Option<(String, ActiveArg)> { + let func_name = extract_callee_name(call)?; + let active_arg = active_arg_for_call(call, cursor_offset); + Some((func_name, active_arg)) +} + +fn extract_callee_name(call: &ExprCall) -> Option { + let callee = call.callee()?; + match callee.expr_base()? 
{ + ExprBase::ExprVar(var) => Some(var.name()?.ident_lit()?.text().to_string()), + ExprBase::ExprField(field) => extract_field_name(&field), + _ => None, + } +} + +fn extract_field_name(field: &ExprField) -> Option { + Some(field.field()?.ident_lit()?.text().to_string()) +} + +fn active_arg_for_call(call: &ExprCall, cursor_offset: rowan::TextSize) -> ActiveArg { + let Some(args_desc) = call.args_desc() else { + return ActiveArg::default(); + }; + let positional_index = positional_arg_index(&args_desc, cursor_offset); + let named_arg = args_desc + .args() + .nth(usize::try_from(positional_index).unwrap_or(usize::MAX)) + .and_then(|arg| arg_name(&arg)); + + ActiveArg { + positional_index, + named_arg, + } +} + +fn positional_arg_index(args_desc: &ArgsDesc, cursor_offset: rowan::TextSize) -> u32 { + let args: Vec<_> = args_desc.args().collect(); + if args.is_empty() { + return count_preceding_commas(args_desc, cursor_offset); + } + + for (index, arg) in args.iter().enumerate() { + if cursor_offset <= arg.syntax().text_range().end() { + return to_u32(index); + } + } + + count_preceding_commas(args_desc, cursor_offset) +} + +fn arg_name(arg: &Arg) -> Option { + Some(arg.name()?.ident_lit()?.text().to_string()) +} + +fn count_preceding_commas(args_desc: &ArgsDesc, cursor_offset: rowan::TextSize) -> u32 { + to_u32( + args_desc + .syntax() + .children_with_tokens() + .filter_map(rowan::NodeOrToken::into_token) + .filter(|t| t.kind() == SyntaxKind::COMMA && t.text_range().end() <= cursor_offset) + .count(), + ) +} diff --git a/crates/jrsonnet-lsp-handlers/src/signature_help/handler.rs b/crates/jrsonnet-lsp-handlers/src/signature_help/handler.rs new file mode 100644 index 00000000..041e8a22 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/signature_help/handler.rs @@ -0,0 +1,153 @@ +use jrsonnet_lsp_document::{Document, LspPosition}; +use jrsonnet_rowan_parser::{AstNode, SyntaxNode, SyntaxToken}; + +use super::{context::find_call_context, 
render::get_signature_for_function}; + +#[must_use] +pub fn signature_help( + document: &Document, + position: LspPosition, +) -> Option { + let text = document.text(); + let line_index = document.line_index(); + + let offset = line_index.offset(position, text)?; + let cursor_offset: rowan::TextSize = offset.into(); + + let ast = document.ast(); + let root = ast.syntax(); + + let token = token_at_offset(root, offset.into())?; + let (func_name, active_arg) = find_call_context(&token, cursor_offset)?; + + get_signature_for_function(&func_name, &token, &active_arg) +} + +fn token_at_offset(root: &SyntaxNode, offset: u32) -> Option { + root.token_at_offset(rowan::TextSize::from(offset)) + .right_biased() +} + +#[cfg(test)] +mod tests { + use jrsonnet_lsp_document::DocVersion; + use lsp_types::{ + Documentation, MarkupContent, MarkupKind, ParameterInformation, ParameterLabel, + SignatureHelp, SignatureInformation, + }; + + use super::{super::model::to_u32, *}; + + fn document_with_cursor(code_with_cursor: &str) -> (Document, LspPosition) { + let cursor = code_with_cursor + .find('|') + .expect("test source should include `|` cursor marker"); + let mut source = code_with_cursor.to_string(); + source.remove(cursor); + + let before = &code_with_cursor[..cursor]; + let line = to_u32(before.bytes().filter(|&b| b == b'\n').count()); + let column = to_u32( + before + .rsplit_once('\n') + .map_or(before.len(), |(_, suffix)| suffix.len()), + ); + + ( + Document::new(source, DocVersion::new(1)), + (line, column).into(), + ) + } + + #[test] + fn test_stdlib_signature_help() { + let code = "std.filter(|"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let pos = (0, 11).into(); + + let help = signature_help(&doc, pos); + assert_eq!( + help, + Some(SignatureHelp { + signatures: vec![SignatureInformation { + label: "std.filter(func, arr)".to_string(), + documentation: Some(Documentation::MarkupContent(MarkupContent { + kind: MarkupKind::Markdown, + value: "Returns 
elements of `arr` where `func(x)` is true.".to_string(), + })), + parameters: Some(vec![ + ParameterInformation { + label: ParameterLabel::LabelOffsets([11, 15]), + documentation: None, + }, + ParameterInformation { + label: ParameterLabel::LabelOffsets([17, 20]), + documentation: None, + }, + ]), + active_parameter: Some(0), + }], + active_signature: Some(0), + active_parameter: Some(0), + }) + ); + } + + #[test] + fn test_local_function_signature_help() { + let code = r"local add(a, b) = a + b; add(1|"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let pos = (0, 30).into(); + + let help = signature_help(&doc, pos); + assert_eq!( + help, + Some(SignatureHelp { + signatures: vec![SignatureInformation { + label: "add(a, b)".to_string(), + documentation: None, + parameters: Some(vec![ + ParameterInformation { + label: ParameterLabel::LabelOffsets([4, 5]), + documentation: None, + }, + ParameterInformation { + label: ParameterLabel::LabelOffsets([7, 8]), + documentation: None, + }, + ]), + active_parameter: Some(0), + }], + active_signature: Some(0), + active_parameter: Some(0), + }) + ); + } + + #[test] + fn test_no_signature_help_outside_call() { + let code = "local x = 1; x"; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let pos = (0, 13).into(); + + let help = signature_help(&doc, pos); + assert_eq!(help, None); + } + + #[test] + fn test_stdlib_named_argument_active_parameter() { + let (doc, pos) = document_with_cursor(r#"std.substr(str="abc", from=1|, len=1)"#); + let help = signature_help(&doc, pos).expect("signature help should be available"); + assert_eq!(help.active_parameter, Some(1)); + assert_eq!(help.signatures[0].active_parameter, Some(1)); + } + + #[test] + fn test_local_named_argument_active_parameter() { + let (doc, pos) = + document_with_cursor(r"local add(a, b, c) = a + b + c; add(c=3, a=1, b=2|)"); + let help = signature_help(&doc, pos).expect("signature help should be available"); + 
assert_eq!(help.active_parameter, Some(1)); + assert_eq!(help.signatures[0].active_parameter, Some(1)); + } +} diff --git a/crates/jrsonnet-lsp-handlers/src/signature_help/local.rs b/crates/jrsonnet-lsp-handlers/src/signature_help/local.rs new file mode 100644 index 00000000..63193b34 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/signature_help/local.rs @@ -0,0 +1,130 @@ +use jrsonnet_rowan_parser::{ + nodes::{Bind, BindFunction, Destruct, ExprBase, Param, StmtLocal}, + AstNode, SyntaxKind, SyntaxNode, SyntaxToken, +}; + +use super::model::{LocalFunctionSignature, SignatureParamInfo}; + +pub(super) fn find_local_function_signature( + token: &SyntaxToken, + name: &str, +) -> Option { + let mut current = token.parent()?; + + while let Some(parent) = current.parent() { + if let Some(sig) = check_scope_for_function(&parent, ¤t, name) { + return Some(sig); + } + current = parent; + } + + None +} + +fn check_scope_for_function( + scope: &SyntaxNode, + child: &SyntaxNode, + name: &str, +) -> Option { + match scope.kind() { + SyntaxKind::EXPR => check_expr_for_function(scope, child, name), + _ => None, + } +} + +fn check_expr_for_function( + expr: &SyntaxNode, + child: &SyntaxNode, + name: &str, +) -> Option { + for stmt_node in expr.children() { + if stmt_node.kind() == SyntaxKind::STMT_LOCAL { + if stmt_node.text_range().end() > child.text_range().start() { + continue; + } + + if let Some(stmt_local) = StmtLocal::cast(stmt_node) { + for bind in stmt_local.binds() { + if let Some(sig) = check_bind_for_function(&bind, name) { + return Some(sig); + } + } + } + } + } + None +} + +fn check_bind_for_function(bind: &Bind, name: &str) -> Option { + match bind { + Bind::BindDestruct(bd) => { + let destruct = bd.into()?; + if let Destruct::DestructFull(full) = destruct { + let bind_name = full.name()?; + let ident = bind_name.ident_lit()?; + if ident.text() != name { + return None; + } + + let value = bd.value()?; + if let Some(base) = value.expr_base() { + if let 
ExprBase::ExprFunction(func) = base { + return extract_params_from_function_expr(&func); + } + } + } + None + } + Bind::BindFunction(bf) => { + let bind_name = bf.name()?; + let ident = bind_name.ident_lit()?; + if ident.text() != name { + return None; + } + + extract_params_from_bind_function(bf) + } + } +} + +fn extract_params_from_bind_function(func: &BindFunction) -> Option { + let params_desc = func.params()?; + let params: Vec = params_desc + .params() + .filter_map(|p| extract_param_info(&p)) + .collect(); + + Some(LocalFunctionSignature { params }) +} + +fn extract_params_from_function_expr( + func: &jrsonnet_rowan_parser::nodes::ExprFunction, +) -> Option { + let params_desc = func.params_desc()?; + let params: Vec = params_desc + .params() + .filter_map(|p| extract_param_info(&p)) + .collect(); + + Some(LocalFunctionSignature { params }) +} + +fn extract_param_info(param: &Param) -> Option { + let destruct = param.destruct()?; + let name = match destruct { + Destruct::DestructFull(full) => { + let name = full.name()?; + name.ident_lit()?.text().to_string() + } + Destruct::DestructArray(_) => "[array]".to_string(), + Destruct::DestructObject(_) => "{object}".to_string(), + Destruct::DestructSkip(_) => return None, + }; + + let label = if param.assign_token().is_some() { + format!("{name}=...") + } else { + name.clone() + }; + Some(SignatureParamInfo { label, name }) +} diff --git a/crates/jrsonnet-lsp-handlers/src/signature_help/mod.rs b/crates/jrsonnet-lsp-handlers/src/signature_help/mod.rs new file mode 100644 index 00000000..238494e2 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/signature_help/mod.rs @@ -0,0 +1,11 @@ +//! Signature help handler. +//! +//! Provides parameter information when the user is inside a function call. 
+ +mod context; +mod handler; +mod local; +mod model; +mod render; + +pub use handler::signature_help; diff --git a/crates/jrsonnet-lsp-handlers/src/signature_help/model.rs b/crates/jrsonnet-lsp-handlers/src/signature_help/model.rs new file mode 100644 index 00000000..721a9f4b --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/signature_help/model.rs @@ -0,0 +1,19 @@ +#[derive(Debug, Default, Clone)] +pub(super) struct ActiveArg { + pub(super) positional_index: u32, + pub(super) named_arg: Option, +} + +#[derive(Debug, Clone)] +pub(super) struct SignatureParamInfo { + pub(super) label: String, + pub(super) name: String, +} + +pub(super) struct LocalFunctionSignature { + pub(super) params: Vec, +} + +pub(super) fn to_u32(value: usize) -> u32 { + u32::try_from(value).unwrap_or(u32::MAX) +} diff --git a/crates/jrsonnet-lsp-handlers/src/signature_help/render.rs b/crates/jrsonnet-lsp-handlers/src/signature_help/render.rs new file mode 100644 index 00000000..d35ac1cc --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/signature_help/render.rs @@ -0,0 +1,168 @@ +use jrsonnet_lsp_stdlib as stdlib; +use jrsonnet_rowan_parser::SyntaxToken; +use lsp_types::{ + Documentation, MarkupContent, MarkupKind, ParameterInformation, ParameterLabel, SignatureHelp, + SignatureInformation, +}; + +use super::{ + local::find_local_function_signature, + model::{to_u32, ActiveArg, SignatureParamInfo}, +}; + +pub(super) fn get_signature_for_function( + name: &str, + token: &SyntaxToken, + active_arg: &ActiveArg, +) -> Option { + if let Some(doc) = stdlib::get_stdlib_doc(name) { + let (params_info, variadic) = stdlib_params(name, &doc.signature); + let active_param = resolve_active_parameter(¶ms_info, variadic, active_arg); + let signature_name = format!("std.{name}"); + let (label, param_offsets) = signature_label_with_offsets(&signature_name, ¶ms_info); + let params = to_lsp_params(¶m_offsets); + + return Some(SignatureHelp { + signatures: vec![SignatureInformation { + label, + 
documentation: Some(Documentation::MarkupContent(MarkupContent { + kind: MarkupKind::Markdown, + value: doc.description.to_string(), + })), + parameters: Some(params), + active_parameter: Some(active_param), + }], + active_signature: Some(0), + active_parameter: Some(active_param), + }); + } + + if let Some(sig) = find_local_function_signature(token, name) { + let active_param = resolve_active_parameter(&sig.params, false, active_arg); + let (label, param_offsets) = signature_label_with_offsets(name, &sig.params); + let params = to_lsp_params(¶m_offsets); + + return Some(SignatureHelp { + signatures: vec![SignatureInformation { + label, + documentation: None, + parameters: Some(params), + active_parameter: Some(active_param), + }], + active_signature: Some(0), + active_parameter: Some(active_param), + }); + } + + None +} + +fn signature_label_with_offsets( + name: &str, + params: &[SignatureParamInfo], +) -> (String, Vec<[u32; 2]>) { + let mut label = String::new(); + let mut offsets = Vec::with_capacity(params.len()); + label.push_str(name); + label.push('('); + + for (index, param) in params.iter().enumerate() { + if index > 0 { + label.push_str(", "); + } + let start = to_u32(label.len()); + label.push_str(¶m.label); + let end = to_u32(label.len()); + offsets.push([start, end]); + } + + label.push(')'); + (label, offsets) +} + +fn to_lsp_params(offsets: &[[u32; 2]]) -> Vec { + offsets + .iter() + .map(|offset| ParameterInformation { + label: ParameterLabel::LabelOffsets(*offset), + documentation: None, + }) + .collect() +} + +fn resolve_active_parameter( + params: &[SignatureParamInfo], + _variadic: bool, + active_arg: &ActiveArg, +) -> u32 { + if params.is_empty() { + return active_arg.positional_index; + } + + if let Some(named_arg) = active_arg.named_arg.as_deref() { + if let Some(index) = params.iter().position(|param| param.name == named_arg) { + return to_u32(index); + } + } + + let max_index = to_u32(params.len().saturating_sub(1)); + 
active_arg.positional_index.min(max_index) +} + +fn stdlib_params(name: &str, fallback_signature: &str) -> (Vec, bool) { + if let Some(func_data) = stdlib::get_stdlib_func_data(name) { + let params = func_data + .params + .into_iter() + .map(|param| { + let label = if param.has_default { + format!("{}=...", param.name) + } else { + param.name.clone() + }; + SignatureParamInfo { + label, + name: param.name, + } + }) + .collect(); + return (params, func_data.variadic); + } + + let params = parse_signature_params(fallback_signature) + .into_iter() + .map(|label| { + let name = label + .split_once('=') + .map_or_else(|| label.clone(), |(name, _)| name.to_string()); + SignatureParamInfo { label, name } + }) + .collect(); + (params, false) +} + +fn parse_signature_params(signature: &str) -> Vec { + let s = signature.trim_start_matches('('); + s.split(',') + .map(|p| p.trim().to_string()) + .filter(|p| !p.is_empty()) + .collect() +} + +#[cfg(test)] +mod tests { + use super::parse_signature_params; + + #[test] + fn test_parse_signature_params() { + assert_eq!( + parse_signature_params("(func, arr"), + vec!["func".to_string(), "arr".to_string()] + ); + assert_eq!(parse_signature_params("(x"), vec!["x".to_string()]); + assert_eq!( + parse_signature_params("(a, b, c"), + vec!["a".to_string(), "b".to_string(), "c".to_string()] + ); + } +} From a7e9e984244f2ac6fefa0a7b405dda5441ca7958 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Mon, 16 Feb 2026 15:54:28 +0000 Subject: [PATCH 135/210] refactor(lsp-handlers): split completion field resolution Split completion field logic into dedicated parse, lookup, and item construction modules behind a focused `mod.rs` entrypoint. Keep behavior unchanged while reducing the size and mixed responsibilities of the original `fields.rs` implementation. Retain the existing public API for object field completion so callers and tests remain stable. 
--- .../src/completion/fields.rs | 563 ------------------ .../src/completion/fields/items.rs | 68 +++ .../src/completion/fields/lookup.rs | 191 ++++++ .../src/completion/fields/mod.rs | 203 +++++++ .../src/completion/fields/parse.rs | 71 +++ 5 files changed, 533 insertions(+), 563 deletions(-) delete mode 100644 crates/jrsonnet-lsp-handlers/src/completion/fields.rs create mode 100644 crates/jrsonnet-lsp-handlers/src/completion/fields/items.rs create mode 100644 crates/jrsonnet-lsp-handlers/src/completion/fields/lookup.rs create mode 100644 crates/jrsonnet-lsp-handlers/src/completion/fields/mod.rs create mode 100644 crates/jrsonnet-lsp-handlers/src/completion/fields/parse.rs diff --git a/crates/jrsonnet-lsp-handlers/src/completion/fields.rs b/crates/jrsonnet-lsp-handlers/src/completion/fields.rs deleted file mode 100644 index d441539b..00000000 --- a/crates/jrsonnet-lsp-handlers/src/completion/fields.rs +++ /dev/null @@ -1,563 +0,0 @@ -//! Object field completions for `obj.` patterns. - -use jrsonnet_lsp_document::{ - is_valid_jsonnet_identifier, token_at_offset, ByteOffset, Document, LineIndex, -}; -use jrsonnet_lsp_inference::TypeAnalysis; -use jrsonnet_lsp_scope::ident_resolves_to_builtin_std; -use jrsonnet_rowan_parser::{ - nodes::{BindDestruct, Destruct, Expr, FieldName, MemberFieldNormal, ObjBody, StmtLocal}, - AstNode, AstToken, SyntaxKind, SyntaxNode, -}; -use lsp_types::{CompletionItem, CompletionItemKind, CompletionTextEdit, Range, TextEdit}; - -/// Check if we're completing object fields after `obj.`. -/// -/// The `analysis` parameter should be pre-computed using `TypeProvider` to ensure -/// that import types are properly resolved. 
-pub fn check_object_field_completion( - document: &Document, - text: &str, - offset: u32, - analysis: &TypeAnalysis, -) -> Option> { - let offset_usize = offset as usize; - - // Look for `identifier.` pattern before cursor - // Find the dot - let before_cursor = &text[..offset_usize]; - let dot_pos = before_cursor.rfind('.')?; - - // Check there's no whitespace between dot and cursor - let after_dot = &before_cursor[dot_pos + 1..]; - if after_dot.contains(char::is_whitespace) && !after_dot.trim().is_empty() { - return None; - } - - // Get the identifier before the dot - let before_dot = &before_cursor[..dot_pos]; - let ident_start = before_dot - .rfind(|c: char| !c.is_ascii_alphanumeric() && c != '_') - .map_or(0, |i| i + 1); - let identifier = before_dot[ident_start..].trim(); - let ast = document.ast(); - let line_index = document.line_index(); - - if identifier == "std" { - // Skip builtin std (handled separately). If `std` is shadowed by user code, - // keep object-field completion enabled. - let before_dot_pos = if dot_pos > 0 { dot_pos - 1 } else { 0 }; - let before_dot_offset = ByteOffset::new(u32::try_from(before_dot_pos).ok()?); - let token = token_at_offset(ast.syntax(), before_dot_offset)?; - if token.kind() == SyntaxKind::IDENT && ident_resolves_to_builtin_std(&token) { - return None; - } - } - - // Get what the user is typing after the dot (for filtering) - let prefix = after_dot.trim(); - - // Look for expression just before the dot (not at the dot) - let before_dot_pos = if dot_pos > 0 { dot_pos - 1 } else { 0 }; - let before_dot_offset = u32::try_from(before_dot_pos).ok()?; - let dot_offset = u32::try_from(dot_pos).ok()?; - - // Prefer exact expression-range lookup to avoid ambiguity at punctuation - // positions (e.g. `hm["foo"].`, where querying at `]` picks the inner - // string literal instead of the full index expression). 
- if let Some(expr_range) = - expression_range_before_dot(ast.syntax(), before_dot_offset, dot_offset) - { - if let Some(fields) = analysis.fields_for_range(expr_range) { - let store = analysis.store(); - let items = fields - .into_iter() - .filter(|(name, _)| prefix.is_empty() || name.starts_with(prefix)) - .filter_map(|(name, ty)| { - field_completion_item( - name, - store.display(ty), - dot_pos, - offset, - line_index, - text, - ) - }) - .collect::>(); - - if !items.is_empty() { - return Some(items); - } - } - } - - // Bracket lookups (for example `obj["field"].`) can place the cursor on `]`, - // which does not reliably map back to the index expression type. Anchor at the - // matching `[` and retry from there. - if let Some(index_anchor) = bracket_index_anchor(before_cursor, dot_pos) { - let index_anchor_text_size = rowan::TextSize::from(index_anchor); - if let Some(fields) = analysis.fields_at_position(ast.syntax(), index_anchor_text_size) { - let store = analysis.store(); - let items = fields - .into_iter() - .filter(|(name, _)| prefix.is_empty() || name.starts_with(prefix)) - .filter_map(|(name, ty)| { - field_completion_item( - name, - store.display(ty), - dot_pos, - offset, - line_index, - text, - ) - }) - .collect::>(); - - if !items.is_empty() { - return Some(items); - } - } - } - - // Fall back to position-based lookup when we can't find a matching - // expression range (e.g. broken syntax around the dot). 
- let before_dot_text_size = rowan::TextSize::from(before_dot_offset); - if let Some(fields) = analysis.fields_at_position(ast.syntax(), before_dot_text_size) { - let store = analysis.store(); - let items = fields - .into_iter() - .filter(|(name, _)| prefix.is_empty() || name.starts_with(prefix)) - .filter_map(|(name, ty)| { - field_completion_item(name, store.display(ty), dot_pos, offset, line_index, text) - }) - .collect::>(); - - if !items.is_empty() { - return Some(items); - } - } - - if let Some((base_identifier, key)) = parse_bracket_lookup(before_dot) { - if let Some(target_range) = - find_bracket_lookup_target_expr_range(ast.syntax(), &base_identifier, &key, dot_offset) - { - if let Some(fields) = analysis.fields_for_range(target_range) { - let store = analysis.store(); - let items = fields - .into_iter() - .filter(|(name, _)| prefix.is_empty() || name.starts_with(prefix)) - .filter_map(|(name, ty)| { - field_completion_item( - name, - store.display(ty), - dot_pos, - offset, - line_index, - text, - ) - }) - .collect::>(); - if !items.is_empty() { - return Some(items); - } - } - - if let Some(target_node) = ast - .syntax() - .descendants() - .find(|node| node.text_range() == target_range) - { - if let Some(fields) = extract_object_fields(&target_node) { - let items = fields - .into_iter() - .filter(|name| prefix.is_empty() || name.starts_with(prefix)) - .filter_map(|name| { - field_completion_item( - name, - "object field".to_string(), - dot_pos, - offset, - line_index, - text, - ) - }) - .collect::>(); - if !items.is_empty() { - return Some(items); - } - } - } - } - } - - if identifier.is_empty() { - return None; - } - - // Fall back to AST-based field extraction for cases where type inference isn't enough - let fields = find_object_fields_for_identifier(ast.syntax(), identifier, dot_offset)?; - - // Filter and convert to completion items - let items = fields - .into_iter() - .filter(|f| prefix.is_empty() || f.starts_with(prefix)) - .filter_map(|name| { - 
field_completion_item( - name, - "object field".to_string(), - dot_pos, - offset, - line_index, - text, - ) - }) - .collect::>(); - - if items.is_empty() { - return None; - } - Some(items) -} - -fn expression_range_before_dot( - root: &SyntaxNode, - before_dot_offset: u32, - dot_offset: u32, -) -> Option { - let token = token_at_offset(root, ByteOffset::new(before_dot_offset))?; - let dot = rowan::TextSize::from(dot_offset); - token - .parent_ancestors() - .filter_map(Expr::cast) - .map(|expr| expr.syntax().text_range()) - .filter(|range| range.end() == dot) - .min_by_key(|range| range.len()) -} - -fn bracket_index_anchor(before_cursor: &str, dot_pos: usize) -> Option { - let before_dot = before_cursor.get(..dot_pos)?; - let mut bracket_depth = 0usize; - - for (index, byte) in before_dot.as_bytes().iter().enumerate().rev() { - match *byte { - b']' => bracket_depth += 1, - b'[' => { - if bracket_depth == 0 { - continue; - } - bracket_depth -= 1; - if bracket_depth == 0 { - return u32::try_from(index).ok(); - } - } - _ => {} - } - } - - None -} - -fn parse_bracket_lookup(before_dot: &str) -> Option<(String, String)> { - let trimmed = before_dot.trim_end(); - let close_bracket = trimmed.rfind(']')?; - if close_bracket + 1 != trimmed.len() { - return None; - } - - let open_bracket = trimmed[..close_bracket].rfind('[')?; - let base_expr = trimmed[..open_bracket].trim_end(); - let key_expr = trimmed[open_bracket + 1..close_bracket].trim(); - let key = key_expr - .strip_prefix('"') - .and_then(|value| value.strip_suffix('"')) - .or_else(|| { - key_expr - .strip_prefix('\'') - .and_then(|value| value.strip_suffix('\'')) - })? 
- .to_string(); - - let ident_start = base_expr - .rfind(|c: char| !c.is_ascii_alphanumeric() && c != '_') - .map_or(0, |index| index + 1); - let identifier = base_expr[ident_start..].trim(); - if identifier.is_empty() { - return None; - } - - Some((identifier.to_string(), key)) -} - -fn field_completion_item( - name: String, - detail: String, - dot_pos: usize, - offset: u32, - line_index: &LineIndex, - text: &str, -) -> Option { - if is_valid_jsonnet_identifier(&name) { - return Some(CompletionItem { - label: name, - kind: Some(CompletionItemKind::FIELD), - detail: Some(detail), - ..Default::default() - }); - } - - let start_offset = ByteOffset::new(u32::try_from(dot_pos).ok()?); - let end_offset = ByteOffset::new(offset); - let start = line_index.position(start_offset, text)?; - let end = line_index.position(end_offset, text)?; - let escaped = serde_json::to_string(&name).ok()?; - - Some(CompletionItem { - label: name, - kind: Some(CompletionItemKind::FIELD), - detail: Some(detail), - text_edit: Some(CompletionTextEdit::Edit(TextEdit { - range: Range { - start: start.into(), - end: end.into(), - }, - new_text: format!("[{escaped}]"), - })), - ..Default::default() - }) -} - -/// Find object fields for an identifier by looking up its definition. 
-fn find_object_fields_for_identifier( - root: &SyntaxNode, - identifier: &str, - offset: u32, -) -> Option> { - let text_size = rowan::TextSize::from(offset); - - // Search for local bindings with this name - for node in root.descendants() { - if node.kind() == SyntaxKind::STMT_LOCAL { - // Check if this binding is before our position - if node.text_range().end() > text_size { - continue; - } - - if let Some(stmt_local) = StmtLocal::cast(node.clone()) { - for bind in stmt_local.binds() { - match &bind { - jrsonnet_rowan_parser::nodes::Bind::BindDestruct(bd) => { - if let Some(fields) = check_bind_destruct_for_object(bd, identifier) { - return Some(fields); - } - } - jrsonnet_rowan_parser::nodes::Bind::BindFunction(bf) => { - // Functions don't have object fields in this context - let _ = bf; - } - } - } - } - } - } - - None -} - -fn find_bracket_lookup_target_expr_range( - root: &SyntaxNode, - identifier: &str, - key: &str, - offset: u32, -) -> Option { - let text_size = rowan::TextSize::from(offset); - - for node in root.descendants() { - if node.kind() != SyntaxKind::STMT_LOCAL { - continue; - } - if node.text_range().end() > text_size { - continue; - } - - let Some(stmt_local) = StmtLocal::cast(node.clone()) else { - continue; - }; - for bind in stmt_local.binds() { - let jrsonnet_rowan_parser::nodes::Bind::BindDestruct(bind_destruct) = bind else { - continue; - }; - if let Some(range) = - check_bind_destruct_for_bracket_target(&bind_destruct, identifier, key) - { - return Some(range); - } - } - } - - None -} - -/// Check if a `BindDestruct` is for the given identifier and extract object fields. 
-fn check_bind_destruct_for_object(bind: &BindDestruct, identifier: &str) -> Option> { - let destruct = bind.into()?; - - if let Destruct::DestructFull(full) = destruct { - let bind_name = full.name()?; - let ident = bind_name.ident_lit()?; - - if ident.text() != identifier { - return None; - } - - // Found the binding, now look at its value - // The value is in the parent BindDestruct - let value_expr = bind.value()?; - - // Check if the value is an object - extract_object_fields(value_expr.syntax()) - } else { - None - } -} - -fn check_bind_destruct_for_bracket_target( - bind: &BindDestruct, - identifier: &str, - key: &str, -) -> Option { - let destruct = bind.into()?; - let Destruct::DestructFull(full) = destruct else { - return None; - }; - let bind_name = full.name()?; - let ident = bind_name.ident_lit()?; - if ident.text() != identifier { - return None; - } - - let value_expr = bind.value()?; - find_object_field_expr_range(value_expr.syntax(), key) -} - -fn find_object_field_expr_range(expr: &SyntaxNode, key: &str) -> Option { - let obj_node = find_object_in_expr(expr)?; - let obj_body = ObjBody::cast(obj_node)?; - let ObjBody::ObjBodyMemberList(member_list) = obj_body else { - return None; - }; - - for member in member_list.members() { - let jrsonnet_rowan_parser::nodes::Member::MemberFieldNormal(field) = member else { - continue; - }; - let Some(field_name) = extract_field_name(&field) else { - continue; - }; - if field_name != key { - continue; - } - return Some(field.expr()?.syntax().text_range()); - } - - None -} - -/// Extract field names from an object expression. 
-fn extract_object_fields(expr: &SyntaxNode) -> Option> { - // The expression might be wrapped in Expr nodes - let obj_node = find_object_in_expr(expr)?; - - let obj_body = ObjBody::cast(obj_node)?; - - let mut fields = Vec::new(); - - // ObjBody contains members - if let ObjBody::ObjBodyMemberList(member_list) = obj_body { - for member in member_list.members() { - match member { - jrsonnet_rowan_parser::nodes::Member::MemberFieldNormal(field) => { - if let Some(name) = extract_field_name(&field) { - fields.push(name); - } - } - jrsonnet_rowan_parser::nodes::Member::MemberFieldMethod(method) => { - if let Some(field_name) = method.field_name() { - if let Some(name) = extract_field_name_from_field_name(&field_name) { - fields.push(name); - } - } - } - _ => {} - } - } - } - - if fields.is_empty() { - None - } else { - Some(fields) - } -} - -/// Find an object body node within an expression. -fn find_object_in_expr(node: &SyntaxNode) -> Option { - // Direct object body - if node.kind() == SyntaxKind::OBJ_BODY_MEMBER_LIST || node.kind() == SyntaxKind::OBJ_BODY_COMP { - return Some(node.clone()); - } - - // Look for ExprObject child - for child in node.children() { - if child.kind() == SyntaxKind::EXPR_OBJECT { - // Find the ObjBody inside - for obj_child in child.children() { - if obj_child.kind() == SyntaxKind::OBJ_BODY_MEMBER_LIST - || obj_child.kind() == SyntaxKind::OBJ_BODY_COMP - { - return Some(obj_child); - } - } - } - - // Recurse into Expr nodes - if child.kind() == SyntaxKind::EXPR { - if let Some(found) = find_object_in_expr(&child) { - return Some(found); - } - } - } - - None -} - -/// Extract field name from a `MemberFieldNormal`. -fn extract_field_name(field: &MemberFieldNormal) -> Option { - let field_name = field.field_name()?; - extract_field_name_from_field_name(&field_name) -} - -/// Extract name string from a `FieldName` node. 
-fn extract_field_name_from_field_name(field_name: &FieldName) -> Option { - match field_name { - FieldName::FieldNameFixed(fixed) => { - // FieldNameFixed has id() which returns Name - if let Some(name) = fixed.id() { - if let Some(ident) = name.ident_lit() { - return Some(ident.text().to_string()); - } - } - // Or it could have a text() string - if let Some(text) = fixed.text() { - // Remove quotes from text - let s = text.syntax().text().to_string(); - let s = s.trim_matches('"').trim_matches('\''); - return Some(s.to_string()); - } - None - } - FieldName::FieldNameDynamic(_) => { - // Dynamic field names like [expr] can't be completed statically - None - } - } -} diff --git a/crates/jrsonnet-lsp-handlers/src/completion/fields/items.rs b/crates/jrsonnet-lsp-handlers/src/completion/fields/items.rs new file mode 100644 index 00000000..afe4657e --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/completion/fields/items.rs @@ -0,0 +1,68 @@ +use jrsonnet_lsp_document::{is_valid_jsonnet_identifier, ByteOffset, LineIndex}; +use jrsonnet_rowan_parser::{ + nodes::{FieldName, MemberFieldNormal}, + AstToken, +}; +use lsp_types::{CompletionItem, CompletionItemKind, CompletionTextEdit, Range, TextEdit}; + +pub(super) fn field_completion_item( + name: String, + detail: String, + dot_pos: usize, + offset: u32, + line_index: &LineIndex, + text: &str, +) -> Option { + if is_valid_jsonnet_identifier(&name) { + return Some(CompletionItem { + label: name, + kind: Some(CompletionItemKind::FIELD), + detail: Some(detail), + ..Default::default() + }); + } + + let start_offset = ByteOffset::new(u32::try_from(dot_pos).ok()?); + let end_offset = ByteOffset::new(offset); + let start = line_index.position(start_offset, text)?; + let end = line_index.position(end_offset, text)?; + let escaped = serde_json::to_string(&name).ok()?; + + Some(CompletionItem { + label: name, + kind: Some(CompletionItemKind::FIELD), + detail: Some(detail), + text_edit: Some(CompletionTextEdit::Edit(TextEdit { + 
range: Range { + start: start.into(), + end: end.into(), + }, + new_text: format!("[{escaped}]"), + })), + ..Default::default() + }) +} + +pub(super) fn extract_field_name(field: &MemberFieldNormal) -> Option { + let field_name = field.field_name()?; + extract_field_name_from_field_name(&field_name) +} + +pub(super) fn extract_field_name_from_field_name(field_name: &FieldName) -> Option { + match field_name { + FieldName::FieldNameFixed(fixed) => { + if let Some(name) = fixed.id() { + if let Some(ident) = name.ident_lit() { + return Some(ident.text().to_string()); + } + } + if let Some(text) = fixed.text() { + let s = text.syntax().text().to_string(); + let s = s.trim_matches('"').trim_matches('\''); + return Some(s.to_string()); + } + None + } + FieldName::FieldNameDynamic(_) => None, + } +} diff --git a/crates/jrsonnet-lsp-handlers/src/completion/fields/lookup.rs b/crates/jrsonnet-lsp-handlers/src/completion/fields/lookup.rs new file mode 100644 index 00000000..4372b6fe --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/completion/fields/lookup.rs @@ -0,0 +1,191 @@ +use jrsonnet_rowan_parser::{ + nodes::{BindDestruct, Destruct, ObjBody, StmtLocal}, + AstNode, SyntaxKind, SyntaxNode, +}; + +use super::items::{extract_field_name, extract_field_name_from_field_name}; + +pub(super) fn find_object_fields_for_identifier( + root: &SyntaxNode, + identifier: &str, + offset: u32, +) -> Option> { + let text_size = rowan::TextSize::from(offset); + + for node in root.descendants() { + if node.kind() == SyntaxKind::STMT_LOCAL { + if node.text_range().end() > text_size { + continue; + } + + if let Some(stmt_local) = StmtLocal::cast(node.clone()) { + for bind in stmt_local.binds() { + match &bind { + jrsonnet_rowan_parser::nodes::Bind::BindDestruct(bd) => { + if let Some(fields) = check_bind_destruct_for_object(bd, identifier) { + return Some(fields); + } + } + jrsonnet_rowan_parser::nodes::Bind::BindFunction(bf) => { + let _ = bf; + } + } + } + } + } + } + + None +} + 
+pub(super) fn find_bracket_lookup_target_expr_range( + root: &SyntaxNode, + identifier: &str, + key: &str, + offset: u32, +) -> Option { + let text_size = rowan::TextSize::from(offset); + + for node in root.descendants() { + if node.kind() != SyntaxKind::STMT_LOCAL { + continue; + } + if node.text_range().end() > text_size { + continue; + } + + let Some(stmt_local) = StmtLocal::cast(node.clone()) else { + continue; + }; + for bind in stmt_local.binds() { + let jrsonnet_rowan_parser::nodes::Bind::BindDestruct(bind_destruct) = bind else { + continue; + }; + if let Some(range) = + check_bind_destruct_for_bracket_target(&bind_destruct, identifier, key) + { + return Some(range); + } + } + } + + None +} + +fn check_bind_destruct_for_object(bind: &BindDestruct, identifier: &str) -> Option> { + let destruct = bind.into()?; + + if let Destruct::DestructFull(full) = destruct { + let bind_name = full.name()?; + let ident = bind_name.ident_lit()?; + + if ident.text() != identifier { + return None; + } + + let value_expr = bind.value()?; + extract_object_fields(value_expr.syntax()) + } else { + None + } +} + +fn check_bind_destruct_for_bracket_target( + bind: &BindDestruct, + identifier: &str, + key: &str, +) -> Option { + let destruct = bind.into()?; + let Destruct::DestructFull(full) = destruct else { + return None; + }; + let bind_name = full.name()?; + let ident = bind_name.ident_lit()?; + if ident.text() != identifier { + return None; + } + + let value_expr = bind.value()?; + find_object_field_expr_range(value_expr.syntax(), key) +} + +fn find_object_field_expr_range(expr: &SyntaxNode, key: &str) -> Option { + let obj_node = find_object_in_expr(expr)?; + let obj_body = ObjBody::cast(obj_node)?; + let ObjBody::ObjBodyMemberList(member_list) = obj_body else { + return None; + }; + + for member in member_list.members() { + let jrsonnet_rowan_parser::nodes::Member::MemberFieldNormal(field) = member else { + continue; + }; + let Some(field_name) = extract_field_name(&field) 
else { + continue; + }; + if field_name != key { + continue; + } + return Some(field.expr()?.syntax().text_range()); + } + + None +} + +pub(super) fn extract_object_fields(expr: &SyntaxNode) -> Option> { + let obj_node = find_object_in_expr(expr)?; + let obj_body = ObjBody::cast(obj_node)?; + let mut fields = Vec::new(); + + if let ObjBody::ObjBodyMemberList(member_list) = obj_body { + for member in member_list.members() { + match member { + jrsonnet_rowan_parser::nodes::Member::MemberFieldNormal(field) => { + if let Some(name) = extract_field_name(&field) { + fields.push(name); + } + } + jrsonnet_rowan_parser::nodes::Member::MemberFieldMethod(method) => { + if let Some(field_name) = method.field_name() { + if let Some(name) = extract_field_name_from_field_name(&field_name) { + fields.push(name); + } + } + } + _ => {} + } + } + } + + if fields.is_empty() { + None + } else { + Some(fields) + } +} + +fn find_object_in_expr(node: &SyntaxNode) -> Option { + if node.kind() == SyntaxKind::OBJ_BODY_MEMBER_LIST || node.kind() == SyntaxKind::OBJ_BODY_COMP { + return Some(node.clone()); + } + + for child in node.children() { + if child.kind() == SyntaxKind::EXPR_OBJECT { + for obj_child in child.children() { + if obj_child.kind() == SyntaxKind::OBJ_BODY_MEMBER_LIST + || obj_child.kind() == SyntaxKind::OBJ_BODY_COMP + { + return Some(obj_child); + } + } + } + + if child.kind() == SyntaxKind::EXPR { + if let Some(found) = find_object_in_expr(&child) { + return Some(found); + } + } + } + + None +} diff --git a/crates/jrsonnet-lsp-handlers/src/completion/fields/mod.rs b/crates/jrsonnet-lsp-handlers/src/completion/fields/mod.rs new file mode 100644 index 00000000..4bea06f8 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/completion/fields/mod.rs @@ -0,0 +1,203 @@ +//! Object field completions for `obj.` patterns. 
+ +mod items; +mod lookup; +mod parse; + +use jrsonnet_lsp_document::{token_at_offset, ByteOffset, Document}; +use jrsonnet_lsp_inference::TypeAnalysis; +use jrsonnet_lsp_scope::ident_resolves_to_builtin_std; +use jrsonnet_rowan_parser::{AstNode, SyntaxKind}; +use lsp_types::CompletionItem; + +use self::{ + items::field_completion_item, + lookup::{ + extract_object_fields, find_bracket_lookup_target_expr_range, + find_object_fields_for_identifier, + }, + parse::{bracket_index_anchor, expression_range_before_dot, parse_bracket_lookup}, +}; + +pub fn check_object_field_completion( + document: &Document, + text: &str, + offset: u32, + analysis: &TypeAnalysis, +) -> Option> { + let offset_usize = offset as usize; + let before_cursor = &text[..offset_usize]; + let dot_pos = before_cursor.rfind('.')?; + + let after_dot = &before_cursor[dot_pos + 1..]; + if after_dot.contains(char::is_whitespace) && !after_dot.trim().is_empty() { + return None; + } + + let before_dot = &before_cursor[..dot_pos]; + let ident_start = before_dot + .rfind(|c: char| !c.is_ascii_alphanumeric() && c != '_') + .map_or(0, |i| i + 1); + let identifier = before_dot[ident_start..].trim(); + let ast = document.ast(); + let line_index = document.line_index(); + + if identifier == "std" { + let before_dot_pos = if dot_pos > 0 { dot_pos - 1 } else { 0 }; + let before_dot_offset = ByteOffset::new(u32::try_from(before_dot_pos).ok()?); + let token = token_at_offset(ast.syntax(), before_dot_offset)?; + if token.kind() == SyntaxKind::IDENT && ident_resolves_to_builtin_std(&token) { + return None; + } + } + + let prefix = after_dot.trim(); + let before_dot_pos = if dot_pos > 0 { dot_pos - 1 } else { 0 }; + let before_dot_offset = u32::try_from(before_dot_pos).ok()?; + let dot_offset = u32::try_from(dot_pos).ok()?; + + if let Some(expr_range) = + expression_range_before_dot(ast.syntax(), before_dot_offset, dot_offset) + { + if let Some(fields) = analysis.fields_for_range(expr_range) { + let store = 
analysis.store(); + let items = fields + .into_iter() + .filter(|(name, _)| prefix.is_empty() || name.starts_with(prefix)) + .filter_map(|(name, ty)| { + field_completion_item( + name, + store.display(ty), + dot_pos, + offset, + line_index, + text, + ) + }) + .collect::>(); + + if !items.is_empty() { + return Some(items); + } + } + } + + if let Some(index_anchor) = bracket_index_anchor(before_cursor, dot_pos) { + let index_anchor_text_size = rowan::TextSize::from(index_anchor); + if let Some(fields) = analysis.fields_at_position(ast.syntax(), index_anchor_text_size) { + let store = analysis.store(); + let items = fields + .into_iter() + .filter(|(name, _)| prefix.is_empty() || name.starts_with(prefix)) + .filter_map(|(name, ty)| { + field_completion_item( + name, + store.display(ty), + dot_pos, + offset, + line_index, + text, + ) + }) + .collect::>(); + + if !items.is_empty() { + return Some(items); + } + } + } + + let before_dot_text_size = rowan::TextSize::from(before_dot_offset); + if let Some(fields) = analysis.fields_at_position(ast.syntax(), before_dot_text_size) { + let store = analysis.store(); + let items = fields + .into_iter() + .filter(|(name, _)| prefix.is_empty() || name.starts_with(prefix)) + .filter_map(|(name, ty)| { + field_completion_item(name, store.display(ty), dot_pos, offset, line_index, text) + }) + .collect::>(); + + if !items.is_empty() { + return Some(items); + } + } + + if let Some((base_identifier, key)) = parse_bracket_lookup(before_dot) { + if let Some(target_range) = + find_bracket_lookup_target_expr_range(ast.syntax(), &base_identifier, &key, dot_offset) + { + if let Some(fields) = analysis.fields_for_range(target_range) { + let store = analysis.store(); + let items = fields + .into_iter() + .filter(|(name, _)| prefix.is_empty() || name.starts_with(prefix)) + .filter_map(|(name, ty)| { + field_completion_item( + name, + store.display(ty), + dot_pos, + offset, + line_index, + text, + ) + }) + .collect::>(); + if !items.is_empty() { + 
return Some(items); + } + } + + if let Some(target_node) = ast + .syntax() + .descendants() + .find(|node| node.text_range() == target_range) + { + if let Some(fields) = extract_object_fields(&target_node) { + let items = fields + .into_iter() + .filter(|name| prefix.is_empty() || name.starts_with(prefix)) + .filter_map(|name| { + field_completion_item( + name, + "object field".to_string(), + dot_pos, + offset, + line_index, + text, + ) + }) + .collect::>(); + if !items.is_empty() { + return Some(items); + } + } + } + } + } + + if identifier.is_empty() { + return None; + } + + let fields = find_object_fields_for_identifier(ast.syntax(), identifier, dot_offset)?; + let items = fields + .into_iter() + .filter(|f| prefix.is_empty() || f.starts_with(prefix)) + .filter_map(|name| { + field_completion_item( + name, + "object field".to_string(), + dot_pos, + offset, + line_index, + text, + ) + }) + .collect::>(); + + if items.is_empty() { + return None; + } + Some(items) +} diff --git a/crates/jrsonnet-lsp-handlers/src/completion/fields/parse.rs b/crates/jrsonnet-lsp-handlers/src/completion/fields/parse.rs new file mode 100644 index 00000000..cea94c1c --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/completion/fields/parse.rs @@ -0,0 +1,71 @@ +use jrsonnet_lsp_document::{token_at_offset, ByteOffset}; +use jrsonnet_rowan_parser::{nodes::Expr, AstNode, SyntaxNode}; + +pub(super) fn expression_range_before_dot( + root: &SyntaxNode, + before_dot_offset: u32, + dot_offset: u32, +) -> Option { + let token = token_at_offset(root, ByteOffset::new(before_dot_offset))?; + let dot = rowan::TextSize::from(dot_offset); + token + .parent_ancestors() + .filter_map(Expr::cast) + .map(|expr| expr.syntax().text_range()) + .filter(|range| range.end() == dot) + .min_by_key(|range| range.len()) +} + +pub(super) fn bracket_index_anchor(before_cursor: &str, dot_pos: usize) -> Option { + let before_dot = before_cursor.get(..dot_pos)?; + let mut bracket_depth = 0usize; + + for (index, byte) 
in before_dot.as_bytes().iter().enumerate().rev() { + match *byte { + b']' => bracket_depth += 1, + b'[' => { + if bracket_depth == 0 { + continue; + } + bracket_depth -= 1; + if bracket_depth == 0 { + return u32::try_from(index).ok(); + } + } + _ => {} + } + } + + None +} + +pub(super) fn parse_bracket_lookup(before_dot: &str) -> Option<(String, String)> { + let trimmed = before_dot.trim_end(); + let close_bracket = trimmed.rfind(']')?; + if close_bracket + 1 != trimmed.len() { + return None; + } + + let open_bracket = trimmed[..close_bracket].rfind('[')?; + let base_expr = trimmed[..open_bracket].trim_end(); + let key_expr = trimmed[open_bracket + 1..close_bracket].trim(); + let key = key_expr + .strip_prefix('"') + .and_then(|value| value.strip_suffix('"')) + .or_else(|| { + key_expr + .strip_prefix('\'') + .and_then(|value| value.strip_suffix('\'')) + })? + .to_string(); + + let ident_start = base_expr + .rfind(|c: char| !c.is_ascii_alphanumeric() && c != '_') + .map_or(0, |index| index + 1); + let identifier = base_expr[ident_start..].trim(); + if identifier.is_empty() { + return None; + } + + Some((identifier.to_string(), key)) +} From 0c0c9e4ba2784948fc8139686e4a12e2568e88b4 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Mon, 16 Feb 2026 16:01:14 +0000 Subject: [PATCH 136/210] refactor(lsp-handlers): split completion dispatch by feature Break completion dispatch into focused stdlib, imports, object field, and scope fallback modules behind a thin orchestrator. Move dispatch tests into the corresponding feature modules so each behavior is validated alongside its implementation. Keep `dispatch.rs` as the integration entrypoint and preserve existing completion behavior and test coverage. 
--- .../src/completion/handler/dispatch.rs | 785 +----------------- .../completion/handler/dispatch/imports.rs | 69 ++ .../handler/dispatch/object_fields.rs | 354 ++++++++ .../src/completion/handler/dispatch/scope.rs | 269 ++++++ .../src/completion/handler/dispatch/stdlib.rs | 68 ++ .../completion/handler/dispatch/test_util.rs | 14 + 6 files changed, 796 insertions(+), 763 deletions(-) create mode 100644 crates/jrsonnet-lsp-handlers/src/completion/handler/dispatch/imports.rs create mode 100644 crates/jrsonnet-lsp-handlers/src/completion/handler/dispatch/object_fields.rs create mode 100644 crates/jrsonnet-lsp-handlers/src/completion/handler/dispatch/scope.rs create mode 100644 crates/jrsonnet-lsp-handlers/src/completion/handler/dispatch/stdlib.rs create mode 100644 crates/jrsonnet-lsp-handlers/src/completion/handler/dispatch/test_util.rs diff --git a/crates/jrsonnet-lsp-handlers/src/completion/handler/dispatch.rs b/crates/jrsonnet-lsp-handlers/src/completion/handler/dispatch.rs index c026b409..b98e1fd7 100644 --- a/crates/jrsonnet-lsp-handlers/src/completion/handler/dispatch.rs +++ b/crates/jrsonnet-lsp-handlers/src/completion/handler/dispatch.rs @@ -4,18 +4,13 @@ use jrsonnet_lsp_document::{token_at_offset, Document, LspPosition}; use jrsonnet_lsp_inference::{SemanticArtifacts, TypeAnalysis}; use jrsonnet_rowan_parser::AstNode; use lsp_types::CompletionList; -use tracing::debug; -use super::{ - super::{ - fields::check_object_field_completion, - helpers::{get_identifier_prefix, is_inside_object}, - imports::check_import_completion, - locals::get_local_completions_with_semantic, - stdlib::check_stdlib_completion, - }, - keywords::{add_object_keyword_completions, add_std_completion}, -}; +mod imports; +mod object_fields; +mod scope; +mod stdlib; +#[cfg(test)] +mod test_util; pub(super) fn completion_dispatch( document: &Document, @@ -27,766 +22,30 @@ pub(super) fn completion_dispatch( ) -> Option { let text = document.text(); let line_index = document.line_index(); - 
- // Convert LSP position to byte offset let offset = line_index.offset(position, text)?; - - // Get the AST root let ast = document.ast(); - - // Find the token at or before the offset (may be None at whitespace/EOF) let token = token_at_offset(ast.syntax(), offset); + let text_offset: rowan::TextSize = offset.into(); - // Check if we're completing after `std.` - if let Some(ref t) = token { - if let Some(items) = check_stdlib_completion(t, text, offset.into()) { - debug!(count = items.len(), "providing stdlib completions"); - return Some(CompletionList { - is_incomplete: false, - items, - }); - } - } - - // Check if we're inside an import string - if let Some(items) = check_import_completion(text, offset.into(), doc_path, import_roots) { - debug!(count = items.len(), "providing import completions"); - return Some(CompletionList { - is_incomplete: false, - items, - }); - } - - // Check if we're completing after `obj.` (object field access) - if let Some(items) = check_object_field_completion(document, text, offset.into(), analysis) { - debug!(count = items.len(), "providing object field completions"); - return Some(CompletionList { - is_incomplete: false, - items, - }); - } - - // For general completion, provide local variables in scope. 
- let mut items = - get_local_completions_with_semantic(document, position, text, offset.into(), semantic); - - add_std_completion(&mut items); - - if is_inside_object(ast.syntax(), offset) { - let prefix = get_identifier_prefix(text, offset.into()); - add_object_keyword_completions(&mut items, prefix); - } - - Some(CompletionList { - is_incomplete: false, - items, - }) -} - -#[cfg(test)] -mod tests { - use std::sync::Arc; - - use jrsonnet_lsp_document::{DocVersion, Document}; - use jrsonnet_lsp_inference::TypeAnalysis; - use jrsonnet_lsp_types::GlobalTyStore; - use lsp_types::{CompletionItem, CompletionItemKind}; - - use super::super::completion; - use crate::completion::imports::find_import_string_start; - - /// Create a `TypeAnalysis` for test purposes. - fn test_analysis(doc: &Document) -> TypeAnalysis { - let global_types = Arc::new(GlobalTyStore::new()); - TypeAnalysis::analyze_with_global(doc, global_types) + if let Some(result) = stdlib::try_stdlib_completion(token.as_ref(), text, text_offset) { + return Some(result); } - #[test] - fn test_stdlib_completion_with_prefix_xor() { - // Use prefix "xo" which only matches "xor" in stdlib - let code = "std.xo"; - let doc = Document::new(code.to_string(), DocVersion::new(1)); - let analysis = test_analysis(&doc); - - let pos = (0, 6).into(); - - let list = completion(&doc, pos, None, &analysis).expect("should get completions"); - - // Extract just labels for comparison (full CompletionItem has dynamic detail text) - let labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); - assert_eq!(labels, vec!["xor"]); + if let Some(result) = imports::try_import_completion(text, text_offset, doc_path, import_roots) + { + return Some(result); } - #[test] - fn test_stdlib_completion_with_prefix_x() { - // Use prefix "x" which matches "xnor" and "xor" in stdlib - let code = "std.x"; - let doc = Document::new(code.to_string(), DocVersion::new(1)); - let analysis = test_analysis(&doc); - - let pos = (0, 
5).into(); - - let list = completion(&doc, pos, None, &analysis).expect("should get completions"); - - // Extract labels and sort for comparison - let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); - labels.sort_unstable(); - assert_eq!(labels, vec!["xnor", "xor"]); - } - - #[test] - fn test_stdlib_completion_with_alias_prefix_xo() { - let code = "local s = std; s.xo"; - let doc = Document::new(code.to_string(), DocVersion::new(1)); - let analysis = test_analysis(&doc); - - let pos = ( - 0, - u32::try_from(code.len()).expect("test code length fits in u32"), - ) - .into(); - - let list = completion(&doc, pos, None, &analysis).expect("should get completions"); - let labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); - assert_eq!(labels, vec!["xor"]); - } - - #[test] - fn test_shadowed_std_uses_object_field_completion() { - let code = "local std = { foo: 1 }; std."; - let doc = Document::new(code.to_string(), DocVersion::new(1)); - let analysis = test_analysis(&doc); - - let pos = ( - 0, - u32::try_from(code.len()).expect("test code length fits in u32"), - ) - .into(); - - let result = completion(&doc, pos, None, &analysis).expect("should get completions"); - assert_eq!( - result.items, - vec![CompletionItem { - label: "foo".to_string(), - label_details: None, - kind: Some(CompletionItemKind::FIELD), - detail: Some("number".to_string()), - documentation: None, - deprecated: None, - preselect: None, - sort_text: None, - filter_text: None, - insert_text: None, - insert_text_format: None, - insert_text_mode: None, - text_edit: None, - additional_text_edits: None, - command: None, - commit_characters: None, - data: None, - tags: None, - }] - ); + if let Some(result) = + object_fields::try_object_field_completion(document, text, text_offset, analysis) + { + return Some(result); } - #[test] - fn test_completion_includes_std_and_local() { - // Simple case: one local variable, position at end where std and x are visible 
- let code = "local x = 1; "; - let doc = Document::new(code.to_string(), DocVersion::new(1)); - let analysis = test_analysis(&doc); - - let pos = (0, 13).into(); - - let list = completion(&doc, pos, None, &analysis).expect("should get completions"); - - // Extract labels and sort for comparison - let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); - labels.sort_unstable(); - assert_eq!(labels, vec!["std", "x"]); - } - - #[test] - fn test_completion_item_has_documentation() { - // Use a simple object where we can assert the full completion list - let code = "local obj = { foo: 1 }; obj."; - let doc = Document::new(code.to_string(), DocVersion::new(1)); - let analysis = test_analysis(&doc); - - let pos = (0, 28).into(); - - let result = completion(&doc, pos, None, &analysis).expect("should get completions"); - - // Assert the full completion list for object field access - assert_eq!( - result.items, - vec![CompletionItem { - label: "foo".to_string(), - label_details: None, - kind: Some(CompletionItemKind::FIELD), - detail: Some("number".to_string()), - documentation: None, - deprecated: None, - preselect: None, - sort_text: None, - filter_text: None, - insert_text: None, - insert_text_format: None, - insert_text_mode: None, - text_edit: None, - additional_text_edits: None, - command: None, - commit_characters: None, - data: None, - tags: None, - }] - ); - } - - #[test] - fn test_local_variable_completion() { - let code = "local foo = 1; local bar = 2; "; - // ^ cursor here (character 30) - let doc = Document::new(code.to_string(), DocVersion::new(1)); - let analysis = test_analysis(&doc); - - let pos = (0, 30).into(); - - let list = completion(&doc, pos, None, &analysis).expect("should get completions"); - let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); - labels.sort_unstable(); - assert_eq!(labels, vec!["bar", "foo", "std"]); - } - - #[test] - fn test_function_parameter_completion() { - let code = 
"local f(x, y) = x + "; - // ^ cursor here (character 20) - let doc = Document::new(code.to_string(), DocVersion::new(1)); - let analysis = test_analysis(&doc); - - let pos = (0, 20).into(); - - let list = completion(&doc, pos, None, &analysis).expect("should get completions"); - let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); - labels.sort_unstable(); - // x, y are params; f is in scope; std is always available - assert_eq!(labels, vec!["f", "std", "x", "y"]); - } - - #[test] - fn test_completion_with_prefix() { - let code = "local foo = 1; local bar = 2; f"; - // ^ cursor at 'f' (character 31) - let doc = Document::new(code.to_string(), DocVersion::new(1)); - let analysis = test_analysis(&doc); - - let pos = (0, 31).into(); - - let list = completion(&doc, pos, None, &analysis).expect("should get completions"); - let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); - labels.sort_unstable(); - // 'foo' starts with 'f', and 'std' is always included (no prefix filtering on keywords) - assert_eq!(labels, vec!["foo", "std"]); - } - - #[test] - fn test_object_field_completion() { - let code = "local obj = { foo: 1, bar: 2 }; obj."; - // ^ cursor here (character 36) - let doc = Document::new(code.to_string(), DocVersion::new(1)); - let analysis = test_analysis(&doc); - - let pos = (0, 36).into(); - - let list = - completion(&doc, pos, None, &analysis).expect("should get object field completions"); - let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); - labels.sort_unstable(); - assert_eq!(labels, vec!["bar", "foo"]); - } - - #[test] - fn test_object_field_completion_with_prefix() { - let code = "local obj = { foo: 1, bar: 2 }; obj.f"; - // ^ cursor here (character 37) - let doc = Document::new(code.to_string(), DocVersion::new(1)); - let analysis = test_analysis(&doc); - - let pos = (0, 37).into(); - - let list = completion(&doc, pos, None, &analysis) - .expect("should get 
object field completions with prefix"); - let labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); - // Only 'foo' starts with 'f' - assert_eq!(labels, vec!["foo"]); - } - - #[test] - fn test_object_field_completion_non_identifier_uses_bracket_text_edit() { - let code = r#"local obj = { "my-field": 1, normal: 2 }; obj."#; - let doc = Document::new(code.to_string(), DocVersion::new(1)); - let analysis = test_analysis(&doc); - let pos = ( - 0, - u32::try_from(code.len()).expect("test code length fits in u32"), - ) - .into(); - let dot = u32::try_from(code.rfind('.').expect("dot should exist")).unwrap(); - let end = u32::try_from(code.len()).unwrap(); - - let mut result = completion(&doc, pos, None, &analysis).expect("should get completions"); - result - .items - .sort_by(|left, right| left.label.cmp(&right.label)); - - assert_eq!( - result.items, - vec![ - CompletionItem { - label: "my-field".to_string(), - label_details: None, - kind: Some(CompletionItemKind::FIELD), - detail: Some("number".to_string()), - documentation: None, - deprecated: None, - preselect: None, - sort_text: None, - filter_text: None, - insert_text: None, - insert_text_format: None, - insert_text_mode: None, - text_edit: Some(lsp_types::CompletionTextEdit::Edit(lsp_types::TextEdit { - range: lsp_types::Range { - start: lsp_types::Position::new(0, dot), - end: lsp_types::Position::new(0, end), - }, - new_text: r#"["my-field"]"#.to_string(), - })), - additional_text_edits: None, - command: None, - commit_characters: None, - data: None, - tags: None, - }, - CompletionItem { - label: "normal".to_string(), - label_details: None, - kind: Some(CompletionItemKind::FIELD), - detail: Some("number".to_string()), - documentation: None, - deprecated: None, - preselect: None, - sort_text: None, - filter_text: None, - insert_text: None, - insert_text_format: None, - insert_text_mode: None, - text_edit: None, - additional_text_edits: None, - command: None, - commit_characters: None, - data: 
None, - tags: None, - }, - ] - ); - } - - #[test] - fn test_object_field_completion_non_identifier_prefix_rewrites_dot_expression() { - let code = r#"local obj = { "my-field": 1, normal: 2 }; obj.my"#; - let doc = Document::new(code.to_string(), DocVersion::new(1)); - let analysis = test_analysis(&doc); - let pos = ( - 0, - u32::try_from(code.len()).expect("test code length fits in u32"), - ) - .into(); - let dot = u32::try_from(code.rfind('.').expect("dot should exist")).unwrap(); - let end = u32::try_from(code.len()).unwrap(); - - let result = completion(&doc, pos, None, &analysis).expect("should get completions"); - assert_eq!( - result.items, - vec![CompletionItem { - label: "my-field".to_string(), - label_details: None, - kind: Some(CompletionItemKind::FIELD), - detail: Some("number".to_string()), - documentation: None, - deprecated: None, - preselect: None, - sort_text: None, - filter_text: None, - insert_text: None, - insert_text_format: None, - insert_text_mode: None, - text_edit: Some(lsp_types::CompletionTextEdit::Edit(lsp_types::TextEdit { - range: lsp_types::Range { - start: lsp_types::Position::new(0, dot), - end: lsp_types::Position::new(0, end), - }, - new_text: r#"["my-field"]"#.to_string(), - })), - additional_text_edits: None, - command: None, - commit_characters: None, - data: None, - tags: None, - }] - ); - } - - #[test] - fn test_import_string_detection() { - // Test that we correctly detect import string context - // import " - cursor inside open string after import keyword - let text1 = r#"import ""#; - assert_eq!(find_import_string_start(text1), Some(8)); // position after opening quote - - // importstr " - cursor inside open string after importstr keyword - let text2 = r#"importstr ""#; - assert_eq!(find_import_string_start(text2), Some(11)); // position after opening quote - - // local x = " - not an import, just a regular string - let text3 = r#"local x = ""#; - assert_eq!(find_import_string_start(text3), None); - - // import 
"foo.jsonnet" - closed string, not inside import path - let text4 = r#"import "foo.jsonnet""#; - assert_eq!(find_import_string_start(text4), None); - } - - #[test] - fn test_import_completion_with_path() { - use tempfile::TempDir; - - // Create a temp directory with some files - let temp_dir = TempDir::new().unwrap(); - let temp_path = temp_dir.path(); - - // Create test files - std::fs::write(temp_path.join("utils.libsonnet"), "{}").unwrap(); - std::fs::write(temp_path.join("config.jsonnet"), "{}").unwrap(); - std::fs::create_dir(temp_path.join("lib")).unwrap(); - std::fs::write(temp_path.join("lib/helper.libsonnet"), "{}").unwrap(); - - // Create a document in the temp directory - let doc_path = temp_path.join("main.jsonnet"); - let code = r#"import ""#; - // ^ cursor here (character 8) - let doc = Document::new(code.to_string(), DocVersion::new(1)); - let analysis = test_analysis(&doc); - - let pos = (0, 8).into(); - - let list = completion(&doc, pos, Some(&doc_path), &analysis) - .expect("should get import completions"); - let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); - labels.sort_unstable(); - assert_eq!(labels, vec!["config.jsonnet", "lib", "utils.libsonnet"]); - } - - #[test] - fn test_for_comprehension_variable_completion() { - let code = "[x for x in [1, 2, 3] if ]"; - // ^ cursor here (character 25) - let doc = Document::new(code.to_string(), DocVersion::new(1)); - let analysis = test_analysis(&doc); - - let pos = (0, 25).into(); - - let list = completion(&doc, pos, None, &analysis) - .expect("should get completions in for comprehension"); - let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); - labels.sort_unstable(); - assert_eq!(labels, vec!["std", "x"]); - } - - #[test] - fn test_object_local_completion() { - let code = "{ local helper = 1, field: }"; - // ^ cursor here (character 27) - let doc = Document::new(code.to_string(), DocVersion::new(1)); - let analysis = 
test_analysis(&doc); - - let pos = (0, 27).into(); - - let list = - completion(&doc, pos, None, &analysis).expect("should get completions in object"); - let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); - labels.sort_unstable(); - assert_eq!(labels, vec!["$", "helper", "self", "std", "super"]); - } - - #[test] - fn test_nested_function_completion() { - // Cursor inside inner function body, after "a + " - let code = "local outer(a) = local inner(b) = a + ; inner(1); outer(1)"; - // ^ cursor at position 37 - let doc = Document::new(code.to_string(), DocVersion::new(1)); - let analysis = test_analysis(&doc); - - let pos = (0, 37).into(); - - let list = completion(&doc, pos, None, &analysis) - .expect("should get completions in nested function"); - let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); - labels.sort_unstable(); - // a (outer param), b (inner param), std - assert_eq!(labels, vec!["a", "b", "std"]); - } - - #[test] - fn test_completion_at_eof() { - let code = "local x = 1;\n"; - let doc = Document::new(code.to_string(), DocVersion::new(1)); - let analysis = test_analysis(&doc); - let pos = (1, 0).into(); - - let list = completion(&doc, pos, None, &analysis).expect("should get completions at EOF"); - let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); - labels.sort_unstable(); - assert_eq!(labels, vec!["std", "x"]); - } - - #[test] - fn test_completion_in_object() { - let code = "{ foo: 1, bar: }"; - let doc = Document::new(code.to_string(), DocVersion::new(1)); - let analysis = test_analysis(&doc); - let pos = (0, 14).into(); - - let result = completion(&doc, pos, None, &analysis); - let list = result.expect("Should get completions in object"); - - let mut items: Vec<_> = list.items; - items.sort_by(|a, b| a.label.cmp(&b.label)); - - assert_eq!( - items, - vec![ - CompletionItem { - label: "$".to_string(), - kind: Some(CompletionItemKind::KEYWORD), - detail: 
Some("Reference to root object".to_string()), - ..Default::default() - }, - CompletionItem { - label: "self".to_string(), - kind: Some(CompletionItemKind::KEYWORD), - detail: Some("Reference to current object".to_string()), - ..Default::default() - }, - CompletionItem { - label: "std".to_string(), - kind: Some(CompletionItemKind::MODULE), - detail: Some("Jsonnet standard library".to_string()), - ..Default::default() - }, - CompletionItem { - label: "super".to_string(), - kind: Some(CompletionItemKind::KEYWORD), - detail: Some("Reference to inherited object".to_string()), - ..Default::default() - }, - ] - ); - } - - #[test] - fn test_no_self_super_outside_object() { - let code = "local x = 1; x"; - let doc = Document::new(code.to_string(), DocVersion::new(1)); - let analysis = test_analysis(&doc); - let pos = (0, 13).into(); - - let result = completion(&doc, pos, None, &analysis); - let list = result.expect("Should get completions"); - - let mut items: Vec<_> = list.items; - items.sort_by(|a, b| a.label.cmp(&b.label)); - - assert_eq!( - items, - vec![ - CompletionItem { - label: "std".to_string(), - kind: Some(CompletionItemKind::MODULE), - detail: Some("Jsonnet standard library".to_string()), - ..Default::default() - }, - CompletionItem { - label: "x".to_string(), - kind: Some(CompletionItemKind::VARIABLE), - detail: Some("local variable".to_string()), - ..Default::default() - }, - ] - ); - } - - #[test] - fn test_multiple_for_specs_completion() { - // Multiple for specs in array comprehension - let code = "[x + y for x in [1] for y in [2]]"; - // ^ cursor at position 3 (inside expression) - let doc = Document::new(code.to_string(), DocVersion::new(1)); - let analysis = test_analysis(&doc); - - let pos = (0, 3).into(); - - let list = completion(&doc, pos, None, &analysis) - .expect("should get completions in multi-for comprehension"); - let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); - labels.sort_unstable(); - assert_eq!(labels, 
vec!["std", "x", "y"]); - } - - #[test] - fn test_completion_with_syntax_error() { - // Incomplete expression with syntax error - let code = "local x = 1;\nlocal y = 2;\n{ foo: x +"; - // ^ cursor at position 9 - let doc = Document::new(code.to_string(), DocVersion::new(1)); - let analysis = test_analysis(&doc); - - let pos = (2, 10).into(); - - let list = completion(&doc, pos, None, &analysis) - .expect("should get completions despite syntax error"); - let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); - labels.sort_unstable(); - assert_eq!(labels, vec!["$", "self", "std", "super", "x", "y"]); - } - - #[test] - fn test_shadowed_variable_completion() { - // Inner binding shadows outer binding - let code = "local x = 1;\nlocal f(x) = x +"; - // ^ cursor at position 16 - let doc = Document::new(code.to_string(), DocVersion::new(1)); - let analysis = test_analysis(&doc); - - let pos = (1, 16).into(); - - let list = - completion(&doc, pos, None, &analysis).expect("should get completions with shadowing"); - let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); - labels.sort_unstable(); - // Only one 'x' (the parameter) should be visible - outer 'x' is shadowed - assert_eq!(labels, vec!["f", "std", "x"]); - } - - #[test] - fn test_object_field_completion_with_types() { - let code = "local obj = { num: 42, str: \"hello\", arr: [1, 2] }; obj."; - let doc = Document::new(code.to_string(), DocVersion::new(1)); - let analysis = test_analysis(&doc); - let pos = (0, 56).into(); - - let result = completion(&doc, pos, None, &analysis); - let list = result.expect("Should get object field completions"); - - let mut items: Vec<_> = list.items; - items.sort_by(|a, b| a.label.cmp(&b.label)); - - assert_eq!( - items, - vec![ - CompletionItem { - label: "arr".to_string(), - kind: Some(CompletionItemKind::FIELD), - detail: Some("[number, number]".to_string()), // Tuple type - ..Default::default() - }, - CompletionItem { - 
label: "num".to_string(), - kind: Some(CompletionItemKind::FIELD), - detail: Some("number".to_string()), - ..Default::default() - }, - CompletionItem { - label: "str".to_string(), - kind: Some(CompletionItemKind::FIELD), - detail: Some("string".to_string()), - ..Default::default() - }, - ] - ); - } - - #[test] - fn test_object_field_completion_nested() { - let code = "local obj = { inner: { x: 1, y: 2 } }; obj.inner."; - let doc = Document::new(code.to_string(), DocVersion::new(1)); - let analysis = test_analysis(&doc); - let pos = (0, 49).into(); - - let result = completion(&doc, pos, None, &analysis); - let list = result.expect("Should get nested object field completions"); - - let mut items: Vec<_> = list.items; - items.sort_by(|a, b| a.label.cmp(&b.label)); - - assert_eq!( - items, - vec![ - CompletionItem { - label: "x".to_string(), - kind: Some(CompletionItemKind::FIELD), - detail: Some("number".to_string()), - ..Default::default() - }, - CompletionItem { - label: "y".to_string(), - kind: Some(CompletionItemKind::FIELD), - detail: Some("number".to_string()), - ..Default::default() - }, - ] - ); - } - - #[test] - fn test_object_field_completion_after_bracket_lookup() { - let code = r#"local hm = { foo: { a: true, b: 4, c: "hi" } }; hm["foo"]."#; - let doc = Document::new(code.to_string(), DocVersion::new(1)); - let analysis = test_analysis(&doc); - let pos = ( - 0, - u32::try_from(code.len()).expect("test code length fits in u32"), - ) - .into(); - - let list = completion(&doc, pos, None, &analysis) - .expect("should get object field completions after bracket lookup"); - let mut items: Vec<_> = list.items; - items.sort_by(|a, b| a.label.cmp(&b.label)); - - assert_eq!( - items, - vec![ - CompletionItem { - label: "a".to_string(), - kind: Some(CompletionItemKind::FIELD), - detail: Some("true".to_string()), - ..Default::default() - }, - CompletionItem { - label: "b".to_string(), - kind: Some(CompletionItemKind::FIELD), - detail: Some("number".to_string()), - 
..Default::default() - }, - CompletionItem { - label: "c".to_string(), - kind: Some(CompletionItemKind::FIELD), - detail: Some("string".to_string()), - ..Default::default() - }, - ] - ); - } + Some(scope::general_completion( + document, + position, + text_offset, + semantic, + )) } diff --git a/crates/jrsonnet-lsp-handlers/src/completion/handler/dispatch/imports.rs b/crates/jrsonnet-lsp-handlers/src/completion/handler/dispatch/imports.rs new file mode 100644 index 00000000..71099cfa --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/completion/handler/dispatch/imports.rs @@ -0,0 +1,69 @@ +use std::path::{Path, PathBuf}; + +use lsp_types::CompletionList; +use tracing::debug; + +use super::super::super::imports::check_import_completion; + +pub(super) fn try_import_completion( + text: &str, + offset: rowan::TextSize, + doc_path: Option<&Path>, + import_roots: &[PathBuf], +) -> Option { + let items = check_import_completion(text, offset.into(), doc_path, import_roots)?; + debug!(count = items.len(), "providing import completions"); + Some(CompletionList { + is_incomplete: false, + items, + }) +} + +#[cfg(test)] +mod tests { + use super::super::{ + super::completion, + test_util::{test_analysis, test_document}, + }; + use crate::completion::imports::find_import_string_start; + + #[test] + fn test_import_string_detection() { + let text1 = r#"import ""#; + assert_eq!(find_import_string_start(text1), Some(8)); + + let text2 = r#"importstr ""#; + assert_eq!(find_import_string_start(text2), Some(11)); + + let text3 = r#"local x = ""#; + assert_eq!(find_import_string_start(text3), None); + + let text4 = r#"import "foo.jsonnet""#; + assert_eq!(find_import_string_start(text4), None); + } + + #[test] + fn test_import_completion_with_path() { + use tempfile::TempDir; + + let temp_dir = TempDir::new().unwrap(); + let temp_path = temp_dir.path(); + + std::fs::write(temp_path.join("utils.libsonnet"), "{}").unwrap(); + std::fs::write(temp_path.join("config.jsonnet"), 
"{}").unwrap(); + std::fs::create_dir(temp_path.join("lib")).unwrap(); + std::fs::write(temp_path.join("lib/helper.libsonnet"), "{}").unwrap(); + + let doc_path = temp_path.join("main.jsonnet"); + let code = r#"import ""#; + let doc = test_document(code); + let analysis = test_analysis(&doc); + let pos = (0, 8).into(); + + let list = completion(&doc, pos, Some(&doc_path), &analysis) + .expect("should get import completions"); + let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + labels.sort_unstable(); + assert_eq!(labels, vec!["config.jsonnet", "lib", "utils.libsonnet"]); + } +} diff --git a/crates/jrsonnet-lsp-handlers/src/completion/handler/dispatch/object_fields.rs b/crates/jrsonnet-lsp-handlers/src/completion/handler/dispatch/object_fields.rs new file mode 100644 index 00000000..0123cd3b --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/completion/handler/dispatch/object_fields.rs @@ -0,0 +1,354 @@ +use jrsonnet_lsp_document::Document; +use jrsonnet_lsp_inference::TypeAnalysis; +use lsp_types::CompletionList; +use tracing::debug; + +use super::super::super::fields::check_object_field_completion; + +pub(super) fn try_object_field_completion( + document: &Document, + text: &str, + offset: rowan::TextSize, + analysis: &TypeAnalysis, +) -> Option { + let items = check_object_field_completion(document, text, offset.into(), analysis)?; + debug!(count = items.len(), "providing object field completions"); + Some(CompletionList { + is_incomplete: false, + items, + }) +} + +#[cfg(test)] +mod tests { + use lsp_types::{CompletionItem, CompletionItemKind}; + + use super::super::{ + super::completion, + test_util::{test_analysis, test_document}, + }; + + #[test] + fn test_shadowed_std_uses_object_field_completion() { + let code = "local std = { foo: 1 }; std."; + let doc = test_document(code); + let analysis = test_analysis(&doc); + let pos = ( + 0, + u32::try_from(code.len()).expect("test code length fits in u32"), + ) + .into(); + + let 
result = completion(&doc, pos, None, &analysis).expect("should get completions"); + assert_eq!( + result.items, + vec![CompletionItem { + label: "foo".to_string(), + label_details: None, + kind: Some(CompletionItemKind::FIELD), + detail: Some("number".to_string()), + documentation: None, + deprecated: None, + preselect: None, + sort_text: None, + filter_text: None, + insert_text: None, + insert_text_format: None, + insert_text_mode: None, + text_edit: None, + additional_text_edits: None, + command: None, + commit_characters: None, + data: None, + tags: None, + }] + ); + } + + #[test] + fn test_completion_item_has_documentation() { + let code = "local obj = { foo: 1 }; obj."; + let doc = test_document(code); + let analysis = test_analysis(&doc); + let pos = (0, 28).into(); + + let result = completion(&doc, pos, None, &analysis).expect("should get completions"); + assert_eq!( + result.items, + vec![CompletionItem { + label: "foo".to_string(), + label_details: None, + kind: Some(CompletionItemKind::FIELD), + detail: Some("number".to_string()), + documentation: None, + deprecated: None, + preselect: None, + sort_text: None, + filter_text: None, + insert_text: None, + insert_text_format: None, + insert_text_mode: None, + text_edit: None, + additional_text_edits: None, + command: None, + commit_characters: None, + data: None, + tags: None, + }] + ); + } + + #[test] + fn test_object_field_completion() { + let code = "local obj = { foo: 1, bar: 2 }; obj."; + let doc = test_document(code); + let analysis = test_analysis(&doc); + let pos = (0, 36).into(); + + let list = + completion(&doc, pos, None, &analysis).expect("should get object field completions"); + let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + labels.sort_unstable(); + assert_eq!(labels, vec!["bar", "foo"]); + } + + #[test] + fn test_object_field_completion_with_prefix() { + let code = "local obj = { foo: 1, bar: 2 }; obj.f"; + let doc = test_document(code); + let analysis = 
test_analysis(&doc); + let pos = (0, 37).into(); + + let list = completion(&doc, pos, None, &analysis) + .expect("should get object field completions with prefix"); + let labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + assert_eq!(labels, vec!["foo"]); + } + + #[test] + fn test_object_field_completion_non_identifier_uses_bracket_text_edit() { + let code = r#"local obj = { "my-field": 1, normal: 2 }; obj."#; + let doc = test_document(code); + let analysis = test_analysis(&doc); + let pos = ( + 0, + u32::try_from(code.len()).expect("test code length fits in u32"), + ) + .into(); + let dot = u32::try_from(code.rfind('.').expect("dot should exist")).unwrap(); + let end = u32::try_from(code.len()).unwrap(); + + let mut result = completion(&doc, pos, None, &analysis).expect("should get completions"); + result + .items + .sort_by(|left, right| left.label.cmp(&right.label)); + + assert_eq!( + result.items, + vec![ + CompletionItem { + label: "my-field".to_string(), + label_details: None, + kind: Some(CompletionItemKind::FIELD), + detail: Some("number".to_string()), + documentation: None, + deprecated: None, + preselect: None, + sort_text: None, + filter_text: None, + insert_text: None, + insert_text_format: None, + insert_text_mode: None, + text_edit: Some(lsp_types::CompletionTextEdit::Edit(lsp_types::TextEdit { + range: lsp_types::Range { + start: lsp_types::Position::new(0, dot), + end: lsp_types::Position::new(0, end), + }, + new_text: r#"["my-field"]"#.to_string(), + })), + additional_text_edits: None, + command: None, + commit_characters: None, + data: None, + tags: None, + }, + CompletionItem { + label: "normal".to_string(), + label_details: None, + kind: Some(CompletionItemKind::FIELD), + detail: Some("number".to_string()), + documentation: None, + deprecated: None, + preselect: None, + sort_text: None, + filter_text: None, + insert_text: None, + insert_text_format: None, + insert_text_mode: None, + text_edit: None, + 
additional_text_edits: None, + command: None, + commit_characters: None, + data: None, + tags: None, + }, + ] + ); + } + + #[test] + fn test_object_field_completion_non_identifier_prefix_rewrites_dot_expression() { + let code = r#"local obj = { "my-field": 1, normal: 2 }; obj.my"#; + let doc = test_document(code); + let analysis = test_analysis(&doc); + let pos = ( + 0, + u32::try_from(code.len()).expect("test code length fits in u32"), + ) + .into(); + let dot = u32::try_from(code.rfind('.').expect("dot should exist")).unwrap(); + let end = u32::try_from(code.len()).unwrap(); + + let result = completion(&doc, pos, None, &analysis).expect("should get completions"); + assert_eq!( + result.items, + vec![CompletionItem { + label: "my-field".to_string(), + label_details: None, + kind: Some(CompletionItemKind::FIELD), + detail: Some("number".to_string()), + documentation: None, + deprecated: None, + preselect: None, + sort_text: None, + filter_text: None, + insert_text: None, + insert_text_format: None, + insert_text_mode: None, + text_edit: Some(lsp_types::CompletionTextEdit::Edit(lsp_types::TextEdit { + range: lsp_types::Range { + start: lsp_types::Position::new(0, dot), + end: lsp_types::Position::new(0, end), + }, + new_text: r#"["my-field"]"#.to_string(), + })), + additional_text_edits: None, + command: None, + commit_characters: None, + data: None, + tags: None, + }] + ); + } + + #[test] + fn test_object_field_completion_with_types() { + let code = "local obj = { num: 42, str: \"hello\", arr: [1, 2] }; obj."; + let doc = test_document(code); + let analysis = test_analysis(&doc); + let pos = (0, 56).into(); + + let result = completion(&doc, pos, None, &analysis); + let list = result.expect("Should get object field completions"); + + let mut items: Vec<_> = list.items; + items.sort_by(|a, b| a.label.cmp(&b.label)); + + assert_eq!( + items, + vec![ + CompletionItem { + label: "arr".to_string(), + kind: Some(CompletionItemKind::FIELD), + detail: Some("[number, 
number]".to_string()), + ..Default::default() + }, + CompletionItem { + label: "num".to_string(), + kind: Some(CompletionItemKind::FIELD), + detail: Some("number".to_string()), + ..Default::default() + }, + CompletionItem { + label: "str".to_string(), + kind: Some(CompletionItemKind::FIELD), + detail: Some("string".to_string()), + ..Default::default() + }, + ] + ); + } + + #[test] + fn test_object_field_completion_nested() { + let code = "local obj = { inner: { x: 1, y: 2 } }; obj.inner."; + let doc = test_document(code); + let analysis = test_analysis(&doc); + let pos = (0, 49).into(); + + let result = completion(&doc, pos, None, &analysis); + let list = result.expect("Should get nested object field completions"); + + let mut items: Vec<_> = list.items; + items.sort_by(|a, b| a.label.cmp(&b.label)); + + assert_eq!( + items, + vec![ + CompletionItem { + label: "x".to_string(), + kind: Some(CompletionItemKind::FIELD), + detail: Some("number".to_string()), + ..Default::default() + }, + CompletionItem { + label: "y".to_string(), + kind: Some(CompletionItemKind::FIELD), + detail: Some("number".to_string()), + ..Default::default() + }, + ] + ); + } + + #[test] + fn test_object_field_completion_after_bracket_lookup() { + let code = r#"local hm = { foo: { a: true, b: 4, c: "hi" } }; hm["foo"]."#; + let doc = test_document(code); + let analysis = test_analysis(&doc); + let pos = ( + 0, + u32::try_from(code.len()).expect("test code length fits in u32"), + ) + .into(); + + let list = completion(&doc, pos, None, &analysis) + .expect("should get object field completions after bracket lookup"); + let mut items: Vec<_> = list.items; + items.sort_by(|a, b| a.label.cmp(&b.label)); + + assert_eq!( + items, + vec![ + CompletionItem { + label: "a".to_string(), + kind: Some(CompletionItemKind::FIELD), + detail: Some("true".to_string()), + ..Default::default() + }, + CompletionItem { + label: "b".to_string(), + kind: Some(CompletionItemKind::FIELD), + detail: 
Some("number".to_string()), + ..Default::default() + }, + CompletionItem { + label: "c".to_string(), + kind: Some(CompletionItemKind::FIELD), + detail: Some("string".to_string()), + ..Default::default() + }, + ] + ); + } +} diff --git a/crates/jrsonnet-lsp-handlers/src/completion/handler/dispatch/scope.rs b/crates/jrsonnet-lsp-handlers/src/completion/handler/dispatch/scope.rs new file mode 100644 index 00000000..da84454c --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/completion/handler/dispatch/scope.rs @@ -0,0 +1,269 @@ +use jrsonnet_lsp_document::{Document, LspPosition}; +use jrsonnet_lsp_inference::SemanticArtifacts; +use jrsonnet_rowan_parser::AstNode; +use lsp_types::CompletionList; + +use super::super::{ + super::{ + helpers::{get_identifier_prefix, is_inside_object}, + locals::get_local_completions_with_semantic, + }, + keywords::{add_object_keyword_completions, add_std_completion}, +}; + +pub(super) fn general_completion( + document: &Document, + position: LspPosition, + offset: rowan::TextSize, + semantic: Option<&SemanticArtifacts>, +) -> CompletionList { + let text = document.text(); + let ast = document.ast(); + let mut items = + get_local_completions_with_semantic(document, position, text, offset.into(), semantic); + + add_std_completion(&mut items); + + if is_inside_object(ast.syntax(), offset.into()) { + let prefix = get_identifier_prefix(text, offset.into()); + add_object_keyword_completions(&mut items, prefix); + } + + CompletionList { + is_incomplete: false, + items, + } +} + +#[cfg(test)] +mod tests { + use lsp_types::{CompletionItem, CompletionItemKind}; + + use super::super::{ + super::completion, + test_util::{test_analysis, test_document}, + }; + + #[test] + fn test_completion_includes_std_and_local() { + let code = "local x = 1; "; + let doc = test_document(code); + let analysis = test_analysis(&doc); + let pos = (0, 13).into(); + + let list = completion(&doc, pos, None, &analysis).expect("should get completions"); + let mut labels: 
Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + labels.sort_unstable(); + assert_eq!(labels, vec!["std", "x"]); + } + + #[test] + fn test_local_variable_completion() { + let code = "local foo = 1; local bar = 2; "; + let doc = test_document(code); + let analysis = test_analysis(&doc); + let pos = (0, 30).into(); + + let list = completion(&doc, pos, None, &analysis).expect("should get completions"); + let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + labels.sort_unstable(); + assert_eq!(labels, vec!["bar", "foo", "std"]); + } + + #[test] + fn test_function_parameter_completion() { + let code = "local f(x, y) = x + "; + let doc = test_document(code); + let analysis = test_analysis(&doc); + let pos = (0, 20).into(); + + let list = completion(&doc, pos, None, &analysis).expect("should get completions"); + let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + labels.sort_unstable(); + assert_eq!(labels, vec!["f", "std", "x", "y"]); + } + + #[test] + fn test_completion_with_prefix() { + let code = "local foo = 1; local bar = 2; f"; + let doc = test_document(code); + let analysis = test_analysis(&doc); + let pos = (0, 31).into(); + + let list = completion(&doc, pos, None, &analysis).expect("should get completions"); + let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + labels.sort_unstable(); + assert_eq!(labels, vec!["foo", "std"]); + } + + #[test] + fn test_for_comprehension_variable_completion() { + let code = "[x for x in [1, 2, 3] if ]"; + let doc = test_document(code); + let analysis = test_analysis(&doc); + let pos = (0, 25).into(); + + let list = completion(&doc, pos, None, &analysis) + .expect("should get completions in for comprehension"); + let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + labels.sort_unstable(); + assert_eq!(labels, vec!["std", "x"]); + } + + #[test] + fn test_object_local_completion() { + 
let code = "{ local helper = 1, field: }"; + let doc = test_document(code); + let analysis = test_analysis(&doc); + let pos = (0, 27).into(); + + let list = + completion(&doc, pos, None, &analysis).expect("should get completions in object"); + let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + labels.sort_unstable(); + assert_eq!(labels, vec!["$", "helper", "self", "std", "super"]); + } + + #[test] + fn test_nested_function_completion() { + let code = "local outer(a) = local inner(b) = a + ; inner(1); outer(1)"; + let doc = test_document(code); + let analysis = test_analysis(&doc); + let pos = (0, 37).into(); + + let list = completion(&doc, pos, None, &analysis) + .expect("should get completions in nested function"); + let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + labels.sort_unstable(); + assert_eq!(labels, vec!["a", "b", "std"]); + } + + #[test] + fn test_completion_at_eof() { + let code = "local x = 1;\n"; + let doc = test_document(code); + let analysis = test_analysis(&doc); + let pos = (1, 0).into(); + + let list = completion(&doc, pos, None, &analysis).expect("should get completions at EOF"); + let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + labels.sort_unstable(); + assert_eq!(labels, vec!["std", "x"]); + } + + #[test] + fn test_completion_in_object() { + let code = "{ foo: 1, bar: }"; + let doc = test_document(code); + let analysis = test_analysis(&doc); + let pos = (0, 14).into(); + + let result = completion(&doc, pos, None, &analysis); + let list = result.expect("Should get completions in object"); + let mut items: Vec<_> = list.items; + items.sort_by(|a, b| a.label.cmp(&b.label)); + + assert_eq!( + items, + vec![ + CompletionItem { + label: "$".to_string(), + kind: Some(CompletionItemKind::KEYWORD), + detail: Some("Reference to root object".to_string()), + ..Default::default() + }, + CompletionItem { + label: "self".to_string(), + kind: 
Some(CompletionItemKind::KEYWORD), + detail: Some("Reference to current object".to_string()), + ..Default::default() + }, + CompletionItem { + label: "std".to_string(), + kind: Some(CompletionItemKind::MODULE), + detail: Some("Jsonnet standard library".to_string()), + ..Default::default() + }, + CompletionItem { + label: "super".to_string(), + kind: Some(CompletionItemKind::KEYWORD), + detail: Some("Reference to inherited object".to_string()), + ..Default::default() + }, + ] + ); + } + + #[test] + fn test_no_self_super_outside_object() { + let code = "local x = 1; x"; + let doc = test_document(code); + let analysis = test_analysis(&doc); + let pos = (0, 13).into(); + + let result = completion(&doc, pos, None, &analysis); + let list = result.expect("Should get completions"); + let mut items: Vec<_> = list.items; + items.sort_by(|a, b| a.label.cmp(&b.label)); + + assert_eq!( + items, + vec![ + CompletionItem { + label: "std".to_string(), + kind: Some(CompletionItemKind::MODULE), + detail: Some("Jsonnet standard library".to_string()), + ..Default::default() + }, + CompletionItem { + label: "x".to_string(), + kind: Some(CompletionItemKind::VARIABLE), + detail: Some("local variable".to_string()), + ..Default::default() + }, + ] + ); + } + + #[test] + fn test_multiple_for_specs_completion() { + let code = "[x + y for x in [1] for y in [2]]"; + let doc = test_document(code); + let analysis = test_analysis(&doc); + let pos = (0, 3).into(); + + let list = completion(&doc, pos, None, &analysis) + .expect("should get completions in multi-for comprehension"); + let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + labels.sort_unstable(); + assert_eq!(labels, vec!["std", "x", "y"]); + } + + #[test] + fn test_completion_with_syntax_error() { + let code = "local x = 1;\nlocal y = 2;\n{ foo: x +"; + let doc = test_document(code); + let analysis = test_analysis(&doc); + let pos = (2, 10).into(); + + let list = completion(&doc, pos, None, &analysis) + 
.expect("should get completions despite syntax error"); + let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + labels.sort_unstable(); + assert_eq!(labels, vec!["$", "self", "std", "super", "x", "y"]); + } + + #[test] + fn test_shadowed_variable_completion() { + let code = "local x = 1;\nlocal f(x) = x +"; + let doc = test_document(code); + let analysis = test_analysis(&doc); + let pos = (1, 16).into(); + + let list = + completion(&doc, pos, None, &analysis).expect("should get completions with shadowing"); + let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + labels.sort_unstable(); + assert_eq!(labels, vec!["f", "std", "x"]); + } +} diff --git a/crates/jrsonnet-lsp-handlers/src/completion/handler/dispatch/stdlib.rs b/crates/jrsonnet-lsp-handlers/src/completion/handler/dispatch/stdlib.rs new file mode 100644 index 00000000..657fbe36 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/completion/handler/dispatch/stdlib.rs @@ -0,0 +1,68 @@ +use jrsonnet_rowan_parser::SyntaxToken; +use lsp_types::CompletionList; +use tracing::debug; + +use super::super::super::stdlib::check_stdlib_completion; + +pub(super) fn try_stdlib_completion( + token: Option<&SyntaxToken>, + text: &str, + offset: rowan::TextSize, +) -> Option { + let token = token?; + let items = check_stdlib_completion(token, text, offset.into())?; + debug!(count = items.len(), "providing stdlib completions"); + Some(CompletionList { + is_incomplete: false, + items, + }) +} + +#[cfg(test)] +mod tests { + use super::super::{ + super::completion, + test_util::{test_analysis, test_document}, + }; + + #[test] + fn test_stdlib_completion_with_prefix_xor() { + let code = "std.xo"; + let doc = test_document(code); + let analysis = test_analysis(&doc); + let pos = (0, 6).into(); + + let list = completion(&doc, pos, None, &analysis).expect("should get completions"); + let labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + 
assert_eq!(labels, vec!["xor"]); + } + + #[test] + fn test_stdlib_completion_with_prefix_x() { + let code = "std.x"; + let doc = test_document(code); + let analysis = test_analysis(&doc); + let pos = (0, 5).into(); + + let list = completion(&doc, pos, None, &analysis).expect("should get completions"); + let mut labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + labels.sort_unstable(); + assert_eq!(labels, vec!["xnor", "xor"]); + } + + #[test] + fn test_stdlib_completion_with_alias_prefix_xo() { + let code = "local s = std; s.xo"; + let doc = test_document(code); + let analysis = test_analysis(&doc); + let pos = ( + 0, + u32::try_from(code.len()).expect("test code length fits in u32"), + ) + .into(); + + let list = completion(&doc, pos, None, &analysis).expect("should get completions"); + let labels: Vec<&str> = list.items.iter().map(|i| i.label.as_str()).collect(); + assert_eq!(labels, vec!["xor"]); + } +} diff --git a/crates/jrsonnet-lsp-handlers/src/completion/handler/dispatch/test_util.rs b/crates/jrsonnet-lsp-handlers/src/completion/handler/dispatch/test_util.rs new file mode 100644 index 00000000..48cfd3ef --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/completion/handler/dispatch/test_util.rs @@ -0,0 +1,14 @@ +use std::sync::Arc; + +use jrsonnet_lsp_document::{DocVersion, Document}; +use jrsonnet_lsp_inference::TypeAnalysis; +use jrsonnet_lsp_types::GlobalTyStore; + +pub(super) fn test_analysis(doc: &Document) -> TypeAnalysis { + let global_types = Arc::new(GlobalTyStore::new()); + TypeAnalysis::analyze_with_global(doc, global_types) +} + +pub(super) fn test_document(code: &str) -> Document { + Document::new(code.to_string(), DocVersion::new(1)) +} From ab37d86823ec2c7d9442e6774a69342ecb540b58 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Mon, 16 Feb 2026 17:52:35 +0000 Subject: [PATCH 137/210] fix(inference): infer object-local bindings in object bodies Object type inference now evaluates object-local bindings from 
`MemberBindStmt` members before inferring field types. The new path handles both destruct and function bindings, keeps bindings scoped to the object body, and supports recursive local functions via provisional function types. Added regression tests for local variable and local function object bindings so `{ local x = 1, z: x }`-style object shapes infer correctly. --- crates/jrsonnet-lsp-inference/src/object.rs | 129 +++++++++++++++++++- 1 file changed, 127 insertions(+), 2 deletions(-) diff --git a/crates/jrsonnet-lsp-inference/src/object.rs b/crates/jrsonnet-lsp-inference/src/object.rs index 1f1ccf72..9d6395d5 100644 --- a/crates/jrsonnet-lsp-inference/src/object.rs +++ b/crates/jrsonnet-lsp-inference/src/object.rs @@ -1,9 +1,9 @@ //! Object type inference for Jsonnet expressions. use jrsonnet_lsp_types::{ - FieldDefInterned, FieldVis, FunctionData, ObjectData, ReturnSpec, Ty, TyData, + FieldDefInterned, FieldVis, FunctionData, ObjectData, ParamInterned, ReturnSpec, Ty, TyData, }; -use jrsonnet_rowan_parser::nodes::{Expr, Member, ObjBody}; +use jrsonnet_rowan_parser::nodes::{Bind, Destruct, Expr, Member, ObjBody}; use rustc_hash::FxHashMap; use crate::{ @@ -96,9 +96,24 @@ pub fn infer_object_type_with_super_ty( }; let preliminary_ty = env.store_mut().object(preliminary_obj); + // Object-local bindings are scoped to this object body. + env.push_scope(); + // Push object context for self references env.push_object_context_ty(preliminary_ty, super_type); + // Object-local bindings (`local x = ...`) participate in field inference.
+ for member in members.members() { + let Member::MemberBindStmt(bind_stmt) = member else { + continue; + }; + let Some(bind) = bind_stmt.obj_local().and_then(|obj_local| obj_local.bind()) + else { + continue; + }; + infer_object_local_bind_ty(&bind, env, infer_expr); + } + // Pass 2: Infer actual field types with self available let mut final_fields: Vec<(String, FieldDefInterned)> = Vec::new(); @@ -197,6 +212,7 @@ pub fn infer_object_type_with_super_ty( // Pop object context env.pop_object_context(); + env.pop_scope(); // Sort for canonical form final_fields.sort_by(|(a, _), (b, _)| a.cmp(b)); @@ -213,6 +229,99 @@ pub fn infer_object_type_with_super_ty( } } +fn infer_object_local_bind_ty( + bind: &Bind, + env: &mut TypeEnv, + infer_expr: &mut impl FnMut(&Expr, &mut TypeEnv) -> Ty, +) { + match bind { + Bind::BindDestruct(bind_destruct) => { + let Some(destruct) = bind_destruct.into() else { + return; + }; + let Destruct::DestructFull(full) = destruct else { + return; + }; + let Some(ident) = full.name().and_then(|name| name.ident_lit()) else { + return; + }; + let name = ident.text().to_string(); + let ty = bind_destruct + .value() + .map_or(Ty::ANY, |value| infer_expr(&value, env)); + env.define_ty(name, ty); + } + Bind::BindFunction(bind_function) => { + let Some(name_node) = bind_function.name() else { + return; + }; + let Some(ident) = name_node.ident_lit() else { + return; + }; + let name = ident.text().to_string(); + let params = bind_function + .params() + .map(|params| extract_params_with_default_types_ty(¶ms, env)) + .unwrap_or_default(); + + // Install a provisional function so recursive calls can resolve. 
+ let provisional_ty = env.store_mut().function(FunctionData { + params: params.clone(), + return_spec: ReturnSpec::Fixed(Ty::ANY), + variadic: false, + }); + env.define_ty(name.clone(), provisional_ty); + + let (return_ty, param_constraints) = if env.can_infer_function_body() { + bind_function.value().map_or_else( + || (Ty::ANY, FxHashMap::default()), + |body| { + env.push_scope(); + let param_names: Vec = + params.iter().map(|param| param.name.clone()).collect(); + for param in ¶ms { + env.define_ty(param.name.clone(), param.ty); + } + env.start_constraint_tracking(¶m_names); + env.enter_function(); + let body_ty = infer_expr(&body, env); + env.exit_function(); + let constraints = env.stop_constraint_tracking_ty(); + env.pop_scope(); + (body_ty, constraints) + }, + ) + } else { + (Ty::ANY, FxHashMap::default()) + }; + + let final_params: Vec = params + .into_iter() + .map(|param| { + let mut narrowed_ty = param.ty; + if let Some(constraints) = param_constraints.get(¶m.name) { + for constraint_ty in constraints { + narrowed_ty = env.store_mut().narrow(narrowed_ty, *constraint_ty); + } + } + ParamInterned { + name: param.name, + ty: narrowed_ty, + has_default: param.has_default, + } + }) + .collect(); + + let final_ty = env.store_mut().function(FunctionData { + params: final_params, + return_spec: ReturnSpec::Fixed(return_ty), + variadic: false, + }); + env.define_ty(name, final_ty); + } + } +} + #[cfg(test)] mod tests { use std::collections::BTreeSet; @@ -337,4 +446,20 @@ mod tests { } ); } + + #[test] + fn test_object_local_binding_infers_field_type() { + let (ty, env) = infer_doc("{ local x = 1, z: x }"); + let obj = try_object(&env, ty).expect("expected object"); + let field_def = get_field_ty(&obj, "z").expect("Should have 'z' field"); + assert_eq!(field_def.ty, Ty::NUMBER); + } + + #[test] + fn test_object_local_function_binding_infers_field_type() { + let (ty, env) = infer_doc("{ local one() = 1, z: one() }"); + let obj = try_object(&env, 
ty).expect("expected object"); + let field_def = get_field_ty(&obj, "z").expect("Should have 'z' field"); + assert_eq!(field_def.ty, Ty::NUMBER); + } } From 552439a4483b0b4417579e46609386c288d0688d Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Mon, 16 Feb 2026 18:02:53 +0000 Subject: [PATCH 138/210] feat(lsp): add configurable inlay hint categories Expand inlay hint generation to support category-based toggles with conservative defaults aligned to common LSP behavior. Added `InlayHintsConfig` and wired it through server configuration, including initialization options and live `didChangeConfiguration` updates. Default hints now include local and object-local bindings/functions, while object fields and methods stay off by default. Added integration coverage for live inlay-hint config updates and scenario coverage for default object-local behavior plus config-driven category switching. --- .../jrsonnet-lsp-handlers/src/inlay_hint.rs | 419 +++++++++++++++--- crates/jrsonnet-lsp-handlers/src/lib.rs | 2 +- crates/jrsonnet-lsp/src/config.rs | 47 +- .../src/server/async_requests/inlay_hints.rs | 3 +- .../tests/integration_test/features.rs | 98 ++++ .../runner/inlay_hints_config_categories.yaml | 58 +++ .../inlay_hints_object_locals_default.yaml | 31 ++ 7 files changed, 602 insertions(+), 56 deletions(-) create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/inlay_hints_config_categories.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/inlay_hints_object_locals_default.yaml diff --git a/crates/jrsonnet-lsp-handlers/src/inlay_hint.rs b/crates/jrsonnet-lsp-handlers/src/inlay_hint.rs index 48cc6e8c..58d3a9be 100644 --- a/crates/jrsonnet-lsp-handlers/src/inlay_hint.rs +++ b/crates/jrsonnet-lsp-handlers/src/inlay_hint.rs @@ -2,13 +2,55 @@ //! //! Provides type hints for local bindings and local function return values. 
-use jrsonnet_lsp_document::{to_lsp_range, Document}; +use jrsonnet_lsp_document::{to_lsp_range, Document, LineIndex}; use jrsonnet_lsp_inference::TypeAnalysis; use jrsonnet_rowan_parser::{ - nodes::{Bind, BindDestruct, BindFunction, Destruct, StmtLocal}, + nodes::{ + Bind, BindDestruct, BindFunction, Destruct, Member, MemberFieldMethod, MemberFieldNormal, + ObjBodyMemberList, StmtLocal, + }, AstNode, }; use lsp_types::{InlayHint, InlayHintKind, InlayHintLabel, Position, Range}; +use serde::{Deserialize, Serialize}; + +/// Configuration for inlay hint generation. +#[allow(clippy::struct_excessive_bools)] +#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)] +#[serde(default)] +pub struct InlayHintsConfig { + /// Show type hints for `local x = ...` statements. + #[serde(alias = "localVariableTypes")] + pub local_variable_types: bool, + /// Show return-type hints for `local f(...) = ...` statements. + #[serde(alias = "localFunctionReturnTypes")] + pub local_function_return_types: bool, + /// Show type hints for object-local `local x = ...` bindings. + #[serde(alias = "objectLocalVariableTypes")] + pub object_local_variable_types: bool, + /// Show return-type hints for object-local `local f(...) = ...` bindings. + #[serde(alias = "objectLocalFunctionReturnTypes")] + pub object_local_function_return_types: bool, + /// Show type hints for object fields such as `{ field: expr }`. + #[serde(alias = "objectFieldTypes")] + pub object_field_types: bool, + /// Show return-type hints for object methods such as `{ f(x): expr }`. 
+ #[serde(alias = "objectMethodReturnTypes")] + pub object_method_return_types: bool, +} + +impl Default for InlayHintsConfig { + fn default() -> Self { + Self { + local_variable_types: true, + local_function_return_types: true, + object_local_variable_types: true, + object_local_function_return_types: true, + object_field_types: false, + object_method_return_types: false, + } + } +} fn type_hint(position: Position, label: String) -> InlayHint { InlayHint { @@ -37,68 +79,114 @@ pub fn inlay_hints( document: &Document, analysis: &TypeAnalysis, visible_range: Range, +) -> Vec { + inlay_hints_with_config( + document, + analysis, + visible_range, + &InlayHintsConfig::default(), + ) +} + +/// Compute inlay hints for a visible range in a document with feature flags. +pub fn inlay_hints_with_config( + document: &Document, + analysis: &TypeAnalysis, + visible_range: Range, + config: &InlayHintsConfig, ) -> Vec { let ast = document.ast(); let text = document.text(); let line_index = document.line_index(); let mut hints = Vec::new(); - for node in ast.syntax().descendants() { - if let Some(bind_func) = BindFunction::cast(node.clone()) { - let Some(name_node) = bind_func.name() else { - continue; - }; - let Some(body) = bind_func.value() else { - continue; - }; - let Some(ty) = analysis.type_for_range(body.syntax().text_range()) else { - continue; - }; - - let type_str = analysis.display(ty); - if is_uninformative_type(&type_str) { - continue; - } - - let name_range = to_lsp_range(name_node.syntax().text_range(), line_index, text); - if position_in_range(name_range.end, visible_range) { - hints.push(type_hint(name_range.end, format!(" -> {type_str}"))); + for stmt_local in ast.syntax().descendants().filter_map(StmtLocal::cast) { + for bind in stmt_local.binds() { + match bind { + Bind::BindDestruct(bind_destruct) if config.local_variable_types => { + push_binding_type_hint( + &mut hints, + &bind_destruct, + analysis, + visible_range, + line_index, + text, + ); + } + 
Bind::BindFunction(bind_function) if config.local_function_return_types => { + push_function_return_hint( + &mut hints, + &bind_function, + analysis, + visible_range, + line_index, + text, + ); + } + _ => {} } } + } - if let Some(stmt_local) = StmtLocal::cast(node) { - for bind in stmt_local.binds() { - let Bind::BindDestruct(bind_destruct) = bind else { - continue; - }; - let Some(value) = bind_destruct.value() else { - continue; - }; - let Some(destruct) = BindDestruct::into(&bind_destruct) else { - continue; - }; - let Destruct::DestructFull(full) = destruct else { - continue; - }; - let Some(name_node) = full.name() else { - continue; - }; - let Some(ident) = name_node.ident_lit() else { - continue; - }; - let Some(ty) = analysis.type_for_range(value.syntax().text_range()) else { - continue; - }; - - let type_str = analysis.display(ty); - if is_uninformative_type(&type_str) { - continue; + for member_list in ast + .syntax() + .descendants() + .filter_map(ObjBodyMemberList::cast) + { + for member in member_list.members() { + match member { + Member::MemberBindStmt(bind_stmt) => { + let Some(bind) = bind_stmt.obj_local().and_then(|obj_local| obj_local.bind()) + else { + continue; + }; + match bind { + Bind::BindDestruct(bind_destruct) if config.object_local_variable_types => { + push_binding_type_hint( + &mut hints, + &bind_destruct, + analysis, + visible_range, + line_index, + text, + ); + } + Bind::BindFunction(bind_function) + if config.object_local_function_return_types => + { + push_function_return_hint( + &mut hints, + &bind_function, + analysis, + visible_range, + line_index, + text, + ); + } + _ => {} + } } - - let name_range = to_lsp_range(ident.text_range(), line_index, text); - if position_in_range(name_range.end, visible_range) { - hints.push(type_hint(name_range.end, format!(": {type_str}"))); + Member::MemberFieldNormal(field) if config.object_field_types => { + push_field_type_hint( + &mut hints, + &field, + analysis, + visible_range, + line_index, 
+ text, + ); + } + Member::MemberFieldMethod(method) if config.object_method_return_types => { + push_method_return_hint( + &mut hints, + &method, + analysis, + visible_range, + line_index, + text, + ); } + _ => {} } } } @@ -106,6 +194,131 @@ pub fn inlay_hints( hints } +fn push_binding_type_hint( + hints: &mut Vec, + bind_destruct: &BindDestruct, + analysis: &TypeAnalysis, + visible_range: Range, + line_index: &LineIndex, + text: &str, +) { + let Some(value) = bind_destruct.value() else { + return; + }; + let Some(destruct) = BindDestruct::into(bind_destruct) else { + return; + }; + let Destruct::DestructFull(full) = destruct else { + return; + }; + let Some(name_node) = full.name() else { + return; + }; + let Some(ident) = name_node.ident_lit() else { + return; + }; + let Some(ty) = analysis.type_for_range(value.syntax().text_range()) else { + return; + }; + + let type_str = analysis.display(ty); + if is_uninformative_type(&type_str) { + return; + } + + let name_range = to_lsp_range(ident.text_range(), line_index, text); + if position_in_range(name_range.end, visible_range) { + hints.push(type_hint(name_range.end, format!(": {type_str}"))); + } +} + +fn push_function_return_hint( + hints: &mut Vec, + bind_function: &BindFunction, + analysis: &TypeAnalysis, + visible_range: Range, + line_index: &LineIndex, + text: &str, +) { + let Some(name_node) = bind_function.name() else { + return; + }; + let Some(body) = bind_function.value() else { + return; + }; + let Some(ty) = analysis.type_for_range(body.syntax().text_range()) else { + return; + }; + + let type_str = analysis.display(ty); + if is_uninformative_type(&type_str) { + return; + } + + let name_range = to_lsp_range(name_node.syntax().text_range(), line_index, text); + if position_in_range(name_range.end, visible_range) { + hints.push(type_hint(name_range.end, format!(" -> {type_str}"))); + } +} + +fn push_field_type_hint( + hints: &mut Vec, + field: &MemberFieldNormal, + analysis: &TypeAnalysis, + 
visible_range: Range, + line_index: &LineIndex, + text: &str, +) { + let Some(field_name) = field.field_name() else { + return; + }; + let Some(value) = field.expr() else { + return; + }; + let Some(ty) = analysis.type_for_range(value.syntax().text_range()) else { + return; + }; + + let type_str = analysis.display(ty); + if is_uninformative_type(&type_str) { + return; + } + + let name_range = to_lsp_range(field_name.syntax().text_range(), line_index, text); + if position_in_range(name_range.end, visible_range) { + hints.push(type_hint(name_range.end, format!(": {type_str}"))); + } +} + +fn push_method_return_hint( + hints: &mut Vec, + method: &MemberFieldMethod, + analysis: &TypeAnalysis, + visible_range: Range, + line_index: &LineIndex, + text: &str, +) { + let Some(field_name) = method.field_name() else { + return; + }; + let Some(body) = method.expr() else { + return; + }; + let Some(ty) = analysis.type_for_range(body.syntax().text_range()) else { + return; + }; + + let type_str = analysis.display(ty); + if is_uninformative_type(&type_str) { + return; + } + + let name_range = to_lsp_range(field_name.syntax().text_range(), line_index, text); + if position_in_range(name_range.end, visible_range) { + hints.push(type_hint(name_range.end, format!(" -> {type_str}"))); + } +} + #[cfg(test)] mod tests { use std::sync::Arc; @@ -189,6 +402,106 @@ mod tests { ); } + #[test] + fn test_object_local_binding_type_hint_default_enabled() { + let doc = Document::new("{ local x = 1, z: x }".to_string(), DocVersion::new(1)); + let analysis = test_analysis(&doc); + let hints = inlay_hints(&doc, &analysis, full_line_range()); + + assert_hints_eq( + &hints, + vec![InlayHint { + position: Position { + line: 0, + character: 9, + }, + label: InlayHintLabel::String(": number".to_string()), + kind: Some(InlayHintKind::TYPE), + text_edits: None, + tooltip: None, + padding_left: Some(true), + padding_right: None, + data: None, + }], + ); + } + + #[test] + fn 
test_object_field_hint_only_when_enabled() { + let doc = Document::new("{ a: 1 }".to_string(), DocVersion::new(1)); + let analysis = test_analysis(&doc); + let config = InlayHintsConfig { + local_variable_types: false, + local_function_return_types: false, + object_local_variable_types: false, + object_local_function_return_types: false, + object_field_types: true, + object_method_return_types: false, + }; + let hints = inlay_hints_with_config(&doc, &analysis, full_line_range(), &config); + + assert_hints_eq( + &hints, + vec![InlayHint { + position: Position { + line: 0, + character: 3, + }, + label: InlayHintLabel::String(": number".to_string()), + kind: Some(InlayHintKind::TYPE), + text_edits: None, + tooltip: None, + padding_left: Some(true), + padding_right: None, + data: None, + }], + ); + } + + #[test] + fn test_object_method_hint_only_when_enabled() { + let doc = Document::new("{ one(): 1 }".to_string(), DocVersion::new(1)); + let analysis = test_analysis(&doc); + let config = InlayHintsConfig { + local_variable_types: false, + local_function_return_types: false, + object_local_variable_types: false, + object_local_function_return_types: false, + object_field_types: false, + object_method_return_types: true, + }; + let hints = inlay_hints_with_config(&doc, &analysis, full_line_range(), &config); + + assert_hints_eq( + &hints, + vec![InlayHint { + position: Position { + line: 0, + character: 5, + }, + label: InlayHintLabel::String(" -> number".to_string()), + kind: Some(InlayHintKind::TYPE), + text_edits: None, + tooltip: None, + padding_left: Some(true), + padding_right: None, + data: None, + }], + ); + } + + #[test] + fn test_config_can_disable_object_local_binding_hints() { + let doc = Document::new("{ local x = 1, z: x }".to_string(), DocVersion::new(1)); + let analysis = test_analysis(&doc); + let config = InlayHintsConfig { + object_local_variable_types: false, + ..InlayHintsConfig::default() + }; + let hints = inlay_hints_with_config(&doc, &analysis, 
full_line_range(), &config); + assert_hints_eq(&hints, vec![]); + } + #[test] fn test_inlay_hint_respects_visible_range() { let doc = Document::new( diff --git a/crates/jrsonnet-lsp-handlers/src/lib.rs b/crates/jrsonnet-lsp-handlers/src/lib.rs index cb735bd5..f94d4acb 100644 --- a/crates/jrsonnet-lsp-handlers/src/lib.rs +++ b/crates/jrsonnet-lsp-handlers/src/lib.rs @@ -30,7 +30,7 @@ pub use formatting::{ FormattingContext, }; pub use hover::{hover, hover_with_import_field_type}; -pub use inlay_hint::inlay_hints; +pub use inlay_hint::{inlay_hints, inlay_hints_with_config, InlayHintsConfig}; pub use references::{ find_cross_file_references, find_cross_file_references_with_semantic, find_references, find_references_with_semantic, diff --git a/crates/jrsonnet-lsp/src/config.rs b/crates/jrsonnet-lsp/src/config.rs index 1c4deb72..8a2e3437 100644 --- a/crates/jrsonnet-lsp/src/config.rs +++ b/crates/jrsonnet-lsp/src/config.rs @@ -9,7 +9,8 @@ use std::{collections::HashMap, path::PathBuf}; // Re-export config types from handlers crate pub use jrsonnet_lsp_handlers::{ - CodeActionConfig, FormatterEngine, FormattingConfig, RemoveUnusedCommentsMode, RemoveUnusedMode, + CodeActionConfig, FormatterEngine, FormattingConfig, InlayHintsConfig, + RemoveUnusedCommentsMode, RemoveUnusedMode, }; use serde::{Deserialize, Serialize}; @@ -63,6 +64,10 @@ pub struct ServerConfig { #[serde(default, alias = "codeActions")] pub code_actions: CodeActionConfig, + /// Inlay hint options. + #[serde(default, alias = "inlayHints")] + pub inlay_hints: InlayHintsConfig, + /// Log level for the server (error, warn, info, debug). 
#[serde(alias = "logLevel", alias = "log_level")] pub log_level: Option, @@ -86,6 +91,8 @@ struct ServerConfigPatch { formatting: Option, #[serde(rename = "codeActions")] code_actions: Option, + #[serde(rename = "inlayHints")] + inlay_hints: Option, #[serde(alias = "logLevel", alias = "log_level")] log_level: Option, } @@ -100,6 +107,7 @@ impl ServerConfigPatch { && self.resolve_paths_with_tanka.is_none() && self.formatting.is_none() && self.code_actions.is_none() + && self.inlay_hints.is_none() && self.log_level.is_none() } @@ -128,6 +136,9 @@ impl ServerConfigPatch { if let Some(code_actions) = self.code_actions { config.code_actions = code_actions; } + if let Some(inlay_hints) = self.inlay_hints { + config.inlay_hints = inlay_hints; + } if let Some(log_level) = self.log_level { config.log_level = log_level.as_str().map(ToString::to_string); } @@ -190,6 +201,9 @@ impl ServerConfig { if other.code_actions != CodeActionConfig::default() { self.code_actions = other.code_actions; } + if other.inlay_hints != InlayHintsConfig::default() { + self.inlay_hints = other.inlay_hints; + } } /// Merge formatting configuration. 
@@ -275,6 +289,7 @@ mod tests { assert!(config.ext_vars.is_empty()); assert!(!config.enable_eval_diagnostics); assert_eq!(config.code_actions, CodeActionConfig::default()); + assert_eq!(config.inlay_hints, InlayHintsConfig::default()); } #[test] @@ -425,6 +440,36 @@ mod tests { ); } + #[test] + fn test_inlay_hints_config_from_initialization_options() { + let json = serde_json::json!({ + "inlayHints": { + "localVariableTypes": false, + "objectFieldTypes": true + } + }); + + let config = ServerConfig::from_initialization_options(Some(json)); + assert!(!config.inlay_hints.local_variable_types); + assert!(config.inlay_hints.object_field_types); + } + + #[test] + fn test_update_from_settings_updates_inlay_hints_config() { + let mut config = ServerConfig::new(); + + let settings = serde_json::json!({ + "inlayHints": { + "localFunctionReturnTypes": false, + "objectMethodReturnTypes": true + } + }); + + assert!(config.update_from_settings(settings)); + assert!(!config.inlay_hints.local_function_return_types); + assert!(config.inlay_hints.object_method_return_types); + } + #[test] fn test_formatting_config() { let json = serde_json::json!({ diff --git a/crates/jrsonnet-lsp/src/server/async_requests/inlay_hints.rs b/crates/jrsonnet-lsp/src/server/async_requests/inlay_hints.rs index d883528b..963a04f0 100644 --- a/crates/jrsonnet-lsp/src/server/async_requests/inlay_hints.rs +++ b/crates/jrsonnet-lsp/src/server/async_requests/inlay_hints.rs @@ -10,7 +10,8 @@ impl AsyncRequestContext { let path = CanonicalPath::from_uri(uri).ok()?; let doc = self.documents.get(&path)?; let analysis = self.analyze_document(&path, &doc); - let hints = handlers::inlay_hints(&doc, &analysis, params.range); + let config = self.config.read().inlay_hints; + let hints = handlers::inlay_hints_with_config(&doc, &analysis, params.range, &config); if hints.is_empty() { return None; } diff --git a/crates/jrsonnet-lsp/tests/integration_test/features.rs b/crates/jrsonnet-lsp/tests/integration_test/features.rs 
index 07e47fb9..9e747e13 100644 --- a/crates/jrsonnet-lsp/tests/integration_test/features.rs +++ b/crates/jrsonnet-lsp/tests/integration_test/features.rs @@ -113,6 +113,104 @@ fn test_inlay_hint() { .expect("Server thread should exit cleanly"); } +#[test] +fn test_inlay_hint_config_updates_via_configuration_change() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + let uri = "file:///test/inlay-config.jsonnet"; + let text = "{ local x = 1, z: x, a: 1 }"; + client_conn + .sender + .send(Message::Notification(did_open_notification(uri, text))) + .unwrap(); + + client_conn + .sender + .send(Message::Request(inlay_hint_request(2, uri, 0, 0, 0, 80))) + .unwrap(); + let before = recv_response(&client_conn, 2); + assert!(before.error.is_none(), "Inlay hint should succeed"); + let before_hints: Option> = + serde_json::from_value(before.result.expect("should have result")).unwrap(); + let before_hints = before_hints.unwrap_or_default(); + let before_json = serde_json::to_value(&before_hints).expect("hints should serialize"); + let expected_before = serde_json::json!([{ + "position": { "line": 0, "character": 9 }, + "label": ": number", + "kind": 1, + "paddingLeft": true + }]); + assert_eq!(before_json, expected_before); + + client_conn + .sender + .send(Message::Notification( + did_change_configuration_notification(serde_json::json!({ + "jsonnet": { + "inlayHints": { + "localVariableTypes": false, + "localFunctionReturnTypes": false, + "objectLocalVariableTypes": false, + "objectLocalFunctionReturnTypes": false, + "objectFieldTypes": true, + "objectMethodReturnTypes": false + } + } + })), + )) + .unwrap(); + + client_conn + .sender + .send(Message::Request(inlay_hint_request(3, uri, 0, 0, 0, 
80))) + .unwrap(); + let after = recv_response(&client_conn, 3); + assert!(after.error.is_none(), "Inlay hint should succeed"); + let after_hints: Option> = + serde_json::from_value(after.result.expect("should have result")).unwrap(); + let after_hints = after_hints.unwrap_or_default(); + let after_json = serde_json::to_value(&after_hints).expect("hints should serialize"); + let expected_after = serde_json::json!([ + { + "position": { "line": 0, "character": 16 }, + "label": ": number", + "kind": 1, + "paddingLeft": true + }, + { + "position": { "line": 0, "character": 22 }, + "label": ": number", + "kind": 1, + "paddingLeft": true + } + ]); + assert_eq!(after_json, expected_after); + + client_conn + .sender + .send(Message::Request(shutdown_request(4))) + .unwrap(); + let _ = recv_response(&client_conn, 4); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + #[test] fn test_code_action_unused_variable_quickfix() { let (client_conn, server_conn) = Connection::memory(); diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/inlay_hints_config_categories.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/inlay_hints_config_categories.yaml new file mode 100644 index 00000000..db111a30 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/inlay_hints_config_categories.yaml @@ -0,0 +1,58 @@ +# Verify config-driven category toggles for inlay hints. 
+steps: +- step: create + files: + main.jsonnet: | + [[hintRange:local ((topLocal:x|)) = 1; + { + local ((objLocal:y|)) = 2, + ((fieldA:a|)): x + y, + ((methodM:m|))(): 1, + }]] + +- step: diagnosticsSettled + +- step: requestInlayHints + as: defaultHints + file: main.jsonnet + range: hintRange + +- step: expectInlayHints + request: defaultHints + result: + - positionOf: topLocal + label: ": number" + kind: 1 + paddingLeft: true + - positionOf: objLocal + label: ": number" + kind: 1 + paddingLeft: true + +- step: config + settings: + jsonnet: + inlayHints: + localVariableTypes: false + localFunctionReturnTypes: false + objectLocalVariableTypes: false + objectLocalFunctionReturnTypes: false + objectFieldTypes: true + objectMethodReturnTypes: true + +- step: requestInlayHints + as: fieldMethodHints + file: main.jsonnet + range: hintRange + +- step: expectInlayHints + request: fieldMethodHints + result: + - positionOf: fieldA + label: ": number" + kind: 1 + paddingLeft: true + - positionOf: methodM + label: " -> number" + kind: 1 + paddingLeft: true diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/inlay_hints_object_locals_default.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/inlay_hints_object_locals_default.yaml new file mode 100644 index 00000000..4407598c --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/inlay_hints_object_locals_default.yaml @@ -0,0 +1,31 @@ +# Ensure default inlay hints include local/object-local bindings, while +# object fields and methods remain off by default. 
+steps: +- step: create + files: + main.jsonnet: | + [[hintRange:{ + local ((objLocalVar:x|)) = 1, + local ((objLocalFn:f|))() = 1, + a: x, + m(): f(), + }]] + +- step: diagnosticsSettled + +- step: requestInlayHints + as: defaultHints + file: main.jsonnet + range: hintRange + +- step: expectInlayHints + request: defaultHints + result: + - positionOf: objLocalVar + label: ": number" + kind: 1 + paddingLeft: true + - positionOf: objLocalFn + label: " -> number" + kind: 1 + paddingLeft: true From b7541b53b2ab342c6ede720d0d88ee4ebf9c8c2e Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Mon, 16 Feb 2026 18:58:04 +0000 Subject: [PATCH 139/210] fix(hover): infer key and object-local bind token types Type lookup now checks contextual token semantics before fallback to enclosing expression ranges in `type_at_position`. Field-name handling now follows language semantics: - fixed keys infer as string - dynamic keys infer from their key expression type Object-local bind definition tokens infer from bound values. Added scenario coverage for passing hover cases and tightened function-variable hover assertions to type-only checks. Validated with targeted e2e scenario tests for key and object-local binding hover behavior. 
--- .../src/analysis/queries.rs | 121 ++++++++++++++++-- .../scenarios/hover/function_variable.yaml | 17 +-- .../hover_object_field_key_is_string.yaml | 19 +++ .../hover_object_local_binding_precision.yaml | 33 +++++ ..._object_local_binding_usage_precision.yaml | 24 ++++ 5 files changed, 189 insertions(+), 25 deletions(-) create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/hover_object_field_key_is_string.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/hover_object_local_binding_precision.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/hover_object_local_binding_usage_precision.yaml diff --git a/crates/jrsonnet-lsp-inference/src/analysis/queries.rs b/crates/jrsonnet-lsp-inference/src/analysis/queries.rs index 7ebce7e4..a981f01d 100644 --- a/crates/jrsonnet-lsp-inference/src/analysis/queries.rs +++ b/crates/jrsonnet-lsp-inference/src/analysis/queries.rs @@ -1,7 +1,10 @@ use jrsonnet_lsp_types::{ is_subtype_ty, DisplayContext, FunctionData, MutStore, ObjectData, Ty, TyData, }; -use jrsonnet_rowan_parser::SyntaxNode; +use jrsonnet_rowan_parser::{ + nodes::{Bind, Destruct, FieldName}, + AstNode, SyntaxNode, SyntaxToken, +}; use rowan::TextRange; use rustc_hash::FxHashMap; @@ -18,24 +21,27 @@ impl TypeAnalysis { /// Find the smallest expression type containing `offset` by walking syntax ancestors. 
fn find_type_at_in_syntax(&self, root: &SyntaxNode, offset: rowan::TextSize) -> Option { - let candidate_for_token = |token: rowan::SyntaxToken<_>| { - let mut best: Option<(TextRange, Ty)> = self - .expr_types - .get(&token.text_range()) - .copied() - .map(|ty| (token.text_range(), ty)); + let update_best = |best: &mut Option<(TextRange, Ty)>, range: TextRange, ty: Ty| match best + { + None => *best = Some((range, ty)), + Some((best_range, _)) if range.len() < best_range.len() => { + *best = Some((range, ty)); + } + _ => {} + }; + + let candidate_for_token = |token: SyntaxToken| { + let mut best: Option<(TextRange, Ty)> = self.contextual_type_for_token(&token); + + if let Some(ty) = self.expr_types.get(&token.text_range()).copied() { + update_best(&mut best, token.text_range(), ty); + } let mut current = token.parent(); while let Some(node) = current { let range = node.text_range(); if let Some(ty) = self.expr_types.get(&range).copied() { - match best { - None => best = Some((range, ty)), - Some((best_range, _)) if range.len() < best_range.len() => { - best = Some((range, ty)); - } - _ => {} - } + update_best(&mut best, range, ty); } current = node.parent(); } @@ -62,6 +68,45 @@ impl TypeAnalysis { best.map(|(_, ty)| ty) } + /// Infer contextual type for non-expression nodes that users still hover. 
+ /// + /// Examples: + /// - Object field names (`a: 1`, `"a": 1`, `[expr]: 1`) + /// - Local binding definition names (`local x = value`) + fn contextual_type_for_token(&self, token: &SyntaxToken) -> Option<(TextRange, Ty)> { + let node = token.parent()?; + + if let Some(field_name) = node.ancestors().find_map(FieldName::cast) { + return match field_name { + FieldName::FieldNameFixed(fixed) => Some((fixed.syntax().text_range(), Ty::STRING)), + FieldName::FieldNameDynamic(dynamic) => { + let expr = dynamic.expr()?; + let ty = self.type_for_range(expr.syntax().text_range())?; + Some((dynamic.syntax().text_range(), ty)) + } + }; + } + + let bind = node.ancestors().find_map(Bind::cast)?; + match bind { + Bind::BindDestruct(bind_destruct) => { + let destruct = jrsonnet_rowan_parser::nodes::BindDestruct::into(&bind_destruct)?; + let Destruct::DestructFull(full) = destruct else { + return None; + }; + let name = full.name()?; + if node.text_range() != name.syntax().text_range() { + return None; + } + + let value = bind_destruct.value()?; + let ty = self.type_for_range(value.syntax().text_range())?; + Some((name.syntax().text_range(), ty)) + } + Bind::BindFunction(_) => None, + } + } + /// Find a type at the given offset. 
fn find_type_at(&self, offset: rowan::TextSize) -> Option { let mut best_range: Option = None; @@ -372,6 +417,54 @@ mod tests { assert_eq!(ty, Ty::NUMBER); } + #[test] + fn test_type_at_position_field_name_fixed_is_string() { + let code = "{ key: 1 }"; + let (analysis, doc) = analyze_doc(code); + let root = doc.ast().syntax().clone(); + + let key_offset = nth_offset(code, "key", 0); + let ty = analysis + .type_at_position(&root, key_offset) + .expect("should find type at fixed field name"); + assert_eq!(ty, Ty::STRING); + } + + #[test] + fn test_type_at_position_field_name_dynamic_uses_expr_type() { + let code = r#"{ [("x" + "y")]: 1 }"#; + let (analysis, doc) = analyze_doc(code); + let root = doc.ast().syntax().clone(); + + let l_bracket_offset = nth_offset(code, "[", 0); + let ty = analysis + .type_at_position(&root, l_bracket_offset) + .expect("should find type at computed field name"); + assert_eq!(ty, Ty::STRING); + } + + #[test] + fn test_type_at_position_object_local_bind_name_uses_value_type() { + let code = r"{ local x = { a: 1 }, y: x }"; + let (analysis, doc) = analyze_doc(code); + let root = doc.ast().syntax().clone(); + + let bind_name_offset = nth_offset(code, "x = {", 0); + let ty = analysis + .type_at_position(&root, bind_name_offset) + .expect("should find type at object-local bind name"); + + analysis.with_data(ty, |data| match data { + TyData::Object(obj) => { + assert!(!obj.has_unknown); + assert_eq!(obj.fields.len(), 1); + assert_eq!(obj.fields[0].0, "a"); + assert_eq!(obj.fields[0].1.ty, Ty::NUMBER); + } + other => panic!("expected object type for bind name, got {other:?}"), + }); + } + #[test] fn test_fields_at_position_object() { let code = "{ a: 1, b: 2 }"; diff --git a/crates/jrsonnet-lsp/tests/scenarios/hover/function_variable.yaml b/crates/jrsonnet-lsp/tests/scenarios/hover/function_variable.yaml index be1cbd2d..81b1466d 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/hover/function_variable.yaml +++ 
b/crates/jrsonnet-lsp/tests/scenarios/hover/function_variable.yaml @@ -1,20 +1,15 @@ steps: - step: create files: - main.jsonnet: local ((m1:|))f(x) = x; + main.jsonnet: | + local ((m1:|))f(x) = x; + f - step: requestHover as: hover file: main.jsonnet at: m1 -- step: expectHover +- step: expectHoverType request: hover - result: - contents: - kind: markdown - value: |- - `any` - - ```jsonnet - local f(x) = x; - ``` + type: 'function(x: any)' + match: exact diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_object_field_key_is_string.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_object_field_key_is_string.yaml new file mode 100644 index 00000000..d0e0f54a --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_object_field_key_is_string.yaml @@ -0,0 +1,19 @@ +# Hover on an object field key should report `string`. +steps: +- step: create + files: + main.jsonnet: | + { + ((fieldKey:|))z: 1, + } + +- step: diagnosticsSettled + +- step: requestHover + as: fieldKeyHover + file: main.jsonnet + at: fieldKey +- step: expectHoverType + request: fieldKeyHover + type: string + match: exact diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_object_local_binding_precision.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_object_local_binding_precision.yaml new file mode 100644 index 00000000..9654815e --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_object_local_binding_precision.yaml @@ -0,0 +1,33 @@ +# Hover on object-local bindings should report the binding's value type at both +# declaration and usage sites, not the enclosing object type. 
+steps: +- step: create + files: + main.jsonnet: | + { + local ((bindX:|))x = { + a: 1, + b: 2, + }, + z: ((useX:|))x, + } + +- step: diagnosticsSettled + +- step: requestHover + as: bindHover + file: main.jsonnet + at: bindX +- step: expectHoverType + request: bindHover + type: '{ a: number, b: number }' + match: exact + +- step: requestHover + as: useHover + file: main.jsonnet + at: useX +- step: expectHoverType + request: useHover + type: '{ a: number, b: number }' + match: exact diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_object_local_binding_usage_precision.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_object_local_binding_usage_precision.yaml new file mode 100644 index 00000000..bfc11708 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_object_local_binding_usage_precision.yaml @@ -0,0 +1,24 @@ +# Hover on object-local binding usage should report the binding value type, +# not an enclosing object wrapper type. +steps: +- step: create + files: + main.jsonnet: | + { + local x = { + a: 1, + b: 2, + }, + z: ((useX:|))x, + } + +- step: diagnosticsSettled + +- step: requestHover + as: useHover + file: main.jsonnet + at: useX +- step: expectHoverType + request: useHover + type: '{ a: number, b: number }' + match: exact From afe24252d944e8df42e4a94dbe7e1806485b8fa1 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Mon, 16 Feb 2026 19:01:14 +0000 Subject: [PATCH 140/210] fix(hover): return enclosing type for brace-token hover Allow hover type output on non-trivia tokens even when token-level documentation is unavailable. This keeps comment and whitespace hovers disabled, but enables type hover on structural tokens like braces by relying on `inferred_type_markdown`. Added unit tests for open/close brace hover behavior and scenario fixtures that assert enclosing object type at both positions. 
Validated with: - `cargo test -p jrsonnet-lsp-handlers test_hover_on_open_brace_returns_enclosing_object_type` - `cargo test -p jrsonnet-lsp-handlers test_hover_on_close_brace_returns_enclosing_object_type` - `cargo test -p jrsonnet-lsp --test e2e_scenario_tests brace_enclosing_type -- --nocapture` --- .../src/hover/handler.rs | 38 +++++++++++++++++-- .../hover_close_brace_enclosing_type.yaml | 22 +++++++++++ .../hover_open_brace_enclosing_type.yaml | 22 +++++++++++ 3 files changed, 78 insertions(+), 4 deletions(-) create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/hover_close_brace_enclosing_type.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/hover_open_brace_enclosing_type.yaml diff --git a/crates/jrsonnet-lsp-handlers/src/hover/handler.rs b/crates/jrsonnet-lsp-handlers/src/hover/handler.rs index 777c1c0d..090af2f9 100644 --- a/crates/jrsonnet-lsp-handlers/src/hover/handler.rs +++ b/crates/jrsonnet-lsp-handlers/src/hover/handler.rs @@ -1,6 +1,6 @@ use jrsonnet_lsp_document::{token_at_offset, ByteOffset, Document, LspPosition}; use jrsonnet_lsp_inference::TypeAnalysis; -use jrsonnet_rowan_parser::AstNode; +use jrsonnet_rowan_parser::{nodes::Trivia, AstNode, AstToken}; use lsp_types::{Hover, HoverContents, MarkupContent, MarkupKind}; use super::{local::check_local_hover, stdlib::check_stdlib_hover, ImportFieldTypeResolver}; @@ -30,8 +30,8 @@ pub fn hover_with_import_field_type( let ast = document.ast(); let token = token_at_offset(ast.syntax(), offset)?; - // Only provide hover for meaningful tokens - if !token.kind().is_hover_eligible() { + // Never provide hover for trivia tokens. 
+ if Trivia::cast(token.clone()).is_some() { return None; } @@ -51,7 +51,11 @@ pub fn hover_with_import_field_type( } let type_markdown = inferred_type_markdown(document, analysis, offset); - let token_markdown = token.kind().token_doc_markdown(); + let token_markdown = token + .kind() + .is_hover_eligible() + .then(|| token.kind().token_doc_markdown()) + .flatten(); if let Some(value) = merge_markdown_sections(type_markdown, token_markdown) { return Some(Hover { contents: HoverContents::Markup(MarkupContent { @@ -435,4 +439,30 @@ mod tests { let result = get_hover("local x = 1; x", 0, 13); assert_matches!(result, None); } + + #[test] + fn test_hover_on_open_brace_returns_enclosing_object_type() { + let code = "{ z: { a: 1, b: 2 } }"; + let result = get_hover(code, 0, 0); + assert_matches!(result, Some(Hover { + contents: HoverContents::Markup(MarkupContent { kind: MarkupKind::Markdown, value }), + range: None + }) => { + assert_eq!(value, "`{ z: { a: number, b: number } }`"); + }); + } + + #[test] + fn test_hover_on_close_brace_returns_enclosing_object_type() { + let code = "{ z: { a: 1, b: 2 } }"; + let close_brace = u32::try_from(code.rfind('}').expect("code should end with `}`")) + .expect("close brace offset should fit u32"); + let result = get_hover(code, 0, close_brace); + assert_matches!(result, Some(Hover { + contents: HoverContents::Markup(MarkupContent { kind: MarkupKind::Markdown, value }), + range: None + }) => { + assert_eq!(value, "`{ z: { a: number, b: number } }`"); + }); + } } diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_close_brace_enclosing_type.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_close_brace_enclosing_type.yaml new file mode 100644 index 00000000..6a706c9d --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_close_brace_enclosing_type.yaml @@ -0,0 +1,22 @@ +# Hover on a closing `}` should still report the enclosing object type. 
+steps: +- step: create + files: + main.jsonnet: | + { + z: { + a: 1, + b: 2, + }, + ((closeBrace:|))} + +- step: diagnosticsSettled + +- step: requestHover + as: closeBraceHover + file: main.jsonnet + at: closeBrace +- step: expectHoverType + request: closeBraceHover + type: '{ z: { a: number, b: number } }' + match: exact diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_open_brace_enclosing_type.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_open_brace_enclosing_type.yaml new file mode 100644 index 00000000..52373198 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_open_brace_enclosing_type.yaml @@ -0,0 +1,22 @@ +# Hover on an opening `{` should still report the enclosing object type. +steps: +- step: create + files: + main.jsonnet: | + ((openBrace:|)){ + z: { + a: 1, + b: 2, + }, + } + +- step: diagnosticsSettled + +- step: requestHover + as: openBraceHover + file: main.jsonnet + at: openBrace +- step: expectHoverType + request: openBraceHover + type: '{ z: { a: number, b: number } }' + match: exact From c31e49515afc402c67cf067d8023f2af05fabd72 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Tue, 17 Feb 2026 11:05:51 +0000 Subject: [PATCH 141/210] feat(inlay): add enum-based hint category configuration Replace boolean inlay hint toggles with category enums for locals, object locals, and object members. New modes: - `LocalHintsMode`: `off`, `variables`, `functions`, `all` - `ObjectMemberHintsMode`: `off`, `fields`, `methods`, `all` Config and tests now use camelCase enum fields: - `local` - `objectLocal` - `objectMembers` Added `strum` derives in handlers and updated `Cargo.lock`. 
Validated with: - `cargo test -p jrsonnet-lsp --test e2e_scenario_tests inlay_hints_config_categories -- --nocapture` - `cargo test -p jrsonnet-lsp --test integration_test test_inlay_hint_config_updates_via_configuration_change -- --nocapture` --- Cargo.lock | 23 ++++ crates/jrsonnet-lsp-handlers/Cargo.toml | 1 + .../jrsonnet-lsp-handlers/src/inlay_hint.rs | 121 +++++++++++------- crates/jrsonnet-lsp-handlers/src/lib.rs | 4 +- crates/jrsonnet-lsp/src/config.rs | 26 ++-- .../tests/integration_test/features.rs | 9 +- .../runner/inlay_hints_config_categories.yaml | 9 +- 7 files changed, 125 insertions(+), 68 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 0c3a9ce5..84fd69d6 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1894,6 +1894,7 @@ dependencies = [ "rstest 0.23.0", "serde", "serde_json", + "strum", "tempfile", "tracing", ] @@ -3904,6 +3905,28 @@ version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" +[[package]] +name = "strum" +version = "0.26.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fec0f0aef304996cf250b31b5a10dee7980c85da9d759361292b8bca5a18f06" +dependencies = [ + "strum_macros", +] + +[[package]] +name = "strum_macros" +version = "0.26.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c6bee85a5a24955dc440386795aa378cd9cf82acd5f764469152d2270e581be" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "rustversion", + "syn", +] + [[package]] name = "subtle" version = "2.6.1" diff --git a/crates/jrsonnet-lsp-handlers/Cargo.toml b/crates/jrsonnet-lsp-handlers/Cargo.toml index e9b6f01d..a8095a9d 100644 --- a/crates/jrsonnet-lsp-handlers/Cargo.toml +++ b/crates/jrsonnet-lsp-handlers/Cargo.toml @@ -20,6 +20,7 @@ rayon = "1.11.0" serde = { workspace = true, features = ["derive"] } rowan.workspace = true serde_json.workspace = true +strum = { version = "0.26", features = 
["derive"] } tracing = "0.1.44" [lints] diff --git a/crates/jrsonnet-lsp-handlers/src/inlay_hint.rs b/crates/jrsonnet-lsp-handlers/src/inlay_hint.rs index 58d3a9be..1cd7daba 100644 --- a/crates/jrsonnet-lsp-handlers/src/inlay_hint.rs +++ b/crates/jrsonnet-lsp-handlers/src/inlay_hint.rs @@ -13,41 +13,74 @@ use jrsonnet_rowan_parser::{ }; use lsp_types::{InlayHint, InlayHintKind, InlayHintLabel, Position, Range}; use serde::{Deserialize, Serialize}; +use strum::{Display, EnumString}; + +/// Category selection for local-binding inlay hints. +#[derive( + Debug, Clone, Copy, Default, Display, EnumString, Serialize, Deserialize, PartialEq, Eq, +)] +#[serde(rename_all = "camelCase")] +#[strum(serialize_all = "camelCase")] +pub enum LocalHintsMode { + Off, + Variables, + Functions, + #[default] + All, +} + +impl LocalHintsMode { + const fn variable_hints_enabled(self) -> bool { + matches!(self, Self::Variables | Self::All) + } + + const fn function_hints_enabled(self) -> bool { + matches!(self, Self::Functions | Self::All) + } +} + +/// Category selection for object member inlay hints. +#[derive( + Debug, Clone, Copy, Default, Display, EnumString, Serialize, Deserialize, PartialEq, Eq, +)] +#[serde(rename_all = "camelCase")] +#[strum(serialize_all = "camelCase")] +pub enum ObjectMemberHintsMode { + #[default] + Off, + Fields, + Methods, + All, +} + +impl ObjectMemberHintsMode { + const fn field_hints_enabled(self) -> bool { + matches!(self, Self::Fields | Self::All) + } + + const fn method_hints_enabled(self) -> bool { + matches!(self, Self::Methods | Self::All) + } +} /// Configuration for inlay hint generation. -#[allow(clippy::struct_excessive_bools)] #[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)] -#[serde(default)] +#[serde(default, rename_all = "camelCase")] pub struct InlayHintsConfig { - /// Show type hints for `local x = ...` statements. 
- #[serde(alias = "localVariableTypes")] - pub local_variable_types: bool, - /// Show return-type hints for `local f(...) = ...` statements. - #[serde(alias = "localFunctionReturnTypes")] - pub local_function_return_types: bool, - /// Show type hints for object-local `local x = ...` bindings. - #[serde(alias = "objectLocalVariableTypes")] - pub object_local_variable_types: bool, - /// Show return-type hints for object-local `local f(...) = ...` bindings. - #[serde(alias = "objectLocalFunctionReturnTypes")] - pub object_local_function_return_types: bool, - /// Show type hints for object fields such as `{ field: expr }`. - #[serde(alias = "objectFieldTypes")] - pub object_field_types: bool, - /// Show return-type hints for object methods such as `{ f(x): expr }`. - #[serde(alias = "objectMethodReturnTypes")] - pub object_method_return_types: bool, + /// Category filter for top-level `local` bindings. + pub local: LocalHintsMode, + /// Category filter for object-local `local` bindings. + pub object_local: LocalHintsMode, + /// Category filter for object fields and methods. 
+ pub object_members: ObjectMemberHintsMode, } impl Default for InlayHintsConfig { fn default() -> Self { Self { - local_variable_types: true, - local_function_return_types: true, - object_local_variable_types: true, - object_local_function_return_types: true, - object_field_types: false, - object_method_return_types: false, + local: LocalHintsMode::All, + object_local: LocalHintsMode::All, + object_members: ObjectMemberHintsMode::Off, } } } @@ -103,7 +136,7 @@ pub fn inlay_hints_with_config( for stmt_local in ast.syntax().descendants().filter_map(StmtLocal::cast) { for bind in stmt_local.binds() { match bind { - Bind::BindDestruct(bind_destruct) if config.local_variable_types => { + Bind::BindDestruct(bind_destruct) if config.local.variable_hints_enabled() => { push_binding_type_hint( &mut hints, &bind_destruct, @@ -113,7 +146,7 @@ pub fn inlay_hints_with_config( text, ); } - Bind::BindFunction(bind_function) if config.local_function_return_types => { + Bind::BindFunction(bind_function) if config.local.function_hints_enabled() => { push_function_return_hint( &mut hints, &bind_function, @@ -141,7 +174,9 @@ pub fn inlay_hints_with_config( continue; }; match bind { - Bind::BindDestruct(bind_destruct) if config.object_local_variable_types => { + Bind::BindDestruct(bind_destruct) + if config.object_local.variable_hints_enabled() => + { push_binding_type_hint( &mut hints, &bind_destruct, @@ -152,7 +187,7 @@ pub fn inlay_hints_with_config( ); } Bind::BindFunction(bind_function) - if config.object_local_function_return_types => + if config.object_local.function_hints_enabled() => { push_function_return_hint( &mut hints, @@ -166,7 +201,7 @@ pub fn inlay_hints_with_config( _ => {} } } - Member::MemberFieldNormal(field) if config.object_field_types => { + Member::MemberFieldNormal(field) if config.object_members.field_hints_enabled() => { push_field_type_hint( &mut hints, &field, @@ -176,7 +211,9 @@ pub fn inlay_hints_with_config( text, ); } - 
Member::MemberFieldMethod(method) if config.object_method_return_types => { + Member::MemberFieldMethod(method) + if config.object_members.method_hints_enabled() => + { push_method_return_hint( &mut hints, &method, @@ -431,12 +468,9 @@ mod tests { let doc = Document::new("{ a: 1 }".to_string(), DocVersion::new(1)); let analysis = test_analysis(&doc); let config = InlayHintsConfig { - local_variable_types: false, - local_function_return_types: false, - object_local_variable_types: false, - object_local_function_return_types: false, - object_field_types: true, - object_method_return_types: false, + local: LocalHintsMode::Off, + object_local: LocalHintsMode::Off, + object_members: ObjectMemberHintsMode::Fields, }; let hints = inlay_hints_with_config(&doc, &analysis, full_line_range(), &config); @@ -463,12 +497,9 @@ mod tests { let doc = Document::new("{ one(): 1 }".to_string(), DocVersion::new(1)); let analysis = test_analysis(&doc); let config = InlayHintsConfig { - local_variable_types: false, - local_function_return_types: false, - object_local_variable_types: false, - object_local_function_return_types: false, - object_field_types: false, - object_method_return_types: true, + local: LocalHintsMode::Off, + object_local: LocalHintsMode::Off, + object_members: ObjectMemberHintsMode::Methods, }; let hints = inlay_hints_with_config(&doc, &analysis, full_line_range(), &config); @@ -495,7 +526,7 @@ mod tests { let doc = Document::new("{ local x = 1, z: x }".to_string(), DocVersion::new(1)); let analysis = test_analysis(&doc); let config = InlayHintsConfig { - object_local_variable_types: false, + object_local: LocalHintsMode::Functions, ..InlayHintsConfig::default() }; let hints = inlay_hints_with_config(&doc, &analysis, full_line_range(), &config); diff --git a/crates/jrsonnet-lsp-handlers/src/lib.rs b/crates/jrsonnet-lsp-handlers/src/lib.rs index f94d4acb..5173ada4 100644 --- a/crates/jrsonnet-lsp-handlers/src/lib.rs +++ b/crates/jrsonnet-lsp-handlers/src/lib.rs @@ 
-30,7 +30,9 @@ pub use formatting::{ FormattingContext, }; pub use hover::{hover, hover_with_import_field_type}; -pub use inlay_hint::{inlay_hints, inlay_hints_with_config, InlayHintsConfig}; +pub use inlay_hint::{ + inlay_hints, inlay_hints_with_config, InlayHintsConfig, LocalHintsMode, ObjectMemberHintsMode, +}; pub use references::{ find_cross_file_references, find_cross_file_references_with_semantic, find_references, find_references_with_semantic, diff --git a/crates/jrsonnet-lsp/src/config.rs b/crates/jrsonnet-lsp/src/config.rs index 8a2e3437..2ad1e398 100644 --- a/crates/jrsonnet-lsp/src/config.rs +++ b/crates/jrsonnet-lsp/src/config.rs @@ -9,8 +9,8 @@ use std::{collections::HashMap, path::PathBuf}; // Re-export config types from handlers crate pub use jrsonnet_lsp_handlers::{ - CodeActionConfig, FormatterEngine, FormattingConfig, InlayHintsConfig, - RemoveUnusedCommentsMode, RemoveUnusedMode, + CodeActionConfig, FormatterEngine, FormattingConfig, InlayHintsConfig, LocalHintsMode, + ObjectMemberHintsMode, RemoveUnusedCommentsMode, RemoveUnusedMode, }; use serde::{Deserialize, Serialize}; @@ -444,14 +444,17 @@ mod tests { fn test_inlay_hints_config_from_initialization_options() { let json = serde_json::json!({ "inlayHints": { - "localVariableTypes": false, - "objectFieldTypes": true + "local": "off", + "objectMembers": "fields" } }); let config = ServerConfig::from_initialization_options(Some(json)); - assert!(!config.inlay_hints.local_variable_types); - assert!(config.inlay_hints.object_field_types); + assert_eq!(config.inlay_hints.local, LocalHintsMode::Off); + assert_eq!( + config.inlay_hints.object_members, + ObjectMemberHintsMode::Fields + ); } #[test] @@ -460,14 +463,17 @@ mod tests { let settings = serde_json::json!({ "inlayHints": { - "localFunctionReturnTypes": false, - "objectMethodReturnTypes": true + "local": "functions", + "objectMembers": "methods" } }); assert!(config.update_from_settings(settings)); - 
assert!(!config.inlay_hints.local_function_return_types); - assert!(config.inlay_hints.object_method_return_types); + assert_eq!(config.inlay_hints.local, LocalHintsMode::Functions); + assert_eq!( + config.inlay_hints.object_members, + ObjectMemberHintsMode::Methods + ); } #[test] diff --git a/crates/jrsonnet-lsp/tests/integration_test/features.rs b/crates/jrsonnet-lsp/tests/integration_test/features.rs index 9e747e13..c795e24f 100644 --- a/crates/jrsonnet-lsp/tests/integration_test/features.rs +++ b/crates/jrsonnet-lsp/tests/integration_test/features.rs @@ -159,12 +159,9 @@ fn test_inlay_hint_config_updates_via_configuration_change() { did_change_configuration_notification(serde_json::json!({ "jsonnet": { "inlayHints": { - "localVariableTypes": false, - "localFunctionReturnTypes": false, - "objectLocalVariableTypes": false, - "objectLocalFunctionReturnTypes": false, - "objectFieldTypes": true, - "objectMethodReturnTypes": false + "local": "off", + "objectLocal": "off", + "objectMembers": "fields" } } })), diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/inlay_hints_config_categories.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/inlay_hints_config_categories.yaml index db111a30..f070c7a7 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/runner/inlay_hints_config_categories.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/inlay_hints_config_categories.yaml @@ -33,12 +33,9 @@ steps: settings: jsonnet: inlayHints: - localVariableTypes: false - localFunctionReturnTypes: false - objectLocalVariableTypes: false - objectLocalFunctionReturnTypes: false - objectFieldTypes: true - objectMethodReturnTypes: true + local: "off" + objectLocal: "off" + objectMembers: "all" - step: requestInlayHints as: fieldMethodHints From 26727b704f83f5b5062ae47d2c71bc38cbc96e65 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Tue, 17 Feb 2026 11:37:27 +0000 Subject: [PATCH 142/210] fix(hover): map bind syntax hover to bound value type When hovering tokens inside a bind 
declaration, use the bound expression type instead of falling back to enclosing object type. This fixes incorrect results like showing outer object fields on `=`. Updated scenario coverage: - `hover_object_local_binding_precision` now asserts `=` hover type - `hover_edge_positions` expects concrete `=` type instead of `null` --- .../src/hover/handler.rs | 48 +++++++++++++++++-- .../runner/hover_edge_positions.yaml | 6 +-- .../hover_object_local_binding_precision.yaml | 11 ++++- 3 files changed, 58 insertions(+), 7 deletions(-) diff --git a/crates/jrsonnet-lsp-handlers/src/hover/handler.rs b/crates/jrsonnet-lsp-handlers/src/hover/handler.rs index 090af2f9..0248b11d 100644 --- a/crates/jrsonnet-lsp-handlers/src/hover/handler.rs +++ b/crates/jrsonnet-lsp-handlers/src/hover/handler.rs @@ -1,6 +1,10 @@ use jrsonnet_lsp_document::{token_at_offset, ByteOffset, Document, LspPosition}; use jrsonnet_lsp_inference::TypeAnalysis; -use jrsonnet_rowan_parser::{nodes::Trivia, AstNode, AstToken}; +use jrsonnet_lsp_types::Ty; +use jrsonnet_rowan_parser::{ + nodes::{Bind, Trivia}, + AstNode, AstToken, +}; use lsp_types::{Hover, HoverContents, MarkupContent, MarkupKind}; use super::{local::check_local_hover, stdlib::check_stdlib_hover, ImportFieldTypeResolver}; @@ -50,7 +54,7 @@ pub fn hover_with_import_field_type( return Some(hover); } - let type_markdown = inferred_type_markdown(document, analysis, offset); + let type_markdown = inferred_type_markdown(document, analysis, offset, &token); let token_markdown = token .kind() .is_hover_eligible() @@ -73,12 +77,38 @@ fn inferred_type_markdown( document: &Document, analysis: &TypeAnalysis, offset: ByteOffset, + token: &jrsonnet_rowan_parser::SyntaxToken, ) -> Option { let ast = document.ast(); - let ty = analysis.type_at_position(ast.syntax(), offset.into())?; + let ty = bind_context_type(analysis, token) + .or_else(|| analysis.type_at_position(ast.syntax(), offset.into()))?; Some(format!("`{}`", analysis.display_for_hover(ty))) } +/// 
For bind syntax hover (e.g. hovering `=` in `local x = expr`), prefer the +/// bound value type instead of falling back to enclosing expression type. +fn bind_context_type( + analysis: &TypeAnalysis, + token: &jrsonnet_rowan_parser::SyntaxToken, +) -> Option { + let node = token.parent()?; + let bind = node.ancestors().find_map(Bind::cast)?; + + match bind { + Bind::BindDestruct(bind_destruct) => { + let value = bind_destruct.value()?; + let value_range = value.syntax().text_range(); + let token_range = token.text_range(); + if value_range.start() <= token_range.start() && token_range.end() <= value_range.end() + { + return None; + } + analysis.type_for_range(value_range) + } + Bind::BindFunction(_) => None, + } +} + fn merge_markdown_sections( type_markdown: Option, token_markdown: Option, @@ -440,6 +470,18 @@ mod tests { assert_matches!(result, None); } + #[test] + fn test_hover_on_object_local_bind_equals_uses_bound_value_type() { + let code = "{ local x = { a: 1 }, z: x }"; + let result = get_hover(code, 0, 11); + assert_matches!(result, Some(Hover { + contents: HoverContents::Markup(MarkupContent { kind: MarkupKind::Markdown, value }), + range: None + }) => { + assert_eq!(value, "`{ a: number }`"); + }); + } + #[test] fn test_hover_on_open_brace_returns_enclosing_object_type() { let code = "{ z: { a: 1, b: 2 } }"; diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_edge_positions.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_edge_positions.yaml index d514eb8b..496f8cb7 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_edge_positions.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_edge_positions.yaml @@ -23,9 +23,10 @@ steps: as: hoverEquals file: main.jsonnet at: m2 -- step: expectHover +- step: expectHoverType request: hoverEquals - result: + type: number + match: exact # Real symbol usage should still resolve to a type. 
- step: requestHover @@ -36,4 +37,3 @@ steps: request: hoverUsage type: number match: exact - diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_object_local_binding_precision.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_object_local_binding_precision.yaml index 9654815e..4f67a4ff 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_object_local_binding_precision.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_object_local_binding_precision.yaml @@ -5,7 +5,7 @@ steps: files: main.jsonnet: | { - local ((bindX:|))x = { + local ((bindX:|))x ((bindEq:|))= { a: 1, b: 2, }, @@ -23,6 +23,15 @@ steps: type: '{ a: number, b: number }' match: exact +- step: requestHover + as: bindEqHover + file: main.jsonnet + at: bindEq +- step: expectHoverType + request: bindEqHover + type: '{ a: number, b: number }' + match: exact + - step: requestHover as: useHover file: main.jsonnet From d6d2a12e6eb817ddf756968b7a708e3505fc1ba2 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Tue, 17 Feb 2026 12:18:14 +0000 Subject: [PATCH 143/210] fix(hover): improve function-sugar token type resolution Record function-sugar bind and parameter token ranges during inference so hover lookups return parameter and bind types. Narrow destruct-bind contextual mapping to name and `=` tokens to avoid broad fallback on malformed leading syntax. Add scenario coverage for function-sugar token positions and update malformed hover expectation in mixed request fixture. 
--- .../src/hover/handler.rs | 62 +++--- .../src/analysis/queries.rs | 176 ++++++++++++++++-- crates/jrsonnet-lsp-inference/src/expr/mod.rs | 31 ++- .../hover_completion_execute_command.yaml | 5 +- .../hover_function_sugar_positions.yaml | 46 +++++ 5 files changed, 266 insertions(+), 54 deletions(-) create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/hover_function_sugar_positions.yaml diff --git a/crates/jrsonnet-lsp-handlers/src/hover/handler.rs b/crates/jrsonnet-lsp-handlers/src/hover/handler.rs index 0248b11d..4a2438df 100644 --- a/crates/jrsonnet-lsp-handlers/src/hover/handler.rs +++ b/crates/jrsonnet-lsp-handlers/src/hover/handler.rs @@ -1,10 +1,6 @@ use jrsonnet_lsp_document::{token_at_offset, ByteOffset, Document, LspPosition}; use jrsonnet_lsp_inference::TypeAnalysis; -use jrsonnet_lsp_types::Ty; -use jrsonnet_rowan_parser::{ - nodes::{Bind, Trivia}, - AstNode, AstToken, -}; +use jrsonnet_rowan_parser::{nodes::Trivia, AstNode, AstToken}; use lsp_types::{Hover, HoverContents, MarkupContent, MarkupKind}; use super::{local::check_local_hover, stdlib::check_stdlib_hover, ImportFieldTypeResolver}; @@ -54,7 +50,7 @@ pub fn hover_with_import_field_type( return Some(hover); } - let type_markdown = inferred_type_markdown(document, analysis, offset, &token); + let type_markdown = inferred_type_markdown(document, analysis, offset); let token_markdown = token .kind() .is_hover_eligible() @@ -77,38 +73,12 @@ fn inferred_type_markdown( document: &Document, analysis: &TypeAnalysis, offset: ByteOffset, - token: &jrsonnet_rowan_parser::SyntaxToken, ) -> Option { let ast = document.ast(); - let ty = bind_context_type(analysis, token) - .or_else(|| analysis.type_at_position(ast.syntax(), offset.into()))?; + let ty = analysis.type_at_position(ast.syntax(), offset.into())?; Some(format!("`{}`", analysis.display_for_hover(ty))) } -/// For bind syntax hover (e.g. 
hovering `=` in `local x = expr`), prefer the -/// bound value type instead of falling back to enclosing expression type. -fn bind_context_type( - analysis: &TypeAnalysis, - token: &jrsonnet_rowan_parser::SyntaxToken, -) -> Option { - let node = token.parent()?; - let bind = node.ancestors().find_map(Bind::cast)?; - - match bind { - Bind::BindDestruct(bind_destruct) => { - let value = bind_destruct.value()?; - let value_range = value.syntax().text_range(); - let token_range = token.text_range(); - if value_range.start() <= token_range.start() && token_range.end() <= value_range.end() - { - return None; - } - analysis.type_for_range(value_range) - } - Bind::BindFunction(_) => None, - } -} - fn merge_markdown_sections( type_markdown: Option, token_markdown: Option, @@ -473,7 +443,7 @@ mod tests { #[test] fn test_hover_on_object_local_bind_equals_uses_bound_value_type() { let code = "{ local x = { a: 1 }, z: x }"; - let result = get_hover(code, 0, 11); + let result = get_hover(code, 0, 10); assert_matches!(result, Some(Hover { contents: HoverContents::Markup(MarkupContent { kind: MarkupKind::Markdown, value }), range: None @@ -507,4 +477,28 @@ mod tests { assert_eq!(value, "`{ z: { a: number, b: number } }`"); }); } + + #[test] + fn test_hover_on_function_sugar_param_returns_parameter_type() { + let code = "local f(x) = x + 1;\nf"; + let result = get_hover(code, 0, 8); + assert_matches!(result, Some(Hover { + contents: HoverContents::Markup(MarkupContent { kind: MarkupKind::Markdown, value }), + range: None + }) => { + assert_eq!(value, "`any`"); + }); + } + + #[test] + fn test_hover_on_function_sugar_equals_returns_function_type() { + let code = "local f(x) = x + 1;\nf"; + let result = get_hover(code, 0, 11); + assert_matches!(result, Some(Hover { + contents: HoverContents::Markup(MarkupContent { kind: MarkupKind::Markdown, value }), + range: None + }) => { + assert_eq!(value, "`(x: any) -> number`"); + }); + } } diff --git 
a/crates/jrsonnet-lsp-inference/src/analysis/queries.rs b/crates/jrsonnet-lsp-inference/src/analysis/queries.rs index a981f01d..ef5dfb27 100644 --- a/crates/jrsonnet-lsp-inference/src/analysis/queries.rs +++ b/crates/jrsonnet-lsp-inference/src/analysis/queries.rs @@ -75,6 +75,7 @@ impl TypeAnalysis { /// - Local binding definition names (`local x = value`) fn contextual_type_for_token(&self, token: &SyntaxToken) -> Option<(TextRange, Ty)> { let node = token.parent()?; + let token_range = token.text_range(); if let Some(field_name) = node.ancestors().find_map(FieldName::cast) { return match field_name { @@ -90,18 +91,24 @@ impl TypeAnalysis { let bind = node.ancestors().find_map(Bind::cast)?; match bind { Bind::BindDestruct(bind_destruct) => { + let value = bind_destruct.value()?; + let value_range = value.syntax().text_range(); + if value_range.start() <= token_range.start() + && token_range.end() <= value_range.end() + { + return None; + } let destruct = jrsonnet_rowan_parser::nodes::BindDestruct::into(&bind_destruct)?; let Destruct::DestructFull(full) = destruct else { return None; }; - let name = full.name()?; - if node.text_range() != name.syntax().text_range() { + let name_range = full.name()?.syntax().text_range(); + let assign_range = bind_destruct.assign_token().map(|token| token.text_range()); + if token_range != name_range && assign_range != Some(token_range) { return None; } - - let value = bind_destruct.value()?; - let ty = self.type_for_range(value.syntax().text_range())?; - Some((name.syntax().text_range(), ty)) + let ty = self.type_for_range(value_range)?; + Some((value_range, ty)) } Bind::BindFunction(_) => None, } @@ -380,10 +387,14 @@ impl TypeAnalysis { #[cfg(test)] mod tests { + use std::sync::Arc; + use jrsonnet_lsp_document::{DocVersion, Document}; + use jrsonnet_lsp_types::{GlobalTy, GlobalTyStore}; use jrsonnet_rowan_parser::AstNode; use super::*; + use crate::ImportResolver; fn analyze_doc(code: &str) -> (TypeAnalysis, Document) { let doc = 
Document::new(code.to_string(), DocVersion::new(1)); @@ -391,6 +402,25 @@ mod tests { (analysis, doc) } + #[derive(Debug)] + struct NoopImportResolver; + + impl ImportResolver for NoopImportResolver { + fn resolve_import(&self, _import_path: &str) -> Option { + None + } + } + + fn analyze_doc_with_resolver(code: &str) -> (TypeAnalysis, Document) { + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let analysis = TypeAnalysis::analyze_with_resolver( + &doc, + Arc::new(GlobalTyStore::new()), + Arc::new(NoopImportResolver), + ); + (analysis, doc) + } + #[test] fn test_type_at_position_finds_smallest() { let code = "{ a: 1 + 2 }"; @@ -454,17 +484,135 @@ mod tests { .type_at_position(&root, bind_name_offset) .expect("should find type at object-local bind name"); - analysis.with_data(ty, |data| match data { - TyData::Object(obj) => { - assert!(!obj.has_unknown); - assert_eq!(obj.fields.len(), 1); - assert_eq!(obj.fields[0].0, "a"); - assert_eq!(obj.fields[0].1.ty, Ty::NUMBER); - } - other => panic!("expected object type for bind name, got {other:?}"), + analysis.with_data(ty, |data| { + assert!( + matches!(data, TyData::Object(_)), + "expected object type for bind name, got {:?}", + data + ); + let TyData::Object(obj) = data else { + return; + }; + assert!(!obj.has_unknown); + assert_eq!(obj.fields.len(), 1); + assert_eq!(obj.fields[0].0, "a"); + assert_eq!(obj.fields[0].1.ty, Ty::NUMBER); }); } + #[test] + fn test_type_at_position_object_local_bind_equals_uses_value_type() { + let code = r"{ local x = { a: 1 }, y: x }"; + let (analysis, doc) = analyze_doc(code); + let root = doc.ast().syntax().clone(); + + let equals_offset = nth_offset(code, "=", 0); + let ty = analysis + .type_at_position(&root, equals_offset) + .expect("should find type at object-local bind equals token"); + + analysis.with_data(ty, |data| { + assert!( + matches!(data, TyData::Object(_)), + "expected object type for bind equals, got {:?}", + data + ); + let TyData::Object(obj) = data 
else { + return; + }; + assert!(!obj.has_unknown); + assert_eq!(obj.fields.len(), 1); + assert_eq!(obj.fields[0].0, "a"); + assert_eq!(obj.fields[0].1.ty, Ty::NUMBER); + }); + } + + #[test] + fn test_type_at_position_function_sugar_param_is_parameter_type() { + let code = "local f(x) = x + 1; f"; + let (analysis, doc) = analyze_doc(code); + let root = doc.ast().syntax().clone(); + + let param_offset = nth_offset(code, "x)", 0); + let ty = analysis + .type_at_position(&root, param_offset) + .expect("should find type at function-sugar parameter token"); + + assert_eq!(ty, Ty::ANY); + } + + #[test] + fn test_type_at_position_function_sugar_equals_is_function_type() { + let code = "local f(x) = x + 1; f"; + let (analysis, doc) = analyze_doc(code); + let root = doc.ast().syntax().clone(); + + let equals_offset = nth_offset(code, "=", 0); + let ty = analysis + .type_at_position(&root, equals_offset) + .expect("should find type at function-sugar equals token"); + + analysis.with_data(ty, |data| { + assert!( + matches!(data, TyData::Function(_)), + "expected function type for bind equals, got {:?}", + data + ); + let TyData::Function(func) = data else { + return; + }; + assert_eq!(func.params.len(), 1); + assert_eq!(func.params[0].name, "x"); + assert_eq!(func.params[0].ty, Ty::ANY); + assert_eq!( + func.return_spec, + jrsonnet_lsp_types::ReturnSpec::Fixed(Ty::NUMBER) + ); + }); + } + + #[test] + fn test_type_at_position_function_sugar_default_param_reflects_default_type() { + let code = "local f(x = 1) = x; f"; + let (analysis, doc) = analyze_doc(code); + let root = doc.ast().syntax().clone(); + + let param_offset = nth_offset(code, "x =", 0); + let ty = analysis + .type_at_position(&root, param_offset) + .expect("should find type at defaulted function-sugar parameter"); + + assert_eq!(ty, Ty::NUMBER); + } + + #[test] + fn test_type_at_position_function_sugar_param_multiline_is_parameter_type() { + let code = "local f(x) = x + 1;\nf"; + let (analysis, doc) = 
analyze_doc(code); + let root = doc.ast().syntax().clone(); + + let param_offset = nth_offset(code, "x) =", 0); + let ty = analysis + .type_at_position(&root, param_offset) + .expect("should find type at multiline function-sugar parameter token"); + + assert_eq!(ty, Ty::ANY); + } + + #[test] + fn test_type_at_position_function_sugar_with_resolver_keeps_parameter_type() { + let code = "local f(x) = x + 1;\nf"; + let (analysis, doc) = analyze_doc_with_resolver(code); + let root = doc.ast().syntax().clone(); + + let param_offset = nth_offset(code, "x) =", 0); + let ty = analysis + .type_at_position(&root, param_offset) + .expect("should find type at function-sugar parameter token with resolver"); + + assert_eq!(ty, Ty::ANY); + } + #[test] fn test_fields_at_position_object() { let code = "{ a: 1, b: 2 }"; diff --git a/crates/jrsonnet-lsp-inference/src/expr/mod.rs b/crates/jrsonnet-lsp-inference/src/expr/mod.rs index d4dd4d30..6e82b6fc 100644 --- a/crates/jrsonnet-lsp-inference/src/expr/mod.rs +++ b/crates/jrsonnet-lsp-inference/src/expr/mod.rs @@ -145,6 +145,11 @@ fn infer_bind_type_ty(bind: &Bind, env: &mut TypeEnv, recorder: let ty = bd .value() .map_or(Ty::ANY, |v| infer_expr_ty_impl(&v, env, None, recorder)); + recorder.record(bd.syntax().text_range(), ty); + recorder.record(name_node.syntax().text_range(), ty); + if let Some(assign_token) = bd.assign_token() { + recorder.record(assign_token.text_range(), ty); + } env.define_ty(name, ty); } } @@ -159,9 +164,10 @@ fn infer_bind_type_ty(bind: &Bind, env: &mut TypeEnv, recorder: return; }; let name = ident.text().to_string(); - let params = bf - .params() - .map(|p| extract_params_with_default_types_ty(&p, env)) + let params_desc = bf.params(); + let params = params_desc + .as_ref() + .map(|p| extract_params_with_default_types_ty(p, env)) .unwrap_or_default(); // Install a provisional function first so recursive self-calls can resolve. 
@@ -215,11 +221,28 @@ fn infer_bind_type_ty(bind: &Bind, env: &mut TypeEnv, recorder: .collect(); let final_func = FunctionData { - params: final_params, + params: final_params.clone(), return_spec: ReturnSpec::Fixed(return_ty), variadic: false, }; let final_ty = env.store_mut().intern(TyData::Function(final_func)); + recorder.record(bf.syntax().text_range(), final_ty); + recorder.record(name_node.syntax().text_range(), final_ty); + if let Some(assign_token) = bf.assign_token() { + recorder.record(assign_token.text_range(), final_ty); + } + if let Some(params_desc) = params_desc { + recorder.record(params_desc.syntax().text_range(), final_ty); + for (param, param_ty) in params_desc.params().zip(final_params.iter()) { + recorder.record(param.syntax().text_range(), param_ty.ty); + if let Some(destruct) = param.destruct() { + recorder.record(destruct.syntax().text_range(), param_ty.ty); + } + if let Some(assign_token) = param.assign_token() { + recorder.record(assign_token.text_range(), param_ty.ty); + } + } + } env.define_ty(name, final_ty); } } diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_completion_execute_command.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_completion_execute_command.yaml index 50ac63e8..9f421102 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_completion_execute_command.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_completion_execute_command.yaml @@ -12,9 +12,10 @@ steps: as: hoverRequest file: main.jsonnet at: m1 -- step: expectHover +- step: expectHoverType request: hoverRequest - result: + type: any + match: exact - step: requestCompletion as: completionRequest diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_function_sugar_positions.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_function_sugar_positions.yaml new file mode 100644 index 00000000..cce6401a --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_function_sugar_positions.yaml @@ -0,0 +1,46 @@ 
+# Verify function-sugar hover reports parameter and bind syntax types, not +# unrelated enclosing expression types. +steps: +- step: create + files: + main.jsonnet: | + local f(((param:|))x((rparen:|))) ((eq:|))= x + 1; + ((use:|))f + +- step: diagnosticsSettled + +- step: requestHover + as: paramHover + file: main.jsonnet + at: param +- step: expectHoverType + request: paramHover + type: any + match: exact + +- step: requestHover + as: rparenHover + file: main.jsonnet + at: rparen +- step: expectHoverType + request: rparenHover + type: any + match: exact + +- step: requestHover + as: equalsHover + file: main.jsonnet + at: eq +- step: expectHoverType + request: equalsHover + type: '(x: any) -> number' + match: exact + +- step: requestHover + as: usageHover + file: main.jsonnet + at: use +- step: expectHoverType + request: usageHover + type: '(x: any) -> number' + match: exact From 1ce2db7d11b8005dbd2cb9cdf71b16d76035ae05 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Wed, 18 Feb 2026 11:16:42 +0000 Subject: [PATCH 144/210] fix(hover): trim trailing blank lines from multiline local hovers When generating hover previews for local definitions, we were previously including any following blank lines up to the maximum hover line limit. This could result in hover previews that ended with one or more blank lines, which was not ideal. With this change, we find the actual end of the definition based on the syntax tree. 
--- Cargo.lock | 1 + crates/jrsonnet-lsp-handlers/Cargo.toml | 1 + .../src/hover/handler.rs | 59 +++++++++++--- .../jrsonnet-lsp-handlers/src/hover/local.rs | 78 ++++++++++--------- ...ne_preview_trims_trailing_blank_lines.yaml | 56 +++++++++++++ 5 files changed, 148 insertions(+), 47 deletions(-) create mode 100644 crates/jrsonnet-lsp/tests/scenarios/hover/local_multiline_preview_trims_trailing_blank_lines.yaml diff --git a/Cargo.lock b/Cargo.lock index 84fd69d6..5bff0b56 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1881,6 +1881,7 @@ name = "jrsonnet-lsp-handlers" version = "0.5.0-pre97" dependencies = [ "assert_matches", + "indoc", "jrsonnet-lsp-document", "jrsonnet-lsp-import", "jrsonnet-lsp-inference", diff --git a/crates/jrsonnet-lsp-handlers/Cargo.toml b/crates/jrsonnet-lsp-handlers/Cargo.toml index a8095a9d..f7a5cad9 100644 --- a/crates/jrsonnet-lsp-handlers/Cargo.toml +++ b/crates/jrsonnet-lsp-handlers/Cargo.toml @@ -28,6 +28,7 @@ workspace = true [dev-dependencies] assert_matches = "1.5.0" +indoc.workspace = true jrsonnet-lsp-document = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-document" } rstest = "0.23" tempfile.workspace = true diff --git a/crates/jrsonnet-lsp-handlers/src/hover/handler.rs b/crates/jrsonnet-lsp-handlers/src/hover/handler.rs index 4a2438df..c9f2a3ca 100644 --- a/crates/jrsonnet-lsp-handlers/src/hover/handler.rs +++ b/crates/jrsonnet-lsp-handlers/src/hover/handler.rs @@ -43,7 +43,6 @@ pub fn hover_with_import_field_type( document, analysis, position, - text, offset, import_field_type_resolver, ) { @@ -96,6 +95,7 @@ mod tests { use std::sync::Arc; use assert_matches::assert_matches; + use indoc::indoc; use jrsonnet_lsp_document::DocVersion; use jrsonnet_lsp_inference::ImportResolver; use jrsonnet_lsp_types::{ @@ -227,25 +227,25 @@ mod tests { "local foo = 1; foo", 0, 15, - "`number`\n\n```jsonnet\nlocal foo = 1; foo\n```" + "`number`\n\n```jsonnet\nlocal foo = 1;\n```" )] #[case( "local add(a, b) = a + b; add(1, 2)", 0, 25, - 
"`(a: any, b: any) -> number`\n\n```jsonnet\nlocal add(a, b) = a + b; add(1, 2)\n```" + "`(a: any, b: any) -> number`\n\n```jsonnet\nlocal add(a, b) = a + b;\n```" )] #[case( "local arr = [1, 2, 3]; arr", 0, 6, - "`[number, number, number]`\n\n```jsonnet\nlocal arr = [1, 2, 3]; arr\n```" + "`[number, number, number]`\n\n```jsonnet\nlocal arr = [1, 2, 3];\n```" )] #[case( "local obj = { a: 1 }; obj", 0, 6, - "`{ a: number }`\n\n```jsonnet\nlocal obj = { a: 1 }; obj\n```" + "`{ a: number }`\n\n```jsonnet\nlocal obj = { a: 1 };\n```" )] fn test_local_hover( #[case] code: &str, @@ -262,6 +262,46 @@ mod tests { }); } + #[test] + fn test_local_hover_multiline_preview_trims_trailing_blank_lines() { + let code = "{\n local x = {\n a: 1,\n b: 2,\n },\n\n z: x,\n}\n"; + let result = get_hover(code, 6, 5); + assert_matches!(result, Some(Hover { + contents: HoverContents::Markup(MarkupContent { kind: MarkupKind::Markdown, value }), + range: None + }) => { + assert_eq!(value, indoc! {r" + `{ a: number, b: number }` + + ```jsonnet + local x = { + a: 1, + b: 2, + } + ```"}); + }); + } + + #[test] + fn test_local_hover_file_level_preview_trims_trailing_blank_lines() { + let code = "local x = {\n a: 1,\n b: 2,\n};\n\n{\n z: x,\n}\n"; + let result = get_hover(code, 6, 5); + assert_matches!(result, Some(Hover { + contents: HoverContents::Markup(MarkupContent { kind: MarkupKind::Markdown, value }), + range: None + }) => { + assert_eq!(value, indoc! 
{r" + `{ a: number, b: number }` + + ```jsonnet + local x = { + a: 1, + b: 2, + }; + ```"}); + }); + } + #[rstest] #[case( r#"import "lib/utils.libsonnet""#, @@ -362,17 +402,12 @@ mod tests { } #[rstest] - #[case( - "local x = 42; x", - 0, - 14, - "`number`\n\n```jsonnet\nlocal x = 42; x\n```" - )] + #[case("local x = 42; x", 0, 14, "`number`\n\n```jsonnet\nlocal x = 42;\n```")] #[case( "local obj = { a: 1, b: \"hello\" }; obj", 0, 35, - "`{ a: number, b: string }`\n\n```jsonnet\nlocal obj = { a: 1, b: \"hello\" }; obj\n```" + "`{ a: number, b: string }`\n\n```jsonnet\nlocal obj = { a: 1, b: \"hello\" };\n```" )] #[case("42", 0, 0, "`number`")] #[case("\"hello\"", 0, 1, "`string`")] diff --git a/crates/jrsonnet-lsp-handlers/src/hover/local.rs b/crates/jrsonnet-lsp-handlers/src/hover/local.rs index 2fe38e1e..9c361084 100644 --- a/crates/jrsonnet-lsp-handlers/src/hover/local.rs +++ b/crates/jrsonnet-lsp-handlers/src/hover/local.rs @@ -2,7 +2,7 @@ use jrsonnet_lsp_document::{to_lsp_range, token_at_offset, ByteOffset, Document, use jrsonnet_lsp_inference::TypeAnalysis; use jrsonnet_lsp_types::{Ty, TyData}; use jrsonnet_rowan_parser::{ - nodes::{Bind, Destruct}, + nodes::{Bind, Destruct, MemberBindStmt, StmtLocal}, AstNode, SyntaxKind, }; use lsp_types::{Hover, HoverContents, MarkupContent, MarkupKind}; @@ -15,7 +15,6 @@ pub(super) fn check_local_hover( document: &Document, analysis: &TypeAnalysis, position: LspPosition, - text: &str, offset: ByteOffset, import_field_type_resolver: Option<&ImportFieldTypeResolver<'_>>, ) -> Option { @@ -63,39 +62,7 @@ pub(super) fn check_local_hover( match result { DefinitionResult::Local(range) => { - let start_line = range.start.line as usize; - let lines: Vec<&str> = text.lines().collect(); - let num_lines = lines.len(); - - if start_line >= num_lines { - return None; - } - - let mut def_end_line = start_line; - for (i, line) in lines - .iter() - .enumerate() - .skip(start_line) - .take(MAX_HOVER_LINES) - { - def_end_line = i; - if 
line.contains(';') { - break; - } - } - - let preview_slice = lines.get(start_line..=def_end_line)?; - let preview_lines: Vec<&str> = preview_slice - .iter() - .take(MAX_HOVER_LINES) - .copied() - .collect(); - - let mut preview = preview_lines.join("\n"); - if def_end_line - start_line >= MAX_HOVER_LINES { - preview.push_str("\n..."); - } - + let preview = definition_preview(document, range)?; Some(Hover { contents: HoverContents::Markup(MarkupContent { kind: MarkupKind::Markdown, @@ -124,6 +91,47 @@ pub(super) fn check_local_hover( } } +fn definition_preview(document: &Document, range: lsp_types::Range) -> Option { + let text = document.text(); + let line_index = document.line_index(); + let ast = document.ast(); + + let def_pos: LspPosition = (range.start.line, range.start.character).into(); + let def_offset = line_index.offset(def_pos, text)?; + let token = token_at_offset(ast.syntax(), def_offset)?; + let bind = token.parent()?.ancestors().find_map(Bind::cast)?; + + let preview_node = bind + .syntax() + .ancestors() + .find_map(MemberBindStmt::cast) + .map(|node| node.syntax().clone()) + .or_else(|| { + bind.syntax() + .ancestors() + .find_map(StmtLocal::cast) + .map(|node| node.syntax().clone()) + }) + .unwrap_or_else(|| bind.syntax().clone()); + + let preview_text = preview_node.text().to_string(); + let mut preview_lines: Vec<&str> = preview_text.lines().collect(); + if preview_lines.is_empty() { + return None; + } + + let truncated = preview_lines.len() > MAX_HOVER_LINES; + if truncated { + preview_lines.truncate(MAX_HOVER_LINES); + } + + let mut preview = preview_lines.join("\n"); + if truncated { + preview.push_str("\n..."); + } + Some(preview) +} + fn local_definition_at_offset(document: &Document, offset: ByteOffset) -> Option { let ast = document.ast(); let token = token_at_offset(ast.syntax(), offset)?; diff --git a/crates/jrsonnet-lsp/tests/scenarios/hover/local_multiline_preview_trims_trailing_blank_lines.yaml 
b/crates/jrsonnet-lsp/tests/scenarios/hover/local_multiline_preview_trims_trailing_blank_lines.yaml new file mode 100644 index 00000000..60f1193d --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/hover/local_multiline_preview_trims_trailing_blank_lines.yaml @@ -0,0 +1,56 @@ +steps: +- step: create + files: + main.jsonnet: | + local top = { + a: 1, + b: 2, + }; + + { + topUse: ((topUse:|))top, + local objLocal = { + c: 3, + d: 4, + }, + + objUse: ((objUse:|))objLocal, + } + +- step: requestHover + as: topHover + file: main.jsonnet + at: topUse +- step: expectHover + request: topHover + result: + contents: + kind: markdown + value: |- + `{ a: number, b: number }` + + ```jsonnet + local top = { + a: 1, + b: 2, + }; + ``` + +- step: requestHover + as: objHover + file: main.jsonnet + at: objUse +- step: expectHover + request: objHover + result: + contents: + kind: markdown + value: |- + `{ c: number, d: number }` + + ```jsonnet + local objLocal = { + c: 3, + d: 4, + } + ``` From 23bd0878769e07d4e63d0c8beed3c386c3ebc390 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Wed, 18 Feb 2026 14:22:59 +0000 Subject: [PATCH 145/210] refactor(hover): centralize hover policy and section model Introduce explicit hover target classification and a target policy matrix to decide which sections are shown for each hover kind. Move hover output assembly to semantic blocks and a single renderer that emits array hover contents with language-tagged code blocks. Unify preview selection and rendering under LSP-owned policy and wire handler and local flows through shared hover facts. Update scenario expectations to assert ordered hover section arrays, drop expectHoverType, and route expectTypes through expectHoverPrimaryType. 
--- .../src/hover/handler.rs | 630 ++++++++++++++---- .../jrsonnet-lsp-handlers/src/hover/local.rs | 235 +++++-- crates/jrsonnet-lsp-handlers/src/hover/mod.rs | 3 + .../jrsonnet-lsp-handlers/src/hover/model.rs | 293 ++++++++ .../src/hover/preview_policy.rs | 157 +++++ .../jrsonnet-lsp-handlers/src/hover/stdlib.rs | 15 +- .../jrsonnet-lsp-handlers/src/hover/target.rs | 139 ++++ .../src/scenario/doctest_assertions.rs | 9 +- .../jrsonnet-lsp-scenario/src/scenario/mod.rs | 21 +- .../src/scenario/request_steps.rs | 174 +++-- .../src/scenario_runner/expectation_steps.rs | 104 +-- .../src/scenario_runner/helpers.rs | 37 +- .../src/scenario_runner/runner.rs | 26 +- .../src/scenario_script/compile.rs | 39 +- .../src/scenario_script/inputs.rs | 9 +- ...er_map_predicate_refines_output_array.yaml | 1 - ...r_with_predicate_refines_output_array.yaml | 2 - .../negated_guard_refines_branches.yaml | 1 - .../union_guard_refines_both_branches.yaml | 1 - .../length_eq_refines_array_to_tuple.yaml | 1 - ..._function_refines_impossible_branches.yaml | 1 - ...known_function_allows_typed_call_site.yaml | 1 - ...field_type_guards_refine_nested_paths.yaml | 1 - .../tests/scenarios/hover/array_variable.yaml | 9 +- .../scenarios/hover/boolean_literal.yaml | 17 +- .../scenarios/hover/function_variable.yaml | 10 +- .../hover_new_import_invalid_syntax.yaml | 11 +- ...ne_preview_trims_trailing_blank_lines.yaml | 18 +- .../tests/scenarios/hover/null_literal.yaml | 17 +- .../tests/scenarios/hover/number_literal.yaml | 8 +- .../scenarios/hover/object_variable.yaml | 9 +- .../tests/scenarios/hover/std_object.yaml | 8 +- .../tests/scenarios/hover/string_literal.yaml | 8 +- .../hover_bracket_lookup_field_type.yaml | 10 +- .../runner/hover_cjk_import_shape.yaml | 11 +- .../hover_close_brace_enclosing_type.yaml | 15 +- .../hover_completion_execute_command.yaml | 10 +- .../runner/hover_edge_positions.yaml | 20 +- .../hover_function_sugar_positions.yaml | 40 +- ...hover_import_binding_member_signature.yaml 
| 22 +- .../hover_import_field_method_type.yaml | 11 +- .../hover_non_identifier_field_names.yaml | 10 +- .../hover_object_field_key_is_string.yaml | 10 +- .../hover_object_local_binding_precision.yaml | 39 +- ..._object_local_binding_usage_precision.yaml | 13 +- .../hover_open_brace_enclosing_type.yaml | 15 +- .../runner/hover_union_nested_field_type.yaml | 10 +- .../runner/importstr_non_json_dependency.yaml | 22 +- 48 files changed, 1699 insertions(+), 574 deletions(-) create mode 100644 crates/jrsonnet-lsp-handlers/src/hover/model.rs create mode 100644 crates/jrsonnet-lsp-handlers/src/hover/preview_policy.rs create mode 100644 crates/jrsonnet-lsp-handlers/src/hover/target.rs diff --git a/crates/jrsonnet-lsp-handlers/src/hover/handler.rs b/crates/jrsonnet-lsp-handlers/src/hover/handler.rs index c9f2a3ca..0a59ffc4 100644 --- a/crates/jrsonnet-lsp-handlers/src/hover/handler.rs +++ b/crates/jrsonnet-lsp-handlers/src/hover/handler.rs @@ -1,9 +1,23 @@ use jrsonnet_lsp_document::{token_at_offset, ByteOffset, Document, LspPosition}; use jrsonnet_lsp_inference::TypeAnalysis; -use jrsonnet_rowan_parser::{nodes::Trivia, AstNode, AstToken}; -use lsp_types::{Hover, HoverContents, MarkupContent, MarkupKind}; - -use super::{local::check_local_hover, stdlib::check_stdlib_hover, ImportFieldTypeResolver}; +use jrsonnet_rowan_parser::{nodes::Trivia, AstNode, AstToken, SyntaxToken}; +use lsp_types::Hover; + +use super::{ + local::{check_local_hover, LocalHoverSections}, + model::{compose_hover_contents_with_policy, HoverDocBlock, HoverFacts, HoverTokenPurpose}, + preview_policy::select_preview_code, + stdlib::stdlib_hover_doc, + target::HoverTarget, + ImportFieldTypeResolver, +}; + +#[derive(Debug, Clone, PartialEq, Eq, Default)] +struct HoverDocsFacts { + has_stdlib_docs: bool, + has_token_docs: bool, + blocks: Vec, +} /// Get hover information for the given position. 
/// @@ -35,37 +49,46 @@ pub fn hover_with_import_field_type( return None; } - if let Some(hover) = check_stdlib_hover(&token) { - return Some(hover); - } - - if let Some(hover) = check_local_hover( + let local_sections = check_local_hover( document, analysis, position, offset, import_field_type_resolver, - ) { - return Some(hover); - } - - let type_markdown = inferred_type_markdown(document, analysis, offset); - let token_markdown = token - .kind() - .is_hover_eligible() - .then(|| token.kind().token_doc_markdown()) - .flatten(); - if let Some(value) = merge_markdown_sections(type_markdown, token_markdown) { - return Some(Hover { - contents: HoverContents::Markup(MarkupContent { - kind: MarkupKind::Markdown, - value, - }), - range: None, - }); - } - - None + ); + let local_kind = local_sections.as_ref().map(|sections| sections.kind); + let (local_type_markdown, context_markdown, definition_preview) = local_sections.map_or( + (None, None, None), + |LocalHoverSections { + kind: _, + type_markdown, + context_markdown, + preview_code, + }| (type_markdown, context_markdown, preview_code), + ); + let docs_facts = docs_facts_for_token(&token); + let type_markdown = + local_type_markdown.or_else(|| inferred_type_markdown(document, analysis, offset)); + let preview_code = select_preview_code(&token, definition_preview); + let target = HoverTarget::classify( + local_kind, + docs_facts.has_stdlib_docs, + docs_facts.has_token_docs, + ); + let contents = compose_hover_contents_with_policy( + target.policy(), + HoverFacts { + type_markdown, + context_markdown, + docs: docs_facts.blocks, + preview_code, + }, + )?; + + Some(Hover { + contents, + range: None, + }) } fn inferred_type_markdown( @@ -78,16 +101,38 @@ fn inferred_type_markdown( Some(format!("`{}`", analysis.display_for_hover(ty))) } -fn merge_markdown_sections( - type_markdown: Option, - token_markdown: Option, -) -> Option { - match (type_markdown, token_markdown) { - (Some(ty), Some(doc)) => 
Some(format!("{ty}\n\n---\n\n{doc}")), - (Some(ty), None) => Some(ty), - (None, Some(doc)) => Some(doc), - (None, None) => None, +fn docs_facts_for_token(token: &SyntaxToken) -> HoverDocsFacts { + let mut docs_facts = HoverDocsFacts::default(); + + if let Some(doc) = stdlib_hover_doc(token) { + docs_facts.has_stdlib_docs = true; + docs_facts.blocks.push(HoverDocBlock::Stdlib { + name: doc.name.to_string(), + signature: doc.signature.to_string(), + description: doc.description.to_string(), + example: doc.example.map(ToString::to_string), + }); } + + let token_purposes = if token.kind().is_hover_eligible() { + token.kind().token_doc_purposes() + } else { + &[] + }; + if !token_purposes.is_empty() { + docs_facts.has_token_docs = true; + docs_facts.blocks.push(HoverDocBlock::TokenPurposes( + token_purposes + .iter() + .map(|purpose| HoverTokenPurpose { + doc: purpose.doc.to_string(), + example: purpose.example.to_string(), + }) + .collect(), + )); + } + + docs_facts } #[cfg(test)] @@ -102,6 +147,7 @@ mod tests { FieldDefInterned, FieldVis, FunctionData, GlobalTy, GlobalTyStore, ObjectData, ParamInterned, ReturnSpec, Ty, TyData, }; + use lsp_types::{HoverContents, MarkedString}; use rstest::rstest; use super::*; @@ -114,6 +160,37 @@ mod tests { hover(&doc, pos, &analysis) } + fn marked_string_as_markdown(marked: &MarkedString) -> String { + match marked { + MarkedString::String(value) => value.clone(), + MarkedString::LanguageString(language) => { + format!("```{}\n{}\n```", language.language, language.value) + } + } + } + + fn hover_contents_as_sections(contents: &HoverContents) -> Option> { + match contents { + HoverContents::Array(items) => Some( + items + .iter() + .map(marked_string_as_markdown) + .collect::>(), + ), + HoverContents::Markup(_) | HoverContents::Scalar(_) => None, + } + } + + fn assert_hover_contents(contents: &HoverContents, expected_sections: &[&str]) { + let actual_sections = + hover_contents_as_sections(contents).expect("hover contents should be 
array"); + let expected_sections = expected_sections + .iter() + .map(|section| (*section).to_string()) + .collect::>(); + assert_eq!(actual_sections, expected_sections); + } + #[derive(Debug)] struct StaticImportResolver { path: &'static str, @@ -166,32 +243,80 @@ mod tests { #[rstest] #[case( - "std.map(function(x) x, [])", - 0, 4, - "```jsonnet\nstd.map(func, arr)\n```\n\nApplies `func` to each element of `arr`.\n\n**Example:**\n```jsonnet\nstd.map(function(x) x * 2, [1,2,3]) // [2, 4, 6]\n```" - )] + "std.map(function(x) x, [])", + 0, 4, + vec![ + "`function(func: function(), arr: array)`", + indoc! {r" + ```jsonnet + std.map(func, arr) + ```"}, + "Applies `func` to each element of `arr`.", + "**Example:**", + indoc! {r" + ```jsonnet + std.map(function(x) x * 2, [1,2,3]) // [2, 4, 6] + ```"}, + indoc! {r" + ```jsonnet + std.map + ```"}, + ] + )] #[case( - "local s = std; s.map(function(x) x, [])", + "local s = std; s.map(function(x) x, [])", 0, 17, - "```jsonnet\nstd.map(func, arr)\n```\n\nApplies `func` to each element of `arr`.\n\n**Example:**\n```jsonnet\nstd.map(function(x) x * 2, [1,2,3]) // [2, 4, 6]\n```" - )] + vec![ + "`function(func: function(), arr: array)`", + indoc! {r" + ```jsonnet + std.map(func, arr) + ```"}, + "Applies `func` to each element of `arr`.", + "**Example:**", + indoc! {r" + ```jsonnet + std.map(function(x) x * 2, [1,2,3]) // [2, 4, 6] + ```"}, + indoc! {r" + ```jsonnet + s.map + ```"}, + ] + )] #[case( - "std.filter(function(x) x > 0, [1, -1, 2])", + "std.filter(function(x) x > 0, [1, -1, 2])", 0, 4, - "```jsonnet\nstd.filter(func, arr)\n```\n\nReturns elements of `arr` where `func(x)` is true.\n\n**Example:**\n```jsonnet\nstd.filter(function(x) x > 1, [1,2,3]) // [2, 3]\n```" - )] + vec![ + "`function(func: function(), arr: array)`", + indoc! {r" + ```jsonnet + std.filter(func, arr) + ```"}, + "Returns elements of `arr` where `func(x)` is true.", + "**Example:**", + indoc! 
{r" + ```jsonnet + std.filter(function(x) x > 1, [1,2,3]) // [2, 3] + ```"}, + indoc! {r" + ```jsonnet + std.filter + ```"}, + ] + )] fn test_stdlib_hover( #[case] code: &str, #[case] line: u32, #[case] char: u32, - #[case] expected: &str, + #[case] expected: Vec<&str>, ) { let result = get_hover(code, line, char); assert_matches!(result, Some(Hover { - contents: HoverContents::Markup(MarkupContent { kind: MarkupKind::Markdown, value }), + contents, range: None }) => { - assert_eq!(value, expected); + assert_hover_contents(&contents, &expected); }); } @@ -210,15 +335,26 @@ mod tests { .offset(pos, doc.text()) .expect("offset should exist"); let token = token_at_offset(doc.ast().syntax(), offset).expect("token should exist"); - assert_eq!(check_stdlib_hover(&token), None); + assert_eq!(stdlib_hover_doc(&token), None); let hover_result = hover(&doc, pos, &analysis); assert_matches!( hover_result, Some(Hover { - contents: HoverContents::Markup(MarkupContent { kind: MarkupKind::Markdown, value }), + contents, range: None - }) if value == "`function(x: any)`" + }) => { + assert_hover_contents( + &contents, + &[ + "`function(x: any)`", + indoc! {r" + ```jsonnet + std.map + ```"}, + ], + ); + } ); } @@ -227,38 +363,62 @@ mod tests { "local foo = 1; foo", 0, 15, - "`number`\n\n```jsonnet\nlocal foo = 1;\n```" + vec![ + "`number`", + indoc! {r" + ```jsonnet + local foo = 1; + ```"}, + ] )] #[case( "local add(a, b) = a + b; add(1, 2)", 0, 25, - "`(a: any, b: any) -> number`\n\n```jsonnet\nlocal add(a, b) = a + b;\n```" + vec![ + "`(a: any, b: any) -> number`", + indoc! {r" + ```jsonnet + local add(a, b) = a + b; + ```"}, + ] )] #[case( "local arr = [1, 2, 3]; arr", 0, 6, - "`[number, number, number]`\n\n```jsonnet\nlocal arr = [1, 2, 3];\n```" + vec![ + "`[number, number, number]`", + indoc! 
{r" + ```jsonnet + local arr = [1, 2, 3]; + ```"}, + ] )] #[case( "local obj = { a: 1 }; obj", 0, 6, - "`{ a: number }`\n\n```jsonnet\nlocal obj = { a: 1 };\n```" + vec![ + "`{ a: number }`", + indoc! {r" + ```jsonnet + local obj = { a: 1 }; + ```"}, + ] )] fn test_local_hover( #[case] code: &str, #[case] line: u32, #[case] char: u32, - #[case] expected: &str, + #[case] expected: Vec<&str>, ) { let result = get_hover(code, line, char); assert_matches!(result, Some(Hover { - contents: HoverContents::Markup(MarkupContent { kind: MarkupKind::Markdown, value }), + contents, range: None }) => { - assert_eq!(value, expected); + assert_hover_contents(&contents, &expected); }); } @@ -267,18 +427,22 @@ mod tests { let code = "{\n local x = {\n a: 1,\n b: 2,\n },\n\n z: x,\n}\n"; let result = get_hover(code, 6, 5); assert_matches!(result, Some(Hover { - contents: HoverContents::Markup(MarkupContent { kind: MarkupKind::Markdown, value }), + contents, range: None }) => { - assert_eq!(value, indoc! {r" - `{ a: number, b: number }` - - ```jsonnet - local x = { - a: 1, - b: 2, - } - ```"}); + assert_hover_contents( + &contents, + &[ + "`{ a: number, b: number }`", + indoc! {r" + ```jsonnet + local x = { + a: 1, + b: 2, + } + ```"}, + ], + ); }); } @@ -287,64 +451,107 @@ mod tests { let code = "local x = {\n a: 1,\n b: 2,\n};\n\n{\n z: x,\n}\n"; let result = get_hover(code, 6, 5); assert_matches!(result, Some(Hover { - contents: HoverContents::Markup(MarkupContent { kind: MarkupKind::Markdown, value }), + contents, range: None }) => { - assert_eq!(value, indoc! {r" - `{ a: number, b: number }` - - ```jsonnet - local x = { - a: 1, - b: 2, - }; - ```"}); + assert_hover_contents( + &contents, + &[ + "`{ a: number, b: number }`", + indoc! 
{r" + ```jsonnet + local x = { + a: 1, + b: 2, + }; + ```"}, + ], + ); }); } #[rstest] #[case( - r#"import "lib/utils.libsonnet""#, - 0, - 10, - "`any`\n\n`lib/utils.libsonnet`" - )] + r#"import "lib/utils.libsonnet""#, + 0, + 10, + vec![ + "`any`", + "`lib/utils.libsonnet`", + indoc! {r#" + ```jsonnet + import "lib/utils.libsonnet" + ```"#}, + ] + )] #[case( - r#"local lib = import "lib.libsonnet"; lib.foo"#, - 0, - 40, - "`any`\n\n`foo` from `lib.libsonnet`" - )] + r#"local lib = import "lib.libsonnet"; lib.foo"#, + 0, + 40, + vec![ + "`any`", + "`foo` from `lib.libsonnet`", + indoc! {r" + ```jsonnet + lib.foo + ```"}, + ] + )] fn test_import_hover( #[case] code: &str, #[case] line: u32, #[case] char: u32, - #[case] expected: &str, + #[case] expected: Vec<&str>, ) { let result = get_hover(code, line, char); assert_matches!(result, Some(Hover { - contents: HoverContents::Markup(MarkupContent { kind: MarkupKind::Markdown, value }), + contents, range: None }) => { - assert_eq!(value, expected); + assert_hover_contents(&contents, &expected); }); } #[rstest] - #[case(None, "`function(x)`\n\n`foo` from `lib.libsonnet`", "function(x)")] #[case( - Some("function"), - "`(x: any) -> number`\n\n`foo` from `lib.libsonnet`", - "function(x)" - )] + None, + vec![ + "`function(x)`", + "`foo` from `lib.libsonnet`", + indoc! {r" + ```jsonnet + lib.foo + ```"}, + ], + "function(x)" + )] #[case( - Some("object"), - "`{ resolved: string }`\n\n`foo` from `lib.libsonnet`", - "{ resolved: string }" - )] + Some("function"), + vec![ + "`(x: any) -> number`", + "`foo` from `lib.libsonnet`", + indoc! {r" + ```jsonnet + lib.foo + ```"}, + ], + "function(x)" + )] + #[case( + Some("object"), + vec![ + "`{ resolved: string }`", + "`foo` from `lib.libsonnet`", + indoc! 
{r" + ```jsonnet + lib.foo + ```"}, + ], + "{ resolved: string }" + )] fn test_import_field_hover_resolution_exact_shape( #[case] inferred_import_kind: Option<&str>, - #[case] expected: &str, + #[case] expected: Vec<&str>, #[case] resolved_type: &str, ) { let code = r#"local lib = import "lib.libsonnet"; lib.foo"#; @@ -394,36 +601,86 @@ mod tests { ); assert_matches!(result, Some(Hover { - contents: HoverContents::Markup(MarkupContent { kind: MarkupKind::Markdown, value }), + contents, range: None }) => { - assert_eq!(value, expected); + assert_hover_contents(&contents, &expected); }); } #[rstest] - #[case("local x = 42; x", 0, 14, "`number`\n\n```jsonnet\nlocal x = 42;\n```")] + #[case( + "local x = 42; x", + 0, + 14, + vec![ + "`number`", + indoc! {r" + ```jsonnet + local x = 42; + ```"}, + ] + )] #[case( "local obj = { a: 1, b: \"hello\" }; obj", 0, 35, - "`{ a: number, b: string }`\n\n```jsonnet\nlocal obj = { a: 1, b: \"hello\" };\n```" + vec![ + "`{ a: number, b: string }`", + indoc! {r#" + ```jsonnet + local obj = { a: 1, b: "hello" }; + ```"#}, + ] )] - #[case("42", 0, 0, "`number`")] - #[case("\"hello\"", 0, 1, "`string`")] - #[case("[1, 2, 3]", 0, 1, "`number`")] + #[case( + "42", + 0, + 0, + vec![ + "`number`", + indoc! {r" + ```jsonnet + 42 + ```"}, + ] + )] + #[case( + "\"hello\"", + 0, + 1, + vec![ + "`string`", + indoc! {r#" + ```jsonnet + "hello" + ```"#}, + ] + )] + #[case( + "[1, 2, 3]", + 0, + 1, + vec![ + "`number`", + indoc! 
{r" + ```jsonnet + 1 + ```"}, + ] + )] fn test_type_inference_hover( #[case] code: &str, #[case] line: u32, #[case] char: u32, - #[case] expected: &str, + #[case] expected: Vec<&str>, ) { let result = get_hover(code, line, char); assert_matches!(result, Some(Hover { - contents: HoverContents::Markup(MarkupContent { kind: MarkupKind::Markdown, value }), + contents, range: None }) => { - assert_eq!(value, expected); + assert_hover_contents(&contents, &expected); }); } @@ -431,12 +688,19 @@ mod tests { fn test_keyword_hover_includes_token_docs() { let result = get_hover("null", 0, 0); assert_matches!(result, Some(Hover { - contents: HoverContents::Markup(MarkupContent { kind: MarkupKind::Markdown, value }), + contents, range: None }) => { - assert_eq!( - value, - "`null`\n\n---\n\nLiteral `null` value.\n\n```jsonnet\nnull\n```" + assert_hover_contents( + &contents, + &[ + "`null`", + "Literal `null` value.", + indoc! {r" + ```jsonnet + null + ```"}, + ], ); }); } @@ -445,12 +709,19 @@ mod tests { fn test_operator_hover_docs() { let result = get_hover("1 + 2", 0, 2); assert_matches!(result, Some(Hover { - contents: HoverContents::Markup(MarkupContent { kind: MarkupKind::Markdown, value }), + contents, range: None }) => { - assert_eq!( - value, - "`number`\n\n---\n\n`+` adds numbers, concatenates strings/arrays, or merges objects.\n\n```jsonnet\n1 + 2\n```" + assert_hover_contents( + &contents, + &[ + "`number`", + "`+` adds numbers, concatenates strings/arrays, or merges objects.", + indoc! 
{r" + ```jsonnet + 1 + 2 + ```"}, + ], ); }); } @@ -459,12 +730,31 @@ mod tests { fn test_multi_purpose_operator_hover_docs() { let result = get_hover("\"hello %s\" % \"world\"", 0, 11); assert_matches!(result, Some(Hover { - contents: HoverContents::Markup(MarkupContent { kind: MarkupKind::Markdown, value }), + contents, range: None }) => { - assert_eq!( - value, - "`string`\n\n---\n\nThis token has multiple purposes:\n\n**Purpose 1**\n\n`%` computes numeric remainder.\n\n```jsonnet\n5 % 2\n```\n\n**Purpose 2**\n\n`%` formats strings with placeholders.\n\n```jsonnet\n\"hello %s\" % \"world\"\n```" + assert_hover_contents( + &contents, + &[ + "`string`", + "This token has multiple purposes:", + indoc! {r" + **Purpose 1** + + `%` computes numeric remainder."}, + indoc! {r" + ```jsonnet + 5 % 2 + ```"}, + indoc! {r" + **Purpose 2** + + `%` formats strings with placeholders."}, + indoc! {r#" + ```jsonnet + "hello %s" % "world" + ```"#}, + ], ); }); } @@ -480,10 +770,19 @@ mod tests { let code = "{ local x = { a: 1 }, z: x }"; let result = get_hover(code, 0, 10); assert_matches!(result, Some(Hover { - contents: HoverContents::Markup(MarkupContent { kind: MarkupKind::Markdown, value }), + contents, range: None }) => { - assert_eq!(value, "`{ a: number }`"); + assert_hover_contents( + &contents, + &[ + "`{ a: number }`", + indoc! {r" + ```jsonnet + x = { a: 1 } + ```"}, + ], + ); }); } @@ -492,10 +791,19 @@ mod tests { let code = "{ z: { a: 1, b: 2 } }"; let result = get_hover(code, 0, 0); assert_matches!(result, Some(Hover { - contents: HoverContents::Markup(MarkupContent { kind: MarkupKind::Markdown, value }), + contents, range: None }) => { - assert_eq!(value, "`{ z: { a: number, b: number } }`"); + assert_hover_contents( + &contents, + &[ + "`{ z: { a: number, b: number } }`", + indoc! 
{r" + ```jsonnet + { z: { a: 1, b: 2 } } + ```"}, + ], + ); }); } @@ -506,10 +814,19 @@ mod tests { .expect("close brace offset should fit u32"); let result = get_hover(code, 0, close_brace); assert_matches!(result, Some(Hover { - contents: HoverContents::Markup(MarkupContent { kind: MarkupKind::Markdown, value }), + contents, range: None }) => { - assert_eq!(value, "`{ z: { a: number, b: number } }`"); + assert_hover_contents( + &contents, + &[ + "`{ z: { a: number, b: number } }`", + indoc! {r" + ```jsonnet + { z: { a: 1, b: 2 } } + ```"}, + ], + ); }); } @@ -518,10 +835,40 @@ mod tests { let code = "local f(x) = x + 1;\nf"; let result = get_hover(code, 0, 8); assert_matches!(result, Some(Hover { - contents: HoverContents::Markup(MarkupContent { kind: MarkupKind::Markdown, value }), + contents, + range: None + }) => { + assert_hover_contents( + &contents, + &[ + "`any`", + indoc! {r" + ```jsonnet + local f(x) = x + 1; + ```"}, + ], + ); + }); + } + + #[test] + fn test_hover_on_function_sugar_param_reference_shows_preview() { + let code = "local f(x) = x + 1;\nf"; + let result = get_hover(code, 0, 13); + assert_matches!(result, Some(Hover { + contents, range: None }) => { - assert_eq!(value, "`any`"); + assert_hover_contents( + &contents, + &[ + "`any`", + indoc! {r" + ```jsonnet + local f(x) = x + 1; + ```"}, + ], + ); }); } @@ -530,10 +877,19 @@ mod tests { let code = "local f(x) = x + 1;\nf"; let result = get_hover(code, 0, 11); assert_matches!(result, Some(Hover { - contents: HoverContents::Markup(MarkupContent { kind: MarkupKind::Markdown, value }), + contents, range: None }) => { - assert_eq!(value, "`(x: any) -> number`"); + assert_hover_contents( + &contents, + &[ + "`(x: any) -> number`", + indoc! 
{r" + ```jsonnet + f(x) = x + 1 + ```"}, + ], + ); }); } } diff --git a/crates/jrsonnet-lsp-handlers/src/hover/local.rs b/crates/jrsonnet-lsp-handlers/src/hover/local.rs index 9c361084..6d5cb35f 100644 --- a/crates/jrsonnet-lsp-handlers/src/hover/local.rs +++ b/crates/jrsonnet-lsp-handlers/src/hover/local.rs @@ -2,13 +2,30 @@ use jrsonnet_lsp_document::{to_lsp_range, token_at_offset, ByteOffset, Document, use jrsonnet_lsp_inference::TypeAnalysis; use jrsonnet_lsp_types::{Ty, TyData}; use jrsonnet_rowan_parser::{ - nodes::{Bind, Destruct, MemberBindStmt, StmtLocal}, - AstNode, SyntaxKind, + nodes::{ + Bind, Destruct, ExprFunction, ForSpec, MemberBindStmt, MemberFieldMethod, Param, StmtLocal, + }, + AstNode, SyntaxKind, SyntaxNode, SyntaxToken, }; -use lsp_types::{Hover, HoverContents, MarkupContent, MarkupKind}; +use rowan::TextRange; -use super::{ImportFieldTypeResolver, MAX_HOVER_LINES}; -use crate::definition::{goto_definition, DefinitionResult}; +use super::{preview_policy::render_preview, ImportFieldTypeResolver}; +use crate::definition::{goto_declaration, goto_definition, DefinitionResult}; + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub(super) enum LocalHoverKind { + Local, + Import, + ImportField, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub(super) struct LocalHoverSections { + pub kind: LocalHoverKind, + pub type_markdown: Option, + pub context_markdown: Option, + pub preview_code: Option, +} /// Check for hover on a local variable reference. pub(super) fn check_local_hover( @@ -17,9 +34,10 @@ pub(super) fn check_local_hover( position: LspPosition, offset: ByteOffset, import_field_type_resolver: Option<&ImportFieldTypeResolver<'_>>, -) -> Option { +) -> Option { let result = goto_definition(document, position) .or_else(|| local_definition_at_offset(document, offset))?; + let declaration_range = goto_declaration(document, position).and_then(local_range_from_result); // Get the inferred type at this position. 
If the local definition site only // reports `any`, fall back to the bound value expression type. @@ -56,38 +74,44 @@ pub(super) fn check_local_hover( } } - let type_str = inferred_type - .map(|ty| format!("`{ty}`\n\n")) - .unwrap_or_default(); + let type_markdown = inferred_type.map(|ty| format!("`{ty}`")); - match result { - DefinitionResult::Local(range) => { - let preview = definition_preview(document, range)?; - Some(Hover { - contents: HoverContents::Markup(MarkupContent { - kind: MarkupKind::Markdown, - value: format!("{type_str}```jsonnet\n{preview}\n```"), - }), - range: None, - }) - } - DefinitionResult::Import(path) => Some(Hover { - contents: HoverContents::Markup(MarkupContent { - kind: MarkupKind::Markdown, - value: format!("{type_str}`{path}`"), - }), - range: None, - }), + let (kind, context_markdown) = match &result { + DefinitionResult::Local(_) => (LocalHoverKind::Local, None), + DefinitionResult::Import(path) => (LocalHoverKind::Import, Some(format!("`{path}`"))), DefinitionResult::ImportField { path, fields } => { let field_chain = fields.join("."); - Some(Hover { - contents: HoverContents::Markup(MarkupContent { - kind: MarkupKind::Markdown, - value: format!("{type_str}`{field_chain}` from `{path}`"), - }), - range: None, - }) + ( + LocalHoverKind::ImportField, + Some(format!("`{field_chain}` from `{path}`")), + ) } + }; + let preview_code = preview_range_for_result(&result, declaration_range) + .and_then(|range| definition_preview(document, range)); + + Some(LocalHoverSections { + kind, + type_markdown, + context_markdown, + preview_code, + }) +} + +fn local_range_from_result(result: DefinitionResult) -> Option { + match result { + DefinitionResult::Local(range) => Some(range), + DefinitionResult::Import(_) | DefinitionResult::ImportField { .. 
} => None, + } +} + +fn preview_range_for_result( + result: &DefinitionResult, + declaration_range: Option, +) -> Option { + match result { + DefinitionResult::Local(range) => Some(*range), + DefinitionResult::Import(_) | DefinitionResult::ImportField { .. } => declaration_range, } } @@ -99,37 +123,55 @@ fn definition_preview(document: &Document, range: lsp_types::Range) -> Option Option { + let name_node = token.parent()?; + + name_node .ancestors() .find_map(MemberBindStmt::cast) .map(|node| node.syntax().clone()) .or_else(|| { - bind.syntax() + name_node + .ancestors() + .find_map(MemberFieldMethod::cast) + .map(|node| node.syntax().clone()) + }) + .or_else(|| { + name_node + .ancestors() + .find_map(ForSpec::cast) + .map(|node| node.syntax().clone()) + }) + .or_else(|| { + name_node + .ancestors() + .find_map(ExprFunction::cast) + .map(|node| node.syntax().clone()) + }) + .or_else(|| { + name_node .ancestors() .find_map(StmtLocal::cast) .map(|node| node.syntax().clone()) }) - .unwrap_or_else(|| bind.syntax().clone()); - - let preview_text = preview_node.text().to_string(); - let mut preview_lines: Vec<&str> = preview_text.lines().collect(); - if preview_lines.is_empty() { - return None; - } - - let truncated = preview_lines.len() > MAX_HOVER_LINES; - if truncated { - preview_lines.truncate(MAX_HOVER_LINES); - } - - let mut preview = preview_lines.join("\n"); - if truncated { - preview.push_str("\n..."); - } - Some(preview) + .or_else(|| { + name_node + .ancestors() + .find_map(Bind::cast) + .map(|node| node.syntax().clone()) + }) + .or_else(|| { + name_node + .ancestors() + .find_map(Param::cast) + .map(|node| node.syntax().clone()) + }) } fn local_definition_at_offset(document: &Document, offset: ByteOffset) -> Option { @@ -140,27 +182,67 @@ fn local_definition_at_offset(document: &Document, offset: ByteOffset) -> Option } let name_node = token.parent()?; - let bind = name_node.ancestors().find_map(Bind::cast)?; - let definition_range = match bind { - 
Bind::BindDestruct(bind) => { - let destruct = jrsonnet_rowan_parser::nodes::BindDestruct::into(&bind)?; - let Destruct::DestructFull(full) = destruct else { - return None; - }; - full.name()?.syntax().text_range() + let name_range = name_node.text_range(); + + if let Some(bind) = name_node.ancestors().find_map(Bind::cast) { + if let Some(definition_range) = bind_definition_range(&bind) { + if name_range == definition_range { + return Some(DefinitionResult::Local(to_lsp_range( + definition_range, + document.line_index(), + document.text(), + ))); + } } - Bind::BindFunction(bind) => bind.name()?.syntax().text_range(), - }; + } - if name_node.text_range() != definition_range { - return None; + if let Some(param) = name_node.ancestors().find_map(Param::cast) { + if let Some(definition_range) = param_definition_range(¶m) { + if name_range == definition_range { + return Some(DefinitionResult::Local(to_lsp_range( + definition_range, + document.line_index(), + document.text(), + ))); + } + } + } + + if let Some(for_spec) = name_node.ancestors().find_map(ForSpec::cast) { + if let Some(definition_range) = for_spec_definition_range(&for_spec) { + if name_range == definition_range { + return Some(DefinitionResult::Local(to_lsp_range( + definition_range, + document.line_index(), + document.text(), + ))); + } + } + } + + None +} + +fn bind_definition_range(bind: &Bind) -> Option { + match bind { + Bind::BindDestruct(bind) => destruct_definition_range(bind.into()?), + Bind::BindFunction(bind) => Some(bind.name()?.syntax().text_range()), } +} + +fn param_definition_range(param: &Param) -> Option { + destruct_definition_range(param.destruct()?) +} - Some(DefinitionResult::Local(to_lsp_range( - definition_range, - document.line_index(), - document.text(), - ))) +fn for_spec_definition_range(for_spec: &ForSpec) -> Option { + destruct_definition_range(for_spec.bind()?) 
+} + +fn destruct_definition_range(destruct: Destruct) -> Option { + let Destruct::DestructFull(full) = destruct else { + return None; + }; + Some(full.name()?.syntax().text_range()) } fn definition_value_type( @@ -175,8 +257,13 @@ fn definition_value_type( let def_pos: LspPosition = (range.start.line, range.start.character).into(); let def_offset = line_index.offset(def_pos, text)?; let token = token_at_offset(ast.syntax(), def_offset)?; + let name_node = token.parent()?; + + let bind = name_node.ancestors().find_map(Bind::cast)?; + if bind_definition_range(&bind) != Some(name_node.text_range()) { + return None; + } - let bind = token.parent()?.ancestors().find_map(Bind::cast)?; let value = match bind { Bind::BindDestruct(bind) => bind.value()?, Bind::BindFunction(bind) => bind.value()?, diff --git a/crates/jrsonnet-lsp-handlers/src/hover/mod.rs b/crates/jrsonnet-lsp-handlers/src/hover/mod.rs index 054014ff..3834aa00 100644 --- a/crates/jrsonnet-lsp-handlers/src/hover/mod.rs +++ b/crates/jrsonnet-lsp-handlers/src/hover/mod.rs @@ -6,7 +6,10 @@ mod handler; mod local; +mod model; +mod preview_policy; mod stdlib; +mod target; pub use handler::{hover, hover_with_import_field_type}; diff --git a/crates/jrsonnet-lsp-handlers/src/hover/model.rs b/crates/jrsonnet-lsp-handlers/src/hover/model.rs new file mode 100644 index 00000000..767dc007 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/hover/model.rs @@ -0,0 +1,293 @@ +use std::fmt; + +use lsp_types::{HoverContents, LanguageString, MarkedString}; + +use super::target::{HoverSectionSlot, HoverTargetPolicy}; + +#[derive(Debug, Clone, PartialEq, Eq)] +pub(super) struct HoverCodeBlock { + pub(super) language: String, + pub(super) value: String, +} + +impl HoverCodeBlock { + #[must_use] + pub(super) fn jsonnet(value: String) -> Self { + Self { + language: "jsonnet".to_string(), + value, + } + } + + fn into_marked_string(self) -> Option { + let code = self.value.trim(); + (!code.is_empty()).then(|| { + 
MarkedString::LanguageString(LanguageString { + language: self.language, + value: code.to_string(), + }) + }) + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub(super) enum HoverDocMarkdown { + Raw(String), + StdlibExampleHeading, + TokenMultiPurposeIntro, + TokenPurpose { index: Option, doc: String }, +} + +impl HoverDocMarkdown { + #[must_use] + pub(super) fn to_markdown(&self) -> String { + match self { + Self::Raw(markdown) => markdown.clone(), + Self::StdlibExampleHeading => "**Example:**".to_string(), + Self::TokenMultiPurposeIntro => "This token has multiple purposes:".to_string(), + Self::TokenPurpose { index, doc } => index.map_or_else( + || doc.clone(), + |index| format!("**Purpose {index}**\n\n{doc}"), + ), + } + } +} + +impl fmt::Display for HoverDocMarkdown { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str(&self.to_markdown()) + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub(super) struct HoverTokenPurpose { + pub(super) doc: String, + pub(super) example: String, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub(super) enum HoverDocBlock { + Stdlib { + name: String, + signature: String, + description: String, + example: Option, + }, + TokenPurposes(Vec), +} + +impl HoverDocBlock { + #[must_use] + pub(super) fn into_sections(self) -> Vec { + match self { + Self::Stdlib { + name, + signature, + description, + example, + } => { + let mut sections = vec![ + HoverBlock::DocsCode(HoverCodeBlock::jsonnet(format!( + "std.{name}{signature})" + ))), + HoverBlock::DocsText(HoverDocMarkdown::Raw(description)), + ]; + if let Some(example) = example { + sections.push(HoverBlock::DocsText(HoverDocMarkdown::StdlibExampleHeading)); + sections.push(HoverBlock::DocsCode(HoverCodeBlock::jsonnet(example))); + } + sections + } + Self::TokenPurposes(purposes) => { + let multiple_purposes = purposes.len() > 1; + let mut sections = Vec::new(); + if multiple_purposes { + sections.push(HoverBlock::DocsText( + 
HoverDocMarkdown::TokenMultiPurposeIntro, + )); + } + for (idx, purpose) in purposes.into_iter().enumerate() { + sections.push(HoverBlock::DocsText(HoverDocMarkdown::TokenPurpose { + index: multiple_purposes.then_some(idx + 1), + doc: purpose.doc, + })); + sections.push(HoverBlock::DocsCode(HoverCodeBlock::jsonnet( + purpose.example, + ))); + } + sections + } + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub(super) enum HoverBlock { + Type(String), + Context(String), + DocsText(HoverDocMarkdown), + DocsCode(HoverCodeBlock), + Preview(HoverCodeBlock), +} + +impl HoverBlock { + fn into_marked_string(self) -> Option { + match self { + Self::Type(markdown) | Self::Context(markdown) => { + let markdown = markdown.trim(); + (!markdown.is_empty()).then(|| MarkedString::String(markdown.to_string())) + } + Self::DocsText(markdown) => { + let markdown = markdown.to_markdown(); + let markdown = markdown.trim(); + (!markdown.is_empty()).then(|| MarkedString::String(markdown.to_string())) + } + Self::DocsCode(code) | Self::Preview(code) => code.into_marked_string(), + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Default)] +pub(super) struct HoverFacts { + pub(super) type_markdown: Option, + pub(super) context_markdown: Option, + pub(super) docs: Vec, + pub(super) preview_code: Option, +} + +pub(super) fn compose_hover_contents_with_policy( + policy: HoverTargetPolicy, + facts: HoverFacts, +) -> Option { + let mut type_markdown = facts.type_markdown; + let mut context_markdown = facts.context_markdown; + let mut docs = facts.docs; + let mut preview_code = facts.preview_code; + + let mut blocks = Vec::new(); + let slots = [ + HoverSectionSlot::Type, + HoverSectionSlot::Context, + HoverSectionSlot::Docs, + HoverSectionSlot::Preview, + ]; + for slot in slots { + if !policy.allows(slot) { + continue; + } + match slot { + HoverSectionSlot::Type => { + if let Some(value) = type_markdown.take() { + blocks.push(HoverBlock::Type(value)); + } + } + HoverSectionSlot::Context 
=> { + if let Some(value) = context_markdown.take() { + blocks.push(HoverBlock::Context(value)); + } + } + HoverSectionSlot::Docs => { + for doc in std::mem::take(&mut docs) { + blocks.extend(doc.into_sections()); + } + } + HoverSectionSlot::Preview => { + if let Some(value) = preview_code.take() { + blocks.push(HoverBlock::Preview(HoverCodeBlock::jsonnet(value))); + } + } + } + } + + render_hover_blocks(blocks) +} + +fn render_hover_blocks(blocks: Vec) -> Option { + let mut unique_sections = Vec::new(); + for section in blocks + .into_iter() + .filter_map(HoverBlock::into_marked_string) + { + if unique_sections.iter().all(|existing| existing != §ion) { + unique_sections.push(section); + } + } + (!unique_sections.is_empty()).then_some(HoverContents::Array(unique_sections)) +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::hover::target::HoverTargetKind; + + #[test] + fn test_compose_hover_contents_uses_fixed_slot_order() { + let contents = compose_hover_contents_with_policy( + HoverTargetPolicy::for_kind(HoverTargetKind::StdlibSymbol), + HoverFacts { + type_markdown: Some("`number`".to_string()), + context_markdown: Some("`ignored`".to_string()), + docs: vec![HoverDocBlock::TokenPurposes(vec![HoverTokenPurpose { + doc: "Literal `null` value.".to_string(), + example: "null".to_string(), + }])], + preview_code: Some("x".to_string()), + }, + ) + .expect("expected hover contents"); + + let HoverContents::Array(sections) = contents else { + panic!("expected hover array contents"); + }; + assert_eq!(sections.len(), 4); + assert_eq!(sections[0], MarkedString::String("`number`".to_string())); + assert_eq!( + sections[1], + MarkedString::String("Literal `null` value.".to_string()) + ); + assert_eq!( + sections[2], + MarkedString::LanguageString(LanguageString { + language: "jsonnet".to_string(), + value: "null".to_string(), + }) + ); + assert_eq!( + sections[3], + MarkedString::LanguageString(LanguageString { + language: "jsonnet".to_string(), + value: 
"x".to_string(), + }) + ); + } + + #[test] + fn test_compose_hover_contents_respects_policy_caps() { + let contents = compose_hover_contents_with_policy( + HoverTargetPolicy::for_kind(HoverTargetKind::LocalRef), + HoverFacts { + type_markdown: Some("`number`".to_string()), + context_markdown: Some("`context`".to_string()), + docs: vec![HoverDocBlock::TokenPurposes(vec![HoverTokenPurpose { + doc: "doc".to_string(), + example: "expr".to_string(), + }])], + preview_code: Some("local x = 1;".to_string()), + }, + ) + .expect("expected hover contents"); + + let HoverContents::Array(sections) = contents else { + panic!("expected hover array contents"); + }; + assert_eq!(sections.len(), 2); + assert_eq!(sections[0], MarkedString::String("`number`".to_string())); + assert_eq!( + sections[1], + MarkedString::LanguageString(LanguageString { + language: "jsonnet".to_string(), + value: "local x = 1;".to_string(), + }) + ); + } +} diff --git a/crates/jrsonnet-lsp-handlers/src/hover/preview_policy.rs b/crates/jrsonnet-lsp-handlers/src/hover/preview_policy.rs new file mode 100644 index 00000000..d9d2fb67 --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/hover/preview_policy.rs @@ -0,0 +1,157 @@ +use jrsonnet_rowan_parser::{SyntaxKind, SyntaxToken}; + +use super::MAX_HOVER_LINES; + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +enum HoverPreviewCandidateKind { + DefinitionSite, + MemberBindingOrFunctionSugar, + Expression, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +enum HoverPreviewNodeClass { + MemberBindingOrFunctionSugar, + Expression, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +struct HoverPreviewSyntaxKind(SyntaxKind); + +impl HoverPreviewSyntaxKind { + fn classify(self) -> Option { + let kind = self.0; + if matches!( + kind, + SyntaxKind::MEMBER_BIND_STMT + | SyntaxKind::MEMBER_FIELD_METHOD + | SyntaxKind::MEMBER_FIELD_NORMAL + | SyntaxKind::MEMBER_ASSERT_STMT + | SyntaxKind::FOR_SPEC + | SyntaxKind::STMT_LOCAL + | SyntaxKind::BIND_DESTRUCT + | 
SyntaxKind::BIND_FUNCTION + | SyntaxKind::PARAM + | SyntaxKind::EXPR_FUNCTION + ) { + return Some(HoverPreviewNodeClass::MemberBindingOrFunctionSugar); + } + if matches!( + kind, + SyntaxKind::EXPR_IF_THEN_ELSE + | SyntaxKind::EXPR_ARRAY_COMP + | SyntaxKind::EXPR_OBJECT + | SyntaxKind::EXPR_ARRAY + | SyntaxKind::EXPR_CALL + | SyntaxKind::EXPR_FIELD + | SyntaxKind::EXPR_INDEX + | SyntaxKind::EXPR_SLICE + | SyntaxKind::EXPR_BINARY + | SyntaxKind::EXPR_UNARY + | SyntaxKind::EXPR_OBJ_EXTEND + | SyntaxKind::EXPR_IMPORT + | SyntaxKind::EXPR_PARENED + | SyntaxKind::EXPR_LITERAL + | SyntaxKind::EXPR_STRING + | SyntaxKind::EXPR_NUMBER + ) { + return Some(HoverPreviewNodeClass::Expression); + } + None + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +struct HoverPreviewCandidate { + kind: HoverPreviewCandidateKind, + depth: usize, + preview_code: String, +} + +impl HoverPreviewCandidate { + fn rank_key(&self) -> (u8, usize, u8, usize) { + let (source_rank, depth_rank, kind_tiebreak) = match self.kind { + HoverPreviewCandidateKind::DefinitionSite => (0, 0, 0), + HoverPreviewCandidateKind::MemberBindingOrFunctionSugar => (1, self.depth, 0), + HoverPreviewCandidateKind::Expression => (1, self.depth, 1), + }; + // For equally ranked candidates, prefer richer snippets. 
+ let richness_rank = usize::MAX.saturating_sub(self.preview_code.len()); + (source_rank, depth_rank, kind_tiebreak, richness_rank) + } +} + +pub(super) fn select_preview_code( + token: &SyntaxToken, + definition_preview: Option, +) -> Option { + collect_preview_candidates(token, definition_preview) + .into_iter() + .min_by_key(HoverPreviewCandidate::rank_key) + .map(|candidate| candidate.preview_code) +} + +pub(super) fn render_preview(preview_text: &str) -> Option { + let mut preview_lines: Vec<&str> = preview_text.lines().collect(); + if preview_lines.is_empty() { + return None; + } + + let truncated = preview_lines.len() > MAX_HOVER_LINES; + if truncated { + preview_lines.truncate(MAX_HOVER_LINES); + } + + let mut preview = preview_lines.join("\n"); + if truncated { + preview.push_str("\n..."); + } + Some(preview) +} + +fn collect_preview_candidates( + token: &SyntaxToken, + definition_preview: Option, +) -> Vec { + let mut candidates = Vec::new(); + + if let Some(preview_code) = definition_preview { + let preview_code = preview_code.trim().to_string(); + if !preview_code.is_empty() { + candidates.push(HoverPreviewCandidate { + kind: HoverPreviewCandidateKind::DefinitionSite, + depth: 0, + preview_code, + }); + } + } + + let Some(token_node) = token.parent() else { + return candidates; + }; + for (depth, node) in token_node.ancestors().enumerate() { + let Some(kind) = preview_candidate_kind_for_node(node.kind()) else { + continue; + }; + let node_text = node.text().to_string(); + let Some(preview_code) = render_preview(&node_text) else { + continue; + }; + candidates.push(HoverPreviewCandidate { + kind, + depth, + preview_code, + }); + } + + candidates +} + +fn preview_candidate_kind_for_node(kind: SyntaxKind) -> Option { + match HoverPreviewSyntaxKind(kind).classify()? 
{ + HoverPreviewNodeClass::MemberBindingOrFunctionSugar => { + Some(HoverPreviewCandidateKind::MemberBindingOrFunctionSugar) + } + HoverPreviewNodeClass::Expression => Some(HoverPreviewCandidateKind::Expression), + } +} diff --git a/crates/jrsonnet-lsp-handlers/src/hover/stdlib.rs b/crates/jrsonnet-lsp-handlers/src/hover/stdlib.rs index be8a9560..bf11c8a0 100644 --- a/crates/jrsonnet-lsp-handlers/src/hover/stdlib.rs +++ b/crates/jrsonnet-lsp-handlers/src/hover/stdlib.rs @@ -1,10 +1,9 @@ use jrsonnet_lsp_scope::expr_resolves_to_builtin_std; use jrsonnet_lsp_stdlib as stdlib; use jrsonnet_rowan_parser::{nodes::ExprField, AstNode, SyntaxKind, SyntaxToken}; -use lsp_types::{Hover, HoverContents, MarkupContent, MarkupKind}; -/// Check if the token is a stdlib function call and return hover info. -pub(super) fn check_stdlib_hover(token: &SyntaxToken) -> Option { +/// Check if the token is a stdlib function call and return structured docs. +pub(super) fn stdlib_hover_doc(token: &SyntaxToken) -> Option<&'static stdlib::StdlibDoc> { if token.kind() != SyntaxKind::IDENT { return None; } @@ -26,13 +25,5 @@ pub(super) fn check_stdlib_hover(token: &SyntaxToken) -> Option { } stdlib::ensure_initialized(); - let doc = stdlib::get_stdlib_doc(name)?; - - Some(Hover { - contents: HoverContents::Markup(MarkupContent { - kind: MarkupKind::Markdown, - value: doc.to_markdown(), - }), - range: None, - }) + stdlib::get_stdlib_doc(name) } diff --git a/crates/jrsonnet-lsp-handlers/src/hover/target.rs b/crates/jrsonnet-lsp-handlers/src/hover/target.rs new file mode 100644 index 00000000..6e23618e --- /dev/null +++ b/crates/jrsonnet-lsp-handlers/src/hover/target.rs @@ -0,0 +1,139 @@ +use super::local::LocalHoverKind; + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub(super) enum HoverTargetKind { + ImportFieldRef, + ImportRef, + LocalRef, + StdlibSymbol, + TokenDoc, + Expression, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub(super) struct HoverTarget { + pub(super) kind: 
HoverTargetKind, +} + +impl HoverTarget { + #[must_use] + pub(super) fn classify( + local_kind: Option, + has_stdlib_docs: bool, + has_token_docs: bool, + ) -> Self { + let kind = if matches!(local_kind, Some(LocalHoverKind::ImportField)) { + HoverTargetKind::ImportFieldRef + } else if matches!(local_kind, Some(LocalHoverKind::Import)) { + HoverTargetKind::ImportRef + } else if matches!(local_kind, Some(LocalHoverKind::Local)) { + HoverTargetKind::LocalRef + } else if has_stdlib_docs { + HoverTargetKind::StdlibSymbol + } else if has_token_docs { + HoverTargetKind::TokenDoc + } else { + HoverTargetKind::Expression + }; + Self { kind } + } + + #[must_use] + pub(super) fn policy(self) -> HoverTargetPolicy { + HoverTargetPolicy::for_kind(self.kind) + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub(super) enum HoverSectionSlot { + Type, + Context, + Docs, + Preview, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub(super) struct HoverTargetPolicy { + allowed_slots: &'static [HoverSectionSlot], +} + +impl HoverTargetPolicy { + #[must_use] + pub(super) fn for_kind(kind: HoverTargetKind) -> Self { + const IMPORT_SLOTS: &[HoverSectionSlot] = &[ + HoverSectionSlot::Type, + HoverSectionSlot::Context, + HoverSectionSlot::Preview, + ]; + const LOCAL_OR_EXPR_SLOTS: &[HoverSectionSlot] = + &[HoverSectionSlot::Type, HoverSectionSlot::Preview]; + const DOCS_SLOTS: &[HoverSectionSlot] = &[ + HoverSectionSlot::Type, + HoverSectionSlot::Docs, + HoverSectionSlot::Preview, + ]; + + match kind { + HoverTargetKind::ImportFieldRef | HoverTargetKind::ImportRef => Self { + allowed_slots: IMPORT_SLOTS, + }, + HoverTargetKind::LocalRef | HoverTargetKind::Expression => Self { + allowed_slots: LOCAL_OR_EXPR_SLOTS, + }, + HoverTargetKind::StdlibSymbol | HoverTargetKind::TokenDoc => Self { + allowed_slots: DOCS_SLOTS, + }, + } + } + + #[must_use] + pub(super) fn allows(self, slot: HoverSectionSlot) -> bool { + self.allowed_slots.contains(&slot) + } +} + +#[cfg(test)] +mod tests { + 
use super::*; + + #[test] + fn test_hover_target_priority_prefers_local_refs_over_docs() { + let target = HoverTarget::classify(Some(LocalHoverKind::ImportField), true, true); + assert_eq!(target.kind, HoverTargetKind::ImportFieldRef); + + let target = HoverTarget::classify(Some(LocalHoverKind::Import), true, true); + assert_eq!(target.kind, HoverTargetKind::ImportRef); + + let target = HoverTarget::classify(Some(LocalHoverKind::Local), true, true); + assert_eq!(target.kind, HoverTargetKind::LocalRef); + } + + #[test] + fn test_hover_target_priority_prefers_stdlib_over_token_docs() { + let target = HoverTarget::classify(None, true, true); + assert_eq!(target.kind, HoverTargetKind::StdlibSymbol); + } + + #[test] + fn test_hover_target_defaults_to_expression() { + let target = HoverTarget::classify(None, false, false); + assert_eq!(target.kind, HoverTargetKind::Expression); + } + + #[test] + fn test_hover_target_policy_matrix() { + let import_field = HoverTargetPolicy::for_kind(HoverTargetKind::ImportFieldRef); + assert!(import_field.allows(HoverSectionSlot::Type)); + assert!(import_field.allows(HoverSectionSlot::Context)); + assert!(import_field.allows(HoverSectionSlot::Preview)); + assert!(!import_field.allows(HoverSectionSlot::Docs)); + + let token_doc = HoverTargetPolicy::for_kind(HoverTargetKind::TokenDoc); + assert!(token_doc.allows(HoverSectionSlot::Docs)); + assert!(!token_doc.allows(HoverSectionSlot::Context)); + + let expression = HoverTargetPolicy::for_kind(HoverTargetKind::Expression); + assert!(!expression.allows(HoverSectionSlot::Docs)); + assert!(expression.allows(HoverSectionSlot::Preview)); + } +} diff --git a/crates/jrsonnet-lsp-scenario/src/scenario/doctest_assertions.rs b/crates/jrsonnet-lsp-scenario/src/scenario/doctest_assertions.rs index 545dceb3..88e5b69b 100644 --- a/crates/jrsonnet-lsp-scenario/src/scenario/doctest_assertions.rs +++ b/crates/jrsonnet-lsp-scenario/src/scenario/doctest_assertions.rs @@ -2,7 +2,7 @@ use lsp_server::{Message, 
Notification, Response}; use lsp_types::{ notification::{DidOpenTextDocument, Notification as _, PublishDiagnostics}, request::{HoverRequest, Request as _}, - Hover, HoverContents, MarkupContent, MarkupKind, PublishDiagnosticsParams, + Hover, HoverContents, MarkedString, PublishDiagnosticsParams, }; use super::Scenario; @@ -23,10 +23,9 @@ pub fn assert_scenario_runs_without_error(scenario: &Scenario) { let response = match request.method.as_str() { HoverRequest::METHOD => { let hover = Hover { - contents: HoverContents::Markup(MarkupContent { - kind: MarkupKind::Markdown, - value: "`number`".to_string(), - }), + contents: HoverContents::Array(vec![MarkedString::String( + "`number`".to_string(), + )]), range: None, }; match serde_json::to_value(hover) { diff --git a/crates/jrsonnet-lsp-scenario/src/scenario/mod.rs b/crates/jrsonnet-lsp-scenario/src/scenario/mod.rs index 2c893d92..07a4275f 100644 --- a/crates/jrsonnet-lsp-scenario/src/scenario/mod.rs +++ b/crates/jrsonnet-lsp-scenario/src/scenario/mod.rs @@ -40,23 +40,22 @@ pub use diagnostics_steps::{DiagnosticsSettledStep, ExpectDiagnosticsStep}; pub use document_steps::{ChangeFullStep, ChangeIncrementalStep, CloseStep, OpenStep, SaveStep}; use lsp_types::{ CodeActionKind, CodeActionOrCommand, CodeLens, CompletionResponse, Diagnostic, - DocumentSymbolResponse, FileChangeType, GotoDefinitionResponse, Hover, InlayHint, Location, - Position, PrepareRenameResponse, Range, SemanticTokensRangeResult, SemanticTokensResult, - SignatureHelp, TextDocumentContentChangeEvent, TextEdit, WorkspaceEdit, - WorkspaceSymbolResponse, + DocumentSymbolResponse, FileChangeType, GotoDefinitionResponse, InlayHint, Location, Position, + PrepareRenameResponse, Range, SemanticTokensRangeResult, SemanticTokensResult, SignatureHelp, + TextDocumentContentChangeEvent, TextEdit, WorkspaceEdit, WorkspaceSymbolResponse, }; pub use request_steps::{ ExpectCodeActionStep, ExpectCodeLensStep, ExpectCompletionStep, ExpectDeclarationStep, 
ExpectDefinitionStep, ExpectDocumentSymbolStep, ExpectExecuteCommandStep, ExpectFormattingStep, - ExpectHoverStep, ExpectHoverTypeStep, ExpectInlayHintsStep, ExpectPrepareRenameStep, + ExpectHoverPrimaryTypeStep, ExpectHoverStep, ExpectInlayHintsStep, ExpectPrepareRenameStep, ExpectReferencesStep, ExpectRenameStep, ExpectSemanticTokensFullStep, ExpectSemanticTokensRangeStep, ExpectSignatureHelpStep, ExpectTypeDefinitionStep, - ExpectWorkspaceSymbolStep, RequestCodeActionStep, RequestCodeLensStep, RequestCompletionStep, - RequestDeclarationStep, RequestDefinitionStep, RequestDocumentSymbolStep, - RequestExecuteCommandStep, RequestFormattingStep, RequestHoverStep, RequestInlayHintsStep, - RequestPrepareRenameStep, RequestReferencesStep, RequestRenameStep, + ExpectWorkspaceSymbolStep, HoverSectionExpectation, RequestCodeActionStep, RequestCodeLensStep, + RequestCompletionStep, RequestDeclarationStep, RequestDefinitionStep, + RequestDocumentSymbolStep, RequestExecuteCommandStep, RequestFormattingStep, RequestHoverStep, + RequestInlayHintsStep, RequestPrepareRenameStep, RequestReferencesStep, RequestRenameStep, RequestSemanticTokensFullStep, RequestSemanticTokensRangeStep, RequestSignatureHelpStep, - RequestTypeDefinitionStep, RequestWorkspaceSymbolStep, TypeMatchMode, + RequestTypeDefinitionStep, RequestWorkspaceSymbolStep, }; use serde::Deserialize; pub use workspace_steps::{ @@ -105,7 +104,7 @@ pub enum ScenarioStep { ExpectRename(ExpectRenameStep), RequestHover(RequestHoverStep), ExpectHover(ExpectHoverStep), - ExpectHoverType(ExpectHoverTypeStep), + ExpectHoverPrimaryType(ExpectHoverPrimaryTypeStep), RequestSignatureHelp(RequestSignatureHelpStep), ExpectSignatureHelp(ExpectSignatureHelpStep), RequestCompletion(RequestCompletionStep), diff --git a/crates/jrsonnet-lsp-scenario/src/scenario/request_steps.rs b/crates/jrsonnet-lsp-scenario/src/scenario/request_steps.rs index fb6c4640..f4113f0f 100644 --- a/crates/jrsonnet-lsp-scenario/src/scenario/request_steps.rs +++ 
b/crates/jrsonnet-lsp-scenario/src/scenario/request_steps.rs @@ -1,3 +1,5 @@ +use serde::Deserialize; + use super::*; /// `textDocument/codeAction` request. @@ -481,9 +483,7 @@ pub struct ExpectRenameStep { /// - step: expectHover /// request: hover /// result: -/// contents: -/// kind: markdown -/// value: "`number`" +/// - type: number /// "#; /// /// assert_yaml_scenario_runs_without_error(yaml); @@ -514,9 +514,7 @@ pub struct RequestHoverStep { /// - step: expectHover /// request: hover /// result: -/// contents: -/// kind: markdown -/// value: "`number`" +/// - type: number /// "#; /// /// assert_yaml_scenario_runs_without_error(yaml); @@ -524,58 +522,91 @@ pub struct RequestHoverStep { #[derive(Debug, Clone, PartialEq, Eq)] pub struct ExpectHoverStep { pub(crate) id: i32, - pub result: Option, + /// Ordered hover sections rendered by the server. + /// + /// `null` means "expect no hover response". + pub result: Option>, } -/// Expected inferred type extracted from a `textDocument/hover` response. -/// -/// Asserts the inferred type prefix in a hover response. 
-/// -/// Example: -/// ```rust -/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; -/// let yaml = r#" -/// steps: -/// - step: open -/// file: main.jsonnet -/// text: "local [[m:x]] = 1; m" -/// - step: requestHover -/// as: hover -/// file: main.jsonnet -/// at: m -/// - step: expectHoverType -/// request: hover -/// type: number -/// "#; -/// -/// assert_yaml_scenario_runs_without_error(yaml); -/// ``` -/// -/// Optional `match: contains`: -/// ```rust -/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; -/// let yaml = r#" -/// steps: -/// - step: open -/// file: main.jsonnet -/// text: "local [[m:x]] = 1; m" -/// - step: requestHover -/// as: hover -/// file: main.jsonnet -/// at: m -/// - step: expectHoverType -/// request: hover -/// type: um -/// match: contains -/// "#; -/// -/// assert_yaml_scenario_runs_without_error(yaml); -/// ``` #[derive(Debug, Clone, PartialEq, Eq)] -pub struct ExpectHoverTypeStep { +pub struct ExpectHoverPrimaryTypeStep { pub(crate) id: i32, pub expected_type: String, - pub match_mode: TypeMatchMode, +} + +/// One expected hover section, compared in authored order using exact matching. +/// +/// Exactly one key should be present per list item. 
+#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(untagged)] +pub enum HoverSectionExpectation { + Type { + #[serde(rename = "type")] + ty: String, + }, + Context { + context: String, + }, + Preview { + preview: HoverCodeBlockExpectation, + }, + Docs { + docs: HoverDocsExpectation, + }, +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +pub struct HoverCodeBlockExpectation { + pub language: String, + pub value: String, +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(untagged)] +pub enum HoverDocsExpectation { + Markdown(String), + Code(HoverCodeBlockExpectation), +} + +impl HoverSectionExpectation { + #[must_use] + pub fn to_markdown(&self) -> String { + match self { + Self::Type { ty } => format!("`{ty}`"), + Self::Context { context } => context.clone(), + Self::Preview { preview } => preview.value.clone(), + Self::Docs { docs } => match docs { + HoverDocsExpectation::Markdown(markdown) => markdown.clone(), + HoverDocsExpectation::Code(code) => { + format!("```{}\n{}\n```", code.language, code.value) + } + }, + } + } + + #[must_use] + pub fn to_marked_string(&self) -> lsp_types::MarkedString { + match self { + Self::Preview { preview } => { + lsp_types::MarkedString::LanguageString(lsp_types::LanguageString { + language: preview.language.clone(), + value: preview.value.clone(), + }) + } + Self::Docs { + docs: HoverDocsExpectation::Code(code), + } => lsp_types::MarkedString::LanguageString(lsp_types::LanguageString { + language: code.language.clone(), + value: code.value.clone(), + }), + Self::Type { .. } + | Self::Context { .. } + | Self::Docs { + docs: HoverDocsExpectation::Markdown(_), + } => lsp_types::MarkedString::String(self.to_markdown()), + } + } } /// `textDocument/signatureHelp` request. @@ -637,43 +668,6 @@ pub struct ExpectSignatureHelpStep { pub result: Option, } -/// Type string match mode for `ExpectHoverTypeStep`. 
-/// -/// Example values in YAML: -/// ```rust -/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; -/// let yaml = r#" -/// steps: -/// - step: open -/// file: main.jsonnet -/// text: "local [[m:x]] = 1; m" -/// - step: requestHover -/// as: hover_exact -/// file: main.jsonnet -/// at: m -/// - step: expectHoverType -/// request: hover_exact -/// type: number -/// - step: requestHover -/// as: hover_contains -/// file: main.jsonnet -/// at: m -/// - step: expectHoverType -/// request: hover_contains -/// type: um -/// match: contains -/// "#; -/// -/// assert_yaml_scenario_runs_without_error(yaml); -/// ``` -#[derive(Debug, Clone, Copy, PartialEq, Eq, Default, Deserialize)] -#[serde(rename_all = "camelCase")] -pub enum TypeMatchMode { - #[default] - Exact, - Contains, -} - /// `textDocument/completion` request. /// /// Requests completion items at a position. diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_runner/expectation_steps.rs b/crates/jrsonnet-lsp-scenario/src/scenario_runner/expectation_steps.rs index fe662fa8..0977dc62 100644 --- a/crates/jrsonnet-lsp-scenario/src/scenario_runner/expectation_steps.rs +++ b/crates/jrsonnet-lsp-scenario/src/scenario_runner/expectation_steps.rs @@ -6,8 +6,8 @@ use thiserror::Error; use super::{ helpers::{ - completion_label_counts, completion_labels, hover_leading_type, json_mismatch_report, - label_counts, JsonMismatchReport, + completion_label_counts, completion_labels, hover_array_sections_from_json, + json_mismatch_report, label_counts, marked_string_markdown, JsonMismatchReport, }, transport::{RpcError, SerdeError, TransportError}, RunnerResult, ScenarioRunner, REQUEST_TIMEOUT, @@ -15,22 +15,23 @@ use super::{ use crate::scenario::{ DiagnosticsSettledStep, ExpectCodeActionStep, ExpectCodeLensStep, ExpectCompletionStep, ExpectDeclarationStep, ExpectDefinitionStep, ExpectDiagnosticsStep, ExpectDocumentSymbolStep, - ExpectExecuteCommandStep, ExpectFormattingStep, 
ExpectHoverStep, ExpectHoverTypeStep, + ExpectExecuteCommandStep, ExpectFormattingStep, ExpectHoverPrimaryTypeStep, ExpectHoverStep, ExpectInlayHintsStep, ExpectPrepareRenameStep, ExpectReferencesStep, ExpectRenameStep, ExpectSemanticTokensFullStep, ExpectSemanticTokensRangeStep, ExpectSignatureHelpStep, - ExpectTypeDefinitionStep, ExpectWorkspaceSymbolStep, TypeMatchMode, + ExpectTypeDefinitionStep, ExpectWorkspaceSymbolStep, }; #[derive(Debug, Error)] pub enum AssertionError { - #[error("hover response {id} had no result, expected inferred type {expected_type}")] - HoverMissingResult { id: i32, expected_type: String }, + #[error("hover response {id} had no result, expected hover section array")] + HoverMissingResult { id: i32 }, + #[error("hover response {id} had unsupported content shape: {hover}")] + HoverMalformedContent { id: i32, hover: String }, #[error("hover response {id} did not start with a backticked type: {hover}")] HoverMalformedLeadingType { id: i32, hover: String }, - #[error("hover type mismatch for id {id}: actual={actual} expected {match_mode} {expected}")] + #[error("hover type mismatch for id {id}: actual={actual} expected exactly {expected}")] HoverTypeMismatch { id: i32, - match_mode: &'static str, actual: String, expected: String, }, @@ -185,49 +186,72 @@ impl ScenarioRunner { } pub(super) fn step_expect_hover(&mut self, step: &ExpectHoverStep) -> RunnerResult<()> { - let actual = self.response_result::("hover", step.id)?; - if actual != step.result { - return Self::response_mismatch("hover", step.id, &actual, &step.result); + let actual = self.response_result::("hover", step.id)?; + match (&actual, &step.result) { + (None, None) => Ok(()), + (None, Some(_)) => Err(AssertionError::HoverMissingResult { id: step.id }.into()), + (Some(actual_hover), None) => { + Self::response_mismatch_json_values("hover", step.id, actual_hover, &Value::Null) + } + (Some(actual_hover), Some(expected_sections)) => { + let actual_sections = + 
hover_array_sections_from_json(actual_hover).ok_or_else(|| { + AssertionError::HoverMalformedContent { + id: step.id, + hover: actual_hover.to_string(), + } + })?; + let expected_sections = expected_sections + .iter() + .map(crate::scenario::HoverSectionExpectation::to_marked_string) + .collect::>(); + if actual_sections == expected_sections { + Ok(()) + } else { + Self::response_mismatch("hover", step.id, &actual_sections, &expected_sections) + } + } } - Ok(()) } - pub(super) fn step_expect_hover_type( + pub(super) fn step_expect_hover_primary_type( &mut self, - step: &ExpectHoverTypeStep, + step: &ExpectHoverPrimaryTypeStep, ) -> RunnerResult<()> { let actual_hover = self - .response_result::("hover", step.id)? - .ok_or_else(|| AssertionError::HoverMissingResult { - id: step.id, - expected_type: step.expected_type.clone(), - })?; - let actual_type = hover_leading_type(&actual_hover).ok_or_else(|| { - AssertionError::HoverMalformedLeadingType { + .response_result::("hover", step.id)? + .ok_or_else(|| AssertionError::HoverMissingResult { id: step.id })?; + let actual_sections = hover_array_sections_from_json(&actual_hover).ok_or_else(|| { + AssertionError::HoverMalformedContent { id: step.id, - hover: format!("{actual_hover:?}"), + hover: actual_hover.to_string(), } })?; - match step.match_mode { - TypeMatchMode::Exact if actual_type != step.expected_type => { - Err(AssertionError::HoverTypeMismatch { - id: step.id, - match_mode: "exactly", - actual: actual_type, - expected: step.expected_type.clone(), - } - .into()) - } - TypeMatchMode::Contains if !actual_type.contains(&step.expected_type) => { - Err(AssertionError::HoverTypeMismatch { + let first_section = + actual_sections + .first() + .ok_or_else(|| AssertionError::HoverMalformedLeadingType { id: step.id, - match_mode: "to contain", - actual: actual_type, - expected: step.expected_type.clone(), - } - .into()) + hover: actual_hover.to_string(), + })?; + let first_markdown = 
marked_string_markdown(first_section); + let actual_type = first_markdown + .trim_start() + .strip_prefix('`') + .and_then(|rest| rest.split_once('`').map(|(ty, _)| ty.to_string())) + .ok_or_else(|| AssertionError::HoverMalformedLeadingType { + id: step.id, + hover: first_markdown.clone(), + })?; + if actual_type == step.expected_type { + Ok(()) + } else { + Err(AssertionError::HoverTypeMismatch { + id: step.id, + actual: actual_type, + expected: step.expected_type.clone(), } - TypeMatchMode::Exact | TypeMatchMode::Contains => Ok(()), + .into()) } } diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_runner/helpers.rs b/crates/jrsonnet-lsp-scenario/src/scenario_runner/helpers.rs index 027da775..42e68cfe 100644 --- a/crates/jrsonnet-lsp-scenario/src/scenario_runner/helpers.rs +++ b/crates/jrsonnet-lsp-scenario/src/scenario_runner/helpers.rs @@ -28,25 +28,26 @@ pub(super) fn parse_uri(uri: &str, context: &'static str) -> RunnerResult Option { - let text = match &hover.contents { - lsp_types::HoverContents::Markup(markup) => markup.value.as_str(), - lsp_types::HoverContents::Scalar(scalar) => match scalar { - lsp_types::MarkedString::String(value) => value.as_str(), - lsp_types::MarkedString::LanguageString(language) => language.value.as_str(), - }, - lsp_types::HoverContents::Array(items) => { - let first = items.first()?; - match first { - lsp_types::MarkedString::String(value) => value.as_str(), - lsp_types::MarkedString::LanguageString(language) => language.value.as_str(), - } +pub(super) fn marked_string_markdown(marked: &lsp_types::MarkedString) -> String { + match marked { + lsp_types::MarkedString::String(value) => value.clone(), + lsp_types::MarkedString::LanguageString(language) => { + format!("```{}\n{}\n```", language.language, language.value) } - }; - let trimmed = text.trim_start(); - let rest = trimmed.strip_prefix('`')?; - let (ty, _) = rest.split_once('`')?; - Some(ty.to_string()) + } +} + +pub(super) fn hover_array_sections_from_json( + hover: 
&Value, +) -> Option> { + let contents = hover.get("contents")?; + let sections = contents.as_array()?; + sections + .iter() + .cloned() + .map(serde_json::from_value) + .collect::, _>>() + .ok() } pub(super) fn completion_items( diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_runner/runner.rs b/crates/jrsonnet-lsp-scenario/src/scenario_runner/runner.rs index bfc374b3..cf0ba8b0 100644 --- a/crates/jrsonnet-lsp-scenario/src/scenario_runner/runner.rs +++ b/crates/jrsonnet-lsp-scenario/src/scenario_runner/runner.rs @@ -148,7 +148,9 @@ impl ScenarioRunner { ScenarioStep::ExpectRename(expectation) => self.step_expect_rename(expectation), ScenarioStep::RequestHover(request) => self.step_request_hover(request), ScenarioStep::ExpectHover(expectation) => self.step_expect_hover(expectation), - ScenarioStep::ExpectHoverType(expectation) => self.step_expect_hover_type(expectation), + ScenarioStep::ExpectHoverPrimaryType(expectation) => { + self.step_expect_hover_primary_type(expectation) + } ScenarioStep::RequestSignatureHelp(request) => { self.step_request_signature_help(request) } @@ -210,15 +212,15 @@ mod tests { use lsp_types::{ notification::{DidOpenTextDocument, Notification as _, PublishDiagnostics}, request::{HoverRequest, Request as _}, - Diagnostic, DiagnosticSeverity, Hover, HoverContents, MarkupContent, MarkupKind, Position, + Diagnostic, DiagnosticSeverity, Hover, HoverContents, MarkedString, Position, PublishDiagnosticsParams, Range, }; use super::{super::transport::RpcError, run_scenario}; use crate::{ scenario::{ - DiagnosticsSettledStep, ExpectDiagnosticsStep, ExpectHoverStep, OpenStep, - RequestHoverStep, Scenario, ScenarioStep, + DiagnosticsSettledStep, ExpectDiagnosticsStep, ExpectHoverStep, + HoverSectionExpectation, OpenStep, RequestHoverStep, Scenario, ScenarioStep, }, scenario_runner::{helpers::JsonDifference, RunnerError}, }; @@ -248,10 +250,7 @@ mod tests { fn test_hover() -> Hover { Hover { - contents: HoverContents::Markup(MarkupContent { - 
kind: MarkupKind::Markdown, - value: "`number`".to_string(), - }), + contents: HoverContents::Array(vec![MarkedString::String("`number`".to_string())]), range: None, } } @@ -278,7 +277,9 @@ mod tests { }), ScenarioStep::ExpectHover(ExpectHoverStep { id: 7, - result: Some(expected_hover.clone()), + result: Some(vec![HoverSectionExpectation::Type { + ty: "number".to_string(), + }]), }), ScenarioStep::ExpectDiagnostics(ExpectDiagnosticsStep { uri, @@ -414,10 +415,9 @@ mod tests { [JsonDifference::ValueMismatch { path: "$".to_string(), actual: serde_json::json!({ - "contents": { - "kind": "markdown", - "value": "`number`" - } + "contents": [ + "`number`" + ] }), expected: serde_json::Value::Null, }] diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_script/compile.rs b/crates/jrsonnet-lsp-scenario/src/scenario_script/compile.rs index 0547af76..f237b379 100644 --- a/crates/jrsonnet-lsp-scenario/src/scenario_script/compile.rs +++ b/crates/jrsonnet-lsp-scenario/src/scenario_script/compile.rs @@ -33,7 +33,7 @@ use crate::scenario::{ ChangeFullStep, ChangeIncrementalStep, CloseStep, ConfigStep, DeleteFileStep, DiagnosticsSettledStep, ExpectCodeActionStep, ExpectCodeLensStep, ExpectCompletionStep, ExpectDeclarationStep, ExpectDefinitionStep, ExpectDiagnosticsStep, ExpectDocumentSymbolStep, - ExpectExecuteCommandStep, ExpectFormattingStep, ExpectHoverStep, ExpectHoverTypeStep, + ExpectExecuteCommandStep, ExpectFormattingStep, ExpectHoverPrimaryTypeStep, ExpectHoverStep, ExpectInlayHintsStep, ExpectPrepareRenameStep, ExpectReferencesStep, ExpectRenameStep, ExpectSemanticTokensFullStep, ExpectSemanticTokensRangeStep, ExpectSignatureHelpStep, ExpectTypeDefinitionStep, ExpectWorkspaceSymbolStep, NotifyWatchedFilesStep, OpenStep, @@ -43,7 +43,7 @@ use crate::scenario::{ RequestReferencesStep, RequestRenameStep, RequestSemanticTokensFullStep, RequestSemanticTokensRangeStep, RequestSignatureHelpStep, RequestTypeDefinitionStep, RequestWorkspaceSymbolStep, SaveStep, Scenario, 
ScenarioFileChangeType, ScenarioStep, - TypeMatchMode, WatchedFileChangeStep, WriteFileStep, + WatchedFileChangeStep, WriteFileStep, }; /// Parsed YAML root object for one scenario script file. @@ -92,7 +92,7 @@ impl ScenarioScript { /// Most script steps translate to exactly one executable step, with two notable /// expansions: /// - `create` expands to one or more `writeFile` steps plus `open` steps. - /// - `expectTypes` expands to repeated `requestHover` + `expectHoverType` pairs. + /// - `expectTypes` expands to repeated `requestHover` + `expectHover` pairs. pub(super) fn compile(self, base_dir: &Path) -> Result { let mut registry = RequestRegistry::new(); let mut marker_store = MarkerStore::new(); @@ -434,13 +434,6 @@ impl ScenarioScript { result: step.result, })] } - ScenarioScriptStep::ExpectHoverType(step) => { - vec![ScenarioStep::ExpectHoverType(ExpectHoverTypeStep { - id: registry.claim(RequestKind::Hover, step.request.as_deref())?, - expected_type: step.expected_type, - match_mode: step.match_mode, - })] - } ScenarioScriptStep::RequestSignatureHelp(step) => { vec![ScenarioStep::RequestSignatureHelp( RequestSignatureHelpStep { @@ -477,11 +470,11 @@ impl ScenarioScript { "expectTypes.checks", )?, }); - let expect = ScenarioStep::ExpectHoverType(ExpectHoverTypeStep { - id, - expected_type: check.expected_type, - match_mode: check.match_mode, - }); + let expect = + ScenarioStep::ExpectHoverPrimaryType(ExpectHoverPrimaryTypeStep { + id, + expected_type: check.expected_type, + }); steps.push(request); steps.push(expect); Ok::, CompileScenarioError>(steps) @@ -876,7 +869,6 @@ enum ScenarioScriptStep { ExpectRename(ExpectRenameScriptStep), RequestHover(RequestHoverScriptStep), ExpectHover(ExpectHoverScriptStep), - ExpectHoverType(ExpectHoverTypeScriptStep), RequestSignatureHelp(RequestSignatureHelpScriptStep), ExpectSignatureHelp(ExpectSignatureHelpScriptStep), ExpectTypes(ExpectTypesScriptStep), @@ -1140,17 +1132,6 @@ struct RequestHoverScriptStep { at: 
Option, } -#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] -#[serde(deny_unknown_fields)] -struct ExpectHoverTypeScriptStep { - #[serde(default)] - request: Option, - #[serde(rename = "type")] - expected_type: String, - #[serde(default, rename = "match")] - match_mode: TypeMatchMode, -} - #[derive(Debug, Clone, PartialEq, Eq, Deserialize)] #[serde(deny_unknown_fields)] struct RequestSignatureHelpScriptStep { @@ -1169,7 +1150,7 @@ struct ExpectTypesScriptStep { checks: Vec, } -/// One `expectTypes.checks` entry, expanded to requestHover + expectHoverType. +/// One `expectTypes.checks` entry, expanded to requestHover + expectHover. #[derive(Debug, Clone, PartialEq, Eq, Deserialize)] #[serde(deny_unknown_fields)] struct TypeExpectationScriptStep { @@ -1177,8 +1158,6 @@ struct TypeExpectationScriptStep { at: Option, #[serde(rename = "type")] expected_type: String, - #[serde(default, rename = "match")] - match_mode: TypeMatchMode, } #[derive(Debug, Clone, PartialEq, Eq, Deserialize)] diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_script/inputs.rs b/crates/jrsonnet-lsp-scenario/src/scenario_script/inputs.rs index 08d975a2..9d813434 100644 --- a/crates/jrsonnet-lsp-scenario/src/scenario_script/inputs.rs +++ b/crates/jrsonnet-lsp-scenario/src/scenario_script/inputs.rs @@ -14,7 +14,7 @@ use std::{ use jrsonnet_lsp_types::{SemanticTokenModifierName, SemanticTokenTypeName}; use lsp_types::{ CodeAction, CodeActionKind, CodeActionOrCommand, CodeLens, CompletionResponse, Diagnostic, - DiagnosticSeverity, DocumentSymbolResponse, GotoDefinitionResponse, Hover, InlayHint, Location, + DiagnosticSeverity, DocumentSymbolResponse, GotoDefinitionResponse, InlayHint, Location, NumberOrString, PrepareRenameResponse, SemanticTokens, SemanticTokensRangeResult, SemanticTokensResult, SignatureHelp, SymbolInformation, SymbolKind, TextEdit, WorkspaceEdit, WorkspaceSymbolResponse, @@ -28,7 +28,10 @@ use super::{ }, paths::file_uri, }; -use 
crate::semantic_tokens::{encode_semantic_tokens, semantic_modifiers, ExpectedSemanticToken}; +use crate::{ + scenario::HoverSectionExpectation, + semantic_tokens::{encode_semantic_tokens, semantic_modifiers, ExpectedSemanticToken}, +}; #[derive(Debug, Error)] pub enum InputError { @@ -1087,7 +1090,7 @@ impl PrepareRenameResponseInput { pub(super) struct ExpectHoverScriptStep { #[serde(default)] pub(super) request: Option, - pub(super) result: Option, + pub(super) result: Option>, } #[derive(Debug, Clone, PartialEq, Eq, Deserialize)] diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_collections/filter_map_predicate_refines_output_array.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_collections/filter_map_predicate_refines_output_array.yaml index cd552a54..5302cbe6 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_collections/filter_map_predicate_refines_output_array.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_collections/filter_map_predicate_refines_output_array.yaml @@ -21,6 +21,5 @@ steps: checks: - at: m1 type: 'array' - match: exact - at: m2 type: 'array' diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_collections/filter_with_predicate_refines_output_array.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_collections/filter_with_predicate_refines_output_array.yaml index 6c2efd1d..7afd3fe9 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_collections/filter_with_predicate_refines_output_array.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_collections/filter_with_predicate_refines_output_array.yaml @@ -17,7 +17,5 @@ steps: checks: - at: m1 type: 'array' - match: exact - at: m2 type: 'array' - match: exact diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/negated_guard_refines_branches.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/negated_guard_refines_branches.yaml index d0676c40..ccb37fd8 100644 --- 
a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/negated_guard_refines_branches.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/negated_guard_refines_branches.yaml @@ -20,4 +20,3 @@ steps: type: 'string' - at: m2 type: 'number' - match: exact diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/union_guard_refines_both_branches.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/union_guard_refines_both_branches.yaml index f664534e..3e7adb15 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/union_guard_refines_both_branches.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/union_guard_refines_both_branches.yaml @@ -18,6 +18,5 @@ steps: checks: - at: m1 type: 'number' - match: exact - at: m2 type: 'string' diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_eq_refines_array_to_tuple.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_eq_refines_array_to_tuple.yaml index f64df1c1..25323937 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_eq_refines_array_to_tuple.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_eq_refines_array_to_tuple.yaml @@ -15,4 +15,3 @@ steps: checks: - at: m1 type: '[any, any, any]' - match: exact diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_known_function_refines_impossible_branches.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_known_function_refines_impossible_branches.yaml index e0e520e4..4f6182f3 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_known_function_refines_impossible_branches.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_known_function_refines_impossible_branches.yaml @@ -22,4 +22,3 @@ steps: type: 'never' - at: m3 type: '(x: 
any, y: any) -> number' - match: exact diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_unknown_function_allows_typed_call_site.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_unknown_function_allows_typed_call_site.yaml index 1ca31e3d..ae4bd76d 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_unknown_function_allows_typed_call_site.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_unknown_function_allows_typed_call_site.yaml @@ -20,6 +20,5 @@ steps: checks: - at: m1 type: 'function(arg0: any, arg1: any)' - match: exact - at: m2 type: 'function()' diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/object_field_type_guards_refine_nested_paths.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/object_field_type_guards_refine_nested_paths.yaml index 3852695f..ff83fd2b 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/object_field_type_guards_refine_nested_paths.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/object_field_type_guards_refine_nested_paths.yaml @@ -24,4 +24,3 @@ steps: type: 'number' - at: m2 type: 'string | function() | object | array' - match: exact diff --git a/crates/jrsonnet-lsp/tests/scenarios/hover/array_variable.yaml b/crates/jrsonnet-lsp/tests/scenarios/hover/array_variable.yaml index 30260ba2..ed139f4e 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/hover/array_variable.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/hover/array_variable.yaml @@ -10,11 +10,8 @@ steps: - step: expectHover request: hover result: - contents: - kind: markdown + - type: '[number, number, number]' + - preview: + language: jsonnet value: |- - `[number, number, number]` - - ```jsonnet local arr = [1, 2, 3]; - ``` diff --git a/crates/jrsonnet-lsp/tests/scenarios/hover/boolean_literal.yaml 
b/crates/jrsonnet-lsp/tests/scenarios/hover/boolean_literal.yaml index 7b391046..6c02b040 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/hover/boolean_literal.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/hover/boolean_literal.yaml @@ -10,15 +10,8 @@ steps: - step: expectHover request: hover result: - contents: - kind: markdown - value: |- - `true` - - --- - - Boolean literal `true`. - - ```jsonnet - true - ``` + - type: 'true' + - docs: Boolean literal `true`. + - docs: + language: jsonnet + value: "true" diff --git a/crates/jrsonnet-lsp/tests/scenarios/hover/function_variable.yaml b/crates/jrsonnet-lsp/tests/scenarios/hover/function_variable.yaml index 81b1466d..6dc140a1 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/hover/function_variable.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/hover/function_variable.yaml @@ -9,7 +9,11 @@ steps: as: hover file: main.jsonnet at: m1 -- step: expectHoverType +- step: expectHover request: hover - type: 'function(x: any)' - match: exact + result: + - type: 'function(x: any)' + - preview: + language: jsonnet + value: |- + local f(x) = x; diff --git a/crates/jrsonnet-lsp/tests/scenarios/hover/hover_new_import_invalid_syntax.yaml b/crates/jrsonnet-lsp/tests/scenarios/hover/hover_new_import_invalid_syntax.yaml index 4ff30de3..f79644db 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/hover/hover_new_import_invalid_syntax.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/hover/hover_new_import_invalid_syntax.yaml @@ -24,7 +24,12 @@ steps: as: hover_new file: main.libsonnet at: m1 -- step: expectHoverType +- step: expectHover request: hover_new - type: '(input: any) -> { value: any }' - match: exact + result: + - type: '(input: any) -> { value: any }' + - context: '`new` from `./module/main.libsonnet`' + - preview: + language: jsonnet + value: |- + module.new diff --git a/crates/jrsonnet-lsp/tests/scenarios/hover/local_multiline_preview_trims_trailing_blank_lines.yaml 
b/crates/jrsonnet-lsp/tests/scenarios/hover/local_multiline_preview_trims_trailing_blank_lines.yaml index 60f1193d..a78371b9 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/hover/local_multiline_preview_trims_trailing_blank_lines.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/hover/local_multiline_preview_trims_trailing_blank_lines.yaml @@ -24,17 +24,14 @@ steps: - step: expectHover request: topHover result: - contents: - kind: markdown + - type: '{ a: number, b: number }' + - preview: + language: jsonnet value: |- - `{ a: number, b: number }` - - ```jsonnet local top = { a: 1, b: 2, }; - ``` - step: requestHover as: objHover @@ -43,14 +40,11 @@ steps: - step: expectHover request: objHover result: - contents: - kind: markdown + - type: '{ c: number, d: number }' + - preview: + language: jsonnet value: |- - `{ c: number, d: number }` - - ```jsonnet local objLocal = { c: 3, d: 4, } - ``` diff --git a/crates/jrsonnet-lsp/tests/scenarios/hover/null_literal.yaml b/crates/jrsonnet-lsp/tests/scenarios/hover/null_literal.yaml index a33ff710..2cc5135d 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/hover/null_literal.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/hover/null_literal.yaml @@ -10,15 +10,8 @@ steps: - step: expectHover request: hover result: - contents: - kind: markdown - value: |- - `null` - - --- - - Literal `null` value. - - ```jsonnet - null - ``` + - type: 'null' + - docs: Literal `null` value. 
+ - docs: + language: jsonnet + value: "null" diff --git a/crates/jrsonnet-lsp/tests/scenarios/hover/number_literal.yaml b/crates/jrsonnet-lsp/tests/scenarios/hover/number_literal.yaml index 73c1bdeb..46faf4c8 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/hover/number_literal.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/hover/number_literal.yaml @@ -10,6 +10,8 @@ steps: - step: expectHover request: hover result: - contents: - kind: markdown - value: "`number`" + - type: number + - preview: + language: jsonnet + value: |- + 42 diff --git a/crates/jrsonnet-lsp/tests/scenarios/hover/object_variable.yaml b/crates/jrsonnet-lsp/tests/scenarios/hover/object_variable.yaml index 694dac3d..f5ba2884 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/hover/object_variable.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/hover/object_variable.yaml @@ -10,11 +10,8 @@ steps: - step: expectHover request: hover result: - contents: - kind: markdown + - type: '{ a: number }' + - preview: + language: jsonnet value: |- - `{ a: number }` - - ```jsonnet local obj = { a: 1 }; - ``` diff --git a/crates/jrsonnet-lsp/tests/scenarios/hover/std_object.yaml b/crates/jrsonnet-lsp/tests/scenarios/hover/std_object.yaml index b4cc2c3e..fb428ceb 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/hover/std_object.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/hover/std_object.yaml @@ -10,6 +10,8 @@ steps: - step: expectHover request: hover result: - contents: - kind: markdown - value: "`object`" + - type: object + - preview: + language: jsonnet + value: |- + std.length diff --git a/crates/jrsonnet-lsp/tests/scenarios/hover/string_literal.yaml b/crates/jrsonnet-lsp/tests/scenarios/hover/string_literal.yaml index 7d967b13..c2164e58 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/hover/string_literal.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/hover/string_literal.yaml @@ -10,6 +10,8 @@ steps: - step: expectHover request: hover result: - contents: - kind: markdown - value: "`string`" + - type: string 
+ - preview: + language: jsonnet + value: |- + "hello" diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_bracket_lookup_field_type.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_bracket_lookup_field_type.yaml index 53fe4138..383a9de8 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_bracket_lookup_field_type.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_bracket_lookup_field_type.yaml @@ -19,7 +19,11 @@ steps: as: bracketFieldHover file: main.jsonnet at: m1 -- step: expectHoverType +- step: expectHover request: bracketFieldHover - type: "true" - match: exact + result: + - type: "true" + - preview: + language: jsonnet + value: |- + hm["foo"].a diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_cjk_import_shape.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_cjk_import_shape.yaml index 668e5921..638ca6d4 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_cjk_import_shape.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_cjk_import_shape.yaml @@ -15,7 +15,12 @@ steps: as: importedTypeHasCjkField file: b.jsonnet at: m1 -- step: expectHoverType +- step: expectHover request: importedTypeHasCjkField - type: '{ æ°´: number }' - match: exact + result: + - type: '{ æ°´: number }' + - context: '`a.jsonnet`' + - preview: + language: jsonnet + value: |- + local imported = import "a.jsonnet"; diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_close_brace_enclosing_type.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_close_brace_enclosing_type.yaml index 6a706c9d..c1649cc3 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_close_brace_enclosing_type.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_close_brace_enclosing_type.yaml @@ -16,7 +16,16 @@ steps: as: closeBraceHover file: main.jsonnet at: closeBrace -- step: expectHoverType +- step: expectHover request: closeBraceHover - type: '{ z: { a: number, b: number } }' - match: exact + result: + - type: '{ 
z: { a: number, b: number } }' + - preview: + language: jsonnet + value: |- + { + z: { + a: 1, + b: 2, + }, + ... diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_completion_execute_command.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_completion_execute_command.yaml index 9f421102..f1c9788d 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_completion_execute_command.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_completion_execute_command.yaml @@ -12,10 +12,14 @@ steps: as: hoverRequest file: main.jsonnet at: m1 -- step: expectHoverType +- step: expectHover request: hoverRequest - type: any - match: exact + result: + - type: any + - preview: + language: jsonnet + value: |- + local lib = import 'lib.jsonnet'; - step: requestCompletion as: completionRequest diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_edge_positions.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_edge_positions.yaml index 496f8cb7..4da8bf5b 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_edge_positions.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_edge_positions.yaml @@ -23,17 +23,25 @@ steps: as: hoverEquals file: main.jsonnet at: m2 -- step: expectHoverType +- step: expectHover request: hoverEquals - type: number - match: exact + result: + - type: number + - preview: + language: jsonnet + value: |- + x = 1 # Real symbol usage should still resolve to a type. 
- step: requestHover as: hoverUsage file: main.jsonnet at: m3 -- step: expectHoverType +- step: expectHover request: hoverUsage - type: number - match: exact + result: + - type: number + - preview: + language: jsonnet + value: |- + local x = 1; diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_function_sugar_positions.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_function_sugar_positions.yaml index cce6401a..a64e6f80 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_function_sugar_positions.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_function_sugar_positions.yaml @@ -13,34 +13,50 @@ steps: as: paramHover file: main.jsonnet at: param -- step: expectHoverType +- step: expectHover request: paramHover - type: any - match: exact + result: + - type: any + - preview: + language: jsonnet + value: |- + local f(x) = x + 1; - step: requestHover as: rparenHover file: main.jsonnet at: rparen -- step: expectHoverType +- step: expectHover request: rparenHover - type: any - match: exact + result: + - type: any + - preview: + language: jsonnet + value: |- + f(x) = x + 1 - step: requestHover as: equalsHover file: main.jsonnet at: eq -- step: expectHoverType +- step: expectHover request: equalsHover - type: '(x: any) -> number' - match: exact + result: + - type: '(x: any) -> number' + - preview: + language: jsonnet + value: |- + f(x) = x + 1 - step: requestHover as: usageHover file: main.jsonnet at: use -- step: expectHoverType +- step: expectHover request: usageHover - type: '(x: any) -> number' - match: exact + result: + - type: '(x: any) -> number' + - preview: + language: jsonnet + value: |- + local f(x) = x + 1; diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_import_binding_member_signature.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_import_binding_member_signature.yaml index 6d499840..ed268982 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_import_binding_member_signature.yaml +++ 
b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_import_binding_member_signature.yaml @@ -25,16 +25,26 @@ steps: as: bindingHover file: main.libsonnet at: mBinding -- step: expectHoverType +- step: expectHover request: bindingHover - type: 'new: (namespace: any, cluster: { provider: any, ... }) ->' - match: contains + result: + - type: '{ new: (namespace: any, cluster: { provider: any, ... }) -> { definitions: { enabled: boolean } } }' + - context: '`./database/main.libsonnet`' + - preview: + language: jsonnet + value: |- + local database = import "./database/main.libsonnet"; - step: requestHover as: methodHover file: main.libsonnet at: mMethod -- step: expectHoverType +- step: expectHover request: methodHover - type: '(namespace: any, cluster: { provider: any, ... }) -> { definitions: { enabled: boolean } }' - match: contains + result: + - type: '(namespace: any, cluster: { provider: any, ... }) -> { definitions: { enabled: boolean } }' + - context: '`new` from `./database/main.libsonnet`' + - preview: + language: jsonnet + value: |- + database.new diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_import_field_method_type.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_import_field_method_type.yaml index 42275d06..eb635de9 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_import_field_method_type.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_import_field_method_type.yaml @@ -16,7 +16,12 @@ steps: as: importedMethodHover file: main.libsonnet at: m1 -- step: expectHoverType +- step: expectHover request: importedMethodHover - type: '(x: any) -> { y: number }' - match: exact + result: + - type: '(x: any) -> { y: number }' + - context: '`new` from `./module/main.libsonnet`' + - preview: + language: jsonnet + value: |- + module.new diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_non_identifier_field_names.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_non_identifier_field_names.yaml index 
6ad2eaf0..94afc6f6 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_non_identifier_field_names.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_non_identifier_field_names.yaml @@ -13,7 +13,11 @@ steps: as: objectTypeHasSpacedKey file: main.jsonnet at: m1 -- step: expectHoverType +- step: expectHover request: objectTypeHasSpacedKey - type: '{ foo bar: number, the-field: number }' - match: exact + result: + - type: '{ foo bar: number, the-field: number }' + - preview: + language: jsonnet + value: |- + local obj = { "foo bar": 3, "the-field": 4 }; diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_object_field_key_is_string.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_object_field_key_is_string.yaml index d0e0f54a..fe285480 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_object_field_key_is_string.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_object_field_key_is_string.yaml @@ -13,7 +13,11 @@ steps: as: fieldKeyHover file: main.jsonnet at: fieldKey -- step: expectHoverType +- step: expectHover request: fieldKeyHover - type: string - match: exact + result: + - type: string + - preview: + language: jsonnet + value: |- + z: 1 diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_object_local_binding_precision.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_object_local_binding_precision.yaml index 4f67a4ff..3bf14cf3 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_object_local_binding_precision.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_object_local_binding_precision.yaml @@ -18,25 +18,46 @@ steps: as: bindHover file: main.jsonnet at: bindX -- step: expectHoverType +- step: expectHover request: bindHover - type: '{ a: number, b: number }' - match: exact + result: + - type: '{ a: number, b: number }' + - preview: + language: jsonnet + value: |- + local x = { + a: 1, + b: 2, + } - step: requestHover as: bindEqHover file: main.jsonnet at: bindEq -- step: 
expectHoverType +- step: expectHover request: bindEqHover - type: '{ a: number, b: number }' - match: exact + result: + - type: '{ a: number, b: number }' + - preview: + language: jsonnet + value: |- + x = { + a: 1, + b: 2, + } - step: requestHover as: useHover file: main.jsonnet at: useX -- step: expectHoverType +- step: expectHover request: useHover - type: '{ a: number, b: number }' - match: exact + result: + - type: '{ a: number, b: number }' + - preview: + language: jsonnet + value: |- + local x = { + a: 1, + b: 2, + } diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_object_local_binding_usage_precision.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_object_local_binding_usage_precision.yaml index bfc11708..91a7c739 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_object_local_binding_usage_precision.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_object_local_binding_usage_precision.yaml @@ -18,7 +18,14 @@ steps: as: useHover file: main.jsonnet at: useX -- step: expectHoverType +- step: expectHover request: useHover - type: '{ a: number, b: number }' - match: exact + result: + - type: '{ a: number, b: number }' + - preview: + language: jsonnet + value: |- + local x = { + a: 1, + b: 2, + } diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_open_brace_enclosing_type.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_open_brace_enclosing_type.yaml index 52373198..37f04140 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_open_brace_enclosing_type.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_open_brace_enclosing_type.yaml @@ -16,7 +16,16 @@ steps: as: openBraceHover file: main.jsonnet at: openBrace -- step: expectHoverType +- step: expectHover request: openBraceHover - type: '{ z: { a: number, b: number } }' - match: exact + result: + - type: '{ z: { a: number, b: number } }' + - preview: + language: jsonnet + value: |- + { + z: { + a: 1, + b: 2, + }, + ... 
diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_union_nested_field_type.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_union_nested_field_type.yaml index 182b3446..2fb40077 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_union_nested_field_type.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_union_nested_field_type.yaml @@ -22,7 +22,11 @@ steps: as: unionFieldHoverNumber file: main.jsonnet at: m1 -- step: expectHoverType +- step: expectHover request: unionFieldHoverNumber - type: 'true | false | number | string' - match: exact + result: + - type: 'true | false | number | string' + - preview: + language: jsonnet + value: |- + obj.a.b diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/importstr_non_json_dependency.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/importstr_non_json_dependency.yaml index 95a671d5..a67cb9b1 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/runner/importstr_non_json_dependency.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/importstr_non_json_dependency.yaml @@ -35,10 +35,15 @@ steps: file: main.libsonnet at: m1 # `importstr` must infer a string type. -- step: expectHoverType +- step: expectHover request: payloadHover - type: string - match: exact + result: + - type: string + - context: '`payload` from `./data.libsonnet`' + - preview: + language: jsonnet + value: |- + local payload = data.payload; # Hover the `bytes` value in the same final object expression. - step: requestHover @@ -46,7 +51,12 @@ steps: file: main.libsonnet at: m2 # `importbin` must infer bounded byte numbers (0..255). 
-- step: expectHoverType +- step: expectHover request: bytesHover - type: array - match: exact + result: + - type: array + - context: '`bytes` from `./data.libsonnet`' + - preview: + language: jsonnet + value: |- + local bytes = data.bytes; From a5557558571bb5fcb706a8389367bd58920159f8 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Wed, 18 Feb 2026 14:30:38 +0000 Subject: [PATCH 146/210] refactor(paths): remove guard-backed interned path API Replace InternedPath with cloned Arc handles in path interner APIs so read-lock guards never escape callers. Update dependent manager/import/handler/server call sites to use the new non-locking handle shape while preserving behavior. This removes a key source of lock-lifetime coupling that can cause contention or deadlock under concurrent LSP workloads. --- crates/jrsonnet-lsp-document/src/file_ids.rs | 72 +++++-------------- crates/jrsonnet-lsp-document/src/lib.rs | 2 +- .../src/rename/cross_file.rs | 3 +- .../src/graph/operations.rs | 7 +- crates/jrsonnet-lsp-import/src/graph/tests.rs | 6 +- crates/jrsonnet-lsp-inference/src/manager.rs | 4 +- crates/jrsonnet-lsp-inference/src/provider.rs | 4 +- .../jrsonnet-lsp-inference/src/type_cache.rs | 2 +- crates/jrsonnet-lsp/src/async_diagnostics.rs | 12 ++-- .../src/server/async_requests/references.rs | 8 +-- .../server/async_requests/workspace_symbol.rs | 4 +- .../jrsonnet-lsp/src/server/import_graph.rs | 10 +-- 12 files changed, 45 insertions(+), 89 deletions(-) diff --git a/crates/jrsonnet-lsp-document/src/file_ids.rs b/crates/jrsonnet-lsp-document/src/file_ids.rs index 7f05b9e0..3d2dab74 100644 --- a/crates/jrsonnet-lsp-document/src/file_ids.rs +++ b/crates/jrsonnet-lsp-document/src/file_ids.rs @@ -1,8 +1,8 @@ //! File identity and path interning utilities. 
-use std::{collections::HashMap, ops::Deref, path::PathBuf, sync::Arc}; +use std::{collections::HashMap, path::PathBuf, sync::Arc}; -use parking_lot::{RwLock, RwLockReadGuard}; +use parking_lot::RwLock; use crate::CanonicalPath; @@ -40,7 +40,7 @@ impl std::fmt::Display for FileId { #[derive(Debug, Clone, Default)] struct PathInterner { path_to_id: HashMap, - id_to_path: Vec, + id_to_path: Vec>, } impl PathInterner { @@ -53,7 +53,7 @@ impl PathInterner { let raw = u32::try_from(self.id_to_path.len()).expect("too many interned file paths"); let id = FileId::from_raw(raw); self.path_to_id.insert(path.as_path().to_path_buf(), id); - self.id_to_path.push(path.clone()); + self.id_to_path.push(Arc::new(path.clone())); id } @@ -65,40 +65,11 @@ impl PathInterner { /// Resolve an interned id to its canonical path. #[must_use] - pub fn path(&self, file: FileId) -> Option<&CanonicalPath> { + pub fn path(&self, file: FileId) -> Option<&Arc> { self.id_to_path.get(file.as_usize()) } } -/// Borrowed view of an interned canonical path. -pub struct InternedPath<'a> { - guard: RwLockReadGuard<'a, PathInterner>, - file: FileId, -} - -impl InternedPath<'_> { - #[must_use] - pub fn as_canonical_path(&self) -> &CanonicalPath { - self - } -} - -impl std::fmt::Debug for InternedPath<'_> { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "{} ({})", self.as_canonical_path(), self.file) - } -} - -impl Deref for InternedPath<'_> { - type Target = CanonicalPath; - - fn deref(&self) -> &Self::Target { - self.guard - .path(self.file) - .expect("interned path missing for interned file id") - } -} - /// Read-only resolver for a shared interned path store. #[derive(Debug, Clone)] pub struct PathResolver { @@ -114,10 +85,8 @@ impl PathResolver { /// Borrow an interned file identifier's canonical path. 
#[must_use] - pub fn path(&self, file: FileId) -> Option> { - let guard = self.interner.read(); - guard.path(file)?; - Some(InternedPath { guard, file }) + pub fn path(&self, file: FileId) -> Option> { + self.interner.read().path(file).cloned() } } @@ -150,10 +119,8 @@ impl PathStore { /// Borrow an interned file identifier's canonical path. #[must_use] - pub fn path(&self, file: FileId) -> Option> { - let guard = self.interner.read(); - guard.path(file)?; - Some(InternedPath { guard, file }) + pub fn path(&self, file: FileId) -> Option> { + self.interner.read().path(file).cloned() } /// Returns `true` when two stores share the same underlying interner. @@ -183,7 +150,7 @@ mod tests { assert_eq!(first, second); assert_eq!(interner.file(&alpha), Some(first)); - assert_eq!(interner.path(first), Some(&alpha)); + assert_eq!(interner.path(first).map(Arc::as_ref), Some(&alpha)); } #[test] @@ -197,8 +164,8 @@ mod tests { assert_eq!(alpha_id, FileId::from_raw(0)); assert_eq!(beta_id, FileId::from_raw(1)); - assert_eq!(interner.path(alpha_id), Some(&alpha)); - assert_eq!(interner.path(beta_id), Some(&beta)); + assert_eq!(interner.path(alpha_id).map(Arc::as_ref), Some(&alpha)); + assert_eq!(interner.path(beta_id).map(Arc::as_ref), Some(&beta)); } #[test] @@ -229,16 +196,11 @@ mod tests { let alpha = path("alpha"); let id = store.intern(&alpha); - let first = resolver - .path(id) - .map(|path| std::ptr::from_ref(path.as_canonical_path())) - .expect("path should exist"); - let second = resolver - .path(id) - .map(|path| std::ptr::from_ref(path.as_canonical_path())) - .expect("path should exist"); - assert_eq!(first, second); - assert_eq!(resolver.path(id).as_deref(), Some(&alpha)); + let first = resolver.path(id).expect("path should exist"); + let second = resolver.path(id).expect("path should exist"); + assert!(Arc::ptr_eq(&first, &second)); + assert_eq!(first.as_ref(), &alpha); + assert_eq!(second.as_ref(), &alpha); } #[test] diff --git 
a/crates/jrsonnet-lsp-document/src/lib.rs b/crates/jrsonnet-lsp-document/src/lib.rs index 872f1427..1724c460 100644 --- a/crates/jrsonnet-lsp-document/src/lib.rs +++ b/crates/jrsonnet-lsp-document/src/lib.rs @@ -26,7 +26,7 @@ pub use document::{Document, ParsedDocument, SharedDocument, SyntaxError}; pub use error::{ is_valid_jsonnet_identifier, validate_identifier, HandlerResult, LspError, LspResult, }; -pub use file_ids::{FileId, InternedPath, PathResolver, PathStore}; +pub use file_ids::{FileId, PathResolver, PathStore}; pub use position::LineIndex; pub use types::{ ByteOffset, CanonicalPath, CharOffset, DocVersion, Line, LspPosition, LspRange, SymbolName, diff --git a/crates/jrsonnet-lsp-handlers/src/rename/cross_file.rs b/crates/jrsonnet-lsp-handlers/src/rename/cross_file.rs index e4b0d393..e73d344a 100644 --- a/crates/jrsonnet-lsp-handlers/src/rename/cross_file.rs +++ b/crates/jrsonnet-lsp-handlers/src/rename/cross_file.rs @@ -125,10 +125,9 @@ fn find_references_in_importer( use jrsonnet_rowan_parser::nodes::{ExprBase, ExprField}; let importer_path = import_graph.path(importer_file)?; - let path = importer_path.as_canonical_path(); + let path = importer_path.as_ref(); let doc = manager.get_document(path)?; let uri = path.to_uri().ok()?; - drop(importer_path); let text = doc.text(); let line_index = doc.line_index(); let ast = doc.ast(); diff --git a/crates/jrsonnet-lsp-import/src/graph/operations.rs b/crates/jrsonnet-lsp-import/src/graph/operations.rs index c64e21e0..a6c06f87 100644 --- a/crates/jrsonnet-lsp-import/src/graph/operations.rs +++ b/crates/jrsonnet-lsp-import/src/graph/operations.rs @@ -3,7 +3,10 @@ //! Maintains a bidirectional graph of import relationships between files, //! enabling efficient cross-file reference lookups. 
-use std::collections::{HashMap, HashSet, VecDeque}; +use std::{ + collections::{HashMap, HashSet, VecDeque}, + sync::Arc, +}; use jrsonnet_lsp_document::{CanonicalPath, Document, FileId, PathResolver, PathStore}; @@ -84,7 +87,7 @@ impl ImportGraph { /// Borrow an interned file identifier's canonical path. #[must_use] - pub fn path(&self, file: FileId) -> Option> { + pub fn path(&self, file: FileId) -> Option> { self.resolver.path(file) } diff --git a/crates/jrsonnet-lsp-import/src/graph/tests.rs b/crates/jrsonnet-lsp-import/src/graph/tests.rs index bf4a1971..20a71b0a 100644 --- a/crates/jrsonnet-lsp-import/src/graph/tests.rs +++ b/crates/jrsonnet-lsp-import/src/graph/tests.rs @@ -20,11 +20,7 @@ fn simple_resolver(import: &str) -> Option { fn graph_paths(graph: &ImportGraph, files: Vec) -> Vec { files .into_iter() - .filter_map(|file| { - graph - .path(file) - .map(|path| path.as_canonical_path().clone()) - }) + .filter_map(|file| graph.path(file).map(|path| path.as_ref().clone())) .collect() } diff --git a/crates/jrsonnet-lsp-inference/src/manager.rs b/crates/jrsonnet-lsp-inference/src/manager.rs index 5a375c4e..1b040b47 100644 --- a/crates/jrsonnet-lsp-inference/src/manager.rs +++ b/crates/jrsonnet-lsp-inference/src/manager.rs @@ -114,11 +114,11 @@ impl DocumentManager { fn path_for_id(&self, file_id: FileId) -> Option { self.resolver .path(file_id) - .map(|path| path.as_canonical_path().clone()) + .map(|path| path.as_ref().clone()) } /// Borrow an interned file identifier's canonical path. 
- pub fn path(&self, file: FileId) -> Option> { + pub fn path(&self, file: FileId) -> Option> { self.resolver.path(file) } diff --git a/crates/jrsonnet-lsp-inference/src/provider.rs b/crates/jrsonnet-lsp-inference/src/provider.rs index b37f93e7..b0102398 100644 --- a/crates/jrsonnet-lsp-inference/src/provider.rs +++ b/crates/jrsonnet-lsp-inference/src/provider.rs @@ -111,12 +111,12 @@ impl TypeProvider { let Some(dep_path) = graph.path(dep_file) else { return; }; - let Some(doc) = doc_source.get_document(dep_path.as_canonical_path()) else { + let Some(doc) = doc_source.get_document(dep_path.as_ref()) else { return; }; let resolved_imports = resolved_imports_for(&graph, dep_file); analyze_and_cache_with_resolved_imports( - dep_path.as_canonical_path(), + dep_path.as_ref(), &doc, &self.type_cache, resolved_imports, diff --git a/crates/jrsonnet-lsp-inference/src/type_cache.rs b/crates/jrsonnet-lsp-inference/src/type_cache.rs index 7b996be9..077679b4 100644 --- a/crates/jrsonnet-lsp-inference/src/type_cache.rs +++ b/crates/jrsonnet-lsp-inference/src/type_cache.rs @@ -90,7 +90,7 @@ impl TypeCache { /// Borrow an interned file identifier's canonical path. 
#[must_use] - pub fn path(&self, file: FileId) -> Option> { + pub fn path(&self, file: FileId) -> Option> { self.resolver.path(file) } diff --git a/crates/jrsonnet-lsp/src/async_diagnostics.rs b/crates/jrsonnet-lsp/src/async_diagnostics.rs index ac8099ea..f7d22ca3 100644 --- a/crates/jrsonnet-lsp/src/async_diagnostics.rs +++ b/crates/jrsonnet-lsp/src/async_diagnostics.rs @@ -226,7 +226,7 @@ impl AsyncDiagnostics { }; trace!( "Diagnostics worker: computing diagnostics for {}", - path.as_canonical_path() + path.as_ref() ); let doc_source = WorkerDocumentSource { current_file: request.file_id, @@ -238,14 +238,12 @@ impl AsyncDiagnostics { Arc::clone(&config.import_graph), Arc::clone(&config.global_types), ); - let analysis = - Arc::new(provider.analyze(path.as_canonical_path(), &document, &doc_source)); - let import_resolution = - ImportResolution::new(path.as_canonical_path(), &request.import_roots); + let analysis = Arc::new(provider.analyze(path.as_ref(), &document, &doc_source)); + let import_resolution = ImportResolution::new(path.as_ref(), &request.import_roots); let import_occurrences = import_resolution.parse_occurrences(&document); let Some(params) = handlers::publish_diagnostics_params( - path.as_canonical_path(), + path.as_ref(), &document, request.enable_lint, config.evaluator.as_deref(), @@ -254,7 +252,7 @@ impl AsyncDiagnostics { ) else { debug!( "Diagnostics worker: failed to build URI for {}, skipping diagnostics", - path.as_canonical_path() + path.as_ref() ); continue; }; diff --git a/crates/jrsonnet-lsp/src/server/async_requests/references.rs b/crates/jrsonnet-lsp/src/server/async_requests/references.rs index 1ab19b34..399d2462 100644 --- a/crates/jrsonnet-lsp/src/server/async_requests/references.rs +++ b/crates/jrsonnet-lsp/src/server/async_requests/references.rs @@ -33,11 +33,9 @@ impl AsyncRequestContext { .into_iter() .filter_map(|file| { let path = self.documents.path(file)?; - let doc = self.documents.get_document(path.as_canonical_path())?; - 
let semantic = self - .documents - .get_semantic_artifacts(path.as_canonical_path()); - Some((path.as_canonical_path().clone(), doc, semantic)) + let doc = self.documents.get_document(path.as_ref())?; + let semantic = self.documents.get_semantic_artifacts(path.as_ref()); + Some((path.as_ref().clone(), doc, semantic)) }) .collect(); let importer_refs: Vec<_> = importer_docs diff --git a/crates/jrsonnet-lsp/src/server/async_requests/workspace_symbol.rs b/crates/jrsonnet-lsp/src/server/async_requests/workspace_symbol.rs index 078016f2..b71f6dff 100644 --- a/crates/jrsonnet-lsp/src/server/async_requests/workspace_symbol.rs +++ b/crates/jrsonnet-lsp/src/server/async_requests/workspace_symbol.rs @@ -24,10 +24,10 @@ impl AsyncRequestContext { let Some(path) = self.documents.path(file) else { return Vec::new(); }; - let Some(doc) = self.documents.get_document(path.as_canonical_path()) else { + let Some(doc) = self.documents.get_document(path.as_ref()) else { return Vec::new(); }; - let Ok(uri) = path.as_canonical_path().to_uri() else { + let Ok(uri) = path.to_uri() else { return Vec::new(); }; handlers::workspace_symbols_for_document(&doc, &uri, query) diff --git a/crates/jrsonnet-lsp/src/server/import_graph.rs b/crates/jrsonnet-lsp/src/server/import_graph.rs index 2a407e49..b18b143e 100644 --- a/crates/jrsonnet-lsp/src/server/import_graph.rs +++ b/crates/jrsonnet-lsp/src/server/import_graph.rs @@ -27,7 +27,7 @@ impl Server { let Some(path) = documents.path(file) else { return; }; - let Some(doc) = documents.get_document(path.as_canonical_path()) else { + let Some(doc) = documents.get_document(path.as_ref()) else { // File no longer exists or cannot be read. 
import_graph.write().remove_file(file); return; @@ -35,14 +35,14 @@ impl Server { let config = config.read(); let import_roots = effective_import_roots( - path.as_canonical_path().as_path(), + path.as_ref().as_path(), &config.jpath, config.resolve_paths_with_tanka, ); drop(config); // Parse imports OUTSIDE the graph lock to minimize lock hold time. - let import_resolution = ImportResolution::new(path.as_canonical_path(), &import_roots); + let import_resolution = ImportResolution::new(path.as_ref(), &import_roots); import_resolution.parse_entries(&doc) }; @@ -65,13 +65,13 @@ impl Server { /// Schedule diagnostics computation for a file. pub(super) fn schedule_diagnostics_file(&self, file: FileId) { let request = self.documents.path(file).and_then(|path| { - let doc = self.documents.get(path.as_canonical_path())?; + let doc = self.documents.get(path.as_ref())?; let (enable_lint, import_roots) = { let config = self.config.read(); ( config.lint_diagnostics_enabled(), effective_import_roots( - path.as_canonical_path().as_path(), + path.as_ref().as_path(), &config.jpath, config.resolve_paths_with_tanka, ), From 8fa081c7148a418b58a7ecf508f423c8e62e68bb Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Wed, 18 Feb 2026 14:37:47 +0000 Subject: [PATCH 147/210] refactor(import-graph): record resolved FileId on entries Add resolved_file to parsed and test ImportEntry values and use it for edge updates in import graph operations. This moves graph dependency updates to FileId-based data while keeping resolved_path for boundary and display use, reducing path-resolution work during graph mutation. 
--- .../src/graph/operations.rs | 47 ++++++++++--------- crates/jrsonnet-lsp-import/src/graph/parse.rs | 2 + crates/jrsonnet-lsp-import/src/graph/tests.rs | 12 +++++ .../src/graph/traversal.rs | 6 +-- .../src/resolve/parse_adapter.rs | 2 + crates/jrsonnet-lsp-inference/src/provider.rs | 8 ++++ 6 files changed, 52 insertions(+), 25 deletions(-) diff --git a/crates/jrsonnet-lsp-import/src/graph/operations.rs b/crates/jrsonnet-lsp-import/src/graph/operations.rs index a6c06f87..4c9e9844 100644 --- a/crates/jrsonnet-lsp-import/src/graph/operations.rs +++ b/crates/jrsonnet-lsp-import/src/graph/operations.rs @@ -22,6 +22,8 @@ pub struct ImportEntry { pub binding_name: Option, /// The raw import path as written in the source. pub import_path: String, + /// Interned file id of the resolved target, when known. + pub resolved_file: Option, /// The resolved canonical path of the imported file. pub resolved_path: Option, } @@ -106,8 +108,7 @@ impl ImportGraph { // Update imported_by reverse index for entry in &entries { - if let Some(ref resolved) = entry.resolved_path { - let resolved_id = self.paths.intern(resolved); + if let Some(resolved_id) = entry.resolved_file { self.imported_by .entry(resolved_id) .or_default() @@ -128,7 +129,13 @@ impl ImportGraph { where F: Fn(&str) -> Option, { - let entries = parse_document_imports(doc, &resolve_import); + let mut entries = parse_document_imports(doc, &resolve_import); + for entry in &mut entries { + let Some(path) = entry.resolved_path.as_ref() else { + continue; + }; + entry.resolved_file = Some(self.paths.intern(path)); + } self.update_file_with_entries(file_id, entries); } @@ -139,19 +146,18 @@ impl ImportGraph { // Remove from imported_by reverse index if let Some(old_entries) = self.imports.get(&file_id) { for entry in old_entries { - if let Some(ref resolved) = entry.resolved_path { - if let Some(resolved_id) = self.resolver.file(resolved) { - let should_remove_entry = self - .imported_by - .get_mut(&resolved_id) - 
.is_some_and(|importers| { - importers.remove(&file_id); - importers.is_empty() - }); - if should_remove_entry { - self.imported_by.remove(&resolved_id); - } - } + let Some(resolved_id) = entry.resolved_file else { + continue; + }; + let should_remove_entry = + self.imported_by + .get_mut(&resolved_id) + .is_some_and(|importers| { + importers.remove(&file_id); + importers.is_empty() + }); + if should_remove_entry { + self.imported_by.remove(&resolved_id); } } } @@ -168,11 +174,8 @@ impl ImportGraph { } #[must_use] - pub(super) fn resolved_entry_id(&self, entry: &ImportEntry) -> Option { - entry - .resolved_path - .as_ref() - .and_then(|path| self.resolver.file(path)) + pub(super) fn resolved_entry_id(entry: &ImportEntry) -> Option { + entry.resolved_file } /// Get the files that directly import a given file. @@ -218,7 +221,7 @@ impl ImportGraph { .map(|entries| { entries .iter() - .filter(|entry| self.resolved_entry_id(entry) == Some(target_id)) + .filter(|entry| Self::resolved_entry_id(entry) == Some(target_id)) .collect() }) .unwrap_or_default() diff --git a/crates/jrsonnet-lsp-import/src/graph/parse.rs b/crates/jrsonnet-lsp-import/src/graph/parse.rs index 6934b676..ddd31eed 100644 --- a/crates/jrsonnet-lsp-import/src/graph/parse.rs +++ b/crates/jrsonnet-lsp-import/src/graph/parse.rs @@ -138,6 +138,7 @@ where kind, binding_name, import_path: path, + resolved_file: None, resolved_path: resolved, }, import_range, @@ -191,6 +192,7 @@ where binding_name: binding_name_from_import_token(import_text), resolved_path: resolve_import(&import_path), import_path, + resolved_file: None, }, import_range, }); diff --git a/crates/jrsonnet-lsp-import/src/graph/tests.rs b/crates/jrsonnet-lsp-import/src/graph/tests.rs index 20a71b0a..a4426607 100644 --- a/crates/jrsonnet-lsp-import/src/graph/tests.rs +++ b/crates/jrsonnet-lsp-import/src/graph/tests.rs @@ -37,6 +37,7 @@ fn test_parse_local_import() { kind: ImportKind::Code, binding_name: Some("lib".to_string()), import_path: 
"lib.jsonnet".to_string(), + resolved_file: None, resolved_path: Some(test_path("lib.jsonnet")), }] ); @@ -61,6 +62,7 @@ fn test_parse_import_occurrences_include_string_token_range() { kind: ImportKind::Code, binding_name: Some("lib".to_string()), import_path: "lib.jsonnet".to_string(), + resolved_file: None, resolved_path: Some(test_path("lib.jsonnet")), }, import_range: rowan::TextRange::new(start.into(), end.into()), @@ -87,6 +89,7 @@ fn test_parse_import_occurrences_fallback_unterminated_string() { kind: ImportKind::Code, binding_name: Some("lib".to_string()), import_path: "lib.jsonnet".to_string(), + resolved_file: None, resolved_path: Some(test_path("lib.jsonnet")), }, import_range: rowan::TextRange::new(start.into(), end.into()), @@ -107,6 +110,7 @@ fn test_parse_local_import_single_quote() { kind: ImportKind::Code, binding_name: Some("lib".to_string()), import_path: "lib.jsonnet".to_string(), + resolved_file: None, resolved_path: Some(test_path("lib.jsonnet")), }] ); @@ -130,12 +134,14 @@ lib1 + lib2 kind: ImportKind::Code, binding_name: Some("lib1".to_string()), import_path: "lib1.jsonnet".to_string(), + resolved_file: None, resolved_path: Some(test_path("lib1.jsonnet")), }, ImportEntry { kind: ImportKind::Code, binding_name: Some("lib2".to_string()), import_path: "lib2.jsonnet".to_string(), + resolved_file: None, resolved_path: Some(test_path("lib2.jsonnet")), }, ] @@ -155,12 +161,14 @@ fn test_import_graph_update() { // Check that main imports lib let imports = graph.imports(graph.intern(&main)); + let lib_file = graph.intern(&lib); assert_eq!( imports, vec![ImportEntry { kind: ImportKind::Code, binding_name: Some("lib".to_string()), import_path: "lib.jsonnet".to_string(), + resolved_file: Some(lib_file), resolved_path: Some(lib.clone()), }] ); @@ -192,6 +200,7 @@ fn test_import_graph_lookups_with_equivalent_paths() { kind: ImportKind::Code, binding_name: Some("lib".to_string()), import_path: "lib.jsonnet".to_string(), + resolved_file: 
Some(graph.intern(&lib_lookup)), resolved_path: Some(lib_lookup), }] ); @@ -265,6 +274,7 @@ lib + other kind: ImportKind::Code, binding_name: Some("lib".to_string()), import_path: "lib.jsonnet".to_string(), + resolved_file: Some(graph.intern(&lib)), resolved_path: Some(lib), }] ); @@ -469,12 +479,14 @@ fn test_process_with_dependencies_filtered_by_kind() { kind: ImportKind::Code, binding_name: Some("data".to_string()), import_path: "data.jsonnet".to_string(), + resolved_file: Some(graph.intern(&data)), resolved_path: Some(data.clone()), }, ImportEntry { kind: ImportKind::String, binding_name: Some("payload".to_string()), import_path: "script.k".to_string(), + resolved_file: Some(graph.intern(&script)), resolved_path: Some(script), }, ], diff --git a/crates/jrsonnet-lsp-import/src/graph/traversal.rs b/crates/jrsonnet-lsp-import/src/graph/traversal.rs index 1378c790..1d98e364 100644 --- a/crates/jrsonnet-lsp-import/src/graph/traversal.rs +++ b/crates/jrsonnet-lsp-import/src/graph/traversal.rs @@ -28,7 +28,7 @@ impl ImportGraph { for (&path_id, entries) in &self.imports { let dep_count = entries .iter() - .filter_map(|entry| self.resolved_entry_id(entry)) + .filter_map(Self::resolved_entry_id) .filter(|dep_id| self.imports.contains_key(dep_id)) .count(); in_degree.insert(path_id, dep_count); @@ -66,7 +66,7 @@ impl ImportGraph { // Check if all dependencies of importer are processed let all_deps_processed = self.imports.get(importer).is_none_or(|entries| { entries.iter().all(|entry| { - self.resolved_entry_id(entry).is_none_or(|dep_id| { + Self::resolved_entry_id(entry).is_none_or(|dep_id| { processed.contains(&dep_id) || !self.imports.contains_key(&dep_id) }) @@ -177,7 +177,7 @@ impl ImportGraph { if !include_dependency(entry) { continue; } - if let Some(resolved_id) = self.resolved_entry_id(entry) { + if let Some(resolved_id) = Self::resolved_entry_id(entry) { deps.push(resolved_id); } } diff --git a/crates/jrsonnet-lsp-import/src/resolve/parse_adapter.rs 
b/crates/jrsonnet-lsp-import/src/resolve/parse_adapter.rs index 66eb43f3..553e3267 100644 --- a/crates/jrsonnet-lsp-import/src/resolve/parse_adapter.rs +++ b/crates/jrsonnet-lsp-import/src/resolve/parse_adapter.rs @@ -63,6 +63,7 @@ mod tests { kind: crate::graph::ImportKind::Code, binding_name: Some("lib".to_string()), import_path: "lib.jsonnet".to_string(), + resolved_file: None, resolved_path: Some(resolved_lib), }] ); @@ -107,6 +108,7 @@ mod tests { kind: crate::graph::ImportKind::Code, binding_name: Some("lib".to_string()), import_path: "lib.jsonnet".to_string(), + resolved_file: None, resolved_path: Some(resolved_lib), }, import_range: rowan::TextRange::new(start.into(), end.into()), diff --git a/crates/jrsonnet-lsp-inference/src/provider.rs b/crates/jrsonnet-lsp-inference/src/provider.rs index b0102398..82a15f75 100644 --- a/crates/jrsonnet-lsp-inference/src/provider.rs +++ b/crates/jrsonnet-lsp-inference/src/provider.rs @@ -238,11 +238,13 @@ mod tests { { let mut graph = import_graph.write(); let main_file = graph.intern(&main_path); + let dep_file = graph.intern(&dep_path); graph.update_file_with_entries( main_file, vec![jrsonnet_lsp_import::ImportEntry { kind: jrsonnet_lsp_import::ImportKind::Code, import_path: "dep.jsonnet".to_string(), + resolved_file: Some(dep_file), resolved_path: Some(dep_path.clone()), binding_name: None, }], @@ -289,11 +291,13 @@ mod tests { { let mut graph = import_graph.write(); let main_file = graph.intern(&main_path); + let dep_file = graph.intern(&dep_path); graph.update_file_with_entries( main_file, vec![jrsonnet_lsp_import::ImportEntry { kind: jrsonnet_lsp_import::ImportKind::Code, import_path: "vendor/dep.jsonnet".to_string(), + resolved_file: Some(dep_file), resolved_path: Some(dep_path), binding_name: None, }], @@ -327,11 +331,13 @@ mod tests { { let mut graph = import_graph.write(); let main_file = graph.intern(&main_path); + let script_file = graph.intern(&script_path); graph.update_file_with_entries( main_file, 
vec![jrsonnet_lsp_import::ImportEntry { kind: jrsonnet_lsp_import::ImportKind::String, import_path: "./script.k".to_string(), + resolved_file: Some(script_file), resolved_path: Some(script_path), binding_name: None, }], @@ -365,11 +371,13 @@ mod tests { { let mut graph = import_graph.write(); let main_file = graph.intern(&main_path); + let script_file = graph.intern(&script_path); graph.update_file_with_entries( main_file, vec![jrsonnet_lsp_import::ImportEntry { kind: jrsonnet_lsp_import::ImportKind::Binary, import_path: "./script.k".to_string(), + resolved_file: Some(script_file), resolved_path: Some(script_path), binding_name: None, }], From 5610f67bced3243e7e98b21926a9bd58ff2c43e4 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Wed, 18 Feb 2026 14:41:28 +0000 Subject: [PATCH 148/210] refactor(import-graph): resolve entry FileIds before writes Add an explicit resolve_entry_files step so parsed import entries are mapped to FileId before update_file_with_entries mutates graph edges. Use this in server import-graph refresh and related call sites, and prefer resolved_file in provider/import lookup with path fallback. 
--- .../jrsonnet-lsp-handlers/src/rename/cross_file.rs | 2 ++ crates/jrsonnet-lsp-import/src/graph/operations.rs | 13 +++++++++++-- crates/jrsonnet-lsp-inference/src/provider.rs | 10 +++++++--- .../server/async_requests/import_lookup/resolve.rs | 8 +++++++- crates/jrsonnet-lsp/src/server/import_graph.rs | 10 ++++++++-- 5 files changed, 35 insertions(+), 8 deletions(-) diff --git a/crates/jrsonnet-lsp-handlers/src/rename/cross_file.rs b/crates/jrsonnet-lsp-handlers/src/rename/cross_file.rs index e73d344a..7e9ed985 100644 --- a/crates/jrsonnet-lsp-handlers/src/rename/cross_file.rs +++ b/crates/jrsonnet-lsp-handlers/src/rename/cross_file.rs @@ -257,6 +257,8 @@ mod tests { import_full.canonicalize().ok().map(CanonicalPath::new) }); let main_file = import_graph.intern(&main_canon); + let mut entries = entries; + import_graph.resolve_entry_files(&mut entries); import_graph.update_file_with_entries(main_file, entries); } diff --git a/crates/jrsonnet-lsp-import/src/graph/operations.rs b/crates/jrsonnet-lsp-import/src/graph/operations.rs index 4c9e9844..475f72fd 100644 --- a/crates/jrsonnet-lsp-import/src/graph/operations.rs +++ b/crates/jrsonnet-lsp-import/src/graph/operations.rs @@ -130,13 +130,22 @@ impl ImportGraph { F: Fn(&str) -> Option, { let mut entries = parse_document_imports(doc, &resolve_import); - for entry in &mut entries { + self.resolve_entry_files(&mut entries); + self.update_file_with_entries(file_id, entries); + } + + /// Fill `resolved_file` for entries that currently only carry + /// a canonical `resolved_path`. + pub fn resolve_entry_files(&self, entries: &mut [ImportEntry]) { + for entry in entries { + if entry.resolved_file.is_some() { + continue; + } let Some(path) = entry.resolved_path.as_ref() else { continue; }; entry.resolved_file = Some(self.paths.intern(path)); } - self.update_file_with_entries(file_id, entries); } /// Remove a file from the import graph. 
diff --git a/crates/jrsonnet-lsp-inference/src/provider.rs b/crates/jrsonnet-lsp-inference/src/provider.rs index 82a15f75..91fd911a 100644 --- a/crates/jrsonnet-lsp-inference/src/provider.rs +++ b/crates/jrsonnet-lsp-inference/src/provider.rs @@ -144,9 +144,13 @@ fn resolved_imports_for(graph: &ImportGraph, file: FileId) -> FxHashMap Option { diff --git a/crates/jrsonnet-lsp/src/server/import_graph.rs b/crates/jrsonnet-lsp/src/server/import_graph.rs index b18b143e..15eef290 100644 --- a/crates/jrsonnet-lsp/src/server/import_graph.rs +++ b/crates/jrsonnet-lsp/src/server/import_graph.rs @@ -23,7 +23,7 @@ impl Server { config: &SharedConfig, file: FileId, ) { - let entries = { + let mut entries = { let Some(path) = documents.path(file) else { return; }; @@ -46,7 +46,13 @@ impl Server { import_resolution.parse_entries(&doc) }; - // Now acquire the write lock and do the quick data structure update + { + // Resolve to FileId before taking the write lock. + let graph = import_graph.read(); + graph.resolve_entry_files(&mut entries); + } + + // Now acquire the write lock and do the quick data structure update. import_graph.write().update_file_with_entries(file, entries); } From e6218703ba15a1cfc80edf4c4773c038e08b58e4 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Wed, 18 Feb 2026 14:45:23 +0000 Subject: [PATCH 149/210] refactor(inference): use FileId for dependency analysis Extend DocumentSource with FileId lookups and switch provider dependency pre-analysis to fetch documents by interned id. Add analyze_and_cache_file_with_resolved_imports to avoid path round-trips once file ids are known, and wire document manager/worker adapters to serve documents directly by FileId. 
--- crates/jrsonnet-lsp-inference/src/manager.rs | 45 +++++++++++++---- crates/jrsonnet-lsp-inference/src/provider.rs | 50 ++++++++++++++++--- .../jrsonnet-lsp-inference/src/type_cache.rs | 14 +++++- crates/jrsonnet-lsp/src/async_diagnostics.rs | 13 ++--- 4 files changed, 96 insertions(+), 26 deletions(-) diff --git a/crates/jrsonnet-lsp-inference/src/manager.rs b/crates/jrsonnet-lsp-inference/src/manager.rs index 1b040b47..37f80b94 100644 --- a/crates/jrsonnet-lsp-inference/src/manager.rs +++ b/crates/jrsonnet-lsp-inference/src/manager.rs @@ -278,17 +278,8 @@ impl DocumentManager { /// Returns a cloned Document which is cheap due to internal Arc usage. pub fn get_document(&self, path: &CanonicalPath) -> Option { if let Some(file_id) = self.file_id(path) { - // Check open documents - if let Some(doc) = self.open.get(&file_id) { - return Some(doc.clone()); - } - - // Check closed cache - { - let closed = self.closed.read(); - if let Some(doc) = closed.peek(&file_id) { - return Some(doc.clone()); - } + if let Some(doc) = self.get_document_file(file_id) { + return Some(doc); } } @@ -303,6 +294,34 @@ impl DocumentManager { Some(document) } + /// Get a document by interned file id from open docs, closed cache, or disk. + /// + /// Returns a cloned Document which is cheap due to internal Arc usage. + pub fn get_document_file(&self, file: FileId) -> Option { + // Check open documents. + if let Some(doc) = self.open.get(&file) { + return Some(doc.clone()); + } + + // Check closed cache. + { + let closed = self.closed.read(); + if let Some(doc) = closed.peek(&file) { + return Some(doc.clone()); + } + } + + // Read from disk once, then cache in `closed` for reuse. + let path = self.path(file)?; + let text = std::fs::read_to_string(path.as_ref().as_path()).ok()?; + let document = Document::new(text, DocVersion::new(0)); + { + let mut closed = self.closed.write(); + closed.put(file, document.clone()); + } + Some(document) + } + /// Check if a document is currently open. 
pub fn is_open(&self, path: &CanonicalPath) -> bool { self.file_id(path) @@ -521,6 +540,10 @@ impl DocumentSource for DocumentManager { // Delegate to the existing get_document method. DocumentManager::get_document(self, path) } + + fn get_document_file(&self, file: FileId) -> Option { + DocumentManager::get_document_file(self, file) + } } impl IntoIterator for &DocumentManager { diff --git a/crates/jrsonnet-lsp-inference/src/provider.rs b/crates/jrsonnet-lsp-inference/src/provider.rs index 91fd911a..7b53ad09 100644 --- a/crates/jrsonnet-lsp-inference/src/provider.rs +++ b/crates/jrsonnet-lsp-inference/src/provider.rs @@ -13,7 +13,9 @@ use rustc_hash::FxHashMap; use crate::{ analysis::TypeAnalysis, - type_cache::{analyze_and_cache_with_resolved_imports, CachingImportResolver, SharedTypeCache}, + type_cache::{ + analyze_and_cache_file_with_resolved_imports, CachingImportResolver, SharedTypeCache, + }, }; /// Trait for looking up documents by path. @@ -23,6 +25,8 @@ use crate::{ pub trait DocumentSource { /// Get a document by path, if it exists. fn get_document(&self, path: &CanonicalPath) -> Option; + /// Get a document by interned file id, if it exists. + fn get_document_file(&self, file: FileId) -> Option; } /// Provides type analysis with proper dependency handling. 
@@ -108,15 +112,12 @@ impl TypeProvider { root, |entry| entry.kind == ImportKind::Code, |dep_file| { - let Some(dep_path) = graph.path(dep_file) else { - return; - }; - let Some(doc) = doc_source.get_document(dep_path.as_ref()) else { + let Some(doc) = doc_source.get_document_file(dep_file) else { return; }; let resolved_imports = resolved_imports_for(&graph, dep_file); - analyze_and_cache_with_resolved_imports( - dep_path.as_ref(), + analyze_and_cache_file_with_resolved_imports( + dep_file, &doc, &self.type_cache, resolved_imports, @@ -190,8 +191,41 @@ mod tests { impl DocumentSource for TestDocSource { fn get_document(&self, path: &CanonicalPath) -> Option { let file_id = self.resolver.file(path)?; - self.docs.get(&file_id).map(|r| r.clone()) + self.get_document_file(file_id) } + + fn get_document_file(&self, file: FileId) -> Option { + self.docs.get(&file).map(|r| r.clone()) + } + } + + impl TestDocSource { + fn path(&self, file: FileId) -> Option> { + self.resolver.path(file) + } + } + + #[test] + fn test_provider_doc_source_file_lookup_round_trip() { + let path_store = PathStore::new(); + let doc_source = TestDocSource::new(path_store.clone()); + let path = test_path("roundtrip.jsonnet"); + let doc = Document::new("42".to_string(), DocVersion(1)); + doc_source.insert(path.clone(), doc.clone()); + + let file = path_store + .resolver() + .file(&path) + .expect("path should have been interned"); + let loaded = doc_source + .get_document_file(file) + .expect("document should be retrievable by file id"); + assert_eq!(loaded.text(), doc.text()); + assert_eq!(loaded.version(), doc.version()); + assert_eq!( + doc_source.path(file).map(|p| p.as_ref().clone()), + Some(path) + ); } fn test_path(name: &str) -> CanonicalPath { diff --git a/crates/jrsonnet-lsp-inference/src/type_cache.rs b/crates/jrsonnet-lsp-inference/src/type_cache.rs index 077679b4..98e58500 100644 --- a/crates/jrsonnet-lsp-inference/src/type_cache.rs +++ b/crates/jrsonnet-lsp-inference/src/type_cache.rs 
@@ -182,12 +182,24 @@ pub(crate) fn analyze_and_cache_with_resolved_imports( where I: IntoIterator, { - let version = doc.version().0; let file = { let read_cache = cache.read(); read_cache.file(path) }; let file = file.unwrap_or_else(|| cache.write().intern(path)); + analyze_and_cache_file_with_resolved_imports(file, doc, cache, resolved_imports) +} + +pub(crate) fn analyze_and_cache_file_with_resolved_imports( + file: FileId, + doc: &Document, + cache: &SharedTypeCache, + resolved_imports: I, +) -> GlobalTy +where + I: IntoIterator, +{ + let version = doc.version().0; // Check if we already have a cached type for this version { diff --git a/crates/jrsonnet-lsp/src/async_diagnostics.rs b/crates/jrsonnet-lsp/src/async_diagnostics.rs index f7d22ca3..40d7a512 100644 --- a/crates/jrsonnet-lsp/src/async_diagnostics.rs +++ b/crates/jrsonnet-lsp/src/async_diagnostics.rs @@ -86,14 +86,15 @@ struct WorkerRuntime { impl DocumentSource for WorkerDocumentSource { fn get_document(&self, path: &CanonicalPath) -> Option { - if self - .documents - .file(path) - .is_some_and(|file| file == self.current_file) - { + let file = self.documents.file(path)?; + self.get_document_file(file) + } + + fn get_document_file(&self, file: FileId) -> Option { + if file == self.current_file { return Some(self.current_doc.clone()); } - self.documents.get_document(path) + self.documents.get_document_file(file) } } From dd8d1856af1610e534689b4d17a9cd4e1378da3b Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Wed, 18 Feb 2026 14:47:06 +0000 Subject: [PATCH 150/210] refactor(inference): narrow DocumentSource to FileId Drop the path-based method from DocumentSource now that provider analysis only needs dependency docs by FileId. Keep path handling in document manager boundary methods while internal provider/worker flows use the file-id API directly. 
--- crates/jrsonnet-lsp-inference/src/manager.rs | 5 ----- crates/jrsonnet-lsp-inference/src/provider.rs | 7 ------- crates/jrsonnet-lsp/src/async_diagnostics.rs | 9 ++------- 3 files changed, 2 insertions(+), 19 deletions(-) diff --git a/crates/jrsonnet-lsp-inference/src/manager.rs b/crates/jrsonnet-lsp-inference/src/manager.rs index 37f80b94..cb8d23e8 100644 --- a/crates/jrsonnet-lsp-inference/src/manager.rs +++ b/crates/jrsonnet-lsp-inference/src/manager.rs @@ -536,11 +536,6 @@ impl DocumentManager { } impl DocumentSource for DocumentManager { - fn get_document(&self, path: &CanonicalPath) -> Option { - // Delegate to the existing get_document method. - DocumentManager::get_document(self, path) - } - fn get_document_file(&self, file: FileId) -> Option { DocumentManager::get_document_file(self, file) } diff --git a/crates/jrsonnet-lsp-inference/src/provider.rs b/crates/jrsonnet-lsp-inference/src/provider.rs index 7b53ad09..a82e06a7 100644 --- a/crates/jrsonnet-lsp-inference/src/provider.rs +++ b/crates/jrsonnet-lsp-inference/src/provider.rs @@ -23,8 +23,6 @@ use crate::{ /// This allows `TypeProvider` to work with different document storage /// implementations (e.g., `DocumentManager`, `DashMap`). pub trait DocumentSource { - /// Get a document by path, if it exists. - fn get_document(&self, path: &CanonicalPath) -> Option; /// Get a document by interned file id, if it exists. 
fn get_document_file(&self, file: FileId) -> Option; } @@ -189,11 +187,6 @@ mod tests { } impl DocumentSource for TestDocSource { - fn get_document(&self, path: &CanonicalPath) -> Option { - let file_id = self.resolver.file(path)?; - self.get_document_file(file_id) - } - fn get_document_file(&self, file: FileId) -> Option { self.docs.get(&file).map(|r| r.clone()) } diff --git a/crates/jrsonnet-lsp/src/async_diagnostics.rs b/crates/jrsonnet-lsp/src/async_diagnostics.rs index 40d7a512..981b78b0 100644 --- a/crates/jrsonnet-lsp/src/async_diagnostics.rs +++ b/crates/jrsonnet-lsp/src/async_diagnostics.rs @@ -14,7 +14,7 @@ use std::{ }; use crossbeam_channel::{Receiver, Sender}; -use jrsonnet_lsp_document::{CanonicalPath, DocVersion, Document, FileId}; +use jrsonnet_lsp_document::{DocVersion, Document, FileId}; use jrsonnet_lsp_import::{ImportGraph, ImportResolution}; use jrsonnet_lsp_inference::{ DocumentSource, SharedDocumentManager, SharedTypeCache, TypeProvider, @@ -85,11 +85,6 @@ struct WorkerRuntime { } impl DocumentSource for WorkerDocumentSource { - fn get_document(&self, path: &CanonicalPath) -> Option { - let file = self.documents.file(path)?; - self.get_document_file(file) - } - fn get_document_file(&self, file: FileId) -> Option { if file == self.current_file { return Some(self.current_doc.clone()); @@ -303,7 +298,7 @@ impl Drop for AsyncDiagnostics { mod tests { use std::time::Duration; - use jrsonnet_lsp_document::PathStore; + use jrsonnet_lsp_document::{CanonicalPath, PathStore}; use super::*; From 3e040568a89377da43e91eca7ae4cbf5707f5b0a Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Wed, 18 Feb 2026 14:48:32 +0000 Subject: [PATCH 151/210] refactor(async): load documents by FileId in handlers Switch references and workspace symbol handlers to use DocumentManager::get_document_file when iterating known FileId values. This removes redundant path-to-file lookups and keeps path handling at URI/boundary points only. 
--- crates/jrsonnet-lsp/src/server/async_requests/references.rs | 2 +- .../jrsonnet-lsp/src/server/async_requests/workspace_symbol.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/jrsonnet-lsp/src/server/async_requests/references.rs b/crates/jrsonnet-lsp/src/server/async_requests/references.rs index 399d2462..7d9c7423 100644 --- a/crates/jrsonnet-lsp/src/server/async_requests/references.rs +++ b/crates/jrsonnet-lsp/src/server/async_requests/references.rs @@ -33,7 +33,7 @@ impl AsyncRequestContext { .into_iter() .filter_map(|file| { let path = self.documents.path(file)?; - let doc = self.documents.get_document(path.as_ref())?; + let doc = self.documents.get_document_file(file)?; let semantic = self.documents.get_semantic_artifacts(path.as_ref()); Some((path.as_ref().clone(), doc, semantic)) }) diff --git a/crates/jrsonnet-lsp/src/server/async_requests/workspace_symbol.rs b/crates/jrsonnet-lsp/src/server/async_requests/workspace_symbol.rs index b71f6dff..1e6fd249 100644 --- a/crates/jrsonnet-lsp/src/server/async_requests/workspace_symbol.rs +++ b/crates/jrsonnet-lsp/src/server/async_requests/workspace_symbol.rs @@ -24,7 +24,7 @@ impl AsyncRequestContext { let Some(path) = self.documents.path(file) else { return Vec::new(); }; - let Some(doc) = self.documents.get_document(path.as_ref()) else { + let Some(doc) = self.documents.get_document_file(file) else { return Vec::new(); }; let Ok(uri) = path.to_uri() else { From 2115fd4ff9131324b841c4366083bafcd0e3648a Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Wed, 18 Feb 2026 14:49:28 +0000 Subject: [PATCH 152/210] refactor(import-graph): use FileId doc lookup on refresh Use DocumentManager::get_document_file in import-graph refresh since the caller already has a FileId. This removes another path-to-file lookup from the hot reindex/update path while keeping path use for boundary resolution only. 
--- crates/jrsonnet-lsp/src/server/import_graph.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/jrsonnet-lsp/src/server/import_graph.rs b/crates/jrsonnet-lsp/src/server/import_graph.rs index 15eef290..ab3702f0 100644 --- a/crates/jrsonnet-lsp/src/server/import_graph.rs +++ b/crates/jrsonnet-lsp/src/server/import_graph.rs @@ -27,7 +27,7 @@ impl Server { let Some(path) = documents.path(file) else { return; }; - let Some(doc) = documents.get_document(path.as_ref()) else { + let Some(doc) = documents.get_document_file(file) else { // File no longer exists or cannot be read. import_graph.write().remove_file(file); return; From 44291d5cb0e68891a045d22fcc9f26d5b7870c30 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Wed, 18 Feb 2026 15:26:07 +0000 Subject: [PATCH 153/210] refactor(imports): enforce resolved FileId graph invariant Treat import-graph entries as resolved_file-authoritative by adding a debug invariant in update_file_with_entries and removing resolved_path-based fallback logic in provider and import lookup resolution paths. --- crates/jrsonnet-lsp-import/src/graph/operations.rs | 7 +++++++ crates/jrsonnet-lsp-inference/src/provider.rs | 7 +------ .../src/server/async_requests/import_lookup/resolve.rs | 10 +++------- 3 files changed, 11 insertions(+), 13 deletions(-) diff --git a/crates/jrsonnet-lsp-import/src/graph/operations.rs b/crates/jrsonnet-lsp-import/src/graph/operations.rs index 475f72fd..264e9516 100644 --- a/crates/jrsonnet-lsp-import/src/graph/operations.rs +++ b/crates/jrsonnet-lsp-import/src/graph/operations.rs @@ -103,6 +103,13 @@ impl ImportGraph { /// Parse the imports first using [`parse_document_imports`], then call this /// method while holding the write lock. 
pub fn update_file_with_entries(&mut self, file_id: FileId, entries: Vec) { + debug_assert!( + entries + .iter() + .all(|entry| { entry.resolved_path.is_none() || entry.resolved_file.is_some() }), + "import graph entries with resolved_path must also include resolved_file" + ); + // Remove old entries for this file self.remove_file(file_id); diff --git a/crates/jrsonnet-lsp-inference/src/provider.rs b/crates/jrsonnet-lsp-inference/src/provider.rs index a82e06a7..d94d9576 100644 --- a/crates/jrsonnet-lsp-inference/src/provider.rs +++ b/crates/jrsonnet-lsp-inference/src/provider.rs @@ -143,12 +143,7 @@ fn resolved_imports_for(graph: &ImportGraph, file: FileId) -> FxHashMap Option { From f7c3ece076db67d6348766c0074c3da256a280c1 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Wed, 18 Feb 2026 15:28:01 +0000 Subject: [PATCH 154/210] refactor(import-graph): resolve entry FileIds defensively Make update_file_with_entries populate missing resolved_file values from resolved_path before mutating dependency indexes. This prevents silent edge drops if a caller provides parsed entries without explicit FileId resolution. --- crates/jrsonnet-lsp-import/src/graph/operations.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/crates/jrsonnet-lsp-import/src/graph/operations.rs b/crates/jrsonnet-lsp-import/src/graph/operations.rs index 264e9516..5c07d203 100644 --- a/crates/jrsonnet-lsp-import/src/graph/operations.rs +++ b/crates/jrsonnet-lsp-import/src/graph/operations.rs @@ -102,7 +102,8 @@ impl ImportGraph { /// This is the preferred method when you want to minimize lock hold time. /// Parse the imports first using [`parse_document_imports`], then call this /// method while holding the write lock. 
- pub fn update_file_with_entries(&mut self, file_id: FileId, entries: Vec) { + pub fn update_file_with_entries(&mut self, file_id: FileId, mut entries: Vec) { + self.resolve_entry_files(&mut entries); debug_assert!( entries .iter() From aff86bfd32f7a36867b5677bd4799ba168b77900 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Wed, 18 Feb 2026 15:33:07 +0000 Subject: [PATCH 155/210] refactor(locking): shorten import graph read-lock scope Add dependency_levels to import graph and use it in TypeProvider to snapshot dependency order and resolved imports under a short read lock, then release the lock before expensive analysis/cache work. This avoids holding import-graph read locks across long-running inference. --- .../src/graph/traversal.rs | 43 +++++++++------ crates/jrsonnet-lsp-inference/src/provider.rs | 54 ++++++++++++------- 2 files changed, 62 insertions(+), 35 deletions(-) diff --git a/crates/jrsonnet-lsp-import/src/graph/traversal.rs b/crates/jrsonnet-lsp-import/src/graph/traversal.rs index 1d98e364..02b0b13a 100644 --- a/crates/jrsonnet-lsp-import/src/graph/traversal.rs +++ b/crates/jrsonnet-lsp-import/src/graph/traversal.rs @@ -6,6 +6,32 @@ use super::{ImportEntry, ImportGraph}; use crate::work_queue::{WorkQueue, WorkQueueExt}; impl ImportGraph { + /// Build dependency levels for a root file. + /// + /// Returns levels in dependency order (leaves first), with each inner level + /// safe to process in parallel. + pub fn dependency_levels

(&self, root: FileId, include_dependency: P) -> Vec> + where + P: Fn(&ImportEntry) -> bool + Sync, + { + let mut work = WorkQueue::new(); + work.push(root); + + work.run(|path_id, deps| { + // Get dependencies from import graph + if let Some(entries) = self.imports.get(path_id) { + for entry in entries { + if !include_dependency(entry) { + continue; + } + if let Some(resolved_id) = Self::resolved_entry_id(entry) { + deps.push(resolved_id); + } + } + } + }) + } + /// Compute a topological ordering of files based on import dependencies. /// /// Returns files in an order where each file comes after all files it imports. @@ -167,22 +193,7 @@ impl ImportGraph { F: Fn(FileId) + Sync, P: Fn(&ImportEntry) -> bool + Sync, { - let mut work = WorkQueue::new(); - work.push(root); - - let levels = work.run(|path_id, deps| { - // Get dependencies from import graph - if let Some(entries) = self.imports.get(path_id) { - for entry in entries { - if !include_dependency(entry) { - continue; - } - if let Some(resolved_id) = Self::resolved_entry_id(entry) { - deps.push(resolved_id); - } - } - } - }); + let levels = self.dependency_levels(root, include_dependency); // Process levels in dependency order (leaves first) levels.process_parallel(|path_id| f(*path_id)); diff --git a/crates/jrsonnet-lsp-inference/src/provider.rs b/crates/jrsonnet-lsp-inference/src/provider.rs index d94d9576..a7a5e4e3 100644 --- a/crates/jrsonnet-lsp-inference/src/provider.rs +++ b/crates/jrsonnet-lsp-inference/src/provider.rs @@ -9,6 +9,7 @@ use jrsonnet_lsp_document::{CanonicalPath, Document, FileId}; use jrsonnet_lsp_import::{ImportGraph, ImportKind}; use jrsonnet_lsp_types::GlobalTyStore; use parking_lot::RwLock; +use rayon::prelude::*; use rustc_hash::FxHashMap; use crate::{ @@ -102,26 +103,41 @@ impl TypeProvider { path: &CanonicalPath, doc_source: &D, ) { - let graph = self.import_graph.read(); - let Some(root) = graph.file(path) else { - return; + let dependency_levels = { + let graph = 
self.import_graph.read(); + let Some(root) = graph.file(path) else { + return; + }; + + // Snapshot dependencies and resolved imports under the read lock, then + // release the lock before expensive analysis work. + graph + .dependency_levels(root, |entry| entry.kind == ImportKind::Code) + .into_iter() + .map(|level| { + level + .into_iter() + .map(|dep_file| (dep_file, resolved_imports_for(&graph, dep_file))) + .collect::>() + }) + .collect::>() }; - graph.process_with_dependencies( - root, - |entry| entry.kind == ImportKind::Code, - |dep_file| { - let Some(doc) = doc_source.get_document_file(dep_file) else { - return; - }; - let resolved_imports = resolved_imports_for(&graph, dep_file); - analyze_and_cache_file_with_resolved_imports( - dep_file, - &doc, - &self.type_cache, - resolved_imports, - ); - }, - ); + + for level in dependency_levels { + level + .into_par_iter() + .for_each(|(dep_file, resolved_imports)| { + let Some(doc) = doc_source.get_document_file(dep_file) else { + return; + }; + analyze_and_cache_file_with_resolved_imports( + dep_file, + &doc, + &self.type_cache, + resolved_imports, + ); + }); + } } /// Get the global type store. From 774a72540873b317ac7c25ff17daf084297dc6ff Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Wed, 18 Feb 2026 15:53:36 +0000 Subject: [PATCH 156/210] perf(import-graph): optimize topological traversal and queue Switch import traversal in-degree bookkeeping to FxHashMap and split topological ordering into helper methods with guard clauses. Compute next levels by decrementing importer in-degrees instead of rescanning dependency sets. Move WorkQueue internals to FxHashMap/FxHashSet, add explicit queued-item deduplication, and avoid extra dependency vector cloning during DFS scheduling. 
--- Cargo.lock | 1 + crates/jrsonnet-lsp-import/Cargo.toml | 1 + .../src/graph/traversal.rs | 128 +++++++++--------- crates/jrsonnet-lsp-import/src/work_queue.rs | 59 ++++---- 4 files changed, 101 insertions(+), 88 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 5bff0b56..e7fbea22 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1908,6 +1908,7 @@ dependencies = [ "jrsonnet-rowan-parser", "rayon", "rowan", + "rustc-hash", "tempfile", ] diff --git a/crates/jrsonnet-lsp-import/Cargo.toml b/crates/jrsonnet-lsp-import/Cargo.toml index 1322a0cc..a9a2810e 100644 --- a/crates/jrsonnet-lsp-import/Cargo.toml +++ b/crates/jrsonnet-lsp-import/Cargo.toml @@ -12,6 +12,7 @@ jrsonnet-lsp-document = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-docum jrsonnet-rowan-parser = { version = "0.5.0-pre97", path = "../jrsonnet-rowan-parser" } rayon = "1.11.0" rowan.workspace = true +rustc-hash.workspace = true [dev-dependencies] tempfile.workspace = true diff --git a/crates/jrsonnet-lsp-import/src/graph/traversal.rs b/crates/jrsonnet-lsp-import/src/graph/traversal.rs index 02b0b13a..a21e013d 100644 --- a/crates/jrsonnet-lsp-import/src/graph/traversal.rs +++ b/crates/jrsonnet-lsp-import/src/graph/traversal.rs @@ -1,6 +1,5 @@ -use std::collections::{HashMap, HashSet}; - use jrsonnet_lsp_document::FileId; +use rustc_hash::FxHashMap; use super::{ImportEntry, ImportGraph}; use crate::work_queue::{WorkQueue, WorkQueueExt}; @@ -41,80 +40,85 @@ impl ImportGraph { /// Returns `None` if there's a cycle in the import graph. 
#[must_use] pub fn topological_order(&self) -> Option>> { - let mut in_degree: HashMap = HashMap::new(); - let mut levels: Vec> = Vec::new(); - - // Initialize in-degree for all files to 0 - for &path_id in self.imports.keys() { - in_degree.insert(path_id, 0); + if self.imports.is_empty() { + return Some(Vec::new()); } - // Calculate in-degree: count how many dependencies each file has - // (how many files it imports that are also in our graph) - for (&path_id, entries) in &self.imports { - let dep_count = entries - .iter() - .filter_map(Self::resolved_entry_id) - .filter(|dep_id| self.imports.contains_key(dep_id)) - .count(); - in_degree.insert(path_id, dep_count); + let mut in_degree = self.topological_in_degree(); + let mut current_level = Self::zero_in_degree(&in_degree); + if current_level.is_empty() { + return None; } - // Find all files with no dependencies (in-degree 0) - let mut current_level: Vec = in_degree - .iter() - .filter(|(_, °)| deg == 0) - .map(|(&path_id, _)| path_id) - .collect(); - - let mut processed = HashSet::new(); + let mut levels: Vec> = Vec::new(); + let mut processed_count = 0usize; while !current_level.is_empty() { // Sort for deterministic ordering - current_level.sort(); + current_level.sort_unstable(); + + processed_count += current_level.len(); + let next_level = self.next_topological_level(¤t_level, &mut in_degree); + levels.push(current_level); + current_level = next_level; + } + + // Check if all files were processed (no cycles) + if processed_count != self.imports.len() { + return None; + } + + Some(levels) + } - // Mark current level as processed - for path_id in ¤t_level { - processed.insert(*path_id); + fn topological_in_degree(&self) -> FxHashMap { + let mut in_degree: FxHashMap = + self.imports.keys().map(|&file| (file, 0)).collect(); + + for (dependency, importers) in &self.imported_by { + if !in_degree.contains_key(dependency) { + continue; } - levels.push(current_level.clone()); - - // Find next level: files whose 
dependencies are all now processed - let mut next_level = Vec::new(); - for path_id in ¤t_level { - // For each file that imports this one - if let Some(importers) = self.imported_by.get(path_id) { - for importer in importers { - if processed.contains(importer) { - continue; - } - // Check if all dependencies of importer are processed - let all_deps_processed = self.imports.get(importer).is_none_or(|entries| { - entries.iter().all(|entry| { - Self::resolved_entry_id(entry).is_none_or(|dep_id| { - processed.contains(&dep_id) - || !self.imports.contains_key(&dep_id) - }) - }) - }); - - if all_deps_processed && !next_level.contains(importer) { - next_level.push(*importer); - } - } + for importer in importers { + if let Some(degree) = in_degree.get_mut(importer) { + *degree += 1; } } - - current_level = next_level; } - // Check if all files were processed (no cycles) - if processed.len() == self.imports.len() { - Some(levels) - } else { - None // Cycle detected - } + in_degree + } + + fn zero_in_degree(in_degree: &FxHashMap) -> Vec { + in_degree + .iter() + .filter_map(|(&file, °ree)| (degree == 0).then_some(file)) + .collect() + } + + fn next_topological_level( + &self, + current_level: &[FileId], + in_degree: &mut FxHashMap, + ) -> Vec { + current_level + .iter() + .flat_map(|file| { + self.imported_by + .get(file) + .into_iter() + .flat_map(|importers| importers.iter().copied()) + }) + .filter_map(|importer| { + let degree = in_degree.get_mut(&importer)?; + if *degree == 0 { + return None; + } + *degree -= 1; + (*degree == 0).then_some(importer) + }) + .collect() } /// Process files in topological order with parallel processing within each level. diff --git a/crates/jrsonnet-lsp-import/src/work_queue.rs b/crates/jrsonnet-lsp-import/src/work_queue.rs index 8d04ffc3..e6c626d6 100644 --- a/crates/jrsonnet-lsp-import/src/work_queue.rs +++ b/crates/jrsonnet-lsp-import/src/work_queue.rs @@ -30,7 +30,9 @@ //! } //! 
``` -use std::{collections::HashMap, hash::Hash}; +use std::hash::Hash; + +use rustc_hash::{FxHashMap, FxHashSet}; /// Action in the work queue stack. #[derive(Debug)] @@ -50,9 +52,11 @@ pub struct WorkQueue { /// Stack of actions (LIFO for DFS). actions: Vec>, /// Computed levels for processed items. - item_levels: HashMap, + item_levels: FxHashMap, /// Track which items are currently being processed (for cycle detection). - in_progress: HashMap, + in_progress: FxHashSet, + /// Track items already queued for processing. + queued: FxHashSet, } impl Default for WorkQueue @@ -73,8 +77,9 @@ where pub fn new() -> Self { Self { actions: Vec::new(), - item_levels: HashMap::new(), - in_progress: HashMap::new(), + item_levels: FxHashMap::default(), + in_progress: FxHashSet::default(), + queued: FxHashSet::default(), } } @@ -85,10 +90,16 @@ where where T: Clone, { - // Skip if already processed or in progress - if !self.item_levels.contains_key(&item) && !self.in_progress.contains_key(&item) { - self.actions.push(Action::Enter(item)); + // Skip if already processed, in progress, or already queued. + if self.item_levels.contains_key(&item) + || self.in_progress.contains(&item) + || self.queued.contains(&item) + { + return; } + + self.queued.insert(item.clone()); + self.actions.push(Action::Enter(item)); } /// Get the computed level for an item (if already processed). 
@@ -117,7 +128,7 @@ where F: FnMut(&T, &mut Vec), { // Track dependencies for each item to compute levels - let mut item_deps: HashMap> = HashMap::new(); + let mut item_deps: FxHashMap> = FxHashMap::default(); while let Some(action) = self.actions.pop() { match action { @@ -125,9 +136,10 @@ where if self.item_levels.contains_key(&item) { continue; // Already processed } + self.queued.remove(&item); // Mark as in progress - self.in_progress.insert(item.clone(), true); + self.in_progress.insert(item.clone()); // Schedule exit self.actions.push(Action::Exit(item.clone())); @@ -136,23 +148,21 @@ where let mut deps = Vec::new(); visitor(&item, &mut deps); - // Store deps for level computation - item_deps.insert(item.clone(), deps.clone()); - // Push dependencies onto stack - for dep in deps { - if !self.item_levels.contains_key(&dep) - && !self.in_progress.contains_key(&dep) - { - self.actions.push(Action::Enter(dep)); - } + for dep in &deps { + self.push(dep.clone()); } + + // Store deps for level computation + item_deps.insert(item, deps); } Action::Exit(item) => { self.in_progress.remove(&item); // Compute level: max(dep levels) + 1, or 0 if no deps - let deps = item_deps.get(&item).cloned().unwrap_or_default(); + let deps = item_deps + .get(&item) + .map_or(&[][..], std::vec::Vec::as_slice); let max_dep_level = deps .iter() .filter_map(|dep| self.get_level(dep)) @@ -179,12 +189,7 @@ where let mut levels: Vec> = vec![Vec::new(); max_level + 1]; for (item, level) in self.item_levels { - if levels.len() <= level { - levels.resize_with(level + 1, Vec::new); - } - if let Some(items) = levels.get_mut(level) { - items.push(item); - } + levels[level].push(item); } levels @@ -253,6 +258,8 @@ impl WorkQueueExt for Vec> { #[cfg(test)] mod tests { + use std::collections::HashMap; + use super::*; #[test] From b5c686b5b8ea71332ce50c8f25acec0c3885a127 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Wed, 18 Feb 2026 16:04:41 +0000 Subject: [PATCH 157/210] perf(workspace-symbol): 
rank top results without full sort Extract workspace symbol ranking into explicit sort keys and use `select_nth_unstable_by` to keep only the top candidates before final deterministic ordering. Preserve output ordering semantics and add focused unit tests for ordering, truncation, and match-rank behavior. --- .../server/async_requests/workspace_symbol.rs | 111 +++++++++++++++--- 1 file changed, 94 insertions(+), 17 deletions(-) diff --git a/crates/jrsonnet-lsp/src/server/async_requests/workspace_symbol.rs b/crates/jrsonnet-lsp/src/server/async_requests/workspace_symbol.rs index 1e6fd249..4aa398b4 100644 --- a/crates/jrsonnet-lsp/src/server/async_requests/workspace_symbol.rs +++ b/crates/jrsonnet-lsp/src/server/async_requests/workspace_symbol.rs @@ -5,6 +5,7 @@ use rayon::prelude::*; use super::{super::unique_files, AsyncRequestContext}; const MAX_WORKSPACE_SYMBOL_RESULTS: usize = 128; +type WorkspaceSymbolSortKey = (u8, usize, String, String, u32, u32, u32, u32); impl AsyncRequestContext { pub(crate) fn workspace_symbol( @@ -18,7 +19,7 @@ impl AsyncRequestContext { unique_files(import_graph.all_files().chain(self.documents.open_files())) }; - let mut all_symbols: Vec = files + let all_symbols: Vec = files .into_par_iter() .flat_map(|file| { let Some(path) = self.documents.path(file) else { @@ -35,22 +36,7 @@ impl AsyncRequestContext { .collect(); let query_lower = query.to_lowercase(); - all_symbols.sort_by_cached_key(|symbol| { - let name_lower = symbol.name.to_lowercase(); - ( - workspace_symbol_match_rank(&name_lower, &query_lower), - name_lower.len(), - name_lower, - symbol.location.uri.as_str().to_string(), - symbol.location.range.start.line, - symbol.location.range.start.character, - symbol.location.range.end.line, - symbol.location.range.end.character, - ) - }); - if all_symbols.len() > MAX_WORKSPACE_SYMBOL_RESULTS { - all_symbols.truncate(MAX_WORKSPACE_SYMBOL_RESULTS); - } + let all_symbols = rank_workspace_symbols(all_symbols, &query_lower); if 
all_symbols.is_empty() { return None; @@ -59,6 +45,51 @@ impl AsyncRequestContext { } } +fn rank_workspace_symbols( + all_symbols: Vec, + query_lower: &str, +) -> Vec { + let ranked_symbols: Vec<(WorkspaceSymbolSortKey, SymbolInformation)> = all_symbols + .into_iter() + .map(|symbol| (workspace_symbol_sort_key(&symbol, query_lower), symbol)) + .collect(); + + take_top_ranked(ranked_symbols, MAX_WORKSPACE_SYMBOL_RESULTS) + .into_iter() + .map(|(_, symbol)| symbol) + .collect() +} + +fn take_top_ranked(mut ranked: Vec<(K, V)>, max_results: usize) -> Vec<(K, V)> { + if max_results == 0 { + return Vec::new(); + } + if ranked.len() > max_results { + let nth = max_results - 1; + ranked.select_nth_unstable_by(nth, |(left, _), (right, _)| left.cmp(right)); + ranked.truncate(max_results); + } + ranked.sort_unstable_by(|(left, _), (right, _)| left.cmp(right)); + ranked +} + +fn workspace_symbol_sort_key( + symbol: &SymbolInformation, + query_lower: &str, +) -> WorkspaceSymbolSortKey { + let name_lower = symbol.name.to_lowercase(); + ( + workspace_symbol_match_rank(&name_lower, query_lower), + name_lower.len(), + name_lower, + symbol.location.uri.as_str().to_string(), + symbol.location.range.start.line, + symbol.location.range.start.character, + symbol.location.range.end.line, + symbol.location.range.end.character, + ) +} + fn workspace_symbol_match_rank(name_lower: &str, query_lower: &str) -> u8 { if query_lower.is_empty() { return 0; @@ -71,3 +102,49 @@ fn workspace_symbol_match_rank(name_lower: &str, query_lower: &str) -> u8 { } 2 } + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_take_top_ranked_orders_by_key() { + let ranked = take_top_ranked( + vec![ + ((2u8, 0usize), "tail"), + ((1, 1), "prefix_b"), + ((1, 0), "prefix_a"), + ((0, 0), "exact"), + ], + 4, + ); + + let values: Vec<_> = ranked.into_iter().map(|(_, value)| value).collect(); + assert_eq!(values, vec!["exact", "prefix_a", "prefix_b", "tail"]); + } + + #[test] + fn 
test_take_top_ranked_truncates_to_limit() { + let ranked = take_top_ranked( + (0..(MAX_WORKSPACE_SYMBOL_RESULTS + 5)) + .map(|i| ((1u8, i), i)) + .collect(), + MAX_WORKSPACE_SYMBOL_RESULTS, + ); + + assert_eq!(ranked.len(), MAX_WORKSPACE_SYMBOL_RESULTS); + assert_eq!(ranked.first().map(|((_, i), _)| *i), Some(0)); + assert_eq!( + ranked.last().map(|((_, i), _)| *i), + Some(MAX_WORKSPACE_SYMBOL_RESULTS - 1) + ); + } + + #[test] + fn test_workspace_symbol_match_rank() { + assert_eq!(workspace_symbol_match_rank("foo", ""), 0); + assert_eq!(workspace_symbol_match_rank("foo", "foo"), 0); + assert_eq!(workspace_symbol_match_rank("foobar", "foo"), 1); + assert_eq!(workspace_symbol_match_rank("barfoo", "foo"), 2); + } +} From ff50734d0c7201a27b41dca8bed3518f22d360bc Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Wed, 18 Feb 2026 16:06:16 +0000 Subject: [PATCH 158/210] Include jrsonnet-lsp in workspace default members --- Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cargo.toml b/Cargo.toml index 3b58aaa4..87f44e42 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [workspace] members = ["crates/*", "bindings/jsonnet", "cmds/*", "tests", "xtask"] -default-members = ["cmds/jrsonnet"] +default-members = ["cmds/jrsonnet", "cmds/jrsonnet-lsp"] resolver = "2" [workspace.package] From bf288ea4e2ed3750492c63d3846412b6a5937633 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Wed, 18 Feb 2026 16:08:01 +0000 Subject: [PATCH 159/210] perf(diagnostics): coalesce debounced worker requests Update the async diagnostics worker to debounce once, drain pending requests, and keep only the newest request per file before processing. This avoids repeated stale-request sleeps under bursty edit traffic. Keep stale-sequence checks before and after analysis, and add a unit test that verifies newest-per-file coalescing semantics. 
--- crates/jrsonnet-lsp/src/async_diagnostics.rs | 216 ++++++++++++------- 1 file changed, 137 insertions(+), 79 deletions(-) diff --git a/crates/jrsonnet-lsp/src/async_diagnostics.rs b/crates/jrsonnet-lsp/src/async_diagnostics.rs index 981b78b0..f985c288 100644 --- a/crates/jrsonnet-lsp/src/async_diagnostics.rs +++ b/crates/jrsonnet-lsp/src/async_diagnostics.rs @@ -184,106 +184,126 @@ impl AsyncDiagnostics { loop { // Wait for a request - let Ok(request) = requests.recv() else { + let Ok(first_request) = requests.recv() else { // Channel closed, exit debug!("Diagnostics worker: channel closed, exiting"); break; }; - trace!( - "Diagnostics worker: received request (seq={})", - request.sequence - ); - // Debounce: wait a bit before processing thread::sleep(Duration::from_millis(DEBOUNCE_DELAY_MS)); - // Check if this request is still the latest for this file - { - let sequences = latest_sequences.read(); - if let Some(&latest) = sequences.get(&request.file_id) { - if latest > request.sequence { - trace!( - "Diagnostics worker: skipping stale request (seq={}, latest={})", - request.sequence, - latest - ); - continue; + for request in Self::collect_latest_requests(first_request, &requests) { + trace!( + "Diagnostics worker: received request (seq={})", + request.sequence + ); + + // Check if this request is still the latest for this file + { + let sequences = latest_sequences.read(); + if let Some(&latest) = sequences.get(&request.file_id) { + if latest > request.sequence { + trace!( + "Diagnostics worker: skipping stale request (seq={}, latest={})", + request.sequence, + latest + ); + continue; + } } } - } - // Compute diagnostics - // Reconstruct the document in the worker thread - let document = Document::new(request.text, request.version); - let Some(path) = config.documents.path(request.file_id) else { - debug!("Diagnostics worker: path missing for interned file, skipping diagnostics"); - continue; - }; - trace!( - "Diagnostics worker: computing diagnostics for 
{}", - path.as_ref() - ); - let doc_source = WorkerDocumentSource { - current_file: request.file_id, - current_doc: document.clone(), - documents: Arc::clone(&config.documents), - }; - let provider = TypeProvider::new( - Arc::clone(&config.type_cache), - Arc::clone(&config.import_graph), - Arc::clone(&config.global_types), - ); - let analysis = Arc::new(provider.analyze(path.as_ref(), &document, &doc_source)); - let import_resolution = ImportResolution::new(path.as_ref(), &request.import_roots); - let import_occurrences = import_resolution.parse_occurrences(&document); - - let Some(params) = handlers::publish_diagnostics_params( - path.as_ref(), - &document, - request.enable_lint, - config.evaluator.as_deref(), - analysis.as_ref(), - &import_occurrences, - ) else { - debug!( - "Diagnostics worker: failed to build URI for {}, skipping diagnostics", + // Compute diagnostics + // Reconstruct the document in the worker thread + let document = Document::new(request.text, request.version); + let Some(path) = config.documents.path(request.file_id) else { + debug!( + "Diagnostics worker: path missing for interned file, skipping diagnostics" + ); + continue; + }; + trace!( + "Diagnostics worker: computing diagnostics for {}", path.as_ref() ); - continue; - }; - - // Check again if still the latest (diagnostics computation may have taken time) - { - let sequences = latest_sequences.read(); - if let Some(&latest) = sequences.get(&request.file_id) { - if latest > request.sequence { - trace!( - "Diagnostics worker: discarding stale result (seq={}, latest={})", - request.sequence, - latest - ); - continue; + let doc_source = WorkerDocumentSource { + current_file: request.file_id, + current_doc: document.clone(), + documents: Arc::clone(&config.documents), + }; + let provider = TypeProvider::new( + Arc::clone(&config.type_cache), + Arc::clone(&config.import_graph), + Arc::clone(&config.global_types), + ); + let analysis = Arc::new(provider.analyze(path.as_ref(), &document, 
&doc_source)); + let import_resolution = ImportResolution::new(path.as_ref(), &request.import_roots); + let import_occurrences = import_resolution.parse_occurrences(&document); + + let Some(params) = handlers::publish_diagnostics_params( + path.as_ref(), + &document, + request.enable_lint, + config.evaluator.as_deref(), + analysis.as_ref(), + &import_occurrences, + ) else { + debug!( + "Diagnostics worker: failed to build URI for {}, skipping diagnostics", + path.as_ref() + ); + continue; + }; + + // Check again if still the latest (diagnostics computation may have taken time) + { + let sequences = latest_sequences.read(); + if let Some(&latest) = sequences.get(&request.file_id) { + if latest > request.sequence { + trace!( + "Diagnostics worker: discarding stale result (seq={}, latest={})", + request.sequence, + latest + ); + continue; + } } } - } - // Reuse analysis work done for diagnostics in foreground requests. - config.documents.cache_analysis_file( - request.file_id, - request.version, - Arc::clone(&analysis), - ); + // Reuse analysis work done for diagnostics in foreground requests. 
+ config.documents.cache_analysis_file( + request.file_id, + request.version, + Arc::clone(&analysis), + ); - // Send result - let result = DiagnosticsResult { params }; + // Send result + let result = DiagnosticsResult { params }; - if results.send(result).is_err() { - debug!("Diagnostics worker: result channel closed, exiting"); - break; + if results.send(result).is_err() { + debug!("Diagnostics worker: result channel closed, exiting"); + return; + } } } } + + fn collect_latest_requests( + first_request: DiagnosticsRequest, + requests: &Receiver, + ) -> Vec { + let mut latest_by_file = FxHashMap::default(); + latest_by_file.insert(first_request.file_id, first_request); + + for request in requests.try_iter() { + latest_by_file.insert(request.file_id, request); + } + + let mut coalesced: Vec<_> = latest_by_file.into_values().collect(); + coalesced.sort_unstable_by_key(|request| request.file_id); + coalesced + } } impl Drop for AsyncDiagnostics { @@ -299,6 +319,7 @@ mod tests { use std::time::Duration; use jrsonnet_lsp_document::{CanonicalPath, PathStore}; + use rustc_hash::FxHashMap; use super::*; @@ -456,4 +477,41 @@ mod tests { .collect::>() ); } + + #[test] + fn test_collect_latest_requests_keeps_newest_per_file() { + let paths = PathStore::new(); + let file_a = paths.intern(&test_path("a")); + let file_b = paths.intern(&test_path("b")); + let (sender, receiver) = crossbeam_channel::unbounded(); + + let first = test_request(file_a, 1, "{ a: 1 }"); + sender + .send(test_request(file_b, 2, "{ b: 1 }")) + .expect("send b"); + sender + .send(test_request(file_a, 3, "{ a: 2 }")) + .expect("send latest a"); + + let coalesced = AsyncDiagnostics::collect_latest_requests(first, &receiver); + assert_eq!(coalesced.len(), 2); + + let sequences: FxHashMap<_, _> = coalesced + .into_iter() + .map(|request| (request.file_id, request.sequence)) + .collect(); + assert_eq!(sequences.get(&file_a), Some(&3)); + assert_eq!(sequences.get(&file_b), Some(&2)); + } + + fn 
test_request(file_id: FileId, sequence: u64, text: &str) -> DiagnosticsRequest { + DiagnosticsRequest { + file_id, + text: text.to_string(), + version: DocVersion::new(i32::try_from(sequence).expect("sequence fits i32")), + enable_lint: false, + import_roots: vec![], + sequence, + } + } } From 3909dd2909f53dad6a4cd073f62532f0348a115a Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Wed, 18 Feb 2026 16:14:50 +0000 Subject: [PATCH 160/210] perf(import-graph): avoid interner write lock on known paths Update `resolve_entry_files` to probe the read-only resolver first and only call `intern` when a path has not been interned yet. This keeps hot-path entry resolution on read locks for already-known imports and reduces unnecessary write-lock pressure. --- crates/jrsonnet-lsp-import/src/graph/operations.rs | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/crates/jrsonnet-lsp-import/src/graph/operations.rs b/crates/jrsonnet-lsp-import/src/graph/operations.rs index 5c07d203..3ba325dd 100644 --- a/crates/jrsonnet-lsp-import/src/graph/operations.rs +++ b/crates/jrsonnet-lsp-import/src/graph/operations.rs @@ -152,7 +152,11 @@ impl ImportGraph { let Some(path) = entry.resolved_path.as_ref() else { continue; }; - entry.resolved_file = Some(self.paths.intern(path)); + entry.resolved_file = Some( + self.resolver + .file(path) + .unwrap_or_else(|| self.paths.intern(path)), + ); } } From 089ac16c0c0250e3ada752ceb2bab37990faebca Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Wed, 18 Feb 2026 16:16:25 +0000 Subject: [PATCH 161/210] perf(server): drop redundant import-graph pre-resolve pass Remove the server-side read-lock pre-resolution step before `update_file_with_entries`. Graph updates now resolve file ids defensively, so the extra pass was redundant work and held an unnecessary lock in refresh paths. 
--- crates/jrsonnet-lsp/src/server/import_graph.rs | 11 +++-------- 1 file changed, 3 insertions(+), 8 deletions(-) diff --git a/crates/jrsonnet-lsp/src/server/import_graph.rs b/crates/jrsonnet-lsp/src/server/import_graph.rs index ab3702f0..b6d1787e 100644 --- a/crates/jrsonnet-lsp/src/server/import_graph.rs +++ b/crates/jrsonnet-lsp/src/server/import_graph.rs @@ -23,7 +23,7 @@ impl Server { config: &SharedConfig, file: FileId, ) { - let mut entries = { + let entries = { let Some(path) = documents.path(file) else { return; }; @@ -46,13 +46,8 @@ impl Server { import_resolution.parse_entries(&doc) }; - { - // Resolve to FileId before taking the write lock. - let graph = import_graph.read(); - graph.resolve_entry_files(&mut entries); - } - - // Now acquire the write lock and do the quick data structure update. + // Acquire the write lock and perform the graph update; entry file-id + // resolution is handled defensively by the graph update path. import_graph.write().update_file_with_entries(file, entries); } From b60a0bb5f658e85656f1f237eb234026374740d6 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Wed, 18 Feb 2026 16:22:17 +0000 Subject: [PATCH 162/210] perf(import-graph): cache resolved imports per file Add per-file resolved import maps in `ImportGraph` for all import flavors and for code imports only. Build and maintain these caches on graph updates and clear them on file removal. Switch inference dependency resolution to reuse the cached code-import map, and switch async import lookup resolution to the cached import-path map. Add a graph test covering cache population and invalidation behavior. 
--- .../src/graph/operations.rs | 60 +++++++++++++++++++ crates/jrsonnet-lsp-import/src/graph/tests.rs | 39 ++++++++++++ crates/jrsonnet-lsp-inference/src/provider.rs | 11 +--- .../async_requests/import_lookup/resolve.rs | 5 +- 4 files changed, 103 insertions(+), 12 deletions(-) diff --git a/crates/jrsonnet-lsp-import/src/graph/operations.rs b/crates/jrsonnet-lsp-import/src/graph/operations.rs index 3ba325dd..59529eb4 100644 --- a/crates/jrsonnet-lsp-import/src/graph/operations.rs +++ b/crates/jrsonnet-lsp-import/src/graph/operations.rs @@ -9,6 +9,7 @@ use std::{ }; use jrsonnet_lsp_document::{CanonicalPath, Document, FileId, PathResolver, PathStore}; +use rustc_hash::FxHashMap; pub use super::parse::{parse_document_import_occurrences, parse_document_imports}; @@ -58,6 +59,10 @@ pub struct ImportGraph { pub(super) resolver: PathResolver, /// Map of file → import entries in that file. pub(super) imports: HashMap>, + /// Cache of resolved imports by raw import path (`import`/`importstr`/`importbin`). + pub(super) resolved_imports: HashMap>, + /// Cache of resolved code imports by raw import path (`import` only). + pub(super) resolved_code_imports: HashMap>, /// Reverse index: file → files that import it. 
pub(super) imported_by: HashMap>, } @@ -71,6 +76,8 @@ impl ImportGraph { paths, resolver, imports: HashMap::new(), + resolved_imports: HashMap::new(), + resolved_code_imports: HashMap::new(), imported_by: HashMap::new(), } } @@ -124,6 +131,19 @@ impl ImportGraph { } } + let (resolved_imports, resolved_code_imports) = Self::build_resolved_import_maps(&entries); + if resolved_imports.is_empty() { + self.resolved_imports.remove(&file_id); + } else { + self.resolved_imports.insert(file_id, resolved_imports); + } + if resolved_code_imports.is_empty() { + self.resolved_code_imports.remove(&file_id); + } else { + self.resolved_code_imports + .insert(file_id, resolved_code_imports); + } + // Store the import entries self.imports.insert(file_id, entries); } @@ -185,6 +205,8 @@ impl ImportGraph { // Remove the import entries self.imports.remove(&file_id); + self.resolved_imports.remove(&file_id); + self.resolved_code_imports.remove(&file_id); } pub(super) fn direct_importers_by_id(&self, file_id: FileId) -> Vec { @@ -234,6 +256,21 @@ impl ImportGraph { self.imports.get(&file).map_or(&[], Vec::as_slice) } + /// Get cached resolved imports (`import`, `importstr`, `importbin`) for a file. + pub fn resolved_import_map(&self, file: FileId) -> Option<&FxHashMap> { + self.resolved_imports.get(&file) + } + + /// Get cached resolved code imports (`import`) for a file. + pub fn resolved_code_import_map(&self, file: FileId) -> Option<&FxHashMap> { + self.resolved_code_imports.get(&file) + } + + /// Resolve one raw import path from a file using the cached import map. + pub fn resolved_import(&self, file: FileId, import_path: &str) -> Option { + self.resolved_import_map(file)?.get(import_path).copied() + } + /// Find imports in a file that point to a specific target file. 
#[must_use] pub fn imports_of_target(&self, file_id: FileId, target_id: FileId) -> Vec<&ImportEntry> { @@ -258,6 +295,29 @@ impl ImportGraph { pub fn all_files(&self) -> impl Iterator + '_ { self.imports.keys().copied() } + + fn build_resolved_import_maps( + entries: &[ImportEntry], + ) -> (FxHashMap, FxHashMap) { + let mut resolved_imports = FxHashMap::default(); + let mut resolved_code_imports = FxHashMap::default(); + + for entry in entries { + let Some(resolved_file) = entry.resolved_file else { + continue; + }; + resolved_imports + .entry(entry.import_path.clone()) + .or_insert(resolved_file); + if entry.kind == ImportKind::Code { + resolved_code_imports + .entry(entry.import_path.clone()) + .or_insert(resolved_file); + } + } + + (resolved_imports, resolved_code_imports) + } } #[cfg(test)] diff --git a/crates/jrsonnet-lsp-import/src/graph/tests.rs b/crates/jrsonnet-lsp-import/src/graph/tests.rs index a4426607..8acc8e70 100644 --- a/crates/jrsonnet-lsp-import/src/graph/tests.rs +++ b/crates/jrsonnet-lsp-import/src/graph/tests.rs @@ -280,6 +280,45 @@ lib + other ); } +#[test] +fn test_resolved_import_maps_cached_and_updated() { + let mut graph = ImportGraph::new(PathStore::new()); + + let main = test_path("main.jsonnet"); + let lib = test_path("lib.jsonnet"); + let text = test_path("data.txt"); + let code = r#" +local lib = import "lib.jsonnet"; +local text = importstr "data.txt"; +{ lib: lib, text: text } +"#; + let doc = Document::new(code.to_string(), DocVersion::new(1)); + let main_file = graph.intern(&main); + let lib_file = graph.intern(&lib); + let text_file = graph.intern(&text); + graph.update_file(main_file, &doc, simple_resolver); + + assert_eq!( + graph.resolved_import(main_file, "lib.jsonnet"), + Some(lib_file) + ); + assert_eq!( + graph.resolved_import(main_file, "data.txt"), + Some(text_file) + ); + + let code_imports = graph + .resolved_code_import_map(main_file) + .expect("code import cache should exist"); + 
assert_eq!(code_imports.get("lib.jsonnet"), Some(&lib_file)); + assert!(!code_imports.contains_key("data.txt")); + + let empty_doc = Document::new("{}".to_string(), DocVersion::new(2)); + graph.update_file(main_file, &empty_doc, simple_resolver); + assert!(graph.resolved_import_map(main_file).is_none()); + assert!(graph.resolved_code_import_map(main_file).is_none()); +} + #[test] fn test_topological_order_simple() { let mut graph = ImportGraph::new(PathStore::new()); diff --git a/crates/jrsonnet-lsp-inference/src/provider.rs b/crates/jrsonnet-lsp-inference/src/provider.rs index a7a5e4e3..127d6956 100644 --- a/crates/jrsonnet-lsp-inference/src/provider.rs +++ b/crates/jrsonnet-lsp-inference/src/provider.rs @@ -155,14 +155,9 @@ impl TypeProvider { fn resolved_imports_for(graph: &ImportGraph, file: FileId) -> FxHashMap { graph - .imports(file) - .iter() - .filter(|entry| entry.kind == ImportKind::Code) - .filter_map(|entry| { - let resolved = entry.resolved_file?; - Some((entry.import_path.clone(), resolved)) - }) - .collect() + .resolved_code_import_map(file) + .cloned() + .unwrap_or_default() } #[cfg(test)] diff --git a/crates/jrsonnet-lsp/src/server/async_requests/import_lookup/resolve.rs b/crates/jrsonnet-lsp/src/server/async_requests/import_lookup/resolve.rs index 19a694c8..c1604572 100644 --- a/crates/jrsonnet-lsp/src/server/async_requests/import_lookup/resolve.rs +++ b/crates/jrsonnet-lsp/src/server/async_requests/import_lookup/resolve.rs @@ -11,10 +11,7 @@ impl AsyncRequestContext { let import_graph = self.import_graph.read(); let from_file = import_graph.file(from)?; import_graph - .imports(from_file) - .iter() - .find(|entry| entry.import_path == import) - .and_then(|entry| entry.resolved_file) + .resolved_import(from_file, import) .and_then(|file| import_graph.path(file)) .map(|path| path.as_ref().clone()) } From 7174f60aff32442f49535e354a22c0081bb6db3b Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Wed, 18 Feb 2026 16:48:19 +0000 Subject: [PATCH 163/210] 
test(scenario): remove hover primary-type expect shortcut --- .../jrsonnet-lsp-scenario/src/scenario/mod.rs | 15 +++-- .../src/scenario/request_steps.rs | 6 -- .../src/scenario_runner/expectation_steps.rs | 59 ++----------------- .../src/scenario_runner/helpers.rs | 9 --- .../src/scenario_runner/runner.rs | 3 - .../src/scenario_script/compile.rs | 19 +++--- ...al_comprehension_refines_element_type.yaml | 27 +++++++-- ...er_map_predicate_refines_output_array.yaml | 27 +++++++-- ...r_with_predicate_refines_output_array.yaml | 27 +++++++-- ..._order_all_map_refines_array_elements.yaml | 14 +++-- ...l_string_equality_after_string_assert.yaml | 14 +++-- ...tring_equality_partial_without_assert.yaml | 40 ++++++++++--- .../negated_guard_refines_branches.yaml | 27 +++++++-- .../null_and_length_guard.yaml | 14 +++-- .../null_branch_split.yaml | 27 +++++++-- .../null_guard_refines_non_null_branch.yaml | 14 +++-- ...ic_predicates_refine_arithmetic_paths.yaml | 40 ++++++++++--- .../union_guard_refines_both_branches.yaml | 27 +++++++-- .../function_length_assert_narrows_arity.yaml | 14 +++-- .../length_eq_refines_array_to_tuple.yaml | 14 +++-- ..._function_refines_impossible_branches.yaml | 40 ++++++++++--- ...wn_object_refines_impossible_branches.yaml | 40 ++++++++++--- ...known_function_allows_typed_call_site.yaml | 27 +++++++-- ...length_unknown_function_refines_arity.yaml | 27 +++++++-- ...n_object_composition_refines_by_shape.yaml | 53 +++++++++++++---- .../negated_membership_and_is_precise.yaml | 14 +++-- ...mbership_or_eliminates_required_field.yaml | 14 +++-- ...negated_membership_or_is_conservative.yaml | 14 +++-- ...or_with_length_still_eliminates_field.yaml | 14 +++-- ...eld_literal_chain_preserves_base_type.yaml | 14 +++-- ...t_field_presence_refines_object_shape.yaml | 14 +++-- ...field_type_guards_refine_nested_paths.yaml | 27 +++++++-- .../object_membership_known_union.yaml | 14 +++-- 33 files changed, 512 insertions(+), 237 deletions(-) diff --git 
a/crates/jrsonnet-lsp-scenario/src/scenario/mod.rs b/crates/jrsonnet-lsp-scenario/src/scenario/mod.rs index 07a4275f..a7a05728 100644 --- a/crates/jrsonnet-lsp-scenario/src/scenario/mod.rs +++ b/crates/jrsonnet-lsp-scenario/src/scenario/mod.rs @@ -47,13 +47,13 @@ use lsp_types::{ pub use request_steps::{ ExpectCodeActionStep, ExpectCodeLensStep, ExpectCompletionStep, ExpectDeclarationStep, ExpectDefinitionStep, ExpectDocumentSymbolStep, ExpectExecuteCommandStep, ExpectFormattingStep, - ExpectHoverPrimaryTypeStep, ExpectHoverStep, ExpectInlayHintsStep, ExpectPrepareRenameStep, - ExpectReferencesStep, ExpectRenameStep, ExpectSemanticTokensFullStep, - ExpectSemanticTokensRangeStep, ExpectSignatureHelpStep, ExpectTypeDefinitionStep, - ExpectWorkspaceSymbolStep, HoverSectionExpectation, RequestCodeActionStep, RequestCodeLensStep, - RequestCompletionStep, RequestDeclarationStep, RequestDefinitionStep, - RequestDocumentSymbolStep, RequestExecuteCommandStep, RequestFormattingStep, RequestHoverStep, - RequestInlayHintsStep, RequestPrepareRenameStep, RequestReferencesStep, RequestRenameStep, + ExpectHoverStep, ExpectInlayHintsStep, ExpectPrepareRenameStep, ExpectReferencesStep, + ExpectRenameStep, ExpectSemanticTokensFullStep, ExpectSemanticTokensRangeStep, + ExpectSignatureHelpStep, ExpectTypeDefinitionStep, ExpectWorkspaceSymbolStep, + HoverSectionExpectation, RequestCodeActionStep, RequestCodeLensStep, RequestCompletionStep, + RequestDeclarationStep, RequestDefinitionStep, RequestDocumentSymbolStep, + RequestExecuteCommandStep, RequestFormattingStep, RequestHoverStep, RequestInlayHintsStep, + RequestPrepareRenameStep, RequestReferencesStep, RequestRenameStep, RequestSemanticTokensFullStep, RequestSemanticTokensRangeStep, RequestSignatureHelpStep, RequestTypeDefinitionStep, RequestWorkspaceSymbolStep, }; @@ -104,7 +104,6 @@ pub enum ScenarioStep { ExpectRename(ExpectRenameStep), RequestHover(RequestHoverStep), ExpectHover(ExpectHoverStep), - 
ExpectHoverPrimaryType(ExpectHoverPrimaryTypeStep), RequestSignatureHelp(RequestSignatureHelpStep), ExpectSignatureHelp(ExpectSignatureHelpStep), RequestCompletion(RequestCompletionStep), diff --git a/crates/jrsonnet-lsp-scenario/src/scenario/request_steps.rs b/crates/jrsonnet-lsp-scenario/src/scenario/request_steps.rs index f4113f0f..ad4477fc 100644 --- a/crates/jrsonnet-lsp-scenario/src/scenario/request_steps.rs +++ b/crates/jrsonnet-lsp-scenario/src/scenario/request_steps.rs @@ -528,12 +528,6 @@ pub struct ExpectHoverStep { pub result: Option>, } -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct ExpectHoverPrimaryTypeStep { - pub(crate) id: i32, - pub expected_type: String, -} - /// One expected hover section, compared in authored order using exact matching. /// /// Exactly one key should be present per list item. diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_runner/expectation_steps.rs b/crates/jrsonnet-lsp-scenario/src/scenario_runner/expectation_steps.rs index 0977dc62..bb777219 100644 --- a/crates/jrsonnet-lsp-scenario/src/scenario_runner/expectation_steps.rs +++ b/crates/jrsonnet-lsp-scenario/src/scenario_runner/expectation_steps.rs @@ -7,7 +7,7 @@ use thiserror::Error; use super::{ helpers::{ completion_label_counts, completion_labels, hover_array_sections_from_json, - json_mismatch_report, label_counts, marked_string_markdown, JsonMismatchReport, + json_mismatch_report, label_counts, JsonMismatchReport, }, transport::{RpcError, SerdeError, TransportError}, RunnerResult, ScenarioRunner, REQUEST_TIMEOUT, @@ -15,10 +15,10 @@ use super::{ use crate::scenario::{ DiagnosticsSettledStep, ExpectCodeActionStep, ExpectCodeLensStep, ExpectCompletionStep, ExpectDeclarationStep, ExpectDefinitionStep, ExpectDiagnosticsStep, ExpectDocumentSymbolStep, - ExpectExecuteCommandStep, ExpectFormattingStep, ExpectHoverPrimaryTypeStep, ExpectHoverStep, - ExpectInlayHintsStep, ExpectPrepareRenameStep, ExpectReferencesStep, ExpectRenameStep, - 
ExpectSemanticTokensFullStep, ExpectSemanticTokensRangeStep, ExpectSignatureHelpStep, - ExpectTypeDefinitionStep, ExpectWorkspaceSymbolStep, + ExpectExecuteCommandStep, ExpectFormattingStep, ExpectHoverStep, ExpectInlayHintsStep, + ExpectPrepareRenameStep, ExpectReferencesStep, ExpectRenameStep, ExpectSemanticTokensFullStep, + ExpectSemanticTokensRangeStep, ExpectSignatureHelpStep, ExpectTypeDefinitionStep, + ExpectWorkspaceSymbolStep, }; #[derive(Debug, Error)] @@ -27,14 +27,6 @@ pub enum AssertionError { HoverMissingResult { id: i32 }, #[error("hover response {id} had unsupported content shape: {hover}")] HoverMalformedContent { id: i32, hover: String }, - #[error("hover response {id} did not start with a backticked type: {hover}")] - HoverMalformedLeadingType { id: i32, hover: String }, - #[error("hover type mismatch for id {id}: actual={actual} expected exactly {expected}")] - HoverTypeMismatch { - id: i32, - actual: String, - expected: String, - }, #[error("completion response {id} had no result, expected labels {expected_labels:?}")] CompletionMissingResult { id: i32, @@ -214,47 +206,6 @@ impl ScenarioRunner { } } - pub(super) fn step_expect_hover_primary_type( - &mut self, - step: &ExpectHoverPrimaryTypeStep, - ) -> RunnerResult<()> { - let actual_hover = self - .response_result::("hover", step.id)? 
- .ok_or_else(|| AssertionError::HoverMissingResult { id: step.id })?; - let actual_sections = hover_array_sections_from_json(&actual_hover).ok_or_else(|| { - AssertionError::HoverMalformedContent { - id: step.id, - hover: actual_hover.to_string(), - } - })?; - let first_section = - actual_sections - .first() - .ok_or_else(|| AssertionError::HoverMalformedLeadingType { - id: step.id, - hover: actual_hover.to_string(), - })?; - let first_markdown = marked_string_markdown(first_section); - let actual_type = first_markdown - .trim_start() - .strip_prefix('`') - .and_then(|rest| rest.split_once('`').map(|(ty, _)| ty.to_string())) - .ok_or_else(|| AssertionError::HoverMalformedLeadingType { - id: step.id, - hover: first_markdown.clone(), - })?; - if actual_type == step.expected_type { - Ok(()) - } else { - Err(AssertionError::HoverTypeMismatch { - id: step.id, - actual: actual_type, - expected: step.expected_type.clone(), - } - .into()) - } - } - pub(super) fn step_expect_signature_help( &mut self, step: &ExpectSignatureHelpStep, diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_runner/helpers.rs b/crates/jrsonnet-lsp-scenario/src/scenario_runner/helpers.rs index 42e68cfe..ea852bf3 100644 --- a/crates/jrsonnet-lsp-scenario/src/scenario_runner/helpers.rs +++ b/crates/jrsonnet-lsp-scenario/src/scenario_runner/helpers.rs @@ -28,15 +28,6 @@ pub(super) fn parse_uri(uri: &str, context: &'static str) -> RunnerResult String { - match marked { - lsp_types::MarkedString::String(value) => value.clone(), - lsp_types::MarkedString::LanguageString(language) => { - format!("```{}\n{}\n```", language.language, language.value) - } - } -} - pub(super) fn hover_array_sections_from_json( hover: &Value, ) -> Option> { diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_runner/runner.rs b/crates/jrsonnet-lsp-scenario/src/scenario_runner/runner.rs index cf0ba8b0..a0b14e11 100644 --- a/crates/jrsonnet-lsp-scenario/src/scenario_runner/runner.rs +++ 
b/crates/jrsonnet-lsp-scenario/src/scenario_runner/runner.rs @@ -148,9 +148,6 @@ impl ScenarioRunner { ScenarioStep::ExpectRename(expectation) => self.step_expect_rename(expectation), ScenarioStep::RequestHover(request) => self.step_request_hover(request), ScenarioStep::ExpectHover(expectation) => self.step_expect_hover(expectation), - ScenarioStep::ExpectHoverPrimaryType(expectation) => { - self.step_expect_hover_primary_type(expectation) - } ScenarioStep::RequestSignatureHelp(request) => { self.step_request_signature_help(request) } diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_script/compile.rs b/crates/jrsonnet-lsp-scenario/src/scenario_script/compile.rs index f237b379..d30b495c 100644 --- a/crates/jrsonnet-lsp-scenario/src/scenario_script/compile.rs +++ b/crates/jrsonnet-lsp-scenario/src/scenario_script/compile.rs @@ -33,10 +33,10 @@ use crate::scenario::{ ChangeFullStep, ChangeIncrementalStep, CloseStep, ConfigStep, DeleteFileStep, DiagnosticsSettledStep, ExpectCodeActionStep, ExpectCodeLensStep, ExpectCompletionStep, ExpectDeclarationStep, ExpectDefinitionStep, ExpectDiagnosticsStep, ExpectDocumentSymbolStep, - ExpectExecuteCommandStep, ExpectFormattingStep, ExpectHoverPrimaryTypeStep, ExpectHoverStep, - ExpectInlayHintsStep, ExpectPrepareRenameStep, ExpectReferencesStep, ExpectRenameStep, - ExpectSemanticTokensFullStep, ExpectSemanticTokensRangeStep, ExpectSignatureHelpStep, - ExpectTypeDefinitionStep, ExpectWorkspaceSymbolStep, NotifyWatchedFilesStep, OpenStep, + ExpectExecuteCommandStep, ExpectFormattingStep, ExpectHoverStep, ExpectInlayHintsStep, + ExpectPrepareRenameStep, ExpectReferencesStep, ExpectRenameStep, ExpectSemanticTokensFullStep, + ExpectSemanticTokensRangeStep, ExpectSignatureHelpStep, ExpectTypeDefinitionStep, + ExpectWorkspaceSymbolStep, HoverSectionExpectation, NotifyWatchedFilesStep, OpenStep, RequestCodeActionStep, RequestCodeLensStep, RequestCompletionStep, RequestDeclarationStep, RequestDefinitionStep, 
RequestDocumentSymbolStep, RequestExecuteCommandStep, RequestFormattingStep, RequestHoverStep, RequestInlayHintsStep, RequestPrepareRenameStep, @@ -470,11 +470,12 @@ impl ScenarioScript { "expectTypes.checks", )?, }); - let expect = - ScenarioStep::ExpectHoverPrimaryType(ExpectHoverPrimaryTypeStep { - id, - expected_type: check.expected_type, - }); + let expect = ScenarioStep::ExpectHover(ExpectHoverStep { + id, + result: Some(vec![HoverSectionExpectation::Type { + ty: check.expected_type, + }]), + }); steps.push(request); steps.push(expect); Ok::, CompileScenarioError>(steps) diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_collections/conditional_comprehension_refines_element_type.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_collections/conditional_comprehension_refines_element_type.yaml index 9b97aab6..a7983d32 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_collections/conditional_comprehension_refines_element_type.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_collections/conditional_comprehension_refines_element_type.yaml @@ -13,10 +13,25 @@ steps: - step: diagnosticsSettled -- step: expectTypes +- step: requestHover + as: hover1 file: main.jsonnet - checks: - - at: m1 - type: 'array' - - at: m2 - type: 'array' + at: m1 +- step: expectHover + request: hover1 + result: + - type: "array" + - preview: + language: "jsonnet" + value: "local zs = [x for x in xs if x != null];" +- step: requestHover + as: hover2 + file: main.jsonnet + at: m2 +- step: expectHover + request: hover2 + result: + - type: "array" + - preview: + language: "jsonnet" + value: "local ys = [(if x == null then \"no\" else x - 1) for x in xs];" diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_collections/filter_map_predicate_refines_output_array.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_collections/filter_map_predicate_refines_output_array.yaml index 5302cbe6..f64dc453 100644 --- 
a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_collections/filter_map_predicate_refines_output_array.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_collections/filter_map_predicate_refines_output_array.yaml @@ -16,10 +16,25 @@ steps: - step: diagnosticsSettled -- step: expectTypes +- step: requestHover + as: hover1 file: main.jsonnet - checks: - - at: m1 - type: 'array' - - at: m2 - type: 'array' + at: m1 +- step: expectHover + request: hover1 + result: + - type: "array" + - preview: + language: "jsonnet" + value: "local f(xs) =\n assert std.isArray(xs);\n assert std.all(std.map(function(x) std.isNumber(x) || std.isString(x), xs));\n local ys = std.filterMap(std.isNumber, inc, xs);\n ys\n..." +- step: requestHover + as: hover2 + file: main.jsonnet + at: m2 +- step: expectHover + request: hover2 + result: + - type: "array" + - preview: + language: "jsonnet" + value: "local ys = std.filterMap(std.isNumber, inc, xs);" diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_collections/filter_with_predicate_refines_output_array.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_collections/filter_with_predicate_refines_output_array.yaml index 7afd3fe9..49f75cad 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_collections/filter_with_predicate_refines_output_array.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_collections/filter_with_predicate_refines_output_array.yaml @@ -12,10 +12,25 @@ steps: - step: diagnosticsSettled -- step: expectTypes +- step: requestHover + as: hover1 file: main.jsonnet - checks: - - at: m1 - type: 'array' - - at: m2 - type: 'array' + at: m1 +- step: expectHover + request: hover1 + result: + - type: "array" + - preview: + language: "jsonnet" + value: "local f(xs) =\n assert std.isArray(xs);\n assert std.all(std.map(function(x) std.isNumber(x) || std.isString(x), xs));\n local ys = std.filter(std.isNumber, xs);\n ys\n..." 
+- step: requestHover + as: hover2 + file: main.jsonnet + at: m2 +- step: expectHover + request: hover2 + result: + - type: "array" + - preview: + language: "jsonnet" + value: "local ys = std.filter(std.isNumber, xs);" diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_collections/higher_order_all_map_refines_array_elements.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_collections/higher_order_all_map_refines_array_elements.yaml index ecf1d816..850916bd 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_collections/higher_order_all_map_refines_array_elements.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_collections/higher_order_all_map_refines_array_elements.yaml @@ -12,8 +12,14 @@ steps: - step: diagnosticsSettled -- step: expectTypes +- step: requestHover + as: hover1 file: main.jsonnet - checks: - - at: m1 - type: 'array' + at: m1 +- step: expectHover + request: hover1 + result: + - type: "array" + - preview: + language: "jsonnet" + value: "local f(arr) =\n if std.all(std.map(std.isNumber, arr)) then\n arr\n else\n arr;" diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/literal_string_equality_after_string_assert.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/literal_string_equality_after_string_assert.yaml index 29ee52d9..4c14aea7 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/literal_string_equality_after_string_assert.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/literal_string_equality_after_string_assert.yaml @@ -15,8 +15,14 @@ steps: - step: diagnosticsSettled -- step: expectTypes +- step: requestHover + as: hover1 file: main.jsonnet - checks: - - at: m1 - type: 'string' + at: m1 +- step: expectHover + request: hover1 + result: + - type: "string" + - preview: + language: "jsonnet" + value: "local f(x) =\n assert std.isString(x);\n if x == \"hi\" then\n \"hey\"\n else if x == \"bye\" then\n..." 
diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/literal_string_equality_partial_without_assert.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/literal_string_equality_partial_without_assert.yaml index 0d4c2106..07591e7e 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/literal_string_equality_partial_without_assert.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/literal_string_equality_partial_without_assert.yaml @@ -14,12 +14,36 @@ steps: - step: diagnosticsSettled -- step: expectTypes +- step: requestHover + as: hover1 file: main.jsonnet - checks: - - at: m1 - type: '"hi"' - - at: m2 - type: '"bye"' - - at: m3 - type: 'any' + at: m1 +- step: expectHover + request: hover1 + result: + - type: "\"hi\"" + - preview: + language: "jsonnet" + value: "local f(x) =\n if x == \"hi\" then\n std.length(x)\n else if x == \"bye\" then\n std.length(x)\n..." +- step: requestHover + as: hover2 + file: main.jsonnet + at: m2 +- step: expectHover + request: hover2 + result: + - type: "\"bye\"" + - preview: + language: "jsonnet" + value: "local f(x) =\n if x == \"hi\" then\n std.length(x)\n else if x == \"bye\" then\n std.length(x)\n..." +- step: requestHover + as: hover3 + file: main.jsonnet + at: m3 +- step: expectHover + request: hover3 + result: + - type: "any" + - preview: + language: "jsonnet" + value: "local f(x) =\n if x == \"hi\" then\n std.length(x)\n else if x == \"bye\" then\n std.length(x)\n..." 
diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/negated_guard_refines_branches.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/negated_guard_refines_branches.yaml index ccb37fd8..010b6b95 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/negated_guard_refines_branches.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/negated_guard_refines_branches.yaml @@ -13,10 +13,25 @@ steps: - step: diagnosticsSettled -- step: expectTypes +- step: requestHover + as: hover1 file: main.jsonnet - checks: - - at: m1 - type: 'string' - - at: m2 - type: 'number' + at: m1 +- step: expectHover + request: hover1 + result: + - type: "string" + - preview: + language: "jsonnet" + value: "local f(x) =\n assert std.isNumber(x) || std.isString(x);\n if !std.isNumber(x) then\n std.length(x)\n else\n..." +- step: requestHover + as: hover2 + file: main.jsonnet + at: m2 +- step: expectHover + request: hover2 + result: + - type: "number" + - preview: + language: "jsonnet" + value: "local f(x) =\n assert std.isNumber(x) || std.isString(x);\n if !std.isNumber(x) then\n std.length(x)\n else\n..." 
diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/null_and_length_guard.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/null_and_length_guard.yaml index 48fd4b19..07ee1879 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/null_and_length_guard.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/null_and_length_guard.yaml @@ -13,8 +13,14 @@ steps: - step: diagnosticsSettled -- step: expectTypes +- step: requestHover + as: hover1 file: main.jsonnet - checks: - - at: m1 - type: 'string' + at: m1 +- step: expectHover + request: hover1 + result: + - type: "string" + - preview: + language: "jsonnet" + value: "local f(x) =\n assert x == null || std.isString(x);\n if x != null && std.length(x) >= 10 then\n x\n else\n..." diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/null_branch_split.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/null_branch_split.yaml index b6707b6a..841f9cc1 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/null_branch_split.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/null_branch_split.yaml @@ -14,10 +14,25 @@ steps: - step: diagnosticsSettled -- step: expectTypes +- step: requestHover + as: hover1 file: main.jsonnet - checks: - - at: m1 - type: 'number' - - at: m2 - type: 'null' + at: m1 +- step: expectHover + request: hover1 + result: + - type: "number" + - preview: + language: "jsonnet" + value: "local f(x) =\n assert x == null || std.isNumber(x);\n if x != null then\n x\n else\n..." +- step: requestHover + as: hover2 + file: main.jsonnet + at: m2 +- step: expectHover + request: hover2 + result: + - type: "null" + - preview: + language: "jsonnet" + value: "local f(x) =\n assert x == null || std.isNumber(x);\n if x != null then\n x\n else\n..." 
diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/null_guard_refines_non_null_branch.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/null_guard_refines_non_null_branch.yaml index 86137c42..517edbc7 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/null_guard_refines_non_null_branch.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/null_guard_refines_non_null_branch.yaml @@ -13,8 +13,14 @@ steps: - step: diagnosticsSettled -- step: expectTypes +- step: requestHover + as: hover1 file: main.jsonnet - checks: - - at: m1 - type: 'string' + at: m1 +- step: expectHover + request: hover1 + result: + - type: "string" + - preview: + language: "jsonnet" + value: "local f(x) =\n assert x == null || std.isString(x);\n if x != null then\n std.length(x)\n else\n..." diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/partial_numeric_predicates_refine_arithmetic_paths.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/partial_numeric_predicates_refine_arithmetic_paths.yaml index a62a43fa..8c169f1f 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/partial_numeric_predicates_refine_arithmetic_paths.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/partial_numeric_predicates_refine_arithmetic_paths.yaml @@ -15,12 +15,36 @@ steps: - step: diagnosticsSettled -- step: expectTypes +- step: requestHover + as: hover1 file: main.jsonnet - checks: - - at: m1 - type: 'number' - - at: m2 - type: 'number' - - at: m3 - type: 'number' + at: m1 +- step: expectHover + request: hover1 + result: + - type: "number" + - preview: + language: "jsonnet" + value: "local f(x) =\n if std.isDecimal(x) then\n x + 0.5\n else if std.isInteger(x) then\n x + 1\n..." 
+- step: requestHover + as: hover2 + file: main.jsonnet + at: m2 +- step: expectHover + request: hover2 + result: + - type: "number" + - preview: + language: "jsonnet" + value: "local f(x) =\n if std.isDecimal(x) then\n x + 0.5\n else if std.isInteger(x) then\n x + 1\n..." +- step: requestHover + as: hover3 + file: main.jsonnet + at: m3 +- step: expectHover + request: hover3 + result: + - type: "number" + - preview: + language: "jsonnet" + value: "local n = f(5);" diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/union_guard_refines_both_branches.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/union_guard_refines_both_branches.yaml index 3e7adb15..3c71b9fc 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/union_guard_refines_both_branches.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_guards_and_literals/union_guard_refines_both_branches.yaml @@ -13,10 +13,25 @@ steps: - step: diagnosticsSettled -- step: expectTypes +- step: requestHover + as: hover1 file: main.jsonnet - checks: - - at: m1 - type: 'number' - - at: m2 - type: 'string' + at: m1 +- step: expectHover + request: hover1 + result: + - type: "number" + - preview: + language: "jsonnet" + value: "local f(x) =\n assert std.isNumber(x) || std.isString(x);\n if std.isNumber(x) then\n x + 1\n else\n..." +- step: requestHover + as: hover2 + file: main.jsonnet + at: m2 +- step: expectHover + request: hover2 + result: + - type: "string" + - preview: + language: "jsonnet" + value: "local f(x) =\n assert std.isNumber(x) || std.isString(x);\n if std.isNumber(x) then\n x + 1\n else\n..." 
diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/function_length_assert_narrows_arity.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/function_length_assert_narrows_arity.yaml index 38f303cc..f41f8ec5 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/function_length_assert_narrows_arity.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/function_length_assert_narrows_arity.yaml @@ -11,8 +11,14 @@ steps: - step: diagnosticsSettled -- step: expectTypes +- step: requestHover + as: hover1 file: main.jsonnet - checks: - - at: m1 - type: 'function(arg0: any, arg1: any)' + at: m1 +- step: expectHover + request: hover1 + result: + - type: "function(arg0: any, arg1: any)" + - preview: + language: "jsonnet" + value: "local wrap(f) =\n assert std.isFunction(f);\n assert std.length(f) == 2;\n f\n\n..." diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_eq_refines_array_to_tuple.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_eq_refines_array_to_tuple.yaml index 25323937..dfce2f37 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_eq_refines_array_to_tuple.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_eq_refines_array_to_tuple.yaml @@ -10,8 +10,14 @@ steps: - step: diagnosticsSettled -- step: expectTypes +- step: requestHover + as: hover1 file: main.jsonnet - checks: - - at: m1 - type: '[any, any, any]' + at: m1 +- step: expectHover + request: hover1 + result: + - type: "[any, any, any]" + - preview: + language: "jsonnet" + value: "local f(xs) =\n assert std.isArray(xs) && std.length(xs) == 3;\n xs\n\nf" diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_known_function_refines_impossible_branches.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_known_function_refines_impossible_branches.yaml index 
4f6182f3..11316106 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_known_function_refines_impossible_branches.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_known_function_refines_impossible_branches.yaml @@ -13,12 +13,36 @@ steps: - step: diagnosticsSettled -- step: expectTypes +- step: requestHover + as: hover1 file: main.jsonnet - checks: - - at: m1 - type: 'never' - - at: m2 - type: 'never' - - at: m3 - type: '(x: any, y: any) -> number' + at: m1 +- step: expectHover + request: hover1 + result: + - type: "never" + - preview: + language: "jsonnet" + value: "local f(x, y) = y + 1;" +- step: requestHover + as: hover2 + file: main.jsonnet + at: m2 +- step: expectHover + request: hover2 + result: + - type: "never" + - preview: + language: "jsonnet" + value: "local f(x, y) = y + 1;" +- step: requestHover + as: hover3 + file: main.jsonnet + at: m3 +- step: expectHover + request: hover3 + result: + - type: "(x: any, y: any) -> number" + - preview: + language: "jsonnet" + value: "local f(x, y) = y + 1;" diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_known_object_refines_impossible_branches.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_known_object_refines_impossible_branches.yaml index ec8e1ad1..5c2b3f31 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_known_object_refines_impossible_branches.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_known_object_refines_impossible_branches.yaml @@ -13,12 +13,36 @@ steps: - step: diagnosticsSettled -- step: expectTypes +- step: requestHover + as: hover1 file: main.jsonnet - checks: - - at: m1 - type: 'never' - - at: m2 - type: 'never' - - at: m3 - type: 'number' + at: m1 +- step: expectHover + request: hover1 + result: + - type: "never" + - preview: + language: "jsonnet" + value: "local x = { a: 1, b: \"hi\" };" +- step: 
requestHover + as: hover2 + file: main.jsonnet + at: m2 +- step: expectHover + request: hover2 + result: + - type: "never" + - preview: + language: "jsonnet" + value: "local x = { a: 1, b: \"hi\" };" +- step: requestHover + as: hover3 + file: main.jsonnet + at: m3 +- step: expectHover + request: hover3 + result: + - type: "number" + - preview: + language: "jsonnet" + value: "x.a" diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_unknown_function_allows_typed_call_site.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_unknown_function_allows_typed_call_site.yaml index ae4bd76d..27f1d1c8 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_unknown_function_allows_typed_call_site.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_unknown_function_allows_typed_call_site.yaml @@ -15,10 +15,25 @@ steps: - step: diagnosticsSettled -- step: expectTypes +- step: requestHover + as: hover1 file: main.jsonnet - checks: - - at: m1 - type: 'function(arg0: any, arg1: any)' - - at: m2 - type: 'function()' + at: m1 +- step: expectHover + request: hover1 + result: + - type: "function(arg0: any, arg1: any)" + - preview: + language: "jsonnet" + value: "local f(x) =\n if std.isFunction(x) then\n if std.length(x) == 2 then\n x(3, 5)\n else\n..." +- step: requestHover + as: hover2 + file: main.jsonnet + at: m2 +- step: expectHover + request: hover2 + result: + - type: "function()" + - preview: + language: "jsonnet" + value: "local f(x) =\n if std.isFunction(x) then\n if std.length(x) == 2 then\n x(3, 5)\n else\n..." 
diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_unknown_function_refines_arity.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_unknown_function_refines_arity.yaml index 488d4a78..7a3d9273 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_unknown_function_refines_arity.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_unknown_function_refines_arity.yaml @@ -15,10 +15,25 @@ steps: - step: diagnosticsSettled -- step: expectTypes +- step: requestHover + as: hover1 file: main.jsonnet - checks: - - at: m1 - type: 'function(arg0: any, arg1: any)' - - at: m2 - type: 'function()' + at: m1 +- step: expectHover + request: hover1 + result: + - type: "function(arg0: any, arg1: any)" + - preview: + language: "jsonnet" + value: "local f(x) =\n if std.isFunction(x) then\n if std.length(x) == 2 then\n x\n else\n..." +- step: requestHover + as: hover2 + file: main.jsonnet + at: m2 +- step: expectHover + request: hover2 + result: + - type: "function()" + - preview: + language: "jsonnet" + value: "local f(x) =\n if std.isFunction(x) then\n if std.length(x) == 2 then\n x\n else\n..." 
diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_unknown_object_composition_refines_by_shape.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_unknown_object_composition_refines_by_shape.yaml index 2d1bb8ef..ba4ee5fa 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_unknown_object_composition_refines_by_shape.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_length_and_arity/length_unknown_object_composition_refines_by_shape.yaml @@ -25,14 +25,47 @@ steps: - step: diagnosticsSettled -- step: expectTypes +- step: requestHover + as: hover1 file: main.jsonnet - checks: - - at: m1 - type: 'number' - - at: m2 - type: 'never' - - at: m3 - type: 'number' - - at: m4 - type: 'number' + at: m1 +- step: expectHover + request: hover1 + result: + - type: "number" + - preview: + language: "jsonnet" + value: "x.b" +- step: requestHover + as: hover2 + file: main.jsonnet + at: m2 +- step: expectHover + request: hover2 + result: + - type: "never" + - preview: + language: "jsonnet" + value: "local f(x) =\n if std.isObject(x) then\n if \"a\" in x && std.isString(x.a) then\n if \"b\" in x && std.isNumber(x.b) then\n if std.length(x) == 2 then\n..." 
+- step: requestHover + as: hover3 + file: main.jsonnet + at: m3 +- step: expectHover + request: hover3 + result: + - type: "number" + - preview: + language: "jsonnet" + value: "x.b" +- step: requestHover + as: hover4 + file: main.jsonnet + at: m4 +- step: expectHover + request: hover4 + result: + - type: "number" + - preview: + language: "jsonnet" + value: "x.b" diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/negated_membership_and_is_precise.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/negated_membership_and_is_precise.yaml index 6a7be070..b60fb55b 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/negated_membership_and_is_precise.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/negated_membership_and_is_precise.yaml @@ -12,8 +12,14 @@ steps: - step: diagnosticsSettled -- step: expectTypes +- step: requestHover + as: hover1 file: main.jsonnet - checks: - - at: m1 - type: 'any' + at: m1 +- step: expectHover + request: hover1 + result: + - type: "any" + - preview: + language: "jsonnet" + value: "x.foo" diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/negated_membership_or_eliminates_required_field.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/negated_membership_or_eliminates_required_field.yaml index e783bc8c..826bce73 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/negated_membership_or_eliminates_required_field.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/negated_membership_or_eliminates_required_field.yaml @@ -12,8 +12,14 @@ steps: - step: diagnosticsSettled -- step: expectTypes +- step: requestHover + as: hover1 file: main.jsonnet - checks: - - at: m1 - type: 'never' + at: m1 +- step: expectHover + request: hover1 + result: + - type: "never" + - preview: + language: "jsonnet" + value: "x.foo" diff --git 
a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/negated_membership_or_is_conservative.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/negated_membership_or_is_conservative.yaml index 93870d76..86824a50 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/negated_membership_or_is_conservative.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/negated_membership_or_is_conservative.yaml @@ -12,8 +12,14 @@ steps: - step: diagnosticsSettled -- step: expectTypes +- step: requestHover + as: hover1 file: main.jsonnet - checks: - - at: m1 - type: '{ bar: never, foo: never, ... }' + at: m1 +- step: expectHover + request: hover1 + result: + - type: "{ bar: never, foo: never, ... }" + - preview: + language: "jsonnet" + value: "local f(x) =\n if std.isObject(x) && \"foo\" in x && !(\"foo\" in x || \"bar\" in x) then\n x\n else\n null;" diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/negated_membership_or_with_length_still_eliminates_field.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/negated_membership_or_with_length_still_eliminates_field.yaml index 55dac638..bbbec979 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/negated_membership_or_with_length_still_eliminates_field.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/negated_membership_or_with_length_still_eliminates_field.yaml @@ -12,8 +12,14 @@ steps: - step: diagnosticsSettled -- step: expectTypes +- step: requestHover + as: hover1 file: main.jsonnet - checks: - - at: m1 - type: 'never' + at: m1 +- step: expectHover + request: hover1 + result: + - type: "never" + - preview: + language: "jsonnet" + value: "x.foo" diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/object_field_literal_chain_preserves_base_type.yaml 
b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/object_field_literal_chain_preserves_base_type.yaml index d8339b46..9568a064 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/object_field_literal_chain_preserves_base_type.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/object_field_literal_chain_preserves_base_type.yaml @@ -17,8 +17,14 @@ steps: - step: diagnosticsSettled -- step: expectTypes +- step: requestHover + as: hover1 file: main.jsonnet - checks: - - at: m1 - type: 'string' + at: m1 +- step: expectHover + request: hover1 + result: + - type: "string" + - preview: + language: "jsonnet" + value: "x.t" diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/object_field_presence_refines_object_shape.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/object_field_presence_refines_object_shape.yaml index 218c1026..489d9b71 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/object_field_presence_refines_object_shape.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/object_field_presence_refines_object_shape.yaml @@ -13,8 +13,14 @@ steps: - step: diagnosticsSettled -- step: expectTypes +- step: requestHover + as: hover1 file: main.jsonnet - checks: - - at: m1 - type: 'any' + at: m1 +- step: expectHover + request: hover1 + result: + - type: "any" + - preview: + language: "jsonnet" + value: "obj.foo" diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/object_field_type_guards_refine_nested_paths.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/object_field_type_guards_refine_nested_paths.yaml index ff83fd2b..8288017c 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/object_field_type_guards_refine_nested_paths.yaml +++ 
b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/object_field_type_guards_refine_nested_paths.yaml @@ -17,10 +17,25 @@ steps: - step: diagnosticsSettled -- step: expectTypes +- step: requestHover + as: hover1 file: main.jsonnet - checks: - - at: m1 - type: 'number' - - at: m2 - type: 'string | function() | object | array' + at: m1 +- step: expectHover + request: hover1 + result: + - type: "number" + - preview: + language: "jsonnet" + value: "obj.a" +- step: requestHover + as: hover2 + file: main.jsonnet + at: m2 +- step: expectHover + request: hover2 + result: + - type: "string | function() | object | array" + - preview: + language: "jsonnet" + value: "obj.a" diff --git a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/object_membership_known_union.yaml b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/object_membership_known_union.yaml index 305e0a9e..bf3f434c 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/object_membership_known_union.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/flow/flow_objects_and_membership/object_membership_known_union.yaml @@ -13,8 +13,14 @@ steps: - step: diagnosticsSettled -- step: expectTypes +- step: requestHover + as: hover1 file: main.jsonnet - checks: - - at: m1 - type: 'number' + at: m1 +- step: expectHover + request: hover1 + result: + - type: "number" + - preview: + language: "jsonnet" + value: "obj.foo" From 7acf97b6f6978cdbc293e9523313b74df3711379 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Wed, 18 Feb 2026 16:55:36 +0000 Subject: [PATCH 164/210] test(scenario): require full completion expectations --- .../src/scenario/request_steps.rs | 5 -- .../src/scenario_runner/expectation_steps.rs | 71 +------------------ .../src/scenario_runner/helpers.rs | 35 +-------- .../src/scenario_script/compile.rs | 6 +- .../src/scenario_script/inputs.rs | 9 +-- .../completion_bracket_lookup_fields.yaml | 14 +++- 
.../completion_nested_object_fields.yaml | 11 ++- .../runner/completion_syntax_error_scope.yaml | 24 ++++++- .../runner/completion_union_nested_flow.yaml | 8 ++- 9 files changed, 53 insertions(+), 130 deletions(-) diff --git a/crates/jrsonnet-lsp-scenario/src/scenario/request_steps.rs b/crates/jrsonnet-lsp-scenario/src/scenario/request_steps.rs index ad4477fc..2f61e3b4 100644 --- a/crates/jrsonnet-lsp-scenario/src/scenario/request_steps.rs +++ b/crates/jrsonnet-lsp-scenario/src/scenario/request_steps.rs @@ -718,12 +718,7 @@ pub struct RequestCompletionStep { #[derive(Debug, Clone, PartialEq)] pub struct ExpectCompletionStep { pub(crate) id: i32, - /// Full completion response assertion. pub result: Option, - /// Optional label-only assertion mode. Compared order-insensitively. - pub labels: Option>, - /// When true, `labels` is treated as a required subset of actual labels. - pub allow_extra: bool, } /// `textDocument/formatting` request. diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_runner/expectation_steps.rs b/crates/jrsonnet-lsp-scenario/src/scenario_runner/expectation_steps.rs index bb777219..0fd97053 100644 --- a/crates/jrsonnet-lsp-scenario/src/scenario_runner/expectation_steps.rs +++ b/crates/jrsonnet-lsp-scenario/src/scenario_runner/expectation_steps.rs @@ -5,10 +5,7 @@ use serde_json::Value; use thiserror::Error; use super::{ - helpers::{ - completion_label_counts, completion_labels, hover_array_sections_from_json, - json_mismatch_report, label_counts, JsonMismatchReport, - }, + helpers::{hover_array_sections_from_json, json_mismatch_report, JsonMismatchReport}, transport::{RpcError, SerdeError, TransportError}, RunnerResult, ScenarioRunner, REQUEST_TIMEOUT, }; @@ -27,27 +24,6 @@ pub enum AssertionError { HoverMissingResult { id: i32 }, #[error("hover response {id} had unsupported content shape: {hover}")] HoverMalformedContent { id: i32, hover: String }, - #[error("completion response {id} had no result, expected labels {expected_labels:?}")] - 
CompletionMissingResult { - id: i32, - expected_labels: Vec, - }, - #[error( - "completion labels mismatch for id {id}: expected at least {expected_count} of `{label}`, got {actual_count} (actual labels: {actual_labels:?})" - )] - CompletionAtLeastLabelMismatch { - id: i32, - label: String, - expected_count: usize, - actual_count: usize, - actual_labels: Vec, - }, - #[error("completion labels mismatch for id {id}: actual={actual_labels:?} expected={expected_labels:?}")] - CompletionLabelsMismatch { - id: i32, - actual_labels: Vec, - expected_labels: Vec, - }, #[error("diagnostics mismatch for uri {uri}\n{details}")] DiagnosticsMismatch { uri: String, @@ -217,50 +193,7 @@ impl ScenarioRunner { &mut self, step: &ExpectCompletionStep, ) -> RunnerResult<()> { - let actual = - self.response_result::("completion", step.id)?; - - let check_full_result = step.result.is_some() || step.labels.is_none(); - if check_full_result && actual != step.result { - return Self::response_mismatch("completion", step.id, &actual, &step.result); - } - - if let Some(expected_labels) = &step.labels { - let Some(actual_response) = actual.as_ref() else { - return Err(AssertionError::CompletionMissingResult { - id: step.id, - expected_labels: expected_labels.clone(), - } - .into()); - }; - - let actual_counts = completion_label_counts(actual_response); - let expected_counts = label_counts(expected_labels); - if step.allow_extra { - for (label, expected_count) in &expected_counts { - let actual_count = actual_counts.get(label).copied().unwrap_or_default(); - if actual_count < *expected_count { - return Err(AssertionError::CompletionAtLeastLabelMismatch { - id: step.id, - label: label.clone(), - expected_count: *expected_count, - actual_count, - actual_labels: completion_labels(actual_response), - } - .into()); - } - } - } else if actual_counts != expected_counts { - return Err(AssertionError::CompletionLabelsMismatch { - id: step.id, - actual_labels: completion_labels(actual_response), - 
expected_labels: expected_labels.clone(), - } - .into()); - } - } - - Ok(()) + self.expect_typed_response("completion", step.id, &step.result) } pub(super) fn step_expect_formatting( diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_runner/helpers.rs b/crates/jrsonnet-lsp-scenario/src/scenario_runner/helpers.rs index ea852bf3..420c838c 100644 --- a/crates/jrsonnet-lsp-scenario/src/scenario_runner/helpers.rs +++ b/crates/jrsonnet-lsp-scenario/src/scenario_runner/helpers.rs @@ -1,7 +1,4 @@ -use std::{ - collections::{BTreeSet, HashMap}, - fmt, -}; +use std::{collections::BTreeSet, fmt}; use serde_json::Value; use thiserror::Error; @@ -41,36 +38,6 @@ pub(super) fn hover_array_sections_from_json( .ok() } -pub(super) fn completion_items( - response: &lsp_types::CompletionResponse, -) -> &[lsp_types::CompletionItem] { - match response { - lsp_types::CompletionResponse::Array(items) => items, - lsp_types::CompletionResponse::List(list) => &list.items, - } -} - -pub(super) fn completion_labels(response: &lsp_types::CompletionResponse) -> Vec { - completion_items(response) - .iter() - .map(|item| item.label.clone()) - .collect() -} - -pub(super) fn label_counts(labels: &[String]) -> HashMap { - let mut counts = HashMap::new(); - for label in labels { - *counts.entry(label.clone()).or_insert(0) += 1; - } - counts -} - -pub(super) fn completion_label_counts( - response: &lsp_types::CompletionResponse, -) -> HashMap { - label_counts(&completion_labels(response)) -} - const MAX_DIFF_LINES: usize = 20; const MAX_VALUE_PREVIEW_CHARS: usize = 120; diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_script/compile.rs b/crates/jrsonnet-lsp-scenario/src/scenario_script/compile.rs index d30b495c..fc55b11f 100644 --- a/crates/jrsonnet-lsp-scenario/src/scenario_script/compile.rs +++ b/crates/jrsonnet-lsp-scenario/src/scenario_script/compile.rs @@ -497,15 +497,13 @@ impl ScenarioScript { } ScenarioScriptStep::ExpectCompletion(step) => { let request_id = - 
registry.claim(RequestKind::Completion, step.request.as_deref())?; + registry.claim(RequestKind::Completion, Some(step.request.as_str()))?; let file = completion_request_files.get(&request_id).ok_or({ CompileScenarioError::MissingRequestFileContext { step: "expectCompletion", request_id, } })?; - let labels = step.labels.clone(); - let allow_extra = step.allow_extra; vec![ScenarioStep::ExpectCompletion(ExpectCompletionStep { id: request_id, result: step.resolve_result( @@ -513,8 +511,6 @@ impl ScenarioScript { file, "expectCompletion.result", )?, - labels, - allow_extra, })] } ScenarioScriptStep::RequestFormatting(step) => { diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_script/inputs.rs b/crates/jrsonnet-lsp-scenario/src/scenario_script/inputs.rs index 9d813434..f19345dd 100644 --- a/crates/jrsonnet-lsp-scenario/src/scenario_script/inputs.rs +++ b/crates/jrsonnet-lsp-scenario/src/scenario_script/inputs.rs @@ -1164,15 +1164,8 @@ impl ExpectPrepareRenameScriptStep { #[derive(Debug, Clone, PartialEq, Deserialize)] #[serde(deny_unknown_fields)] pub(super) struct ExpectCompletionScriptStep { - #[serde(default)] - pub(super) request: Option, + pub(super) request: String, pub(super) result: Option, - /// Optional label subset assertion, checked by the scenario runner. - #[serde(default)] - pub(super) labels: Option>, - /// When true, allow result items beyond `labels`. 
- #[serde(default)] - pub(super) allow_extra: bool, } impl ExpectCompletionScriptStep { diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/completion_bracket_lookup_fields.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/completion_bracket_lookup_fields.yaml index fa30ae92..3c741e43 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/runner/completion_bracket_lookup_fields.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/completion_bracket_lookup_fields.yaml @@ -22,5 +22,15 @@ steps: at: m1 - step: expectCompletion request: bracketCompletion - labels: [a, b, c] - allow_extra: false + result: + isIncomplete: false + items: + - label: a + kind: 5 + detail: "true" + - label: b + kind: 5 + detail: number + - label: c + kind: 5 + detail: string diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/completion_nested_object_fields.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/completion_nested_object_fields.yaml index 0a4136f6..52757e16 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/runner/completion_nested_object_fields.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/completion_nested_object_fields.yaml @@ -14,5 +14,12 @@ steps: at: m1 - step: expectCompletion request: nestedFields - labels: [x, y] - allow_extra: false + result: + isIncomplete: false + items: + - label: x + kind: 5 + detail: number + - label: y + kind: 5 + detail: number diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/completion_syntax_error_scope.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/completion_syntax_error_scope.yaml index beb2dc0b..27f450e3 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/runner/completion_syntax_error_scope.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/completion_syntax_error_scope.yaml @@ -16,6 +16,24 @@ steps: at: m1 - step: expectCompletion request: completionAtError - labels: [$, self, std, super, x, y] - allow_extra: false - + result: + isIncomplete: false + items: + - label: x + kind: 6 + detail: local variable + - label: y + 
kind: 6 + detail: local variable + - label: std + kind: 9 + detail: Jsonnet standard library + - label: $ + kind: 14 + detail: Reference to root object + - label: self + kind: 14 + detail: Reference to current object + - label: super + kind: 14 + detail: Reference to inherited object diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/completion_union_nested_flow.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/completion_union_nested_flow.yaml index 4f186b47..0cd15c61 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/runner/completion_union_nested_flow.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/completion_union_nested_flow.yaml @@ -24,5 +24,9 @@ steps: at: m1 - step: expectCompletion request: unionFieldCompletion - labels: [b] - allow_extra: false + result: + isIncomplete: false + items: + - label: b + kind: 5 + detail: "true | false | number | string" From 6174ddf416a4c42b49ee67697c0734e0d9ad4e9a Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Wed, 18 Feb 2026 17:04:13 +0000 Subject: [PATCH 165/210] test(scenario): require explicit request aliases in expects --- .../src/scenario_script/compile.rs | 77 ++++++++----------- .../src/scenario_script/inputs.rs | 18 ++--- .../src/scenario_script/registry.rs | 73 ++++++++---------- 3 files changed, 70 insertions(+), 98 deletions(-) diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_script/compile.rs b/crates/jrsonnet-lsp-scenario/src/scenario_script/compile.rs index fc55b11f..aba122bb 100644 --- a/crates/jrsonnet-lsp-scenario/src/scenario_script/compile.rs +++ b/crates/jrsonnet-lsp-scenario/src/scenario_script/compile.rs @@ -226,7 +226,7 @@ impl ScenarioScript { } ScenarioScriptStep::ExpectCodeAction(step) => { let request_id = - registry.claim(RequestKind::CodeAction, step.request.as_deref())?; + registry.claim(RequestKind::CodeAction, step.request.as_str())?; let default_file = code_action_request_files .get(&request_id) .map(String::as_str); @@ -280,7 +280,7 @@ impl ScenarioScript { }) .transpose()?; 
vec![ScenarioStep::ExpectReferences(ExpectReferencesStep { - id: registry.claim(RequestKind::References, step.request.as_deref())?, + id: registry.claim(RequestKind::References, step.request.as_str())?, result, })] } @@ -297,7 +297,7 @@ impl ScenarioScript { } ScenarioScriptStep::ExpectDefinition(step) => { vec![ScenarioStep::ExpectDefinition(ExpectDefinitionStep { - id: registry.claim(RequestKind::Definition, step.request.as_deref())?, + id: registry.claim(RequestKind::Definition, step.request.as_str())?, result: step .result .map(|result| { @@ -319,7 +319,7 @@ impl ScenarioScript { } ScenarioScriptStep::ExpectDeclaration(step) => { vec![ScenarioStep::ExpectDeclaration(ExpectDeclarationStep { - id: registry.claim(RequestKind::Declaration, step.request.as_deref())?, + id: registry.claim(RequestKind::Declaration, step.request.as_str())?, result: step .result .map(|result| { @@ -346,7 +346,7 @@ impl ScenarioScript { vec![ScenarioStep::ExpectTypeDefinition( ExpectTypeDefinitionStep { id: registry - .claim(RequestKind::TypeDefinition, step.request.as_deref())?, + .claim(RequestKind::TypeDefinition, step.request.as_str())?, result: step .result .map(|result| { @@ -378,7 +378,7 @@ impl ScenarioScript { } ScenarioScriptStep::ExpectPrepareRename(step) => { let request_id = - registry.claim(RequestKind::PrepareRename, step.request.as_deref())?; + registry.claim(RequestKind::PrepareRename, step.request.as_str())?; let file = prepare_rename_request_files.get(&request_id).ok_or( CompileScenarioError::MissingRequestFileContext { step: "expectPrepareRename", @@ -408,7 +408,7 @@ impl ScenarioScript { } ScenarioScriptStep::ExpectRename(step) => { vec![ScenarioStep::ExpectRename(ExpectRenameStep { - id: registry.claim(RequestKind::Rename, step.request.as_deref())?, + id: registry.claim(RequestKind::Rename, step.request.as_str())?, result: step .result .map(|result| { @@ -430,7 +430,7 @@ impl ScenarioScript { } ScenarioScriptStep::ExpectHover(step) => { 
vec![ScenarioStep::ExpectHover(ExpectHoverStep { - id: registry.claim(RequestKind::Hover, step.request.as_deref())?, + id: registry.claim(RequestKind::Hover, step.request.as_str())?, result: step.result, })] } @@ -449,7 +449,7 @@ impl ScenarioScript { } ScenarioScriptStep::ExpectSignatureHelp(step) => { vec![ScenarioStep::ExpectSignatureHelp(ExpectSignatureHelpStep { - id: registry.claim(RequestKind::SignatureHelp, step.request.as_deref())?, + id: registry.claim(RequestKind::SignatureHelp, step.request.as_str())?, result: step.result, })] } @@ -497,7 +497,7 @@ impl ScenarioScript { } ScenarioScriptStep::ExpectCompletion(step) => { let request_id = - registry.claim(RequestKind::Completion, Some(step.request.as_str()))?; + registry.claim(RequestKind::Completion, step.request.as_str())?; let file = completion_request_files.get(&request_id).ok_or({ CompileScenarioError::MissingRequestFileContext { step: "expectCompletion", @@ -523,7 +523,7 @@ impl ScenarioScript { } ScenarioScriptStep::ExpectFormatting(step) => { vec![ScenarioStep::ExpectFormatting(ExpectFormattingStep { - id: registry.claim(RequestKind::Formatting, step.request.as_deref())?, + id: registry.claim(RequestKind::Formatting, step.request.as_str())?, result: step.result, })] } @@ -540,7 +540,7 @@ impl ScenarioScript { } ScenarioScriptStep::ExpectSemanticTokensFull(step) => { let request_id = - registry.claim(RequestKind::SemanticTokensFull, step.request.as_deref())?; + registry.claim(RequestKind::SemanticTokensFull, step.request.as_str())?; let file = semantic_tokens_full_request_files.get(&request_id).ok_or( CompileScenarioError::MissingRequestFileContext { step: "expectSemanticTokensFull", @@ -580,8 +580,8 @@ impl ScenarioScript { )] } ScenarioScriptStep::ExpectSemanticTokensRange(step) => { - let request_id = registry - .claim(RequestKind::SemanticTokensRange, step.request.as_deref())?; + let request_id = + registry.claim(RequestKind::SemanticTokensRange, step.request.as_str())?; let file = 
semantic_tokens_range_request_files.get(&request_id).ok_or( CompileScenarioError::MissingRequestFileContext { step: "expectSemanticTokensRange", @@ -620,7 +620,7 @@ impl ScenarioScript { } ScenarioScriptStep::ExpectInlayHints(step) => { let request_id = - registry.claim(RequestKind::InlayHints, step.request.as_deref())?; + registry.claim(RequestKind::InlayHints, step.request.as_str())?; let file = inlay_hint_request_files.get(&request_id).ok_or({ CompileScenarioError::MissingRequestFileContext { step: "expectInlayHints", @@ -656,7 +656,7 @@ impl ScenarioScript { } ScenarioScriptStep::ExpectDocumentSymbol(step) => { let request_id = - registry.claim(RequestKind::DocumentSymbol, step.request.as_deref())?; + registry.claim(RequestKind::DocumentSymbol, step.request.as_str())?; let file = document_symbol_request_files.get(&request_id).ok_or( CompileScenarioError::MissingRequestFileContext { step: "expectDocumentSymbol", @@ -687,7 +687,7 @@ impl ScenarioScript { vec![ScenarioStep::ExpectWorkspaceSymbol( ExpectWorkspaceSymbolStep { id: registry - .claim(RequestKind::WorkspaceSymbol, step.request.as_deref())?, + .claim(RequestKind::WorkspaceSymbol, step.request.as_str())?, result: step .result .map(|result| { @@ -711,7 +711,7 @@ impl ScenarioScript { } ScenarioScriptStep::ExpectCodeLens(step) => { let request_id = - registry.claim(RequestKind::CodeLens, step.request.as_deref())?; + registry.claim(RequestKind::CodeLens, step.request.as_str())?; let default_file = code_lens_request_files.get(&request_id).map(String::as_str); vec![ScenarioStep::ExpectCodeLens(ExpectCodeLensStep { id: request_id, @@ -747,7 +747,7 @@ impl ScenarioScript { vec![ScenarioStep::ExpectExecuteCommand( ExpectExecuteCommandStep { id: registry - .claim(RequestKind::ExecuteCommand, step.request.as_deref())?, + .claim(RequestKind::ExecuteCommand, step.request.as_str())?, result: step.result, }, )] @@ -990,8 +990,8 @@ struct WatchedFileChangeScriptStep { } // Request/expect payloads generally follow the 
same aliasing contract: -// `request*` steps may define `as`, and matching `expect*` steps may reference -// that alias via `request` or consume by FIFO order when omitted. +// `request*` steps may define `as`, and matching `expect*` steps must reference +// that alias via `request`. /// `requestCodeAction` optionally names the request and captures context. #[derive(Debug, Clone, PartialEq, Deserialize)] #[serde(deny_unknown_fields)] @@ -1007,12 +1007,11 @@ struct RequestCodeActionScriptStep { only: Option>, } -/// `expectCodeAction` can match by explicit request alias or FIFO order. +/// `expectCodeAction` matches by explicit request alias. #[derive(Debug, Clone, PartialEq, Deserialize)] #[serde(deny_unknown_fields)] struct ExpectCodeActionScriptStep { - #[serde(default)] - request: Option, + request: String, result: Option>, } @@ -1031,8 +1030,7 @@ struct RequestReferencesScriptStep { #[derive(Debug, Clone, PartialEq, Deserialize)] #[serde(deny_unknown_fields)] struct ExpectReferencesScriptStep { - #[serde(default)] - request: Option, + request: String, result: Option>, } @@ -1049,8 +1047,7 @@ struct RequestDefinitionScriptStep { #[derive(Debug, Clone, PartialEq, Deserialize)] #[serde(deny_unknown_fields)] struct ExpectDefinitionScriptStep { - #[serde(default)] - request: Option, + request: String, result: Option, } @@ -1067,8 +1064,7 @@ struct RequestDeclarationScriptStep { #[derive(Debug, Clone, PartialEq, Deserialize)] #[serde(deny_unknown_fields)] struct ExpectDeclarationScriptStep { - #[serde(default)] - request: Option, + request: String, result: Option, } @@ -1085,8 +1081,7 @@ struct RequestTypeDefinitionScriptStep { #[derive(Debug, Clone, PartialEq, Deserialize)] #[serde(deny_unknown_fields)] struct ExpectTypeDefinitionScriptStep { - #[serde(default)] - request: Option, + request: String, result: Option, } @@ -1114,8 +1109,7 @@ struct RequestRenameScriptStep { #[derive(Debug, Clone, PartialEq, Deserialize)] #[serde(deny_unknown_fields)] struct 
ExpectRenameScriptStep { - #[serde(default)] - request: Option, + request: String, result: Option, } @@ -1190,8 +1184,7 @@ struct RequestSemanticTokensFullScriptStep { #[derive(Debug, Clone, PartialEq, Deserialize)] #[serde(deny_unknown_fields)] struct ExpectSemanticTokensFullScriptStep { - #[serde(default)] - request: Option, + request: String, result: Option, } @@ -1208,8 +1201,7 @@ struct RequestSemanticTokensRangeScriptStep { #[derive(Debug, Clone, PartialEq, Deserialize)] #[serde(deny_unknown_fields)] struct ExpectSemanticTokensRangeScriptStep { - #[serde(default)] - request: Option, + request: String, result: Option, } @@ -1226,8 +1218,7 @@ struct RequestInlayHintsScriptStep { #[derive(Debug, Clone, Deserialize)] #[serde(deny_unknown_fields)] struct ExpectInlayHintsScriptStep { - #[serde(default)] - request: Option, + request: String, result: Option>, } @@ -1258,8 +1249,7 @@ struct RequestWorkspaceSymbolScriptStep { #[derive(Debug, Clone, PartialEq, Deserialize)] #[serde(deny_unknown_fields)] struct ExpectWorkspaceSymbolScriptStep { - #[serde(default)] - request: Option, + request: String, result: Option, } @@ -1274,8 +1264,7 @@ struct RequestCodeLensScriptStep { #[derive(Debug, Clone, PartialEq, Deserialize)] #[serde(deny_unknown_fields)] struct ExpectCodeLensScriptStep { - #[serde(default)] - request: Option, + request: String, result: Option>, } diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_script/inputs.rs b/crates/jrsonnet-lsp-scenario/src/scenario_script/inputs.rs index f19345dd..0ccba3d9 100644 --- a/crates/jrsonnet-lsp-scenario/src/scenario_script/inputs.rs +++ b/crates/jrsonnet-lsp-scenario/src/scenario_script/inputs.rs @@ -1088,32 +1088,28 @@ impl PrepareRenameResponseInput { #[derive(Debug, Clone, PartialEq, Eq, Deserialize)] #[serde(deny_unknown_fields)] pub(super) struct ExpectHoverScriptStep { - #[serde(default)] - pub(super) request: Option, + pub(super) request: String, pub(super) result: Option>, } #[derive(Debug, Clone, PartialEq, 
Eq, Deserialize)] #[serde(deny_unknown_fields)] pub(super) struct ExpectSignatureHelpScriptStep { - #[serde(default)] - pub(super) request: Option, + pub(super) request: String, pub(super) result: Option, } #[derive(Debug, Clone, PartialEq, Deserialize)] #[serde(deny_unknown_fields)] pub(super) struct ExpectFormattingScriptStep { - #[serde(default)] - pub(super) request: Option, + pub(super) request: String, pub(super) result: Option>, } #[derive(Debug, Clone, PartialEq, Deserialize)] #[serde(deny_unknown_fields)] pub(super) struct ExpectDocumentSymbolScriptStep { - #[serde(default)] - pub(super) request: Option, + pub(super) request: String, pub(super) result: Option, } @@ -1134,16 +1130,14 @@ impl ExpectDocumentSymbolScriptStep { #[derive(Debug, Clone, PartialEq, Eq, Deserialize)] #[serde(deny_unknown_fields)] pub(super) struct ExpectExecuteCommandScriptStep { - #[serde(default)] - pub(super) request: Option, + pub(super) request: String, pub(super) result: Option, } #[derive(Debug, Clone, PartialEq, Deserialize)] #[serde(deny_unknown_fields)] pub(super) struct ExpectPrepareRenameScriptStep { - #[serde(default)] - pub(super) request: Option, + pub(super) request: String, pub(super) result: Option, } diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_script/registry.rs b/crates/jrsonnet-lsp-scenario/src/scenario_script/registry.rs index 60ff9007..67076259 100644 --- a/crates/jrsonnet-lsp-scenario/src/scenario_script/registry.rs +++ b/crates/jrsonnet-lsp-scenario/src/scenario_script/registry.rs @@ -2,8 +2,7 @@ //! //! Scenario scripts can name requests with `as: some_alias` and later refer to //! them from `expect*` steps using `request: some_alias`. This registry maps -//! aliases to generated request IDs and also maintains per-kind FIFO queues for -//! unnamed request/expect pairs. +//! aliases to generated request IDs and tracks unmatched requests by kind. 
use std::{ collections::{HashMap, VecDeque}, @@ -59,8 +58,6 @@ pub enum RequestRegistryError { AliasAlreadyMatched { name: String, kind: RequestKind }, #[error("failed to claim queued request alias '{name}' for {kind}")] ClaimQueueCorrupted { name: String, kind: RequestKind }, - #[error("no pending {kind} to match; add the request first or reference it via `request`")] - NoPendingRequest { kind: RequestKind }, } impl RequestKind { @@ -109,8 +106,8 @@ impl RequestRegistry { /// Allocate a new request ID and optionally bind a unique alias. /// - /// The new ID is always queued under `kind` so unnamed expects can claim in - /// issue order. + /// The new ID is always queued under `kind` so alias-based claims can enforce + /// one-to-one request/expect matching. pub(super) fn allocate( &mut self, kind: RequestKind, @@ -133,48 +130,40 @@ impl RequestRegistry { Ok(id) } - /// Claim the next pending request ID for `kind`. - /// - /// If `name` is provided, the claim is by alias and kind-checked. - /// Otherwise this pops from the per-kind FIFO queue. + /// Claim a pending request ID for `kind` by explicit alias. 
pub(super) fn claim( &mut self, kind: RequestKind, - name: Option<&str>, + name: &str, ) -> Result { - if let Some(name) = name { - let (named_kind, id) = self.named.get(name).copied().ok_or_else(|| { - RequestRegistryError::UnknownAlias { + let (named_kind, id) = + self.named + .get(name) + .copied() + .ok_or_else(|| RequestRegistryError::UnknownAlias { name: name.to_string(), kind, - } - })?; - if named_kind != kind { - return Err(RequestRegistryError::AliasKindMismatch { - name: name.to_string(), - alias_kind: named_kind, - requested_kind: kind, - }); - } - let queue = self.pending.entry(kind).or_default(); - let Some(index) = queue.iter().position(|candidate| *candidate == id) else { - return Err(RequestRegistryError::AliasAlreadyMatched { - name: name.to_string(), - kind, - }); - }; - let Some(claimed) = queue.remove(index) else { - return Err(RequestRegistryError::ClaimQueueCorrupted { - name: name.to_string(), - kind, - }); - }; - return Ok(claimed); + })?; + if named_kind != kind { + return Err(RequestRegistryError::AliasKindMismatch { + name: name.to_string(), + alias_kind: named_kind, + requested_kind: kind, + }); } - - self.pending - .get_mut(&kind) - .and_then(VecDeque::pop_front) - .ok_or(RequestRegistryError::NoPendingRequest { kind }) + let queue = self.pending.entry(kind).or_default(); + let Some(index) = queue.iter().position(|candidate| *candidate == id) else { + return Err(RequestRegistryError::AliasAlreadyMatched { + name: name.to_string(), + kind, + }); + }; + let Some(claimed) = queue.remove(index) else { + return Err(RequestRegistryError::ClaimQueueCorrupted { + name: name.to_string(), + kind, + }); + }; + Ok(claimed) } } From 4047ed11761522a48e5ec6ba54acb9ffde2a0bf6 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Thu, 19 Feb 2026 13:10:26 +0000 Subject: [PATCH 166/210] feat(config): add tri-state tanka path resolution mode --- crates/jrsonnet-lsp/src/analysis/eval.rs | 11 +- crates/jrsonnet-lsp/src/analysis/tanka.rs | 93 +++++++---- 
crates/jrsonnet-lsp/src/config.rs | 150 +++++++++++++++++- .../server/async_requests/commands/eval.rs | 2 +- 4 files changed, 211 insertions(+), 45 deletions(-) diff --git a/crates/jrsonnet-lsp/src/analysis/eval.rs b/crates/jrsonnet-lsp/src/analysis/eval.rs index 9c06ac0f..81a20565 100644 --- a/crates/jrsonnet-lsp/src/analysis/eval.rs +++ b/crates/jrsonnet-lsp/src/analysis/eval.rs @@ -14,6 +14,7 @@ use jrsonnet_stdlib::ContextInitializer; use lsp_types::{Diagnostic, DiagnosticSeverity, NumberOrString}; use super::tanka; +use crate::config::ResolvePathsWithTankaMode; /// Configuration for evaluation diagnostics. #[derive(Debug, Clone, Default)] @@ -21,7 +22,7 @@ pub struct EvalConfig { /// Import paths (JPATH) to search for imports. pub jpath: Vec, /// Whether to resolve paths using Tanka conventions. - pub resolve_paths_with_tanka: bool, + pub resolve_paths_with_tanka: ResolvePathsWithTankaMode, } /// Create a jrsonnet State with the given jpath entries. @@ -44,8 +45,8 @@ pub(crate) fn create_state_with_jpath(jpath: &[PathBuf]) -> State { pub struct Evaluator { /// Base jpath configuration (from settings). base_jpath: Vec, - /// Whether to resolve paths using Tanka conventions. - tanka_mode: bool, + /// Tanka path resolution mode. + tanka_mode: ResolvePathsWithTankaMode, } impl Evaluator { @@ -65,9 +66,9 @@ impl Evaluator { fn get_jpath_for_file(&self, path: &Path) -> Vec { let mut jpath = self.base_jpath.clone(); - if self.tanka_mode { + if self.tanka_mode.is_enabled() { // Add Tanka-resolved paths - let tanka_paths = tanka::resolve_jpath(path); + let tanka_paths = tanka::resolve_jpath(path, self.tanka_mode); jpath.extend(tanka_paths); } else { // Add the file's directory to jpath (standard behavior) diff --git a/crates/jrsonnet-lsp/src/analysis/tanka.rs b/crates/jrsonnet-lsp/src/analysis/tanka.rs index dd99a2e8..64558fcf 100644 --- a/crates/jrsonnet-lsp/src/analysis/tanka.rs +++ b/crates/jrsonnet-lsp/src/analysis/tanka.rs @@ -1,6 +1,6 @@ //! 
Tanka integration for resolving import paths. //! -//! When `resolve_paths_with_tanka` is enabled, the LSP will look for +//! Depending on `resolve_paths_with_tanka` mode, the LSP can look for //! `jsonnetfile.json` or `tkrc.yaml` in parent directories to find the //! project root and add appropriate vendor/lib paths to the jpath. @@ -9,6 +9,8 @@ use std::{ path::{Path, PathBuf}, }; +use crate::config::ResolvePathsWithTankaMode; + /// Find the Tanka project root by walking up from the given path. /// /// The root is the directory that contains either `tkrc.yaml` or `jsonnetfile.json`. @@ -41,30 +43,15 @@ fn find_parent_file(start: &Path, filename: &str) -> Option { } } -/// Resolve jpath entries for a file in a Tanka project. -/// -/// Returns a list of paths to add to jpath for import resolution: -/// - `/vendor` -/// - `/vendor` (if different from root) -/// - `/lib` -/// - `` (the directory containing the file) -/// -/// Returns an empty list if no Tanka root is found. -pub fn resolve_jpath(path: &Path) -> Vec { - let base = if path.is_file() { +fn file_base(path: &Path) -> Option { + if path.is_file() { path.parent().map(Path::to_path_buf) } else { Some(path.to_path_buf()) - }; - - let Some(base) = base else { - return Vec::new(); - }; - - let Some(root) = find_root(path) else { - return Vec::new(); - }; + } +} +fn build_jpath(root: &Path, base: &Path) -> Vec { let mut jpath = Vec::new(); // Add root/vendor @@ -88,11 +75,42 @@ pub fn resolve_jpath(path: &Path) -> Vec { } // Add base directory - jpath.push(base); - + jpath.push(base.to_path_buf()); jpath } +/// Resolve jpath entries for a file using a configured Tanka mode. 
+/// +/// Returns a list of paths to add to jpath for import resolution: +/// - `/vendor` +/// - `/vendor` (if different from root) +/// - `/lib` +/// - `` (the directory containing the file) +/// +/// Mode behavior: +/// - `false`: no extra roots +/// - `auto`: only when a Tanka root marker is found +/// - `true`: same as `auto`, but falls back to using `` as root +pub fn resolve_jpath(path: &Path, mode: ResolvePathsWithTankaMode) -> Vec { + let Some(base) = file_base(path) else { + return Vec::new(); + }; + + match mode { + ResolvePathsWithTankaMode::False => Vec::new(), + ResolvePathsWithTankaMode::Auto => { + let Some(root) = find_root(path) else { + return Vec::new(); + }; + build_jpath(&root, &base) + } + ResolvePathsWithTankaMode::True => { + let root = find_root(path).unwrap_or_else(|| base.clone()); + build_jpath(&root, &base) + } + } +} + /// Compute effective import roots for LSP import resolution. /// /// This always includes configured roots. When Tanka mode is enabled, additional @@ -102,12 +120,10 @@ pub fn resolve_jpath(path: &Path) -> Vec { pub fn effective_import_roots( path: &Path, configured_roots: &[PathBuf], - resolve_paths_with_tanka: bool, + resolve_paths_with_tanka: ResolvePathsWithTankaMode, ) -> Vec { let mut roots: Vec = configured_roots.to_vec(); - if resolve_paths_with_tanka { - roots.extend(resolve_jpath(path)); - } + roots.extend(resolve_jpath(path, resolve_paths_with_tanka)); let mut seen = HashSet::new(); roots @@ -175,7 +191,7 @@ mod tests { fs::create_dir_all(&env).unwrap(); // Resolve jpath from environment directory - let jpath = resolve_jpath(&env); + let jpath = resolve_jpath(&env, ResolvePathsWithTankaMode::Auto); let expected = vec![root.join("vendor"), root.join("lib"), env]; assert_eq!(jpath, expected); } @@ -194,7 +210,7 @@ mod tests { fs::create_dir(env.join("vendor")).unwrap(); // Resolve jpath from environment directory - let jpath = resolve_jpath(&env); + let jpath = resolve_jpath(&env, 
ResolvePathsWithTankaMode::Auto); let expected = vec![root.join("vendor"), env.join("vendor"), env]; assert_eq!(jpath, expected); } @@ -202,10 +218,23 @@ mod tests { #[test] fn test_resolve_jpath_no_root() { let tmp = TempDir::new().unwrap(); - let jpath = resolve_jpath(tmp.path()); + let jpath = resolve_jpath(tmp.path(), ResolvePathsWithTankaMode::Auto); assert_eq!(jpath, Vec::::new()); } + #[test] + fn test_resolve_jpath_force_mode_without_root() { + let tmp = TempDir::new().unwrap(); + let base = tmp.path().join("env"); + fs::create_dir_all(&base).unwrap(); + fs::create_dir(base.join("vendor")).unwrap(); + fs::create_dir(base.join("lib")).unwrap(); + + let jpath = resolve_jpath(&base, ResolvePathsWithTankaMode::True); + let expected = vec![base.join("vendor"), base.join("lib"), base]; + assert_eq!(jpath, expected); + } + #[test] fn test_effective_import_roots_without_tanka() { let tmp = TempDir::new().unwrap(); @@ -217,7 +246,7 @@ mod tests { PathBuf::from("/configured/one"), PathBuf::from("/configured/two"), ]; - let roots = effective_import_roots(&base, &configured, false); + let roots = effective_import_roots(&base, &configured, ResolvePathsWithTankaMode::False); assert_eq!(roots, configured); } @@ -233,7 +262,7 @@ mod tests { fs::write(&file, "{}").unwrap(); let configured = vec![root.join("vendor")]; - let roots = effective_import_roots(&file, &configured, true); + let roots = effective_import_roots(&file, &configured, ResolvePathsWithTankaMode::Auto); let expected = vec![root.join("vendor"), env]; assert_eq!(roots, expected); } diff --git a/crates/jrsonnet-lsp/src/config.rs b/crates/jrsonnet-lsp/src/config.rs index 2ad1e398..978472a1 100644 --- a/crates/jrsonnet-lsp/src/config.rs +++ b/crates/jrsonnet-lsp/src/config.rs @@ -5,7 +5,7 @@ //! //! Configuration is compatible with the Go [grafana/jsonnet-language-server](https://github.com/grafana/jsonnet-language-server). 
-use std::{collections::HashMap, path::PathBuf}; +use std::{collections::HashMap, fmt, path::PathBuf}; // Re-export config types from handlers crate pub use jrsonnet_lsp_handlers::{ @@ -14,6 +14,80 @@ pub use jrsonnet_lsp_handlers::{ }; use serde::{Deserialize, Serialize}; +/// Tanka import-root resolution mode. +/// +/// Accepts string values `"false"`, `"auto"`, `"true"` and also boolean values +/// (`false`/`true`) for convenience. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize)] +#[serde(rename_all = "lowercase")] +pub enum ResolvePathsWithTankaMode { + /// Disable Tanka-based path resolution. + False, + /// Resolve using Tanka conventions only when a Tanka root marker is found. + Auto, + /// Force Tanka path resolution behavior even when no marker is found. + True, +} + +impl Default for ResolvePathsWithTankaMode { + fn default() -> Self { + Self::Auto + } +} + +impl ResolvePathsWithTankaMode { + #[must_use] + pub const fn is_enabled(self) -> bool { + !matches!(self, Self::False) + } + + #[must_use] + pub const fn is_forced(self) -> bool { + matches!(self, Self::True) + } +} + +impl fmt::Display for ResolvePathsWithTankaMode { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::False => write!(f, "false"), + Self::Auto => write!(f, "auto"), + Self::True => write!(f, "true"), + } + } +} + +impl<'de> Deserialize<'de> for ResolvePathsWithTankaMode { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + #[derive(Deserialize)] + #[serde(untagged)] + enum Repr { + Bool(bool), + String(String), + } + + match Repr::deserialize(deserializer)? 
{ + Repr::Bool(value) => Ok(if value { Self::True } else { Self::False }), + Repr::String(value) => { + if value.eq_ignore_ascii_case("false") { + Ok(Self::False) + } else if value.eq_ignore_ascii_case("auto") { + Ok(Self::Auto) + } else if value.eq_ignore_ascii_case("true") { + Ok(Self::True) + } else { + Err(serde::de::Error::custom( + "resolve_paths_with_tanka must be one of: false, auto, true", + )) + } + } + } + } +} + /// Server configuration options. /// /// These can be passed via: @@ -52,9 +126,11 @@ pub struct ServerConfig { pub enable_lint_diagnostics: bool, /// Resolve import paths using Tanka conventions. - /// When enabled, looks for jsonnetfile.json and vendor directories. + /// - `false`: disabled + /// - `auto` (default): enabled only when a Tanka root marker exists + /// - `true`: force-enabled even without root markers #[serde(alias = "resolvePathsWithTanka", alias = "tankaMode")] - pub resolve_paths_with_tanka: bool, + pub resolve_paths_with_tanka: ResolvePathsWithTankaMode, /// Formatting options. 
#[serde(default)] @@ -87,7 +163,7 @@ struct ServerConfigPatch { #[serde(alias = "enableLintDiagnostics", alias = "lint")] enable_lint_diagnostics: Option, #[serde(alias = "resolvePathsWithTanka", alias = "tankaMode")] - resolve_paths_with_tanka: Option, + resolve_paths_with_tanka: Option, formatting: Option, #[serde(rename = "codeActions")] code_actions: Option, @@ -191,8 +267,8 @@ impl ServerConfig { if other.enable_lint_diagnostics { self.enable_lint_diagnostics = true; } - if other.resolve_paths_with_tanka { - self.resolve_paths_with_tanka = true; + if other.resolve_paths_with_tanka != ResolvePathsWithTankaMode::default() { + self.resolve_paths_with_tanka = other.resolve_paths_with_tanka; } if other.log_level.is_some() { self.log_level = other.log_level; @@ -288,6 +364,10 @@ mod tests { assert!(config.jpath.is_empty()); assert!(config.ext_vars.is_empty()); assert!(!config.enable_eval_diagnostics); + assert_eq!( + config.resolve_paths_with_tanka, + ResolvePathsWithTankaMode::Auto + ); assert_eq!(config.code_actions, CodeActionConfig::default()); assert_eq!(config.inlay_hints, InlayHintsConfig::default()); } @@ -336,7 +416,36 @@ mod tests { let config = ServerConfig::from_initialization_options(Some(json)); assert_eq!(config.jpath, vec![PathBuf::from("/usr/share/jsonnet")]); assert_eq!(config.ext_vars.get("env"), Some(&"dev".to_string())); - assert!(config.resolve_paths_with_tanka); + assert_eq!( + config.resolve_paths_with_tanka, + ResolvePathsWithTankaMode::True + ); + } + + #[test] + fn test_parse_tanka_mode_string() { + let json = serde_json::json!({ + "resolvePathsWithTanka": "auto" + }); + + let config = ServerConfig::from_initialization_options(Some(json)); + assert_eq!( + config.resolve_paths_with_tanka, + ResolvePathsWithTankaMode::Auto + ); + } + + #[test] + fn test_parse_tanka_mode_legacy_bool_false() { + let json = serde_json::json!({ + "tankaMode": false + }); + + let config = ServerConfig::from_initialization_options(Some(json)); + assert_eq!( + 
config.resolve_paths_with_tanka, + ResolvePathsWithTankaMode::False + ); } #[test] @@ -398,6 +507,33 @@ mod tests { assert!(!config.enable_eval_diagnostics); } + #[test] + fn test_update_from_settings_updates_tanka_mode() { + let mut config = ServerConfig::new(); + assert_eq!( + config.resolve_paths_with_tanka, + ResolvePathsWithTankaMode::Auto + ); + + let settings = serde_json::json!({ + "resolvePathsWithTanka": "true" + }); + assert!(config.update_from_settings(settings)); + assert_eq!( + config.resolve_paths_with_tanka, + ResolvePathsWithTankaMode::True + ); + + let settings = serde_json::json!({ + "resolvePathsWithTanka": false + }); + assert!(config.update_from_settings(settings)); + assert_eq!( + config.resolve_paths_with_tanka, + ResolvePathsWithTankaMode::False + ); + } + #[test] fn test_code_action_config_from_initialization_options() { let json = serde_json::json!({ diff --git a/crates/jrsonnet-lsp/src/server/async_requests/commands/eval.rs b/crates/jrsonnet-lsp/src/server/async_requests/commands/eval.rs index f56fb2f9..4a86f7ab 100644 --- a/crates/jrsonnet-lsp/src/server/async_requests/commands/eval.rs +++ b/crates/jrsonnet-lsp/src/server/async_requests/commands/eval.rs @@ -99,7 +99,7 @@ impl AsyncRequestContext { &config.jpath, config.resolve_paths_with_tanka, ); - if !config.resolve_paths_with_tanka { + if !config.resolve_paths_with_tanka.is_enabled() { if let Some(dir) = base_path.as_path().parent() { if !roots.iter().any(|entry| entry == dir) { roots.push(dir.to_path_buf()); From db2ee6073d179afbc0bb967cdaf17cb665b91fa3 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Thu, 19 Feb 2026 13:19:11 +0000 Subject: [PATCH 167/210] docs(lsp): add user-facing README with neovim setup --- docs/lsp/README.md | 224 +++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 224 insertions(+) create mode 100644 docs/lsp/README.md diff --git a/docs/lsp/README.md b/docs/lsp/README.md new file mode 100644 index 00000000..4514c726 --- /dev/null +++ 
b/docs/lsp/README.md @@ -0,0 +1,224 @@ +# jrsonnet-lsp + +Rustanka includes a Jsonnet Language Server (`jrsonnet-lsp`) that adds editor +features like navigation, completion, diagnostics, and formatting over LSP. This +README is a practical guide to what it supports and how to configure it. + +For internal architecture details, see: + +- `docs/lsp/ARCHITECTURE.md` +- `docs/lsp/HANDLERS.md` +- `docs/lsp/TYPE_SYSTEM.md` + +## Running the server + +The server speaks LSP over stdio: + +```bash +jrsonnet-lsp --log-level info +``` + +## Capabilities + +Advertised LSP features: + +- Text sync: open/close, incremental changes, save. +- Navigation: `definition`, `declaration`, `typeDefinition`, `implementation`. +- Info and discovery: `hover`, `documentSymbol`, `workspace/symbol`, + `documentHighlight`. +- Editing helpers: `completion` (trigger `.`), `signatureHelp` (triggers `(` and + `,`), `rename` (with `prepareRename`), `references`. +- In-editor metadata: `inlayHint`, `semanticTokens/full`, + `semanticTokens/range`, `codeLens` (+ `codeLens/resolve`). +- Actions: `codeAction` (`quickfix`, `source.fixAll`), `formatting`. +- Commands via `workspace/executeCommand`: + - `jrsonnet.evalFile` + - `jrsonnet.evalExpression` + - `jrsonnet.findTransitiveImporters` + - `jrsonnet.findReferences` + - `jrsonnet.showErrors` + +When the client supports dynamic watched-file registration, the server also +registers watchers for: + +- `**/*.jsonnet` +- `**/*.libsonnet` +- `**/*.json` + +## Configuration + +Configuration is accepted from: + +- `initialize.initializationOptions` +- `workspace/didChangeConfiguration` + +`didChangeConfiguration` can be either: + +- Flat settings object, or +- Nested under `jsonnet` or `jsonnet-language-server`. 
+ +Top-level options: + +| Key | Type | Default | Accepted values and notes | +| -------------------------- | ---------------- | ------------------------------------------------------------------ | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `jpath` | `string[]` | `[]` | Import search paths (same idea as `jsonnet -J`). | +| `ext_vars` | `object` | `{}` | Map of external string vars. Aliases: `extVars`, `ext_vars`. | +| `ext_code` | `object` | `{}` | Map of external code vars. Aliases: `extCode`, `ext_code`. | +| `enable_eval_diagnostics` | `boolean` | `false` | Aliases: `enableEvalDiagnostics`, `eval`. | +| `enable_lint_diagnostics` | `boolean` | `false` | Aliases: `enableLintDiagnostics`, `lint`. | +| `resolve_paths_with_tanka` | `string` | `"auto"` | Modes: "false", "auto", "true". Booleans are also accepted (`false` -> "false", `true` -> "true"). Aliases: `resolvePathsWithTanka`, `tankaMode`. | +| `formatting` | `object` | `{}` | Formatting options, see below. | +| `code_actions` | `object` | `{ "removeUnused": "all", "removeUnusedComments": "none" }` | Alias: `codeActions`. | +| `inlay_hints` | `object` | `{ "local": "all", "objectLocal": "all", "objectMembers": "off" }` | Alias: `inlayHints`. | +| `log_level` | `string \| null` | `null` | Alias: `logLevel`. Intended values are standard log levels (for example `error`, `warn`, `info`, `debug`). Currently this value is parsed/stored but runtime logging is still controlled by process startup flags/env. 
| + +### `code_actions` + +| Key | Default | Accepted values | +| ---------------------- | ------- | -------------------------------------------- | +| `removeUnused` | `all` | `all`, `importBindings`, `nonImportBindings` | +| `removeUnusedComments` | `none` | `none`, `above`, `below`, `all` | + +### `inlay_hints` + +| Key | Default | Accepted values | +| --------------- | ------- | -------------------------------------- | +| `local` | `all` | `off`, `variables`, `functions`, `all` | +| `objectLocal` | `all` | `off`, `variables`, `functions`, `all` | +| `objectMembers` | `off` | `off`, `fields`, `methods`, `all` | + +### `formatting` + +If a field is omitted, the formatter's own default is used. + +`formatter_path` takes precedence over `formatter_engine`. + +`formatter_engine` values: + +- `path` (default; aliases: `auto`, `auto-path`): try `jrsonnet-fmt`, then + `jsonnetfmt` from `PATH`. +- `bin-jsonnetfmt-stdio` (aliases: `workspace-jsonnetfmt-stdio`, + `workspaceBinJsonnetfmtStdio`): run + `{workspaceRoot}/bin/jsonnetfmt -stdio `. 
+ +Formatting fields: + +| Key | Type | Default when unset | Accepted values | +| ------------------------ | --------- | ----------------------------------------- | ---------------------------------------------------------------------------------- | +| `indent` | `number` | formatter default (`2` in jsonnetfmt) | non-negative integer | +| `max_blank_lines` | `number` | formatter default (`2` in jsonnetfmt) | non-negative integer | +| `string_style` | `string` | formatter default (`leave` in jsonnetfmt) | `double`, `single`, `leave` (also accepts `d`/`s`; other values behave as `leave`) | +| `comment_style` | `string` | formatter default (`leave` in jsonnetfmt) | `hash`, `slash`, `leave` (also accepts `h`/`s`; other values behave as `leave`) | +| `pad_arrays` | `boolean` | formatter default (`false` in jsonnetfmt) | `true`/`false` | +| `pad_objects` | `boolean` | formatter default (`true` in jsonnetfmt) | `true`/`false` | +| `pretty_field_names` | `boolean` | formatter default (`true` in jsonnetfmt) | `true`/`false` | +| `sort_imports` | `boolean` | formatter default (`true` in jsonnetfmt) | `true`/`false` | +| `use_implicit_plus` | `boolean` | formatter default (`true` in jsonnetfmt) | `true`/`false` | +| `strip_everything` | `boolean` | formatter default (`false` in jsonnetfmt) | `true`/`false` | +| `strip_comments` | `boolean` | formatter default (`false` in jsonnetfmt) | `true`/`false` | +| `strip_all_but_comments` | `boolean` | formatter default (`false` in jsonnetfmt) | `true`/`false` | +| `formatter_path` | `string` | unset | path to formatter binary | +| `formatter_engine` | `string` | `path` | see values above | + +Formatting keys also accept legacy go-jsonnet style aliases: + +- `Indent`, `MaxBlankLines`, `StringStyle`, `CommentStyle` +- `PadArrays`, `PadObjects`, `PrettyFieldNames`, `SortImports`, + `UseImplicitPlus` +- `StripEverything`, `StripComments`, `StripAllButComments` +- `FormatterPath`, `FormatterEngine` + +## Example + +```json +{ + "jsonnet": 
{ + "jpath": ["vendor", "lib"], + "enableEvalDiagnostics": false, + "enableLintDiagnostics": true, + "resolvePathsWithTanka": "auto", + "codeActions": { + "removeUnused": "all", + "removeUnusedComments": "none" + }, + "inlayHints": { + "local": "all", + "objectLocal": "all", + "objectMembers": "fields" + }, + "formatting": { + "indent": 2, + "string_style": "leave", + "formatter_engine": "path" + } + } +} +``` + +### Neovim 0.11+ configuration + +Neovim 0.11+ uses built-in `vim.lsp.config(...)` to define/extend a server +config, then `vim.lsp.enable(...)` to activate it for matching buffers. +`nvim-lspconfig` is still useful for shipping server config files, but +`require('lspconfig').*.setup{}` is deprecated in favor of this flow. + +```lua +-- init.lua +-- Practical Neovim 0.11+ setup. +vim.lsp.config("jrsonnet_lsp", { + -- Neovim options most people set: + cmd = { "jrsonnet-lsp" }, + filetypes = { "jsonnet", "libsonnet" }, + root_markers = { "jsonnetfile.json", ".git" }, + -- Optional custom root detection: + -- root_dir = function(bufnr, on_dir) + -- local root = vim.fs.root(bufnr, { "jsonnetfile.json", ".git" }) + -- if root then + -- on_dir(root) + -- end + -- end, + + -- jrsonnet-lsp initializationOptions: + -- init_options = { + -- jpath = {}, -- string[] + -- extVars = {}, -- map; aliases: extVars/ext_vars + -- extCode = {}, -- map; aliases: extCode/ext_code + -- enableEvalDiagnostics = false, -- boolean; aliases: enableEvalDiagnostics/eval + -- enableLintDiagnostics = false, -- boolean; aliases: enableLintDiagnostics/lint + -- resolvePathsWithTanka = "auto", -- "false"|"auto"|"true"; booleans also accepted; aliases: resolvePathsWithTanka/tankaMode + -- formatting = { + -- indent = 2, -- integer >= 0 | nil + -- max_blank_lines = 2, -- integer >= 0 | nil + -- string_style = "leave", -- "double"|"single"|"leave"|"d"|"s"|nil + -- comment_style = "leave", -- "hash"|"slash"|"leave"|"h"|"s"|nil + -- pad_arrays = false, -- boolean|nil + -- pad_objects = true, -- 
boolean|nil + -- pretty_field_names = true, -- boolean|nil + -- sort_imports = true, -- boolean|nil + -- use_implicit_plus = true, -- boolean|nil + -- strip_everything = false, -- boolean|nil + -- strip_comments = false, -- boolean|nil + -- strip_all_but_comments = false, -- boolean|nil + -- formatter_path = nil, -- string|nil + -- formatter_engine = "path", -- "path"|"auto"|"auto-path"|"bin-jsonnetfmt-stdio"|"workspace-jsonnetfmt-stdio"|"workspaceBinJsonnetfmtStdio"|nil + -- }, + -- codeActions = { + -- removeUnused = "all", -- "all"|"importBindings"|"nonImportBindings" + -- removeUnusedComments = "none", -- "none"|"above"|"below"|"all" + -- }, + -- inlayHints = { + -- ["local"] = "all", -- "off"|"variables"|"functions"|"all" (note: `local` is a Lua keyword, so the key must be bracketed) + -- objectLocal = "all", -- "off"|"variables"|"functions"|"all" + -- objectMembers = "off", -- "off"|"fields"|"methods"|"all" + -- }, + -- logLevel = nil, -- string|nil (for example: "error"|"warn"|"info"|"debug") + -- }, + + -- Same keys are also accepted via didChangeConfiguration. + -- `settings` can be flat, or nested under: + -- settings = { jsonnet = { ... } } + -- settings = { ["jsonnet-language-server"] = { ... } } +}) + +vim.lsp.enable("jrsonnet_lsp") +``` From 08fafd95e4670862e3a5d066fce2dac39bdec9a3 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Wed, 25 Feb 2026 10:18:07 +0000 Subject: [PATCH 168/210] feat(scenario): add custom request and code-lens execute steps Add first-class scenario steps for requestCustom/expectCustom and requestExecuteCodeLens/expectExecuteCodeLens. Extend the compiler, request registry, and runner so custom request ids are tracked like other request kinds, and code-lens execution can target a command from a prior requestCodeLens response by index. Also add recursive { file: "..." } URI shorthand resolution inside JSON params and execute-command argument payloads so scenario scripts can stay path-relative and concise. 
--- .../jrsonnet-lsp-scenario/src/scenario/mod.rs | 21 +-- .../src/scenario/request_steps.rs | 35 +++++ .../src/scenario_runner/errors.rs | 9 ++ .../src/scenario_runner/expectation_steps.rs | 52 +++++++- .../src/scenario_runner/request_steps.rs | 97 +++++++++++++- .../src/scenario_runner/runner.rs | 8 ++ .../src/scenario_script/compile.rs | 120 +++++++++++++++--- .../src/scenario_script/inputs.rs | 56 ++++++++ .../src/scenario_script/registry.rs | 28 ++++ 9 files changed, 392 insertions(+), 34 deletions(-) diff --git a/crates/jrsonnet-lsp-scenario/src/scenario/mod.rs b/crates/jrsonnet-lsp-scenario/src/scenario/mod.rs index a7a05728..595f9865 100644 --- a/crates/jrsonnet-lsp-scenario/src/scenario/mod.rs +++ b/crates/jrsonnet-lsp-scenario/src/scenario/mod.rs @@ -45,15 +45,16 @@ use lsp_types::{ TextDocumentContentChangeEvent, TextEdit, WorkspaceEdit, WorkspaceSymbolResponse, }; pub use request_steps::{ - ExpectCodeActionStep, ExpectCodeLensStep, ExpectCompletionStep, ExpectDeclarationStep, - ExpectDefinitionStep, ExpectDocumentSymbolStep, ExpectExecuteCommandStep, ExpectFormattingStep, - ExpectHoverStep, ExpectInlayHintsStep, ExpectPrepareRenameStep, ExpectReferencesStep, - ExpectRenameStep, ExpectSemanticTokensFullStep, ExpectSemanticTokensRangeStep, - ExpectSignatureHelpStep, ExpectTypeDefinitionStep, ExpectWorkspaceSymbolStep, - HoverSectionExpectation, RequestCodeActionStep, RequestCodeLensStep, RequestCompletionStep, + ExpectCodeActionStep, ExpectCodeLensStep, ExpectCompletionStep, ExpectCustomStep, + ExpectDeclarationStep, ExpectDefinitionStep, ExpectDocumentSymbolStep, + ExpectExecuteCodeLensStep, ExpectExecuteCommandStep, ExpectFormattingStep, ExpectHoverStep, + ExpectInlayHintsStep, ExpectPrepareRenameStep, ExpectReferencesStep, ExpectRenameStep, + ExpectSemanticTokensFullStep, ExpectSemanticTokensRangeStep, ExpectSignatureHelpStep, + ExpectTypeDefinitionStep, ExpectWorkspaceSymbolStep, HoverSectionExpectation, + RequestCodeActionStep, RequestCodeLensStep, 
RequestCompletionStep, RequestCustomStep, RequestDeclarationStep, RequestDefinitionStep, RequestDocumentSymbolStep, - RequestExecuteCommandStep, RequestFormattingStep, RequestHoverStep, RequestInlayHintsStep, - RequestPrepareRenameStep, RequestReferencesStep, RequestRenameStep, + RequestExecuteCodeLensStep, RequestExecuteCommandStep, RequestFormattingStep, RequestHoverStep, + RequestInlayHintsStep, RequestPrepareRenameStep, RequestReferencesStep, RequestRenameStep, RequestSemanticTokensFullStep, RequestSemanticTokensRangeStep, RequestSignatureHelpStep, RequestTypeDefinitionStep, RequestWorkspaceSymbolStep, }; @@ -124,6 +125,10 @@ pub enum ScenarioStep { ExpectCodeLens(ExpectCodeLensStep), RequestExecuteCommand(RequestExecuteCommandStep), ExpectExecuteCommand(ExpectExecuteCommandStep), + RequestExecuteCodeLens(RequestExecuteCodeLensStep), + ExpectExecuteCodeLens(ExpectExecuteCodeLensStep), + RequestCustom(RequestCustomStep), + ExpectCustom(ExpectCustomStep), ExpectDiagnostics(ExpectDiagnosticsStep), DiagnosticsSettled(DiagnosticsSettledStep), } diff --git a/crates/jrsonnet-lsp-scenario/src/scenario/request_steps.rs b/crates/jrsonnet-lsp-scenario/src/scenario/request_steps.rs index 2f61e3b4..8f2d2dbd 100644 --- a/crates/jrsonnet-lsp-scenario/src/scenario/request_steps.rs +++ b/crates/jrsonnet-lsp-scenario/src/scenario/request_steps.rs @@ -1212,3 +1212,38 @@ pub struct ExpectExecuteCommandStep { pub(crate) id: i32, pub result: Option, } + +/// Execute one command from a prior `requestCodeLens` response. +/// +/// This step pulls the command at `index` from the referenced code-lens +/// response and dispatches it via `workspace/executeCommand`. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct RequestExecuteCodeLensStep { + pub(crate) id: i32, + pub code_lens_request_id: i32, + pub index: usize, +} + +/// Expected response for a preceding `requestExecuteCodeLens`. 
+#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ExpectExecuteCodeLensStep { + pub(crate) id: i32, + pub result: Option, +} + +/// Generic custom request for non-standard methods. +/// +/// Sends an arbitrary request method and JSON params payload. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct RequestCustomStep { + pub(crate) id: i32, + pub method: String, + pub params: serde_json::Value, +} + +/// Expected response for a preceding `requestCustom`. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ExpectCustomStep { + pub(crate) id: i32, + pub result: Option, +} diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_runner/errors.rs b/crates/jrsonnet-lsp-scenario/src/scenario_runner/errors.rs index ff9ccf6b..3e37ca32 100644 --- a/crates/jrsonnet-lsp-scenario/src/scenario_runner/errors.rs +++ b/crates/jrsonnet-lsp-scenario/src/scenario_runner/errors.rs @@ -4,6 +4,7 @@ use super::{ document_steps::FilesystemError, expectation_steps::AssertionError, helpers::UriError, + request_steps::RequestStepError, transport::{RpcError, SerdeError, TransportError}, }; @@ -20,6 +21,8 @@ pub enum RunnerError { #[error(transparent)] Assertion(Box), #[error(transparent)] + RequestStep(Box), + #[error(transparent)] Filesystem(Box), #[error("server thread panicked")] ServerThreadPanicked, @@ -57,6 +60,12 @@ impl From for RunnerError { } } +impl From for RunnerError { + fn from(error: RequestStepError) -> Self { + Self::RequestStep(Box::new(error)) + } +} + impl From for RunnerError { fn from(error: FilesystemError) -> Self { Self::Filesystem(Box::new(error)) diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_runner/expectation_steps.rs b/crates/jrsonnet-lsp-scenario/src/scenario_runner/expectation_steps.rs index 0fd97053..0f3e576c 100644 --- a/crates/jrsonnet-lsp-scenario/src/scenario_runner/expectation_steps.rs +++ b/crates/jrsonnet-lsp-scenario/src/scenario_runner/expectation_steps.rs @@ -11,9 +11,10 @@ use super::{ }; use crate::scenario::{ DiagnosticsSettledStep, 
ExpectCodeActionStep, ExpectCodeLensStep, ExpectCompletionStep, - ExpectDeclarationStep, ExpectDefinitionStep, ExpectDiagnosticsStep, ExpectDocumentSymbolStep, - ExpectExecuteCommandStep, ExpectFormattingStep, ExpectHoverStep, ExpectInlayHintsStep, - ExpectPrepareRenameStep, ExpectReferencesStep, ExpectRenameStep, ExpectSemanticTokensFullStep, + ExpectCustomStep, ExpectDeclarationStep, ExpectDefinitionStep, ExpectDiagnosticsStep, + ExpectDocumentSymbolStep, ExpectExecuteCodeLensStep, ExpectExecuteCommandStep, + ExpectFormattingStep, ExpectHoverStep, ExpectInlayHintsStep, ExpectPrepareRenameStep, + ExpectReferencesStep, ExpectRenameStep, ExpectSemanticTokensFullStep, ExpectSemanticTokensRangeStep, ExpectSignatureHelpStep, ExpectTypeDefinitionStep, ExpectWorkspaceSymbolStep, }; @@ -285,6 +286,51 @@ impl ScenarioRunner { Ok(()) } + pub(super) fn step_expect_custom(&mut self, step: &ExpectCustomStep) -> RunnerResult<()> { + let response = self.wait_response(step.id, REQUEST_TIMEOUT)?; + if let Some(error) = response.error { + return Err(RpcError::ResponseReturnedError { + method: "customRequest", + id: step.id, + error, + } + .into()); + } + if response.result != step.result { + return Self::response_mismatch( + "customRequest", + step.id, + &response.result, + &step.result, + ); + } + Ok(()) + } + + pub(super) fn step_expect_execute_code_lens( + &mut self, + step: &ExpectExecuteCodeLensStep, + ) -> RunnerResult<()> { + let response = self.wait_response(step.id, REQUEST_TIMEOUT)?; + if let Some(error) = response.error { + return Err(RpcError::ResponseReturnedError { + method: "executeCodeLens", + id: step.id, + error, + } + .into()); + } + if response.result != step.result { + return Self::response_mismatch( + "executeCodeLens", + step.id, + &response.result, + &step.result, + ); + } + Ok(()) + } + pub(super) fn step_expect_diagnostics( &mut self, step: &ExpectDiagnosticsStep, diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_runner/request_steps.rs 
b/crates/jrsonnet-lsp-scenario/src/scenario_runner/request_steps.rs index f8eddf16..d1005250 100644 --- a/crates/jrsonnet-lsp-scenario/src/scenario_runner/request_steps.rs +++ b/crates/jrsonnet-lsp-scenario/src/scenario_runner/request_steps.rs @@ -15,17 +15,36 @@ use lsp_types::{ TextDocumentIdentifier, TextDocumentPositionParams, WorkDoneProgressParams, WorkspaceSymbolParams, }; +use thiserror::Error; -use super::{helpers::parse_uri, RunnerResult, ScenarioRunner}; +use super::{ + helpers::parse_uri, + transport::{RpcError, SerdeError}, + RunnerResult, ScenarioRunner, +}; use crate::scenario::{ - RequestCodeActionStep, RequestCodeLensStep, RequestCompletionStep, RequestDeclarationStep, - RequestDefinitionStep, RequestDocumentSymbolStep, RequestExecuteCommandStep, - RequestFormattingStep, RequestHoverStep, RequestInlayHintsStep, RequestPrepareRenameStep, - RequestReferencesStep, RequestRenameStep, RequestSemanticTokensFullStep, - RequestSemanticTokensRangeStep, RequestSignatureHelpStep, RequestTypeDefinitionStep, - RequestWorkspaceSymbolStep, + RequestCodeActionStep, RequestCodeLensStep, RequestCompletionStep, RequestCustomStep, + RequestDeclarationStep, RequestDefinitionStep, RequestDocumentSymbolStep, + RequestExecuteCodeLensStep, RequestExecuteCommandStep, RequestFormattingStep, RequestHoverStep, + RequestInlayHintsStep, RequestPrepareRenameStep, RequestReferencesStep, RequestRenameStep, + RequestSemanticTokensFullStep, RequestSemanticTokensRangeStep, RequestSignatureHelpStep, + RequestTypeDefinitionStep, RequestWorkspaceSymbolStep, }; +#[derive(Debug, Error)] +pub enum RequestStepError { + #[error("codeLens request id {request_id} returned null result")] + MissingResult { request_id: i32 }, + #[error("codeLens request id {request_id} returned no lens at index {index} (len={len})")] + IndexOutOfBounds { + request_id: i32, + index: usize, + len: usize, + }, + #[error("codeLens request id {request_id} lens index {index} has no command")] + MissingCommand { 
request_id: i32, index: usize }, +} + fn text_document_position_params( uri: lsp_types::Uri, position: lsp_types::Position, @@ -294,4 +313,68 @@ impl ScenarioRunner { }; self.send_request_with_params(step.id, ExecuteCommand::METHOD, params, "executeCommand") } + + pub(super) fn step_request_execute_code_lens( + &mut self, + step: &RequestExecuteCodeLensStep, + ) -> RunnerResult<()> { + let response = self.wait_response(step.code_lens_request_id, super::REQUEST_TIMEOUT)?; + if let Some(error) = response.error { + return Err(RpcError::ResponseReturnedError { + method: "codeLens", + id: step.code_lens_request_id, + error, + } + .into()); + } + + let code_lenses: Option> = response.result.clone().map_or_else( + || Ok(None), + |value| { + serde_json::from_value(value).map(Some).map_err(|source| { + SerdeError::DeserializeResponseResult { + method: "codeLens", + id: step.code_lens_request_id, + source, + } + }) + }, + )?; + + // Keep response available for an expectCodeLens step. + self.pending_responses.push(response); + + let Some(code_lenses) = code_lenses else { + return Err(RequestStepError::MissingResult { + request_id: step.code_lens_request_id, + } + .into()); + }; + let Some(code_lens) = code_lenses.get(step.index) else { + return Err(RequestStepError::IndexOutOfBounds { + request_id: step.code_lens_request_id, + index: step.index, + len: code_lenses.len(), + } + .into()); + }; + let Some(command) = &code_lens.command else { + return Err(RequestStepError::MissingCommand { + request_id: step.code_lens_request_id, + index: step.index, + } + .into()); + }; + + let params = ExecuteCommandParams { + command: command.command.clone(), + arguments: command.arguments.clone().unwrap_or_default(), + work_done_progress_params: WorkDoneProgressParams::default(), + }; + self.send_request_with_params(step.id, ExecuteCommand::METHOD, params, "executeCodeLens") + } + + pub(super) fn step_request_custom(&self, step: &RequestCustomStep) -> RunnerResult<()> { + 
self.send_request_with_params(step.id, &step.method, &step.params, "customRequest") + } } diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_runner/runner.rs b/crates/jrsonnet-lsp-scenario/src/scenario_runner/runner.rs index a0b14e11..5c1b2e9e 100644 --- a/crates/jrsonnet-lsp-scenario/src/scenario_runner/runner.rs +++ b/crates/jrsonnet-lsp-scenario/src/scenario_runner/runner.rs @@ -194,6 +194,14 @@ impl ScenarioRunner { ScenarioStep::ExpectExecuteCommand(expectation) => { self.step_expect_execute_command(expectation) } + ScenarioStep::RequestExecuteCodeLens(request) => { + self.step_request_execute_code_lens(request) + } + ScenarioStep::ExpectExecuteCodeLens(expectation) => { + self.step_expect_execute_code_lens(expectation) + } + ScenarioStep::RequestCustom(request) => self.step_request_custom(request), + ScenarioStep::ExpectCustom(expectation) => self.step_expect_custom(expectation), ScenarioStep::ExpectDiagnostics(expectation) => { self.step_expect_diagnostics(expectation) } diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_script/compile.rs b/crates/jrsonnet-lsp-scenario/src/scenario_script/compile.rs index aba122bb..d3deb55f 100644 --- a/crates/jrsonnet-lsp-scenario/src/scenario_script/compile.rs +++ b/crates/jrsonnet-lsp-scenario/src/scenario_script/compile.rs @@ -19,11 +19,11 @@ use thiserror::Error; use super::{ inputs::{ - CodeActionOrCommandInput, CodeLensInput, DiagnosticInput, ExpectCompletionScriptStep, - ExpectDocumentSymbolScriptStep, ExpectExecuteCommandScriptStep, ExpectFormattingScriptStep, - ExpectHoverScriptStep, ExpectPrepareRenameScriptStep, ExpectSignatureHelpScriptStep, - GotoDefinitionResponseInput, InlayHintInput, InputError, LocationInput, - SemanticTokensResultInput, WorkspaceEditInput, WorkspaceSymbolResponseInput, + resolve_file_uri_shorthand_json, CodeActionOrCommandInput, CodeLensInput, DiagnosticInput, + ExpectCompletionScriptStep, ExpectDocumentSymbolScriptStep, ExpectExecuteCommandScriptStep, + ExpectFormattingScriptStep, 
ExpectHoverScriptStep, ExpectPrepareRenameScriptStep, + ExpectSignatureHelpScriptStep, GotoDefinitionResponseInput, InlayHintInput, InputError, + LocationInput, SemanticTokensResultInput, WorkspaceEditInput, WorkspaceSymbolResponseInput, }, markers::{MarkerError, MarkerStore, PositionSpec, RangeInput}, paths::{file_path, file_uri}, @@ -32,18 +32,19 @@ use super::{ use crate::scenario::{ ChangeFullStep, ChangeIncrementalStep, CloseStep, ConfigStep, DeleteFileStep, DiagnosticsSettledStep, ExpectCodeActionStep, ExpectCodeLensStep, ExpectCompletionStep, - ExpectDeclarationStep, ExpectDefinitionStep, ExpectDiagnosticsStep, ExpectDocumentSymbolStep, - ExpectExecuteCommandStep, ExpectFormattingStep, ExpectHoverStep, ExpectInlayHintsStep, - ExpectPrepareRenameStep, ExpectReferencesStep, ExpectRenameStep, ExpectSemanticTokensFullStep, + ExpectCustomStep, ExpectDeclarationStep, ExpectDefinitionStep, ExpectDiagnosticsStep, + ExpectDocumentSymbolStep, ExpectExecuteCodeLensStep, ExpectExecuteCommandStep, + ExpectFormattingStep, ExpectHoverStep, ExpectInlayHintsStep, ExpectPrepareRenameStep, + ExpectReferencesStep, ExpectRenameStep, ExpectSemanticTokensFullStep, ExpectSemanticTokensRangeStep, ExpectSignatureHelpStep, ExpectTypeDefinitionStep, ExpectWorkspaceSymbolStep, HoverSectionExpectation, NotifyWatchedFilesStep, OpenStep, - RequestCodeActionStep, RequestCodeLensStep, RequestCompletionStep, RequestDeclarationStep, - RequestDefinitionStep, RequestDocumentSymbolStep, RequestExecuteCommandStep, - RequestFormattingStep, RequestHoverStep, RequestInlayHintsStep, RequestPrepareRenameStep, - RequestReferencesStep, RequestRenameStep, RequestSemanticTokensFullStep, - RequestSemanticTokensRangeStep, RequestSignatureHelpStep, RequestTypeDefinitionStep, - RequestWorkspaceSymbolStep, SaveStep, Scenario, ScenarioFileChangeType, ScenarioStep, - WatchedFileChangeStep, WriteFileStep, + RequestCodeActionStep, RequestCodeLensStep, RequestCompletionStep, RequestCustomStep, + 
RequestDeclarationStep, RequestDefinitionStep, RequestDocumentSymbolStep, + RequestExecuteCodeLensStep, RequestExecuteCommandStep, RequestFormattingStep, RequestHoverStep, + RequestInlayHintsStep, RequestPrepareRenameStep, RequestReferencesStep, RequestRenameStep, + RequestSemanticTokensFullStep, RequestSemanticTokensRangeStep, RequestSignatureHelpStep, + RequestTypeDefinitionStep, RequestWorkspaceSymbolStep, SaveStep, Scenario, + ScenarioFileChangeType, ScenarioStep, WatchedFileChangeStep, WriteFileStep, }; /// Parsed YAML root object for one scenario script file. @@ -734,12 +735,23 @@ impl ScenarioScript { })] } ScenarioScriptStep::RequestExecuteCommand(step) => { + let arguments = step + .arguments + .into_iter() + .map(|argument| { + resolve_file_uri_shorthand_json( + argument, + base_dir, + "requestExecuteCommand.arguments", + ) + }) + .collect::, _>>()?; vec![ScenarioStep::RequestExecuteCommand( RequestExecuteCommandStep { id: registry .allocate(RequestKind::ExecuteCommand, step.request_name)?, command: step.command, - arguments: step.arguments, + arguments, }, )] } @@ -752,6 +764,45 @@ impl ScenarioScript { }, )] } + ScenarioScriptStep::RequestExecuteCodeLens(step) => { + let code_lens_request_id = + registry.resolve(RequestKind::CodeLens, step.request.as_str())?; + vec![ScenarioStep::RequestExecuteCodeLens( + RequestExecuteCodeLensStep { + id: registry + .allocate(RequestKind::ExecuteCodeLens, step.request_name)?, + code_lens_request_id, + index: step.index, + }, + )] + } + ScenarioScriptStep::ExpectExecuteCodeLens(step) => { + vec![ScenarioStep::ExpectExecuteCodeLens( + ExpectExecuteCodeLensStep { + id: registry + .claim(RequestKind::ExecuteCodeLens, step.request.as_str())?, + result: step.result, + }, + )] + } + ScenarioScriptStep::RequestCustom(step) => { + let params = resolve_file_uri_shorthand_json( + step.params, + base_dir, + "requestCustom.params", + )?; + vec![ScenarioStep::RequestCustom(RequestCustomStep { + id: 
registry.allocate(RequestKind::Custom, step.request_name)?, + method: step.method, + params, + })] + } + ScenarioScriptStep::ExpectCustom(step) => { + vec![ScenarioStep::ExpectCustom(ExpectCustomStep { + id: registry.claim(RequestKind::Custom, step.request.as_str())?, + result: step.result, + })] + } ScenarioScriptStep::ExpectDiagnostics(step) => { let diagnostics = step .diagnostics @@ -887,6 +938,10 @@ enum ScenarioScriptStep { ExpectCodeLens(ExpectCodeLensScriptStep), RequestExecuteCommand(RequestExecuteCommandScriptStep), ExpectExecuteCommand(ExpectExecuteCommandScriptStep), + RequestExecuteCodeLens(RequestExecuteCodeLensScriptStep), + ExpectExecuteCodeLens(ExpectExecuteCodeLensScriptStep), + RequestCustom(RequestCustomScriptStep), + ExpectCustom(ExpectCustomScriptStep), ExpectDiagnostics(ExpectDiagnosticsScriptStep), DiagnosticsSettled(DiagnosticsSettledScriptStep), } @@ -1278,6 +1333,39 @@ struct RequestExecuteCommandScriptStep { arguments: Vec, } +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct RequestExecuteCodeLensScriptStep { + #[serde(default, rename = "as")] + request_name: Option, + request: String, + index: usize, +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct ExpectExecuteCodeLensScriptStep { + request: String, + result: Option, +} + +#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(deny_unknown_fields)] +struct RequestCustomScriptStep { + #[serde(default, rename = "as")] + request_name: Option, + method: String, + #[serde(default)] + params: serde_json::Value, +} + +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct ExpectCustomScriptStep { + request: String, + result: Option, +} + #[derive(Debug, Clone, PartialEq, Deserialize)] #[serde(deny_unknown_fields)] struct ExpectDiagnosticsScriptStep { diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_script/inputs.rs 
b/crates/jrsonnet-lsp-scenario/src/scenario_script/inputs.rs index 0ccba3d9..84075d82 100644 --- a/crates/jrsonnet-lsp-scenario/src/scenario_script/inputs.rs +++ b/crates/jrsonnet-lsp-scenario/src/scenario_script/inputs.rs @@ -650,6 +650,62 @@ impl CommandArgumentInput { } } +/// Resolve recursive `{ file: "" }` URI shorthands in JSON. +/// +/// Only object values with exactly one `file` key are rewritten to URI strings. +pub(super) fn resolve_file_uri_shorthand_json( + value: serde_json::Value, + base_dir: &Path, + context: &str, +) -> Result { + resolve_file_uri_shorthand_json_at(value, base_dir, context, "$") +} + +fn resolve_file_uri_shorthand_json_at( + value: serde_json::Value, + base_dir: &Path, + context: &str, + path: &str, +) -> Result { + match value { + serde_json::Value::Object(mut object) => { + if object.len() == 1 && object.contains_key("file") { + let file = object + .remove("file") + .expect("checked key existence before remove"); + let Some(file) = file.as_str() else { + return Err(input_err!("{context}: `{path}.file` must be a string")); + }; + return Ok(serde_json::Value::String(file_uri(base_dir, file))); + } + + let mut resolved = serde_json::Map::with_capacity(object.len()); + for (key, child) in object { + let child_path = format!("{path}.{key}"); + resolved.insert( + key, + resolve_file_uri_shorthand_json_at(child, base_dir, context, &child_path)?, + ); + } + Ok(serde_json::Value::Object(resolved)) + } + serde_json::Value::Array(values) => { + let mut resolved = Vec::with_capacity(values.len()); + for (index, child) in values.into_iter().enumerate() { + let child_path = format!("{path}[{index}]"); + resolved.push(resolve_file_uri_shorthand_json_at( + child, + base_dir, + context, + &child_path, + )?); + } + Ok(serde_json::Value::Array(resolved)) + } + _ => Ok(value), + } +} + /// Accept either a full semantic-token result or marker-driven shorthand. 
#[derive(Debug, Clone, PartialEq, Deserialize)] #[serde(untagged)] diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_script/registry.rs b/crates/jrsonnet-lsp-scenario/src/scenario_script/registry.rs index 67076259..0b86f631 100644 --- a/crates/jrsonnet-lsp-scenario/src/scenario_script/registry.rs +++ b/crates/jrsonnet-lsp-scenario/src/scenario_script/registry.rs @@ -32,6 +32,8 @@ pub enum RequestKind { WorkspaceSymbol, CodeLens, ExecuteCommand, + ExecuteCodeLens, + Custom, } impl fmt::Display for RequestKind { @@ -82,6 +84,8 @@ impl RequestKind { Self::WorkspaceSymbol => "requestWorkspaceSymbol", Self::CodeLens => "requestCodeLens", Self::ExecuteCommand => "requestExecuteCommand", + Self::ExecuteCodeLens => "requestExecuteCodeLens", + Self::Custom => "requestCustom", } } } @@ -166,4 +170,28 @@ impl RequestRegistry { }; Ok(claimed) } + + /// Resolve a named request ID without consuming it from pending queues. + pub(super) fn resolve( + &self, + kind: RequestKind, + name: &str, + ) -> Result { + let (named_kind, id) = + self.named + .get(name) + .copied() + .ok_or_else(|| RequestRegistryError::UnknownAlias { + name: name.to_string(), + kind, + })?; + if named_kind != kind { + return Err(RequestRegistryError::AliasKindMismatch { + name: name.to_string(), + alias_kind: named_kind, + requested_kind: kind, + }); + } + Ok(id) + } } From ca297b6e72652dcd756b5f6f6b24616217993dfe Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Wed, 25 Feb 2026 10:18:57 +0000 Subject: [PATCH 169/210] feat(lsp): add custom operation registry and migrate evalFile Introduce a server-side custom operation registry that owns operation metadata, request handlers, execute-command bridge handlers, and code-lens providers in one place. Move jrsonnet.evalFile off handler-side hardcoded code-lens generation and into this registry, then wire request dispatch and initialization to route through operation specs. 
Keep executeCommand compatibility by advertising registry-backed command ids alongside legacy command ids. Refactor custom-operations module layout so mod.rs is declarations only, with implementation split into dedicated files and typed handler aliases for clearer signatures. --- .../src/code_lens/actions.rs | 30 ----- .../src/code_lens/dispatch.rs | 62 +--------- crates/jrsonnet-lsp/src/server.rs | 4 +- .../src/server/async_requests/code_lens.rs | 5 +- .../async_requests/commands/dispatch.rs | 4 - .../server/async_requests/commands/eval.rs | 2 +- .../src/server/custom_operations/eval_file.rs | 113 ++++++++++++++++++ .../src/server/custom_operations/mod.rs | 8 ++ .../custom_operations/operation_spec.rs | 19 +++ .../src/server/custom_operations/registry.rs | 88 ++++++++++++++ .../jrsonnet-lsp/src/server/initialization.rs | 6 +- .../src/server/request_dispatch.rs | 34 +++++- .../jrsonnet-lsp/src/server/watched_files.rs | 3 +- 13 files changed, 274 insertions(+), 104 deletions(-) create mode 100644 crates/jrsonnet-lsp/src/server/custom_operations/eval_file.rs create mode 100644 crates/jrsonnet-lsp/src/server/custom_operations/mod.rs create mode 100644 crates/jrsonnet-lsp/src/server/custom_operations/operation_spec.rs create mode 100644 crates/jrsonnet-lsp/src/server/custom_operations/registry.rs diff --git a/crates/jrsonnet-lsp-handlers/src/code_lens/actions.rs b/crates/jrsonnet-lsp-handlers/src/code_lens/actions.rs index cd16ea06..a3532e8f 100644 --- a/crates/jrsonnet-lsp-handlers/src/code_lens/actions.rs +++ b/crates/jrsonnet-lsp-handlers/src/code_lens/actions.rs @@ -1,36 +1,6 @@ use jrsonnet_lsp_document::Document; use lsp_types::{CodeLens, Command, Range, Uri}; -/// Generate "Evaluate" code lens for the document root. 
-pub(super) fn evaluate_lens(document: &Document, uri: &Uri) -> Option { - let ast = document.ast(); - - // Only show evaluate lens if document has a root expression - ast.expr()?; - - // Place the lens at line 0 - let range = Range { - start: lsp_types::Position { - line: 0, - character: 0, - }, - end: lsp_types::Position { - line: 0, - character: 0, - }, - }; - - Some(CodeLens { - range, - command: Some(Command { - title: "Evaluate".to_string(), - command: "jrsonnet.evalFile".to_string(), - arguments: Some(vec![serde_json::json!(uri.to_string())]), - }), - data: None, - }) -} - /// Generate error status code lens for the document. /// /// Shows the number of syntax errors at the top of the file. diff --git a/crates/jrsonnet-lsp-handlers/src/code_lens/dispatch.rs b/crates/jrsonnet-lsp-handlers/src/code_lens/dispatch.rs index 1894f299..e7e23ddb 100644 --- a/crates/jrsonnet-lsp-handlers/src/code_lens/dispatch.rs +++ b/crates/jrsonnet-lsp-handlers/src/code_lens/dispatch.rs @@ -2,19 +2,13 @@ use jrsonnet_lsp_document::Document; use jrsonnet_lsp_inference::TypeAnalysis; use lsp_types::{CodeLens, Uri}; -use super::{ - actions::{error_status_lens, evaluate_lens}, - refs::reference_count_lenses, - type_lenses::type_lenses, -}; +use super::{actions::error_status_lens, refs::reference_count_lenses, type_lenses::type_lenses}; /// Configuration for code lens generation. #[derive(Debug, Clone, Default)] pub struct CodeLensConfig { /// Show reference counts for definitions. pub show_references: bool, - /// Show "Evaluate" action for executable files. - pub show_evaluate: bool, /// Show inferred types for function definitions. pub show_types: bool, /// Show error status at top of file. 
@@ -41,7 +35,6 @@ impl CodeLensConfig { pub fn all() -> Self { Self { show_references: true, - show_evaluate: true, show_types: true, show_errors: ErrorLensVisibility::Visible, } @@ -64,12 +57,6 @@ pub fn code_lens( lenses.extend(reference_count_lenses(document, uri)); } - if config.show_evaluate { - if let Some(lens) = evaluate_lens(document, uri) { - lenses.push(lens); - } - } - if config.show_types { if let Some(analysis) = analysis { lenses.extend(type_lenses(document, analysis)); @@ -163,27 +150,6 @@ mod tests { } } - fn expected_evaluate_lens(uri: &Uri) -> CodeLens { - CodeLens { - range: Range { - start: lsp_types::Position { - line: 0, - character: 0, - }, - end: lsp_types::Position { - line: 0, - character: 0, - }, - }, - command: Some(Command { - title: "Evaluate".to_string(), - command: "jrsonnet.evalFile".to_string(), - arguments: Some(vec![serde_json::to_value(uri.to_string()).unwrap()]), - }), - data: None, - } - } - fn expected_function_type_lens( doc: &Document, analysis: &TypeAnalysis, @@ -277,7 +243,6 @@ mod tests { let config = CodeLensConfig { show_references: true, - show_evaluate: false, show_types: false, show_errors: ErrorLensVisibility::Hidden, }; @@ -296,7 +261,6 @@ mod tests { let config = CodeLensConfig { show_references: true, - show_evaluate: false, show_types: false, show_errors: ErrorLensVisibility::Hidden, }; @@ -307,24 +271,6 @@ mod tests { assert!(lenses.is_empty()); } - #[test] - fn test_evaluate_lens() { - let code = "{ a: 1 }"; - let doc = Document::new(code.to_string(), DocVersion::new(1)); - let uri = make_uri("test"); - - let config = CodeLensConfig { - show_references: false, - show_evaluate: true, - show_types: false, - show_errors: ErrorLensVisibility::Hidden, - }; - - let lenses = code_lens(&doc, &uri, &config, None); - let expected = vec![expected_evaluate_lens(&uri)]; - assert_eq!(lenses, expected); - } - #[test] fn test_type_lens_for_function() { let code = "local add(a, b) = a + b; add(1, 2)"; @@ -334,7 +280,6 @@ mod 
tests { let config = CodeLensConfig { show_references: false, - show_evaluate: false, show_types: true, show_errors: ErrorLensVisibility::Hidden, }; @@ -353,7 +298,6 @@ mod tests { let config = CodeLensConfig { show_references: false, - show_evaluate: false, show_types: true, show_errors: ErrorLensVisibility::Hidden, }; @@ -376,7 +320,6 @@ mod tests { let expected = vec![ expected_reference_lens(&doc, &uri, "f"), expected_reference_lens(&doc, &uri, "x"), - expected_evaluate_lens(&uri), expected_function_type_lens(&doc, &analysis, "f"), ]; assert_eq!(lenses, expected); @@ -390,7 +333,6 @@ mod tests { let config = CodeLensConfig { show_references: true, - show_evaluate: false, show_types: false, show_errors: ErrorLensVisibility::Hidden, }; @@ -409,7 +351,6 @@ mod tests { let config = CodeLensConfig { show_references: false, - show_evaluate: false, show_types: false, show_errors: ErrorLensVisibility::Visible, }; @@ -446,7 +387,6 @@ mod tests { let config = CodeLensConfig { show_references: false, - show_evaluate: false, show_types: false, show_errors: ErrorLensVisibility::Visible, }; diff --git a/crates/jrsonnet-lsp/src/server.rs b/crates/jrsonnet-lsp/src/server.rs index 42477621..5a44ded3 100644 --- a/crates/jrsonnet-lsp/src/server.rs +++ b/crates/jrsonnet-lsp/src/server.rs @@ -4,6 +4,7 @@ //! Diagnostics are computed asynchronously with debouncing to avoid blocking the event loop. 
mod async_requests; +mod custom_operations; mod event_loop; mod import_graph; mod initialization; @@ -85,8 +86,7 @@ struct InitializeRoots { root_path: Option, } -const SUPPORTED_EXECUTE_COMMANDS: [&str; 5] = [ - "jrsonnet.evalFile", +const LEGACY_EXECUTE_COMMANDS: [&str; 4] = [ "jrsonnet.evalExpression", "jrsonnet.findTransitiveImporters", "jrsonnet.findReferences", diff --git a/crates/jrsonnet-lsp/src/server/async_requests/code_lens.rs b/crates/jrsonnet-lsp/src/server/async_requests/code_lens.rs index 6d3b57ee..c2eab16f 100644 --- a/crates/jrsonnet-lsp/src/server/async_requests/code_lens.rs +++ b/crates/jrsonnet-lsp/src/server/async_requests/code_lens.rs @@ -3,6 +3,7 @@ use jrsonnet_lsp_handlers as handlers; use lsp_types::{CodeLens, CodeLensParams}; use super::AsyncRequestContext; +use crate::server::custom_operations; impl AsyncRequestContext { pub(crate) fn code_lens(&self, params: &CodeLensParams) -> Option> { @@ -12,6 +13,8 @@ impl AsyncRequestContext { let config = handlers::CodeLensConfig::all(); let analysis = self.analyze_document(&path, &doc); - Some(handlers::code_lens(&doc, uri, &config, Some(&analysis))) + let mut lenses = handlers::code_lens(&doc, uri, &config, Some(&analysis)); + custom_operations::extend_code_lenses(self, &path, &doc, uri, &mut lenses); + Some(lenses) } } diff --git a/crates/jrsonnet-lsp/src/server/async_requests/commands/dispatch.rs b/crates/jrsonnet-lsp/src/server/async_requests/commands/dispatch.rs index 6862edf7..a3489bfe 100644 --- a/crates/jrsonnet-lsp/src/server/async_requests/commands/dispatch.rs +++ b/crates/jrsonnet-lsp/src/server/async_requests/commands/dispatch.rs @@ -11,10 +11,6 @@ impl AsyncRequestContext { info!("Execute command: {}", params.command); match params.command.as_str() { - "jrsonnet.evalFile" => { - let uri = params.arguments.first()?.as_str()?; - self.execute_eval_file(uri) - } "jrsonnet.evalExpression" => { let expr = params.arguments.first()?.as_str()?; let base_uri = 
params.arguments.get(1).and_then(|v| v.as_str()); diff --git a/crates/jrsonnet-lsp/src/server/async_requests/commands/eval.rs b/crates/jrsonnet-lsp/src/server/async_requests/commands/eval.rs index 4a86f7ab..1a99832e 100644 --- a/crates/jrsonnet-lsp/src/server/async_requests/commands/eval.rs +++ b/crates/jrsonnet-lsp/src/server/async_requests/commands/eval.rs @@ -5,7 +5,7 @@ use super::super::AsyncRequestContext; use crate::analysis::tanka::effective_import_roots; impl AsyncRequestContext { - pub(super) fn execute_eval_file(&self, uri: &str) -> Option { + pub(in crate::server) fn execute_eval_file(&self, uri: &str) -> Option { use jrsonnet_evaluator::manifest::JsonFormat; use jrsonnet_parser::{SourceFile, SourcePath}; diff --git a/crates/jrsonnet-lsp/src/server/custom_operations/eval_file.rs b/crates/jrsonnet-lsp/src/server/custom_operations/eval_file.rs new file mode 100644 index 00000000..ea91bcee --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/custom_operations/eval_file.rs @@ -0,0 +1,113 @@ +use anyhow::Context as _; +use jrsonnet_lsp_document::{CanonicalPath, Document}; +use lsp_types::{CodeLens, Command, Position, Range, TextDocumentIdentifier, Uri}; +use serde::Deserialize; + +use super::operation_spec::OperationSpec; +use crate::server::async_requests::AsyncRequestContext; + +const CUSTOM_METHOD: &str = "jrsonnet/evalFile"; +const EXECUTE_COMMAND: &str = "jrsonnet.evalFile"; + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +struct EvalFileParams { + text_document: TextDocumentIdentifier, +} + +pub(super) const OPERATION: OperationSpec = OperationSpec { + custom_method: CUSTOM_METHOD, + execute_command: Some(EXECUTE_COMMAND), + code_lens: Some(code_lenses), + handle_custom_request, + handle_execute_command: Some(handle_execute_command), +}; + +fn handle_custom_request( + context: &AsyncRequestContext, + params: serde_json::Value, +) -> anyhow::Result { + let params: EvalFileParams = serde_json::from_value(params) + .context("invalid 
params for jrsonnet/evalFile: expected textDocument.uri")?; + context + .execute_eval_file(params.text_document.uri.as_str()) + .context("could not evaluate requested file") +} + +fn handle_execute_command( + context: &AsyncRequestContext, + args: &[serde_json::Value], +) -> Option { + let uri = args.first()?.as_str()?; + context.execute_eval_file(uri) +} + +fn code_lenses( + _context: &AsyncRequestContext, + _path: &CanonicalPath, + document: &Document, + uri: &Uri, +) -> Vec { + build_code_lenses(document, uri) +} + +fn build_code_lenses(document: &Document, uri: &Uri) -> Vec { + if document.ast().expr().is_none() { + return Vec::new(); + } + + vec![CodeLens { + range: Range { + start: Position { + line: 0, + character: 0, + }, + end: Position { + line: 0, + character: 0, + }, + }, + command: Some(Command { + title: "Evaluate".to_string(), + command: EXECUTE_COMMAND.to_string(), + arguments: Some(vec![serde_json::json!(uri.to_string())]), + }), + data: None, + }] +} + +#[cfg(test)] +mod tests { + use jrsonnet_lsp_document::{DocVersion, Document}; + + use super::{build_code_lenses, CUSTOM_METHOD, EXECUTE_COMMAND}; + + #[test] + fn operation_ids_are_stable() { + assert_eq!(CUSTOM_METHOD, "jrsonnet/evalFile"); + assert_eq!(EXECUTE_COMMAND, "jrsonnet.evalFile"); + } + + #[test] + fn eval_file_lens_present_for_root_expression() { + let uri: lsp_types::Uri = "file:///test/main.jsonnet".parse().unwrap(); + let doc = Document::new("{ a: 1 }".to_string(), DocVersion::new(1)); + let lenses = build_code_lenses(&doc, &uri); + + assert_eq!(lenses.len(), 1); + let lens = &lenses[0]; + assert_eq!(lens.command.as_ref().unwrap().command, EXECUTE_COMMAND); + assert_eq!( + lens.command.as_ref().unwrap().arguments.as_ref().unwrap(), + &vec![serde_json::json!(uri.to_string())] + ); + } + + #[test] + fn eval_file_lens_absent_without_root_expression() { + let uri: lsp_types::Uri = "file:///test/main.jsonnet".parse().unwrap(); + let doc = Document::new(String::new(), 
DocVersion::new(1)); + let lenses = build_code_lenses(&doc, &uri); + assert!(lenses.is_empty()); + } +} diff --git a/crates/jrsonnet-lsp/src/server/custom_operations/mod.rs b/crates/jrsonnet-lsp/src/server/custom_operations/mod.rs new file mode 100644 index 00000000..de651cd0 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/custom_operations/mod.rs @@ -0,0 +1,8 @@ +mod eval_file; +mod operation_spec; +mod registry; + +pub(super) use registry::{ + execute_command_ids, extend_code_lenses, operation_for_custom_method, + operation_for_execute_command, +}; diff --git a/crates/jrsonnet-lsp/src/server/custom_operations/operation_spec.rs b/crates/jrsonnet-lsp/src/server/custom_operations/operation_spec.rs new file mode 100644 index 00000000..b6aab92b --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/custom_operations/operation_spec.rs @@ -0,0 +1,19 @@ +use jrsonnet_lsp_document::{CanonicalPath, Document}; +use lsp_types::{CodeLens, Uri}; + +use super::super::async_requests::AsyncRequestContext; + +pub(in crate::server) type CodeLensProvider = + fn(&AsyncRequestContext, &CanonicalPath, &Document, &Uri) -> Vec; +pub(in crate::server) type CustomRequestHandler = + fn(&AsyncRequestContext, serde_json::Value) -> anyhow::Result; +pub(in crate::server) type ExecuteCommandHandler = + fn(&AsyncRequestContext, &[serde_json::Value]) -> Option; + +pub(in crate::server) struct OperationSpec { + pub(in crate::server) custom_method: &'static str, + pub(in crate::server) execute_command: Option<&'static str>, + pub(in crate::server) code_lens: Option, + pub(in crate::server) handle_custom_request: CustomRequestHandler, + pub(in crate::server) handle_execute_command: Option, +} diff --git a/crates/jrsonnet-lsp/src/server/custom_operations/registry.rs b/crates/jrsonnet-lsp/src/server/custom_operations/registry.rs new file mode 100644 index 00000000..7b40e452 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/custom_operations/registry.rs @@ -0,0 +1,88 @@ +use std::{collections::HashMap, 
sync::LazyLock}; + +use jrsonnet_lsp_document::{CanonicalPath, Document}; +use lsp_types::{CodeLens, Uri}; + +use super::{super::async_requests::AsyncRequestContext, eval_file, operation_spec::OperationSpec}; + +const OPERATIONS: &[OperationSpec] = &[eval_file::OPERATION]; + +static OPERATIONS_BY_CUSTOM_METHOD: LazyLock> = + LazyLock::new(|| { + OPERATIONS + .iter() + .map(|operation| (operation.custom_method, operation)) + .collect() + }); + +static OPERATIONS_BY_EXECUTE_COMMAND: LazyLock> = + LazyLock::new(|| { + OPERATIONS + .iter() + .filter_map(|operation| { + operation + .execute_command + .map(|command| (command, operation)) + }) + .collect() + }); + +pub(in crate::server) fn operation_for_custom_method( + method: &str, +) -> Option<&'static OperationSpec> { + OPERATIONS_BY_CUSTOM_METHOD.get(method).copied() +} + +pub(in crate::server) fn operation_for_execute_command( + command: &str, +) -> Option<&'static OperationSpec> { + OPERATIONS_BY_EXECUTE_COMMAND.get(command).copied() +} + +pub(in crate::server) fn execute_command_ids() -> Vec { + OPERATIONS + .iter() + .filter_map(|operation| operation.execute_command) + .map(ToString::to_string) + .collect() +} + +pub(in crate::server) fn extend_code_lenses( + context: &AsyncRequestContext, + path: &CanonicalPath, + document: &Document, + uri: &Uri, + lenses: &mut Vec, +) { + for operation in OPERATIONS { + if let Some(code_lens) = operation.code_lens { + lenses.extend(code_lens(context, path, document, uri)); + } + } +} + +#[cfg(test)] +mod tests { + use std::collections::HashSet; + + use super::OPERATIONS; + + #[test] + fn operations_have_unique_custom_method_ids() { + let unique: HashSet<_> = OPERATIONS + .iter() + .map(|operation| operation.custom_method) + .collect(); + assert_eq!(unique.len(), OPERATIONS.len()); + } + + #[test] + fn operations_have_unique_execute_command_ids() { + let command_ids: Vec<_> = OPERATIONS + .iter() + .filter_map(|operation| operation.execute_command) + .collect(); + let unique: 
HashSet<_> = command_ids.iter().copied().collect(); + assert_eq!(unique.len(), command_ids.len()); + } +} diff --git a/crates/jrsonnet-lsp/src/server/initialization.rs b/crates/jrsonnet-lsp/src/server/initialization.rs index 1ec28ffe..a6b21854 100644 --- a/crates/jrsonnet-lsp/src/server/initialization.rs +++ b/crates/jrsonnet-lsp/src/server/initialization.rs @@ -10,7 +10,7 @@ use lsp_types::{ }; use tracing::info; -use super::{InitializeRoots, Server, SUPPORTED_EXECUTE_COMMANDS}; +use super::{custom_operations, InitializeRoots, Server, LEGACY_EXECUTE_COMMANDS}; impl Server { /// Handle the initialize request. @@ -92,9 +92,9 @@ impl Server { }), ), execute_command_provider: Some(ExecuteCommandOptions { - commands: SUPPORTED_EXECUTE_COMMANDS + commands: custom_operations::execute_command_ids() .into_iter() - .map(ToString::to_string) + .chain(LEGACY_EXECUTE_COMMANDS.into_iter().map(ToString::to_string)) .collect(), work_done_progress_options: WorkDoneProgressOptions::default(), }), diff --git a/crates/jrsonnet-lsp/src/server/request_dispatch.rs b/crates/jrsonnet-lsp/src/server/request_dispatch.rs index cfcbc1fb..ea224fff 100644 --- a/crates/jrsonnet-lsp/src/server/request_dispatch.rs +++ b/crates/jrsonnet-lsp/src/server/request_dispatch.rs @@ -12,7 +12,7 @@ use lsp_types::{ use serde::{de::DeserializeOwned, Serialize}; use tracing::{debug, info, warn}; -use super::{async_requests::AsyncRequestContext, requests, Server}; +use super::{async_requests::AsyncRequestContext, custom_operations, requests, Server}; use crate::protocol::inflight_requests::IncomingRequest; impl Server { @@ -48,6 +48,9 @@ impl Server { | ExecuteCommand::METHOD => self.handle_async_request(id, method.as_str(), params), CodeLensResolve::METHOD => self.handle_sync_request(id, method.as_str(), params), _ => { + if self.handle_custom_operation_request(id.clone(), method.as_str(), params) { + return Ok(()); + } let request = self.inflight_requests.begin_unknown(id, method.as_str()); warn!("Unhandled 
request: {}", request.method()); let message = format!("Method not found: {}", request.method()); @@ -97,6 +100,25 @@ impl Server { Ok(()) } + fn handle_custom_operation_request( + &mut self, + id: RequestId, + method: &str, + params: serde_json::Value, + ) -> bool { + let Some(operation) = custom_operations::operation_for_custom_method(method) else { + return false; + }; + + self.inflight_requests.begin_unknown(id.clone(), method); + let context = self.async_request_context(); + let compute = operation.handle_custom_request; + self.spawn_async_response(id, operation.custom_method, move || { + compute(&context, params) + }); + true + } + fn handle_sync_typed( &mut self, request: IncomingRequest, @@ -363,6 +385,16 @@ impl Server { return Ok(()); } + if let Some(operation) = custom_operations::operation_for_execute_command(¶ms.command) { + let context = self.async_request_context(); + let args = params.arguments.clone(); + let compute = operation + .handle_execute_command + .expect("operation with execute command id must provide execute handler"); + self.spawn_typed_json_response(request, move || compute(&context, &args)); + return Ok(()); + } + let context = self.async_request_context(); self.spawn_typed_json_response(request, move || { requests::async_handlers::execute_command::handle(&context, ¶ms) diff --git a/crates/jrsonnet-lsp/src/server/watched_files.rs b/crates/jrsonnet-lsp/src/server/watched_files.rs index f68de860..276baa38 100644 --- a/crates/jrsonnet-lsp/src/server/watched_files.rs +++ b/crates/jrsonnet-lsp/src/server/watched_files.rs @@ -9,7 +9,8 @@ use super::*; impl Server { pub(super) fn is_supported_execute_command(command: &str) -> bool { - SUPPORTED_EXECUTE_COMMANDS.contains(&command) + LEGACY_EXECUTE_COMMANDS.contains(&command) + || super::custom_operations::operation_for_execute_command(command).is_some() } pub(super) fn supports_dynamic_watched_files_registration(params: &InitializeParams) -> bool { From b640a2ead668714c4779a104704390df4e664a0d 
Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Wed, 25 Feb 2026 10:19:27 +0000 Subject: [PATCH 170/210] fix(eval): always include parent dir in eval import roots Unify eval import-root construction for both eval diagnostics and eval request paths, and always include the document parent directory for file-based snippet evaluation. This removes mode-dependent parent-dir fallback behavior and keeps import resolution deterministic in resolvePathsWithTanka=auto when no Tanka root marker is present. Add focused unit tests covering parent-dir inclusion without a Tanka root and precedence over configured roots. --- crates/jrsonnet-lsp/src/analysis/eval.rs | 81 +++++++++++++++---- .../server/async_requests/commands/eval.rs | 14 +--- 2 files changed, 69 insertions(+), 26 deletions(-) diff --git a/crates/jrsonnet-lsp/src/analysis/eval.rs b/crates/jrsonnet-lsp/src/analysis/eval.rs index 81a20565..5597c1a2 100644 --- a/crates/jrsonnet-lsp/src/analysis/eval.rs +++ b/crates/jrsonnet-lsp/src/analysis/eval.rs @@ -3,7 +3,10 @@ //! This module provides the ability to evaluate Jsonnet documents and convert //! runtime errors into LSP diagnostics. -use std::path::{Path, PathBuf}; +use std::{ + collections::HashSet, + path::{Path, PathBuf}, +}; use jrsonnet_evaluator::{ error::Error as EvalError, trace::PathResolver, FileImportResolver, State, @@ -41,6 +44,34 @@ pub(crate) fn create_state_with_jpath(jpath: &[PathBuf]) -> State { builder.build() } +/// Build effective import roots for eval-style snippet execution. +/// +/// Eval paths are executed via virtual snippets, so they do not naturally +/// resolve relative imports from the document directory. To mirror file-based +/// behavior, always include the file parent as a search root. 
+#[must_use] +pub(crate) fn eval_import_roots_for_file( + path: &Path, + configured_roots: &[PathBuf], + resolve_paths_with_tanka: ResolvePathsWithTankaMode, +) -> Vec { + let mut roots = Vec::new(); + if let Some(parent) = path.parent() { + roots.push(parent.to_path_buf()); + } + roots.extend(tanka::effective_import_roots( + path, + configured_roots, + resolve_paths_with_tanka, + )); + + let mut seen = HashSet::new(); + roots + .into_iter() + .filter(|root| seen.insert(root.clone())) + .collect() +} + /// Evaluator for Jsonnet documents. pub struct Evaluator { /// Base jpath configuration (from settings). @@ -64,20 +95,7 @@ impl Evaluator { /// If Tanka mode is enabled, this will resolve paths based on the /// file's location in the Tanka project structure. fn get_jpath_for_file(&self, path: &Path) -> Vec { - let mut jpath = self.base_jpath.clone(); - - if self.tanka_mode.is_enabled() { - // Add Tanka-resolved paths - let tanka_paths = tanka::resolve_jpath(path, self.tanka_mode); - jpath.extend(tanka_paths); - } else { - // Add the file's directory to jpath (standard behavior) - if let Some(dir) = path.parent() { - jpath.push(dir.to_path_buf()); - } - } - - jpath + eval_import_roots_for_file(path, &self.base_jpath, self.tanka_mode) } /// Evaluate a document and return any diagnostics. 
@@ -164,7 +182,10 @@ fn eval_error_to_diagnostic( #[cfg(test)] mod tests { + use std::fs; + use jrsonnet_lsp_document::{DocVersion, Document}; + use tempfile::TempDir; use super::*; @@ -283,4 +304,34 @@ mod tests { let diag = result.expect("expected evaluation to fail with runtime error"); assert_eval_diagnostic(&diag, &["custom error message"]); } + + #[test] + fn test_eval_import_roots_include_parent_in_auto_without_tanka_root() { + let tmp = TempDir::new().expect("tmp should be created"); + let env_dir = tmp.path().join("env"); + fs::create_dir_all(&env_dir).expect("env should be created"); + let file_path = env_dir.join("main.jsonnet"); + fs::write(&file_path, "{}").expect("file should be created"); + + let roots = eval_import_roots_for_file(&file_path, &[], ResolvePathsWithTankaMode::Auto); + assert_eq!(roots, vec![env_dir]); + } + + #[test] + fn test_eval_import_roots_parent_precedes_configured_roots() { + let tmp = TempDir::new().expect("tmp should be created"); + let env_dir = tmp.path().join("env"); + let configured_dir = tmp.path().join("configured"); + fs::create_dir_all(&env_dir).expect("env should be created"); + fs::create_dir_all(&configured_dir).expect("configured should be created"); + let file_path = env_dir.join("main.jsonnet"); + fs::write(&file_path, "{}").expect("file should be created"); + + let roots = eval_import_roots_for_file( + &file_path, + std::slice::from_ref(&configured_dir), + ResolvePathsWithTankaMode::False, + ); + assert_eq!(roots, vec![env_dir, configured_dir]); + } } diff --git a/crates/jrsonnet-lsp/src/server/async_requests/commands/eval.rs b/crates/jrsonnet-lsp/src/server/async_requests/commands/eval.rs index 1a99832e..619493ca 100644 --- a/crates/jrsonnet-lsp/src/server/async_requests/commands/eval.rs +++ b/crates/jrsonnet-lsp/src/server/async_requests/commands/eval.rs @@ -2,7 +2,7 @@ use jrsonnet_lsp_document::CanonicalPath; use tracing::warn; use super::super::AsyncRequestContext; -use 
crate::analysis::tanka::effective_import_roots; +use crate::analysis::eval::eval_import_roots_for_file; impl AsyncRequestContext { pub(in crate::server) fn execute_eval_file(&self, uri: &str) -> Option { @@ -94,19 +94,11 @@ impl AsyncRequestContext { let jpath = base_path.map_or_else( || config.jpath.clone(), |base_path| { - let mut roots = effective_import_roots( + eval_import_roots_for_file( base_path.as_path(), &config.jpath, config.resolve_paths_with_tanka, - ); - if !config.resolve_paths_with_tanka.is_enabled() { - if let Some(dir) = base_path.as_path().parent() { - if !roots.iter().any(|entry| entry == dir) { - roots.push(dir.to_path_buf()); - } - } - } - roots + ) }, ); drop(config); From 9954296b92bba62f8fe8dda9f3afbe939b6d7ce9 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Wed, 25 Feb 2026 10:19:52 +0000 Subject: [PATCH 171/210] test(lsp): cover custom eval request and code-lens execute flow Add integration coverage for jrsonnet/evalFile as a direct custom request and for the executeCommand bridge path used by evaluate code lenses. Extend scenario coverage with explicit requestExecuteCodeLens and expectExecuteCodeLens assertions, and add a dedicated custom-request scenario that evaluates a multi-file program where lib.libsonnet exports a function value directly. These tests verify both custom-method integration and no-client-work code-lens execution behavior end-to-end. 
--- crates/jrsonnet-lsp/tests/integration_test.rs | 12 ++ .../tests/integration_test/features.rs | 113 ++++++++++++++++-- .../code_lens_references_and_evaluate.yaml | 9 ++ .../runner/custom_eval_file_request.yaml | 24 ++++ 4 files changed, 148 insertions(+), 10 deletions(-) create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/custom_eval_file_request.yaml diff --git a/crates/jrsonnet-lsp/tests/integration_test.rs b/crates/jrsonnet-lsp/tests/integration_test.rs index 73c5d5f8..9a9dd46a 100644 --- a/crates/jrsonnet-lsp/tests/integration_test.rs +++ b/crates/jrsonnet-lsp/tests/integration_test.rs @@ -595,6 +595,18 @@ fn execute_command_request(id: i32, command: &str, arguments: Vec Request { + Request::new( + id.into(), + "jrsonnet/evalFile".to_string(), + serde_json::json!({ + "textDocument": { + "uri": uri, + }, + }), + ) +} + fn workspace_symbol_request(id: i32, query: &str) -> Request { let params = lsp_types::WorkspaceSymbolParams { query: query.to_string(), diff --git a/crates/jrsonnet-lsp/tests/integration_test/features.rs b/crates/jrsonnet-lsp/tests/integration_test/features.rs index c795e24f..a290be6f 100644 --- a/crates/jrsonnet-lsp/tests/integration_test/features.rs +++ b/crates/jrsonnet-lsp/tests/integration_test/features.rs @@ -790,30 +790,46 @@ fn test_eval_commands_use_tanka_import_roots() { .send(Message::Notification(initialized_notification())) .unwrap(); + client_conn + .sender + .send(Message::Request(custom_eval_file_request(2, &main_uri))) + .unwrap(); + let eval_file_custom_response = recv_response(&client_conn, 2); + assert!( + eval_file_custom_response.error.is_none(), + "jrsonnet/evalFile request should succeed" + ); + assert_eq!( + eval_file_custom_response + .result + .expect("jrsonnet/evalFile should return a result"), + serde_json::json!(42) + ); + client_conn .sender .send(Message::Request(execute_command_request( - 2, + 3, "jrsonnet.evalFile", vec![serde_json::Value::String(main_uri.clone())], ))) .unwrap(); - let 
eval_file_response = recv_response(&client_conn, 2); + let eval_file_bridge_response = recv_response(&client_conn, 3); assert!( - eval_file_response.error.is_none(), - "evalFile command should succeed" + eval_file_bridge_response.error.is_none(), + "evalFile executeCommand bridge should succeed" ); assert_eq!( - eval_file_response + eval_file_bridge_response .result - .expect("evalFile should return a result"), + .expect("evalFile bridge should return a result"), serde_json::json!(42) ); client_conn .sender .send(Message::Request(execute_command_request( - 3, + 4, "jrsonnet.evalExpression", vec![ serde_json::Value::String(r#"(import "lib.libsonnet").answer"#.to_string()), @@ -821,7 +837,7 @@ fn test_eval_commands_use_tanka_import_roots() { ], ))) .unwrap(); - let eval_expression_response = recv_response(&client_conn, 3); + let eval_expression_response = recv_response(&client_conn, 4); assert!( eval_expression_response.error.is_none(), "evalExpression command should succeed" @@ -835,9 +851,9 @@ fn test_eval_commands_use_tanka_import_roots() { client_conn .sender - .send(Message::Request(shutdown_request(4))) + .send(Message::Request(shutdown_request(5))) .unwrap(); - let _ = recv_response(&client_conn, 4); + let _ = recv_response(&client_conn, 5); client_conn .sender .send(Message::Notification(exit_notification())) @@ -979,6 +995,83 @@ fn test_code_lens_resolve_request() { .expect("Server thread should exit cleanly"); } +#[test] +fn test_code_lens_evaluate_command_executes_and_returns_result() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request(1))) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + let uri = "file:///test/code-lens-evaluate.jsonnet"; + let text = "local x = 1; x"; + client_conn + .sender + 
.send(Message::Notification(did_open_notification(uri, text))) + .unwrap(); + let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); + + client_conn + .sender + .send(Message::Request(code_lens_request(2, uri))) + .unwrap(); + let response = recv_response(&client_conn, 2); + assert!(response.error.is_none(), "code lens request should succeed"); + let lenses: Vec = + serde_json::from_value(response.result.expect("should have result")).unwrap(); + let evaluate_command = lenses + .into_iter() + .filter_map(|lens| lens.command) + .find(|command| command.command == "jrsonnet.evalFile") + .expect("expected evaluate code lens command"); + assert_eq!(evaluate_command.title, "Evaluate"); + assert_eq!( + evaluate_command.arguments, + Some(vec![serde_json::Value::String(uri.to_string())]), + "evaluate lens should target the opened file URI", + ); + + client_conn + .sender + .send(Message::Request(execute_command_request( + 3, + &evaluate_command.command, + evaluate_command.arguments.unwrap_or_default(), + ))) + .unwrap(); + let response = recv_response(&client_conn, 3); + assert!( + response.error.is_none(), + "executeCommand for evaluate lens should succeed", + ); + assert_eq!( + response + .result + .expect("evaluate lens execution should return a result"), + serde_json::json!(1), + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(4))) + .unwrap(); + let _ = recv_response(&client_conn, 4); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + #[test] fn test_cancel_request_returns_request_canceled_error() { let (client_conn, server_conn) = Connection::memory(); diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/code_lens_references_and_evaluate.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/code_lens_references_and_evaluate.yaml index 383114fc..7bba0e0b 100644 --- 
a/crates/jrsonnet-lsp/tests/scenarios/runner/code_lens_references_and_evaluate.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/code_lens_references_and_evaluate.yaml @@ -30,3 +30,12 @@ steps: command: jrsonnet.evalFile arguments: - file: main.jsonnet + +- step: requestExecuteCodeLens + as: evaluateViaCodeLens + request: lenses + index: 1 + +- step: expectExecuteCodeLens + request: evaluateViaCodeLens + result: 1 diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/custom_eval_file_request.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/custom_eval_file_request.yaml new file mode 100644 index 00000000..022ade64 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/custom_eval_file_request.yaml @@ -0,0 +1,24 @@ +# Verify custom request execution for eval file is covered independently. +# Use a multi-file input so the request is exercised on a non-trivial program. +steps: + - step: create + files: + lib.libsonnet: | + function(v) v + 2 + main.jsonnet: | + local plus = import "./lib.libsonnet"; + plus(30) + + - step: diagnosticsSettled + + - step: requestCustom + as: evaluateViaCustom + method: jrsonnet/evalFile + params: + textDocument: + uri: + file: main.jsonnet + + - step: expectCustom + request: evaluateViaCustom + result: 32 From b553029307ecbcb00099c6a14915c950ef1c2be3 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Wed, 25 Feb 2026 10:54:51 +0000 Subject: [PATCH 172/210] docs(lsp): document the `jrsonnet/evalFile` custom request We're migrating from `workspace/executeCommand` to custom requests for better ergonomics. This documents the `jrsonnet/evalFile` custom request, which evaluates a Jsonnet file and returns its resulting JSON value. 
--- docs/lsp/README.md | 31 ++++++++++++++++++++++++++++++- 1 file changed, 30 insertions(+), 1 deletion(-) diff --git a/docs/lsp/README.md b/docs/lsp/README.md index 4514c726..39258ce8 100644 --- a/docs/lsp/README.md +++ b/docs/lsp/README.md @@ -32,7 +32,6 @@ Advertised LSP features: `semanticTokens/range`, `codeLens` (+ `codeLens/resolve`). - Actions: `codeAction` (`quickfix`, `source.fixAll`), `formatting`. - Commands via `workspace/executeCommand`: - - `jrsonnet.evalFile` - `jrsonnet.evalExpression` - `jrsonnet.findTransitiveImporters` - `jrsonnet.findReferences` @@ -45,6 +44,36 @@ registers watchers for: - `**/*.libsonnet` - `**/*.json` +## Custom Requests + +These custom requests are also exposed as code lenses. However, implementing +them directly may provide a better experience. + +### `jrsonnet/evalFile` + +Evaluate a Jsonnet file and return its resulting JSON value. + +Example request: + +```json +{ + "method": "jrsonnet/evalFile", + "params": { + "textDocument": { + "uri": "file:///a/b/main.jsonnet" + } + } +} +``` + +Example response: + +```json +42 +``` + +This operation is also exposed via the Evaluate code lens. + ## Configuration Configuration is accepted from: From 18920aa175217abdb89f5a79120d41da709a0e18 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Wed, 25 Feb 2026 11:19:34 +0000 Subject: [PATCH 173/210] feat(lsp): add custom evalExpression and transitive importer requests Add typed custom-operation handlers for: - jrsonnet/evalExpression - jrsonnet/findTransitiveImporters Wire both into the custom operation registry and expose the existing async helpers to the server module boundary used by the new handlers.
--- .../server/async_requests/commands/eval.rs | 2 +- .../server/async_requests/commands/graph.rs | 5 ++- .../custom_operations/eval_expression.rs | 45 +++++++++++++++++++ .../find_transitive_importers.rs | 44 ++++++++++++++++++ .../src/server/custom_operations/mod.rs | 2 + .../src/server/custom_operations/registry.rs | 11 ++++- 6 files changed, 105 insertions(+), 4 deletions(-) create mode 100644 crates/jrsonnet-lsp/src/server/custom_operations/eval_expression.rs create mode 100644 crates/jrsonnet-lsp/src/server/custom_operations/find_transitive_importers.rs diff --git a/crates/jrsonnet-lsp/src/server/async_requests/commands/eval.rs b/crates/jrsonnet-lsp/src/server/async_requests/commands/eval.rs index 619493ca..99d1ff08 100644 --- a/crates/jrsonnet-lsp/src/server/async_requests/commands/eval.rs +++ b/crates/jrsonnet-lsp/src/server/async_requests/commands/eval.rs @@ -46,7 +46,7 @@ impl AsyncRequestContext { } } - pub(super) fn execute_eval_expression( + pub(in crate::server) fn execute_eval_expression( &self, expr: &str, base_uri: Option<&str>, diff --git a/crates/jrsonnet-lsp/src/server/async_requests/commands/graph.rs b/crates/jrsonnet-lsp/src/server/async_requests/commands/graph.rs index cb9104de..7f812c4c 100644 --- a/crates/jrsonnet-lsp/src/server/async_requests/commands/graph.rs +++ b/crates/jrsonnet-lsp/src/server/async_requests/commands/graph.rs @@ -3,7 +3,10 @@ use jrsonnet_lsp_document::CanonicalPath; use super::super::AsyncRequestContext; impl AsyncRequestContext { - pub(super) fn execute_find_transitive_importers(&self, uri: &str) -> Option { + pub(in crate::server) fn execute_find_transitive_importers( + &self, + uri: &str, + ) -> Option { let uri_parsed: lsp_types::Uri = uri.parse().ok()?; let path = CanonicalPath::from_uri(&uri_parsed).ok()?; diff --git a/crates/jrsonnet-lsp/src/server/custom_operations/eval_expression.rs b/crates/jrsonnet-lsp/src/server/custom_operations/eval_expression.rs new file mode 100644 index 00000000..adc1462e --- /dev/null 
+++ b/crates/jrsonnet-lsp/src/server/custom_operations/eval_expression.rs @@ -0,0 +1,45 @@ +use anyhow::Context as _; +use lsp_types::TextDocumentIdentifier; +use serde::Deserialize; + +use super::operation_spec::OperationSpec; +use crate::server::async_requests::AsyncRequestContext; + +const CUSTOM_METHOD: &str = "jrsonnet/evalExpression"; + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +struct EvalExpressionParams { + expression: String, + #[serde(default)] + base_document: Option, +} + +pub(super) const OPERATION: OperationSpec = OperationSpec { + custom_method: CUSTOM_METHOD, + execute_command: None, + code_lens: None, + handle_custom_request, + handle_execute_command: None, +}; + +fn handle_custom_request( + context: &AsyncRequestContext, + params: serde_json::Value, +) -> anyhow::Result { + let params: EvalExpressionParams = serde_json::from_value(params).context( + "invalid params for jrsonnet/evalExpression: expected expression and optional baseDocument.uri", + )?; + let base_uri = params.base_document.as_ref().map(|doc| doc.uri.as_str()); + Ok(context.execute_eval_expression(¶ms.expression, base_uri)) +} + +#[cfg(test)] +mod tests { + use super::CUSTOM_METHOD; + + #[test] + fn operation_id_is_stable() { + assert_eq!(CUSTOM_METHOD, "jrsonnet/evalExpression"); + } +} diff --git a/crates/jrsonnet-lsp/src/server/custom_operations/find_transitive_importers.rs b/crates/jrsonnet-lsp/src/server/custom_operations/find_transitive_importers.rs new file mode 100644 index 00000000..4ea06392 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/custom_operations/find_transitive_importers.rs @@ -0,0 +1,44 @@ +use anyhow::Context as _; +use lsp_types::TextDocumentIdentifier; +use serde::Deserialize; + +use super::operation_spec::OperationSpec; +use crate::server::async_requests::AsyncRequestContext; + +const CUSTOM_METHOD: &str = "jrsonnet/findTransitiveImporters"; + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +struct 
FindTransitiveImportersParams { + text_document: TextDocumentIdentifier, +} + +pub(super) const OPERATION: OperationSpec = OperationSpec { + custom_method: CUSTOM_METHOD, + execute_command: None, + code_lens: None, + handle_custom_request, + handle_execute_command: None, +}; + +fn handle_custom_request( + context: &AsyncRequestContext, + params: serde_json::Value, +) -> anyhow::Result { + let params: FindTransitiveImportersParams = serde_json::from_value(params).context( + "invalid params for jrsonnet/findTransitiveImporters: expected textDocument.uri", + )?; + context + .execute_find_transitive_importers(params.text_document.uri.as_str()) + .context("could not compute transitive importers") +} + +#[cfg(test)] +mod tests { + use super::CUSTOM_METHOD; + + #[test] + fn operation_id_is_stable() { + assert_eq!(CUSTOM_METHOD, "jrsonnet/findTransitiveImporters"); + } +} diff --git a/crates/jrsonnet-lsp/src/server/custom_operations/mod.rs b/crates/jrsonnet-lsp/src/server/custom_operations/mod.rs index de651cd0..eb39d63c 100644 --- a/crates/jrsonnet-lsp/src/server/custom_operations/mod.rs +++ b/crates/jrsonnet-lsp/src/server/custom_operations/mod.rs @@ -1,4 +1,6 @@ +mod eval_expression; mod eval_file; +mod find_transitive_importers; mod operation_spec; mod registry; diff --git a/crates/jrsonnet-lsp/src/server/custom_operations/registry.rs b/crates/jrsonnet-lsp/src/server/custom_operations/registry.rs index 7b40e452..ac10b443 100644 --- a/crates/jrsonnet-lsp/src/server/custom_operations/registry.rs +++ b/crates/jrsonnet-lsp/src/server/custom_operations/registry.rs @@ -3,9 +3,16 @@ use std::{collections::HashMap, sync::LazyLock}; use jrsonnet_lsp_document::{CanonicalPath, Document}; use lsp_types::{CodeLens, Uri}; -use super::{super::async_requests::AsyncRequestContext, eval_file, operation_spec::OperationSpec}; +use super::{ + super::async_requests::AsyncRequestContext, eval_expression, eval_file, + find_transitive_importers, operation_spec::OperationSpec, +}; -const 
OPERATIONS: &[OperationSpec] = &[eval_file::OPERATION]; +const OPERATIONS: &[OperationSpec] = &[ + eval_file::OPERATION, + eval_expression::OPERATION, + find_transitive_importers::OPERATION, +]; static OPERATIONS_BY_CUSTOM_METHOD: LazyLock> = LazyLock::new(|| { From f721373b3f8af18491e4e7906945df55982f6624 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Wed, 25 Feb 2026 11:20:03 +0000 Subject: [PATCH 174/210] refactor(lsp): remove legacy executeCommand-only operations Drop non-standard executeCommand compatibility paths for: - jrsonnet.evalExpression - jrsonnet.findTransitiveImporters - jrsonnet.findReferences - jrsonnet.showErrors Keep executeCommand only as the evalFile code-lens bridge. Update tests and scenario fixtures to use standard references/diagnostics surfaces and custom request methods where applicable. --- .../src/code_lens/actions.rs | 45 ---- .../src/code_lens/dispatch.rs | 223 ++---------------- .../src/code_lens/mod.rs | 9 +- .../src/code_lens/refs.rs | 70 ------ crates/jrsonnet-lsp/src/server.rs | 7 - .../async_requests/commands/diagnostics.rs | 49 ---- .../async_requests/commands/dispatch.rs | 46 ---- .../src/server/async_requests/commands/mod.rs | 3 - .../async_requests/commands/references.rs | 32 --- .../jrsonnet-lsp/src/server/initialization.rs | 7 +- .../src/server/request_dispatch.rs | 9 +- .../async_handlers/execute_command.rs | 10 - .../src/server/requests/async_handlers/mod.rs | 1 - .../jrsonnet-lsp/src/server/watched_files.rs | 3 +- crates/jrsonnet-lsp/tests/integration_test.rs | 55 ++--- .../tests/integration_test/features.rs | 131 +++------- .../tests/integration_test/lifecycle.rs | 20 +- .../integration_test/workspace_cross_file.rs | 36 +-- .../code_lens_references_and_evaluate.yaml | 25 +- .../hover_completion_execute_command.yaml | 12 +- 20 files changed, 121 insertions(+), 672 deletions(-) delete mode 100644 crates/jrsonnet-lsp-handlers/src/code_lens/actions.rs delete mode 100644 crates/jrsonnet-lsp-handlers/src/code_lens/refs.rs 
delete mode 100644 crates/jrsonnet-lsp/src/server/async_requests/commands/diagnostics.rs delete mode 100644 crates/jrsonnet-lsp/src/server/async_requests/commands/dispatch.rs delete mode 100644 crates/jrsonnet-lsp/src/server/async_requests/commands/references.rs delete mode 100644 crates/jrsonnet-lsp/src/server/requests/async_handlers/execute_command.rs diff --git a/crates/jrsonnet-lsp-handlers/src/code_lens/actions.rs b/crates/jrsonnet-lsp-handlers/src/code_lens/actions.rs deleted file mode 100644 index a3532e8f..00000000 --- a/crates/jrsonnet-lsp-handlers/src/code_lens/actions.rs +++ /dev/null @@ -1,45 +0,0 @@ -use jrsonnet_lsp_document::Document; -use lsp_types::{CodeLens, Command, Range, Uri}; - -/// Generate error status code lens for the document. -/// -/// Shows the number of syntax errors at the top of the file. -/// Only shown when there are errors (no lens for clean files to reduce clutter). -pub(super) fn error_status_lens(document: &Document, uri: &Uri) -> Option { - let errors = document.errors(); - - // Only show lens if there are errors - if errors.is_empty() { - return None; - } - - let error_count = errors.len(); - - // Place the lens at line 0 - let range = Range { - start: lsp_types::Position { - line: 0, - character: 0, - }, - end: lsp_types::Position { - line: 0, - character: 0, - }, - }; - - let title = if error_count == 1 { - "1 syntax error".to_string() - } else { - format!("{error_count} syntax errors") - }; - - Some(CodeLens { - range, - command: Some(Command { - title, - command: "jrsonnet.showErrors".to_string(), - arguments: Some(vec![serde_json::json!(uri.to_string())]), - }), - data: None, - }) -} diff --git a/crates/jrsonnet-lsp-handlers/src/code_lens/dispatch.rs b/crates/jrsonnet-lsp-handlers/src/code_lens/dispatch.rs index e7e23ddb..607f94be 100644 --- a/crates/jrsonnet-lsp-handlers/src/code_lens/dispatch.rs +++ b/crates/jrsonnet-lsp-handlers/src/code_lens/dispatch.rs @@ -2,42 +2,20 @@ use jrsonnet_lsp_document::Document; use 
jrsonnet_lsp_inference::TypeAnalysis; use lsp_types::{CodeLens, Uri}; -use super::{actions::error_status_lens, refs::reference_count_lenses, type_lenses::type_lenses}; +use super::type_lenses::type_lenses; /// Configuration for code lens generation. #[derive(Debug, Clone, Default)] pub struct CodeLensConfig { - /// Show reference counts for definitions. - pub show_references: bool, /// Show inferred types for function definitions. pub show_types: bool, - /// Show error status at top of file. - pub show_errors: ErrorLensVisibility, -} - -/// Whether to include the file-level error status lens. -#[derive(Debug, Clone, Copy, Default, PartialEq, Eq)] -pub enum ErrorLensVisibility { - #[default] - Hidden, - Visible, -} - -impl ErrorLensVisibility { - const fn is_visible(self) -> bool { - matches!(self, Self::Visible) - } } impl CodeLensConfig { - /// Create a config that shows all code lenses. + /// Create a config that shows all supported code lenses. #[must_use] pub fn all() -> Self { - Self { - show_references: true, - show_types: true, - show_errors: ErrorLensVisibility::Visible, - } + Self { show_types: true } } } @@ -47,29 +25,17 @@ impl CodeLensConfig { /// that import types are properly resolved. Pass `None` to skip type lenses. pub fn code_lens( document: &Document, - uri: &Uri, + _uri: &Uri, config: &CodeLensConfig, analysis: Option<&TypeAnalysis>, ) -> Vec { - let mut lenses = Vec::new(); - - if config.show_references { - lenses.extend(reference_count_lenses(document, uri)); - } - if config.show_types { if let Some(analysis) = analysis { - lenses.extend(type_lenses(document, analysis)); + return type_lenses(document, analysis); } } - if config.show_errors.is_visible() { - if let Some(lens) = error_status_lens(document, uri) { - lenses.push(lens); - } - } - - lenses + Vec::new() } /// Resolve a code lens (add command if not present). @@ -78,7 +44,7 @@ pub fn code_lens( /// that was returned without a command. 
#[must_use] pub fn resolve_code_lens(lens: CodeLens) -> CodeLens { - // Our code lenses always include commands, so no resolution needed + // Our code lenses always include commands, so no resolution needed. lens } @@ -88,14 +54,12 @@ mod tests { use jrsonnet_lsp_document::{to_lsp_range, DocVersion, Document}; use jrsonnet_lsp_inference::TypeAnalysis; - use jrsonnet_lsp_scope::{is_definition_site, ScopeResolver}; use jrsonnet_lsp_types::GlobalTyStore; use jrsonnet_rowan_parser::{ nodes::{Bind, BindFunction, StmtLocal}, AstNode, SyntaxKind, }; - use lsp_types::{CodeLens, Command, Range, Uri}; - use rowan::NodeOrToken; + use lsp_types::{CodeLens, Command, Uri}; use super::*; @@ -108,48 +72,6 @@ mod tests { TypeAnalysis::analyze_with_global(doc, global_types) } - fn expected_reference_lens(doc: &Document, uri: &Uri, name: &str) -> CodeLens { - let ast = doc.ast(); - let text = doc.text(); - let line_index = doc.line_index(); - let token = ast - .syntax() - .descendants_with_tokens() - .filter_map(NodeOrToken::into_token) - .find(|token| { - token.kind() == SyntaxKind::IDENT - && token.text() == name - && is_definition_site(token) - }) - .expect("definition token should exist"); - let def_range = token - .parent() - .expect("definition token should have parent") - .text_range(); - let range = to_lsp_range(def_range, line_index, text); - let resolver = ScopeResolver::new(ast.syntax()); - let references = resolver.find_references(ast.syntax(), name, def_range); - let ref_count = references.len().saturating_sub(1); - let title = if ref_count == 1 { - "1 reference".to_string() - } else { - format!("{ref_count} references") - }; - CodeLens { - range, - command: Some(Command { - title, - command: "jrsonnet.findReferences".to_string(), - arguments: Some(vec![ - serde_json::to_value(uri.to_string()).unwrap(), - serde_json::to_value(range.start.line).unwrap(), - serde_json::to_value(range.start.character).unwrap(), - ]), - }), - data: None, - } - } - fn 
expected_function_type_lens( doc: &Document, analysis: &TypeAnalysis, @@ -236,53 +158,13 @@ mod tests { } #[test] - fn test_reference_count_lens() { - let code = "local x = 1; x + x + x"; - let doc = Document::new(code.to_string(), DocVersion::new(1)); - let uri = make_uri("test"); - - let config = CodeLensConfig { - show_references: true, - show_types: false, - show_errors: ErrorLensVisibility::Hidden, - }; - - let lenses = code_lens(&doc, &uri, &config, None); - let expected = vec![expected_reference_lens(&doc, &uri, "x")]; - - assert_eq!(lenses, expected); - } - - #[test] - fn test_no_lens_for_unused() { - let code = "local unused = 1; 42"; - let doc = Document::new(code.to_string(), DocVersion::new(1)); - let uri = make_uri("test"); - - let config = CodeLensConfig { - show_references: true, - show_types: false, - show_errors: ErrorLensVisibility::Hidden, - }; - - let lenses = code_lens(&doc, &uri, &config, None); - - // No lens because 'unused' has 0 references - assert!(lenses.is_empty()); - } - - #[test] - fn test_type_lens_for_function() { + fn type_lens_for_function() { let code = "local add(a, b) = a + b; add(1, 2)"; let doc = Document::new(code.to_string(), DocVersion::new(1)); let uri = make_uri("test"); let analysis = test_analysis(&doc); - let config = CodeLensConfig { - show_references: false, - show_types: true, - show_errors: ErrorLensVisibility::Hidden, - }; + let config = CodeLensConfig { show_types: true }; let lenses = code_lens(&doc, &uri, &config, Some(&analysis)); let expected = vec![expected_function_type_lens(&doc, &analysis, "add")]; @@ -290,17 +172,13 @@ mod tests { } #[test] - fn test_type_lens_for_complex_binding() { + fn type_lens_for_complex_binding() { let code = "local config = { name: 'test', count: 42 }; config"; let doc = Document::new(code.to_string(), DocVersion::new(1)); let uri = make_uri("test"); let analysis = test_analysis(&doc); - let config = CodeLensConfig { - show_references: false, - show_types: true, - show_errors: 
ErrorLensVisibility::Hidden, - }; + let config = CodeLensConfig { show_types: true }; let lenses = code_lens(&doc, &uri, &config, Some(&analysis)); let expected = vec![expected_complex_binding_type_lens(&doc, &analysis)]; @@ -308,7 +186,7 @@ mod tests { } #[test] - fn test_all_lenses_with_types() { + fn all_lenses_config_returns_types() { let code = "local f(x) = x * 2; f(21)"; let doc = Document::new(code.to_string(), DocVersion::new(1)); let uri = make_uri("test"); @@ -317,84 +195,19 @@ mod tests { let config = CodeLensConfig::all(); let lenses = code_lens(&doc, &uri, &config, Some(&analysis)); - let expected = vec![ - expected_reference_lens(&doc, &uri, "f"), - expected_reference_lens(&doc, &uri, "x"), - expected_function_type_lens(&doc, &analysis, "f"), - ]; - assert_eq!(lenses, expected); - } - - #[test] - fn test_singular_reference() { - let code = "local x = 1; x"; - let doc = Document::new(code.to_string(), DocVersion::new(1)); - let uri = make_uri("test"); - - let config = CodeLensConfig { - show_references: true, - show_types: false, - show_errors: ErrorLensVisibility::Hidden, - }; - - let lenses = code_lens(&doc, &uri, &config, None); - let expected = vec![expected_reference_lens(&doc, &uri, "x")]; - assert_eq!(lenses, expected); - } - - #[test] - fn test_error_status_lens_with_errors() { - // Invalid syntax - missing expression after + - let code = "1 +"; - let doc = Document::new(code.to_string(), DocVersion::new(1)); - let uri = make_uri("test"); - - let config = CodeLensConfig { - show_references: false, - show_types: false, - show_errors: ErrorLensVisibility::Visible, - }; - - let lenses = code_lens(&doc, &uri, &config, None); - - let expected = vec![CodeLens { - range: Range { - start: lsp_types::Position { - line: 0, - character: 0, - }, - end: lsp_types::Position { - line: 0, - character: 0, - }, - }, - command: Some(Command { - title: "1 syntax error".to_string(), - command: "jrsonnet.showErrors".to_string(), - arguments: 
Some(vec![serde_json::to_value(uri.to_string()).unwrap()]), - }), - data: None, - }]; - + let expected = vec![expected_function_type_lens(&doc, &analysis, "f")]; assert_eq!(lenses, expected); } #[test] - fn test_error_status_lens_no_errors() { - let code = "{ a: 1, b: 2 }"; + fn no_lenses_without_analysis() { + let code = "local add(a, b) = a + b; add(1, 2)"; let doc = Document::new(code.to_string(), DocVersion::new(1)); let uri = make_uri("test"); - let config = CodeLensConfig { - show_references: false, - show_types: false, - show_errors: ErrorLensVisibility::Visible, - }; - + let config = CodeLensConfig::all(); let lenses = code_lens(&doc, &uri, &config, None); - let expected: Vec = vec![]; - - assert_eq!(lenses, expected); + assert!(lenses.is_empty()); } } diff --git a/crates/jrsonnet-lsp-handlers/src/code_lens/mod.rs b/crates/jrsonnet-lsp-handlers/src/code_lens/mod.rs index dbb53540..ecadf37b 100644 --- a/crates/jrsonnet-lsp-handlers/src/code_lens/mod.rs +++ b/crates/jrsonnet-lsp-handlers/src/code_lens/mod.rs @@ -1,14 +1,9 @@ -//! Code lens handler for showing reference counts and actions. +//! Code lens handler for type annotations. //! //! Provides: -//! - Reference counts for definitions (functions, variables) -//! - "Evaluate" action for executable Jsonnet files //! - Type annotations for function definitions -//! 
- Error status indicator for the file -mod actions; mod dispatch; -mod refs; mod type_lenses; -pub use dispatch::{code_lens, resolve_code_lens, CodeLensConfig, ErrorLensVisibility}; +pub use dispatch::{code_lens, resolve_code_lens, CodeLensConfig}; diff --git a/crates/jrsonnet-lsp-handlers/src/code_lens/refs.rs b/crates/jrsonnet-lsp-handlers/src/code_lens/refs.rs deleted file mode 100644 index 5359af1f..00000000 --- a/crates/jrsonnet-lsp-handlers/src/code_lens/refs.rs +++ /dev/null @@ -1,70 +0,0 @@ -use jrsonnet_lsp_document::{to_lsp_range, Document}; -use jrsonnet_lsp_scope::{is_definition_site, ScopeResolver}; -use jrsonnet_rowan_parser::{AstNode, SyntaxKind}; -use lsp_types::{CodeLens, Command, Uri}; - -/// Generate reference count code lenses for all definitions. -pub(super) fn reference_count_lenses(document: &Document, uri: &Uri) -> Vec { - let mut lenses = Vec::new(); - let ast = document.ast(); - let text = document.text(); - let line_index = document.line_index(); - - // Build scope resolver for reference counting - let resolver = ScopeResolver::new(ast.syntax()); - - // Find all definitions and count their references - for token in ast - .syntax() - .descendants_with_tokens() - .filter_map(rowan::NodeOrToken::into_token) - { - if token.kind() != SyntaxKind::IDENT { - continue; - } - - if !is_definition_site(&token) { - continue; - } - - let Some(parent) = token.parent() else { - continue; - }; - - let def_range = parent.text_range(); - let name = token.text(); - - // Count references (excluding the definition itself) - let references = resolver.find_references(ast.syntax(), name, def_range); - let ref_count = references.len().saturating_sub(1); // Exclude definition - - // Skip if no references (to avoid clutter) - if ref_count == 0 { - continue; - } - - let range = to_lsp_range(def_range, line_index, text); - - let title = if ref_count == 1 { - "1 reference".to_string() - } else { - format!("{ref_count} references") - }; - - lenses.push(CodeLens { - 
range, - command: Some(Command { - title, - command: "jrsonnet.findReferences".to_string(), - arguments: Some(vec![ - serde_json::json!(uri.to_string()), - serde_json::json!(range.start.line), - serde_json::json!(range.start.character), - ]), - }), - data: None, - }); - } - - lenses -} diff --git a/crates/jrsonnet-lsp/src/server.rs b/crates/jrsonnet-lsp/src/server.rs index 5a44ded3..3997ee74 100644 --- a/crates/jrsonnet-lsp/src/server.rs +++ b/crates/jrsonnet-lsp/src/server.rs @@ -86,13 +86,6 @@ struct InitializeRoots { root_path: Option, } -const LEGACY_EXECUTE_COMMANDS: [&str; 4] = [ - "jrsonnet.evalExpression", - "jrsonnet.findTransitiveImporters", - "jrsonnet.findReferences", - "jrsonnet.showErrors", -]; - const WATCHED_FILE_GLOB_PATTERNS: [&str; 3] = ["**/*.jsonnet", "**/*.libsonnet", "**/*.json"]; pub(super) fn unique_files(files: impl IntoIterator) -> Vec { diff --git a/crates/jrsonnet-lsp/src/server/async_requests/commands/diagnostics.rs b/crates/jrsonnet-lsp/src/server/async_requests/commands/diagnostics.rs deleted file mode 100644 index d508be85..00000000 --- a/crates/jrsonnet-lsp/src/server/async_requests/commands/diagnostics.rs +++ /dev/null @@ -1,49 +0,0 @@ -use jrsonnet_lsp_document::CanonicalPath; -use jrsonnet_lsp_import::ImportResolution; - -use super::super::AsyncRequestContext; -use crate::analysis::{tanka::effective_import_roots, EvalConfig, Evaluator}; - -impl AsyncRequestContext { - pub(super) fn execute_show_errors(&self, uri: &str) -> Option { - let uri_parsed: lsp_types::Uri = uri.parse().ok()?; - let path = CanonicalPath::from_uri(&uri_parsed).ok()?; - let doc = self.load_document_for_path(&path)?; - let analysis = self.analyze_document(&path, &doc); - let (enable_lint_diagnostics, evaluator, import_roots) = { - let config = self.config.read(); - let evaluator = config.enable_eval_diagnostics.then(|| { - let eval_config = EvalConfig { - jpath: config.jpath.clone(), - resolve_paths_with_tanka: config.resolve_paths_with_tanka, - }; - 
Evaluator::new(&eval_config) - }); - let import_roots = effective_import_roots( - path.as_path(), - &config.jpath, - config.resolve_paths_with_tanka, - ); - (config.enable_lint_diagnostics, evaluator, import_roots) - }; - let import_resolution = ImportResolution::new(&path, &import_roots); - let import_occurrences = import_resolution.parse_occurrences(&doc); - - let diagnostics = crate::handlers::compute_diagnostics( - &doc, - &path, - enable_lint_diagnostics, - evaluator.as_ref(), - &uri_parsed, - &analysis, - &import_occurrences, - ); - - let response = lsp_types::PublishDiagnosticsParams { - uri: uri_parsed, - diagnostics, - version: Some(doc.version().0), - }; - serde_json::to_value(response).ok() - } -} diff --git a/crates/jrsonnet-lsp/src/server/async_requests/commands/dispatch.rs b/crates/jrsonnet-lsp/src/server/async_requests/commands/dispatch.rs deleted file mode 100644 index a3489bfe..00000000 --- a/crates/jrsonnet-lsp/src/server/async_requests/commands/dispatch.rs +++ /dev/null @@ -1,46 +0,0 @@ -use lsp_types::ExecuteCommandParams; -use tracing::{info, warn}; - -use super::super::AsyncRequestContext; - -impl AsyncRequestContext { - pub(crate) fn execute_command( - &self, - params: &ExecuteCommandParams, - ) -> Option { - info!("Execute command: {}", params.command); - - match params.command.as_str() { - "jrsonnet.evalExpression" => { - let expr = params.arguments.first()?.as_str()?; - let base_uri = params.arguments.get(1).and_then(|v| v.as_str()); - Some(self.execute_eval_expression(expr, base_uri)) - } - "jrsonnet.findTransitiveImporters" => { - let uri = params.arguments.first()?.as_str()?; - self.execute_find_transitive_importers(uri) - } - "jrsonnet.findReferences" => { - let uri = params.arguments.first()?.as_str()?; - let line = params.arguments.get(1)?.as_u64()?; - let line = u32::try_from(line).ok()?; - let character = params.arguments.get(2)?.as_u64()?; - let character = u32::try_from(character).ok()?; - let include_declaration = params - 
.arguments - .get(3) - .and_then(serde_json::Value::as_bool) - .unwrap_or(false); - self.execute_find_references(uri, line, character, include_declaration) - } - "jrsonnet.showErrors" => { - let uri = params.arguments.first()?.as_str()?; - self.execute_show_errors(uri) - } - _ => { - warn!("Unknown command: {}", params.command); - None - } - } - } -} diff --git a/crates/jrsonnet-lsp/src/server/async_requests/commands/mod.rs b/crates/jrsonnet-lsp/src/server/async_requests/commands/mod.rs index fc117fd6..29a5e8ca 100644 --- a/crates/jrsonnet-lsp/src/server/async_requests/commands/mod.rs +++ b/crates/jrsonnet-lsp/src/server/async_requests/commands/mod.rs @@ -1,5 +1,2 @@ -mod diagnostics; -mod dispatch; mod eval; mod graph; -mod references; diff --git a/crates/jrsonnet-lsp/src/server/async_requests/commands/references.rs b/crates/jrsonnet-lsp/src/server/async_requests/commands/references.rs deleted file mode 100644 index 14d385d3..00000000 --- a/crates/jrsonnet-lsp/src/server/async_requests/commands/references.rs +++ /dev/null @@ -1,32 +0,0 @@ -use lsp_types::{ - PartialResultParams, Position, ReferenceContext, ReferenceParams, TextDocumentIdentifier, - TextDocumentPositionParams, WorkDoneProgressParams, -}; - -use super::super::AsyncRequestContext; - -impl AsyncRequestContext { - pub(super) fn execute_find_references( - &self, - uri: &str, - line: u32, - character: u32, - include_declaration: bool, - ) -> Option { - let uri_parsed: lsp_types::Uri = uri.parse().ok()?; - let params = ReferenceParams { - text_document_position: TextDocumentPositionParams { - text_document: TextDocumentIdentifier { uri: uri_parsed }, - position: Position { line, character }, - }, - context: ReferenceContext { - include_declaration, - }, - work_done_progress_params: WorkDoneProgressParams::default(), - partial_result_params: PartialResultParams::default(), - }; - let refs = self.references(¶ms).unwrap_or_default(); - - serde_json::to_value(refs).ok() - } -} diff --git 
a/crates/jrsonnet-lsp/src/server/initialization.rs b/crates/jrsonnet-lsp/src/server/initialization.rs index a6b21854..3c1cf290 100644 --- a/crates/jrsonnet-lsp/src/server/initialization.rs +++ b/crates/jrsonnet-lsp/src/server/initialization.rs @@ -10,7 +10,7 @@ use lsp_types::{ }; use tracing::info; -use super::{custom_operations, InitializeRoots, Server, LEGACY_EXECUTE_COMMANDS}; +use super::{custom_operations, InitializeRoots, Server}; impl Server { /// Handle the initialize request. @@ -92,10 +92,7 @@ impl Server { }), ), execute_command_provider: Some(ExecuteCommandOptions { - commands: custom_operations::execute_command_ids() - .into_iter() - .chain(LEGACY_EXECUTE_COMMANDS.into_iter().map(ToString::to_string)) - .collect(), + commands: custom_operations::execute_command_ids(), work_done_progress_options: WorkDoneProgressOptions::default(), }), code_lens_provider: Some(CodeLensOptions { diff --git a/crates/jrsonnet-lsp/src/server/request_dispatch.rs b/crates/jrsonnet-lsp/src/server/request_dispatch.rs index ea224fff..7b6e4317 100644 --- a/crates/jrsonnet-lsp/src/server/request_dispatch.rs +++ b/crates/jrsonnet-lsp/src/server/request_dispatch.rs @@ -395,10 +395,11 @@ impl Server { return Ok(()); } - let context = self.async_request_context(); - self.spawn_typed_json_response(request, move || { - requests::async_handlers::execute_command::handle(&context, ¶ms) - }); + let _ = self.inflight_requests.send_err( + request, + lsp_server::ErrorCode::InvalidParams, + format!("Unknown execute command: {}", params.command), + )?; Ok(()) } } diff --git a/crates/jrsonnet-lsp/src/server/requests/async_handlers/execute_command.rs b/crates/jrsonnet-lsp/src/server/requests/async_handlers/execute_command.rs deleted file mode 100644 index 29c15e30..00000000 --- a/crates/jrsonnet-lsp/src/server/requests/async_handlers/execute_command.rs +++ /dev/null @@ -1,10 +0,0 @@ -use lsp_types::ExecuteCommandParams; - -use crate::server::async_requests::AsyncRequestContext; - -pub(crate) fn 
handle( - context: &AsyncRequestContext, - params: &ExecuteCommandParams, -) -> Option { - context.execute_command(params) -} diff --git a/crates/jrsonnet-lsp/src/server/requests/async_handlers/mod.rs b/crates/jrsonnet-lsp/src/server/requests/async_handlers/mod.rs index be2c838a..c8cbb8b0 100644 --- a/crates/jrsonnet-lsp/src/server/requests/async_handlers/mod.rs +++ b/crates/jrsonnet-lsp/src/server/requests/async_handlers/mod.rs @@ -3,7 +3,6 @@ pub(crate) mod code_lens; pub(crate) mod completion; pub(crate) mod document_highlight; pub(crate) mod document_symbol; -pub(crate) mod execute_command; pub(crate) mod formatting; pub(crate) mod goto_declaration; pub(crate) mod goto_definition; diff --git a/crates/jrsonnet-lsp/src/server/watched_files.rs b/crates/jrsonnet-lsp/src/server/watched_files.rs index 276baa38..55a8f135 100644 --- a/crates/jrsonnet-lsp/src/server/watched_files.rs +++ b/crates/jrsonnet-lsp/src/server/watched_files.rs @@ -9,8 +9,7 @@ use super::*; impl Server { pub(super) fn is_supported_execute_command(command: &str) -> bool { - LEGACY_EXECUTE_COMMANDS.contains(&command) - || super::custom_operations::operation_for_execute_command(command).is_some() + super::custom_operations::operation_for_execute_command(command).is_some() } pub(super) fn supports_dynamic_watched_files_registration(params: &InitializeParams) -> bool { diff --git a/crates/jrsonnet-lsp/tests/integration_test.rs b/crates/jrsonnet-lsp/tests/integration_test.rs index 9a9dd46a..9d392a75 100644 --- a/crates/jrsonnet-lsp/tests/integration_test.rs +++ b/crates/jrsonnet-lsp/tests/integration_test.rs @@ -607,6 +607,31 @@ fn custom_eval_file_request(id: i32, uri: &str) -> Request { ) } +fn custom_eval_expression_request(id: i32, expression: &str, base_uri: Option<&str>) -> Request { + let mut params = serde_json::json!({ + "expression": expression, + }); + if let Some(base_uri) = base_uri { + params["baseDocument"] = serde_json::json!({ + "uri": base_uri, + }); + } + + Request::new(id.into(), 
"jrsonnet/evalExpression".to_string(), params) +} + +fn custom_find_transitive_importers_request(id: i32, uri: &str) -> Request { + Request::new( + id.into(), + "jrsonnet/findTransitiveImporters".to_string(), + serde_json::json!({ + "textDocument": { + "uri": uri, + }, + }), + ) +} + fn workspace_symbol_request(id: i32, query: &str) -> Request { let params = lsp_types::WorkspaceSymbolParams { query: query.to_string(), @@ -903,36 +928,6 @@ fn expected_unused_import_binding_actions( ] } -fn find_references_command_args(uri: &str, include_declaration: bool) -> Vec { - let mut args = vec![ - serde_json::Value::String(uri.to_string()), - serde_json::Value::Number(0_u64.into()), - serde_json::Value::Number(13_u64.into()), - ]; - if include_declaration { - args.push(serde_json::Value::Bool(true)); - } - args -} - -fn request_find_references_command( - conn: &Connection, - id: i32, - uri: &str, - include_declaration: bool, -) -> Vec { - conn.sender - .send(Message::Request(execute_command_request( - id, - "jrsonnet.findReferences", - find_references_command_args(uri, include_declaration), - ))) - .unwrap(); - let response = recv_response(conn, id); - assert!(response.error.is_none(), "Command should succeed"); - serde_json::from_value(response.result.expect("command should return result")).unwrap() -} - fn location(uri: &str, start_character: u32, end_character: u32) -> lsp_types::Location { lsp_types::Location { uri: uri.parse().unwrap(), diff --git a/crates/jrsonnet-lsp/tests/integration_test/features.rs b/crates/jrsonnet-lsp/tests/integration_test/features.rs index a290be6f..db1cf70c 100644 --- a/crates/jrsonnet-lsp/tests/integration_test/features.rs +++ b/crates/jrsonnet-lsp/tests/integration_test/features.rs @@ -576,7 +576,7 @@ fn test_code_action_comment_policy_updates_via_configuration_change() { } #[test] -fn test_execute_command_find_references() { +fn test_text_document_references() { let (client_conn, server_conn) = Connection::memory(); let server_thread = 
run_server(server_conn); @@ -598,101 +598,47 @@ fn test_execute_command_find_references() { .unwrap(); let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); - let refs = request_find_references_command(&client_conn, 2, uri, false); - assert_eq!(refs, expected_find_references(uri, false)); - - let refs_with_declaration = request_find_references_command(&client_conn, 3, uri, true); - assert_eq!(refs_with_declaration, expected_find_references(uri, true)); - - client_conn - .sender - .send(Message::Request(shutdown_request(4))) - .unwrap(); - let _ = recv_response(&client_conn, 4); client_conn .sender - .send(Message::Notification(exit_notification())) + .send(Message::Request(references_request(2, uri, 0, 13, false))) .unwrap(); - server_thread - .join() - .expect("Server thread should exit cleanly"); -} - -#[test] -fn test_execute_command_show_errors() { - let (client_conn, server_conn) = Connection::memory(); - let server_thread = run_server(server_conn); - - client_conn - .sender - .send(Message::Request(initialize_request(1))) - .unwrap(); - let _ = recv_response(&client_conn, 1); - client_conn - .sender - .send(Message::Notification(initialized_notification())) - .unwrap(); - - let uri = "file:///test/show-errors-command.jsonnet"; - let text = "{ hello: }"; - client_conn - .sender - .send(Message::Notification(did_open_notification(uri, text))) - .unwrap(); - let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); + let refs_response = recv_response(&client_conn, 2); + assert!( + refs_response.error.is_none(), + "textDocument/references should succeed" + ); + let refs: Option> = + serde_json::from_value(refs_response.result.expect("should have result")).unwrap(); + assert_eq!( + refs.unwrap_or_default(), + expected_find_references(uri, false) + ); client_conn .sender - .send(Message::Request(execute_command_request( - 2, - "jrsonnet.showErrors", - vec![serde_json::Value::String(uri.to_string())], - ))) + 
.send(Message::Request(references_request(3, uri, 0, 13, true))) .unwrap(); - let response = recv_response(&client_conn, 2); + let refs_with_declaration_response = recv_response(&client_conn, 3); assert!( - response.error.is_none(), - "showErrors command should succeed" + refs_with_declaration_response.error.is_none(), + "textDocument/references should succeed" ); - let diagnostics: lsp_types::PublishDiagnosticsParams = - serde_json::from_value(response.result.expect("showErrors should return a result")) - .expect("showErrors result should be publish diagnostics payload"); - + let refs_with_declaration: Option> = serde_json::from_value( + refs_with_declaration_response + .result + .expect("should have result"), + ) + .unwrap(); assert_eq!( - diagnostics, - lsp_types::PublishDiagnosticsParams { - uri: uri.parse().unwrap(), - diagnostics: vec![lsp_types::Diagnostic { - range: lsp_types::Range { - start: Position { - line: 0, - character: 9, - }, - end: Position { - line: 0, - character: 9, - }, - }, - severity: Some(lsp_types::DiagnosticSeverity::ERROR), - code: Some(lsp_types::NumberOrString::String( - "syntax-error".to_string() - )), - code_description: None, - source: Some("jrsonnet".to_string()), - message: "expected expression".to_string(), - related_information: None, - tags: None, - data: None, - }], - version: Some(1), - } + refs_with_declaration.unwrap_or_default(), + expected_find_references(uri, true) ); client_conn .sender - .send(Message::Request(shutdown_request(3))) + .send(Message::Request(shutdown_request(4))) .unwrap(); - let _ = recv_response(&client_conn, 3); + let _ = recv_response(&client_conn, 4); client_conn .sender .send(Message::Notification(exit_notification())) @@ -828,24 +774,21 @@ fn test_eval_commands_use_tanka_import_roots() { client_conn .sender - .send(Message::Request(execute_command_request( + .send(Message::Request(custom_eval_expression_request( 4, - "jrsonnet.evalExpression", - vec![ - serde_json::Value::String(r#"(import 
"lib.libsonnet").answer"#.to_string()), - serde_json::Value::String(main_uri), - ], + r#"(import "lib.libsonnet").answer"#, + Some(&main_uri), ))) .unwrap(); let eval_expression_response = recv_response(&client_conn, 4); assert!( eval_expression_response.error.is_none(), - "evalExpression command should succeed" + "jrsonnet/evalExpression request should succeed" ); assert_eq!( eval_expression_response .result - .expect("evalExpression should return a result"), + .expect("jrsonnet/evalExpression should return a result"), serde_json::json!(42) ); @@ -956,20 +899,20 @@ fn test_code_lens_resolve_request() { let lenses: Vec = serde_json::from_value(response.result.expect("should have result")).unwrap(); assert!(!lenses.is_empty(), "expected code lenses for test document"); - let reference_lens = lenses + let evaluate_lens = lenses .into_iter() .find(|lens| { lens.command .as_ref() - .is_some_and(|command| command.command == "jrsonnet.findReferences") + .is_some_and(|command| command.command == "jrsonnet.evalFile") }) - .expect("expected reference count code lens"); + .expect("expected evaluate code lens"); client_conn .sender .send(Message::Request(code_lens_resolve_request( 3, - reference_lens.clone(), + evaluate_lens.clone(), ))) .unwrap(); let response = recv_response(&client_conn, 3); @@ -979,7 +922,7 @@ fn test_code_lens_resolve_request() { ); let resolved: lsp_types::CodeLens = serde_json::from_value(response.result.expect("should have result")).unwrap(); - assert_eq!(resolved, reference_lens); + assert_eq!(resolved, evaluate_lens); client_conn .sender diff --git a/crates/jrsonnet-lsp/tests/integration_test/lifecycle.rs b/crates/jrsonnet-lsp/tests/integration_test/lifecycle.rs index f4a0081a..bed68e19 100644 --- a/crates/jrsonnet-lsp/tests/integration_test/lifecycle.rs +++ b/crates/jrsonnet-lsp/tests/integration_test/lifecycle.rs @@ -43,13 +43,7 @@ fn test_initialize_shutdown() { ); assert_eq!( result["capabilities"]["executeCommandProvider"]["commands"], - 
serde_json::json!([ - "jrsonnet.evalFile", - "jrsonnet.evalExpression", - "jrsonnet.findTransitiveImporters", - "jrsonnet.findReferences", - "jrsonnet.showErrors" - ]), + serde_json::json!(["jrsonnet.evalFile"]), "execute command capability should advertise all command IDs", ); assert_eq!( @@ -375,10 +369,8 @@ fn test_configuration_change_reindexes_closed_import_graph_entries() { client_conn .sender - .send(Message::Request(execute_command_request( - 2, - "jrsonnet.findTransitiveImporters", - vec![serde_json::Value::String(lib_a_uri.clone())], + .send(Message::Request(custom_find_transitive_importers_request( + 2, &lib_a_uri, ))) .unwrap(); let old_target_response = recv_response(&client_conn, 2); @@ -398,10 +390,8 @@ fn test_configuration_change_reindexes_closed_import_graph_entries() { client_conn .sender - .send(Message::Request(execute_command_request( - 3, - "jrsonnet.findTransitiveImporters", - vec![serde_json::Value::String(lib_b_uri.clone())], + .send(Message::Request(custom_find_transitive_importers_request( + 3, &lib_b_uri, ))) .unwrap(); let new_target_response = recv_response(&client_conn, 3); diff --git a/crates/jrsonnet-lsp/tests/integration_test/workspace_cross_file.rs b/crates/jrsonnet-lsp/tests/integration_test/workspace_cross_file.rs index b3454433..dc6a50a9 100644 --- a/crates/jrsonnet-lsp/tests/integration_test/workspace_cross_file.rs +++ b/crates/jrsonnet-lsp/tests/integration_test/workspace_cross_file.rs @@ -52,10 +52,8 @@ fn test_watched_file_refreshes_unopened_importers_for_references() { client_conn .sender - .send(Message::Request(execute_command_request( - 20, - "jrsonnet.findTransitiveImporters", - vec![serde_json::Value::String(lib1_uri.clone())], + .send(Message::Request(custom_find_transitive_importers_request( + 20, &lib1_uri, ))) .unwrap(); let response = recv_response(&client_conn, 20); @@ -190,25 +188,21 @@ fn test_initialize_bootstraps_workspace_import_graph() { }); let mut actual_result = serde_json::Value::Null; for request_id 
in 2..=42 { + let target_uri = expected_result["file"] + .as_str() + .expect("expected file URI should be a string"); client_conn .sender - .send(Message::Request(execute_command_request( - request_id, - "jrsonnet.findTransitiveImporters", - vec![serde_json::Value::String( - expected_result["file"] - .as_str() - .expect("expected file URI should be a string") - .to_string(), - )], + .send(Message::Request(custom_find_transitive_importers_request( + request_id, target_uri, ))) .unwrap(); let response = recv_response(&client_conn, request_id); assert!( response.error.is_none(), - "findTransitiveImporters command should succeed" + "jrsonnet/findTransitiveImporters request should succeed" ); - actual_result = response.result.expect("command should return result"); + actual_result = response.result.expect("request should return result"); if actual_result == expected_result { break; } @@ -478,10 +472,8 @@ fn test_find_transitive_importers_returns_sorted_uris() { client_conn .sender - .send(Message::Request(execute_command_request( - 2, - "jrsonnet.findTransitiveImporters", - vec![serde_json::Value::String(lib_uri.clone())], + .send(Message::Request(custom_find_transitive_importers_request( + 2, &lib_uri, ))) .unwrap(); let response = recv_response(&client_conn, 2); @@ -568,10 +560,8 @@ fn test_did_close_preserves_import_graph_for_references() { client_conn .sender - .send(Message::Request(execute_command_request( - 2, - "jrsonnet.findTransitiveImporters", - vec![serde_json::Value::String(lib_uri.clone())], + .send(Message::Request(custom_find_transitive_importers_request( + 2, &lib_uri, ))) .unwrap(); let response = recv_response(&client_conn, 2); diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/code_lens_references_and_evaluate.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/code_lens_references_and_evaluate.yaml index 7bba0e0b..2fc58324 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/runner/code_lens_references_and_evaluate.yaml +++ 
b/crates/jrsonnet-lsp/tests/scenarios/runner/code_lens_references_and_evaluate.yaml @@ -1,10 +1,8 @@ -# Verify two concrete code lenses for `local x = 1; x`: -# - a references lens at the `x` binding with a stable find-references payload -# - a file-level Evaluate lens with the expected eval command payload. +# Verify Evaluate code lens is emitted with a stable execute payload. steps: - step: create files: - main.jsonnet: ((fileStart:|))local [[decl:x]] = 1; x + main.jsonnet: ((fileStart:|))local x = 1; x - step: diagnosticsSettled @@ -12,18 +10,14 @@ steps: as: lenses file: main.jsonnet +- step: requestExecuteCodeLens + as: evaluateViaCodeLens + request: lenses + index: 0 + - step: expectCodeLens request: lenses result: - - at: decl - text: x - command: - title: 1 reference - command: jrsonnet.findReferences - arguments: - - file: main.jsonnet - - 0 - - 6 - range: fileStart command: title: Evaluate @@ -31,11 +25,6 @@ steps: arguments: - file: main.jsonnet -- step: requestExecuteCodeLens - as: evaluateViaCodeLens - request: lenses - index: 1 - - step: expectExecuteCodeLens request: evaluateViaCodeLens result: 1 diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_completion_execute_command.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_completion_execute_command.yaml index f1c9788d..b89d1c5f 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_completion_execute_command.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_completion_execute_command.yaml @@ -1,5 +1,5 @@ # Smoke-test heterogeneous request handling in one scenario: -# hover, completion, and executeCommand request/expect flow. +# hover, completion, and custom request/expect flow. 
steps: - step: create files: @@ -34,12 +34,12 @@ steps: kind: 9 detail: Jsonnet standard library -- step: requestExecuteCommand +- step: requestCustom as: evalExpression - command: jrsonnet.evalExpression - arguments: - - 1 + 2 + method: jrsonnet/evalExpression + params: + expression: 1 + 2 -- step: expectExecuteCommand +- step: expectCustom request: evalExpression result: 3 From d58c1613c0508e5954a50333295dafe5b6742e32 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Wed, 25 Feb 2026 11:20:16 +0000 Subject: [PATCH 175/210] docs(lsp): describe custom requests and eval code-lens bridge Update docs to reflect the spec-native surface: - custom requests for evalExpression and transitive importers - standard references/diagnostics flows replacing legacy command paths - evalFile exposed via Evaluate code lens with executeCommand as transport bridge --- docs/lsp/ARCHITECTURE.md | 24 ++++------------ docs/lsp/HANDLERS.md | 31 ++++++--------------- docs/lsp/README.md | 60 +++++++++++++++++++++++++++++++++++----- 3 files changed, 67 insertions(+), 48 deletions(-) diff --git a/docs/lsp/ARCHITECTURE.md b/docs/lsp/ARCHITECTURE.md index c1a0f11f..8933eb9a 100644 --- a/docs/lsp/ARCHITECTURE.md +++ b/docs/lsp/ARCHITECTURE.md @@ -236,8 +236,8 @@ The common resolution order is: That ordering is applied through a shared boundary API: `jrsonnet_lsp_import::ImportResolution`. Server graph updates, async -diagnostics, and `showErrors` all resolve and parse imports through this single -type so import behavior stays consistent across subsystems. +diagnostics, and custom eval operations all resolve and parse imports through +this single type so import behavior stays consistent across subsystems. 
Cross-file navigation and reference/rename paths then use the graph's resolved entries as the source of truth instead of re-resolving import strings @@ -477,25 +477,13 @@ steps: Advertised commands: - `jrsonnet.evalFile` -- `jrsonnet.evalExpression` -- `jrsonnet.findTransitiveImporters` -- `jrsonnet.findReferences` -- `jrsonnet.showErrors` - -Current async command implementation handles: - -- `jrsonnet.evalFile` -- `jrsonnet.evalExpression` -- `jrsonnet.findTransitiveImporters` -- `jrsonnet.findReferences` -- `jrsonnet.showErrors` +- Custom requests: + - `jrsonnet/evalFile` + - `jrsonnet/evalExpression` + - `jrsonnet/findTransitiveImporters` Unknown command IDs are returned as explicit LSP `InvalidParams` errors. -The `jrsonnet.showErrors` command reuses the same diagnostics pipeline as -`textDocument/publishDiagnostics` and returns a typed diagnostics payload for -the requested file. - ## Concurrency Strategy Concurrency is intentionally split: diff --git a/docs/lsp/HANDLERS.md b/docs/lsp/HANDLERS.md index c5d4701f..c11587ba 100644 --- a/docs/lsp/HANDLERS.md +++ b/docs/lsp/HANDLERS.md @@ -90,10 +90,7 @@ File: `crates/jrsonnet-lsp-handlers/src/code_lens.rs` `CodeLensConfig` supports: -- `show_references` -- `show_evaluate` - `show_types` -- `show_errors` (`Hidden` or `Visible`) Current server path builds `CodeLensConfig::all()` in async context and passes computed `TypeAnalysis`. @@ -103,10 +100,11 @@ computed `TypeAnalysis`. Lens categories: -- reference count lenses at definition sites -- evaluate-file command lens (`jrsonnet.evalFile`) - inferred type lenses for selected bindings -- syntax-status lens for parse errors + +The server augments these handler lenses with a custom-operation evaluate lens +(`jrsonnet.evalFile`) so generic clients can run file evaluation via the code +lens execute-command bridge. ### Completion @@ -365,27 +363,14 @@ These analyses are computed in async context before handler invocation. 
Advertised command IDs: - `jrsonnet.evalFile` -- `jrsonnet.evalExpression` -- `jrsonnet.findTransitiveImporters` -- `jrsonnet.findReferences` -- `jrsonnet.showErrors` - -Current async command implementation handles: - -- `jrsonnet.evalFile` -- `jrsonnet.evalExpression` -- `jrsonnet.findTransitiveImporters` -- `jrsonnet.findReferences` -- `jrsonnet.showErrors` +- Custom requests: + - `jrsonnet/evalFile` + - `jrsonnet/evalExpression` + - `jrsonnet/findTransitiveImporters` Unknown command IDs are rejected with an explicit LSP `InvalidParams` response error. -`jrsonnet.showErrors` returns a `PublishDiagnosticsParams` payload for the -target URI so clients can render the same diagnostics data that the server -publishes asynchronously (syntax, unresolved imports, lint/type checks, and -optional eval diagnostics). - ## Testing Strategy - Handler unit tests live with handler modules in diff --git a/docs/lsp/README.md b/docs/lsp/README.md index 39258ce8..198fee5b 100644 --- a/docs/lsp/README.md +++ b/docs/lsp/README.md @@ -31,11 +31,6 @@ Advertised LSP features: - In-editor metadata: `inlayHint`, `semanticTokens/full`, `semanticTokens/range`, `codeLens` (+ `codeLens/resolve`). - Actions: `codeAction` (`quickfix`, `source.fixAll`), `formatting`. -- Commands via `workspace/executeCommand`: - - `jrsonnet.evalExpression` - - `jrsonnet.findTransitiveImporters` - - `jrsonnet.findReferences` - - `jrsonnet.showErrors` When the client supports dynamic watched-file registration, the server also registers watchers for: @@ -46,8 +41,8 @@ registers watchers for: ## Custom Requests -These custom requests are also exposed as code lenses. However, implementing -directly may provide a better experience. +The server exposes custom methods for non-standard operations. `jrsonnet/evalFile` +is also exposed via the Evaluate code lens. ### `jrsonnet/evalFile` @@ -74,6 +69,57 @@ Example response: This operation is also exposed via the Evaluate code lens. 
+### `jrsonnet/evalExpression` + +Evaluate an arbitrary Jsonnet expression and return its resulting JSON value. +Optionally provide a base document URI to resolve imports relative to a file. + +Example request: + +```json +{ + "method": "jrsonnet/evalExpression", + "params": { + "expression": "(import \"lib.libsonnet\")(40)", + "baseDocument": { + "uri": "file:///a/b/main.jsonnet" + } + } +} +``` + +Example response: + +```json +42 +``` + +### `jrsonnet/findTransitiveImporters` + +Return all files that transitively import the requested document. + +Example request: + +```json +{ + "method": "jrsonnet/findTransitiveImporters", + "params": { + "textDocument": { + "uri": "file:///a/b/lib.libsonnet" + } + } +} +``` + +Example response: + +```json +{ + "file": "file:///a/b/lib.libsonnet", + "transitiveImporters": ["file:///a/b/main.jsonnet"] +} +``` + ## Configuration Configuration is accepted from: From 19d19b887d69339068ad98290f7bcdfd69a418c5 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Wed, 25 Feb 2026 17:29:55 +0000 Subject: [PATCH 176/210] feat(formatting): migrate LSP formatting to in-process engine --- Cargo.lock | 5 + cmds/jrsonnet-fmt/Cargo.toml | 5 + cmds/jrsonnet-fmt/src/api.rs | 56 +++ cmds/jrsonnet-fmt/src/children.rs | 38 +- cmds/jrsonnet-fmt/src/context.rs | 83 ++- cmds/jrsonnet-fmt/src/lib.rs | 12 + cmds/jrsonnet-fmt/src/printable.rs | 54 +- cmds/jrsonnet-fmt/src/tests.rs | 52 ++ crates/jrsonnet-lsp-handlers/Cargo.toml | 1 + .../src/formatting/args.rs | 76 --- .../src/formatting/dispatch.rs | 52 +- .../src/formatting/engine.rs | 371 ++------------ .../src/formatting/mod.rs | 9 +- .../src/formatting/types.rs | 115 ----- crates/jrsonnet-lsp-handlers/src/lib.rs | 4 +- crates/jrsonnet-lsp/Cargo.toml | 1 + crates/jrsonnet-lsp/src/config.rs | 473 ++++++++++++------ crates/jrsonnet-lsp/src/server.rs | 1 - .../jrsonnet-lsp/src/server/async_requests.rs | 5 +- .../src/server/async_requests/formatting.rs | 56 ++- crates/jrsonnet-lsp/tests/integration_test.rs | 30 
+- .../tests/integration_test/formatting.rs | 309 ++++++++++++ .../formatting_config_updates_apply.yaml | 76 +++ .../formatting_default_returns_full_edit.yaml | 26 + .../runner/missing_step_coverage.yaml | 9 +- docs/lsp/ARCHITECTURE.md | 10 +- docs/lsp/HANDLERS.md | 14 +- docs/lsp/README.md | 61 +-- 28 files changed, 1170 insertions(+), 834 deletions(-) create mode 100644 cmds/jrsonnet-fmt/src/api.rs create mode 100644 cmds/jrsonnet-fmt/src/lib.rs delete mode 100644 crates/jrsonnet-lsp-handlers/src/formatting/args.rs delete mode 100644 crates/jrsonnet-lsp-handlers/src/formatting/types.rs create mode 100644 crates/jrsonnet-lsp/tests/integration_test/formatting.rs create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/formatting_config_updates_apply.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/formatting_default_returns_full_edit.yaml diff --git a/Cargo.lock b/Cargo.lock index e7fbea22..3fd8cfbf 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1760,6 +1760,9 @@ dependencies = [ "indoc", "insta", "jrsonnet-rowan-parser", + "rstest 0.23.0", + "serde", + "serde_json", "tempfile", "thiserror 1.0.69", ] @@ -1832,6 +1835,7 @@ dependencies = [ "serde", "serde_json", "tempfile", + "thiserror 1.0.69", "tracing", ] @@ -1882,6 +1886,7 @@ version = "0.5.0-pre97" dependencies = [ "assert_matches", "indoc", + "jrsonnet-fmt", "jrsonnet-lsp-document", "jrsonnet-lsp-import", "jrsonnet-lsp-inference", diff --git a/cmds/jrsonnet-fmt/Cargo.toml b/cmds/jrsonnet-fmt/Cargo.toml index 8257dbec..e0e8221f 100644 --- a/cmds/jrsonnet-fmt/Cargo.toml +++ b/cmds/jrsonnet-fmt/Cargo.toml @@ -15,6 +15,11 @@ jrsonnet-rowan-parser.workspace = true insta.workspace = true indoc.workspace = true hi-doc.workspace = true +serde = { workspace = true, features = ["derive"] } clap = { workspace = true, features = ["derive"] } tempfile.workspace = true thiserror.workspace = true + +[dev-dependencies] +rstest = "0.23" +serde_json.workspace = true diff --git a/cmds/jrsonnet-fmt/src/api.rs 
b/cmds/jrsonnet-fmt/src/api.rs new file mode 100644 index 00000000..a9128018 --- /dev/null +++ b/cmds/jrsonnet-fmt/src/api.rs @@ -0,0 +1,56 @@ +use dprint_core::formatting::{PrintItems, PrintOptions}; + +use crate::{FormatContext, FormatOptions, Printable}; + +const CONVERGENCE_LIMIT: usize = 10; + +/// Format Jsonnet source code in-process. +/// +/// Applies repeated formatting passes until output stabilizes or the +/// convergence limit is reached. +/// Returns `None` when parsing fails. +#[must_use] +pub fn format_code(input: &str, opts: &FormatOptions) -> Option { + let mut iteration = 0; + let mut formatted = input.to_owned(); + + // https://github.com/dprint/dprint/pull/423 + loop { + let reformatted = format_once(&formatted, opts)?; + let convergence_tmp = reformatted.trim().to_owned(); + if formatted == convergence_tmp { + break; + } + formatted = convergence_tmp; + iteration += 1; + if iteration > CONVERGENCE_LIMIT { + return None; + } + } + + formatted.push('\n'); + Some(formatted) +} + +fn format_once(input: &str, opts: &FormatOptions) -> Option { + let (parsed, errors) = jrsonnet_rowan_parser::parse(input); + if !errors.is_empty() { + return None; + } + + let ctx = FormatContext::new(opts.clone()); + + Some(dprint_core::formatting::format( + || { + let mut out = PrintItems::new(); + parsed.print(&mut out, &ctx); + out + }, + PrintOptions { + indent_width: if opts.indent == 0 { 3 } else { opts.indent }, + max_width: 100, + use_tabs: opts.indent == 0, + new_line_text: "\n", + }, + )) +} diff --git a/cmds/jrsonnet-fmt/src/children.rs b/cmds/jrsonnet-fmt/src/children.rs index 2ecd4210..0e8a1a51 100644 --- a/cmds/jrsonnet-fmt/src/children.rs +++ b/cmds/jrsonnet-fmt/src/children.rs @@ -80,14 +80,13 @@ pub fn children_between( ) } -pub fn should_start_with_newline(prev_inline: Option<&ChildTrivia>, tt: &ChildTrivia) -> bool { - count_newlines_before(tt) - + prev_inline - .map(count_newlines_after) - .unwrap_or_default() - - // First for previous item end, 
second for current item - >= 2 +fn extra_newlines_before_item(prev_inline: Option<&ChildTrivia>, tt: &ChildTrivia) -> u8 { + let newlines_between = + count_newlines_before(tt) + prev_inline.map(count_newlines_after).unwrap_or_default(); + + // A single newline is the default separator between items. + // Any extra newline means preserving one blank line. + u8::try_from(newlines_between.saturating_sub(1)).unwrap_or(u8::MAX) } fn count_newlines_before(tt: &ChildTrivia) -> usize { @@ -148,12 +147,15 @@ pub fn children( mem::take(&mut next) }; let last_child = current_child.replace(Child { - // First item should not start with newline - should_start_with_newline: had_some - && should_start_with_newline( + // First item should not start with blank lines. + extra_newlines_before: if had_some { + extra_newlines_before_item( current_child.as_ref().map(|c| &c.inline_trivia), &before_trivia, - ), + ) + } else { + 0 + }, before_trivia, value, inline_trivia: Vec::new(), @@ -200,7 +202,7 @@ pub fn children( } let ending_comments = EndingComments { - should_start_with_newline: should_start_with_newline( + extra_newlines_before: extra_newlines_before_item( current_child.as_ref().map(|c| &c.inline_trivia), &next, ), @@ -216,8 +218,8 @@ pub fn children( #[derive(Debug)] pub struct Child { - /// If this child has two newlines above in source code, so it needs to have it in the output - pub should_start_with_newline: bool, + /// Number of extra newlines before this child, beyond the default line break. + pub extra_newlines_before: u8, /// Comment before item, i.e /// /// ```ignore @@ -237,13 +239,13 @@ pub struct Child { } pub struct EndingComments { - /// If this child has two newlines above in source code, so it needs to have it in the output - pub should_start_with_newline: bool, + /// Number of extra newlines before ending comments, beyond the default line break. 
+ pub extra_newlines_before: u8, pub trivia: ChildTrivia, } impl EndingComments { pub fn is_empty(&self) -> bool { - !self.should_start_with_newline && self.trivia.is_empty() + self.extra_newlines_before == 0 && self.trivia.is_empty() } pub fn extract_trailing(&mut self) -> ChildTrivia { mem::take(&mut self.trivia) diff --git a/cmds/jrsonnet-fmt/src/context.rs b/cmds/jrsonnet-fmt/src/context.rs index e5c93b43..0ef9ee3f 100644 --- a/cmds/jrsonnet-fmt/src/context.rs +++ b/cmds/jrsonnet-fmt/src/context.rs @@ -1,33 +1,69 @@ //! Formatting context and options. use dprint_core::formatting::PrintItems; +use serde::{Deserialize, Serialize}; /// Comment style for formatting. -#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Default, Serialize)] pub enum CommentStyle { /// Convert all comments to hash-style (#). + #[serde(rename = "hash")] Hash, /// Convert all comments to slash-style (//). + #[serde(rename = "slash")] Slash, /// Leave comments as-is. #[default] + #[serde(rename = "leave")] Leave, } +impl<'de> Deserialize<'de> for CommentStyle { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + let value = String::deserialize(deserializer)?; + Ok(match value.to_lowercase().as_str() { + "h" | "hash" => Self::Hash, + "s" | "slash" => Self::Slash, + _ => Self::Leave, + }) + } +} + /// String literal style for formatting. -#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Default, Serialize)] pub enum StringStyle { /// Convert all strings to double quotes. + #[serde(rename = "double")] Double, /// Convert all strings to single quotes. + #[serde(rename = "single")] Single, /// Leave strings as-is. 
#[default] + #[serde(rename = "leave")] Leave, } +impl<'de> Deserialize<'de> for StringStyle { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + let value = String::deserialize(deserializer)?; + Ok(match value.to_lowercase().as_str() { + "d" | "double" => Self::Double, + "s" | "single" => Self::Single, + _ => Self::Leave, + }) + } +} + /// Formatting options that control output style. -#[derive(Debug, Clone)] +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(default)] pub struct FormatOptions { /// 0 for hard tabs, otherwise number of spaces. pub indent: u8, @@ -70,15 +106,38 @@ impl FormatContext { Self { opts } } - /// Emit a blank line if condition is true and max_blank_lines allows it. - /// This is used to preserve source blank lines between items. - pub fn emit_blank_line_if(&self, condition: bool, out: &mut PrintItems) { - if !condition { - return; + /// Emit extra blank lines (beyond the regular line break) up to configured max. 
+ pub fn emit_blank_lines(&self, extra_newlines: u8, out: &mut PrintItems) { + let newlines_to_emit = extra_newlines.min(self.opts.max_blank_lines); + for _ in 0..newlines_to_emit { + out.push_signal(dprint_core::formatting::Signal::NewLine); } - if self.opts.max_blank_lines == 0 { - return; - } - out.push_signal(dprint_core::formatting::Signal::NewLine); + } +} + +#[cfg(test)] +mod tests { + use super::FormatOptions; + + #[test] + fn test_format_options_deserialize_in_range_indent() { + let parsed: Result = serde_json::from_value(serde_json::json!({ + "indent": 8 + })); + assert_eq!( + parsed.ok(), + Some(FormatOptions { + indent: 8, + ..FormatOptions::default() + }) + ); + } + + #[test] + fn test_format_options_reject_out_of_range_indent() { + let parsed: Result = serde_json::from_value(serde_json::json!({ + "indent": 300 + })); + assert_eq!(parsed.ok(), None); } } diff --git a/cmds/jrsonnet-fmt/src/lib.rs b/cmds/jrsonnet-fmt/src/lib.rs new file mode 100644 index 00000000..8a9ad2e7 --- /dev/null +++ b/cmds/jrsonnet-fmt/src/lib.rs @@ -0,0 +1,12 @@ +//! Reusable Jsonnet formatter API used by CLI and LSP integrations. 
+ +mod api; +mod children; +mod comments; +mod context; +mod macros; +mod printable; + +pub use api::format_code; +pub use context::{CommentStyle, FormatContext, FormatOptions, StringStyle}; +pub use printable::Printable; diff --git a/cmds/jrsonnet-fmt/src/printable.rs b/cmds/jrsonnet-fmt/src/printable.rs index 9ca077f1..fa003c6a 100644 --- a/cmds/jrsonnet-fmt/src/printable.rs +++ b/cmds/jrsonnet-fmt/src/printable.rs @@ -49,8 +49,8 @@ where } impl Printable for SyntaxToken { - fn print(&self, out: &mut PrintItems, ctx: &FormatContext) { - p!(out, ctx, string(self.to_string())); + fn print(&self, out: &mut PrintItems, _ctx: &FormatContext) { + p!(out, _ctx, string(self.to_string())); } } @@ -114,8 +114,8 @@ fn convert_string_quotes(s: &str, from_quote: char, to_quote: char) -> String { result } impl Printable for Number { - fn print(&self, out: &mut PrintItems, ctx: &FormatContext) { - p!(out, ctx, string(format!("{}", self))); + fn print(&self, out: &mut PrintItems, _ctx: &FormatContext) { + p!(out, _ctx, string(format!("{}", self))); } } @@ -196,8 +196,8 @@ impl Printable for FieldName { } impl Printable for Visibility { - fn print(&self, out: &mut PrintItems, ctx: &FormatContext) { - p!(out, ctx, string(self.to_string())); + fn print(&self, out: &mut PrintItems, _ctx: &FormatContext) { + p!(out, _ctx, string(self.to_string())); } } @@ -245,7 +245,7 @@ impl Printable for ArgsDesc { ); let mut args = children.into_iter().peekable(); while let Some(ele) = args.next() { - ctx.emit_blank_line_if(ele.should_start_with_newline, out); + ctx.emit_blank_lines(ele.extra_newlines_before, out); format_comments(&ele.before_trivia, CommentLocation::AboveItem, out, ctx); let arg = ele.value; if arg.name().is_some() || arg.assign_token().is_some() { @@ -260,7 +260,7 @@ impl Printable for ArgsDesc { format_comments(&ele.inline_trivia, CommentLocation::ItemInline, out, ctx); p!(out, ctx, if("between args", multi_line, nl)); } - 
ctx.emit_blank_line_if(end_comments.should_start_with_newline, out); + ctx.emit_blank_lines(end_comments.extra_newlines_before, out); format_comments(&end_comments.trivia, CommentLocation::EndOfItems, out, ctx); p!(out, ctx, if("end args", multi_line, i nl); for mem in children { - ctx.emit_blank_line_if(mem.should_start_with_newline, out); + ctx.emit_blank_lines(mem.extra_newlines_before, out); format_comments(&mem.before_trivia, CommentLocation::AboveItem, out, ctx); p!(out, ctx, {mem.value} str(",")); format_comments(&mem.inline_trivia, CommentLocation::ItemInline, out, ctx); p!(out, ctx, nl); } - ctx.emit_blank_line_if(end_comments.should_start_with_newline, out); + ctx.emit_blank_lines(end_comments.extra_newlines_before, out); format_comments(&end_comments.trivia, CommentLocation::EndOfItems, out, ctx); let (compspecs, end_comments) = children_between::( @@ -344,12 +344,12 @@ impl Printable for ObjBody { Some(trailing_for_comp), ); for mem in compspecs { - ctx.emit_blank_line_if(mem.should_start_with_newline, out); + ctx.emit_blank_lines(mem.extra_newlines_before, out); format_comments(&mem.before_trivia, CommentLocation::AboveItem, out, ctx); p!(out, ctx, { mem.value }); format_comments(&mem.inline_trivia, CommentLocation::ItemInline, out, ctx); } - ctx.emit_blank_line_if(end_comments.should_start_with_newline, out); + ctx.emit_blank_lines(end_comments.extra_newlines_before, out); format_comments(&end_comments.trivia, CommentLocation::EndOfItems, out, ctx); p!(out, ctx, nl i nl); for (i, mem) in children.into_iter().enumerate() { - // Don't emit blank line before first item - ctx.emit_blank_line_if(mem.should_start_with_newline && i != 0, out); + // Don't emit blank line before first item. 
+ ctx.emit_blank_lines(if i == 0 { 0 } else { mem.extra_newlines_before }, out); format_comments(&mem.before_trivia, CommentLocation::AboveItem, out, ctx); p!(out, ctx, {mem.value} str(",")); format_comments(&mem.inline_trivia, CommentLocation::ItemInline, out, ctx); p!(out, ctx, nl); } - ctx.emit_blank_line_if(end_comments.should_start_with_newline, out); + ctx.emit_blank_lines(end_comments.extra_newlines_before, out); format_comments(&end_comments.trivia, CommentLocation::EndOfItems, out, ctx); p!(out, ctx, i nl); for bind in binds { - ctx.emit_blank_line_if(bind.should_start_with_newline, out); + ctx.emit_blank_lines(bind.extra_newlines_before, out); format_comments(&bind.before_trivia, CommentLocation::AboveItem, out, ctx); p!(out, ctx, {bind.value} str(",")); format_comments(&bind.inline_trivia, CommentLocation::ItemInline, out, ctx); p!(out, ctx, nl); } - ctx.emit_blank_line_if(end_comments.should_start_with_newline, out); + ctx.emit_blank_lines(end_comments.extra_newlines_before, out); format_comments(&end_comments.trivia, CommentLocation::EndOfItems, out, ctx); p!(out, ctx, i nl); for (i, el) in children.into_iter().enumerate() { - ctx.emit_blank_line_if(el.should_start_with_newline && i != 0, out); + ctx.emit_blank_lines(if i == 0 { 0 } else { el.extra_newlines_before }, out); format_comments(&el.before_trivia, CommentLocation::AboveItem, out, ctx); p!(out, ctx, {el.value} str(",")); format_comments(&el.inline_trivia, CommentLocation::ItemInline, out, ctx); p!(out, ctx, nl); } - ctx.emit_blank_line_if(end_comments.should_start_with_newline, out); + ctx.emit_blank_lines(end_comments.extra_newlines_before, out); format_comments(&end_comments.trivia, CommentLocation::EndOfItems, out, ctx); p!(out, ctx, Vec { - let mut args = vec!["-".to_string()]; // Read from stdin - - if let Some(indent) = config.indent { - args.push("--indent".to_string()); - args.push(indent.to_string()); - } - - if let Some(max_blank_lines) = config.max_blank_lines { - 
args.push("--max-blank-lines".to_string()); - args.push(max_blank_lines.to_string()); - } - - if let Some(ref style) = config.string_style { - // jsonnetfmt uses single-letter codes: d=double, s=single, l=leave - let code = match style.to_lowercase().as_str() { - "double" | "d" => "d", - "single" | "s" => "s", - _ => "l", - }; - args.push("--string-style".to_string()); - args.push(code.to_string()); - } - - if let Some(ref style) = config.comment_style { - // jsonnetfmt uses single-letter codes: h=hash, s=slash, l=leave - let code = match style.to_lowercase().as_str() { - "hash" | "h" => "h", - "slash" | "s" => "s", - _ => "l", - }; - args.push("--comment-style".to_string()); - args.push(code.to_string()); - } - - if config.pad_arrays == Some(true) { - args.push("--pad-arrays".to_string()); - } - - if config.pad_objects == Some(false) { - args.push("--no-pad-objects".to_string()); - } - - if config.pretty_field_names == Some(false) { - args.push("--no-pretty-field-names".to_string()); - } - - if config.sort_imports == Some(false) { - args.push("--no-sort-imports".to_string()); - } - - if config.use_implicit_plus == Some(false) { - args.push("--no-use-implicit-plus".to_string()); - } - - if config.strip_everything == Some(true) { - args.push("--strip-everything".to_string()); - } - - if config.strip_comments == Some(true) { - args.push("--strip-comments".to_string()); - } - - if config.strip_all_but_comments == Some(true) { - args.push("--strip-all-but-comments".to_string()); - } - - args -} diff --git a/crates/jrsonnet-lsp-handlers/src/formatting/dispatch.rs b/crates/jrsonnet-lsp-handlers/src/formatting/dispatch.rs index ea23adc6..726cb35e 100644 --- a/crates/jrsonnet-lsp-handlers/src/formatting/dispatch.rs +++ b/crates/jrsonnet-lsp-handlers/src/formatting/dispatch.rs @@ -1,13 +1,22 @@ +use jrsonnet_lsp_document::{ByteOffset, LineIndex}; use lsp_types::{Position, Range, TextEdit}; use tracing::debug; -use super::{ - engine::run_formatter, - 
types::{FormattingConfig, FormattingContext}, -}; +use super::{engine::run_formatter, FormattingConfig}; -fn to_u32(value: usize) -> u32 { - u32::try_from(value).unwrap_or(u32::MAX) +fn full_document_range(text: &str) -> Range { + let line_index = LineIndex::new(text); + let end = line_index + .position(ByteOffset::from(text.len()), text) + .unwrap_or_default(); + + Range { + start: Position { + line: 0, + character: 0, + }, + end: end.into(), + } } /// Format a Jsonnet document with default configuration. @@ -16,11 +25,7 @@ fn to_u32(value: usize) -> u32 { /// On error, returns None. #[must_use] pub fn format_document(text: &str) -> Option> { - format_document_with_config( - text, - &FormattingConfig::default(), - FormattingContext::detached(), - ) + format_document_with_config(text, &FormattingConfig::default()) } /// Format a Jsonnet document with the given configuration. @@ -28,13 +33,9 @@ pub fn format_document(text: &str) -> Option> { /// Returns a list of text edits to apply to the document. /// On error, returns None. #[must_use] -pub fn format_document_with_config( - text: &str, - config: &FormattingConfig, - context: FormattingContext<'_>, -) -> Option> { +pub fn format_document_with_config(text: &str, config: &FormattingConfig) -> Option> { // Try to run the formatter - let formatted = match run_formatter(text, config, context) { + let formatted = match run_formatter(text, config) { Ok(formatted) => formatted, Err(err) => { debug!("Formatting unavailable: {err}"); @@ -47,22 +48,9 @@ pub fn format_document_with_config( return Some(Vec::new()); } - // Return a single edit that replaces the entire document - let lines: Vec<&str> = text.lines().collect(); - let last_line = to_u32(lines.len().saturating_sub(1)); - let last_col = to_u32(lines.last().map_or(0, |l| l.len())); - + // Return a single edit that replaces the entire document. 
Some(vec![TextEdit { - range: Range { - start: Position { - line: 0, - character: 0, - }, - end: Position { - line: last_line, - character: last_col, - }, - }, + range: full_document_range(text), new_text: formatted, }]) } diff --git a/crates/jrsonnet-lsp-handlers/src/formatting/engine.rs b/crates/jrsonnet-lsp-handlers/src/formatting/engine.rs index b5cf1f27..56726248 100644 --- a/crates/jrsonnet-lsp-handlers/src/formatting/engine.rs +++ b/crates/jrsonnet-lsp-handlers/src/formatting/engine.rs @@ -1,369 +1,106 @@ -use std::{ - ffi::OsStr, - fmt, io, - path::{Path, PathBuf}, - process::{Command, Stdio}, -}; +use std::fmt; -use super::{ - args::build_formatter_args, - types::{FormatterEngine, FormattingConfig, FormattingContext}, -}; +use jrsonnet_fmt::format_code; + +use super::FormattingConfig; #[derive(Debug)] pub(super) enum FormatterError { - NoFormatterInPath, - MissingDocumentPath, - MissingWorkspaceRoot { - document_path: PathBuf, - }, - Spawn { - program: String, - source: io::Error, - }, - MissingStdin { - program: String, - }, - WriteStdin { - program: String, - source: io::Error, - }, - Wait { - program: String, - source: io::Error, - }, - UnsuccessfulExit { - program: String, - stderr: String, - }, - NonUtf8Stdout { - program: String, - source: std::string::FromUtf8Error, - }, -} - -impl FormatterError { - fn is_not_found(&self) -> bool { - matches!( - self, - Self::Spawn { source, .. 
} if source.kind() == io::ErrorKind::NotFound - ) - } + FormatFailed, } impl fmt::Display for FormatterError { fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { match self { - Self::NoFormatterInPath => { - formatter.write_str("no formatter found in PATH (tried jrsonnet-fmt, jsonnetfmt)") - } - Self::MissingDocumentPath => { - formatter.write_str("document path is required for workspace formatter engine") - } - Self::MissingWorkspaceRoot { document_path } => write!( - formatter, - "no workspace root available for document {}", - document_path.to_string_lossy() - ), - Self::Spawn { program, source } => { - write!(formatter, "failed to spawn formatter {program}: {source}") - } - Self::MissingStdin { program } => { - write!( - formatter, - "formatter {program} did not provide stdin handle" - ) - } - Self::WriteStdin { program, source } => { - write!( - formatter, - "failed to write to formatter {program} stdin: {source}" - ) - } - Self::Wait { program, source } => { - write!( - formatter, - "failed waiting for formatter {program}: {source}" - ) - } - Self::UnsuccessfulExit { program, stderr } => { - write!( - formatter, - "formatter {program} exited with failure: {}", - stderr.trim() - ) - } - Self::NonUtf8Stdout { program, source } => { - write!( - formatter, - "formatter {program} emitted non-UTF8 stdout: {source}" - ) - } + Self::FormatFailed => formatter.write_str("formatter failed"), } } } -/// Run the formatter on the input text. -/// -/// If `config.formatter_path` is set, that formatter is used directly. -/// Otherwise, formatter behavior is selected by `formatter_engine`. 
pub(super) fn run_formatter( input: &str, config: &FormattingConfig, - context: FormattingContext<'_>, ) -> Result { - // If a custom formatter path is provided, use it directly - if let Some(path) = &config.formatter_path { - let args = build_formatter_args(config); - return try_run_formatter_binary(path.as_str(), &args, None, input); - } - - match config.formatter_engine.unwrap_or_default() { - FormatterEngine::Path => { - let args = build_formatter_args(config); - let mut first_error = None; - for name in ["jrsonnet-fmt", "jsonnetfmt"] { - match try_run_formatter_binary(name, &args, None, input) { - Ok(result) => return Ok(result), - Err(err) if err.is_not_found() => {} - Err(err) => { - if first_error.is_none() { - first_error = Some(err); - } - } - } - } - - Err(first_error.unwrap_or(FormatterError::NoFormatterInPath)) - } - FormatterEngine::BinJsonnetfmtStdio => { - let document_path = context - .document_path - .ok_or(FormatterError::MissingDocumentPath)?; - let workspace_root = select_workspace_root(document_path, context.workspace_roots) - .ok_or_else(|| FormatterError::MissingWorkspaceRoot { - document_path: document_path.to_path_buf(), - })?; - let formatter = workspace_root.join("bin").join("jsonnetfmt"); - let args = vec![ - "-stdio".to_string(), - document_path.to_string_lossy().into_owned(), - ]; - try_run_formatter_binary(formatter.as_os_str(), &args, Some(workspace_root), input) - } - } -} - -/// Try to run a specific formatter binary. 
-fn try_run_formatter_binary( - program: S, - args: &[String], - current_dir: Option<&Path>, - input: &str, -) -> Result -where - S: AsRef, -{ - let program = program.as_ref(); - let program_name = program.to_string_lossy().into_owned(); - - let mut command = Command::new(program); - command.args(args); - if let Some(current_dir) = current_dir { - command.current_dir(current_dir); - } - let mut child = command - .stdin(Stdio::piped()) - .stdout(Stdio::piped()) - .stderr(Stdio::piped()) - .spawn() - .map_err(|source| FormatterError::Spawn { - program: program_name.clone(), - source, - })?; - - // Write input to stdin - let mut stdin = child - .stdin - .take() - .ok_or_else(|| FormatterError::MissingStdin { - program: program_name.clone(), - })?; - use std::io::Write; - stdin - .write_all(input.as_bytes()) - .map_err(|source| FormatterError::WriteStdin { - program: program_name.clone(), - source, - })?; - drop(stdin); - - let output = child - .wait_with_output() - .map_err(|source| FormatterError::Wait { - program: program_name.clone(), - source, - })?; - - if !output.status.success() { - let stderr = String::from_utf8_lossy(&output.stderr).to_string(); - return Err(FormatterError::UnsuccessfulExit { - program: program_name, - stderr, - }); - } - - String::from_utf8(output.stdout).map_err(|source| FormatterError::NonUtf8Stdout { - program: program_name, - source, - }) -} - -fn select_workspace_root<'a>( - document_path: &Path, - workspace_roots: &'a [PathBuf], -) -> Option<&'a PathBuf> { - workspace_roots - .iter() - .filter(|root| document_path.starts_with(root)) - .max_by_key(|root| root.components().count()) - .or_else(|| workspace_roots.first()) + format_code(input, config).ok_or(FormatterError::FormatFailed) } #[cfg(test)] mod tests { - use std::{fs, path::Path}; - use lsp_types::{Position, Range, TextEdit}; + use rstest::rstest; use super::{super::dispatch::format_document_with_config, *}; - #[test] - fn test_select_workspace_root_prefers_deepest_match() { - 
let roots = vec![ - PathBuf::from("/workspace"), - PathBuf::from("/workspace/team/project"), - ]; - let document = Path::new("/workspace/team/project/src/main.jsonnet"); - - assert_eq!( - select_workspace_root(document, &roots), - Some(&PathBuf::from("/workspace/team/project")) - ); - } - - #[test] - fn test_format_contract_engine_without_document_context_returns_none() { - let config = FormattingConfig { - formatter_engine: Some(FormatterEngine::BinJsonnetfmtStdio), - ..FormattingConfig::default() - }; + const SIMPLE_OBJECT: &str = "{a:1}"; + const SIMPLE_OBJECT_WITH_TRAILING_NEWLINE: &str = "{a:1}\n"; + const STRING_OBJECT: &str = "{a:'x'}"; + const STRING_OBJECT_WITH_EMOJI: &str = "{a:'🦀'}"; - assert_eq!( - format_document_with_config("{}", &config, FormattingContext::detached()), - None - ); - } - - #[cfg(unix)] - fn write_executable_script(path: &Path, body: &str) { - use std::os::unix::fs::PermissionsExt; - - fs::write(path, body).unwrap(); - let permissions = fs::Permissions::from_mode(0o755); - fs::set_permissions(path, permissions).unwrap(); - } - - fn to_u32(value: usize) -> u32 { - u32::try_from(value).unwrap_or(u32::MAX) - } - - fn full_replacement_edit(old: &str, new_text: String) -> Vec { + fn full_replacement_edit(end: Position, new_text: String) -> Vec { vec![TextEdit { range: Range { start: Position { line: 0, character: 0, }, - end: Position { - line: 0, - character: to_u32(old.len()), - }, + end, }, new_text, }] } - #[cfg(unix)] - #[test] - fn test_format_contract_engine_uses_workspace_bin_jsonnetfmt_stdio() { - let temp_dir = tempfile::tempdir().unwrap(); - let root = temp_dir.path(); - let bin_dir = root.join("bin"); - let src_dir = root.join("src"); - fs::create_dir_all(&bin_dir).unwrap(); - fs::create_dir_all(&src_dir).unwrap(); + fn expected_end(text: &str) -> Position { + let line_index = jrsonnet_lsp_document::LineIndex::new(text); + line_index + .position(jrsonnet_lsp_document::ByteOffset::from(text.len()), text) + .map(Into::into) + 
.unwrap_or_default() + } - let formatter_path = bin_dir.join("jsonnetfmt"); - write_executable_script( - &formatter_path, - "#!/usr/bin/env sh\nset -eu\nprintf 'cwd=%s\\narg1=%s\\narg2=%s\\n' \"$(pwd)\" \"$1\" \"$2\"\ncat\n", + #[test] + fn test_formats_document() { + assert_eq!( + format_document_with_config(SIMPLE_OBJECT, &FormattingConfig::default()), + Some(full_replacement_edit( + expected_end(SIMPLE_OBJECT), + "{\n\ta: 1,\n}\n".to_string() + )) ); + } - let document_path = src_dir.join("main.jsonnet"); - let workspace_roots = vec![root.to_path_buf()]; - let input = "{}"; + #[test] + fn test_respects_string_style_option() { let config = FormattingConfig { - formatter_engine: Some(FormatterEngine::BinJsonnetfmtStdio), + string_style: jrsonnet_fmt::StringStyle::Double, ..FormattingConfig::default() }; - let context = FormattingContext::for_document(document_path.as_path(), &workspace_roots); - - let output = format!( - "cwd={}\narg1=-stdio\narg2={}\n{input}", - root.to_string_lossy(), - document_path.to_string_lossy() - ); assert_eq!( - format_document_with_config(input, &config, context), - Some(full_replacement_edit(input, output)) + format_document_with_config(STRING_OBJECT, &config), + Some(full_replacement_edit( + expected_end(STRING_OBJECT), + "{\n\ta: \"x\",\n}\n".to_string() + )) ); } - #[cfg(unix)] #[test] - fn test_formatter_path_takes_precedence_over_engine() { - let temp_dir = tempfile::tempdir().unwrap(); - let root = temp_dir.path(); - let bin_dir = root.join("bin"); - let src_dir = root.join("src"); - fs::create_dir_all(&bin_dir).unwrap(); - fs::create_dir_all(&src_dir).unwrap(); - - let workspace_formatter = bin_dir.join("jsonnetfmt"); - write_executable_script( - &workspace_formatter, - "#!/usr/bin/env sh\nset -eu\necho workspace\n", - ); - - let custom_formatter = root.join("custom-formatter"); - write_executable_script( - &custom_formatter, - "#!/usr/bin/env sh\nset -eu\necho custom\n", - ); - - let document_path = 
src_dir.join("main.jsonnet"); - let workspace_roots = vec![root.to_path_buf()]; - let config = FormattingConfig { - formatter_path: Some(custom_formatter.to_string_lossy().into_owned()), - formatter_engine: Some(FormatterEngine::BinJsonnetfmtStdio), - ..FormattingConfig::default() - }; - let context = FormattingContext::for_document(document_path.as_path(), &workspace_roots); - + fn test_returns_none_on_parse_failure() { assert_eq!( - format_document_with_config("{}", &config, context), - Some(full_replacement_edit("{}", "custom\n".to_string())) + format_document_with_config("local x = ", &FormattingConfig::default()), + None ); } + + #[rstest] + #[case(SIMPLE_OBJECT_WITH_TRAILING_NEWLINE, Position { line: 1, character: 0 })] + #[case(STRING_OBJECT_WITH_EMOJI, Position { line: 0, character: 8 })] + fn test_full_replacement_range_uses_lsp_positions( + #[case] input: &str, + #[case] expected_end: Position, + ) { + let edits = format_document_with_config(input, &FormattingConfig::default()) + .expect("formatting edit"); + assert_eq!(edits.len(), 1); + assert_eq!(edits[0].range.end, expected_end); + } } diff --git a/crates/jrsonnet-lsp-handlers/src/formatting/mod.rs b/crates/jrsonnet-lsp-handlers/src/formatting/mod.rs index 20d18a8a..b9a7314c 100644 --- a/crates/jrsonnet-lsp-handlers/src/formatting/mod.rs +++ b/crates/jrsonnet-lsp-handlers/src/formatting/mod.rs @@ -1,11 +1,12 @@ //! Document formatting handler. //! -//! Formats Jsonnet code using an external formatter (jrsonnet-fmt, jsonnetfmt, etc.). +//! Formats Jsonnet code using an in-process formatter. 
-mod args; mod dispatch; mod engine; -mod types; pub use dispatch::{format_document, format_document_with_config}; -pub use types::{FormatterEngine, FormattingConfig, FormattingContext}; +pub use jrsonnet_fmt::{ + CommentStyle as FormattingCommentStyle, FormatOptions as FormattingConfig, + StringStyle as FormattingStringStyle, +}; diff --git a/crates/jrsonnet-lsp-handlers/src/formatting/types.rs b/crates/jrsonnet-lsp-handlers/src/formatting/types.rs deleted file mode 100644 index 0a1661c8..00000000 --- a/crates/jrsonnet-lsp-handlers/src/formatting/types.rs +++ /dev/null @@ -1,115 +0,0 @@ -use std::path::{Path, PathBuf}; - -use serde::{Deserialize, Serialize}; - -/// Formatter execution mode. -#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] -pub enum FormatterEngine { - /// Resolve formatter via PATH (auto mode). - #[serde(rename = "path", alias = "auto", alias = "auto-path")] - Path, - /// Use workspace-local contract: `{workspace}/bin/jsonnetfmt -stdio `. - #[serde( - rename = "bin-jsonnetfmt-stdio", - alias = "workspace-jsonnetfmt-stdio", - alias = "workspaceBinJsonnetfmtStdio" - )] - BinJsonnetfmtStdio, -} - -impl Default for FormatterEngine { - fn default() -> Self { - Self::Path - } -} - -/// Formatting configuration options. -/// -/// These options correspond to the go-jsonnet formatter (jsonnetfmt) options. -/// Not all formatters support all options - jrsonnet-fmt only supports `indent`. -#[derive(Debug, Clone, Default, Serialize, Deserialize)] -#[serde(default)] -pub struct FormattingConfig { - /// Number of spaces per indentation level (default: 2). - #[serde(alias = "Indent")] - pub indent: Option, - - /// Maximum blank lines allowed (default: 2). - #[serde(alias = "MaxBlankLines")] - pub max_blank_lines: Option, - - /// String style: "double", "single", or "leave" (default: "leave"). - #[serde(alias = "StringStyle")] - pub string_style: Option, - - /// Comment style: "hash", "slash", or "leave" (default: "leave"). 
- #[serde(alias = "CommentStyle")] - pub comment_style: Option, - - /// Add padding inside arrays (default: false). - #[serde(alias = "PadArrays")] - pub pad_arrays: Option, - - /// Add padding inside objects (default: true). - #[serde(alias = "PadObjects")] - pub pad_objects: Option, - - /// Pretty print fields (one per line) (default: true). - #[serde(alias = "PrettyFieldNames")] - pub pretty_field_names: Option, - - /// Sort imports (default: true). - #[serde(alias = "SortImports")] - pub sort_imports: Option, - - /// Use implicit plus for object inheritance (default: true). - #[serde(alias = "UseImplicitPlus")] - pub use_implicit_plus: Option, - - /// Strip everything after // (default: false). - #[serde(alias = "StripEverything")] - pub strip_everything: Option, - - /// Strip all comments (default: false). - #[serde(alias = "StripComments")] - pub strip_comments: Option, - - /// Strip all comments except those at the start (default: false). - #[serde(alias = "StripAllButComments")] - pub strip_all_but_comments: Option, - - /// Path to the formatter binary (default: searches PATH for jrsonnet-fmt or jsonnetfmt). - #[serde(alias = "FormatterPath")] - pub formatter_path: Option, - - /// Formatter engine mode. - /// - /// - `None` or `path`: try `jrsonnet-fmt` then `jsonnetfmt` in `PATH` - /// - `bin-jsonnetfmt-stdio`: run `{workspaceRoot}/bin/jsonnetfmt -stdio ` - #[serde(alias = "FormatterEngine")] - pub formatter_engine: Option, -} - -/// Context required for formatter resolution. -#[derive(Debug, Clone, Copy, Default)] -pub struct FormattingContext<'a> { - /// Absolute path to the document being formatted. - pub document_path: Option<&'a Path>, - /// Known workspace roots from initialization. 
- pub workspace_roots: &'a [PathBuf], -} - -impl<'a> FormattingContext<'a> { - #[must_use] - pub fn detached() -> Self { - Self::default() - } - - #[must_use] - pub fn for_document(document_path: &'a Path, workspace_roots: &'a [PathBuf]) -> Self { - Self { - document_path: Some(document_path), - workspace_roots, - } - } -} diff --git a/crates/jrsonnet-lsp-handlers/src/lib.rs b/crates/jrsonnet-lsp-handlers/src/lib.rs index 5173ada4..e2e4986d 100644 --- a/crates/jrsonnet-lsp-handlers/src/lib.rs +++ b/crates/jrsonnet-lsp-handlers/src/lib.rs @@ -26,8 +26,8 @@ pub use definition::{ }; pub use document_highlight::document_highlights; pub use formatting::{ - format_document, format_document_with_config, FormatterEngine, FormattingConfig, - FormattingContext, + format_document, format_document_with_config, FormattingCommentStyle, FormattingConfig, + FormattingStringStyle, }; pub use hover::{hover, hover_with_import_field_type}; pub use inlay_hint::{ diff --git a/crates/jrsonnet-lsp/Cargo.toml b/crates/jrsonnet-lsp/Cargo.toml index a9e42fc3..d493eb80 100644 --- a/crates/jrsonnet-lsp/Cargo.toml +++ b/crates/jrsonnet-lsp/Cargo.toml @@ -39,6 +39,7 @@ jrsonnet-lsp-scope = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-scope" } rstest = "0.23" serde_json.workspace = true tempfile.workspace = true +thiserror.workspace = true [[bench]] name = "type_inference" diff --git a/crates/jrsonnet-lsp/src/config.rs b/crates/jrsonnet-lsp/src/config.rs index 978472a1..df63a8b9 100644 --- a/crates/jrsonnet-lsp/src/config.rs +++ b/crates/jrsonnet-lsp/src/config.rs @@ -9,8 +9,9 @@ use std::{collections::HashMap, fmt, path::PathBuf}; // Re-export config types from handlers crate pub use jrsonnet_lsp_handlers::{ - CodeActionConfig, FormatterEngine, FormattingConfig, InlayHintsConfig, LocalHintsMode, - ObjectMemberHintsMode, RemoveUnusedCommentsMode, RemoveUnusedMode, + CodeActionConfig, FormattingCommentStyle, FormattingConfig, FormattingStringStyle, + InlayHintsConfig, LocalHintsMode, 
ObjectMemberHintsMode, RemoveUnusedCommentsMode, + RemoveUnusedMode, }; use serde::{Deserialize, Serialize}; @@ -96,7 +97,7 @@ impl<'de> Deserialize<'de> for ResolvePathsWithTankaMode { /// /// Field names use `snake_case` internally but accept camelCase from JSON /// for compatibility with VS Code settings. -#[derive(Debug, Clone, Default, Serialize, Deserialize)] +#[derive(Debug, Clone, Default, PartialEq, Eq, Serialize, Deserialize)] #[serde(default)] pub struct ServerConfig { /// Library search paths for imports (equivalent to jsonnet -J flag). @@ -164,7 +165,7 @@ struct ServerConfigPatch { enable_lint_diagnostics: Option, #[serde(alias = "resolvePathsWithTanka", alias = "tankaMode")] resolve_paths_with_tanka: Option, - formatting: Option, + formatting: Option, #[serde(rename = "codeActions")] code_actions: Option, #[serde(rename = "inlayHints")] @@ -173,6 +174,25 @@ struct ServerConfigPatch { log_level: Option, } +#[derive(Debug, Clone, Default, Deserialize)] +#[serde(default)] +struct FormattingConfigPatch { + #[serde(alias = "Indent")] + indent: Option, + #[serde(alias = "maxBlankLines", alias = "MaxBlankLines")] + max_blank_lines: Option, + #[serde(alias = "stringStyle", alias = "StringStyle")] + string_style: Option, + #[serde(alias = "commentStyle", alias = "CommentStyle")] + comment_style: Option, + #[serde(alias = "padArrays", alias = "PadArrays")] + pad_arrays: Option, + #[serde(alias = "padObjects", alias = "PadObjects")] + pad_objects: Option, + #[serde(alias = "prettyFieldNames", alias = "PrettyFieldNames")] + pretty_field_names: Option, +} + impl ServerConfigPatch { fn is_empty(&self) -> bool { self.jpath.is_none() @@ -231,9 +251,13 @@ impl ServerConfig { /// Parse configuration from LSP initialization options. 
#[must_use] pub fn from_initialization_options(value: Option) -> Self { - value.map_or_else(Self::default, |v| { - serde_json::from_value(v).unwrap_or_default() - }) + let mut config = Self::default(); + if let Some(value) = value { + if let Ok(patch) = serde_json::from_value::(value) { + patch.apply(&mut config); + } + } + config } /// Update configuration from a didChangeConfiguration notification. @@ -273,7 +297,7 @@ impl ServerConfig { if other.log_level.is_some() { self.log_level = other.log_level; } - self.merge_formatting(other.formatting); + self.formatting = other.formatting; if other.code_actions != CodeActionConfig::default() { self.code_actions = other.code_actions; } @@ -283,48 +307,27 @@ impl ServerConfig { } /// Merge formatting configuration. - fn merge_formatting(&mut self, other: FormattingConfig) { - if other.indent.is_some() { - self.formatting.indent = other.indent; - } - if other.max_blank_lines.is_some() { - self.formatting.max_blank_lines = other.max_blank_lines; - } - if other.string_style.is_some() { - self.formatting.string_style = other.string_style; - } - if other.comment_style.is_some() { - self.formatting.comment_style = other.comment_style; - } - if other.pad_arrays.is_some() { - self.formatting.pad_arrays = other.pad_arrays; - } - if other.pad_objects.is_some() { - self.formatting.pad_objects = other.pad_objects; - } - if other.pretty_field_names.is_some() { - self.formatting.pretty_field_names = other.pretty_field_names; - } - if other.sort_imports.is_some() { - self.formatting.sort_imports = other.sort_imports; + fn merge_formatting(&mut self, other: FormattingConfigPatch) { + if let Some(indent) = other.indent { + self.formatting.indent = indent; } - if other.use_implicit_plus.is_some() { - self.formatting.use_implicit_plus = other.use_implicit_plus; + if let Some(max_blank_lines) = other.max_blank_lines { + self.formatting.max_blank_lines = max_blank_lines; } - if other.strip_everything.is_some() { - 
self.formatting.strip_everything = other.strip_everything; + if let Some(string_style) = other.string_style { + self.formatting.string_style = string_style; } - if other.strip_comments.is_some() { - self.formatting.strip_comments = other.strip_comments; + if let Some(comment_style) = other.comment_style { + self.formatting.comment_style = comment_style; } - if other.strip_all_but_comments.is_some() { - self.formatting.strip_all_but_comments = other.strip_all_but_comments; + if let Some(pad_arrays) = other.pad_arrays { + self.formatting.pad_arrays = pad_arrays; } - if other.formatter_path.is_some() { - self.formatting.formatter_path = other.formatter_path; + if let Some(pad_objects) = other.pad_objects { + self.formatting.pad_objects = pad_objects; } - if other.formatter_engine.is_some() { - self.formatting.formatter_engine = other.formatter_engine; + if let Some(pretty_field_names) = other.pretty_field_names { + self.formatting.pretty_field_names = pretty_field_names; } } @@ -356,20 +359,35 @@ impl ServerConfig { #[cfg(test)] mod tests { + use std::collections::HashMap; + + use rstest::rstest; + use super::*; + #[derive(Debug, PartialEq, Eq)] + struct SettingsUpdate { + updated: bool, + config: ServerConfig, + } + + fn apply_settings(config: ServerConfig, settings: serde_json::Value) -> SettingsUpdate { + let mut config = config; + let updated = config.update_from_settings(settings); + SettingsUpdate { updated, config } + } + #[test] fn test_default_config() { - let config = ServerConfig::new(); - assert!(config.jpath.is_empty()); - assert!(config.ext_vars.is_empty()); - assert!(!config.enable_eval_diagnostics); assert_eq!( - config.resolve_paths_with_tanka, - ResolvePathsWithTankaMode::Auto + ServerConfig::new(), + ServerConfig { + resolve_paths_with_tanka: ResolvePathsWithTankaMode::Auto, + code_actions: CodeActionConfig::default(), + inlay_hints: InlayHintsConfig::default(), + ..ServerConfig::default() + } ); - assert_eq!(config.code_actions, 
CodeActionConfig::default()); - assert_eq!(config.inlay_hints, InlayHintsConfig::default()); } #[test] @@ -386,21 +404,19 @@ mod tests { "enableEvalDiagnostics": true }); - let config = ServerConfig::from_initialization_options(Some(json)); assert_eq!( - config.jpath, - vec![PathBuf::from("/usr/share/jsonnet"), PathBuf::from("./lib")] - ); - assert_eq!( - config.ext_vars.get("environment"), - Some(&"production".to_string()) - ); - assert_eq!(config.ext_vars.get("version"), Some(&"1.0.0".to_string())); - assert_eq!( - config.ext_code.get("config"), - Some(&"{ key: 'value' }".to_string()) + ServerConfig::from_initialization_options(Some(json)), + ServerConfig { + jpath: vec![PathBuf::from("/usr/share/jsonnet"), PathBuf::from("./lib")], + ext_vars: HashMap::from([ + ("environment".to_string(), "production".to_string()), + ("version".to_string(), "1.0.0".to_string()), + ]), + ext_code: HashMap::from([("config".to_string(), "{ key: 'value' }".to_string())]), + enable_eval_diagnostics: true, + ..ServerConfig::default() + } ); - assert!(config.enable_eval_diagnostics); } #[test] @@ -413,12 +429,14 @@ mod tests { "resolve_paths_with_tanka": true }); - let config = ServerConfig::from_initialization_options(Some(json)); - assert_eq!(config.jpath, vec![PathBuf::from("/usr/share/jsonnet")]); - assert_eq!(config.ext_vars.get("env"), Some(&"dev".to_string())); assert_eq!( - config.resolve_paths_with_tanka, - ResolvePathsWithTankaMode::True + ServerConfig::from_initialization_options(Some(json)), + ServerConfig { + jpath: vec![PathBuf::from("/usr/share/jsonnet")], + ext_vars: HashMap::from([("env".to_string(), "dev".to_string())]), + resolve_paths_with_tanka: ResolvePathsWithTankaMode::True, + ..ServerConfig::default() + } ); } @@ -428,10 +446,12 @@ mod tests { "resolvePathsWithTanka": "auto" }); - let config = ServerConfig::from_initialization_options(Some(json)); assert_eq!( - config.resolve_paths_with_tanka, - ResolvePathsWithTankaMode::Auto + 
ServerConfig::from_initialization_options(Some(json)), + ServerConfig { + resolve_paths_with_tanka: ResolvePathsWithTankaMode::Auto, + ..ServerConfig::default() + } ); } @@ -441,96 +461,133 @@ mod tests { "tankaMode": false }); - let config = ServerConfig::from_initialization_options(Some(json)); assert_eq!( - config.resolve_paths_with_tanka, - ResolvePathsWithTankaMode::False + ServerConfig::from_initialization_options(Some(json)), + ServerConfig { + resolve_paths_with_tanka: ResolvePathsWithTankaMode::False, + ..ServerConfig::default() + } ); } #[test] fn test_parse_empty_options() { - let config = ServerConfig::from_initialization_options(None); - assert!(config.jpath.is_empty()); + assert_eq!( + ServerConfig::from_initialization_options(None), + ServerConfig::default() + ); } #[test] fn test_merge_configs() { - let mut base = ServerConfig::new(); - base.jpath = vec![PathBuf::from("/base/path")]; - base.ext_vars - .insert("key1".to_string(), "value1".to_string()); - - let mut other = ServerConfig::new(); - other.jpath = vec![PathBuf::from("/other/path")]; - other - .ext_vars - .insert("key2".to_string(), "value2".to_string()); - other.enable_eval_diagnostics = true; + let mut base = ServerConfig { + jpath: vec![PathBuf::from("/base/path")], + ext_vars: HashMap::from([("key1".to_string(), "value1".to_string())]), + ..ServerConfig::default() + }; + let other = ServerConfig { + jpath: vec![PathBuf::from("/other/path")], + ext_vars: HashMap::from([("key2".to_string(), "value2".to_string())]), + enable_eval_diagnostics: true, + ..ServerConfig::default() + }; base.merge(other); - // jpath is replaced, not merged - assert_eq!(base.jpath, vec![PathBuf::from("/other/path")]); - // ext_vars are merged - assert_eq!(base.ext_vars.get("key1"), Some(&"value1".to_string())); - assert_eq!(base.ext_vars.get("key2"), Some(&"value2".to_string())); - assert!(base.enable_eval_diagnostics); + assert_eq!( + base, + ServerConfig { + jpath: vec![PathBuf::from("/other/path")], + 
ext_vars: HashMap::from([ + ("key1".to_string(), "value1".to_string()), + ("key2".to_string(), "value2".to_string()), + ]), + enable_eval_diagnostics: true, + ..ServerConfig::default() + } + ); } #[test] fn test_update_from_settings() { - let mut config = ServerConfig::new(); - let settings = serde_json::json!({ "jpath": ["/new/path"], "enableEvalDiagnostics": true, "logLevel": "debug" }); - assert!(config.update_from_settings(settings)); - assert_eq!(config.jpath, vec![PathBuf::from("/new/path")]); - assert!(config.enable_eval_diagnostics); - assert_eq!(config.log_level, Some("debug".to_string())); + assert_eq!( + apply_settings(ServerConfig::new(), settings), + SettingsUpdate { + updated: true, + config: ServerConfig { + jpath: vec![PathBuf::from("/new/path")], + enable_eval_diagnostics: true, + log_level: Some("debug".to_string()), + ..ServerConfig::default() + } + } + ); } #[test] fn test_update_from_settings_can_disable_eval_diagnostics() { - let mut config = ServerConfig::new(); - config.enable_eval_diagnostics = true; - let settings = serde_json::json!({ "enableEvalDiagnostics": false }); - assert!(config.update_from_settings(settings)); - assert!(!config.enable_eval_diagnostics); - } - - #[test] - fn test_update_from_settings_updates_tanka_mode() { - let mut config = ServerConfig::new(); assert_eq!( - config.resolve_paths_with_tanka, - ResolvePathsWithTankaMode::Auto + apply_settings( + ServerConfig { + enable_eval_diagnostics: true, + ..ServerConfig::default() + }, + settings + ), + SettingsUpdate { + updated: true, + config: ServerConfig::default() + } ); + } + #[test] + fn test_update_from_settings_updates_tanka_mode_to_true() { let settings = serde_json::json!({ "resolvePathsWithTanka": "true" }); - assert!(config.update_from_settings(settings)); assert_eq!( - config.resolve_paths_with_tanka, - ResolvePathsWithTankaMode::True + apply_settings(ServerConfig::new(), settings), + SettingsUpdate { + updated: true, + config: ServerConfig { + 
resolve_paths_with_tanka: ResolvePathsWithTankaMode::True, + ..ServerConfig::default() + } + } ); + } + #[test] + fn test_update_from_settings_updates_tanka_mode_to_false() { let settings = serde_json::json!({ "resolvePathsWithTanka": false }); - assert!(config.update_from_settings(settings)); assert_eq!( - config.resolve_paths_with_tanka, - ResolvePathsWithTankaMode::False + apply_settings( + ServerConfig { + resolve_paths_with_tanka: ResolvePathsWithTankaMode::True, + ..ServerConfig::default() + }, + settings + ), + SettingsUpdate { + updated: true, + config: ServerConfig { + resolve_paths_with_tanka: ResolvePathsWithTankaMode::False, + ..ServerConfig::default() + } + } ); } @@ -543,21 +600,20 @@ mod tests { } }); - let config = ServerConfig::from_initialization_options(Some(json)); - assert_eq!( - config.code_actions.remove_unused, - RemoveUnusedMode::NonImportBindings - ); assert_eq!( - config.code_actions.remove_unused_comments, - RemoveUnusedCommentsMode::Above + ServerConfig::from_initialization_options(Some(json)), + ServerConfig { + code_actions: CodeActionConfig { + remove_unused: RemoveUnusedMode::NonImportBindings, + remove_unused_comments: RemoveUnusedCommentsMode::Above, + }, + ..ServerConfig::default() + } ); } #[test] fn test_update_from_settings_updates_code_actions_config() { - let mut config = ServerConfig::new(); - let settings = serde_json::json!({ "codeActions": { "removeUnused": "nonImportBindings", @@ -565,14 +621,18 @@ mod tests { } }); - assert!(config.update_from_settings(settings)); assert_eq!( - config.code_actions.remove_unused, - RemoveUnusedMode::NonImportBindings - ); - assert_eq!( - config.code_actions.remove_unused_comments, - RemoveUnusedCommentsMode::Below + apply_settings(ServerConfig::new(), settings), + SettingsUpdate { + updated: true, + config: ServerConfig { + code_actions: CodeActionConfig { + remove_unused: RemoveUnusedMode::NonImportBindings, + remove_unused_comments: RemoveUnusedCommentsMode::Below, + }, + 
..ServerConfig::default() + } + } ); } @@ -585,18 +645,21 @@ mod tests { } }); - let config = ServerConfig::from_initialization_options(Some(json)); - assert_eq!(config.inlay_hints.local, LocalHintsMode::Off); assert_eq!( - config.inlay_hints.object_members, - ObjectMemberHintsMode::Fields + ServerConfig::from_initialization_options(Some(json)), + ServerConfig { + inlay_hints: InlayHintsConfig { + local: LocalHintsMode::Off, + object_local: LocalHintsMode::All, + object_members: ObjectMemberHintsMode::Fields, + }, + ..ServerConfig::default() + } ); } #[test] fn test_update_from_settings_updates_inlay_hints_config() { - let mut config = ServerConfig::new(); - let settings = serde_json::json!({ "inlayHints": { "local": "functions", @@ -604,32 +667,136 @@ mod tests { } }); - assert!(config.update_from_settings(settings)); - assert_eq!(config.inlay_hints.local, LocalHintsMode::Functions); assert_eq!( - config.inlay_hints.object_members, - ObjectMemberHintsMode::Methods + apply_settings(ServerConfig::new(), settings), + SettingsUpdate { + updated: true, + config: ServerConfig { + inlay_hints: InlayHintsConfig { + local: LocalHintsMode::Functions, + object_local: LocalHintsMode::All, + object_members: ObjectMemberHintsMode::Methods, + }, + ..ServerConfig::default() + } + } ); } #[test] fn test_formatting_config() { + let json = serde_json::json!({ + "formatting": { + "indent": 4, + "string_style": "double", + "pad_arrays": true + } + }); + + assert_eq!( + ServerConfig::from_initialization_options(Some(json)), + ServerConfig { + formatting: FormattingConfig { + indent: 4, + string_style: FormattingStringStyle::Double, + pad_arrays: true, + ..FormattingConfig::default() + }, + ..ServerConfig::default() + } + ); + } + + #[test] + fn test_formatting_config_accepts_legacy_go_jsonnet_aliases() { let json = serde_json::json!({ "formatting": { "Indent": 4, - "StringStyle": "double", + "MaxBlankLines": 1, + "StringStyle": "single", + "CommentStyle": "hash", "PadArrays": true, - 
"FormatterEngine": "bin-jsonnetfmt-stdio" + "PadObjects": false, + "PrettyFieldNames": false } }); - let config = ServerConfig::from_initialization_options(Some(json)); - assert_eq!(config.formatting.indent, Some(4)); - assert_eq!(config.formatting.string_style, Some("double".to_string())); - assert_eq!(config.formatting.pad_arrays, Some(true)); assert_eq!( - config.formatting.formatter_engine, - Some(FormatterEngine::BinJsonnetfmtStdio) + ServerConfig::from_initialization_options(Some(json)), + ServerConfig { + formatting: FormattingConfig { + indent: 4, + max_blank_lines: 1, + string_style: FormattingStringStyle::Single, + comment_style: FormattingCommentStyle::Hash, + pad_arrays: true, + pad_objects: false, + pretty_field_names: false, + }, + ..ServerConfig::default() + } + ); + } + + #[rstest] + #[case( + serde_json::json!({"formatting":{"stringStyle":"double","padObjects":false}}), + FormattingConfig { + string_style: FormattingStringStyle::Double, + pad_objects: false, + ..FormattingConfig::default() + } + )] + #[case( + serde_json::json!({"formatting":{"StringStyle":"single","PadObjects":false}}), + FormattingConfig { + string_style: FormattingStringStyle::Single, + pad_objects: false, + ..FormattingConfig::default() + } + )] + fn test_update_from_settings_accepts_formatting_aliases( + #[case] settings: serde_json::Value, + #[case] expected_formatting: FormattingConfig, + ) { + assert_eq!( + apply_settings(ServerConfig::new(), settings), + SettingsUpdate { + updated: true, + config: ServerConfig { + formatting: expected_formatting, + ..ServerConfig::default() + } + } + ); + } + + #[test] + fn test_formatting_rejects_out_of_range_indent_in_initialization_options() { + let json = serde_json::json!({ + "formatting": { + "indent": 300 + } + }); + assert_eq!( + ServerConfig::from_initialization_options(Some(json)), + ServerConfig::default() + ); + } + + #[test] + fn test_formatting_rejects_out_of_range_indent_in_settings_update() { + let settings = 
serde_json::json!({ + "formatting": { + "indent": 300 + } + }); + assert_eq!( + apply_settings(ServerConfig::new(), settings), + SettingsUpdate { + updated: false, + config: ServerConfig::default() + } ); } } diff --git a/crates/jrsonnet-lsp/src/server.rs b/crates/jrsonnet-lsp/src/server.rs index 3997ee74..adcc73bf 100644 --- a/crates/jrsonnet-lsp/src/server.rs +++ b/crates/jrsonnet-lsp/src/server.rs @@ -169,7 +169,6 @@ impl Server { Arc::clone(&self.global_types), Arc::clone(&self.type_cache), Arc::clone(&self.config), - self.workspace_roots.clone(), ) } diff --git a/crates/jrsonnet-lsp/src/server/async_requests.rs b/crates/jrsonnet-lsp/src/server/async_requests.rs index 29a7a49e..986d17dd 100644 --- a/crates/jrsonnet-lsp/src/server/async_requests.rs +++ b/crates/jrsonnet-lsp/src/server/async_requests.rs @@ -21,7 +21,7 @@ mod semantic_tokens_range; mod signature_help; mod workspace_symbol; -use std::{path::PathBuf, sync::Arc}; +use std::sync::Arc; use jrsonnet_lsp_document::{CanonicalPath, Document}; use jrsonnet_lsp_import::ImportGraph; @@ -38,7 +38,6 @@ pub(super) struct AsyncRequestContext { global_types: Arc, type_cache: SharedTypeCache, config: SharedConfig, - workspace_roots: Vec, } impl AsyncRequestContext { @@ -48,7 +47,6 @@ impl AsyncRequestContext { global_types: Arc, type_cache: SharedTypeCache, config: SharedConfig, - workspace_roots: Vec, ) -> Self { Self { documents, @@ -56,7 +54,6 @@ impl AsyncRequestContext { global_types, type_cache, config, - workspace_roots, } } diff --git a/crates/jrsonnet-lsp/src/server/async_requests/formatting.rs b/crates/jrsonnet-lsp/src/server/async_requests/formatting.rs index df0ec001..a795c42e 100644 --- a/crates/jrsonnet-lsp/src/server/async_requests/formatting.rs +++ b/crates/jrsonnet-lsp/src/server/async_requests/formatting.rs @@ -1,19 +1,65 @@ use jrsonnet_lsp_document::CanonicalPath; use jrsonnet_lsp_handlers as handlers; -use lsp_types::{DocumentFormattingParams, TextEdit}; +use 
lsp_types::{DocumentFormattingParams, FormattingOptions, TextEdit}; use super::AsyncRequestContext; +fn formatting_config_for_request( + base: &handlers::FormattingConfig, + options: &FormattingOptions, +) -> handlers::FormattingConfig { + let mut config = base.clone(); + config.indent = if options.insert_spaces { + u8::try_from(options.tab_size).unwrap_or(u8::MAX) + } else { + 0 + }; + config +} + impl AsyncRequestContext { pub(crate) fn formatting(&self, params: &DocumentFormattingParams) -> Option> { let uri = ¶ms.text_document.uri; let path = CanonicalPath::from_uri(uri).ok()?; let doc = self.documents.get(&path)?; - let config = self.config.read().formatting.clone(); - let context = - handlers::FormattingContext::for_document(path.as_path(), &self.workspace_roots); + let config = formatting_config_for_request(&self.config.read().formatting, ¶ms.options); + + handlers::format_document_with_config(doc.text(), &config) + } +} + +#[cfg(test)] +mod tests { + use lsp_types::FormattingOptions; + use rstest::rstest; + + use super::formatting_config_for_request; + + #[rstest] + #[case(true, 2, 2)] + #[case(true, 300, u8::MAX)] + #[case(false, 8, 0)] + fn test_formatting_options_control_indent( + #[case] insert_spaces: bool, + #[case] tab_size: u32, + #[case] expected_indent: u8, + ) { + let base = jrsonnet_lsp_handlers::FormattingConfig { + indent: 7, + ..jrsonnet_lsp_handlers::FormattingConfig::default() + }; + let options = FormattingOptions { + tab_size, + insert_spaces, + properties: std::collections::HashMap::new(), + trim_trailing_whitespace: None, + insert_final_newline: None, + trim_final_newlines: None, + }; - handlers::format_document_with_config(doc.text(), &config, context) + let merged = formatting_config_for_request(&base, &options); + assert_eq!(merged.indent, expected_indent); + assert_eq!(merged.max_blank_lines, base.max_blank_lines); } } diff --git a/crates/jrsonnet-lsp/tests/integration_test.rs b/crates/jrsonnet-lsp/tests/integration_test.rs index 
9d392a75..bc1145b9 100644 --- a/crates/jrsonnet-lsp/tests/integration_test.rs +++ b/crates/jrsonnet-lsp/tests/integration_test.rs @@ -15,9 +15,9 @@ use lsp_types::{ }, request::{ CodeActionRequest, CodeLensRequest, CodeLensResolve, DocumentHighlightRequest, - ExecuteCommand, GotoDeclaration, GotoDefinition, GotoImplementation, GotoTypeDefinition, - Initialize, InlayHintRequest, References, RegisterCapability, Rename, Request as _, - SemanticTokensRangeRequest, Shutdown, WorkspaceSymbolRequest, + ExecuteCommand, Formatting, GotoDeclaration, GotoDefinition, GotoImplementation, + GotoTypeDefinition, Initialize, InlayHintRequest, References, RegisterCapability, Rename, + Request as _, SemanticTokensRangeRequest, Shutdown, WorkspaceSymbolRequest, }, CancelParams, DidChangeConfigurationParams, DidChangeWatchedFilesClientCapabilities, DidChangeWatchedFilesParams, DidChangeWatchedFilesRegistrationOptions, @@ -484,6 +484,28 @@ fn inlay_hint_request( ) } +fn formatting_request(id: i32, uri: &str, tab_size: u32, insert_spaces: bool) -> Request { + let params = lsp_types::DocumentFormattingParams { + text_document: TextDocumentIdentifier { + uri: uri.parse().unwrap(), + }, + options: lsp_types::FormattingOptions { + tab_size, + insert_spaces, + properties: std::collections::HashMap::new(), + trim_trailing_whitespace: None, + insert_final_newline: None, + trim_final_newlines: None, + }, + work_done_progress_params: WorkDoneProgressParams::default(), + }; + Request::new( + id.into(), + Formatting::METHOD.to_string(), + serde_json::to_value(params).unwrap(), + ) +} + fn semantic_tokens_range_request( id: i32, uri: &str, @@ -1006,6 +1028,8 @@ fn run_server(connection: Connection) -> thread::JoinHandle<()> { #[path = "integration_test/features.rs"] mod features; +#[path = "integration_test/formatting.rs"] +mod formatting; #[path = "integration_test/lifecycle.rs"] mod lifecycle; #[path = "integration_test/navigation.rs"] diff --git 
a/crates/jrsonnet-lsp/tests/integration_test/formatting.rs b/crates/jrsonnet-lsp/tests/integration_test/formatting.rs new file mode 100644 index 00000000..9a3926af --- /dev/null +++ b/crates/jrsonnet-lsp/tests/integration_test/formatting.rs @@ -0,0 +1,309 @@ +use std::{any::Any, thread::JoinHandle}; + +use rstest::rstest; +use thiserror::Error; + +use super::*; + +type Result = std::result::Result; + +#[derive(Debug, Error)] +enum FormatTestError { + #[error("failed to send LSP message")] + SendMessage { + #[source] + source: Box>, + }, + #[error("formatting request #{request_id} returned error: {error}")] + FormattingRequestFailed { request_id: i32, error: String }, + #[error("formatting request #{request_id} missing result")] + MissingFormattingResult { request_id: i32 }, + #[error("failed to parse formatting response edits")] + FormattingResultParse(#[from] serde_json::Error), + #[error(transparent)] + JoinServerThread(#[from] JoinThreadPanic), + #[error(transparent)] + JoinServerThreadPayload(#[from] JoinThreadPanicParseError), +} + +impl From> for FormatTestError { + fn from(source: crossbeam_channel::SendError) -> Self { + Self::SendMessage { + source: Box::new(source), + } + } +} + +#[derive(Debug, Error)] +#[error("server thread panicked while joining: {message}")] +struct JoinThreadPanic { + message: String, +} + +#[derive(Debug, Error)] +#[error("server thread panicked while joining: non-string panic payload")] +struct JoinThreadPanicParseError; + +impl TryFrom> for JoinThreadPanic { + type Error = JoinThreadPanicParseError; + + // `JoinHandle::join` returns an opaque panic payload (`Any`), so we downcast common string forms + // to keep panic diagnostics readable. Non-string payloads are reported explicitly. 
+ fn try_from(payload: Box) -> std::result::Result { + let payload = match payload.downcast::() { + Ok(value) => return Ok(Self { message: *value }), + Err(payload) => payload, + }; + + let message = if let Ok(value) = payload.downcast::<&'static str>() { + (*value).to_string() + } else { + return Err(JoinThreadPanicParseError); + }; + Ok(Self { message }) + } +} + +impl From> for FormatTestError { + // Bridge `join()`'s panic payload into our test error so `handle.join()?` works at the call site + // while keeping the enum variant typed via `#[from]`. + fn from(value: Box) -> Self { + match JoinThreadPanic::try_from(value) { + Ok(panic) => panic.into(), + Err(err) => err.into(), + } + } +} + +struct FormatSession { + client_conn: Connection, + server_thread: Option>, + next_id: i32, +} + +impl FormatSession { + fn send(&self, message: Message) -> Result<()> { + self.client_conn.sender.send(message)?; + Ok(()) + } + + fn start(initialization_options: serde_json::Value) -> Result { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + let session = Self { + client_conn, + server_thread: Some(server_thread), + next_id: 2, + }; + + session.send(Message::Request(initialize_request_with_options( + 1, + initialization_options, + )))?; + let _ = recv_response(&session.client_conn, 1); + session.send(Message::Notification(initialized_notification()))?; + + Ok(session) + } + + fn open(&self, uri: &str, text: &str) -> Result<()> { + self.send(Message::Notification(did_open_notification(uri, text))) + } + + fn change_configuration(&self, settings: serde_json::Value) -> Result<()> { + self.send(Message::Notification( + did_change_configuration_notification(settings), + )) + } + + fn request_formatting( + &mut self, + uri: &str, + tab_size: u32, + insert_spaces: bool, + ) -> Result>> { + let request_id = self.next_id; + self.next_id += 1; + + self.send(Message::Request(formatting_request( + request_id, + uri, + tab_size, + 
insert_spaces, + )))?; + + let response = recv_response(&self.client_conn, request_id); + if let Some(error) = response.error { + return Err(FormatTestError::FormattingRequestFailed { + request_id, + error: format!("{error:?}"), + }); + } + + let result = response + .result + .ok_or(FormatTestError::MissingFormattingResult { request_id })?; + Ok(serde_json::from_value(result)?) + } + + fn shutdown(mut self) -> Result<()> { + let request_id = self.next_id; + self.next_id += 1; + + self.send(Message::Request(shutdown_request(request_id)))?; + let _ = recv_response(&self.client_conn, request_id); + self.send(Message::Notification(exit_notification()))?; + + if let Some(handle) = self.server_thread.take() { + handle.join()?; + } + + Ok(()) + } +} + +#[derive(Debug)] +struct SingleFormattingCase { + init_options: serde_json::Value, + uri: &'static str, + text: &'static str, + tab_size: u32, + insert_spaces: bool, + expected: Option>, +} + +const SIMPLE_OBJECT: &str = "{a:1}"; + +#[rstest] +#[case(SingleFormattingCase { + init_options: serde_json::Value::Null, + uri: "file:///test/format-default.jsonnet", + text: SIMPLE_OBJECT, + tab_size: 8, + insert_spaces: true, + expected: Some(vec![lsp_types::TextEdit { + range: lsp_types::Range { + start: Position { line: 0, character: 0 }, + end: Position { line: 0, character: 5 }, + }, + new_text: "{\n a: 1,\n}\n".to_string(), + }]), +})] +#[case(SingleFormattingCase { + init_options: serde_json::Value::Null, + uri: "file:///test/format-parse-error.jsonnet", + text: "local x = ", + tab_size: 2, + insert_spaces: true, + expected: None, +})] +#[case(SingleFormattingCase { + init_options: serde_json::json!({ + "formatting": { + "indent": 6 + } + }), + uri: "file:///test/format-request-options-override.jsonnet", + text: SIMPLE_OBJECT, + tab_size: 3, + insert_spaces: true, + expected: Some(vec![lsp_types::TextEdit { + range: lsp_types::Range { + start: Position { line: 0, character: 0 }, + end: Position { line: 0, character: 5 }, + }, 
+ new_text: "{\n a: 1,\n}\n".to_string(), + }]), +})] +fn test_document_formatting_single_request_cases(#[case] case: SingleFormattingCase) -> Result<()> { + let mut session = FormatSession::start(case.init_options)?; + session.open(case.uri, case.text)?; + let edits = session.request_formatting(case.uri, case.tab_size, case.insert_spaces)?; + assert_eq!(edits, case.expected); + session.shutdown() +} + +#[test] +fn test_document_formatting_applies_runtime_formatting_config_changes() -> Result<()> { + let mut session = FormatSession::start(serde_json::Value::Null)?; + + let string_uri = "file:///test/format-config-string.jsonnet"; + session.open(string_uri, "{a:'x'}")?; + + let before_edits = session.request_formatting(string_uri, 2, true)?; + assert_eq!( + before_edits, + Some(vec![lsp_types::TextEdit { + range: lsp_types::Range { + start: Position { + line: 0, + character: 0, + }, + end: Position { + line: 0, + character: 7, + }, + }, + new_text: "{\n a: 'x',\n}\n".to_string(), + }]) + ); + + session.change_configuration(serde_json::json!({ + "jsonnet": { + "formatting": { + "indent": 2, + "string_style": "double" + } + } + }))?; + + let after_style_edits = session.request_formatting(string_uri, 8, false)?; + assert_eq!( + after_style_edits, + Some(vec![lsp_types::TextEdit { + range: lsp_types::Range { + start: Position { + line: 0, + character: 0, + }, + end: Position { + line: 0, + character: 7, + }, + }, + new_text: "{\n\ta: \"x\",\n}\n".to_string(), + }]) + ); + + let empty_uri = "file:///test/format-config-empty.jsonnet"; + session.open(empty_uri, "{}")?; + + session.change_configuration(serde_json::json!({ + "jsonnet": { + "formatting": { + "pad_objects": false + } + } + }))?; + + let after_padding_edits = session.request_formatting(empty_uri, 4, true)?; + assert_eq!( + after_padding_edits, + Some(vec![lsp_types::TextEdit { + range: lsp_types::Range { + start: Position { + line: 0, + character: 0, + }, + end: Position { + line: 0, + character: 2, + }, + }, + 
new_text: "{}\n".to_string(), + }]) + ); + + session.shutdown() +} diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/formatting_config_updates_apply.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/formatting_config_updates_apply.yaml new file mode 100644 index 00000000..108b4989 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/formatting_config_updates_apply.yaml @@ -0,0 +1,76 @@ +# Verify formatting configuration updates affect subsequent textDocument/formatting requests. +steps: +- step: create + files: + string.jsonnet: "{a:'x'}" + empty.jsonnet: "{}" + +- step: diagnosticsSettled + +- step: requestFormatting + as: defaultStringFormatting + file: string.jsonnet + +- step: expectFormatting + request: defaultStringFormatting + result: + - range: + start: + line: 0 + character: 0 + end: + line: 0 + character: 7 + newText: | + { + a: 'x', + } + +- step: config + settings: + jsonnet: + formatting: + indent: 2 + string_style: double + +- step: requestFormatting + as: configuredStringFormatting + file: string.jsonnet + +- step: expectFormatting + request: configuredStringFormatting + result: + - range: + start: + line: 0 + character: 0 + end: + line: 0 + character: 7 + newText: | + { + a: "x", + } + +- step: config + settings: + jsonnet: + formatting: + pad_objects: false + +- step: requestFormatting + as: configuredEmptyFormatting + file: empty.jsonnet + +- step: expectFormatting + request: configuredEmptyFormatting + result: + - range: + start: + line: 0 + character: 0 + end: + line: 0 + character: 2 + newText: | + {} diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/formatting_default_returns_full_edit.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/formatting_default_returns_full_edit.yaml new file mode 100644 index 00000000..60da340b --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/formatting_default_returns_full_edit.yaml @@ -0,0 +1,26 @@ +# Verify textDocument/formatting returns a full-document replacement edit by 
default. +steps: +- step: create + files: + main.jsonnet: "{a:1}" + +- step: diagnosticsSettled + +- step: requestFormatting + as: defaultFormatting + file: main.jsonnet + +- step: expectFormatting + request: defaultFormatting + result: + - range: + start: + line: 0 + character: 0 + end: + line: 0 + character: 5 + newText: | + { + a: 1, + } diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/missing_step_coverage.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/missing_step_coverage.yaml index 4c1aa331..f0d27cf5 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/runner/missing_step_coverage.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/missing_step_coverage.yaml @@ -110,11 +110,10 @@ steps: activeSignature: 0 activeParameter: 1 -- step: config - settings: - jsonnet: - formatting: - formatterPath: /definitely/missing-jsonnetfmt +- step: changeFull + file: main.jsonnet + text: "local broken = " + version: 2 - step: requestFormatting as: fmt diff --git a/docs/lsp/ARCHITECTURE.md b/docs/lsp/ARCHITECTURE.md index 8933eb9a..3deec6ef 100644 --- a/docs/lsp/ARCHITECTURE.md +++ b/docs/lsp/ARCHITECTURE.md @@ -346,14 +346,8 @@ Important behavior in `on_did_change_configuration`: `formatting` currently includes: -- formatter option flags forwarded to CLI formatters (`indent`, - `max_blank_lines`, string/comment style, padding/sort/strip toggles) -- `formatter_path` (explicit formatter binary path, highest precedence) -- `formatter_engine`: - - unset or `path`: PATH probing (`jrsonnet-fmt`, then `jsonnetfmt`) - - `bin-jsonnetfmt-stdio`: workspace-local contract mode - (`{workspaceRoot}/bin/jsonnetfmt -stdio `, cwd at workspace - root) +- built-in formatter options (`indent`, `max_blank_lines`, string/comment + style, padding toggles) Configuration can arrive via initialization options or `workspace/didChangeConfiguration` settings payloads. 
The update logic accepts diff --git a/docs/lsp/HANDLERS.md b/docs/lsp/HANDLERS.md index c11587ba..6efe5109 100644 --- a/docs/lsp/HANDLERS.md +++ b/docs/lsp/HANDLERS.md @@ -197,17 +197,11 @@ File: `crates/jrsonnet-lsp-handlers/src/document_highlight.rs` ### Formatting -File: `crates/jrsonnet-lsp-handlers/src/formatting.rs` - -- Uses `formatter_path` when configured (highest precedence). -- Otherwise uses `formatting.formatter_engine`. -- `path` (or unset): tries `jrsonnet-fmt` then `jsonnetfmt` in `PATH`. -- `bin-jsonnetfmt-stdio`: runs - `{workspaceRoot}/bin/jsonnetfmt -stdio `. -- Workspace contract mode receives request context from the server: - document path and initialize-time workspace roots. The nearest containing - workspace root is selected. +File: `crates/jrsonnet-lsp-handlers/src/formatting/mod.rs` + +- Uses the built-in formatter implementation. - Returns a full-document replacement edit when formatting changes text. +- Computes replacement ranges with UTF-16-aware position conversion. - Returns `None` when formatting is unavailable or fails. ### Hover diff --git a/docs/lsp/README.md b/docs/lsp/README.md index 198fee5b..4b8c7f32 100644 --- a/docs/lsp/README.md +++ b/docs/lsp/README.md @@ -164,44 +164,29 @@ Top-level options: ### `formatting` -If a field is omitted, the formatter's own default is used. - -`formatter_path` takes precedence over `formatter_engine`. - -`formatter_engine` values: - -- `path` (default; aliases: `auto`, `auto-path`): try `jrsonnet-fmt`, then - `jsonnetfmt` from `PATH`. -- `bin-jsonnetfmt-stdio` (aliases: `workspace-jsonnetfmt-stdio`, - `workspaceBinJsonnetfmtStdio`): run - `{workspaceRoot}/bin/jsonnetfmt -stdio `. +If a field is omitted, the formatter default is used. +For `textDocument/formatting`, the request's `FormattingOptions` control indentation: +`insert_spaces=false` forces tabs (`indent=0`), and `insert_spaces=true` uses +`tab_size` (clamped to `u8` range). 
Formatting fields: | Key | Type | Default when unset | Accepted values | | ------------------------ | --------- | ----------------------------------------- | ---------------------------------------------------------------------------------- | -| `indent` | `number` | formatter default (`2` in jsonnetfmt) | non-negative integer | -| `max_blank_lines` | `number` | formatter default (`2` in jsonnetfmt) | non-negative integer | -| `string_style` | `string` | formatter default (`leave` in jsonnetfmt) | `double`, `single`, `leave` (also accepts `d`/`s`; other values behave as `leave`) | -| `comment_style` | `string` | formatter default (`leave` in jsonnetfmt) | `hash`, `slash`, `leave` (also accepts `h`/`s`; other values behave as `leave`) | -| `pad_arrays` | `boolean` | formatter default (`false` in jsonnetfmt) | `true`/`false` | -| `pad_objects` | `boolean` | formatter default (`true` in jsonnetfmt) | `true`/`false` | -| `pretty_field_names` | `boolean` | formatter default (`true` in jsonnetfmt) | `true`/`false` | -| `sort_imports` | `boolean` | formatter default (`true` in jsonnetfmt) | `true`/`false` | -| `use_implicit_plus` | `boolean` | formatter default (`true` in jsonnetfmt) | `true`/`false` | -| `strip_everything` | `boolean` | formatter default (`false` in jsonnetfmt) | `true`/`false` | -| `strip_comments` | `boolean` | formatter default (`false` in jsonnetfmt) | `true`/`false` | -| `strip_all_but_comments` | `boolean` | formatter default (`false` in jsonnetfmt) | `true`/`false` | -| `formatter_path` | `string` | unset | path to formatter binary | -| `formatter_engine` | `string` | `path` | see values above | - -Formatting keys also accept legacy go-jsonnet style aliases: - -- `Indent`, `MaxBlankLines`, `StringStyle`, `CommentStyle` -- `PadArrays`, `PadObjects`, `PrettyFieldNames`, `SortImports`, - `UseImplicitPlus` -- `StripEverything`, `StripComments`, `StripAllButComments` -- `FormatterPath`, `FormatterEngine` +| `indent` | `number` | `0` (tab-based 
indentation) | non-negative integer | +| `max_blank_lines` | `number` | `2` | non-negative integer | +| `string_style` | `string` | `leave` | `double`, `single`, `leave` (also accepts `d`/`s`; other values behave as `leave`) | +| `comment_style` | `string` | `leave` | `hash`, `slash`, `leave` (also accepts `h`/`s`; other values behave as `leave`) | +| `pad_arrays` | `boolean` | `false` | `true`/`false` | +| `pad_objects` | `boolean` | `true` | `true`/`false` | +| `pretty_field_names` | `boolean` | `true` | `true`/`false` | + +Formatting keys also accept additional aliases: + +- camelCase: `maxBlankLines`, `stringStyle`, `commentStyle`, `padArrays`, + `padObjects`, `prettyFieldNames` +- legacy go-jsonnet style: `Indent`, `MaxBlankLines`, `StringStyle`, + `CommentStyle`, `PadArrays`, `PadObjects`, `PrettyFieldNames` ## Example @@ -223,8 +208,7 @@ Formatting keys also accept legacy go-jsonnet style aliases: }, "formatting": { "indent": 2, - "string_style": "leave", - "formatter_engine": "path" + "string_style": "leave" } } } @@ -269,13 +253,6 @@ vim.lsp.config("jrsonnet_lsp", { -- pad_arrays = false, -- boolean|nil -- pad_objects = true, -- boolean|nil -- pretty_field_names = true, -- boolean|nil - -- sort_imports = true, -- boolean|nil - -- use_implicit_plus = true, -- boolean|nil - -- strip_everything = false, -- boolean|nil - -- strip_comments = false, -- boolean|nil - -- strip_all_but_comments = false, -- boolean|nil - -- formatter_path = nil, -- string|nil - -- formatter_engine = "path", -- "path"|"auto"|"auto-path"|"bin-jsonnetfmt-stdio"|"workspace-jsonnetfmt-stdio"|"workspaceBinJsonnetfmtStdio"|nil -- }, -- codeActions = { -- removeUnused = "all", -- "all"|"importBindings"|"nonImportBindings" From 11df5690bacc46aba26ac120ce030ca47243cde0 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Wed, 25 Feb 2026 17:55:14 +0000 Subject: [PATCH 177/210] feat(lsp): add function parameter inlay hints Add parameter type inlay hints on function definitions while keeping default 
behavior unchanged. What this includes: - new "inlayHints.functionParameters" setting with modes "off|all" (default "off") - handler support for parameter hints on both "local f(...) = ..." and "function(...) ..." forms - unit coverage for enabled behavior and default non-intrusive behavior - README updates for defaults, accepted values, and config examples Notes: - existing defaults remain unchanged to avoid extra visual noise unless users opt in --- .../jrsonnet-lsp-handlers/src/inlay_hint.rs | 215 +++++++++++++++++- crates/jrsonnet-lsp-handlers/src/lib.rs | 3 +- crates/jrsonnet-lsp/src/config.rs | 6 +- docs/lsp/README.md | 7 +- 4 files changed, 223 insertions(+), 8 deletions(-) diff --git a/crates/jrsonnet-lsp-handlers/src/inlay_hint.rs b/crates/jrsonnet-lsp-handlers/src/inlay_hint.rs index 1cd7daba..b99c43ce 100644 --- a/crates/jrsonnet-lsp-handlers/src/inlay_hint.rs +++ b/crates/jrsonnet-lsp-handlers/src/inlay_hint.rs @@ -6,12 +6,13 @@ use jrsonnet_lsp_document::{to_lsp_range, Document, LineIndex}; use jrsonnet_lsp_inference::TypeAnalysis; use jrsonnet_rowan_parser::{ nodes::{ - Bind, BindDestruct, BindFunction, Destruct, Member, MemberFieldMethod, MemberFieldNormal, - ObjBodyMemberList, StmtLocal, + Bind, BindDestruct, BindFunction, Destruct, ExprFunction, Member, MemberFieldMethod, + MemberFieldNormal, ParamsDesc, StmtLocal, }, AstNode, }; use lsp_types::{InlayHint, InlayHintKind, InlayHintLabel, Position, Range}; +use rowan::TextRange; use serde::{Deserialize, Serialize}; use strum::{Display, EnumString}; @@ -63,6 +64,24 @@ impl ObjectMemberHintsMode { } } +/// Category selection for function parameter inlay hints. 
+#[derive( + Debug, Clone, Copy, Default, Display, EnumString, Serialize, Deserialize, PartialEq, Eq, +)] +#[serde(rename_all = "camelCase")] +#[strum(serialize_all = "camelCase")] +pub enum FunctionParameterHintsMode { + #[default] + Off, + All, +} + +impl FunctionParameterHintsMode { + const fn enabled(self) -> bool { + matches!(self, Self::All) + } +} + /// Configuration for inlay hint generation. #[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)] #[serde(default, rename_all = "camelCase")] @@ -73,6 +92,8 @@ pub struct InlayHintsConfig { pub object_local: LocalHintsMode, /// Category filter for object fields and methods. pub object_members: ObjectMemberHintsMode, + /// Category filter for function parameter type hints. + pub function_parameters: FunctionParameterHintsMode, } impl Default for InlayHintsConfig { @@ -81,6 +102,7 @@ impl Default for InlayHintsConfig { local: LocalHintsMode::All, object_local: LocalHintsMode::All, object_members: ObjectMemberHintsMode::Off, + function_parameters: FunctionParameterHintsMode::Off, } } } @@ -164,7 +186,7 @@ pub fn inlay_hints_with_config( for member_list in ast .syntax() .descendants() - .filter_map(ObjBodyMemberList::cast) + .filter_map(jrsonnet_rowan_parser::nodes::ObjBodyMemberList::cast) { for member in member_list.members() { match member { @@ -228,6 +250,30 @@ pub fn inlay_hints_with_config( } } + if config.function_parameters.enabled() { + for bind_function in ast.syntax().descendants().filter_map(BindFunction::cast) { + push_bind_function_parameter_hints( + &mut hints, + &bind_function, + analysis, + visible_range, + line_index, + text, + ); + } + + for expr_function in ast.syntax().descendants().filter_map(ExprFunction::cast) { + push_expr_function_parameter_hints( + &mut hints, + &expr_function, + analysis, + visible_range, + line_index, + text, + ); + } + } + hints } @@ -356,6 +402,101 @@ fn push_method_return_hint( } } +fn push_bind_function_parameter_hints( + hints: &mut Vec, + 
bind_function: &BindFunction, + analysis: &TypeAnalysis, + visible_range: Range, + line_index: &LineIndex, + text: &str, +) { + let Some(params_desc) = bind_function.params() else { + return; + }; + let Some(param_types) = function_param_types(analysis, bind_function.syntax().text_range()) + else { + return; + }; + + push_function_parameter_hints( + hints, + ¶ms_desc, + param_types, + analysis, + visible_range, + line_index, + text, + ); +} + +fn push_expr_function_parameter_hints( + hints: &mut Vec, + expr_function: &ExprFunction, + analysis: &TypeAnalysis, + visible_range: Range, + line_index: &LineIndex, + text: &str, +) { + let Some(params_desc) = expr_function.params_desc() else { + return; + }; + let Some(param_types) = function_param_types(analysis, expr_function.syntax().text_range()) + else { + return; + }; + + push_function_parameter_hints( + hints, + ¶ms_desc, + param_types, + analysis, + visible_range, + line_index, + text, + ); +} + +fn function_param_types( + analysis: &TypeAnalysis, + range: TextRange, +) -> Option> { + let ty = analysis.type_for_range(range)?; + let function_data = analysis.get_function(ty)?; + Some( + function_data + .params + .into_iter() + .map(|param| param.ty) + .collect(), + ) +} + +fn push_function_parameter_hints( + hints: &mut Vec, + params_desc: &ParamsDesc, + param_types: Vec, + analysis: &TypeAnalysis, + visible_range: Range, + line_index: &LineIndex, + text: &str, +) { + for (param, param_ty) in params_desc.params().zip(param_types.into_iter()) { + let Some(destruct) = param.destruct() else { + continue; + }; + + let type_str = analysis.display(param_ty); + if is_uninformative_type(&type_str) { + continue; + } + + let destruct_range = to_lsp_range(destruct.syntax().text_range(), line_index, text); + if position_in_range(destruct_range.end, visible_range) { + hints.push(type_hint(destruct_range.end, format!(": {type_str}"))); + } + } +} + #[cfg(test)] mod tests { use std::sync::Arc; @@ -471,6 +612,7 @@ mod tests { local: 
LocalHintsMode::Off, object_local: LocalHintsMode::Off, object_members: ObjectMemberHintsMode::Fields, + ..InlayHintsConfig::default() }; let hints = inlay_hints_with_config(&doc, &analysis, full_line_range(), &config); @@ -500,6 +642,7 @@ mod tests { local: LocalHintsMode::Off, object_local: LocalHintsMode::Off, object_members: ObjectMemberHintsMode::Methods, + ..InlayHintsConfig::default() }; let hints = inlay_hints_with_config(&doc, &analysis, full_line_range(), &config); @@ -569,4 +712,70 @@ mod tests { }], ); } + + #[test] + fn test_function_parameter_hints_for_bind_function_when_enabled() { + let doc = Document::new( + "local add(x, y=1) = x + y; add(1, 2)".to_string(), + DocVersion::new(1), + ); + let analysis = test_analysis(&doc); + let config = InlayHintsConfig { + local: LocalHintsMode::Off, + object_local: LocalHintsMode::Off, + object_members: ObjectMemberHintsMode::Off, + function_parameters: FunctionParameterHintsMode::All, + }; + + let hints = inlay_hints_with_config(&doc, &analysis, full_line_range(), &config); + assert_hints_eq( + &hints, + vec![InlayHint { + position: Position { + line: 0, + character: 14, + }, + label: InlayHintLabel::String(": number".to_string()), + kind: Some(InlayHintKind::TYPE), + text_edits: None, + tooltip: None, + padding_left: Some(true), + padding_right: None, + data: None, + }], + ); + } + + #[test] + fn test_function_parameter_hints_for_expr_function_when_enabled() { + let doc = Document::new( + "local f = function(a, b=1) a + b; f(1, 2)".to_string(), + DocVersion::new(1), + ); + let analysis = test_analysis(&doc); + let config = InlayHintsConfig { + local: LocalHintsMode::Off, + object_local: LocalHintsMode::Off, + object_members: ObjectMemberHintsMode::Off, + function_parameters: FunctionParameterHintsMode::All, + }; + + let hints = inlay_hints_with_config(&doc, &analysis, full_line_range(), &config); + assert_hints_eq( + &hints, + vec![InlayHint { + position: Position { + line: 0, + character: 23, + }, + label: 
InlayHintLabel::String(": number".to_string()), + kind: Some(InlayHintKind::TYPE), + text_edits: None, + tooltip: None, + padding_left: Some(true), + padding_right: None, + data: None, + }], + ); + } } diff --git a/crates/jrsonnet-lsp-handlers/src/lib.rs b/crates/jrsonnet-lsp-handlers/src/lib.rs index e2e4986d..bce07177 100644 --- a/crates/jrsonnet-lsp-handlers/src/lib.rs +++ b/crates/jrsonnet-lsp-handlers/src/lib.rs @@ -31,7 +31,8 @@ pub use formatting::{ }; pub use hover::{hover, hover_with_import_field_type}; pub use inlay_hint::{ - inlay_hints, inlay_hints_with_config, InlayHintsConfig, LocalHintsMode, ObjectMemberHintsMode, + inlay_hints, inlay_hints_with_config, FunctionParameterHintsMode, InlayHintsConfig, + LocalHintsMode, ObjectMemberHintsMode, }; pub use references::{ find_cross_file_references, find_cross_file_references_with_semantic, find_references, diff --git a/crates/jrsonnet-lsp/src/config.rs b/crates/jrsonnet-lsp/src/config.rs index df63a8b9..3e467fd7 100644 --- a/crates/jrsonnet-lsp/src/config.rs +++ b/crates/jrsonnet-lsp/src/config.rs @@ -10,8 +10,8 @@ use std::{collections::HashMap, fmt, path::PathBuf}; // Re-export config types from handlers crate pub use jrsonnet_lsp_handlers::{ CodeActionConfig, FormattingCommentStyle, FormattingConfig, FormattingStringStyle, - InlayHintsConfig, LocalHintsMode, ObjectMemberHintsMode, RemoveUnusedCommentsMode, - RemoveUnusedMode, + FunctionParameterHintsMode, InlayHintsConfig, LocalHintsMode, ObjectMemberHintsMode, + RemoveUnusedCommentsMode, RemoveUnusedMode, }; use serde::{Deserialize, Serialize}; @@ -652,6 +652,7 @@ mod tests { local: LocalHintsMode::Off, object_local: LocalHintsMode::All, object_members: ObjectMemberHintsMode::Fields, + function_parameters: FunctionParameterHintsMode::Off, }, ..ServerConfig::default() } @@ -676,6 +677,7 @@ mod tests { local: LocalHintsMode::Functions, object_local: LocalHintsMode::All, object_members: ObjectMemberHintsMode::Methods, + function_parameters: 
FunctionParameterHintsMode::Off, }, ..ServerConfig::default() } diff --git a/docs/lsp/README.md b/docs/lsp/README.md index 4b8c7f32..95262710 100644 --- a/docs/lsp/README.md +++ b/docs/lsp/README.md @@ -144,7 +144,7 @@ Top-level options: | `resolve_paths_with_tanka` | `string` | `"auto"` | Modes: "false", "auto", "true". Booleans are also accepted (`false` -> "false", `true` -> "true"). Aliases: `resolvePathsWithTanka`, `tankaMode`. | | `formatting` | `object` | `{}` | Formatting options, see below. | | `code_actions` | `object` | `{ "removeUnused": "all", "removeUnusedComments": "none" }` | Alias: `codeActions`. | -| `inlay_hints` | `object` | `{ "local": "all", "objectLocal": "all", "objectMembers": "off" }` | Alias: `inlayHints`. | +| `inlay_hints` | `object` | `{ "local": "all", "objectLocal": "all", "objectMembers": "off", "functionParameters": "off" }` | Alias: `inlayHints`. | | `log_level` | `string \| null` | `null` | Alias: `logLevel`. Intended values are standard log levels (for example `error`, `warn`, `info`, `debug`). Currently this value is parsed/stored but runtime logging is still controlled by process startup flags/env. 
| ### `code_actions` @@ -161,6 +161,7 @@ Top-level options: | `local` | `all` | `off`, `variables`, `functions`, `all` | | `objectLocal` | `all` | `off`, `variables`, `functions`, `all` | | `objectMembers` | `off` | `off`, `fields`, `methods`, `all` | +| `functionParameters` | `off` | `off`, `all` | ### `formatting` @@ -204,7 +205,8 @@ Formatting keys also accept additional aliases: "inlayHints": { "local": "all", "objectLocal": "all", - "objectMembers": "fields" + "objectMembers": "fields", + "functionParameters": "all" }, "formatting": { "indent": 2, @@ -262,6 +264,7 @@ vim.lsp.config("jrsonnet_lsp", { -- local = "all", -- "off"|"variables"|"functions"|"all" -- objectLocal = "all", -- "off"|"variables"|"functions"|"all" -- objectMembers = "off", -- "off"|"fields"|"methods"|"all" + -- functionParameters = "off", -- "off"|"all" -- }, -- logLevel = nil, -- string|nil (for example: "error"|"warn"|"info"|"debug") -- }, From 5c417c24e1d3d81b774c3cf3fcff6571fefe52b2 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Wed, 25 Feb 2026 18:05:29 +0000 Subject: [PATCH 178/210] feat(lsp): add anonymous function return inlay hints Add optional return-type inlay hints for anonymous function expressions while preserving non-intrusive defaults. 
What this includes: - new "inlayHints.anonymousFunctionReturns" setting with modes "off|all" (default "off") - handler support for expression functions, with hints rendered at the end of the parameter list - unit coverage for enabled behavior and expected hint placement - config test updates and README updates for defaults and examples Notes: - existing default inlay behavior is unchanged unless users opt in --- .../jrsonnet-lsp-handlers/src/inlay_hint.rs | 104 ++++++++++++++++++ crates/jrsonnet-lsp-handlers/src/lib.rs | 4 +- crates/jrsonnet-lsp/src/config.rs | 8 +- docs/lsp/README.md | 7 +- 4 files changed, 116 insertions(+), 7 deletions(-) diff --git a/crates/jrsonnet-lsp-handlers/src/inlay_hint.rs b/crates/jrsonnet-lsp-handlers/src/inlay_hint.rs index b99c43ce..2c3cee1f 100644 --- a/crates/jrsonnet-lsp-handlers/src/inlay_hint.rs +++ b/crates/jrsonnet-lsp-handlers/src/inlay_hint.rs @@ -82,6 +82,24 @@ impl FunctionParameterHintsMode { } } +/// Category selection for anonymous function return inlay hints. +#[derive( + Debug, Clone, Copy, Default, Display, EnumString, Serialize, Deserialize, PartialEq, Eq, +)] +#[serde(rename_all = "camelCase")] +#[strum(serialize_all = "camelCase")] +pub enum AnonymousFunctionReturnHintsMode { + #[default] + Off, + All, +} + +impl AnonymousFunctionReturnHintsMode { + const fn enabled(self) -> bool { + matches!(self, Self::All) + } +} + /// Configuration for inlay hint generation. #[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)] #[serde(default, rename_all = "camelCase")] @@ -94,6 +112,8 @@ pub struct InlayHintsConfig { pub object_members: ObjectMemberHintsMode, /// Category filter for function parameter type hints. pub function_parameters: FunctionParameterHintsMode, + /// Category filter for anonymous function return type hints. 
+ pub anonymous_function_returns: AnonymousFunctionReturnHintsMode, } impl Default for InlayHintsConfig { @@ -103,6 +123,7 @@ impl Default for InlayHintsConfig { object_local: LocalHintsMode::All, object_members: ObjectMemberHintsMode::Off, function_parameters: FunctionParameterHintsMode::Off, + anonymous_function_returns: AnonymousFunctionReturnHintsMode::Off, } } } @@ -274,6 +295,19 @@ pub fn inlay_hints_with_config( } } + if config.anonymous_function_returns.enabled() { + for expr_function in ast.syntax().descendants().filter_map(ExprFunction::cast) { + push_anonymous_function_return_hint( + &mut hints, + &expr_function, + analysis, + visible_range, + line_index, + text, + ); + } + } + hints } @@ -497,6 +531,42 @@ fn push_function_parameter_hints( } } +fn push_anonymous_function_return_hint( + hints: &mut Vec, + expr_function: &ExprFunction, + analysis: &TypeAnalysis, + visible_range: Range, + line_index: &LineIndex, + text: &str, +) { + let Some(body) = expr_function.expr() else { + return; + }; + let Some(return_ty) = analysis.type_for_range(body.syntax().text_range()) else { + return; + }; + let type_str = analysis.display(return_ty); + if is_uninformative_type(&type_str) { + return; + } + + let hint_pos = expr_function + .params_desc() + .map(|params| to_lsp_range(params.syntax().text_range(), line_index, text).end) + .or_else(|| { + expr_function + .expr() + .map(|expr| to_lsp_range(expr.syntax().text_range(), line_index, text).start) + }); + let Some(hint_pos) = hint_pos else { + return; + }; + + if position_in_range(hint_pos, visible_range) { + hints.push(type_hint(hint_pos, format!(" -> {type_str}"))); + } +} + #[cfg(test)] mod tests { use std::sync::Arc; @@ -725,6 +795,7 @@ mod tests { object_local: LocalHintsMode::Off, object_members: ObjectMemberHintsMode::Off, function_parameters: FunctionParameterHintsMode::All, + anonymous_function_returns: AnonymousFunctionReturnHintsMode::Off, }; let hints = inlay_hints_with_config(&doc, &analysis, 
full_line_range(), &config); @@ -758,6 +829,7 @@ mod tests { object_local: LocalHintsMode::Off, object_members: ObjectMemberHintsMode::Off, function_parameters: FunctionParameterHintsMode::All, + anonymous_function_returns: AnonymousFunctionReturnHintsMode::Off, }; let hints = inlay_hints_with_config(&doc, &analysis, full_line_range(), &config); @@ -778,4 +850,36 @@ mod tests { }], ); } + + #[test] + fn test_anonymous_function_return_hint_when_enabled() { + let doc = Document::new("(function(x) 1)(2)".to_string(), DocVersion::new(1)); + let analysis = test_analysis(&doc); + + let config = InlayHintsConfig { + local: LocalHintsMode::Off, + object_local: LocalHintsMode::Off, + object_members: ObjectMemberHintsMode::Off, + function_parameters: FunctionParameterHintsMode::Off, + anonymous_function_returns: AnonymousFunctionReturnHintsMode::All, + }; + + let hints = inlay_hints_with_config(&doc, &analysis, full_line_range(), &config); + assert_hints_eq( + &hints, + vec![InlayHint { + position: Position { + line: 0, + character: 12, + }, + label: InlayHintLabel::String(" -> number".to_string()), + kind: Some(InlayHintKind::TYPE), + text_edits: None, + tooltip: None, + padding_left: Some(true), + padding_right: None, + data: None, + }], + ); + } } diff --git a/crates/jrsonnet-lsp-handlers/src/lib.rs b/crates/jrsonnet-lsp-handlers/src/lib.rs index bce07177..0c113fbd 100644 --- a/crates/jrsonnet-lsp-handlers/src/lib.rs +++ b/crates/jrsonnet-lsp-handlers/src/lib.rs @@ -31,8 +31,8 @@ pub use formatting::{ }; pub use hover::{hover, hover_with_import_field_type}; pub use inlay_hint::{ - inlay_hints, inlay_hints_with_config, FunctionParameterHintsMode, InlayHintsConfig, - LocalHintsMode, ObjectMemberHintsMode, + inlay_hints, inlay_hints_with_config, AnonymousFunctionReturnHintsMode, + FunctionParameterHintsMode, InlayHintsConfig, LocalHintsMode, ObjectMemberHintsMode, }; pub use references::{ find_cross_file_references, find_cross_file_references_with_semantic, find_references, 
diff --git a/crates/jrsonnet-lsp/src/config.rs b/crates/jrsonnet-lsp/src/config.rs index 3e467fd7..ee06c427 100644 --- a/crates/jrsonnet-lsp/src/config.rs +++ b/crates/jrsonnet-lsp/src/config.rs @@ -9,9 +9,9 @@ use std::{collections::HashMap, fmt, path::PathBuf}; // Re-export config types from handlers crate pub use jrsonnet_lsp_handlers::{ - CodeActionConfig, FormattingCommentStyle, FormattingConfig, FormattingStringStyle, - FunctionParameterHintsMode, InlayHintsConfig, LocalHintsMode, ObjectMemberHintsMode, - RemoveUnusedCommentsMode, RemoveUnusedMode, + AnonymousFunctionReturnHintsMode, CodeActionConfig, FormattingCommentStyle, FormattingConfig, + FormattingStringStyle, FunctionParameterHintsMode, InlayHintsConfig, LocalHintsMode, + ObjectMemberHintsMode, RemoveUnusedCommentsMode, RemoveUnusedMode, }; use serde::{Deserialize, Serialize}; @@ -653,6 +653,7 @@ mod tests { object_local: LocalHintsMode::All, object_members: ObjectMemberHintsMode::Fields, function_parameters: FunctionParameterHintsMode::Off, + anonymous_function_returns: AnonymousFunctionReturnHintsMode::Off, }, ..ServerConfig::default() } @@ -678,6 +679,7 @@ mod tests { object_local: LocalHintsMode::All, object_members: ObjectMemberHintsMode::Methods, function_parameters: FunctionParameterHintsMode::Off, + anonymous_function_returns: AnonymousFunctionReturnHintsMode::Off, }, ..ServerConfig::default() } diff --git a/docs/lsp/README.md b/docs/lsp/README.md index 95262710..7738a8a5 100644 --- a/docs/lsp/README.md +++ b/docs/lsp/README.md @@ -144,7 +144,7 @@ Top-level options: | `resolve_paths_with_tanka` | `string` | `"auto"` | Modes: "false", "auto", "true". Booleans are also accepted (`false` -> "false", `true` -> "true"). Aliases: `resolvePathsWithTanka`, `tankaMode`. | | `formatting` | `object` | `{}` | Formatting options, see below. | | `code_actions` | `object` | `{ "removeUnused": "all", "removeUnusedComments": "none" }` | Alias: `codeActions`. 
| -| `inlay_hints` | `object` | `{ "local": "all", "objectLocal": "all", "objectMembers": "off", "functionParameters": "off" }` | Alias: `inlayHints`. | +| `inlay_hints` | `object` | `{ "local": "all", "objectLocal": "all", "objectMembers": "off", "functionParameters": "off", "anonymousFunctionReturns": "off" }` | Alias: `inlayHints`. | | `log_level` | `string \| null` | `null` | Alias: `logLevel`. Intended values are standard log levels (for example `error`, `warn`, `info`, `debug`). Currently this value is parsed/stored but runtime logging is still controlled by process startup flags/env. | ### `code_actions` @@ -162,6 +162,7 @@ Top-level options: | `objectLocal` | `all` | `off`, `variables`, `functions`, `all` | | `objectMembers` | `off` | `off`, `fields`, `methods`, `all` | | `functionParameters` | `off` | `off`, `all` | +| `anonymousFunctionReturns` | `off` | `off`, `all` | ### `formatting` @@ -206,7 +207,8 @@ Formatting keys also accept additional aliases: "local": "all", "objectLocal": "all", "objectMembers": "fields", - "functionParameters": "all" + "functionParameters": "all", + "anonymousFunctionReturns": "all" }, "formatting": { "indent": 2, @@ -265,6 +267,7 @@ vim.lsp.config("jrsonnet_lsp", { -- objectLocal = "all", -- "off"|"variables"|"functions"|"all" -- objectMembers = "off", -- "off"|"fields"|"methods"|"all" -- functionParameters = "off", -- "off"|"all" + -- anonymousFunctionReturns = "off", -- "off"|"all" -- }, -- logLevel = nil, -- string|nil (for example: "error"|"warn"|"info"|"debug") -- }, From d9f04a14a3c565560e421e2cb6332f828042ec31 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Wed, 25 Feb 2026 18:09:47 +0000 Subject: [PATCH 179/210] feat(lsp): add call-argument inlay parameter name hints Add optional call-site inlay hints that label positional arguments with inferred parameter names. 
What this includes: - new "inlayHints.callArguments" setting with modes "off|all" (default "off") - handler support for call expressions using inferred callee function types and parameter metadata - hints emitted only for positional args, while named args are intentionally skipped - unit tests for local-call hints and named-argument skip behavior - config test updates and README updates for defaults and examples Notes: - default inlay behavior remains unchanged unless users opt in --- .../jrsonnet-lsp-handlers/src/inlay_hint.rs | 183 +++++++++++++++++- crates/jrsonnet-lsp-handlers/src/lib.rs | 2 +- crates/jrsonnet-lsp/src/config.rs | 9 +- docs/lsp/README.md | 7 +- 4 files changed, 193 insertions(+), 8 deletions(-) diff --git a/crates/jrsonnet-lsp-handlers/src/inlay_hint.rs b/crates/jrsonnet-lsp-handlers/src/inlay_hint.rs index 2c3cee1f..73c409ee 100644 --- a/crates/jrsonnet-lsp-handlers/src/inlay_hint.rs +++ b/crates/jrsonnet-lsp-handlers/src/inlay_hint.rs @@ -6,8 +6,8 @@ use jrsonnet_lsp_document::{to_lsp_range, Document, LineIndex}; use jrsonnet_lsp_inference::TypeAnalysis; use jrsonnet_rowan_parser::{ nodes::{ - Bind, BindDestruct, BindFunction, Destruct, ExprFunction, Member, MemberFieldMethod, - MemberFieldNormal, ParamsDesc, StmtLocal, + Bind, BindDestruct, BindFunction, Destruct, ExprCall, ExprFunction, Member, + MemberFieldMethod, MemberFieldNormal, ParamsDesc, StmtLocal, }, AstNode, }; @@ -100,6 +100,24 @@ impl AnonymousFunctionReturnHintsMode { } } +/// Category selection for call-argument inlay hints. +#[derive( + Debug, Clone, Copy, Default, Display, EnumString, Serialize, Deserialize, PartialEq, Eq, +)] +#[serde(rename_all = "camelCase")] +#[strum(serialize_all = "camelCase")] +pub enum CallArgumentHintsMode { + #[default] + Off, + All, +} + +impl CallArgumentHintsMode { + const fn enabled(self) -> bool { + matches!(self, Self::All) + } +} + /// Configuration for inlay hint generation. 
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)] #[serde(default, rename_all = "camelCase")] @@ -114,6 +132,8 @@ pub struct InlayHintsConfig { pub function_parameters: FunctionParameterHintsMode, /// Category filter for anonymous function return type hints. pub anonymous_function_returns: AnonymousFunctionReturnHintsMode, + /// Category filter for call-argument parameter-name hints. + pub call_arguments: CallArgumentHintsMode, } impl Default for InlayHintsConfig { @@ -124,6 +144,7 @@ impl Default for InlayHintsConfig { object_members: ObjectMemberHintsMode::Off, function_parameters: FunctionParameterHintsMode::Off, anonymous_function_returns: AnonymousFunctionReturnHintsMode::Off, + call_arguments: CallArgumentHintsMode::Off, } } } @@ -141,6 +162,19 @@ fn type_hint(position: Position, label: String) -> InlayHint { } } +fn parameter_hint(position: Position, label: String) -> InlayHint { + InlayHint { + position, + label: InlayHintLabel::String(label), + kind: Some(InlayHintKind::PARAMETER), + text_edits: None, + tooltip: None, + padding_left: None, + padding_right: Some(true), + data: None, + } +} + fn position_in_range(pos: Position, range: Range) -> bool { (range.start.line, range.start.character) <= (pos.line, pos.character) && (pos.line, pos.character) <= (range.end.line, range.end.character) @@ -308,6 +342,12 @@ pub fn inlay_hints_with_config( } } + if config.call_arguments.enabled() { + for call in ast.syntax().descendants().filter_map(ExprCall::cast) { + push_call_argument_hints(&mut hints, &call, analysis, visible_range, line_index, text); + } + } + hints } @@ -567,6 +607,57 @@ fn push_anonymous_function_return_hint( } } +fn push_call_argument_hints( + hints: &mut Vec, + call: &ExprCall, + analysis: &TypeAnalysis, + visible_range: Range, + line_index: &LineIndex, + text: &str, +) { + let Some(callee) = call.callee() else { + return; + }; + let Some(callee_ty) = analysis.type_for_range(callee.syntax().text_range()) else { + return; + }; + 
let Some(function_data) = analysis.get_function(callee_ty) else { + return; + }; + let Some(args_desc) = call.args_desc() else { + return; + }; + + for (index, arg) in args_desc.args().enumerate() { + if arg.name().is_some() { + continue; + } + let Some(expr) = arg.expr() else { + continue; + }; + + let param_name = function_data + .params + .get(index) + .map(|param| param.name.as_str()) + .or_else(|| { + if function_data.variadic { + function_data.params.last().map(|param| param.name.as_str()) + } else { + None + } + }); + let Some(param_name) = param_name else { + continue; + }; + + let hint_pos = to_lsp_range(expr.syntax().text_range(), line_index, text).start; + if position_in_range(hint_pos, visible_range) { + hints.push(parameter_hint(hint_pos, format!("{param_name}:"))); + } + } +} + #[cfg(test)] mod tests { use std::sync::Arc; @@ -796,6 +887,7 @@ mod tests { object_members: ObjectMemberHintsMode::Off, function_parameters: FunctionParameterHintsMode::All, anonymous_function_returns: AnonymousFunctionReturnHintsMode::Off, + call_arguments: CallArgumentHintsMode::Off, }; let hints = inlay_hints_with_config(&doc, &analysis, full_line_range(), &config); @@ -830,6 +922,7 @@ mod tests { object_members: ObjectMemberHintsMode::Off, function_parameters: FunctionParameterHintsMode::All, anonymous_function_returns: AnonymousFunctionReturnHintsMode::Off, + call_arguments: CallArgumentHintsMode::Off, }; let hints = inlay_hints_with_config(&doc, &analysis, full_line_range(), &config); @@ -862,6 +955,7 @@ mod tests { object_members: ObjectMemberHintsMode::Off, function_parameters: FunctionParameterHintsMode::Off, anonymous_function_returns: AnonymousFunctionReturnHintsMode::All, + call_arguments: CallArgumentHintsMode::Off, }; let hints = inlay_hints_with_config(&doc, &analysis, full_line_range(), &config); @@ -882,4 +976,89 @@ mod tests { }], ); } + + #[test] + fn test_call_argument_hints_for_local_function_when_enabled() { + let doc = Document::new( + "local add(x, y=1) 
= x + y; add(1, 2)".to_string(), + DocVersion::new(1), + ); + let analysis = test_analysis(&doc); + let config = InlayHintsConfig { + local: LocalHintsMode::Off, + object_local: LocalHintsMode::Off, + object_members: ObjectMemberHintsMode::Off, + function_parameters: FunctionParameterHintsMode::Off, + anonymous_function_returns: AnonymousFunctionReturnHintsMode::Off, + call_arguments: CallArgumentHintsMode::All, + }; + + let hints = inlay_hints_with_config(&doc, &analysis, full_line_range(), &config); + assert_hints_eq( + &hints, + vec![ + InlayHint { + position: Position { + line: 0, + character: 31, + }, + label: InlayHintLabel::String("x:".to_string()), + kind: Some(InlayHintKind::PARAMETER), + text_edits: None, + tooltip: None, + padding_left: None, + padding_right: Some(true), + data: None, + }, + InlayHint { + position: Position { + line: 0, + character: 34, + }, + label: InlayHintLabel::String("y:".to_string()), + kind: Some(InlayHintKind::PARAMETER), + text_edits: None, + tooltip: None, + padding_left: None, + padding_right: Some(true), + data: None, + }, + ], + ); + } + + #[test] + fn test_call_argument_hints_skip_named_arguments() { + let doc = Document::new( + "local add(x, y=1) = x + y; add(1, y=2)".to_string(), + DocVersion::new(1), + ); + let analysis = test_analysis(&doc); + let config = InlayHintsConfig { + local: LocalHintsMode::Off, + object_local: LocalHintsMode::Off, + object_members: ObjectMemberHintsMode::Off, + function_parameters: FunctionParameterHintsMode::Off, + anonymous_function_returns: AnonymousFunctionReturnHintsMode::Off, + call_arguments: CallArgumentHintsMode::All, + }; + + let hints = inlay_hints_with_config(&doc, &analysis, full_line_range(), &config); + assert_hints_eq( + &hints, + vec![InlayHint { + position: Position { + line: 0, + character: 31, + }, + label: InlayHintLabel::String("x:".to_string()), + kind: Some(InlayHintKind::PARAMETER), + text_edits: None, + tooltip: None, + padding_left: None, + padding_right: 
Some(true), + data: None, + }], + ); + } } diff --git a/crates/jrsonnet-lsp-handlers/src/lib.rs b/crates/jrsonnet-lsp-handlers/src/lib.rs index 0c113fbd..cac729a2 100644 --- a/crates/jrsonnet-lsp-handlers/src/lib.rs +++ b/crates/jrsonnet-lsp-handlers/src/lib.rs @@ -31,7 +31,7 @@ pub use formatting::{ }; pub use hover::{hover, hover_with_import_field_type}; pub use inlay_hint::{ - inlay_hints, inlay_hints_with_config, AnonymousFunctionReturnHintsMode, + inlay_hints, inlay_hints_with_config, AnonymousFunctionReturnHintsMode, CallArgumentHintsMode, FunctionParameterHintsMode, InlayHintsConfig, LocalHintsMode, ObjectMemberHintsMode, }; pub use references::{ diff --git a/crates/jrsonnet-lsp/src/config.rs b/crates/jrsonnet-lsp/src/config.rs index ee06c427..b767f636 100644 --- a/crates/jrsonnet-lsp/src/config.rs +++ b/crates/jrsonnet-lsp/src/config.rs @@ -9,9 +9,10 @@ use std::{collections::HashMap, fmt, path::PathBuf}; // Re-export config types from handlers crate pub use jrsonnet_lsp_handlers::{ - AnonymousFunctionReturnHintsMode, CodeActionConfig, FormattingCommentStyle, FormattingConfig, - FormattingStringStyle, FunctionParameterHintsMode, InlayHintsConfig, LocalHintsMode, - ObjectMemberHintsMode, RemoveUnusedCommentsMode, RemoveUnusedMode, + AnonymousFunctionReturnHintsMode, CallArgumentHintsMode, CodeActionConfig, + FormattingCommentStyle, FormattingConfig, FormattingStringStyle, FunctionParameterHintsMode, + InlayHintsConfig, LocalHintsMode, ObjectMemberHintsMode, RemoveUnusedCommentsMode, + RemoveUnusedMode, }; use serde::{Deserialize, Serialize}; @@ -654,6 +655,7 @@ mod tests { object_members: ObjectMemberHintsMode::Fields, function_parameters: FunctionParameterHintsMode::Off, anonymous_function_returns: AnonymousFunctionReturnHintsMode::Off, + call_arguments: CallArgumentHintsMode::Off, }, ..ServerConfig::default() } @@ -680,6 +682,7 @@ mod tests { object_members: ObjectMemberHintsMode::Methods, function_parameters: FunctionParameterHintsMode::Off, 
anonymous_function_returns: AnonymousFunctionReturnHintsMode::Off, + call_arguments: CallArgumentHintsMode::Off, }, ..ServerConfig::default() } diff --git a/docs/lsp/README.md b/docs/lsp/README.md index 7738a8a5..0138354d 100644 --- a/docs/lsp/README.md +++ b/docs/lsp/README.md @@ -144,7 +144,7 @@ Top-level options: | `resolve_paths_with_tanka` | `string` | `"auto"` | Modes: "false", "auto", "true". Booleans are also accepted (`false` -> "false", `true` -> "true"). Aliases: `resolvePathsWithTanka`, `tankaMode`. | | `formatting` | `object` | `{}` | Formatting options, see below. | | `code_actions` | `object` | `{ "removeUnused": "all", "removeUnusedComments": "none" }` | Alias: `codeActions`. | -| `inlay_hints` | `object` | `{ "local": "all", "objectLocal": "all", "objectMembers": "off", "functionParameters": "off", "anonymousFunctionReturns": "off" }` | Alias: `inlayHints`. | +| `inlay_hints` | `object` | `{ "local": "all", "objectLocal": "all", "objectMembers": "off", "functionParameters": "off", "anonymousFunctionReturns": "off", "callArguments": "off" }` | Alias: `inlayHints`. | | `log_level` | `string \| null` | `null` | Alias: `logLevel`. Intended values are standard log levels (for example `error`, `warn`, `info`, `debug`). Currently this value is parsed/stored but runtime logging is still controlled by process startup flags/env. 
| ### `code_actions` @@ -163,6 +163,7 @@ Top-level options: | `objectMembers` | `off` | `off`, `fields`, `methods`, `all` | | `functionParameters` | `off` | `off`, `all` | | `anonymousFunctionReturns` | `off` | `off`, `all` | +| `callArguments` | `off` | `off`, `all` | ### `formatting` @@ -208,7 +209,8 @@ Formatting keys also accept additional aliases: "objectLocal": "all", "objectMembers": "fields", "functionParameters": "all", - "anonymousFunctionReturns": "all" + "anonymousFunctionReturns": "all", + "callArguments": "all" }, "formatting": { "indent": 2, @@ -268,6 +270,7 @@ vim.lsp.config("jrsonnet_lsp", { -- objectMembers = "off", -- "off"|"fields"|"methods"|"all" -- functionParameters = "off", -- "off"|"all" -- anonymousFunctionReturns = "off", -- "off"|"all" + -- callArguments = "off", -- "off"|"all" -- }, -- logLevel = nil, -- string|nil (for example: "error"|"warn"|"info"|"debug") -- }, From 1f8d8df925bf2c33a806c7de592029067f6ba939 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Wed, 25 Feb 2026 18:12:33 +0000 Subject: [PATCH 180/210] feat(lsp): add comprehension binding inlay hints Add optional inlay hints for comprehension loop bindings so users can see inferred element types in `for ... in ...` clauses. 
What this includes: - new "inlayHints.comprehensions" setting with modes "off|all" (default "off") - handler support for `ForSpec` bindings using inferred iterator element types - support for array, tuple, and union-of-sequence iterator types - unit coverage for enabled comprehension hints - config test updates and README updates for defaults and examples Notes: - existing inlay defaults remain unchanged unless users opt in --- .../jrsonnet-lsp-handlers/src/inlay_hint.rs | 152 +++++++++++++++++- crates/jrsonnet-lsp-handlers/src/lib.rs | 3 +- crates/jrsonnet-lsp/src/config.rs | 8 +- docs/lsp/README.md | 7 +- 4 files changed, 163 insertions(+), 7 deletions(-) diff --git a/crates/jrsonnet-lsp-handlers/src/inlay_hint.rs b/crates/jrsonnet-lsp-handlers/src/inlay_hint.rs index 73c409ee..4115612b 100644 --- a/crates/jrsonnet-lsp-handlers/src/inlay_hint.rs +++ b/crates/jrsonnet-lsp-handlers/src/inlay_hint.rs @@ -6,7 +6,7 @@ use jrsonnet_lsp_document::{to_lsp_range, Document, LineIndex}; use jrsonnet_lsp_inference::TypeAnalysis; use jrsonnet_rowan_parser::{ nodes::{ - Bind, BindDestruct, BindFunction, Destruct, ExprCall, ExprFunction, Member, + Bind, BindDestruct, BindFunction, Destruct, ExprCall, ExprFunction, ForSpec, Member, MemberFieldMethod, MemberFieldNormal, ParamsDesc, StmtLocal, }, AstNode, @@ -118,6 +118,24 @@ impl CallArgumentHintsMode { } } +/// Category selection for comprehension binding inlay hints. +#[derive( + Debug, Clone, Copy, Default, Display, EnumString, Serialize, Deserialize, PartialEq, Eq, +)] +#[serde(rename_all = "camelCase")] +#[strum(serialize_all = "camelCase")] +pub enum ComprehensionHintsMode { + #[default] + Off, + All, +} + +impl ComprehensionHintsMode { + const fn enabled(self) -> bool { + matches!(self, Self::All) + } +} + /// Configuration for inlay hint generation. 
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)] #[serde(default, rename_all = "camelCase")] @@ -134,6 +152,8 @@ pub struct InlayHintsConfig { pub anonymous_function_returns: AnonymousFunctionReturnHintsMode, /// Category filter for call-argument parameter-name hints. pub call_arguments: CallArgumentHintsMode, + /// Category filter for comprehension variable hints. + pub comprehensions: ComprehensionHintsMode, } impl Default for InlayHintsConfig { @@ -145,6 +165,7 @@ impl Default for InlayHintsConfig { function_parameters: FunctionParameterHintsMode::Off, anonymous_function_returns: AnonymousFunctionReturnHintsMode::Off, call_arguments: CallArgumentHintsMode::Off, + comprehensions: ComprehensionHintsMode::Off, } } } @@ -348,6 +369,19 @@ pub fn inlay_hints_with_config( } } + if config.comprehensions.enabled() { + for for_spec in ast.syntax().descendants().filter_map(ForSpec::cast) { + push_comprehension_binding_hint( + &mut hints, + &for_spec, + analysis, + visible_range, + line_index, + text, + ); + } + } + hints } @@ -658,6 +692,81 @@ fn push_call_argument_hints( } } +fn push_comprehension_binding_hint( + hints: &mut Vec, + for_spec: &ForSpec, + analysis: &TypeAnalysis, + visible_range: Range, + line_index: &LineIndex, + text: &str, +) { + let Some(iter_expr) = for_spec.expr() else { + return; + }; + let Some(iter_ty) = analysis.type_for_range(iter_expr.syntax().text_range()) else { + return; + }; + let Some(elem_ty) = comprehension_element_type(analysis, iter_ty) else { + return; + }; + let type_str = analysis.display(elem_ty); + if is_uninformative_type(&type_str) { + return; + } + + let Some(destruct) = for_spec.bind() else { + return; + }; + let Destruct::DestructFull(full) = destruct else { + return; + }; + let Some(name) = full.name() else { + return; + }; + let range = to_lsp_range(name.syntax().text_range(), line_index, text); + if position_in_range(range.end, visible_range) { + hints.push(type_hint(range.end, format!(": {type_str}"))); 
+ } +} + +fn comprehension_element_type( + analysis: &TypeAnalysis, + iter_ty: jrsonnet_lsp_types::Ty, +) -> Option { + use jrsonnet_lsp_types::TyData; + + match analysis.get_data(iter_ty) { + TyData::Array { elem, .. } => Some(elem), + TyData::Tuple { elems } => { + if elems.is_empty() { + None + } else { + Some(analysis.union(elems)) + } + } + TyData::Union(variants) => { + let mut elem_types = Vec::new(); + for variant in variants { + match analysis.get_data(variant) { + TyData::Array { elem, .. } => elem_types.push(elem), + TyData::Tuple { elems } => { + if !elems.is_empty() { + elem_types.push(analysis.union(elems)); + } + } + _ => return None, + } + } + if elem_types.is_empty() { + None + } else { + Some(analysis.union(elem_types)) + } + } + _ => None, + } +} + #[cfg(test)] mod tests { use std::sync::Arc; @@ -888,6 +997,7 @@ mod tests { function_parameters: FunctionParameterHintsMode::All, anonymous_function_returns: AnonymousFunctionReturnHintsMode::Off, call_arguments: CallArgumentHintsMode::Off, + comprehensions: ComprehensionHintsMode::Off, }; let hints = inlay_hints_with_config(&doc, &analysis, full_line_range(), &config); @@ -923,6 +1033,7 @@ mod tests { function_parameters: FunctionParameterHintsMode::All, anonymous_function_returns: AnonymousFunctionReturnHintsMode::Off, call_arguments: CallArgumentHintsMode::Off, + comprehensions: ComprehensionHintsMode::Off, }; let hints = inlay_hints_with_config(&doc, &analysis, full_line_range(), &config); @@ -956,6 +1067,7 @@ mod tests { function_parameters: FunctionParameterHintsMode::Off, anonymous_function_returns: AnonymousFunctionReturnHintsMode::All, call_arguments: CallArgumentHintsMode::Off, + comprehensions: ComprehensionHintsMode::Off, }; let hints = inlay_hints_with_config(&doc, &analysis, full_line_range(), &config); @@ -991,6 +1103,7 @@ mod tests { function_parameters: FunctionParameterHintsMode::Off, anonymous_function_returns: AnonymousFunctionReturnHintsMode::Off, call_arguments: 
CallArgumentHintsMode::All, + comprehensions: ComprehensionHintsMode::Off, }; let hints = inlay_hints_with_config(&doc, &analysis, full_line_range(), &config); @@ -1041,6 +1154,7 @@ mod tests { function_parameters: FunctionParameterHintsMode::Off, anonymous_function_returns: AnonymousFunctionReturnHintsMode::Off, call_arguments: CallArgumentHintsMode::All, + comprehensions: ComprehensionHintsMode::Off, }; let hints = inlay_hints_with_config(&doc, &analysis, full_line_range(), &config); @@ -1061,4 +1175,40 @@ mod tests { }], ); } + + #[test] + fn test_comprehension_binding_hint_when_enabled() { + let doc = Document::new( + "[x + 1 for x in std.range(1, 3)]".to_string(), + DocVersion::new(1), + ); + let analysis = test_analysis(&doc); + let config = InlayHintsConfig { + local: LocalHintsMode::Off, + object_local: LocalHintsMode::Off, + object_members: ObjectMemberHintsMode::Off, + function_parameters: FunctionParameterHintsMode::Off, + anonymous_function_returns: AnonymousFunctionReturnHintsMode::Off, + call_arguments: CallArgumentHintsMode::Off, + comprehensions: ComprehensionHintsMode::All, + }; + + let hints = inlay_hints_with_config(&doc, &analysis, full_line_range(), &config); + assert_hints_eq( + &hints, + vec![InlayHint { + position: Position { + line: 0, + character: 12, + }, + label: InlayHintLabel::String(": number".to_string()), + kind: Some(InlayHintKind::TYPE), + text_edits: None, + tooltip: None, + padding_left: Some(true), + padding_right: None, + data: None, + }], + ); + } } diff --git a/crates/jrsonnet-lsp-handlers/src/lib.rs b/crates/jrsonnet-lsp-handlers/src/lib.rs index cac729a2..1074e364 100644 --- a/crates/jrsonnet-lsp-handlers/src/lib.rs +++ b/crates/jrsonnet-lsp-handlers/src/lib.rs @@ -32,7 +32,8 @@ pub use formatting::{ pub use hover::{hover, hover_with_import_field_type}; pub use inlay_hint::{ inlay_hints, inlay_hints_with_config, AnonymousFunctionReturnHintsMode, CallArgumentHintsMode, - FunctionParameterHintsMode, InlayHintsConfig, 
LocalHintsMode, ObjectMemberHintsMode, + ComprehensionHintsMode, FunctionParameterHintsMode, InlayHintsConfig, LocalHintsMode, + ObjectMemberHintsMode, }; pub use references::{ find_cross_file_references, find_cross_file_references_with_semantic, find_references, diff --git a/crates/jrsonnet-lsp/src/config.rs b/crates/jrsonnet-lsp/src/config.rs index b767f636..01085271 100644 --- a/crates/jrsonnet-lsp/src/config.rs +++ b/crates/jrsonnet-lsp/src/config.rs @@ -10,9 +10,9 @@ use std::{collections::HashMap, fmt, path::PathBuf}; // Re-export config types from handlers crate pub use jrsonnet_lsp_handlers::{ AnonymousFunctionReturnHintsMode, CallArgumentHintsMode, CodeActionConfig, - FormattingCommentStyle, FormattingConfig, FormattingStringStyle, FunctionParameterHintsMode, - InlayHintsConfig, LocalHintsMode, ObjectMemberHintsMode, RemoveUnusedCommentsMode, - RemoveUnusedMode, + ComprehensionHintsMode, FormattingCommentStyle, FormattingConfig, FormattingStringStyle, + FunctionParameterHintsMode, InlayHintsConfig, LocalHintsMode, ObjectMemberHintsMode, + RemoveUnusedCommentsMode, RemoveUnusedMode, }; use serde::{Deserialize, Serialize}; @@ -656,6 +656,7 @@ mod tests { function_parameters: FunctionParameterHintsMode::Off, anonymous_function_returns: AnonymousFunctionReturnHintsMode::Off, call_arguments: CallArgumentHintsMode::Off, + comprehensions: ComprehensionHintsMode::Off, }, ..ServerConfig::default() } @@ -683,6 +684,7 @@ mod tests { function_parameters: FunctionParameterHintsMode::Off, anonymous_function_returns: AnonymousFunctionReturnHintsMode::Off, call_arguments: CallArgumentHintsMode::Off, + comprehensions: ComprehensionHintsMode::Off, }, ..ServerConfig::default() } diff --git a/docs/lsp/README.md b/docs/lsp/README.md index 0138354d..b09fb4d2 100644 --- a/docs/lsp/README.md +++ b/docs/lsp/README.md @@ -144,7 +144,7 @@ Top-level options: | `resolve_paths_with_tanka` | `string` | `"auto"` | Modes: "false", "auto", "true". 
Booleans are also accepted (`false` -> "false", `true` -> "true"). Aliases: `resolvePathsWithTanka`, `tankaMode`. | | `formatting` | `object` | `{}` | Formatting options, see below. | | `code_actions` | `object` | `{ "removeUnused": "all", "removeUnusedComments": "none" }` | Alias: `codeActions`. | -| `inlay_hints` | `object` | `{ "local": "all", "objectLocal": "all", "objectMembers": "off", "functionParameters": "off", "anonymousFunctionReturns": "off", "callArguments": "off" }` | Alias: `inlayHints`. | +| `inlay_hints` | `object` | `{ "local": "all", "objectLocal": "all", "objectMembers": "off", "functionParameters": "off", "anonymousFunctionReturns": "off", "callArguments": "off", "comprehensions": "off" }` | Alias: `inlayHints`. | | `log_level` | `string \| null` | `null` | Alias: `logLevel`. Intended values are standard log levels (for example `error`, `warn`, `info`, `debug`). Currently this value is parsed/stored but runtime logging is still controlled by process startup flags/env. 
| ### `code_actions` @@ -164,6 +164,7 @@ Top-level options: | `functionParameters` | `off` | `off`, `all` | | `anonymousFunctionReturns` | `off` | `off`, `all` | | `callArguments` | `off` | `off`, `all` | +| `comprehensions` | `off` | `off`, `all` | ### `formatting` @@ -210,7 +211,8 @@ Formatting keys also accept additional aliases: "objectMembers": "fields", "functionParameters": "all", "anonymousFunctionReturns": "all", - "callArguments": "all" + "callArguments": "all", + "comprehensions": "all" }, "formatting": { "indent": 2, @@ -271,6 +273,7 @@ vim.lsp.config("jrsonnet_lsp", { -- functionParameters = "off", -- "off"|"all" -- anonymousFunctionReturns = "off", -- "off"|"all" -- callArguments = "off", -- "off"|"all" + -- comprehensions = "off", -- "off"|"all" -- }, -- logLevel = nil, -- string|nil (for example: "error"|"warn"|"info"|"debug") -- }, From 840f24590876176b1a82644e96a193669e41c804 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Wed, 25 Feb 2026 18:35:04 +0000 Subject: [PATCH 181/210] feat(lsp): add destructuring inlay hints Add a new inlay hint category for destructuring bindings with a default of off. When enabled, the server now emits type hints for array and object destructured names in both top-level local binds and object-local local binds. While implementing this, I fixed a parser/inference mismatch around destructuring array elements. The parser now emits DESTRUCT_ARRAY_ELEMENT nodes so destruct_array_parts() reflects the grammar, and inference now binds and records destructured locals recursively instead of only handling DestructFull. This also fixes object destruct field typing to use the source field name (for example foo: x) and supports shorthand object destruct bindings in type recording and binding. Updated docs and config parsing for the new inlayHints.destructuring setting, and added coverage for destructuring inlay hints plus inference query tests for destructured bind-name type lookup. 
--- .../jrsonnet-lsp-handlers/src/inlay_hint.rs | 383 +++++++++++++++++- crates/jrsonnet-lsp-handlers/src/lib.rs | 4 +- .../src/analysis/queries.rs | 26 ++ crates/jrsonnet-lsp-inference/src/expr/mod.rs | 118 ++++-- crates/jrsonnet-lsp-inference/src/object.rs | 12 +- crates/jrsonnet-lsp/src/config.rs | 8 +- crates/jrsonnet-rowan-parser/src/parser.rs | 2 + docs/lsp/README.md | 7 +- 8 files changed, 487 insertions(+), 73 deletions(-) diff --git a/crates/jrsonnet-lsp-handlers/src/inlay_hint.rs b/crates/jrsonnet-lsp-handlers/src/inlay_hint.rs index 4115612b..a51ac7c9 100644 --- a/crates/jrsonnet-lsp-handlers/src/inlay_hint.rs +++ b/crates/jrsonnet-lsp-handlers/src/inlay_hint.rs @@ -136,6 +136,24 @@ impl ComprehensionHintsMode { } } +/// Category selection for destructuring binding inlay hints. +#[derive( + Debug, Clone, Copy, Default, Display, EnumString, Serialize, Deserialize, PartialEq, Eq, +)] +#[serde(rename_all = "camelCase")] +#[strum(serialize_all = "camelCase")] +pub enum DestructuringHintsMode { + #[default] + Off, + All, +} + +impl DestructuringHintsMode { + const fn enabled(self) -> bool { + matches!(self, Self::All) + } +} + /// Configuration for inlay hint generation. #[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)] #[serde(default, rename_all = "camelCase")] @@ -154,6 +172,8 @@ pub struct InlayHintsConfig { pub call_arguments: CallArgumentHintsMode, /// Category filter for comprehension variable hints. pub comprehensions: ComprehensionHintsMode, + /// Category filter for destructuring variable hints. 
+ pub destructuring: DestructuringHintsMode, } impl Default for InlayHintsConfig { @@ -166,6 +186,7 @@ impl Default for InlayHintsConfig { anonymous_function_returns: AnonymousFunctionReturnHintsMode::Off, call_arguments: CallArgumentHintsMode::Off, comprehensions: ComprehensionHintsMode::Off, + destructuring: DestructuringHintsMode::Off, } } } @@ -234,15 +255,27 @@ pub fn inlay_hints_with_config( for stmt_local in ast.syntax().descendants().filter_map(StmtLocal::cast) { for bind in stmt_local.binds() { match bind { - Bind::BindDestruct(bind_destruct) if config.local.variable_hints_enabled() => { - push_binding_type_hint( - &mut hints, - &bind_destruct, - analysis, - visible_range, - line_index, - text, - ); + Bind::BindDestruct(bind_destruct) => { + if config.local.variable_hints_enabled() { + push_binding_type_hint( + &mut hints, + &bind_destruct, + analysis, + visible_range, + line_index, + text, + ); + } + if config.destructuring.enabled() { + push_destructuring_binding_hints( + &mut hints, + &bind_destruct, + analysis, + visible_range, + line_index, + text, + ); + } } Bind::BindFunction(bind_function) if config.local.function_hints_enabled() => { push_function_return_hint( @@ -254,7 +287,7 @@ pub fn inlay_hints_with_config( text, ); } - _ => {} + Bind::BindFunction(_) => {} } } } @@ -272,17 +305,27 @@ pub fn inlay_hints_with_config( continue; }; match bind { - Bind::BindDestruct(bind_destruct) - if config.object_local.variable_hints_enabled() => - { - push_binding_type_hint( - &mut hints, - &bind_destruct, - analysis, - visible_range, - line_index, - text, - ); + Bind::BindDestruct(bind_destruct) => { + if config.object_local.variable_hints_enabled() { + push_binding_type_hint( + &mut hints, + &bind_destruct, + analysis, + visible_range, + line_index, + text, + ); + } + if config.destructuring.enabled() { + push_destructuring_binding_hints( + &mut hints, + &bind_destruct, + analysis, + visible_range, + line_index, + text, + ); + } } 
Bind::BindFunction(bind_function) if config.object_local.function_hints_enabled() => @@ -296,7 +339,7 @@ pub fn inlay_hints_with_config( text, ); } - _ => {} + Bind::BindFunction(_) => {} } } Member::MemberFieldNormal(field) if config.object_members.field_hints_enabled() => { @@ -767,6 +810,181 @@ fn comprehension_element_type( } } +fn push_destructuring_binding_hints( + hints: &mut Vec, + bind_destruct: &BindDestruct, + analysis: &TypeAnalysis, + visible_range: Range, + line_index: &LineIndex, + text: &str, +) { + let Some(value) = bind_destruct.value() else { + return; + }; + let Some(destruct) = BindDestruct::into(bind_destruct) else { + return; + }; + if matches!(destruct, Destruct::DestructFull(_)) { + return; + } + let value_ty = analysis.type_for_range(value.syntax().text_range()); + + push_destruct_hints( + hints, + &destruct, + value_ty, + analysis, + visible_range, + line_index, + text, + ); +} + +fn push_destruct_hints( + hints: &mut Vec, + destruct: &Destruct, + source_ty: Option, + analysis: &TypeAnalysis, + visible_range: Range, + line_index: &LineIndex, + text: &str, +) { + match destruct { + Destruct::DestructFull(full) => { + let Some(name) = full.name() else { + return; + }; + let ty = source_ty.or_else(|| analysis.type_for_range(name.syntax().text_range())); + let Some(ty) = ty else { + return; + }; + let type_str = analysis.display(ty); + if is_uninformative_type(&type_str) { + return; + } + let range = to_lsp_range(name.syntax().text_range(), line_index, text); + if position_in_range(range.end, visible_range) { + hints.push(type_hint(range.end, format!(": {type_str}"))); + } + } + Destruct::DestructSkip(_) => {} + Destruct::DestructArray(array) => { + let mut elem_index = 0usize; + for part in array.destruct_array_parts() { + let jrsonnet_rowan_parser::nodes::DestructArrayPart::DestructArrayElement(elem) = + part + else { + continue; + }; + let Some(inner) = elem.destruct() else { + elem_index += 1; + continue; + }; + let elem_ty = + 
source_ty.and_then(|ty| array_destruct_elem_type(analysis, ty, elem_index)); + push_destruct_hints( + hints, + &inner, + elem_ty, + analysis, + visible_range, + line_index, + text, + ); + elem_index += 1; + } + } + Destruct::DestructObject(object) => { + for field in object.destruct_object_fields() { + let Some(field_name) = field.field().and_then(|name| name.ident_lit()) else { + continue; + }; + let field_ty = source_ty + .and_then(|ty| object_destruct_field_type(analysis, ty, field_name.text())); + + let Some(inner) = field.destruct() else { + let type_str = + field_ty.map_or_else(|| "any".to_string(), |ty| analysis.display(ty)); + if is_uninformative_type(&type_str) { + continue; + } + let range = to_lsp_range(field_name.text_range(), line_index, text); + if position_in_range(range.end, visible_range) { + hints.push(type_hint(range.end, format!(": {type_str}"))); + } + continue; + }; + push_destruct_hints( + hints, + &inner, + field_ty, + analysis, + visible_range, + line_index, + text, + ); + } + } + } +} + +fn array_destruct_elem_type( + analysis: &TypeAnalysis, + source_ty: jrsonnet_lsp_types::Ty, + index: usize, +) -> Option { + use jrsonnet_lsp_types::TyData; + + match analysis.get_data(source_ty) { + TyData::Array { elem, .. 
} => Some(elem), + TyData::Tuple { elems } => elems + .get(index) + .copied() + .or_else(|| (!elems.is_empty()).then(|| analysis.union(elems))), + TyData::Union(variants) => { + let mut out = Vec::new(); + for variant in variants { + let elem = array_destruct_elem_type(analysis, variant, index)?; + out.push(elem); + } + if out.is_empty() { + None + } else { + Some(analysis.union(out)) + } + } + TyData::Any => Some(jrsonnet_lsp_types::Ty::ANY), + _ => None, + } +} + +fn object_destruct_field_type( + analysis: &TypeAnalysis, + source_ty: jrsonnet_lsp_types::Ty, + field_name: &str, +) -> Option { + use jrsonnet_lsp_types::TyData; + + match analysis.get_data(source_ty) { + TyData::Object(object) => object.get_field(field_name).map(|field| field.ty), + TyData::AttrsOf { value } => Some(value), + TyData::Union(variants) => { + let mut out = Vec::new(); + for variant in variants { + let field_ty = object_destruct_field_type(analysis, variant, field_name)?; + out.push(field_ty); + } + if out.is_empty() { + None + } else { + Some(analysis.union(out)) + } + } + TyData::Any => Some(jrsonnet_lsp_types::Ty::ANY), + _ => None, + } +} + #[cfg(test)] mod tests { use std::sync::Arc; @@ -998,6 +1216,7 @@ mod tests { anonymous_function_returns: AnonymousFunctionReturnHintsMode::Off, call_arguments: CallArgumentHintsMode::Off, comprehensions: ComprehensionHintsMode::Off, + destructuring: DestructuringHintsMode::Off, }; let hints = inlay_hints_with_config(&doc, &analysis, full_line_range(), &config); @@ -1034,6 +1253,7 @@ mod tests { anonymous_function_returns: AnonymousFunctionReturnHintsMode::Off, call_arguments: CallArgumentHintsMode::Off, comprehensions: ComprehensionHintsMode::Off, + destructuring: DestructuringHintsMode::Off, }; let hints = inlay_hints_with_config(&doc, &analysis, full_line_range(), &config); @@ -1068,6 +1288,7 @@ mod tests { anonymous_function_returns: AnonymousFunctionReturnHintsMode::All, call_arguments: CallArgumentHintsMode::Off, comprehensions: 
ComprehensionHintsMode::Off, + destructuring: DestructuringHintsMode::Off, }; let hints = inlay_hints_with_config(&doc, &analysis, full_line_range(), &config); @@ -1104,6 +1325,7 @@ mod tests { anonymous_function_returns: AnonymousFunctionReturnHintsMode::Off, call_arguments: CallArgumentHintsMode::All, comprehensions: ComprehensionHintsMode::Off, + destructuring: DestructuringHintsMode::Off, }; let hints = inlay_hints_with_config(&doc, &analysis, full_line_range(), &config); @@ -1155,6 +1377,7 @@ mod tests { anonymous_function_returns: AnonymousFunctionReturnHintsMode::Off, call_arguments: CallArgumentHintsMode::All, comprehensions: ComprehensionHintsMode::Off, + destructuring: DestructuringHintsMode::Off, }; let hints = inlay_hints_with_config(&doc, &analysis, full_line_range(), &config); @@ -1191,6 +1414,7 @@ mod tests { anonymous_function_returns: AnonymousFunctionReturnHintsMode::Off, call_arguments: CallArgumentHintsMode::Off, comprehensions: ComprehensionHintsMode::All, + destructuring: DestructuringHintsMode::Off, }; let hints = inlay_hints_with_config(&doc, &analysis, full_line_range(), &config); @@ -1211,4 +1435,119 @@ mod tests { }], ); } + + #[test] + fn test_destructuring_array_binding_hints_when_enabled() { + let doc = Document::new( + "local [a, b] = [1, 2]; a + b".to_string(), + DocVersion::new(1), + ); + let analysis = test_analysis(&doc); + let bind_destruct = doc + .ast() + .syntax() + .descendants() + .find_map(BindDestruct::cast) + .expect("expected destruct bind"); + let value = bind_destruct.value().expect("expected bind value"); + assert!( + analysis + .type_for_range(value.syntax().text_range()) + .is_some(), + "destructuring RHS should have an inferred type" + ); + let config = InlayHintsConfig { + local: LocalHintsMode::Off, + object_local: LocalHintsMode::Off, + object_members: ObjectMemberHintsMode::Off, + function_parameters: FunctionParameterHintsMode::Off, + anonymous_function_returns: AnonymousFunctionReturnHintsMode::Off, + 
call_arguments: CallArgumentHintsMode::Off, + comprehensions: ComprehensionHintsMode::Off, + destructuring: DestructuringHintsMode::All, + }; + + let hints = inlay_hints_with_config(&doc, &analysis, full_line_range(), &config); + assert_hints_eq( + &hints, + vec![ + InlayHint { + position: Position { + line: 0, + character: 8, + }, + label: InlayHintLabel::String(": number".to_string()), + kind: Some(InlayHintKind::TYPE), + text_edits: None, + tooltip: None, + padding_left: Some(true), + padding_right: None, + data: None, + }, + InlayHint { + position: Position { + line: 0, + character: 11, + }, + label: InlayHintLabel::String(": number".to_string()), + kind: Some(InlayHintKind::TYPE), + text_edits: None, + tooltip: None, + padding_left: Some(true), + padding_right: None, + data: None, + }, + ], + ); + } + + #[test] + fn test_destructuring_object_binding_hints_when_enabled() { + let doc = Document::new( + "local { foo: x } = { foo: 1 }; x".to_string(), + DocVersion::new(1), + ); + let analysis = test_analysis(&doc); + let bind_destruct = doc + .ast() + .syntax() + .descendants() + .find_map(BindDestruct::cast) + .expect("expected destruct bind"); + let value = bind_destruct.value().expect("expected bind value"); + assert!( + analysis + .type_for_range(value.syntax().text_range()) + .is_some(), + "destructuring RHS should have an inferred type" + ); + let config = InlayHintsConfig { + local: LocalHintsMode::Off, + object_local: LocalHintsMode::Off, + object_members: ObjectMemberHintsMode::Off, + function_parameters: FunctionParameterHintsMode::Off, + anonymous_function_returns: AnonymousFunctionReturnHintsMode::Off, + call_arguments: CallArgumentHintsMode::Off, + comprehensions: ComprehensionHintsMode::Off, + destructuring: DestructuringHintsMode::All, + }; + + let hints = inlay_hints_with_config(&doc, &analysis, full_line_range(), &config); + assert_hints_eq( + &hints, + vec![InlayHint { + position: Position { + line: 0, + character: 14, + }, + label: 
InlayHintLabel::String(": number".to_string()), + kind: Some(InlayHintKind::TYPE), + text_edits: None, + tooltip: None, + padding_left: Some(true), + padding_right: None, + data: None, + }], + ); + } } diff --git a/crates/jrsonnet-lsp-handlers/src/lib.rs b/crates/jrsonnet-lsp-handlers/src/lib.rs index 1074e364..f76bcff0 100644 --- a/crates/jrsonnet-lsp-handlers/src/lib.rs +++ b/crates/jrsonnet-lsp-handlers/src/lib.rs @@ -32,8 +32,8 @@ pub use formatting::{ pub use hover::{hover, hover_with_import_field_type}; pub use inlay_hint::{ inlay_hints, inlay_hints_with_config, AnonymousFunctionReturnHintsMode, CallArgumentHintsMode, - ComprehensionHintsMode, FunctionParameterHintsMode, InlayHintsConfig, LocalHintsMode, - ObjectMemberHintsMode, + ComprehensionHintsMode, DestructuringHintsMode, FunctionParameterHintsMode, InlayHintsConfig, + LocalHintsMode, ObjectMemberHintsMode, }; pub use references::{ find_cross_file_references, find_cross_file_references_with_semantic, find_references, diff --git a/crates/jrsonnet-lsp-inference/src/analysis/queries.rs b/crates/jrsonnet-lsp-inference/src/analysis/queries.rs index ef5dfb27..f7a506d9 100644 --- a/crates/jrsonnet-lsp-inference/src/analysis/queries.rs +++ b/crates/jrsonnet-lsp-inference/src/analysis/queries.rs @@ -500,6 +500,32 @@ mod tests { }); } + #[test] + fn test_type_at_position_local_array_destruct_bind_name_uses_element_type() { + let code = "local [a, b] = [1, 2]; a + b"; + let (analysis, doc) = analyze_doc(code); + let root = doc.ast().syntax().clone(); + + let bind_name_offset = nth_offset(code, "a", 0); + let ty = analysis + .type_at_position(&root, bind_name_offset) + .expect("should find type at array-destruct bind name"); + assert_eq!(ty, Ty::NUMBER); + } + + #[test] + fn test_type_at_position_local_object_destruct_bind_name_uses_field_type() { + let code = "local { foo: x } = { foo: 1 }; x"; + let (analysis, doc) = analyze_doc(code); + let root = doc.ast().syntax().clone(); + + let bind_name_offset = 
nth_offset(code, "x", 0); + let ty = analysis + .type_at_position(&root, bind_name_offset) + .expect("should find type at object-destruct bind name"); + assert_eq!(ty, Ty::NUMBER); + } + #[test] fn test_type_at_position_object_local_bind_equals_uses_value_type() { let code = r"{ local x = { a: 1 }, y: x }"; diff --git a/crates/jrsonnet-lsp-inference/src/expr/mod.rs b/crates/jrsonnet-lsp-inference/src/expr/mod.rs index 6e82b6fc..8d1f7e3f 100644 --- a/crates/jrsonnet-lsp-inference/src/expr/mod.rs +++ b/crates/jrsonnet-lsp-inference/src/expr/mod.rs @@ -137,23 +137,16 @@ pub(super) fn infer_expr_ty_impl( fn infer_bind_type_ty(bind: &Bind, env: &mut TypeEnv, recorder: &mut R) { match bind { Bind::BindDestruct(bd) => { + let ty = bd.value().map_or(Ty::ANY, |value| { + infer_expr_ty_impl(&value, env, None, recorder) + }); + recorder.record(bd.syntax().text_range(), ty); + if let Some(assign_token) = bd.assign_token() { + recorder.record(assign_token.text_range(), ty); + } if let Some(destruct) = bd.into() { - if let jrsonnet_rowan_parser::nodes::Destruct::DestructFull(full) = destruct { - if let Some(name_node) = full.name() { - if let Some(ident) = name_node.ident_lit() { - let name = ident.text().to_string(); - let ty = bd - .value() - .map_or(Ty::ANY, |v| infer_expr_ty_impl(&v, env, None, recorder)); - recorder.record(bd.syntax().text_range(), ty); - recorder.record(name_node.syntax().text_range(), ty); - if let Some(assign_token) = bd.assign_token() { - recorder.record(assign_token.text_range(), ty); - } - env.define_ty(name, ty); - } - } - } + record_destruct_binding_types_ty(&destruct, ty, env, recorder); + bind_destruct_with_type_ty(&destruct, ty, env); } } Bind::BindFunction(bf) => { @@ -257,7 +250,7 @@ pub(super) fn bind_destruct_with_type_ty( ty: Ty, env: &mut TypeEnv, ) { - use jrsonnet_rowan_parser::nodes::{Destruct, DestructArrayPart}; + use jrsonnet_rowan_parser::nodes::Destruct; match destruct { Destruct::DestructFull(full) => { @@ -268,23 +261,32 @@ 
pub(super) fn bind_destruct_with_type_ty( } Destruct::DestructArray(arr) => { let elem_types = extract_array_element_types_ty(ty, env); - for (i, part) in arr.destruct_array_parts().enumerate() { - let DestructArrayPart::DestructArrayElement(elem) = part else { + let mut elem_index = 0usize; + for part in arr.destruct_array_parts() { + let jrsonnet_rowan_parser::nodes::DestructArrayPart::DestructArrayElement(elem) = + part + else { continue; }; let Some(inner) = elem.destruct() else { + elem_index += 1; continue; }; - let elem_ty = elem_types.get(i).copied().unwrap_or(Ty::ANY); + let elem_ty = array_destruct_element_ty(&elem_types, elem_index); bind_destruct_with_type_ty(&inner, elem_ty, env); + elem_index += 1; } } Destruct::DestructObject(obj) => { for field in obj.destruct_object_fields() { + let Some(field_name) = field.field().and_then(|name| name.ident_lit()) else { + continue; + }; + let field_ty = lookup_destruct_field_type_ty(field_name.text(), ty, env); let Some(inner) = field.destruct() else { + env.define_ty(field_name.text().to_string(), field_ty); continue; }; - let field_ty = lookup_destruct_field_type_ty(&inner, ty, env); bind_destruct_with_type_ty(&inner, field_ty, env); } } @@ -292,6 +294,56 @@ pub(super) fn bind_destruct_with_type_ty( } } +fn record_destruct_binding_types_ty( + destruct: &jrsonnet_rowan_parser::nodes::Destruct, + ty: Ty, + env: &TypeEnv, + recorder: &mut R, +) { + use jrsonnet_rowan_parser::nodes::Destruct; + + match destruct { + Destruct::DestructFull(full) => { + let Some(name) = full.name() else { + return; + }; + recorder.record(name.syntax().text_range(), ty); + } + Destruct::DestructArray(arr) => { + let elem_types = extract_array_element_types_ty(ty, env); + let mut elem_index = 0usize; + for part in arr.destruct_array_parts() { + let jrsonnet_rowan_parser::nodes::DestructArrayPart::DestructArrayElement(elem) = + part + else { + continue; + }; + let Some(inner) = elem.destruct() else { + elem_index += 1; + continue; + }; + 
let elem_ty = array_destruct_element_ty(&elem_types, elem_index); + record_destruct_binding_types_ty(&inner, elem_ty, env, recorder); + elem_index += 1; + } + } + Destruct::DestructObject(obj) => { + for field in obj.destruct_object_fields() { + let Some(field_name) = field.field().and_then(|name| name.ident_lit()) else { + continue; + }; + let field_ty = lookup_destruct_field_type_ty(field_name.text(), ty, env); + let Some(inner) = field.destruct() else { + recorder.record(field_name.text_range(), field_ty); + continue; + }; + record_destruct_binding_types_ty(&inner, field_ty, env, recorder); + } + } + Destruct::DestructSkip(_) => {} + } +} + /// Extract element types from an array or tuple type (Ty version). fn extract_array_element_types_ty(ty: Ty, env: &TypeEnv) -> Vec { let store = env.store(); @@ -302,26 +354,22 @@ fn extract_array_element_types_ty(ty: Ty, env: &TypeEnv) -> Vec { } } +fn array_destruct_element_ty(elem_types: &[Ty], index: usize) -> Ty { + match elem_types { + [] => Ty::ANY, + [elem] => *elem, + _ => elem_types.get(index).copied().unwrap_or(Ty::ANY), + } +} + /// Look up the type for a destructured field from an object type (Ty version). -fn lookup_destruct_field_type_ty( - destruct: &jrsonnet_rowan_parser::nodes::Destruct, - ty: Ty, - env: &TypeEnv, -) -> Ty { +fn lookup_destruct_field_type_ty(field_name: &str, ty: Ty, env: &TypeEnv) -> Ty { let store = env.store(); let TyData::Object(ref obj_data) = store.get(ty) else { return Ty::ANY; }; - let jrsonnet_rowan_parser::nodes::Destruct::DestructFull(full) = destruct else { - return Ty::ANY; - }; - - let Some(ident) = full.name().and_then(|n| n.ident_lit()) else { - return Ty::ANY; - }; - - obj_data.get_field(ident.text()).map_or(Ty::ANY, |fd| fd.ty) + obj_data.get_field(field_name).map_or(Ty::ANY, |fd| fd.ty) } /// Check if an expression is guaranteed to diverge (never return). 
diff --git a/crates/jrsonnet-lsp-inference/src/object.rs b/crates/jrsonnet-lsp-inference/src/object.rs index 9d6395d5..2e4efd25 100644 --- a/crates/jrsonnet-lsp-inference/src/object.rs +++ b/crates/jrsonnet-lsp-inference/src/object.rs @@ -3,11 +3,12 @@ use jrsonnet_lsp_types::{ FieldDefInterned, FieldVis, FunctionData, ObjectData, ParamInterned, ReturnSpec, Ty, TyData, }; -use jrsonnet_rowan_parser::nodes::{Bind, Destruct, Expr, Member, ObjBody}; +use jrsonnet_rowan_parser::nodes::{Bind, Expr, Member, ObjBody}; use rustc_hash::FxHashMap; use crate::{ env::TypeEnv, + expr::bind_destruct_with_type_ty, helpers::{convert_visibility_ty, extract_field_name, extract_params_with_default_types_ty}, }; @@ -239,17 +240,10 @@ fn infer_object_local_bind_ty( let Some(destruct) = bind_destruct.into() else { return; }; - let Destruct::DestructFull(full) = destruct else { - return; - }; - let Some(ident) = full.name().and_then(|name| name.ident_lit()) else { - return; - }; - let name = ident.text().to_string(); let ty = bind_destruct .value() .map_or(Ty::ANY, |value| infer_expr(&value, env)); - env.define_ty(name, ty); + bind_destruct_with_type_ty(&destruct, ty, env); } Bind::BindFunction(bind_function) => { let Some(name_node) = bind_function.name() else { diff --git a/crates/jrsonnet-lsp/src/config.rs b/crates/jrsonnet-lsp/src/config.rs index 01085271..4ca0d00a 100644 --- a/crates/jrsonnet-lsp/src/config.rs +++ b/crates/jrsonnet-lsp/src/config.rs @@ -10,9 +10,9 @@ use std::{collections::HashMap, fmt, path::PathBuf}; // Re-export config types from handlers crate pub use jrsonnet_lsp_handlers::{ AnonymousFunctionReturnHintsMode, CallArgumentHintsMode, CodeActionConfig, - ComprehensionHintsMode, FormattingCommentStyle, FormattingConfig, FormattingStringStyle, - FunctionParameterHintsMode, InlayHintsConfig, LocalHintsMode, ObjectMemberHintsMode, - RemoveUnusedCommentsMode, RemoveUnusedMode, + ComprehensionHintsMode, DestructuringHintsMode, FormattingCommentStyle, FormattingConfig, + 
FormattingStringStyle, FunctionParameterHintsMode, InlayHintsConfig, LocalHintsMode, + ObjectMemberHintsMode, RemoveUnusedCommentsMode, RemoveUnusedMode, }; use serde::{Deserialize, Serialize}; @@ -657,6 +657,7 @@ mod tests { anonymous_function_returns: AnonymousFunctionReturnHintsMode::Off, call_arguments: CallArgumentHintsMode::Off, comprehensions: ComprehensionHintsMode::Off, + destructuring: DestructuringHintsMode::Off, }, ..ServerConfig::default() } @@ -685,6 +686,7 @@ mod tests { anonymous_function_returns: AnonymousFunctionReturnHintsMode::Off, call_arguments: CallArgumentHintsMode::Off, comprehensions: ComprehensionHintsMode::Off, + destructuring: DestructuringHintsMode::Off, }, ..ServerConfig::default() } diff --git a/crates/jrsonnet-rowan-parser/src/parser.rs b/crates/jrsonnet-rowan-parser/src/parser.rs index 3f156932..f908bcaf 100644 --- a/crates/jrsonnet-rowan-parser/src/parser.rs +++ b/crates/jrsonnet-rowan-parser/src/parser.rs @@ -798,7 +798,9 @@ fn destruct(p: &mut Parser) -> CompletedMarker { // } // had_rest = true; } else { + let m_elem = p.start(); destruct(p); + m_elem.complete(p, DESTRUCT_ARRAY_ELEMENT); } if p.at(T![,]) { p.bump(); diff --git a/docs/lsp/README.md b/docs/lsp/README.md index b09fb4d2..6eb39228 100644 --- a/docs/lsp/README.md +++ b/docs/lsp/README.md @@ -144,7 +144,7 @@ Top-level options: | `resolve_paths_with_tanka` | `string` | `"auto"` | Modes: "false", "auto", "true". Booleans are also accepted (`false` -> "false", `true` -> "true"). Aliases: `resolvePathsWithTanka`, `tankaMode`. | | `formatting` | `object` | `{}` | Formatting options, see below. | | `code_actions` | `object` | `{ "removeUnused": "all", "removeUnusedComments": "none" }` | Alias: `codeActions`. | -| `inlay_hints` | `object` | `{ "local": "all", "objectLocal": "all", "objectMembers": "off", "functionParameters": "off", "anonymousFunctionReturns": "off", "callArguments": "off", "comprehensions": "off" }` | Alias: `inlayHints`. 
| +| `inlay_hints` | `object` | `{ "local": "all", "objectLocal": "all", "objectMembers": "off", "functionParameters": "off", "anonymousFunctionReturns": "off", "callArguments": "off", "comprehensions": "off", "destructuring": "off" }` | Alias: `inlayHints`. | | `log_level` | `string \| null` | `null` | Alias: `logLevel`. Intended values are standard log levels (for example `error`, `warn`, `info`, `debug`). Currently this value is parsed/stored but runtime logging is still controlled by process startup flags/env. | ### `code_actions` @@ -165,6 +165,7 @@ Top-level options: | `anonymousFunctionReturns` | `off` | `off`, `all` | | `callArguments` | `off` | `off`, `all` | | `comprehensions` | `off` | `off`, `all` | +| `destructuring` | `off` | `off`, `all` | ### `formatting` @@ -212,7 +213,8 @@ Formatting keys also accept additional aliases: "functionParameters": "all", "anonymousFunctionReturns": "all", "callArguments": "all", - "comprehensions": "all" + "comprehensions": "all", + "destructuring": "all" }, "formatting": { "indent": 2, @@ -274,6 +276,7 @@ vim.lsp.config("jrsonnet_lsp", { -- anonymousFunctionReturns = "off", -- "off"|"all" -- callArguments = "off", -- "off"|"all" -- comprehensions = "off", -- "off"|"all" + -- destructuring = "off", -- "off"|"all" -- }, -- logLevel = nil, -- string|nil (for example: "error"|"warn"|"info"|"debug") -- }, From 6acf822d70afd6b1cd1b3a7267c056f1148285c9 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Wed, 25 Feb 2026 18:41:56 +0000 Subject: [PATCH 182/210] fix(inference): infer dynamic object key expressions Fix the failing type-at-position test for computed object field names by addressing the inference gap instead of weakening the assertion. Object inference previously skipped dynamic field-name expressions entirely, so no type information was recorded for tokens inside `[expr]` field keys. This caused `type_at_position` to fall back to less-specific ancestor types. 
The object inference passes now: - mark objects with dynamic fields as open (`has_unknown = true`) - infer dynamic key expressions in pass 2 so their expression ranges are recorded in analysis - preserve unknown-field state when merging super objects Also adds a regression test that dynamic-field objects are open. Together with the existing query test, this verifies the root cause and the behavior end-to-end. --- crates/jrsonnet-lsp-inference/src/object.rs | 32 +++++++++++++++++++-- 1 file changed, 29 insertions(+), 3 deletions(-) diff --git a/crates/jrsonnet-lsp-inference/src/object.rs b/crates/jrsonnet-lsp-inference/src/object.rs index 2e4efd25..6e4c0f01 100644 --- a/crates/jrsonnet-lsp-inference/src/object.rs +++ b/crates/jrsonnet-lsp-inference/src/object.rs @@ -3,7 +3,7 @@ use jrsonnet_lsp_types::{ FieldDefInterned, FieldVis, FunctionData, ObjectData, ParamInterned, ReturnSpec, Ty, TyData, }; -use jrsonnet_rowan_parser::nodes::{Bind, Expr, Member, ObjBody}; +use jrsonnet_rowan_parser::nodes::{Bind, Expr, FieldName, Member, ObjBody}; use rustc_hash::FxHashMap; use crate::{ @@ -39,6 +39,7 @@ pub fn infer_object_type_with_super_ty( // Pass 1: Collect all field names with preliminary types // This creates a "skeleton" of the object for self references let mut preliminary_fields: Vec<(String, FieldVis)> = Vec::new(); + let mut preliminary_has_unknown = false; for member in members.members() { match &member { @@ -47,6 +48,8 @@ pub fn infer_object_type_with_super_ty( if let Some(name_str) = extract_field_name(&field_name) { let visibility = convert_visibility_ty(field.visibility()); preliminary_fields.push((name_str, visibility)); + } else { + preliminary_has_unknown = true; } } } @@ -55,6 +58,8 @@ pub fn infer_object_type_with_super_ty( if let Some(name_str) = extract_field_name(&field_name) { let visibility = convert_visibility_ty(method.visibility()); preliminary_fields.push((name_str, visibility)); + } else { + preliminary_has_unknown = true; } } } @@ -85,6 +90,7 
@@ pub fn infer_object_type_with_super_ty( preliminary_obj_fields.push((name.clone(), field_def.clone())); } } + preliminary_has_unknown |= super_obj.has_unknown; } } @@ -93,7 +99,7 @@ pub fn infer_object_type_with_super_ty( let preliminary_obj = ObjectData { fields: preliminary_obj_fields, - has_unknown: false, + has_unknown: preliminary_has_unknown, }; let preliminary_ty = env.store_mut().object(preliminary_obj); @@ -117,11 +123,18 @@ pub fn infer_object_type_with_super_ty( // Pass 2: Infer actual field types with self available let mut final_fields: Vec<(String, FieldDefInterned)> = Vec::new(); + let mut final_has_unknown = preliminary_has_unknown; for member in members.members() { match member { Member::MemberFieldNormal(field) => { if let Some(field_name) = field.field_name() { + if let FieldName::FieldNameDynamic(dynamic) = &field_name { + final_has_unknown = true; + if let Some(expr) = dynamic.expr() { + let _ = infer_expr(&expr, env); + } + } if let Some(name_str) = extract_field_name(&field_name) { let field_ty = field.expr().map_or(Ty::ANY, |e| infer_expr(&e, env)); @@ -139,6 +152,12 @@ pub fn infer_object_type_with_super_ty( } Member::MemberFieldMethod(method) => { if let Some(field_name) = method.field_name() { + if let FieldName::FieldNameDynamic(dynamic) = &field_name { + final_has_unknown = true; + if let Some(expr) = dynamic.expr() { + let _ = infer_expr(&expr, env); + } + } if let Some(name_str) = extract_field_name(&field_name) { let params = method .params_desc() @@ -220,7 +239,7 @@ pub fn infer_object_type_with_super_ty( env.store_mut().object(ObjectData { fields: final_fields, - has_unknown: false, + has_unknown: final_has_unknown, }) } ObjBody::ObjBodyComp(_) => { @@ -371,6 +390,13 @@ mod tests { assert_fields_ty(&obj, &["a", "b"]); } + #[test] + fn test_dynamic_field_object_is_open() { + let (ty, env) = infer_doc(r#"{ [("x" + "y")]: 1 }"#); + let obj = try_object(&env, ty).expect("expected object"); + assert!(obj.has_unknown, "Dynamic field 
object should be open"); + } + fn try_function(env: &TypeEnv, ty: Ty) -> Option { match env.store().get(ty) { TyData::Function(func) => Some(func), From f50d78ab1227a77ed76b02e10f9ae4ecdf23e1a3 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Wed, 25 Feb 2026 18:44:56 +0000 Subject: [PATCH 183/210] refactor(inference): split expr core from module exports Split expression inference implementation out of `expr/mod.rs` so the module file only declares submodules and re-exports public/internal entry points. The implementation previously lived directly in `mod.rs`, which made it harder to navigate and expanded diff noise for unrelated changes. This moves the core inference logic into `expr/core.rs` and leaves `mod.rs` as import/export wiring only. No behavior changes are intended. The refactor keeps existing visibility boundaries by re-exporting crate-internal items used by sibling modules and preserving external APIs. Also updates one test import in `expr/advanced.rs` to match how the test module references `Document` after the split. 
--- .../src/expr/advanced.rs | 2 +- .../jrsonnet-lsp-inference/src/expr/core.rs | 378 +++++++++++++++++ crates/jrsonnet-lsp-inference/src/expr/mod.rs | 380 +----------------- 3 files changed, 383 insertions(+), 377 deletions(-) create mode 100644 crates/jrsonnet-lsp-inference/src/expr/core.rs diff --git a/crates/jrsonnet-lsp-inference/src/expr/advanced.rs b/crates/jrsonnet-lsp-inference/src/expr/advanced.rs index c7d3c20f..7a81f488 100644 --- a/crates/jrsonnet-lsp-inference/src/expr/advanced.rs +++ b/crates/jrsonnet-lsp-inference/src/expr/advanced.rs @@ -462,7 +462,7 @@ pub(super) fn infer_obj_extend_expr_base_ty( mod tests { use std::collections::BTreeSet; - use jrsonnet_lsp_document::DocVersion; + use jrsonnet_lsp_document::{DocVersion, Document}; use jrsonnet_lsp_types::{ FieldDefInterned, FieldVis, FunctionData, MutStore, ObjectData, ParamInterned, ReturnSpec, TyData, diff --git a/crates/jrsonnet-lsp-inference/src/expr/core.rs b/crates/jrsonnet-lsp-inference/src/expr/core.rs new file mode 100644 index 00000000..86de4da7 --- /dev/null +++ b/crates/jrsonnet-lsp-inference/src/expr/core.rs @@ -0,0 +1,378 @@ +//! Expression type inference. + +use jrsonnet_lsp_document::Document; +use jrsonnet_lsp_types::{FunctionData, ParamInterned, ReturnSpec, Ty, TyData}; +use jrsonnet_rowan_parser::{nodes::Bind, AstNode}; +use rowan::TextRange; +use rustc_hash::FxHashMap; + +use super::base::infer_base_ty; +use crate::{ + env::TypeEnv, + flow::{self, Facts}, + helpers::extract_params_with_default_types_ty, +}; + +/// Apply type facts to the environment, narrowing variable types. +/// +/// This is used to apply facts extracted from assert statements and +/// type guard conditions to narrow types for subsequent code. 
+fn apply_facts_to_env(facts: &Facts, env: &mut TypeEnv) { + for (var_name, fact) in facts.iter() { + // Get the current type of the variable + let current_ty = env.lookup(var_name).unwrap_or(Ty::ANY); + // Apply the fact to narrow the type + let narrowed_ty = fact.apply_to(current_ty, env.store_mut()); + // Update the environment with the narrowed type + env.define_ty(var_name.clone(), narrowed_ty); + } +} + +pub(crate) trait TypeRecorder { + fn record(&mut self, range: TextRange, ty: Ty); +} + +struct NoopRecorder; + +impl TypeRecorder for NoopRecorder { + fn record(&mut self, _range: TextRange, _ty: Ty) {} +} + +impl TypeRecorder for FxHashMap { + fn record(&mut self, range: TextRange, ty: Ty) { + self.insert(range, ty); + } +} + +fn record_expr_and_base( + recorder: &mut R, + expr: &jrsonnet_rowan_parser::nodes::Expr, + ty: Ty, +) { + recorder.record(expr.syntax().text_range(), ty); + if let Some(base) = expr.expr_base() { + recorder.record(base.syntax().text_range(), ty); + } +} + +/// Infer the type of a document's root expression, returning an interned `Ty` and the environment. +/// +/// This is useful for tests that need to inspect the type structure using `TyData`. +#[must_use] +pub fn infer_document_type_ty(document: &Document) -> (Ty, TypeEnv) { + let ast = document.ast(); + let mut env = TypeEnv::new_default(); + + let ty = ast + .expr() + .map_or(Ty::ANY, |expr| infer_expr_ty(&expr, &mut env)); + + (ty, env) +} + +/// Infer the type of an expression, returning an interned `Ty`. +pub fn infer_expr_ty(expr: &jrsonnet_rowan_parser::nodes::Expr, env: &mut TypeEnv) -> Ty { + infer_expr_ty_with_expected(expr, env, None) +} + +pub(crate) fn infer_expr_ty_and_record( + expr: &jrsonnet_rowan_parser::nodes::Expr, + env: &mut TypeEnv, + expected: Option, + recorder: &mut FxHashMap, +) -> Ty { + infer_expr_ty_impl(expr, env, expected, recorder) +} + +/// Infer the type of an expression with an optional expected type, returning `Ty`. 
+/// +/// This is the efficient internal version that works with interned types throughout. +pub fn infer_expr_ty_with_expected( + expr: &jrsonnet_rowan_parser::nodes::Expr, + env: &mut TypeEnv, + expected: Option, +) -> Ty { + let mut recorder = NoopRecorder; + infer_expr_ty_impl(expr, env, expected, &mut recorder) +} + +pub(crate) fn infer_expr_ty_impl( + expr: &jrsonnet_rowan_parser::nodes::Expr, + env: &mut TypeEnv, + expected: Option, + recorder: &mut R, +) -> Ty { + // First, handle local bindings and assert statements that may precede the expression + for stmt in expr.stmts() { + match stmt { + jrsonnet_rowan_parser::nodes::Stmt::StmtLocal(stmt_local) => { + for bind in stmt_local.binds() { + infer_bind_type_ty(&bind, env, recorder); + } + } + jrsonnet_rowan_parser::nodes::Stmt::StmtAssert(stmt_assert) => { + // Extract type facts from assert conditions and apply them + if let Some(assertion) = stmt_assert.assertion() { + if let Some(cond) = assertion.condition() { + let facts = flow::extract_facts(&cond); + apply_facts_to_env(&facts, env); + } + } + } + } + } + + // Get the base expression type + let ty = expr.expr_base().map_or(Ty::ANY, |base| { + infer_base_ty(&base, env, expected, recorder) + }); + record_expr_and_base(recorder, expr, ty); + ty +} + +/// Infer types from a bind (local variable definition) using interned types. 
+fn infer_bind_type_ty(bind: &Bind, env: &mut TypeEnv, recorder: &mut R) { + match bind { + Bind::BindDestruct(bd) => { + let ty = bd.value().map_or(Ty::ANY, |value| { + infer_expr_ty_impl(&value, env, None, recorder) + }); + recorder.record(bd.syntax().text_range(), ty); + if let Some(assign_token) = bd.assign_token() { + recorder.record(assign_token.text_range(), ty); + } + if let Some(destruct) = bd.into() { + record_destruct_binding_types_ty(&destruct, ty, env, recorder); + bind_destruct_with_type_ty(&destruct, ty, env); + } + } + Bind::BindFunction(bf) => { + let Some(name_node) = bf.name() else { + return; + }; + let Some(ident) = name_node.ident_lit() else { + return; + }; + let name = ident.text().to_string(); + let params_desc = bf.params(); + let params = params_desc + .as_ref() + .map(|p| extract_params_with_default_types_ty(p, env)) + .unwrap_or_default(); + + // Install a provisional function first so recursive self-calls can resolve. + let provisional_func = FunctionData { + params: params.clone(), + return_spec: ReturnSpec::Fixed(Ty::ANY), + variadic: false, + }; + let provisional_ty = env.store_mut().intern(TyData::Function(provisional_func)); + env.define_ty(name.clone(), provisional_ty); + + let (return_ty, param_constraints) = if env.can_infer_function_body() { + bf.value().map_or_else( + || (Ty::ANY, FxHashMap::default()), + |body| { + env.push_scope(); + let param_names: Vec = + params.iter().map(|p| p.name.clone()).collect(); + for param in ¶ms { + env.define_ty(param.name.clone(), param.ty); + } + + env.start_constraint_tracking(¶m_names); + env.enter_function(); + let body_ty = infer_expr_ty_impl(&body, env, None, recorder); + env.exit_function(); + let constraints = env.stop_constraint_tracking_ty(); + env.pop_scope(); + (body_ty, constraints) + }, + ) + } else { + (Ty::ANY, FxHashMap::default()) + }; + + let final_params: Vec = params + .into_iter() + .map(|param| { + let mut narrowed_ty = param.ty; + if let Some(constraints) = 
param_constraints.get(¶m.name) { + for constraint_ty in constraints { + narrowed_ty = env.store_mut().narrow(narrowed_ty, *constraint_ty); + } + } + ParamInterned { + name: param.name, + ty: narrowed_ty, + has_default: param.has_default, + } + }) + .collect(); + + let final_func = FunctionData { + params: final_params.clone(), + return_spec: ReturnSpec::Fixed(return_ty), + variadic: false, + }; + let final_ty = env.store_mut().intern(TyData::Function(final_func)); + recorder.record(bf.syntax().text_range(), final_ty); + recorder.record(name_node.syntax().text_range(), final_ty); + if let Some(assign_token) = bf.assign_token() { + recorder.record(assign_token.text_range(), final_ty); + } + if let Some(params_desc) = params_desc { + recorder.record(params_desc.syntax().text_range(), final_ty); + for (param, param_ty) in params_desc.params().zip(final_params.iter()) { + recorder.record(param.syntax().text_range(), param_ty.ty); + if let Some(destruct) = param.destruct() { + recorder.record(destruct.syntax().text_range(), param_ty.ty); + } + if let Some(assign_token) = param.assign_token() { + recorder.record(assign_token.text_range(), param_ty.ty); + } + } + } + env.define_ty(name, final_ty); + } + } +} + +/// Bind a destructuring pattern with an interned type. +/// +/// This is used for comprehension variables where we know the element type +/// from the iterator expression. 
+pub(crate) fn bind_destruct_with_type_ty( + destruct: &jrsonnet_rowan_parser::nodes::Destruct, + ty: Ty, + env: &mut TypeEnv, +) { + use jrsonnet_rowan_parser::nodes::Destruct; + + match destruct { + Destruct::DestructFull(full) => { + let Some(ident) = full.name().and_then(|n| n.ident_lit()) else { + return; + }; + env.define_ty(ident.text().to_string(), ty); + } + Destruct::DestructArray(arr) => { + let elem_types = extract_array_element_types_ty(ty, env); + let mut elem_index = 0usize; + for part in arr.destruct_array_parts() { + let jrsonnet_rowan_parser::nodes::DestructArrayPart::DestructArrayElement(elem) = + part + else { + continue; + }; + let Some(inner) = elem.destruct() else { + elem_index += 1; + continue; + }; + let elem_ty = array_destruct_element_ty(&elem_types, elem_index); + bind_destruct_with_type_ty(&inner, elem_ty, env); + elem_index += 1; + } + } + Destruct::DestructObject(obj) => { + for field in obj.destruct_object_fields() { + let Some(field_name) = field.field().and_then(|name| name.ident_lit()) else { + continue; + }; + let field_ty = lookup_destruct_field_type_ty(field_name.text(), ty, env); + let Some(inner) = field.destruct() else { + env.define_ty(field_name.text().to_string(), field_ty); + continue; + }; + bind_destruct_with_type_ty(&inner, field_ty, env); + } + } + Destruct::DestructSkip(_) => {} + } +} + +fn record_destruct_binding_types_ty( + destruct: &jrsonnet_rowan_parser::nodes::Destruct, + ty: Ty, + env: &TypeEnv, + recorder: &mut R, +) { + use jrsonnet_rowan_parser::nodes::Destruct; + + match destruct { + Destruct::DestructFull(full) => { + let Some(name) = full.name() else { + return; + }; + recorder.record(name.syntax().text_range(), ty); + } + Destruct::DestructArray(arr) => { + let elem_types = extract_array_element_types_ty(ty, env); + let mut elem_index = 0usize; + for part in arr.destruct_array_parts() { + let jrsonnet_rowan_parser::nodes::DestructArrayPart::DestructArrayElement(elem) = + part + else { + continue; + 
}; + let Some(inner) = elem.destruct() else { + elem_index += 1; + continue; + }; + let elem_ty = array_destruct_element_ty(&elem_types, elem_index); + record_destruct_binding_types_ty(&inner, elem_ty, env, recorder); + elem_index += 1; + } + } + Destruct::DestructObject(obj) => { + for field in obj.destruct_object_fields() { + let Some(field_name) = field.field().and_then(|name| name.ident_lit()) else { + continue; + }; + let field_ty = lookup_destruct_field_type_ty(field_name.text(), ty, env); + let Some(inner) = field.destruct() else { + recorder.record(field_name.text_range(), field_ty); + continue; + }; + record_destruct_binding_types_ty(&inner, field_ty, env, recorder); + } + } + Destruct::DestructSkip(_) => {} + } +} + +/// Extract element types from an array or tuple type (Ty version). +fn extract_array_element_types_ty(ty: Ty, env: &TypeEnv) -> Vec { + let store = env.store(); + match store.get(ty) { + TyData::Tuple { ref elems } => elems.clone(), + TyData::Array { elem, .. } => vec![elem], + _ => vec![], + } +} + +fn array_destruct_element_ty(elem_types: &[Ty], index: usize) -> Ty { + match elem_types { + [] => Ty::ANY, + [elem] => *elem, + _ => elem_types.get(index).copied().unwrap_or(Ty::ANY), + } +} + +/// Look up the type for a destructured field from an object type (Ty version). +fn lookup_destruct_field_type_ty(field_name: &str, ty: Ty, env: &TypeEnv) -> Ty { + let store = env.store(); + let TyData::Object(ref obj_data) = store.get(ty) else { + return Ty::ANY; + }; + + obj_data.get_field(field_name).map_or(Ty::ANY, |fd| fd.ty) +} + +/// Check if an expression is guaranteed to diverge (never return). +/// +/// An expression diverges if it has type `Never` - meaning it always +/// throws an error. This is used for unreachable code detection. 
+pub fn is_divergent(expr: &jrsonnet_rowan_parser::nodes::Expr, env: &mut TypeEnv) -> bool { + infer_expr_ty(expr, env).is_never() +} diff --git a/crates/jrsonnet-lsp-inference/src/expr/mod.rs b/crates/jrsonnet-lsp-inference/src/expr/mod.rs index 8d1f7e3f..3af363f8 100644 --- a/crates/jrsonnet-lsp-inference/src/expr/mod.rs +++ b/crates/jrsonnet-lsp-inference/src/expr/mod.rs @@ -2,380 +2,8 @@ mod advanced; mod base; +mod core; -use jrsonnet_lsp_document::Document; -use jrsonnet_lsp_types::{FunctionData, ParamInterned, ReturnSpec, Ty, TyData}; -use jrsonnet_rowan_parser::{nodes::Bind, AstNode}; -use rowan::TextRange; -use rustc_hash::FxHashMap; - -use self::base::infer_base_ty; -use crate::{ - env::TypeEnv, - flow::{self, Facts}, - helpers::extract_params_with_default_types_ty, -}; - -/// Apply type facts to the environment, narrowing variable types. -/// -/// This is used to apply facts extracted from assert statements and -/// type guard conditions to narrow types for subsequent code. -fn apply_facts_to_env(facts: &Facts, env: &mut TypeEnv) { - for (var_name, fact) in facts.iter() { - // Get the current type of the variable - let current_ty = env.lookup(var_name).unwrap_or(Ty::ANY); - // Apply the fact to narrow the type - let narrowed_ty = fact.apply_to(current_ty, env.store_mut()); - // Update the environment with the narrowed type - env.define_ty(var_name.clone(), narrowed_ty); - } -} - -pub(super) trait TypeRecorder { - fn record(&mut self, range: TextRange, ty: Ty); -} - -struct NoopRecorder; - -impl TypeRecorder for NoopRecorder { - fn record(&mut self, _range: TextRange, _ty: Ty) {} -} - -impl TypeRecorder for FxHashMap { - fn record(&mut self, range: TextRange, ty: Ty) { - self.insert(range, ty); - } -} - -fn record_expr_and_base( - recorder: &mut R, - expr: &jrsonnet_rowan_parser::nodes::Expr, - ty: Ty, -) { - recorder.record(expr.syntax().text_range(), ty); - if let Some(base) = expr.expr_base() { - recorder.record(base.syntax().text_range(), ty); - } -} 
- -/// Infer the type of a document's root expression, returning an interned `Ty` and the environment. -/// -/// This is useful for tests that need to inspect the type structure using `TyData`. -#[must_use] -pub fn infer_document_type_ty(document: &Document) -> (Ty, TypeEnv) { - let ast = document.ast(); - let mut env = TypeEnv::new_default(); - - let ty = ast - .expr() - .map_or(Ty::ANY, |expr| infer_expr_ty(&expr, &mut env)); - - (ty, env) -} - -/// Infer the type of an expression, returning an interned `Ty`. -pub fn infer_expr_ty(expr: &jrsonnet_rowan_parser::nodes::Expr, env: &mut TypeEnv) -> Ty { - infer_expr_ty_with_expected(expr, env, None) -} - -pub(crate) fn infer_expr_ty_and_record( - expr: &jrsonnet_rowan_parser::nodes::Expr, - env: &mut TypeEnv, - expected: Option, - recorder: &mut FxHashMap, -) -> Ty { - infer_expr_ty_impl(expr, env, expected, recorder) -} - -/// Infer the type of an expression with an optional expected type, returning `Ty`. -/// -/// This is the efficient internal version that works with interned types throughout. 
-pub fn infer_expr_ty_with_expected( - expr: &jrsonnet_rowan_parser::nodes::Expr, - env: &mut TypeEnv, - expected: Option, -) -> Ty { - let mut recorder = NoopRecorder; - infer_expr_ty_impl(expr, env, expected, &mut recorder) -} - -pub(super) fn infer_expr_ty_impl( - expr: &jrsonnet_rowan_parser::nodes::Expr, - env: &mut TypeEnv, - expected: Option, - recorder: &mut R, -) -> Ty { - // First, handle local bindings and assert statements that may precede the expression - for stmt in expr.stmts() { - match stmt { - jrsonnet_rowan_parser::nodes::Stmt::StmtLocal(stmt_local) => { - for bind in stmt_local.binds() { - infer_bind_type_ty(&bind, env, recorder); - } - } - jrsonnet_rowan_parser::nodes::Stmt::StmtAssert(stmt_assert) => { - // Extract type facts from assert conditions and apply them - if let Some(assertion) = stmt_assert.assertion() { - if let Some(cond) = assertion.condition() { - let facts = flow::extract_facts(&cond); - apply_facts_to_env(&facts, env); - } - } - } - } - } - - // Get the base expression type - let ty = expr.expr_base().map_or(Ty::ANY, |base| { - infer_base_ty(&base, env, expected, recorder) - }); - record_expr_and_base(recorder, expr, ty); - ty -} - -/// Infer types from a bind (local variable definition) using interned types. 
-fn infer_bind_type_ty<R: TypeRecorder>(bind: &Bind, env: &mut TypeEnv, recorder: &mut R) {
-	match bind {
-		Bind::BindDestruct(bd) => {
-			let ty = bd.value().map_or(Ty::ANY, |value| {
-				infer_expr_ty_impl(&value, env, None, recorder)
-			});
-			recorder.record(bd.syntax().text_range(), ty);
-			if let Some(assign_token) = bd.assign_token() {
-				recorder.record(assign_token.text_range(), ty);
-			}
-			if let Some(destruct) = bd.into() {
-				record_destruct_binding_types_ty(&destruct, ty, env, recorder);
-				bind_destruct_with_type_ty(&destruct, ty, env);
-			}
-		}
-		Bind::BindFunction(bf) => {
-			let Some(name_node) = bf.name() else {
-				return;
-			};
-			let Some(ident) = name_node.ident_lit() else {
-				return;
-			};
-			let name = ident.text().to_string();
-			let params_desc = bf.params();
-			let params = params_desc
-				.as_ref()
-				.map(|p| extract_params_with_default_types_ty(p, env))
-				.unwrap_or_default();
-
-			// Install a provisional function first so recursive self-calls can resolve.
-			let provisional_func = FunctionData {
-				params: params.clone(),
-				return_spec: ReturnSpec::Fixed(Ty::ANY),
-				variadic: false,
-			};
-			let provisional_ty = env.store_mut().intern(TyData::Function(provisional_func));
-			env.define_ty(name.clone(), provisional_ty);
-
-			let (return_ty, param_constraints) = if env.can_infer_function_body() {
-				bf.value().map_or_else(
-					|| (Ty::ANY, FxHashMap::default()),
-					|body| {
-						env.push_scope();
-						let param_names: Vec<String> =
-							params.iter().map(|p| p.name.clone()).collect();
-						for param in &params {
-							env.define_ty(param.name.clone(), param.ty);
-						}
-
-						env.start_constraint_tracking(&param_names);
-						env.enter_function();
-						let body_ty = infer_expr_ty_impl(&body, env, None, recorder);
-						env.exit_function();
-						let constraints = env.stop_constraint_tracking_ty();
-						env.pop_scope();
-						(body_ty, constraints)
-					},
-				)
-			} else {
-				(Ty::ANY, FxHashMap::default())
-			};
-
-			let final_params: Vec<ParamInterned> = params
-				.into_iter()
-				.map(|param| {
-					let mut narrowed_ty = param.ty;
-					if let Some(constraints) = param_constraints.get(&param.name) {
-						for constraint_ty in constraints {
-							narrowed_ty = env.store_mut().narrow(narrowed_ty, *constraint_ty);
-						}
-					}
-					ParamInterned {
-						name: param.name,
-						ty: narrowed_ty,
-						has_default: param.has_default,
-					}
-				})
-				.collect();
-
-			let final_func = FunctionData {
-				params: final_params.clone(),
-				return_spec: ReturnSpec::Fixed(return_ty),
-				variadic: false,
-			};
-			let final_ty = env.store_mut().intern(TyData::Function(final_func));
-			recorder.record(bf.syntax().text_range(), final_ty);
-			recorder.record(name_node.syntax().text_range(), final_ty);
-			if let Some(assign_token) = bf.assign_token() {
-				recorder.record(assign_token.text_range(), final_ty);
-			}
-			if let Some(params_desc) = params_desc {
-				recorder.record(params_desc.syntax().text_range(), final_ty);
-				for (param, param_ty) in params_desc.params().zip(final_params.iter()) {
-					recorder.record(param.syntax().text_range(), param_ty.ty);
-					if let Some(destruct) = param.destruct() {
-						recorder.record(destruct.syntax().text_range(), param_ty.ty);
-					}
-					if let Some(assign_token) = param.assign_token() {
-						recorder.record(assign_token.text_range(), param_ty.ty);
-					}
-				}
-			}
-			env.define_ty(name, final_ty);
-		}
-	}
-}
-
-/// Bind a destructuring pattern with an interned type.
-///
-/// This is used for comprehension variables where we know the element type
-/// from the iterator expression.
-pub(super) fn bind_destruct_with_type_ty(
-	destruct: &jrsonnet_rowan_parser::nodes::Destruct,
-	ty: Ty,
-	env: &mut TypeEnv,
-) {
-	use jrsonnet_rowan_parser::nodes::Destruct;
-
-	match destruct {
-		Destruct::DestructFull(full) => {
-			let Some(ident) = full.name().and_then(|n| n.ident_lit()) else {
-				return;
-			};
-			env.define_ty(ident.text().to_string(), ty);
-		}
-		Destruct::DestructArray(arr) => {
-			let elem_types = extract_array_element_types_ty(ty, env);
-			let mut elem_index = 0usize;
-			for part in arr.destruct_array_parts() {
-				let jrsonnet_rowan_parser::nodes::DestructArrayPart::DestructArrayElement(elem) =
-					part
-				else {
-					continue;
-				};
-				let Some(inner) = elem.destruct() else {
-					elem_index += 1;
-					continue;
-				};
-				let elem_ty = array_destruct_element_ty(&elem_types, elem_index);
-				bind_destruct_with_type_ty(&inner, elem_ty, env);
-				elem_index += 1;
-			}
-		}
-		Destruct::DestructObject(obj) => {
-			for field in obj.destruct_object_fields() {
-				let Some(field_name) = field.field().and_then(|name| name.ident_lit()) else {
-					continue;
-				};
-				let field_ty = lookup_destruct_field_type_ty(field_name.text(), ty, env);
-				let Some(inner) = field.destruct() else {
-					env.define_ty(field_name.text().to_string(), field_ty);
-					continue;
-				};
-				bind_destruct_with_type_ty(&inner, field_ty, env);
-			}
-		}
-		Destruct::DestructSkip(_) => {}
-	}
-}
-
-fn record_destruct_binding_types_ty<R: TypeRecorder>(
-	destruct: &jrsonnet_rowan_parser::nodes::Destruct,
-	ty: Ty,
-	env: &TypeEnv,
-	recorder: &mut R,
-) {
-	use jrsonnet_rowan_parser::nodes::Destruct;
-
-	match destruct {
-		Destruct::DestructFull(full) => {
-			let Some(name) = full.name() else {
-				return;
-			};
-			recorder.record(name.syntax().text_range(), ty);
-		}
-		Destruct::DestructArray(arr) => {
-			let elem_types = extract_array_element_types_ty(ty, env);
-			let mut elem_index = 0usize;
-			for part in arr.destruct_array_parts() {
-				let jrsonnet_rowan_parser::nodes::DestructArrayPart::DestructArrayElement(elem) =
-					part
-				else {
-					continue;
-				};
-				let Some(inner) = elem.destruct() else {
-					elem_index += 1;
-					continue;
-				};
-				let elem_ty = array_destruct_element_ty(&elem_types, elem_index);
-				record_destruct_binding_types_ty(&inner, elem_ty, env, recorder);
-				elem_index += 1;
-			}
-		}
-		Destruct::DestructObject(obj) => {
-			for field in obj.destruct_object_fields() {
-				let Some(field_name) = field.field().and_then(|name| name.ident_lit()) else {
-					continue;
-				};
-				let field_ty = lookup_destruct_field_type_ty(field_name.text(), ty, env);
-				let Some(inner) = field.destruct() else {
-					recorder.record(field_name.text_range(), field_ty);
-					continue;
-				};
-				record_destruct_binding_types_ty(&inner, field_ty, env, recorder);
-			}
-		}
-		Destruct::DestructSkip(_) => {}
-	}
-}
-
-/// Extract element types from an array or tuple type (Ty version).
-fn extract_array_element_types_ty(ty: Ty, env: &TypeEnv) -> Vec<Ty> {
-	let store = env.store();
-	match store.get(ty) {
-		TyData::Tuple { ref elems } => elems.clone(),
-		TyData::Array { elem, .. } => vec![elem],
-		_ => vec![],
-	}
-}
-
-fn array_destruct_element_ty(elem_types: &[Ty], index: usize) -> Ty {
-	match elem_types {
-		[] => Ty::ANY,
-		[elem] => *elem,
-		_ => elem_types.get(index).copied().unwrap_or(Ty::ANY),
-	}
-}
-
-/// Look up the type for a destructured field from an object type (Ty version).
-fn lookup_destruct_field_type_ty(field_name: &str, ty: Ty, env: &TypeEnv) -> Ty {
-	let store = env.store();
-	let TyData::Object(ref obj_data) = store.get(ty) else {
-		return Ty::ANY;
-	};
-
-	obj_data.get_field(field_name).map_or(Ty::ANY, |fd| fd.ty)
-}
-
-/// Check if an expression is guaranteed to diverge (never return).
-///
-/// An expression diverges if it has type `Never` - meaning it always
-/// throws an error. This is used for unreachable code detection.
-pub fn is_divergent(expr: &jrsonnet_rowan_parser::nodes::Expr, env: &mut TypeEnv) -> bool { - infer_expr_ty(expr, env).is_never() -} +pub(crate) use core::infer_expr_ty_and_record; +pub(crate) use core::{bind_destruct_with_type_ty, infer_expr_ty_impl, TypeRecorder}; +pub use core::{infer_document_type_ty, infer_expr_ty, infer_expr_ty_with_expected, is_divergent}; From 43019310ba1e0a69175a8285db01faa4880792ae Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Wed, 25 Feb 2026 18:47:23 +0000 Subject: [PATCH 184/210] feat(lsp): add destructuring hints for comprehension bindings Extend inlay hint rendering so comprehension bindings can emit per-name type hints when the binding uses destructuring. Previously, the comprehension hint path only handled simple bindings (`for x in ...`). Destructured forms like `for [a, b] in ...` produced no binding hints even when the inferred element type was known. The comprehension pipeline now reuses the existing destructuring hint walker with the inferred iterator element type. This keeps behavior consistent with local/object-local destructuring hints and supports nested array/object patterns. This remains non-intrusive by default. Destructured comprehension hints appear only when `inlayHints.comprehensions = "all"` and `inlayHints.destructuring = "all"` are both enabled. Also adds unit coverage for both the enabled case and the config-gated disabled case, and documents the category interaction in `docs/lsp/README.md`. 
--- .../jrsonnet-lsp-handlers/src/inlay_hint.rs | 107 ++++++++++++++++-- docs/lsp/README.md | 7 ++ 2 files changed, 105 insertions(+), 9 deletions(-) diff --git a/crates/jrsonnet-lsp-handlers/src/inlay_hint.rs b/crates/jrsonnet-lsp-handlers/src/inlay_hint.rs index a51ac7c9..791297e0 100644 --- a/crates/jrsonnet-lsp-handlers/src/inlay_hint.rs +++ b/crates/jrsonnet-lsp-handlers/src/inlay_hint.rs @@ -417,6 +417,7 @@ pub fn inlay_hints_with_config( push_comprehension_binding_hint( &mut hints, &for_spec, + config.destructuring.enabled(), analysis, visible_range, line_index, @@ -738,6 +739,7 @@ fn push_call_argument_hints( fn push_comprehension_binding_hint( hints: &mut Vec, for_spec: &ForSpec, + include_destructuring: bool, analysis: &TypeAnalysis, visible_range: Range, line_index: &LineIndex, @@ -760,15 +762,28 @@ fn push_comprehension_binding_hint( let Some(destruct) = for_spec.bind() else { return; }; - let Destruct::DestructFull(full) = destruct else { - return; - }; - let Some(name) = full.name() else { - return; - }; - let range = to_lsp_range(name.syntax().text_range(), line_index, text); - if position_in_range(range.end, visible_range) { - hints.push(type_hint(range.end, format!(": {type_str}"))); + match destruct { + Destruct::DestructFull(full) => { + let Some(name) = full.name() else { + return; + }; + let range = to_lsp_range(name.syntax().text_range(), line_index, text); + if position_in_range(range.end, visible_range) { + hints.push(type_hint(range.end, format!(": {type_str}"))); + } + } + _ if include_destructuring => { + push_destruct_hints( + hints, + &destruct, + Some(elem_ty), + analysis, + visible_range, + line_index, + text, + ); + } + _ => {} } } @@ -1436,6 +1451,80 @@ mod tests { ); } + #[test] + fn test_comprehension_destructuring_hints_require_destructuring_mode() { + let doc = Document::new( + "[a + b for [a, b] in [[1, 2]]]".to_string(), + DocVersion::new(1), + ); + let analysis = test_analysis(&doc); + let config = InlayHintsConfig { + local: 
LocalHintsMode::Off, + object_local: LocalHintsMode::Off, + object_members: ObjectMemberHintsMode::Off, + function_parameters: FunctionParameterHintsMode::Off, + anonymous_function_returns: AnonymousFunctionReturnHintsMode::Off, + call_arguments: CallArgumentHintsMode::Off, + comprehensions: ComprehensionHintsMode::All, + destructuring: DestructuringHintsMode::Off, + }; + + let hints = inlay_hints_with_config(&doc, &analysis, full_line_range(), &config); + assert_hints_eq(&hints, vec![]); + } + + #[test] + fn test_comprehension_destructuring_binding_hints_when_enabled() { + let doc = Document::new( + "[a + b for [a, b] in [[1, 2]]]".to_string(), + DocVersion::new(1), + ); + let analysis = test_analysis(&doc); + let config = InlayHintsConfig { + local: LocalHintsMode::Off, + object_local: LocalHintsMode::Off, + object_members: ObjectMemberHintsMode::Off, + function_parameters: FunctionParameterHintsMode::Off, + anonymous_function_returns: AnonymousFunctionReturnHintsMode::Off, + call_arguments: CallArgumentHintsMode::Off, + comprehensions: ComprehensionHintsMode::All, + destructuring: DestructuringHintsMode::All, + }; + + let hints = inlay_hints_with_config(&doc, &analysis, full_line_range(), &config); + assert_hints_eq( + &hints, + vec![ + InlayHint { + position: Position { + line: 0, + character: 13, + }, + label: InlayHintLabel::String(": number".to_string()), + kind: Some(InlayHintKind::TYPE), + text_edits: None, + tooltip: None, + padding_left: Some(true), + padding_right: None, + data: None, + }, + InlayHint { + position: Position { + line: 0, + character: 16, + }, + label: InlayHintLabel::String(": number".to_string()), + kind: Some(InlayHintKind::TYPE), + text_edits: None, + tooltip: None, + padding_left: Some(true), + padding_right: None, + data: None, + }, + ], + ); + } + #[test] fn test_destructuring_array_binding_hints_when_enabled() { let doc = Document::new( diff --git a/docs/lsp/README.md b/docs/lsp/README.md index 6eb39228..7b6bf342 100644 --- 
a/docs/lsp/README.md +++ b/docs/lsp/README.md @@ -167,6 +167,13 @@ Top-level options: | `comprehensions` | `off` | `off`, `all` | | `destructuring` | `off` | `off`, `all` | +Notes: + +- `comprehensions` controls hints for `for ... in ...` bindings. +- Destructured comprehension bindings (for example `for [a, b] in ...`) + emit per-name hints when both `comprehensions = "all"` and + `destructuring = "all"` are enabled. + ### `formatting` If a field is omitted, the formatter default is used. From 3a4184d5ba5b9af0ec04614aea9ea9ec623845e3 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Wed, 25 Feb 2026 18:50:13 +0000 Subject: [PATCH 185/210] test(lsp): expand inlay hint runner scenario coverage Add focused end-to-end scenarios for inlay hints so configuration and category interactions are validated through the scenario runner, not only unit-level handlers. New coverage includes: - local/object-local mode filters (`all`, `variables`, `functions`) - comprehension + destructuring gating for destructured `for` bindings - function-parameter hints combined with call-argument hints, including named-argument skip behavior Each fixture keeps one concern per scenario so failures are easy to triage and configuration regressions are easier to localize. 
--- ...nts_comprehension_destructuring_modes.yaml | 53 +++++++++++ .../inlay_hints_local_mode_filters.yaml | 87 +++++++++++++++++++ ...y_hints_parameters_and_call_arguments.yaml | 40 +++++++++ 3 files changed, 180 insertions(+) create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/inlay_hints_comprehension_destructuring_modes.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/inlay_hints_local_mode_filters.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/inlay_hints_parameters_and_call_arguments.yaml diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/inlay_hints_comprehension_destructuring_modes.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/inlay_hints_comprehension_destructuring_modes.yaml new file mode 100644 index 00000000..19d28d45 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/inlay_hints_comprehension_destructuring_modes.yaml @@ -0,0 +1,53 @@ +# Verify destructured comprehension hints require both comprehension and +# destructuring categories. 
+steps: +- step: create + files: + main.jsonnet: | + [[hintRange:[a + b for [((compA:a|)), ((compB:b|))] in [[1, 2]]]]] + +- step: diagnosticsSettled + +- step: config + settings: + jsonnet: + inlayHints: + local: "off" + objectLocal: "off" + comprehensions: "all" + destructuring: "off" + +- step: requestInlayHints + as: comprehensionOnly + file: main.jsonnet + range: hintRange + +- step: expectInlayHints + request: comprehensionOnly + result: + +- step: config + settings: + jsonnet: + inlayHints: + local: "off" + objectLocal: "off" + comprehensions: "all" + destructuring: "all" + +- step: requestInlayHints + as: comprehensionAndDestructuring + file: main.jsonnet + range: hintRange + +- step: expectInlayHints + request: comprehensionAndDestructuring + result: + - positionOf: compA + label: ": number" + kind: 1 + paddingLeft: true + - positionOf: compB + label: ": number" + kind: 1 + paddingLeft: true diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/inlay_hints_local_mode_filters.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/inlay_hints_local_mode_filters.yaml new file mode 100644 index 00000000..2f2f92ad --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/inlay_hints_local_mode_filters.yaml @@ -0,0 +1,87 @@ +# Verify local/object-local mode filters variable and function hints independently. 
+steps: +- step: create + files: + main.jsonnet: | + [[hintRange:local ((topVar:x|)) = 1; + local ((topFn:f|))() = 1; + { + local ((objVar:y|)) = 2, + local ((objFn:g|))() = 2, + a: x + y + f() + g(), + }]] + +- step: diagnosticsSettled + +- step: requestInlayHints + as: defaultHints + file: main.jsonnet + range: hintRange + +- step: expectInlayHints + request: defaultHints + result: + - positionOf: topVar + label: ": number" + kind: 1 + paddingLeft: true + - positionOf: topFn + label: " -> number" + kind: 1 + paddingLeft: true + - positionOf: objVar + label: ": number" + kind: 1 + paddingLeft: true + - positionOf: objFn + label: " -> number" + kind: 1 + paddingLeft: true + +- step: config + settings: + jsonnet: + inlayHints: + local: "variables" + objectLocal: "variables" + +- step: requestInlayHints + as: variableOnly + file: main.jsonnet + range: hintRange + +- step: expectInlayHints + request: variableOnly + result: + - positionOf: topVar + label: ": number" + kind: 1 + paddingLeft: true + - positionOf: objVar + label: ": number" + kind: 1 + paddingLeft: true + +- step: config + settings: + jsonnet: + inlayHints: + local: "functions" + objectLocal: "functions" + +- step: requestInlayHints + as: functionOnly + file: main.jsonnet + range: hintRange + +- step: expectInlayHints + request: functionOnly + result: + - positionOf: topFn + label: " -> number" + kind: 1 + paddingLeft: true + - positionOf: objFn + label: " -> number" + kind: 1 + paddingLeft: true diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/inlay_hints_parameters_and_call_arguments.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/inlay_hints_parameters_and_call_arguments.yaml new file mode 100644 index 00000000..7356cfa4 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/inlay_hints_parameters_and_call_arguments.yaml @@ -0,0 +1,40 @@ +# Verify parameter and call-argument categories together, including +# named-argument skip behavior. 
+steps: +- step: create + files: + main.jsonnet: | + [[hintRange:local add(((paramX:x|))=1, ((paramY:y|))=1) = x + y; + add(((argX:|1)), y=2)]] + +- step: diagnosticsSettled + +- step: config + settings: + jsonnet: + inlayHints: + local: "off" + objectLocal: "off" + functionParameters: "all" + callArguments: "all" + +- step: requestInlayHints + as: parameterAndCallHints + file: main.jsonnet + range: hintRange + +- step: expectInlayHints + request: parameterAndCallHints + result: + - positionOf: paramX + label: ": number" + kind: 1 + paddingLeft: true + - positionOf: paramY + label: ": number" + kind: 1 + paddingLeft: true + - positionOf: argX + label: "x:" + kind: 2 + paddingRight: true From 25bd3c81b5423173ab98536e09095140195f2fc9 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Fri, 27 Feb 2026 11:36:36 +0000 Subject: [PATCH 186/210] feat(lsp): support formatting request option combinations Make formatting request options first-class across formatter, LSP, and scenario harness paths. What changed: - map request options for indentation (`insertSpaces`/`tabSize`) and trimming flags through the LSP formatting request path - move trailing-whitespace and final-newline trimming behavior into `jrsonnet-fmt` options so normalization lives in the formatter - keep `insertFinalNewline` handling at the LSP layer as a final request-specific post-step Also expands test coverage: - async formatting option unit tests in LSP - integration tests for optional formatting request flags - scenario runner support for formatting request option fields - new scenario fixtures covering option combinations, files with trailing input newlines, and tabs/spaces/tabSize behavior README formatting docs now describe these request option effects. 
--- cmds/jrsonnet-fmt/src/api.rs | 41 ++++++ cmds/jrsonnet-fmt/src/context.rs | 6 + cmds/jrsonnet-fmt/src/main.rs | 2 + .../src/scenario/request_steps.rs | 5 + .../src/scenario_runner/request_steps.rs | 6 +- .../src/scenario_script/compile.rs | 9 ++ crates/jrsonnet-lsp/src/config.rs | 1 + .../src/server/async_requests/formatting.rs | 125 +++++++++++++++++- crates/jrsonnet-lsp/tests/integration_test.rs | 18 ++- .../tests/integration_test/formatting.rs | 70 ++++++++++ ...ormatting_request_option_combinations.yaml | 70 ++++++++++ ...ormatting_request_tabs_spaces_tabsize.yaml | 72 ++++++++++ ...equest_trailing_newline_input_options.yaml | 69 ++++++++++ docs/lsp/README.md | 44 +++--- 14 files changed, 510 insertions(+), 28 deletions(-) create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/formatting_request_option_combinations.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/formatting_request_tabs_spaces_tabsize.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/formatting_request_trailing_newline_input_options.yaml diff --git a/cmds/jrsonnet-fmt/src/api.rs b/cmds/jrsonnet-fmt/src/api.rs index a9128018..3962ce96 100644 --- a/cmds/jrsonnet-fmt/src/api.rs +++ b/cmds/jrsonnet-fmt/src/api.rs @@ -4,6 +4,19 @@ use crate::{FormatContext, FormatOptions, Printable}; const CONVERGENCE_LIMIT: usize = 10; +fn trim_trailing_whitespace(text: &str) -> String { + let mut trimmed = String::with_capacity(text.len()); + for segment in text.split_inclusive('\n') { + if let Some(line) = segment.strip_suffix('\n') { + trimmed.push_str(line.trim_end_matches([' ', '\t'])); + trimmed.push('\n'); + } else { + trimmed.push_str(segment.trim_end_matches([' ', '\t'])); + } + } + trimmed +} + /// Format Jsonnet source code in-process. 
/// /// Applies repeated formatting passes until output stabilizes or the @@ -28,7 +41,13 @@ pub fn format_code(input: &str, opts: &FormatOptions) -> Option { } } + if opts.trim_trailing_whitespace { + formatted = trim_trailing_whitespace(&formatted); + } formatted.push('\n'); + if opts.trim_final_newlines { + formatted.truncate(formatted.trim_end_matches('\n').len()); + } Some(formatted) } @@ -54,3 +73,25 @@ fn format_once(input: &str, opts: &FormatOptions) -> Option { }, )) } + +#[cfg(test)] +mod tests { + use super::trim_trailing_whitespace; + + #[test] + fn test_trim_trailing_whitespace_removes_spaces_and_tabs_at_line_end() { + assert_eq!(trim_trailing_whitespace("a \n\tb\t \n c\t"), "a\n\tb\n c"); + } + + #[test] + fn test_format_code_can_trim_final_newlines() { + let options = crate::FormatOptions { + trim_final_newlines: true, + ..crate::FormatOptions::default() + }; + assert_eq!( + super::format_code("{a:1}", &options), + Some("{\n\ta: 1,\n}".to_string()) + ); + } +} diff --git a/cmds/jrsonnet-fmt/src/context.rs b/cmds/jrsonnet-fmt/src/context.rs index 0ef9ee3f..466a2db4 100644 --- a/cmds/jrsonnet-fmt/src/context.rs +++ b/cmds/jrsonnet-fmt/src/context.rs @@ -79,6 +79,10 @@ pub struct FormatOptions { pub pad_objects: bool, /// Use pretty field names (unquoted when possible). pub pretty_field_names: bool, + /// Remove trailing spaces/tabs at the end of each line. + pub trim_trailing_whitespace: bool, + /// Remove all trailing newline characters from the formatted output. 
+ pub trim_final_newlines: bool, } impl Default for FormatOptions { @@ -91,6 +95,8 @@ impl Default for FormatOptions { pad_arrays: false, pad_objects: true, pretty_field_names: true, + trim_trailing_whitespace: false, + trim_final_newlines: false, } } } diff --git a/cmds/jrsonnet-fmt/src/main.rs b/cmds/jrsonnet-fmt/src/main.rs index 1bd6f9d0..7ea13496 100644 --- a/cmds/jrsonnet-fmt/src/main.rs +++ b/cmds/jrsonnet-fmt/src/main.rs @@ -194,6 +194,8 @@ fn main_result() -> Result<(), Error> { pad_arrays: opts.pad_arrays, pad_objects: !opts.no_pad_objects, pretty_field_names: !opts.no_pretty_field_names, + trim_trailing_whitespace: false, + trim_final_newlines: false, }; let mut iteration = 0; diff --git a/crates/jrsonnet-lsp-scenario/src/scenario/request_steps.rs b/crates/jrsonnet-lsp-scenario/src/scenario/request_steps.rs index 8f2d2dbd..ba2b0cbf 100644 --- a/crates/jrsonnet-lsp-scenario/src/scenario/request_steps.rs +++ b/crates/jrsonnet-lsp-scenario/src/scenario/request_steps.rs @@ -757,6 +757,8 @@ pub struct ExpectCompletionStep { /// file: main.jsonnet /// tab_size: 4 /// insert_spaces: false +/// trim_final_newlines: true +/// insert_final_newline: true /// - step: expectFormatting /// request: formatting /// result: null @@ -770,6 +772,9 @@ pub struct RequestFormattingStep { pub uri: String, pub tab_size: u32, pub insert_spaces: bool, + pub trim_trailing_whitespace: Option, + pub insert_final_newline: Option, + pub trim_final_newlines: Option, } /// Expected `textDocument/formatting` response. 
diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_runner/request_steps.rs b/crates/jrsonnet-lsp-scenario/src/scenario_runner/request_steps.rs index d1005250..0fc97506 100644 --- a/crates/jrsonnet-lsp-scenario/src/scenario_runner/request_steps.rs +++ b/crates/jrsonnet-lsp-scenario/src/scenario_runner/request_steps.rs @@ -198,9 +198,9 @@ impl ScenarioRunner { tab_size: step.tab_size, insert_spaces: step.insert_spaces, properties: HashMap::new(), - trim_trailing_whitespace: None, - insert_final_newline: None, - trim_final_newlines: None, + trim_trailing_whitespace: step.trim_trailing_whitespace, + insert_final_newline: step.insert_final_newline, + trim_final_newlines: step.trim_final_newlines, }, work_done_progress_params: WorkDoneProgressParams::default(), }; diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_script/compile.rs b/crates/jrsonnet-lsp-scenario/src/scenario_script/compile.rs index d3deb55f..1b4d002d 100644 --- a/crates/jrsonnet-lsp-scenario/src/scenario_script/compile.rs +++ b/crates/jrsonnet-lsp-scenario/src/scenario_script/compile.rs @@ -520,6 +520,9 @@ impl ScenarioScript { uri: file_uri(base_dir, &step.file), tab_size: step.tab_size, insert_spaces: step.insert_spaces, + trim_trailing_whitespace: step.trim_trailing_whitespace, + insert_final_newline: step.insert_final_newline, + trim_final_newlines: step.trim_final_newlines, })] } ScenarioScriptStep::ExpectFormatting(step) => { @@ -1226,6 +1229,12 @@ struct RequestFormattingScriptStep { tab_size: u32, #[serde(default = "default_formatting_insert_spaces")] insert_spaces: bool, + #[serde(default)] + trim_trailing_whitespace: Option, + #[serde(default)] + insert_final_newline: Option, + #[serde(default)] + trim_final_newlines: Option, } #[derive(Debug, Clone, PartialEq, Eq, Deserialize)] diff --git a/crates/jrsonnet-lsp/src/config.rs b/crates/jrsonnet-lsp/src/config.rs index 4ca0d00a..257969f3 100644 --- a/crates/jrsonnet-lsp/src/config.rs +++ b/crates/jrsonnet-lsp/src/config.rs @@ -743,6 +743,7 
@@ mod tests { pad_arrays: true, pad_objects: false, pretty_field_names: false, + ..FormattingConfig::default() }, ..ServerConfig::default() } diff --git a/crates/jrsonnet-lsp/src/server/async_requests/formatting.rs b/crates/jrsonnet-lsp/src/server/async_requests/formatting.rs index a795c42e..2fc3b961 100644 --- a/crates/jrsonnet-lsp/src/server/async_requests/formatting.rs +++ b/crates/jrsonnet-lsp/src/server/async_requests/formatting.rs @@ -14,9 +14,37 @@ fn formatting_config_for_request( } else { 0 }; + if let Some(trim_trailing_whitespace) = options.trim_trailing_whitespace { + config.trim_trailing_whitespace = trim_trailing_whitespace; + } + if let Some(trim_final_newlines) = options.trim_final_newlines { + config.trim_final_newlines = trim_final_newlines; + } config } +fn apply_text_options(mut formatted: String, options: &FormattingOptions) -> String { + match options.insert_final_newline { + Some(true) => { + if !formatted.ends_with('\n') { + formatted.push('\n'); + } + } + Some(false) => { + formatted.truncate(formatted.trim_end_matches('\n').len()); + } + None => {} + } + + formatted +} + +fn apply_text_options_to_edits(edits: &mut [TextEdit], options: &FormattingOptions) { + for edit in edits { + edit.new_text = apply_text_options(std::mem::take(&mut edit.new_text), options); + } +} + impl AsyncRequestContext { pub(crate) fn formatting(&self, params: &DocumentFormattingParams) -> Option> { let uri = ¶ms.text_document.uri; @@ -24,8 +52,10 @@ impl AsyncRequestContext { let doc = self.documents.get(&path)?; let config = formatting_config_for_request(&self.config.read().formatting, ¶ms.options); + let mut edits = handlers::format_document_with_config(doc.text(), &config)?; + apply_text_options_to_edits(&mut edits, ¶ms.options); - handlers::format_document_with_config(doc.text(), &config) + Some(edits) } } @@ -34,7 +64,7 @@ mod tests { use lsp_types::FormattingOptions; use rstest::rstest; - use super::formatting_config_for_request; + use 
super::{apply_text_options, formatting_config_for_request}; #[rstest] #[case(true, 2, 2)] @@ -62,4 +92,95 @@ mod tests { assert_eq!(merged.indent, expected_indent); assert_eq!(merged.max_blank_lines, base.max_blank_lines); } + + #[rstest] + #[case(None, false, false)] + #[case(Some(true), false, true)] + #[case(Some(false), true, false)] + fn test_formatting_options_control_trim_trailing_whitespace( + #[case] request_trim: Option, + #[case] base_trim: bool, + #[case] expected_trim: bool, + ) { + let base = jrsonnet_lsp_handlers::FormattingConfig { + trim_trailing_whitespace: base_trim, + ..jrsonnet_lsp_handlers::FormattingConfig::default() + }; + let options = FormattingOptions { + tab_size: 2, + insert_spaces: true, + properties: std::collections::HashMap::new(), + trim_trailing_whitespace: request_trim, + insert_final_newline: None, + trim_final_newlines: None, + }; + + let merged = formatting_config_for_request(&base, &options); + assert_eq!(merged.trim_trailing_whitespace, expected_trim); + } + + #[rstest] + #[case(None, false, false)] + #[case(Some(true), false, true)] + #[case(Some(false), true, false)] + fn test_formatting_options_control_trim_final_newlines( + #[case] request_trim: Option, + #[case] base_trim: bool, + #[case] expected_trim: bool, + ) { + let base = jrsonnet_lsp_handlers::FormattingConfig { + trim_final_newlines: base_trim, + ..jrsonnet_lsp_handlers::FormattingConfig::default() + }; + let options = FormattingOptions { + tab_size: 2, + insert_spaces: true, + properties: std::collections::HashMap::new(), + trim_trailing_whitespace: None, + insert_final_newline: None, + trim_final_newlines: request_trim, + }; + + let merged = formatting_config_for_request(&base, &options); + assert_eq!(merged.trim_final_newlines, expected_trim); + } + + #[test] + fn test_insert_final_newline_true_appends_newline() { + let options = FormattingOptions { + tab_size: 2, + insert_spaces: true, + properties: std::collections::HashMap::new(), + 
trim_trailing_whitespace: None, + insert_final_newline: Some(true), + trim_final_newlines: None, + }; + assert_eq!(apply_text_options("{}".to_string(), &options), "{}\n"); + } + + #[test] + fn test_insert_final_newline_false_removes_trailing_newlines() { + let options = FormattingOptions { + tab_size: 2, + insert_spaces: true, + properties: std::collections::HashMap::new(), + trim_trailing_whitespace: None, + insert_final_newline: Some(false), + trim_final_newlines: None, + }; + assert_eq!(apply_text_options("{}\n\n".to_string(), &options), "{}"); + } + + #[test] + fn test_trim_final_newlines_with_insert_final_newline_true_keeps_one() { + let options = FormattingOptions { + tab_size: 2, + insert_spaces: true, + properties: std::collections::HashMap::new(), + trim_trailing_whitespace: None, + insert_final_newline: Some(true), + trim_final_newlines: Some(true), + }; + assert_eq!(apply_text_options("{}".to_string(), &options), "{}\n"); + } } diff --git a/crates/jrsonnet-lsp/tests/integration_test.rs b/crates/jrsonnet-lsp/tests/integration_test.rs index bc1145b9..4fe66900 100644 --- a/crates/jrsonnet-lsp/tests/integration_test.rs +++ b/crates/jrsonnet-lsp/tests/integration_test.rs @@ -485,6 +485,18 @@ fn inlay_hint_request( } fn formatting_request(id: i32, uri: &str, tab_size: u32, insert_spaces: bool) -> Request { + formatting_request_with_options(id, uri, tab_size, insert_spaces, None, None, None) +} + +fn formatting_request_with_options( + id: i32, + uri: &str, + tab_size: u32, + insert_spaces: bool, + trim_trailing_whitespace: Option, + insert_final_newline: Option, + trim_final_newlines: Option, +) -> Request { let params = lsp_types::DocumentFormattingParams { text_document: TextDocumentIdentifier { uri: uri.parse().unwrap(), @@ -493,9 +505,9 @@ fn formatting_request(id: i32, uri: &str, tab_size: u32, insert_spaces: bool) -> tab_size, insert_spaces, properties: std::collections::HashMap::new(), - trim_trailing_whitespace: None, - insert_final_newline: None, - 
trim_final_newlines: None, + trim_trailing_whitespace, + insert_final_newline, + trim_final_newlines, }, work_done_progress_params: WorkDoneProgressParams::default(), }; diff --git a/crates/jrsonnet-lsp/tests/integration_test/formatting.rs b/crates/jrsonnet-lsp/tests/integration_test/formatting.rs index 9a3926af..ba64b06f 100644 --- a/crates/jrsonnet-lsp/tests/integration_test/formatting.rs +++ b/crates/jrsonnet-lsp/tests/integration_test/formatting.rs @@ -147,6 +147,42 @@ impl FormatSession { Ok(serde_json::from_value(result)?) } + fn request_formatting_with_options( + &mut self, + uri: &str, + tab_size: u32, + insert_spaces: bool, + trim_trailing_whitespace: Option, + insert_final_newline: Option, + trim_final_newlines: Option, + ) -> Result>> { + let request_id = self.next_id; + self.next_id += 1; + + self.send(Message::Request(formatting_request_with_options( + request_id, + uri, + tab_size, + insert_spaces, + trim_trailing_whitespace, + insert_final_newline, + trim_final_newlines, + )))?; + + let response = recv_response(&self.client_conn, request_id); + if let Some(error) = response.error { + return Err(FormatTestError::FormattingRequestFailed { + request_id, + error: format!("{error:?}"), + }); + } + + let result = response + .result + .ok_or(FormatTestError::MissingFormattingResult { request_id })?; + Ok(serde_json::from_value(result)?) 
+ } + fn shutdown(mut self) -> Result<()> { let request_id = self.next_id; self.next_id += 1; @@ -307,3 +343,37 @@ fn test_document_formatting_applies_runtime_formatting_config_changes() -> Resul session.shutdown() } + +#[test] +fn test_document_formatting_respects_lsp_optional_formatting_options() -> Result<()> { + let mut session = FormatSession::start(serde_json::Value::Null)?; + let uri = "file:///test/format-request-options.jsonnet"; + session.open(uri, SIMPLE_OBJECT)?; + + let edits = session.request_formatting_with_options( + uri, + 2, + true, + Some(true), + Some(false), + Some(true), + )?; + assert_eq!( + edits, + Some(vec![lsp_types::TextEdit { + range: lsp_types::Range { + start: Position { + line: 0, + character: 0, + }, + end: Position { + line: 0, + character: 5, + }, + }, + new_text: "{\n a: 1,\n}".to_string(), + }]) + ); + + session.shutdown() +} diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/formatting_request_option_combinations.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/formatting_request_option_combinations.yaml new file mode 100644 index 00000000..bd4de4ec --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/formatting_request_option_combinations.yaml @@ -0,0 +1,70 @@ +# Verify request-level formatting option combinations for LSP formatting. 
+steps: +- step: create + files: + main.jsonnet: "{a:1}" + +- step: diagnosticsSettled + +- step: requestFormatting + as: trimFinalOnly + file: main.jsonnet + trim_final_newlines: true + +- step: expectFormatting + request: trimFinalOnly + result: + - range: + start: + line: 0 + character: 0 + end: + line: 0 + character: 5 + newText: |- + { + a: 1, + } + +- step: requestFormatting + as: trimThenInsertFinal + file: main.jsonnet + trim_trailing_whitespace: true + trim_final_newlines: true + insert_final_newline: true + +- step: expectFormatting + request: trimThenInsertFinal + result: + - range: + start: + line: 0 + character: 0 + end: + line: 0 + character: 5 + newText: | + { + a: 1, + } + +- step: requestFormatting + as: trimAndSuppressFinal + file: main.jsonnet + trim_trailing_whitespace: true + insert_final_newline: false + +- step: expectFormatting + request: trimAndSuppressFinal + result: + - range: + start: + line: 0 + character: 0 + end: + line: 0 + character: 5 + newText: |- + { + a: 1, + } diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/formatting_request_tabs_spaces_tabsize.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/formatting_request_tabs_spaces_tabsize.yaml new file mode 100644 index 00000000..45ef1061 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/formatting_request_tabs_spaces_tabsize.yaml @@ -0,0 +1,72 @@ +# Verify formatting indentation is controlled by request options: +# insert_spaces + tab_size. 
+steps: +- step: create + files: + main.jsonnet: "{a:{b:1}}" + +- step: diagnosticsSettled + +- step: requestFormatting + as: tabs + file: main.jsonnet + insert_spaces: false + tab_size: 8 + +- step: expectFormatting + request: tabs + result: + - range: + start: + line: 0 + character: 0 + end: + line: 0 + character: 9 + newText: "{\n\ta: {\n\t\tb: 1,\n\t},\n}\n" + +- step: requestFormatting + as: spaces2 + file: main.jsonnet + insert_spaces: true + tab_size: 2 + +- step: expectFormatting + request: spaces2 + result: + - range: + start: + line: 0 + character: 0 + end: + line: 0 + character: 9 + newText: | + { + a: { + b: 1, + }, + } + +- step: requestFormatting + as: spaces4 + file: main.jsonnet + insert_spaces: true + tab_size: 4 + +- step: expectFormatting + request: spaces4 + result: + - range: + start: + line: 0 + character: 0 + end: + line: 0 + character: 9 + newText: | + { + a: { + b: 1, + }, + } diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/formatting_request_trailing_newline_input_options.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/formatting_request_trailing_newline_input_options.yaml new file mode 100644 index 00000000..42c822df --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/formatting_request_trailing_newline_input_options.yaml @@ -0,0 +1,69 @@ +# Verify formatting request newline options when the source file has +# trailing newlines. 
+steps: +- step: create + files: + main.jsonnet: "{a:1}\n\n" + +- step: diagnosticsSettled + +- step: requestFormatting + as: trimFinal + file: main.jsonnet + trim_final_newlines: true + +- step: expectFormatting + request: trimFinal + result: + - range: + start: + line: 0 + character: 0 + end: + line: 2 + character: 0 + newText: |- + { + a: 1, + } + +- step: requestFormatting + as: keepFinal + file: main.jsonnet + trim_final_newlines: false + +- step: expectFormatting + request: keepFinal + result: + - range: + start: + line: 0 + character: 0 + end: + line: 2 + character: 0 + newText: | + { + a: 1, + } + +- step: requestFormatting + as: trimThenInsertOne + file: main.jsonnet + trim_final_newlines: true + insert_final_newline: true + +- step: expectFormatting + request: trimThenInsertOne + result: + - range: + start: + line: 0 + character: 0 + end: + line: 2 + character: 0 + newText: | + { + a: 1, + } diff --git a/docs/lsp/README.md b/docs/lsp/README.md index 7b6bf342..f0f988dc 100644 --- a/docs/lsp/README.md +++ b/docs/lsp/README.md @@ -134,18 +134,18 @@ Configuration is accepted from: Top-level options: -| Key | Type | Default | Accepted values and notes | -| -------------------------- | ---------------- | ------------------------------------------------------------------ | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `jpath` | `string[]` | `[]` | Import search paths (same idea as `jsonnet -J`). | -| `ext_vars` | `object` | `{}` | Map of external string vars. Aliases: `extVars`, `ext_vars`. | -| `ext_code` | `object` | `{}` | Map of external code vars. Aliases: `extCode`, `ext_code`. | -| `enable_eval_diagnostics` | `boolean` | `false` | Aliases: `enableEvalDiagnostics`, `eval`. | -| `enable_lint_diagnostics` | `boolean` | `false` | Aliases: `enableLintDiagnostics`, `lint`. 
| -| `resolve_paths_with_tanka` | `string` | `"auto"` | Modes: "false", "auto", "true". Booleans are also accepted (`false` -> "false", `true` -> "true"). Aliases: `resolvePathsWithTanka`, `tankaMode`. | -| `formatting` | `object` | `{}` | Formatting options, see below. | -| `code_actions` | `object` | `{ "removeUnused": "all", "removeUnusedComments": "none" }` | Alias: `codeActions`. | -| `inlay_hints` | `object` | `{ "local": "all", "objectLocal": "all", "objectMembers": "off", "functionParameters": "off", "anonymousFunctionReturns": "off", "callArguments": "off", "comprehensions": "off", "destructuring": "off" }` | Alias: `inlayHints`. | -| `log_level` | `string \| null` | `null` | Alias: `logLevel`. Intended values are standard log levels (for example `error`, `warn`, `info`, `debug`). Currently this value is parsed/stored but runtime logging is still controlled by process startup flags/env. | +| Key | Type | Default | Accepted values and notes | +| --- | --- | --- | --- | +| `jpath` | `string[]` | `[]` | Import search paths (same idea as `jsonnet -J`). | +| `ext_vars` | `object` | `{}` | Map of external string vars. Aliases: `extVars`, `ext_vars`. | +| `ext_code` | `object` | `{}` | Map of external code vars. Aliases: `extCode`, `ext_code`. | +| `enable_eval_diagnostics` | `boolean` | `false` | Aliases: `enableEvalDiagnostics`, `eval`. | +| `enable_lint_diagnostics` | `boolean` | `false` | Aliases: `enableLintDiagnostics`, `lint`. | +| `resolve_paths_with_tanka` | `string` | `"auto"` | Modes: "false", "auto", "true". Booleans are also accepted (`false` -> "false", `true` -> "true"). Aliases: `resolvePathsWithTanka`, `tankaMode`. | +| `formatting` | `object` | `{}` | Formatting options, see below. | +| `code_actions` | `object` | `{ "removeUnused": "all", "removeUnusedComments": "none" }` | Alias: `codeActions`. 
| +| `inlay_hints` | `object` | `{ "local": "all", "objectLocal": "all", "objectMembers": "off", "functionParameters": "off", "anonymousFunctionReturns": "off", "callArguments": "off", "comprehensions": "off", "destructuring": "off" }` | Alias: `inlayHints`. | +| `log_level` | `string \| null` | `null` | Alias: `logLevel`. Intended values are standard log levels (for example `error`, `warn`, `info`, `debug`). Currently this value is parsed/stored but runtime logging is still controlled by process startup flags/env. | ### `code_actions` @@ -156,11 +156,11 @@ Top-level options: ### `inlay_hints` -| Key | Default | Accepted values | -| --------------- | ------- | -------------------------------------- | -| `local` | `all` | `off`, `variables`, `functions`, `all` | -| `objectLocal` | `all` | `off`, `variables`, `functions`, `all` | -| `objectMembers` | `off` | `off`, `fields`, `methods`, `all` | +| Key | Default | Accepted values | +| --- | --- | --- | +| `local` | `all` | `off`, `variables`, `functions`, `all` | +| `objectLocal` | `all` | `off`, `variables`, `functions`, `all` | +| `objectMembers` | `off` | `off`, `fields`, `methods`, `all` | | `functionParameters` | `off` | `off`, `all` | | `anonymousFunctionReturns` | `off` | `off`, `all` | | `callArguments` | `off` | `off`, `all` | @@ -177,9 +177,13 @@ Notes: ### `formatting` If a field is omitted, the formatter default is used. -For `textDocument/formatting`, the request's `FormattingOptions` control indentation: -`insert_spaces=false` forces tabs (`indent=0`), and `insert_spaces=true` uses -`tab_size` (clamped to `u8` range). 
+For `textDocument/formatting`, the request's `FormattingOptions` are applied per request: +- indentation: `insert_spaces=false` forces tabs (`indent=0`), and + `insert_spaces=true` uses `tab_size` (clamped to `u8` range) +- trailing whitespace: `trim_trailing_whitespace=true` removes line-end spaces/tabs +- final newline handling: + `trim_final_newlines=true` removes trailing newline characters, then + `insert_final_newline=true|false` enforces presence/absence of one final newline Formatting fields: From f2fc8041528679ad8d468d2c2c28a2dd053bbcc2 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Fri, 27 Feb 2026 12:00:19 +0000 Subject: [PATCH 187/210] docs(lsp): remove request formatting-option behavior notes Drop README language that promised how the server applies `FormattingOptions` on formatting requests. This was over-specific for user-facing docs and implied a behavioral contract users should not need spelled out. The section now keeps the configuration field reference and removes per-request option mapping details. --- docs/lsp/README.md | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/docs/lsp/README.md b/docs/lsp/README.md index f0f988dc..d5dab068 100644 --- a/docs/lsp/README.md +++ b/docs/lsp/README.md @@ -30,7 +30,8 @@ Advertised LSP features: `,`), `rename` (with `prepareRename`), `references`. - In-editor metadata: `inlayHint`, `semanticTokens/full`, `semanticTokens/range`, `codeLens` (+ `codeLens/resolve`). -- Actions: `codeAction` (`quickfix`, `source.fixAll`), `formatting`. +- Actions: `codeAction` (`quickfix`, `source.fixAll`), `formatting`, + `rangeFormatting`. When the client supports dynamic watched-file registration, the server also registers watchers for: @@ -177,13 +178,9 @@ Notes: ### `formatting` If a field is omitted, the formatter default is used. 
-For `textDocument/formatting`, the request's `FormattingOptions` are applied per request: -- indentation: `insert_spaces=false` forces tabs (`indent=0`), and - `insert_spaces=true` uses `tab_size` (clamped to `u8` range) -- trailing whitespace: `trim_trailing_whitespace=true` removes line-end spaces/tabs -- final newline handling: - `trim_final_newlines=true` removes trailing newline characters, then - `insert_final_newline=true|false` enforces presence/absence of one final newline + +Range formatting is conservative: it returns edits only when formatter changes +are fully contained in the requested range. Formatting fields: From e6683d90ef225804b75d7c266ef1d63b52b315f1 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Fri, 27 Feb 2026 12:01:49 +0000 Subject: [PATCH 188/210] feat(lsp): add textDocument/rangeFormatting support Add end-to-end range formatting support across formatter, handlers, server wiring, and scenario/integration test harnesses. Core behavior: - add `jrsonnet-fmt::format_code_range` and `ByteRangeEdit` so range edit computation lives in the formatter crate - compute a minimal changed byte span from full formatted output and return edits only when changes are fully contained in the requested range - keep full-document formatting behavior unchanged LSP wiring: - advertise `documentRangeFormattingProvider` - route `textDocument/rangeFormatting` through request dispatch and async handler plumbing - apply the same request option merge path used by document formatting (`insertSpaces`/`tabSize`, trimming options) Scenario and test coverage: - extend scenario DSL with `requestRangeFormatting` and `expectRangeFormatting` - add focused runner fixtures for: - edits contained in requested range - no-op when formatter changes escape requested range - request option behavior for tabs/spaces and tab size - add integration tests for range formatting responses and lifecycle capability assertions - update missing-step coverage scenario to include range formatting 
Docs: - update architecture/handlers docs to include range formatting request routing and capability coverage --- cmds/jrsonnet-fmt/src/api.rs | 138 +++++++++++++++++- cmds/jrsonnet-fmt/src/lib.rs | 2 +- .../src/formatting/dispatch.rs | 133 +++++++++++++++-- .../src/formatting/mod.rs | 5 +- crates/jrsonnet-lsp-handlers/src/lib.rs | 4 +- .../jrsonnet-lsp-scenario/src/scenario/mod.rs | 19 ++- .../src/scenario/request_steps.rs | 44 ++++++ .../src/scenario_runner/expectation_steps.rs | 13 +- .../src/scenario_runner/request_steps.rs | 40 +++-- .../src/scenario_runner/runner.rs | 6 + .../src/scenario_script/compile.rs | 65 ++++++++- .../src/scenario_script/registry.rs | 2 + .../src/server/async_requests/formatting.rs | 16 +- .../jrsonnet-lsp/src/server/initialization.rs | 1 + .../src/server/request_dispatch.rs | 14 +- .../src/server/requests/async_handlers/mod.rs | 1 + .../async_handlers/range_formatting.rs | 10 ++ crates/jrsonnet-lsp/tests/integration_test.rs | 48 +++++- .../tests/integration_test/formatting.rs | 94 ++++++++++++ .../tests/integration_test/lifecycle.rs | 15 +- ...ng_range_outside_requested_range_noop.yaml | 21 +++ ...ormatting_range_request_options_apply.yaml | 27 ++++ ...rmatting_range_within_requested_range.yaml | 29 ++++ .../runner/missing_step_coverage.yaml | 11 +- docs/lsp/ARCHITECTURE.md | 3 +- docs/lsp/HANDLERS.md | 3 + 26 files changed, 708 insertions(+), 56 deletions(-) create mode 100644 crates/jrsonnet-lsp/src/server/requests/async_handlers/range_formatting.rs create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/formatting_range_outside_requested_range_noop.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/formatting_range_request_options_apply.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/formatting_range_within_requested_range.yaml diff --git a/cmds/jrsonnet-fmt/src/api.rs b/cmds/jrsonnet-fmt/src/api.rs index 3962ce96..59ef9722 100644 --- a/cmds/jrsonnet-fmt/src/api.rs +++ 
b/cmds/jrsonnet-fmt/src/api.rs @@ -4,6 +4,13 @@ use crate::{FormatContext, FormatOptions, Printable}; const CONVERGENCE_LIMIT: usize = 10; +/// A byte-range text edit produced by formatter helpers. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ByteRangeEdit { + pub range: std::ops::Range, + pub new_text: String, +} + fn trim_trailing_whitespace(text: &str) -> String { let mut trimmed = String::with_capacity(text.len()); for segment in text.split_inclusive('\n') { @@ -51,6 +58,87 @@ pub fn format_code(input: &str, opts: &FormatOptions) -> Option { Some(formatted) } +fn common_prefix_len(left: &str, right: &str) -> usize { + let mut prefix = 0; + let mut left_chars = left.char_indices(); + let mut right_chars = right.char_indices(); + + loop { + match (left_chars.next(), right_chars.next()) { + (Some((idx, left_char)), Some((_, right_char))) if left_char == right_char => { + prefix = idx + left_char.len_utf8(); + } + _ => return prefix, + } + } +} + +fn common_suffix_len(left: &str, right: &str) -> usize { + let mut suffix = 0; + let mut left_chars = left.char_indices().rev(); + let mut right_chars = right.char_indices().rev(); + + loop { + match (left_chars.next(), right_chars.next()) { + (Some((left_idx, left_char)), Some((right_idx, right_char))) + if left_char == right_char => + { + let left_suffix = left.len().saturating_sub(left_idx); + let right_suffix = right.len().saturating_sub(right_idx); + suffix = left_suffix.min(right_suffix); + } + _ => return suffix, + } + } +} + +fn minimal_change_ranges( + before: &str, + after: &str, +) -> Option<(std::ops::Range, std::ops::Range)> { + if before == after { + return None; + } + + let prefix = common_prefix_len(before, after); + let suffix = common_suffix_len(&before[prefix..], &after[prefix..]); + let before_end = before.len().saturating_sub(suffix); + let after_end = after.len().saturating_sub(suffix); + Some((prefix..before_end, prefix..after_end)) +} + +/// Format Jsonnet source and return a constrained 
range edit. +/// +/// This runs full-document formatting and computes the minimal changed byte +/// range. An edit is returned only when the formatter's changes are fully +/// inside `requested_range`. +/// +/// Returns `None` when parsing fails. +#[must_use] +pub fn format_code_range( + input: &str, + requested_range: std::ops::Range, + opts: &FormatOptions, +) -> Option> { + if requested_range.start > requested_range.end || requested_range.end > input.len() { + return Some(Vec::new()); + } + + let formatted = format_code(input, opts)?; + let Some((old_range, new_range)) = minimal_change_ranges(input, &formatted) else { + return Some(Vec::new()); + }; + + if old_range.start < requested_range.start || old_range.end > requested_range.end { + return Some(Vec::new()); + } + + Some(vec![ByteRangeEdit { + range: old_range, + new_text: formatted[new_range].to_string(), + }]) +} + fn format_once(input: &str, opts: &FormatOptions) -> Option { let (parsed, errors) = jrsonnet_rowan_parser::parse(input); if !errors.is_empty() { @@ -76,7 +164,9 @@ fn format_once(input: &str, opts: &FormatOptions) -> Option { #[cfg(test)] mod tests { - use super::trim_trailing_whitespace; + use super::{ + format_code_range, minimal_change_ranges, trim_trailing_whitespace, ByteRangeEdit, + }; #[test] fn test_trim_trailing_whitespace_removes_spaces_and_tabs_at_line_end() { @@ -94,4 +184,50 @@ mod tests { Some("{\n\ta: 1,\n}".to_string()) ); } + + #[test] + fn test_format_code_range_applies_edit_when_change_is_inside_range() { + let input = "{\n a: 1,\n b:2,\n}\n"; + let edits = format_code_range( + input, + 11..17, + &crate::FormatOptions { + indent: 2, + ..crate::FormatOptions::default() + }, + ) + .expect("format"); + assert_eq!( + edits, + vec![ByteRangeEdit { + range: 14..14, + new_text: " ".to_string(), + }] + ); + } + + #[test] + fn test_format_code_range_returns_no_edits_when_change_is_outside_range() { + let input = "{\n a:1,\n b:2,\n}\n"; + let edits = format_code_range( + input, + 0..9, 
+ &crate::FormatOptions { + indent: 2, + ..crate::FormatOptions::default() + }, + ) + .expect("format"); + assert!(edits.is_empty()); + } + + #[test] + fn test_minimal_change_ranges_respects_utf8_boundaries() { + let before = "{x: 'é'}\n"; + let after = "{x: 'ê'}\n"; + let (old_range, new_range) = + minimal_change_ranges(before, after).expect("should detect change"); + assert_eq!(old_range, 5..7); + assert_eq!(&after[new_range], "ê"); + } } diff --git a/cmds/jrsonnet-fmt/src/lib.rs b/cmds/jrsonnet-fmt/src/lib.rs index 8a9ad2e7..eb005903 100644 --- a/cmds/jrsonnet-fmt/src/lib.rs +++ b/cmds/jrsonnet-fmt/src/lib.rs @@ -7,6 +7,6 @@ mod context; mod macros; mod printable; -pub use api::format_code; +pub use api::{format_code, format_code_range, ByteRangeEdit}; pub use context::{CommentStyle, FormatContext, FormatOptions, StringStyle}; pub use printable::Printable; diff --git a/crates/jrsonnet-lsp-handlers/src/formatting/dispatch.rs b/crates/jrsonnet-lsp-handlers/src/formatting/dispatch.rs index 726cb35e..1d9cb257 100644 --- a/crates/jrsonnet-lsp-handlers/src/formatting/dispatch.rs +++ b/crates/jrsonnet-lsp-handlers/src/formatting/dispatch.rs @@ -1,4 +1,5 @@ -use jrsonnet_lsp_document::{ByteOffset, LineIndex}; +use jrsonnet_fmt::format_code_range; +use jrsonnet_lsp_document::{ByteOffset, LineIndex, LspRange}; use lsp_types::{Position, Range, TextEdit}; use tracing::debug; @@ -19,6 +20,16 @@ fn full_document_range(text: &str) -> Range { } } +fn try_format(text: &str, config: &FormattingConfig) -> Option { + match run_formatter(text, config) { + Ok(formatted) => Some(formatted), + Err(err) => { + debug!("Formatting unavailable: {err}"); + None + } + } +} + /// Format a Jsonnet document with default configuration. /// /// Returns a list of text edits to apply to the document. @@ -34,14 +45,7 @@ pub fn format_document(text: &str) -> Option> { /// On error, returns None. 
#[must_use] pub fn format_document_with_config(text: &str, config: &FormattingConfig) -> Option> { - // Try to run the formatter - let formatted = match run_formatter(text, config) { - Ok(formatted) => formatted, - Err(err) => { - debug!("Formatting unavailable: {err}"); - return None; - } - }; + let formatted = try_format(text, config)?; if formatted == text { // No changes needed @@ -54,3 +58,114 @@ pub fn format_document_with_config(text: &str, config: &FormattingConfig) -> Opt new_text: formatted, }]) } + +/// Format a Jsonnet document range with default configuration. +/// +/// Returns a list of text edits constrained to the requested range. +/// On error, returns None. +#[must_use] +pub fn format_document_range(text: &str, range: Range) -> Option> { + format_document_range_with_config(text, range, &FormattingConfig::default()) +} + +/// Format a Jsonnet document range with the given configuration. +/// +/// Returns range edits only when formatter changes are fully inside `range`. +/// On error, returns None. 
+#[must_use] +pub fn format_document_range_with_config( + text: &str, + range: Range, + config: &FormattingConfig, +) -> Option> { + let line_index = LineIndex::new(text); + let requested = line_index.text_range(LspRange::from(range), text)?; + let requested_range = usize::from(ByteOffset::from(requested.start())) + ..usize::from(ByteOffset::from(requested.end())); + + let edits = if let Some(edits) = format_code_range(text, requested_range, config) { + edits + } else { + debug!("Formatting unavailable: formatter failed for range request"); + return None; + }; + + edits + .into_iter() + .map(|edit| { + let edit_range = rowan::TextRange::new( + ByteOffset::from(edit.range.start).into(), + ByteOffset::from(edit.range.end).into(), + ); + Some(TextEdit { + range: line_index.range(edit_range, text)?.into(), + new_text: edit.new_text, + }) + }) + .collect() +} + +#[cfg(test)] +mod tests { + use lsp_types::{Position, Range}; + + use super::{format_document_range_with_config, FormattingConfig}; + + #[test] + fn test_range_formatting_applies_edit_within_requested_range() { + let text = "{\n a: 1,\n b:2,\n}\n"; + let config = FormattingConfig { + indent: 2, + ..FormattingConfig::default() + }; + let range = Range { + start: Position { + line: 2, + character: 0, + }, + end: Position { + line: 2, + character: 6, + }, + }; + let edits = format_document_range_with_config(text, range, &config) + .expect("range formatting should succeed"); + assert_eq!(edits.len(), 1); + assert_eq!( + edits[0].range, + Range { + start: Position { + line: 2, + character: 4, + }, + end: Position { + line: 2, + character: 4, + }, + } + ); + assert_eq!(edits[0].new_text, " "); + } + + #[test] + fn test_range_formatting_returns_no_edits_when_changes_escape_requested_range() { + let text = "{\n a:1,\n b:2,\n}\n"; + let config = FormattingConfig { + indent: 2, + ..FormattingConfig::default() + }; + let range = Range { + start: Position { + line: 1, + character: 0, + }, + end: Position { + line: 1, + 
character: 6, + }, + }; + let edits = format_document_range_with_config(text, range, &config) + .expect("range formatting should succeed"); + assert!(edits.is_empty()); + } +} diff --git a/crates/jrsonnet-lsp-handlers/src/formatting/mod.rs b/crates/jrsonnet-lsp-handlers/src/formatting/mod.rs index b9a7314c..c858b1c5 100644 --- a/crates/jrsonnet-lsp-handlers/src/formatting/mod.rs +++ b/crates/jrsonnet-lsp-handlers/src/formatting/mod.rs @@ -5,7 +5,10 @@ mod dispatch; mod engine; -pub use dispatch::{format_document, format_document_with_config}; +pub use dispatch::{ + format_document, format_document_range, format_document_range_with_config, + format_document_with_config, +}; pub use jrsonnet_fmt::{ CommentStyle as FormattingCommentStyle, FormatOptions as FormattingConfig, StringStyle as FormattingStringStyle, diff --git a/crates/jrsonnet-lsp-handlers/src/lib.rs b/crates/jrsonnet-lsp-handlers/src/lib.rs index f76bcff0..9aa39749 100644 --- a/crates/jrsonnet-lsp-handlers/src/lib.rs +++ b/crates/jrsonnet-lsp-handlers/src/lib.rs @@ -26,8 +26,8 @@ pub use definition::{ }; pub use document_highlight::document_highlights; pub use formatting::{ - format_document, format_document_with_config, FormattingCommentStyle, FormattingConfig, - FormattingStringStyle, + format_document, format_document_range, format_document_range_with_config, + format_document_with_config, FormattingCommentStyle, FormattingConfig, FormattingStringStyle, }; pub use hover::{hover, hover_with_import_field_type}; pub use inlay_hint::{ diff --git a/crates/jrsonnet-lsp-scenario/src/scenario/mod.rs b/crates/jrsonnet-lsp-scenario/src/scenario/mod.rs index 595f9865..7aa5a328 100644 --- a/crates/jrsonnet-lsp-scenario/src/scenario/mod.rs +++ b/crates/jrsonnet-lsp-scenario/src/scenario/mod.rs @@ -48,15 +48,16 @@ pub use request_steps::{ ExpectCodeActionStep, ExpectCodeLensStep, ExpectCompletionStep, ExpectCustomStep, ExpectDeclarationStep, ExpectDefinitionStep, ExpectDocumentSymbolStep, ExpectExecuteCodeLensStep, 
ExpectExecuteCommandStep, ExpectFormattingStep, ExpectHoverStep, - ExpectInlayHintsStep, ExpectPrepareRenameStep, ExpectReferencesStep, ExpectRenameStep, - ExpectSemanticTokensFullStep, ExpectSemanticTokensRangeStep, ExpectSignatureHelpStep, - ExpectTypeDefinitionStep, ExpectWorkspaceSymbolStep, HoverSectionExpectation, - RequestCodeActionStep, RequestCodeLensStep, RequestCompletionStep, RequestCustomStep, - RequestDeclarationStep, RequestDefinitionStep, RequestDocumentSymbolStep, + ExpectInlayHintsStep, ExpectPrepareRenameStep, ExpectRangeFormattingStep, ExpectReferencesStep, + ExpectRenameStep, ExpectSemanticTokensFullStep, ExpectSemanticTokensRangeStep, + ExpectSignatureHelpStep, ExpectTypeDefinitionStep, ExpectWorkspaceSymbolStep, + HoverSectionExpectation, RequestCodeActionStep, RequestCodeLensStep, RequestCompletionStep, + RequestCustomStep, RequestDeclarationStep, RequestDefinitionStep, RequestDocumentSymbolStep, RequestExecuteCodeLensStep, RequestExecuteCommandStep, RequestFormattingStep, RequestHoverStep, - RequestInlayHintsStep, RequestPrepareRenameStep, RequestReferencesStep, RequestRenameStep, - RequestSemanticTokensFullStep, RequestSemanticTokensRangeStep, RequestSignatureHelpStep, - RequestTypeDefinitionStep, RequestWorkspaceSymbolStep, + RequestInlayHintsStep, RequestPrepareRenameStep, RequestRangeFormattingStep, + RequestReferencesStep, RequestRenameStep, RequestSemanticTokensFullStep, + RequestSemanticTokensRangeStep, RequestSignatureHelpStep, RequestTypeDefinitionStep, + RequestWorkspaceSymbolStep, }; use serde::Deserialize; pub use workspace_steps::{ @@ -111,6 +112,8 @@ pub enum ScenarioStep { ExpectCompletion(ExpectCompletionStep), RequestFormatting(RequestFormattingStep), ExpectFormatting(ExpectFormattingStep), + RequestRangeFormatting(RequestRangeFormattingStep), + ExpectRangeFormatting(ExpectRangeFormattingStep), RequestSemanticTokensFull(RequestSemanticTokensFullStep), ExpectSemanticTokensFull(ExpectSemanticTokensFullStep), 
RequestSemanticTokensRange(RequestSemanticTokensRangeStep), diff --git a/crates/jrsonnet-lsp-scenario/src/scenario/request_steps.rs b/crates/jrsonnet-lsp-scenario/src/scenario/request_steps.rs index ba2b0cbf..62ef0575 100644 --- a/crates/jrsonnet-lsp-scenario/src/scenario/request_steps.rs +++ b/crates/jrsonnet-lsp-scenario/src/scenario/request_steps.rs @@ -805,6 +805,50 @@ pub struct ExpectFormattingStep { pub result: Option>, } +/// `textDocument/rangeFormatting` request. +/// +/// Requests formatting edits for a selected range in a document. +/// +/// Example: +/// ```rust +/// # use jrsonnet_lsp_scenario::scenario::doctest_assertions::assert_yaml_scenario_runs_without_error; +/// let yaml = r#" +/// steps: +/// - step: open +/// file: main.jsonnet +/// text: "{ [[f:x]]:1 }" +/// - step: requestRangeFormatting +/// as: formattingRange +/// file: main.jsonnet +/// range: f +/// - step: expectRangeFormatting +/// request: formattingRange +/// result: null +/// "#; +/// +/// assert_yaml_scenario_runs_without_error(yaml); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct RequestRangeFormattingStep { + pub(crate) id: i32, + pub uri: String, + pub range: Range, + pub tab_size: u32, + pub insert_spaces: bool, + pub trim_trailing_whitespace: Option, + pub insert_final_newline: Option, + pub trim_final_newlines: Option, +} + +/// Expected `textDocument/rangeFormatting` response. +/// +/// Asserts the response for a preceding `requestRangeFormatting`. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ExpectRangeFormattingStep { + pub(crate) id: i32, + pub result: Option>, +} + /// `textDocument/semanticTokens/full` request. /// /// Requests semantic tokens for the whole document. 
diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_runner/expectation_steps.rs b/crates/jrsonnet-lsp-scenario/src/scenario_runner/expectation_steps.rs index 0f3e576c..b9cc5dd1 100644 --- a/crates/jrsonnet-lsp-scenario/src/scenario_runner/expectation_steps.rs +++ b/crates/jrsonnet-lsp-scenario/src/scenario_runner/expectation_steps.rs @@ -14,9 +14,9 @@ use crate::scenario::{ ExpectCustomStep, ExpectDeclarationStep, ExpectDefinitionStep, ExpectDiagnosticsStep, ExpectDocumentSymbolStep, ExpectExecuteCodeLensStep, ExpectExecuteCommandStep, ExpectFormattingStep, ExpectHoverStep, ExpectInlayHintsStep, ExpectPrepareRenameStep, - ExpectReferencesStep, ExpectRenameStep, ExpectSemanticTokensFullStep, - ExpectSemanticTokensRangeStep, ExpectSignatureHelpStep, ExpectTypeDefinitionStep, - ExpectWorkspaceSymbolStep, + ExpectRangeFormattingStep, ExpectReferencesStep, ExpectRenameStep, + ExpectSemanticTokensFullStep, ExpectSemanticTokensRangeStep, ExpectSignatureHelpStep, + ExpectTypeDefinitionStep, ExpectWorkspaceSymbolStep, }; #[derive(Debug, Error)] @@ -204,6 +204,13 @@ impl ScenarioRunner { self.expect_typed_response("formatting", step.id, &step.result) } + pub(super) fn step_expect_range_formatting( + &mut self, + step: &ExpectRangeFormattingStep, + ) -> RunnerResult<()> { + self.expect_typed_response("rangeFormatting", step.id, &step.result) + } + pub(super) fn step_expect_semantic_tokens_full( &mut self, step: &ExpectSemanticTokensFullStep, diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_runner/request_steps.rs b/crates/jrsonnet-lsp-scenario/src/scenario_runner/request_steps.rs index 0fc97506..833251ce 100644 --- a/crates/jrsonnet-lsp-scenario/src/scenario_runner/request_steps.rs +++ b/crates/jrsonnet-lsp-scenario/src/scenario_runner/request_steps.rs @@ -4,16 +4,16 @@ use lsp_types::{ request::{ CodeActionRequest, CodeLensRequest, Completion, DocumentSymbolRequest, ExecuteCommand, Formatting, GotoDeclaration, GotoDefinition, GotoTypeDefinition, HoverRequest, - 
InlayHintRequest, PrepareRenameRequest, References, Rename, Request as _, + InlayHintRequest, PrepareRenameRequest, RangeFormatting, References, Rename, Request as _, SemanticTokensFullRequest, SemanticTokensRangeRequest, SignatureHelpRequest, WorkspaceSymbolRequest, }, CodeActionContext, CodeActionParams, CodeLensParams, DocumentFormattingParams, - DocumentSymbolParams, ExecuteCommandParams, FormattingOptions, GotoDefinitionParams, - HoverParams, InlayHintParams, PartialResultParams, ReferenceContext, ReferenceParams, - RenameParams, SemanticTokensParams, SemanticTokensRangeParams, SignatureHelpParams, - TextDocumentIdentifier, TextDocumentPositionParams, WorkDoneProgressParams, - WorkspaceSymbolParams, + DocumentRangeFormattingParams, DocumentSymbolParams, ExecuteCommandParams, FormattingOptions, + GotoDefinitionParams, HoverParams, InlayHintParams, PartialResultParams, ReferenceContext, + ReferenceParams, RenameParams, SemanticTokensParams, SemanticTokensRangeParams, + SignatureHelpParams, TextDocumentIdentifier, TextDocumentPositionParams, + WorkDoneProgressParams, WorkspaceSymbolParams, }; use thiserror::Error; @@ -26,9 +26,10 @@ use crate::scenario::{ RequestCodeActionStep, RequestCodeLensStep, RequestCompletionStep, RequestCustomStep, RequestDeclarationStep, RequestDefinitionStep, RequestDocumentSymbolStep, RequestExecuteCodeLensStep, RequestExecuteCommandStep, RequestFormattingStep, RequestHoverStep, - RequestInlayHintsStep, RequestPrepareRenameStep, RequestReferencesStep, RequestRenameStep, - RequestSemanticTokensFullStep, RequestSemanticTokensRangeStep, RequestSignatureHelpStep, - RequestTypeDefinitionStep, RequestWorkspaceSymbolStep, + RequestInlayHintsStep, RequestPrepareRenameStep, RequestRangeFormattingStep, + RequestReferencesStep, RequestRenameStep, RequestSemanticTokensFullStep, + RequestSemanticTokensRangeStep, RequestSignatureHelpStep, RequestTypeDefinitionStep, + RequestWorkspaceSymbolStep, }; #[derive(Debug, Error)] @@ -207,6 +208,27 @@ impl 
ScenarioRunner { self.send_request_with_params(step.id, Formatting::METHOD, params, "formatting") } + pub(super) fn step_request_range_formatting( + &self, + step: &RequestRangeFormattingStep, + ) -> RunnerResult<()> { + let uri = parse_uri(&step.uri, "rangeFormatting")?; + let params = DocumentRangeFormattingParams { + text_document: TextDocumentIdentifier { uri }, + range: step.range, + options: FormattingOptions { + tab_size: step.tab_size, + insert_spaces: step.insert_spaces, + properties: HashMap::new(), + trim_trailing_whitespace: step.trim_trailing_whitespace, + insert_final_newline: step.insert_final_newline, + trim_final_newlines: step.trim_final_newlines, + }, + work_done_progress_params: WorkDoneProgressParams::default(), + }; + self.send_request_with_params(step.id, RangeFormatting::METHOD, params, "rangeFormatting") + } + pub(super) fn step_request_semantic_tokens_full( &self, step: &RequestSemanticTokensFullStep, diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_runner/runner.rs b/crates/jrsonnet-lsp-scenario/src/scenario_runner/runner.rs index 5c1b2e9e..ee9d1fc4 100644 --- a/crates/jrsonnet-lsp-scenario/src/scenario_runner/runner.rs +++ b/crates/jrsonnet-lsp-scenario/src/scenario_runner/runner.rs @@ -158,6 +158,12 @@ impl ScenarioRunner { ScenarioStep::ExpectCompletion(expectation) => self.step_expect_completion(expectation), ScenarioStep::RequestFormatting(request) => self.step_request_formatting(request), ScenarioStep::ExpectFormatting(expectation) => self.step_expect_formatting(expectation), + ScenarioStep::RequestRangeFormatting(request) => { + self.step_request_range_formatting(request) + } + ScenarioStep::ExpectRangeFormatting(expectation) => { + self.step_expect_range_formatting(expectation) + } ScenarioStep::RequestSemanticTokensFull(request) => { self.step_request_semantic_tokens_full(request) } diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_script/compile.rs b/crates/jrsonnet-lsp-scenario/src/scenario_script/compile.rs index 
1b4d002d..76dd9f49 100644 --- a/crates/jrsonnet-lsp-scenario/src/scenario_script/compile.rs +++ b/crates/jrsonnet-lsp-scenario/src/scenario_script/compile.rs @@ -35,13 +35,14 @@ use crate::scenario::{ ExpectCustomStep, ExpectDeclarationStep, ExpectDefinitionStep, ExpectDiagnosticsStep, ExpectDocumentSymbolStep, ExpectExecuteCodeLensStep, ExpectExecuteCommandStep, ExpectFormattingStep, ExpectHoverStep, ExpectInlayHintsStep, ExpectPrepareRenameStep, - ExpectReferencesStep, ExpectRenameStep, ExpectSemanticTokensFullStep, - ExpectSemanticTokensRangeStep, ExpectSignatureHelpStep, ExpectTypeDefinitionStep, - ExpectWorkspaceSymbolStep, HoverSectionExpectation, NotifyWatchedFilesStep, OpenStep, - RequestCodeActionStep, RequestCodeLensStep, RequestCompletionStep, RequestCustomStep, - RequestDeclarationStep, RequestDefinitionStep, RequestDocumentSymbolStep, - RequestExecuteCodeLensStep, RequestExecuteCommandStep, RequestFormattingStep, RequestHoverStep, - RequestInlayHintsStep, RequestPrepareRenameStep, RequestReferencesStep, RequestRenameStep, + ExpectRangeFormattingStep, ExpectReferencesStep, ExpectRenameStep, + ExpectSemanticTokensFullStep, ExpectSemanticTokensRangeStep, ExpectSignatureHelpStep, + ExpectTypeDefinitionStep, ExpectWorkspaceSymbolStep, HoverSectionExpectation, + NotifyWatchedFilesStep, OpenStep, RequestCodeActionStep, RequestCodeLensStep, + RequestCompletionStep, RequestCustomStep, RequestDeclarationStep, RequestDefinitionStep, + RequestDocumentSymbolStep, RequestExecuteCodeLensStep, RequestExecuteCommandStep, + RequestFormattingStep, RequestHoverStep, RequestInlayHintsStep, RequestPrepareRenameStep, + RequestRangeFormattingStep, RequestReferencesStep, RequestRenameStep, RequestSemanticTokensFullStep, RequestSemanticTokensRangeStep, RequestSignatureHelpStep, RequestTypeDefinitionStep, RequestWorkspaceSymbolStep, SaveStep, Scenario, ScenarioFileChangeType, ScenarioStep, WatchedFileChangeStep, WriteFileStep, @@ -531,6 +532,34 @@ impl ScenarioScript { result: 
step.result, })] } + ScenarioScriptStep::RequestRangeFormatting(step) => { + vec![ScenarioStep::RequestRangeFormatting( + RequestRangeFormattingStep { + id: registry + .allocate(RequestKind::RangeFormatting, step.request_name)?, + uri: file_uri(base_dir, &step.file), + range: marker_store.resolve_range( + &step.file, + step.range, + "requestRangeFormatting", + )?, + tab_size: step.tab_size, + insert_spaces: step.insert_spaces, + trim_trailing_whitespace: step.trim_trailing_whitespace, + insert_final_newline: step.insert_final_newline, + trim_final_newlines: step.trim_final_newlines, + }, + )] + } + ScenarioScriptStep::ExpectRangeFormatting(step) => { + vec![ScenarioStep::ExpectRangeFormatting( + ExpectRangeFormattingStep { + id: registry + .claim(RequestKind::RangeFormatting, step.request.as_str())?, + result: step.result, + }, + )] + } ScenarioScriptStep::RequestSemanticTokensFull(step) => { let request_id = registry.allocate(RequestKind::SemanticTokensFull, step.request_name)?; @@ -927,6 +956,8 @@ enum ScenarioScriptStep { ExpectCompletion(ExpectCompletionScriptStep), RequestFormatting(RequestFormattingScriptStep), ExpectFormatting(ExpectFormattingScriptStep), + RequestRangeFormatting(RequestRangeFormattingScriptStep), + ExpectRangeFormatting(ExpectFormattingScriptStep), RequestSemanticTokensFull(RequestSemanticTokensFullScriptStep), ExpectSemanticTokensFull(ExpectSemanticTokensFullScriptStep), RequestSemanticTokensRange(RequestSemanticTokensRangeScriptStep), @@ -1237,6 +1268,26 @@ struct RequestFormattingScriptStep { trim_final_newlines: Option, } +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(deny_unknown_fields)] +struct RequestRangeFormattingScriptStep { + #[serde(default, rename = "as")] + request_name: Option, + file: String, + #[serde(flatten)] + range: RangeInput, + #[serde(default = "default_formatting_tab_size")] + tab_size: u32, + #[serde(default = "default_formatting_insert_spaces")] + insert_spaces: bool, + #[serde(default)] + 
trim_trailing_whitespace: Option, + #[serde(default)] + insert_final_newline: Option, + #[serde(default)] + trim_final_newlines: Option, +} + #[derive(Debug, Clone, PartialEq, Eq, Deserialize)] #[serde(deny_unknown_fields)] struct RequestSemanticTokensFullScriptStep { diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_script/registry.rs b/crates/jrsonnet-lsp-scenario/src/scenario_script/registry.rs index 0b86f631..ff28fd1b 100644 --- a/crates/jrsonnet-lsp-scenario/src/scenario_script/registry.rs +++ b/crates/jrsonnet-lsp-scenario/src/scenario_script/registry.rs @@ -25,6 +25,7 @@ pub enum RequestKind { SignatureHelp, Completion, Formatting, + RangeFormatting, SemanticTokensFull, SemanticTokensRange, InlayHints, @@ -77,6 +78,7 @@ impl RequestKind { Self::SignatureHelp => "requestSignatureHelp", Self::Completion => "requestCompletion", Self::Formatting => "requestFormatting", + Self::RangeFormatting => "requestRangeFormatting", Self::SemanticTokensFull => "requestSemanticTokensFull", Self::SemanticTokensRange => "requestSemanticTokensRange", Self::InlayHints => "requestInlayHints", diff --git a/crates/jrsonnet-lsp/src/server/async_requests/formatting.rs b/crates/jrsonnet-lsp/src/server/async_requests/formatting.rs index 2fc3b961..a32baa68 100644 --- a/crates/jrsonnet-lsp/src/server/async_requests/formatting.rs +++ b/crates/jrsonnet-lsp/src/server/async_requests/formatting.rs @@ -1,6 +1,8 @@ use jrsonnet_lsp_document::CanonicalPath; use jrsonnet_lsp_handlers as handlers; -use lsp_types::{DocumentFormattingParams, FormattingOptions, TextEdit}; +use lsp_types::{ + DocumentFormattingParams, DocumentRangeFormattingParams, FormattingOptions, TextEdit, +}; use super::AsyncRequestContext; @@ -57,6 +59,18 @@ impl AsyncRequestContext { Some(edits) } + + pub(crate) fn formatting_range( + &self, + params: &DocumentRangeFormattingParams, + ) -> Option> { + let uri = ¶ms.text_document.uri; + let path = CanonicalPath::from_uri(uri).ok()?; + let doc = self.documents.get(&path)?; 
+ + let config = formatting_config_for_request(&self.config.read().formatting, ¶ms.options); + handlers::format_document_range_with_config(doc.text(), params.range, &config) + } } #[cfg(test)] diff --git a/crates/jrsonnet-lsp/src/server/initialization.rs b/crates/jrsonnet-lsp/src/server/initialization.rs index 3c1cf290..162b9f64 100644 --- a/crates/jrsonnet-lsp/src/server/initialization.rs +++ b/crates/jrsonnet-lsp/src/server/initialization.rs @@ -77,6 +77,7 @@ impl Server { work_done_progress_options: WorkDoneProgressOptions::default(), }), document_formatting_provider: Some(OneOf::Left(true)), + document_range_formatting_provider: Some(OneOf::Left(true)), references_provider: Some(OneOf::Left(true)), workspace_symbol_provider: Some(OneOf::Left(true)), rename_provider: Some(OneOf::Right(lsp_types::RenameOptions { diff --git a/crates/jrsonnet-lsp/src/server/request_dispatch.rs b/crates/jrsonnet-lsp/src/server/request_dispatch.rs index 7b6e4317..76b732b3 100644 --- a/crates/jrsonnet-lsp/src/server/request_dispatch.rs +++ b/crates/jrsonnet-lsp/src/server/request_dispatch.rs @@ -4,8 +4,9 @@ use lsp_types::{ CodeActionRequest, CodeLensRequest, CodeLensResolve, Completion, DocumentHighlightRequest, DocumentSymbolRequest, ExecuteCommand, Formatting, GotoDeclaration, GotoDefinition, GotoImplementation, GotoTypeDefinition, HoverRequest, InlayHintRequest, - PrepareRenameRequest, References, Rename, Request as _, SemanticTokensFullRequest, - SemanticTokensRangeRequest, Shutdown, SignatureHelpRequest, WorkspaceSymbolRequest, + PrepareRenameRequest, RangeFormatting, References, Rename, Request as _, + SemanticTokensFullRequest, SemanticTokensRangeRequest, Shutdown, SignatureHelpRequest, + WorkspaceSymbolRequest, }, ExecuteCommandParams, }; @@ -38,6 +39,7 @@ impl Server { | Rename::METHOD | CodeLensRequest::METHOD | Formatting::METHOD + | RangeFormatting::METHOD | DocumentSymbolRequest::METHOD | DocumentHighlightRequest::METHOD | CodeActionRequest::METHOD @@ -255,6 +257,14 @@ 
impl Server { requests::async_handlers::formatting::handle, ) } + RangeFormatting::METHOD => { + let request = self.inflight_requests.begin::(id); + self.handle_async_typed( + request, + params, + requests::async_handlers::range_formatting::handle, + ) + } DocumentSymbolRequest::METHOD => { let request = self.inflight_requests.begin::(id); self.handle_async_typed( diff --git a/crates/jrsonnet-lsp/src/server/requests/async_handlers/mod.rs b/crates/jrsonnet-lsp/src/server/requests/async_handlers/mod.rs index c8cbb8b0..fa80892f 100644 --- a/crates/jrsonnet-lsp/src/server/requests/async_handlers/mod.rs +++ b/crates/jrsonnet-lsp/src/server/requests/async_handlers/mod.rs @@ -11,6 +11,7 @@ pub(crate) mod goto_type_definition; pub(crate) mod hover; pub(crate) mod inlay_hints; pub(crate) mod prepare_rename; +pub(crate) mod range_formatting; pub(crate) mod references; pub(crate) mod rename; pub(crate) mod semantic_tokens_full; diff --git a/crates/jrsonnet-lsp/src/server/requests/async_handlers/range_formatting.rs b/crates/jrsonnet-lsp/src/server/requests/async_handlers/range_formatting.rs new file mode 100644 index 00000000..6167db71 --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/requests/async_handlers/range_formatting.rs @@ -0,0 +1,10 @@ +use lsp_types::{DocumentRangeFormattingParams, TextEdit}; + +use crate::server::async_requests::AsyncRequestContext; + +pub(crate) fn handle( + context: &AsyncRequestContext, + params: &DocumentRangeFormattingParams, +) -> Option> { + context.formatting_range(params) +} diff --git a/crates/jrsonnet-lsp/tests/integration_test.rs b/crates/jrsonnet-lsp/tests/integration_test.rs index 4fe66900..4092d685 100644 --- a/crates/jrsonnet-lsp/tests/integration_test.rs +++ b/crates/jrsonnet-lsp/tests/integration_test.rs @@ -16,8 +16,9 @@ use lsp_types::{ request::{ CodeActionRequest, CodeLensRequest, CodeLensResolve, DocumentHighlightRequest, ExecuteCommand, Formatting, GotoDeclaration, GotoDefinition, GotoImplementation, - GotoTypeDefinition, 
Initialize, InlayHintRequest, References, RegisterCapability, Rename, - Request as _, SemanticTokensRangeRequest, Shutdown, WorkspaceSymbolRequest, + GotoTypeDefinition, Initialize, InlayHintRequest, RangeFormatting, References, + RegisterCapability, Rename, Request as _, SemanticTokensRangeRequest, Shutdown, + WorkspaceSymbolRequest, }, CancelParams, DidChangeConfigurationParams, DidChangeWatchedFilesClientCapabilities, DidChangeWatchedFilesParams, DidChangeWatchedFilesRegistrationOptions, @@ -488,6 +489,23 @@ fn formatting_request(id: i32, uri: &str, tab_size: u32, insert_spaces: bool) -> formatting_request_with_options(id, uri, tab_size, insert_spaces, None, None, None) } +fn formatting_options( + tab_size: u32, + insert_spaces: bool, + trim_trailing_whitespace: Option, + insert_final_newline: Option, + trim_final_newlines: Option, +) -> lsp_types::FormattingOptions { + lsp_types::FormattingOptions { + tab_size, + insert_spaces, + properties: std::collections::HashMap::new(), + trim_trailing_whitespace, + insert_final_newline, + trim_final_newlines, + } +} + fn formatting_request_with_options( id: i32, uri: &str, @@ -501,14 +519,13 @@ fn formatting_request_with_options( text_document: TextDocumentIdentifier { uri: uri.parse().unwrap(), }, - options: lsp_types::FormattingOptions { + options: formatting_options( tab_size, insert_spaces, - properties: std::collections::HashMap::new(), trim_trailing_whitespace, insert_final_newline, trim_final_newlines, - }, + ), work_done_progress_params: WorkDoneProgressParams::default(), }; Request::new( @@ -518,6 +535,27 @@ fn formatting_request_with_options( ) } +fn range_formatting_request_with_options( + id: i32, + uri: &str, + range: lsp_types::Range, + options: lsp_types::FormattingOptions, +) -> Request { + let params = lsp_types::DocumentRangeFormattingParams { + text_document: TextDocumentIdentifier { + uri: uri.parse().unwrap(), + }, + range, + options, + work_done_progress_params: WorkDoneProgressParams::default(), + 
}; + Request::new( + id.into(), + RangeFormatting::METHOD.to_string(), + serde_json::to_value(params).unwrap(), + ) +} + fn semantic_tokens_range_request( id: i32, uri: &str, diff --git a/crates/jrsonnet-lsp/tests/integration_test/formatting.rs b/crates/jrsonnet-lsp/tests/integration_test/formatting.rs index ba64b06f..8b014413 100644 --- a/crates/jrsonnet-lsp/tests/integration_test/formatting.rs +++ b/crates/jrsonnet-lsp/tests/integration_test/formatting.rs @@ -183,6 +183,33 @@ impl FormatSession { Ok(serde_json::from_value(result)?) } + fn request_range_formatting_with_options( + &mut self, + uri: &str, + range: lsp_types::Range, + options: lsp_types::FormattingOptions, + ) -> Result>> { + let request_id = self.next_id; + self.next_id += 1; + + self.send(Message::Request(range_formatting_request_with_options( + request_id, uri, range, options, + )))?; + + let response = recv_response(&self.client_conn, request_id); + if let Some(error) = response.error { + return Err(FormatTestError::FormattingRequestFailed { + request_id, + error: format!("{error:?}"), + }); + } + + let result = response + .result + .ok_or(FormatTestError::MissingFormattingResult { request_id })?; + Ok(serde_json::from_value(result)?) 
+ } + fn shutdown(mut self) -> Result<()> { let request_id = self.next_id; self.next_id += 1; @@ -377,3 +404,70 @@ fn test_document_formatting_respects_lsp_optional_formatting_options() -> Result session.shutdown() } + +#[test] +fn test_range_formatting_returns_edit_for_changes_within_requested_range() -> Result<()> { + let mut session = FormatSession::start(serde_json::Value::Null)?; + let uri = "file:///test/format-range-contained.jsonnet"; + session.open(uri, "{\n a: 1,\n b:2,\n}\n")?; + + let edits = session.request_range_formatting_with_options( + uri, + lsp_types::Range { + start: Position { + line: 2, + character: 0, + }, + end: Position { + line: 2, + character: 6, + }, + }, + formatting_options(2, true, None, None, None), + )?; + + assert_eq!( + edits, + Some(vec![lsp_types::TextEdit { + range: lsp_types::Range { + start: Position { + line: 2, + character: 4, + }, + end: Position { + line: 2, + character: 4, + }, + }, + new_text: " ".to_string(), + }]) + ); + + session.shutdown() +} + +#[test] +fn test_range_formatting_returns_no_edits_when_changes_escape_requested_range() -> Result<()> { + let mut session = FormatSession::start(serde_json::Value::Null)?; + let uri = "file:///test/format-range-outside.jsonnet"; + session.open(uri, "{\n a:1,\n b:2,\n}\n")?; + + let edits = session.request_range_formatting_with_options( + uri, + lsp_types::Range { + start: Position { + line: 1, + character: 0, + }, + end: Position { + line: 1, + character: 6, + }, + }, + formatting_options(2, true, None, None, None), + )?; + + assert_eq!(edits, Some(Vec::new())); + + session.shutdown() +} diff --git a/crates/jrsonnet-lsp/tests/integration_test/lifecycle.rs b/crates/jrsonnet-lsp/tests/integration_test/lifecycle.rs index bed68e19..b5cf440f 100644 --- a/crates/jrsonnet-lsp/tests/integration_test/lifecycle.rs +++ b/crates/jrsonnet-lsp/tests/integration_test/lifecycle.rs @@ -26,11 +26,16 @@ fn test_initialize_shutdown() { serde_json::Value::Bool(true), "document highlight 
capability should be advertised", ); - assert_eq!( - result["capabilities"]["inlayHintProvider"], - serde_json::Value::Bool(true), - "inlay hint capability should be advertised", - ); + assert_eq!( + result["capabilities"]["inlayHintProvider"], + serde_json::Value::Bool(true), + "inlay hint capability should be advertised", + ); + assert_eq!( + result["capabilities"]["documentRangeFormattingProvider"], + serde_json::Value::Bool(true), + "range formatting capability should be advertised", + ); assert_eq!( result["capabilities"]["codeActionProvider"]["codeActionKinds"][0], serde_json::Value::String("quickfix".to_string()), diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/formatting_range_outside_requested_range_noop.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/formatting_range_outside_requested_range_noop.yaml new file mode 100644 index 00000000..4087b60c --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/formatting_range_outside_requested_range_noop.yaml @@ -0,0 +1,21 @@ +# Verify textDocument/rangeFormatting returns no edits when formatter changes +# would touch content outside the requested range. +steps: +- step: create + files: + main.jsonnet: | + { + [[target:a: 1,]] + b:2, + } + +- step: diagnosticsSettled + +- step: requestRangeFormatting + as: formattingRange + file: main.jsonnet + range: target + +- step: expectRangeFormatting + request: formattingRange + result: [] diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/formatting_range_request_options_apply.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/formatting_range_request_options_apply.yaml new file mode 100644 index 00000000..72667799 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/formatting_range_request_options_apply.yaml @@ -0,0 +1,27 @@ +# Verify range formatting honors request indentation options. 
+steps: +- step: create + files: + main.jsonnet: "((wholeStart:|)){a:{b:1}}" + +- step: diagnosticsSettled + +- step: requestRangeFormatting + as: formattingRange + file: main.jsonnet + at: wholeStart + len: 9 + insert_spaces: false + tab_size: 8 + +- step: expectRangeFormatting + request: formattingRange + result: + - range: + start: + line: 0 + character: 1 + end: + line: 0 + character: 9 + newText: "\n\ta: {\n\t\tb: 1,\n\t},\n}\n" diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/formatting_range_within_requested_range.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/formatting_range_within_requested_range.yaml new file mode 100644 index 00000000..1f01aae8 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/formatting_range_within_requested_range.yaml @@ -0,0 +1,29 @@ +# Verify textDocument/rangeFormatting returns edits when formatter changes +# are contained in the requested range. +steps: +- step: create + files: + main.jsonnet: | + { + a: 1, + [[target:b:2,]] + } + +- step: diagnosticsSettled + +- step: requestRangeFormatting + as: formattingRange + file: main.jsonnet + range: target + +- step: expectRangeFormatting + request: formattingRange + result: + - range: + start: + line: 2 + character: 4 + end: + line: 2 + character: 4 + newText: " " diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/missing_step_coverage.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/missing_step_coverage.yaml index f0d27cf5..74b71d66 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/runner/missing_step_coverage.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/missing_step_coverage.yaml @@ -112,7 +112,7 @@ steps: - step: changeFull file: main.jsonnet - text: "local broken = " + text: "local [[rangeFmt:broken]] = " version: 2 - step: requestFormatting @@ -123,6 +123,15 @@ steps: request: fmt result: +- step: requestRangeFormatting + as: rangeFmt + file: main.jsonnet + range: rangeFmt + +- step: expectRangeFormatting + request: rangeFmt + result: + - 
step: requestDocumentSymbol as: docSymbols file: missing.jsonnet diff --git a/docs/lsp/ARCHITECTURE.md b/docs/lsp/ARCHITECTURE.md index 3deec6ef..f003b7b9 100644 --- a/docs/lsp/ARCHITECTURE.md +++ b/docs/lsp/ARCHITECTURE.md @@ -120,6 +120,7 @@ Dispatched via `spawn_async_response` (Rayon): - `textDocument/rename` - `textDocument/codeLens` - `textDocument/formatting` +- `textDocument/rangeFormatting` - `textDocument/semanticTokens/full` - `textDocument/semanticTokens/range` - `workspace/executeCommand` @@ -168,7 +169,7 @@ The main loop uses this boundary for all request responses after initialization. symbols, document highlights - completion (trigger `.`) - signature help (triggers `(` and `,`) -- formatting +- formatting and range formatting - references - workspace symbol search - rename with `prepareRename` diff --git a/docs/lsp/HANDLERS.md b/docs/lsp/HANDLERS.md index 6efe5109..5b9d8b8c 100644 --- a/docs/lsp/HANDLERS.md +++ b/docs/lsp/HANDLERS.md @@ -48,6 +48,7 @@ Current request routing in `crates/jrsonnet-lsp/src/server.rs`: | `textDocument/completion` | async context (`completion`) | handlers crate (`completion_with_import_roots`) | async | yes | | `textDocument/signatureHelp` | `on_signature_help` | handlers crate (`signature_help`) | sync | no | | `textDocument/formatting` | `on_formatting` | handlers crate (`format_document_with_config`) | sync | no | +| `textDocument/rangeFormatting` | `on_range_formatting` | handlers crate (`format_document_range_with_config`) | sync | no | | `textDocument/references` | async context (`references`) | mixed: handlers + server import graph merge | async | no | | `workspace/symbol` | async context (`workspace_symbol`) | handlers crate (`workspace_symbols_for_document`) | async | no | | `textDocument/prepareRename` | `on_prepare_rename` | handlers crate (`prepare_rename`) | sync | no | @@ -201,6 +202,8 @@ File: `crates/jrsonnet-lsp-handlers/src/formatting/mod.rs` - Uses the built-in formatter implementation. 
- Returns a full-document replacement edit when formatting changes text. +- Range formatting returns edits only when formatter changes are entirely + contained in the requested range. - Computes replacement ranges with UTF-16-aware position conversion. - Returns `None` when formatting is unavailable or fails. From 88906cfa59da67c043997314ef2743cc6b6c46cb Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Fri, 27 Feb 2026 12:15:46 +0000 Subject: [PATCH 189/210] lsp: add inlay hint refresh support and scenario auto-handling --- .../src/scenario_runner/runner.rs | 91 +++++++++++++++++-- .../src/scenario_runner/transport.rs | 28 +++++- crates/jrsonnet-lsp/src/server.rs | 4 + .../jrsonnet-lsp/src/server/initialization.rs | 10 ++ .../jrsonnet-lsp/src/server/notifications.rs | 19 ++++ crates/jrsonnet-lsp/tests/integration_test.rs | 38 ++++++-- .../tests/integration_test/lifecycle.rs | 67 ++++++++++++++ 7 files changed, 242 insertions(+), 15 deletions(-) diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_runner/runner.rs b/crates/jrsonnet-lsp-scenario/src/scenario_runner/runner.rs index ee9d1fc4..02715a91 100644 --- a/crates/jrsonnet-lsp-scenario/src/scenario_runner/runner.rs +++ b/crates/jrsonnet-lsp-scenario/src/scenario_runner/runner.rs @@ -7,7 +7,8 @@ use std::{ use lsp_server::{Connection, Response}; use lsp_types::{ request::{Initialize, Request as _, Shutdown}, - InitializeParams, + ClientCapabilities, InitializeParams, InlayHintWorkspaceClientCapabilities, + WorkspaceClientCapabilities, }; use super::{transport::RpcError, RunnerError, RunnerResult}; @@ -65,10 +66,23 @@ impl ScenarioRunner { } fn initialize(&mut self) -> RunnerResult<()> { + let params = InitializeParams { + capabilities: ClientCapabilities { + workspace: Some(WorkspaceClientCapabilities { + inlay_hint: Some(InlayHintWorkspaceClientCapabilities { + refresh_support: Some(true), + }), + ..WorkspaceClientCapabilities::default() + }), + ..ClientCapabilities::default() + }, + 
..InitializeParams::default() + }; + self.send_request_with_params( INITIALIZE_REQUEST_ID, Initialize::METHOD, - InitializeParams::default(), + params, "initialize", )?; let response = self.wait_response(INITIALIZE_REQUEST_ID, REQUEST_TIMEOUT)?; @@ -80,7 +94,8 @@ impl ScenarioRunner { } .into()); } - self.send_notification_with_params("initialized", serde_json::json!({}), "initialized") + self.send_notification_with_params("initialized", serde_json::json!({}), "initialized")?; + self.drain_background_messages() } fn shutdown(self) -> RunnerResult<()> { @@ -108,7 +123,7 @@ impl ScenarioRunner { } fn run_step(&mut self, step: &ScenarioStep) -> RunnerResult<()> { - match step { + let result = match step { ScenarioStep::Open(open) => self.step_open(open), ScenarioStep::ChangeFull(change) => self.step_change_full(change), ScenarioStep::ChangeIncremental(change) => self.step_change_incremental(change), @@ -212,7 +227,9 @@ impl ScenarioRunner { self.step_expect_diagnostics(expectation) } ScenarioStep::DiagnosticsSettled(settled) => self.step_diagnostics_settled(*settled), - } + }; + result?; + self.drain_background_messages() } } @@ -222,7 +239,7 @@ mod tests { use lsp_server::{Message, Notification, Response}; use lsp_types::{ notification::{DidOpenTextDocument, Notification as _, PublishDiagnostics}, - request::{HoverRequest, Request as _}, + request::{HoverRequest, InlayHintRefreshRequest, Request as _}, Diagnostic, DiagnosticSeverity, Hover, HoverContents, MarkedString, Position, PublishDiagnosticsParams, Range, }; @@ -357,6 +374,68 @@ mod tests { }) } + #[test] + fn run_scenario_auto_handles_server_initiated_requests() -> Result<(), super::RunnerError> { + let scenario = Scenario::new(vec![]); + let refresh_id = lsp_server::RequestId::from(42_i32); + + run_scenario(&scenario, move |connection| { + let mut saw_refresh_response = false; + + loop { + let Ok(message) = connection.receiver.recv() else { + break; + }; + + match message { + Message::Request(request) => { + 
let response = match request.method.as_str() { + lsp_types::request::Initialize::METHOD => { + Response::new_ok(request.id, serde_json::json!({})) + } + lsp_types::request::Shutdown::METHOD => { + Response::new_ok(request.id, serde_json::Value::Null) + } + _ => Response::new_ok(request.id, serde_json::Value::Null), + }; + if connection.sender.send(Message::Response(response)).is_err() { + break; + } + } + Message::Notification(notification) => { + if notification.method == "initialized" { + let refresh = lsp_server::Request::new( + refresh_id.clone(), + InlayHintRefreshRequest::METHOD.to_string(), + serde_json::Value::Null, + ); + if connection.sender.send(Message::Request(refresh)).is_err() { + break; + } + } else if notification.method == "exit" { + break; + } + } + Message::Response(response) => { + if response.id == refresh_id { + assert!( + response.error.is_none(), + "refresh response should not contain an error", + ); + assert_eq!(response.result, Some(serde_json::Value::Null)); + saw_refresh_response = true; + } + } + } + } + + assert!( + saw_refresh_response, + "client should acknowledge server-initiated refresh requests", + ); + }) + } + #[test] fn run_scenario_reports_mismatched_expectation() { let scenario = Scenario::new(vec![ diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_runner/transport.rs b/crates/jrsonnet-lsp-scenario/src/scenario_runner/transport.rs index 313e5968..664fb810 100644 --- a/crates/jrsonnet-lsp-scenario/src/scenario_runner/transport.rs +++ b/crates/jrsonnet-lsp-scenario/src/scenario_runner/transport.rs @@ -1,6 +1,6 @@ use std::time::{Duration, Instant}; -use crossbeam_channel::RecvTimeoutError; +use crossbeam_channel::{RecvTimeoutError, TryRecvError}; use lsp_server::{Message, Notification, Request, Response, ResponseError}; use lsp_types::notification::{Notification as _, PublishDiagnostics}; use serde::{de::DeserializeOwned, Serialize}; @@ -60,6 +60,11 @@ pub enum TransportError { #[source] source: Box>, }, + #[error("send 
response failed: {source}")] + SendResponse { + #[source] + source: Box>, + }, } #[derive(Debug, Error)] @@ -79,6 +84,15 @@ pub enum RpcError { } impl ScenarioRunner { + pub(super) fn drain_background_messages(&mut self) -> RunnerResult<()> { + loop { + match self.conn.receiver.try_recv() { + Ok(message) => self.capture_background_message(message)?, + Err(TryRecvError::Empty | TryRecvError::Disconnected) => return Ok(()), + } + } + } + pub(super) fn send_notification_with_params( &self, method: &str, @@ -215,7 +229,17 @@ impl ScenarioRunner { self.last_diagnostic_at = Some(Instant::now()); Ok(()) } - Message::Notification(_) | Message::Request(_) => Ok(()), + Message::Notification(_) => Ok(()), + Message::Request(request) => { + let response = Response::new_ok(request.id, serde_json::Value::Null); + self.conn + .sender + .send(Message::Response(response)) + .map_err(|source| TransportError::SendResponse { + source: Box::new(source), + }) + .map_err(Into::into) + } } } diff --git a/crates/jrsonnet-lsp/src/server.rs b/crates/jrsonnet-lsp/src/server.rs index adcc73bf..7b420078 100644 --- a/crates/jrsonnet-lsp/src/server.rs +++ b/crates/jrsonnet-lsp/src/server.rs @@ -73,6 +73,8 @@ pub struct Server { request_response_receiver: Receiver, /// Shutdown requested flag. shutdown_requested: bool, + /// Whether the client supports `workspace/inlayHint/refresh`. 
+ client_supports_inlay_hint_refresh: bool, } #[derive(Debug, Clone, Default, serde::Deserialize)] @@ -130,6 +132,7 @@ impl Server { request_response_sender, request_response_receiver, shutdown_requested: false, + client_supports_inlay_hint_refresh: false, } } @@ -233,6 +236,7 @@ impl Server { // Handle initialize request let (id, params, init_roots) = self.initialize()?; + self.client_supports_inlay_hint_refresh = Self::supports_inlay_hint_refresh(¶ms); // Parse initialization options into configuration let init_config = diff --git a/crates/jrsonnet-lsp/src/server/initialization.rs b/crates/jrsonnet-lsp/src/server/initialization.rs index 162b9f64..80bebf95 100644 --- a/crates/jrsonnet-lsp/src/server/initialization.rs +++ b/crates/jrsonnet-lsp/src/server/initialization.rs @@ -103,6 +103,16 @@ impl Server { } } + pub(super) fn supports_inlay_hint_refresh(params: &InitializeParams) -> bool { + params + .capabilities + .workspace + .as_ref() + .and_then(|workspace| workspace.inlay_hint.as_ref()) + .and_then(|capabilities| capabilities.refresh_support) + .unwrap_or(false) + } + pub(super) fn initialize_result() -> InitializeResult { InitializeResult { capabilities: Self::server_capabilities(), diff --git a/crates/jrsonnet-lsp/src/server/notifications.rs b/crates/jrsonnet-lsp/src/server/notifications.rs index c9c17a23..a9082ade 100644 --- a/crates/jrsonnet-lsp/src/server/notifications.rs +++ b/crates/jrsonnet-lsp/src/server/notifications.rs @@ -3,6 +3,7 @@ use lsp_types::{ Cancel, DidChangeConfiguration, DidChangeTextDocument, DidChangeWatchedFiles, DidCloseTextDocument, DidOpenTextDocument, DidSaveTextDocument, Notification as _, }, + request::InlayHintRefreshRequest, DidChangeConfigurationParams, DidChangeTextDocumentParams, DidChangeWatchedFilesParams, DidCloseTextDocumentParams, DidOpenTextDocumentParams, DidSaveTextDocumentParams, FileChangeType, @@ -11,6 +12,19 @@ use lsp_types::{ use super::*; impl Server { + fn request_inlay_hint_refresh_if_supported(&mut self) 
{
+		if !self.client_supports_inlay_hint_refresh {
+			return;
+		}
+
+		if let Err(error) = self
+			.inflight_requests
+			.send_outgoing_request::<InlayHintRefreshRequest>(())
+		{
+			warn!("Failed to request inlay-hint refresh: {error:#}");
+		}
+	}
+
 	/// Handle an incoming notification.
 	///
 	/// Returns true if exit notification was received.
@@ -236,6 +250,7 @@ impl Server {
 			|| old_config.resolve_paths_with_tanka != updated_config.resolve_paths_with_tanka;
 		let diagnostics_config_changed =
 			old_config.enable_lint_diagnostics != updated_config.enable_lint_diagnostics;
+		let inlay_hints_config_changed = old_config.inlay_hints != updated_config.inlay_hints;
 
 		if runtime_config_changed {
 			self.reconfigure_runtime_components(&updated_config);
@@ -257,6 +272,10 @@ impl Server {
 			}
 		}
 
+		if inlay_hints_config_changed {
+			self.request_inlay_hint_refresh_if_supported();
+		}
+
 		info!(
 			"Configuration updated: jpath={:?}, eval_diagnostics={}, tanka_mode={}",
 			updated_config.jpath,
diff --git a/crates/jrsonnet-lsp/tests/integration_test.rs b/crates/jrsonnet-lsp/tests/integration_test.rs
index 4092d685..4f75b2a6 100644
--- a/crates/jrsonnet-lsp/tests/integration_test.rs
+++ b/crates/jrsonnet-lsp/tests/integration_test.rs
@@ -16,19 +16,20 @@ use lsp_types::{
 	request::{
 		CodeActionRequest, CodeLensRequest, CodeLensResolve, DocumentHighlightRequest,
 		ExecuteCommand, Formatting, GotoDeclaration, GotoDefinition, GotoImplementation,
-		GotoTypeDefinition, Initialize, InlayHintRequest, RangeFormatting, References,
-		RegisterCapability, Rename, Request as _, SemanticTokensRangeRequest, Shutdown,
+		GotoTypeDefinition, Initialize, InlayHintRefreshRequest, InlayHintRequest, RangeFormatting,
+		References, RegisterCapability, Rename, Request as _, SemanticTokensRangeRequest, Shutdown,
 		WorkspaceSymbolRequest,
 	},
 	CancelParams, DidChangeConfigurationParams, DidChangeWatchedFilesClientCapabilities,
 	DidChangeWatchedFilesParams, DidChangeWatchedFilesRegistrationOptions,
 	DidCloseTextDocumentParams, DidOpenTextDocumentParams,
DidSaveTextDocumentParams, ExecuteCommandParams, FileChangeType, FileEvent, FileSystemWatcher, GlobPattern, - GotoDefinitionParams, GotoDefinitionResponse, InitializeParams, NumberOrString, OneOf, - PartialResultParams, Position, ReferenceContext, ReferenceParams, Registration, - RegistrationParams, RelativePattern, RenameParams, SemanticTokensRangeParams, - TextDocumentIdentifier, TextDocumentItem, TextDocumentPositionParams, WorkDoneProgressParams, - WorkspaceClientCapabilities, WorkspaceFolder, + GotoDefinitionParams, GotoDefinitionResponse, InitializeParams, + InlayHintWorkspaceClientCapabilities, NumberOrString, OneOf, PartialResultParams, Position, + ReferenceContext, ReferenceParams, Registration, RegistrationParams, RelativePattern, + RenameParams, SemanticTokensRangeParams, TextDocumentIdentifier, TextDocumentItem, + TextDocumentPositionParams, WorkDoneProgressParams, WorkspaceClientCapabilities, + WorkspaceFolder, }; use serde_json::json; use tempfile::TempDir; @@ -185,6 +186,29 @@ fn initialize_request_with_dynamic_watched_files_relative(id: i32, root_uri: &st ) } +/// Helper to create an initialize request that advertises +/// `workspace/inlayHint/refresh` support. +fn initialize_request_with_inlay_hint_refresh_support(id: i32) -> Request { + let params = InitializeParams { + capabilities: lsp_types::ClientCapabilities { + workspace: Some(WorkspaceClientCapabilities { + inlay_hint: Some(InlayHintWorkspaceClientCapabilities { + refresh_support: Some(true), + }), + ..WorkspaceClientCapabilities::default() + }), + ..lsp_types::ClientCapabilities::default() + }, + ..InitializeParams::default() + }; + + Request::new( + id.into(), + Initialize::METHOD.to_string(), + serde_json::to_value(params).unwrap(), + ) +} + /// Helper to create a shutdown request. 
fn shutdown_request(id: i32) -> Request { Request::new( diff --git a/crates/jrsonnet-lsp/tests/integration_test/lifecycle.rs b/crates/jrsonnet-lsp/tests/integration_test/lifecycle.rs index b5cf440f..9ce65a3c 100644 --- a/crates/jrsonnet-lsp/tests/integration_test/lifecycle.rs +++ b/crates/jrsonnet-lsp/tests/integration_test/lifecycle.rs @@ -299,6 +299,73 @@ fn test_configuration_change_reconfigures_eval_diagnostics() { .expect("Server thread should exit cleanly"); } +#[test] +fn test_configuration_change_requests_inlay_hint_refresh_when_supported() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request( + initialize_request_with_inlay_hint_refresh_support(1), + )) + .unwrap(); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .unwrap(); + + client_conn + .sender + .send(Message::Notification( + did_change_configuration_notification(serde_json::json!({ + "jsonnet": { + "inlayHints": { + "callArguments": "all", + } + } + })), + )) + .unwrap(); + + let refresh_request = loop { + let message = client_conn + .receiver + .recv_timeout(Duration::from_secs(3)) + .expect("expected inlay hint refresh request"); + match message { + Message::Request(request) if request.method == InlayHintRefreshRequest::METHOD => { + break request; + } + Message::Notification(_) => continue, + _ => continue, + } + }; + assert_eq!(refresh_request.params, serde_json::Value::Null); + + client_conn + .sender + .send(Message::Response(lsp_server::Response::new_ok( + refresh_request.id, + serde_json::Value::Null, + ))) + .unwrap(); + + client_conn + .sender + .send(Message::Request(shutdown_request(2))) + .unwrap(); + let _ = recv_response(&client_conn, 2); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .unwrap(); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + 
#[test] fn test_configuration_change_reindexes_closed_import_graph_entries() { let tmp = TempDir::new().expect("tempdir should be created"); From 525105d725eeb0246061d26ff611c9d0272a3e2a Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Fri, 27 Feb 2026 12:37:16 +0000 Subject: [PATCH 190/210] refactor(lsp): eliminate panic-prone test patterns and indexing --- Cargo.lock | 1 + crates/jrsonnet-lsp-document/Cargo.toml | 3 + crates/jrsonnet-lsp-document/src/error.rs | 69 +++-- crates/jrsonnet-lsp-document/src/file_ids.rs | 2 +- crates/jrsonnet-lsp-document/src/types.rs | 85 +++--- crates/jrsonnet-lsp-import/src/graph/tests.rs | 74 ++++- crates/jrsonnet-lsp-import/src/work_queue.rs | 34 ++- .../src/scenario/doctest_assertions.rs | 109 +++++-- .../src/scenario_runner/helpers.rs | 12 +- .../src/scenario_runner/runner.rs | 21 +- .../src/scenario_script/inputs.rs | 8 +- crates/jrsonnet-lsp/src/analysis/tanka.rs | 61 ++-- .../jrsonnet-lsp/src/handlers/diagnostics.rs | 2 +- .../src/protocol/inflight_requests.rs | 144 ++++++---- .../src/server/custom_operations/eval_file.rs | 32 ++- .../src/server/request_dispatch.rs | 14 +- crates/jrsonnet-lsp/tests/cross_file_tests.rs | 76 ++--- .../jrsonnet-lsp/tests/docs_lsp_examples.rs | 7 +- crates/jrsonnet-lsp/tests/integration_test.rs | 131 +++++---- .../tests/integration_test/features.rs | 213 +++++++------- .../tests/integration_test/formatting.rs | 175 +++++++----- .../tests/integration_test/lifecycle.rs | 228 ++++++++------- .../tests/integration_test/navigation.rs | 235 +++++++-------- .../integration_test/workspace_cross_file.rs | 270 ++++++++++-------- crates/jrsonnet-lsp/tests/stress_tests.rs | 141 +++++---- 25 files changed, 1249 insertions(+), 898 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 3fd8cfbf..a3c998b0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1871,6 +1871,7 @@ dependencies = [ name = "jrsonnet-lsp-document" version = "0.5.0-pre97" dependencies = [ + "assert_matches", "derive_more 1.0.0", 
"jrsonnet-rowan-parser", "lsp-types", diff --git a/crates/jrsonnet-lsp-document/Cargo.toml b/crates/jrsonnet-lsp-document/Cargo.toml index 047cba82..f9dcf648 100644 --- a/crates/jrsonnet-lsp-document/Cargo.toml +++ b/crates/jrsonnet-lsp-document/Cargo.toml @@ -16,5 +16,8 @@ rowan.workspace = true thiserror.workspace = true url.workspace = true +[dev-dependencies] +assert_matches = "1.5.0" + [lints] workspace = true diff --git a/crates/jrsonnet-lsp-document/src/error.rs b/crates/jrsonnet-lsp-document/src/error.rs index 3397760f..a2c9863d 100644 --- a/crates/jrsonnet-lsp-document/src/error.rs +++ b/crates/jrsonnet-lsp-document/src/error.rs @@ -4,6 +4,19 @@ use thiserror::Error; +/// Structured reasons why a Jsonnet identifier is invalid. +#[derive(Error, Debug, Clone, PartialEq, Eq)] +pub enum InvalidIdentifierReason { + #[error("identifier cannot be empty")] + Empty, + #[error("identifier must start with a letter or underscore, got '{0}'")] + InvalidStart(char), + #[error("identifier contains invalid character '{0}'")] + InvalidCharacter(char), + #[error("'{0}' is a reserved keyword")] + ReservedKeyword(String), +} + /// Errors that can occur during LSP operations. #[derive(Error, Debug)] pub enum LspError { @@ -41,7 +54,7 @@ pub enum LspError { /// Identifier is not valid for Jsonnet. #[error("invalid identifier: {0}")] - InvalidIdentifier(String), + InvalidIdentifier(InvalidIdentifierReason), /// IO error occurred. #[error("IO error: {0}")] @@ -120,35 +133,31 @@ pub fn is_valid_jsonnet_identifier(name: &str) -> bool { /// starts with an invalid character, contains invalid characters, or is a keyword. 
pub fn validate_identifier(name: &str) -> LspResult<()> { if name.is_empty() { - return Err(LspError::InvalidIdentifier( - "identifier cannot be empty".to_string(), - )); + return Err(LspError::InvalidIdentifier(InvalidIdentifierReason::Empty)); } let mut chars = name.chars(); let Some(first) = chars.next() else { - return Err(LspError::InvalidIdentifier( - "identifier cannot be empty".to_string(), - )); + return Err(LspError::InvalidIdentifier(InvalidIdentifierReason::Empty)); }; if !first.is_ascii_alphabetic() && first != '_' { - return Err(LspError::InvalidIdentifier(format!( - "identifier must start with a letter or underscore, got '{first}'" - ))); + return Err(LspError::InvalidIdentifier( + InvalidIdentifierReason::InvalidStart(first), + )); } for c in chars { if !c.is_ascii_alphanumeric() && c != '_' { - return Err(LspError::InvalidIdentifier(format!( - "identifier contains invalid character '{c}'" - ))); + return Err(LspError::InvalidIdentifier( + InvalidIdentifierReason::InvalidCharacter(c), + )); } } if JSONNET_KEYWORDS.contains(&name) { - return Err(LspError::InvalidIdentifier(format!( - "'{name}' is a reserved keyword" - ))); + return Err(LspError::InvalidIdentifier( + InvalidIdentifierReason::ReservedKeyword(name.to_string()), + )); } Ok(()) @@ -156,6 +165,8 @@ pub fn validate_identifier(name: &str) -> LspResult<()> { #[cfg(test)] mod tests { + use assert_matches::assert_matches; + use super::*; #[test] @@ -196,13 +207,23 @@ mod tests { fn test_validate_identifier_errors() { validate_identifier("foo").expect("foo should be valid"); - let err = validate_identifier("").unwrap_err(); - assert!(matches!(err, LspError::InvalidIdentifier(_))); - - let err = validate_identifier("123foo").unwrap_err(); - assert!(matches!(err, LspError::InvalidIdentifier(_))); - - let err = validate_identifier("local").unwrap_err(); - assert!(matches!(err, LspError::InvalidIdentifier(_))); + let err = validate_identifier("").expect_err("empty identifier should fail"); + 
assert_matches!( + err, + LspError::InvalidIdentifier(InvalidIdentifierReason::Empty) + ); + + let err = validate_identifier("123foo").expect_err("leading digit should fail"); + assert_matches!( + err, + LspError::InvalidIdentifier(InvalidIdentifierReason::InvalidStart('1')) + ); + + let err = validate_identifier("local").expect_err("keyword should fail"); + assert_matches!( + err, + LspError::InvalidIdentifier(InvalidIdentifierReason::ReservedKeyword(keyword)) + if keyword == "local" + ); } } diff --git a/crates/jrsonnet-lsp-document/src/file_ids.rs b/crates/jrsonnet-lsp-document/src/file_ids.rs index 3d2dab74..c1f0728e 100644 --- a/crates/jrsonnet-lsp-document/src/file_ids.rs +++ b/crates/jrsonnet-lsp-document/src/file_ids.rs @@ -50,7 +50,7 @@ impl PathInterner { return id; } - let raw = u32::try_from(self.id_to_path.len()).expect("too many interned file paths"); + let raw = self.id_to_path.len() as u32; let id = FileId::from_raw(raw); self.path_to_id.insert(path.as_path().to_path_buf(), id); self.id_to_path.push(Arc::new(path.clone())); diff --git a/crates/jrsonnet-lsp-document/src/types.rs b/crates/jrsonnet-lsp-document/src/types.rs index 8f8f46cc..2beecfec 100644 --- a/crates/jrsonnet-lsp-document/src/types.rs +++ b/crates/jrsonnet-lsp-document/src/types.rs @@ -229,6 +229,9 @@ impl SymbolName { mod tests { use std::time::{SystemTime, UNIX_EPOCH}; + use crate::error::InvalidIdentifierReason; + use assert_matches::assert_matches; + use super::*; #[test] @@ -265,7 +268,7 @@ mod tests { #[test] fn test_symbol_name_valid() { - let name = SymbolName::new("foo").unwrap(); + let name = SymbolName::new("foo").expect("foo should be a valid symbol name"); assert_eq!(&*name, "foo"); assert_eq!(name.as_ref(), "foo"); @@ -276,34 +279,56 @@ mod tests { #[test] fn test_symbol_name_invalid() { - use crate::error::LspError; - // Empty - let err = SymbolName::new("").unwrap_err(); - assert!(matches!(err, LspError::InvalidIdentifier(_))); + let err = 
SymbolName::new("").expect_err("empty name should fail"); + assert_matches!( + err, + LspError::InvalidIdentifier(InvalidIdentifierReason::Empty) + ); // Starts with digit - let err = SymbolName::new("123foo").unwrap_err(); - assert!(matches!(err, LspError::InvalidIdentifier(_))); + let err = SymbolName::new("123foo").expect_err("leading digit should fail"); + assert_matches!( + err, + LspError::InvalidIdentifier(InvalidIdentifierReason::InvalidStart('1')) + ); // Invalid characters - let err = SymbolName::new("foo-bar").unwrap_err(); - assert!(matches!(err, LspError::InvalidIdentifier(_))); - let err = SymbolName::new("foo.bar").unwrap_err(); - assert!(matches!(err, LspError::InvalidIdentifier(_))); + let err = SymbolName::new("foo-bar").expect_err("dash should fail"); + assert_matches!( + err, + LspError::InvalidIdentifier(InvalidIdentifierReason::InvalidCharacter('-')) + ); + let err = SymbolName::new("foo.bar").expect_err("dot should fail"); + assert_matches!( + err, + LspError::InvalidIdentifier(InvalidIdentifierReason::InvalidCharacter('.')) + ); // Keywords - let err = SymbolName::new("local").unwrap_err(); - assert!(matches!(err, LspError::InvalidIdentifier(_))); - let err = SymbolName::new("function").unwrap_err(); - assert!(matches!(err, LspError::InvalidIdentifier(_))); - let err = SymbolName::new("if").unwrap_err(); - assert!(matches!(err, LspError::InvalidIdentifier(_))); + let err = SymbolName::new("local").expect_err("keyword should fail"); + assert_matches!( + err, + LspError::InvalidIdentifier(InvalidIdentifierReason::ReservedKeyword(keyword)) + if keyword == "local" + ); + let err = SymbolName::new("function").expect_err("keyword should fail"); + assert_matches!( + err, + LspError::InvalidIdentifier(InvalidIdentifierReason::ReservedKeyword(keyword)) + if keyword == "function" + ); + let err = SymbolName::new("if").expect_err("keyword should fail"); + assert_matches!( + err, + 
LspError::InvalidIdentifier(InvalidIdentifierReason::ReservedKeyword(keyword)) + if keyword == "if" + ); } #[test] fn test_symbol_name_display() { - let name = SymbolName::new("myVar").unwrap(); + let name = SymbolName::new("myVar").expect("myVar should be a valid symbol name"); assert_eq!(format!("{name}"), "myVar"); } @@ -320,14 +345,11 @@ mod tests { #[test] fn test_canonical_path_from_uri_rejects_non_file_uri() { - use crate::error::LspError; - - let uri: lsp_types::Uri = "https://example.com/test.jsonnet".parse().unwrap(); - let err = CanonicalPath::from_uri(&uri).unwrap_err(); - match err { - LspError::NonFileUri(value) => assert_eq!(value, uri.as_str()), - other => panic!("unexpected error: {other:?}"), - } + let uri: lsp_types::Uri = "https://example.com/test.jsonnet" + .parse() + .expect("URI should parse"); + let err = CanonicalPath::from_uri(&uri).expect_err("non-file URI should fail"); + assert_matches!(err, LspError::NonFileUri(value) if value == uri.as_str()); } #[test] @@ -352,13 +374,10 @@ mod tests { #[test] fn test_canonical_path_to_uri_rejects_relative_path() { - use crate::error::LspError; - let path = CanonicalPath::new(PathBuf::from("relative.jsonnet")); - let err = path.to_uri().unwrap_err(); - match err { - LspError::PathToUri(value) => assert_eq!(value, "relative.jsonnet"), - other => panic!("unexpected error: {other:?}"), - } + let err = path + .to_uri() + .expect_err("relative path should fail URI conversion"); + assert_matches!(err, LspError::PathToUri(value) if value == "relative.jsonnet"); } } diff --git a/crates/jrsonnet-lsp-import/src/graph/tests.rs b/crates/jrsonnet-lsp-import/src/graph/tests.rs index 8acc8e70..35b33d80 100644 --- a/crates/jrsonnet-lsp-import/src/graph/tests.rs +++ b/crates/jrsonnet-lsp-import/src/graph/tests.rs @@ -52,8 +52,10 @@ fn test_parse_import_occurrences_include_string_token_range() { code.find("\"lib.jsonnet\"") .expect("import string should exist"), ) - .unwrap(); - let end = start + 
u32::try_from("\"lib.jsonnet\"".len()).unwrap(); + .expect("import start offset should fit into u32"); + let end = start + + u32::try_from("\"lib.jsonnet\"".len()) + .expect("import literal length should fit into u32"); assert_eq!( occurrences, @@ -79,8 +81,8 @@ fn test_parse_import_occurrences_fallback_unterminated_string() { code.find("\"lib.jsonnet") .expect("unterminated import string should exist"), ) - .unwrap(); - let end = u32::try_from(code.len()).unwrap(); + .expect("unterminated import start should fit into u32"); + let end = u32::try_from(code.len()).expect("source length should fit into u32"); assert_eq!( occurrences, @@ -450,10 +452,19 @@ fn test_process_in_parallel_order() { let processed_order = Arc::new(Mutex::new(Vec::new())); let order_clone = Arc::clone(&processed_order); graph.process_in_parallel(move |file| { - order_clone.lock().unwrap().push(file); + order_clone + .lock() + .expect("processed_order mutex should not be poisoned") + .push(file); }); - let order = graph_paths(&graph, processed_order.lock().unwrap().clone()); + let order = graph_paths( + &graph, + processed_order + .lock() + .expect("processed_order mutex should not be poisoned") + .clone(), + ); // lib should be processed before main (lib has no deps, main depends on lib) assert_eq!(order, vec![lib, main]); } @@ -491,11 +502,20 @@ fn test_process_with_dependencies() { graph.intern(&main), |_| true, move |file| { - processed_clone.lock().unwrap().push(file); + processed_clone + .lock() + .expect("processed mutex should not be poisoned") + .push(file); }, ); - let order = graph_paths(&graph, processed.lock().unwrap().clone()); + let order = graph_paths( + &graph, + processed + .lock() + .expect("processed mutex should not be poisoned") + .clone(), + ); // lib should be processed before utils, utils before main assert_eq!(order, vec![lib, utils, main]); @@ -538,11 +558,20 @@ fn test_process_with_dependencies_filtered_by_kind() { graph.intern(&main), |entry| entry.kind == 
ImportKind::Code,
 		move |file| {
-			processed_clone.lock().unwrap().push(file);
+			processed_clone
+				.lock()
+				.expect("processed mutex should not be poisoned")
+				.push(file);
 		},
 	);
 
-	let order = graph_paths(&graph, processed.lock().unwrap().clone());
+	let order = graph_paths(
+		&graph,
+		processed
+			.lock()
+			.expect("processed mutex should not be poisoned")
+			.clone(),
+	);
 
 	assert_eq!(order, vec![data, main]);
 }
@@ -576,10 +605,19 @@ fn test_process_importers_with_work_queue() {
 
 	// Process lib and its importers (cascade)
 	graph.process_importers_with_work_queue(graph.intern(&lib), move |file| {
-		processed_clone.lock().unwrap().push(file);
+		processed_clone
+			.lock()
+			.expect("processed mutex should not be poisoned")
+			.push(file);
 	});
 
-	let order = graph_paths(&graph, processed.lock().unwrap().clone());
+	let order = graph_paths(
+		&graph,
+		processed
+			.lock()
+			.expect("processed mutex should not be poisoned")
+			.clone(),
+	);
 
 	// lib first, then utils (imports lib), then main (imports utils)
 	assert_eq!(order, vec![lib, utils, main]);
@@ -599,10 +637,18 @@ fn test_process_with_dependencies_unknown_root_is_noop() {
 			root,
 			|_| true,
 			move |file| {
-				processed_clone.lock().unwrap().push(file);
+				processed_clone
+					.lock()
+					.expect("processed mutex should not be poisoned")
+					.push(file);
 			},
 		);
 	}
 
-	assert_eq!(*processed.lock().unwrap(), Vec::<FileId>::new());
+	assert_eq!(
+		*processed
+			.lock()
+			.expect("processed mutex should not be poisoned"),
+		Vec::<FileId>::new()
+	);
 }
diff --git a/crates/jrsonnet-lsp-import/src/work_queue.rs b/crates/jrsonnet-lsp-import/src/work_queue.rs
index e6c626d6..86a9db1a 100644
--- a/crates/jrsonnet-lsp-import/src/work_queue.rs
+++ b/crates/jrsonnet-lsp-import/src/work_queue.rs
@@ -189,7 +189,9 @@ where
 		let mut levels: Vec<Vec<T>> = vec![Vec::new(); max_level + 1];
 
 		for (item, level) in self.item_levels {
-			levels[level].push(item);
+			if let Some(items_at_level) = levels.get_mut(level) {
+				items_at_level.push(item);
+			}
 		}
 
 		levels
@@ -317,13 +319,14 @@ mod tests
{
 			}
 		});
 
-		// Level 0 should have 2 and 3, level 1 should have 1
-		assert_eq!(levels.len(), 2);
-		assert_eq!(levels[1], vec![1]);
-
-		let mut first_level = levels[0].clone();
-		first_level.sort_unstable();
-		assert_eq!(first_level, vec![2, 3]);
+		let normalized_levels: Vec<Vec<u32>> = levels
+			.into_iter()
+			.map(|mut level| {
+				level.sort_unstable();
+				level
+			})
+			.collect();
+		assert_eq!(normalized_levels, vec![vec![2, 3], vec![1]]);
 	}
 
 	#[test]
@@ -344,13 +347,14 @@ mod tests {
 			}
 		});
 
-		assert_eq!(levels.len(), 3);
-		assert_eq!(levels[0], vec![4]); // 4 (leaf)
-		assert_eq!(levels[2], vec![1]); // 1 (root)
-
-		let mut middle_level = levels[1].clone();
-		middle_level.sort_unstable();
-		assert_eq!(middle_level, vec![2, 3]); // 2 and 3
+		let normalized_levels: Vec<Vec<u32>> = levels
+			.into_iter()
+			.map(|mut level| {
+				level.sort_unstable();
+				level
+			})
+			.collect();
+		assert_eq!(normalized_levels, vec![vec![4], vec![2, 3], vec![1]]);
 	}
 
 	#[test]
diff --git a/crates/jrsonnet-lsp-scenario/src/scenario/doctest_assertions.rs b/crates/jrsonnet-lsp-scenario/src/scenario/doctest_assertions.rs
index 88e5b69b..e05123e8 100644
--- a/crates/jrsonnet-lsp-scenario/src/scenario/doctest_assertions.rs
+++ b/crates/jrsonnet-lsp-scenario/src/scenario/doctest_assertions.rs
@@ -1,20 +1,65 @@
+use crossbeam_channel::Sender;
 use lsp_server::{Message, Notification, Response};
 use lsp_types::{
 	notification::{DidOpenTextDocument, Notification as _, PublishDiagnostics},
 	request::{HoverRequest, Request as _},
 	Hover, HoverContents, MarkedString, PublishDiagnosticsParams,
 };
+use thiserror::Error;
 
 use super::Scenario;
 
-pub fn assert_yaml_scenario_runs_without_error(yaml: &str) {
-	let base_dir = tempfile::tempdir().expect("create temp directory for scenario");
-	let scenario = crate::parse_scenario_yaml(yaml, base_dir.path()).expect("parse scenario yaml");
-	assert_scenario_runs_without_error(&scenario);
+#[derive(Debug, Error)]
+pub enum DoctestAssertionError {
+	#[error("create temp directory for scenario: 
{source}")] + CreateTempDirectory { + #[source] + source: std::io::Error, + }, + #[error("parse scenario yaml: {source}")] + ParseScenario { + #[source] + source: crate::ParseScenarioError, + }, + #[error("run scenario: {source}")] + RunScenario { + #[source] + source: crate::RunnerError, + }, + #[error("serialize hover response payload: {source}")] + SerializeHoverResponse { + #[source] + source: serde_json::Error, + }, + #[error("deserialize didOpen notification payload: {source}")] + DeserializeDidOpenNotification { + #[source] + source: serde_json::Error, + }, + #[error("serialize publishDiagnostics payload: {source}")] + SerializePublishDiagnostics { + #[source] + source: serde_json::Error, + }, + #[error("send response to scenario runner")] + SendResponse, + #[error("send diagnostics notification to scenario runner")] + SendDiagnosticsNotification, } -pub fn assert_scenario_runs_without_error(scenario: &Scenario) { - let result = crate::run_scenario(scenario, |connection| loop { +pub fn assert_yaml_scenario_runs_without_error(yaml: &str) -> Result<(), DoctestAssertionError> { + let base_dir = tempfile::tempdir() + .map_err(|source| DoctestAssertionError::CreateTempDirectory { source })?; + let scenario = crate::parse_scenario_yaml(yaml, base_dir.path()) + .map_err(|source| DoctestAssertionError::ParseScenario { source })?; + assert_scenario_runs_without_error(&scenario) +} + +pub fn assert_scenario_runs_without_error( + scenario: &Scenario, +) -> Result<(), DoctestAssertionError> { + let (callback_error_tx, callback_error_rx) = crossbeam_channel::bounded(1); + let result = crate::run_scenario(scenario, move |connection| loop { let Ok(message) = connection.receiver.recv() else { break; }; @@ -30,7 +75,13 @@ pub fn assert_scenario_runs_without_error(scenario: &Scenario) { }; match serde_json::to_value(hover) { Ok(result) => Response::new_ok(request.id, result), - Err(_) => break, + Err(source) => { + send_callback_error( + &callback_error_tx, + 
DoctestAssertionError::SerializeHoverResponse { source },
+						);
+						break;
+					}
 				}
 			}
 			_ => Response {
@@ -40,24 +91,39 @@
 				},
 			};
 			if connection.sender.send(Message::Response(response)).is_err() {
+				send_callback_error(&callback_error_tx, DoctestAssertionError::SendResponse);
 				break;
 			}
 		}
 		Message::Notification(notification)
 			if notification.method == DidOpenTextDocument::METHOD =>
 		{
-			let Ok(params) = serde_json::from_value::<DidOpenTextDocumentParams>( notification.params, ) else {
-				break;
+			let params = match serde_json::from_value::<DidOpenTextDocumentParams>( notification.params, ) {
+				Ok(params) => params,
+				Err(source) => {
+					send_callback_error(
+						&callback_error_tx,
+						DoctestAssertionError::DeserializeDidOpenNotification { source },
+					);
+					break;
+				}
 			};
 			let publish = PublishDiagnosticsParams {
 				uri: params.text_document.uri,
 				version: Some(params.text_document.version),
 				diagnostics: vec![],
 			};
-			let Ok(payload) = serde_json::to_value(publish) else {
-				break;
+			let payload = match serde_json::to_value(publish) {
+				Ok(payload) => payload,
+				Err(source) => {
+					send_callback_error(
+						&callback_error_tx,
+						DoctestAssertionError::SerializePublishDiagnostics { source },
+					);
+					break;
+				}
 			};
 			let publish_notification =
 				Notification::new(PublishDiagnostics::METHOD.to_string(), payload);
@@ -66,6 +132,10 @@
 				.send(Message::Notification(publish_notification))
 				.is_err()
 			{
+				send_callback_error(
+					&callback_error_tx,
+					DoctestAssertionError::SendDiagnosticsNotification,
+				);
 				break;
 			}
 		}
@@ -73,8 +143,15 @@
 		Message::Notification(_) | Message::Response(_) => {}
 		}
 	});
-	assert!(
-		result.is_ok(),
-		"scenario should run without error: {result:?}"
-	);
+	if let Ok(error) = callback_error_rx.try_recv() {
+		return Err(error);
+	}
+	result.map_err(|source| DoctestAssertionError::RunScenario { source })
+}
+
+fn send_callback_error(
+	callback_error: &Sender<DoctestAssertionError>,
+	error: 
DoctestAssertionError, +) { + let _ = callback_error.try_send(error); } diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_runner/helpers.rs b/crates/jrsonnet-lsp-scenario/src/scenario_runner/helpers.rs index 420c838c..990e84f4 100644 --- a/crates/jrsonnet-lsp-scenario/src/scenario_runner/helpers.rs +++ b/crates/jrsonnet-lsp-scenario/src/scenario_runner/helpers.rs @@ -196,18 +196,14 @@ fn collect_json_differences( }, ); } - let min_len = actual_array.len().min(expected_array.len()); - for index in 0..min_len { + for (index, (actual_value, expected_value)) in + actual_array.iter().zip(expected_array.iter()).enumerate() + { if report.truncated { return; } let child_path = format!("{path}[{index}]"); - collect_json_differences( - &child_path, - &actual_array[index], - &expected_array[index], - report, - ); + collect_json_differences(&child_path, actual_value, expected_value, report); } } _ => push_difference( diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_runner/runner.rs b/crates/jrsonnet-lsp-scenario/src/scenario_runner/runner.rs index 02715a91..f1b66946 100644 --- a/crates/jrsonnet-lsp-scenario/src/scenario_runner/runner.rs +++ b/crates/jrsonnet-lsp-scenario/src/scenario_runner/runner.rs @@ -375,11 +375,11 @@ mod tests { } #[test] - fn run_scenario_auto_handles_server_initiated_requests() -> Result<(), super::RunnerError> { + fn run_scenario_auto_handles_server_initiated_requests() { let scenario = Scenario::new(vec![]); let refresh_id = lsp_server::RequestId::from(42_i32); - run_scenario(&scenario, move |connection| { + let result = run_scenario(&scenario, move |connection| { let mut saw_refresh_response = false; loop { @@ -418,11 +418,14 @@ mod tests { } Message::Response(response) => { if response.id == refresh_id { - assert!( - response.error.is_none(), - "refresh response should not contain an error", + assert_matches!( + response, + Response { + id, + result: Some(serde_json::Value::Null), + error: None, + } if id == refresh_id ); - 
assert_eq!(response.result, Some(serde_json::Value::Null)); saw_refresh_response = true; } } @@ -433,7 +436,11 @@ mod tests { saw_refresh_response, "client should acknowledge server-initiated refresh requests", ); - }) + }); + assert!( + result.is_ok(), + "scenario should finish successfully: {result:?}" + ); } #[test] diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_script/inputs.rs b/crates/jrsonnet-lsp-scenario/src/scenario_script/inputs.rs index 84075d82..d1aa09a7 100644 --- a/crates/jrsonnet-lsp-scenario/src/scenario_script/inputs.rs +++ b/crates/jrsonnet-lsp-scenario/src/scenario_script/inputs.rs @@ -668,11 +668,11 @@ fn resolve_file_uri_shorthand_json_at( path: &str, ) -> Result { match value { - serde_json::Value::Object(mut object) => { + serde_json::Value::Object(object) => { if object.len() == 1 && object.contains_key("file") { - let file = object - .remove("file") - .expect("checked key existence before remove"); + let Some(file) = object.get("file") else { + return Ok(serde_json::Value::Object(object)); + }; let Some(file) = file.as_str() else { return Err(input_err!("{context}: `{path}.file` must be a string")); }; diff --git a/crates/jrsonnet-lsp/src/analysis/tanka.rs b/crates/jrsonnet-lsp/src/analysis/tanka.rs index 64558fcf..59198cf9 100644 --- a/crates/jrsonnet-lsp/src/analysis/tanka.rs +++ b/crates/jrsonnet-lsp/src/analysis/tanka.rs @@ -142,15 +142,15 @@ mod tests { #[test] fn test_find_root_with_jsonnetfile() { - let tmp = TempDir::new().unwrap(); + let tmp = TempDir::new().expect("expected success"); let root = tmp.path(); // Create jsonnetfile.json at root - fs::write(root.join("jsonnetfile.json"), "{}").unwrap(); + fs::write(root.join("jsonnetfile.json"), "{}").expect("expected success"); // Create a nested directory let nested = root.join("environments").join("prod"); - fs::create_dir_all(&nested).unwrap(); + fs::create_dir_all(&nested).expect("expected success"); // Find root from nested directory let found = find_root(&nested); @@ 
-159,12 +159,12 @@ mod tests { #[test] fn test_find_root_with_tkrc() { - let tmp = TempDir::new().unwrap(); + let tmp = TempDir::new().expect("expected success"); let root = tmp.path(); // Create tkrc.yaml at root (takes precedence over jsonnetfile.json) - fs::write(root.join("tkrc.yaml"), "").unwrap(); - fs::write(root.join("jsonnetfile.json"), "{}").unwrap(); + fs::write(root.join("tkrc.yaml"), "").expect("expected success"); + fs::write(root.join("jsonnetfile.json"), "{}").expect("expected success"); let found = find_root(root); assert_eq!(found, Some(root.to_path_buf())); @@ -172,23 +172,23 @@ mod tests { #[test] fn test_find_root_not_found() { - let tmp = TempDir::new().unwrap(); + let tmp = TempDir::new().expect("expected success"); let found = find_root(tmp.path()); assert_eq!(found, None); } #[test] fn test_resolve_jpath() { - let tmp = TempDir::new().unwrap(); + let tmp = TempDir::new().expect("expected success"); let root = tmp.path(); // Create Tanka structure - fs::write(root.join("jsonnetfile.json"), "{}").unwrap(); - fs::create_dir(root.join("vendor")).unwrap(); - fs::create_dir(root.join("lib")).unwrap(); + fs::write(root.join("jsonnetfile.json"), "{}").expect("expected success"); + fs::create_dir(root.join("vendor")).expect("expected success"); + fs::create_dir(root.join("lib")).expect("expected success"); let env = root.join("environments").join("prod"); - fs::create_dir_all(&env).unwrap(); + fs::create_dir_all(&env).expect("expected success"); // Resolve jpath from environment directory let jpath = resolve_jpath(&env, ResolvePathsWithTankaMode::Auto); @@ -198,16 +198,16 @@ mod tests { #[test] fn test_resolve_jpath_with_env_vendor() { - let tmp = TempDir::new().unwrap(); + let tmp = TempDir::new().expect("expected success"); let root = tmp.path(); // Create Tanka structure with env-level vendor - fs::write(root.join("jsonnetfile.json"), "{}").unwrap(); - fs::create_dir(root.join("vendor")).unwrap(); + fs::write(root.join("jsonnetfile.json"), 
"{}").expect("expected success"); + fs::create_dir(root.join("vendor")).expect("expected success"); let env = root.join("environments").join("prod"); - fs::create_dir_all(&env).unwrap(); - fs::create_dir(env.join("vendor")).unwrap(); + fs::create_dir_all(&env).expect("expected success"); + fs::create_dir(env.join("vendor")).expect("expected success"); // Resolve jpath from environment directory let jpath = resolve_jpath(&env, ResolvePathsWithTankaMode::Auto); @@ -217,18 +217,18 @@ mod tests { #[test] fn test_resolve_jpath_no_root() { - let tmp = TempDir::new().unwrap(); + let tmp = TempDir::new().expect("expected success"); let jpath = resolve_jpath(tmp.path(), ResolvePathsWithTankaMode::Auto); assert_eq!(jpath, Vec::::new()); } #[test] fn test_resolve_jpath_force_mode_without_root() { - let tmp = TempDir::new().unwrap(); + let tmp = TempDir::new().expect("expected success"); let base = tmp.path().join("env"); - fs::create_dir_all(&base).unwrap(); - fs::create_dir(base.join("vendor")).unwrap(); - fs::create_dir(base.join("lib")).unwrap(); + fs::create_dir_all(&base).expect("expected success"); + fs::create_dir(base.join("vendor")).expect("expected success"); + fs::create_dir(base.join("lib")).expect("expected success"); let jpath = resolve_jpath(&base, ResolvePathsWithTankaMode::True); let expected = vec![base.join("vendor"), base.join("lib"), base]; @@ -237,10 +237,11 @@ mod tests { #[test] fn test_effective_import_roots_without_tanka() { - let tmp = TempDir::new().unwrap(); + let tmp = TempDir::new().expect("expected success"); let base = tmp.path().join("env").join("main.jsonnet"); - fs::create_dir_all(base.parent().expect("base should have parent")).unwrap(); - fs::write(&base, "{}").unwrap(); + fs::create_dir_all(base.parent().expect("base should have parent")) + .expect("expected success"); + fs::write(&base, "{}").expect("expected success"); let configured = vec![ PathBuf::from("/configured/one"), @@ -252,14 +253,14 @@ mod tests { #[test] fn 
test_effective_import_roots_with_tanka_dedups() { - let tmp = TempDir::new().unwrap(); + let tmp = TempDir::new().expect("expected success"); let root = tmp.path(); - fs::write(root.join("jsonnetfile.json"), "{}").unwrap(); - fs::create_dir(root.join("vendor")).unwrap(); + fs::write(root.join("jsonnetfile.json"), "{}").expect("expected success"); + fs::create_dir(root.join("vendor")).expect("expected success"); let env = root.join("environments").join("prod"); - fs::create_dir_all(&env).unwrap(); + fs::create_dir_all(&env).expect("expected success"); let file = env.join("main.jsonnet"); - fs::write(&file, "{}").unwrap(); + fs::write(&file, "{}").expect("expected success"); let configured = vec![root.join("vendor")]; let roots = effective_import_roots(&file, &configured, ResolvePathsWithTankaMode::Auto); diff --git a/crates/jrsonnet-lsp/src/handlers/diagnostics.rs b/crates/jrsonnet-lsp/src/handlers/diagnostics.rs index fdad0a97..0de5a5e3 100644 --- a/crates/jrsonnet-lsp/src/handlers/diagnostics.rs +++ b/crates/jrsonnet-lsp/src/handlers/diagnostics.rs @@ -165,7 +165,7 @@ mod tests { use crate::analysis::EvalConfig; fn test_uri() -> Uri { - "file:///test.jsonnet".parse().unwrap() + "file:///test.jsonnet".parse().expect("expected success") } fn test_path() -> CanonicalPath { diff --git a/crates/jrsonnet-lsp/src/protocol/inflight_requests.rs b/crates/jrsonnet-lsp/src/protocol/inflight_requests.rs index a7c8ce42..5cadb578 100644 --- a/crates/jrsonnet-lsp/src/protocol/inflight_requests.rs +++ b/crates/jrsonnet-lsp/src/protocol/inflight_requests.rs @@ -185,6 +185,7 @@ impl InflightRequests { #[cfg(test)] mod tests { + use assert_matches::assert_matches; use crossbeam_channel::unbounded; use lsp_server::{ErrorCode, Message, RequestId, Response}; use lsp_types::request::{CodeLensResolve, RegisterCapability, Request as _}; @@ -202,23 +203,31 @@ mod tests { id.clone(), serde_json::json!({"ok": true}), )) - .unwrap()); + .expect("expected success")); 
assert!(receiver.try_recv().is_err()); inflight.begin_unknown(id.clone(), "example/method"); assert!(inflight .send_inflight_response(Response::new_ok(id, serde_json::json!({"ok": true}))) - .unwrap()); - - let message = receiver.recv().unwrap(); - match message { - Message::Response(response) => { - assert_eq!(response.id, RequestId::from(7)); - assert!(response.error.is_none()); - assert_eq!(response.result, Some(serde_json::json!({"ok": true}))); + .expect("expected success")); + + let message = receiver.recv().expect("expected success"); + assert_matches!( + message, + Message::Response(response) if { + assert_matches!( + response, + Response { + ref id, + result: Some(ref result), + error: None, + } + if id == &RequestId::from(7) + && result == &serde_json::json!({"ok": true}) + ); + true } - other => panic!("unexpected message: {other:?}"), - } + ); } #[test] @@ -243,20 +252,28 @@ mod tests { command: None, data: None, }; - assert!(inflight.send_ok(request, expected.clone()).unwrap()); - - let message = receiver.recv().unwrap(); - match message { - Message::Response(response) => { - assert_eq!(response.id, RequestId::from(15)); - assert!(response.error.is_none()); - assert_eq!( - response.result, - Some(serde_json::to_value(expected).unwrap()) + assert!(inflight + .send_ok(request, expected.clone()) + .expect("expected success")); + + let message = receiver.recv().expect("expected success"); + assert_matches!( + message, + Message::Response(response) if { + assert_matches!( + response, + Response { + ref id, + result: Some(ref result), + error: None, + } + if id == &RequestId::from(15) + && result + == &serde_json::to_value(expected).expect("expected success") ); + true } - other => panic!("unexpected message: {other:?}"), - } + ); } #[test] @@ -272,19 +289,26 @@ mod tests { ErrorCode::MethodNotFound, "Method not found: custom/method", ) - .unwrap()); - - let message = receiver.recv().unwrap(); - match message { - Message::Response(response) => { - 
assert_eq!(response.id, RequestId::from(23)); - let error = response.error.expect("expected method-not-found error"); - assert_eq!(error.code, ErrorCode::MethodNotFound as i32); - assert_eq!(error.message, "Method not found: custom/method"); - assert_eq!(response.result, None); + .expect("expected success")); + + let message = receiver.recv().expect("expected success"); + assert_matches!( + message, + Message::Response(response) if { + assert_matches!( + response, + Response { + ref id, + result: None, + error: Some(ref error), + } + if id == &RequestId::from(23) + && error.code == ErrorCode::MethodNotFound as i32 + && error.message == "Method not found: custom/method" + ); + true } - other => panic!("unexpected message: {other:?}"), - } + ); } #[test] @@ -294,19 +318,26 @@ mod tests { let id = RequestId::from(31); inflight.begin_unknown(id.clone(), "textDocument/codeLens"); - assert!(inflight.cancel_request(id).unwrap()); + assert!(inflight.cancel_request(id).expect("expected success")); - let message = receiver.recv().unwrap(); - match message { - Message::Response(response) => { - assert_eq!(response.id, RequestId::from(31)); - assert!(response.result.is_none()); - let error = response.error.expect("expected cancel error"); - assert_eq!(error.code, ErrorCode::RequestCanceled as i32); - assert_eq!(error.message, "Request canceled: textDocument/codeLens"); + let message = receiver.recv().expect("expected success"); + assert_matches!( + message, + Message::Response(response) if { + assert_matches!( + response, + Response { + ref id, + result: None, + error: Some(ref error), + } + if id == &RequestId::from(31) + && error.code == ErrorCode::RequestCanceled as i32 + && error.message == "Request canceled: textDocument/codeLens" + ); + true } - other => panic!("unexpected message: {other:?}"), - } + ); } #[test] @@ -314,7 +345,9 @@ mod tests { let (sender, receiver) = unbounded(); let mut inflight = InflightRequests::new(sender); - 
assert!(!inflight.cancel_request(RequestId::from(41)).unwrap()); + assert!(!inflight + .cancel_request(RequestId::from(41)) + .expect("expected success")); assert!(receiver.try_recv().is_err()); } @@ -328,21 +361,22 @@ mod tests { inflight .send_outgoing_request::(params.clone()) - .unwrap(); + .expect("expected success"); - let message = receiver.recv().unwrap(); - match message { - Message::Request(request) => { + let message = receiver.recv().expect("expected success"); + assert_matches!( + message, + Message::Request(request) if { assert_eq!(request.method, RegisterCapability::METHOD); let parsed_params: lsp_types::RegistrationParams = - serde_json::from_value(request.params).unwrap(); + serde_json::from_value(request.params.clone()).expect("expected success"); assert_eq!(parsed_params, params); let meta = inflight - .complete_outgoing(request.id) + .complete_outgoing(request.id.clone()) .expect("outgoing request should be tracked"); assert_eq!(meta.method, RegisterCapability::METHOD); + true } - other => panic!("unexpected message: {other:?}"), - } + ); } } diff --git a/crates/jrsonnet-lsp/src/server/custom_operations/eval_file.rs b/crates/jrsonnet-lsp/src/server/custom_operations/eval_file.rs index ea91bcee..82383e0b 100644 --- a/crates/jrsonnet-lsp/src/server/custom_operations/eval_file.rs +++ b/crates/jrsonnet-lsp/src/server/custom_operations/eval_file.rs @@ -90,22 +90,40 @@ mod tests { #[test] fn eval_file_lens_present_for_root_expression() { - let uri: lsp_types::Uri = "file:///test/main.jsonnet".parse().unwrap(); + let uri: lsp_types::Uri = "file:///test/main.jsonnet" + .parse() + .expect("expected success"); let doc = Document::new("{ a: 1 }".to_string(), DocVersion::new(1)); let lenses = build_code_lenses(&doc, &uri); - assert_eq!(lenses.len(), 1); - let lens = &lenses[0]; - assert_eq!(lens.command.as_ref().unwrap().command, EXECUTE_COMMAND); assert_eq!( - lens.command.as_ref().unwrap().arguments.as_ref().unwrap(), - 
&vec![serde_json::json!(uri.to_string())] + lenses, + vec![lsp_types::CodeLens { + range: lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 0, + }, + end: lsp_types::Position { + line: 0, + character: 0, + }, + }, + command: Some(lsp_types::Command { + title: "Evaluate".to_string(), + command: EXECUTE_COMMAND.to_string(), + arguments: Some(vec![serde_json::json!(uri.to_string())]), + }), + data: None, + }] ); } #[test] fn eval_file_lens_absent_without_root_expression() { - let uri: lsp_types::Uri = "file:///test/main.jsonnet".parse().unwrap(); + let uri: lsp_types::Uri = "file:///test/main.jsonnet" + .parse() + .expect("expected success"); let doc = Document::new(String::new(), DocVersion::new(1)); let lenses = build_code_lenses(&doc, &uri); assert!(lenses.is_empty()); diff --git a/crates/jrsonnet-lsp/src/server/request_dispatch.rs b/crates/jrsonnet-lsp/src/server/request_dispatch.rs index 76b732b3..9c2b87b9 100644 --- a/crates/jrsonnet-lsp/src/server/request_dispatch.rs +++ b/crates/jrsonnet-lsp/src/server/request_dispatch.rs @@ -398,9 +398,17 @@ impl Server { if let Some(operation) = custom_operations::operation_for_execute_command(¶ms.command) { let context = self.async_request_context(); let args = params.arguments.clone(); - let compute = operation - .handle_execute_command - .expect("operation with execute command id must provide execute handler"); + let Some(compute) = operation.handle_execute_command else { + let _ = self.inflight_requests.send_err( + request, + lsp_server::ErrorCode::InvalidParams, + format!( + "Missing execute handler for custom operation: {}", + params.command + ), + )?; + return Ok(()); + }; self.spawn_typed_json_response(request, move || compute(&context, &args)); return Ok(()); } diff --git a/crates/jrsonnet-lsp/tests/cross_file_tests.rs b/crates/jrsonnet-lsp/tests/cross_file_tests.rs index ebbe0486..aa43fd99 100644 --- a/crates/jrsonnet-lsp/tests/cross_file_tests.rs +++ 
b/crates/jrsonnet-lsp/tests/cross_file_tests.rs @@ -19,15 +19,15 @@ use tempfile::TempDir; fn write_file(dir: &TempDir, name: &str, content: &str) -> PathBuf { let path = dir.path().join(name); if let Some(parent) = path.parent() { - fs::create_dir_all(parent).unwrap(); + fs::create_dir_all(parent).expect("expected success"); } - fs::write(&path, content).unwrap(); + fs::write(&path, content).expect("expected success"); path } /// Helper to get canonical path from a temp file. fn canonical_path(path: &PathBuf) -> CanonicalPath { - CanonicalPath::new(path.canonicalize().unwrap()) + CanonicalPath::new(path.canonicalize().expect("expected success")) } /// Create a resolver function for the given base directory. @@ -68,7 +68,7 @@ mod import_graph_tests { #[test] fn test_deep_import_chain() { // Create a chain: file1 -> file2 -> file3 -> file4 -> file5 -> file6 - let tmp = TempDir::new().unwrap(); + let tmp = TempDir::new().expect("expected success"); let base_dir = tmp.path(); let file6 = write_file(&tmp, "file6.jsonnet", "{ value: 6 }"); @@ -103,7 +103,7 @@ mod import_graph_tests { // Parse all files for file in [&file1, &file2, &file3, &file4, &file5, &file6] { - let content = fs::read_to_string(file).unwrap(); + let content = fs::read_to_string(file).expect("expected success"); let doc = Document::new(content, DocVersion::new(1)); let path = canonical_path(file); graph.update_file(graph.intern(&path), &doc, &resolver); @@ -136,7 +136,7 @@ mod import_graph_tests { // B C // \ / // D - let tmp = TempDir::new().unwrap(); + let tmp = TempDir::new().expect("expected success"); let base_dir = tmp.path(); let file_d = write_file(&tmp, "d.jsonnet", "{ shared: 'value' }"); @@ -164,7 +164,7 @@ mod import_graph_tests { let resolver = make_resolver(base_dir); for file in [&file_d, &file_b, &file_c, &file_a] { - let content = fs::read_to_string(file).unwrap(); + let content = fs::read_to_string(file).expect("expected success"); let doc = Document::new(content, 
DocVersion::new(1)); let path = canonical_path(file); graph.update_file(graph.intern(&path), &doc, &resolver); @@ -193,7 +193,7 @@ mod import_graph_tests { #[test] fn test_import_graph_removal() { - let tmp = TempDir::new().unwrap(); + let tmp = TempDir::new().expect("expected success"); let base_dir = tmp.path(); let lib = write_file(&tmp, "lib.jsonnet", "{ helper: 42 }"); @@ -207,12 +207,12 @@ mod import_graph_tests { let resolver = make_resolver(base_dir); // Add both files - let lib_content = fs::read_to_string(&lib).unwrap(); + let lib_content = fs::read_to_string(&lib).expect("expected success"); let lib_doc = Document::new(lib_content, DocVersion::new(1)); let lib_path = canonical_path(&lib); graph.update_file(graph.intern(&lib_path), &lib_doc, &resolver); - let main_content = fs::read_to_string(&main).unwrap(); + let main_content = fs::read_to_string(&main).expect("expected success"); let main_doc = Document::new(main_content, DocVersion::new(1)); let main_path = canonical_path(&main); graph.update_file(graph.intern(&main_path), &main_doc, &resolver); @@ -232,7 +232,7 @@ mod import_graph_tests { #[test] fn test_multiple_imports_same_file() { // Test that importing the same file from multiple locations is tracked correctly - let tmp = TempDir::new().unwrap(); + let tmp = TempDir::new().expect("expected success"); let base_dir = tmp.path(); let shared = write_file(&tmp, "shared.jsonnet", "{ x: 1 }"); @@ -256,7 +256,7 @@ mod import_graph_tests { let resolver = make_resolver(base_dir); for file in [&shared, &user1, &user2, &user3] { - let content = fs::read_to_string(file).unwrap(); + let content = fs::read_to_string(file).expect("expected success"); let doc = Document::new(content, DocVersion::new(1)); let path = canonical_path(file); graph.update_file(graph.intern(&path), &doc, &resolver); @@ -277,7 +277,7 @@ mod type_cache_tests { fn test_cache_basic_types() { let global = Arc::new(GlobalTyStore::new()); let mut cache = TypeCache::new(Arc::clone(&global), 
PathStore::new()); - let tmp = TempDir::new().unwrap(); + let tmp = TempDir::new().expect("expected success"); let file1 = write_file(&tmp, "number.jsonnet", "42"); let file2 = write_file(&tmp, "string.jsonnet", "\"hello\""); @@ -305,7 +305,7 @@ mod type_cache_tests { fn test_cache_version_tracking() { let global = Arc::new(GlobalTyStore::new()); let mut cache = TypeCache::new(Arc::clone(&global), PathStore::new()); - let tmp = TempDir::new().unwrap(); + let tmp = TempDir::new().expect("expected success"); let file = write_file(&tmp, "test.jsonnet", "1"); let path = canonical_path(&file); @@ -326,7 +326,7 @@ mod type_cache_tests { fn test_cache_invalidation() { let global = Arc::new(GlobalTyStore::new()); let mut cache = TypeCache::new(Arc::clone(&global), PathStore::new()); - let tmp = TempDir::new().unwrap(); + let tmp = TempDir::new().expect("expected success"); let file1 = write_file(&tmp, "a.jsonnet", "1"); let file2 = write_file(&tmp, "b.jsonnet", "2"); @@ -357,7 +357,7 @@ mod type_cache_tests { fn test_cache_invalidate_many() { let global = Arc::new(GlobalTyStore::new()); let mut cache = TypeCache::new(Arc::clone(&global), PathStore::new()); - let tmp = TempDir::new().unwrap(); + let tmp = TempDir::new().expect("expected success"); let files: Vec<_> = (0..10) .map(|i| { @@ -389,12 +389,12 @@ mod cross_file_type_tests { fn test_analyze_and_cache_basic() { let global = Arc::new(GlobalTyStore::new()); let cache = new_shared_cache(Arc::clone(&global), PathStore::new()); - let tmp = TempDir::new().unwrap(); + let tmp = TempDir::new().expect("expected success"); let file = write_file(&tmp, "number.jsonnet", "42"); let path = canonical_path(&file); - let content = fs::read_to_string(&file).unwrap(); + let content = fs::read_to_string(&file).expect("expected success"); let doc = Document::new(content, DocVersion::new(1)); // Analyze and cache @@ -414,7 +414,7 @@ mod cross_file_type_tests { fn test_analyze_and_cache_different_types() { let global = 
Arc::new(GlobalTyStore::new()); let cache = new_shared_cache(Arc::clone(&global), PathStore::new()); - let tmp = TempDir::new().unwrap(); + let tmp = TempDir::new().expect("expected success"); // Test various Jsonnet literal types let test_cases = [ @@ -438,7 +438,7 @@ mod cross_file_type_tests { fn test_cache_hit_on_same_version() { let global = Arc::new(GlobalTyStore::new()); let cache = new_shared_cache(Arc::clone(&global), PathStore::new()); - let tmp = TempDir::new().unwrap(); + let tmp = TempDir::new().expect("expected success"); let file = write_file(&tmp, "test.jsonnet", "42"); let path = canonical_path(&file); @@ -458,7 +458,7 @@ mod cross_file_type_tests { fn test_cache_miss_on_new_version() { let global = Arc::new(GlobalTyStore::new()); let cache = new_shared_cache(Arc::clone(&global), PathStore::new()); - let tmp = TempDir::new().unwrap(); + let tmp = TempDir::new().expect("expected success"); let file = write_file(&tmp, "test.jsonnet", "42"); let path = canonical_path(&file); @@ -478,7 +478,7 @@ mod cross_file_type_tests { fn test_shared_global_store() { let global = Arc::new(GlobalTyStore::new()); let cache = new_shared_cache(Arc::clone(&global), PathStore::new()); - let tmp = TempDir::new().unwrap(); + let tmp = TempDir::new().expect("expected success"); // Analyze multiple files let file1 = write_file(&tmp, "a.jsonnet", "1"); @@ -508,7 +508,7 @@ mod transitive_update_tests { #[test] fn test_transitive_invalidation_chain() { // When a base file changes, all transitive dependents should be invalidated - let tmp = TempDir::new().unwrap(); + let tmp = TempDir::new().expect("expected success"); let base_dir = tmp.path(); let base = write_file(&tmp, "base.jsonnet", "{ x: 1 }"); @@ -522,7 +522,7 @@ mod transitive_update_tests { // Build graph for file in [&base, &mid, &top_file] { - let content = fs::read_to_string(file).unwrap(); + let content = fs::read_to_string(file).expect("expected success"); let doc = Document::new(content, DocVersion::new(1)); let 
path = canonical_path(file); graph.update_file(graph.intern(&path), &doc, &resolver); @@ -557,7 +557,7 @@ mod transitive_update_tests { #[test] fn test_partial_invalidation() { // When a leaf file changes, only its importers should be affected - let tmp = TempDir::new().unwrap(); + let tmp = TempDir::new().expect("expected success"); let base_dir = tmp.path(); let lib1 = write_file(&tmp, "lib1.jsonnet", "{ a: 1 }"); @@ -571,7 +571,7 @@ mod transitive_update_tests { // Build graph - main imports lib1, not lib2 for file in [&lib1, &lib2, &main] { - let content = fs::read_to_string(file).unwrap(); + let content = fs::read_to_string(file).expect("expected success"); let doc = Document::new(content, DocVersion::new(1)); let path = canonical_path(file); graph.update_file(graph.intern(&path), &doc, &resolver); @@ -600,7 +600,7 @@ mod transitive_update_tests { #[test] fn test_diamond_invalidation() { // When D changes in A -> B,C -> D diamond, all should be invalidated - let tmp = TempDir::new().unwrap(); + let tmp = TempDir::new().expect("expected success"); let base_dir = tmp.path(); let d = write_file(&tmp, "d.jsonnet", "{ shared: 1 }"); @@ -622,7 +622,7 @@ mod transitive_update_tests { let mut cache = TypeCache::new(Arc::clone(&global), PathStore::new()); for file in [&d, &b, &c, &a] { - let content = fs::read_to_string(file).unwrap(); + let content = fs::read_to_string(file).expect("expected success"); let doc = Document::new(content, DocVersion::new(1)); let path = canonical_path(file); graph.update_file(graph.intern(&path), &doc, &resolver); @@ -656,10 +656,10 @@ mod type_analysis_imports_tests { #[test] fn test_analyze_object_type() { let global = Arc::new(GlobalTyStore::new()); - let tmp = TempDir::new().unwrap(); + let tmp = TempDir::new().expect("expected success"); let file = write_file(&tmp, "obj.jsonnet", "{ a: 1, b: 'hello', c: true }"); - let content = fs::read_to_string(&file).unwrap(); + let content = fs::read_to_string(&file).expect("expected success"); 
let doc = Document::new(content, DocVersion::new(1)); let analysis = TypeAnalysis::analyze_with_global(&doc, Arc::clone(&global)); @@ -675,10 +675,10 @@ mod type_analysis_imports_tests { #[test] fn test_analyze_array_type() { let global = Arc::new(GlobalTyStore::new()); - let tmp = TempDir::new().unwrap(); + let tmp = TempDir::new().expect("expected success"); let file = write_file(&tmp, "arr.jsonnet", "[1, 2, 3]"); - let content = fs::read_to_string(&file).unwrap(); + let content = fs::read_to_string(&file).expect("expected success"); let doc = Document::new(content, DocVersion::new(1)); let analysis = TypeAnalysis::analyze_with_global(&doc, Arc::clone(&global)); @@ -692,10 +692,10 @@ mod type_analysis_imports_tests { #[test] fn test_analyze_function_type() { let global = Arc::new(GlobalTyStore::new()); - let tmp = TempDir::new().unwrap(); + let tmp = TempDir::new().expect("expected success"); let file = write_file(&tmp, "func.jsonnet", "function(x) x + 1"); - let content = fs::read_to_string(&file).unwrap(); + let content = fs::read_to_string(&file).expect("expected success"); let doc = Document::new(content, DocVersion::new(1)); let analysis = TypeAnalysis::analyze_with_global(&doc, Arc::clone(&global)); @@ -709,10 +709,10 @@ mod type_analysis_imports_tests { #[test] fn test_analyze_local_binding() { let global = Arc::new(GlobalTyStore::new()); - let tmp = TempDir::new().unwrap(); + let tmp = TempDir::new().expect("expected success"); let file = write_file(&tmp, "local.jsonnet", "local x = 42; x"); - let content = fs::read_to_string(&file).unwrap(); + let content = fs::read_to_string(&file).expect("expected success"); let doc = Document::new(content, DocVersion::new(1)); let analysis = TypeAnalysis::analyze_with_global(&doc, Arc::clone(&global)); @@ -724,10 +724,10 @@ mod type_analysis_imports_tests { #[test] fn test_analyze_conditional() { let global = Arc::new(GlobalTyStore::new()); - let tmp = TempDir::new().unwrap(); + let tmp = 
TempDir::new().expect("expected success"); let file = write_file(&tmp, "cond.jsonnet", "if true then 1 else 2"); - let content = fs::read_to_string(&file).unwrap(); + let content = fs::read_to_string(&file).expect("expected success"); let doc = Document::new(content, DocVersion::new(1)); let analysis = TypeAnalysis::analyze_with_global(&doc, Arc::clone(&global)); diff --git a/crates/jrsonnet-lsp/tests/docs_lsp_examples.rs b/crates/jrsonnet-lsp/tests/docs_lsp_examples.rs index c19df6f0..2abc89f1 100644 --- a/crates/jrsonnet-lsp/tests/docs_lsp_examples.rs +++ b/crates/jrsonnet-lsp/tests/docs_lsp_examples.rs @@ -23,7 +23,7 @@ fn docs_lsp_dir() -> PathBuf { fn docs_lsp_markdown_paths() -> Vec { let mut paths = fs::read_dir(docs_lsp_dir()) - .unwrap_or_else(|err| panic!("failed to list docs/lsp: {err}")) + .expect("failed to list docs/lsp") .filter_map(Result::ok) .map(|entry| entry.path()) .filter(|path| path.extension().is_some_and(|ext| ext == "md")) @@ -36,7 +36,7 @@ fn relative_path(path: &Path) -> String { let workspace_root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("../.."); let relative = path .strip_prefix(workspace_root) - .unwrap_or_else(|err| panic!("failed to relativize {}: {err}", path.to_string_lossy())); + .expect("failed to relativize docs path"); relative.to_string_lossy().into_owned() } @@ -90,8 +90,7 @@ fn extract_markdown_fenced_blocks(markdown: &str) -> Vec { fn collect_jsonnet_doc_examples() -> Vec { let mut examples = Vec::new(); for path in docs_lsp_markdown_paths() { - let text = fs::read_to_string(&path) - .unwrap_or_else(|err| panic!("failed to read {}: {err}", path.to_string_lossy())); + let text = fs::read_to_string(&path).expect("failed to read docs/lsp markdown file"); let source_path = relative_path(&path); for block in extract_markdown_fenced_blocks(&text) { if block.language == "jsonnet" { diff --git a/crates/jrsonnet-lsp/tests/integration_test.rs b/crates/jrsonnet-lsp/tests/integration_test.rs index 4f75b2a6..d7ab7cb4 100644 
--- a/crates/jrsonnet-lsp/tests/integration_test.rs +++ b/crates/jrsonnet-lsp/tests/integration_test.rs @@ -121,13 +121,13 @@ fn initialize_request_with_options(id: i32, initialization_options: serde_json:: Request::new( id.into(), Initialize::METHOD.to_string(), - serde_json::to_value(params).unwrap(), + serde_json::to_value(params).expect("expected success"), ) } /// Helper to create an initialize request with a workspace root URI. fn initialize_request_with_root_uri(id: i32, root_uri: &str) -> Request { - let mut params = serde_json::to_value(InitializeParams::default()).unwrap(); + let mut params = serde_json::to_value(InitializeParams::default()).expect("expected success"); if let Some(object) = params.as_object_mut() { object.insert("rootUri".to_string(), serde_json::json!(root_uri)); } @@ -154,7 +154,7 @@ fn initialize_request_with_dynamic_watched_files(id: i32) -> Request { Request::new( id.into(), Initialize::METHOD.to_string(), - serde_json::to_value(params).unwrap(), + serde_json::to_value(params).expect("expected success"), ) } @@ -163,7 +163,7 @@ fn initialize_request_with_dynamic_watched_files(id: i32) -> Request { fn initialize_request_with_dynamic_watched_files_relative(id: i32, root_uri: &str) -> Request { let params = InitializeParams { workspace_folders: Some(vec![WorkspaceFolder { - uri: root_uri.parse().unwrap(), + uri: root_uri.parse().expect("expected success"), name: "workspace".to_owned(), }]), capabilities: lsp_types::ClientCapabilities { @@ -182,7 +182,7 @@ fn initialize_request_with_dynamic_watched_files_relative(id: i32, root_uri: &st Request::new( id.into(), Initialize::METHOD.to_string(), - serde_json::to_value(params).unwrap(), + serde_json::to_value(params).expect("expected success"), ) } @@ -205,7 +205,7 @@ fn initialize_request_with_inlay_hint_refresh_support(id: i32) -> Request { Request::new( id.into(), Initialize::METHOD.to_string(), - serde_json::to_value(params).unwrap(), + serde_json::to_value(params).expect("expected 
success"), ) } @@ -235,7 +235,7 @@ fn cancel_request_notification(request_id: i32) -> Notification { }; Notification::new( Cancel::METHOD.to_string(), - serde_json::to_value(params).unwrap(), + serde_json::to_value(params).expect("expected success"), ) } @@ -243,7 +243,7 @@ fn cancel_request_notification(request_id: i32) -> Notification { fn did_open_notification(uri: &str, text: &str) -> Notification { let params = DidOpenTextDocumentParams { text_document: TextDocumentItem { - uri: uri.parse().unwrap(), + uri: uri.parse().expect("expected success"), language_id: "jsonnet".to_string(), version: 1, text: text.to_string(), @@ -251,32 +251,32 @@ fn did_open_notification(uri: &str, text: &str) -> Notification { }; Notification::new( DidOpenTextDocument::METHOD.to_string(), - serde_json::to_value(params).unwrap(), + serde_json::to_value(params).expect("expected success"), ) } fn did_save_notification(uri: &str, text: Option<&str>) -> Notification { let params = DidSaveTextDocumentParams { text_document: TextDocumentIdentifier { - uri: uri.parse().unwrap(), + uri: uri.parse().expect("expected success"), }, text: text.map(ToString::to_string), }; Notification::new( DidSaveTextDocument::METHOD.to_string(), - serde_json::to_value(params).unwrap(), + serde_json::to_value(params).expect("expected success"), ) } fn did_close_notification(uri: &str) -> Notification { let params = DidCloseTextDocumentParams { text_document: TextDocumentIdentifier { - uri: uri.parse().unwrap(), + uri: uri.parse().expect("expected success"), }, }; Notification::new( DidCloseTextDocument::METHOD.to_string(), - serde_json::to_value(params).unwrap(), + serde_json::to_value(params).expect("expected success"), ) } @@ -285,7 +285,7 @@ fn goto_definition_request(id: i32, uri: &str, line: u32, character: u32) -> Req let params = GotoDefinitionParams { text_document_position_params: TextDocumentPositionParams { text_document: TextDocumentIdentifier { - uri: uri.parse().unwrap(), + uri: 
uri.parse().expect("expected success"), }, position: Position { line, character }, }, @@ -295,7 +295,7 @@ fn goto_definition_request(id: i32, uri: &str, line: u32, character: u32) -> Req Request::new( id.into(), GotoDefinition::METHOD.to_string(), - serde_json::to_value(params).unwrap(), + serde_json::to_value(params).expect("expected success"), ) } @@ -303,7 +303,7 @@ fn goto_type_definition_request(id: i32, uri: &str, line: u32, character: u32) - let params = GotoDefinitionParams { text_document_position_params: TextDocumentPositionParams { text_document: TextDocumentIdentifier { - uri: uri.parse().unwrap(), + uri: uri.parse().expect("expected success"), }, position: Position { line, character }, }, @@ -313,7 +313,7 @@ fn goto_type_definition_request(id: i32, uri: &str, line: u32, character: u32) - Request::new( id.into(), GotoTypeDefinition::METHOD.to_string(), - serde_json::to_value(params).unwrap(), + serde_json::to_value(params).expect("expected success"), ) } @@ -332,14 +332,15 @@ fn assert_type_definition_matches_definition( line, character, ))) - .unwrap(); + .expect("expected success"); let definition_response = recv_response(conn, definition_id); assert!( definition_response.error.is_none(), "Goto definition request should succeed" ); let definition_result: Option = - serde_json::from_value(definition_response.result.expect("should have result")).unwrap(); + serde_json::from_value(definition_response.result.expect("should have result")) + .expect("expected success"); conn.sender .send(Message::Request(goto_type_definition_request( @@ -348,7 +349,7 @@ fn assert_type_definition_matches_definition( line, character, ))) - .unwrap(); + .expect("expected success"); let type_definition_response = recv_response(conn, type_definition_id); assert!( type_definition_response.error.is_none(), @@ -356,7 +357,7 @@ fn assert_type_definition_matches_definition( ); let type_definition_result: Option = serde_json::from_value(type_definition_response.result.expect("should 
have result")) - .unwrap(); + .expect("expected success"); assert_eq!( type_definition_result, definition_result, @@ -371,10 +372,12 @@ fn send_goto_and_parse( label: &str, request: Request, ) -> Option { - conn.sender.send(Message::Request(request)).unwrap(); + conn.sender + .send(Message::Request(request)) + .expect("expected success"); let response = recv_response(conn, id); assert!(response.error.is_none(), "{label} request should succeed"); - serde_json::from_value(response.result.expect("should have result")).unwrap() + serde_json::from_value(response.result.expect("should have result")).expect("expected success") } /// Helper to create a goto declaration request. @@ -382,7 +385,7 @@ fn goto_declaration_request(id: i32, uri: &str, line: u32, character: u32) -> Re let params = GotoDefinitionParams { text_document_position_params: TextDocumentPositionParams { text_document: TextDocumentIdentifier { - uri: uri.parse().unwrap(), + uri: uri.parse().expect("expected success"), }, position: Position { line, character }, }, @@ -392,7 +395,7 @@ fn goto_declaration_request(id: i32, uri: &str, line: u32, character: u32) -> Re Request::new( id.into(), GotoDeclaration::METHOD.to_string(), - serde_json::to_value(params).unwrap(), + serde_json::to_value(params).expect("expected success"), ) } @@ -400,7 +403,7 @@ fn goto_implementation_request(id: i32, uri: &str, line: u32, character: u32) -> let params = GotoDefinitionParams { text_document_position_params: TextDocumentPositionParams { text_document: TextDocumentIdentifier { - uri: uri.parse().unwrap(), + uri: uri.parse().expect("expected success"), }, position: Position { line, character }, }, @@ -410,7 +413,7 @@ fn goto_implementation_request(id: i32, uri: &str, line: u32, character: u32) -> Request::new( id.into(), GotoImplementation::METHOD.to_string(), - serde_json::to_value(params).unwrap(), + serde_json::to_value(params).expect("expected success"), ) } @@ -425,7 +428,7 @@ fn references_request( let params = 
ReferenceParams { text_document_position: TextDocumentPositionParams { text_document: TextDocumentIdentifier { - uri: uri.parse().unwrap(), + uri: uri.parse().expect("expected success"), }, position: Position { line, character }, }, @@ -438,7 +441,7 @@ fn references_request( Request::new( id.into(), References::METHOD.to_string(), - serde_json::to_value(params).unwrap(), + serde_json::to_value(params).expect("expected success"), ) } @@ -446,7 +449,7 @@ fn document_highlight_request(id: i32, uri: &str, line: u32, character: u32) -> let params = lsp_types::DocumentHighlightParams { text_document_position_params: TextDocumentPositionParams { text_document: TextDocumentIdentifier { - uri: uri.parse().unwrap(), + uri: uri.parse().expect("expected success"), }, position: Position { line, character }, }, @@ -456,7 +459,7 @@ fn document_highlight_request(id: i32, uri: &str, line: u32, character: u32) -> Request::new( id.into(), DocumentHighlightRequest::METHOD.to_string(), - serde_json::to_value(params).unwrap(), + serde_json::to_value(params).expect("expected success"), ) } @@ -464,7 +467,7 @@ fn rename_request(id: i32, uri: &str, line: u32, character: u32, new_name: &str) let params = RenameParams { text_document_position: TextDocumentPositionParams { text_document: TextDocumentIdentifier { - uri: uri.parse().unwrap(), + uri: uri.parse().expect("expected success"), }, position: Position { line, character }, }, @@ -474,7 +477,7 @@ fn rename_request(id: i32, uri: &str, line: u32, character: u32, new_name: &str) Request::new( id.into(), Rename::METHOD.to_string(), - serde_json::to_value(params).unwrap(), + serde_json::to_value(params).expect("expected success"), ) } @@ -489,7 +492,7 @@ fn inlay_hint_request( let params = lsp_types::InlayHintParams { work_done_progress_params: WorkDoneProgressParams::default(), text_document: TextDocumentIdentifier { - uri: uri.parse().unwrap(), + uri: uri.parse().expect("expected success"), }, range: lsp_types::Range { start: Position { @@ 
-505,7 +508,7 @@ fn inlay_hint_request( Request::new( id.into(), InlayHintRequest::METHOD.to_string(), - serde_json::to_value(params).unwrap(), + serde_json::to_value(params).expect("expected success"), ) } @@ -541,7 +544,7 @@ fn formatting_request_with_options( ) -> Request { let params = lsp_types::DocumentFormattingParams { text_document: TextDocumentIdentifier { - uri: uri.parse().unwrap(), + uri: uri.parse().expect("expected success"), }, options: formatting_options( tab_size, @@ -555,7 +558,7 @@ fn formatting_request_with_options( Request::new( id.into(), Formatting::METHOD.to_string(), - serde_json::to_value(params).unwrap(), + serde_json::to_value(params).expect("expected success"), ) } @@ -567,7 +570,7 @@ fn range_formatting_request_with_options( ) -> Request { let params = lsp_types::DocumentRangeFormattingParams { text_document: TextDocumentIdentifier { - uri: uri.parse().unwrap(), + uri: uri.parse().expect("expected success"), }, range, options, @@ -576,7 +579,7 @@ fn range_formatting_request_with_options( Request::new( id.into(), RangeFormatting::METHOD.to_string(), - serde_json::to_value(params).unwrap(), + serde_json::to_value(params).expect("expected success"), ) } @@ -592,7 +595,7 @@ fn semantic_tokens_range_request( work_done_progress_params: WorkDoneProgressParams::default(), partial_result_params: PartialResultParams::default(), text_document: TextDocumentIdentifier { - uri: uri.parse().unwrap(), + uri: uri.parse().expect("expected success"), }, range: lsp_types::Range { start: Position { @@ -608,7 +611,7 @@ fn semantic_tokens_range_request( Request::new( id.into(), SemanticTokensRangeRequest::METHOD.to_string(), - serde_json::to_value(params).unwrap(), + serde_json::to_value(params).expect("expected success"), ) } @@ -621,7 +624,7 @@ fn code_action_request( ) -> Request { let params = lsp_types::CodeActionParams { text_document: TextDocumentIdentifier { - uri: uri.parse().unwrap(), + uri: uri.parse().expect("expected success"), }, range, 
context: lsp_types::CodeActionContext { @@ -635,14 +638,14 @@ fn code_action_request( Request::new( id.into(), CodeActionRequest::METHOD.to_string(), - serde_json::to_value(params).unwrap(), + serde_json::to_value(params).expect("expected success"), ) } fn code_lens_request(id: i32, uri: &str) -> Request { let params = lsp_types::CodeLensParams { text_document: TextDocumentIdentifier { - uri: uri.parse().unwrap(), + uri: uri.parse().expect("expected success"), }, work_done_progress_params: WorkDoneProgressParams::default(), partial_result_params: PartialResultParams::default(), @@ -650,7 +653,7 @@ fn code_lens_request(id: i32, uri: &str) -> Request { Request::new( id.into(), CodeLensRequest::METHOD.to_string(), - serde_json::to_value(params).unwrap(), + serde_json::to_value(params).expect("expected success"), ) } @@ -658,7 +661,7 @@ fn code_lens_resolve_request(id: i32, lens: lsp_types::CodeLens) -> Request { Request::new( id.into(), CodeLensResolve::METHOD.to_string(), - serde_json::to_value(lens).unwrap(), + serde_json::to_value(lens).expect("expected success"), ) } @@ -666,7 +669,7 @@ fn did_change_watched_files_notification(changes: Vec) -> Notificatio let params = DidChangeWatchedFilesParams { changes }; Notification::new( DidChangeWatchedFiles::METHOD.to_string(), - serde_json::to_value(params).unwrap(), + serde_json::to_value(params).expect("expected success"), ) } @@ -674,7 +677,7 @@ fn did_change_configuration_notification(settings: serde_json::Value) -> Notific let params = DidChangeConfigurationParams { settings }; Notification::new( DidChangeConfiguration::METHOD.to_string(), - serde_json::to_value(params).unwrap(), + serde_json::to_value(params).expect("expected success"), ) } @@ -687,7 +690,7 @@ fn execute_command_request(id: i32, command: &str, arguments: Vec Request { Request::new( id.into(), WorkspaceSymbolRequest::METHOD.to_string(), - serde_json::to_value(params).unwrap(), + serde_json::to_value(params).expect("expected success"), ) } @@ -748,7 
+757,7 @@ fn request_workspace_symbols( ) -> Option> { conn.sender .send(Message::Request(workspace_symbol_request(id, query))) - .unwrap(); + .expect("expected success"); let response = recv_response(conn, id); assert!(response.error.is_none(), "workspace/symbol should succeed"); serde_json::from_value( @@ -756,7 +765,7 @@ fn request_workspace_symbols( .result .expect("workspace/symbol should return result"), ) - .unwrap() + .expect("expected success") } fn code_action_test_range() -> lsp_types::Range { @@ -812,17 +821,17 @@ fn request_code_actions( diagnostics, only, ))) - .unwrap(); + .expect("expected success"); let response = recv_response(conn, id); assert!(response.error.is_none(), "Code action should succeed"); - serde_json::from_value(response.result.expect("should have result")).unwrap() + serde_json::from_value(response.result.expect("should have result")).expect("expected success") } fn expected_unused_variable_quickfix( uri: &str, diagnostic: lsp_types::Diagnostic, ) -> Vec { - let parsed_uri: lsp_types::Uri = uri.parse().unwrap(); + let parsed_uri: lsp_types::Uri = uri.parse().expect("expected success"); let mut prefix_changes = std::collections::HashMap::new(); prefix_changes.insert( parsed_uri.clone(), @@ -925,7 +934,7 @@ fn expected_unused_import_binding_actions( uri: &str, diagnostic: lsp_types::Diagnostic, ) -> Vec { - let parsed_uri: lsp_types::Uri = uri.parse().unwrap(); + let parsed_uri: lsp_types::Uri = uri.parse().expect("expected success"); let mut prefix_changes = std::collections::HashMap::new(); prefix_changes.insert( parsed_uri.clone(), @@ -1026,7 +1035,7 @@ fn expected_unused_import_binding_actions( fn location(uri: &str, start_character: u32, end_character: u32) -> lsp_types::Location { lsp_types::Location { - uri: uri.parse().unwrap(), + uri: uri.parse().expect("expected success"), range: lsp_types::Range { start: Position { line: 0, @@ -1084,7 +1093,7 @@ fn recv_publish_diagnostics_for_uri( } let params: 
lsp_types::PublishDiagnosticsParams = - serde_json::from_value(notif.params).unwrap(); + serde_json::from_value(notif.params).expect("expected success"); if params.uri.as_str() == uri { return params; } diff --git a/crates/jrsonnet-lsp/tests/integration_test/features.rs b/crates/jrsonnet-lsp/tests/integration_test/features.rs index db1cf70c..58309e4a 100644 --- a/crates/jrsonnet-lsp/tests/integration_test/features.rs +++ b/crates/jrsonnet-lsp/tests/integration_test/features.rs @@ -8,24 +8,24 @@ fn test_document_highlight() { client_conn .sender .send(Message::Request(initialize_request(1))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 1); client_conn .sender .send(Message::Notification(initialized_notification())) - .unwrap(); + .expect("expected success"); let uri = "file:///test/highlight.jsonnet"; let text = "local x = 1; x + x"; client_conn .sender .send(Message::Notification(did_open_notification(uri, text))) - .unwrap(); + .expect("expected success"); client_conn .sender .send(Message::Request(document_highlight_request(2, uri, 0, 13))) - .unwrap(); + .expect("expected success"); let response = recv_response(&client_conn, 2); assert!( response.error.is_none(), @@ -33,7 +33,8 @@ fn test_document_highlight() { ); let highlights: Option> = - serde_json::from_value(response.result.expect("should have result")).unwrap(); + serde_json::from_value(response.result.expect("should have result")) + .expect("expected success"); let highlights = highlights.unwrap_or_default(); assert_eq!(highlights.len(), 3); assert!( @@ -47,12 +48,12 @@ fn test_document_highlight() { client_conn .sender .send(Message::Request(shutdown_request(3))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 3); client_conn .sender .send(Message::Notification(exit_notification())) - .unwrap(); + .expect("expected success"); server_thread .join() .expect("Server thread should exit cleanly"); @@ -66,29 +67,30 @@ fn test_inlay_hint() { 
client_conn .sender .send(Message::Request(initialize_request(1))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 1); client_conn .sender .send(Message::Notification(initialized_notification())) - .unwrap(); + .expect("expected success"); let uri = "file:///test/inlay.jsonnet"; let text = "local x = 1; x"; client_conn .sender .send(Message::Notification(did_open_notification(uri, text))) - .unwrap(); + .expect("expected success"); client_conn .sender .send(Message::Request(inlay_hint_request(2, uri, 0, 0, 0, 50))) - .unwrap(); + .expect("expected success"); let response = recv_response(&client_conn, 2); assert!(response.error.is_none(), "Inlay hint should succeed"); let hints: Option> = - serde_json::from_value(response.result.expect("should have result")).unwrap(); + serde_json::from_value(response.result.expect("should have result")) + .expect("expected success"); let hints = hints.unwrap_or_default(); let hints_json = serde_json::to_value(&hints).expect("hints should serialize"); let expected_json = serde_json::json!([{ @@ -102,12 +104,12 @@ fn test_inlay_hint() { client_conn .sender .send(Message::Request(shutdown_request(3))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 3); client_conn .sender .send(Message::Notification(exit_notification())) - .unwrap(); + .expect("expected success"); server_thread .join() .expect("Server thread should exit cleanly"); @@ -121,28 +123,29 @@ fn test_inlay_hint_config_updates_via_configuration_change() { client_conn .sender .send(Message::Request(initialize_request(1))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 1); client_conn .sender .send(Message::Notification(initialized_notification())) - .unwrap(); + .expect("expected success"); let uri = "file:///test/inlay-config.jsonnet"; let text = "{ local x = 1, z: x, a: 1 }"; client_conn .sender .send(Message::Notification(did_open_notification(uri, text))) - .unwrap(); + 
.expect("expected success"); client_conn .sender .send(Message::Request(inlay_hint_request(2, uri, 0, 0, 0, 80))) - .unwrap(); + .expect("expected success"); let before = recv_response(&client_conn, 2); assert!(before.error.is_none(), "Inlay hint should succeed"); let before_hints: Option> = - serde_json::from_value(before.result.expect("should have result")).unwrap(); + serde_json::from_value(before.result.expect("should have result")) + .expect("expected success"); let before_hints = before_hints.unwrap_or_default(); let before_json = serde_json::to_value(&before_hints).expect("hints should serialize"); let expected_before = serde_json::json!([{ @@ -166,16 +169,17 @@ fn test_inlay_hint_config_updates_via_configuration_change() { } })), )) - .unwrap(); + .expect("expected success"); client_conn .sender .send(Message::Request(inlay_hint_request(3, uri, 0, 0, 0, 80))) - .unwrap(); + .expect("expected success"); let after = recv_response(&client_conn, 3); assert!(after.error.is_none(), "Inlay hint should succeed"); let after_hints: Option> = - serde_json::from_value(after.result.expect("should have result")).unwrap(); + serde_json::from_value(after.result.expect("should have result")) + .expect("expected success"); let after_hints = after_hints.unwrap_or_default(); let after_json = serde_json::to_value(&after_hints).expect("hints should serialize"); let expected_after = serde_json::json!([ @@ -197,12 +201,12 @@ fn test_inlay_hint_config_updates_via_configuration_change() { client_conn .sender .send(Message::Request(shutdown_request(4))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 4); client_conn .sender .send(Message::Notification(exit_notification())) - .unwrap(); + .expect("expected success"); server_thread .join() .expect("Server thread should exit cleanly"); @@ -216,19 +220,19 @@ fn test_code_action_unused_variable_quickfix() { client_conn .sender .send(Message::Request(initialize_request(1))) - .unwrap(); + .expect("expected 
success"); let _ = recv_response(&client_conn, 1); client_conn .sender .send(Message::Notification(initialized_notification())) - .unwrap(); + .expect("expected success"); let uri = "file:///test/code-action.jsonnet"; let text = "local x = 1; 42"; client_conn .sender .send(Message::Notification(did_open_notification(uri, text))) - .unwrap(); + .expect("expected success"); let diagnostic = unused_variable_diagnostic(); let actions = request_code_actions(&client_conn, 2, uri, vec![diagnostic.clone()], None); @@ -254,7 +258,7 @@ fn test_code_action_unused_variable_quickfix() { diagnostics: Some(vec![diagnostic]), edit: Some(lsp_types::WorkspaceEdit { changes: Some(std::collections::HashMap::from([( - uri.parse().unwrap(), + uri.parse().expect("expected success"), vec![lsp_types::TextEdit { range: lsp_types::Range { start: Position { @@ -283,12 +287,12 @@ fn test_code_action_unused_variable_quickfix() { client_conn .sender .send(Message::Request(shutdown_request(4))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 4); client_conn .sender .send(Message::Notification(exit_notification())) - .unwrap(); + .expect("expected success"); server_thread .join() .expect("Server thread should exit cleanly"); @@ -302,19 +306,19 @@ fn test_code_action_policy_updates_via_configuration_change() { client_conn .sender .send(Message::Request(initialize_request(1))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 1); client_conn .sender .send(Message::Notification(initialized_notification())) - .unwrap(); + .expect("expected success"); let uri = "file:///test/code-action-policy.jsonnet"; let text = "local x = import \"foo.libsonnet\"; 42"; client_conn .sender .send(Message::Notification(did_open_notification(uri, text))) - .unwrap(); + .expect("expected success"); let diagnostic = unused_variable_diagnostic(); let actions_before = request_code_actions(&client_conn, 2, uri, vec![diagnostic.clone()], None); @@ -337,7 +341,7 @@ 
fn test_code_action_policy_updates_via_configuration_change() { } })), )) - .unwrap(); + .expect("expected success"); let actions_after = request_code_actions(&client_conn, 3, uri, vec![diagnostic.clone()], None); assert_eq!( @@ -349,7 +353,7 @@ fn test_code_action_policy_updates_via_configuration_change() { diagnostics: Some(vec![diagnostic.clone()]), edit: Some(lsp_types::WorkspaceEdit { changes: Some(std::collections::HashMap::from([( - uri.parse().unwrap(), + uri.parse().expect("expected success"), vec![lsp_types::TextEdit { range: lsp_types::Range { start: Position { @@ -387,12 +391,12 @@ fn test_code_action_policy_updates_via_configuration_change() { client_conn .sender .send(Message::Request(shutdown_request(5))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 5); client_conn .sender .send(Message::Notification(exit_notification())) - .unwrap(); + .expect("expected success"); server_thread .join() .expect("Server thread should exit cleanly"); @@ -406,19 +410,19 @@ fn test_code_action_comment_policy_updates_via_configuration_change() { client_conn .sender .send(Message::Request(initialize_request(1))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 1); client_conn .sender .send(Message::Notification(initialized_notification())) - .unwrap(); + .expect("expected success"); let uri = "file:///test/code-action-comment-policy.jsonnet"; let text = "// heading\nlocal x = 1;\n42"; client_conn .sender .send(Message::Notification(did_open_notification(uri, text))) - .unwrap(); + .expect("expected success"); let diagnostic = lsp_types::Diagnostic { range: lsp_types::Range { @@ -459,7 +463,7 @@ fn test_code_action_comment_policy_updates_via_configuration_change() { diagnostics: Some(vec![diagnostic.clone()]), edit: Some(lsp_types::WorkspaceEdit { changes: Some(std::collections::HashMap::from([( - uri.parse().unwrap(), + uri.parse().expect("expected success"), vec![lsp_types::TextEdit { range: 
lsp_types::Range { start: Position { @@ -496,7 +500,7 @@ fn test_code_action_comment_policy_updates_via_configuration_change() { } })), )) - .unwrap(); + .expect("expected success"); let fix_all_after = request_code_actions( &client_conn, @@ -535,7 +539,7 @@ fn test_code_action_comment_policy_updates_via_configuration_change() { }]), edit: Some(lsp_types::WorkspaceEdit { changes: Some(std::collections::HashMap::from([( - uri.parse().unwrap(), + uri.parse().expect("expected success"), vec![lsp_types::TextEdit { range: lsp_types::Range { start: Position { @@ -564,12 +568,12 @@ fn test_code_action_comment_policy_updates_via_configuration_change() { client_conn .sender .send(Message::Request(shutdown_request(4))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 4); client_conn .sender .send(Message::Notification(exit_notification())) - .unwrap(); + .expect("expected success"); server_thread .join() .expect("Server thread should exit cleanly"); @@ -583,32 +587,33 @@ fn test_text_document_references() { client_conn .sender .send(Message::Request(initialize_request(1))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 1); client_conn .sender .send(Message::Notification(initialized_notification())) - .unwrap(); + .expect("expected success"); let uri = "file:///test/find-refs-command.jsonnet"; let text = "local x = 1; x + x"; client_conn .sender .send(Message::Notification(did_open_notification(uri, text))) - .unwrap(); + .expect("expected success"); let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); client_conn .sender .send(Message::Request(references_request(2, uri, 0, 13, false))) - .unwrap(); + .expect("expected success"); let refs_response = recv_response(&client_conn, 2); assert!( refs_response.error.is_none(), "textDocument/references should succeed" ); let refs: Option> = - serde_json::from_value(refs_response.result.expect("should have result")).unwrap(); + 
serde_json::from_value(refs_response.result.expect("should have result")) + .expect("expected success"); assert_eq!( refs.unwrap_or_default(), expected_find_references(uri, false) @@ -617,7 +622,7 @@ fn test_text_document_references() { client_conn .sender .send(Message::Request(references_request(3, uri, 0, 13, true))) - .unwrap(); + .expect("expected success"); let refs_with_declaration_response = recv_response(&client_conn, 3); assert!( refs_with_declaration_response.error.is_none(), @@ -628,7 +633,7 @@ fn test_text_document_references() { .result .expect("should have result"), ) - .unwrap(); + .expect("expected success"); assert_eq!( refs_with_declaration.unwrap_or_default(), expected_find_references(uri, true) @@ -637,12 +642,12 @@ fn test_text_document_references() { client_conn .sender .send(Message::Request(shutdown_request(4))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 4); client_conn .sender .send(Message::Notification(exit_notification())) - .unwrap(); + .expect("expected success"); server_thread .join() .expect("Server thread should exit cleanly"); @@ -656,12 +661,12 @@ fn test_execute_command_unknown_returns_invalid_params_error() { client_conn .sender .send(Message::Request(initialize_request(1))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 1); client_conn .sender .send(Message::Notification(initialized_notification())) - .unwrap(); + .expect("expected success"); client_conn .sender @@ -670,7 +675,7 @@ fn test_execute_command_unknown_returns_invalid_params_error() { "jrsonnet.unknownCommand", vec![], ))) - .unwrap(); + .expect("expected success"); let response = recv_response(&client_conn, 2); assert_eq!(response.result, None); let error = response @@ -686,12 +691,12 @@ fn test_execute_command_unknown_returns_invalid_params_error() { client_conn .sender .send(Message::Request(shutdown_request(3))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 
3); client_conn .sender .send(Message::Notification(exit_notification())) - .unwrap(); + .expect("expected success"); server_thread .join() .expect("Server thread should exit cleanly"); @@ -729,17 +734,17 @@ fn test_eval_commands_use_tanka_import_roots() { "resolvePathsWithTanka": true }), ))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 1); client_conn .sender .send(Message::Notification(initialized_notification())) - .unwrap(); + .expect("expected success"); client_conn .sender .send(Message::Request(custom_eval_file_request(2, &main_uri))) - .unwrap(); + .expect("expected success"); let eval_file_custom_response = recv_response(&client_conn, 2); assert!( eval_file_custom_response.error.is_none(), @@ -759,7 +764,7 @@ fn test_eval_commands_use_tanka_import_roots() { "jrsonnet.evalFile", vec![serde_json::Value::String(main_uri.clone())], ))) - .unwrap(); + .expect("expected success"); let eval_file_bridge_response = recv_response(&client_conn, 3); assert!( eval_file_bridge_response.error.is_none(), @@ -779,7 +784,7 @@ fn test_eval_commands_use_tanka_import_roots() { r#"(import "lib.libsonnet").answer"#, Some(&main_uri), ))) - .unwrap(); + .expect("expected success"); let eval_expression_response = recv_response(&client_conn, 4); assert!( eval_expression_response.error.is_none(), @@ -795,12 +800,12 @@ fn test_eval_commands_use_tanka_import_roots() { client_conn .sender .send(Message::Request(shutdown_request(5))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 5); client_conn .sender .send(Message::Notification(exit_notification())) - .unwrap(); + .expect("expected success"); server_thread .join() .expect("Server thread should exit cleanly"); @@ -814,19 +819,19 @@ fn test_semantic_tokens_range_request() { client_conn .sender .send(Message::Request(initialize_request(1))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 1); client_conn .sender 
.send(Message::Notification(initialized_notification())) - .unwrap(); + .expect("expected success"); let uri = "file:///test/semantic-range.jsonnet"; let text = "local first = 1\nlocal second = first + 1"; client_conn .sender .send(Message::Notification(did_open_notification(uri, text))) - .unwrap(); + .expect("expected success"); let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); client_conn @@ -834,7 +839,7 @@ fn test_semantic_tokens_range_request() { .send(Message::Request(semantic_tokens_range_request( 2, uri, 1, 0, 1, 100, ))) - .unwrap(); + .expect("expected success"); let response = recv_response(&client_conn, 2); assert!( @@ -842,7 +847,8 @@ fn test_semantic_tokens_range_request() { "semantic tokens range request should succeed" ); let tokens: Option = - serde_json::from_value(response.result.expect("should have result")).unwrap(); + serde_json::from_value(response.result.expect("should have result")) + .expect("expected success"); let tokens = tokens.expect("semantic tokens range should be returned"); let expected = encode_semantic_tokens(vec![ semantic_token(1, 0, 5, SemanticTokenTypeName::Keyword, &[]), @@ -856,12 +862,12 @@ fn test_semantic_tokens_range_request() { client_conn .sender .send(Message::Request(shutdown_request(3))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 3); client_conn .sender .send(Message::Notification(exit_notification())) - .unwrap(); + .expect("expected success"); server_thread .join() .expect("Server thread should exit cleanly"); @@ -875,29 +881,30 @@ fn test_code_lens_resolve_request() { client_conn .sender .send(Message::Request(initialize_request(1))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 1); client_conn .sender .send(Message::Notification(initialized_notification())) - .unwrap(); + .expect("expected success"); let uri = "file:///test/code-lens-resolve.jsonnet"; let text = "local x = 1; x + x"; client_conn .sender 
.send(Message::Notification(did_open_notification(uri, text))) - .unwrap(); + .expect("expected success"); let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); client_conn .sender .send(Message::Request(code_lens_request(2, uri))) - .unwrap(); + .expect("expected success"); let response = recv_response(&client_conn, 2); assert!(response.error.is_none(), "code lens request should succeed"); let lenses: Vec = - serde_json::from_value(response.result.expect("should have result")).unwrap(); + serde_json::from_value(response.result.expect("should have result")) + .expect("expected success"); assert!(!lenses.is_empty(), "expected code lenses for test document"); let evaluate_lens = lenses .into_iter() @@ -914,25 +921,26 @@ fn test_code_lens_resolve_request() { 3, evaluate_lens.clone(), ))) - .unwrap(); + .expect("expected success"); let response = recv_response(&client_conn, 3); assert!( response.error.is_none(), "code lens resolve request should succeed" ); let resolved: lsp_types::CodeLens = - serde_json::from_value(response.result.expect("should have result")).unwrap(); + serde_json::from_value(response.result.expect("should have result")) + .expect("expected success"); assert_eq!(resolved, evaluate_lens); client_conn .sender .send(Message::Request(shutdown_request(4))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 4); client_conn .sender .send(Message::Notification(exit_notification())) - .unwrap(); + .expect("expected success"); server_thread .join() .expect("Server thread should exit cleanly"); @@ -946,29 +954,30 @@ fn test_code_lens_evaluate_command_executes_and_returns_result() { client_conn .sender .send(Message::Request(initialize_request(1))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 1); client_conn .sender .send(Message::Notification(initialized_notification())) - .unwrap(); + .expect("expected success"); let uri = "file:///test/code-lens-evaluate.jsonnet"; 
let text = "local x = 1; x"; client_conn .sender .send(Message::Notification(did_open_notification(uri, text))) - .unwrap(); + .expect("expected success"); let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); client_conn .sender .send(Message::Request(code_lens_request(2, uri))) - .unwrap(); + .expect("expected success"); let response = recv_response(&client_conn, 2); assert!(response.error.is_none(), "code lens request should succeed"); let lenses: Vec = - serde_json::from_value(response.result.expect("should have result")).unwrap(); + serde_json::from_value(response.result.expect("should have result")) + .expect("expected success"); let evaluate_command = lenses .into_iter() .filter_map(|lens| lens.command) @@ -988,7 +997,7 @@ fn test_code_lens_evaluate_command_executes_and_returns_result() { &evaluate_command.command, evaluate_command.arguments.unwrap_or_default(), ))) - .unwrap(); + .expect("expected success"); let response = recv_response(&client_conn, 3); assert!( response.error.is_none(), @@ -1004,12 +1013,12 @@ fn test_code_lens_evaluate_command_executes_and_returns_result() { client_conn .sender .send(Message::Request(shutdown_request(4))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 4); client_conn .sender .send(Message::Notification(exit_notification())) - .unwrap(); + .expect("expected success"); server_thread .join() .expect("Server thread should exit cleanly"); @@ -1023,12 +1032,12 @@ fn test_cancel_request_returns_request_canceled_error() { client_conn .sender .send(Message::Request(initialize_request(1))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 1); client_conn .sender .send(Message::Notification(initialized_notification())) - .unwrap(); + .expect("expected success"); let uri = "file:///test/cancel-code-lens.jsonnet"; let mut text = String::new(); @@ -1041,16 +1050,16 @@ fn test_cancel_request_returns_request_canceled_error() { client_conn 
.sender .send(Message::Notification(did_open_notification(uri, &text))) - .unwrap(); + .expect("expected success"); client_conn .sender .send(Message::Request(code_lens_request(2, uri))) - .unwrap(); + .expect("expected success"); client_conn .sender .send(Message::Notification(cancel_request_notification(2))) - .unwrap(); + .expect("expected success"); let response = recv_response(&client_conn, 2); assert_eq!(response.result, None); @@ -1064,12 +1073,12 @@ fn test_cancel_request_returns_request_canceled_error() { client_conn .sender .send(Message::Request(shutdown_request(3))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 3); client_conn .sender .send(Message::Notification(exit_notification())) - .unwrap(); + .expect("expected success"); server_thread .join() .expect("Server thread should exit cleanly"); @@ -1083,12 +1092,12 @@ fn test_code_lens_resolve_invalid_params_returns_invalid_params_error() { client_conn .sender .send(Message::Request(initialize_request(1))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 1); client_conn .sender .send(Message::Notification(initialized_notification())) - .unwrap(); + .expect("expected success"); client_conn .sender @@ -1097,7 +1106,7 @@ fn test_code_lens_resolve_invalid_params_returns_invalid_params_error() { CodeLensResolve::METHOD.to_string(), json!({"not": "a code lens"}), ))) - .unwrap(); + .expect("expected success"); let response = recv_response(&client_conn, 2); assert_eq!(response.result, None); @@ -1117,12 +1126,12 @@ fn test_code_lens_resolve_invalid_params_returns_invalid_params_error() { client_conn .sender .send(Message::Request(shutdown_request(3))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 3); client_conn .sender .send(Message::Notification(exit_notification())) - .unwrap(); + .expect("expected success"); server_thread .join() .expect("Server thread should exit cleanly"); diff --git 
a/crates/jrsonnet-lsp/tests/integration_test/formatting.rs b/crates/jrsonnet-lsp/tests/integration_test/formatting.rs index 8b014413..11eeca5d 100644 --- a/crates/jrsonnet-lsp/tests/integration_test/formatting.rs +++ b/crates/jrsonnet-lsp/tests/integration_test/formatting.rs @@ -279,22 +279,30 @@ const SIMPLE_OBJECT: &str = "{a:1}"; new_text: "{\n a: 1,\n}\n".to_string(), }]), })] -fn test_document_formatting_single_request_cases(#[case] case: SingleFormattingCase) -> Result<()> { - let mut session = FormatSession::start(case.init_options)?; - session.open(case.uri, case.text)?; - let edits = session.request_formatting(case.uri, case.tab_size, case.insert_spaces)?; +fn test_document_formatting_single_request_cases(#[case] case: SingleFormattingCase) { + let mut session = FormatSession::start(case.init_options).expect("start format session"); + session + .open(case.uri, case.text) + .expect("open document for formatting test"); + let edits = session + .request_formatting(case.uri, case.tab_size, case.insert_spaces) + .expect("request document formatting"); assert_eq!(edits, case.expected); - session.shutdown() + session.shutdown().expect("shutdown format session"); } #[test] -fn test_document_formatting_applies_runtime_formatting_config_changes() -> Result<()> { - let mut session = FormatSession::start(serde_json::Value::Null)?; +fn test_document_formatting_applies_runtime_formatting_config_changes() { + let mut session = FormatSession::start(serde_json::Value::Null).expect("start format session"); let string_uri = "file:///test/format-config-string.jsonnet"; - session.open(string_uri, "{a:'x'}")?; + session + .open(string_uri, "{a:'x'}") + .expect("open string-style document"); - let before_edits = session.request_formatting(string_uri, 2, true)?; + let before_edits = session + .request_formatting(string_uri, 2, true) + .expect("request formatting before config change"); assert_eq!( before_edits, Some(vec![lsp_types::TextEdit { @@ -312,16 +320,20 @@ fn 
test_document_formatting_applies_runtime_formatting_config_changes() -> Resul }]) ); - session.change_configuration(serde_json::json!({ - "jsonnet": { - "formatting": { - "indent": 2, - "string_style": "double" + session + .change_configuration(serde_json::json!({ + "jsonnet": { + "formatting": { + "indent": 2, + "string_style": "double" + } } - } - }))?; + })) + .expect("apply string_style config change"); - let after_style_edits = session.request_formatting(string_uri, 8, false)?; + let after_style_edits = session + .request_formatting(string_uri, 8, false) + .expect("request formatting after string_style change"); assert_eq!( after_style_edits, Some(vec![lsp_types::TextEdit { @@ -340,17 +352,23 @@ fn test_document_formatting_applies_runtime_formatting_config_changes() -> Resul ); let empty_uri = "file:///test/format-config-empty.jsonnet"; - session.open(empty_uri, "{}")?; - - session.change_configuration(serde_json::json!({ - "jsonnet": { - "formatting": { - "pad_objects": false + session + .open(empty_uri, "{}") + .expect("open empty object document"); + + session + .change_configuration(serde_json::json!({ + "jsonnet": { + "formatting": { + "pad_objects": false + } } - } - }))?; + })) + .expect("apply pad_objects config change"); - let after_padding_edits = session.request_formatting(empty_uri, 4, true)?; + let after_padding_edits = session + .request_formatting(empty_uri, 4, true) + .expect("request formatting after pad_objects change"); assert_eq!( after_padding_edits, Some(vec![lsp_types::TextEdit { @@ -368,23 +386,20 @@ fn test_document_formatting_applies_runtime_formatting_config_changes() -> Resul }]) ); - session.shutdown() + session.shutdown().expect("shutdown format session"); } #[test] -fn test_document_formatting_respects_lsp_optional_formatting_options() -> Result<()> { - let mut session = FormatSession::start(serde_json::Value::Null)?; +fn test_document_formatting_respects_lsp_optional_formatting_options() { + let mut session = 
FormatSession::start(serde_json::Value::Null).expect("start format session"); let uri = "file:///test/format-request-options.jsonnet"; - session.open(uri, SIMPLE_OBJECT)?; - - let edits = session.request_formatting_with_options( - uri, - 2, - true, - Some(true), - Some(false), - Some(true), - )?; + session + .open(uri, SIMPLE_OBJECT) + .expect("open formatting options document"); + + let edits = session + .request_formatting_with_options(uri, 2, true, Some(true), Some(false), Some(true)) + .expect("request formatting with options"); assert_eq!( edits, Some(vec![lsp_types::TextEdit { @@ -402,29 +417,33 @@ fn test_document_formatting_respects_lsp_optional_formatting_options() -> Result }]) ); - session.shutdown() + session.shutdown().expect("shutdown format session"); } #[test] -fn test_range_formatting_returns_edit_for_changes_within_requested_range() -> Result<()> { - let mut session = FormatSession::start(serde_json::Value::Null)?; +fn test_range_formatting_returns_edit_for_changes_within_requested_range() { + let mut session = FormatSession::start(serde_json::Value::Null).expect("start format session"); let uri = "file:///test/format-range-contained.jsonnet"; - session.open(uri, "{\n a: 1,\n b:2,\n}\n")?; - - let edits = session.request_range_formatting_with_options( - uri, - lsp_types::Range { - start: Position { - line: 2, - character: 0, - }, - end: Position { - line: 2, - character: 6, + session + .open(uri, "{\n a: 1,\n b:2,\n}\n") + .expect("open range formatting document"); + + let edits = session + .request_range_formatting_with_options( + uri, + lsp_types::Range { + start: Position { + line: 2, + character: 0, + }, + end: Position { + line: 2, + character: 6, + }, }, - }, - formatting_options(2, true, None, None, None), - )?; + formatting_options(2, true, None, None, None), + ) + .expect("request range formatting"); assert_eq!( edits, @@ -443,31 +462,35 @@ fn test_range_formatting_returns_edit_for_changes_within_requested_range() -> Re }]) ); - 
session.shutdown() + session.shutdown().expect("shutdown format session"); } #[test] -fn test_range_formatting_returns_no_edits_when_changes_escape_requested_range() -> Result<()> { - let mut session = FormatSession::start(serde_json::Value::Null)?; +fn test_range_formatting_returns_no_edits_when_changes_escape_requested_range() { + let mut session = FormatSession::start(serde_json::Value::Null).expect("start format session"); let uri = "file:///test/format-range-outside.jsonnet"; - session.open(uri, "{\n a:1,\n b:2,\n}\n")?; - - let edits = session.request_range_formatting_with_options( - uri, - lsp_types::Range { - start: Position { - line: 1, - character: 0, - }, - end: Position { - line: 1, - character: 6, + session + .open(uri, "{\n a:1,\n b:2,\n}\n") + .expect("open range formatting document"); + + let edits = session + .request_range_formatting_with_options( + uri, + lsp_types::Range { + start: Position { + line: 1, + character: 0, + }, + end: Position { + line: 1, + character: 6, + }, }, - }, - formatting_options(2, true, None, None, None), - )?; + formatting_options(2, true, None, None, None), + ) + .expect("request range formatting"); assert_eq!(edits, Some(Vec::new())); - session.shutdown() + session.shutdown().expect("shutdown format session"); } diff --git a/crates/jrsonnet-lsp/tests/integration_test/lifecycle.rs b/crates/jrsonnet-lsp/tests/integration_test/lifecycle.rs index 9ce65a3c..755168c6 100644 --- a/crates/jrsonnet-lsp/tests/integration_test/lifecycle.rs +++ b/crates/jrsonnet-lsp/tests/integration_test/lifecycle.rs @@ -12,65 +12,68 @@ fn test_initialize_shutdown() { client_conn .sender .send(Message::Request(initialize_request(1))) - .unwrap(); + .expect("expected success"); // Receive initialize response - let response = client_conn.receiver.recv().unwrap(); + let response = client_conn.receiver.recv().expect("expected success"); assert_matches!(response, Message::Response(resp) => { assert_eq!(resp.id, 1.into()); assert!(resp.error.is_none(), 
"Initialize should succeed"); let result = resp.result.expect("should have result"); - assert!(result.get("capabilities").is_some(), "should have capabilities"); + let capabilities = result + .get("capabilities") + .expect("should have capabilities"); assert_eq!( - result["capabilities"]["documentHighlightProvider"], - serde_json::Value::Bool(true), + capabilities.get("documentHighlightProvider"), + Some(&serde_json::Value::Bool(true)), "document highlight capability should be advertised", ); assert_eq!( - result["capabilities"]["inlayHintProvider"], - serde_json::Value::Bool(true), - "inlay hint capability should be advertised", - ); - assert_eq!( - result["capabilities"]["documentRangeFormattingProvider"], - serde_json::Value::Bool(true), - "range formatting capability should be advertised", - ); - assert_eq!( - result["capabilities"]["codeActionProvider"]["codeActionKinds"][0], - serde_json::Value::String("quickfix".to_string()), - "quickfix code action capability should be advertised", - ); - assert_eq!( - result["capabilities"]["codeActionProvider"]["codeActionKinds"][1], - serde_json::Value::String("source.fixAll".to_string()), - "source fix-all code action capability should be advertised", - ); + capabilities.get("inlayHintProvider"), + Some(&serde_json::Value::Bool(true)), + "inlay hint capability should be advertised", + ); + assert_eq!( + capabilities.get("documentRangeFormattingProvider"), + Some(&serde_json::Value::Bool(true)), + "range formatting capability should be advertised", + ); + assert_eq!( + capabilities + .get("codeActionProvider") + .and_then(|provider| provider.get("codeActionKinds")), + Some(&serde_json::json!(["quickfix", "source.fixAll"])), + "code action kinds should be advertised", + ); assert_eq!( - result["capabilities"]["executeCommandProvider"]["commands"], - serde_json::json!(["jrsonnet.evalFile"]), + capabilities + .get("executeCommandProvider") + .and_then(|provider| provider.get("commands")), + 
Some(&serde_json::json!(["jrsonnet.evalFile"])), "execute command capability should advertise all command IDs", ); - assert_eq!( - result["capabilities"]["codeLensProvider"]["resolveProvider"], - serde_json::Value::Bool(true), - "code lens resolve capability should be advertised", - ); - assert_eq!( - result["capabilities"]["declarationProvider"], - serde_json::Value::Bool(true), - "declaration capability should be advertised", - ); assert_eq!( - result["capabilities"]["implementationProvider"], - serde_json::Value::Bool(true), - "implementation capability should be advertised", + capabilities + .get("codeLensProvider") + .and_then(|provider| provider.get("resolveProvider")), + Some(&serde_json::Value::Bool(true)), + "code lens resolve capability should be advertised", ); - let server_name = result - .get("serverInfo") - .and_then(|s| s.get("name")) - .and_then(|n| n.as_str()) - .expect("should have serverInfo.name"); + assert_eq!( + capabilities.get("declarationProvider"), + Some(&serde_json::Value::Bool(true)), + "declaration capability should be advertised", + ); + assert_eq!( + capabilities.get("implementationProvider"), + Some(&serde_json::Value::Bool(true)), + "implementation capability should be advertised", + ); + let server_name = result + .get("serverInfo") + .and_then(|s| s.get("name")) + .and_then(|n| n.as_str()) + .expect("should have serverInfo.name"); assert!(server_name.contains("jrsonnet")); }); @@ -78,16 +81,16 @@ fn test_initialize_shutdown() { client_conn .sender .send(Message::Notification(initialized_notification())) - .unwrap(); + .expect("expected success"); // Send shutdown request client_conn .sender .send(Message::Request(shutdown_request(2))) - .unwrap(); + .expect("expected success"); // Receive shutdown response - let response = client_conn.receiver.recv().unwrap(); + let response = client_conn.receiver.recv().expect("expected success"); assert_matches!(response, Message::Response(resp) => { assert_eq!(resp.id, 2.into()); 
assert!(resp.error.is_none(), "Shutdown should succeed"); @@ -97,7 +100,7 @@ fn test_initialize_shutdown() { client_conn .sender .send(Message::Notification(exit_notification())) - .unwrap(); + .expect("expected success"); // Wait for server to exit server_thread @@ -114,13 +117,13 @@ fn test_diagnostics_on_open() { client_conn .sender .send(Message::Request(initialize_request(1))) - .unwrap(); - let _ = client_conn.receiver.recv().unwrap(); // ignore response + .expect("expected success"); + let _ = client_conn.receiver.recv().expect("expected success"); // ignore response client_conn .sender .send(Message::Notification(initialized_notification())) - .unwrap(); + .expect("expected success"); // Open a document with a syntax error let uri = "file:///test/error.jsonnet"; @@ -128,14 +131,14 @@ fn test_diagnostics_on_open() { client_conn .sender .send(Message::Notification(did_open_notification(uri, text))) - .unwrap(); + .expect("expected success"); // Should receive diagnostics notification - let notification = client_conn.receiver.recv().unwrap(); + let notification = client_conn.receiver.recv().expect("expected success"); assert_matches!(notification, Message::Notification(notif) => { assert_eq!(notif.method, PublishDiagnostics::METHOD); let params: lsp_types::PublishDiagnosticsParams = - serde_json::from_value(notif.params).unwrap(); + serde_json::from_value(notif.params).expect("expected success"); assert!( !params.diagnostics.is_empty(), "Should have diagnostics for syntax error" @@ -146,13 +149,13 @@ fn test_diagnostics_on_open() { client_conn .sender .send(Message::Request(shutdown_request(2))) - .unwrap(); - let _ = client_conn.receiver.recv().unwrap(); + .expect("expected success"); + let _ = client_conn.receiver.recv().expect("expected success"); client_conn .sender .send(Message::Notification(exit_notification())) - .unwrap(); + .expect("expected success"); server_thread .join() @@ -167,13 +170,13 @@ fn test_diagnostics_refresh_on_did_save_with_text() { 
client_conn .sender .send(Message::Request(initialize_request(1))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 1); client_conn .sender .send(Message::Notification(initialized_notification())) - .unwrap(); + .expect("expected success"); let uri = "file:///test/save-refresh.jsonnet"; client_conn @@ -181,7 +184,7 @@ fn test_diagnostics_refresh_on_did_save_with_text() { .send(Message::Notification(did_open_notification( uri, "{ a: 1 }", ))) - .unwrap(); + .expect("expected success"); let opened = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); assert!(opened.diagnostics.is_empty()); @@ -191,7 +194,7 @@ fn test_diagnostics_refresh_on_did_save_with_text() { uri, Some("{ a: }"), ))) - .unwrap(); + .expect("expected success"); let saved_invalid = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); assert!( !saved_invalid.diagnostics.is_empty(), @@ -204,19 +207,19 @@ fn test_diagnostics_refresh_on_did_save_with_text() { uri, Some("{ a: 2 }"), ))) - .unwrap(); + .expect("expected success"); let saved_valid = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); assert!(saved_valid.diagnostics.is_empty()); client_conn .sender .send(Message::Request(shutdown_request(2))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 2); client_conn .sender .send(Message::Notification(exit_notification())) - .unwrap(); + .expect("expected success"); server_thread .join() @@ -237,20 +240,20 @@ fn test_configuration_change_reconfigures_eval_diagnostics() { "enableEvalDiagnostics": true }), ))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 1); client_conn .sender .send(Message::Notification(initialized_notification())) - .unwrap(); + .expect("expected success"); let uri = "file:///test/eval-config-change.jsonnet"; let text = "error 'boom'"; client_conn .sender .send(Message::Notification(did_open_notification(uri, 
text))) - .unwrap(); + .expect("expected success"); let initial_diagnostics = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); @@ -271,7 +274,7 @@ fn test_configuration_change_reconfigures_eval_diagnostics() { } })), )) - .unwrap(); + .expect("expected success"); let updated_diagnostics = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); @@ -286,13 +289,13 @@ fn test_configuration_change_reconfigures_eval_diagnostics() { client_conn .sender .send(Message::Request(shutdown_request(2))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 2); client_conn .sender .send(Message::Notification(exit_notification())) - .unwrap(); + .expect("expected success"); server_thread .join() @@ -309,12 +312,12 @@ fn test_configuration_change_requests_inlay_hint_refresh_when_supported() { .send(Message::Request( initialize_request_with_inlay_hint_refresh_support(1), )) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 1); client_conn .sender .send(Message::Notification(initialized_notification())) - .unwrap(); + .expect("expected success"); client_conn .sender @@ -327,7 +330,7 @@ fn test_configuration_change_requests_inlay_hint_refresh_when_supported() { } })), )) - .unwrap(); + .expect("expected success"); let refresh_request = loop { let message = client_conn @@ -338,8 +341,7 @@ fn test_configuration_change_requests_inlay_hint_refresh_when_supported() { Message::Request(request) if request.method == InlayHintRefreshRequest::METHOD => { break request; } - Message::Notification(_) => continue, - _ => continue, + _ => {} } }; assert_eq!(refresh_request.params, serde_json::Value::Null); @@ -350,17 +352,17 @@ fn test_configuration_change_requests_inlay_hint_refresh_when_supported() { refresh_request.id, serde_json::Value::Null, ))) - .unwrap(); + .expect("expected success"); client_conn .sender .send(Message::Request(shutdown_request(2))) - .unwrap(); + .expect("expected 
success"); let _ = recv_response(&client_conn, 2); client_conn .sender .send(Message::Notification(exit_notification())) - .unwrap(); + .expect("expected success"); server_thread .join() .expect("Server thread should exit cleanly"); @@ -408,25 +410,25 @@ fn test_configuration_change_reindexes_closed_import_graph_entries() { "jpath": [jpath_a.to_string_lossy().to_string()], }), ))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 1); client_conn .sender .send(Message::Notification(initialized_notification())) - .unwrap(); + .expect("expected success"); client_conn .sender .send(Message::Notification(did_open_notification( &main_uri, &main_text, ))) - .unwrap(); + .expect("expected success"); let _ = recv_publish_diagnostics_for_uri(&client_conn, &main_uri, Duration::from_secs(5)); client_conn .sender .send(Message::Notification(did_close_notification(&main_uri))) - .unwrap(); + .expect("expected success"); client_conn .sender @@ -437,14 +439,14 @@ fn test_configuration_change_reindexes_closed_import_graph_entries() { } })), )) - .unwrap(); + .expect("expected success"); client_conn .sender .send(Message::Request(custom_find_transitive_importers_request( 2, &lib_a_uri, ))) - .unwrap(); + .expect("expected success"); let old_target_response = recv_response(&client_conn, 2); assert!( old_target_response.error.is_none(), @@ -465,7 +467,7 @@ fn test_configuration_change_reindexes_closed_import_graph_entries() { .send(Message::Request(custom_find_transitive_importers_request( 3, &lib_b_uri, ))) - .unwrap(); + .expect("expected success"); let new_target_response = recv_response(&client_conn, 3); assert!( new_target_response.error.is_none(), @@ -484,12 +486,12 @@ fn test_configuration_change_reindexes_closed_import_graph_entries() { client_conn .sender .send(Message::Request(shutdown_request(4))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 4); client_conn .sender 
.send(Message::Notification(exit_notification())) - .unwrap(); + .expect("expected success"); server_thread .join() .expect("Server thread should exit cleanly"); @@ -504,13 +506,13 @@ fn test_valid_document_no_errors() { client_conn .sender .send(Message::Request(initialize_request(1))) - .unwrap(); - let _ = client_conn.receiver.recv().unwrap(); + .expect("expected success"); + let _ = client_conn.receiver.recv().expect("expected success"); client_conn .sender .send(Message::Notification(initialized_notification())) - .unwrap(); + .expect("expected success"); // Open a valid document let uri = "file:///test/valid.jsonnet"; @@ -518,14 +520,14 @@ fn test_valid_document_no_errors() { client_conn .sender .send(Message::Notification(did_open_notification(uri, text))) - .unwrap(); + .expect("expected success"); // Should receive diagnostics notification with empty diagnostics - let notification = client_conn.receiver.recv().unwrap(); + let notification = client_conn.receiver.recv().expect("expected success"); assert_matches!(notification, Message::Notification(notif) => { assert_eq!(notif.method, PublishDiagnostics::METHOD); let params: lsp_types::PublishDiagnosticsParams = - serde_json::from_value(notif.params).unwrap(); + serde_json::from_value(notif.params).expect("expected success"); assert!( params.diagnostics.is_empty(), "Valid document should have no diagnostics" @@ -536,13 +538,13 @@ fn test_valid_document_no_errors() { client_conn .sender .send(Message::Request(shutdown_request(2))) - .unwrap(); - let _ = client_conn.receiver.recv().unwrap(); + .expect("expected success"); + let _ = client_conn.receiver.recv().expect("expected success"); client_conn .sender .send(Message::Notification(exit_notification())) - .unwrap(); + .expect("expected success"); server_thread .join() @@ -559,13 +561,13 @@ fn test_initialize_registers_did_change_watched_files_when_supported() { .send(Message::Request( initialize_request_with_dynamic_watched_files(1), )) - .unwrap(); + 
.expect("expected success"); let _ = recv_response(&client_conn, 1); client_conn .sender .send(Message::Notification(initialized_notification())) - .unwrap(); + .expect("expected success"); let register_request = loop { let message = client_conn @@ -579,7 +581,7 @@ fn test_initialize_registers_did_change_watched_files_when_supported() { assert_eq!(register_request.method, RegisterCapability::METHOD); let actual_params: RegistrationParams = - serde_json::from_value(register_request.params).unwrap(); + serde_json::from_value(register_request.params).expect("expected success"); let expected_options = DidChangeWatchedFilesRegistrationOptions { watchers: vec![ FileSystemWatcher { @@ -600,7 +602,9 @@ fn test_initialize_registers_did_change_watched_files_when_supported() { registrations: vec![Registration { id: "jrsonnet-lsp.did-change-watched-files".to_owned(), method: DidChangeWatchedFiles::METHOD.to_owned(), - register_options: Some(serde_json::to_value(expected_options).unwrap()), + register_options: Some( + serde_json::to_value(expected_options).expect("expected success"), + ), }], }; assert_eq!(actual_params, expected_params); @@ -611,17 +615,17 @@ fn test_initialize_registers_did_change_watched_files_when_supported() { register_request.id, serde_json::Value::Null, ))) - .unwrap(); + .expect("expected success"); client_conn .sender .send(Message::Request(shutdown_request(2))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 2); client_conn .sender .send(Message::Notification(exit_notification())) - .unwrap(); + .expect("expected success"); server_thread .join() .expect("Server thread should exit cleanly"); @@ -631,7 +635,7 @@ fn test_initialize_registers_did_change_watched_files_when_supported() { fn test_initialize_uses_relative_watch_patterns_when_supported() { let tmp = TempDir::new().expect("tempdir should be created"); let root_uri = file_uri(tmp.path()); - let parsed_root_uri: lsp_types::Uri = root_uri.parse().unwrap(); + let 
parsed_root_uri: lsp_types::Uri = root_uri.parse().expect("expected success"); let (client_conn, server_conn) = Connection::memory(); let server_thread = run_server(server_conn); @@ -641,13 +645,13 @@ fn test_initialize_uses_relative_watch_patterns_when_supported() { .send(Message::Request( initialize_request_with_dynamic_watched_files_relative(1, &root_uri), )) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 1); client_conn .sender .send(Message::Notification(initialized_notification())) - .unwrap(); + .expect("expected success"); let register_request = loop { let message = client_conn @@ -661,7 +665,7 @@ fn test_initialize_uses_relative_watch_patterns_when_supported() { assert_eq!(register_request.method, RegisterCapability::METHOD); let actual_params: RegistrationParams = - serde_json::from_value(register_request.params).unwrap(); + serde_json::from_value(register_request.params).expect("expected success"); let expected_options = DidChangeWatchedFilesRegistrationOptions { watchers: vec![ FileSystemWatcher { @@ -691,7 +695,9 @@ fn test_initialize_uses_relative_watch_patterns_when_supported() { registrations: vec![Registration { id: "jrsonnet-lsp.did-change-watched-files".to_owned(), method: DidChangeWatchedFiles::METHOD.to_owned(), - register_options: Some(serde_json::to_value(expected_options).unwrap()), + register_options: Some( + serde_json::to_value(expected_options).expect("expected success"), + ), }], }; assert_eq!(actual_params, expected_params); @@ -702,17 +708,17 @@ fn test_initialize_uses_relative_watch_patterns_when_supported() { register_request.id, serde_json::Value::Null, ))) - .unwrap(); + .expect("expected success"); client_conn .sender .send(Message::Request(shutdown_request(2))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 2); client_conn .sender .send(Message::Notification(exit_notification())) - .unwrap(); + .expect("expected success"); server_thread .join() .expect("Server 
thread should exit cleanly"); diff --git a/crates/jrsonnet-lsp/tests/integration_test/navigation.rs b/crates/jrsonnet-lsp/tests/integration_test/navigation.rs index 28f2d8e6..a38b1572 100644 --- a/crates/jrsonnet-lsp/tests/integration_test/navigation.rs +++ b/crates/jrsonnet-lsp/tests/integration_test/navigation.rs @@ -9,13 +9,13 @@ fn test_goto_definition() { client_conn .sender .send(Message::Request(initialize_request(1))) - .unwrap(); - let _ = client_conn.receiver.recv().unwrap(); + .expect("expected success"); + let _ = client_conn.receiver.recv().expect("expected success"); client_conn .sender .send(Message::Notification(initialized_notification())) - .unwrap(); + .expect("expected success"); // Open a document with a local binding let uri = "file:///test/definition.jsonnet"; @@ -24,28 +24,29 @@ fn test_goto_definition() { client_conn .sender .send(Message::Notification(did_open_notification(uri, text))) - .unwrap(); + .expect("expected success"); // Receive diagnostics notification (discard) - let _ = client_conn.receiver.recv().unwrap(); + let _ = client_conn.receiver.recv().expect("expected success"); // Send goto definition request for 'x' usage at position (0, 13) client_conn .sender .send(Message::Request(goto_definition_request(2, uri, 0, 13))) - .unwrap(); + .expect("expected success"); // Should receive definition response - let response = client_conn.receiver.recv().unwrap(); + let response = client_conn.receiver.recv().expect("expected success"); let response = assert_matches!(response, Message::Response(resp) => resp); assert_eq!(response.id, 2.into()); assert!(response.error.is_none(), "Goto definition should succeed"); let result: Option = - serde_json::from_value(response.result.expect("should have result")).unwrap(); + serde_json::from_value(response.result.expect("should have result")) + .expect("expected success"); assert_eq!( result, Some(GotoDefinitionResponse::Scalar(lsp_types::Location { - uri: uri.parse().unwrap(), + uri: 
uri.parse().expect("expected success"), range: lsp_types::Range { start: Position { line: 0, @@ -63,13 +64,13 @@ fn test_goto_definition() { client_conn .sender .send(Message::Request(shutdown_request(3))) - .unwrap(); - let _ = client_conn.receiver.recv().unwrap(); + .expect("expected success"); + let _ = client_conn.receiver.recv().expect("expected success"); client_conn .sender .send(Message::Notification(exit_notification())) - .unwrap(); + .expect("expected success"); server_thread .join() @@ -84,19 +85,19 @@ fn test_goto_type_definition() { client_conn .sender .send(Message::Request(initialize_request(1))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 1); client_conn .sender .send(Message::Notification(initialized_notification())) - .unwrap(); + .expect("expected success"); let uri = "file:///test/type-definition.jsonnet"; let text = "local x = 1; x + 1"; client_conn .sender .send(Message::Notification(did_open_notification(uri, text))) - .unwrap(); + .expect("expected success"); let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); client_conn @@ -104,18 +105,19 @@ fn test_goto_type_definition() { .send(Message::Request(goto_type_definition_request( 2, uri, 0, 13, ))) - .unwrap(); + .expect("expected success"); let response = recv_response(&client_conn, 2); assert!( response.error.is_none(), "Goto type definition request should succeed" ); let result: Option = - serde_json::from_value(response.result.expect("should have result")).unwrap(); + serde_json::from_value(response.result.expect("should have result")) + .expect("expected success"); assert_eq!( result, Some(GotoDefinitionResponse::Scalar(lsp_types::Location { - uri: uri.parse().unwrap(), + uri: uri.parse().expect("expected success"), range: lsp_types::Range { start: Position { line: 0, @@ -132,12 +134,12 @@ fn test_goto_type_definition() { client_conn .sender .send(Message::Request(shutdown_request(3))) - .unwrap(); + .expect("expected 
success"); let _ = recv_response(&client_conn, 3); client_conn .sender .send(Message::Notification(exit_notification())) - .unwrap(); + .expect("expected success"); server_thread .join() .expect("Server thread should exit cleanly"); @@ -151,26 +153,26 @@ fn test_goto_type_definition_matches_definition_for_local_alias() { client_conn .sender .send(Message::Request(initialize_request(1))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 1); client_conn .sender .send(Message::Notification(initialized_notification())) - .unwrap(); + .expect("expected success"); let uri = "file:///test/type-definition-local-alias.jsonnet"; let text = "local x = 1;\nlocal y = x;\ny"; client_conn .sender .send(Message::Notification(did_open_notification(uri, text))) - .unwrap(); + .expect("expected success"); let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); let result = assert_type_definition_matches_definition(&client_conn, 2, 3, uri, 2, 0); assert_eq!( result, Some(GotoDefinitionResponse::Scalar(lsp_types::Location { - uri: uri.parse().unwrap(), + uri: uri.parse().expect("expected success"), range: lsp_types::Range { start: Position { line: 0, @@ -187,12 +189,12 @@ fn test_goto_type_definition_matches_definition_for_local_alias() { client_conn .sender .send(Message::Request(shutdown_request(4))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 4); client_conn .sender .send(Message::Notification(exit_notification())) - .unwrap(); + .expect("expected success"); server_thread .join() .expect("Server thread should exit cleanly"); @@ -215,7 +217,7 @@ alias + std.length(plain)"#, .expect("failed to write main file"); let uri = file_uri(&main_path); - let lib_uri: lsp_types::Uri = file_uri(&lib_path).parse().unwrap(); + let lib_uri: lsp_types::Uri = file_uri(&lib_path).parse().expect("expected success"); let text = fs::read_to_string(&main_path).expect("failed to read main file"); let 
(client_conn, server_conn) = Connection::memory(); @@ -224,16 +226,16 @@ alias + std.length(plain)"#, client_conn .sender .send(Message::Request(initialize_request(1))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 1); client_conn .sender .send(Message::Notification(initialized_notification())) - .unwrap(); + .expect("expected success"); client_conn .sender .send(Message::Notification(did_open_notification(&uri, &text))) - .unwrap(); + .expect("expected success"); let _ = recv_publish_diagnostics_for_uri(&client_conn, &uri, Duration::from_secs(5)); // `alias` usage at line 3, col 0 resolves to imported field `foo`. @@ -268,12 +270,12 @@ alias + std.length(plain)"#, client_conn .sender .send(Message::Request(shutdown_request(6))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 6); client_conn .sender .send(Message::Notification(exit_notification())) - .unwrap(); + .expect("expected success"); server_thread .join() .expect("Server thread should exit cleanly"); @@ -287,19 +289,19 @@ fn test_navigation_matrix_local_alias() { client_conn .sender .send(Message::Request(initialize_request(1))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 1); client_conn .sender .send(Message::Notification(initialized_notification())) - .unwrap(); + .expect("expected success"); let uri = "file:///test/navigation-matrix-local.jsonnet"; let text = "local x = 1;\nlocal y = x;\ny"; client_conn .sender .send(Message::Notification(did_open_notification(uri, text))) - .unwrap(); + .expect("expected success"); let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); let declaration = send_goto_and_parse( @@ -330,7 +332,7 @@ fn test_navigation_matrix_local_alias() { assert_eq!( declaration, Some(GotoDefinitionResponse::Scalar(lsp_types::Location { - uri: uri.parse().unwrap(), + uri: uri.parse().expect("expected success"), range: lsp_types::Range { start: Position { line: 
1, @@ -346,7 +348,7 @@ fn test_navigation_matrix_local_alias() { assert_eq!( definition, Some(GotoDefinitionResponse::Scalar(lsp_types::Location { - uri: uri.parse().unwrap(), + uri: uri.parse().expect("expected success"), range: lsp_types::Range { start: Position { line: 0, @@ -363,7 +365,7 @@ fn test_navigation_matrix_local_alias() { assert_eq!( implementation, Some(GotoDefinitionResponse::Scalar(lsp_types::Location { - uri: uri.parse().unwrap(), + uri: uri.parse().expect("expected success"), range: lsp_types::Range { start: Position { line: 1, @@ -380,12 +382,12 @@ fn test_navigation_matrix_local_alias() { client_conn .sender .send(Message::Request(shutdown_request(6))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 6); client_conn .sender .send(Message::Notification(exit_notification())) - .unwrap(); + .expect("expected success"); server_thread .join() .expect("Server thread should exit cleanly"); @@ -407,7 +409,7 @@ alias"#, .expect("failed to write main file"); let uri = file_uri(&main_path); - let lib_uri: lsp_types::Uri = file_uri(&lib_path).parse().unwrap(); + let lib_uri: lsp_types::Uri = file_uri(&lib_path).parse().expect("expected success"); let text = fs::read_to_string(&main_path).expect("failed to read main file"); let (client_conn, server_conn) = Connection::memory(); @@ -416,17 +418,17 @@ alias"#, client_conn .sender .send(Message::Request(initialize_request(1))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 1); client_conn .sender .send(Message::Notification(initialized_notification())) - .unwrap(); + .expect("expected success"); client_conn .sender .send(Message::Notification(did_open_notification(&uri, &text))) - .unwrap(); + .expect("expected success"); let _ = recv_publish_diagnostics_for_uri(&client_conn, &uri, Duration::from_secs(5)); let declaration = send_goto_and_parse( @@ -457,7 +459,7 @@ alias"#, assert_eq!( declaration, 
Some(GotoDefinitionResponse::Scalar(lsp_types::Location { - uri: uri.parse().unwrap(), + uri: uri.parse().expect("expected success"), range: lsp_types::Range { start: Position { line: 1, @@ -490,7 +492,7 @@ alias"#, assert_eq!( implementation, Some(GotoDefinitionResponse::Scalar(lsp_types::Location { - uri: uri.parse().unwrap(), + uri: uri.parse().expect("expected success"), range: lsp_types::Range { start: Position { line: 1, @@ -507,12 +509,12 @@ alias"#, client_conn .sender .send(Message::Request(shutdown_request(6))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 6); client_conn .sender .send(Message::Notification(exit_notification())) - .unwrap(); + .expect("expected success"); server_thread .join() .expect("Server thread should exit cleanly"); @@ -526,25 +528,25 @@ fn test_goto_declaration() { client_conn .sender .send(Message::Request(initialize_request(1))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 1); client_conn .sender .send(Message::Notification(initialized_notification())) - .unwrap(); + .expect("expected success"); let uri = "file:///test/declaration.jsonnet"; let text = "local x = 1; x + 1"; client_conn .sender .send(Message::Notification(did_open_notification(uri, text))) - .unwrap(); + .expect("expected success"); let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); client_conn .sender .send(Message::Request(goto_declaration_request(2, uri, 0, 13))) - .unwrap(); + .expect("expected success"); let response = recv_response(&client_conn, 2); assert!( @@ -552,11 +554,12 @@ fn test_goto_declaration() { "Goto declaration request should succeed" ); let result: Option = - serde_json::from_value(response.result.expect("should have result")).unwrap(); + serde_json::from_value(response.result.expect("should have result")) + .expect("expected success"); assert_eq!( result, Some(GotoDefinitionResponse::Scalar(lsp_types::Location { - uri: uri.parse().unwrap(), 
+ uri: uri.parse().expect("expected success"), range: lsp_types::Range { start: Position { line: 0, @@ -573,12 +576,12 @@ fn test_goto_declaration() { client_conn .sender .send(Message::Request(shutdown_request(3))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 3); client_conn .sender .send(Message::Notification(exit_notification())) - .unwrap(); + .expect("expected success"); server_thread .join() .expect("Server thread should exit cleanly"); @@ -592,32 +595,33 @@ fn test_goto_implementation_local_binding() { client_conn .sender .send(Message::Request(initialize_request(1))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 1); client_conn .sender .send(Message::Notification(initialized_notification())) - .unwrap(); + .expect("expected success"); let uri = "file:///test/implementation-local.jsonnet"; let text = "local x = 1; x + 1"; client_conn .sender .send(Message::Notification(did_open_notification(uri, text))) - .unwrap(); + .expect("expected success"); let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); client_conn .sender .send(Message::Request(goto_definition_request(2, uri, 0, 13))) - .unwrap(); + .expect("expected success"); let definition_response = recv_response(&client_conn, 2); let definition_result: Option = - serde_json::from_value(definition_response.result.expect("should have result")).unwrap(); + serde_json::from_value(definition_response.result.expect("should have result")) + .expect("expected success"); assert_eq!( definition_result, Some(GotoDefinitionResponse::Scalar(lsp_types::Location { - uri: uri.parse().unwrap(), + uri: uri.parse().expect("expected success"), range: lsp_types::Range { start: Position { line: 0, @@ -634,15 +638,15 @@ fn test_goto_implementation_local_binding() { client_conn .sender .send(Message::Request(goto_implementation_request(3, uri, 0, 13))) - .unwrap(); + .expect("expected success"); let implementation_response = 
recv_response(&client_conn, 3); let implementation_result: Option = serde_json::from_value(implementation_response.result.expect("should have result")) - .unwrap(); + .expect("expected success"); assert_eq!( implementation_result, Some(GotoDefinitionResponse::Scalar(lsp_types::Location { - uri: uri.parse().unwrap(), + uri: uri.parse().expect("expected success"), range: lsp_types::Range { start: Position { line: 0, @@ -659,12 +663,12 @@ fn test_goto_implementation_local_binding() { client_conn .sender .send(Message::Request(shutdown_request(4))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 4); client_conn .sender .send(Message::Notification(exit_notification())) - .unwrap(); + .expect("expected success"); server_thread .join() .expect("Server thread should exit cleanly"); @@ -678,32 +682,33 @@ fn test_goto_definition_and_declaration_diverge_for_local_alias() { client_conn .sender .send(Message::Request(initialize_request(1))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 1); client_conn .sender .send(Message::Notification(initialized_notification())) - .unwrap(); + .expect("expected success"); let uri = "file:///test/alias-definition-vs-declaration.jsonnet"; let text = "local x = 1;\nlocal y = x;\ny"; client_conn .sender .send(Message::Notification(did_open_notification(uri, text))) - .unwrap(); + .expect("expected success"); let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); client_conn .sender .send(Message::Request(goto_definition_request(2, uri, 2, 0))) - .unwrap(); + .expect("expected success"); let definition_response = recv_response(&client_conn, 2); let definition_result: Option = - serde_json::from_value(definition_response.result.expect("should have result")).unwrap(); + serde_json::from_value(definition_response.result.expect("should have result")) + .expect("expected success"); assert_eq!( definition_result, 
Some(GotoDefinitionResponse::Scalar(lsp_types::Location { - uri: uri.parse().unwrap(), + uri: uri.parse().expect("expected success"), range: lsp_types::Range { start: Position { line: 0, @@ -720,14 +725,15 @@ fn test_goto_definition_and_declaration_diverge_for_local_alias() { client_conn .sender .send(Message::Request(goto_declaration_request(3, uri, 2, 0))) - .unwrap(); + .expect("expected success"); let declaration_response = recv_response(&client_conn, 3); let declaration_result: Option = - serde_json::from_value(declaration_response.result.expect("should have result")).unwrap(); + serde_json::from_value(declaration_response.result.expect("should have result")) + .expect("expected success"); assert_eq!( declaration_result, Some(GotoDefinitionResponse::Scalar(lsp_types::Location { - uri: uri.parse().unwrap(), + uri: uri.parse().expect("expected success"), range: lsp_types::Range { start: Position { line: 1, @@ -744,12 +750,12 @@ fn test_goto_definition_and_declaration_diverge_for_local_alias() { client_conn .sender .send(Message::Request(shutdown_request(4))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 4); client_conn .sender .send(Message::Notification(exit_notification())) - .unwrap(); + .expect("expected success"); server_thread .join() .expect("Server thread should exit cleanly"); @@ -766,7 +772,7 @@ fn test_goto_implementation_import_field() { .expect("failed to write main file"); let uri = file_uri(&main_path); - let lib_uri: lsp_types::Uri = file_uri(&lib_path).parse().unwrap(); + let lib_uri: lsp_types::Uri = file_uri(&lib_path).parse().expect("expected success"); let text = fs::read_to_string(&main_path).expect("failed to read main file"); let (client_conn, server_conn) = Connection::memory(); @@ -775,25 +781,26 @@ fn test_goto_implementation_import_field() { client_conn .sender .send(Message::Request(initialize_request(1))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 1); client_conn 
.sender .send(Message::Notification(initialized_notification())) - .unwrap(); + .expect("expected success"); client_conn .sender .send(Message::Notification(did_open_notification(&uri, &text))) - .unwrap(); + .expect("expected success"); client_conn .sender .send(Message::Request(goto_definition_request(2, &uri, 0, 40))) - .unwrap(); + .expect("expected success"); let definition_response = recv_response(&client_conn, 2); let definition_result: Option = - serde_json::from_value(definition_response.result.expect("should have result")).unwrap(); + serde_json::from_value(definition_response.result.expect("should have result")) + .expect("expected success"); assert_eq!( definition_result, Some(GotoDefinitionResponse::Scalar(lsp_types::Location { @@ -814,10 +821,11 @@ fn test_goto_implementation_import_field() { client_conn .sender .send(Message::Request(goto_declaration_request(3, &uri, 0, 40))) - .unwrap(); + .expect("expected success"); let declaration_response = recv_response(&client_conn, 3); let declaration_result: Option = - serde_json::from_value(declaration_response.result.expect("should have result")).unwrap(); + serde_json::from_value(declaration_response.result.expect("should have result")) + .expect("expected success"); assert_eq!( declaration_result, Some(GotoDefinitionResponse::Scalar(lsp_types::Location { @@ -840,11 +848,11 @@ fn test_goto_implementation_import_field() { .send(Message::Request(goto_implementation_request( 4, &uri, 0, 40, ))) - .unwrap(); + .expect("expected success"); let implementation_response = recv_response(&client_conn, 4); let implementation_result: Option = serde_json::from_value(implementation_response.result.expect("should have result")) - .unwrap(); + .expect("expected success"); assert_eq!( implementation_result, Some(GotoDefinitionResponse::Scalar(lsp_types::Location { @@ -865,12 +873,12 @@ fn test_goto_implementation_import_field() { client_conn .sender .send(Message::Request(shutdown_request(5))) - .unwrap(); + 
.expect("expected success"); let _ = recv_response(&client_conn, 5); client_conn .sender .send(Message::Notification(exit_notification())) - .unwrap(); + .expect("expected success"); server_thread .join() .expect("Server thread should exit cleanly"); @@ -892,7 +900,7 @@ alias"#, .expect("failed to write main file"); let uri = file_uri(&main_path); - let lib_uri: lsp_types::Uri = file_uri(&lib_path).parse().unwrap(); + let lib_uri: lsp_types::Uri = file_uri(&lib_path).parse().expect("expected success"); let text = fs::read_to_string(&main_path).expect("failed to read main file"); let (client_conn, server_conn) = Connection::memory(); @@ -901,26 +909,27 @@ alias"#, client_conn .sender .send(Message::Request(initialize_request(1))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 1); client_conn .sender .send(Message::Notification(initialized_notification())) - .unwrap(); + .expect("expected success"); client_conn .sender .send(Message::Notification(did_open_notification(&uri, &text))) - .unwrap(); + .expect("expected success"); let _ = recv_publish_diagnostics_for_uri(&client_conn, &uri, Duration::from_secs(5)); client_conn .sender .send(Message::Request(goto_definition_request(2, &uri, 2, 0))) - .unwrap(); + .expect("expected success"); let definition_response = recv_response(&client_conn, 2); let definition_result: Option = - serde_json::from_value(definition_response.result.expect("should have result")).unwrap(); + serde_json::from_value(definition_response.result.expect("should have result")) + .expect("expected success"); assert_eq!( definition_result, Some(GotoDefinitionResponse::Scalar(lsp_types::Location { @@ -941,14 +950,15 @@ alias"#, client_conn .sender .send(Message::Request(goto_declaration_request(3, &uri, 2, 0))) - .unwrap(); + .expect("expected success"); let declaration_response = recv_response(&client_conn, 3); let declaration_result: Option = - serde_json::from_value(declaration_response.result.expect("should have 
result")).unwrap(); + serde_json::from_value(declaration_response.result.expect("should have result")) + .expect("expected success"); assert_eq!( declaration_result, Some(GotoDefinitionResponse::Scalar(lsp_types::Location { - uri: uri.parse().unwrap(), + uri: uri.parse().expect("expected success"), range: lsp_types::Range { start: Position { line: 1, @@ -965,15 +975,15 @@ alias"#, client_conn .sender .send(Message::Request(goto_implementation_request(4, &uri, 2, 0))) - .unwrap(); + .expect("expected success"); let implementation_response = recv_response(&client_conn, 4); let implementation_result: Option = serde_json::from_value(implementation_response.result.expect("should have result")) - .unwrap(); + .expect("expected success"); assert_eq!( implementation_result, Some(GotoDefinitionResponse::Scalar(lsp_types::Location { - uri: uri.parse().unwrap(), + uri: uri.parse().expect("expected success"), range: lsp_types::Range { start: Position { line: 1, @@ -990,12 +1000,12 @@ alias"#, client_conn .sender .send(Message::Request(shutdown_request(5))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 5); client_conn .sender .send(Message::Notification(exit_notification())) - .unwrap(); + .expect("expected success"); server_thread .join() .expect("Server thread should exit cleanly"); @@ -1012,7 +1022,7 @@ fn test_diagnostics_import_file_and_definition_resolution() { .expect("failed to write main file"); let uri = file_uri(&main_path); - let lib_uri: lsp_types::Uri = file_uri(&lib_path).parse().unwrap(); + let lib_uri: lsp_types::Uri = file_uri(&lib_path).parse().expect("expected success"); let text = fs::read_to_string(&main_path).expect("failed to read main file"); let (client_conn, server_conn) = Connection::memory(); @@ -1021,17 +1031,17 @@ fn test_diagnostics_import_file_and_definition_resolution() { client_conn .sender .send(Message::Request(initialize_request(1))) - .unwrap(); + .expect("expected success"); let _ = 
recv_response(&client_conn, 1); client_conn .sender .send(Message::Notification(initialized_notification())) - .unwrap(); + .expect("expected success"); client_conn .sender .send(Message::Notification(did_open_notification(&uri, &text))) - .unwrap(); + .expect("expected success"); let diagnostics = recv_publish_diagnostics_for_uri(&client_conn, &uri, Duration::from_secs(5)); assert_eq!(diagnostics.uri.as_str(), uri); @@ -1043,11 +1053,12 @@ fn test_diagnostics_import_file_and_definition_resolution() { client_conn .sender .send(Message::Request(goto_definition_request(2, &uri, 0, 40))) - .unwrap(); + .expect("expected success"); let response = recv_response(&client_conn, 2); assert!(response.error.is_none(), "Goto definition should succeed"); let result: Option = - serde_json::from_value(response.result.expect("should have result")).unwrap(); + serde_json::from_value(response.result.expect("should have result")) + .expect("expected success"); assert_eq!( result, Some(GotoDefinitionResponse::Scalar(lsp_types::Location { @@ -1068,12 +1079,12 @@ fn test_diagnostics_import_file_and_definition_resolution() { client_conn .sender .send(Message::Request(shutdown_request(3))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 3); client_conn .sender .send(Message::Notification(exit_notification())) - .unwrap(); + .expect("expected success"); server_thread .join() .expect("Server thread should exit cleanly"); diff --git a/crates/jrsonnet-lsp/tests/integration_test/workspace_cross_file.rs b/crates/jrsonnet-lsp/tests/integration_test/workspace_cross_file.rs index dc6a50a9..ac1f9453 100644 --- a/crates/jrsonnet-lsp/tests/integration_test/workspace_cross_file.rs +++ b/crates/jrsonnet-lsp/tests/integration_test/workspace_cross_file.rs @@ -22,12 +22,12 @@ fn test_watched_file_refreshes_unopened_importers_for_references() { client_conn .sender .send(Message::Request(initialize_request(1))) - .unwrap(); - let _ = client_conn.receiver.recv().unwrap(); + 
.expect("expected success"); + let _ = client_conn.receiver.recv().expect("expected success"); client_conn .sender .send(Message::Notification(initialized_notification())) - .unwrap(); + .expect("expected success"); // Open lib1 (current document for references requests) client_conn @@ -36,39 +36,38 @@ fn test_watched_file_refreshes_unopened_importers_for_references() { &lib1_uri, "local target = 1; target", ))) - .unwrap(); - let _ = client_conn.receiver.recv().unwrap(); // initial diagnostics + .expect("expected success"); + let _ = client_conn.receiver.recv().expect("expected success"); // initial diagnostics // Index unopened main file via watched-files notification client_conn .sender .send(Message::Notification( did_change_watched_files_notification(vec![FileEvent { - uri: main_uri.parse().unwrap(), + uri: main_uri.parse().expect("expected success"), typ: FileChangeType::CREATED, }]), )) - .unwrap(); + .expect("expected success"); client_conn .sender .send(Message::Request(custom_find_transitive_importers_request( 20, &lib1_uri, ))) - .unwrap(); + .expect("expected success"); let response = recv_response(&client_conn, 20); assert!(response.error.is_none(), "Command should succeed"); let command_result = response.result.expect("command should return result"); - let importers = command_result["transitiveImporters"] + let importers = command_result + .get("transitiveImporters") + .expect("transitiveImporters key should exist") .as_array() .expect("transitiveImporters should be an array") .iter() .filter_map(|value| value.as_str()) .collect::>(); - assert!( - importers.iter().any(|uri| *uri == main_uri), - "Expected main to be indexed as lib1 importer, got: {importers:?}" - ); + assert_eq!(importers, vec![main_uri.as_str()]); // Query references to `target` definition in lib1 (line 0, col 6) client_conn @@ -76,11 +75,12 @@ fn test_watched_file_refreshes_unopened_importers_for_references() { .send(Message::Request(references_request( 2, &lib1_uri, 0, 6, false, ))) 
- .unwrap(); + .expect("expected success"); let response = recv_response(&client_conn, 2); assert!(response.error.is_none(), "References should succeed"); let refs: Option> = - serde_json::from_value(response.result.expect("should have result")).unwrap(); + serde_json::from_value(response.result.expect("should have result")) + .expect("expected success"); let refs = refs.unwrap_or_default(); assert!( refs.iter() @@ -94,11 +94,12 @@ fn test_watched_file_refreshes_unopened_importers_for_references() { .send(Message::Request(references_request( 21, &lib1_uri, 0, 18, false, ))) - .unwrap(); + .expect("expected success"); let response = recv_response(&client_conn, 21); assert!(response.error.is_none(), "References should succeed"); let refs: Option> = - serde_json::from_value(response.result.expect("should have result")).unwrap(); + serde_json::from_value(response.result.expect("should have result")) + .expect("expected success"); let refs = refs.unwrap_or_default(); assert!( refs.iter() @@ -114,11 +115,11 @@ fn test_watched_file_refreshes_unopened_importers_for_references() { .sender .send(Message::Notification( did_change_watched_files_notification(vec![FileEvent { - uri: main_uri.parse().unwrap(), + uri: main_uri.parse().expect("expected success"), typ: FileChangeType::CHANGED, }]), )) - .unwrap(); + .expect("expected success"); // References to lib1 target should no longer include main client_conn @@ -126,11 +127,12 @@ fn test_watched_file_refreshes_unopened_importers_for_references() { .send(Message::Request(references_request( 3, &lib1_uri, 0, 6, false, ))) - .unwrap(); + .expect("expected success"); let response = recv_response(&client_conn, 3); assert!(response.error.is_none(), "References should succeed"); let refs: Option> = - serde_json::from_value(response.result.expect("should have result")).unwrap(); + serde_json::from_value(response.result.expect("should have result")) + .expect("expected success"); let refs = refs.unwrap_or_default(); assert!( !refs @@ 
-143,12 +145,12 @@ fn test_watched_file_refreshes_unopened_importers_for_references() { client_conn .sender .send(Message::Request(shutdown_request(4))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 4); client_conn .sender .send(Message::Notification(exit_notification())) - .unwrap(); + .expect("expected success"); server_thread .join() .expect("Server thread should exit cleanly"); @@ -175,12 +177,12 @@ fn test_initialize_bootstraps_workspace_import_graph() { .send(Message::Request(initialize_request_with_root_uri( 1, &root_uri, ))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 1); client_conn .sender .send(Message::Notification(initialized_notification())) - .unwrap(); + .expect("expected success"); let expected_result = json!({ "file": lib_uri, @@ -188,15 +190,12 @@ fn test_initialize_bootstraps_workspace_import_graph() { }); let mut actual_result = serde_json::Value::Null; for request_id in 2..=42 { - let target_uri = expected_result["file"] - .as_str() - .expect("expected file URI should be a string"); client_conn .sender .send(Message::Request(custom_find_transitive_importers_request( - request_id, target_uri, + request_id, &lib_uri, ))) - .unwrap(); + .expect("expected success"); let response = recv_response(&client_conn, request_id); assert!( response.error.is_none(), @@ -213,12 +212,12 @@ fn test_initialize_bootstraps_workspace_import_graph() { client_conn .sender .send(Message::Request(shutdown_request(3))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 3); client_conn .sender .send(Message::Notification(exit_notification())) - .unwrap(); + .expect("expected success"); server_thread .join() .expect("Server thread should exit cleanly"); @@ -246,12 +245,12 @@ fn test_workspace_symbol_includes_unopened_workspace_files() { .send(Message::Request(initialize_request_with_root_uri( 1, &root_uri, ))) - .unwrap(); + .expect("expected success"); let _ = 
recv_response(&client_conn, 1); client_conn .sender .send(Message::Notification(initialized_notification())) - .unwrap(); + .expect("expected success"); let symbols = request_workspace_symbols(&client_conn, 2, "workspaceOnly"); @@ -259,7 +258,7 @@ fn test_workspace_symbol_includes_unopened_workspace_files() { closed_text.to_string(), jrsonnet_lsp_document::DocVersion::new(0), ); - let expected_uri: lsp_types::Uri = closed_uri.parse().unwrap(); + let expected_uri: lsp_types::Uri = closed_uri.parse().expect("expected success"); let expected_symbols = Some(jrsonnet_lsp_handlers::workspace_symbols_for_document( &expected_doc, &expected_uri, @@ -280,12 +279,12 @@ fn test_workspace_symbol_includes_unopened_workspace_files() { client_conn .sender .send(Message::Request(shutdown_request(3))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 3); client_conn .sender .send(Message::Notification(exit_notification())) - .unwrap(); + .expect("expected success"); server_thread .join() .expect("Server thread should exit cleanly"); @@ -299,12 +298,12 @@ fn test_workspace_symbol_ranks_exact_prefix_then_substring() { client_conn .sender .send(Message::Request(initialize_request(1))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 1); client_conn .sender .send(Message::Notification(initialized_notification())) - .unwrap(); + .expect("expected success"); let uri = "file:///test/workspace-symbol-ranking.jsonnet"; let text = @@ -312,7 +311,7 @@ fn test_workspace_symbol_ranks_exact_prefix_then_substring() { client_conn .sender .send(Message::Notification(did_open_notification(uri, text))) - .unwrap(); + .expect("expected success"); let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); let symbols = request_workspace_symbols(&client_conn, 2, "needle"); @@ -320,7 +319,7 @@ fn test_workspace_symbol_ranks_exact_prefix_then_substring() { text.to_string(), jrsonnet_lsp_document::DocVersion::new(1), ); - 
let expected_uri: lsp_types::Uri = uri.parse().unwrap(); + let expected_uri: lsp_types::Uri = uri.parse().expect("expected success"); let expected_all = jrsonnet_lsp_handlers::workspace_symbols_for_document( &expected_doc, &expected_uri, @@ -353,12 +352,12 @@ fn test_workspace_symbol_ranks_exact_prefix_then_substring() { client_conn .sender .send(Message::Request(shutdown_request(3))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 3); client_conn .sender .send(Message::Notification(exit_notification())) - .unwrap(); + .expect("expected success"); server_thread .join() .expect("Server thread should exit cleanly"); @@ -372,12 +371,12 @@ fn test_workspace_symbol_caps_results_with_deterministic_order() { client_conn .sender .send(Message::Request(initialize_request(1))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 1); client_conn .sender .send(Message::Notification(initialized_notification())) - .unwrap(); + .expect("expected success"); let uri = "file:///test/workspace-symbol-cap.jsonnet"; let text = { @@ -391,13 +390,13 @@ fn test_workspace_symbol_caps_results_with_deterministic_order() { client_conn .sender .send(Message::Notification(did_open_notification(uri, &text))) - .unwrap(); + .expect("expected success"); let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); let symbols = request_workspace_symbols(&client_conn, 2, "capsymbol"); let expected_doc = jrsonnet_lsp_document::Document::new(text, jrsonnet_lsp_document::DocVersion::new(1)); - let expected_uri: lsp_types::Uri = uri.parse().unwrap(); + let expected_uri: lsp_types::Uri = uri.parse().expect("expected success"); let expected_all = jrsonnet_lsp_handlers::workspace_symbols_for_document( &expected_doc, &expected_uri, @@ -418,12 +417,12 @@ fn test_workspace_symbol_caps_results_with_deterministic_order() { client_conn .sender .send(Message::Request(shutdown_request(3))) - .unwrap(); + .expect("expected success"); 
let _ = recv_response(&client_conn, 3); client_conn .sender .send(Message::Notification(exit_notification())) - .unwrap(); + .expect("expected success"); server_thread .join() .expect("Server thread should exit cleanly"); @@ -451,12 +450,12 @@ fn test_find_transitive_importers_returns_sorted_uris() { client_conn .sender .send(Message::Request(initialize_request(1))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 1); client_conn .sender .send(Message::Notification(initialized_notification())) - .unwrap(); + .expect("expected success"); for (uri, text) in [ (&lib_uri, "{ target: 1 }"), @@ -466,7 +465,7 @@ fn test_find_transitive_importers_returns_sorted_uris() { client_conn .sender .send(Message::Notification(did_open_notification(uri, text))) - .unwrap(); + .expect("expected success"); let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); } @@ -475,7 +474,7 @@ fn test_find_transitive_importers_returns_sorted_uris() { .send(Message::Request(custom_find_transitive_importers_request( 2, &lib_uri, ))) - .unwrap(); + .expect("expected success"); let response = recv_response(&client_conn, 2); assert!(response.error.is_none(), "Command should succeed"); assert_eq!( @@ -489,12 +488,12 @@ fn test_find_transitive_importers_returns_sorted_uris() { client_conn .sender .send(Message::Request(shutdown_request(3))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 3); client_conn .sender .send(Message::Notification(exit_notification())) - .unwrap(); + .expect("expected success"); server_thread .join() .expect("Server thread should exit cleanly"); @@ -520,19 +519,19 @@ fn test_did_close_preserves_import_graph_for_references() { client_conn .sender .send(Message::Request(initialize_request(1))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 1); client_conn .sender .send(Message::Notification(initialized_notification())) - .unwrap(); + .expect("expected 
success"); client_conn .sender .send(Message::Notification(did_open_notification( &lib_uri, lib_text, ))) - .unwrap(); + .expect("expected success"); let _ = recv_publish_diagnostics_for_uri(&client_conn, &lib_uri, Duration::from_secs(5)); client_conn @@ -540,19 +539,19 @@ fn test_did_close_preserves_import_graph_for_references() { .send(Message::Notification(did_open_notification( &main_uri, main_text, ))) - .unwrap(); + .expect("expected success"); let _ = recv_publish_diagnostics_for_uri(&client_conn, &main_uri, Duration::from_secs(5)); client_conn .sender .send(Message::Notification(did_close_notification(&main_uri))) - .unwrap(); + .expect("expected success"); let closed_diagnostics = recv_publish_diagnostics_for_uri(&client_conn, &main_uri, Duration::from_secs(5)); assert_eq!( closed_diagnostics, lsp_types::PublishDiagnosticsParams { - uri: main_uri.parse().unwrap(), + uri: main_uri.parse().expect("expected success"), diagnostics: Vec::new(), version: None, } @@ -563,7 +562,7 @@ fn test_did_close_preserves_import_graph_for_references() { .send(Message::Request(custom_find_transitive_importers_request( 2, &lib_uri, ))) - .unwrap(); + .expect("expected success"); let response = recv_response(&client_conn, 2); assert!(response.error.is_none(), "Command should succeed"); let transitive_importers = response.result.expect("command should return result"); @@ -580,11 +579,12 @@ fn test_did_close_preserves_import_graph_for_references() { .send(Message::Request(references_request( 3, &lib_uri, 0, 6, false, ))) - .unwrap(); + .expect("expected success"); let response = recv_response(&client_conn, 3); assert!(response.error.is_none(), "References should succeed"); let references: Option> = - serde_json::from_value(response.result.expect("should have result")).unwrap(); + serde_json::from_value(response.result.expect("should have result")) + .expect("expected success"); let references = references.unwrap_or_default(); assert_eq!( references, @@ -594,12 +594,12 @@ fn 
test_did_close_preserves_import_graph_for_references() { client_conn .sender .send(Message::Request(shutdown_request(4))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 4); client_conn .sender .send(Message::Notification(exit_notification())) - .unwrap(); + .expect("expected success"); server_thread .join() .expect("Server thread should exit cleanly"); @@ -627,12 +627,12 @@ fn test_cross_file_rename_updates_definition_and_importers() { client_conn .sender .send(Message::Request(initialize_request(1))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 1); client_conn .sender .send(Message::Notification(initialized_notification())) - .unwrap(); + .expect("expected success"); client_conn .sender @@ -640,27 +640,28 @@ fn test_cross_file_rename_updates_definition_and_importers() { &lib_uri, "{ helper: function(x) x * 2 }", ))) - .unwrap(); + .expect("expected success"); client_conn .sender .send(Message::Notification( did_change_watched_files_notification(vec![FileEvent { - uri: main_uri.parse().unwrap(), + uri: main_uri.parse().expect("expected success"), typ: FileChangeType::CREATED, }]), )) - .unwrap(); + .expect("expected success"); client_conn .sender .send(Message::Request(rename_request(2, &lib_uri, 0, 2, "util"))) - .unwrap(); + .expect("expected success"); let response = recv_response(&client_conn, 2); assert!(response.error.is_none(), "Rename should succeed"); let edit: Option = - serde_json::from_value(response.result.expect("should have result")).unwrap(); + serde_json::from_value(response.result.expect("should have result")) + .expect("expected success"); let edit = edit.expect("rename should produce workspace edit"); let changes = edit.changes.expect("workspace edit should include changes"); @@ -668,29 +669,70 @@ fn test_cross_file_rename_updates_definition_and_importers() { .iter() .find_map(|(uri, edits)| (uri.as_str() == lib_uri).then_some(edits)) .expect("lib file should be edited"); - 
assert_eq!(lib_edits.len(), 1, "lib should have one definition rename"); - assert_eq!(lib_edits[0].new_text, "util"); + assert_eq!( + lib_edits, + &vec![lsp_types::TextEdit { + range: lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 2, + }, + end: lsp_types::Position { + line: 0, + character: 8, + }, + }, + new_text: "util".to_string(), + }], + "lib should have one definition rename edit", + ); let main_edits = changes .iter() .find_map(|(uri, edits)| (uri.as_str() == main_uri).then_some(edits)) .expect("main importer should be edited"); assert_eq!( - main_edits.len(), - 2, - "main should rename both field references" + main_edits, + &vec![ + lsp_types::TextEdit { + range: lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 38, + }, + end: lsp_types::Position { + line: 0, + character: 44, + }, + }, + new_text: "util".to_string(), + }, + lsp_types::TextEdit { + range: lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 54, + }, + end: lsp_types::Position { + line: 0, + character: 60, + }, + }, + new_text: "util".to_string(), + }, + ], + "main should rename both helper references", ); - assert!(main_edits.iter().all(|edit| edit.new_text == "util")); client_conn .sender .send(Message::Request(shutdown_request(3))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 3); client_conn .sender .send(Message::Notification(exit_notification())) - .unwrap(); + .expect("expected success"); server_thread .join() .expect("Server thread should exit cleanly"); @@ -725,19 +767,19 @@ fn test_cross_file_references_resolve_jpath_importers() { "jpath": [jpath_dir.to_string_lossy().to_string()], }), ))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 1); client_conn .sender .send(Message::Notification(initialized_notification())) - .unwrap(); + .expect("expected success"); client_conn .sender .send(Message::Notification(did_open_notification( &lib_uri, 
lib_text, ))) - .unwrap(); + .expect("expected success"); let _ = recv_publish_diagnostics_for_uri(&client_conn, &lib_uri, Duration::from_secs(5)); client_conn @@ -745,7 +787,7 @@ fn test_cross_file_references_resolve_jpath_importers() { .send(Message::Notification(did_open_notification( &main_uri, main_text, ))) - .unwrap(); + .expect("expected success"); let _ = recv_publish_diagnostics_for_uri(&client_conn, &main_uri, Duration::from_secs(5)); client_conn @@ -753,11 +795,12 @@ fn test_cross_file_references_resolve_jpath_importers() { .send(Message::Request(references_request( 2, &lib_uri, 0, 6, false, ))) - .unwrap(); + .expect("expected success"); let response = recv_response(&client_conn, 2); assert!(response.error.is_none(), "References should succeed"); let references: Option> = - serde_json::from_value(response.result.expect("should have result")).unwrap(); + serde_json::from_value(response.result.expect("should have result")) + .expect("expected success"); assert_eq!( references.unwrap_or_default(), vec![location(&lib_uri, 18, 24), location(&main_uri, 40, 46)] @@ -766,12 +809,12 @@ fn test_cross_file_references_resolve_jpath_importers() { client_conn .sender .send(Message::Request(shutdown_request(3))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 3); client_conn .sender .send(Message::Notification(exit_notification())) - .unwrap(); + .expect("expected success"); server_thread .join() .expect("Server thread should exit cleanly"); @@ -806,19 +849,19 @@ fn test_cross_file_rename_updates_jpath_importers() { "jpath": [jpath_dir.to_string_lossy().to_string()], }), ))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 1); client_conn .sender .send(Message::Notification(initialized_notification())) - .unwrap(); + .expect("expected success"); client_conn .sender .send(Message::Notification(did_open_notification( &lib_uri, lib_text, ))) - .unwrap(); + .expect("expected success"); let _ = 
recv_publish_diagnostics_for_uri(&client_conn, &lib_uri, Duration::from_secs(5)); client_conn @@ -826,23 +869,24 @@ fn test_cross_file_rename_updates_jpath_importers() { .send(Message::Notification(did_open_notification( &main_uri, main_text, ))) - .unwrap(); + .expect("expected success"); let _ = recv_publish_diagnostics_for_uri(&client_conn, &main_uri, Duration::from_secs(5)); client_conn .sender .send(Message::Request(rename_request(2, &lib_uri, 0, 2, "util"))) - .unwrap(); + .expect("expected success"); let response = recv_response(&client_conn, 2); assert!(response.error.is_none(), "Rename should succeed"); let edit: Option = - serde_json::from_value(response.result.expect("should have result")).unwrap(); + serde_json::from_value(response.result.expect("should have result")) + .expect("expected success"); let edit = edit.expect("rename should produce workspace edit"); let mut expected_changes = std::collections::HashMap::new(); expected_changes.insert( - lib_uri.parse().unwrap(), + lib_uri.parse().expect("expected success"), vec![lsp_types::TextEdit { range: lsp_types::Range { start: Position { @@ -858,7 +902,7 @@ fn test_cross_file_rename_updates_jpath_importers() { }], ); expected_changes.insert( - main_uri.parse().unwrap(), + main_uri.parse().expect("expected success"), vec![ lsp_types::TextEdit { range: lsp_types::Range { @@ -900,12 +944,12 @@ fn test_cross_file_rename_updates_jpath_importers() { client_conn .sender .send(Message::Request(shutdown_request(3))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 3); client_conn .sender .send(Message::Notification(exit_notification())) - .unwrap(); + .expect("expected success"); server_thread .join() .expect("Server thread should exit cleanly"); @@ -940,19 +984,19 @@ fn test_navigation_resolves_jpath_imports_from_graph() { "jpath": [jpath_dir.to_string_lossy().to_string()], }), ))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 1); client_conn 
.sender .send(Message::Notification(initialized_notification())) - .unwrap(); + .expect("expected success"); client_conn .sender .send(Message::Notification(did_open_notification( &lib_uri, lib_text, ))) - .unwrap(); + .expect("expected success"); let _ = recv_publish_diagnostics_for_uri(&client_conn, &lib_uri, Duration::from_secs(5)); client_conn @@ -960,7 +1004,7 @@ fn test_navigation_resolves_jpath_imports_from_graph() { .send(Message::Notification(did_open_notification( &main_uri, main_text, ))) - .unwrap(); + .expect("expected success"); let _ = recv_publish_diagnostics_for_uri(&client_conn, &main_uri, Duration::from_secs(5)); client_conn @@ -968,7 +1012,7 @@ fn test_navigation_resolves_jpath_imports_from_graph() { .send(Message::Request(goto_definition_request( 2, &main_uri, 0, 22, ))) - .unwrap(); + .expect("expected success"); let import_definition_response = recv_response(&client_conn, 2); assert!( import_definition_response.error.is_none(), @@ -979,7 +1023,7 @@ fn test_navigation_resolves_jpath_imports_from_graph() { .result .expect("should have goto definition result"), ) - .unwrap(); + .expect("expected success"); assert_eq!( import_definition, Some(GotoDefinitionResponse::Scalar(lsp_types::Location { @@ -993,7 +1037,7 @@ fn test_navigation_resolves_jpath_imports_from_graph() { .send(Message::Request(goto_definition_request( 3, &main_uri, 0, 40, ))) - .unwrap(); + .expect("expected success"); let definition_response = recv_response(&client_conn, 3); assert!( definition_response.error.is_none(), @@ -1004,7 +1048,7 @@ fn test_navigation_resolves_jpath_imports_from_graph() { .result .expect("should have definition result"), ) - .unwrap(); + .expect("expected success"); assert_eq!( definition, Some(GotoDefinitionResponse::Scalar(location(&lib_uri, 2, 8))) @@ -1015,7 +1059,7 @@ fn test_navigation_resolves_jpath_imports_from_graph() { .send(Message::Request(goto_declaration_request( 4, &main_uri, 0, 40, ))) - .unwrap(); + .expect("expected success"); let 
declaration_response = recv_response(&client_conn, 4); assert!( declaration_response.error.is_none(), @@ -1026,7 +1070,7 @@ fn test_navigation_resolves_jpath_imports_from_graph() { .result .expect("should have declaration result"), ) - .unwrap(); + .expect("expected success"); assert_eq!( declaration, Some(GotoDefinitionResponse::Scalar(location(&lib_uri, 2, 8))) @@ -1037,7 +1081,7 @@ fn test_navigation_resolves_jpath_imports_from_graph() { .send(Message::Request(goto_type_definition_request( 5, &main_uri, 0, 40, ))) - .unwrap(); + .expect("expected success"); let type_definition_response = recv_response(&client_conn, 5); assert!( type_definition_response.error.is_none(), @@ -1048,7 +1092,7 @@ fn test_navigation_resolves_jpath_imports_from_graph() { .result .expect("should have type definition result"), ) - .unwrap(); + .expect("expected success"); assert_eq!( type_definition, Some(GotoDefinitionResponse::Scalar(location(&lib_uri, 2, 8))) @@ -1057,12 +1101,12 @@ fn test_navigation_resolves_jpath_imports_from_graph() { client_conn .sender .send(Message::Request(shutdown_request(6))) - .unwrap(); + .expect("expected success"); let _ = recv_response(&client_conn, 6); client_conn .sender .send(Message::Notification(exit_notification())) - .unwrap(); + .expect("expected success"); server_thread .join() .expect("Server thread should exit cleanly"); diff --git a/crates/jrsonnet-lsp/tests/stress_tests.rs b/crates/jrsonnet-lsp/tests/stress_tests.rs index 662e8611..4c1921c7 100644 --- a/crates/jrsonnet-lsp/tests/stress_tests.rs +++ b/crates/jrsonnet-lsp/tests/stress_tests.rs @@ -30,7 +30,7 @@ fn initialize_request(id: i32) -> Request { Request::new( id.into(), Initialize::METHOD.to_string(), - serde_json::to_value(InitializeParams::default()).unwrap(), + serde_json::to_value(InitializeParams::default()).expect("expected success"), ) } @@ -57,7 +57,7 @@ fn exit_notification() -> Notification { fn did_open_notification(uri: &str, text: &str, version: i32) -> Notification { let 
params = DidOpenTextDocumentParams { text_document: TextDocumentItem { - uri: uri.parse().unwrap(), + uri: uri.parse().expect("expected success"), language_id: "jsonnet".to_string(), version, text: text.to_string(), @@ -65,7 +65,7 @@ fn did_open_notification(uri: &str, text: &str, version: i32) -> Notification { }; Notification::new( DidOpenTextDocument::METHOD.to_string(), - serde_json::to_value(params).unwrap(), + serde_json::to_value(params).expect("expected success"), ) } @@ -73,7 +73,7 @@ fn did_open_notification(uri: &str, text: &str, version: i32) -> Notification { fn did_change_notification_full(uri: &str, text: &str, version: i32) -> Notification { let params = DidChangeTextDocumentParams { text_document: VersionedTextDocumentIdentifier { - uri: uri.parse().unwrap(), + uri: uri.parse().expect("expected success"), version, }, content_changes: vec![TextDocumentContentChangeEvent { @@ -84,7 +84,7 @@ fn did_change_notification_full(uri: &str, text: &str, version: i32) -> Notifica }; Notification::new( DidChangeTextDocument::METHOD.to_string(), - serde_json::to_value(params).unwrap(), + serde_json::to_value(params).expect("expected success"), ) } @@ -97,7 +97,7 @@ fn did_change_notification_incremental( ) -> Notification { let params = DidChangeTextDocumentParams { text_document: VersionedTextDocumentIdentifier { - uri: uri.parse().unwrap(), + uri: uri.parse().expect("expected success"), version, }, content_changes: vec![TextDocumentContentChangeEvent { @@ -108,7 +108,7 @@ fn did_change_notification_incremental( }; Notification::new( DidChangeTextDocument::METHOD.to_string(), - serde_json::to_value(params).unwrap(), + serde_json::to_value(params).expect("expected success"), ) } @@ -117,7 +117,7 @@ fn hover_request(id: i32, uri: &str, line: u32, character: u32) -> Request { let params = HoverParams { text_document_position_params: TextDocumentPositionParams { text_document: TextDocumentIdentifier { - uri: uri.parse().unwrap(), + uri: uri.parse().expect("expected 
success"), }, position: Position { line, character }, }, @@ -126,7 +126,7 @@ fn hover_request(id: i32, uri: &str, line: u32, character: u32) -> Request { Request::new( id.into(), HoverRequest::METHOD.to_string(), - serde_json::to_value(params).unwrap(), + serde_json::to_value(params).expect("expected success"), ) } @@ -135,7 +135,7 @@ fn completion_request(id: i32, uri: &str, line: u32, character: u32) -> Request let params = CompletionParams { text_document_position: TextDocumentPositionParams { text_document: TextDocumentIdentifier { - uri: uri.parse().unwrap(), + uri: uri.parse().expect("expected success"), }, position: Position { line, character }, }, @@ -146,7 +146,7 @@ fn completion_request(id: i32, uri: &str, line: u32, character: u32) -> Request Request::new( id.into(), Completion::METHOD.to_string(), - serde_json::to_value(params).unwrap(), + serde_json::to_value(params).expect("expected success"), ) } @@ -155,7 +155,7 @@ fn goto_definition_request(id: i32, uri: &str, line: u32, character: u32) -> Req let params = GotoDefinitionParams { text_document_position_params: TextDocumentPositionParams { text_document: TextDocumentIdentifier { - uri: uri.parse().unwrap(), + uri: uri.parse().expect("expected success"), }, position: Position { line, character }, }, @@ -165,7 +165,7 @@ fn goto_definition_request(id: i32, uri: &str, line: u32, character: u32) -> Req Request::new( id.into(), GotoDefinition::METHOD.to_string(), - serde_json::to_value(params).unwrap(), + serde_json::to_value(params).expect("expected success"), ) } @@ -186,10 +186,10 @@ fn init_server() -> (Connection, thread::JoinHandle<()>) { client_conn .sender .send(Message::Request(initialize_request(1))) - .unwrap(); + .expect("expected success"); // Receive initialize response - let response = client_conn.receiver.recv().unwrap(); + let response = client_conn.receiver.recv().expect("expected success"); assert_matches!(response, Message::Response(resp) => { assert!(resp.error.is_none(), "Initialize 
should succeed"); }); @@ -198,7 +198,7 @@ fn init_server() -> (Connection, thread::JoinHandle<()>) { client_conn .sender .send(Message::Notification(initialized_notification())) - .unwrap(); + .expect("expected success"); (client_conn, server_thread) } @@ -209,10 +209,10 @@ fn shutdown_server(client_conn: &Connection, server_thread: thread::JoinHandle<( client_conn .sender .send(Message::Request(shutdown_request(req_id))) - .unwrap(); + .expect("expected success"); // Receive shutdown response - let response = client_conn.receiver.recv().unwrap(); + let response = client_conn.receiver.recv().expect("expected success"); assert_matches!(response, Message::Response(resp) => { assert!(resp.error.is_none(), "Shutdown should succeed"); }); @@ -221,7 +221,7 @@ fn shutdown_server(client_conn: &Connection, server_thread: thread::JoinHandle<( client_conn .sender .send(Message::Notification(exit_notification())) - .unwrap(); + .expect("expected success"); // Wait for server to exit server_thread @@ -252,7 +252,7 @@ fn test_rapid_document_changes() { client_conn .sender .send(Message::Notification(did_open_notification(uri, "{}", 1))) - .unwrap(); + .expect("expected success"); // Wait for initial diagnostics drain_messages(&client_conn, Duration::from_millis(50)); @@ -265,7 +265,7 @@ fn test_rapid_document_changes() { .send(Message::Notification(did_change_notification_full( uri, &content, i, ))) - .unwrap(); + .expect("expected success"); } // Wait for processing to settle @@ -275,7 +275,7 @@ fn test_rapid_document_changes() { client_conn .sender .send(Message::Request(hover_request(1000, uri, 0, 3))) - .unwrap(); + .expect("expected success"); // Should get a response (not necessarily with content, but should respond) let response = client_conn @@ -305,7 +305,7 @@ fn test_rapid_incremental_changes() { "local x = 1;\n", 1, ))) - .unwrap(); + .expect("expected success"); // Wait for initial diagnostics drain_messages(&client_conn, Duration::from_millis(50)); @@ -327,7 +327,7 
@@ fn test_rapid_incremental_changes() { &ch.to_string(), version, ))) - .unwrap(); + .expect("expected success"); version += 1; } @@ -338,7 +338,7 @@ fn test_rapid_incremental_changes() { client_conn .sender .send(Message::Request(hover_request(100, uri, 0, 6))) - .unwrap(); + .expect("expected success"); let response = client_conn .receiver @@ -368,7 +368,7 @@ add(x, y)"; client_conn .sender .send(Message::Notification(did_open_notification(uri, text, 1))) - .unwrap(); + .expect("expected success"); // Wait for initial diagnostics drain_messages(&client_conn, Duration::from_millis(100)); @@ -384,7 +384,7 @@ add(x, y)"; // Send hover request sender .send(Message::Request(hover_request(base_id, &uri, 0, 7))) - .unwrap(); + .expect("expected success"); // Send goto definition request sender .send(Message::Request(goto_definition_request( @@ -393,7 +393,7 @@ add(x, y)"; 3, 4, ))) - .unwrap(); + .expect("expected success"); // Send completion request sender .send(Message::Request(completion_request( @@ -402,14 +402,14 @@ add(x, y)"; 3, 0, ))) - .unwrap(); + .expect("expected success"); }) }) .collect(); // Wait for all sends to complete for handle in handles { - handle.join().unwrap(); + handle.join().expect("expected success"); } // Collect all responses (30 requests total) @@ -475,7 +475,7 @@ f{i}(1)" .send(Message::Notification(did_open_notification( &uri, &content, 1, ))) - .unwrap(); + .expect("expected success"); } // Wait for all documents to be processed @@ -488,7 +488,7 @@ f{i}(1)" client_conn .sender .send(Message::Request(hover_request(i + 100, &uri, 0, 7))) - .unwrap(); + .expect("expected success"); match client_conn.receiver.recv_timeout(Duration::from_secs(1)) { Ok(Message::Response(resp)) => { @@ -548,7 +548,7 @@ fn test_large_document() { .send(Message::Notification(did_open_notification( uri, &content, 1, ))) - .unwrap(); + .expect("expected success"); // Wait for processing drain_messages(&client_conn, Duration::from_millis(500)); @@ -557,19 +557,22 
@@ fn test_large_document() { client_conn .sender .send(Message::Request(hover_request(100, uri, 0, 7))) - .unwrap(); + .expect("expected success"); loop { - match client_conn.receiver.recv_timeout(Duration::from_secs(5)) { - Ok(Message::Response(resp)) => { + let message = client_conn + .receiver + .recv_timeout(Duration::from_secs(5)) + .expect("Server should respond to hover on large document"); + match message { + Message::Response(resp) => { assert!( resp.error.is_none(), "Hover should succeed on large document" ); break; } - Ok(Message::Notification(_) | Message::Request(_)) => {} - Err(err) => panic!("Server should respond to hover on large document: {err:?}"), + Message::Notification(_) | Message::Request(_) => {} } } @@ -583,20 +586,23 @@ fn test_large_document() { u32::try_from(middle_line).expect("middle line should fit in u32"), 12, ))) - .unwrap(); + .expect("expected success"); // Drain any notifications and get the response loop { - match client_conn.receiver.recv_timeout(Duration::from_secs(5)) { - Ok(Message::Response(resp)) => { + let message = client_conn + .receiver + .recv_timeout(Duration::from_secs(5)) + .expect("Should receive goto definition response"); + match message { + Message::Response(resp) => { assert!( resp.error.is_none(), "Goto definition should succeed on large document" ); break; } - Ok(Message::Notification(_) | Message::Request(_)) => {} - Err(err) => panic!("Should receive goto definition response: {err:?}"), + Message::Notification(_) | Message::Request(_) => {} } } @@ -621,7 +627,7 @@ fn test_changes_during_requests() { "local x = 1; x", 1, ))) - .unwrap(); + .expect("expected success"); // Wait for initial processing drain_messages(&client_conn, Duration::from_millis(100)); @@ -630,7 +636,7 @@ fn test_changes_during_requests() { client_conn .sender .send(Message::Request(goto_definition_request(100, uri, 0, 13))) - .unwrap(); + .expect("expected success"); // Change document before response client_conn @@ -640,7 +646,7 @@ 
fn test_changes_during_requests() { "local y = 2; y", 2, ))) - .unwrap(); + .expect("expected success"); // The server should handle this gracefully // Either return a result for the old or new document, but not crash @@ -670,19 +676,22 @@ fn test_changes_during_requests() { client_conn .sender .send(Message::Request(hover_request(200, uri, 0, 6))) - .unwrap(); + .expect("expected success"); loop { - match client_conn.receiver.recv_timeout(Duration::from_secs(2)) { - Ok(Message::Response(resp)) => { + let message = client_conn + .receiver + .recv_timeout(Duration::from_secs(2)) + .expect("Server should respond after document change"); + match message { + Message::Response(resp) => { assert!( resp.error.is_none(), "Server should be responsive after document change" ); break; } - Ok(Message::Notification(_) | Message::Request(_)) => {} - Err(err) => panic!("Server should respond after document change: {err:?}"), + Message::Notification(_) | Message::Request(_) => {} } } @@ -705,7 +714,7 @@ f(x)"; client_conn .sender .send(Message::Notification(did_open_notification(uri, text, 1))) - .unwrap(); + .expect("expected success"); // Wait for processing drain_messages(&client_conn, Duration::from_millis(100)); @@ -719,17 +728,24 @@ f(x)"; for req in requests { let req_id = req.id.clone(); - client_conn.sender.send(Message::Request(req)).unwrap(); + client_conn + .sender + .send(Message::Request(req)) + .expect("expected success"); let start = std::time::Instant::now(); let timeout = Duration::from_secs(5); loop { - let Some(remaining) = timeout.checked_sub(start.elapsed()) else { - panic!("Request {req_id:?} timed out"); - }; - match client_conn.receiver.recv_timeout(remaining) { - Ok(Message::Response(resp)) => { + let remaining = timeout + .checked_sub(start.elapsed()) + .expect("Request timed out before a response was received"); + let message = client_conn + .receiver + .recv_timeout(remaining) + .expect("Request timed out while waiting for response"); + match message { 
+ Message::Response(resp) => { assert_eq!( resp.id, req_id, "Should receive response for correct request" @@ -737,8 +753,7 @@ f(x)"; assert!(resp.error.is_none(), "Request {req_id:?} should not error"); break; } - Ok(Message::Notification(_) | Message::Request(_)) => {} - Err(err) => panic!("Request {req_id:?} timed out: {err:?}"), + Message::Notification(_) | Message::Request(_) => {} } } } @@ -763,7 +778,7 @@ fn test_shutdown_during_processing() { .send(Message::Notification(did_open_notification( &uri, &content, 1, ))) - .unwrap(); + .expect("expected success"); } // Don't wait for diagnostics - immediately shutdown @@ -771,7 +786,7 @@ fn test_shutdown_during_processing() { client_conn .sender .send(Message::Request(shutdown_request(100))) - .unwrap(); + .expect("expected success"); // Should still get a clean shutdown response let response = client_conn @@ -811,7 +826,7 @@ fn test_shutdown_during_processing() { client_conn .sender .send(Message::Notification(exit_notification())) - .unwrap(); + .expect("expected success"); // Server should exit cleanly server_thread From 6ccb521fadc2490d2ee25e4fa85966e8dd722822 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Fri, 27 Feb 2026 13:07:17 +0000 Subject: [PATCH 191/210] refactor(lsp): unify request errors with typed wire mapping --- crates/jrsonnet-lsp/Cargo.toml | 2 +- .../src/protocol/inflight_requests.rs | 115 ++++++++++-------- crates/jrsonnet-lsp/src/protocol/mod.rs | 1 + .../src/protocol/request_error.rs | 103 ++++++++++++++++ crates/jrsonnet-lsp/src/server.rs | 49 ++++++-- crates/jrsonnet-lsp/src/server/event_loop.rs | 9 +- .../src/server/request_dispatch.rs | 65 +++++----- crates/jrsonnet-lsp/tests/integration_test.rs | 22 ++++ .../tests/integration_test/features.rs | 25 ++-- 9 files changed, 277 insertions(+), 114 deletions(-) create mode 100644 crates/jrsonnet-lsp/src/protocol/request_error.rs diff --git a/crates/jrsonnet-lsp/Cargo.toml b/crates/jrsonnet-lsp/Cargo.toml index d493eb80..e0a1a536 100644 --- 
a/crates/jrsonnet-lsp/Cargo.toml +++ b/crates/jrsonnet-lsp/Cargo.toml @@ -26,6 +26,7 @@ rayon = "1.11.0" rustc-hash.workspace = true serde = { workspace = true, features = ["derive"] } serde_json.workspace = true +thiserror.workspace = true tracing = "0.1.44" [lints] @@ -39,7 +40,6 @@ jrsonnet-lsp-scope = { version = "0.5.0-pre97", path = "../jrsonnet-lsp-scope" } rstest = "0.23" serde_json.workspace = true tempfile.workspace = true -thiserror.workspace = true [[bench]] name = "type_inference" diff --git a/crates/jrsonnet-lsp/src/protocol/inflight_requests.rs b/crates/jrsonnet-lsp/src/protocol/inflight_requests.rs index 5cadb578..9e4fa34e 100644 --- a/crates/jrsonnet-lsp/src/protocol/inflight_requests.rs +++ b/crates/jrsonnet-lsp/src/protocol/inflight_requests.rs @@ -2,9 +2,11 @@ use std::marker::PhantomData; use anyhow::Result; use crossbeam_channel::Sender; -use lsp_server::{ErrorCode, Message, ReqQueue, RequestId, Response}; +use lsp_server::{Message, ReqQueue, RequestId, Response}; use serde::Serialize; +use super::request_error::RequestError; + #[derive(Debug, Clone, PartialEq, Eq)] struct IncomingRequestMeta { method: String, @@ -119,12 +121,14 @@ impl InflightRequests { let Some(meta) = self.queue.incoming.complete(&id) else { return Ok(false); }; - let message = format!("Request canceled: {}", meta.method); - self.sender.send(Message::Response(Response::new_err( + let response_error = lsp_server::ResponseError::try_from(RequestError::RequestCanceled { + method: meta.method, + })?; + self.sender.send(Message::Response(Response { id, - ErrorCode::RequestCanceled as i32, - message, - )))?; + result: None, + error: Some(response_error), + }))?; Ok(true) } @@ -136,13 +140,14 @@ impl InflightRequests { self.send_inflight_response(response) } - fn send_err_by_id( - &mut self, - id: RequestId, - code: ErrorCode, - message: impl Into, - ) -> Result { - self.send_inflight_response(Response::new_err(id, code as i32, message.into())) + fn send_error_by_id(&mut self, 
id: RequestId, error: RequestError) -> Result { + let response_error = lsp_server::ResponseError::try_from(error)?; + let response = Response { + id, + result: None, + error: Some(response_error), + }; + self.send_inflight_response(response) } pub(crate) fn send_ok( @@ -157,25 +162,23 @@ impl InflightRequests { self.send_ok_by_id(request.id, result) } - pub(crate) fn send_err( + pub(crate) fn send_error( &mut self, request: IncomingRequest, - code: ErrorCode, - message: impl Into, + error: RequestError, ) -> Result where R: lsp_types::request::Request, { - self.send_err_by_id(request.id, code, message) + self.send_error_by_id(request.id, error) } - pub(crate) fn send_unknown_err( + pub(crate) fn send_unknown_error( &mut self, request: UnknownIncomingRequest, - code: ErrorCode, - message: impl Into, + error: RequestError, ) -> Result { - self.send_err_by_id(request.id, code, message) + self.send_error_by_id(request.id, error) } pub(crate) fn complete_outgoing(&mut self, id: RequestId) -> Option { @@ -191,6 +194,7 @@ mod tests { use lsp_types::request::{CodeLensResolve, RegisterCapability, Request as _}; use super::InflightRequests; + use crate::protocol::request_error::{RequestError, RequestErrorData}; #[test] fn send_inflight_response_requires_registered_request_id() { @@ -284,30 +288,34 @@ mod tests { let request = inflight.begin_unknown(RequestId::from(23), "custom/method"); assert_eq!(request.method(), "custom/method"); assert!(inflight - .send_unknown_err( + .send_unknown_error( request, - ErrorCode::MethodNotFound, - "Method not found: custom/method", + RequestError::MethodNotFound { + method: "custom/method".to_string(), + }, ) .expect("expected success")); let message = receiver.recv().expect("expected success"); + let response = assert_matches!(message, Message::Response(response) => response); assert_matches!( - message, - Message::Response(response) if { - assert_matches!( - response, - Response { - ref id, - result: None, - error: Some(ref error), - } - 
if id == &RequestId::from(23) - && error.code == ErrorCode::MethodNotFound as i32 - && error.message == "Method not found: custom/method" - ); - true + response, + Response { + ref id, + result: None, + error: Some(ref error), } + if id == &RequestId::from(23) && error.code == ErrorCode::MethodNotFound as i32 + ); + let data = response + .error + .and_then(|error| error.data) + .expect("method-not-found should include structured error data"); + let data: RequestErrorData = + serde_json::from_value(data).expect("error data should deserialize"); + assert_matches!( + data, + RequestErrorData::MethodNotFound { method } if method == "custom/method" ); } @@ -321,22 +329,25 @@ mod tests { assert!(inflight.cancel_request(id).expect("expected success")); let message = receiver.recv().expect("expected success"); + let response = assert_matches!(message, Message::Response(response) => response); assert_matches!( - message, - Message::Response(response) if { - assert_matches!( - response, - Response { - ref id, - result: None, - error: Some(ref error), - } - if id == &RequestId::from(31) - && error.code == ErrorCode::RequestCanceled as i32 - && error.message == "Request canceled: textDocument/codeLens" - ); - true + response, + Response { + ref id, + result: None, + error: Some(ref error), } + if id == &RequestId::from(31) && error.code == ErrorCode::RequestCanceled as i32 + ); + let data = response + .error + .and_then(|error| error.data) + .expect("request-canceled should include structured error data"); + let data: RequestErrorData = + serde_json::from_value(data).expect("error data should deserialize"); + assert_matches!( + data, + RequestErrorData::RequestCanceled { method } if method == "textDocument/codeLens" ); } diff --git a/crates/jrsonnet-lsp/src/protocol/mod.rs b/crates/jrsonnet-lsp/src/protocol/mod.rs index 9cd9c900..434acb66 100644 --- a/crates/jrsonnet-lsp/src/protocol/mod.rs +++ b/crates/jrsonnet-lsp/src/protocol/mod.rs @@ -1 +1,2 @@ pub(crate) mod 
inflight_requests; +pub(crate) mod request_error; diff --git a/crates/jrsonnet-lsp/src/protocol/request_error.rs b/crates/jrsonnet-lsp/src/protocol/request_error.rs new file mode 100644 index 00000000..fafba371 --- /dev/null +++ b/crates/jrsonnet-lsp/src/protocol/request_error.rs @@ -0,0 +1,103 @@ +use lsp_server::{ErrorCode, ResponseError}; +use serde::{Deserialize, Serialize}; +use thiserror::Error; + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(tag = "kind", rename_all = "snake_case")] +pub(crate) enum RequestErrorData { + InvalidParams { method: String }, + MethodNotFound { method: String }, + UnknownExecuteCommand { command: String }, + MissingExecuteHandler { command: String }, + RequestCanceled { method: String }, + ServerShuttingDown, + AsyncHandlerFailed { method: String }, + AsyncHandlerPanicked { method: String }, +} + +#[derive(Debug, Error, Clone, PartialEq, Eq)] +pub(crate) enum RequestError { + #[error("Invalid params for {method}: {reason}")] + InvalidParams { method: String, reason: String }, + #[error("Method not found: {method}")] + MethodNotFound { method: String }, + #[error("Unknown execute command: {command}")] + UnknownExecuteCommand { command: String }, + #[error("Missing execute handler for custom operation: {command}")] + MissingExecuteHandler { command: String }, + #[error("Request canceled: {method}")] + RequestCanceled { method: String }, + #[error("Server is shutting down")] + ServerShuttingDown, + #[error("{method} failed: {details}")] + AsyncHandlerFailed { method: String, details: String }, + #[error("{method} panicked")] + AsyncHandlerPanicked { method: String }, +} + +impl RequestError { + #[must_use] + pub(crate) fn code(&self) -> ErrorCode { + match self { + Self::InvalidParams { .. } + | Self::UnknownExecuteCommand { .. } + | Self::MissingExecuteHandler { .. } => ErrorCode::InvalidParams, + Self::MethodNotFound { .. } => ErrorCode::MethodNotFound, + Self::RequestCanceled { .. 
} => ErrorCode::RequestCanceled, + Self::ServerShuttingDown => ErrorCode::InvalidRequest, + Self::AsyncHandlerFailed { .. } | Self::AsyncHandlerPanicked { .. } => { + ErrorCode::InternalError + } + } + } + + #[must_use] + pub(crate) fn invalid_params(method: &str, reason: impl Into) -> Self { + Self::InvalidParams { + method: method.to_string(), + reason: reason.into(), + } + } +} + +impl From<&RequestError> for RequestErrorData { + fn from(error: &RequestError) -> Self { + match error { + RequestError::InvalidParams { method, .. } => Self::InvalidParams { + method: method.clone(), + }, + RequestError::MethodNotFound { method } => Self::MethodNotFound { + method: method.clone(), + }, + RequestError::UnknownExecuteCommand { command } => Self::UnknownExecuteCommand { + command: command.clone(), + }, + RequestError::MissingExecuteHandler { command } => Self::MissingExecuteHandler { + command: command.clone(), + }, + RequestError::RequestCanceled { method } => Self::RequestCanceled { + method: method.clone(), + }, + RequestError::ServerShuttingDown => Self::ServerShuttingDown, + RequestError::AsyncHandlerFailed { method, .. 
} => Self::AsyncHandlerFailed { + method: method.clone(), + }, + RequestError::AsyncHandlerPanicked { method } => Self::AsyncHandlerPanicked { + method: method.clone(), + }, + } + } +} + +impl TryFrom for ResponseError { + type Error = serde_json::Error; + + fn try_from(error: RequestError) -> Result { + let data = serde_json::to_value(RequestErrorData::from(&error))?; + Ok(ResponseError { + code: error.code() as i32, + message: error.to_string(), + data: Some(data), + }) + } +} diff --git a/crates/jrsonnet-lsp/src/server.rs b/crates/jrsonnet-lsp/src/server.rs index 7b420078..ce2575fd 100644 --- a/crates/jrsonnet-lsp/src/server.rs +++ b/crates/jrsonnet-lsp/src/server.rs @@ -39,7 +39,7 @@ use crate::{ analysis::{tanka::effective_import_roots, EvalConfig, Evaluator}, async_diagnostics::{AsyncDiagnostics, DiagnosticsConfig}, config::ServerConfig, - protocol::inflight_requests::InflightRequests, + protocol::{inflight_requests::InflightRequests, request_error::RequestError}, }; /// Shared server configuration. 
@@ -205,19 +205,50 @@ impl Server { Ok(Ok(value)) => Response::new_ok(id, value), Ok(Err(err)) => { error!("Async handler failed for {}: {err:#}", method); - Response::new_err( + let response_error = + lsp_server::ResponseError::try_from(RequestError::AsyncHandlerFailed { + method: method.to_string(), + details: format!("{err:#}"), + }) + .unwrap_or_else(|serialize_error| { + error!( + "Failed to serialize async handler error for {}: {}", + method, serialize_error + ); + lsp_server::ResponseError { + code: lsp_server::ErrorCode::InternalError as i32, + message: format!("{method} failed"), + data: None, + } + }); + Response { id, - lsp_server::ErrorCode::InternalError as i32, - format!("{method} failed: {err:#}"), - ) + result: None, + error: Some(response_error), + } } Err(_) => { error!("Async handler panicked for {}", method); - Response::new_err( + let response_error = + lsp_server::ResponseError::try_from(RequestError::AsyncHandlerPanicked { + method: method.to_string(), + }) + .unwrap_or_else(|serialize_error| { + error!( + "Failed to serialize async panic error for {}: {}", + method, serialize_error + ); + lsp_server::ResponseError { + code: lsp_server::ErrorCode::InternalError as i32, + message: format!("{method} panicked"), + data: None, + } + }); + Response { id, - lsp_server::ErrorCode::InternalError as i32, - format!("{method} panicked"), - ) + result: None, + error: Some(response_error), + } } }; if sender.send(response).is_err() { diff --git a/crates/jrsonnet-lsp/src/server/event_loop.rs b/crates/jrsonnet-lsp/src/server/event_loop.rs index 8c213dd6..d82936e1 100644 --- a/crates/jrsonnet-lsp/src/server/event_loop.rs +++ b/crates/jrsonnet-lsp/src/server/event_loop.rs @@ -5,6 +5,7 @@ use lsp_types::notification::PublishDiagnostics; use tracing::{debug, error, warn}; use super::Server; +use crate::protocol::request_error::RequestError; impl Server { /// Main message loop. @@ -79,11 +80,9 @@ impl Server { // After shutdown, only respond with errors. 
let Request { id, method, .. } = req; let request = self.inflight_requests.begin_unknown(id, method.as_str()); - let _ = self.inflight_requests.send_unknown_err( - request, - lsp_server::ErrorCode::InvalidRequest, - "Server is shutting down", - )?; + let _ = self + .inflight_requests + .send_unknown_error(request, RequestError::ServerShuttingDown)?; } else { self.handle_request(req)?; } diff --git a/crates/jrsonnet-lsp/src/server/request_dispatch.rs b/crates/jrsonnet-lsp/src/server/request_dispatch.rs index 9c2b87b9..a64076c4 100644 --- a/crates/jrsonnet-lsp/src/server/request_dispatch.rs +++ b/crates/jrsonnet-lsp/src/server/request_dispatch.rs @@ -14,7 +14,7 @@ use serde::{de::DeserializeOwned, Serialize}; use tracing::{debug, info, warn}; use super::{async_requests::AsyncRequestContext, custom_operations, requests, Server}; -use crate::protocol::inflight_requests::IncomingRequest; +use crate::protocol::{inflight_requests::IncomingRequest, request_error::RequestError}; impl Server { /// Handle an incoming request. 
@@ -55,11 +55,11 @@ impl Server { } let request = self.inflight_requests.begin_unknown(id, method.as_str()); warn!("Unhandled request: {}", request.method()); - let message = format!("Method not found: {}", request.method()); - let _ = self.inflight_requests.send_unknown_err( + let _ = self.inflight_requests.send_unknown_error( request, - lsp_server::ErrorCode::MethodNotFound, - message, + RequestError::MethodNotFound { + method: method.to_string(), + }, )?; Ok(()) } @@ -93,11 +93,11 @@ impl Server { let request = self.inflight_requests.begin_unknown(id, method); warn!("Unhandled request: {}", request.method()); - let message = format!("Method not found: {}", request.method()); - let _ = self.inflight_requests.send_unknown_err( + let _ = self.inflight_requests.send_unknown_error( request, - lsp_server::ErrorCode::MethodNotFound, - message, + RequestError::MethodNotFound { + method: method.to_string(), + }, )?; Ok(()) } @@ -135,10 +135,9 @@ impl Server { let params: R::Params = match serde_json::from_value(params) { Ok(params) => params, Err(err) => { - let _ = self.inflight_requests.send_err( + let _ = self.inflight_requests.send_error( request, - lsp_server::ErrorCode::InvalidParams, - format!("Invalid params for {}: {err}", R::METHOD), + RequestError::invalid_params(R::METHOD, err.to_string()), )?; return Ok(()); } @@ -332,11 +331,11 @@ impl Server { _ => { let request = self.inflight_requests.begin_unknown(id, method); warn!("Unhandled request: {}", request.method()); - let message = format!("Method not found: {}", request.method()); - let _ = self.inflight_requests.send_unknown_err( + let _ = self.inflight_requests.send_unknown_error( request, - lsp_server::ErrorCode::MethodNotFound, - message, + RequestError::MethodNotFound { + method: method.to_string(), + }, )?; Ok(()) } @@ -357,10 +356,9 @@ impl Server { let params: R::Params = match serde_json::from_value(params) { Ok(params) => params, Err(err) => { - let _ = self.inflight_requests.send_err( + let _ = 
self.inflight_requests.send_error( request, - lsp_server::ErrorCode::InvalidParams, - format!("Invalid params for {}: {err}", R::METHOD), + RequestError::invalid_params(R::METHOD, err.to_string()), )?; return Ok(()); } @@ -378,19 +376,19 @@ impl Server { let params: ExecuteCommandParams = match serde_json::from_value(params) { Ok(params) => params, Err(err) => { - let _ = self.inflight_requests.send_err( + let _ = self.inflight_requests.send_error( request, - lsp_server::ErrorCode::InvalidParams, - format!("Invalid params for {}: {err}", ExecuteCommand::METHOD), + RequestError::invalid_params(ExecuteCommand::METHOD, err.to_string()), )?; return Ok(()); } }; if !Self::is_supported_execute_command(¶ms.command) { - let _ = self.inflight_requests.send_err( + let _ = self.inflight_requests.send_error( request, - lsp_server::ErrorCode::InvalidParams, - format!("Unknown execute command: {}", params.command), + RequestError::UnknownExecuteCommand { + command: params.command, + }, )?; return Ok(()); } @@ -399,13 +397,11 @@ impl Server { let context = self.async_request_context(); let args = params.arguments.clone(); let Some(compute) = operation.handle_execute_command else { - let _ = self.inflight_requests.send_err( + let _ = self.inflight_requests.send_error( request, - lsp_server::ErrorCode::InvalidParams, - format!( - "Missing execute handler for custom operation: {}", - params.command - ), + RequestError::MissingExecuteHandler { + command: params.command.clone(), + }, )?; return Ok(()); }; @@ -413,10 +409,11 @@ impl Server { return Ok(()); } - let _ = self.inflight_requests.send_err( + let _ = self.inflight_requests.send_error( request, - lsp_server::ErrorCode::InvalidParams, - format!("Unknown execute command: {}", params.command), + RequestError::UnknownExecuteCommand { + command: params.command, + }, )?; Ok(()) } diff --git a/crates/jrsonnet-lsp/tests/integration_test.rs b/crates/jrsonnet-lsp/tests/integration_test.rs index d7ab7cb4..dd9d7756 100644 --- 
a/crates/jrsonnet-lsp/tests/integration_test.rs +++ b/crates/jrsonnet-lsp/tests/integration_test.rs @@ -31,6 +31,7 @@ use lsp_types::{ TextDocumentPositionParams, WorkDoneProgressParams, WorkspaceClientCapabilities, WorkspaceFolder, }; +use serde::Deserialize; use serde_json::json; use tempfile::TempDir; @@ -43,6 +44,19 @@ struct ExpectedSemanticToken { modifiers: u32, } +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(tag = "kind", rename_all = "snake_case")] +enum RequestErrorData { + InvalidParams { method: String }, + MethodNotFound { method: String }, + UnknownExecuteCommand { command: String }, + MissingExecuteHandler { command: String }, + RequestCanceled { method: String }, + ServerShuttingDown, + AsyncHandlerFailed { method: String }, + AsyncHandlerPanicked { method: String }, +} + impl ExpectedSemanticToken { const fn new( line: u32, @@ -1077,6 +1091,14 @@ fn recv_response(conn: &Connection, expected_id: i32) -> lsp_server::Response { } } +fn parse_request_error_data(error: &lsp_server::ResponseError) -> RequestErrorData { + let data = error + .data + .clone() + .expect("response error should include structured data"); + serde_json::from_value(data).expect("response error data should decode") +} + fn recv_publish_diagnostics_for_uri( conn: &Connection, uri: &str, diff --git a/crates/jrsonnet-lsp/tests/integration_test/features.rs b/crates/jrsonnet-lsp/tests/integration_test/features.rs index 58309e4a..2205fcef 100644 --- a/crates/jrsonnet-lsp/tests/integration_test/features.rs +++ b/crates/jrsonnet-lsp/tests/integration_test/features.rs @@ -682,11 +682,11 @@ fn test_execute_command_unknown_returns_invalid_params_error() { .error .expect("unknown execute command should return an error"); assert_eq!(error.code, lsp_server::ErrorCode::InvalidParams as i32); - assert_eq!( - error.message, - "Unknown execute command: jrsonnet.unknownCommand" + assert_matches!( + parse_request_error_data(&error), + RequestErrorData::UnknownExecuteCommand { command 
} + if command == "jrsonnet.unknownCommand" ); - assert_eq!(error.data, None); client_conn .sender @@ -1067,8 +1067,11 @@ fn test_cancel_request_returns_request_canceled_error() { .error .expect("cancelled request should return request-canceled error"); assert_eq!(error.code, lsp_server::ErrorCode::RequestCanceled as i32); - assert_eq!(error.message, "Request canceled: textDocument/codeLens"); - assert_eq!(error.data, None); + assert_matches!( + parse_request_error_data(&error), + RequestErrorData::RequestCanceled { method } + if method == "textDocument/codeLens" + ); client_conn .sender @@ -1114,14 +1117,10 @@ fn test_code_lens_resolve_invalid_params_returns_invalid_params_error() { .error .expect("invalid code lens resolve params should return an error"); assert_eq!(error.code, lsp_server::ErrorCode::InvalidParams as i32); - assert!( - error - .message - .starts_with("Invalid params for codeLens/resolve:"), - "unexpected error message: {}", - error.message + assert_matches!( + parse_request_error_data(&error), + RequestErrorData::InvalidParams { method } if method == "codeLens/resolve" ); - assert_eq!(error.data, None); client_conn .sender From c8a816d86fe64bcba0916df55562229689e018f3 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Fri, 27 Feb 2026 13:08:23 +0000 Subject: [PATCH 192/210] style: apply nix fmt leftovers --- crates/jrsonnet-lsp-document/src/types.rs | 2 +- crates/jrsonnet-lsp-inference/src/expr/mod.rs | 5 +++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/crates/jrsonnet-lsp-document/src/types.rs b/crates/jrsonnet-lsp-document/src/types.rs index 2beecfec..3d2ed897 100644 --- a/crates/jrsonnet-lsp-document/src/types.rs +++ b/crates/jrsonnet-lsp-document/src/types.rs @@ -229,10 +229,10 @@ impl SymbolName { mod tests { use std::time::{SystemTime, UNIX_EPOCH}; - use crate::error::InvalidIdentifierReason; use assert_matches::assert_matches; use super::*; + use crate::error::InvalidIdentifierReason; #[test] fn 
test_byte_offset_conversions() { diff --git a/crates/jrsonnet-lsp-inference/src/expr/mod.rs b/crates/jrsonnet-lsp-inference/src/expr/mod.rs index 3af363f8..dcdc09d7 100644 --- a/crates/jrsonnet-lsp-inference/src/expr/mod.rs +++ b/crates/jrsonnet-lsp-inference/src/expr/mod.rs @@ -4,6 +4,7 @@ mod advanced; mod base; mod core; -pub(crate) use core::infer_expr_ty_and_record; -pub(crate) use core::{bind_destruct_with_type_ty, infer_expr_ty_impl, TypeRecorder}; +pub(crate) use core::{ + bind_destruct_with_type_ty, infer_expr_ty_and_record, infer_expr_ty_impl, TypeRecorder, +}; pub use core::{infer_document_type_ty, infer_expr_ty, infer_expr_ty_with_expected, is_divergent}; From 7583ae4b605ca3efe52cd3806e295e0e040c8ef7 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Fri, 27 Feb 2026 13:46:20 +0000 Subject: [PATCH 193/210] refactor(lsp): replace stringly errors with typed variants --- Cargo.lock | 2 + .../src/type_check/calls.rs | 19 +- .../jrsonnet-lsp-check/src/type_check/core.rs | 20 +- .../src/type_check/types.rs | 322 ++++++++----- crates/jrsonnet-lsp-handlers/Cargo.toml | 1 + .../src/formatting/engine.rs | 14 +- crates/jrsonnet-lsp-scenario/src/fixture.rs | 4 +- .../src/scenario_runner/helpers.rs | 16 +- .../src/scenario_script/inputs.rs | 261 ++++++---- .../src/scenario_script/markers.rs | 452 ++++++++++++------ crates/jrsonnet-lsp-types/Cargo.toml | 1 + crates/jrsonnet-lsp-types/src/operations.rs | 4 +- .../src/operations/operators.rs | 88 ++-- 13 files changed, 773 insertions(+), 431 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a3c998b0..68e8d889 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1903,6 +1903,7 @@ dependencies = [ "serde_json", "strum", "tempfile", + "thiserror 1.0.69", "tracing", ] @@ -1987,6 +1988,7 @@ dependencies = [ "jrsonnet-rowan-parser", "rstest 0.23.0", "rustc-hash", + "thiserror 1.0.69", ] [[package]] diff --git a/crates/jrsonnet-lsp-check/src/type_check/calls.rs b/crates/jrsonnet-lsp-check/src/type_check/calls.rs index 
1b73061c..38cf06e1 100644 --- a/crates/jrsonnet-lsp-check/src/type_check/calls.rs +++ b/crates/jrsonnet-lsp-check/src/type_check/calls.rs @@ -9,9 +9,7 @@ use jrsonnet_rowan_parser::{ use rowan::TextRange; use super::{TypeError, TypeErrorKind}; -use crate::format_check::{ - parse_format_string, FormatParseError, FormatPlaceholder, FormatTypeKind, -}; +use crate::format_check::{parse_format_string, FormatPlaceholder, FormatTypeKind}; /// Validate a function call using `FunctionData` (Ty-native version). pub(super) fn validate_function_call_ty( @@ -339,20 +337,9 @@ fn check_format_call(call: &ExprCall, analysis: &TypeAnalysis, errors: &mut Vec< // Parse the format string let format_spec = match parse_format_string(&fmt_string) { Ok(spec) => spec, - Err(e) => { - let message = match e { - FormatParseError::IncompleteSpecifier => "incomplete format specifier".to_string(), - FormatParseError::UnknownSpecifier(c) => format!("unknown specifier '%{c}'"), - FormatParseError::UnclosedNamedPlaceholder => { - "unclosed named placeholder".to_string() - } - FormatParseError::EmptyName => "empty name in named placeholder".to_string(), - FormatParseError::MixedPositionalAndNamed => { - "cannot mix positional and named placeholders".to_string() - } - }; + Err(parse_error) => { errors.push(TypeError { - kind: TypeErrorKind::FormatStringError { message }, + kind: TypeErrorKind::FormatStringError { parse_error }, range: fmt_expr.syntax().text_range(), }); return; diff --git a/crates/jrsonnet-lsp-check/src/type_check/core.rs b/crates/jrsonnet-lsp-check/src/type_check/core.rs index 72c76fdc..e31b8575 100644 --- a/crates/jrsonnet-lsp-check/src/type_check/core.rs +++ b/crates/jrsonnet-lsp-check/src/type_check/core.rs @@ -146,7 +146,7 @@ fn check_binary_base( if lhs_ty.is_any() || rhs_ty.is_any() || lhs_ty.is_never() || rhs_ty.is_never() { return; } - if let Err(message) = + if let Err(reason) = analysis.with_store_mut(|store| binary_op_result_ty(op.kind(), lhs_ty, rhs_ty, store)) { 
errors.push(TypeError { @@ -154,7 +154,7 @@ fn check_binary_base( lhs: lhs_ty, rhs: rhs_ty, op: binary_op_str(op.kind()), - message, + reason, }, range: base.syntax().text_range(), }); @@ -185,14 +185,14 @@ fn check_unary_base( if rhs_ty.is_any() || rhs_ty.is_never() { return; } - if let Err(message) = + if let Err(reason) = analysis.with_store_mut(|store| unary_op_result_ty(op.kind(), rhs_ty, store)) { errors.push(TypeError { kind: TypeErrorKind::UnaryOpMismatch { operand: rhs_ty, op: unary_op_str(op.kind()), - message, + reason, }, range: base.syntax().text_range(), }); @@ -1112,9 +1112,11 @@ mod tests { matches!( errors.as_slice(), [TypeError { - kind: TypeErrorKind::FormatStringError { message }, + kind: TypeErrorKind::FormatStringError { + parse_error: crate::format_check::FormatParseError::UnknownSpecifier('z') + }, .. - }] if message.contains("%z") + }] ), "expected FormatStringError for unknown specifier, got: {errors:?}" ); @@ -1208,9 +1210,11 @@ mod tests { matches!( errors.as_slice(), [TypeError { - kind: TypeErrorKind::FormatStringError { message }, + kind: TypeErrorKind::FormatStringError { + parse_error: crate::format_check::FormatParseError::IncompleteSpecifier + }, .. - }] if message.contains("incomplete") + }] ), "expected FormatStringError for incomplete specifier, got: {errors:?}" ); diff --git a/crates/jrsonnet-lsp-check/src/type_check/types.rs b/crates/jrsonnet-lsp-check/src/type_check/types.rs index fd4e4e48..a82492d5 100644 --- a/crates/jrsonnet-lsp-check/src/type_check/types.rs +++ b/crates/jrsonnet-lsp-check/src/type_check/types.rs @@ -1,9 +1,11 @@ use jrsonnet_lsp_document::{to_lsp_range, LineIndex}; use jrsonnet_lsp_inference::TypeAnalysis; -use jrsonnet_lsp_types::Ty; +use jrsonnet_lsp_types::{BinaryOpTypeError, Ty, UnaryOpTypeError}; use lsp_types::{Diagnostic, DiagnosticSeverity, NumberOrString}; use rowan::TextRange; +use crate::format_check::FormatParseError; + /// A type error detected during static analysis. 
#[derive(Debug, Clone, PartialEq, Eq)] pub struct TypeError { @@ -21,13 +23,13 @@ pub enum TypeErrorKind { lhs: Ty, rhs: Ty, op: &'static str, - message: String, + reason: BinaryOpTypeError, }, /// Unary operator applied to incompatible type. UnaryOpMismatch { operand: Ty, op: &'static str, - message: String, + reason: UnaryOpTypeError, }, /// Field access (`.field`) on a non-object type. FieldAccessOnNonObject { actual: Ty }, @@ -58,7 +60,7 @@ pub enum TypeErrorKind { /// Index out of bounds on a tuple with known length. TupleIndexOutOfBounds { tuple_len: usize, index: usize }, /// Format string parse error. - FormatStringError { message: String }, + FormatStringError { parse_error: FormatParseError }, /// Wrong number of format arguments. FormatArgCount { expected: usize, provided: usize }, /// Format argument type mismatch. @@ -97,21 +99,21 @@ impl TypeErrorKind { lhs, rhs, op, - message, + reason, } => TypeErrorKind::BinaryOpMismatch { lhs: subst.apply(*lhs), rhs: subst.apply(*rhs), op, - message: message.clone(), + reason: *reason, }, TypeErrorKind::UnaryOpMismatch { operand, op, - message, + reason, } => TypeErrorKind::UnaryOpMismatch { operand: subst.apply(*operand), op, - message: message.clone(), + reason: *reason, }, TypeErrorKind::FieldAccessOnNonObject { actual } => { TypeErrorKind::FieldAccessOnNonObject { @@ -191,130 +193,210 @@ impl TypeError { text: &str, analysis: &TypeAnalysis, ) -> Diagnostic { - let message = match &self.kind { - TypeErrorKind::BinaryOpMismatch { message, .. } - | TypeErrorKind::UnaryOpMismatch { message, .. 
} => message.clone(), - TypeErrorKind::FieldAccessOnNonObject { actual } => { - format!( - "field access on non-object type `{}`", - analysis.display(*actual) - ) - } - TypeErrorKind::IndexOnNonIndexable { actual } => { - format!( - "index access on non-indexable type `{}`", - analysis.display(*actual) - ) - } - TypeErrorKind::CallOnNonFunction { actual } => { - format!( - "cannot call non-function type `{}`", - analysis.display(*actual) - ) - } - TypeErrorKind::WrongArgCount { expected, actual } => { - format!("function expects {expected} argument(s), but {actual} provided") - } - TypeErrorKind::TooFewArguments { - function_name, - required, - provided, - } => { - format!( - "`{function_name}` requires at least {required} argument(s), but {provided} provided" - ) + let message = render_type_error(&self.kind, analysis); + + Diagnostic { + range: to_lsp_range(self.range, line_index, text), + severity: Some(DiagnosticSeverity::WARNING), + code: Some(NumberOrString::String("type-error".to_string())), + code_description: None, + source: Some("jrsonnet-lint".to_string()), + message, + related_information: None, + tags: None, + data: None, + } + } +} + +fn render_type_error(kind: &TypeErrorKind, analysis: &TypeAnalysis) -> String { + match kind { + TypeErrorKind::BinaryOpMismatch { + lhs, + rhs, + op, + reason, + } => render_binary_op_mismatch(*lhs, *rhs, op, *reason, analysis), + TypeErrorKind::UnaryOpMismatch { + operand, + op, + reason, + } => render_unary_op_mismatch(*operand, op, *reason, analysis), + TypeErrorKind::FieldAccessOnNonObject { actual } => { + format!( + "field access on non-object type `{}`", + analysis.display(*actual) + ) + } + TypeErrorKind::IndexOnNonIndexable { actual } => { + format!( + "index access on non-indexable type `{}`", + analysis.display(*actual) + ) + } + TypeErrorKind::CallOnNonFunction { actual } => { + format!( + "cannot call non-function type `{}`", + analysis.display(*actual) + ) + } + TypeErrorKind::WrongArgCount { expected, 
actual } => { + format!("function expects {expected} argument(s), but {actual} provided") + } + TypeErrorKind::TooFewArguments { + function_name, + required, + provided, + } => { + format!( + "`{function_name}` requires at least {required} argument(s), but {provided} provided" + ) + } + TypeErrorKind::TooManyArguments { + function_name, + max_allowed, + provided, + } => { + format!( + "`{function_name}` accepts at most {max_allowed} argument(s), but {provided} provided" + ) + } + TypeErrorKind::NoSuchField { + field, + available, + suggestion, + } => { + let mut msg = format!("no such field `{field}`"); + if let Some(suggested) = suggestion { + msg.push_str("; did you mean `"); + msg.push_str(suggested); + msg.push_str("`?"); + } else if !available.is_empty() { + let available_str = available.join(", "); + msg.push_str("; available fields: "); + msg.push_str(&available_str); } - TypeErrorKind::TooManyArguments { - function_name, - max_allowed, - provided, - } => { - format!( - "`{function_name}` accepts at most {max_allowed} argument(s), but {provided} provided" - ) + msg + } + TypeErrorKind::TupleIndexOutOfBounds { tuple_len, index } => { + format!("index {index} is out of bounds for tuple of length {tuple_len}") + } + TypeErrorKind::FormatStringError { parse_error } => match parse_error { + FormatParseError::IncompleteSpecifier => { + "invalid format string: incomplete format specifier".to_string() } - TypeErrorKind::NoSuchField { - field, - available, - suggestion, - } => { - let mut msg = format!("no such field `{field}`"); - if let Some(suggested) = suggestion { - msg.push_str("; did you mean `"); - msg.push_str(suggested); - msg.push_str("`?"); - } else if !available.is_empty() { - let available_str = available.join(", "); - msg.push_str("; available fields: "); - msg.push_str(&available_str); - } - msg + FormatParseError::UnknownSpecifier(specifier) => { + format!("invalid format string: unknown specifier '%{specifier}'") } - 
TypeErrorKind::TupleIndexOutOfBounds { tuple_len, index } => { - format!("index {index} is out of bounds for tuple of length {tuple_len}") + FormatParseError::UnclosedNamedPlaceholder => { + "invalid format string: unclosed named placeholder".to_string() } - TypeErrorKind::FormatStringError { message } => { - format!("invalid format string: {message}") + FormatParseError::EmptyName => { + "invalid format string: empty name in named placeholder".to_string() } - TypeErrorKind::FormatArgCount { expected, provided } => { - format!("format string expects {expected} argument(s), but {provided} provided") + FormatParseError::MixedPositionalAndNamed => { + "invalid format string: cannot mix positional and named placeholders".to_string() } - TypeErrorKind::FormatArgTypeMismatch { - index, - expected, - actual, + }, + TypeErrorKind::FormatArgCount { expected, provided } => { + format!("format string expects {expected} argument(s), but {provided} provided") + } + TypeErrorKind::FormatArgTypeMismatch { + index, + expected, + actual, + specifier, + } => { + format!( + "format argument {} (specifier %{}) expects `{}`, got `{}`", + index + 1, specifier, - } => { - format!( - "format argument {} (specifier %{}) expects `{}`, got `{}`", - index + 1, - specifier, - analysis.display(*expected), - analysis.display(*actual) - ) - } - TypeErrorKind::ArgumentTypeMismatch { + analysis.display(*expected), + analysis.display(*actual) + ) + } + TypeErrorKind::ArgumentTypeMismatch { + function_name, + param_name, + param_index, + expected, + actual, + } => { + format!( + "`{}` argument {} (`{}`) expects `{}`, got `{}`", function_name, + param_index + 1, param_name, - param_index, - expected, - actual, - } => { - format!( - "`{}` argument {} (`{}`) expects `{}`, got `{}`", - function_name, - param_index + 1, - param_name, - analysis.display(*expected), - analysis.display(*actual) - ) - } - TypeErrorKind::CallbackTypeMismatch { + analysis.display(*expected), + analysis.display(*actual) + ) + } 
+ TypeErrorKind::CallbackTypeMismatch { + function_name, + callback_param, + element_type, + callback_param_type, + } => { + format!( + "`{}` callback parameter `{}` has type `{}`, but array elements have type `{}`", function_name, callback_param, - element_type, - callback_param_type, - } => { - format!( - "`{}` callback parameter `{}` has type `{}`, but array elements have type `{}`", - function_name, - callback_param, - analysis.display(*callback_param_type), - analysis.display(*element_type) - ) - } - }; + analysis.display(*callback_param_type), + analysis.display(*element_type) + ) + } + } +} - Diagnostic { - range: to_lsp_range(self.range, line_index, text), - severity: Some(DiagnosticSeverity::WARNING), - code: Some(NumberOrString::String("type-error".to_string())), - code_description: None, - source: Some("jrsonnet-lint".to_string()), - message, - related_information: None, - tags: None, - data: None, +fn render_binary_op_mismatch( + lhs: Ty, + rhs: Ty, + op: &str, + reason: BinaryOpTypeError, + analysis: &TypeAnalysis, +) -> String { + match reason { + BinaryOpTypeError::RequiresNumberPair => format!( + "operator `{op}` requires (number, number), got ({}, {})", + analysis.display(lhs), + analysis.display(rhs) + ), + BinaryOpTypeError::InvalidPlusOperands => format!( + "operator `+` requires matching types (number+number, string+string, array+array, or object+object), got ({}, {})", + analysis.display(lhs), + analysis.display(rhs) + ), + BinaryOpTypeError::RequiresBitwiseNumberPair => format!( + "bitwise operator `{op}` requires (number, number), got ({}, {})", + analysis.display(lhs), + analysis.display(rhs) + ), + BinaryOpTypeError::RequiresStringAndObject => format!( + "operator `in` requires (string, object), got ({}, {})", + analysis.display(lhs), + analysis.display(rhs) + ), + } +} + +fn render_unary_op_mismatch( + operand: Ty, + op: &str, + reason: UnaryOpTypeError, + analysis: &TypeAnalysis, +) -> String { + match reason { + 
UnaryOpTypeError::NotRequiresBoolean => { + format!( + "operator `{op}` requires boolean, got {}", + analysis.display(operand) + ) + } + UnaryOpTypeError::MinusRequiresNumber | UnaryOpTypeError::BitNotRequiresNumber => { + format!( + "operator `{op}` requires number, got {}", + analysis.display(operand) + ) } } } diff --git a/crates/jrsonnet-lsp-handlers/Cargo.toml b/crates/jrsonnet-lsp-handlers/Cargo.toml index 37a418f9..a48902c3 100644 --- a/crates/jrsonnet-lsp-handlers/Cargo.toml +++ b/crates/jrsonnet-lsp-handlers/Cargo.toml @@ -22,6 +22,7 @@ serde = { workspace = true, features = ["derive"] } rowan.workspace = true serde_json.workspace = true strum = { version = "0.26", features = ["derive"] } +thiserror.workspace = true tracing = "0.1.44" [lints] diff --git a/crates/jrsonnet-lsp-handlers/src/formatting/engine.rs b/crates/jrsonnet-lsp-handlers/src/formatting/engine.rs index 56726248..b924bf62 100644 --- a/crates/jrsonnet-lsp-handlers/src/formatting/engine.rs +++ b/crates/jrsonnet-lsp-handlers/src/formatting/engine.rs @@ -1,22 +1,14 @@ -use std::fmt; - use jrsonnet_fmt::format_code; +use thiserror::Error; use super::FormattingConfig; -#[derive(Debug)] +#[derive(Debug, Error)] pub(super) enum FormatterError { + #[error("formatter failed")] FormatFailed, } -impl fmt::Display for FormatterError { - fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - Self::FormatFailed => formatter.write_str("formatter failed"), - } - } -} - pub(super) fn run_formatter( input: &str, config: &FormattingConfig, diff --git a/crates/jrsonnet-lsp-scenario/src/fixture.rs b/crates/jrsonnet-lsp-scenario/src/fixture.rs index 2868f279..e6a88bad 100644 --- a/crates/jrsonnet-lsp-scenario/src/fixture.rs +++ b/crates/jrsonnet-lsp-scenario/src/fixture.rs @@ -29,7 +29,7 @@ pub enum ScenarioFixtureError { ParseFixture { path: PathBuf, #[source] - source: ParseScenarioError, + source: Box, }, #[error(transparent)] RunScenario(#[from] RunnerError), @@ -52,7 +52,7 @@ 
where let scenario = parse_scenario_yaml(&script, base_dir.path()).map_err(|source| { ScenarioFixtureError::ParseFixture { path: path.to_path_buf(), - source, + source: Box::new(source), } })?; run_scenario(&scenario, start_server)?; diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_runner/helpers.rs b/crates/jrsonnet-lsp-scenario/src/scenario_runner/helpers.rs index 990e84f4..b091c7e9 100644 --- a/crates/jrsonnet-lsp-scenario/src/scenario_runner/helpers.rs +++ b/crates/jrsonnet-lsp-scenario/src/scenario_runner/helpers.rs @@ -5,13 +5,15 @@ use thiserror::Error; use super::RunnerResult; +type UriParseError = ::Err; + #[derive(Debug, Error)] pub enum UriError { - #[error("parse {context} uri '{uri}': {reason}")] + #[error("parse {context} uri '{uri}': {source}")] Parse { context: &'static str, uri: String, - reason: String, + source: UriParseError, }, } @@ -20,7 +22,7 @@ pub(super) fn parse_uri(uri: &str, context: &'static str) -> RunnerResult { let UriError::Parse { context, uri, - reason, + source, } = *error_box; - (context, uri, reason) + (context, uri, source) } ); assert_eq!(context, "hover"); assert_eq!(uri, "://not-a-uri"); - assert!(!reason.is_empty()); + assert!(!source.to_string().is_empty()); } #[test] diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_script/inputs.rs b/crates/jrsonnet-lsp-scenario/src/scenario_script/inputs.rs index d1aa09a7..75c2bd6e 100644 --- a/crates/jrsonnet-lsp-scenario/src/scenario_script/inputs.rs +++ b/crates/jrsonnet-lsp-scenario/src/scenario_script/inputs.rs @@ -33,32 +33,84 @@ use crate::{ semantic_tokens::{encode_semantic_tokens, semantic_modifiers, ExpectedSemanticToken}, }; +type UriParseError = ::Err; + #[derive(Debug, Error)] pub enum InputError { #[error(transparent)] Markers(#[from] MarkerError), - #[error("{message}")] - Message { message: String }, -} - -impl InputError { - fn message(message: impl Into) -> Self { - Self::Message { - message: message.into(), - } - } -} - -impl From for InputError { - fn 
from(message: String) -> Self { - Self::message(message) - } -} - -macro_rules! input_err { - ($($arg:tt)*) => { - InputError::message(format!($($arg)*)) - }; + #[error("{context}: parse URI for '{path}': {source}")] + UriParse { + context: String, + path: String, + source: UriParseError, + }, + #[error("{context}: resolve edit for '{path}': {source}")] + ResolveEditForPath { + context: String, + path: String, + source: Box, + }, + #[error("{context}: decode {target}: {source}")] + JsonDecode { + context: String, + target: &'static str, + source: serde_json::Error, + }, + #[error("{context}: decode workspace symbol shorthand into SymbolInformation: {source}")] + DecodeWorkspaceSymbolShorthand { + context: String, + source: serde_json::Error, + }, + #[error("{context}: serialize {target} at {path}: {source}")] + JsonSerialize { + context: String, + target: &'static str, + path: String, + source: serde_json::Error, + }, + #[error("{context}: command argument `file` must be a string")] + CommandArgumentFileMustBeString { context: String }, + #[error("{context}: `{path}.file` must be a string")] + FileUriShorthandFieldMustBeString { context: String, path: String }, + #[error("{context}: unknown semantic token type '{token_type}' at {location}")] + UnknownSemanticTokenType { + context: String, + token_type: String, + location: String, + }, + #[error("{context}: unknown semantic token modifier '{modifier}' at {location}")] + UnknownSemanticTokenModifier { + context: String, + modifier: String, + location: String, + }, + #[error("{context}: tokensByMarker[{index}] marker '{marker}' spans multiple lines")] + SemanticTokenMarkerSpansMultipleLines { + context: String, + index: usize, + marker: String, + }, + #[error("{context}: tokensByMarker[{index}] marker '{marker}' has invalid range")] + SemanticTokenMarkerInvalidRange { + context: String, + index: usize, + marker: String, + }, + #[error("{context}: tokensByMarker[{index}] marker '{marker}' resolves to an empty range")] 
+ SemanticTokenMarkerEmptyRange { + context: String, + index: usize, + marker: String, + }, + #[error("{context}: {path} cannot be empty")] + MarkerNameCannotBeEmpty { context: String, path: String }, + #[error("{context}: {path} must be a string marker name, got {actual}")] + MarkerNameMustBeString { + context: String, + path: String, + actual: serde_json::Value, + }, } /// Accept either full `Diagnostic` JSON or concise shorthand fields. @@ -261,11 +313,10 @@ impl CodeActionShorthandInput { .map(|(relative_path, edits)| { let uri: lsp_types::Uri = file_uri(base_dir, &relative_path) .parse() - .map_err(|error| { - input_err!( - "{context}: parse edit URI for '{}': {error}", - relative_path - ) + .map_err(|source| InputError::UriParse { + context: context.to_string(), + path: relative_path.clone(), + source, })?; let edits = edits .into_iter() @@ -275,11 +326,13 @@ impl CodeActionShorthandInput { &relative_path, "expectCodeAction.result.edits", ) - .map_err(|error| { - input_err!( - "{context}: resolve edit for '{}': {error}", - relative_path - ) + .map_err(|error| match error { + InputError::Markers(source) => InputError::ResolveEditForPath { + context: context.to_string(), + path: relative_path.clone(), + source: Box::new(source), + }, + other => other, }) }) .collect::, _>>()?; @@ -346,9 +399,14 @@ impl LocationInput { marker_store: &MarkerStore, context: &str, ) -> Result { - let uri: lsp_types::Uri = file_uri(base_dir, &self.file).parse().map_err(|error| { - input_err!("{context}: parse location URI for '{}': {error}", self.file) - })?; + let uri: lsp_types::Uri = + file_uri(base_dir, &self.file) + .parse() + .map_err(|error| InputError::UriParse { + context: context.to_string(), + path: self.file.clone(), + source: error, + })?; let range = marker_store.resolve_range(&self.file, self.range, context)?; Ok(Location { uri, range }) } @@ -429,8 +487,10 @@ impl WorkspaceEditShorthandInput { let uri: lsp_types::Uri = file_uri(base_dir, &relative_path) .parse() - 
.map_err(|error| { - input_err!("{context}: parse edit URI for '{}': {error}", relative_path) + .map_err(|source| InputError::UriParse { + context: context.to_string(), + path: relative_path.clone(), + source, })?; let edits = edits .into_iter() @@ -512,12 +572,14 @@ impl WorkspaceSymbolInput { marker_store: &MarkerStore, context: &str, ) -> Result { - let uri: lsp_types::Uri = file_uri(base_dir, &self.file).parse().map_err(|error| { - input_err!( - "{context}: parse workspace symbol URI for '{}': {error}", - self.file - ) - })?; + let uri: lsp_types::Uri = + file_uri(base_dir, &self.file) + .parse() + .map_err(|error| InputError::UriParse { + context: context.to_string(), + path: self.file.clone(), + source: error, + })?; // Build via JSON so field names/types track the wire representation directly, // including deprecated-but-still-used `containerName`. let value = serde_json::json!({ @@ -530,10 +592,9 @@ impl WorkspaceSymbolInput { }, "containerName": self.container_name, }); - serde_json::from_value(value).map_err(|error| { - input_err!( - "{context}: decode workspace symbol shorthand into SymbolInformation: {error}" - ) + serde_json::from_value(value).map_err(|source| InputError::DecodeWorkspaceSymbolShorthand { + context: context.to_string(), + source, }) } } @@ -640,9 +701,9 @@ impl CommandArgumentInput { if let Some(file) = file.as_str() { return Ok(serde_json::Value::String(file_uri(base_dir, file))); } - return Err(input_err!( - "{context}: command argument `file` must be a string" - )); + return Err(InputError::CommandArgumentFileMustBeString { + context: context.to_string(), + }); } Ok(value) } @@ -674,7 +735,10 @@ fn resolve_file_uri_shorthand_json_at( return Ok(serde_json::Value::Object(object)); }; let Some(file) = file.as_str() else { - return Err(input_err!("{context}: `{path}.file` must be a string")); + return Err(InputError::FileUriShorthandFieldMustBeString { + context: context.to_string(), + path: path.to_string(), + }); }; return 
Ok(serde_json::Value::String(file_uri(base_dir, file))); } @@ -800,6 +864,7 @@ impl SemanticTokenByMarkerInput { file: &str, index: usize, ) -> Result { + let marker = self.marker; let token_type = self .token_type .resolve_at(context, &format!("tokensByMarker[{index}].type"))?; @@ -814,28 +879,28 @@ impl SemanticTokenByMarkerInput { ) }) .collect::, _>>()?; - let range = marker_store.resolve_named_range(file, &self.marker, context)?; + let range = marker_store.resolve_named_range(file, &marker, context)?; if range.start.line != range.end.line { - return Err(input_err!( - "{context}: tokensByMarker[{index}] marker '{}' spans multiple lines", - self.marker - )); + return Err(InputError::SemanticTokenMarkerSpansMultipleLines { + context: context.to_string(), + index, + marker, + }); } - let len = range - .end - .character - .checked_sub(range.start.character) - .ok_or_else(|| { - input_err!( - "{context}: tokensByMarker[{index}] marker '{}' has invalid range", - self.marker - ) - })?; + if range.end.character < range.start.character { + return Err(InputError::SemanticTokenMarkerInvalidRange { + context: context.to_string(), + index, + marker, + }); + } + let len = range.end.character - range.start.character; if len == 0 { - return Err(input_err!( - "{context}: tokensByMarker[{index}] marker '{}' resolves to an empty range", - self.marker - )); + return Err(InputError::SemanticTokenMarkerEmptyRange { + context: context.to_string(), + index, + marker, + }); } Ok(ExpectedSemanticToken::new( @@ -859,10 +924,11 @@ impl SemanticTokenTypeInput { location: &str, ) -> Result { SemanticTokenTypeName::from_str(&self.0).map_err(|()| { - input_err!( - "{context}: unknown semantic token type '{}' at {location}", - self.0 - ) + InputError::UnknownSemanticTokenType { + context: context.to_string(), + token_type: self.0, + location: location.to_string(), + } }) } } @@ -878,10 +944,11 @@ impl SemanticTokenModifierInput { location: &str, ) -> Result { 
SemanticTokenModifierName::from_str(&self.0).map_err(|()| { - input_err!( - "{context}: unknown semantic token modifier '{}' at {location}", - self.0 - ) + InputError::UnknownSemanticTokenModifier { + context: context.to_string(), + modifier: self.0, + location: location.to_string(), + } }) } } @@ -986,8 +1053,11 @@ where T: DeserializeOwned, { let resolved = resolve_marker_references_json(value, marker_store, file, context, "$")?; - serde_json::from_value(resolved) - .map_err(|error| input_err!("{context}: decode marker-expanded value: {error}")) + serde_json::from_value(resolved).map_err(|source| InputError::JsonDecode { + context: context.to_string(), + target: "marker-expanded value", + source, + }) } /// Recursively rewrite `{ positionOf: ... }` and `{ rangeOf: ... }` objects. @@ -1013,16 +1083,26 @@ fn resolve_marker_references_json( PositionSpec::Marker(marker_name), context, )?; - return serde_json::to_value(position).map_err(|error| { - input_err!("{context}: serialize position at {path}: {error}") + return serde_json::to_value(position).map_err(|source| { + InputError::JsonSerialize { + context: context.to_string(), + target: "position", + path: path.to_string(), + source, + } }); } if let Some(marker) = object.remove("rangeOf") { let marker_name = marker_name_from_value(marker, context, &format!("{path}.rangeOf"))?; let range = marker_store.resolve_named_range(file, &marker_name, context)?; - return serde_json::to_value(range).map_err(|error| { - input_err!("{context}: serialize range at {path}: {error}") + return serde_json::to_value(range).map_err(|source| { + InputError::JsonSerialize { + context: context.to_string(), + target: "range", + path: path.to_string(), + source, + } }); } } @@ -1062,10 +1142,15 @@ fn marker_name_from_value( ) -> Result { match value { serde_json::Value::String(name) if !name.is_empty() => Ok(name), - serde_json::Value::String(_) => Err(input_err!("{context}: {path} cannot be empty")), - other => Err(input_err!( - "{context}: 
{path} must be a string marker name, got {other}" - )), + serde_json::Value::String(_) => Err(InputError::MarkerNameCannotBeEmpty { + context: context.to_string(), + path: path.to_string(), + }), + other => Err(InputError::MarkerNameMustBeString { + context: context.to_string(), + path: path.to_string(), + actual: other, + }), } } diff --git a/crates/jrsonnet-lsp-scenario/src/scenario_script/markers.rs b/crates/jrsonnet-lsp-scenario/src/scenario_script/markers.rs index 8c2776cb..adc95730 100644 --- a/crates/jrsonnet-lsp-scenario/src/scenario_script/markers.rs +++ b/crates/jrsonnet-lsp-scenario/src/scenario_script/markers.rs @@ -21,29 +21,126 @@ use serde::Deserialize; use thiserror::Error; #[derive(Debug, Error)] -#[error("{message}")] +#[error("{context}: {kind}")] pub struct MarkerError { - message: String, + context: String, + kind: Box, } -impl MarkerError { - fn message(message: impl Into) -> Self { - Self { - message: message.into(), - } - } +#[derive(Debug, Error)] +enum MarkerErrorKind { + #[error("specify either `range` or shorthand (`at` + `text`/`len`), not both")] + ConflictingRangeAndShorthand, + #[error("marker range '{name}' requires file context")] + MarkerRangeRequiresFileContext { name: String }, + #[error("missing range, provide `range` or shorthand (`at` + `text`/`len`)")] + MissingRange, + #[error("shorthand cannot include both `text` and `len`")] + ShorthandTextAndLenBothSet, + #[error("shorthand requires one of `text` or `len`")] + ShorthandRequiresTextOrLen, + #[error("marker position '{name}' requires file context")] + MarkerPositionRequiresFileContext { name: String }, + #[error("missing position, provide `positionOf`")] + MissingPositionOf, + #[error("missing range, provide `rangeOf`")] + MissingRangeOf, + #[error("incremental range start is after end for file '{file}'")] + IncrementalRangeStartAfterEnd { file: String }, + #[error("{bound} offset out of bounds in '{file}'")] + FileOffsetOutOfBounds { file: String, bound: OffsetBound }, + 
#[error("incremental text width overflow for '{file}'")] + IncrementalTextWidthOverflow { file: String }, + #[error("duplicate marker name '{name}' in file '{file}'")] + DuplicateMarkerNameInFile { file: String, name: String }, + #[error("marker '{name}' start overflow")] + MarkerStartOverflow { name: String }, + #[error("marker '{name}' end overflow")] + MarkerEndOverflow { name: String }, + #[error("missing position, provide `at`")] + MissingPositionAt, + #[error("shorthand `text` length does not fit in u32")] + ShorthandTextLengthTooLong, + #[error("range end overflow")] + RangeEndOverflow, + #[error("no tracked text for file '{file}' while resolving marker '{marker_name}'")] + NoTrackedTextForMarker { file: String, marker_name: String }, + #[error("file '{file}' has no parsed text/markers; define it in `create`, `open`, `writeFile`, or `changeFull` first")] + FileHasNoParsedMarkers { file: String }, + #[error("unknown marker '{marker_name}' in file '{file}', available markers: {available:?}")] + UnknownMarker { + file: String, + marker_name: String, + available: Vec, + }, + #[error("text length overflow")] + TextLengthOverflow, + #[error("duplicate marker name '{name}' in one text block")] + DuplicateMarkerNameInTextBlock { name: String }, + #[error("unexpected marker syntax node {kind:?} while translating marker tree")] + UnexpectedSyntaxNode { kind: MarkerSyntaxKind }, + #[error("malformed range marker '{marker_name}', missing body")] + MalformedRangeMarkerMissingBody { marker_name: String }, + #[error("malformed cursor marker '{marker_name}', missing before segment")] + MalformedCursorMarkerMissingBefore { marker_name: String }, + #[error("malformed cursor marker '{marker_name}', missing after segment")] + MalformedCursorMarkerMissingAfter { marker_name: String }, + #[error("unexpected marker syntax token {kind:?} while translating marker tree")] + UnexpectedSyntaxToken { kind: MarkerSyntaxKind }, + #[error("malformed marker node, missing marker name")] + 
MalformedMarkerNodeMissingName, + #[error("marker parser index overflow")] + ParserIndexOverflow, + #[error("unterminated range marker starting at byte {start}")] + UnterminatedRangeMarker { start: usize }, + #[error("cursor marker '{marker_name}' must include exactly one top-level `|`")] + CursorMarkerMustIncludeOnePipe { marker_name: String }, + #[error("unterminated cursor marker starting at byte {start}")] + UnterminatedCursorMarker { start: usize }, + #[error("marker parser unexpectedly reached EOF")] + ParserUnexpectedEof, + #[error("marker name cannot be empty")] + MarkerNameEmpty, + #[error("offset overflow")] + OffsetOverflow, + #[error("line overflow")] + LineOverflow, + #[error("character overflow")] + CharacterOverflow, + #[error("marker offset {offset} is out of bounds (text has {text_chars} chars)")] + MarkerOffsetOutOfBounds { offset: usize, text_chars: usize }, + #[error("position ({line}, {character}) is out of bounds for current text")] + PositionOutOfBounds { line: u32, character: u32 }, + #[error("marker offset underflow while shifting")] + MarkerOffsetUnderflowWhileShifting, + #[error("marker shift overflow")] + MarkerShiftOverflow, + #[error("marker offset overflow while shifting")] + MarkerOffsetOverflowWhileShifting, } -impl From for MarkerError { - fn from(message: String) -> Self { - Self::message(message) +#[derive(Debug, Clone, Copy)] +enum OffsetBound { + Start, + End, +} + +impl std::fmt::Display for OffsetBound { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::Start => f.write_str("start"), + Self::End => f.write_str("end"), + } } } -macro_rules! marker_err { - ($($arg:tt)*) => { - MarkerError::message(format!($($arg)*)) - }; +impl MarkerError { + fn new(context: impl Into, kind: MarkerErrorKind) -> Self { + Self { + context: context.into(), + kind: Box::new(kind), + } + } } /// Position selector used by scenario script fields like `at`. 
@@ -90,40 +187,43 @@ impl RangeInput { pub(super) fn resolve_range(self, context: &str) -> Result { if let Some(range_spec) = self.range { if self.at.is_some() || self.text.is_some() || self.len.is_some() { - return Err(marker_err!( - "{context}: specify either `range` or shorthand (`at` + `text`/`len`), not both" + return Err(MarkerError::new( + context, + MarkerErrorKind::ConflictingRangeAndShorthand, )); } return match range_spec { - RangeSpec::Marker(name) => Err(marker_err!( - "{context}: marker range '{name}' requires file context" + RangeSpec::Marker(name) => Err(MarkerError::new( + context, + MarkerErrorKind::MarkerRangeRequiresFileContext { name }, )), }; } let Some(start_spec) = self.at else { - return Err(marker_err!( - "{context}: missing range, provide `range` or shorthand (`at` + `text`/`len`)" - )); + return Err(MarkerError::new(context, MarkerErrorKind::MissingRange)); }; match (self.text, self.len) { (Some(_), Some(_)) => { - return Err(marker_err!( - "{context}: shorthand cannot include both `text` and `len`" + return Err(MarkerError::new( + context, + MarkerErrorKind::ShorthandTextAndLenBothSet, )); } (None, None) => { - return Err(marker_err!( - "{context}: shorthand requires one of `text` or `len`" + return Err(MarkerError::new( + context, + MarkerErrorKind::ShorthandRequiresTextOrLen, )); } _ => {} } match start_spec { - PositionSpec::Marker(name) => Err(marker_err!( - "{context}: marker position '{name}' requires file context" + PositionSpec::Marker(name) => Err(MarkerError::new( + context, + MarkerErrorKind::MarkerPositionRequiresFileContext { name }, )), } } @@ -147,8 +247,9 @@ impl PositionFieldInput { ) -> Result { self.position_of.map_or_else( || { - Err(marker_err!( - "{context}: missing position, provide `positionOf`" + Err(MarkerError::new( + context, + MarkerErrorKind::MissingPositionOf, )) }, |name| marker_store.resolve_position_spec(file, PositionSpec::Marker(name), context), @@ -173,7 +274,7 @@ impl RangeFieldInput { context: 
&str, ) -> Result { self.range_of.map_or_else( - || Err(marker_err!("{context}: missing range, provide `rangeOf`")), + || Err(MarkerError::new(context, MarkerErrorKind::MissingRangeOf)), |marker| marker_store.resolve_named_range(file, &marker, context), ) } @@ -252,15 +353,33 @@ impl MarkerStore { let start_offset = position_to_offset(&document.text, range.start, context)?; let end_offset = position_to_offset(&document.text, range.end, context)?; if start_offset > end_offset { - return Err(marker_err!( - "{context}: incremental range start is after end for file '{file}'" + return Err(MarkerError::new( + context, + MarkerErrorKind::IncrementalRangeStartAfterEnd { + file: file.to_string(), + }, )); } - let start_byte = char_offset_to_byte_offset(&document.text, start_offset) - .ok_or_else(|| marker_err!("{context}: start offset out of bounds in '{file}'"))?; - let end_byte = char_offset_to_byte_offset(&document.text, end_offset) - .ok_or_else(|| marker_err!("{context}: end offset out of bounds in '{file}'"))?; + let start_byte = + char_offset_to_byte_offset(&document.text, start_offset).ok_or_else(|| { + MarkerError::new( + context, + MarkerErrorKind::FileOffsetOutOfBounds { + file: file.to_string(), + bound: OffsetBound::Start, + }, + ) + })?; + let end_byte = char_offset_to_byte_offset(&document.text, end_offset).ok_or_else(|| { + MarkerError::new( + context, + MarkerErrorKind::FileOffsetOutOfBounds { + file: file.to_string(), + bound: OffsetBound::End, + }, + ) + })?; let mut next_text = String::with_capacity( start_byte + text.len() + document.text.len().saturating_sub(end_byte), @@ -275,7 +394,14 @@ impl MarkerStore { .and_then(|inserted| { isize::try_from(replaced_width).map(|replaced| inserted - replaced) }) - .map_err(|_| marker_err!("{context}: incremental text width overflow for '{file}'"))?; + .map_err(|_| { + MarkerError::new( + context, + MarkerErrorKind::IncrementalTextWidthOverflow { + file: file.to_string(), + }, + ) + })?; let mut next_markers = 
HashMap::with_capacity(document.markers.len() + inserted_markers.len()); @@ -296,18 +422,26 @@ impl MarkerStore { for (name, marker) in inserted_markers { if next_markers.contains_key(&name) { - return Err(marker_err!( - "{context}: duplicate marker name '{name}' in file '{file}'" + return Err(MarkerError::new( + context, + MarkerErrorKind::DuplicateMarkerNameInFile { + file: file.to_string(), + name, + }, )); } - let start = marker - .start - .checked_add(start_offset) - .ok_or_else(|| marker_err!("{context}: marker '{name}' start overflow"))?; - let end = marker - .end - .checked_add(start_offset) - .ok_or_else(|| marker_err!("{context}: marker '{name}' end overflow"))?; + let start = marker.start.checked_add(start_offset).ok_or_else(|| { + MarkerError::new( + context, + MarkerErrorKind::MarkerStartOverflow { name: name.clone() }, + ) + })?; + let end = marker.end.checked_add(start_offset).ok_or_else(|| { + MarkerError::new( + context, + MarkerErrorKind::MarkerEndOverflow { name: name.clone() }, + ) + })?; next_markers.insert(name, MarkerRangeOffsets::new(start, end)); } @@ -324,7 +458,12 @@ impl MarkerStore { context: &str, ) -> Result { at.map_or_else( - || Err(marker_err!("{context}: missing position, provide `at`")), + || { + Err(MarkerError::new( + context, + MarkerErrorKind::MissingPositionAt, + )) + }, |spec| self.resolve_position_spec(file, spec, context), ) } @@ -338,8 +477,9 @@ impl MarkerStore { ) -> Result { if let Some(range_spec) = input.range { if input.at.is_some() || input.text.is_some() || input.len.is_some() { - return Err(marker_err!( - "{context}: specify either `range` or shorthand (`at` + `text`/`len`), not both" + return Err(MarkerError::new( + context, + MarkerErrorKind::ConflictingRangeAndShorthand, )); } return match range_spec { @@ -348,21 +488,19 @@ impl MarkerStore { } let Some(start_spec) = input.at else { - return Err(marker_err!( - "{context}: missing range, provide `range` or shorthand (`at` + `text`/`len`)" - )); + return 
Err(MarkerError::new(context, MarkerErrorKind::MissingRange)); }; match (input.text, input.len) { (Some(text), None) => { let width = u32::try_from(text.chars().count()).map_err(|_| { - marker_err!("{context}: shorthand `text` length does not fit in u32") + MarkerError::new(context, MarkerErrorKind::ShorthandTextLengthTooLong) })?; let start = self.resolve_position_spec(file, start_spec, context)?; let end_character = start .character .checked_add(width) - .ok_or_else(|| marker_err!("{context}: range end overflow"))?; + .ok_or_else(|| MarkerError::new(context, MarkerErrorKind::RangeEndOverflow))?; Ok(Range { start, end: Position { @@ -376,7 +514,7 @@ impl MarkerStore { let end_character = start .character .checked_add(len) - .ok_or_else(|| marker_err!("{context}: range end overflow"))?; + .ok_or_else(|| MarkerError::new(context, MarkerErrorKind::RangeEndOverflow))?; Ok(Range { start, end: Position { @@ -385,8 +523,9 @@ impl MarkerStore { }, }) } - (Some(_), Some(_)) => Err(marker_err!( - "{context}: shorthand cannot include both `text` and `len`" + (Some(_), Some(_)) => Err(MarkerError::new( + context, + MarkerErrorKind::ShorthandTextAndLenBothSet, )), (None, None) => match start_spec { PositionSpec::Marker(name) => self.resolve_named_range(file, &name, context), @@ -403,8 +542,12 @@ impl MarkerStore { ) -> Result { let marker = self.lookup_marker(file, marker_name, context)?; let text = self.full_text(file).ok_or_else(|| { - marker_err!( - "{context}: no tracked text for file '{file}' while resolving marker '{marker_name}'" + MarkerError::new( + context, + MarkerErrorKind::NoTrackedTextForMarker { + file: file.to_string(), + marker_name: marker_name.to_string(), + }, ) })?; let start = offset_to_position(text, marker.start, context)?; @@ -423,8 +566,12 @@ impl MarkerStore { PositionSpec::Marker(name) => { let marker = self.lookup_marker(file, &name, context)?; let text = self.full_text(file).ok_or_else(|| { - marker_err!( - "{context}: no tracked text for file 
'{file}' while resolving marker '{name}'" + MarkerError::new( + context, + MarkerErrorKind::NoTrackedTextForMarker { + file: file.to_string(), + marker_name: name.clone(), + }, ) })?; offset_to_position(text, marker.start, context) @@ -440,20 +587,23 @@ impl MarkerStore { context: &str, ) -> Result<&MarkerRangeOffsets, MarkerError> { let Some(document) = self.documents.get(file) else { - return Err(marker_err!( - "{context}: file '{file}' has no parsed text/markers; define it in `create`, `open`, `writeFile`, or `changeFull` first" + return Err(MarkerError::new( + context, + MarkerErrorKind::FileHasNoParsedMarkers { + file: file.to_string(), + }, )); }; document.markers.get(marker_name).ok_or_else(|| { - let available = document - .markers - .keys() - .map(String::as_str) - .collect::>(); - marker_err!( - "{context}: unknown marker '{marker_name}' in file '{file}', available markers: {:?}", - available + let available = document.markers.keys().cloned().collect::>(); + MarkerError::new( + context, + MarkerErrorKind::UnknownMarker { + file: file.to_string(), + marker_name: marker_name.to_string(), + available, + }, ) }) } @@ -496,7 +646,7 @@ impl ParsedSegment { self.char_len = self .char_len .checked_add(text.chars().count()) - .ok_or_else(|| marker_err!("{context}: text length overflow"))?; + .ok_or_else(|| MarkerError::new(context, MarkerErrorKind::TextLengthOverflow))?; Ok(()) } @@ -509,8 +659,9 @@ impl ParsedSegment { context: &str, ) -> Result<(), MarkerError> { if self.markers.contains_key(&name) { - return Err(marker_err!( - "{context}: duplicate marker name '{name}' in one text block" + return Err(MarkerError::new( + context, + MarkerErrorKind::DuplicateMarkerNameInTextBlock { name }, )); } self.markers @@ -617,8 +768,9 @@ fn append_marker_node( | MarkerSyntaxKind::CursorAfter => append_marker_node_contents(target, node, context), MarkerSyntaxKind::RangeMarker => append_range_marker(target, node, context), MarkerSyntaxKind::CursorMarker => 
append_cursor_marker(target, node, context), - kind => Err(marker_err!( - "{context}: unexpected marker syntax node {kind:?} while translating marker tree" + kind => Err(MarkerError::new( + context, + MarkerErrorKind::UnexpectedSyntaxNode { kind }, )), } } @@ -641,8 +793,9 @@ fn append_range_marker( } if !body_seen { - return Err(marker_err!( - "{context}: malformed range marker '{marker_name}', missing body" + return Err(MarkerError::new( + context, + MarkerErrorKind::MalformedRangeMarkerMissingBody { marker_name }, )); } @@ -668,12 +821,22 @@ fn append_cursor_marker( } } - let before = before.ok_or_else(|| { - marker_err!("{context}: malformed cursor marker '{marker_name}', missing before segment") - })?; - let after = after.ok_or_else(|| { - marker_err!("{context}: malformed cursor marker '{marker_name}', missing after segment") - })?; + let before = if let Some(before) = before { + before + } else { + return Err(MarkerError::new( + context, + MarkerErrorKind::MalformedCursorMarkerMissingBefore { marker_name }, + )); + }; + let after = if let Some(after) = after { + after + } else { + return Err(MarkerError::new( + context, + MarkerErrorKind::MalformedCursorMarkerMissingAfter { marker_name }, + )); + }; append_marker_node_contents(target, &before, context)?; let cursor = target.char_len; @@ -696,8 +859,9 @@ fn append_marker_token( | MarkerSyntaxKind::CloseCursor | MarkerSyntaxKind::Colon | MarkerSyntaxKind::Pipe => Ok(()), - kind => Err(marker_err!( - "{context}: unexpected marker syntax token {kind:?} while translating marker tree" + kind => Err(MarkerError::new( + context, + MarkerErrorKind::UnexpectedSyntaxToken { kind }, )), } } @@ -712,7 +876,7 @@ fn marker_name_for_node(node: &MarkerSyntaxNode, context: &str) -> Result MarkerSyntaxParser<'a> { } let start = self.index; - let Some((name_width, marker_name)) = self.peek_marker_name_and_colon( - start - .checked_add(2) - .ok_or_else(|| marker_err!("{}: marker parser index overflow", self.context))?, - ) else 
{ + let Some((name_width, marker_name)) = + self.peek_marker_name_and_colon(start.checked_add(2).ok_or_else(|| { + MarkerError::new(self.context, MarkerErrorKind::ParserIndexOverflow) + })?) + else { return Ok(false); }; let marker_name = parse_marker_name(marker_name, self.context)?; @@ -790,9 +954,9 @@ impl<'a> MarkerSyntaxParser<'a> { self.parse_item()?; } - Err(marker_err!( - "{}: unterminated range marker starting at byte {start}", - self.context + Err(MarkerError::new( + self.context, + MarkerErrorKind::UnterminatedRangeMarker { start }, )) } @@ -806,11 +970,11 @@ impl<'a> MarkerSyntaxParser<'a> { } let start = self.index; - let Some((name_width, marker_name)) = self.peek_marker_name_and_colon( - start - .checked_add(2) - .ok_or_else(|| marker_err!("{}: marker parser index overflow", self.context))?, - ) else { + let Some((name_width, marker_name)) = + self.peek_marker_name_and_colon(start.checked_add(2).ok_or_else(|| { + MarkerError::new(self.context, MarkerErrorKind::ParserIndexOverflow) + })?) 
+ else { return Ok(false); }; let marker_name = parse_marker_name(marker_name, self.context)?; @@ -824,10 +988,11 @@ impl<'a> MarkerSyntaxParser<'a> { self.start_node(MarkerSyntaxKind::CursorBefore); while !self.is_eof() { if self.starts_with("))") { - return Err(marker_err!( - "{}: cursor marker '{}' must include exactly one top-level `|`", + return Err(MarkerError::new( self.context, - marker_name + MarkerErrorKind::CursorMarkerMustIncludeOnePipe { + marker_name: marker_name.to_string(), + }, )); } if self.peek_char() == Some('|') { @@ -840,9 +1005,9 @@ impl<'a> MarkerSyntaxParser<'a> { } if self.is_eof() { - return Err(marker_err!( - "{}: unterminated cursor marker starting at byte {start}", - self.context + return Err(MarkerError::new( + self.context, + MarkerErrorKind::UnterminatedCursorMarker { start }, )); } @@ -854,31 +1019,32 @@ impl<'a> MarkerSyntaxParser<'a> { return Ok(true); } if self.peek_char() == Some('|') { - return Err(marker_err!( - "{}: cursor marker '{}' must include exactly one top-level `|`", + return Err(MarkerError::new( self.context, - marker_name + MarkerErrorKind::CursorMarkerMustIncludeOnePipe { + marker_name: marker_name.to_string(), + }, )); } self.parse_item()?; } - Err(marker_err!( - "{}: unterminated cursor marker starting at byte {start}", - self.context + Err(MarkerError::new( + self.context, + MarkerErrorKind::UnterminatedCursorMarker { start }, )) } /// Emit one non-marker UTF-8 scalar as plain text. 
fn parse_text_token(&mut self) -> Result<(), MarkerError> { - let ch = self.peek_char().ok_or_else(|| { - marker_err!("{}: marker parser unexpectedly reached EOF", self.context) - })?; + let ch = self + .peek_char() + .ok_or_else(|| MarkerError::new(self.context, MarkerErrorKind::ParserUnexpectedEof))?; let width = ch.len_utf8(); let end = self .index .checked_add(width) - .ok_or_else(|| marker_err!("{}: marker parser index overflow", self.context))?; + .ok_or_else(|| MarkerError::new(self.context, MarkerErrorKind::ParserIndexOverflow))?; let text = &self.input[self.index..end]; self.emit_text_token(MarkerSyntaxKind::Text, text); self.index = end; @@ -939,7 +1105,7 @@ impl<'a> MarkerSyntaxParser<'a> { self.index = self .index .checked_add(bytes) - .ok_or_else(|| marker_err!("{}: marker parser index overflow", self.context))?; + .ok_or_else(|| MarkerError::new(self.context, MarkerErrorKind::ParserIndexOverflow))?; Ok(()) } @@ -959,7 +1125,7 @@ const fn is_marker_name_continue(ch: char) -> bool { /// Validate marker name lexical constraints. 
fn parse_marker_name<'a>(name: &'a str, context: &str) -> Result<&'a str, MarkerError> { if name.is_empty() { - return Err(marker_err!("{context}: marker name cannot be empty")); + return Err(MarkerError::new(context, MarkerErrorKind::MarkerNameEmpty)); } Ok(name) } @@ -976,16 +1142,16 @@ fn offset_to_position(text: &str, offset: usize, context: &str) -> Result Result Result Result Option { fn shift_offset(value: usize, delta: isize, context: &str) -> Result { if delta.is_negative() { let amount = delta.unsigned_abs(); - value - .checked_sub(amount) - .ok_or_else(|| marker_err!("{context}: marker offset underflow while shifting")) + value.checked_sub(amount).ok_or_else(|| { + MarkerError::new(context, MarkerErrorKind::MarkerOffsetUnderflowWhileShifting) + }) } else { - let amount = - usize::try_from(delta).map_err(|_| marker_err!("{context}: marker shift overflow"))?; - value - .checked_add(amount) - .ok_or_else(|| marker_err!("{context}: marker offset overflow while shifting")) + let amount = usize::try_from(delta) + .map_err(|_| MarkerError::new(context, MarkerErrorKind::MarkerShiftOverflow))?; + value.checked_add(amount).ok_or_else(|| { + MarkerError::new(context, MarkerErrorKind::MarkerOffsetOverflowWhileShifting) + }) } } diff --git a/crates/jrsonnet-lsp-types/Cargo.toml b/crates/jrsonnet-lsp-types/Cargo.toml index 55397c90..19d3ce27 100644 --- a/crates/jrsonnet-lsp-types/Cargo.toml +++ b/crates/jrsonnet-lsp-types/Cargo.toml @@ -10,6 +10,7 @@ description = "Type system for jrsonnet LSP" [dependencies] jrsonnet-rowan-parser = { version = "0.5.0-pre97", path = "../jrsonnet-rowan-parser" } rustc-hash.workspace = true +thiserror.workspace = true [dev-dependencies] assert_matches = "1.5.0" diff --git a/crates/jrsonnet-lsp-types/src/operations.rs b/crates/jrsonnet-lsp-types/src/operations.rs index 5ef5a2c3..acbf0963 100644 --- a/crates/jrsonnet-lsp-types/src/operations.rs +++ b/crates/jrsonnet-lsp-types/src/operations.rs @@ -20,4 +20,6 @@ mod logic; mod operators; 
pub use logic::{ty_and, ty_minus, ty_with_field, ty_with_len, ty_with_min_len}; -pub use operators::{array_concat_ty, binary_op_result_ty, unary_op_result_ty}; +pub use operators::{ + array_concat_ty, binary_op_result_ty, unary_op_result_ty, BinaryOpTypeError, UnaryOpTypeError, +}; diff --git a/crates/jrsonnet-lsp-types/src/operations/operators.rs b/crates/jrsonnet-lsp-types/src/operations/operators.rs index 5935b21f..376108a1 100644 --- a/crates/jrsonnet-lsp-types/src/operations/operators.rs +++ b/crates/jrsonnet-lsp-types/src/operations/operators.rs @@ -1,15 +1,47 @@ //! Type checking and result-type computation for Jsonnet operators. use jrsonnet_rowan_parser::nodes::{BinaryOperatorKind, UnaryOperatorKind}; +use thiserror::Error; use crate::store::{FieldDefInterned, ObjectData, Ty, TyData, TypeStoreOps}; +/// Structured reason for a binary operator type mismatch. +#[derive(Debug, Error, Clone, Copy, PartialEq, Eq)] +pub enum BinaryOpTypeError { + /// Arithmetic operators require `(number, number)`. + #[error("operator requires (number, number)")] + RequiresNumberPair, + /// `+` requires compatible operand families. + #[error("operator `+` requires compatible operand families")] + InvalidPlusOperands, + /// Bitwise operators require `(number, number)`. + #[error("bitwise operator requires (number, number)")] + RequiresBitwiseNumberPair, + /// `in` requires `(string, object)`. + #[error("operator `in` requires (string, object)")] + RequiresStringAndObject, +} + +/// Structured reason for a unary operator type mismatch. +#[derive(Debug, Error, Clone, Copy, PartialEq, Eq)] +pub enum UnaryOpTypeError { + /// `!` requires a boolean. + #[error("operator `!` requires boolean")] + NotRequiresBoolean, + /// `-` requires a number. + #[error("operator `-` requires number")] + MinusRequiresNumber, + /// `~` requires a number. 
+ #[error("operator `~` requires number")] + BitNotRequiresNumber, +} + pub fn binary_op_result_ty( op: BinaryOperatorKind, lhs: Ty, rhs: Ty, store: &mut S, -) -> Result { +) -> Result { // Any, Never short-circuit if lhs.is_any() || rhs.is_any() { return Ok(Ty::ANY); @@ -86,19 +118,17 @@ pub fn binary_op_result_ty( if is_number_ty(&lhs_data) && is_number_ty(&rhs_data) { Ok(Ty::NUMBER) } else { - Err(format!( - "operator requires (number, number), got ({}, {})", - store.display(lhs), - store.display(rhs) - )) + Err(BinaryOpTypeError::RequiresNumberPair) } } // Plus: overloaded for number, string, char, array, tuple, object BinaryOperatorKind::Plus => match (&lhs_data, &rhs_data) { (d1, d2) if is_number_ty(d1) && is_number_ty(d2) => Ok(Ty::NUMBER), - (TyData::String | TyData::Char | TyData::LiteralString(_), -TyData::String | TyData::Char | TyData::LiteralString(_)) => Ok(Ty::STRING), + ( + TyData::String | TyData::Char | TyData::LiteralString(_), + TyData::String | TyData::Char | TyData::LiteralString(_), + ) => Ok(Ty::STRING), (TyData::Array { elem: l, .. }, TyData::Array { elem: r, .. 
}) => { let elem = store.union(vec![*l, *r]); Ok(store.array(elem)) @@ -145,10 +175,7 @@ TyData::String | TyData::Char | TyData::LiteralString(_)) => Ok(Ty::STRING), has_unknown: true, // AttrsOf adds unknown fields })) } - _ => Err(format!( - "operator `+` requires matching types (number+number, string+string, array+array, or object+object), got ({}, {})", - store.display(lhs), store.display(rhs) - )), + _ => Err(BinaryOpTypeError::InvalidPlusOperands), }, // Logical: (Bool, Bool) -> Bool (though Jsonnet actually allows any types) @@ -172,11 +199,7 @@ TyData::String | TyData::Char | TyData::LiteralString(_)) => Ok(Ty::STRING), if is_number_ty(&lhs_data) && is_number_ty(&rhs_data) { Ok(Ty::NUMBER) } else { - Err(format!( - "bitwise operator requires (number, number), got ({}, {})", - store.display(lhs), - store.display(rhs) - )) + Err(BinaryOpTypeError::RequiresBitwiseNumberPair) } } @@ -190,17 +213,15 @@ TyData::String | TyData::Char | TyData::LiteralString(_)) => Ok(Ty::STRING), // In: (String, Object) -> Bool BinaryOperatorKind::InKw => { - let lhs_is_string = - matches!(lhs_data, TyData::String | TyData::Char | TyData::LiteralString(_)); + let lhs_is_string = matches!( + lhs_data, + TyData::String | TyData::Char | TyData::LiteralString(_) + ); let rhs_is_object = matches!(rhs_data, TyData::Object(_) | TyData::AttrsOf { .. }); if lhs_is_string && rhs_is_object { Ok(Ty::BOOL) } else { - Err(format!( - "operator `in` requires (string, object), got ({}, {})", - store.display(lhs), - store.display(rhs) - )) + Err(BinaryOpTypeError::RequiresStringAndObject) } } @@ -215,7 +236,7 @@ TyData::String | TyData::Char | TyData::LiteralString(_)) => Ok(Ty::STRING), /// Check if a unary operation is valid and return the result type. /// /// Returns `Ok(result_ty)` if the operation is valid for the given operand type, -/// or `Err(error_message)` if the operation is invalid. +/// or `Err(error_kind)` if the operation is invalid. 
/// /// # Errors /// Returns `Err` when the operand type does not support the requested operator. @@ -223,7 +244,7 @@ pub fn unary_op_result_ty( op: UnaryOperatorKind, operand: Ty, store: &mut S, -) -> Result { +) -> Result { // Any, Never short-circuit if operand.is_any() { return Ok(Ty::ANY); @@ -256,30 +277,21 @@ pub fn unary_op_result_ty( if is_bool_ty(&operand_data) { Ok(Ty::BOOL) } else { - Err(format!( - "operator `!` requires boolean, got {}", - store.display(operand) - )) + Err(UnaryOpTypeError::NotRequiresBoolean) } } UnaryOperatorKind::Minus => { if is_number_ty(&operand_data) { Ok(Ty::NUMBER) } else { - Err(format!( - "operator `-` requires number, got {}", - store.display(operand) - )) + Err(UnaryOpTypeError::MinusRequiresNumber) } } UnaryOperatorKind::BitNot => { if is_number_ty(&operand_data) { Ok(Ty::NUMBER) } else { - Err(format!( - "operator `~` requires number, got {}", - store.display(operand) - )) + Err(UnaryOpTypeError::BitNotRequiresNumber) } } } From 4fc212b62bd5d4cd8d2d1602b97999739c257888 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Fri, 27 Feb 2026 14:03:02 +0000 Subject: [PATCH 194/210] perf(lsp-import): use FxHash graph maps and add import graph benches --- Cargo.lock | 1 + crates/jrsonnet-lsp-import/Cargo.toml | 5 + .../benches/import_graph.rs | 159 ++++++++++++++++++ .../src/graph/operations.rs | 25 ++- 4 files changed, 176 insertions(+), 14 deletions(-) create mode 100644 crates/jrsonnet-lsp-import/benches/import_graph.rs diff --git a/Cargo.lock b/Cargo.lock index 68e8d889..fc3c2d95 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1911,6 +1911,7 @@ dependencies = [ name = "jrsonnet-lsp-import" version = "0.5.0-pre97" dependencies = [ + "criterion", "jrsonnet-lsp-document", "jrsonnet-rowan-parser", "rayon", diff --git a/crates/jrsonnet-lsp-import/Cargo.toml b/crates/jrsonnet-lsp-import/Cargo.toml index a9a2810e..4f3e82bd 100644 --- a/crates/jrsonnet-lsp-import/Cargo.toml +++ b/crates/jrsonnet-lsp-import/Cargo.toml @@ -15,7 +15,12 @@ 
rowan.workspace = true rustc-hash.workspace = true [dev-dependencies] +criterion = { version = "0.5", features = ["html_reports"] } tempfile.workspace = true +[[bench]] +name = "import_graph" +harness = false + [lints] workspace = true diff --git a/crates/jrsonnet-lsp-import/benches/import_graph.rs b/crates/jrsonnet-lsp-import/benches/import_graph.rs new file mode 100644 index 00000000..4167b195 --- /dev/null +++ b/crates/jrsonnet-lsp-import/benches/import_graph.rs @@ -0,0 +1,159 @@ +//! Benchmarks for import-graph operations. + +use std::path::PathBuf; + +use criterion::{black_box, criterion_group, criterion_main, BenchmarkId, Criterion, Throughput}; +use jrsonnet_lsp_document::{CanonicalPath, PathStore}; +use jrsonnet_lsp_import::{ImportEntry, ImportGraph, ImportKind}; + +fn bench_path(index: usize) -> CanonicalPath { + CanonicalPath::new(PathBuf::from(format!("/bench/file-{index}.jsonnet"))) +} + +fn bench_update_file_with_entries(c: &mut Criterion) { + let mut group = c.benchmark_group("import_graph/update_file_with_entries"); + + for dep_count in [1usize, 8, 32, 128] { + let mut graph = ImportGraph::new(PathStore::new()); + let main_path = bench_path(0); + let main_file = graph.intern(&main_path); + + let mut entries_template = Vec::with_capacity(dep_count); + for dep_idx in 0..dep_count { + let dep_path = bench_path(dep_idx + 1); + let dep_file = graph.intern(&dep_path); + entries_template.push(ImportEntry { + kind: ImportKind::Code, + binding_name: None, + import_path: format!("dep-{dep_idx}.jsonnet"), + resolved_file: Some(dep_file), + resolved_path: Some(dep_path), + }); + } + + group.throughput(Throughput::Elements(dep_count as u64)); + group.bench_function(BenchmarkId::new("deps", dep_count), |b| { + b.iter_batched( + || entries_template.clone(), + |entries| { + graph.update_file_with_entries(main_file, black_box(entries)); + }, + criterion::BatchSize::SmallInput, + ); + }); + } + + group.finish(); +} + +fn build_chain_graph( + size: usize, +) -> ( + 
ImportGraph, + Vec, + Vec, +) { + let mut graph = ImportGraph::new(PathStore::new()); + let mut paths = Vec::with_capacity(size); + let mut files = Vec::with_capacity(size); + + for idx in 0..size { + let path = bench_path(idx); + let file = graph.intern(&path); + paths.push(path); + files.push(file); + } + + for idx in 0..size.saturating_sub(1) { + graph.update_file_with_entries( + files[idx], + vec![ImportEntry { + kind: ImportKind::Code, + binding_name: None, + import_path: format!("file-{}.jsonnet", idx + 1), + resolved_file: Some(files[idx + 1]), + resolved_path: Some(paths[idx + 1].clone()), + }], + ); + } + + (graph, paths, files) +} + +fn bench_transitive_importers(c: &mut Criterion) { + let mut group = c.benchmark_group("import_graph/transitive_importers"); + + for size in [64usize, 512, 2048] { + let (graph, _, files) = build_chain_graph(size); + let leaf = files[size - 1]; + + group.throughput(Throughput::Elements(size as u64)); + group.bench_function(BenchmarkId::new("chain", size), |b| { + b.iter(|| black_box(graph.transitive_importers(leaf))); + }); + } + + group.finish(); +} + +fn build_bulk_fixture( + file_count: usize, + fanout: usize, +) -> Vec<(CanonicalPath, Vec)> { + let paths: Vec<_> = (0..file_count).map(bench_path).collect(); + + let mut fixture = Vec::with_capacity(file_count); + for (idx, path) in paths.iter().enumerate() { + let mut deps = Vec::with_capacity(fanout); + for dep in (idx + 1)..(idx + 1 + fanout) { + if dep >= file_count { + break; + } + deps.push(paths[dep].clone()); + } + fixture.push((path.clone(), deps)); + } + + fixture +} + +fn bench_bulk_update(c: &mut Criterion) { + let mut group = c.benchmark_group("import_graph/bulk_update"); + + for file_count in [128usize, 1024] { + let fanout = 3usize; + let fixture = build_bulk_fixture(file_count, fanout); + + group.throughput(Throughput::Elements(file_count as u64)); + group.bench_function(BenchmarkId::new("files", file_count), |b| { + b.iter(|| { + let mut graph = 
ImportGraph::new(PathStore::new()); + for (path, deps) in &fixture { + let file = graph.intern(path); + let entries: Vec<_> = deps + .iter() + .map(|dep_path| ImportEntry { + kind: ImportKind::Code, + binding_name: None, + import_path: dep_path.as_path().to_string_lossy().into_owned(), + resolved_file: None, + resolved_path: Some(dep_path.clone()), + }) + .collect(); + graph.update_file_with_entries(file, entries); + } + black_box(graph.file_count()); + }); + }); + } + + group.finish(); +} + +criterion_group!( + benches, + bench_update_file_with_entries, + bench_transitive_importers, + bench_bulk_update +); +criterion_main!(benches); diff --git a/crates/jrsonnet-lsp-import/src/graph/operations.rs b/crates/jrsonnet-lsp-import/src/graph/operations.rs index 59529eb4..b2e2629d 100644 --- a/crates/jrsonnet-lsp-import/src/graph/operations.rs +++ b/crates/jrsonnet-lsp-import/src/graph/operations.rs @@ -3,13 +3,10 @@ //! Maintains a bidirectional graph of import relationships between files, //! enabling efficient cross-file reference lookups. -use std::{ - collections::{HashMap, HashSet, VecDeque}, - sync::Arc, -}; +use std::{collections::VecDeque, sync::Arc}; use jrsonnet_lsp_document::{CanonicalPath, Document, FileId, PathResolver, PathStore}; -use rustc_hash::FxHashMap; +use rustc_hash::{FxHashMap, FxHashSet}; pub use super::parse::{parse_document_import_occurrences, parse_document_imports}; @@ -58,13 +55,13 @@ pub struct ImportGraph { /// Read-only resolver over interned mapping. pub(super) resolver: PathResolver, /// Map of file → import entries in that file. - pub(super) imports: HashMap>, + pub(super) imports: FxHashMap>, /// Cache of resolved imports by raw import path (`import`/`importstr`/`importbin`). - pub(super) resolved_imports: HashMap>, + pub(super) resolved_imports: FxHashMap>, /// Cache of resolved code imports by raw import path (`import` only). 
- pub(super) resolved_code_imports: HashMap>, + pub(super) resolved_code_imports: FxHashMap>, /// Reverse index: file → files that import it. - pub(super) imported_by: HashMap>, + pub(super) imported_by: FxHashMap>, } impl ImportGraph { @@ -75,10 +72,10 @@ impl ImportGraph { Self { paths, resolver, - imports: HashMap::new(), - resolved_imports: HashMap::new(), - resolved_code_imports: HashMap::new(), - imported_by: HashMap::new(), + imports: FxHashMap::default(), + resolved_imports: FxHashMap::default(), + resolved_code_imports: FxHashMap::default(), + imported_by: FxHashMap::default(), } } @@ -235,7 +232,7 @@ impl ImportGraph { /// to find all files that depend on the given file, directly or indirectly. #[must_use] pub fn transitive_importers(&self, file: FileId) -> Vec { - let mut result = HashSet::new(); + let mut result = FxHashSet::default(); let mut queue = VecDeque::from([file]); while let Some(current) = queue.pop_front() { From 277d66ddd983c624159fb0d3e622abaa47591303 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Fri, 27 Feb 2026 14:21:19 +0000 Subject: [PATCH 195/210] style(lsp-import): simplify bench dependency iteration --- crates/jrsonnet-lsp-import/benches/import_graph.rs | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/crates/jrsonnet-lsp-import/benches/import_graph.rs b/crates/jrsonnet-lsp-import/benches/import_graph.rs index 4167b195..ffe9b6f4 100644 --- a/crates/jrsonnet-lsp-import/benches/import_graph.rs +++ b/crates/jrsonnet-lsp-import/benches/import_graph.rs @@ -105,11 +105,8 @@ fn build_bulk_fixture( let mut fixture = Vec::with_capacity(file_count); for (idx, path) in paths.iter().enumerate() { let mut deps = Vec::with_capacity(fanout); - for dep in (idx + 1)..(idx + 1 + fanout) { - if dep >= file_count { - break; - } - deps.push(paths[dep].clone()); + for dep_path in paths.iter().skip(idx + 1).take(fanout) { + deps.push(dep_path.clone()); } fixture.push((path.clone(), deps)); } From 
9f7a8af5e4b70b46777b346e8d88d55d34b30183 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Fri, 27 Feb 2026 14:21:38 +0000 Subject: [PATCH 196/210] fix(rtk): preserve formatting behavior with state-scoped evaluator flags --- cmds/rtk/src/eval.rs | 27 +++---- crates/jrsonnet-evaluator/src/lib.rs | 94 +++++++++++++++++++++-- crates/jrsonnet-evaluator/src/manifest.rs | 24 ++---- crates/jrsonnet-evaluator/src/obj.rs | 17 +--- crates/jrsonnet-evaluator/src/tla.rs | 36 +++++---- 5 files changed, 124 insertions(+), 74 deletions(-) diff --git a/cmds/rtk/src/eval.rs b/cmds/rtk/src/eval.rs index 4628c7d6..da9c302d 100644 --- a/cmds/rtk/src/eval.rs +++ b/cmds/rtk/src/eval.rs @@ -7,9 +7,8 @@ use std::{collections::HashMap, fs, path::Path}; use anyhow::{Context, Result}; use jrsonnet_evaluator::{ - function::TlaArg, gc::GcHashMap, set_lenient_super, set_skip_assertions, - stack::set_stack_depth_limit, trace::PathResolver, FileImportResolver, IStr, ImportResolver, - State, + function::TlaArg, gc::GcHashMap, stack::set_stack_depth_limit, trace::PathResolver, + FileImportResolver, IStr, ImportResolver, State, }; use jrsonnet_stdlib::ContextInitializer; use tracing::instrument; @@ -120,12 +119,6 @@ pub fn eval_with_resolver( spec: Option, opts: EvalOpts, ) -> Result { - set_skip_assertions(false); - - // Enable lenient super mode to handle mixins that reference super fields that don't exist yet - // This works around go-jsonnet compatibility issues in libraries like k8s-libsonnet - set_lenient_super(true); - // Set up the evaluator state let state = setup_state(import_resolver, config_base, &spec, &opts)?; @@ -200,7 +193,7 @@ fn setup_state( } } - apply_rtk_config(&context_init, &config); + let use_go_style_floats = apply_rtk_config(&context_init, &config); // Add external variables from spec (environment config) if let Some(env) = spec { @@ -232,7 +225,10 @@ fn setup_state( let mut builder = State::builder(); builder .import_resolver(import_resolver) - 
.context_initializer(context_init); + .context_initializer(context_init) + .lenient_super(true) + .skip_assertions(false) + .use_go_style_floats(use_go_style_floats); // Set max stack if specified - must be done before building state if let Some(max_stack) = opts.max_stack { @@ -245,8 +241,7 @@ fn setup_state( } /// Apply settings from .rtk-config.yaml to the context initializer -fn apply_rtk_config(context_init: &ContextInitializer, config: &RtkConfig) { - use jrsonnet_evaluator::manifest::set_use_go_style_floats; +fn apply_rtk_config(context_init: &ContextInitializer, config: &RtkConfig) -> bool { use jrsonnet_stdlib::{ ManifestYamlDocFormatting, ManifestYamlStreamEmptyBehavior, ManifestYamlStreamFormatting, QuoteValuesBehavior, @@ -280,7 +275,7 @@ fn apply_rtk_config(context_init: &ContextInitializer, config: &RtkConfig) { Some(JsonnetImplementation::Jrsonnet) => false, Some(JsonnetImplementation::GoJsonnet) | None => true, }; - set_use_go_style_floats(use_go_style); + use_go_style } /// Register Tanka-compatible native functions @@ -381,8 +376,8 @@ main{}{} let result = apply_tla(state, result, opts)?; // Manifest the result to JSON - let manifest = result - .manifest(jrsonnet_evaluator::manifest::JsonFormat::default()) + let manifest = state + .with_behavior(|| result.manifest(jrsonnet_evaluator::manifest::JsonFormat::default())) .map_err(|e| anyhow::anyhow!("manifest error:\n{}", e))?; Ok(manifest.to_string()) diff --git a/crates/jrsonnet-evaluator/src/lib.rs b/crates/jrsonnet-evaluator/src/lib.rs index c90a46a0..1fd4305e 100644 --- a/crates/jrsonnet-evaluator/src/lib.rs +++ b/crates/jrsonnet-evaluator/src/lib.rs @@ -27,7 +27,7 @@ pub mod val; use std::{ any::Any, - cell::{RefCell, RefMut}, + cell::{Cell, RefCell, RefMut}, fmt::{self, Debug}, path::Path, }; @@ -51,6 +51,55 @@ use stack::check_depth; pub use tla::apply_tla; pub use val::{Thunk, Val}; +#[derive(Clone, Copy, Debug, Trace)] +pub struct EvaluationBehavior { + pub skip_assertions: bool, + pub 
lenient_super: bool, + pub use_go_style_floats: bool, +} + +impl Default for EvaluationBehavior { + fn default() -> Self { + Self { + skip_assertions: false, + lenient_super: false, + use_go_style_floats: true, + } + } +} + +thread_local! { + static ACTIVE_BEHAVIOR: Cell = const { Cell::new(EvaluationBehavior { + skip_assertions: false, + lenient_super: false, + use_go_style_floats: true, + }) }; +} + +pub(crate) fn active_behavior() -> EvaluationBehavior { + ACTIVE_BEHAVIOR.with(Cell::get) +} + +pub(crate) fn with_active_behavior( + behavior: EvaluationBehavior, + f: impl FnOnce() -> Result, +) -> Result { + struct ResetBehavior(EvaluationBehavior); + impl Drop for ResetBehavior { + fn drop(&mut self) { + ACTIVE_BEHAVIOR.with(|slot| slot.set(self.0)); + } + } + + let prev = ACTIVE_BEHAVIOR.with(|slot| { + let prev = slot.get(); + slot.set(behavior); + prev + }); + let _reset = ResetBehavior(prev); + f() +} + /// Thunk without bound `super`/`this` /// object inheritance may be overriden multiple times, and will be fixed only on field read pub trait Unbound: Trace { @@ -221,6 +270,7 @@ pub struct EvaluationStateInternals { context_initializer: TraceBox, /// Used to resolve file locations/contents import_resolver: TraceBox, + behavior: EvaluationBehavior, } /// Maintains stack trace and import resolution @@ -336,7 +386,9 @@ impl State { file.evaluating = true; // Dropping file cache guard here, as evaluation may use this map too drop(file_cache); - let res = evaluate(self.create_default_context(file_name), &parsed); + let res = with_active_behavior(self.behavior(), || { + evaluate(self.create_default_context(file_name), &parsed) + }); let mut file_cache = self.file_cache(); let mut file = file_cache.raw_entry_mut().from_key(&path); @@ -443,7 +495,9 @@ impl State { path: source.clone(), error: Box::new(e), })?; - evaluate(self.create_default_context(source), &parsed) + with_active_behavior(self.behavior(), || { + evaluate(self.create_default_context(source), &parsed) 
+ }) } /// Parses and evaluates the given snippet with custom context modifier pub fn evaluate_snippet_with( @@ -464,10 +518,12 @@ impl State { path: source.clone(), error: Box::new(e), })?; - evaluate( - self.create_default_context_with(source, context_initializer), - &parsed, - ) + with_active_behavior(self.behavior(), || { + evaluate( + self.create_default_context_with(source, context_initializer), + &parsed, + ) + }) } } @@ -490,6 +546,12 @@ impl State { pub fn context_initializer(&self) -> &dyn ContextInitializer { &*self.0.context_initializer } + pub fn behavior(&self) -> EvaluationBehavior { + self.0.behavior + } + pub fn with_behavior(&self, f: impl FnOnce() -> Result) -> Result { + with_active_behavior(self.behavior(), f) + } } impl State { @@ -508,6 +570,7 @@ impl Default for State { pub struct StateBuilder { import_resolver: Option>, context_initializer: Option>, + behavior: EvaluationBehavior, } impl StateBuilder { pub fn import_resolver(&mut self, import_resolver: impl ImportResolver) -> &mut Self { @@ -521,6 +584,22 @@ impl StateBuilder { let _ = self.context_initializer.insert(tb!(context_initializer)); self } + pub fn behavior(&mut self, behavior: EvaluationBehavior) -> &mut Self { + self.behavior = behavior; + self + } + pub fn skip_assertions(&mut self, skip_assertions: bool) -> &mut Self { + self.behavior.skip_assertions = skip_assertions; + self + } + pub fn lenient_super(&mut self, lenient_super: bool) -> &mut Self { + self.behavior.lenient_super = lenient_super; + self + } + pub fn use_go_style_floats(&mut self, use_go_style_floats: bool) -> &mut Self { + self.behavior.use_go_style_floats = use_go_style_floats; + self + } pub fn build(mut self) -> State { State(Cc::new(EvaluationStateInternals { file_cache: RefCell::new(GcHashMap::new()), @@ -529,6 +608,7 @@ impl StateBuilder { .import_resolver .take() .unwrap_or_else(|| tb!(DummyImportResolver)), + behavior: self.behavior, })) } } diff --git a/crates/jrsonnet-evaluator/src/manifest.rs 
b/crates/jrsonnet-evaluator/src/manifest.rs index c19fc133..b85ee6bf 100644 --- a/crates/jrsonnet-evaluator/src/manifest.rs +++ b/crates/jrsonnet-evaluator/src/manifest.rs @@ -1,24 +1,10 @@ -use std::{borrow::Cow, cell::Cell, fmt::Write, ptr}; +use std::{borrow::Cow, fmt::Write, ptr}; -use crate::{bail, in_description_frame, Result, ResultExt, Val}; - -// Thread-local flag to control float formatting style in std.toString -// When true, uses Go's %.17g format (e.g., 0.59999999999999998) -// When false (default), uses Rust's shortest representation (e.g., 0.6) -thread_local! { - static USE_GO_STYLE_FLOATS: Cell = const { Cell::new(false) }; -} - -/// Set whether to use Go-style float formatting in std.toString -/// - true: Use Go's %.17g format (matches go-jsonnet) -/// - false (default): Use Rust's Display (shortest representation, matches jrsonnet binary) -pub fn set_use_go_style_floats(use_go_style: bool) { - USE_GO_STYLE_FLOATS.with(|s| s.set(use_go_style)); -} +use crate::{active_behavior, bail, in_description_frame, Result, ResultExt, Val}; /// Check if Go-style float formatting is enabled pub(crate) fn should_use_go_style_floats() -> bool { - USE_GO_STYLE_FLOATS.with(Cell::get) + active_behavior().use_go_style_floats } /// Format a float like Go's %.17g format @@ -278,8 +264,8 @@ fn manifest_json_ex_buf( match mtype { // std.toString uses Go's unparseNumber: %.0f for integers, %.17g for floats // This is critical for config_hash compatibility (std.md5(std.toString(...))) - // The go-style formatting can be disabled via set_use_go_style_floats(false) - // to match upstream jrsonnet binary behavior + // The go-style formatting is configured per evaluator state to match + // go-jsonnet or upstream jrsonnet behavior. 
ToString => { let floor = v.floor(); let integer_margin = f64::EPSILON * v.abs().max(1.0); diff --git a/crates/jrsonnet-evaluator/src/obj.rs b/crates/jrsonnet-evaluator/src/obj.rs index a58e005b..d0c3a71e 100644 --- a/crates/jrsonnet-evaluator/src/obj.rs +++ b/crates/jrsonnet-evaluator/src/obj.rs @@ -14,8 +14,6 @@ use rustc_hash::FxHashMap; // Thread-local flag to disable assertion checking // This is used to match Go Tanka's behavior of not running assertions during manifest generation thread_local! { - static SKIP_ASSERTIONS: Cell = const { Cell::new(false) }; - static LENIENT_SUPER: Cell = const { Cell::new(false) }; // Counter for how many assertions are currently being evaluated. // When inside assertion evaluation (counter > 0), we skip triggering new assertions // on field accesses to prevent infinite recursion when an assertion accesses a field @@ -23,14 +21,9 @@ thread_local! { static ASSERTION_DEPTH: Cell = const { Cell::new(0) }; } -/// Set whether to skip assertion checks (for manifest generation compatibility with Go Tanka) -pub fn set_skip_assertions(skip: bool) { - SKIP_ASSERTIONS.with(|s| s.set(skip)); -} - /// Check if assertions should be skipped fn should_skip_assertions() -> bool { - SKIP_ASSERTIONS.with(std::cell::Cell::get) + crate::active_behavior().skip_assertions } /// Check if we're currently inside assertion evaluation @@ -54,15 +47,9 @@ impl Drop for AssertionGuard { } } -/// Set whether to use lenient mode for super field access (return empty object instead of error) -/// This works around go-jsonnet compatibility issues where mixins reference super fields that don't exist yet -pub fn set_lenient_super(lenient: bool) { - LENIENT_SUPER.with(|s| s.set(lenient)); -} - /// Check if lenient super mode is enabled pub fn should_use_lenient_super() -> bool { - LENIENT_SUPER.with(std::cell::Cell::get) + crate::active_behavior().lenient_super } use crate::{ diff --git a/crates/jrsonnet-evaluator/src/tla.rs b/crates/jrsonnet-evaluator/src/tla.rs 
index 8e9b36eb..a7685fa5 100644 --- a/crates/jrsonnet-evaluator/src/tla.rs +++ b/crates/jrsonnet-evaluator/src/tla.rs @@ -7,22 +7,24 @@ use crate::{ }; pub fn apply_tla(s: State, args: &A, val: Val) -> Result { - Ok(if let Val::Func(func) = val { - in_description_frame( - || "during TLA call".to_owned(), - || { - func.evaluate( - s.create_default_context(Source::new_virtual( - "".into(), - IStr::empty(), - )), - CallLocation::native(), - args, - false, - ) - }, - )? - } else { - val + s.with_behavior(|| { + Ok(if let Val::Func(func) = val { + in_description_frame( + || "during TLA call".to_owned(), + || { + func.evaluate( + s.create_default_context(Source::new_virtual( + "".into(), + IStr::empty(), + )), + CallLocation::native(), + args, + false, + ) + }, + )? + } else { + val + }) }) } From b038bba440f83f0f3619f5ae5a578a5facd8341a Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Fri, 27 Feb 2026 14:30:51 +0000 Subject: [PATCH 197/210] perf(lsp-inference): share resolved import maps via Arc --- .../src/graph/operations.rs | 26 +++++++++++++----- crates/jrsonnet-lsp-inference/src/provider.rs | 23 ++++++++-------- .../jrsonnet-lsp-inference/src/type_cache.rs | 27 ++++++++++++++++--- 3 files changed, 55 insertions(+), 21 deletions(-) diff --git a/crates/jrsonnet-lsp-import/src/graph/operations.rs b/crates/jrsonnet-lsp-import/src/graph/operations.rs index b2e2629d..3e32d5dd 100644 --- a/crates/jrsonnet-lsp-import/src/graph/operations.rs +++ b/crates/jrsonnet-lsp-import/src/graph/operations.rs @@ -57,9 +57,9 @@ pub struct ImportGraph { /// Map of file → import entries in that file. pub(super) imports: FxHashMap>, /// Cache of resolved imports by raw import path (`import`/`importstr`/`importbin`). - pub(super) resolved_imports: FxHashMap>, + pub(super) resolved_imports: FxHashMap>>, /// Cache of resolved code imports by raw import path (`import` only). 
- pub(super) resolved_code_imports: FxHashMap>, + pub(super) resolved_code_imports: FxHashMap>>, /// Reverse index: file → files that import it. pub(super) imported_by: FxHashMap>, } @@ -132,13 +132,14 @@ impl ImportGraph { if resolved_imports.is_empty() { self.resolved_imports.remove(&file_id); } else { - self.resolved_imports.insert(file_id, resolved_imports); + self.resolved_imports + .insert(file_id, Arc::new(resolved_imports)); } if resolved_code_imports.is_empty() { self.resolved_code_imports.remove(&file_id); } else { self.resolved_code_imports - .insert(file_id, resolved_code_imports); + .insert(file_id, Arc::new(resolved_code_imports)); } // Store the import entries @@ -255,12 +256,25 @@ impl ImportGraph { /// Get cached resolved imports (`import`, `importstr`, `importbin`) for a file. pub fn resolved_import_map(&self, file: FileId) -> Option<&FxHashMap> { - self.resolved_imports.get(&file) + self.resolved_imports.get(&file).map(Arc::as_ref) + } + + /// Get shared cached resolved imports (`import`, `importstr`, `importbin`) for a file. + pub fn resolved_import_map_arc(&self, file: FileId) -> Option>> { + self.resolved_imports.get(&file).cloned() } /// Get cached resolved code imports (`import`) for a file. pub fn resolved_code_import_map(&self, file: FileId) -> Option<&FxHashMap> { - self.resolved_code_imports.get(&file) + self.resolved_code_imports.get(&file).map(Arc::as_ref) + } + + /// Get shared cached resolved code imports (`import`) for a file. + pub fn resolved_code_import_map_arc( + &self, + file: FileId, + ) -> Option>> { + self.resolved_code_imports.get(&file).cloned() } /// Resolve one raw import path from a file using the cached import map. 
diff --git a/crates/jrsonnet-lsp-inference/src/provider.rs b/crates/jrsonnet-lsp-inference/src/provider.rs index 127d6956..a8ef2e55 100644 --- a/crates/jrsonnet-lsp-inference/src/provider.rs +++ b/crates/jrsonnet-lsp-inference/src/provider.rs @@ -10,12 +10,12 @@ use jrsonnet_lsp_import::{ImportGraph, ImportKind}; use jrsonnet_lsp_types::GlobalTyStore; use parking_lot::RwLock; use rayon::prelude::*; -use rustc_hash::FxHashMap; use crate::{ analysis::TypeAnalysis, type_cache::{ - analyze_and_cache_file_with_resolved_imports, CachingImportResolver, SharedTypeCache, + analyze_and_cache_file_with_resolved_import_map, CachingImportResolver, ResolvedImportMap, + SharedTypeCache, }, }; @@ -81,13 +81,15 @@ impl TypeProvider { self.ensure_dependencies_analyzed(path, doc_source); let resolved_imports = { let graph = self.import_graph.read(); - graph.file(path).map_or_else(FxHashMap::default, |file| { - resolved_imports_for(&graph, file) - }) + graph + .file(path) + .map_or_else(ResolvedImportMap::default, |file| { + resolved_imports_for(&graph, file) + }) }; // Analyze with import resolution - let import_resolver = Arc::new(CachingImportResolver::new( + let import_resolver = Arc::new(CachingImportResolver::with_resolved_import_map( resolved_imports, Arc::clone(&self.type_cache), )); @@ -130,7 +132,7 @@ impl TypeProvider { let Some(doc) = doc_source.get_document_file(dep_file) else { return; }; - analyze_and_cache_file_with_resolved_imports( + analyze_and_cache_file_with_resolved_import_map( dep_file, &doc, &self.type_cache, @@ -153,11 +155,8 @@ impl TypeProvider { } } -fn resolved_imports_for(graph: &ImportGraph, file: FileId) -> FxHashMap { - graph - .resolved_code_import_map(file) - .cloned() - .unwrap_or_default() +fn resolved_imports_for(graph: &ImportGraph, file: FileId) -> ResolvedImportMap { + graph.resolved_code_import_map_arc(file).unwrap_or_default() } #[cfg(test)] diff --git a/crates/jrsonnet-lsp-inference/src/type_cache.rs 
b/crates/jrsonnet-lsp-inference/src/type_cache.rs index 98e58500..f8baebc1 100644 --- a/crates/jrsonnet-lsp-inference/src/type_cache.rs +++ b/crates/jrsonnet-lsp-inference/src/type_cache.rs @@ -20,6 +20,9 @@ use rustc_hash::FxHashMap; use crate::analysis::TypeAnalysis; +/// Shared import-path resolution map used during type analysis. +pub type ResolvedImportMap = Arc>; + /// Cache of top-level types for documents. /// /// This stores the inferred type of each file's top-level expression, @@ -199,6 +202,16 @@ pub(crate) fn analyze_and_cache_file_with_resolved_imports( where I: IntoIterator, { + let resolved_imports = Arc::new(resolved_imports.into_iter().collect()); + analyze_and_cache_file_with_resolved_import_map(file, doc, cache, resolved_imports) +} + +pub(crate) fn analyze_and_cache_file_with_resolved_import_map( + file: FileId, + doc: &Document, + cache: &SharedTypeCache, + resolved_imports: ResolvedImportMap, +) -> GlobalTy { let version = doc.version().0; // Check if we already have a cached type for this version @@ -218,7 +231,7 @@ where }; // Create an import resolver for cross-file type resolution - let import_resolver = Arc::new(CachingImportResolver::new( + let import_resolver = Arc::new(CachingImportResolver::with_resolved_import_map( resolved_imports, Arc::clone(cache), )); @@ -242,7 +255,7 @@ where #[derive(Debug)] pub struct CachingImportResolver { /// Import path -> resolved file. - resolved_imports: FxHashMap, + resolved_imports: ResolvedImportMap, /// Type cache for looking up cached file types. cache: SharedTypeCache, } @@ -257,8 +270,16 @@ impl CachingImportResolver { where I: IntoIterator, { + Self::with_resolved_import_map(Arc::new(resolved_imports.into_iter().collect()), cache) + } + + /// Create a resolver from a shared pre-built import map. 
+ pub fn with_resolved_import_map( + resolved_imports: ResolvedImportMap, + cache: SharedTypeCache, + ) -> Self { Self { - resolved_imports: resolved_imports.into_iter().collect(), + resolved_imports, cache, } } From 9a97bf10d24247efb8d1a3f2060c4ac0be948a2c Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Fri, 27 Feb 2026 14:32:21 +0000 Subject: [PATCH 198/210] perf(lsp-import): streamline import parsing passes --- crates/jrsonnet-lsp-import/src/graph/parse.rs | 116 +++++++++--------- 1 file changed, 59 insertions(+), 57 deletions(-) diff --git a/crates/jrsonnet-lsp-import/src/graph/parse.rs b/crates/jrsonnet-lsp-import/src/graph/parse.rs index ddd31eed..b01cb85c 100644 --- a/crates/jrsonnet-lsp-import/src/graph/parse.rs +++ b/crates/jrsonnet-lsp-import/src/graph/parse.rs @@ -3,6 +3,7 @@ use jrsonnet_rowan_parser::{ nodes::{Bind, Destruct, ExprImport, ImportKindKind, StmtLocal}, AstNode, AstToken, SyntaxKind, }; +use rustc_hash::FxHashSet; use super::{ImportEntry, ImportKind, ImportOccurrence}; use crate::parse::extract_import_path; @@ -34,46 +35,48 @@ where F: Fn(&str) -> Option, { let mut occurrences = Vec::new(); - let mut seen_expr_import_ranges = std::collections::HashSet::new(); - let mut seen_string_ranges = std::collections::HashSet::new(); + let mut seen_expr_import_ranges = FxHashSet::default(); + let mut seen_string_ranges = FxHashSet::default(); let ast = doc.ast(); - // First pass: find imports in local statements (these have bindings) + // Single pass over AST nodes: + // - capture local-bound imports with binding names + // - capture bare import expressions + // - dedupe imports that were already captured from local statements for node in ast.syntax().descendants() { - if node.kind() == SyntaxKind::STMT_LOCAL { - if let Some(stmt_local) = StmtLocal::cast(node.clone()) { - for bind in stmt_local.binds() { - if let Some((occurrence, import_range)) = - parse_bind_import_with_range(&bind, resolve_import) + match node.kind() { + SyntaxKind::STMT_LOCAL => { 
+ if let Some(stmt_local) = StmtLocal::cast(node.clone()) { + for bind in stmt_local.binds() { + if let Some((occurrence, import_range)) = + parse_bind_import_with_range(&bind, resolve_import) + { + seen_expr_import_ranges.insert(import_range); + seen_string_ranges.insert(occurrence.import_range); + occurrences.push(occurrence); + } + } + } + } + SyntaxKind::EXPR_IMPORT => { + let range = node.text_range(); + // Skip if we already captured this import via a local statement. + if !seen_expr_import_ranges.insert(range) { + continue; + } + if let Some(import) = ExprImport::cast(node) { + if let Some(occurrence) = parse_import_occurrence(&import, None, resolve_import) { - seen_expr_import_ranges.insert(import_range); seen_string_ranges.insert(occurrence.import_range); occurrences.push(occurrence); } } } + _ => {} } } - // Second pass: find bare import expressions that weren't part of a local statement - for node in ast.syntax().descendants() { - if node.kind() == SyntaxKind::EXPR_IMPORT { - let range = node.text_range(); - // Skip if we already captured this import in a local statement - if seen_expr_import_ranges.contains(&range) { - continue; - } - if let Some(import) = ExprImport::cast(node) { - if let Some(occurrence) = parse_import_occurrence(&import, None, resolve_import) { - seen_expr_import_ranges.insert(range); - seen_string_ranges.insert(occurrence.import_range); - occurrences.push(occurrence); - } - } - } - } - - // Third pass fallback: recover imports from token stream for syntax-broken files. + // Fallback pass: recover imports from token stream for syntax-broken files. occurrences.extend(parse_token_fallback_import_occurrences( doc, resolve_import, @@ -102,7 +105,7 @@ where let bind_name = full.name()?.ident_lit()?.text().to_string(); - // Check if the expression is an import + // Check if the expression is an import. 
let expr = bd.value()?; for node in expr.syntax().descendants() { if node.kind() == SyntaxKind::EXPR_IMPORT { @@ -148,48 +151,61 @@ where fn parse_token_fallback_import_occurrences( doc: &Document, resolve_import: &F, - seen_string_ranges: &mut std::collections::HashSet, + seen_string_ranges: &mut FxHashSet, ) -> Vec where F: Fn(&str) -> Option, { - let tokens: Vec<_> = doc + let mut occurrences = Vec::new(); + let mut pending_import_kind = None; + + for token in doc .ast() .syntax() .descendants_with_tokens() .filter_map(rowan::NodeOrToken::into_token) - .collect(); + { + let kind = token.kind(); + if is_import_keyword(kind) { + pending_import_kind = Some(kind); + continue; + } - let mut occurrences = Vec::new(); - for (idx, token) in tokens.iter().enumerate() { - if !is_import_keyword(token.kind()) { + if matches!( + kind, + SyntaxKind::WHITESPACE + | SyntaxKind::MULTI_LINE_COMMENT + | SyntaxKind::SINGLE_LINE_HASH_COMMENT + | SyntaxKind::SINGLE_LINE_SLASH_COMMENT + ) { continue; } - let Some(import_text) = next_non_trivia_token(&tokens, idx + 1) else { + let Some(keyword_kind) = pending_import_kind.take() else { continue; }; - if !is_import_string_token(import_text.kind()) { + if !is_import_string_token(kind) { continue; } - let import_range = import_text.text_range(); + let import_range = token.text_range(); if !seen_string_ranges.insert(import_range) { continue; } - let import_path = strip_string_quotes(import_text.text()); + let import_path = strip_string_quotes(token.text()); if import_path.is_empty() { continue; } - let Some(kind) = import_kind_from_keyword_token(token.kind()) else { + + let Some(import_kind) = import_kind_from_keyword_token(keyword_kind) else { continue; }; occurrences.push(ImportOccurrence { entry: ImportEntry { - kind, - binding_name: binding_name_from_import_token(import_text), + kind: import_kind, + binding_name: binding_name_from_import_token(&token), resolved_path: resolve_import(&import_path), import_path, resolved_file: None, @@ 
-197,22 +213,8 @@ where import_range, }); } - occurrences -} -fn next_non_trivia_token( - tokens: &[jrsonnet_rowan_parser::SyntaxToken], - start_idx: usize, -) -> Option<&jrsonnet_rowan_parser::SyntaxToken> { - tokens.get(start_idx..)?.iter().find(|token| { - !matches!( - token.kind(), - SyntaxKind::WHITESPACE - | SyntaxKind::MULTI_LINE_COMMENT - | SyntaxKind::SINGLE_LINE_HASH_COMMENT - | SyntaxKind::SINGLE_LINE_SLASH_COMMENT - ) - }) + occurrences } const fn is_import_keyword(kind: SyntaxKind) -> bool { From 6dc03f586cfc390e2c580a2ce27dc21d1b8cf96e Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Fri, 27 Feb 2026 15:25:05 +0000 Subject: [PATCH 199/210] refactor(lsp): make workspace import graph demand-driven --- crates/jrsonnet-lsp/src/server.rs | 31 +++- .../jrsonnet-lsp/src/server/async_requests.rs | 23 ++- .../server/async_requests/commands/graph.rs | 5 +- .../async_requests/import_graph_precision.rs | 172 ++++++++++++++++++ .../src/server/async_requests/references.rs | 7 +- .../src/server/async_requests/rename.rs | 1 + .../server/async_requests/workspace_symbol.rs | 12 +- .../jrsonnet-lsp/src/server/import_graph.rs | 32 +++- .../jrsonnet-lsp/src/server/notifications.rs | 47 +++-- .../src/server/workspace_index.rs | 156 +++++++++------- 10 files changed, 374 insertions(+), 112 deletions(-) create mode 100644 crates/jrsonnet-lsp/src/server/async_requests/import_graph_precision.rs diff --git a/crates/jrsonnet-lsp/src/server.rs b/crates/jrsonnet-lsp/src/server.rs index ce2575fd..a79ddb0b 100644 --- a/crates/jrsonnet-lsp/src/server.rs +++ b/crates/jrsonnet-lsp/src/server.rs @@ -18,7 +18,7 @@ use std::{ collections::BTreeSet, panic::{catch_unwind, AssertUnwindSafe}, path::PathBuf, - sync::Arc, + sync::{atomic::AtomicBool, Arc}, }; use anyhow::Result; @@ -31,10 +31,11 @@ use jrsonnet_lsp_inference::{ use jrsonnet_lsp_types::GlobalTyStore; use lsp_server::{Connection, Message, Notification, RequestId, Response}; use lsp_types::{notification::PublishDiagnostics, 
InitializeParams, OneOf}; -use parking_lot::RwLock; +use parking_lot::{Mutex, RwLock}; +use rustc_hash::FxHashSet; use tracing::{debug, error, info, warn}; -use self::async_requests::AsyncRequestContext; +use self::async_requests::{AsyncRequestContext, WorkspaceRequestState}; use crate::{ analysis::{tanka::effective_import_roots, EvalConfig, Evaluator}, async_diagnostics::{AsyncDiagnostics, DiagnosticsConfig}, @@ -61,6 +62,14 @@ pub struct Server { config: SharedConfig, /// Workspace roots derived from initialize params. workspace_roots: Vec, + /// Files discovered under workspace roots. + workspace_known_files: Arc>>, + /// Files whose import entries should be (re)materialized on-demand. + workspace_dirty_files: Arc>>, + /// Whether workspace discovery has been performed at least once. + workspace_discovery_done: Arc, + /// Serializes workspace discovery/materialization for consistent graph reads. + workspace_graph_materialization_lock: Arc>, /// Evaluator for runtime diagnostics (wrapped in Arc for sharing with async diagnostics). evaluator: Option>, /// Async diagnostics runner. 
@@ -110,6 +119,10 @@ impl Server { let type_cache = new_shared_cache(Arc::clone(&global_types), path_store); let (request_response_sender, request_response_receiver) = crossbeam_channel::unbounded(); let inflight_requests = InflightRequests::new(connection.sender.clone()); + let workspace_known_files = Arc::new(RwLock::new(FxHashSet::default())); + let workspace_dirty_files = Arc::new(RwLock::new(FxHashSet::default())); + let workspace_discovery_done = Arc::new(AtomicBool::new(false)); + let workspace_graph_materialization_lock = Arc::new(Mutex::new(())); let diagnostics = AsyncDiagnostics::new(DiagnosticsConfig { evaluator: None, documents: Arc::clone(&documents), @@ -126,6 +139,10 @@ impl Server { global_types, config: Arc::new(RwLock::new(ServerConfig::default())), workspace_roots: Vec::new(), + workspace_known_files, + workspace_dirty_files, + workspace_discovery_done, + workspace_graph_materialization_lock, evaluator: None, diagnostics, inflight_requests, @@ -172,6 +189,13 @@ impl Server { Arc::clone(&self.global_types), Arc::clone(&self.type_cache), Arc::clone(&self.config), + WorkspaceRequestState { + roots: self.workspace_roots.clone(), + known_files: Arc::clone(&self.workspace_known_files), + dirty_files: Arc::clone(&self.workspace_dirty_files), + discovery_done: Arc::clone(&self.workspace_discovery_done), + graph_materialization_lock: Arc::clone(&self.workspace_graph_materialization_lock), + }, ) } @@ -310,7 +334,6 @@ impl Server { self.register_did_change_watched_files(¶ms, &init_roots)?; let workspace_roots = Self::workspace_root_paths(&init_roots); self.workspace_roots.clone_from(&workspace_roots); - self.schedule_workspace_index_bootstrap(workspace_roots); // Main loop self.main_loop()?; diff --git a/crates/jrsonnet-lsp/src/server/async_requests.rs b/crates/jrsonnet-lsp/src/server/async_requests.rs index 986d17dd..53bfc4a0 100644 --- a/crates/jrsonnet-lsp/src/server/async_requests.rs +++ b/crates/jrsonnet-lsp/src/server/async_requests.rs @@ -11,6 
+11,7 @@ mod goto_implementation; mod goto_shared; mod goto_type_definition; mod hover; +mod import_graph_precision; mod import_lookup; mod inlay_hints; mod prepare_rename; @@ -21,16 +22,29 @@ mod semantic_tokens_range; mod signature_help; mod workspace_symbol; -use std::sync::Arc; +use std::{ + path::PathBuf, + sync::{atomic::AtomicBool, Arc}, +}; -use jrsonnet_lsp_document::{CanonicalPath, Document}; +use jrsonnet_lsp_document::{CanonicalPath, Document, FileId}; use jrsonnet_lsp_import::ImportGraph; use jrsonnet_lsp_inference::{SharedDocumentManager, SharedTypeCache, TypeAnalysis, TypeProvider}; use jrsonnet_lsp_types::GlobalTyStore; -use parking_lot::RwLock; +use parking_lot::{Mutex, RwLock}; +use rustc_hash::FxHashSet; use super::SharedConfig; +#[derive(Clone)] +pub(super) struct WorkspaceRequestState { + pub(super) roots: Vec, + pub(super) known_files: Arc>>, + pub(super) dirty_files: Arc>>, + pub(super) discovery_done: Arc, + pub(super) graph_materialization_lock: Arc>, +} + #[derive(Clone)] pub(super) struct AsyncRequestContext { documents: SharedDocumentManager, @@ -38,6 +52,7 @@ pub(super) struct AsyncRequestContext { global_types: Arc, type_cache: SharedTypeCache, config: SharedConfig, + workspace: WorkspaceRequestState, } impl AsyncRequestContext { @@ -47,6 +62,7 @@ impl AsyncRequestContext { global_types: Arc, type_cache: SharedTypeCache, config: SharedConfig, + workspace: WorkspaceRequestState, ) -> Self { Self { documents, @@ -54,6 +70,7 @@ impl AsyncRequestContext { global_types, type_cache, config, + workspace, } } diff --git a/crates/jrsonnet-lsp/src/server/async_requests/commands/graph.rs b/crates/jrsonnet-lsp/src/server/async_requests/commands/graph.rs index 7f812c4c..30503843 100644 --- a/crates/jrsonnet-lsp/src/server/async_requests/commands/graph.rs +++ b/crates/jrsonnet-lsp/src/server/async_requests/commands/graph.rs @@ -9,11 +9,8 @@ impl AsyncRequestContext { ) -> Option { let uri_parsed: lsp_types::Uri = uri.parse().ok()?; let path = 
CanonicalPath::from_uri(&uri_parsed).ok()?; - + let importers = self.ensure_precise_transitive_importers(&path); let import_graph = self.import_graph.read(); - let importers = import_graph - .file(&path) - .map_or_else(Vec::new, |file| import_graph.transitive_importers(file)); let mut importer_uris: Vec = importers .iter() .filter_map(|file| { diff --git a/crates/jrsonnet-lsp/src/server/async_requests/import_graph_precision.rs b/crates/jrsonnet-lsp/src/server/async_requests/import_graph_precision.rs new file mode 100644 index 00000000..2ec0e0fd --- /dev/null +++ b/crates/jrsonnet-lsp/src/server/async_requests/import_graph_precision.rs @@ -0,0 +1,172 @@ +use std::sync::atomic::Ordering; + +use jrsonnet_lsp_document::{CanonicalPath, FileId}; +use jrsonnet_lsp_import::{ImportParseMode, ImportResolution}; +use rayon::prelude::*; +use tracing::debug; + +use super::AsyncRequestContext; +use crate::{ + analysis::tanka::effective_import_roots, + server::workspace_index::collect_workspace_files_for_roots, +}; + +impl AsyncRequestContext { + pub(super) fn ensure_workspace_discovered(&self) { + if self.workspace.discovery_done.load(Ordering::Acquire) { + return; + } + + let _guard = self.workspace.graph_materialization_lock.lock(); + if self.workspace.discovery_done.load(Ordering::Acquire) { + return; + } + if self.workspace.roots.is_empty() { + self.workspace.discovery_done.store(true, Ordering::Release); + return; + } + + let discovered_paths = collect_workspace_files_for_roots(&self.workspace.roots); + let discovered_files = discovered_paths + .into_iter() + .map(|path| self.documents.intern(&path)) + .collect::>(); + + { + let mut known = self.workspace.known_files.write(); + known.extend(discovered_files.iter().copied()); + } + { + let mut dirty = self.workspace.dirty_files.write(); + dirty.extend( + discovered_files + .iter() + .copied() + .filter(|file| !self.documents.is_open_file(*file)), + ); + } + + self.workspace.discovery_done.store(true, Ordering::Release); + 
debug!("Discovered {} workspace files", discovered_files.len()); + } + + pub(super) fn ensure_workspace_graph_materialized(&self) { + self.ensure_workspace_discovered(); + self.materialize_workspace_dirty_files(); + } + + pub(super) fn ensure_file_materialized(&self, path: &CanonicalPath) -> Option { + self.ensure_workspace_discovered(); + + let file = self.documents.intern(path); + self.workspace.known_files.write().insert(file); + + let is_tracked = self.import_graph.read().parse_mode(file).is_some(); + if !is_tracked { + if self.documents.is_open_file(file) { + self.reparse_files_precisely(&[file]); + } else { + self.workspace.dirty_files.write().insert(file); + } + } + + self.materialize_workspace_dirty_files(); + self.import_graph.read().file(path) + } + + pub(super) fn ensure_precise_transitive_importers(&self, path: &CanonicalPath) -> Vec { + self.ensure_workspace_graph_materialized(); + let Some(root_file) = self.ensure_file_materialized(path) else { + return Vec::new(); + }; + + let importers = self.import_graph.read().transitive_importers(root_file); + self.ensure_precise_import_graph_files(&importers); + self.import_graph.read().transitive_importers(root_file) + } + + fn materialize_workspace_dirty_files(&self) { + let _guard = self.workspace.graph_materialization_lock.lock(); + let dirty_files = { + let mut dirty = self.workspace.dirty_files.write(); + if dirty.is_empty() { + return; + } + dirty.drain().collect::>() + }; + + self.reparse_files_with_mode(&dirty_files, false); + } + + fn reparse_files_precisely(&self, files: &[FileId]) { + self.reparse_files_with_mode(files, true); + } + + fn reparse_files_with_mode(&self, files: &[FileId], precise_only: bool) { + if files.is_empty() { + return; + } + + let (jpath, resolve_paths_with_tanka) = { + let config = self.config.read(); + (config.jpath.clone(), config.resolve_paths_with_tanka) + }; + + let parsed = files + .par_iter() + .map(|file| { + let Some(path) = self.documents.path(*file) else { + return 
(*file, None); + }; + let Some(doc) = self.documents.get_document_file(*file) else { + return (*file, None); + }; + + let parse_mode = if precise_only || self.documents.is_open_file(*file) { + ImportParseMode::Precise + } else { + ImportParseMode::Approximate + }; + + let import_roots = effective_import_roots( + path.as_ref().as_path(), + &jpath, + resolve_paths_with_tanka, + ); + let import_resolution = ImportResolution::new(path.as_ref(), &import_roots); + let entries = match parse_mode { + ImportParseMode::Precise => import_resolution.parse_entries(&doc), + ImportParseMode::Approximate => { + import_resolution.parse_entries_approximate(&doc) + } + }; + (*file, Some((entries, parse_mode))) + }) + .collect::>(); + + let mut graph = self.import_graph.write(); + for (file, parsed_entry) in parsed { + if let Some((entries, parse_mode)) = parsed_entry { + graph.update_file_with_entries_mode(file, entries, parse_mode); + } else { + graph.remove_file(file); + } + } + } + + fn ensure_precise_import_graph_files(&self, files: &[FileId]) { + let to_upgrade = { + let graph = self.import_graph.read(); + files + .iter() + .copied() + .filter(|file| !graph.is_precise(*file)) + .collect::>() + }; + if to_upgrade.is_empty() { + return; + } + + self.reparse_files_precisely(&to_upgrade); + } +} diff --git a/crates/jrsonnet-lsp/src/server/async_requests/references.rs b/crates/jrsonnet-lsp/src/server/async_requests/references.rs index 7d9c7423..5f1a3fa2 100644 --- a/crates/jrsonnet-lsp/src/server/async_requests/references.rs +++ b/crates/jrsonnet-lsp/src/server/async_requests/references.rs @@ -22,12 +22,7 @@ impl AsyncRequestContext { semantic.as_deref(), ); - let importers = { - let import_graph = self.import_graph.read(); - import_graph - .file(&path) - .map_or_else(Vec::new, |file| import_graph.transitive_importers(file)) - }; + let importers = self.ensure_precise_transitive_importers(&path); let importer_docs: Vec<_> = importers .into_iter() diff --git 
a/crates/jrsonnet-lsp/src/server/async_requests/rename.rs b/crates/jrsonnet-lsp/src/server/async_requests/rename.rs index 735a0c27..5b9257fd 100644 --- a/crates/jrsonnet-lsp/src/server/async_requests/rename.rs +++ b/crates/jrsonnet-lsp/src/server/async_requests/rename.rs @@ -21,6 +21,7 @@ impl AsyncRequestContext { }; let lsp_pos = position.into(); + self.ensure_precise_transitive_importers(&path); let import_graph = self.import_graph.read(); handlers::rename_cross_file( diff --git a/crates/jrsonnet-lsp/src/server/async_requests/workspace_symbol.rs b/crates/jrsonnet-lsp/src/server/async_requests/workspace_symbol.rs index 4aa398b4..756357d0 100644 --- a/crates/jrsonnet-lsp/src/server/async_requests/workspace_symbol.rs +++ b/crates/jrsonnet-lsp/src/server/async_requests/workspace_symbol.rs @@ -13,10 +13,18 @@ impl AsyncRequestContext { params: &WorkspaceSymbolParams, ) -> Option { let query = ¶ms.query; + self.ensure_workspace_discovered(); let files = { - let import_graph = self.import_graph.read(); - unique_files(import_graph.all_files().chain(self.documents.open_files())) + let known_workspace_files = self.workspace.known_files.read(); + let graph_files = self.import_graph.read().all_files().collect::>(); + unique_files( + known_workspace_files + .iter() + .copied() + .chain(graph_files) + .chain(self.documents.open_files()), + ) }; let all_symbols: Vec = files diff --git a/crates/jrsonnet-lsp/src/server/import_graph.rs b/crates/jrsonnet-lsp/src/server/import_graph.rs index b6d1787e..2e0ce991 100644 --- a/crates/jrsonnet-lsp/src/server/import_graph.rs +++ b/crates/jrsonnet-lsp/src/server/import_graph.rs @@ -6,22 +6,25 @@ impl Server { /// Parses the document's import statements and updates the graph /// so that cross-file references can be found efficiently. 
pub(super) fn update_import_graph(&self, file: FileId) { - Self::update_import_graph_for_file(&self.documents, &self.import_graph, &self.config, file); + Self::update_import_graph_for_file_mode( + &self.documents, + &self.import_graph, + &self.config, + file, + jrsonnet_lsp_import::ImportParseMode::Precise, + ); } - pub(super) fn tracked_files_for_reindex(&self) -> Vec { - let known_files = { - let import_graph = self.import_graph.read(); - import_graph.all_files().collect::>() - }; - unique_files(known_files.into_iter().chain(self.documents.open_files())) + pub(super) fn unresolved_files_for_reindex(&self) -> Vec { + self.import_graph.read().files_with_unresolved_imports() } - pub(super) fn update_import_graph_for_file( + pub(super) fn update_import_graph_for_file_mode( documents: &SharedDocumentManager, import_graph: &Arc>, config: &SharedConfig, file: FileId, + parse_mode: jrsonnet_lsp_import::ImportParseMode, ) { let entries = { let Some(path) = documents.path(file) else { @@ -43,12 +46,21 @@ impl Server { // Parse imports OUTSIDE the graph lock to minimize lock hold time. let import_resolution = ImportResolution::new(path.as_ref(), &import_roots); - import_resolution.parse_entries(&doc) + match parse_mode { + jrsonnet_lsp_import::ImportParseMode::Precise => { + import_resolution.parse_entries(&doc) + } + jrsonnet_lsp_import::ImportParseMode::Approximate => { + import_resolution.parse_entries_approximate(&doc) + } + } }; // Acquire the write lock and perform the graph update; entry file-id // resolution is handled defensively by the graph update path. - import_graph.write().update_file_with_entries(file, entries); + import_graph + .write() + .update_file_with_entries_mode(file, entries, parse_mode); } /// Schedule diagnostics for currently-open files that import `file`. 
diff --git a/crates/jrsonnet-lsp/src/server/notifications.rs b/crates/jrsonnet-lsp/src/server/notifications.rs index a9082ade..fae043cc 100644 --- a/crates/jrsonnet-lsp/src/server/notifications.rs +++ b/crates/jrsonnet-lsp/src/server/notifications.rs @@ -93,6 +93,7 @@ impl Server { let text = params.text_document.text; let version = DocVersion::new(params.text_document.version); let file = self.documents.intern(&path); + self.register_known_workspace_file(file); self.documents.open(path.clone(), text, version); self.documents.refresh_semantic_artifacts(&path); @@ -102,6 +103,7 @@ impl Server { // Update import graph self.update_import_graph(file); + self.mark_workspace_file_clean(file); // Publish diagnostics self.schedule_diagnostics_file(file); @@ -119,6 +121,7 @@ impl Server { let version = DocVersion::new(params.text_document.version); let file = self.documents.intern(&path); + self.register_known_workspace_file(file); // Process each change (INCREMENTAL sync may send multiple changes) for change in params.content_changes { @@ -144,6 +147,7 @@ impl Server { // Update import graph (imports may have changed) self.update_import_graph(file); + self.mark_workspace_file_clean(file); // Publish diagnostics self.schedule_diagnostics_file(file); @@ -159,6 +163,7 @@ impl Server { return Ok(()); }; let file = self.documents.intern(&path); + self.register_known_workspace_file(file); self.documents.close(&path); @@ -169,6 +174,7 @@ impl Server { // Keep import graph semantics for closed documents by re-indexing from // cached/disk content instead of dropping the file node. 
self.update_import_graph(file); + self.mark_workspace_file_clean(file); self.schedule_diagnostics_for_open_importers(file); // Clear diagnostics for closed document @@ -191,6 +197,7 @@ impl Server { return; }; let file = self.documents.intern(&path); + self.register_known_workspace_file(file); if let Some(text) = params.text { let Some(doc) = self.documents.get(&path) else { @@ -207,6 +214,7 @@ impl Server { self.invalidate_type_cache_with_dependents(file); self.update_import_graph(file); + self.mark_workspace_file_clean(file); if self.documents.is_open_file(file) { self.schedule_diagnostics_file(file); } @@ -245,9 +253,10 @@ impl Server { }; if let Some(updated_config) = updated_config { - let runtime_config_changed = old_config.jpath != updated_config.jpath - || old_config.enable_eval_diagnostics != updated_config.enable_eval_diagnostics + let import_resolution_changed = old_config.jpath != updated_config.jpath || old_config.resolve_paths_with_tanka != updated_config.resolve_paths_with_tanka; + let runtime_config_changed = import_resolution_changed + || old_config.enable_eval_diagnostics != updated_config.enable_eval_diagnostics; let diagnostics_config_changed = old_config.enable_lint_diagnostics != updated_config.enable_lint_diagnostics; let inlay_hints_config_changed = old_config.inlay_hints != updated_config.inlay_hints; @@ -257,12 +266,19 @@ impl Server { debug!("Runtime components reconfigured after settings update"); } - if runtime_config_changed { + if import_resolution_changed { // Import resolution and cached file types depend on jpath/tanka settings. 
self.type_cache.write().clear(); + self.mark_workspace_known_files_dirty(); + let tracked_files = { + let graph = self.import_graph.read(); + graph.all_files().collect::>() + }; + self.mark_workspace_files_dirty(tracked_files); - for file in self.tracked_files_for_reindex() { + for file in self.documents.open_files() { self.update_import_graph(file); + self.mark_workspace_file_clean(file); } } @@ -291,29 +307,36 @@ impl Server { /// while not being open in the editor. pub(super) fn on_did_change_watched_files(&self, params: DidChangeWatchedFilesParams) { let mut changed_files = Vec::new(); - let mut requires_global_reindex = false; for change in params.changes { let Ok(path) = CanonicalPath::from_uri(&change.uri) else { continue; }; let file = self.documents.intern(&path); + self.register_known_workspace_file(file); self.invalidate_type_cache_with_dependents(file); match change.typ { FileChangeType::DELETED => { + let importers = self.import_graph.read().transitive_importers(file); + changed_files.extend(importers.iter().copied()); + self.mark_workspace_files_dirty(importers.iter().copied()); self.documents.remove_closed(&path); self.import_graph.write().remove_file(file); - requires_global_reindex = true; + self.remove_known_workspace_file(file); + self.mark_workspace_file_clean(file); } FileChangeType::CHANGED | FileChangeType::CREATED => { - if !self.documents.is_open(&path) { + if self.documents.is_open(&path) { + self.update_import_graph(file); + self.mark_workspace_file_clean(file); + } else { self.documents.refresh_closed_from_disk(&path); + self.mark_workspace_file_dirty(file); } - self.update_import_graph(file); if change.typ == FileChangeType::CREATED { - requires_global_reindex = true; + self.mark_workspace_files_dirty(self.unresolved_files_for_reindex()); } } _ => {} @@ -322,12 +345,6 @@ impl Server { changed_files.push(file); } - if requires_global_reindex { - for file in self.tracked_files_for_reindex() { - self.update_import_graph(file); - } - } - 
for file in unique_files(changed_files) { if self.documents.is_open_file(file) { self.schedule_diagnostics_file(file); diff --git a/crates/jrsonnet-lsp/src/server/workspace_index.rs b/crates/jrsonnet-lsp/src/server/workspace_index.rs index 7ff24771..e2d3ff2b 100644 --- a/crates/jrsonnet-lsp/src/server/workspace_index.rs +++ b/crates/jrsonnet-lsp/src/server/workspace_index.rs @@ -1,43 +1,59 @@ -use std::{ - path::{Path, PathBuf}, - sync::Arc, -}; +use std::path::{Path, PathBuf}; -use jrsonnet_lsp_document::CanonicalPath; -use tracing::{debug, info, warn}; +use jrsonnet_lsp_document::{CanonicalPath, FileId}; +use rustc_hash::FxHashSet; +use tracing::{debug, warn}; use super::{InitializeRoots, Server}; -impl Server { - pub(super) fn schedule_workspace_index_bootstrap(&self, roots: Vec) { - if roots.is_empty() { - debug!("No workspace roots provided; skipping startup index bootstrap"); - return; - } +pub(crate) fn collect_workspace_files_for_roots(roots: &[PathBuf]) -> Vec { + let mut files = Vec::new(); + let mut seen_paths: FxHashSet = FxHashSet::default(); - let documents = Arc::clone(&self.documents); - let import_graph = Arc::clone(&self.import_graph); - let config = Arc::clone(&self.config); - rayon::spawn(move || { - let mut files = Vec::new(); - for root in &roots { - files.extend(Self::collect_workspace_files(root)); + for root in roots { + collect_workspace_files(root, |path| { + let key = path.as_path().to_path_buf(); + if seen_paths.insert(key) { + files.push(path); } - files.sort_by(|a, b| a.as_path().cmp(b.as_path())); - files.dedup(); + true + }); + } - let file_count = files.len(); - for path in &files { - let file = documents.intern(path); - Self::update_import_graph_for_file(&documents, &import_graph, &config, file); - } + files.sort_by(|lhs, rhs| lhs.as_path().cmp(rhs.as_path())); + files +} - info!( - "Startup workspace index bootstrap complete: indexed {} files across {} roots", - file_count, - roots.len() - ); - }); +impl Server { + pub(super) fn 
register_known_workspace_file(&self, file: FileId) { + self.workspace_known_files.write().insert(file); + } + + pub(super) fn remove_known_workspace_file(&self, file: FileId) { + self.workspace_known_files.write().remove(&file); + } + + pub(super) fn mark_workspace_file_dirty(&self, file: FileId) { + self.workspace_dirty_files.write().insert(file); + } + + pub(super) fn mark_workspace_files_dirty(&self, files: impl IntoIterator) { + self.workspace_dirty_files.write().extend(files); + } + + pub(super) fn mark_workspace_file_clean(&self, file: FileId) { + self.workspace_dirty_files.write().remove(&file); + } + + pub(super) fn mark_workspace_known_files_dirty(&self) { + let known = self + .workspace_known_files + .read() + .iter() + .copied() + .collect::>(); + self.mark_workspace_files_dirty(known); + debug!("Marked all known workspace files dirty"); } pub(super) fn workspace_root_paths(init_roots: &InitializeRoots) -> Vec { @@ -80,55 +96,59 @@ impl Server { } None } +} - fn collect_workspace_files(root: &Path) -> Vec { - let mut files = Vec::new(); - let mut to_visit = vec![root.to_path_buf()]; +fn collect_workspace_files(root: &Path, mut on_file: F) +where + F: FnMut(CanonicalPath) -> bool, +{ + let mut to_visit = vec![root.to_path_buf()]; - while let Some(dir) = to_visit.pop() { - let Ok(entries) = std::fs::read_dir(&dir) else { + while let Some(dir) = to_visit.pop() { + let Ok(entries) = std::fs::read_dir(&dir) else { + continue; + }; + + for entry in entries.flatten() { + let path = entry.path(); + let Ok(file_type) = entry.file_type() else { continue; }; - for entry in entries.flatten() { - let path = entry.path(); - let Ok(file_type) = entry.file_type() else { - continue; - }; - - if file_type.is_dir() { - if Self::should_skip_workspace_dir(&path) { - continue; - } - to_visit.push(path); + if file_type.is_dir() { + if should_skip_workspace_dir(&path) { continue; } + to_visit.push(path); + continue; + } - if !file_type.is_file() || 
!Self::is_indexed_workspace_file(&path) { - continue; - } + if !file_type.is_file() || !is_indexed_workspace_file(&path) { + continue; + } - match CanonicalPath::try_from_path(&path) { - Ok(path) => files.push(path), - Err(err) => warn!("Skipping workspace file {}: {err}", path.to_string_lossy()), + match CanonicalPath::try_from_path(&path) { + Ok(path) => { + if !on_file(path) { + return; + } } + Err(err) => warn!("Skipping workspace file {}: {err}", path.to_string_lossy()), } } - - files } +} - fn should_skip_workspace_dir(path: &Path) -> bool { - let Some(name) = path.file_name().and_then(|name| name.to_str()) else { - return false; - }; - matches!(name, ".git" | ".jj" | ".svn" | "node_modules" | "target") - } +fn should_skip_workspace_dir(path: &Path) -> bool { + let Some(name) = path.file_name().and_then(|name| name.to_str()) else { + return false; + }; + matches!(name, ".git" | ".jj" | ".svn" | "node_modules" | "target") +} - fn is_indexed_workspace_file(path: &Path) -> bool { - let Some(extension) = path.extension().and_then(|extension| extension.to_str()) else { - return false; - }; - matches!(extension, "jsonnet" | "libsonnet" | "json") - } +fn is_indexed_workspace_file(path: &Path) -> bool { + let Some(extension) = path.extension().and_then(|extension| extension.to_str()) else { + return false; + }; + matches!(extension, "jsonnet" | "libsonnet" | "json") } From 67c61564a75eb9b2b8ecb030876a037302379e3f Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Fri, 27 Feb 2026 15:26:20 +0000 Subject: [PATCH 200/210] perf(lsp-import): add approximate parse mode tracking --- crates/jrsonnet-lsp-import/src/graph/mod.rs | 5 +- .../src/graph/operations.rs | 60 ++++++++++++- crates/jrsonnet-lsp-import/src/graph/parse.rs | 30 ++++++- crates/jrsonnet-lsp-import/src/graph/tests.rs | 90 +++++++++++++++++++ .../src/graph/traversal.rs | 28 ++++-- crates/jrsonnet-lsp-import/src/lib.rs | 5 +- .../src/resolve/parse_adapter.rs | 13 ++- 7 files changed, 213 insertions(+), 18 
deletions(-) diff --git a/crates/jrsonnet-lsp-import/src/graph/mod.rs b/crates/jrsonnet-lsp-import/src/graph/mod.rs index cd22709a..18c8888e 100644 --- a/crates/jrsonnet-lsp-import/src/graph/mod.rs +++ b/crates/jrsonnet-lsp-import/src/graph/mod.rs @@ -3,6 +3,7 @@ mod parse; mod traversal; pub use operations::{ - parse_document_import_occurrences, parse_document_imports, ImportEntry, ImportGraph, - ImportKind, ImportOccurrence, + parse_document_import_occurrences, parse_document_import_occurrences_approximate, + parse_document_imports, parse_document_imports_approximate, ImportEntry, ImportGraph, + ImportKind, ImportOccurrence, ImportParseMode, }; diff --git a/crates/jrsonnet-lsp-import/src/graph/operations.rs b/crates/jrsonnet-lsp-import/src/graph/operations.rs index 3e32d5dd..6dc6388f 100644 --- a/crates/jrsonnet-lsp-import/src/graph/operations.rs +++ b/crates/jrsonnet-lsp-import/src/graph/operations.rs @@ -8,7 +8,10 @@ use std::{collections::VecDeque, sync::Arc}; use jrsonnet_lsp_document::{CanonicalPath, Document, FileId, PathResolver, PathStore}; use rustc_hash::{FxHashMap, FxHashSet}; -pub use super::parse::{parse_document_import_occurrences, parse_document_imports}; +pub use super::parse::{ + parse_document_import_occurrences, parse_document_import_occurrences_approximate, + parse_document_imports, parse_document_imports_approximate, +}; /// Information about an import in a file. #[derive(Debug, Clone, PartialEq, Eq)] @@ -43,6 +46,15 @@ pub struct ImportOccurrence { pub import_range: rowan::TextRange, } +/// How import entries for a file were produced. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum ImportParseMode { + /// Entries come from the full AST path. + Precise, + /// Entries come from a cheaper token-only approximation. + Approximate, +} + /// Import graph tracking dependencies between files. 
/// /// This structure maintains two maps: @@ -62,6 +74,8 @@ pub struct ImportGraph { pub(super) resolved_code_imports: FxHashMap>>, /// Reverse index: file → files that import it. pub(super) imported_by: FxHashMap>, + /// Parse mode used for each file's current import entries. + pub(super) parse_modes: FxHashMap, } impl ImportGraph { @@ -76,6 +90,7 @@ impl ImportGraph { resolved_imports: FxHashMap::default(), resolved_code_imports: FxHashMap::default(), imported_by: FxHashMap::default(), + parse_modes: FxHashMap::default(), } } @@ -106,7 +121,17 @@ impl ImportGraph { /// This is the preferred method when you want to minimize lock hold time. /// Parse the imports first using [`parse_document_imports`], then call this /// method while holding the write lock. - pub fn update_file_with_entries(&mut self, file_id: FileId, mut entries: Vec) { + pub fn update_file_with_entries(&mut self, file_id: FileId, entries: Vec) { + self.update_file_with_entries_mode(file_id, entries, ImportParseMode::Precise); + } + + /// Update a file's imports in the graph with explicit parse mode. + pub fn update_file_with_entries_mode( + &mut self, + file_id: FileId, + mut entries: Vec, + parse_mode: ImportParseMode, + ) { self.resolve_entry_files(&mut entries); debug_assert!( entries @@ -144,6 +169,7 @@ impl ImportGraph { // Store the import entries self.imports.insert(file_id, entries); + self.parse_modes.insert(file_id, parse_mode); } /// Update a file's imports in the graph. @@ -205,6 +231,19 @@ impl ImportGraph { self.imports.remove(&file_id); self.resolved_imports.remove(&file_id); self.resolved_code_imports.remove(&file_id); + self.parse_modes.remove(&file_id); + } + + /// Get the parse mode for a file's stored import entries. + #[must_use] + pub fn parse_mode(&self, file: FileId) -> Option { + self.parse_modes.get(&file).copied() + } + + /// Returns true when a file currently has precise import entries. 
+ #[must_use] + pub fn is_precise(&self, file: FileId) -> bool { + matches!(self.parse_mode(file), Some(ImportParseMode::Precise)) } pub(super) fn direct_importers_by_id(&self, file_id: FileId) -> Vec { @@ -307,6 +346,23 @@ impl ImportGraph { self.imports.keys().copied() } + /// Get files that currently contain unresolved imports. + #[must_use] + pub fn files_with_unresolved_imports(&self) -> Vec { + let mut files = self + .imports + .iter() + .filter_map(|(&file, entries)| { + entries + .iter() + .any(|entry| entry.resolved_file.is_none()) + .then_some(file) + }) + .collect::>(); + files.sort_unstable(); + files + } + fn build_resolved_import_maps( entries: &[ImportEntry], ) -> (FxHashMap, FxHashMap) { diff --git a/crates/jrsonnet-lsp-import/src/graph/parse.rs b/crates/jrsonnet-lsp-import/src/graph/parse.rs index b01cb85c..6c902925 100644 --- a/crates/jrsonnet-lsp-import/src/graph/parse.rs +++ b/crates/jrsonnet-lsp-import/src/graph/parse.rs @@ -23,6 +23,20 @@ where .collect() } +/// Parse import statements using a token-only fast path. +/// +/// This is less precise than [`parse_document_imports`] for some malformed +/// constructs, but significantly cheaper for broad background indexing. +pub fn parse_document_imports_approximate(doc: &Document, resolve_import: &F) -> Vec +where + F: Fn(&str) -> Option, +{ + parse_document_import_occurrences_approximate(doc, resolve_import) + .into_iter() + .map(|occurrence| occurrence.entry) + .collect() +} + /// Parse import occurrences from a document with source ranges. /// /// This is useful for diagnostics where callers need to point at the exact @@ -77,7 +91,7 @@ where } // Fallback pass: recover imports from token stream for syntax-broken files. - occurrences.extend(parse_token_fallback_import_occurrences( + occurrences.extend(parse_token_import_occurrences( doc, resolve_import, &mut seen_string_ranges, @@ -86,6 +100,18 @@ where occurrences } +/// Parse import occurrences with a token-only approximation. 
+pub fn parse_document_import_occurrences_approximate( + doc: &Document, + resolve_import: &F, +) -> Vec +where + F: Fn(&str) -> Option, +{ + let mut seen_string_ranges = FxHashSet::default(); + parse_token_import_occurrences(doc, resolve_import, &mut seen_string_ranges) +} + /// Parse a bind to extract import information, returning the import's text range. fn parse_bind_import_with_range( bind: &Bind, @@ -148,7 +174,7 @@ where }) } -fn parse_token_fallback_import_occurrences( +fn parse_token_import_occurrences( doc: &Document, resolve_import: &F, seen_string_ranges: &mut FxHashSet, diff --git a/crates/jrsonnet-lsp-import/src/graph/tests.rs b/crates/jrsonnet-lsp-import/src/graph/tests.rs index 35b33d80..5fe84ed8 100644 --- a/crates/jrsonnet-lsp-import/src/graph/tests.rs +++ b/crates/jrsonnet-lsp-import/src/graph/tests.rs @@ -206,6 +206,10 @@ fn test_import_graph_lookups_with_equivalent_paths() { resolved_path: Some(lib_lookup), }] ); + assert_eq!( + graph.parse_mode(graph.intern(&main_lookup)), + Some(ImportParseMode::Precise) + ); } #[test] @@ -321,6 +325,61 @@ local text = importstr "data.txt"; assert!(graph.resolved_code_import_map(main_file).is_none()); } +#[test] +fn test_files_with_unresolved_imports() { + let mut graph = ImportGraph::new(PathStore::new()); + + let main = test_path("main.jsonnet"); + let unresolved = test_path("broken.jsonnet"); + + let unresolved_doc = Document::new( + r#"local x = import "missing.jsonnet"; x"#.to_string(), + DocVersion::new(1), + ); + graph.update_file(graph.intern(&unresolved), &unresolved_doc, |import| { + if import == "missing.jsonnet" { + None + } else { + simple_resolver(import) + } + }); + + let resolved_doc = Document::new( + r#"local lib = import "main.jsonnet"; lib"#.to_string(), + DocVersion::new(1), + ); + graph.update_file(graph.intern(&main), &resolved_doc, simple_resolver); + + let unresolved_files = graph_paths(&graph, graph.files_with_unresolved_imports()); + assert_eq!(unresolved_files, vec![unresolved]); 
+} + +#[test] +fn test_update_file_with_entries_mode_approximate() { + let mut graph = ImportGraph::new(PathStore::new()); + let main = test_path("main.jsonnet"); + let lib = test_path("lib.jsonnet"); + let main_file = graph.intern(&main); + + graph.update_file_with_entries_mode( + main_file, + vec![ImportEntry { + kind: ImportKind::Code, + binding_name: None, + import_path: "lib.jsonnet".to_string(), + resolved_file: Some(graph.intern(&lib)), + resolved_path: Some(lib), + }], + ImportParseMode::Approximate, + ); + + assert_eq!( + graph.parse_mode(main_file), + Some(ImportParseMode::Approximate) + ); + assert!(!graph.is_precise(main_file)); +} + #[test] fn test_topological_order_simple() { let mut graph = ImportGraph::new(PathStore::new()); @@ -623,6 +682,37 @@ fn test_process_importers_with_work_queue() { assert_eq!(order, vec![lib, utils, main]); } +#[test] +fn test_importer_levels_root_first() { + let mut graph = ImportGraph::new(PathStore::new()); + + let main = test_path("main.jsonnet"); + let utils = test_path("utils.jsonnet"); + let lib = test_path("lib.jsonnet"); + + let lib_doc = Document::new("{}".to_string(), DocVersion::new(1)); + graph.update_file(graph.intern(&lib), &lib_doc, simple_resolver); + + let utils_doc = Document::new( + r#"local lib = import "lib.jsonnet"; lib"#.to_string(), + DocVersion::new(1), + ); + graph.update_file(graph.intern(&utils), &utils_doc, simple_resolver); + + let main_doc = Document::new( + r#"local utils = import "utils.jsonnet"; utils"#.to_string(), + DocVersion::new(1), + ); + graph.update_file(graph.intern(&main), &main_doc, simple_resolver); + + let levels = graph.importer_levels(graph.intern(&lib)); + let got = levels + .into_iter() + .map(|level| graph_paths(&graph, level)) + .collect::>(); + assert_eq!(got, vec![vec![lib], vec![utils], vec![main]]); +} + #[test] fn test_process_with_dependencies_unknown_root_is_noop() { use std::sync::{Arc, Mutex}; diff --git a/crates/jrsonnet-lsp-import/src/graph/traversal.rs 
b/crates/jrsonnet-lsp-import/src/graph/traversal.rs index a21e013d..7d4eb4c2 100644 --- a/crates/jrsonnet-lsp-import/src/graph/traversal.rs +++ b/crates/jrsonnet-lsp-import/src/graph/traversal.rs @@ -210,25 +210,35 @@ impl ImportGraph { /// /// Useful for invalidation cascading: when a file changes, process it /// and all files that depend on it. - pub fn process_importers_with_work_queue(&self, root: FileId, f: F) - where - F: Fn(FileId) + Sync, - { + #[must_use] + pub fn importer_levels(&self, root: FileId) -> Vec> { let mut work = WorkQueue::new(); work.push(root); let mut levels = work.run(|path_id, deps| { - // Get files that import this file + // Get files that import this file. for importer in self.direct_importers_by_id(*path_id) { deps.push(importer); } }); - // Reverse levels: work queue puts leaves (files with no importers) at level 0, - // but we want root first, then progressively outward to importers + // Work queue returns leaves first; importers should be processed root-first. levels.reverse(); + levels + } - // Process levels (root first, then importers) - levels.process_parallel(|path_id| f(*path_id)); + /// Process a file and its transitive importers using a work queue. + /// + /// This processes files in reverse dependency order - the root file first, + /// then files that import it, and so on. Uses per-level parallelism. + /// + /// Useful for invalidation cascading: when a file changes, process it + /// and all files that depend on it. 
+ pub fn process_importers_with_work_queue(&self, root: FileId, f: F) + where + F: Fn(FileId) + Sync, + { + self.importer_levels(root) + .process_parallel(|path_id| f(*path_id)); } } diff --git a/crates/jrsonnet-lsp-import/src/lib.rs b/crates/jrsonnet-lsp-import/src/lib.rs index ddda2f6e..e7159c3d 100644 --- a/crates/jrsonnet-lsp-import/src/lib.rs +++ b/crates/jrsonnet-lsp-import/src/lib.rs @@ -11,8 +11,9 @@ pub mod resolve; pub mod work_queue; pub use graph::{ - parse_document_import_occurrences, parse_document_imports, ImportEntry, ImportGraph, - ImportKind, ImportOccurrence, + parse_document_import_occurrences, parse_document_import_occurrences_approximate, + parse_document_imports, parse_document_imports_approximate, ImportEntry, ImportGraph, + ImportKind, ImportOccurrence, ImportParseMode, }; pub use parse::{ check_import_from_token, check_import_path, extract_import_path, find_import_in_node, diff --git a/crates/jrsonnet-lsp-import/src/resolve/parse_adapter.rs b/crates/jrsonnet-lsp-import/src/resolve/parse_adapter.rs index 553e3267..ad57632b 100644 --- a/crates/jrsonnet-lsp-import/src/resolve/parse_adapter.rs +++ b/crates/jrsonnet-lsp-import/src/resolve/parse_adapter.rs @@ -2,7 +2,8 @@ use jrsonnet_lsp_document::{CanonicalPath, Document}; use super::{path_resolution::resolve_import_path, ImportResolution}; use crate::graph::{ - parse_document_import_occurrences, parse_document_imports, ImportEntry, ImportOccurrence, + parse_document_import_occurrences, parse_document_import_occurrences_approximate, + parse_document_imports, parse_document_imports_approximate, ImportEntry, ImportOccurrence, }; impl ImportResolution<'_> { @@ -16,10 +17,20 @@ impl ImportResolution<'_> { parse_document_imports(doc, &|import_path| self.resolve(import_path)) } + #[must_use] + pub fn parse_entries_approximate(self, doc: &Document) -> Vec { + parse_document_imports_approximate(doc, &|import_path| self.resolve(import_path)) + } + #[must_use] pub fn parse_occurrences(self, doc: 
&Document) -> Vec { parse_document_import_occurrences(doc, &|import_path| self.resolve(import_path)) } + + #[must_use] + pub fn parse_occurrences_approximate(self, doc: &Document) -> Vec { + parse_document_import_occurrences_approximate(doc, &|import_path| self.resolve(import_path)) + } } #[cfg(test)] From f91248186ad66f06c8e9a480f3145acd20c5b310 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Fri, 27 Feb 2026 16:46:52 +0000 Subject: [PATCH 201/210] fix(evaluator): reintroduce lazy-safe import cycle detection --- crates/jrsonnet-evaluator/src/lib.rs | 325 ++++++++++++++++-- .../lazy_import_cycle_env/cycle.libsonnet | 3 + .../default/ConfigMap-lazy-cycle.golden | 7 + .../lazy_import_cycle_env/jsonnetfile.json | 1 + .../lazy_import_cycle_env/lazy.libsonnet | 4 + .../lazy_import_cycle_env/main.jsonnet | 25 ++ 6 files changed, 336 insertions(+), 29 deletions(-) create mode 100644 test_fixtures/golden_envs/lazy_import_cycle_env/cycle.libsonnet create mode 100644 test_fixtures/golden_envs/lazy_import_cycle_env/golden/default/ConfigMap-lazy-cycle.golden create mode 100644 test_fixtures/golden_envs/lazy_import_cycle_env/jsonnetfile.json create mode 100644 test_fixtures/golden_envs/lazy_import_cycle_env/lazy.libsonnet create mode 100644 test_fixtures/golden_envs/lazy_import_cycle_env/main.jsonnet diff --git a/crates/jrsonnet-evaluator/src/lib.rs b/crates/jrsonnet-evaluator/src/lib.rs index 1fd4305e..72262fe2 100644 --- a/crates/jrsonnet-evaluator/src/lib.rs +++ b/crates/jrsonnet-evaluator/src/lib.rs @@ -74,6 +74,7 @@ thread_local! 
{ lenient_super: false, use_go_style_floats: true, }) }; + static ACTIVE_IMPORT_STACK: RefCell> = const { RefCell::new(Vec::new()) }; } pub(crate) fn active_behavior() -> EvaluationBehavior { @@ -100,6 +101,76 @@ pub(crate) fn with_active_behavior( f() } +#[derive(Clone, Copy, Debug, Trace, PartialEq, Eq)] +enum ImportEvaluationState { + Idle, + Evaluating, +} + +fn import_cycle_description(cycle: &[SourcePath]) -> String { + let chain = cycle + .iter() + .map(ToString::to_string) + .collect::>() + .join(" -> "); + format!("import cycle detected: {chain}") +} + +struct ImportEvaluationGuard { + state: State, + path: SourcePath, +} + +impl ImportEvaluationGuard { + fn enter(state: &State, path: &SourcePath, file: &mut FileData) -> Result { + let cycle = ACTIVE_IMPORT_STACK.with(|stack| { + let stack = stack.borrow(); + stack + .iter() + .position(|stack_path| stack_path == path) + .map(|cycle_start| { + let mut cycle = stack[cycle_start..].to_vec(); + cycle.push(path.clone()); + cycle + }) + }); + if let Some(cycle) = cycle { + let mut err = Error::from(InfiniteRecursionDetected); + err.trace_mut().0.push(error::StackTraceElement { + location: None, + desc: import_cycle_description(&cycle), + }); + return Err(err); + } + + if matches!(file.import_state, ImportEvaluationState::Evaluating) { + bail!(InfiniteRecursionDetected); + } + + ACTIVE_IMPORT_STACK.with(|stack| stack.borrow_mut().push(path.clone())); + file.import_state = ImportEvaluationState::Evaluating; + Ok(Self { + state: state.clone(), + path: path.clone(), + }) + } +} + +impl Drop for ImportEvaluationGuard { + fn drop(&mut self) { + ACTIVE_IMPORT_STACK.with(|stack| { + let mut stack = stack.borrow_mut(); + let popped = stack.pop(); + debug_assert_eq!(popped.as_ref(), Some(&self.path)); + }); + + let mut file_cache = self.state.file_cache(); + if let RawEntryMut::Occupied(mut file) = file_cache.raw_entry_mut().from_key(&self.path) { + file.get_mut().import_state = ImportEvaluationState::Idle; + } + } +} + 
/// Thunk without bound `super`/`this` /// object inheritance may be overriden multiple times, and will be fixed only on field read pub trait Unbound: Trace { @@ -226,7 +297,7 @@ struct FileData { parsed: Option, evaluated: Option, - evaluating: bool, + import_state: ImportEvaluationState, } impl FileData { fn new_string(data: IStr) -> Self { @@ -235,7 +306,7 @@ impl FileData { bytes: None, parsed: None, evaluated: None, - evaluating: false, + import_state: ImportEvaluationState::Idle, } } fn new_bytes(data: IBytes) -> Self { @@ -244,7 +315,7 @@ impl FileData { bytes: Some(data), parsed: None, evaluated: None, - evaluating: false, + import_state: ImportEvaluationState::Idle, } } pub(crate) fn get_string(&mut self) -> Option { @@ -371,35 +442,23 @@ impl State { ); } let parsed = file.parsed.as_ref().expect("just set").clone(); - // RELAXED: Allow re-importing files during evaluation to support lazy evaluation patterns. - // In Jsonnet, it's valid to have apparent "circular" imports as long as they're in lazy - // thunks that don't get evaluated. For example: - // { value: if cond then (import 'self.libsonnet').other else 42 } - // The original check was too strict and prevented legitimate patterns that Go Tanka handles. - // Real infinite recursion is still caught by: - // 1. Thunk Pending state (val.rs:105) - // 2. Stack depth limits (stack.rs) - // 3. 
Pending value access (dynamic.rs:44) - // if file.evaluating { - // bail!(InfiniteRecursionDetected) - // } - file.evaluating = true; - // Dropping file cache guard here, as evaluation may use this map too - drop(file_cache); - let res = with_active_behavior(self.behavior(), || { - evaluate(self.create_default_context(file_name), &parsed) - }); - - let mut file_cache = self.file_cache(); - let mut file = file_cache.raw_entry_mut().from_key(&path); - - let RawEntryMut::Occupied(file) = &mut file else { - unreachable!("this file was just here!") + let res = { + let _import_guard = ImportEvaluationGuard::enter(self, &path, file)?; + // Drop file cache guard here, as evaluation may use this map too. + drop(file_cache); + with_active_behavior(self.behavior(), || { + evaluate(self.create_default_context(file_name), &parsed) + }) }; - let file = file.get_mut(); - file.evaluating = false; + match res { Ok(v) => { + let mut file_cache = self.file_cache(); + let mut file = file_cache.raw_entry_mut().from_key(&path); + let RawEntryMut::Occupied(file) = &mut file else { + unreachable!("this file was just here!") + }; + let file = file.get_mut(); file.evaluated = Some(v.clone()); Ok(v) } @@ -612,3 +671,211 @@ impl StateBuilder { })) } } + +#[cfg(test)] +mod tests { + use std::{ + fs, + path::{Path, PathBuf}, + process, + sync::{Arc, Barrier}, + thread, + time::{SystemTime, UNIX_EPOCH}, + }; + + use super::*; + use crate::error::ErrorKind; + + struct TempFixture { + root: PathBuf, + } + + impl TempFixture { + fn new(prefix: &str) -> Self { + let nanos = SystemTime::now() + .duration_since(UNIX_EPOCH) + .expect("clock should be monotonic") + .as_nanos(); + let root = std::env::temp_dir().join(format!( + "jrsonnet-evaluator-{prefix}-{}-{nanos}", + process::id() + )); + fs::create_dir_all(&root).expect("fixture dir should be created"); + Self { root } + } + + fn write(&self, relative: &str, contents: &str) { + let path = self.root.join(relative); + if let Some(parent) = 
path.parent() { + fs::create_dir_all(parent).expect("fixture parent dir should be created"); + } + fs::write(path, contents).expect("fixture file should be written"); + } + + fn path(&self, relative: &str) -> PathBuf { + self.root.join(relative) + } + } + + impl Drop for TempFixture { + fn drop(&mut self) { + let _ = fs::remove_dir_all(&self.root); + } + } + + fn state_with_file_imports(root: &Path) -> State { + let mut builder = State::builder(); + builder.import_resolver(FileImportResolver::new(vec![root.to_path_buf()])); + builder.build() + } + + fn expect_infinite_recursion(err: &Error) { + assert!( + matches!(err.error(), ErrorKind::InfiniteRecursionDetected), + "expected InfiniteRecursionDetected, got: {err}" + ); + } + + fn expect_cycle_trace_path_order(err: &Error, path_order: &[&str]) { + let Some(cycle_trace) = err + .trace() + .0 + .iter() + .find(|frame| frame.desc.starts_with("import cycle detected:")) + else { + panic!("expected import cycle trace frame, got: {err}"); + }; + + let mut cursor = 0usize; + for path_fragment in path_order { + let haystack = &cycle_trace.desc[cursor..]; + let Some(rel_pos) = haystack.find(path_fragment) else { + panic!( + "expected path fragment {path_fragment:?} in cycle trace {:?}", + cycle_trace.desc + ); + }; + cursor += rel_pos + path_fragment.len(); + } + } + + #[test] + fn import_cycle_reports_infinite_recursion() { + let fixture = TempFixture::new("cycle-top-level"); + fixture.write("a.jsonnet", "(import 'b.jsonnet') + 1"); + fixture.write("b.jsonnet", "(import 'a.jsonnet') + 1"); + + let state = state_with_file_imports(&fixture.root); + let err = state + .import(fixture.path("a.jsonnet")) + .expect_err("top-level import cycle must fail"); + expect_infinite_recursion(&err); + expect_cycle_trace_path_order(&err, &["a.jsonnet", "b.jsonnet", "a.jsonnet"]); + } + + #[test] + fn lazy_import_cycle_is_allowed_until_forced() { + let fixture = TempFixture::new("cycle-lazy"); + fixture.write( + "a.jsonnet", + r#" +{ + ok: 1, 
+ deferred: (import 'b.jsonnet').deferred, +} +"#, + ); + fixture.write( + "b.jsonnet", + r#" +{ + deferred: (import 'a.jsonnet').deferred, +} +"#, + ); + + let state = state_with_file_imports(&fixture.root); + let root = state + .import(fixture.path("a.jsonnet")) + .expect("lazy cycle should not fail before forcing"); + let Val::Obj(root_obj) = root else { + panic!("expected object root"); + }; + let ok = root_obj + .get("ok".into()) + .expect("reading non-cyclic field should succeed") + .expect("ok field should exist"); + assert_eq!(ok.to_string().expect("number to string").as_str(), "1"); + + let err = root_obj + .get("deferred".into()) + .expect_err("forcing cyclic field should fail"); + expect_infinite_recursion(&err); + } + + #[test] + fn behavior_flags_do_not_leak_across_threads() { + let start = Arc::new(Barrier::new(3)); + + let go_thread = { + let start = Arc::clone(&start); + thread::spawn(move || { + let mut builder = State::builder(); + builder.skip_assertions(false).use_go_style_floats(true); + let state = builder.build(); + start.wait(); + + for _ in 0..64 { + let rendered = state + .evaluate_snippet("", r#""%s" % 0.8"#) + .expect("go-style formatting should evaluate") + .to_string() + .expect("formatted value should render"); + assert_eq!(rendered.as_str(), "0.80000000000000004"); + + let err = state + .evaluate_snippet( + "", + r#"{ assert false : "boom", v: 1 }.v"#, + ) + .expect_err("assert-enabled state should fail"); + assert!(matches!(err.error(), ErrorKind::AssertionFailed(_))); + } + }) + }; + + let jr_thread = { + let start = Arc::clone(&start); + thread::spawn(move || { + let mut builder = State::builder(); + builder.skip_assertions(true).use_go_style_floats(false); + let state = builder.build(); + start.wait(); + + for _ in 0..64 { + let rendered = state + .evaluate_snippet("", r#""%s" % 0.8"#) + .expect("jr-style formatting should evaluate") + .to_string() + .expect("formatted value should render"); + assert_eq!(rendered.as_str(), 
"0.8"); + + let value = state + .evaluate_snippet( + "", + r#"{ assert false : "boom", v: 1 }.v"#, + ) + .expect("assert-skipped state should pass"); + assert_eq!( + value.to_string().expect("value should render").as_str(), + "1" + ); + } + }) + }; + + start.wait(); + go_thread.join().expect("go-style thread should finish"); + jr_thread.join().expect("jr-style thread should finish"); + } +} diff --git a/test_fixtures/golden_envs/lazy_import_cycle_env/cycle.libsonnet b/test_fixtures/golden_envs/lazy_import_cycle_env/cycle.libsonnet new file mode 100644 index 00000000..6f36b8a6 --- /dev/null +++ b/test_fixtures/golden_envs/lazy_import_cycle_env/cycle.libsonnet @@ -0,0 +1,3 @@ +{ + deferred: (import 'lazy.libsonnet').deferred, +} diff --git a/test_fixtures/golden_envs/lazy_import_cycle_env/golden/default/ConfigMap-lazy-cycle.golden b/test_fixtures/golden_envs/lazy_import_cycle_env/golden/default/ConfigMap-lazy-cycle.golden new file mode 100644 index 00000000..568b3e75 --- /dev/null +++ b/test_fixtures/golden_envs/lazy_import_cycle_env/golden/default/ConfigMap-lazy-cycle.golden @@ -0,0 +1,7 @@ +apiVersion: v1 +data: + message: lazy-cycle-ok +kind: ConfigMap +metadata: + name: lazy-cycle + namespace: default diff --git a/test_fixtures/golden_envs/lazy_import_cycle_env/jsonnetfile.json b/test_fixtures/golden_envs/lazy_import_cycle_env/jsonnetfile.json new file mode 100644 index 00000000..4aeabd69 --- /dev/null +++ b/test_fixtures/golden_envs/lazy_import_cycle_env/jsonnetfile.json @@ -0,0 +1 @@ +{"version":1} diff --git a/test_fixtures/golden_envs/lazy_import_cycle_env/lazy.libsonnet b/test_fixtures/golden_envs/lazy_import_cycle_env/lazy.libsonnet new file mode 100644 index 00000000..a0edf9ec --- /dev/null +++ b/test_fixtures/golden_envs/lazy_import_cycle_env/lazy.libsonnet @@ -0,0 +1,4 @@ +{ + message: 'lazy-cycle-ok', + deferred: (import 'cycle.libsonnet').deferred, +} diff --git a/test_fixtures/golden_envs/lazy_import_cycle_env/main.jsonnet 
b/test_fixtures/golden_envs/lazy_import_cycle_env/main.jsonnet new file mode 100644 index 00000000..8584e65d --- /dev/null +++ b/test_fixtures/golden_envs/lazy_import_cycle_env/main.jsonnet @@ -0,0 +1,25 @@ +local lazy = import 'lazy.libsonnet'; + +{ + apiVersion: 'tanka.dev/v1alpha1', + kind: 'Environment', + metadata: { + name: 'lazy-import-cycle', + }, + spec: { + namespace: 'default', + }, + data: { + 'ConfigMap-lazy-cycle': { + apiVersion: 'v1', + kind: 'ConfigMap', + metadata: { + name: 'lazy-cycle', + namespace: 'default', + }, + data: { + message: lazy.message, + }, + }, + }, +} From 9d66004effc6223e1bb42218e9c62b02bdd8d0a3 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Fri, 27 Feb 2026 16:50:10 +0000 Subject: [PATCH 202/210] jrsonnet-lsp: type-driven diagnostic dedupe and eval spans --- crates/jrsonnet-lsp-check/src/lint/mod.rs | 12 ++ crates/jrsonnet-lsp/src/analysis/eval.rs | 203 ++++++++++++------ crates/jrsonnet-lsp/src/analysis/mod.rs | 2 +- .../jrsonnet-lsp/src/handlers/diagnostics.rs | 93 +++++++- .../tests/integration_test/lifecycle.rs | 70 ++++++ .../diagnostics_eval_reports_source_span.yaml | 22 ++ .../diagnostics_type_mismatch_dedup_eval.yaml | 24 +++ 7 files changed, 350 insertions(+), 76 deletions(-) create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/diagnostics_eval_reports_source_span.yaml create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/diagnostics_type_mismatch_dedup_eval.yaml diff --git a/crates/jrsonnet-lsp-check/src/lint/mod.rs b/crates/jrsonnet-lsp-check/src/lint/mod.rs index ac90774c..23494983 100644 --- a/crates/jrsonnet-lsp-check/src/lint/mod.rs +++ b/crates/jrsonnet-lsp-check/src/lint/mod.rs @@ -82,6 +82,18 @@ impl LintConfig { | LintRule::DuplicateParams.bit(), } } + + /// Create a config with all lints enabled except type-error checks. 
+ #[must_use] + pub fn all_except_type_errors() -> Self { + Self { + enabled: LintRule::UnusedVariables.bit() + | LintRule::UnreachableCode.bit() + | LintRule::ShadowedVariables.bit() + | LintRule::DuplicateFields.bit() + | LintRule::DuplicateParams.bit(), + } + } } /// Run lint checks on a document. diff --git a/crates/jrsonnet-lsp/src/analysis/eval.rs b/crates/jrsonnet-lsp/src/analysis/eval.rs index 5597c1a2..677f8ebb 100644 --- a/crates/jrsonnet-lsp/src/analysis/eval.rs +++ b/crates/jrsonnet-lsp/src/analysis/eval.rs @@ -9,12 +9,14 @@ use std::{ }; use jrsonnet_evaluator::{ - error::Error as EvalError, trace::PathResolver, FileImportResolver, State, + error::{Error as EvalError, ErrorKind as EvalErrorKind}, + trace::PathResolver, + FileImportResolver, State, }; use jrsonnet_lsp_document::{CanonicalPath, LineIndex}; use jrsonnet_parser::{SourceFile, SourcePath}; use jrsonnet_stdlib::ContextInitializer; -use lsp_types::{Diagnostic, DiagnosticSeverity, NumberOrString}; +use lsp_types::{Diagnostic, DiagnosticSeverity, NumberOrString, Range}; use super::tanka; use crate::config::ResolvePathsWithTankaMode; @@ -80,6 +82,32 @@ pub struct Evaluator { tanka_mode: ResolvePathsWithTankaMode, } +#[derive(Debug, Clone)] +pub struct EvalDiagnostic { + pub error_kind: EvalErrorKind, + pub diagnostic: Diagnostic, +} + +impl EvalDiagnostic { + #[must_use] + pub const fn is_type_like(&self) -> bool { + matches!( + self.error_kind, + EvalErrorKind::UnaryOperatorDoesNotOperateOnType(_, _) + | EvalErrorKind::BinaryOperatorDoesNotOperateOnValues(_, _, _) + | EvalErrorKind::TypeMismatch(_, _, _) + | EvalErrorKind::NoSuchField(_, _) + | EvalErrorKind::OnlyFunctionsCanBeCalledGot(_) + | EvalErrorKind::FieldMustBeStringGot(_) + | EvalErrorKind::AttemptedIndexAnArrayWithString(_) + | EvalErrorKind::ValueIndexMustBeTypeGot(_, _, _) + | EvalErrorKind::CantIndexInto(_) + | EvalErrorKind::ValueIsNotIndexable(_) + | EvalErrorKind::TypeError(_) + ) + } +} + impl Evaluator { /// Create a new 
evaluator with the given configuration. #[must_use] @@ -107,7 +135,7 @@ impl Evaluator { path: &CanonicalPath, text: &str, line_index: &LineIndex, - ) -> Option { + ) -> Option { let jpath = self.get_jpath_for_file(path.as_path()); let state = create_state_with_jpath(&jpath); @@ -122,46 +150,13 @@ impl Evaluator { } /// Convert an evaluation error to an LSP diagnostic. -fn eval_error_to_diagnostic( +fn eval_error_to_lsp_diagnostic( err: &EvalError, file_path: &CanonicalPath, text: &str, line_index: &LineIndex, ) -> Diagnostic { - // Try to find the location in the error trace that matches our file - let mut range = lsp_types::Range::default(); - let mut found_location = false; - - let trace = err.trace(); - for element in &trace.0 { - if let Some(span) = &element.location { - // Check if this span is from our file - let span_path = span.0.source_path(); - if let Some(span_file) = span_path.downcast_ref::() { - if span_file.path() == file_path.as_path() { - // Convert byte offsets to LSP positions - let start_offset = span.1; - let end_offset = span.2; - - if let Some(start) = line_index.position(start_offset.into(), text) { - range.start = start.into(); - if let Some(end) = line_index.position(end_offset.into(), text) { - range.end = end.into(); - } else { - range.end = range.start; - } - found_location = true; - break; - } - } - } - } - } - - // If we didn't find a location in the trace, use the beginning of the file - if !found_location { - range = lsp_types::Range::default(); - } + let range = find_trace_range_in_file(err, file_path, text, line_index).unwrap_or_default(); // Format the error message let error_kind = err.error(); @@ -180,10 +175,58 @@ fn eval_error_to_diagnostic( } } +/// Convert an evaluation error to a typed diagnostic. 
+fn eval_error_to_diagnostic( + err: &EvalError, + file_path: &CanonicalPath, + text: &str, + line_index: &LineIndex, +) -> EvalDiagnostic { + let diagnostic = eval_error_to_lsp_diagnostic(err, file_path, text, line_index); + EvalDiagnostic { + error_kind: err.error().clone(), + diagnostic, + } +} + +fn find_trace_range_in_file( + err: &EvalError, + file_path: &CanonicalPath, + text: &str, + line_index: &LineIndex, +) -> Option { + let file_display = file_path.as_path().display().to_string(); + for element in &err.trace().0 { + let Some(span) = &element.location else { + continue; + }; + + let span_path = span.0.source_path(); + let in_current_file = span_path + .path() + .is_some_and(|path| path == file_path.as_path()) + || span_path.to_string() == file_display; + if !in_current_file { + continue; + } + + if let Some(start) = line_index.position(span.1.into(), text) { + let end = line_index.position(span.2.into(), text).unwrap_or(start); + return Some(Range { + start: start.into(), + end: end.into(), + }); + } + } + + None +} + #[cfg(test)] mod tests { use std::fs; + use assert_matches::assert_matches; use jrsonnet_lsp_document::{DocVersion, Document}; use tempfile::TempDir; @@ -194,36 +237,27 @@ mod tests { } /// Assert that a diagnostic has the expected eval-error structure. 
- /// - /// Checks that: - /// - severity is ERROR - /// - code is "eval-error" - /// - source is "jrsonnet-eval" - /// - message contains the expected substring(s) - fn assert_eval_diagnostic(diag: &Diagnostic, message_contains: &[&str]) { + fn assert_eval_diagnostic(diag: &EvalDiagnostic) { + let lsp_diag = &diag.diagnostic; assert_eq!( - diag.severity, + lsp_diag.severity, Some(DiagnosticSeverity::ERROR), "expected ERROR severity" ); assert_eq!( - diag.code, + lsp_diag.code, Some(NumberOrString::String("eval-error".to_string())), "expected eval-error code" ); assert_eq!( - diag.source, + lsp_diag.source, Some("jrsonnet-eval".to_string()), "expected jrsonnet-eval source" ); - for expected in message_contains { - assert!( - diag.message.contains(expected), - "expected message to contain '{}', got: {}", - expected, - diag.message - ); - } + assert!( + !lsp_diag.message.is_empty(), + "eval diagnostic message should not be empty" + ); } #[test] @@ -233,7 +267,7 @@ mod tests { let doc = Document::new(r#"{ hello: "world" }"#.to_string(), DocVersion::new(1)); let result = evaluator.evaluate(&test_path(), doc.text(), doc.line_index()); - assert_eq!(result, None); + assert_matches!(result, None); } #[test] @@ -244,7 +278,12 @@ mod tests { let result = evaluator.evaluate(&test_path(), doc.text(), doc.line_index()); let diag = result.expect("expected evaluation to fail with undefined variable"); - assert_eval_diagnostic(&diag, &["not defined"]); + assert_matches!( + diag.error_kind, + EvalErrorKind::VariableIsNotDefined(ref name, ref suggestions) + if name == "undefined_var" && suggestions.is_empty() + ); + assert_eval_diagnostic(&diag); } #[test] @@ -256,16 +295,30 @@ mod tests { let result = evaluator.evaluate(&test_path(), doc.text(), doc.line_index()); let diag = result.expect("expected evaluation to fail with type error"); - // jrsonnet may report this as "index" or "type" error - assert_eq!(diag.severity, Some(DiagnosticSeverity::ERROR)); - assert_eq!( - diag.code, - 
Some(NumberOrString::String("eval-error".to_string())) + assert!(diag.is_type_like(), "expected type-like eval error variant"); + assert_matches!( + diag.error_kind, + EvalErrorKind::TypeError(_) + | EvalErrorKind::TypeMismatch(_, _, _) + | EvalErrorKind::ValueIndexMustBeTypeGot(_, _, _) + | EvalErrorKind::CantIndexInto(_) ); - assert!( - diag.message.contains("index") || diag.message.contains("type"), - "expected message about index/type error, got: {}", - diag.message + assert_eval_diagnostic(&diag); + } + + #[test] + fn test_eval_uses_non_default_range_for_virtual_main_file() { + let config = EvalConfig::default(); + let evaluator = Evaluator::new(&config); + let doc = Document::new("std.length(1)".to_string(), DocVersion::new(1)); + + let result = evaluator.evaluate(&test_path(), doc.text(), doc.line_index()); + let diag = result.expect("expected evaluation to fail with type error"); + assert!(diag.is_type_like(), "expected type-like eval error variant"); + assert_ne!( + diag.diagnostic.range, + lsp_types::Range::default(), + "eval diagnostics should point at a concrete span instead of 0:0" ); } @@ -277,7 +330,8 @@ mod tests { let result = evaluator.evaluate(&test_path(), doc.text(), doc.line_index()); let diag = result.expect("expected evaluation to fail with assert"); - assert_eval_diagnostic(&diag, &["assert"]); + assert_matches!(diag.error_kind, EvalErrorKind::AssertionFailed(_)); + assert_eval_diagnostic(&diag); } #[test] @@ -288,7 +342,12 @@ mod tests { let result = evaluator.evaluate(&test_path(), doc.text(), doc.line_index()); let diag = result.expect("expected evaluation to fail with field access error"); - assert_eval_diagnostic(&diag, &["field"]); + assert_matches!( + diag.error_kind, + EvalErrorKind::NoSuchField(ref field, ref suggestions) + if field == "nonexistent" && suggestions.is_empty() + ); + assert_eval_diagnostic(&diag); } #[test] @@ -302,7 +361,11 @@ mod tests { let result = evaluator.evaluate(&test_path(), doc.text(), doc.line_index()); 
let diag = result.expect("expected evaluation to fail with runtime error"); - assert_eval_diagnostic(&diag, &["custom error message"]); + assert_matches!( + diag.error_kind, + EvalErrorKind::RuntimeError(ref message) if message == "custom error message" + ); + assert_eval_diagnostic(&diag); } #[test] diff --git a/crates/jrsonnet-lsp/src/analysis/mod.rs b/crates/jrsonnet-lsp/src/analysis/mod.rs index 9cc9fb5e..062aa4a3 100644 --- a/crates/jrsonnet-lsp/src/analysis/mod.rs +++ b/crates/jrsonnet-lsp/src/analysis/mod.rs @@ -6,4 +6,4 @@ pub mod eval; pub mod tanka; -pub use eval::{EvalConfig, Evaluator}; +pub use eval::{EvalConfig, EvalDiagnostic, Evaluator}; diff --git a/crates/jrsonnet-lsp/src/handlers/diagnostics.rs b/crates/jrsonnet-lsp/src/handlers/diagnostics.rs index 0de5a5e3..cbd91c65 100644 --- a/crates/jrsonnet-lsp/src/handlers/diagnostics.rs +++ b/crates/jrsonnet-lsp/src/handlers/diagnostics.rs @@ -1,12 +1,12 @@ //! Diagnostics handler for publishing parse errors, lint warnings, and evaluation errors. -use jrsonnet_lsp_check::lint; +use jrsonnet_lsp_check::{lint, type_check}; use jrsonnet_lsp_document::{CanonicalPath, Document, LineIndex, SyntaxError}; use jrsonnet_lsp_import::ImportOccurrence; use jrsonnet_lsp_inference::TypeAnalysis; -use lsp_types::{Diagnostic, DiagnosticSeverity, NumberOrString, Range}; +use lsp_types::{Diagnostic, DiagnosticSeverity, NumberOrString, Position, Range}; -use crate::analysis::Evaluator; +use crate::analysis::{EvalDiagnostic, Evaluator}; /// Convert a syntax error to an LSP diagnostic. 
fn syntax_error_to_diagnostic( @@ -68,6 +68,43 @@ fn unresolved_import_to_diagnostic( }) } +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +struct OrderedPosition(Position); + +impl Ord for OrderedPosition { + fn cmp(&self, other: &Self) -> std::cmp::Ordering { + (self.0.line, self.0.character).cmp(&(other.0.line, other.0.character)) + } +} + +impl PartialOrd for OrderedPosition { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +fn ranges_overlap(a: &Range, b: &Range) -> bool { + OrderedPosition(a.start) <= OrderedPosition(b.end) + && OrderedPosition(b.start) <= OrderedPosition(a.end) +} + +fn should_suppress_eval_diagnostic(eval_diag: &EvalDiagnostic, lint_type_ranges: &[Range]) -> bool { + if !eval_diag.is_type_like() { + return false; + } + if lint_type_ranges.is_empty() { + return false; + } + let eval_range = &eval_diag.diagnostic.range; + if *eval_range == Range::default() { + return true; + } + + lint_type_ranges + .iter() + .any(|range| ranges_overlap(range, eval_range)) +} + /// Compute diagnostics for a document. 
/// /// # Arguments @@ -104,15 +141,37 @@ pub fn compute_diagnostics( // Add lint diagnostics if enabled and the document parsed successfully if enable_lint && errors.is_empty() { - let lint_config = lint::LintConfig::all(); + let lint_config = lint::LintConfig::all_except_type_errors(); let lint_diagnostics = lint::lint(document, analysis, &lint_config, uri); diagnostics.extend(lint_diagnostics); + + let type_check_config = type_check::TypeCheckConfig::all(); + let type_errors = type_check::check_types(document, analysis, &type_check_config); + let type_error_diagnostics: Vec = type_errors + .into_iter() + .map(|error| error.to_diagnostic(line_index, text, analysis)) + .collect(); + let lint_type_ranges: Vec = type_error_diagnostics + .iter() + .map(|diag| diag.range) + .collect(); + diagnostics.extend(type_error_diagnostics); + + // Add evaluation diagnostics if enabled and the document parsed successfully + if let Some(eval) = evaluator.filter(|_| errors.is_empty()) { + if let Some(eval_diag) = eval.evaluate(path, text, line_index) { + if !should_suppress_eval_diagnostic(&eval_diag, &lint_type_ranges) { + diagnostics.push(eval_diag.diagnostic); + } + } + } + return diagnostics; } // Add evaluation diagnostics if enabled and the document parsed successfully if let Some(eval) = evaluator.filter(|_| errors.is_empty()) { if let Some(eval_diag) = eval.evaluate(path, text, line_index) { - diagnostics.push(eval_diag); + diagnostics.push(eval_diag.diagnostic); } } @@ -503,6 +562,30 @@ mod tests { .is_some_and(|c| matches!(c, NumberOrString::String(s) if s == "syntax-error")))); } + #[test] + fn test_eval_type_error_suppressed_when_lint_type_error_exists() { + let doc = Document::new("std.length(1)".to_string(), DocVersion::new(1)); + let eval = test_evaluator(); + let diagnostics = diagnostics_for(&doc, true, Some(&eval)); + + assert_eq!( + diagnostics.len(), + 1, + "expected lint type diagnostic to suppress duplicate eval type diagnostic" + ); + assert_eq!( + 
diagnostics[0].source.as_deref(), + Some("jrsonnet-lint"), + "expected remaining diagnostic to come from lint" + ); + assert!( + diagnostics + .iter() + .all(|diag| diag.source.as_deref() != Some("jrsonnet-eval")), + "eval diagnostic should be suppressed when equivalent lint type diagnostic exists" + ); + } + #[test] fn test_unresolved_import_reports_diagnostic() { let doc = Document::new( diff --git a/crates/jrsonnet-lsp/tests/integration_test/lifecycle.rs b/crates/jrsonnet-lsp/tests/integration_test/lifecycle.rs index 755168c6..f967acc2 100644 --- a/crates/jrsonnet-lsp/tests/integration_test/lifecycle.rs +++ b/crates/jrsonnet-lsp/tests/integration_test/lifecycle.rs @@ -302,6 +302,76 @@ fn test_configuration_change_reconfigures_eval_diagnostics() { .expect("Server thread should exit cleanly"); } +#[test] +fn test_lint_and_eval_do_not_duplicate_type_diagnostics() { + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request_with_options( + 1, + serde_json::json!({ + "enableEvalDiagnostics": true, + "enableLintDiagnostics": true + }), + ))) + .expect("expected success"); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .expect("expected success"); + + let uri = "file:///test/type-dedupe.jsonnet"; + client_conn + .sender + .send(Message::Notification(did_open_notification( + uri, + "std.length(1)", + ))) + .expect("expected success"); + + let diagnostics = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); + assert_eq!( + diagnostics.diagnostics.len(), + 1, + "expected duplicate eval type diagnostic to be suppressed when lint type diagnostic exists" + ); + let diag = &diagnostics.diagnostics[0]; + assert_eq!(diag.source.as_deref(), Some("jrsonnet-lint")); + assert_eq!( + diag.code, + Some(lsp_types::NumberOrString::String("type-error".to_string())) + ); 
+ assert_eq!( + diag.range, + lsp_types::Range { + start: Position { + line: 0, + character: 11, + }, + end: Position { + line: 0, + character: 12, + }, + } + ); + + client_conn + .sender + .send(Message::Request(shutdown_request(2))) + .expect("expected success"); + let _ = recv_response(&client_conn, 2); + client_conn + .sender + .send(Message::Notification(exit_notification())) + .expect("expected success"); + server_thread + .join() + .expect("Server thread should exit cleanly"); +} + #[test] fn test_configuration_change_requests_inlay_hint_refresh_when_supported() { let (client_conn, server_conn) = Connection::memory(); diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/diagnostics_eval_reports_source_span.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/diagnostics_eval_reports_source_span.yaml new file mode 100644 index 00000000..a57ad5fb --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/diagnostics_eval_reports_source_span.yaml @@ -0,0 +1,22 @@ +# Verify eval diagnostics map to a concrete in-file span instead of 0:0. +steps: +- step: create + files: + main.jsonnet: ((m1:|))undefined_var + +- step: config + settings: + jsonnet: + enableEvalDiagnostics: true + +- step: diagnosticsSettled + +- step: expectDiagnostics + file: main.jsonnet + diagnostics: + - at: m1 + text: undefined_var + severity: error + code: eval-error + source: jrsonnet-eval + message: "local is not defined: undefined_var" diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/diagnostics_type_mismatch_dedup_eval.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/diagnostics_type_mismatch_dedup_eval.yaml new file mode 100644 index 00000000..f7bf9326 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/diagnostics_type_mismatch_dedup_eval.yaml @@ -0,0 +1,24 @@ +# Verify that enabling both lint + eval does not emit duplicate diagnostics for +# one underlying type mismatch. 
+steps: +- step: create + files: + main.jsonnet: std.length(((m1:|))1) + +- step: config + settings: + jsonnet: + enableLintDiagnostics: true + enableEvalDiagnostics: true + +- step: diagnosticsSettled + +- step: expectDiagnostics + file: main.jsonnet + diagnostics: + - at: m1 + text: "1" + severity: warning + code: type-error + source: jrsonnet-lint + message: "`std.length` argument 1 (`x`) expects `string | object | function() | array`, got `number`" From 852c259ba43a37ed1fbe8d2644ecac8cd1804761 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Fri, 27 Feb 2026 17:35:32 +0000 Subject: [PATCH 203/210] refactor(jrsonnet-lsp-tests): remove path attrs via module layout cleanup --- .../features.rs | 8 +- .../formatting.rs | 0 .../lifecycle.rs | 71 +- crates/jrsonnet-lsp/tests/integration/mod.rs | 1127 ++++++++++++++++ .../navigation.rs | 20 +- .../workspace_cross_file.rs | 142 +- crates/jrsonnet-lsp/tests/integration_test.rs | 1145 +---------------- crates/jrsonnet-lsp/tests/stress_tests.rs | 291 ++--- .../tests/support/lsp_test_transport.rs | 39 + crates/jrsonnet-lsp/tests/support/mod.rs | 5 + 10 files changed, 1410 insertions(+), 1438 deletions(-) rename crates/jrsonnet-lsp/tests/{integration_test => integration}/features.rs (98%) rename crates/jrsonnet-lsp/tests/{integration_test => integration}/formatting.rs (100%) rename crates/jrsonnet-lsp/tests/{integration_test => integration}/lifecycle.rs (95%) create mode 100644 crates/jrsonnet-lsp/tests/integration/mod.rs rename crates/jrsonnet-lsp/tests/{integration_test => integration}/navigation.rs (97%) rename crates/jrsonnet-lsp/tests/{integration_test => integration}/workspace_cross_file.rs (90%) create mode 100644 crates/jrsonnet-lsp/tests/support/lsp_test_transport.rs create mode 100644 crates/jrsonnet-lsp/tests/support/mod.rs diff --git a/crates/jrsonnet-lsp/tests/integration_test/features.rs b/crates/jrsonnet-lsp/tests/integration/features.rs similarity index 98% rename from 
crates/jrsonnet-lsp/tests/integration_test/features.rs rename to crates/jrsonnet-lsp/tests/integration/features.rs index 2205fcef..559d6cd1 100644 --- a/crates/jrsonnet-lsp/tests/integration_test/features.rs +++ b/crates/jrsonnet-lsp/tests/integration/features.rs @@ -600,7 +600,7 @@ fn test_text_document_references() { .sender .send(Message::Notification(did_open_notification(uri, text))) .expect("expected success"); - let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); + let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, RESPONSE_TIMEOUT); client_conn .sender @@ -832,7 +832,7 @@ fn test_semantic_tokens_range_request() { .sender .send(Message::Notification(did_open_notification(uri, text))) .expect("expected success"); - let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); + let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, RESPONSE_TIMEOUT); client_conn .sender @@ -894,7 +894,7 @@ fn test_code_lens_resolve_request() { .sender .send(Message::Notification(did_open_notification(uri, text))) .expect("expected success"); - let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); + let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, RESPONSE_TIMEOUT); client_conn .sender @@ -967,7 +967,7 @@ fn test_code_lens_evaluate_command_executes_and_returns_result() { .sender .send(Message::Notification(did_open_notification(uri, text))) .expect("expected success"); - let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); + let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, RESPONSE_TIMEOUT); client_conn .sender diff --git a/crates/jrsonnet-lsp/tests/integration_test/formatting.rs b/crates/jrsonnet-lsp/tests/integration/formatting.rs similarity index 100% rename from crates/jrsonnet-lsp/tests/integration_test/formatting.rs rename to crates/jrsonnet-lsp/tests/integration/formatting.rs diff --git 
a/crates/jrsonnet-lsp/tests/integration_test/lifecycle.rs b/crates/jrsonnet-lsp/tests/integration/lifecycle.rs similarity index 95% rename from crates/jrsonnet-lsp/tests/integration_test/lifecycle.rs rename to crates/jrsonnet-lsp/tests/integration/lifecycle.rs index f967acc2..88cc0e74 100644 --- a/crates/jrsonnet-lsp/tests/integration_test/lifecycle.rs +++ b/crates/jrsonnet-lsp/tests/integration/lifecycle.rs @@ -185,7 +185,7 @@ fn test_diagnostics_refresh_on_did_save_with_text() { uri, "{ a: 1 }", ))) .expect("expected success"); - let opened = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); + let opened = recv_publish_diagnostics_for_uri(&client_conn, uri, RESPONSE_TIMEOUT); assert!(opened.diagnostics.is_empty()); client_conn @@ -195,7 +195,7 @@ fn test_diagnostics_refresh_on_did_save_with_text() { Some("{ a: }"), ))) .expect("expected success"); - let saved_invalid = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); + let saved_invalid = recv_publish_diagnostics_for_uri(&client_conn, uri, RESPONSE_TIMEOUT); assert!( !saved_invalid.diagnostics.is_empty(), "saving invalid text should publish diagnostics" @@ -208,7 +208,7 @@ fn test_diagnostics_refresh_on_did_save_with_text() { Some("{ a: 2 }"), ))) .expect("expected success"); - let saved_valid = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); + let saved_valid = recv_publish_diagnostics_for_uri(&client_conn, uri, RESPONSE_TIMEOUT); assert!(saved_valid.diagnostics.is_empty()); client_conn @@ -255,8 +255,7 @@ fn test_configuration_change_reconfigures_eval_diagnostics() { .send(Message::Notification(did_open_notification(uri, text))) .expect("expected success"); - let initial_diagnostics = - recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); + let initial_diagnostics = recv_publish_diagnostics_for_uri(&client_conn, uri, RESPONSE_TIMEOUT); assert!( initial_diagnostics .diagnostics @@ -276,8 +275,7 @@ fn 
test_configuration_change_reconfigures_eval_diagnostics() { )) .expect("expected success"); - let updated_diagnostics = - recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); + let updated_diagnostics = recv_publish_diagnostics_for_uri(&client_conn, uri, RESPONSE_TIMEOUT); assert!( updated_diagnostics .diagnostics @@ -332,7 +330,7 @@ fn test_lint_and_eval_do_not_duplicate_type_diagnostics() { ))) .expect("expected success"); - let diagnostics = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); + let diagnostics = recv_publish_diagnostics_for_uri(&client_conn, uri, RESPONSE_TIMEOUT); assert_eq!( diagnostics.diagnostics.len(), 1, @@ -402,18 +400,17 @@ fn test_configuration_change_requests_inlay_hint_refresh_when_supported() { )) .expect("expected success"); - let refresh_request = loop { - let message = client_conn - .receiver - .recv_timeout(Duration::from_secs(3)) - .expect("expected inlay hint refresh request"); - match message { + let refresh_request = super::recv_until( + &client_conn, + super::LONG_RESPONSE_TIMEOUT, + |message| match message { Message::Request(request) if request.method == InlayHintRefreshRequest::METHOD => { - break request; + Some(request) } - _ => {} - } - }; + _ => None, + }, + ) + .expect("timed out waiting for inlay hint refresh request"); assert_eq!(refresh_request.params, serde_json::Value::Null); client_conn @@ -493,7 +490,7 @@ fn test_configuration_change_reindexes_closed_import_graph_entries() { &main_uri, &main_text, ))) .expect("expected success"); - let _ = recv_publish_diagnostics_for_uri(&client_conn, &main_uri, Duration::from_secs(5)); + let _ = recv_publish_diagnostics_for_uri(&client_conn, &main_uri, RESPONSE_TIMEOUT); client_conn .sender @@ -639,15 +636,15 @@ fn test_initialize_registers_did_change_watched_files_when_supported() { .send(Message::Notification(initialized_notification())) .expect("expected success"); - let register_request = loop { - let message = client_conn 
- .receiver - .recv_timeout(Duration::from_secs(3)) - .expect("expected registerCapability request"); - if let Message::Request(request) = message { - break request; - } - }; + let register_request = super::recv_until( + &client_conn, + super::LONG_RESPONSE_TIMEOUT, + |message| match message { + Message::Request(request) => Some(request), + _ => None, + }, + ) + .expect("timed out waiting for registerCapability request"); assert_eq!(register_request.method, RegisterCapability::METHOD); let actual_params: RegistrationParams = @@ -723,15 +720,15 @@ fn test_initialize_uses_relative_watch_patterns_when_supported() { .send(Message::Notification(initialized_notification())) .expect("expected success"); - let register_request = loop { - let message = client_conn - .receiver - .recv_timeout(Duration::from_secs(3)) - .expect("expected registerCapability request"); - if let Message::Request(request) = message { - break request; - } - }; + let register_request = super::recv_until( + &client_conn, + super::LONG_RESPONSE_TIMEOUT, + |message| match message { + Message::Request(request) => Some(request), + _ => None, + }, + ) + .expect("timed out waiting for registerCapability request"); assert_eq!(register_request.method, RegisterCapability::METHOD); let actual_params: RegistrationParams = diff --git a/crates/jrsonnet-lsp/tests/integration/mod.rs b/crates/jrsonnet-lsp/tests/integration/mod.rs new file mode 100644 index 00000000..1d08e162 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/integration/mod.rs @@ -0,0 +1,1127 @@ +//! Integration tests for the LSP server. +//! +//! Uses in-process testing with channels rather than subprocess management, +//! following patterns from ast-grep and simple-completion-language-server. 
+ +use std::{fmt::Write as _, fs, thread, time::Duration}; + +pub(crate) use crate::support::{ + recv_response_by_id, recv_until, LONG_RESPONSE_TIMEOUT, RESPONSE_TIMEOUT, +}; +use assert_matches::assert_matches; +use jrsonnet_lsp_handlers::{SemanticTokenModifierName, SemanticTokenTypeName}; +use lsp_server::{Connection, Message, Notification, Request}; +use lsp_types::{ + notification::{ + Cancel, DidChangeConfiguration, DidChangeWatchedFiles, DidCloseTextDocument, + DidOpenTextDocument, DidSaveTextDocument, Notification as _, PublishDiagnostics, + }, + request::{ + CodeActionRequest, CodeLensRequest, CodeLensResolve, DocumentHighlightRequest, + ExecuteCommand, Formatting, GotoDeclaration, GotoDefinition, GotoImplementation, + GotoTypeDefinition, Initialize, InlayHintRefreshRequest, InlayHintRequest, RangeFormatting, + References, RegisterCapability, Rename, Request as _, SemanticTokensRangeRequest, Shutdown, + WorkspaceSymbolRequest, + }, + CancelParams, DidChangeConfigurationParams, DidChangeWatchedFilesClientCapabilities, + DidChangeWatchedFilesParams, DidChangeWatchedFilesRegistrationOptions, + DidCloseTextDocumentParams, DidOpenTextDocumentParams, DidSaveTextDocumentParams, + ExecuteCommandParams, FileChangeType, FileEvent, FileSystemWatcher, GlobPattern, + GotoDefinitionParams, GotoDefinitionResponse, InitializeParams, + InlayHintWorkspaceClientCapabilities, NumberOrString, OneOf, PartialResultParams, Position, + ReferenceContext, ReferenceParams, Registration, RegistrationParams, RelativePattern, + RenameParams, SemanticTokensRangeParams, TextDocumentIdentifier, TextDocumentItem, + TextDocumentPositionParams, WorkDoneProgressParams, WorkspaceClientCapabilities, + WorkspaceFolder, +}; +use serde::Deserialize; +use serde_json::json; +use tempfile::TempDir; + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +struct ExpectedSemanticToken { + line: u32, + start: u32, + len: u32, + token_type: SemanticTokenTypeName, + modifiers: u32, +} + +#[derive(Debug, Clone, 
PartialEq, Eq, Deserialize)] +#[serde(tag = "kind", rename_all = "snake_case")] +enum RequestErrorData { + InvalidParams { method: String }, + MethodNotFound { method: String }, + UnknownExecuteCommand { command: String }, + MissingExecuteHandler { command: String }, + RequestCanceled { method: String }, + ServerShuttingDown, + AsyncHandlerFailed { method: String }, + AsyncHandlerPanicked { method: String }, +} + +impl ExpectedSemanticToken { + const fn new( + line: u32, + start: u32, + len: u32, + token_type: SemanticTokenTypeName, + modifiers: u32, + ) -> Self { + Self { + line, + start, + len, + token_type, + modifiers, + } + } +} + +fn semantic_modifiers(modifiers: &[SemanticTokenModifierName]) -> u32 { + modifiers + .iter() + .fold(0_u32, |acc, modifier| acc | modifier.as_bitset()) +} + +fn semantic_token( + line: u32, + start: u32, + len: u32, + token_type: SemanticTokenTypeName, + modifiers: &[SemanticTokenModifierName], +) -> ExpectedSemanticToken { + ExpectedSemanticToken::new(line, start, len, token_type, semantic_modifiers(modifiers)) +} + +fn encode_semantic_tokens(mut tokens: Vec) -> lsp_types::SemanticTokens { + tokens.sort_by_key(|token| (token.line, token.start)); + let mut encoded = Vec::with_capacity(tokens.len()); + let mut prev_line = 0_u32; + let mut prev_start = 0_u32; + for token in tokens { + let delta_line = token.line - prev_line; + let delta_start = if delta_line == 0 { + token.start - prev_start + } else { + token.start + }; + encoded.push(lsp_types::SemanticToken { + delta_line, + delta_start, + length: token.len, + token_type: token.token_type.as_index(), + token_modifiers_bitset: token.modifiers, + }); + prev_line = token.line; + prev_start = token.start; + } + + lsp_types::SemanticTokens { + result_id: None, + data: encoded, + } +} + +/// Helper to create an initialize request. 
+fn initialize_request(id: i32) -> Request { + initialize_request_with_options(id, serde_json::Value::Null) +} + +/// Helper to create an initialize request with custom initialization options. +fn initialize_request_with_options(id: i32, initialization_options: serde_json::Value) -> Request { + let mut params = InitializeParams::default(); + if !initialization_options.is_null() { + params.initialization_options = Some(initialization_options); + } + + Request::new( + id.into(), + Initialize::METHOD.to_string(), + serde_json::to_value(params).expect("expected success"), + ) +} + +/// Helper to create an initialize request with a workspace root URI. +fn initialize_request_with_root_uri(id: i32, root_uri: &str) -> Request { + let mut params = serde_json::to_value(InitializeParams::default()).expect("expected success"); + if let Some(object) = params.as_object_mut() { + object.insert("rootUri".to_string(), serde_json::json!(root_uri)); + } + Request::new(id.into(), Initialize::METHOD.to_string(), params) +} + +/// Helper to create an initialize request that advertises dynamic watched-file +/// registration support. +fn initialize_request_with_dynamic_watched_files(id: i32) -> Request { + let params = InitializeParams { + capabilities: lsp_types::ClientCapabilities { + workspace: Some(WorkspaceClientCapabilities { + did_change_watched_files: Some(DidChangeWatchedFilesClientCapabilities { + dynamic_registration: Some(true), + relative_pattern_support: Some(false), + }), + ..WorkspaceClientCapabilities::default() + }), + ..lsp_types::ClientCapabilities::default() + }, + ..InitializeParams::default() + }; + + Request::new( + id.into(), + Initialize::METHOD.to_string(), + serde_json::to_value(params).expect("expected success"), + ) +} + +/// Helper to create an initialize request that advertises dynamic watched-file +/// registration support and relative pattern support. 
+fn initialize_request_with_dynamic_watched_files_relative(id: i32, root_uri: &str) -> Request { + let params = InitializeParams { + workspace_folders: Some(vec![WorkspaceFolder { + uri: root_uri.parse().expect("expected success"), + name: "workspace".to_owned(), + }]), + capabilities: lsp_types::ClientCapabilities { + workspace: Some(WorkspaceClientCapabilities { + did_change_watched_files: Some(DidChangeWatchedFilesClientCapabilities { + dynamic_registration: Some(true), + relative_pattern_support: Some(true), + }), + ..WorkspaceClientCapabilities::default() + }), + ..lsp_types::ClientCapabilities::default() + }, + ..InitializeParams::default() + }; + + Request::new( + id.into(), + Initialize::METHOD.to_string(), + serde_json::to_value(params).expect("expected success"), + ) +} + +/// Helper to create an initialize request that advertises +/// `workspace/inlayHint/refresh` support. +fn initialize_request_with_inlay_hint_refresh_support(id: i32) -> Request { + let params = InitializeParams { + capabilities: lsp_types::ClientCapabilities { + workspace: Some(WorkspaceClientCapabilities { + inlay_hint: Some(InlayHintWorkspaceClientCapabilities { + refresh_support: Some(true), + }), + ..WorkspaceClientCapabilities::default() + }), + ..lsp_types::ClientCapabilities::default() + }, + ..InitializeParams::default() + }; + + Request::new( + id.into(), + Initialize::METHOD.to_string(), + serde_json::to_value(params).expect("expected success"), + ) +} + +/// Helper to create a shutdown request. +fn shutdown_request(id: i32) -> Request { + Request::new( + id.into(), + Shutdown::METHOD.to_string(), + serde_json::Value::Null, + ) +} + +/// Helper to create an initialized notification. +fn initialized_notification() -> Notification { + Notification::new("initialized".to_string(), json!({})) +} + +/// Helper to create an exit notification. 
+fn exit_notification() -> Notification { + Notification::new("exit".to_string(), json!({})) +} + +/// Helper to create a $/cancelRequest notification. +fn cancel_request_notification(request_id: i32) -> Notification { + let params = CancelParams { + id: NumberOrString::Number(request_id), + }; + Notification::new( + Cancel::METHOD.to_string(), + serde_json::to_value(params).expect("expected success"), + ) +} + +/// Helper to create a didOpen notification. +fn did_open_notification(uri: &str, text: &str) -> Notification { + let params = DidOpenTextDocumentParams { + text_document: TextDocumentItem { + uri: uri.parse().expect("expected success"), + language_id: "jsonnet".to_string(), + version: 1, + text: text.to_string(), + }, + }; + Notification::new( + DidOpenTextDocument::METHOD.to_string(), + serde_json::to_value(params).expect("expected success"), + ) +} + +fn did_save_notification(uri: &str, text: Option<&str>) -> Notification { + let params = DidSaveTextDocumentParams { + text_document: TextDocumentIdentifier { + uri: uri.parse().expect("expected success"), + }, + text: text.map(ToString::to_string), + }; + Notification::new( + DidSaveTextDocument::METHOD.to_string(), + serde_json::to_value(params).expect("expected success"), + ) +} + +fn did_close_notification(uri: &str) -> Notification { + let params = DidCloseTextDocumentParams { + text_document: TextDocumentIdentifier { + uri: uri.parse().expect("expected success"), + }, + }; + Notification::new( + DidCloseTextDocument::METHOD.to_string(), + serde_json::to_value(params).expect("expected success"), + ) +} + +/// Helper to create a goto definition request. 
+fn goto_definition_request(id: i32, uri: &str, line: u32, character: u32) -> Request {
+	let params = GotoDefinitionParams {
+		text_document_position_params: TextDocumentPositionParams {
+			text_document: TextDocumentIdentifier {
+				uri: uri.parse().expect("expected success"),
+			},
+			position: Position { line, character },
+		},
+		work_done_progress_params: WorkDoneProgressParams::default(),
+		partial_result_params: PartialResultParams::default(),
+	};
+	Request::new(
+		id.into(),
+		GotoDefinition::METHOD.to_string(),
+		serde_json::to_value(params).expect("expected success"),
+	)
+}
+
+fn goto_type_definition_request(id: i32, uri: &str, line: u32, character: u32) -> Request {
+	let params = GotoDefinitionParams {
+		text_document_position_params: TextDocumentPositionParams {
+			text_document: TextDocumentIdentifier {
+				uri: uri.parse().expect("expected success"),
+			},
+			position: Position { line, character },
+		},
+		work_done_progress_params: WorkDoneProgressParams::default(),
+		partial_result_params: PartialResultParams::default(),
+	};
+	Request::new(
+		id.into(),
+		GotoTypeDefinition::METHOD.to_string(),
+		serde_json::to_value(params).expect("expected success"),
+	)
+}
+
+fn assert_type_definition_matches_definition(
+	conn: &Connection,
+	definition_id: i32,
+	type_definition_id: i32,
+	uri: &str,
+	line: u32,
+	character: u32,
+) -> Option<GotoDefinitionResponse> {
+	conn.sender
+		.send(Message::Request(goto_definition_request(
+			definition_id,
+			uri,
+			line,
+			character,
+		)))
+		.expect("expected success");
+	let definition_response = recv_response(conn, definition_id);
+	assert!(
+		definition_response.error.is_none(),
+		"Goto definition request should succeed"
+	);
+	let definition_result: Option<GotoDefinitionResponse> =
+		serde_json::from_value(definition_response.result.expect("should have result"))
+			.expect("expected success");
+
+	conn.sender
+		.send(Message::Request(goto_type_definition_request(
+			type_definition_id,
+			uri,
+			line,
+			character,
+		)))
+		.expect("expected success");
+	let
type_definition_response = recv_response(conn, type_definition_id);
+	assert!(
+		type_definition_response.error.is_none(),
+		"Goto type definition request should succeed"
+	);
+	let type_definition_result: Option<GotoDefinitionResponse> =
+		serde_json::from_value(type_definition_response.result.expect("should have result"))
+			.expect("expected success");
+
+	assert_eq!(
+		type_definition_result, definition_result,
+		"typeDefinition should match definition for Jsonnet symbol navigation"
+	);
+	definition_result
+}
+
+fn send_goto_and_parse(
+	conn: &Connection,
+	id: i32,
+	label: &str,
+	request: Request,
+) -> Option<GotoDefinitionResponse> {
+	conn.sender
+		.send(Message::Request(request))
+		.expect("expected success");
+	let response = recv_response(conn, id);
+	assert!(response.error.is_none(), "{label} request should succeed");
+	serde_json::from_value(response.result.expect("should have result")).expect("expected success")
+}
+
+/// Helper to create a goto declaration request.
+fn goto_declaration_request(id: i32, uri: &str, line: u32, character: u32) -> Request {
+	let params = GotoDefinitionParams {
+		text_document_position_params: TextDocumentPositionParams {
+			text_document: TextDocumentIdentifier {
+				uri: uri.parse().expect("expected success"),
+			},
+			position: Position { line, character },
+		},
+		work_done_progress_params: WorkDoneProgressParams::default(),
+		partial_result_params: PartialResultParams::default(),
+	};
+	Request::new(
+		id.into(),
+		GotoDeclaration::METHOD.to_string(),
+		serde_json::to_value(params).expect("expected success"),
+	)
+}
+
+fn goto_implementation_request(id: i32, uri: &str, line: u32, character: u32) -> Request {
+	let params = GotoDefinitionParams {
+		text_document_position_params: TextDocumentPositionParams {
+			text_document: TextDocumentIdentifier {
+				uri: uri.parse().expect("expected success"),
+			},
+			position: Position { line, character },
+		},
+		work_done_progress_params: WorkDoneProgressParams::default(),
+		partial_result_params: PartialResultParams::default(),
+
}; + Request::new( + id.into(), + GotoImplementation::METHOD.to_string(), + serde_json::to_value(params).expect("expected success"), + ) +} + +/// Helper to create a references request. +fn references_request( + id: i32, + uri: &str, + line: u32, + character: u32, + include_declaration: bool, +) -> Request { + let params = ReferenceParams { + text_document_position: TextDocumentPositionParams { + text_document: TextDocumentIdentifier { + uri: uri.parse().expect("expected success"), + }, + position: Position { line, character }, + }, + context: ReferenceContext { + include_declaration, + }, + work_done_progress_params: WorkDoneProgressParams::default(), + partial_result_params: PartialResultParams::default(), + }; + Request::new( + id.into(), + References::METHOD.to_string(), + serde_json::to_value(params).expect("expected success"), + ) +} + +fn document_highlight_request(id: i32, uri: &str, line: u32, character: u32) -> Request { + let params = lsp_types::DocumentHighlightParams { + text_document_position_params: TextDocumentPositionParams { + text_document: TextDocumentIdentifier { + uri: uri.parse().expect("expected success"), + }, + position: Position { line, character }, + }, + work_done_progress_params: WorkDoneProgressParams::default(), + partial_result_params: PartialResultParams::default(), + }; + Request::new( + id.into(), + DocumentHighlightRequest::METHOD.to_string(), + serde_json::to_value(params).expect("expected success"), + ) +} + +fn rename_request(id: i32, uri: &str, line: u32, character: u32, new_name: &str) -> Request { + let params = RenameParams { + text_document_position: TextDocumentPositionParams { + text_document: TextDocumentIdentifier { + uri: uri.parse().expect("expected success"), + }, + position: Position { line, character }, + }, + new_name: new_name.to_string(), + work_done_progress_params: WorkDoneProgressParams::default(), + }; + Request::new( + id.into(), + Rename::METHOD.to_string(), + 
serde_json::to_value(params).expect("expected success"),
+	)
+}
+
+fn inlay_hint_request(
+	id: i32,
+	uri: &str,
+	start_line: u32,
+	start_character: u32,
+	end_line: u32,
+	end_character: u32,
+) -> Request {
+	let params = lsp_types::InlayHintParams {
+		work_done_progress_params: WorkDoneProgressParams::default(),
+		text_document: TextDocumentIdentifier {
+			uri: uri.parse().expect("expected success"),
+		},
+		range: lsp_types::Range {
+			start: Position {
+				line: start_line,
+				character: start_character,
+			},
+			end: Position {
+				line: end_line,
+				character: end_character,
+			},
+		},
+	};
+	Request::new(
+		id.into(),
+		InlayHintRequest::METHOD.to_string(),
+		serde_json::to_value(params).expect("expected success"),
+	)
+}
+
+fn formatting_request(id: i32, uri: &str, tab_size: u32, insert_spaces: bool) -> Request {
+	formatting_request_with_options(id, uri, tab_size, insert_spaces, None, None, None)
+}
+
+fn formatting_options(
+	tab_size: u32,
+	insert_spaces: bool,
+	trim_trailing_whitespace: Option<bool>,
+	insert_final_newline: Option<bool>,
+	trim_final_newlines: Option<bool>,
+) -> lsp_types::FormattingOptions {
+	lsp_types::FormattingOptions {
+		tab_size,
+		insert_spaces,
+		properties: std::collections::HashMap::new(),
+		trim_trailing_whitespace,
+		insert_final_newline,
+		trim_final_newlines,
+	}
+}
+
+fn formatting_request_with_options(
+	id: i32,
+	uri: &str,
+	tab_size: u32,
+	insert_spaces: bool,
+	trim_trailing_whitespace: Option<bool>,
+	insert_final_newline: Option<bool>,
+	trim_final_newlines: Option<bool>,
+) -> Request {
+	let params = lsp_types::DocumentFormattingParams {
+		text_document: TextDocumentIdentifier {
+			uri: uri.parse().expect("expected success"),
+		},
+		options: formatting_options(
+			tab_size,
+			insert_spaces,
+			trim_trailing_whitespace,
+			insert_final_newline,
+			trim_final_newlines,
+		),
+		work_done_progress_params: WorkDoneProgressParams::default(),
+	};
+	Request::new(
+		id.into(),
+		Formatting::METHOD.to_string(),
+		serde_json::to_value(params).expect("expected
success"),
+	)
+}
+
+fn range_formatting_request_with_options(
+	id: i32,
+	uri: &str,
+	range: lsp_types::Range,
+	options: lsp_types::FormattingOptions,
+) -> Request {
+	let params = lsp_types::DocumentRangeFormattingParams {
+		text_document: TextDocumentIdentifier {
+			uri: uri.parse().expect("expected success"),
+		},
+		range,
+		options,
+		work_done_progress_params: WorkDoneProgressParams::default(),
+	};
+	Request::new(
+		id.into(),
+		RangeFormatting::METHOD.to_string(),
+		serde_json::to_value(params).expect("expected success"),
+	)
+}
+
+fn semantic_tokens_range_request(
+	id: i32,
+	uri: &str,
+	start_line: u32,
+	start_character: u32,
+	end_line: u32,
+	end_character: u32,
+) -> Request {
+	let params = SemanticTokensRangeParams {
+		work_done_progress_params: WorkDoneProgressParams::default(),
+		partial_result_params: PartialResultParams::default(),
+		text_document: TextDocumentIdentifier {
+			uri: uri.parse().expect("expected success"),
+		},
+		range: lsp_types::Range {
+			start: Position {
+				line: start_line,
+				character: start_character,
+			},
+			end: Position {
+				line: end_line,
+				character: end_character,
+			},
+		},
+	};
+	Request::new(
+		id.into(),
+		SemanticTokensRangeRequest::METHOD.to_string(),
+		serde_json::to_value(params).expect("expected success"),
+	)
+}
+
+fn code_action_request(
+	id: i32,
+	uri: &str,
+	range: lsp_types::Range,
+	diagnostics: Vec<lsp_types::Diagnostic>,
+	only: Option<Vec<lsp_types::CodeActionKind>>,
+) -> Request {
+	let params = lsp_types::CodeActionParams {
+		text_document: TextDocumentIdentifier {
+			uri: uri.parse().expect("expected success"),
+		},
+		range,
+		context: lsp_types::CodeActionContext {
+			diagnostics,
+			only,
+			trigger_kind: None,
+		},
+		work_done_progress_params: WorkDoneProgressParams::default(),
+		partial_result_params: PartialResultParams::default(),
+	};
+	Request::new(
+		id.into(),
+		CodeActionRequest::METHOD.to_string(),
+		serde_json::to_value(params).expect("expected success"),
+	)
+}
+
+fn code_lens_request(id: i32, uri: &str) -> Request {
+	let params =
lsp_types::CodeLensParams {
+		text_document: TextDocumentIdentifier {
+			uri: uri.parse().expect("expected success"),
+		},
+		work_done_progress_params: WorkDoneProgressParams::default(),
+		partial_result_params: PartialResultParams::default(),
+	};
+	Request::new(
+		id.into(),
+		CodeLensRequest::METHOD.to_string(),
+		serde_json::to_value(params).expect("expected success"),
+	)
+}
+
+fn code_lens_resolve_request(id: i32, lens: lsp_types::CodeLens) -> Request {
+	Request::new(
+		id.into(),
+		CodeLensResolve::METHOD.to_string(),
+		serde_json::to_value(lens).expect("expected success"),
+	)
+}
+
+fn did_change_watched_files_notification(changes: Vec<lsp_types::FileEvent>) -> Notification {
+	let params = DidChangeWatchedFilesParams { changes };
+	Notification::new(
+		DidChangeWatchedFiles::METHOD.to_string(),
+		serde_json::to_value(params).expect("expected success"),
+	)
+}
+
+fn did_change_configuration_notification(settings: serde_json::Value) -> Notification {
+	let params = DidChangeConfigurationParams { settings };
+	Notification::new(
+		DidChangeConfiguration::METHOD.to_string(),
+		serde_json::to_value(params).expect("expected success"),
+	)
+}
+
+fn execute_command_request(id: i32, command: &str, arguments: Vec<serde_json::Value>) -> Request {
+	let params = ExecuteCommandParams {
+		command: command.to_string(),
+		arguments,
+		work_done_progress_params: WorkDoneProgressParams::default(),
+	};
+	Request::new(
+		id.into(),
+		ExecuteCommand::METHOD.to_string(),
+		serde_json::to_value(params).expect("expected success"),
+	)
+}
+
+fn custom_eval_file_request(id: i32, uri: &str) -> Request {
+	Request::new(
+		id.into(),
+		"jrsonnet/evalFile".to_string(),
+		serde_json::json!({
+			"textDocument": {
+				"uri": uri,
+			},
+		}),
+	)
+}
+
+fn custom_eval_expression_request(id: i32, expression: &str, base_uri: Option<&str>) -> Request {
+	let mut params = serde_json::json!({
+		"expression": expression,
+	});
+	if let Some(base_uri) = base_uri {
+		params
+			.as_object_mut()
+			.expect("evalExpression params should be an
object")
+			.insert(
+				"baseDocument".to_string(),
+				serde_json::json!({
+					"uri": base_uri,
+				}),
+			);
+	}
+
+	Request::new(id.into(), "jrsonnet/evalExpression".to_string(), params)
+}
+
+fn custom_find_transitive_importers_request(id: i32, uri: &str) -> Request {
+	Request::new(
+		id.into(),
+		"jrsonnet/findTransitiveImporters".to_string(),
+		serde_json::json!({
+			"textDocument": {
+				"uri": uri,
+			},
+		}),
+	)
+}
+
+fn workspace_symbol_request(id: i32, query: &str) -> Request {
+	let params = lsp_types::WorkspaceSymbolParams {
+		query: query.to_string(),
+		work_done_progress_params: WorkDoneProgressParams::default(),
+		partial_result_params: PartialResultParams::default(),
+	};
+	Request::new(
+		id.into(),
+		WorkspaceSymbolRequest::METHOD.to_string(),
+		serde_json::to_value(params).expect("expected success"),
+	)
+}
+
+fn request_workspace_symbols(
+	conn: &Connection,
+	id: i32,
+	query: &str,
+) -> Option<Vec<lsp_types::SymbolInformation>> {
+	conn.sender
+		.send(Message::Request(workspace_symbol_request(id, query)))
+		.expect("expected success");
+	let response = recv_response(conn, id);
+	assert!(response.error.is_none(), "workspace/symbol should succeed");
+	serde_json::from_value(
+		response
+			.result
+			.expect("workspace/symbol should return result"),
+	)
+	.expect("expected success")
+}
+
+fn code_action_test_range() -> lsp_types::Range {
+	lsp_types::Range {
+		start: Position {
+			line: 0,
+			character: 0,
+		},
+		end: Position {
+			line: 0,
+			character: 20,
+		},
+	}
+}
+
+fn unused_variable_diagnostic() -> lsp_types::Diagnostic {
+	lsp_types::Diagnostic {
+		range: lsp_types::Range {
+			start: Position {
+				line: 0,
+				character: 6,
+			},
+			end: Position {
+				line: 0,
+				character: 7,
+			},
+		},
+		severity: Some(lsp_types::DiagnosticSeverity::WARNING),
+		code: Some(lsp_types::NumberOrString::String(
+			"unused-variable".to_string(),
+		)),
+		code_description: None,
+		source: Some("jrsonnet-lint".to_string()),
+		message: "unused variable".to_string(),
+		related_information: None,
+		tags: None,
+		data:
None,
+	}
+}
+
+fn request_code_actions(
+	conn: &Connection,
+	id: i32,
+	uri: &str,
+	diagnostics: Vec<lsp_types::Diagnostic>,
+	only: Option<Vec<lsp_types::CodeActionKind>>,
+) -> Option<Vec<lsp_types::CodeActionOrCommand>> {
+	conn.sender
+		.send(Message::Request(code_action_request(
+			id,
+			uri,
+			code_action_test_range(),
+			diagnostics,
+			only,
+		)))
+		.expect("expected success");
+	let response = recv_response(conn, id);
+	assert!(response.error.is_none(), "Code action should succeed");
+	serde_json::from_value(response.result.expect("should have result")).expect("expected success")
+}
+
+fn expected_unused_variable_quickfix(
+	uri: &str,
+	diagnostic: lsp_types::Diagnostic,
+) -> Vec<lsp_types::CodeActionOrCommand> {
+	let parsed_uri: lsp_types::Uri = uri.parse().expect("expected success");
+	let mut prefix_changes = std::collections::HashMap::new();
+	prefix_changes.insert(
+		parsed_uri.clone(),
+		vec![lsp_types::TextEdit {
+			range: lsp_types::Range {
+				start: Position {
+					line: 0,
+					character: 6,
+				},
+				end: Position {
+					line: 0,
+					character: 7,
+				},
+			},
+			new_text: "_x".to_string(),
+		}],
+	);
+	let mut remove_changes = std::collections::HashMap::new();
+	remove_changes.insert(
+		parsed_uri.clone(),
+		vec![lsp_types::TextEdit {
+			range: lsp_types::Range {
+				start: Position {
+					line: 0,
+					character: 0,
+				},
+				end: Position {
+					line: 0,
+					character: 11,
+				},
+			},
+			new_text: String::new(),
+		}],
+	);
+	let mut fix_all_changes = std::collections::HashMap::new();
+	fix_all_changes.insert(
+		parsed_uri,
+		vec![lsp_types::TextEdit {
+			range: lsp_types::Range {
+				start: Position {
+					line: 0,
+					character: 0,
+				},
+				end: Position {
+					line: 0,
+					character: 11,
+				},
+			},
+			new_text: String::new(),
+		}],
+	);
+
+	vec![
+		lsp_types::CodeActionOrCommand::CodeAction(lsp_types::CodeAction {
+			title: "Prefix `x` with `_`".to_string(),
+			kind: Some(lsp_types::CodeActionKind::QUICKFIX),
+			diagnostics: Some(vec![diagnostic.clone()]),
+			edit: Some(lsp_types::WorkspaceEdit {
+				changes: Some(prefix_changes),
+				document_changes: None,
+				change_annotations: None,
+			}),
+			command: None,
+
is_preferred: Some(true),
+			disabled: None,
+			data: None,
+		}),
+		lsp_types::CodeActionOrCommand::CodeAction(lsp_types::CodeAction {
+			title: "Remove unused binding `x`".to_string(),
+			kind: Some(lsp_types::CodeActionKind::QUICKFIX),
+			diagnostics: Some(vec![diagnostic.clone()]),
+			edit: Some(lsp_types::WorkspaceEdit {
+				changes: Some(remove_changes),
+				document_changes: None,
+				change_annotations: None,
+			}),
+			command: None,
+			is_preferred: Some(false),
+			disabled: None,
+			data: None,
+		}),
+		lsp_types::CodeActionOrCommand::CodeAction(lsp_types::CodeAction {
+			title: "Remove all unused bindings".to_string(),
+			kind: Some(lsp_types::CodeActionKind::SOURCE_FIX_ALL),
+			diagnostics: Some(vec![diagnostic]),
+			edit: Some(lsp_types::WorkspaceEdit {
+				changes: Some(fix_all_changes),
+				document_changes: None,
+				change_annotations: None,
+			}),
+			command: None,
+			is_preferred: Some(false),
+			disabled: None,
+			data: None,
+		}),
+	]
+}
+
+fn expected_unused_import_binding_actions(
+	uri: &str,
+	diagnostic: lsp_types::Diagnostic,
+) -> Vec<lsp_types::CodeActionOrCommand> {
+	let parsed_uri: lsp_types::Uri = uri.parse().expect("expected success");
+	let mut prefix_changes = std::collections::HashMap::new();
+	prefix_changes.insert(
+		parsed_uri.clone(),
+		vec![lsp_types::TextEdit {
+			range: lsp_types::Range {
+				start: Position {
+					line: 0,
+					character: 6,
+				},
+				end: Position {
+					line: 0,
+					character: 7,
+				},
+			},
+			new_text: "_x".to_string(),
+		}],
+	);
+	let mut remove_changes = std::collections::HashMap::new();
+	remove_changes.insert(
+		parsed_uri.clone(),
+		vec![lsp_types::TextEdit {
+			range: lsp_types::Range {
+				start: Position {
+					line: 0,
+					character: 0,
+				},
+				end: Position {
+					line: 0,
+					character: 32,
+				},
+			},
+			new_text: String::new(),
+		}],
+	);
+	let mut fix_all_changes = std::collections::HashMap::new();
+	fix_all_changes.insert(
+		parsed_uri,
+		vec![lsp_types::TextEdit {
+			range: lsp_types::Range {
+				start: Position {
+					line: 0,
+					character: 0,
+				},
+				end: Position {
+					line: 0,
+					character:
32,
+				},
+			},
+			new_text: String::new(),
+		}],
+	);
+
+	vec![
+		lsp_types::CodeActionOrCommand::CodeAction(lsp_types::CodeAction {
+			title: "Prefix `x` with `_`".to_string(),
+			kind: Some(lsp_types::CodeActionKind::QUICKFIX),
+			diagnostics: Some(vec![diagnostic.clone()]),
+			edit: Some(lsp_types::WorkspaceEdit {
+				changes: Some(prefix_changes),
+				document_changes: None,
+				change_annotations: None,
+			}),
+			command: None,
+			is_preferred: Some(true),
+			disabled: None,
+			data: None,
+		}),
+		lsp_types::CodeActionOrCommand::CodeAction(lsp_types::CodeAction {
+			title: "Remove unused binding `x`".to_string(),
+			kind: Some(lsp_types::CodeActionKind::QUICKFIX),
+			diagnostics: Some(vec![diagnostic.clone()]),
+			edit: Some(lsp_types::WorkspaceEdit {
+				changes: Some(remove_changes),
+				document_changes: None,
+				change_annotations: None,
+			}),
+			command: None,
+			is_preferred: Some(false),
+			disabled: None,
+			data: None,
+		}),
+		lsp_types::CodeActionOrCommand::CodeAction(lsp_types::CodeAction {
+			title: "Remove all unused bindings".to_string(),
+			kind: Some(lsp_types::CodeActionKind::SOURCE_FIX_ALL),
+			diagnostics: Some(vec![diagnostic]),
+			edit: Some(lsp_types::WorkspaceEdit {
+				changes: Some(fix_all_changes),
+				document_changes: None,
+				change_annotations: None,
+			}),
+			command: None,
+			is_preferred: Some(false),
+			disabled: None,
+			data: None,
+		}),
+	]
+}
+
+fn location(uri: &str, start_character: u32, end_character: u32) -> lsp_types::Location {
+	lsp_types::Location {
+		uri: uri.parse().expect("expected success"),
+		range: lsp_types::Range {
+			start: Position {
+				line: 0,
+				character: start_character,
+			},
+			end: Position {
+				line: 0,
+				character: end_character,
+			},
+		},
+	}
+}
+
+fn expected_find_references(uri: &str, include_declaration: bool) -> Vec<lsp_types::Location> {
+	let mut references = Vec::with_capacity(if include_declaration { 3 } else { 2 });
+	if include_declaration {
+		references.push(location(uri, 6, 7));
+	}
+	references.push(location(uri, 13, 14));
+
references.push(location(uri, 17, 18)); + references +} + +fn file_uri(path: &std::path::Path) -> String { + format!("file://{}", path.to_string_lossy()) +} + +fn recv_response(conn: &Connection, expected_id: i32) -> lsp_server::Response { + recv_response_by_id(conn, expected_id.into(), LONG_RESPONSE_TIMEOUT) + .expect("timed out waiting for response") +} + +fn parse_request_error_data(error: &lsp_server::ResponseError) -> RequestErrorData { + let data = error + .data + .clone() + .expect("response error should include structured data"); + serde_json::from_value(data).expect("response error data should decode") +} + +fn recv_publish_diagnostics_for_uri( + conn: &Connection, + uri: &str, + timeout: Duration, +) -> lsp_types::PublishDiagnosticsParams { + recv_until(conn, timeout, |message| { + let Message::Notification(notif) = message else { + return None; + }; + if notif.method != PublishDiagnostics::METHOD { + return None; + } + let params: lsp_types::PublishDiagnosticsParams = + serde_json::from_value(notif.params).expect("expected success"); + (params.uri.as_str() == uri).then_some(params) + }) + .expect("expected diagnostics notification") +} + +/// Run the server with the given connection in a separate thread. 
+fn run_server(connection: Connection) -> thread::JoinHandle<()> { + thread::spawn(move || { + let server = jrsonnet_lsp::server::Server::new(connection); + let _ = server.run(); + }) +} + +mod features; +mod formatting; +mod lifecycle; +mod navigation; +mod workspace_cross_file; diff --git a/crates/jrsonnet-lsp/tests/integration_test/navigation.rs b/crates/jrsonnet-lsp/tests/integration/navigation.rs similarity index 97% rename from crates/jrsonnet-lsp/tests/integration_test/navigation.rs rename to crates/jrsonnet-lsp/tests/integration/navigation.rs index a38b1572..791cca7c 100644 --- a/crates/jrsonnet-lsp/tests/integration_test/navigation.rs +++ b/crates/jrsonnet-lsp/tests/integration/navigation.rs @@ -98,7 +98,7 @@ fn test_goto_type_definition() { .sender .send(Message::Notification(did_open_notification(uri, text))) .expect("expected success"); - let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); + let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, RESPONSE_TIMEOUT); client_conn .sender @@ -166,7 +166,7 @@ fn test_goto_type_definition_matches_definition_for_local_alias() { .sender .send(Message::Notification(did_open_notification(uri, text))) .expect("expected success"); - let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); + let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, RESPONSE_TIMEOUT); let result = assert_type_definition_matches_definition(&client_conn, 2, 3, uri, 2, 0); assert_eq!( @@ -236,7 +236,7 @@ alias + std.length(plain)"#, .sender .send(Message::Notification(did_open_notification(&uri, &text))) .expect("expected success"); - let _ = recv_publish_diagnostics_for_uri(&client_conn, &uri, Duration::from_secs(5)); + let _ = recv_publish_diagnostics_for_uri(&client_conn, &uri, RESPONSE_TIMEOUT); // `alias` usage at line 3, col 0 resolves to imported field `foo`. 
let alias_result = assert_type_definition_matches_definition(&client_conn, 2, 3, &uri, 3, 0); @@ -302,7 +302,7 @@ fn test_navigation_matrix_local_alias() { .sender .send(Message::Notification(did_open_notification(uri, text))) .expect("expected success"); - let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); + let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, RESPONSE_TIMEOUT); let declaration = send_goto_and_parse( &client_conn, @@ -429,7 +429,7 @@ alias"#, .sender .send(Message::Notification(did_open_notification(&uri, &text))) .expect("expected success"); - let _ = recv_publish_diagnostics_for_uri(&client_conn, &uri, Duration::from_secs(5)); + let _ = recv_publish_diagnostics_for_uri(&client_conn, &uri, RESPONSE_TIMEOUT); let declaration = send_goto_and_parse( &client_conn, @@ -541,7 +541,7 @@ fn test_goto_declaration() { .sender .send(Message::Notification(did_open_notification(uri, text))) .expect("expected success"); - let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); + let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, RESPONSE_TIMEOUT); client_conn .sender @@ -608,7 +608,7 @@ fn test_goto_implementation_local_binding() { .sender .send(Message::Notification(did_open_notification(uri, text))) .expect("expected success"); - let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); + let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, RESPONSE_TIMEOUT); client_conn .sender @@ -695,7 +695,7 @@ fn test_goto_definition_and_declaration_diverge_for_local_alias() { .sender .send(Message::Notification(did_open_notification(uri, text))) .expect("expected success"); - let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); + let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, RESPONSE_TIMEOUT); client_conn .sender @@ -920,7 +920,7 @@ alias"#, .sender .send(Message::Notification(did_open_notification(&uri, &text))) 
.expect("expected success"); - let _ = recv_publish_diagnostics_for_uri(&client_conn, &uri, Duration::from_secs(5)); + let _ = recv_publish_diagnostics_for_uri(&client_conn, &uri, RESPONSE_TIMEOUT); client_conn .sender @@ -1043,7 +1043,7 @@ fn test_diagnostics_import_file_and_definition_resolution() { .send(Message::Notification(did_open_notification(&uri, &text))) .expect("expected success"); - let diagnostics = recv_publish_diagnostics_for_uri(&client_conn, &uri, Duration::from_secs(5)); + let diagnostics = recv_publish_diagnostics_for_uri(&client_conn, &uri, RESPONSE_TIMEOUT); assert_eq!(diagnostics.uri.as_str(), uri); assert!( diagnostics.diagnostics.is_empty(), diff --git a/crates/jrsonnet-lsp/tests/integration_test/workspace_cross_file.rs b/crates/jrsonnet-lsp/tests/integration/workspace_cross_file.rs similarity index 90% rename from crates/jrsonnet-lsp/tests/integration_test/workspace_cross_file.rs rename to crates/jrsonnet-lsp/tests/integration/workspace_cross_file.rs index ac1f9453..aa6b35b5 100644 --- a/crates/jrsonnet-lsp/tests/integration_test/workspace_cross_file.rs +++ b/crates/jrsonnet-lsp/tests/integration/workspace_cross_file.rs @@ -188,25 +188,18 @@ fn test_initialize_bootstraps_workspace_import_graph() { "file": lib_uri, "transitiveImporters": [main_uri], }); - let mut actual_result = serde_json::Value::Null; - for request_id in 2..=42 { - client_conn - .sender - .send(Message::Request(custom_find_transitive_importers_request( - request_id, &lib_uri, - ))) - .expect("expected success"); - let response = recv_response(&client_conn, request_id); - assert!( - response.error.is_none(), - "jrsonnet/findTransitiveImporters request should succeed" - ); - actual_result = response.result.expect("request should return result"); - if actual_result == expected_result { - break; - } - thread::sleep(Duration::from_millis(25)); - } + client_conn + .sender + .send(Message::Request(custom_find_transitive_importers_request( + 2, &lib_uri, + ))) + .expect("expected 
success"); + let response = recv_response(&client_conn, 2); + assert!( + response.error.is_none(), + "jrsonnet/findTransitiveImporters request should succeed" + ); + let actual_result = response.result.expect("request should return result"); assert_eq!(actual_result, expected_result); client_conn @@ -223,6 +216,81 @@ fn test_initialize_bootstraps_workspace_import_graph() { .expect("Server thread should exit cleanly"); } +#[test] +fn test_initialize_bootstrap_large_workspace_startup() { + let tmp = TempDir::new().expect("tempdir should be created"); + let lib_path = tmp.path().join("lib.jsonnet"); + fs::write(&lib_path, "{ value: 1 }").expect("lib should be written"); + + const FILE_COUNT: usize = 300; + let mut expected_importers = std::collections::BTreeSet::new(); + for idx in 0..FILE_COUNT { + let file_path = tmp.path().join(format!("svc_{idx}.jsonnet")); + fs::write( + &file_path, + format!("local lib = import 'lib.jsonnet'; {{ name: 'svc-{idx}', value: lib.value }}"), + ) + .expect("workspace file should be written"); + let file_uri = file_uri(&file_path.canonicalize().expect("file should canonicalize")); + expected_importers.insert(file_uri.to_string()); + } + + let root_uri = file_uri(&tmp.path().canonicalize().expect("root should canonicalize")); + let lib_uri = file_uri(&lib_path.canonicalize().expect("lib should canonicalize")); + + let (client_conn, server_conn) = Connection::memory(); + let server_thread = run_server(server_conn); + + client_conn + .sender + .send(Message::Request(initialize_request_with_root_uri( + 1, &root_uri, + ))) + .expect("expected success"); + let _ = recv_response(&client_conn, 1); + client_conn + .sender + .send(Message::Notification(initialized_notification())) + .expect("expected success"); + + client_conn + .sender + .send(Message::Request(custom_find_transitive_importers_request( + 2, &lib_uri, + ))) + .expect("expected success"); + let response = recv_response(&client_conn, 2); + assert!( + response.error.is_none(), + 
"jrsonnet/findTransitiveImporters request should succeed"
+	);
+	let result = response.result.expect("request should return result");
+	let importers = result
+		.get("transitiveImporters")
+		.expect("transitiveImporters key should exist")
+		.as_array()
+		.expect("transitiveImporters should be an array");
+	let actual_importers = importers
+		.iter()
+		.filter_map(|value| value.as_str().map(ToOwned::to_owned))
+		.collect::<std::collections::BTreeSet<_>>();
+
+	assert_eq!(actual_importers, expected_importers);
+
+	client_conn
+		.sender
+		.send(Message::Request(shutdown_request(3)))
+		.expect("expected success");
+	let _ = recv_response(&client_conn, 3);
+	client_conn
+		.sender
+		.send(Message::Notification(exit_notification()))
+		.expect("expected success");
+	server_thread
+		.join()
+		.expect("Server thread should exit cleanly");
+}
+
 #[test]
 fn test_workspace_symbol_includes_unopened_workspace_files() {
 	let tmp = TempDir::new().expect("tempdir should be created");
@@ -264,17 +332,7 @@ fn test_workspace_symbol_includes_unopened_workspace_files() {
 		&expected_uri,
 		"workspaceOnly",
 	));
-	let mut actual_symbols = symbols;
-	if actual_symbols != expected_symbols {
-		for request_id in 3..=43 {
-			actual_symbols = request_workspace_symbols(&client_conn, request_id, "workspaceOnly");
-			if actual_symbols == expected_symbols {
-				break;
-			}
-			thread::sleep(Duration::from_millis(25));
-		}
-	}
-	assert_eq!(actual_symbols, expected_symbols);
+	assert_eq!(symbols, expected_symbols);
 
 	client_conn
 		.sender
@@ -312,7 +370,7 @@ fn test_workspace_symbol_ranks_exact_prefix_then_substring() {
 		.sender
 		.send(Message::Notification(did_open_notification(uri, text)))
 		.expect("expected success");
-	let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5));
+	let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, RESPONSE_TIMEOUT);
 
 	let symbols = request_workspace_symbols(&client_conn, 2, "needle");
 	let expected_doc = jrsonnet_lsp_document::Document::new(
@@ -391,7 +449,7 @@ fn
test_workspace_symbol_caps_results_with_deterministic_order() { .sender .send(Message::Notification(did_open_notification(uri, &text))) .expect("expected success"); - let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); + let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, RESPONSE_TIMEOUT); let symbols = request_workspace_symbols(&client_conn, 2, "capsymbol"); let expected_doc = @@ -466,7 +524,7 @@ fn test_find_transitive_importers_returns_sorted_uris() { .sender .send(Message::Notification(did_open_notification(uri, text))) .expect("expected success"); - let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, Duration::from_secs(5)); + let _ = recv_publish_diagnostics_for_uri(&client_conn, uri, RESPONSE_TIMEOUT); } client_conn @@ -532,7 +590,7 @@ fn test_did_close_preserves_import_graph_for_references() { &lib_uri, lib_text, ))) .expect("expected success"); - let _ = recv_publish_diagnostics_for_uri(&client_conn, &lib_uri, Duration::from_secs(5)); + let _ = recv_publish_diagnostics_for_uri(&client_conn, &lib_uri, RESPONSE_TIMEOUT); client_conn .sender @@ -540,14 +598,14 @@ fn test_did_close_preserves_import_graph_for_references() { &main_uri, main_text, ))) .expect("expected success"); - let _ = recv_publish_diagnostics_for_uri(&client_conn, &main_uri, Duration::from_secs(5)); + let _ = recv_publish_diagnostics_for_uri(&client_conn, &main_uri, RESPONSE_TIMEOUT); client_conn .sender .send(Message::Notification(did_close_notification(&main_uri))) .expect("expected success"); let closed_diagnostics = - recv_publish_diagnostics_for_uri(&client_conn, &main_uri, Duration::from_secs(5)); + recv_publish_diagnostics_for_uri(&client_conn, &main_uri, RESPONSE_TIMEOUT); assert_eq!( closed_diagnostics, lsp_types::PublishDiagnosticsParams { @@ -780,7 +838,7 @@ fn test_cross_file_references_resolve_jpath_importers() { &lib_uri, lib_text, ))) .expect("expected success"); - let _ = recv_publish_diagnostics_for_uri(&client_conn, 
&lib_uri, Duration::from_secs(5)); + let _ = recv_publish_diagnostics_for_uri(&client_conn, &lib_uri, RESPONSE_TIMEOUT); client_conn .sender @@ -788,7 +846,7 @@ fn test_cross_file_references_resolve_jpath_importers() { &main_uri, main_text, ))) .expect("expected success"); - let _ = recv_publish_diagnostics_for_uri(&client_conn, &main_uri, Duration::from_secs(5)); + let _ = recv_publish_diagnostics_for_uri(&client_conn, &main_uri, RESPONSE_TIMEOUT); client_conn .sender @@ -862,7 +920,7 @@ fn test_cross_file_rename_updates_jpath_importers() { &lib_uri, lib_text, ))) .expect("expected success"); - let _ = recv_publish_diagnostics_for_uri(&client_conn, &lib_uri, Duration::from_secs(5)); + let _ = recv_publish_diagnostics_for_uri(&client_conn, &lib_uri, RESPONSE_TIMEOUT); client_conn .sender @@ -870,7 +928,7 @@ fn test_cross_file_rename_updates_jpath_importers() { &main_uri, main_text, ))) .expect("expected success"); - let _ = recv_publish_diagnostics_for_uri(&client_conn, &main_uri, Duration::from_secs(5)); + let _ = recv_publish_diagnostics_for_uri(&client_conn, &main_uri, RESPONSE_TIMEOUT); client_conn .sender @@ -997,7 +1055,7 @@ fn test_navigation_resolves_jpath_imports_from_graph() { &lib_uri, lib_text, ))) .expect("expected success"); - let _ = recv_publish_diagnostics_for_uri(&client_conn, &lib_uri, Duration::from_secs(5)); + let _ = recv_publish_diagnostics_for_uri(&client_conn, &lib_uri, RESPONSE_TIMEOUT); client_conn .sender @@ -1005,7 +1063,7 @@ fn test_navigation_resolves_jpath_imports_from_graph() { &main_uri, main_text, ))) .expect("expected success"); - let _ = recv_publish_diagnostics_for_uri(&client_conn, &main_uri, Duration::from_secs(5)); + let _ = recv_publish_diagnostics_for_uri(&client_conn, &main_uri, RESPONSE_TIMEOUT); client_conn .sender diff --git a/crates/jrsonnet-lsp/tests/integration_test.rs b/crates/jrsonnet-lsp/tests/integration_test.rs index dd9d7756..d37fb195 100644 --- a/crates/jrsonnet-lsp/tests/integration_test.rs +++ 
b/crates/jrsonnet-lsp/tests/integration_test.rs @@ -1,1143 +1,2 @@ -//! Integration tests for the LSP server. -//! -//! Uses in-process testing with channels rather than subprocess management, -//! following patterns from ast-grep and simple-completion-language-server. - -use std::{fmt::Write as _, fs, thread, time::Duration}; - -use assert_matches::assert_matches; -use jrsonnet_lsp_handlers::{SemanticTokenModifierName, SemanticTokenTypeName}; -use lsp_server::{Connection, Message, Notification, Request}; -use lsp_types::{ - notification::{ - Cancel, DidChangeConfiguration, DidChangeWatchedFiles, DidCloseTextDocument, - DidOpenTextDocument, DidSaveTextDocument, Notification as _, PublishDiagnostics, - }, - request::{ - CodeActionRequest, CodeLensRequest, CodeLensResolve, DocumentHighlightRequest, - ExecuteCommand, Formatting, GotoDeclaration, GotoDefinition, GotoImplementation, - GotoTypeDefinition, Initialize, InlayHintRefreshRequest, InlayHintRequest, RangeFormatting, - References, RegisterCapability, Rename, Request as _, SemanticTokensRangeRequest, Shutdown, - WorkspaceSymbolRequest, - }, - CancelParams, DidChangeConfigurationParams, DidChangeWatchedFilesClientCapabilities, - DidChangeWatchedFilesParams, DidChangeWatchedFilesRegistrationOptions, - DidCloseTextDocumentParams, DidOpenTextDocumentParams, DidSaveTextDocumentParams, - ExecuteCommandParams, FileChangeType, FileEvent, FileSystemWatcher, GlobPattern, - GotoDefinitionParams, GotoDefinitionResponse, InitializeParams, - InlayHintWorkspaceClientCapabilities, NumberOrString, OneOf, PartialResultParams, Position, - ReferenceContext, ReferenceParams, Registration, RegistrationParams, RelativePattern, - RenameParams, SemanticTokensRangeParams, TextDocumentIdentifier, TextDocumentItem, - TextDocumentPositionParams, WorkDoneProgressParams, WorkspaceClientCapabilities, - WorkspaceFolder, -}; -use serde::Deserialize; -use serde_json::json; -use tempfile::TempDir; - -#[derive(Debug, Clone, Copy, PartialEq, Eq)] 
-struct ExpectedSemanticToken { - line: u32, - start: u32, - len: u32, - token_type: SemanticTokenTypeName, - modifiers: u32, -} - -#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] -#[serde(tag = "kind", rename_all = "snake_case")] -enum RequestErrorData { - InvalidParams { method: String }, - MethodNotFound { method: String }, - UnknownExecuteCommand { command: String }, - MissingExecuteHandler { command: String }, - RequestCanceled { method: String }, - ServerShuttingDown, - AsyncHandlerFailed { method: String }, - AsyncHandlerPanicked { method: String }, -} - -impl ExpectedSemanticToken { - const fn new( - line: u32, - start: u32, - len: u32, - token_type: SemanticTokenTypeName, - modifiers: u32, - ) -> Self { - Self { - line, - start, - len, - token_type, - modifiers, - } - } -} - -fn semantic_modifiers(modifiers: &[SemanticTokenModifierName]) -> u32 { - modifiers - .iter() - .fold(0_u32, |acc, modifier| acc | modifier.as_bitset()) -} - -fn semantic_token( - line: u32, - start: u32, - len: u32, - token_type: SemanticTokenTypeName, - modifiers: &[SemanticTokenModifierName], -) -> ExpectedSemanticToken { - ExpectedSemanticToken::new(line, start, len, token_type, semantic_modifiers(modifiers)) -} - -fn encode_semantic_tokens(mut tokens: Vec) -> lsp_types::SemanticTokens { - tokens.sort_by_key(|token| (token.line, token.start)); - let mut encoded = Vec::with_capacity(tokens.len()); - let mut prev_line = 0_u32; - let mut prev_start = 0_u32; - for token in tokens { - let delta_line = token.line - prev_line; - let delta_start = if delta_line == 0 { - token.start - prev_start - } else { - token.start - }; - encoded.push(lsp_types::SemanticToken { - delta_line, - delta_start, - length: token.len, - token_type: token.token_type.as_index(), - token_modifiers_bitset: token.modifiers, - }); - prev_line = token.line; - prev_start = token.start; - } - - lsp_types::SemanticTokens { - result_id: None, - data: encoded, - } -} - -/// Helper to create an initialize request. 
-fn initialize_request(id: i32) -> Request { - initialize_request_with_options(id, serde_json::Value::Null) -} - -/// Helper to create an initialize request with custom initialization options. -fn initialize_request_with_options(id: i32, initialization_options: serde_json::Value) -> Request { - let mut params = InitializeParams::default(); - if !initialization_options.is_null() { - params.initialization_options = Some(initialization_options); - } - - Request::new( - id.into(), - Initialize::METHOD.to_string(), - serde_json::to_value(params).expect("expected success"), - ) -} - -/// Helper to create an initialize request with a workspace root URI. -fn initialize_request_with_root_uri(id: i32, root_uri: &str) -> Request { - let mut params = serde_json::to_value(InitializeParams::default()).expect("expected success"); - if let Some(object) = params.as_object_mut() { - object.insert("rootUri".to_string(), serde_json::json!(root_uri)); - } - Request::new(id.into(), Initialize::METHOD.to_string(), params) -} - -/// Helper to create an initialize request that advertises dynamic watched-file -/// registration support. -fn initialize_request_with_dynamic_watched_files(id: i32) -> Request { - let params = InitializeParams { - capabilities: lsp_types::ClientCapabilities { - workspace: Some(WorkspaceClientCapabilities { - did_change_watched_files: Some(DidChangeWatchedFilesClientCapabilities { - dynamic_registration: Some(true), - relative_pattern_support: Some(false), - }), - ..WorkspaceClientCapabilities::default() - }), - ..lsp_types::ClientCapabilities::default() - }, - ..InitializeParams::default() - }; - - Request::new( - id.into(), - Initialize::METHOD.to_string(), - serde_json::to_value(params).expect("expected success"), - ) -} - -/// Helper to create an initialize request that advertises dynamic watched-file -/// registration support and relative pattern support. 
-fn initialize_request_with_dynamic_watched_files_relative(id: i32, root_uri: &str) -> Request { - let params = InitializeParams { - workspace_folders: Some(vec![WorkspaceFolder { - uri: root_uri.parse().expect("expected success"), - name: "workspace".to_owned(), - }]), - capabilities: lsp_types::ClientCapabilities { - workspace: Some(WorkspaceClientCapabilities { - did_change_watched_files: Some(DidChangeWatchedFilesClientCapabilities { - dynamic_registration: Some(true), - relative_pattern_support: Some(true), - }), - ..WorkspaceClientCapabilities::default() - }), - ..lsp_types::ClientCapabilities::default() - }, - ..InitializeParams::default() - }; - - Request::new( - id.into(), - Initialize::METHOD.to_string(), - serde_json::to_value(params).expect("expected success"), - ) -} - -/// Helper to create an initialize request that advertises -/// `workspace/inlayHint/refresh` support. -fn initialize_request_with_inlay_hint_refresh_support(id: i32) -> Request { - let params = InitializeParams { - capabilities: lsp_types::ClientCapabilities { - workspace: Some(WorkspaceClientCapabilities { - inlay_hint: Some(InlayHintWorkspaceClientCapabilities { - refresh_support: Some(true), - }), - ..WorkspaceClientCapabilities::default() - }), - ..lsp_types::ClientCapabilities::default() - }, - ..InitializeParams::default() - }; - - Request::new( - id.into(), - Initialize::METHOD.to_string(), - serde_json::to_value(params).expect("expected success"), - ) -} - -/// Helper to create a shutdown request. -fn shutdown_request(id: i32) -> Request { - Request::new( - id.into(), - Shutdown::METHOD.to_string(), - serde_json::Value::Null, - ) -} - -/// Helper to create an initialized notification. -fn initialized_notification() -> Notification { - Notification::new("initialized".to_string(), json!({})) -} - -/// Helper to create an exit notification. 
-fn exit_notification() -> Notification { - Notification::new("exit".to_string(), json!({})) -} - -/// Helper to create a $/cancelRequest notification. -fn cancel_request_notification(request_id: i32) -> Notification { - let params = CancelParams { - id: NumberOrString::Number(request_id), - }; - Notification::new( - Cancel::METHOD.to_string(), - serde_json::to_value(params).expect("expected success"), - ) -} - -/// Helper to create a didOpen notification. -fn did_open_notification(uri: &str, text: &str) -> Notification { - let params = DidOpenTextDocumentParams { - text_document: TextDocumentItem { - uri: uri.parse().expect("expected success"), - language_id: "jsonnet".to_string(), - version: 1, - text: text.to_string(), - }, - }; - Notification::new( - DidOpenTextDocument::METHOD.to_string(), - serde_json::to_value(params).expect("expected success"), - ) -} - -fn did_save_notification(uri: &str, text: Option<&str>) -> Notification { - let params = DidSaveTextDocumentParams { - text_document: TextDocumentIdentifier { - uri: uri.parse().expect("expected success"), - }, - text: text.map(ToString::to_string), - }; - Notification::new( - DidSaveTextDocument::METHOD.to_string(), - serde_json::to_value(params).expect("expected success"), - ) -} - -fn did_close_notification(uri: &str) -> Notification { - let params = DidCloseTextDocumentParams { - text_document: TextDocumentIdentifier { - uri: uri.parse().expect("expected success"), - }, - }; - Notification::new( - DidCloseTextDocument::METHOD.to_string(), - serde_json::to_value(params).expect("expected success"), - ) -} - -/// Helper to create a goto definition request. 
-fn goto_definition_request(id: i32, uri: &str, line: u32, character: u32) -> Request { - let params = GotoDefinitionParams { - text_document_position_params: TextDocumentPositionParams { - text_document: TextDocumentIdentifier { - uri: uri.parse().expect("expected success"), - }, - position: Position { line, character }, - }, - work_done_progress_params: WorkDoneProgressParams::default(), - partial_result_params: PartialResultParams::default(), - }; - Request::new( - id.into(), - GotoDefinition::METHOD.to_string(), - serde_json::to_value(params).expect("expected success"), - ) -} - -fn goto_type_definition_request(id: i32, uri: &str, line: u32, character: u32) -> Request { - let params = GotoDefinitionParams { - text_document_position_params: TextDocumentPositionParams { - text_document: TextDocumentIdentifier { - uri: uri.parse().expect("expected success"), - }, - position: Position { line, character }, - }, - work_done_progress_params: WorkDoneProgressParams::default(), - partial_result_params: PartialResultParams::default(), - }; - Request::new( - id.into(), - GotoTypeDefinition::METHOD.to_string(), - serde_json::to_value(params).expect("expected success"), - ) -} - -fn assert_type_definition_matches_definition( - conn: &Connection, - definition_id: i32, - type_definition_id: i32, - uri: &str, - line: u32, - character: u32, -) -> Option { - conn.sender - .send(Message::Request(goto_definition_request( - definition_id, - uri, - line, - character, - ))) - .expect("expected success"); - let definition_response = recv_response(conn, definition_id); - assert!( - definition_response.error.is_none(), - "Goto definition request should succeed" - ); - let definition_result: Option = - serde_json::from_value(definition_response.result.expect("should have result")) - .expect("expected success"); - - conn.sender - .send(Message::Request(goto_type_definition_request( - type_definition_id, - uri, - line, - character, - ))) - .expect("expected success"); - let 
type_definition_response = recv_response(conn, type_definition_id); - assert!( - type_definition_response.error.is_none(), - "Goto type definition request should succeed" - ); - let type_definition_result: Option = - serde_json::from_value(type_definition_response.result.expect("should have result")) - .expect("expected success"); - - assert_eq!( - type_definition_result, definition_result, - "typeDefinition should match definition for Jsonnet symbol navigation" - ); - definition_result -} - -fn send_goto_and_parse( - conn: &Connection, - id: i32, - label: &str, - request: Request, -) -> Option { - conn.sender - .send(Message::Request(request)) - .expect("expected success"); - let response = recv_response(conn, id); - assert!(response.error.is_none(), "{label} request should succeed"); - serde_json::from_value(response.result.expect("should have result")).expect("expected success") -} - -/// Helper to create a goto declaration request. -fn goto_declaration_request(id: i32, uri: &str, line: u32, character: u32) -> Request { - let params = GotoDefinitionParams { - text_document_position_params: TextDocumentPositionParams { - text_document: TextDocumentIdentifier { - uri: uri.parse().expect("expected success"), - }, - position: Position { line, character }, - }, - work_done_progress_params: WorkDoneProgressParams::default(), - partial_result_params: PartialResultParams::default(), - }; - Request::new( - id.into(), - GotoDeclaration::METHOD.to_string(), - serde_json::to_value(params).expect("expected success"), - ) -} - -fn goto_implementation_request(id: i32, uri: &str, line: u32, character: u32) -> Request { - let params = GotoDefinitionParams { - text_document_position_params: TextDocumentPositionParams { - text_document: TextDocumentIdentifier { - uri: uri.parse().expect("expected success"), - }, - position: Position { line, character }, - }, - work_done_progress_params: WorkDoneProgressParams::default(), - partial_result_params: PartialResultParams::default(), - 
}; - Request::new( - id.into(), - GotoImplementation::METHOD.to_string(), - serde_json::to_value(params).expect("expected success"), - ) -} - -/// Helper to create a references request. -fn references_request( - id: i32, - uri: &str, - line: u32, - character: u32, - include_declaration: bool, -) -> Request { - let params = ReferenceParams { - text_document_position: TextDocumentPositionParams { - text_document: TextDocumentIdentifier { - uri: uri.parse().expect("expected success"), - }, - position: Position { line, character }, - }, - context: ReferenceContext { - include_declaration, - }, - work_done_progress_params: WorkDoneProgressParams::default(), - partial_result_params: PartialResultParams::default(), - }; - Request::new( - id.into(), - References::METHOD.to_string(), - serde_json::to_value(params).expect("expected success"), - ) -} - -fn document_highlight_request(id: i32, uri: &str, line: u32, character: u32) -> Request { - let params = lsp_types::DocumentHighlightParams { - text_document_position_params: TextDocumentPositionParams { - text_document: TextDocumentIdentifier { - uri: uri.parse().expect("expected success"), - }, - position: Position { line, character }, - }, - work_done_progress_params: WorkDoneProgressParams::default(), - partial_result_params: PartialResultParams::default(), - }; - Request::new( - id.into(), - DocumentHighlightRequest::METHOD.to_string(), - serde_json::to_value(params).expect("expected success"), - ) -} - -fn rename_request(id: i32, uri: &str, line: u32, character: u32, new_name: &str) -> Request { - let params = RenameParams { - text_document_position: TextDocumentPositionParams { - text_document: TextDocumentIdentifier { - uri: uri.parse().expect("expected success"), - }, - position: Position { line, character }, - }, - new_name: new_name.to_string(), - work_done_progress_params: WorkDoneProgressParams::default(), - }; - Request::new( - id.into(), - Rename::METHOD.to_string(), - 
serde_json::to_value(params).expect("expected success"), - ) -} - -fn inlay_hint_request( - id: i32, - uri: &str, - start_line: u32, - start_character: u32, - end_line: u32, - end_character: u32, -) -> Request { - let params = lsp_types::InlayHintParams { - work_done_progress_params: WorkDoneProgressParams::default(), - text_document: TextDocumentIdentifier { - uri: uri.parse().expect("expected success"), - }, - range: lsp_types::Range { - start: Position { - line: start_line, - character: start_character, - }, - end: Position { - line: end_line, - character: end_character, - }, - }, - }; - Request::new( - id.into(), - InlayHintRequest::METHOD.to_string(), - serde_json::to_value(params).expect("expected success"), - ) -} - -fn formatting_request(id: i32, uri: &str, tab_size: u32, insert_spaces: bool) -> Request { - formatting_request_with_options(id, uri, tab_size, insert_spaces, None, None, None) -} - -fn formatting_options( - tab_size: u32, - insert_spaces: bool, - trim_trailing_whitespace: Option, - insert_final_newline: Option, - trim_final_newlines: Option, -) -> lsp_types::FormattingOptions { - lsp_types::FormattingOptions { - tab_size, - insert_spaces, - properties: std::collections::HashMap::new(), - trim_trailing_whitespace, - insert_final_newline, - trim_final_newlines, - } -} - -fn formatting_request_with_options( - id: i32, - uri: &str, - tab_size: u32, - insert_spaces: bool, - trim_trailing_whitespace: Option, - insert_final_newline: Option, - trim_final_newlines: Option, -) -> Request { - let params = lsp_types::DocumentFormattingParams { - text_document: TextDocumentIdentifier { - uri: uri.parse().expect("expected success"), - }, - options: formatting_options( - tab_size, - insert_spaces, - trim_trailing_whitespace, - insert_final_newline, - trim_final_newlines, - ), - work_done_progress_params: WorkDoneProgressParams::default(), - }; - Request::new( - id.into(), - Formatting::METHOD.to_string(), - serde_json::to_value(params).expect("expected 
success"), - ) -} - -fn range_formatting_request_with_options( - id: i32, - uri: &str, - range: lsp_types::Range, - options: lsp_types::FormattingOptions, -) -> Request { - let params = lsp_types::DocumentRangeFormattingParams { - text_document: TextDocumentIdentifier { - uri: uri.parse().expect("expected success"), - }, - range, - options, - work_done_progress_params: WorkDoneProgressParams::default(), - }; - Request::new( - id.into(), - RangeFormatting::METHOD.to_string(), - serde_json::to_value(params).expect("expected success"), - ) -} - -fn semantic_tokens_range_request( - id: i32, - uri: &str, - start_line: u32, - start_character: u32, - end_line: u32, - end_character: u32, -) -> Request { - let params = SemanticTokensRangeParams { - work_done_progress_params: WorkDoneProgressParams::default(), - partial_result_params: PartialResultParams::default(), - text_document: TextDocumentIdentifier { - uri: uri.parse().expect("expected success"), - }, - range: lsp_types::Range { - start: Position { - line: start_line, - character: start_character, - }, - end: Position { - line: end_line, - character: end_character, - }, - }, - }; - Request::new( - id.into(), - SemanticTokensRangeRequest::METHOD.to_string(), - serde_json::to_value(params).expect("expected success"), - ) -} - -fn code_action_request( - id: i32, - uri: &str, - range: lsp_types::Range, - diagnostics: Vec, - only: Option>, -) -> Request { - let params = lsp_types::CodeActionParams { - text_document: TextDocumentIdentifier { - uri: uri.parse().expect("expected success"), - }, - range, - context: lsp_types::CodeActionContext { - diagnostics, - only, - trigger_kind: None, - }, - work_done_progress_params: WorkDoneProgressParams::default(), - partial_result_params: PartialResultParams::default(), - }; - Request::new( - id.into(), - CodeActionRequest::METHOD.to_string(), - serde_json::to_value(params).expect("expected success"), - ) -} - -fn code_lens_request(id: i32, uri: &str) -> Request { - let params = 
lsp_types::CodeLensParams { - text_document: TextDocumentIdentifier { - uri: uri.parse().expect("expected success"), - }, - work_done_progress_params: WorkDoneProgressParams::default(), - partial_result_params: PartialResultParams::default(), - }; - Request::new( - id.into(), - CodeLensRequest::METHOD.to_string(), - serde_json::to_value(params).expect("expected success"), - ) -} - -fn code_lens_resolve_request(id: i32, lens: lsp_types::CodeLens) -> Request { - Request::new( - id.into(), - CodeLensResolve::METHOD.to_string(), - serde_json::to_value(lens).expect("expected success"), - ) -} - -fn did_change_watched_files_notification(changes: Vec) -> Notification { - let params = DidChangeWatchedFilesParams { changes }; - Notification::new( - DidChangeWatchedFiles::METHOD.to_string(), - serde_json::to_value(params).expect("expected success"), - ) -} - -fn did_change_configuration_notification(settings: serde_json::Value) -> Notification { - let params = DidChangeConfigurationParams { settings }; - Notification::new( - DidChangeConfiguration::METHOD.to_string(), - serde_json::to_value(params).expect("expected success"), - ) -} - -fn execute_command_request(id: i32, command: &str, arguments: Vec) -> Request { - let params = ExecuteCommandParams { - command: command.to_string(), - arguments, - work_done_progress_params: WorkDoneProgressParams::default(), - }; - Request::new( - id.into(), - ExecuteCommand::METHOD.to_string(), - serde_json::to_value(params).expect("expected success"), - ) -} - -fn custom_eval_file_request(id: i32, uri: &str) -> Request { - Request::new( - id.into(), - "jrsonnet/evalFile".to_string(), - serde_json::json!({ - "textDocument": { - "uri": uri, - }, - }), - ) -} - -fn custom_eval_expression_request(id: i32, expression: &str, base_uri: Option<&str>) -> Request { - let mut params = serde_json::json!({ - "expression": expression, - }); - if let Some(base_uri) = base_uri { - params - .as_object_mut() - .expect("evalExpression params should be an 
object") - .insert( - "baseDocument".to_string(), - serde_json::json!({ - "uri": base_uri, - }), - ); - } - - Request::new(id.into(), "jrsonnet/evalExpression".to_string(), params) -} - -fn custom_find_transitive_importers_request(id: i32, uri: &str) -> Request { - Request::new( - id.into(), - "jrsonnet/findTransitiveImporters".to_string(), - serde_json::json!({ - "textDocument": { - "uri": uri, - }, - }), - ) -} - -fn workspace_symbol_request(id: i32, query: &str) -> Request { - let params = lsp_types::WorkspaceSymbolParams { - query: query.to_string(), - work_done_progress_params: WorkDoneProgressParams::default(), - partial_result_params: PartialResultParams::default(), - }; - Request::new( - id.into(), - WorkspaceSymbolRequest::METHOD.to_string(), - serde_json::to_value(params).expect("expected success"), - ) -} - -fn request_workspace_symbols( - conn: &Connection, - id: i32, - query: &str, -) -> Option> { - conn.sender - .send(Message::Request(workspace_symbol_request(id, query))) - .expect("expected success"); - let response = recv_response(conn, id); - assert!(response.error.is_none(), "workspace/symbol should succeed"); - serde_json::from_value( - response - .result - .expect("workspace/symbol should return result"), - ) - .expect("expected success") -} - -fn code_action_test_range() -> lsp_types::Range { - lsp_types::Range { - start: Position { - line: 0, - character: 0, - }, - end: Position { - line: 0, - character: 20, - }, - } -} - -fn unused_variable_diagnostic() -> lsp_types::Diagnostic { - lsp_types::Diagnostic { - range: lsp_types::Range { - start: Position { - line: 0, - character: 6, - }, - end: Position { - line: 0, - character: 7, - }, - }, - severity: Some(lsp_types::DiagnosticSeverity::WARNING), - code: Some(lsp_types::NumberOrString::String( - "unused-variable".to_string(), - )), - code_description: None, - source: Some("jrsonnet-lint".to_string()), - message: "unused variable".to_string(), - related_information: None, - tags: None, - data: 
None, - } -} - -fn request_code_actions( - conn: &Connection, - id: i32, - uri: &str, - diagnostics: Vec, - only: Option>, -) -> Option> { - conn.sender - .send(Message::Request(code_action_request( - id, - uri, - code_action_test_range(), - diagnostics, - only, - ))) - .expect("expected success"); - let response = recv_response(conn, id); - assert!(response.error.is_none(), "Code action should succeed"); - serde_json::from_value(response.result.expect("should have result")).expect("expected success") -} - -fn expected_unused_variable_quickfix( - uri: &str, - diagnostic: lsp_types::Diagnostic, -) -> Vec { - let parsed_uri: lsp_types::Uri = uri.parse().expect("expected success"); - let mut prefix_changes = std::collections::HashMap::new(); - prefix_changes.insert( - parsed_uri.clone(), - vec![lsp_types::TextEdit { - range: lsp_types::Range { - start: Position { - line: 0, - character: 6, - }, - end: Position { - line: 0, - character: 7, - }, - }, - new_text: "_x".to_string(), - }], - ); - let mut remove_changes = std::collections::HashMap::new(); - remove_changes.insert( - parsed_uri.clone(), - vec![lsp_types::TextEdit { - range: lsp_types::Range { - start: Position { - line: 0, - character: 0, - }, - end: Position { - line: 0, - character: 11, - }, - }, - new_text: String::new(), - }], - ); - let mut fix_all_changes = std::collections::HashMap::new(); - fix_all_changes.insert( - parsed_uri, - vec![lsp_types::TextEdit { - range: lsp_types::Range { - start: Position { - line: 0, - character: 0, - }, - end: Position { - line: 0, - character: 11, - }, - }, - new_text: String::new(), - }], - ); - - vec![ - lsp_types::CodeActionOrCommand::CodeAction(lsp_types::CodeAction { - title: "Prefix `x` with `_`".to_string(), - kind: Some(lsp_types::CodeActionKind::QUICKFIX), - diagnostics: Some(vec![diagnostic.clone()]), - edit: Some(lsp_types::WorkspaceEdit { - changes: Some(prefix_changes), - document_changes: None, - change_annotations: None, - }), - command: None, - 
is_preferred: Some(true), - disabled: None, - data: None, - }), - lsp_types::CodeActionOrCommand::CodeAction(lsp_types::CodeAction { - title: "Remove unused binding `x`".to_string(), - kind: Some(lsp_types::CodeActionKind::QUICKFIX), - diagnostics: Some(vec![diagnostic.clone()]), - edit: Some(lsp_types::WorkspaceEdit { - changes: Some(remove_changes), - document_changes: None, - change_annotations: None, - }), - command: None, - is_preferred: Some(false), - disabled: None, - data: None, - }), - lsp_types::CodeActionOrCommand::CodeAction(lsp_types::CodeAction { - title: "Remove all unused bindings".to_string(), - kind: Some(lsp_types::CodeActionKind::SOURCE_FIX_ALL), - diagnostics: Some(vec![diagnostic]), - edit: Some(lsp_types::WorkspaceEdit { - changes: Some(fix_all_changes), - document_changes: None, - change_annotations: None, - }), - command: None, - is_preferred: Some(false), - disabled: None, - data: None, - }), - ] -} - -fn expected_unused_import_binding_actions( - uri: &str, - diagnostic: lsp_types::Diagnostic, -) -> Vec { - let parsed_uri: lsp_types::Uri = uri.parse().expect("expected success"); - let mut prefix_changes = std::collections::HashMap::new(); - prefix_changes.insert( - parsed_uri.clone(), - vec![lsp_types::TextEdit { - range: lsp_types::Range { - start: Position { - line: 0, - character: 6, - }, - end: Position { - line: 0, - character: 7, - }, - }, - new_text: "_x".to_string(), - }], - ); - let mut remove_changes = std::collections::HashMap::new(); - remove_changes.insert( - parsed_uri.clone(), - vec![lsp_types::TextEdit { - range: lsp_types::Range { - start: Position { - line: 0, - character: 0, - }, - end: Position { - line: 0, - character: 32, - }, - }, - new_text: String::new(), - }], - ); - let mut fix_all_changes = std::collections::HashMap::new(); - fix_all_changes.insert( - parsed_uri, - vec![lsp_types::TextEdit { - range: lsp_types::Range { - start: Position { - line: 0, - character: 0, - }, - end: Position { - line: 0, - character: 
32, - }, - }, - new_text: String::new(), - }], - ); - - vec![ - lsp_types::CodeActionOrCommand::CodeAction(lsp_types::CodeAction { - title: "Prefix `x` with `_`".to_string(), - kind: Some(lsp_types::CodeActionKind::QUICKFIX), - diagnostics: Some(vec![diagnostic.clone()]), - edit: Some(lsp_types::WorkspaceEdit { - changes: Some(prefix_changes), - document_changes: None, - change_annotations: None, - }), - command: None, - is_preferred: Some(true), - disabled: None, - data: None, - }), - lsp_types::CodeActionOrCommand::CodeAction(lsp_types::CodeAction { - title: "Remove unused binding `x`".to_string(), - kind: Some(lsp_types::CodeActionKind::QUICKFIX), - diagnostics: Some(vec![diagnostic.clone()]), - edit: Some(lsp_types::WorkspaceEdit { - changes: Some(remove_changes), - document_changes: None, - change_annotations: None, - }), - command: None, - is_preferred: Some(false), - disabled: None, - data: None, - }), - lsp_types::CodeActionOrCommand::CodeAction(lsp_types::CodeAction { - title: "Remove all unused bindings".to_string(), - kind: Some(lsp_types::CodeActionKind::SOURCE_FIX_ALL), - diagnostics: Some(vec![diagnostic]), - edit: Some(lsp_types::WorkspaceEdit { - changes: Some(fix_all_changes), - document_changes: None, - change_annotations: None, - }), - command: None, - is_preferred: Some(false), - disabled: None, - data: None, - }), - ] -} - -fn location(uri: &str, start_character: u32, end_character: u32) -> lsp_types::Location { - lsp_types::Location { - uri: uri.parse().expect("expected success"), - range: lsp_types::Range { - start: Position { - line: 0, - character: start_character, - }, - end: Position { - line: 0, - character: end_character, - }, - }, - } -} - -fn expected_find_references(uri: &str, include_declaration: bool) -> Vec { - let mut references = Vec::with_capacity(if include_declaration { 3 } else { 2 }); - if include_declaration { - references.push(location(uri, 6, 7)); - } - references.push(location(uri, 13, 14)); - 
references.push(location(uri, 17, 18)); - references -} - -fn file_uri(path: &std::path::Path) -> String { - format!("file://{}", path.to_string_lossy()) -} - -fn recv_response(conn: &Connection, expected_id: i32) -> lsp_server::Response { - loop { - let message = conn - .receiver - .recv_timeout(Duration::from_secs(3)) - .expect("expected response message"); - if let Message::Response(response) = message { - if response.id == expected_id.into() { - return response; - } - } - } -} - -fn parse_request_error_data(error: &lsp_server::ResponseError) -> RequestErrorData { - let data = error - .data - .clone() - .expect("response error should include structured data"); - serde_json::from_value(data).expect("response error data should decode") -} - -fn recv_publish_diagnostics_for_uri( - conn: &Connection, - uri: &str, - timeout: Duration, -) -> lsp_types::PublishDiagnosticsParams { - loop { - let message = conn - .receiver - .recv_timeout(timeout) - .expect("expected diagnostics notification"); - if let Message::Notification(notif) = message { - if notif.method != PublishDiagnostics::METHOD { - continue; - } - - let params: lsp_types::PublishDiagnosticsParams = - serde_json::from_value(notif.params).expect("expected success"); - if params.uri.as_str() == uri { - return params; - } - } - } -} - -/// Run the server with the given connection in a separate thread. 
-fn run_server(connection: Connection) -> thread::JoinHandle<()> { - thread::spawn(move || { - let server = jrsonnet_lsp::server::Server::new(connection); - let _ = server.run(); - }) -} - -#[path = "integration_test/features.rs"] -mod features; -#[path = "integration_test/formatting.rs"] -mod formatting; -#[path = "integration_test/lifecycle.rs"] -mod lifecycle; -#[path = "integration_test/navigation.rs"] -mod navigation; -#[path = "integration_test/workspace_cross_file.rs"] -mod workspace_cross_file; +mod integration; +mod support; diff --git a/crates/jrsonnet-lsp/tests/stress_tests.rs b/crates/jrsonnet-lsp/tests/stress_tests.rs index 4c1921c7..6c23b5db 100644 --- a/crates/jrsonnet-lsp/tests/stress_tests.rs +++ b/crates/jrsonnet-lsp/tests/stress_tests.rs @@ -6,10 +6,14 @@ //! - Many documents open simultaneously //! - Large document handling -use std::{fmt::Write as _, sync::Arc, thread, time::Duration}; +use std::{ + fmt::Write as _, + sync::Arc, + thread, + time::{Duration, Instant}, +}; use assert_matches::assert_matches; -use crossbeam_channel::RecvTimeoutError; use lsp_server::{Connection, Message, Notification, Request}; use lsp_types::{ notification::{DidChangeTextDocument, DidOpenTextDocument, Notification as _}, @@ -21,6 +25,11 @@ use lsp_types::{ }; use serde_json::json; +mod support; +use support::{recv_response_by_id, recv_until, LONG_RESPONSE_TIMEOUT, RESPONSE_TIMEOUT}; + +const QUIESCENCE_TIMEOUT: Duration = Duration::from_millis(100); + // ============================================================================= // Test Helpers // ============================================================================= @@ -212,10 +221,9 @@ fn shutdown_server(client_conn: &Connection, server_thread: thread::JoinHandle<( .expect("expected success"); // Receive shutdown response - let response = client_conn.receiver.recv().expect("expected success"); - assert_matches!(response, Message::Response(resp) => { - assert!(resp.error.is_none(), "Shutdown should 
succeed"); - }); + let response = recv_response_by_id(client_conn, req_id.into(), RESPONSE_TIMEOUT) + .expect("expected shutdown response"); + assert!(response.error.is_none(), "Shutdown should succeed"); // Send exit notification client_conn @@ -229,11 +237,6 @@ fn shutdown_server(client_conn: &Connection, server_thread: thread::JoinHandle<( .expect("Server thread should exit cleanly"); } -/// Drain all pending messages from the receiver. -fn drain_messages(conn: &Connection, timeout: Duration) { - while conn.receiver.recv_timeout(timeout).is_ok() {} -} - // ============================================================================= // Stress Tests // ============================================================================= @@ -255,7 +258,7 @@ fn test_rapid_document_changes() { .expect("expected success"); // Wait for initial diagnostics - drain_messages(&client_conn, Duration::from_millis(50)); + let _ = recv_until::<()>(&client_conn, QUIESCENCE_TIMEOUT, |_| None); // Rapid full-document changes (simulating fast typing) for i in 2..=100 { @@ -269,7 +272,7 @@ fn test_rapid_document_changes() { } // Wait for processing to settle - drain_messages(&client_conn, Duration::from_millis(200)); + let _ = recv_until::<()>(&client_conn, QUIESCENCE_TIMEOUT, |_| None); // Verify server is still responsive with a hover request client_conn @@ -278,14 +281,9 @@ fn test_rapid_document_changes() { .expect("expected success"); // Should get a response (not necessarily with content, but should respond) - let response = client_conn - .receiver - .recv_timeout(Duration::from_secs(2)) + let response = recv_response_by_id(&client_conn, 1000.into(), RESPONSE_TIMEOUT) .expect("Server should respond after rapid changes"); - assert_matches!(response, Message::Response(resp) => { - assert_eq!(resp.id, 1000.into()); - assert!(resp.error.is_none(), "Request should not error"); - }); + assert!(response.error.is_none(), "Request should not error"); shutdown_server(&client_conn, 
server_thread, 1001); } @@ -308,7 +306,7 @@ fn test_rapid_incremental_changes() { .expect("expected success"); // Wait for initial diagnostics - drain_messages(&client_conn, Duration::from_millis(50)); + let _ = recv_until::<()>(&client_conn, QUIESCENCE_TIMEOUT, |_| None); // Simulate typing "x + 1" character by character at the end let chars = ['x', ' ', '+', ' ', '1']; @@ -332,7 +330,7 @@ fn test_rapid_incremental_changes() { } // Wait for processing - drain_messages(&client_conn, Duration::from_millis(200)); + let _ = recv_until::<()>(&client_conn, QUIESCENCE_TIMEOUT, |_| None); // Verify server is responsive client_conn @@ -340,13 +338,9 @@ fn test_rapid_incremental_changes() { .send(Message::Request(hover_request(100, uri, 0, 6))) .expect("expected success"); - let response = client_conn - .receiver - .recv_timeout(Duration::from_secs(2)) + let response = recv_response_by_id(&client_conn, 100.into(), RESPONSE_TIMEOUT) .expect("Server should respond"); - assert_matches!(response, Message::Response(resp) => { - assert!(resp.error.is_none()); - }); + assert!(response.error.is_none()); shutdown_server(&client_conn, server_thread, 101); } @@ -371,7 +365,7 @@ add(x, y)"; .expect("expected success"); // Wait for initial diagnostics - drain_messages(&client_conn, Duration::from_millis(100)); + let _ = recv_until::<()>(&client_conn, QUIESCENCE_TIMEOUT, |_| None); // Send multiple requests concurrently let sender = Arc::new(client_conn.sender.clone()); @@ -413,30 +407,26 @@ add(x, y)"; } // Collect all responses (30 requests total) - let mut responses = Vec::new(); - let mut attempts = 0; - while responses.len() < 30 && attempts < 100 { - match client_conn - .receiver - .recv_timeout(Duration::from_millis(100)) - { - Ok(Message::Response(resp)) => { - responses.push(resp); - } - Ok(_) => { - // Ignore notifications - } - Err(_) => { - attempts += 1; - } - } + let deadline = Instant::now() + LONG_RESPONSE_TIMEOUT; + let mut responses = Vec::with_capacity(30); + while 
responses.len() < 30 { + let Some(remaining) = deadline.checked_duration_since(Instant::now()) else { + break; + }; + let Some(response) = recv_until(&client_conn, remaining, |message| match message { + Message::Response(response) => Some(response), + _ => None, + }) else { + break; + }; + responses.push(response); } // Verify we got all responses - assert!( - responses.len() >= 25, - "Should receive most responses, got {}", - responses.len() + assert_eq!( + responses.len(), + 30, + "Should receive all concurrent request responses" ); // Verify no errors in responses @@ -479,7 +469,7 @@ f{i}(1)" } // Wait for all documents to be processed - drain_messages(&client_conn, Duration::from_millis(500)); + let _ = recv_until::<()>(&client_conn, QUIESCENCE_TIMEOUT, |_| None); // Query each document to verify they're all accessible let mut successful_queries = 0; @@ -490,31 +480,18 @@ f{i}(1)" .send(Message::Request(hover_request(i + 100, &uri, 0, 7))) .expect("expected success"); - match client_conn.receiver.recv_timeout(Duration::from_secs(1)) { - Ok(Message::Response(resp)) => { - if resp.error.is_none() { - successful_queries += 1; - } - } - Ok(Message::Notification(_)) => { - // Retry to get the response - if let Ok(Message::Response(resp)) = client_conn - .receiver - .recv_timeout(Duration::from_millis(500)) - { - if resp.error.is_none() { - successful_queries += 1; - } - } - } - Ok(Message::Request(_)) - | Err(RecvTimeoutError::Timeout | RecvTimeoutError::Disconnected) => {} - } + let response = recv_response_by_id(&client_conn, (i + 100).into(), RESPONSE_TIMEOUT) + .expect("Should receive a response for every hover request"); + assert!( + response.error.is_none(), + "Hover request for document {i} should succeed" + ); + successful_queries += 1; } - assert!( - successful_queries >= num_documents / 2, - "Should successfully query at least half the documents, got {successful_queries}/{num_documents}" + assert_eq!( + successful_queries, num_documents, + "Should 
successfully query every opened document" ); shutdown_server(&client_conn, server_thread, 9999); @@ -551,7 +528,7 @@ fn test_large_document() { .expect("expected success"); // Wait for processing - drain_messages(&client_conn, Duration::from_millis(500)); + let _ = recv_until::<()>(&client_conn, QUIESCENCE_TIMEOUT, |_| None); // Test hover at the beginning client_conn @@ -559,22 +536,12 @@ fn test_large_document() { .send(Message::Request(hover_request(100, uri, 0, 7))) .expect("expected success"); - loop { - let message = client_conn - .receiver - .recv_timeout(Duration::from_secs(5)) - .expect("Server should respond to hover on large document"); - match message { - Message::Response(resp) => { - assert!( - resp.error.is_none(), - "Hover should succeed on large document" - ); - break; - } - Message::Notification(_) | Message::Request(_) => {} - } - } + let hover_response = recv_response_by_id(&client_conn, 100.into(), RESPONSE_TIMEOUT) + .expect("Server should respond to hover on large document"); + assert!( + hover_response.error.is_none(), + "Hover should succeed on large document" + ); // Test goto definition in the middle let middle_line = num_locals + (num_locals / 2); @@ -588,23 +555,12 @@ fn test_large_document() { ))) .expect("expected success"); - // Drain any notifications and get the response - loop { - let message = client_conn - .receiver - .recv_timeout(Duration::from_secs(5)) - .expect("Should receive goto definition response"); - match message { - Message::Response(resp) => { - assert!( - resp.error.is_none(), - "Goto definition should succeed on large document" - ); - break; - } - Message::Notification(_) | Message::Request(_) => {} - } - } + let goto_response = recv_response_by_id(&client_conn, 101.into(), RESPONSE_TIMEOUT) + .expect("Should receive goto definition response"); + assert!( + goto_response.error.is_none(), + "Goto definition should succeed on large document" + ); shutdown_server(&client_conn, server_thread, 9999); } @@ -630,7 +586,7 
@@ fn test_changes_during_requests() { .expect("expected success"); // Wait for initial processing - drain_messages(&client_conn, Duration::from_millis(100)); + let _ = recv_until::<()>(&client_conn, QUIESCENCE_TIMEOUT, |_| None); // Send a request and immediately change the document client_conn @@ -648,52 +604,30 @@ fn test_changes_during_requests() { ))) .expect("expected success"); - // The server should handle this gracefully - // Either return a result for the old or new document, but not crash - loop { - match client_conn.receiver.recv_timeout(Duration::from_secs(2)) { - Ok(Message::Response(resp)) => { - // Response may succeed or fail, but shouldn't be an internal error - if let Some(err) = &resp.error { - assert!( - err.code != -32603, - "Should not have internal error: {err:?}" - ); - } - break; - } - Ok(Message::Notification(_) | Message::Request(_)) => {} - Err(RecvTimeoutError::Timeout | RecvTimeoutError::Disconnected) => { - // Timeout is acceptable - document changed - break; - } - } + // The server should handle this gracefully and still return a response. 
+ let response = recv_response_by_id(&client_conn, 100.into(), RESPONSE_TIMEOUT) + .expect("Should receive goto definition response despite document change"); + if let Some(err) = &response.error { + assert!( + err.code != -32603, + "Should not have internal error: {err:?}" + ); } // Verify server is still responsive after the change - drain_messages(&client_conn, Duration::from_millis(100)); + let _ = recv_until::<()>(&client_conn, QUIESCENCE_TIMEOUT, |_| None); client_conn .sender .send(Message::Request(hover_request(200, uri, 0, 6))) .expect("expected success"); - loop { - let message = client_conn - .receiver - .recv_timeout(Duration::from_secs(2)) - .expect("Server should respond after document change"); - match message { - Message::Response(resp) => { - assert!( - resp.error.is_none(), - "Server should be responsive after document change" - ); - break; - } - Message::Notification(_) | Message::Request(_) => {} - } - } + let post_change_response = recv_response_by_id(&client_conn, 200.into(), RESPONSE_TIMEOUT) + .expect("Server should respond after document change"); + assert!( + post_change_response.error.is_none(), + "Server should be responsive after document change" + ); shutdown_server(&client_conn, server_thread, 9999); } @@ -717,7 +651,7 @@ f(x)"; .expect("expected success"); // Wait for processing - drain_messages(&client_conn, Duration::from_millis(100)); + let _ = recv_until::<()>(&client_conn, QUIESCENCE_TIMEOUT, |_| None); // Send multiple different request types and verify all respond within timeout let requests = vec![ @@ -733,29 +667,12 @@ f(x)"; .send(Message::Request(req)) .expect("expected success"); - let start = std::time::Instant::now(); - let timeout = Duration::from_secs(5); - - loop { - let remaining = timeout - .checked_sub(start.elapsed()) - .expect("Request timed out before a response was received"); - let message = client_conn - .receiver - .recv_timeout(remaining) - .expect("Request timed out while waiting for response"); - match 
message { - Message::Response(resp) => { - assert_eq!( - resp.id, req_id, - "Should receive response for correct request" - ); - assert!(resp.error.is_none(), "Request {req_id:?} should not error"); - break; - } - Message::Notification(_) | Message::Request(_) => {} - } - } + let response = recv_response_by_id(&client_conn, req_id.clone(), RESPONSE_TIMEOUT) + .expect("Request timed out while waiting for response"); + assert!( + response.error.is_none(), + "Request {req_id:?} should not error" + ); } shutdown_server(&client_conn, server_thread, 9999); @@ -788,39 +705,9 @@ fn test_shutdown_during_processing() { .send(Message::Request(shutdown_request(100))) .expect("expected success"); - // Should still get a clean shutdown response - let response = client_conn - .receiver - .recv_timeout(Duration::from_secs(5)) + let response = recv_response_by_id(&client_conn, 100.into(), RESPONSE_TIMEOUT) .expect("Should receive shutdown response"); - - // May receive notifications before the response - let mut got_response = false; - match response { - Message::Response(resp) => { - assert!(resp.error.is_none(), "Shutdown should succeed"); - got_response = true; - } - Message::Notification(_) => { - // Try to get the response - loop { - match client_conn.receiver.recv_timeout(Duration::from_secs(2)) { - Ok(Message::Response(resp)) => { - assert!(resp.error.is_none(), "Shutdown should succeed"); - got_response = true; - break; - } - Ok(Message::Notification(_) | Message::Request(_)) => {} - Err(RecvTimeoutError::Timeout | RecvTimeoutError::Disconnected) => break, - } - } - } - Message::Request(_) => { - // Unexpected request from server, ignore and wait for response - } - } - - assert!(got_response, "Should receive shutdown response"); + assert!(response.error.is_none(), "Shutdown should succeed"); // Send exit client_conn diff --git a/crates/jrsonnet-lsp/tests/support/lsp_test_transport.rs b/crates/jrsonnet-lsp/tests/support/lsp_test_transport.rs new file mode 100644 index 
00000000..7538e873 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/support/lsp_test_transport.rs @@ -0,0 +1,39 @@ +use std::time::{Duration, Instant}; + +use lsp_server::{Connection, Message, RequestId}; + +pub(crate) const RESPONSE_TIMEOUT: Duration = Duration::from_secs(5); +pub(crate) const LONG_RESPONSE_TIMEOUT: Duration = Duration::from_secs(10); + +pub(crate) fn recv_until<T>( + conn: &Connection, + timeout: Duration, + mut matcher: impl FnMut(Message) -> Option<T>, +) -> Option<T> { + let start = Instant::now(); + loop { + let remaining = timeout.checked_sub(start.elapsed())?; + match conn.receiver.recv_timeout(remaining) { + Ok(message) => { + if let Some(value) = matcher(message) { + return Some(value); + } + } + Err( + crossbeam_channel::RecvTimeoutError::Timeout + | crossbeam_channel::RecvTimeoutError::Disconnected, + ) => return None, + } + } +} + +pub(crate) fn recv_response_by_id( + conn: &Connection, + request_id: RequestId, + timeout: Duration, +) -> Option<lsp_server::Response> { + recv_until(conn, timeout, |message| match message { + Message::Response(response) if response.id == request_id => Some(response), + _ => None, + }) +} diff --git a/crates/jrsonnet-lsp/tests/support/mod.rs b/crates/jrsonnet-lsp/tests/support/mod.rs new file mode 100644 index 00000000..f8e6cfcd --- /dev/null +++ b/crates/jrsonnet-lsp/tests/support/mod.rs @@ -0,0 +1,5 @@ +mod lsp_test_transport; + +pub(crate) use lsp_test_transport::{ + recv_response_by_id, recv_until, LONG_RESPONSE_TIMEOUT, RESPONSE_TIMEOUT, +}; From dbac01049cdd8d5e4e1098b9fce6979b37c45fc7 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Fri, 27 Feb 2026 17:44:18 +0000 Subject: [PATCH 204/210] fix(lsp): support imported-member refs and rename from use sites --- .../src/references/cross_file.rs | 47 ++++++++++- .../src/references/local.rs | 41 +++++++++- .../src/server/async_requests/references.rs | 78 +++++++++++++++++++ .../src/server/async_requests/rename.rs | 50 +++++++++++- .../import_member_cross_file_refs_rename.yaml | 66
++++++++++++++++ 5 files changed, 279 insertions(+), 3 deletions(-) create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/import_member_cross_file_refs_rename.yaml diff --git a/crates/jrsonnet-lsp-handlers/src/references/cross_file.rs b/crates/jrsonnet-lsp-handlers/src/references/cross_file.rs index 398d4a3e..1c5a150a 100644 --- a/crates/jrsonnet-lsp-handlers/src/references/cross_file.rs +++ b/crates/jrsonnet-lsp-handlers/src/references/cross_file.rs @@ -7,7 +7,7 @@ use jrsonnet_lsp_scope::{ find_definition_range, is_at_file_scope, is_definition_site, is_variable_reference, }; use jrsonnet_rowan_parser::{ - nodes::{ExprBase, ExprField}, + nodes::{ExprBase, ExprField, ExprObject}, AstNode, SyntaxKind, SyntaxNode, SyntaxToken, }; use lsp_types::Location; @@ -146,6 +146,10 @@ fn resolve_exported_symbol_name( return None; } + if is_top_level_object_field_definition(document, token) { + return Some(token.text().to_string()); + } + if is_definition_site(token) && is_at_file_scope(token) { return Some(token.text().to_string()); } @@ -182,6 +186,47 @@ fn definition_token(document: &Document, range: TextRange, name: &str) -> Option }) } +fn is_top_level_object_field_definition(document: &Document, token: &SyntaxToken) -> bool { + if token.kind() != SyntaxKind::IDENT || field_definition_range(token).is_none() { + return false; + } + + let Some(root_expr) = document.ast().expr() else { + return false; + }; + let Some(ExprBase::ExprObject(root_object)) = root_expr.expr_base() else { + return false; + }; + let Some(field_object) = token.parent_ancestors().find_map(ExprObject::cast) else { + return false; + }; + + field_object.syntax().text_range() == root_object.syntax().text_range() +} + +fn field_definition_range(token: &SyntaxToken) -> Option { + if token.kind() != SyntaxKind::IDENT { + return None; + } + + let mut node = token.parent(); + while let Some(current) = node { + if current.kind() == SyntaxKind::FIELD_NAME_FIXED { + let parent = current.parent()?; + if 
matches!( + parent.kind(), + SyntaxKind::MEMBER_FIELD_NORMAL | SyntaxKind::MEMBER_FIELD_METHOD + ) { + return Some(token.text_range()); + } + return None; + } + node = current.parent(); + } + + None +} + /// Find references to an imported symbol in a document. fn find_references_to_import( doc: &Document, diff --git a/crates/jrsonnet-lsp-handlers/src/references/local.rs b/crates/jrsonnet-lsp-handlers/src/references/local.rs index b79663ba..6069ee74 100644 --- a/crates/jrsonnet-lsp-handlers/src/references/local.rs +++ b/crates/jrsonnet-lsp-handlers/src/references/local.rs @@ -3,8 +3,9 @@ use jrsonnet_lsp_inference::SemanticArtifacts; use jrsonnet_lsp_scope::{ find_definition_range, is_definition_site, is_variable_reference, ScopeResolver, }; -use jrsonnet_rowan_parser::{AstNode, SyntaxKind}; +use jrsonnet_rowan_parser::{AstNode, SyntaxKind, SyntaxToken}; use lsp_types::{Location, Uri}; +use rowan::TextRange; use tracing::debug; /// Find all references to the symbol at the given position. 
@@ -44,6 +45,21 @@ pub fn find_references_with_semantic( let name = token.text(); + if let Some(field_definition) = field_definition_range(&token) { + let references = if include_declaration { + vec![field_definition] + } else { + Vec::new() + }; + return references + .into_iter() + .map(|range| Location { + uri: uri.clone(), + range: to_lsp_range(range, line_index, text), + }) + .collect(); + } + let definition_range = semantic .and_then(|artifacts| artifacts.definition_for_ident_token(&token)) .or_else(|| { @@ -84,6 +100,29 @@ pub fn find_references_with_semantic( .collect() } +fn field_definition_range(token: &SyntaxToken) -> Option { + if token.kind() != SyntaxKind::IDENT { + return None; + } + + let mut node = token.parent(); + while let Some(current) = node { + if current.kind() == SyntaxKind::FIELD_NAME_FIXED { + let parent = current.parent()?; + if matches!( + parent.kind(), + SyntaxKind::MEMBER_FIELD_NORMAL | SyntaxKind::MEMBER_FIELD_METHOD + ) { + return Some(token.text_range()); + } + return None; + } + node = current.parent(); + } + + None +} + #[cfg(test)] mod tests { use jrsonnet_lsp_document::DocVersion; diff --git a/crates/jrsonnet-lsp/src/server/async_requests/references.rs b/crates/jrsonnet-lsp/src/server/async_requests/references.rs index 5f1a3fa2..cc9119bc 100644 --- a/crates/jrsonnet-lsp/src/server/async_requests/references.rs +++ b/crates/jrsonnet-lsp/src/server/async_requests/references.rs @@ -1,5 +1,6 @@ use jrsonnet_lsp_document::CanonicalPath; use jrsonnet_lsp_handlers as handlers; +use jrsonnet_lsp_inference::SemanticArtifacts; use lsp_types::{Location, ReferenceParams}; use super::AsyncRequestContext; @@ -51,9 +52,86 @@ impl AsyncRequestContext { }; refs.extend(cross_refs); + if refs.is_empty() { + if let Some(import_member_refs) = self.references_for_import_member_use_site( + &doc, + &path, + lsp_pos, + include_declaration, + semantic.as_deref(), + ) { + refs.extend(import_member_refs); + } + } + if refs.is_empty() { return None; } 
Some(refs) } + + fn references_for_import_member_use_site( + &self, + document: &jrsonnet_lsp_document::Document, + path: &CanonicalPath, + position: jrsonnet_lsp_document::LspPosition, + include_declaration: bool, + semantic: Option<&SemanticArtifacts>, + ) -> Option<Vec<Location>> { + let handlers::DefinitionResult::ImportField { + path: import_path, + fields, + } = handlers::goto_definition_with_semantic(document, position, semantic)? + else { + return None; + }; + + let target_path = self.resolve_import_path(path, &import_path)?; + let declaration = self + .find_field_in_file(&target_path, &fields) + .map(|locations| locations.declaration) + .or_else(|| self.find_export_binding_in_file(&target_path, &fields))?; + let declaration_pos = declaration.start.into(); + + let target_doc = self.documents.get_document(&target_path)?; + let target_uri = target_path.to_uri().ok()?; + let target_semantic = self.documents.get_semantic_artifacts(&target_path); + let mut refs = handlers::find_references_with_semantic( + &target_doc, + declaration_pos, + &target_uri, + include_declaration, + target_semantic.as_deref(), + ); + + let importers = self.ensure_precise_transitive_importers(&target_path); + let importer_docs: Vec<_> = importers + .into_iter() + .filter_map(|file| { + let path = self.documents.path(file)?; + let doc = self.documents.get_document_file(file)?; + let semantic = self.documents.get_semantic_artifacts(path.as_ref()); + Some((path.as_ref().clone(), doc, semantic)) + }) + .collect(); + let importer_refs: Vec<_> = importer_docs + .iter() + .map(|(k, v, semantic)| (k, v, semantic.as_deref())) + .collect(); + + let cross_refs = { + let import_graph = self.import_graph.read(); + handlers::find_cross_file_references_with_semantic( + &target_doc, + &target_path, + declaration_pos, + target_semantic.as_deref(), + &importer_refs, + &import_graph, + ) + }; + refs.extend(cross_refs); + + (!refs.is_empty()).then_some(refs) + } } diff --git
a/crates/jrsonnet-lsp/src/server/async_requests/rename.rs b/crates/jrsonnet-lsp/src/server/async_requests/rename.rs index 5b9257fd..586ec6b7 100644 --- a/crates/jrsonnet-lsp/src/server/async_requests/rename.rs +++ b/crates/jrsonnet-lsp/src/server/async_requests/rename.rs @@ -21,9 +21,19 @@ impl AsyncRequestContext { }; let lsp_pos = position.into(); + let semantic = self.documents.get_semantic_artifacts(&path); + if let Some(rename) = self.rename_from_import_member_use_site( + &doc, + &path, + lsp_pos, + &new_name, + semantic.as_deref(), + ) { + return Some(rename); + } + self.ensure_precise_transitive_importers(&path); let import_graph = self.import_graph.read(); - handlers::rename_cross_file( &doc, lsp_pos, @@ -34,4 +44,42 @@ impl AsyncRequestContext { &import_graph, ) } + + fn rename_from_import_member_use_site( + &self, + document: &jrsonnet_lsp_document::Document, + path: &CanonicalPath, + position: jrsonnet_lsp_document::LspPosition, + new_name: &SymbolName, + semantic: Option<&jrsonnet_lsp_inference::SemanticArtifacts>, + ) -> Option { + let handlers::DefinitionResult::ImportField { + path: import_path, + fields, + } = handlers::goto_definition_with_semantic(document, position, semantic)? 
+ else { + return None; + }; + + let target_path = self.resolve_import_path(path, &import_path)?; + let declaration = self + .find_field_in_file(&target_path, &fields) + .map(|locations| locations.declaration) + .or_else(|| self.find_export_binding_in_file(&target_path, &fields))?; + let target_position = declaration.start.into(); + + let target_doc = self.documents.get_document(&target_path)?; + let target_uri = target_path.to_uri().ok()?; + self.ensure_precise_transitive_importers(&target_path); + let import_graph = self.import_graph.read(); + handlers::rename_cross_file( + &target_doc, + target_position, + new_name, + &target_uri, + &target_path, + &self.documents, + &import_graph, + ) + } } diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/import_member_cross_file_refs_rename.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/import_member_cross_file_refs_rename.yaml new file mode 100644 index 00000000..0860cf65 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/import_member_cross_file_refs_rename.yaml @@ -0,0 +1,66 @@ +# Imported-member references and rename should work from importer use sites. 
+steps: +- step: create + files: + cross_file_lib.libsonnet: | + { + ((libValue:|value)): 42, + } + cross_file_main.jsonnet: | + local lib = import "./cross_file_lib.libsonnet"; + { + one: lib.((mainValueOne:|value)), + two: lib.((mainValueTwo:|value)), + } + +- step: diagnosticsSettled + +- step: requestDefinition + as: defFromImporterMember + file: cross_file_main.jsonnet + at: mainValueOne +- step: expectDefinition + request: defFromImporterMember + result: + file: cross_file_lib.libsonnet + at: libValue + text: value + +- step: requestReferences + as: refsFromImporterMember + file: cross_file_main.jsonnet + at: mainValueOne + include_declaration: true +- step: expectReferences + request: refsFromImporterMember + result: + - file: cross_file_lib.libsonnet + at: libValue + text: value + - file: cross_file_main.jsonnet + at: mainValueOne + text: value + - file: cross_file_main.jsonnet + at: mainValueTwo + text: value + +- step: requestRename + as: renameFromImporterMember + file: cross_file_main.jsonnet + at: mainValueOne + new_name: renamed +- step: expectRename + request: renameFromImporterMember + result: + edits: + cross_file_lib.libsonnet: + - at: libValue + text: value + replace: renamed + cross_file_main.jsonnet: + - at: mainValueOne + text: value + replace: renamed + - at: mainValueTwo + text: value + replace: renamed From 19e2f3c3bb4dfc8f9949da4684cc6a5691344c2c Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Fri, 27 Feb 2026 17:50:47 +0000 Subject: [PATCH 205/210] fix(lsp): return precise ranges for import definitions --- .../src/server/async_requests/goto_shared.rs | 6 +--- .../tests/integration/navigation.rs | 11 +++++- .../tests/integration/workspace_cross_file.rs | 11 +++++- .../import_definition_precise_range.yaml | 36 +++++++++++++++++++ 4 files changed, 57 insertions(+), 7 deletions(-) create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/import_definition_precise_range.yaml diff --git 
a/crates/jrsonnet-lsp/src/server/async_requests/goto_shared.rs b/crates/jrsonnet-lsp/src/server/async_requests/goto_shared.rs index 1e2c9c61..77d8aefb 100644 --- a/crates/jrsonnet-lsp/src/server/async_requests/goto_shared.rs +++ b/crates/jrsonnet-lsp/src/server/async_requests/goto_shared.rs @@ -48,11 +48,7 @@ impl AsyncRequestContext { handlers::DefinitionResult::Import(import_path) => { let resolved = self.resolve_import_from_graph(&path, &import_path)?; let resolved_uri = resolved.to_uri().ok()?; - let range = if target == GotoTarget::Implementation { - self.document_root_expr_range(&resolved).unwrap_or_default() - } else { - lsp_types::Range::default() - }; + let range = self.document_root_expr_range(&resolved).unwrap_or_default(); Some(GotoDefinitionResponse::Scalar(Location { uri: resolved_uri, range, diff --git a/crates/jrsonnet-lsp/tests/integration/navigation.rs b/crates/jrsonnet-lsp/tests/integration/navigation.rs index 791cca7c..b00c4744 100644 --- a/crates/jrsonnet-lsp/tests/integration/navigation.rs +++ b/crates/jrsonnet-lsp/tests/integration/navigation.rs @@ -263,7 +263,16 @@ alias + std.length(plain)"#, plain_result, Some(GotoDefinitionResponse::Scalar(lsp_types::Location { uri: lib_uri, - range: lsp_types::Range::default(), + range: lsp_types::Range { + start: Position { + line: 0, + character: 0, + }, + end: Position { + line: 0, + character: 11, + }, + }, })) ); diff --git a/crates/jrsonnet-lsp/tests/integration/workspace_cross_file.rs b/crates/jrsonnet-lsp/tests/integration/workspace_cross_file.rs index aa6b35b5..ce7b263b 100644 --- a/crates/jrsonnet-lsp/tests/integration/workspace_cross_file.rs +++ b/crates/jrsonnet-lsp/tests/integration/workspace_cross_file.rs @@ -1086,7 +1086,16 @@ fn test_navigation_resolves_jpath_imports_from_graph() { import_definition, Some(GotoDefinitionResponse::Scalar(lsp_types::Location { uri: lib_uri.parse().expect("lib URI should parse"), - range: lsp_types::Range::default(), + range: lsp_types::Range { + start: 
Position { + line: 0, + character: 0, + }, + end: Position { + line: 0, + character: 14, + }, + }, })) ); diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/import_definition_precise_range.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/import_definition_precise_range.yaml new file mode 100644 index 00000000..a4cdc1be --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/import_definition_precise_range.yaml @@ -0,0 +1,36 @@ +# Import-definition targets should resolve to a concrete range in the imported file. +steps: +- step: create + files: + import_target.libsonnet: | + [[targetRoot:{ + value: 7, + }]] + import_binding_definition.jsonnet: | + local lib = import "./import_target.libsonnet"; + ((importBindingUse:|lib)).value + import_path_definition.jsonnet: | + local lib = import "./((importPath:|import_target.libsonnet))"; + lib + +- step: diagnosticsSettled + +- step: requestDefinition + as: defFromImportBinding + file: import_binding_definition.jsonnet + at: importBindingUse +- step: expectDefinition + request: defFromImportBinding + result: + file: import_target.libsonnet + range: targetRoot + +- step: requestDefinition + as: defFromImportPath + file: import_path_definition.jsonnet + at: importPath +- step: expectDefinition + request: defFromImportPath + result: + file: import_target.libsonnet + range: targetRoot From d130027bb7b1da3804540a3b90854d6dd7439826 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Fri, 27 Feb 2026 17:58:36 +0000 Subject: [PATCH 206/210] fix(lsp): suppress noisy hover on structural braces --- .../src/hover/handler.rs | 44 +++++-------------- .../hover_close_brace_enclosing_type.yaml | 12 +---- .../runner/hover_consistency_noise.yaml | 40 +++++++++++++++++ .../hover_open_brace_enclosing_type.yaml | 12 +---- 4 files changed, 54 insertions(+), 54 deletions(-) create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/hover_consistency_noise.yaml diff --git a/crates/jrsonnet-lsp-handlers/src/hover/handler.rs 
b/crates/jrsonnet-lsp-handlers/src/hover/handler.rs index 0a59ffc4..7f93c2db 100644 --- a/crates/jrsonnet-lsp-handlers/src/hover/handler.rs +++ b/crates/jrsonnet-lsp-handlers/src/hover/handler.rs @@ -67,6 +67,14 @@ pub fn hover_with_import_field_type( }| (type_markdown, context_markdown, preview_code), ); let docs_facts = docs_facts_for_token(&token); + let expression_hover_allowed = token.kind().is_hover_eligible(); + if local_kind.is_none() + && !docs_facts.has_stdlib_docs + && !docs_facts.has_token_docs + && !expression_hover_allowed + { + return None; + } let type_markdown = local_type_markdown.or_else(|| inferred_type_markdown(document, analysis, offset)); let preview_code = select_preview_code(&token, definition_preview); @@ -787,47 +795,19 @@ mod tests { } #[test] - fn test_hover_on_open_brace_returns_enclosing_object_type() { + fn test_no_hover_on_open_brace() { let code = "{ z: { a: 1, b: 2 } }"; let result = get_hover(code, 0, 0); - assert_matches!(result, Some(Hover { - contents, - range: None - }) => { - assert_hover_contents( - &contents, - &[ - "`{ z: { a: number, b: number } }`", - indoc! {r" - ```jsonnet - { z: { a: 1, b: 2 } } - ```"}, - ], - ); - }); + assert_matches!(result, None); } #[test] - fn test_hover_on_close_brace_returns_enclosing_object_type() { + fn test_no_hover_on_close_brace() { let code = "{ z: { a: 1, b: 2 } }"; let close_brace = u32::try_from(code.rfind('}').expect("code should end with `}`")) .expect("close brace offset should fit u32"); let result = get_hover(code, 0, close_brace); - assert_matches!(result, Some(Hover { - contents, - range: None - }) => { - assert_hover_contents( - &contents, - &[ - "`{ z: { a: number, b: number } }`", - indoc! 
{r" - ```jsonnet - { z: { a: 1, b: 2 } } - ```"}, - ], - ); - }); + assert_matches!(result, None); } #[test] diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_close_brace_enclosing_type.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_close_brace_enclosing_type.yaml index c1649cc3..cdcbaa8b 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_close_brace_enclosing_type.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_close_brace_enclosing_type.yaml @@ -1,4 +1,4 @@ -# Hover on a closing `}` should still report the enclosing object type. +# Hover on structural braces should not emit object-level noise. steps: - step: create files: @@ -19,13 +19,3 @@ steps: - step: expectHover request: closeBraceHover result: - - type: '{ z: { a: number, b: number } }' - - preview: - language: jsonnet - value: |- - { - z: { - a: 1, - b: 2, - }, - ... diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_consistency_noise.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_consistency_noise.yaml new file mode 100644 index 00000000..bf1e6b76 --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_consistency_noise.yaml @@ -0,0 +1,40 @@ +# Keep stdlib hover rich while ensuring structural-token hover remains quiet. +steps: +- step: create + files: + stdlib_symbol.jsonnet: | + std.((stdLength:|))length('abc') + no_hover.jsonnet: | + ((openBrace:|)){ + answer: 42, + } + +- step: diagnosticsSettled + +- step: requestHover + as: stdlibHover + file: stdlib_symbol.jsonnet + at: stdLength +- step: expectHover + request: stdlibHover + result: + - type: 'function(x: string | object | function() | array)' + - docs: + language: jsonnet + value: std.length(x) + - docs: Returns the length of an array, string, object, or function parameters. 
+ - docs: "**Example:**" + - docs: + language: jsonnet + value: std.length([1,2,3]) // 3 + - preview: + language: jsonnet + value: std.length + +- step: requestHover + as: noHover + file: no_hover.jsonnet + at: openBrace +- step: expectHover + request: noHover + result: diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_open_brace_enclosing_type.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_open_brace_enclosing_type.yaml index 37f04140..3900583d 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_open_brace_enclosing_type.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_open_brace_enclosing_type.yaml @@ -1,4 +1,4 @@ -# Hover on an opening `{` should still report the enclosing object type. +# Hover on structural braces should not emit object-level noise. steps: - step: create files: @@ -19,13 +19,3 @@ steps: - step: expectHover request: openBraceHover result: - - type: '{ z: { a: number, b: number } }' - - preview: - language: jsonnet - value: |- - { - z: { - a: 1, - b: 2, - }, - ... 
From aac2e5547fe24b61f1ec3949e836210840fcd3ce Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Fri, 27 Feb 2026 18:01:20 +0000 Subject: [PATCH 207/210] fix(lsp-check): report unknown named arguments in calls --- .../src/type_check/calls.rs | 54 +++++++++++++++++++ .../jrsonnet-lsp-check/src/type_check/core.rs | 42 +++++++++++++++ .../src/type_check/types.rs | 25 +++++++++ .../diagnostics_type_coverage_matrix.yaml | 48 +++++++++++++++++ 4 files changed, 169 insertions(+) create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/diagnostics_type_coverage_matrix.yaml diff --git a/crates/jrsonnet-lsp-check/src/type_check/calls.rs b/crates/jrsonnet-lsp-check/src/type_check/calls.rs index 38cf06e1..65044818 100644 --- a/crates/jrsonnet-lsp-check/src/type_check/calls.rs +++ b/crates/jrsonnet-lsp-check/src/type_check/calls.rs @@ -45,6 +45,47 @@ pub(super) fn validate_function_call_ty( } } +fn collect_unknown_named_argument_errors( + call: &ExprCall, + func_data: &FunctionData, + function_name: &str, +) -> Vec { + let expected: Vec = func_data + .params + .iter() + .map(|param| param.name.clone()) + .collect(); + let mut errors = Vec::new(); + let Some(args_desc) = call.args_desc() else { + return errors; + }; + + for arg in args_desc.args() { + let Some(arg_name) = arg + .name() + .and_then(|name| name.ident_lit()) + .map(|token| token.text().to_string()) + else { + continue; + }; + + if expected.iter().any(|candidate| candidate == &arg_name) { + continue; + } + + errors.push(TypeError { + kind: TypeErrorKind::UnknownNamedArgument { + function_name: function_name.to_string(), + arg_name, + expected: expected.clone(), + }, + range: arg.syntax().text_range(), + }); + } + + errors +} + /// Check if an `ExprCall` is a stdlib function call and validate argument count and types. 
/// /// Matches the pattern: `std.functionName(args...)`, including aliases that @@ -92,6 +133,12 @@ pub(super) fn check_stdlib_call_expr( let Some(func_data) = sig.func_data() else { return; }; + let unknown_named_argument_errors = + collect_unknown_named_argument_errors(call, &func_data, &qualified_name); + if !unknown_named_argument_errors.is_empty() { + errors.extend(unknown_named_argument_errors); + return; + } if let Some(error) = validate_function_call_ty( &func_data, qualified_name.clone(), @@ -553,6 +600,13 @@ pub(super) fn check_user_function_call_expr( let arg_count = call.args_desc().map_or(0, |args| args.args().count()); // Validate using the Ty-native function + let unknown_named_argument_errors = + collect_unknown_named_argument_errors(call, &func_data, &var_name); + if !unknown_named_argument_errors.is_empty() { + errors.extend(unknown_named_argument_errors); + return; + } + if let Some(error) = validate_function_call_ty(&func_data, var_name, arg_count, call.syntax().text_range()) { diff --git a/crates/jrsonnet-lsp-check/src/type_check/core.rs b/crates/jrsonnet-lsp-check/src/type_check/core.rs index e31b8575..21769c0c 100644 --- a/crates/jrsonnet-lsp-check/src/type_check/core.rs +++ b/crates/jrsonnet-lsp-check/src/type_check/core.rs @@ -912,6 +912,48 @@ mod tests { assert!(errors.is_empty(), "expected no errors, got: {errors:?}"); } + #[test] + fn test_user_function_unknown_named_arg() { + let errors = check_code("local add(x, y) = x + y; add(z = 1)"); + assert!( + matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::UnknownNamedArgument { + function_name, + arg_name, + expected, + }, + .. 
+ }] if function_name == "add" + && arg_name == "z" + && expected == &["x", "y"] + ), + "expected UnknownNamedArgument for add(z = 1), got: {errors:?}" + ); + } + + #[test] + fn test_stdlib_unknown_named_arg() { + let errors = check_code("std.length(y = [1, 2, 3])"); + assert!( + matches!( + errors.as_slice(), + [TypeError { + kind: TypeErrorKind::UnknownNamedArgument { + function_name, + arg_name, + expected, + }, + .. + }] if function_name == "std.length" + && arg_name == "y" + && expected == &["x"] + ), + "expected UnknownNamedArgument for std.length(y = ...), got: {errors:?}" + ); + } + #[test] fn test_user_function_with_defaults() { // User function with default parameter diff --git a/crates/jrsonnet-lsp-check/src/type_check/types.rs b/crates/jrsonnet-lsp-check/src/type_check/types.rs index a82492d5..90eeb16d 100644 --- a/crates/jrsonnet-lsp-check/src/type_check/types.rs +++ b/crates/jrsonnet-lsp-check/src/type_check/types.rs @@ -51,6 +51,12 @@ pub enum TypeErrorKind { max_allowed: usize, provided: usize, }, + /// Named argument does not match any declared parameter. + UnknownNamedArgument { + function_name: String, + arg_name: String, + expected: Vec, + }, /// Access to non-existent field on object with known structure. NoSuchField { field: String, @@ -130,6 +136,7 @@ impl TypeErrorKind { TypeErrorKind::WrongArgCount { .. } | TypeErrorKind::TooFewArguments { .. } | TypeErrorKind::TooManyArguments { .. } + | TypeErrorKind::UnknownNamedArgument { .. } | TypeErrorKind::NoSuchField { .. } | TypeErrorKind::TupleIndexOutOfBounds { .. } | TypeErrorKind::FormatStringError { .. 
} @@ -261,6 +268,24 @@ fn render_type_error(kind: &TypeErrorKind, analysis: &TypeAnalysis) -> String { "`{function_name}` accepts at most {max_allowed} argument(s), but {provided} provided" ) } + TypeErrorKind::UnknownNamedArgument { + function_name, + arg_name, + expected, + } => { + let mut message = format!("`{function_name}` has no parameter named `{arg_name}`"); + if !expected.is_empty() { + message.push_str("; expected one of: "); + message.push_str( + &expected + .iter() + .map(|name| format!("`{name}`")) + .collect::>() + .join(", "), + ); + } + message + } TypeErrorKind::NoSuchField { field, available, diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/diagnostics_type_coverage_matrix.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/diagnostics_type_coverage_matrix.yaml new file mode 100644 index 00000000..a21c652d --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/diagnostics_type_coverage_matrix.yaml @@ -0,0 +1,48 @@ +# Type diagnostics should cover arity, named-argument, and field-access mistakes. 
+steps: +- step: create + files: + wrong_arg_count.jsonnet: ((mArgCount:|))std.length() + wrong_named_arg.jsonnet: | + local add(x, y) = x + y; + add(((mNamedArg:|))z = 1) + unknown_field_access.jsonnet: | + local obj = { known: 1 }; + ((mUnknownField:|))obj.missing + +- step: config + settings: + jsonnet: + enableLintDiagnostics: true + +- step: diagnosticsSettled + +- step: expectDiagnostics + file: wrong_arg_count.jsonnet + diagnostics: + - at: mArgCount + text: std.length() + severity: warning + code: type-error + source: jrsonnet-lint + message: "`std.length` requires at least 1 argument(s), but 0 provided" + +- step: expectDiagnostics + file: wrong_named_arg.jsonnet + diagnostics: + - at: mNamedArg + text: z = 1 + severity: warning + code: type-error + source: jrsonnet-lint + message: "`add` has no parameter named `z`; expected one of: `x`, `y`" + +- step: expectDiagnostics + file: unknown_field_access.jsonnet + diagnostics: + - at: mUnknownField + text: obj.missing + severity: warning + code: type-error + source: jrsonnet-lint + message: "no such field `missing`; available fields: known" From d102c84a20f644071699c9f56393dbab2d989832 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Fri, 27 Feb 2026 18:06:34 +0000 Subject: [PATCH 208/210] fix(lsp): align nested import-member refs and rename --- .../src/server/async_requests/references.rs | 153 ++++++++++++++---- .../src/server/async_requests/rename.rs | 49 ++++-- .../import_member_nested_consistency.yaml | 81 ++++++++++ 3 files changed, 237 insertions(+), 46 deletions(-) create mode 100644 crates/jrsonnet-lsp/tests/scenarios/runner/import_member_nested_consistency.yaml diff --git a/crates/jrsonnet-lsp/src/server/async_requests/references.rs b/crates/jrsonnet-lsp/src/server/async_requests/references.rs index cc9119bc..cd665b68 100644 --- a/crates/jrsonnet-lsp/src/server/async_requests/references.rs +++ b/crates/jrsonnet-lsp/src/server/async_requests/references.rs @@ -1,6 +1,11 @@ -use 
jrsonnet_lsp_document::CanonicalPath; +use jrsonnet_lsp_document::{to_lsp_range, CanonicalPath, Document}; use jrsonnet_lsp_handlers as handlers; use jrsonnet_lsp_inference::SemanticArtifacts; +use jrsonnet_rowan_parser::{ + nodes::{ExprBase, ExprField}, + rowan::TextRange, + AstNode, +}; use lsp_types::{Location, ReferenceParams}; use super::AsyncRequestContext; @@ -72,7 +77,7 @@ impl AsyncRequestContext { fn references_for_import_member_use_site( &self, - document: &jrsonnet_lsp_document::Document, + document: &Document, path: &CanonicalPath, position: jrsonnet_lsp_document::LspPosition, include_declaration: bool, @@ -91,11 +96,26 @@ impl AsyncRequestContext { .find_field_in_file(&target_path, &fields) .map(|locations| locations.declaration) .or_else(|| self.find_export_binding_in_file(&target_path, &fields))?; + self.collect_import_member_references( + &target_path, + declaration, + &fields, + include_declaration, + ) + } + + pub(super) fn collect_import_member_references( + &self, + target_path: &CanonicalPath, + declaration: lsp_types::Range, + fields: &[String], + include_declaration: bool, + ) -> Option> { let declaration_pos = declaration.start.into(); - let target_doc = self.documents.get_document(&target_path)?; + let target_doc = self.documents.get_document(target_path)?; let target_uri = target_path.to_uri().ok()?; - let target_semantic = self.documents.get_semantic_artifacts(&target_path); + let target_semantic = self.documents.get_semantic_artifacts(target_path); let mut refs = handlers::find_references_with_semantic( &target_doc, declaration_pos, @@ -104,34 +124,105 @@ impl AsyncRequestContext { target_semantic.as_deref(), ); - let importers = self.ensure_precise_transitive_importers(&target_path); - let importer_docs: Vec<_> = importers - .into_iter() - .filter_map(|file| { - let path = self.documents.path(file)?; - let doc = self.documents.get_document_file(file)?; - let semantic = self.documents.get_semantic_artifacts(path.as_ref()); - 
Some((path.as_ref().clone(), doc, semantic)) - }) - .collect(); - let importer_refs: Vec<_> = importer_docs - .iter() - .map(|(k, v, semantic)| (k, v, semantic.as_deref())) - .collect(); - - let cross_refs = { - let import_graph = self.import_graph.read(); - handlers::find_cross_file_references_with_semantic( - &target_doc, - &target_path, - declaration_pos, - target_semantic.as_deref(), - &importer_refs, - &import_graph, - ) + let importers = self.ensure_precise_transitive_importers(target_path); + let import_graph = self.import_graph.read(); + let Some(target_file) = import_graph.file(target_path) else { + return (!refs.is_empty()).then_some(refs); }; - refs.extend(cross_refs); - (!refs.is_empty()).then_some(refs) + for importer_file in importers { + let Some(importer_doc) = self.documents.get_document_file(importer_file) else { + continue; + }; + let Some(importer_path) = self.documents.path(importer_file) else { + continue; + }; + let Some(importer_graph_file) = import_graph.file(importer_path.as_ref()) else { + continue; + }; + + let mut binding_names: Vec = import_graph + .imports_of_target(importer_graph_file, target_file) + .into_iter() + .filter_map(|entry| entry.binding_name.clone()) + .collect(); + binding_names.sort(); + binding_names.dedup(); + + if binding_names.is_empty() { + continue; + } + + let Ok(importer_uri) = importer_path.as_ref().to_uri() else { + continue; + }; + + for binding_name in binding_names { + for range in + find_import_member_chain_references(&importer_doc, &binding_name, fields) + { + refs.push(Location { + uri: importer_uri.clone(), + range: to_lsp_range(range, importer_doc.line_index(), importer_doc.text()), + }); + } + } + } + + let mut deduped = Vec::with_capacity(refs.len()); + for reference in refs { + if deduped.iter().any(|existing: &Location| { + existing.uri == reference.uri && existing.range == reference.range + }) { + continue; + } + deduped.push(reference); + } + + (!deduped.is_empty()).then_some(deduped) + } +} + 
+fn find_import_member_chain_references( + document: &Document, + binding_name: &str, + fields: &[String], +) -> Vec { + if fields.is_empty() { + return Vec::new(); + } + + document + .ast() + .syntax() + .descendants() + .filter_map(ExprField::cast) + .filter_map(|field| import_member_chain_data(&field)) + .filter_map(|(base, chain, range)| { + (base == binding_name && chain == fields).then_some(range) + }) + .collect() +} + +fn import_member_chain_data(field: &ExprField) -> Option<(String, Vec, TextRange)> { + let field_ident = field.field()?.ident_lit()?; + let leaf_range = field_ident.text_range(); + let mut chain_rev = vec![field_ident.text().to_string()]; + let mut current_base = field.base()?; + + loop { + match current_base.expr_base()? { + ExprBase::ExprField(parent_field) => { + let parent_ident = parent_field.field()?.ident_lit()?; + chain_rev.push(parent_ident.text().to_string()); + current_base = parent_field.base()?; + } + ExprBase::ExprVar(var) => { + let binding = var.name()?.ident_lit()?.text().to_string(); + chain_rev.reverse(); + return Some((binding, chain_rev, leaf_range)); + } + _ => return None, + } } } diff --git a/crates/jrsonnet-lsp/src/server/async_requests/rename.rs b/crates/jrsonnet-lsp/src/server/async_requests/rename.rs index 586ec6b7..3d17a796 100644 --- a/crates/jrsonnet-lsp/src/server/async_requests/rename.rs +++ b/crates/jrsonnet-lsp/src/server/async_requests/rename.rs @@ -1,6 +1,8 @@ +use std::collections::HashMap; + use jrsonnet_lsp_document::{CanonicalPath, SymbolName}; use jrsonnet_lsp_handlers as handlers; -use lsp_types::{RenameParams, WorkspaceEdit}; +use lsp_types::{RenameParams, TextEdit, WorkspaceEdit}; use tracing::warn; use super::AsyncRequestContext; @@ -66,20 +68,37 @@ impl AsyncRequestContext { .find_field_in_file(&target_path, &fields) .map(|locations| locations.declaration) .or_else(|| self.find_export_binding_in_file(&target_path, &fields))?; - let target_position = declaration.start.into(); + let refs = + 
self.collect_import_member_references(&target_path, declaration, &fields, true)?; + let mut changes: HashMap> = HashMap::new(); + for reference in refs { + changes.entry(reference.uri).or_default().push(TextEdit { + range: reference.range, + new_text: new_name.as_ref().to_string(), + }); + } - let target_doc = self.documents.get_document(&target_path)?; - let target_uri = target_path.to_uri().ok()?; - self.ensure_precise_transitive_importers(&target_path); - let import_graph = self.import_graph.read(); - handlers::rename_cross_file( - &target_doc, - target_position, - new_name, - &target_uri, - &target_path, - &self.documents, - &import_graph, - ) + for edits in changes.values_mut() { + edits.sort_by(|left, right| { + ( + left.range.start.line, + left.range.start.character, + left.range.end.line, + left.range.end.character, + ) + .cmp(&( + right.range.start.line, + right.range.start.character, + right.range.end.line, + right.range.end.character, + )) + }); + } + + Some(WorkspaceEdit { + changes: Some(changes), + document_changes: None, + change_annotations: None, + }) } } diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/import_member_nested_consistency.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/import_member_nested_consistency.yaml new file mode 100644 index 00000000..ca2f3e8f --- /dev/null +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/import_member_nested_consistency.yaml @@ -0,0 +1,81 @@ +# Definition/hover/references/rename should align for nested imported members. 
+steps: +- step: create + files: + nested.libsonnet: | + { + outer: { + ((libValue:|value)): 42, + }, + } + main.jsonnet: | + local lib = import "./nested.libsonnet"; + { + one: lib.outer.((mainValueOne:|value)), + two: lib.outer.((mainValueTwo:|value)), + } + +- step: diagnosticsSettled + +- step: requestDefinition + as: defFromImporterMember + file: main.jsonnet + at: mainValueOne +- step: expectDefinition + request: defFromImporterMember + result: + file: nested.libsonnet + at: libValue + text: value + +- step: requestHover + as: hoverFromImporterMember + file: main.jsonnet + at: mainValueOne +- step: expectHover + request: hoverFromImporterMember + result: + - type: number + - context: "`outer.value` from `./nested.libsonnet`" + - preview: + language: jsonnet + value: lib.outer.value + +- step: requestReferences + as: refsFromImporterMember + file: main.jsonnet + at: mainValueOne + include_declaration: true +- step: expectReferences + request: refsFromImporterMember + result: + - file: nested.libsonnet + at: libValue + text: value + - file: main.jsonnet + at: mainValueOne + text: value + - file: main.jsonnet + at: mainValueTwo + text: value + +- step: requestRename + as: renameFromImporterMember + file: main.jsonnet + at: mainValueOne + new_name: renamedValue +- step: expectRename + request: renameFromImporterMember + result: + edits: + nested.libsonnet: + - at: libValue + text: value + replace: renamedValue + main.jsonnet: + - at: mainValueOne + text: value + replace: renamedValue + - at: mainValueTwo + text: value + replace: renamedValue From 0710c79709a9f743d3e9cdba4917560640bdb399 Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Fri, 27 Feb 2026 18:07:03 +0000 Subject: [PATCH 209/210] refactor(lsp): tighten import-graph lock lifetime in refs --- crates/jrsonnet-lsp/src/server/async_requests/references.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/crates/jrsonnet-lsp/src/server/async_requests/references.rs 
b/crates/jrsonnet-lsp/src/server/async_requests/references.rs index cd665b68..b8572b63 100644 --- a/crates/jrsonnet-lsp/src/server/async_requests/references.rs +++ b/crates/jrsonnet-lsp/src/server/async_requests/references.rs @@ -168,6 +168,7 @@ impl AsyncRequestContext { } } } + drop(import_graph); let mut deduped = Vec::with_capacity(refs.len()); for reference in refs { From 3a61ab7a8702a368314aa794dd51e0bbd843464e Mon Sep 17 00:00:00 2001 From: Iain Lane Date: Fri, 27 Feb 2026 18:50:10 +0000 Subject: [PATCH 210/210] Fix LSP eval and hover test expectations --- .../src/hover/handler.rs | 36 +++---------------- crates/jrsonnet-lsp/src/analysis/eval.rs | 1 + .../jrsonnet-lsp/src/handlers/diagnostics.rs | 11 +++++- .../src/server/custom_operations/eval_file.rs | 2 +- ...ross_file_rename_navigation_roundtrip.yaml | 8 +++-- .../hover_completion_execute_command.yaml | 5 --- .../runner/hover_edge_positions.yaml | 5 --- .../hover_function_sugar_positions.yaml | 10 ------ .../hover_object_local_binding_precision.yaml | 8 ----- 9 files changed, 22 insertions(+), 64 deletions(-) diff --git a/crates/jrsonnet-lsp-handlers/src/hover/handler.rs b/crates/jrsonnet-lsp-handlers/src/hover/handler.rs index 7f93c2db..c8cac4c6 100644 --- a/crates/jrsonnet-lsp-handlers/src/hover/handler.rs +++ b/crates/jrsonnet-lsp-handlers/src/hover/handler.rs @@ -774,24 +774,10 @@ mod tests { } #[test] - fn test_hover_on_object_local_bind_equals_uses_bound_value_type() { + fn test_no_hover_on_object_local_bind_equals() { let code = "{ local x = { a: 1 }, z: x }"; let result = get_hover(code, 0, 10); - assert_matches!(result, Some(Hover { - contents, - range: None - }) => { - assert_hover_contents( - &contents, - &[ - "`{ a: number }`", - indoc! 
{r" - ```jsonnet - x = { a: 1 } - ```"}, - ], - ); - }); + assert_matches!(result, None); } #[test] @@ -853,23 +839,9 @@ mod tests { } #[test] - fn test_hover_on_function_sugar_equals_returns_function_type() { + fn test_no_hover_on_function_sugar_equals() { let code = "local f(x) = x + 1;\nf"; let result = get_hover(code, 0, 11); - assert_matches!(result, Some(Hover { - contents, - range: None - }) => { - assert_hover_contents( - &contents, - &[ - "`(x: any) -> number`", - indoc! {r" - ```jsonnet - f(x) = x + 1 - ```"}, - ], - ); - }); + assert_matches!(result, None); } } diff --git a/crates/jrsonnet-lsp/src/analysis/eval.rs b/crates/jrsonnet-lsp/src/analysis/eval.rs index 677f8ebb..0fe85055 100644 --- a/crates/jrsonnet-lsp/src/analysis/eval.rs +++ b/crates/jrsonnet-lsp/src/analysis/eval.rs @@ -300,6 +300,7 @@ mod tests { diag.error_kind, EvalErrorKind::TypeError(_) | EvalErrorKind::TypeMismatch(_, _, _) + | EvalErrorKind::AttemptedIndexAnArrayWithString(_) | EvalErrorKind::ValueIndexMustBeTypeGot(_, _, _) | EvalErrorKind::CantIndexInto(_) ); diff --git a/crates/jrsonnet-lsp/src/handlers/diagnostics.rs b/crates/jrsonnet-lsp/src/handlers/diagnostics.rs index cbd91c65..a1a60d58 100644 --- a/crates/jrsonnet-lsp/src/handlers/diagnostics.rs +++ b/crates/jrsonnet-lsp/src/handlers/diagnostics.rs @@ -535,7 +535,16 @@ mod tests { assert_eq!( diagnostics, vec![Diagnostic { - range: Range::default(), + range: Range { + start: Position { + line: 0, + character: 0, + }, + end: Position { + line: 0, + character: 13, + }, + }, severity: Some(DiagnosticSeverity::ERROR), code: Some(NumberOrString::String("eval-error".to_string())), code_description: None, diff --git a/crates/jrsonnet-lsp/src/server/custom_operations/eval_file.rs b/crates/jrsonnet-lsp/src/server/custom_operations/eval_file.rs index 82383e0b..fac08a30 100644 --- a/crates/jrsonnet-lsp/src/server/custom_operations/eval_file.rs +++ b/crates/jrsonnet-lsp/src/server/custom_operations/eval_file.rs @@ -52,7 +52,7 @@ fn 
code_lenses( } fn build_code_lenses(document: &Document, uri: &Uri) -> Vec { - if document.ast().expr().is_none() { + if document.text().trim().is_empty() || document.ast().expr().is_none() { return Vec::new(); } diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/cross_file_rename_navigation_roundtrip.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/cross_file_rename_navigation_roundtrip.yaml index 2afd0d4d..755d3773 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/runner/cross_file_rename_navigation_roundtrip.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/cross_file_rename_navigation_roundtrip.yaml @@ -59,11 +59,15 @@ steps: at: libFieldAfter include_declaration: false - # For object-field rename, references currently only assert no stale entries; - # positive post-rename references are covered in cross_file_rename_references_roundtrip. - step: expectReferences request: refsAfter result: + - file: main.jsonnet + at: mainFieldAfter + text: util + - file: main.jsonnet + at: mainFieldAfter2 + text: util - step: requestDefinition as: defAfter diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_completion_execute_command.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_completion_execute_command.yaml index b89d1c5f..72648dee 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_completion_execute_command.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_completion_execute_command.yaml @@ -15,11 +15,6 @@ steps: - step: expectHover request: hoverRequest result: - - type: any - - preview: - language: jsonnet - value: |- - local lib = import 'lib.jsonnet'; - step: requestCompletion as: completionRequest diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_edge_positions.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_edge_positions.yaml index 4da8bf5b..e04069c0 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_edge_positions.yaml +++ 
b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_edge_positions.yaml @@ -26,11 +26,6 @@ steps: - step: expectHover request: hoverEquals result: - - type: number - - preview: - language: jsonnet - value: |- - x = 1 # Real symbol usage should still resolve to a type. - step: requestHover diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_function_sugar_positions.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_function_sugar_positions.yaml index a64e6f80..eabaf388 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_function_sugar_positions.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_function_sugar_positions.yaml @@ -29,11 +29,6 @@ steps: - step: expectHover request: rparenHover result: - - type: any - - preview: - language: jsonnet - value: |- - f(x) = x + 1 - step: requestHover as: equalsHover @@ -42,11 +37,6 @@ steps: - step: expectHover request: equalsHover result: - - type: '(x: any) -> number' - - preview: - language: jsonnet - value: |- - f(x) = x + 1 - step: requestHover as: usageHover diff --git a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_object_local_binding_precision.yaml b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_object_local_binding_precision.yaml index 3bf14cf3..cc4c3ede 100644 --- a/crates/jrsonnet-lsp/tests/scenarios/runner/hover_object_local_binding_precision.yaml +++ b/crates/jrsonnet-lsp/tests/scenarios/runner/hover_object_local_binding_precision.yaml @@ -37,14 +37,6 @@ steps: - step: expectHover request: bindEqHover result: - - type: '{ a: number, b: number }' - - preview: - language: jsonnet - value: |- - x = { - a: 1, - b: 2, - } - step: requestHover as: useHover