diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md index 958407bb8..41b00230f 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -17,6 +17,7 @@ Please search on the issue tracker before creating one. --> ### Environment - Platform: +- Terminal emulator: - Helix version:
~/.cache/helix/helix.log diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 7f18da6a4..65c2f9495 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -136,7 +136,7 @@ jobs: uses: actions-rs/cargo@v1 with: command: clippy - args: -- -D warnings + args: --all-targets -- -D warnings docs: name: Docs diff --git a/.gitmodules b/.gitmodules index b617e60c0..5b6609a80 100644 --- a/.gitmodules +++ b/.gitmodules @@ -174,6 +174,10 @@ path = helix-syntax/languages/tree-sitter-git-commit url = https://github.com/the-mikedavis/tree-sitter-git-commit.git shallow = true +[submodule "helix-syntax/languages/tree-sitter-llvm-mir"] + path = helix-syntax/languages/tree-sitter-llvm-mir + url = https://github.com/Flakebi/tree-sitter-llvm-mir.git + shallow = true [submodule "helix-syntax/languages/tree-sitter-git-diff"] path = helix-syntax/languages/tree-sitter-git-diff url = https://github.com/the-mikedavis/tree-sitter-git-diff.git @@ -186,3 +190,42 @@ path = helix-syntax/languages/tree-sitter-git-rebase url = https://github.com/the-mikedavis/tree-sitter-git-rebase.git shallow = true +[submodule "helix-syntax/languages/tree-sitter-lean"] + path = helix-syntax/languages/tree-sitter-lean + url = https://github.com/Julian/tree-sitter-lean + shallow = true +[submodule "helix-syntax/languages/tree-sitter-regex"] + path = helix-syntax/languages/tree-sitter-regex + url = https://github.com/tree-sitter/tree-sitter-regex.git + shallow = true +[submodule "helix-syntax/languages/tree-sitter-make"] + path = helix-syntax/languages/tree-sitter-make + url = https://github.com/alemuller/tree-sitter-make + shallow = true +[submodule "helix-syntax/languages/tree-sitter-git-config"] + path = helix-syntax/languages/tree-sitter-git-config + url = https://github.com/the-mikedavis/tree-sitter-git-config.git + shallow = true +[submodule "helix-syntax/languages/tree-sitter-graphql"] + path = helix-syntax/languages/tree-sitter-graphql + url = https://github.com/bkegley/tree-sitter-graphql + shallow = true +[submodule "helix-syntax/languages/tree-sitter-elm"] + path = helix-syntax/languages/tree-sitter-elm + url = https://github.com/elm-tooling/tree-sitter-elm + shallow = true +[submodule "helix-syntax/languages/tree-sitter-iex"] + path = helix-syntax/languages/tree-sitter-iex + url = https://github.com/elixir-lang/tree-sitter-iex + shallow = true +[submodule "helix-syntax/languages/tree-sitter-twig"] + path = helix-syntax/languages/tree-sitter-twig + url = https://github.com/eirabben/tree-sitter-twig.git + shallow = true +[submodule "helix-syntax/languages/tree-sitter-rescript"] + path = helix-syntax/languages/tree-sitter-rescript + url = https://github.com/jaredramirez/tree-sitter-rescript + shallow = true +[submodule "helix-syntax/languages/tree-sitter-erlang"] + path = helix-syntax/languages/tree-sitter-erlang + url = https://github.com/the-mikedavis/tree-sitter-erlang diff --git a/CHANGELOG.md b/CHANGELOG.md index 52ca2d602..389279912 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,4 +1,123 @@ +# 0.6.0 (2022-01-04) + +Happy new year and a big shout out to all the contributors! We had 55 contributors in this release. 
+ +Helix has popped up in DPorts and Fedora Linux via COPR ([#1270](https://github.com/helix-editor/helix/pull/1270)) + +As usual the following is a brief summary, refer to the git history for a full log: + +Breaking changes: + +- fix: Normalize backtab into shift-tab + +Features: + +- Macros ([#1234](https://github.com/helix-editor/helix/pull/1234)) +- Add reverse search functionality ([#958](https://github.com/helix-editor/helix/pull/958)) +- Allow keys to be mapped to sequences of commands ([#589](https://github.com/helix-editor/helix/pull/589)) +- Make it possible to keybind TypableCommands ([#1169](https://github.com/helix-editor/helix/pull/1169)) +- Detect workspace root using language markers ([#1370](https://github.com/helix-editor/helix/pull/1370)) +- Add WORD textobject ([#991](https://github.com/helix-editor/helix/pull/991)) +- Add LSP rename_symbol (space-r) ([#1011](https://github.com/helix-editor/helix/pull/1011)) +- Added workspace_symbol_picker ([#1041](https://github.com/helix-editor/helix/pull/1041)) +- Detect filetype from shebang line ([#1001](https://github.com/helix-editor/helix/pull/1001)) +- Allow piping from stdin into a buffer on startup ([#996](https://github.com/helix-editor/helix/pull/996)) +- Add auto pairs for same-char pairs ([#1219](https://github.com/helix-editor/helix/pull/1219)) +- Update settings at runtime ([#798](https://github.com/helix-editor/helix/pull/798)) +- Enable thin LTO (cccc194) + +Commands: +- :wonly -- window only ([#1057](https://github.com/helix-editor/helix/pull/1057)) +- buffer-close (:bc, :bclose) ([#1035](https://github.com/helix-editor/helix/pull/1035)) +- Add : and :goto commands ([#1128](https://github.com/helix-editor/helix/pull/1128)) +- :sort command ([#1288](https://github.com/helix-editor/helix/pull/1288)) +- Add m textobject for pair under cursor ([#961](https://github.com/helix-editor/helix/pull/961)) +- Implement "Goto next buffer / Goto previous buffer" commands ([#950](https://github.com/helix-editor/helix/pull/950)) +- Implement "Goto last modification" command ([#1067](https://github.com/helix-editor/helix/pull/1067)) +- Add trim_selections command ([#1092](https://github.com/helix-editor/helix/pull/1092)) +- Add movement shortcut for history ([#1088](https://github.com/helix-editor/helix/pull/1088)) +- Add command to inc/dec number under cursor ([#1027](https://github.com/helix-editor/helix/pull/1027)) + - Add support for dates for increment/decrement +- Align selections (&) ([#1101](https://github.com/helix-editor/helix/pull/1101)) +- Implement no-yank delete/change ([#1099](https://github.com/helix-editor/helix/pull/1099)) +- Implement black hole register ([#1165](https://github.com/helix-editor/helix/pull/1165)) +- gf as goto_file (gf) ([#1102](https://github.com/helix-editor/helix/pull/1102)) +- Add last modified file (gm) ([#1093](https://github.com/helix-editor/helix/pull/1093)) +- ensure_selections_forward ([#1393](https://github.com/helix-editor/helix/pull/1393)) +- Readline style insert mode ([#1039](https://github.com/helix-editor/helix/pull/1039)) + +Usability improvements and fixes: + +- Detect filetype on :write ([#1141](https://github.com/helix-editor/helix/pull/1141)) +- Add single and double quotes to matching pairs ([#995](https://github.com/helix-editor/helix/pull/995)) +- Launch with defaults upon invalid config/theme (rather than panicking) ([#982](https://github.com/helix-editor/helix/pull/982)) +- If switching away from an empty scratch buffer, remove it 
([#935](https://github.com/helix-editor/helix/pull/935)) +- Truncate the starts of file paths instead of the ends in picker ([#951](https://github.com/helix-editor/helix/pull/951)) +- Truncate the start of file paths in the StatusLine ([#1351](https://github.com/helix-editor/helix/pull/1351)) +- Prevent picker from previewing binaries or large file ([#939](https://github.com/helix-editor/helix/pull/939)) +- Inform when reaching undo/redo bounds ([#981](https://github.com/helix-editor/helix/pull/981)) +- search_impl will only align cursor center when it isn't in view ([#959](https://github.com/helix-editor/helix/pull/959)) +- Add , , , Delete in prompt mode ([#1034](https://github.com/helix-editor/helix/pull/1034)) +- Restore screen position when aborting search ([#1047](https://github.com/helix-editor/helix/pull/1047)) +- Buffer picker: show is_modifier flag ([#1020](https://github.com/helix-editor/helix/pull/1020)) +- Add commit hash to version info, if present ([#957](https://github.com/helix-editor/helix/pull/957)) +- Implement indent-aware delete ([#1120](https://github.com/helix-editor/helix/pull/1120)) +- Jump to end char of surrounding pair from any cursor pos ([#1121](https://github.com/helix-editor/helix/pull/1121)) +- File picker configuration ([#988](https://github.com/helix-editor/helix/pull/988)) +- Fix surround cursor position calculation ([#1183](https://github.com/helix-editor/helix/pull/1183)) +- Accept count for goto_window ([#1033](https://github.com/helix-editor/helix/pull/1033)) +- Make kill_to_line_end behave like emacs ([#1235](https://github.com/helix-editor/helix/pull/1235)) +- Only use a single documentation popup ([#1241](https://github.com/helix-editor/helix/pull/1241)) +- ui: popup: Don't allow scrolling past the end of content (3307f44c) +- Open files with spaces in filename, allow opening multiple files ([#1231](https://github.com/helix-editor/helix/pull/1231)) +- Allow paste commands to take a count ([#1261](https://github.com/helix-editor/helix/pull/1261)) +- Auto pairs selection ([#1254](https://github.com/helix-editor/helix/pull/1254)) +- Use a fuzzy matcher for commands ([#1386](https://github.com/helix-editor/helix/pull/1386)) +- Add c-s to pick word under doc cursor to prompt line & search completion ([#831](https://github.com/helix-editor/helix/pull/831)) +- Fix :earlier/:later missing changeset update ([#1069](https://github.com/helix-editor/helix/pull/1069)) +- Support extend for multiple goto ([#909](https://github.com/helix-editor/helix/pull/909)) +- Add arrow-key bindings for window switching ([#933](https://github.com/helix-editor/helix/pull/933)) +- Implement key ordering for info box ([#952](https://github.com/helix-editor/helix/pull/952)) + +LSP: +- Implement MarkedString rendering (e128a8702) +- Don't panic if init fails (d31bef7) +- Configurable diagnostic severity ([#1325](https://github.com/helix-editor/helix/pull/1325)) +- Resolve completion item ([#1315](https://github.com/helix-editor/helix/pull/1315)) +- Code action command support ([#1304](https://github.com/helix-editor/helix/pull/1304)) + +Grammars: + +- Adds mint language server ([#974](https://github.com/helix-editor/helix/pull/974)) +- Perl ([#978](https://github.com/helix-editor/helix/pull/978)) ([#1280](https://github.com/helix-editor/helix/pull/1280)) +- GLSL ([#993](https://github.com/helix-editor/helix/pull/993)) +- Racket ([#1143](https://github.com/helix-editor/helix/pull/1143)) +- WGSL ([#1166](https://github.com/helix-editor/helix/pull/1166)) +- LLVM 
([#1167](https://github.com/helix-editor/helix/pull/1167)) ([#1388](https://github.com/helix-editor/helix/pull/1388)) ([#1409](https://github.com/helix-editor/helix/pull/1409)) ([#1398](https://github.com/helix-editor/helix/pull/1398)) +- Markdown (49e06787) +- Scala ([#1278](https://github.com/helix-editor/helix/pull/1278)) +- Dart ([#1250](https://github.com/helix-editor/helix/pull/1250)) +- Fish ([#1308](https://github.com/helix-editor/helix/pull/1308)) +- Dockerfile ([#1303](https://github.com/helix-editor/helix/pull/1303)) +- Git (commit, rebase, diff) ([#1338](https://github.com/helix-editor/helix/pull/1338)) ([#1402](https://github.com/helix-editor/helix/pull/1402)) ([#1373](https://github.com/helix-editor/helix/pull/1373)) +- tree-sitter-comment ([#1300](https://github.com/helix-editor/helix/pull/1300)) +- Highlight comments in c, cpp, cmake and llvm ([#1309](https://github.com/helix-editor/helix/pull/1309)) +- Improve yaml syntax highlighting ([#1294](https://github.com/helix-editor/helix/pull/1294)) +- Improve rust syntax highlighting ([#1295](https://github.com/helix-editor/helix/pull/1295)) +- Add textobjects and indents to cmake ([#1307](https://github.com/helix-editor/helix/pull/1307)) +- Add textobjects and indents to c and cpp ([#1293](https://github.com/helix-editor/helix/pull/1293)) + +New themes: + +- Solarized dark ([#999](https://github.com/helix-editor/helix/pull/999)) +- Solarized light ([#1010](https://github.com/helix-editor/helix/pull/1010)) +- Spacebones light ([#1131](https://github.com/helix-editor/helix/pull/1131)) +- Monokai Pro ([#1206](https://github.com/helix-editor/helix/pull/1206)) +- Base16 Light and Terminal ([#1078](https://github.com/helix-editor/helix/pull/1078)) + - and a default 16 color theme, truecolor detection +- Dracula ([#1258](https://github.com/helix-editor/helix/pull/1258)) + # 0.5.0 (2021-10-28) A big shout out to all the contributors! We had 46 contributors in this release. 
diff --git a/Cargo.lock b/Cargo.lock index 83343fc48..cff9c9914 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -13,9 +13,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.52" +version = "1.0.53" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "84450d0b4a8bd1ba4144ce8ce718fbc5d071358b1e5384bace6536b3d1f2d5b3" +checksum = "94a45b455c14666b85fc40a019e8ab9eb75e3a124e05494f5397122bc9eb06e0" [[package]] name = "arc-swap" @@ -101,9 +101,9 @@ dependencies = [ [[package]] name = "clipboard-win" -version = "4.2.2" +version = "4.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3db8340083d28acb43451166543b98c838299b7e0863621be53a338adceea0ed" +checksum = "2f3e1238132dc01f081e1cbb9dace14e5ef4c3a51ee244bd982275fb514605db" dependencies = [ "error-code", "str-buf", @@ -121,9 +121,9 @@ dependencies = [ [[package]] name = "crossbeam-utils" -version = "0.8.5" +version = "0.8.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d82cfc11ce7f2c3faef78d8a684447b40d503d9681acebed6cb728d45940c4db" +checksum = "b5e5bed1f1c269533fa816a0a5492b3545209a205ca1a54842be180eb63a16a6" dependencies = [ "cfg-if", "lazy_static", @@ -131,16 +131,16 @@ dependencies = [ [[package]] name = "crossterm" -version = "0.22.1" +version = "0.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c85525306c4291d1b73ce93c8acf9c339f9b213aef6c1d85c3830cbf1c16325c" +checksum = "77b75a27dc8d220f1f8521ea69cd55a34d720a200ebb3a624d9aa19193d3b432" dependencies = [ "bitflags", "crossterm_winapi", "futures-core", "libc", "mio", - "parking_lot", + "parking_lot 0.12.0", "signal-hook", "signal-hook-mio", "winapi", @@ -202,9 +202,9 @@ dependencies = [ [[package]] name = "error-code" -version = "2.3.0" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5115567ac25674e0043e472be13d14e537f37ea8aa4bdc4aef0c89add1db1ff" +checksum = "64f18991e7bf11e7ffee451b5318b5c1a73c52d0d0ada6e5a3017c8c1ced6a21" dependencies = [ "libc", "str-buf", @@ -246,27 +246,17 @@ dependencies = [ "percent-encoding", ] -[[package]] -name = "futf" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c9c1ce3fa9336301af935ab852c437817d14cd33690446569392e65170aac3b" -dependencies = [ - "mac", - "new_debug_unreachable", -] - [[package]] name = "futures-core" -version = "0.3.19" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0c8ff0461b82559810cdccfde3215c3f373807f5e5232b71479bff7bb2583d7" +checksum = "0c09fd04b7e4073ac7156a9539b57a484a8ea920f79c7c675d05d289ab6110d3" [[package]] name = "futures-executor" -version = "0.3.19" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29d6d2ff5bb10fb95c85b8ce46538a2e5f5e7fdc755623a7d4529ab8a4ed9d2a" +checksum = "9420b90cfa29e327d0429f19be13e7ddb68fa1cccb09d65e5706b8c7a749b8a6" dependencies = [ "futures-core", "futures-task", @@ -275,15 +265,15 @@ dependencies = [ [[package]] name = "futures-task" -version = "0.3.19" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ee7c6485c30167ce4dfb83ac568a849fe53274c831081476ee13e0dce1aad72" +checksum = "57c66a976bf5909d801bbef33416c41372779507e7a6b3a5e25e4749c58f776a" [[package]] name = "futures-util" -version = "0.3.19" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"d9b5cf40b47a271f77a8b1bec03ca09044d99d2372c0de244e66430761127164" +checksum = "d8b7abd5d659d9b90c8cba917f6ec750a74e2dc23902ef9cd4cc8c8b22e6036a" dependencies = [ "futures-core", "futures-task", @@ -303,9 +293,9 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.2.3" +version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fcd999463524c52659517fe2cea98493cfe485d10565e7b0fb07dbba7ad2753" +checksum = "418d37c8b1d42553c93648be529cb70f920d3baf8ef469b74b9638df426e0b4c" dependencies = [ "cfg-if", "libc", @@ -366,7 +356,7 @@ dependencies = [ [[package]] name = "helix-core" -version = "0.5.0" +version = "0.6.0" dependencies = [ "arc-swap", "chrono", @@ -381,8 +371,9 @@ dependencies = [ "serde", "serde_json", "similar", + "slotmap", "smallvec", - "tendril", + "smartstring", "toml", "tree-sitter", "unicode-general-category", @@ -390,9 +381,23 @@ dependencies = [ "unicode-width", ] +[[package]] +name = "helix-dap" +version = "0.6.0" +dependencies = [ + "anyhow", + "fern", + "helix-core", + "log", + "serde", + "serde_json", + "thiserror", + "tokio", +] + [[package]] name = "helix-lsp" -version = "0.5.0" +version = "0.6.0" dependencies = [ "anyhow", "futures-executor", @@ -410,7 +415,7 @@ dependencies = [ [[package]] name = "helix-syntax" -version = "0.5.0" +version = "0.6.0" dependencies = [ "anyhow", "cc", @@ -421,7 +426,7 @@ dependencies = [ [[package]] name = "helix-term" -version = "0.5.0" +version = "0.6.0" dependencies = [ "anyhow", "chrono", @@ -433,6 +438,7 @@ dependencies = [ "grep-regex", "grep-searcher", "helix-core", + "helix-dap", "helix-lsp", "helix-tui", "helix-view", @@ -452,7 +458,7 @@ dependencies = [ [[package]] name = "helix-tui" -version = "0.5.0" +version = "0.6.0" dependencies = [ "bitflags", "cassowary", @@ -465,7 +471,7 @@ dependencies = [ [[package]] name = "helix-view" -version = "0.5.0" +version = "0.6.0" dependencies = [ "anyhow", "bitflags", @@ -474,6 +480,7 @@ dependencies = [ "crossterm", "futures-util", "helix-core", + "helix-dap", "helix-lsp", "helix-tui", "log", @@ -481,6 +488,7 @@ dependencies = [ "serde", "slotmap", "tokio", + "tokio-stream", "toml", "url", "which", @@ -560,15 +568,15 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" [[package]] name = "libc" -version = "0.2.104" +version = "0.2.117" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b2f96d100e1cf1929e7719b7edb3b90ab5298072638fccd77be9ce942ecdfce" +checksum = "e74d72e0f9b65b5b4ca49a346af3976df0f9c61d550727f349ecd559f251a26c" [[package]] name = "libloading" -version = "0.7.2" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "afe203d669ec979b7128619bae5a63b7b42e9203c1b29146079ee05e2f604b52" +checksum = "efbc0f03f9a775e9f6aed295c6a1ba2253c5757a9e03d55c6caa46a681abcddd" dependencies = [ "cfg-if", "winapi", @@ -576,9 +584,9 @@ dependencies = [ [[package]] name = "lock_api" -version = "0.4.5" +version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "712a4d093c9976e24e7dbca41db895dabcbac38eb5f4045393d17a95bdfb1109" +checksum = "88943dd7ef4a2e5a4bfa2753aaab3013e34ce2533d1996fb18ef591e315e2b3b" dependencies = [ "scopeguard", ] @@ -594,9 +602,9 @@ dependencies = [ [[package]] name = "lsp-types" -version = "0.91.1" +version = "0.92.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2368312c59425dd133cb9a327afee65be0a633a8ce471d248e2202a48f8f68ae" +checksum = 
"e8a69d4142d51b208c9fc3cea68b1a7fcef30354e7aa6ccad07250fd8430fc76" dependencies = [ "bitflags", "serde", @@ -605,12 +613,6 @@ dependencies = [ "url", ] -[[package]] -name = "mac" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c41e0c4fef86961ac6d6f8a82609f55f31b05e4fce149ac5710e439df7619ba4" - [[package]] name = "matches" version = "0.1.9" @@ -654,12 +656,6 @@ dependencies = [ "winapi", ] -[[package]] -name = "new_debug_unreachable" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e4a24736216ec316047a1fc4252e27dabb04218aa4a3f37c6e7ddbf1f9782b54" - [[package]] name = "ntapi" version = "0.3.6" @@ -712,7 +708,17 @@ checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99" dependencies = [ "instant", "lock_api", - "parking_lot_core", + "parking_lot_core 0.8.5", +] + +[[package]] +name = "parking_lot" +version = "0.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87f5ec2493a61ac0506c0f4199f99070cbe83857b0337006a30f3e6719b8ef58" +dependencies = [ + "lock_api", + "parking_lot_core 0.9.1", ] [[package]] @@ -729,6 +735,19 @@ dependencies = [ "winapi", ] +[[package]] +name = "parking_lot_core" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "28141e0cc4143da2443301914478dc976a61ffdb3f043058310c70df2fed8954" +dependencies = [ + "cfg-if", + "libc", + "redox_syscall", + "smallvec", + "windows-sys", +] + [[package]] name = "percent-encoding" version = "2.1.0" @@ -737,9 +756,9 @@ checksum = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e" [[package]] name = "pin-project-lite" -version = "0.2.7" +version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d31d11c69a6b52a174b42bdc0c30e5e11670f90788b2c471c31c1d17d449443" +checksum = "e280fbe77cc62c91527259e9442153f4688736748d24660126286329742b4c6c" [[package]] name = "pin-utils" @@ -749,18 +768,18 @@ checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" [[package]] name = "proc-macro2" -version = "1.0.30" +version = "1.0.36" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "edc3358ebc67bc8b7fa0c007f945b0b18226f78437d61bec735a9eb96b61ee70" +checksum = "c7342d5883fbccae1cc37a2353b09c87c9b0f3afd73f5fb9bba687a1f733b029" dependencies = [ "unicode-xid", ] [[package]] name = "pulldown-cmark" -version = "0.8.0" +version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffade02495f22453cd593159ea2f59827aae7f53fa8323f756799b670881dcf8" +checksum = "34f197a544b0c9ab3ae46c359a7ec9cbbb5c7bf97054266fecb7ead794a181d6" dependencies = [ "bitflags", "memchr", @@ -778,9 +797,9 @@ dependencies = [ [[package]] name = "quote" -version = "1.0.10" +version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38bc8cc6a5f2e3655e0899c1b848643b2562f853f114bfec7be120678e3ace05" +checksum = "864d3e96a899863136fc6e99f3d7cae289dafe43bf2c5ac19b70df7210c0a145" dependencies = [ "proc-macro2", ] @@ -847,18 +866,18 @@ checksum = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b" [[package]] name = "ropey" -version = "1.3.1" +version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9150aff6deb25b20ed110889f070a678bcd1033e46e5e9d6fb1abeab17947f28" +checksum = "e6b9aa65bcd9f308d37c7158b4a1afaaa32b8450213e20c9b98e7d5b3cc2fec3" dependencies = [ "smallvec", ] 
[[package]] name = "ryu" -version = "1.0.5" +version = "1.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "71d301d4193d031abdd79ff7e3dd721168a9572ef3fe51a1517aba235bd8f86e" +checksum = "73b4b750c782965c211b42f022f59af1fbceabdd026623714f104152f1ec149f" [[package]] name = "same-file" @@ -877,18 +896,18 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" [[package]] name = "serde" -version = "1.0.132" +version = "1.0.136" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b9875c23cf305cd1fd7eb77234cbb705f21ea6a72c637a5c6db5fe4b8e7f008" +checksum = "ce31e24b01e1e524df96f1c2fdd054405f8d7376249a5110886fb4b658484789" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.132" +version = "1.0.136" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ecc0db5cb2556c0e558887d9bbdcf6ac4471e83ff66cf696e5419024d1606276" +checksum = "08597e7152fcd306f41838ed3e37be9eaeed2b61c42e2117266a554fab4662f9" dependencies = [ "proc-macro2", "quote", @@ -897,9 +916,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.73" +version = "1.0.79" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bcbd0344bc6533bc7ec56df11d42fb70f1b912351c0825ccb7211b59d8af7cf5" +checksum = "8e8d9fa5c3b304765ce1fd9c4c8a3de2c8db365a5b91be52f186efc675681d95" dependencies = [ "itoa", "ryu", @@ -949,9 +968,9 @@ dependencies = [ [[package]] name = "signal-hook-tokio" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6c5d32165ff8b94e68e7b3bdecb1b082e958c22434b363482cfb89dcd6f3ff8" +checksum = "213241f76fb1e37e27de3b6aa1b068a2c333233b59cca6634f634b80a27ecf1e" dependencies = [ "futures-core", "libc", @@ -982,9 +1001,24 @@ dependencies = [ [[package]] name = "smallvec" -version = "1.7.0" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2dd574626839106c320a323308629dcb1acfc96e32a8cba364ddc61ac23ee83" + +[[package]] +name = "smartstring" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "31aa6a31c0c2b21327ce875f7e8952322acfcfd0c27569a6e18a647281352c9b" +dependencies = [ + "static_assertions", +] + +[[package]] +name = "static_assertions" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ecab6c735a6bb4139c0caafd0cc3635748bbb3acf4550e8138122099251f309" +checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" [[package]] name = "str-buf" @@ -994,26 +1028,15 @@ checksum = "d44a3643b4ff9caf57abcee9c2c621d6c03d9135e0d8b589bd9afb5992cb176a" [[package]] name = "syn" -version = "1.0.80" +version = "1.0.86" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d010a1623fbd906d51d650a9916aaefc05ffa0e4053ff7fe601167f3e715d194" +checksum = "8a65b3f4ffa0092e9887669db0eae07941f023991ab58ea44da8fe8e2d511c6b" dependencies = [ "proc-macro2", "quote", "unicode-xid", ] -[[package]] -name = "tendril" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9ef557cb397a4f0a5a3a628f06515f78563f2209e64d47055d9dc6052bf5e33" -dependencies = [ - "futf", - "mac", - "utf-8", -] - [[package]] name = "thiserror" version = "1.0.30" @@ -1036,9 +1059,9 @@ dependencies = [ [[package]] name = "thread_local" -version = "1.1.3" +version = "1.1.4" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "8018d24e04c95ac8790716a5987d0fec4f8b27249ffa0f7d33f1369bdfb88cbd" +checksum = "5516c27b78311c50bf42c071425c560ac799b11c30b31f87e3081965fe5e0180" dependencies = [ "once_cell", ] @@ -1054,9 +1077,9 @@ dependencies = [ [[package]] name = "tinyvec" -version = "1.5.0" +version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f83b2a3d4d9091d0abd7eba4dc2710b1718583bd4d8992e2190720ea38f391f7" +checksum = "2c1c1d5a42b6245520c249549ec267180beaffcc0615401ac8e31853d4b6d8d2" dependencies = [ "tinyvec_macros", ] @@ -1069,9 +1092,9 @@ checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c" [[package]] name = "tokio" -version = "1.15.0" +version = "1.16.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbbf1c778ec206785635ce8ad57fe52b3009ae9e0c9f574a728f3049d3e55838" +checksum = "0c27a64b625de6d309e8c57716ba93021dccf1b3b5c97edd6d3dd2d2135afc0a" dependencies = [ "bytes", "libc", @@ -1079,7 +1102,7 @@ dependencies = [ "mio", "num_cpus", "once_cell", - "parking_lot", + "parking_lot 0.11.2", "pin-project-lite", "signal-hook-registry", "tokio-macros", @@ -1119,9 +1142,9 @@ dependencies = [ [[package]] name = "tree-sitter" -version = "0.20.1" +version = "0.20.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9394e9dbfe967b5f3d6ab79e302e78b5fb7b530c368d634ff3b8d67ede138bf1" +checksum = "4e34327f8eac545e3f037382471b2b19367725a242bba7bc45edb9efb49fe39a" dependencies = [ "cc", "regex", @@ -1144,9 +1167,9 @@ checksum = "1a01404663e3db436ed2746d9fefef640d868edae3cceb81c3b8d5732fda678f" [[package]] name = "unicode-general-category" -version = "0.4.0" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07547e3ee45e28326cc23faac56d44f58f16ab23e413db526debce3b0bfd2742" +checksum = "1218098468b8085b19a2824104c70d976491d247ce194bbd9dc77181150cdfd6" [[package]] name = "unicode-normalization" @@ -1159,9 +1182,9 @@ dependencies = [ [[package]] name = "unicode-segmentation" -version = "1.8.0" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8895849a949e7845e06bd6dc1aa51731a103c42707010a5b591c0038fb73385b" +checksum = "7e8820f5d777f6224dc4be3632222971ac30164d4a258d595640799554ebfd99" [[package]] name = "unicode-width" @@ -1188,17 +1211,11 @@ dependencies = [ "serde", ] -[[package]] -name = "utf-8" -version = "0.7.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" - [[package]] name = "version_check" -version = "0.9.3" +version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5fecdca9a5291cc2b8dcf7dc02453fee791a280f3743cb0905f8822ae463b3fe" +checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" [[package]] name = "walkdir" @@ -1219,9 +1236,9 @@ checksum = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6" [[package]] name = "which" -version = "4.2.2" +version = "4.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea187a8ef279bc014ec368c27a920da2024d2a711109bfbe3440585d5cf27ad9" +checksum = "2a5a7e487e921cf220206864a94a89b6c6905bfc19f1057fa26a4cb360e5c1d2" dependencies = [ "either", "lazy_static", @@ -1259,9 +1276,52 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" +[[package]] +name = "windows-sys" +version = "0.32.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3df6e476185f92a12c072be4a189a0210dcdcf512a1891d6dff9edb874deadc6" +dependencies = [ + "windows_aarch64_msvc", + "windows_i686_gnu", + "windows_i686_msvc", + "windows_x86_64_gnu", + "windows_x86_64_msvc", +] + +[[package]] +name = "windows_aarch64_msvc" +version = "0.32.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d8e92753b1c443191654ec532f14c199742964a061be25d77d7a96f09db20bf5" + +[[package]] +name = "windows_i686_gnu" +version = "0.32.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a711c68811799e017b6038e0922cb27a5e2f43a2ddb609fe0b6f3eeda9de615" + +[[package]] +name = "windows_i686_msvc" +version = "0.32.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "146c11bb1a02615db74680b32a68e2d61f553cc24c4eb5b4ca10311740e44172" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.32.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c912b12f7454c6620635bbff3450962753834be2a594819bd5e945af18ec64bc" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.32.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "504a2476202769977a040c6364301a3f65d0cc9e3fb08600b2bda150a0488316" + [[package]] name = "xtask" -version = "0.5.0" +version = "0.6.0" dependencies = [ "helix-core", "helix-term", diff --git a/Cargo.toml b/Cargo.toml index 8c3ee6717..36dcb09f3 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -6,6 +6,7 @@ members = [ "helix-tui", "helix-syntax", "helix-lsp", + "helix-dap", "xtask", ] @@ -18,3 +19,4 @@ split-debuginfo = "unpacked" [profile.release] lto = "thin" +# debug = true diff --git a/TODO.md b/TODO.md index 80a9be05e..ab94cf9a0 100644 --- a/TODO.md +++ b/TODO.md @@ -1,25 +1,12 @@ -- tree sitter: - - markdown - - regex - - kotlin - - clojure - - erlang - - [ ] completion isIncomplete support - -1 - [ ] respect view fullscreen flag - [ ] Implement marks (superset of Selection/Range) - [ ] = for auto indent line/selection -- [ ] :x for closing buffers - [ ] lsp: signature help 2 -- [ ] macro recording -- [ ] extend selection (treesitter select parent node) (replaces viw, vi(, va( etc ) -- [ ] selection align - [ ] store some state between restarts: file positions, prompt history - [ ] highlight matched characters in picker diff --git a/base16_theme.toml b/base16_theme.toml index bb60a3ea5..42e02a98a 100644 --- a/base16_theme.toml +++ b/base16_theme.toml @@ -11,7 +11,7 @@ "ui.statusline" = { fg = "black", bg = "white" } "ui.statusline.inactive" = { fg = "gray", bg = "white" } "ui.help" = { modifiers = ["reversed"] } -"ui.cursor" = { modifiers = ["reversed"] } +"ui.cursor" = { fg = "white", modifiers = ["reversed"] } "variable" = "red" "constant.numeric" = "yellow" "constant" = "yellow" @@ -29,6 +29,15 @@ "namespace" = "magenta" "ui.help" = { fg = "white", bg = "black" } +"markup.heading" = "blue" +"markup.list" = "red" +"markup.bold" = { fg = "yellow", modifiers = ["bold"] } +"markup.italic" = { fg = "magenta", modifiers = ["italic"] } +"markup.link.url" = { fg = "yellow", modifiers = ["underlined"] } +"markup.link.text" = "red" +"markup.quote" = "cyan" +"markup.raw" = "green" + "diff.plus" = "green" "diff.delta" = "yellow" "diff.minus" = "red" diff --git a/book/src/configuration.md b/book/src/configuration.md index 33a933b2b..8048f5484 
100644 --- a/book/src/configuration.md +++ b/book/src/configuration.md @@ -5,9 +5,27 @@ To override global configuration parameters, create a `config.toml` file located * Linux and Mac: `~/.config/helix/config.toml` * Windows: `%AppData%\helix\config.toml` +Example config: + +```toml +theme = "onedark" + +[editor] +line-number = "relative" +mouse = false + +[editor.cursor-shape] +insert = "bar" +normal = "block" +select = "underline" + +[editor.file-picker] +hidden = false +``` + ## Editor -`[editor]` section of the config. +### `[editor]` Section | Key | Description | Default | |--|--|---------| @@ -16,7 +34,7 @@ To override global configuration parameters, create a `config.toml` file located | `middle-click-paste` | Middle click paste support. | `true` | | `scroll-lines` | Number of lines to scroll per scroll wheel step. | `3` | | `shell` | Shell to use when running external commands. | Unix: `["sh", "-c"]`
Windows: `["cmd", "/C"]` | -| `line-number` | Line number display (`absolute`, `relative`) | `absolute` | +| `line-number` | Line number display: `absolute` simply shows each line's number, while `relative` shows the distance from the current line. When unfocused or in insert mode, `relative` will still show absolute line numbers. | `absolute` | | `smart-case` | Enable smart case regex searching (case insensitive unless pattern contains upper case characters) | `true` | | `auto-pairs` | Enable automatic insertion of pairs to parentheses, brackets, etc. | `true` | | `auto-completion` | Enable automatic pop up of auto-completion. | `true` | @@ -25,7 +43,28 @@ To override global configuration parameters, create a `config.toml` file located | `auto-info` | Whether to display infoboxes | `true` | | `true-color` | Set to `true` to override automatic detection of terminal truecolor support in the event of a false negative. | `false` | -`[editor.filepicker]` section of the config. Sets options for file picker and global search. All but the last key listed in the default file-picker configuration below are IgnoreOptions: whether hidden files and files listed within ignore files are ignored by (not visible in) the helix file picker and global search. There is also one other key, `max-depth` available, which is not defined by default. +### `[editor.cursor-shape]` Section + +Defines the shape of the cursor in each mode. Note that due to limitations +of the terminal environment, only the primary cursor can change shape. + +| Key | Description | Default | +| --- | ----------- | ------- | +| `normal` | Cursor shape in [normal mode][normal mode] | `block` | +| `insert` | Cursor shape in [insert mode][insert mode] | `block` | +| `select` | Cursor shape in [select mode][select mode] | `block` | + +[normal mode]: ./keymap.md#normal-mode +[insert mode]: ./keymap.md#insert-mode +[select mode]: ./keymap.md#select--extend-mode + +### `[editor.file-picker]` Section + +Sets options for file picker and global search. All but the last key listed in +the default file-picker configuration below are IgnoreOptions: whether hidden +files and files listed within ignore files are ignored by (not visible in) the +helix file picker and global search. There is also one other key, `max-depth` +available, which is not defined by default. 
| Key | Description | Default | |--|--|---------| diff --git a/book/src/generated/lang-support.md b/book/src/generated/lang-support.md index 73712ff2c..894050731 100644 --- a/book/src/generated/lang-support.md +++ b/book/src/generated/lang-support.md @@ -1,41 +1,53 @@ | Language | Syntax Highlighting | Treesitter Textobjects | Auto Indent | Default LSP | | --- | --- | --- | --- | --- | | bash | ✓ | | | `bash-language-server` | -| c | ✓ | | | `clangd` | +| c | ✓ | ✓ | ✓ | `clangd` | | c-sharp | ✓ | | | | | cmake | ✓ | ✓ | ✓ | `cmake-language-server` | | comment | ✓ | | | | -| cpp | ✓ | | | `clangd` | +| cpp | ✓ | ✓ | ✓ | `clangd` | | css | ✓ | | | | | dart | ✓ | | ✓ | `dart` | | dockerfile | ✓ | | | `docker-langserver` | | elixir | ✓ | | | `elixir-ls` | +| elm | ✓ | | | `elm-language-server` | +| erlang | ✓ | | | | | fish | ✓ | ✓ | ✓ | | | git-commit | ✓ | | | | +| git-config | ✓ | | | | | git-diff | ✓ | | | | | git-rebase | ✓ | | | | | glsl | ✓ | | ✓ | | | go | ✓ | ✓ | ✓ | `gopls` | +| graphql | ✓ | | | | +| haskell | ✓ | | | `haskell-language-server-wrapper` | | html | ✓ | | | | +| iex | ✓ | | | | | java | ✓ | | | | -| javascript | ✓ | | ✓ | | +| javascript | ✓ | | ✓ | `typescript-language-server` | | json | ✓ | | ✓ | | | julia | ✓ | | | `julia` | | latex | ✓ | | | | +| lean | ✓ | | | `lean` | | ledger | ✓ | | | | | llvm | ✓ | ✓ | ✓ | | +| llvm-mir | ✓ | ✓ | ✓ | | +| llvm-mir-yaml | ✓ | | ✓ | | | lua | ✓ | | ✓ | | +| make | ✓ | | | | | markdown | ✓ | | | | | mint | | | | `mint` | | nix | ✓ | | ✓ | `rnix-lsp` | | ocaml | ✓ | | ✓ | | | ocaml-interface | ✓ | | | | | perl | ✓ | ✓ | ✓ | | -| php | ✓ | | ✓ | | +| php | ✓ | ✓ | ✓ | | | prolog | | | | `swipl` | | protobuf | ✓ | | ✓ | | | python | ✓ | ✓ | ✓ | `pylsp` | | racket | | | | `racket` | +| regex | ✓ | | | | +| rescript | ✓ | ✓ | | `rescript-language-server` | | ruby | ✓ | | ✓ | `solargraph` | | rust | ✓ | ✓ | ✓ | `rust-analyzer` | | scala | ✓ | | ✓ | `metals` | @@ -44,6 +56,7 @@ | toml | ✓ | | | | | tsq | ✓ | | | | | tsx | ✓ | | | `typescript-language-server` | +| twig | ✓ | | | | | typescript | ✓ | | ✓ | `typescript-language-server` | | vue | ✓ | | | | | wgsl | ✓ | | | | diff --git a/book/src/generated/typable-cmd.md b/book/src/generated/typable-cmd.md index 779fdaa2e..04e5fae31 100644 --- a/book/src/generated/typable-cmd.md +++ b/book/src/generated/typable-cmd.md @@ -38,6 +38,9 @@ | `:encoding` | Set encoding based on `https://encoding.spec.whatwg.org` | | `:reload` | Discard changes and reload from the source file. | | `:tree-sitter-scopes` | Display tree sitter scopes, primarily for theming and development. | +| `:debug-start`, `:dbg` | Start a debug session from a given template with given parameters. | +| `:debug-remote`, `:dbg-tcp` | Connect to a debug adapter by TCP address and start a debugging session from a given template with given parameters. | +| `:debug-eval` | Evaluate expression in current debug context. | | `:vsplit`, `:vs` | Open the file in a vertical split. | | `:hsplit`, `:hs`, `:sp` | Open the file in a horizontal split. | | `:tutor` | Open the tutorial. | @@ -45,4 +48,5 @@ | `:set-option`, `:set` | Set a config option at runtime | | `:sort` | Sort ranges in selection. | | `:rsort` | Sort ranges in selection in reverse order. | +| `:tree-sitter-subtree`, `:ts-subtree` | Display tree sitter subtree under cursor, primarily for debugging queries. | | `:help`, `:h` | Open documentation for a command or keybind. 
| diff --git a/book/src/guides/textobject.md b/book/src/guides/textobject.md index dd726b7c9..7200a5144 100644 --- a/book/src/guides/textobject.md +++ b/book/src/guides/textobject.md @@ -24,7 +24,22 @@ The following [captures][tree-sitter-captures] are recognized: [Example query files][textobject-examples] can be found in the helix GitHub repository. +## Queries for Textobject Based Navigation + +[Tree-sitter based navigation][textobjects-nav] is done using captures in the +following order: + +- `object.movement` +- `object.around` +- `object.inside` + +For example, if a `function.around` capture has already been defined for a language +in its `textobjects.scm` file, function navigation should also work automatically. +`function.movement` should be defined only if the node captured by `function.around` +doesn't make sense in a navigation context. + [textobjects]: ../usage.md#textobjects +[textobjects-nav]: ../usage.md#tree-sitter-textobject-based-navigation [tree-sitter-queries]: https://tree-sitter.github.io/tree-sitter/using-parsers#query-syntax [tree-sitter-captures]: https://tree-sitter.github.io/tree-sitter/using-parsers#capturing-nodes [textobject-examples]: https://github.com/search?q=repo%3Ahelix-editor%2Fhelix+filename%3Atextobjects.scm&type=Code&ref=advsearch&l=&l= diff --git a/book/src/keymap.md b/book/src/keymap.md index 581e70e93..5de4edf93 100644 --- a/book/src/keymap.md +++ b/book/src/keymap.md @@ -25,7 +25,9 @@ | `f` | Find next char | `find_next_char` | | `T` | Find 'till previous char | `till_prev_char` | | `F` | Find previous char | `find_prev_char` | +| `G` | Go to line number `` | `goto_line` | | `Alt-.` | Repeat last motion (`f`, `t` or `m`) | `repeat_last_motion` | +| `Alt-:` | Ensures the selection is in forward direction | `ensure_selections_forward` | | `Home` | Move to the start of the line | `goto_line_start` | | `End` | Move to the end of the line | `goto_line_end` | | `PageUp` | Move page up | `page_up` | @@ -88,6 +90,7 @@ | Alt-| | Pipe each selection into shell command, ignoring output | `shell_pipe_to` | | `!` | Run shell command, inserting output before each selection | `shell_insert_output` | | `Alt-!` | Run shell command, appending output after each selection | `shell_append_output` | +| `$` | Pipe each selection into shell command, keep selections where command returned 0 | `shell_keep_pipe` | ### Selection manipulation @@ -112,12 +115,14 @@ | `%` | Select entire file | `select_all` | | `x` | Select current line, if already selected, extend to next line | `extend_line` | | `X` | Extend selection to line bounds (line-wise selection) | `extend_to_line_bounds` | -| | Expand selection to parent syntax node TODO: pick a key (**TS**) | `expand_selection` | | `J` | Join lines inside selection | `join_selections` | | `K` | Keep selections matching the regex | `keep_selections` | | `Alt-K` | Remove selections matching the regex | `remove_selections` | -| `$` | Pipe each selection into shell command, keep selections where command returned 0 | `shell_keep_pipe` | | `Ctrl-c` | Comment/uncomment the selections | `toggle_comments` | +| `Alt-k` | Expand selection to parent syntax node (**TS**) | `expand_selection` | +| `Alt-j` | Shrink syntax tree object selection (**TS**) | `shrink_selection` | +| `Alt-h` | Select previous sibling node in syntax tree (**TS**) | `select_prev_sibling` | +| `Alt-l` | Select next sibling node in syntax tree (**TS**) | `select_next_sibling` | ### Search @@ -150,10 +155,10 @@ over text and not actively editing it). 
| `m` | Align the line to the middle of the screen (horizontally) | `align_view_middle` | | `j` , `down` | Scroll the view downwards | `scroll_down` | | `k` , `up` | Scroll the view upwards | `scroll_up` | -| `f` | Move page down | `page_down` | -| `b` | Move page up | `page_up` | -| `d` | Move half page down | `half_page_down` | -| `u` | Move half page up | `half_page_up` | +| `Ctrl-f` | Move page down | `page_down` | +| `Ctrl-b` | Move page up | `page_up` | +| `Ctrl-d` | Move half page down | `half_page_down` | +| `Ctrl-u` | Move half page up | `half_page_up` | #### Goto mode @@ -253,14 +258,20 @@ Displays documentation for item under cursor. Mappings in the style of [vim-unimpaired](https://github.com/tpope/vim-unimpaired). -| Key | Description | Command | -| ----- | ----------- | ------- | -| `[d` | Go to previous diagnostic (**LSP**) | `goto_prev_diag` | -| `]d` | Go to next diagnostic (**LSP**) | `goto_next_diag` | -| `[D` | Go to first diagnostic in document (**LSP**) | `goto_first_diag` | -| `]D` | Go to last diagnostic in document (**LSP**) | `goto_last_diag` | -| `[space` | Add newline above | `add_newline_above` | -| `]space` | Add newline below | `add_newline_below` | +| Key | Description | Command | +| ----- | ----------- | ------- | +| `[d` | Go to previous diagnostic (**LSP**) | `goto_prev_diag` | +| `]d` | Go to next diagnostic (**LSP**) | `goto_next_diag` | +| `[D` | Go to first diagnostic in document (**LSP**) | `goto_first_diag` | +| `]D` | Go to last diagnostic in document (**LSP**) | `goto_last_diag` | +| `]f` | Go to next function (**TS**) | `goto_next_function` | +| `[f` | Go to previous function (**TS**) | `goto_prev_function` | +| `]c` | Go to next class (**TS**) | `goto_next_class` | +| `[c` | Go to previous class (**TS**) | `goto_prev_class` | +| `]p` | Go to next parameter (**TS**) | `goto_next_parameter` | +| `[p` | Go to previous parameter (**TS**) | `goto_prev_parameter` | +| `[space` | Add newline above | `add_newline_above` | +| `]space` | Add newline below | `add_newline_below` | ## Insert Mode @@ -299,7 +310,11 @@ Keys to use within picker. Remapping currently not supported. | Key | Description | | ----- | ------------- | | `Up`, `Ctrl-k`, `Ctrl-p` | Previous entry | +| `PageUp`, `Ctrl-b` | Page up | | `Down`, `Ctrl-j`, `Ctrl-n` | Next entry | +| `PageDown`, `Ctrl-f` | Page down | +| `Home` | Go to first entry | +| `End` | Go to last entry | | `Ctrl-space` | Filter options | | `Enter` | Open selected | | `Ctrl-s` | Open horizontally | diff --git a/book/src/themes.md b/book/src/themes.md index 8eee334b9..9abcfe8c1 100644 --- a/book/src/themes.md +++ b/book/src/themes.md @@ -1,14 +1,14 @@ # Themes -First you'll need to place selected themes in your `themes` directory (i.e `~/.config/helix/themes`), the directory might have to be created beforehand. +To use a theme add `theme = ""` to your [`config.toml`](./configuration.md) at the very top of the file before the first section or select it during runtime using `:theme `. -To use a custom theme add `theme = ` to your [`config.toml`](./configuration.md) or override it during runtime using `:theme `. +## Creating a theme -The default theme.toml can be found [here](https://github.com/helix-editor/helix/blob/master/theme.toml), and user submitted themes [here](https://github.com/helix-editor/helix/blob/master/runtime/themes). +Create a file with the name of your theme as file name (i.e `mytheme.toml`) and place it in your `themes` directory (i.e `~/.config/helix/themes`). 
The directory might have to be created beforehand. -## Creating a theme +The names "default" and "base16_default" are reserved for the builtin themes and cannot be overridden by user defined themes. -First create a file with the name of your theme as file name (i.e `mytheme.toml`) and place it in your `themes` directory (i.e `~/.config/helix/themes`). +The default theme.toml can be found [here](https://github.com/helix-editor/helix/blob/master/theme.toml), and user submitted themes [here](https://github.com/helix-editor/helix/blob/master/runtime/themes). Each line in the theme file is specified as below: @@ -147,6 +147,7 @@ We use a similar set of scopes as - `repeat` - `for`, `while`, `loop` - `import` - `import`, `export` - `return` + - `exception` - `operator` - `or`, `in` - `directive` - Preprocessor directives (`#if` in C) - `function` - `fn`, `func` @@ -171,8 +172,9 @@ We use a similar set of scopes as - `bold` - `italic` - `link` - - `url` - - `label` + - `url` - urls pointed to by links + - `label` - non-url link references + - `text` - url and image descriptions in links - `quote` - `raw` - `inline` @@ -188,6 +190,18 @@ We use a similar set of scopes as These scopes are used for theming the editor interface. +- `markup` + - `normal` + - `completion` - for completion doc popup ui + - `hover` - for hover popup ui + - `heading` + - `completion` - for completion doc popup ui + - `hover` - for hover popup ui + - `raw` + - `inline` + - `completion` - for completion doc popup ui + - `hover` - for hover popup ui + | Key | Notes | | --- | --- | diff --git a/book/src/usage.md b/book/src/usage.md index cf7d9d488..039628bf8 100644 --- a/book/src/usage.md +++ b/book/src/usage.md @@ -42,7 +42,7 @@ helix. The keymappings have been inspired from [vim-sandwich](https://github.com `ms` acts on a selection, so select the text first and use `ms`. `mr` and `md` work on the closest pairs found and selections are not required; use counts to act in outer pairs. -It can also act on multiple seletions (yay!). For example, to change every occurance of `(use)` to `[use]`: +It can also act on multiple selections (yay!). For example, to change every occurrence of `(use)` to `[use]`: - `%` to select the whole file - `s` to split the selections on a search term @@ -70,7 +70,26 @@ Currently supported: `word`, `surround`, `function`, `class`, `parameter`. | `c` | Class | | `p` | Parameter | -Note: `f`, `c`, etc need a tree-sitter grammar active for the current +> NOTE: `f`, `c`, etc need a tree-sitter grammar active for the current document and a special tree-sitter query file to work properly. [Only -some grammars](https://github.com/search?q=repo%3Ahelix-editor%2Fhelix+filename%3Atextobjects.scm&type=Code&ref=advsearch&l=&l=) -currently have the query file implemented. Contributions are welcome ! +some grammars][lang-support] currently have the query file implemented. +Contributions are welcome! + +## Tree-sitter Textobject Based Navigation + +Navigating between functions, classes, parameters, etc is made +possible by leveraging tree-sitter and textobjects queries. For +example to move to the next function use `]f`, to move to previous +class use `[c`, and so on. + +![tree-sitter-nav-demo][tree-sitter-nav-demo] + +See the [unimpaired][unimpaired-keybinds] section of the keybind +documentation for the full reference. + +> NOTE: This feature is dependent on tree-sitter based textobjects +and therefore requires the corresponding query file to work properly. 
+ +[lang-support]: ./lang-support.md +[unimpaired-keybinds]: ./keymap.md#unimpaired +[tree-sitter-nav-demo]: https://user-images.githubusercontent.com/23398472/152332550-7dfff043-36a2-4aec-b8f2-77c13eb56d6f.gif diff --git a/flake.lock b/flake.lock index acd10f766..94e443e3a 100644 --- a/flake.lock +++ b/flake.lock @@ -2,11 +2,11 @@ "nodes": { "devshell": { "locked": { - "lastModified": 1639692811, - "narHash": "sha256-wOOBH0fVsfNqw/5ZWRoKspyesoXBgiwEOUBH4c7JKEo=", + "lastModified": 1641980203, + "narHash": "sha256-RiWJ3+6V267Ji+P54K1Xrj1Nsah9BfG/aLfIhqgVyBY=", "owner": "numtide", "repo": "devshell", - "rev": "d3a1f5bec3632b33346865b1c165bf2420bb2f52", + "rev": "d897c1ddb4eab66cc2b783c7868d78555b9880ad", "type": "github" }, "original": { @@ -41,11 +41,11 @@ ] }, "locked": { - "lastModified": 1639807801, - "narHash": "sha256-y32tMq1LTRVbMW3QN5i98iOQjQt2QSsif3ayUkD1o3g=", + "lastModified": 1642054253, + "narHash": "sha256-kHh9VmaB7gbS6pheheC4x0uT84LEmhfbsbWEQJgU2E4=", "owner": "yusdacra", "repo": "nix-cargo-integration", - "rev": "b5bbaa4f5239e6f0619846f9a5380f07baa853d3", + "rev": "f8fa9af990195a3f63fe2dde84aa187e193da793", "type": "github" }, "original": { @@ -56,11 +56,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1639699734, - "narHash": "sha256-tlX6WebGmiHb2Hmniff+ltYp+7dRfdsBxw9YczLsP60=", + "lastModified": 1641887635, + "narHash": "sha256-kDGpufwzVaiGe5e1sBUBPo9f1YN+nYHJlYqCaVpZTQQ=", "owner": "nixos", "repo": "nixpkgs", - "rev": "03ec468b14067729a285c2c7cfa7b9434a04816c", + "rev": "b2737d4980a17cc2b7d600d7d0b32fd7333aca88", "type": "github" }, "original": { @@ -99,11 +99,11 @@ "nixpkgs": "nixpkgs_2" }, "locked": { - "lastModified": 1639880499, - "narHash": "sha256-/BibDmFwgWuuTUkNVO6YlvuTSWM9dpBvlZoTAPs7ORI=", + "lastModified": 1642128126, + "narHash": "sha256-av8JUACdrTfQYl/ftZJvKpZEmZfa0avCq7tt5Usdoq0=", "owner": "oxalica", "repo": "rust-overlay", - "rev": "c6c83589ae048af20d93d01eb07a4176012093d0", + "rev": "ce4ef6f2d74f2b68f7547df1de22d1b0037ce4ad", "type": "github" }, "original": { diff --git a/flake.nix b/flake.nix index cbf10c975..660207eda 100644 --- a/flake.nix +++ b/flake.nix @@ -20,50 +20,64 @@ # Set default package to helix-term release build defaultOutputs = { app = "hx"; package = "helix"; }; overrides = { - crateOverrides = common: _: { - helix-term = prev: { - # link languages and theme toml files since helix-term expects them (for tests) - preConfigure = "ln -s ${common.root}/{languages.toml,theme.toml} .."; - buildInputs = (prev.buildInputs or [ ]) ++ [ common.cCompiler.cc.lib ]; - }; - # link languages and theme toml files since helix-view expects them - helix-view = _: { preConfigure = "ln -s ${common.root}/{languages.toml,theme.toml} .."; }; - helix-syntax = _prev: { + crateOverrides = common: _: rec { + # link languages and theme toml files since helix-core/helix-view expects them + helix-core = _: { preConfigure = "ln -s ${common.root}/{languages.toml,theme.toml,base16_theme.toml} .."; }; + helix-view = _: { preConfigure = "ln -s ${common.root}/{languages.toml,theme.toml,base16_theme.toml} .."; }; + helix-syntax = prev: { + src = + let + pkgs = common.pkgs; + helix = pkgs.fetchgit { + url = "https://github.com/helix-editor/helix.git"; + rev = "a8fd33ac012a79069ef1409503a2edcf3a585153"; + fetchSubmodules = true; + sha256 = "sha256-5AtOC55ttWT+7RYMboaFxpGZML51ix93wAkYJTt+8JI="; + }; + in + pkgs.runCommand prev.src.name { } '' + mkdir -p $out + ln -s ${prev.src}/* $out + ln -sf ${helix}/helix-syntax/languages $out + ''; preConfigure = "mkdir -p 
../runtime/grammars"; postInstall = "cp -r ../runtime $out/runtime"; }; - }; - mainBuild = common: prev: - let - inherit (common) pkgs lib; - helixSyntax = lib.buildCrate { - root = self; - memberName = "helix-syntax"; - defaultCrateOverrides = { - helix-syntax = common.crateOverrides.helix-syntax; + helix-term = prev: + let + inherit (common) pkgs lib; + helixSyntax = lib.buildCrate { + root = self; + memberName = "helix-syntax"; + defaultCrateOverrides = { + helix-syntax = helix-syntax; + }; + release = false; }; - release = false; + runtimeDir = pkgs.runCommand "helix-runtime" { } '' + mkdir -p $out + ln -s ${common.root}/runtime/* $out + ln -sf ${helixSyntax}/runtime/grammars $out + ''; + in + { + # link languages and theme toml files since helix-term expects them (for tests) + preConfigure = "ln -s ${common.root}/{languages.toml,theme.toml,base16_theme.toml} .."; + buildInputs = (prev.buildInputs or [ ]) ++ [ common.cCompiler.cc.lib ]; + nativeBuildInputs = [ pkgs.makeWrapper ]; + postFixup = '' + if [ -f "$out/bin/hx" ]; then + wrapProgram "$out/bin/hx" --set HELIX_RUNTIME "${runtimeDir}" + fi + ''; }; - runtimeDir = pkgs.runCommand "helix-runtime" { } '' - mkdir -p $out - ln -s ${common.root}/runtime/* $out - ln -sf ${helixSyntax}/runtime/grammars $out - ''; - in - lib.optionalAttrs (common.memberName == "helix-term") { - nativeBuildInputs = [ pkgs.makeWrapper ]; - postFixup = '' - if [ -f "$out/bin/hx" ]; then - wrapProgram "$out/bin/hx" --set HELIX_RUNTIME "${runtimeDir}" - fi - ''; - }; + }; shell = common: prev: { - packages = prev.packages ++ (with common.pkgs; [ lld_13 lldb cargo-tarpaulin ]); + packages = prev.packages ++ (with common.pkgs; [ lld_13 lldb cargo-tarpaulin cargo-flamegraph ]); env = prev.env ++ [ { name = "HELIX_RUNTIME"; eval = "$PWD/runtime"; } { name = "RUST_BACKTRACE"; value = "1"; } - { name = "RUSTFLAGS"; value = "-C link-arg=-fuse-ld=lld -C target-cpu=native"; } + { name = "RUSTFLAGS"; value = "-C link-arg=-fuse-ld=lld -C target-cpu=native -Clink-arg=-Wl,--no-rosegment"; } ]; }; }; diff --git a/helix-core/Cargo.toml b/helix-core/Cargo.toml index 3d7fe8662..7ff91cfda 100644 --- a/helix-core/Cargo.toml +++ b/helix-core/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "helix-core" -version = "0.5.0" +version = "0.6.0" authors = ["Blaž Hrastnik "] edition = "2021" license = "MPL-2.0" @@ -13,15 +13,16 @@ include = ["src/**/*", "README.md"] [features] [dependencies] -helix-syntax = { version = "0.5", path = "../helix-syntax" } +helix-syntax = { version = "0.6", path = "../helix-syntax" } ropey = "1.3" -smallvec = "1.7" -tendril = "0.4.2" -unicode-segmentation = "1.8" +smallvec = "1.8" +smartstring = "0.2.9" +unicode-segmentation = "1.9" unicode-width = "0.1" -unicode-general-category = "0.4" +unicode-general-category = "0.5" # slab = "0.4.2" +slotmap = "1.0" tree-sitter = "0.20" once_cell = "1.9" arc-swap = "1" diff --git a/helix-core/src/auto_pairs.rs b/helix-core/src/auto_pairs.rs index 1b3de6ea0..f4359a342 100644 --- a/helix-core/src/auto_pairs.rs +++ b/helix-core/src/auto_pairs.rs @@ -1,7 +1,9 @@ //! When typing the opening character of one of the possible pairs defined below, //! this module provides the functionality to insert the paired closing character. 
-use crate::{movement::Direction, Range, Rope, Selection, Tendril, Transaction}; +use crate::{ + graphemes, movement::Direction, Range, Rope, RopeGraphemes, Selection, Tendril, Transaction, +}; use log::debug; use smallvec::SmallVec; @@ -63,31 +65,132 @@ fn prev_char(doc: &Rope, pos: usize) -> Option { doc.get_char(pos - 1) } +fn is_single_grapheme(doc: &Rope, range: &Range) -> bool { + let mut graphemes = RopeGraphemes::new(doc.slice(range.from()..range.to())); + let first = graphemes.next(); + let second = graphemes.next(); + debug!("first: {:#?}, second: {:#?}", first, second); + first.is_some() && second.is_none() +} + /// calculate what the resulting range should be for an auto pair insertion fn get_next_range( + doc: &Rope, start_range: &Range, offset: usize, typed_char: char, len_inserted: usize, ) -> Range { - let end_head = start_range.head + offset + typed_char.len_utf8(); + // When the character under the cursor changes due to complete pair + // insertion, we must look backward a grapheme and then add the length + // of the insertion to put the resulting cursor in the right place, e.g. + // + // foo[\r\n] - anchor: 3, head: 5 + // foo([)]\r\n - anchor: 4, head: 5 + // + // foo[\r\n] - anchor: 3, head: 5 + // foo'[\r\n] - anchor: 4, head: 6 + // + // foo([)]\r\n - anchor: 4, head: 5 + // foo()[\r\n] - anchor: 5, head: 7 + // + // [foo]\r\n - anchor: 0, head: 3 + // [foo(])\r\n - anchor: 0, head: 5 + + // inserting at the very end of the document after the last newline + if start_range.head == doc.len_chars() && start_range.anchor == doc.len_chars() { + return Range::new( + start_range.anchor + offset + typed_char.len_utf8(), + start_range.head + offset + typed_char.len_utf8(), + ); + } + + let single_grapheme = is_single_grapheme(doc, start_range); + let doc_slice = doc.slice(..); + + // just skip over graphemes + if len_inserted == 0 { + let end_anchor = if single_grapheme { + graphemes::next_grapheme_boundary(doc_slice, start_range.anchor) + offset + + // even for backward inserts with multiple grapheme selections, + // we want the anchor to stay where it is so that the relative + // selection does not change, e.g.: + // + // foo([) wor]d -> insert ) -> foo()[ wor]d + } else { + start_range.anchor + offset + }; + + return Range::new( + end_anchor, + graphemes::next_grapheme_boundary(doc_slice, start_range.head) + offset, + ); + } + + // trivial case: only inserted a single-char opener, just move the selection + if len_inserted == 1 { + let end_anchor = if single_grapheme || start_range.direction() == Direction::Backward { + start_range.anchor + offset + typed_char.len_utf8() + } else { + start_range.anchor + offset + }; + + return Range::new( + end_anchor, + start_range.head + offset + typed_char.len_utf8(), + ); + } + + // If the head = 0, then we must be in insert mode with a backward + // cursor, which implies the head will just move + let end_head = if start_range.head == 0 || start_range.direction() == Direction::Backward { + start_range.head + offset + typed_char.len_utf8() + } else { + // We must have a forward cursor, which means we must move to the + // other end of the grapheme to get to where the new characters + // are inserted, then move the head to where it should be + let prev_bound = graphemes::prev_grapheme_boundary(doc_slice, start_range.head); + debug!( + "prev_bound: {}, offset: {}, len_inserted: {}", + prev_bound, offset, len_inserted + ); + prev_bound + offset + len_inserted + }; let end_anchor = match (start_range.len(), start_range.direction()) { // if 
we have a zero width cursor, it shifts to the same number (0, _) => end_head, - // if we are inserting for a regular one-width cursor, the anchor - // moves with the head + // If we are inserting for a regular one-width cursor, the anchor + // moves with the head. This is the fast path for ASCII. (1, Direction::Forward) => end_head - 1, (1, Direction::Backward) => end_head + 1, - // if we are appending, the anchor stays where it is; only offset - // for multiple range insertions - (_, Direction::Forward) => start_range.anchor + offset, + (_, Direction::Forward) => { + if single_grapheme { + graphemes::prev_grapheme_boundary(doc.slice(..), start_range.head) + + typed_char.len_utf8() - // when we are inserting in front of a selection, we need to move - // the anchor over by however many characters were inserted overall - (_, Direction::Backward) => start_range.anchor + offset + len_inserted, + // if we are appending, the anchor stays where it is; only offset + // for multiple range insertions + } else { + start_range.anchor + offset + } + } + + (_, Direction::Backward) => { + if single_grapheme { + // if we're backward, then the head is at the first char + // of the typed char, so we need to add the length of + // the closing char + graphemes::prev_grapheme_boundary(doc.slice(..), start_range.anchor) + len_inserted + } else { + // when we are inserting in front of a selection, we need to move + // the anchor over by however many characters were inserted overall + start_range.anchor + offset + len_inserted + } + } }; Range::new(end_anchor, end_head) @@ -111,7 +214,9 @@ fn handle_open( let change = match next_char { Some(ch) if !close_before.contains(ch) => { len_inserted = open.len_utf8(); - (cursor, cursor, Some(Tendril::from_char(open))) + let mut tendril = Tendril::new(); + tendril.push(open); + (cursor, cursor, Some(tendril)) } // None | Some(ch) if close_before.contains(ch) => {} _ => { @@ -122,7 +227,7 @@ fn handle_open( } }; - let next_range = get_next_range(start_range, offs, open, len_inserted); + let next_range = get_next_range(doc, start_range, offs, open, len_inserted); end_ranges.push(next_range); offs += len_inserted; @@ -149,10 +254,12 @@ fn handle_close(doc: &Rope, selection: &Selection, _open: char, close: char) -> (cursor, cursor, None) // no-op } else { len_inserted += close.len_utf8(); - (cursor, cursor, Some(Tendril::from_char(close))) + let mut tendril = Tendril::new(); + tendril.push(close); + (cursor, cursor, Some(tendril)) }; - let next_range = get_next_range(start_range, offs, close, len_inserted); + let next_range = get_next_range(doc, start_range, offs, close, len_inserted); end_ranges.push(next_range); offs += len_inserted; @@ -187,22 +294,22 @@ fn handle_same( // return transaction that moves past close (cursor, cursor, None) // no-op } else { - let mut pair = Tendril::with_capacity(2 * token.len_utf8() as u32); - pair.push_char(token); + let mut pair = Tendril::new(); + pair.push(token); // for equal pairs, don't insert both open and close if either // side has a non-pair char if (next_char.is_none() || close_before.contains(next_char.unwrap())) && (prev_char.is_none() || open_before.contains(prev_char.unwrap())) { - pair.push_char(token); + pair.push(token); } len_inserted += pair.len(); (cursor, cursor, Some(pair)) }; - let next_range = get_next_range(start_range, offs, token, len_inserted); + let next_range = get_next_range(doc, start_range, offs, token, len_inserted); end_ranges.push(next_range); offs += len_inserted; @@ -219,6 +326,8 @@ mod test { use 
super::*; use smallvec::smallvec; + const LINE_END: &str = crate::DEFAULT_LINE_ENDING.as_str(); + fn differing_pairs() -> impl Iterator { PAIRS.iter().filter(|(open, close)| open != close) } @@ -234,7 +343,7 @@ mod test { expected_doc: &Rope, expected_sel: &Selection, ) { - let trans = hook(&in_doc, &in_sel, ch).unwrap(); + let trans = hook(in_doc, in_sel, ch).unwrap(); let mut actual_doc = in_doc.clone(); assert!(trans.apply(&mut actual_doc)); assert_eq!(expected_doc, &actual_doc); @@ -270,12 +379,59 @@ mod test { #[test] fn test_insert_blank() { test_hooks_with_pairs( - &Rope::new(), + &Rope::from(LINE_END), &Selection::single(1, 0), PAIRS, - |open, close| format!("{}{}", open, close), + |open, close| format!("{}{}{}", open, close, LINE_END), &Selection::single(2, 1), ); + + let empty_doc = Rope::from(format!("{line_end}{line_end}", line_end = LINE_END)); + + test_hooks_with_pairs( + &empty_doc, + &Selection::single(empty_doc.len_chars(), LINE_END.len()), + PAIRS, + |open, close| { + format!( + "{line_end}{open}{close}{line_end}", + open = open, + close = close, + line_end = LINE_END + ) + }, + &Selection::single(LINE_END.len() + 2, LINE_END.len() + 1), + ); + } + + #[test] + fn test_insert_before_multi_code_point_graphemes() { + test_hooks_with_pairs( + &Rope::from(format!("hello 👨‍👩‍👧‍👦 goodbye{}", LINE_END)), + &Selection::single(13, 6), + PAIRS, + |open, _| format!("hello {}👨‍👩‍👧‍👦 goodbye{}", open, LINE_END), + &Selection::single(14, 7), + ); + } + + #[test] + fn test_insert_at_end_of_document() { + test_hooks_with_pairs( + &Rope::from(LINE_END), + &Selection::single(LINE_END.len(), LINE_END.len()), + PAIRS, + |open, close| format!("{}{}{}", LINE_END, open, close), + &Selection::single(LINE_END.len() + 1, LINE_END.len() + 1), + ); + + test_hooks_with_pairs( + &Rope::from(format!("foo{}", LINE_END)), + &Selection::single(3 + LINE_END.len(), 3 + LINE_END.len()), + PAIRS, + |open, close| format!("foo{}{}{}", LINE_END, open, close), + &Selection::single(LINE_END.len() + 4, LINE_END.len() + 4), + ); } /// [] -> append ( -> ([]) @@ -283,11 +439,20 @@ mod test { fn test_append_blank() { test_hooks_with_pairs( // this is what happens when you have a totally blank document and then append - &Rope::from("\n\n"), - &Selection::single(0, 2), + &Rope::from(format!("{line_end}{line_end}", line_end = LINE_END)), + // before inserting the pair, the cursor covers all of both empty lines + &Selection::single(0, LINE_END.len() * 2), PAIRS, - |open, close| format!("\n{}{}\n", open, close), - &Selection::single(0, 3), + |open, close| { + format!( + "{line_end}{open}{close}{line_end}", + line_end = LINE_END, + open = open, + close = close + ) + }, + // after inserting pair, the cursor covers the first new line and the open char + &Selection::single(0, LINE_END.len() + 2), ); } @@ -329,6 +494,18 @@ mod test { ); } + /// foo[] -> append to end of line ( -> foo([]) + #[test] + fn test_append_single_cursor() { + test_hooks_with_pairs( + &Rope::from(format!("foo{}", LINE_END)), + &Selection::single(3, 3 + LINE_END.len()), + differing_pairs(), + |open, close| format!("foo{}{}{}", open, close, LINE_END), + &Selection::single(4, 5), + ); + } + /// fo[o] fo[o(]) /// fo[o] -> append ( -> fo[o(]) /// fo[o] fo[o(]) @@ -355,18 +532,18 @@ mod test { ); } - /// ([]) -> insert ) -> ()[] + /// ([)] -> insert ) -> ()[] #[test] fn test_insert_close_inside_pair() { for (open, close) in PAIRS { - let doc = Rope::from(format!("{}{}", open, close)); + let doc = Rope::from(format!("{}{}{}", open, close, LINE_END)); test_hooks( 
&doc, &Selection::single(2, 1), *close, &doc, - &Selection::single(3, 2), + &Selection::single(2 + LINE_END.len(), 2), ); } } @@ -375,14 +552,14 @@ mod test { #[test] fn test_append_close_inside_pair() { for (open, close) in PAIRS { - let doc = Rope::from(format!("{}{}\n", open, close)); + let doc = Rope::from(format!("{}{}{}", open, close, LINE_END)); test_hooks( &doc, &Selection::single(0, 2), *close, &doc, - &Selection::single(0, 3), + &Selection::single(0, 2 + LINE_END.len()), ); } } @@ -564,6 +741,20 @@ mod test { ) } + /// foo([) wor]d -> insert ) -> foo()[ wor]d + #[test] + fn test_insert_close_inside_pair_trailing_word_with_selection() { + for (open, close) in differing_pairs() { + test_hooks( + &Rope::from(format!("foo{}{} word{}", open, close, LINE_END)), + &Selection::single(9, 4), + *close, + &Rope::from(format!("foo{}{} word{}", open, close, LINE_END)), + &Selection::single(9, 5), + ) + } + } + /// we want pairs that are *not* the same char to be inserted after /// a non-pair char, for cases like functions, but for pairs that are /// the same char, we want to *not* insert a pair to handle cases like "I'm" @@ -572,7 +763,7 @@ mod test { /// word[] -> insert ' -> word'[] #[test] fn test_insert_open_after_non_pair() { - let doc = Rope::from("word"); + let doc = Rope::from(format!("word{}", LINE_END)); let sel = Selection::single(5, 4); let expected_sel = Selection::single(6, 5); @@ -580,7 +771,7 @@ mod test { &doc, &sel, differing_pairs(), - |open, close| format!("word{}{}", open, close), + |open, close| format!("word{}{}{}", open, close, LINE_END), &expected_sel, ); @@ -588,22 +779,8 @@ mod test { &doc, &sel, matching_pairs(), - |open, _| format!("word{}", open), + |open, _| format!("word{}{}", open, LINE_END), &expected_sel, ); } - - /// appending with only a cursor should stay a cursor - /// - /// [] -> append to end "foo -> "foo[]" - #[test] - fn test_append_single_cursor() { - test_hooks_with_pairs( - &Rope::from("\n"), - &Selection::single(0, 1), - PAIRS, - |open, close| format!("{}{}\n", open, close), - &Selection::single(1, 2), - ); - } } diff --git a/helix-core/src/chars.rs b/helix-core/src/chars.rs index c8e5efbde..549915740 100644 --- a/helix-core/src/chars.rs +++ b/helix-core/src/chars.rs @@ -91,12 +91,11 @@ mod test { #[test] fn test_categorize() { - const EOL_TEST_CASE: &'static str = "\n\r\u{000B}\u{000C}\u{0085}\u{2028}\u{2029}"; - const WORD_TEST_CASE: &'static str = - "_hello_world_あいうえおー12345678901234567890"; - const PUNCTUATION_TEST_CASE: &'static str = + const EOL_TEST_CASE: &str = "\n\r\u{000B}\u{000C}\u{0085}\u{2028}\u{2029}"; + const WORD_TEST_CASE: &str = "_hello_world_あいうえおー12345678901234567890"; + const PUNCTUATION_TEST_CASE: &str = "!\"#$%&\'()*+,-./:;<=>?@[\\]^`{|}~!”#$%&’()*+、。:;<=>?@「」^`{|}~"; - const WHITESPACE_TEST_CASE: &'static str = "      "; + const WHITESPACE_TEST_CASE: &str = "      "; for ch in EOL_TEST_CASE.chars() { assert_eq!(CharCategory::Eol, categorize_char(ch)); diff --git a/helix-core/src/config.rs b/helix-core/src/config.rs new file mode 100644 index 000000000..5bd16abde --- /dev/null +++ b/helix-core/src/config.rs @@ -0,0 +1,33 @@ +use crate::merge_toml_values; + +/// Default bultin-in languages.toml. +pub fn default_lang_config() -> toml::Value { + toml::from_slice(include_bytes!("../../languages.toml")) + .expect("Could not parse bultin-in languages.toml to valid toml") +} + +/// User configured languages.toml file, merged with the default config. 
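///
/// A usage sketch (hedged; the fallback pattern simply combines the default
/// and user variants defined in this file):
///
/// ```ignore
/// // Prefer the user's merged languages.toml; fall back to the bundled
/// // defaults if the user file fails to parse.
/// let lang_config = user_lang_config().unwrap_or_else(|_| default_lang_config());
/// ```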
+pub fn user_lang_config() -> Result { + let def_lang_conf = default_lang_config(); + let data = std::fs::read(crate::config_dir().join("languages.toml")); + let user_lang_conf = match data { + Ok(raw) => { + let value = toml::from_slice(&raw)?; + merge_toml_values(def_lang_conf, value) + } + Err(_) => def_lang_conf, + }; + + Ok(user_lang_conf) +} + +/// Syntax configuration loader based on built-in languages.toml. +pub fn default_syntax_loader() -> crate::syntax::Configuration { + default_lang_config() + .try_into() + .expect("Could not serialize built-in language.toml") +} +/// Syntax configuration loader based on user configured languages.toml. +pub fn user_syntax_loader() -> Result { + user_lang_config()?.try_into() +} diff --git a/helix-core/src/diff.rs b/helix-core/src/diff.rs index a83db3338..6960c679c 100644 --- a/helix-core/src/diff.rs +++ b/helix-core/src/diff.rs @@ -11,10 +11,6 @@ pub fn compare_ropes(old: &Rope, new: &Rope) -> Transaction { // A timeout is set so after 1 seconds, the algorithm will start // approximating. This is especially important for big `Rope`s or // `Rope`s that are extremely dissimilar to each other. - // - // Note: Ignore the clippy warning, as the trait bounds of - // `Transaction::change()` require an iterator implementing - // `ExactIterator`. let mut config = similar::TextDiff::configure(); config.timeout(std::time::Duration::from_secs(1)); @@ -62,7 +58,7 @@ mod tests { let mut old = Rope::from(a); let new = Rope::from(b); compare_ropes(&old, &new).apply(&mut old); - old.to_string() == new.to_string() + old == new } } } diff --git a/helix-core/src/graphemes.rs b/helix-core/src/graphemes.rs index c63988757..aa8986844 100644 --- a/helix-core/src/graphemes.rs +++ b/helix-core/src/graphemes.rs @@ -120,6 +120,43 @@ pub fn nth_next_grapheme_boundary(slice: RopeSlice, char_idx: usize, n: usize) - chunk_char_idx + tmp } +#[must_use] +pub fn nth_next_grapheme_boundary_byte(slice: RopeSlice, mut byte_idx: usize, n: usize) -> usize { + // Bounds check + debug_assert!(byte_idx <= slice.len_bytes()); + + // Get the chunk with our byte index in it. + let (mut chunk, mut chunk_byte_idx, mut _chunk_char_idx, _) = slice.chunk_at_byte(byte_idx); + + // Set up the grapheme cursor. + let mut gc = GraphemeCursor::new(byte_idx, slice.len_bytes(), true); + + // Find the nth next grapheme cluster boundary. + for _ in 0..n { + loop { + match gc.next_boundary(chunk, chunk_byte_idx) { + Ok(None) => return slice.len_bytes(), + Ok(Some(n)) => { + byte_idx = n; + break; + } + Err(GraphemeIncomplete::NextChunk) => { + chunk_byte_idx += chunk.len(); + let (a, _, _c, _) = slice.chunk_at_byte(chunk_byte_idx); + chunk = a; + // chunk_char_idx = c; + } + Err(GraphemeIncomplete::PreContext(n)) => { + let ctx_chunk = slice.chunk_at_byte(n - 1).0; + gc.provide_context(ctx_chunk, n - ctx_chunk.len()); + } + _ => unreachable!(), + } + } + } + byte_idx +} + /// Finds the next grapheme boundary after the given char position. #[must_use] #[inline(always)] @@ -127,6 +164,13 @@ pub fn next_grapheme_boundary(slice: RopeSlice, char_idx: usize) -> usize { nth_next_grapheme_boundary(slice, char_idx, 1) } +/// Finds the next grapheme boundary after the given byte position. +#[must_use] +#[inline(always)] +pub fn next_grapheme_boundary_byte(slice: RopeSlice, byte_idx: usize) -> usize { + nth_next_grapheme_boundary_byte(slice, byte_idx, 1) +} + /// Returns the passed char index if it's already a grapheme boundary, /// or the next grapheme boundary char index if not. 
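///
/// A small illustration (a sketch; the arithmetic is standard Unicode
/// segmentation, not something this patch changes): "👨‍👩‍👧‍👦" is a single extended
/// grapheme cluster of 7 chars, so char indices 1..=6 are pushed forward to 7
/// while 0 and 7 are returned unchanged. The new `*_byte` helpers above mirror
/// this behaviour for byte indices.
///
/// ```ignore
/// let text = Rope::from("👨‍👩‍👧‍👦");
/// assert_eq!(ensure_grapheme_boundary_next(text.slice(..), 3), 7);
/// ```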
#[must_use] @@ -151,6 +195,23 @@ pub fn ensure_grapheme_boundary_prev(slice: RopeSlice, char_idx: usize) -> usize } } +/// Returns the passed byte index if it's already a grapheme boundary, +/// or the next grapheme boundary byte index if not. +#[must_use] +#[inline] +pub fn ensure_grapheme_boundary_next_byte(slice: RopeSlice, byte_idx: usize) -> usize { + if byte_idx == 0 { + byte_idx + } else { + // TODO: optimize so we're not constructing grapheme cursor twice + if is_grapheme_boundary_byte(slice, byte_idx) { + byte_idx + } else { + next_grapheme_boundary_byte(slice, byte_idx) + } + } +} + /// Returns whether the given char position is a grapheme boundary. #[must_use] pub fn is_grapheme_boundary(slice: RopeSlice, char_idx: usize) -> bool { @@ -179,6 +240,31 @@ pub fn is_grapheme_boundary(slice: RopeSlice, char_idx: usize) -> bool { } } +/// Returns whether the given byte position is a grapheme boundary. +#[must_use] +pub fn is_grapheme_boundary_byte(slice: RopeSlice, byte_idx: usize) -> bool { + // Bounds check + debug_assert!(byte_idx <= slice.len_bytes()); + + // Get the chunk with our byte index in it. + let (chunk, chunk_byte_idx, _, _) = slice.chunk_at_byte(byte_idx); + + // Set up the grapheme cursor. + let mut gc = GraphemeCursor::new(byte_idx, slice.len_bytes(), true); + + // Determine if the given position is a grapheme cluster boundary. + loop { + match gc.is_boundary(chunk, chunk_byte_idx) { + Ok(n) => return n, + Err(GraphemeIncomplete::PreContext(n)) => { + let (ctx_chunk, ctx_byte_start, _, _) = slice.chunk_at_byte(n - 1); + gc.provide_context(ctx_chunk, ctx_byte_start); + } + Err(_) => unreachable!(), + } + } +} + /// An iterator over the graphemes of a `RopeSlice`. #[derive(Clone)] pub struct RopeGraphemes<'a> { diff --git a/helix-core/src/history.rs b/helix-core/src/history.rs index 4b1c8d3b3..bb95213c1 100644 --- a/helix-core/src/history.rs +++ b/helix-core/src/history.rs @@ -448,8 +448,8 @@ mod test { change: crate::transaction::Change, instant: Instant, ) { - let txn = Transaction::change(&state.doc, vec![change.clone()].into_iter()); - history.commit_revision_at_timestamp(&txn, &state, instant); + let txn = Transaction::change(&state.doc, vec![change].into_iter()); + history.commit_revision_at_timestamp(&txn, state, instant); txn.apply(&mut state.doc); } diff --git a/helix-core/src/increment/date_time.rs b/helix-core/src/increment/date_time.rs index e3cfe107d..91fa59631 100644 --- a/helix-core/src/increment/date_time.rs +++ b/helix-core/src/increment/date_time.rs @@ -195,82 +195,82 @@ struct DateField { impl DateField { fn from_specifier(specifier: &str) -> Option { match specifier { - "Y" => Some(DateField { + "Y" => Some(Self { regex: r"\d{4}", unit: DateUnit::Years, max_len: 5, }), - "y" => Some(DateField { + "y" => Some(Self { regex: r"\d\d", unit: DateUnit::Years, max_len: 2, }), - "m" => Some(DateField { + "m" => Some(Self { regex: r"[0-1]\d", unit: DateUnit::Months, max_len: 2, }), - "d" => Some(DateField { + "d" => Some(Self { regex: r"[0-3]\d", unit: DateUnit::Days, max_len: 2, }), - "-d" => Some(DateField { + "-d" => Some(Self { regex: r"[1-3]?\d", unit: DateUnit::Days, max_len: 2, }), - "a" => Some(DateField { + "a" => Some(Self { regex: r"Sun|Mon|Tue|Wed|Thu|Fri|Sat", unit: DateUnit::Days, max_len: 3, }), - "A" => Some(DateField { + "A" => Some(Self { regex: r"Sunday|Monday|Tuesday|Wednesday|Thursday|Friday|Saturday", unit: DateUnit::Days, max_len: 9, }), - "b" | "h" => Some(DateField { + "b" | "h" => Some(Self { regex: 
r"Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec", unit: DateUnit::Months, max_len: 3, }), - "B" => Some(DateField { + "B" => Some(Self { regex: r"January|February|March|April|May|June|July|August|September|October|November|December", unit: DateUnit::Months, max_len: 9, }), - "H" => Some(DateField { + "H" => Some(Self { regex: r"[0-2]\d", unit: DateUnit::Hours, max_len: 2, }), - "M" => Some(DateField { + "M" => Some(Self { regex: r"[0-5]\d", unit: DateUnit::Minutes, max_len: 2, }), - "S" => Some(DateField { + "S" => Some(Self { regex: r"[0-5]\d", unit: DateUnit::Seconds, max_len: 2, }), - "I" => Some(DateField { + "I" => Some(Self { regex: r"[0-1]\d", unit: DateUnit::Hours, max_len: 2, }), - "-I" => Some(DateField { + "-I" => Some(Self { regex: r"1?\d", unit: DateUnit::Hours, max_len: 2, }), - "P" => Some(DateField { + "P" => Some(Self { regex: r"am|pm", unit: DateUnit::AmPm, max_len: 2, }), - "p" => Some(DateField { + "p" => Some(Self { regex: r"AM|PM", unit: DateUnit::AmPm, max_len: 2, @@ -451,7 +451,7 @@ mod test { .unwrap() .increment(amount) .1, - expected.into() + Tendril::from(expected) ); } } diff --git a/helix-core/src/increment/number.rs b/helix-core/src/increment/number.rs index a19b7e754..57171f671 100644 --- a/helix-core/src/increment/number.rs +++ b/helix-core/src/increment/number.rs @@ -371,7 +371,7 @@ mod test { .unwrap() .increment(amount) .1, - expected.into() + Tendril::from(expected) ); } } @@ -398,7 +398,7 @@ mod test { .unwrap() .increment(amount) .1, - expected.into() + Tendril::from(expected) ); } } @@ -426,7 +426,7 @@ mod test { .unwrap() .increment(amount) .1, - expected.into() + Tendril::from(expected) ); } } @@ -472,7 +472,7 @@ mod test { .unwrap() .increment(amount) .1, - expected.into() + Tendril::from(expected) ); } } @@ -500,7 +500,7 @@ mod test { .unwrap() .increment(amount) .1, - expected.into() + Tendril::from(expected) ); } } diff --git a/helix-core/src/indent.rs b/helix-core/src/indent.rs index 28066aa6f..5d20edc1a 100644 --- a/helix-core/src/indent.rs +++ b/helix-core/src/indent.rs @@ -192,10 +192,7 @@ fn get_highest_syntax_node_at_bytepos(syntax: &Syntax, pos: usize) -> Option node, - None => return None, - }; + let mut node = tree.root_node().descendant_for_byte_range(pos, pos)?; while let Some(parent) = node.parent() { if parent.start_byte() == node.start_byte() { @@ -416,7 +413,7 @@ where ", ); - let doc = Rope::from(doc); + let doc = doc; use crate::diagnostic::Severity; use crate::syntax::{ Configuration, IndentationConfiguration, LanguageConfiguration, Loader, @@ -436,6 +433,7 @@ where comment_token: None, auto_format: false, diagnostic_severity: Severity::Warning, + tree_sitter_library: None, language_server: None, indent: Some(IndentationConfiguration { tab_width: 4, @@ -443,6 +441,7 @@ where }), indent_query: OnceCell::new(), textobject_query: OnceCell::new(), + debugger: None, }], }); @@ -453,7 +452,7 @@ where let language_config = loader.language_config_for_scope("source.rust").unwrap(); let highlight_config = language_config.highlight_config(&[]).unwrap(); - let syntax = Syntax::new(&doc, highlight_config.clone()); + let syntax = Syntax::new(&doc, highlight_config, std::sync::Arc::new(loader)); let text = doc.slice(..); let tab_width = 4; diff --git a/helix-core/src/lib.rs b/helix-core/src/lib.rs index 7fd23b977..8e5950deb 100644 --- a/helix-core/src/lib.rs +++ b/helix-core/src/lib.rs @@ -3,6 +3,7 @@ pub use encoding_rs as encoding; pub mod auto_pairs; pub mod chars; pub mod comment; +pub mod config; pub mod diagnostic; pub mod diff; pub 
mod graphemes; @@ -212,7 +213,10 @@ use etcetera::base_strategy::{choose_base_strategy, BaseStrategy}; pub use ropey::{Rope, RopeBuilder, RopeSlice}; -pub use tendril::StrTendril as Tendril; +// pub use tendril::StrTendril as Tendril; +pub use smartstring::SmartString; + +pub type Tendril = SmartString; #[doc(inline)] pub use {regex, tree_sitter}; @@ -220,7 +224,7 @@ pub use {regex, tree_sitter}; pub use graphemes::RopeGraphemes; pub use position::{coords_at_pos, pos_at_coords, visual_coords_at_pos, Position}; pub use selection::{Range, Selection}; -pub use smallvec::SmallVec; +pub use smallvec::{smallvec, SmallVec}; pub use syntax::Syntax; pub use diagnostic::Diagnostic; diff --git a/helix-core/src/line_ending.rs b/helix-core/src/line_ending.rs index 3541305c3..8eb426e1e 100644 --- a/helix-core/src/line_ending.rs +++ b/helix-core/src/line_ending.rs @@ -250,7 +250,7 @@ mod line_ending_tests { assert_eq!(get_line_ending_of_str(&text[..6]), Some(LineEnding::CR)); assert_eq!(get_line_ending_of_str(&text[..12]), Some(LineEnding::LF)); assert_eq!(get_line_ending_of_str(&text[..17]), Some(LineEnding::Crlf)); - assert_eq!(get_line_ending_of_str(&text[..]), None); + assert_eq!(get_line_ending_of_str(text), None); } #[test] diff --git a/helix-core/src/movement.rs b/helix-core/src/movement.rs index 47fe68272..e559f1ea6 100644 --- a/helix-core/src/movement.rs +++ b/helix-core/src/movement.rs @@ -1,6 +1,7 @@ use std::iter; use ropey::iter::Chars; +use tree_sitter::{Node, QueryCursor}; use crate::{ chars::{categorize_char, char_is_line_ending, CharCategory}, @@ -9,7 +10,10 @@ use crate::{ next_grapheme_boundary, nth_next_grapheme_boundary, nth_prev_grapheme_boundary, prev_grapheme_boundary, }, - pos_at_coords, Position, Range, RopeSlice, + pos_at_coords, + syntax::LanguageConfiguration, + textobject::TextObject, + Position, Range, RopeSlice, }; #[derive(Debug, Copy, Clone, PartialEq, Eq)] @@ -305,6 +309,56 @@ fn reached_target(target: WordMotionTarget, prev_ch: char, next_ch: char) -> boo } } +pub fn goto_treesitter_object( + slice: RopeSlice, + range: Range, + object_name: &str, + dir: Direction, + slice_tree: Node, + lang_config: &LanguageConfiguration, + _count: usize, +) -> Range { + let get_range = move || -> Option { + let byte_pos = slice.char_to_byte(range.cursor(slice)); + + let cap_name = |t: TextObject| format!("{}.{}", object_name, t); + let mut cursor = QueryCursor::new(); + let nodes = lang_config.textobject_query()?.capture_nodes_any( + &[ + &cap_name(TextObject::Movement), + &cap_name(TextObject::Around), + &cap_name(TextObject::Inside), + ], + slice_tree, + slice, + &mut cursor, + )?; + + let node = match dir { + Direction::Forward => nodes + .filter(|n| n.start_byte() > byte_pos) + .min_by_key(|n| n.start_byte())?, + Direction::Backward => nodes + .filter(|n| n.start_byte() < byte_pos) + .max_by_key(|n| n.start_byte())?, + }; + + let len = slice.len_bytes(); + let start_byte = node.start_byte(); + let end_byte = node.end_byte(); + if start_byte >= len || end_byte >= len { + return None; + } + + let start_char = slice.byte_to_char(start_byte); + let end_char = slice.byte_to_char(end_byte); + + // head of range should be at beginning + Some(Range::new(end_char, start_char)) + }; + get_range().unwrap_or(range) +} + #[cfg(test)] mod test { use ropey::Rope; diff --git a/helix-core/src/object.rs b/helix-core/src/object.rs index 717c59947..b06f41444 100644 --- a/helix-core/src/object.rs +++ b/helix-core/src/object.rs @@ -1,31 +1,72 @@ use crate::{Range, RopeSlice, Selection, Syntax}; +use 
tree_sitter::Node; -// TODO: to contract_selection we'd need to store the previous ranges before expand. -// Maybe just contract to the first child node? -pub fn expand_selection(syntax: &Syntax, text: RopeSlice, selection: &Selection) -> Selection { +pub fn expand_selection(syntax: &Syntax, text: RopeSlice, selection: Selection) -> Selection { + select_node_impl(syntax, text, selection, |descendant, from, to| { + if descendant.start_byte() == from && descendant.end_byte() == to { + descendant.parent() + } else { + Some(descendant) + } + }) +} + +pub fn shrink_selection(syntax: &Syntax, text: RopeSlice, selection: Selection) -> Selection { + select_node_impl(syntax, text, selection, |descendant, _from, _to| { + descendant.child(0).or(Some(descendant)) + }) +} + +pub fn select_sibling( + syntax: &Syntax, + text: RopeSlice, + selection: Selection, + sibling_fn: &F, +) -> Selection +where + F: Fn(Node) -> Option, +{ + select_node_impl(syntax, text, selection, |descendant, _from, _to| { + find_sibling_recursive(descendant, sibling_fn) + }) +} + +fn find_sibling_recursive(node: Node, sibling_fn: F) -> Option +where + F: Fn(Node) -> Option, +{ + sibling_fn(node).or_else(|| { + node.parent() + .and_then(|node| find_sibling_recursive(node, sibling_fn)) + }) +} + +fn select_node_impl( + syntax: &Syntax, + text: RopeSlice, + selection: Selection, + select_fn: F, +) -> Selection +where + F: Fn(Node, usize, usize) -> Option, +{ let tree = syntax.tree(); - selection.clone().transform(|range| { + selection.transform(|range| { let from = text.char_to_byte(range.from()); let to = text.char_to_byte(range.to()); - // find parent of a descendant that matches the range - let parent = match tree + let node = match tree .root_node() .descendant_for_byte_range(from, to) - .and_then(|node| { - if node.child_count() == 0 || (node.start_byte() == from && node.end_byte() == to) { - node.parent() - } else { - Some(node) - } - }) { - Some(parent) => parent, + .and_then(|node| select_fn(node, from, to)) + { + Some(node) => node, None => return range, }; - let from = text.byte_to_char(parent.start_byte()); - let to = text.byte_to_char(parent.end_byte()); + let from = text.byte_to_char(node.start_byte()); + let to = text.byte_to_char(node.end_byte()); if range.head < range.anchor { Range::new(to, from) diff --git a/helix-core/src/position.rs b/helix-core/src/position.rs index c6018ce69..93362c775 100644 --- a/helix-core/src/position.rs +++ b/helix-core/src/position.rs @@ -109,7 +109,10 @@ pub fn visual_coords_at_pos(text: RopeSlice, pos: usize, tab_width: usize) -> Po /// TODO: this should be changed to work in terms of visual row/column, not /// graphemes. pub fn pos_at_coords(text: RopeSlice, coords: Position, limit_before_line_ending: bool) -> usize { - let Position { row, col } = coords; + let Position { mut row, col } = coords; + if limit_before_line_ending { + row = row.min(text.len_lines() - 1); + }; let line_start = text.line_to_char(row); let line_end = if limit_before_line_ending { line_end_char_index(&text, row) @@ -290,5 +293,12 @@ mod test { assert_eq!(pos_at_coords(slice, (0, 0).into(), false), 0); assert_eq!(pos_at_coords(slice, (0, 1).into(), false), 1); assert_eq!(pos_at_coords(slice, (0, 2).into(), false), 2); + + // Test out of bounds. 
+ let text = Rope::new(); + let slice = text.slice(..); + assert_eq!(pos_at_coords(slice, (10, 0).into(), true), 0); + assert_eq!(pos_at_coords(slice, (0, 10).into(), true), 0); + assert_eq!(pos_at_coords(slice, (10, 10).into(), true), 0); } } diff --git a/helix-core/src/register.rs b/helix-core/src/register.rs index b9eb497df..b39e4034e 100644 --- a/helix-core/src/register.rs +++ b/helix-core/src/register.rs @@ -68,4 +68,8 @@ impl Registers { pub fn read(&self, name: char) -> Option<&[String]> { self.get(name).map(|reg| reg.read()) } + + pub fn inner(&self) -> &HashMap { + &self.inner + } } diff --git a/helix-core/src/selection.rs b/helix-core/src/selection.rs index 06ea9d67b..c6eceb4b5 100644 --- a/helix-core/src/selection.rs +++ b/helix-core/src/selection.rs @@ -140,6 +140,11 @@ impl Range { self.from() == other.from() || (self.to() > other.from() && other.to() > self.from()) } + #[inline] + pub fn contains_range(&self, other: &Self) -> bool { + self.from() <= other.from() && self.to() >= other.to() + } + pub fn contains(&self, pos: usize) -> bool { self.from() <= pos && pos < self.to() } @@ -544,6 +549,39 @@ impl Selection { pub fn len(&self) -> usize { self.ranges.len() } + + // returns true if self ⊇ other + pub fn contains(&self, other: &Selection) -> bool { + // can't contain other if it is larger + if other.len() > self.len() { + return false; + } + + let (mut iter_self, mut iter_other) = (self.iter(), other.iter()); + let (mut ele_self, mut ele_other) = (iter_self.next(), iter_other.next()); + + loop { + match (ele_self, ele_other) { + (Some(ra), Some(rb)) => { + if !ra.contains_range(rb) { + // `self` doesn't contain next element from `other`, advance `self`, we need to match all from `other` + ele_self = iter_self.next(); + } else { + // matched element from `other`, advance `other` + ele_other = iter_other.next(); + }; + } + (None, Some(_)) => { + // exhausted `self`, we can't match the reminder of `other` + return false; + } + (_, None) => { + // no elements from `other` left to match, `self` contains `other` + return true; + } + } + } + } } impl<'a> IntoIterator for &'a Selection { @@ -728,16 +766,16 @@ mod test { fn test_contains() { let range = Range::new(10, 12); - assert_eq!(range.contains(9), false); - assert_eq!(range.contains(10), true); - assert_eq!(range.contains(11), true); - assert_eq!(range.contains(12), false); - assert_eq!(range.contains(13), false); + assert!(!range.contains(9)); + assert!(range.contains(10)); + assert!(range.contains(11)); + assert!(!range.contains(12)); + assert!(!range.contains(13)); let range = Range::new(9, 6); - assert_eq!(range.contains(9), false); - assert_eq!(range.contains(7), true); - assert_eq!(range.contains(6), true); + assert!(!range.contains(9)); + assert!(range.contains(7)); + assert!(range.contains(6)); } #[test] @@ -982,4 +1020,30 @@ mod test { &["", "abcd", "efg", "rs", "xyz"] ); } + #[test] + fn test_selection_contains() { + fn contains(a: Vec<(usize, usize)>, b: Vec<(usize, usize)>) -> bool { + let sela = Selection::new(a.iter().map(|a| Range::new(a.0, a.1)).collect(), 0); + let selb = Selection::new(b.iter().map(|b| Range::new(b.0, b.1)).collect(), 0); + sela.contains(&selb) + } + + // exact match + assert!(contains(vec!((1, 1)), vec!((1, 1)))); + + // larger set contains smaller + assert!(contains(vec!((1, 1), (2, 2), (3, 3)), vec!((2, 2)))); + + // multiple matches + assert!(contains(vec!((1, 1), (2, 2)), vec!((1, 1), (2, 2)))); + + // smaller set can't contain bigger + assert!(!contains(vec!((1, 1)), vec!((1, 1), (2, 
2)))); + + assert!(contains( + vec!((1, 1), (2, 4), (5, 6), (7, 9), (10, 13)), + vec!((3, 4), (7, 9)) + )); + assert!(!contains(vec!((1, 1), (5, 6)), vec!((1, 6)))); + } } diff --git a/helix-core/src/surround.rs b/helix-core/src/surround.rs index b53b0a78c..58eb23cf2 100644 --- a/helix-core/src/surround.rs +++ b/helix-core/src/surround.rs @@ -172,6 +172,7 @@ mod test { use ropey::Rope; use smallvec::SmallVec; + #[allow(clippy::type_complexity)] fn check_find_nth_pair_pos( text: &str, cases: Vec<(usize, char, usize, Option<(usize, usize)>)>, diff --git a/helix-core/src/syntax.rs b/helix-core/src/syntax.rs index cdae02103..ccf91100f 100644 --- a/helix-core/src/syntax.rs +++ b/helix-core/src/syntax.rs @@ -8,12 +8,13 @@ use crate::{ pub use helix_syntax::get_language; -use arc_swap::ArcSwap; +use arc_swap::{ArcSwap, Guard}; +use slotmap::{DefaultKey as LayerId, HopSlotMap}; use std::{ borrow::Cow, cell::RefCell, - collections::{HashMap, HashSet}, + collections::{HashMap, HashSet, VecDeque}, fmt, path::Path, sync::Arc, @@ -67,6 +68,8 @@ pub struct LanguageConfiguration { #[serde(default)] pub diagnostic_severity: Severity, + pub tree_sitter_library: Option, // tree-sitter library name, defaults to language_id + // content_regex #[serde(default, skip_serializing, deserialize_with = "deserialize_regex")] pub injection_regex: Option, @@ -84,6 +87,8 @@ pub struct LanguageConfiguration { pub(crate) indent_query: OnceCell>, #[serde(skip)] pub(crate) textobject_query: OnceCell>, + #[serde(skip_serializing_if = "Option::is_none")] + pub debugger: Option, } #[derive(Debug, Serialize, Deserialize)] @@ -93,6 +98,61 @@ pub struct LanguageServerConfiguration { #[serde(default)] #[serde(skip_serializing_if = "Vec::is_empty")] pub args: Vec, + pub language_id: Option, +} + +#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] +#[serde(rename_all = "kebab-case")] +pub struct AdvancedCompletion { + pub name: Option, + pub completion: Option, + pub default: Option, +} + +#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] +#[serde(rename_all = "kebab-case", untagged)] +pub enum DebugConfigCompletion { + Named(String), + Advanced(AdvancedCompletion), +} + +#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] +#[serde(untagged)] +pub enum DebugArgumentValue { + String(String), + Array(Vec), + Boolean(bool), +} + +#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] +#[serde(rename_all = "kebab-case")] +pub struct DebugTemplate { + pub name: String, + pub request: String, + pub completion: Vec, + pub args: HashMap, +} + +#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] +#[serde(rename_all = "kebab-case")] +pub struct DebugAdapterConfig { + pub name: String, + pub transport: String, + #[serde(default)] + pub command: String, + #[serde(default)] + pub args: Vec, + pub port_arg: Option, + pub templates: Vec, + #[serde(default)] + pub quirks: DebuggerQuirks, +} + +// Different workarounds for adapters' differences +#[derive(Debug, Default, PartialEq, Clone, Serialize, Deserialize)] +pub struct DebuggerQuirks { + #[serde(default)] + pub absolute_paths: bool, } #[derive(Debug, Serialize, Deserialize)] @@ -128,7 +188,21 @@ impl TextObjectQuery { slice: RopeSlice<'a>, cursor: &'a mut QueryCursor, ) -> Option>> { - let capture_idx = self.query.capture_index_for_name(capture_name)?; + self.capture_nodes_any(&[capture_name], node, slice, cursor) + } + + /// Find the first capture that exists out of all given `capture_names` + /// and return sub nodes that match this capture. 
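    ///
    /// Sketch of a call site (capture names follow the `<object>.<kind>` naming
    /// used by `goto_treesitter_object` in this patch; `lang_config`, `root` and
    /// `slice` are assumed to be in scope):
    ///
    /// ```ignore
    /// let mut cursor = QueryCursor::new();
    /// let nodes = lang_config
    ///     .textobject_query()?
    ///     .capture_nodes_any(&["function.around", "function.inside"], root, slice, &mut cursor)?;
    /// ```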
+ pub fn capture_nodes_any<'a>( + &'a self, + capture_names: &[&str], + node: Node<'a>, + slice: RopeSlice<'a>, + cursor: &'a mut QueryCursor, + ) -> Option>> { + let capture_idx = capture_names + .iter() + .find_map(|cap| self.query.capture_index_for_name(cap))?; let captures = cursor.captures(&self.query, node, RopeProvider(slice)); captures @@ -192,20 +266,22 @@ impl LanguageConfiguration { if highlights_query.is_empty() { None } else { - let language = get_language(&crate::RUNTIME_DIR, &self.language_id) - .map_err(|e| log::info!("{}", e)) - .ok()?; + let language = get_language( + &crate::RUNTIME_DIR, + self.tree_sitter_library + .as_deref() + .unwrap_or(&self.language_id), + ) + .map_err(|e| log::info!("{}", e)) + .ok()?; let config = HighlightConfiguration::new( language, &highlights_query, &injections_query, &locals_query, - ); + ) + .unwrap(); // TODO: avoid panic - let config = match config { - Ok(config) => config, - Err(err) => panic!("{}", err), - }; // TODO: avoid panic config.configure(scopes); Some(Arc::new(config)) } @@ -255,12 +331,16 @@ impl LanguageConfiguration { } } +// Expose loader as Lazy<> global since it's always static? + #[derive(Debug)] pub struct Loader { // highlight_names ? language_configs: Vec>, language_config_ids_by_file_type: HashMap, // Vec language_config_ids_by_shebang: HashMap, + + scopes: ArcSwap>, } impl Loader { @@ -269,6 +349,7 @@ impl Loader { language_configs: Vec::new(), language_config_ids_by_file_type: HashMap::new(), language_config_ids_by_shebang: HashMap::new(), + scopes: ArcSwap::from_pointee(Vec::new()), }; for config in config.language { @@ -354,8 +435,22 @@ impl Loader { } None } - pub fn language_configs_iter(&self) -> impl Iterator> { - self.language_configs.iter() + + pub fn set_scopes(&self, scopes: Vec) { + self.scopes.store(Arc::new(scopes)); + + // Reconfigure existing grammars + for config in self + .language_configs + .iter() + .filter(|cfg| cfg.is_highlight_initialized()) + { + config.reconfigure(&self.scopes()); + } + } + + pub fn scopes(&self) -> Guard>> { + self.scopes.load() } } @@ -364,12 +459,6 @@ pub struct TsParser { cursors: Vec, } -impl fmt::Debug for TsParser { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("TsParser").finish() - } -} - // could also just use a pool, or a single instance? thread_local! { pub static PARSER: RefCell = RefCell::new(TsParser { @@ -380,9 +469,9 @@ thread_local! { #[derive(Debug)] pub struct Syntax { - config: Arc, - - root_layer: LanguageLayer, + layers: HopSlotMap, + root: LayerId, + loader: Arc, } fn byte_range_to_str(range: std::ops::Range, source: RopeSlice) -> Cow { @@ -392,38 +481,34 @@ fn byte_range_to_str(range: std::ops::Range, source: RopeSlice) -> Cow, - ) -> Self { - let root_layer = LanguageLayer { tree: None }; + pub fn new(source: &Rope, config: Arc, loader: Arc) -> Self { + let root_layer = LanguageLayer { + tree: None, + config, + depth: 0, + ranges: vec![Range { + start_byte: 0, + end_byte: usize::MAX, + start_point: Point::new(0, 0), + end_point: Point::new(usize::MAX, usize::MAX), + }], + }; - // track markers of injections // track scope_descriptor: a Vec of scopes for item in tree + let mut layers = HopSlotMap::default(); + let root = layers.insert(root_layer); + let mut syntax = Self { - // grammar, - config, - root_layer, + root, + layers, + loader, }; - // update root layer - PARSER.with(|ts_parser| { - // TODO: handle the returned `Result` properly. 
- let _ = syntax.root_layer.parse( - &mut ts_parser.borrow_mut(), - &syntax.config, - source, - 0, - vec![Range { - start_byte: 0, - end_byte: usize::MAX, - start_point: Point::new(0, 0), - end_point: Point::new(usize::MAX, usize::MAX), - }], - ); - }); + syntax + .update(source, source, &ChangeSet::new(source)) + .unwrap(); + syntax } @@ -433,32 +518,255 @@ impl Syntax { source: &Rope, changeset: &ChangeSet, ) -> Result<(), Error> { + let mut queue = VecDeque::new(); + queue.push_back(self.root); + + let scopes = self.loader.scopes.load(); + let injection_callback = |language: &str| { + self.loader + .language_configuration_for_injection_string(language) + .and_then(|language_config| language_config.highlight_config(&scopes)) + }; + + // Convert the changeset into tree sitter edits. + let edits = generate_edits(old_source, changeset); + + // Use the edits to update all layers markers + if !edits.is_empty() { + fn point_add(a: Point, b: Point) -> Point { + if b.row > 0 { + Point::new(a.row.saturating_add(b.row), b.column) + } else { + Point::new(0, a.column.saturating_add(b.column)) + } + } + fn point_sub(a: Point, b: Point) -> Point { + if a.row > b.row { + Point::new(a.row.saturating_sub(b.row), a.column) + } else { + Point::new(0, a.column.saturating_sub(b.column)) + } + } + + for layer in &mut self.layers.values_mut() { + // The root layer always covers the whole range (0..usize::MAX) + if layer.depth == 0 { + continue; + } + + for range in &mut layer.ranges { + // Roughly based on https://github.com/tree-sitter/tree-sitter/blob/ddeaa0c7f534268b35b4f6cb39b52df082754413/lib/src/subtree.c#L691-L720 + for edit in edits.iter().rev() { + let is_pure_insertion = edit.old_end_byte == edit.start_byte; + + // if edit is after range, skip + if edit.start_byte > range.end_byte { + // TODO: || (is_noop && edit.start_byte == range.end_byte) + continue; + } + + // if edit is before range, shift entire range by len + if edit.old_end_byte < range.start_byte { + range.start_byte = + edit.new_end_byte + (range.start_byte - edit.old_end_byte); + range.start_point = point_add( + edit.new_end_position, + point_sub(range.start_point, edit.old_end_position), + ); + + range.end_byte = edit + .new_end_byte + .saturating_add(range.end_byte - edit.old_end_byte); + range.end_point = point_add( + edit.new_end_position, + point_sub(range.end_point, edit.old_end_position), + ); + } + // if the edit starts in the space before and extends into the range + else if edit.start_byte < range.start_byte { + range.start_byte = edit.new_end_byte; + range.start_point = edit.new_end_position; + + range.end_byte = range + .end_byte + .saturating_sub(edit.old_end_byte) + .saturating_add(edit.new_end_byte); + range.end_point = point_add( + edit.new_end_position, + point_sub(range.end_point, edit.old_end_position), + ); + } + // If the edit is an insertion at the start of the tree, shift + else if edit.start_byte == range.start_byte && is_pure_insertion { + range.start_byte = edit.new_end_byte; + range.start_point = edit.new_end_position; + } else { + range.end_byte = range + .end_byte + .saturating_sub(edit.old_end_byte) + .saturating_add(edit.new_end_byte); + range.end_point = point_add( + edit.new_end_position, + point_sub(range.end_point, edit.old_end_position), + ); + } + } + } + } + } + PARSER.with(|ts_parser| { - self.root_layer.update( - &mut ts_parser.borrow_mut(), - &self.config, - old_source, - source, - changeset, - ) - }) + let ts_parser = &mut ts_parser.borrow_mut(); + let mut cursor = 
ts_parser.cursors.pop().unwrap_or_else(QueryCursor::new); + // TODO: might need to set cursor range + cursor.set_byte_range(0..usize::MAX); - // TODO: deal with injections and update them too - } + let source_slice = source.slice(..); - // fn buffer_changed -> call layer.update(range, new_text) on root layer and then all marker layers + let mut touched = HashSet::new(); - // call this on transaction.apply() -> buffer_changed(changes) - // - // fn parse(language, old_tree, ranges) - // - pub fn tree(&self) -> &Tree { - self.root_layer.tree() + // TODO: we should be able to avoid editing & parsing layers with ranges earlier in the document before the edit + + while let Some(layer_id) = queue.pop_front() { + // Mark the layer as touched + touched.insert(layer_id); + + let layer = &mut self.layers[layer_id]; + + // If a tree already exists, notify it of changes. + if let Some(tree) = &mut layer.tree { + for edit in edits.iter().rev() { + // Apply the edits in reverse. + // If we applied them in order then edit 1 would disrupt the positioning of edit 2. + tree.edit(edit); + } + } + + // Re-parse the tree. + layer.parse(&mut ts_parser.parser, source)?; + + // Switch to an immutable borrow. + let layer = &self.layers[layer_id]; + + // Process injections. + let matches = cursor.matches( + &layer.config.injections_query, + layer.tree().root_node(), + RopeProvider(source_slice), + ); + let mut injections = Vec::new(); + for mat in matches { + let (language_name, content_node, include_children) = injection_for_match( + &layer.config, + &layer.config.injections_query, + &mat, + source_slice, + ); + + // Explicitly remove this match so that none of its other captures will remain + // in the stream of captures. + mat.remove(); + + // If a language is found with the given name, then add a new language layer + // to the highlighted document. + if let (Some(language_name), Some(content_node)) = (language_name, content_node) + { + if let Some(config) = (injection_callback)(&language_name) { + let ranges = + intersect_ranges(&layer.ranges, &[content_node], include_children); + + if !ranges.is_empty() { + injections.push((config, ranges)); + } + } + } + } + + // Process combined injections. 
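                // (Sketch of the idea: a "combined" injection pattern collects every
                // range it captures across the file into one injected document, so all
                // fragments of the injected language are parsed together as a single
                // layer rather than one layer per match.)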
+ if let Some(combined_injections_query) = &layer.config.combined_injections_query { + let mut injections_by_pattern_index = + vec![(None, Vec::new(), false); combined_injections_query.pattern_count()]; + let matches = cursor.matches( + combined_injections_query, + layer.tree().root_node(), + RopeProvider(source_slice), + ); + for mat in matches { + let entry = &mut injections_by_pattern_index[mat.pattern_index]; + let (language_name, content_node, include_children) = injection_for_match( + &layer.config, + combined_injections_query, + &mat, + source_slice, + ); + if language_name.is_some() { + entry.0 = language_name; + } + if let Some(content_node) = content_node { + entry.1.push(content_node); + } + entry.2 = include_children; + } + for (lang_name, content_nodes, includes_children) in injections_by_pattern_index + { + if let (Some(lang_name), false) = (lang_name, content_nodes.is_empty()) { + if let Some(config) = (injection_callback)(&lang_name) { + let ranges = intersect_ranges( + &layer.ranges, + &content_nodes, + includes_children, + ); + if !ranges.is_empty() { + injections.push((config, ranges)); + } + } + } + } + } + + let depth = layer.depth + 1; + // TODO: can't inline this since matches borrows self.layers + for (config, ranges) in injections { + // Find an existing layer + let layer = self + .layers + .iter_mut() + .find(|(_, layer)| { + layer.depth == depth && // TODO: track parent id instead + layer.config.language == config.language && layer.ranges == ranges + }) + .map(|(id, _layer)| id); + + // ...or insert a new one. + let layer_id = layer.unwrap_or_else(|| { + self.layers.insert(LanguageLayer { + tree: None, + config, + depth, + ranges, + }) + }); + + queue.push_back(layer_id); + } + + // TODO: pre-process local scopes at this time, rather than highlight? + // would solve problems with locals not working across boundaries + } + + // Return the cursor back in the pool. + ts_parser.cursors.push(cursor); + + // Remove all untouched layers + self.layers.retain(|id, _| touched.contains(&id)); + + Ok(()) + }) } - // - // - // Highlighting + pub fn tree(&self) -> &Tree { + self.layers[self.root].tree() + } /// Iterate over the highlighted regions for a given slice of source code. pub fn highlight_iter<'a>( @@ -466,65 +774,76 @@ impl Syntax { source: RopeSlice<'a>, range: Option>, cancellation_flag: Option<&'a AtomicUsize>, - injection_callback: impl FnMut(&str) -> Option<&'a HighlightConfiguration> + 'a, ) -> impl Iterator> + 'a { - // The `captures` iterator borrows the `Tree` and the `QueryCursor`, which - // prevents them from being moved. But both of these values are really just - // pointers, so it's actually ok to move them. - - // reuse a cursor from the pool if possible - let mut cursor = PARSER.with(|ts_parser| { - let highlighter = &mut ts_parser.borrow_mut(); - highlighter.cursors.pop().unwrap_or_else(QueryCursor::new) + let mut layers = self + .layers + .iter() + .filter_map(|(_, layer)| { + // TODO: if range doesn't overlap layer range, skip it + + // Reuse a cursor from the pool if available. + let mut cursor = PARSER.with(|ts_parser| { + let highlighter = &mut ts_parser.borrow_mut(); + highlighter.cursors.pop().unwrap_or_else(QueryCursor::new) + }); + + // The `captures` iterator borrows the `Tree` and the `QueryCursor`, which + // prevents them from being moved. But both of these values are really just + // pointers, so it's actually ok to move them. 
+ let cursor_ref = + unsafe { mem::transmute::<_, &'static mut QueryCursor>(&mut cursor) }; + + // if reusing cursors & no range this resets to whole range + cursor_ref.set_byte_range(range.clone().unwrap_or(0..usize::MAX)); + + let mut captures = cursor_ref + .captures( + &layer.config.query, + layer.tree().root_node(), + RopeProvider(source), + ) + .peekable(); + + // If there's no captures, skip the layer + captures.peek()?; + + Some(HighlightIterLayer { + highlight_end_stack: Vec::new(), + scope_stack: vec![LocalScope { + inherits: false, + range: 0..usize::MAX, + local_defs: Vec::new(), + }], + cursor, + _tree: None, + captures, + config: layer.config.as_ref(), // TODO: just reuse `layer` + depth: layer.depth, // TODO: just reuse `layer` + ranges: &layer.ranges, // TODO: temp + }) + }) + .collect::>(); + + // HAXX: arrange layers by byte range, with deeper layers positioned first + layers.sort_by_key(|layer| { + ( + layer.ranges.first().cloned(), + std::cmp::Reverse(layer.depth), + ) }); - let tree_ref = self.tree(); - let cursor_ref = unsafe { mem::transmute::<_, &'static mut QueryCursor>(&mut cursor) }; - let query_ref = &self.config.query; - let config_ref = self.config.as_ref(); - - // if reusing cursors & no range this resets to whole range - cursor_ref.set_byte_range(range.clone().unwrap_or(0..usize::MAX)); - - let captures = cursor_ref - .captures(query_ref, tree_ref.root_node(), RopeProvider(source)) - .peekable(); - - // manually craft the root layer based on the existing tree - let layer = HighlightIterLayer { - highlight_end_stack: Vec::new(), - scope_stack: vec![LocalScope { - inherits: false, - range: 0..usize::MAX, - local_defs: Vec::new(), - }], - cursor, - depth: 0, - _tree: None, - captures, - config: config_ref, - ranges: vec![Range { - start_byte: 0, - end_byte: usize::MAX, - start_point: Point::new(0, 0), - end_point: Point::new(usize::MAX, usize::MAX), - }], - }; let mut result = HighlightIter { source, - byte_offset: range.map_or(0, |r| r.start), // TODO: simplify - injection_callback, + byte_offset: range.map_or(0, |r| r.start), cancellation_flag, iter_count: 0, - layers: vec![layer], + layers, next_event: None, last_highlight_range: None, }; result.sort_layers(); result } - // on_tokenize - // on_change_highlighting // Commenting // comment_strings_for_pos @@ -536,246 +855,157 @@ impl Syntax { // indent_level_for_line // TODO: Folding - - // Syntax APIs - // get_syntax_node_containing_range -> - // ... 
- // get_syntax_node_at_pos - // buffer_range_for_scope_at_pos } #[derive(Debug)] pub struct LanguageLayer { // mode // grammar - // depth + pub config: Arc, pub(crate) tree: Option, + pub ranges: Vec, + pub depth: usize, } impl LanguageLayer { - // pub fn new() -> Self { - // Self { tree: None } - // } - pub fn tree(&self) -> &Tree { // TODO: no unwrap self.tree.as_ref().unwrap() } - fn parse( - &mut self, - ts_parser: &mut TsParser, - config: &HighlightConfiguration, - source: &Rope, - _depth: usize, - ranges: Vec, - ) -> Result<(), Error> { - if ts_parser.parser.set_included_ranges(&ranges).is_ok() { - ts_parser - .parser - .set_language(config.language) - .map_err(|_| Error::InvalidLanguage)?; - - // unsafe { syntax.parser.set_cancellation_flag(cancellation_flag) }; - let tree = ts_parser - .parser - .parse_with( - &mut |byte, _| { - if byte <= source.len_bytes() { - let (chunk, start_byte, _, _) = source.chunk_at_byte(byte); - chunk[byte - start_byte..].as_bytes() - } else { - // out of range - &[] - } - }, - self.tree.as_ref(), - ) - .ok_or(Error::Cancelled)?; + fn parse(&mut self, parser: &mut Parser, source: &Rope) -> Result<(), Error> { + parser.set_included_ranges(&self.ranges).unwrap(); - self.tree = Some(tree) - } + parser + .set_language(self.config.language) + .map_err(|_| Error::InvalidLanguage)?; + + // unsafe { syntax.parser.set_cancellation_flag(cancellation_flag) }; + let tree = parser + .parse_with( + &mut |byte, _| { + if byte <= source.len_bytes() { + let (chunk, start_byte, _, _) = source.chunk_at_byte(byte); + chunk[byte - start_byte..].as_bytes() + } else { + // out of range + &[] + } + }, + self.tree.as_ref(), + ) + .ok_or(Error::Cancelled)?; + // unsafe { ts_parser.parser.set_cancellation_flag(None) }; + self.tree = Some(tree); Ok(()) } +} - pub(crate) fn generate_edits( - old_text: RopeSlice, - changeset: &ChangeSet, - ) -> Vec { - use Operation::*; - let mut old_pos = 0; +pub(crate) fn generate_edits( + old_text: &Rope, + changeset: &ChangeSet, +) -> Vec { + use Operation::*; + let mut old_pos = 0; - let mut edits = Vec::new(); + let mut edits = Vec::new(); - let mut iter = changeset.changes.iter().peekable(); + if changeset.changes.is_empty() { + return edits; + } - // TODO; this is a lot easier with Change instead of Operation. + let mut iter = changeset.changes.iter().peekable(); - fn point_at_pos(text: RopeSlice, pos: usize) -> (usize, Point) { - let byte = text.char_to_byte(pos); // <- attempted to index past end - let line = text.char_to_line(pos); - let line_start_byte = text.line_to_byte(line); - let col = byte - line_start_byte; + // TODO; this is a lot easier with Change instead of Operation. - (byte, Point::new(line, col)) - } + fn point_at_pos(text: &Rope, pos: usize) -> (usize, Point) { + let byte = text.char_to_byte(pos); // <- attempted to index past end + let line = text.char_to_line(pos); + let line_start_byte = text.line_to_byte(line); + let col = byte - line_start_byte; - fn traverse(point: Point, text: &Tendril) -> Point { - let Point { - mut row, - mut column, - } = point; - - // TODO: there should be a better way here. - let mut chars = text.chars().peekable(); - while let Some(ch) = chars.next() { - if char_is_line_ending(ch) && !(ch == '\r' && chars.peek() == Some(&'\n')) { - row += 1; - column = 0; - } else { - column += 1; - } + (byte, Point::new(line, col)) + } + + fn traverse(point: Point, text: &Tendril) -> Point { + let Point { + mut row, + mut column, + } = point; + + // TODO: there should be a better way here. 
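        // For example, traverse(Point { row: 0, column: 3 }, "ab\ncd") yields
        // Point { row: 1, column: 2 }; a "\r\n" pair only advances the row once.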
+ let mut chars = text.chars().peekable(); + while let Some(ch) = chars.next() { + if char_is_line_ending(ch) && !(ch == '\r' && chars.peek() == Some(&'\n')) { + row += 1; + column = 0; + } else { + column += 1; } - Point { row, column } } + Point { row, column } + } - while let Some(change) = iter.next() { - let len = match change { - Delete(i) | Retain(i) => *i, - Insert(_) => 0, - }; - let mut old_end = old_pos + len; + while let Some(change) = iter.next() { + let len = match change { + Delete(i) | Retain(i) => *i, + Insert(_) => 0, + }; + let mut old_end = old_pos + len; + + match change { + Retain(_) => {} + Delete(_) => { + let (start_byte, start_position) = point_at_pos(old_text, old_pos); + let (old_end_byte, old_end_position) = point_at_pos(old_text, old_end); + + // deletion + edits.push(tree_sitter::InputEdit { + start_byte, // old_pos to byte + old_end_byte, // old_end to byte + new_end_byte: start_byte, // old_pos to byte + start_position, // old pos to coords + old_end_position, // old_end to coords + new_end_position: start_position, // old pos to coords + }); + } + Insert(s) => { + let (start_byte, start_position) = point_at_pos(old_text, old_pos); - match change { - Retain(_) => {} - Delete(_) => { - let (start_byte, start_position) = point_at_pos(old_text, old_pos); + // a subsequent delete means a replace, consume it + if let Some(Delete(len)) = iter.peek() { + old_end = old_pos + len; let (old_end_byte, old_end_position) = point_at_pos(old_text, old_end); - // TODO: Position also needs to be byte based... - // let byte = char_to_byte(old_pos) - // let line = char_to_line(old_pos) - // let line_start_byte = line_to_byte() - // Position::new(line, line_start_byte - byte) + iter.next(); - // deletion + // replacement edits.push(tree_sitter::InputEdit { - start_byte, // old_pos to byte - old_end_byte, // old_end to byte - new_end_byte: start_byte, // old_pos to byte - start_position, // old pos to coords - old_end_position, // old_end to coords - new_end_position: start_position, // old pos to coords + start_byte, // old_pos to byte + old_end_byte, // old_end to byte + new_end_byte: start_byte + s.len(), // old_pos to byte + s.len() + start_position, // old pos to coords + old_end_position, // old_end to coords + new_end_position: traverse(start_position, s), // old pos + chars, newlines matter too (iter over) + }); + } else { + // insert + edits.push(tree_sitter::InputEdit { + start_byte, // old_pos to byte + old_end_byte: start_byte, // same + new_end_byte: start_byte + s.len(), // old_pos + s.len() + start_position, // old pos to coords + old_end_position: start_position, // same + new_end_position: traverse(start_position, s), // old pos + chars, newlines matter too (iter over) }); - } - Insert(s) => { - let (start_byte, start_position) = point_at_pos(old_text, old_pos); - - // a subsequent delete means a replace, consume it - if let Some(Delete(len)) = iter.peek() { - old_end = old_pos + len; - let (old_end_byte, old_end_position) = point_at_pos(old_text, old_end); - - iter.next(); - - // replacement - edits.push(tree_sitter::InputEdit { - start_byte, // old_pos to byte - old_end_byte, // old_end to byte - new_end_byte: start_byte + s.len(), // old_pos to byte + s.len() - start_position, // old pos to coords - old_end_position, // old_end to coords - new_end_position: traverse(start_position, s), // old pos + chars, newlines matter too (iter over) - }); - } else { - // insert - edits.push(tree_sitter::InputEdit { - start_byte, // old_pos to byte - old_end_byte: 
start_byte, // same - new_end_byte: start_byte + s.len(), // old_pos + s.len() - start_position, // old pos to coords - old_end_position: start_position, // same - new_end_position: traverse(start_position, s), // old pos + chars, newlines matter too (iter over) - }); - } } } - old_pos = old_end; - } - edits - } - - fn update( - &mut self, - ts_parser: &mut TsParser, - config: &HighlightConfiguration, - old_source: &Rope, - source: &Rope, - changeset: &ChangeSet, - ) -> Result<(), Error> { - if changeset.is_empty() { - return Ok(()); } - - let edits = Self::generate_edits(old_source.slice(..), changeset); - - // Notify the tree about all the changes - for edit in edits.iter().rev() { - // apply the edits in reverse. If we applied them in order then edit 1 would disrupt - // the positioning of edit 2 - self.tree.as_mut().unwrap().edit(edit); - } - - self.parse( - ts_parser, - config, - source, - 0, - // TODO: what to do about this range on update - vec![Range { - start_byte: 0, - end_byte: usize::MAX, - start_point: Point::new(0, 0), - end_point: Point::new(usize::MAX, usize::MAX), - }], - ) + old_pos = old_end; } - - // fn highlight_iter() -> same as Mode but for this layer. Mode composits these - // fn buffer_changed - // fn update(range) - // fn update_injections() + edits } -// -- refactored from tree-sitter-highlight to be able to retain state -// TODO: add seek() to iter - -// problem: any time a layer is updated it must update it's injections on the parent (potentially -// removing some from use) -// can't modify to vec and exist in it at the same time since that would violate borrows -// maybe we can do with an arena -// maybe just caching on the top layer and nevermind the injections for now? -// -// Grammar { -// layers: Vec> to prevent memory moves when vec is modified -// } -// injections tracked by marker: -// if marker areas match it's fine and update -// if not found add new layer -// if length 0 then area got removed, clean up the layer -// -// layer update: -// if range.len = 0 then remove the layer -// for change in changes { tree.edit(change) } -// tree = parser.parse(.., tree, ..) -// calculate affected range and update injections -// injection update: -// look for existing injections -// if present, range = (first injection start, last injection end) -// -// For now cheat and just throw out non-root layers if they exist. This should still improve -// parsing in majority of cases. 
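The design notes above and the removal of the per-layer `update` method both lean on the standard tree-sitter incremental flow: record every edit on the old tree, then reparse with that tree as a starting point so only the changed regions are re-examined. A minimal sketch of that flow (plain `&str` source rather than a `Rope`, and not the exact helix code path; it assumes the parser's language and included ranges are already set):

```rust
// Incremental reparse: apply InputEdits to the old tree in reverse order
// (so an earlier edit does not shift the positions recorded for a later one),
// then let the parser reuse the edited tree.
use tree_sitter::{InputEdit, Parser, Tree};

fn reparse(
    parser: &mut Parser,
    old_tree: &mut Tree,
    edits: &[InputEdit],
    new_source: &str,
) -> Option<Tree> {
    for edit in edits.iter().rev() {
        old_tree.edit(edit);
    }
    parser.parse(new_source, Some(old_tree))
}
```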
- use std::sync::atomic::{AtomicUsize, Ordering}; use std::{iter, mem, ops, str, usize}; use tree_sitter::{ @@ -812,8 +1042,8 @@ pub enum HighlightEvent { pub struct HighlightConfiguration { pub language: Grammar, pub query: Query, + injections_query: Query, combined_injections_query: Option, - locals_pattern_index: usize, highlights_pattern_index: usize, highlight_indices: ArcSwap>>, non_local_variable_patterns: Vec, @@ -840,13 +1070,9 @@ struct LocalScope<'a> { } #[derive(Debug)] -struct HighlightIter<'a, F> -where - F: FnMut(&str) -> Option<&'a HighlightConfiguration> + 'a, -{ +struct HighlightIter<'a> { source: RopeSlice<'a>, byte_offset: usize, - injection_callback: F, cancellation_flag: Option<&'a AtomicUsize>, layers: Vec>, iter_count: usize, @@ -886,8 +1112,8 @@ struct HighlightIterLayer<'a> { config: &'a HighlightConfiguration, highlight_end_stack: Vec, scope_stack: Vec>, - ranges: Vec, depth: usize, + ranges: &'a [Range], } impl<'a> fmt::Debug for HighlightIterLayer<'a> { @@ -919,38 +1145,32 @@ impl HighlightConfiguration { ) -> Result { // Concatenate the query strings, keeping track of the start offset of each section. let mut query_source = String::new(); - query_source.push_str(injection_query); - let locals_query_offset = query_source.len(); query_source.push_str(locals_query); let highlights_query_offset = query_source.len(); query_source.push_str(highlights_query); // Construct a single query by concatenating the three query strings, but record the // range of pattern indices that belong to each individual string. - let mut query = Query::new(language, &query_source)?; - let mut locals_pattern_index = 0; + let query = Query::new(language, &query_source)?; let mut highlights_pattern_index = 0; for i in 0..(query.pattern_count()) { let pattern_offset = query.start_byte_for_pattern(i); if pattern_offset < highlights_query_offset { - if pattern_offset < highlights_query_offset { - highlights_pattern_index += 1; - } - if pattern_offset < locals_query_offset { - locals_pattern_index += 1; - } + highlights_pattern_index += 1; } } + let mut injections_query = Query::new(language, injection_query)?; + // Construct a separate query just for dealing with the 'combined injections'. // Disable the combined injection patterns in the main query. 
let mut combined_injections_query = Query::new(language, injection_query)?; let mut has_combined_queries = false; - for pattern_index in 0..locals_pattern_index { - let settings = query.property_settings(pattern_index); + for pattern_index in 0..injections_query.pattern_count() { + let settings = injections_query.property_settings(pattern_index); if settings.iter().any(|s| &*s.key == "injection.combined") { has_combined_queries = true; - query.disable_pattern(pattern_index); + injections_query.disable_pattern(pattern_index); } else { combined_injections_query.disable_pattern(pattern_index); } @@ -982,8 +1202,6 @@ impl HighlightConfiguration { for (i, name) in query.capture_names().iter().enumerate() { let i = Some(i as u32); match name.as_str() { - "injection.content" => injection_content_capture_index = i, - "injection.language" => injection_language_capture_index = i, "local.definition" => local_def_capture_index = i, "local.definition-value" => local_def_value_capture_index = i, "local.reference" => local_ref_capture_index = i, @@ -992,12 +1210,21 @@ impl HighlightConfiguration { } } + for (i, name) in injections_query.capture_names().iter().enumerate() { + let i = Some(i as u32); + match name.as_str() { + "injection.content" => injection_content_capture_index = i, + "injection.language" => injection_language_capture_index = i, + _ => {} + } + } + let highlight_indices = ArcSwap::from_pointee(vec![None; query.capture_names().len()]); Ok(Self { language, query, + injections_query, combined_injections_query, - locals_pattern_index, highlights_pattern_index, highlight_indices, non_local_variable_patterns, @@ -1062,238 +1289,6 @@ impl HighlightConfiguration { } impl<'a> HighlightIterLayer<'a> { - /// Create a new 'layer' of highlighting for this document. - /// - /// In the even that the new layer contains "combined injections" (injections where multiple - /// disjoint ranges are parsed as one syntax tree), these will be eagerly processed and - /// added to the returned vector. - fn new Option<&'a HighlightConfiguration> + 'a>( - source: RopeSlice<'a>, - cancellation_flag: Option<&'a AtomicUsize>, - injection_callback: &mut F, - mut config: &'a HighlightConfiguration, - mut depth: usize, - mut ranges: Vec, - ) -> Result, Error> { - let mut result = Vec::with_capacity(1); - let mut queue = Vec::new(); - loop { - // --> Tree parsing part - - PARSER.with(|ts_parser| { - let highlighter = &mut ts_parser.borrow_mut(); - - if highlighter.parser.set_included_ranges(&ranges).is_ok() { - highlighter - .parser - .set_language(config.language) - .map_err(|_| Error::InvalidLanguage)?; - - unsafe { highlighter.parser.set_cancellation_flag(cancellation_flag) }; - let tree = highlighter - .parser - .parse_with( - &mut |byte, _| { - if byte <= source.len_bytes() { - let (chunk, start_byte, _, _) = source.chunk_at_byte(byte); - chunk[byte - start_byte..].as_bytes() - } else { - // out of range - &[] - } - }, - None, - ) - .ok_or(Error::Cancelled)?; - unsafe { highlighter.parser.set_cancellation_flag(None) }; - let mut cursor = highlighter.cursors.pop().unwrap_or_else(QueryCursor::new); - - // Process combined injections. 
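The hunks above split injection handling into two queries built from the same source: a pattern marked with the `injection.combined` property stays enabled only in the combined-injections query, and every other pattern stays enabled only in the plain injections query. A rough sketch of that split (the helper name is illustrative, not from the codebase):

```rust
// Build both injection queries from one source string and disable each
// pattern in the query it does not belong to, keyed on "injection.combined".
use tree_sitter::{Language, Query, QueryError};

fn split_injection_queries(
    language: Language,
    injection_query: &str,
) -> Result<(Query, Query), QueryError> {
    let mut injections_query = Query::new(language, injection_query)?;
    let mut combined_injections_query = Query::new(language, injection_query)?;

    for pattern_index in 0..injections_query.pattern_count() {
        let combined = injections_query
            .property_settings(pattern_index)
            .iter()
            .any(|s| &*s.key == "injection.combined");
        if combined {
            injections_query.disable_pattern(pattern_index);
        } else {
            combined_injections_query.disable_pattern(pattern_index);
        }
    }
    Ok((injections_query, combined_injections_query))
}
```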
- if let Some(combined_injections_query) = &config.combined_injections_query { - let mut injections_by_pattern_index = vec![ - (None, Vec::new(), false); - combined_injections_query - .pattern_count() - ]; - let matches = cursor.matches( - combined_injections_query, - tree.root_node(), - RopeProvider(source), - ); - for mat in matches { - let entry = &mut injections_by_pattern_index[mat.pattern_index]; - let (language_name, content_node, include_children) = - injection_for_match( - config, - combined_injections_query, - &mat, - source, - ); - if language_name.is_some() { - entry.0 = language_name; - } - if let Some(content_node) = content_node { - entry.1.push(content_node); - } - entry.2 = include_children; - } - for (lang_name, content_nodes, includes_children) in - injections_by_pattern_index - { - if let (Some(lang_name), false) = (lang_name, content_nodes.is_empty()) - { - if let Some(next_config) = (injection_callback)(&lang_name) { - let ranges = Self::intersect_ranges( - &ranges, - &content_nodes, - includes_children, - ); - if !ranges.is_empty() { - queue.push((next_config, depth + 1, ranges)); - } - } - } - } - } - - // --> Highlighting query part - - // The `captures` iterator borrows the `Tree` and the `QueryCursor`, which - // prevents them from being moved. But both of these values are really just - // pointers, so it's actually ok to move them. - let tree_ref = unsafe { mem::transmute::<_, &'static Tree>(&tree) }; - let cursor_ref = - unsafe { mem::transmute::<_, &'static mut QueryCursor>(&mut cursor) }; - let captures = cursor_ref - .captures(&config.query, tree_ref.root_node(), RopeProvider(source)) - .peekable(); - - result.push(HighlightIterLayer { - highlight_end_stack: Vec::new(), - scope_stack: vec![LocalScope { - inherits: false, - range: 0..usize::MAX, - local_defs: Vec::new(), - }], - cursor, - depth, - _tree: Some(tree), - captures, - config, - ranges, - }); - } - - Ok(()) // so we can use the try operator - })?; - - if queue.is_empty() { - break; - } - - let (next_config, next_depth, next_ranges) = queue.remove(0); - config = next_config; - depth = next_depth; - ranges = next_ranges; - } - - Ok(result) - } - - // Compute the ranges that should be included when parsing an injection. - // This takes into account three things: - // * `parent_ranges` - The ranges must all fall within the *current* layer's ranges. - // * `nodes` - Every injection takes place within a set of nodes. The injection ranges - // are the ranges of those nodes. - // * `includes_children` - For some injections, the content nodes' children should be - // excluded from the nested document, so that only the content nodes' *own* content - // is reparsed. For other injections, the content nodes' entire ranges should be - // reparsed, including the ranges of their children. 
- fn intersect_ranges( - parent_ranges: &[Range], - nodes: &[Node], - includes_children: bool, - ) -> Vec { - let mut cursor = nodes[0].walk(); - let mut result = Vec::new(); - let mut parent_range_iter = parent_ranges.iter(); - let mut parent_range = parent_range_iter - .next() - .expect("Layers should only be constructed with non-empty ranges vectors"); - for node in nodes.iter() { - let mut preceding_range = Range { - start_byte: 0, - start_point: Point::new(0, 0), - end_byte: node.start_byte(), - end_point: node.start_position(), - }; - let following_range = Range { - start_byte: node.end_byte(), - start_point: node.end_position(), - end_byte: usize::MAX, - end_point: Point::new(usize::MAX, usize::MAX), - }; - - for excluded_range in node - .children(&mut cursor) - .filter_map(|child| { - if includes_children { - None - } else { - Some(child.range()) - } - }) - .chain([following_range].iter().cloned()) - { - let mut range = Range { - start_byte: preceding_range.end_byte, - start_point: preceding_range.end_point, - end_byte: excluded_range.start_byte, - end_point: excluded_range.start_point, - }; - preceding_range = excluded_range; - - if range.end_byte < parent_range.start_byte { - continue; - } - - while parent_range.start_byte <= range.end_byte { - if parent_range.end_byte > range.start_byte { - if range.start_byte < parent_range.start_byte { - range.start_byte = parent_range.start_byte; - range.start_point = parent_range.start_point; - } - - if parent_range.end_byte < range.end_byte { - if range.start_byte < parent_range.end_byte { - result.push(Range { - start_byte: range.start_byte, - start_point: range.start_point, - end_byte: parent_range.end_byte, - end_point: parent_range.end_point, - }); - } - range.start_byte = parent_range.end_byte; - range.start_point = parent_range.end_point; - } else { - if range.start_byte < range.end_byte { - result.push(range); - } - break; - } - } - - if let Some(next_range) = parent_range_iter.next() { - parent_range = next_range; - } else { - return result; - } - } - } - } - result - } - // First, sort scope boundaries by their byte offset in the document. At a // given position, emit scope endings before scope beginnings. Finally, emit // scope boundaries from deeper layers first. @@ -1319,10 +1314,101 @@ impl<'a> HighlightIterLayer<'a> { } } -impl<'a, F> HighlightIter<'a, F> -where - F: FnMut(&str) -> Option<&'a HighlightConfiguration> + 'a, -{ +// Compute the ranges that should be included when parsing an injection. +// This takes into account three things: +// * `parent_ranges` - The ranges must all fall within the *current* layer's ranges. +// * `nodes` - Every injection takes place within a set of nodes. The injection ranges +// are the ranges of those nodes. +// * `includes_children` - For some injections, the content nodes' children should be +// excluded from the nested document, so that only the content nodes' *own* content +// is reparsed. For other injections, the content nodes' entire ranges should be +// reparsed, including the ranges of their children. 
+fn intersect_ranges( + parent_ranges: &[Range], + nodes: &[Node], + includes_children: bool, +) -> Vec { + let mut cursor = nodes[0].walk(); + let mut result = Vec::new(); + let mut parent_range_iter = parent_ranges.iter(); + let mut parent_range = parent_range_iter + .next() + .expect("Layers should only be constructed with non-empty ranges vectors"); + for node in nodes.iter() { + let mut preceding_range = Range { + start_byte: 0, + start_point: Point::new(0, 0), + end_byte: node.start_byte(), + end_point: node.start_position(), + }; + let following_range = Range { + start_byte: node.end_byte(), + start_point: node.end_position(), + end_byte: usize::MAX, + end_point: Point::new(usize::MAX, usize::MAX), + }; + + for excluded_range in node + .children(&mut cursor) + .filter_map(|child| { + if includes_children { + None + } else { + Some(child.range()) + } + }) + .chain([following_range].iter().cloned()) + { + let mut range = Range { + start_byte: preceding_range.end_byte, + start_point: preceding_range.end_point, + end_byte: excluded_range.start_byte, + end_point: excluded_range.start_point, + }; + preceding_range = excluded_range; + + if range.end_byte < parent_range.start_byte { + continue; + } + + while parent_range.start_byte <= range.end_byte { + if parent_range.end_byte > range.start_byte { + if range.start_byte < parent_range.start_byte { + range.start_byte = parent_range.start_byte; + range.start_point = parent_range.start_point; + } + + if parent_range.end_byte < range.end_byte { + if range.start_byte < parent_range.end_byte { + result.push(Range { + start_byte: range.start_byte, + start_point: range.start_point, + end_byte: parent_range.end_byte, + end_point: parent_range.end_point, + }); + } + range.start_byte = parent_range.end_byte; + range.start_point = parent_range.end_point; + } else { + if range.start_byte < range.end_byte { + result.push(range); + } + break; + } + } + + if let Some(next_range) = parent_range_iter.next() { + parent_range = next_range; + } else { + return result; + } + } + } + } + result +} + +impl<'a> HighlightIter<'a> { fn emit_event( &mut self, offset: usize, @@ -1353,6 +1439,12 @@ where i += 1; continue; } + } else { + let layer = self.layers.remove(i + 1); + PARSER.with(|ts_parser| { + let highlighter = &mut ts_parser.borrow_mut(); + highlighter.cursors.push(layer.cursor); + }); } break; } @@ -1369,30 +1461,9 @@ where } } } - - fn insert_layer(&mut self, mut layer: HighlightIterLayer<'a>) { - if let Some(sort_key) = layer.sort_key() { - let mut i = 1; - while i < self.layers.len() { - if let Some(sort_key_i) = self.layers[i].sort_key() { - if sort_key_i > sort_key { - self.layers.insert(i, layer); - return; - } - i += 1; - } else { - self.layers.remove(i); - } - } - self.layers.push(layer); - } - } } -impl<'a, F> Iterator for HighlightIter<'a, F> -where - F: FnMut(&str) -> Option<&'a HighlightConfiguration> + 'a, -{ +impl<'a> Iterator for HighlightIter<'a> { type Item = Result; fn next(&mut self) -> Option { @@ -1452,55 +1523,12 @@ where layer.highlight_end_stack.pop(); return self.emit_event(end_byte, Some(HighlightEvent::HighlightEnd)); } else { - // return self.emit_event(self.source.len(), None); - return None; + return self.emit_event(self.source.len_bytes(), None); }; let (mut match_, capture_index) = layer.captures.next().unwrap(); let mut capture = match_.captures[capture_index]; - // If this capture represents an injection, then process the injection. 
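The new `else` branch in `sort_layers` above hands an exhausted layer's QueryCursor back to the thread-local PARSER pool instead of dropping it, so later highlight runs can reuse the allocation. A hedged, standalone sketch of that reuse pattern (the pool here is illustrative, not the helix `PARSER` static):

```rust
// Thread-local pool of QueryCursors: pop one when a layer starts highlighting,
// push it back when the layer's captures are exhausted.
use std::cell::RefCell;
use tree_sitter::QueryCursor;

thread_local! {
    static CURSOR_POOL: RefCell<Vec<QueryCursor>> = RefCell::new(Vec::new());
}

fn take_cursor() -> QueryCursor {
    CURSOR_POOL.with(|pool| pool.borrow_mut().pop().unwrap_or_else(QueryCursor::new))
}

fn return_cursor(cursor: QueryCursor) {
    CURSOR_POOL.with(|pool| pool.borrow_mut().push(cursor));
}
```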
- if match_.pattern_index < layer.config.locals_pattern_index { - let (language_name, content_node, include_children) = - injection_for_match(layer.config, &layer.config.query, &match_, self.source); - - // Explicitly remove this match so that none of its other captures will remain - // in the stream of captures. - match_.remove(); - - // If a language is found with the given name, then add a new language layer - // to the highlighted document. - if let (Some(language_name), Some(content_node)) = (language_name, content_node) { - if let Some(config) = (self.injection_callback)(&language_name) { - let ranges = HighlightIterLayer::intersect_ranges( - &self.layers[0].ranges, - &[content_node], - include_children, - ); - if !ranges.is_empty() { - match HighlightIterLayer::new( - self.source, - self.cancellation_flag, - &mut self.injection_callback, - config, - self.layers[0].depth + 1, - ranges, - ) { - Ok(layers) => { - for layer in layers { - self.insert_layer(layer); - } - } - Err(e) => return Some(Err(e)), - } - } - } - } - - self.sort_layers(); - continue 'main; - } - // Remove from the local scope stack any local scopes that have already ended. while range.start > layer.scope_stack.last().unwrap().range.end { layer.scope_stack.pop(); @@ -1695,14 +1723,6 @@ fn injection_for_match<'a>( (language_name, content_node, include_children) } -// fn shrink_and_clear(vec: &mut Vec, capacity: usize) { -// if vec.len() > capacity { -// vec.truncate(capacity); -// vec.shrink_to_fit(); -// } -// vec.clear(); -// } - pub struct Merge { iter: I, spans: Box)>>, @@ -1869,6 +1889,8 @@ mod test { .map(String::from) .collect(); + let loader = Loader::new(Configuration { language: vec![] }); + let language = get_language(&crate::RUNTIME_DIR, "Rust").unwrap(); let config = HighlightConfiguration::new( language, @@ -1891,7 +1913,7 @@ mod test { fn main() {} ", ); - let syntax = Syntax::new(&source, Arc::new(config)); + let syntax = Syntax::new(&source, Arc::new(config), Arc::new(loader)); let tree = syntax.tree(); let root = tree.root_node(); assert_eq!(root.kind(), "source_file"); @@ -1918,7 +1940,7 @@ mod test { &doc, vec![(6, 11, Some("test".into())), (12, 17, None)].into_iter(), ); - let edits = LanguageLayer::generate_edits(doc.slice(..), transaction.changes()); + let edits = generate_edits(&doc, transaction.changes()); // transaction.apply(&mut state); assert_eq!( @@ -1947,7 +1969,7 @@ mod test { let mut doc = Rope::from("fn test() {}"); let transaction = Transaction::change(&doc, vec![(8, 8, Some("a: u32".into()))].into_iter()); - let edits = LanguageLayer::generate_edits(doc.slice(..), transaction.changes()); + let edits = generate_edits(&doc, transaction.changes()); transaction.apply(&mut doc); assert_eq!(doc, "fn test(a: u32) {}"); diff --git a/helix-core/src/textobject.rs b/helix-core/src/textobject.rs index 21ceec04f..5a55a6f1d 100644 --- a/helix-core/src/textobject.rs +++ b/helix-core/src/textobject.rs @@ -53,6 +53,8 @@ fn find_word_boundary(slice: RopeSlice, mut pos: usize, direction: Direction, lo pub enum TextObject { Around, Inside, + /// Used for moving between objects. 
+ Movement, } impl Display for TextObject { @@ -60,6 +62,7 @@ impl Display for TextObject { f.write_str(match self { Self::Around => "around", Self::Inside => "inside", + Self::Movement => "movement", }) } } @@ -104,6 +107,7 @@ pub fn textobject_word( Range::new(word_start - whitespace_count_left, word_end) } } + TextObject::Movement => unreachable!(), } } @@ -118,6 +122,7 @@ pub fn textobject_surround( .map(|(anchor, head)| match textobject { TextObject::Inside => Range::new(next_grapheme_boundary(slice, anchor), head), TextObject::Around => Range::new(anchor, next_grapheme_boundary(slice, head)), + TextObject::Movement => unreachable!(), }) .unwrap_or(range) } diff --git a/helix-core/src/transaction.rs b/helix-core/src/transaction.rs index d8d389f3b..2e34a9864 100644 --- a/helix-core/src/transaction.rs +++ b/helix-core/src/transaction.rs @@ -85,7 +85,7 @@ impl ChangeSet { let new_last = match self.changes.as_mut_slice() { [.., Insert(prev)] | [.., Insert(prev), Delete(_)] => { - prev.push_tendril(&fragment); + prev.push_str(&fragment); return; } [.., last @ Delete(_)] => std::mem::replace(last, Insert(fragment)), @@ -189,7 +189,7 @@ impl ChangeSet { // TODO: cover this with a test // figure out the byte index of the truncated string end let (pos, _) = s.char_indices().nth(j).unwrap(); - s.pop_front(pos as u32); + s.replace_range(0..pos, ""); head_a = Some(Insert(s)); head_b = changes_b.next(); } @@ -211,9 +211,11 @@ impl ChangeSet { Ordering::Greater => { // figure out the byte index of the truncated string end let (pos, _) = s.char_indices().nth(j).unwrap(); - let pos = pos as u32; - changes.insert(s.subtendril(0, pos)); - head_a = Some(Insert(s.subtendril(pos, s.len() as u32 - pos))); + let mut before = s; + let after = before.split_off(pos); + + changes.insert(before); + head_a = Some(Insert(after)); head_b = changes_b.next(); } } @@ -277,7 +279,7 @@ impl ChangeSet { } Delete(n) => { let text = Cow::from(original_doc.slice(pos..pos + *n)); - changes.insert(Tendril::from_slice(&text)); + changes.insert(Tendril::from(text.as_ref())); pos += n; } Insert(s) => { @@ -710,19 +712,19 @@ mod test { #[test] fn optimized_composition() { let mut state = State::new("".into()); - let t1 = Transaction::insert(&state.doc, &state.selection, Tendril::from_char('h')); + let t1 = Transaction::insert(&state.doc, &state.selection, Tendril::from("h")); t1.apply(&mut state.doc); state.selection = state.selection.clone().map(t1.changes()); - let t2 = Transaction::insert(&state.doc, &state.selection, Tendril::from_char('e')); + let t2 = Transaction::insert(&state.doc, &state.selection, Tendril::from("e")); t2.apply(&mut state.doc); state.selection = state.selection.clone().map(t2.changes()); - let t3 = Transaction::insert(&state.doc, &state.selection, Tendril::from_char('l')); + let t3 = Transaction::insert(&state.doc, &state.selection, Tendril::from("l")); t3.apply(&mut state.doc); state.selection = state.selection.clone().map(t3.changes()); - let t4 = Transaction::insert(&state.doc, &state.selection, Tendril::from_char('l')); + let t4 = Transaction::insert(&state.doc, &state.selection, Tendril::from("l")); t4.apply(&mut state.doc); state.selection = state.selection.clone().map(t4.changes()); - let t5 = Transaction::insert(&state.doc, &state.selection, Tendril::from_char('o')); + let t5 = Transaction::insert(&state.doc, &state.selection, Tendril::from("o")); t5.apply(&mut state.doc); state.selection = state.selection.clone().map(t5.changes()); @@ -761,7 +763,7 @@ mod test { #[test] fn combine_with_utf8() { - 
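The Tendril-specific calls above are swapped for their String-like equivalents (push_str, replace_range, split_off), with the split point found via char_indices so it always lands on a character boundary. A small sketch of that split on multibyte text, using a plain String (the helper name is mine, not from the codebase):

```rust
// Split an inserted fragment at the j-th character, keeping both halves.
fn split_at_char(mut s: String, j: usize) -> (String, String) {
    let pos = s
        .char_indices()
        .nth(j)
        .map(|(pos, _)| pos)
        .unwrap_or(s.len());
    let after = s.split_off(pos);
    (s, after)
}

fn main() {
    let (before, after) = split_at_char("これはヘリックス".to_string(), 3);
    assert_eq!(before, "これは");
    assert_eq!(after, "ヘリックス");
}
```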
const TEST_CASE: &'static str = "Hello, これはヘリックスエディターです!"; + const TEST_CASE: &str = "Hello, これはヘリックスエディターです!"; let empty = Rope::from(""); let a = ChangeSet::new(&empty); diff --git a/helix-dap/Cargo.toml b/helix-dap/Cargo.toml new file mode 100644 index 000000000..24288697e --- /dev/null +++ b/helix-dap/Cargo.toml @@ -0,0 +1,24 @@ +[package] +name = "helix-dap" +version = "0.6.0" +authors = ["Blaž Hrastnik "] +edition = "2018" +license = "MPL-2.0" +description = "DAP client implementation for Helix project" +categories = ["editor"] +repository = "https://github.com/helix-editor/helix" +homepage = "https://helix-editor.com" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +helix-core = { version = "0.6", path = "../helix-core" } +anyhow = "1.0" +log = "0.4" +serde = { version = "1.0", features = ["derive"] } +serde_json = "1.0" +thiserror = "1.0" +tokio = { version = "1", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot", "net", "sync"] } + +[dev-dependencies] +fern = "0.6" diff --git a/helix-dap/src/client.rs b/helix-dap/src/client.rs new file mode 100644 index 000000000..562544296 --- /dev/null +++ b/helix-dap/src/client.rs @@ -0,0 +1,477 @@ +use crate::{ + transport::{Payload, Request, Response, Transport}, + types::*, + Error, Result, ThreadId, +}; +use helix_core::syntax::DebuggerQuirks; + +use serde_json::Value; + +use anyhow::anyhow; +pub use log::{error, info}; +use std::{ + collections::HashMap, + future::Future, + net::{IpAddr, Ipv4Addr, SocketAddr}, + path::PathBuf, + process::Stdio, + sync::atomic::{AtomicU64, Ordering}, +}; +use tokio::{ + io::{AsyncBufRead, AsyncWrite, BufReader, BufWriter}, + net::TcpStream, + process::{Child, Command}, + sync::mpsc::{channel, unbounded_channel, UnboundedReceiver, UnboundedSender}, + time, +}; + +#[derive(Debug)] +pub struct Client { + id: usize, + _process: Option, + server_tx: UnboundedSender, + request_counter: AtomicU64, + pub caps: Option, + // thread_id -> frames + pub stack_frames: HashMap>, + pub thread_states: HashMap, + pub thread_id: Option, + /// Currently active frame for the current thread. 
+ pub active_frame: Option, + pub quirks: DebuggerQuirks, +} + +impl Client { + // Spawn a process and communicate with it by either TCP or stdio + pub async fn process( + transport: &str, + command: &str, + args: Vec<&str>, + port_arg: Option<&str>, + id: usize, + ) -> Result<(Self, UnboundedReceiver)> { + if command.is_empty() { + return Result::Err(Error::Other(anyhow!("Command not provided"))); + } + if transport == "tcp" && port_arg.is_some() { + Self::tcp_process(command, args, port_arg.unwrap(), id).await + } else if transport == "stdio" { + Self::stdio(command, args, id) + } else { + Result::Err(Error::Other(anyhow!("Incorrect transport {}", transport))) + } + } + + pub fn streams( + rx: Box, + tx: Box, + err: Option>, + id: usize, + process: Option, + ) -> Result<(Self, UnboundedReceiver)> { + let (server_rx, server_tx) = Transport::start(rx, tx, err, id); + let (client_rx, client_tx) = unbounded_channel(); + + let client = Self { + id, + _process: process, + server_tx, + request_counter: AtomicU64::new(0), + caps: None, + // + stack_frames: HashMap::new(), + thread_states: HashMap::new(), + thread_id: None, + active_frame: None, + quirks: DebuggerQuirks::default(), + }; + + tokio::spawn(Self::recv(server_rx, client_rx)); + + Ok((client, client_tx)) + } + + pub async fn tcp( + addr: std::net::SocketAddr, + id: usize, + ) -> Result<(Self, UnboundedReceiver)> { + let stream = TcpStream::connect(addr).await?; + let (rx, tx) = stream.into_split(); + Self::streams(Box::new(BufReader::new(rx)), Box::new(tx), None, id, None) + } + + pub fn stdio( + cmd: &str, + args: Vec<&str>, + id: usize, + ) -> Result<(Self, UnboundedReceiver)> { + let process = Command::new(cmd) + .args(args) + .stdin(Stdio::piped()) + .stdout(Stdio::piped()) + // make sure the process is reaped on drop + .kill_on_drop(true) + .spawn(); + + let mut process = process?; + + // TODO: do we need bufreader/writer here? or do we use async wrappers on unblock? + let writer = BufWriter::new(process.stdin.take().expect("Failed to open stdin")); + let reader = BufReader::new(process.stdout.take().expect("Failed to open stdout")); + let errors = process.stderr.take().map(BufReader::new); + + Self::streams( + Box::new(BufReader::new(reader)), + Box::new(writer), + // errors.map(|errors| Box::new(BufReader::new(errors))), + match errors { + Some(errors) => Some(Box::new(BufReader::new(errors))), + None => None, + }, + id, + Some(process), + ) + } + + async fn get_port() -> Option { + Some( + tokio::net::TcpListener::bind(SocketAddr::new( + IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)), + 0, + )) + .await + .ok()? + .local_addr() + .ok()? 
+ .port(), + ) + } + + pub async fn tcp_process( + cmd: &str, + args: Vec<&str>, + port_format: &str, + id: usize, + ) -> Result<(Self, UnboundedReceiver)> { + let port = Self::get_port().await.unwrap(); + + let process = Command::new(cmd) + .args(args) + .args(port_format.replace("{}", &port.to_string()).split(' ')) + // silence messages + .stdin(Stdio::null()) + .stdout(Stdio::null()) + .stderr(Stdio::null()) + // Do not kill debug adapter when leaving, it should exit automatically + .spawn()?; + + // Wait for adapter to become ready for connection + time::sleep(time::Duration::from_millis(500)).await; + + let stream = TcpStream::connect(SocketAddr::new( + IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)), + port, + )) + .await?; + + let (rx, tx) = stream.into_split(); + Self::streams( + Box::new(BufReader::new(rx)), + Box::new(tx), + None, + id, + Some(process), + ) + } + + async fn recv(mut server_rx: UnboundedReceiver, client_tx: UnboundedSender) { + while let Some(msg) = server_rx.recv().await { + match msg { + Payload::Event(ev) => { + client_tx.send(Payload::Event(ev)).expect("Failed to send"); + } + Payload::Response(_) => unreachable!(), + Payload::Request(req) => { + client_tx + .send(Payload::Request(req)) + .expect("Failed to send"); + } + } + } + } + + pub fn id(&self) -> usize { + self.id + } + + fn next_request_id(&self) -> u64 { + self.request_counter.fetch_add(1, Ordering::Relaxed) + } + + // Internal, called by specific DAP commands when resuming + pub fn resume_application(&mut self) { + if let Some(thread_id) = self.thread_id { + self.thread_states.insert(thread_id, "running".to_string()); + self.stack_frames.remove(&thread_id); + } + self.active_frame = None; + self.thread_id = None; + } + + /// Execute a RPC request on the debugger. + pub fn call( + &self, + arguments: R::Arguments, + ) -> impl Future> + where + R::Arguments: serde::Serialize, + { + let server_tx = self.server_tx.clone(); + let id = self.next_request_id(); + + async move { + use std::time::Duration; + use tokio::time::timeout; + + let arguments = Some(serde_json::to_value(arguments)?); + + let (callback_tx, mut callback_rx) = channel(1); + + let req = Request { + back_ch: Some(callback_tx), + seq: id, + command: R::COMMAND.to_string(), + arguments, + }; + + server_tx + .send(Payload::Request(req)) + .map_err(|e| Error::Other(e.into()))?; + + // TODO: specifiable timeout, delay other calls until initialize success + timeout(Duration::from_secs(20), callback_rx.recv()) + .await + .map_err(|_| Error::Timeout)? // return Timeout + .ok_or(Error::StreamClosed)? 
+ .map(|response| response.body.unwrap_or_default()) + // TODO: check response.success + } + } + + pub async fn request(&self, params: R::Arguments) -> Result + where + R::Arguments: serde::Serialize, + R::Result: core::fmt::Debug, // TODO: temporary + { + // a future that resolves into the response + let json = self.call::(params).await?; + let response = serde_json::from_value(json)?; + Ok(response) + } + + pub fn reply( + &self, + request_seq: u64, + command: &str, + result: core::result::Result, + ) -> impl Future> { + let server_tx = self.server_tx.clone(); + let command = command.to_string(); + + async move { + let response = match result { + Ok(result) => Response { + request_seq, + command, + success: true, + message: None, + body: Some(result), + }, + Err(error) => Response { + request_seq, + command, + success: false, + message: Some(error.to_string()), + body: None, + }, + }; + + server_tx + .send(Payload::Response(response)) + .map_err(|e| Error::Other(e.into()))?; + + Ok(()) + } + } + + pub fn capabilities(&self) -> &DebuggerCapabilities { + self.caps.as_ref().expect("debugger not yet initialized!") + } + + pub async fn initialize(&mut self, adapter_id: String) -> Result<()> { + let args = requests::InitializeArguments { + client_id: Some("hx".to_owned()), + client_name: Some("helix".to_owned()), + adapter_id, + locale: Some("en-us".to_owned()), + lines_start_at_one: Some(true), + columns_start_at_one: Some(true), + path_format: Some("path".to_owned()), + supports_variable_type: Some(true), + supports_variable_paging: Some(false), + supports_run_in_terminal_request: Some(true), + supports_memory_references: Some(false), + supports_progress_reporting: Some(false), + supports_invalidated_event: Some(false), + }; + + let response = self.request::(args).await?; + self.caps = Some(response); + + Ok(()) + } + + pub fn disconnect(&self) -> impl Future> { + self.call::(()) + } + + pub fn launch(&self, args: serde_json::Value) -> impl Future> { + self.call::(args) + } + + pub fn attach(&self, args: serde_json::Value) -> impl Future> { + self.call::(args) + } + + pub async fn set_breakpoints( + &self, + file: PathBuf, + breakpoints: Vec, + ) -> Result>> { + let args = requests::SetBreakpointsArguments { + source: Source { + path: Some(file), + name: None, + source_reference: None, + presentation_hint: None, + origin: None, + sources: None, + adapter_data: None, + checksums: None, + }, + breakpoints: Some(breakpoints), + source_modified: Some(false), + }; + + let response = self.request::(args).await?; + + Ok(response.breakpoints) + } + + pub async fn configuration_done(&self) -> Result<()> { + self.request::(()).await + } + + pub fn continue_thread(&self, thread_id: ThreadId) -> impl Future> { + let args = requests::ContinueArguments { thread_id }; + + self.call::(args) + } + + pub async fn stack_trace( + &self, + thread_id: ThreadId, + ) -> Result<(Vec, Option)> { + let args = requests::StackTraceArguments { + thread_id, + start_frame: None, + levels: None, + format: None, + }; + + let response = self.request::(args).await?; + Ok((response.stack_frames, response.total_frames)) + } + + pub fn threads(&self) -> impl Future> { + self.call::(()) + } + + pub async fn scopes(&self, frame_id: usize) -> Result> { + let args = requests::ScopesArguments { frame_id }; + + let response = self.request::(args).await?; + Ok(response.scopes) + } + + pub async fn variables(&self, variables_reference: usize) -> Result> { + let args = requests::VariablesArguments { + variables_reference, + filter: None, 
+ start: None, + count: None, + format: None, + }; + + let response = self.request::(args).await?; + Ok(response.variables) + } + + pub fn step_in(&self, thread_id: ThreadId) -> impl Future> { + let args = requests::StepInArguments { + thread_id, + target_id: None, + granularity: None, + }; + + self.call::(args) + } + + pub fn step_out(&self, thread_id: ThreadId) -> impl Future> { + let args = requests::StepOutArguments { + thread_id, + granularity: None, + }; + + self.call::(args) + } + + pub fn next(&self, thread_id: ThreadId) -> impl Future> { + let args = requests::NextArguments { + thread_id, + granularity: None, + }; + + self.call::(args) + } + + pub fn pause(&self, thread_id: ThreadId) -> impl Future> { + let args = requests::PauseArguments { thread_id }; + + self.call::(args) + } + + pub async fn eval( + &self, + expression: String, + frame_id: Option, + ) -> Result { + let args = requests::EvaluateArguments { + expression, + frame_id, + context: None, + format: None, + }; + + self.request::(args).await + } + + pub fn set_exception_breakpoints( + &self, + filters: Vec, + ) -> impl Future> { + let args = requests::SetExceptionBreakpointsArguments { filters }; + + self.call::(args) + } +} diff --git a/helix-dap/src/lib.rs b/helix-dap/src/lib.rs new file mode 100644 index 000000000..f60b102c0 --- /dev/null +++ b/helix-dap/src/lib.rs @@ -0,0 +1,24 @@ +mod client; +mod transport; +mod types; + +pub use client::Client; +pub use events::Event; +pub use transport::{Payload, Response, Transport}; +pub use types::*; + +use thiserror::Error; +#[derive(Error, Debug)] +pub enum Error { + #[error("failed to parse: {0}")] + Parse(#[from] serde_json::Error), + #[error("IO Error: {0}")] + IO(#[from] std::io::Error), + #[error("request timed out")] + Timeout, + #[error("server closed the stream")] + StreamClosed, + #[error(transparent)] + Other(#[from] anyhow::Error), +} +pub type Result = core::result::Result; diff --git a/helix-dap/src/transport.rs b/helix-dap/src/transport.rs new file mode 100644 index 000000000..783a6f5d0 --- /dev/null +++ b/helix-dap/src/transport.rs @@ -0,0 +1,280 @@ +use crate::{Error, Event, Result}; +use anyhow::Context; +use log::{error, info, warn}; +use serde::{Deserialize, Serialize}; +use serde_json::Value; +use std::collections::HashMap; +use std::sync::Arc; +use tokio::{ + io::{AsyncBufRead, AsyncBufReadExt, AsyncReadExt, AsyncWrite, AsyncWriteExt}, + sync::{ + mpsc::{unbounded_channel, Sender, UnboundedReceiver, UnboundedSender}, + Mutex, + }, +}; + +#[derive(Debug, Clone, Deserialize, Serialize)] +pub struct Request { + #[serde(skip)] + pub back_ch: Option>>, + pub seq: u64, + pub command: String, + pub arguments: Option, +} + +#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] +pub struct Response { + // seq is omitted as unused and is not sent by some implementations + pub request_seq: u64, + pub success: bool, + pub command: String, + pub message: Option, + pub body: Option, +} + +#[derive(Debug, Clone, Deserialize, Serialize)] +#[serde(tag = "type", rename_all = "camelCase")] +pub enum Payload { + // type = "event" + Event(Box), + // type = "response" + Response(Response), + // type = "request" + Request(Request), +} + +#[derive(Debug)] +pub struct Transport { + #[allow(unused)] + id: usize, + pending_requests: Mutex>>>, +} + +impl Transport { + pub fn start( + server_stdout: Box, + server_stdin: Box, + server_stderr: Option>, + id: usize, + ) -> (UnboundedReceiver, UnboundedSender) { + let (client_tx, rx) = unbounded_channel(); + let (tx, client_rx) = 
unbounded_channel(); + + let transport = Self { + id, + pending_requests: Mutex::new(HashMap::default()), + }; + + let transport = Arc::new(transport); + + tokio::spawn(Self::recv(transport.clone(), server_stdout, client_tx)); + tokio::spawn(Self::send(transport, server_stdin, client_rx)); + if let Some(stderr) = server_stderr { + tokio::spawn(Self::err(stderr)); + } + + (rx, tx) + } + + async fn recv_server_message( + reader: &mut Box, + buffer: &mut String, + ) -> Result { + let mut content_length = None; + loop { + buffer.truncate(0); + if reader.read_line(buffer).await? == 0 { + return Err(Error::StreamClosed); + }; + + if buffer == "\r\n" { + // look for an empty CRLF line + break; + } + + let header = buffer.trim(); + let parts = header.split_once(": "); + + match parts { + Some(("Content-Length", value)) => { + content_length = Some(value.parse().context("invalid content length")?); + } + Some((_, _)) => {} + None => { + // Workaround: Some non-conformant language servers will output logging and other garbage + // into the same stream as JSON-RPC messages. This can also happen from shell scripts that spawn + // the server. Skip such lines and log a warning. + + // warn!("Failed to parse header: {:?}", header); + } + } + } + + let content_length = content_length.context("missing content length")?; + + //TODO: reuse vector + let mut content = vec![0; content_length]; + reader.read_exact(&mut content).await?; + let msg = std::str::from_utf8(&content).context("invalid utf8 from server")?; + + info!("<- DAP {}", msg); + + // try parsing as output (server response) or call (server request) + let output: serde_json::Result = serde_json::from_str(msg); + + Ok(output?) + } + + async fn recv_server_error( + err: &mut (impl AsyncBufRead + Unpin + Send), + buffer: &mut String, + ) -> Result<()> { + buffer.truncate(0); + if err.read_line(buffer).await? 
== 0 { + return Err(Error::StreamClosed); + }; + error!("err <- {}", buffer); + + Ok(()) + } + + async fn send_payload_to_server( + &self, + server_stdin: &mut Box, + mut payload: Payload, + ) -> Result<()> { + if let Payload::Request(request) = &mut payload { + if let Some(back) = request.back_ch.take() { + self.pending_requests.lock().await.insert(request.seq, back); + } + } + let json = serde_json::to_string(&payload)?; + self.send_string_to_server(server_stdin, json).await + } + + async fn send_string_to_server( + &self, + server_stdin: &mut Box, + request: String, + ) -> Result<()> { + info!("-> DAP {}", request); + + // send the headers + server_stdin + .write_all(format!("Content-Length: {}\r\n\r\n", request.len()).as_bytes()) + .await?; + + // send the body + server_stdin.write_all(request.as_bytes()).await?; + + server_stdin.flush().await?; + + Ok(()) + } + + fn process_response(res: Response) -> Result { + if res.success { + info!("<- DAP success in response to {}", res.request_seq); + + Ok(res) + } else { + error!( + "<- DAP error {:?} ({:?}) for command #{} {}", + res.message, res.body, res.request_seq, res.command + ); + + Err(Error::Other(anyhow::format_err!("{:?}", res.body))) + } + } + + async fn process_server_message( + &self, + client_tx: &UnboundedSender, + msg: Payload, + ) -> Result<()> { + match msg { + Payload::Response(res) => { + let request_seq = res.request_seq; + let tx = self.pending_requests.lock().await.remove(&request_seq); + + match tx { + Some(tx) => match tx.send(Self::process_response(res)).await { + Ok(_) => (), + Err(_) => error!( + "Tried sending response into a closed channel (id={:?}), original request likely timed out", + request_seq + ), + } + None => { + warn!("Response to nonexistent request #{}", res.request_seq); + client_tx.send(Payload::Response(res)).expect("Failed to send"); + } + } + + Ok(()) + } + Payload::Request(Request { + ref command, + ref seq, + .. 
+ }) => { + info!("<- DAP request {} #{}", command, seq); + client_tx.send(msg).expect("Failed to send"); + Ok(()) + } + Payload::Event(ref event) => { + info!("<- DAP event {:?}", event); + client_tx.send(msg).expect("Failed to send"); + Ok(()) + } + } + } + + async fn recv( + transport: Arc, + mut server_stdout: Box, + client_tx: UnboundedSender, + ) { + let mut recv_buffer = String::new(); + loop { + match Self::recv_server_message(&mut server_stdout, &mut recv_buffer).await { + Ok(msg) => { + transport + .process_server_message(&client_tx, msg) + .await + .unwrap(); + } + Err(err) => { + error!("err: <- {:?}", err); + break; + } + } + } + } + + async fn send( + transport: Arc, + mut server_stdin: Box, + mut client_rx: UnboundedReceiver, + ) { + while let Some(payload) = client_rx.recv().await { + transport + .send_payload_to_server(&mut server_stdin, payload) + .await + .unwrap() + } + } + + async fn err(mut server_stderr: Box) { + let mut recv_buffer = String::new(); + loop { + match Self::recv_server_error(&mut server_stderr, &mut recv_buffer).await { + Ok(_) => {} + Err(err) => { + error!("err: <- {:?}", err); + break; + } + } + } + } +} diff --git a/helix-dap/src/types.rs b/helix-dap/src/types.rs new file mode 100644 index 000000000..2c3df9c33 --- /dev/null +++ b/helix-dap/src/types.rs @@ -0,0 +1,707 @@ +use serde::{Deserialize, Serialize}; +use serde_json::Value; +use std::collections::HashMap; +use std::path::PathBuf; + +#[derive( + Debug, Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize, +)] +pub struct ThreadId(isize); + +impl std::fmt::Display for ThreadId { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.0.fmt(f) + } +} + +pub trait Request { + type Arguments: serde::de::DeserializeOwned + serde::Serialize; + type Result: serde::de::DeserializeOwned + serde::Serialize; + const COMMAND: &'static str; +} + +#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct ColumnDescriptor { + pub attribute_name: String, + pub label: String, + pub format: Option, + #[serde(rename = "type")] + pub ty: Option, + pub width: Option, +} + +#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct ExceptionBreakpointsFilter { + pub filter: String, + pub label: String, + pub description: Option, + pub default: Option, + pub supports_condition: Option, + pub condition_description: Option, +} + +#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct DebuggerCapabilities { + pub supports_configuration_done_request: Option, + pub supports_function_breakpoints: Option, + pub supports_conditional_breakpoints: Option, + pub supports_hit_conditional_breakpoints: Option, + pub supports_evaluate_for_hovers: Option, + pub supports_step_back: Option, + pub supports_set_variable: Option, + pub supports_restart_frame: Option, + pub supports_goto_targets_request: Option, + pub supports_step_in_targets_request: Option, + pub supports_completions_request: Option, + pub supports_modules_request: Option, + pub supports_restart_request: Option, + pub supports_exception_options: Option, + pub supports_value_formatting_options: Option, + pub supports_exception_info_request: Option, + pub support_terminate_debuggee: Option, + pub support_suspend_debuggee: Option, + pub supports_delayed_stack_trace_loading: Option, + pub supports_loaded_sources_request: Option, + pub supports_log_points: Option, + pub 
supports_terminate_threads_request: Option, + pub supports_set_expression: Option, + pub supports_terminate_request: Option, + pub supports_data_breakpoints: Option, + pub supports_read_memory_request: Option, + pub supports_write_memory_request: Option, + pub supports_disassemble_request: Option, + pub supports_cancel_request: Option, + pub supports_breakpoint_locations_request: Option, + pub supports_clipboard_context: Option, + pub supports_stepping_granularity: Option, + pub supports_instruction_breakpoints: Option, + pub supports_exception_filter_options: Option, + pub exception_breakpoint_filters: Option>, + pub completion_trigger_characters: Option>, + pub additional_module_columns: Option>, + pub supported_checksum_algorithms: Option>, +} + +#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct Checksum { + pub algorithm: String, + pub checksum: String, +} + +#[derive(Debug, Default, PartialEq, Clone, Deserialize, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct Source { + pub name: Option, + pub path: Option, + pub source_reference: Option, + pub presentation_hint: Option, + pub origin: Option, + pub sources: Option>, + pub adapter_data: Option, + pub checksums: Option>, +} + +#[derive(Debug, Default, PartialEq, Clone, Deserialize, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct SourceBreakpoint { + pub line: usize, + pub column: Option, + pub condition: Option, + pub hit_condition: Option, + pub log_message: Option, +} + +#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct Breakpoint { + pub id: Option, + pub verified: bool, + pub message: Option, + pub source: Option, + pub line: Option, + pub column: Option, + pub end_line: Option, + pub end_column: Option, + pub instruction_reference: Option, + pub offset: Option, +} + +#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct StackFrameFormat { + pub parameters: Option, + pub parameter_types: Option, + pub parameter_names: Option, + pub parameter_values: Option, + pub line: Option, + pub module: Option, + pub include_all: Option, +} + +#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct StackFrame { + pub id: usize, + pub name: String, + pub source: Option, + pub line: usize, + pub column: usize, + pub end_line: Option, + pub end_column: Option, + pub can_restart: Option, + pub instruction_pointer_reference: Option, + pub module_id: Option, + pub presentation_hint: Option, +} + +#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct Thread { + pub id: ThreadId, + pub name: String, +} + +#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct Scope { + pub name: String, + pub presentation_hint: Option, + pub variables_reference: usize, + pub named_variables: Option, + pub indexed_variables: Option, + pub expensive: bool, + pub source: Option, + pub line: Option, + pub column: Option, + pub end_line: Option, + pub end_column: Option, +} + +#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct ValueFormat { + pub hex: Option, +} + +#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct VariablePresentationHint { + pub kind: Option, + pub attributes: Option>, + pub visibility: Option, +} + 
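Taken together, the serde attributes on these types define the DAP wire shape: messages are tagged by "type", events are further tagged by "event" with their payload under "body", and field names are camelCased. A hedged sketch of parsing one such message, assuming the helix-dap crate above is available as `helix_dap` and that the JSON values are typical adapter output:

```rust
// Deserialize a raw DAP "stopped" event into the Payload/Event enums above.
use helix_dap::{Event, Payload};

fn main() -> Result<(), serde_json::Error> {
    let raw = r#"{
        "type": "event",
        "event": "stopped",
        "body": { "reason": "breakpoint", "threadId": 1, "allThreadsStopped": true }
    }"#;

    match serde_json::from_str::<Payload>(raw)? {
        Payload::Event(event) => match *event {
            Event::Stopped(stopped) => {
                assert_eq!(stopped.reason, "breakpoint");
                assert_eq!(stopped.all_threads_stopped, Some(true));
            }
            other => panic!("unexpected event: {:?}", other),
        },
        other => panic!("unexpected payload: {:?}", other),
    }
    Ok(())
}
```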
+#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct Variable { + pub name: String, + pub value: String, + #[serde(rename = "type")] + pub ty: Option, + pub presentation_hint: Option, + pub evaluate_name: Option, + pub variables_reference: usize, + pub named_variables: Option, + pub indexed_variables: Option, + pub memory_reference: Option, +} + +#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct Module { + pub id: String, // TODO: || number + pub name: String, + pub path: Option, + pub is_optimized: Option, + pub is_user_code: Option, + pub version: Option, + pub symbol_status: Option, + pub symbol_file_path: Option, + pub date_time_stamp: Option, + pub address_range: Option, +} + +pub mod requests { + use super::*; + #[derive(Debug, Default, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct InitializeArguments { + #[serde(rename = "clientID")] + pub client_id: Option, + pub client_name: Option, + #[serde(rename = "adapterID")] + pub adapter_id: String, + pub locale: Option, + #[serde(rename = "linesStartAt1")] + pub lines_start_at_one: Option, + #[serde(rename = "columnsStartAt1")] + pub columns_start_at_one: Option, + pub path_format: Option, + pub supports_variable_type: Option, + pub supports_variable_paging: Option, + pub supports_run_in_terminal_request: Option, + pub supports_memory_references: Option, + pub supports_progress_reporting: Option, + pub supports_invalidated_event: Option, + } + + #[derive(Debug)] + pub enum Initialize {} + + impl Request for Initialize { + type Arguments = InitializeArguments; + type Result = DebuggerCapabilities; + const COMMAND: &'static str = "initialize"; + } + + #[derive(Debug)] + pub enum Launch {} + + impl Request for Launch { + type Arguments = Value; + type Result = Value; + const COMMAND: &'static str = "launch"; + } + + #[derive(Debug)] + pub enum Attach {} + + impl Request for Attach { + type Arguments = Value; + type Result = Value; + const COMMAND: &'static str = "attach"; + } + + #[derive(Debug)] + pub enum Disconnect {} + + impl Request for Disconnect { + type Arguments = (); + type Result = (); + const COMMAND: &'static str = "disconnect"; + } + + #[derive(Debug)] + pub enum ConfigurationDone {} + + impl Request for ConfigurationDone { + type Arguments = (); + type Result = (); + const COMMAND: &'static str = "configurationDone"; + } + + #[derive(Debug, Default, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct SetBreakpointsArguments { + pub source: Source, + pub breakpoints: Option>, + // lines is deprecated + pub source_modified: Option, + } + + #[derive(Debug, Default, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct SetBreakpointsResponse { + pub breakpoints: Option>, + } + + #[derive(Debug)] + pub enum SetBreakpoints {} + + impl Request for SetBreakpoints { + type Arguments = SetBreakpointsArguments; + type Result = SetBreakpointsResponse; + const COMMAND: &'static str = "setBreakpoints"; + } + + #[derive(Debug, Default, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct ContinueArguments { + pub thread_id: ThreadId, + } + + #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct ContinueResponse { + pub all_threads_continued: Option, + } + + #[derive(Debug)] + pub enum Continue {} + + impl Request 
for Continue { + type Arguments = ContinueArguments; + type Result = ContinueResponse; + const COMMAND: &'static str = "continue"; + } + + #[derive(Debug, Default, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct StackTraceArguments { + pub thread_id: ThreadId, + pub start_frame: Option, + pub levels: Option, + pub format: Option, + } + + #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct StackTraceResponse { + pub total_frames: Option, + pub stack_frames: Vec, + } + + #[derive(Debug)] + pub enum StackTrace {} + + impl Request for StackTrace { + type Arguments = StackTraceArguments; + type Result = StackTraceResponse; + const COMMAND: &'static str = "stackTrace"; + } + + #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct ThreadsResponse { + pub threads: Vec, + } + + #[derive(Debug)] + pub enum Threads {} + + impl Request for Threads { + type Arguments = (); + type Result = ThreadsResponse; + const COMMAND: &'static str = "threads"; + } + + #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct ScopesArguments { + pub frame_id: usize, + } + + #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct ScopesResponse { + pub scopes: Vec, + } + + #[derive(Debug)] + pub enum Scopes {} + + impl Request for Scopes { + type Arguments = ScopesArguments; + type Result = ScopesResponse; + const COMMAND: &'static str = "scopes"; + } + + #[derive(Debug, Default, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct VariablesArguments { + pub variables_reference: usize, + pub filter: Option, + pub start: Option, + pub count: Option, + pub format: Option, + } + + #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct VariablesResponse { + pub variables: Vec, + } + + #[derive(Debug)] + pub enum Variables {} + + impl Request for Variables { + type Arguments = VariablesArguments; + type Result = VariablesResponse; + const COMMAND: &'static str = "variables"; + } + + #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct StepInArguments { + pub thread_id: ThreadId, + pub target_id: Option, + pub granularity: Option, + } + + #[derive(Debug)] + pub enum StepIn {} + + impl Request for StepIn { + type Arguments = StepInArguments; + type Result = (); + const COMMAND: &'static str = "stepIn"; + } + + #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct StepOutArguments { + pub thread_id: ThreadId, + pub granularity: Option, + } + + #[derive(Debug)] + pub enum StepOut {} + + impl Request for StepOut { + type Arguments = StepOutArguments; + type Result = (); + const COMMAND: &'static str = "stepOut"; + } + + #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct NextArguments { + pub thread_id: ThreadId, + pub granularity: Option, + } + + #[derive(Debug)] + pub enum Next {} + + impl Request for Next { + type Arguments = NextArguments; + type Result = (); + const COMMAND: &'static str = "next"; + } + + #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct PauseArguments { + pub thread_id: ThreadId, + } + + #[derive(Debug)] + pub enum Pause {} + + impl 
Request for Pause { + type Arguments = PauseArguments; + type Result = (); + const COMMAND: &'static str = "pause"; + } + + #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct EvaluateArguments { + pub expression: String, + pub frame_id: Option, + pub context: Option, + pub format: Option, + } + + #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct EvaluateResponse { + pub result: String, + #[serde(rename = "type")] + pub ty: Option, + pub presentation_hint: Option, + pub variables_reference: usize, + pub named_variables: Option, + pub indexed_variables: Option, + pub memory_reference: Option, + } + + #[derive(Debug)] + pub enum Evaluate {} + + impl Request for Evaluate { + type Arguments = EvaluateArguments; + type Result = EvaluateResponse; + const COMMAND: &'static str = "evaluate"; + } + + #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct SetExceptionBreakpointsArguments { + pub filters: Vec, + // pub filterOptions: Option>, // needs capability + // pub exceptionOptions: Option>, // needs capability + } + + #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct SetExceptionBreakpointsResponse { + pub breakpoints: Option>, + } + + #[derive(Debug)] + pub enum SetExceptionBreakpoints {} + + impl Request for SetExceptionBreakpoints { + type Arguments = SetExceptionBreakpointsArguments; + type Result = SetExceptionBreakpointsResponse; + const COMMAND: &'static str = "setExceptionBreakpoints"; + } + + // Reverse Requests + + #[derive(Debug, Default, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct RunInTerminalResponse { + pub process_id: Option, + pub shell_process_id: Option, + } + + #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct RunInTerminalArguments { + pub kind: Option, + pub title: Option, + pub cwd: Option, + pub args: Vec, + pub env: Option>>, + } + + #[derive(Debug)] + pub enum RunInTerminal {} + + impl Request for RunInTerminal { + type Arguments = RunInTerminalArguments; + type Result = RunInTerminalResponse; + const COMMAND: &'static str = "runInTerminal"; + } +} + +// Events + +pub mod events { + use super::*; + + #[derive(Debug, Clone, Serialize, Deserialize)] + #[serde(rename_all = "camelCase")] + #[serde(tag = "event", content = "body")] + // seq is omitted as unused and is not sent by some implementations + pub enum Event { + Initialized, + Stopped(Stopped), + Continued(Continued), + Exited(Exited), + Terminated(Option), + Thread(Thread), + Output(Output), + Breakpoint(Breakpoint), + Module(Module), + LoadedSource(LoadedSource), + Process(Process), + Capabilities(Capabilities), + // ProgressStart(), + // ProgressUpdate(), + // ProgressEnd(), + // Invalidated(), + Memory(Memory), + } + + #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct Stopped { + pub reason: String, + pub description: Option, + pub thread_id: Option, + pub preserve_focus_hint: Option, + pub text: Option, + pub all_threads_stopped: Option, + pub hit_breakpoint_ids: Option>, + } + + #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct Continued { + pub thread_id: ThreadId, + pub all_threads_continued: Option, + } + + #[derive(Debug, PartialEq, Clone, Deserialize, 
Serialize)] + #[serde(rename_all = "camelCase")] + pub struct Exited { + pub exit_code: usize, + } + + #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct Terminated { + pub restart: Option, + } + + #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct Thread { + pub reason: String, + pub thread_id: ThreadId, + } + + #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct Output { + pub output: String, + pub category: Option, + pub group: Option, + pub line: Option, + pub column: Option, + pub variables_reference: Option, + pub source: Option, + pub data: Option, + } + + #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct Breakpoint { + pub reason: String, + pub breakpoint: super::Breakpoint, + } + + #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct Module { + pub reason: String, + pub module: super::Module, + } + + #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct LoadedSource { + pub reason: String, + pub source: super::Source, + } + + #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct Process { + pub name: String, + pub system_process_id: Option, + pub is_local_process: Option, + pub start_method: Option, // TODO: use enum + pub pointer_size: Option, + } + + #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct Capabilities { + pub capabilities: super::DebuggerCapabilities, + } + + // #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] + // #[serde(rename_all = "camelCase")] + // pub struct Invalidated { + // pub areas: Vec, + // pub thread_id: Option, + // pub stack_frame_id: Option, + // } + + #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct Memory { + pub memory_reference: String, + pub offset: usize, + pub count: usize, + } +} diff --git a/helix-lsp/Cargo.toml b/helix-lsp/Cargo.toml index b7fe94ab6..39b537063 100644 --- a/helix-lsp/Cargo.toml +++ b/helix-lsp/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "helix-lsp" -version = "0.5.0" +version = "0.6.0" authors = ["Blaž Hrastnik "] edition = "2021" license = "MPL-2.0" @@ -12,16 +12,16 @@ homepage = "https://helix-editor.com" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] -helix-core = { version = "0.5", path = "../helix-core" } +helix-core = { version = "0.6", path = "../helix-core" } anyhow = "1.0" futures-executor = "0.3" futures-util = { version = "0.3", features = ["std", "async-await"], default-features = false } jsonrpc-core = { version = "18.0", default-features = false } # don't pull in all of futures log = "0.4" -lsp-types = { version = "0.91", features = ["proposed"] } +lsp-types = { version = "0.92", features = ["proposed"] } serde = { version = "1.0", features = ["derive"] } serde_json = "1.0" thiserror = "1.0" -tokio = { version = "1.15", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot"] } +tokio = { version = "1.16", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot", "sync"] } tokio-stream = "0.1.8" diff --git a/helix-lsp/src/client.rs 
b/helix-lsp/src/client.rs index c80f70b54..15cbca0eb 100644 --- a/helix-lsp/src/client.rs +++ b/helix-lsp/src/client.rs @@ -438,7 +438,7 @@ impl Client { changes.push(lsp::TextDocumentContentChangeEvent { range: Some(lsp::Range::new(start, end)), - text: s.into(), + text: s.to_string(), range_length: None, }); } diff --git a/helix-syntax/Cargo.toml b/helix-syntax/Cargo.toml index cceec4127..855839be0 100644 --- a/helix-syntax/Cargo.toml +++ b/helix-syntax/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "helix-syntax" -version = "0.5.0" +version = "0.6.0" authors = ["Blaž Hrastnik "] edition = "2021" license = "MPL-2.0" diff --git a/helix-syntax/build.rs b/helix-syntax/build.rs index 28f85e74f..fa8be8b38 100644 --- a/helix-syntax/build.rs +++ b/helix-syntax/build.rs @@ -175,7 +175,6 @@ fn build_dir(dir: &str, language: &str) { fn main() { let ignore = vec![ "tree-sitter-typescript".to_string(), - "tree-sitter-haskell".to_string(), // aarch64 failures: https://github.com/tree-sitter/tree-sitter-haskell/issues/34 "tree-sitter-ocaml".to_string(), ]; let dirs = collect_tree_sitter_dirs(&ignore).unwrap(); diff --git a/helix-syntax/languages/tree-sitter-elm b/helix-syntax/languages/tree-sitter-elm new file mode 160000 index 000000000..bd50ccf66 --- /dev/null +++ b/helix-syntax/languages/tree-sitter-elm @@ -0,0 +1 @@ +Subproject commit bd50ccf66b42c55252ac8efc1086af4ac6bab8cd diff --git a/helix-syntax/languages/tree-sitter-erlang b/helix-syntax/languages/tree-sitter-erlang new file mode 160000 index 000000000..86985bde3 --- /dev/null +++ b/helix-syntax/languages/tree-sitter-erlang @@ -0,0 +1 @@ +Subproject commit 86985bde399c5f40b00bc75f7ab70a6c69a5f9c3 diff --git a/helix-syntax/languages/tree-sitter-git-config b/helix-syntax/languages/tree-sitter-git-config new file mode 160000 index 000000000..0e4f0baf9 --- /dev/null +++ b/helix-syntax/languages/tree-sitter-git-config @@ -0,0 +1 @@ +Subproject commit 0e4f0baf90b57e5aeb62dcdbf03062c6315d43ea diff --git a/helix-syntax/languages/tree-sitter-go b/helix-syntax/languages/tree-sitter-go index 2a83dfdd7..0fa917a70 160000 --- a/helix-syntax/languages/tree-sitter-go +++ b/helix-syntax/languages/tree-sitter-go @@ -1 +1 @@ -Subproject commit 2a83dfdd759a632651f852aa4dc0af2525fae5cd +Subproject commit 0fa917a7022d1cd2e9b779a6a8fc5dc7fad69c75 diff --git a/helix-syntax/languages/tree-sitter-graphql b/helix-syntax/languages/tree-sitter-graphql new file mode 160000 index 000000000..5e66e961e --- /dev/null +++ b/helix-syntax/languages/tree-sitter-graphql @@ -0,0 +1 @@ +Subproject commit 5e66e961eee421786bdda8495ed1db045e06b5fe diff --git a/helix-syntax/languages/tree-sitter-haskell b/helix-syntax/languages/tree-sitter-haskell index 237f4eb44..b6ec26f18 160000 --- a/helix-syntax/languages/tree-sitter-haskell +++ b/helix-syntax/languages/tree-sitter-haskell @@ -1 +1 @@ -Subproject commit 237f4eb4417c28f643a29d795ed227246afb66f9 +Subproject commit b6ec26f181dd059eedd506fa5fbeae1b8e5556c8 diff --git a/helix-syntax/languages/tree-sitter-iex b/helix-syntax/languages/tree-sitter-iex new file mode 160000 index 000000000..3ec55082c --- /dev/null +++ b/helix-syntax/languages/tree-sitter-iex @@ -0,0 +1 @@ +Subproject commit 3ec55082cf0be015d03148be8edfdfa8c56e77f9 diff --git a/helix-syntax/languages/tree-sitter-lean b/helix-syntax/languages/tree-sitter-lean new file mode 160000 index 000000000..d98426109 --- /dev/null +++ b/helix-syntax/languages/tree-sitter-lean @@ -0,0 +1 @@ +Subproject commit d98426109258b266e1e92358c5f11716d2e8f638 diff --git 
a/helix-syntax/languages/tree-sitter-llvm-mir b/helix-syntax/languages/tree-sitter-llvm-mir new file mode 160000 index 000000000..06fabca19 --- /dev/null +++ b/helix-syntax/languages/tree-sitter-llvm-mir @@ -0,0 +1 @@ +Subproject commit 06fabca19454b2dc00c1b211a7cb7ad0bc2585f1 diff --git a/helix-syntax/languages/tree-sitter-make b/helix-syntax/languages/tree-sitter-make new file mode 160000 index 000000000..a4b918741 --- /dev/null +++ b/helix-syntax/languages/tree-sitter-make @@ -0,0 +1 @@ +Subproject commit a4b9187417d6be349ee5fd4b6e77b4172c6827dd diff --git a/helix-syntax/languages/tree-sitter-php b/helix-syntax/languages/tree-sitter-php index 0d63eaf94..57f855461 160000 --- a/helix-syntax/languages/tree-sitter-php +++ b/helix-syntax/languages/tree-sitter-php @@ -1 +1 @@ -Subproject commit 0d63eaf94e8d6c0694551b016c802787e61b3fb2 +Subproject commit 57f855461aeeca73bd4218754fb26b5ac143f98f diff --git a/helix-syntax/languages/tree-sitter-regex b/helix-syntax/languages/tree-sitter-regex new file mode 160000 index 000000000..e1cfca3c7 --- /dev/null +++ b/helix-syntax/languages/tree-sitter-regex @@ -0,0 +1 @@ +Subproject commit e1cfca3c79896ff79842f057ea13e529b66af636 diff --git a/helix-syntax/languages/tree-sitter-rescript b/helix-syntax/languages/tree-sitter-rescript new file mode 160000 index 000000000..761eb9126 --- /dev/null +++ b/helix-syntax/languages/tree-sitter-rescript @@ -0,0 +1 @@ +Subproject commit 761eb9126b65e078b1b5770ac296b4af8870f933 diff --git a/helix-syntax/languages/tree-sitter-twig b/helix-syntax/languages/tree-sitter-twig new file mode 160000 index 000000000..b7444181f --- /dev/null +++ b/helix-syntax/languages/tree-sitter-twig @@ -0,0 +1 @@ +Subproject commit b7444181fb38e603e25ea8fcdac55f9492e49c27 diff --git a/helix-syntax/languages/tree-sitter-zig b/helix-syntax/languages/tree-sitter-zig index 1f27fd1df..93331b8bd 160000 --- a/helix-syntax/languages/tree-sitter-zig +++ b/helix-syntax/languages/tree-sitter-zig @@ -1 +1 @@ -Subproject commit 1f27fd1dfe7f352408f01b4894c7825f3a1d6c47 +Subproject commit 93331b8bd8b4ebee2b575490b2758f16ad4e9f30 diff --git a/helix-term/Cargo.toml b/helix-term/Cargo.toml index 58c713cb5..e62496f29 100644 --- a/helix-term/Cargo.toml +++ b/helix-term/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "helix-term" -version = "0.5.0" +version = "0.6.0" description = "A post-modern text editor." 
authors = ["Blaž Hrastnik "] edition = "2021" @@ -22,9 +22,10 @@ name = "hx" path = "src/main.rs" [dependencies] -helix-core = { version = "0.5", path = "../helix-core" } -helix-view = { version = "0.5", path = "../helix-view" } -helix-lsp = { version = "0.5", path = "../helix-lsp" } +helix-core = { version = "0.6", path = "../helix-core" } +helix-view = { version = "0.6", path = "../helix-view" } +helix-lsp = { version = "0.6", path = "../helix-lsp" } +helix-dap = { version = "0.6", path = "../helix-dap" } anyhow = "1" once_cell = "1.9" @@ -32,9 +33,9 @@ once_cell = "1.9" tokio = { version = "1", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot"] } num_cpus = "1" tui = { path = "../helix-tui", package = "helix-tui", default-features = false, features = ["crossterm"] } -crossterm = { version = "0.22", features = ["event-stream"] } +crossterm = { version = "0.23", features = ["event-stream"] } signal-hook = "0.3" - +tokio-stream = "0.1" futures-util = { version = "0.3", features = ["std", "async-await"], default-features = false } # Logging @@ -46,7 +47,7 @@ log = "0.4" fuzzy-matcher = "0.3" ignore = "0.4" # markdown doc rendering -pulldown-cmark = { version = "0.8", default-features = false } +pulldown-cmark = { version = "0.9", default-features = false } # file type detection content_inspector = "0.2.4" @@ -59,7 +60,6 @@ serde = { version = "1.0", features = ["derive"] } # ripgrep for global search grep-regex = "0.1.9" grep-searcher = "0.1.8" -tokio-stream = "0.1.8" [target.'cfg(not(windows))'.dependencies] # https://github.com/vorner/signal-hook/issues/100 signal-hook-tokio = { version = "0.3", features = ["futures-v0_3"] } diff --git a/helix-term/build.rs b/helix-term/build.rs index 61ffa6f4f..21dd5612d 100644 --- a/helix-term/build.rs +++ b/helix-term/build.rs @@ -1,12 +1,17 @@ +use std::borrow::Cow; use std::process::Command; fn main() { let git_hash = Command::new("git") - .args(&["describe", "--dirty"]) + .args(&["rev-parse", "HEAD"]) .output() - .map(|x| String::from_utf8(x.stdout).ok()) .ok() - .flatten() - .unwrap_or_else(|| String::from(env!("CARGO_PKG_VERSION"))); - println!("cargo:rustc-env=VERSION_AND_GIT_HASH={}", git_hash); + .and_then(|x| String::from_utf8(x.stdout).ok()); + + let version: Cow<_> = match git_hash { + Some(git_hash) => format!("{} ({})", env!("CARGO_PKG_VERSION"), &git_hash[..8]).into(), + None => env!("CARGO_PKG_VERSION").into(), + }; + + println!("cargo:rustc-env=VERSION_AND_GIT_HASH={}", version); } diff --git a/helix-term/src/application.rs b/helix-term/src/application.rs index c7202feba..49eb08d0d 100644 --- a/helix-term/src/application.rs +++ b/helix-term/src/application.rs @@ -1,15 +1,22 @@ -use helix_core::{merge_toml_values, syntax}; +use helix_core::{ + config::{default_syntax_loader, user_syntax_loader}, + pos_at_coords, syntax, Selection, +}; +use helix_dap::{self as dap, Payload, Request}; use helix_lsp::{lsp, util::lsp_pos_to_pos, LspProgressMap}; -use helix_view::{theme, Editor}; +use helix_view::{editor::Breakpoint, theme, Editor}; use serde_json::json; use crate::{ - args::Args, commands::apply_workspace_edit, compositor::Compositor, config::Config, job::Jobs, - ui, + args::Args, + commands::{align_view, apply_workspace_edit, fetch_stack_trace, Align}, + compositor::Compositor, + config::Config, + job::Jobs, + ui::{self, overlay::overlayed}, }; use log::{error, warn}; - use std::{ io::{stdin, stdout, Write}, sync::Arc, @@ -65,21 +72,6 @@ impl Application { 
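// A stand-alone sketch of the version string assembled by the reworked
// helix-term/src/build.rs above: prefer "<crate version> (<first 8 chars of HEAD>)"
// from `git rev-parse HEAD`, falling back to the crate version alone when git
// output is unavailable. `version_string` and its length guard are illustrative
// additions, not the build script itself.
use std::borrow::Cow;
use std::process::Command;

fn version_string(pkg_version: &str) -> Cow<'_, str> {
    let git_hash = Command::new("git")
        .args(["rev-parse", "HEAD"])
        .output()
        .ok()
        .and_then(|out| String::from_utf8(out.stdout).ok());

    match git_hash {
        // Guard the slice so an empty or truncated git output falls back cleanly.
        Some(hash) if hash.len() >= 8 => format!("{} ({})", pkg_version, &hash[..8]).into(),
        _ => pkg_version.into(),
    }
}

fn main() {
    println!("{}", version_string("0.6.0"));
}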
std::sync::Arc::new(theme::Loader::new(&conf_dir, &helix_core::runtime_dir())); // load default and user config, and merge both - let builtin_err_msg = - "Could not parse built-in languages.toml, something must be very wrong"; - let def_lang_conf: toml::Value = - toml::from_slice(include_bytes!("../../languages.toml")).expect(builtin_err_msg); - let def_syn_loader_conf: helix_core::syntax::Configuration = - def_lang_conf.clone().try_into().expect(builtin_err_msg); - let user_lang_conf = std::fs::read(conf_dir.join("languages.toml")) - .ok() - .map(|raw| toml::from_slice(&raw)); - let lang_conf = match user_lang_conf { - Some(Ok(value)) => Ok(merge_toml_values(def_lang_conf, value)), - Some(err @ Err(_)) => err, - None => Ok(def_lang_conf), - }; - let true_color = config.editor.true_color || crate::true_color(); let theme = config .theme @@ -102,16 +94,14 @@ impl Application { } }); - let syn_loader_conf: helix_core::syntax::Configuration = lang_conf - .and_then(|conf| conf.try_into()) - .unwrap_or_else(|err| { - eprintln!("Bad language config: {}", err); - eprintln!("Press to continue with default language config"); - use std::io::Read; - // This waits for an enter press. - let _ = std::io::stdin().read(&mut []); - def_syn_loader_conf - }); + let syn_loader_conf = user_syntax_loader().unwrap_or_else(|err| { + eprintln!("Bad language config: {}", err); + eprintln!("Press to continue with default language config"); + use std::io::Read; + // This waits for an enter press. + let _ = std::io::stdin().read(&mut []); + default_syntax_loader() + }); let syn_loader = std::sync::Arc::new(syntax::Loader::new(syn_loader_conf)); let mut editor = Editor::new( @@ -130,24 +120,34 @@ impl Application { // Unset path to prevent accidentally saving to the original tutor file. doc_mut!(editor).set_path(None)?; } else if !args.files.is_empty() { - let first = &args.files[0]; // we know it's not empty + let first = &args.files[0].0; // we know it's not empty if first.is_dir() { std::env::set_current_dir(&first)?; editor.new_file(Action::VerticalSplit); - compositor.push(Box::new(ui::file_picker(".".into(), &config.editor))); + let picker = ui::file_picker(".".into(), &config.editor); + compositor.push(Box::new(overlayed(picker))); } else { let nr_of_files = args.files.len(); editor.open(first.to_path_buf(), Action::VerticalSplit)?; - for file in args.files { + for (file, pos) in args.files { if file.is_dir() { return Err(anyhow::anyhow!( "expected a path to file, found a directory. 
(to open a directory pass it as first argument)" )); } else { - editor.open(file.to_path_buf(), Action::Load)?; + let doc_id = editor.open(file, Action::Load)?; + // with Action::Load all documents have the same view + let view_id = editor.tree.focus; + let doc = editor.document_mut(doc_id).unwrap(); + let pos = Selection::point(pos_at_coords(doc.text().slice(..), pos, true)); + doc.set_selection(view_id, pos); } } editor.set_status(format!("Loaded {} files.", nr_of_files)); + // align the view to center after all files are loaded, + // does not affect views without pos since it is at the top + let (view, doc) = current!(editor); + align_view(doc, view, Align::Center); } } else if stdin().is_tty() { editor.new_file(Action::VerticalSplit); @@ -209,7 +209,6 @@ impl Application { loop { if self.editor.should_close() { - self.jobs.finish(); break; } @@ -233,6 +232,9 @@ impl Application { last_render = Instant::now(); } } + Some(payload) = self.editor.debugger_events.next() => { + self.handle_debugger_message(payload).await; + } Some(callback) = self.jobs.futures.next() => { self.jobs.handle_callback(&mut self.editor, &mut self.compositor, callback); self.render(); @@ -327,6 +329,185 @@ impl Application { } } + pub async fn handle_debugger_message(&mut self, payload: helix_dap::Payload) { + use crate::commands::dap::{breakpoints_changed, select_thread_id}; + use dap::requests::RunInTerminal; + use helix_dap::{events, Event}; + + let debugger = match self.editor.debugger.as_mut() { + Some(debugger) => debugger, + None => return, + }; + match payload { + Payload::Event(ev) => match *ev { + Event::Stopped(events::Stopped { + thread_id, + description, + text, + reason, + all_threads_stopped, + .. + }) => { + let all_threads_stopped = all_threads_stopped.unwrap_or_default(); + + if all_threads_stopped { + if let Ok(response) = debugger.request::(()).await { + for thread in response.threads { + fetch_stack_trace(debugger, thread.id).await; + } + select_thread_id( + &mut self.editor, + thread_id.unwrap_or_default(), + false, + ) + .await; + } + } else if let Some(thread_id) = thread_id { + debugger.thread_states.insert(thread_id, reason.clone()); // TODO: dap uses "type" || "reason" here + + // whichever thread stops is made "current" (if no previously selected thread). + select_thread_id(&mut self.editor, thread_id, false).await; + } + + let scope = match thread_id { + Some(id) => format!("Thread {}", id), + None => "Target".to_owned(), + }; + + let mut status = format!("{} stopped because of {}", scope, reason); + if let Some(desc) = description { + status.push_str(&format!(" {}", desc)); + } + if let Some(text) = text { + status.push_str(&format!(" {}", text)); + } + if all_threads_stopped { + status.push_str(" (all threads stopped)"); + } + + self.editor.set_status(status); + } + Event::Continued(events::Continued { thread_id, .. }) => { + debugger + .thread_states + .insert(thread_id, "running".to_owned()); + if debugger.thread_id == Some(thread_id) { + debugger.resume_application(); + } + } + Event::Thread(_) => { + // TODO: update thread_states, make threads request + } + Event::Breakpoint(events::Breakpoint { reason, breakpoint }) => { + match &reason[..] 
{ + "new" => { + if let Some(source) = breakpoint.source { + self.editor + .breakpoints + .entry(source.path.unwrap()) // TODO: no unwraps + .or_default() + .push(Breakpoint { + id: breakpoint.id, + verified: breakpoint.verified, + message: breakpoint.message, + line: breakpoint.line.unwrap().saturating_sub(1), // TODO: no unwrap + column: breakpoint.column, + ..Default::default() + }); + } + } + "changed" => { + for breakpoints in self.editor.breakpoints.values_mut() { + if let Some(i) = + breakpoints.iter().position(|b| b.id == breakpoint.id) + { + breakpoints[i].verified = breakpoint.verified; + breakpoints[i].message = breakpoint.message.clone(); + breakpoints[i].line = + breakpoint.line.unwrap().saturating_sub(1); // TODO: no unwrap + breakpoints[i].column = breakpoint.column; + } + } + } + "removed" => { + for breakpoints in self.editor.breakpoints.values_mut() { + if let Some(i) = + breakpoints.iter().position(|b| b.id == breakpoint.id) + { + breakpoints.remove(i); + } + } + } + reason => { + warn!("Unknown breakpoint event: {}", reason); + } + } + } + Event::Output(events::Output { + category, output, .. + }) => { + let prefix = match category { + Some(category) => { + if &category == "telemetry" { + return; + } + format!("Debug ({}):", category) + } + None => "Debug:".to_owned(), + }; + + log::info!("{}", output); + self.editor.set_status(format!("{} {}", prefix, output)); + } + Event::Initialized => { + // send existing breakpoints + for (path, breakpoints) in &mut self.editor.breakpoints { + // TODO: call futures in parallel, await all + let _ = breakpoints_changed(debugger, path.clone(), breakpoints); + } + // TODO: fetch breakpoints (in case we're attaching) + + if debugger.configuration_done().await.is_ok() { + self.editor.set_status("Debugged application started"); + }; // TODO: do we need to handle error? 
+ } + ev => { + log::warn!("Unhandled event {:?}", ev); + return; // return early to skip render + } + }, + Payload::Response(_) => unreachable!(), + Payload::Request(request) => match request.command.as_str() { + RunInTerminal::COMMAND => { + let arguments: dap::requests::RunInTerminalArguments = + serde_json::from_value(request.arguments.unwrap_or_default()).unwrap(); + // TODO: no unwrap + + // TODO: handle cwd + let process = std::process::Command::new("tmux") + .arg("split-window") + .arg(arguments.args.join(" ")) // TODO: first arg is wrong, it uses current dir + .spawn() + .unwrap(); + + let _ = debugger + .reply( + request.seq, + dap::requests::RunInTerminal::COMMAND, + serde_json::to_value(dap::requests::RunInTerminalResponse { + process_id: Some(process.id()), + shell_process_id: None, + }) + .map_err(|e| e.into()), + ) + .await; + } + _ => log::error!("DAP reverse request not implemented: {:?}", request), + }, + } + self.render(); + } + pub async fn handle_language_server_message( &mut self, call: helix_lsp::Call, @@ -358,12 +539,8 @@ impl Application { // trigger textDocument/didOpen for docs that are already open for doc in docs { - // TODO: extract and share with editor.open - let language_id = doc - .language() - .and_then(|s| s.split('.').last()) // source.rust - .map(ToOwned::to_owned) - .unwrap_or_default(); + let language_id = + doc.language_id().map(ToOwned::to_owned).unwrap_or_default(); tokio::spawn(language_server.text_document_did_open( doc.url().unwrap(), @@ -657,6 +834,8 @@ impl Application { self.event_loop().await; + self.jobs.finish().await; + if self.editor.close_language_servers(None).await.is_err() { log::error!("Timed out waiting for language servers to shutdown"); }; diff --git a/helix-term/src/args.rs b/helix-term/src/args.rs index 40113db92..247d5b320 100644 --- a/helix-term/src/args.rs +++ b/helix-term/src/args.rs @@ -1,5 +1,6 @@ use anyhow::{Error, Result}; -use std::path::PathBuf; +use helix_core::Position; +use std::path::{Path, PathBuf}; #[derive(Default)] pub struct Args { @@ -7,7 +8,7 @@ pub struct Args { pub display_version: bool, pub load_tutor: bool, pub verbosity: u64, - pub files: Vec, + pub files: Vec<(PathBuf, Position)>, } impl Args { @@ -41,15 +42,49 @@ impl Args { } } } - arg => args.files.push(PathBuf::from(arg)), + arg => args.files.push(parse_file(arg)), } } // push the remaining args, if any to the files - for filename in iter { - args.files.push(PathBuf::from(filename)); + for arg in iter { + args.files.push(parse_file(arg)); } Ok(args) } } + +/// Parse arg into [`PathBuf`] and position. +pub(crate) fn parse_file(s: &str) -> (PathBuf, Position) { + let def = || (PathBuf::from(s), Position::default()); + if Path::new(s).exists() { + return def(); + } + split_path_row_col(s) + .or_else(|| split_path_row(s)) + .unwrap_or_else(def) +} + +/// Split file.rs:10:2 into [`PathBuf`], row and col. +/// +/// Does not validate if file.rs is a file or directory. +fn split_path_row_col(s: &str) -> Option<(PathBuf, Position)> { + let mut s = s.rsplitn(3, ':'); + let col: usize = s.next()?.parse().ok()?; + let row: usize = s.next()?.parse().ok()?; + let path = s.next()?.into(); + let pos = Position::new(row.saturating_sub(1), col.saturating_sub(1)); + Some((path, pos)) +} + +/// Split file.rs:10 into [`PathBuf`] and row. +/// +/// Does not validate if file.rs is a file or directory. 
+fn split_path_row(s: &str) -> Option<(PathBuf, Position)> { + let (row, path) = s.rsplit_once(':')?; + let row: usize = row.parse().ok()?; + let path = path.into(); + let pos = Position::new(row.saturating_sub(1), 0); + Some((path, pos)) +} diff --git a/helix-term/src/commands.rs b/helix-term/src/commands.rs index f815b428a..ecf954b26 100644 --- a/helix-term/src/commands.rs +++ b/helix-term/src/commands.rs @@ -1,3 +1,7 @@ +pub(crate) mod dap; + +pub use dap::*; + use helix_core::{ comment, coords_at_pos, find_first_non_whitespace_char, find_root, graphemes, history::UndoKind, @@ -11,6 +15,7 @@ use helix_core::{ object, pos_at_coords, regex::{self, Regex, RegexBuilder}, search, selection, shellwords, surround, textobject, + tree_sitter::Node, unicode::width::UnicodeWidthChar, LineEnding, Position, Range, Rope, RopeGraphemes, RopeSlice, Selection, SmallVec, Tendril, Transaction, @@ -19,6 +24,7 @@ use helix_view::{ clipboard::ClipboardType, document::{Mode, SCRATCH_BUFFER_NAME}, editor::{Action, Motion}, + info::Info, input::KeyEvent, keyboard::KeyCode, view::View, @@ -36,14 +42,15 @@ use insert::*; use movement::Movement; use crate::{ + args, compositor::{self, Component, Compositor}, - ui::{self, FilePicker, Picker, Popup, Prompt, PromptEvent}, + ui::{self, overlay::overlayed, FilePicker, Popup, Prompt, PromptEvent}, }; use crate::job::{self, Job, Jobs}; use futures_util::{FutureExt, StreamExt}; +use std::{collections::HashMap, fmt, future::Future}; use std::{collections::HashSet, num::NonZeroUsize}; -use std::{fmt, future::Future}; use std::{ borrow::Cow, @@ -112,13 +119,13 @@ impl<'a> Context<'a> { } } -enum Align { +pub enum Align { Top, Center, Bottom, } -fn align_view(doc: &Document, view: &mut View, align: Align) { +pub fn align_view(doc: &Document, view: &mut View, align: Align) { let pos = doc .selection(view.id) .primary() @@ -173,7 +180,7 @@ macro_rules! static_commands { impl MappableCommand { pub fn execute(&self, cx: &mut Context) { match &self { - MappableCommand::Typable { name, args, doc: _ } => { + Self::Typable { name, args, doc: _ } => { let args: Vec> = args.iter().map(Cow::from).collect(); if let Some(command) = cmd::TYPABLE_COMMAND_MAP.get(name.as_str()) { let mut cx = compositor::Context { @@ -186,21 +193,21 @@ impl MappableCommand { } } } - MappableCommand::Static { fun, .. } => (fun)(cx), + Self::Static { fun, .. } => (fun)(cx), } } pub fn name(&self) -> &str { match &self { - MappableCommand::Typable { name, .. } => name, - MappableCommand::Static { name, .. } => name, + Self::Typable { name, .. } => name, + Self::Static { name, .. } => name, } } pub fn doc(&self) -> &str { match &self { - MappableCommand::Typable { doc, .. } => doc, - MappableCommand::Static { doc, .. } => doc, + Self::Typable { doc, .. } => doc, + Self::Static { doc, .. 
} => doc, } } @@ -362,6 +369,9 @@ impl MappableCommand { rotate_selection_contents_forward, "Rotate selection contents forward", rotate_selection_contents_backward, "Rotate selections contents backward", expand_selection, "Expand selection to parent syntax node", + shrink_selection, "Shrink selection to previously expanded syntax node", + select_next_sibling, "Select the next sibling in the syntax tree", + select_prev_sibling, "Select the previous sibling in the syntax tree", jump_forward, "Jump forward on jumplist", jump_backward, "Jump backward on jumplist", save_selection, "Save the current selection to the jumplist", @@ -388,6 +398,27 @@ impl MappableCommand { surround_delete, "Surround delete", select_textobject_around, "Select around object", select_textobject_inner, "Select inside object", + goto_next_function, "Goto next function", + goto_prev_function, "Goto previous function", + goto_next_class, "Goto next class", + goto_prev_class, "Goto previous class", + goto_next_parameter, "Goto next parameter", + goto_prev_parameter, "Goto previous parameter", + dap_launch, "Launch debug target", + dap_toggle_breakpoint, "Toggle breakpoint", + dap_continue, "Continue program execution", + dap_pause, "Pause program execution", + dap_step_in, "Step in", + dap_step_out, "Step out", + dap_next, "Step to next", + dap_variables, "List variables", + dap_terminate, "End debug session", + dap_edit_condition, "Edit condition of the breakpoint on the current line", + dap_edit_log, "Edit log message of the breakpoint on the current line", + dap_switch_thread, "Switch current thread", + dap_switch_stack_frame, "Switch stack frame", + dap_enable_exceptions, "Enable exception breakpoints", + dap_disable_exceptions, "Disable exception breakpoints", shell_pipe, "Pipe selections through shell command", shell_pipe_to, "Pipe selections into shell command, ignoring command output", shell_insert_output, "Insert output of shell command before each selection", @@ -439,8 +470,8 @@ impl std::str::FromStr for MappableCommand { } else { MappableCommand::STATIC_COMMAND_LIST .iter() - .cloned() .find(|cmd| cmd.name() == s) + .cloned() .ok_or_else(|| anyhow!("No command named '{}'", s)) } } @@ -745,9 +776,8 @@ fn trim_selections(cx: &mut Context) { fn align_selections(cx: &mut Context) { let align_style = cx.count(); if align_style > 3 { - cx.editor.set_error( - "align only accept 1,2,3 as count to set left/center/right align".to_string(), - ); + cx.editor + .set_error("align only accept 1,2,3 as count to set left/center/right align"); return; } @@ -762,7 +792,7 @@ fn align_selections(cx: &mut Context) { let (l1, l2) = sel.line_range(text); if l1 != l2 { cx.editor - .set_error("align cannot work with multi line selections".to_string()); + .set_error("align cannot work with multi line selections"); return; } // if the selection is not in the same line with last selection, we set the column to 0 @@ -796,7 +826,6 @@ fn align_selections(cx: &mut Context) { }); doc.apply(&transaction, view.id); - doc.append_changes_to_history(view.id); } fn align_fragment_to_width(fragment: &str, width: usize, align_style: usize) -> String { @@ -1199,7 +1228,6 @@ fn replace(cx: &mut Context) { }); doc.apply(&transaction, view.id); - doc.append_changes_to_history(view.id); } }) } @@ -1217,7 +1245,6 @@ where }); doc.apply(&transaction, view.id); - doc.append_changes_to_history(view.id); } fn switch_case(cx: &mut Context) { @@ -1456,6 +1483,7 @@ fn split_selection_on_newline(cx: &mut Context) { doc.set_selection(view.id, selection); } 
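// A stand-alone restatement of the `file.rs:10:2` argument parsing introduced in
// helix-term/src/args.rs earlier in this diff, kept free of helix types: split
// from the right, convert 1-based row/column input to 0-based values, and fall
// back to "no position" when nothing parses. The real parse_file additionally
// returns early when the literal string already names an existing path, so file
// names containing ':' are not mangled. `parse_file_arg` is an illustrative name,
// not the helix function.
use std::path::PathBuf;

fn parse_file_arg(s: &str) -> (PathBuf, usize, usize) {
    fn row_col(s: &str) -> Option<(PathBuf, usize, usize)> {
        let mut it = s.rsplitn(3, ':');
        let col: usize = it.next()?.parse().ok()?;
        let row: usize = it.next()?.parse().ok()?;
        let path = it.next()?.into();
        Some((path, row.saturating_sub(1), col.saturating_sub(1)))
    }
    fn row_only(s: &str) -> Option<(PathBuf, usize, usize)> {
        let (path, row) = s.rsplit_once(':')?;
        let row: usize = row.parse().ok()?;
        Some((path.into(), row.saturating_sub(1), 0))
    }
    row_col(s)
        .or_else(|| row_only(s))
        .unwrap_or_else(|| (PathBuf::from(s), 0, 0))
}

fn main() {
    assert_eq!(parse_file_arg("src/main.rs:10:2").1, 9);
    assert_eq!(parse_file_arg("src/main.rs:10").2, 0);
    assert_eq!(parse_file_arg("src/main.rs").0, PathBuf::from("src/main.rs"));
}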
+#[allow(clippy::too_many_arguments)] fn search_impl( doc: &mut Document, view: &mut View, @@ -1464,6 +1492,7 @@ fn search_impl( movement: Movement, direction: Direction, scrolloff: usize, + wrap_around: bool, ) { let text = doc.text().slice(..); let selection = doc.selection(view.id); @@ -1489,16 +1518,22 @@ fn search_impl( // use find_at to find the next match after the cursor, loop around the end // Careful, `Regex` uses `bytes` as offsets, not character indices! - let mat = match direction { - Direction::Forward => regex - .find_at(contents, start) - .or_else(|| regex.find(contents)), - Direction::Backward => regex.find_iter(&contents[..start]).last().or_else(|| { - offset = start; - regex.find_iter(&contents[start..]).last() - }), + let mut mat = match direction { + Direction::Forward => regex.find_at(contents, start), + Direction::Backward => regex.find_iter(&contents[..start]).last(), }; - // TODO: message on wraparound + + if wrap_around && mat.is_none() { + mat = match direction { + Direction::Forward => regex.find(contents), + Direction::Backward => { + offset = start; + regex.find_iter(&contents[start..]).last() + } + } + // TODO: message on wraparound + } + if let Some(mat) = mat { let start = text.byte_to_char(mat.start() + offset); let end = text.byte_to_char(mat.end() + offset); @@ -1539,7 +1574,6 @@ fn search_completions(cx: &mut Context, reg: Option) -> Vec { items.into_iter().cloned().collect() } -// TODO: use one function for search vs extend fn search(cx: &mut Context) { searcher(cx, Direction::Forward) } @@ -1547,10 +1581,11 @@ fn search(cx: &mut Context) { fn rsearch(cx: &mut Context) { searcher(cx, Direction::Backward) } -// TODO: use one function for search vs extend + fn searcher(cx: &mut Context, direction: Direction) { let reg = cx.register.unwrap_or('/'); let scrolloff = cx.editor.config.scrolloff; + let wrap_around = cx.editor.config.search.wrap_around; let doc = doc!(cx.editor); @@ -1584,6 +1619,7 @@ fn searcher(cx: &mut Context, direction: Direction) { Movement::Move, direction, scrolloff, + wrap_around, ); }, ); @@ -1598,16 +1634,27 @@ fn search_next_or_prev_impl(cx: &mut Context, movement: Movement, direction: Dir if let Some(query) = registers.read('/') { let query = query.last().unwrap(); let contents = doc.text().slice(..).to_string(); - let case_insensitive = if cx.editor.config.smart_case { + let search_config = &cx.editor.config.search; + let case_insensitive = if search_config.smart_case { !query.chars().any(char::is_uppercase) } else { false }; + let wrap_around = search_config.wrap_around; if let Ok(regex) = RegexBuilder::new(query) .case_insensitive(case_insensitive) .build() { - search_impl(doc, view, &contents, ®ex, movement, direction, scrolloff); + search_impl( + doc, + view, + &contents, + ®ex, + movement, + direction, + scrolloff, + wrap_around, + ); } else { // get around warning `mutable_borrow_reservation_conflict` // which will be a hard error in the future @@ -1639,14 +1686,14 @@ fn search_selection(cx: &mut Context) { let query = doc.selection(view.id).primary().fragment(contents); let regex = regex::escape(&query); cx.editor.registers.get_mut('/').push(regex); - let msg = format!("register '{}' set to '{}'", '\\', query); + let msg = format!("register '{}' set to '{}'", '/', query); cx.editor.set_status(msg); } fn global_search(cx: &mut Context) { let (all_matches_sx, all_matches_rx) = tokio::sync::mpsc::unbounded_channel::<(usize, PathBuf)>(); - let smart_case = cx.editor.config.smart_case; + let smart_case = 
cx.editor.config.search.smart_case; let file_picker_config = cx.editor.config.file_picker.clone(); let completions = search_completions(cx, None); @@ -1742,7 +1789,7 @@ fn global_search(cx: &mut Context) { let call: job::Callback = Box::new(move |editor: &mut Editor, compositor: &mut Compositor| { if all_matches.is_empty() { - editor.set_status("No matches found".to_string()); + editor.set_status("No matches found"); return; } @@ -1750,20 +1797,19 @@ fn global_search(cx: &mut Context) { all_matches, move |(_line_num, path)| { let relative_path = helix_core::path::get_relative_path(path) - .to_str() - .unwrap() - .to_owned(); + .to_string_lossy() + .into_owned(); if current_path.as_ref().map(|p| p == path).unwrap_or(false) { format!("{} (*)", relative_path).into() } else { relative_path.into() } }, - move |editor: &mut Editor, (line_num, path), action| { - match editor.open(path.into(), action) { + move |cx, (line_num, path), action| { + match cx.editor.open(path.into(), action) { Ok(_) => {} Err(e) => { - editor.set_error(format!( + cx.editor.set_error(format!( "Failed to open file '{}': {}", path.display(), e @@ -1773,7 +1819,7 @@ fn global_search(cx: &mut Context) { } let line_num = *line_num; - let (view, doc) = current!(editor); + let (view, doc) = current!(cx.editor); let text = doc.text(); let start = text.line_to_char(line_num); let end = text.line_to_char((line_num + 1).min(text.len_lines())); @@ -1783,7 +1829,7 @@ fn global_search(cx: &mut Context) { }, |_editor, (line_num, path)| Some((path.clone(), Some((*line_num, *line_num)))), ); - compositor.push(Box::new(picker)); + compositor.push(Box::new(overlayed(picker))); }); Ok(call) }; @@ -1857,7 +1903,6 @@ fn delete_selection_impl(cx: &mut Context, op: Operation) { match op { Operation::Delete => { - doc.append_changes_to_history(view.id); // exit select mode, if currently in select mode exit_select_mode(cx); } @@ -1977,7 +2022,6 @@ fn append_mode(cx: &mut Context) { pub mod cmd { use super::*; - use std::collections::HashMap; use helix_view::editor::Action; use ui::completers::{self, Completer}; @@ -2024,7 +2068,13 @@ pub mod cmd { ) -> anyhow::Result<()> { ensure!(!args.is_empty(), "wrong argument count"); for arg in args { - let _ = cx.editor.open(arg.as_ref().into(), Action::Replace)?; + let (path, pos) = args::parse_file(arg); + let _ = cx.editor.open(path, Action::Replace)?; + let (view, doc) = current!(cx.editor); + let pos = Selection::point(pos_at_coords(doc.text().slice(..), pos, true)); + doc.set_selection(view.id, pos); + // does not affect opening a buffer without pos + align_view(doc, view, Align::Center); } Ok(()) } @@ -2126,10 +2176,10 @@ pub mod cmd { if args.is_empty() { let style = doc!(cx.editor).indent_style; cx.editor.set_status(match style { - Tabs => "tabs".into(), - Spaces(1) => "1 space".into(), + Tabs => "tabs".to_owned(), + Spaces(1) => "1 space".to_owned(), Spaces(n) if (2..=8).contains(&n) => format!("{} spaces", n), - _ => "error".into(), // Shouldn't happen. + _ => unreachable!(), // Shouldn't happen. }); return Ok(()); } @@ -2165,14 +2215,14 @@ pub mod cmd { if args.is_empty() { let line_ending = doc!(cx.editor).line_ending; cx.editor.set_status(match line_ending { - Crlf => "crlf".into(), - LF => "line feed".into(), - FF => "form feed".into(), - CR => "carriage return".into(), - Nel => "next line".into(), + Crlf => "crlf", + LF => "line feed", + FF => "form feed", + CR => "carriage return", + Nel => "next line", // These should never be a document's default line ending. 
- VT | LS | PS => "error".into(), + VT | LS | PS => "error", }); return Ok(()); @@ -2208,7 +2258,7 @@ pub mod cmd { let (view, doc) = current!(cx.editor); let success = doc.earlier(view.id, uk); if !success { - cx.editor.set_status("Already at oldest change".to_owned()); + cx.editor.set_status("Already at oldest change"); } Ok(()) @@ -2223,7 +2273,7 @@ pub mod cmd { let (view, doc) = current!(cx.editor); let success = doc.later(view.id, uk); if !success { - cx.editor.set_status("Already at newest change".to_owned()); + cx.editor.set_status("Already at newest change"); } Ok(()) @@ -2277,7 +2327,7 @@ pub mod cmd { force: bool, ) -> anyhow::Result<()> { let mut errors = String::new(); - + let jobs = &mut cx.jobs; // save all documents for doc in &mut cx.editor.documents.values_mut() { if doc.path().is_none() { @@ -2285,9 +2335,23 @@ pub mod cmd { continue; } - // TODO: handle error. - let handle = doc.save(); - cx.jobs.add(Job::new(handle).wait_before_exiting()); + if !doc.is_modified() { + continue; + } + + let fmt = doc.auto_format().map(|fmt| { + let shared = fmt.shared(); + let callback = make_format_callback( + doc.id(), + doc.version(), + Modified::SetUnmodified, + shared.clone(), + ); + jobs.callback(callback); + shared + }); + let future = doc.format_and_save(fmt); + jobs.add(Job::new(future).wait_before_exiting()); } if quit { @@ -2569,7 +2633,7 @@ pub mod cmd { if let Some(label) = args.first() { doc.set_encoding(label) } else { - let encoding = doc.encoding().name().to_string(); + let encoding = doc.encoding().name().to_owned(); cx.editor.set_status(encoding); Ok(()) } @@ -2637,6 +2701,58 @@ pub mod cmd { Ok(()) } + fn debug_eval( + cx: &mut compositor::Context, + args: &[Cow], + _event: PromptEvent, + ) -> anyhow::Result<()> { + if let Some(debugger) = cx.editor.debugger.as_mut() { + let (frame, thread_id) = match (debugger.active_frame, debugger.thread_id) { + (Some(frame), Some(thread_id)) => (frame, thread_id), + _ => { + bail!("Cannot find current stack frame to access variables") + } + }; + + // TODO: support no frame_id + + let frame_id = debugger.stack_frames[&thread_id][frame].id; + let response = block_on(debugger.eval(args.join(" "), Some(frame_id)))?; + cx.editor.set_status(response.result); + } + Ok(()) + } + + fn debug_start( + cx: &mut compositor::Context, + args: &[Cow], + _event: PromptEvent, + ) -> anyhow::Result<()> { + let mut args = args.to_owned(); + let name = match args.len() { + 0 => None, + _ => Some(args.remove(0)), + }; + dap_start_impl(cx, name.as_deref(), None, Some(args)) + } + + fn debug_remote( + cx: &mut compositor::Context, + args: &[Cow], + _event: PromptEvent, + ) -> anyhow::Result<()> { + let mut args = args.to_owned(); + let address = match args.len() { + 0 => None, + _ => Some(args.remove(0).parse()?), + }; + let name = match args.len() { + 0 => None, + _ => Some(args.remove(0)), + }; + dap_start_impl(cx, name.as_deref(), address, Some(args)) + } + fn tutor( cx: &mut compositor::Context, _args: &[Cow], @@ -2685,12 +2801,13 @@ pub mod cmd { "mouse" => runtime_config.mouse = arg.parse()?, "line-number" => runtime_config.line_number = arg.parse()?, "middle-click_paste" => runtime_config.middle_click_paste = arg.parse()?, - "smart-case" => runtime_config.smart_case = arg.parse()?, "auto-pairs" => runtime_config.auto_pairs = arg.parse()?, "auto-completion" => runtime_config.auto_completion = arg.parse()?, "completion-trigger-len" => runtime_config.completion_trigger_len = arg.parse()?, "auto-info" => runtime_config.auto_info = arg.parse()?, 
"true-color" => runtime_config.true_color = arg.parse()?, + "search.smart-case" => runtime_config.search.smart_case = arg.parse()?, + "search.wrap-around" => runtime_config.search.wrap_around = arg.parse()?, _ => anyhow::bail!("Unknown key `{}`.", args[0]), } @@ -2725,7 +2842,7 @@ pub mod cmd { let mut fragments: Vec<_> = selection .fragments(text) - .map(|fragment| Tendril::from_slice(&fragment)) + .map(|fragment| Tendril::from(fragment.as_ref())) .collect(); fragments.sort_by(match reverse { @@ -2747,6 +2864,42 @@ pub mod cmd { Ok(()) } + fn tree_sitter_subtree( + cx: &mut compositor::Context, + _args: &[Cow], + _event: PromptEvent, + ) -> anyhow::Result<()> { + let (view, doc) = current!(cx.editor); + + if let Some(syntax) = doc.syntax() { + let primary_selection = doc.selection(view.id).primary(); + let text = doc.text(); + let from = text.char_to_byte(primary_selection.from()); + let to = text.char_to_byte(primary_selection.to()); + if let Some(selected_node) = syntax + .tree() + .root_node() + .descendant_for_byte_range(from, to) + { + let contents = format!("```tsq\n{}\n```", selected_node.to_sexp()); + + let callback = async move { + let call: job::Callback = + Box::new(move |editor: &mut Editor, compositor: &mut Compositor| { + let contents = ui::Markdown::new(contents, editor.syn_loader.clone()); + let popup = Popup::new("hover", contents); + compositor.replace_or_push("hover", Box::new(popup)); + }); + Ok(call) + }; + + cx.jobs.callback(callback); + } + } + + Ok(()) + } + fn help( cx: &mut compositor::Context, args: &[Cow], @@ -2782,18 +2935,19 @@ pub mod cmd { .map(From::from) .unwrap_or_default() }, - |editor, path, _action| { - if let Err(e) = editor + |cx, path, _action| { + if let Err(e) = cx + .editor .open(path.clone(), Action::HorizontalSplit) .and_then(|id| { - editor + cx.editor .document_mut(id) .unwrap() .set_path(None) .map_err(Into::into) }) { - editor.set_error(e.to_string()); + cx.editor.set_error(e.to_string()); } }, |_editor, path| Some((path.clone(), None)), @@ -3153,6 +3307,27 @@ pub mod cmd { fun: tree_sitter_scopes, completer: None, }, + TypableCommand { + name: "debug-start", + aliases: &["dbg"], + doc: "Start a debug session from a given template with given parameters.", + fun: debug_start, + completer: None, + }, + TypableCommand { + name: "debug-remote", + aliases: &["dbg-tcp"], + doc: "Connect to a debug adapter by TCP address and start a debugging session from a given template with given parameters.", + fun: debug_remote, + completer: None, + }, + TypableCommand { + name: "debug-eval", + aliases: &[], + doc: "Evaluate expression in current debug context.", + fun: debug_eval, + completer: None, + }, TypableCommand { name: "vsplit", aliases: &["vs"], @@ -3202,6 +3377,13 @@ pub mod cmd { fun: sort_reverse, completer: None, }, + TypableCommand { + name: "tree-sitter-subtree", + aliases: &["ts-subtree"], + doc: "Display tree sitter subtree under cursor, primarily for debugging queries.", + fun: tree_sitter_subtree, + completer: None, + }, TypableCommand { name: "help", aliases: &["h"], @@ -3293,7 +3475,16 @@ fn command_mode(cx: &mut Context) { // Handle typable commands if let Some(cmd) = cmd::TYPABLE_COMMAND_MAP.get(parts[0]) { - let args = shellwords::shellwords(input); + let args = if cfg!(unix) { + shellwords::shellwords(input) + } else { + // Windows doesn't support POSIX, so fallback for now + parts + .into_iter() + .map(|part| part.into()) + .collect::>() + }; + if let Err(e) = (cmd.fun)(cx, &args[1..], event) { cx.editor.set_error(format!("{}", 
e)); } @@ -3320,7 +3511,7 @@ fn file_picker(cx: &mut Context) { // We don't specify language markers, root will be the root of the current git repo let root = find_root(None, &[]).unwrap_or_else(|| PathBuf::from("./")); let picker = ui::file_picker(root, &cx.editor.config); - cx.push_layer(Box::new(picker)); + cx.push_layer(Box::new(overlayed(picker))); } fn buffer_picker(cx: &mut Context) { @@ -3375,8 +3566,8 @@ fn buffer_picker(cx: &mut Context) { .map(|(_, doc)| new_meta(doc)) .collect(), BufferMeta::format, - |editor: &mut Editor, meta, _action| { - editor.switch(meta.id, Action::Replace); + |cx, meta, action| { + cx.editor.switch(meta.id, action); }, |editor, meta| { let doc = &editor.documents.get(&meta.id)?; @@ -3388,7 +3579,7 @@ fn buffer_picker(cx: &mut Context) { Some((meta.path.clone()?, Some((line, line)))) }, ); - cx.push_layer(Box::new(picker)); + cx.push_layer(Box::new(overlayed(picker))); } fn symbol_picker(cx: &mut Context) { @@ -3443,9 +3634,9 @@ fn symbol_picker(cx: &mut Context) { let mut picker = FilePicker::new( symbols, |symbol| (&symbol.name).into(), - move |editor: &mut Editor, symbol, _action| { - push_jump(editor); - let (view, doc) = current!(editor); + move |cx, symbol, _action| { + push_jump(cx.editor); + let (view, doc) = current!(cx.editor); if let Some(range) = lsp_range_to_range(doc.text(), symbol.location.range, offset_encoding) @@ -3466,7 +3657,7 @@ fn symbol_picker(cx: &mut Context) { }, ); picker.truncate_start = false; - compositor.push(Box::new(picker)) + compositor.push(Box::new(overlayed(picker))) } }, ) @@ -3496,16 +3687,15 @@ fn workspace_symbol_picker(cx: &mut Context) { (&symbol.name).into() } else { let relative_path = helix_core::path::get_relative_path(path.as_path()) - .to_str() - .unwrap() - .to_owned(); + .to_string_lossy() + .into_owned(); format!("{} ({})", &symbol.name, relative_path).into() } }, - move |editor: &mut Editor, symbol, action| { + move |cx, symbol, action| { let path = symbol.location.uri.to_file_path().unwrap(); - editor.open(path, action).expect("editor.open failed"); - let (view, doc) = current!(editor); + cx.editor.open(path, action).expect("editor.open failed"); + let (view, doc) = current!(cx.editor); if let Some(range) = lsp_range_to_range(doc.text(), symbol.location.range, offset_encoding) @@ -3526,12 +3716,21 @@ fn workspace_symbol_picker(cx: &mut Context) { }, ); picker.truncate_start = false; - compositor.push(Box::new(picker)) + compositor.push(Box::new(overlayed(picker))) } }, ) } +impl ui::menu::Item for lsp::CodeActionOrCommand { + fn label(&self) -> &str { + match self { + lsp::CodeActionOrCommand::CodeAction(action) => action.title.as_str(), + lsp::CodeActionOrCommand::Command(command) => command.title.as_str(), + } + } +} + pub fn code_action(cx: &mut Context) { let (view, doc) = current!(cx.editor); @@ -3551,41 +3750,53 @@ pub fn code_action(cx: &mut Context) { cx.callback( future, - move |_editor: &mut Editor, + move |editor: &mut Editor, compositor: &mut Compositor, response: Option| { - if let Some(actions) = response { - let picker = Picker::new( - true, - actions, - |action| match action { - lsp::CodeActionOrCommand::CodeAction(action) => { - action.title.as_str().into() + let actions = match response { + Some(a) => a, + None => return, + }; + if actions.is_empty() { + editor.set_status("No code actions available"); + return; + } + + let mut picker = ui::Menu::new(actions, move |editor, code_action, event| { + if event != PromptEvent::Validate { + return; + } + + // always present here + let 
code_action = code_action.unwrap(); + + match code_action { + lsp::CodeActionOrCommand::Command(command) => { + log::debug!("code action command: {:?}", command); + execute_lsp_command(editor, command.clone()); + } + lsp::CodeActionOrCommand::CodeAction(code_action) => { + log::debug!("code action: {:?}", code_action); + if let Some(ref workspace_edit) = code_action.edit { + log::debug!("edit: {:?}", workspace_edit); + apply_workspace_edit(editor, offset_encoding, workspace_edit); } - lsp::CodeActionOrCommand::Command(command) => command.title.as_str().into(), - }, - move |editor, code_action, _action| match code_action { - lsp::CodeActionOrCommand::Command(command) => { - log::debug!("code action command: {:?}", command); + + // if code action provides both edit and command first the edit + // should be applied and then the command + if let Some(command) = &code_action.command { execute_lsp_command(editor, command.clone()); } - lsp::CodeActionOrCommand::CodeAction(code_action) => { - log::debug!("code action: {:?}", code_action); - if let Some(ref workspace_edit) = code_action.edit { - log::debug!("edit: {:?}", workspace_edit); - apply_workspace_edit(editor, offset_encoding, workspace_edit); - } + } + } + }); + picker.move_down(); // pre-select the first item - // if code action provides both edit and command first the edit - // should be applied and then the command - if let Some(command) = &code_action.command { - execute_lsp_command(editor, command.clone()); - } - } - }, - ); - compositor.push(Box::new(picker)) - } + let popup = Popup::new("code-action", picker).margin(helix_view::graphics::Margin { + vertical: 1, + horizontal: 1, + }); + compositor.replace_or_push("code-action", Box::new(popup)); }, ) } @@ -3615,11 +3826,9 @@ pub fn apply_document_resource_op(op: &lsp::ResourceOp) -> std::io::Result<()> { match op { ResourceOp::Create(op) => { let path = op.uri.to_file_path().unwrap(); - let ignore_if_exists = if let Some(options) = &op.options { + let ignore_if_exists = op.options.as_ref().map_or(false, |options| { !options.overwrite.unwrap_or(false) && options.ignore_if_exists.unwrap_or(false) - } else { - false - }; + }); if ignore_if_exists && path.exists() { Ok(()) } else { @@ -3629,11 +3838,12 @@ pub fn apply_document_resource_op(op: &lsp::ResourceOp) -> std::io::Result<()> { ResourceOp::Delete(op) => { let path = op.uri.to_file_path().unwrap(); if path.is_dir() { - let recursive = if let Some(options) = &op.options { - options.recursive.unwrap_or(false) - } else { - false - }; + let recursive = op + .options + .as_ref() + .and_then(|options| options.recursive) + .unwrap_or(false); + if recursive { fs::remove_dir_all(&path) } else { @@ -3648,11 +3858,9 @@ pub fn apply_document_resource_op(op: &lsp::ResourceOp) -> std::io::Result<()> { ResourceOp::Rename(op) => { let from = op.old_uri.to_file_path().unwrap(); let to = op.new_uri.to_file_path().unwrap(); - let ignore_if_exists = if let Some(options) = &op.options { + let ignore_if_exists = op.options.as_ref().map_or(false, |options| { !options.overwrite.unwrap_or(false) && options.ignore_if_exists.unwrap_or(false) - } else { - false - }; + }); if ignore_if_exists && to.exists() { Ok(()) } else { @@ -3774,7 +3982,7 @@ fn last_picker(cx: &mut Context) { compositor.push(picker); } // XXX: figure out how to show error when no last picker lifetime - // cx.editor.set_error("no last picker".to_owned()) + // cx.editor.set_error("no last picker") })); } @@ -3929,7 +4137,6 @@ fn normal_mode(cx: &mut Context) { doc.mode = Mode::Normal; 
try_restore_indent(doc, view.id); - doc.append_changes_to_history(view.id); // if leaving append mode, move cursor back by 1 if doc.restore_cursor { @@ -3954,7 +4161,7 @@ fn try_restore_indent(doc: &mut Document, view_id: ViewId) { if let [Operation::Retain(move_pos), Operation::Insert(ref inserted_str), Operation::Retain(_)] = changes { - move_pos + inserted_str.len32() as usize == pos + move_pos + inserted_str.len() == pos && inserted_str.starts_with('\n') && inserted_str.chars().skip(1).all(char_is_whitespace) && pos == line_end_pos // ensure no characters exists after current position @@ -4037,7 +4244,7 @@ fn goto_last_accessed_file(cx: &mut Context) { if let Some(alt) = alternate_file { cx.editor.switch(alt, Action::Replace); } else { - cx.editor.set_error("no last accessed buffer".to_owned()) + cx.editor.set_error("no last accessed buffer") } } @@ -4064,7 +4271,7 @@ fn goto_last_modified_file(cx: &mut Context) { if let Some(alt) = alternate_file { cx.editor.switch(alt, Action::Replace); } else { - cx.editor.set_error("no last modified buffer".to_owned()) + cx.editor.set_error("no last modified buffer") } } @@ -4135,7 +4342,7 @@ fn goto_impl( jump_to(editor, location, offset_encoding, Action::Replace); } [] => { - editor.set_error("No definition found.".to_string()); + editor.set_error("No definition found."); } _locations => { let picker = FilePicker::new( @@ -4152,17 +4359,15 @@ fn goto_impl( .map(|path| path.to_path_buf()) .unwrap_or(path) }) + .map(|path| Cow::from(path.to_string_lossy().into_owned())) .ok() - .and_then(|path| path.to_str().map(|path| path.to_owned().into())) }) .flatten() .unwrap_or_else(|| location.uri.as_str().into()); let line = location.range.start.line; format!("{}:{}", file, line).into() }, - move |editor: &mut Editor, location, action| { - jump_to(editor, location, offset_encoding, action) - }, + move |cx, location, action| jump_to(cx.editor, location, offset_encoding, action), |_editor, location| { let path = location.uri.to_file_path().unwrap(); let line = Some(( @@ -4172,7 +4377,7 @@ fn goto_impl( Some((path, line)) }, ); - compositor.push(Box::new(picker)); + compositor.push(Box::new(overlayed(picker))); } } } @@ -4451,7 +4656,6 @@ fn signature_help(cx: &mut Context) { ); } -// NOTE: Transactions in this module get appended to history when we switch back to normal mode. 
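// A small, stand-alone illustration of the Option-combinator cleanup applied to
// apply_document_resource_op earlier in this diff: the `if let ... else { false }`
// blocks around the resource-op options become `map_or`/`and_then` chains with the
// same semantics. `ToyCreateOptions` is a stand-in struct, not the lsp-types
// definition.
struct ToyCreateOptions {
    overwrite: Option<bool>,
    ignore_if_exists: Option<bool>,
}

fn ignore_if_exists(options: Option<&ToyCreateOptions>) -> bool {
    // Equivalent to: if let Some(o) = options { ... } else { false }
    options.map_or(false, |o| {
        !o.overwrite.unwrap_or(false) && o.ignore_if_exists.unwrap_or(false)
    })
}

fn main() {
    assert!(!ignore_if_exists(None));
    let opts = ToyCreateOptions {
        overwrite: Some(false),
        ignore_if_exists: Some(true),
    };
    assert!(ignore_if_exists(Some(&opts)));
}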
pub mod insert { use super::*; pub type Hook = fn(&Rope, &Selection, char) -> Option; @@ -4547,7 +4751,8 @@ pub mod insert { #[allow(clippy::unnecessary_wraps)] // need to use Option<> because of the Hook signature fn insert(doc: &Rope, selection: &Selection, ch: char) -> Option { let cursors = selection.clone().cursors(doc.slice(..)); - let t = Tendril::from_char(ch); + let mut t = Tendril::new(); + t.push(ch); let transaction = Transaction::insert(doc, &cursors, t); Some(transaction) } @@ -4782,7 +4987,7 @@ fn undo(cx: &mut Context) { let (view, doc) = current!(cx.editor); for _ in 0..count { if !doc.undo(view.id) { - cx.editor.set_status("Already at oldest change".to_owned()); + cx.editor.set_status("Already at oldest change"); break; } } @@ -4793,7 +4998,7 @@ fn redo(cx: &mut Context) { let (view, doc) = current!(cx.editor); for _ in 0..count { if !doc.redo(view.id) { - cx.editor.set_status("Already at newest change".to_owned()); + cx.editor.set_status("Already at newest change"); break; } } @@ -4805,7 +5010,7 @@ fn earlier(cx: &mut Context) { for _ in 0..count { // rather than doing in batch we do this so get error halfway if !doc.earlier(view.id, UndoKind::Steps(1)) { - cx.editor.set_status("Already at oldest change".to_owned()); + cx.editor.set_status("Already at oldest change"); break; } } @@ -4817,7 +5022,7 @@ fn later(cx: &mut Context) { for _ in 0..count { // rather than doing in batch we do this so get error halfway if !doc.later(view.id, UndoKind::Steps(1)) { - cx.editor.set_status("Already at newest change".to_owned()); + cx.editor.set_status("Already at newest change"); break; } } @@ -4903,7 +5108,7 @@ fn yank_main_selection_to_clipboard_impl( bail!("Couldn't set system clipboard content: {}", e); } - editor.set_status("yanked main selection to system clipboard".to_owned()); + editor.set_status("yanked main selection to system clipboard"); Ok(()) } @@ -5049,12 +5254,12 @@ fn replace_with_yanked(cx: &mut Context) { let repeat = std::iter::repeat( values .last() - .map(|value| Tendril::from_slice(&value.repeat(count))) + .map(|value| Tendril::from(&value.repeat(count))) .unwrap(), ); let mut values = values .iter() - .map(|value| Tendril::from_slice(&value.repeat(count))) + .map(|value| Tendril::from(&value.repeat(count))) .chain(repeat); let selection = doc.selection(view.id); let transaction = Transaction::change_by_selection(doc.text(), selection, |range| { @@ -5066,7 +5271,6 @@ fn replace_with_yanked(cx: &mut Context) { }); doc.apply(&transaction, view.id); - doc.append_changes_to_history(view.id); } } } @@ -5116,7 +5320,6 @@ fn paste_after(cx: &mut Context) { .and_then(|values| paste_impl(values, doc, view, Paste::After, count)) { doc.apply(&transaction, view.id); - doc.append_changes_to_history(view.id); } } @@ -5131,7 +5334,6 @@ fn paste_before(cx: &mut Context) { .and_then(|values| paste_impl(values, doc, view, Paste::Before, count)) { doc.apply(&transaction, view.id); - doc.append_changes_to_history(view.id); } } @@ -5167,7 +5369,6 @@ fn indent(cx: &mut Context) { }), ); doc.apply(&transaction, view.id); - doc.append_changes_to_history(view.id); } fn unindent(cx: &mut Context) { @@ -5207,7 +5408,6 @@ fn unindent(cx: &mut Context) { let transaction = Transaction::change(doc.text(), changes.into_iter()); doc.apply(&transaction, view.id); - doc.append_changes_to_history(view.id); } fn format_selections(cx: &mut Context) { @@ -5254,8 +5454,6 @@ fn format_selections(cx: &mut Context) { // doc.apply(&transaction, view.id); } - - doc.append_changes_to_history(view.id); } fn 
join_selections(cx: &mut Context) { @@ -5298,7 +5496,6 @@ fn join_selections(cx: &mut Context) { // .with_selection(selection); doc.apply(&transaction, view.id); - doc.append_changes_to_history(view.id); } fn keep_or_remove_selections_impl(cx: &mut Context, remove: bool) { @@ -5348,7 +5545,7 @@ fn remove_primary_selection(cx: &mut Context) { let selection = doc.selection(view.id); if selection.len() == 1 { - cx.editor.set_error("no selections remaining".to_owned()); + cx.editor.set_error("no selections remaining"); return; } let index = selection.primary_index(); @@ -5455,7 +5652,7 @@ pub fn completion(cx: &mut Context) { } if items.is_empty() { - // editor.set_error("No completion available".to_string()); + // editor.set_error("No completion available"); return; } let size = compositor.size(); @@ -5523,13 +5720,10 @@ fn hover(cx: &mut Context) { // skip if contents empty - let contents = ui::Markdown::new(contents, editor.syn_loader.clone()); - let popup = Popup::new("documentation", contents); - if let Some(doc_popup) = compositor.find_id("documentation") { - *doc_popup = popup; - } else { - compositor.push(Box::new(popup)); - } + let contents = + ui::Markdown::new(contents, editor.syn_loader.clone()).style_group("hover"); + let popup = Popup::new("hover", contents); + compositor.replace_or_push("hover", Box::new(popup)); } }, ); @@ -5545,7 +5739,6 @@ fn toggle_comments(cx: &mut Context) { let transaction = comment::toggle_line_comments(doc.text(), doc.selection(view.id), token); doc.apply(&transaction, view.id); - doc.append_changes_to_history(view.id); exit_select_mode(cx); } @@ -5576,7 +5769,7 @@ fn rotate_selection_contents(cx: &mut Context, direction: Direction) { let selection = doc.selection(view.id); let mut fragments: Vec<_> = selection .fragments(text) - .map(|fragment| Tendril::from_slice(&fragment)) + .map(|fragment| Tendril::from(fragment.as_ref())) .collect(); let group = count @@ -5602,8 +5795,8 @@ fn rotate_selection_contents(cx: &mut Context, direction: Direction) { ); doc.apply(&transaction, view.id); - doc.append_changes_to_history(view.id); } + fn rotate_selection_contents_forward(cx: &mut Context) { rotate_selection_contents(cx, Direction::Forward) } @@ -5619,7 +5812,13 @@ fn expand_selection(cx: &mut Context) { if let Some(syntax) = doc.syntax() { let text = doc.text().slice(..); - let selection = object::expand_selection(syntax, text, doc.selection(view.id)); + + let current_selection = doc.selection(view.id); + + // save current selection so it can be restored using shrink_selection + view.object_selections.push(current_selection.clone()); + + let selection = object::expand_selection(syntax, text, current_selection.clone()); doc.set_selection(view.id, selection); } }; @@ -5627,6 +5826,59 @@ fn expand_selection(cx: &mut Context) { cx.editor.last_motion = Some(Motion(Box::new(motion))); } +fn shrink_selection(cx: &mut Context) { + let motion = |editor: &mut Editor| { + let (view, doc) = current!(editor); + let current_selection = doc.selection(view.id); + // try to restore previous selection + if let Some(prev_selection) = view.object_selections.pop() { + if current_selection.contains(&prev_selection) { + // allow shrinking the selection only if current selection contains the previous object selection + doc.set_selection(view.id, prev_selection); + return; + } else { + // clear existing selection as they can't be shrinked to anyway + view.object_selections.clear(); + } + } + // if not previous selection, shrink to first child + if let Some(syntax) = doc.syntax() { + 
let text = doc.text().slice(..); + let selection = object::shrink_selection(syntax, text, current_selection.clone()); + doc.set_selection(view.id, selection); + } + }; + motion(cx.editor); + cx.editor.last_motion = Some(Motion(Box::new(motion))); +} + +fn select_sibling_impl(cx: &mut Context, sibling_fn: &'static F) +where + F: Fn(Node) -> Option, +{ + let motion = |editor: &mut Editor| { + let (view, doc) = current!(editor); + + if let Some(syntax) = doc.syntax() { + let text = doc.text().slice(..); + let current_selection = doc.selection(view.id); + let selection = + object::select_sibling(syntax, text, current_selection.clone(), sibling_fn); + doc.set_selection(view.id, selection); + } + }; + motion(cx.editor); + cx.editor.last_motion = Some(Motion(Box::new(motion))); +} + +fn select_next_sibling(cx: &mut Context) { + select_sibling_impl(cx, &|node| Node::next_sibling(&node)) +} + +fn select_prev_sibling(cx: &mut Context) { + select_sibling_impl(cx, &|node| Node::prev_sibling(&node)) +} + fn match_brackets(cx: &mut Context) { let (view, doc) = current!(cx.editor); @@ -5681,8 +5933,7 @@ fn jump_backward(cx: &mut Context) { fn save_selection(cx: &mut Context) { push_jump(cx.editor); - cx.editor - .set_status("Selection saved to jumplist".to_owned()); + cx.editor.set_status("Selection saved to jumplist"); } fn rotate_view(cx: &mut Context) { @@ -5755,8 +6006,10 @@ fn wonly(cx: &mut Context) { } fn select_register(cx: &mut Context) { + cx.editor.autoinfo = Some(Info::from_registers(&cx.editor.registers)); cx.on_next_key(move |cx, event| { if let Some(ch) = event.char() { + cx.editor.autoinfo = None; cx.editor.selected_register = Some(ch); } }) @@ -5805,6 +6058,52 @@ fn scroll_down(cx: &mut Context) { scroll(cx, cx.count(), Direction::Forward); } +fn goto_ts_object_impl(cx: &mut Context, object: &str, direction: Direction) { + let count = cx.count(); + let (view, doc) = current!(cx.editor); + let text = doc.text().slice(..); + let range = doc.selection(view.id).primary(); + + let new_range = match doc.language_config().zip(doc.syntax()) { + Some((lang_config, syntax)) => movement::goto_treesitter_object( + text, + range, + object, + direction, + syntax.tree().root_node(), + lang_config, + count, + ), + None => range, + }; + + doc.set_selection(view.id, Selection::single(new_range.anchor, new_range.head)); +} + +fn goto_next_function(cx: &mut Context) { + goto_ts_object_impl(cx, "function", Direction::Forward) +} + +fn goto_prev_function(cx: &mut Context) { + goto_ts_object_impl(cx, "function", Direction::Backward) +} + +fn goto_next_class(cx: &mut Context) { + goto_ts_object_impl(cx, "class", Direction::Forward) +} + +fn goto_prev_class(cx: &mut Context) { + goto_ts_object_impl(cx, "class", Direction::Backward) +} + +fn goto_next_parameter(cx: &mut Context) { + goto_ts_object_impl(cx, "parameter", Direction::Forward) +} + +fn goto_prev_parameter(cx: &mut Context) { + goto_ts_object_impl(cx, "parameter", Direction::Backward) +} + fn select_textobject_around(cx: &mut Context) { select_textobject(cx, textobject::TextObject::Around); } @@ -5876,13 +6175,16 @@ fn surround_add(cx: &mut Context) { let mut changes = Vec::with_capacity(selection.len() * 2); for range in selection.iter() { - changes.push((range.from(), range.from(), Some(Tendril::from_char(open)))); - changes.push((range.to(), range.to(), Some(Tendril::from_char(close)))); + let mut o = Tendril::new(); + o.push(open); + let mut c = Tendril::new(); + c.push(close); + changes.push((range.from(), range.from(), Some(o))); + 
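goto_ts_object_impl above reduces every tree-sitter motion to a thin wrapper around movement::goto_treesitter_object plus a textobject name; the six wrappers added by this patch cover "function", "class" and "parameter". Any further motion would follow the same shape, for example (hypothetical only, since it assumes a matching "comment" textobject query that this patch does not add):

// Hypothetical wrapper, not part of the patch.
fn goto_next_comment(cx: &mut Context) {
    goto_ts_object_impl(cx, "comment", Direction::Forward)
}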
changes.push((range.to(), range.to(), Some(c))); } let transaction = Transaction::change(doc.text(), changes.into_iter()); doc.apply(&transaction, view.id); - doc.append_changes_to_history(view.id); } }) } @@ -5907,15 +6209,12 @@ fn surround_replace(cx: &mut Context) { let transaction = Transaction::change( doc.text(), change_pos.iter().enumerate().map(|(i, &pos)| { - ( - pos, - pos + 1, - Some(Tendril::from_char(if i % 2 == 0 { open } else { close })), - ) + let mut t = Tendril::new(); + t.push(if i % 2 == 0 { open } else { close }); + (pos, pos + 1, Some(t)) }), ); doc.apply(&transaction, view.id); - doc.append_changes_to_history(view.id); } }); } @@ -5938,7 +6237,6 @@ fn surround_delete(cx: &mut Context) { let transaction = Transaction::change(doc.text(), change_pos.into_iter().map(|p| (p, p + 1, None))); doc.apply(&transaction, view.id); - doc.append_changes_to_history(view.id); } }) } @@ -6008,7 +6306,7 @@ fn shell_keep_pipe(cx: &mut Context) { } if ranges.is_empty() { - cx.editor.set_error("No selections remaining".to_string()); + cx.editor.set_error("No selections remaining"); return; } @@ -6053,8 +6351,9 @@ fn shell_impl( log::error!("Shell error: {}", String::from_utf8_lossy(&output.stderr)); } - let tendril = Tendril::try_from_byte_slice(&output.stdout) + let str = std::str::from_utf8(&output.stdout) .map_err(|_| anyhow!("Process did not output valid UTF-8"))?; + let tendril = Tendril::from(str); Ok((tendril, output.status.success())) } @@ -6093,7 +6392,7 @@ fn shell(cx: &mut Context, prompt: Cow<'static, str>, behavior: ShellBehavior) { }; if !success { - cx.editor.set_error("Command failed".to_string()); + cx.editor.set_error("Command failed"); return; } @@ -6109,7 +6408,6 @@ fn shell(cx: &mut Context, prompt: Cow<'static, str>, behavior: ShellBehavior) { if behavior != ShellBehavior::Ignore { let transaction = Transaction::change(doc.text(), changes.into_iter()); doc.apply(&transaction, view.id); - doc.append_changes_to_history(view.id); } // after replace cursor may be out of bounds, do this to @@ -6157,7 +6455,6 @@ fn add_newline_impl(cx: &mut Context, open: Open) { let transaction = Transaction::change(text, changes); doc.apply(&transaction, view.id); - doc.append_changes_to_history(view.id); } fn rename_symbol(cx: &mut Context) { @@ -6257,7 +6554,6 @@ fn increment_impl(cx: &mut Context, amount: i64) { let transaction = transaction.with_selection(selection.clone()); doc.apply(&transaction, view.id); - doc.append_changes_to_history(view.id); } } diff --git a/helix-term/src/commands/dap.rs b/helix-term/src/commands/dap.rs new file mode 100644 index 000000000..9da2715f4 --- /dev/null +++ b/helix-term/src/commands/dap.rs @@ -0,0 +1,827 @@ +use super::{align_view, Align, Context, Editor}; +use crate::{ + compositor::{self, Compositor}, + job::{Callback, Jobs}, + ui::{self, overlay::overlayed, FilePicker, Picker, Popup, Prompt, PromptEvent, Text}, +}; +use helix_core::{ + syntax::{DebugArgumentValue, DebugConfigCompletion}, + Selection, +}; +use helix_dap::{self as dap, Client, ThreadId}; +use helix_lsp::block_on; +use helix_view::editor::Breakpoint; + +use serde_json::{to_value, Value}; +use tokio_stream::wrappers::UnboundedReceiverStream; + +use std::collections::HashMap; +use std::future::Future; +use std::path::PathBuf; + +use anyhow::{anyhow, bail}; + +#[macro_export] +macro_rules! 
debugger { + ($editor:expr) => {{ + match &mut $editor.debugger { + Some(debugger) => debugger, + None => return, + } + }}; +} + +// general utils: +pub fn dap_pos_to_pos(doc: &helix_core::Rope, line: usize, column: usize) -> Option { + // 1-indexing to 0 indexing + let line = doc.try_line_to_char(line - 1).ok()?; + let pos = line + column.saturating_sub(1); + // TODO: this is probably utf-16 offsets + Some(pos) +} + +pub async fn select_thread_id(editor: &mut Editor, thread_id: ThreadId, force: bool) { + let debugger = debugger!(editor); + + if !force && debugger.thread_id.is_some() { + return; + } + + debugger.thread_id = Some(thread_id); + fetch_stack_trace(debugger, thread_id).await; + + let frame = debugger.stack_frames[&thread_id].get(0).cloned(); + if let Some(frame) = &frame { + jump_to_stack_frame(editor, frame); + } +} + +pub async fn fetch_stack_trace(debugger: &mut Client, thread_id: ThreadId) { + let (frames, _) = match debugger.stack_trace(thread_id).await { + Ok(frames) => frames, + Err(_) => return, + }; + debugger.stack_frames.insert(thread_id, frames); + debugger.active_frame = Some(0); +} + +pub fn jump_to_stack_frame(editor: &mut Editor, frame: &helix_dap::StackFrame) { + let path = if let Some(helix_dap::Source { + path: Some(ref path), + .. + }) = frame.source + { + path.clone() + } else { + return; + }; + + if let Err(e) = editor.open(path, helix_view::editor::Action::Replace) { + editor.set_error(format!("Unable to jump to stack frame: {}", e)); + return; + } + + let (view, doc) = current!(editor); + + let text_end = doc.text().len_chars().saturating_sub(1); + let start = dap_pos_to_pos(doc.text(), frame.line, frame.column).unwrap_or(0); + let end = frame + .end_line + .and_then(|end_line| dap_pos_to_pos(doc.text(), end_line, frame.end_column.unwrap_or(0))) + .unwrap_or(start); + + let selection = Selection::single(start.min(text_end), end.min(text_end)); + doc.set_selection(view.id, selection); + align_view(doc, view, Align::Center); +} + +fn thread_picker( + cx: &mut Context, + callback_fn: impl Fn(&mut Editor, &dap::Thread) + Send + 'static, +) { + let debugger = debugger!(cx.editor); + + let future = debugger.threads(); + dap_callback( + cx.jobs, + future, + move |editor: &mut Editor, + compositor: &mut Compositor, + response: dap::requests::ThreadsResponse| { + let threads = response.threads; + if threads.len() == 1 { + callback_fn(editor, &threads[0]); + return; + } + let debugger = debugger!(editor); + + let thread_states = debugger.thread_states.clone(); + let picker = FilePicker::new( + threads, + move |thread| { + format!( + "{} ({})", + thread.name, + thread_states + .get(&thread.id) + .map(|state| state.as_str()) + .unwrap_or("unknown") + ) + .into() + }, + move |cx, thread, _action| callback_fn(cx.editor, thread), + move |editor, thread| { + let frames = editor.debugger.as_ref()?.stack_frames.get(&thread.id)?; + let frame = frames.get(0)?; + let path = frame.source.as_ref()?.path.clone()?; + let pos = Some(( + frame.line.saturating_sub(1), + frame.end_line.unwrap_or(frame.line).saturating_sub(1), + )); + Some((path, pos)) + }, + ); + compositor.push(Box::new(picker)); + }, + ); +} + +fn get_breakpoint_at_current_line(editor: &mut Editor) -> Option<(usize, Breakpoint)> { + let (view, doc) = current!(editor); + let text = doc.text().slice(..); + + let line = doc.selection(view.id).primary().cursor_line(text); + let path = doc.path()?; + editor.breakpoints.get(path).and_then(|breakpoints| { + let i = breakpoints.iter().position(|b| b.line == line); + 
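The debugger! macro defined above is just an early-return accessor for the optional debugger, so a call site such as let debugger = debugger!(cx.editor); expands, modulo macro hygiene, to roughly:

// Equivalent expansion of debugger!(cx.editor):
let debugger = match &mut cx.editor.debugger {
    Some(debugger) => debugger,
    None => return,
};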
i.map(|i| (i, breakpoints[i].clone())) + }) +} + +// -- DAP + +fn dap_callback( + jobs: &mut Jobs, + call: impl Future> + 'static + Send, + callback: F, +) where + T: for<'de> serde::Deserialize<'de> + Send + 'static, + F: FnOnce(&mut Editor, &mut Compositor, T) + Send + 'static, +{ + let callback = Box::pin(async move { + let json = call.await?; + let response = serde_json::from_value(json)?; + let call: Callback = Box::new(move |editor: &mut Editor, compositor: &mut Compositor| { + callback(editor, compositor, response) + }); + Ok(call) + }); + jobs.callback(callback); +} + +pub fn dap_start_impl( + cx: &mut compositor::Context, + name: Option<&str>, + socket: Option, + params: Option>>, +) -> Result<(), anyhow::Error> { + let doc = doc!(cx.editor); + + let config = doc + .language_config() + .and_then(|config| config.debugger.as_ref()) + .ok_or(anyhow!("No debug adapter available for language"))?; + + let result = match socket { + Some(socket) => block_on(Client::tcp(socket, 0)), + None => block_on(Client::process( + &config.transport, + &config.command, + config.args.iter().map(|arg| arg.as_str()).collect(), + config.port_arg.as_deref(), + 0, + )), + }; + + let (mut debugger, events) = match result { + Ok(r) => r, + Err(e) => bail!("Failed to start debug session: {}", e), + }; + + let request = debugger.initialize(config.name.clone()); + if let Err(e) = block_on(request) { + bail!("Failed to initialize debug adapter: {}", e); + } + + debugger.quirks = config.quirks.clone(); + + // TODO: avoid refetching all of this... pass a config in + let template = match name { + Some(name) => config.templates.iter().find(|t| t.name == name), + None => config.templates.get(0), + } + .ok_or(anyhow!("No debug config with given name"))?; + + let mut args: HashMap<&str, Value> = HashMap::new(); + + if let Some(params) = params { + for (k, t) in &template.args { + let mut value = t.clone(); + for (i, x) in params.iter().enumerate() { + let mut param = x.to_string(); + if let Some(DebugConfigCompletion::Advanced(cfg)) = template.completion.get(i) { + if matches!(cfg.completion.as_deref(), Some("filename" | "directory")) { + param = std::fs::canonicalize(x.as_ref()) + .ok() + .and_then(|pb| pb.into_os_string().into_string().ok()) + .unwrap_or_else(|| x.to_string()); + } + } + // For param #0 replace {0} in args + let pattern = format!("{{{}}}", i); + value = match value { + // TODO: just use toml::Value -> json::Value + DebugArgumentValue::String(v) => { + DebugArgumentValue::String(v.replace(&pattern, ¶m)) + } + DebugArgumentValue::Array(arr) => DebugArgumentValue::Array( + arr.iter().map(|v| v.replace(&pattern, ¶m)).collect(), + ), + DebugArgumentValue::Boolean(_) => value, + }; + } + + match value { + DebugArgumentValue::String(string) => { + if let Ok(integer) = string.parse::() { + args.insert(k, to_value(integer).unwrap()); + } else { + args.insert(k, to_value(string).unwrap()); + } + } + DebugArgumentValue::Array(arr) => { + args.insert(k, to_value(arr).unwrap()); + } + DebugArgumentValue::Boolean(bool) => { + args.insert(k, to_value(bool).unwrap()); + } + } + } + } + + let args = to_value(args).unwrap(); + + let callback = |_editor: &mut Editor, _compositor: &mut Compositor, _response: Value| { + // if let Err(e) = result { + // editor.set_error(format!("Failed {} target: {}", template.request, e)); + // } + }; + + match &template.request[..] 
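A worked example of the "{N}" placeholder substitution a few lines up, under assumed values: a template argument program = "{0}" combined with the single prompted parameter ./target/debug/myapp ends up in args as the JSON string "./target/debug/myapp", while a purely numeric parameter such as "8080" would instead be stored as a JSON integer by the numeric-parse branch above.

// The same substitution in isolation (illustrative values, not from the patch):
//     let template_value = String::from("{0}");
//     let substituted = template_value.replace("{0}", "./target/debug/myapp");
//     assert_eq!(substituted, "./target/debug/myapp");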
{ + "launch" => { + let call = debugger.launch(args); + dap_callback(cx.jobs, call, callback); + } + "attach" => { + let call = debugger.attach(args); + dap_callback(cx.jobs, call, callback); + } + request => bail!("Unsupported request '{}'", request), + }; + + // TODO: either await "initialized" or buffer commands until event is received + cx.editor.debugger = Some(debugger); + let stream = UnboundedReceiverStream::new(events); + cx.editor.debugger_events.push(stream); + Ok(()) +} + +pub fn dap_launch(cx: &mut Context) { + if cx.editor.debugger.is_some() { + cx.editor.set_error("Debugger is already running"); + return; + } + + let doc = doc!(cx.editor); + + let config = match doc + .language_config() + .and_then(|config| config.debugger.as_ref()) + { + Some(c) => c, + None => { + cx.editor + .set_error("No debug adapter available for language"); + return; + } + }; + + let templates = config.templates.clone(); + + cx.push_layer(Box::new(overlayed(Picker::new( + templates, + |template| template.name.as_str().into(), + |cx, template, _action| { + let completions = template.completion.clone(); + let name = template.name.clone(); + let callback = Box::pin(async move { + let call: Callback = + Box::new(move |_editor: &mut Editor, compositor: &mut Compositor| { + let prompt = debug_parameter_prompt(completions, name, Vec::new()); + compositor.push(Box::new(prompt)); + }); + Ok(call) + }); + cx.jobs.callback(callback); + }, + )))); +} + +fn debug_parameter_prompt( + completions: Vec, + config_name: String, + mut params: Vec, +) -> Prompt { + let completion = completions.get(params.len()).unwrap(); + let field_type = if let DebugConfigCompletion::Advanced(cfg) = completion { + cfg.completion.as_deref().unwrap_or("") + } else { + "" + }; + let name = match completion { + DebugConfigCompletion::Advanced(cfg) => cfg.name.as_deref().unwrap_or(field_type), + DebugConfigCompletion::Named(name) => name.as_str(), + }; + let default_val = match completion { + DebugConfigCompletion::Advanced(cfg) => cfg.default.as_deref().unwrap_or(""), + _ => "", + } + .to_owned(); + + let completer = match field_type { + "filename" => ui::completers::filename, + "directory" => ui::completers::directory, + _ => |_input: &str| Vec::new(), + }; + Prompt::new( + format!("{}: ", name).into(), + None, + completer, + move |cx, input: &str, event: PromptEvent| { + if event != PromptEvent::Validate { + return; + } + + let mut value = input.to_owned(); + if value.is_empty() { + value = default_val.clone(); + } + params.push(value); + + if params.len() < completions.len() { + let completions = completions.clone(); + let config_name = config_name.clone(); + let params = params.clone(); + let callback = Box::pin(async move { + let call: Callback = + Box::new(move |_editor: &mut Editor, compositor: &mut Compositor| { + let prompt = debug_parameter_prompt(completions, config_name, params); + compositor.push(Box::new(prompt)); + }); + Ok(call) + }); + cx.jobs.callback(callback); + } else if let Err(e) = dap_start_impl( + cx, + Some(&config_name), + None, + Some(params.iter().map(|x| x.into()).collect()), + ) { + cx.editor.set_error(e.to_string()); + } + }, + ) +} + +pub fn dap_toggle_breakpoint(cx: &mut Context) { + let (view, doc) = current!(cx.editor); + let path = match doc.path() { + Some(path) => path.clone(), + None => { + cx.editor + .set_error("Can't set breakpoint: document has no path"); + return; + } + }; + let text = doc.text().slice(..); + let line = doc.selection(view.id).primary().cursor_line(text); + 
dap_toggle_breakpoint_impl(cx, path, line); +} + +pub fn breakpoints_changed( + debugger: &mut dap::Client, + path: PathBuf, + breakpoints: &mut [Breakpoint], +) -> Result<(), anyhow::Error> { + // TODO: handle capabilities correctly again, by filterin breakpoints when emitting + // if breakpoint.condition.is_some() + // && !debugger + // .caps + // .as_ref() + // .unwrap() + // .supports_conditional_breakpoints + // .unwrap_or_default() + // { + // bail!( + // "Can't edit breakpoint: debugger does not support conditional breakpoints" + // ) + // } + // if breakpoint.log_message.is_some() + // && !debugger + // .caps + // .as_ref() + // .unwrap() + // .supports_log_points + // .unwrap_or_default() + // { + // bail!("Can't edit breakpoint: debugger does not support logpoints") + // } + let source_breakpoints = breakpoints + .iter() + .map(|breakpoint| helix_dap::SourceBreakpoint { + line: breakpoint.line + 1, // convert from 0-indexing to 1-indexing (TODO: could set debugger to 0-indexing on init) + ..Default::default() + }) + .collect::>(); + + let request = debugger.set_breakpoints(path, source_breakpoints); + match block_on(request) { + Ok(Some(dap_breakpoints)) => { + for (breakpoint, dap_breakpoint) in breakpoints.iter_mut().zip(dap_breakpoints) { + breakpoint.id = dap_breakpoint.id; + breakpoint.verified = dap_breakpoint.verified; + breakpoint.message = dap_breakpoint.message; + // TODO: handle breakpoint.message + // TODO: verify source matches + breakpoint.line = dap_breakpoint.line.unwrap_or(0).saturating_sub(1); // convert to 0-indexing + // TODO: no unwrap + breakpoint.column = dap_breakpoint.column; + // TODO: verify end_linef/col instruction reference, offset + } + } + Err(e) => anyhow::bail!("Failed to set breakpoints: {}", e), + _ => {} + }; + Ok(()) +} + +pub fn dap_toggle_breakpoint_impl(cx: &mut Context, path: PathBuf, line: usize) { + // TODO: need to map breakpoints over edits and update them? + // we shouldn't really allow editing while debug is running though + + let breakpoints = cx.editor.breakpoints.entry(path.clone()).or_default(); + // TODO: always keep breakpoints sorted and use binary search to determine insertion point + if let Some(pos) = breakpoints + .iter() + .position(|breakpoint| breakpoint.line == line) + { + breakpoints.remove(pos); + } else { + breakpoints.push(Breakpoint { + line, + ..Default::default() + }); + } + + let debugger = debugger!(cx.editor); + + if let Err(e) = breakpoints_changed(debugger, path, breakpoints) { + cx.editor + .set_error(format!("Failed to set breakpoints: {}", e)); + } +} + +pub fn dap_continue(cx: &mut Context) { + let debugger = debugger!(cx.editor); + + if let Some(thread_id) = debugger.thread_id { + let request = debugger.continue_thread(thread_id); + + dap_callback( + cx.jobs, + request, + |editor, _compositor, _response: dap::requests::ContinueResponse| { + debugger!(editor).resume_application(); + }, + ); + } else { + cx.editor + .set_error("Currently active thread is not stopped. 
Switch the thread."); + } +} + +pub fn dap_pause(cx: &mut Context) { + thread_picker(cx, |editor, thread| { + let debugger = debugger!(editor); + let request = debugger.pause(thread.id); + // NOTE: we don't need to set active thread id here because DAP will emit a "stopped" event + if let Err(e) = block_on(request) { + editor.set_error(format!("Failed to pause: {}", e)); + } + }) +} + +pub fn dap_step_in(cx: &mut Context) { + let debugger = debugger!(cx.editor); + + if let Some(thread_id) = debugger.thread_id { + let request = debugger.step_in(thread_id); + + dap_callback(cx.jobs, request, |editor, _compositor, _response: ()| { + debugger!(editor).resume_application(); + }); + } else { + cx.editor + .set_error("Currently active thread is not stopped. Switch the thread."); + } +} + +pub fn dap_step_out(cx: &mut Context) { + let debugger = debugger!(cx.editor); + + if let Some(thread_id) = debugger.thread_id { + let request = debugger.step_out(thread_id); + dap_callback(cx.jobs, request, |editor, _compositor, _response: ()| { + debugger!(editor).resume_application(); + }); + } else { + cx.editor + .set_error("Currently active thread is not stopped. Switch the thread."); + } +} + +pub fn dap_next(cx: &mut Context) { + let debugger = debugger!(cx.editor); + + if let Some(thread_id) = debugger.thread_id { + let request = debugger.next(thread_id); + dap_callback(cx.jobs, request, |editor, _compositor, _response: ()| { + debugger!(editor).resume_application(); + }); + } else { + cx.editor + .set_error("Currently active thread is not stopped. Switch the thread."); + } +} + +pub fn dap_variables(cx: &mut Context) { + let debugger = debugger!(cx.editor); + + if debugger.thread_id.is_none() { + cx.editor + .set_status("Cannot access variables while target is running"); + return; + } + let (frame, thread_id) = match (debugger.active_frame, debugger.thread_id) { + (Some(frame), Some(thread_id)) => (frame, thread_id), + _ => { + cx.editor + .set_status("Cannot find current stack frame to access variables"); + return; + } + }; + + let frame_id = debugger.stack_frames[&thread_id][frame].id; + let scopes = match block_on(debugger.scopes(frame_id)) { + Ok(s) => s, + Err(e) => { + cx.editor.set_error(format!("Failed to get scopes: {}", e)); + return; + } + }; + + // TODO: allow expanding variables into sub-fields + let mut variables = Vec::new(); + + let theme = &cx.editor.theme; + let scope_style = theme.get("ui.linenr.selected"); + let type_style = theme.get("ui.text"); + let text_style = theme.get("ui.text.focus"); + + for scope in scopes.iter() { + // use helix_view::graphics::Style; + use tui::text::{Span, Spans}; + let response = block_on(debugger.variables(scope.variables_reference)); + + variables.push(Spans::from(Span::styled( + format!("▸ {}", scope.name), + scope_style, + ))); + + if let Ok(vars) = response { + variables.reserve(vars.len()); + for var in vars { + let mut spans = Vec::with_capacity(5); + + spans.push(Span::styled(var.name.to_owned(), text_style)); + if let Some(ty) = var.ty { + spans.push(Span::raw(": ")); + spans.push(Span::styled(ty.to_owned(), type_style)); + } + spans.push(Span::raw(" = ")); + spans.push(Span::styled(var.value.to_owned(), text_style)); + variables.push(Spans::from(spans)); + } + } + } + + let contents = Text::from(tui::text::Text::from(variables)); + let popup = Popup::new("dap-variables", contents); + cx.push_layer(Box::new(popup)); +} + +pub fn dap_terminate(cx: &mut Context) { + let debugger = debugger!(cx.editor); + + let request = debugger.disconnect(); + 
dap_callback(cx.jobs, request, |editor, _compositor, _response: ()| { + // editor.set_error(format!("Failed to disconnect: {}", e)); + editor.debugger = None; + }); +} + +pub fn dap_enable_exceptions(cx: &mut Context) { + let debugger = debugger!(cx.editor); + + let filters = match &debugger.capabilities().exception_breakpoint_filters { + Some(filters) => filters.iter().map(|f| f.filter.clone()).collect(), + None => return, + }; + + let request = debugger.set_exception_breakpoints(filters); + + dap_callback( + cx.jobs, + request, + |_editor, _compositor, _response: dap::requests::SetExceptionBreakpointsResponse| { + // editor.set_error(format!("Failed to set up exception breakpoints: {}", e)); + }, + ) +} + +pub fn dap_disable_exceptions(cx: &mut Context) { + let debugger = debugger!(cx.editor); + + let request = debugger.set_exception_breakpoints(Vec::new()); + + dap_callback( + cx.jobs, + request, + |_editor, _compositor, _response: dap::requests::SetExceptionBreakpointsResponse| { + // editor.set_error(format!("Failed to set up exception breakpoints: {}", e)); + }, + ) +} + +// TODO: both edit condition and edit log need to be stable: we might get new breakpoints from the debugger which can change offsets +pub fn dap_edit_condition(cx: &mut Context) { + if let Some((pos, breakpoint)) = get_breakpoint_at_current_line(cx.editor) { + let path = match doc!(cx.editor).path() { + Some(path) => path.clone(), + None => return, + }; + let callback = Box::pin(async move { + let call: Callback = + Box::new(move |_editor: &mut Editor, compositor: &mut Compositor| { + let mut prompt = Prompt::new( + "condition:".into(), + None, + |_input: &str| Vec::new(), + move |cx, input: &str, event: PromptEvent| { + if event != PromptEvent::Validate { + return; + } + + let breakpoints = &mut cx.editor.breakpoints.get_mut(&path).unwrap(); + breakpoints[pos].condition = match input { + "" => None, + input => Some(input.to_owned()), + }; + + let debugger = debugger!(cx.editor); + + if let Err(e) = breakpoints_changed(debugger, path.clone(), breakpoints) + { + cx.editor + .set_error(format!("Failed to set breakpoints: {}", e)); + } + }, + ); + if let Some(condition) = breakpoint.condition { + prompt.insert_str(&condition) + } + compositor.push(Box::new(prompt)); + }); + Ok(call) + }); + cx.jobs.callback(callback); + } +} + +pub fn dap_edit_log(cx: &mut Context) { + if let Some((pos, breakpoint)) = get_breakpoint_at_current_line(cx.editor) { + let path = match doc!(cx.editor).path() { + Some(path) => path.clone(), + None => return, + }; + let callback = Box::pin(async move { + let call: Callback = + Box::new(move |_editor: &mut Editor, compositor: &mut Compositor| { + let mut prompt = Prompt::new( + "log-message:".into(), + None, + |_input: &str| Vec::new(), + move |cx, input: &str, event: PromptEvent| { + if event != PromptEvent::Validate { + return; + } + + let breakpoints = &mut cx.editor.breakpoints.get_mut(&path).unwrap(); + breakpoints[pos].log_message = match input { + "" => None, + input => Some(input.to_owned()), + }; + + let debugger = debugger!(cx.editor); + if let Err(e) = breakpoints_changed(debugger, path.clone(), breakpoints) + { + cx.editor + .set_error(format!("Failed to set breakpoints: {}", e)); + } + }, + ); + if let Some(log_message) = breakpoint.log_message { + prompt.insert_str(&log_message); + } + compositor.push(Box::new(prompt)); + }); + Ok(call) + }); + cx.jobs.callback(callback); + } +} + +pub fn dap_switch_thread(cx: &mut Context) { + thread_picker(cx, |editor, thread| { + 
block_on(select_thread_id(editor, thread.id, true)); + }) +} +pub fn dap_switch_stack_frame(cx: &mut Context) { + let debugger = debugger!(cx.editor); + + let thread_id = match debugger.thread_id { + Some(thread_id) => thread_id, + None => { + cx.editor.set_error("No thread is currently active"); + return; + } + }; + + let frames = debugger.stack_frames[&thread_id].clone(); + + let picker = FilePicker::new( + frames, + |frame| frame.name.clone().into(), // TODO: include thread_states in the label + move |cx, frame, _action| { + let debugger = debugger!(cx.editor); + // TODO: this should be simpler to find + let pos = debugger.stack_frames[&thread_id] + .iter() + .position(|f| f.id == frame.id); + debugger.active_frame = pos; + + let frame = debugger.stack_frames[&thread_id] + .get(pos.unwrap_or(0)) + .cloned(); + if let Some(frame) = &frame { + jump_to_stack_frame(cx.editor, frame); + } + }, + move |_editor, frame| { + frame + .source + .as_ref() + .and_then(|source| source.path.clone()) + .map(|path| { + ( + path, + Some(( + frame.line.saturating_sub(1), + frame.end_line.unwrap_or(frame.line).saturating_sub(1), + )), + ) + }) + }, + ); + cx.push_layer(Box::new(picker)) +} diff --git a/helix-term/src/compositor.rs b/helix-term/src/compositor.rs index 321f56a5e..dd7ebe1d8 100644 --- a/helix-term/src/compositor.rs +++ b/helix-term/src/compositor.rs @@ -126,6 +126,16 @@ impl Compositor { self.layers.push(layer); } + /// Replace a component that has the given `id` with the new layer and if + /// no component is found, push the layer normally. + pub fn replace_or_push(&mut self, id: &'static str, layer: Box) { + if let Some(component) = self.find_id(id) { + *component = layer; + } else { + self.push(layer) + } + } + pub fn pop(&mut self) -> Option> { self.layers.pop() } diff --git a/helix-term/src/config.rs b/helix-term/src/config.rs index 3745f871a..6b8bbc1b8 100644 --- a/helix-term/src/config.rs +++ b/helix-term/src/config.rs @@ -20,14 +20,18 @@ pub struct LspConfig { pub display_messages: bool, } -#[test] -fn parsing_keymaps_config_file() { - use crate::keymap; - use crate::keymap::Keymap; - use helix_core::hashmap; - use helix_view::document::Mode; - - let sample_keymaps = r#" +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn parsing_keymaps_config_file() { + use crate::keymap; + use crate::keymap::Keymap; + use helix_core::hashmap; + use helix_view::document::Mode; + + let sample_keymaps = r#" [keys.insert] y = "move_line_down" S-C-a = "delete_selection" @@ -36,19 +40,20 @@ fn parsing_keymaps_config_file() { A-F12 = "move_next_word_end" "#; - assert_eq!( - toml::from_str::(sample_keymaps).unwrap(), - Config { - keys: Keymaps(hashmap! { - Mode::Insert => Keymap::new(keymap!({ "Insert mode" - "y" => move_line_down, - "S-C-a" => delete_selection, - })), - Mode::Normal => Keymap::new(keymap!({ "Normal mode" - "A-F12" => move_next_word_end, - })), - }), - ..Default::default() - } - ); + assert_eq!( + toml::from_str::(sample_keymaps).unwrap(), + Config { + keys: Keymaps(hashmap! 
{ + Mode::Insert => Keymap::new(keymap!({ "Insert mode" + "y" => move_line_down, + "S-C-a" => delete_selection, + })), + Mode::Normal => Keymap::new(keymap!({ "Normal mode" + "A-F12" => move_next_word_end, + })), + }), + ..Default::default() + } + ); + } } diff --git a/helix-term/src/job.rs b/helix-term/src/job.rs index 4fa381748..a6a770211 100644 --- a/helix-term/src/job.rs +++ b/helix-term/src/job.rs @@ -22,8 +22,8 @@ pub struct Jobs { } impl Job { - pub fn new> + Send + 'static>(f: F) -> Job { - Job { + pub fn new> + Send + 'static>(f: F) -> Self { + Self { future: f.map(|r| r.map(|()| None)).boxed(), wait: false, } @@ -31,22 +31,22 @@ impl Job { pub fn with_callback> + Send + 'static>( f: F, - ) -> Job { - Job { + ) -> Self { + Self { future: f.map(|r| r.map(Some)).boxed(), wait: false, } } - pub fn wait_before_exiting(mut self) -> Job { + pub fn wait_before_exiting(mut self) -> Self { self.wait = true; self } } impl Jobs { - pub fn new() -> Jobs { - Jobs::default() + pub fn new() -> Self { + Self::default() } pub fn spawn> + Send + 'static>(&mut self, f: F) { @@ -93,8 +93,8 @@ impl Jobs { } /// Blocks until all the jobs that need to be waited on are done. - pub fn finish(&mut self) { + pub async fn finish(&mut self) { let wait_futures = std::mem::take(&mut self.wait_futures); - helix_lsp::block_on(wait_futures.for_each(|_| future::ready(()))); + wait_futures.for_each(|_| future::ready(())).await } } diff --git a/helix-term/src/keymap.rs b/helix-term/src/keymap.rs index 728332129..f414f797c 100644 --- a/helix-term/src/keymap.rs +++ b/helix-term/src/keymap.rs @@ -222,9 +222,8 @@ impl KeyTrieNode { .map(|(desc, keys)| (desc.strip_prefix(&prefix).unwrap(), keys)) .collect(); } - Info::new(self.name(), body) + Info::from_keymap(self.name(), body) } - /// Get a reference to the key trie node's order. pub fn order(&self) -> &[KeyEvent] { self.order.as_slice() @@ -344,7 +343,7 @@ pub struct Keymap { impl Keymap { pub fn new(root: KeyTrie) -> Self { - Keymap { + Self { root, state: Vec::new(), sticky: None, @@ -368,7 +367,7 @@ impl Keymap { /// key cancels pending keystrokes. If there are no pending keystrokes but a /// sticky node is in use, it will be cleared. 
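Jobs::finish in the job.rs hunk above is now an async fn that awaits the remaining wait_futures directly instead of wrapping them in helix_lsp::block_on; its caller (presumably the application shutdown path, which is not part of this excerpt) therefore has to await it:

// Hedged usage sketch of the new signature:
//     jobs.finish().await;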
pub fn get(&mut self, key: KeyEvent) -> KeymapResult { - if let key!(Esc) = key { + if key!(Esc) == key { if !self.state.is_empty() { return KeymapResult::new( // Note that Esc is not included here @@ -477,7 +476,7 @@ impl DerefMut for Keymaps { } impl Default for Keymaps { - fn default() -> Keymaps { + fn default() -> Self { let normal = keymap!({ "Normal mode" "h" | "left" => move_char_left, "j" | "down" => move_line_down, @@ -552,6 +551,11 @@ impl Default for Keymaps { "S" => split_selection, ";" => collapse_selection, "A-;" => flip_selections, + "A-k" => expand_selection, + "A-j" => shrink_selection, + "A-h" => select_prev_sibling, + "A-l" => select_next_sibling, + "%" => select_all, "x" => extend_line, "X" => extend_to_line_bounds, @@ -568,11 +572,17 @@ impl Default for Keymaps { "[" => { "Left bracket" "d" => goto_prev_diag, "D" => goto_first_diag, + "f" => goto_prev_function, + "c" => goto_prev_class, + "p" => goto_prev_parameter, "space" => add_newline_above, }, "]" => { "Right bracket" "d" => goto_next_diag, "D" => goto_last_diag, + "f" => goto_next_function, + "c" => goto_next_class, + "p" => goto_next_parameter, "space" => add_newline_below, }, @@ -655,6 +665,26 @@ impl Default for Keymaps { "S" => workspace_symbol_picker, "a" => code_action, "'" => last_picker, + "d" => { "Debug (experimental)" sticky=true + "l" => dap_launch, + "b" => dap_toggle_breakpoint, + "c" => dap_continue, + "h" => dap_pause, + "i" => dap_step_in, + "o" => dap_step_out, + "n" => dap_next, + "v" => dap_variables, + "t" => dap_terminate, + "C-c" => dap_edit_condition, + "C-l" => dap_edit_log, + "s" => { "Switch" + "t" => dap_switch_thread, + "f" => dap_switch_stack_frame, + // sl, sb + }, + "e" => dap_enable_exceptions, + "E" => dap_disable_exceptions, + }, "w" => { "Window" "C-w" | "w" => rotate_view, "C-s" | "s" => hsplit, @@ -749,8 +779,10 @@ impl Default for Keymaps { "del" => delete_char_forward, "C-d" => delete_char_forward, "ret" => insert_newline, + "C-j" => insert_newline, "tab" => insert_tab, "C-w" => delete_word_backward, + "A-backspace" => delete_word_backward, "A-d" => delete_word_forward, "left" => move_char_left, @@ -765,6 +797,8 @@ impl Default for Keymaps { "A-left" => move_prev_word_end, "A-f" => move_next_word_start, "A-right" => move_next_word_start, + "A-<" => goto_file_start, + "A->" => goto_file_end, "pageup" => page_up, "pagedown" => page_down, "home" => goto_line_start, @@ -778,7 +812,7 @@ impl Default for Keymaps { "C-x" => completion, "C-r" => insert_register, }); - Keymaps(hashmap!( + Self(hashmap!( Mode::Normal => Keymap::new(normal), Mode::Select => Keymap::new(select), Mode::Insert => Keymap::new(insert), diff --git a/helix-term/src/main.rs b/helix-term/src/main.rs index 881401304..0f504046f 100644 --- a/helix-term/src/main.rs +++ b/helix-term/src/main.rs @@ -56,7 +56,7 @@ USAGE: hx [FLAGS] [files]... ARGS: - ... Sets the input file to use + ... 
Sets the input file to use, position can also be specified via file[:row[:col]] FLAGS: -h, --help Prints help information diff --git a/helix-term/src/ui/completion.rs b/helix-term/src/ui/completion.rs index 274330c0e..35afe81e9 100644 --- a/helix-term/src/ui/completion.rs +++ b/helix-term/src/ui/completion.rs @@ -158,7 +158,7 @@ impl Completion { let resolved_additional_text_edits = if item.additional_text_edits.is_some() { None } else { - Completion::resolve_completion_item(doc, item.clone()) + Self::resolve_completion_item(doc, item.clone()) .and_then(|item| item.additional_text_edits) }; @@ -304,6 +304,9 @@ impl Component for Completion { let cursor_pos = doc.selection(view.id).primary().cursor(text); let coords = helix_core::visual_coords_at_pos(text, cursor_pos, doc.tab_width()); let cursor_pos = (coords.row - view.offset.row) as u16; + + let markdown_ui = + |content, syn_loader| Markdown::new(content, syn_loader).style_group("completion"); let mut markdown_doc = match &option.documentation { Some(lsp::Documentation::String(contents)) | Some(lsp::Documentation::MarkupContent(lsp::MarkupContent { @@ -311,7 +314,7 @@ impl Component for Completion { value: contents, })) => { // TODO: convert to wrapped text - Markdown::new( + markdown_ui( format!( "```{}\n{}\n```\n{}", language, @@ -326,7 +329,7 @@ impl Component for Completion { value: contents, })) => { // TODO: set language based on doc scope - Markdown::new( + markdown_ui( format!( "```{}\n{}\n```\n{}", language, @@ -340,7 +343,7 @@ impl Component for Completion { // TODO: copied from above // TODO: set language based on doc scope - Markdown::new( + markdown_ui( format!( "```{}\n{}\n```", language, diff --git a/helix-term/src/ui/editor.rs b/helix-term/src/ui/editor.rs index dded95471..fc749ebb8 100644 --- a/helix-term/src/ui/editor.rs +++ b/helix-term/src/ui/editor.rs @@ -8,7 +8,9 @@ use crate::{ use helix_core::{ coords_at_pos, encoding, - graphemes::{ensure_grapheme_boundary_next, next_grapheme_boundary, prev_grapheme_boundary}, + graphemes::{ + ensure_grapheme_boundary_next_byte, next_grapheme_boundary, prev_grapheme_boundary, + }, movement::Direction, syntax::{self, HighlightEvent}, unicode::segmentation::UnicodeSegmentation, @@ -17,8 +19,8 @@ use helix_core::{ }; use helix_view::{ document::{Mode, SCRATCH_BUFFER_NAME}, + editor::CursorShapeConfig, graphics::{CursorKind, Modifier, Rect, Style}, - info::Info, input::KeyEvent, keyboard::{KeyCode, KeyModifiers}, Document, Editor, Theme, View, @@ -34,7 +36,6 @@ pub struct EditorView { last_insert: (commands::MappableCommand, Vec), pub(crate) completion: Option, spinners: ProgressSpinners, - autoinfo: Option, } impl Default for EditorView { @@ -51,7 +52,6 @@ impl EditorView { last_insert: (commands::MappableCommand::normal_mode, Vec::new()), completion: None, spinners: ProgressSpinners::default(), - autoinfo: None, } } @@ -59,34 +59,66 @@ impl EditorView { &mut self.spinners } - #[allow(clippy::too_many_arguments)] pub fn render_view( &self, + editor: &Editor, doc: &Document, view: &View, viewport: Rect, surface: &mut Surface, - theme: &Theme, is_focused: bool, - loader: &syntax::Loader, - config: &helix_view::editor::Config, ) { let inner = view.inner_area(); let area = view.area; + let theme = &editor.theme; + + // DAP: Highlight current stack frame position + let stack_frame = editor.debugger.as_ref().and_then(|debugger| { + if let (Some(frame), Some(thread_id)) = (debugger.active_frame, debugger.thread_id) { + debugger + .stack_frames + .get(&thread_id) + .and_then(|bt| 
bt.get(frame)) + } else { + None + } + }); + if let Some(frame) = stack_frame { + if doc.path().is_some() + && frame + .source + .as_ref() + .and_then(|source| source.path.as_ref()) + == doc.path() + { + let line = frame.line - 1; // convert to 0-indexing + if line >= view.offset.row && line < view.offset.row + area.height as usize { + surface.set_style( + Rect::new( + area.x, + area.y + (line - view.offset.row) as u16, + area.width, + 1, + ), + theme.get("ui.highlight"), + ); + } + } + } - let highlights = Self::doc_syntax_highlights(doc, view.offset, inner.height, theme, loader); + let highlights = Self::doc_syntax_highlights(doc, view.offset, inner.height, theme); let highlights = syntax::merge(highlights, Self::doc_diagnostics_highlights(doc, theme)); let highlights: Box> = if is_focused { Box::new(syntax::merge( highlights, - Self::doc_selection_highlights(doc, view, theme), + Self::doc_selection_highlights(doc, view, theme, &editor.config.cursor_shape), )) } else { Box::new(highlights) }; Self::render_text_highlights(doc, view.offset, inner, surface, theme, highlights); - Self::render_gutter(doc, view, view.area, surface, theme, is_focused, config); + Self::render_gutter(editor, doc, view, view.area, surface, theme, is_focused); if is_focused { Self::render_focused_view_elements(view, doc, inner, theme, surface); @@ -97,8 +129,7 @@ impl EditorView { let x = area.right(); let border_style = theme.get("ui.window"); for y in area.top()..area.bottom() { - surface - .get_mut(x, y) + surface[(x, y)] .set_symbol(tui::symbols::line::VERTICAL) //.set_symbol(" ") .set_style(border_style); @@ -117,13 +148,11 @@ impl EditorView { /// Get syntax highlights for a document in a view represented by the first line /// and column (`offset`) and the last line. This is done instead of using a view /// directly to enable rendering syntax highlighted docs anywhere (eg. picker preview) - #[allow(clippy::too_many_arguments)] pub fn doc_syntax_highlights<'doc>( doc: &'doc Document, offset: Position, height: u16, - theme: &Theme, - loader: &syntax::Loader, + _theme: &Theme, ) -> Box + 'doc> { let text = doc.text().slice(..); let last_line = std::cmp::min( @@ -140,48 +169,34 @@ impl EditorView { start..end }; - // TODO: range doesn't actually restrict source, just highlight range - let highlights = match doc.syntax() { + match doc.syntax() { Some(syntax) => { - let scopes = theme.scopes(); - syntax - .highlight_iter(text.slice(..), Some(range), None, |language| { - loader.language_configuration_for_injection_string(language) - .and_then(|language_config| { - let config = language_config.highlight_config(scopes)?; - let config_ref = config.as_ref(); - // SAFETY: the referenced `HighlightConfiguration` behind - // the `Arc` is guaranteed to remain valid throughout the - // duration of the highlight. 
- let config_ref = unsafe { - std::mem::transmute::< - _, - &'static syntax::HighlightConfiguration, - >(config_ref) - }; - Some(config_ref) - }) - }) + let iter = syntax + // TODO: range doesn't actually restrict source, just highlight range + .highlight_iter(text.slice(..), Some(range), None) .map(|event| event.unwrap()) - .collect() // TODO: we collect here to avoid holding the lock, fix later + .map(move |event| match event { + // convert byte offsets to char offset + HighlightEvent::Source { start, end } => { + let start = + text.byte_to_char(ensure_grapheme_boundary_next_byte(text, start)); + let end = + text.byte_to_char(ensure_grapheme_boundary_next_byte(text, end)); + HighlightEvent::Source { start, end } + } + event => event, + }); + + Box::new(iter) } - None => vec![HighlightEvent::Source { - start: range.start, - end: range.end, - }], + None => Box::new( + [HighlightEvent::Source { + start: text.byte_to_char(range.start), + end: text.byte_to_char(range.end), + }] + .into_iter(), + ), } - .into_iter() - .map(move |event| match event { - // convert byte offsets to char offset - HighlightEvent::Source { start, end } => { - let start = ensure_grapheme_boundary_next(text, text.byte_to_char(start)); - let end = ensure_grapheme_boundary_next(text, text.byte_to_char(end)); - HighlightEvent::Source { start, end } - } - event => event, - }); - - Box::new(highlights) } /// Get highlight spans for document diagnostics @@ -213,11 +228,16 @@ impl EditorView { doc: &Document, view: &View, theme: &Theme, + cursor_shape_config: &CursorShapeConfig, ) -> Vec<(usize, std::ops::Range)> { let text = doc.text().slice(..); let selection = doc.selection(view.id); let primary_idx = selection.primary_index(); + let mode = doc.mode(); + let cursorkind = cursor_shape_config.from_mode(mode); + let cursor_is_block = cursorkind == CursorKind::Block; + let selection_scope = theme .find_scope_index("ui.selection") .expect("could not find `ui.selection` scope in the theme!"); @@ -225,7 +245,7 @@ impl EditorView { .find_scope_index("ui.cursor") .unwrap_or(selection_scope); - let cursor_scope = match doc.mode() { + let cursor_scope = match mode { Mode::Insert => theme.find_scope_index("ui.cursor.insert"), Mode::Select => theme.find_scope_index("ui.cursor.select"), Mode::Normal => Some(base_cursor_scope), @@ -241,7 +261,8 @@ impl EditorView { let mut spans: Vec<(usize, std::ops::Range)> = Vec::new(); for (i, range) in selection.iter().enumerate() { - let (cursor_scope, selection_scope) = if i == primary_idx { + let selection_is_primary = i == primary_idx; + let (cursor_scope, selection_scope) = if selection_is_primary { (primary_cursor_scope, primary_selection_scope) } else { (cursor_scope, selection_scope) @@ -249,7 +270,14 @@ impl EditorView { // Special-case: cursor at end of the rope. if range.head == range.anchor && range.head == text.len_chars() { - spans.push((cursor_scope, range.head..range.head + 1)); + if !selection_is_primary || cursor_is_block { + // Bar and underline cursors are drawn by the terminal + // BUG: If the editor area loses focus while having a bar or + // underline cursor (eg. when a regex prompt has focus) then + // the primary cursor will be invisible. This doesn't happen + // with block cursors since we manually draw *all* cursors. + spans.push((cursor_scope, range.head..range.head + 1)); + } continue; } @@ -258,11 +286,15 @@ impl EditorView { // Standard case. 
let cursor_start = prev_grapheme_boundary(text, range.head); spans.push((selection_scope, range.anchor..cursor_start)); - spans.push((cursor_scope, cursor_start..range.head)); + if !selection_is_primary || cursor_is_block { + spans.push((cursor_scope, cursor_start..range.head)); + } } else { // Reverse case. let cursor_end = next_grapheme_boundary(text, range.head); - spans.push((cursor_scope, range.head..cursor_end)); + if !selection_is_primary || cursor_is_block { + spans.push((cursor_scope, range.head..cursor_end)); + } spans.push((selection_scope, cursor_end..range.anchor)); } } @@ -288,6 +320,10 @@ impl EditorView { let text_style = theme.get("ui.text"); + // It's slightly more efficient to produce a full RopeSlice from the Rope, then slice that a bunch + // of times than it is to always call Rope::slice/get_slice (it will internally always hit RSEnum::Light). + let text = text.slice(..); + 'outer: for event in highlights { match event { HighlightEvent::HighlightStart(span) => { @@ -393,23 +429,21 @@ impl EditorView { .add_modifier(Modifier::DIM) }); - surface - .get_mut(viewport.x + pos.col as u16, viewport.y + pos.row as u16) + surface[(viewport.x + pos.col as u16, viewport.y + pos.row as u16)] .set_style(style); } } } } - #[allow(clippy::too_many_arguments)] pub fn render_gutter( + editor: &Editor, doc: &Document, view: &View, viewport: Rect, surface: &mut Surface, theme: &Theme, is_focused: bool, - config: &helix_view::editor::Config, ) { let text = doc.text().slice(..); let last_line = view.last_line(doc); @@ -431,7 +465,7 @@ impl EditorView { let mut text = String::with_capacity(8); for (constructor, width) in view.gutters() { - let gutter = constructor(doc, view, theme, config, is_focused, *width); + let gutter = constructor(editor, doc, view, theme, is_focused, *width); text.reserve(*width); // ensure there's enough space for the gutter for (i, line) in (view.offset.row..(last_line + 1)).enumerate() { let selected = cursors.contains(&line); @@ -447,6 +481,7 @@ impl EditorView { } text.clear(); } + offset += *width as u16; } } @@ -506,7 +541,6 @@ impl EditorView { ); } - #[allow(clippy::too_many_arguments)] pub fn render_statusline( &self, doc: &Document, @@ -675,13 +709,13 @@ impl EditorView { cxt: &mut commands::Context, event: KeyEvent, ) -> Option { - self.autoinfo = None; + cxt.editor.autoinfo = None; let key_result = self.keymaps.get_mut(&mode).unwrap().get(event); - self.autoinfo = key_result.sticky.map(|node| node.infobox()); + cxt.editor.autoinfo = key_result.sticky.map(|node| node.infobox()); match &key_result.kind { KeymapResultKind::Matched(command) => command.execute(cxt), - KeymapResultKind::Pending(node) => self.autoinfo = Some(node.infobox()), + KeymapResultKind::Pending(node) => cxt.editor.autoinfo = Some(node.infobox()), KeymapResultKind::MatchedSequence(commands) => { for command in commands { command.execute(cxt); @@ -825,6 +859,31 @@ impl EditorView { return EventResult::Consumed(None); } + let result = editor.tree.views().find_map(|(view, _focus)| { + view.gutter_coords_at_screen_coords(row, column) + .map(|coords| (coords, view.id)) + }); + + if let Some((coords, view_id)) = result { + editor.tree.focus = view_id; + + let view = editor.tree.get(view_id); + let doc = editor.documents.get_mut(&view.doc).unwrap(); + + let path = match doc.path() { + Some(path) => path.clone(), + None => { + return EventResult::Ignored; + } + }; + + let line = coords.row + view.offset.row; + if line < doc.text().len_lines() { + commands::dap_toggle_breakpoint_impl(cxt, path, 
line); + return EventResult::Consumed(None); + } + } + EventResult::Ignored } @@ -900,6 +959,38 @@ impl EditorView { EventResult::Consumed(None) } + MouseEvent { + kind: MouseEventKind::Up(MouseButton::Right), + row, + column, + modifiers, + .. + } => { + let result = cxt.editor.tree.views().find_map(|(view, _focus)| { + view.gutter_coords_at_screen_coords(row, column) + .map(|coords| (coords, view.id)) + }); + + if let Some((coords, view_id)) = result { + cxt.editor.tree.focus = view_id; + + let view = cxt.editor.tree.get(view_id); + let doc = cxt.editor.documents.get_mut(&view.doc).unwrap(); + let line = coords.row + view.offset.row; + if let Ok(pos) = doc.text().try_line_to_char(line) { + doc.set_selection(view_id, Selection::point(pos)); + if modifiers == crossterm::event::KeyModifiers::ALT { + commands::MappableCommand::dap_edit_log.execute(cxt); + } else { + commands::MappableCommand::dap_edit_condition.execute(cxt); + } + + return EventResult::Consumed(None); + } + } + EventResult::Ignored + } + MouseEvent { kind: MouseEventKind::Up(MouseButton::Middle), row, @@ -1032,6 +1123,12 @@ impl Component for EditorView { let (view, doc) = current!(cx.editor); view.ensure_cursor_in_view(doc, cx.editor.config.scrolloff); + // Store a history state if not in insert mode. This also takes care of + // commiting changes when leaving insert mode. + if doc.mode() != Mode::Insert { + doc.append_changes_to_history(view.id); + } + // mode transitions match (mode, doc.mode()) { (Mode::Normal, Mode::Insert) => { @@ -1072,22 +1169,13 @@ impl Component for EditorView { for (view, is_focused) in cx.editor.tree.views() { let doc = cx.editor.document(view.doc).unwrap(); - let loader = &cx.editor.syn_loader; - self.render_view( - doc, - view, - area, - surface, - &cx.editor.theme, - is_focused, - loader, - &cx.editor.config, - ); + self.render_view(cx.editor, doc, view, area, surface, is_focused); } if cx.editor.config.auto_info { - if let Some(ref mut info) = self.autoinfo { + if let Some(mut info) = cx.editor.autoinfo.take() { info.render(area, surface, cx); + cx.editor.autoinfo = Some(info) } } @@ -1158,11 +1246,11 @@ impl Component for EditorView { } fn cursor(&self, _area: Rect, editor: &Editor) -> (Option, CursorKind) { - // match view.doc.mode() { - // Mode::Insert => write!(stdout, "\x1B[6 q"), - // mode => write!(stdout, "\x1B[2 q"), - // }; - editor.cursor() + match editor.cursor() { + // All block cursors are drawn manually + (pos, CursorKind::Block) => (pos, CursorKind::Hidden), + cursor => cursor, + } } } diff --git a/helix-term/src/ui/markdown.rs b/helix-term/src/ui/markdown.rs index 46657fb9a..6a7b641ad 100644 --- a/helix-term/src/ui/markdown.rs +++ b/helix-term/src/ui/markdown.rs @@ -21,6 +21,9 @@ pub struct Markdown { contents: String, config_loader: Arc, + + block_style: String, + heading_style: String, } // TODO: pre-render and self reference via Pin @@ -31,120 +34,137 @@ impl Markdown { Self { contents, config_loader, + block_style: "markup.raw.inline".into(), + heading_style: "markup.heading".into(), } } -} -fn parse<'a>( - contents: &'a str, - theme: Option<&Theme>, - loader: &syntax::Loader, -) -> tui::text::Text<'a> { - // // also 2021-03-04T16:33:58.553 helix_lsp::transport [INFO] <- {"contents":{"kind":"markdown","value":"\n```rust\ncore::num\n```\n\n```rust\npub const fn saturating_sub(self, rhs:Self) ->Self\n```\n\n---\n\n```rust\n```"},"range":{"end":{"character":61,"line":101},"start":{"character":47,"line":101}}} - // let text = 
"\n```rust\ncore::iter::traits::iterator::Iterator\n```\n\n```rust\nfn collect>(self) -> B\nwhere\n Self: Sized,\n```\n\n---\n\nTransforms an iterator into a collection.\n\n`collect()` can take anything iterable, and turn it into a relevant\ncollection. This is one of the more powerful methods in the standard\nlibrary, used in a variety of contexts.\n\nThe most basic pattern in which `collect()` is used is to turn one\ncollection into another. You take a collection, call [`iter`](https://doc.rust-lang.org/nightly/core/iter/traits/iterator/trait.Iterator.html) on it,\ndo a bunch of transformations, and then `collect()` at the end.\n\n`collect()` can also create instances of types that are not typical\ncollections. For example, a [`String`](https://doc.rust-lang.org/nightly/core/iter/std/string/struct.String.html) can be built from [`char`](type@char)s,\nand an iterator of [`Result`](https://doc.rust-lang.org/nightly/core/result/enum.Result.html) items can be collected\ninto `Result, E>`. See the examples below for more.\n\nBecause `collect()` is so general, it can cause problems with type\ninference. As such, `collect()` is one of the few times you'll see\nthe syntax affectionately known as the 'turbofish': `::<>`. This\nhelps the inference algorithm understand specifically which collection\nyou're trying to collect into.\n\n# Examples\n\nBasic usage:\n\n```rust\nlet a = [1, 2, 3];\n\nlet doubled: Vec = a.iter()\n .map(|&x| x * 2)\n .collect();\n\nassert_eq!(vec![2, 4, 6], doubled);\n```\n\nNote that we needed the `: Vec` on the left-hand side. This is because\nwe could collect into, for example, a [`VecDeque`](https://doc.rust-lang.org/nightly/core/iter/std/collections/struct.VecDeque.html) instead:\n\n```rust\nuse std::collections::VecDeque;\n\nlet a = [1, 2, 3];\n\nlet doubled: VecDeque = a.iter().map(|&x| x * 2).collect();\n\nassert_eq!(2, doubled[0]);\nassert_eq!(4, doubled[1]);\nassert_eq!(6, doubled[2]);\n```\n\nUsing the 'turbofish' instead of annotating `doubled`:\n\n```rust\nlet a = [1, 2, 3];\n\nlet doubled = a.iter().map(|x| x * 2).collect::>();\n\nassert_eq!(vec![2, 4, 6], doubled);\n```\n\nBecause `collect()` only cares about what you're collecting into, you can\nstill use a partial type hint, `_`, with the turbofish:\n\n```rust\nlet a = [1, 2, 3];\n\nlet doubled = a.iter().map(|x| x * 2).collect::>();\n\nassert_eq!(vec![2, 4, 6], doubled);\n```\n\nUsing `collect()` to make a [`String`](https://doc.rust-lang.org/nightly/core/iter/std/string/struct.String.html):\n\n```rust\nlet chars = ['g', 'd', 'k', 'k', 'n'];\n\nlet hello: String = chars.iter()\n .map(|&x| x as u8)\n .map(|x| (x + 1) as char)\n .collect();\n\nassert_eq!(\"hello\", hello);\n```\n\nIf you have a list of [`Result`](https://doc.rust-lang.org/nightly/core/result/enum.Result.html)s, you can use `collect()` to\nsee if any of them failed:\n\n```rust\nlet results = [Ok(1), Err(\"nope\"), Ok(3), Err(\"bad\")];\n\nlet result: Result, &str> = results.iter().cloned().collect();\n\n// gives us the first error\nassert_eq!(Err(\"nope\"), result);\n\nlet results = [Ok(1), Ok(3)];\n\nlet result: Result, &str> = results.iter().cloned().collect();\n\n// gives us the list of answers\nassert_eq!(Ok(vec![1, 3]), result);\n```"; - - let mut options = Options::empty(); - options.insert(Options::ENABLE_STRIKETHROUGH); - let parser = Parser::new_ext(contents, options); - - // TODO: if possible, render links as terminal hyperlinks: https://gist.github.com/egmontkob/eb114294efbcd5adb1944c9f3cb5feda - let mut tags = Vec::new(); - let mut 
spans = Vec::new(); - let mut lines = Vec::new(); - - fn to_span(text: pulldown_cmark::CowStr) -> Span { - use std::ops::Deref; - Span::raw::>(match text { - CowStr::Borrowed(s) => s.into(), - CowStr::Boxed(s) => s.to_string().into(), - CowStr::Inlined(s) => s.deref().to_owned().into(), - }) + pub fn style_group(mut self, suffix: &str) -> Self { + self.block_style = format!("markup.raw.inline.{}", suffix); + self.heading_style = format!("markup.heading.{}", suffix); + self } - let text_style = theme.map(|theme| theme.get("ui.text")).unwrap_or_default(); - - // TODO: use better scopes for these, `markup.raw.block`, `markup.heading` - let code_style = theme - .map(|theme| theme.get("ui.text.focus")) - .unwrap_or_default(); // white - let heading_style = theme - .map(|theme| theme.get("ui.linenr.selected")) - .unwrap_or_default(); // lilac - - for event in parser { - match event { - Event::Start(tag) => tags.push(tag), - Event::End(tag) => { - tags.pop(); - match tag { - Tag::Heading(_) | Tag::Paragraph | Tag::CodeBlock(CodeBlockKind::Fenced(_)) => { - // whenever code block or paragraph closes, new line - let spans = std::mem::take(&mut spans); - if !spans.is_empty() { - lines.push(Spans::from(spans)); + fn parse(&self, theme: Option<&Theme>) -> tui::text::Text<'_> { + // // also 2021-03-04T16:33:58.553 helix_lsp::transport [INFO] <- {"contents":{"kind":"markdown","value":"\n```rust\ncore::num\n```\n\n```rust\npub const fn saturating_sub(self, rhs:Self) ->Self\n```\n\n---\n\n```rust\n```"},"range":{"end":{"character":61,"line":101},"start":{"character":47,"line":101}}} + // let text = "\n```rust\ncore::iter::traits::iterator::Iterator\n```\n\n```rust\nfn collect>(self) -> B\nwhere\n Self: Sized,\n```\n\n---\n\nTransforms an iterator into a collection.\n\n`collect()` can take anything iterable, and turn it into a relevant\ncollection. This is one of the more powerful methods in the standard\nlibrary, used in a variety of contexts.\n\nThe most basic pattern in which `collect()` is used is to turn one\ncollection into another. You take a collection, call [`iter`](https://doc.rust-lang.org/nightly/core/iter/traits/iterator/trait.Iterator.html) on it,\ndo a bunch of transformations, and then `collect()` at the end.\n\n`collect()` can also create instances of types that are not typical\ncollections. For example, a [`String`](https://doc.rust-lang.org/nightly/core/iter/std/string/struct.String.html) can be built from [`char`](type@char)s,\nand an iterator of [`Result`](https://doc.rust-lang.org/nightly/core/result/enum.Result.html) items can be collected\ninto `Result, E>`. See the examples below for more.\n\nBecause `collect()` is so general, it can cause problems with type\ninference. As such, `collect()` is one of the few times you'll see\nthe syntax affectionately known as the 'turbofish': `::<>`. This\nhelps the inference algorithm understand specifically which collection\nyou're trying to collect into.\n\n# Examples\n\nBasic usage:\n\n```rust\nlet a = [1, 2, 3];\n\nlet doubled: Vec = a.iter()\n .map(|&x| x * 2)\n .collect();\n\nassert_eq!(vec![2, 4, 6], doubled);\n```\n\nNote that we needed the `: Vec` on the left-hand side. 
This is because\nwe could collect into, for example, a [`VecDeque`](https://doc.rust-lang.org/nightly/core/iter/std/collections/struct.VecDeque.html) instead:\n\n```rust\nuse std::collections::VecDeque;\n\nlet a = [1, 2, 3];\n\nlet doubled: VecDeque = a.iter().map(|&x| x * 2).collect();\n\nassert_eq!(2, doubled[0]);\nassert_eq!(4, doubled[1]);\nassert_eq!(6, doubled[2]);\n```\n\nUsing the 'turbofish' instead of annotating `doubled`:\n\n```rust\nlet a = [1, 2, 3];\n\nlet doubled = a.iter().map(|x| x * 2).collect::>();\n\nassert_eq!(vec![2, 4, 6], doubled);\n```\n\nBecause `collect()` only cares about what you're collecting into, you can\nstill use a partial type hint, `_`, with the turbofish:\n\n```rust\nlet a = [1, 2, 3];\n\nlet doubled = a.iter().map(|x| x * 2).collect::>();\n\nassert_eq!(vec![2, 4, 6], doubled);\n```\n\nUsing `collect()` to make a [`String`](https://doc.rust-lang.org/nightly/core/iter/std/string/struct.String.html):\n\n```rust\nlet chars = ['g', 'd', 'k', 'k', 'n'];\n\nlet hello: String = chars.iter()\n .map(|&x| x as u8)\n .map(|x| (x + 1) as char)\n .collect();\n\nassert_eq!(\"hello\", hello);\n```\n\nIf you have a list of [`Result`](https://doc.rust-lang.org/nightly/core/result/enum.Result.html)s, you can use `collect()` to\nsee if any of them failed:\n\n```rust\nlet results = [Ok(1), Err(\"nope\"), Ok(3), Err(\"bad\")];\n\nlet result: Result, &str> = results.iter().cloned().collect();\n\n// gives us the first error\nassert_eq!(Err(\"nope\"), result);\n\nlet results = [Ok(1), Ok(3)];\n\nlet result: Result, &str> = results.iter().cloned().collect();\n\n// gives us the list of answers\nassert_eq!(Ok(vec![1, 3]), result);\n```"; + + let mut options = Options::empty(); + options.insert(Options::ENABLE_STRIKETHROUGH); + let parser = Parser::new_ext(&self.contents, options); + + // TODO: if possible, render links as terminal hyperlinks: https://gist.github.com/egmontkob/eb114294efbcd5adb1944c9f3cb5feda + let mut tags = Vec::new(); + let mut spans = Vec::new(); + let mut lines = Vec::new(); + + fn to_span(text: pulldown_cmark::CowStr) -> Span { + use std::ops::Deref; + Span::raw::>(match text { + CowStr::Borrowed(s) => s.into(), + CowStr::Boxed(s) => s.to_string().into(), + CowStr::Inlined(s) => s.deref().to_owned().into(), + }) + } + + macro_rules! 
get_theme { + ($s1: expr) => { + theme + .map(|theme| theme.try_get($s1.as_str())) + .flatten() + .unwrap_or_default() + }; + } + let text_style = theme.map(|theme| theme.get("ui.text")).unwrap_or_default(); + let code_style = get_theme!(self.block_style); + let heading_style = get_theme!(self.heading_style); + + for event in parser { + match event { + Event::Start(tag) => tags.push(tag), + Event::End(tag) => { + tags.pop(); + match tag { + Tag::Heading(_, _, _) + | Tag::Paragraph + | Tag::CodeBlock(CodeBlockKind::Fenced(_)) => { + // whenever code block or paragraph closes, new line + let spans = std::mem::take(&mut spans); + if !spans.is_empty() { + lines.push(Spans::from(spans)); + } + lines.push(Spans::default()); } - lines.push(Spans::default()); + _ => (), } - _ => (), } - } - Event::Text(text) => { - // TODO: temp workaround - if let Some(Tag::CodeBlock(CodeBlockKind::Fenced(language))) = tags.last() { - if let Some(theme) = theme { - let rope = Rope::from(text.as_ref()); - let syntax = loader - .language_configuration_for_injection_string(language) - .and_then(|config| config.highlight_config(theme.scopes())) - .map(|config| Syntax::new(&rope, config)); - - if let Some(syntax) = syntax { - // if we have a syntax available, highlight_iter and generate spans - let mut highlights = Vec::new(); - - for event in syntax.highlight_iter(rope.slice(..), None, None, |_| None) - { - match event.unwrap() { - HighlightEvent::HighlightStart(span) => { - highlights.push(span); - } - HighlightEvent::HighlightEnd => { - highlights.pop(); - } - HighlightEvent::Source { start, end } => { - let style = match highlights.first() { - Some(span) => theme.get(&theme.scopes()[span.0]), - None => text_style, - }; - - // TODO: replace tabs with indentation - - let mut slice = &text[start..end]; - // TODO: do we need to handle all unicode line endings - // here, or is just '\n' okay? - while let Some(end) = slice.find('\n') { - // emit span up to newline - let text = &slice[..end]; - let text = text.replace('\t', " "); // replace tabs - let span = Span::styled(text, style); - spans.push(span); - - // truncate slice to after newline - slice = &slice[end + 1..]; - - // make a new line - let spans = std::mem::take(&mut spans); - lines.push(Spans::from(spans)); + Event::Text(text) => { + // TODO: temp workaround + if let Some(Tag::CodeBlock(CodeBlockKind::Fenced(language))) = tags.last() { + if let Some(theme) = theme { + let rope = Rope::from(text.as_ref()); + let syntax = self + .config_loader + .language_configuration_for_injection_string(language) + .and_then(|config| config.highlight_config(theme.scopes())) + .map(|config| { + Syntax::new(&rope, config, self.config_loader.clone()) + }); + + if let Some(syntax) = syntax { + // if we have a syntax available, highlight_iter and generate spans + let mut highlights = Vec::new(); + + for event in syntax.highlight_iter(rope.slice(..), None, None) { + match event.unwrap() { + HighlightEvent::HighlightStart(span) => { + highlights.push(span); } + HighlightEvent::HighlightEnd => { + highlights.pop(); + } + HighlightEvent::Source { start, end } => { + let style = match highlights.first() { + Some(span) => theme.get(&theme.scopes()[span.0]), + None => text_style, + }; + + // TODO: replace tabs with indentation + + let mut slice = &text[start..end]; + // TODO: do we need to handle all unicode line endings + // here, or is just '\n' okay? 
+ while let Some(end) = slice.find('\n') { + // emit span up to newline + let text = &slice[..end]; + let text = text.replace('\t', " "); // replace tabs + let span = Span::styled(text, style); + spans.push(span); + + // truncate slice to after newline + slice = &slice[end + 1..]; + + // make a new line + let spans = std::mem::take(&mut spans); + lines.push(Spans::from(spans)); + } - // if there's anything left, emit it too - if !slice.is_empty() { - let span = - Span::styled(slice.replace('\t', " "), style); - spans.push(span); + // if there's anything left, emit it too + if !slice.is_empty() { + let span = Span::styled( + slice.replace('\t', " "), + style, + ); + spans.push(span); + } } } } + } else { + for line in text.lines() { + let span = Span::styled(line.to_string(), code_style); + lines.push(Spans::from(span)); + } } } else { for line in text.lines() { @@ -152,64 +172,60 @@ fn parse<'a>( lines.push(Spans::from(span)); } } + } else if let Some(Tag::Heading(_, _, _)) = tags.last() { + let mut span = to_span(text); + span.style = heading_style; + spans.push(span); } else { - for line in text.lines() { - let span = Span::styled(line.to_string(), code_style); - lines.push(Spans::from(span)); - } + let mut span = to_span(text); + span.style = text_style; + spans.push(span); } - } else if let Some(Tag::Heading(_)) = tags.last() { - let mut span = to_span(text); - span.style = heading_style; - spans.push(span); - } else { + } + Event::Code(text) | Event::Html(text) => { let mut span = to_span(text); - span.style = text_style; + span.style = code_style; spans.push(span); } + Event::SoftBreak | Event::HardBreak => { + // let spans = std::mem::replace(&mut spans, Vec::new()); + // lines.push(Spans::from(spans)); + spans.push(Span::raw(" ")); + } + Event::Rule => { + let mut span = Span::raw("---"); + span.style = code_style; + lines.push(Spans::from(span)); + lines.push(Spans::default()); + } + // TaskListMarker(bool) true if checked + _ => { + log::warn!("unhandled markdown event {:?}", event); + } } - Event::Code(text) | Event::Html(text) => { - let mut span = to_span(text); - span.style = code_style; - spans.push(span); - } - Event::SoftBreak | Event::HardBreak => { - // let spans = std::mem::replace(&mut spans, Vec::new()); - // lines.push(Spans::from(spans)); - spans.push(Span::raw(" ")); - } - Event::Rule => { - let mut span = Span::raw("---"); - span.style = code_style; - lines.push(Spans::from(span)); - lines.push(Spans::default()); - } - // TaskListMarker(bool) true if checked - _ => { - log::warn!("unhandled markdown event {:?}", event); - } + // build up a vec of Paragraph tui widgets } - // build up a vec of Paragraph tui widgets - } - if !spans.is_empty() { - lines.push(Spans::from(spans)); - } + if !spans.is_empty() { + lines.push(Spans::from(spans)); + } - // if last line is empty, remove it - if let Some(line) = lines.last() { - if line.0.is_empty() { - lines.pop(); + // if last line is empty, remove it + if let Some(line) = lines.last() { + if line.0.is_empty() { + lines.pop(); + } } - } - Text::from(lines) + Text::from(lines) + } } + impl Component for Markdown { fn render(&mut self, area: Rect, surface: &mut Surface, cx: &mut Context) { use tui::widgets::{Paragraph, Widget, Wrap}; - let text = parse(&self.contents, Some(&cx.editor.theme), &self.config_loader); + let text = self.parse(Some(&cx.editor.theme)); let par = Paragraph::new(text) .wrap(Wrap { trim: false }) @@ -227,7 +243,8 @@ impl Component for Markdown { if padding >= viewport.1 || padding >= viewport.0 { return 
None; } - let contents = parse(&self.contents, None, &self.config_loader); + let contents = self.parse(None); + // TODO: account for tab width let max_text_width = (viewport.0 - padding).min(120); let mut text_width = 0; diff --git a/helix-term/src/ui/menu.rs b/helix-term/src/ui/menu.rs index 69053db3e..f9a0438c5 100644 --- a/helix-term/src/ui/menu.rs +++ b/helix-term/src/ui/menu.rs @@ -14,11 +14,18 @@ use helix_view::{graphics::Rect, Editor}; use tui::layout::Constraint; pub trait Item { - fn sort_text(&self) -> &str; - fn filter_text(&self) -> &str; - fn label(&self) -> &str; - fn row(&self) -> Row; + + fn sort_text(&self) -> &str { + self.label() + } + fn filter_text(&self) -> &str { + self.label() + } + + fn row(&self) -> Row { + Row::new(vec![Cell::from(self.label())]) + } } pub struct Menu { @@ -132,7 +139,17 @@ impl Menu { acc }); - let len = max_lens.iter().sum::() + n + 1; // +1: reserve some space for scrollbar + + let height = self.matches.len().min(10).min(viewport.1 as usize); + // do all the matches fit on a single screen? + let fits = self.matches.len() <= height; + + let mut len = max_lens.iter().sum::() + n; + + if !fits { + len += 1; // +1: reserve some space for scrollbar + } + let width = len.min(viewport.0 as usize); self.widths = max_lens @@ -140,8 +157,6 @@ impl Menu { .map(|len| Constraint::Length(len as u16)) .collect(); - let height = self.matches.len().min(10).min(viewport.1 as usize); - self.size = (width as u16, height as u16); // adjust scroll offsets if size changed @@ -297,12 +312,14 @@ impl Component for Menu { }, ); + let fits = len <= win_height; + for (i, _) in (scroll..(scroll + win_height).min(len)).enumerate() { let is_marked = i >= scroll_line && i < scroll_line + scroll_height; - if is_marked { - let cell = surface.get_mut(area.x + area.width - 2, area.y + i as u16); - cell.set_symbol("▐ "); + if !fits && is_marked { + let cell = &mut surface[(area.x + area.width - 2, area.y + i as u16)]; + cell.set_symbol("▐"); // cell.set_style(selected); // cell.set_style(if is_marked { selected } else { style }); } diff --git a/helix-term/src/ui/mod.rs b/helix-term/src/ui/mod.rs index 8743054c2..082a25639 100644 --- a/helix-term/src/ui/mod.rs +++ b/helix-term/src/ui/mod.rs @@ -2,7 +2,8 @@ mod completion; pub(crate) mod editor; mod info; mod markdown; -mod menu; +pub mod menu; +pub mod overlay; mod picker; mod popup; mod prompt; @@ -21,7 +22,7 @@ pub use text::Text; use helix_core::regex::Regex; use helix_core::regex::RegexBuilder; -use helix_view::{Document, Editor, View}; +use helix_view::{Document, View}; use std::path::PathBuf; @@ -65,7 +66,7 @@ pub fn regex_prompt( return; } - let case_insensitive = if cx.editor.config.smart_case { + let case_insensitive = if cx.editor.config.search.smart_case { !input.chars().any(char::is_uppercase) } else { false @@ -155,14 +156,10 @@ pub fn file_picker(root: PathBuf, config: &helix_view::editor::Config) -> FilePi files, move |path: &PathBuf| { // format_fn - path.strip_prefix(&root) - .unwrap_or(path) - .to_str() - .unwrap() - .into() + path.strip_prefix(&root).unwrap_or(path).to_string_lossy() }, - move |editor: &mut Editor, path: &PathBuf, action| { - editor + move |cx, path: &PathBuf, action| { + cx.editor .open(path.into(), action) .expect("editor.open failed"); }, @@ -327,12 +324,12 @@ pub mod completers { let is_tilde = input.starts_with('~') && input.len() == 1; let path = helix_core::path::expand_tilde(Path::new(input)); - let (dir, file_name) = if input.ends_with('/') { + let (dir, file_name) = if 
input.ends_with(std::path::MAIN_SEPARATOR) { (path, None) } else { let file_name = path .file_name() - .map(|file| file.to_str().unwrap().to_owned()); + .and_then(|file| file.to_str().map(|path| path.to_owned())); let path = match path.parent() { Some(path) if !path.as_os_str().is_empty() => path.to_path_buf(), @@ -357,7 +354,7 @@ pub mod completers { return None; } - let is_dir = entry.file_type().map_or(false, |entry| entry.is_dir()); + //let is_dir = entry.file_type().map_or(false, |entry| entry.is_dir()); let path = entry.path(); let mut path = if is_tilde { @@ -375,12 +372,7 @@ pub mod completers { path.push(""); } - let path = if cfg!(windows) && is_dir { - // Convert Windows style path separator to Unix style - path.to_str().unwrap().replace("\\", "/") - } else { - path.to_str().unwrap().to_owned() - }; + let path = path.to_str()?.to_owned(); Some((end.clone(), Cow::from(path))) }) }) // TODO: unwrap or skip diff --git a/helix-term/src/ui/overlay.rs b/helix-term/src/ui/overlay.rs new file mode 100644 index 000000000..9f522e355 --- /dev/null +++ b/helix-term/src/ui/overlay.rs @@ -0,0 +1,73 @@ +use crossterm::event::Event; +use helix_core::Position; +use helix_view::{ + graphics::{CursorKind, Rect}, + Editor, +}; +use tui::buffer::Buffer; + +use crate::compositor::{Component, Context, EventResult}; + +/// Contains a component placed in the center of the parent component +pub struct Overlay { + /// Child component + pub content: T, + /// Function to compute the size and position of the child component + pub calc_child_size: Box Rect>, +} + +/// Surrounds the component with a margin of 5% on each side, and an additional 2 rows at the bottom +pub fn overlayed(content: T) -> Overlay { + Overlay { + content, + calc_child_size: Box::new(|rect: Rect| clip_rect_relative(rect.clip_bottom(2), 90, 90)), + } +} + +fn clip_rect_relative(rect: Rect, percent_horizontal: u8, percent_vertical: u8) -> Rect { + fn mul_and_cast(size: u16, factor: u8) -> u16 { + ((size as u32) * (factor as u32) / 100).try_into().unwrap() + } + + let inner_w = mul_and_cast(rect.width, percent_horizontal); + let inner_h = mul_and_cast(rect.height, percent_vertical); + + let offset_x = rect.width.saturating_sub(inner_w) / 2; + let offset_y = rect.height.saturating_sub(inner_h) / 2; + + Rect { + x: rect.x + offset_x, + y: rect.y + offset_y, + width: inner_w, + height: inner_h, + } +} + +impl Component for Overlay { + fn render(&mut self, area: Rect, frame: &mut Buffer, ctx: &mut Context) { + let dimensions = (self.calc_child_size)(area); + self.content.render(dimensions, frame, ctx) + } + + fn required_size(&mut self, (width, height): (u16, u16)) -> Option<(u16, u16)> { + let area = Rect { + x: 0, + y: 0, + width, + height, + }; + let dimensions = (self.calc_child_size)(area); + let viewport = (dimensions.width, dimensions.height); + let _ = self.content.required_size(viewport)?; + Some((width, height)) + } + + fn handle_event(&mut self, event: Event, ctx: &mut Context) -> EventResult { + self.content.handle_event(event, ctx) + } + + fn cursor(&self, area: Rect, ctx: &Editor) -> (Option, CursorKind) { + let dimensions = (self.calc_child_size)(area); + self.content.cursor(dimensions, ctx) + } +} diff --git a/helix-term/src/ui/picker.rs b/helix-term/src/ui/picker.rs index 1ef94df01..9cddbc607 100644 --- a/helix-term/src/ui/picker.rs +++ b/helix-term/src/ui/picker.rs @@ -21,14 +21,14 @@ use std::{ }; use crate::ui::{Prompt, PromptEvent}; -use helix_core::Position; +use helix_core::{movement::Direction, Position}; use 
helix_view::{ editor::Action, graphics::{Color, CursorKind, Margin, Rect, Style}, Document, Editor, }; -pub const MIN_SCREEN_WIDTH_FOR_PREVIEW: u16 = 80; +pub const MIN_AREA_WIDTH_FOR_PREVIEW: u16 = 72; /// Biggest file size to preview in bytes pub const MAX_FILE_SIZE_FOR_PREVIEW: u64 = 10 * 1024 * 1024; @@ -86,11 +86,11 @@ impl FilePicker { pub fn new( options: Vec, format_fn: impl Fn(&T) -> Cow + 'static, - callback_fn: impl Fn(&mut Editor, &T, Action) + 'static, + callback_fn: impl Fn(&mut Context, &T, Action) + 'static, preview_fn: impl Fn(&Editor, &T) -> Option + 'static, ) -> Self { Self { - picker: Picker::new(false, options, format_fn, callback_fn), + picker: Picker::new(options, format_fn, callback_fn), truncate_start: true, preview_cache: HashMap::new(), read_buffer: Vec::with_capacity(1024), @@ -139,7 +139,7 @@ impl FilePicker { (size, _) if size > MAX_FILE_SIZE_FOR_PREVIEW => CachedPreview::LargeFile, _ => { // TODO: enable syntax highlighting; blocked by async rendering - Document::open(path, None, Some(&editor.theme), None) + Document::open(path, None, None) .map(|doc| CachedPreview::Document(Box::new(doc))) .unwrap_or(CachedPreview::NotFound) } @@ -159,8 +159,8 @@ impl Component for FilePicker { // |picker | | | // | | | | // +---------+ +---------+ - let render_preview = area.width > MIN_SCREEN_WIDTH_FOR_PREVIEW; - let area = inner_rect(area); + + let render_preview = area.width > MIN_AREA_WIDTH_FOR_PREVIEW; // -- Render the frame: // clear area let background = cx.editor.theme.get("ui.background"); @@ -220,13 +220,8 @@ impl Component for FilePicker { let offset = Position::new(first_line, 0); - let highlights = EditorView::doc_syntax_highlights( - doc, - offset, - area.height, - &cx.editor.theme, - &cx.editor.syn_loader, - ); + let highlights = + EditorView::doc_syntax_highlights(doc, offset, area.height, &cx.editor.theme); EditorView::render_text_highlights( doc, offset, @@ -264,6 +259,16 @@ impl Component for FilePicker { fn cursor(&self, area: Rect, ctx: &Editor) -> (Option, CursorKind) { self.picker.cursor(area, ctx) } + + fn required_size(&mut self, (width, height): (u16, u16)) -> Option<(u16, u16)> { + let picker_width = if width > MIN_AREA_WIDTH_FOR_PREVIEW { + width / 2 + } else { + width + }; + self.picker.required_size((picker_width, height))?; + Some((width, height)) + } } pub struct Picker { @@ -275,24 +280,24 @@ pub struct Picker { /// Filter over original options. 
filters: Vec, // could be optimized into bit but not worth it now + /// Current height of the completions box + completion_height: u16, + cursor: usize, // pattern: String, prompt: Prompt, - /// Whether to render in the middle of the area - render_centered: bool, /// Wheather to truncate the start (default true) pub truncate_start: bool, format_fn: Box Cow>, - callback_fn: Box, + callback_fn: Box, } impl Picker { pub fn new( - render_centered: bool, options: Vec, format_fn: impl Fn(&T) -> Cow + 'static, - callback_fn: impl Fn(&mut Editor, &T, Action) + 'static, + callback_fn: impl Fn(&mut Context, &T, Action) + 'static, ) -> Self { let prompt = Prompt::new( "".into(), @@ -310,10 +315,10 @@ impl Picker { filters: Vec::new(), cursor: 0, prompt, - render_centered, truncate_start: true, format_fn: Box::new(format_fn), callback_fn: Box::new(callback_fn), + completion_height: 0, }; // TODO: scoring on empty input should just use a fastpath @@ -350,22 +355,38 @@ impl Picker { self.cursor = 0; } - pub fn move_up(&mut self) { - if self.matches.is_empty() { - return; - } + /// Move the cursor by a number of lines, either down (`Forward`) or up (`Backward`) + pub fn move_by(&mut self, amount: usize, direction: Direction) { let len = self.matches.len(); - let pos = ((self.cursor + len.saturating_sub(1)) % len) % len; - self.cursor = pos; - } - pub fn move_down(&mut self) { - if self.matches.is_empty() { - return; + match direction { + Direction::Forward => { + self.cursor = self.cursor.saturating_add(amount) % len; + } + Direction::Backward => { + self.cursor = self.cursor.saturating_add(len).saturating_sub(amount) % len; + } } - let len = self.matches.len(); - let pos = (self.cursor + 1) % len; - self.cursor = pos; + } + + /// Move the cursor down by exactly one page. After the last page comes the first page. + pub fn page_up(&mut self) { + self.move_by(self.completion_height as usize, Direction::Backward); + } + + /// Move the cursor up by exactly one page. After the first page comes the last page. 
+ pub fn page_down(&mut self) { + self.move_by(self.completion_height as usize, Direction::Forward); + } + + /// Move the cursor to the first entry + pub fn to_start(&mut self) { + self.cursor = 0; + } + + /// Move the cursor to the last entry + pub fn to_end(&mut self) { + self.cursor = self.matches.len().saturating_sub(1); } pub fn selection(&self) -> Option<&T> { @@ -388,15 +409,12 @@ impl Picker { // - on input change: // - score all the names in relation to input -fn inner_rect(area: Rect) -> Rect { - let margin = Margin { - vertical: area.height * 10 / 100, - horizontal: area.width * 10 / 100, - }; - area.inner(&margin) -} - impl Component for Picker { + fn required_size(&mut self, viewport: (u16, u16)) -> Option<(u16, u16)> { + self.completion_height = viewport.1.saturating_sub(4); + Some(viewport) + } + fn handle_event(&mut self, event: Event, cx: &mut Context) -> EventResult { let key_event = match event { Event::Key(event) => event, @@ -411,29 +429,41 @@ impl Component for Picker { match key_event.into() { shift!(Tab) | key!(Up) | ctrl!('p') | ctrl!('k') => { - self.move_up(); + self.move_by(1, Direction::Backward); } key!(Tab) | key!(Down) | ctrl!('n') | ctrl!('j') => { - self.move_down(); + self.move_by(1, Direction::Forward); + } + key!(PageDown) | ctrl!('f') => { + self.page_down(); + } + key!(PageUp) | ctrl!('b') => { + self.page_up(); + } + key!(Home) => { + self.to_start(); + } + key!(End) => { + self.to_end(); } key!(Esc) | ctrl!('c') => { return close_fn; } key!(Enter) => { if let Some(option) = self.selection() { - (self.callback_fn)(cx.editor, option, Action::Replace); + (self.callback_fn)(cx, option, Action::Replace); } return close_fn; } ctrl!('s') => { if let Some(option) = self.selection() { - (self.callback_fn)(cx.editor, option, Action::HorizontalSplit); + (self.callback_fn)(cx, option, Action::HorizontalSplit); } return close_fn; } ctrl!('v') => { if let Some(option) = self.selection() { - (self.callback_fn)(cx.editor, option, Action::VerticalSplit); + (self.callback_fn)(cx, option, Action::VerticalSplit); } return close_fn; } @@ -452,12 +482,6 @@ impl Component for Picker { } fn render(&mut self, area: Rect, surface: &mut Surface, cx: &mut Context) { - let area = if self.render_centered { - inner_rect(area) - } else { - area - }; - let text_style = cx.editor.theme.get("ui.text"); // -- Render the frame: @@ -492,10 +516,9 @@ impl Component for Picker { let sep_style = Style::default().fg(Color::Rgb(90, 89, 119)); let borders = BorderType::line_symbols(BorderType::Plain); for x in inner.left()..inner.right() { - surface - .get_mut(x, inner.y + 1) - .set_symbol(borders.horizontal) - .set_style(sep_style); + if let Some(cell) = surface.get_mut(x, inner.y + 1) { + cell.set_symbol(borders.horizontal).set_style(sep_style); + } } // -- Render the contents: @@ -505,7 +528,7 @@ impl Component for Picker { let selected = cx.editor.theme.get("ui.text.focus"); let rows = inner.height; - let offset = self.cursor / (rows as usize) * (rows as usize); + let offset = self.cursor - (self.cursor % std::cmp::max(1, rows as usize)); let files = self.matches.iter().skip(offset).map(|(index, _score)| { (index, self.options.get(*index).unwrap()) // get_unchecked @@ -513,7 +536,7 @@ impl Component for Picker { for (i, (_index, option)) in files.take(rows as usize).enumerate() { if i == (self.cursor - offset) { - surface.set_string(inner.x - 2, inner.y + i as u16, ">", selected); + surface.set_string(inner.x.saturating_sub(2), inner.y + i as u16, ">", selected); } 
surface.set_string_truncated( @@ -533,8 +556,6 @@ impl Component for Picker { } fn cursor(&self, area: Rect, editor: &Editor) -> (Option, CursorKind) { - // TODO: this is mostly duplicate code - let area = inner_rect(area); let block = Block::default().borders(Borders::ALL); // calculate the inner area inside the box let inner = block.inner(area); diff --git a/helix-term/src/ui/popup.rs b/helix-term/src/ui/popup.rs index bf7510a25..4d319423a 100644 --- a/helix-term/src/ui/popup.rs +++ b/helix-term/src/ui/popup.rs @@ -6,7 +6,7 @@ use crossterm::event::Event; use tui::buffer::Buffer as Surface; use helix_core::Position; -use helix_view::graphics::Rect; +use helix_view::graphics::{Margin, Rect}; // TODO: share logic with Menu, it's essentially Popup(render_fn), but render fn needs to return // a width/height hint. maybe Popup(Box) @@ -14,6 +14,7 @@ use helix_view::graphics::Rect; pub struct Popup { contents: T, position: Option, + margin: Margin, size: (u16, u16), child_size: (u16, u16), scroll: usize, @@ -25,6 +26,10 @@ impl Popup { Self { contents, position: None, + margin: Margin { + vertical: 0, + horizontal: 0, + }, size: (0, 0), child_size: (0, 0), scroll: 0, @@ -36,6 +41,11 @@ impl Popup { self.position = pos; } + pub fn margin(mut self, margin: Margin) -> Self { + self.margin = margin; + self + } + pub fn get_rel_position(&mut self, viewport: Rect, cx: &Context) -> (u16, u16) { let position = self .position @@ -126,13 +136,18 @@ impl Component for Popup { let max_width = 120.min(viewport.0); let max_height = 26.min(viewport.1.saturating_sub(2)); // add some spacing in the viewport + let inner = Rect::new(0, 0, max_width, max_height).inner(&self.margin); + let (width, height) = self .contents - .required_size((max_width, max_height)) + .required_size((inner.width, inner.height)) .expect("Component needs required_size implemented in order to be embedded in a popup"); self.child_size = (width, height); - self.size = (width.min(max_width), height.min(max_height)); + self.size = ( + (width + self.margin.horizontal * 2).min(max_width), + (height + self.margin.vertical * 2).min(max_height), + ); // re-clamp scroll offset let max_offset = self.child_size.1.saturating_sub(self.size.1); @@ -156,7 +171,8 @@ impl Component for Popup { let background = cx.editor.theme.get("ui.popup"); surface.clear_with(area, background); - self.contents.render(area, surface, cx); + let inner = area.inner(&self.margin); + self.contents.render(inner, surface, cx); } fn id(&self) -> Option<&'static str> { diff --git a/helix-term/src/ui/prompt.rs b/helix-term/src/ui/prompt.rs index 29e0339ae..4c4fef268 100644 --- a/helix-term/src/ui/prompt.rs +++ b/helix-term/src/ui/prompt.rs @@ -330,7 +330,7 @@ impl Prompt { .max(BASE_WIDTH); let cols = std::cmp::max(1, area.width / max_len); - let col_width = (area.width - (cols)) / cols; + let col_width = (area.width.saturating_sub(cols)) / cols; let height = ((self.completion.len() as u16 + cols - 1) / cols) .min(10) // at most 10 rows (or less) @@ -473,7 +473,7 @@ impl Component for Prompt { } } key!(Enter) => { - if self.selection.is_some() && self.line.ends_with('/') { + if self.selection.is_some() && self.line.ends_with(std::path::MAIN_SEPARATOR) { self.completion = (self.completion_fn)(&self.line); self.exit_selection(); } else { diff --git a/helix-term/src/ui/spinner.rs b/helix-term/src/ui/spinner.rs index e8a43b48d..68965469d 100644 --- a/helix-term/src/ui/spinner.rs +++ b/helix-term/src/ui/spinner.rs @@ -1,4 +1,4 @@ -use std::{collections::HashMap, time::SystemTime}; +use 
std::{collections::HashMap, time::Instant}; #[derive(Default, Debug)] pub struct ProgressSpinners { @@ -25,7 +25,7 @@ impl Default for Spinner { pub struct Spinner { frames: Vec<&'static str>, count: usize, - start: Option, + start: Option, interval: u64, } @@ -50,14 +50,13 @@ impl Spinner { } pub fn start(&mut self) { - self.start = Some(SystemTime::now()); + self.start = Some(Instant::now()); } pub fn frame(&self) -> Option<&str> { let idx = (self .start - .map(|time| SystemTime::now().duration_since(time))? - .ok()? + .map(|time| Instant::now().duration_since(time))? .as_millis() / self.interval as u128) as usize % self.count; diff --git a/helix-term/src/ui/text.rs b/helix-term/src/ui/text.rs index 4641fae1f..caece049c 100644 --- a/helix-term/src/ui/text.rs +++ b/helix-term/src/ui/text.rs @@ -4,13 +4,23 @@ use tui::buffer::Buffer as Surface; use helix_view::graphics::Rect; pub struct Text { - contents: String, + contents: tui::text::Text<'static>, size: (u16, u16), viewport: (u16, u16), } impl Text { pub fn new(contents: String) -> Self { + Self { + contents: tui::text::Text::from(contents), + size: (0, 0), + viewport: (0, 0), + } + } +} + +impl From> for Text { + fn from(contents: tui::text::Text<'static>) -> Self { Self { contents, size: (0, 0), @@ -18,12 +28,12 @@ impl Text { } } } + impl Component for Text { fn render(&mut self, area: Rect, surface: &mut Surface, _cx: &mut Context) { use tui::widgets::{Paragraph, Widget, Wrap}; - let contents = tui::text::Text::from(self.contents.clone()); - let par = Paragraph::new(contents).wrap(Wrap { trim: false }); + let par = Paragraph::new(self.contents.clone()).wrap(Wrap { trim: false }); // .scroll(x, y) offsets par.render(area, surface); @@ -31,9 +41,8 @@ impl Component for Text { fn required_size(&mut self, viewport: (u16, u16)) -> Option<(u16, u16)> { if viewport != self.viewport { - let contents = tui::text::Text::from(self.contents.clone()); - let width = std::cmp::min(contents.width() as u16, viewport.0); - let height = std::cmp::min(contents.height() as u16, viewport.1); + let width = std::cmp::min(self.contents.width() as u16, viewport.0); + let height = std::cmp::min(self.contents.height() as u16, viewport.1); self.size = (width, height); self.viewport = viewport; } diff --git a/helix-tui/Cargo.toml b/helix-tui/Cargo.toml index 6df65d360..e4cfbe4cd 100644 --- a/helix-tui/Cargo.toml +++ b/helix-tui/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "helix-tui" -version = "0.5.0" +version = "0.6.0" authors = ["Blaž Hrastnik "] description = """ A library to build rich terminal user interfaces or dashboards @@ -18,8 +18,8 @@ default = ["crossterm"] [dependencies] bitflags = "1.3" cassowary = "0.3" -unicode-segmentation = "1.8" -crossterm = { version = "0.22", optional = true } +unicode-segmentation = "1.9" +crossterm = { version = "0.23", optional = true } serde = { version = "1", "optional" = true, features = ["derive"]} -helix-view = { version = "0.5", path = "../helix-view", features = ["term"] } -helix-core = { version = "0.5", path = "../helix-core" } +helix-view = { version = "0.6", path = "../helix-view", features = ["term"] } +helix-core = { version = "0.6", path = "../helix-core" } diff --git a/helix-tui/README.md b/helix-tui/README.md index 97b3d1d9e..5cc80aa65 100644 --- a/helix-tui/README.md +++ b/helix-tui/README.md @@ -2,5 +2,5 @@ This library is a fork of the great library [tui-rs](https://github.com/fdehau/tui-rs/). 
We've mainly relied on the double -buffer implementation and render diffing, side-stepping it's widget and +buffer implementation and render diffing, side-stepping its widget and layouting. diff --git a/helix-tui/src/backend/test.rs b/helix-tui/src/backend/test.rs index 3f56b49c8..52474148e 100644 --- a/helix-tui/src/backend/test.rs +++ b/helix-tui/src/backend/test.rs @@ -111,8 +111,7 @@ impl Backend for TestBackend { I: Iterator, { for (x, y, c) in content { - let cell = self.buffer.get_mut(x, y); - *cell = c.clone(); + self.buffer[(x, y)] = c.clone(); } Ok(()) } diff --git a/helix-tui/src/buffer.rs b/helix-tui/src/buffer.rs index c49a02008..f8673e436 100644 --- a/helix-tui/src/buffer.rs +++ b/helix-tui/src/buffer.rs @@ -90,17 +90,17 @@ impl Default for Cell { /// use helix_view::graphics::{Rect, Color, Style, Modifier}; /// /// let mut buf = Buffer::empty(Rect{x: 0, y: 0, width: 10, height: 5}); -/// buf.get_mut(0, 2).set_symbol("x"); -/// assert_eq!(buf.get(0, 2).symbol, "x"); +/// buf[(0, 2)].set_symbol("x"); +/// assert_eq!(buf[(0, 2)].symbol, "x"); /// buf.set_string(3, 0, "string", Style::default().fg(Color::Red).bg(Color::White)); -/// assert_eq!(buf.get(5, 0), &Cell{ +/// assert_eq!(buf[(5, 0)], Cell{ /// symbol: String::from("r"), /// fg: Color::Red, /// bg: Color::White, /// modifier: Modifier::empty() /// }); -/// buf.get_mut(5, 0).set_char('x'); -/// assert_eq!(buf.get(5, 0).symbol, "x"); +/// buf[(5, 0)].set_char('x'); +/// assert_eq!(buf[(5, 0)].symbol, "x"); /// ``` #[derive(Debug, Default, Clone, PartialEq)] pub struct Buffer { @@ -162,15 +162,38 @@ impl Buffer { } /// Returns a reference to Cell at the given coordinates - pub fn get(&self, x: u16, y: u16) -> &Cell { - let i = self.index_of(x, y); - &self.content[i] + pub fn get(&self, x: u16, y: u16) -> Option<&Cell> { + self.index_of_opt(x, y).map(|i| &self.content[i]) } /// Returns a mutable reference to Cell at the given coordinates - pub fn get_mut(&mut self, x: u16, y: u16) -> &mut Cell { - let i = self.index_of(x, y); - &mut self.content[i] + pub fn get_mut(&mut self, x: u16, y: u16) -> Option<&mut Cell> { + self.index_of_opt(x, y).map(|i| &mut self.content[i]) + } + + /// Tells whether the global (x, y) coordinates are inside the Buffer's area. + /// + /// Global coordinates are offset by the Buffer's area offset (`x`/`y`). + /// + /// # Examples + /// + /// ``` + /// # use helix_tui::buffer::Buffer; + /// # use helix_view::graphics::Rect; + /// let rect = Rect::new(200, 100, 10, 10); + /// let buffer = Buffer::empty(rect); + /// // Global coordinates inside the Buffer's area + /// assert!(buffer.in_bounds(209, 100)); + /// // Global coordinates outside the Buffer's area + /// assert!(!buffer.in_bounds(210, 100)); + /// ``` + /// + /// Global coordinates are offset by the Buffer's area offset (`x`/`y`). + pub fn in_bounds(&self, x: u16, y: u16) -> bool { + x >= self.area.left() + && x < self.area.right() + && y >= self.area.top() + && y < self.area.bottom() } /// Returns the index in the Vec for the given global (x, y) coordinates. @@ -184,7 +207,7 @@ impl Buffer { /// # use helix_view::graphics::Rect; /// let rect = Rect::new(200, 100, 10, 10); /// let buffer = Buffer::empty(rect); - /// // Global coordinates to the top corner of this buffer's area + /// // Global coordinates to the top corner of this Buffer's area /// assert_eq!(buffer.index_of(200, 100), 0); /// ``` /// @@ -193,10 +216,7 @@ impl Buffer { /// Panics when given an coordinate that is outside of this Buffer's area. 
pub fn index_of(&self, x: u16, y: u16) -> usize { debug_assert!( - x >= self.area.left() - && x < self.area.right() - && y >= self.area.top() - && y < self.area.bottom(), + self.in_bounds(x, y), "Trying to access position outside the buffer: x={}, y={}, area={:?}", x, y, @@ -205,6 +225,16 @@ impl Buffer { ((y - self.area.y) * self.area.width + (x - self.area.x)) as usize } + /// Returns the index in the Vec for the given global (x, y) coordinates, + /// or `None` if the coordinates are outside the buffer's area. + fn index_of_opt(&self, x: u16, y: u16) -> Option { + if self.in_bounds(x, y) { + Some(self.index_of(x, y)) + } else { + None + } + } + /// Returns the (global) coordinates of a cell given its index /// /// Global coordinates are offset by the Buffer's area offset (`x`/`y`). @@ -278,6 +308,11 @@ impl Buffer { where S: AsRef, { + // prevent panic if out of range + if !self.in_bounds(x, y) || width == 0 { + return (x, y); + } + let mut index = self.index_of(x, y); let mut x_offset = x as usize; let width = if ellipsis { width - 1 } else { width }; @@ -372,7 +407,7 @@ impl Buffer { pub fn set_background(&mut self, area: Rect, color: Color) { for y in area.top()..area.bottom() { for x in area.left()..area.right() { - self.get_mut(x, y).set_bg(color); + self[(x, y)].set_bg(color); } } } @@ -380,7 +415,7 @@ impl Buffer { pub fn set_style(&mut self, area: Rect, style: Style) { for y in area.top()..area.bottom() { for x in area.left()..area.right() { - self.get_mut(x, y).set_style(style); + self[(x, y)].set_style(style); } } } @@ -408,7 +443,7 @@ impl Buffer { pub fn clear(&mut self, area: Rect) { for x in area.left()..area.right() { for y in area.top()..area.bottom() { - self.get_mut(x, y).reset(); + self[(x, y)].reset(); } } } @@ -417,7 +452,7 @@ impl Buffer { pub fn clear_with(&mut self, area: Rect, style: Style) { for x in area.left()..area.right() { for y in area.top()..area.bottom() { - let cell = self.get_mut(x, y); + let cell = &mut self[(x, y)]; cell.reset(); cell.set_style(style); } @@ -500,15 +535,32 @@ impl Buffer { updates.push((x, y, &next_buffer[i])); } - to_skip = current.symbol.width().saturating_sub(1); + let current_width = current.symbol.width(); + to_skip = current_width.saturating_sub(1); - let affected_width = std::cmp::max(current.symbol.width(), previous.symbol.width()); + let affected_width = std::cmp::max(current_width, previous.symbol.width()); invalidated = std::cmp::max(affected_width, invalidated).saturating_sub(1); } updates } } +impl std::ops::Index<(u16, u16)> for Buffer { + type Output = Cell; + + fn index(&self, (x, y): (u16, u16)) -> &Self::Output { + let i = self.index_of(x, y); + &self.content[i] + } +} + +impl std::ops::IndexMut<(u16, u16)> for Buffer { + fn index_mut(&mut self, (x, y): (u16, u16)) -> &mut Self::Output { + let i = self.index_of(x, y); + &mut self.content[i] + } +} + #[cfg(test)] mod tests { use super::*; diff --git a/helix-tui/src/widgets/block.rs b/helix-tui/src/widgets/block.rs index 648c2d7ee..26223c3eb 100644 --- a/helix-tui/src/widgets/block.rs +++ b/helix-tui/src/widgets/block.rs @@ -15,12 +15,12 @@ pub enum BorderType { } impl BorderType { - pub fn line_symbols(border_type: BorderType) -> line::Set { + pub fn line_symbols(border_type: Self) -> line::Set { match border_type { - BorderType::Plain => line::NORMAL, - BorderType::Rounded => line::ROUNDED, - BorderType::Double => line::DOUBLE, - BorderType::Thick => line::THICK, + Self::Plain => line::NORMAL, + Self::Rounded => line::ROUNDED, + Self::Double => line::DOUBLE, + 
Self::Thick => line::THICK, } } } @@ -140,14 +140,14 @@ impl<'a> Widget for Block<'a> { // Sides if self.borders.intersects(Borders::LEFT) { for y in area.top()..area.bottom() { - buf.get_mut(area.left(), y) + buf[(area.left(), y)] .set_symbol(symbols.vertical) .set_style(self.border_style); } } if self.borders.intersects(Borders::TOP) { for x in area.left()..area.right() { - buf.get_mut(x, area.top()) + buf[(x, area.top())] .set_symbol(symbols.horizontal) .set_style(self.border_style); } @@ -155,7 +155,7 @@ impl<'a> Widget for Block<'a> { if self.borders.intersects(Borders::RIGHT) { let x = area.right() - 1; for y in area.top()..area.bottom() { - buf.get_mut(x, y) + buf[(x, y)] .set_symbol(symbols.vertical) .set_style(self.border_style); } @@ -163,7 +163,7 @@ impl<'a> Widget for Block<'a> { if self.borders.intersects(Borders::BOTTOM) { let y = area.bottom() - 1; for x in area.left()..area.right() { - buf.get_mut(x, y) + buf[(x, y)] .set_symbol(symbols.horizontal) .set_style(self.border_style); } @@ -171,22 +171,22 @@ impl<'a> Widget for Block<'a> { // Corners if self.borders.contains(Borders::RIGHT | Borders::BOTTOM) { - buf.get_mut(area.right() - 1, area.bottom() - 1) + buf[(area.right() - 1, area.bottom() - 1)] .set_symbol(symbols.bottom_right) .set_style(self.border_style); } if self.borders.contains(Borders::RIGHT | Borders::TOP) { - buf.get_mut(area.right() - 1, area.top()) + buf[(area.right() - 1, area.top())] .set_symbol(symbols.top_right) .set_style(self.border_style); } if self.borders.contains(Borders::LEFT | Borders::BOTTOM) { - buf.get_mut(area.left(), area.bottom() - 1) + buf[(area.left(), area.bottom() - 1)] .set_symbol(symbols.bottom_left) .set_style(self.border_style); } if self.borders.contains(Borders::LEFT | Borders::TOP) { - buf.get_mut(area.left(), area.top()) + buf[(area.left(), area.top())] .set_symbol(symbols.top_left) .set_style(self.border_style); } diff --git a/helix-tui/src/widgets/paragraph.rs b/helix-tui/src/widgets/paragraph.rs index fee35d250..4e8391621 100644 --- a/helix-tui/src/widgets/paragraph.rs +++ b/helix-tui/src/widgets/paragraph.rs @@ -166,7 +166,7 @@ impl<'a> Widget for Paragraph<'a> { Box::new(WordWrapper::new(&mut styled, text_area.width, trim)) } else { let mut line_composer = Box::new(LineTruncator::new(&mut styled, text_area.width)); - if let Alignment::Left = self.alignment { + if self.alignment == Alignment::Left { line_composer.set_horizontal_offset(self.scroll.1); } line_composer @@ -176,7 +176,7 @@ impl<'a> Widget for Paragraph<'a> { if y >= self.scroll.0 { let mut x = get_line_offset(current_line_width, text_area.width, self.alignment); for StyledGrapheme { symbol, style } in current_line { - buf.get_mut(text_area.left() + x, text_area.top() + y - self.scroll.0) + buf[(text_area.left() + x, text_area.top() + y - self.scroll.0)] .set_symbol(if symbol.is_empty() { // If the symbol is empty, the last char which rendered last time will // leave on the line. It's a quick fix. 
diff --git a/helix-tui/src/widgets/reflow.rs b/helix-tui/src/widgets/reflow.rs index 21847783b..33e52bb4e 100644 --- a/helix-tui/src/widgets/reflow.rs +++ b/helix-tui/src/widgets/reflow.rs @@ -404,8 +404,8 @@ mod test { let text = "コンピュータ上で文字を扱う場合、典型的には文字による通信を行う場合にその両端点\ では、"; let (word_wrapper, word_wrapper_width) = - run_composer(Composer::WordWrapper { trim: true }, &text, width); - let (line_truncator, _) = run_composer(Composer::LineTruncator, &text, width); + run_composer(Composer::WordWrapper { trim: true }, text, width); + let (line_truncator, _) = run_composer(Composer::LineTruncator, text, width); assert_eq!(line_truncator, vec!["コンピュータ上で文字"]); let wrapped = vec![ "コンピュータ上で文字", diff --git a/helix-view/Cargo.toml b/helix-view/Cargo.toml index 121a518c3..932c33216 100644 --- a/helix-view/Cargo.toml +++ b/helix-view/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "helix-view" -version = "0.5.0" +version = "0.6.0" authors = ["Blaž Hrastnik "] edition = "2021" license = "MPL-2.0" @@ -16,15 +16,17 @@ term = ["crossterm"] [dependencies] bitflags = "1.3" anyhow = "1" -helix-core = { version = "0.5", path = "../helix-core" } -helix-lsp = { version = "0.5", path = "../helix-lsp"} -crossterm = { version = "0.22", optional = true } +helix-core = { version = "0.6", path = "../helix-core" } +helix-lsp = { version = "0.6", path = "../helix-lsp"} +helix-dap = { version = "0.6", path = "../helix-dap"} +crossterm = { version = "0.23", optional = true } # Conversion traits once_cell = "1.9" url = "2" tokio = { version = "1", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot"] } +tokio-stream = "0.1" futures-util = { version = "0.3", features = ["std", "async-await"], default-features = false } slotmap = "1" @@ -38,7 +40,7 @@ log = "~0.4" which = "4.2" [target.'cfg(windows)'.dependencies] -clipboard-win = { version = "4.2", features = ["std"] } +clipboard-win = { version = "4.4", features = ["std"] } [dev-dependencies] helix-tui = { path = "../helix-tui" } diff --git a/helix-view/src/document.rs b/helix-view/src/document.rs index 9652d7b3c..c0186ee53 100644 --- a/helix-view/src/document.rs +++ b/helix-view/src/document.rs @@ -1,5 +1,6 @@ -use anyhow::{anyhow, Context, Error}; +use anyhow::{anyhow, bail, Context, Error}; use serde::de::{self, Deserialize, Deserializer}; +use serde::Serialize; use std::cell::Cell; use std::collections::HashMap; use std::fmt::Display; @@ -10,7 +11,7 @@ use std::sync::Arc; use helix_core::{ encoding, - history::History, + history::{History, UndoKind}, indent::{auto_detect_indent_style, IndentStyle}, line_ending::auto_detect_line_ending, syntax::{self, LanguageConfiguration}, @@ -19,7 +20,7 @@ use helix_core::{ }; use helix_lsp::util::LspFormatting; -use crate::{DocumentId, Theme, ViewId}; +use crate::{DocumentId, ViewId}; /// 8kB of buffer space for encoding and decoding `Rope`s. 
const BUF_SIZE: usize = 8192; @@ -30,9 +31,9 @@ pub const SCRATCH_BUFFER_NAME: &str = "[scratch]"; #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub enum Mode { - Normal, - Select, - Insert, + Normal = 0, + Select = 1, + Insert = 2, } impl Display for Mode { @@ -53,7 +54,7 @@ impl FromStr for Mode { "normal" => Ok(Mode::Normal), "select" => Ok(Mode::Select), "insert" => Ok(Mode::Insert), - _ => Err(anyhow!("Invalid mode '{}'", s)), + _ => bail!("Invalid mode '{}'", s), } } } @@ -69,6 +70,15 @@ impl<'de> Deserialize<'de> for Mode { } } +impl Serialize for Mode { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + serializer.collect_str(self) + } +} + pub struct Document { pub(crate) id: DocumentId, text: Rope, @@ -358,8 +368,7 @@ impl Document { pub fn open( path: &Path, encoding: Option<&'static encoding::Encoding>, - theme: Option<&Theme>, - config_loader: Option<&syntax::Loader>, + config_loader: Option>, ) -> Result { // Open the file if it exists, otherwise assume it is a new file (and thus empty). let (rope, encoding) = if path.exists() { @@ -376,7 +385,7 @@ impl Document { // set the path and try detecting the language doc.set_path(Some(path))?; if let Some(loader) = config_loader { - doc.detect_language(theme, loader); + doc.detect_language(loader); } doc.detect_indent_and_line_ending(); @@ -387,7 +396,7 @@ impl Document { /// The same as [`format`], but only returns formatting changes if auto-formatting /// is configured. pub fn auto_format(&self) -> Option + 'static> { - if self.language_config().map(|c| c.auto_format) == Some(true) { + if self.language_config()?.auto_format { self.format() } else { None @@ -397,30 +406,27 @@ impl Document { /// If supported, returns the changes that should be applied to this document in order /// to format it nicely. pub fn format(&self) -> Option + 'static> { - if let Some(language_server) = self.language_server() { - let text = self.text.clone(); - let offset_encoding = language_server.offset_encoding(); - let request = language_server.text_document_formatting( - self.identifier(), - lsp::FormattingOptions::default(), - None, - )?; - - let fut = async move { - let edits = request.await.unwrap_or_else(|e| { - log::warn!("LSP formatting failed: {}", e); - Default::default() - }); - LspFormatting { - doc: text, - edits, - offset_encoding, - } - }; - Some(fut) - } else { - None - } + let language_server = self.language_server()?; + let text = self.text.clone(); + let offset_encoding = language_server.offset_encoding(); + let request = language_server.text_document_formatting( + self.identifier(), + lsp::FormattingOptions::default(), + None, + )?; + + let fut = async move { + let edits = request.await.unwrap_or_else(|e| { + log::warn!("LSP formatting failed: {}", e); + Default::default() + }); + LspFormatting { + doc: text, + edits, + offset_encoding, + } + }; + Some(fut) } pub fn save(&mut self) -> impl Future> { @@ -464,9 +470,7 @@ impl Document { if let Some(parent) = path.parent() { // TODO: display a prompt asking the user if the directories should be created if !parent.exists() { - return Err(Error::msg( - "can't save file, parent directory does not exist", - )); + bail!("can't save file, parent directory does not exist"); } } @@ -498,12 +502,12 @@ impl Document { } /// Detect the programming language based on the file type. 
- pub fn detect_language(&mut self, theme: Option<&Theme>, config_loader: &syntax::Loader) { + pub fn detect_language(&mut self, config_loader: Arc) { if let Some(path) = &self.path { let language_config = config_loader .language_config_for_file_name(path) .or_else(|| config_loader.language_config_for_shebang(self.text())); - self.set_language(theme, language_config); + self.set_language(language_config, Some(config_loader)); } } @@ -513,8 +517,7 @@ impl Document { /// line ending. pub fn detect_indent_and_line_ending(&mut self) { self.indent_style = auto_detect_indent_style(&self.text).unwrap_or_else(|| { - self.language - .as_ref() + self.language_config() .and_then(|config| config.indent.as_ref()) .map_or(DEFAULT_INDENT, |config| IndentStyle::from_str(&config.unit)) }); @@ -528,7 +531,7 @@ impl Document { // If there is no path or the path no longer exists. if path.is_none() { - return Err(anyhow!("can't find file to reload from")); + bail!("can't find file to reload from"); } let mut file = std::fs::File::open(path.unwrap())?; @@ -549,10 +552,8 @@ impl Document { /// Sets the [`Document`]'s encoding with the encoding correspondent to `label`. pub fn set_encoding(&mut self, label: &str) -> Result<(), Error> { - match encoding::Encoding::for_label(label.as_bytes()) { - Some(encoding) => self.encoding = encoding, - None => return Err(anyhow::anyhow!("unknown encoding")), - } + self.encoding = encoding::Encoding::for_label(label.as_bytes()) + .ok_or_else(|| anyhow!("unknown encoding"))?; Ok(()) } @@ -577,15 +578,13 @@ impl Document { /// if it exists. pub fn set_language( &mut self, - theme: Option<&Theme>, language_config: Option>, + loader: Option>, ) { - if let Some(language_config) = language_config { - let scopes = theme.map(|theme| theme.scopes()).unwrap_or(&[]); - if let Some(highlight_config) = language_config.highlight_config(scopes) { - let syntax = Syntax::new(&self.text, highlight_config); + if let (Some(language_config), Some(loader)) = (language_config, loader) { + if let Some(highlight_config) = language_config.highlight_config(&loader.scopes()) { + let syntax = Syntax::new(&self.text, highlight_config, loader); self.syntax = Some(syntax); - // TODO: config.configure(scopes) is now delayed, is that ok? } self.language = Some(language_config); @@ -597,15 +596,10 @@ impl Document { /// Set the programming language for the file if you know the name (scope) but don't have the /// [`syntax::LanguageConfiguration`] for it. - pub fn set_language2( - &mut self, - scope: &str, - theme: Option<&Theme>, - config_loader: Arc, - ) { + pub fn set_language2(&mut self, scope: &str, config_loader: Arc) { let language_config = config_loader.language_config_for_scope(scope); - self.set_language(theme, language_config); + self.set_language(language_config, Some(config_loader)); } /// Set the LSP. @@ -644,7 +638,6 @@ impl Document { ); } - // set modified since accessed self.modified_since_accessed = true; } @@ -687,7 +680,7 @@ impl Document { if let Some(notify) = notify { tokio::spawn(notify); - } //.expect("failed to emit textDocument/didChange"); + } } } success @@ -715,11 +708,11 @@ impl Document { success } - /// Undo the last modification to the [`Document`]. Returns whether the undo was successful. 
@@ -715,11 +708,11 @@ impl Document {
         success
     }
 
-    /// Undo the last modification to the [`Document`]. Returns whether the undo was successful.
-    pub fn undo(&mut self, view_id: ViewId) -> bool {
+    fn undo_redo_impl(&mut self, view_id: ViewId, undo: bool) -> bool {
         let mut history = self.history.take();
-        let success = if let Some(transaction) = history.undo() {
-            self.apply_impl(transaction, view_id)
+        let txn = if undo { history.undo() } else { history.redo() };
+        let success = if let Some(txn) = txn {
+            self.apply_impl(txn, view_id)
         } else {
             false
         };
@@ -732,21 +725,14 @@ impl Document {
         success
     }
 
+    /// Undo the last modification to the [`Document`]. Returns whether the undo was successful.
+    pub fn undo(&mut self, view_id: ViewId) -> bool {
+        self.undo_redo_impl(view_id, true)
+    }
+
     /// Redo the last modification to the [`Document`]. Returns whether the redo was sucessful.
     pub fn redo(&mut self, view_id: ViewId) -> bool {
-        let mut history = self.history.take();
-        let success = if let Some(transaction) = history.redo() {
-            self.apply_impl(transaction, view_id)
-        } else {
-            false
-        };
-        self.history.set(history);
-
-        if success {
-            // reset changeset to fix len
-            self.changes = ChangeSet::new(self.text());
-        }
-        success
+        self.undo_redo_impl(view_id, false)
     }
 
     pub fn savepoint(&mut self) {
@@ -759,9 +745,12 @@ impl Document {
         }
     }
 
-    /// Undo modifications to the [`Document`] according to `uk`.
-    pub fn earlier(&mut self, view_id: ViewId, uk: helix_core::history::UndoKind) -> bool {
-        let txns = self.history.get_mut().earlier(uk);
+    fn earlier_later_impl(&mut self, view_id: ViewId, uk: UndoKind, earlier: bool) -> bool {
+        let txns = if earlier {
+            self.history.get_mut().earlier(uk)
+        } else {
+            self.history.get_mut().later(uk)
+        };
         let mut success = false;
         for txn in txns {
             if self.apply_impl(&txn, view_id) {
@@ -775,20 +764,14 @@ impl Document {
         success
     }
 
+    /// Undo modifications to the [`Document`] according to `uk`.
+    pub fn earlier(&mut self, view_id: ViewId, uk: UndoKind) -> bool {
+        self.earlier_later_impl(view_id, uk, true)
+    }
+
     /// Redo modifications to the [`Document`] according to `uk`.
-    pub fn later(&mut self, view_id: ViewId, uk: helix_core::history::UndoKind) -> bool {
-        let txns = self.history.get_mut().later(uk);
-        let mut success = false;
-        for txn in txns {
-            if self.apply_impl(&txn, view_id) {
-                success = true;
-            }
-        }
-        if success {
-            // reset changeset to fix len
-            self.changes = ChangeSet::new(self.text());
-        }
-        success
+    pub fn later(&mut self, view_id: ViewId, uk: UndoKind) -> bool {
+        self.earlier_later_impl(view_id, uk, false)
    }
 
     /// Commit pending changes to history
@@ -844,6 +827,16 @@ impl Document {
             .map(|language| language.scope.as_str())
     }
 
+    /// Language ID for the document. Either the `language-id` from the
+    /// `language-server` configuration, or the document language if no
+    /// `language-id` has been specified.
+    pub fn language_id(&self) -> Option<&str> {
+        self.language_config()
+            .and_then(|config| config.language_server.as_ref())
+            .and_then(|lsp_config| lsp_config.language_id.as_deref())
+            .or_else(|| Some(self.language()?.rsplit_once('.')?.1))
+    }
+
     /// Corresponding [`LanguageConfiguration`].
     pub fn language_config(&self) -> Option<&LanguageConfiguration> {
         self.language.as_deref()
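The new `language_id` helper above prefers an explicit `language-id` from the language-server configuration and otherwise takes the last segment of the scope (`source.rust` -> `rust`) via `rsplit_once`. A small sketch of that fallback chain with a simplified, illustrative `LspConfig` type:

```rust
struct LspConfig {
    language_id: Option<String>,
}

fn language_id<'a>(lsp: Option<&'a LspConfig>, scope: Option<&'a str>) -> Option<&'a str> {
    // Prefer the configured language-id; otherwise take the part after the
    // last '.' of the scope, e.g. "source.rust" -> "rust".
    lsp.and_then(|cfg| cfg.language_id.as_deref())
        .or_else(|| Some(scope?.rsplit_once('.')?.1))
}

fn main() {
    assert_eq!(language_id(None, Some("source.rust")), Some("rust"));

    let cfg = LspConfig {
        language_id: Some("typescript".into()),
    };
    assert_eq!(language_id(Some(&cfg), Some("source.tsx")), Some("typescript"));
    assert_eq!(language_id(None, None), None);
}
```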
@@ -854,18 +847,10 @@ impl Document {
         self.version
     }
 
+    /// Language server if it has been initialized.
     pub fn language_server(&self) -> Option<&helix_lsp::Client> {
-        let server = self.language_server.as_deref();
-        let initialized = server
-            .map(|server| server.is_initialized())
-            .unwrap_or(false);
-
-        // only resolve language_server if it's initialized
-        if initialized {
-            server
-        } else {
-            None
-        }
+        let server = self.language_server.as_deref()?;
+        server.is_initialized().then(|| server)
     }
 
     #[inline]
@@ -876,8 +861,7 @@ impl Document {
 
     /// Tab size in columns.
     pub fn tab_width(&self) -> usize {
-        self.language
-            .as_ref()
+        self.language_config()
            .and_then(|config| config.indent.as_ref())
            .map_or(4, |config| config.tab_width) // fallback to 4 columns
     }
@@ -902,7 +886,7 @@ impl Document {
 
     /// File path as a URL.
     pub fn url(&self) -> Option<Url> {
-        self.path().map(|path| Url::from_file_path(path).unwrap())
+        Url::from_file_path(self.path()?).ok()
     }
 
     #[inline]
@@ -925,10 +909,6 @@ impl Document {
             .map(helix_core::path::get_relative_path)
     }
 
-    // pub fn slice<R>(&self, range: R) -> RopeSlice where R: RangeBounds {
-    //     self.state.doc.slice
-    // }
-
     // transact(Fn) ?
 
     // -- LSP methods
@@ -949,7 +929,6 @@ impl Document {
 
     pub fn set_diagnostics(&mut self, diagnostics: Vec<Diagnostic>) {
         self.diagnostics = diagnostics;
-        // sort by range
         self.diagnostics
             .sort_unstable_by_key(|diagnostic| diagnostic.range);
     }
diff --git a/helix-view/src/editor.rs b/helix-view/src/editor.rs
index f4b0f73e7..d44dc1c67 100644
--- a/helix-view/src/editor.rs
+++ b/helix-view/src/editor.rs
@@ -1,7 +1,8 @@
 use crate::{
     clipboard::{get_clipboard_provider, ClipboardProvider},
-    document::SCRATCH_BUFFER_NAME,
+    document::{Mode, SCRATCH_BUFFER_NAME},
     graphics::{CursorKind, Rect},
+    info::Info,
     input::KeyEvent,
     theme::{self, Theme},
     tree::{self, Tree},
@@ -9,8 +10,12 @@ use crate::{
 };
 
 use futures_util::future;
+use futures_util::stream::select_all::SelectAll;
+use tokio_stream::wrappers::UnboundedReceiverStream;
+
 use std::{
-    collections::BTreeMap,
+    borrow::Cow,
+    collections::{BTreeMap, HashMap},
     io::stdin,
     num::NonZeroUsize,
     path::{Path, PathBuf},
@@ -26,8 +31,9 @@ pub use helix_core::diagnostic::Severity;
 pub use helix_core::register::Registers;
 use helix_core::syntax;
 use helix_core::{Position, Selection};
+use helix_dap as dap;
 
-use serde::{Deserialize, Serialize};
+use serde::{ser::SerializeMap, Deserialize, Deserializer, Serialize};
 
 fn deserialize_duration_millis<'de, D>(deserializer: D) -> Result<Duration, D::Error>
 where
@@ -92,8 +98,6 @@ pub struct Config {
     pub line_number: LineNumber,
     /// Middle click paste support. Defaults to true.
     pub middle_click_paste: bool,
-    /// Smart case: Case insensitive searching unless pattern contains upper case characters. Defaults to true.
-    pub smart_case: bool,
     /// Automatic insertion of pairs to parentheses, brackets, etc. Defaults to true.
     pub auto_pairs: bool,
     /// Automatic auto-completion, automatically pop up without user trigger. Defaults to true.
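The Config hunk below moves `smart_case` out of the flat `Config` struct into a dedicated `search` section marked `#[serde(default)]`, so configurations without that table still deserialize. A hedged sketch of that layout; it assumes the `toml` crate and simplifies the surrounding types, and only the field names mirror the diff:

```rust
use serde::Deserialize;

#[derive(Debug, Deserialize)]
#[serde(rename_all = "kebab-case", default, deny_unknown_fields)]
struct SearchConfig {
    smart_case: bool,
    wrap_around: bool,
}

impl Default for SearchConfig {
    fn default() -> Self {
        Self {
            smart_case: true,
            wrap_around: true,
        }
    }
}

#[derive(Debug, Deserialize)]
struct EditorConfig {
    // A missing [search] table falls back to SearchConfig::default().
    #[serde(default)]
    search: SearchConfig,
}

fn main() {
    // No [search] table at all: defaults apply.
    let cfg: EditorConfig = toml::from_str("").unwrap();
    assert!(cfg.search.smart_case && cfg.search.wrap_around);

    // Partial table: unspecified keys still come from Default.
    let cfg: EditorConfig = toml::from_str("[search]\nwrap-around = false\n").unwrap();
    assert!(cfg.search.smart_case);
    assert!(!cfg.search.wrap_around);
}
```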
@@ -105,8 +109,76 @@ pub struct Config {
     /// Whether to display infoboxes. Defaults to true.
     pub auto_info: bool,
     pub file_picker: FilePickerConfig,
+    /// Shape for cursor in each mode
+    pub cursor_shape: CursorShapeConfig,
     /// Set to `true` to override automatic detection of terminal truecolor support in the event of a false negative. Defaults to `false`.
     pub true_color: bool,
+    /// Search configuration.
+    #[serde(default)]
+    pub search: SearchConfig,
+}
+
+#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
+#[serde(rename_all = "kebab-case", default, deny_unknown_fields)]
+pub struct SearchConfig {
+    /// Smart case: Case insensitive searching unless pattern contains upper case characters. Defaults to true.
+    pub smart_case: bool,
+    /// Whether the search should wrap after depleting the matches. Default to true.
+    pub wrap_around: bool,
+}
+
+// Cursor shape is read and used on every rendered frame and so needs
+// to be fast. Therefore we avoid a hashmap and use an enum indexed array.
+#[derive(Debug, Clone, PartialEq)]
+pub struct CursorShapeConfig([CursorKind; 3]);
+
+impl CursorShapeConfig {
+    pub fn from_mode(&self, mode: Mode) -> CursorKind {
+        self.get(mode as usize).copied().unwrap_or_default()
+    }
+}
+
+impl<'de> Deserialize<'de> for CursorShapeConfig {
+    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: Deserializer<'de>,
+    {
+        let m = HashMap::<Mode, CursorKind>::deserialize(deserializer)?;
+        let into_cursor = |mode: Mode| m.get(&mode).copied().unwrap_or_default();
+        Ok(CursorShapeConfig([
+            into_cursor(Mode::Normal),
+            into_cursor(Mode::Select),
+            into_cursor(Mode::Insert),
+        ]))
+    }
+}
+
+impl Serialize for CursorShapeConfig {
+    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+    where
+        S: serde::Serializer,
+    {
+        let mut map = serializer.serialize_map(Some(self.len()))?;
+        let modes = [Mode::Normal, Mode::Select, Mode::Insert];
+        for mode in modes {
+            map.serialize_entry(&mode, &self.from_mode(mode))?;
+        }
+        map.end()
+    }
+}
+
+impl std::ops::Deref for CursorShapeConfig {
+    type Target = [CursorKind; 3];
+
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
+}
+
+impl Default for CursorShapeConfig {
+    fn default() -> Self {
+        Self([CursorKind::Block; 3])
+    }
 }
 
 #[derive(Debug, Copy, Clone, PartialEq, Eq, Serialize, Deserialize)]
@@ -115,7 +187,8 @@ pub enum LineNumber {
     /// Show absolute line number
     Absolute,
 
-    /// Show relative line number to the primary cursor
+    /// If focused and in normal/select mode, show relative line number to the primary cursor.
+    /// If unfocused or in insert mode, show absolute line number.
     Relative,
 }
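The `CursorShapeConfig` introduced above is read on every rendered frame, so instead of a `HashMap` lookup it gives `Mode` explicit discriminants and indexes a fixed-size array with `mode as usize`. A stripped-down sketch of that enum-indexed-array lookup:

```rust
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum Mode {
    Normal = 0,
    Select = 1,
    Insert = 2,
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum CursorKind {
    Block,
    Bar,
    Underline,
}

struct CursorShapeConfig([CursorKind; 3]);

impl CursorShapeConfig {
    // The discriminant doubles as the array index, so the per-frame lookup
    // is a plain array access instead of hashing.
    fn from_mode(&self, mode: Mode) -> CursorKind {
        self.0[mode as usize]
    }
}

fn main() {
    let shapes = CursorShapeConfig([CursorKind::Block, CursorKind::Underline, CursorKind::Bar]);
    assert_eq!(shapes.from_mode(Mode::Normal), CursorKind::Block);
    assert_eq!(shapes.from_mode(Mode::Insert), CursorKind::Bar);
}
```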
@@ -144,14 +217,24 @@ impl Default for Config {
             },
             line_number: LineNumber::Absolute,
             middle_click_paste: true,
-            smart_case: true,
             auto_pairs: true,
             auto_completion: true,
             idle_timeout: Duration::from_millis(400),
             completion_trigger_len: 2,
             auto_info: true,
             file_picker: FilePickerConfig::default(),
+            cursor_shape: CursorShapeConfig::default(),
             true_color: false,
+            search: SearchConfig::default(),
+        }
+    }
+}
+
+impl Default for SearchConfig {
+    fn default() -> Self {
+        Self {
+            wrap_around: true,
+            smart_case: true,
         }
     }
 }
@@ -168,6 +251,19 @@ impl std::fmt::Debug for Motion {
     }
 }
 
+#[derive(Debug, Clone, Default)]
+pub struct Breakpoint {
+    pub id: Option<usize>,
+    pub verified: bool,
+    pub message: Option<String>,
+
+    pub line: usize,
+    pub column: Option<usize>,
+    pub condition: Option<String>,
+    pub hit_condition: Option<String>,
+    pub log_message: Option<String>,
+}
+
 #[derive(Debug)]
 pub struct Editor {
     pub tree: Tree,
@@ -179,12 +275,18 @@ pub struct Editor {
     pub macro_recording: Option<(char, Vec<KeyEvent>)>,
     pub theme: Theme,
     pub language_servers: helix_lsp::Registry,
+
+    pub debugger: Option<dap::Client>,
+    pub debugger_events: SelectAll<UnboundedReceiverStream<dap::Payload>>,
+    pub breakpoints: HashMap<PathBuf, Vec<Breakpoint>>,
+
     pub clipboard_provider: Box<dyn ClipboardProvider>,
 
     pub syn_loader: Arc<syntax::Loader>,
     pub theme_loader: Arc<theme::Loader>,
 
-    pub status_msg: Option<(String, Severity)>,
+    pub status_msg: Option<(Cow<'static, str>, Severity)>,
+    pub autoinfo: Option<Info>,
 
     pub config: Config,
@@ -223,11 +325,15 @@ impl Editor {
             macro_recording: None,
             theme: theme_loader.default(),
             language_servers,
+            debugger: None,
+            debugger_events: SelectAll::new(),
+            breakpoints: HashMap::new(),
             syn_loader,
             theme_loader,
             registers: Registers::default(),
             clipboard_provider: get_clipboard_provider(),
             status_msg: None,
+            autoinfo: None,
             idle_timer: Box::pin(sleep(config.idle_timeout)),
             last_motion: None,
             config,
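The `status_msg` field above now holds a `Cow<'static, str>`, and in the following hunk `set_status`/`set_error` accept any `T: Into<Cow<'static, str>>`, so `&'static str` messages avoid an allocation while formatted `String`s still work. A minimal sketch of that API shape (not the full Helix `Editor`):

```rust
use std::borrow::Cow;

#[derive(Default)]
struct Editor {
    status_msg: Option<Cow<'static, str>>,
}

impl Editor {
    // &'static str becomes Cow::Borrowed (no allocation); String becomes Cow::Owned.
    fn set_status<T: Into<Cow<'static, str>>>(&mut self, status: T) {
        self.status_msg = Some(status.into());
    }
}

fn main() {
    let mut editor = Editor::default();

    editor.set_status("written");
    assert!(matches!(editor.status_msg, Some(Cow::Borrowed(_))));

    editor.set_status(format!("saved {} bytes", 42));
    assert!(matches!(editor.status_msg, Some(Cow::Owned(_))));
}
```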
@@ -252,29 +358,25 @@ impl Editor {
         self.status_msg = None;
     }
 
-    pub fn set_status(&mut self, status: String) {
-        self.status_msg = Some((status, Severity::Info));
+    #[inline]
+    pub fn set_status<T: Into<Cow<'static, str>>>(&mut self, status: T) {
+        self.status_msg = Some((status.into(), Severity::Info));
     }
 
-    pub fn set_error(&mut self, error: String) {
-        self.status_msg = Some((error, Severity::Error));
+    #[inline]
+    pub fn set_error<T: Into<Cow<'static, str>>>(&mut self, error: T) {
+        self.status_msg = Some((error.into(), Severity::Error));
     }
 
     pub fn set_theme(&mut self, theme: Theme) {
         // `ui.selection` is the only scope required to be able to render a theme.
         if theme.find_scope_index("ui.selection").is_none() {
-            self.set_error("Invalid theme: `ui.selection` required".to_owned());
+            self.set_error("Invalid theme: `ui.selection` required");
             return;
         }
 
         let scopes = theme.scopes();
-        for config in self
-            .syn_loader
-            .language_configs_iter()
-            .filter(|cfg| cfg.is_highlight_initialized())
-        {
-            config.reconfigure(scopes);
-        }
+        self.syn_loader.set_scopes(scopes.to_vec());
 
         self.theme = theme;
         self._refresh();
@@ -283,7 +385,7 @@ impl Editor {
     /// Refreshes the language server for a given document
     pub fn refresh_language_server(&mut self, doc_id: DocumentId) -> Option<()> {
         let doc = self.documents.get_mut(&doc_id)?;
-        doc.detect_language(Some(&self.theme), &self.syn_loader);
+        doc.detect_language(self.syn_loader.clone());
         Self::launch_language_server(&mut self.language_servers, doc)
     }
 
@@ -307,11 +409,8 @@ impl Editor {
             if let Some(language_server) = doc.language_server() {
                 tokio::spawn(language_server.text_document_did_close(doc.identifier()));
             }
-            let language_id = doc
-                .language()
-                .and_then(|s| s.split('.').last()) // source.rust
-                .map(ToOwned::to_owned)
-                .unwrap_or_default();
+
+            let language_id = doc.language_id().map(ToOwned::to_owned).unwrap_or_default();
 
             // TODO: this now races with on_init code if the init happens too quickly
             tokio::spawn(language_server.text_document_did_open(
@@ -393,7 +492,7 @@ impl Editor {
             view.last_accessed_doc = Some(view.doc);
             // Set last modified doc if modified and last modified doc is different
             if std::mem::take(&mut doc.modified_since_accessed)
-                && view.last_modified_docs[0] != Some(id)
+                && view.last_modified_docs[0] != Some(view.doc)
             {
                 view.last_modified_docs = [Some(view.doc), view.last_modified_docs[0]];
             }
@@ -465,7 +564,7 @@ impl Editor {
         let id = if let Some(id) = id {
             id
         } else {
-            let mut doc = Document::open(&path, None, Some(&self.theme), Some(&self.syn_loader))?;
+            let mut doc = Document::open(&path, None, Some(self.syn_loader.clone()))?;
 
             let _ = Self::launch_language_server(&mut self.language_servers, &mut doc);
 
@@ -623,9 +722,10 @@ impl Editor {
                 let inner = view.inner_area();
                 pos.col += inner.x as usize;
                 pos.row += inner.y as usize;
-                (Some(pos), CursorKind::Hidden)
+                let cursorkind = self.config.cursor_shape.from_mode(doc.mode());
+                (Some(pos), cursorkind)
             } else {
-                (None, CursorKind::Hidden)
+                (None, CursorKind::default())
             }
         }
diff --git a/helix-view/src/graphics.rs b/helix-view/src/graphics.rs
index b8e43ba56..6d0a92928 100644
--- a/helix-view/src/graphics.rs
+++ b/helix-view/src/graphics.rs
@@ -1,10 +1,12 @@
 use bitflags::bitflags;
+use serde::{Deserialize, Serialize};
 use std::{
     cmp::{max, min},
     str::FromStr,
 };
 
-#[derive(Debug, Clone, Copy, PartialEq)]
+#[derive(Debug, Clone, Copy, PartialEq, Deserialize, Serialize)]
+#[serde(rename_all = "lowercase")]
 /// UNSTABLE
 pub enum CursorKind {
     /// █
@@ -17,6 +19,12 @@ pub enum CursorKind {
     Hidden,
 }
 
+impl Default for CursorKind {
+    fn default() -> Self {
+        Self::Block
+    }
+}
+
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct Margin {
     pub vertical: u16,
@@ -323,7 +331,7 @@ impl FromStr for Modifier {
 /// ];
 /// let mut buffer = Buffer::empty(Rect::new(0, 0, 1, 1));
 /// for style in &styles {
-///     buffer.get_mut(0, 0).set_style(*style);
+///     buffer[(0, 0)].set_style(*style);
 /// }
 /// assert_eq!(
 ///     Style {
@@ -332,7 +340,7 @@ impl FromStr for Modifier {
 ///         add_modifier: Modifier::BOLD,
 ///         sub_modifier: Modifier::empty(),
 ///     },
-///     buffer.get(0, 0).style(),
+///     buffer[(0, 0)].style(),
 /// );
 /// ```
 ///
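With the derives added in the graphics.rs hunk above, `CursorKind` (de)serializes as lowercase strings and defaults to `Block`. A sketch of the round trip; it uses `serde_json` only because it is a convenient self-describing format for the example:

```rust
use serde::{Deserialize, Serialize};

#[derive(Debug, Clone, Copy, PartialEq, Deserialize, Serialize)]
#[serde(rename_all = "lowercase")]
enum CursorKind {
    Block,
    Bar,
    Underline,
    Hidden,
}

impl Default for CursorKind {
    fn default() -> Self {
        Self::Block
    }
}

fn main() {
    // Variants serialize as their lowercase names...
    assert_eq!(
        serde_json::to_string(&CursorKind::Underline).unwrap(),
        "\"underline\""
    );
    // ...and parse back from them; an unset value falls back to Block.
    let kind: CursorKind = serde_json::from_str("\"bar\"").unwrap();
    assert_eq!(kind, CursorKind::Bar);
    assert_eq!(CursorKind::default(), CursorKind::Block);
}
```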
@@ -348,7 +356,7 @@ impl FromStr for Modifier {
 /// ];
 /// let mut buffer = Buffer::empty(Rect::new(0, 0, 1, 1));
 /// for style in &styles {
-///     buffer.get_mut(0, 0).set_style(*style);
+///     buffer[(0, 0)].set_style(*style);
 /// }
 /// assert_eq!(
 ///     Style {
@@ -357,7 +365,7 @@ impl FromStr for Modifier {
 ///         add_modifier: Modifier::empty(),
 ///         sub_modifier: Modifier::empty(),
 ///     },
-///     buffer.get(0, 0).style(),
+///     buffer[(0, 0)].style(),
 /// );
 /// ```
 #[derive(Debug, Clone, Copy, PartialEq)]
diff --git a/helix-view/src/gutter.rs b/helix-view/src/gutter.rs
index af016c56e..6a77c41fc 100644
--- a/helix-view/src/gutter.rs
+++ b/helix-view/src/gutter.rs
@@ -1,16 +1,19 @@
 use std::fmt::Write;
 
-use crate::{editor::Config, graphics::Style, Document, Theme, View};
+use crate::{
+    graphics::{Color, Modifier, Style},
+    Document, Editor, Theme, View,
+};
 
 pub type GutterFn<'doc> = Box Option