From 4df08ddbe02f1e8d84c6aa4be810a91f83c73441 Mon Sep 17 00:00:00 2001 From: Mehdi Abedi <30735843+areux@users.noreply.github.com> Date: Wed, 14 Feb 2024 10:53:15 +0000 Subject: [PATCH 01/60] Allow numbers as second input event (#8471) * Make sure pending key list is empty when count handling This will allow using numbers as second key event. * count handling; add an exception for 'g' * Lookup the key event before considering a number as count * Avoid the allocation of another vec for the pending keys --------- Co-authored-by: x --- helix-term/src/keymap.rs | 9 +++++++++ helix-term/src/ui/editor.rs | 4 +++- 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/helix-term/src/keymap.rs b/helix-term/src/keymap.rs index d9297e08d..975274ed1 100644 --- a/helix-term/src/keymap.rs +++ b/helix-term/src/keymap.rs @@ -303,6 +303,15 @@ impl Keymaps { self.sticky.as_ref() } + pub fn contains_key(&self, mode: Mode, key: KeyEvent) -> bool { + let keymaps = &*self.map(); + let keymap = &keymaps[&mode]; + keymap + .search(self.pending()) + .and_then(KeyTrie::node) + .is_some_and(|node| node.contains_key(&key)) + } + /// Lookup `key` in the keymap to try and find a command to execute. Escape /// key cancels pending keystrokes. If there are no pending keystrokes but a /// sticky node is in use, it will be cleared. diff --git a/helix-term/src/ui/editor.rs b/helix-term/src/ui/editor.rs index fef62a292..bb749d2e7 100644 --- a/helix-term/src/ui/editor.rs +++ b/helix-term/src/ui/editor.rs @@ -903,7 +903,9 @@ impl EditorView { fn command_mode(&mut self, mode: Mode, cxt: &mut commands::Context, event: KeyEvent) { match (event, cxt.editor.count) { // count handling - (key!(i @ '0'), Some(_)) | (key!(i @ '1'..='9'), _) => { + (key!(i @ '0'), Some(_)) | (key!(i @ '1'..='9'), _) + if !self.keymaps.contains_key(mode, event) => + { let i = i.to_digit(10).unwrap() as usize; cxt.editor.count = std::num::NonZeroUsize::new(cxt.editor.count.map_or(i, |c| c.get() * 10 + i)); From 59369d99e2ed891206bd63c9b83b034da474920a Mon Sep 17 00:00:00 2001 From: Michael Davis Date: Wed, 14 Feb 2024 11:55:55 -0500 Subject: [PATCH 02/60] Bump tree-sitter-erlang, add `*.app.src` file-type (#9627) --- languages.toml | 4 ++-- runtime/queries/erlang/highlights.scm | 3 ++- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/languages.toml b/languages.toml index e0f3961e0..bfa33bf58 100644 --- a/languages.toml +++ b/languages.toml @@ -1598,7 +1598,7 @@ source = { git = "https://github.com/jaredramirez/tree-sitter-rescript", rev = " name = "erlang" scope = "source.erlang" injection-regex = "erl(ang)?" -file-types = ["erl", "hrl", "app", { glob = "rebar.config" }, { glob = "rebar.lock" }] +file-types = ["erl", "hrl", "app", { glob = "rebar.config" }, { glob = "rebar.lock" }, { glob = "*.app.src" }] roots = ["rebar.config"] shebangs = ["escript"] comment-token = "%%" @@ -1615,7 +1615,7 @@ language-servers = [ "erlang-ls" ] [[grammar]] name = "erlang" -source = { git = "https://github.com/the-mikedavis/tree-sitter-erlang", rev = "ce0ed253d72c199ab93caba7542b6f62075339c4" } +source = { git = "https://github.com/the-mikedavis/tree-sitter-erlang", rev = "731e50555a51f0d8635992b0e60dc98cc47a58d7" } [[language]] name = "kotlin" diff --git a/runtime/queries/erlang/highlights.scm b/runtime/queries/erlang/highlights.scm index 741f5e544..6d0ec36d7 100644 --- a/runtime/queries/erlang/highlights.scm +++ b/runtime/queries/erlang/highlights.scm @@ -145,8 +145,9 @@ ((atom) @constant.builtin.boolean (#match? 
@constant.builtin.boolean "^(true|false)$")) (atom) @string.special.symbol -(string) @string +[(string) (sigil)] @string (character) @constant.character +(escape_sequence) @constant.character.escape (integer) @constant.numeric.integer (float) @constant.numeric.float From 76e512f9445b2a26655248b46cf13413f9a6bbba Mon Sep 17 00:00:00 2001 From: Matt Date: Fri, 16 Feb 2024 15:43:14 +0100 Subject: [PATCH 03/60] Remove unwrap on line option, preventing DAP crash (#9632) * Remove unwrap on line option, preventing DAP crash, ref #4683 * Update to fall back to existing values for option fields --- helix-view/src/handlers/dap.rs | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/helix-view/src/handlers/dap.rs b/helix-view/src/handlers/dap.rs index e1437bef7..a5fa0c29c 100644 --- a/helix-view/src/handlers/dap.rs +++ b/helix-view/src/handlers/dap.rs @@ -226,10 +226,15 @@ impl Editor { breakpoints.iter().position(|b| b.id == breakpoint.id) { breakpoints[i].verified = breakpoint.verified; - breakpoints[i].message = breakpoint.message.clone(); - breakpoints[i].line = - breakpoint.line.unwrap().saturating_sub(1); // TODO: no unwrap - breakpoints[i].column = breakpoint.column; + breakpoints[i].message = breakpoint + .message + .clone() + .or_else(|| breakpoints[i].message.take()); + breakpoints[i].line = breakpoint + .line + .map_or(breakpoints[i].line, |line| line.saturating_sub(1)); + breakpoints[i].column = + breakpoint.column.or(breakpoints[i].column); } } } From 6ffe09e873821c7232d1da23ea4642214e42a1e7 Mon Sep 17 00:00:00 2001 From: Nick Condron Date: Sat, 17 Feb 2024 11:08:18 -0500 Subject: [PATCH 04/60] Fix Sonokai theme to better match original (#5379) * Make sonokai palette perfectfully faithful * Amend theme to better match original sonokai Changes based on the following references: (1) https://www.sainnhe.dev/post/contributing-guide/#sonokai (2) https://github.com/sainnhe/sonokai/blob/master/colors/sonokai.vim * Make constants white (1) * Make builtin variables purple (1) * Make members orange (1) * Make labels red (2) * Make operators red (1) * Make all punctuation grey (2) * Make builtin functions and macros green (2) * Make diff delta blue (2) * Make cursor match bg4 (2) * Make visible whitespace bg4 (2) * Make Sonokai special punctuation yellow --- runtime/themes/sonokai.toml | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/runtime/themes/sonokai.toml b/runtime/themes/sonokai.toml index f586be28d..4bbdb4dbe 100644 --- a/runtime/themes/sonokai.toml +++ b/runtime/themes/sonokai.toml @@ -6,24 +6,23 @@ # License: MIT License "type" = "blue" -"constant" = "purple" +"constant" = "fg" "constant.numeric" = "purple" "constant.character.escape" = "orange" "string" = "yellow" "comment" = "grey" "variable" = "fg" -"variable.builtin" = "orange" +"variable.builtin" = "purple" "variable.parameter" = "fg" -"variable.other.member" = "fg" -"label" = "orange" +"variable.other.member" = "orange" +"label" = "red" "punctuation" = "grey" -"punctuation.delimiter" = "grey" -"punctuation.bracket" = "fg" +"punctuation.special" = "yellow" "keyword" = "red" -"operator" = "orange" +"operator" = "red" "function" = "green" -"function.builtin" = "blue" -"function.macro" = "purple" +"function.builtin" = "green" +"function.macro" = "green" "tag" = "yellow" "namespace" = "blue" "attribute" = "purple" @@ -48,12 +47,12 @@ "markup.raw" = "green" "diff.plus" = "green" -"diff.delta" = "orange" +"diff.delta" = "blue" "diff.minus" = "red" "ui.background" = { 
bg = "bg0" } "ui.cursor" = { modifiers = ['reversed'] } -"ui.cursor.match" = { fg = "orange", bg = "diff_yellow" } +"ui.cursor.match" = { bg = "bg4" } "ui.cursor.insert" = { fg = "black", bg = "grey" } "ui.cursor.select" = { fg = "bg0", bg = "blue" } "ui.selection" = { bg = "bg5" } @@ -73,7 +72,7 @@ "ui.text.focus" = "green" "ui.menu" = { fg = "fg", bg = "bg2" } "ui.menu.selected" = { fg = "bg0", bg = "green" } -"ui.virtual.whitespace" = { fg = "grey_dim" } +"ui.virtual.whitespace" = "bg4" "ui.virtual.ruler" = { bg = "bg3" } "ui.virtual.inlay-hint" = { fg = "grey_dim" } @@ -92,11 +91,12 @@ error = { fg = 'red', bg = 'bg2', modifiers = ['bold'] } [palette] black = "#181819" +bg_dim = "#222327" bg0 = "#2c2e34" bg1 = "#33353f" bg2 = "#363944" bg3 = "#3b3e48" -bg4 = "#5C606A" +bg4 = "#414550" bg5 = "#444852" bg_red = "#ff6077" diff_red = "#55393d" From 3e963b3c1b5eb4b5cd7f33b8ef6d6642de210a9b Mon Sep 17 00:00:00 2001 From: Vivek Kethineni <35619837+uek-1@users.noreply.github.com> Date: Sat, 17 Feb 2024 10:08:44 -0600 Subject: [PATCH 05/60] Add Rust fields as argument textobject (#9637) * added field_declaration_list and field_initializer_list as parameter textobjects * removed field_declaration_listt from textobjects.scm --- runtime/queries/rust/textobjects.scm | 3 +++ 1 file changed, 3 insertions(+) diff --git a/runtime/queries/rust/textobjects.scm b/runtime/queries/rust/textobjects.scm index 837f981e7..df26331d8 100644 --- a/runtime/queries/rust/textobjects.scm +++ b/runtime/queries/rust/textobjects.scm @@ -34,6 +34,9 @@ (arguments ((_) @parameter.inside . ","? @parameter.around) @parameter.around) +(field_initializer_list + ((_) @parameter.inside . ","? @parameter.around) @parameter.around) + [ (line_comment) (block_comment) From 78c34194b5c83beb26ca04f12bf9d53fd5aba801 Mon Sep 17 00:00:00 2001 From: melted-brownie <66597133+melted-brownie@users.noreply.github.com> Date: Sat, 17 Feb 2024 16:09:21 +0000 Subject: [PATCH 06/60] Improve textobjects for parameter/argument for Dart (#9644) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Sébastien Blondiau --- runtime/queries/dart/textobjects.scm | 29 ++++++++++++++++++++++++++-- 1 file changed, 27 insertions(+), 2 deletions(-) diff --git a/runtime/queries/dart/textobjects.scm b/runtime/queries/dart/textobjects.scm index 028276156..b88b97bc9 100644 --- a/runtime/queries/dart/textobjects.scm +++ b/runtime/queries/dart/textobjects.scm @@ -56,9 +56,34 @@ (documentation_comment)+ @comment.around -(formal_parameter) @parameter.inside +(formal_parameter_list + ( + (formal_parameter) @parameter.inside . ","? @parameter.around + ) @parameter.around +) + +(optional_formal_parameters + ( + (formal_parameter) @parameter.inside . ","? @parameter.around + ) @parameter.around +) + +(arguments + ( + [ + (argument) @parameter.inside + (named_argument (label) . (_)* @parameter.inside) + ] + . ","? @parameter.around + ) @parameter.around +) -(formal_parameter_list) @parameter.around +(type_arguments + ( + ((_) . ("." . (_) @parameter.inside @parameter.around)?) @parameter.inside + . ","? @parameter.around + ) @parameter.around +) (expression_statement ((identifier) @_name (#any-of? @_name "test" "testWidgets")) From 64326698225999016300423c4b90f8a8c7b8f38b Mon Sep 17 00:00:00 2001 From: Vince Varga Date: Sun, 18 Feb 2024 15:55:21 +0100 Subject: [PATCH 07/60] Add tmux.conf as a bash file type (#9653) * Add conf as a bash file type Tmux and tmux.conf is used widely in software developer circles. 
Having the tmux.conf file not have any syntax highlighting by default is (IMO) not ideal for an editor that otherwise "just works". * Use tmux.conf glob instead of simply conf for tmux Co-authored-by: Michael Davis --------- Co-authored-by: Michael Davis --- languages.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/languages.toml b/languages.toml index bfa33bf58..78078ca58 100644 --- a/languages.toml +++ b/languages.toml @@ -860,6 +860,7 @@ file-types = [ "tcshrc", "bashrc_Apple_Terminal", "zshrc_Apple_Terminal", + { glob = "tmux.conf" }, { glob = ".bash_history" }, { glob = ".bash_login" }, { glob = ".bash_logout" }, From 9ab3f9d01a8ea4967f5a7e64a3f6c8f350674c18 Mon Sep 17 00:00:00 2001 From: AlexanderDickie <75994927+AlexanderDickie@users.noreply.github.com> Date: Sun, 18 Feb 2024 23:13:04 +0000 Subject: [PATCH 08/60] Scroll cursor and page together (neovim-like scrolling) (#8015) * neovim like scroll function * clear line annotations outside of move_vertically/_visual * add nvim scroll function to commands * assign nvim-scroll to C-d and C-u (half page scrolls) * dont remove backspace and space mapping * move non-softwrap logic to seperate function, call this in nvim-scroll fn * Revert "move non-softwrap logic to seperate function, call this in nvim-scroll fn" This reverts commit e4905729c338a2260e6981f1d8fac022897b4191. * Revert "clear line annotations outside of move_vertically/_visual" This reverts commit 1df3fefe55afc840d1ab5094b2116d1127fc363f. * add TODO for when inline diagnostics gets merged * move nvim-scroll logic into scroll(), dont respect scrolloff * run cargo fmt * run cargo clippy * update documenation for Ctrl-d and Ctrl-u remap --- book/src/keymap.md | 28 ++++++------- helix-term/src/commands.rs | 68 ++++++++++++++++++++++++++++---- helix-term/src/keymap/default.rs | 12 +++--- helix-term/src/ui/editor.rs | 2 +- 4 files changed, 81 insertions(+), 29 deletions(-) diff --git a/book/src/keymap.md b/book/src/keymap.md index a3e41666f..ac84147cd 100644 --- a/book/src/keymap.md +++ b/book/src/keymap.md @@ -53,8 +53,8 @@ Normal mode is the default mode when you launch helix. You can return to it from | `End` | Move to the end of the line | `goto_line_end` | | `Ctrl-b`, `PageUp` | Move page up | `page_up` | | `Ctrl-f`, `PageDown` | Move page down | `page_down` | -| `Ctrl-u` | Move half page up | `half_page_up` | -| `Ctrl-d` | Move half page down | `half_page_down` | +| `Ctrl-u` | Move cursor and page half page up | `page_cursor_half_up` | +| `Ctrl-d` | Move cursor and page half page down | `page_cursor_half_down` | | `Ctrl-i` | Jump forward on the jumplist | `jump_forward` | | `Ctrl-o` | Jump backward on the jumplist | `jump_backward` | | `Ctrl-s` | Save the current selection to the jumplist | `save_selection` | @@ -182,18 +182,18 @@ normal mode) is persistent and can be exited using the escape key. This is useful when you're simply looking over text and not actively editing it. 
-| Key | Description | Command | -| ----- | ----------- | ------- | -| `z`, `c` | Vertically center the line | `align_view_center` | -| `t` | Align the line to the top of the screen | `align_view_top` | -| `b` | Align the line to the bottom of the screen | `align_view_bottom` | -| `m` | Align the line to the middle of the screen (horizontally) | `align_view_middle` | -| `j`, `down` | Scroll the view downwards | `scroll_down` | -| `k`, `up` | Scroll the view upwards | `scroll_up` | -| `Ctrl-f`, `PageDown` | Move page down | `page_down` | -| `Ctrl-b`, `PageUp` | Move page up | `page_up` | -| `Ctrl-d` | Move half page down | `half_page_down` | -| `Ctrl-u` | Move half page up | `half_page_up` | +| Key | Description | Command | +| ----- | ----------- | ------- | +| `z`, `c` | Vertically center the line | `align_view_center` | +| `t` | Align the line to the top of the screen | `align_view_top` | +| `b` | Align the line to the bottom of the screen | `align_view_bottom` | +| `m` | Align the line to the middle of the screen (horizontally) | `align_view_middle` | +| `j`, `down` | Scroll the view downwards | `scroll_down` | +| `k`, `up` | Scroll the view upwards | `scroll_up` | +| `Ctrl-f`, `PageDown` | Move page down | `page_down` | +| `Ctrl-b`, `PageUp` | Move page up | `page_up` | +| `Ctrl-u` | Move cursor and page half page up | `page_cursor_half_up` | +| `Ctrl-d` | Move cursor and page half page down | `page_cursor_half_down` | #### Goto mode diff --git a/helix-term/src/commands.rs b/helix-term/src/commands.rs index e46109c0f..51a1ede9b 100644 --- a/helix-term/src/commands.rs +++ b/helix-term/src/commands.rs @@ -277,6 +277,10 @@ impl MappableCommand { page_down, "Move page down", half_page_up, "Move half page up", half_page_down, "Move half page down", + page_cursor_up, "Move page and cursor up", + page_cursor_down, "Move page and cursor down", + page_cursor_half_up, "Move page and cursor half up", + page_cursor_half_down, "Move page and cursor half down", select_all, "Select whole document", select_regex, "Select all regex matches inside selections", split_selection, "Split selections on regex matches", @@ -1608,7 +1612,7 @@ fn switch_to_lowercase(cx: &mut Context) { }); } -pub fn scroll(cx: &mut Context, offset: usize, direction: Direction) { +pub fn scroll(cx: &mut Context, offset: usize, direction: Direction, sync_cursor: bool) { use Direction::*; let config = cx.editor.config(); let (view, doc) = current!(cx.editor); @@ -1628,7 +1632,7 @@ pub fn scroll(cx: &mut Context, offset: usize, direction: Direction) { let doc_text = doc.text().slice(..); let viewport = view.inner_area(doc); let text_fmt = doc.text_format(viewport.width, None); - let annotations = view.text_annotations(doc, None); + let mut annotations = view.text_annotations(doc, None); (view.offset.anchor, view.offset.vertical_offset) = char_idx_at_visual_offset( doc_text, view.offset.anchor, @@ -1638,6 +1642,30 @@ pub fn scroll(cx: &mut Context, offset: usize, direction: Direction) { &annotations, ); + if sync_cursor { + let movement = match cx.editor.mode { + Mode::Select => Movement::Extend, + _ => Movement::Move, + }; + // TODO: When inline diagnostics gets merged- 1. move_vertically_visual removes + // line annotations/diagnostics so the cursor may jump further than the view. + // 2. If the cursor lands on a complete line of virtual text, the cursor will + // jump a different distance than the view. 
+ let selection = doc.selection(view.id).clone().transform(|range| { + move_vertically_visual( + doc_text, + range, + direction, + offset.unsigned_abs(), + movement, + &text_fmt, + &mut annotations, + ) + }); + doc.set_selection(view.id, selection); + return; + } + let mut head; match direction { Forward => { @@ -1688,25 +1716,49 @@ pub fn scroll(cx: &mut Context, offset: usize, direction: Direction) { fn page_up(cx: &mut Context) { let view = view!(cx.editor); let offset = view.inner_height(); - scroll(cx, offset, Direction::Backward); + scroll(cx, offset, Direction::Backward, false); } fn page_down(cx: &mut Context) { let view = view!(cx.editor); let offset = view.inner_height(); - scroll(cx, offset, Direction::Forward); + scroll(cx, offset, Direction::Forward, false); } fn half_page_up(cx: &mut Context) { let view = view!(cx.editor); let offset = view.inner_height() / 2; - scroll(cx, offset, Direction::Backward); + scroll(cx, offset, Direction::Backward, false); } fn half_page_down(cx: &mut Context) { let view = view!(cx.editor); let offset = view.inner_height() / 2; - scroll(cx, offset, Direction::Forward); + scroll(cx, offset, Direction::Forward, false); +} + +fn page_cursor_up(cx: &mut Context) { + let view = view!(cx.editor); + let offset = view.inner_height(); + scroll(cx, offset, Direction::Backward, true); +} + +fn page_cursor_down(cx: &mut Context) { + let view = view!(cx.editor); + let offset = view.inner_height(); + scroll(cx, offset, Direction::Forward, true); +} + +fn page_cursor_half_up(cx: &mut Context) { + let view = view!(cx.editor); + let offset = view.inner_height() / 2; + scroll(cx, offset, Direction::Backward, true); +} + +fn page_cursor_half_down(cx: &mut Context) { + let view = view!(cx.editor); + let offset = view.inner_height() / 2; + scroll(cx, offset, Direction::Forward, true); } #[allow(deprecated)] @@ -4856,11 +4908,11 @@ fn align_view_middle(cx: &mut Context) { } fn scroll_up(cx: &mut Context) { - scroll(cx, cx.count(), Direction::Backward); + scroll(cx, cx.count(), Direction::Backward, false); } fn scroll_down(cx: &mut Context) { - scroll(cx, cx.count(), Direction::Forward); + scroll(cx, cx.count(), Direction::Forward, false); } fn goto_ts_object_impl(cx: &mut Context, object: &'static str, direction: Direction) { diff --git a/helix-term/src/keymap/default.rs b/helix-term/src/keymap/default.rs index 763ed4ae7..92d6b5906 100644 --- a/helix-term/src/keymap/default.rs +++ b/helix-term/src/keymap/default.rs @@ -178,8 +178,8 @@ pub fn default() -> HashMap { "esc" => normal_mode, "C-b" | "pageup" => page_up, "C-f" | "pagedown" => page_down, - "C-u" => half_page_up, - "C-d" => half_page_down, + "C-u" => page_cursor_half_up, + "C-d" => page_cursor_half_down, "C-w" => { "Window" "C-w" | "w" => rotate_view, @@ -287,8 +287,8 @@ pub fn default() -> HashMap { "j" | "down" => scroll_down, "C-b" | "pageup" => page_up, "C-f" | "pagedown" => page_down, - "C-u" | "backspace" => half_page_up, - "C-d" | "space" => half_page_down, + "C-u" | "backspace" => page_cursor_half_up, + "C-d" | "space" => page_cursor_half_down, "/" => search, "?" => rsearch, @@ -304,8 +304,8 @@ pub fn default() -> HashMap { "j" | "down" => scroll_down, "C-b" | "pageup" => page_up, "C-f" | "pagedown" => page_down, - "C-u" | "backspace" => half_page_up, - "C-d" | "space" => half_page_down, + "C-u" | "backspace" => page_cursor_half_up, + "C-d" | "space" => page_cursor_half_down, "/" => search, "?" 
=> rsearch, diff --git a/helix-term/src/ui/editor.rs b/helix-term/src/ui/editor.rs index bb749d2e7..a87e6cbca 100644 --- a/helix-term/src/ui/editor.rs +++ b/helix-term/src/ui/editor.rs @@ -1156,7 +1156,7 @@ impl EditorView { } let offset = config.scroll_lines.unsigned_abs(); - commands::scroll(cxt, offset, direction); + commands::scroll(cxt, offset, direction, false); cxt.editor.tree.focus = current_view; cxt.editor.ensure_cursor_in_view(current_view); From b950dea003750120f6e764afd5afd7b738aa6410 Mon Sep 17 00:00:00 2001 From: Jimmy Zelinskie Date: Sun, 18 Feb 2024 18:19:26 -0500 Subject: [PATCH 09/60] add monokai soda theme (#9651) --- runtime/themes/monokai_soda.toml | 120 +++++++++++++++++++++++++++++++ 1 file changed, 120 insertions(+) create mode 100644 runtime/themes/monokai_soda.toml diff --git a/runtime/themes/monokai_soda.toml b/runtime/themes/monokai_soda.toml new file mode 100644 index 000000000..7e43247e3 --- /dev/null +++ b/runtime/themes/monokai_soda.toml @@ -0,0 +1,120 @@ +# Monokai Soda port for Helix (https://helix-editor.com) +# Author : Jimmy Zelinskie + +# Syntax + +## Constants +"constant" = "white" +"constant.builtin" = "pink" +"constant.character.escape" = "blue" +"constant.numeric" = "purple" + +## Diagnostics +"diagnostic" = { modifiers = ["underlined"] } +"diagnostic.error" = { underline = { style = "curl", color = "pink" } } +"diagnostic.warning" = { underline = { style = "curl", color = "orange" } } +"diagnostic.info" = { underline = { style = "curl", color = "white" } } + +## Diffs +"diff.plus" = "green" +"diff.delta" = "orange" +"diff.minus" = "pink" +"diff.delta.moved" = "orange" + +## Functions +"function" = "green" +"function.macro" = "blue" +"function.builtin" = "pink" +"constructor" = "blue" + +## Keywords +"keyword" = "pink" +"keyword.directive" = "blue" + +## Punctuation +"punctuation" = "gray" + +## Strings +"string" = "yellow" + +## Types +"type" = "blue" +"type.builtin" = "pink" + +## Variables +"variable" = "white" +"variable.builtin" = "pink" +"variable.other.member" = "white" +"variable.parameter" = "softorange" + +## Markup +"markup.heading" = "green" +"markup.bold" = { fg = "orange", modifiers = ["bold"] } +"markup.italic" = { fg = "orange", modifiers = ["italic"] } +"markup.link.url" = { fg = "orange", modifiers = ["underlined"] } +"markup.link.text" = "yellow" +"markup.quote" = "green" + +## Misc +"attribute" = "blue" +"comment" = { fg = "gray", modifiers = ["italic"] } +"error" = "pink" +"hint" = "white" +"info" = "white" +"label" = "yellow" +"module" = "softorange" +"namespace" = "pink" +"operator" = "pink" +"special" = "softorange" +"warning" = "orange" + +# Editor UI + +## Main +"ui.background" = { bg = "background" } +"ui.text" = "white" +"ui.window" = { bg = "darkgray" } + +## Debug (TODO) + +## Menus +"ui.menu" = { fg = "white", bg = "darkgray" } +"ui.menu.selected" = { modifiers = ["reversed"] } +"ui.popup" = { bg = "darkgray" } +"ui.help" = { fg = "white", bg = "darkgray" } + +## Gutter +"ui.linenr" = "darkgray" +"ui.linenr.selected" = "orange" + +## Cursor +"ui.cursor.primary" = { fg = "white", modifiers = ["reversed"] } +"ui.cursor.match" = { fg = "white", modifiers = ["reversed"] } +"ui.selection" = { bg = "darkgray" } + +## Statusline +"ui.statusline" = { bg = "darkgray" } +"ui.statusline.inactive" = { fg = "white", bg = "darkgray" } +"ui.statusline.normal" = { fg = "white", bg = "blue" } +"ui.statusline.insert" = { fg = "white", bg = "green" } +"ui.statusline.select" = { fg = "white", bg = "purple" } + +"ui.text.focus" = { fg = 
"yellow", modifiers = ["bold"] } +"ui.virtual" = "darkgray" +"ui.virtual.ruler" = { bg = "darkgray" } + +# Palette + +[palette] +"purple" = "#AE81FF" +"yellow" = "#E6DB74" +"pink" = "#f92a72" +"white" = "#cfcfc2" +"gray" = "#75715e" +"darkgray" = "#444444" +"black" = "#222222" +"blue" = "#66d9ef" +"green" = "#a6e22e" +"softorange" = "#f59762" +"orange" = "#fd971f" +"background" = "#191919" From c72426cc874c70bfb5660b2866e634927e5fb4fd Mon Sep 17 00:00:00 2001 From: Malpha Date: Sun, 18 Feb 2024 23:19:44 +0000 Subject: [PATCH 10/60] Add docker-compose language (#9661) * languages: add docker-compose language it uses docker-compose-langserver as lsp And yaml for syntax highlighting, indents and injections * languages: add luajit as a shebang of lua This helps to provide syntax highlighting and other lua goodies when writing luajit * book(update): run cargo xtask docgen * since #8006 full filenames uses glob --- book/src/generated/lang-support.md | 1 + languages.toml | 13 ++++++++++++- runtime/queries/docker-compose/highlights.scm | 1 + runtime/queries/docker-compose/indents.scm | 1 + runtime/queries/docker-compose/injections.scm | 1 + 5 files changed, 16 insertions(+), 1 deletion(-) create mode 100644 runtime/queries/docker-compose/highlights.scm create mode 100644 runtime/queries/docker-compose/indents.scm create mode 100644 runtime/queries/docker-compose/injections.scm diff --git a/book/src/generated/lang-support.md b/book/src/generated/lang-support.md index bfe6d6b1e..a37e165f2 100644 --- a/book/src/generated/lang-support.md +++ b/book/src/generated/lang-support.md @@ -30,6 +30,7 @@ | devicetree | ✓ | | | | | dhall | ✓ | ✓ | | `dhall-lsp-server` | | diff | ✓ | | | | +| docker-compose | ✓ | | ✓ | `docker-compose-langserver` | | dockerfile | ✓ | | | `docker-langserver` | | dot | ✓ | | | `dot-language-server` | | dtd | ✓ | | | | diff --git a/languages.toml b/languages.toml index 78078ca58..e52dcabbc 100644 --- a/languages.toml +++ b/languages.toml @@ -23,6 +23,7 @@ cuelsp = { command = "cuelsp" } dart = { command = "dart", args = ["language-server", "--client-id=helix"] } dhall-lsp-server = { command = "dhall-lsp-server" } docker-langserver = { command = "docker-langserver", args = ["--stdio"] } +docker-compose-langserver = { command = "docker-compose-langserver", args = ["--stdio"]} dot-language-server = { command = "dot-language-server", args = ["--stdio"] } elixir-ls = { command = "elixir-ls", config = { elixirLS.dialyzerEnabled = false } } elm-language-server = { command = "elm-language-server" } @@ -1092,7 +1093,7 @@ name = "lua" injection-regex = "lua" scope = "source.lua" file-types = ["lua"] -shebangs = ["lua"] +shebangs = ["lua", "luajit"] roots = [".luarc.json", ".luacheckrc", ".stylua.toml", "selene.toml", ".git"] comment-token = "--" indent = { tab-width = 2, unit = " " } @@ -1460,6 +1461,16 @@ language-servers = [ "docker-langserver" ] name = "dockerfile" source = { git = "https://github.com/camdencheek/tree-sitter-dockerfile", rev = "8ee3a0f7587b2bd8c45c8cb7d28bd414604aec62" } +[[language]] +name = "docker-compose" +scope = "source.yaml.docker-compose" +roots = ["docker-compose.yaml", "docker-compose.yml"] +language-servers = [ "docker-compose-langserver" ] +file-types = [{ glob = "docker-compose.yaml" }, { glob = "docker-compose.yml" }] +comment-token = "#" +indent = { tab-width = 2, unit = " " } +grammar = "yaml" + [[language]] name = "git-commit" scope = "git.commitmsg" diff --git a/runtime/queries/docker-compose/highlights.scm b/runtime/queries/docker-compose/highlights.scm 
new file mode 100644 index 000000000..4ba254e82 --- /dev/null +++ b/runtime/queries/docker-compose/highlights.scm @@ -0,0 +1 @@ +; inherits: yaml diff --git a/runtime/queries/docker-compose/indents.scm b/runtime/queries/docker-compose/indents.scm new file mode 100644 index 000000000..4ba254e82 --- /dev/null +++ b/runtime/queries/docker-compose/indents.scm @@ -0,0 +1 @@ +; inherits: yaml diff --git a/runtime/queries/docker-compose/injections.scm b/runtime/queries/docker-compose/injections.scm new file mode 100644 index 000000000..4ba254e82 --- /dev/null +++ b/runtime/queries/docker-compose/injections.scm @@ -0,0 +1 @@ +; inherits: yaml From 2dc9ce68ec84fd26e1489e3bac76fc5114d0023e Mon Sep 17 00:00:00 2001 From: Jaakko Paju Date: Mon, 19 Feb 2024 01:46:13 +0200 Subject: [PATCH 11/60] Add textobject queries for Nix (#9659) * Add textobject queries for Nix * Add to lang-support.md --- book/src/generated/lang-support.md | 2 +- runtime/queries/nix/textobjects.scm | 9 +++++++++ 2 files changed, 10 insertions(+), 1 deletion(-) create mode 100644 runtime/queries/nix/textobjects.scm diff --git a/book/src/generated/lang-support.md b/book/src/generated/lang-support.md index a37e165f2..09ed4dac5 100644 --- a/book/src/generated/lang-support.md +++ b/book/src/generated/lang-support.md @@ -112,7 +112,7 @@ | nasm | ✓ | ✓ | | | | nickel | ✓ | | ✓ | `nls` | | nim | ✓ | ✓ | ✓ | `nimlangserver` | -| nix | ✓ | | | `nil` | +| nix | ✓ | ✓ | | `nil` | | nu | ✓ | | | `nu` | | nunjucks | ✓ | | | | | ocaml | ✓ | | ✓ | `ocamllsp` | diff --git a/runtime/queries/nix/textobjects.scm b/runtime/queries/nix/textobjects.scm new file mode 100644 index 000000000..1508d4c2b --- /dev/null +++ b/runtime/queries/nix/textobjects.scm @@ -0,0 +1,9 @@ +(comment) @comment.inside +(comment)+ @comment.around + +(formals + ((_) @parameter.inside . ","? @parameter.around) @parameter.around) + +(function_expression + body: (_) @function.inside) @function.around + From ebf155d6351a84dec010447af88b8246103537da Mon Sep 17 00:00:00 2001 From: Jaakko Paju Date: Mon, 19 Feb 2024 01:46:32 +0200 Subject: [PATCH 12/60] Add textobject queries for HCL (#9658) * Add textobject queries for HCL * Add to lang-support.md --- book/src/generated/lang-support.md | 2 +- runtime/queries/hcl/textobjects.scm | 6 ++++++ 2 files changed, 7 insertions(+), 1 deletion(-) create mode 100644 runtime/queries/hcl/textobjects.scm diff --git a/book/src/generated/lang-support.md b/book/src/generated/lang-support.md index 09ed4dac5..f46c9f5e2 100644 --- a/book/src/generated/lang-support.md +++ b/book/src/generated/lang-support.md @@ -68,7 +68,7 @@ | hare | ✓ | | | | | haskell | ✓ | ✓ | | `haskell-language-server-wrapper` | | haskell-persistent | ✓ | | | | -| hcl | ✓ | | ✓ | `terraform-ls` | +| hcl | ✓ | ✓ | ✓ | `terraform-ls` | | heex | ✓ | ✓ | | `elixir-ls` | | hocon | ✓ | | ✓ | | | hoon | ✓ | | | | diff --git a/runtime/queries/hcl/textobjects.scm b/runtime/queries/hcl/textobjects.scm new file mode 100644 index 000000000..1e6505876 --- /dev/null +++ b/runtime/queries/hcl/textobjects.scm @@ -0,0 +1,6 @@ +(comment) @comment.inside +(comment)+ @comment.around + +(function_arguments + ((_) @parameter.inside . ","? 
@parameter.around) @parameter.around) + From 787cc36092a5d1a575697287d1d6ba08336a8a96 Mon Sep 17 00:00:00 2001 From: nkitsaini <74284503+nkitsaini@users.noreply.github.com> Date: Mon, 19 Feb 2024 18:28:17 +0530 Subject: [PATCH 13/60] fix LSP ComplitionTriggerKind value for `TriggerKind::Auto` (#9660) --- helix-term/src/handlers/completion.rs | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/helix-term/src/handlers/completion.rs b/helix-term/src/handlers/completion.rs index d71fd24fc..491ca5638 100644 --- a/helix-term/src/handlers/completion.rs +++ b/helix-term/src/handlers/completion.rs @@ -221,9 +221,17 @@ fn request_completion( .iter() .find(|&trigger| trigger_text.ends_with(trigger)) }); - lsp::CompletionContext { - trigger_kind: lsp::CompletionTriggerKind::TRIGGER_CHARACTER, - trigger_character: trigger_char.cloned(), + + if trigger_char.is_some() { + lsp::CompletionContext { + trigger_kind: lsp::CompletionTriggerKind::TRIGGER_CHARACTER, + trigger_character: trigger_char.cloned(), + } + } else { + lsp::CompletionContext { + trigger_kind: lsp::CompletionTriggerKind::INVOKED, + trigger_character: None, + } } }; From cdef4f8a701f921c29fdfe66f104a2edac7fe05c Mon Sep 17 00:00:00 2001 From: Jonathan LEI Date: Mon, 19 Feb 2024 22:08:26 +0900 Subject: [PATCH 14/60] Make mouse click extend selection in select mode (#5436) * Make mouse click extend selection in select mode * chore: better readability with `Option::take()` --- helix-term/src/ui/editor.rs | 40 ++++++++++++++++++++++++++----------- helix-view/src/editor.rs | 7 +++++-- 2 files changed, 33 insertions(+), 14 deletions(-) diff --git a/helix-term/src/ui/editor.rs b/helix-term/src/ui/editor.rs index a87e6cbca..66f290a22 100644 --- a/helix-term/src/ui/editor.rs +++ b/helix-term/src/ui/editor.rs @@ -1088,6 +1088,15 @@ impl EditorView { if modifiers == KeyModifiers::ALT { let selection = doc.selection(view_id).clone(); doc.set_selection(view_id, selection.push(Range::point(pos))); + } else if editor.mode == Mode::Select { + // Discards non-primary selections for consistent UX with normal mode + let primary = doc.selection(view_id).primary().put_cursor( + doc.text().slice(..), + pos, + true, + ); + editor.mouse_down_range = Some(primary); + doc.set_selection(view_id, Selection::single(primary.anchor, primary.head)); } else { doc.set_selection(view_id, Selection::point(pos)); } @@ -1171,19 +1180,26 @@ impl EditorView { let (view, doc) = current!(cxt.editor); - if doc - .selection(view.id) - .primary() - .slice(doc.text().slice(..)) - .len_chars() - <= 1 - { - return EventResult::Ignored(None); - } - - commands::MappableCommand::yank_main_selection_to_primary_clipboard.execute(cxt); + let should_yank = match cxt.editor.mouse_down_range.take() { + Some(down_range) => doc.selection(view.id).primary() != down_range, + None => { + // This should not happen under normal cases. We fall back to the original + // behavior of yanking on non-single-char selections. 
+ doc.selection(view.id) + .primary() + .slice(doc.text().slice(..)) + .len_chars() + > 1 + } + }; - EventResult::Consumed(None) + if should_yank { + commands::MappableCommand::yank_main_selection_to_primary_clipboard + .execute(cxt); + EventResult::Consumed(None) + } else { + EventResult::Ignored(None) + } } MouseEventKind::Up(MouseButton::Right) => { diff --git a/helix-view/src/editor.rs b/helix-view/src/editor.rs index 68b74cf00..fffbe6207 100644 --- a/helix-view/src/editor.rs +++ b/helix-view/src/editor.rs @@ -42,7 +42,7 @@ pub use helix_core::diagnostic::Severity; use helix_core::{ auto_pairs::AutoPairs, syntax::{self, AutoPairConfig, IndentationHeuristic, LanguageServerFeature, SoftWrap}, - Change, LineEnding, Position, Selection, NATIVE_LINE_ENDING, + Change, LineEnding, Position, Range, Selection, NATIVE_LINE_ENDING, }; use helix_dap as dap; use helix_lsp::lsp; @@ -964,6 +964,8 @@ pub struct Editor { /// times during rendering and should not be set by other functions. pub cursor_cache: Cell>>, pub handlers: Handlers, + + pub mouse_down_range: Option, } pub type Motion = Box; @@ -1080,6 +1082,7 @@ impl Editor { needs_redraw: false, cursor_cache: Cell::new(None), handlers, + mouse_down_range: None, } } @@ -1978,7 +1981,7 @@ impl Editor { /// Switches the editor into normal mode. pub fn enter_normal_mode(&mut self) { - use helix_core::{graphemes, Range}; + use helix_core::graphemes; if self.mode == Mode::Normal { return; From 990378a46be2138b2f74799d1af8955390360c4e Mon Sep 17 00:00:00 2001 From: Volodymyr Chernetskyi Date: Mon, 19 Feb 2024 19:37:02 +0200 Subject: [PATCH 15/60] Add Groovy grammar (#9350) * Add Groovy grammar * Rewrite Neovim captures into Helix for Groovy * Simplify Groovy injections Co-authored-by: Michael Davis * Remove Neovim's spell from Groovy highlights Co-authored-by: Michael Davis * Apply suggestions to languages.toml * Escape backslash in groovy highlights.scm --------- Co-authored-by: Michael Davis --- book/src/generated/lang-support.md | 1 + languages.toml | 13 ++++ runtime/queries/groovy/highlights.scm | 96 +++++++++++++++++++++++++++ runtime/queries/groovy/injections.scm | 2 + 4 files changed, 112 insertions(+) create mode 100644 runtime/queries/groovy/highlights.scm create mode 100644 runtime/queries/groovy/injections.scm diff --git a/book/src/generated/lang-support.md b/book/src/generated/lang-support.md index f46c9f5e2..7aec37778 100644 --- a/book/src/generated/lang-support.md +++ b/book/src/generated/lang-support.md @@ -65,6 +65,7 @@ | gotmpl | ✓ | | | `gopls` | | gowork | ✓ | | | `gopls` | | graphql | ✓ | | | `graphql-lsp` | +| groovy | ✓ | | | | | hare | ✓ | | | | | haskell | ✓ | ✓ | | `haskell-language-server-wrapper` | | haskell-persistent | ✓ | | | | diff --git a/languages.toml b/languages.toml index e52dcabbc..1c4c61267 100644 --- a/languages.toml +++ b/languages.toml @@ -3125,3 +3125,16 @@ indent = { tab-width = 2, unit = " " } [[grammar]] name = "pkl" source = { git = "https://github.com/apple/tree-sitter-pkl", rev = "c03f04a313b712f8ab00a2d862c10b37318699ae" } + +[[language]] +name = "groovy" +language-id = "groovy" +scope = "source.groovy" +file-types = ["groovy", "jenkinsfile", { glob = "Jenkinsfile" }, { glob = "Jenkinsfile.*" }] +shebangs = ["groovy"] +comment-token = "//" +indent = { tab-width = 2, unit = " " } + +[[grammar]] +name = "groovy" +source = { git = "https://github.com/Decodetalkers/tree-sitter-groovy", rev = "7e023227f46fee428b16a0288eeb0f65ee2523ec" } diff --git a/runtime/queries/groovy/highlights.scm 
b/runtime/queries/groovy/highlights.scm new file mode 100644 index 000000000..4e94ccd3a --- /dev/null +++ b/runtime/queries/groovy/highlights.scm @@ -0,0 +1,96 @@ +(unit + (identifier) @variable) + +(string + (identifier) @variable) + +(escape_sequence) @constant.character.escape + +(block + (unit + (identifier) @namespace)) + +(func + (identifier) @function) + +(number) @constant.numeric + +((identifier) @constant.builtin.boolean + (#any-of? @constant.builtin.boolean "true" "false")) + +((identifier) @constant + (#match? @constant "^[A-Z][A-Z\\d_]*$")) + +((identifier) @constant.builtin + (#eq? @constant.builtin "null")) + +((identifier) @type + (#any-of? @type + "String" + "Map" + "Object" + "Boolean" + "Integer" + "List")) + +((identifier) @function.builtin + (#any-of? @function.builtin + "void" + "id" + "version" + "apply" + "implementation" + "testImplementation" + "androidTestImplementation" + "debugImplementation")) + +((identifier) @keyword.storage.modifier + (#eq? @keyword.storage.modifier "static")) + +((identifier) @keyword.storage.type + (#any-of? @keyword.storage.type "class" "def" "interface")) + +((identifier) @keyword + (#any-of? @keyword + "assert" + "new" + "extends" + "implements" + "instanceof")) + +((identifier) @keyword.control.import + (#any-of? @keyword.control.import "import" "package")) + +((identifier) @keyword.storage.modifier + (#any-of? @keyword.storage.modifier + "abstract" + "protected" + "private" + "public")) + +((identifier) @keyword.control.exception + (#any-of? @keyword.control.exception + "throw" + "finally" + "try" + "catch")) + +(string) @string + +[ + (line_comment) + (block_comment) +] @comment + +((block_comment) @comment.block.documentation + (#match? @comment.block.documentation "^/[*][*][^*](?s:.)*[*]/$")) + +((line_comment) @comment.block.documentation + (#match? @comment.block.documentation "^///[^/]*.*$")) + +[ + (operators) + (leading_key) +] @operator + +["(" ")" "[" "]" "{" "}"] @punctuation.bracket diff --git a/runtime/queries/groovy/injections.scm b/runtime/queries/groovy/injections.scm new file mode 100644 index 000000000..e4509a5fd --- /dev/null +++ b/runtime/queries/groovy/injections.scm @@ -0,0 +1,2 @@ +([(line_comment) (block_comment)] @injection.content + (#set! injection.language "comment")) From 27335476edc22c2da652c4636d9b1bae751fe22d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 20 Feb 2024 17:06:40 +0100 Subject: [PATCH 16/60] build(deps): bump chrono from 0.4.33 to 0.4.34 (#9673) Bumps [chrono](https://github.com/chronotope/chrono) from 0.4.33 to 0.4.34. - [Release notes](https://github.com/chronotope/chrono/releases) - [Changelog](https://github.com/chronotope/chrono/blob/main/CHANGELOG.md) - [Commits](https://github.com/chronotope/chrono/compare/v0.4.33...v0.4.34) --- updated-dependencies: - dependency-name: chrono dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 6f38f0034..2f8b1f97d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -168,9 +168,9 @@ dependencies = [ [[package]] name = "chrono" -version = "0.4.33" +version = "0.4.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f13690e35a5e4ace198e7beea2895d29f3a9cc55015fcebe6336bd2010af9eb" +checksum = "5bc015644b92d5890fab7489e49d21f879d5c990186827d42ec511919404f38b" dependencies = [ "android-tzdata", "iana-time-zone", From d2aacb3e36d59fb6cef42cdc5bd317a3d15d1a0a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 20 Feb 2024 17:06:58 +0100 Subject: [PATCH 17/60] build(deps): bump anyhow from 1.0.79 to 1.0.80 (#9675) Bumps [anyhow](https://github.com/dtolnay/anyhow) from 1.0.79 to 1.0.80. - [Release notes](https://github.com/dtolnay/anyhow/releases) - [Commits](https://github.com/dtolnay/anyhow/compare/1.0.79...1.0.80) --- updated-dependencies: - dependency-name: anyhow dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 2f8b1f97d..32b5ad84a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -62,9 +62,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.79" +version = "1.0.80" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "080e9890a082662b09c1ad45f567faeeb47f22b5fb23895fbe1e651e718e25ca" +checksum = "5ad32ce52e4161730f7098c077cd2ed6229b5804ccf99e5366be1ab72a98b4e1" [[package]] name = "arc-swap" From cad0209e202a7513105639daa36c82578db4032d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 20 Feb 2024 17:07:16 +0100 Subject: [PATCH 18/60] build(deps): bump textwrap from 0.16.0 to 0.16.1 (#9674) Bumps [textwrap](https://github.com/mgeisler/textwrap) from 0.16.0 to 0.16.1. - [Release notes](https://github.com/mgeisler/textwrap/releases) - [Changelog](https://github.com/mgeisler/textwrap/blob/master/CHANGELOG.md) - [Commits](https://github.com/mgeisler/textwrap/compare/0.16.0...0.16.1) --- updated-dependencies: - dependency-name: textwrap dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- helix-core/Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 32b5ad84a..2b8a25c85 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2231,9 +2231,9 @@ dependencies = [ [[package]] name = "textwrap" -version = "0.16.0" +version = "0.16.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "222a222a5bfe1bba4a77b45ec488a741b3cb8872e5e499451fd7d0129c9c7c3d" +checksum = "23d434d3f8967a09480fb04132ebe0a3e088c173e6d0ee7897abbdf4eab0f8b9" dependencies = [ "smawk", "unicode-linebreak", diff --git a/helix-core/Cargo.toml b/helix-core/Cargo.toml index ca2f505c6..0b0dd7452 100644 --- a/helix-core/Cargo.toml +++ b/helix-core/Cargo.toml @@ -48,7 +48,7 @@ encoding_rs = "0.8" chrono = { version = "0.4", default-features = false, features = ["alloc", "std"] } etcetera = "0.8" -textwrap = "0.16.0" +textwrap = "0.16.1" nucleo.workspace = true parking_lot = "0.12" From eca537615a554e3b1e379ba53d9f3e0654e67c70 Mon Sep 17 00:00:00 2001 From: Benedikt Ritter Date: Wed, 21 Feb 2024 15:46:31 +0100 Subject: [PATCH 19/60] Use groovy support when editing Gradle files (#9681) The Gradle build tool provides two DSLs for configuring builds. On is based on Groovy and Gradle build files written in Gradle Groovy DSL use *.gradle file ending. This change adds `gradle` to the supported file types of the groovy language configuration. --- languages.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/languages.toml b/languages.toml index 1c4c61267..b158606b0 100644 --- a/languages.toml +++ b/languages.toml @@ -3130,7 +3130,7 @@ source = { git = "https://github.com/apple/tree-sitter-pkl", rev = "c03f04a313b7 name = "groovy" language-id = "groovy" scope = "source.groovy" -file-types = ["groovy", "jenkinsfile", { glob = "Jenkinsfile" }, { glob = "Jenkinsfile.*" }] +file-types = ["gradle", "groovy", "jenkinsfile", { glob = "Jenkinsfile" }, { glob = "Jenkinsfile.*" }] shebangs = ["groovy"] comment-token = "//" indent = { tab-width = 2, unit = " " } From 98ebeeebd8c7462409f82d34ff4ac0a7ae9116c7 Mon Sep 17 00:00:00 2001 From: Abderrahmane TAHRI JOUTI <302837+atahrijouti@users.noreply.github.com> Date: Wed, 21 Feb 2024 15:47:14 +0100 Subject: [PATCH 20/60] Cyan Light theme : Add License and Author (#9688) * Cyan Light theme : Add License and Author * Add License Copy license from https://github.com/OlyaB/CyanTheme/blob/master/LICENSE * better credits to original author --- runtime/themes/cyan_light.toml | 10 +++++++--- runtime/themes/licenses/cyan_light.LICENSE | 21 +++++++++++++++++++++ 2 files changed, 28 insertions(+), 3 deletions(-) create mode 100644 runtime/themes/licenses/cyan_light.LICENSE diff --git a/runtime/themes/cyan_light.toml b/runtime/themes/cyan_light.toml index 45cb6539d..a35ad5847 100644 --- a/runtime/themes/cyan_light.toml +++ b/runtime/themes/cyan_light.toml @@ -1,6 +1,10 @@ -# An approximation/port of the Cyan Light Theme from Jetbrains -# -# Original Color Scheme here https://plugins.jetbrains.com/plugin/12102-cyan-light-theme +# Cyan Light +# Adapted from JetBrains' Cyan Light Theme https://plugins.jetbrains.com/plugin/12102-cyan-light-theme +# Author: Abderrahmane Tahri Jouti + +# Original Author : Olga Berdnikova +# LICENSE : MIT +# Source: https://github.com/OlyaB/CyanTheme "attribute" = "blue" "type" = "shade07" diff --git a/runtime/themes/licenses/cyan_light.LICENSE 
b/runtime/themes/licenses/cyan_light.LICENSE new file mode 100644 index 000000000..3a4a2fb87 --- /dev/null +++ b/runtime/themes/licenses/cyan_light.LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2016 CloudCannon + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. From b7b6f300841bb61d8833fee1c58d0e3670849b61 Mon Sep 17 00:00:00 2001 From: Michael Davis Date: Mon, 19 Feb 2024 10:07:03 -0500 Subject: [PATCH 21/60] Use a hook for resolving completion items Previously we used the IdleTimeout event to trigger LSP `completion/resolveItem` requests. We can now refactor this to use an event system hook instead and lower the timeout. --- helix-lsp/src/client.rs | 5 +- helix-term/src/ui/completion.rs | 156 ++++++++++++++++++++------------ helix-term/src/ui/editor.rs | 8 -- 3 files changed, 100 insertions(+), 69 deletions(-) diff --git a/helix-lsp/src/client.rs b/helix-lsp/src/client.rs index 0d3a2a56e..8d03d7992 100644 --- a/helix-lsp/src/client.rs +++ b/helix-lsp/src/client.rs @@ -1017,7 +1017,7 @@ impl Client { pub fn resolve_completion_item( &self, completion_item: lsp::CompletionItem, - ) -> Option>> { + ) -> Option>> { let capabilities = self.capabilities.get().unwrap(); // Return early if the server does not support resolving completion items. @@ -1029,7 +1029,8 @@ impl Client { _ => return None, } - Some(self.call::(completion_item)) + let res = self.call::(completion_item); + Some(async move { Ok(serde_json::from_value(res.await?)?) 
}) } pub fn resolve_code_action( diff --git a/helix-term/src/ui/completion.rs b/helix-term/src/ui/completion.rs index 48d97fbd8..6cbb5b109 100644 --- a/helix-term/src/ui/completion.rs +++ b/helix-term/src/ui/completion.rs @@ -1,7 +1,9 @@ use crate::{ compositor::{Component, Context, Event, EventResult}, handlers::trigger_auto_completion, + job, }; +use helix_event::AsyncHook; use helix_view::{ document::SavePoint, editor::CompleteAction, @@ -10,14 +12,14 @@ use helix_view::{ theme::{Modifier, Style}, ViewId, }; +use tokio::time::Instant; use tui::{buffer::Buffer as Surface, text::Span}; -use std::{borrow::Cow, sync::Arc}; +use std::{borrow::Cow, sync::Arc, time::Duration}; use helix_core::{chars, Change, Transaction}; use helix_view::{graphics::Rect, Document, Editor}; -use crate::commands; use crate::ui::{menu, Markdown, Menu, Popup, PromptEvent}; use helix_lsp::{lsp, util, OffsetEncoding}; @@ -102,6 +104,7 @@ pub struct Completion { #[allow(dead_code)] trigger_offset: usize, filter: String, + resolve_handler: tokio::sync::mpsc::Sender, } impl Completion { @@ -368,6 +371,7 @@ impl Completion { // TODO: expand nucleo api to allow moving straight to a Utf32String here // and avoid allocation during matching filter: String::from(fragment), + resolve_handler: ResolveHandler::default().spawn(), }; // need to recompute immediately in case start_offset != trigger_offset @@ -379,6 +383,8 @@ impl Completion { completion } + /// Synchronously resolve the given completion item. This is used when + /// accepting a completion. fn resolve_completion_item( language_server: &helix_lsp::Client, completion_item: lsp::CompletionItem, @@ -386,7 +392,7 @@ impl Completion { let future = language_server.resolve_completion_item(completion_item)?; let response = helix_lsp::block_on(future); match response { - Ok(value) => serde_json::from_value(value).ok(), + Ok(item) => Some(item), Err(err) => { log::error!("Failed to resolve completion item: {}", err); None @@ -420,62 +426,6 @@ impl Completion { self.popup.contents_mut().replace_option(old_item, new_item); } - /// Asynchronously requests that the currently selection completion item is - /// resolved through LSP `completionItem/resolve`. - pub fn ensure_item_resolved(&mut self, cx: &mut commands::Context) -> bool { - // > If computing full completion items is expensive, servers can additionally provide a - // > handler for the completion item resolve request. ... - // > A typical use case is for example: the `textDocument/completion` request doesn't fill - // > in the `documentation` property for returned completion items since it is expensive - // > to compute. When the item is selected in the user interface then a - // > 'completionItem/resolve' request is sent with the selected completion item as a parameter. - // > The returned completion item should have the documentation property filled in. - // https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocument_completion - let current_item = match self.popup.contents().selection() { - Some(item) if !item.resolved => item.clone(), - _ => return false, - }; - - let Some(language_server) = cx - .editor - .language_server_by_id(current_item.language_server_id) - else { - return false; - }; - - // This method should not block the compositor so we handle the response asynchronously. 
- let Some(future) = language_server.resolve_completion_item(current_item.item.clone()) - else { - return false; - }; - - cx.callback( - future, - move |_editor, compositor, response: Option| { - let resolved_item = match response { - Some(item) => item, - None => return, - }; - - if let Some(completion) = &mut compositor - .find::() - .unwrap() - .completion - { - let resolved_item = CompletionItem { - item: resolved_item, - language_server_id: current_item.language_server_id, - resolved: true, - }; - - completion.replace_item(current_item, resolved_item); - } - }, - ); - - true - } - pub fn area(&mut self, viewport: Rect, editor: &Editor) -> Rect { self.popup.area(viewport, editor) } @@ -498,6 +448,9 @@ impl Component for Completion { Some(option) => option, None => return, }; + if !option.resolved { + helix_event::send_blocking(&self.resolve_handler, option.clone()); + } // need to render: // option.detail // --- @@ -599,3 +552,88 @@ impl Component for Completion { markdown_doc.render(doc_area, surface, cx); } } + +/// A hook for resolving incomplete completion items. +/// +/// From the [LSP spec](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocument_completion): +/// +/// > If computing full completion items is expensive, servers can additionally provide a +/// > handler for the completion item resolve request. ... +/// > A typical use case is for example: the `textDocument/completion` request doesn't fill +/// > in the `documentation` property for returned completion items since it is expensive +/// > to compute. When the item is selected in the user interface then a +/// > 'completionItem/resolve' request is sent with the selected completion item as a parameter. +/// > The returned completion item should have the documentation property filled in. 
+#[derive(Debug, Default)] +struct ResolveHandler { + trigger: Option, + request: Option, +} + +impl AsyncHook for ResolveHandler { + type Event = CompletionItem; + + fn handle_event( + &mut self, + item: Self::Event, + timeout: Option, + ) -> Option { + if self + .trigger + .as_ref() + .is_some_and(|trigger| trigger == &item) + { + timeout + } else { + self.trigger = Some(item); + self.request = None; + Some(Instant::now() + Duration::from_millis(150)) + } + } + + fn finish_debounce(&mut self) { + let Some(item) = self.trigger.take() else { return }; + let (tx, rx) = helix_event::cancelation(); + self.request = Some(tx); + job::dispatch_blocking(move |editor, _| resolve_completion_item(editor, item, rx)) + } +} + +fn resolve_completion_item( + editor: &mut Editor, + item: CompletionItem, + cancel: helix_event::CancelRx, +) { + let Some(language_server) = editor.language_server_by_id(item.language_server_id) else { + return; + }; + + let Some(future) = language_server.resolve_completion_item(item.item.clone()) else { + return; + }; + + tokio::spawn(async move { + match helix_event::cancelable_future(future, cancel).await { + Some(Ok(resolved_item)) => { + job::dispatch(move |_, compositor| { + if let Some(completion) = &mut compositor + .find::() + .unwrap() + .completion + { + let resolved_item = CompletionItem { + item: resolved_item, + language_server_id: item.language_server_id, + resolved: true, + }; + + completion.replace_item(item, resolved_item); + }; + }) + .await + } + Some(Err(err)) => log::error!("completion resolve request failed: {err}"), + None => (), + } + }); +} diff --git a/helix-term/src/ui/editor.rs b/helix-term/src/ui/editor.rs index 66f290a22..15a7262a8 100644 --- a/helix-term/src/ui/editor.rs +++ b/helix-term/src/ui/editor.rs @@ -1027,14 +1027,6 @@ impl EditorView { pub fn handle_idle_timeout(&mut self, cx: &mut commands::Context) -> EventResult { commands::compute_inlay_hints_for_all_views(cx.editor, cx.jobs); - if let Some(completion) = &mut self.completion { - return if completion.ensure_item_resolved(cx) { - EventResult::Consumed(None) - } else { - EventResult::Ignored(None) - }; - } - EventResult::Ignored(None) } } From 7100ed4efc2207a9d7a45ce6e9550564e091f508 Mon Sep 17 00:00:00 2001 From: wr7 Date: Fri, 23 Feb 2024 09:46:41 -0600 Subject: [PATCH 22/60] Properly handle spaces in filenames in bash autocomplete (#9702) --- contrib/completion/hx.bash | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/contrib/completion/hx.bash b/contrib/completion/hx.bash index 01b42deb6..6ef0329e4 100644 --- a/contrib/completion/hx.bash +++ b/contrib/completion/hx.bash @@ -5,19 +5,20 @@ _hx() { # $1 command name # $2 word being completed # $3 word preceding - COMPREPLY=() case "$3" in -g | --grammar) - COMPREPLY=($(compgen -W "fetch build" -- $2)) + COMPREPLY="$(compgen -W "fetch build" -- $2)" ;; --health) local languages=$(hx --health |tail -n '+7' |awk '{print $1}' |sed 's/\x1b\[[0-9;]*m//g') - COMPREPLY=($(compgen -W "$languages" -- $2)) + COMPREPLY="$(compgen -W "$languages" -- $2)" ;; *) - COMPREPLY=($(compgen -fd -W "-h --help --tutor -V --version -v -vv -vvv --health -g --grammar --vsplit --hsplit -c --config --log" -- $2)) + COMPREPLY="$(compgen -fd -W "-h --help --tutor -V --version -v -vv -vvv --health -g --grammar --vsplit --hsplit -c --config --log" -- $2)" ;; esac -} && complete -o filenames -F _hx hx + local IFS=$'\n' + COMPREPLY=($COMPREPLY) +} && complete -o filenames -F _hx hx From 03623f2f407d592c38d21e085ae1fb2e1aa1ea22 Mon Sep 17 
00:00:00 2001 From: Jake Low Date: Fri, 23 Feb 2024 18:53:16 -0800 Subject: [PATCH 23/60] Add osm and osc extensions to xml language filetypes (#9697) --- languages.toml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/languages.toml b/languages.toml index b158606b0..ad2e66589 100644 --- a/languages.toml +++ b/languages.toml @@ -2371,6 +2371,8 @@ file-types = [ "menu", "mxml", "nuspec", + "osc", + "osm", "pt", "publishsettings", "pubxml", From 38484f33e5298b2764eb9afd3bf6a60f1facc260 Mon Sep 17 00:00:00 2001 From: wr7 Date: Fri, 23 Feb 2024 20:54:40 -0600 Subject: [PATCH 24/60] Completely fix bash autocomplete handling of filenames with spaces (#9708) --- contrib/completion/hx.bash | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/contrib/completion/hx.bash b/contrib/completion/hx.bash index 6ef0329e4..62ca029bf 100644 --- a/contrib/completion/hx.bash +++ b/contrib/completion/hx.bash @@ -8,14 +8,14 @@ _hx() { case "$3" in -g | --grammar) - COMPREPLY="$(compgen -W "fetch build" -- $2)" + COMPREPLY="$(compgen -W 'fetch build' -- $2)" ;; --health) local languages=$(hx --health |tail -n '+7' |awk '{print $1}' |sed 's/\x1b\[[0-9;]*m//g') - COMPREPLY="$(compgen -W "$languages" -- $2)" + COMPREPLY="$(compgen -W """$languages""" -- $2)" ;; *) - COMPREPLY="$(compgen -fd -W "-h --help --tutor -V --version -v -vv -vvv --health -g --grammar --vsplit --hsplit -c --config --log" -- $2)" + COMPREPLY="$(compgen -fd -W "-h --help --tutor -V --version -v -vv -vvv --health -g --grammar --vsplit --hsplit -c --config --log" -- """$2""")" ;; esac From ec9efdef3b2f613a86098058f5705e7863e375e2 Mon Sep 17 00:00:00 2001 From: DS/Charlie <82801887+ds-cbo@users.noreply.github.com> Date: Sat, 24 Feb 2024 04:28:25 +0100 Subject: [PATCH 25/60] Bump tree-sitter-sql (#9634) --- languages.toml | 2 +- runtime/queries/sql/highlights.scm | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/languages.toml b/languages.toml index ad2e66589..112333ea9 100644 --- a/languages.toml +++ b/languages.toml @@ -1825,7 +1825,7 @@ injection-regex = "sql" [[grammar]] name = "sql" -source = { git = "https://github.com/DerekStride/tree-sitter-sql", rev = "25be0b8f17e9189ad9e1b875869d025c5aec1286" } +source = { git = "https://github.com/DerekStride/tree-sitter-sql", rev = "da2d1eff425b146d3c8cab7be8dfa98b11d896dc" } [[language]] name = "gdscript" diff --git a/runtime/queries/sql/highlights.scm b/runtime/queries/sql/highlights.scm index 09b07489e..e575debc5 100644 --- a/runtime/queries/sql/highlights.scm +++ b/runtime/queries/sql/highlights.scm @@ -24,20 +24,20 @@ (term alias: (identifier) @variable.parameter) -(term +((term value: (cast name: (keyword_cast) @function.builtin - parameter: [(literal)]?)) + parameter: [(literal)]?))) (literal) @string (comment) @comment.line (marginalia) @comment.block ((literal) @constant.numeric.integer - (#match? @constant.numeric.integer "^-?\\d+$")) + (#match? @constant.numeric.integer "^[-+]?\\d+$")) ((literal) @constant.numeric.float - (#match? @constant.numeric.float "^-?\\d*\\.\\d*$")) + (#match? 
@constant.numeric.float "^[-+]?\\d*\\.\\d*$"))
 
 (parameter) @variable.parameter
 
From 6db666fce1fb4627c06d147554b8e1eb9970619e Mon Sep 17 00:00:00 2001
From: Mo <76752051+mo8it@users.noreply.github.com>
Date: Sat, 24 Feb 2024 16:59:11 +0100
Subject: [PATCH 26/60] Optimization of tilde expansion (#9709)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* Use next and avoid a redundant prefix strip

* Avoid allocations

Especially when `expand_tilde` is called on a path that doesn't contain a tilde.

* Add a test

* Use Into<Cow<'a, Path>>

* Put the expand_tilde test at the end of the file

* Remove unused imports
---
 helix-loader/src/lib.rs          |  2 +-
 helix-stdx/src/path.rs           | 59 +++++++++++++++++++++++++-------
 helix-term/src/commands/typed.rs | 14 ++++----
 helix-term/src/ui/mod.rs         |  4 +--
 4 files changed, 57 insertions(+), 22 deletions(-)

diff --git a/helix-loader/src/lib.rs b/helix-loader/src/lib.rs
index f8fac6703..93488e452 100644
--- a/helix-loader/src/lib.rs
+++ b/helix-loader/src/lib.rs
@@ -53,7 +53,7 @@ fn prioritize_runtime_dirs() -> Vec<PathBuf> {
     rt_dirs.push(conf_rt_dir);
 
     if let Ok(dir) = std::env::var("HELIX_RUNTIME") {
-        let dir = path::expand_tilde(dir);
+        let dir = path::expand_tilde(Path::new(&dir));
         rt_dirs.push(path::normalize(dir));
     }
 
diff --git a/helix-stdx/src/path.rs b/helix-stdx/src/path.rs
index 5746657c3..1dc4d0b24 100644
--- a/helix-stdx/src/path.rs
+++ b/helix-stdx/src/path.rs
@@ -1,6 +1,9 @@
 pub use etcetera::home_dir;
 
-use std::path::{Component, Path, PathBuf};
+use std::{
+    borrow::Cow,
+    path::{Component, Path, PathBuf},
+};
 
 use crate::env::current_working_dir;
 
@@ -19,19 +22,22 @@ pub fn fold_home_dir(path: &Path) -> PathBuf {
 /// Expands tilde `~` into users home directory if available, otherwise returns the path
 /// unchanged. The tilde will only be expanded when present as the first component of the path
 /// and only slash follows it.
-pub fn expand_tilde(path: impl AsRef<Path>) -> PathBuf {
-    let path = path.as_ref();
-    let mut components = path.components().peekable();
-    if let Some(Component::Normal(c)) = components.peek() {
-        if c == &"~" {
-            if let Ok(home) = home_dir() {
-                // it's ok to unwrap, the path starts with `~`
-                return home.join(path.strip_prefix("~").unwrap());
+pub fn expand_tilde<'a, P>(path: P) -> Cow<'a, Path>
+where
+    P: Into<Cow<'a, Path>>,
+{
+    let path = path.into();
+    let mut components = path.components();
+    if let Some(Component::Normal(c)) = components.next() {
+        if c == "~" {
+            if let Ok(mut buf) = home_dir() {
+                buf.push(components);
+                return Cow::Owned(buf);
             }
         }
     }
 
-    path.to_path_buf()
+    path
 }
 
 /// Normalize a path without resolving symlinks.
@@ -109,9 +115,9 @@
 /// This function is used instead of [`std::fs::canonicalize`] because we don't want to verify
 /// here if the path exists, just normalize it's components.
pub fn canonicalize(path: impl AsRef) -> PathBuf { - let path = expand_tilde(path); + let path = expand_tilde(path.as_ref()); let path = if path.is_relative() { - current_working_dir().join(path) + Cow::Owned(current_working_dir().join(path)) } else { path }; @@ -183,3 +189,32 @@ pub fn get_truncated_path(path: impl AsRef) -> PathBuf { ret.push(file); ret } + +#[cfg(test)] +mod tests { + use std::{ + ffi::OsStr, + path::{Component, Path}, + }; + + use crate::path; + + #[test] + fn expand_tilde() { + for path in ["~", "~/foo"] { + let expanded = path::expand_tilde(Path::new(path)); + + let tilde = Component::Normal(OsStr::new("~")); + + let mut component_count = 0; + for component in expanded.components() { + // No tilde left. + assert_ne!(component, tilde); + component_count += 1; + } + + // The path was at least expanded to something. + assert_ne!(component_count, 0); + } + } +} diff --git a/helix-term/src/commands/typed.rs b/helix-term/src/commands/typed.rs index b7ceeba59..3d7ea3fc8 100644 --- a/helix-term/src/commands/typed.rs +++ b/helix-term/src/commands/typed.rs @@ -110,14 +110,14 @@ fn open(cx: &mut compositor::Context, args: &[Cow], event: PromptEvent) -> ensure!(!args.is_empty(), "wrong argument count"); for arg in args { let (path, pos) = args::parse_file(arg); - let path = helix_stdx::path::expand_tilde(&path); + let path = helix_stdx::path::expand_tilde(path); // If the path is a directory, open a file picker on that directory and update the status // message if let Ok(true) = std::fs::canonicalize(&path).map(|p| p.is_dir()) { let callback = async move { let call: job::Callback = job::Callback::EditorCompositor(Box::new( move |editor: &mut Editor, compositor: &mut Compositor| { - let picker = ui::file_picker(path, &editor.config()); + let picker = ui::file_picker(path.into_owned(), &editor.config()); compositor.push(Box::new(overlaid(picker))); }, )); @@ -1078,11 +1078,11 @@ fn change_current_directory( return Ok(()); } - let dir = helix_stdx::path::expand_tilde( - args.first() - .context("target directory not provided")? - .as_ref(), - ); + let dir = args + .first() + .context("target directory not provided")? + .as_ref(); + let dir = helix_stdx::path::expand_tilde(Path::new(dir)); helix_stdx::env::set_current_working_dir(dir)?; diff --git a/helix-term/src/ui/mod.rs b/helix-term/src/ui/mod.rs index d27e83553..0873116cb 100644 --- a/helix-term/src/ui/mod.rs +++ b/helix-term/src/ui/mod.rs @@ -428,9 +428,9 @@ pub mod completers { path } else { match path.parent() { - Some(path) if !path.as_os_str().is_empty() => path.to_path_buf(), + Some(path) if !path.as_os_str().is_empty() => Cow::Borrowed(path), // Path::new("h")'s parent is Some("")... 
- _ => helix_stdx::env::current_working_dir(), + _ => Cow::Owned(helix_stdx::env::current_working_dir()), } }; From dfa5382c51978c6a582d4586c65aa0f677be2ee8 Mon Sep 17 00:00:00 2001 From: Ryan Mehri <52933714+rmehri01@users.noreply.github.com> Date: Sun, 25 Feb 2024 02:37:54 -0800 Subject: [PATCH 27/60] Don't run scheduled builds on forks (#9718) --- .github/workflows/build.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 3d47c2088..7ba46ce56 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -12,6 +12,7 @@ jobs: check: name: Check (msrv) runs-on: ubuntu-latest + if: github.repository == 'helix-editor/helix' || github.event_name != 'schedule' steps: - name: Checkout sources uses: actions/checkout@v4 @@ -31,6 +32,7 @@ jobs: test: name: Test Suite runs-on: ${{ matrix.os }} + if: github.repository == 'helix-editor/helix' || github.event_name != 'schedule' env: RUST_BACKTRACE: 1 HELIX_LOG_LEVEL: info @@ -65,6 +67,7 @@ jobs: lints: name: Lints runs-on: ubuntu-latest + if: github.repository == 'helix-editor/helix' || github.event_name != 'schedule' steps: - name: Checkout sources uses: actions/checkout@v4 @@ -92,6 +95,7 @@ jobs: docs: name: Docs runs-on: ubuntu-latest + if: github.repository == 'helix-editor/helix' || github.event_name != 'schedule' steps: - name: Checkout sources uses: actions/checkout@v4 From 8141a4a1ab78084df94c19e6225fc3c64a05b88f Mon Sep 17 00:00:00 2001 From: Michael Davis Date: Sun, 27 Aug 2023 13:32:17 -0500 Subject: [PATCH 28/60] LSP: Key diagnostics off file path instead of URI URIs need to be normalized to be comparable. For example a language server could send a URI for a path containing '+' as '%2B' but we might encode this in something like 'Document::url' as just '+'. We can normalize the URI straight into a PathBuf though since this is the only value we compare these diagnostics URIs against. This also covers edge-cases like windows drive letter capitalization. --- helix-term/src/application.rs | 6 +-- helix-term/src/commands/lsp.rs | 72 ++++++++++++++++------------------ helix-view/src/editor.rs | 9 ++--- 3 files changed, 39 insertions(+), 48 deletions(-) diff --git a/helix-term/src/application.rs b/helix-term/src/application.rs index 30df3981c..0ef200c2f 100644 --- a/helix-term/src/application.rs +++ b/helix-term/src/application.rs @@ -753,9 +753,7 @@ impl Application { let lang_conf = doc.language.clone(); if let Some(lang_conf) = &lang_conf { - if let Some(old_diagnostics) = - self.editor.diagnostics.get(¶ms.uri) - { + if let Some(old_diagnostics) = self.editor.diagnostics.get(&path) { if !lang_conf.persistent_diagnostic_sources.is_empty() { // Sort diagnostics first by severity and then by line numbers. // Note: The `lsp::DiagnosticSeverity` enum is already defined in decreasing order @@ -788,7 +786,7 @@ impl Application { // Insert the original lsp::Diagnostics here because we may have no open document // for diagnosic message and so we can't calculate the exact position. // When using them later in the diagnostics picker, we calculate them on-demand. 
- let diagnostics = match self.editor.diagnostics.entry(params.uri) { + let diagnostics = match self.editor.diagnostics.entry(path) { Entry::Occupied(o) => { let current_diagnostics = o.into_mut(); // there may entries of other language servers, which is why we can't overwrite the whole entry diff --git a/helix-term/src/commands/lsp.rs b/helix-term/src/commands/lsp.rs index a1f7bf17d..a3168dc2d 100644 --- a/helix-term/src/commands/lsp.rs +++ b/helix-term/src/commands/lsp.rs @@ -38,7 +38,7 @@ use std::{ collections::{BTreeMap, HashSet}, fmt::Write, future::Future, - path::PathBuf, + path::{Path, PathBuf}, }; /// Gets the first language server that is attached to a document which supports a specific feature. @@ -134,7 +134,7 @@ struct DiagnosticStyles { } struct PickerDiagnostic { - url: lsp::Url, + path: PathBuf, diag: lsp::Diagnostic, offset_encoding: OffsetEncoding, } @@ -167,8 +167,7 @@ impl ui::menu::Item for PickerDiagnostic { let path = match format { DiagnosticsFormat::HideSourcePath => String::new(), DiagnosticsFormat::ShowSourcePath => { - let file_path = self.url.to_file_path().unwrap(); - let path = path::get_truncated_path(file_path); + let path = path::get_truncated_path(&self.path); format!("{}: ", path.to_string_lossy()) } }; @@ -208,24 +207,33 @@ fn jump_to_location( return; } }; + jump_to_position(editor, &path, location.range, offset_encoding, action); +} - let doc = match editor.open(&path, action) { +fn jump_to_position( + editor: &mut Editor, + path: &Path, + range: lsp::Range, + offset_encoding: OffsetEncoding, + action: Action, +) { + let doc = match editor.open(path, action) { Ok(id) => doc_mut!(editor, &id), Err(err) => { - let err = format!("failed to open path: {:?}: {:?}", location.uri, err); + let err = format!("failed to open path: {:?}: {:?}", path, err); editor.set_error(err); return; } }; let view = view_mut!(editor); // TODO: convert inside server - let new_range = - if let Some(new_range) = lsp_range_to_range(doc.text(), location.range, offset_encoding) { - new_range - } else { - log::warn!("lsp position out of bounds - {:?}", location.range); - return; - }; + let new_range = if let Some(new_range) = lsp_range_to_range(doc.text(), range, offset_encoding) + { + new_range + } else { + log::warn!("lsp position out of bounds - {:?}", range); + return; + }; // we flip the range so that the cursor sits on the start of the symbol // (for example start of the function). doc.set_selection(view.id, Selection::single(new_range.head, new_range.anchor)); @@ -258,21 +266,20 @@ enum DiagnosticsFormat { fn diag_picker( cx: &Context, - diagnostics: BTreeMap>, - _current_path: Option, + diagnostics: BTreeMap>, format: DiagnosticsFormat, ) -> Picker { // TODO: drop current_path comparison and instead use workspace: bool flag? 
     // flatten the map to a vec of (url, diag) pairs
     let mut flat_diag = Vec::new();
-    for (url, diags) in diagnostics {
+    for (path, diags) in diagnostics {
         flat_diag.reserve(diags.len());
 
         for (diag, ls) in diags {
             if let Some(ls) = cx.editor.language_server_by_id(ls) {
                 flat_diag.push(PickerDiagnostic {
-                    url: url.clone(),
+                    path: path.clone(),
                     diag,
                     offset_encoding: ls.offset_encoding(),
                 });
@@ -292,22 +299,17 @@ fn diag_picker(
         (styles, format),
         move |cx,
               PickerDiagnostic {
-                  url,
+                  path,
                   diag,
                   offset_encoding,
               },
               action| {
-            jump_to_location(
-                cx.editor,
-                &lsp::Location::new(url.clone(), diag.range),
-                *offset_encoding,
-                action,
-            )
+            jump_to_position(cx.editor, path, diag.range, *offset_encoding, action)
         },
     )
-    .with_preview(move |_editor, PickerDiagnostic { url, diag, .. }| {
-        let location = lsp::Location::new(url.clone(), diag.range);
-        Some(location_to_file_location(&location))
+    .with_preview(move |_editor, PickerDiagnostic { path, diag, .. }| {
+        let line = Some((diag.range.start.line as usize, diag.range.end.line as usize));
+        Some((path.clone().into(), line))
     })
     .truncate_start(false)
 }
@@ -470,17 +472,16 @@ pub fn workspace_symbol_picker(cx: &mut Context) {
 
 pub fn diagnostics_picker(cx: &mut Context) {
     let doc = doc!(cx.editor);
-    if let Some(current_url) = doc.url() {
+    if let Some(current_path) = doc.path() {
         let diagnostics = cx
             .editor
             .diagnostics
-            .get(&current_url)
+            .get(current_path)
             .cloned()
             .unwrap_or_default();
         let picker = diag_picker(
             cx,
-            [(current_url.clone(), diagnostics)].into(),
-            Some(current_url),
+            [(current_path.clone(), diagnostics)].into(),
             DiagnosticsFormat::HideSourcePath,
         );
         cx.push_layer(Box::new(overlaid(picker)));
@@ -488,16 +489,9 @@ pub fn diagnostics_picker(cx: &mut Context) {
 }
 
 pub fn workspace_diagnostics_picker(cx: &mut Context) {
-    let doc = doc!(cx.editor);
-    let current_url = doc.url();
     // TODO not yet filtered by LanguageServerFeature, need to do something similar as Document::shown_diagnostics here for all open documents
     let diagnostics = cx.editor.diagnostics.clone();
-    let picker = diag_picker(
-        cx,
-        diagnostics,
-        current_url,
-        DiagnosticsFormat::ShowSourcePath,
-    );
+    let picker = diag_picker(cx, diagnostics, DiagnosticsFormat::ShowSourcePath);
     cx.push_layer(Box::new(overlaid(picker)));
 }
 
diff --git a/helix-view/src/editor.rs b/helix-view/src/editor.rs
index fffbe6207..f46a0d6a6 100644
--- a/helix-view/src/editor.rs
+++ b/helix-view/src/editor.rs
@@ -914,7 +914,7 @@ pub struct Editor {
     pub macro_recording: Option<(char, Vec<KeyEvent>)>,
     pub macro_replaying: Vec<char>,
     pub language_servers: helix_lsp::Registry,
-    pub diagnostics: BTreeMap<lsp::Url, Vec<(lsp::Diagnostic, usize)>>,
+    pub diagnostics: BTreeMap<PathBuf, Vec<(lsp::Diagnostic, usize)>>,
     pub diff_providers: DiffProviderRegistry,
 
     pub debugger: Option<dap::Client>,
@@ -1815,7 +1815,7 @@ impl Editor {
     /// Returns all supported diagnostics for the document
     pub fn doc_diagnostics<'a>(
         language_servers: &'a helix_lsp::Registry,
-        diagnostics: &'a BTreeMap<lsp::Url, Vec<(lsp::Diagnostic, usize)>>,
+        diagnostics: &'a BTreeMap<PathBuf, Vec<(lsp::Diagnostic, usize)>>,
         document: &Document,
     ) -> impl Iterator<Item = helix_core::Diagnostic> + 'a {
         Editor::doc_diagnostics_with_filter(language_servers, diagnostics, document, |_, _| true)
@@ -1825,7 +1825,7 @@ impl Editor {
     /// filtered by `filter` which is invocated with the raw `lsp::Diagnostic` and the language server id it came from
     pub fn doc_diagnostics_with_filter<'a>(
         language_servers: &'a helix_lsp::Registry,
-        diagnostics: &'a BTreeMap<lsp::Url, Vec<(lsp::Diagnostic, usize)>>,
+        diagnostics: &'a BTreeMap<PathBuf, Vec<(lsp::Diagnostic, usize)>>,
         document: &Document,
         filter: impl Fn(&lsp::Diagnostic, usize) -> bool + 'a,
     ) -> impl Iterator<Item = helix_core::Diagnostic> + 'a {
         let language_config = document.language.clone();
         document
             .path()
- .and_then(|path| url::Url::from_file_path(path).ok()) // TODO log error? - .and_then(|uri| diagnostics.get(&uri)) + .and_then(|path| diagnostics.get(path)) .map(|diags| { diags.iter().filter_map(move |(diagnostic, lsp_id)| { let ls = language_servers.get_by_id(*lsp_id)?; From 928bf80d9a1d6206f864e9b375f67662a49a6265 Mon Sep 17 00:00:00 2001 From: Michael Davis Date: Sat, 17 Jun 2023 17:01:36 -0500 Subject: [PATCH 29/60] LSP: Normalize diagnostic file paths --- helix-term/src/application.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/helix-term/src/application.rs b/helix-term/src/application.rs index 0ef200c2f..809393c7f 100644 --- a/helix-term/src/application.rs +++ b/helix-term/src/application.rs @@ -724,7 +724,7 @@ impl Application { } Notification::PublishDiagnostics(mut params) => { let path = match params.uri.to_file_path() { - Ok(path) => path, + Ok(path) => helix_stdx::path::normalize(&path), Err(_) => { log::error!("Unsupported file URI: {}", params.uri); return; From a87614858571ed7f9ab4a3145187cf598ec0faeb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Carsten=20F=C3=BChrmann?= Date: Mon, 26 Feb 2024 02:53:59 +0100 Subject: [PATCH 30/60] Fix colors of tokyonight diagnostic undercurls (#9724) --- runtime/themes/tokyonight.toml | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/runtime/themes/tokyonight.toml b/runtime/themes/tokyonight.toml index 95ebd4087..fbd8f2ed5 100644 --- a/runtime/themes/tokyonight.toml +++ b/runtime/themes/tokyonight.toml @@ -58,13 +58,13 @@ variable = { fg = "fg" } "diff.plus" = { fg = "add" } error = { fg = "error" } -hint = { fg = "hint" } -info = { fg = "info" } warning = { fg = "yellow" } -"diagnostic.error" = { underline = { style = "curl" } } -"diagnostic.warning" = { underline = { style = "curl" } } -"diagnostic.info" = { underline = { style = "curl" } } -"diagnostic.hint" = { underline = { style = "curl" } } +info = { fg = "info" } +hint = { fg = "hint" } +"diagnostic.error" = { underline = { style = "curl", color = "error" } } +"diagnostic.warning" = { underline = { style = "curl", color = "yellow"} } +"diagnostic.info" = { underline = { style = "curl", color = "info"} } +"diagnostic.hint" = { underline = { style = "curl", color = "hint" } } "ui.background" = { bg = "bg", fg = "fg" } "ui.cursor" = { modifiers = ["reversed"] } @@ -114,8 +114,8 @@ change = "#6183bb" delete = "#914c54" error = "#db4b4b" -hint = "#1abc9c" info = "#0db9d7" +hint = "#1abc9c" fg = "#c0caf5" fg-dark = "#a9b1d6" From c68ec92c5e1bd3a2bf402fb583de23693f59b722 Mon Sep 17 00:00:00 2001 From: Tobias Hunger Date: Mon, 26 Feb 2024 08:08:31 +0100 Subject: [PATCH 31/60] slint: Update SHA of tree-sitter parser (#9698) --- languages.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/languages.toml b/languages.toml index 112333ea9..750ec9088 100644 --- a/languages.toml +++ b/languages.toml @@ -2178,7 +2178,7 @@ language-servers = [ "slint-lsp" ] [[grammar]] name = "slint" -source = { git = "https://github.com/slint-ui/tree-sitter-slint", rev = "15618215b79b9db08f824a5c97a12d073dcc1c00" } +source = { git = "https://github.com/slint-ui/tree-sitter-slint", rev = "3c82235f41b63f35a01ae3888206e93585cbb84a" } [[language]] name = "task" From cd02976fa3a55c2c1f01b95c40d178061968f797 Mon Sep 17 00:00:00 2001 From: Pascal Kuthe Date: Mon, 26 Feb 2024 08:45:20 +0100 Subject: [PATCH 32/60] switch to regex-cursor (#9422) --- Cargo.lock | 18 ++++++- helix-core/src/selection.rs | 96 +++++++++++++++++++++++-------------- 
helix-core/src/syntax.rs | 12 +++-- helix-stdx/Cargo.toml | 1 + helix-stdx/src/rope.rs | 45 ++++++++++++++++- helix-term/src/commands.rs | 56 ++++++++-------------- helix-term/src/ui/mod.rs | 33 +++++++++---- 7 files changed, 175 insertions(+), 86 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 2b8a25c85..b8d375c51 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1344,6 +1344,7 @@ version = "23.10.0" dependencies = [ "dunce", "etcetera", + "regex-cursor", "ropey", "tempfile", "which", @@ -1938,15 +1939,28 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.4.4" +version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b7fa1134405e2ec9353fd416b17f8dacd46c473d7d3fd1cf202706a14eb792a" +checksum = "5bb987efffd3c6d0d8f5f89510bb458559eab11e4f869acb20bf845e016259cd" dependencies = [ "aho-corasick", "memchr", "regex-syntax", ] +[[package]] +name = "regex-cursor" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a43718aa0040434d45728c43f56bd53bda75a91c46954cdf0f2ff4dbc8aabbe7" +dependencies = [ + "log", + "memchr", + "regex-automata", + "regex-syntax", + "ropey", +] + [[package]] name = "regex-syntax" version = "0.8.2" diff --git a/helix-core/src/selection.rs b/helix-core/src/selection.rs index c44685eea..91f1d0de5 100644 --- a/helix-core/src/selection.rs +++ b/helix-core/src/selection.rs @@ -7,9 +7,11 @@ use crate::{ ensure_grapheme_boundary_next, ensure_grapheme_boundary_prev, next_grapheme_boundary, prev_grapheme_boundary, }, + line_ending::get_line_ending, movement::Direction, Assoc, ChangeSet, RopeGraphemes, RopeSlice, }; +use helix_stdx::rope::{self, RopeSliceExt}; use smallvec::{smallvec, SmallVec}; use std::borrow::Cow; @@ -708,12 +710,12 @@ impl IntoIterator for Selection { pub fn keep_or_remove_matches( text: RopeSlice, selection: &Selection, - regex: &crate::regex::Regex, + regex: &rope::Regex, remove: bool, ) -> Option { let result: SmallVec<_> = selection .iter() - .filter(|range| regex.is_match(&range.fragment(text)) ^ remove) + .filter(|range| regex.is_match(text.regex_input_at(range.from()..range.to())) ^ remove) .copied() .collect(); @@ -724,25 +726,20 @@ pub fn keep_or_remove_matches( None } +// TODO: support to split on capture #N instead of whole match pub fn select_on_matches( text: RopeSlice, selection: &Selection, - regex: &crate::regex::Regex, + regex: &rope::Regex, ) -> Option { let mut result = SmallVec::with_capacity(selection.len()); for sel in selection { - // TODO: can't avoid occasional allocations since Regex can't operate on chunks yet - let fragment = sel.fragment(text); - - let sel_start = sel.from(); - let start_byte = text.char_to_byte(sel_start); - - for mat in regex.find_iter(&fragment) { + for mat in regex.find_iter(text.regex_input_at(sel.from()..sel.to())) { // TODO: retain range direction - let start = text.byte_to_char(start_byte + mat.start()); - let end = text.byte_to_char(start_byte + mat.end()); + let start = text.byte_to_char(mat.start()); + let end = text.byte_to_char(mat.end()); let range = Range::new(start, end); // Make sure the match is not right outside of the selection. 
@@ -761,12 +758,7 @@ pub fn select_on_matches( None } -// TODO: support to split on capture #N instead of whole match -pub fn split_on_matches( - text: RopeSlice, - selection: &Selection, - regex: &crate::regex::Regex, -) -> Selection { +pub fn split_on_newline(text: RopeSlice, selection: &Selection) -> Selection { let mut result = SmallVec::with_capacity(selection.len()); for sel in selection { @@ -776,21 +768,47 @@ pub fn split_on_matches( continue; } - // TODO: can't avoid occasional allocations since Regex can't operate on chunks yet - let fragment = sel.fragment(text); - let sel_start = sel.from(); let sel_end = sel.to(); - let start_byte = text.char_to_byte(sel_start); + let mut start = sel_start; + for mat in sel.slice(text).lines() { + let len = mat.len_chars(); + let line_end_len = get_line_ending(&mat).map(|le| le.len_chars()).unwrap_or(0); + // TODO: retain range direction + result.push(Range::new(start, start + len - line_end_len)); + start += len; + } + + if start < sel_end { + result.push(Range::new(start, sel_end)); + } + } + + // TODO: figure out a new primary index + Selection::new(result, 0) +} + +pub fn split_on_matches(text: RopeSlice, selection: &Selection, regex: &rope::Regex) -> Selection { + let mut result = SmallVec::with_capacity(selection.len()); + + for sel in selection { + // Special case: zero-width selection. + if sel.from() == sel.to() { + result.push(*sel); + continue; + } + + let sel_start = sel.from(); + let sel_end = sel.to(); let mut start = sel_start; - for mat in regex.find_iter(&fragment) { + for mat in regex.find_iter(text.regex_input_at(sel_start..sel_end)) { // TODO: retain range direction - let end = text.byte_to_char(start_byte + mat.start()); + let end = text.byte_to_char(mat.start()); result.push(Range::new(start, end)); - start = text.byte_to_char(start_byte + mat.end()); + start = text.byte_to_char(mat.end()); } if start < sel_end { @@ -1021,14 +1039,12 @@ mod test { #[test] fn test_select_on_matches() { - use crate::regex::{Regex, RegexBuilder}; - let r = Rope::from_str("Nobody expects the Spanish inquisition"); let s = r.slice(..); let selection = Selection::single(0, r.len_chars()); assert_eq!( - select_on_matches(s, &selection, &Regex::new(r"[A-Z][a-z]*").unwrap()), + select_on_matches(s, &selection, &rope::Regex::new(r"[A-Z][a-z]*").unwrap()), Some(Selection::new( smallvec![Range::new(0, 6), Range::new(19, 26)], 0 @@ -1038,8 +1054,14 @@ mod test { let r = Rope::from_str("This\nString\n\ncontains multiple\nlines"); let s = r.slice(..); - let start_of_line = RegexBuilder::new(r"^").multi_line(true).build().unwrap(); - let end_of_line = RegexBuilder::new(r"$").multi_line(true).build().unwrap(); + let start_of_line = rope::RegexBuilder::new() + .syntax(rope::Config::new().multi_line(true)) + .build(r"^") + .unwrap(); + let end_of_line = rope::RegexBuilder::new() + .syntax(rope::Config::new().multi_line(true)) + .build(r"$") + .unwrap(); // line without ending assert_eq!( @@ -1077,9 +1099,9 @@ mod test { select_on_matches( s, &Selection::single(0, s.len_chars()), - &RegexBuilder::new(r"^[a-z ]*$") - .multi_line(true) - .build() + &rope::RegexBuilder::new() + .syntax(rope::Config::new().multi_line(true)) + .build(r"^[a-z ]*$") .unwrap() ), Some(Selection::new( @@ -1171,13 +1193,15 @@ mod test { #[test] fn test_split_on_matches() { - use crate::regex::Regex; - let text = Rope::from(" abcd efg wrs xyz 123 456"); let selection = Selection::new(smallvec![Range::new(0, 9), Range::new(11, 20),], 0); - let result = split_on_matches(text.slice(..), 
&selection, &Regex::new(r"\s+").unwrap()); + let result = split_on_matches( + text.slice(..), + &selection, + &rope::Regex::new(r"\s+").unwrap(), + ); assert_eq!( result.ranges(), diff --git a/helix-core/src/syntax.rs b/helix-core/src/syntax.rs index a9344448f..0d8559ca9 100644 --- a/helix-core/src/syntax.rs +++ b/helix-core/src/syntax.rs @@ -12,6 +12,7 @@ use arc_swap::{ArcSwap, Guard}; use bitflags::bitflags; use globset::GlobSet; use hashbrown::raw::RawTable; +use helix_stdx::rope::{self, RopeSliceExt}; use slotmap::{DefaultKey as LayerId, HopSlotMap}; use std::{ @@ -1961,11 +1962,16 @@ impl HighlightConfiguration { node_slice }; - static SHEBANG_REGEX: Lazy = Lazy::new(|| Regex::new(SHEBANG).unwrap()); + static SHEBANG_REGEX: Lazy = + Lazy::new(|| rope::Regex::new(SHEBANG).unwrap()); injection_capture = SHEBANG_REGEX - .captures(&Cow::from(lines)) - .map(|cap| InjectionLanguageMarker::Shebang(cap[1].to_owned())) + .captures_iter(lines.regex_input()) + .map(|cap| { + let cap = lines.byte_slice(cap.get_group(1).unwrap().range()); + InjectionLanguageMarker::Shebang(cap.into()) + }) + .next() } else if index == self.injection_content_capture_index { content_node = Some(capture.node); } diff --git a/helix-stdx/Cargo.toml b/helix-stdx/Cargo.toml index 540a1b99a..5ac7c011f 100644 --- a/helix-stdx/Cargo.toml +++ b/helix-stdx/Cargo.toml @@ -16,6 +16,7 @@ dunce = "1.0" etcetera = "0.8" ropey = { version = "1.6.1", default-features = false } which = "6.0" +regex-cursor = "0.1.3" [dev-dependencies] tempfile = "3.10" diff --git a/helix-stdx/src/rope.rs b/helix-stdx/src/rope.rs index 4ee39d4a8..7b4edda4f 100644 --- a/helix-stdx/src/rope.rs +++ b/helix-stdx/src/rope.rs @@ -1,11 +1,22 @@ +use std::ops::{Bound, RangeBounds}; + +pub use regex_cursor::engines::meta::{Builder as RegexBuilder, Regex}; +pub use regex_cursor::regex_automata::util::syntax::Config; +use regex_cursor::{Input as RegexInput, RopeyCursor}; use ropey::RopeSlice; -pub trait RopeSliceExt: Sized { +pub trait RopeSliceExt<'a>: Sized { fn ends_with(self, text: &str) -> bool; fn starts_with(self, text: &str) -> bool; + fn regex_input(self) -> RegexInput>; + fn regex_input_at_bytes>( + self, + byte_range: R, + ) -> RegexInput>; + fn regex_input_at>(self, char_range: R) -> RegexInput>; } -impl RopeSliceExt for RopeSlice<'_> { +impl<'a> RopeSliceExt<'a> for RopeSlice<'a> { fn ends_with(self, text: &str) -> bool { let len = self.len_bytes(); if len < text.len() { @@ -23,4 +34,34 @@ impl RopeSliceExt for RopeSlice<'_> { self.get_byte_slice(..len - text.len()) .map_or(false, |start| start == text) } + + fn regex_input(self) -> RegexInput> { + RegexInput::new(self) + } + + fn regex_input_at>(self, char_range: R) -> RegexInput> { + let start_bound = match char_range.start_bound() { + Bound::Included(&val) => Bound::Included(self.char_to_byte(val)), + Bound::Excluded(&val) => Bound::Excluded(self.char_to_byte(val)), + Bound::Unbounded => Bound::Unbounded, + }; + let end_bound = match char_range.end_bound() { + Bound::Included(&val) => Bound::Included(self.char_to_byte(val)), + Bound::Excluded(&val) => Bound::Excluded(self.char_to_byte(val)), + Bound::Unbounded => Bound::Unbounded, + }; + self.regex_input_at_bytes((start_bound, end_bound)) + } + fn regex_input_at_bytes>( + self, + byte_range: R, + ) -> RegexInput> { + let input = match byte_range.start_bound() { + Bound::Included(&pos) | Bound::Excluded(&pos) => { + RegexInput::new(RopeyCursor::at(self, pos)) + } + Bound::Unbounded => RegexInput::new(self), + }; + input.range(byte_range) + } } 
diff --git a/helix-term/src/commands.rs b/helix-term/src/commands.rs index 51a1ede9b..fdad31a81 100644 --- a/helix-term/src/commands.rs +++ b/helix-term/src/commands.rs @@ -3,6 +3,7 @@ pub(crate) mod lsp; pub(crate) mod typed; pub use dap::*; +use helix_stdx::rope::{self, RopeSliceExt}; use helix_vcs::Hunk; pub use lsp::*; use tui::widgets::Row; @@ -19,7 +20,7 @@ use helix_core::{ match_brackets, movement::{self, move_vertically_visual, Direction}, object, pos_at_coords, - regex::{self, Regex, RegexBuilder}, + regex::{self, Regex}, search::{self, CharMatcher}, selection, shellwords, surround, syntax::LanguageServerFeature, @@ -1907,11 +1908,7 @@ fn split_selection(cx: &mut Context) { fn split_selection_on_newline(cx: &mut Context) { let (view, doc) = current!(cx.editor); let text = doc.text().slice(..); - // only compile the regex once - #[allow(clippy::trivial_regex)] - static REGEX: Lazy = - Lazy::new(|| Regex::new(r"\r\n|[\n\r\u{000B}\u{000C}\u{0085}\u{2028}\u{2029}]").unwrap()); - let selection = selection::split_on_matches(text, doc.selection(view.id), ®EX); + let selection = selection::split_on_newline(text, doc.selection(view.id)); doc.set_selection(view.id, selection); } @@ -1930,8 +1927,7 @@ fn merge_consecutive_selections(cx: &mut Context) { #[allow(clippy::too_many_arguments)] fn search_impl( editor: &mut Editor, - contents: &str, - regex: &Regex, + regex: &rope::Regex, movement: Movement, direction: Direction, scrolloff: usize, @@ -1959,23 +1955,20 @@ fn search_impl( // do a reverse search and wraparound to the end, we don't need to search // the text before the current cursor position for matches, but by slicing // it out, we need to add it back to the position of the selection. - let mut offset = 0; + let doc = doc!(editor).text().slice(..); // use find_at to find the next match after the cursor, loop around the end // Careful, `Regex` uses `bytes` as offsets, not character indices! let mut mat = match direction { - Direction::Forward => regex.find_at(contents, start), - Direction::Backward => regex.find_iter(&contents[..start]).last(), + Direction::Forward => regex.find(doc.regex_input_at_bytes(start..)), + Direction::Backward => regex.find_iter(doc.regex_input_at_bytes(..start)).last(), }; if mat.is_none() { if wrap_around { mat = match direction { - Direction::Forward => regex.find(contents), - Direction::Backward => { - offset = start; - regex.find_iter(&contents[start..]).last() - } + Direction::Forward => regex.find(doc.regex_input()), + Direction::Backward => regex.find_iter(doc.regex_input_at_bytes(start..)).last(), }; } if show_warnings { @@ -1992,8 +1985,8 @@ fn search_impl( let selection = doc.selection(view.id); if let Some(mat) = mat { - let start = text.byte_to_char(mat.start() + offset); - let end = text.byte_to_char(mat.end() + offset); + let start = text.byte_to_char(mat.start()); + let end = text.byte_to_char(mat.end()); if end == 0 { // skip empty matches that don't make sense @@ -2037,13 +2030,7 @@ fn searcher(cx: &mut Context, direction: Direction) { let scrolloff = config.scrolloff; let wrap_around = config.search.wrap_around; - let doc = doc!(cx.editor); - // TODO: could probably share with select_on_matches? 
- - // HAXX: sadly we can't avoid allocating a single string for the whole buffer since we can't - // feed chunks into the regex yet - let contents = doc.text().slice(..).to_string(); let completions = search_completions(cx, Some(reg)); ui::regex_prompt( @@ -2065,7 +2052,6 @@ fn searcher(cx: &mut Context, direction: Direction) { } search_impl( cx.editor, - &contents, ®ex, Movement::Move, direction, @@ -2085,8 +2071,6 @@ fn search_next_or_prev_impl(cx: &mut Context, movement: Movement, direction: Dir let config = cx.editor.config(); let scrolloff = config.scrolloff; if let Some(query) = cx.editor.registers.first(register, cx.editor) { - let doc = doc!(cx.editor); - let contents = doc.text().slice(..).to_string(); let search_config = &config.search; let case_insensitive = if search_config.smart_case { !query.chars().any(char::is_uppercase) @@ -2094,15 +2078,17 @@ fn search_next_or_prev_impl(cx: &mut Context, movement: Movement, direction: Dir false }; let wrap_around = search_config.wrap_around; - if let Ok(regex) = RegexBuilder::new(&query) - .case_insensitive(case_insensitive) - .multi_line(true) - .build() + if let Ok(regex) = rope::RegexBuilder::new() + .syntax( + rope::Config::new() + .case_insensitive(case_insensitive) + .multi_line(true), + ) + .build(&query) { for _ in 0..count { search_impl( cx.editor, - &contents, ®ex, movement, direction, @@ -2239,7 +2225,7 @@ fn global_search(cx: &mut Context) { let reg = cx.register.unwrap_or('/'); let completions = search_completions(cx, Some(reg)); - ui::regex_prompt( + ui::raw_regex_prompt( cx, "global-search:".into(), Some(reg), @@ -2250,7 +2236,7 @@ fn global_search(cx: &mut Context) { .map(|comp| (0.., std::borrow::Cow::Owned(comp.clone()))) .collect() }, - move |cx, regex, event| { + move |cx, _, input, event| { if event != PromptEvent::Validate { return; } @@ -2265,7 +2251,7 @@ fn global_search(cx: &mut Context) { if let Ok(matcher) = RegexMatcherBuilder::new() .case_smart(smart_case) - .build(regex.as_str()) + .build(input) { let search_root = helix_stdx::env::current_working_dir(); if !search_root.exists() { diff --git a/helix-term/src/ui/mod.rs b/helix-term/src/ui/mod.rs index 0873116cb..a4b148af3 100644 --- a/helix-term/src/ui/mod.rs +++ b/helix-term/src/ui/mod.rs @@ -18,6 +18,7 @@ use crate::filter_picker_entry; use crate::job::{self, Callback}; pub use completion::{Completion, CompletionItem}; pub use editor::EditorView; +use helix_stdx::rope; pub use markdown::Markdown; pub use menu::Menu; pub use picker::{DynamicPicker, FileLocation, Picker}; @@ -26,8 +27,6 @@ pub use prompt::{Prompt, PromptEvent}; pub use spinner::{ProgressSpinners, Spinner}; pub use text::Text; -use helix_core::regex::Regex; -use helix_core::regex::RegexBuilder; use helix_view::Editor; use std::path::PathBuf; @@ -63,7 +62,22 @@ pub fn regex_prompt( prompt: std::borrow::Cow<'static, str>, history_register: Option, completion_fn: impl FnMut(&Editor, &str) -> Vec + 'static, - fun: impl Fn(&mut crate::compositor::Context, Regex, PromptEvent) + 'static, + fun: impl Fn(&mut crate::compositor::Context, rope::Regex, PromptEvent) + 'static, +) { + raw_regex_prompt( + cx, + prompt, + history_register, + completion_fn, + move |cx, regex, _, event| fun(cx, regex, event), + ); +} +pub fn raw_regex_prompt( + cx: &mut crate::commands::Context, + prompt: std::borrow::Cow<'static, str>, + history_register: Option, + completion_fn: impl FnMut(&Editor, &str) -> Vec + 'static, + fun: impl Fn(&mut crate::compositor::Context, rope::Regex, &str, PromptEvent) + 'static, ) { let (view, 
doc) = current!(cx.editor); let doc_id = view.doc; @@ -94,10 +108,13 @@ pub fn regex_prompt( false }; - match RegexBuilder::new(input) - .case_insensitive(case_insensitive) - .multi_line(true) - .build() + match rope::RegexBuilder::new() + .syntax( + rope::Config::new() + .case_insensitive(case_insensitive) + .multi_line(true), + ) + .build(input) { Ok(regex) => { let (view, doc) = current!(cx.editor); @@ -110,7 +127,7 @@ pub fn regex_prompt( view.jumps.push((doc_id, snapshot.clone())); } - fun(cx, regex, event); + fun(cx, regex, input, event); let (view, doc) = current!(cx.editor); view.ensure_cursor_in_view(doc, config.scrolloff); From b43d9aa306099ca1b85543bac8453cf7b67eab3e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 27 Feb 2024 01:22:00 +0100 Subject: [PATCH 33/60] build(deps): bump ahash from 0.8.6 to 0.8.9 (#9737) Bumps [ahash](https://github.com/tkaitchuck/ahash) from 0.8.6 to 0.8.9. - [Release notes](https://github.com/tkaitchuck/ahash/releases) - [Commits](https://github.com/tkaitchuck/ahash/commits/v0.8.9) --- updated-dependencies: - dependency-name: ahash dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- helix-core/Cargo.toml | 2 +- helix-event/Cargo.toml | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index b8d375c51..9430cce26 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -19,9 +19,9 @@ checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" [[package]] name = "ahash" -version = "0.8.6" +version = "0.8.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91429305e9f0a25f6205c5b8e0d2db09e0708a7a6df0f42212bb56c32c8ac97a" +checksum = "d713b3834d76b85304d4d525563c1276e2e30dc97cc67bfb4585a4a29fc2c89f" dependencies = [ "cfg-if", "getrandom", diff --git a/helix-core/Cargo.toml b/helix-core/Cargo.toml index 0b0dd7452..be5ea5eb8 100644 --- a/helix-core/Cargo.toml +++ b/helix-core/Cargo.toml @@ -32,7 +32,7 @@ once_cell = "1.19" arc-swap = "1" regex = "1" bitflags = "2.4" -ahash = "0.8.6" +ahash = "0.8.9" hashbrown = { version = "0.14.3", features = ["raw"] } dunce = "1.0" diff --git a/helix-event/Cargo.toml b/helix-event/Cargo.toml index a5c88e93d..8711568e8 100644 --- a/helix-event/Cargo.toml +++ b/helix-event/Cargo.toml @@ -12,7 +12,7 @@ homepage.workspace = true # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] -ahash = "0.8.3" +ahash = "0.8.9" hashbrown = "0.14.0" tokio = { version = "1", features = ["rt", "rt-multi-thread", "time", "sync", "parking_lot", "macros"] } # the event registry is essentially read only but must be an rwlock so we can From d0f8261141f22c7954d1665bcbc4e89bda9bd6cf Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 27 Feb 2024 01:25:17 +0100 Subject: [PATCH 34/60] build(deps): bump tempfile from 3.10.0 to 3.10.1 (#9733) Bumps [tempfile](https://github.com/Stebalien/tempfile) from 3.10.0 to 3.10.1. - [Changelog](https://github.com/Stebalien/tempfile/blob/master/CHANGELOG.md) - [Commits](https://github.com/Stebalien/tempfile/compare/v3.10.0...v3.10.1) --- updated-dependencies: - dependency-name: tempfile dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- helix-loader/Cargo.toml | 2 +- helix-term/Cargo.toml | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 9430cce26..d376538e4 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2215,9 +2215,9 @@ dependencies = [ [[package]] name = "tempfile" -version = "3.10.0" +version = "3.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a365e8cd18e44762ef95d87f284f4b5cd04107fec2ff3052bd6a3e6069669e67" +checksum = "85b77fafb263dd9d05cbeac119526425676db3784113aa9295c88498cbf8bff1" dependencies = [ "cfg-if", "fastrand", diff --git a/helix-loader/Cargo.toml b/helix-loader/Cargo.toml index 25b559696..d15d87f95 100644 --- a/helix-loader/Cargo.toml +++ b/helix-loader/Cargo.toml @@ -30,7 +30,7 @@ log = "0.4" # cloning/compiling tree-sitter grammars cc = { version = "1" } threadpool = { version = "1.0" } -tempfile = "3.10.0" +tempfile = "3.10.1" dunce = "1.0.4" [target.'cfg(not(target_arch = "wasm32"))'.dependencies] diff --git a/helix-term/Cargo.toml b/helix-term/Cargo.toml index 1e21ec161..8c6ae9f42 100644 --- a/helix-term/Cargo.toml +++ b/helix-term/Cargo.toml @@ -84,4 +84,4 @@ helix-loader = { path = "../helix-loader" } [dev-dependencies] smallvec = "1.13" indoc = "2.0.4" -tempfile = "3.10.0" +tempfile = "3.10.1" From ea95c687751ebee391088bde269f0b40267dfdf0 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 27 Feb 2024 01:25:52 +0100 Subject: [PATCH 35/60] build(deps): bump serde_json from 1.0.113 to 1.0.114 (#9735) Bumps [serde_json](https://github.com/serde-rs/json) from 1.0.113 to 1.0.114. - [Release notes](https://github.com/serde-rs/json/releases) - [Commits](https://github.com/serde-rs/json/compare/v1.0.113...v1.0.114) --- updated-dependencies: - dependency-name: serde_json dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index d376538e4..6f4d9758e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2045,9 +2045,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.113" +version = "1.0.114" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69801b70b1c3dac963ecb03a364ba0ceda9cf60c71cfe475e99864759c8b8a79" +checksum = "c5f09b1bd632ef549eaa9f60a1f8de742bdbc698e6cee2095fc84dde5f549ae0" dependencies = [ "itoa", "ryu", From 1a82aeeae91be33cb0923c9f652fa6db250efd7f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 27 Feb 2024 01:26:24 +0100 Subject: [PATCH 36/60] build(deps): bump serde from 1.0.196 to 1.0.197 (#9736) Bumps [serde](https://github.com/serde-rs/serde) from 1.0.196 to 1.0.197. - [Release notes](https://github.com/serde-rs/serde/releases) - [Commits](https://github.com/serde-rs/serde/compare/v1.0.196...v1.0.197) --- updated-dependencies: - dependency-name: serde dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 6f4d9758e..7f57b75d3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2025,18 +2025,18 @@ checksum = "1792db035ce95be60c3f8853017b3999209281c24e2ba5bc8e59bf97a0c590c1" [[package]] name = "serde" -version = "1.0.196" +version = "1.0.197" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "870026e60fa08c69f064aa766c10f10b1d62db9ccd4d0abb206472bee0ce3b32" +checksum = "3fb1c873e1b9b056a4dc4c0c198b24c3ffa059243875552b2bd0933b1aee4ce2" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.196" +version = "1.0.197" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33c85360c95e7d137454dc81d9a4ed2b8efd8fbe19cee57357b32b9771fccb67" +checksum = "7eb0b34b42edc17f6b7cac84a52a1c5f0e1bb2227e997ca9011ea3dd34e8610b" dependencies = [ "proc-macro2", "quote", From 358ac6bc1f512ca7303856dc904d4b4cdc1fe718 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E5=B7=A2=E9=B9=8F?= Date: Mon, 26 Feb 2024 19:41:50 -0500 Subject: [PATCH 37/60] add fidl support (#9713) --- book/src/generated/lang-support.md | 1 + languages.toml | 18 ++++++++ runtime/queries/fidl/folds.scm | 6 +++ runtime/queries/fidl/highlights.scm | 64 +++++++++++++++++++++++++++++ runtime/queries/fidl/injections.scm | 2 + 5 files changed, 91 insertions(+) create mode 100644 runtime/queries/fidl/folds.scm create mode 100644 runtime/queries/fidl/highlights.scm create mode 100644 runtime/queries/fidl/injections.scm diff --git a/book/src/generated/lang-support.md b/book/src/generated/lang-support.md index 7aec37778..1bc6b0817 100644 --- a/book/src/generated/lang-support.md +++ b/book/src/generated/lang-support.md @@ -44,6 +44,7 @@ | erb | ✓ | | | | | erlang | ✓ | ✓ | | `erlang_ls` | | esdl | ✓ | | | | +| fidl | ✓ | | | | | fish | ✓ | ✓ | ✓ | | | forth | ✓ | | | `forth-lsp` | | fortran | ✓ | | ✓ | `fortls` | diff --git a/languages.toml b/languages.toml index 750ec9088..313b3d95e 100644 --- a/languages.toml +++ b/languages.toml @@ -3140,3 +3140,21 @@ indent = { tab-width = 2, unit = " " } [[grammar]] name = "groovy" source = { git = "https://github.com/Decodetalkers/tree-sitter-groovy", rev = "7e023227f46fee428b16a0288eeb0f65ee2523ec" } + +[[language]] +name = "fidl" +scope = "source.fidl" +injection-regex = "fidl" +file-types = ["fidl"] +comment-token = "//" +indent = { tab-width = 4, unit = " " } + +[language.auto-pairs] +'"' = '"' +'{' = '}' +'(' = ')' +'<' = '>' + +[[grammar]] +name = "fidl" +source = { git = "https://github.com/google/tree-sitter-fidl", rev = "bdbb635a7f5035e424f6173f2f11b9cd79703f8d" } diff --git a/runtime/queries/fidl/folds.scm b/runtime/queries/fidl/folds.scm new file mode 100644 index 000000000..f524c455b --- /dev/null +++ b/runtime/queries/fidl/folds.scm @@ -0,0 +1,6 @@ +[ + (layout_declaration) + (protocol_declaration) + (resource_declaration) + (service_declaration) +] @fold diff --git a/runtime/queries/fidl/highlights.scm b/runtime/queries/fidl/highlights.scm new file mode 100644 index 000000000..c70d22198 --- /dev/null +++ b/runtime/queries/fidl/highlights.scm @@ -0,0 +1,64 @@ +[ + "ajar" + "alias" + "as" + "bits" + "closed" + "compose" + "const" + "enum" + "error" + "flexible" + "library" + "open" + ; "optional" we did not specify a node for optional yet + "overlay" + "protocol" + "reserved" + "resource" + "service" + 
"strict" + "struct" + "table" + "type" + "union" + "using" +] @keyword + +(primitives_type) @type.builtin + +(builtin_complex_type) @type.builtin + +(const_declaration + (identifier) @constant) + +[ + "=" + "|" + "&" + "->" +] @operator + +(attribute + "@" @attribute + (identifier) @attribute) + +(string_literal) @string + +(numeric_literal) @constant.numeric + +[ + (true) + (false) +] @constant.builtin.boolean + +(comment) @comment + +[ + "(" + ")" + "<" + ">" + "{" + "}" +] @punctuation.bracket diff --git a/runtime/queries/fidl/injections.scm b/runtime/queries/fidl/injections.scm new file mode 100644 index 000000000..2f0e58eb6 --- /dev/null +++ b/runtime/queries/fidl/injections.scm @@ -0,0 +1,2 @@ +((comment) @injection.content + (#set! injection.language "comment")) From f46a09ab4f945273c7baf32e58438b501914fabb Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 27 Feb 2024 02:06:37 +0100 Subject: [PATCH 38/60] build(deps): bump cc from 1.0.85 to 1.0.88 (#9734) Bumps [cc](https://github.com/rust-lang/cc-rs) from 1.0.85 to 1.0.88. - [Release notes](https://github.com/rust-lang/cc-rs/releases) - [Commits](https://github.com/rust-lang/cc-rs/compare/1.0.85...1.0.88) --- updated-dependencies: - dependency-name: cc dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 7f57b75d3..08fa4789e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -145,9 +145,9 @@ checksum = "df8670b8c7b9dae1793364eafadf7239c40d669904660c5960d74cfd80b46a53" [[package]] name = "cc" -version = "1.0.85" +version = "1.0.88" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b918671670962b48bc23753aef0c51d072dca6f52f01f800854ada6ddb7f7d3" +checksum = "02f341c093d19155a6e41631ce5971aac4e9a868262212153124c15fa22d1cdc" [[package]] name = "cfg-if" From 26b3dc29be886c5a2ed1a5caaaf09eb730829c3e Mon Sep 17 00:00:00 2001 From: Gabriel Dinner-David <82682503+gabydd@users.noreply.github.com> Date: Tue, 27 Feb 2024 08:36:25 -0500 Subject: [PATCH 39/60] toggling of block comments (#4718) --- book/src/keymap.md | 4 + book/src/languages.md | 5 +- helix-core/src/comment.rs | 270 ++++++++++++++++++++++++++++++- helix-core/src/indent.rs | 4 +- helix-core/src/lib.rs | 3 - helix-core/src/syntax.rs | 67 +++++++- helix-core/tests/indent.rs | 3 +- helix-stdx/src/rope.rs | 11 ++ helix-term/src/commands.rs | 136 ++++++++++++++-- helix-term/src/keymap/default.rs | 3 + languages.toml | 91 ++++++++++- 11 files changed, 568 insertions(+), 29 deletions(-) diff --git a/book/src/keymap.md b/book/src/keymap.md index ac84147cd..bb09b0319 100644 --- a/book/src/keymap.md +++ b/book/src/keymap.md @@ -12,6 +12,7 @@ - [Match mode](#match-mode) - [Window mode](#window-mode) - [Space mode](#space-mode) + - [Comment mode](#comment-mode) - [Popup](#popup) - [Unimpaired](#unimpaired) - [Insert mode](#insert-mode) @@ -289,6 +290,9 @@ This layer is a kludge of mappings, mostly pickers. 
| `h` | Select symbol references (**LSP**) | `select_references_to_symbol_under_cursor` | | `'` | Open last fuzzy picker | `last_picker` | | `w` | Enter [window mode](#window-mode) | N/A | +| `c` | Comment/uncomment selections | `toggle_comments` | +| `C` | Block comment/uncomment selections | `toggle_block_comments` | +| `Alt-c` | Line comment/uncomment selections | `toggle_line_comments` | | `p` | Paste system clipboard after selections | `paste_clipboard_after` | | `P` | Paste system clipboard before selections | `paste_clipboard_before` | | `y` | Yank selections to clipboard | `yank_to_clipboard` | diff --git a/book/src/languages.md b/book/src/languages.md index e3900dca9..dd93fec53 100644 --- a/book/src/languages.md +++ b/book/src/languages.md @@ -42,7 +42,7 @@ name = "mylang" scope = "source.mylang" injection-regex = "mylang" file-types = ["mylang", "myl"] -comment-token = "#" +comment-tokens = "#" indent = { tab-width = 2, unit = " " } formatter = { command = "mylang-formatter" , args = ["--stdin"] } language-servers = [ "mylang-lsp" ] @@ -61,7 +61,8 @@ These configuration keys are available: | `roots` | A set of marker files to look for when trying to find the workspace root. For example `Cargo.lock`, `yarn.lock` | | `auto-format` | Whether to autoformat this language when saving | | `diagnostic-severity` | Minimal severity of diagnostic for it to be displayed. (Allowed values: `Error`, `Warning`, `Info`, `Hint`) | -| `comment-token` | The token to use as a comment-token | +| `comment-tokens` | The tokens to use as a comment token, either a single token `"//"` or an array `["//", "///", "//!"]` (the first token will be used for commenting). Also configurable as `comment-token` for backwards compatibility| +| `block-comment-tokens`| The start and end tokens for a multiline comment either an array or single table of `{ start = "/*", end = "*/"}`. The first set of tokens will be used for commenting, any pairs in the array can be uncommented | | `indent` | The indent to use. Has sub keys `unit` (the text inserted into the document when indenting; usually set to N spaces or `"\t"` for tabs) and `tab-width` (the number of spaces rendered for a tab) | | `language-servers` | The Language Servers used for this language. See below for more information in the section [Configuring Language Servers for a language](#configuring-language-servers-for-a-language) | | `grammar` | The tree-sitter grammar to use (defaults to the value of `name`) | diff --git a/helix-core/src/comment.rs b/helix-core/src/comment.rs index 9c7e50f33..536b710ab 100644 --- a/helix-core/src/comment.rs +++ b/helix-core/src/comment.rs @@ -1,9 +1,12 @@ //! This module contains the functionality toggle comments on lines over the selection //! 
using the comment character defined in the user's `languages.toml` +use smallvec::SmallVec; + use crate::{ - find_first_non_whitespace_char, Change, Rope, RopeSlice, Selection, Tendril, Transaction, + syntax::BlockCommentToken, Change, Range, Rope, RopeSlice, Selection, Tendril, Transaction, }; +use helix_stdx::rope::RopeSliceExt; use std::borrow::Cow; /// Given text, a comment token, and a set of line indices, returns the following: @@ -22,12 +25,12 @@ fn find_line_comment( ) -> (bool, Vec, usize, usize) { let mut commented = true; let mut to_change = Vec::new(); - let mut min = usize::MAX; // minimum col for find_first_non_whitespace_char + let mut min = usize::MAX; // minimum col for first_non_whitespace_char let mut margin = 1; let token_len = token.chars().count(); for line in lines { let line_slice = text.line(line); - if let Some(pos) = find_first_non_whitespace_char(line_slice) { + if let Some(pos) = line_slice.first_non_whitespace_char() { let len = line_slice.len_chars(); if pos < min { @@ -94,6 +97,222 @@ pub fn toggle_line_comments(doc: &Rope, selection: &Selection, token: Option<&st Transaction::change(doc, changes.into_iter()) } +#[derive(Debug, PartialEq, Eq)] +pub enum CommentChange { + Commented { + range: Range, + start_pos: usize, + end_pos: usize, + start_margin: bool, + end_margin: bool, + start_token: String, + end_token: String, + }, + Uncommented { + range: Range, + start_pos: usize, + end_pos: usize, + start_token: String, + end_token: String, + }, + Whitespace { + range: Range, + }, +} + +pub fn find_block_comments( + tokens: &[BlockCommentToken], + text: RopeSlice, + selection: &Selection, +) -> (bool, Vec) { + let mut commented = true; + let mut only_whitespace = true; + let mut comment_changes = Vec::with_capacity(selection.len()); + let default_tokens = tokens.first().cloned().unwrap_or_default(); + // TODO: check if this can be removed on MSRV bump + #[allow(clippy::redundant_clone)] + let mut start_token = default_tokens.start.clone(); + #[allow(clippy::redundant_clone)] + let mut end_token = default_tokens.end.clone(); + + let mut tokens = tokens.to_vec(); + // sort the tokens by length, so longer tokens will match first + tokens.sort_by(|a, b| { + if a.start.len() == b.start.len() { + b.end.len().cmp(&a.end.len()) + } else { + b.start.len().cmp(&a.start.len()) + } + }); + for range in selection { + let selection_slice = range.slice(text); + if let (Some(start_pos), Some(end_pos)) = ( + selection_slice.first_non_whitespace_char(), + selection_slice.last_non_whitespace_char(), + ) { + let mut line_commented = false; + let mut after_start = 0; + let mut before_end = 0; + let len = (end_pos + 1) - start_pos; + + for BlockCommentToken { start, end } in &tokens { + let start_len = start.chars().count(); + let end_len = end.chars().count(); + after_start = start_pos + start_len; + before_end = end_pos.saturating_sub(end_len); + + if len >= start_len + end_len { + let start_fragment = selection_slice.slice(start_pos..after_start); + let end_fragment = selection_slice.slice(before_end + 1..end_pos + 1); + + // block commented with these tokens + if start_fragment == start.as_str() && end_fragment == end.as_str() { + start_token = start.to_string(); + end_token = end.to_string(); + line_commented = true; + break; + } + } + } + + if !line_commented { + comment_changes.push(CommentChange::Uncommented { + range: *range, + start_pos, + end_pos, + start_token: default_tokens.start.clone(), + end_token: default_tokens.end.clone(), + }); + commented = false; + } else { + 
comment_changes.push(CommentChange::Commented { + range: *range, + start_pos, + end_pos, + start_margin: selection_slice + .get_char(after_start) + .map_or(false, |c| c == ' '), + end_margin: after_start != before_end + && selection_slice + .get_char(before_end) + .map_or(false, |c| c == ' '), + start_token: start_token.to_string(), + end_token: end_token.to_string(), + }); + } + only_whitespace = false; + } else { + comment_changes.push(CommentChange::Whitespace { range: *range }); + } + } + if only_whitespace { + commented = false; + } + (commented, comment_changes) +} + +#[must_use] +pub fn create_block_comment_transaction( + doc: &Rope, + selection: &Selection, + commented: bool, + comment_changes: Vec, +) -> (Transaction, SmallVec<[Range; 1]>) { + let mut changes: Vec = Vec::with_capacity(selection.len() * 2); + let mut ranges: SmallVec<[Range; 1]> = SmallVec::with_capacity(selection.len()); + let mut offs = 0; + for change in comment_changes { + if commented { + if let CommentChange::Commented { + range, + start_pos, + end_pos, + start_token, + end_token, + start_margin, + end_margin, + } = change + { + let from = range.from(); + changes.push(( + from + start_pos, + from + start_pos + start_token.len() + start_margin as usize, + None, + )); + changes.push(( + from + end_pos - end_token.len() - end_margin as usize + 1, + from + end_pos + 1, + None, + )); + } + } else { + // uncommented so manually map ranges through changes + match change { + CommentChange::Uncommented { + range, + start_pos, + end_pos, + start_token, + end_token, + } => { + let from = range.from(); + changes.push(( + from + start_pos, + from + start_pos, + Some(Tendril::from(format!("{} ", start_token))), + )); + changes.push(( + from + end_pos + 1, + from + end_pos + 1, + Some(Tendril::from(format!(" {}", end_token))), + )); + + let offset = start_token.chars().count() + end_token.chars().count() + 2; + ranges.push( + Range::new(from + offs, from + offs + end_pos + 1 + offset) + .with_direction(range.direction()), + ); + offs += offset; + } + CommentChange::Commented { range, .. } | CommentChange::Whitespace { range } => { + ranges.push(Range::new(range.from() + offs, range.to() + offs)); + } + } + } + } + (Transaction::change(doc, changes.into_iter()), ranges) +} + +#[must_use] +pub fn toggle_block_comments( + doc: &Rope, + selection: &Selection, + tokens: &[BlockCommentToken], +) -> Transaction { + let text = doc.slice(..); + let (commented, comment_changes) = find_block_comments(tokens, text, selection); + let (mut transaction, ranges) = + create_block_comment_transaction(doc, selection, commented, comment_changes); + if !commented { + transaction = transaction.with_selection(Selection::new(ranges, selection.primary_index())); + } + transaction +} + +pub fn split_lines_of_selection(text: RopeSlice, selection: &Selection) -> Selection { + let mut ranges = SmallVec::new(); + for range in selection.ranges() { + let (line_start, line_end) = range.line_range(text.slice(..)); + let mut pos = text.line_to_char(line_start); + for line in text.slice(pos..text.line_to_char(line_end + 1)).lines() { + let start = pos; + pos += line.len_chars(); + ranges.push(Range::new(start, pos)); + } + } + Selection::new(ranges, 0) +} + #[cfg(test)] mod test { use super::*; @@ -149,4 +368,49 @@ mod test { // TODO: account for uncommenting with uneven comment indentation } + + #[test] + fn test_find_block_comments() { + // three lines 5 characters. 
+ let mut doc = Rope::from("1\n2\n3"); + // select whole document + let selection = Selection::single(0, doc.len_chars()); + + let text = doc.slice(..); + + let res = find_block_comments(&[BlockCommentToken::default()], text, &selection); + + assert_eq!( + res, + ( + false, + vec![CommentChange::Uncommented { + range: Range::new(0, 5), + start_pos: 0, + end_pos: 4, + start_token: "/*".to_string(), + end_token: "*/".to_string(), + }] + ) + ); + + // comment + let transaction = toggle_block_comments(&doc, &selection, &[BlockCommentToken::default()]); + transaction.apply(&mut doc); + + assert_eq!(doc, "/* 1\n2\n3 */"); + + // uncomment + let selection = Selection::single(0, doc.len_chars()); + let transaction = toggle_block_comments(&doc, &selection, &[BlockCommentToken::default()]); + transaction.apply(&mut doc); + assert_eq!(doc, "1\n2\n3"); + + // don't panic when there is just a space in comment + doc = Rope::from("/* */"); + let selection = Selection::single(0, doc.len_chars()); + let transaction = toggle_block_comments(&doc, &selection, &[BlockCommentToken::default()]); + transaction.apply(&mut doc); + assert_eq!(doc, ""); + } } diff --git a/helix-core/src/indent.rs b/helix-core/src/indent.rs index c29bb3a0b..2a0a3876c 100644 --- a/helix-core/src/indent.rs +++ b/helix-core/src/indent.rs @@ -1,10 +1,10 @@ use std::{borrow::Cow, collections::HashMap}; +use helix_stdx::rope::RopeSliceExt; use tree_sitter::{Query, QueryCursor, QueryPredicateArg}; use crate::{ chars::{char_is_line_ending, char_is_whitespace}, - find_first_non_whitespace_char, graphemes::{grapheme_width, tab_width_at}, syntax::{IndentationHeuristic, LanguageConfiguration, RopeProvider, Syntax}, tree_sitter::Node, @@ -970,7 +970,7 @@ pub fn indent_for_newline( let mut num_attempts = 0; for line_idx in (0..=line_before).rev() { let line = text.line(line_idx); - let first_non_whitespace_char = match find_first_non_whitespace_char(line) { + let first_non_whitespace_char = match line.first_non_whitespace_char() { Some(i) => i, None => { continue; diff --git a/helix-core/src/lib.rs b/helix-core/src/lib.rs index 94802eba9..1abd90d10 100644 --- a/helix-core/src/lib.rs +++ b/helix-core/src/lib.rs @@ -37,9 +37,6 @@ pub mod unicode { pub use helix_loader::find_workspace; -pub fn find_first_non_whitespace_char(line: RopeSlice) -> Option { - line.chars().position(|ch| !ch.is_whitespace()) -} mod rope_reader; pub use rope_reader::RopeReader; diff --git a/helix-core/src/syntax.rs b/helix-core/src/syntax.rs index 0d8559ca9..3b224e1b2 100644 --- a/helix-core/src/syntax.rs +++ b/helix-core/src/syntax.rs @@ -99,7 +99,19 @@ pub struct LanguageConfiguration { pub shebangs: Vec, // interpreter(s) associated with language #[serde(default)] pub roots: Vec, // these indicate project roots <.git, Cargo.toml> - pub comment_token: Option, + #[serde( + default, + skip_serializing, + deserialize_with = "from_comment_tokens", + alias = "comment-token" + )] + pub comment_tokens: Option>, + #[serde( + default, + skip_serializing, + deserialize_with = "from_block_comment_tokens" + )] + pub block_comment_tokens: Option>, pub text_width: Option, pub soft_wrap: Option, @@ -240,6 +252,59 @@ impl<'de> Deserialize<'de> for FileType { } } +fn from_comment_tokens<'de, D>(deserializer: D) -> Result>, D::Error> +where + D: serde::Deserializer<'de>, +{ + #[derive(Deserialize)] + #[serde(untagged)] + enum CommentTokens { + Multiple(Vec), + Single(String), + } + Ok( + Option::::deserialize(deserializer)?.map(|tokens| match tokens { + CommentTokens::Single(val) => 
vec![val], + CommentTokens::Multiple(vals) => vals, + }), + ) +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct BlockCommentToken { + pub start: String, + pub end: String, +} + +impl Default for BlockCommentToken { + fn default() -> Self { + BlockCommentToken { + start: "/*".to_string(), + end: "*/".to_string(), + } + } +} + +fn from_block_comment_tokens<'de, D>( + deserializer: D, +) -> Result>, D::Error> +where + D: serde::Deserializer<'de>, +{ + #[derive(Deserialize)] + #[serde(untagged)] + enum BlockCommentTokens { + Multiple(Vec), + Single(BlockCommentToken), + } + Ok( + Option::::deserialize(deserializer)?.map(|tokens| match tokens { + BlockCommentTokens::Single(val) => vec![val], + BlockCommentTokens::Multiple(vals) => vals, + }), + ) +} + #[derive(Clone, Copy, Debug, Serialize, Deserialize, PartialEq, Eq, Hash)] #[serde(rename_all = "kebab-case")] pub enum LanguageServerFeature { diff --git a/helix-core/tests/indent.rs b/helix-core/tests/indent.rs index 53265e0b1..31946c56e 100644 --- a/helix-core/tests/indent.rs +++ b/helix-core/tests/indent.rs @@ -4,6 +4,7 @@ use helix_core::{ syntax::{Configuration, Loader}, Syntax, }; +use helix_stdx::rope::RopeSliceExt; use ropey::Rope; use std::{ops::Range, path::PathBuf, process::Command, sync::Arc}; @@ -211,7 +212,7 @@ fn test_treesitter_indent( if ignored_lines.iter().any(|range| range.contains(&(i + 1))) { continue; } - if let Some(pos) = helix_core::find_first_non_whitespace_char(line) { + if let Some(pos) = line.first_non_whitespace_char() { let tab_width: usize = 4; let suggested_indent = treesitter_indent_for_pos( indent_query, diff --git a/helix-stdx/src/rope.rs b/helix-stdx/src/rope.rs index 7b4edda4f..7e2549f5a 100644 --- a/helix-stdx/src/rope.rs +++ b/helix-stdx/src/rope.rs @@ -14,6 +14,8 @@ pub trait RopeSliceExt<'a>: Sized { byte_range: R, ) -> RegexInput>; fn regex_input_at>(self, char_range: R) -> RegexInput>; + fn first_non_whitespace_char(self) -> Option; + fn last_non_whitespace_char(self) -> Option; } impl<'a> RopeSliceExt<'a> for RopeSlice<'a> { @@ -64,4 +66,13 @@ impl<'a> RopeSliceExt<'a> for RopeSlice<'a> { }; input.range(byte_range) } + fn first_non_whitespace_char(self) -> Option { + self.chars().position(|ch| !ch.is_whitespace()) + } + fn last_non_whitespace_char(self) -> Option { + self.chars_at(self.len_chars()) + .reversed() + .position(|ch| !ch.is_whitespace()) + .map(|pos| self.len_chars() - pos - 1) + } } diff --git a/helix-term/src/commands.rs b/helix-term/src/commands.rs index fdad31a81..bd0a60b7c 100644 --- a/helix-term/src/commands.rs +++ b/helix-term/src/commands.rs @@ -12,7 +12,7 @@ pub use typed::*; use helix_core::{ char_idx_at_visual_offset, comment, doc_formatter::TextFormat, - encoding, find_first_non_whitespace_char, find_workspace, graphemes, + encoding, find_workspace, graphemes, history::UndoKind, increment, indent, indent::IndentStyle, @@ -23,7 +23,7 @@ use helix_core::{ regex::{self, Regex}, search::{self, CharMatcher}, selection, shellwords, surround, - syntax::LanguageServerFeature, + syntax::{BlockCommentToken, LanguageServerFeature}, text_annotations::TextAnnotations, textobject, tree_sitter::Node, @@ -415,6 +415,8 @@ impl MappableCommand { completion, "Invoke completion popup", hover, "Show docs for item under cursor", toggle_comments, "Comment/uncomment selections", + toggle_line_comments, "Line comment/uncomment selections", + toggle_block_comments, "Block comment/uncomment selections", rotate_selections_forward, "Rotate selections forward", rotate_selections_backward, 
"Rotate selections backward", rotate_selection_contents_forward, "Rotate selection contents forward", @@ -822,7 +824,7 @@ fn kill_to_line_start(cx: &mut Context) { let head = if anchor == first_char && line != 0 { // select until previous line line_end_char_index(&text, line - 1) - } else if let Some(pos) = find_first_non_whitespace_char(text.line(line)) { + } else if let Some(pos) = text.line(line).first_non_whitespace_char() { if first_char + pos < anchor { // select until first non-blank in line if cursor is after it first_char + pos @@ -884,7 +886,7 @@ fn goto_first_nonwhitespace_impl(view: &mut View, doc: &mut Document, movement: let selection = doc.selection(view.id).clone().transform(|range| { let line = range.cursor_line(text); - if let Some(pos) = find_first_non_whitespace_char(text.line(line)) { + if let Some(pos) = text.line(line).first_non_whitespace_char() { let pos = pos + text.line_to_char(line); range.put_cursor(text, pos, movement == Movement::Extend) } else { @@ -3087,11 +3089,11 @@ fn insert_with_indent(cx: &mut Context, cursor_fallback: IndentFallbackPos) { } else { // move cursor to the fallback position let pos = match cursor_fallback { - IndentFallbackPos::LineStart => { - find_first_non_whitespace_char(text.line(cursor_line)) - .map(|ws_offset| ws_offset + cursor_line_start) - .unwrap_or(cursor_line_start) - } + IndentFallbackPos::LineStart => text + .line(cursor_line) + .first_non_whitespace_char() + .map(|ws_offset| ws_offset + cursor_line_start) + .unwrap_or(cursor_line_start), IndentFallbackPos::LineEnd => line_end_char_index(&text, cursor_line), }; @@ -4462,18 +4464,124 @@ pub fn completion(cx: &mut Context) { } // comments -fn toggle_comments(cx: &mut Context) { +type CommentTransactionFn = fn( + line_token: Option<&str>, + block_tokens: Option<&[BlockCommentToken]>, + doc: &Rope, + selection: &Selection, +) -> Transaction; + +fn toggle_comments_impl(cx: &mut Context, comment_transaction: CommentTransactionFn) { let (view, doc) = current!(cx.editor); - let token = doc + let line_token: Option<&str> = doc + .language_config() + .and_then(|lc| lc.comment_tokens.as_ref()) + .and_then(|tc| tc.first()) + .map(|tc| tc.as_str()); + let block_tokens: Option<&[BlockCommentToken]> = doc .language_config() - .and_then(|lc| lc.comment_token.as_ref()) - .map(|tc| tc.as_ref()); - let transaction = comment::toggle_line_comments(doc.text(), doc.selection(view.id), token); + .and_then(|lc| lc.block_comment_tokens.as_ref()) + .map(|tc| &tc[..]); + + let transaction = + comment_transaction(line_token, block_tokens, doc.text(), doc.selection(view.id)); doc.apply(&transaction, view.id); exit_select_mode(cx); } +/// commenting behavior: +/// 1. only line comment tokens -> line comment +/// 2. each line block commented -> uncomment all lines +/// 3. whole selection block commented -> uncomment selection +/// 4. all lines not commented and block tokens -> comment uncommented lines +/// 5. 
no comment tokens and not block commented -> line comment +fn toggle_comments(cx: &mut Context) { + toggle_comments_impl(cx, |line_token, block_tokens, doc, selection| { + let text = doc.slice(..); + + // only have line comment tokens + if line_token.is_some() && block_tokens.is_none() { + return comment::toggle_line_comments(doc, selection, line_token); + } + + let split_lines = comment::split_lines_of_selection(text, selection); + + let default_block_tokens = &[BlockCommentToken::default()]; + let block_comment_tokens = block_tokens.unwrap_or(default_block_tokens); + + let (line_commented, line_comment_changes) = + comment::find_block_comments(block_comment_tokens, text, &split_lines); + + // block commented by line would also be block commented so check this first + if line_commented { + return comment::create_block_comment_transaction( + doc, + &split_lines, + line_commented, + line_comment_changes, + ) + .0; + } + + let (block_commented, comment_changes) = + comment::find_block_comments(block_comment_tokens, text, selection); + + // check if selection has block comments + if block_commented { + return comment::create_block_comment_transaction( + doc, + selection, + block_commented, + comment_changes, + ) + .0; + } + + // not commented and only have block comment tokens + if line_token.is_none() && block_tokens.is_some() { + return comment::create_block_comment_transaction( + doc, + &split_lines, + line_commented, + line_comment_changes, + ) + .0; + } + + // not block commented at all and don't have any tokens + comment::toggle_line_comments(doc, selection, line_token) + }) +} + +fn toggle_line_comments(cx: &mut Context) { + toggle_comments_impl(cx, |line_token, block_tokens, doc, selection| { + if line_token.is_none() && block_tokens.is_some() { + let default_block_tokens = &[BlockCommentToken::default()]; + let block_comment_tokens = block_tokens.unwrap_or(default_block_tokens); + comment::toggle_block_comments( + doc, + &comment::split_lines_of_selection(doc.slice(..), selection), + block_comment_tokens, + ) + } else { + comment::toggle_line_comments(doc, selection, line_token) + } + }); +} + +fn toggle_block_comments(cx: &mut Context) { + toggle_comments_impl(cx, |line_token, block_tokens, doc, selection| { + if line_token.is_some() && block_tokens.is_none() { + comment::toggle_line_comments(doc, selection, line_token) + } else { + let default_block_tokens = &[BlockCommentToken::default()]; + let block_comment_tokens = block_tokens.unwrap_or(default_block_tokens); + comment::toggle_block_comments(doc, selection, block_comment_tokens) + } + }); +} + fn rotate_selections(cx: &mut Context, direction: Direction) { let count = cx.count(); let (view, doc) = current!(cx.editor); diff --git a/helix-term/src/keymap/default.rs b/helix-term/src/keymap/default.rs index 92d6b5906..bab662b04 100644 --- a/helix-term/src/keymap/default.rs +++ b/helix-term/src/keymap/default.rs @@ -276,6 +276,9 @@ pub fn default() -> HashMap { "k" => hover, "r" => rename_symbol, "h" => select_references_to_symbol_under_cursor, + "c" => toggle_comments, + "C" => toggle_block_comments, + "A-c" => toggle_line_comments, "?" 
=> command_palette, }, "z" => { "View" diff --git a/languages.toml b/languages.toml index 313b3d95e..5cfc0c28d 100644 --- a/languages.toml +++ b/languages.toml @@ -191,7 +191,12 @@ injection-regex = "rust" file-types = ["rs"] roots = ["Cargo.toml", "Cargo.lock"] auto-format = true -comment-token = "//" +comment-tokens = ["//", "///", "//!"] +block-comment-tokens = [ + { start = "/*", end = "*/" }, + { start = "/**", end = "*/" }, + { start = "/*!", end = "*/" }, +] language-servers = [ "rust-analyzer" ] indent = { tab-width = 4, unit = " " } persistent-diagnostic-sources = ["rustc", "clippy"] @@ -283,6 +288,7 @@ injection-regex = "protobuf" file-types = ["proto"] language-servers = [ "bufls", "pbkit" ] comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } indent = { tab-width = 2, unit = " " } [[grammar]] @@ -326,6 +332,7 @@ injection-regex = "mint" file-types = ["mint"] shebangs = [] comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } language-servers = [ "mint" ] indent = { tab-width = 2, unit = " " } @@ -408,6 +415,7 @@ scope = "source.c" injection-regex = "c" file-types = ["c"] # TODO: ["h"] comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } language-servers = [ "clangd" ] indent = { tab-width = 2, unit = " " } @@ -444,6 +452,7 @@ scope = "source.cpp" injection-regex = "cpp" file-types = ["cc", "hh", "c++", "cpp", "hpp", "h", "ipp", "tpp", "cxx", "hxx", "ixx", "txx", "ino", "C", "H", "cu", "cuh", "cppm", "h++", "ii", "inl", { glob = ".hpp.in" }, { glob = ".h.in" }] comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } language-servers = [ "clangd" ] indent = { tab-width = 2, unit = " " } @@ -491,6 +500,7 @@ injection-regex = "c-?sharp" file-types = ["cs", "csx", "cake"] roots = ["sln", "csproj"] comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } indent = { tab-width = 4, unit = "\t" } language-servers = [ "omnisharp" ] @@ -549,6 +559,7 @@ file-types = ["go"] roots = ["go.work", "go.mod"] auto-format = true comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } language-servers = [ "gopls", "golangci-lint-lsp" ] # TODO: gopls needs utf-8 offsets? 
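Taken together with the `comment-tokens`/`block-comment-tokens` documentation earlier in this patch, a hypothetical `languages.toml` entry could combine both keys as below (the language name is illustrative and the tokens simply mirror the Rust entry above, not part of this diff). With such a config, `<space>c` falls back to line comments when the selection is not already block-commented, `<space>C` wraps the selection in the first block pair, `<space>Alt-c` forces per-line toggling, and any pair listed can still be uncommented:

  [[language]]
  name = "mylang"
  comment-tokens = ["//", "///"]          # first token is used when commenting
  block-comment-tokens = [
    { start = "/*", end = "*/" },         # first pair is used when commenting
    { start = "/**", end = "*/" },        # other pairs are still recognised when uncommenting
  ]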
indent = { tab-width = 4, unit = "\t" } @@ -614,6 +625,7 @@ scope = "source.gotmpl" injection-regex = "gotmpl" file-types = ["gotmpl"] comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } language-servers = [ "gopls" ] indent = { tab-width = 2, unit = " " } @@ -643,6 +655,7 @@ language-id = "javascript" file-types = ["js", "mjs", "cjs", "rules", "es6", "pac", { glob = "jakefile" }] shebangs = ["node"] comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } language-servers = [ "typescript-language-server" ] indent = { tab-width = 2, unit = " " } @@ -669,6 +682,7 @@ injection-regex = "jsx" language-id = "javascriptreact" file-types = ["jsx"] comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } language-servers = [ "typescript-language-server" ] indent = { tab-width = 2, unit = " " } grammar = "javascript" @@ -680,6 +694,8 @@ injection-regex = "(ts|typescript)" file-types = ["ts", "mts", "cts"] language-id = "typescript" shebangs = ["deno", "ts-node"] +comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } language-servers = [ "typescript-language-server" ] indent = { tab-width = 2, unit = " " } @@ -693,6 +709,8 @@ scope = "source.tsx" injection-regex = "(tsx)" # |typescript language-id = "typescriptreact" file-types = ["tsx"] +comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } language-servers = [ "typescript-language-server" ] indent = { tab-width = 2, unit = " " } @@ -705,6 +723,7 @@ name = "css" scope = "source.css" injection-regex = "css" file-types = ["css", "scss"] +block-comment-tokens = { start = "/*", end = "*/" } language-servers = [ "vscode-css-language-server" ] auto-format = true indent = { tab-width = 2, unit = " " } @@ -718,6 +737,7 @@ name = "scss" scope = "source.scss" injection-regex = "scss" file-types = ["scss"] +block-comment-tokens = { start = "/*", end = "*/" } language-servers = [ "vscode-css-language-server" ] auto-format = true indent = { tab-width = 2, unit = " " } @@ -731,6 +751,7 @@ name = "html" scope = "text.html.basic" injection-regex = "html" file-types = ["html", "htm", "shtml", "xhtml", "xht", "jsp", "asp", "aspx", "jshtm", "volt", "rhtml"] +block-comment-tokens = { start = "" } language-servers = [ "vscode-html-language-server" ] auto-format = true indent = { tab-width = 2, unit = " " } @@ -901,6 +922,7 @@ injection-regex = "php" file-types = ["php", "inc", "php4", "php5", "phtml", "ctp"] shebangs = ["php"] roots = ["composer.json", "index.php"] +comment-token = "//" language-servers = [ "intelephense" ] indent = { tab-width = 4, unit = " " } @@ -913,6 +935,7 @@ name = "twig" scope = "source.twig" injection-regex = "twig" file-types = ["twig"] +block-comment-tokens = { start = "{#", end = "#}" } indent = { tab-width = 2, unit = " " } [[grammar]] @@ -966,6 +989,7 @@ injection-regex = "lean" file-types = ["lean"] roots = [ "lakefile.lean" ] comment-token = "--" +block-comment-tokens = { start = "/-", end = "-/" } language-servers = [ "lean" ] indent = { tab-width = 2, unit = " " } @@ -992,6 +1016,7 @@ file-types = ["jl"] shebangs = ["julia"] roots = ["Manifest.toml", "Project.toml"] comment-token = "#" +block-comment-tokens = { start = "#=", end = "=#" } language-servers = [ "julia" ] indent = { tab-width = 4, unit = " " } @@ -1055,6 +1080,7 @@ scope = "source.ocaml" injection-regex = "ocaml" file-types = ["ml"] shebangs = ["ocaml", "ocamlrun", "ocamlscript"] +block-comment-tokens = { start = "(*", end = "*)" } comment-token = "(**)" language-servers = [ 
"ocamllsp" ] indent = { tab-width = 2, unit = " " } @@ -1074,6 +1100,7 @@ name = "ocaml-interface" scope = "source.ocaml.interface" file-types = ["mli"] shebangs = [] +block-comment-tokens = { start = "(*", end = "*)" } comment-token = "(**)" language-servers = [ "ocamllsp" ] indent = { tab-width = 2, unit = " " } @@ -1096,6 +1123,7 @@ file-types = ["lua"] shebangs = ["lua", "luajit"] roots = [".luarc.json", ".luacheckrc", ".stylua.toml", "selene.toml", ".git"] comment-token = "--" +block-comment-tokens = { start = "--[[", end = "--]]" } indent = { tab-width = 2, unit = " " } language-servers = [ "lua-language-server" ] @@ -1121,6 +1149,7 @@ scope = "source.vue" injection-regex = "vue" file-types = ["vue"] roots = ["package.json"] +block-comment-tokens = { start = "" } indent = { tab-width = 2, unit = " " } language-servers = [ "vuels" ] @@ -1148,6 +1177,7 @@ injection-regex = "haskell" file-types = ["hs", "hs-boot"] roots = ["Setup.hs", "stack.yaml", "cabal.project"] comment-token = "--" +block-comment-tokens = { start = "{-", end = "-}" } language-servers = [ "haskell-language-server" ] indent = { tab-width = 2, unit = " " } @@ -1173,6 +1203,7 @@ injection-regex = "purescript" file-types = ["purs"] roots = ["spago.yaml", "spago.dhall", "bower.json"] comment-token = "--" +block-comment-tokens = { start = "{-", end = "-}" } language-servers = [ "purescript-language-server" ] indent = { tab-width = 2, unit = " " } auto-format = true @@ -1227,6 +1258,7 @@ scope = "source.prolog" file-types = ["pl", "prolog"] shebangs = ["swipl"] comment-token = "%" +block-comment-tokens = { start = "/*", end = "*/" } language-servers = [ "swipl" ] [[language]] @@ -1246,6 +1278,7 @@ name = "cmake" scope = "source.cmake" file-types = ["cmake", { glob = "CMakeLists.txt" }] comment-token = "#" +block-comment-tokens = { start = "#[[", end = "]]" } indent = { tab-width = 2, unit = " " } language-servers = [ "cmake-language-server" ] injection-regex = "cmake" @@ -1272,6 +1305,7 @@ name = "glsl" scope = "source.glsl" file-types = ["glsl", "vert", "tesc", "tese", "geom", "frag", "comp" ] comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } indent = { tab-width = 4, unit = " " } injection-regex = "glsl" @@ -1309,6 +1343,7 @@ file-types = ["rkt", "rktd", "rktl", "scrbl"] shebangs = ["racket"] comment-token = ";" indent = { tab-width = 2, unit = " " } +block-comment-tokens = { start = "#|", end = "|#" } language-servers = [ "racket" ] grammar = "scheme" @@ -1343,6 +1378,7 @@ name = "wgsl" scope = "source.wgsl" file-types = ["wgsl"] comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } language-servers = [ "wgsl_analyzer" ] indent = { tab-width = 4, unit = " " } @@ -1389,6 +1425,7 @@ name = "tablegen" scope = "source.tablegen" file-types = ["td"] comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } indent = { tab-width = 2, unit = " " } injection-regex = "tablegen" @@ -1404,6 +1441,7 @@ file-types = ["md", "markdown", "mkd", "mdwn", "mdown", "markdn", "mdtxt", "mdte roots = [".marksman.toml"] language-servers = [ "marksman" ] indent = { tab-width = 2, unit = " " } +block-comment-tokens = { start = "" } [[grammar]] name = "markdown" @@ -1427,6 +1465,7 @@ file-types = ["dart"] roots = ["pubspec.yaml"] auto-format = true comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } language-servers = [ "dart" ] indent = { tab-width = 2, unit = " " } @@ -1440,6 +1479,7 @@ scope = "source.scala" roots = ["build.sbt", "build.sc", "build.gradle", 
"build.gradle.kts", "pom.xml", ".scala-build"] file-types = ["scala", "sbt", "sc"] comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } indent = { tab-width = 2, unit = " " } language-servers = [ "metals" ] @@ -1560,6 +1600,8 @@ scope = "source.graphql" injection-regex = "graphql" file-types = ["gql", "graphql", "graphqls"] language-servers = [ "graphql-language-service" ] +comment-token = "#" +block-comment-tokens = { start = "\"\"\"", end = "\"\"\"" } indent = { tab-width = 2, unit = " " } [[grammar]] @@ -1574,6 +1616,7 @@ file-types = ["elm"] roots = ["elm.json"] auto-format = true comment-token = "--" +block-comment-tokens = { start = "{-", end = "-}" } language-servers = [ "elm-language-server" ] indent = { tab-width = 4, unit = " " } @@ -1586,6 +1629,7 @@ name = "iex" scope = "source.iex" injection-regex = "iex" file-types = ["iex"] +comment-token = "#" [[grammar]] name = "iex" @@ -1599,6 +1643,7 @@ file-types = ["res"] roots = ["bsconfig.json"] auto-format = true comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } language-servers = [ "rescript-language-server" ] indent = { tab-width = 2, unit = " " } @@ -1635,6 +1680,7 @@ scope = "source.kotlin" file-types = ["kt", "kts"] roots = ["settings.gradle", "settings.gradle.kts"] comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } indent = { tab-width = 4, unit = " " } language-servers = [ "kotlin-language-server" ] @@ -1649,6 +1695,7 @@ injection-regex = "(hcl|tf|nomad)" language-id = "terraform" file-types = ["hcl", "tf", "nomad"] comment-token = "#" +block-comment-tokens = { start = "/*", end = "*/" } indent = { tab-width = 2, unit = " " } language-servers = [ "terraform-ls" ] auto-format = true @@ -1663,6 +1710,7 @@ scope = "source.tfvars" language-id = "terraform-vars" file-types = ["tfvars"] comment-token = "#" +block-comment-tokens = { start = "/*", end = "*/" } indent = { tab-width = 2, unit = " " } language-servers = [ "terraform-ls" ] auto-format = true @@ -1685,6 +1733,7 @@ scope = "source.sol" injection-regex = "(sol|solidity)" file-types = ["sol"] comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } indent = { tab-width = 4, unit = " " } language-servers = [ "solc" ] @@ -1713,6 +1762,7 @@ scope = "source.ron" injection-regex = "ron" file-types = ["ron"] comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } indent = { tab-width = 4, unit = " " } [[grammar]] @@ -1754,6 +1804,7 @@ injection-regex = "(r|R)md" file-types = ["rmd", "Rmd"] indent = { tab-width = 2, unit = " " } grammar = "markdown" +block-comment-tokens = { start = "" } language-servers = [ "r" ] [[language]] @@ -1763,6 +1814,7 @@ injection-regex = "swift" file-types = ["swift"] roots = [ "Package.swift" ] comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } auto-format = true language-servers = [ "sourcekit-lsp" ] @@ -1775,6 +1827,7 @@ name = "erb" scope = "text.html.erb" injection-regex = "erb" file-types = ["erb"] +block-comment-tokens = { start = "" } indent = { tab-width = 2, unit = " " } grammar = "embedded-template" @@ -1783,6 +1836,7 @@ name = "ejs" scope = "text.html.ejs" injection-regex = "ejs" file-types = ["ejs"] +block-comment-tokens = { start = "" } indent = { tab-width = 2, unit = " " } grammar = "embedded-template" @@ -1796,6 +1850,7 @@ scope = "source.eex" injection-regex = "eex" file-types = ["eex"] roots = ["mix.exs", "mix.lock"] +block-comment-tokens = { start = "" } indent = { tab-width = 2, unit = " " } [[grammar]] @@ 
-1808,6 +1863,7 @@ scope = "source.heex" injection-regex = "heex" file-types = ["heex"] roots = ["mix.exs", "mix.lock"] +block-comment-tokens = { start = "" } indent = { tab-width = 2, unit = " " } language-servers = [ "elixir-ls" ] @@ -1820,6 +1876,7 @@ name = "sql" scope = "source.sql" file-types = ["sql", "dsql"] comment-token = "--" +block-comment-tokens = { start = "/*", end = "*/" } indent = { tab-width = 4, unit = " " } injection-regex = "sql" @@ -1878,6 +1935,7 @@ scope = "source.vala" injection-regex = "vala" file-types = ["vala", "vapi"] comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } indent = { tab-width = 2, unit = " " } language-servers = [ "vala-language-server" ] @@ -1903,6 +1961,7 @@ scope = "source.devicetree" injection-regex = "(dtsi?|devicetree|fdt)" file-types = ["dts", "dtsi"] comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } indent = { tab-width = 4, unit = "\t" } [[grammar]] @@ -1941,6 +2000,7 @@ file-types = ["odin"] roots = ["ols.json"] language-servers = [ "ols" ] comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } indent = { tab-width = 4, unit = "\t" } formatter = { command = "odinfmt", args = [ "-stdin", "true" ] } @@ -1998,6 +2058,7 @@ roots = ["v.mod"] language-servers = [ "vlang-language-server" ] auto-format = true comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } indent = { tab-width = 4, unit = "\t" } [[grammar]] @@ -2009,6 +2070,7 @@ name = "verilog" scope = "source.verilog" file-types = ["v", "vh", "sv", "svh"] comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } language-servers = [ "svlangserver" ] indent = { tab-width = 2, unit = " " } injection-regex = "verilog" @@ -2045,6 +2107,7 @@ scope = "source.openscad" injection-regex = "openscad" file-types = ["scad"] comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } language-servers = [ "openscad-lsp" ] indent = { tab-width = 2, unit = "\t" } @@ -2109,6 +2172,7 @@ injection-regex = "idr" file-types = ["idr"] shebangs = [] comment-token = "--" +block-comment-tokens = { start = "{-", end = "-}" } indent = { tab-width = 2, unit = " " } language-servers = [ "idris2-lsp" ] @@ -2144,6 +2208,7 @@ scope = "source.dot" injection-regex = "dot" file-types = ["dot"] comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } indent = { tab-width = 4, unit = " " } language-servers = [ "dot-language-server" ] @@ -2173,6 +2238,7 @@ scope = "source.slint" injection-regex = "slint" file-types = ["slint"] comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } indent = { tab-width = 4, unit = " " } language-servers = [ "slint-lsp" ] @@ -2222,6 +2288,7 @@ scope = "source.pascal" injection-regex = "pascal" file-types = ["pas", "pp", "inc", "lpr", "lfm"] comment-token = "//" +block-comment-tokens = { start = "{", end = "}" } indent = { tab-width = 2, unit = " " } language-servers = [ "pasls" ] @@ -2234,7 +2301,7 @@ name = "sml" scope = "source.sml" injection-regex = "sml" file-types = ["sml"] -comment-token = "(*" +block-comment-tokens = { start = "(*", end = "*)" } [[grammar]] name = "sml" @@ -2246,6 +2313,7 @@ scope = "source.jsonnet" file-types = ["libsonnet", "jsonnet"] roots = ["jsonnetfile.json"] comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } indent = { tab-width = 2, unit = " " } language-servers = [ "jsonnet-language-server" ] @@ -2258,6 +2326,7 @@ name = "astro" scope = "source.astro" injection-regex = "astro" file-types = 
["astro"] +block-comment-tokens = { start = "" } indent = { tab-width = 2, unit = " " } [[grammar]] @@ -2281,6 +2350,7 @@ source = { git = "https://github.com/vito/tree-sitter-bass", rev = "501133e260d7 name = "wat" scope = "source.wat" comment-token = ";;" +block-comment-tokens = { start = "(;", end = ";)" } file-types = ["wat"] [[grammar]] @@ -2291,6 +2361,7 @@ source = { git = "https://github.com/wasm-lsp/tree-sitter-wasm", rev = "2ca28a9f name = "wast" scope = "source.wast" comment-token = ";;" +block-comment-tokens = { start = "(;", end = ";)" } file-types = ["wast"] [[grammar]] @@ -2302,6 +2373,7 @@ name = "d" scope = "source.d" file-types = [ "d", "dd" ] comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } injection-regex = "d" indent = { tab-width = 4, unit = " "} language-servers = [ "serve-d" ] @@ -2328,6 +2400,7 @@ name = "kdl" scope = "source.kdl" file-types = ["kdl"] comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } injection-regex = "kdl" [[grammar]] @@ -2398,6 +2471,7 @@ file-types = [ "musicxml", "glif" ] +block-comment-tokens = { start = "" } indent = { tab-width = 2, unit = " " } [language.auto-pairs] @@ -2437,6 +2511,7 @@ scope = "source.wit" injection-regex = "wit" file-types = ["wit"] comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } indent = { tab-width = 2, unit = " " } [language.auto-pairs] @@ -2501,6 +2576,7 @@ scope = "source.bicep" file-types = ["bicep"] auto-format = true comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } indent = { tab-width = 2, unit = " "} language-servers = [ "bicep-langserver" ] @@ -2513,6 +2589,8 @@ name = "qml" scope = "source.qml" file-types = ["qml"] language-servers = [ "qmlls" ] +comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } indent = { tab-width = 4, unit = " " } grammar = "qmljs" @@ -2552,6 +2630,7 @@ injection-regex = "pony" roots = ["corral.json", "lock.json"] indent = { tab-width = 2, unit = " " } comment-token = "//" +block-comment-tokens = { start = "/*", end = "*/" } [[grammar]] name = "ponylang" @@ -2563,6 +2642,7 @@ scope = "source.dhall" injection-regex = "dhall" file-types = ["dhall"] comment-token = "--" +block-comment-tokens = { start = "{-", end = "-}" } indent = { tab-width = 2, unit = " " } language-servers = [ "dhall-lsp-server" ] formatter = { command = "dhall" , args = ["format"] } @@ -2586,6 +2666,7 @@ scope = "source.msbuild" injection-regex = "msbuild" file-types = ["proj", "vbproj", "csproj", "fsproj", "targets", "props"] indent = { tab-width = 2, unit = " " } +block-comment-tokens = { start = "" } grammar = "xml" [language.auto-pairs] @@ -2632,7 +2713,7 @@ scope = "source.tal" injection-regex = "tal" file-types = ["tal"] auto-format = false -comment-token = "(" +block-comment-tokens = { start = "(", end = ")" } [[grammar]] name = "uxntal" @@ -2766,6 +2847,7 @@ injection-regex = "nim" file-types = ["nim", "nims", "nimble"] shebangs = [] comment-token = "#" +block-comment-tokens = { start = "#[", end = "]#" } indent = { tab-width = 2, unit = " " } language-servers = [ "nimlangserver" ] @@ -2805,6 +2887,7 @@ source = { git = "https://github.com/pfeiferj/tree-sitter-hurl", rev = "264c4206 [[language]] name = "markdoc" scope = "text.markdoc" +block-comment-tokens = { start = "" } file-types = ["mdoc"] language-servers = [ "markdoc-ls" ] @@ -2858,6 +2941,7 @@ scope = "source.blueprint" injection-regex = "blueprint" file-types = ["blp"] comment-token = "//" +block-comment-tokens = { start = "/*", end 
= "*/" } language-servers = [ "blueprint-compiler" ] indent = { tab-width = 4, unit = " " } @@ -2910,6 +2994,7 @@ name = "webc" scope = "text.html.webc" injection-regex = "webc" file-types = ["webc"] +block-comment-tokens = { start = "" } indent = { tab-width = 2, unit = " " } grammar = "html" From 00653c772e7df6f68071d1cb1c92bfe9ca4876f9 Mon Sep 17 00:00:00 2001 From: Mo <76752051+mo8it@users.noreply.github.com> Date: Tue, 27 Feb 2024 18:24:05 +0100 Subject: [PATCH 40/60] Avoid cloning the whole paragraph content just for rendering (#9739) * Avoid cloning the whole paragraph content just for rendering * Fix tests --- helix-term/src/ui/editor.rs | 3 ++- helix-term/src/ui/info.rs | 3 ++- helix-term/src/ui/lsp.rs | 4 ++-- helix-term/src/ui/markdown.rs | 2 +- helix-term/src/ui/text.rs | 2 +- helix-tui/src/widgets/paragraph.rs | 19 ++++++++----------- helix-tui/tests/terminal.rs | 6 ++++-- helix-tui/tests/widgets_paragraph.rs | 18 +++++++++--------- 8 files changed, 29 insertions(+), 28 deletions(-) diff --git a/helix-term/src/ui/editor.rs b/helix-term/src/ui/editor.rs index 15a7262a8..dffaeea03 100644 --- a/helix-term/src/ui/editor.rs +++ b/helix-term/src/ui/editor.rs @@ -716,7 +716,8 @@ impl EditorView { } } - let paragraph = Paragraph::new(lines) + let text = Text::from(lines); + let paragraph = Paragraph::new(&text) .alignment(Alignment::Right) .wrap(Wrap { trim: true }); let width = 100.min(viewport.width); diff --git a/helix-term/src/ui/info.rs b/helix-term/src/ui/info.rs index cc6b7483f..651e5ca93 100644 --- a/helix-term/src/ui/info.rs +++ b/helix-term/src/ui/info.rs @@ -2,6 +2,7 @@ use crate::compositor::{Component, Context}; use helix_view::graphics::{Margin, Rect}; use helix_view::info::Info; use tui::buffer::Buffer as Surface; +use tui::text::Text; use tui::widgets::{Block, Borders, Paragraph, Widget}; impl Component for Info { @@ -31,7 +32,7 @@ impl Component for Info { let inner = block.inner(area).inner(&margin); block.render(area, surface); - Paragraph::new(self.text.as_str()) + Paragraph::new(&Text::from(self.text.as_str())) .style(text_style) .render(inner, surface); } diff --git a/helix-term/src/ui/lsp.rs b/helix-term/src/ui/lsp.rs index 879f963e7..a3698e38d 100644 --- a/helix-term/src/ui/lsp.rs +++ b/helix-term/src/ui/lsp.rs @@ -77,7 +77,7 @@ impl Component for SignatureHelp { let (_, sig_text_height) = crate::ui::text::required_size(&sig_text, area.width); let sig_text_area = area.clip_top(1).with_height(sig_text_height); let sig_text_area = sig_text_area.inner(&margin).intersection(surface.area); - let sig_text_para = Paragraph::new(sig_text).wrap(Wrap { trim: false }); + let sig_text_para = Paragraph::new(&sig_text).wrap(Wrap { trim: false }); sig_text_para.render(sig_text_area, surface); if self.signature_doc.is_none() { @@ -100,7 +100,7 @@ impl Component for SignatureHelp { let sig_doc_area = area .clip_top(sig_text_area.height + 2) .clip_bottom(u16::from(cx.editor.popup_border())); - let sig_doc_para = Paragraph::new(sig_doc) + let sig_doc_para = Paragraph::new(&sig_doc) .wrap(Wrap { trim: false }) .scroll((cx.scroll.unwrap_or_default() as u16, 0)); sig_doc_para.render(sig_doc_area.inner(&margin), surface); diff --git a/helix-term/src/ui/markdown.rs b/helix-term/src/ui/markdown.rs index 749d58508..81499d039 100644 --- a/helix-term/src/ui/markdown.rs +++ b/helix-term/src/ui/markdown.rs @@ -346,7 +346,7 @@ impl Component for Markdown { let text = self.parse(Some(&cx.editor.theme)); - let par = Paragraph::new(text) + let par = Paragraph::new(&text) .wrap(Wrap { trim: 
false }) .scroll((cx.scroll.unwrap_or_default() as u16, 0)); diff --git a/helix-term/src/ui/text.rs b/helix-term/src/ui/text.rs index a379536f8..a9c995627 100644 --- a/helix-term/src/ui/text.rs +++ b/helix-term/src/ui/text.rs @@ -33,7 +33,7 @@ impl Component for Text { fn render(&mut self, area: Rect, surface: &mut Surface, _cx: &mut Context) { use tui::widgets::{Paragraph, Widget, Wrap}; - let par = Paragraph::new(self.contents.clone()).wrap(Wrap { trim: false }); + let par = Paragraph::new(&self.contents).wrap(Wrap { trim: false }); // .scroll(x, y) offsets par.render(area, surface); diff --git a/helix-tui/src/widgets/paragraph.rs b/helix-tui/src/widgets/paragraph.rs index 4e8391621..9c8ae127c 100644 --- a/helix-tui/src/widgets/paragraph.rs +++ b/helix-tui/src/widgets/paragraph.rs @@ -28,15 +28,15 @@ fn get_line_offset(line_width: u16, text_area_width: u16, alignment: Alignment) /// # use helix_tui::widgets::{Block, Borders, Paragraph, Wrap}; /// # use helix_tui::layout::{Alignment}; /// # use helix_view::graphics::{Style, Color, Modifier}; -/// let text = vec![ +/// let text = Text::from(vec![ /// Spans::from(vec![ /// Span::raw("First"), /// Span::styled("line",Style::default().add_modifier(Modifier::ITALIC)), /// Span::raw("."), /// ]), /// Spans::from(Span::styled("Second line", Style::default().fg(Color::Red))), -/// ]; -/// Paragraph::new(text) +/// ]); +/// Paragraph::new(&text) /// .block(Block::default().title("Paragraph").borders(Borders::ALL)) /// .style(Style::default().fg(Color::White).bg(Color::Black)) /// .alignment(Alignment::Center) @@ -51,7 +51,7 @@ pub struct Paragraph<'a> { /// How to wrap the text wrap: Option, /// The text to display - text: Text<'a>, + text: &'a Text<'a>, /// Scroll scroll: (u16, u16), /// Alignment of the text @@ -70,7 +70,7 @@ pub struct Paragraph<'a> { /// - Here is another point that is long enough to wrap"#); /// /// // With leading spaces trimmed (window width of 30 chars): -/// Paragraph::new(bullet_points.clone()).wrap(Wrap { trim: true }); +/// Paragraph::new(&bullet_points).wrap(Wrap { trim: true }); /// // Some indented points: /// // - First thing goes here and is /// // long so that it wraps @@ -78,7 +78,7 @@ pub struct Paragraph<'a> { /// // is long enough to wrap /// /// // But without trimming, indentation is preserved: -/// Paragraph::new(bullet_points).wrap(Wrap { trim: false }); +/// Paragraph::new(&bullet_points).wrap(Wrap { trim: false }); /// // Some indented points: /// // - First thing goes here /// // and is long so that it wraps @@ -92,15 +92,12 @@ pub struct Wrap { } impl<'a> Paragraph<'a> { - pub fn new(text: T) -> Paragraph<'a> - where - T: Into>, - { + pub fn new(text: &'a Text) -> Paragraph<'a> { Paragraph { block: None, style: Default::default(), wrap: None, - text: text.into(), + text, scroll: (0, 0), alignment: Alignment::Left, } diff --git a/helix-tui/tests/terminal.rs b/helix-tui/tests/terminal.rs index 2824c9f24..d2d8ca101 100644 --- a/helix-tui/tests/terminal.rs +++ b/helix-tui/tests/terminal.rs @@ -17,14 +17,16 @@ fn terminal_buffer_size_should_not_be_limited() { // let backend = TestBackend::new(10, 10); // let mut terminal = Terminal::new(backend)?; // let frame = terminal.draw(|f| { -// let paragraph = Paragraph::new("Test"); +// let text = Text::from("Test"); +// let paragraph = Paragraph::new(&text); // f.render_widget(paragraph, f.size()); // })?; // assert_eq!(frame.buffer.get(0, 0).symbol, "T"); // assert_eq!(frame.area, Rect::new(0, 0, 10, 10)); // terminal.backend_mut().resize(8, 8); // let frame = 
terminal.draw(|f| { -// let paragraph = Paragraph::new("test"); +// let text = Text::from("test"); +// let paragraph = Paragraph::new(&text); // f.render_widget(paragraph, f.size()); // })?; // assert_eq!(frame.buffer.get(0, 0).symbol, "t"); diff --git a/helix-tui/tests/widgets_paragraph.rs b/helix-tui/tests/widgets_paragraph.rs index a7c972eb5..3d2ac467b 100644 --- a/helix-tui/tests/widgets_paragraph.rs +++ b/helix-tui/tests/widgets_paragraph.rs @@ -21,8 +21,8 @@ // terminal // .draw(|f| { // let size = f.size(); -// let text = vec![Spans::from(SAMPLE_STRING)]; -// let paragraph = Paragraph::new(text) +// let text = Text::from(SAMPLE_STRING); +// let paragraph = Paragraph::new(&text) // .block(Block::default().borders(Borders::ALL)) // .alignment(alignment) // .wrap(Wrap { trim: true }); @@ -88,8 +88,8 @@ // terminal // .draw(|f| { // let size = f.size(); -// let text = vec![Spans::from(s)]; -// let paragraph = Paragraph::new(text) +// let text = Text::from(s); +// let paragraph = Paragraph::new(&text) // .block(Block::default().borders(Borders::ALL)) // .wrap(Wrap { trim: true }); // f.render_widget(paragraph, size); @@ -120,8 +120,8 @@ // terminal // .draw(|f| { // let size = f.size(); -// let text = vec![Spans::from(s)]; -// let paragraph = Paragraph::new(text) +// let text = Text::from(s); +// let paragraph = Paragraph::new(&text) // .block(Block::default().borders(Borders::ALL)) // .wrap(Wrap { trim: true }); // f.render_widget(paragraph, size); @@ -155,8 +155,8 @@ // terminal // .draw(|f| { // let size = f.size(); - -// let paragraph = Paragraph::new(line).block(Block::default().borders(Borders::ALL)); +// let text = Text::from(line); +// let paragraph = Paragraph::new(&text).block(Block::default().borders(Borders::ALL)); // f.render_widget(paragraph, size); // }) // .unwrap(); @@ -174,7 +174,7 @@ // let text = Text::from( // "段落现在可以水平滚动了!\nParagraph can scroll horizontally!\nShort line", // ); -// let paragraph = Paragraph::new(text) +// let paragraph = Paragraph::new(&text) // .block(Block::default().borders(Borders::ALL)) // .alignment(alignment) // .scroll(scroll); From 083a9e775d31678f4d984c526e7140c0d2bb2312 Mon Sep 17 00:00:00 2001 From: Chris Date: Wed, 28 Feb 2024 12:18:00 +0000 Subject: [PATCH 41/60] Add support for pde files (#9741) --- languages.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/languages.toml b/languages.toml index 5cfc0c28d..ea2339f8b 100644 --- a/languages.toml +++ b/languages.toml @@ -1028,7 +1028,7 @@ source = { git = "https://github.com/tree-sitter/tree-sitter-julia", rev = "8fb3 name = "java" scope = "source.java" injection-regex = "java" -file-types = ["java", "jav"] +file-types = ["java", "jav", "pde"] roots = ["pom.xml", "build.gradle", "build.gradle.kts"] language-servers = [ "jdtls" ] indent = { tab-width = 2, unit = " " } From f03b91d1b7907e78a4242c5b525e47c997f4457d Mon Sep 17 00:00:00 2001 From: Brian Dorsey Date: Wed, 28 Feb 2024 11:55:17 -0800 Subject: [PATCH 42/60] update languages.toml: tree-sitter-lua grammar (#9727) * update languages.toml: tree-sitter-lua grammar repo has moved, use new URL and the rev of the latest release (v0.0.19) * update highlight queries a novice attempt to port query updates from the source repo to Helix captures and ordering * Apply suggestions from code review Co-authored-by: Michael Davis --------- Co-authored-by: Michael Davis --- languages.toml | 2 +- runtime/queries/lua/highlights.scm | 81 +++++++++++++++++++++++++----- 2 files changed, 69 insertions(+), 14 deletions(-) diff --git 
a/languages.toml b/languages.toml index ea2339f8b..fb4cda225 100644 --- a/languages.toml +++ b/languages.toml @@ -1129,7 +1129,7 @@ language-servers = [ "lua-language-server" ] [[grammar]] name = "lua" -source = { git = "https://github.com/MunifTanjim/tree-sitter-lua", rev = "887dfd4e83c469300c279314ff1619b1d0b85b91" } +source = { git = "https://github.com/tree-sitter-grammars/tree-sitter-lua", rev = "88e446476a1e97a8724dff7a23e2d709855077f2" } [[language]] name = "svelte" diff --git a/runtime/queries/lua/highlights.scm b/runtime/queries/lua/highlights.scm index f48e607c5..2f3b3c05f 100644 --- a/runtime/queries/lua/highlights.scm +++ b/runtime/queries/lua/highlights.scm @@ -1,9 +1,5 @@ ;;; Highlighting for lua -;;; Builtins -((identifier) @variable.builtin - (#eq? @variable.builtin "self")) - ;; Keywords (if_statement @@ -130,16 +126,65 @@ ((identifier) @constant (#match? @constant "^[A-Z][A-Z_0-9]*$")) -;; Parameters -(parameters - (identifier) @variable.parameter) +;; Tables + +(field name: (identifier) @variable.other.member) + +(dot_index_expression field: (identifier) @variable.other.member) + +(table_constructor +[ + "{" + "}" +] @constructor) -; ;; Functions -(function_declaration name: (identifier) @function) -(function_call name: (identifier) @function.call) +;; Functions -(function_declaration name: (dot_index_expression field: (identifier) @function)) -(function_call name: (dot_index_expression field: (identifier) @function.call)) +(parameters (identifier) @variable.parameter) + +(function_call + (identifier) @function.builtin + (#any-of? @function.builtin + ;; built-in functions in Lua 5.1 + "assert" "collectgarbage" "dofile" "error" "getfenv" "getmetatable" "ipairs" + "load" "loadfile" "loadstring" "module" "next" "pairs" "pcall" "print" + "rawequal" "rawget" "rawset" "require" "select" "setfenv" "setmetatable" + "tonumber" "tostring" "type" "unpack" "xpcall")) + +(function_declaration + name: [ + (identifier) @function + (dot_index_expression + field: (identifier) @function) + ]) + +(function_declaration + name: (method_index_expression + method: (identifier) @function.method)) + +(assignment_statement + (variable_list . + name: [ + (identifier) @function + (dot_index_expression + field: (identifier) @function) + ]) + (expression_list . + value: (function_definition))) + +(table_constructor + (field + name: (identifier) @function + value: (function_definition))) + +(function_call + name: [ + (identifier) @function.call + (dot_index_expression + field: (identifier) @function.call) + (method_index_expression + method: (identifier) @function.method.call) + ]) ; TODO: incorrectly highlights variable N in `N, nop = 42, function() end` (assignment_statement @@ -153,6 +198,7 @@ ;; Nodes (comment) @comment (string) @string +(escape_sequence) @constant.character.escape (number) @constant.numeric.integer (label_statement) @label ; A bit of a tricky one, this will only match field names @@ -162,7 +208,16 @@ ;; Property (dot_index_expression field: (identifier) @variable.other.member) -;; Variable +;; Variables +((identifier) @variable.builtin + (#eq? 
@variable.builtin "self")) + +(variable_list + (attribute + "<" @punctuation.bracket + (identifier) @attribute + ">" @punctuation.bracket)) + (identifier) @variable ;; Error From 1143f4795414c26382a5647f2be8c20a7a62ada4 Mon Sep 17 00:00:00 2001 From: Pascal Kuthe Date: Thu, 29 Feb 2024 02:47:41 +0100 Subject: [PATCH 43/60] fix split_on_newline (#9756) --- helix-core/src/selection.rs | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/helix-core/src/selection.rs b/helix-core/src/selection.rs index 91f1d0de5..bd252deb9 100644 --- a/helix-core/src/selection.rs +++ b/helix-core/src/selection.rs @@ -773,12 +773,12 @@ pub fn split_on_newline(text: RopeSlice, selection: &Selection) -> Selection { let mut start = sel_start; - for mat in sel.slice(text).lines() { - let len = mat.len_chars(); - let line_end_len = get_line_ending(&mat).map(|le| le.len_chars()).unwrap_or(0); + for line in sel.slice(text).lines() { + let Some(line_ending) = get_line_ending(&line) else { break }; + let line_end = start + line.len_chars(); // TODO: retain range direction - result.push(Range::new(start, start + len - line_end_len)); - start += len; + result.push(Range::new(start, line_end - line_ending.len_chars())); + start = line_end; } if start < sel_end { From e51a1e4e2ae99b8e1ad751e7dfa024a7d0e4ba8f Mon Sep 17 00:00:00 2001 From: JJ Date: Wed, 28 Feb 2024 17:49:10 -0800 Subject: [PATCH 44/60] Switch Nim tree-sitter queries to alaviss/tree-sitter-nim (#9722) --- languages.toml | 4 +- runtime/queries/nim/highlights.scm | 522 ++++++++++++++-------------- runtime/queries/nim/indents.scm | 83 +++-- runtime/queries/nim/textobjects.scm | 44 ++- 4 files changed, 348 insertions(+), 305 deletions(-) diff --git a/languages.toml b/languages.toml index fb4cda225..d2a183ded 100644 --- a/languages.toml +++ b/languages.toml @@ -50,6 +50,7 @@ metals = { command = "metals", config = { "isHttpEnabled" = true } } mint = { command = "mint", args = ["ls"] } nil = { command = "nil" } nimlangserver = { command = "nimlangserver" } +nimlsp = { command = "nimlsp" } nls = { command = "nls" } nu-lsp = { command = "nu", args = [ "--lsp" ] } ocamllsp = { command = "ocamllsp" } @@ -2858,10 +2859,9 @@ language-servers = [ "nimlangserver" ] "'" = "'" '{' = '}' -# Nim's tree-sitter grammar is in heavy development. 
[[grammar]] name = "nim" -source = { git = "https://github.com/aMOPel/tree-sitter-nim", rev = "240239b232550e431d67de250d1b5856209e7f06" } +source = { git = "https://github.com/alaviss/tree-sitter-nim", rev = "c5f0ce3b65222f5dbb1a12f9fe894524881ad590" } [[language]] name = "cabal" diff --git a/runtime/queries/nim/highlights.scm b/runtime/queries/nim/highlights.scm index 1d3256853..e02ba5165 100644 --- a/runtime/queries/nim/highlights.scm +++ b/runtime/queries/nim/highlights.scm @@ -1,33 +1,32 @@ ;; Constants, Comments, and Literals (comment) @comment.line -(multilineComment) @comment.block -(docComment) @comment.block.documentation -(multilineDocComment) @comment.block.documentation -; comments - -[(literal) (generalizedLit)] @constant -[(nil_lit)] @constant.builtin -[(bool_lit)] @constant.builtin.boolean -[(char_lit)] @constant.character -[(char_esc_seq) (str_esc_seq)] @constant.character.escape -[(custom_numeric_lit)] @constant.numeric -[(int_lit) (int_suffix)] @constant.numeric.integer -[(float_lit) (float_suffix)] @constant.numeric.float +(block_comment) @comment.block +[ + (documentation_comment) + (block_documentation_comment) +] @comment.block.documentation + +(nil_literal) @constant.builtin +((identifier) @constant.builtin.boolean + (#any-of? @constant.builtin.boolean "true" "false" "on" "off")) + +(char_literal) @constant.character +(escape_sequence) @constant.character.escape +(custom_numeric_literal) @constant.numeric +(integer_literal) @constant.numeric.integer +(float_literal) @constant.numeric.float ; literals -; note: somewhat irritatingly for testing, lits have the same syntax highlighting as types +; todo: literal? [ - (str_lit) - (triplestr_lit) - (rstr_lit) - (generalized_str_lit) - (generalized_triplestr_lit) - (interpolated_str_lit) - (interpolated_triplestr_lit) + (long_string_literal) + (raw_string_literal) + (generalized_string) + (interpreted_string_literal) ] @string +; (generalized_string (string_content) @none) ; todo: attempt to un-match string_content ; [] @string.regexp -; string literals [ "." @@ -44,272 +43,291 @@ "}" "{." ".}" - "#[" - "]#" ] @punctuation.bracket -(interpolated_str_lit "&" @punctuation.special) -(interpolated_str_lit "{" @punctuation.special) -(interpolated_str_lit "}" @punctuation.special) -; punctuation +; todo: interpolated_str_lit?? & { }? [ "and" "or" "xor" "not" - "in" - "notin" - "is" - "isnot" "div" "mod" "shl" "shr" + "from" + "as" + "of" + "in" + "notin" + "is" + "isnot" ] @keyword.operator -; operators: we list them explicitly to deliminate them from symbolic operators - -[(operator) (opr) "="] @operator -; all operators (must come after @keyword.operator) -(pragma) @attribute -; pragmas +[(operator) "="] @operator +(infix_expression operator: _ @operator) +(prefix_expression operator: _ @operator) +(pragma_list + (identifier)? @attribute + (colon_expression + (identifier) @attribute)?) ;; Imports and Exports -(importStmt - (keyw) @keyword.control.import - (expr (primary (symbol) @namespace))? - (expr (primary (arrayConstr (exprColonExprList (exprColonExpr (expr (primary (symbol) @namespace)))))))?) -(exportStmt - (keyw) @keyword.control.import - (expr (primary (symbol) @namespace))? - (expr (primary (arrayConstr (exprColonExprList (exprColonExpr (expr (primary (symbol) @namespace)))))))?) -(fromStmt - (keyw) @keyword.control.import - (expr (primary (symbol) @namespace))? - (expr (primary (arrayConstr (exprColonExprList (exprColonExpr (expr (primary (symbol) @namespace)))))))?) 
-(includeStmt - (keyw) @keyword.control.import - (expr (primary (symbol) @namespace))? - (expr (primary (arrayConstr (exprColonExprList (exprColonExpr (expr (primary (symbol) @namespace)))))))?) -(importExceptStmt - (keyw) @keyword.control.import - (expr (primary (symbol) @namespace))? - (expr (primary (arrayConstr (exprColonExprList (exprColonExpr (expr (primary (symbol) @namespace)))))))?) -; import statements -; yeah, this is a bit gross. - +[ + "import" + "export" + "include" + "from" +] @keyword.control.import + +(import_statement + [ + (identifier) @namespace + (expression_list (identifier) @namespace) + (except_clause + "except" @keyword.control.import + (expression_list (identifier) @namespace))]) +(import_from_statement + (identifier) @namespace + (expression_list (identifier) @namespace)) +(include_statement (expression_list (identifier) @namespace)) +(export_statement (expression_list (identifier) @namespace)) ;; Control Flow -(ifStmt (keyw) @keyword.control.conditional) -(whenStmt (keyw) @keyword.control.conditional) -(elifStmt (keyw) @keyword.control.conditional) -(elseStmt (keyw) @keyword.control.conditional) -(caseStmt (keyw) @keyword.control.conditional) -(ofBranch (keyw) @keyword.control.conditional) -(inlineIfStmt (keyw) @keyword.control.conditional) -(inlineWhenStmt (keyw) @keyword.control.conditional) +[ + "if" + "when" + "case" + "elif" + "else" +] @keyword.control.conditional +(of_branch "of" @keyword.control.conditional) ; conditional statements ; todo: do block -(forStmt - . (keyw) @keyword.control.repeat - . (symbol) @variable - . (keyw) @keyword.control.repeat) -(whileStmt (keyw) @keyword.control.repeat) -; loop statements - -(returnStmt (keyw) @keyword.control.repeat) -(yieldStmt (keyw) @keyword.control.repeat) -(discardStmt (keyw) @keyword.control.repeat) -(breakStmt (keyw) @keyword.control.repeat) -(continueStmt (keyw) @keyword.control.repeat) -; control flow statements - -(raiseStmt (keyw) @keyword.control.exception) -(tryStmt (keyw) @keyword.control.exception) -(tryExceptStmt (keyw) @keyword.control.exception) -(tryFinallyStmt (keyw) @keyword.control.exception) -(inlineTryStmt (keyw) @keyword.control.exception) -; (inlineTryExceptStmt (keyw) @keyword.control.exception) -; (inlineTryFinallyStmt (keyw) @keyword.control.exception) -; exception handling statements +"block" @keyword.control +(block label: (_) @label) -(staticStmt (keyw) @keyword) -(deferStmt (keyw) @keyword) -(asmStmt (keyw) @keyword) -(bindStmt (keyw) @keyword) -(mixinStmt (keyw) @keyword) -; miscellaneous blocks +[ + "for" + "while" + "continue" + "break" +] @keyword.control.repeat +(for "in" @keyword.control.repeat) -(blockStmt - (keyw) @keyword.control - (symbol) @label) -; block statements +[ + "return" + "yield" +] @keyword.control.return +; return statements +[ + "try" + "except" + "finally" + "raise" +] @keyword.control.exception +; exception handling statements -;; Types and Type Declarations +[ + "asm" + "bind" + "mixin" + "defer" + "static" +] @keyword +; miscellaneous keywords -(typeDef - (keyw) @keyword.storage.type - (symbol) @type) -; names of new types type declarations - -(exprColonEqExpr - . (expr (primary (symbol) @variable)) - . (expr (primary (symbol) @type))) -; variables in inline tuple declarations - -(primarySuffix - (indexSuffix - (exprColonEqExprList - (exprColonEqExpr - (expr - (primary - (symbol) @type)))))) -; nested types in brackets, i.e. 
seq[string] - -(primaryTypeDef (symbol) @type) -; primary types of type declarations (NOT nested types) - -(primaryTypeDef (primaryPrefix (keyw) @type)) -; for consistency - -(primaryTypeDesc (symbol) @type) -; type annotations, on declarations or in objects - -(primaryTypeDesc (primaryPrefix (keyw) @type)) -; var types etc - -(genericParamList (genericParam (symbol) @type)) -; types in generic blocks - -(enumDecl (keyw) @keyword.storage.type) -(enumElement (symbol) @type.enum.variant) -; enum declarations and elements - -(tupleDecl (keyw) @keyword.storage.type) -; tuple declarations - -(objectDecl (keyw) @keyword.storage.type) -(objectPart (symbol) @variable.other.member) -; object declarations and fields - -(objectCase - (keyw) @keyword.control.conditional - (symbol) @variable.other.member) -(objectBranch (keyw) @keyword.control.conditional) -(objectElif (keyw) @keyword.control.conditional) -(objectElse (keyw) @keyword.control.conditional) -(objectWhen (keyw) @keyword.control.conditional) -; variant objects - -(conceptDecl (keyw) @keyword.storage.type) -(conceptParam (keyw) @type) -(conceptParam (symbol) @variable) -; concept declarations, parameters, and qualifiers on those parameters - -((expr - (primary (symbol)) - (operator) @operator - (primary (symbol) @type)) - (#match? @operator "is")) -((exprStmt - (primary (symbol)) - (operator) @operator - (primary (symbol) @type)) - (#match? @operator "is")) -; symbols likely to be types: "x is t" means t is either a type or a type variable - -; distinct? +;; Types and Type Declarations +[ + "let" + "var" + "const" + "type" + "object" + "tuple" + "enum" + "concept" +] @keyword.storage.type + +(var_type "var" @keyword.storage.modifier) +(out_type "out" @keyword.storage.modifier) +(distinct_type "distinct" @keyword.storage.modifier) +(ref_type "ref" @keyword.storage.modifier) +(pointer_type "ptr" @keyword.storage.modifier) + +(var_parameter "var" @keyword.storage.modifier) +(type_parameter "type" @keyword.storage.modifier) +(static_parameter "static" @keyword.storage.modifier) +(ref_parameter "ref" @keyword.storage.modifier) +(pointer_parameter "ptr" @keyword.storage.modifier) +; (var_parameter (identifier) @variable.parameter) +; (type_parameter (identifier) @variable.parameter) +; (static_parameter (identifier) @variable.parameter) +; (ref_parameter (identifier) @variable.parameter) +; (pointer_parameter (identifier) @variable.parameter) +; todo: when are these used?? + +(type_section + (type_declaration + (type_symbol_declaration + name: (_) @type))) +; types in type declarations + +(enum_field_declaration + (symbol_declaration + name: (_) @type.enum.variant)) +; types as enum variants + +(variant_declaration + alternative: (of_branch + values: (expression_list (_) @type.enum.variant))) +; types as object variants + +(case + (of_branch + values: (expression_list (_) @constant))) +; case values are guaranteed to be constant + +(type_expression + [ + (identifier) @type + (bracket_expression + [ + (identifier) @type + (argument_list (identifier) @type)]) + (tuple_construction + [ + (identifier) @type + (bracket_expression + [ + (identifier) @type + (argument_list (identifier) @type)])])]) +; types in type expressions + +(call + function: (bracket_expression + right: (argument_list (identifier) @type))) +; types as generic parameters + +; (dot_generic_call +; generic_arguments: (_) @type) +; ??? 
+ +(infix_expression + operator: + [ + "is" + "isnot" + ] + right: (_) @type) +; types in "is" comparisions + +(except_branch + values: (expression_list + [ + (identifier) @type + (infix_expression + left: (identifier) @type + operator: "as" + right: (_) @variable)])) +; types in exception branches ;; Functions -(routine - . (keyw) @keyword.function - . (symbol) @function) -; function declarations - -(routineExpr (keyw) @keyword.function) -; discarded function - -(routineExprTypeDesc (keyw) @keyword.function) -; function declarations as types - -(primary - . (symbol) @function.call - . (primarySuffix (functionCall))) -; regular function calls - -(primary - . (symbol) @function.call - . (primarySuffix (cmdCall))) -; function calls without parenthesis - -(primary - (primarySuffix (qualifiedSuffix (symbol) @function.call)) - . (primarySuffix (functionCall))) -; uniform function call syntax calls - -(primary - (primarySuffix (qualifiedSuffix (symbol) @function.call)) - . (primarySuffix (cmdCall))) -; just in case - -(primary - (symbol) @constructor - (primarySuffix (objectConstr))) -; object constructor - -; does not appear to be a way to distinguish these without verbatium matching -; [] @function.builtin -; [] @function.method -; [] @function.macro -; [] @function.special - +[ + "proc" + "func" + "method" + "converter" + "iterator" + "template" + "macro" +] @keyword.function + +(exported_symbol "*" @attribute) +(_ "=" @punctuation.delimiter [body: (_) value: (_)]) + +(proc_declaration name: (_) @function) +(func_declaration name: (_) @function) +(iterator_declaration name: (_) @function) +(converter_declaration name: (_) @function) +(method_declaration name: (_) @function.method) +(template_declaration name: (_) @function.macro) +(macro_declaration name: (_) @function.macro) +(symbol_declaration name: (_) @variable) + +(call + function: [ + (identifier) @function.call + (dot_expression + right: (identifier) @function.call) + (bracket_expression + left: [ + (identifier) @function.call + (dot_expression + right: (identifier) @function.call)])]) +(generalized_string + function: [ + (identifier) @function.call + (dot_expression + right: (identifier) @function.call) + (bracket_expression + left: [ + (identifier) @function.call + (dot_expression + right: (identifier) @function.call)])]) +(dot_generic_call function: (_) @function.call) ;; Variables -(paramList (paramColonEquals (symbol) @variable.parameter)) -; parameter identifiers - -(identColon (ident) @variable.other.member) -; named parts of tuples - -(symbolColonExpr (symbol) @variable) -; object constructor parameters - -(symbolEqExpr (symbol) @variable) -; named parameters +(parameter_declaration + (symbol_declaration_list + (symbol_declaration + name: (_) @variable.parameter))) +(argument_list + (equal_expression + left: (_) @variable.parameter)) +(concept_declaration + parameters: (parameter_list (identifier) @variable.parameter)) + +(field_declaration + (symbol_declaration_list + (symbol_declaration + name: (_) @variable.other.member))) +(call + (argument_list + (colon_expression + left: (_) @variable.other.member))) +(tuple_construction + (colon_expression + left: (_) @variable.other.member)) +(variant_declaration + (variant_discriminator_declaration + (symbol_declaration_list + (symbol_declaration + name: (_) @variable.other.member)))) + +;; Miscellaneous Matches -(variable - (keyw) @keyword.storage.type - (declColonEquals (symbol) @variable)) -; let, var, const expressions - -((primary (symbol) @variable.builtin) - (#match? 
@variable.builtin "result")) -; `result` is an implicit builtin variable inside function scopes - -((primary (symbol) @type) - (#match? @type "^[A-Z]")) -; assume PascalCase identifiers to be types - -((primary - (primarySuffix - (qualifiedSuffix - (symbol) @type))) - (#match? @type "^[A-Z]")) -; assume PascalCase member variables to be enum entries +[ + "cast" + "discard" + "do" +] @keyword +; also: addr end interface using -(primary (symbol) @variable) -; overzealous, matches variables +(blank_identifier) @variable.builtin +((identifier) @variable.builtin + (#eq? @variable.builtin "result")) -(primary (primarySuffix (qualifiedSuffix (symbol) @variable.other.member))) -; overzealous, matches member variables: i.e. x in foo.x +(dot_expression + left: (identifier) @variable + right: (identifier) @variable.other.member) -(keyw) @keyword -; more specific matches are done above whenever possible +(identifier) @variable diff --git a/runtime/queries/nim/indents.scm b/runtime/queries/nim/indents.scm index 677435407..3b3023868 100644 --- a/runtime/queries/nim/indents.scm +++ b/runtime/queries/nim/indents.scm @@ -1,48 +1,59 @@ [ - (typeDef) - (ifStmt) - (whenStmt) - (elifStmt) - (elseStmt) - (ofBranch) ; note: not caseStmt - (whileStmt) - (tryStmt) - (tryExceptStmt) - (tryFinallyStmt) - (forStmt) - (blockStmt) - (staticStmt) - (deferStmt) - (asmStmt) - ; exprStmt? + (if) + (when) + (elif_branch) + (else_branch) + (of_branch) ; note: not case_statement + (block) + (while) + (for) + (try) + (except_branch) + (finally_branch) + (defer) + (static_statement) + (proc_declaration) + (func_declaration) + (iterator_declaration) + (converter_declaration) + (method_declaration) + (template_declaration) + (macro_declaration) + (symbol_declaration) ] @indent ;; increase the indentation level [ - (ifStmt) - (whenStmt) - (elifStmt) - (elseStmt) - (ofBranch) ; note: not caseStmt - (whileStmt) - (tryStmt) - (tryExceptStmt) - (tryFinallyStmt) - (forStmt) - (blockStmt) - (staticStmt) - (deferStmt) - (asmStmt) - ; exprStmt? + (if) + (when) + (elif_branch) + (else_branch) + (of_branch) ; note: not case_statement + (block) + (while) + (for) + (try) + (except_branch) + (finally_branch) + (defer) + (static_statement) + (proc_declaration) + (func_declaration) + (iterator_declaration) + (converter_declaration) + (method_declaration) + (template_declaration) + (macro_declaration) + (symbol_declaration) ] @extend ;; ??? [ - (returnStmt) - (raiseStmt) - (yieldStmt) - (breakStmt) - (continueStmt) + (return_statement) + (raise_statement) + (yield_statement) + (break_statement) + (continue_statement) ] @extend.prevent-once ;; end a level of indentation while staying indented diff --git a/runtime/queries/nim/textobjects.scm b/runtime/queries/nim/textobjects.scm index 943aa7f08..eaa3e8e8c 100644 --- a/runtime/queries/nim/textobjects.scm +++ b/runtime/queries/nim/textobjects.scm @@ -1,19 +1,33 @@ -(routine - (block) @function.inside) @function.around +(proc_declaration + body: (_) @function.inside) @function.around +(func_declaration + body: (_) @function.inside) @function.around +(iterator_declaration + body: (_) @function.inside) @function.around +(converter_declaration + body: (_) @function.inside) @function.around +(method_declaration + body: (_) @function.inside) @function.around +(template_declaration + body: (_) @function.inside) @function.around +(macro_declaration + body: (_) @function.inside) @function.around -; @class.inside (types?) 
-; @class.around +(type_declaration (_) @class.inside) @class.around -; paramListSuffix is strange and i do not understand it -(paramList - (paramColonEquals) @parameter.inside) @parameter.around +(parameter_declaration + (symbol_declaration_list) @parameter.inside) @parameter.around -(comment) @comment.inside -(multilineComment) @comment.inside -(docComment) @comment.inside -(multilineDocComment) @comment.inside +[ + (comment) + (block_comment) + (documentation_comment) + (block_documentation_comment) +] @comment.inside -(comment)+ @comment.around -(multilineComment) @comment.around -(docComment)+ @comment.around -(multilineDocComment) @comment.around +[ + (comment)+ + (block_comment) + (documentation_comment)+ + (block_documentation_comment)+ +] @comment.around From d0bb77447138f5f70f96b174a8f29045a956c8c4 Mon Sep 17 00:00:00 2001 From: Keir Lawson Date: Thu, 29 Feb 2024 10:09:29 +0000 Subject: [PATCH 45/60] Mark GTK builder ui files as XML (#9754) --- languages.toml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/languages.toml b/languages.toml index d2a183ded..a2c10d114 100644 --- a/languages.toml +++ b/languages.toml @@ -2470,7 +2470,8 @@ file-types = [ "xul", "xoml", "musicxml", - "glif" + "glif", + "ui" ] block-comment-tokens = { start = "" } indent = { tab-width = 2, unit = " " } From 44db25939c9361272660854878eb2fc18fcf08e8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Dawid=20Ci=C4=99=C5=BCarkiewicz?= Date: Thu, 29 Feb 2024 17:57:31 -0800 Subject: [PATCH 46/60] Document embracing smart-tab navigation. (#9762) Re #4443 --- book/src/configuration.md | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/book/src/configuration.md b/book/src/configuration.md index de33c1ade..d87936457 100644 --- a/book/src/configuration.md +++ b/book/src/configuration.md @@ -375,8 +375,25 @@ wrap-indicator = "" # set wrap-indicator to "" to hide it ### `[editor.smart-tab]` Section +Options for navigating and editing using tab key. | Key | Description | Default | |------------|-------------|---------| | `enable` | If set to true, then when the cursor is in a position with non-whitespace to its left, instead of inserting a tab, it will run `move_parent_node_end`. If there is only whitespace to the left, then it inserts a tab as normal. With the default bindings, to explicitly insert a tab character, press Shift-tab. | `true` | | `supersede-menu` | Normally, when a menu is on screen, such as when auto complete is triggered, the tab key is bound to cycling through the items. This means when menus are on screen, one cannot use the tab key to trigger the `smart-tab` command. If this option is set to true, the `smart-tab` command always takes precedence, which means one cannot use the tab key to cycle through menu items. One of the other bindings must be used instead, such as arrow keys or `C-n`/`C-p`. | `false` | + + +Due to lack of support for S-tab in some terminals, the default keybindings don't fully embrace smart-tab editing experience. 
If you enjoy smart-tab navigation and a terminal that supports the [Enhanced Keyboard protocol](https://github.com/helix-editor/helix/wiki/Terminal-Support#enhanced-keyboard-protocol), consider setting extra keybindings: + +``` +[keys.normal] +tab = "move_parent_node_end" +S-tab = "move_parent_node_start" + +[keys.insert] +S-tab = "move_parent_node_start" + +[keys.select] +tab = "extend_parent_node_end" +S-tab = "extend_parent_node_start" +``` From 062fb819a21a3b17baf0cded3463a2d9f3e6b4a9 Mon Sep 17 00:00:00 2001 From: Felix Zeller Date: Fri, 1 Mar 2024 10:10:49 -0500 Subject: [PATCH 47/60] feat: Add markdown-oxide language server (#9758) --- book/src/generated/lang-support.md | 2 +- languages.toml | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/book/src/generated/lang-support.md b/book/src/generated/lang-support.md index 1bc6b0817..c9668549e 100644 --- a/book/src/generated/lang-support.md +++ b/book/src/generated/lang-support.md @@ -104,7 +104,7 @@ | lua | ✓ | ✓ | ✓ | `lua-language-server` | | make | ✓ | | ✓ | | | markdoc | ✓ | | | `markdoc-ls` | -| markdown | ✓ | | | `marksman` | +| markdown | ✓ | | | `marksman`, `markdown-oxide` | | markdown.inline | ✓ | | | | | matlab | ✓ | ✓ | ✓ | | | mermaid | ✓ | | | | diff --git a/languages.toml b/languages.toml index a2c10d114..26f6509ce 100644 --- a/languages.toml +++ b/languages.toml @@ -45,6 +45,7 @@ kotlin-language-server = { command = "kotlin-language-server" } lean = { command = "lean", args = [ "--server" ] } ltex-ls = { command = "ltex-ls" } markdoc-ls = { command = "markdoc-ls", args = ["--stdio"] } +markdown-oxide = { command = "markdown-oxide" } marksman = { command = "marksman", args = ["server"] } metals = { command = "metals", config = { "isHttpEnabled" = true } } mint = { command = "mint", args = ["ls"] } @@ -1440,7 +1441,7 @@ scope = "source.md" injection-regex = "md|markdown" file-types = ["md", "markdown", "mkd", "mdwn", "mdown", "markdn", "mdtxt", "mdtext", "workbook", { glob = "PULLREQ_EDITMSG" }] roots = [".marksman.toml"] -language-servers = [ "marksman" ] +language-servers = [ "marksman", "markdown-oxide" ] indent = { tab-width = 2, unit = " " } block-comment-tokens = { start = "" } From 1d6db30acf91ec1041e014650bf263defdc3feee Mon Sep 17 00:00:00 2001 From: Marcin Drzymala <5504827+drzymalanet@users.noreply.github.com> Date: Sat, 2 Mar 2024 03:05:17 +0100 Subject: [PATCH 48/60] Fix bug 9703 by commenting out the wrong command (#9778) * Fix bug 9703 by commenting out the wrong command This fixes issue https://github.com/helix-editor/helix/issues/9703 by removing the wrong formatting command for justfiles. 
* Fix indentation width for justfile --- languages.toml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/languages.toml b/languages.toml index 26f6509ce..a4b6a5cd8 100644 --- a/languages.toml +++ b/languages.toml @@ -2915,9 +2915,9 @@ scope = "source.just" file-types = [{ glob = "justfile" }, { glob = "Justfile" }, { glob = ".justfile" }, { glob = ".Justfile" }] injection-regex = "just" comment-token = "#" -indent = { tab-width = 4, unit = "\t" } -auto-format = true -formatter = { command = "just", args = ["--dump"] } +indent = { tab-width = 4, unit = " " } +# auto-format = true +# formatter = { command = "just", args = ["--dump"] } # Please see: https://github.com/helix-editor/helix/issues/9703 [[grammar]] name = "just" From 5ca6a448e9b66f4f5b4caa7cd173252d0a78f92d Mon Sep 17 00:00:00 2001 From: Michael Davis Date: Fri, 1 Mar 2024 23:37:11 -0500 Subject: [PATCH 49/60] Support LSP diagnostic tags (#9780) --- book/src/themes.md | 2 ++ helix-lsp/src/client.rs | 6 ++++++ helix-term/src/ui/editor.rs | 15 ++++++++++++++- theme.toml | 2 ++ 4 files changed, 24 insertions(+), 1 deletion(-) diff --git a/book/src/themes.md b/book/src/themes.md index f040dfb19..04d6a69b3 100644 --- a/book/src/themes.md +++ b/book/src/themes.md @@ -333,5 +333,7 @@ These scopes are used for theming the editor interface: | `diagnostic.info` | Diagnostics info (editing area) | | `diagnostic.warning` | Diagnostics warning (editing area) | | `diagnostic.error` | Diagnostics error (editing area) | +| `diagnostic.unnecessary` | Diagnostics with unnecessary tag (editing area) | +| `diagnostic.deprecated` | Diagnostics with deprecated tag (editing area) | [editor-section]: ./configuration.md#editor-section diff --git a/helix-lsp/src/client.rs b/helix-lsp/src/client.rs index 8d03d7992..a7b3989dd 100644 --- a/helix-lsp/src/client.rs +++ b/helix-lsp/src/client.rs @@ -631,6 +631,12 @@ impl Client { }), publish_diagnostics: Some(lsp::PublishDiagnosticsClientCapabilities { version_support: Some(true), + tag_support: Some(lsp::TagSupport { + value_set: vec![ + lsp::DiagnosticTag::UNNECESSARY, + lsp::DiagnosticTag::DEPRECATED, + ], + }), ..Default::default() }), inlay_hint: Some(lsp::InlayHintClientCapabilities { diff --git a/helix-term/src/ui/editor.rs b/helix-term/src/ui/editor.rs index dffaeea03..f3bba5d1c 100644 --- a/helix-term/src/ui/editor.rs +++ b/helix-term/src/ui/editor.rs @@ -360,7 +360,7 @@ impl EditorView { doc: &Document, theme: &Theme, ) -> [Vec<(usize, std::ops::Range)>; 5] { - use helix_core::diagnostic::Severity; + use helix_core::diagnostic::{DiagnosticTag, Severity}; let get_scope_of = |scope| { theme .find_scope_index_exact(scope) @@ -380,6 +380,10 @@ impl EditorView { let error = get_scope_of("diagnostic.error"); let r#default = get_scope_of("diagnostic"); // this is a bit redundant but should be fine + // Diagnostic tags + let unnecessary = theme.find_scope_index_exact("diagnostic.unnecessary"); + let deprecated = theme.find_scope_index_exact("diagnostic.deprecated"); + let mut default_vec: Vec<(usize, std::ops::Range)> = Vec::new(); let mut info_vec = Vec::new(); let mut hint_vec = Vec::new(); @@ -396,6 +400,15 @@ impl EditorView { _ => (&mut default_vec, r#default), }; + let scope = diagnostic + .tags + .first() + .and_then(|tag| match tag { + DiagnosticTag::Unnecessary => unnecessary, + DiagnosticTag::Deprecated => deprecated, + }) + .unwrap_or(scope); + // If any diagnostic overlaps ranges with the prior diagnostic, // merge the two together. Otherwise push a new span. 
match vec.last_mut() { diff --git a/theme.toml b/theme.toml index dd1a5d889..8a5bfd72d 100644 --- a/theme.toml +++ b/theme.toml @@ -80,6 +80,8 @@ label = "honey" "diagnostic.info" = { underline = { color = "delta", style = "curl" } } "diagnostic.warning" = { underline = { color = "lightning", style = "curl" } } "diagnostic.error" = { underline = { color = "apricot", style = "curl" } } +"diagnostic.unnecessary" = { modifiers = ["dim"] } +"diagnostic.deprecated" = { modifiers = ["crossed_out"] } warning = "lightning" error = "apricot" From f04dafa2e23e30771db92fdf6f39fcd1f0f5d0d6 Mon Sep 17 00:00:00 2001 From: Malpha Date: Sat, 2 Mar 2024 07:47:10 +0000 Subject: [PATCH 50/60] languages.toml: add elvish shebang (#9779) --- languages.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/languages.toml b/languages.toml index a4b6a5cd8..7d859c2e4 100644 --- a/languages.toml +++ b/languages.toml @@ -2157,6 +2157,7 @@ grammar = "python" [[language]] name = "elvish" scope = "source.elvish" +shebangs = ["elvish"] file-types = ["elv"] comment-token = "#" indent = { tab-width = 2, unit = " " } From d769fadde085169c26a850966a6d5d8da7cc1c12 Mon Sep 17 00:00:00 2001 From: Michael Davis Date: Sat, 2 Mar 2024 02:47:24 -0500 Subject: [PATCH 51/60] Fix precedence of svelte typescript injection (#9777) --- runtime/queries/svelte/injections.scm | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/runtime/queries/svelte/injections.scm b/runtime/queries/svelte/injections.scm index 65a6e0e41..52d430c3f 100644 --- a/runtime/queries/svelte/injections.scm +++ b/runtime/queries/svelte/injections.scm @@ -19,13 +19,6 @@ (quoted_attribute_value (attribute_value) @css)) (#eq? @_attr "style")) -((script_element - (raw_text) @injection.content) - (#set! injection.language "javascript")) - -((raw_text_expr) @injection.content - (#set! injection.language "javascript")) - ( (script_element (start_tag @@ -36,5 +29,12 @@ (#set! injection.language "typescript") ) +((script_element + (raw_text) @injection.content) + (#set! injection.language "javascript")) + +((raw_text_expr) @injection.content + (#set! injection.language "javascript")) + ((comment) @injection.content (#set! injection.language "comment")) From 5bd007266a962a534bd722619821e998735b71e2 Mon Sep 17 00:00:00 2001 From: Mike Trinkala Date: Sat, 2 Mar 2024 06:05:58 -0800 Subject: [PATCH 52/60] Fix panic when using join_selections_space (#9783) Joining lines with Alt-J does not properly select the inserted spaces when the selection contains blank lines. In the worst case it panics with an out of bounds index. 
thread 'main' panicked at 'called `Result::unwrap()` on an `Err` value: Char index out of bounds: char index 11, Rope/RopeSlice char length 10' Steps to reproduce: * Create a new document ``` a b c d e ``` * % (Select all) * Alt-J (join and select the spaces) --- helix-term/src/commands.rs | 23 ++++++--- helix-term/tests/test/commands.rs | 83 +++++++++++++++++++++++++++++++ 2 files changed, 100 insertions(+), 6 deletions(-) diff --git a/helix-term/src/commands.rs b/helix-term/src/commands.rs index bd0a60b7c..0b2ea0b8a 100644 --- a/helix-term/src/commands.rs +++ b/helix-term/src/commands.rs @@ -4372,16 +4372,27 @@ fn join_selections_impl(cx: &mut Context, select_space: bool) { // select inserted spaces let transaction = if select_space { + let mut offset: usize = 0; let ranges: SmallVec<_> = changes .iter() - .scan(0, |offset, change| { - let range = Range::point(change.0 - *offset); - *offset += change.1 - change.0 - 1; // -1 because cursor is 0-sized - Some(range) + .filter_map(|change| { + if change.2.is_some() { + let range = Range::point(change.0 - offset); + offset += change.1 - change.0 - 1; // -1 adjusts for the replacement of the range by a space + Some(range) + } else { + offset += change.1 - change.0; + None + } }) .collect(); - let selection = Selection::new(ranges, 0); - Transaction::change(text, changes.into_iter()).with_selection(selection) + let t = Transaction::change(text, changes.into_iter()); + if ranges.is_empty() { + t + } else { + let selection = Selection::new(ranges, 0); + t.with_selection(selection) + } } else { Transaction::change(text, changes.into_iter()) }; diff --git a/helix-term/tests/test/commands.rs b/helix-term/tests/test/commands.rs index e52b142c6..1172a7981 100644 --- a/helix-term/tests/test/commands.rs +++ b/helix-term/tests/test/commands.rs @@ -526,3 +526,86 @@ async fn test_join_selections() -> anyhow::Result<()> { Ok(()) } + +#[tokio::test(flavor = "multi_thread")] +async fn test_join_selections_space() -> anyhow::Result<()> { + // join with empty lines panic + test(( + platform_line(indoc! {"\ + #[a + + b + + c + + d + + e|]# + "}), + "", + platform_line(indoc! {"\ + a#[ |]#b#( |)#c#( |)#d#( |)#e + "}), + )) + .await?; + + // normal join + test(( + platform_line(indoc! {"\ + #[a|]#bc + def + "}), + "", + platform_line(indoc! {"\ + abc#[ |]#def + "}), + )) + .await?; + + // join with empty line + test(( + platform_line(indoc! {"\ + #[a|]#bc + + def + "}), + "", + platform_line(indoc! {"\ + #[a|]#bc + def + "}), + )) + .await?; + + // join with additional space in non-empty line + test(( + platform_line(indoc! {"\ + #[a|]#bc + + def + "}), + "", + platform_line(indoc! {"\ + abc#[ |]#def + "}), + )) + .await?; + + // join with retained trailing spaces + test(( + platform_line(indoc! {"\ + #[aaa + + bb + + c |]# + "}), + "", + platform_line(indoc! {"\ + aaa #[ |]#bb #( |)#c + "}), + )) + .await?; + + Ok(()) +} From 9267343830228490d379c90537ff1a6e4bba1260 Mon Sep 17 00:00:00 2001 From: Mike Trinkala Date: Sun, 3 Mar 2024 09:55:09 -0800 Subject: [PATCH 53/60] Fix panic when using surround_replace/delete (#9796) 1. Create a document containing `{A}` 1. C-w v # vsplit 1. gl # goto_line_end 1. b # move_prev_word_start 1. ` # switch_to_lowercase 1. mrm( # surround replace 1. C-w v # vsplit In the debug build surround_replace/delete will immedately assert with `assertion failed: last <= from', transaction.rs:597:13`. The splits and lowercase conversion are not needed to trigger the bug. 
In the release build the surround becomes `)a(` and the last vsplit causes the transaction to panic. `internal error: entered unreachable code: (Some(Retain(18446744073709551573)))', transaction.rs:185:46` Since the selection direction is backwards get_surround_pos returns the pairs reversed but the downstream code assumes they are in the forward direction. --- helix-core/src/surround.rs | 3 +- helix-term/tests/test/movement.rs | 54 +++++++++++++++++++++++++++++++ 2 files changed, 56 insertions(+), 1 deletion(-) diff --git a/helix-core/src/surround.rs b/helix-core/src/surround.rs index b96cce5a0..513f87493 100644 --- a/helix-core/src/surround.rs +++ b/helix-core/src/surround.rs @@ -260,7 +260,8 @@ pub fn get_surround_pos( if change_pos.contains(&open_pos) || change_pos.contains(&close_pos) { return Err(Error::CursorOverlap); } - change_pos.extend_from_slice(&[open_pos, close_pos]); + // ensure the positions are always paired in the forward direction + change_pos.extend_from_slice(&[open_pos.min(close_pos), close_pos.max(open_pos)]); } Ok(change_pos) } diff --git a/helix-term/tests/test/movement.rs b/helix-term/tests/test/movement.rs index e3c2668da..0873edbe5 100644 --- a/helix-term/tests/test/movement.rs +++ b/helix-term/tests/test/movement.rs @@ -552,3 +552,57 @@ async fn find_char_line_ending() -> anyhow::Result<()> { Ok(()) } + +#[tokio::test(flavor = "multi_thread")] +async fn test_surround_replace() -> anyhow::Result<()> { + test(( + platform_line(indoc! {"\ + (#[|a]#) + "}), + "mrm{", + platform_line(indoc! {"\ + {#[|a]#} + "}), + )) + .await?; + + test(( + platform_line(indoc! {"\ + (#[a|]#) + "}), + "mrm{", + platform_line(indoc! {"\ + {#[a|]#} + "}), + )) + .await?; + + Ok(()) +} + +#[tokio::test(flavor = "multi_thread")] +async fn test_surround_delete() -> anyhow::Result<()> { + test(( + platform_line(indoc! {"\ + (#[|a]#) + "}), + "mdm", + platform_line(indoc! {"\ + #[|a]# + "}), + )) + .await?; + + test(( + platform_line(indoc! {"\ + (#[a|]#) + "}), + "mdm", + platform_line(indoc! {"\ + #[a|]# + "}), + )) + .await?; + + Ok(()) +} From cc43e3521ed94e9d6e77c719c14073d3e7217c97 Mon Sep 17 00:00:00 2001 From: RoloEdits Date: Sun, 3 Mar 2024 09:56:18 -0800 Subject: [PATCH 54/60] feat(languages): add support for `*.Dockerfile` `file-types` naming convention (#9772) Current `file-types` only supports up to a `Dockerfile.frontend` naming scheme. With these changes `frontend.Dockerfile` now gives proper highlights and lsp actions. 
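The same naming convention can also be claimed from a user's own configuration. A minimal sketch, assuming the usual user-level `languages.toml` override location (the path and the exact pattern list below are illustrative, not part of this patch):

```toml
# Hypothetical user override in ~/.config/helix/languages.toml.
# A bare string in file-types is matched as a file extension, so "Dockerfile"
# also matches names such as frontend.Dockerfile, while the globs keep the
# conventional Dockerfile / Dockerfile.frontend spellings working.
[[language]]
name = "dockerfile"
file-types = ["Dockerfile", { glob = "Dockerfile" }, { glob = "Dockerfile.*" }]
```

A user-supplied `file-types` list appears to replace the built-in one rather than extend it, so an override should repeat every pattern that is still wanted.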
--- languages.toml | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/languages.toml b/languages.toml index 7d859c2e4..49672a30b 100644 --- a/languages.toml +++ b/languages.toml @@ -1494,7 +1494,20 @@ name = "dockerfile" scope = "source.dockerfile" injection-regex = "docker|dockerfile" roots = ["Dockerfile", "Containerfile"] -file-types = [{ glob = "Dockerfile*" }, { glob = "dockerfile*" }, { glob = "Containerfile*" }, { glob = "containerfile*" }] +file-types = [ + "Dockerfile", + { glob = "Dockerfile" }, + { glob = "Dockerfile.*" }, + "dockerfile", + { glob = "dockerfile" }, + { glob = "dockerfile.*" }, + "Containerfile", + { glob = "Containerfile" }, + { glob = "Containerfile.*" }, + "containerfile", + { glob = "containerfile" }, + { glob = "containerfile.*" }, +] comment-token = "#" indent = { tab-width = 2, unit = " " } language-servers = [ "docker-langserver" ] From 3f98891e7952a748f814e6741f4375c9b7aa0983 Mon Sep 17 00:00:00 2001 From: varris1 <38386180+varris1@users.noreply.github.com> Date: Tue, 5 Mar 2024 16:00:34 +0100 Subject: [PATCH 55/60] flake.lock: Bump flake inputs to prevent a warning message (#9816) --- flake.lock | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/flake.lock b/flake.lock index 9bb5dece1..48fb4a59f 100644 --- a/flake.lock +++ b/flake.lock @@ -7,11 +7,11 @@ ] }, "locked": { - "lastModified": 1701025348, - "narHash": "sha256-42GHmYH+GF7VjwGSt+fVT1CQuNpGanJbNgVHTAZppUM=", + "lastModified": 1709610799, + "narHash": "sha256-5jfLQx0U9hXbi2skYMGodDJkIgffrjIOgMRjZqms2QE=", "owner": "ipetkov", "repo": "crane", - "rev": "42afaeb1a0325194a7cdb526332d2cb92fddd07b", + "rev": "81c393c776d5379c030607866afef6406ca1be57", "type": "github" }, "original": { @@ -25,11 +25,11 @@ "systems": "systems" }, "locked": { - "lastModified": 1694529238, - "narHash": "sha256-zsNZZGTGnMOf9YpHKJqMSsa0dXbfmxeoJ7xHlrt+xmY=", + "lastModified": 1709126324, + "narHash": "sha256-q6EQdSeUZOG26WelxqkmR7kArjgWCdw5sfJVHPH/7j8=", "owner": "numtide", "repo": "flake-utils", - "rev": "ff7b65b44d01cf9ba6a71320833626af21126384", + "rev": "d465f4819400de7c8d874d50b982301f28a84605", "type": "github" }, "original": { @@ -40,11 +40,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1700794826, - "narHash": "sha256-RyJTnTNKhO0yqRpDISk03I/4A67/dp96YRxc86YOPgU=", + "lastModified": 1709479366, + "narHash": "sha256-n6F0n8UV6lnTZbYPl1A9q1BS0p4hduAv1mGAP17CVd0=", "owner": "nixos", "repo": "nixpkgs", - "rev": "5a09cb4b393d58f9ed0d9ca1555016a8543c2ac8", + "rev": "b8697e57f10292a6165a20f03d2f42920dfaf973", "type": "github" }, "original": { @@ -72,11 +72,11 @@ ] }, "locked": { - "lastModified": 1701137803, - "narHash": "sha256-0LcPAdql5IhQSUXJx3Zna0dYTgdIoYO7zUrsKgiBd04=", + "lastModified": 1709604635, + "narHash": "sha256-le4fwmWmjGRYWwkho0Gr7mnnZndOOe4XGbLw68OvF40=", "owner": "oxalica", "repo": "rust-overlay", - "rev": "9dd940c967502f844eacea52a61e9596268d4f70", + "rev": "e86c0fb5d3a22a5f30d7f64ecad88643fe26449d", "type": "github" }, "original": { From 7d8c86e4039551ebe754d9e3753b9b99e6fa6419 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 6 Mar 2024 11:08:07 +0900 Subject: [PATCH 56/60] build(deps): bump arc-swap from 1.6.0 to 1.7.0 (#9809) --- Cargo.lock | 4 ++-- helix-term/Cargo.toml | 2 +- helix-vcs/Cargo.toml | 2 +- helix-view/Cargo.toml | 2 +- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 08fa4789e..43213764d 100644 --- 
a/Cargo.lock +++ b/Cargo.lock @@ -68,9 +68,9 @@ checksum = "5ad32ce52e4161730f7098c077cd2ed6229b5804ccf99e5366be1ab72a98b4e1" [[package]] name = "arc-swap" -version = "1.6.0" +version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bddcadddf5e9015d310179a59bb28c4d4b9920ad0f11e8e14dbadf654890c9a6" +checksum = "7b3d0060af21e8d11a926981cc00c6c1541aa91dd64b9f881985c3da1094425f" [[package]] name = "autocfg" diff --git a/helix-term/Cargo.toml b/helix-term/Cargo.toml index 8c6ae9f42..accde567e 100644 --- a/helix-term/Cargo.toml +++ b/helix-term/Cargo.toml @@ -41,7 +41,7 @@ crossterm = { version = "0.27", features = ["event-stream"] } signal-hook = "0.3" tokio-stream = "0.1" futures-util = { version = "0.3", features = ["std", "async-await"], default-features = false } -arc-swap = { version = "1.6.0" } +arc-swap = { version = "1.7.0" } termini = "1" # Logging diff --git a/helix-vcs/Cargo.toml b/helix-vcs/Cargo.toml index 32aca4f09..874b92d03 100644 --- a/helix-vcs/Cargo.toml +++ b/helix-vcs/Cargo.toml @@ -17,7 +17,7 @@ helix-event = { path = "../helix-event" } tokio = { version = "1", features = ["rt", "rt-multi-thread", "time", "sync", "parking_lot", "macros"] } parking_lot = "0.12" -arc-swap = { version = "1.6.0" } +arc-swap = { version = "1.7.0" } gix = { version = "0.58.0", features = ["attributes"], default-features = false, optional = true } imara-diff = "0.1.5" diff --git a/helix-view/Cargo.toml b/helix-view/Cargo.toml index b1b444f90..fc209b00f 100644 --- a/helix-view/Cargo.toml +++ b/helix-view/Cargo.toml @@ -31,7 +31,7 @@ crossterm = { version = "0.27", optional = true } once_cell = "1.19" url = "2.5.0" -arc-swap = { version = "1.6.0" } +arc-swap = { version = "1.7.0" } tokio = { version = "1", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot"] } tokio-stream = "0.1" From ea0b2446441fd09b304e92581f644901018846d7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 6 Mar 2024 11:08:40 +0900 Subject: [PATCH 57/60] build(deps): bump libloading from 0.8.1 to 0.8.2 (#9810) --- Cargo.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 43213764d..fd498d9ad 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1603,12 +1603,12 @@ checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd" [[package]] name = "libloading" -version = "0.8.1" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c571b676ddfc9a8c12f1f3d3085a7b163966a8fd8098a90640953ce5f6170161" +checksum = "2caa5afb8bf9f3a2652760ce7d4f62d21c4d5a423e68466fca30df82f2330164" dependencies = [ "cfg-if", - "windows-sys 0.48.0", + "windows-targets 0.52.0", ] [[package]] From 7a473c74945bb7fd28abf9f23f9dfdd5562c7cd6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 6 Mar 2024 11:09:13 +0900 Subject: [PATCH 58/60] build(deps): bump ahash from 0.8.9 to 0.8.11 (#9813) --- Cargo.lock | 4 ++-- helix-core/Cargo.toml | 2 +- helix-event/Cargo.toml | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index fd498d9ad..a330e5872 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -19,9 +19,9 @@ checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" [[package]] name = "ahash" -version = "0.8.9" +version = "0.8.11" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "d713b3834d76b85304d4d525563c1276e2e30dc97cc67bfb4585a4a29fc2c89f" +checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" dependencies = [ "cfg-if", "getrandom", diff --git a/helix-core/Cargo.toml b/helix-core/Cargo.toml index be5ea5eb8..5e2dbd977 100644 --- a/helix-core/Cargo.toml +++ b/helix-core/Cargo.toml @@ -32,7 +32,7 @@ once_cell = "1.19" arc-swap = "1" regex = "1" bitflags = "2.4" -ahash = "0.8.9" +ahash = "0.8.11" hashbrown = { version = "0.14.3", features = ["raw"] } dunce = "1.0" diff --git a/helix-event/Cargo.toml b/helix-event/Cargo.toml index 8711568e8..616c323dc 100644 --- a/helix-event/Cargo.toml +++ b/helix-event/Cargo.toml @@ -12,7 +12,7 @@ homepage.workspace = true # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] -ahash = "0.8.9" +ahash = "0.8.11" hashbrown = "0.14.0" tokio = { version = "1", features = ["rt", "rt-multi-thread", "time", "sync", "parking_lot", "macros"] } # the event registry is essentially read only but must be an rwlock so we can From 4e5f19df53cbe1f1b1e9ea590415c5bd58642b6b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 6 Mar 2024 11:10:24 +0900 Subject: [PATCH 59/60] build(deps): bump clipboard-win from 5.1.0 to 5.2.0 (#9811) --- Cargo.lock | 4 ++-- helix-view/Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a330e5872..aa3700b93 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -180,9 +180,9 @@ dependencies = [ [[package]] name = "clipboard-win" -version = "5.1.0" +version = "5.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ec832972fefb8cf9313b45a0d1945e29c9c251f1d4c6eafc5fe2124c02d2e81" +checksum = "12f9a0700e0127ba15d1d52dd742097f821cd9c65939303a44d970465040a297" dependencies = [ "error-code", ] diff --git a/helix-view/Cargo.toml b/helix-view/Cargo.toml index fc209b00f..335779bc2 100644 --- a/helix-view/Cargo.toml +++ b/helix-view/Cargo.toml @@ -50,7 +50,7 @@ parking_lot = "0.12.1" [target.'cfg(windows)'.dependencies] -clipboard-win = { version = "5.1", features = ["std"] } +clipboard-win = { version = "5.2", features = ["std"] } [target.'cfg(unix)'.dependencies] libc = "0.2" From b93fae9c8b955e11f427979134e3494294e8e2e0 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 6 Mar 2024 11:10:59 +0900 Subject: [PATCH 60/60] build(deps): bump mio from 0.8.9 to 0.8.11 (#9808) --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index aa3700b93..fd74140a5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1690,9 +1690,9 @@ dependencies = [ [[package]] name = "mio" -version = "0.8.9" +version = "0.8.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3dce281c5e46beae905d4de1870d8b1509a9142b62eedf18b443b011ca8343d0" +checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c" dependencies = [ "libc", "log",