Merge remote-tracking branch 'origin/master' into goto_next_reference

pull/6465/head
Anthony Templeton 9 months ago
commit bb46d99fb7

@ -12,6 +12,7 @@ jobs:
check: check:
name: Check (msrv) name: Check (msrv)
runs-on: ubuntu-latest runs-on: ubuntu-latest
if: github.repository == 'helix-editor/helix' || github.event_name != 'schedule'
steps: steps:
- name: Checkout sources - name: Checkout sources
uses: actions/checkout@v4 uses: actions/checkout@v4
@ -31,6 +32,7 @@ jobs:
test: test:
name: Test Suite name: Test Suite
runs-on: ${{ matrix.os }} runs-on: ${{ matrix.os }}
if: github.repository == 'helix-editor/helix' || github.event_name != 'schedule'
env: env:
RUST_BACKTRACE: 1 RUST_BACKTRACE: 1
HELIX_LOG_LEVEL: info HELIX_LOG_LEVEL: info
@ -65,6 +67,7 @@ jobs:
lints: lints:
name: Lints name: Lints
runs-on: ubuntu-latest runs-on: ubuntu-latest
if: github.repository == 'helix-editor/helix' || github.event_name != 'schedule'
steps: steps:
- name: Checkout sources - name: Checkout sources
uses: actions/checkout@v4 uses: actions/checkout@v4
@ -92,6 +95,7 @@ jobs:
docs: docs:
name: Docs name: Docs
runs-on: ubuntu-latest runs-on: ubuntu-latest
if: github.repository == 'helix-editor/helix' || github.event_name != 'schedule'
steps: steps:
- name: Checkout sources - name: Checkout sources
uses: actions/checkout@v4 uses: actions/checkout@v4

72
Cargo.lock generated

@ -19,9 +19,9 @@ checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
[[package]] [[package]]
name = "ahash" name = "ahash"
version = "0.8.6" version = "0.8.11"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "91429305e9f0a25f6205c5b8e0d2db09e0708a7a6df0f42212bb56c32c8ac97a" checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011"
dependencies = [ dependencies = [
"cfg-if", "cfg-if",
"getrandom", "getrandom",
@ -62,15 +62,15 @@ dependencies = [
[[package]] [[package]]
name = "anyhow" name = "anyhow"
version = "1.0.79" version = "1.0.80"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "080e9890a082662b09c1ad45f567faeeb47f22b5fb23895fbe1e651e718e25ca" checksum = "5ad32ce52e4161730f7098c077cd2ed6229b5804ccf99e5366be1ab72a98b4e1"
[[package]] [[package]]
name = "arc-swap" name = "arc-swap"
version = "1.6.0" version = "1.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bddcadddf5e9015d310179a59bb28c4d4b9920ad0f11e8e14dbadf654890c9a6" checksum = "7b3d0060af21e8d11a926981cc00c6c1541aa91dd64b9f881985c3da1094425f"
[[package]] [[package]]
name = "autocfg" name = "autocfg"
@ -145,9 +145,9 @@ checksum = "df8670b8c7b9dae1793364eafadf7239c40d669904660c5960d74cfd80b46a53"
[[package]] [[package]]
name = "cc" name = "cc"
version = "1.0.85" version = "1.0.88"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b918671670962b48bc23753aef0c51d072dca6f52f01f800854ada6ddb7f7d3" checksum = "02f341c093d19155a6e41631ce5971aac4e9a868262212153124c15fa22d1cdc"
[[package]] [[package]]
name = "cfg-if" name = "cfg-if"
@ -168,9 +168,9 @@ dependencies = [
[[package]] [[package]]
name = "chrono" name = "chrono"
version = "0.4.33" version = "0.4.34"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9f13690e35a5e4ace198e7beea2895d29f3a9cc55015fcebe6336bd2010af9eb" checksum = "5bc015644b92d5890fab7489e49d21f879d5c990186827d42ec511919404f38b"
dependencies = [ dependencies = [
"android-tzdata", "android-tzdata",
"iana-time-zone", "iana-time-zone",
@ -180,9 +180,9 @@ dependencies = [
[[package]] [[package]]
name = "clipboard-win" name = "clipboard-win"
version = "5.1.0" version = "5.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3ec832972fefb8cf9313b45a0d1945e29c9c251f1d4c6eafc5fe2124c02d2e81" checksum = "12f9a0700e0127ba15d1d52dd742097f821cd9c65939303a44d970465040a297"
dependencies = [ dependencies = [
"error-code", "error-code",
] ]
@ -1344,6 +1344,7 @@ version = "23.10.0"
dependencies = [ dependencies = [
"dunce", "dunce",
"etcetera", "etcetera",
"regex-cursor",
"ropey", "ropey",
"tempfile", "tempfile",
"which", "which",
@ -1602,12 +1603,12 @@ checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd"
[[package]] [[package]]
name = "libloading" name = "libloading"
version = "0.8.1" version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c571b676ddfc9a8c12f1f3d3085a7b163966a8fd8098a90640953ce5f6170161" checksum = "2caa5afb8bf9f3a2652760ce7d4f62d21c4d5a423e68466fca30df82f2330164"
dependencies = [ dependencies = [
"cfg-if", "cfg-if",
"windows-sys 0.48.0", "windows-targets 0.52.0",
] ]
[[package]] [[package]]
@ -1689,9 +1690,9 @@ dependencies = [
[[package]] [[package]]
name = "mio" name = "mio"
version = "0.8.9" version = "0.8.11"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3dce281c5e46beae905d4de1870d8b1509a9142b62eedf18b443b011ca8343d0" checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c"
dependencies = [ dependencies = [
"libc", "libc",
"log", "log",
@ -1938,15 +1939,28 @@ dependencies = [
[[package]] [[package]]
name = "regex-automata" name = "regex-automata"
version = "0.4.4" version = "0.4.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3b7fa1134405e2ec9353fd416b17f8dacd46c473d7d3fd1cf202706a14eb792a" checksum = "5bb987efffd3c6d0d8f5f89510bb458559eab11e4f869acb20bf845e016259cd"
dependencies = [ dependencies = [
"aho-corasick", "aho-corasick",
"memchr", "memchr",
"regex-syntax", "regex-syntax",
] ]
[[package]]
name = "regex-cursor"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a43718aa0040434d45728c43f56bd53bda75a91c46954cdf0f2ff4dbc8aabbe7"
dependencies = [
"log",
"memchr",
"regex-automata",
"regex-syntax",
"ropey",
]
[[package]] [[package]]
name = "regex-syntax" name = "regex-syntax"
version = "0.8.2" version = "0.8.2"
@ -2011,18 +2025,18 @@ checksum = "1792db035ce95be60c3f8853017b3999209281c24e2ba5bc8e59bf97a0c590c1"
[[package]] [[package]]
name = "serde" name = "serde"
version = "1.0.196" version = "1.0.197"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "870026e60fa08c69f064aa766c10f10b1d62db9ccd4d0abb206472bee0ce3b32" checksum = "3fb1c873e1b9b056a4dc4c0c198b24c3ffa059243875552b2bd0933b1aee4ce2"
dependencies = [ dependencies = [
"serde_derive", "serde_derive",
] ]
[[package]] [[package]]
name = "serde_derive" name = "serde_derive"
version = "1.0.196" version = "1.0.197"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "33c85360c95e7d137454dc81d9a4ed2b8efd8fbe19cee57357b32b9771fccb67" checksum = "7eb0b34b42edc17f6b7cac84a52a1c5f0e1bb2227e997ca9011ea3dd34e8610b"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
@ -2031,9 +2045,9 @@ dependencies = [
[[package]] [[package]]
name = "serde_json" name = "serde_json"
version = "1.0.113" version = "1.0.114"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "69801b70b1c3dac963ecb03a364ba0ceda9cf60c71cfe475e99864759c8b8a79" checksum = "c5f09b1bd632ef549eaa9f60a1f8de742bdbc698e6cee2095fc84dde5f549ae0"
dependencies = [ dependencies = [
"itoa", "itoa",
"ryu", "ryu",
@ -2201,9 +2215,9 @@ dependencies = [
[[package]] [[package]]
name = "tempfile" name = "tempfile"
version = "3.10.0" version = "3.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a365e8cd18e44762ef95d87f284f4b5cd04107fec2ff3052bd6a3e6069669e67" checksum = "85b77fafb263dd9d05cbeac119526425676db3784113aa9295c88498cbf8bff1"
dependencies = [ dependencies = [
"cfg-if", "cfg-if",
"fastrand", "fastrand",
@ -2231,9 +2245,9 @@ dependencies = [
[[package]] [[package]]
name = "textwrap" name = "textwrap"
version = "0.16.0" version = "0.16.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "222a222a5bfe1bba4a77b45ec488a741b3cb8872e5e499451fd7d0129c9c7c3d" checksum = "23d434d3f8967a09480fb04132ebe0a3e088c173e6d0ee7897abbdf4eab0f8b9"
dependencies = [ dependencies = [
"smawk", "smawk",
"unicode-linebreak", "unicode-linebreak",

@ -375,8 +375,25 @@ wrap-indicator = "" # set wrap-indicator to "" to hide it
### `[editor.smart-tab]` Section ### `[editor.smart-tab]` Section
Options for navigating and editing using tab key.
| Key | Description | Default | | Key | Description | Default |
|------------|-------------|---------| |------------|-------------|---------|
| `enable` | If set to true, then when the cursor is in a position with non-whitespace to its left, instead of inserting a tab, it will run `move_parent_node_end`. If there is only whitespace to the left, then it inserts a tab as normal. With the default bindings, to explicitly insert a tab character, press Shift-tab. | `true` | | `enable` | If set to true, then when the cursor is in a position with non-whitespace to its left, instead of inserting a tab, it will run `move_parent_node_end`. If there is only whitespace to the left, then it inserts a tab as normal. With the default bindings, to explicitly insert a tab character, press Shift-tab. | `true` |
| `supersede-menu` | Normally, when a menu is on screen, such as when auto complete is triggered, the tab key is bound to cycling through the items. This means when menus are on screen, one cannot use the tab key to trigger the `smart-tab` command. If this option is set to true, the `smart-tab` command always takes precedence, which means one cannot use the tab key to cycle through menu items. One of the other bindings must be used instead, such as arrow keys or `C-n`/`C-p`. | `false` | | `supersede-menu` | Normally, when a menu is on screen, such as when auto complete is triggered, the tab key is bound to cycling through the items. This means when menus are on screen, one cannot use the tab key to trigger the `smart-tab` command. If this option is set to true, the `smart-tab` command always takes precedence, which means one cannot use the tab key to cycle through menu items. One of the other bindings must be used instead, such as arrow keys or `C-n`/`C-p`. | `false` |
Due to lack of support for S-tab in some terminals, the default keybindings don't fully embrace smart-tab editing experience. If you enjoy smart-tab navigation and a terminal that supports the [Enhanced Keyboard protocol](https://github.com/helix-editor/helix/wiki/Terminal-Support#enhanced-keyboard-protocol), consider setting extra keybindings:
```
[keys.normal]
tab = "move_parent_node_end"
S-tab = "move_parent_node_start"
[keys.insert]
S-tab = "move_parent_node_start"
[keys.select]
tab = "extend_parent_node_end"
S-tab = "extend_parent_node_start"
```

@ -30,6 +30,7 @@
| devicetree | ✓ | | | | | devicetree | ✓ | | | |
| dhall | ✓ | ✓ | | `dhall-lsp-server` | | dhall | ✓ | ✓ | | `dhall-lsp-server` |
| diff | ✓ | | | | | diff | ✓ | | | |
| docker-compose | ✓ | | ✓ | `docker-compose-langserver` |
| dockerfile | ✓ | | | `docker-langserver` | | dockerfile | ✓ | | | `docker-langserver` |
| dot | ✓ | | | `dot-language-server` | | dot | ✓ | | | `dot-language-server` |
| dtd | ✓ | | | | | dtd | ✓ | | | |
@ -43,6 +44,7 @@
| erb | ✓ | | | | | erb | ✓ | | | |
| erlang | ✓ | ✓ | | `erlang_ls` | | erlang | ✓ | ✓ | | `erlang_ls` |
| esdl | ✓ | | | | | esdl | ✓ | | | |
| fidl | ✓ | | | |
| fish | ✓ | ✓ | ✓ | | | fish | ✓ | ✓ | ✓ | |
| forth | ✓ | | | `forth-lsp` | | forth | ✓ | | | `forth-lsp` |
| fortran | ✓ | | ✓ | `fortls` | | fortran | ✓ | | ✓ | `fortls` |
@ -64,10 +66,11 @@
| gotmpl | ✓ | | | `gopls` | | gotmpl | ✓ | | | `gopls` |
| gowork | ✓ | | | `gopls` | | gowork | ✓ | | | `gopls` |
| graphql | ✓ | | | `graphql-lsp` | | graphql | ✓ | | | `graphql-lsp` |
| groovy | ✓ | | | |
| hare | ✓ | | | | | hare | ✓ | | | |
| haskell | ✓ | ✓ | | `haskell-language-server-wrapper` | | haskell | ✓ | ✓ | | `haskell-language-server-wrapper` |
| haskell-persistent | ✓ | | | | | haskell-persistent | ✓ | | | |
| hcl | ✓ | | ✓ | `terraform-ls` | | hcl | ✓ | | ✓ | `terraform-ls` |
| heex | ✓ | ✓ | | `elixir-ls` | | heex | ✓ | ✓ | | `elixir-ls` |
| hocon | ✓ | | ✓ | | | hocon | ✓ | | ✓ | |
| hoon | ✓ | | | | | hoon | ✓ | | | |
@ -101,7 +104,7 @@
| lua | ✓ | ✓ | ✓ | `lua-language-server` | | lua | ✓ | ✓ | ✓ | `lua-language-server` |
| make | ✓ | | ✓ | | | make | ✓ | | ✓ | |
| markdoc | ✓ | | | `markdoc-ls` | | markdoc | ✓ | | | `markdoc-ls` |
| markdown | ✓ | | | `marksman` | | markdown | ✓ | | | `marksman`, `markdown-oxide` |
| markdown.inline | ✓ | | | | | markdown.inline | ✓ | | | |
| matlab | ✓ | ✓ | ✓ | | | matlab | ✓ | ✓ | ✓ | |
| mermaid | ✓ | | | | | mermaid | ✓ | | | |
@ -111,7 +114,7 @@
| nasm | ✓ | ✓ | | | | nasm | ✓ | ✓ | | |
| nickel | ✓ | | ✓ | `nls` | | nickel | ✓ | | ✓ | `nls` |
| nim | ✓ | ✓ | ✓ | `nimlangserver` | | nim | ✓ | ✓ | ✓ | `nimlangserver` |
| nix | ✓ | | | `nil` | | nix | ✓ | | | `nil` |
| nu | ✓ | | | `nu` | | nu | ✓ | | | `nu` |
| nunjucks | ✓ | | | | | nunjucks | ✓ | | | |
| ocaml | ✓ | | ✓ | `ocamllsp` | | ocaml | ✓ | | ✓ | `ocamllsp` |

@ -12,6 +12,7 @@
- [Match mode](#match-mode) - [Match mode](#match-mode)
- [Window mode](#window-mode) - [Window mode](#window-mode)
- [Space mode](#space-mode) - [Space mode](#space-mode)
- [Comment mode](#comment-mode)
- [Popup](#popup) - [Popup](#popup)
- [Unimpaired](#unimpaired) - [Unimpaired](#unimpaired)
- [Insert mode](#insert-mode) - [Insert mode](#insert-mode)
@ -53,8 +54,8 @@ Normal mode is the default mode when you launch helix. You can return to it from
| `End` | Move to the end of the line | `goto_line_end` | | `End` | Move to the end of the line | `goto_line_end` |
| `Ctrl-b`, `PageUp` | Move page up | `page_up` | | `Ctrl-b`, `PageUp` | Move page up | `page_up` |
| `Ctrl-f`, `PageDown` | Move page down | `page_down` | | `Ctrl-f`, `PageDown` | Move page down | `page_down` |
| `Ctrl-u` | Move half page up | `half_page_up` | | `Ctrl-u` | Move cursor and page half page up | `page_cursor_half_up` |
| `Ctrl-d` | Move half page down | `half_page_down` | | `Ctrl-d` | Move cursor and page half page down | `page_cursor_half_down` |
| `Ctrl-i` | Jump forward on the jumplist | `jump_forward` | | `Ctrl-i` | Jump forward on the jumplist | `jump_forward` |
| `Ctrl-o` | Jump backward on the jumplist | `jump_backward` | | `Ctrl-o` | Jump backward on the jumplist | `jump_backward` |
| `Ctrl-s` | Save the current selection to the jumplist | `save_selection` | | `Ctrl-s` | Save the current selection to the jumplist | `save_selection` |
@ -192,8 +193,8 @@ useful when you're simply looking over text and not actively editing it.
| `k`, `up` | Scroll the view upwards | `scroll_up` | | `k`, `up` | Scroll the view upwards | `scroll_up` |
| `Ctrl-f`, `PageDown` | Move page down | `page_down` | | `Ctrl-f`, `PageDown` | Move page down | `page_down` |
| `Ctrl-b`, `PageUp` | Move page up | `page_up` | | `Ctrl-b`, `PageUp` | Move page up | `page_up` |
| `Ctrl-d` | Move half page down | `half_page_down` | | `Ctrl-u` | Move cursor and page half page up | `page_cursor_half_up` |
| `Ctrl-u` | Move half page up | `half_page_up` | | `Ctrl-d` | Move cursor and page half page down | `page_cursor_half_down` |
#### Goto mode #### Goto mode
@ -289,6 +290,9 @@ This layer is a kludge of mappings, mostly pickers.
| `h` | Select symbol references (**LSP**) | `select_references_to_symbol_under_cursor` | | `h` | Select symbol references (**LSP**) | `select_references_to_symbol_under_cursor` |
| `'` | Open last fuzzy picker | `last_picker` | | `'` | Open last fuzzy picker | `last_picker` |
| `w` | Enter [window mode](#window-mode) | N/A | | `w` | Enter [window mode](#window-mode) | N/A |
| `c` | Comment/uncomment selections | `toggle_comments` |
| `C` | Block comment/uncomment selections | `toggle_block_comments` |
| `Alt-c` | Line comment/uncomment selections | `toggle_line_comments` |
| `p` | Paste system clipboard after selections | `paste_clipboard_after` | | `p` | Paste system clipboard after selections | `paste_clipboard_after` |
| `P` | Paste system clipboard before selections | `paste_clipboard_before` | | `P` | Paste system clipboard before selections | `paste_clipboard_before` |
| `y` | Yank selections to clipboard | `yank_to_clipboard` | | `y` | Yank selections to clipboard | `yank_to_clipboard` |

@ -42,7 +42,7 @@ name = "mylang"
scope = "source.mylang" scope = "source.mylang"
injection-regex = "mylang" injection-regex = "mylang"
file-types = ["mylang", "myl"] file-types = ["mylang", "myl"]
comment-token = "#" comment-tokens = "#"
indent = { tab-width = 2, unit = " " } indent = { tab-width = 2, unit = " " }
formatter = { command = "mylang-formatter" , args = ["--stdin"] } formatter = { command = "mylang-formatter" , args = ["--stdin"] }
language-servers = [ "mylang-lsp" ] language-servers = [ "mylang-lsp" ]
@ -61,7 +61,8 @@ These configuration keys are available:
| `roots` | A set of marker files to look for when trying to find the workspace root. For example `Cargo.lock`, `yarn.lock` | | `roots` | A set of marker files to look for when trying to find the workspace root. For example `Cargo.lock`, `yarn.lock` |
| `auto-format` | Whether to autoformat this language when saving | | `auto-format` | Whether to autoformat this language when saving |
| `diagnostic-severity` | Minimal severity of diagnostic for it to be displayed. (Allowed values: `Error`, `Warning`, `Info`, `Hint`) | | `diagnostic-severity` | Minimal severity of diagnostic for it to be displayed. (Allowed values: `Error`, `Warning`, `Info`, `Hint`) |
| `comment-token` | The token to use as a comment-token | | `comment-tokens` | The tokens to use as a comment token, either a single token `"//"` or an array `["//", "///", "//!"]` (the first token will be used for commenting). Also configurable as `comment-token` for backwards compatibility|
| `block-comment-tokens`| The start and end tokens for a multiline comment either an array or single table of `{ start = "/*", end = "*/"}`. The first set of tokens will be used for commenting, any pairs in the array can be uncommented |
| `indent` | The indent to use. Has sub keys `unit` (the text inserted into the document when indenting; usually set to N spaces or `"\t"` for tabs) and `tab-width` (the number of spaces rendered for a tab) | | `indent` | The indent to use. Has sub keys `unit` (the text inserted into the document when indenting; usually set to N spaces or `"\t"` for tabs) and `tab-width` (the number of spaces rendered for a tab) |
| `language-servers` | The Language Servers used for this language. See below for more information in the section [Configuring Language Servers for a language](#configuring-language-servers-for-a-language) | | `language-servers` | The Language Servers used for this language. See below for more information in the section [Configuring Language Servers for a language](#configuring-language-servers-for-a-language) |
| `grammar` | The tree-sitter grammar to use (defaults to the value of `name`) | | `grammar` | The tree-sitter grammar to use (defaults to the value of `name`) |

@ -333,5 +333,7 @@ These scopes are used for theming the editor interface:
| `diagnostic.info` | Diagnostics info (editing area) | | `diagnostic.info` | Diagnostics info (editing area) |
| `diagnostic.warning` | Diagnostics warning (editing area) | | `diagnostic.warning` | Diagnostics warning (editing area) |
| `diagnostic.error` | Diagnostics error (editing area) | | `diagnostic.error` | Diagnostics error (editing area) |
| `diagnostic.unnecessary` | Diagnostics with unnecessary tag (editing area) |
| `diagnostic.deprecated` | Diagnostics with deprecated tag (editing area) |
[editor-section]: ./configuration.md#editor-section [editor-section]: ./configuration.md#editor-section

@ -5,19 +5,20 @@ _hx() {
# $1 command name # $1 command name
# $2 word being completed # $2 word being completed
# $3 word preceding # $3 word preceding
COMPREPLY=()
case "$3" in case "$3" in
-g | --grammar) -g | --grammar)
COMPREPLY=($(compgen -W "fetch build" -- $2)) COMPREPLY="$(compgen -W 'fetch build' -- $2)"
;; ;;
--health) --health)
local languages=$(hx --health |tail -n '+7' |awk '{print $1}' |sed 's/\x1b\[[0-9;]*m//g') local languages=$(hx --health |tail -n '+7' |awk '{print $1}' |sed 's/\x1b\[[0-9;]*m//g')
COMPREPLY=($(compgen -W "$languages" -- $2)) COMPREPLY="$(compgen -W """$languages""" -- $2)"
;; ;;
*) *)
COMPREPLY=($(compgen -fd -W "-h --help --tutor -V --version -v -vv -vvv --health -g --grammar --vsplit --hsplit -c --config --log" -- $2)) COMPREPLY="$(compgen -fd -W "-h --help --tutor -V --version -v -vv -vvv --health -g --grammar --vsplit --hsplit -c --config --log" -- """$2""")"
;; ;;
esac esac
} && complete -o filenames -F _hx hx
local IFS=$'\n'
COMPREPLY=($COMPREPLY)
} && complete -o filenames -F _hx hx

@ -7,11 +7,11 @@
] ]
}, },
"locked": { "locked": {
"lastModified": 1701025348, "lastModified": 1709610799,
"narHash": "sha256-42GHmYH+GF7VjwGSt+fVT1CQuNpGanJbNgVHTAZppUM=", "narHash": "sha256-5jfLQx0U9hXbi2skYMGodDJkIgffrjIOgMRjZqms2QE=",
"owner": "ipetkov", "owner": "ipetkov",
"repo": "crane", "repo": "crane",
"rev": "42afaeb1a0325194a7cdb526332d2cb92fddd07b", "rev": "81c393c776d5379c030607866afef6406ca1be57",
"type": "github" "type": "github"
}, },
"original": { "original": {
@ -25,11 +25,11 @@
"systems": "systems" "systems": "systems"
}, },
"locked": { "locked": {
"lastModified": 1694529238, "lastModified": 1709126324,
"narHash": "sha256-zsNZZGTGnMOf9YpHKJqMSsa0dXbfmxeoJ7xHlrt+xmY=", "narHash": "sha256-q6EQdSeUZOG26WelxqkmR7kArjgWCdw5sfJVHPH/7j8=",
"owner": "numtide", "owner": "numtide",
"repo": "flake-utils", "repo": "flake-utils",
"rev": "ff7b65b44d01cf9ba6a71320833626af21126384", "rev": "d465f4819400de7c8d874d50b982301f28a84605",
"type": "github" "type": "github"
}, },
"original": { "original": {
@ -40,11 +40,11 @@
}, },
"nixpkgs": { "nixpkgs": {
"locked": { "locked": {
"lastModified": 1700794826, "lastModified": 1709479366,
"narHash": "sha256-RyJTnTNKhO0yqRpDISk03I/4A67/dp96YRxc86YOPgU=", "narHash": "sha256-n6F0n8UV6lnTZbYPl1A9q1BS0p4hduAv1mGAP17CVd0=",
"owner": "nixos", "owner": "nixos",
"repo": "nixpkgs", "repo": "nixpkgs",
"rev": "5a09cb4b393d58f9ed0d9ca1555016a8543c2ac8", "rev": "b8697e57f10292a6165a20f03d2f42920dfaf973",
"type": "github" "type": "github"
}, },
"original": { "original": {
@ -72,11 +72,11 @@
] ]
}, },
"locked": { "locked": {
"lastModified": 1701137803, "lastModified": 1709604635,
"narHash": "sha256-0LcPAdql5IhQSUXJx3Zna0dYTgdIoYO7zUrsKgiBd04=", "narHash": "sha256-le4fwmWmjGRYWwkho0Gr7mnnZndOOe4XGbLw68OvF40=",
"owner": "oxalica", "owner": "oxalica",
"repo": "rust-overlay", "repo": "rust-overlay",
"rev": "9dd940c967502f844eacea52a61e9596268d4f70", "rev": "e86c0fb5d3a22a5f30d7f64ecad88643fe26449d",
"type": "github" "type": "github"
}, },
"original": { "original": {

@ -32,7 +32,7 @@ once_cell = "1.19"
arc-swap = "1" arc-swap = "1"
regex = "1" regex = "1"
bitflags = "2.4" bitflags = "2.4"
ahash = "0.8.6" ahash = "0.8.11"
hashbrown = { version = "0.14.3", features = ["raw"] } hashbrown = { version = "0.14.3", features = ["raw"] }
dunce = "1.0" dunce = "1.0"
@ -48,7 +48,7 @@ encoding_rs = "0.8"
chrono = { version = "0.4", default-features = false, features = ["alloc", "std"] } chrono = { version = "0.4", default-features = false, features = ["alloc", "std"] }
etcetera = "0.8" etcetera = "0.8"
textwrap = "0.16.0" textwrap = "0.16.1"
nucleo.workspace = true nucleo.workspace = true
parking_lot = "0.12" parking_lot = "0.12"

@ -1,9 +1,12 @@
//! This module contains the functionality toggle comments on lines over the selection //! This module contains the functionality toggle comments on lines over the selection
//! using the comment character defined in the user's `languages.toml` //! using the comment character defined in the user's `languages.toml`
use smallvec::SmallVec;
use crate::{ use crate::{
find_first_non_whitespace_char, Change, Rope, RopeSlice, Selection, Tendril, Transaction, syntax::BlockCommentToken, Change, Range, Rope, RopeSlice, Selection, Tendril, Transaction,
}; };
use helix_stdx::rope::RopeSliceExt;
use std::borrow::Cow; use std::borrow::Cow;
/// Given text, a comment token, and a set of line indices, returns the following: /// Given text, a comment token, and a set of line indices, returns the following:
@ -22,12 +25,12 @@ fn find_line_comment(
) -> (bool, Vec<usize>, usize, usize) { ) -> (bool, Vec<usize>, usize, usize) {
let mut commented = true; let mut commented = true;
let mut to_change = Vec::new(); let mut to_change = Vec::new();
let mut min = usize::MAX; // minimum col for find_first_non_whitespace_char let mut min = usize::MAX; // minimum col for first_non_whitespace_char
let mut margin = 1; let mut margin = 1;
let token_len = token.chars().count(); let token_len = token.chars().count();
for line in lines { for line in lines {
let line_slice = text.line(line); let line_slice = text.line(line);
if let Some(pos) = find_first_non_whitespace_char(line_slice) { if let Some(pos) = line_slice.first_non_whitespace_char() {
let len = line_slice.len_chars(); let len = line_slice.len_chars();
if pos < min { if pos < min {
@ -94,6 +97,222 @@ pub fn toggle_line_comments(doc: &Rope, selection: &Selection, token: Option<&st
Transaction::change(doc, changes.into_iter()) Transaction::change(doc, changes.into_iter())
} }
#[derive(Debug, PartialEq, Eq)]
pub enum CommentChange {
Commented {
range: Range,
start_pos: usize,
end_pos: usize,
start_margin: bool,
end_margin: bool,
start_token: String,
end_token: String,
},
Uncommented {
range: Range,
start_pos: usize,
end_pos: usize,
start_token: String,
end_token: String,
},
Whitespace {
range: Range,
},
}
pub fn find_block_comments(
tokens: &[BlockCommentToken],
text: RopeSlice,
selection: &Selection,
) -> (bool, Vec<CommentChange>) {
let mut commented = true;
let mut only_whitespace = true;
let mut comment_changes = Vec::with_capacity(selection.len());
let default_tokens = tokens.first().cloned().unwrap_or_default();
// TODO: check if this can be removed on MSRV bump
#[allow(clippy::redundant_clone)]
let mut start_token = default_tokens.start.clone();
#[allow(clippy::redundant_clone)]
let mut end_token = default_tokens.end.clone();
let mut tokens = tokens.to_vec();
// sort the tokens by length, so longer tokens will match first
tokens.sort_by(|a, b| {
if a.start.len() == b.start.len() {
b.end.len().cmp(&a.end.len())
} else {
b.start.len().cmp(&a.start.len())
}
});
for range in selection {
let selection_slice = range.slice(text);
if let (Some(start_pos), Some(end_pos)) = (
selection_slice.first_non_whitespace_char(),
selection_slice.last_non_whitespace_char(),
) {
let mut line_commented = false;
let mut after_start = 0;
let mut before_end = 0;
let len = (end_pos + 1) - start_pos;
for BlockCommentToken { start, end } in &tokens {
let start_len = start.chars().count();
let end_len = end.chars().count();
after_start = start_pos + start_len;
before_end = end_pos.saturating_sub(end_len);
if len >= start_len + end_len {
let start_fragment = selection_slice.slice(start_pos..after_start);
let end_fragment = selection_slice.slice(before_end + 1..end_pos + 1);
// block commented with these tokens
if start_fragment == start.as_str() && end_fragment == end.as_str() {
start_token = start.to_string();
end_token = end.to_string();
line_commented = true;
break;
}
}
}
if !line_commented {
comment_changes.push(CommentChange::Uncommented {
range: *range,
start_pos,
end_pos,
start_token: default_tokens.start.clone(),
end_token: default_tokens.end.clone(),
});
commented = false;
} else {
comment_changes.push(CommentChange::Commented {
range: *range,
start_pos,
end_pos,
start_margin: selection_slice
.get_char(after_start)
.map_or(false, |c| c == ' '),
end_margin: after_start != before_end
&& selection_slice
.get_char(before_end)
.map_or(false, |c| c == ' '),
start_token: start_token.to_string(),
end_token: end_token.to_string(),
});
}
only_whitespace = false;
} else {
comment_changes.push(CommentChange::Whitespace { range: *range });
}
}
if only_whitespace {
commented = false;
}
(commented, comment_changes)
}
#[must_use]
pub fn create_block_comment_transaction(
doc: &Rope,
selection: &Selection,
commented: bool,
comment_changes: Vec<CommentChange>,
) -> (Transaction, SmallVec<[Range; 1]>) {
let mut changes: Vec<Change> = Vec::with_capacity(selection.len() * 2);
let mut ranges: SmallVec<[Range; 1]> = SmallVec::with_capacity(selection.len());
let mut offs = 0;
for change in comment_changes {
if commented {
if let CommentChange::Commented {
range,
start_pos,
end_pos,
start_token,
end_token,
start_margin,
end_margin,
} = change
{
let from = range.from();
changes.push((
from + start_pos,
from + start_pos + start_token.len() + start_margin as usize,
None,
));
changes.push((
from + end_pos - end_token.len() - end_margin as usize + 1,
from + end_pos + 1,
None,
));
}
} else {
// uncommented so manually map ranges through changes
match change {
CommentChange::Uncommented {
range,
start_pos,
end_pos,
start_token,
end_token,
} => {
let from = range.from();
changes.push((
from + start_pos,
from + start_pos,
Some(Tendril::from(format!("{} ", start_token))),
));
changes.push((
from + end_pos + 1,
from + end_pos + 1,
Some(Tendril::from(format!(" {}", end_token))),
));
let offset = start_token.chars().count() + end_token.chars().count() + 2;
ranges.push(
Range::new(from + offs, from + offs + end_pos + 1 + offset)
.with_direction(range.direction()),
);
offs += offset;
}
CommentChange::Commented { range, .. } | CommentChange::Whitespace { range } => {
ranges.push(Range::new(range.from() + offs, range.to() + offs));
}
}
}
}
(Transaction::change(doc, changes.into_iter()), ranges)
}
#[must_use]
pub fn toggle_block_comments(
doc: &Rope,
selection: &Selection,
tokens: &[BlockCommentToken],
) -> Transaction {
let text = doc.slice(..);
let (commented, comment_changes) = find_block_comments(tokens, text, selection);
let (mut transaction, ranges) =
create_block_comment_transaction(doc, selection, commented, comment_changes);
if !commented {
transaction = transaction.with_selection(Selection::new(ranges, selection.primary_index()));
}
transaction
}
pub fn split_lines_of_selection(text: RopeSlice, selection: &Selection) -> Selection {
let mut ranges = SmallVec::new();
for range in selection.ranges() {
let (line_start, line_end) = range.line_range(text.slice(..));
let mut pos = text.line_to_char(line_start);
for line in text.slice(pos..text.line_to_char(line_end + 1)).lines() {
let start = pos;
pos += line.len_chars();
ranges.push(Range::new(start, pos));
}
}
Selection::new(ranges, 0)
}
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use super::*; use super::*;
@ -149,4 +368,49 @@ mod test {
// TODO: account for uncommenting with uneven comment indentation // TODO: account for uncommenting with uneven comment indentation
} }
#[test]
fn test_find_block_comments() {
// three lines 5 characters.
let mut doc = Rope::from("1\n2\n3");
// select whole document
let selection = Selection::single(0, doc.len_chars());
let text = doc.slice(..);
let res = find_block_comments(&[BlockCommentToken::default()], text, &selection);
assert_eq!(
res,
(
false,
vec![CommentChange::Uncommented {
range: Range::new(0, 5),
start_pos: 0,
end_pos: 4,
start_token: "/*".to_string(),
end_token: "*/".to_string(),
}]
)
);
// comment
let transaction = toggle_block_comments(&doc, &selection, &[BlockCommentToken::default()]);
transaction.apply(&mut doc);
assert_eq!(doc, "/* 1\n2\n3 */");
// uncomment
let selection = Selection::single(0, doc.len_chars());
let transaction = toggle_block_comments(&doc, &selection, &[BlockCommentToken::default()]);
transaction.apply(&mut doc);
assert_eq!(doc, "1\n2\n3");
// don't panic when there is just a space in comment
doc = Rope::from("/* */");
let selection = Selection::single(0, doc.len_chars());
let transaction = toggle_block_comments(&doc, &selection, &[BlockCommentToken::default()]);
transaction.apply(&mut doc);
assert_eq!(doc, "");
}
} }

@ -1,10 +1,10 @@
use std::{borrow::Cow, collections::HashMap}; use std::{borrow::Cow, collections::HashMap};
use helix_stdx::rope::RopeSliceExt;
use tree_sitter::{Query, QueryCursor, QueryPredicateArg}; use tree_sitter::{Query, QueryCursor, QueryPredicateArg};
use crate::{ use crate::{
chars::{char_is_line_ending, char_is_whitespace}, chars::{char_is_line_ending, char_is_whitespace},
find_first_non_whitespace_char,
graphemes::{grapheme_width, tab_width_at}, graphemes::{grapheme_width, tab_width_at},
syntax::{IndentationHeuristic, LanguageConfiguration, RopeProvider, Syntax}, syntax::{IndentationHeuristic, LanguageConfiguration, RopeProvider, Syntax},
tree_sitter::Node, tree_sitter::Node,
@ -970,7 +970,7 @@ pub fn indent_for_newline(
let mut num_attempts = 0; let mut num_attempts = 0;
for line_idx in (0..=line_before).rev() { for line_idx in (0..=line_before).rev() {
let line = text.line(line_idx); let line = text.line(line_idx);
let first_non_whitespace_char = match find_first_non_whitespace_char(line) { let first_non_whitespace_char = match line.first_non_whitespace_char() {
Some(i) => i, Some(i) => i,
None => { None => {
continue; continue;

@ -37,9 +37,6 @@ pub mod unicode {
pub use helix_loader::find_workspace; pub use helix_loader::find_workspace;
pub fn find_first_non_whitespace_char(line: RopeSlice) -> Option<usize> {
line.chars().position(|ch| !ch.is_whitespace())
}
mod rope_reader; mod rope_reader;
pub use rope_reader::RopeReader; pub use rope_reader::RopeReader;

@ -7,9 +7,11 @@ use crate::{
ensure_grapheme_boundary_next, ensure_grapheme_boundary_prev, next_grapheme_boundary, ensure_grapheme_boundary_next, ensure_grapheme_boundary_prev, next_grapheme_boundary,
prev_grapheme_boundary, prev_grapheme_boundary,
}, },
line_ending::get_line_ending,
movement::Direction, movement::Direction,
Assoc, ChangeSet, RopeGraphemes, RopeSlice, Assoc, ChangeSet, RopeGraphemes, RopeSlice,
}; };
use helix_stdx::rope::{self, RopeSliceExt};
use smallvec::{smallvec, SmallVec}; use smallvec::{smallvec, SmallVec};
use std::borrow::Cow; use std::borrow::Cow;
@ -708,12 +710,12 @@ impl IntoIterator for Selection {
pub fn keep_or_remove_matches( pub fn keep_or_remove_matches(
text: RopeSlice, text: RopeSlice,
selection: &Selection, selection: &Selection,
regex: &crate::regex::Regex, regex: &rope::Regex,
remove: bool, remove: bool,
) -> Option<Selection> { ) -> Option<Selection> {
let result: SmallVec<_> = selection let result: SmallVec<_> = selection
.iter() .iter()
.filter(|range| regex.is_match(&range.fragment(text)) ^ remove) .filter(|range| regex.is_match(text.regex_input_at(range.from()..range.to())) ^ remove)
.copied() .copied()
.collect(); .collect();
@ -724,25 +726,20 @@ pub fn keep_or_remove_matches(
None None
} }
// TODO: support to split on capture #N instead of whole match
pub fn select_on_matches( pub fn select_on_matches(
text: RopeSlice, text: RopeSlice,
selection: &Selection, selection: &Selection,
regex: &crate::regex::Regex, regex: &rope::Regex,
) -> Option<Selection> { ) -> Option<Selection> {
let mut result = SmallVec::with_capacity(selection.len()); let mut result = SmallVec::with_capacity(selection.len());
for sel in selection { for sel in selection {
// TODO: can't avoid occasional allocations since Regex can't operate on chunks yet for mat in regex.find_iter(text.regex_input_at(sel.from()..sel.to())) {
let fragment = sel.fragment(text);
let sel_start = sel.from();
let start_byte = text.char_to_byte(sel_start);
for mat in regex.find_iter(&fragment) {
// TODO: retain range direction // TODO: retain range direction
let start = text.byte_to_char(start_byte + mat.start()); let start = text.byte_to_char(mat.start());
let end = text.byte_to_char(start_byte + mat.end()); let end = text.byte_to_char(mat.end());
let range = Range::new(start, end); let range = Range::new(start, end);
// Make sure the match is not right outside of the selection. // Make sure the match is not right outside of the selection.
@ -761,12 +758,7 @@ pub fn select_on_matches(
None None
} }
// TODO: support to split on capture #N instead of whole match pub fn split_on_newline(text: RopeSlice, selection: &Selection) -> Selection {
pub fn split_on_matches(
text: RopeSlice,
selection: &Selection,
regex: &crate::regex::Regex,
) -> Selection {
let mut result = SmallVec::with_capacity(selection.len()); let mut result = SmallVec::with_capacity(selection.len());
for sel in selection { for sel in selection {
@ -776,21 +768,47 @@ pub fn split_on_matches(
continue; continue;
} }
// TODO: can't avoid occasional allocations since Regex can't operate on chunks yet
let fragment = sel.fragment(text);
let sel_start = sel.from(); let sel_start = sel.from();
let sel_end = sel.to(); let sel_end = sel.to();
let start_byte = text.char_to_byte(sel_start); let mut start = sel_start;
for line in sel.slice(text).lines() {
let Some(line_ending) = get_line_ending(&line) else { break };
let line_end = start + line.len_chars();
// TODO: retain range direction
result.push(Range::new(start, line_end - line_ending.len_chars()));
start = line_end;
}
if start < sel_end {
result.push(Range::new(start, sel_end));
}
}
// TODO: figure out a new primary index
Selection::new(result, 0)
}
pub fn split_on_matches(text: RopeSlice, selection: &Selection, regex: &rope::Regex) -> Selection {
let mut result = SmallVec::with_capacity(selection.len());
for sel in selection {
// Special case: zero-width selection.
if sel.from() == sel.to() {
result.push(*sel);
continue;
}
let sel_start = sel.from();
let sel_end = sel.to();
let mut start = sel_start; let mut start = sel_start;
for mat in regex.find_iter(&fragment) { for mat in regex.find_iter(text.regex_input_at(sel_start..sel_end)) {
// TODO: retain range direction // TODO: retain range direction
let end = text.byte_to_char(start_byte + mat.start()); let end = text.byte_to_char(mat.start());
result.push(Range::new(start, end)); result.push(Range::new(start, end));
start = text.byte_to_char(start_byte + mat.end()); start = text.byte_to_char(mat.end());
} }
if start < sel_end { if start < sel_end {
@ -1021,14 +1039,12 @@ mod test {
#[test] #[test]
fn test_select_on_matches() { fn test_select_on_matches() {
use crate::regex::{Regex, RegexBuilder};
let r = Rope::from_str("Nobody expects the Spanish inquisition"); let r = Rope::from_str("Nobody expects the Spanish inquisition");
let s = r.slice(..); let s = r.slice(..);
let selection = Selection::single(0, r.len_chars()); let selection = Selection::single(0, r.len_chars());
assert_eq!( assert_eq!(
select_on_matches(s, &selection, &Regex::new(r"[A-Z][a-z]*").unwrap()), select_on_matches(s, &selection, &rope::Regex::new(r"[A-Z][a-z]*").unwrap()),
Some(Selection::new( Some(Selection::new(
smallvec![Range::new(0, 6), Range::new(19, 26)], smallvec![Range::new(0, 6), Range::new(19, 26)],
0 0
@ -1038,8 +1054,14 @@ mod test {
let r = Rope::from_str("This\nString\n\ncontains multiple\nlines"); let r = Rope::from_str("This\nString\n\ncontains multiple\nlines");
let s = r.slice(..); let s = r.slice(..);
let start_of_line = RegexBuilder::new(r"^").multi_line(true).build().unwrap(); let start_of_line = rope::RegexBuilder::new()
let end_of_line = RegexBuilder::new(r"$").multi_line(true).build().unwrap(); .syntax(rope::Config::new().multi_line(true))
.build(r"^")
.unwrap();
let end_of_line = rope::RegexBuilder::new()
.syntax(rope::Config::new().multi_line(true))
.build(r"$")
.unwrap();
// line without ending // line without ending
assert_eq!( assert_eq!(
@ -1077,9 +1099,9 @@ mod test {
select_on_matches( select_on_matches(
s, s,
&Selection::single(0, s.len_chars()), &Selection::single(0, s.len_chars()),
&RegexBuilder::new(r"^[a-z ]*$") &rope::RegexBuilder::new()
.multi_line(true) .syntax(rope::Config::new().multi_line(true))
.build() .build(r"^[a-z ]*$")
.unwrap() .unwrap()
), ),
Some(Selection::new( Some(Selection::new(
@ -1171,13 +1193,15 @@ mod test {
#[test] #[test]
fn test_split_on_matches() { fn test_split_on_matches() {
use crate::regex::Regex;
let text = Rope::from(" abcd efg wrs xyz 123 456"); let text = Rope::from(" abcd efg wrs xyz 123 456");
let selection = Selection::new(smallvec![Range::new(0, 9), Range::new(11, 20),], 0); let selection = Selection::new(smallvec![Range::new(0, 9), Range::new(11, 20),], 0);
let result = split_on_matches(text.slice(..), &selection, &Regex::new(r"\s+").unwrap()); let result = split_on_matches(
text.slice(..),
&selection,
&rope::Regex::new(r"\s+").unwrap(),
);
assert_eq!( assert_eq!(
result.ranges(), result.ranges(),

@ -260,7 +260,8 @@ pub fn get_surround_pos(
if change_pos.contains(&open_pos) || change_pos.contains(&close_pos) { if change_pos.contains(&open_pos) || change_pos.contains(&close_pos) {
return Err(Error::CursorOverlap); return Err(Error::CursorOverlap);
} }
change_pos.extend_from_slice(&[open_pos, close_pos]); // ensure the positions are always paired in the forward direction
change_pos.extend_from_slice(&[open_pos.min(close_pos), close_pos.max(open_pos)]);
} }
Ok(change_pos) Ok(change_pos)
} }

@ -12,6 +12,7 @@ use arc_swap::{ArcSwap, Guard};
use bitflags::bitflags; use bitflags::bitflags;
use globset::GlobSet; use globset::GlobSet;
use hashbrown::raw::RawTable; use hashbrown::raw::RawTable;
use helix_stdx::rope::{self, RopeSliceExt};
use slotmap::{DefaultKey as LayerId, HopSlotMap}; use slotmap::{DefaultKey as LayerId, HopSlotMap};
use std::{ use std::{
@ -98,7 +99,19 @@ pub struct LanguageConfiguration {
pub shebangs: Vec<String>, // interpreter(s) associated with language pub shebangs: Vec<String>, // interpreter(s) associated with language
#[serde(default)] #[serde(default)]
pub roots: Vec<String>, // these indicate project roots <.git, Cargo.toml> pub roots: Vec<String>, // these indicate project roots <.git, Cargo.toml>
pub comment_token: Option<String>, #[serde(
default,
skip_serializing,
deserialize_with = "from_comment_tokens",
alias = "comment-token"
)]
pub comment_tokens: Option<Vec<String>>,
#[serde(
default,
skip_serializing,
deserialize_with = "from_block_comment_tokens"
)]
pub block_comment_tokens: Option<Vec<BlockCommentToken>>,
pub text_width: Option<usize>, pub text_width: Option<usize>,
pub soft_wrap: Option<SoftWrap>, pub soft_wrap: Option<SoftWrap>,
@ -239,6 +252,59 @@ impl<'de> Deserialize<'de> for FileType {
} }
} }
fn from_comment_tokens<'de, D>(deserializer: D) -> Result<Option<Vec<String>>, D::Error>
where
D: serde::Deserializer<'de>,
{
#[derive(Deserialize)]
#[serde(untagged)]
enum CommentTokens {
Multiple(Vec<String>),
Single(String),
}
Ok(
Option::<CommentTokens>::deserialize(deserializer)?.map(|tokens| match tokens {
CommentTokens::Single(val) => vec![val],
CommentTokens::Multiple(vals) => vals,
}),
)
}
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct BlockCommentToken {
pub start: String,
pub end: String,
}
impl Default for BlockCommentToken {
fn default() -> Self {
BlockCommentToken {
start: "/*".to_string(),
end: "*/".to_string(),
}
}
}
fn from_block_comment_tokens<'de, D>(
deserializer: D,
) -> Result<Option<Vec<BlockCommentToken>>, D::Error>
where
D: serde::Deserializer<'de>,
{
#[derive(Deserialize)]
#[serde(untagged)]
enum BlockCommentTokens {
Multiple(Vec<BlockCommentToken>),
Single(BlockCommentToken),
}
Ok(
Option::<BlockCommentTokens>::deserialize(deserializer)?.map(|tokens| match tokens {
BlockCommentTokens::Single(val) => vec![val],
BlockCommentTokens::Multiple(vals) => vals,
}),
)
}
#[derive(Clone, Copy, Debug, Serialize, Deserialize, PartialEq, Eq, Hash)] #[derive(Clone, Copy, Debug, Serialize, Deserialize, PartialEq, Eq, Hash)]
#[serde(rename_all = "kebab-case")] #[serde(rename_all = "kebab-case")]
pub enum LanguageServerFeature { pub enum LanguageServerFeature {
@ -1961,11 +2027,16 @@ impl HighlightConfiguration {
node_slice node_slice
}; };
static SHEBANG_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(SHEBANG).unwrap()); static SHEBANG_REGEX: Lazy<rope::Regex> =
Lazy::new(|| rope::Regex::new(SHEBANG).unwrap());
injection_capture = SHEBANG_REGEX injection_capture = SHEBANG_REGEX
.captures(&Cow::from(lines)) .captures_iter(lines.regex_input())
.map(|cap| InjectionLanguageMarker::Shebang(cap[1].to_owned())) .map(|cap| {
let cap = lines.byte_slice(cap.get_group(1).unwrap().range());
InjectionLanguageMarker::Shebang(cap.into())
})
.next()
} else if index == self.injection_content_capture_index { } else if index == self.injection_content_capture_index {
content_node = Some(capture.node); content_node = Some(capture.node);
} }

@ -4,6 +4,7 @@ use helix_core::{
syntax::{Configuration, Loader}, syntax::{Configuration, Loader},
Syntax, Syntax,
}; };
use helix_stdx::rope::RopeSliceExt;
use ropey::Rope; use ropey::Rope;
use std::{ops::Range, path::PathBuf, process::Command, sync::Arc}; use std::{ops::Range, path::PathBuf, process::Command, sync::Arc};
@ -211,7 +212,7 @@ fn test_treesitter_indent(
if ignored_lines.iter().any(|range| range.contains(&(i + 1))) { if ignored_lines.iter().any(|range| range.contains(&(i + 1))) {
continue; continue;
} }
if let Some(pos) = helix_core::find_first_non_whitespace_char(line) { if let Some(pos) = line.first_non_whitespace_char() {
let tab_width: usize = 4; let tab_width: usize = 4;
let suggested_indent = treesitter_indent_for_pos( let suggested_indent = treesitter_indent_for_pos(
indent_query, indent_query,

@ -12,7 +12,7 @@ homepage.workspace = true
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies] [dependencies]
ahash = "0.8.3" ahash = "0.8.11"
hashbrown = "0.14.0" hashbrown = "0.14.0"
tokio = { version = "1", features = ["rt", "rt-multi-thread", "time", "sync", "parking_lot", "macros"] } tokio = { version = "1", features = ["rt", "rt-multi-thread", "time", "sync", "parking_lot", "macros"] }
# the event registry is essentially read only but must be an rwlock so we can # the event registry is essentially read only but must be an rwlock so we can

@ -30,7 +30,7 @@ log = "0.4"
# cloning/compiling tree-sitter grammars # cloning/compiling tree-sitter grammars
cc = { version = "1" } cc = { version = "1" }
threadpool = { version = "1.0" } threadpool = { version = "1.0" }
tempfile = "3.10.0" tempfile = "3.10.1"
dunce = "1.0.4" dunce = "1.0.4"
[target.'cfg(not(target_arch = "wasm32"))'.dependencies] [target.'cfg(not(target_arch = "wasm32"))'.dependencies]

@ -53,7 +53,7 @@ fn prioritize_runtime_dirs() -> Vec<PathBuf> {
rt_dirs.push(conf_rt_dir); rt_dirs.push(conf_rt_dir);
if let Ok(dir) = std::env::var("HELIX_RUNTIME") { if let Ok(dir) = std::env::var("HELIX_RUNTIME") {
let dir = path::expand_tilde(dir); let dir = path::expand_tilde(Path::new(&dir));
rt_dirs.push(path::normalize(dir)); rt_dirs.push(path::normalize(dir));
} }

@ -631,6 +631,12 @@ impl Client {
}), }),
publish_diagnostics: Some(lsp::PublishDiagnosticsClientCapabilities { publish_diagnostics: Some(lsp::PublishDiagnosticsClientCapabilities {
version_support: Some(true), version_support: Some(true),
tag_support: Some(lsp::TagSupport {
value_set: vec![
lsp::DiagnosticTag::UNNECESSARY,
lsp::DiagnosticTag::DEPRECATED,
],
}),
..Default::default() ..Default::default()
}), }),
inlay_hint: Some(lsp::InlayHintClientCapabilities { inlay_hint: Some(lsp::InlayHintClientCapabilities {
@ -1017,7 +1023,7 @@ impl Client {
pub fn resolve_completion_item( pub fn resolve_completion_item(
&self, &self,
completion_item: lsp::CompletionItem, completion_item: lsp::CompletionItem,
) -> Option<impl Future<Output = Result<Value>>> { ) -> Option<impl Future<Output = Result<lsp::CompletionItem>>> {
let capabilities = self.capabilities.get().unwrap(); let capabilities = self.capabilities.get().unwrap();
// Return early if the server does not support resolving completion items. // Return early if the server does not support resolving completion items.
@ -1029,7 +1035,8 @@ impl Client {
_ => return None, _ => return None,
} }
Some(self.call::<lsp::request::ResolveCompletionItem>(completion_item)) let res = self.call::<lsp::request::ResolveCompletionItem>(completion_item);
Some(async move { Ok(serde_json::from_value(res.await?)?) })
} }
pub fn resolve_code_action( pub fn resolve_code_action(

@ -16,6 +16,7 @@ dunce = "1.0"
etcetera = "0.8" etcetera = "0.8"
ropey = { version = "1.6.1", default-features = false } ropey = { version = "1.6.1", default-features = false }
which = "6.0" which = "6.0"
regex-cursor = "0.1.3"
[dev-dependencies] [dev-dependencies]
tempfile = "3.10" tempfile = "3.10"

@ -1,6 +1,9 @@
pub use etcetera::home_dir; pub use etcetera::home_dir;
use std::path::{Component, Path, PathBuf}; use std::{
borrow::Cow,
path::{Component, Path, PathBuf},
};
use crate::env::current_working_dir; use crate::env::current_working_dir;
@ -19,19 +22,22 @@ pub fn fold_home_dir(path: &Path) -> PathBuf {
/// Expands tilde `~` into users home directory if available, otherwise returns the path /// Expands tilde `~` into users home directory if available, otherwise returns the path
/// unchanged. The tilde will only be expanded when present as the first component of the path /// unchanged. The tilde will only be expanded when present as the first component of the path
/// and only slash follows it. /// and only slash follows it.
pub fn expand_tilde(path: impl AsRef<Path>) -> PathBuf { pub fn expand_tilde<'a, P>(path: P) -> Cow<'a, Path>
let path = path.as_ref(); where
let mut components = path.components().peekable(); P: Into<Cow<'a, Path>>,
if let Some(Component::Normal(c)) = components.peek() { {
if c == &"~" { let path = path.into();
if let Ok(home) = home_dir() { let mut components = path.components();
// it's ok to unwrap, the path starts with `~` if let Some(Component::Normal(c)) = components.next() {
return home.join(path.strip_prefix("~").unwrap()); if c == "~" {
if let Ok(mut buf) = home_dir() {
buf.push(components);
return Cow::Owned(buf);
} }
} }
} }
path.to_path_buf() path
} }
/// Normalize a path without resolving symlinks. /// Normalize a path without resolving symlinks.
@ -109,9 +115,9 @@ pub fn normalize(path: impl AsRef<Path>) -> PathBuf {
/// This function is used instead of [`std::fs::canonicalize`] because we don't want to verify /// This function is used instead of [`std::fs::canonicalize`] because we don't want to verify
/// here if the path exists, just normalize it's components. /// here if the path exists, just normalize it's components.
pub fn canonicalize(path: impl AsRef<Path>) -> PathBuf { pub fn canonicalize(path: impl AsRef<Path>) -> PathBuf {
let path = expand_tilde(path); let path = expand_tilde(path.as_ref());
let path = if path.is_relative() { let path = if path.is_relative() {
current_working_dir().join(path) Cow::Owned(current_working_dir().join(path))
} else { } else {
path path
}; };
@ -183,3 +189,32 @@ pub fn get_truncated_path(path: impl AsRef<Path>) -> PathBuf {
ret.push(file); ret.push(file);
ret ret
} }
#[cfg(test)]
mod tests {
use std::{
ffi::OsStr,
path::{Component, Path},
};
use crate::path;
#[test]
fn expand_tilde() {
for path in ["~", "~/foo"] {
let expanded = path::expand_tilde(Path::new(path));
let tilde = Component::Normal(OsStr::new("~"));
let mut component_count = 0;
for component in expanded.components() {
// No tilde left.
assert_ne!(component, tilde);
component_count += 1;
}
// The path was at least expanded to something.
assert_ne!(component_count, 0);
}
}
}

@ -1,11 +1,24 @@
use std::ops::{Bound, RangeBounds};
pub use regex_cursor::engines::meta::{Builder as RegexBuilder, Regex};
pub use regex_cursor::regex_automata::util::syntax::Config;
use regex_cursor::{Input as RegexInput, RopeyCursor};
use ropey::RopeSlice; use ropey::RopeSlice;
pub trait RopeSliceExt: Sized { pub trait RopeSliceExt<'a>: Sized {
fn ends_with(self, text: &str) -> bool; fn ends_with(self, text: &str) -> bool;
fn starts_with(self, text: &str) -> bool; fn starts_with(self, text: &str) -> bool;
fn regex_input(self) -> RegexInput<RopeyCursor<'a>>;
fn regex_input_at_bytes<R: RangeBounds<usize>>(
self,
byte_range: R,
) -> RegexInput<RopeyCursor<'a>>;
fn regex_input_at<R: RangeBounds<usize>>(self, char_range: R) -> RegexInput<RopeyCursor<'a>>;
fn first_non_whitespace_char(self) -> Option<usize>;
fn last_non_whitespace_char(self) -> Option<usize>;
} }
impl RopeSliceExt for RopeSlice<'_> { impl<'a> RopeSliceExt<'a> for RopeSlice<'a> {
fn ends_with(self, text: &str) -> bool { fn ends_with(self, text: &str) -> bool {
let len = self.len_bytes(); let len = self.len_bytes();
if len < text.len() { if len < text.len() {
@ -23,4 +36,43 @@ impl RopeSliceExt for RopeSlice<'_> {
self.get_byte_slice(..len - text.len()) self.get_byte_slice(..len - text.len())
.map_or(false, |start| start == text) .map_or(false, |start| start == text)
} }
fn regex_input(self) -> RegexInput<RopeyCursor<'a>> {
RegexInput::new(self)
}
fn regex_input_at<R: RangeBounds<usize>>(self, char_range: R) -> RegexInput<RopeyCursor<'a>> {
let start_bound = match char_range.start_bound() {
Bound::Included(&val) => Bound::Included(self.char_to_byte(val)),
Bound::Excluded(&val) => Bound::Excluded(self.char_to_byte(val)),
Bound::Unbounded => Bound::Unbounded,
};
let end_bound = match char_range.end_bound() {
Bound::Included(&val) => Bound::Included(self.char_to_byte(val)),
Bound::Excluded(&val) => Bound::Excluded(self.char_to_byte(val)),
Bound::Unbounded => Bound::Unbounded,
};
self.regex_input_at_bytes((start_bound, end_bound))
}
fn regex_input_at_bytes<R: RangeBounds<usize>>(
self,
byte_range: R,
) -> RegexInput<RopeyCursor<'a>> {
let input = match byte_range.start_bound() {
Bound::Included(&pos) | Bound::Excluded(&pos) => {
RegexInput::new(RopeyCursor::at(self, pos))
}
Bound::Unbounded => RegexInput::new(self),
};
input.range(byte_range)
}
fn first_non_whitespace_char(self) -> Option<usize> {
self.chars().position(|ch| !ch.is_whitespace())
}
fn last_non_whitespace_char(self) -> Option<usize> {
self.chars_at(self.len_chars())
.reversed()
.position(|ch| !ch.is_whitespace())
.map(|pos| self.len_chars() - pos - 1)
}
} }

@ -41,7 +41,7 @@ crossterm = { version = "0.27", features = ["event-stream"] }
signal-hook = "0.3" signal-hook = "0.3"
tokio-stream = "0.1" tokio-stream = "0.1"
futures-util = { version = "0.3", features = ["std", "async-await"], default-features = false } futures-util = { version = "0.3", features = ["std", "async-await"], default-features = false }
arc-swap = { version = "1.6.0" } arc-swap = { version = "1.7.0" }
termini = "1" termini = "1"
# Logging # Logging
@ -84,4 +84,4 @@ helix-loader = { path = "../helix-loader" }
[dev-dependencies] [dev-dependencies]
smallvec = "1.13" smallvec = "1.13"
indoc = "2.0.4" indoc = "2.0.4"
tempfile = "3.10.0" tempfile = "3.10.1"

@ -724,7 +724,7 @@ impl Application {
} }
Notification::PublishDiagnostics(mut params) => { Notification::PublishDiagnostics(mut params) => {
let path = match params.uri.to_file_path() { let path = match params.uri.to_file_path() {
Ok(path) => path, Ok(path) => helix_stdx::path::normalize(&path),
Err(_) => { Err(_) => {
log::error!("Unsupported file URI: {}", params.uri); log::error!("Unsupported file URI: {}", params.uri);
return; return;
@ -753,9 +753,7 @@ impl Application {
let lang_conf = doc.language.clone(); let lang_conf = doc.language.clone();
if let Some(lang_conf) = &lang_conf { if let Some(lang_conf) = &lang_conf {
if let Some(old_diagnostics) = if let Some(old_diagnostics) = self.editor.diagnostics.get(&path) {
self.editor.diagnostics.get(&params.uri)
{
if !lang_conf.persistent_diagnostic_sources.is_empty() { if !lang_conf.persistent_diagnostic_sources.is_empty() {
// Sort diagnostics first by severity and then by line numbers. // Sort diagnostics first by severity and then by line numbers.
// Note: The `lsp::DiagnosticSeverity` enum is already defined in decreasing order // Note: The `lsp::DiagnosticSeverity` enum is already defined in decreasing order
@ -788,7 +786,7 @@ impl Application {
// Insert the original lsp::Diagnostics here because we may have no open document // Insert the original lsp::Diagnostics here because we may have no open document
// for diagnosic message and so we can't calculate the exact position. // for diagnosic message and so we can't calculate the exact position.
// When using them later in the diagnostics picker, we calculate them on-demand. // When using them later in the diagnostics picker, we calculate them on-demand.
let diagnostics = match self.editor.diagnostics.entry(params.uri) { let diagnostics = match self.editor.diagnostics.entry(path) {
Entry::Occupied(o) => { Entry::Occupied(o) => {
let current_diagnostics = o.into_mut(); let current_diagnostics = o.into_mut();
// there may entries of other language servers, which is why we can't overwrite the whole entry // there may entries of other language servers, which is why we can't overwrite the whole entry

@ -3,6 +3,7 @@ pub(crate) mod lsp;
pub(crate) mod typed; pub(crate) mod typed;
pub use dap::*; pub use dap::*;
use helix_stdx::rope::{self, RopeSliceExt};
use helix_vcs::Hunk; use helix_vcs::Hunk;
pub use lsp::*; pub use lsp::*;
use tui::widgets::Row; use tui::widgets::Row;
@ -11,7 +12,7 @@ pub use typed::*;
use helix_core::{ use helix_core::{
char_idx_at_visual_offset, comment, char_idx_at_visual_offset, comment,
doc_formatter::TextFormat, doc_formatter::TextFormat,
encoding, find_first_non_whitespace_char, find_workspace, graphemes, encoding, find_workspace, graphemes,
history::UndoKind, history::UndoKind,
increment, indent, increment, indent,
indent::IndentStyle, indent::IndentStyle,
@ -19,10 +20,10 @@ use helix_core::{
match_brackets, match_brackets,
movement::{self, move_vertically_visual, Direction}, movement::{self, move_vertically_visual, Direction},
object, pos_at_coords, object, pos_at_coords,
regex::{self, Regex, RegexBuilder}, regex::{self, Regex},
search::{self, CharMatcher}, search::{self, CharMatcher},
selection, shellwords, surround, selection, shellwords, surround,
syntax::LanguageServerFeature, syntax::{BlockCommentToken, LanguageServerFeature},
text_annotations::TextAnnotations, text_annotations::TextAnnotations,
textobject, textobject,
tree_sitter::Node, tree_sitter::Node,
@ -277,6 +278,10 @@ impl MappableCommand {
page_down, "Move page down", page_down, "Move page down",
half_page_up, "Move half page up", half_page_up, "Move half page up",
half_page_down, "Move half page down", half_page_down, "Move half page down",
page_cursor_up, "Move page and cursor up",
page_cursor_down, "Move page and cursor down",
page_cursor_half_up, "Move page and cursor half up",
page_cursor_half_down, "Move page and cursor half down",
select_all, "Select whole document", select_all, "Select whole document",
select_regex, "Select all regex matches inside selections", select_regex, "Select all regex matches inside selections",
split_selection, "Split selections on regex matches", split_selection, "Split selections on regex matches",
@ -410,6 +415,8 @@ impl MappableCommand {
completion, "Invoke completion popup", completion, "Invoke completion popup",
hover, "Show docs for item under cursor", hover, "Show docs for item under cursor",
toggle_comments, "Comment/uncomment selections", toggle_comments, "Comment/uncomment selections",
toggle_line_comments, "Line comment/uncomment selections",
toggle_block_comments, "Block comment/uncomment selections",
rotate_selections_forward, "Rotate selections forward", rotate_selections_forward, "Rotate selections forward",
rotate_selections_backward, "Rotate selections backward", rotate_selections_backward, "Rotate selections backward",
rotate_selection_contents_forward, "Rotate selection contents forward", rotate_selection_contents_forward, "Rotate selection contents forward",
@ -819,7 +826,7 @@ fn kill_to_line_start(cx: &mut Context) {
let head = if anchor == first_char && line != 0 { let head = if anchor == first_char && line != 0 {
// select until previous line // select until previous line
line_end_char_index(&text, line - 1) line_end_char_index(&text, line - 1)
} else if let Some(pos) = find_first_non_whitespace_char(text.line(line)) { } else if let Some(pos) = text.line(line).first_non_whitespace_char() {
if first_char + pos < anchor { if first_char + pos < anchor {
// select until first non-blank in line if cursor is after it // select until first non-blank in line if cursor is after it
first_char + pos first_char + pos
@ -881,7 +888,7 @@ fn goto_first_nonwhitespace_impl(view: &mut View, doc: &mut Document, movement:
let selection = doc.selection(view.id).clone().transform(|range| { let selection = doc.selection(view.id).clone().transform(|range| {
let line = range.cursor_line(text); let line = range.cursor_line(text);
if let Some(pos) = find_first_non_whitespace_char(text.line(line)) { if let Some(pos) = text.line(line).first_non_whitespace_char() {
let pos = pos + text.line_to_char(line); let pos = pos + text.line_to_char(line);
range.put_cursor(text, pos, movement == Movement::Extend) range.put_cursor(text, pos, movement == Movement::Extend)
} else { } else {
@ -1610,7 +1617,7 @@ fn switch_to_lowercase(cx: &mut Context) {
}); });
} }
pub fn scroll(cx: &mut Context, offset: usize, direction: Direction) { pub fn scroll(cx: &mut Context, offset: usize, direction: Direction, sync_cursor: bool) {
use Direction::*; use Direction::*;
let config = cx.editor.config(); let config = cx.editor.config();
let (view, doc) = current!(cx.editor); let (view, doc) = current!(cx.editor);
@ -1630,7 +1637,7 @@ pub fn scroll(cx: &mut Context, offset: usize, direction: Direction) {
let doc_text = doc.text().slice(..); let doc_text = doc.text().slice(..);
let viewport = view.inner_area(doc); let viewport = view.inner_area(doc);
let text_fmt = doc.text_format(viewport.width, None); let text_fmt = doc.text_format(viewport.width, None);
let annotations = view.text_annotations(doc, None); let mut annotations = view.text_annotations(doc, None);
(view.offset.anchor, view.offset.vertical_offset) = char_idx_at_visual_offset( (view.offset.anchor, view.offset.vertical_offset) = char_idx_at_visual_offset(
doc_text, doc_text,
view.offset.anchor, view.offset.anchor,
@ -1640,6 +1647,30 @@ pub fn scroll(cx: &mut Context, offset: usize, direction: Direction) {
&annotations, &annotations,
); );
if sync_cursor {
let movement = match cx.editor.mode {
Mode::Select => Movement::Extend,
_ => Movement::Move,
};
// TODO: When inline diagnostics gets merged- 1. move_vertically_visual removes
// line annotations/diagnostics so the cursor may jump further than the view.
// 2. If the cursor lands on a complete line of virtual text, the cursor will
// jump a different distance than the view.
let selection = doc.selection(view.id).clone().transform(|range| {
move_vertically_visual(
doc_text,
range,
direction,
offset.unsigned_abs(),
movement,
&text_fmt,
&mut annotations,
)
});
doc.set_selection(view.id, selection);
return;
}
let mut head; let mut head;
match direction { match direction {
Forward => { Forward => {
@ -1690,25 +1721,49 @@ pub fn scroll(cx: &mut Context, offset: usize, direction: Direction) {
fn page_up(cx: &mut Context) { fn page_up(cx: &mut Context) {
let view = view!(cx.editor); let view = view!(cx.editor);
let offset = view.inner_height(); let offset = view.inner_height();
scroll(cx, offset, Direction::Backward); scroll(cx, offset, Direction::Backward, false);
} }
fn page_down(cx: &mut Context) { fn page_down(cx: &mut Context) {
let view = view!(cx.editor); let view = view!(cx.editor);
let offset = view.inner_height(); let offset = view.inner_height();
scroll(cx, offset, Direction::Forward); scroll(cx, offset, Direction::Forward, false);
} }
fn half_page_up(cx: &mut Context) { fn half_page_up(cx: &mut Context) {
let view = view!(cx.editor); let view = view!(cx.editor);
let offset = view.inner_height() / 2; let offset = view.inner_height() / 2;
scroll(cx, offset, Direction::Backward); scroll(cx, offset, Direction::Backward, false);
} }
fn half_page_down(cx: &mut Context) { fn half_page_down(cx: &mut Context) {
let view = view!(cx.editor); let view = view!(cx.editor);
let offset = view.inner_height() / 2; let offset = view.inner_height() / 2;
scroll(cx, offset, Direction::Forward); scroll(cx, offset, Direction::Forward, false);
}
fn page_cursor_up(cx: &mut Context) {
let view = view!(cx.editor);
let offset = view.inner_height();
scroll(cx, offset, Direction::Backward, true);
}
fn page_cursor_down(cx: &mut Context) {
let view = view!(cx.editor);
let offset = view.inner_height();
scroll(cx, offset, Direction::Forward, true);
}
fn page_cursor_half_up(cx: &mut Context) {
let view = view!(cx.editor);
let offset = view.inner_height() / 2;
scroll(cx, offset, Direction::Backward, true);
}
fn page_cursor_half_down(cx: &mut Context) {
let view = view!(cx.editor);
let offset = view.inner_height() / 2;
scroll(cx, offset, Direction::Forward, true);
} }
#[allow(deprecated)] #[allow(deprecated)]
@ -1857,11 +1912,7 @@ fn split_selection(cx: &mut Context) {
fn split_selection_on_newline(cx: &mut Context) { fn split_selection_on_newline(cx: &mut Context) {
let (view, doc) = current!(cx.editor); let (view, doc) = current!(cx.editor);
let text = doc.text().slice(..); let text = doc.text().slice(..);
// only compile the regex once let selection = selection::split_on_newline(text, doc.selection(view.id));
#[allow(clippy::trivial_regex)]
static REGEX: Lazy<Regex> =
Lazy::new(|| Regex::new(r"\r\n|[\n\r\u{000B}\u{000C}\u{0085}\u{2028}\u{2029}]").unwrap());
let selection = selection::split_on_matches(text, doc.selection(view.id), &REGEX);
doc.set_selection(view.id, selection); doc.set_selection(view.id, selection);
} }
@ -1880,8 +1931,7 @@ fn merge_consecutive_selections(cx: &mut Context) {
#[allow(clippy::too_many_arguments)] #[allow(clippy::too_many_arguments)]
fn search_impl( fn search_impl(
editor: &mut Editor, editor: &mut Editor,
contents: &str, regex: &rope::Regex,
regex: &Regex,
movement: Movement, movement: Movement,
direction: Direction, direction: Direction,
scrolloff: usize, scrolloff: usize,
@ -1909,23 +1959,20 @@ fn search_impl(
// do a reverse search and wraparound to the end, we don't need to search // do a reverse search and wraparound to the end, we don't need to search
// the text before the current cursor position for matches, but by slicing // the text before the current cursor position for matches, but by slicing
// it out, we need to add it back to the position of the selection. // it out, we need to add it back to the position of the selection.
let mut offset = 0; let doc = doc!(editor).text().slice(..);
// use find_at to find the next match after the cursor, loop around the end // use find_at to find the next match after the cursor, loop around the end
// Careful, `Regex` uses `bytes` as offsets, not character indices! // Careful, `Regex` uses `bytes` as offsets, not character indices!
let mut mat = match direction { let mut mat = match direction {
Direction::Forward => regex.find_at(contents, start), Direction::Forward => regex.find(doc.regex_input_at_bytes(start..)),
Direction::Backward => regex.find_iter(&contents[..start]).last(), Direction::Backward => regex.find_iter(doc.regex_input_at_bytes(..start)).last(),
}; };
if mat.is_none() { if mat.is_none() {
if wrap_around { if wrap_around {
mat = match direction { mat = match direction {
Direction::Forward => regex.find(contents), Direction::Forward => regex.find(doc.regex_input()),
Direction::Backward => { Direction::Backward => regex.find_iter(doc.regex_input_at_bytes(start..)).last(),
offset = start;
regex.find_iter(&contents[start..]).last()
}
}; };
} }
if show_warnings { if show_warnings {
@ -1942,8 +1989,8 @@ fn search_impl(
let selection = doc.selection(view.id); let selection = doc.selection(view.id);
if let Some(mat) = mat { if let Some(mat) = mat {
let start = text.byte_to_char(mat.start() + offset); let start = text.byte_to_char(mat.start());
let end = text.byte_to_char(mat.end() + offset); let end = text.byte_to_char(mat.end());
if end == 0 { if end == 0 {
// skip empty matches that don't make sense // skip empty matches that don't make sense
@ -1987,13 +2034,7 @@ fn searcher(cx: &mut Context, direction: Direction) {
let scrolloff = config.scrolloff; let scrolloff = config.scrolloff;
let wrap_around = config.search.wrap_around; let wrap_around = config.search.wrap_around;
let doc = doc!(cx.editor);
// TODO: could probably share with select_on_matches? // TODO: could probably share with select_on_matches?
// HAXX: sadly we can't avoid allocating a single string for the whole buffer since we can't
// feed chunks into the regex yet
let contents = doc.text().slice(..).to_string();
let completions = search_completions(cx, Some(reg)); let completions = search_completions(cx, Some(reg));
ui::regex_prompt( ui::regex_prompt(
@ -2015,7 +2056,6 @@ fn searcher(cx: &mut Context, direction: Direction) {
} }
search_impl( search_impl(
cx.editor, cx.editor,
&contents,
&regex, &regex,
Movement::Move, Movement::Move,
direction, direction,
@ -2035,8 +2075,6 @@ fn search_next_or_prev_impl(cx: &mut Context, movement: Movement, direction: Dir
let config = cx.editor.config(); let config = cx.editor.config();
let scrolloff = config.scrolloff; let scrolloff = config.scrolloff;
if let Some(query) = cx.editor.registers.first(register, cx.editor) { if let Some(query) = cx.editor.registers.first(register, cx.editor) {
let doc = doc!(cx.editor);
let contents = doc.text().slice(..).to_string();
let search_config = &config.search; let search_config = &config.search;
let case_insensitive = if search_config.smart_case { let case_insensitive = if search_config.smart_case {
!query.chars().any(char::is_uppercase) !query.chars().any(char::is_uppercase)
@ -2044,15 +2082,17 @@ fn search_next_or_prev_impl(cx: &mut Context, movement: Movement, direction: Dir
false false
}; };
let wrap_around = search_config.wrap_around; let wrap_around = search_config.wrap_around;
if let Ok(regex) = RegexBuilder::new(&query) if let Ok(regex) = rope::RegexBuilder::new()
.syntax(
rope::Config::new()
.case_insensitive(case_insensitive) .case_insensitive(case_insensitive)
.multi_line(true) .multi_line(true),
.build() )
.build(&query)
{ {
for _ in 0..count { for _ in 0..count {
search_impl( search_impl(
cx.editor, cx.editor,
&contents,
&regex, &regex,
movement, movement,
direction, direction,
@ -2189,7 +2229,7 @@ fn global_search(cx: &mut Context) {
let reg = cx.register.unwrap_or('/'); let reg = cx.register.unwrap_or('/');
let completions = search_completions(cx, Some(reg)); let completions = search_completions(cx, Some(reg));
ui::regex_prompt( ui::raw_regex_prompt(
cx, cx,
"global-search:".into(), "global-search:".into(),
Some(reg), Some(reg),
@ -2200,7 +2240,7 @@ fn global_search(cx: &mut Context) {
.map(|comp| (0.., std::borrow::Cow::Owned(comp.clone()))) .map(|comp| (0.., std::borrow::Cow::Owned(comp.clone())))
.collect() .collect()
}, },
move |cx, regex, event| { move |cx, _, input, event| {
if event != PromptEvent::Validate { if event != PromptEvent::Validate {
return; return;
} }
@ -2215,7 +2255,7 @@ fn global_search(cx: &mut Context) {
if let Ok(matcher) = RegexMatcherBuilder::new() if let Ok(matcher) = RegexMatcherBuilder::new()
.case_smart(smart_case) .case_smart(smart_case)
.build(regex.as_str()) .build(input)
{ {
let search_root = helix_stdx::env::current_working_dir(); let search_root = helix_stdx::env::current_working_dir();
if !search_root.exists() { if !search_root.exists() {
@ -3051,11 +3091,11 @@ fn insert_with_indent(cx: &mut Context, cursor_fallback: IndentFallbackPos) {
} else { } else {
// move cursor to the fallback position // move cursor to the fallback position
let pos = match cursor_fallback { let pos = match cursor_fallback {
IndentFallbackPos::LineStart => { IndentFallbackPos::LineStart => text
find_first_non_whitespace_char(text.line(cursor_line)) .line(cursor_line)
.first_non_whitespace_char()
.map(|ws_offset| ws_offset + cursor_line_start) .map(|ws_offset| ws_offset + cursor_line_start)
.unwrap_or(cursor_line_start) .unwrap_or(cursor_line_start),
}
IndentFallbackPos::LineEnd => line_end_char_index(&text, cursor_line), IndentFallbackPos::LineEnd => line_end_char_index(&text, cursor_line),
}; };
@ -4334,16 +4374,27 @@ fn join_selections_impl(cx: &mut Context, select_space: bool) {
// select inserted spaces // select inserted spaces
let transaction = if select_space { let transaction = if select_space {
let mut offset: usize = 0;
let ranges: SmallVec<_> = changes let ranges: SmallVec<_> = changes
.iter() .iter()
.scan(0, |offset, change| { .filter_map(|change| {
let range = Range::point(change.0 - *offset); if change.2.is_some() {
*offset += change.1 - change.0 - 1; // -1 because cursor is 0-sized let range = Range::point(change.0 - offset);
offset += change.1 - change.0 - 1; // -1 adjusts for the replacement of the range by a space
Some(range) Some(range)
} else {
offset += change.1 - change.0;
None
}
}) })
.collect(); .collect();
let t = Transaction::change(text, changes.into_iter());
if ranges.is_empty() {
t
} else {
let selection = Selection::new(ranges, 0); let selection = Selection::new(ranges, 0);
Transaction::change(text, changes.into_iter()).with_selection(selection) t.with_selection(selection)
}
} else { } else {
Transaction::change(text, changes.into_iter()) Transaction::change(text, changes.into_iter())
}; };
@ -4426,18 +4477,124 @@ pub fn completion(cx: &mut Context) {
} }
// comments // comments
fn toggle_comments(cx: &mut Context) { type CommentTransactionFn = fn(
line_token: Option<&str>,
block_tokens: Option<&[BlockCommentToken]>,
doc: &Rope,
selection: &Selection,
) -> Transaction;
fn toggle_comments_impl(cx: &mut Context, comment_transaction: CommentTransactionFn) {
let (view, doc) = current!(cx.editor); let (view, doc) = current!(cx.editor);
let token = doc let line_token: Option<&str> = doc
.language_config()
.and_then(|lc| lc.comment_tokens.as_ref())
.and_then(|tc| tc.first())
.map(|tc| tc.as_str());
let block_tokens: Option<&[BlockCommentToken]> = doc
.language_config() .language_config()
.and_then(|lc| lc.comment_token.as_ref()) .and_then(|lc| lc.block_comment_tokens.as_ref())
.map(|tc| tc.as_ref()); .map(|tc| &tc[..]);
let transaction = comment::toggle_line_comments(doc.text(), doc.selection(view.id), token);
let transaction =
comment_transaction(line_token, block_tokens, doc.text(), doc.selection(view.id));
doc.apply(&transaction, view.id); doc.apply(&transaction, view.id);
exit_select_mode(cx); exit_select_mode(cx);
} }
/// commenting behavior:
/// 1. only line comment tokens -> line comment
/// 2. each line block commented -> uncomment all lines
/// 3. whole selection block commented -> uncomment selection
/// 4. all lines not commented and block tokens -> comment uncommented lines
/// 5. no comment tokens and not block commented -> line comment
fn toggle_comments(cx: &mut Context) {
toggle_comments_impl(cx, |line_token, block_tokens, doc, selection| {
let text = doc.slice(..);
// only have line comment tokens
if line_token.is_some() && block_tokens.is_none() {
return comment::toggle_line_comments(doc, selection, line_token);
}
let split_lines = comment::split_lines_of_selection(text, selection);
let default_block_tokens = &[BlockCommentToken::default()];
let block_comment_tokens = block_tokens.unwrap_or(default_block_tokens);
let (line_commented, line_comment_changes) =
comment::find_block_comments(block_comment_tokens, text, &split_lines);
// block commented by line would also be block commented so check this first
if line_commented {
return comment::create_block_comment_transaction(
doc,
&split_lines,
line_commented,
line_comment_changes,
)
.0;
}
let (block_commented, comment_changes) =
comment::find_block_comments(block_comment_tokens, text, selection);
// check if selection has block comments
if block_commented {
return comment::create_block_comment_transaction(
doc,
selection,
block_commented,
comment_changes,
)
.0;
}
// not commented and only have block comment tokens
if line_token.is_none() && block_tokens.is_some() {
return comment::create_block_comment_transaction(
doc,
&split_lines,
line_commented,
line_comment_changes,
)
.0;
}
// not block commented at all and don't have any tokens
comment::toggle_line_comments(doc, selection, line_token)
})
}
fn toggle_line_comments(cx: &mut Context) {
toggle_comments_impl(cx, |line_token, block_tokens, doc, selection| {
if line_token.is_none() && block_tokens.is_some() {
let default_block_tokens = &[BlockCommentToken::default()];
let block_comment_tokens = block_tokens.unwrap_or(default_block_tokens);
comment::toggle_block_comments(
doc,
&comment::split_lines_of_selection(doc.slice(..), selection),
block_comment_tokens,
)
} else {
comment::toggle_line_comments(doc, selection, line_token)
}
});
}
fn toggle_block_comments(cx: &mut Context) {
toggle_comments_impl(cx, |line_token, block_tokens, doc, selection| {
if line_token.is_some() && block_tokens.is_none() {
comment::toggle_line_comments(doc, selection, line_token)
} else {
let default_block_tokens = &[BlockCommentToken::default()];
let block_comment_tokens = block_tokens.unwrap_or(default_block_tokens);
comment::toggle_block_comments(doc, selection, block_comment_tokens)
}
});
}
fn rotate_selections(cx: &mut Context, direction: Direction) { fn rotate_selections(cx: &mut Context, direction: Direction) {
let count = cx.count(); let count = cx.count();
let (view, doc) = current!(cx.editor); let (view, doc) = current!(cx.editor);
@ -4858,11 +5015,11 @@ fn align_view_middle(cx: &mut Context) {
} }
fn scroll_up(cx: &mut Context) { fn scroll_up(cx: &mut Context) {
scroll(cx, cx.count(), Direction::Backward); scroll(cx, cx.count(), Direction::Backward, false);
} }
fn scroll_down(cx: &mut Context) { fn scroll_down(cx: &mut Context) {
scroll(cx, cx.count(), Direction::Forward); scroll(cx, cx.count(), Direction::Forward, false);
} }
fn goto_ts_object_impl(cx: &mut Context, object: &'static str, direction: Direction) { fn goto_ts_object_impl(cx: &mut Context, object: &'static str, direction: Direction) {

@ -41,7 +41,7 @@ use std::{
collections::{BTreeMap, HashSet}, collections::{BTreeMap, HashSet},
fmt::Write, fmt::Write,
future::Future, future::Future,
path::PathBuf, path::{Path, PathBuf},
}; };
/// Gets the first language server that is attached to a document which supports a specific feature. /// Gets the first language server that is attached to a document which supports a specific feature.
@ -137,7 +137,7 @@ struct DiagnosticStyles {
} }
struct PickerDiagnostic { struct PickerDiagnostic {
url: lsp::Url, path: PathBuf,
diag: lsp::Diagnostic, diag: lsp::Diagnostic,
offset_encoding: OffsetEncoding, offset_encoding: OffsetEncoding,
} }
@ -170,8 +170,7 @@ impl ui::menu::Item for PickerDiagnostic {
let path = match format { let path = match format {
DiagnosticsFormat::HideSourcePath => String::new(), DiagnosticsFormat::HideSourcePath => String::new(),
DiagnosticsFormat::ShowSourcePath => { DiagnosticsFormat::ShowSourcePath => {
let file_path = self.url.to_file_path().unwrap(); let path = path::get_truncated_path(&self.path);
let path = path::get_truncated_path(file_path);
format!("{}: ", path.to_string_lossy()) format!("{}: ", path.to_string_lossy())
} }
}; };
@ -211,22 +210,31 @@ fn jump_to_location(
return; return;
} }
}; };
jump_to_position(editor, &path, location.range, offset_encoding, action);
}
let doc = match editor.open(&path, action) { fn jump_to_position(
editor: &mut Editor,
path: &Path,
range: lsp::Range,
offset_encoding: OffsetEncoding,
action: Action,
) {
let doc = match editor.open(path, action) {
Ok(id) => doc_mut!(editor, &id), Ok(id) => doc_mut!(editor, &id),
Err(err) => { Err(err) => {
let err = format!("failed to open path: {:?}: {:?}", location.uri, err); let err = format!("failed to open path: {:?}: {:?}", path, err);
editor.set_error(err); editor.set_error(err);
return; return;
} }
}; };
let view = view_mut!(editor); let view = view_mut!(editor);
// TODO: convert inside server // TODO: convert inside server
let new_range = let new_range = if let Some(new_range) = lsp_range_to_range(doc.text(), range, offset_encoding)
if let Some(new_range) = lsp_range_to_range(doc.text(), location.range, offset_encoding) { {
new_range new_range
} else { } else {
log::warn!("lsp position out of bounds - {:?}", location.range); log::warn!("lsp position out of bounds - {:?}", range);
return; return;
}; };
// we flip the range so that the cursor sits on the start of the symbol // we flip the range so that the cursor sits on the start of the symbol
@ -261,21 +269,20 @@ enum DiagnosticsFormat {
fn diag_picker( fn diag_picker(
cx: &Context, cx: &Context,
diagnostics: BTreeMap<lsp::Url, Vec<(lsp::Diagnostic, usize)>>, diagnostics: BTreeMap<PathBuf, Vec<(lsp::Diagnostic, usize)>>,
_current_path: Option<lsp::Url>,
format: DiagnosticsFormat, format: DiagnosticsFormat,
) -> Picker<PickerDiagnostic> { ) -> Picker<PickerDiagnostic> {
// TODO: drop current_path comparison and instead use workspace: bool flag? // TODO: drop current_path comparison and instead use workspace: bool flag?
// flatten the map to a vec of (url, diag) pairs // flatten the map to a vec of (url, diag) pairs
let mut flat_diag = Vec::new(); let mut flat_diag = Vec::new();
for (url, diags) in diagnostics { for (path, diags) in diagnostics {
flat_diag.reserve(diags.len()); flat_diag.reserve(diags.len());
for (diag, ls) in diags { for (diag, ls) in diags {
if let Some(ls) = cx.editor.language_server_by_id(ls) { if let Some(ls) = cx.editor.language_server_by_id(ls) {
flat_diag.push(PickerDiagnostic { flat_diag.push(PickerDiagnostic {
url: url.clone(), path: path.clone(),
diag, diag,
offset_encoding: ls.offset_encoding(), offset_encoding: ls.offset_encoding(),
}); });
@ -295,22 +302,17 @@ fn diag_picker(
(styles, format), (styles, format),
move |cx, move |cx,
PickerDiagnostic { PickerDiagnostic {
url, path,
diag, diag,
offset_encoding, offset_encoding,
}, },
action| { action| {
jump_to_location( jump_to_position(cx.editor, path, diag.range, *offset_encoding, action)
cx.editor,
&lsp::Location::new(url.clone(), diag.range),
*offset_encoding,
action,
)
}, },
) )
.with_preview(move |_editor, PickerDiagnostic { url, diag, .. }| { .with_preview(move |_editor, PickerDiagnostic { path, diag, .. }| {
let location = lsp::Location::new(url.clone(), diag.range); let line = Some((diag.range.start.line as usize, diag.range.end.line as usize));
Some(location_to_file_location(&location)) Some((path.clone().into(), line))
}) })
.truncate_start(false) .truncate_start(false)
} }
@ -473,17 +475,16 @@ pub fn workspace_symbol_picker(cx: &mut Context) {
pub fn diagnostics_picker(cx: &mut Context) { pub fn diagnostics_picker(cx: &mut Context) {
let doc = doc!(cx.editor); let doc = doc!(cx.editor);
if let Some(current_url) = doc.url() { if let Some(current_path) = doc.path() {
let diagnostics = cx let diagnostics = cx
.editor .editor
.diagnostics .diagnostics
.get(&current_url) .get(current_path)
.cloned() .cloned()
.unwrap_or_default(); .unwrap_or_default();
let picker = diag_picker( let picker = diag_picker(
cx, cx,
[(current_url.clone(), diagnostics)].into(), [(current_path.clone(), diagnostics)].into(),
Some(current_url),
DiagnosticsFormat::HideSourcePath, DiagnosticsFormat::HideSourcePath,
); );
cx.push_layer(Box::new(overlaid(picker))); cx.push_layer(Box::new(overlaid(picker)));
@ -491,16 +492,9 @@ pub fn diagnostics_picker(cx: &mut Context) {
} }
pub fn workspace_diagnostics_picker(cx: &mut Context) { pub fn workspace_diagnostics_picker(cx: &mut Context) {
let doc = doc!(cx.editor);
let current_url = doc.url();
// TODO not yet filtered by LanguageServerFeature, need to do something similar as Document::shown_diagnostics here for all open documents // TODO not yet filtered by LanguageServerFeature, need to do something similar as Document::shown_diagnostics here for all open documents
let diagnostics = cx.editor.diagnostics.clone(); let diagnostics = cx.editor.diagnostics.clone();
let picker = diag_picker( let picker = diag_picker(cx, diagnostics, DiagnosticsFormat::ShowSourcePath);
cx,
diagnostics,
current_url,
DiagnosticsFormat::ShowSourcePath,
);
cx.push_layer(Box::new(overlaid(picker))); cx.push_layer(Box::new(overlaid(picker)));
} }

@ -110,14 +110,14 @@ fn open(cx: &mut compositor::Context, args: &[Cow<str>], event: PromptEvent) ->
ensure!(!args.is_empty(), "wrong argument count"); ensure!(!args.is_empty(), "wrong argument count");
for arg in args { for arg in args {
let (path, pos) = args::parse_file(arg); let (path, pos) = args::parse_file(arg);
let path = helix_stdx::path::expand_tilde(&path); let path = helix_stdx::path::expand_tilde(path);
// If the path is a directory, open a file picker on that directory and update the status // If the path is a directory, open a file picker on that directory and update the status
// message // message
if let Ok(true) = std::fs::canonicalize(&path).map(|p| p.is_dir()) { if let Ok(true) = std::fs::canonicalize(&path).map(|p| p.is_dir()) {
let callback = async move { let callback = async move {
let call: job::Callback = job::Callback::EditorCompositor(Box::new( let call: job::Callback = job::Callback::EditorCompositor(Box::new(
move |editor: &mut Editor, compositor: &mut Compositor| { move |editor: &mut Editor, compositor: &mut Compositor| {
let picker = ui::file_picker(path, &editor.config()); let picker = ui::file_picker(path.into_owned(), &editor.config());
compositor.push(Box::new(overlaid(picker))); compositor.push(Box::new(overlaid(picker)));
}, },
)); ));
@ -1078,11 +1078,11 @@ fn change_current_directory(
return Ok(()); return Ok(());
} }
let dir = helix_stdx::path::expand_tilde( let dir = args
args.first() .first()
.context("target directory not provided")? .context("target directory not provided")?
.as_ref(), .as_ref();
); let dir = helix_stdx::path::expand_tilde(Path::new(dir));
helix_stdx::env::set_current_working_dir(dir)?; helix_stdx::env::set_current_working_dir(dir)?;

@ -221,10 +221,18 @@ fn request_completion(
.iter() .iter()
.find(|&trigger| trigger_text.ends_with(trigger)) .find(|&trigger| trigger_text.ends_with(trigger))
}); });
if trigger_char.is_some() {
lsp::CompletionContext { lsp::CompletionContext {
trigger_kind: lsp::CompletionTriggerKind::TRIGGER_CHARACTER, trigger_kind: lsp::CompletionTriggerKind::TRIGGER_CHARACTER,
trigger_character: trigger_char.cloned(), trigger_character: trigger_char.cloned(),
} }
} else {
lsp::CompletionContext {
trigger_kind: lsp::CompletionTriggerKind::INVOKED,
trigger_character: None,
}
}
}; };
let completion_response = ls.completion(doc_id, pos, None, context).unwrap(); let completion_response = ls.completion(doc_id, pos, None, context).unwrap();

@ -303,6 +303,15 @@ impl Keymaps {
self.sticky.as_ref() self.sticky.as_ref()
} }
pub fn contains_key(&self, mode: Mode, key: KeyEvent) -> bool {
let keymaps = &*self.map();
let keymap = &keymaps[&mode];
keymap
.search(self.pending())
.and_then(KeyTrie::node)
.is_some_and(|node| node.contains_key(&key))
}
/// Lookup `key` in the keymap to try and find a command to execute. Escape /// Lookup `key` in the keymap to try and find a command to execute. Escape
/// key cancels pending keystrokes. If there are no pending keystrokes but a /// key cancels pending keystrokes. If there are no pending keystrokes but a
/// sticky node is in use, it will be cleared. /// sticky node is in use, it will be cleared.

@ -180,8 +180,8 @@ pub fn default() -> HashMap<Mode, KeyTrie> {
"esc" => normal_mode, "esc" => normal_mode,
"C-b" | "pageup" => page_up, "C-b" | "pageup" => page_up,
"C-f" | "pagedown" => page_down, "C-f" | "pagedown" => page_down,
"C-u" => half_page_up, "C-u" => page_cursor_half_up,
"C-d" => half_page_down, "C-d" => page_cursor_half_down,
"C-w" => { "Window" "C-w" => { "Window"
"C-w" | "w" => rotate_view, "C-w" | "w" => rotate_view,
@ -278,6 +278,9 @@ pub fn default() -> HashMap<Mode, KeyTrie> {
"k" => hover, "k" => hover,
"r" => rename_symbol, "r" => rename_symbol,
"h" => select_references_to_symbol_under_cursor, "h" => select_references_to_symbol_under_cursor,
"c" => toggle_comments,
"C" => toggle_block_comments,
"A-c" => toggle_line_comments,
"?" => command_palette, "?" => command_palette,
}, },
"z" => { "View" "z" => { "View"
@ -289,8 +292,8 @@ pub fn default() -> HashMap<Mode, KeyTrie> {
"j" | "down" => scroll_down, "j" | "down" => scroll_down,
"C-b" | "pageup" => page_up, "C-b" | "pageup" => page_up,
"C-f" | "pagedown" => page_down, "C-f" | "pagedown" => page_down,
"C-u" | "backspace" => half_page_up, "C-u" | "backspace" => page_cursor_half_up,
"C-d" | "space" => half_page_down, "C-d" | "space" => page_cursor_half_down,
"/" => search, "/" => search,
"?" => rsearch, "?" => rsearch,
@ -306,8 +309,8 @@ pub fn default() -> HashMap<Mode, KeyTrie> {
"j" | "down" => scroll_down, "j" | "down" => scroll_down,
"C-b" | "pageup" => page_up, "C-b" | "pageup" => page_up,
"C-f" | "pagedown" => page_down, "C-f" | "pagedown" => page_down,
"C-u" | "backspace" => half_page_up, "C-u" | "backspace" => page_cursor_half_up,
"C-d" | "space" => half_page_down, "C-d" | "space" => page_cursor_half_down,
"/" => search, "/" => search,
"?" => rsearch, "?" => rsearch,

@ -1,7 +1,9 @@
use crate::{ use crate::{
compositor::{Component, Context, Event, EventResult}, compositor::{Component, Context, Event, EventResult},
handlers::trigger_auto_completion, handlers::trigger_auto_completion,
job,
}; };
use helix_event::AsyncHook;
use helix_view::{ use helix_view::{
document::SavePoint, document::SavePoint,
editor::CompleteAction, editor::CompleteAction,
@ -10,14 +12,14 @@ use helix_view::{
theme::{Modifier, Style}, theme::{Modifier, Style},
ViewId, ViewId,
}; };
use tokio::time::Instant;
use tui::{buffer::Buffer as Surface, text::Span}; use tui::{buffer::Buffer as Surface, text::Span};
use std::{borrow::Cow, sync::Arc}; use std::{borrow::Cow, sync::Arc, time::Duration};
use helix_core::{chars, Change, Transaction}; use helix_core::{chars, Change, Transaction};
use helix_view::{graphics::Rect, Document, Editor}; use helix_view::{graphics::Rect, Document, Editor};
use crate::commands;
use crate::ui::{menu, Markdown, Menu, Popup, PromptEvent}; use crate::ui::{menu, Markdown, Menu, Popup, PromptEvent};
use helix_lsp::{lsp, util, OffsetEncoding}; use helix_lsp::{lsp, util, OffsetEncoding};
@ -102,6 +104,7 @@ pub struct Completion {
#[allow(dead_code)] #[allow(dead_code)]
trigger_offset: usize, trigger_offset: usize,
filter: String, filter: String,
resolve_handler: tokio::sync::mpsc::Sender<CompletionItem>,
} }
impl Completion { impl Completion {
@ -368,6 +371,7 @@ impl Completion {
// TODO: expand nucleo api to allow moving straight to a Utf32String here // TODO: expand nucleo api to allow moving straight to a Utf32String here
// and avoid allocation during matching // and avoid allocation during matching
filter: String::from(fragment), filter: String::from(fragment),
resolve_handler: ResolveHandler::default().spawn(),
}; };
// need to recompute immediately in case start_offset != trigger_offset // need to recompute immediately in case start_offset != trigger_offset
@ -379,6 +383,8 @@ impl Completion {
completion completion
} }
/// Synchronously resolve the given completion item. This is used when
/// accepting a completion.
fn resolve_completion_item( fn resolve_completion_item(
language_server: &helix_lsp::Client, language_server: &helix_lsp::Client,
completion_item: lsp::CompletionItem, completion_item: lsp::CompletionItem,
@ -386,7 +392,7 @@ impl Completion {
let future = language_server.resolve_completion_item(completion_item)?; let future = language_server.resolve_completion_item(completion_item)?;
let response = helix_lsp::block_on(future); let response = helix_lsp::block_on(future);
match response { match response {
Ok(value) => serde_json::from_value(value).ok(), Ok(item) => Some(item),
Err(err) => { Err(err) => {
log::error!("Failed to resolve completion item: {}", err); log::error!("Failed to resolve completion item: {}", err);
None None
@ -420,62 +426,6 @@ impl Completion {
self.popup.contents_mut().replace_option(old_item, new_item); self.popup.contents_mut().replace_option(old_item, new_item);
} }
/// Asynchronously requests that the currently selection completion item is
/// resolved through LSP `completionItem/resolve`.
pub fn ensure_item_resolved(&mut self, cx: &mut commands::Context) -> bool {
// > If computing full completion items is expensive, servers can additionally provide a
// > handler for the completion item resolve request. ...
// > A typical use case is for example: the `textDocument/completion` request doesn't fill
// > in the `documentation` property for returned completion items since it is expensive
// > to compute. When the item is selected in the user interface then a
// > 'completionItem/resolve' request is sent with the selected completion item as a parameter.
// > The returned completion item should have the documentation property filled in.
// https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocument_completion
let current_item = match self.popup.contents().selection() {
Some(item) if !item.resolved => item.clone(),
_ => return false,
};
let Some(language_server) = cx
.editor
.language_server_by_id(current_item.language_server_id)
else {
return false;
};
// This method should not block the compositor so we handle the response asynchronously.
let Some(future) = language_server.resolve_completion_item(current_item.item.clone())
else {
return false;
};
cx.callback(
future,
move |_editor, compositor, response: Option<lsp::CompletionItem>| {
let resolved_item = match response {
Some(item) => item,
None => return,
};
if let Some(completion) = &mut compositor
.find::<crate::ui::EditorView>()
.unwrap()
.completion
{
let resolved_item = CompletionItem {
item: resolved_item,
language_server_id: current_item.language_server_id,
resolved: true,
};
completion.replace_item(current_item, resolved_item);
}
},
);
true
}
pub fn area(&mut self, viewport: Rect, editor: &Editor) -> Rect { pub fn area(&mut self, viewport: Rect, editor: &Editor) -> Rect {
self.popup.area(viewport, editor) self.popup.area(viewport, editor)
} }
@ -498,6 +448,9 @@ impl Component for Completion {
Some(option) => option, Some(option) => option,
None => return, None => return,
}; };
if !option.resolved {
helix_event::send_blocking(&self.resolve_handler, option.clone());
}
// need to render: // need to render:
// option.detail // option.detail
// --- // ---
@ -599,3 +552,88 @@ impl Component for Completion {
markdown_doc.render(doc_area, surface, cx); markdown_doc.render(doc_area, surface, cx);
} }
} }
/// A hook for resolving incomplete completion items.
///
/// From the [LSP spec](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocument_completion):
///
/// > If computing full completion items is expensive, servers can additionally provide a
/// > handler for the completion item resolve request. ...
/// > A typical use case is for example: the `textDocument/completion` request doesn't fill
/// > in the `documentation` property for returned completion items since it is expensive
/// > to compute. When the item is selected in the user interface then a
/// > 'completionItem/resolve' request is sent with the selected completion item as a parameter.
/// > The returned completion item should have the documentation property filled in.
#[derive(Debug, Default)]
struct ResolveHandler {
    /// The completion item currently waiting to be resolved, set by `handle_event`
    /// and consumed by `finish_debounce`. `None` when no resolve is pending.
    trigger: Option<CompletionItem>,
    /// Sender half of the cancelation channel for the in-flight resolve request.
    /// Replacing/dropping it abandons the previous request —
    /// NOTE(review): presumed from `helix_event::cancelation` semantics; confirm.
    request: Option<helix_event::CancelTx>,
}
impl AsyncHook for ResolveHandler {
    type Event = CompletionItem;

    /// Debounce resolve requests: a repeat of the currently pending item keeps
    /// the existing deadline, while a new item restarts the 150 ms timer and
    /// drops the cancelation handle of any in-flight request.
    fn handle_event(
        &mut self,
        item: Self::Event,
        timeout: Option<tokio::time::Instant>,
    ) -> Option<tokio::time::Instant> {
        match &self.trigger {
            // Same item as the pending one: leave the current deadline alone.
            Some(pending) if pending == &item => timeout,
            // New (or first) item: remember it, cancel the previous request,
            // and schedule a fresh debounce deadline.
            _ => {
                self.trigger = Some(item);
                self.request = None;
                Some(Instant::now() + Duration::from_millis(150))
            }
        }
    }

    /// Fire the pending resolve request, if any, on the main job queue.
    fn finish_debounce(&mut self) {
        if let Some(pending) = self.trigger.take() {
            let (cancel_tx, cancel_rx) = helix_event::cancelation();
            self.request = Some(cancel_tx);
            job::dispatch_blocking(move |editor, _| {
                resolve_completion_item(editor, pending, cancel_rx)
            });
        }
    }
}
/// Sends a `completionItem/resolve` request for `item` to its language server
/// and, on success, swaps the resolved item into the open completion menu.
///
/// Silently returns if the originating language server is gone or does not
/// support resolving completion items. The request is cancelable via `cancel`;
/// a canceled request is dropped without logging.
fn resolve_completion_item(
    editor: &mut Editor,
    item: CompletionItem,
    cancel: helix_event::CancelRx,
) {
    // The server that produced the item must also resolve it.
    let Some(language_server) = editor.language_server_by_id(item.language_server_id) else {
        return;
    };
    // `None` here means the server lacks resolve support — nothing to do.
    let Some(future) = language_server.resolve_completion_item(item.item.clone()) else {
        return;
    };
    tokio::spawn(async move {
        match helix_event::cancelable_future(future, cancel).await {
            Some(Ok(resolved_item)) => {
                // Hop back onto the compositor to mutate UI state; the menu may
                // have been closed in the meantime, hence the `if let`.
                job::dispatch(move |_, compositor| {
                    if let Some(completion) = &mut compositor
                        .find::<crate::ui::EditorView>()
                        .unwrap()
                        .completion
                    {
                        // Re-wrap the server response, marking it resolved so it
                        // is not re-requested.
                        let resolved_item = CompletionItem {
                            item: resolved_item,
                            language_server_id: item.language_server_id,
                            resolved: true,
                        };
                        completion.replace_item(item, resolved_item);
                    };
                })
                .await
            }
            Some(Err(err)) => log::error!("completion resolve request failed: {err}"),
            // Request was canceled (e.g. superseded by a newer item): drop quietly.
            None => (),
        }
    });
}

@ -360,7 +360,7 @@ impl EditorView {
doc: &Document, doc: &Document,
theme: &Theme, theme: &Theme,
) -> [Vec<(usize, std::ops::Range<usize>)>; 5] { ) -> [Vec<(usize, std::ops::Range<usize>)>; 5] {
use helix_core::diagnostic::Severity; use helix_core::diagnostic::{DiagnosticTag, Severity};
let get_scope_of = |scope| { let get_scope_of = |scope| {
theme theme
.find_scope_index_exact(scope) .find_scope_index_exact(scope)
@ -380,6 +380,10 @@ impl EditorView {
let error = get_scope_of("diagnostic.error"); let error = get_scope_of("diagnostic.error");
let r#default = get_scope_of("diagnostic"); // this is a bit redundant but should be fine let r#default = get_scope_of("diagnostic"); // this is a bit redundant but should be fine
// Diagnostic tags
let unnecessary = theme.find_scope_index_exact("diagnostic.unnecessary");
let deprecated = theme.find_scope_index_exact("diagnostic.deprecated");
let mut default_vec: Vec<(usize, std::ops::Range<usize>)> = Vec::new(); let mut default_vec: Vec<(usize, std::ops::Range<usize>)> = Vec::new();
let mut info_vec = Vec::new(); let mut info_vec = Vec::new();
let mut hint_vec = Vec::new(); let mut hint_vec = Vec::new();
@ -396,6 +400,15 @@ impl EditorView {
_ => (&mut default_vec, r#default), _ => (&mut default_vec, r#default),
}; };
let scope = diagnostic
.tags
.first()
.and_then(|tag| match tag {
DiagnosticTag::Unnecessary => unnecessary,
DiagnosticTag::Deprecated => deprecated,
})
.unwrap_or(scope);
// If any diagnostic overlaps ranges with the prior diagnostic, // If any diagnostic overlaps ranges with the prior diagnostic,
// merge the two together. Otherwise push a new span. // merge the two together. Otherwise push a new span.
match vec.last_mut() { match vec.last_mut() {
@ -716,7 +729,8 @@ impl EditorView {
} }
} }
let paragraph = Paragraph::new(lines) let text = Text::from(lines);
let paragraph = Paragraph::new(&text)
.alignment(Alignment::Right) .alignment(Alignment::Right)
.wrap(Wrap { trim: true }); .wrap(Wrap { trim: true });
let width = 100.min(viewport.width); let width = 100.min(viewport.width);
@ -903,7 +917,9 @@ impl EditorView {
fn command_mode(&mut self, mode: Mode, cxt: &mut commands::Context, event: KeyEvent) { fn command_mode(&mut self, mode: Mode, cxt: &mut commands::Context, event: KeyEvent) {
match (event, cxt.editor.count) { match (event, cxt.editor.count) {
// count handling // count handling
(key!(i @ '0'), Some(_)) | (key!(i @ '1'..='9'), _) => { (key!(i @ '0'), Some(_)) | (key!(i @ '1'..='9'), _)
if !self.keymaps.contains_key(mode, event) =>
{
let i = i.to_digit(10).unwrap() as usize; let i = i.to_digit(10).unwrap() as usize;
cxt.editor.count = cxt.editor.count =
std::num::NonZeroUsize::new(cxt.editor.count.map_or(i, |c| c.get() * 10 + i)); std::num::NonZeroUsize::new(cxt.editor.count.map_or(i, |c| c.get() * 10 + i));
@ -1025,14 +1041,6 @@ impl EditorView {
pub fn handle_idle_timeout(&mut self, cx: &mut commands::Context) -> EventResult { pub fn handle_idle_timeout(&mut self, cx: &mut commands::Context) -> EventResult {
commands::compute_inlay_hints_for_all_views(cx.editor, cx.jobs); commands::compute_inlay_hints_for_all_views(cx.editor, cx.jobs);
if let Some(completion) = &mut self.completion {
return if completion.ensure_item_resolved(cx) {
EventResult::Consumed(None)
} else {
EventResult::Ignored(None)
};
}
EventResult::Ignored(None) EventResult::Ignored(None)
} }
} }
@ -1086,6 +1094,15 @@ impl EditorView {
if modifiers == KeyModifiers::ALT { if modifiers == KeyModifiers::ALT {
let selection = doc.selection(view_id).clone(); let selection = doc.selection(view_id).clone();
doc.set_selection(view_id, selection.push(Range::point(pos))); doc.set_selection(view_id, selection.push(Range::point(pos)));
} else if editor.mode == Mode::Select {
// Discards non-primary selections for consistent UX with normal mode
let primary = doc.selection(view_id).primary().put_cursor(
doc.text().slice(..),
pos,
true,
);
editor.mouse_down_range = Some(primary);
doc.set_selection(view_id, Selection::single(primary.anchor, primary.head));
} else { } else {
doc.set_selection(view_id, Selection::point(pos)); doc.set_selection(view_id, Selection::point(pos));
} }
@ -1154,7 +1171,7 @@ impl EditorView {
} }
let offset = config.scroll_lines.unsigned_abs(); let offset = config.scroll_lines.unsigned_abs();
commands::scroll(cxt, offset, direction); commands::scroll(cxt, offset, direction, false);
cxt.editor.tree.focus = current_view; cxt.editor.tree.focus = current_view;
cxt.editor.ensure_cursor_in_view(current_view); cxt.editor.ensure_cursor_in_view(current_view);
@ -1169,19 +1186,26 @@ impl EditorView {
let (view, doc) = current!(cxt.editor); let (view, doc) = current!(cxt.editor);
if doc let should_yank = match cxt.editor.mouse_down_range.take() {
.selection(view.id) Some(down_range) => doc.selection(view.id).primary() != down_range,
None => {
// This should not happen under normal cases. We fall back to the original
// behavior of yanking on non-single-char selections.
doc.selection(view.id)
.primary() .primary()
.slice(doc.text().slice(..)) .slice(doc.text().slice(..))
.len_chars() .len_chars()
<= 1 > 1
{
return EventResult::Ignored(None);
} }
};
commands::MappableCommand::yank_main_selection_to_primary_clipboard.execute(cxt); if should_yank {
commands::MappableCommand::yank_main_selection_to_primary_clipboard
.execute(cxt);
EventResult::Consumed(None) EventResult::Consumed(None)
} else {
EventResult::Ignored(None)
}
} }
MouseEventKind::Up(MouseButton::Right) => { MouseEventKind::Up(MouseButton::Right) => {

@ -2,6 +2,7 @@ use crate::compositor::{Component, Context};
use helix_view::graphics::{Margin, Rect}; use helix_view::graphics::{Margin, Rect};
use helix_view::info::Info; use helix_view::info::Info;
use tui::buffer::Buffer as Surface; use tui::buffer::Buffer as Surface;
use tui::text::Text;
use tui::widgets::{Block, Borders, Paragraph, Widget}; use tui::widgets::{Block, Borders, Paragraph, Widget};
impl Component for Info { impl Component for Info {
@ -31,7 +32,7 @@ impl Component for Info {
let inner = block.inner(area).inner(&margin); let inner = block.inner(area).inner(&margin);
block.render(area, surface); block.render(area, surface);
Paragraph::new(self.text.as_str()) Paragraph::new(&Text::from(self.text.as_str()))
.style(text_style) .style(text_style)
.render(inner, surface); .render(inner, surface);
} }

@ -77,7 +77,7 @@ impl Component for SignatureHelp {
let (_, sig_text_height) = crate::ui::text::required_size(&sig_text, area.width); let (_, sig_text_height) = crate::ui::text::required_size(&sig_text, area.width);
let sig_text_area = area.clip_top(1).with_height(sig_text_height); let sig_text_area = area.clip_top(1).with_height(sig_text_height);
let sig_text_area = sig_text_area.inner(&margin).intersection(surface.area); let sig_text_area = sig_text_area.inner(&margin).intersection(surface.area);
let sig_text_para = Paragraph::new(sig_text).wrap(Wrap { trim: false }); let sig_text_para = Paragraph::new(&sig_text).wrap(Wrap { trim: false });
sig_text_para.render(sig_text_area, surface); sig_text_para.render(sig_text_area, surface);
if self.signature_doc.is_none() { if self.signature_doc.is_none() {
@ -100,7 +100,7 @@ impl Component for SignatureHelp {
let sig_doc_area = area let sig_doc_area = area
.clip_top(sig_text_area.height + 2) .clip_top(sig_text_area.height + 2)
.clip_bottom(u16::from(cx.editor.popup_border())); .clip_bottom(u16::from(cx.editor.popup_border()));
let sig_doc_para = Paragraph::new(sig_doc) let sig_doc_para = Paragraph::new(&sig_doc)
.wrap(Wrap { trim: false }) .wrap(Wrap { trim: false })
.scroll((cx.scroll.unwrap_or_default() as u16, 0)); .scroll((cx.scroll.unwrap_or_default() as u16, 0));
sig_doc_para.render(sig_doc_area.inner(&margin), surface); sig_doc_para.render(sig_doc_area.inner(&margin), surface);

@ -346,7 +346,7 @@ impl Component for Markdown {
let text = self.parse(Some(&cx.editor.theme)); let text = self.parse(Some(&cx.editor.theme));
let par = Paragraph::new(text) let par = Paragraph::new(&text)
.wrap(Wrap { trim: false }) .wrap(Wrap { trim: false })
.scroll((cx.scroll.unwrap_or_default() as u16, 0)); .scroll((cx.scroll.unwrap_or_default() as u16, 0));

@ -18,6 +18,7 @@ use crate::filter_picker_entry;
use crate::job::{self, Callback}; use crate::job::{self, Callback};
pub use completion::{Completion, CompletionItem}; pub use completion::{Completion, CompletionItem};
pub use editor::EditorView; pub use editor::EditorView;
use helix_stdx::rope;
pub use markdown::Markdown; pub use markdown::Markdown;
pub use menu::Menu; pub use menu::Menu;
pub use picker::{DynamicPicker, FileLocation, Picker}; pub use picker::{DynamicPicker, FileLocation, Picker};
@ -26,8 +27,6 @@ pub use prompt::{Prompt, PromptEvent};
pub use spinner::{ProgressSpinners, Spinner}; pub use spinner::{ProgressSpinners, Spinner};
pub use text::Text; pub use text::Text;
use helix_core::regex::Regex;
use helix_core::regex::RegexBuilder;
use helix_view::Editor; use helix_view::Editor;
use std::path::PathBuf; use std::path::PathBuf;
@ -63,7 +62,22 @@ pub fn regex_prompt(
prompt: std::borrow::Cow<'static, str>, prompt: std::borrow::Cow<'static, str>,
history_register: Option<char>, history_register: Option<char>,
completion_fn: impl FnMut(&Editor, &str) -> Vec<prompt::Completion> + 'static, completion_fn: impl FnMut(&Editor, &str) -> Vec<prompt::Completion> + 'static,
fun: impl Fn(&mut crate::compositor::Context, Regex, PromptEvent) + 'static, fun: impl Fn(&mut crate::compositor::Context, rope::Regex, PromptEvent) + 'static,
) {
raw_regex_prompt(
cx,
prompt,
history_register,
completion_fn,
move |cx, regex, _, event| fun(cx, regex, event),
);
}
pub fn raw_regex_prompt(
cx: &mut crate::commands::Context,
prompt: std::borrow::Cow<'static, str>,
history_register: Option<char>,
completion_fn: impl FnMut(&Editor, &str) -> Vec<prompt::Completion> + 'static,
fun: impl Fn(&mut crate::compositor::Context, rope::Regex, &str, PromptEvent) + 'static,
) { ) {
let (view, doc) = current!(cx.editor); let (view, doc) = current!(cx.editor);
let doc_id = view.doc; let doc_id = view.doc;
@ -94,10 +108,13 @@ pub fn regex_prompt(
false false
}; };
match RegexBuilder::new(input) match rope::RegexBuilder::new()
.syntax(
rope::Config::new()
.case_insensitive(case_insensitive) .case_insensitive(case_insensitive)
.multi_line(true) .multi_line(true),
.build() )
.build(input)
{ {
Ok(regex) => { Ok(regex) => {
let (view, doc) = current!(cx.editor); let (view, doc) = current!(cx.editor);
@ -110,7 +127,7 @@ pub fn regex_prompt(
view.jumps.push((doc_id, snapshot.clone())); view.jumps.push((doc_id, snapshot.clone()));
} }
fun(cx, regex, event); fun(cx, regex, input, event);
let (view, doc) = current!(cx.editor); let (view, doc) = current!(cx.editor);
view.ensure_cursor_in_view(doc, config.scrolloff); view.ensure_cursor_in_view(doc, config.scrolloff);
@ -428,9 +445,9 @@ pub mod completers {
path path
} else { } else {
match path.parent() { match path.parent() {
Some(path) if !path.as_os_str().is_empty() => path.to_path_buf(), Some(path) if !path.as_os_str().is_empty() => Cow::Borrowed(path),
// Path::new("h")'s parent is Some("")... // Path::new("h")'s parent is Some("")...
_ => helix_stdx::env::current_working_dir(), _ => Cow::Owned(helix_stdx::env::current_working_dir()),
} }
}; };

@ -33,7 +33,7 @@ impl Component for Text {
fn render(&mut self, area: Rect, surface: &mut Surface, _cx: &mut Context) { fn render(&mut self, area: Rect, surface: &mut Surface, _cx: &mut Context) {
use tui::widgets::{Paragraph, Widget, Wrap}; use tui::widgets::{Paragraph, Widget, Wrap};
let par = Paragraph::new(self.contents.clone()).wrap(Wrap { trim: false }); let par = Paragraph::new(&self.contents).wrap(Wrap { trim: false });
// .scroll(x, y) offsets // .scroll(x, y) offsets
par.render(area, surface); par.render(area, surface);

@ -526,3 +526,86 @@ async fn test_join_selections() -> anyhow::Result<()> {
Ok(()) Ok(())
} }
#[tokio::test(flavor = "multi_thread")]
async fn test_join_selections_space() -> anyhow::Result<()> {
// join with empty lines panic
test((
platform_line(indoc! {"\
#[a
b
c
d
e|]#
"}),
"<A-J>",
platform_line(indoc! {"\
a#[ |]#b#( |)#c#( |)#d#( |)#e
"}),
))
.await?;
// normal join
test((
platform_line(indoc! {"\
#[a|]#bc
def
"}),
"<A-J>",
platform_line(indoc! {"\
abc#[ |]#def
"}),
))
.await?;
// join with empty line
test((
platform_line(indoc! {"\
#[a|]#bc
def
"}),
"<A-J>",
platform_line(indoc! {"\
#[a|]#bc
def
"}),
))
.await?;
// join with additional space in non-empty line
test((
platform_line(indoc! {"\
#[a|]#bc
def
"}),
"<A-J><A-J>",
platform_line(indoc! {"\
abc#[ |]#def
"}),
))
.await?;
// join with retained trailing spaces
test((
platform_line(indoc! {"\
#[aaa
bb
c |]#
"}),
"<A-J>",
platform_line(indoc! {"\
aaa #[ |]#bb #( |)#c
"}),
))
.await?;
Ok(())
}

@ -552,3 +552,57 @@ async fn find_char_line_ending() -> anyhow::Result<()> {
Ok(()) Ok(())
} }
#[tokio::test(flavor = "multi_thread")]
async fn test_surround_replace() -> anyhow::Result<()> {
test((
platform_line(indoc! {"\
(#[|a]#)
"}),
"mrm{",
platform_line(indoc! {"\
{#[|a]#}
"}),
))
.await?;
test((
platform_line(indoc! {"\
(#[a|]#)
"}),
"mrm{",
platform_line(indoc! {"\
{#[a|]#}
"}),
))
.await?;
Ok(())
}
#[tokio::test(flavor = "multi_thread")]
async fn test_surround_delete() -> anyhow::Result<()> {
test((
platform_line(indoc! {"\
(#[|a]#)
"}),
"mdm",
platform_line(indoc! {"\
#[|a]#
"}),
))
.await?;
test((
platform_line(indoc! {"\
(#[a|]#)
"}),
"mdm",
platform_line(indoc! {"\
#[a|]#
"}),
))
.await?;
Ok(())
}

@ -28,15 +28,15 @@ fn get_line_offset(line_width: u16, text_area_width: u16, alignment: Alignment)
/// # use helix_tui::widgets::{Block, Borders, Paragraph, Wrap}; /// # use helix_tui::widgets::{Block, Borders, Paragraph, Wrap};
/// # use helix_tui::layout::{Alignment}; /// # use helix_tui::layout::{Alignment};
/// # use helix_view::graphics::{Style, Color, Modifier}; /// # use helix_view::graphics::{Style, Color, Modifier};
/// let text = vec![ /// let text = Text::from(vec![
/// Spans::from(vec![ /// Spans::from(vec![
/// Span::raw("First"), /// Span::raw("First"),
/// Span::styled("line",Style::default().add_modifier(Modifier::ITALIC)), /// Span::styled("line",Style::default().add_modifier(Modifier::ITALIC)),
/// Span::raw("."), /// Span::raw("."),
/// ]), /// ]),
/// Spans::from(Span::styled("Second line", Style::default().fg(Color::Red))), /// Spans::from(Span::styled("Second line", Style::default().fg(Color::Red))),
/// ]; /// ]);
/// Paragraph::new(text) /// Paragraph::new(&text)
/// .block(Block::default().title("Paragraph").borders(Borders::ALL)) /// .block(Block::default().title("Paragraph").borders(Borders::ALL))
/// .style(Style::default().fg(Color::White).bg(Color::Black)) /// .style(Style::default().fg(Color::White).bg(Color::Black))
/// .alignment(Alignment::Center) /// .alignment(Alignment::Center)
@ -51,7 +51,7 @@ pub struct Paragraph<'a> {
/// How to wrap the text /// How to wrap the text
wrap: Option<Wrap>, wrap: Option<Wrap>,
/// The text to display /// The text to display
text: Text<'a>, text: &'a Text<'a>,
/// Scroll /// Scroll
scroll: (u16, u16), scroll: (u16, u16),
/// Alignment of the text /// Alignment of the text
@ -70,7 +70,7 @@ pub struct Paragraph<'a> {
/// - Here is another point that is long enough to wrap"#); /// - Here is another point that is long enough to wrap"#);
/// ///
/// // With leading spaces trimmed (window width of 30 chars): /// // With leading spaces trimmed (window width of 30 chars):
/// Paragraph::new(bullet_points.clone()).wrap(Wrap { trim: true }); /// Paragraph::new(&bullet_points).wrap(Wrap { trim: true });
/// // Some indented points: /// // Some indented points:
/// // - First thing goes here and is /// // - First thing goes here and is
/// // long so that it wraps /// // long so that it wraps
@ -78,7 +78,7 @@ pub struct Paragraph<'a> {
/// // is long enough to wrap /// // is long enough to wrap
/// ///
/// // But without trimming, indentation is preserved: /// // But without trimming, indentation is preserved:
/// Paragraph::new(bullet_points).wrap(Wrap { trim: false }); /// Paragraph::new(&bullet_points).wrap(Wrap { trim: false });
/// // Some indented points: /// // Some indented points:
/// // - First thing goes here /// // - First thing goes here
/// // and is long so that it wraps /// // and is long so that it wraps
@ -92,15 +92,12 @@ pub struct Wrap {
} }
impl<'a> Paragraph<'a> { impl<'a> Paragraph<'a> {
pub fn new<T>(text: T) -> Paragraph<'a> pub fn new(text: &'a Text) -> Paragraph<'a> {
where
T: Into<Text<'a>>,
{
Paragraph { Paragraph {
block: None, block: None,
style: Default::default(), style: Default::default(),
wrap: None, wrap: None,
text: text.into(), text,
scroll: (0, 0), scroll: (0, 0),
alignment: Alignment::Left, alignment: Alignment::Left,
} }

@ -17,14 +17,16 @@ fn terminal_buffer_size_should_not_be_limited() {
// let backend = TestBackend::new(10, 10); // let backend = TestBackend::new(10, 10);
// let mut terminal = Terminal::new(backend)?; // let mut terminal = Terminal::new(backend)?;
// let frame = terminal.draw(|f| { // let frame = terminal.draw(|f| {
// let paragraph = Paragraph::new("Test"); // let text = Text::from("Test");
// let paragraph = Paragraph::new(&text);
// f.render_widget(paragraph, f.size()); // f.render_widget(paragraph, f.size());
// })?; // })?;
// assert_eq!(frame.buffer.get(0, 0).symbol, "T"); // assert_eq!(frame.buffer.get(0, 0).symbol, "T");
// assert_eq!(frame.area, Rect::new(0, 0, 10, 10)); // assert_eq!(frame.area, Rect::new(0, 0, 10, 10));
// terminal.backend_mut().resize(8, 8); // terminal.backend_mut().resize(8, 8);
// let frame = terminal.draw(|f| { // let frame = terminal.draw(|f| {
// let paragraph = Paragraph::new("test"); // let text = Text::from("test");
// let paragraph = Paragraph::new(&text);
// f.render_widget(paragraph, f.size()); // f.render_widget(paragraph, f.size());
// })?; // })?;
// assert_eq!(frame.buffer.get(0, 0).symbol, "t"); // assert_eq!(frame.buffer.get(0, 0).symbol, "t");

@ -21,8 +21,8 @@
// terminal // terminal
// .draw(|f| { // .draw(|f| {
// let size = f.size(); // let size = f.size();
// let text = vec![Spans::from(SAMPLE_STRING)]; // let text = Text::from(SAMPLE_STRING);
// let paragraph = Paragraph::new(text) // let paragraph = Paragraph::new(&text)
// .block(Block::default().borders(Borders::ALL)) // .block(Block::default().borders(Borders::ALL))
// .alignment(alignment) // .alignment(alignment)
// .wrap(Wrap { trim: true }); // .wrap(Wrap { trim: true });
@ -88,8 +88,8 @@
// terminal // terminal
// .draw(|f| { // .draw(|f| {
// let size = f.size(); // let size = f.size();
// let text = vec![Spans::from(s)]; // let text = Text::from(s);
// let paragraph = Paragraph::new(text) // let paragraph = Paragraph::new(&text)
// .block(Block::default().borders(Borders::ALL)) // .block(Block::default().borders(Borders::ALL))
// .wrap(Wrap { trim: true }); // .wrap(Wrap { trim: true });
// f.render_widget(paragraph, size); // f.render_widget(paragraph, size);
@ -120,8 +120,8 @@
// terminal // terminal
// .draw(|f| { // .draw(|f| {
// let size = f.size(); // let size = f.size();
// let text = vec![Spans::from(s)]; // let text = Text::from(s);
// let paragraph = Paragraph::new(text) // let paragraph = Paragraph::new(&text)
// .block(Block::default().borders(Borders::ALL)) // .block(Block::default().borders(Borders::ALL))
// .wrap(Wrap { trim: true }); // .wrap(Wrap { trim: true });
// f.render_widget(paragraph, size); // f.render_widget(paragraph, size);
@ -155,8 +155,8 @@
// terminal // terminal
// .draw(|f| { // .draw(|f| {
// let size = f.size(); // let size = f.size();
// let text = Text::from(line);
// let paragraph = Paragraph::new(line).block(Block::default().borders(Borders::ALL)); // let paragraph = Paragraph::new(&text).block(Block::default().borders(Borders::ALL));
// f.render_widget(paragraph, size); // f.render_widget(paragraph, size);
// }) // })
// .unwrap(); // .unwrap();
@ -174,7 +174,7 @@
// let text = Text::from( // let text = Text::from(
// "段落现在可以水平滚动了!\nParagraph can scroll horizontally!\nShort line", // "段落现在可以水平滚动了!\nParagraph can scroll horizontally!\nShort line",
// ); // );
// let paragraph = Paragraph::new(text) // let paragraph = Paragraph::new(&text)
// .block(Block::default().borders(Borders::ALL)) // .block(Block::default().borders(Borders::ALL))
// .alignment(alignment) // .alignment(alignment)
// .scroll(scroll); // .scroll(scroll);

@ -17,7 +17,7 @@ helix-event = { path = "../helix-event" }
tokio = { version = "1", features = ["rt", "rt-multi-thread", "time", "sync", "parking_lot", "macros"] } tokio = { version = "1", features = ["rt", "rt-multi-thread", "time", "sync", "parking_lot", "macros"] }
parking_lot = "0.12" parking_lot = "0.12"
arc-swap = { version = "1.6.0" } arc-swap = { version = "1.7.0" }
gix = { version = "0.58.0", features = ["attributes"], default-features = false, optional = true } gix = { version = "0.58.0", features = ["attributes"], default-features = false, optional = true }
imara-diff = "0.1.5" imara-diff = "0.1.5"

@ -31,7 +31,7 @@ crossterm = { version = "0.27", optional = true }
once_cell = "1.19" once_cell = "1.19"
url = "2.5.0" url = "2.5.0"
arc-swap = { version = "1.6.0" } arc-swap = { version = "1.7.0" }
tokio = { version = "1", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot"] } tokio = { version = "1", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot"] }
tokio-stream = "0.1" tokio-stream = "0.1"
@ -50,7 +50,7 @@ parking_lot = "0.12.1"
[target.'cfg(windows)'.dependencies] [target.'cfg(windows)'.dependencies]
clipboard-win = { version = "5.1", features = ["std"] } clipboard-win = { version = "5.2", features = ["std"] }
[target.'cfg(unix)'.dependencies] [target.'cfg(unix)'.dependencies]
libc = "0.2" libc = "0.2"

@ -42,7 +42,7 @@ pub use helix_core::diagnostic::Severity;
use helix_core::{ use helix_core::{
auto_pairs::AutoPairs, auto_pairs::AutoPairs,
syntax::{self, AutoPairConfig, IndentationHeuristic, LanguageServerFeature, SoftWrap}, syntax::{self, AutoPairConfig, IndentationHeuristic, LanguageServerFeature, SoftWrap},
Change, LineEnding, Position, Selection, NATIVE_LINE_ENDING, Change, LineEnding, Position, Range, Selection, NATIVE_LINE_ENDING,
}; };
use helix_dap as dap; use helix_dap as dap;
use helix_lsp::lsp; use helix_lsp::lsp;
@ -914,7 +914,7 @@ pub struct Editor {
pub macro_recording: Option<(char, Vec<KeyEvent>)>, pub macro_recording: Option<(char, Vec<KeyEvent>)>,
pub macro_replaying: Vec<char>, pub macro_replaying: Vec<char>,
pub language_servers: helix_lsp::Registry, pub language_servers: helix_lsp::Registry,
pub diagnostics: BTreeMap<lsp::Url, Vec<(lsp::Diagnostic, usize)>>, pub diagnostics: BTreeMap<PathBuf, Vec<(lsp::Diagnostic, usize)>>,
pub diff_providers: DiffProviderRegistry, pub diff_providers: DiffProviderRegistry,
pub debugger: Option<dap::Client>, pub debugger: Option<dap::Client>,
@ -964,6 +964,8 @@ pub struct Editor {
/// times during rendering and should not be set by other functions. /// times during rendering and should not be set by other functions.
pub cursor_cache: Cell<Option<Option<Position>>>, pub cursor_cache: Cell<Option<Option<Position>>>,
pub handlers: Handlers, pub handlers: Handlers,
pub mouse_down_range: Option<Range>,
} }
pub type Motion = Box<dyn Fn(&mut Editor)>; pub type Motion = Box<dyn Fn(&mut Editor)>;
@ -1080,6 +1082,7 @@ impl Editor {
needs_redraw: false, needs_redraw: false,
cursor_cache: Cell::new(None), cursor_cache: Cell::new(None),
handlers, handlers,
mouse_down_range: None,
} }
} }
@ -1812,7 +1815,7 @@ impl Editor {
/// Returns all supported diagnostics for the document /// Returns all supported diagnostics for the document
pub fn doc_diagnostics<'a>( pub fn doc_diagnostics<'a>(
language_servers: &'a helix_lsp::Registry, language_servers: &'a helix_lsp::Registry,
diagnostics: &'a BTreeMap<lsp::Url, Vec<(lsp::Diagnostic, usize)>>, diagnostics: &'a BTreeMap<PathBuf, Vec<(lsp::Diagnostic, usize)>>,
document: &Document, document: &Document,
) -> impl Iterator<Item = helix_core::Diagnostic> + 'a { ) -> impl Iterator<Item = helix_core::Diagnostic> + 'a {
Editor::doc_diagnostics_with_filter(language_servers, diagnostics, document, |_, _| true) Editor::doc_diagnostics_with_filter(language_servers, diagnostics, document, |_, _| true)
@ -1822,7 +1825,7 @@ impl Editor {
/// filtered by `filter` which is invocated with the raw `lsp::Diagnostic` and the language server id it came from /// filtered by `filter` which is invocated with the raw `lsp::Diagnostic` and the language server id it came from
pub fn doc_diagnostics_with_filter<'a>( pub fn doc_diagnostics_with_filter<'a>(
language_servers: &'a helix_lsp::Registry, language_servers: &'a helix_lsp::Registry,
diagnostics: &'a BTreeMap<lsp::Url, Vec<(lsp::Diagnostic, usize)>>, diagnostics: &'a BTreeMap<PathBuf, Vec<(lsp::Diagnostic, usize)>>,
document: &Document, document: &Document,
filter: impl Fn(&lsp::Diagnostic, usize) -> bool + 'a, filter: impl Fn(&lsp::Diagnostic, usize) -> bool + 'a,
@ -1831,8 +1834,7 @@ impl Editor {
let language_config = document.language.clone(); let language_config = document.language.clone();
document document
.path() .path()
.and_then(|path| url::Url::from_file_path(path).ok()) // TODO log error? .and_then(|path| diagnostics.get(path))
.and_then(|uri| diagnostics.get(&uri))
.map(|diags| { .map(|diags| {
diags.iter().filter_map(move |(diagnostic, lsp_id)| { diags.iter().filter_map(move |(diagnostic, lsp_id)| {
let ls = language_servers.get_by_id(*lsp_id)?; let ls = language_servers.get_by_id(*lsp_id)?;
@ -1978,7 +1980,7 @@ impl Editor {
/// Switches the editor into normal mode. /// Switches the editor into normal mode.
pub fn enter_normal_mode(&mut self) { pub fn enter_normal_mode(&mut self) {
use helix_core::{graphemes, Range}; use helix_core::graphemes;
if self.mode == Mode::Normal { if self.mode == Mode::Normal {
return; return;

@ -226,10 +226,15 @@ impl Editor {
breakpoints.iter().position(|b| b.id == breakpoint.id) breakpoints.iter().position(|b| b.id == breakpoint.id)
{ {
breakpoints[i].verified = breakpoint.verified; breakpoints[i].verified = breakpoint.verified;
breakpoints[i].message = breakpoint.message.clone(); breakpoints[i].message = breakpoint
breakpoints[i].line = .message
breakpoint.line.unwrap().saturating_sub(1); // TODO: no unwrap .clone()
breakpoints[i].column = breakpoint.column; .or_else(|| breakpoints[i].message.take());
breakpoints[i].line = breakpoint
.line
.map_or(breakpoints[i].line, |line| line.saturating_sub(1));
breakpoints[i].column =
breakpoint.column.or(breakpoints[i].column);
} }
} }
} }

@ -23,6 +23,7 @@ cuelsp = { command = "cuelsp" }
dart = { command = "dart", args = ["language-server", "--client-id=helix"] } dart = { command = "dart", args = ["language-server", "--client-id=helix"] }
dhall-lsp-server = { command = "dhall-lsp-server" } dhall-lsp-server = { command = "dhall-lsp-server" }
docker-langserver = { command = "docker-langserver", args = ["--stdio"] } docker-langserver = { command = "docker-langserver", args = ["--stdio"] }
docker-compose-langserver = { command = "docker-compose-langserver", args = ["--stdio"]}
dot-language-server = { command = "dot-language-server", args = ["--stdio"] } dot-language-server = { command = "dot-language-server", args = ["--stdio"] }
elixir-ls = { command = "elixir-ls", config = { elixirLS.dialyzerEnabled = false } } elixir-ls = { command = "elixir-ls", config = { elixirLS.dialyzerEnabled = false } }
elm-language-server = { command = "elm-language-server" } elm-language-server = { command = "elm-language-server" }
@ -44,11 +45,13 @@ kotlin-language-server = { command = "kotlin-language-server" }
lean = { command = "lean", args = [ "--server" ] } lean = { command = "lean", args = [ "--server" ] }
ltex-ls = { command = "ltex-ls" } ltex-ls = { command = "ltex-ls" }
markdoc-ls = { command = "markdoc-ls", args = ["--stdio"] } markdoc-ls = { command = "markdoc-ls", args = ["--stdio"] }
markdown-oxide = { command = "markdown-oxide" }
marksman = { command = "marksman", args = ["server"] } marksman = { command = "marksman", args = ["server"] }
metals = { command = "metals", config = { "isHttpEnabled" = true } } metals = { command = "metals", config = { "isHttpEnabled" = true } }
mint = { command = "mint", args = ["ls"] } mint = { command = "mint", args = ["ls"] }
nil = { command = "nil" } nil = { command = "nil" }
nimlangserver = { command = "nimlangserver" } nimlangserver = { command = "nimlangserver" }
nimlsp = { command = "nimlsp" }
nls = { command = "nls" } nls = { command = "nls" }
nu-lsp = { command = "nu", args = [ "--lsp" ] } nu-lsp = { command = "nu", args = [ "--lsp" ] }
ocamllsp = { command = "ocamllsp" } ocamllsp = { command = "ocamllsp" }
@ -190,7 +193,12 @@ injection-regex = "rust"
file-types = ["rs"] file-types = ["rs"]
roots = ["Cargo.toml", "Cargo.lock"] roots = ["Cargo.toml", "Cargo.lock"]
auto-format = true auto-format = true
comment-token = "//" comment-tokens = ["//", "///", "//!"]
block-comment-tokens = [
{ start = "/*", end = "*/" },
{ start = "/**", end = "*/" },
{ start = "/*!", end = "*/" },
]
language-servers = [ "rust-analyzer" ] language-servers = [ "rust-analyzer" ]
indent = { tab-width = 4, unit = " " } indent = { tab-width = 4, unit = " " }
persistent-diagnostic-sources = ["rustc", "clippy"] persistent-diagnostic-sources = ["rustc", "clippy"]
@ -282,6 +290,7 @@ injection-regex = "protobuf"
file-types = ["proto"] file-types = ["proto"]
language-servers = [ "bufls", "pbkit" ] language-servers = [ "bufls", "pbkit" ]
comment-token = "//" comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
indent = { tab-width = 2, unit = " " } indent = { tab-width = 2, unit = " " }
[[grammar]] [[grammar]]
@ -325,6 +334,7 @@ injection-regex = "mint"
file-types = ["mint"] file-types = ["mint"]
shebangs = [] shebangs = []
comment-token = "//" comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
language-servers = [ "mint" ] language-servers = [ "mint" ]
indent = { tab-width = 2, unit = " " } indent = { tab-width = 2, unit = " " }
@ -407,6 +417,7 @@ scope = "source.c"
injection-regex = "c" injection-regex = "c"
file-types = ["c"] # TODO: ["h"] file-types = ["c"] # TODO: ["h"]
comment-token = "//" comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
language-servers = [ "clangd" ] language-servers = [ "clangd" ]
indent = { tab-width = 2, unit = " " } indent = { tab-width = 2, unit = " " }
@ -443,6 +454,7 @@ scope = "source.cpp"
injection-regex = "cpp" injection-regex = "cpp"
file-types = ["cc", "hh", "c++", "cpp", "hpp", "h", "ipp", "tpp", "cxx", "hxx", "ixx", "txx", "ino", "C", "H", "cu", "cuh", "cppm", "h++", "ii", "inl", { glob = ".hpp.in" }, { glob = ".h.in" }] file-types = ["cc", "hh", "c++", "cpp", "hpp", "h", "ipp", "tpp", "cxx", "hxx", "ixx", "txx", "ino", "C", "H", "cu", "cuh", "cppm", "h++", "ii", "inl", { glob = ".hpp.in" }, { glob = ".h.in" }]
comment-token = "//" comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
language-servers = [ "clangd" ] language-servers = [ "clangd" ]
indent = { tab-width = 2, unit = " " } indent = { tab-width = 2, unit = " " }
@ -490,6 +502,7 @@ injection-regex = "c-?sharp"
file-types = ["cs", "csx", "cake"] file-types = ["cs", "csx", "cake"]
roots = ["sln", "csproj"] roots = ["sln", "csproj"]
comment-token = "//" comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
indent = { tab-width = 4, unit = "\t" } indent = { tab-width = 4, unit = "\t" }
language-servers = [ "omnisharp" ] language-servers = [ "omnisharp" ]
@ -548,6 +561,7 @@ file-types = ["go"]
roots = ["go.work", "go.mod"] roots = ["go.work", "go.mod"]
auto-format = true auto-format = true
comment-token = "//" comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
language-servers = [ "gopls", "golangci-lint-lsp" ] language-servers = [ "gopls", "golangci-lint-lsp" ]
# TODO: gopls needs utf-8 offsets? # TODO: gopls needs utf-8 offsets?
indent = { tab-width = 4, unit = "\t" } indent = { tab-width = 4, unit = "\t" }
@ -613,6 +627,7 @@ scope = "source.gotmpl"
injection-regex = "gotmpl" injection-regex = "gotmpl"
file-types = ["gotmpl"] file-types = ["gotmpl"]
comment-token = "//" comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
language-servers = [ "gopls" ] language-servers = [ "gopls" ]
indent = { tab-width = 2, unit = " " } indent = { tab-width = 2, unit = " " }
@ -642,6 +657,7 @@ language-id = "javascript"
file-types = ["js", "mjs", "cjs", "rules", "es6", "pac", { glob = "jakefile" }] file-types = ["js", "mjs", "cjs", "rules", "es6", "pac", { glob = "jakefile" }]
shebangs = ["node"] shebangs = ["node"]
comment-token = "//" comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
language-servers = [ "typescript-language-server" ] language-servers = [ "typescript-language-server" ]
indent = { tab-width = 2, unit = " " } indent = { tab-width = 2, unit = " " }
@ -668,6 +684,7 @@ injection-regex = "jsx"
language-id = "javascriptreact" language-id = "javascriptreact"
file-types = ["jsx"] file-types = ["jsx"]
comment-token = "//" comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
language-servers = [ "typescript-language-server" ] language-servers = [ "typescript-language-server" ]
indent = { tab-width = 2, unit = " " } indent = { tab-width = 2, unit = " " }
grammar = "javascript" grammar = "javascript"
@ -679,6 +696,8 @@ injection-regex = "(ts|typescript)"
file-types = ["ts", "mts", "cts"] file-types = ["ts", "mts", "cts"]
language-id = "typescript" language-id = "typescript"
shebangs = ["deno", "ts-node"] shebangs = ["deno", "ts-node"]
comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
language-servers = [ "typescript-language-server" ] language-servers = [ "typescript-language-server" ]
indent = { tab-width = 2, unit = " " } indent = { tab-width = 2, unit = " " }
@ -692,6 +711,8 @@ scope = "source.tsx"
injection-regex = "(tsx)" # |typescript injection-regex = "(tsx)" # |typescript
language-id = "typescriptreact" language-id = "typescriptreact"
file-types = ["tsx"] file-types = ["tsx"]
comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
language-servers = [ "typescript-language-server" ] language-servers = [ "typescript-language-server" ]
indent = { tab-width = 2, unit = " " } indent = { tab-width = 2, unit = " " }
@ -704,6 +725,7 @@ name = "css"
scope = "source.css" scope = "source.css"
injection-regex = "css" injection-regex = "css"
file-types = ["css", "scss"] file-types = ["css", "scss"]
block-comment-tokens = { start = "/*", end = "*/" }
language-servers = [ "vscode-css-language-server" ] language-servers = [ "vscode-css-language-server" ]
auto-format = true auto-format = true
indent = { tab-width = 2, unit = " " } indent = { tab-width = 2, unit = " " }
@ -717,6 +739,7 @@ name = "scss"
scope = "source.scss" scope = "source.scss"
injection-regex = "scss" injection-regex = "scss"
file-types = ["scss"] file-types = ["scss"]
block-comment-tokens = { start = "/*", end = "*/" }
language-servers = [ "vscode-css-language-server" ] language-servers = [ "vscode-css-language-server" ]
auto-format = true auto-format = true
indent = { tab-width = 2, unit = " " } indent = { tab-width = 2, unit = " " }
@ -730,6 +753,7 @@ name = "html"
scope = "text.html.basic" scope = "text.html.basic"
injection-regex = "html" injection-regex = "html"
file-types = ["html", "htm", "shtml", "xhtml", "xht", "jsp", "asp", "aspx", "jshtm", "volt", "rhtml"] file-types = ["html", "htm", "shtml", "xhtml", "xht", "jsp", "asp", "aspx", "jshtm", "volt", "rhtml"]
block-comment-tokens = { start = "<!--", end = "-->" }
language-servers = [ "vscode-html-language-server" ] language-servers = [ "vscode-html-language-server" ]
auto-format = true auto-format = true
indent = { tab-width = 2, unit = " " } indent = { tab-width = 2, unit = " " }
@ -860,6 +884,7 @@ file-types = [
"tcshrc", "tcshrc",
"bashrc_Apple_Terminal", "bashrc_Apple_Terminal",
"zshrc_Apple_Terminal", "zshrc_Apple_Terminal",
{ glob = "tmux.conf" },
{ glob = ".bash_history" }, { glob = ".bash_history" },
{ glob = ".bash_login" }, { glob = ".bash_login" },
{ glob = ".bash_logout" }, { glob = ".bash_logout" },
@ -899,6 +924,7 @@ injection-regex = "php"
file-types = ["php", "inc", "php4", "php5", "phtml", "ctp"] file-types = ["php", "inc", "php4", "php5", "phtml", "ctp"]
shebangs = ["php"] shebangs = ["php"]
roots = ["composer.json", "index.php"] roots = ["composer.json", "index.php"]
comment-token = "//"
language-servers = [ "intelephense" ] language-servers = [ "intelephense" ]
indent = { tab-width = 4, unit = " " } indent = { tab-width = 4, unit = " " }
@ -911,6 +937,7 @@ name = "twig"
scope = "source.twig" scope = "source.twig"
injection-regex = "twig" injection-regex = "twig"
file-types = ["twig"] file-types = ["twig"]
block-comment-tokens = { start = "{#", end = "#}" }
indent = { tab-width = 2, unit = " " } indent = { tab-width = 2, unit = " " }
[[grammar]] [[grammar]]
@ -964,6 +991,7 @@ injection-regex = "lean"
file-types = ["lean"] file-types = ["lean"]
roots = [ "lakefile.lean" ] roots = [ "lakefile.lean" ]
comment-token = "--" comment-token = "--"
block-comment-tokens = { start = "/-", end = "-/" }
language-servers = [ "lean" ] language-servers = [ "lean" ]
indent = { tab-width = 2, unit = " " } indent = { tab-width = 2, unit = " " }
@ -990,6 +1018,7 @@ file-types = ["jl"]
shebangs = ["julia"] shebangs = ["julia"]
roots = ["Manifest.toml", "Project.toml"] roots = ["Manifest.toml", "Project.toml"]
comment-token = "#" comment-token = "#"
block-comment-tokens = { start = "#=", end = "=#" }
language-servers = [ "julia" ] language-servers = [ "julia" ]
indent = { tab-width = 4, unit = " " } indent = { tab-width = 4, unit = " " }
@ -1001,7 +1030,7 @@ source = { git = "https://github.com/tree-sitter/tree-sitter-julia", rev = "8fb3
name = "java" name = "java"
scope = "source.java" scope = "source.java"
injection-regex = "java" injection-regex = "java"
file-types = ["java", "jav"] file-types = ["java", "jav", "pde"]
roots = ["pom.xml", "build.gradle", "build.gradle.kts"] roots = ["pom.xml", "build.gradle", "build.gradle.kts"]
language-servers = [ "jdtls" ] language-servers = [ "jdtls" ]
indent = { tab-width = 2, unit = " " } indent = { tab-width = 2, unit = " " }
@ -1053,6 +1082,7 @@ scope = "source.ocaml"
injection-regex = "ocaml" injection-regex = "ocaml"
file-types = ["ml"] file-types = ["ml"]
shebangs = ["ocaml", "ocamlrun", "ocamlscript"] shebangs = ["ocaml", "ocamlrun", "ocamlscript"]
block-comment-tokens = { start = "(*", end = "*)" }
comment-token = "(**)" comment-token = "(**)"
language-servers = [ "ocamllsp" ] language-servers = [ "ocamllsp" ]
indent = { tab-width = 2, unit = " " } indent = { tab-width = 2, unit = " " }
@ -1072,6 +1102,7 @@ name = "ocaml-interface"
scope = "source.ocaml.interface" scope = "source.ocaml.interface"
file-types = ["mli"] file-types = ["mli"]
shebangs = [] shebangs = []
block-comment-tokens = { start = "(*", end = "*)" }
comment-token = "(**)" comment-token = "(**)"
language-servers = [ "ocamllsp" ] language-servers = [ "ocamllsp" ]
indent = { tab-width = 2, unit = " " } indent = { tab-width = 2, unit = " " }
@ -1091,15 +1122,16 @@ name = "lua"
injection-regex = "lua" injection-regex = "lua"
scope = "source.lua" scope = "source.lua"
file-types = ["lua"] file-types = ["lua"]
shebangs = ["lua"] shebangs = ["lua", "luajit"]
roots = [".luarc.json", ".luacheckrc", ".stylua.toml", "selene.toml", ".git"] roots = [".luarc.json", ".luacheckrc", ".stylua.toml", "selene.toml", ".git"]
comment-token = "--" comment-token = "--"
block-comment-tokens = { start = "--[[", end = "--]]" }
indent = { tab-width = 2, unit = " " } indent = { tab-width = 2, unit = " " }
language-servers = [ "lua-language-server" ] language-servers = [ "lua-language-server" ]
[[grammar]] [[grammar]]
name = "lua" name = "lua"
source = { git = "https://github.com/MunifTanjim/tree-sitter-lua", rev = "887dfd4e83c469300c279314ff1619b1d0b85b91" } source = { git = "https://github.com/tree-sitter-grammars/tree-sitter-lua", rev = "88e446476a1e97a8724dff7a23e2d709855077f2" }
[[language]] [[language]]
name = "svelte" name = "svelte"
@ -1119,6 +1151,7 @@ scope = "source.vue"
injection-regex = "vue" injection-regex = "vue"
file-types = ["vue"] file-types = ["vue"]
roots = ["package.json"] roots = ["package.json"]
block-comment-tokens = { start = "<!--", end = "-->" }
indent = { tab-width = 2, unit = " " } indent = { tab-width = 2, unit = " " }
language-servers = [ "vuels" ] language-servers = [ "vuels" ]
@ -1146,6 +1179,7 @@ injection-regex = "haskell"
file-types = ["hs", "hs-boot"] file-types = ["hs", "hs-boot"]
roots = ["Setup.hs", "stack.yaml", "cabal.project"] roots = ["Setup.hs", "stack.yaml", "cabal.project"]
comment-token = "--" comment-token = "--"
block-comment-tokens = { start = "{-", end = "-}" }
language-servers = [ "haskell-language-server" ] language-servers = [ "haskell-language-server" ]
indent = { tab-width = 2, unit = " " } indent = { tab-width = 2, unit = " " }
@ -1171,6 +1205,7 @@ injection-regex = "purescript"
file-types = ["purs"] file-types = ["purs"]
roots = ["spago.yaml", "spago.dhall", "bower.json"] roots = ["spago.yaml", "spago.dhall", "bower.json"]
comment-token = "--" comment-token = "--"
block-comment-tokens = { start = "{-", end = "-}" }
language-servers = [ "purescript-language-server" ] language-servers = [ "purescript-language-server" ]
indent = { tab-width = 2, unit = " " } indent = { tab-width = 2, unit = " " }
auto-format = true auto-format = true
@ -1225,6 +1260,7 @@ scope = "source.prolog"
file-types = ["pl", "prolog"] file-types = ["pl", "prolog"]
shebangs = ["swipl"] shebangs = ["swipl"]
comment-token = "%" comment-token = "%"
block-comment-tokens = { start = "/*", end = "*/" }
language-servers = [ "swipl" ] language-servers = [ "swipl" ]
[[language]] [[language]]
@ -1244,6 +1280,7 @@ name = "cmake"
scope = "source.cmake" scope = "source.cmake"
file-types = ["cmake", { glob = "CMakeLists.txt" }] file-types = ["cmake", { glob = "CMakeLists.txt" }]
comment-token = "#" comment-token = "#"
block-comment-tokens = { start = "#[[", end = "]]" }
indent = { tab-width = 2, unit = " " } indent = { tab-width = 2, unit = " " }
language-servers = [ "cmake-language-server" ] language-servers = [ "cmake-language-server" ]
injection-regex = "cmake" injection-regex = "cmake"
@ -1270,6 +1307,7 @@ name = "glsl"
scope = "source.glsl" scope = "source.glsl"
file-types = ["glsl", "vert", "tesc", "tese", "geom", "frag", "comp" ] file-types = ["glsl", "vert", "tesc", "tese", "geom", "frag", "comp" ]
comment-token = "//" comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
indent = { tab-width = 4, unit = " " } indent = { tab-width = 4, unit = " " }
injection-regex = "glsl" injection-regex = "glsl"
@ -1307,6 +1345,7 @@ file-types = ["rkt", "rktd", "rktl", "scrbl"]
shebangs = ["racket"] shebangs = ["racket"]
comment-token = ";" comment-token = ";"
indent = { tab-width = 2, unit = " " } indent = { tab-width = 2, unit = " " }
block-comment-tokens = { start = "#|", end = "|#" }
language-servers = [ "racket" ] language-servers = [ "racket" ]
grammar = "scheme" grammar = "scheme"
@ -1341,6 +1380,7 @@ name = "wgsl"
scope = "source.wgsl" scope = "source.wgsl"
file-types = ["wgsl"] file-types = ["wgsl"]
comment-token = "//" comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
language-servers = [ "wgsl_analyzer" ] language-servers = [ "wgsl_analyzer" ]
indent = { tab-width = 4, unit = " " } indent = { tab-width = 4, unit = " " }
@ -1387,6 +1427,7 @@ name = "tablegen"
scope = "source.tablegen" scope = "source.tablegen"
file-types = ["td"] file-types = ["td"]
comment-token = "//" comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
indent = { tab-width = 2, unit = " " } indent = { tab-width = 2, unit = " " }
injection-regex = "tablegen" injection-regex = "tablegen"
@ -1400,8 +1441,9 @@ scope = "source.md"
injection-regex = "md|markdown" injection-regex = "md|markdown"
file-types = ["md", "markdown", "mkd", "mdwn", "mdown", "markdn", "mdtxt", "mdtext", "workbook", { glob = "PULLREQ_EDITMSG" }] file-types = ["md", "markdown", "mkd", "mdwn", "mdown", "markdn", "mdtxt", "mdtext", "workbook", { glob = "PULLREQ_EDITMSG" }]
roots = [".marksman.toml"] roots = [".marksman.toml"]
language-servers = [ "marksman" ] language-servers = [ "marksman", "markdown-oxide" ]
indent = { tab-width = 2, unit = " " } indent = { tab-width = 2, unit = " " }
block-comment-tokens = { start = "<!--", end = "-->" }
[[grammar]] [[grammar]]
name = "markdown" name = "markdown"
@ -1425,6 +1467,7 @@ file-types = ["dart"]
roots = ["pubspec.yaml"] roots = ["pubspec.yaml"]
auto-format = true auto-format = true
comment-token = "//" comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
language-servers = [ "dart" ] language-servers = [ "dart" ]
indent = { tab-width = 2, unit = " " } indent = { tab-width = 2, unit = " " }
@ -1438,6 +1481,7 @@ scope = "source.scala"
roots = ["build.sbt", "build.sc", "build.gradle", "build.gradle.kts", "pom.xml", ".scala-build"] roots = ["build.sbt", "build.sc", "build.gradle", "build.gradle.kts", "pom.xml", ".scala-build"]
file-types = ["scala", "sbt", "sc"] file-types = ["scala", "sbt", "sc"]
comment-token = "//" comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
indent = { tab-width = 2, unit = " " } indent = { tab-width = 2, unit = " " }
language-servers = [ "metals" ] language-servers = [ "metals" ]
@ -1450,7 +1494,20 @@ name = "dockerfile"
scope = "source.dockerfile" scope = "source.dockerfile"
injection-regex = "docker|dockerfile" injection-regex = "docker|dockerfile"
roots = ["Dockerfile", "Containerfile"] roots = ["Dockerfile", "Containerfile"]
file-types = [{ glob = "Dockerfile*" }, { glob = "dockerfile*" }, { glob = "Containerfile*" }, { glob = "containerfile*" }] file-types = [
"Dockerfile",
{ glob = "Dockerfile" },
{ glob = "Dockerfile.*" },
"dockerfile",
{ glob = "dockerfile" },
{ glob = "dockerfile.*" },
"Containerfile",
{ glob = "Containerfile" },
{ glob = "Containerfile.*" },
"containerfile",
{ glob = "containerfile" },
{ glob = "containerfile.*" },
]
comment-token = "#" comment-token = "#"
indent = { tab-width = 2, unit = " " } indent = { tab-width = 2, unit = " " }
language-servers = [ "docker-langserver" ] language-servers = [ "docker-langserver" ]
@ -1459,6 +1516,16 @@ language-servers = [ "docker-langserver" ]
name = "dockerfile" name = "dockerfile"
source = { git = "https://github.com/camdencheek/tree-sitter-dockerfile", rev = "8ee3a0f7587b2bd8c45c8cb7d28bd414604aec62" } source = { git = "https://github.com/camdencheek/tree-sitter-dockerfile", rev = "8ee3a0f7587b2bd8c45c8cb7d28bd414604aec62" }
[[language]]
name = "docker-compose"
scope = "source.yaml.docker-compose"
roots = ["docker-compose.yaml", "docker-compose.yml"]
language-servers = [ "docker-compose-langserver" ]
file-types = [{ glob = "docker-compose.yaml" }, { glob = "docker-compose.yml" }]
comment-token = "#"
indent = { tab-width = 2, unit = " " }
grammar = "yaml"
[[language]] [[language]]
name = "git-commit" name = "git-commit"
scope = "git.commitmsg" scope = "git.commitmsg"
@ -1548,6 +1615,8 @@ scope = "source.graphql"
injection-regex = "graphql" injection-regex = "graphql"
file-types = ["gql", "graphql", "graphqls"] file-types = ["gql", "graphql", "graphqls"]
language-servers = [ "graphql-language-service" ] language-servers = [ "graphql-language-service" ]
comment-token = "#"
block-comment-tokens = { start = "\"\"\"", end = "\"\"\"" }
indent = { tab-width = 2, unit = " " } indent = { tab-width = 2, unit = " " }
[[grammar]] [[grammar]]
@ -1562,6 +1631,7 @@ file-types = ["elm"]
roots = ["elm.json"] roots = ["elm.json"]
auto-format = true auto-format = true
comment-token = "--" comment-token = "--"
block-comment-tokens = { start = "{-", end = "-}" }
language-servers = [ "elm-language-server" ] language-servers = [ "elm-language-server" ]
indent = { tab-width = 4, unit = " " } indent = { tab-width = 4, unit = " " }
@ -1574,6 +1644,7 @@ name = "iex"
scope = "source.iex" scope = "source.iex"
injection-regex = "iex" injection-regex = "iex"
file-types = ["iex"] file-types = ["iex"]
comment-token = "#"
[[grammar]] [[grammar]]
name = "iex" name = "iex"
@ -1587,6 +1658,7 @@ file-types = ["res"]
roots = ["bsconfig.json"] roots = ["bsconfig.json"]
auto-format = true auto-format = true
comment-token = "//" comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
language-servers = [ "rescript-language-server" ] language-servers = [ "rescript-language-server" ]
indent = { tab-width = 2, unit = " " } indent = { tab-width = 2, unit = " " }
@ -1598,7 +1670,7 @@ source = { git = "https://github.com/jaredramirez/tree-sitter-rescript", rev = "
name = "erlang" name = "erlang"
scope = "source.erlang" scope = "source.erlang"
injection-regex = "erl(ang)?" injection-regex = "erl(ang)?"
file-types = ["erl", "hrl", "app", { glob = "rebar.config" }, { glob = "rebar.lock" }] file-types = ["erl", "hrl", "app", { glob = "rebar.config" }, { glob = "rebar.lock" }, { glob = "*.app.src" }]
roots = ["rebar.config"] roots = ["rebar.config"]
shebangs = ["escript"] shebangs = ["escript"]
comment-token = "%%" comment-token = "%%"
@ -1615,7 +1687,7 @@ language-servers = [ "erlang-ls" ]
[[grammar]] [[grammar]]
name = "erlang" name = "erlang"
source = { git = "https://github.com/the-mikedavis/tree-sitter-erlang", rev = "ce0ed253d72c199ab93caba7542b6f62075339c4" } source = { git = "https://github.com/the-mikedavis/tree-sitter-erlang", rev = "731e50555a51f0d8635992b0e60dc98cc47a58d7" }
[[language]] [[language]]
name = "kotlin" name = "kotlin"
@ -1623,6 +1695,7 @@ scope = "source.kotlin"
file-types = ["kt", "kts"] file-types = ["kt", "kts"]
roots = ["settings.gradle", "settings.gradle.kts"] roots = ["settings.gradle", "settings.gradle.kts"]
comment-token = "//" comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
indent = { tab-width = 4, unit = " " } indent = { tab-width = 4, unit = " " }
language-servers = [ "kotlin-language-server" ] language-servers = [ "kotlin-language-server" ]
@ -1637,6 +1710,7 @@ injection-regex = "(hcl|tf|nomad)"
language-id = "terraform" language-id = "terraform"
file-types = ["hcl", "tf", "nomad"] file-types = ["hcl", "tf", "nomad"]
comment-token = "#" comment-token = "#"
block-comment-tokens = { start = "/*", end = "*/" }
indent = { tab-width = 2, unit = " " } indent = { tab-width = 2, unit = " " }
language-servers = [ "terraform-ls" ] language-servers = [ "terraform-ls" ]
auto-format = true auto-format = true
@ -1651,6 +1725,7 @@ scope = "source.tfvars"
language-id = "terraform-vars" language-id = "terraform-vars"
file-types = ["tfvars"] file-types = ["tfvars"]
comment-token = "#" comment-token = "#"
block-comment-tokens = { start = "/*", end = "*/" }
indent = { tab-width = 2, unit = " " } indent = { tab-width = 2, unit = " " }
language-servers = [ "terraform-ls" ] language-servers = [ "terraform-ls" ]
auto-format = true auto-format = true
@ -1673,6 +1748,7 @@ scope = "source.sol"
injection-regex = "(sol|solidity)" injection-regex = "(sol|solidity)"
file-types = ["sol"] file-types = ["sol"]
comment-token = "//" comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
indent = { tab-width = 4, unit = " " } indent = { tab-width = 4, unit = " " }
language-servers = [ "solc" ] language-servers = [ "solc" ]
@ -1701,6 +1777,7 @@ scope = "source.ron"
injection-regex = "ron" injection-regex = "ron"
file-types = ["ron"] file-types = ["ron"]
comment-token = "//" comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
indent = { tab-width = 4, unit = " " } indent = { tab-width = 4, unit = " " }
[[grammar]] [[grammar]]
@ -1742,6 +1819,7 @@ injection-regex = "(r|R)md"
file-types = ["rmd", "Rmd"] file-types = ["rmd", "Rmd"]
indent = { tab-width = 2, unit = " " } indent = { tab-width = 2, unit = " " }
grammar = "markdown" grammar = "markdown"
block-comment-tokens = { start = "<!--", end = "-->" }
language-servers = [ "r" ] language-servers = [ "r" ]
[[language]] [[language]]
@ -1751,6 +1829,7 @@ injection-regex = "swift"
file-types = ["swift"] file-types = ["swift"]
roots = [ "Package.swift" ] roots = [ "Package.swift" ]
comment-token = "//" comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
auto-format = true auto-format = true
language-servers = [ "sourcekit-lsp" ] language-servers = [ "sourcekit-lsp" ]
@ -1763,6 +1842,7 @@ name = "erb"
scope = "text.html.erb" scope = "text.html.erb"
injection-regex = "erb" injection-regex = "erb"
file-types = ["erb"] file-types = ["erb"]
block-comment-tokens = { start = "<!--", end = "-->" }
indent = { tab-width = 2, unit = " " } indent = { tab-width = 2, unit = " " }
grammar = "embedded-template" grammar = "embedded-template"
@ -1771,6 +1851,7 @@ name = "ejs"
scope = "text.html.ejs" scope = "text.html.ejs"
injection-regex = "ejs" injection-regex = "ejs"
file-types = ["ejs"] file-types = ["ejs"]
block-comment-tokens = { start = "<!--", end = "-->" }
indent = { tab-width = 2, unit = " " } indent = { tab-width = 2, unit = " " }
grammar = "embedded-template" grammar = "embedded-template"
@ -1784,6 +1865,7 @@ scope = "source.eex"
injection-regex = "eex" injection-regex = "eex"
file-types = ["eex"] file-types = ["eex"]
roots = ["mix.exs", "mix.lock"] roots = ["mix.exs", "mix.lock"]
block-comment-tokens = { start = "<!--", end = "-->" }
indent = { tab-width = 2, unit = " " } indent = { tab-width = 2, unit = " " }
[[grammar]] [[grammar]]
@ -1796,6 +1878,7 @@ scope = "source.heex"
injection-regex = "heex" injection-regex = "heex"
file-types = ["heex"] file-types = ["heex"]
roots = ["mix.exs", "mix.lock"] roots = ["mix.exs", "mix.lock"]
block-comment-tokens = { start = "<!--", end = "-->" }
indent = { tab-width = 2, unit = " " } indent = { tab-width = 2, unit = " " }
language-servers = [ "elixir-ls" ] language-servers = [ "elixir-ls" ]
@ -1808,12 +1891,13 @@ name = "sql"
scope = "source.sql" scope = "source.sql"
file-types = ["sql", "dsql"] file-types = ["sql", "dsql"]
comment-token = "--" comment-token = "--"
block-comment-tokens = { start = "/*", end = "*/" }
indent = { tab-width = 4, unit = " " } indent = { tab-width = 4, unit = " " }
injection-regex = "sql" injection-regex = "sql"
[[grammar]] [[grammar]]
name = "sql" name = "sql"
source = { git = "https://github.com/DerekStride/tree-sitter-sql", rev = "25be0b8f17e9189ad9e1b875869d025c5aec1286" } source = { git = "https://github.com/DerekStride/tree-sitter-sql", rev = "da2d1eff425b146d3c8cab7be8dfa98b11d896dc" }
[[language]] [[language]]
name = "gdscript" name = "gdscript"
@ -1866,6 +1950,7 @@ scope = "source.vala"
injection-regex = "vala" injection-regex = "vala"
file-types = ["vala", "vapi"] file-types = ["vala", "vapi"]
comment-token = "//" comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
indent = { tab-width = 2, unit = " " } indent = { tab-width = 2, unit = " " }
language-servers = [ "vala-language-server" ] language-servers = [ "vala-language-server" ]
@ -1891,6 +1976,7 @@ scope = "source.devicetree"
injection-regex = "(dtsi?|devicetree|fdt)" injection-regex = "(dtsi?|devicetree|fdt)"
file-types = ["dts", "dtsi"] file-types = ["dts", "dtsi"]
comment-token = "//" comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
indent = { tab-width = 4, unit = "\t" } indent = { tab-width = 4, unit = "\t" }
[[grammar]] [[grammar]]
@ -1929,6 +2015,7 @@ file-types = ["odin"]
roots = ["ols.json"] roots = ["ols.json"]
language-servers = [ "ols" ] language-servers = [ "ols" ]
comment-token = "//" comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
indent = { tab-width = 4, unit = "\t" } indent = { tab-width = 4, unit = "\t" }
formatter = { command = "odinfmt", args = [ "-stdin", "true" ] } formatter = { command = "odinfmt", args = [ "-stdin", "true" ] }
@ -1986,6 +2073,7 @@ roots = ["v.mod"]
language-servers = [ "vlang-language-server" ] language-servers = [ "vlang-language-server" ]
auto-format = true auto-format = true
comment-token = "//" comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
indent = { tab-width = 4, unit = "\t" } indent = { tab-width = 4, unit = "\t" }
[[grammar]] [[grammar]]
@ -1997,6 +2085,7 @@ name = "verilog"
scope = "source.verilog" scope = "source.verilog"
file-types = ["v", "vh", "sv", "svh"] file-types = ["v", "vh", "sv", "svh"]
comment-token = "//" comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
language-servers = [ "svlangserver" ] language-servers = [ "svlangserver" ]
indent = { tab-width = 2, unit = " " } indent = { tab-width = 2, unit = " " }
injection-regex = "verilog" injection-regex = "verilog"
@ -2033,6 +2122,7 @@ scope = "source.openscad"
injection-regex = "openscad" injection-regex = "openscad"
file-types = ["scad"] file-types = ["scad"]
comment-token = "//" comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
language-servers = [ "openscad-lsp" ] language-servers = [ "openscad-lsp" ]
indent = { tab-width = 2, unit = "\t" } indent = { tab-width = 2, unit = "\t" }
@ -2080,6 +2170,7 @@ grammar = "python"
[[language]] [[language]]
name = "elvish" name = "elvish"
scope = "source.elvish" scope = "source.elvish"
shebangs = ["elvish"]
file-types = ["elv"] file-types = ["elv"]
comment-token = "#" comment-token = "#"
indent = { tab-width = 2, unit = " " } indent = { tab-width = 2, unit = " " }
@ -2097,6 +2188,7 @@ injection-regex = "idr"
file-types = ["idr"] file-types = ["idr"]
shebangs = [] shebangs = []
comment-token = "--" comment-token = "--"
block-comment-tokens = { start = "{-", end = "-}" }
indent = { tab-width = 2, unit = " " } indent = { tab-width = 2, unit = " " }
language-servers = [ "idris2-lsp" ] language-servers = [ "idris2-lsp" ]
@ -2132,6 +2224,7 @@ scope = "source.dot"
injection-regex = "dot" injection-regex = "dot"
file-types = ["dot"] file-types = ["dot"]
comment-token = "//" comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
indent = { tab-width = 4, unit = " " } indent = { tab-width = 4, unit = " " }
language-servers = [ "dot-language-server" ] language-servers = [ "dot-language-server" ]
@ -2161,12 +2254,13 @@ scope = "source.slint"
injection-regex = "slint" injection-regex = "slint"
file-types = ["slint"] file-types = ["slint"]
comment-token = "//" comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
indent = { tab-width = 4, unit = " " } indent = { tab-width = 4, unit = " " }
language-servers = [ "slint-lsp" ] language-servers = [ "slint-lsp" ]
[[grammar]] [[grammar]]
name = "slint" name = "slint"
source = { git = "https://github.com/slint-ui/tree-sitter-slint", rev = "15618215b79b9db08f824a5c97a12d073dcc1c00" } source = { git = "https://github.com/slint-ui/tree-sitter-slint", rev = "3c82235f41b63f35a01ae3888206e93585cbb84a" }
[[language]] [[language]]
name = "task" name = "task"
@ -2210,6 +2304,7 @@ scope = "source.pascal"
injection-regex = "pascal" injection-regex = "pascal"
file-types = ["pas", "pp", "inc", "lpr", "lfm"] file-types = ["pas", "pp", "inc", "lpr", "lfm"]
comment-token = "//" comment-token = "//"
block-comment-tokens = { start = "{", end = "}" }
indent = { tab-width = 2, unit = " " } indent = { tab-width = 2, unit = " " }
language-servers = [ "pasls" ] language-servers = [ "pasls" ]
@ -2222,7 +2317,7 @@ name = "sml"
scope = "source.sml" scope = "source.sml"
injection-regex = "sml" injection-regex = "sml"
file-types = ["sml"] file-types = ["sml"]
comment-token = "(*" block-comment-tokens = { start = "(*", end = "*)" }
[[grammar]] [[grammar]]
name = "sml" name = "sml"
@ -2234,6 +2329,7 @@ scope = "source.jsonnet"
file-types = ["libsonnet", "jsonnet"] file-types = ["libsonnet", "jsonnet"]
roots = ["jsonnetfile.json"] roots = ["jsonnetfile.json"]
comment-token = "//" comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
indent = { tab-width = 2, unit = " " } indent = { tab-width = 2, unit = " " }
language-servers = [ "jsonnet-language-server" ] language-servers = [ "jsonnet-language-server" ]
@ -2246,6 +2342,7 @@ name = "astro"
scope = "source.astro" scope = "source.astro"
injection-regex = "astro" injection-regex = "astro"
file-types = ["astro"] file-types = ["astro"]
block-comment-tokens = { start = "<!--", end = "-->" }
indent = { tab-width = 2, unit = " " } indent = { tab-width = 2, unit = " " }
[[grammar]] [[grammar]]
@ -2269,6 +2366,7 @@ source = { git = "https://github.com/vito/tree-sitter-bass", rev = "501133e260d7
name = "wat" name = "wat"
scope = "source.wat" scope = "source.wat"
comment-token = ";;" comment-token = ";;"
block-comment-tokens = { start = "(;", end = ";)" }
file-types = ["wat"] file-types = ["wat"]
[[grammar]] [[grammar]]
@ -2279,6 +2377,7 @@ source = { git = "https://github.com/wasm-lsp/tree-sitter-wasm", rev = "2ca28a9f
name = "wast" name = "wast"
scope = "source.wast" scope = "source.wast"
comment-token = ";;" comment-token = ";;"
block-comment-tokens = { start = "(;", end = ";)" }
file-types = ["wast"] file-types = ["wast"]
[[grammar]] [[grammar]]
@ -2290,6 +2389,7 @@ name = "d"
scope = "source.d" scope = "source.d"
file-types = [ "d", "dd" ] file-types = [ "d", "dd" ]
comment-token = "//" comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
injection-regex = "d" injection-regex = "d"
indent = { tab-width = 4, unit = " "} indent = { tab-width = 4, unit = " "}
language-servers = [ "serve-d" ] language-servers = [ "serve-d" ]
@ -2316,6 +2416,7 @@ name = "kdl"
scope = "source.kdl" scope = "source.kdl"
file-types = ["kdl"] file-types = ["kdl"]
comment-token = "//" comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
injection-regex = "kdl" injection-regex = "kdl"
[[grammar]] [[grammar]]
@ -2359,6 +2460,8 @@ file-types = [
"menu", "menu",
"mxml", "mxml",
"nuspec", "nuspec",
"osc",
"osm",
"pt", "pt",
"publishsettings", "publishsettings",
"pubxml", "pubxml",
@ -2382,8 +2485,10 @@ file-types = [
"xul", "xul",
"xoml", "xoml",
"musicxml", "musicxml",
"glif" "glif",
"ui"
] ]
block-comment-tokens = { start = "<!--", end = "-->" }
indent = { tab-width = 2, unit = " " } indent = { tab-width = 2, unit = " " }
[language.auto-pairs] [language.auto-pairs]
@ -2423,6 +2528,7 @@ scope = "source.wit"
injection-regex = "wit" injection-regex = "wit"
file-types = ["wit"] file-types = ["wit"]
comment-token = "//" comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
indent = { tab-width = 2, unit = " " } indent = { tab-width = 2, unit = " " }
[language.auto-pairs] [language.auto-pairs]
@ -2487,6 +2593,7 @@ scope = "source.bicep"
file-types = ["bicep"] file-types = ["bicep"]
auto-format = true auto-format = true
comment-token = "//" comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
indent = { tab-width = 2, unit = " "} indent = { tab-width = 2, unit = " "}
language-servers = [ "bicep-langserver" ] language-servers = [ "bicep-langserver" ]
@ -2499,6 +2606,8 @@ name = "qml"
scope = "source.qml" scope = "source.qml"
file-types = ["qml"] file-types = ["qml"]
language-servers = [ "qmlls" ] language-servers = [ "qmlls" ]
comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
indent = { tab-width = 4, unit = " " } indent = { tab-width = 4, unit = " " }
grammar = "qmljs" grammar = "qmljs"
@ -2538,6 +2647,7 @@ injection-regex = "pony"
roots = ["corral.json", "lock.json"] roots = ["corral.json", "lock.json"]
indent = { tab-width = 2, unit = " " } indent = { tab-width = 2, unit = " " }
comment-token = "//" comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
[[grammar]] [[grammar]]
name = "ponylang" name = "ponylang"
@ -2549,6 +2659,7 @@ scope = "source.dhall"
injection-regex = "dhall" injection-regex = "dhall"
file-types = ["dhall"] file-types = ["dhall"]
comment-token = "--" comment-token = "--"
block-comment-tokens = { start = "{-", end = "-}" }
indent = { tab-width = 2, unit = " " } indent = { tab-width = 2, unit = " " }
language-servers = [ "dhall-lsp-server" ] language-servers = [ "dhall-lsp-server" ]
formatter = { command = "dhall" , args = ["format"] } formatter = { command = "dhall" , args = ["format"] }
@ -2572,6 +2683,7 @@ scope = "source.msbuild"
injection-regex = "msbuild" injection-regex = "msbuild"
file-types = ["proj", "vbproj", "csproj", "fsproj", "targets", "props"] file-types = ["proj", "vbproj", "csproj", "fsproj", "targets", "props"]
indent = { tab-width = 2, unit = " " } indent = { tab-width = 2, unit = " " }
block-comment-tokens = { start = "<!--", end = "-->" }
grammar = "xml" grammar = "xml"
[language.auto-pairs] [language.auto-pairs]
@ -2618,7 +2730,7 @@ scope = "source.tal"
injection-regex = "tal" injection-regex = "tal"
file-types = ["tal"] file-types = ["tal"]
auto-format = false auto-format = false
comment-token = "(" block-comment-tokens = { start = "(", end = ")" }
[[grammar]] [[grammar]]
name = "uxntal" name = "uxntal"
@ -2752,6 +2864,7 @@ injection-regex = "nim"
file-types = ["nim", "nims", "nimble"] file-types = ["nim", "nims", "nimble"]
shebangs = [] shebangs = []
comment-token = "#" comment-token = "#"
block-comment-tokens = { start = "#[", end = "]#" }
indent = { tab-width = 2, unit = " " } indent = { tab-width = 2, unit = " " }
language-servers = [ "nimlangserver" ] language-servers = [ "nimlangserver" ]
@ -2762,10 +2875,9 @@ language-servers = [ "nimlangserver" ]
"'" = "'" "'" = "'"
'{' = '}' '{' = '}'
# Nim's tree-sitter grammar is in heavy development.
[[grammar]] [[grammar]]
name = "nim" name = "nim"
source = { git = "https://github.com/aMOPel/tree-sitter-nim", rev = "240239b232550e431d67de250d1b5856209e7f06" } source = { git = "https://github.com/alaviss/tree-sitter-nim", rev = "c5f0ce3b65222f5dbb1a12f9fe894524881ad590" }
[[language]] [[language]]
name = "cabal" name = "cabal"
@ -2791,6 +2903,7 @@ source = { git = "https://github.com/pfeiferj/tree-sitter-hurl", rev = "264c4206
[[language]] [[language]]
name = "markdoc" name = "markdoc"
scope = "text.markdoc" scope = "text.markdoc"
block-comment-tokens = { start = "<!--", end = "-->" }
file-types = ["mdoc"] file-types = ["mdoc"]
language-servers = [ "markdoc-ls" ] language-servers = [ "markdoc-ls" ]
@ -2816,9 +2929,9 @@ scope = "source.just"
file-types = [{ glob = "justfile" }, { glob = "Justfile" }, { glob = ".justfile" }, { glob = ".Justfile" }] file-types = [{ glob = "justfile" }, { glob = "Justfile" }, { glob = ".justfile" }, { glob = ".Justfile" }]
injection-regex = "just" injection-regex = "just"
comment-token = "#" comment-token = "#"
indent = { tab-width = 4, unit = "\t" } indent = { tab-width = 4, unit = " " }
auto-format = true # auto-format = true
formatter = { command = "just", args = ["--dump"] } # formatter = { command = "just", args = ["--dump"] } # Please see: https://github.com/helix-editor/helix/issues/9703
[[grammar]] [[grammar]]
name = "just" name = "just"
@ -2844,6 +2957,7 @@ scope = "source.blueprint"
injection-regex = "blueprint" injection-regex = "blueprint"
file-types = ["blp"] file-types = ["blp"]
comment-token = "//" comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
language-servers = [ "blueprint-compiler" ] language-servers = [ "blueprint-compiler" ]
indent = { tab-width = 4, unit = " " } indent = { tab-width = 4, unit = " " }
@ -2896,6 +3010,7 @@ name = "webc"
scope = "text.html.webc" scope = "text.html.webc"
injection-regex = "webc" injection-regex = "webc"
file-types = ["webc"] file-types = ["webc"]
block-comment-tokens = { start = "<!--", end = "-->" }
indent = { tab-width = 2, unit = " " } indent = { tab-width = 2, unit = " " }
grammar = "html" grammar = "html"
@ -3113,3 +3228,34 @@ indent = { tab-width = 2, unit = " " }
[[grammar]] [[grammar]]
name = "pkl" name = "pkl"
source = { git = "https://github.com/apple/tree-sitter-pkl", rev = "c03f04a313b712f8ab00a2d862c10b37318699ae" } source = { git = "https://github.com/apple/tree-sitter-pkl", rev = "c03f04a313b712f8ab00a2d862c10b37318699ae" }
[[language]]
name = "groovy"
language-id = "groovy"
scope = "source.groovy"
file-types = ["gradle", "groovy", "jenkinsfile", { glob = "Jenkinsfile" }, { glob = "Jenkinsfile.*" }]
shebangs = ["groovy"]
comment-token = "//"
indent = { tab-width = 2, unit = " " }
[[grammar]]
name = "groovy"
source = { git = "https://github.com/Decodetalkers/tree-sitter-groovy", rev = "7e023227f46fee428b16a0288eeb0f65ee2523ec" }
[[language]]
name = "fidl"
scope = "source.fidl"
injection-regex = "fidl"
file-types = ["fidl"]
comment-token = "//"
indent = { tab-width = 4, unit = " " }
[language.auto-pairs]
'"' = '"'
'{' = '}'
'(' = ')'
'<' = '>'
[[grammar]]
name = "fidl"
source = { git = "https://github.com/google/tree-sitter-fidl", rev = "bdbb635a7f5035e424f6173f2f11b9cd79703f8d" }

@ -56,9 +56,34 @@
(documentation_comment)+ @comment.around (documentation_comment)+ @comment.around
(formal_parameter) @parameter.inside (formal_parameter_list
(
(formal_parameter) @parameter.inside . ","? @parameter.around
) @parameter.around
)
(optional_formal_parameters
(
(formal_parameter) @parameter.inside . ","? @parameter.around
) @parameter.around
)
(arguments
(
[
(argument) @parameter.inside
(named_argument (label) . (_)* @parameter.inside)
]
. ","? @parameter.around
) @parameter.around
)
(formal_parameter_list) @parameter.around (type_arguments
(
((_) . ("." . (_) @parameter.inside @parameter.around)?) @parameter.inside
. ","? @parameter.around
) @parameter.around
)
(expression_statement (expression_statement
((identifier) @_name (#any-of? @_name "test" "testWidgets")) ((identifier) @_name (#any-of? @_name "test" "testWidgets"))

@ -145,8 +145,9 @@
((atom) @constant.builtin.boolean ((atom) @constant.builtin.boolean
(#match? @constant.builtin.boolean "^(true|false)$")) (#match? @constant.builtin.boolean "^(true|false)$"))
(atom) @string.special.symbol (atom) @string.special.symbol
(string) @string [(string) (sigil)] @string
(character) @constant.character (character) @constant.character
(escape_sequence) @constant.character.escape
(integer) @constant.numeric.integer (integer) @constant.numeric.integer
(float) @constant.numeric.float (float) @constant.numeric.float

@ -0,0 +1,6 @@
[
(layout_declaration)
(protocol_declaration)
(resource_declaration)
(service_declaration)
] @fold

@ -0,0 +1,64 @@
[
"ajar"
"alias"
"as"
"bits"
"closed"
"compose"
"const"
"enum"
"error"
"flexible"
"library"
"open"
; "optional" we did not specify a node for optional yet
"overlay"
"protocol"
"reserved"
"resource"
"service"
"strict"
"struct"
"table"
"type"
"union"
"using"
] @keyword
(primitives_type) @type.builtin
(builtin_complex_type) @type.builtin
(const_declaration
(identifier) @constant)
[
"="
"|"
"&"
"->"
] @operator
(attribute
"@" @attribute
(identifier) @attribute)
(string_literal) @string
(numeric_literal) @constant.numeric
[
(true)
(false)
] @constant.builtin.boolean
(comment) @comment
[
"("
")"
"<"
">"
"{"
"}"
] @punctuation.bracket

@ -0,0 +1,2 @@
((comment) @injection.content
(#set! injection.language "comment"))

@ -0,0 +1,96 @@
(unit
(identifier) @variable)
(string
(identifier) @variable)
(escape_sequence) @constant.character.escape
(block
(unit
(identifier) @namespace))
(func
(identifier) @function)
(number) @constant.numeric
((identifier) @constant.builtin.boolean
(#any-of? @constant.builtin.boolean "true" "false"))
((identifier) @constant
(#match? @constant "^[A-Z][A-Z\\d_]*$"))
((identifier) @constant.builtin
(#eq? @constant.builtin "null"))
((identifier) @type
(#any-of? @type
"String"
"Map"
"Object"
"Boolean"
"Integer"
"List"))
((identifier) @function.builtin
(#any-of? @function.builtin
"void"
"id"
"version"
"apply"
"implementation"
"testImplementation"
"androidTestImplementation"
"debugImplementation"))
((identifier) @keyword.storage.modifier
(#eq? @keyword.storage.modifier "static"))
((identifier) @keyword.storage.type
(#any-of? @keyword.storage.type "class" "def" "interface"))
((identifier) @keyword
(#any-of? @keyword
"assert"
"new"
"extends"
"implements"
"instanceof"))
((identifier) @keyword.control.import
(#any-of? @keyword.control.import "import" "package"))
((identifier) @keyword.storage.modifier
(#any-of? @keyword.storage.modifier
"abstract"
"protected"
"private"
"public"))
((identifier) @keyword.control.exception
(#any-of? @keyword.control.exception
"throw"
"finally"
"try"
"catch"))
(string) @string
[
(line_comment)
(block_comment)
] @comment
((block_comment) @comment.block.documentation
(#match? @comment.block.documentation "^/[*][*][^*](?s:.)*[*]/$"))
((line_comment) @comment.block.documentation
(#match? @comment.block.documentation "^///[^/]*.*$"))
[
(operators)
(leading_key)
] @operator
["(" ")" "[" "]" "{" "}"] @punctuation.bracket

@ -0,0 +1,2 @@
([(line_comment) (block_comment)] @injection.content
(#set! injection.language "comment"))

@ -0,0 +1,6 @@
(comment) @comment.inside
(comment)+ @comment.around
(function_arguments
((_) @parameter.inside . ","? @parameter.around) @parameter.around)

@ -1,9 +1,5 @@
;;; Highlighting for lua ;;; Highlighting for lua
;;; Builtins
((identifier) @variable.builtin
(#eq? @variable.builtin "self"))
;; Keywords ;; Keywords
(if_statement (if_statement
@ -130,16 +126,65 @@
((identifier) @constant ((identifier) @constant
(#match? @constant "^[A-Z][A-Z_0-9]*$")) (#match? @constant "^[A-Z][A-Z_0-9]*$"))
;; Parameters ;; Tables
(parameters
(identifier) @variable.parameter)
; ;; Functions (field name: (identifier) @variable.other.member)
(function_declaration name: (identifier) @function)
(function_call name: (identifier) @function.call)
(function_declaration name: (dot_index_expression field: (identifier) @function)) (dot_index_expression field: (identifier) @variable.other.member)
(function_call name: (dot_index_expression field: (identifier) @function.call))
(table_constructor
[
"{"
"}"
] @constructor)
;; Functions
(parameters (identifier) @variable.parameter)
(function_call
(identifier) @function.builtin
(#any-of? @function.builtin
;; built-in functions in Lua 5.1
"assert" "collectgarbage" "dofile" "error" "getfenv" "getmetatable" "ipairs"
"load" "loadfile" "loadstring" "module" "next" "pairs" "pcall" "print"
"rawequal" "rawget" "rawset" "require" "select" "setfenv" "setmetatable"
"tonumber" "tostring" "type" "unpack" "xpcall"))
(function_declaration
name: [
(identifier) @function
(dot_index_expression
field: (identifier) @function)
])
(function_declaration
name: (method_index_expression
method: (identifier) @function.method))
(assignment_statement
(variable_list .
name: [
(identifier) @function
(dot_index_expression
field: (identifier) @function)
])
(expression_list .
value: (function_definition)))
(table_constructor
(field
name: (identifier) @function
value: (function_definition)))
(function_call
name: [
(identifier) @function.call
(dot_index_expression
field: (identifier) @function.call)
(method_index_expression
method: (identifier) @function.method.call)
])
; TODO: incorrectly highlights variable N in `N, nop = 42, function() end` ; TODO: incorrectly highlights variable N in `N, nop = 42, function() end`
(assignment_statement (assignment_statement
@ -153,6 +198,7 @@
;; Nodes ;; Nodes
(comment) @comment (comment) @comment
(string) @string (string) @string
(escape_sequence) @constant.character.escape
(number) @constant.numeric.integer (number) @constant.numeric.integer
(label_statement) @label (label_statement) @label
; A bit of a tricky one, this will only match field names ; A bit of a tricky one, this will only match field names
@ -162,7 +208,16 @@
;; Property ;; Property
(dot_index_expression field: (identifier) @variable.other.member) (dot_index_expression field: (identifier) @variable.other.member)
;; Variable ;; Variables
((identifier) @variable.builtin
(#eq? @variable.builtin "self"))
(variable_list
(attribute
"<" @punctuation.bracket
(identifier) @attribute
">" @punctuation.bracket))
(identifier) @variable (identifier) @variable
;; Error ;; Error

@ -1,33 +1,32 @@
;; Constants, Comments, and Literals ;; Constants, Comments, and Literals
(comment) @comment.line (comment) @comment.line
(multilineComment) @comment.block (block_comment) @comment.block
(docComment) @comment.block.documentation [
(multilineDocComment) @comment.block.documentation (documentation_comment)
; comments (block_documentation_comment)
] @comment.block.documentation
[(literal) (generalizedLit)] @constant
[(nil_lit)] @constant.builtin (nil_literal) @constant.builtin
[(bool_lit)] @constant.builtin.boolean ((identifier) @constant.builtin.boolean
[(char_lit)] @constant.character (#any-of? @constant.builtin.boolean "true" "false" "on" "off"))
[(char_esc_seq) (str_esc_seq)] @constant.character.escape
[(custom_numeric_lit)] @constant.numeric (char_literal) @constant.character
[(int_lit) (int_suffix)] @constant.numeric.integer (escape_sequence) @constant.character.escape
[(float_lit) (float_suffix)] @constant.numeric.float (custom_numeric_literal) @constant.numeric
(integer_literal) @constant.numeric.integer
(float_literal) @constant.numeric.float
; literals ; literals
; note: somewhat irritatingly for testing, lits have the same syntax highlighting as types ; todo: literal?
[ [
(str_lit) (long_string_literal)
(triplestr_lit) (raw_string_literal)
(rstr_lit) (generalized_string)
(generalized_str_lit) (interpreted_string_literal)
(generalized_triplestr_lit)
(interpolated_str_lit)
(interpolated_triplestr_lit)
] @string ] @string
; (generalized_string (string_content) @none) ; todo: attempt to un-match string_content
; [] @string.regexp ; [] @string.regexp
; string literals
[ [
"." "."
@ -44,272 +43,291 @@
"}" "}"
"{." "{."
".}" ".}"
"#["
"]#"
] @punctuation.bracket ] @punctuation.bracket
(interpolated_str_lit "&" @punctuation.special) ; todo: interpolated_str_lit?? & { }?
(interpolated_str_lit "{" @punctuation.special)
(interpolated_str_lit "}" @punctuation.special)
; punctuation
[ [
"and" "and"
"or" "or"
"xor" "xor"
"not" "not"
"in"
"notin"
"is"
"isnot"
"div" "div"
"mod" "mod"
"shl" "shl"
"shr" "shr"
"from"
"as"
"of"
"in"
"notin"
"is"
"isnot"
] @keyword.operator ] @keyword.operator
; operators: we list them explicitly to deliminate them from symbolic operators
[(operator) (opr) "="] @operator [(operator) "="] @operator
; all operators (must come after @keyword.operator) (infix_expression operator: _ @operator)
(prefix_expression operator: _ @operator)
(pragma) @attribute
; pragmas
(pragma_list
(identifier)? @attribute
(colon_expression
(identifier) @attribute)?)
;; Imports and Exports ;; Imports and Exports
(importStmt [
(keyw) @keyword.control.import "import"
(expr (primary (symbol) @namespace))? "export"
(expr (primary (arrayConstr (exprColonExprList (exprColonExpr (expr (primary (symbol) @namespace)))))))?) "include"
(exportStmt "from"
(keyw) @keyword.control.import ] @keyword.control.import
(expr (primary (symbol) @namespace))?
(expr (primary (arrayConstr (exprColonExprList (exprColonExpr (expr (primary (symbol) @namespace)))))))?) (import_statement
(fromStmt [
(keyw) @keyword.control.import (identifier) @namespace
(expr (primary (symbol) @namespace))? (expression_list (identifier) @namespace)
(expr (primary (arrayConstr (exprColonExprList (exprColonExpr (expr (primary (symbol) @namespace)))))))?) (except_clause
(includeStmt "except" @keyword.control.import
(keyw) @keyword.control.import (expression_list (identifier) @namespace))])
(expr (primary (symbol) @namespace))? (import_from_statement
(expr (primary (arrayConstr (exprColonExprList (exprColonExpr (expr (primary (symbol) @namespace)))))))?) (identifier) @namespace
(importExceptStmt (expression_list (identifier) @namespace))
(keyw) @keyword.control.import (include_statement (expression_list (identifier) @namespace))
(expr (primary (symbol) @namespace))? (export_statement (expression_list (identifier) @namespace))
(expr (primary (arrayConstr (exprColonExprList (exprColonExpr (expr (primary (symbol) @namespace)))))))?)
; import statements
; yeah, this is a bit gross.
;; Control Flow ;; Control Flow
(ifStmt (keyw) @keyword.control.conditional) [
(whenStmt (keyw) @keyword.control.conditional) "if"
(elifStmt (keyw) @keyword.control.conditional) "when"
(elseStmt (keyw) @keyword.control.conditional) "case"
(caseStmt (keyw) @keyword.control.conditional) "elif"
(ofBranch (keyw) @keyword.control.conditional) "else"
(inlineIfStmt (keyw) @keyword.control.conditional) ] @keyword.control.conditional
(inlineWhenStmt (keyw) @keyword.control.conditional) (of_branch "of" @keyword.control.conditional)
; conditional statements ; conditional statements
; todo: do block ; todo: do block
(forStmt "block" @keyword.control
. (keyw) @keyword.control.repeat (block label: (_) @label)
. (symbol) @variable
. (keyw) @keyword.control.repeat) [
(whileStmt (keyw) @keyword.control.repeat) "for"
; loop statements "while"
"continue"
(returnStmt (keyw) @keyword.control.repeat) "break"
(yieldStmt (keyw) @keyword.control.repeat) ] @keyword.control.repeat
(discardStmt (keyw) @keyword.control.repeat) (for "in" @keyword.control.repeat)
(breakStmt (keyw) @keyword.control.repeat)
(continueStmt (keyw) @keyword.control.repeat)
; control flow statements
(raiseStmt (keyw) @keyword.control.exception)
(tryStmt (keyw) @keyword.control.exception)
(tryExceptStmt (keyw) @keyword.control.exception)
(tryFinallyStmt (keyw) @keyword.control.exception)
(inlineTryStmt (keyw) @keyword.control.exception)
; (inlineTryExceptStmt (keyw) @keyword.control.exception)
; (inlineTryFinallyStmt (keyw) @keyword.control.exception)
; exception handling statements
(staticStmt (keyw) @keyword) [
(deferStmt (keyw) @keyword) "return"
(asmStmt (keyw) @keyword) "yield"
(bindStmt (keyw) @keyword) ] @keyword.control.return
(mixinStmt (keyw) @keyword) ; return statements
; miscellaneous blocks
(blockStmt [
(keyw) @keyword.control "try"
(symbol) @label) "except"
; block statements "finally"
"raise"
] @keyword.control.exception
; exception handling statements
[
"asm"
"bind"
"mixin"
"defer"
"static"
] @keyword
; miscellaneous keywords
;; Types and Type Declarations ;; Types and Type Declarations
(typeDef [
(keyw) @keyword.storage.type "let"
(symbol) @type) "var"
; names of new types type declarations "const"
"type"
(exprColonEqExpr "object"
. (expr (primary (symbol) @variable)) "tuple"
. (expr (primary (symbol) @type))) "enum"
; variables in inline tuple declarations "concept"
] @keyword.storage.type
(primarySuffix
(indexSuffix (var_type "var" @keyword.storage.modifier)
(exprColonEqExprList (out_type "out" @keyword.storage.modifier)
(exprColonEqExpr (distinct_type "distinct" @keyword.storage.modifier)
(expr (ref_type "ref" @keyword.storage.modifier)
(primary (pointer_type "ptr" @keyword.storage.modifier)
(symbol) @type))))))
; nested types in brackets, i.e. seq[string] (var_parameter "var" @keyword.storage.modifier)
(type_parameter "type" @keyword.storage.modifier)
(primaryTypeDef (symbol) @type) (static_parameter "static" @keyword.storage.modifier)
; primary types of type declarations (NOT nested types) (ref_parameter "ref" @keyword.storage.modifier)
(pointer_parameter "ptr" @keyword.storage.modifier)
(primaryTypeDef (primaryPrefix (keyw) @type)) ; (var_parameter (identifier) @variable.parameter)
; for consistency ; (type_parameter (identifier) @variable.parameter)
; (static_parameter (identifier) @variable.parameter)
(primaryTypeDesc (symbol) @type) ; (ref_parameter (identifier) @variable.parameter)
; type annotations, on declarations or in objects ; (pointer_parameter (identifier) @variable.parameter)
; todo: when are these used??
(primaryTypeDesc (primaryPrefix (keyw) @type))
; var types etc (type_section
(type_declaration
(genericParamList (genericParam (symbol) @type)) (type_symbol_declaration
; types in generic blocks name: (_) @type)))
; types in type declarations
(enumDecl (keyw) @keyword.storage.type)
(enumElement (symbol) @type.enum.variant) (enum_field_declaration
; enum declarations and elements (symbol_declaration
name: (_) @type.enum.variant))
(tupleDecl (keyw) @keyword.storage.type) ; types as enum variants
; tuple declarations
(variant_declaration
(objectDecl (keyw) @keyword.storage.type) alternative: (of_branch
(objectPart (symbol) @variable.other.member) values: (expression_list (_) @type.enum.variant)))
; object declarations and fields ; types as object variants
(objectCase (case
(keyw) @keyword.control.conditional (of_branch
(symbol) @variable.other.member) values: (expression_list (_) @constant)))
(objectBranch (keyw) @keyword.control.conditional) ; case values are guaranteed to be constant
(objectElif (keyw) @keyword.control.conditional)
(objectElse (keyw) @keyword.control.conditional) (type_expression
(objectWhen (keyw) @keyword.control.conditional) [
; variant objects (identifier) @type
(bracket_expression
(conceptDecl (keyw) @keyword.storage.type) [
(conceptParam (keyw) @type) (identifier) @type
(conceptParam (symbol) @variable) (argument_list (identifier) @type)])
; concept declarations, parameters, and qualifiers on those parameters (tuple_construction
[
((expr (identifier) @type
(primary (symbol)) (bracket_expression
(operator) @operator [
(primary (symbol) @type)) (identifier) @type
(#match? @operator "is")) (argument_list (identifier) @type)])])])
((exprStmt ; types in type expressions
(primary (symbol))
(operator) @operator (call
(primary (symbol) @type)) function: (bracket_expression
(#match? @operator "is")) right: (argument_list (identifier) @type)))
; symbols likely to be types: "x is t" means t is either a type or a type variable ; types as generic parameters
; distinct? ; (dot_generic_call
; generic_arguments: (_) @type)
; ???
(infix_expression
operator:
[
"is"
"isnot"
]
right: (_) @type)
; types in "is" comparisions
(except_branch
values: (expression_list
[
(identifier) @type
(infix_expression
left: (identifier) @type
operator: "as"
right: (_) @variable)]))
; types in exception branches
;; Functions ;; Functions
(routine [
. (keyw) @keyword.function "proc"
. (symbol) @function) "func"
; function declarations "method"
"converter"
(routineExpr (keyw) @keyword.function) "iterator"
; discarded function "template"
"macro"
(routineExprTypeDesc (keyw) @keyword.function) ] @keyword.function
; function declarations as types
(exported_symbol "*" @attribute)
(primary (_ "=" @punctuation.delimiter [body: (_) value: (_)])
. (symbol) @function.call
. (primarySuffix (functionCall))) (proc_declaration name: (_) @function)
; regular function calls (func_declaration name: (_) @function)
(iterator_declaration name: (_) @function)
(primary (converter_declaration name: (_) @function)
. (symbol) @function.call (method_declaration name: (_) @function.method)
. (primarySuffix (cmdCall))) (template_declaration name: (_) @function.macro)
; function calls without parenthesis (macro_declaration name: (_) @function.macro)
(symbol_declaration name: (_) @variable)
(primary
(primarySuffix (qualifiedSuffix (symbol) @function.call)) (call
. (primarySuffix (functionCall))) function: [
; uniform function call syntax calls (identifier) @function.call
(dot_expression
(primary right: (identifier) @function.call)
(primarySuffix (qualifiedSuffix (symbol) @function.call)) (bracket_expression
. (primarySuffix (cmdCall))) left: [
; just in case (identifier) @function.call
(dot_expression
(primary right: (identifier) @function.call)])])
(symbol) @constructor (generalized_string
(primarySuffix (objectConstr))) function: [
; object constructor (identifier) @function.call
(dot_expression
; does not appear to be a way to distinguish these without verbatium matching right: (identifier) @function.call)
; [] @function.builtin (bracket_expression
; [] @function.method left: [
; [] @function.macro (identifier) @function.call
; [] @function.special (dot_expression
right: (identifier) @function.call)])])
(dot_generic_call function: (_) @function.call)
;; Variables ;; Variables
(paramList (paramColonEquals (symbol) @variable.parameter)) (parameter_declaration
; parameter identifiers (symbol_declaration_list
(symbol_declaration
(identColon (ident) @variable.other.member) name: (_) @variable.parameter)))
; named parts of tuples (argument_list
(equal_expression
(symbolColonExpr (symbol) @variable) left: (_) @variable.parameter))
; object constructor parameters (concept_declaration
parameters: (parameter_list (identifier) @variable.parameter))
(field_declaration
(symbol_declaration_list
(symbol_declaration
name: (_) @variable.other.member)))
(call
(argument_list
(colon_expression
left: (_) @variable.other.member)))
(tuple_construction
(colon_expression
left: (_) @variable.other.member))
(variant_declaration
(variant_discriminator_declaration
(symbol_declaration_list
(symbol_declaration
name: (_) @variable.other.member))))
;; Miscellaneous Matches
(symbolEqExpr (symbol) @variable) [
; named parameters "cast"
"discard"
(variable "do"
(keyw) @keyword.storage.type ] @keyword
(declColonEquals (symbol) @variable)) ; also: addr end interface using
; let, var, const expressions
((primary (symbol) @variable.builtin)
(#match? @variable.builtin "result"))
; `result` is an implicit builtin variable inside function scopes
((primary (symbol) @type)
(#match? @type "^[A-Z]"))
; assume PascalCase identifiers to be types
((primary
(primarySuffix
(qualifiedSuffix
(symbol) @type)))
(#match? @type "^[A-Z]"))
; assume PascalCase member variables to be enum entries
(primary (symbol) @variable) (blank_identifier) @variable.builtin
; overzealous, matches variables ((identifier) @variable.builtin
(#eq? @variable.builtin "result"))
(primary (primarySuffix (qualifiedSuffix (symbol) @variable.other.member))) (dot_expression
; overzealous, matches member variables: i.e. x in foo.x left: (identifier) @variable
right: (identifier) @variable.other.member)
(keyw) @keyword (identifier) @variable
; more specific matches are done above whenever possible

@ -1,48 +1,59 @@
[ [
(typeDef) (if)
(ifStmt) (when)
(whenStmt) (elif_branch)
(elifStmt) (else_branch)
(elseStmt) (of_branch) ; note: not case_statement
(ofBranch) ; note: not caseStmt (block)
(whileStmt) (while)
(tryStmt) (for)
(tryExceptStmt) (try)
(tryFinallyStmt) (except_branch)
(forStmt) (finally_branch)
(blockStmt) (defer)
(staticStmt) (static_statement)
(deferStmt) (proc_declaration)
(asmStmt) (func_declaration)
; exprStmt? (iterator_declaration)
(converter_declaration)
(method_declaration)
(template_declaration)
(macro_declaration)
(symbol_declaration)
] @indent ] @indent
;; increase the indentation level ;; increase the indentation level
[ [
(ifStmt) (if)
(whenStmt) (when)
(elifStmt) (elif_branch)
(elseStmt) (else_branch)
(ofBranch) ; note: not caseStmt (of_branch) ; note: not case_statement
(whileStmt) (block)
(tryStmt) (while)
(tryExceptStmt) (for)
(tryFinallyStmt) (try)
(forStmt) (except_branch)
(blockStmt) (finally_branch)
(staticStmt) (defer)
(deferStmt) (static_statement)
(asmStmt) (proc_declaration)
; exprStmt? (func_declaration)
(iterator_declaration)
(converter_declaration)
(method_declaration)
(template_declaration)
(macro_declaration)
(symbol_declaration)
] @extend ] @extend
;; ??? ;; ???
[ [
(returnStmt) (return_statement)
(raiseStmt) (raise_statement)
(yieldStmt) (yield_statement)
(breakStmt) (break_statement)
(continueStmt) (continue_statement)
] @extend.prevent-once ] @extend.prevent-once
;; end a level of indentation while staying indented ;; end a level of indentation while staying indented

@ -1,19 +1,33 @@
(routine (proc_declaration
(block) @function.inside) @function.around body: (_) @function.inside) @function.around
(func_declaration
body: (_) @function.inside) @function.around
(iterator_declaration
body: (_) @function.inside) @function.around
(converter_declaration
body: (_) @function.inside) @function.around
(method_declaration
body: (_) @function.inside) @function.around
(template_declaration
body: (_) @function.inside) @function.around
(macro_declaration
body: (_) @function.inside) @function.around
; @class.inside (types?) (type_declaration (_) @class.inside) @class.around
; @class.around
; paramListSuffix is strange and i do not understand it (parameter_declaration
(paramList (symbol_declaration_list) @parameter.inside) @parameter.around
(paramColonEquals) @parameter.inside) @parameter.around
(comment) @comment.inside [
(multilineComment) @comment.inside (comment)
(docComment) @comment.inside (block_comment)
(multilineDocComment) @comment.inside (documentation_comment)
(block_documentation_comment)
] @comment.inside
(comment)+ @comment.around [
(multilineComment) @comment.around (comment)+
(docComment)+ @comment.around (block_comment)
(multilineDocComment) @comment.around (documentation_comment)+
(block_documentation_comment)+
] @comment.around

@ -0,0 +1,9 @@
(comment) @comment.inside
(comment)+ @comment.around
(formals
((_) @parameter.inside . ","? @parameter.around) @parameter.around)
(function_expression
body: (_) @function.inside) @function.around

@ -34,6 +34,9 @@
(arguments (arguments
((_) @parameter.inside . ","? @parameter.around) @parameter.around) ((_) @parameter.inside . ","? @parameter.around) @parameter.around)
(field_initializer_list
((_) @parameter.inside . ","? @parameter.around) @parameter.around)
[ [
(line_comment) (line_comment)
(block_comment) (block_comment)

@ -24,20 +24,20 @@
(term (term
alias: (identifier) @variable.parameter) alias: (identifier) @variable.parameter)
(term ((term
value: (cast value: (cast
name: (keyword_cast) @function.builtin name: (keyword_cast) @function.builtin
parameter: [(literal)]?)) parameter: [(literal)]?)))
(literal) @string (literal) @string
(comment) @comment.line (comment) @comment.line
(marginalia) @comment.block (marginalia) @comment.block
((literal) @constant.numeric.integer ((literal) @constant.numeric.integer
(#match? @constant.numeric.integer "^-?\\d+$")) (#match? @constant.numeric.integer "^[-+]?\\d+$"))
((literal) @constant.numeric.float ((literal) @constant.numeric.float
(#match? @constant.numeric.float "^-?\\d*\\.\\d*$")) (#match? @constant.numeric.float "^[-+]?\\d*\\.\\d*$"))
(parameter) @variable.parameter (parameter) @variable.parameter

@ -19,13 +19,6 @@
(quoted_attribute_value (attribute_value) @css)) (quoted_attribute_value (attribute_value) @css))
(#eq? @_attr "style")) (#eq? @_attr "style"))
((script_element
(raw_text) @injection.content)
(#set! injection.language "javascript"))
((raw_text_expr) @injection.content
(#set! injection.language "javascript"))
( (
(script_element (script_element
(start_tag (start_tag
@ -36,5 +29,12 @@
(#set! injection.language "typescript") (#set! injection.language "typescript")
) )
((script_element
(raw_text) @injection.content)
(#set! injection.language "javascript"))
((raw_text_expr) @injection.content
(#set! injection.language "javascript"))
((comment) @injection.content ((comment) @injection.content
(#set! injection.language "comment")) (#set! injection.language "comment"))

@ -1,6 +1,10 @@
# An approximation/port of the Cyan Light Theme from Jetbrains # Cyan Light
# # Adapted from JetBrains' Cyan Light Theme https://plugins.jetbrains.com/plugin/12102-cyan-light-theme
# Original Color Scheme here https://plugins.jetbrains.com/plugin/12102-cyan-light-theme # Author: Abderrahmane Tahri Jouti <tj.abderrahmane@gmail.com>
# Original Author : Olga Berdnikova
# LICENSE : MIT
# Source: https://github.com/OlyaB/CyanTheme
"attribute" = "blue" "attribute" = "blue"
"type" = "shade07" "type" = "shade07"

@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2016 CloudCannon
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

@ -0,0 +1,120 @@
# Monokai Soda port for Helix (https://helix-editor.com)
# Author : Jimmy Zelinskie <jimmy@zelinskie.com>
# Syntax
## Constants
"constant" = "white"
"constant.builtin" = "pink"
"constant.character.escape" = "blue"
"constant.numeric" = "purple"
## Diagnostics
"diagnostic" = { modifiers = ["underlined"] }
"diagnostic.error" = { underline = { style = "curl", color = "pink" } }
"diagnostic.warning" = { underline = { style = "curl", color = "orange" } }
"diagnostic.info" = { underline = { style = "curl", color = "white" } }
## Diffs
"diff.plus" = "green"
"diff.delta" = "orange"
"diff.minus" = "pink"
"diff.delta.moved" = "orange"
## Functions
"function" = "green"
"function.macro" = "blue"
"function.builtin" = "pink"
"constructor" = "blue"
## Keywords
"keyword" = "pink"
"keyword.directive" = "blue"
## Punctuation
"punctuation" = "gray"
## Strings
"string" = "yellow"
## Types
"type" = "blue"
"type.builtin" = "pink"
## Variables
"variable" = "white"
"variable.builtin" = "pink"
"variable.other.member" = "white"
"variable.parameter" = "softorange"
## Markup
"markup.heading" = "green"
"markup.bold" = { fg = "orange", modifiers = ["bold"] }
"markup.italic" = { fg = "orange", modifiers = ["italic"] }
"markup.link.url" = { fg = "orange", modifiers = ["underlined"] }
"markup.link.text" = "yellow"
"markup.quote" = "green"
## Misc
"attribute" = "blue"
"comment" = { fg = "gray", modifiers = ["italic"] }
"error" = "pink"
"hint" = "white"
"info" = "white"
"label" = "yellow"
"module" = "softorange"
"namespace" = "pink"
"operator" = "pink"
"special" = "softorange"
"warning" = "orange"
# Editor UI
## Main
"ui.background" = { bg = "background" }
"ui.text" = "white"
"ui.window" = { bg = "darkgray" }
## Debug (TODO)
## Menus
"ui.menu" = { fg = "white", bg = "darkgray" }
"ui.menu.selected" = { modifiers = ["reversed"] }
"ui.popup" = { bg = "darkgray" }
"ui.help" = { fg = "white", bg = "darkgray" }
## Gutter
"ui.linenr" = "darkgray"
"ui.linenr.selected" = "orange"
## Cursor
"ui.cursor.primary" = { fg = "white", modifiers = ["reversed"] }
"ui.cursor.match" = { fg = "white", modifiers = ["reversed"] }
"ui.selection" = { bg = "darkgray" }
## Statusline
"ui.statusline" = { bg = "darkgray" }
"ui.statusline.inactive" = { fg = "white", bg = "darkgray" }
"ui.statusline.normal" = { fg = "white", bg = "blue" }
"ui.statusline.insert" = { fg = "white", bg = "green" }
"ui.statusline.select" = { fg = "white", bg = "purple" }
"ui.text.focus" = { fg = "yellow", modifiers = ["bold"] }
"ui.virtual" = "darkgray"
"ui.virtual.ruler" = { bg = "darkgray" }
# Palette
[palette]
"purple" = "#AE81FF"
"yellow" = "#E6DB74"
"pink" = "#f92a72"
"white" = "#cfcfc2"
"gray" = "#75715e"
"darkgray" = "#444444"
"black" = "#222222"
"blue" = "#66d9ef"
"green" = "#a6e22e"
"softorange" = "#f59762"
"orange" = "#fd971f"
"background" = "#191919"

@ -6,24 +6,23 @@
# License: MIT License # License: MIT License
"type" = "blue" "type" = "blue"
"constant" = "purple" "constant" = "fg"
"constant.numeric" = "purple" "constant.numeric" = "purple"
"constant.character.escape" = "orange" "constant.character.escape" = "orange"
"string" = "yellow" "string" = "yellow"
"comment" = "grey" "comment" = "grey"
"variable" = "fg" "variable" = "fg"
"variable.builtin" = "orange" "variable.builtin" = "purple"
"variable.parameter" = "fg" "variable.parameter" = "fg"
"variable.other.member" = "fg" "variable.other.member" = "orange"
"label" = "orange" "label" = "red"
"punctuation" = "grey" "punctuation" = "grey"
"punctuation.delimiter" = "grey" "punctuation.special" = "yellow"
"punctuation.bracket" = "fg"
"keyword" = "red" "keyword" = "red"
"operator" = "orange" "operator" = "red"
"function" = "green" "function" = "green"
"function.builtin" = "blue" "function.builtin" = "green"
"function.macro" = "purple" "function.macro" = "green"
"tag" = "yellow" "tag" = "yellow"
"namespace" = "blue" "namespace" = "blue"
"attribute" = "purple" "attribute" = "purple"
@ -48,12 +47,12 @@
"markup.raw" = "green" "markup.raw" = "green"
"diff.plus" = "green" "diff.plus" = "green"
"diff.delta" = "orange" "diff.delta" = "blue"
"diff.minus" = "red" "diff.minus" = "red"
"ui.background" = { bg = "bg0" } "ui.background" = { bg = "bg0" }
"ui.cursor" = { modifiers = ['reversed'] } "ui.cursor" = { modifiers = ['reversed'] }
"ui.cursor.match" = { fg = "orange", bg = "diff_yellow" } "ui.cursor.match" = { bg = "bg4" }
"ui.cursor.insert" = { fg = "black", bg = "grey" } "ui.cursor.insert" = { fg = "black", bg = "grey" }
"ui.cursor.select" = { fg = "bg0", bg = "blue" } "ui.cursor.select" = { fg = "bg0", bg = "blue" }
"ui.selection" = { bg = "bg5" } "ui.selection" = { bg = "bg5" }
@ -73,7 +72,7 @@
"ui.text.focus" = "green" "ui.text.focus" = "green"
"ui.menu" = { fg = "fg", bg = "bg2" } "ui.menu" = { fg = "fg", bg = "bg2" }
"ui.menu.selected" = { fg = "bg0", bg = "green" } "ui.menu.selected" = { fg = "bg0", bg = "green" }
"ui.virtual.whitespace" = { fg = "grey_dim" } "ui.virtual.whitespace" = "bg4"
"ui.virtual.ruler" = { bg = "bg3" } "ui.virtual.ruler" = { bg = "bg3" }
"ui.virtual.inlay-hint" = { fg = "grey_dim" } "ui.virtual.inlay-hint" = { fg = "grey_dim" }
@ -92,11 +91,12 @@ error = { fg = 'red', bg = 'bg2', modifiers = ['bold'] }
[palette] [palette]
black = "#181819" black = "#181819"
bg_dim = "#222327"
bg0 = "#2c2e34" bg0 = "#2c2e34"
bg1 = "#33353f" bg1 = "#33353f"
bg2 = "#363944" bg2 = "#363944"
bg3 = "#3b3e48" bg3 = "#3b3e48"
bg4 = "#5C606A" bg4 = "#414550"
bg5 = "#444852" bg5 = "#444852"
bg_red = "#ff6077" bg_red = "#ff6077"
diff_red = "#55393d" diff_red = "#55393d"

@ -58,13 +58,13 @@ variable = { fg = "fg" }
"diff.plus" = { fg = "add" } "diff.plus" = { fg = "add" }
error = { fg = "error" } error = { fg = "error" }
hint = { fg = "hint" }
info = { fg = "info" }
warning = { fg = "yellow" } warning = { fg = "yellow" }
"diagnostic.error" = { underline = { style = "curl" } } info = { fg = "info" }
"diagnostic.warning" = { underline = { style = "curl" } } hint = { fg = "hint" }
"diagnostic.info" = { underline = { style = "curl" } } "diagnostic.error" = { underline = { style = "curl", color = "error" } }
"diagnostic.hint" = { underline = { style = "curl" } } "diagnostic.warning" = { underline = { style = "curl", color = "yellow"} }
"diagnostic.info" = { underline = { style = "curl", color = "info"} }
"diagnostic.hint" = { underline = { style = "curl", color = "hint" } }
"ui.background" = { bg = "bg", fg = "fg" } "ui.background" = { bg = "bg", fg = "fg" }
"ui.cursor" = { modifiers = ["reversed"] } "ui.cursor" = { modifiers = ["reversed"] }
@ -114,8 +114,8 @@ change = "#6183bb"
delete = "#914c54" delete = "#914c54"
error = "#db4b4b" error = "#db4b4b"
hint = "#1abc9c"
info = "#0db9d7" info = "#0db9d7"
hint = "#1abc9c"
fg = "#c0caf5" fg = "#c0caf5"
fg-dark = "#a9b1d6" fg-dark = "#a9b1d6"

@ -80,6 +80,8 @@ label = "honey"
"diagnostic.info" = { underline = { color = "delta", style = "curl" } } "diagnostic.info" = { underline = { color = "delta", style = "curl" } }
"diagnostic.warning" = { underline = { color = "lightning", style = "curl" } } "diagnostic.warning" = { underline = { color = "lightning", style = "curl" } }
"diagnostic.error" = { underline = { color = "apricot", style = "curl" } } "diagnostic.error" = { underline = { color = "apricot", style = "curl" } }
"diagnostic.unnecessary" = { modifiers = ["dim"] }
"diagnostic.deprecated" = { modifiers = ["crossed_out"] }
warning = "lightning" warning = "lightning"
error = "apricot" error = "apricot"

Loading…
Cancel
Save