Merge remote-tracking branch 'origin/master' into goto_next_reference

pull/6465/head
Anthony Templeton
commit bb46d99fb7

@ -12,6 +12,7 @@ jobs:
check:
name: Check (msrv)
runs-on: ubuntu-latest
if: github.repository == 'helix-editor/helix' || github.event_name != 'schedule'
steps:
- name: Checkout sources
uses: actions/checkout@v4
@ -31,6 +32,7 @@ jobs:
test:
name: Test Suite
runs-on: ${{ matrix.os }}
if: github.repository == 'helix-editor/helix' || github.event_name != 'schedule'
env:
RUST_BACKTRACE: 1
HELIX_LOG_LEVEL: info
@ -65,6 +67,7 @@ jobs:
lints:
name: Lints
runs-on: ubuntu-latest
if: github.repository == 'helix-editor/helix' || github.event_name != 'schedule'
steps:
- name: Checkout sources
uses: actions/checkout@v4
@ -92,6 +95,7 @@ jobs:
docs:
name: Docs
runs-on: ubuntu-latest
if: github.repository == 'helix-editor/helix' || github.event_name != 'schedule'
steps:
- name: Checkout sources
uses: actions/checkout@v4

Cargo.lock (generated)

@ -19,9 +19,9 @@ checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
[[package]]
name = "ahash"
version = "0.8.6"
version = "0.8.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "91429305e9f0a25f6205c5b8e0d2db09e0708a7a6df0f42212bb56c32c8ac97a"
checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011"
dependencies = [
"cfg-if",
"getrandom",
@ -62,15 +62,15 @@ dependencies = [
[[package]]
name = "anyhow"
version = "1.0.79"
version = "1.0.80"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "080e9890a082662b09c1ad45f567faeeb47f22b5fb23895fbe1e651e718e25ca"
checksum = "5ad32ce52e4161730f7098c077cd2ed6229b5804ccf99e5366be1ab72a98b4e1"
[[package]]
name = "arc-swap"
version = "1.6.0"
version = "1.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bddcadddf5e9015d310179a59bb28c4d4b9920ad0f11e8e14dbadf654890c9a6"
checksum = "7b3d0060af21e8d11a926981cc00c6c1541aa91dd64b9f881985c3da1094425f"
[[package]]
name = "autocfg"
@ -145,9 +145,9 @@ checksum = "df8670b8c7b9dae1793364eafadf7239c40d669904660c5960d74cfd80b46a53"
[[package]]
name = "cc"
version = "1.0.85"
version = "1.0.88"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b918671670962b48bc23753aef0c51d072dca6f52f01f800854ada6ddb7f7d3"
checksum = "02f341c093d19155a6e41631ce5971aac4e9a868262212153124c15fa22d1cdc"
[[package]]
name = "cfg-if"
@ -168,9 +168,9 @@ dependencies = [
[[package]]
name = "chrono"
version = "0.4.33"
version = "0.4.34"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9f13690e35a5e4ace198e7beea2895d29f3a9cc55015fcebe6336bd2010af9eb"
checksum = "5bc015644b92d5890fab7489e49d21f879d5c990186827d42ec511919404f38b"
dependencies = [
"android-tzdata",
"iana-time-zone",
@ -180,9 +180,9 @@ dependencies = [
[[package]]
name = "clipboard-win"
version = "5.1.0"
version = "5.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3ec832972fefb8cf9313b45a0d1945e29c9c251f1d4c6eafc5fe2124c02d2e81"
checksum = "12f9a0700e0127ba15d1d52dd742097f821cd9c65939303a44d970465040a297"
dependencies = [
"error-code",
]
@ -1344,6 +1344,7 @@ version = "23.10.0"
dependencies = [
"dunce",
"etcetera",
"regex-cursor",
"ropey",
"tempfile",
"which",
@ -1602,12 +1603,12 @@ checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd"
[[package]]
name = "libloading"
version = "0.8.1"
version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c571b676ddfc9a8c12f1f3d3085a7b163966a8fd8098a90640953ce5f6170161"
checksum = "2caa5afb8bf9f3a2652760ce7d4f62d21c4d5a423e68466fca30df82f2330164"
dependencies = [
"cfg-if",
"windows-sys 0.48.0",
"windows-targets 0.52.0",
]
[[package]]
@ -1689,9 +1690,9 @@ dependencies = [
[[package]]
name = "mio"
version = "0.8.9"
version = "0.8.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3dce281c5e46beae905d4de1870d8b1509a9142b62eedf18b443b011ca8343d0"
checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c"
dependencies = [
"libc",
"log",
@ -1938,15 +1939,28 @@ dependencies = [
[[package]]
name = "regex-automata"
version = "0.4.4"
version = "0.4.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3b7fa1134405e2ec9353fd416b17f8dacd46c473d7d3fd1cf202706a14eb792a"
checksum = "5bb987efffd3c6d0d8f5f89510bb458559eab11e4f869acb20bf845e016259cd"
dependencies = [
"aho-corasick",
"memchr",
"regex-syntax",
]
[[package]]
name = "regex-cursor"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a43718aa0040434d45728c43f56bd53bda75a91c46954cdf0f2ff4dbc8aabbe7"
dependencies = [
"log",
"memchr",
"regex-automata",
"regex-syntax",
"ropey",
]
[[package]]
name = "regex-syntax"
version = "0.8.2"
@ -2011,18 +2025,18 @@ checksum = "1792db035ce95be60c3f8853017b3999209281c24e2ba5bc8e59bf97a0c590c1"
[[package]]
name = "serde"
version = "1.0.196"
version = "1.0.197"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "870026e60fa08c69f064aa766c10f10b1d62db9ccd4d0abb206472bee0ce3b32"
checksum = "3fb1c873e1b9b056a4dc4c0c198b24c3ffa059243875552b2bd0933b1aee4ce2"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
version = "1.0.196"
version = "1.0.197"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "33c85360c95e7d137454dc81d9a4ed2b8efd8fbe19cee57357b32b9771fccb67"
checksum = "7eb0b34b42edc17f6b7cac84a52a1c5f0e1bb2227e997ca9011ea3dd34e8610b"
dependencies = [
"proc-macro2",
"quote",
@ -2031,9 +2045,9 @@ dependencies = [
[[package]]
name = "serde_json"
version = "1.0.113"
version = "1.0.114"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "69801b70b1c3dac963ecb03a364ba0ceda9cf60c71cfe475e99864759c8b8a79"
checksum = "c5f09b1bd632ef549eaa9f60a1f8de742bdbc698e6cee2095fc84dde5f549ae0"
dependencies = [
"itoa",
"ryu",
@ -2201,9 +2215,9 @@ dependencies = [
[[package]]
name = "tempfile"
version = "3.10.0"
version = "3.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a365e8cd18e44762ef95d87f284f4b5cd04107fec2ff3052bd6a3e6069669e67"
checksum = "85b77fafb263dd9d05cbeac119526425676db3784113aa9295c88498cbf8bff1"
dependencies = [
"cfg-if",
"fastrand",
@ -2231,9 +2245,9 @@ dependencies = [
[[package]]
name = "textwrap"
version = "0.16.0"
version = "0.16.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "222a222a5bfe1bba4a77b45ec488a741b3cb8872e5e499451fd7d0129c9c7c3d"
checksum = "23d434d3f8967a09480fb04132ebe0a3e088c173e6d0ee7897abbdf4eab0f8b9"
dependencies = [
"smawk",
"unicode-linebreak",

@ -375,8 +375,25 @@ wrap-indicator = "" # set wrap-indicator to "" to hide it
### `[editor.smart-tab]` Section
Options for navigating and editing using the tab key.
| Key | Description | Default |
|------------|-------------|---------|
| `enable` | If set to true, then when the cursor is in a position with non-whitespace to its left, instead of inserting a tab, it will run `move_parent_node_end`. If there is only whitespace to the left, then it inserts a tab as normal. With the default bindings, to explicitly insert a tab character, press Shift-tab. | `true` |
| `supersede-menu` | Normally, when a menu is on screen, such as when auto complete is triggered, the tab key is bound to cycling through the items. This means when menus are on screen, one cannot use the tab key to trigger the `smart-tab` command. If this option is set to true, the `smart-tab` command always takes precedence, which means one cannot use the tab key to cycle through menu items. One of the other bindings must be used instead, such as arrow keys or `C-n`/`C-p`. | `false` |
Due to the lack of support for `S-tab` in some terminals, the default keybindings don't fully embrace the smart-tab editing experience. If you enjoy smart-tab navigation and use a terminal that supports the [Enhanced Keyboard protocol](https://github.com/helix-editor/helix/wiki/Terminal-Support#enhanced-keyboard-protocol), consider setting extra keybindings:
```
[keys.normal]
tab = "move_parent_node_end"
S-tab = "move_parent_node_start"
[keys.insert]
S-tab = "move_parent_node_start"
[keys.select]
tab = "extend_parent_node_end"
S-tab = "extend_parent_node_start"
```

@ -30,6 +30,7 @@
| devicetree | ✓ | | | |
| dhall | ✓ | ✓ | | `dhall-lsp-server` |
| diff | ✓ | | | |
| docker-compose | ✓ | | ✓ | `docker-compose-langserver` |
| dockerfile | ✓ | | | `docker-langserver` |
| dot | ✓ | | | `dot-language-server` |
| dtd | ✓ | | | |
@ -43,6 +44,7 @@
| erb | ✓ | | | |
| erlang | ✓ | ✓ | | `erlang_ls` |
| esdl | ✓ | | | |
| fidl | ✓ | | | |
| fish | ✓ | ✓ | ✓ | |
| forth | ✓ | | | `forth-lsp` |
| fortran | ✓ | | ✓ | `fortls` |
@ -64,10 +66,11 @@
| gotmpl | ✓ | | | `gopls` |
| gowork | ✓ | | | `gopls` |
| graphql | ✓ | | | `graphql-lsp` |
| groovy | ✓ | | | |
| hare | ✓ | | | |
| haskell | ✓ | ✓ | | `haskell-language-server-wrapper` |
| haskell-persistent | ✓ | | | |
| hcl | ✓ | | ✓ | `terraform-ls` |
| heex | ✓ | ✓ | | `elixir-ls` |
| hocon | ✓ | | ✓ | |
| hoon | ✓ | | | |
@ -101,7 +104,7 @@
| lua | ✓ | ✓ | ✓ | `lua-language-server` |
| make | ✓ | | ✓ | |
| markdoc | ✓ | | | `markdoc-ls` |
| markdown | ✓ | | | `marksman` |
| markdown | ✓ | | | `marksman`, `markdown-oxide` |
| markdown.inline | ✓ | | | |
| matlab | ✓ | ✓ | ✓ | |
| mermaid | ✓ | | | |
@ -111,7 +114,7 @@
| nasm | ✓ | ✓ | | |
| nickel | ✓ | | ✓ | `nls` |
| nim | ✓ | ✓ | ✓ | `nimlangserver` |
| nix | ✓ | | | `nil` |
| nu | ✓ | | | `nu` |
| nunjucks | ✓ | | | |
| ocaml | ✓ | | ✓ | `ocamllsp` |

@ -12,6 +12,7 @@
- [Match mode](#match-mode)
- [Window mode](#window-mode)
- [Space mode](#space-mode)
- [Comment mode](#comment-mode)
- [Popup](#popup)
- [Unimpaired](#unimpaired)
- [Insert mode](#insert-mode)
@ -53,8 +54,8 @@ Normal mode is the default mode when you launch helix. You can return to it from
| `End` | Move to the end of the line | `goto_line_end` |
| `Ctrl-b`, `PageUp` | Move page up | `page_up` |
| `Ctrl-f`, `PageDown` | Move page down | `page_down` |
| `Ctrl-u` | Move half page up | `half_page_up` |
| `Ctrl-d` | Move half page down | `half_page_down` |
| `Ctrl-u` | Move cursor and page half page up | `page_cursor_half_up` |
| `Ctrl-d` | Move cursor and page half page down | `page_cursor_half_down` |
| `Ctrl-i` | Jump forward on the jumplist | `jump_forward` |
| `Ctrl-o` | Jump backward on the jumplist | `jump_backward` |
| `Ctrl-s` | Save the current selection to the jumplist | `save_selection` |
@ -192,8 +193,8 @@ useful when you're simply looking over text and not actively editing it.
| `k`, `up` | Scroll the view upwards | `scroll_up` |
| `Ctrl-f`, `PageDown` | Move page down | `page_down` |
| `Ctrl-b`, `PageUp` | Move page up | `page_up` |
| `Ctrl-d` | Move half page down | `half_page_down` |
| `Ctrl-u` | Move half page up | `half_page_up` |
| `Ctrl-u` | Move cursor and page half page up | `page_cursor_half_up` |
| `Ctrl-d` | Move cursor and page half page down | `page_cursor_half_down` |
#### Goto mode
@ -289,6 +290,9 @@ This layer is a kludge of mappings, mostly pickers.
| `h` | Select symbol references (**LSP**) | `select_references_to_symbol_under_cursor` |
| `'` | Open last fuzzy picker | `last_picker` |
| `w` | Enter [window mode](#window-mode) | N/A |
| `c` | Comment/uncomment selections | `toggle_comments` |
| `C` | Block comment/uncomment selections | `toggle_block_comments` |
| `Alt-c` | Line comment/uncomment selections | `toggle_line_comments` |
| `p` | Paste system clipboard after selections | `paste_clipboard_after` |
| `P` | Paste system clipboard before selections | `paste_clipboard_before` |
| `y` | Yank selections to clipboard | `yank_to_clipboard` |

@ -42,7 +42,7 @@ name = "mylang"
scope = "source.mylang"
injection-regex = "mylang"
file-types = ["mylang", "myl"]
comment-token = "#"
comment-tokens = "#"
indent = { tab-width = 2, unit = " " }
formatter = { command = "mylang-formatter" , args = ["--stdin"] }
language-servers = [ "mylang-lsp" ]
@ -61,7 +61,8 @@ These configuration keys are available:
| `roots` | A set of marker files to look for when trying to find the workspace root. For example `Cargo.lock`, `yarn.lock` |
| `auto-format` | Whether to autoformat this language when saving |
| `diagnostic-severity` | Minimal severity of diagnostic for it to be displayed. (Allowed values: `Error`, `Warning`, `Info`, `Hint`) |
| `comment-token` | The token to use as a comment-token |
| `comment-tokens` | The tokens to use as comment tokens, either a single token `"//"` or an array `["//", "///", "//!"]` (the first token will be used for commenting). Also configurable as `comment-token` for backwards compatibility |
| `block-comment-tokens` | The start and end tokens for a multiline comment, either a single table or an array of `{ start = "/*", end = "*/" }`. The first pair is used when commenting; any pair in the array can be uncommented |
| `indent` | The indent to use. Has sub keys `unit` (the text inserted into the document when indenting; usually set to N spaces or `"\t"` for tabs) and `tab-width` (the number of spaces rendered for a tab) |
| `language-servers` | The Language Servers used for this language. See below for more information in the section [Configuring Language Servers for a language](#configuring-language-servers-for-a-language) |
| `grammar` | The tree-sitter grammar to use (defaults to the value of `name`) |

@ -333,5 +333,7 @@ These scopes are used for theming the editor interface:
| `diagnostic.info` | Diagnostics info (editing area) |
| `diagnostic.warning` | Diagnostics warning (editing area) |
| `diagnostic.error` | Diagnostics error (editing area) |
| `diagnostic.unnecessary` | Diagnostics with unnecessary tag (editing area) |
| `diagnostic.deprecated` | Diagnostics with deprecated tag (editing area) |
[editor-section]: ./configuration.md#editor-section

@ -5,19 +5,20 @@ _hx() {
# $1 command name
# $2 word being completed
# $3 word preceding
COMPREPLY=()
case "$3" in
-g | --grammar)
COMPREPLY=($(compgen -W "fetch build" -- $2))
COMPREPLY="$(compgen -W 'fetch build' -- $2)"
;;
--health)
local languages=$(hx --health |tail -n '+7' |awk '{print $1}' |sed 's/\x1b\[[0-9;]*m//g')
COMPREPLY=($(compgen -W "$languages" -- $2))
COMPREPLY="$(compgen -W """$languages""" -- $2)"
;;
*)
COMPREPLY=($(compgen -fd -W "-h --help --tutor -V --version -v -vv -vvv --health -g --grammar --vsplit --hsplit -c --config --log" -- $2))
COMPREPLY="$(compgen -fd -W "-h --help --tutor -V --version -v -vv -vvv --health -g --grammar --vsplit --hsplit -c --config --log" -- """$2""")"
;;
esac
} && complete -o filenames -F _hx hx
local IFS=$'\n'
COMPREPLY=($COMPREPLY)
} && complete -o filenames -F _hx hx

@ -7,11 +7,11 @@
]
},
"locked": {
"lastModified": 1701025348,
"narHash": "sha256-42GHmYH+GF7VjwGSt+fVT1CQuNpGanJbNgVHTAZppUM=",
"lastModified": 1709610799,
"narHash": "sha256-5jfLQx0U9hXbi2skYMGodDJkIgffrjIOgMRjZqms2QE=",
"owner": "ipetkov",
"repo": "crane",
"rev": "42afaeb1a0325194a7cdb526332d2cb92fddd07b",
"rev": "81c393c776d5379c030607866afef6406ca1be57",
"type": "github"
},
"original": {
@ -25,11 +25,11 @@
"systems": "systems"
},
"locked": {
"lastModified": 1694529238,
"narHash": "sha256-zsNZZGTGnMOf9YpHKJqMSsa0dXbfmxeoJ7xHlrt+xmY=",
"lastModified": 1709126324,
"narHash": "sha256-q6EQdSeUZOG26WelxqkmR7kArjgWCdw5sfJVHPH/7j8=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "ff7b65b44d01cf9ba6a71320833626af21126384",
"rev": "d465f4819400de7c8d874d50b982301f28a84605",
"type": "github"
},
"original": {
@ -40,11 +40,11 @@
},
"nixpkgs": {
"locked": {
"lastModified": 1700794826,
"narHash": "sha256-RyJTnTNKhO0yqRpDISk03I/4A67/dp96YRxc86YOPgU=",
"lastModified": 1709479366,
"narHash": "sha256-n6F0n8UV6lnTZbYPl1A9q1BS0p4hduAv1mGAP17CVd0=",
"owner": "nixos",
"repo": "nixpkgs",
"rev": "5a09cb4b393d58f9ed0d9ca1555016a8543c2ac8",
"rev": "b8697e57f10292a6165a20f03d2f42920dfaf973",
"type": "github"
},
"original": {
@ -72,11 +72,11 @@
]
},
"locked": {
"lastModified": 1701137803,
"narHash": "sha256-0LcPAdql5IhQSUXJx3Zna0dYTgdIoYO7zUrsKgiBd04=",
"lastModified": 1709604635,
"narHash": "sha256-le4fwmWmjGRYWwkho0Gr7mnnZndOOe4XGbLw68OvF40=",
"owner": "oxalica",
"repo": "rust-overlay",
"rev": "9dd940c967502f844eacea52a61e9596268d4f70",
"rev": "e86c0fb5d3a22a5f30d7f64ecad88643fe26449d",
"type": "github"
},
"original": {

@ -32,7 +32,7 @@ once_cell = "1.19"
arc-swap = "1"
regex = "1"
bitflags = "2.4"
ahash = "0.8.6"
ahash = "0.8.11"
hashbrown = { version = "0.14.3", features = ["raw"] }
dunce = "1.0"
@ -48,7 +48,7 @@ encoding_rs = "0.8"
chrono = { version = "0.4", default-features = false, features = ["alloc", "std"] }
etcetera = "0.8"
textwrap = "0.16.0"
textwrap = "0.16.1"
nucleo.workspace = true
parking_lot = "0.12"

@ -1,9 +1,12 @@
//! This module contains the functionality to toggle comments on lines over the selection
//! using the comment character defined in the user's `languages.toml`
use smallvec::SmallVec;
use crate::{
find_first_non_whitespace_char, Change, Rope, RopeSlice, Selection, Tendril, Transaction,
syntax::BlockCommentToken, Change, Range, Rope, RopeSlice, Selection, Tendril, Transaction,
};
use helix_stdx::rope::RopeSliceExt;
use std::borrow::Cow;
/// Given text, a comment token, and a set of line indices, returns the following:
@ -22,12 +25,12 @@ fn find_line_comment(
) -> (bool, Vec<usize>, usize, usize) {
let mut commented = true;
let mut to_change = Vec::new();
let mut min = usize::MAX; // minimum col for find_first_non_whitespace_char
let mut min = usize::MAX; // minimum col for first_non_whitespace_char
let mut margin = 1;
let token_len = token.chars().count();
for line in lines {
let line_slice = text.line(line);
if let Some(pos) = find_first_non_whitespace_char(line_slice) {
if let Some(pos) = line_slice.first_non_whitespace_char() {
let len = line_slice.len_chars();
if pos < min {
@ -94,6 +97,222 @@ pub fn toggle_line_comments(doc: &Rope, selection: &Selection, token: Option<&st
Transaction::change(doc, changes.into_iter())
}
#[derive(Debug, PartialEq, Eq)]
pub enum CommentChange {
Commented {
range: Range,
start_pos: usize,
end_pos: usize,
start_margin: bool,
end_margin: bool,
start_token: String,
end_token: String,
},
Uncommented {
range: Range,
start_pos: usize,
end_pos: usize,
start_token: String,
end_token: String,
},
Whitespace {
range: Range,
},
}
pub fn find_block_comments(
tokens: &[BlockCommentToken],
text: RopeSlice,
selection: &Selection,
) -> (bool, Vec<CommentChange>) {
let mut commented = true;
let mut only_whitespace = true;
let mut comment_changes = Vec::with_capacity(selection.len());
let default_tokens = tokens.first().cloned().unwrap_or_default();
// TODO: check if this can be removed on MSRV bump
#[allow(clippy::redundant_clone)]
let mut start_token = default_tokens.start.clone();
#[allow(clippy::redundant_clone)]
let mut end_token = default_tokens.end.clone();
let mut tokens = tokens.to_vec();
// sort the tokens by length, so longer tokens will match first
tokens.sort_by(|a, b| {
if a.start.len() == b.start.len() {
b.end.len().cmp(&a.end.len())
} else {
b.start.len().cmp(&a.start.len())
}
});
for range in selection {
let selection_slice = range.slice(text);
if let (Some(start_pos), Some(end_pos)) = (
selection_slice.first_non_whitespace_char(),
selection_slice.last_non_whitespace_char(),
) {
let mut line_commented = false;
let mut after_start = 0;
let mut before_end = 0;
let len = (end_pos + 1) - start_pos;
for BlockCommentToken { start, end } in &tokens {
let start_len = start.chars().count();
let end_len = end.chars().count();
after_start = start_pos + start_len;
before_end = end_pos.saturating_sub(end_len);
if len >= start_len + end_len {
let start_fragment = selection_slice.slice(start_pos..after_start);
let end_fragment = selection_slice.slice(before_end + 1..end_pos + 1);
// block commented with these tokens
if start_fragment == start.as_str() && end_fragment == end.as_str() {
start_token = start.to_string();
end_token = end.to_string();
line_commented = true;
break;
}
}
}
if !line_commented {
comment_changes.push(CommentChange::Uncommented {
range: *range,
start_pos,
end_pos,
start_token: default_tokens.start.clone(),
end_token: default_tokens.end.clone(),
});
commented = false;
} else {
comment_changes.push(CommentChange::Commented {
range: *range,
start_pos,
end_pos,
start_margin: selection_slice
.get_char(after_start)
.map_or(false, |c| c == ' '),
end_margin: after_start != before_end
&& selection_slice
.get_char(before_end)
.map_or(false, |c| c == ' '),
start_token: start_token.to_string(),
end_token: end_token.to_string(),
});
}
only_whitespace = false;
} else {
comment_changes.push(CommentChange::Whitespace { range: *range });
}
}
if only_whitespace {
commented = false;
}
(commented, comment_changes)
}
#[must_use]
pub fn create_block_comment_transaction(
doc: &Rope,
selection: &Selection,
commented: bool,
comment_changes: Vec<CommentChange>,
) -> (Transaction, SmallVec<[Range; 1]>) {
let mut changes: Vec<Change> = Vec::with_capacity(selection.len() * 2);
let mut ranges: SmallVec<[Range; 1]> = SmallVec::with_capacity(selection.len());
let mut offs = 0;
for change in comment_changes {
if commented {
if let CommentChange::Commented {
range,
start_pos,
end_pos,
start_token,
end_token,
start_margin,
end_margin,
} = change
{
let from = range.from();
changes.push((
from + start_pos,
from + start_pos + start_token.len() + start_margin as usize,
None,
));
changes.push((
from + end_pos - end_token.len() - end_margin as usize + 1,
from + end_pos + 1,
None,
));
}
} else {
// uncommented so manually map ranges through changes
match change {
CommentChange::Uncommented {
range,
start_pos,
end_pos,
start_token,
end_token,
} => {
let from = range.from();
changes.push((
from + start_pos,
from + start_pos,
Some(Tendril::from(format!("{} ", start_token))),
));
changes.push((
from + end_pos + 1,
from + end_pos + 1,
Some(Tendril::from(format!(" {}", end_token))),
));
let offset = start_token.chars().count() + end_token.chars().count() + 2;
ranges.push(
Range::new(from + offs, from + offs + end_pos + 1 + offset)
.with_direction(range.direction()),
);
offs += offset;
}
CommentChange::Commented { range, .. } | CommentChange::Whitespace { range } => {
ranges.push(Range::new(range.from() + offs, range.to() + offs));
}
}
}
}
(Transaction::change(doc, changes.into_iter()), ranges)
}
#[must_use]
pub fn toggle_block_comments(
doc: &Rope,
selection: &Selection,
tokens: &[BlockCommentToken],
) -> Transaction {
let text = doc.slice(..);
let (commented, comment_changes) = find_block_comments(tokens, text, selection);
let (mut transaction, ranges) =
create_block_comment_transaction(doc, selection, commented, comment_changes);
if !commented {
transaction = transaction.with_selection(Selection::new(ranges, selection.primary_index()));
}
transaction
}
pub fn split_lines_of_selection(text: RopeSlice, selection: &Selection) -> Selection {
let mut ranges = SmallVec::new();
for range in selection.ranges() {
let (line_start, line_end) = range.line_range(text.slice(..));
let mut pos = text.line_to_char(line_start);
for line in text.slice(pos..text.line_to_char(line_end + 1)).lines() {
let start = pos;
pos += line.len_chars();
ranges.push(Range::new(start, pos));
}
}
Selection::new(ranges, 0)
}
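A companion sketch for `split_lines_of_selection` (hypothetical test, same assumptions as above): each line covered by the selection becomes its own range, with line endings kept inside the ranges:
```
#[test]
fn splits_selection_into_lines() {
    let doc = Rope::from("ab\ncd");
    let selection = Selection::single(0, doc.len_chars());
    let lines = split_lines_of_selection(doc.slice(..), &selection);
    // the first range keeps its trailing newline: "ab\n", then "cd"
    assert_eq!(lines.ranges(), &[Range::new(0, 3), Range::new(3, 5)][..]);
}
```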
#[cfg(test)]
mod test {
use super::*;
@ -149,4 +368,49 @@ mod test {
// TODO: account for uncommenting with uneven comment indentation
}
#[test]
fn test_find_block_comments() {
// three lines 5 characters.
let mut doc = Rope::from("1\n2\n3");
// select whole document
let selection = Selection::single(0, doc.len_chars());
let text = doc.slice(..);
let res = find_block_comments(&[BlockCommentToken::default()], text, &selection);
assert_eq!(
res,
(
false,
vec![CommentChange::Uncommented {
range: Range::new(0, 5),
start_pos: 0,
end_pos: 4,
start_token: "/*".to_string(),
end_token: "*/".to_string(),
}]
)
);
// comment
let transaction = toggle_block_comments(&doc, &selection, &[BlockCommentToken::default()]);
transaction.apply(&mut doc);
assert_eq!(doc, "/* 1\n2\n3 */");
// uncomment
let selection = Selection::single(0, doc.len_chars());
let transaction = toggle_block_comments(&doc, &selection, &[BlockCommentToken::default()]);
transaction.apply(&mut doc);
assert_eq!(doc, "1\n2\n3");
// don't panic when there is just a space in comment
doc = Rope::from("/* */");
let selection = Selection::single(0, doc.len_chars());
let transaction = toggle_block_comments(&doc, &selection, &[BlockCommentToken::default()]);
transaction.apply(&mut doc);
assert_eq!(doc, "");
}
}

@ -1,10 +1,10 @@
use std::{borrow::Cow, collections::HashMap};
use helix_stdx::rope::RopeSliceExt;
use tree_sitter::{Query, QueryCursor, QueryPredicateArg};
use crate::{
chars::{char_is_line_ending, char_is_whitespace},
find_first_non_whitespace_char,
graphemes::{grapheme_width, tab_width_at},
syntax::{IndentationHeuristic, LanguageConfiguration, RopeProvider, Syntax},
tree_sitter::Node,
@ -970,7 +970,7 @@ pub fn indent_for_newline(
let mut num_attempts = 0;
for line_idx in (0..=line_before).rev() {
let line = text.line(line_idx);
let first_non_whitespace_char = match find_first_non_whitespace_char(line) {
let first_non_whitespace_char = match line.first_non_whitespace_char() {
Some(i) => i,
None => {
continue;

@ -37,9 +37,6 @@ pub mod unicode {
pub use helix_loader::find_workspace;
pub fn find_first_non_whitespace_char(line: RopeSlice) -> Option<usize> {
line.chars().position(|ch| !ch.is_whitespace())
}
mod rope_reader;
pub use rope_reader::RopeReader;

@ -7,9 +7,11 @@ use crate::{
ensure_grapheme_boundary_next, ensure_grapheme_boundary_prev, next_grapheme_boundary,
prev_grapheme_boundary,
},
line_ending::get_line_ending,
movement::Direction,
Assoc, ChangeSet, RopeGraphemes, RopeSlice,
};
use helix_stdx::rope::{self, RopeSliceExt};
use smallvec::{smallvec, SmallVec};
use std::borrow::Cow;
@ -708,12 +710,12 @@ impl IntoIterator for Selection {
pub fn keep_or_remove_matches(
text: RopeSlice,
selection: &Selection,
regex: &crate::regex::Regex,
regex: &rope::Regex,
remove: bool,
) -> Option<Selection> {
let result: SmallVec<_> = selection
.iter()
.filter(|range| regex.is_match(&range.fragment(text)) ^ remove)
.filter(|range| regex.is_match(text.regex_input_at(range.from()..range.to())) ^ remove)
.copied()
.collect();
@ -724,25 +726,20 @@ pub fn keep_or_remove_matches(
None
}
// TODO: support to split on capture #N instead of whole match
pub fn select_on_matches(
text: RopeSlice,
selection: &Selection,
regex: &crate::regex::Regex,
regex: &rope::Regex,
) -> Option<Selection> {
let mut result = SmallVec::with_capacity(selection.len());
for sel in selection {
// TODO: can't avoid occasional allocations since Regex can't operate on chunks yet
let fragment = sel.fragment(text);
let sel_start = sel.from();
let start_byte = text.char_to_byte(sel_start);
for mat in regex.find_iter(&fragment) {
for mat in regex.find_iter(text.regex_input_at(sel.from()..sel.to())) {
// TODO: retain range direction
let start = text.byte_to_char(start_byte + mat.start());
let end = text.byte_to_char(start_byte + mat.end());
let start = text.byte_to_char(mat.start());
let end = text.byte_to_char(mat.end());
let range = Range::new(start, end);
// Make sure the match is not right outside of the selection.
@ -761,12 +758,7 @@ pub fn select_on_matches(
None
}
// TODO: support to split on capture #N instead of whole match
pub fn split_on_matches(
text: RopeSlice,
selection: &Selection,
regex: &crate::regex::Regex,
) -> Selection {
pub fn split_on_newline(text: RopeSlice, selection: &Selection) -> Selection {
let mut result = SmallVec::with_capacity(selection.len());
for sel in selection {
@ -776,21 +768,47 @@ pub fn split_on_matches(
continue;
}
// TODO: can't avoid occasional allocations since Regex can't operate on chunks yet
let fragment = sel.fragment(text);
let sel_start = sel.from();
let sel_end = sel.to();
let start_byte = text.char_to_byte(sel_start);
let mut start = sel_start;
for line in sel.slice(text).lines() {
let Some(line_ending) = get_line_ending(&line) else { break };
let line_end = start + line.len_chars();
// TODO: retain range direction
result.push(Range::new(start, line_end - line_ending.len_chars()));
start = line_end;
}
if start < sel_end {
result.push(Range::new(start, sel_end));
}
}
// TODO: figure out a new primary index
Selection::new(result, 0)
}
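A hedged sketch of the new function's behavior (hypothetical test; `Rope` and `Selection` are in scope in this module's tests): unlike the old regex-based split, line endings are excluded from the resulting ranges:
```
#[test]
fn split_on_newline_excludes_line_endings() {
    let text = Rope::from_str("This\nString\n\ncontains multiple\nlines");
    let selection = Selection::single(0, text.len_chars());
    let split = split_on_newline(text.slice(..), &selection);
    // one range per line, with each line ending excluded from its range
    assert_eq!(split.ranges().len(), 5);
    assert_eq!(split.ranges()[0], Range::new(0, 4)); // "This"
}
```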
pub fn split_on_matches(text: RopeSlice, selection: &Selection, regex: &rope::Regex) -> Selection {
let mut result = SmallVec::with_capacity(selection.len());
for sel in selection {
// Special case: zero-width selection.
if sel.from() == sel.to() {
result.push(*sel);
continue;
}
let sel_start = sel.from();
let sel_end = sel.to();
let mut start = sel_start;
for mat in regex.find_iter(&fragment) {
for mat in regex.find_iter(text.regex_input_at(sel_start..sel_end)) {
// TODO: retain range direction
let end = text.byte_to_char(start_byte + mat.start());
let end = text.byte_to_char(mat.start());
result.push(Range::new(start, end));
start = text.byte_to_char(start_byte + mat.end());
start = text.byte_to_char(mat.end());
}
if start < sel_end {
@ -1021,14 +1039,12 @@ mod test {
#[test]
fn test_select_on_matches() {
use crate::regex::{Regex, RegexBuilder};
let r = Rope::from_str("Nobody expects the Spanish inquisition");
let s = r.slice(..);
let selection = Selection::single(0, r.len_chars());
assert_eq!(
select_on_matches(s, &selection, &Regex::new(r"[A-Z][a-z]*").unwrap()),
select_on_matches(s, &selection, &rope::Regex::new(r"[A-Z][a-z]*").unwrap()),
Some(Selection::new(
smallvec![Range::new(0, 6), Range::new(19, 26)],
0
@ -1038,8 +1054,14 @@ mod test {
let r = Rope::from_str("This\nString\n\ncontains multiple\nlines");
let s = r.slice(..);
let start_of_line = RegexBuilder::new(r"^").multi_line(true).build().unwrap();
let end_of_line = RegexBuilder::new(r"$").multi_line(true).build().unwrap();
let start_of_line = rope::RegexBuilder::new()
.syntax(rope::Config::new().multi_line(true))
.build(r"^")
.unwrap();
let end_of_line = rope::RegexBuilder::new()
.syntax(rope::Config::new().multi_line(true))
.build(r"$")
.unwrap();
// line without ending
assert_eq!(
@ -1077,9 +1099,9 @@ mod test {
select_on_matches(
s,
&Selection::single(0, s.len_chars()),
&RegexBuilder::new(r"^[a-z ]*$")
.multi_line(true)
.build()
&rope::RegexBuilder::new()
.syntax(rope::Config::new().multi_line(true))
.build(r"^[a-z ]*$")
.unwrap()
),
Some(Selection::new(
@ -1171,13 +1193,15 @@ mod test {
#[test]
fn test_split_on_matches() {
use crate::regex::Regex;
let text = Rope::from(" abcd efg wrs xyz 123 456");
let selection = Selection::new(smallvec![Range::new(0, 9), Range::new(11, 20),], 0);
let result = split_on_matches(text.slice(..), &selection, &Regex::new(r"\s+").unwrap());
let result = split_on_matches(
text.slice(..),
&selection,
&rope::Regex::new(r"\s+").unwrap(),
);
assert_eq!(
result.ranges(),

@ -260,7 +260,8 @@ pub fn get_surround_pos(
if change_pos.contains(&open_pos) || change_pos.contains(&close_pos) {
return Err(Error::CursorOverlap);
}
change_pos.extend_from_slice(&[open_pos, close_pos]);
// ensure the positions are always paired in the forward direction
change_pos.extend_from_slice(&[open_pos.min(close_pos), close_pos.max(open_pos)]);
}
Ok(change_pos)
}

@ -12,6 +12,7 @@ use arc_swap::{ArcSwap, Guard};
use bitflags::bitflags;
use globset::GlobSet;
use hashbrown::raw::RawTable;
use helix_stdx::rope::{self, RopeSliceExt};
use slotmap::{DefaultKey as LayerId, HopSlotMap};
use std::{
@ -98,7 +99,19 @@ pub struct LanguageConfiguration {
pub shebangs: Vec<String>, // interpreter(s) associated with language
#[serde(default)]
pub roots: Vec<String>, // these indicate project roots <.git, Cargo.toml>
pub comment_token: Option<String>,
#[serde(
default,
skip_serializing,
deserialize_with = "from_comment_tokens",
alias = "comment-token"
)]
pub comment_tokens: Option<Vec<String>>,
#[serde(
default,
skip_serializing,
deserialize_with = "from_block_comment_tokens"
)]
pub block_comment_tokens: Option<Vec<BlockCommentToken>>,
pub text_width: Option<usize>,
pub soft_wrap: Option<SoftWrap>,
@ -239,6 +252,59 @@ impl<'de> Deserialize<'de> for FileType {
}
}
fn from_comment_tokens<'de, D>(deserializer: D) -> Result<Option<Vec<String>>, D::Error>
where
D: serde::Deserializer<'de>,
{
#[derive(Deserialize)]
#[serde(untagged)]
enum CommentTokens {
Multiple(Vec<String>),
Single(String),
}
Ok(
Option::<CommentTokens>::deserialize(deserializer)?.map(|tokens| match tokens {
CommentTokens::Single(val) => vec![val],
CommentTokens::Multiple(vals) => vals,
}),
)
}
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct BlockCommentToken {
pub start: String,
pub end: String,
}
impl Default for BlockCommentToken {
fn default() -> Self {
BlockCommentToken {
start: "/*".to_string(),
end: "*/".to_string(),
}
}
}
fn from_block_comment_tokens<'de, D>(
deserializer: D,
) -> Result<Option<Vec<BlockCommentToken>>, D::Error>
where
D: serde::Deserializer<'de>,
{
#[derive(Deserialize)]
#[serde(untagged)]
enum BlockCommentTokens {
Multiple(Vec<BlockCommentToken>),
Single(BlockCommentToken),
}
Ok(
Option::<BlockCommentTokens>::deserialize(deserializer)?.map(|tokens| match tokens {
BlockCommentTokens::Single(val) => vec![val],
BlockCommentTokens::Multiple(vals) => vals,
}),
)
}
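To make the accepted TOML shapes concrete, a test-style sketch (hypothetical; it assumes it lives in this module next to `from_comment_tokens`, and that the `toml` crate is available as a dev-dependency):
```
#[derive(serde::Deserialize, Debug)]
#[serde(rename_all = "kebab-case")]
struct CommentConfigSketch {
    #[serde(
        default,
        deserialize_with = "from_comment_tokens",
        alias = "comment-token"
    )]
    comment_tokens: Option<Vec<String>>,
}

#[test]
fn comment_tokens_accept_string_or_array() {
    // legacy single-token spelling still parses via the `Single` variant
    let single: CommentConfigSketch = toml::from_str(r#"comment-token = "//""#).unwrap();
    assert_eq!(single.comment_tokens, Some(vec!["//".to_string()]));
    // the new array spelling parses via the `Multiple` variant
    let multi: CommentConfigSketch = toml::from_str(r#"comment-tokens = ["//", "///"]"#).unwrap();
    assert_eq!(multi.comment_tokens, Some(vec!["//".to_string(), "///".to_string()]));
}
```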
#[derive(Clone, Copy, Debug, Serialize, Deserialize, PartialEq, Eq, Hash)]
#[serde(rename_all = "kebab-case")]
pub enum LanguageServerFeature {
@ -1961,11 +2027,16 @@ impl HighlightConfiguration {
node_slice
};
static SHEBANG_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(SHEBANG).unwrap());
static SHEBANG_REGEX: Lazy<rope::Regex> =
Lazy::new(|| rope::Regex::new(SHEBANG).unwrap());
injection_capture = SHEBANG_REGEX
.captures(&Cow::from(lines))
.map(|cap| InjectionLanguageMarker::Shebang(cap[1].to_owned()))
.captures_iter(lines.regex_input())
.map(|cap| {
let cap = lines.byte_slice(cap.get_group(1).unwrap().range());
InjectionLanguageMarker::Shebang(cap.into())
})
.next()
} else if index == self.injection_content_capture_index {
content_node = Some(capture.node);
}

@ -4,6 +4,7 @@ use helix_core::{
syntax::{Configuration, Loader},
Syntax,
};
use helix_stdx::rope::RopeSliceExt;
use ropey::Rope;
use std::{ops::Range, path::PathBuf, process::Command, sync::Arc};
@ -211,7 +212,7 @@ fn test_treesitter_indent(
if ignored_lines.iter().any(|range| range.contains(&(i + 1))) {
continue;
}
if let Some(pos) = helix_core::find_first_non_whitespace_char(line) {
if let Some(pos) = line.first_non_whitespace_char() {
let tab_width: usize = 4;
let suggested_indent = treesitter_indent_for_pos(
indent_query,

@ -12,7 +12,7 @@ homepage.workspace = true
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
ahash = "0.8.3"
ahash = "0.8.11"
hashbrown = "0.14.0"
tokio = { version = "1", features = ["rt", "rt-multi-thread", "time", "sync", "parking_lot", "macros"] }
# the event registry is essentially read only but must be an rwlock so we can

@ -30,7 +30,7 @@ log = "0.4"
# cloning/compiling tree-sitter grammars
cc = { version = "1" }
threadpool = { version = "1.0" }
tempfile = "3.10.0"
tempfile = "3.10.1"
dunce = "1.0.4"
[target.'cfg(not(target_arch = "wasm32"))'.dependencies]

@ -53,7 +53,7 @@ fn prioritize_runtime_dirs() -> Vec<PathBuf> {
rt_dirs.push(conf_rt_dir);
if let Ok(dir) = std::env::var("HELIX_RUNTIME") {
let dir = path::expand_tilde(dir);
let dir = path::expand_tilde(Path::new(&dir));
rt_dirs.push(path::normalize(dir));
}

@ -631,6 +631,12 @@ impl Client {
}),
publish_diagnostics: Some(lsp::PublishDiagnosticsClientCapabilities {
version_support: Some(true),
tag_support: Some(lsp::TagSupport {
value_set: vec![
lsp::DiagnosticTag::UNNECESSARY,
lsp::DiagnosticTag::DEPRECATED,
],
}),
..Default::default()
}),
inlay_hint: Some(lsp::InlayHintClientCapabilities {
@ -1017,7 +1023,7 @@ impl Client {
pub fn resolve_completion_item(
&self,
completion_item: lsp::CompletionItem,
) -> Option<impl Future<Output = Result<Value>>> {
) -> Option<impl Future<Output = Result<lsp::CompletionItem>>> {
let capabilities = self.capabilities.get().unwrap();
// Return early if the server does not support resolving completion items.
@ -1029,7 +1035,8 @@ impl Client {
_ => return None,
}
Some(self.call::<lsp::request::ResolveCompletionItem>(completion_item))
let res = self.call::<lsp::request::ResolveCompletionItem>(completion_item);
Some(async move { Ok(serde_json::from_value(res.await?)?) })
}
pub fn resolve_code_action(

@ -16,6 +16,7 @@ dunce = "1.0"
etcetera = "0.8"
ropey = { version = "1.6.1", default-features = false }
which = "6.0"
regex-cursor = "0.1.3"
[dev-dependencies]
tempfile = "3.10"

@ -1,6 +1,9 @@
pub use etcetera::home_dir;
use std::path::{Component, Path, PathBuf};
use std::{
borrow::Cow,
path::{Component, Path, PathBuf},
};
use crate::env::current_working_dir;
@ -19,19 +22,22 @@ pub fn fold_home_dir(path: &Path) -> PathBuf {
/// Expands tilde `~` into users home directory if available, otherwise returns the path
/// unchanged. The tilde will only be expanded when present as the first component of the path
/// and only slash follows it.
pub fn expand_tilde(path: impl AsRef<Path>) -> PathBuf {
let path = path.as_ref();
let mut components = path.components().peekable();
if let Some(Component::Normal(c)) = components.peek() {
if c == &"~" {
if let Ok(home) = home_dir() {
// it's ok to unwrap, the path starts with `~`
return home.join(path.strip_prefix("~").unwrap());
pub fn expand_tilde<'a, P>(path: P) -> Cow<'a, Path>
where
P: Into<Cow<'a, Path>>,
{
let path = path.into();
let mut components = path.components();
if let Some(Component::Normal(c)) = components.next() {
if c == "~" {
if let Ok(mut buf) = home_dir() {
buf.push(components);
return Cow::Owned(buf);
}
}
}
path.to_path_buf()
path
}
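A brief sketch of the new `Cow`-based contract (hypothetical test; `Cow` and `Path` are imported at the top of this file): a path without a leading tilde is passed through borrowed, so callers pay no allocation in the common case:
```
#[test]
fn expand_tilde_borrows_when_unchanged() {
    let p = expand_tilde(Path::new("/etc/hosts"));
    // no tilde, so the input is returned as Cow::Borrowed
    assert!(matches!(p, Cow::Borrowed(_)));
    assert_eq!(p.as_ref(), Path::new("/etc/hosts"));
}
```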
/// Normalize a path without resolving symlinks.
@ -109,9 +115,9 @@ pub fn normalize(path: impl AsRef<Path>) -> PathBuf {
/// This function is used instead of [`std::fs::canonicalize`] because we don't want to verify
/// here if the path exists, just normalize its components.
pub fn canonicalize(path: impl AsRef<Path>) -> PathBuf {
let path = expand_tilde(path);
let path = expand_tilde(path.as_ref());
let path = if path.is_relative() {
current_working_dir().join(path)
Cow::Owned(current_working_dir().join(path))
} else {
path
};
@ -183,3 +189,32 @@ pub fn get_truncated_path(path: impl AsRef<Path>) -> PathBuf {
ret.push(file);
ret
}
#[cfg(test)]
mod tests {
use std::{
ffi::OsStr,
path::{Component, Path},
};
use crate::path;
#[test]
fn expand_tilde() {
for path in ["~", "~/foo"] {
let expanded = path::expand_tilde(Path::new(path));
let tilde = Component::Normal(OsStr::new("~"));
let mut component_count = 0;
for component in expanded.components() {
// No tilde left.
assert_ne!(component, tilde);
component_count += 1;
}
// The path was at least expanded to something.
assert_ne!(component_count, 0);
}
}
}

@ -1,11 +1,24 @@
use std::ops::{Bound, RangeBounds};
pub use regex_cursor::engines::meta::{Builder as RegexBuilder, Regex};
pub use regex_cursor::regex_automata::util::syntax::Config;
use regex_cursor::{Input as RegexInput, RopeyCursor};
use ropey::RopeSlice;
pub trait RopeSliceExt: Sized {
pub trait RopeSliceExt<'a>: Sized {
fn ends_with(self, text: &str) -> bool;
fn starts_with(self, text: &str) -> bool;
fn regex_input(self) -> RegexInput<RopeyCursor<'a>>;
fn regex_input_at_bytes<R: RangeBounds<usize>>(
self,
byte_range: R,
) -> RegexInput<RopeyCursor<'a>>;
fn regex_input_at<R: RangeBounds<usize>>(self, char_range: R) -> RegexInput<RopeyCursor<'a>>;
fn first_non_whitespace_char(self) -> Option<usize>;
fn last_non_whitespace_char(self) -> Option<usize>;
}
impl RopeSliceExt for RopeSlice<'_> {
impl<'a> RopeSliceExt<'a> for RopeSlice<'a> {
fn ends_with(self, text: &str) -> bool {
let len = self.len_bytes();
if len < text.len() {
@ -23,4 +36,43 @@ impl RopeSliceExt for RopeSlice<'_> {
self.get_byte_slice(..len - text.len())
.map_or(false, |start| start == text)
}
fn regex_input(self) -> RegexInput<RopeyCursor<'a>> {
RegexInput::new(self)
}
fn regex_input_at<R: RangeBounds<usize>>(self, char_range: R) -> RegexInput<RopeyCursor<'a>> {
let start_bound = match char_range.start_bound() {
Bound::Included(&val) => Bound::Included(self.char_to_byte(val)),
Bound::Excluded(&val) => Bound::Excluded(self.char_to_byte(val)),
Bound::Unbounded => Bound::Unbounded,
};
let end_bound = match char_range.end_bound() {
Bound::Included(&val) => Bound::Included(self.char_to_byte(val)),
Bound::Excluded(&val) => Bound::Excluded(self.char_to_byte(val)),
Bound::Unbounded => Bound::Unbounded,
};
self.regex_input_at_bytes((start_bound, end_bound))
}
fn regex_input_at_bytes<R: RangeBounds<usize>>(
self,
byte_range: R,
) -> RegexInput<RopeyCursor<'a>> {
let input = match byte_range.start_bound() {
Bound::Included(&pos) | Bound::Excluded(&pos) => {
RegexInput::new(RopeyCursor::at(self, pos))
}
Bound::Unbounded => RegexInput::new(self),
};
input.range(byte_range)
}
fn first_non_whitespace_char(self) -> Option<usize> {
self.chars().position(|ch| !ch.is_whitespace())
}
fn last_non_whitespace_char(self) -> Option<usize> {
self.chars_at(self.len_chars())
.reversed()
.position(|ch| !ch.is_whitespace())
.map(|pos| self.len_chars() - pos - 1)
}
}
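As a usage sketch (hedged: it relies only on `ropey` and the `Regex` re-export above, with `RopeSliceExt` in scope), the new input helpers let regex-cursor search a rope's chunks directly instead of copying them into a contiguous `String`:
```
#[test]
fn regex_search_over_rope_chunks() {
    let doc = ropey::Rope::from("alpha beta gamma");
    let slice = doc.slice(..);
    let re = Regex::new(r"beta").unwrap();
    // match offsets are byte positions into the rope
    let m = re.find(slice.regex_input()).unwrap();
    assert_eq!((m.start(), m.end()), (6, 10));
    // the same search restricted to a character sub-range finds nothing
    assert!(re.find(slice.regex_input_at(..5)).is_none());
}
```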

@ -41,7 +41,7 @@ crossterm = { version = "0.27", features = ["event-stream"] }
signal-hook = "0.3"
tokio-stream = "0.1"
futures-util = { version = "0.3", features = ["std", "async-await"], default-features = false }
arc-swap = { version = "1.6.0" }
arc-swap = { version = "1.7.0" }
termini = "1"
# Logging
@ -84,4 +84,4 @@ helix-loader = { path = "../helix-loader" }
[dev-dependencies]
smallvec = "1.13"
indoc = "2.0.4"
tempfile = "3.10.0"
tempfile = "3.10.1"

@ -724,7 +724,7 @@ impl Application {
}
Notification::PublishDiagnostics(mut params) => {
let path = match params.uri.to_file_path() {
Ok(path) => path,
Ok(path) => helix_stdx::path::normalize(&path),
Err(_) => {
log::error!("Unsupported file URI: {}", params.uri);
return;
@ -753,9 +753,7 @@ impl Application {
let lang_conf = doc.language.clone();
if let Some(lang_conf) = &lang_conf {
if let Some(old_diagnostics) =
self.editor.diagnostics.get(&params.uri)
{
if let Some(old_diagnostics) = self.editor.diagnostics.get(&path) {
if !lang_conf.persistent_diagnostic_sources.is_empty() {
// Sort diagnostics first by severity and then by line numbers.
// Note: The `lsp::DiagnosticSeverity` enum is already defined in decreasing order
@ -788,7 +786,7 @@ impl Application {
// Insert the original lsp::Diagnostics here because we may have no open document
// for diagnostic messages and so we can't calculate the exact position.
// When using them later in the diagnostics picker, we calculate them on-demand.
let diagnostics = match self.editor.diagnostics.entry(params.uri) {
let diagnostics = match self.editor.diagnostics.entry(path) {
Entry::Occupied(o) => {
let current_diagnostics = o.into_mut();
// there may be entries from other language servers, which is why we can't overwrite the whole entry

@ -3,6 +3,7 @@ pub(crate) mod lsp;
pub(crate) mod typed;
pub use dap::*;
use helix_stdx::rope::{self, RopeSliceExt};
use helix_vcs::Hunk;
pub use lsp::*;
use tui::widgets::Row;
@ -11,7 +12,7 @@ pub use typed::*;
use helix_core::{
char_idx_at_visual_offset, comment,
doc_formatter::TextFormat,
encoding, find_first_non_whitespace_char, find_workspace, graphemes,
encoding, find_workspace, graphemes,
history::UndoKind,
increment, indent,
indent::IndentStyle,
@ -19,10 +20,10 @@ use helix_core::{
match_brackets,
movement::{self, move_vertically_visual, Direction},
object, pos_at_coords,
regex::{self, Regex, RegexBuilder},
regex::{self, Regex},
search::{self, CharMatcher},
selection, shellwords, surround,
syntax::LanguageServerFeature,
syntax::{BlockCommentToken, LanguageServerFeature},
text_annotations::TextAnnotations,
textobject,
tree_sitter::Node,
@ -277,6 +278,10 @@ impl MappableCommand {
page_down, "Move page down",
half_page_up, "Move half page up",
half_page_down, "Move half page down",
page_cursor_up, "Move page and cursor up",
page_cursor_down, "Move page and cursor down",
page_cursor_half_up, "Move page and cursor half up",
page_cursor_half_down, "Move page and cursor half down",
select_all, "Select whole document",
select_regex, "Select all regex matches inside selections",
split_selection, "Split selections on regex matches",
@ -410,6 +415,8 @@ impl MappableCommand {
completion, "Invoke completion popup",
hover, "Show docs for item under cursor",
toggle_comments, "Comment/uncomment selections",
toggle_line_comments, "Line comment/uncomment selections",
toggle_block_comments, "Block comment/uncomment selections",
rotate_selections_forward, "Rotate selections forward",
rotate_selections_backward, "Rotate selections backward",
rotate_selection_contents_forward, "Rotate selection contents forward",
@ -819,7 +826,7 @@ fn kill_to_line_start(cx: &mut Context) {
let head = if anchor == first_char && line != 0 {
// select until previous line
line_end_char_index(&text, line - 1)
} else if let Some(pos) = find_first_non_whitespace_char(text.line(line)) {
} else if let Some(pos) = text.line(line).first_non_whitespace_char() {
if first_char + pos < anchor {
// select until first non-blank in line if cursor is after it
first_char + pos
@ -881,7 +888,7 @@ fn goto_first_nonwhitespace_impl(view: &mut View, doc: &mut Document, movement:
let selection = doc.selection(view.id).clone().transform(|range| {
let line = range.cursor_line(text);
if let Some(pos) = find_first_non_whitespace_char(text.line(line)) {
if let Some(pos) = text.line(line).first_non_whitespace_char() {
let pos = pos + text.line_to_char(line);
range.put_cursor(text, pos, movement == Movement::Extend)
} else {
@ -1610,7 +1617,7 @@ fn switch_to_lowercase(cx: &mut Context) {
});
}
pub fn scroll(cx: &mut Context, offset: usize, direction: Direction) {
pub fn scroll(cx: &mut Context, offset: usize, direction: Direction, sync_cursor: bool) {
use Direction::*;
let config = cx.editor.config();
let (view, doc) = current!(cx.editor);
@ -1630,7 +1637,7 @@ pub fn scroll(cx: &mut Context, offset: usize, direction: Direction) {
let doc_text = doc.text().slice(..);
let viewport = view.inner_area(doc);
let text_fmt = doc.text_format(viewport.width, None);
let annotations = view.text_annotations(doc, None);
let mut annotations = view.text_annotations(doc, None);
(view.offset.anchor, view.offset.vertical_offset) = char_idx_at_visual_offset(
doc_text,
view.offset.anchor,
@ -1640,6 +1647,30 @@ pub fn scroll(cx: &mut Context, offset: usize, direction: Direction) {
&annotations,
);
if sync_cursor {
let movement = match cx.editor.mode {
Mode::Select => Movement::Extend,
_ => Movement::Move,
};
// TODO: When inline diagnostics gets merged: 1. move_vertically_visual removes
// line annotations/diagnostics so the cursor may jump further than the view.
// 2. If the cursor lands on a complete line of virtual text, the cursor will
// jump a different distance than the view.
let selection = doc.selection(view.id).clone().transform(|range| {
move_vertically_visual(
doc_text,
range,
direction,
offset.unsigned_abs(),
movement,
&text_fmt,
&mut annotations,
)
});
doc.set_selection(view.id, selection);
return;
}
let mut head;
match direction {
Forward => {
@ -1690,25 +1721,49 @@ pub fn scroll(cx: &mut Context, offset: usize, direction: Direction) {
fn page_up(cx: &mut Context) {
let view = view!(cx.editor);
let offset = view.inner_height();
scroll(cx, offset, Direction::Backward);
scroll(cx, offset, Direction::Backward, false);
}
fn page_down(cx: &mut Context) {
let view = view!(cx.editor);
let offset = view.inner_height();
scroll(cx, offset, Direction::Forward);
scroll(cx, offset, Direction::Forward, false);
}
fn half_page_up(cx: &mut Context) {
let view = view!(cx.editor);
let offset = view.inner_height() / 2;
scroll(cx, offset, Direction::Backward);
scroll(cx, offset, Direction::Backward, false);
}
fn half_page_down(cx: &mut Context) {
let view = view!(cx.editor);
let offset = view.inner_height() / 2;
scroll(cx, offset, Direction::Forward);
scroll(cx, offset, Direction::Forward, false);
}
fn page_cursor_up(cx: &mut Context) {
let view = view!(cx.editor);
let offset = view.inner_height();
scroll(cx, offset, Direction::Backward, true);
}
fn page_cursor_down(cx: &mut Context) {
let view = view!(cx.editor);
let offset = view.inner_height();
scroll(cx, offset, Direction::Forward, true);
}
fn page_cursor_half_up(cx: &mut Context) {
let view = view!(cx.editor);
let offset = view.inner_height() / 2;
scroll(cx, offset, Direction::Backward, true);
}
fn page_cursor_half_down(cx: &mut Context) {
let view = view!(cx.editor);
let offset = view.inner_height() / 2;
scroll(cx, offset, Direction::Forward, true);
}
#[allow(deprecated)]
@ -1857,11 +1912,7 @@ fn split_selection(cx: &mut Context) {
fn split_selection_on_newline(cx: &mut Context) {
let (view, doc) = current!(cx.editor);
let text = doc.text().slice(..);
// only compile the regex once
#[allow(clippy::trivial_regex)]
static REGEX: Lazy<Regex> =
Lazy::new(|| Regex::new(r"\r\n|[\n\r\u{000B}\u{000C}\u{0085}\u{2028}\u{2029}]").unwrap());
let selection = selection::split_on_matches(text, doc.selection(view.id), &REGEX);
let selection = selection::split_on_newline(text, doc.selection(view.id));
doc.set_selection(view.id, selection);
}
@ -1880,8 +1931,7 @@ fn merge_consecutive_selections(cx: &mut Context) {
#[allow(clippy::too_many_arguments)]
fn search_impl(
editor: &mut Editor,
contents: &str,
regex: &Regex,
regex: &rope::Regex,
movement: Movement,
direction: Direction,
scrolloff: usize,
@ -1909,23 +1959,20 @@ fn search_impl(
// do a reverse search and wraparound to the end, we don't need to search
// the text before the current cursor position for matches, but by slicing
// it out, we need to add it back to the position of the selection.
let mut offset = 0;
let doc = doc!(editor).text().slice(..);
// use find_at to find the next match after the cursor, loop around the end
// Careful, `Regex` uses `bytes` as offsets, not character indices!
let mut mat = match direction {
Direction::Forward => regex.find_at(contents, start),
Direction::Backward => regex.find_iter(&contents[..start]).last(),
Direction::Forward => regex.find(doc.regex_input_at_bytes(start..)),
Direction::Backward => regex.find_iter(doc.regex_input_at_bytes(..start)).last(),
};
if mat.is_none() {
if wrap_around {
mat = match direction {
Direction::Forward => regex.find(contents),
Direction::Backward => {
offset = start;
regex.find_iter(&contents[start..]).last()
}
Direction::Forward => regex.find(doc.regex_input()),
Direction::Backward => regex.find_iter(doc.regex_input_at_bytes(start..)).last(),
};
}
if show_warnings {
@ -1942,8 +1989,8 @@ fn search_impl(
let selection = doc.selection(view.id);
if let Some(mat) = mat {
let start = text.byte_to_char(mat.start() + offset);
let end = text.byte_to_char(mat.end() + offset);
let start = text.byte_to_char(mat.start());
let end = text.byte_to_char(mat.end());
if end == 0 {
// skip empty matches that don't make sense
@ -1987,13 +2034,7 @@ fn searcher(cx: &mut Context, direction: Direction) {
let scrolloff = config.scrolloff;
let wrap_around = config.search.wrap_around;
let doc = doc!(cx.editor);
// TODO: could probably share with select_on_matches?
// HAXX: sadly we can't avoid allocating a single string for the whole buffer since we can't
// feed chunks into the regex yet
let contents = doc.text().slice(..).to_string();
let completions = search_completions(cx, Some(reg));
ui::regex_prompt(
@ -2015,7 +2056,6 @@ fn searcher(cx: &mut Context, direction: Direction) {
}
search_impl(
cx.editor,
&contents,
&regex,
Movement::Move,
direction,
@ -2035,8 +2075,6 @@ fn search_next_or_prev_impl(cx: &mut Context, movement: Movement, direction: Dir
let config = cx.editor.config();
let scrolloff = config.scrolloff;
if let Some(query) = cx.editor.registers.first(register, cx.editor) {
let doc = doc!(cx.editor);
let contents = doc.text().slice(..).to_string();
let search_config = &config.search;
let case_insensitive = if search_config.smart_case {
!query.chars().any(char::is_uppercase)
@ -2044,15 +2082,17 @@ fn search_next_or_prev_impl(cx: &mut Context, movement: Movement, direction: Dir
false
};
let wrap_around = search_config.wrap_around;
if let Ok(regex) = RegexBuilder::new(&query)
if let Ok(regex) = rope::RegexBuilder::new()
.syntax(
rope::Config::new()
.case_insensitive(case_insensitive)
.multi_line(true)
.build()
.multi_line(true),
)
.build(&query)
{
for _ in 0..count {
search_impl(
cx.editor,
&contents,
&regex,
movement,
direction,
@ -2189,7 +2229,7 @@ fn global_search(cx: &mut Context) {
let reg = cx.register.unwrap_or('/');
let completions = search_completions(cx, Some(reg));
ui::regex_prompt(
ui::raw_regex_prompt(
cx,
"global-search:".into(),
Some(reg),
@ -2200,7 +2240,7 @@ fn global_search(cx: &mut Context) {
.map(|comp| (0.., std::borrow::Cow::Owned(comp.clone())))
.collect()
},
move |cx, regex, event| {
move |cx, _, input, event| {
if event != PromptEvent::Validate {
return;
}
@ -2215,7 +2255,7 @@ fn global_search(cx: &mut Context) {
if let Ok(matcher) = RegexMatcherBuilder::new()
.case_smart(smart_case)
.build(regex.as_str())
.build(input)
{
let search_root = helix_stdx::env::current_working_dir();
if !search_root.exists() {
@ -3051,11 +3091,11 @@ fn insert_with_indent(cx: &mut Context, cursor_fallback: IndentFallbackPos) {
} else {
// move cursor to the fallback position
let pos = match cursor_fallback {
IndentFallbackPos::LineStart => {
find_first_non_whitespace_char(text.line(cursor_line))
IndentFallbackPos::LineStart => text
.line(cursor_line)
.first_non_whitespace_char()
.map(|ws_offset| ws_offset + cursor_line_start)
.unwrap_or(cursor_line_start)
}
.unwrap_or(cursor_line_start),
IndentFallbackPos::LineEnd => line_end_char_index(&text, cursor_line),
};
@ -4334,16 +4374,27 @@ fn join_selections_impl(cx: &mut Context, select_space: bool) {
// select inserted spaces
let transaction = if select_space {
let mut offset: usize = 0;
let ranges: SmallVec<_> = changes
.iter()
.scan(0, |offset, change| {
let range = Range::point(change.0 - *offset);
*offset += change.1 - change.0 - 1; // -1 because cursor is 0-sized
.filter_map(|change| {
if change.2.is_some() {
let range = Range::point(change.0 - offset);
offset += change.1 - change.0 - 1; // -1 adjusts for the replacement of the range by a space
Some(range)
} else {
offset += change.1 - change.0;
None
}
})
.collect();
let t = Transaction::change(text, changes.into_iter());
if ranges.is_empty() {
t
} else {
let selection = Selection::new(ranges, 0);
Transaction::change(text, changes.into_iter()).with_selection(selection)
t.with_selection(selection)
}
} else {
Transaction::change(text, changes.into_iter())
};
@ -4426,18 +4477,124 @@ pub fn completion(cx: &mut Context) {
}
// comments
fn toggle_comments(cx: &mut Context) {
type CommentTransactionFn = fn(
line_token: Option<&str>,
block_tokens: Option<&[BlockCommentToken]>,
doc: &Rope,
selection: &Selection,
) -> Transaction;
fn toggle_comments_impl(cx: &mut Context, comment_transaction: CommentTransactionFn) {
let (view, doc) = current!(cx.editor);
let token = doc
let line_token: Option<&str> = doc
.language_config()
.and_then(|lc| lc.comment_tokens.as_ref())
.and_then(|tc| tc.first())
.map(|tc| tc.as_str());
let block_tokens: Option<&[BlockCommentToken]> = doc
.language_config()
.and_then(|lc| lc.comment_token.as_ref())
.map(|tc| tc.as_ref());
let transaction = comment::toggle_line_comments(doc.text(), doc.selection(view.id), token);
.and_then(|lc| lc.block_comment_tokens.as_ref())
.map(|tc| &tc[..]);
let transaction =
comment_transaction(line_token, block_tokens, doc.text(), doc.selection(view.id));
doc.apply(&transaction, view.id);
exit_select_mode(cx);
}
/// commenting behavior:
/// 1. only line comment tokens -> line comment
/// 2. each line block commented -> uncomment all lines
/// 3. whole selection block commented -> uncomment selection
/// 4. all lines not commented and block tokens -> comment uncommented lines
/// 5. no comment tokens and not block commented -> line comment
fn toggle_comments(cx: &mut Context) {
toggle_comments_impl(cx, |line_token, block_tokens, doc, selection| {
let text = doc.slice(..);
// only have line comment tokens
if line_token.is_some() && block_tokens.is_none() {
return comment::toggle_line_comments(doc, selection, line_token);
}
let split_lines = comment::split_lines_of_selection(text, selection);
let default_block_tokens = &[BlockCommentToken::default()];
let block_comment_tokens = block_tokens.unwrap_or(default_block_tokens);
let (line_commented, line_comment_changes) =
comment::find_block_comments(block_comment_tokens, text, &split_lines);
// block commented by line would also be block commented so check this first
if line_commented {
return comment::create_block_comment_transaction(
doc,
&split_lines,
line_commented,
line_comment_changes,
)
.0;
}
let (block_commented, comment_changes) =
comment::find_block_comments(block_comment_tokens, text, selection);
// check if selection has block comments
if block_commented {
return comment::create_block_comment_transaction(
doc,
selection,
block_commented,
comment_changes,
)
.0;
}
// not commented and only have block comment tokens
if line_token.is_none() && block_tokens.is_some() {
return comment::create_block_comment_transaction(
doc,
&split_lines,
line_commented,
line_comment_changes,
)
.0;
}
// not block commented at all and no case above matched: fall back to line comments
comment::toggle_line_comments(doc, selection, line_token)
})
}
fn toggle_line_comments(cx: &mut Context) {
toggle_comments_impl(cx, |line_token, block_tokens, doc, selection| {
if line_token.is_none() && block_tokens.is_some() {
let default_block_tokens = &[BlockCommentToken::default()];
let block_comment_tokens = block_tokens.unwrap_or(default_block_tokens);
comment::toggle_block_comments(
doc,
&comment::split_lines_of_selection(doc.slice(..), selection),
block_comment_tokens,
)
} else {
comment::toggle_line_comments(doc, selection, line_token)
}
});
}
fn toggle_block_comments(cx: &mut Context) {
toggle_comments_impl(cx, |line_token, block_tokens, doc, selection| {
if line_token.is_some() && block_tokens.is_none() {
comment::toggle_line_comments(doc, selection, line_token)
} else {
let default_block_tokens = &[BlockCommentToken::default()];
let block_comment_tokens = block_tokens.unwrap_or(default_block_tokens);
comment::toggle_block_comments(doc, selection, block_comment_tokens)
}
});
}
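The three commands above differ only in the strategy passed to `toggle_comments_impl`. As a hedged sketch of the extension point, a hypothetical fourth command that always line-comments, defaulting to `//` when the language config defines no token (the command name and the fallback token are illustrative, not part of this change):

fn toggle_comments_forced_line(cx: &mut Context) {
    toggle_comments_impl(cx, |line_token, _block_tokens, doc, selection| {
        // Reuse the line-comment path regardless of available block tokens.
        comment::toggle_line_comments(doc, selection, line_token.or(Some("//")))
    });
}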
fn rotate_selections(cx: &mut Context, direction: Direction) {
let count = cx.count();
let (view, doc) = current!(cx.editor);
@ -4858,11 +5015,11 @@ fn align_view_middle(cx: &mut Context) {
}
fn scroll_up(cx: &mut Context) {
scroll(cx, cx.count(), Direction::Backward);
scroll(cx, cx.count(), Direction::Backward, false);
}
fn scroll_down(cx: &mut Context) {
scroll(cx, cx.count(), Direction::Forward);
scroll(cx, cx.count(), Direction::Forward, false);
}
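`scroll` now takes a fourth argument; judging from the new page_cursor_half_* bindings later in this diff, it selects whether the cursor is dragged along with the viewport (an assumption read off this diff, not a documented API). A sketch of a half-page variant under that assumption:

fn page_cursor_half_down(cx: &mut Context) {
    let view = view!(cx.editor);
    let offset = view.inner_height() / 2;
    // `true`: move the cursor with the view instead of leaving it in place
    scroll(cx, offset, Direction::Forward, true);
}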
fn goto_ts_object_impl(cx: &mut Context, object: &'static str, direction: Direction) {

@ -41,7 +41,7 @@ use std::{
collections::{BTreeMap, HashSet},
fmt::Write,
future::Future,
path::PathBuf,
path::{Path, PathBuf},
};
/// Gets the first language server that is attached to a document which supports a specific feature.
@ -137,7 +137,7 @@ struct DiagnosticStyles {
}
struct PickerDiagnostic {
url: lsp::Url,
path: PathBuf,
diag: lsp::Diagnostic,
offset_encoding: OffsetEncoding,
}
@ -170,8 +170,7 @@ impl ui::menu::Item for PickerDiagnostic {
let path = match format {
DiagnosticsFormat::HideSourcePath => String::new(),
DiagnosticsFormat::ShowSourcePath => {
let file_path = self.url.to_file_path().unwrap();
let path = path::get_truncated_path(file_path);
let path = path::get_truncated_path(&self.path);
format!("{}: ", path.to_string_lossy())
}
};
@ -211,22 +210,31 @@ fn jump_to_location(
return;
}
};
jump_to_position(editor, &path, location.range, offset_encoding, action);
}
let doc = match editor.open(&path, action) {
fn jump_to_position(
editor: &mut Editor,
path: &Path,
range: lsp::Range,
offset_encoding: OffsetEncoding,
action: Action,
) {
let doc = match editor.open(path, action) {
Ok(id) => doc_mut!(editor, &id),
Err(err) => {
let err = format!("failed to open path: {:?}: {:?}", location.uri, err);
let err = format!("failed to open path: {:?}: {:?}", path, err);
editor.set_error(err);
return;
}
};
let view = view_mut!(editor);
// TODO: convert inside server
let new_range =
if let Some(new_range) = lsp_range_to_range(doc.text(), location.range, offset_encoding) {
let new_range = if let Some(new_range) = lsp_range_to_range(doc.text(), range, offset_encoding)
{
new_range
} else {
log::warn!("lsp position out of bounds - {:?}", location.range);
log::warn!("lsp position out of bounds - {:?}", range);
return;
};
// we flip the range so that the cursor sits on the start of the symbol
@ -261,21 +269,20 @@ enum DiagnosticsFormat {
fn diag_picker(
cx: &Context,
diagnostics: BTreeMap<lsp::Url, Vec<(lsp::Diagnostic, usize)>>,
_current_path: Option<lsp::Url>,
diagnostics: BTreeMap<PathBuf, Vec<(lsp::Diagnostic, usize)>>,
format: DiagnosticsFormat,
) -> Picker<PickerDiagnostic> {
// TODO: drop current_path comparison and instead use workspace: bool flag?
// flatten the map to a vec of (path, diag) pairs
let mut flat_diag = Vec::new();
for (url, diags) in diagnostics {
for (path, diags) in diagnostics {
flat_diag.reserve(diags.len());
for (diag, ls) in diags {
if let Some(ls) = cx.editor.language_server_by_id(ls) {
flat_diag.push(PickerDiagnostic {
url: url.clone(),
path: path.clone(),
diag,
offset_encoding: ls.offset_encoding(),
});
@ -295,22 +302,17 @@ fn diag_picker(
(styles, format),
move |cx,
PickerDiagnostic {
url,
path,
diag,
offset_encoding,
},
action| {
jump_to_location(
cx.editor,
&lsp::Location::new(url.clone(), diag.range),
*offset_encoding,
action,
)
jump_to_position(cx.editor, path, diag.range, *offset_encoding, action)
},
)
.with_preview(move |_editor, PickerDiagnostic { url, diag, .. }| {
let location = lsp::Location::new(url.clone(), diag.range);
Some(location_to_file_location(&location))
.with_preview(move |_editor, PickerDiagnostic { path, diag, .. }| {
let line = Some((diag.range.start.line as usize, diag.range.end.line as usize));
Some((path.clone().into(), line))
})
.truncate_start(false)
}
@ -473,17 +475,16 @@ pub fn workspace_symbol_picker(cx: &mut Context) {
pub fn diagnostics_picker(cx: &mut Context) {
let doc = doc!(cx.editor);
if let Some(current_url) = doc.url() {
if let Some(current_path) = doc.path() {
let diagnostics = cx
.editor
.diagnostics
.get(&current_url)
.get(current_path)
.cloned()
.unwrap_or_default();
let picker = diag_picker(
cx,
[(current_url.clone(), diagnostics)].into(),
Some(current_url),
[(current_path.clone(), diagnostics)].into(),
DiagnosticsFormat::HideSourcePath,
);
cx.push_layer(Box::new(overlaid(picker)));
@ -491,16 +492,9 @@ pub fn diagnostics_picker(cx: &mut Context) {
}
pub fn workspace_diagnostics_picker(cx: &mut Context) {
let doc = doc!(cx.editor);
let current_url = doc.url();
// TODO not yet filtered by LanguageServerFeature, need to do something similar as Document::shown_diagnostics here for all open documents
let diagnostics = cx.editor.diagnostics.clone();
let picker = diag_picker(
cx,
diagnostics,
current_url,
DiagnosticsFormat::ShowSourcePath,
);
let picker = diag_picker(cx, diagnostics, DiagnosticsFormat::ShowSourcePath);
cx.push_layer(Box::new(overlaid(picker)));
}
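With the editor-wide diagnostics map now keyed by PathBuf instead of lsp::Url (see the Editor changes later in this diff), lookups go straight through the document path with no Url round-trip. A minimal sketch of the new access pattern:

// Hedged sketch: look up diagnostics for the focused document by path.
let doc = doc!(cx.editor);
let diags = doc
    .path()
    .and_then(|path| cx.editor.diagnostics.get(path))
    .cloned()
    .unwrap_or_default();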

@ -110,14 +110,14 @@ fn open(cx: &mut compositor::Context, args: &[Cow<str>], event: PromptEvent) ->
ensure!(!args.is_empty(), "wrong argument count");
for arg in args {
let (path, pos) = args::parse_file(arg);
let path = helix_stdx::path::expand_tilde(&path);
let path = helix_stdx::path::expand_tilde(path);
// If the path is a directory, open a file picker on that directory and update the status
// message
if let Ok(true) = std::fs::canonicalize(&path).map(|p| p.is_dir()) {
let callback = async move {
let call: job::Callback = job::Callback::EditorCompositor(Box::new(
move |editor: &mut Editor, compositor: &mut Compositor| {
let picker = ui::file_picker(path, &editor.config());
let picker = ui::file_picker(path.into_owned(), &editor.config());
compositor.push(Box::new(overlaid(picker)));
},
));
@ -1078,11 +1078,11 @@ fn change_current_directory(
return Ok(());
}
let dir = helix_stdx::path::expand_tilde(
    args.first()
        .context("target directory not provided")?
        .as_ref(),
);
let dir = args
    .first()
    .context("target directory not provided")?
    .as_ref();
let dir = helix_stdx::path::expand_tilde(Path::new(dir));
helix_stdx::env::set_current_working_dir(dir)?;

@ -221,10 +221,18 @@ fn request_completion(
.iter()
.find(|&trigger| trigger_text.ends_with(trigger))
});
if trigger_char.is_some() {
lsp::CompletionContext {
trigger_kind: lsp::CompletionTriggerKind::TRIGGER_CHARACTER,
trigger_character: trigger_char.cloned(),
}
} else {
lsp::CompletionContext {
trigger_kind: lsp::CompletionTriggerKind::INVOKED,
trigger_character: None,
}
}
};
let completion_response = ls.completion(doc_id, pos, None, context).unwrap();

@ -303,6 +303,15 @@ impl Keymaps {
self.sticky.as_ref()
}
pub fn contains_key(&self, mode: Mode, key: KeyEvent) -> bool {
let keymaps = &*self.map();
let keymap = &keymaps[&mode];
keymap
.search(self.pending())
.and_then(KeyTrie::node)
.is_some_and(|node| node.contains_key(&key))
}
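`contains_key` lets callers ask whether a key would continue the pending sequence without executing anything; the count-handling guard added to EditorView::command_mode further down uses exactly this, so digits can be rebound under a pending prefix without breaking count entry. A hedged sketch of the check:

// Mirror of the guard in command_mode below: only accumulate the digit
// into a numeric count if no binding claims it in the current mode.
let event = key!('3');
if !self.keymaps.contains_key(mode, event) {
    // treat '3' as part of the count
}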
/// Lookup `key` in the keymap to try and find a command to execute. Escape
/// key cancels pending keystrokes. If there are no pending keystrokes but a
/// sticky node is in use, it will be cleared.

@ -180,8 +180,8 @@ pub fn default() -> HashMap<Mode, KeyTrie> {
"esc" => normal_mode,
"C-b" | "pageup" => page_up,
"C-f" | "pagedown" => page_down,
"C-u" => half_page_up,
"C-d" => half_page_down,
"C-u" => page_cursor_half_up,
"C-d" => page_cursor_half_down,
"C-w" => { "Window"
"C-w" | "w" => rotate_view,
@ -278,6 +278,9 @@ pub fn default() -> HashMap<Mode, KeyTrie> {
"k" => hover,
"r" => rename_symbol,
"h" => select_references_to_symbol_under_cursor,
"c" => toggle_comments,
"C" => toggle_block_comments,
"A-c" => toggle_line_comments,
"?" => command_palette,
},
"z" => { "View"
@ -289,8 +292,8 @@ pub fn default() -> HashMap<Mode, KeyTrie> {
"j" | "down" => scroll_down,
"C-b" | "pageup" => page_up,
"C-f" | "pagedown" => page_down,
"C-u" | "backspace" => half_page_up,
"C-d" | "space" => half_page_down,
"C-u" | "backspace" => page_cursor_half_up,
"C-d" | "space" => page_cursor_half_down,
"/" => search,
"?" => rsearch,
@ -306,8 +309,8 @@ pub fn default() -> HashMap<Mode, KeyTrie> {
"j" | "down" => scroll_down,
"C-b" | "pageup" => page_up,
"C-f" | "pagedown" => page_down,
"C-u" | "backspace" => half_page_up,
"C-d" | "space" => half_page_down,
"C-u" | "backspace" => page_cursor_half_up,
"C-d" | "space" => page_cursor_half_down,
"/" => search,
"?" => rsearch,

@ -1,7 +1,9 @@
use crate::{
compositor::{Component, Context, Event, EventResult},
handlers::trigger_auto_completion,
job,
};
use helix_event::AsyncHook;
use helix_view::{
document::SavePoint,
editor::CompleteAction,
@ -10,14 +12,14 @@ use helix_view::{
theme::{Modifier, Style},
ViewId,
};
use tokio::time::Instant;
use tui::{buffer::Buffer as Surface, text::Span};
use std::{borrow::Cow, sync::Arc};
use std::{borrow::Cow, sync::Arc, time::Duration};
use helix_core::{chars, Change, Transaction};
use helix_view::{graphics::Rect, Document, Editor};
use crate::commands;
use crate::ui::{menu, Markdown, Menu, Popup, PromptEvent};
use helix_lsp::{lsp, util, OffsetEncoding};
@ -102,6 +104,7 @@ pub struct Completion {
#[allow(dead_code)]
trigger_offset: usize,
filter: String,
resolve_handler: tokio::sync::mpsc::Sender<CompletionItem>,
}
impl Completion {
@ -368,6 +371,7 @@ impl Completion {
// TODO: expand nucleo api to allow moving straight to a Utf32String here
// and avoid allocation during matching
filter: String::from(fragment),
resolve_handler: ResolveHandler::default().spawn(),
};
// need to recompute immediately in case start_offset != trigger_offset
@ -379,6 +383,8 @@ impl Completion {
completion
}
/// Synchronously resolve the given completion item. This is used when
/// accepting a completion.
fn resolve_completion_item(
language_server: &helix_lsp::Client,
completion_item: lsp::CompletionItem,
@ -386,7 +392,7 @@ impl Completion {
let future = language_server.resolve_completion_item(completion_item)?;
let response = helix_lsp::block_on(future);
match response {
Ok(value) => serde_json::from_value(value).ok(),
Ok(item) => Some(item),
Err(err) => {
log::error!("Failed to resolve completion item: {}", err);
None
@ -420,62 +426,6 @@ impl Completion {
self.popup.contents_mut().replace_option(old_item, new_item);
}
/// Asynchronously requests that the currently selected completion item is
/// resolved through LSP `completionItem/resolve`.
pub fn ensure_item_resolved(&mut self, cx: &mut commands::Context) -> bool {
// > If computing full completion items is expensive, servers can additionally provide a
// > handler for the completion item resolve request. ...
// > A typical use case is for example: the `textDocument/completion` request doesn't fill
// > in the `documentation` property for returned completion items since it is expensive
// > to compute. When the item is selected in the user interface then a
// > 'completionItem/resolve' request is sent with the selected completion item as a parameter.
// > The returned completion item should have the documentation property filled in.
// https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocument_completion
let current_item = match self.popup.contents().selection() {
Some(item) if !item.resolved => item.clone(),
_ => return false,
};
let Some(language_server) = cx
.editor
.language_server_by_id(current_item.language_server_id)
else {
return false;
};
// This method should not block the compositor so we handle the response asynchronously.
let Some(future) = language_server.resolve_completion_item(current_item.item.clone())
else {
return false;
};
cx.callback(
future,
move |_editor, compositor, response: Option<lsp::CompletionItem>| {
let resolved_item = match response {
Some(item) => item,
None => return,
};
if let Some(completion) = &mut compositor
.find::<crate::ui::EditorView>()
.unwrap()
.completion
{
let resolved_item = CompletionItem {
item: resolved_item,
language_server_id: current_item.language_server_id,
resolved: true,
};
completion.replace_item(current_item, resolved_item);
}
},
);
true
}
pub fn area(&mut self, viewport: Rect, editor: &Editor) -> Rect {
self.popup.area(viewport, editor)
}
@ -498,6 +448,9 @@ impl Component for Completion {
Some(option) => option,
None => return,
};
if !option.resolved {
helix_event::send_blocking(&self.resolve_handler, option.clone());
}
// need to render:
// option.detail
// ---
@ -599,3 +552,88 @@ impl Component for Completion {
markdown_doc.render(doc_area, surface, cx);
}
}
/// A hook for resolving incomplete completion items.
///
/// From the [LSP spec](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocument_completion):
///
/// > If computing full completion items is expensive, servers can additionally provide a
/// > handler for the completion item resolve request. ...
/// > A typical use case is for example: the `textDocument/completion` request doesn't fill
/// > in the `documentation` property for returned completion items since it is expensive
/// > to compute. When the item is selected in the user interface then a
/// > 'completionItem/resolve' request is sent with the selected completion item as a parameter.
/// > The returned completion item should have the documentation property filled in.
#[derive(Debug, Default)]
struct ResolveHandler {
trigger: Option<CompletionItem>,
request: Option<helix_event::CancelTx>,
}
impl AsyncHook for ResolveHandler {
type Event = CompletionItem;
fn handle_event(
&mut self,
item: Self::Event,
timeout: Option<tokio::time::Instant>,
) -> Option<tokio::time::Instant> {
if self
.trigger
.as_ref()
.is_some_and(|trigger| trigger == &item)
{
timeout
} else {
self.trigger = Some(item);
self.request = None;
Some(Instant::now() + Duration::from_millis(150))
}
}
fn finish_debounce(&mut self) {
let Some(item) = self.trigger.take() else { return };
let (tx, rx) = helix_event::cancelation();
self.request = Some(tx);
job::dispatch_blocking(move |editor, _| resolve_completion_item(editor, item, rx))
}
}
fn resolve_completion_item(
editor: &mut Editor,
item: CompletionItem,
cancel: helix_event::CancelRx,
) {
let Some(language_server) = editor.language_server_by_id(item.language_server_id) else {
return;
};
let Some(future) = language_server.resolve_completion_item(item.item.clone()) else {
return;
};
tokio::spawn(async move {
match helix_event::cancelable_future(future, cancel).await {
Some(Ok(resolved_item)) => {
job::dispatch(move |_, compositor| {
if let Some(completion) = &mut compositor
.find::<crate::ui::EditorView>()
.unwrap()
.completion
{
let resolved_item = CompletionItem {
item: resolved_item,
language_server_id: item.language_server_id,
resolved: true,
};
completion.replace_item(item, resolved_item);
};
})
.await
}
Some(Err(err)) => log::error!("completion resolve request failed: {err}"),
None => (),
}
});
}
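For context on how the new hook is wired (all names taken from this diff): the Completion widget keeps the Sender returned by `spawn` and feeds it the selected item during rendering; the hook debounces for 150ms, then dispatches the blocking job above. A condensed sketch:

// On construction (see Completion::new above):
let resolve_handler = ResolveHandler::default().spawn();
// During rendering, for an unresolved selection:
if !option.resolved {
    helix_event::send_blocking(&resolve_handler, option.clone());
}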

@ -360,7 +360,7 @@ impl EditorView {
doc: &Document,
theme: &Theme,
) -> [Vec<(usize, std::ops::Range<usize>)>; 5] {
use helix_core::diagnostic::Severity;
use helix_core::diagnostic::{DiagnosticTag, Severity};
let get_scope_of = |scope| {
theme
.find_scope_index_exact(scope)
@ -380,6 +380,10 @@ impl EditorView {
let error = get_scope_of("diagnostic.error");
let r#default = get_scope_of("diagnostic"); // this is a bit redundant but should be fine
// Diagnostic tags
let unnecessary = theme.find_scope_index_exact("diagnostic.unnecessary");
let deprecated = theme.find_scope_index_exact("diagnostic.deprecated");
let mut default_vec: Vec<(usize, std::ops::Range<usize>)> = Vec::new();
let mut info_vec = Vec::new();
let mut hint_vec = Vec::new();
@ -396,6 +400,15 @@ impl EditorView {
_ => (&mut default_vec, r#default),
};
let scope = diagnostic
.tags
.first()
.and_then(|tag| match tag {
DiagnosticTag::Unnecessary => unnecessary,
DiagnosticTag::Deprecated => deprecated,
})
.unwrap_or(scope);
// If any diagnostic overlaps ranges with the prior diagnostic,
// merge the two together. Otherwise push a new span.
match vec.last_mut() {
@ -716,7 +729,8 @@ impl EditorView {
}
}
let paragraph = Paragraph::new(lines)
let text = Text::from(lines);
let paragraph = Paragraph::new(&text)
.alignment(Alignment::Right)
.wrap(Wrap { trim: true });
let width = 100.min(viewport.width);
@ -903,7 +917,9 @@ impl EditorView {
fn command_mode(&mut self, mode: Mode, cxt: &mut commands::Context, event: KeyEvent) {
match (event, cxt.editor.count) {
// count handling
(key!(i @ '0'), Some(_)) | (key!(i @ '1'..='9'), _) => {
(key!(i @ '0'), Some(_)) | (key!(i @ '1'..='9'), _)
if !self.keymaps.contains_key(mode, event) =>
{
let i = i.to_digit(10).unwrap() as usize;
cxt.editor.count =
std::num::NonZeroUsize::new(cxt.editor.count.map_or(i, |c| c.get() * 10 + i));
@ -1025,14 +1041,6 @@ impl EditorView {
pub fn handle_idle_timeout(&mut self, cx: &mut commands::Context) -> EventResult {
commands::compute_inlay_hints_for_all_views(cx.editor, cx.jobs);
if let Some(completion) = &mut self.completion {
return if completion.ensure_item_resolved(cx) {
EventResult::Consumed(None)
} else {
EventResult::Ignored(None)
};
}
EventResult::Ignored(None)
}
}
@ -1086,6 +1094,15 @@ impl EditorView {
if modifiers == KeyModifiers::ALT {
let selection = doc.selection(view_id).clone();
doc.set_selection(view_id, selection.push(Range::point(pos)));
} else if editor.mode == Mode::Select {
// Discards non-primary selections for consistent UX with normal mode
let primary = doc.selection(view_id).primary().put_cursor(
doc.text().slice(..),
pos,
true,
);
editor.mouse_down_range = Some(primary);
doc.set_selection(view_id, Selection::single(primary.anchor, primary.head));
} else {
doc.set_selection(view_id, Selection::point(pos));
}
@ -1154,7 +1171,7 @@ impl EditorView {
}
let offset = config.scroll_lines.unsigned_abs();
commands::scroll(cxt, offset, direction);
commands::scroll(cxt, offset, direction, false);
cxt.editor.tree.focus = current_view;
cxt.editor.ensure_cursor_in_view(current_view);
@ -1169,19 +1186,26 @@ impl EditorView {
let (view, doc) = current!(cxt.editor);
if doc
    .selection(view.id)
    .primary()
    .slice(doc.text().slice(..))
    .len_chars()
    <= 1
{
    return EventResult::Ignored(None);
}
commands::MappableCommand::yank_main_selection_to_primary_clipboard.execute(cxt);
let should_yank = match cxt.editor.mouse_down_range.take() {
    Some(down_range) => doc.selection(view.id).primary() != down_range,
    None => {
        // This should not happen in normal cases. We fall back to the original
        // behavior of yanking on non-single-char selections.
        doc.selection(view.id)
            .primary()
            .slice(doc.text().slice(..))
            .len_chars()
            > 1
    }
};
if should_yank {
    commands::MappableCommand::yank_main_selection_to_primary_clipboard
        .execute(cxt);
    EventResult::Consumed(None)
} else {
    EventResult::Ignored(None)
}
}
MouseEventKind::Up(MouseButton::Right) => {

@ -2,6 +2,7 @@ use crate::compositor::{Component, Context};
use helix_view::graphics::{Margin, Rect};
use helix_view::info::Info;
use tui::buffer::Buffer as Surface;
use tui::text::Text;
use tui::widgets::{Block, Borders, Paragraph, Widget};
impl Component for Info {
@ -31,7 +32,7 @@ impl Component for Info {
let inner = block.inner(area).inner(&margin);
block.render(area, surface);
Paragraph::new(self.text.as_str())
Paragraph::new(&Text::from(self.text.as_str()))
.style(text_style)
.render(inner, surface);
}

@ -77,7 +77,7 @@ impl Component for SignatureHelp {
let (_, sig_text_height) = crate::ui::text::required_size(&sig_text, area.width);
let sig_text_area = area.clip_top(1).with_height(sig_text_height);
let sig_text_area = sig_text_area.inner(&margin).intersection(surface.area);
let sig_text_para = Paragraph::new(sig_text).wrap(Wrap { trim: false });
let sig_text_para = Paragraph::new(&sig_text).wrap(Wrap { trim: false });
sig_text_para.render(sig_text_area, surface);
if self.signature_doc.is_none() {
@ -100,7 +100,7 @@ impl Component for SignatureHelp {
let sig_doc_area = area
.clip_top(sig_text_area.height + 2)
.clip_bottom(u16::from(cx.editor.popup_border()));
let sig_doc_para = Paragraph::new(sig_doc)
let sig_doc_para = Paragraph::new(&sig_doc)
.wrap(Wrap { trim: false })
.scroll((cx.scroll.unwrap_or_default() as u16, 0));
sig_doc_para.render(sig_doc_area.inner(&margin), surface);

@ -346,7 +346,7 @@ impl Component for Markdown {
let text = self.parse(Some(&cx.editor.theme));
let par = Paragraph::new(text)
let par = Paragraph::new(&text)
.wrap(Wrap { trim: false })
.scroll((cx.scroll.unwrap_or_default() as u16, 0));

@ -18,6 +18,7 @@ use crate::filter_picker_entry;
use crate::job::{self, Callback};
pub use completion::{Completion, CompletionItem};
pub use editor::EditorView;
use helix_stdx::rope;
pub use markdown::Markdown;
pub use menu::Menu;
pub use picker::{DynamicPicker, FileLocation, Picker};
@ -26,8 +27,6 @@ pub use prompt::{Prompt, PromptEvent};
pub use spinner::{ProgressSpinners, Spinner};
pub use text::Text;
use helix_core::regex::Regex;
use helix_core::regex::RegexBuilder;
use helix_view::Editor;
use std::path::PathBuf;
@ -63,7 +62,22 @@ pub fn regex_prompt(
prompt: std::borrow::Cow<'static, str>,
history_register: Option<char>,
completion_fn: impl FnMut(&Editor, &str) -> Vec<prompt::Completion> + 'static,
fun: impl Fn(&mut crate::compositor::Context, Regex, PromptEvent) + 'static,
fun: impl Fn(&mut crate::compositor::Context, rope::Regex, PromptEvent) + 'static,
) {
raw_regex_prompt(
cx,
prompt,
history_register,
completion_fn,
move |cx, regex, _, event| fun(cx, regex, event),
);
}
pub fn raw_regex_prompt(
cx: &mut crate::commands::Context,
prompt: std::borrow::Cow<'static, str>,
history_register: Option<char>,
completion_fn: impl FnMut(&Editor, &str) -> Vec<prompt::Completion> + 'static,
fun: impl Fn(&mut crate::compositor::Context, rope::Regex, &str, PromptEvent) + 'static,
) {
let (view, doc) = current!(cx.editor);
let doc_id = view.doc;
@ -94,10 +108,13 @@ pub fn regex_prompt(
false
};
match RegexBuilder::new(input)
    .case_insensitive(case_insensitive)
    .multi_line(true)
    .build()
match rope::RegexBuilder::new()
    .syntax(
        rope::Config::new()
            .case_insensitive(case_insensitive)
            .multi_line(true),
    )
    .build(input)
{
Ok(regex) => {
let (view, doc) = current!(cx.editor);
@ -110,7 +127,7 @@ pub fn regex_prompt(
view.jumps.push((doc_id, snapshot.clone()));
}
fun(cx, regex, event);
fun(cx, regex, input, event);
let (view, doc) = current!(cx.editor);
view.ensure_cursor_in_view(doc, config.scrolloff);
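The prompt now compiles a rope-aware regex (helix_stdx::rope appears to wrap the regex-cursor crate, judging by the builder/Config API above). A standalone sketch of the same construction with fixed inputs:

use helix_stdx::rope;

// Hedged example: a case-insensitive, multi-line search pattern.
let regex = rope::RegexBuilder::new()
    .syntax(
        rope::Config::new()
            .case_insensitive(true)
            .multi_line(true),
    )
    .build(r"^fn \w+")
    .expect("pattern is valid");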
@ -428,9 +445,9 @@ pub mod completers {
path
} else {
match path.parent() {
Some(path) if !path.as_os_str().is_empty() => path.to_path_buf(),
Some(path) if !path.as_os_str().is_empty() => Cow::Borrowed(path),
// Path::new("h")'s parent is Some("")...
_ => helix_stdx::env::current_working_dir(),
_ => Cow::Owned(helix_stdx::env::current_working_dir()),
}
};

@ -33,7 +33,7 @@ impl Component for Text {
fn render(&mut self, area: Rect, surface: &mut Surface, _cx: &mut Context) {
use tui::widgets::{Paragraph, Widget, Wrap};
let par = Paragraph::new(self.contents.clone()).wrap(Wrap { trim: false });
let par = Paragraph::new(&self.contents).wrap(Wrap { trim: false });
// .scroll(x, y) offsets
par.render(area, surface);

@ -526,3 +526,86 @@ async fn test_join_selections() -> anyhow::Result<()> {
Ok(())
}
#[tokio::test(flavor = "multi_thread")]
async fn test_join_selections_space() -> anyhow::Result<()> {
// joining with empty lines used to panic
test((
platform_line(indoc! {"\
#[a
b
c
d
e|]#
"}),
"<A-J>",
platform_line(indoc! {"\
a#[ |]#b#( |)#c#( |)#d#( |)#e
"}),
))
.await?;
// normal join
test((
platform_line(indoc! {"\
#[a|]#bc
def
"}),
"<A-J>",
platform_line(indoc! {"\
abc#[ |]#def
"}),
))
.await?;
// join with empty line
test((
platform_line(indoc! {"\
#[a|]#bc
def
"}),
"<A-J>",
platform_line(indoc! {"\
#[a|]#bc
def
"}),
))
.await?;
// join with additional space in non-empty line
test((
platform_line(indoc! {"\
#[a|]#bc
def
"}),
"<A-J><A-J>",
platform_line(indoc! {"\
abc#[ |]#def
"}),
))
.await?;
// join with retained trailing spaces
test((
platform_line(indoc! {"\
#[aaa
bb
c |]#
"}),
"<A-J>",
platform_line(indoc! {"\
aaa #[ |]#bb #( |)#c
"}),
))
.await?;
Ok(())
}

@ -552,3 +552,57 @@ async fn find_char_line_ending() -> anyhow::Result<()> {
Ok(())
}
#[tokio::test(flavor = "multi_thread")]
async fn test_surround_replace() -> anyhow::Result<()> {
test((
platform_line(indoc! {"\
(#[|a]#)
"}),
"mrm{",
platform_line(indoc! {"\
{#[|a]#}
"}),
))
.await?;
test((
platform_line(indoc! {"\
(#[a|]#)
"}),
"mrm{",
platform_line(indoc! {"\
{#[a|]#}
"}),
))
.await?;
Ok(())
}
#[tokio::test(flavor = "multi_thread")]
async fn test_surround_delete() -> anyhow::Result<()> {
test((
platform_line(indoc! {"\
(#[|a]#)
"}),
"mdm",
platform_line(indoc! {"\
#[|a]#
"}),
))
.await?;
test((
platform_line(indoc! {"\
(#[a|]#)
"}),
"mdm",
platform_line(indoc! {"\
#[a|]#
"}),
))
.await?;
Ok(())
}

@ -28,15 +28,15 @@ fn get_line_offset(line_width: u16, text_area_width: u16, alignment: Alignment)
/// # use helix_tui::widgets::{Block, Borders, Paragraph, Wrap};
/// # use helix_tui::layout::{Alignment};
/// # use helix_view::graphics::{Style, Color, Modifier};
/// let text = vec![
/// let text = Text::from(vec![
/// Spans::from(vec![
/// Span::raw("First"),
/// Span::styled("line",Style::default().add_modifier(Modifier::ITALIC)),
/// Span::raw("."),
/// ]),
/// Spans::from(Span::styled("Second line", Style::default().fg(Color::Red))),
/// ];
/// Paragraph::new(text)
/// ]);
/// Paragraph::new(&text)
/// .block(Block::default().title("Paragraph").borders(Borders::ALL))
/// .style(Style::default().fg(Color::White).bg(Color::Black))
/// .alignment(Alignment::Center)
@ -51,7 +51,7 @@ pub struct Paragraph<'a> {
/// How to wrap the text
wrap: Option<Wrap>,
/// The text to display
text: Text<'a>,
text: &'a Text<'a>,
/// Scroll
scroll: (u16, u16),
/// Alignment of the text
@ -70,7 +70,7 @@ pub struct Paragraph<'a> {
/// - Here is another point that is long enough to wrap"#);
///
/// // With leading spaces trimmed (window width of 30 chars):
/// Paragraph::new(bullet_points.clone()).wrap(Wrap { trim: true });
/// Paragraph::new(&bullet_points).wrap(Wrap { trim: true });
/// // Some indented points:
/// // - First thing goes here and is
/// // long so that it wraps
@ -78,7 +78,7 @@ pub struct Paragraph<'a> {
/// // is long enough to wrap
///
/// // But without trimming, indentation is preserved:
/// Paragraph::new(bullet_points).wrap(Wrap { trim: false });
/// Paragraph::new(&bullet_points).wrap(Wrap { trim: false });
/// // Some indented points:
/// // - First thing goes here
/// // and is long so that it wraps
@ -92,15 +92,12 @@ pub struct Wrap {
}
impl<'a> Paragraph<'a> {
pub fn new<T>(text: T) -> Paragraph<'a>
where
T: Into<Text<'a>>,
{
pub fn new(text: &'a Text) -> Paragraph<'a> {
Paragraph {
block: None,
style: Default::default(),
wrap: None,
text: text.into(),
text,
scroll: (0, 0),
alignment: Alignment::Left,
}
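Paragraph now borrows its Text instead of taking it by value, so callers construct the Text first and keep it alive for the widget's lifetime, avoiding a clone per frame. Usage after this change, as the updated doc examples show:

let text = Text::from("hello, world");
let par = Paragraph::new(&text).wrap(Wrap { trim: false });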

@ -17,14 +17,16 @@ fn terminal_buffer_size_should_not_be_limited() {
// let backend = TestBackend::new(10, 10);
// let mut terminal = Terminal::new(backend)?;
// let frame = terminal.draw(|f| {
// let paragraph = Paragraph::new("Test");
// let text = Text::from("Test");
// let paragraph = Paragraph::new(&text);
// f.render_widget(paragraph, f.size());
// })?;
// assert_eq!(frame.buffer.get(0, 0).symbol, "T");
// assert_eq!(frame.area, Rect::new(0, 0, 10, 10));
// terminal.backend_mut().resize(8, 8);
// let frame = terminal.draw(|f| {
// let paragraph = Paragraph::new("test");
// let text = Text::from("test");
// let paragraph = Paragraph::new(&text);
// f.render_widget(paragraph, f.size());
// })?;
// assert_eq!(frame.buffer.get(0, 0).symbol, "t");

@ -21,8 +21,8 @@
// terminal
// .draw(|f| {
// let size = f.size();
// let text = vec![Spans::from(SAMPLE_STRING)];
// let paragraph = Paragraph::new(text)
// let text = Text::from(SAMPLE_STRING);
// let paragraph = Paragraph::new(&text)
// .block(Block::default().borders(Borders::ALL))
// .alignment(alignment)
// .wrap(Wrap { trim: true });
@ -88,8 +88,8 @@
// terminal
// .draw(|f| {
// let size = f.size();
// let text = vec![Spans::from(s)];
// let paragraph = Paragraph::new(text)
// let text = Text::from(s);
// let paragraph = Paragraph::new(&text)
// .block(Block::default().borders(Borders::ALL))
// .wrap(Wrap { trim: true });
// f.render_widget(paragraph, size);
@ -120,8 +120,8 @@
// terminal
// .draw(|f| {
// let size = f.size();
// let text = vec![Spans::from(s)];
// let paragraph = Paragraph::new(text)
// let text = Text::from(s);
// let paragraph = Paragraph::new(&text)
// .block(Block::default().borders(Borders::ALL))
// .wrap(Wrap { trim: true });
// f.render_widget(paragraph, size);
@ -155,8 +155,8 @@
// terminal
// .draw(|f| {
// let size = f.size();
// let paragraph = Paragraph::new(line).block(Block::default().borders(Borders::ALL));
// let text = Text::from(line);
// let paragraph = Paragraph::new(&text).block(Block::default().borders(Borders::ALL));
// f.render_widget(paragraph, size);
// })
// .unwrap();
@ -174,7 +174,7 @@
// let text = Text::from(
// "段落现在可以水平滚动了!\nParagraph can scroll horizontally!\nShort line",
// );
// let paragraph = Paragraph::new(text)
// let paragraph = Paragraph::new(&text)
// .block(Block::default().borders(Borders::ALL))
// .alignment(alignment)
// .scroll(scroll);

@ -17,7 +17,7 @@ helix-event = { path = "../helix-event" }
tokio = { version = "1", features = ["rt", "rt-multi-thread", "time", "sync", "parking_lot", "macros"] }
parking_lot = "0.12"
arc-swap = { version = "1.6.0" }
arc-swap = { version = "1.7.0" }
gix = { version = "0.58.0", features = ["attributes"], default-features = false, optional = true }
imara-diff = "0.1.5"

@ -31,7 +31,7 @@ crossterm = { version = "0.27", optional = true }
once_cell = "1.19"
url = "2.5.0"
arc-swap = { version = "1.6.0" }
arc-swap = { version = "1.7.0" }
tokio = { version = "1", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot"] }
tokio-stream = "0.1"
@ -50,7 +50,7 @@ parking_lot = "0.12.1"
[target.'cfg(windows)'.dependencies]
clipboard-win = { version = "5.1", features = ["std"] }
clipboard-win = { version = "5.2", features = ["std"] }
[target.'cfg(unix)'.dependencies]
libc = "0.2"

@ -42,7 +42,7 @@ pub use helix_core::diagnostic::Severity;
use helix_core::{
auto_pairs::AutoPairs,
syntax::{self, AutoPairConfig, IndentationHeuristic, LanguageServerFeature, SoftWrap},
Change, LineEnding, Position, Selection, NATIVE_LINE_ENDING,
Change, LineEnding, Position, Range, Selection, NATIVE_LINE_ENDING,
};
use helix_dap as dap;
use helix_lsp::lsp;
@ -914,7 +914,7 @@ pub struct Editor {
pub macro_recording: Option<(char, Vec<KeyEvent>)>,
pub macro_replaying: Vec<char>,
pub language_servers: helix_lsp::Registry,
pub diagnostics: BTreeMap<lsp::Url, Vec<(lsp::Diagnostic, usize)>>,
pub diagnostics: BTreeMap<PathBuf, Vec<(lsp::Diagnostic, usize)>>,
pub diff_providers: DiffProviderRegistry,
pub debugger: Option<dap::Client>,
@ -964,6 +964,8 @@ pub struct Editor {
/// times during rendering and should not be set by other functions.
pub cursor_cache: Cell<Option<Option<Position>>>,
pub handlers: Handlers,
pub mouse_down_range: Option<Range>,
}
pub type Motion = Box<dyn Fn(&mut Editor)>;
@ -1080,6 +1082,7 @@ impl Editor {
needs_redraw: false,
cursor_cache: Cell::new(None),
handlers,
mouse_down_range: None,
}
}
@ -1812,7 +1815,7 @@ impl Editor {
/// Returns all supported diagnostics for the document
pub fn doc_diagnostics<'a>(
language_servers: &'a helix_lsp::Registry,
diagnostics: &'a BTreeMap<lsp::Url, Vec<(lsp::Diagnostic, usize)>>,
diagnostics: &'a BTreeMap<PathBuf, Vec<(lsp::Diagnostic, usize)>>,
document: &Document,
) -> impl Iterator<Item = helix_core::Diagnostic> + 'a {
Editor::doc_diagnostics_with_filter(language_servers, diagnostics, document, |_, _| true)
@ -1822,7 +1825,7 @@ impl Editor {
/// filtered by `filter` which is invocated with the raw `lsp::Diagnostic` and the language server id it came from
pub fn doc_diagnostics_with_filter<'a>(
language_servers: &'a helix_lsp::Registry,
diagnostics: &'a BTreeMap<lsp::Url, Vec<(lsp::Diagnostic, usize)>>,
diagnostics: &'a BTreeMap<PathBuf, Vec<(lsp::Diagnostic, usize)>>,
document: &Document,
filter: impl Fn(&lsp::Diagnostic, usize) -> bool + 'a,
@ -1831,8 +1834,7 @@ impl Editor {
let language_config = document.language.clone();
document
.path()
.and_then(|path| url::Url::from_file_path(path).ok()) // TODO log error?
.and_then(|uri| diagnostics.get(&uri))
.and_then(|path| diagnostics.get(path))
.map(|diags| {
diags.iter().filter_map(move |(diagnostic, lsp_id)| {
let ls = language_servers.get_by_id(*lsp_id)?;
@ -1978,7 +1980,7 @@ impl Editor {
/// Switches the editor into normal mode.
pub fn enter_normal_mode(&mut self) {
use helix_core::{graphemes, Range};
use helix_core::graphemes;
if self.mode == Mode::Normal {
return;

@ -226,10 +226,15 @@ impl Editor {
breakpoints.iter().position(|b| b.id == breakpoint.id)
{
breakpoints[i].verified = breakpoint.verified;
breakpoints[i].message = breakpoint.message.clone();
breakpoints[i].line =
breakpoint.line.unwrap().saturating_sub(1); // TODO: no unwrap
breakpoints[i].column = breakpoint.column;
breakpoints[i].message = breakpoint
.message
.clone()
.or_else(|| breakpoints[i].message.take());
breakpoints[i].line = breakpoint
.line
.map_or(breakpoints[i].line, |line| line.saturating_sub(1));
breakpoints[i].column =
breakpoint.column.or(breakpoints[i].column);
}
}
}
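The update now preserves locally-known breakpoint fields when the DAP response omits them, instead of unwrapping. A hedged sketch of the merge pattern on plain Options (illustrative values):

// old: panicked when the adapter sent no line
//   breakpoints[i].line = breakpoint.line.unwrap().saturating_sub(1);
// new: keep the existing value unless the adapter provides one
let existing_line = 10usize;
let from_adapter: Option<usize> = None;
let line = from_adapter.map_or(existing_line, |l| l.saturating_sub(1));
assert_eq!(line, 10);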

@ -23,6 +23,7 @@ cuelsp = { command = "cuelsp" }
dart = { command = "dart", args = ["language-server", "--client-id=helix"] }
dhall-lsp-server = { command = "dhall-lsp-server" }
docker-langserver = { command = "docker-langserver", args = ["--stdio"] }
docker-compose-langserver = { command = "docker-compose-langserver", args = ["--stdio"] }
dot-language-server = { command = "dot-language-server", args = ["--stdio"] }
elixir-ls = { command = "elixir-ls", config = { elixirLS.dialyzerEnabled = false } }
elm-language-server = { command = "elm-language-server" }
@ -44,11 +45,13 @@ kotlin-language-server = { command = "kotlin-language-server" }
lean = { command = "lean", args = [ "--server" ] }
ltex-ls = { command = "ltex-ls" }
markdoc-ls = { command = "markdoc-ls", args = ["--stdio"] }
markdown-oxide = { command = "markdown-oxide" }
marksman = { command = "marksman", args = ["server"] }
metals = { command = "metals", config = { "isHttpEnabled" = true } }
mint = { command = "mint", args = ["ls"] }
nil = { command = "nil" }
nimlangserver = { command = "nimlangserver" }
nimlsp = { command = "nimlsp" }
nls = { command = "nls" }
nu-lsp = { command = "nu", args = [ "--lsp" ] }
ocamllsp = { command = "ocamllsp" }
@ -190,7 +193,12 @@ injection-regex = "rust"
file-types = ["rs"]
roots = ["Cargo.toml", "Cargo.lock"]
auto-format = true
comment-token = "//"
comment-tokens = ["//", "///", "//!"]
block-comment-tokens = [
{ start = "/*", end = "*/" },
{ start = "/**", end = "*/" },
{ start = "/*!", end = "*/" },
]
language-servers = [ "rust-analyzer" ]
indent = { tab-width = 4, unit = " " }
persistent-diagnostic-sources = ["rustc", "clippy"]
@ -282,6 +290,7 @@ injection-regex = "protobuf"
file-types = ["proto"]
language-servers = [ "bufls", "pbkit" ]
comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
indent = { tab-width = 2, unit = " " }
[[grammar]]
@ -325,6 +334,7 @@ injection-regex = "mint"
file-types = ["mint"]
shebangs = []
comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
language-servers = [ "mint" ]
indent = { tab-width = 2, unit = " " }
@ -407,6 +417,7 @@ scope = "source.c"
injection-regex = "c"
file-types = ["c"] # TODO: ["h"]
comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
language-servers = [ "clangd" ]
indent = { tab-width = 2, unit = " " }
@ -443,6 +454,7 @@ scope = "source.cpp"
injection-regex = "cpp"
file-types = ["cc", "hh", "c++", "cpp", "hpp", "h", "ipp", "tpp", "cxx", "hxx", "ixx", "txx", "ino", "C", "H", "cu", "cuh", "cppm", "h++", "ii", "inl", { glob = ".hpp.in" }, { glob = ".h.in" }]
comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
language-servers = [ "clangd" ]
indent = { tab-width = 2, unit = " " }
@ -490,6 +502,7 @@ injection-regex = "c-?sharp"
file-types = ["cs", "csx", "cake"]
roots = ["sln", "csproj"]
comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
indent = { tab-width = 4, unit = "\t" }
language-servers = [ "omnisharp" ]
@ -548,6 +561,7 @@ file-types = ["go"]
roots = ["go.work", "go.mod"]
auto-format = true
comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
language-servers = [ "gopls", "golangci-lint-lsp" ]
# TODO: gopls needs utf-8 offsets?
indent = { tab-width = 4, unit = "\t" }
@ -613,6 +627,7 @@ scope = "source.gotmpl"
injection-regex = "gotmpl"
file-types = ["gotmpl"]
comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
language-servers = [ "gopls" ]
indent = { tab-width = 2, unit = " " }
@ -642,6 +657,7 @@ language-id = "javascript"
file-types = ["js", "mjs", "cjs", "rules", "es6", "pac", { glob = "jakefile" }]
shebangs = ["node"]
comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
language-servers = [ "typescript-language-server" ]
indent = { tab-width = 2, unit = " " }
@ -668,6 +684,7 @@ injection-regex = "jsx"
language-id = "javascriptreact"
file-types = ["jsx"]
comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
language-servers = [ "typescript-language-server" ]
indent = { tab-width = 2, unit = " " }
grammar = "javascript"
@ -679,6 +696,8 @@ injection-regex = "(ts|typescript)"
file-types = ["ts", "mts", "cts"]
language-id = "typescript"
shebangs = ["deno", "ts-node"]
comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
language-servers = [ "typescript-language-server" ]
indent = { tab-width = 2, unit = " " }
@ -692,6 +711,8 @@ scope = "source.tsx"
injection-regex = "(tsx)" # |typescript
language-id = "typescriptreact"
file-types = ["tsx"]
comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
language-servers = [ "typescript-language-server" ]
indent = { tab-width = 2, unit = " " }
@ -704,6 +725,7 @@ name = "css"
scope = "source.css"
injection-regex = "css"
file-types = ["css", "scss"]
block-comment-tokens = { start = "/*", end = "*/" }
language-servers = [ "vscode-css-language-server" ]
auto-format = true
indent = { tab-width = 2, unit = " " }
@ -717,6 +739,7 @@ name = "scss"
scope = "source.scss"
injection-regex = "scss"
file-types = ["scss"]
block-comment-tokens = { start = "/*", end = "*/" }
language-servers = [ "vscode-css-language-server" ]
auto-format = true
indent = { tab-width = 2, unit = " " }
@ -730,6 +753,7 @@ name = "html"
scope = "text.html.basic"
injection-regex = "html"
file-types = ["html", "htm", "shtml", "xhtml", "xht", "jsp", "asp", "aspx", "jshtm", "volt", "rhtml"]
block-comment-tokens = { start = "<!--", end = "-->" }
language-servers = [ "vscode-html-language-server" ]
auto-format = true
indent = { tab-width = 2, unit = " " }
@ -860,6 +884,7 @@ file-types = [
"tcshrc",
"bashrc_Apple_Terminal",
"zshrc_Apple_Terminal",
{ glob = "tmux.conf" },
{ glob = ".bash_history" },
{ glob = ".bash_login" },
{ glob = ".bash_logout" },
@ -899,6 +924,7 @@ injection-regex = "php"
file-types = ["php", "inc", "php4", "php5", "phtml", "ctp"]
shebangs = ["php"]
roots = ["composer.json", "index.php"]
comment-token = "//"
language-servers = [ "intelephense" ]
indent = { tab-width = 4, unit = " " }
@ -911,6 +937,7 @@ name = "twig"
scope = "source.twig"
injection-regex = "twig"
file-types = ["twig"]
block-comment-tokens = { start = "{#", end = "#}" }
indent = { tab-width = 2, unit = " " }
[[grammar]]
@ -964,6 +991,7 @@ injection-regex = "lean"
file-types = ["lean"]
roots = [ "lakefile.lean" ]
comment-token = "--"
block-comment-tokens = { start = "/-", end = "-/" }
language-servers = [ "lean" ]
indent = { tab-width = 2, unit = " " }
@ -990,6 +1018,7 @@ file-types = ["jl"]
shebangs = ["julia"]
roots = ["Manifest.toml", "Project.toml"]
comment-token = "#"
block-comment-tokens = { start = "#=", end = "=#" }
language-servers = [ "julia" ]
indent = { tab-width = 4, unit = " " }
@ -1001,7 +1030,7 @@ source = { git = "https://github.com/tree-sitter/tree-sitter-julia", rev = "8fb3
name = "java"
scope = "source.java"
injection-regex = "java"
file-types = ["java", "jav"]
file-types = ["java", "jav", "pde"]
roots = ["pom.xml", "build.gradle", "build.gradle.kts"]
language-servers = [ "jdtls" ]
indent = { tab-width = 2, unit = " " }
@ -1053,6 +1082,7 @@ scope = "source.ocaml"
injection-regex = "ocaml"
file-types = ["ml"]
shebangs = ["ocaml", "ocamlrun", "ocamlscript"]
block-comment-tokens = { start = "(*", end = "*)" }
comment-token = "(**)"
language-servers = [ "ocamllsp" ]
indent = { tab-width = 2, unit = " " }
@ -1072,6 +1102,7 @@ name = "ocaml-interface"
scope = "source.ocaml.interface"
file-types = ["mli"]
shebangs = []
block-comment-tokens = { start = "(*", end = "*)" }
comment-token = "(**)"
language-servers = [ "ocamllsp" ]
indent = { tab-width = 2, unit = " " }
@ -1091,15 +1122,16 @@ name = "lua"
injection-regex = "lua"
scope = "source.lua"
file-types = ["lua"]
shebangs = ["lua"]
shebangs = ["lua", "luajit"]
roots = [".luarc.json", ".luacheckrc", ".stylua.toml", "selene.toml", ".git"]
comment-token = "--"
block-comment-tokens = { start = "--[[", end = "--]]" }
indent = { tab-width = 2, unit = " " }
language-servers = [ "lua-language-server" ]
[[grammar]]
name = "lua"
source = { git = "https://github.com/MunifTanjim/tree-sitter-lua", rev = "887dfd4e83c469300c279314ff1619b1d0b85b91" }
source = { git = "https://github.com/tree-sitter-grammars/tree-sitter-lua", rev = "88e446476a1e97a8724dff7a23e2d709855077f2" }
[[language]]
name = "svelte"
@ -1119,6 +1151,7 @@ scope = "source.vue"
injection-regex = "vue"
file-types = ["vue"]
roots = ["package.json"]
block-comment-tokens = { start = "<!--", end = "-->" }
indent = { tab-width = 2, unit = " " }
language-servers = [ "vuels" ]
@ -1146,6 +1179,7 @@ injection-regex = "haskell"
file-types = ["hs", "hs-boot"]
roots = ["Setup.hs", "stack.yaml", "cabal.project"]
comment-token = "--"
block-comment-tokens = { start = "{-", end = "-}" }
language-servers = [ "haskell-language-server" ]
indent = { tab-width = 2, unit = " " }
@ -1171,6 +1205,7 @@ injection-regex = "purescript"
file-types = ["purs"]
roots = ["spago.yaml", "spago.dhall", "bower.json"]
comment-token = "--"
block-comment-tokens = { start = "{-", end = "-}" }
language-servers = [ "purescript-language-server" ]
indent = { tab-width = 2, unit = " " }
auto-format = true
@ -1225,6 +1260,7 @@ scope = "source.prolog"
file-types = ["pl", "prolog"]
shebangs = ["swipl"]
comment-token = "%"
block-comment-tokens = { start = "/*", end = "*/" }
language-servers = [ "swipl" ]
[[language]]
@ -1244,6 +1280,7 @@ name = "cmake"
scope = "source.cmake"
file-types = ["cmake", { glob = "CMakeLists.txt" }]
comment-token = "#"
block-comment-tokens = { start = "#[[", end = "]]" }
indent = { tab-width = 2, unit = " " }
language-servers = [ "cmake-language-server" ]
injection-regex = "cmake"
@ -1270,6 +1307,7 @@ name = "glsl"
scope = "source.glsl"
file-types = ["glsl", "vert", "tesc", "tese", "geom", "frag", "comp" ]
comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
indent = { tab-width = 4, unit = " " }
injection-regex = "glsl"
@ -1307,6 +1345,7 @@ file-types = ["rkt", "rktd", "rktl", "scrbl"]
shebangs = ["racket"]
comment-token = ";"
indent = { tab-width = 2, unit = " " }
block-comment-tokens = { start = "#|", end = "|#" }
language-servers = [ "racket" ]
grammar = "scheme"
@ -1341,6 +1380,7 @@ name = "wgsl"
scope = "source.wgsl"
file-types = ["wgsl"]
comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
language-servers = [ "wgsl_analyzer" ]
indent = { tab-width = 4, unit = " " }
@ -1387,6 +1427,7 @@ name = "tablegen"
scope = "source.tablegen"
file-types = ["td"]
comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
indent = { tab-width = 2, unit = " " }
injection-regex = "tablegen"
@ -1400,8 +1441,9 @@ scope = "source.md"
injection-regex = "md|markdown"
file-types = ["md", "markdown", "mkd", "mdwn", "mdown", "markdn", "mdtxt", "mdtext", "workbook", { glob = "PULLREQ_EDITMSG" }]
roots = [".marksman.toml"]
language-servers = [ "marksman" ]
language-servers = [ "marksman", "markdown-oxide" ]
indent = { tab-width = 2, unit = " " }
block-comment-tokens = { start = "<!--", end = "-->" }
[[grammar]]
name = "markdown"
@ -1425,6 +1467,7 @@ file-types = ["dart"]
roots = ["pubspec.yaml"]
auto-format = true
comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
language-servers = [ "dart" ]
indent = { tab-width = 2, unit = " " }
@ -1438,6 +1481,7 @@ scope = "source.scala"
roots = ["build.sbt", "build.sc", "build.gradle", "build.gradle.kts", "pom.xml", ".scala-build"]
file-types = ["scala", "sbt", "sc"]
comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
indent = { tab-width = 2, unit = " " }
language-servers = [ "metals" ]
@ -1450,7 +1494,20 @@ name = "dockerfile"
scope = "source.dockerfile"
injection-regex = "docker|dockerfile"
roots = ["Dockerfile", "Containerfile"]
file-types = [{ glob = "Dockerfile*" }, { glob = "dockerfile*" }, { glob = "Containerfile*" }, { glob = "containerfile*" }]
file-types = [
"Dockerfile",
{ glob = "Dockerfile" },
{ glob = "Dockerfile.*" },
"dockerfile",
{ glob = "dockerfile" },
{ glob = "dockerfile.*" },
"Containerfile",
{ glob = "Containerfile" },
{ glob = "Containerfile.*" },
"containerfile",
{ glob = "containerfile" },
{ glob = "containerfile.*" },
]
comment-token = "#"
indent = { tab-width = 2, unit = " " }
language-servers = [ "docker-langserver" ]
@ -1459,6 +1516,16 @@ language-servers = [ "docker-langserver" ]
name = "dockerfile"
source = { git = "https://github.com/camdencheek/tree-sitter-dockerfile", rev = "8ee3a0f7587b2bd8c45c8cb7d28bd414604aec62" }
[[language]]
name = "docker-compose"
scope = "source.yaml.docker-compose"
roots = ["docker-compose.yaml", "docker-compose.yml"]
language-servers = [ "docker-compose-langserver" ]
file-types = [{ glob = "docker-compose.yaml" }, { glob = "docker-compose.yml" }]
comment-token = "#"
indent = { tab-width = 2, unit = " " }
grammar = "yaml"
[[language]]
name = "git-commit"
scope = "git.commitmsg"
@ -1548,6 +1615,8 @@ scope = "source.graphql"
injection-regex = "graphql"
file-types = ["gql", "graphql", "graphqls"]
language-servers = [ "graphql-language-service" ]
comment-token = "#"
block-comment-tokens = { start = "\"\"\"", end = "\"\"\"" }
indent = { tab-width = 2, unit = " " }
[[grammar]]
@ -1562,6 +1631,7 @@ file-types = ["elm"]
roots = ["elm.json"]
auto-format = true
comment-token = "--"
block-comment-tokens = { start = "{-", end = "-}" }
language-servers = [ "elm-language-server" ]
indent = { tab-width = 4, unit = " " }
@ -1574,6 +1644,7 @@ name = "iex"
scope = "source.iex"
injection-regex = "iex"
file-types = ["iex"]
comment-token = "#"
[[grammar]]
name = "iex"
@ -1587,6 +1658,7 @@ file-types = ["res"]
roots = ["bsconfig.json"]
auto-format = true
comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
language-servers = [ "rescript-language-server" ]
indent = { tab-width = 2, unit = " " }
@ -1598,7 +1670,7 @@ source = { git = "https://github.com/jaredramirez/tree-sitter-rescript", rev = "
name = "erlang"
scope = "source.erlang"
injection-regex = "erl(ang)?"
file-types = ["erl", "hrl", "app", { glob = "rebar.config" }, { glob = "rebar.lock" }]
file-types = ["erl", "hrl", "app", { glob = "rebar.config" }, { glob = "rebar.lock" }, { glob = "*.app.src" }]
roots = ["rebar.config"]
shebangs = ["escript"]
comment-token = "%%"
@ -1615,7 +1687,7 @@ language-servers = [ "erlang-ls" ]
[[grammar]]
name = "erlang"
source = { git = "https://github.com/the-mikedavis/tree-sitter-erlang", rev = "ce0ed253d72c199ab93caba7542b6f62075339c4" }
source = { git = "https://github.com/the-mikedavis/tree-sitter-erlang", rev = "731e50555a51f0d8635992b0e60dc98cc47a58d7" }
[[language]]
name = "kotlin"
@ -1623,6 +1695,7 @@ scope = "source.kotlin"
file-types = ["kt", "kts"]
roots = ["settings.gradle", "settings.gradle.kts"]
comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
indent = { tab-width = 4, unit = " " }
language-servers = [ "kotlin-language-server" ]
@ -1637,6 +1710,7 @@ injection-regex = "(hcl|tf|nomad)"
language-id = "terraform"
file-types = ["hcl", "tf", "nomad"]
comment-token = "#"
block-comment-tokens = { start = "/*", end = "*/" }
indent = { tab-width = 2, unit = " " }
language-servers = [ "terraform-ls" ]
auto-format = true
@ -1651,6 +1725,7 @@ scope = "source.tfvars"
language-id = "terraform-vars"
file-types = ["tfvars"]
comment-token = "#"
block-comment-tokens = { start = "/*", end = "*/" }
indent = { tab-width = 2, unit = " " }
language-servers = [ "terraform-ls" ]
auto-format = true
@ -1673,6 +1748,7 @@ scope = "source.sol"
injection-regex = "(sol|solidity)"
file-types = ["sol"]
comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
indent = { tab-width = 4, unit = " " }
language-servers = [ "solc" ]
@ -1701,6 +1777,7 @@ scope = "source.ron"
injection-regex = "ron"
file-types = ["ron"]
comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
indent = { tab-width = 4, unit = " " }
[[grammar]]
@ -1742,6 +1819,7 @@ injection-regex = "(r|R)md"
file-types = ["rmd", "Rmd"]
indent = { tab-width = 2, unit = " " }
grammar = "markdown"
block-comment-tokens = { start = "<!--", end = "-->" }
language-servers = [ "r" ]
[[language]]
@ -1751,6 +1829,7 @@ injection-regex = "swift"
file-types = ["swift"]
roots = [ "Package.swift" ]
comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
auto-format = true
language-servers = [ "sourcekit-lsp" ]
@ -1763,6 +1842,7 @@ name = "erb"
scope = "text.html.erb"
injection-regex = "erb"
file-types = ["erb"]
block-comment-tokens = { start = "<!--", end = "-->" }
indent = { tab-width = 2, unit = " " }
grammar = "embedded-template"
@ -1771,6 +1851,7 @@ name = "ejs"
scope = "text.html.ejs"
injection-regex = "ejs"
file-types = ["ejs"]
block-comment-tokens = { start = "<!--", end = "-->" }
indent = { tab-width = 2, unit = " " }
grammar = "embedded-template"
@ -1784,6 +1865,7 @@ scope = "source.eex"
injection-regex = "eex"
file-types = ["eex"]
roots = ["mix.exs", "mix.lock"]
block-comment-tokens = { start = "<!--", end = "-->" }
indent = { tab-width = 2, unit = " " }
[[grammar]]
@ -1796,6 +1878,7 @@ scope = "source.heex"
injection-regex = "heex"
file-types = ["heex"]
roots = ["mix.exs", "mix.lock"]
block-comment-tokens = { start = "<!--", end = "-->" }
indent = { tab-width = 2, unit = " " }
language-servers = [ "elixir-ls" ]
@ -1808,12 +1891,13 @@ name = "sql"
scope = "source.sql"
file-types = ["sql", "dsql"]
comment-token = "--"
block-comment-tokens = { start = "/*", end = "*/" }
indent = { tab-width = 4, unit = " " }
injection-regex = "sql"
[[grammar]]
name = "sql"
source = { git = "https://github.com/DerekStride/tree-sitter-sql", rev = "25be0b8f17e9189ad9e1b875869d025c5aec1286" }
source = { git = "https://github.com/DerekStride/tree-sitter-sql", rev = "da2d1eff425b146d3c8cab7be8dfa98b11d896dc" }
[[language]]
name = "gdscript"
@ -1866,6 +1950,7 @@ scope = "source.vala"
injection-regex = "vala"
file-types = ["vala", "vapi"]
comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
indent = { tab-width = 2, unit = " " }
language-servers = [ "vala-language-server" ]
@ -1891,6 +1976,7 @@ scope = "source.devicetree"
injection-regex = "(dtsi?|devicetree|fdt)"
file-types = ["dts", "dtsi"]
comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
indent = { tab-width = 4, unit = "\t" }
[[grammar]]
@ -1929,6 +2015,7 @@ file-types = ["odin"]
roots = ["ols.json"]
language-servers = [ "ols" ]
comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
indent = { tab-width = 4, unit = "\t" }
formatter = { command = "odinfmt", args = [ "-stdin", "true" ] }
@ -1986,6 +2073,7 @@ roots = ["v.mod"]
language-servers = [ "vlang-language-server" ]
auto-format = true
comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
indent = { tab-width = 4, unit = "\t" }
[[grammar]]
@ -1997,6 +2085,7 @@ name = "verilog"
scope = "source.verilog"
file-types = ["v", "vh", "sv", "svh"]
comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
language-servers = [ "svlangserver" ]
indent = { tab-width = 2, unit = " " }
injection-regex = "verilog"
@ -2033,6 +2122,7 @@ scope = "source.openscad"
injection-regex = "openscad"
file-types = ["scad"]
comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
language-servers = [ "openscad-lsp" ]
indent = { tab-width = 2, unit = "\t" }
@ -2080,6 +2170,7 @@ grammar = "python"
[[language]]
name = "elvish"
scope = "source.elvish"
shebangs = ["elvish"]
file-types = ["elv"]
comment-token = "#"
indent = { tab-width = 2, unit = " " }
@ -2097,6 +2188,7 @@ injection-regex = "idr"
file-types = ["idr"]
shebangs = []
comment-token = "--"
block-comment-tokens = { start = "{-", end = "-}" }
indent = { tab-width = 2, unit = " " }
language-servers = [ "idris2-lsp" ]
@ -2132,6 +2224,7 @@ scope = "source.dot"
injection-regex = "dot"
file-types = ["dot"]
comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
indent = { tab-width = 4, unit = " " }
language-servers = [ "dot-language-server" ]
@ -2161,12 +2254,13 @@ scope = "source.slint"
injection-regex = "slint"
file-types = ["slint"]
comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
indent = { tab-width = 4, unit = " " }
language-servers = [ "slint-lsp" ]
[[grammar]]
name = "slint"
source = { git = "https://github.com/slint-ui/tree-sitter-slint", rev = "15618215b79b9db08f824a5c97a12d073dcc1c00" }
source = { git = "https://github.com/slint-ui/tree-sitter-slint", rev = "3c82235f41b63f35a01ae3888206e93585cbb84a" }
[[language]]
name = "task"
@ -2210,6 +2304,7 @@ scope = "source.pascal"
injection-regex = "pascal"
file-types = ["pas", "pp", "inc", "lpr", "lfm"]
comment-token = "//"
block-comment-tokens = { start = "{", end = "}" }
indent = { tab-width = 2, unit = " " }
language-servers = [ "pasls" ]
@ -2222,7 +2317,7 @@ name = "sml"
scope = "source.sml"
injection-regex = "sml"
file-types = ["sml"]
comment-token = "(*"
block-comment-tokens = { start = "(*", end = "*)" }
[[grammar]]
name = "sml"
@ -2234,6 +2329,7 @@ scope = "source.jsonnet"
file-types = ["libsonnet", "jsonnet"]
roots = ["jsonnetfile.json"]
comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
indent = { tab-width = 2, unit = " " }
language-servers = [ "jsonnet-language-server" ]
@ -2246,6 +2342,7 @@ name = "astro"
scope = "source.astro"
injection-regex = "astro"
file-types = ["astro"]
block-comment-tokens = { start = "<!--", end = "-->" }
indent = { tab-width = 2, unit = " " }
[[grammar]]
@ -2269,6 +2366,7 @@ source = { git = "https://github.com/vito/tree-sitter-bass", rev = "501133e260d7
name = "wat"
scope = "source.wat"
comment-token = ";;"
block-comment-tokens = { start = "(;", end = ";)" }
file-types = ["wat"]
[[grammar]]
@ -2279,6 +2377,7 @@ source = { git = "https://github.com/wasm-lsp/tree-sitter-wasm", rev = "2ca28a9f
name = "wast"
scope = "source.wast"
comment-token = ";;"
block-comment-tokens = { start = "(;", end = ";)" }
file-types = ["wast"]
[[grammar]]
@ -2290,6 +2389,7 @@ name = "d"
scope = "source.d"
file-types = [ "d", "dd" ]
comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
injection-regex = "d"
indent = { tab-width = 4, unit = " "}
language-servers = [ "serve-d" ]
@ -2316,6 +2416,7 @@ name = "kdl"
scope = "source.kdl"
file-types = ["kdl"]
comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
injection-regex = "kdl"
[[grammar]]
@ -2359,6 +2460,8 @@ file-types = [
"menu",
"mxml",
"nuspec",
"osc",
"osm",
"pt",
"publishsettings",
"pubxml",
@ -2382,8 +2485,10 @@ file-types = [
"xul",
"xoml",
"musicxml",
"glif"
"glif",
"ui"
]
block-comment-tokens = { start = "<!--", end = "-->" }
indent = { tab-width = 2, unit = " " }
[language.auto-pairs]
@ -2423,6 +2528,7 @@ scope = "source.wit"
injection-regex = "wit"
file-types = ["wit"]
comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
indent = { tab-width = 2, unit = " " }
[language.auto-pairs]
@ -2487,6 +2593,7 @@ scope = "source.bicep"
file-types = ["bicep"]
auto-format = true
comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
indent = { tab-width = 2, unit = " "}
language-servers = [ "bicep-langserver" ]
@ -2499,6 +2606,8 @@ name = "qml"
scope = "source.qml"
file-types = ["qml"]
language-servers = [ "qmlls" ]
comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
indent = { tab-width = 4, unit = " " }
grammar = "qmljs"
@ -2538,6 +2647,7 @@ injection-regex = "pony"
roots = ["corral.json", "lock.json"]
indent = { tab-width = 2, unit = " " }
comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
[[grammar]]
name = "ponylang"
@ -2549,6 +2659,7 @@ scope = "source.dhall"
injection-regex = "dhall"
file-types = ["dhall"]
comment-token = "--"
block-comment-tokens = { start = "{-", end = "-}" }
indent = { tab-width = 2, unit = " " }
language-servers = [ "dhall-lsp-server" ]
formatter = { command = "dhall" , args = ["format"] }
@ -2572,6 +2683,7 @@ scope = "source.msbuild"
injection-regex = "msbuild"
file-types = ["proj", "vbproj", "csproj", "fsproj", "targets", "props"]
indent = { tab-width = 2, unit = " " }
block-comment-tokens = { start = "<!--", end = "-->" }
grammar = "xml"
[language.auto-pairs]
@ -2618,7 +2730,7 @@ scope = "source.tal"
injection-regex = "tal"
file-types = ["tal"]
auto-format = false
comment-token = "("
block-comment-tokens = { start = "(", end = ")" }
[[grammar]]
name = "uxntal"
@ -2752,6 +2864,7 @@ injection-regex = "nim"
file-types = ["nim", "nims", "nimble"]
shebangs = []
comment-token = "#"
block-comment-tokens = { start = "#[", end = "]#" }
indent = { tab-width = 2, unit = " " }
language-servers = [ "nimlangserver" ]
@ -2762,10 +2875,9 @@ language-servers = [ "nimlangserver" ]
"'" = "'"
'{' = '}'
# Nim's tree-sitter grammar is in heavy development.
[[grammar]]
name = "nim"
source = { git = "https://github.com/aMOPel/tree-sitter-nim", rev = "240239b232550e431d67de250d1b5856209e7f06" }
source = { git = "https://github.com/alaviss/tree-sitter-nim", rev = "c5f0ce3b65222f5dbb1a12f9fe894524881ad590" }
[[language]]
name = "cabal"
@ -2791,6 +2903,7 @@ source = { git = "https://github.com/pfeiferj/tree-sitter-hurl", rev = "264c4206
[[language]]
name = "markdoc"
scope = "text.markdoc"
block-comment-tokens = { start = "<!--", end = "-->" }
file-types = ["mdoc"]
language-servers = [ "markdoc-ls" ]
@ -2816,9 +2929,9 @@ scope = "source.just"
file-types = [{ glob = "justfile" }, { glob = "Justfile" }, { glob = ".justfile" }, { glob = ".Justfile" }]
injection-regex = "just"
comment-token = "#"
indent = { tab-width = 4, unit = "\t" }
auto-format = true
formatter = { command = "just", args = ["--dump"] }
indent = { tab-width = 4, unit = " " }
# auto-format = true
# formatter = { command = "just", args = ["--dump"] } # Please see: https://github.com/helix-editor/helix/issues/9703
[[grammar]]
name = "just"
@ -2844,6 +2957,7 @@ scope = "source.blueprint"
injection-regex = "blueprint"
file-types = ["blp"]
comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
language-servers = [ "blueprint-compiler" ]
indent = { tab-width = 4, unit = " " }
@ -2896,6 +3010,7 @@ name = "webc"
scope = "text.html.webc"
injection-regex = "webc"
file-types = ["webc"]
block-comment-tokens = { start = "<!--", end = "-->" }
indent = { tab-width = 2, unit = " " }
grammar = "html"
@ -3113,3 +3228,34 @@ indent = { tab-width = 2, unit = " " }
[[grammar]]
name = "pkl"
source = { git = "https://github.com/apple/tree-sitter-pkl", rev = "c03f04a313b712f8ab00a2d862c10b37318699ae" }
[[language]]
name = "groovy"
language-id = "groovy"
scope = "source.groovy"
file-types = ["gradle", "groovy", "jenkinsfile", { glob = "Jenkinsfile" }, { glob = "Jenkinsfile.*" }]
shebangs = ["groovy"]
comment-token = "//"
indent = { tab-width = 2, unit = " " }
[[grammar]]
name = "groovy"
source = { git = "https://github.com/Decodetalkers/tree-sitter-groovy", rev = "7e023227f46fee428b16a0288eeb0f65ee2523ec" }
[[language]]
name = "fidl"
scope = "source.fidl"
injection-regex = "fidl"
file-types = ["fidl"]
comment-token = "//"
indent = { tab-width = 4, unit = " " }
[language.auto-pairs]
'"' = '"'
'{' = '}'
'(' = ')'
'<' = '>'
[[grammar]]
name = "fidl"
source = { git = "https://github.com/google/tree-sitter-fidl", rev = "bdbb635a7f5035e424f6173f2f11b9cd79703f8d" }

@ -56,9 +56,34 @@
(documentation_comment)+ @comment.around
(formal_parameter) @parameter.inside
(formal_parameter_list
(
(formal_parameter) @parameter.inside . ","? @parameter.around
) @parameter.around
)
(optional_formal_parameters
(
(formal_parameter) @parameter.inside . ","? @parameter.around
) @parameter.around
)
(arguments
(
[
(argument) @parameter.inside
(named_argument (label) . (_)* @parameter.inside)
]
. ","? @parameter.around
) @parameter.around
)
(formal_parameter_list) @parameter.around
(type_arguments
(
((_) . ("." . (_) @parameter.inside @parameter.around)?) @parameter.inside
. ","? @parameter.around
) @parameter.around
)
(expression_statement
((identifier) @_name (#any-of? @_name "test" "testWidgets"))

@ -145,8 +145,9 @@
((atom) @constant.builtin.boolean
(#match? @constant.builtin.boolean "^(true|false)$"))
(atom) @string.special.symbol
(string) @string
[(string) (sigil)] @string
(character) @constant.character
(escape_sequence) @constant.character.escape
(integer) @constant.numeric.integer
(float) @constant.numeric.float

@ -0,0 +1,6 @@
[
(layout_declaration)
(protocol_declaration)
(resource_declaration)
(service_declaration)
] @fold

@ -0,0 +1,64 @@
[
"ajar"
"alias"
"as"
"bits"
"closed"
"compose"
"const"
"enum"
"error"
"flexible"
"library"
"open"
; "optional" we did not specify a node for optional yet
"overlay"
"protocol"
"reserved"
"resource"
"service"
"strict"
"struct"
"table"
"type"
"union"
"using"
] @keyword
(primitives_type) @type.builtin
(builtin_complex_type) @type.builtin
(const_declaration
(identifier) @constant)
[
"="
"|"
"&"
"->"
] @operator
(attribute
"@" @attribute
(identifier) @attribute)
(string_literal) @string
(numeric_literal) @constant.numeric
[
(true)
(false)
] @constant.builtin.boolean
(comment) @comment
[
"("
")"
"<"
">"
"{"
"}"
] @punctuation.bracket

@ -0,0 +1,2 @@
((comment) @injection.content
(#set! injection.language "comment"))

@ -0,0 +1,96 @@
(unit
(identifier) @variable)
(string
(identifier) @variable)
(escape_sequence) @constant.character.escape
(block
(unit
(identifier) @namespace))
(func
(identifier) @function)
(number) @constant.numeric
((identifier) @constant.builtin.boolean
(#any-of? @constant.builtin.boolean "true" "false"))
((identifier) @constant
(#match? @constant "^[A-Z][A-Z\\d_]*$"))
((identifier) @constant.builtin
(#eq? @constant.builtin "null"))
((identifier) @type
(#any-of? @type
"String"
"Map"
"Object"
"Boolean"
"Integer"
"List"))
((identifier) @function.builtin
(#any-of? @function.builtin
"void"
"id"
"version"
"apply"
"implementation"
"testImplementation"
"androidTestImplementation"
"debugImplementation"))
((identifier) @keyword.storage.modifier
(#eq? @keyword.storage.modifier "static"))
((identifier) @keyword.storage.type
(#any-of? @keyword.storage.type "class" "def" "interface"))
((identifier) @keyword
(#any-of? @keyword
"assert"
"new"
"extends"
"implements"
"instanceof"))
((identifier) @keyword.control.import
(#any-of? @keyword.control.import "import" "package"))
((identifier) @keyword.storage.modifier
(#any-of? @keyword.storage.modifier
"abstract"
"protected"
"private"
"public"))
((identifier) @keyword.control.exception
(#any-of? @keyword.control.exception
"throw"
"finally"
"try"
"catch"))
(string) @string
[
(line_comment)
(block_comment)
] @comment
((block_comment) @comment.block.documentation
(#match? @comment.block.documentation "^/[*][*][^*](?s:.)*[*]/$"))
((line_comment) @comment.block.documentation
(#match? @comment.block.documentation "^///[^/]*.*$"))
[
(operators)
(leading_key)
] @operator
["(" ")" "[" "]" "{" "}"] @punctuation.bracket

@ -0,0 +1,2 @@
([(line_comment) (block_comment)] @injection.content
(#set! injection.language "comment"))

@ -0,0 +1,6 @@
(comment) @comment.inside
(comment)+ @comment.around
(function_arguments
((_) @parameter.inside . ","? @parameter.around) @parameter.around)

@ -1,9 +1,5 @@
;;; Highlighting for lua
;;; Builtins
((identifier) @variable.builtin
(#eq? @variable.builtin "self"))
;; Keywords
(if_statement
@ -130,16 +126,65 @@
((identifier) @constant
(#match? @constant "^[A-Z][A-Z_0-9]*$"))
;; Parameters
(parameters
(identifier) @variable.parameter)
;; Tables
(field name: (identifier) @variable.other.member)
(dot_index_expression field: (identifier) @variable.other.member)
(table_constructor
[
"{"
"}"
] @constructor)
;; Functions
(parameters (identifier) @variable.parameter)
(function_call
(identifier) @function.builtin
(#any-of? @function.builtin
;; built-in functions in Lua 5.1
"assert" "collectgarbage" "dofile" "error" "getfenv" "getmetatable" "ipairs"
"load" "loadfile" "loadstring" "module" "next" "pairs" "pcall" "print"
"rawequal" "rawget" "rawset" "require" "select" "setfenv" "setmetatable"
"tonumber" "tostring" "type" "unpack" "xpcall"))
; ;; Functions
(function_declaration name: (identifier) @function)
(function_call name: (identifier) @function.call)
(function_declaration
name: [
(identifier) @function
(dot_index_expression
field: (identifier) @function)
])
(function_declaration
name: (method_index_expression
method: (identifier) @function.method))
(function_declaration name: (dot_index_expression field: (identifier) @function))
(function_call name: (dot_index_expression field: (identifier) @function.call))
(assignment_statement
(variable_list .
name: [
(identifier) @function
(dot_index_expression
field: (identifier) @function)
])
(expression_list .
value: (function_definition)))
(table_constructor
(field
name: (identifier) @function
value: (function_definition)))
(function_call
name: [
(identifier) @function.call
(dot_index_expression
field: (identifier) @function.call)
(method_index_expression
method: (identifier) @function.method.call)
])
; TODO: incorrectly highlights variable N in `N, nop = 42, function() end`
(assignment_statement
@ -153,6 +198,7 @@
;; Nodes
(comment) @comment
(string) @string
(escape_sequence) @constant.character.escape
(number) @constant.numeric.integer
(label_statement) @label
; A bit of a tricky one, this will only match field names
@ -162,7 +208,16 @@
;; Property
(dot_index_expression field: (identifier) @variable.other.member)
;; Variable
;; Variables
((identifier) @variable.builtin
(#eq? @variable.builtin "self"))
(variable_list
(attribute
"<" @punctuation.bracket
(identifier) @attribute
">" @punctuation.bracket))
(identifier) @variable
;; Error

@ -1,33 +1,32 @@
;; Constants, Comments, and Literals
(comment) @comment.line
(multilineComment) @comment.block
(docComment) @comment.block.documentation
(multilineDocComment) @comment.block.documentation
; comments
[(literal) (generalizedLit)] @constant
[(nil_lit)] @constant.builtin
[(bool_lit)] @constant.builtin.boolean
[(char_lit)] @constant.character
[(char_esc_seq) (str_esc_seq)] @constant.character.escape
[(custom_numeric_lit)] @constant.numeric
[(int_lit) (int_suffix)] @constant.numeric.integer
[(float_lit) (float_suffix)] @constant.numeric.float
(block_comment) @comment.block
[
(documentation_comment)
(block_documentation_comment)
] @comment.block.documentation
(nil_literal) @constant.builtin
((identifier) @constant.builtin.boolean
(#any-of? @constant.builtin.boolean "true" "false" "on" "off"))
(char_literal) @constant.character
(escape_sequence) @constant.character.escape
(custom_numeric_literal) @constant.numeric
(integer_literal) @constant.numeric.integer
(float_literal) @constant.numeric.float
; literals
; note: somewhat irritatingly for testing, lits have the same syntax highlighting as types
; todo: literal?
[
(str_lit)
(triplestr_lit)
(rstr_lit)
(generalized_str_lit)
(generalized_triplestr_lit)
(interpolated_str_lit)
(interpolated_triplestr_lit)
(long_string_literal)
(raw_string_literal)
(generalized_string)
(interpreted_string_literal)
] @string
; (generalized_string (string_content) @none) ; todo: attempt to un-match string_content
; [] @string.regexp
; string literals
[
"."
@ -44,272 +43,291 @@
"}"
"{."
".}"
"#["
"]#"
] @punctuation.bracket
(interpolated_str_lit "&" @punctuation.special)
(interpolated_str_lit "{" @punctuation.special)
(interpolated_str_lit "}" @punctuation.special)
; punctuation
; todo: interpolated_str_lit?? & { }?
[
"and"
"or"
"xor"
"not"
"in"
"notin"
"is"
"isnot"
"div"
"mod"
"shl"
"shr"
"from"
"as"
"of"
"in"
"notin"
"is"
"isnot"
] @keyword.operator
; operators: we list them explicitly to distinguish them from symbolic operators
[(operator) (opr) "="] @operator
; all operators (must come after @keyword.operator)
(pragma) @attribute
; pragmas
[(operator) "="] @operator
(infix_expression operator: _ @operator)
(prefix_expression operator: _ @operator)
(pragma_list
(identifier)? @attribute
(colon_expression
(identifier) @attribute)?)
;; Imports and Exports
(importStmt
(keyw) @keyword.control.import
(expr (primary (symbol) @namespace))?
(expr (primary (arrayConstr (exprColonExprList (exprColonExpr (expr (primary (symbol) @namespace)))))))?)
(exportStmt
(keyw) @keyword.control.import
(expr (primary (symbol) @namespace))?
(expr (primary (arrayConstr (exprColonExprList (exprColonExpr (expr (primary (symbol) @namespace)))))))?)
(fromStmt
(keyw) @keyword.control.import
(expr (primary (symbol) @namespace))?
(expr (primary (arrayConstr (exprColonExprList (exprColonExpr (expr (primary (symbol) @namespace)))))))?)
(includeStmt
(keyw) @keyword.control.import
(expr (primary (symbol) @namespace))?
(expr (primary (arrayConstr (exprColonExprList (exprColonExpr (expr (primary (symbol) @namespace)))))))?)
(importExceptStmt
(keyw) @keyword.control.import
(expr (primary (symbol) @namespace))?
(expr (primary (arrayConstr (exprColonExprList (exprColonExpr (expr (primary (symbol) @namespace)))))))?)
; import statements
; yeah, this is a bit gross.
[
"import"
"export"
"include"
"from"
] @keyword.control.import
(import_statement
[
(identifier) @namespace
(expression_list (identifier) @namespace)
(except_clause
"except" @keyword.control.import
(expression_list (identifier) @namespace))])
(import_from_statement
(identifier) @namespace
(expression_list (identifier) @namespace))
(include_statement (expression_list (identifier) @namespace))
(export_statement (expression_list (identifier) @namespace))
;; Control Flow
(ifStmt (keyw) @keyword.control.conditional)
(whenStmt (keyw) @keyword.control.conditional)
(elifStmt (keyw) @keyword.control.conditional)
(elseStmt (keyw) @keyword.control.conditional)
(caseStmt (keyw) @keyword.control.conditional)
(ofBranch (keyw) @keyword.control.conditional)
(inlineIfStmt (keyw) @keyword.control.conditional)
(inlineWhenStmt (keyw) @keyword.control.conditional)
[
"if"
"when"
"case"
"elif"
"else"
] @keyword.control.conditional
(of_branch "of" @keyword.control.conditional)
; conditional statements
; todo: do block
(forStmt
. (keyw) @keyword.control.repeat
. (symbol) @variable
. (keyw) @keyword.control.repeat)
(whileStmt (keyw) @keyword.control.repeat)
; loop statements
(returnStmt (keyw) @keyword.control.repeat)
(yieldStmt (keyw) @keyword.control.repeat)
(discardStmt (keyw) @keyword.control.repeat)
(breakStmt (keyw) @keyword.control.repeat)
(continueStmt (keyw) @keyword.control.repeat)
; control flow statements
(raiseStmt (keyw) @keyword.control.exception)
(tryStmt (keyw) @keyword.control.exception)
(tryExceptStmt (keyw) @keyword.control.exception)
(tryFinallyStmt (keyw) @keyword.control.exception)
(inlineTryStmt (keyw) @keyword.control.exception)
; (inlineTryExceptStmt (keyw) @keyword.control.exception)
; (inlineTryFinallyStmt (keyw) @keyword.control.exception)
; exception handling statements
(staticStmt (keyw) @keyword)
(deferStmt (keyw) @keyword)
(asmStmt (keyw) @keyword)
(bindStmt (keyw) @keyword)
(mixinStmt (keyw) @keyword)
; miscellaneous blocks
(blockStmt
(keyw) @keyword.control
(symbol) @label)
; block statements
;; Types and Type Declarations
(typeDef
(keyw) @keyword.storage.type
(symbol) @type)
; names of new types in type declarations
(exprColonEqExpr
. (expr (primary (symbol) @variable))
. (expr (primary (symbol) @type)))
; variables in inline tuple declarations
(primarySuffix
(indexSuffix
(exprColonEqExprList
(exprColonEqExpr
(expr
(primary
(symbol) @type))))))
; nested types in brackets, i.e. seq[string]
(primaryTypeDef (symbol) @type)
; primary types of type declarations (NOT nested types)
(primaryTypeDef (primaryPrefix (keyw) @type))
; for consistency
(primaryTypeDesc (symbol) @type)
; type annotations, on declarations or in objects
(primaryTypeDesc (primaryPrefix (keyw) @type))
; var types etc
(genericParamList (genericParam (symbol) @type))
; types in generic blocks
(enumDecl (keyw) @keyword.storage.type)
(enumElement (symbol) @type.enum.variant)
; enum declarations and elements
(tupleDecl (keyw) @keyword.storage.type)
; tuple declarations
(objectDecl (keyw) @keyword.storage.type)
(objectPart (symbol) @variable.other.member)
; object declarations and fields
(objectCase
(keyw) @keyword.control.conditional
(symbol) @variable.other.member)
(objectBranch (keyw) @keyword.control.conditional)
(objectElif (keyw) @keyword.control.conditional)
(objectElse (keyw) @keyword.control.conditional)
(objectWhen (keyw) @keyword.control.conditional)
; variant objects
(conceptDecl (keyw) @keyword.storage.type)
(conceptParam (keyw) @type)
(conceptParam (symbol) @variable)
; concept declarations, parameters, and qualifiers on those parameters
((expr
(primary (symbol))
(operator) @operator
(primary (symbol) @type))
(#match? @operator "is"))
((exprStmt
(primary (symbol))
(operator) @operator
(primary (symbol) @type))
(#match? @operator "is"))
; symbols likely to be types: "x is t" means t is either a type or a type variable
; distinct?
"block" @keyword.control
(block label: (_) @label)
[
"for"
"while"
"continue"
"break"
] @keyword.control.repeat
(for "in" @keyword.control.repeat)
;; Functions
[
"return"
"yield"
] @keyword.control.return
; return statements
(routine
. (keyw) @keyword.function
. (symbol) @function)
; function declarations
[
"try"
"except"
"finally"
"raise"
] @keyword.control.exception
; exception handling statements
(routineExpr (keyw) @keyword.function)
; discarded function
[
"asm"
"bind"
"mixin"
"defer"
"static"
] @keyword
; miscellaneous keywords
(routineExprTypeDesc (keyw) @keyword.function)
; function declarations as types
;; Types and Type Declarations
(primary
. (symbol) @function.call
. (primarySuffix (functionCall)))
; regular function calls
[
"let"
"var"
"const"
"type"
"object"
"tuple"
"enum"
"concept"
] @keyword.storage.type
(var_type "var" @keyword.storage.modifier)
(out_type "out" @keyword.storage.modifier)
(distinct_type "distinct" @keyword.storage.modifier)
(ref_type "ref" @keyword.storage.modifier)
(pointer_type "ptr" @keyword.storage.modifier)
(var_parameter "var" @keyword.storage.modifier)
(type_parameter "type" @keyword.storage.modifier)
(static_parameter "static" @keyword.storage.modifier)
(ref_parameter "ref" @keyword.storage.modifier)
(pointer_parameter "ptr" @keyword.storage.modifier)
; (var_parameter (identifier) @variable.parameter)
; (type_parameter (identifier) @variable.parameter)
; (static_parameter (identifier) @variable.parameter)
; (ref_parameter (identifier) @variable.parameter)
; (pointer_parameter (identifier) @variable.parameter)
; todo: when are these used??
(type_section
(type_declaration
(type_symbol_declaration
name: (_) @type)))
; types in type declarations
(enum_field_declaration
(symbol_declaration
name: (_) @type.enum.variant))
; types as enum variants
(variant_declaration
alternative: (of_branch
values: (expression_list (_) @type.enum.variant)))
; types as object variants
(case
(of_branch
values: (expression_list (_) @constant)))
; case values are guaranteed to be constant
(type_expression
[
(identifier) @type
(bracket_expression
[
(identifier) @type
(argument_list (identifier) @type)])
(tuple_construction
[
(identifier) @type
(bracket_expression
[
(identifier) @type
(argument_list (identifier) @type)])])])
; types in type expressions
(primary
. (symbol) @function.call
. (primarySuffix (cmdCall)))
; function calls without parenthesis
(call
function: (bracket_expression
right: (argument_list (identifier) @type)))
; types as generic parameters
(primary
(primarySuffix (qualifiedSuffix (symbol) @function.call))
. (primarySuffix (functionCall)))
; uniform function call syntax calls
; (dot_generic_call
; generic_arguments: (_) @type)
; ???
(primary
(primarySuffix (qualifiedSuffix (symbol) @function.call))
. (primarySuffix (cmdCall)))
; just in case
(infix_expression
operator:
[
"is"
"isnot"
]
right: (_) @type)
; types in "is" comparisions
(primary
(symbol) @constructor
(primarySuffix (objectConstr)))
; object constructor
(except_branch
values: (expression_list
[
(identifier) @type
(infix_expression
left: (identifier) @type
operator: "as"
right: (_) @variable)]))
; types in exception branches
; there does not appear to be a way to distinguish these without verbatim matching
; [] @function.builtin
; [] @function.method
; [] @function.macro
; [] @function.special
;; Functions
[
"proc"
"func"
"method"
"converter"
"iterator"
"template"
"macro"
] @keyword.function
(exported_symbol "*" @attribute)
(_ "=" @punctuation.delimiter [body: (_) value: (_)])
(proc_declaration name: (_) @function)
(func_declaration name: (_) @function)
(iterator_declaration name: (_) @function)
(converter_declaration name: (_) @function)
(method_declaration name: (_) @function.method)
(template_declaration name: (_) @function.macro)
(macro_declaration name: (_) @function.macro)
(symbol_declaration name: (_) @variable)
(call
function: [
(identifier) @function.call
(dot_expression
right: (identifier) @function.call)
(bracket_expression
left: [
(identifier) @function.call
(dot_expression
right: (identifier) @function.call)])])
(generalized_string
function: [
(identifier) @function.call
(dot_expression
right: (identifier) @function.call)
(bracket_expression
left: [
(identifier) @function.call
(dot_expression
right: (identifier) @function.call)])])
(dot_generic_call function: (_) @function.call)
;; Variables
(paramList (paramColonEquals (symbol) @variable.parameter))
; parameter identifiers
(identColon (ident) @variable.other.member)
; named parts of tuples
(symbolColonExpr (symbol) @variable)
; object constructor parameters
(parameter_declaration
(symbol_declaration_list
(symbol_declaration
name: (_) @variable.parameter)))
(argument_list
(equal_expression
left: (_) @variable.parameter))
(concept_declaration
parameters: (parameter_list (identifier) @variable.parameter))
(field_declaration
(symbol_declaration_list
(symbol_declaration
name: (_) @variable.other.member)))
(call
(argument_list
(colon_expression
left: (_) @variable.other.member)))
(tuple_construction
(colon_expression
left: (_) @variable.other.member))
(variant_declaration
(variant_discriminator_declaration
(symbol_declaration_list
(symbol_declaration
name: (_) @variable.other.member))))
;; Miscellaneous Matches
(symbolEqExpr (symbol) @variable)
; named parameters
(variable
(keyw) @keyword.storage.type
(declColonEquals (symbol) @variable))
; let, var, const expressions
((primary (symbol) @variable.builtin)
(#match? @variable.builtin "result"))
; `result` is an implicit builtin variable inside function scopes
((primary (symbol) @type)
(#match? @type "^[A-Z]"))
; assume PascalCase identifiers to be types
((primary
(primarySuffix
(qualifiedSuffix
(symbol) @type)))
(#match? @type "^[A-Z]"))
; assume PascalCase member variables to be enum entries
[
"cast"
"discard"
"do"
] @keyword
; also: addr end interface using
(primary (symbol) @variable)
; overzealous, matches variables
(blank_identifier) @variable.builtin
((identifier) @variable.builtin
(#eq? @variable.builtin "result"))
(primary (primarySuffix (qualifiedSuffix (symbol) @variable.other.member)))
; overzealous, matches member variables: i.e. x in foo.x
(dot_expression
left: (identifier) @variable
right: (identifier) @variable.other.member)
(keyw) @keyword
; more specific matches are done above whenever possible
(identifier) @variable

@ -1,48 +1,59 @@
[
(typeDef)
(ifStmt)
(whenStmt)
(elifStmt)
(elseStmt)
(ofBranch) ; note: not caseStmt
(whileStmt)
(tryStmt)
(tryExceptStmt)
(tryFinallyStmt)
(forStmt)
(blockStmt)
(staticStmt)
(deferStmt)
(asmStmt)
; exprStmt?
(if)
(when)
(elif_branch)
(else_branch)
(of_branch) ; note: not case_statement
(block)
(while)
(for)
(try)
(except_branch)
(finally_branch)
(defer)
(static_statement)
(proc_declaration)
(func_declaration)
(iterator_declaration)
(converter_declaration)
(method_declaration)
(template_declaration)
(macro_declaration)
(symbol_declaration)
] @indent
;; increase the indentation level
[
(ifStmt)
(whenStmt)
(elifStmt)
(elseStmt)
(ofBranch) ; note: not caseStmt
(whileStmt)
(tryStmt)
(tryExceptStmt)
(tryFinallyStmt)
(forStmt)
(blockStmt)
(staticStmt)
(deferStmt)
(asmStmt)
; exprStmt?
(if)
(when)
(elif_branch)
(else_branch)
(of_branch) ; note: not case_statement
(block)
(while)
(for)
(try)
(except_branch)
(finally_branch)
(defer)
(static_statement)
(proc_declaration)
(func_declaration)
(iterator_declaration)
(converter_declaration)
(method_declaration)
(template_declaration)
(macro_declaration)
(symbol_declaration)
] @extend
;; ???
[
(returnStmt)
(raiseStmt)
(yieldStmt)
(breakStmt)
(continueStmt)
(return_statement)
(raise_statement)
(yield_statement)
(break_statement)
(continue_statement)
] @extend.prevent-once
;; end a level of indentation while staying indented

@ -1,19 +1,33 @@
(routine
(block) @function.inside) @function.around
(proc_declaration
body: (_) @function.inside) @function.around
(func_declaration
body: (_) @function.inside) @function.around
(iterator_declaration
body: (_) @function.inside) @function.around
(converter_declaration
body: (_) @function.inside) @function.around
(method_declaration
body: (_) @function.inside) @function.around
(template_declaration
body: (_) @function.inside) @function.around
(macro_declaration
body: (_) @function.inside) @function.around
; @class.inside (types?)
; @class.around
(type_declaration (_) @class.inside) @class.around
; paramListSuffix is strange and I do not understand it
(paramList
(paramColonEquals) @parameter.inside) @parameter.around
(parameter_declaration
(symbol_declaration_list) @parameter.inside) @parameter.around
(comment) @comment.inside
(multilineComment) @comment.inside
(docComment) @comment.inside
(multilineDocComment) @comment.inside
[
(comment)
(block_comment)
(documentation_comment)
(block_documentation_comment)
] @comment.inside
(comment)+ @comment.around
(multilineComment) @comment.around
(docComment)+ @comment.around
(multilineDocComment) @comment.around
[
(comment)+
(block_comment)
(documentation_comment)+
(block_documentation_comment)+
] @comment.around

@ -0,0 +1,9 @@
(comment) @comment.inside
(comment)+ @comment.around
(formals
((_) @parameter.inside . ","? @parameter.around) @parameter.around)
(function_expression
body: (_) @function.inside) @function.around

@ -34,6 +34,9 @@
(arguments
((_) @parameter.inside . ","? @parameter.around) @parameter.around)
(field_initializer_list
((_) @parameter.inside . ","? @parameter.around) @parameter.around)
[
(line_comment)
(block_comment)

@ -24,20 +24,20 @@
(term
alias: (identifier) @variable.parameter)
(term
((term
value: (cast
name: (keyword_cast) @function.builtin
parameter: [(literal)]?))
parameter: [(literal)]?)))
(literal) @string
(comment) @comment.line
(marginalia) @comment.block
((literal) @constant.numeric.integer
(#match? @constant.numeric.integer "^-?\\d+$"))
(#match? @constant.numeric.integer "^[-+]?\\d+$"))
((literal) @constant.numeric.float
(#match? @constant.numeric.float "^-?\\d*\\.\\d*$"))
(#match? @constant.numeric.float "^[-+]?\\d*\\.\\d*$"))
(parameter) @variable.parameter

@ -19,13 +19,6 @@
(quoted_attribute_value (attribute_value) @css))
(#eq? @_attr "style"))
((script_element
(raw_text) @injection.content)
(#set! injection.language "javascript"))
((raw_text_expr) @injection.content
(#set! injection.language "javascript"))
(
(script_element
(start_tag
@ -36,5 +29,12 @@
(#set! injection.language "typescript")
)
((script_element
(raw_text) @injection.content)
(#set! injection.language "javascript"))
((raw_text_expr) @injection.content
(#set! injection.language "javascript"))
((comment) @injection.content
(#set! injection.language "comment"))

@ -1,6 +1,10 @@
# An approximation/port of the Cyan Light Theme from Jetbrains
#
# Original Color Scheme here https://plugins.jetbrains.com/plugin/12102-cyan-light-theme
# Cyan Light
# Adapted from JetBrains' Cyan Light Theme https://plugins.jetbrains.com/plugin/12102-cyan-light-theme
# Author: Abderrahmane Tahri Jouti <tj.abderrahmane@gmail.com>
# Original Author : Olga Berdnikova
# LICENSE : MIT
# Source: https://github.com/OlyaB/CyanTheme
"attribute" = "blue"
"type" = "shade07"

@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2016 CloudCannon
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

@ -0,0 +1,120 @@
# Monokai Soda port for Helix (https://helix-editor.com)
# Author : Jimmy Zelinskie <jimmy@zelinskie.com>
# Syntax
## Constants
"constant" = "white"
"constant.builtin" = "pink"
"constant.character.escape" = "blue"
"constant.numeric" = "purple"
## Diagnostics
"diagnostic" = { modifiers = ["underlined"] }
"diagnostic.error" = { underline = { style = "curl", color = "pink" } }
"diagnostic.warning" = { underline = { style = "curl", color = "orange" } }
"diagnostic.info" = { underline = { style = "curl", color = "white" } }
## Diffs
"diff.plus" = "green"
"diff.delta" = "orange"
"diff.minus" = "pink"
"diff.delta.moved" = "orange"
## Functions
"function" = "green"
"function.macro" = "blue"
"function.builtin" = "pink"
"constructor" = "blue"
## Keywords
"keyword" = "pink"
"keyword.directive" = "blue"
## Punctuation
"punctuation" = "gray"
## Strings
"string" = "yellow"
## Types
"type" = "blue"
"type.builtin" = "pink"
## Variables
"variable" = "white"
"variable.builtin" = "pink"
"variable.other.member" = "white"
"variable.parameter" = "softorange"
## Markup
"markup.heading" = "green"
"markup.bold" = { fg = "orange", modifiers = ["bold"] }
"markup.italic" = { fg = "orange", modifiers = ["italic"] }
"markup.link.url" = { fg = "orange", modifiers = ["underlined"] }
"markup.link.text" = "yellow"
"markup.quote" = "green"
## Misc
"attribute" = "blue"
"comment" = { fg = "gray", modifiers = ["italic"] }
"error" = "pink"
"hint" = "white"
"info" = "white"
"label" = "yellow"
"module" = "softorange"
"namespace" = "pink"
"operator" = "pink"
"special" = "softorange"
"warning" = "orange"
# Editor UI
## Main
"ui.background" = { bg = "background" }
"ui.text" = "white"
"ui.window" = { bg = "darkgray" }
## Debug (TODO)
## Menus
"ui.menu" = { fg = "white", bg = "darkgray" }
"ui.menu.selected" = { modifiers = ["reversed"] }
"ui.popup" = { bg = "darkgray" }
"ui.help" = { fg = "white", bg = "darkgray" }
## Gutter
"ui.linenr" = "darkgray"
"ui.linenr.selected" = "orange"
## Cursor
"ui.cursor.primary" = { fg = "white", modifiers = ["reversed"] }
"ui.cursor.match" = { fg = "white", modifiers = ["reversed"] }
"ui.selection" = { bg = "darkgray" }
## Statusline
"ui.statusline" = { bg = "darkgray" }
"ui.statusline.inactive" = { fg = "white", bg = "darkgray" }
"ui.statusline.normal" = { fg = "white", bg = "blue" }
"ui.statusline.insert" = { fg = "white", bg = "green" }
"ui.statusline.select" = { fg = "white", bg = "purple" }
"ui.text.focus" = { fg = "yellow", modifiers = ["bold"] }
"ui.virtual" = "darkgray"
"ui.virtual.ruler" = { bg = "darkgray" }
# Palette
[palette]
"purple" = "#AE81FF"
"yellow" = "#E6DB74"
"pink" = "#f92a72"
"white" = "#cfcfc2"
"gray" = "#75715e"
"darkgray" = "#444444"
"black" = "#222222"
"blue" = "#66d9ef"
"green" = "#a6e22e"
"softorange" = "#f59762"
"orange" = "#fd971f"
"background" = "#191919"

@ -6,24 +6,23 @@
# License: MIT License
"type" = "blue"
"constant" = "purple"
"constant" = "fg"
"constant.numeric" = "purple"
"constant.character.escape" = "orange"
"string" = "yellow"
"comment" = "grey"
"variable" = "fg"
"variable.builtin" = "orange"
"variable.builtin" = "purple"
"variable.parameter" = "fg"
"variable.other.member" = "fg"
"label" = "orange"
"variable.other.member" = "orange"
"label" = "red"
"punctuation" = "grey"
"punctuation.delimiter" = "grey"
"punctuation.bracket" = "fg"
"punctuation.special" = "yellow"
"keyword" = "red"
"operator" = "orange"
"operator" = "red"
"function" = "green"
"function.builtin" = "blue"
"function.macro" = "purple"
"function.builtin" = "green"
"function.macro" = "green"
"tag" = "yellow"
"namespace" = "blue"
"attribute" = "purple"
@ -48,12 +47,12 @@
"markup.raw" = "green"
"diff.plus" = "green"
"diff.delta" = "orange"
"diff.delta" = "blue"
"diff.minus" = "red"
"ui.background" = { bg = "bg0" }
"ui.cursor" = { modifiers = ['reversed'] }
"ui.cursor.match" = { fg = "orange", bg = "diff_yellow" }
"ui.cursor.match" = { bg = "bg4" }
"ui.cursor.insert" = { fg = "black", bg = "grey" }
"ui.cursor.select" = { fg = "bg0", bg = "blue" }
"ui.selection" = { bg = "bg5" }
@ -73,7 +72,7 @@
"ui.text.focus" = "green"
"ui.menu" = { fg = "fg", bg = "bg2" }
"ui.menu.selected" = { fg = "bg0", bg = "green" }
"ui.virtual.whitespace" = { fg = "grey_dim" }
"ui.virtual.whitespace" = "bg4"
"ui.virtual.ruler" = { bg = "bg3" }
"ui.virtual.inlay-hint" = { fg = "grey_dim" }
@ -92,11 +91,12 @@ error = { fg = 'red', bg = 'bg2', modifiers = ['bold'] }
[palette]
black = "#181819"
bg_dim = "#222327"
bg0 = "#2c2e34"
bg1 = "#33353f"
bg2 = "#363944"
bg3 = "#3b3e48"
bg4 = "#5C606A"
bg4 = "#414550"
bg5 = "#444852"
bg_red = "#ff6077"
diff_red = "#55393d"

@ -58,13 +58,13 @@ variable = { fg = "fg" }
"diff.plus" = { fg = "add" }
error = { fg = "error" }
hint = { fg = "hint" }
info = { fg = "info" }
warning = { fg = "yellow" }
"diagnostic.error" = { underline = { style = "curl" } }
"diagnostic.warning" = { underline = { style = "curl" } }
"diagnostic.info" = { underline = { style = "curl" } }
"diagnostic.hint" = { underline = { style = "curl" } }
info = { fg = "info" }
hint = { fg = "hint" }
"diagnostic.error" = { underline = { style = "curl", color = "error" } }
"diagnostic.warning" = { underline = { style = "curl", color = "yellow"} }
"diagnostic.info" = { underline = { style = "curl", color = "info"} }
"diagnostic.hint" = { underline = { style = "curl", color = "hint" } }
"ui.background" = { bg = "bg", fg = "fg" }
"ui.cursor" = { modifiers = ["reversed"] }
@ -114,8 +114,8 @@ change = "#6183bb"
delete = "#914c54"
error = "#db4b4b"
hint = "#1abc9c"
info = "#0db9d7"
hint = "#1abc9c"
fg = "#c0caf5"
fg-dark = "#a9b1d6"

@ -80,6 +80,8 @@ label = "honey"
"diagnostic.info" = { underline = { color = "delta", style = "curl" } }
"diagnostic.warning" = { underline = { color = "lightning", style = "curl" } }
"diagnostic.error" = { underline = { color = "apricot", style = "curl" } }
"diagnostic.unnecessary" = { modifiers = ["dim"] }
"diagnostic.deprecated" = { modifiers = ["crossed_out"] }
warning = "lightning"
error = "apricot"
