Merge branch 'helix-editor:master' into nullspace

pull/11409/head
Stephen Broadley committed 2 weeks ago (committed via GitHub)
commit 71b0cb65b8

@@ -14,7 +14,7 @@ jobs:
         uses: actions/checkout@v4
       - name: Install nix
-        uses: cachix/install-nix-action@V28
+        uses: cachix/install-nix-action@v30
       - name: Authenticate with Cachix
         uses: cachix/cachix-action@v15

Cargo.lock (generated)

@@ -68,9 +68,9 @@ dependencies = [

 [[package]]
 name = "anyhow"
-version = "1.0.89"
+version = "1.0.90"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "86fdf8605db99b54d3cd748a44c6d04df638eb5dafb219b135d0149bd0db01f6"
+checksum = "37bf3594c4c988a53154954629820791dde498571819ae4ca50ca811e060cc95"

 [[package]]
 name = "arc-swap"
@@ -136,9 +136,9 @@ checksum = "df8670b8c7b9dae1793364eafadf7239c40d669904660c5960d74cfd80b46a53"

 [[package]]
 name = "cc"
-version = "1.1.21"
+version = "1.1.31"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "07b1695e2c7e8fc85310cde85aeaab7e3097f593c91d209d3f9df76c928100f0"
+checksum = "c2e7962b54006dcfcc61cb72735f4d89bb97061dd6a7ed882ec6b8ee53714c6f"
 dependencies = [
  "shlex",
 ]
@@ -355,9 +355,9 @@ checksum = "e8c02a5121d4ea3eb16a80748c74f5549a5665e4c21333c6098f283870fbdea6"

 [[package]]
 name = "fern"
-version = "0.6.2"
+version = "0.7.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d9f0c14694cbd524c8720dd69b0e3179344f04ebb5f90f2e4a440c6ea3b2f1ee"
+checksum = "69ff9c9d5fb3e6da8ac2f77ab76fe7e8087d512ce095200f8f29ac5b656cf6dc"
 dependencies = [
  "log",
 ]
@@ -412,15 +412,15 @@ dependencies = [

 [[package]]
 name = "futures-core"
-version = "0.3.30"
+version = "0.3.31"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dfc6580bb841c5a68e9ef15c77ccc837b40a7504914d52e47b8b0e9bbda25a1d"
+checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e"

 [[package]]
 name = "futures-executor"
-version = "0.3.30"
+version = "0.3.31"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a576fc72ae164fca6b9db127eaa9a9dda0d61316034f33a0a0d4eda41f02b01d"
+checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f"
 dependencies = [
  "futures-core",
  "futures-task",
@@ -429,15 +429,15 @@ dependencies = [

 [[package]]
 name = "futures-task"
-version = "0.3.30"
+version = "0.3.31"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "38d84fa142264698cdce1a9f9172cf383a0c82de1bddcf3092901442c4097004"
+checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988"

 [[package]]
 name = "futures-util"
-version = "0.3.30"
+version = "0.3.31"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3d6401deb83407ab3da39eba7e33987a73c3df0c82b4bb5813ee871c19c41d48"
+checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81"
 dependencies = [
  "futures-core",
  "futures-task",
@@ -1609,9 +1609,9 @@ dependencies = [

 [[package]]
 name = "libc"
-version = "0.2.158"
+version = "0.2.161"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d8adc4bb1803a324070e64a98ae98f38934d91957a99cfb3a43dcbc01bc56439"
+checksum = "8e9489c2807c139ffd9c1794f4af0ebe86a828db53ecdc7fea2111d0fed085d1"

 [[package]]
 name = "libloading"
@@ -1753,9 +1753,9 @@ dependencies = [

 [[package]]
 name = "once_cell"
-version = "1.19.0"
+version = "1.20.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92"
+checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775"

 [[package]]
 name = "open"
@@ -1838,9 +1838,9 @@ checksum = "744a264d26b88a6a7e37cbad97953fa233b94d585236310bcbc88474b4092d79"

 [[package]]
 name = "pulldown-cmark"
-version = "0.12.1"
+version = "0.12.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "666f0f59e259aea2d72e6012290c09877a780935cc3c18b1ceded41f3890d59c"
+checksum = "f86ba2052aebccc42cbbb3ed234b8b13ce76f75c3551a303cb2bcffcff12bb14"
 dependencies = [
  "bitflags",
  "memchr",
@@ -1914,9 +1914,9 @@ dependencies = [

 [[package]]
 name = "regex"
-version = "1.10.6"
+version = "1.11.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4219d74c6b67a3654a9fbebc4b419e22126d13d2f3c4a07ee0cb61ff79a79619"
+checksum = "38200e5ee88914975b69f657f0801b6f6dccafd44fd9326302a4aaeecfacb1d8"
 dependencies = [
  "aho-corasick",
  "memchr",
@@ -1926,9 +1926,9 @@ dependencies = [

 [[package]]
 name = "regex-automata"
-version = "0.4.7"
+version = "0.4.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "38caf58cc5ef2fed281f89292ef23f6365465ed9a41b7a7754eb4e26496c92df"
+checksum = "368758f23274712b504848e9d5a6f010445cc8b87a7cdb4d7cbee666c1288da3"
 dependencies = [
  "aho-corasick",
  "memchr",
@@ -1950,9 +1950,9 @@ dependencies = [

 [[package]]
 name = "regex-syntax"
-version = "0.8.4"
+version = "0.8.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7a66a03ae7c801facd77a29370b4faec201768915ac14a721ba36f20bc9c209b"
+checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c"

 [[package]]
 name = "ropey"
@@ -2026,9 +2026,9 @@ dependencies = [

 [[package]]
 name = "serde_json"
-version = "1.0.128"
+version = "1.0.132"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6ff5456707a1de34e7e37f2a6fd3d3f808c318259cbd01ab6377795054b483d8"
+checksum = "d726bfaff4b320266d395898905d0eba0345aae23b54aee3a737e260fd46db03"
 dependencies = [
  "itoa",
  "memchr",
@@ -2192,9 +2192,9 @@ dependencies = [

 [[package]]
 name = "tempfile"
-version = "3.12.0"
+version = "3.13.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "04cbcdd0c794ebb0d4cf35e88edd2f7d2c4c3e9a5a6dab322839b321c6a87a64"
+checksum = "f0f2c9fc62d0beef6951ccffd757e241266a2c833136efbe35af6cd2567dca5b"
 dependencies = [
  "cfg-if",
  "fastrand",

@@ -47,7 +47,7 @@ Note: Only certain languages have indentation definitions at the moment. Check

 [Installation documentation](https://docs.helix-editor.com/install.html).

-[![Packaging status](https://repology.org/badge/vertical-allrepos/helix.svg?exclude_unsupported=1)](https://repology.org/project/helix/versions)
+[![Packaging status](https://repology.org/badge/vertical-allrepos/helix-editor.svg?exclude_unsupported=1)](https://repology.org/project/helix-editor/versions)

 # Contributing

@@ -29,6 +29,7 @@
 | crystal | ✓ | ✓ |  | `crystalline` |
 | css | ✓ |  | ✓ | `vscode-css-language-server` |
 | cue | ✓ |  |  | `cuelsp` |
+| cylc | ✓ | ✓ | ✓ |  |
 | d | ✓ | ✓ | ✓ | `serve-d` |
 | dart | ✓ | ✓ | ✓ | `dart` |
 | dbml | ✓ |  |  |  |
@@ -39,6 +40,7 @@
 | dockerfile | ✓ | ✓ |  | `docker-langserver` |
 | dot | ✓ |  |  | `dot-language-server` |
 | dtd | ✓ |  |  |  |
+| dune | ✓ |  |  |  |
 | earthfile | ✓ | ✓ | ✓ | `earthlyls` |
 | edoc | ✓ |  |  |  |
 | eex | ✓ |  |  |  |
@@ -68,7 +70,7 @@
 | gjs | ✓ | ✓ | ✓ | `typescript-language-server`, `vscode-eslint-language-server`, `ember-language-server` |
 | gleam | ✓ | ✓ |  | `gleam` |
 | glimmer | ✓ |  |  | `ember-language-server` |
-| glsl | ✓ | ✓ | ✓ |  |
+| glsl | ✓ | ✓ | ✓ | `glsl_analyzer` |
 | gn | ✓ |  |  |  |
 | go | ✓ | ✓ | ✓ | `gopls`, `golangci-lint-langserver` |
 | godot-resource | ✓ | ✓ |  |  |
@@ -185,6 +187,7 @@
 | smali | ✓ |  | ✓ |  |
 | smithy | ✓ |  |  | `cs` |
 | sml | ✓ |  |  |  |
+| snakemake | ✓ |  | ✓ | `pylsp` |
 | solidity | ✓ | ✓ |  | `solc` |
 | spicedb | ✓ |  |  |  |
 | sql | ✓ | ✓ |  |  |

@@ -17,7 +17,7 @@
 - [Chocolatey](#chocolatey)
 - [MSYS2](#msys2)

-[![Packaging status](https://repology.org/badge/vertical-allrepos/helix.svg)](https://repology.org/project/helix/versions)
+[![Packaging status](https://repology.org/badge/vertical-allrepos/helix-editor.svg)](https://repology.org/project/helix-editor/versions)

 ## Linux

@@ -283,7 +283,6 @@ These scopes are used for theming the editor interface:
 | `ui.debug.active` | Indicator for the line at which debugging execution is paused at, found in the gutter |
 | `ui.gutter` | Gutter |
 | `ui.gutter.selected` | Gutter for the line the cursor is on |
-| `ui.highlight.frameline` | Line at which debugging execution is paused at |
 | `ui.linenr` | Line numbers |
 | `ui.linenr.selected` | Line number for the line the cursor is on |
 | `ui.statusline` | Statusline |
@@ -320,6 +319,7 @@ These scopes are used for theming the editor interface:
 | `ui.selection` | For selections in the editing area |
 | `ui.selection.primary` | |
 | `ui.highlight` | Highlighted lines in the picker preview |
+| `ui.highlight.frameline` | Line at which debugging execution is paused at |
 | `ui.cursorline.primary` | The line of the primary cursor ([if cursorline is enabled][editor-section]) |
 | `ui.cursorline.secondary` | The lines of any other cursors ([if cursorline is enabled][editor-section]) |
 | `ui.cursorcolumn.primary` | The column of the primary cursor ([if cursorcolumn is enabled][editor-section]) |

@@ -1,17 +1,12 @@
 {
   "nodes": {
     "crane": {
-      "inputs": {
-        "nixpkgs": [
-          "nixpkgs"
-        ]
-      },
       "locked": {
-        "lastModified": 1709610799,
-        "narHash": "sha256-5jfLQx0U9hXbi2skYMGodDJkIgffrjIOgMRjZqms2QE=",
+        "lastModified": 1727974419,
+        "narHash": "sha256-WD0//20h+2/yPGkO88d2nYbb23WMWYvnRyDQ9Dx4UHg=",
         "owner": "ipetkov",
         "repo": "crane",
-        "rev": "81c393c776d5379c030607866afef6406ca1be57",
+        "rev": "37e4f9f0976cb9281cd3f0c70081e5e0ecaee93f",
         "type": "github"
       },
       "original": {
@@ -25,11 +20,11 @@
         "systems": "systems"
       },
       "locked": {
-        "lastModified": 1709126324,
-        "narHash": "sha256-q6EQdSeUZOG26WelxqkmR7kArjgWCdw5sfJVHPH/7j8=",
+        "lastModified": 1726560853,
+        "narHash": "sha256-X6rJYSESBVr3hBoH0WbKE5KvhPU5bloyZ2L4K60/fPQ=",
         "owner": "numtide",
         "repo": "flake-utils",
-        "rev": "d465f4819400de7c8d874d50b982301f28a84605",
+        "rev": "c1dfcf08411b08f6b8615f7d8971a2bfa81d5e8a",
         "type": "github"
       },
       "original": {
@@ -40,11 +35,11 @@
     },
     "nixpkgs": {
       "locked": {
-        "lastModified": 1709479366,
-        "narHash": "sha256-n6F0n8UV6lnTZbYPl1A9q1BS0p4hduAv1mGAP17CVd0=",
+        "lastModified": 1728018373,
+        "narHash": "sha256-NOiTvBbRLIOe5F6RbHaAh6++BNjsb149fGZd1T4+KBg=",
         "owner": "nixos",
         "repo": "nixpkgs",
-        "rev": "b8697e57f10292a6165a20f03d2f42920dfaf973",
+        "rev": "bc947f541ae55e999ffdb4013441347d83b00feb",
         "type": "github"
       },
       "original": {
@@ -64,19 +59,16 @@
     },
     "rust-overlay": {
       "inputs": {
-        "flake-utils": [
-          "flake-utils"
-        ],
         "nixpkgs": [
           "nixpkgs"
         ]
       },
       "locked": {
-        "lastModified": 1709604635,
-        "narHash": "sha256-le4fwmWmjGRYWwkho0Gr7mnnZndOOe4XGbLw68OvF40=",
+        "lastModified": 1728268235,
+        "narHash": "sha256-lJMFnMO4maJuNO6PQ5fZesrTmglze3UFTTBuKGwR1Nw=",
         "owner": "oxalica",
         "repo": "rust-overlay",
-        "rev": "e86c0fb5d3a22a5f30d7f64ecad88643fe26449d",
+        "rev": "25685cc2c7054efc31351c172ae77b21814f2d42",
         "type": "github"
       },
       "original": {

@@ -6,15 +6,9 @@
     flake-utils.url = "github:numtide/flake-utils";
     rust-overlay = {
       url = "github:oxalica/rust-overlay";
-      inputs = {
-        nixpkgs.follows = "nixpkgs";
-        flake-utils.follows = "flake-utils";
-      };
-    };
-    crane = {
-      url = "github:ipetkov/crane";
       inputs.nixpkgs.follows = "nixpkgs";
     };
+    crane.url = "github:ipetkov/crane";
   };

   outputs = {
@@ -114,7 +108,7 @@
         if pkgs.stdenv.isLinux
         then pkgs.stdenv
         else pkgs.clangStdenv;
-      rustFlagsEnv = pkgs.lib.optionalString stdenv.isLinux "-C link-arg=-fuse-ld=lld -C target-cpu=native -Clink-arg=-Wl,--no-rosegment";
+      rustFlagsEnv = pkgs.lib.optionalString stdenv.isLinux "-C link-arg=-fuse-ld=lld -C target-cpu=native -Clink-arg=-Wl,--no-rosegment --cfg tokio_unstable";
       rustToolchain = pkgs.pkgsBuildHost.rust-bin.fromRustupToolchainFile ./rust-toolchain.toml;
       craneLibMSRV = (crane.mkLib pkgs).overrideToolchain rustToolchain;
       craneLibStable = (crane.mkLib pkgs).overrideToolchain pkgs.pkgsBuildHost.rust-bin.stable.latest.default;

@@ -32,7 +32,7 @@ unicode-width = "=0.1.12"
 unicode-general-category = "0.6"
 slotmap.workspace = true
 tree-sitter.workspace = true
-once_cell = "1.19"
+once_cell = "1.20"
 arc-swap = "1"
 regex = "1"
 bitflags = "2.6"

@@ -9,6 +9,24 @@ use crate::{
 use helix_stdx::rope::RopeSliceExt;
 use std::borrow::Cow;

+pub const DEFAULT_COMMENT_TOKEN: &str = "//";
+
+/// Returns the longest matching comment token of the given line (if it exists).
+pub fn get_comment_token<'a, S: AsRef<str>>(
+    text: RopeSlice,
+    tokens: &'a [S],
+    line_num: usize,
+) -> Option<&'a str> {
+    let line = text.line(line_num);
+    let start = line.first_non_whitespace_char()?;
+
+    tokens
+        .iter()
+        .map(AsRef::as_ref)
+        .filter(|token| line.slice(start..).starts_with(token))
+        .max_by_key(|token| token.len())
+}
+
 /// Given text, a comment token, and a set of line indices, returns the following:
 /// - Whether the given lines should be considered commented
 /// - If any of the lines are uncommented, all lines are considered as such.
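The longest-match rule above is what lets a doc-comment token beat a plain line-comment token on the same line. A minimal standalone sketch of the same idea over plain `&str` (the `longest_comment_token` helper here is illustrative, not part of this diff):

```rust
/// Sketch of the longest-match rule from `get_comment_token`, using `&str`
/// instead of helix's `RopeSlice` (hypothetical helper, not in the diff).
fn longest_comment_token<'a>(line: &str, tokens: &[&'a str]) -> Option<&'a str> {
    let trimmed = line.trim_start();
    tokens
        .iter()
        .copied()
        .filter(|token| trimmed.starts_with(token))
        .max_by_key(|token| token.len())
}

fn main() {
    let tokens = ["//", "///"];
    // "///" wins over "//" because it is the longer matching token.
    assert_eq!(longest_comment_token("    /// docs", &tokens), Some("///"));
    assert_eq!(longest_comment_token("let x = 1;", &tokens), None);
}
```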
@@ -28,21 +46,20 @@ fn find_line_comment(
     let mut min = usize::MAX; // minimum col for first_non_whitespace_char
     let mut margin = 1;
     let token_len = token.chars().count();
+
     for line in lines {
         let line_slice = text.line(line);
         if let Some(pos) = line_slice.first_non_whitespace_char() {
             let len = line_slice.len_chars();
-            if pos < min {
-                min = pos;
-            }
+            min = std::cmp::min(min, pos);
+
             // line can be shorter than pos + token len
             let fragment = Cow::from(line_slice.slice(pos..std::cmp::min(pos + token.len(), len)));
-            if fragment != token {
             // as soon as one of the non-blank lines doesn't have a comment, the whole block is
             // considered uncommented.
+            if fragment != token {
                 commented = false;
             }
@@ -56,6 +73,7 @@ fn find_line_comment(
             to_change.push(line);
         }
     }
+
     (commented, to_change, min, margin)
 }

@@ -63,7 +81,7 @@ fn find_line_comment(
 pub fn toggle_line_comments(doc: &Rope, selection: &Selection, token: Option<&str>) -> Transaction {
     let text = doc.slice(..);

-    let token = token.unwrap_or("//");
+    let token = token.unwrap_or(DEFAULT_COMMENT_TOKEN);
     let comment = Tendril::from(format!("{} ", token));

     let mut lines: Vec<usize> = Vec::with_capacity(selection.len());
@@ -317,56 +335,87 @@ pub fn split_lines_of_selection(text: RopeSlice, selection: &Selection) -> Selec
 mod test {
     use super::*;

+    mod find_line_comment {
+        use super::*;
+
         #[test]
-    fn test_find_line_comment() {
-        // four lines, two space indented, except for line 1 which is blank.
-        let mut doc = Rope::from("  1\n\n  2\n  3");
-        // select whole document
-        let mut selection = Selection::single(0, doc.len_chars() - 1);
+        fn not_commented() {
+            // four lines, two space indented, except for line 1 which is blank.
+            let doc = Rope::from("  1\n\n  2\n  3");

             let text = doc.slice(..);

             let res = find_line_comment("//", text, 0..3);
-        // (commented = true, to_change = [line 0, line 2], min = col 2, margin = 0)
+            // (commented = false, to_change = [line 0, line 2], min = col 2, margin = 0)
             assert_eq!(res, (false, vec![0, 2], 2, 0));
+        }
+
+        #[test]
+        fn is_commented() {
+            // three lines where the second line is empty.
+            let doc = Rope::from("// hello\n\n// there");
+
+            let res = find_line_comment("//", doc.slice(..), 0..3);
+
+            // (commented = true, to_change = [line 0, line 2], min = col 0, margin = 1)
+            assert_eq!(res, (true, vec![0, 2], 0, 1));
+        }
+    }
+
+    // TODO: account for uncommenting with uneven comment indentation
+    mod toggle_line_comment {
+        use super::*;

-        // comment
+        #[test]
+        fn comment() {
+            // four lines, two space indented, except for line 1 which is blank.
+            let mut doc = Rope::from("  1\n\n  2\n  3");
+            // select whole document
+            let selection = Selection::single(0, doc.len_chars() - 1);
+
             let transaction = toggle_line_comments(&doc, &selection, None);
             transaction.apply(&mut doc);
-        selection = selection.map(transaction.changes());

             assert_eq!(doc, "  // 1\n\n  // 2\n  // 3");
+        }

-        // uncomment
+        #[test]
+        fn uncomment() {
+            let mut doc = Rope::from("  // 1\n\n  // 2\n  // 3");
+            let mut selection = Selection::single(0, doc.len_chars() - 1);
+
             let transaction = toggle_line_comments(&doc, &selection, None);
             transaction.apply(&mut doc);
             selection = selection.map(transaction.changes());
             assert_eq!(doc, "  1\n\n  2\n  3");
             assert!(selection.len() == 1); // to ignore the selection unused warning
+        }

-        // 0 margin comments
-        doc = Rope::from("  //1\n\n  //2\n  //3");
-        // reset the selection.
-        selection = Selection::single(0, doc.len_chars() - 1);
+        #[test]
+        fn uncomment_0_margin_comments() {
+            let mut doc = Rope::from("  //1\n\n  //2\n  //3");
+            let mut selection = Selection::single(0, doc.len_chars() - 1);

             let transaction = toggle_line_comments(&doc, &selection, None);
             transaction.apply(&mut doc);
             selection = selection.map(transaction.changes());
             assert_eq!(doc, "  1\n\n  2\n  3");
             assert!(selection.len() == 1); // to ignore the selection unused warning
+        }

-        // 0 margin comments, with no space
-        doc = Rope::from("//");
-        // reset the selection.
-        selection = Selection::single(0, doc.len_chars() - 1);
+        #[test]
+        fn uncomment_0_margin_comments_with_no_space() {
+            let mut doc = Rope::from("//");
+            let mut selection = Selection::single(0, doc.len_chars() - 1);

             let transaction = toggle_line_comments(&doc, &selection, None);
             transaction.apply(&mut doc);
             selection = selection.map(transaction.changes());
             assert_eq!(doc, "");
             assert!(selection.len() == 1); // to ignore the selection unused warning
-
-        // TODO: account for uncommenting with uneven comment indentation
+        }
     }

     #[test]
@@ -413,4 +462,32 @@ mod test {
         transaction.apply(&mut doc);
         assert_eq!(doc, "");
     }
+
+    /// Test that `get_comment_token` works even when the file's content contains
+    /// characters whose byte length differs from their char count.
+    #[test]
+    fn test_get_comment_with_char_boundaries() {
+        let rope = Rope::from("··");
+        let tokens = ["//", "///"];
+
+        assert_eq!(
+            super::get_comment_token(rope.slice(..), tokens.as_slice(), 0),
+            None
+        );
+    }
+
+    /// Test for `get_comment_token`.
+    ///
+    /// Assuming the comment tokens are stored as `["///", "//"]`, `get_comment_token` should still
+    /// return `///` instead of `//` if the user is in a doc-comment section.
+    #[test]
+    fn test_use_longest_comment() {
+        let text = Rope::from("    /// amogus");
+        let tokens = ["///", "//"];
+
+        assert_eq!(
+            super::get_comment_token(text.slice(..), tokens.as_slice(), 0),
+            Some("///")
+        );
+    }
 }

@@ -1,12 +1,18 @@
-use std::path::{Path, PathBuf};
+use std::{
+    fmt,
+    path::{Path, PathBuf},
+    sync::Arc,
+};

 /// A generic pointer to a file location.
 ///
 /// Currently this type only supports paths to local files.
+///
+/// Cloning this type is cheap: the internal representation uses an Arc.
 #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
 #[non_exhaustive]
 pub enum Uri {
-    File(PathBuf),
+    File(Arc<Path>),
 }

 impl Uri {
@@ -23,26 +29,18 @@ impl Uri {
             Self::File(path) => Some(path),
         }
     }
-
-    pub fn as_path_buf(self) -> Option<PathBuf> {
-        match self {
-            Self::File(path) => Some(path),
-        }
-    }
 }

 impl From<PathBuf> for Uri {
     fn from(path: PathBuf) -> Self {
-        Self::File(path)
+        Self::File(path.into())
     }
 }

-impl TryFrom<Uri> for PathBuf {
-    type Error = ();
-
-    fn try_from(uri: Uri) -> Result<Self, Self::Error> {
-        match uri {
-            Uri::File(path) => Ok(path),
+impl fmt::Display for Uri {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match self {
+            Self::File(path) => write!(f, "{}", path.display()),
         }
     }
 }
@@ -59,11 +57,16 @@ pub enum UrlConversionErrorKind {
     UnableToConvert,
 }

-impl std::fmt::Display for UrlConversionError {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+impl fmt::Display for UrlConversionError {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match self.kind {
             UrlConversionErrorKind::UnsupportedScheme => {
-                write!(f, "unsupported scheme in URL: {}", self.source.scheme())
+                write!(
+                    f,
+                    "unsupported scheme '{}' in URL {}",
+                    self.source.scheme(),
+                    self.source
+                )
             }
             UrlConversionErrorKind::UnableToConvert => {
                 write!(f, "unable to convert URL to file path: {}", self.source)
@@ -77,7 +80,7 @@ impl std::error::Error for UrlConversionError {}
 fn convert_url_to_uri(url: &url::Url) -> Result<Uri, UrlConversionErrorKind> {
     if url.scheme() == "file" {
         url.to_file_path()
-            .map(|path| Uri::File(helix_stdx::path::normalize(path)))
+            .map(|path| Uri::File(helix_stdx::path::normalize(path).into()))
             .map_err(|_| UrlConversionErrorKind::UnableToConvert)
     } else {
         Err(UrlConversionErrorKind::UnsupportedScheme)
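The switch from `PathBuf` to `Arc<Path>` is what backs the new "cloning this type is cheap" doc comment: every clone of a `Uri` now shares one allocation. A minimal sketch of that property, independent of helix's types:

```rust
use std::path::Path;
use std::sync::Arc;

fn main() {
    // Arc<Path> stores the path bytes once; a clone copies a pointer and bumps
    // a reference count, which is why Uri::clone is now cheap.
    let original: Arc<Path> = Arc::from(Path::new("/tmp/example.rs"));
    let clone = Arc::clone(&original);

    // Both handles point at the same allocation.
    assert!(Arc::ptr_eq(&original, &clone));
    assert_eq!(clone.display().to_string(), "/tmp/example.rs");
}
```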

@@ -24,4 +24,4 @@ tokio = { version = "1", features = ["rt", "rt-multi-thread", "io-util", "io-std
 thiserror.workspace = true

 [dev-dependencies]
-fern = "0.6"
+fern = "0.7"

@@ -19,11 +19,11 @@ tokio = { version = "1", features = ["rt", "rt-multi-thread", "time", "sync", "p
 # setup new events on initialization, hardware-lock-elision hugely benefits this case
 # as it essentially makes the lock entirely free as long as there is no writes
 parking_lot = { version = "0.12", features = ["hardware-lock-elision"] }
-once_cell = "1.18"
+once_cell = "1.20"

 anyhow = "1"
 log = "0.4"
-futures-executor = "0.3.28"
+futures-executor = "0.3.31"

 [features]
 integration_test = []

@@ -22,7 +22,7 @@ serde = { version = "1.0", features = ["derive"] }
 toml = "0.8"
 etcetera = "0.8"
 tree-sitter.workspace = true
-once_cell = "1.19"
+once_cell = "1.20"
 log = "0.4"

 # TODO: these two should be on !wasm32 only
@@ -30,7 +30,7 @@ log = "0.4"
 # cloning/compiling tree-sitter grammars
 cc = { version = "1" }
 threadpool = { version = "1.0" }
-tempfile = "3.12.0"
+tempfile = "3.13.0"
 dunce = "1.0.5"

 [target.'cfg(not(target_arch = "wasm32"))'.dependencies]

@@ -23,7 +23,7 @@ license = "MIT"

 [dependencies]
 bitflags = "2.6.0"
 serde = { version = "1.0.209", features = ["derive"] }
-serde_json = "1.0.127"
+serde_json = "1.0.132"
 serde_repr = "0.1"
 url = {version = "2.0.0", features = ["serde"]}

@@ -26,4 +26,4 @@ windows-sys = { version = "0.59", features = ["Win32_Foundation", "Win32_Securit
 rustix = { version = "0.38", features = ["fs"] }

 [dev-dependencies]
-tempfile = "3.12"
+tempfile = "3.13"

@@ -33,7 +33,7 @@ helix-vcs = { path = "../helix-vcs" }
 helix-loader = { path = "../helix-loader" }

 anyhow = "1"
-once_cell = "1.19"
+once_cell = "1.20"

 tokio = { version = "1", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot"] }
 tui = { path = "../helix-tui", package = "helix-tui", default-features = false, features = ["crossterm"] }
@@ -45,7 +45,7 @@ arc-swap = { version = "1.7.1" }
 termini = "1"

 # Logging
-fern = "0.6"
+fern = "0.7"
 chrono = { version = "0.4", default-features = false, features = ["clock"] }
 log = "0.4"

@@ -74,7 +74,7 @@ grep-searcher = "0.1.14"

 [target.'cfg(not(windows))'.dependencies]  # https://github.com/vorner/signal-hook/issues/100
 signal-hook-tokio = { version = "0.3", features = ["futures-v0_3"] }
-libc = "0.2.158"
+libc = "0.2.161"

 [target.'cfg(target_os = "macos")'.dependencies]
 crossterm = { version = "0.28", features = ["event-stream", "use-dev-tty", "libc"] }
@@ -85,5 +85,5 @@ helix-loader = { path = "../helix-loader" }
 [dev-dependencies]
 smallvec = "1.13"
 indoc = "2.0.5"
-tempfile = "3.12.0"
+tempfile = "3.13.0"
 same-file = "1.0.1"

@@ -22,8 +22,8 @@ use helix_core::{
     encoding, find_workspace,
     graphemes::{self, next_grapheme_boundary, RevRopeGraphemes},
     history::UndoKind,
-    increment, indent,
-    indent::IndentStyle,
+    increment,
+    indent::{self, IndentStyle},
     line_ending::{get_line_ending_of_str, line_end_char_index},
     match_brackets,
     movement::{self, move_vertically_visual, Direction},
@@ -3467,7 +3467,15 @@ fn open(cx: &mut Context, open: Open) {
             )
         };

-        let indent = indent::indent_for_newline(
+        let continue_comment_token = doc
+            .language_config()
+            .and_then(|config| config.comment_tokens.as_ref())
+            .and_then(|tokens| comment::get_comment_token(text, tokens, cursor_line));
+
+        let line = text.line(cursor_line);
+        let indent = match line.first_non_whitespace_char() {
+            Some(pos) if continue_comment_token.is_some() => line.slice(..pos).to_string(),
+            _ => indent::indent_for_newline(
                 doc.language_config(),
                 doc.syntax(),
                 &doc.config.load().indent_heuristic,
@@ -3477,21 +3485,33 @@ fn open(cx: &mut Context, open: Open) {
                 line_num,
                 line_end_index,
                 cursor_line,
-        );
+            ),
+        };

         let indent_len = indent.len();
         let mut text = String::with_capacity(1 + indent_len);
         text.push_str(doc.line_ending.as_str());
         text.push_str(&indent);

+        if let Some(token) = continue_comment_token {
+            text.push_str(token);
+            text.push(' ');
+        }
+
         let text = text.repeat(count);

         // calculate new selection ranges
         let pos = offs + line_end_index + line_end_offset_width;
+        let comment_len = continue_comment_token
+            .map(|token| token.len() + 1) // `+ 1` for the extra space added
+            .unwrap_or_default();
         for i in 0..count {
             // pos                                  -> beginning of reference line,
-            // + (i * (1+indent_len))               -> beginning of i'th line from pos
-            // + indent_len                         -> indent for i'th line
-            ranges.push(Range::point(pos + (i * (1 + indent_len)) + indent_len));
+            // + (i * (1+indent_len + comment_len)) -> beginning of i'th line from pos (possibly including comment token)
+            // + indent_len + comment_len           -> indent for i'th line
+            ranges.push(Range::point(
+                pos + (i * (1 + indent_len + comment_len)) + indent_len + comment_len,
+            ));
         }

         offs += text.chars().count();
@@ -3929,6 +3949,11 @@ pub mod insert {

             let mut new_text = String::new();

+            let continue_comment_token = doc
+                .language_config()
+                .and_then(|config| config.comment_tokens.as_ref())
+                .and_then(|tokens| comment::get_comment_token(text, tokens, current_line));
+
             // If the current line is all whitespace, insert a line ending at the beginning of
             // the current line. This makes the current line empty and the new line contain the
             // indentation of the old line.
@@ -3938,7 +3963,11 @@ pub mod insert {
                 (line_start, line_start, new_text.chars().count())
             } else {
-                let indent = indent::indent_for_newline(
+                let line = text.line(current_line);
+                let indent = match line.first_non_whitespace_char() {
+                    Some(pos) if continue_comment_token.is_some() => line.slice(..pos).to_string(),
+                    _ => indent::indent_for_newline(
                         doc.language_config(),
                         doc.syntax(),
                         &doc.config.load().indent_heuristic,
@@ -3948,7 +3977,8 @@ pub mod insert {
                         current_line,
                         pos,
                         current_line,
-                );
+                    ),
+                };

                 // If we are between pairs (such as brackets), we want to
                 // insert an additional line which is indented one level
@@ -3958,19 +3988,30 @@ pub mod insert {
                     .and_then(|pairs| pairs.get(prev))
                     .map_or(false, |pair| pair.open == prev && pair.close == curr);

-                let local_offs = if on_auto_pair {
+                let local_offs = if let Some(token) = continue_comment_token {
+                    new_text.push_str(doc.line_ending.as_str());
+                    new_text.push_str(&indent);
+                    new_text.push_str(token);
+                    new_text.push(' ');
+                    new_text.chars().count()
+                } else if on_auto_pair {
+                    // line where the cursor will be
                     let inner_indent = indent.clone() + doc.indent_style.as_str();
                     new_text.reserve_exact(2 + indent.len() + inner_indent.len());
                     new_text.push_str(doc.line_ending.as_str());
                     new_text.push_str(&inner_indent);

+                    // line where the matching pair will be
                     let local_offs = new_text.chars().count();
+
                     new_text.push_str(doc.line_ending.as_str());
                     new_text.push_str(&indent);
+
                     local_offs
                 } else {
                     new_text.reserve_exact(1 + indent.len());
                     new_text.push_str(doc.line_ending.as_str());
                     new_text.push_str(&indent);
+
                     new_text.chars().count()
                 };
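Both `open` and the `insert` path above assemble the new line the same way: line ending, then indent, then the comment token plus a one-space margin. A simplified sketch of that assembly with plain `String`s (the `continuation_text` helper is an illustrative stand-in, not a helix function):

```rust
// Simplified sketch of how the continued-comment line is built above.
fn continuation_text(line_ending: &str, indent: &str, token: Option<&str>) -> String {
    let mut text = String::new();
    text.push_str(line_ending);
    text.push_str(indent);
    if let Some(token) = token {
        // Re-open the comment on the new line, plus the one-space margin that
        // `comment_len` accounts for when placing the cursors.
        text.push_str(token);
        text.push(' ');
    }
    text
}

fn main() {
    assert_eq!(continuation_text("\n", "    ", Some("//")), "\n    // ");
    assert_eq!(continuation_text("\n", "    ", None), "\n    ");
}
```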

@@ -34,7 +34,7 @@ use crate::{
 use std::{
     cmp::Ordering,
     collections::{BTreeMap, HashSet},
-    fmt::{Display, Write},
+    fmt::Display,
     future::Future,
     path::Path,
 };
@@ -61,10 +61,31 @@ macro_rules! language_server_with_feature {
     }};
 }

+/// A wrapper around `lsp::Location` that swaps out the LSP URI for `helix_core::Uri`.
+#[derive(Debug, Clone, PartialEq, Eq)]
+struct Location {
+    uri: Uri,
+    range: lsp::Range,
+}
+
+fn lsp_location_to_location(location: lsp::Location) -> Option<Location> {
+    let uri = match location.uri.try_into() {
+        Ok(uri) => uri,
+        Err(err) => {
+            log::warn!("discarding invalid or unsupported URI: {err}");
+            return None;
+        }
+    };
+    Some(Location {
+        uri,
+        range: location.range,
+    })
+}
+
 struct SymbolInformationItem {
+    location: Location,
     symbol: lsp::SymbolInformation,
     offset_encoding: OffsetEncoding,
-    uri: Uri,
 }

 struct DiagnosticStyles {
@@ -75,35 +96,35 @@ struct DiagnosticStyles {
 }

 struct PickerDiagnostic {
-    uri: Uri,
+    location: Location,
     diag: lsp::Diagnostic,
     offset_encoding: OffsetEncoding,
 }

-fn uri_to_file_location<'a>(uri: &'a Uri, range: &lsp::Range) -> Option<FileLocation<'a>> {
-    let path = uri.as_path()?;
-    let line = Some((range.start.line as usize, range.end.line as usize));
+fn location_to_file_location(location: &Location) -> Option<FileLocation> {
+    let path = location.uri.as_path()?;
+    let line = Some((
+        location.range.start.line as usize,
+        location.range.end.line as usize,
+    ));
     Some((path.into(), line))
 }

 fn jump_to_location(
     editor: &mut Editor,
-    location: &lsp::Location,
+    location: &Location,
     offset_encoding: OffsetEncoding,
     action: Action,
 ) {
     let (view, doc) = current!(editor);
     push_jump(view, doc);

-    let path = match location.uri.to_file_path() {
-        Ok(path) => path,
-        Err(_) => {
-            let err = format!("unable to convert URI to filepath: {}", location.uri);
+    let Some(path) = location.uri.as_path() else {
+        let err = format!("unable to convert URI to filepath: {:?}", location.uri);
         editor.set_error(err);
         return;
-        }
     };
-    jump_to_position(editor, &path, location.range, offset_encoding, action);
+    jump_to_position(editor, path, location.range, offset_encoding, action);
 }

 fn jump_to_position(
@@ -196,7 +217,10 @@ fn diag_picker(
     for (diag, ls) in diags {
         if let Some(ls) = cx.editor.language_server_by_id(ls) {
             flat_diag.push(PickerDiagnostic {
+                location: Location {
                     uri: uri.clone(),
+                    range: diag.range,
+                },
                 diag,
                 offset_encoding: ls.offset_encoding(),
             });
@@ -243,7 +267,7 @@ fn diag_picker(
            // between message code and message
            2,
        ui::PickerColumn::new("path", |item: &PickerDiagnostic, _| {
-            if let Some(path) = item.uri.as_path() {
+            if let Some(path) = item.location.uri.as_path() {
                 path::get_truncated_path(path)
                     .to_string_lossy()
                     .to_string()
@@ -261,26 +285,14 @@ fn diag_picker(
         primary_column,
         flat_diag,
         styles,
-        move |cx,
-              PickerDiagnostic {
-                  uri,
-                  diag,
-                  offset_encoding,
-              },
-              action| {
-            let Some(path) = uri.as_path() else {
-                return;
-            };
-            jump_to_position(cx.editor, path, diag.range, *offset_encoding, action);
+        move |cx, diag, action| {
+            jump_to_location(cx.editor, &diag.location, diag.offset_encoding, action);
             let (view, doc) = current!(cx.editor);
             view.diagnostics_handler
                 .immediately_show_diagnostic(doc, view.id);
         },
     )
-    .with_preview(move |_editor, PickerDiagnostic { uri, diag, .. }| {
-        let line = Some((diag.range.start.line as usize, diag.range.end.line as usize));
-        Some((uri.as_path()?.into(), line))
-    })
+    .with_preview(move |_editor, diag| location_to_file_location(&diag.location))
     .truncate_start(false)
 }
@@ -303,7 +315,10 @@ pub fn symbol_picker(cx: &mut Context) {
                     container_name: None,
                 },
                 offset_encoding,
+                location: Location {
                     uri: uri.clone(),
+                    range: symbol.selection_range,
+                },
             });
             for child in symbol.children.into_iter().flatten() {
                 nested_to_flat(list, file, uri, child, offset_encoding);
@@ -337,7 +352,10 @@ pub fn symbol_picker(cx: &mut Context) {
                 lsp::DocumentSymbolResponse::Flat(symbols) => symbols
                     .into_iter()
                     .map(|symbol| SymbolInformationItem {
+                        location: Location {
                             uri: doc_uri.clone(),
+                            range: symbol.location.range,
+                        },
                         symbol,
                         offset_encoding,
                     })
@@ -392,17 +410,10 @@ pub fn symbol_picker(cx: &mut Context) {
         symbols,
         (),
         move |cx, item, action| {
-            jump_to_location(
-                cx.editor,
-                &item.symbol.location,
-                item.offset_encoding,
-                action,
-            );
+            jump_to_location(cx.editor, &item.location, item.offset_encoding, action);
         },
     )
-    .with_preview(move |_editor, item| {
-        uri_to_file_location(&item.uri, &item.symbol.location.range)
-    })
+    .with_preview(move |_editor, item| location_to_file_location(&item.location))
     .truncate_start(false);

     compositor.push(Box::new(overlaid(picker)))
@@ -453,8 +464,11 @@ pub fn workspace_symbol_picker(cx: &mut Context) {
                 }
             };
             Some(SymbolInformationItem {
-                symbol,
+                location: Location {
                     uri,
+                    range: symbol.location.range,
+                },
+                symbol,
                 offset_encoding,
             })
         })
@@ -490,7 +504,7 @@ pub fn workspace_symbol_picker(cx: &mut Context) {
             })
             .without_filtering(),
         ui::PickerColumn::new("path", |item: &SymbolInformationItem, _| {
-            if let Some(path) = item.uri.as_path() {
+            if let Some(path) = item.location.uri.as_path() {
                 path::get_relative_path(path)
                     .to_string_lossy()
                     .to_string()
@@ -507,15 +521,10 @@ pub fn workspace_symbol_picker(cx: &mut Context) {
         [],
         (),
         move |cx, item, action| {
-            jump_to_location(
-                cx.editor,
-                &item.symbol.location,
-                item.offset_encoding,
-                action,
-            );
+            jump_to_location(cx.editor, &item.location, item.offset_encoding, action);
         },
     )
-    .with_preview(|_editor, item| uri_to_file_location(&item.uri, &item.symbol.location.range))
+    .with_preview(|_editor, item| location_to_file_location(&item.location))
     .with_dynamic_query(get_symbols, None)
     .truncate_start(false);

@@ -847,7 +856,7 @@ impl Display for ApplyEditErrorKind {
 fn goto_impl(
     editor: &mut Editor,
     compositor: &mut Compositor,
-    locations: Vec<lsp::Location>,
+    locations: Vec<Location>,
     offset_encoding: OffsetEncoding,
 ) {
     let cwdir = helix_stdx::env::current_working_dir();
@@ -860,80 +869,41 @@ fn goto_impl(
         _locations => {
             let columns = [ui::PickerColumn::new(
                 "location",
-                |item: &lsp::Location, cwdir: &std::path::PathBuf| {
-                    // The preallocation here will overallocate a few characters since it will account for the
-                    // URL's scheme, which is not used most of the time since that scheme will be "file://".
-                    // Those extra chars will be used to avoid allocating when writing the line number (in the
-                    // common case where it has 5 digits or less, which should be enough for a cast majority
-                    // of usages).
-                    let mut res = String::with_capacity(item.uri.as_str().len());
-
-                    if item.uri.scheme() == "file" {
-                        // With the preallocation above and UTF-8 paths already, this closure will do one (1)
-                        // allocation, for `to_file_path`, else there will be two (2), with `to_string_lossy`.
-                        if let Ok(path) = item.uri.to_file_path() {
-                            // We don't convert to a `helix_core::Uri` here because we've already checked the scheme.
-                            // This path won't be normalized but it's only used for display.
-                            res.push_str(
-                                &path.strip_prefix(cwdir).unwrap_or(&path).to_string_lossy(),
-                            );
-                        }
+                |item: &Location, cwdir: &std::path::PathBuf| {
+                    let path = if let Some(path) = item.uri.as_path() {
+                        path.strip_prefix(cwdir).unwrap_or(path).to_string_lossy()
                     } else {
-                        // Never allocates since we declared the string with this capacity already.
-                        res.push_str(item.uri.as_str());
-                    }
-
-                    // Most commonly, this will not allocate, especially on Unix systems where the root prefix
-                    // is a simple `/` and not `C:\` (with whatever drive letter)
-                    write!(&mut res, ":{}", item.range.start.line + 1)
-                        .expect("Will only failed if allocating fail");
-                    res.into()
+                        item.uri.to_string().into()
+                    };
+
+                    format!("{path}:{}", item.range.start.line + 1).into()
                 },
             )];

             let picker = Picker::new(columns, 0, locations, cwdir, move |cx, location, action| {
                 jump_to_location(cx.editor, location, offset_encoding, action)
             })
-            .with_preview(move |_editor, location| {
-                use crate::ui::picker::PathOrId;
-                let lines = Some((
-                    location.range.start.line as usize,
-                    location.range.end.line as usize,
-                ));
-                // TODO: we should avoid allocating by doing the Uri conversion ahead of time.
-                //
-                // To do this, introduce a `Location` type in `helix-core` that reuses the core
-                // `Uri` type instead of the LSP `Url` type and replaces the LSP `Range` type.
-                // Refactor the callers of `goto_impl` to pass iterators that translate the
-                // LSP location type to the custom one in core, or have them collect and pass
-                // `Vec<Location>`s. Replace the `uri_to_file_location` function with
-                // `location_to_file_location` that takes only `&helix_core::Location` as
-                // parameters.
-                //
-                // By doing this we can also eliminate the duplicated URI info in the
-                // `SymbolInformationItem` type and introduce a custom Symbol type in `helix-core`
-                // which will be reused in the future for tree-sitter based symbol pickers.
-                let path = Uri::try_from(&location.uri).ok()?.as_path_buf()?;
-                #[allow(deprecated)]
-                Some((PathOrId::from_path_buf(path), lines))
-            });
+            .with_preview(move |_editor, location| location_to_file_location(location));

             compositor.push(Box::new(overlaid(picker)));
         }
     }
 }

-fn to_locations(definitions: Option<lsp::GotoDefinitionResponse>) -> Vec<lsp::Location> {
+fn to_locations(definitions: Option<lsp::GotoDefinitionResponse>) -> Vec<Location> {
     match definitions {
-        Some(lsp::GotoDefinitionResponse::Scalar(location)) => vec![location],
-        Some(lsp::GotoDefinitionResponse::Array(locations)) => locations,
+        Some(lsp::GotoDefinitionResponse::Scalar(location)) => {
+            lsp_location_to_location(location).into_iter().collect()
+        }
+        Some(lsp::GotoDefinitionResponse::Array(locations)) => locations
+            .into_iter()
+            .flat_map(lsp_location_to_location)
+            .collect(),
         Some(lsp::GotoDefinitionResponse::Link(locations)) => locations
             .into_iter()
-            .map(|location_link| lsp::Location {
-                uri: location_link.target_uri,
-                range: location_link.target_range,
+            .map(|location_link| {
+                lsp::Location::new(location_link.target_uri, location_link.target_range)
             })
+            .flat_map(lsp_location_to_location)
             .collect(),
         None => Vec::new(),
     }
@@ -1018,7 +988,11 @@ pub fn goto_reference(cx: &mut Context) {
     cx.callback(
         future,
         move |editor, compositor, response: Option<Vec<lsp::Location>>| {
-            let items = response.unwrap_or_default();
+            let items: Vec<Location> = response
+                .into_iter()
+                .flatten()
+                .flat_map(lsp_location_to_location)
+                .collect();
             if items.is_empty() {
                 editor.set_error("No references found.");
             } else {
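All of these LSP entry points now share one shape: convert each `lsp::Location` with an `Option`-returning function, and let `flat_map` drop the failures after logging them. A tiny sketch of that pattern with stand-in types (`parse_port` is a hypothetical converter, not helix code):

```rust
// Sketch of the Option-returning conversion + flat_map filtering pattern used
// by `to_locations` and `goto_reference` above.
fn parse_port(s: &str) -> Option<u16> {
    match s.parse() {
        Ok(port) => Some(port),
        Err(err) => {
            // Mirrors the log::warn! + `return None` in lsp_location_to_location.
            eprintln!("discarding invalid item {s:?}: {err}");
            None
        }
    }
}

fn main() {
    let raw = ["8080", "not-a-port", "443"];
    let ports: Vec<u16> = raw.iter().copied().flat_map(parse_port).collect();
    // The invalid entry is dropped rather than aborting the whole list.
    assert_eq!(ports, vec![8080, 443]);
}
```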

@@ -96,7 +96,10 @@ impl Component for SignatureHelp {
     fn render(&mut self, area: Rect, surface: &mut Buffer, cx: &mut Context) {
         let margin = Margin::horizontal(1);

-        let signature = &self.signatures[self.active_signature];
+        let signature = self
+            .signatures
+            .get(self.active_signature)
+            .unwrap_or_else(|| &self.signatures[0]);

         let active_param_span = signature.active_param_range.map(|(start, end)| {
             vec![(
@@ -108,9 +111,13 @@ impl Component for SignatureHelp {
             )]
         });

-        let sig = &self.signatures[self.active_signature];
+        let signature = self
+            .signatures
+            .get(self.active_signature)
+            .unwrap_or_else(|| &self.signatures[0]);
+
         let sig_text = crate::ui::markdown::highlighted_code_block(
-            sig.signature.as_str(),
+            signature.signature.as_str(),
             &self.language,
             Some(&cx.editor.theme),
             Arc::clone(&self.config_loader),
@@ -130,7 +137,7 @@ impl Component for SignatureHelp {
         let sig_text_para = Paragraph::new(&sig_text).wrap(Wrap { trim: false });
         sig_text_para.render(sig_text_area, surface);

-        if sig.signature_doc.is_none() {
+        if signature.signature_doc.is_none() {
             return;
         }

@@ -142,7 +149,7 @@ impl Component for SignatureHelp {
             }
         }

-        let sig_doc = match &sig.signature_doc {
+        let sig_doc = match &signature.signature_doc {
             None => return,
             Some(doc) => Markdown::new(doc.clone(), Arc::clone(&self.config_loader)),
         };
@@ -160,12 +167,15 @@ impl Component for SignatureHelp {
         const PADDING: u16 = 2;
         const SEPARATOR_HEIGHT: u16 = 1;

-        let sig = &self.signatures[self.active_signature];
+        let signature = self
+            .signatures
+            .get(self.active_signature)
+            .unwrap_or_else(|| &self.signatures[0]);

         let max_text_width = viewport.0.saturating_sub(PADDING).clamp(10, 120);

         let signature_text = crate::ui::markdown::highlighted_code_block(
-            sig.signature.as_str(),
+            signature.signature.as_str(),
             &self.language,
             None,
             Arc::clone(&self.config_loader),
@@ -174,7 +184,7 @@ impl Component for SignatureHelp {
         let (sig_width, sig_height) =
             crate::ui::text::required_size(&signature_text, max_text_width);

-        let (width, height) = match sig.signature_doc {
+        let (width, height) = match signature.signature_doc {
             Some(ref doc) => {
                 let doc_md = Markdown::new(doc.clone(), Arc::clone(&self.config_loader));
                 let doc_text = doc_md.parse(None);
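Every render path now goes through `get(...)` with a fallback instead of indexing directly, so a stale `active_signature` index can no longer panic. The pattern in isolation (a minimal sketch, assuming the list is non-empty just as the diff's `unwrap_or_else` does):

```rust
fn main() {
    let signatures = ["fn foo(a: i32)", "fn foo(a: i32, b: i32)"];

    // e.g. an index left over from a previous, longer signature response
    let active_signature = 5;

    // `get` turns the out-of-bounds access into None; fall back to the first
    // signature instead of panicking.
    let shown = signatures.get(active_signature).unwrap_or(&signatures[0]);

    assert_eq!(*shown, "fn foo(a: i32)");
}
```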

@@ -32,7 +32,7 @@ use crate::{
 use std::{
     borrow::Cow,
     collections::HashMap,
     io::Read,
-    path::{Path, PathBuf},
+    path::Path,
     sync::{
         atomic::{self, AtomicUsize},
         Arc,
@@ -63,26 +63,12 @@ pub const MAX_FILE_SIZE_FOR_PREVIEW: u64 = 10 * 1024 * 1024;
 #[derive(PartialEq, Eq, Hash)]
 pub enum PathOrId<'a> {
     Id(DocumentId),
-    // See [PathOrId::from_path_buf]: this will eventually become `Path(&Path)`.
-    Path(Cow<'a, Path>),
-}
-
-impl<'a> PathOrId<'a> {
-    /// Creates a [PathOrId] from a PathBuf
-    ///
-    /// # Deprecated
-    /// The owned version of PathOrId will be removed in a future refactor
-    /// and replaced with `&'a Path`. See the caller of this function for
-    /// more details on its removal.
-    #[deprecated]
-    pub fn from_path_buf(path_buf: PathBuf) -> Self {
-        Self::Path(Cow::Owned(path_buf))
-    }
+    Path(&'a Path),
 }

 impl<'a> From<&'a Path> for PathOrId<'a> {
     fn from(path: &'a Path) -> Self {
-        Self::Path(Cow::Borrowed(path))
+        Self::Path(path)
     }
 }

@@ -581,7 +567,6 @@ impl<T: 'static + Send + Sync, D: 'static + Send + Sync> Picker<T, D> {
         match path_or_id {
             PathOrId::Path(path) => {
-                let path = path.as_ref();
                 if let Some(doc) = editor.document_by_path(path) {
                     return Some((Preview::EditorDocument(doc), range));
                 }
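With the deprecated `from_path_buf` constructor gone, `PathOrId` only ever borrows its path, which is what the earlier TODO in `goto_impl` asked for. A minimal sketch of the borrowed design with a stand-in enum (names here are illustrative, not helix's actual types):

```rust
use std::path::{Path, PathBuf};

// Borrowed stand-in for PathOrId::Path(&Path): callers keep ownership and
// hand out short-lived references instead of cloning into a Cow.
enum PathOrId<'a> {
    Id(u32),
    Path(&'a Path),
}

fn describe(p: &PathOrId<'_>) -> String {
    match p {
        PathOrId::Id(id) => format!("buffer #{id}"),
        PathOrId::Path(path) => format!("file {}", path.display()),
    }
}

fn main() {
    let owned = PathBuf::from("/tmp/main.rs");
    let by_path = PathOrId::Path(&owned); // borrow, no allocation needed
    println!("{}", describe(&by_path));
    println!("{}", describe(&PathOrId::Id(3)));
}
```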

@@ -24,5 +24,5 @@ unicode-segmentation = "1.12"
 crossterm = { version = "0.28", optional = true }
 termini = "1.0"
 serde = { version = "1", "optional" = true, features = ["derive"]}
-once_cell = "1.19"
+once_cell = "1.20"
 log = "~0.4"

@@ -29,4 +29,4 @@ log = "0.4"
 git = ["gix"]

 [dev-dependencies]
-tempfile = "3.12"
+tempfile = "3.13"

@ -22,18 +22,24 @@ use crate::FileChange;
#[cfg(test)] #[cfg(test)]
mod test; mod test;
#[inline]
fn get_repo_dir(file: &Path) -> Result<&Path> {
file.parent().context("file has no parent directory")
}
pub fn get_diff_base(file: &Path) -> Result<Vec<u8>> { pub fn get_diff_base(file: &Path) -> Result<Vec<u8>> {
debug_assert!(!file.exists() || file.is_file()); debug_assert!(!file.exists() || file.is_file());
debug_assert!(file.is_absolute()); debug_assert!(file.is_absolute());
let file = gix::path::realpath(file).context("resolve symlinks")?;
// TODO cache repository lookup // TODO cache repository lookup
let repo_dir = file.parent().context("file has no parent directory")?; let repo_dir = get_repo_dir(&file)?;
let repo = open_repo(repo_dir) let repo = open_repo(repo_dir)
.context("failed to open git repo")? .context("failed to open git repo")?
.to_thread_local(); .to_thread_local();
let head = repo.head_commit()?; let head = repo.head_commit()?;
let file_oid = find_file_in_commit(&repo, &head, file)?; let file_oid = find_file_in_commit(&repo, &head, &file)?;
let file_object = repo.find_object(file_oid)?; let file_object = repo.find_object(file_oid)?;
let data = file_object.detach().data; let data = file_object.detach().data;
@ -56,7 +62,9 @@ pub fn get_diff_base(file: &Path) -> Result<Vec<u8>> {
pub fn get_current_head_name(file: &Path) -> Result<Arc<ArcSwap<Box<str>>>> { pub fn get_current_head_name(file: &Path) -> Result<Arc<ArcSwap<Box<str>>>> {
debug_assert!(!file.exists() || file.is_file()); debug_assert!(!file.exists() || file.is_file());
debug_assert!(file.is_absolute()); debug_assert!(file.is_absolute());
let repo_dir = file.parent().context("file has no parent directory")?; let file = gix::path::realpath(file).context("resolve symlinks")?;
let repo_dir = get_repo_dir(&file)?;
let repo = open_repo(repo_dir) let repo = open_repo(repo_dir)
.context("failed to open git repo")? .context("failed to open git repo")?
.to_thread_local(); .to_thread_local();
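Both hunks follow the same pattern: resolve symlinks first, then derive the repository directory from the resolved path, so the repo lookup and the blob lookup agree on the file's real location. A minimal sketch of that pattern, using std::fs::canonicalize as a rough stand-in for gix::path::realpath (edge-case semantics differ):

    use std::io;
    use std::path::{Path, PathBuf};

    // Resolve the file first, then take its parent as the repo lookup root.
    fn resolved_repo_dir(file: &Path) -> io::Result<(PathBuf, PathBuf)> {
        let real = std::fs::canonicalize(file)?; // stand-in for gix::path::realpath
        let repo_dir = real
            .parent()
            .ok_or_else(|| io::Error::new(io::ErrorKind::Other, "file has no parent directory"))?
            .to_path_buf();
        Ok((real, repo_dir))
    }

    fn main() -> io::Result<()> {
        // Assumes a Cargo.toml exists in the current directory.
        let (real, repo_dir) = resolved_repo_dir(Path::new("Cargo.toml"))?;
        println!("file: {}", real.display());
        println!("repo dir: {}", repo_dir.display());
        Ok(())
    }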

@ -98,9 +98,13 @@ fn directory() {
assert!(git::get_diff_base(&dir).is_err()); assert!(git::get_diff_base(&dir).is_err());
} }
/// Test that `get_file_head` does not return content for a symlink. /// Test that `get_diff_base` resolves symlinks so that the same diff base is
/// This is important to correctly cover cases where a symlink is removed and replaced by a file. /// used as for the target file.
/// If the contents of the symlink object were returned a diff between a path and the actual file would be produced (bad ui). ///
/// This is important to correctly cover cases where a symlink is removed and
/// replaced by a file. If the contents of the symlink object were returned,
/// a diff between a literal file path and the actual file content would be
/// produced (bad ui).
#[cfg(any(unix, windows))] #[cfg(any(unix, windows))]
#[test] #[test]
fn symlink() { fn symlink() {
@ -108,14 +112,41 @@ fn symlink() {
use std::os::unix::fs::symlink; use std::os::unix::fs::symlink;
#[cfg(not(unix))] #[cfg(not(unix))]
use std::os::windows::fs::symlink_file as symlink; use std::os::windows::fs::symlink_file as symlink;
let temp_git = empty_git_repo(); let temp_git = empty_git_repo();
let file = temp_git.path().join("file.txt"); let file = temp_git.path().join("file.txt");
let contents = b"foo".as_slice(); let contents = Vec::from(b"foo");
File::create(&file).unwrap().write_all(contents).unwrap(); File::create(&file).unwrap().write_all(&contents).unwrap();
let file_link = temp_git.path().join("file_link.txt"); let file_link = temp_git.path().join("file_link.txt");
symlink("file.txt", &file_link).unwrap(); symlink("file.txt", &file_link).unwrap();
create_commit(temp_git.path(), true);
assert_eq!(git::get_diff_base(&file_link).unwrap(), contents);
assert_eq!(git::get_diff_base(&file).unwrap(), contents);
}
/// Test that `get_diff_base` returns content when the file is a symlink to
/// another file that is in a git repo, but the symlink itself is not.
#[cfg(any(unix, windows))]
#[test]
fn symlink_to_git_repo() {
#[cfg(unix)]
use std::os::unix::fs::symlink;
#[cfg(not(unix))]
use std::os::windows::fs::symlink_file as symlink;
let temp_dir = tempfile::tempdir().expect("create temp dir");
let temp_git = empty_git_repo();
let file = temp_git.path().join("file.txt");
let contents = Vec::from(b"foo");
File::create(&file).unwrap().write_all(&contents).unwrap();
create_commit(temp_git.path(), true); create_commit(temp_git.path(), true);
assert!(git::get_diff_base(&file_link).is_err());
assert_eq!(git::get_diff_base(&file).unwrap(), Vec::from(contents)); let file_link = temp_dir.path().join("file_link.txt");
symlink(&file, &file_link).unwrap();
assert_eq!(git::get_diff_base(&file_link).unwrap(), contents);
assert_eq!(git::get_diff_base(&file).unwrap(), contents);
} }

@ -28,10 +28,10 @@ bitflags = "2.6"
anyhow = "1" anyhow = "1"
crossterm = { version = "0.28", optional = true } crossterm = { version = "0.28", optional = true }
tempfile = "3.12" tempfile = "3.13"
# Conversion traits # Conversion traits
once_cell = "1.19" once_cell = "1.20"
url = "2.5.2" url = "2.5.2"
arc-swap = { version = "1.7.1" } arc-swap = { version = "1.7.1" }

@ -243,7 +243,7 @@ impl Editor {
match op { match op {
ResourceOp::Create(op) => { ResourceOp::Create(op) => {
let uri = Uri::try_from(&op.uri)?; let uri = Uri::try_from(&op.uri)?;
let path = uri.as_path_buf().expect("URIs are valid paths"); let path = uri.as_path().expect("URIs are valid paths");
let ignore_if_exists = op.options.as_ref().map_or(false, |options| { let ignore_if_exists = op.options.as_ref().map_or(false, |options| {
!options.overwrite.unwrap_or(false) && options.ignore_if_exists.unwrap_or(false) !options.overwrite.unwrap_or(false) && options.ignore_if_exists.unwrap_or(false)
}); });
@ -255,13 +255,15 @@ impl Editor {
} }
} }
fs::write(&path, [])?; fs::write(path, [])?;
self.language_servers.file_event_handler.file_changed(path); self.language_servers
.file_event_handler
.file_changed(path.to_path_buf());
} }
} }
ResourceOp::Delete(op) => { ResourceOp::Delete(op) => {
let uri = Uri::try_from(&op.uri)?; let uri = Uri::try_from(&op.uri)?;
let path = uri.as_path_buf().expect("URIs are valid paths"); let path = uri.as_path().expect("URIs are valid paths");
if path.is_dir() { if path.is_dir() {
let recursive = op let recursive = op
.options .options
@ -270,11 +272,13 @@ impl Editor {
.unwrap_or(false); .unwrap_or(false);
if recursive { if recursive {
fs::remove_dir_all(&path)? fs::remove_dir_all(path)?
} else { } else {
fs::remove_dir(&path)? fs::remove_dir(path)?
} }
self.language_servers.file_event_handler.file_changed(path); self.language_servers
.file_event_handler
.file_changed(path.to_path_buf());
} else if path.is_file() { } else if path.is_file() {
fs::remove_file(path)?; fs::remove_file(path)?;
} }
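The shape of both hunks: Uri::as_path now hands out a borrowed &Path rather than an owned PathBuf, so the fs calls take the borrow directly and an owned path is created only at the event boundary via to_path_buf(). A hedged sketch with hypothetical stand-ins (not the editor's real Uri or file-event handler):

    use std::path::{Path, PathBuf};

    struct Uri(PathBuf); // hypothetical stand-in

    impl Uri {
        // Borrow instead of handing out an owned PathBuf up front.
        fn as_path(&self) -> Option<&Path> {
            Some(&self.0)
        }
    }

    struct FileEventHandler; // hypothetical stand-in

    impl FileEventHandler {
        // The handler needs ownership, so conversion happens here, once.
        fn file_changed(&self, path: PathBuf) {
            println!("changed: {}", path.display());
        }
    }

    fn main() {
        let uri = Uri(PathBuf::from("file.txt"));
        let path = uri.as_path().expect("URIs are valid paths");
        FileEventHandler.file_changed(path.to_path_buf());
    }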

@ -41,6 +41,7 @@ forth-lsp = { command = "forth-lsp" }
fortls = { command = "fortls", args = ["--lowercase_intrinsics"] } fortls = { command = "fortls", args = ["--lowercase_intrinsics"] }
fsharp-ls = { command = "fsautocomplete", config = { AutomaticWorkspaceInit = true } } fsharp-ls = { command = "fsautocomplete", config = { AutomaticWorkspaceInit = true } }
gleam = { command = "gleam", args = ["lsp"] } gleam = { command = "gleam", args = ["lsp"] }
glsl_analyzer = { command = "glsl_analyzer" }
graphql-language-service = { command = "graphql-lsp", args = ["server", "-m", "stream"] } graphql-language-service = { command = "graphql-lsp", args = ["server", "-m", "stream"] }
haskell-language-server = { command = "haskell-language-server-wrapper", args = ["--lsp"] } haskell-language-server = { command = "haskell-language-server-wrapper", args = ["--lsp"] }
idris2-lsp = { command = "idris2-lsp" } idris2-lsp = { command = "idris2-lsp" }
@ -448,7 +449,6 @@ file-types = [
{ glob = "composer.lock" }, { glob = "composer.lock" },
{ glob = ".watchmanconfig" }, { glob = ".watchmanconfig" },
"avsc", "avsc",
{ glob = ".prettierrc" },
"ldtk", "ldtk",
"ldtkl", "ldtkl",
] ]
@ -1243,6 +1243,23 @@ indent = { tab-width = 2, unit = " " }
name = "ocaml-interface" name = "ocaml-interface"
source = { git = "https://github.com/tree-sitter/tree-sitter-ocaml", rev = "9965d208337d88bbf1a38ad0b0fe49e5f5ec9677", subpath = "interface" } source = { git = "https://github.com/tree-sitter/tree-sitter-ocaml", rev = "9965d208337d88bbf1a38ad0b0fe49e5f5ec9677", subpath = "interface" }
[[language]]
name = "dune"
scope = "source.dune"
roots = ["dune-project"]
file-types = [{ glob = "dune-project" }, { glob = "dune" }]
comment-token = ";"
indent = { tab-width = 1, unit = " " }
grammar = "scheme"
auto-format = true
formatter = { command = "dune", args = ["format-dune-file"] }
[language.auto-pairs]
'(' = ')'
'{' = '}'
'[' = ']'
'"' = '"'
[[language]] [[language]]
name = "lua" name = "lua"
injection-regex = "lua" injection-regex = "lua"
@ -1288,7 +1305,7 @@ source = { git = "https://github.com/ikatyang/tree-sitter-vue", rev = "91fe27547
[[language]] [[language]]
name = "yaml" name = "yaml"
scope = "source.yaml" scope = "source.yaml"
file-types = ["yml", "yaml"] file-types = ["yml", "yaml", { glob = ".prettierrc" }]
comment-token = "#" comment-token = "#"
indent = { tab-width = 2, unit = " " } indent = { tab-width = 2, unit = " " }
language-servers = [ "yaml-language-server", "ansible-language-server" ] language-servers = [ "yaml-language-server", "ansible-language-server" ]
@ -1435,6 +1452,7 @@ file-types = ["glsl", "vert", "tesc", "tese", "geom", "frag", "comp" ]
comment-token = "//" comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" } block-comment-tokens = { start = "/*", end = "*/" }
indent = { tab-width = 4, unit = " " } indent = { tab-width = 4, unit = " " }
language-servers = [ "glsl_analyzer" ]
injection-regex = "glsl" injection-regex = "glsl"
[[grammar]] [[grammar]]
@ -2465,6 +2483,12 @@ injection-regex = "sml"
file-types = ["sml"] file-types = ["sml"]
block-comment-tokens = { start = "(*", end = "*)" } block-comment-tokens = { start = "(*", end = "*)" }
[language.auto-pairs]
'(' = ')'
'{' = '}'
'[' = ']'
'"' = '"'
[[grammar]] [[grammar]]
name = "sml" name = "sml"
source = { git = "https://github.com/Giorbo/tree-sitter-sml", rev = "bd4055d5554614520d4a0706b34dc0c317c6b608" } source = { git = "https://github.com/Giorbo/tree-sitter-sml", rev = "bd4055d5554614520d4a0706b34dc0c317c6b608" }
@ -3115,7 +3139,7 @@ indent = { tab-width = 4, unit = " " }
[[grammar]] [[grammar]]
name = "just" name = "just"
source = { git = "https://github.com/poliorcetics/tree-sitter-just", rev = "f58a8fd869035ac4653081401e6c2030251240ab" } source = { git = "https://github.com/poliorcetics/tree-sitter-just", rev = "6e28fa6cba511c694247cd802d1c3b14f8d34dbb" }
[[language]] [[language]]
name = "gn" name = "gn"
@ -3247,7 +3271,7 @@ text-width = 72
[[grammar]] [[grammar]]
name = "jjdescription" name = "jjdescription"
source = { git = "https://github.com/kareigu/tree-sitter-jjdescription", rev = "2ddec6cad07b366aee276a608e1daa2c29d3caf2" } source = { git = "https://github.com/kareigu/tree-sitter-jjdescription", rev = "23dd3dd18ee29bdd761642511aa314215801afd8" }
[[language]] [[language]]
name = "jq" name = "jq"
@ -3810,3 +3834,28 @@ language-servers = ["circom-lsp"]
[[grammar]] [[grammar]]
name = "circom" name = "circom"
source = { git = "https://github.com/Decurity/tree-sitter-circom", rev = "02150524228b1e6afef96949f2d6b7cc0aaf999e" } source = { git = "https://github.com/Decurity/tree-sitter-circom", rev = "02150524228b1e6afef96949f2d6b7cc0aaf999e" }
[[language]]
name = "snakemake"
scope = "source.snakemake"
roots = ["Snakefile", "config.yaml", "environment.yaml", "workflow/"]
file-types = ["smk", "Snakefile"]
comment-tokens = ["#", "##"]
indent = { tab-width = 2, unit = " " }
language-servers = ["pylsp" ]
[[grammar]]
name = "snakemake"
source = { git = "https://github.com/osthomas/tree-sitter-snakemake", rev = "e909815acdbe37e69440261ebb1091ed52e1dec6" }
[[language]]
name = "cylc"
scope = "source.cylc"
injection-regex = "cylc"
file-types = ["cylc", { glob = "suite.rc" }]
comment-tokens = "#"
indent = { tab-width = 4, unit = " " }
[[grammar]]
name = "cylc"
source = { git = "https://github.com/elliotfontaine/tree-sitter-cylc", rev = "30dd40d9bf23912e4aefa93eeb4c7090bda3d0f6" }

@ -0,0 +1,100 @@
(ERROR) @markup.bold
[
(jinja2_expression)
(jinja2_statement)
(jinja2_comment)
(jinja2_shebang)
] @special
(include_statement
directive: _ @keyword.directive
path: _ @string.special.path)
(comment) @comment.line
(graph_section
name: _? @label)
(task_section
name: (_
(task_name) @namespace))
(top_section
brackets_open: _ @punctuation.bracket
name: _? @label
brackets_close: _ @punctuation.bracket)
(sub_section_1
brackets_open: _ @punctuation.bracket
name: _? @label
brackets_close: _ @punctuation.bracket)
(sub_section_2
brackets_open: _ @punctuation.bracket
name: _? @label
brackets_close: _ @punctuation.bracket)
(runtime_section
brackets_open: _ @punctuation.bracket
name: _? @label
brackets_close: _ @punctuation.bracket)
(graph_setting
key: (_) @constant.numeric.integer
operator: (_)? @operator)
(quoted_graph_string
quotes_open: _ @string
quotes_close: _ @string)
(multiline_graph_string
quotes_open: _ @string
quotes_close: _ @string)
[
(graph_logical)
(graph_arrow)
(graph_parenthesis)
] @operator
(intercycle_annotation
(recurrence) @constant.numeric.integer)
(graph_task
xtrigger: _? @operator
suicide: _? @operator
name: _ @namespace)
(task_parameter
"<" @tag
name: (_)? @special
","? @tag
"="? @tag
selection: (_)? @special
">" @tag)
(intercycle_annotation
"[" @tag
(recurrence)? @constant.numeric.integer
"]" @tag)
(task_output
":" @tag
(nametag) @variable.other)
(task_output
"?"? @tag)
(setting
key: (key) @variable
operator: (_)? @operator
value: [
(unquoted_string) @string
(quoted_string) @string
(multiline_string) @string
(boolean) @constant.builtin.boolean
(integer) @constant.numeric.integer
]?)
(datetime) @constant.numeric.float

@ -0,0 +1,19 @@
[
(top_section)
(sub_section_1)
(sub_section_2)
(graph_section)
(runtime_section)
(task_section)
] @indent
[
(top_section)
(sub_section_1)
(sub_section_2)
(graph_section)
(runtime_section)
(task_section)
] @extend
(line_continuation) @indent.always

@ -0,0 +1,20 @@
((setting
key: (key) @key
(#match? @key "^script$|-script$|^script-")
value: (_
(string_content) @injection.content))
(#set! "injection.language" "bash"))
; Requires no spacing around "=" in environment settings for proper highlighting.
; Could be improved if Tree-sitter allowed specifying the target node of the injected
; language, instead of always using the root node.
; See this proposal:
; https://github.com/tree-sitter/tree-sitter/issues/3625
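; For example (hypothetical workflow snippet): under a task's [[[environment]]]
; sub-section, MYVAR=$HOME/data is parsed by the injected bash grammar as an
; assignment, while MYVAR = $HOME/data (spaced) would not be.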
((task_section
(sub_section_2
name: (_) @section_name
(#eq? @section_name "environment")
(setting) @injection.content))
(#set! "injection.language" "bash")
(#set! injection.combined)
(#set! injection.include-children))

@ -0,0 +1,23 @@
(comment) @comment.inside
(comment)+ @comment.around
(_
brackets_open: _
name: _?
brackets_close: _
_* @class.inside) @class.around
(setting
value: _? @function.inside) @function.around
(graph_setting
value: _? @function.inside) @function.around
(graph_string_content
(graph_task) @entry.inside)
(task_parameter
((_) @parameter.inside
.
","? @parameter.around) @parameter.around)

@ -4,10 +4,13 @@
] @keyword.directive ] @keyword.directive
[ [
"import"
"package" "package"
] @namespace ] @namespace
[
"import"
] @keyword.control.import
[ [
"foreign" "foreign"
"using" "using"
@ -200,7 +203,7 @@
(struct . (identifier) @type) (struct . (identifier) @type)
(field_type . (identifier) "." (identifier) @type) (field_type . (identifier) @keyword.storage.type "." (identifier) @type)
(bit_set_type (identifier) @type ";") (bit_set_type (identifier) @type ";")
@ -248,6 +251,8 @@
(using_statement (identifier) @namespace) (using_statement (identifier) @namespace)
(import_declaration (identifier) @keyword.storage.type)
; Parameters ; Parameters
(parameter (identifier) @variable.parameter ":" "="? (identifier)? @constant) (parameter (identifier) @variable.parameter ":" "="? (identifier)? @constant)

@ -0,0 +1,20 @@
Copyright (c) 2016 Max Brunsfeld
Copyright (c) 2023 Oliver Thomas
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

@ -0,0 +1,8 @@
; inherits: python
[
(rule_definition)
(rule_inheritance)
(module_definition)
(checkpoint_definition)
] @fold

@ -0,0 +1,76 @@
; inherits: python
; Compound directives
[
"rule"
"checkpoint"
"module"
] @keyword
; Top-level directives (e.g. configfile, include)
(module
(directive
name: _ @keyword))
; Subordinate directives (e.g. input, output)
((_)
body: (_
(directive
name: _ @label)))
; rule/module/checkpoint names
(rule_definition
name: (identifier) @type)
(module_definition
name: (identifier) @type)
(checkpoint_definition
name: (identifier) @type)
; Rule imports
(rule_import
"use" @keyword.import
"rule" @keyword.import
"from" @keyword.import
"exclude"? @keyword.import
"as"? @keyword.import
"with"? @keyword.import)
; Rule inheritance
(rule_inheritance
"use" @keyword
"rule" @keyword
"with" @keyword)
; Wildcard names
(wildcard (identifier) @variable)
(wildcard (flag) @variable.parameter.builtin)
; builtin variables
((identifier) @variable.builtin
(#any-of? @variable.builtin "checkpoints" "config" "gather" "rules" "scatter" "workflow"))
; References to directive labels in wildcard interpolations.
; The #any-of? queries are moved above the #has-ancestor? queries to
; short-circuit the potentially expensive tree traversal, if possible
; see:
; https://github.com/nvim-treesitter/nvim-treesitter/pull/4302#issuecomment-1685789790
; directive labels in wildcard context
((wildcard
(identifier) @label)
(#any-of? @label "input" "log" "output" "params" "resources" "threads" "wildcards"))
((wildcard
(attribute
object: (identifier) @label))
(#any-of? @label "input" "log" "output" "params" "resources" "threads" "wildcards"))
((wildcard
(subscript
value: (identifier) @label))
(#any-of? @label "input" "log" "output" "params" "resources" "threads" "wildcards"))
; directive labels in block context (e.g. within 'run:')
((identifier) @label
(#any-of? @label "input" "log" "output" "params" "resources" "threads" "wildcards"))

@ -0,0 +1,27 @@
; inherits: python
[
(rule_definition)
(checkpoint_definition)
(rule_inheritance)
(module_definition)
] @indent
[
(rule_definition)
(checkpoint_definition)
(rule_inheritance)
(module_definition)
] @extend
(directive) @indent
(directive) @extend
(rule_import
"with"
":") @indent
(rule_import
"with"
":") @extend

@ -0,0 +1,5 @@
; inherits: python
(wildcard
(constraint) @injection.content
(#set! injection.language "regex"))

@ -15,6 +15,7 @@
"keyword.control" = { fg = "purple" } "keyword.control" = { fg = "purple" }
"keyword.control.import" = { fg = "red" } "keyword.control.import" = { fg = "red" }
"keyword.directive" = { fg = "purple" } "keyword.directive" = { fg = "purple" }
"keyword.storage" = { fg = "purple" }
"label" = { fg = "purple" } "label" = { fg = "purple" }
"namespace" = { fg = "blue" } "namespace" = { fg = "blue" }
"operator" = { fg = "purple" } "operator" = { fg = "purple" }
