Merge master into docs-static-cmds

pull/11950/head
David Crespo
commit e0234fb434

@@ -16,6 +16,7 @@ jobs:
 steps:
 - name: Checkout sources
   uses: actions/checkout@v4
 - name: Install stable toolchain
   uses: dtolnay/rust-toolchain@1.70
@@ -107,6 +108,9 @@ jobs:
 - name: Validate queries
   run: cargo xtask query-check
+- name: Validate themes
+  run: cargo xtask theme-check
 - name: Generate docs
   run: cargo xtask docgen

@@ -14,7 +14,7 @@ jobs:
   uses: actions/checkout@v4
 - name: Install nix
-  uses: cachix/install-nix-action@V27
+  uses: cachix/install-nix-action@v30
 - name: Authenticate with Cachix
   uses: cachix/cachix-action@v15

Cargo.lock (generated)

@@ -68,9 +68,9 @@ dependencies = [
 [[package]]
 name = "anyhow"
-version = "1.0.87"
+version = "1.0.90"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "10f00e1f6e58a40e807377c75c6a7f97bf9044fab57816f2414e6f5f4499d7b8"
+checksum = "37bf3594c4c988a53154954629820791dde498571819ae4ca50ca811e060cc95"
 [[package]]
 name = "arc-swap"
@@ -136,9 +136,9 @@ checksum = "df8670b8c7b9dae1793364eafadf7239c40d669904660c5960d74cfd80b46a53"
 [[package]]
 name = "cc"
-version = "1.1.18"
+version = "1.1.31"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b62ac837cdb5cb22e10a256099b4fc502b1dfe560cb282963a974d7abd80e476"
+checksum = "c2e7962b54006dcfcc61cb72735f4d89bb97061dd6a7ed882ec6b8ee53714c6f"
 dependencies = [
  "shlex",
 ]
@@ -355,9 +355,9 @@ checksum = "e8c02a5121d4ea3eb16a80748c74f5549a5665e4c21333c6098f283870fbdea6"
 [[package]]
 name = "fern"
-version = "0.6.2"
+version = "0.7.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d9f0c14694cbd524c8720dd69b0e3179344f04ebb5f90f2e4a440c6ea3b2f1ee"
+checksum = "69ff9c9d5fb3e6da8ac2f77ab76fe7e8087d512ce095200f8f29ac5b656cf6dc"
 dependencies = [
  "log",
 ]
@@ -412,15 +412,15 @@ dependencies = [
 [[package]]
 name = "futures-core"
-version = "0.3.30"
+version = "0.3.31"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dfc6580bb841c5a68e9ef15c77ccc837b40a7504914d52e47b8b0e9bbda25a1d"
+checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e"
 [[package]]
 name = "futures-executor"
-version = "0.3.30"
+version = "0.3.31"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a576fc72ae164fca6b9db127eaa9a9dda0d61316034f33a0a0d4eda41f02b01d"
+checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f"
 dependencies = [
  "futures-core",
  "futures-task",
@@ -429,15 +429,15 @@ dependencies = [
 [[package]]
 name = "futures-task"
-version = "0.3.30"
+version = "0.3.31"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "38d84fa142264698cdce1a9f9172cf383a0c82de1bddcf3092901442c4097004"
+checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988"
 [[package]]
 name = "futures-util"
-version = "0.3.30"
+version = "0.3.31"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3d6401deb83407ab3da39eba7e33987a73c3df0c82b4bb5813ee871c19c41d48"
+checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81"
 dependencies = [
  "futures-core",
  "futures-task",
@@ -1609,9 +1609,9 @@ dependencies = [
 [[package]]
 name = "libc"
-version = "0.2.158"
+version = "0.2.161"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d8adc4bb1803a324070e64a98ae98f38934d91957a99cfb3a43dcbc01bc56439"
+checksum = "8e9489c2807c139ffd9c1794f4af0ebe86a828db53ecdc7fea2111d0fed085d1"
 [[package]]
 name = "libloading"
@@ -1753,9 +1753,9 @@ dependencies = [
 [[package]]
 name = "once_cell"
-version = "1.19.0"
+version = "1.20.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92"
+checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775"
 [[package]]
 name = "open"
@@ -1838,9 +1838,9 @@ checksum = "744a264d26b88a6a7e37cbad97953fa233b94d585236310bcbc88474b4092d79"
 [[package]]
 name = "pulldown-cmark"
-version = "0.12.1"
+version = "0.12.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "666f0f59e259aea2d72e6012290c09877a780935cc3c18b1ceded41f3890d59c"
+checksum = "f86ba2052aebccc42cbbb3ed234b8b13ce76f75c3551a303cb2bcffcff12bb14"
 dependencies = [
  "bitflags",
  "memchr",
@@ -1914,9 +1914,9 @@ dependencies = [
 [[package]]
 name = "regex"
-version = "1.10.6"
+version = "1.11.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4219d74c6b67a3654a9fbebc4b419e22126d13d2f3c4a07ee0cb61ff79a79619"
+checksum = "38200e5ee88914975b69f657f0801b6f6dccafd44fd9326302a4aaeecfacb1d8"
 dependencies = [
  "aho-corasick",
  "memchr",
@@ -1926,9 +1926,9 @@ dependencies = [
 [[package]]
 name = "regex-automata"
-version = "0.4.7"
+version = "0.4.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "38caf58cc5ef2fed281f89292ef23f6365465ed9a41b7a7754eb4e26496c92df"
+checksum = "368758f23274712b504848e9d5a6f010445cc8b87a7cdb4d7cbee666c1288da3"
 dependencies = [
  "aho-corasick",
  "memchr",
@@ -1950,9 +1950,9 @@ dependencies = [
 [[package]]
 name = "regex-syntax"
-version = "0.8.4"
+version = "0.8.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7a66a03ae7c801facd77a29370b4faec201768915ac14a721ba36f20bc9c209b"
+checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c"
 [[package]]
 name = "ropey"
@@ -1972,9 +1972,9 @@ checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f"
 [[package]]
 name = "rustix"
-version = "0.38.36"
+version = "0.38.37"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3f55e80d50763938498dd5ebb18647174e0c76dc38c5505294bb224624f30f36"
+checksum = "8acb788b847c24f28525660c4d7758620a7210875711f79e7f663cc152726811"
 dependencies = [
  "bitflags",
  "errno",
@@ -2026,9 +2026,9 @@ dependencies = [
 [[package]]
 name = "serde_json"
-version = "1.0.128"
+version = "1.0.132"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6ff5456707a1de34e7e37f2a6fd3d3f808c318259cbd01ab6377795054b483d8"
+checksum = "d726bfaff4b320266d395898905d0eba0345aae23b54aee3a737e260fd46db03"
 dependencies = [
  "itoa",
  "memchr",
@@ -2192,9 +2192,9 @@ dependencies = [
 [[package]]
 name = "tempfile"
-version = "3.12.0"
+version = "3.13.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "04cbcdd0c794ebb0d4cf35e88edd2f7d2c4c3e9a5a6dab322839b321c6a87a64"
+checksum = "f0f2c9fc62d0beef6951ccffd757e241266a2c833136efbe35af6cd2567dca5b"
 dependencies = [
  "cfg-if",
  "fastrand",
@@ -2225,18 +2225,18 @@ dependencies = [
 [[package]]
 name = "thiserror"
-version = "1.0.63"
+version = "1.0.64"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c0342370b38b6a11b6cc11d6a805569958d54cfa061a29969c3b5ce2ea405724"
+checksum = "d50af8abc119fb8bb6dbabcfa89656f46f84aa0ac7688088608076ad2b459a84"
 dependencies = [
  "thiserror-impl",
 ]
 [[package]]
 name = "thiserror-impl"
-version = "1.0.63"
+version = "1.0.64"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a4558b58466b9ad7ca0f102865eccc95938dca1a74a856f2b57b6629050da261"
+checksum = "08904e7672f5eb876eaaf87e0ce17857500934f4981c4a0ab2b4aa98baac7fc3"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -2401,9 +2401,9 @@ dependencies = [
 [[package]]
 name = "unicode-segmentation"
-version = "1.11.0"
+version = "1.12.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d4c87d22b6e3f4a18d4d40ef354e97c90fcb14dd91d7dc0aa9d8a1172ebf7202"
+checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493"
 [[package]]
 name = "unicode-width"

@@ -37,8 +37,8 @@ All shortcuts/keymaps can be found [in the documentation on the website](https:/
 - Built-in language server support
 - Smart, incremental syntax highlighting and code editing via tree-sitter
-It's a terminal-based editor first, but I'd like to explore a custom renderer
-(similar to Emacs) in wgpu or skulpin.
+Although it's primarily a terminal-based editor, I am interested in exploring
+a custom renderer (similar to Emacs) using wgpu or skulpin.
 Note: Only certain languages have indentation definitions at the moment. Check
 `runtime/queries/<lang>/` for `indents.scm`.
@@ -47,7 +47,7 @@ Note: Only certain languages have indentation definitions at the moment. Check
 [Installation documentation](https://docs.helix-editor.com/install.html).
-[![Packaging status](https://repology.org/badge/vertical-allrepos/helix.svg?exclude_unsupported=1)](https://repology.org/project/helix/versions)
+[![Packaging status](https://repology.org/badge/vertical-allrepos/helix-editor.svg?exclude_unsupported=1)](https://repology.org/project/helix-editor/versions)
 # Contributing

@@ -19,6 +19,7 @@
 | cairo | ✓ | ✓ | ✓ | `cairo-language-server` |
 | capnp | ✓ | | ✓ | |
 | cel | ✓ | | | |
+| circom | ✓ | | | `circom-lsp` |
 | clojure | ✓ | | | `clojure-lsp` |
 | cmake | ✓ | ✓ | ✓ | `cmake-language-server` |
 | comment | ✓ | | | |
@@ -38,6 +39,7 @@
 | dockerfile | ✓ | ✓ | | `docker-langserver` |
 | dot | ✓ | | | `dot-language-server` |
 | dtd | ✓ | | | |
+| dune | ✓ | | | |
 | earthfile | ✓ | ✓ | ✓ | `earthlyls` |
 | edoc | ✓ | | | |
 | eex | ✓ | | | |
@@ -48,7 +50,7 @@
 | elvish | ✓ | | | `elvish` |
 | env | ✓ | ✓ | | |
 | erb | ✓ | | | |
-| erlang | ✓ | ✓ | | `erlang_ls` |
+| erlang | ✓ | ✓ | | `erlang_ls`, `elp` |
 | esdl | ✓ | | | |
 | fidl | ✓ | | | |
 | fish | ✓ | ✓ | ✓ | |
@@ -67,7 +69,7 @@
 | gjs | ✓ | ✓ | ✓ | `typescript-language-server`, `vscode-eslint-language-server`, `ember-language-server` |
 | gleam | ✓ | ✓ | | `gleam` |
 | glimmer | ✓ | | | `ember-language-server` |
-| glsl | ✓ | ✓ | ✓ | |
+| glsl | ✓ | ✓ | ✓ | `glsl_analyzer` |
 | gn | ✓ | | | |
 | go | ✓ | ✓ | ✓ | `gopls`, `golangci-lint-langserver` |
 | godot-resource | ✓ | ✓ | | |
@@ -86,7 +88,7 @@
 | hocon | ✓ | ✓ | ✓ | |
 | hoon | ✓ | | | |
 | hosts | ✓ | | | |
-| html | ✓ | | | `vscode-html-language-server` |
+| html | ✓ | | | `vscode-html-language-server`, `superhtml` |
 | hurl | ✓ | ✓ | ✓ | |
 | hyprlang | ✓ | | ✓ | |
 | idris | | | | `idris2-lsp` |
@@ -163,7 +165,7 @@
 | protobuf | ✓ | ✓ | ✓ | `bufls`, `pb` |
 | prql | ✓ | | | |
 | purescript | ✓ | ✓ | | `purescript-language-server` |
-| python | ✓ | ✓ | ✓ | `pylsp` |
+| python | ✓ | ✓ | ✓ | `ruff`, `jedi-language-server`, `pylsp` |
 | qml | ✓ | | ✓ | `qmlls` |
 | r | ✓ | | | `R` |
 | racket | ✓ | | ✓ | `racket` |
@@ -184,6 +186,7 @@
 | smali | ✓ | | ✓ | |
 | smithy | ✓ | | | `cs` |
 | sml | ✓ | | | |
+| snakemake | ✓ | | ✓ | `pylsp` |
 | solidity | ✓ | ✓ | | `solc` |
 | spicedb | ✓ | | | |
 | sql | ✓ | ✓ | | |

@@ -72,7 +72,7 @@
 | `:sort` | Sort ranges in selection. |
 | `:rsort` | Sort ranges in selection in reverse order. |
 | `:reflow` | Hard-wrap the current selection of lines to a given width. |
-| `:tree-sitter-subtree`, `:ts-subtree` | Display tree sitter subtree under cursor, primarily for debugging queries. |
+| `:tree-sitter-subtree`, `:ts-subtree` | Display the smallest tree-sitter subtree that spans the primary selection, primarily for debugging queries. |
 | `:config-reload` | Refresh user config. |
 | `:config-open` | Open the user config.toml file. |
 | `:config-open-workspace` | Open the workspace config.toml file. |

@@ -145,6 +145,9 @@ Normal mode is the default mode when you launch helix. You can return to it from
 | `Alt-i`, `Alt-down` | Shrink syntax tree object selection (**TS**) | `shrink_selection` |
 | `Alt-p`, `Alt-left` | Select previous sibling node in syntax tree (**TS**) | `select_prev_sibling` |
 | `Alt-n`, `Alt-right` | Select next sibling node in syntax tree (**TS**) | `select_next_sibling` |
+| `Alt-a` | Select all sibling nodes in syntax tree (**TS**) | `select_all_siblings` |
+| `Alt-e` | Move to end of parent node in syntax tree (**TS**) | `move_parent_node_end` |
+| `Alt-b` | Move to start of parent node in syntax tree (**TS**) | `move_parent_node_start` |
 ### Search

@@ -17,7 +17,7 @@
 - [Chocolatey](#chocolatey)
 - [MSYS2](#msys2)
-[![Packaging status](https://repology.org/badge/vertical-allrepos/helix.svg)](https://repology.org/project/helix/versions)
+[![Packaging status](https://repology.org/badge/vertical-allrepos/helix-editor.svg)](https://repology.org/project/helix-editor/versions)
 ## Linux

@@ -283,7 +283,6 @@ These scopes are used for theming the editor interface:
 | `ui.debug.active` | Indicator for the line at which debugging execution is paused at, found in the gutter |
 | `ui.gutter` | Gutter |
 | `ui.gutter.selected` | Gutter for the line the cursor is on |
-| `ui.highlight.frameline` | Line at which debugging execution is paused at |
 | `ui.linenr` | Line numbers |
 | `ui.linenr.selected` | Line number for the line the cursor is on |
 | `ui.statusline` | Statusline |
@@ -320,6 +319,7 @@ These scopes are used for theming the editor interface:
 | `ui.selection` | For selections in the editing area |
 | `ui.selection.primary` | |
 | `ui.highlight` | Highlighted lines in the picker preview |
+| `ui.highlight.frameline` | Line at which debugging execution is paused at |
 | `ui.cursorline.primary` | The line of the primary cursor ([if cursorline is enabled][editor-section]) |
 | `ui.cursorline.secondary` | The lines of any other cursors ([if cursorline is enabled][editor-section]) |
 | `ui.cursorcolumn.primary` | The column of the primary cursor ([if cursorcolumn is enabled][editor-section]) |

@@ -7,3 +7,27 @@ can be accessed via the command `hx --tutor` or `:tutor`.
 > 💡 Currently, not all functionality is fully documented, please refer to the
 > [key mappings](./keymap.md) list.
+## Modes
+Helix is a modal editor, meaning it has different modes for different tasks. The main modes are:
+* [Normal mode](./keymap.md#normal-mode): For navigation and editing commands. This is the default mode.
+* [Insert mode](./keymap.md#insert-mode): For typing text directly into the document. Access by typing `i` in normal mode.
+* [Select/extend mode](./keymap.md#select--extend-mode): For making selections and performing operations on them. Access by typing `v` in normal mode.
+## Buffers
+Buffers are in-memory representations of files. You can have multiple buffers open at once. Use [pickers](./pickers.md) or commands like `:buffer-next` and `:buffer-previous` to open buffers or switch between them.
+## Selection-first editing
+Inspired by [Kakoune](http://kakoune.org/), Helix follows the `selection → action` model. This means that whatever you are going to act on (a word, a paragraph, a line, etc.) is selected first and the action itself (delete, change, yank, etc.) comes second. A cursor is simply a single width selection.
+## Multiple selections
+Also inspired by Kakoune, multiple selections are a core mode of interaction in Helix. For example, the standard way of replacing multiple instances of a word is to first select all instances (so there is one selection per instance) and then use the change action (`c`) to edit them all at the same time.
+## Motions
+Motions are commands that move the cursor or modify selections. They're used for navigation and text manipulation. Examples include `w` to move to the next word, or `f` to find a character. See the [Movement](./keymap.md#movement) section of the keymap for more motions.
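A quick worked example of the selection → action model described above (example mine, not part of the commit; it assumes the default keybindings documented in keymap.md): in normal mode, `w` selects from the cursor to the start of the next word and `d` then deletes that selection, so `wd` reads as "select a word, then act on it". For the multiple-selection workflow, `%` selects the whole buffer, `s` prompts for a regex and splits the selection into one selection per match, and a single `c` then changes every match at once.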

@@ -1,17 +1,12 @@
 {
 "nodes": {
 "crane": {
-"inputs": {
-"nixpkgs": [
-"nixpkgs"
-]
-},
 "locked": {
-"lastModified": 1709610799,
+"lastModified": 1727974419,
-"narHash": "sha256-5jfLQx0U9hXbi2skYMGodDJkIgffrjIOgMRjZqms2QE=",
+"narHash": "sha256-WD0//20h+2/yPGkO88d2nYbb23WMWYvnRyDQ9Dx4UHg=",
 "owner": "ipetkov",
 "repo": "crane",
-"rev": "81c393c776d5379c030607866afef6406ca1be57",
+"rev": "37e4f9f0976cb9281cd3f0c70081e5e0ecaee93f",
 "type": "github"
 },
 "original": {
@@ -25,11 +20,11 @@
 "systems": "systems"
 },
 "locked": {
-"lastModified": 1709126324,
+"lastModified": 1726560853,
-"narHash": "sha256-q6EQdSeUZOG26WelxqkmR7kArjgWCdw5sfJVHPH/7j8=",
+"narHash": "sha256-X6rJYSESBVr3hBoH0WbKE5KvhPU5bloyZ2L4K60/fPQ=",
 "owner": "numtide",
 "repo": "flake-utils",
-"rev": "d465f4819400de7c8d874d50b982301f28a84605",
+"rev": "c1dfcf08411b08f6b8615f7d8971a2bfa81d5e8a",
 "type": "github"
 },
 "original": {
@@ -40,11 +35,11 @@
 },
 "nixpkgs": {
 "locked": {
-"lastModified": 1709479366,
+"lastModified": 1728018373,
-"narHash": "sha256-n6F0n8UV6lnTZbYPl1A9q1BS0p4hduAv1mGAP17CVd0=",
+"narHash": "sha256-NOiTvBbRLIOe5F6RbHaAh6++BNjsb149fGZd1T4+KBg=",
 "owner": "nixos",
 "repo": "nixpkgs",
-"rev": "b8697e57f10292a6165a20f03d2f42920dfaf973",
+"rev": "bc947f541ae55e999ffdb4013441347d83b00feb",
 "type": "github"
 },
 "original": {
@@ -64,19 +59,16 @@
 },
 "rust-overlay": {
 "inputs": {
-"flake-utils": [
-"flake-utils"
-],
 "nixpkgs": [
 "nixpkgs"
 ]
 },
 "locked": {
-"lastModified": 1709604635,
+"lastModified": 1728268235,
-"narHash": "sha256-le4fwmWmjGRYWwkho0Gr7mnnZndOOe4XGbLw68OvF40=",
+"narHash": "sha256-lJMFnMO4maJuNO6PQ5fZesrTmglze3UFTTBuKGwR1Nw=",
 "owner": "oxalica",
 "repo": "rust-overlay",
-"rev": "e86c0fb5d3a22a5f30d7f64ecad88643fe26449d",
+"rev": "25685cc2c7054efc31351c172ae77b21814f2d42",
 "type": "github"
 },
 "original": {

@@ -6,15 +6,9 @@
 flake-utils.url = "github:numtide/flake-utils";
 rust-overlay = {
 url = "github:oxalica/rust-overlay";
-inputs = {
-nixpkgs.follows = "nixpkgs";
-flake-utils.follows = "flake-utils";
-};
-};
-crane = {
-url = "github:ipetkov/crane";
 inputs.nixpkgs.follows = "nixpkgs";
 };
+crane.url = "github:ipetkov/crane";
 };
 outputs = {
@@ -114,7 +108,7 @@
 if pkgs.stdenv.isLinux
 then pkgs.stdenv
 else pkgs.clangStdenv;
-rustFlagsEnv = pkgs.lib.optionalString stdenv.isLinux "-C link-arg=-fuse-ld=lld -C target-cpu=native -Clink-arg=-Wl,--no-rosegment";
+rustFlagsEnv = pkgs.lib.optionalString stdenv.isLinux "-C link-arg=-fuse-ld=lld -C target-cpu=native -Clink-arg=-Wl,--no-rosegment --cfg tokio_unstable";
 rustToolchain = pkgs.pkgsBuildHost.rust-bin.fromRustupToolchainFile ./rust-toolchain.toml;
 craneLibMSRV = (crane.mkLib pkgs).overrideToolchain rustToolchain;
 craneLibStable = (crane.mkLib pkgs).overrideToolchain pkgs.pkgsBuildHost.rust-bin.stable.latest.default;

@@ -22,7 +22,7 @@ helix-loader = { path = "../helix-loader" }
 ropey = { version = "1.6.1", default-features = false, features = ["simd"] }
 smallvec = "1.13"
 smartstring = "1.0.1"
-unicode-segmentation = "1.11"
+unicode-segmentation = "1.12"
 # unicode-width is changing width definitions
 # that both break our logic and disagree with common
 # width definitions in terminals, we need to replace it.
@@ -32,7 +32,7 @@ unicode-width = "=0.1.12"
 unicode-general-category = "0.6"
 slotmap.workspace = true
 tree-sitter.workspace = true
-once_cell = "1.19"
+once_cell = "1.20"
 arc-swap = "1"
 regex = "1"
 bitflags = "2.6"

@@ -9,6 +9,24 @@ use crate::{
 use helix_stdx::rope::RopeSliceExt;
 use std::borrow::Cow;
+pub const DEFAULT_COMMENT_TOKEN: &str = "//";
+/// Returns the longest matching comment token of the given line (if it exists).
+pub fn get_comment_token<'a, S: AsRef<str>>(
+text: RopeSlice,
+tokens: &'a [S],
+line_num: usize,
+) -> Option<&'a str> {
+let line = text.line(line_num);
+let start = line.first_non_whitespace_char()?;
+tokens
+.iter()
+.map(AsRef::as_ref)
+.filter(|token| line.slice(start..).starts_with(token))
+.max_by_key(|token| token.len())
+}
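A minimal usage sketch of the new helper (illustration only, not part of the commit; it assumes the function is reachable as `helix_core::comment::get_comment_token` and that `Rope` is the re-export used elsewhere in this crate):

use helix_core::{comment::get_comment_token, Rope};

fn main() {
    // With both "//" and "///" registered, the longest token that actually
    // prefixes the line wins, so line 0 (a doc comment) yields "///".
    let text = Rope::from("/// doc comment\n// plain comment\n");
    let tokens = ["//", "///"];
    assert_eq!(get_comment_token(text.slice(..), &tokens, 0), Some("///"));
    assert_eq!(get_comment_token(text.slice(..), &tokens, 1), Some("//"));
}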
 /// Given text, a comment token, and a set of line indices, returns the following:
 /// - Whether the given lines should be considered commented
 /// - If any of the lines are uncommented, all lines are considered as such.
@@ -28,21 +46,20 @@ fn find_line_comment(
 let mut min = usize::MAX; // minimum col for first_non_whitespace_char
 let mut margin = 1;
 let token_len = token.chars().count();
 for line in lines {
 let line_slice = text.line(line);
 if let Some(pos) = line_slice.first_non_whitespace_char() {
 let len = line_slice.len_chars();
-if pos < min {
-min = pos;
-}
+min = std::cmp::min(min, pos);
 // line can be shorter than pos + token len
 let fragment = Cow::from(line_slice.slice(pos..std::cmp::min(pos + token.len(), len)));
-if fragment != token {
 // as soon as one of the non-blank lines doesn't have a comment, the whole block is
 // considered uncommented.
+if fragment != token {
 commented = false;
 }
@@ -56,6 +73,7 @@ fn find_line_comment(
 to_change.push(line);
 }
 }
 (commented, to_change, min, margin)
 }
@@ -63,7 +81,7 @@ fn find_line_comment(
 pub fn toggle_line_comments(doc: &Rope, selection: &Selection, token: Option<&str>) -> Transaction {
 let text = doc.slice(..);
-let token = token.unwrap_or("//");
+let token = token.unwrap_or(DEFAULT_COMMENT_TOKEN);
 let comment = Tendril::from(format!("{} ", token));
 let mut lines: Vec<usize> = Vec::with_capacity(selection.len());
@@ -317,56 +335,87 @@ pub fn split_lines_of_selection(text: RopeSlice, selection: &Selection) -> Selec
 mod test {
 use super::*;
+mod find_line_comment {
+use super::*;
 #[test]
-fn test_find_line_comment() {
+fn not_commented() {
 // four lines, two space indented, except for line 1 which is blank.
-let mut doc = Rope::from(" 1\n\n 2\n 3");
+let doc = Rope::from(" 1\n\n 2\n 3");
-// select whole document
-let mut selection = Selection::single(0, doc.len_chars() - 1);
 let text = doc.slice(..);
 let res = find_line_comment("//", text, 0..3);
-// (commented = true, to_change = [line 0, line 2], min = col 2, margin = 0)
+// (commented = false, to_change = [line 0, line 2], min = col 2, margin = 0)
 assert_eq!(res, (false, vec![0, 2], 2, 0));
+}
+#[test]
+fn is_commented() {
+// three lines where the second line is empty.
+let doc = Rope::from("// hello\n\n// there");
+let res = find_line_comment("//", doc.slice(..), 0..3);
+// (commented = true, to_change = [line 0, line 2], min = col 0, margin = 1)
+assert_eq!(res, (true, vec![0, 2], 0, 1));
+}
+}
+// TODO: account for uncommenting with uneven comment indentation
+mod toggle_line_comment {
+use super::*;
+#[test]
+fn comment() {
+// four lines, two space indented, except for line 1 which is blank.
+let mut doc = Rope::from(" 1\n\n 2\n 3");
+// select whole document
+let selection = Selection::single(0, doc.len_chars() - 1);
-// comment
 let transaction = toggle_line_comments(&doc, &selection, None);
 transaction.apply(&mut doc);
-selection = selection.map(transaction.changes());
 assert_eq!(doc, " // 1\n\n // 2\n // 3");
+}
+#[test]
+fn uncomment() {
+let mut doc = Rope::from(" // 1\n\n // 2\n // 3");
+let mut selection = Selection::single(0, doc.len_chars() - 1);
-// uncomment
 let transaction = toggle_line_comments(&doc, &selection, None);
 transaction.apply(&mut doc);
 selection = selection.map(transaction.changes());
 assert_eq!(doc, " 1\n\n 2\n 3");
 assert!(selection.len() == 1); // to ignore the selection unused warning
+}
-// 0 margin comments
-doc = Rope::from(" //1\n\n //2\n //3");
-// reset the selection.
-selection = Selection::single(0, doc.len_chars() - 1);
+#[test]
+fn uncomment_0_margin_comments() {
+let mut doc = Rope::from(" //1\n\n //2\n //3");
+let mut selection = Selection::single(0, doc.len_chars() - 1);
 let transaction = toggle_line_comments(&doc, &selection, None);
 transaction.apply(&mut doc);
 selection = selection.map(transaction.changes());
 assert_eq!(doc, " 1\n\n 2\n 3");
 assert!(selection.len() == 1); // to ignore the selection unused warning
+}
-// 0 margin comments, with no space
-doc = Rope::from("//");
-// reset the selection.
-selection = Selection::single(0, doc.len_chars() - 1);
+#[test]
+fn uncomment_0_margin_comments_with_no_space() {
+let mut doc = Rope::from("//");
+let mut selection = Selection::single(0, doc.len_chars() - 1);
 let transaction = toggle_line_comments(&doc, &selection, None);
 transaction.apply(&mut doc);
 selection = selection.map(transaction.changes());
 assert_eq!(doc, "");
 assert!(selection.len() == 1); // to ignore the selection unused warning
+}
-// TODO: account for uncommenting with uneven comment indentation
 }
 #[test]
@@ -413,4 +462,32 @@ mod test {
 transaction.apply(&mut doc);
 assert_eq!(doc, "");
 }
+/// Test, if `get_comment_tokens` works, even if the content of the file includes chars, whose
+/// byte size unequal the amount of chars
+#[test]
+fn test_get_comment_with_char_boundaries() {
+let rope = Rope::from("··");
+let tokens = ["//", "///"];
+assert_eq!(
+super::get_comment_token(rope.slice(..), tokens.as_slice(), 0),
+None
+);
+}
+/// Test for `get_comment_token`.
+///
+/// Assuming the comment tokens are stored as `["///", "//"]`, `get_comment_token` should still
+/// return `///` instead of `//` if the user is in a doc-comment section.
+#[test]
+fn test_use_longest_comment() {
+let text = Rope::from(" /// amogus");
+let tokens = ["///", "//"];
+assert_eq!(
+super::get_comment_token(text.slice(..), tokens.as_slice(), 0),
+Some("///")
+);
+}
 }

@@ -2692,6 +2692,8 @@ fn pretty_print_tree_impl<W: fmt::Write>(
 }
 write!(fmt, "({}", node.kind())?;
+} else {
+write!(fmt, " \"{}\"", node.kind())?;
 }
 // Handle children.
@@ -2950,7 +2952,7 @@ mod test {
 #[test]
 fn test_pretty_print() {
 let source = r#"// Hello"#;
-assert_pretty_print("rust", source, "(line_comment)", 0, source.len());
+assert_pretty_print("rust", source, "(line_comment \"//\")", 0, source.len());
 // A large tree should be indented with fields:
 let source = r#"fn main() {
@@ -2960,16 +2962,16 @@ mod test {
 "rust",
 source,
 concat!(
-"(function_item\n",
+"(function_item \"fn\"\n",
 " name: (identifier)\n",
-" parameters: (parameters)\n",
+" parameters: (parameters \"(\" \")\")\n",
-" body: (block\n",
+" body: (block \"{\"\n",
 " (expression_statement\n",
 " (macro_invocation\n",
-" macro: (identifier)\n",
+" macro: (identifier) \"!\"\n",
-" (token_tree\n",
+" (token_tree \"(\"\n",
-" (string_literal\n",
+" (string_literal \"\"\"\n",
-" (string_content)))))))",
+" (string_content) \"\"\") \")\")) \";\") \"}\"))",
 ),
 0,
 source.len(),
@@ -2981,7 +2983,7 @@ mod test {
 // Error nodes are printed as errors:
 let source = r#"}{"#;
-assert_pretty_print("rust", source, "(ERROR)", 0, source.len());
+assert_pretty_print("rust", source, "(ERROR \"}\" \"{\")", 0, source.len());
 // Fields broken under unnamed nodes are determined correctly.
 // In the following source, `object` belongs to the `singleton_method`
@@ -2996,11 +2998,11 @@ mod test {
 "ruby",
 source,
 concat!(
-"(singleton_method\n",
+"(singleton_method \"def\"\n",
-" object: (self)\n",
+" object: (self) \".\"\n",
 " name: (identifier)\n",
 " body: (body_statement\n",
-" (true)))"
+" (true)) \"end\")"
 ),
 0,
 source.len(),

@@ -1,12 +1,18 @@
-use std::path::{Path, PathBuf};
+use std::{
+fmt,
+path::{Path, PathBuf},
+sync::Arc,
+};
 /// A generic pointer to a file location.
 ///
 /// Currently this type only supports paths to local files.
+///
+/// Cloning this type is cheap: the internal representation uses an Arc.
 #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
 #[non_exhaustive]
 pub enum Uri {
-File(PathBuf),
+File(Arc<Path>),
 }
 impl Uri {
@@ -23,26 +29,18 @@ impl Uri {
 Self::File(path) => Some(path),
 }
 }
-pub fn as_path_buf(self) -> Option<PathBuf> {
-match self {
-Self::File(path) => Some(path),
-}
-}
 }
 impl From<PathBuf> for Uri {
 fn from(path: PathBuf) -> Self {
-Self::File(path)
+Self::File(path.into())
 }
 }
-impl TryFrom<Uri> for PathBuf {
+impl fmt::Display for Uri {
-type Error = ();
+fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+match self {
-fn try_from(uri: Uri) -> Result<Self, Self::Error> {
+Self::File(path) => write!(f, "{}", path.display()),
-match uri {
-Uri::File(path) => Ok(path),
 }
 }
 }
@@ -59,11 +57,16 @@ pub enum UrlConversionErrorKind {
 UnableToConvert,
 }
-impl std::fmt::Display for UrlConversionError {
+impl fmt::Display for UrlConversionError {
-fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 match self.kind {
 UrlConversionErrorKind::UnsupportedScheme => {
-write!(f, "unsupported scheme in URL: {}", self.source.scheme())
+write!(
+f,
+"unsupported scheme '{}' in URL {}",
+self.source.scheme(),
+self.source
+)
 }
 UrlConversionErrorKind::UnableToConvert => {
 write!(f, "unable to convert URL to file path: {}", self.source)
@@ -77,7 +80,7 @@ impl std::error::Error for UrlConversionError {}
 fn convert_url_to_uri(url: &url::Url) -> Result<Uri, UrlConversionErrorKind> {
 if url.scheme() == "file" {
 url.to_file_path()
-.map(|path| Uri::File(helix_stdx::path::normalize(path)))
+.map(|path| Uri::File(helix_stdx::path::normalize(path).into()))
 .map_err(|_| UrlConversionErrorKind::UnableToConvert)
 } else {
 Err(UrlConversionErrorKind::UnsupportedScheme)
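A small standalone sketch (mine, not part of the commit) of why the switch from `PathBuf` to `Arc<Path>` backs the new "Cloning this type is cheap" doc comment: cloning only bumps a reference count instead of copying the path's bytes.

use std::path::Path;
use std::sync::Arc;

fn main() {
    // Same conversion the diff relies on: PathBuf/&Path -> Arc<Path> via From/Into.
    let uri_path: Arc<Path> = Arc::from(Path::new("/tmp/example.rs"));
    let alias = Arc::clone(&uri_path); // O(1): refcount bump, no allocation or byte copy
    assert_eq!(Arc::strong_count(&uri_path), 2);
    assert_eq!(&*alias, Path::new("/tmp/example.rs"));
}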

@@ -24,4 +24,4 @@ tokio = { version = "1", features = ["rt", "rt-multi-thread", "io-util", "io-std
 thiserror.workspace = true
 [dev-dependencies]
-fern = "0.6"
+fern = "0.7"

@@ -19,11 +19,11 @@ tokio = { version = "1", features = ["rt", "rt-multi-thread", "time", "sync", "p
 # setup new events on initialization, hardware-lock-elision hugely benefits this case
 # as it essentially makes the lock entirely free as long as there is no writes
 parking_lot = { version = "0.12", features = ["hardware-lock-elision"] }
-once_cell = "1.18"
+once_cell = "1.20"
 anyhow = "1"
 log = "0.4"
-futures-executor = "0.3.28"
+futures-executor = "0.3.31"
 [features]
 integration_test = []

@@ -22,7 +22,7 @@ serde = { version = "1.0", features = ["derive"] }
 toml = "0.8"
 etcetera = "0.8"
 tree-sitter.workspace = true
-once_cell = "1.19"
+once_cell = "1.20"
 log = "0.4"
 # TODO: these two should be on !wasm32 only
@@ -30,7 +30,7 @@ log = "0.4"
 # cloning/compiling tree-sitter grammars
 cc = { version = "1" }
 threadpool = { version = "1.0" }
-tempfile = "3.12.0"
+tempfile = "3.13.0"
 dunce = "1.0.5"
 [target.'cfg(not(target_arch = "wasm32"))'.dependencies]

@@ -225,7 +225,7 @@ pub fn merge_toml_values(left: toml::Value, right: toml::Value, merge_depth: usi
 /// Used as a ceiling dir for LSP root resolution, the filepicker and potentially as a future filewatching root
 ///
 /// This function starts searching the FS upward from the CWD
-/// and returns the first directory that contains either `.git`, `.svn` or `.helix`.
+/// and returns the first directory that contains either `.git`, `.svn`, `.jj` or `.helix`.
 /// If no workspace was found returns (CWD, true).
 /// Otherwise (workspace, false) is returned
 pub fn find_workspace() -> (PathBuf, bool) {
@@ -233,6 +233,7 @@ pub fn find_workspace() -> (PathBuf, bool) {
 for ancestor in current_dir.ancestors() {
 if ancestor.join(".git").exists()
 || ancestor.join(".svn").exists()
+|| ancestor.join(".jj").exists()
 || ancestor.join(".helix").exists()
 {
 return (ancestor.to_owned(), false);

@@ -23,7 +23,7 @@ license = "MIT"
 [dependencies]
 bitflags = "2.6.0"
 serde = { version = "1.0.209", features = ["derive"] }
-serde_json = "1.0.127"
+serde_json = "1.0.132"
 serde_repr = "0.1"
 url = {version = "2.0.0", features = ["serde"]}

@@ -26,4 +26,4 @@ windows-sys = { version = "0.59", features = ["Win32_Foundation", "Win32_Securit
 rustix = { version = "0.38", features = ["fs"] }
 [dev-dependencies]
-tempfile = "3.12"
+tempfile = "3.13"

@@ -51,7 +51,7 @@ impl<'a> RopeSliceExt<'a> for RopeSlice<'a> {
 if len < text.len() {
 return false;
 }
-self.get_byte_slice(..len - text.len())
+self.get_byte_slice(..text.len())
 .map_or(false, |start| start == text)
 }
@@ -137,4 +137,14 @@
 }
 }
 }
+#[test]
+fn starts_with() {
+assert!(RopeSlice::from("asdf").starts_with("a"));
+}
+#[test]
+fn ends_with() {
+assert!(RopeSlice::from("asdf").ends_with("f"));
+}
 }
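For reference, a plain-bytes sketch (mine, not from the commit) of what the corrected bound does: a prefix check has to compare the first `text.len()` bytes, whereas the old `..len - text.len()` bound sliced the wrong region and compared that against the pattern.

fn starts_with(slice: &[u8], text: &[u8]) -> bool {
    // Compare only the leading text.len() bytes, as the fixed RopeSliceExt impl does.
    slice.len() >= text.len() && &slice[..text.len()] == text
}

fn main() {
    assert!(starts_with(b"asdf", b"a"));   // the case the new unit test covers
    assert!(!starts_with(b"asdf", b"sd")); // occurs later in the slice, but is not a prefix
}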

@@ -33,7 +33,7 @@ helix-vcs = { path = "../helix-vcs" }
 helix-loader = { path = "../helix-loader" }
 anyhow = "1"
-once_cell = "1.19"
+once_cell = "1.20"
 tokio = { version = "1", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot"] }
 tui = { path = "../helix-tui", package = "helix-tui", default-features = false, features = ["crossterm"] }
@@ -45,7 +45,7 @@ arc-swap = { version = "1.7.1" }
 termini = "1"
 # Logging
-fern = "0.6"
+fern = "0.7"
 chrono = { version = "0.4", default-features = false, features = ["clock"] }
 log = "0.4"
@@ -74,7 +74,7 @@ grep-searcher = "0.1.14"
 [target.'cfg(not(windows))'.dependencies] # https://github.com/vorner/signal-hook/issues/100
 signal-hook-tokio = { version = "0.3", features = ["futures-v0_3"] }
-libc = "0.2.158"
+libc = "0.2.161"
 [target.'cfg(target_os = "macos")'.dependencies]
 crossterm = { version = "0.28", features = ["event-stream", "use-dev-tty", "libc"] }
@@ -85,5 +85,5 @@ helix-loader = { path = "../helix-loader" }
 [dev-dependencies]
 smallvec = "1.13"
 indoc = "2.0.5"
-tempfile = "3.12.0"
+tempfile = "3.13.0"
 same-file = "1.0.1"

@@ -846,7 +846,15 @@ impl Application {
 }
 }
 Notification::ShowMessage(params) => {
-log::warn!("unhandled window/showMessage: {:?}", params);
+if self.config.load().editor.lsp.display_messages {
+match params.typ {
+lsp::MessageType::ERROR => self.editor.set_error(params.message),
+lsp::MessageType::WARNING => {
+self.editor.set_warning(params.message)
+}
+_ => self.editor.set_status(params.message),
+}
+}
 }
 Notification::LogMessage(params) => {
 log::info!("window/logMessage: {:?}", params);
@@ -930,7 +938,7 @@ impl Application {
 self.lsp_progress.update(server_id, token, work);
 }
-if self.config.load().editor.lsp.display_messages {
+if self.config.load().editor.lsp.display_progress_messages {
 self.editor.set_status(status);
 }
 }
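In config terms (key names assumed from the kebab-cased `editor.lsp` options referenced above; they are not stated in the diff itself): setting `display-messages = true` under `[editor.lsp]` in config.toml now routes `window/showMessage` notifications to the statusline as error, warning, or info text, while the progress spinner text is gated by the separate `display-progress-messages` flag.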

@@ -22,8 +22,8 @@ use helix_core::{
 encoding, find_workspace,
 graphemes::{self, next_grapheme_boundary, RevRopeGraphemes},
 history::UndoKind,
-increment, indent,
+increment,
-indent::IndentStyle,
+indent::{self, IndentStyle},
 line_ending::{get_line_ending_of_str, line_end_char_index},
 match_brackets,
 movement::{self, move_vertically_visual, Direction},
@@ -3467,7 +3467,15 @@ fn open(cx: &mut Context, open: Open) {
 )
 };
-let indent = indent::indent_for_newline(
+let continue_comment_token = doc
+.language_config()
+.and_then(|config| config.comment_tokens.as_ref())
+.and_then(|tokens| comment::get_comment_token(text, tokens, cursor_line));
+let line = text.line(cursor_line);
+let indent = match line.first_non_whitespace_char() {
+Some(pos) if continue_comment_token.is_some() => line.slice(..pos).to_string(),
+_ => indent::indent_for_newline(
 doc.language_config(),
 doc.syntax(),
 &doc.config.load().indent_heuristic,
@@ -3477,21 +3485,33 @@ fn open(cx: &mut Context, open: Open) {
 line_num,
 line_end_index,
 cursor_line,
-);
+),
+};
 let indent_len = indent.len();
 let mut text = String::with_capacity(1 + indent_len);
 text.push_str(doc.line_ending.as_str());
 text.push_str(&indent);
+if let Some(token) = continue_comment_token {
+text.push_str(token);
+text.push(' ');
+}
 let text = text.repeat(count);
 // calculate new selection ranges
 let pos = offs + line_end_index + line_end_offset_width;
+let comment_len = continue_comment_token
+.map(|token| token.len() + 1) // `+ 1` for the extra space added
+.unwrap_or_default();
 for i in 0..count {
 // pos -> beginning of reference line,
-// + (i * (1+indent_len)) -> beginning of i'th line from pos
+// + (i * (1+indent_len + comment_len)) -> beginning of i'th line from pos (possibly including comment token)
-// + indent_len -> -> indent for i'th line
+// + indent_len + comment_len -> -> indent for i'th line
-ranges.push(Range::point(pos + (i * (1 + indent_len)) + indent_len));
+ranges.push(Range::point(
+pos + (i * (1 + indent_len + comment_len)) + indent_len + comment_len,
+));
 }
 offs += text.chars().count();
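In behavioral terms (example mine, not from the diff): with the cursor on a line such as `// some note` in a language whose comment tokens include `//`, pressing `o` now opens the next line pre-filled with `// ` rather than bare indentation, and the cursor-range arithmetic above accounts for the extra `token.len() + 1` characters per repeated line when a count is given.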
@@ -3929,6 +3949,11 @@ pub mod insert {
 let mut new_text = String::new();
+let continue_comment_token = doc
+.language_config()
+.and_then(|config| config.comment_tokens.as_ref())
+.and_then(|tokens| comment::get_comment_token(text, tokens, current_line));
 // If the current line is all whitespace, insert a line ending at the beginning of
 // the current line. This makes the current line empty and the new line contain the
 // indentation of the old line.
@@ -3938,7 +3963,11 @@ pub mod insert {
 (line_start, line_start, new_text.chars().count())
 } else {
-let indent = indent::indent_for_newline(
+let line = text.line(current_line);
+let indent = match line.first_non_whitespace_char() {
+Some(pos) if continue_comment_token.is_some() => line.slice(..pos).to_string(),
+_ => indent::indent_for_newline(
 doc.language_config(),
 doc.syntax(),
 &doc.config.load().indent_heuristic,
@@ -3948,7 +3977,8 @@ pub mod insert {
 current_line,
 pos,
 current_line,
-);
+),
+};
 // If we are between pairs (such as brackets), we want to
 // insert an additional line which is indented one level
@@ -3958,19 +3988,30 @@ pub mod insert {
 .and_then(|pairs| pairs.get(prev))
 .map_or(false, |pair| pair.open == prev && pair.close == curr);
-let local_offs = if on_auto_pair {
+let local_offs = if let Some(token) = continue_comment_token {
+new_text.push_str(doc.line_ending.as_str());
+new_text.push_str(&indent);
+new_text.push_str(token);
+new_text.push(' ');
+new_text.chars().count()
+} else if on_auto_pair {
+// line where the cursor will be
 let inner_indent = indent.clone() + doc.indent_style.as_str();
 new_text.reserve_exact(2 + indent.len() + inner_indent.len());
 new_text.push_str(doc.line_ending.as_str());
 new_text.push_str(&inner_indent);
+// line where the matching pair will be
 let local_offs = new_text.chars().count();
 new_text.push_str(doc.line_ending.as_str());
 new_text.push_str(&indent);
 local_offs
 } else {
 new_text.reserve_exact(1 + indent.len());
 new_text.push_str(doc.line_ending.as_str());
 new_text.push_str(&indent);
 new_text.chars().count()
 };
@@ -4626,6 +4667,14 @@ fn join_selections_impl(cx: &mut Context, select_space: bool) {
 let text = doc.text();
 let slice = text.slice(..);
+let comment_tokens = doc
+.language_config()
+.and_then(|config| config.comment_tokens.as_deref())
+.unwrap_or(&[]);
+// Sort by length to handle Rust's /// vs //
+let mut comment_tokens: Vec<&str> = comment_tokens.iter().map(|x| x.as_str()).collect();
+comment_tokens.sort_unstable_by_key(|x| std::cmp::Reverse(x.len()));
 let mut changes = Vec::new();
 for selection in doc.selection(view.id) {
@@ -4637,10 +4686,31 @@ fn join_selections_impl(cx: &mut Context, select_space: bool) {
 changes.reserve(lines.len());
+let first_line_idx = slice.line_to_char(start);
+let first_line_idx = skip_while(slice, first_line_idx, |ch| matches!(ch, ' ' | '\t'))
+.unwrap_or(first_line_idx);
+let first_line = slice.slice(first_line_idx..);
+let mut current_comment_token = comment_tokens
+.iter()
+.find(|token| first_line.starts_with(token));
 for line in lines {
 let start = line_end_char_index(&slice, line);
 let mut end = text.line_to_char(line + 1);
 end = skip_while(slice, end, |ch| matches!(ch, ' ' | '\t')).unwrap_or(end);
+let slice_from_end = slice.slice(end..);
+if let Some(token) = comment_tokens
+.iter()
+.find(|token| slice_from_end.starts_with(token))
+{
+if Some(token) == current_comment_token {
+end += token.chars().count();
+end = skip_while(slice, end, |ch| matches!(ch, ' ' | '\t')).unwrap_or(end);
+} else {
+// update current token, but don't delete this one.
+current_comment_token = Some(token);
+}
+}
 let separator = if end == line_end_char_index(&slice, line + 1) {
 // the joining line contains only space-characters => don't include a whitespace when joining
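Concretely (example mine): with the comment-aware join above, joining `// one` with a following `// two` yields `// one two`, because the second line's token matches the first line's and is stripped; joining `/// docs` with `// note` keeps both tokens since they differ, and the tracked token switches to `//` for any subsequent lines in the same join.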

@@ -34,7 +34,7 @@ use crate::{
 use std::{
 cmp::Ordering,
 collections::{BTreeMap, HashSet},
-fmt::{Display, Write},
+fmt::Display,
 future::Future,
 path::Path,
 };
@@ -61,10 +61,31 @@ macro_rules! language_server_with_feature {
 }};
 }
+/// A wrapper around `lsp::Location` that swaps out the LSP URI for `helix_core::Uri`.
+#[derive(Debug, Clone, PartialEq, Eq)]
+struct Location {
+uri: Uri,
+range: lsp::Range,
+}
+fn lsp_location_to_location(location: lsp::Location) -> Option<Location> {
+let uri = match location.uri.try_into() {
+Ok(uri) => uri,
+Err(err) => {
+log::warn!("discarding invalid or unsupported URI: {err}");
+return None;
+}
+};
+Some(Location {
+uri,
+range: location.range,
+})
+}
 struct SymbolInformationItem {
+location: Location,
 symbol: lsp::SymbolInformation,
 offset_encoding: OffsetEncoding,
-uri: Uri,
 }
 struct DiagnosticStyles {
@@ -75,35 +96,35 @@ struct DiagnosticStyles {
 }
 struct PickerDiagnostic {
-uri: Uri,
+location: Location,
 diag: lsp::Diagnostic,
 offset_encoding: OffsetEncoding,
 }
-fn uri_to_file_location<'a>(uri: &'a Uri, range: &lsp::Range) -> Option<FileLocation<'a>> {
+fn location_to_file_location(location: &Location) -> Option<FileLocation> {
-let path = uri.as_path()?;
+let path = location.uri.as_path()?;
-let line = Some((range.start.line as usize, range.end.line as usize));
+let line = Some((
+location.range.start.line as usize,
+location.range.end.line as usize,
+));
 Some((path.into(), line))
 }
 fn jump_to_location(
 editor: &mut Editor,
-location: &lsp::Location,
+location: &Location,
 offset_encoding: OffsetEncoding,
 action: Action,
 ) {
 let (view, doc) = current!(editor);
 push_jump(view, doc);
-let path = match location.uri.to_file_path() {
+let Some(path) = location.uri.as_path() else {
-Ok(path) => path,
+let err = format!("unable to convert URI to filepath: {:?}", location.uri);
-Err(_) => {
-let err = format!("unable to convert URI to filepath: {}", location.uri);
 editor.set_error(err);
 return;
-}
 };
-jump_to_position(editor, &path, location.range, offset_encoding, action);
+jump_to_position(editor, path, location.range, offset_encoding, action);
 }
 fn jump_to_position(
@@ -196,7 +217,10 @@ fn diag_picker(
 for (diag, ls) in diags {
 if let Some(ls) = cx.editor.language_server_by_id(ls) {
 flat_diag.push(PickerDiagnostic {
+location: Location {
 uri: uri.clone(),
+range: diag.range,
+},
 diag,
 offset_encoding: ls.offset_encoding(),
 });
@@ -243,7 +267,7 @@ fn diag_picker(
 // between message code and message
 2,
 ui::PickerColumn::new("path", |item: &PickerDiagnostic, _| {
-if let Some(path) = item.uri.as_path() {
+if let Some(path) = item.location.uri.as_path() {
 path::get_truncated_path(path)
 .to_string_lossy()
 .to_string()
@@ -261,26 +285,14 @@ fn diag_picker(
 primary_column,
 flat_diag,
 styles,
-move |cx,
+move |cx, diag, action| {
-PickerDiagnostic {
+jump_to_location(cx.editor, &diag.location, diag.offset_encoding, action);
-uri,
-diag,
-offset_encoding,
-},
-action| {
-let Some(path) = uri.as_path() else {
-return;
-};
-jump_to_position(cx.editor, path, diag.range, *offset_encoding, action);
 let (view, doc) = current!(cx.editor);
 view.diagnostics_handler
 .immediately_show_diagnostic(doc, view.id);
 },
 )
-.with_preview(move |_editor, PickerDiagnostic { uri, diag, .. }| {
+.with_preview(move |_editor, diag| location_to_file_location(&diag.location))
-let line = Some((diag.range.start.line as usize, diag.range.end.line as usize));
-Some((uri.as_path()?.into(), line))
-})
 .truncate_start(false)
 }
@@ -303,7 +315,10 @@ pub fn symbol_picker(cx: &mut Context) {
 container_name: None,
 },
 offset_encoding,
+location: Location {
 uri: uri.clone(),
+range: symbol.selection_range,
+},
 });
 for child in symbol.children.into_iter().flatten() {
 nested_to_flat(list, file, uri, child, offset_encoding);
@@ -337,7 +352,10 @@ pub fn symbol_picker(cx: &mut Context) {
 lsp::DocumentSymbolResponse::Flat(symbols) => symbols
 .into_iter()
 .map(|symbol| SymbolInformationItem {
+location: Location {
 uri: doc_uri.clone(),
+range: symbol.location.range,
+},
 symbol,
 offset_encoding,
 })
@@ -392,17 +410,10 @@ pub fn symbol_picker(cx: &mut Context) {
 symbols,
 (),
 move |cx, item, action| {
-jump_to_location(
+jump_to_location(cx.editor, &item.location, item.offset_encoding, action);
-cx.editor,
-&item.symbol.location,
-item.offset_encoding,
-action,
-);
 },
 )
-.with_preview(move |_editor, item| {
+.with_preview(move |_editor, item| location_to_file_location(&item.location))
-uri_to_file_location(&item.uri, &item.symbol.location.range)
-})
 .truncate_start(false);
 compositor.push(Box::new(overlaid(picker)))
@@ -453,8 +464,11 @@ pub fn workspace_symbol_picker(cx: &mut Context) {
 }
 };
 Some(SymbolInformationItem {
-symbol,
+location: Location {
 uri,
+range: symbol.location.range,
},
symbol,
offset_encoding, offset_encoding,
}) })
}) })
@ -490,7 +504,7 @@ pub fn workspace_symbol_picker(cx: &mut Context) {
}) })
.without_filtering(), .without_filtering(),
ui::PickerColumn::new("path", |item: &SymbolInformationItem, _| { ui::PickerColumn::new("path", |item: &SymbolInformationItem, _| {
if let Some(path) = item.uri.as_path() { if let Some(path) = item.location.uri.as_path() {
path::get_relative_path(path) path::get_relative_path(path)
.to_string_lossy() .to_string_lossy()
.to_string() .to_string()
@ -507,15 +521,10 @@ pub fn workspace_symbol_picker(cx: &mut Context) {
[], [],
(), (),
move |cx, item, action| { move |cx, item, action| {
jump_to_location( jump_to_location(cx.editor, &item.location, item.offset_encoding, action);
cx.editor,
&item.symbol.location,
item.offset_encoding,
action,
);
}, },
) )
.with_preview(|_editor, item| uri_to_file_location(&item.uri, &item.symbol.location.range)) .with_preview(|_editor, item| location_to_file_location(&item.location))
.with_dynamic_query(get_symbols, None) .with_dynamic_query(get_symbols, None)
.truncate_start(false); .truncate_start(false);
@ -847,7 +856,7 @@ impl Display for ApplyEditErrorKind {
fn goto_impl( fn goto_impl(
editor: &mut Editor, editor: &mut Editor,
compositor: &mut Compositor, compositor: &mut Compositor,
locations: Vec<lsp::Location>, locations: Vec<Location>,
offset_encoding: OffsetEncoding, offset_encoding: OffsetEncoding,
) { ) {
let cwdir = helix_stdx::env::current_working_dir(); let cwdir = helix_stdx::env::current_working_dir();
@ -860,80 +869,41 @@ fn goto_impl(
_locations => { _locations => {
let columns = [ui::PickerColumn::new( let columns = [ui::PickerColumn::new(
"location", "location",
|item: &lsp::Location, cwdir: &std::path::PathBuf| { |item: &Location, cwdir: &std::path::PathBuf| {
// The preallocation here will overallocate a few characters since it will account for the let path = if let Some(path) = item.uri.as_path() {
// URL's scheme, which is not used most of the time since that scheme will be "file://". path.strip_prefix(cwdir).unwrap_or(path).to_string_lossy()
// Those extra chars will be used to avoid allocating when writing the line number (in the
// common case where it has 5 digits or less, which should be enough for a cast majority
// of usages).
let mut res = String::with_capacity(item.uri.as_str().len());
if item.uri.scheme() == "file" {
// With the preallocation above and UTF-8 paths already, this closure will do one (1)
// allocation, for `to_file_path`, else there will be two (2), with `to_string_lossy`.
if let Ok(path) = item.uri.to_file_path() {
// We don't convert to a `helix_core::Uri` here because we've already checked the scheme.
// This path won't be normalized but it's only used for display.
res.push_str(
&path.strip_prefix(cwdir).unwrap_or(&path).to_string_lossy(),
);
}
} else { } else {
// Never allocates since we declared the string with this capacity already. item.uri.to_string().into()
res.push_str(item.uri.as_str()); };
}
// Most commonly, this will not allocate, especially on Unix systems where the root prefix format!("{path}:{}", item.range.start.line + 1).into()
// is a simple `/` and not `C:\` (with whatever drive letter)
write!(&mut res, ":{}", item.range.start.line + 1)
.expect("Will only failed if allocating fail");
res.into()
}, },
)]; )];
let picker = Picker::new(columns, 0, locations, cwdir, move |cx, location, action| { let picker = Picker::new(columns, 0, locations, cwdir, move |cx, location, action| {
jump_to_location(cx.editor, location, offset_encoding, action) jump_to_location(cx.editor, location, offset_encoding, action)
}) })
.with_preview(move |_editor, location| { .with_preview(move |_editor, location| location_to_file_location(location));
use crate::ui::picker::PathOrId;
let lines = Some((
location.range.start.line as usize,
location.range.end.line as usize,
));
// TODO: we should avoid allocating by doing the Uri conversion ahead of time.
//
// To do this, introduce a `Location` type in `helix-core` that reuses the core
// `Uri` type instead of the LSP `Url` type and replaces the LSP `Range` type.
// Refactor the callers of `goto_impl` to pass iterators that translate the
// LSP location type to the custom one in core, or have them collect and pass
// `Vec<Location>`s. Replace the `uri_to_file_location` function with
// `location_to_file_location` that takes only `&helix_core::Location` as
// parameters.
//
// By doing this we can also eliminate the duplicated URI info in the
// `SymbolInformationItem` type and introduce a custom Symbol type in `helix-core`
// which will be reused in the future for tree-sitter based symbol pickers.
let path = Uri::try_from(&location.uri).ok()?.as_path_buf()?;
#[allow(deprecated)]
Some((PathOrId::from_path_buf(path), lines))
});
compositor.push(Box::new(overlaid(picker))); compositor.push(Box::new(overlaid(picker)));
} }
} }
} }
fn to_locations(definitions: Option<lsp::GotoDefinitionResponse>) -> Vec<lsp::Location> { fn to_locations(definitions: Option<lsp::GotoDefinitionResponse>) -> Vec<Location> {
match definitions { match definitions {
Some(lsp::GotoDefinitionResponse::Scalar(location)) => vec![location], Some(lsp::GotoDefinitionResponse::Scalar(location)) => {
Some(lsp::GotoDefinitionResponse::Array(locations)) => locations, lsp_location_to_location(location).into_iter().collect()
}
Some(lsp::GotoDefinitionResponse::Array(locations)) => locations
.into_iter()
.flat_map(lsp_location_to_location)
.collect(),
Some(lsp::GotoDefinitionResponse::Link(locations)) => locations Some(lsp::GotoDefinitionResponse::Link(locations)) => locations
.into_iter() .into_iter()
.map(|location_link| lsp::Location { .map(|location_link| {
uri: location_link.target_uri, lsp::Location::new(location_link.target_uri, location_link.target_range)
range: location_link.target_range,
}) })
.flat_map(lsp_location_to_location)
.collect(), .collect(),
None => Vec::new(), None => Vec::new(),
} }
@ -1018,7 +988,11 @@ pub fn goto_reference(cx: &mut Context) {
cx.callback( cx.callback(
future, future,
move |editor, compositor, response: Option<Vec<lsp::Location>>| { move |editor, compositor, response: Option<Vec<lsp::Location>>| {
let items = response.unwrap_or_default(); let items: Vec<Location> = response
.into_iter()
.flatten()
.flat_map(lsp_location_to_location)
.collect();
if items.is_empty() { if items.is_empty() {
editor.set_error("No references found."); editor.set_error("No references found.");
} else { } else {
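
The net effect of the `Location` refactor above: LSP responses are converted once, up front, through `lsp_location_to_location`, and anything with an invalid or unsupported URI is discarded with a log warning at conversion time, so the pickers and `jump_to_location` only ever see locations they can open. A standalone sketch of that filtering shape; the `Uri`/`Location` types and `convert` below are toy stand-ins, not the helix-core types.

#[derive(Debug)]
struct Uri(String);

#[derive(Debug)]
struct Location {
    uri: Uri,
    line: u32,
}

// Stand-in for `lsp_location_to_location`: a fallible conversion that logs and
// discards what it cannot represent instead of failing the whole request.
fn convert(raw: (&str, u32)) -> Option<Location> {
    let (uri, line) = raw;
    if !uri.starts_with("file://") {
        eprintln!("discarding invalid or unsupported URI: {uri}");
        return None;
    }
    Some(Location {
        uri: Uri(uri.to_string()),
        line,
    })
}

fn main() {
    // Shaped like the `goto_reference` callback: Option<Vec<_>> in, Vec<Location> out.
    let response = Some(vec![("file:///tmp/a.rs", 3), ("jdt://contents/Foo.class", 1)]);
    let items: Vec<Location> = response
        .into_iter()
        .flatten()
        .flat_map(convert)
        .collect();
    assert_eq!(items.len(), 1);
    println!("{items:?}");
}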

@ -3032,7 +3032,7 @@ pub const TYPABLE_COMMAND_LIST: &[TypableCommand] = &[
TypableCommand { TypableCommand {
name: "tree-sitter-subtree", name: "tree-sitter-subtree",
aliases: &["ts-subtree"], aliases: &["ts-subtree"],
doc: "Display tree sitter subtree under cursor, primarily for debugging queries.", doc: "Display the smallest tree-sitter subtree that spans the primary selection, primarily for debugging queries.",
fun: tree_sitter_subtree, fun: tree_sitter_subtree,
signature: CommandSignature::none(), signature: CommandSignature::none(),
}, },

@ -433,7 +433,7 @@ impl<'a> TextRenderer<'a> {
Grapheme::Newline => &self.newline, Grapheme::Newline => &self.newline,
}; };
let in_bounds = self.column_in_bounds(position.col + width - 1); let in_bounds = self.column_in_bounds(position.col, width);
if in_bounds { if in_bounds {
self.surface.set_string( self.surface.set_string(
@ -452,7 +452,6 @@ impl<'a> TextRenderer<'a> {
); );
self.surface.set_style(rect, style); self.surface.set_style(rect, style);
} }
if *is_in_indent_area && !is_whitespace { if *is_in_indent_area && !is_whitespace {
*last_indent_level = position.col; *last_indent_level = position.col;
*is_in_indent_area = false; *is_in_indent_area = false;
@ -461,8 +460,8 @@ impl<'a> TextRenderer<'a> {
width width
} }
pub fn column_in_bounds(&self, colum: usize) -> bool { pub fn column_in_bounds(&self, colum: usize, width: usize) -> bool {
self.offset.col <= colum && colum < self.viewport.width as usize + self.offset.col self.offset.col <= colum && colum + width <= self.offset.col + self.viewport.width as usize
} }
/// Overlay indentation guides ontop of a rendered line /// Overlay indentation guides ontop of a rendered line
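
The `column_in_bounds` change above is worth spelling out: the old check took a single column, so callers had to fold grapheme width into the argument themselves (`position.col + width - 1` in the hunk above); the new signature takes the width separately and requires the whole grapheme to fit inside the viewport. A standalone sketch with the renderer fields passed in as plain parameters (names are stand-ins for `self.offset.col` and `self.viewport.width`):

// New-style bounds check: the grapheme must fit entirely inside the viewport.
fn column_in_bounds(offset_col: usize, viewport_width: usize, col: usize, width: usize) -> bool {
    offset_col <= col && col + width <= offset_col + viewport_width
}

fn main() {
    // An 80-column viewport starting at column 0:
    assert!(column_in_bounds(0, 80, 78, 2)); // double-width grapheme fits (cols 78-79)
    assert!(!column_in_bounds(0, 80, 79, 2)); // would spill past column 79
    // Given only the start column, the old single-column check could not tell
    // these two cases apart.
}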

@ -96,7 +96,10 @@ impl Component for SignatureHelp {
fn render(&mut self, area: Rect, surface: &mut Buffer, cx: &mut Context) { fn render(&mut self, area: Rect, surface: &mut Buffer, cx: &mut Context) {
let margin = Margin::horizontal(1); let margin = Margin::horizontal(1);
let signature = &self.signatures[self.active_signature]; let signature = self
.signatures
.get(self.active_signature)
.unwrap_or_else(|| &self.signatures[0]);
let active_param_span = signature.active_param_range.map(|(start, end)| { let active_param_span = signature.active_param_range.map(|(start, end)| {
vec![( vec![(
@ -108,9 +111,13 @@ impl Component for SignatureHelp {
)] )]
}); });
let sig = &self.signatures[self.active_signature]; let signature = self
.signatures
.get(self.active_signature)
.unwrap_or_else(|| &self.signatures[0]);
let sig_text = crate::ui::markdown::highlighted_code_block( let sig_text = crate::ui::markdown::highlighted_code_block(
sig.signature.as_str(), signature.signature.as_str(),
&self.language, &self.language,
Some(&cx.editor.theme), Some(&cx.editor.theme),
Arc::clone(&self.config_loader), Arc::clone(&self.config_loader),
@ -130,7 +137,7 @@ impl Component for SignatureHelp {
let sig_text_para = Paragraph::new(&sig_text).wrap(Wrap { trim: false }); let sig_text_para = Paragraph::new(&sig_text).wrap(Wrap { trim: false });
sig_text_para.render(sig_text_area, surface); sig_text_para.render(sig_text_area, surface);
if sig.signature_doc.is_none() { if signature.signature_doc.is_none() {
return; return;
} }
@ -142,7 +149,7 @@ impl Component for SignatureHelp {
} }
} }
let sig_doc = match &sig.signature_doc { let sig_doc = match &signature.signature_doc {
None => return, None => return,
Some(doc) => Markdown::new(doc.clone(), Arc::clone(&self.config_loader)), Some(doc) => Markdown::new(doc.clone(), Arc::clone(&self.config_loader)),
}; };
@ -160,12 +167,15 @@ impl Component for SignatureHelp {
const PADDING: u16 = 2; const PADDING: u16 = 2;
const SEPARATOR_HEIGHT: u16 = 1; const SEPARATOR_HEIGHT: u16 = 1;
let sig = &self.signatures[self.active_signature]; let signature = self
.signatures
.get(self.active_signature)
.unwrap_or_else(|| &self.signatures[0]);
let max_text_width = viewport.0.saturating_sub(PADDING).clamp(10, 120); let max_text_width = viewport.0.saturating_sub(PADDING).clamp(10, 120);
let signature_text = crate::ui::markdown::highlighted_code_block( let signature_text = crate::ui::markdown::highlighted_code_block(
sig.signature.as_str(), signature.signature.as_str(),
&self.language, &self.language,
None, None,
Arc::clone(&self.config_loader), Arc::clone(&self.config_loader),
@ -174,7 +184,7 @@ impl Component for SignatureHelp {
let (sig_width, sig_height) = let (sig_width, sig_height) =
crate::ui::text::required_size(&signature_text, max_text_width); crate::ui::text::required_size(&signature_text, max_text_width);
let (width, height) = match sig.signature_doc { let (width, height) = match signature.signature_doc {
Some(ref doc) => { Some(ref doc) => {
let doc_md = Markdown::new(doc.clone(), Arc::clone(&self.config_loader)); let doc_md = Markdown::new(doc.clone(), Arc::clone(&self.config_loader));
let doc_text = doc_md.parse(None); let doc_text = doc_md.parse(None);
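
The repeated `self.signatures.get(self.active_signature).unwrap_or_else(|| &self.signatures[0])` above replaces direct indexing, so a stale or out-of-range `active_signature` falls back to the first signature instead of panicking. A toy sketch of the same fallback, assuming a non-empty list as the component does:

fn pick<'a>(signatures: &[&'a str], active: usize) -> &'a str {
    // Out-of-range index falls back to the first entry instead of panicking.
    signatures.get(active).copied().unwrap_or(signatures[0])
}

fn main() {
    let sigs = ["fn foo(a: i32)", "fn foo(a: i32, b: i32)"];
    assert_eq!(pick(&sigs, 1), "fn foo(a: i32, b: i32)");
    assert_eq!(pick(&sigs, 5), "fn foo(a: i32)");
}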

@ -32,7 +32,7 @@ use std::{
borrow::Cow, borrow::Cow,
collections::HashMap, collections::HashMap,
io::Read, io::Read,
path::{Path, PathBuf}, path::Path,
sync::{ sync::{
atomic::{self, AtomicUsize}, atomic::{self, AtomicUsize},
Arc, Arc,
@ -63,26 +63,12 @@ pub const MAX_FILE_SIZE_FOR_PREVIEW: u64 = 10 * 1024 * 1024;
#[derive(PartialEq, Eq, Hash)] #[derive(PartialEq, Eq, Hash)]
pub enum PathOrId<'a> { pub enum PathOrId<'a> {
Id(DocumentId), Id(DocumentId),
// See [PathOrId::from_path_buf]: this will eventually become `Path(&Path)`. Path(&'a Path),
Path(Cow<'a, Path>),
}
impl<'a> PathOrId<'a> {
/// Creates a [PathOrId] from a PathBuf
///
/// # Deprecated
/// The owned version of PathOrId will be removed in a future refactor
/// and replaced with `&'a Path`. See the caller of this function for
/// more details on its removal.
#[deprecated]
pub fn from_path_buf(path_buf: PathBuf) -> Self {
Self::Path(Cow::Owned(path_buf))
}
} }
impl<'a> From<&'a Path> for PathOrId<'a> { impl<'a> From<&'a Path> for PathOrId<'a> {
fn from(path: &'a Path) -> Self { fn from(path: &'a Path) -> Self {
Self::Path(Cow::Borrowed(path)) Self::Path(path)
} }
} }
@ -581,7 +567,6 @@ impl<T: 'static + Send + Sync, D: 'static + Send + Sync> Picker<T, D> {
match path_or_id { match path_or_id {
PathOrId::Path(path) => { PathOrId::Path(path) => {
let path = path.as_ref();
if let Some(doc) = editor.document_by_path(path) { if let Some(doc) = editor.document_by_path(path) {
return Some((Preview::EditorDocument(doc), range)); return Some((Preview::EditorDocument(doc), range));
} }
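
With locations converted to `helix_core::Uri` ahead of time, the picker preview no longer needs to own a `PathBuf`, so the deprecated `Cow` variant of `PathOrId` can become a plain borrow. A toy mirror of that enum shape (not the real picker type) to show what the call sites gain:

use std::path::Path;

// The owned Cow variant is gone: previews now borrow the path that the
// pre-converted Location already owns.
#[derive(Debug)]
enum PathOrId<'a> {
    Id(u32),
    Path(&'a Path),
}

fn preview(path: &Path) -> PathOrId<'_> {
    PathOrId::Path(path)
}

fn main() {
    let owned = std::path::PathBuf::from("/tmp/example.rs");
    // No allocation or clone needed at the call site anymore.
    let p = preview(&owned);
    println!("{p:?}");
    let _by_id = PathOrId::Id(1);
}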

@ -164,7 +164,7 @@ impl Decoration for Cursor<'_> {
renderer: &mut TextRenderer, renderer: &mut TextRenderer,
grapheme: &FormattedGrapheme, grapheme: &FormattedGrapheme,
) -> usize { ) -> usize {
if renderer.column_in_bounds(grapheme.visual_pos.col) if renderer.column_in_bounds(grapheme.visual_pos.col, grapheme.width())
&& renderer.offset.row < grapheme.visual_pos.row && renderer.offset.row < grapheme.visual_pos.row
{ {
let position = grapheme.visual_pos - renderer.offset; let position = grapheme.visual_pos - renderer.offset;

@ -98,7 +98,7 @@ impl Renderer<'_, '_> {
fn draw_eol_diagnostic(&mut self, diag: &Diagnostic, row: u16, col: usize) -> u16 { fn draw_eol_diagnostic(&mut self, diag: &Diagnostic, row: u16, col: usize) -> u16 {
let style = self.styles.severity_style(diag.severity()); let style = self.styles.severity_style(diag.severity());
let width = self.renderer.viewport.width; let width = self.renderer.viewport.width;
if !self.renderer.column_in_bounds(col + 1) { if !self.renderer.column_in_bounds(col + 1, 1) {
return 0; return 0;
} }
let col = (col - self.renderer.offset.col) as u16; let col = (col - self.renderer.offset.col) as u16;

@ -632,6 +632,41 @@ async fn test_join_selections_space() -> anyhow::Result<()> {
Ok(()) Ok(())
} }
#[tokio::test(flavor = "multi_thread")]
async fn test_join_selections_comment() -> anyhow::Result<()> {
test((
indoc! {"\
/// #[a|]#bc
/// def
"},
":lang rust<ret>J",
indoc! {"\
/// #[a|]#bc def
"},
))
.await?;
// Only join if the comment token matches the previous line.
test((
indoc! {"\
#[| // a
// b
/// c
/// d
e
/// f
// g]#
"},
":lang rust<ret>J",
indoc! {"\
#[| // a b /// c d e f // g]#
"},
))
.await?;
Ok(())
}
#[tokio::test(flavor = "multi_thread")] #[tokio::test(flavor = "multi_thread")]
async fn test_read_file() -> anyhow::Result<()> { async fn test_read_file() -> anyhow::Result<()> {
let mut file = tempfile::NamedTempFile::new()?; let mut file = tempfile::NamedTempFile::new()?;

@ -20,9 +20,9 @@ helix-core = { path = "../helix-core" }
bitflags = "2.6" bitflags = "2.6"
cassowary = "0.3" cassowary = "0.3"
unicode-segmentation = "1.11" unicode-segmentation = "1.12"
crossterm = { version = "0.28", optional = true } crossterm = { version = "0.28", optional = true }
termini = "1.0" termini = "1.0"
serde = { version = "1", "optional" = true, features = ["derive"]} serde = { version = "1", "optional" = true, features = ["derive"]}
once_cell = "1.19" once_cell = "1.20"
log = "~0.4" log = "~0.4"

@ -29,4 +29,4 @@ log = "0.4"
git = ["gix"] git = ["gix"]
[dev-dependencies] [dev-dependencies]
tempfile = "3.12" tempfile = "3.13"

@ -22,18 +22,24 @@ use crate::FileChange;
#[cfg(test)] #[cfg(test)]
mod test; mod test;
#[inline]
fn get_repo_dir(file: &Path) -> Result<&Path> {
file.parent().context("file has no parent directory")
}
pub fn get_diff_base(file: &Path) -> Result<Vec<u8>> { pub fn get_diff_base(file: &Path) -> Result<Vec<u8>> {
debug_assert!(!file.exists() || file.is_file()); debug_assert!(!file.exists() || file.is_file());
debug_assert!(file.is_absolute()); debug_assert!(file.is_absolute());
let file = gix::path::realpath(file).context("resolve symlinks")?;
// TODO cache repository lookup // TODO cache repository lookup
let repo_dir = file.parent().context("file has no parent directory")?; let repo_dir = get_repo_dir(&file)?;
let repo = open_repo(repo_dir) let repo = open_repo(repo_dir)
.context("failed to open git repo")? .context("failed to open git repo")?
.to_thread_local(); .to_thread_local();
let head = repo.head_commit()?; let head = repo.head_commit()?;
let file_oid = find_file_in_commit(&repo, &head, file)?; let file_oid = find_file_in_commit(&repo, &head, &file)?;
let file_object = repo.find_object(file_oid)?; let file_object = repo.find_object(file_oid)?;
let data = file_object.detach().data; let data = file_object.detach().data;
@ -56,7 +62,9 @@ pub fn get_diff_base(file: &Path) -> Result<Vec<u8>> {
pub fn get_current_head_name(file: &Path) -> Result<Arc<ArcSwap<Box<str>>>> { pub fn get_current_head_name(file: &Path) -> Result<Arc<ArcSwap<Box<str>>>> {
debug_assert!(!file.exists() || file.is_file()); debug_assert!(!file.exists() || file.is_file());
debug_assert!(file.is_absolute()); debug_assert!(file.is_absolute());
let repo_dir = file.parent().context("file has no parent directory")?; let file = gix::path::realpath(file).context("resolve symlinks")?;
let repo_dir = get_repo_dir(&file)?;
let repo = open_repo(repo_dir) let repo = open_repo(repo_dir)
.context("failed to open git repo")? .context("failed to open git repo")?
.to_thread_local(); .to_thread_local();
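
The change above resolves symlinks before deriving the repository directory, so `get_diff_base` and `get_current_head_name` operate on the real file rather than on the link object. A rough standalone sketch of the same idea, using `std::fs::canonicalize` as a stand-in for `gix::path::realpath` and a hypothetical `resolved_repo_dir` in place of the new `get_repo_dir` helper:

use std::io;
use std::path::{Path, PathBuf};

// Resolve symlinks first, then take the parent of the *resolved* path as the
// directory to start the repository lookup from.
fn resolved_repo_dir(file: &Path) -> io::Result<(PathBuf, PathBuf)> {
    let real = file.canonicalize()?; // gix::path::realpath in the actual patch
    let dir = real
        .parent()
        .ok_or_else(|| io::Error::new(io::ErrorKind::Other, "file has no parent directory"))?
        .to_path_buf();
    Ok((real, dir))
}

fn main() {
    // For a symlink like /tmp/link.txt -> /repo/file.txt this yields
    // ("/repo/file.txt", "/repo"), so the diff base matches the target file.
    if let Ok((real, dir)) = resolved_repo_dir(Path::new("Cargo.toml")) {
        println!("resolved: {} in {}", real.display(), dir.display());
    }
}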

@ -98,9 +98,13 @@ fn directory() {
assert!(git::get_diff_base(&dir).is_err()); assert!(git::get_diff_base(&dir).is_err());
} }
/// Test that `get_file_head` does not return content for a symlink. /// Test that `get_diff_base` resolves symlinks so that the same diff base is
/// This is important to correctly cover cases where a symlink is removed and replaced by a file. /// used as for the target file.
/// If the contents of the symlink object were returned a diff between a path and the actual file would be produced (bad ui). ///
/// This is important to correctly cover cases where a symlink is removed and
/// replaced by a file. If the contents of the symlink object were returned
/// a diff between a literal file path and the actual file content would be
/// produced (bad ui).
#[cfg(any(unix, windows))] #[cfg(any(unix, windows))]
#[test] #[test]
fn symlink() { fn symlink() {
@ -108,14 +112,41 @@ fn symlink() {
use std::os::unix::fs::symlink; use std::os::unix::fs::symlink;
#[cfg(not(unix))] #[cfg(not(unix))]
use std::os::windows::fs::symlink_file as symlink; use std::os::windows::fs::symlink_file as symlink;
let temp_git = empty_git_repo(); let temp_git = empty_git_repo();
let file = temp_git.path().join("file.txt"); let file = temp_git.path().join("file.txt");
let contents = b"foo".as_slice(); let contents = Vec::from(b"foo");
File::create(&file).unwrap().write_all(contents).unwrap(); File::create(&file).unwrap().write_all(&contents).unwrap();
let file_link = temp_git.path().join("file_link.txt"); let file_link = temp_git.path().join("file_link.txt");
symlink("file.txt", &file_link).unwrap(); symlink("file.txt", &file_link).unwrap();
create_commit(temp_git.path(), true);
assert_eq!(git::get_diff_base(&file_link).unwrap(), contents);
assert_eq!(git::get_diff_base(&file).unwrap(), contents);
}
/// Test that `get_diff_base` returns content when the file is a symlink to
/// another file that is in a git repo, but the symlink itself is not.
#[cfg(any(unix, windows))]
#[test]
fn symlink_to_git_repo() {
#[cfg(unix)]
use std::os::unix::fs::symlink;
#[cfg(not(unix))]
use std::os::windows::fs::symlink_file as symlink;
let temp_dir = tempfile::tempdir().expect("create temp dir");
let temp_git = empty_git_repo();
let file = temp_git.path().join("file.txt");
let contents = Vec::from(b"foo");
File::create(&file).unwrap().write_all(&contents).unwrap();
create_commit(temp_git.path(), true); create_commit(temp_git.path(), true);
assert!(git::get_diff_base(&file_link).is_err());
assert_eq!(git::get_diff_base(&file).unwrap(), Vec::from(contents)); let file_link = temp_dir.path().join("file_link.txt");
symlink(&file, &file_link).unwrap();
assert_eq!(git::get_diff_base(&file_link).unwrap(), contents);
assert_eq!(git::get_diff_base(&file).unwrap(), contents);
} }

@ -28,10 +28,10 @@ bitflags = "2.6"
anyhow = "1" anyhow = "1"
crossterm = { version = "0.28", optional = true } crossterm = { version = "0.28", optional = true }
tempfile = "3.12" tempfile = "3.13"
# Conversion traits # Conversion traits
once_cell = "1.19" once_cell = "1.20"
url = "2.5.2" url = "2.5.2"
arc-swap = { version = "1.7.1" } arc-swap = { version = "1.7.1" }

@ -421,7 +421,9 @@ pub fn get_terminal_provider() -> Option<TerminalConfig> {
pub struct LspConfig { pub struct LspConfig {
/// Enables LSP /// Enables LSP
pub enable: bool, pub enable: bool,
/// Display LSP progress messages below statusline /// Display LSP messages from $/progress below statusline
pub display_progress_messages: bool,
/// Display LSP messages from window/showMessage below statusline
pub display_messages: bool, pub display_messages: bool,
/// Enable automatic pop up of signature help (parameter hints) /// Enable automatic pop up of signature help (parameter hints)
pub auto_signature_help: bool, pub auto_signature_help: bool,
@ -439,7 +441,8 @@ impl Default for LspConfig {
fn default() -> Self { fn default() -> Self {
Self { Self {
enable: true, enable: true,
display_messages: false, display_progress_messages: false,
display_messages: true,
auto_signature_help: true, auto_signature_help: true,
display_signature_help_docs: true, display_signature_help_docs: true,
display_inlay_hints: false, display_inlay_hints: false,
@ -1271,6 +1274,13 @@ impl Editor {
self.status_msg = Some((error, Severity::Error)); self.status_msg = Some((error, Severity::Error));
} }
#[inline]
pub fn set_warning<T: Into<Cow<'static, str>>>(&mut self, warning: T) {
let warning = warning.into();
log::warn!("editor warning: {}", warning);
self.status_msg = Some((warning, Severity::Warning));
}
#[inline] #[inline]
pub fn get_status(&self) -> Option<(&Cow<'static, str>, &Severity)> { pub fn get_status(&self) -> Option<(&Cow<'static, str>, &Severity)> {
self.status_msg.as_ref().map(|(status, sev)| (status, sev)) self.status_msg.as_ref().map(|(status, sev)| (status, sev))
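
Two related additions above: `display_messages` now defaults to true and covers window/showMessage, while the new `display_progress_messages` (default false) covers $/progress; and `Editor::set_warning` gives callers a warning-level status message alongside the existing error one. A toy sketch of that severity-tagged status pattern; the struct and enum are stand-ins, not the editor types:

use std::borrow::Cow;

#[derive(Debug, PartialEq)]
enum Severity {
    Warning,
    Error,
}

#[derive(Default)]
struct StatusModel {
    status_msg: Option<(Cow<'static, str>, Severity)>,
}

impl StatusModel {
    // Mirrors the shape of Editor::set_warning in the hunk above: log the
    // message, then stash it with its severity for the statusline to style.
    fn set_warning<T: Into<Cow<'static, str>>>(&mut self, warning: T) {
        let warning = warning.into();
        eprintln!("editor warning: {}", warning);
        self.status_msg = Some((warning, Severity::Warning));
    }

    // The pre-existing error path, shown for contrast.
    fn set_error<T: Into<Cow<'static, str>>>(&mut self, error: T) {
        let error = error.into();
        eprintln!("editor error: {}", error);
        self.status_msg = Some((error, Severity::Error));
    }
}

fn main() {
    let mut model = StatusModel::default();
    model.set_warning("language server sent an unsupported URI");
    assert!(matches!(model.status_msg, Some((_, Severity::Warning))));
    model.set_error("no references found");
    assert!(matches!(model.status_msg, Some((_, Severity::Error))));
}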

@ -243,7 +243,7 @@ impl Editor {
match op { match op {
ResourceOp::Create(op) => { ResourceOp::Create(op) => {
let uri = Uri::try_from(&op.uri)?; let uri = Uri::try_from(&op.uri)?;
let path = uri.as_path_buf().expect("URIs are valid paths"); let path = uri.as_path().expect("URIs are valid paths");
let ignore_if_exists = op.options.as_ref().map_or(false, |options| { let ignore_if_exists = op.options.as_ref().map_or(false, |options| {
!options.overwrite.unwrap_or(false) && options.ignore_if_exists.unwrap_or(false) !options.overwrite.unwrap_or(false) && options.ignore_if_exists.unwrap_or(false)
}); });
@ -255,13 +255,15 @@ impl Editor {
} }
} }
fs::write(&path, [])?; fs::write(path, [])?;
self.language_servers.file_event_handler.file_changed(path); self.language_servers
.file_event_handler
.file_changed(path.to_path_buf());
} }
} }
ResourceOp::Delete(op) => { ResourceOp::Delete(op) => {
let uri = Uri::try_from(&op.uri)?; let uri = Uri::try_from(&op.uri)?;
let path = uri.as_path_buf().expect("URIs are valid paths"); let path = uri.as_path().expect("URIs are valid paths");
if path.is_dir() { if path.is_dir() {
let recursive = op let recursive = op
.options .options
@ -270,11 +272,13 @@ impl Editor {
.unwrap_or(false); .unwrap_or(false);
if recursive { if recursive {
fs::remove_dir_all(&path)? fs::remove_dir_all(path)?
} else { } else {
fs::remove_dir(&path)? fs::remove_dir(path)?
} }
self.language_servers.file_event_handler.file_changed(path); self.language_servers
.file_event_handler
.file_changed(path.to_path_buf());
} else if path.is_file() { } else if path.is_file() {
fs::remove_file(path)?; fs::remove_file(path)?;
} }

@ -53,20 +53,34 @@ impl Loader {
/// Loads a theme searching directories in priority order. /// Loads a theme searching directories in priority order.
pub fn load(&self, name: &str) -> Result<Theme> { pub fn load(&self, name: &str) -> Result<Theme> {
let (theme, warnings) = self.load_with_warnings(name)?;
for warning in warnings {
warn!("Theme '{}': {}", name, warning);
}
Ok(theme)
}
/// Loads a theme searching directories in priority order, returning any warnings
pub fn load_with_warnings(&self, name: &str) -> Result<(Theme, Vec<String>)> {
if name == "default" { if name == "default" {
return Ok(self.default()); return Ok((self.default(), Vec::new()));
} }
if name == "base16_default" { if name == "base16_default" {
return Ok(self.base16_default()); return Ok((self.base16_default(), Vec::new()));
} }
let mut visited_paths = HashSet::new(); let mut visited_paths = HashSet::new();
let theme = self.load_theme(name, &mut visited_paths).map(Theme::from)?; let (theme, warnings) = self
.load_theme(name, &mut visited_paths)
.map(Theme::from_toml)?;
Ok(Theme { let theme = Theme {
name: name.into(), name: name.into(),
..theme ..theme
}) };
Ok((theme, warnings))
} }
/// Recursively load a theme, merging with any inherited parent themes. /// Recursively load a theme, merging with any inherited parent themes.
@ -87,10 +101,7 @@ impl Loader {
let theme_toml = if let Some(parent_theme_name) = inherits { let theme_toml = if let Some(parent_theme_name) = inherits {
let parent_theme_name = parent_theme_name.as_str().ok_or_else(|| { let parent_theme_name = parent_theme_name.as_str().ok_or_else(|| {
anyhow!( anyhow!("Expected 'inherits' to be a string: {}", parent_theme_name)
"Theme: expected 'inherits' to be a string: {}",
parent_theme_name
)
})?; })?;
let parent_theme_toml = match parent_theme_name { let parent_theme_toml = match parent_theme_name {
@ -181,9 +192,9 @@ impl Loader {
}) })
.ok_or_else(|| { .ok_or_else(|| {
if cycle_found { if cycle_found {
anyhow!("Theme: cycle found in inheriting: {}", name) anyhow!("Cycle found in inheriting: {}", name)
} else { } else {
anyhow!("Theme: file not found for: {}", name) anyhow!("File not found for: {}", name)
} }
}) })
} }
@ -220,19 +231,11 @@ pub struct Theme {
impl From<Value> for Theme { impl From<Value> for Theme {
fn from(value: Value) -> Self { fn from(value: Value) -> Self {
if let Value::Table(table) = value { let (theme, warnings) = Theme::from_toml(value);
let (styles, scopes, highlights) = build_theme_values(table); for warning in warnings {
warn!("{}", warning);
Self {
styles,
scopes,
highlights,
..Default::default()
}
} else {
warn!("Expected theme TOML value to be a table, found {:?}", value);
Default::default()
} }
theme
} }
} }
@ -242,31 +245,29 @@ impl<'de> Deserialize<'de> for Theme {
D: Deserializer<'de>, D: Deserializer<'de>,
{ {
let values = Map::<String, Value>::deserialize(deserializer)?; let values = Map::<String, Value>::deserialize(deserializer)?;
let (theme, warnings) = Theme::from_keys(values);
let (styles, scopes, highlights) = build_theme_values(values); for warning in warnings {
warn!("{}", warning);
Ok(Self { }
styles, Ok(theme)
scopes,
highlights,
..Default::default()
})
} }
} }
fn build_theme_values( fn build_theme_values(
mut values: Map<String, Value>, mut values: Map<String, Value>,
) -> (HashMap<String, Style>, Vec<String>, Vec<Style>) { ) -> (HashMap<String, Style>, Vec<String>, Vec<Style>, Vec<String>) {
let mut styles = HashMap::new(); let mut styles = HashMap::new();
let mut scopes = Vec::new(); let mut scopes = Vec::new();
let mut highlights = Vec::new(); let mut highlights = Vec::new();
let mut warnings = Vec::new();
// TODO: alert user of parsing failures in editor // TODO: alert user of parsing failures in editor
let palette = values let palette = values
.remove("palette") .remove("palette")
.map(|value| { .map(|value| {
ThemePalette::try_from(value).unwrap_or_else(|err| { ThemePalette::try_from(value).unwrap_or_else(|err| {
warn!("{}", err); warnings.push(err);
ThemePalette::default() ThemePalette::default()
}) })
}) })
@ -279,7 +280,7 @@ fn build_theme_values(
for (name, style_value) in values { for (name, style_value) in values {
let mut style = Style::default(); let mut style = Style::default();
if let Err(err) = palette.parse_style(&mut style, style_value) { if let Err(err) = palette.parse_style(&mut style, style_value) {
warn!("{}", err); warnings.push(err);
} }
// these are used both as UI and as highlights // these are used both as UI and as highlights
@ -288,7 +289,7 @@ fn build_theme_values(
highlights.push(style); highlights.push(style);
} }
(styles, scopes, highlights) (styles, scopes, highlights, warnings)
} }
impl Theme { impl Theme {
@ -354,6 +355,27 @@ impl Theme {
.all(|color| !matches!(color, Some(Color::Rgb(..)))) .all(|color| !matches!(color, Some(Color::Rgb(..))))
}) })
} }
fn from_toml(value: Value) -> (Self, Vec<String>) {
if let Value::Table(table) = value {
Theme::from_keys(table)
} else {
warn!("Expected theme TOML value to be a table, found {:?}", value);
Default::default()
}
}
fn from_keys(toml_keys: Map<String, Value>) -> (Self, Vec<String>) {
let (styles, scopes, highlights, load_errors) = build_theme_values(toml_keys);
let theme = Self {
styles,
scopes,
highlights,
..Default::default()
};
(theme, load_errors)
}
} }
struct ThemePalette { struct ThemePalette {
@ -408,7 +430,7 @@ impl ThemePalette {
if let Ok(index) = s.parse::<u8>() { if let Ok(index) = s.parse::<u8>() {
return Ok(Color::Indexed(index)); return Ok(Color::Indexed(index));
} }
Err(format!("Theme: malformed ANSI: {}", s)) Err(format!("Malformed ANSI: {}", s))
} }
fn hex_string_to_rgb(s: &str) -> Result<Color, String> { fn hex_string_to_rgb(s: &str) -> Result<Color, String> {
@ -422,13 +444,13 @@ impl ThemePalette {
} }
} }
Err(format!("Theme: malformed hexcode: {}", s)) Err(format!("Malformed hexcode: {}", s))
} }
fn parse_value_as_str(value: &Value) -> Result<&str, String> { fn parse_value_as_str(value: &Value) -> Result<&str, String> {
value value
.as_str() .as_str()
.ok_or(format!("Theme: unrecognized value: {}", value)) .ok_or(format!("Unrecognized value: {}", value))
} }
pub fn parse_color(&self, value: Value) -> Result<Color, String> { pub fn parse_color(&self, value: Value) -> Result<Color, String> {
@ -445,14 +467,14 @@ impl ThemePalette {
value value
.as_str() .as_str()
.and_then(|s| s.parse().ok()) .and_then(|s| s.parse().ok())
.ok_or(format!("Theme: invalid modifier: {}", value)) .ok_or(format!("Invalid modifier: {}", value))
} }
pub fn parse_underline_style(value: &Value) -> Result<UnderlineStyle, String> { pub fn parse_underline_style(value: &Value) -> Result<UnderlineStyle, String> {
value value
.as_str() .as_str()
.and_then(|s| s.parse().ok()) .and_then(|s| s.parse().ok())
.ok_or(format!("Theme: invalid underline style: {}", value)) .ok_or(format!("Invalid underline style: {}", value))
} }
pub fn parse_style(&self, style: &mut Style, value: Value) -> Result<(), String> { pub fn parse_style(&self, style: &mut Style, value: Value) -> Result<(), String> {
@ -462,9 +484,7 @@ impl ThemePalette {
"fg" => *style = style.fg(self.parse_color(value)?), "fg" => *style = style.fg(self.parse_color(value)?),
"bg" => *style = style.bg(self.parse_color(value)?), "bg" => *style = style.bg(self.parse_color(value)?),
"underline" => { "underline" => {
let table = value let table = value.as_table_mut().ok_or("Underline must be table")?;
.as_table_mut()
.ok_or("Theme: underline must be table")?;
if let Some(value) = table.remove("color") { if let Some(value) = table.remove("color") {
*style = style.underline_color(self.parse_color(value)?); *style = style.underline_color(self.parse_color(value)?);
} }
@ -473,13 +493,11 @@ impl ThemePalette {
} }
if let Some(attr) = table.keys().next() { if let Some(attr) = table.keys().next() {
return Err(format!("Theme: invalid underline attribute: {attr}")); return Err(format!("Invalid underline attribute: {attr}"));
} }
} }
"modifiers" => { "modifiers" => {
let modifiers = value let modifiers = value.as_array().ok_or("Modifiers should be an array")?;
.as_array()
.ok_or("Theme: modifiers should be an array")?;
for modifier in modifiers { for modifier in modifiers {
if modifier if modifier
@ -492,7 +510,7 @@ impl ThemePalette {
} }
} }
} }
_ => return Err(format!("Theme: invalid style attribute: {}", name)), _ => return Err(format!("Invalid style attribute: {}", name)),
} }
} }
} else { } else {
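
The shape of the theme refactor above: `build_theme_values` now returns its warnings instead of logging them, `Theme::from_toml`/`from_keys` pass them upward, and the caller (`Loader::load` or a `load_with_warnings` user) decides whether to log or surface them in the editor. A stripped-down sketch of that "collect warnings, let the caller report them" shape with toy types:

use std::collections::HashMap;

struct Theme {
    styles: HashMap<String, String>,
}

// Stand-in for build_theme_values / Theme::from_keys: parse what it can and
// return the problems as data instead of logging from inside the parser.
fn from_keys(pairs: &[(&str, &str)]) -> (Theme, Vec<String>) {
    let mut warnings = Vec::new();
    let mut styles = HashMap::new();
    for (name, value) in pairs {
        if value.is_empty() {
            warnings.push(format!("Unrecognized value: {}", name));
            continue;
        }
        styles.insert(name.to_string(), value.to_string());
    }
    (Theme { styles }, warnings)
}

fn main() {
    let (theme, warnings) = from_keys(&[("ui.text", "white"), ("ui.cursor", "")]);
    // Equivalent of Loader::load: the caller chooses how to report.
    for warning in &warnings {
        eprintln!("Theme 'example': {}", warning);
    }
    assert_eq!(theme.styles.len(), 1);
    assert_eq!(warnings.len(), 1);
}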

@ -16,6 +16,7 @@ bicep-langserver = { command = "bicep-langserver" }
bitbake-language-server = { command = "bitbake-language-server" } bitbake-language-server = { command = "bitbake-language-server" }
bufls = { command = "bufls", args = ["serve"] } bufls = { command = "bufls", args = ["serve"] }
cairo-language-server = { command = "cairo-language-server", args = [] } cairo-language-server = { command = "cairo-language-server", args = [] }
circom-lsp = { command = "circom-lsp" }
cl-lsp = { command = "cl-lsp", args = [ "stdio" ] } cl-lsp = { command = "cl-lsp", args = [ "stdio" ] }
clangd = { command = "clangd" } clangd = { command = "clangd" }
clojure-lsp = { command = "clojure-lsp" } clojure-lsp = { command = "clojure-lsp" }
@ -32,6 +33,7 @@ dot-language-server = { command = "dot-language-server", args = ["--stdio"] }
earthlyls = { command = "earthlyls" } earthlyls = { command = "earthlyls" }
elixir-ls = { command = "elixir-ls", config = { elixirLS.dialyzerEnabled = false } } elixir-ls = { command = "elixir-ls", config = { elixirLS.dialyzerEnabled = false } }
elm-language-server = { command = "elm-language-server" } elm-language-server = { command = "elm-language-server" }
elp = { command = "elp", args = ["server"] }
elvish = { command = "elvish", args = ["-lsp"] } elvish = { command = "elvish", args = ["-lsp"] }
erlang-ls = { command = "erlang_ls" } erlang-ls = { command = "erlang_ls" }
forc = { command = "forc", args = ["lsp"] } forc = { command = "forc", args = ["lsp"] }
@ -39,17 +41,19 @@ forth-lsp = { command = "forth-lsp" }
fortls = { command = "fortls", args = ["--lowercase_intrinsics"] } fortls = { command = "fortls", args = ["--lowercase_intrinsics"] }
fsharp-ls = { command = "fsautocomplete", config = { AutomaticWorkspaceInit = true } } fsharp-ls = { command = "fsautocomplete", config = { AutomaticWorkspaceInit = true } }
gleam = { command = "gleam", args = ["lsp"] } gleam = { command = "gleam", args = ["lsp"] }
glsl_analyzer = { command = "glsl_analyzer" }
graphql-language-service = { command = "graphql-lsp", args = ["server", "-m", "stream"] } graphql-language-service = { command = "graphql-lsp", args = ["server", "-m", "stream"] }
haskell-language-server = { command = "haskell-language-server-wrapper", args = ["--lsp"] } haskell-language-server = { command = "haskell-language-server-wrapper", args = ["--lsp"] }
idris2-lsp = { command = "idris2-lsp" } idris2-lsp = { command = "idris2-lsp" }
intelephense = { command = "intelephense", args = ["--stdio"] } intelephense = { command = "intelephense", args = ["--stdio"] }
jdtls = { command = "jdtls" } jdtls = { command = "jdtls" }
jedi = { command = "jedi-language-server" }
jq-lsp = { command = "jq-lsp" } jq-lsp = { command = "jq-lsp" }
jsonnet-language-server = { command = "jsonnet-language-server", args= ["-t", "--lint"] } jsonnet-language-server = { command = "jsonnet-language-server", args= ["-t", "--lint"] }
julia = { command = "julia", timeout = 60, args = [ "--startup-file=no", "--history-file=no", "--quiet", "-e", "using LanguageServer; runserver()", ] } julia = { command = "julia", timeout = 60, args = [ "--startup-file=no", "--history-file=no", "--quiet", "-e", "using LanguageServer; runserver()", ] }
koka = { command = "koka", args = ["--language-server", "--lsstdio"] } koka = { command = "koka", args = ["--language-server", "--lsstdio"] }
kotlin-language-server = { command = "kotlin-language-server" } kotlin-language-server = { command = "kotlin-language-server" }
lean = { command = "lean", args = [ "--server" ] } lean = { command = "lean", args = [ "--server", "--memory=1024" ] }
ltex-ls = { command = "ltex-ls" } ltex-ls = { command = "ltex-ls" }
markdoc-ls = { command = "markdoc-ls", args = ["--stdio"] } markdoc-ls = { command = "markdoc-ls", args = ["--stdio"] }
markdown-oxide = { command = "markdown-oxide" } markdown-oxide = { command = "markdown-oxide" }
@ -84,6 +88,7 @@ racket = { command = "racket", args = ["-l", "racket-langserver"] }
regols = { command = "regols" } regols = { command = "regols" }
rescript-language-server = { command = "rescript-language-server", args = ["--stdio"] } rescript-language-server = { command = "rescript-language-server", args = ["--stdio"] }
robotframework_ls = { command = "robotframework_ls" } robotframework_ls = { command = "robotframework_ls" }
ruff = { command = "ruff", args = ["server"] }
serve-d = { command = "serve-d" } serve-d = { command = "serve-d" }
slint-lsp = { command = "slint-lsp", args = [] } slint-lsp = { command = "slint-lsp", args = [] }
solargraph = { command = "solargraph", args = ["stdio"] } solargraph = { command = "solargraph", args = ["stdio"] }
@ -91,6 +96,7 @@ solc = { command = "solc", args = ["--lsp"] }
sourcekit-lsp = { command = "sourcekit-lsp" } sourcekit-lsp = { command = "sourcekit-lsp" }
svlangserver = { command = "svlangserver", args = [] } svlangserver = { command = "svlangserver", args = [] }
swipl = { command = "swipl", args = [ "-g", "use_module(library(lsp_server))", "-g", "lsp_server:main", "-t", "halt", "--", "stdio" ] } swipl = { command = "swipl", args = [ "-g", "use_module(library(lsp_server))", "-g", "lsp_server:main", "-t", "halt", "--", "stdio" ] }
superhtml = { command = "superhtml", args = ["lsp"]}
tailwindcss-ls = { command = "tailwindcss-language-server", args = ["--stdio"] } tailwindcss-ls = { command = "tailwindcss-language-server", args = ["--stdio"] }
taplo = { command = "taplo", args = ["lsp", "stdio"] } taplo = { command = "taplo", args = ["lsp", "stdio"] }
templ = { command = "templ", args = ["lsp"] } templ = { command = "templ", args = ["lsp"] }
@ -98,6 +104,7 @@ terraform-ls = { command = "terraform-ls", args = ["serve"] }
texlab = { command = "texlab" } texlab = { command = "texlab" }
typespec = { command = "tsp-server", args = ["--stdio"] } typespec = { command = "tsp-server", args = ["--stdio"] }
vala-language-server = { command = "vala-language-server" } vala-language-server = { command = "vala-language-server" }
vale-ls = { command = "vale-ls" }
vhdl_ls = { command = "vhdl_ls", args = [] } vhdl_ls = { command = "vhdl_ls", args = [] }
vlang-language-server = { command = "v-analyzer" } vlang-language-server = { command = "v-analyzer" }
vscode-css-language-server = { command = "vscode-css-language-server", args = ["--stdio"], config = { provideFormatter = true, css = { validate = { enable = true } } } } vscode-css-language-server = { command = "vscode-css-language-server", args = ["--stdio"], config = { provideFormatter = true, css = { validate = { enable = true } } } }
@ -443,6 +450,8 @@ file-types = [
{ glob = ".watchmanconfig" }, { glob = ".watchmanconfig" },
"avsc", "avsc",
{ glob = ".prettierrc" }, { glob = ".prettierrc" },
"ldtk",
"ldtkl",
] ]
language-servers = [ "vscode-json-language-server" ] language-servers = [ "vscode-json-language-server" ]
auto-format = true auto-format = true
@ -836,7 +845,7 @@ scope = "text.html.basic"
injection-regex = "html" injection-regex = "html"
file-types = ["html", "htm", "shtml", "xhtml", "xht", "jsp", "asp", "aspx", "jshtm", "volt", "rhtml", "cshtml"] file-types = ["html", "htm", "shtml", "xhtml", "xht", "jsp", "asp", "aspx", "jshtm", "volt", "rhtml", "cshtml"]
block-comment-tokens = { start = "<!--", end = "-->" } block-comment-tokens = { start = "<!--", end = "-->" }
language-servers = [ "vscode-html-language-server" ] language-servers = [ "vscode-html-language-server", "superhtml" ]
auto-format = true auto-format = true
indent = { tab-width = 2, unit = " " } indent = { tab-width = 2, unit = " " }
@ -852,7 +861,7 @@ file-types = ["py", "pyi", "py3", "pyw", "ptl", "rpy", "cpy", "ipy", "pyt", { gl
shebangs = ["python"] shebangs = ["python"]
roots = ["pyproject.toml", "setup.py", "poetry.lock", "pyrightconfig.json"] roots = ["pyproject.toml", "setup.py", "poetry.lock", "pyrightconfig.json"]
comment-token = "#" comment-token = "#"
language-servers = [ "pylsp" ] language-servers = ["ruff", "jedi", "pylsp"]
# TODO: pyls needs utf-8 offsets # TODO: pyls needs utf-8 offsets
indent = { tab-width = 4, unit = " " } indent = { tab-width = 4, unit = " " }
@ -878,7 +887,7 @@ indent = { tab-width = 2, unit = " " }
[[grammar]] [[grammar]]
name = "nickel" name = "nickel"
source = { git = "https://github.com/nickel-lang/tree-sitter-nickel", rev = "e1d9337864d209898a08c26b8cd4c2dd14c15148" } source = { git = "https://github.com/nickel-lang/tree-sitter-nickel", rev = "88d836a24b3b11c8720874a1a9286b8ae838d30a" }
[[language]] [[language]]
name = "nix" name = "nix"
@ -909,6 +918,7 @@ file-types = [
"podspec", "podspec",
"rjs", "rjs",
"rbi", "rbi",
"rbs",
{ glob = "rakefile" }, { glob = "rakefile" },
{ glob = "gemfile" }, { glob = "gemfile" },
{ glob = "Rakefile" }, { glob = "Rakefile" },
@ -1234,6 +1244,23 @@ indent = { tab-width = 2, unit = " " }
name = "ocaml-interface" name = "ocaml-interface"
source = { git = "https://github.com/tree-sitter/tree-sitter-ocaml", rev = "9965d208337d88bbf1a38ad0b0fe49e5f5ec9677", subpath = "interface" } source = { git = "https://github.com/tree-sitter/tree-sitter-ocaml", rev = "9965d208337d88bbf1a38ad0b0fe49e5f5ec9677", subpath = "interface" }
[[language]]
name = "dune"
scope = "source.dune"
roots = ["dune-project"]
file-types = [{ glob = "dune-project" }, { glob = "dune" }]
comment-token = ";"
indent = { tab-width = 1, unit = " " }
grammar = "scheme"
auto-format = true
formatter = { command = "dune", args = ["format-dune-file"] }
[language.auto-pairs]
'(' = ')'
'{' = '}'
'[' = ']'
'"' = '"'
[[language]] [[language]]
name = "lua" name = "lua"
injection-regex = "lua" injection-regex = "lua"
@ -1426,6 +1453,7 @@ file-types = ["glsl", "vert", "tesc", "tese", "geom", "frag", "comp" ]
comment-token = "//" comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" } block-comment-tokens = { start = "/*", end = "*/" }
indent = { tab-width = 4, unit = " " } indent = { tab-width = 4, unit = " " }
language-servers = [ "glsl_analyzer" ]
injection-regex = "glsl" injection-regex = "glsl"
[[grammar]] [[grammar]]
@ -1792,7 +1820,7 @@ roots = ["rebar.config"]
shebangs = ["escript"] shebangs = ["escript"]
comment-token = "%%" comment-token = "%%"
indent = { tab-width = 4, unit = " " } indent = { tab-width = 4, unit = " " }
language-servers = [ "erlang-ls" ] language-servers = [ "erlang-ls", "elp" ]
[[grammar]] [[grammar]]
name = "erlang" name = "erlang"
@ -1826,7 +1854,7 @@ auto-format = true
[[grammar]] [[grammar]]
name = "hcl" name = "hcl"
source = { git = "https://github.com/MichaHoffmann/tree-sitter-hcl", rev = "3cb7fc28247efbcb2973b97e71c78838ad98a583" } source = { git = "https://github.com/tree-sitter-grammars/tree-sitter-hcl", rev = "9e3ec9848f28d26845ba300fd73c740459b83e9b" }
[[language]] [[language]]
name = "tfvars" name = "tfvars"
@ -2130,7 +2158,7 @@ language-servers = [ "ols" ]
comment-token = "//" comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" } block-comment-tokens = { start = "/*", end = "*/" }
indent = { tab-width = 4, unit = "\t" } indent = { tab-width = 4, unit = "\t" }
formatter = { command = "odinfmt", args = [ "-stdin", "true" ] } formatter = { command = "odinfmt", args = [ "-stdin" ] }
[language.debugger] [language.debugger]
name = "lldb-dap" name = "lldb-dap"
@ -2397,7 +2425,7 @@ language-servers = [ "slint-lsp" ]
[[grammar]] [[grammar]]
name = "slint" name = "slint"
source = { git = "https://github.com/slint-ui/tree-sitter-slint", rev = "4a0558cc0fcd7a6110815b9bbd7cc12d7ab31e74" } source = { git = "https://github.com/slint-ui/tree-sitter-slint", rev = "34ccfd58d3baee7636f62d9326f32092264e8407" }
[[language]] [[language]]
name = "task" name = "task"
@ -2456,6 +2484,12 @@ injection-regex = "sml"
file-types = ["sml"] file-types = ["sml"]
block-comment-tokens = { start = "(*", end = "*)" } block-comment-tokens = { start = "(*", end = "*)" }
[language.auto-pairs]
'(' = ')'
'{' = '}'
'[' = ']'
'"' = '"'
[[grammar]] [[grammar]]
name = "sml" name = "sml"
source = { git = "https://github.com/Giorbo/tree-sitter-sml", rev = "bd4055d5554614520d4a0706b34dc0c317c6b608" } source = { git = "https://github.com/Giorbo/tree-sitter-sml", rev = "bd4055d5554614520d4a0706b34dc0c317c6b608" }
@ -3238,7 +3272,7 @@ text-width = 72
[[grammar]] [[grammar]]
name = "jjdescription" name = "jjdescription"
source = { git = "https://github.com/kareigu/tree-sitter-jjdescription", rev = "2ddec6cad07b366aee276a608e1daa2c29d3caf2" } source = { git = "https://github.com/kareigu/tree-sitter-jjdescription", rev = "23dd3dd18ee29bdd761642511aa314215801afd8" }
[[language]] [[language]]
name = "jq" name = "jq"
@ -3786,3 +3820,32 @@ indent = { tab-width = 2, unit = " " }
[[grammar]] [[grammar]]
name = "thrift" name = "thrift"
source = { git = "https://github.com/tree-sitter-grammars/tree-sitter-thrift" , rev = "68fd0d80943a828d9e6f49c58a74be1e9ca142cf" } source = { git = "https://github.com/tree-sitter-grammars/tree-sitter-thrift" , rev = "68fd0d80943a828d9e6f49c58a74be1e9ca142cf" }
[[language]]
name = "circom"
scope = "source.circom"
injection-regex = "circom"
file-types = ["circom"]
roots = ["package.json"]
comment-tokens = "//"
indent = { tab-width = 4, unit = " " }
auto-format = false
language-servers = ["circom-lsp"]
[[grammar]]
name = "circom"
source = { git = "https://github.com/Decurity/tree-sitter-circom", rev = "02150524228b1e6afef96949f2d6b7cc0aaf999e" }
[[language]]
name = "snakemake"
scope = "source.snakemake"
roots = ["Snakefile", "config.yaml", "environment.yaml", "workflow/"]
file-types = ["smk", "Snakefile"]
comment-tokens = ["#", "##"]
indent = { tab-width = 2, unit = " " }
language-servers = ["pylsp" ]
[[grammar]]
name = "snakemake"
source = { git = "https://github.com/osthomas/tree-sitter-snakemake", rev = "e909815acdbe37e69440261ebb1091ed52e1dec6" }

@ -0,0 +1,142 @@
; identifiers
; -----------
(identifier) @variable
; Pragma
; -----------
(pragma_directive) @keyword.directive
; Include
; -----------
(include_directive) @keyword.directive
; Literals
; --------
(string) @string
(int_literal) @constant.numeric.integer
(comment) @comment
; Definitions
; -----------
(function_definition
name: (identifier) @keyword.function)
(template_definition
name: (identifier) @keyword.function)
; Use constructor coloring for special functions
(main_component_definition) @constructor
; Invocations
(call_expression . (identifier) @function)
; Function parameters
(parameter name: (identifier) @variable.parameter)
; Members
(member_expression property: (property_identifier) @variable.other.member)
; Tokens
; -------
; Keywords
[
"signal"
"var"
"component"
] @keyword.storage.type
[ "include" ] @keyword.control.import
[
"public"
"input"
"output"
] @keyword.storage.modifier
[
"for"
"while"
] @keyword.control.repeat
[
"if"
"else"
] @keyword.control.conditional
[
"return"
] @keyword.control.return
[
"function"
"template"
] @keyword.function
; Punctuation
[
"("
")"
"["
"]"
"{"
"}"
] @punctuation.bracket
[
"."
","
";"
] @punctuation.delimiter
; Operators
; https://docs.circom.io/circom-language/basic-operators
[
"="
"?"
"&&"
"||"
"!"
"<"
">"
"<="
">="
"=="
"!="
"+"
"-"
"*"
"**"
"/"
"\\"
"%"
"+="
"-="
"*="
"**="
"/="
"\\="
"%="
"++"
"--"
"&"
"|"
"~"
"^"
">>"
"<<"
"&="
"|="
; "\~=" ; bug, uncomment and circom will not highlight
"^="
">>="
"<<="
] @operator
[
"<=="
"==>"
"<--"
"-->"
"==="
] @operator

@ -0,0 +1,9 @@
(function_definition) @local.scope
(template_definition) @local.scope
(main_component_definition) @local.scope
(block_statement) @local.scope
(parameter name: (identifier) @local.definition) @local.definition
(identifier) @local.reference

@ -101,19 +101,19 @@
] ]
) )
; non-builtin command names ; derived from builtin -n (fish 3.7.1)
(command name: (word) @function)
; derived from builtin -n (fish 3.2.2)
(command (command
name: [ name: [
(word) @function.builtin (word) @function.builtin
(#match? @function.builtin "^(\.|:|_|alias|argparse|bg|bind|block|breakpoint|builtin|cd|command|commandline|complete|contains|count|disown|echo|emit|eval|exec|exit|fg|functions|history|isatty|jobs|math|printf|pwd|random|read|realpath|set|set_color|source|status|string|test|time|type|ulimit|wait)$") (#any-of? @function.builtin "abbr" "alias" "and" "argparse" "begin" "bg" "bind" "block" "break" "breakpoint" "builtin" "case" "cd" "command" "commandline" "complete" "contains" "continue" "count" "disown" "echo" "else" "emit" "end" "eval" "exec" "exit" "false" "fg" "for" "function" "functions" "history" "if" "isatty" "jobs" "math" "not" "or" "path" "printf" "pwd" "random" "read" "realpath" "return" "set" "set_color" "source" "status" "string" "switch" "test" "time" "true" "type" "ulimit" "wait" "while")
] ]
) )
(test_command "test" @function.builtin) (test_command "test" @function.builtin)
; non-builtin command names
(command name: (word) @function)
;; Functions ;; Functions
(function_definition ["function" "end"] @keyword.function) (function_definition ["function" "end"] @keyword.function)

@ -23,17 +23,19 @@
(let_in_block (let_in_block
"let" @keyword "let" @keyword
"rec"? @keyword "rec"? @keyword
"in" @keyword
)
(let_binding
pat: (pattern pat: (pattern
(ident) @variable (ident) @variable
) )
"in" @keyword
) )
(fun_expr (fun_expr
"fun" @keyword.function "fun" @keyword.function
pats: pats:
(pattern (pattern_fun (ident) @variable.parameter)+
id: (ident) @variable.parameter
)+
"=>" @operator "=>" @operator
) )
(record_field) @variable.other.member (record_field) @variable.other.member

@ -4,10 +4,13 @@
] @keyword.directive ] @keyword.directive
[ [
"import"
"package" "package"
] @namespace ] @namespace
[
"import"
] @keyword.control.import
[ [
"foreign" "foreign"
"using" "using"
@ -200,7 +203,7 @@
(struct . (identifier) @type) (struct . (identifier) @type)
(field_type . (identifier) "." (identifier) @type) (field_type . (identifier) @keyword.storage.type "." (identifier) @type)
(bit_set_type (identifier) @type ";") (bit_set_type (identifier) @type ";")
@ -248,6 +251,8 @@
(using_statement (identifier) @namespace) (using_statement (identifier) @namespace)
(import_declaration (identifier) @keyword.storage.type)
; Parameters ; Parameters
(parameter (identifier) @variable.parameter ":" "="? (identifier)? @constant) (parameter (identifier) @variable.parameter ":" "="? (identifier)? @constant)

@ -0,0 +1,20 @@
Copyright (c) 2016 Max Brunsfeld
Copyright (c) 2023 Oliver Thomas
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

@ -0,0 +1,8 @@
; inherits: python
[
(rule_definition)
(rule_inheritance)
(module_definition)
(checkpoint_definition)
] @fold

@ -0,0 +1,76 @@
; inherits: python
; Compound directives
[
"rule"
"checkpoint"
"module"
] @keyword
; Top level directives (eg. configfile, include)
(module
(directive
name: _ @keyword))
; Subordinate directives (eg. input, output)
((_)
body: (_
(directive
name: _ @label)))
; rule/module/checkpoint names
(rule_definition
name: (identifier) @type)
(module_definition
name: (identifier) @type)
(checkpoint_definition
name: (identifier) @type)
; Rule imports
(rule_import
"use" @keyword.import
"rule" @keyword.import
"from" @keyword.import
"exclude"? @keyword.import
"as"? @keyword.import
"with"? @keyword.import)
; Rule inheritance
(rule_inheritance
"use" @keyword
"rule" @keyword
"with" @keyword)
; Wildcard names
(wildcard (identifier) @variable)
(wildcard (flag) @variable.parameter.builtin)
; builtin variables
((identifier) @variable.builtin
(#any-of? @variable.builtin "checkpoints" "config" "gather" "rules" "scatter" "workflow"))
; References to directive labels in wildcard interpolations
; the #any-of? queries are moved above the #has-ancestor? queries to
; short-circuit the potentially expensive tree traversal, if possible
; see:
; https://github.com/nvim-treesitter/nvim-treesitter/pull/4302#issuecomment-1685789790
; directive labels in wildcard context
((wildcard
(identifier) @label)
(#any-of? @label "input" "log" "output" "params" "resources" "threads" "wildcards"))
((wildcard
(attribute
object: (identifier) @label))
(#any-of? @label "input" "log" "output" "params" "resources" "threads" "wildcards"))
((wildcard
(subscript
value: (identifier) @label))
(#any-of? @label "input" "log" "output" "params" "resources" "threads" "wildcards"))
; directive labels in block context (eg. within 'run:')
((identifier) @label
(#any-of? @label "input" "log" "output" "params" "resources" "threads" "wildcards"))

@ -0,0 +1,27 @@
; inherits: python
[
(rule_definition)
(checkpoint_definition)
(rule_inheritance)
(module_definition)
] @indent
[
(rule_definition)
(checkpoint_definition)
(rule_inheritance)
(module_definition)
] @extend
(directive) @indent
(directive) @extend
(rule_import
"with"
":") @indent
(rule_import
"with"
":") @extend

@ -0,0 +1,5 @@
; inherits: python
(wildcard
(constraint) @injection.content
(#set! injection.language "regex"))

@ -6,9 +6,13 @@
(attribute (attribute
(attribute_name) @attribute (attribute_name) @attribute
(quoted_attribute_value [(attribute_value) (quoted_attribute_value)]? @string)
(attribute_value) @string)
) (directive_attribute
(directive_name) @attribute
(directive_argument)? @attribute
(directive_modifiers)? @attribute
[(attribute_value) (quoted_attribute_value)]? @string)
(comment) @comment (comment) @comment
@ -18,4 +22,7 @@
"</" "</"
"{{" "{{"
"}}" "}}"
"/>"
] @punctuation.bracket ] @punctuation.bracket
"=" @punctuation.delimiter

@ -68,8 +68,10 @@
"ui.statusline.select" = { fg = "my_gray7", bg = "my_black", modifiers = ["bold"] } "ui.statusline.select" = { fg = "my_gray7", bg = "my_black", modifiers = ["bold"] }
"ui.text.focus" = "my_white1" "ui.text.focus" = "my_white1"
"ui.text" = "my_white1" "ui.text" = "my_white1"
"ui.virtual.inlay-hint" = { fg = "my_gray4", bg="my_black", modifiers = ["normal"] } # Invalid modifier: "normal". See 'https://github.com/helix-editor/helix/issues/5709'
"ui.virtual.inlay-hint.parameter" = { fg = "my_gray4", modifiers = ["normal"] } "ui.virtual.inlay-hint" = { fg = "my_gray4", bg="my_black" } #, modifiers = ["normal"] }
# "ui.virtual.inlay-hint.parameter" = { fg = "my_gray4", modifiers = ["normal"] }
"ui.virtual.inlay-hint.parameter" = "my_gray4"
"ui.virtual.inlay-hint.type" = { fg = "my_gray4", modifiers = ["italic"] } "ui.virtual.inlay-hint.type" = { fg = "my_gray4", modifiers = ["italic"] }
"ui.virtual.jump-label" = { fg = "my_yellow2", modifiers = ["bold"] } "ui.virtual.jump-label" = { fg = "my_yellow2", modifiers = ["bold"] }
"ui.virtual.ruler" = { bg = "my_gray1" } "ui.virtual.ruler" = { bg = "my_gray1" }

@ -59,8 +59,10 @@
"ui.text.focus" = { fg = "bogster-fg1", modifiers= ["bold"] } "ui.text.focus" = { fg = "bogster-fg1", modifiers= ["bold"] }
"ui.virtual.whitespace" = "bogster-base5" "ui.virtual.whitespace" = "bogster-base5"
"ui.virtual.ruler" = { bg = "bogster-base0" } "ui.virtual.ruler" = { bg = "bogster-base0" }
"ui.virtual.jump-label" = { fg = "bogster-base0", bg = "bogster-yellow", modifiers = [ "bold" ] }
"ui.selection" = { bg = "bogster-base3" } "ui.selection" = { bg = "bogster-base2" }
"ui.selection.primary" = { bg = "bogster-base3" }
"ui.cursor.match" = { fg = "bogster-base3", bg = "bogster-orange" } "ui.cursor.match" = { fg = "bogster-base3", bg = "bogster-orange" }
"ui.cursor" = { fg = "bogster-base5", modifiers = ["reversed"] } "ui.cursor" = { fg = "bogster-base5", modifiers = ["reversed"] }

@ -28,6 +28,8 @@
"ui.virtual.jump-label" = { fg = "lightblue", modifiers = ["italic", "bold"] } "ui.virtual.jump-label" = { fg = "lightblue", modifiers = ["italic", "bold"] }
"ui.bufferline" = { fg = "grey04", bg = "grey00" } "ui.bufferline" = { fg = "grey04", bg = "grey00" }
"ui.bufferline.active" = { fg = "grey07", bg = "grey02" } "ui.bufferline.active" = { fg = "grey07", bg = "grey02" }
"ui.picker.header.column" = { fg = "grey05", modifiers = ["italic", "bold"] }
"ui.picker.header.column.active" = { fg = "grey05", bg = "grey03", modifiers = ["italic", "bold"] }
"operator" = "grey05" "operator" = "grey05"
"variable" = "white" "variable" = "white"

@ -68,7 +68,8 @@
"ui.menu.selected" = { fg = "dark_red", bg = "light_blue" } "ui.menu.selected" = { fg = "dark_red", bg = "light_blue" }
"ui.selection" = { bg = "lightgoldenrod1" } "ui.selection" = { bg = "lightgoldenrod1" }
"ui.selection.primary" = { bg = "lightgoldenrod2" } "ui.selection.primary" = { bg = "lightgoldenrod2" }
"ui.virtual.whitespace" = "highlight" # Malformed ANSI: highlight. See 'https://github.com/helix-editor/helix/issues/5709'
# "ui.virtual.whitespace" = "highlight"
"ui.virtual.ruler" = { bg = "gray95" } "ui.virtual.ruler" = { bg = "gray95" }
"ui.virtual.inlay-hint" = { fg = "gray75" } "ui.virtual.inlay-hint" = { fg = "gray75" }
"ui.cursorline.primary" = { bg = "darkseagreen2" } "ui.cursorline.primary" = { bg = "darkseagreen2" }

@ -61,8 +61,11 @@
"ui.virtual" = { fg = "base5", bg = "base6" } "ui.virtual" = { fg = "base5", bg = "base6" }
"ui.virtual.whitespace" = { fg = "base5" } "ui.virtual.whitespace" = { fg = "base5" }
"ui.virtual.ruler" = { bg = "base6" } "ui.virtual.ruler" = { bg = "base6" }
"ui.virtual.inlay-hint" = { fg = "base4", modifiers = ["normal"] } # Invalid modifier: "normal". See 'https://github.com/helix-editor/helix/issues/5709'
"ui.virtual.inlay-hint.parameter" = { fg = "base3", modifiers = ["normal"] } # "ui.virtual.inlay-hint" = { fg = "base4", modifiers = ["normal"] }
# "ui.virtual.inlay-hint.parameter" = { fg = "base3", modifiers = ["normal"] }
"ui.virtual.inlay-hint" = "base4"
"ui.virtual.inlay-hint.parameter" = "base3"
"ui.virtual.inlay-hint.type" = { fg = "base3", modifiers = ["italic"] } "ui.virtual.inlay-hint.type" = { fg = "base3", modifiers = ["italic"] }
"ui.linenr" = { bg = "base6" } "ui.linenr" = { bg = "base6" }

@ -67,6 +67,7 @@
"ui.virtual.ruler" = { bg = "bg1" } "ui.virtual.ruler" = { bg = "bg1" }
"ui.virtual.inlay-hint" = { fg = "bg7" } "ui.virtual.inlay-hint" = { fg = "bg7" }
"ui.virtual.wrap" = { fg = "bg2" } "ui.virtual.wrap" = { fg = "bg2" }
"ui.virtual.jump-label" = { fg = "red3", modifiers = ["bold"] }
"diagnostic.warning" = { underline = { color = "orange1", style = "dashed" } } "diagnostic.warning" = { underline = { color = "orange1", style = "dashed" } }
"diagnostic.error" = { underline = { color = "red3", style = "dashed" } } "diagnostic.error" = { underline = { color = "red3", style = "dashed" } }

@ -43,6 +43,8 @@ tag = "red"
"ui.bufferline" = { bg = "dark-bg", fg = "light-gray" } "ui.bufferline" = { bg = "dark-bg", fg = "light-gray" }
"ui.bufferline.active" = { bg = "dark-bg", fg = "orange" } "ui.bufferline.active" = { bg = "dark-bg", fg = "orange" }
"ui.virtual.jump-label" = { fg = "pink", modifiers = ["bold"] } "ui.virtual.jump-label" = { fg = "pink", modifiers = ["bold"] }
"ui.picker.header.column" = { fg = "orange", underline.style = "line" }
"ui.picker.header.column.active" = { fg = "purple", modifiers = ["bold"], underline.style = "line" }
# Diagnostics # Diagnostics
"diagnostic" = { underline = { style = "curl" } } "diagnostic" = { underline = { style = "curl" } }

@ -25,7 +25,8 @@
"ui.statusline.normal" = { fg = "sumiInk0", bg = "crystalBlue", modifiers = ["bold"] } "ui.statusline.normal" = { fg = "sumiInk0", bg = "crystalBlue", modifiers = ["bold"] }
"ui.statusline.insert" = { fg = "sumiInk0", bg = "autumnGreen", modifiers = ["bold"] } "ui.statusline.insert" = { fg = "sumiInk0", bg = "autumnGreen", modifiers = ["bold"] }
"ui.statusline.select" = { fg = "sumiInk0", bg = "oniViolet", modifiers = ["bold"] } "ui.statusline.select" = { fg = "sumiInk0", bg = "oniViolet", modifiers = ["bold"] }
"ui.statusline.separator" = { fg = "", bg = "" } # Malformed ANSI: "". See 'https://github.com/helix-editor/helix/issues/5709'
# "ui.statusline.separator" = { fg = "", bg = "" }
"ui.bufferline" = { fg = "fujiGray", bg = "sumiInk0" } "ui.bufferline" = { fg = "fujiGray", bg = "sumiInk0" }
"ui.bufferline.active" = { fg = "oldWhite", bg = "sumiInk0" } "ui.bufferline.active" = { fg = "oldWhite", bg = "sumiInk0" }

@ -79,7 +79,8 @@
"ui.statusline" = { fg = "active_text", bg = "#414339" } "ui.statusline" = { fg = "active_text", bg = "#414339" }
"ui.statusline.inactive" = { fg = "active_text", bg = "#75715e" } "ui.statusline.inactive" = { fg = "active_text", bg = "#75715e" }
"ui.bufferline" = { fg = "grey2", bg = "bg3" } # Malformed ANSI: grey2, bg3. See 'https://github.com/helix-editor/helix/issues/5709'
# "ui.bufferline" = { fg = "grey2", bg = "bg3" }
"ui.bufferline.active" = { fg = "active_text", bg = "selection", modifiers = [ "ui.bufferline.active" = { fg = "active_text", bg = "selection", modifiers = [
"bold", "bold",
] } ] }

@ -8,9 +8,12 @@ inherits = "monokai"
"type" = { fg = "type", modifiers = ["bold"] } "type" = { fg = "type", modifiers = ["bold"] }
"ui.statusline.normal" = { fg = "light-black", bg = "cyan" } # Malformed ANSI: light-black, purple. See 'https://github.com/helix-editor/helix/issues/5709'
"ui.statusline.insert" = { fg = "light-black", bg = "green" } # "ui.statusline.normal" = { fg = "light-black", bg = "cyan" }
"ui.statusline.select" = { fg = "light-black", bg = "purple" } "ui.statusline.normal" = { bg = "cyan" }
# "ui.statusline.insert" = { fg = "light-black", bg = "green" }
"ui.statusline.insert" = { bg = "green" }
# "ui.statusline.select" = { fg = "light-black", bg = "purple" }
"ui.virtual.jump-label" = { fg = "cyan", modifiers = ["bold"] } "ui.virtual.jump-label" = { fg = "cyan", modifiers = ["bold"] }

@ -15,6 +15,7 @@
"keyword.control" = { fg = "purple" } "keyword.control" = { fg = "purple" }
"keyword.control.import" = { fg = "red" } "keyword.control.import" = { fg = "red" }
"keyword.directive" = { fg = "purple" } "keyword.directive" = { fg = "purple" }
"keyword.storage" = { fg = "purple" }
"label" = { fg = "purple" } "label" = { fg = "purple" }
"namespace" = { fg = "blue" } "namespace" = { fg = "blue" }
"operator" = { fg = "purple" } "operator" = { fg = "purple" }

@ -1,60 +1,61 @@
# Author : Timothy DeHerrera <tim@nrdxp.dev> # Author : Timothy DeHerrera <tim@nrdxp.dev>
"comment".fg = "comment" "comment".fg = "comment"
"constant".fg = "purple"
"constant.builtin".fg = "olive" "constant.builtin".fg = "olive"
"constant.character".fg = "carnation"
"constant.character.escape".fg = "magenta" "constant.character.escape".fg = "magenta"
"constant.character".fg = "carnation"
"constant".fg = "purple"
"constant.numeric".fg = "cyan" "constant.numeric".fg = "cyan"
"constant.numeric.float".fg = "red" "constant.numeric.float".fg = "red"
"function".fg = "green"
"function.builtin".fg = "sand" "function.builtin".fg = "sand"
"function".fg = "green"
"function.macro".fg = "blue" "function.macro".fg = "blue"
"function.method".fg = "opal" "function.method".fg = "opal"
"keyword" = { fg = "magenta", modifiers = ["bold"] } "keyword" = { fg = "magenta", modifiers = ["bold"] }
"keyword.operator" = { fg = "coral", modifiers = ["bold"] }
"keyword.function" = { fg = "lilac", modifiers = ["bold"] }
"keyword.control" = { fg = "carnation", modifiers = ["bold"] } "keyword.control" = { fg = "carnation", modifiers = ["bold"] }
"keyword.control.exception" = { fg = "red", modifiers = ["bold"] } "keyword.control.exception" = { fg = "red", modifiers = ["bold"] }
"keyword.function" = { fg = "lilac", modifiers = ["bold"] }
"keyword.operator" = { fg = "coral", modifiers = ["bold"] }
"keyword.storage" = { fg = "coral", modifiers = ["bold"] } "keyword.storage" = { fg = "coral", modifiers = ["bold"] }
"operator".fg = "coral" "operator".fg = "coral"
"punctuation".fg = "magenta"
"punctuation.delimiter".fg = "coral"
"punctuation.bracket".fg = "foreground" "punctuation.bracket".fg = "foreground"
"punctuation.delimiter".fg = "coral"
"punctuation".fg = "magenta"
"attribute".fg = "opal"
"string".fg = "yellow" "string".fg = "yellow"
"string.special".fg = "blue"
"string.regexp".fg = "red" "string.regexp".fg = "red"
"string.special".fg = "blue"
"tag".fg = "carnation" "tag".fg = "carnation"
"attribute".fg = "opal"
"type".fg = "opal"
"type.variant".fg = "sand"
"type.builtin".fg = "yellow" "type.builtin".fg = "yellow"
"type.enum.variant".fg = "sand" "type.enum.variant".fg = "sand"
"type".fg = "opal"
"type.variant".fg = "sand"
"variable".fg = "cyan"
"variable.builtin".fg = "olive" "variable.builtin".fg = "olive"
"variable".fg = "cyan"
"variable.other.member".fg = "lilac" "variable.other.member".fg = "lilac"
"variable.parameter" = { fg = "blue", modifiers = ["italic"] } "variable.parameter" = { fg = "blue", modifiers = ["italic"] }
"namespace".fg = "olive"
"constructor".fg = "sand" "constructor".fg = "sand"
"special".fg = "magenta"
"label".fg = "magenta" "label".fg = "magenta"
"namespace".fg = "olive"
"special".fg = "magenta"
"diff.plus".fg = "green"
"diff.delta".fg = "blue" "diff.delta".fg = "blue"
"diff.minus".fg = "red" "diff.minus".fg = "red"
"diff.plus".fg = "green"
"ui.background" = { fg = "foreground", bg = "background" } "ui.background" = { fg = "foreground", bg = "background" }
"ui.cursor" = { fg = "background", bg = "blue", modifiers = ["dim"] } "ui.cursor" = { fg = "background", bg = "blue", modifiers = ["dim"] }
"ui.cursor.match" = { fg = "green", modifiers = ["underlined"] } "ui.cursor.match" = { fg = "green", modifiers = ["underlined"] }
"ui.cursor.primary" = { fg = "background", bg = "cyan", modifiers = ["dim"] } "ui.cursor.primary" = { fg = "background", bg = "cyan", modifiers = ["dim"] }
"ui.cursorline" = { bg = "background_dark" }
"ui.help" = { fg = "foreground", bg = "background_dark" } "ui.help" = { fg = "foreground", bg = "background_dark" }
"ui.linenr" = { fg = "comment" } "ui.linenr" = { fg = "comment" }
"ui.linenr.selected" = { fg = "foreground" } "ui.linenr.selected" = { fg = "foreground" }
@ -63,7 +64,6 @@
"ui.popup" = { fg = "foreground", bg = "background_dark" } "ui.popup" = { fg = "foreground", bg = "background_dark" }
"ui.selection" = { bg = "secondary_highlight" } "ui.selection" = { bg = "secondary_highlight" }
"ui.selection.primary" = { bg = "primary_highlight" } "ui.selection.primary" = { bg = "primary_highlight" }
"ui.cursorline" = { bg = "background_dark" }
"ui.statusline" = { fg = "foreground", bg = "background_dark" } "ui.statusline" = { fg = "foreground", bg = "background_dark" }
"ui.statusline.inactive" = { fg = "comment", bg = "background_dark" } "ui.statusline.inactive" = { fg = "comment", bg = "background_dark" }
"ui.statusline.insert" = { fg = "olive", bg = "background_dark" } "ui.statusline.insert" = { fg = "olive", bg = "background_dark" }
@ -71,49 +71,54 @@
"ui.statusline.select" = { fg = "carnation", bg = "background_dark" } "ui.statusline.select" = { fg = "carnation", bg = "background_dark" }
"ui.text" = { fg = "foreground" } "ui.text" = { fg = "foreground" }
"ui.text.focus" = { fg = "cyan" } "ui.text.focus" = { fg = "cyan" }
"ui.window" = { fg = "foreground" }
"ui.virtual.whitespace" = { fg = "comment" }
"ui.virtual.indent-guide" = { fg = "opal" } "ui.virtual.indent-guide" = { fg = "opal" }
"ui.virtual.ruler" = { bg = "background_dark" } "ui.virtual.ruler" = { bg = "background_dark" }
"ui.virtual.whitespace" = { fg = "comment" }
"ui.window" = { fg = "foreground" }
"error" = { fg = "red" } "error" = { fg = "red" }
"warning" = { fg = "cyan" } "warning" = { fg = "cyan" }
"diagnostic.unnecessary" = { modifiers = ["dim"] } "diagnostic" = { underline = { style = "line", color = "coral" }, bg = "cyan" }
"diagnostic.deprecated" = { modifiers = ["crossed_out"] } "diagnostic.deprecated" = { modifiers = ["crossed_out"] }
"diagnostic.error" = { underline = { style = "curl", color = "red" } }
"diagnostic.hint" = { underline = { style = "line", color = "cyan" } }
"diagnostic.info" = { underline = { style = "line" } }
"diagnostic.unnecessary" = { modifiers = ["dim"] }
"diagnostic.warning" = { underline = { style = "curl", color = "yellow" } }
"markup.heading" = { fg = "purple", modifiers = ["bold"] }
"markup.link.label" = { fg = "blue", modifiers = ["italic"] }
"markup.list" = "cyan"
"markup.bold" = { fg = "blue", modifiers = ["bold"] } "markup.bold" = { fg = "blue", modifiers = ["bold"] }
"markup.heading" = { fg = "purple", modifiers = ["bold"] }
"markup.italic" = { fg = "yellow", modifiers = ["italic"] } "markup.italic" = { fg = "yellow", modifiers = ["italic"] }
"markup.strikethrough" = { modifiers = ["crossed_out"] } "markup.link.label" = { fg = "blue", modifiers = ["italic"] }
"markup.link.url" = "cyan"
"markup.link.text" = "magenta" "markup.link.text" = "magenta"
"markup.link.url" = "cyan"
"markup.list" = "cyan"
"markup.quote" = { fg = "yellow", modifiers = ["italic"] } "markup.quote" = { fg = "yellow", modifiers = ["italic"] }
"markup.raw" = { fg = "foreground" } "markup.raw" = { fg = "foreground" }
"markup.strikethrough" = { modifiers = ["crossed_out"] }
[palette] [palette]
background = "#282a36" background = "#282a36"
background_dark = "#21222c" background_dark = "#21222c"
comment = "#a39e9b"
foreground = "#eff0eb"
primary_highlight = "#800049" primary_highlight = "#800049"
secondary_highlight = "#4d4f66" secondary_highlight = "#4d4f66"
foreground = "#eff0eb"
comment = "#a39e9b"
# main colors # main colors
red = "#ff5c57"
blue = "#57c7ff" blue = "#57c7ff"
yellow = "#f3f99d"
green = "#5af78e"
purple = "#bd93f9"
cyan = "#9aedfe" cyan = "#9aedfe"
green = "#5af78e"
magenta = "#ff6ac1" magenta = "#ff6ac1"
purple = "#bd93f9"
red = "#ff5c57"
yellow = "#f3f99d"
# aux colors # aux colors
lilac = "#c9c5fb"
coral = "#f97c7c"
sand = "#ffab6f"
carnation = "#f99fc6" carnation = "#f99fc6"
coral = "#f97c7c"
lilac = "#c9c5fb"
olive = "#b6d37c" olive = "#b6d37c"
opal = "#b1d7c7" opal = "#b1d7c7"
sand = "#ffab6f"

@ -61,7 +61,8 @@
"ui.cursor" = { fg = "white", modifiers = ["reversed"] } "ui.cursor" = { fg = "white", modifiers = ["reversed"] }
"ui.cursor.primary" = { fg = "white", modifiers = ["reversed"] } "ui.cursor.primary" = { fg = "white", modifiers = ["reversed"] }
"ui.cursor.match" = { fg = "blue", modifiers = ["underlined"] } "ui.cursor.match" = { fg = "blue", modifiers = ["underlined"] }
"ui.cursor.insert" = { fg = "dark-blue" } # Malformed ANSI: dark-blue. See 'https://github.com/helix-editor/helix/issues/5709'
# "ui.cursor.insert" = { fg = "dark-blue" }
"ui.selection" = { bg = "faint-gray" } "ui.selection" = { bg = "faint-gray" }
"ui.selection.primary" = { bg = "#293b5bff" } "ui.selection.primary" = { bg = "#293b5bff" }

@ -1462,7 +1462,7 @@ letters! that is not good grammar. you can fix this.
Still from hello2, press Ctrl-w H to swap with the split on the Still from hello2, press Ctrl-w H to swap with the split on the
left: now hello2 is on the left and the tutor is on the top left: now hello2 is on the left and the tutor is on the top
right. After Ctrl-w you can use HJKL to split with the buffer right. After Ctrl-w you can use HJKL to swap with the buffer
on the left / below / above / on the right. on the left / below / above / on the right.
Move back to the tutor split, and press Ctrl-w o to only keep Move back to the tutor split, and press Ctrl-w o to only keep

@ -2,6 +2,7 @@ mod docgen;
mod helpers; mod helpers;
mod path; mod path;
mod querycheck; mod querycheck;
mod theme_check;
use std::{env, error::Error}; use std::{env, error::Error};
@ -13,6 +14,7 @@ pub mod tasks {
LANG_SUPPORT_MD_OUTPUT, STATIC_COMMANDS_MD_OUTPUT, TYPABLE_COMMANDS_MD_OUTPUT, LANG_SUPPORT_MD_OUTPUT, STATIC_COMMANDS_MD_OUTPUT, TYPABLE_COMMANDS_MD_OUTPUT,
}; };
use crate::querycheck::query_check; use crate::querycheck::query_check;
use crate::theme_check::theme_check;
use crate::DynError; use crate::DynError;
pub fn docgen() -> Result<(), DynError> { pub fn docgen() -> Result<(), DynError> {
@ -26,6 +28,10 @@ pub mod tasks {
query_check() query_check()
} }
pub fn themecheck() -> Result<(), DynError> {
theme_check()
}
pub fn print_help() { pub fn print_help() {
println!( println!(
" "
@ -46,6 +52,7 @@ fn main() -> Result<(), DynError> {
Some(t) => match t.as_str() { Some(t) => match t.as_str() {
"docgen" => tasks::docgen()?, "docgen" => tasks::docgen()?,
"query-check" => tasks::querycheck()?, "query-check" => tasks::querycheck()?,
"theme-check" => tasks::themecheck()?,
invalid => return Err(format!("Invalid task name: {}", invalid).into()), invalid => return Err(format!("Invalid task name: {}", invalid).into()),
}, },
}; };
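
Assembled from the three hunks above (the module declaration, the tasks wrapper, and the CLI dispatch), the new theme-check wiring looks roughly like the self-contained sketch below. The task bodies are stubbed out and DynError is assumed to be the crate's usual Box<dyn Error> alias; this is an illustration of the dispatch, not a drop-in copy of the real xtask main.rs.

use std::error::Error;

type DynError = Box<dyn Error>;

mod tasks {
    use super::DynError;

    // Stubs standing in for the real task implementations.
    pub fn docgen() -> Result<(), DynError> { Ok(()) }
    pub fn querycheck() -> Result<(), DynError> { Ok(()) }
    pub fn themecheck() -> Result<(), DynError> { Ok(()) } // would call theme_check()
}

fn main() -> Result<(), DynError> {
    let task = std::env::args().nth(1);
    match task {
        None => println!("usage: cargo xtask <docgen|query-check|theme-check>"),
        Some(t) => match t.as_str() {
            "docgen" => tasks::docgen()?,
            "query-check" => tasks::querycheck()?,
            "theme-check" => tasks::themecheck()?, // new arm added by this merge
            invalid => return Err(format!("Invalid task name: {}", invalid).into()),
        },
    };
    Ok(())
}

In the repository itself the new arm is reached with "cargo xtask theme-check".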

@ -11,8 +11,16 @@ pub fn book_gen() -> PathBuf {
project_root().join("book/src/generated/") project_root().join("book/src/generated/")
} }
pub fn runtime() -> PathBuf {
project_root().join("runtime")
}
pub fn ts_queries() -> PathBuf { pub fn ts_queries() -> PathBuf {
project_root().join("runtime/queries") runtime().join("queries")
}
pub fn themes() -> PathBuf {
runtime().join("themes")
} }
pub fn lang_config() -> PathBuf { pub fn lang_config() -> PathBuf {

@ -0,0 +1,33 @@
use helix_view::theme::Loader;
use crate::{path, DynError};
pub fn theme_check() -> Result<(), DynError> {
    let theme_names = [
        vec!["default".to_string(), "base16_default".to_string()],
        Loader::read_names(&path::themes()),
    ]
    .concat();
    let loader = Loader::new(&[path::runtime()]);
    let mut errors_present = false;
    for name in theme_names {
        let (_, warnings) = loader.load_with_warnings(&name).unwrap();
        if !warnings.is_empty() {
            errors_present = true;
            println!("Theme '{name}' loaded with errors:");
            for warning in warnings {
                println!("\t* {}", warning);
            }
        }
    }
    match errors_present {
        true => Err("Errors found when loading bundled themes".into()),
        false => {
            println!("Theme check successful!");
            Ok(())
        }
    }
}
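
One way to exercise the check outside CI, sketched under the assumption that it is appended to the theme_check.rs file above (the test wiring itself is not part of this diff):

#[cfg(test)]
mod tests {
    // Hypothetical smoke test: reuses the theme_check() added above and fails
    // if any bundled theme under runtime/themes loads with warnings.
    use super::theme_check;

    #[test]
    fn bundled_themes_load_without_warnings() {
        theme_check().expect("bundled themes should load without warnings");
    }
}

The equivalent manual invocation is "cargo xtask theme-check", which goes through the dispatch arm added in main.rs.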