Merge pull request 'master' (#5) from Mirrors/helix:master into master

Reviewed-on: #5
master
Trivernis 2 years ago
commit 6ae0186ef1

@ -1,3 +1,3 @@
[alias]
xtask = "run --package xtask --"
integration-test = "test --features integration --workspace --test integration"
integration-test = "test --features integration --profile integration --workspace --test integration"

@ -32,7 +32,7 @@ body:
id: helix-log
attributes:
label: Helix log
description: See `hx -h` for log file path
description: See `hx -h` for log file path. If you can reproduce the issue, run `RUST_BACKTRACE=1 hx -vv` to generate a more detailed log file.
value: |
<details><summary>~/.cache/helix/helix.log</summary>
@ -61,7 +61,8 @@ body:
label: Helix Version
description: >
Helix version (`hx -V` if using a release, `git describe` if building
from master)
placeholder: "helix 0.6.0 (c0dbd6dc)"
from master).
**Make sure that you are using the [latest helix release](https://github.com/helix-editor/helix/releases) or a newer master build**
placeholder: "helix 22.12 (5eaa6d97)"
validations:
required: true

@ -4,24 +4,17 @@ on:
push:
branches:
- master
merge_group:
schedule:
- cron: '00 01 * * *'
jobs:
check:
name: Check
name: Check (msrv)
runs-on: ubuntu-latest
strategy:
matrix:
rust: [stable, msrv]
steps:
- name: Checkout sources
uses: actions/checkout@v3
- name: Use MSRV rust toolchain
if: matrix.rust == 'msrv'
run: cp .github/workflows/msrv-rust-toolchain.toml rust-toolchain.toml
- name: Install stable toolchain
uses: helix-editor/rust-toolchain@v1
with:
@ -44,7 +37,7 @@ jobs:
uses: actions/checkout@v3
- name: Install stable toolchain
uses: dtolnay/rust-toolchain@1.61
uses: dtolnay/rust-toolchain@1.65
- uses: Swatinem/rust-cache@v2
@ -73,7 +66,7 @@ jobs:
uses: actions/checkout@v3
- name: Install stable toolchain
uses: dtolnay/rust-toolchain@1.61
uses: dtolnay/rust-toolchain@1.65
with:
components: rustfmt, clippy
@ -98,7 +91,7 @@ jobs:
uses: actions/checkout@v3
- name: Install stable toolchain
uses: dtolnay/rust-toolchain@1.61
uses: dtolnay/rust-toolchain@1.65
- uses: Swatinem/rust-cache@v2

@ -14,7 +14,7 @@ jobs:
uses: actions/checkout@v3
- name: Install nix
uses: cachix/install-nix-action@v18
uses: cachix/install-nix-action@v20
- name: Authenticate with Cachix
uses: cachix/cachix-action@v12

@ -1,3 +0,0 @@
[toolchain]
channel = "1.61.0"
components = ["rustfmt", "rust-src"]

1114
Cargo.lock generated

File diff suppressed because it is too large

@ -8,6 +8,7 @@ members = [
"helix-dap",
"helix-loader",
"helix-vcs",
"helix-parsec",
"xtask",
]
@ -25,3 +26,12 @@ lto = "fat"
codegen-units = 1
# strip = "debuginfo" # TODO: or strip = true
opt-level = 3
[profile.integration]
inherits = "test"
package.helix-core.opt-level = 2
package.helix-tui.opt-level = 2
package.helix-term.opt-level = 2
[patch.crates-io]
tree-sitter = { git = "https://github.com/tree-sitter/tree-sitter", rev = "c51896d32dcc11a38e41f36e3deb1a6a9c4f4b14" }

@ -45,91 +45,10 @@ Note: Only certain languages have indentation definitions at the moment. Check
# Installation
Packages are available for various distributions (see [Installation docs](https://docs.helix-editor.com/install.html)).
If you would like to build from source:
```shell
git clone https://github.com/helix-editor/helix
cd helix
cargo install --locked --path helix-term
```
This will install the `hx` binary to `$HOME/.cargo/bin` and build tree-sitter grammars in `./runtime/grammars`.
Helix needs its runtime files so make sure to copy/symlink the `runtime/` directory into the
config directory (for example `~/.config/helix/runtime` on Linux/macOS, or `%AppData%/helix/runtime` on Windows).
| OS | Command |
| -------------------- | ------------------------------------------------ |
| Windows (Cmd) | `xcopy /e /i runtime %AppData%\helix\runtime` |
| Windows (PowerShell) | `xcopy /e /i runtime $Env:AppData\helix\runtime` |
| Linux / macOS | `ln -s $PWD/runtime ~/.config/helix/runtime` |
Starting with Windows Vista you can also create symbolic links on Windows. Note that this requires
elevated privileges - i.e. PowerShell or Cmd must be run as administrator.
**PowerShell:**
```powershell
New-Item -ItemType SymbolicLink -Target "runtime" -Path "$Env:AppData\helix\runtime"
```
**Cmd:**
```cmd
cd %appdata%\helix
mklink /D runtime "<helix-repo>\runtime"
```
The runtime location can be overridden via the `HELIX_RUNTIME` environment variable.
> NOTE: if `HELIX_RUNTIME` is set prior to calling `cargo install --locked --path helix-term`,
> tree-sitter grammars will be built in `$HELIX_RUNTIME/grammars`.
If you plan on keeping the repo locally, an alternative to copying/symlinking
runtime files is to set `HELIX_RUNTIME=/path/to/helix/runtime`
(`HELIX_RUNTIME=$PWD/runtime` if you're in the helix repo directory).
Packages already solve this for you by wrapping the `hx` binary with a wrapper
that sets the variable to the install dir.
> NOTE: running via cargo also doesn't require setting explicit `HELIX_RUNTIME` path, it will automatically
> detect the `runtime` directory in the project root.
If you want to customize your `languages.toml` config,
tree-sitter grammars may be manually fetched and built with `hx --grammar fetch` and `hx --grammar build`.
In order to use LSP features like auto-complete, you will need to
[install the appropriate Language Server](https://github.com/helix-editor/helix/wiki/How-to-install-the-default-language-servers)
for a language.
[Installation documentation](https://docs.helix-editor.com/install.html).
[![Packaging status](https://repology.org/badge/vertical-allrepos/helix.svg)](https://repology.org/project/helix/versions)
## Adding Helix to your desktop environment
If installing from source, to use Helix in desktop environments that supports [XDG desktop menu](https://specifications.freedesktop.org/menu-spec/menu-spec-latest.html), including Gnome and KDE, copy the provided `.desktop` file to the correct folder:
```bash
cp contrib/Helix.desktop ~/.local/share/applications
cp contrib/helix.png ~/.local/share/icons
```
To use another terminal than the default, you will need to modify the `.desktop` file. For example, to use `kitty`:
```bash
sed -i "s|Exec=hx %F|Exec=kitty hx %F|g" ~/.local/share/applications/Helix.desktop
sed -i "s|Terminal=true|Terminal=false|g" ~/.local/share/applications/Helix.desktop
```
## macOS
Helix can be installed on macOS through homebrew:
```
brew install helix
```
# Contributing
Contributing guidelines can be found [here](./docs/CONTRIBUTING.md).

@ -6,13 +6,13 @@
- [Usage](./usage.md)
- [Keymap](./keymap.md)
- [Commands](./commands.md)
- [Language Support](./lang-support.md)
- [Language support](./lang-support.md)
- [Migrating from Vim](./from-vim.md)
- [Configuration](./configuration.md)
- [Themes](./themes.md)
- [Key Remapping](./remapping.md)
- [Key remapping](./remapping.md)
- [Languages](./languages.md)
- [Guides](./guides/README.md)
- [Adding Languages](./guides/adding_languages.md)
- [Adding Textobject Queries](./guides/textobject.md)
- [Adding Indent Queries](./guides/indent.md)
- [Adding languages](./guides/adding_languages.md)
- [Adding textobject queries](./guides/textobject.md)
- [Adding indent queries](./guides/indent.md)

@ -1,5 +1,5 @@
# Commands
Command mode can be activated by pressing `:`, similar to Vim. Built-in commands:
Command mode can be activated by pressing `:`. The built-in commands are:
{{#include ./generated/typable-cmd.md}}

@ -2,10 +2,10 @@
To override global configuration parameters, create a `config.toml` file located in your config directory:
* Linux and Mac: `~/.config/helix/config.toml`
* Windows: `%AppData%\helix\config.toml`
- Linux and Mac: `~/.config/helix/config.toml`
- Windows: `%AppData%\helix\config.toml`
> Hint: You can easily open the config file by typing `:config-open` within Helix normal mode.
> 💡 You can easily open the config file by typing `:config-open` within Helix normal mode.
Example config:
@ -25,12 +25,10 @@ select = "underline"
hidden = false
```
You may also specify a file to use for configuration with the `-c` or
`--config` CLI argument: `hx -c path/to/custom-config.toml`.
It is also possible to trigger configuration file reloading by sending the `USR1`
signal to the helix process, e.g. via `pkill -USR1 hx`. This is only supported
on unix operating systems.
You can use a custom configuration file by specifying it with the `-c` or
`--config` command line argument, for example `hx -c path/to/custom-config.toml`.
Additionally, you can reload the configuration file by sending the USR1
signal to the Helix process on Unix operating systems, such as by using the command `pkill -USR1 hx`.
## Editor
@ -38,25 +36,28 @@ on unix operating systems.
| Key | Description | Default |
|--|--|---------|
| `scrolloff` | Number of lines of padding around the edge of the screen when scrolling. | `5` |
| `mouse` | Enable mouse mode. | `true` |
| `middle-click-paste` | Middle click paste support. | `true` |
| `scroll-lines` | Number of lines to scroll per scroll wheel step. | `3` |
| `shell` | Shell to use when running external commands. | Unix: `["sh", "-c"]`<br/>Windows: `["cmd", "/C"]` |
| `line-number` | Line number display: `absolute` simply shows each line's number, while `relative` shows the distance from the current line. When unfocused or in insert mode, `relative` will still show absolute line numbers. | `absolute` |
| `cursorline` | Highlight all lines with a cursor. | `false` |
| `cursorcolumn` | Highlight all columns with a cursor. | `false` |
| `scrolloff` | Number of lines of padding around the edge of the screen when scrolling | `5` |
| `mouse` | Enable mouse mode | `true` |
| `middle-click-paste` | Middle click paste support | `true` |
| `scroll-lines` | Number of lines to scroll per scroll wheel step | `3` |
| `shell` | Shell to use when running external commands | Unix: `["sh", "-c"]`<br/>Windows: `["cmd", "/C"]` |
| `line-number` | Line number display: `absolute` simply shows each line's number, while `relative` shows the distance from the current line. When unfocused or in insert mode, `relative` will still show absolute line numbers | `absolute` |
| `cursorline` | Highlight all lines with a cursor | `false` |
| `cursorcolumn` | Highlight all columns with a cursor | `false` |
| `gutters` | Gutters to display: available options are `diagnostics`, `diff`, `line-numbers` and `spacer`. Note that `diagnostics` also includes other features such as breakpoints; a 1-width padding will be inserted if gutters is non-empty | `["diagnostics", "spacer", "line-numbers", "spacer", "diff"]` |
| `auto-completion` | Enable automatic pop up of auto-completion. | `true` |
| `auto-format` | Enable automatic formatting on save. | `true` |
| `auto-save` | Enable automatic saving on focus moving away from Helix. Requires [focus event support](https://github.com/helix-editor/helix/wiki/Terminal-Support) from your terminal. | `false` |
| `idle-timeout` | Time in milliseconds since last keypress before idle timers trigger. Used for autocompletion, set to 0 for instant. | `400` |
| `auto-completion` | Enable automatic pop up of auto-completion | `true` |
| `auto-format` | Enable automatic formatting on save | `true` |
| `auto-save` | Enable automatic saving when focus moves away from Helix. Requires [focus event support](https://github.com/helix-editor/helix/wiki/Terminal-Support) from your terminal | `false` |
| `idle-timeout` | Time in milliseconds since last keypress before idle timers trigger. Used for autocompletion, set to 0 for instant | `400` |
| `completion-trigger-len` | The min-length of word under cursor to trigger autocompletion | `2` |
| `auto-info` | Whether to display infoboxes | `true` |
| `true-color` | Set to `true` to override automatic detection of terminal truecolor support in the event of a false negative. | `false` |
| `rulers` | List of column positions at which to display the rulers. Can be overridden by language specific `rulers` in `languages.toml` file. | `[]` |
| `completion-replace` | Set to `true` to make completions always replace the entire word and not just the part before the cursor | `false` |
| `auto-info` | Whether to display info boxes | `true` |
| `true-color` | Set to `true` to override automatic detection of terminal truecolor support in the event of a false negative | `false` |
| `undercurl` | Set to `true` to override automatic detection of terminal undercurl support in the event of a false negative | `false` |
| `rulers` | List of column positions at which to display the rulers. Can be overridden by language specific `rulers` in `languages.toml` file | `[]` |
| `bufferline` | Renders a line at the top of the editor displaying open buffers. Can be `always`, `never` or `multiple` (only shown if more than one buffer is in use) | `never` |
| `color-modes` | Whether to color the mode indicator with different colors depending on the mode itself | `false` |
| `text-width` | Maximum line length. Used for the `:reflow` command and soft-wrapping if `soft-wrap.wrap_at_text_width` is set | `80` |
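For instance, a handful of these options could be combined in `config.toml` as in the sketch below (the values are illustrative, not recommendations):
```toml
[editor]
scrolloff = 8
mouse = false
line-number = "relative"
cursorline = true
bufferline = "multiple"
color-modes = true
text-width = 100
```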
### `[editor.statusline]` Section
@ -98,6 +99,7 @@ The following statusline elements can be configured:
| `spinner` | A progress spinner indicating LSP activity |
| `file-name` | The path/name of the opened file |
| `file-base-name` | The basename of the opened file |
| `file-modification-indicator` | The indicator to show whether the file is modified (a `[+]` appears when there are unsaved changes) |
| `file-encoding` | The encoding of the opened file if it differs from UTF-8 |
| `file-line-ending` | The file line endings (CRLF or LF) |
| `total-line-numbers` | The total line numbers of the opened file |
@ -110,23 +112,30 @@ The following statusline elements can be configured:
| `position-percentage` | The cursor position as a percentage of the total number of lines |
| `separator` | The string defined in `editor.statusline.separator` (defaults to `"│"`) |
| `spacer` | Inserts a space between elements (multiple/contiguous spacers may be specified) |
| `version-control` | The current branch name or detached commit hash of the opened workspace |
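As an illustration, these elements are arranged into statusline sections; a minimal sketch, assuming the `left`/`center`/`right` layout keys of `[editor.statusline]` (element placement chosen arbitrarily):
```toml
[editor.statusline]
left = ["mode", "spinner", "version-control", "file-name", "file-modification-indicator"]
center = []
right = ["diagnostics", "position", "file-encoding"]
separator = "│"
```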
### `[editor.lsp]` Section
| Key | Description | Default |
| --- | ----------- | ------- |
| `enable` | Enables LSP integration. Setting to false will completely disable language servers regardless of language settings. | `true` |
| `display-messages` | Display LSP progress messages below statusline[^1] | `false` |
| `auto-signature-help` | Enable automatic popup of signature help (parameter hints) | `true` |
| `display-inlay-hints` | Display inlay hints[^2] | `false` |
| `display-signature-help-docs` | Display docs under signature help popup | `true` |
[^1]: By default, a progress spinner is shown in the statusline beside the file path.
[^2]: You may also have to activate them in the LSP config for them to appear, not just in Helix.
Inlay hints in Helix are still being improved on and may be a little bit laggy/janky under some circumstances; please report any bugs you see so we can fix them!
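A short `config.toml` sketch using the keys above (values illustrative):
```toml
[editor.lsp]
display-messages = true
auto-signature-help = true
display-inlay-hints = true
display-signature-help-docs = false
```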
### `[editor.cursor-shape]` Section
Defines the shape of cursor in each mode. Note that due to limitations
of the terminal environment, only the primary cursor can change shape.
Defines the shape of the cursor in each mode.
Valid values for these options are `block`, `bar`, `underline`, or `hidden`.
> 💡 Due to limitations of the terminal environment, only the primary cursor can
> change shape.
| Key | Description | Default |
| --- | ----------- | ------- |
| `normal` | Cursor shape in [normal mode][normal mode] | `block` |
@ -139,23 +148,22 @@ Valid values for these options are `block`, `bar`, `underline`, or `hidden`.
### `[editor.file-picker]` Section
Sets options for file picker and global search. All but the last key listed in
the default file-picker configuration below are IgnoreOptions: whether hidden
files and files listed within ignore files are ignored by (not visible in) the
helix file picker and global search. There is also one other key, `max-depth`
available, which is not defined by default.
Set options for file picker and global search. Ignoring a file means it is
not visible in the Helix file picker and global search.
All git-related options are only enabled in a git repository.
| Key | Description | Default |
|--|--|---------|
|`hidden` | Enables ignoring hidden files. | true
|`parents` | Enables reading ignore files from parent directories. | true
|`ignore` | Enables reading `.ignore` files. | true
|`git-ignore` | Enables reading `.gitignore` files. | true
|`git-global` | Enables reading global .gitignore, whose path is specified in git's config: `core.excludefile` option. | true
|`git-exclude` | Enables reading `.git/info/exclude` files. | true
|`max-depth` | Set with an integer value for maximum depth to recurse. | Defaults to `None`.
|`hidden` | Enables ignoring hidden files | true
|`follow-links` | Follow symlinks instead of ignoring them | true
|`deduplicate-links` | Ignore symlinks that point at files already shown in the picker | true
|`parents` | Enables reading ignore files from parent directories | true
|`ignore` | Enables reading `.ignore` files | true
|`git-ignore` | Enables reading `.gitignore` files | true
|`git-global` | Enables reading global `.gitignore`, whose path is specified in git's config: `core.excludesFile` option | true
|`git-exclude` | Enables reading `.git/info/exclude` files | true
|`max-depth` | Set with an integer value for maximum depth to recurse | Defaults to `None`.
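For example, to show hidden files in the picker but stop recursing after a few directory levels, the keys above could be set like this (values illustrative):
```toml
[editor.file-picker]
hidden = false
max-depth = 3
```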
### `[editor.auto-pairs]` Section
@ -207,7 +215,7 @@ Search specific options.
| Key | Description | Default |
|--|--|---------|
| `smart-case` | Enable smart case regex searching (case insensitive unless pattern contains upper case characters) | `true` |
| `smart-case` | Enable smart case regex searching (case-insensitive unless pattern contains upper case characters) | `true` |
| `wrap-around`| Whether the search should wrap after depleting the matches | `true` |
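In `config.toml` this corresponds to (values illustrative):
```toml
[editor.search]
smart-case = false
wrap-around = true
```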
### `[editor.whitespace]` Section
@ -216,7 +224,7 @@ Options for rendering whitespace with visible characters. Use `:set whitespace.r
| Key | Description | Default |
|-----|-------------|---------|
| `render` | Whether to render whitespace. May either be `"all"` or `"none"`, or a table with sub-keys `space`, `tab`, and `newline`. | `"none"` |
| `render` | Whether to render whitespace. May either be `"all"` or `"none"`, or a table with sub-keys `space`, `nbsp`, `tab`, and `newline` | `"none"` |
| `characters` | Literal characters to use when rendering whitespace. Sub-keys may be any of `tab`, `space`, `nbsp`, `newline` or `tabpad` | See example below |
Example
@ -244,7 +252,7 @@ Options for rendering vertical indent guides.
| Key | Description | Default |
| --- | --- | --- |
| `render` | Whether to render indent guides. | `false` |
| `render` | Whether to render indent guides | `false` |
| `character` | Literal character to use for rendering the indent guide | `│` |
| `skip-levels` | Number of indent levels to skip | `0` |
@ -256,3 +264,77 @@ render = true
character = "╎" # Some characters that work well: "▏", "┆", "┊", "⸽"
skip-levels = 1
```
### `[editor.gutters]` Section
For simplicity, `editor.gutters` accepts an array of gutter types, which will
use default settings for all gutter components.
```toml
[editor]
gutters = ["diff", "diagnostics", "line-numbers", "spacer"]
```
To customize the behavior of gutters, the `[editor.gutters]` section must
be used. This section contains top level settings, as well as settings for
specific gutter components as subsections.
| Key | Description | Default |
| --- | --- | --- |
| `layout` | A vector of gutters to display | `["diagnostics", "spacer", "line-numbers", "spacer", "diff"]` |
Example:
```toml
[editor.gutters]
layout = ["diff", "diagnostics", "line-numbers", "spacer"]
```
#### `[editor.gutters.line-numbers]` Section
Options for the line number gutter
| Key | Description | Default |
| --- | --- | --- |
| `min-width` | The minimum number of characters to use | `3` |
Example:
```toml
[editor.gutters.line-numbers]
min-width = 1
```
#### `[editor.gutters.diagnostics]` Section
Currently unused
#### `[editor.gutters.diff]` Section
Currently unused
#### `[editor.gutters.spacer]` Section
Currently unused
### `[editor.soft-wrap]` Section
Options for soft wrapping lines that exceed the view width:
| Key | Description | Default |
| --- | --- | --- |
| `enable` | Whether soft wrapping is enabled. | `false` |
| `max-wrap` | Maximum free space left at the end of the line. | `20` |
| `max-indent-retain` | Maximum indentation to carry over when soft wrapping a line. | `40` |
| `wrap-indicator` | Text inserted before soft wrapped lines, highlighted with `ui.virtual.wrap` | `↪ ` |
| `wrap-at-text-width` | Soft wrap at `text-width` instead of using the full viewport size. | `false` |
Example:
```toml
[editor.soft-wrap]
enable = true
max-wrap = 25 # increase value to reduce forced mid-word wrapping
max-indent-retain = 0
wrap-indicator = "" # set wrap-indicator to "" to hide it
```

@ -10,6 +10,7 @@
| c | ✓ | ✓ | ✓ | `clangd` |
| c-sharp | ✓ | ✓ | | `OmniSharp` |
| cairo | ✓ | | | |
| capnp | ✓ | | ✓ | |
| clojure | ✓ | | | `clojure-lsp` |
| cmake | ✓ | ✓ | ✓ | `cmake-language-server` |
| comment | ✓ | | | |
@ -30,7 +31,7 @@
| eex | ✓ | | | |
| ejs | ✓ | | | |
| elixir | ✓ | ✓ | ✓ | `elixir-ls` |
| elm | ✓ | | | `elm-language-server` |
| elm | ✓ | | | `elm-language-server` |
| elvish | ✓ | | | `elvish` |
| env | ✓ | | | |
| erb | ✓ | | | |
@ -38,7 +39,7 @@
| esdl | ✓ | | | |
| fish | ✓ | ✓ | ✓ | |
| fortran | ✓ | | ✓ | `fortls` |
| gdscript | ✓ | ✓ | | |
| gdscript | ✓ | ✓ | | |
| git-attributes | ✓ | | | |
| git-commit | ✓ | | | |
| git-config | ✓ | | | |
@ -56,6 +57,7 @@
| haskell | ✓ | ✓ | | `haskell-language-server-wrapper` |
| hcl | ✓ | | ✓ | `terraform-ls` |
| heex | ✓ | ✓ | | `elixir-ls` |
| hosts | ✓ | | | |
| html | ✓ | | | `vscode-html-language-server` |
| idris | | | | `idris2-lsp` |
| iex | ✓ | | | |
@ -83,6 +85,8 @@
| mermaid | ✓ | | | |
| meson | ✓ | | ✓ | |
| mint | | | | `mint` |
| msbuild | ✓ | | ✓ | |
| nasm | ✓ | ✓ | | |
| nickel | ✓ | | ✓ | `nls` |
| nix | ✓ | | | `nil` |
| nu | ✓ | | | |
@ -92,12 +96,16 @@
| openscad | ✓ | | | `openscad-lsp` |
| org | ✓ | | | |
| pascal | ✓ | ✓ | | `pasls` |
| passwd | ✓ | | | |
| pem | ✓ | | | |
| perl | ✓ | ✓ | ✓ | |
| php | ✓ | ✓ | ✓ | `intelephense` |
| po | ✓ | ✓ | | |
| ponylang | ✓ | ✓ | ✓ | |
| prisma | ✓ | | | `prisma-language-server` |
| prolog | | | | `swipl` |
| protobuf | ✓ | | ✓ | |
| prql | ✓ | | | |
| purescript | ✓ | | | `purescript-language-server` |
| python | ✓ | ✓ | ✓ | `pylsp` |
| qml | ✓ | | ✓ | `qmlls` |
@ -107,18 +115,22 @@
| rescript | ✓ | ✓ | | `rescript-language-server` |
| rmarkdown | ✓ | | ✓ | `R` |
| ron | ✓ | | ✓ | |
| rst | ✓ | | | |
| ruby | ✓ | ✓ | ✓ | `solargraph` |
| rust | ✓ | ✓ | ✓ | `rust-analyzer` |
| sage | ✓ | ✓ | | |
| scala | ✓ | | ✓ | `metals` |
| scheme | ✓ | | | |
| scss | ✓ | | | `vscode-css-language-server` |
| slint | ✓ | | ✓ | `slint-lsp` |
| smithy | ✓ | | | `cs` |
| sml | ✓ | | | |
| solidity | ✓ | | | `solc` |
| sql | ✓ | | | |
| sshclientconfig | ✓ | | | |
| starlark | ✓ | ✓ | | |
| svelte | ✓ | | | `svelteserver` |
| sway | ✓ | ✓ | ✓ | `forc` |
| swift | ✓ | | | `sourcekit-lsp` |
| tablegen | ✓ | ✓ | ✓ | |
| task | ✓ | | | |
@ -129,7 +141,8 @@
| twig | ✓ | | | |
| typescript | ✓ | ✓ | ✓ | `typescript-language-server` |
| ungrammar | ✓ | | | |
| v | ✓ | | | `vls` |
| uxntal | ✓ | | | |
| v | ✓ | ✓ | ✓ | `v` |
| vala | ✓ | | | `vala-language-server` |
| verilog | ✓ | ✓ | | `svlangserver` |
| vhs | ✓ | | | |
@ -141,4 +154,5 @@
| xit | ✓ | | | |
| xml | ✓ | | ✓ | |
| yaml | ✓ | | ✓ | `yaml-language-server` |
| yuck | ✓ | | | |
| zig | ✓ | ✓ | ✓ | `zls` |

@ -43,11 +43,13 @@
| `:change-current-directory`, `:cd` | Change the current working directory. |
| `:show-directory`, `:pwd` | Show the current working directory. |
| `:encoding` | Set encoding. Based on `https://encoding.spec.whatwg.org`. |
| `:character-info`, `:char` | Get info about the character under the primary cursor. |
| `:reload` | Discard changes and reload from the source file. |
| `:reload-all` | Discard changes and reload all documents from the source files. |
| `:update` | Write changes only if the file has been modified. |
| `:lsp-workspace-command` | Open workspace command picker |
| `:lsp-restart` | Restarts the Language Server that is in use by the current doc |
| `:lsp-stop` | Stops the Language Server that is in use by the current doc |
| `:tree-sitter-scopes` | Display tree sitter scopes, primarily for theming and development. |
| `:debug-start`, `:dbg` | Start a debug session from a given template with given parameters. |
| `:debug-remote`, `:dbg-tcp` | Connect to a debug adapter by TCP address and start a debugging session from a given template with given parameters. |
@ -58,8 +60,9 @@
| `:hsplit-new`, `:hnew` | Open a scratch buffer in a horizontal split. |
| `:tutor` | Open the tutorial. |
| `:goto`, `:g` | Goto line number. |
| `:set-language`, `:lang` | Set the language of current buffer. |
| `:set-language`, `:lang` | Set the language of current buffer (show current language if no value specified). |
| `:set-option`, `:set` | Set a config option at runtime.<br>For example to disable smart case search, use `:set search.smart-case false`. |
| `:toggle-option`, `:toggle` | Toggle a boolean config option at runtime.<br>For example to toggle smart case search, use `:toggle search.smart-case`. |
| `:get-option`, `:get` | Get the current value of a config option. |
| `:sort` | Sort ranges in selection. |
| `:rsort` | Sort ranges in selection in reverse order. |
@ -73,3 +76,4 @@
| `:pipe` | Pipe each selection to the shell command. |
| `:pipe-to` | Pipe each selection to the shell command, ignoring output. |
| `:run-shell-command`, `:sh` | Run a shell command |
| `:reset-diff-change`, `:diffget`, `:diffg` | Reset the diff change at the cursor position. |

@ -1,4 +1,4 @@
# Guides
This section contains guides for adding new language server configurations,
tree-sitter grammars, textobject queries, etc.
tree-sitter grammars, textobject queries, and other similar items.

@ -1,45 +1,52 @@
# Adding languages
# Adding new languages to Helix
In order to add a new language to Helix, you will need to follow the steps
below.
## Language configuration
To add a new language, you need to add a `[[language]]` entry to the
`languages.toml` (see the [language configuration section]).
1. Add a new `[[language]]` entry in the `languages.toml` file and provide the
necessary configuration for the new language. For more information on
language configuration, refer to the
[language configuration section](../languages.md) of the documentation.
2. If you are adding a new language or updating an existing language server
configuration, run the command `cargo xtask docgen` to update the
[Language Support](../lang-support.md) documentation.
When adding a new language or Language Server configuration for an existing
language, run `cargo xtask docgen` to add the new configuration to the
[Language Support][lang-support] docs before creating a pull request.
When adding a Language Server configuration, be sure to update the
[Language Server Wiki][install-lsp-wiki] with installation notes.
> 💡 If you are adding a new Language Server configuration, make sure to update
> the
> [Language Server Wiki](https://github.com/helix-editor/helix/wiki/How-to-install-the-default-language-servers)
> with the installation instructions.
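To illustrate step 1, a minimal hypothetical `[[language]]` entry is sketched below; the language name, file types and language server command are placeholders, and the full set of available keys is described in the [language configuration section](../languages.md):
```toml
[[language]]
name = "mylang"                                 # placeholder language name
scope = "source.mylang"
injection-regex = "mylang"
file-types = ["mylang"]
roots = []
comment-token = "#"
indent = { tab-width = 2, unit = "  " }
language-server = { command = "mylang-lsp" }    # hypothetical server binary
```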
## Grammar configuration
If a tree-sitter grammar is available for the language, add a new `[[grammar]]`
entry to `languages.toml`.
You may use the `source.path` key rather than `source.git` with an absolute path
to a locally available grammar for testing, but switch to `source.git` before
submitting a pull request.
1. If a tree-sitter grammar is available for the new language, add a new
`[[grammar]]` entry to the `languages.toml` file.
2. If you are testing the grammar locally, you can use the `source.path` key
with an absolute path to the grammar. However, before submitting a pull
request, make sure to switch to using `source.git`.
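A hypothetical `[[grammar]]` entry following these steps might look like the sketch below (the repository URL and revision are placeholders); keep the `source.path` form commented out or removed before opening a pull request:
```toml
[[grammar]]
name = "mylang"
source = { git = "https://github.com/example/tree-sitter-mylang", rev = "<commit-sha>" }

# Local testing only (switch back to source.git before submitting):
# source = { path = "/home/user/src/tree-sitter-mylang" }
```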
## Queries
For a language to have syntax-highlighting and indentation among
other things, you have to add queries. Add a directory for your
language with the path `runtime/queries/<name>/`. The tree-sitter
[website](https://tree-sitter.github.io/tree-sitter/syntax-highlighting#queries)
gives more info on how to write queries.
> NOTE: When evaluating queries, the first matching query takes
precedence, which is different from other editors like Neovim where
the last matching query supersedes the ones before it. See
[this issue][neovim-query-precedence] for an example.
## Common Issues
- If you get errors when running after switching branches, you may have to update the tree-sitter grammars. Run `hx --grammar fetch` to fetch the grammars and `hx --grammar build` to build any out-of-date grammars.
- If a parser is segfaulting or you want to remove the parser, make sure to remove the compiled parser in `runtime/grammar/<name>.so`
[language configuration section]: ../languages.md
[neovim-query-precedence]: https://github.com/helix-editor/helix/pull/1170#issuecomment-997294090
[install-lsp-wiki]: https://github.com/helix-editor/helix/wiki/How-to-install-the-default-language-servers
[lang-support]: ../lang-support.md
1. In order to provide syntax highlighting and indentation for the new language,
you will need to add queries.
2. Create a new directory for the language with the path
`runtime/queries/<name>/`.
3. Refer to the
[tree-sitter website](https://tree-sitter.github.io/tree-sitter/syntax-highlighting#queries)
for more information on writing queries.
> 💡 In Helix, the first matching query takes precedence when evaluating
> queries, which is different from other editors such as Neovim where the last
> matching query supersedes the ones before it. See
> [this issue](https://github.com/helix-editor/helix/pull/1170#issuecomment-997294090)
> for an example.
## Common issues
- If you encounter errors when running Helix after switching branches, you may
need to update the tree-sitter grammars. Run the command `hx --grammar fetch`
to fetch the grammars and `hx --grammar build` to build any out-of-date
grammars.
- If a parser is causing a segfault, or you want to remove it, make sure to
remove the compiled parser located at `runtime/grammars/<name>.so`.

@ -1,4 +1,4 @@
# Adding Indent Queries
# Adding indent queries
Helix uses tree-sitter to correctly indent new lines. This requires
a tree-sitter grammar and an `indent.scm` query file placed in
@ -36,7 +36,7 @@ changed by using a `#set!` declaration anywhere in the pattern:
(#set! "scope" "all"))
```
## Capture Types
## Capture types
- `@indent` (default scope `tail`):
Increase the indent level by 1. Multiple occurrences in the same line

@ -1,14 +1,14 @@
# Adding Textobject Queries
# Adding textobject queries
Textobjects that are language specific ([like functions, classes, etc][textobjects])
require an accompanying tree-sitter grammar and a `textobjects.scm` query file
Helix supports textobjects that are language specific, such as functions, classes, etc.
These textobjects require an accompanying tree-sitter grammar and a `textobjects.scm` query file
to work properly. Tree-sitter allows us to query the source code syntax tree
and capture specific parts of it. The queries are written in a lisp dialect.
More information on how to write queries can be found in the [official tree-sitter
documentation][tree-sitter-queries].
Query files should be placed in `runtime/queries/{language}/textobjects.scm`
when contributing. Note that to test the query files locally you should put
when contributing to Helix. Note that to test the query files locally you should put
them under your local runtime directory (`~/.config/helix/runtime` on Linux
for example).
@ -28,9 +28,9 @@ The following [captures][tree-sitter-captures] are recognized:
[Example query files][textobject-examples] can be found in the helix GitHub repository.
## Queries for Textobject Based Navigation
## Queries for textobject based navigation
[Tree-sitter based navigation][textobjects-nav] is done using captures in the
Tree-sitter based navigation in Helix is done using captures in the
following order:
- `object.movement`
@ -38,12 +38,10 @@ following order:
- `object.inside`
For example if a `function.around` capture has been already defined for a language
in it's `textobjects.scm` file, function navigation should also work automatically.
in its `textobjects.scm` file, function navigation should also work automatically.
`function.movement` should be defined only if the node captured by `function.around`
doesn't make sense in a navigation context.
[textobjects]: ../usage.md#textobjects
[textobjects-nav]: ../usage.md#tree-sitter-textobject-based-navigation
[tree-sitter-queries]: https://tree-sitter.github.io/tree-sitter/using-parsers#query-syntax
[tree-sitter-captures]: https://tree-sitter.github.io/tree-sitter/using-parsers#capturing-nodes
[textobject-examples]: https://github.com/search?q=repo%3Ahelix-editor%2Fhelix+filename%3Atextobjects.scm&type=Code&ref=advsearch&l=&l=

@ -1,171 +1,250 @@
# Installation
We provide pre-built binaries on the [GitHub Releases page](https://github.com/helix-editor/helix/releases).
# Installing Helix
<!--toc:start-->
- [Pre-built binaries](#pre-built-binaries)
- [Linux, macOS, Windows and OpenBSD packaging status](#linux-macos-windows-and-openbsd-packaging-status)
- [Linux](#linux)
- [Ubuntu](#ubuntu)
- [Fedora/RHEL](#fedorarhel)
- [Arch Linux community](#arch-linux-community)
- [NixOS](#nixos)
- [AppImage](#appimage)
- [macOS](#macos)
- [Homebrew Core](#homebrew-core)
- [Windows](#windows)
- [Scoop](#scoop)
- [Chocolatey](#chocolatey)
- [MSYS2](#msys2)
- [Building from source](#building-from-source)
- [Configuring Helix's runtime files](#configuring-helixs-runtime-files)
- [Validating the installation](#validating-the-installation)
- [Configure the desktop shortcut](#configure-the-desktop-shortcut)
<!--toc:end-->
To install Helix, follow the instructions specific to your operating system.
Note that:
- To get the latest nightly version of Helix, you need to
[build from source](#building-from-source).
- To take full advantage of Helix, install the language servers for your
preferred programming languages. See the
[wiki](https://github.com/helix-editor/helix/wiki/How-to-install-the-default-language-servers)
for instructions.
## Pre-built binaries
Download pre-built binaries from the
[GitHub Releases page](https://github.com/helix-editor/helix/releases). Add the binary to your system's `$PATH` to use it from the command
line.
## Linux, macOS, Windows and OpenBSD packaging status
Helix is available for Linux, macOS and Windows via the official repositories listed below.
[![Packaging status](https://repology.org/badge/vertical-allrepos/helix.svg)](https://repology.org/project/helix/versions)
## OSX
## Linux
Helix is available in homebrew-core:
The following third party repositories are available:
```
brew install helix
### Ubuntu
Helix is available via [Maveonair's PPA](https://launchpad.net/~maveonair/+archive/ubuntu/helix-editor):
```sh
sudo add-apt-repository ppa:maveonair/helix-editor
sudo apt update
sudo apt install helix
```
## Linux
### Fedora/RHEL
### NixOS
Helix is available via `copr`:
```sh
sudo dnf copr enable varlad/helix
sudo dnf install helix
```
A [flake](https://nixos.wiki/wiki/Flakes) containing the package is available in
the project root. The flake can also be used to spin up a reproducible development
shell for working on Helix with `nix develop`.
### Arch Linux community
Flake outputs are cached for each push to master using
[Cachix](https://www.cachix.org/). The flake is configured to
automatically make use of this cache assuming the user accepts
the new settings on first use.
Releases are available in the `community` repository:
If you are using a version of Nix without flakes enabled you can
[install Cachix cli](https://docs.cachix.org/installation); `cachix use helix` will
configure Nix to use cached outputs when possible.
```sh
sudo pacman -S helix
```
Additionally, a [helix-git](https://aur.archlinux.org/packages/helix-git/) package is available
in the AUR, which builds the master branch.
### Arch Linux
### NixOS
Releases are available in the `community` repository.
Helix is available as a [flake](https://nixos.wiki/wiki/Flakes) in the project
root. Use `nix develop` to spin up a reproducible development shell. Outputs are
cached for each push to master using [Cachix](https://www.cachix.org/). The
flake is configured to automatically make use of this cache assuming the user
accepts the new settings on first use.
A [helix-git](https://aur.archlinux.org/packages/helix-git/) package is also available on the AUR, which builds the master branch.
If you are using a version of Nix without flakes enabled,
[install Cachix CLI](https://docs.cachix.org/installation) and use
`cachix use helix` to configure Nix to use cached outputs when possible.
### Fedora Linux
### AppImage
You can install the COPR package for Helix via
Install Helix using [AppImage](https://appimage.org/).
Download Helix AppImage from the [latest releases](https://github.com/helix-editor/helix/releases/latest) page.
```sh
chmod +x helix-*.AppImage # change permission for executable mode
./helix-*.AppImage # run helix
```
sudo dnf copr enable varlad/helix
sudo dnf install helix
```
## macOS
### Void Linux
### Homebrew Core
```
sudo xbps-install helix
```sh
brew install helix
```
## Windows
Helix can be installed using [Scoop](https://scoop.sh/), [Chocolatey](https://chocolatey.org/)
Install on Windows using [Scoop](https://scoop.sh/), [Chocolatey](https://chocolatey.org/)
or [MSYS2](https://msys2.org/).
**Scoop:**
### Scoop
```
```sh
scoop install helix
```
**Chocolatey:**
### Chocolatey
```
```sh
choco install helix
```
**MSYS2:**
Choose the proper command for your system from below:
### MSYS2
- For 32 bit Windows 7 or above:
For 64-bit Windows 8.1 or above:
```
pacman -S mingw-w64-i686-helix
```sh
pacman -S mingw-w64-ucrt-x86_64-helix
```
- For 64 bit Windows 7 or above:
## Building from source
```
pacman -S mingw-w64-x86_64-helix
```
Clone the repository:
- For 64 bit Windows 8.1 or above:
```
pacman -S mingw-w64-ucrt-x86_64-helix
```sh
git clone https://github.com/helix-editor/helix
cd helix
```
## Build from source
Compile from source:
```
git clone https://github.com/helix-editor/helix
cd helix
```sh
cargo install --path helix-term --locked
```
This will install the `hx` binary to `$HOME/.cargo/bin` and build tree-sitter grammars in `./runtime/grammars`.
This command will create the `hx` executable and construct the tree-sitter
grammars in the local `runtime` folder. Building the tree-sitter grammars requires
a C++ compiler to be installed, for example `gcc-c++`.
> 💡 If you are using musl-libc instead of glibc the following environment variable must be set during the build
> to ensure tree-sitter grammars can be loaded correctly:
>
> ```sh
> RUSTFLAGS="-C target-feature=-crt-static"
> ```
Helix also needs its runtime files so make sure to copy/symlink the `runtime/` directory into the
config directory (for example `~/.config/helix/runtime` on Linux/macOS). This location can be overridden
via the `HELIX_RUNTIME` environment variable.
> 💡 Tree-sitter grammars can be fetched and compiled if not pre-packaged. Fetch
> grammars with `hx --grammar fetch` (requires `git`) and compile them with
> `hx --grammar build` (requires a C++ compiler). This will install them in
> the `runtime` directory within the user's helix config directory (more
> [details below](#multiple-runtime-directories)).
| OS | Command |
| -------------------- | ------------------------------------------------ |
| Windows (Cmd) | `xcopy /e /i runtime %AppData%\helix\runtime` |
| Windows (PowerShell) | `xcopy /e /i runtime $Env:AppData\helix\runtime` |
| Linux / macOS | `ln -s $PWD/runtime ~/.config/helix/runtime` |
### Configuring Helix's runtime files
Starting with Windows Vista you can also create symbolic links on Windows. Note that this requires
elevated privileges - i.e. PowerShell or Cmd must be run as administrator.
#### Linux and macOS
**PowerShell:**
Either set the `HELIX_RUNTIME` environment variable to point to the runtime files and add it to your `~/.bashrc` or equivalent:
```powershell
New-Item -ItemType SymbolicLink -Target "runtime" -Path "$Env:AppData\helix\runtime"
```sh
HELIX_RUNTIME=/home/user-name/src/helix/runtime
```
**Cmd:**
Or, create a symlink in `~/.config/helix` that links to the source code directory:
```cmd
cd %appdata%\helix
mklink /D runtime "<helix-repo>\runtime"
```sh
ln -s $PWD/runtime ~/.config/helix/runtime
```
The runtime location can be overridden via the `HELIX_RUNTIME` environment variable.
#### Windows
> NOTE: if `HELIX_RUNTIME` is set prior to calling `cargo install --path helix-term --locked`,
> tree-sitter grammars will be built in `$HELIX_RUNTIME/grammars`.
Either set the `HELIX_RUNTIME` environment variable to point to the runtime files using the Windows setting (search for
`Edit environment variables for your account`) or use the `setx` command in
Cmd:
If you plan on keeping the repo locally, an alternative to copying/symlinking
runtime files is to set `HELIX_RUNTIME=/path/to/helix/runtime`
(`HELIX_RUNTIME=$PWD/runtime` if you're in the helix repo directory).
```sh
setx HELIX_RUNTIME "%userprofile%\source\repos\helix\runtime"
```
To use Helix in desktop environments that supports [XDG desktop menu](https://specifications.freedesktop.org/menu-spec/menu-spec-latest.html), including Gnome and KDE, copy the provided `.desktop` file to the correct folder:
> 💡 `%userprofile%` resolves to your user directory, for example
> `C:\Users\Your-Name\`.
```bash
cp contrib/Helix.desktop ~/.local/share/applications
```
Or, create a symlink in `%appdata%\helix\` that links to the source code directory:
To use another terminal than the default, you will need to modify the `.desktop` file. For example, to use `kitty`:
| Method | Command |
| ---------- | -------------------------------------------------------------------------------------- |
| PowerShell | `New-Item -ItemType Junction -Target "runtime" -Path "$Env:AppData\helix\runtime"` |
| Cmd | `cd %appdata%\helix` <br/> `mklink /D runtime "%userprofile%\src\helix\runtime"` |
```bash
sed -i "s|Exec=hx %F|Exec=kitty hx %F|g" ~/.local/share/applications/Helix.desktop
sed -i "s|Terminal=true|Terminal=false|g" ~/.local/share/applications/Helix.desktop
```
> 💡 On Windows, creating a symbolic link may require running PowerShell or
> Cmd as an administrator.
Please note: there is no icon for Helix yet, so the system default will be used.
#### Multiple runtime directories
## Finishing up the installation
When Helix finds multiple runtime directories it will search through them for files in the
following order:
To make sure everything is set up as expected you should finally run the helix healthcheck via
1. `runtime/` sibling directory to `$CARGO_MANIFEST_DIR` directory (this is intended for
developing and testing helix only).
2. `runtime/` subdirectory of OS-dependent helix user config directory.
3. `$HELIX_RUNTIME`.
4. `runtime/` subdirectory of path to Helix executable.
```
This order also sets the priority for selecting which file will be used if multiple runtime
directories have files with the same name.
### Validating the installation
To make sure everything is set up as expected you should run the Helix health
check:
```sh
hx --health
```
For more information on the information displayed in the health check results refer to [Healthcheck](https://github.com/helix-editor/helix/wiki/Healthcheck).
For more information on the health check results refer to
[Health check](https://github.com/helix-editor/helix/wiki/Healthcheck).
### Building tree-sitter grammars
### Configure the desktop shortcut
Tree-sitter grammars must be fetched and compiled if not pre-packaged.
Fetch grammars with `hx --grammar fetch` (requires `git`) and compile them
with `hx --grammar build` (requires a C++ compiler).
If your desktop environment supports the
[XDG desktop menu](https://specifications.freedesktop.org/menu-spec/menu-spec-latest.html)
you can configure Helix to show up in the application menu by copying the
provided `.desktop` and icon files to their correct folders:
### Installing language servers
```sh
cp contrib/Helix.desktop ~/.local/share/applications
cp contrib/helix.png ~/.icons # or ~/.local/share/icons
```
To use a terminal other than the system default, you can modify the `.desktop`
file. For example, to use `kitty`:
Language servers can optionally be installed if you want their features (auto-complete, diagnostics etc.).
Follow the [instructions on the wiki page](https://github.com/helix-editor/helix/wiki/How-to-install-the-default-language-servers) to add your language servers of choice.
```sh
sed -i "s|Exec=hx %F|Exec=kitty hx %F|g" ~/.local/share/applications/Helix.desktop
sed -i "s|Terminal=true|Terminal=false|g" ~/.local/share/applications/Helix.desktop
```

@ -14,14 +14,14 @@
- [Space mode](#space-mode)
- [Popup](#popup)
- [Unimpaired](#unimpaired)
- [Insert Mode](#insert-mode)
- [Select / extend mode](#select--extend-mode)
- [Insert mode](#insert-mode)
- [Select / extend mode](#select-extend-mode)
- [Picker](#picker)
- [Prompt](#prompt)
> 💡 Mappings marked (**LSP**) require an active language server for the file.
> 💡 Mappings marked (**TS**) require a tree-sitter grammar for the filetype.
> 💡 Mappings marked (**TS**) require a tree-sitter grammar for the file type.
## Normal mode
@ -109,7 +109,7 @@
| Key | Description | Command |
| ----- | ----------- | ------- |
| `s` | Select all regex matches inside selections | `select_regex` |
| `S` | Split selection into subselections on regex matches | `split_selection` |
| `S` | Split selection into sub selections on regex matches | `split_selection` |
| `Alt-s` | Split selection on newlines | `split_selection_on_newline` |
| `Alt-_ ` | Merge consecutive selections | `merge_consecutive_selections` |
| `&` | Align selection in columns | `align_selections` |
@ -130,7 +130,7 @@
| `X` | Extend selection to line bounds (line-wise selection) | `extend_to_line_bounds` |
| `Alt-x` | Shrink selection to line bounds (line-wise selection) | `shrink_to_line_bounds` |
| `J` | Join lines inside selection | `join_selections` |
| `Alt-J` | Join lines inside selection and select space | `join_selections_space` |
| `Alt-J` | Join lines inside selection and select the inserted space | `join_selections_space` |
| `K` | Keep selections matching the regex | `keep_selections` |
| `Alt-K` | Remove selections matching the regex | `remove_selections` |
| `Ctrl-c` | Comment/uncomment the selections | `toggle_comments` |
@ -141,7 +141,7 @@
### Search
Search commands all operate on the `/` register by default. Use `"<char>` to operate on a different one.
Search commands all operate on the `/` register by default. To use a different register, use `"<char>`.
| Key | Description | Command |
| ----- | ----------- | ------- |
@ -166,15 +166,17 @@ These sub-modes are accessible from normal mode and typically switch back to nor
| `Ctrl-w` | Enter [window mode](#window-mode) | N/A |
| `Space` | Enter [space mode](#space-mode) | N/A |
These modes (except command mode) can be configured by
[remapping keys](https://docs.helix-editor.com/remapping.html#minor-modes).
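For example, assuming the nested-table syntax from the [remapping docs](https://docs.helix-editor.com/remapping.html), a key inside a minor mode could be rebound with a sketch like this (the binding itself is arbitrary):
```toml
# config.toml: rebind "c" inside space mode to the code action picker
[keys.normal.space]
c = "code_action"
```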
#### View mode
Accessed by typing `z` in [normal mode](#normal-mode).
View mode is intended for scrolling and manipulating the view without changing
the selection. The "sticky" variant of this mode (accessed by typing `Z` in
normal mode) is persistent; use the Escape key to return to normal mode after
usage (useful when you're simply looking over text and not actively editing
it).
normal mode) is persistent and can be exited using the escape key. This is
useful when you're simply looking over text and not actively editing it.
| Key | Description | Command |
@ -222,7 +224,7 @@ Jumps to various locations.
Accessed by typing `m` in [normal mode](#normal-mode).
See the relevant section in [Usage](./usage.md) for an explanation about
[surround](./usage.md#surround) and [textobject](./usage.md#textobjects) usage.
[surround](./usage.md#surround) and [textobject](./usage.md#navigating-using-tree-sitter-textobjects) usage.
| Key | Description | Command |
| ----- | ----------- | ------- |
@ -239,7 +241,7 @@ TODO: Mappings for selecting syntax nodes (a superset of `[`).
Accessed by typing `Ctrl-w` in [normal mode](#normal-mode).
This layer is similar to Vim keybindings as Kakoune does not support window.
This layer is similar to Vim keybindings as Kakoune does not support windows.
| Key | Description | Command |
| ----- | ------------- | ------- |
@ -265,30 +267,32 @@ Accessed by typing `Space` in [normal mode](#normal-mode).
This layer is a kludge of mappings, mostly pickers.
| Key | Description | Command |
| ----- | ----------- | ------- |
| `f` | Open file picker | `file_picker` |
| `F` | Open file picker at current working directory | `file_picker_in_current_directory` |
| `b` | Open buffer picker | `buffer_picker` |
| `j` | Open jumplist picker | `jumplist_picker` |
| `k` | Show documentation for item under cursor in a [popup](#popup) (**LSP**) | `hover` |
| `s` | Open document symbol picker (**LSP**) | `symbol_picker` |
| `S` | Open workspace symbol picker (**LSP**) | `workspace_symbol_picker` |
| `d` | Open document diagnostics picker (**LSP**) | `diagnostics_picker` |
| `D` | Open workspace diagnostics picker (**LSP**) | `workspace_diagnostics_picker` |
| `r` | Rename symbol (**LSP**) | `rename_symbol` |
| `a` | Apply code action (**LSP**) | `code_action` |
| `'` | Open last fuzzy picker | `last_picker` |
| `w` | Enter [window mode](#window-mode) | N/A |
| `p` | Paste system clipboard after selections | `paste_clipboard_after` |
| `P` | Paste system clipboard before selections | `paste_clipboard_before` |
| `y` | Join and yank selections to clipboard | `yank_joined_to_clipboard` |
| `Y` | Yank main selection to clipboard | `yank_main_selection_to_clipboard` |
| `R` | Replace selections by clipboard contents | `replace_selections_with_clipboard` |
| `/` | Global search in workspace folder | `global_search` |
| `?` | Open command palette | `command_palette` |
> TIP: Global search displays results in a fuzzy picker, use `Space + '` to bring it back up after opening a file.
| Key | Description | Command |
| ----- | ----------- | ------- |
| `f` | Open file picker | `file_picker` |
| `F` | Open file picker at current working directory | `file_picker_in_current_directory` |
| `b` | Open buffer picker | `buffer_picker` |
| `j` | Open jumplist picker | `jumplist_picker` |
| `g` | Debug (experimental) | N/A |
| `k` | Show documentation for item under cursor in a [popup](#popup) (**LSP**) | `hover` |
| `s` | Open document symbol picker (**LSP**) | `symbol_picker` |
| `S` | Open workspace symbol picker (**LSP**) | `workspace_symbol_picker` |
| `d` | Open document diagnostics picker (**LSP**) | `diagnostics_picker` |
| `D` | Open workspace diagnostics picker (**LSP**) | `workspace_diagnostics_picker` |
| `r` | Rename symbol (**LSP**) | `rename_symbol` |
| `a` | Apply code action (**LSP**) | `code_action` |
| `h` | Select symbol references (**LSP**) | `select_references_to_symbol_under_cursor` |
| `'` | Open last fuzzy picker | `last_picker` |
| `w` | Enter [window mode](#window-mode) | N/A |
| `p` | Paste system clipboard after selections | `paste_clipboard_after` |
| `P` | Paste system clipboard before selections | `paste_clipboard_before` |
| `y` | Join and yank selections to clipboard | `yank_joined_to_clipboard` |
| `Y` | Yank main selection to clipboard | `yank_main_selection_to_clipboard` |
| `R` | Replace selections by clipboard contents | `replace_selections_with_clipboard` |
| `/` | Global search in workspace folder | `global_search` |
| `?` | Open command palette | `command_palette` |
> 💡 Global search displays results in a fuzzy picker, use `Space + '` to bring it back up after opening a file.
##### Popup
@ -301,14 +305,14 @@ Displays documentation for item under cursor.
#### Unimpaired
Mappings in the style of [vim-unimpaired](https://github.com/tpope/vim-unimpaired).
These mappings are in the style of [vim-unimpaired](https://github.com/tpope/vim-unimpaired).
| Key | Description | Command |
| ----- | ----------- | ------- |
| `[d` | Go to previous diagnostic (**LSP**) | `goto_prev_diag` |
| `]d` | Go to next diagnostic (**LSP**) | `goto_next_diag` |
| `[D` | Go to first diagnostic in document (**LSP**) | `goto_first_diag` |
| `[d` | Go to previous diagnostic (**LSP**) | `goto_prev_diag` |
| `]D` | Go to last diagnostic in document (**LSP**) | `goto_last_diag` |
| `[D` | Go to first diagnostic in document (**LSP**) | `goto_first_diag` |
| `]f` | Go to next function (**TS**) | `goto_next_function` |
| `[f` | Go to previous function (**TS**) | `goto_prev_function` |
| `]t` | Go to next type definition (**TS**) | `goto_next_class` |
@ -323,19 +327,20 @@ Mappings in the style of [vim-unimpaired](https://github.com/tpope/vim-unimpaire
| `[p` | Go to previous paragraph | `goto_prev_paragraph` |
| `]g` | Go to next change | `goto_next_change` |
| `[g` | Go to previous change | `goto_prev_change` |
| `[G` | Go to first change | `goto_first_change` |
| `]G` | Go to last change | `goto_last_change` |
| `[Space` | Add newline above | `add_newline_above` |
| `[G` | Go to first change | `goto_first_change` |
| `]Space` | Add newline below | `add_newline_below` |
| `[Space` | Add newline above | `add_newline_above` |
## Insert mode
Insert mode bindings are somewhat minimal by default. Helix is designed to
Insert mode bindings are minimal by default. Helix is designed to
be a modal editor, and this is reflected in the user experience and internal
mechanics. For example, changes to the text are only saved for undos when
escaping from insert mode to normal mode. For this reason, new users are
strongly encouraged to learn the modal editing paradigm to get the smoothest
experience.
mechanics. Changes to the text are only saved for undos when
escaping from insert mode to normal mode.
> 💡 New users are strongly encouraged to learn the modal editing paradigm
> to get the smoothest experience.
| Key | Description | Command |
| ----- | ----------- | ------- |
@ -347,7 +352,7 @@ experience.
| `Alt-d`, `Alt-Delete` | Delete next word | `delete_word_forward` |
| `Ctrl-u` | Delete to start of line | `kill_to_line_start` |
| `Ctrl-k` | Delete to end of line | `kill_to_line_end` |
| `Ctrl-h`, `Backspace` | Delete previous char | `delete_char_backward` |
| `Ctrl-h`, `Backspace`, `Shift-Backspace` | Delete previous char | `delete_char_backward` |
| `Ctrl-d`, `Delete` | Delete next char | `delete_char_forward` |
| `Ctrl-j`, `Enter` | Insert new line | `insert_newline` |
@ -365,8 +370,8 @@ with modal editors.
| `Home` | Move to line start | `goto_line_start` |
| `End` | Move to line end | `goto_line_end_newline` |
If you want to disable them in insert mode as you become more comfortable with modal editing, you can use
the following in your `config.toml`:
As you become more comfortable with modal editing, you may want to disable some
insert mode bindings. You can do this by editing your `config.toml` file.
```toml
[keys.insert]
@ -382,7 +387,7 @@ end = "no_op"
## Select / extend mode
This mode echoes Normal mode, but changes any movements to extend
Select mode echoes Normal mode, but changes any movements to extend
selections rather than replace them. Goto motions are also changed to
extend, so that `vgl` for example extends the selection to the end of
the line.
@ -428,7 +433,7 @@ Keys to use within prompt, Remapping currently not supported.
| `Alt-d`, `Alt-Delete`, `Ctrl-Delete` | Delete next word |
| `Ctrl-u` | Delete to start of line |
| `Ctrl-k` | Delete to end of line |
| `Backspace`, `Ctrl-h` | Delete previous char |
| `Backspace`, `Ctrl-h`, `Shift-Backspace` | Delete previous char |
| `Delete`, `Ctrl-d` | Delete next char |
| `Ctrl-s` | Insert a word under doc cursor, may be changed to Ctrl-r Ctrl-w later |
| `Ctrl-p`, `Up` | Select previous history |

@ -1,10 +1,10 @@
# Language Support
The following languages and Language Servers are supported. In order to use
The following languages and Language Servers are supported. To use
Language Server features, you must first [install][lsp-install-wiki] the
appropriate Language Server.
Check the language support in your installed helix version with `hx --health`.
You can check the language support in your installed helix version with `hx --health`.
Also see the [Language Configuration][lang-config] docs and the [Adding
Languages][adding-languages] guide for more language configuration information.

@ -5,13 +5,15 @@ in `languages.toml` files.
## `languages.toml` files
There are three possible `languages.toml` files. The first is compiled into
Helix and lives in the [Helix repository](https://github.com/helix-editor/helix/blob/master/languages.toml).
This provides the default configurations for languages and language servers.
There are three possible locations for a `languages.toml` file:
You may define a `languages.toml` in your [configuration directory](./configuration.md)
which overrides values from the built-in language configuration. For example
to disable auto-LSP-formatting in Rust:
1. In the Helix source code, this lives in the
[Helix repository](https://github.com/helix-editor/helix/blob/master/languages.toml).
It provides the default configurations for languages and language servers.
2. In your [configuration directory](./configuration.md). This overrides values
from the built-in language configuration. For example to disable
auto-LSP-formatting in Rust:
```toml
# in <config_dir>/helix/languages.toml
@ -21,10 +23,10 @@ name = "rust"
auto-format = false
```
Language configuration may also be overridden local to a project by creating
a `languages.toml` file under a `.helix` directory. Its settings will be merged
with the language configuration in the configuration directory and the built-in
configuration.
3. In a `.helix` folder in your project. Language configuration may also be
overridden local to a project by creating a `languages.toml` file in a
`.helix` folder. Its settings will be merged with the language configuration
in the configuration directory and the built-in configuration.
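   For example, a minimal sketch of a project-local override, reusing the Rust
   snippet from above:

   ```toml
   # in <project root>/.helix/languages.toml
   [[language]]
   name = "rust"
   auto-format = false  # overrides both the configuration-directory and built-in values
   ```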
## Language configuration
@ -56,16 +58,16 @@ These configuration keys are available:
| `auto-format` | Whether to autoformat this language when saving |
| `diagnostic-severity` | Minimal severity of diagnostic for it to be displayed. (Allowed values: `Error`, `Warning`, `Info`, `Hint`) |
| `comment-token` | The token to use as a comment-token |
| `indent` | The indent to use. Has sub keys `tab-width` and `unit` |
| `indent` | The indent to use. Has sub keys `unit` (the text inserted into the document when indenting; usually set to N spaces or `"\t"` for tabs) and `tab-width` (the number of spaces rendered for a tab) |
| `language-server` | The Language Server to run. See the Language Server configuration section below. |
| `config` | Language Server configuration |
| `grammar` | The tree-sitter grammar to use (defaults to the value of `name`) |
| `formatter` | The formatter for the language, it will take precedence over the lsp when defined. The formatter must be able to take the original file as input from stdin and write the formatted file to stdout |
| `max-line-length` | Maximum line length. Used for the `:reflow` command |
| `text-width` | Maximum line length. Used for the `:reflow` command and soft-wrapping if `soft-wrap.wrap_at_text_width` is set, defaults to `editor.text-width` |
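
For example, a sketch of a hypothetical entry combining several of these keys (the language name and formatter command are invented; a real entry also needs keys such as `scope` and `file-types`):

```toml
[[language]]
name = "mylang"                                    # hypothetical language
comment-token = "#"
indent = { tab-width = 4, unit = "    " }          # indent with four spaces
text-width = 100                                   # used by :reflow and soft-wrap
auto-format = true
formatter = { command = "mylang-fmt", args = ["--stdin"] }  # hypothetical stdin/stdout formatter
```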
### File-type detection and the `file-types` key
Helix determines which language configuration to use with the `file-types` key
Helix determines which language configuration to use based on the `file-types` key
from the above section. `file-types` is a list of strings or tables, for
example:

@ -1,18 +1,18 @@
# Key Remapping
# Key remapping
One-way key remapping is temporarily supported via a simple TOML configuration
Helix currently supports one-way key remapping through a simple TOML configuration
file. (More powerful solutions such as rebinding via commands will be
available in the future).
To remap keys, write a `config.toml` file in your `helix` configuration
directory (default `~/.config/helix` in Linux systems) with a structure like
To remap keys, create a `config.toml` file in your `helix` configuration
directory (default `~/.config/helix` on Linux systems) with a structure like
this:
```toml
# At most one section each of 'keys.normal', 'keys.insert' and 'keys.select'
[keys.normal]
C-s = ":w" # Maps the Ctrl-s to the typable command :w which is an alias for :write (save file)
C-o = ":open ~/.config/helix/config.toml" # Maps the Ctrl-o to opening of the helix config file
C-s = ":w" # Maps Ctrl-s to the typable command :w which is an alias for :write (save file)
C-o = ":open ~/.config/helix/config.toml" # Maps Ctrl-o to opening of the helix config file
a = "move_char_left" # Maps the 'a' key to the move_char_left command
w = "move_line_up" # Maps the 'w' key move_line_up
"C-S-esc" = "extend_line" # Maps Ctrl-Shift-Escape to extend_line
@ -20,10 +20,35 @@ g = { a = "code_action" } # Maps `ga` to show possible code actions
"ret" = ["open_below", "normal_mode"] # Maps the enter key to open_below then re-enter normal mode
[keys.insert]
"A-x" = "normal_mode" # Maps Alt-X to enter normal mode
"A-x" = "normal_mode" # Maps Alt-X to enter normal mode
j = { k = "normal_mode" } # Maps `jk` to exit insert mode
```
> NOTE: Typable commands can also be remapped; remember to keep the `:` prefix to indicate it's a typable command.
## Minor modes
Minor modes are accessed by pressing a key (usually from normal mode), giving access to dedicated bindings. Bindings
can be modified or added by nesting definitions.
```toml
[keys.insert.j]
k = "normal_mode" # Maps `jk` to exit insert mode
[keys.normal.g]
a = "code_action" # Maps `ga` to show possible code actions
# invert `j` and `k` in view mode
[keys.normal.z]
j = "scroll_up"
k = "scroll_down"
# create a new minor mode bound to `+`
[keys.normal."+"]
m = ":run-shell-command make"
c = ":run-shell-command cargo build"
t = ":run-shell-command cargo test"
```
## Special keys and modifiers
Ctrl, Shift and Alt modifiers are encoded respectively with the prefixes
`C-`, `S-` and `A-`. Special keys are encoded as follows:
@ -50,5 +75,5 @@ Ctrl, Shift and Alt modifiers are encoded respectively with the prefixes
Keys can be disabled by binding them to the `no_op` command.
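
For example (the choice of key here is arbitrary):

```toml
[keys.normal]
"A-x" = "no_op"  # pressing Alt-x in normal mode now does nothing
```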
Commands can be found at [Keymap](https://docs.helix-editor.com/keymap.html) Commands.
> Commands can also be found in the source code at [`helix-term/src/commands.rs`](https://github.com/helix-editor/helix/blob/master/helix-term/src/commands.rs) at the invocation of `static_commands!` macro and the `TypableCommandList`.
A list of commands is available in the [Keymap](https://docs.helix-editor.com/keymap.html) documentation
and in the source code at [`helix-term/src/commands.rs`](https://github.com/helix-editor/helix/blob/master/helix-term/src/commands.rs) at the invocation of the `static_commands!` macro and the `TypableCommandList`.

@ -1,14 +1,15 @@
# Themes
To use a theme add `theme = "<name>"` to your [`config.toml`](./configuration.md) at the very top of the file before the first section or select it during runtime using `:theme <name>`.
To use a theme add `theme = "<name>"` to the top of your [`config.toml`](./configuration.md) file, or select it during runtime using `:theme <name>`.
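
For example, to use the bundled `onedark` theme:

```toml
# at the very top of config.toml, before the first section
theme = "onedark"
```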
## Creating a theme
Create a file with the name of your theme as file name (i.e `mytheme.toml`) and place it in your `themes` directory (i.e `~/.config/helix/themes`). The directory might have to be created beforehand.
Create a file with the name of your theme as the file name (e.g. `mytheme.toml`) and place it in your `themes` directory (e.g. `~/.config/helix/themes`, or `%AppData%\helix\themes` on Windows). The directory might have to be created beforehand.
The names "default" and "base16_default" are reserved for the builtin themes and cannot be overridden by user defined themes.
> 💡 The names "default" and "base16_default" are reserved for built-in themes
> and cannot be overridden by user-defined themes.
The default theme.toml can be found [here](https://github.com/helix-editor/helix/blob/master/theme.toml), and user submitted themes [here](https://github.com/helix-editor/helix/blob/master/runtime/themes).
### Overview
Each line in the theme file is specified as below:
@ -16,7 +17,7 @@ Each line in the theme file is specified as below:
key = { fg = "#ffffff", bg = "#000000", underline = { color = "#ff0000", style = "curl"}, modifiers = ["bold", "italic"] }
```
where `key` represents what you want to style, `fg` specifies the foreground color, `bg` the background color, `underline` the underline `style`/`color`, and `modifiers` is a list of style modifiers. `bg`, `underline` and `modifiers` can be omitted to defer to the defaults.
Where `key` represents what you want to style, `fg` specifies the foreground color, `bg` the background color, `underline` the underline `style`/`color`, and `modifiers` is a list of style modifiers. `bg`, `underline` and `modifiers` can be omitted to defer to the defaults.
To specify only the foreground color:
@ -24,15 +25,30 @@ To specify only the foreground color:
key = "#ffffff"
```
if the key contains a dot `'.'`, it must be quoted to prevent it being parsed as a [dotted key](https://toml.io/en/v1.0.0#keys).
If the key contains a dot `'.'`, it must be quoted to prevent it being parsed as a [dotted key](https://toml.io/en/v1.0.0#keys).
```toml
"key.key" = "#ffffff"
```
For inspiration, you can find the default `theme.toml`
[here](https://github.com/helix-editor/helix/blob/master/theme.toml) and
user-submitted themes
[here](https://github.com/helix-editor/helix/blob/master/runtime/themes).
### Using the linter
Use the supplied linting tool to check for errors and missing scopes:
```sh
cargo xtask themelint onedark # replace onedark with <name>
```
## The details of theme creation
### Color palettes
It's recommended define a palette of named colors, and refer to them from the
It's recommended to define a palette of named colors, and refer to them in the
configuration values in your theme. To do this, add a table called
`palette` to your theme file:
@ -45,8 +61,8 @@ white = "#ffffff"
black = "#000000"
```
Remember that the `[palette]` table includes all keys after its header,
so you should define the palette after normal theme options.
Keep in mind that the `[palette]` table includes all keys after its header,
so it should be defined after the normal theme options.
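
For example, a minimal sketch where a key defined before the `[palette]` table refers to a palette color:

```toml
"ui.background" = { fg = "white", bg = "black" }

[palette]
white = "#ffffff"
black = "#000000"
```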
The default palette uses the terminal's default 16 colors, and the color names
are listed below. The `[palette]` section in the config file takes precedence
@ -73,9 +89,8 @@ over it and is merged into the default palette.
### Modifiers
The following values may be used as modifiers.
Less common modifiers might not be supported by your terminal emulator.
The following values may be used as modifiers, provided they are supported by
your terminal emulator.
| Modifier |
| --- |
@ -89,14 +104,13 @@ Less common modifiers might not be supported by your terminal emulator.
| `hidden` |
| `crossed_out` |
> Note: The `underlined` modifier is deprecated and only available for backwards compatibility.
> 💡 The `underlined` modifier is deprecated and only available for backwards compatibility.
> Its behavior is equivalent to setting `underline.style="line"`.
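
For example, the following two forms are equivalent (the key is chosen arbitrarily):

```toml
# deprecated form:
# "ui.cursor.match" = { modifiers = ["underlined"] }
# preferred form:
"ui.cursor.match" = { underline = { style = "line" } }
```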
### Underline Style
One of the following values may be used as a value for `underline.style`.
### Underline style
Some styles might not be supported by your terminal emulator.
One of the following values may be used as a value for `underline.style`, provided it is
supported by your terminal emulator.
| Modifier |
| --- |
@ -109,7 +123,7 @@ Some styles might not be supported by your terminal emulator.
### Inheritance
Extend upon other themes by setting the `inherits` property to an existing theme.
Extend other themes by setting the `inherits` property to an existing theme.
```toml
inherits = "boo_berry"
@ -124,19 +138,19 @@ berry = "#2A2A4D"
### Scopes
The following is a list of scopes available to use for styling.
The following is a list of scopes available to use for styling:
#### Syntax highlighting
These keys match [tree-sitter scopes](https://tree-sitter.github.io/tree-sitter/syntax-highlighting#theme).
For a given highlight produced, styling will be determined based on the longest matching theme key. For example, the highlight `function.builtin.static` would match the key `function.builtin` rather than `function`.
When determining styling for a highlight, the longest matching theme key will be used. For example, if the highlight is `function.builtin.static`, the key `function.builtin` will be used instead of `function`.
We use a similar set of scopes as
[SublimeText](https://www.sublimetext.com/docs/scope_naming.html). See also
[Sublime Text](https://www.sublimetext.com/docs/scope_naming.html). See also
[TextMate](https://macromates.com/manual/en/language_grammars) scopes.
- `attribute` - Class attributes, html tag attributes
- `attribute` - Class attributes, HTML tag attributes
- `type` - Types
- `builtin` - Primitive types provided by the language (`int`, `usize`)
@ -144,7 +158,7 @@ We use a similar set of scopes as
- `variant`
- `constructor`
- `constant` (TODO: constant.other.placeholder for %v)
- `constant` (TODO: constant.other.placeholder for `%v`)
- `builtin` Special constants provided by the language (`true`, `false`, `nil` etc)
- `boolean`
- `character`
@ -162,11 +176,11 @@ We use a similar set of scopes as
- `comment` - Code comments
- `line` - Single line comments (`//`)
  - `block` - Block comments (e.g. `/* */`)
- `documentation` - Documentation comments (e.g. `///` in Rust)
- `variable` - Variables
- `builtin` - Reserved language variables (`self`, `this`, `super`, etc)
- `builtin` - Reserved language variables (`self`, `this`, `super`, etc.)
- `parameter` - Function parameters
- `other`
- `member` - Fields of composite data types (e.g. structs, unions)
@ -186,10 +200,10 @@ We use a similar set of scopes as
- `return`
- `exception`
- `operator` - `or`, `in`
  - `directive` - Preprocessor directives (`#if` in C)
- `function` - `fn`, `func`
- `storage` - Keywords describing how things are stored
  - `type` - The type of something, `class`, `function`, `var`, `let`, etc.
- `modifier` - Storage modifiers like `static`, `mut`, `const`, `ref`, etc.
- `operator` - `||`, `+=`, `>`
@ -201,6 +215,7 @@ We use a similar set of scopes as
- `special` (preprocessor in C)
- `tag` - Tags (e.g. `<body>` in HTML)
- `builtin`
- `namespace`
@ -215,10 +230,11 @@ We use a similar set of scopes as
- `numbered`
- `bold`
- `italic`
- `strikethrough`
- `link`
- `url` - urls pointed to by links
- `label` - non-url link references
- `text` - url and image descriptions in links
- `url` - URLs pointed to by links
- `label` - non-URL link references
- `text` - URL and image descriptions in links
- `quote`
- `raw`
- `inline`
@ -232,74 +248,77 @@ We use a similar set of scopes as
#### Interface
These scopes are used for theming the editor interface.
These scopes are used for theming the editor interface:
- `markup`
- `normal`
- `completion` - for completion doc popup ui
- `hover` - for hover popup ui
- `completion` - for completion doc popup UI
- `hover` - for hover popup UI
- `heading`
- `completion` - for completion doc popup ui
- `hover` - for hover popup ui
- `completion` - for completion doc popup UI
- `hover` - for hover popup UI
- `raw`
- `inline`
- `completion` - for completion doc popup ui
- `hover` - for hover popup ui
| Key | Notes |
| --- | --- |
| `ui.background` | |
| `ui.background.separator` | Picker separator below input line |
| `ui.cursor` | |
| `ui.cursor.insert` | |
| `ui.cursor.select` | |
| `ui.cursor.match` | Matching bracket etc. |
| `ui.cursor.primary` | Cursor with primary selection |
| `ui.gutter` | Gutter |
| `ui.gutter.selected` | Gutter for the line the cursor is on |
| `ui.linenr` | Line numbers |
| `ui.linenr.selected` | Line number for the line the cursor is on |
| `ui.statusline` | Statusline |
| `ui.statusline.inactive` | Statusline (unfocused document) |
| `ui.statusline.normal` | Statusline mode during normal mode ([only if `editor.color-modes` is enabled][editor-section]) |
| `ui.statusline.insert` | Statusline mode during insert mode ([only if `editor.color-modes` is enabled][editor-section]) |
| `ui.statusline.select` | Statusline mode during select mode ([only if `editor.color-modes` is enabled][editor-section]) |
| `ui.statusline.separator` | Separator character in statusline |
| `ui.popup` | Documentation popups (e.g Space + k) |
| `ui.popup.info` | Prompt for multiple key options |
| `ui.window` | Border lines separating splits |
| `ui.help` | Description box for commands |
| `ui.text` | Command prompts, popup text, etc. |
| `ui.text.focus` | |
| `ui.text.inactive` | Same as `ui.text` but when the text is inactive (e.g. suggestions) |
| `ui.text.info` | The key: command text in `ui.popup.info` boxes |
| `ui.virtual.ruler` | Ruler columns (see the [`editor.rulers` config][editor-section]) |
| `ui.virtual.whitespace` | Visible whitespace characters |
| `ui.virtual.indent-guide` | Vertical indent width guides |
| `ui.menu` | Code and command completion menus |
| `ui.menu.selected` | Selected autocomplete item |
| `ui.menu.scroll` | `fg` sets thumb color, `bg` sets track color of scrollbar |
| `ui.selection` | For selections in the editing area |
| `ui.selection.primary` | |
| `ui.cursorline.primary` | The line of the primary cursor ([if cursorline is enabled][editor-section]) |
| `ui.cursorline.secondary` | The lines of any other cursors ([if cursorline is enabled][editor-section]) |
| `ui.cursorcolumn.primary` | The column of the primary cursor ([if cursorcolumn is enabled][editor-section]) |
| `ui.cursorcolumn.secondary` | The columns of any other cursors ([if cursorcolumn is enabled][editor-section]) |
| `warning` | Diagnostics warning (gutter) |
| `error` | Diagnostics error (gutter) |
| `info` | Diagnostics info (gutter) |
| `hint` | Diagnostics hint (gutter) |
| `diagnostic` | Diagnostics fallback style (editing area) |
| `diagnostic.hint` | Diagnostics hint (editing area) |
| `diagnostic.info` | Diagnostics info (editing area) |
| `diagnostic.warning` | Diagnostics warning (editing area) |
| `diagnostic.error` | Diagnostics error (editing area) |
You can check compliance to spec with
```shell
cargo xtask themelint onedark # replace onedark with <name>
```
- `completion` - for completion doc popup UI
- `hover` - for hover popup UI
| Key | Notes |
| --- | --- |
| `ui.background` | |
| `ui.background.separator` | Picker separator below input line |
| `ui.cursor` | |
| `ui.cursor.normal` | |
| `ui.cursor.insert` | |
| `ui.cursor.select` | |
| `ui.cursor.match` | Matching bracket etc. |
| `ui.cursor.primary` | Cursor with primary selection |
| `ui.cursor.primary.normal` | |
| `ui.cursor.primary.insert` | |
| `ui.cursor.primary.select` | |
| `ui.gutter` | Gutter |
| `ui.gutter.selected` | Gutter for the line the cursor is on |
| `ui.linenr` | Line numbers |
| `ui.linenr.selected` | Line number for the line the cursor is on |
| `ui.statusline` | Statusline |
| `ui.statusline.inactive` | Statusline (unfocused document) |
| `ui.statusline.normal` | Statusline mode during normal mode ([only if `editor.color-modes` is enabled][editor-section]) |
| `ui.statusline.insert` | Statusline mode during insert mode ([only if `editor.color-modes` is enabled][editor-section]) |
| `ui.statusline.select` | Statusline mode during select mode ([only if `editor.color-modes` is enabled][editor-section]) |
| `ui.statusline.separator` | Separator character in statusline |
| `ui.popup` | Documentation popups (e.g. Space + k) |
| `ui.popup.info` | Prompt for multiple key options |
| `ui.window` | Border lines separating splits |
| `ui.help` | Description box for commands |
| `ui.text` | Command prompts, popup text, etc. |
| `ui.text.focus` | |
| `ui.text.inactive` | Same as `ui.text` but when the text is inactive (e.g. suggestions) |
| `ui.text.info` | The key: command text in `ui.popup.info` boxes |
| `ui.virtual.ruler` | Ruler columns (see the [`editor.rulers` config][editor-section]) |
| `ui.virtual.whitespace` | Visible whitespace characters |
| `ui.virtual.indent-guide` | Vertical indent width guides |
| `ui.virtual.inlay-hint` | Default style for inlay hints of all kinds |
| `ui.virtual.inlay-hint.parameter` | Style for inlay hints of kind `parameter` (LSPs are not required to set a kind) |
| `ui.virtual.inlay-hint.type` | Style for inlay hints of kind `type` (LSPs are not required to set a kind) |
| `ui.virtual.wrap` | Soft-wrap indicator (see the [`editor.soft-wrap` config][editor-section]) |
| `ui.menu` | Code and command completion menus |
| `ui.menu.selected` | Selected autocomplete item |
| `ui.menu.scroll` | `fg` sets thumb color, `bg` sets track color of scrollbar |
| `ui.selection` | For selections in the editing area |
| `ui.selection.primary` | |
| `ui.highlight` | Highlighted lines in the picker preview |
| `ui.cursorline.primary` | The line of the primary cursor ([if cursorline is enabled][editor-section]) |
| `ui.cursorline.secondary` | The lines of any other cursors ([if cursorline is enabled][editor-section]) |
| `ui.cursorcolumn.primary` | The column of the primary cursor ([if cursorcolumn is enabled][editor-section]) |
| `ui.cursorcolumn.secondary` | The columns of any other cursors ([if cursorcolumn is enabled][editor-section]) |
| `warning` | Diagnostics warning (gutter) |
| `error` | Diagnostics error (gutter) |
| `info` | Diagnostics info (gutter) |
| `hint` | Diagnostics hint (gutter) |
| `diagnostic` | Diagnostics fallback style (editing area) |
| `diagnostic.hint` | Diagnostics hint (editing area) |
| `diagnostic.info` | Diagnostics info (editing area) |
| `diagnostic.warning` | Diagnostics warning (editing area) |
| `diagnostic.error` | Diagnostics error (editing area) |
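
For example, a minimal sketch styling a few of these keys (the colors are arbitrary):

```toml
"ui.statusline" = { fg = "white", bg = "black" }
"ui.linenr.selected" = { fg = "white", modifiers = ["bold"] }
"ui.virtual.ruler" = { bg = "#404040" }
"diagnostic.error" = { underline = { color = "#ff0000", style = "curl" } }
```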
[editor-section]: ./configuration.md#editor-section

@ -1,22 +1,43 @@
# Usage
# Using Helix
(Currently not fully documented, see the [keymappings](./keymap.md) list for more.)
<!--toc:start-->
- [Registers](#registers)
- [User-defined registers](#user-defined-registers)
- [Special registers](#special-registers)
- [Surround](#surround)
- [Selecting and manipulating text with textobjects](#selecting-and-manipulating-text-with-textobjects)
- [Navigating using tree-sitter textobjects](#navigating-using-tree-sitter-textobjects)
- [Moving the selection with syntax-aware motions](#moving-the-selection-with-syntax-aware-motions)
<!--toc:end-->
See [tutor](https://github.com/helix-editor/helix/blob/master/runtime/tutor) (accessible via `hx --tutor` or `:tutor`) for a vimtutor-like introduction.
For a full interactive introduction to Helix, refer to the
[tutor](https://github.com/helix-editor/helix/blob/master/runtime/tutor) which
can be accessed via the command `hx --tutor` or `:tutor`.
> 💡 Currently, not all functionality is fully documented; please refer to the
> [key mappings](./keymap.md) list.
## Registers
Vim-like registers can be used to yank and store text to be pasted later. Usage is similar, with `"` being used to select a register:
In Helix, registers are storage locations for text and other data, such as the
result of a search. Registers can be used to cut, copy, and paste text, similar
to the clipboard in other text editors. Usage is similar to Vim, with `"` being
used to select a register.
### User-defined registers
Helix allows you to create your own named registers for storing text, for
example:
- `"ay` - Yank the current selection to register `a`.
- `"op` - Paste the text in register `o` after the selection.
If there is a selected register before invoking a change or delete command, the selection will be stored in the register and the action will be carried out:
If a register is selected before invoking a change or delete command, the selection will be stored in the register and the action will be carried out:
- `"hc` - Store the selection in register `h` and then change it (delete and enter insert mode).
- `"md` - Store the selection in register `m` and delete it.
### Special Registers
### Special registers
| Register character | Contains |
| --- | --- |
@ -25,41 +46,90 @@ If there is a selected register before invoking a change or delete command, the
| `"` | Last yanked text |
| `_` | Black hole |
> There is no special register for copying to system clipboard, instead special commands and keybindings are provided. See the [keymap](keymap.md#space-mode) for the specifics.
> The black hole register works as a no-op register, meaning no data will be written to / read from it.
The system clipboard is not directly supported by a special register. Instead, special commands and keybindings are provided. Refer to the
[key map](keymap.md#space-mode) for more details.
The black hole register is a no-op register, meaning that no data will be written to or read from it.
## Surround
Functionality similar to [vim-surround](https://github.com/tpope/vim-surround) is built into
helix. The keymappings have been inspired from [vim-sandwich](https://github.com/machakann/vim-sandwich):
Helix includes built-in functionality similar to [vim-surround](https://github.com/tpope/vim-surround).
The keymappings have been inspired by [vim-sandwich](https://github.com/machakann/vim-sandwich):
![surround demo](https://user-images.githubusercontent.com/23398472/122865801-97073180-d344-11eb-8142-8f43809982c6.gif)
![Surround demo](https://user-images.githubusercontent.com/23398472/122865801-97073180-d344-11eb-8142-8f43809982c6.gif)
- `ms` - Add surround characters
- `mr` - Replace surround characters
- `md` - Delete surround characters
| Key Sequence | Action |
| --------------------------------- | --------------------------------------- |
| `ms<char>` (after selecting text) | Add surround characters to selection |
| `mr<char_to_replace><new_char>` | Replace the closest surround characters |
| `md<char_to_delete>` | Delete the closest surround characters |
`ms` acts on a selection, so select the text first and use `ms<char>`. `mr` and `md` work
on the closest pairs found and selections are not required; use counts to act in outer pairs.
You can use counts to act on outer pairs.
It can also act on multiple selections (yay!). For example, to change every occurrence of `(use)` to `[use]`:
Surround can also act on multiple selections. For example, to change every occurrence of `(use)` to `[use]`:
- `%` to select the whole file
- `s` to split the selections on a search term
- Input `use` and hit Enter
- `mr([` to replace the parens with square brackets
1. `%` to select the whole file
2. `s` to split the selections on a search term
3. Input `use` and hit Enter
4. `mr([` to replace the parentheses with square brackets
Multiple characters are currently not supported, but planned.
Multiple characters are currently not supported, but planned for a future release.
## Syntax-tree Motions
## Selecting and manipulating text with textobjects
`Alt-p`, `Alt-o`, `Alt-i`, and `Alt-n` (or `Alt` and arrow keys) move the primary
selection according to the selection's place in the syntax tree. Let's walk
through an example to get familiar with them. Many languages have a syntax like
so for function calls:
In Helix, textobjects are a way to select, manipulate and operate on a piece of
text in a structured way. They allow you to refer to blocks of text based on
their structure or purpose, such as a word, sentence, paragraph, or even a
function or block of code.
```
func(arg1, arg2, arg3)
![Textobject demo](https://user-images.githubusercontent.com/23398472/124231131-81a4bb00-db2d-11eb-9d10-8e577ca7b177.gif)
![Textobject tree-sitter demo](https://user-images.githubusercontent.com/23398472/132537398-2a2e0a54-582b-44ab-a77f-eb818942203d.gif)
- `ma` - Select around the object (`va` in Vim, `<alt-a>` in Kakoune)
- `mi` - Select inside the object (`vi` in Vim, `<alt-i>` in Kakoune)
| Key after `mi` or `ma` | Textobject selected |
| --- | --- |
| `w` | Word |
| `W` | WORD |
| `p` | Paragraph |
| `(`, `[`, `'`, etc. | Specified surround pairs |
| `m` | The closest surround pair |
| `f` | Function |
| `c` | Class |
| `a` | Argument/parameter |
| `o` | Comment |
| `t` | Test |
| `g` | Change |
> 💡 `f`, `c`, etc. need a tree-sitter grammar active for the current
> document and a special tree-sitter query file to work properly. [Only
> some grammars][lang-support] currently have the query file implemented.
> Contributions are welcome!
## Navigating using tree-sitter textobjects
Navigating between functions, classes, parameters, and other elements is
possible using tree-sitter and textobject queries. For
example, to move to the next function use `]f`, to move to the previous
class use `[c`, and so on.
![Tree-sitter-nav-demo][tree-sitter-nav-demo]
For the full reference see the [unimpaired][unimpaired-keybinds] section of the key bind
documentation.
> 💡 This feature relies on tree-sitter textobjects
> and requires the corresponding query file to work properly.
## Moving the selection with syntax-aware motions
`Alt-p`, `Alt-o`, `Alt-i`, and `Alt-n` (or `Alt` and arrow keys) allow you to move the
selection according to its location in the syntax tree. For example, many languages have the
following syntax for function calls:
```js
func(arg1, arg2, arg3);
```
A function call might be parsed by tree-sitter into a tree like the following.
@ -93,77 +163,29 @@ a more intuitive tree format:
└──────────┘ └──────────┘ └──────────┘
```
Say we have a selection that wraps `arg1`. The selection is on the `arg1` leaf
in the tree above.
If you have a selection that wraps `arg1` (see the tree above), and you use
`Alt-n`, it will select the next sibling in the syntax tree: `arg2`.
```
```js
// before
func([arg1], arg2, arg3)
// after
func(arg1, [arg2], arg3);
```
Using `Alt-n` would select the next sibling in the syntax tree: `arg2`.
Similarly, `Alt-o` will expand the selection to the parent node, in this case, the
arguments node.
```
func(arg1, [arg2], arg3)
```
While `Alt-o` would expand the selection to the parent node. In the tree above we
can see that we would select the `arguments` node.
```
func[(arg1, arg2, arg3)]
```js
func[(arg1, arg2, arg3)];
```
There is also some nuanced behavior that prevents you from getting stuck on a
node with no sibling. If we have a selection on `arg1`, `Alt-p` would bring us
to the previous child node. Since `arg1` doesn't have a sibling to its left,
though, we climb the syntax tree and then take the previous selection. So
`Alt-p` will move the selection over to the "func" `identifier`.
```
[func](arg1, arg2, arg3)
```
## Textobjects
![textobject-demo](https://user-images.githubusercontent.com/23398472/124231131-81a4bb00-db2d-11eb-9d10-8e577ca7b177.gif)
![textobject-treesitter-demo](https://user-images.githubusercontent.com/23398472/132537398-2a2e0a54-582b-44ab-a77f-eb818942203d.gif)
- `ma` - Select around the object (`va` in Vim, `<alt-a>` in Kakoune)
- `mi` - Select inside the object (`vi` in Vim, `<alt-i>` in Kakoune)
| Key after `mi` or `ma` | Textobject selected |
| --- | --- |
| `w` | Word |
| `W` | WORD |
| `p` | Paragraph |
| `(`, `[`, `'`, etc | Specified surround pairs |
| `m` | Closest surround pair |
| `f` | Function |
| `c` | Class |
| `a` | Argument/parameter |
| `o` | Comment |
| `t` | Test |
| `g` | Change |
> NOTE: `f`, `c`, etc need a tree-sitter grammar active for the current
document and a special tree-sitter query file to work properly. [Only
some grammars][lang-support] currently have the query file implemented.
Contributions are welcome!
## Tree-sitter Textobject Based Navigation
Navigating between functions, classes, parameters, etc is made
possible by leveraging tree-sitter and textobjects queries. For
example to move to the next function use `]f`, to move to previous
class use `[c`, and so on.
![tree-sitter-nav-demo][tree-sitter-nav-demo]
See the [unimpaired][unimpaired-keybinds] section of the keybind
documentation for the full reference.
> NOTE: This feature is dependent on tree-sitter based textobjects
and therefore requires the corresponding query file to work properly.
node with no sibling. When using `Alt-p` with a selection on `arg1`, the previous
child node will be selected. In the event that `arg1` does not have a previous
sibling, the selection will move up the syntax tree and select the previous
element. As a result, using `Alt-p` with a selection on `arg1` will move the
selection to the "func" `identifier`.
[lang-support]: ./lang-support.md
[unimpaired-keybinds]: ./keymap.md#unimpaired

@ -48,6 +48,18 @@
--searchresults-border-color: #888;
--searchresults-li-bg: #252932;
--search-mark-bg: #e3b171;
--hljs-background: #191f26;
--hljs-color: #e6e1cf;
--hljs-quote: #5c6773;
--hljs-variable: #ff7733;
--hljs-type: #ffee99;
--hljs-title: #b8cc52;
--hljs-symbol: #ffb454;
--hljs-selector-tag: #ff7733;
--hljs-selector-tag: #36a3d9;
--hljs-selector-tag: #00568d;
--hljs-selector-tag: #91b362;
--hljs-selector-tag: #d96c75;
}
.coal {
@ -88,6 +100,18 @@
--searchresults-border-color: #98a3ad;
--searchresults-li-bg: #2b2b2f;
--search-mark-bg: #355c7d;
--hljs-background: #969896;
--hljs-color: #cc6666;
--hljs-quote: #de935f;
--hljs-variable: #f0c674;
--hljs-type: #b5bd68;
--hljs-title: #8abeb7;
--hljs-symbol: #81a2be;
--hljs-selector-tag: #b294bb;
--hljs-selector-tag: #1d1f21;
--hljs-selector-tag: #c5c8c6;
--hljs-selector-tag: #718c00;
--hljs-selector-tag: #c82829;
}
.light {
@ -128,6 +152,14 @@
--searchresults-border-color: #888;
--searchresults-li-bg: #e4f2fe;
--search-mark-bg: #a2cff5;
--hljs-background: #f6f7f6;
--hljs-color: #000;
--hljs-quote: #575757;
--hljs-variable: #d70025;
--hljs-type: #b21e00;
--hljs-title: #0030f2;
--hljs-symbol: #008200;
--hljs-selector-tag: #9d00ec;
}
.navy {
@ -168,6 +200,19 @@
--searchresults-border-color: #5c5c68;
--searchresults-li-bg: #242430;
--search-mark-bg: #a2cff5;
--hljs-background: #969896;
--hljs-color: #cc6666;
--hljs-quote: #de935f;
--hljs-variable: #f0c674;
--hljs-type: #b5bd68;
--hljs-title: #8abeb7;
--hljs-symbol: #81a2be;
--hljs-selector-tag: #b294bb;
--hljs-selector-tag: #1d1f21;
--hljs-selector-tag: #c5c8c6;
--hljs-selector-tag: #718c00;
--hljs-selector-tag: #c82829;
}
.rust {
@ -208,6 +253,14 @@
--searchresults-border-color: #888;
--searchresults-li-bg: #dec2a2;
--search-mark-bg: #e69f67;
--hljs-background: #f6f7f6;
--hljs-color: #000;
--hljs-quote: #575757;
--hljs-variable: #d70025;
--hljs-type: #b21e00;
--hljs-title: #0030f2;
--hljs-symbol: #008200;
--hljs-selector-tag: #9d00ec;
}
@media (prefers-color-scheme: dark) {
@ -292,7 +345,15 @@
--searchresults-header-fg: #5f5f71;
--searchresults-border-color: #5c5c68;
--searchresults-li-bg: #242430;
    --search-mark-bg: #a2cff5;
--hljs-background: #2f1e2e;
--hljs-color: #a39e9b;
--hljs-quote: #8d8687;
--hljs-variable: #ef6155;
--hljs-type: #f99b15;
--hljs-title: #fec418;
--hljs-symbol: #48b685;
--hljs-selector-tag: #815ba4;
}
.colibri {
@ -338,5 +399,13 @@
--searchresults-border-color: #5c5c68;
--searchresults-li-bg: #242430;
--search-mark-bg: #a2cff5;
--hljs-background: #TODO;
--hljs-color: #TODO;
--hljs-quote: #TODO;
--hljs-variable: #TODO;
--hljs-type: #TODO;
--hljs-title: #TODO;
--hljs-symbol: #TODO;
--hljs-selector-tag: #TODO;
*/
}

@ -7,12 +7,12 @@ code.hljs {
padding:3px 5px
}
.hljs {
background:#2f1e2e;
color:#a39e9b
background: var(--hljs-background);
color: var(--hljs-color);
}
.hljs-comment,
.hljs-quote {
color:#8d8687
color: var(--hljs-quote)
}
.hljs-link,
.hljs-meta,
@ -23,7 +23,7 @@ code.hljs {
.hljs-tag,
.hljs-template-variable,
.hljs-variable {
color:#ef6155
color: var(--hljs-variable)
}
.hljs-built_in,
.hljs-deletion,
@ -31,22 +31,22 @@ code.hljs {
.hljs-number,
.hljs-params,
.hljs-type {
color:#f99b15
color: var(--hljs-type)
}
.hljs-attribute,
.hljs-section,
.hljs-title {
color:#fec418
color: var(--hljs-title)
}
.hljs-addition,
.hljs-bullet,
.hljs-string,
.hljs-symbol {
color:#48b685
color: var(--hljs-symbol)
}
.hljs-keyword,
.hljs-selector-tag {
color:#815ba4
color: var(--hljs-selector-tag)
}
.hljs-emphasis {
font-style:italic

@ -0,0 +1,87 @@
<?xml version="1.0" encoding="UTF-8"?>
<component type="desktop-application">
<id>com.helix_editor.Helix</id>
<metadata_license>CC0-1.0</metadata_license>
<project_license>MPL-2.0</project_license>
<name>Helix</name>
<summary>A post-modern text editor</summary>
<description>
<p>
Helix is a terminal-based text editor inspired by Kakoune / Neovim and written in Rust.
</p>
<ul>
<li>Vim-like modal editing</li>
<li>Multiple selections</li>
<li>Built-in language server support</li>
<li>Smart, incremental syntax highlighting and code editing via tree-sitter</li>
</ul>
</description>
<launchable type="desktop-id">Helix.desktop</launchable>
<screenshots>
<screenshot type="default">
<caption>Helix with default theme</caption>
<image>https://github.com/helix-editor/helix/raw/d4565b4404cabc522bd60822abd374755581d751/screenshot.png</image>
</screenshot>
</screenshots>
<url type="homepage">https://helix-editor.com/</url>
<url type="donation">https://opencollective.com/helix-editor</url>
<url type="help">https://docs.helix-editor.com/</url>
<url type="vcs-browser">https://github.com/helix-editor/helix</url>
<url type="bugtracker">https://github.com/helix-editor/helix/issues</url>
<content_rating type="oars-1.1" />
<releases>
<release version="22.12" date="2022-12-6">
<url>https://helix-editor.com/news/release-22-12-highlights/</url>
</release>
<release version="22.08" date="2022-8-31">
<url>https://helix-editor.com/news/release-22-08-highlights/</url>
</release>
<release version="22.05" date="2022-5-28">
<url>https://helix-editor.com/news/release-22-05-highlights/</url>
</release>
<release version="22.03" date="2022-3-28">
<url>https://helix-editor.com/news/release-22-03-highlights/</url>
</release>
</releases>
<requires>
<control>keyboard</control>
</requires>
<categories>
<category>Utility</category>
<category>TextEditor</category>
</categories>
<keywords>
<keyword>text</keyword>
<keyword>editor</keyword>
<keyword>development</keyword>
<keyword>programming</keyword>
</keywords>
<provides>
<binary>hx</binary>
<mediatype>text/english</mediatype>
<mediatype>text/plain</mediatype>
<mediatype>text/x-makefile</mediatype>
<mediatype>text/x-c++hdr</mediatype>
<mediatype>text/x-c++src</mediatype>
<mediatype>text/x-chdr</mediatype>
<mediatype>text/x-csrc</mediatype>
<mediatype>text/x-java</mediatype>
<mediatype>text/x-moc</mediatype>
<mediatype>text/x-pascal</mediatype>
<mediatype>text/x-tcl</mediatype>
<mediatype>text/x-tex</mediatype>
<mediatype>application/x-shellscript</mediatype>
<mediatype>text/x-c</mediatype>
<mediatype>text/x-c++</mediatype>
</provides>
</component>

@ -8,7 +8,10 @@ Some suggestions to get started:
- Help with packaging on various distributions needed!
- To use print debugging to the [Helix log file][log-file], you must:
* Print using `log::info!`, `warn!`, or `error!`. (`log::info!("helix!")`)
* Pass the appropriate verbosity level option for the desired log level. (`hx -v <file>` for info, more `v`s for higher severity inclusive)
* Pass the appropriate verbosity level option for the desired log level. (`hx -v <file>` for info, more `v`s for higher verbosity)
* Want to display the logs in a separate file instead of using the `:log-open` command in your compiled Helix editor? Start your debug version with `cargo run -- --log foo.log` and in a new terminal use `tail -f foo.log`
- While developing, you may want to run Helix in debug mode with `cargo run` instead of building a release version, since debug builds compile much faster
- Looking for even faster compile times? Give [mold](https://github.com/rui314/mold) a try
- If your preferred language is missing, integrating a tree-sitter grammar for
it and defining syntax highlight queries for it is straightforward and
doesn't require much knowledge of the internals.
@ -30,7 +33,13 @@ inside the project. We use [xtask][xtask] as an ad-hoc task runner and
thus do not require any dependencies other than `cargo` (You don't have
to `cargo install` anything either).
# Integration tests
# Testing
## Unit tests/Documentation tests
Run `cargo test --workspace` to run unit tests and documentation tests in all packages.
## Integration tests
Integration tests for helix-term can be run with `cargo integration-test`. Code
contributors are strongly encouraged to write integration tests for their code.

@ -5,6 +5,7 @@ Helix releases are versioned in the Calendar Versioning scheme:
we'll use `<tag>` as a placeholder for the tag being published.
* Merge the changelog PR
* Add new `<release>` entry in `contrib/Helix.appdata.xml` with release information according to the [AppStream spec](https://www.freedesktop.org/software/appstream/docs/sect-Metadata-Releases.html)
* Tag and push
* `git tag -s -m "<tag>" -a <tag> && git push`
* Make sure to switch to master and pull first

@ -16,22 +16,6 @@
"type": "github"
}
},
"devshell": {
"flake": false,
"locked": {
"lastModified": 1667210711,
"narHash": "sha256-IoErjXZAkzYWHEpQqwu/DeRNJGFdR7X2OGbkhMqMrpw=",
"owner": "numtide",
"repo": "devshell",
"rev": "96a9dd12b8a447840cc246e17a47b81a4268bba7",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "devshell",
"type": "github"
}
},
"dream2nix": {
"inputs": {
"alejandra": [
@ -42,10 +26,12 @@
],
"crane": "crane",
"devshell": [
"nci"
],
"flake-parts": [
"nci",
"devshell"
"parts"
],
"flake-parts": "flake-parts",
"flake-utils-pre-commit": [
"nci"
],
@ -70,14 +56,17 @@
],
"pre-commit-hooks": [
"nci"
],
"pruned-racket-catalog": [
"nci"
]
},
"locked": {
"lastModified": 1671323629,
"narHash": "sha256-9KHTPjIDjfnzZ4NjpE3gGIVHVHopy6weRDYO/7Y3hF8=",
"lastModified": 1677289985,
"narHash": "sha256-lUp06cTTlWubeBGMZqPl9jODM99LpWMcwxRiscFAUJg=",
"owner": "nix-community",
"repo": "dream2nix",
"rev": "2d7d68505c8619410df2c6b6463985f97cbcba6e",
"rev": "28b973a8d4c30cc1cbb3377ea2023a76bc3fb889",
"type": "github"
},
"original": {
@ -86,24 +75,6 @@
"type": "github"
}
},
"flake-parts": {
"inputs": {
"nixpkgs-lib": "nixpkgs-lib"
},
"locked": {
"lastModified": 1668450977,
"narHash": "sha256-cfLhMhnvXn6x1vPm+Jow3RiFAUSCw/l1utktCw5rVA4=",
"owner": "hercules-ci",
"repo": "flake-parts",
"rev": "d591857e9d7dd9ddbfba0ea02b43b927c3c0f1fa",
"type": "github"
},
"original": {
"owner": "hercules-ci",
"repo": "flake-parts",
"type": "github"
}
},
"flake-utils": {
"locked": {
"lastModified": 1659877975,
@ -119,23 +90,40 @@
"type": "github"
}
},
"mk-naked-shell": {
"flake": false,
"locked": {
"lastModified": 1676572903,
"narHash": "sha256-oQoDHHUTxNVSURfkFcYLuAK+btjs30T4rbEUtCUyKy8=",
"owner": "yusdacra",
"repo": "mk-naked-shell",
"rev": "aeca9f8aa592f5e8f71f407d081cb26fd30c5a57",
"type": "github"
},
"original": {
"owner": "yusdacra",
"repo": "mk-naked-shell",
"type": "github"
}
},
"nci": {
"inputs": {
"devshell": "devshell",
"dream2nix": "dream2nix",
"mk-naked-shell": "mk-naked-shell",
"nixpkgs": [
"nixpkgs"
],
"parts": "parts",
"rust-overlay": [
"rust-overlay"
]
},
"locked": {
"lastModified": 1671430291,
"narHash": "sha256-UIc7H8F3N8rK72J/Vj5YJdV72tvDvYjH+UPsOFvlcsE=",
"lastModified": 1677297103,
"narHash": "sha256-ArlJIbp9NGV9yvhZdV0SOUFfRlI/kHeKoCk30NbSiLc=",
"owner": "yusdacra",
"repo": "nix-cargo-integration",
"rev": "b1b0d38b8c3b0d0e6a38638d5bbe10b0bc67522c",
"rev": "a79272a2cb0942392bb3a5bf9a3ec6bc568795b2",
"type": "github"
},
"original": {
@ -146,11 +134,11 @@
},
"nixpkgs": {
"locked": {
"lastModified": 1671359686,
"narHash": "sha256-3MpC6yZo+Xn9cPordGz2/ii6IJpP2n8LE8e/ebUXLrs=",
"lastModified": 1677063315,
"narHash": "sha256-qiB4ajTeAOVnVSAwCNEEkoybrAlA+cpeiBxLobHndE8=",
"owner": "nixos",
"repo": "nixpkgs",
"rev": "04f574a1c0fde90b51bf68198e2297ca4e7cccf4",
"rev": "988cc958c57ce4350ec248d2d53087777f9e1949",
"type": "github"
},
"original": {
@ -163,11 +151,11 @@
"nixpkgs-lib": {
"locked": {
"dir": "lib",
"lastModified": 1665349835,
"narHash": "sha256-UK4urM3iN80UXQ7EaOappDzcisYIuEURFRoGQ/yPkug=",
"lastModified": 1675183161,
"narHash": "sha256-Zq8sNgAxDckpn7tJo7V1afRSk2eoVbu3OjI1QklGLNg=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "34c5293a71ffdb2fe054eb5288adc1882c1eb0b1",
"rev": "e1e1b192c1a5aab2960bf0a0bd53a2e8124fa18e",
"type": "github"
},
"original": {
@ -178,10 +166,50 @@
"type": "github"
}
},
"parts": {
"inputs": {
"nixpkgs-lib": [
"nci",
"nixpkgs"
]
},
"locked": {
"lastModified": 1675933616,
"narHash": "sha256-/rczJkJHtx16IFxMmAWu5nNYcSXNg1YYXTHoGjLrLUA=",
"owner": "hercules-ci",
"repo": "flake-parts",
"rev": "47478a4a003e745402acf63be7f9a092d51b83d7",
"type": "github"
},
"original": {
"owner": "hercules-ci",
"repo": "flake-parts",
"type": "github"
}
},
"parts_2": {
"inputs": {
"nixpkgs-lib": "nixpkgs-lib"
},
"locked": {
"lastModified": 1675933616,
"narHash": "sha256-/rczJkJHtx16IFxMmAWu5nNYcSXNg1YYXTHoGjLrLUA=",
"owner": "hercules-ci",
"repo": "flake-parts",
"rev": "47478a4a003e745402acf63be7f9a092d51b83d7",
"type": "github"
},
"original": {
"owner": "hercules-ci",
"repo": "flake-parts",
"type": "github"
}
},
"root": {
"inputs": {
"nci": "nci",
"nixpkgs": "nixpkgs",
"parts": "parts_2",
"rust-overlay": "rust-overlay"
}
},
@ -193,11 +221,11 @@
]
},
"locked": {
"lastModified": 1671416426,
"narHash": "sha256-kpSH1Jrxfk2qd0pRPJn1eQdIOseGv5JuE+YaOrqU9s4=",
"lastModified": 1677292251,
"narHash": "sha256-D+6q5Z2MQn3UFJtqsM5/AvVHi3NXKZTIMZt1JGq/spA=",
"owner": "oxalica",
"repo": "rust-overlay",
"rev": "fbaaff24f375ac25ec64268b0a0d63f91e474b7d",
"rev": "34cdbf6ad480ce13a6a526f57d8b9e609f3d65dc",
"type": "github"
},
"original": {

@ -12,16 +12,10 @@
inputs.nixpkgs.follows = "nixpkgs";
inputs.rust-overlay.follows = "rust-overlay";
};
parts.url = "github:hercules-ci/flake-parts";
};
outputs = {
self,
nixpkgs,
nci,
...
}: let
lib = nixpkgs.lib;
ncl = nci.lib.nci-lib;
outputs = inp: let
mkRootPath = rel:
builtins.path {
path = "${toString ./.}/${rel}";
@ -32,6 +26,12 @@
".envrc"
".ignore"
".github"
".gitignore"
"logo.svg"
"logo_dark.svg"
"logo_light.svg"
"rust-toolchain.toml"
"rustfmt.toml"
"runtime"
"screenshot.png"
"book"
@ -46,6 +46,7 @@
"flake.lock"
];
ignorePaths = path: type: let
inherit (inp.nixpkgs) lib;
# split the nix store path into its components
components = lib.splitString "/" path;
# drop off the `/nix/hash-source` section from the path
@ -61,118 +62,107 @@
# filter out unnecessary paths
filter = ignorePaths;
};
outputs = nci.lib.makeOutputs {
root = ./.;
config = common: {
outputs = {
# rename helix-term to helix since it's our main package
rename = {"helix-term" = "helix";};
# Set default app to hx (binary is from helix-term release build)
# Set default package to helix-term release build
defaults = {
app = "hx";
package = "helix";
};
};
cCompiler.package = with common.pkgs;
if stdenv.isLinux
then gcc
else clang;
shell = {
packages = with common.pkgs;
[lld_13 cargo-flamegraph rust-analyzer]
++ (lib.optional (stdenv.isx86_64 && stdenv.isLinux) cargo-tarpaulin)
++ (lib.optional stdenv.isLinux lldb)
++ (lib.optional stdenv.isDarwin darwin.apple_sdk.frameworks.CoreFoundation);
env = [
{
name = "HELIX_RUNTIME";
eval = "$PWD/runtime";
}
{
name = "RUST_BACKTRACE";
value = "1";
}
in
inp.parts.lib.mkFlake {inputs = inp;} {
imports = [inp.nci.flakeModule];
systems = [
"x86_64-linux"
"x86_64-darwin"
"aarch64-linux"
"aarch64-darwin"
"i686-linux"
];
perSystem = {
config,
pkgs,
lib,
...
}: let
makeOverridableHelix = old: config: let
grammars = pkgs.callPackage ./grammars.nix config;
runtimeDir = pkgs.runCommand "helix-runtime" {} ''
mkdir -p $out
ln -s ${mkRootPath "runtime"}/* $out
rm -r $out/grammars
ln -s ${grammars} $out/grammars
'';
helix-wrapped =
pkgs.runCommand
old.name
{
name = "RUSTFLAGS";
eval =
if common.pkgs.stdenv.isLinux
then "$RUSTFLAGS\" -C link-arg=-fuse-ld=lld -C target-cpu=native -Clink-arg=-Wl,--no-rosegment\""
else "$RUSTFLAGS";
inherit (old) pname version;
meta = old.meta or {};
passthru =
(old.passthru or {})
// {
unwrapped = old;
};
nativeBuildInputs = [pkgs.makeWrapper];
makeWrapperArgs = config.makeWrapperArgs or [];
}
];
};
};
pkgConfig = common: {
helix-term = {
# Wrap helix with runtime
wrapper = _: old: let
inherit (common) pkgs;
makeOverridableHelix = old: config: let
grammars = pkgs.callPackage ./grammars.nix config;
runtimeDir = pkgs.runCommand "helix-runtime" {} ''
mkdir -p $out
ln -s ${mkRootPath "runtime"}/* $out
rm -r $out/grammars
ln -s ${grammars} $out/grammars
'';
helix-wrapped =
common.internal.pkgsSet.utils.wrapDerivation old
{
nativeBuildInputs = [pkgs.makeWrapper];
makeWrapperArgs = config.makeWrapperArgs or [];
}
''
rm -rf $out/bin
mkdir -p $out/bin
ln -sf ${old}/bin/* $out/bin/
wrapProgram "$out/bin/hx" ''${makeWrapperArgs[@]} --set HELIX_RUNTIME "${runtimeDir}"
'';
in
helix-wrapped
// {override = makeOverridableHelix old;};
in
makeOverridableHelix old {};
overrides.fix-build.overrideAttrs = prev: {
src = filteredSource;
# disable fetching and building of tree-sitter grammars in the helix-term build.rs
HELIX_DISABLE_AUTO_GRAMMAR_BUILD = "1";
buildInputs = ncl.addBuildInputs prev [common.config.cCompiler.package.cc.lib];
# link languages and theme toml files since helix-term expects them (for tests)
preConfigure = ''
${prev.preConfigure or ""}
${
lib.concatMapStringsSep
"\n"
(path: "ln -sf ${mkRootPath path} ..")
["languages.toml" "theme.toml" "base16_theme.toml"]
}
''
cp -rs --no-preserve=mode,ownership ${old} $out
wrapProgram "$out/bin/hx" ''${makeWrapperArgs[@]} --set HELIX_RUNTIME "${runtimeDir}"
'';
checkPhase = ":";
meta.mainProgram = "hx";
in
helix-wrapped
// {
override = makeOverridableHelix old;
passthru =
helix-wrapped.passthru
// {
wrapper = old: makeOverridableHelix old config;
};
};
stdenv =
if pkgs.stdenv.isLinux
then pkgs.stdenv
else pkgs.clangStdenv;
rustFlagsEnv =
if stdenv.isLinux
then ''$RUSTFLAGS -C link-arg=-fuse-ld=lld -C target-cpu=native -Clink-arg=-Wl,--no-rosegment''
else "$RUSTFLAGS";
in {
# by default NCI adds rust-analyzer component, but helix toolchain doesn't have rust-analyzer
nci.toolchains.shell.components = ["rust-src" "rustfmt" "clippy"];
nci.projects."helix-project".relPath = "";
nci.crates."helix-term" = {
overrides = {
add-meta.override = _: {meta.mainProgram = "hx";};
add-inputs.overrideAttrs = prev: {
buildInputs = (prev.buildInputs or []) ++ [stdenv.cc.cc.lib];
};
disable-grammar-builds = {
# disable fetching and building of tree-sitter grammars in the helix-term build.rs
HELIX_DISABLE_AUTO_GRAMMAR_BUILD = "1";
};
disable-tests = {checkPhase = ":";};
set-stdenv.override = _: {inherit stdenv;};
set-filtered-src.override = _: {src = filteredSource;};
};
};
packages.helix-unwrapped = config.nci.outputs."helix-term".packages.release;
packages.helix-unwrapped-dev = config.nci.outputs."helix-term".packages.dev;
packages.helix = makeOverridableHelix config.packages.helix-unwrapped {};
packages.helix-dev = makeOverridableHelix config.packages.helix-unwrapped-dev {};
packages.default = config.packages.helix;
devShells.default = config.nci.outputs."helix-project".devShell.overrideAttrs (old: {
nativeBuildInputs =
(old.nativeBuildInputs or [])
++ (with pkgs; [lld_13 cargo-flamegraph rust-analyzer])
++ (lib.optional (stdenv.isx86_64 && stdenv.isLinux) pkgs.cargo-tarpaulin)
++ (lib.optional stdenv.isLinux pkgs.lldb)
++ (lib.optional stdenv.isDarwin pkgs.darwin.apple_sdk.frameworks.CoreFoundation);
shellHook = ''
export HELIX_RUNTIME="$PWD/runtime"
export RUST_BACKTRACE="1"
export RUSTFLAGS="${rustFlagsEnv}"
'';
});
};
};
in
outputs
// {
packages =
lib.mapAttrs
(
system: packages:
packages
// {
helix-unwrapped = packages.helix.passthru.unwrapped;
helix-unwrapped-dev = packages.helix-dev.passthru.unwrapped;
}
)
outputs.packages;
};
nixConfig = {
extra-substituters = ["https://helix.cachix.org"];

@ -17,7 +17,7 @@ integration = []
[dependencies]
helix-loader = { version = "0.6", path = "../helix-loader" }
ropey = { version = "1.5.1", default-features = false, features = ["simd"] }
ropey = { version = "1.6.0", default-features = false, features = ["simd"] }
smallvec = "1.10"
smartstring = "1.0.1"
unicode-segmentation = "1.10"
@ -29,14 +29,14 @@ tree-sitter = "0.20"
once_cell = "1.17"
arc-swap = "1"
regex = "1"
bitflags = "1.3"
ahash = "0.8.2"
hashbrown = { version = "0.13.1", features = ["raw"] }
bitflags = "2.0"
ahash = "0.8.3"
hashbrown = { version = "0.13.2", features = ["raw"] }
log = "0.4"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
toml = "0.5"
toml = "0.7"
imara-diff = "0.1.0"
@ -49,3 +49,4 @@ textwrap = "0.16.0"
[dev-dependencies]
quickcheck = { version = "1", default-features = false }
indoc = "2.0.1"

@ -45,7 +45,7 @@ fn find_line_comment(
// determine margin of 0 or 1 for uncommenting; if any comment token is not followed by a space,
// a margin of 0 is used for all lines.
if matches!(line_slice.get_char(pos + token_len), Some(c) if c != ' ') {
if !matches!(line_slice.get_char(pos + token_len), Some(c) if c == ' ') {
margin = 0;
}
@ -68,7 +68,7 @@ pub fn toggle_line_comments(doc: &Rope, selection: &Selection, token: Option<&st
let mut min_next_line = 0;
for selection in selection {
let (start, end) = selection.line_range(text);
let start = start.max(min_next_line).min(text.len_lines());
let start = start.clamp(min_next_line, text.len_lines());
let end = (end + 1).min(text.len_lines());
lines.extend(start..end);
@ -108,8 +108,8 @@ mod test {
let text = doc.slice(..);
let res = find_line_comment("//", text, 0..3);
// (commented = true, to_change = [line 0, line 2], min = col 2, margin = 1)
assert_eq!(res, (false, vec![0, 2], 2, 1));
        // (commented = false, to_change = [line 0, line 2], min = col 2, margin = 0)
assert_eq!(res, (false, vec![0, 2], 2, 0));
// comment
let transaction = toggle_line_comments(&doc, &selection, None);
@ -136,6 +136,17 @@ mod test {
assert_eq!(doc, " 1\n\n 2\n 3");
assert!(selection.len() == 1); // to ignore the selection unused warning
// 0 margin comments, with no space
doc = Rope::from("//");
// reset the selection.
selection = Selection::single(0, doc.len_chars() - 1);
let transaction = toggle_line_comments(&doc, &selection, None);
transaction.apply(&mut doc);
selection = selection.map(transaction.changes());
assert_eq!(doc, "");
assert!(selection.len() == 1); // to ignore the selection unused warning
// TODO: account for uncommenting with uneven comment indentation
}
}

@ -35,7 +35,7 @@ pub enum DiagnosticTag {
Deprecated,
}
/// Corresponds to [`lsp_types::Diagnostic`](https://docs.rs/lsp-types/0.91.0/lsp_types/struct.Diagnostic.html)
/// Corresponds to [`lsp_types::Diagnostic`](https://docs.rs/lsp-types/0.94.0/lsp_types/struct.Diagnostic.html)
#[derive(Debug, Clone)]
pub struct Diagnostic {
pub range: Range,

@ -0,0 +1,384 @@
//! The `DocumentFormatter` forms the bridge between the raw document text
//! and onscreen positioning. It yields the text graphemes as an iterator
//! and traverses (part of) the document text. During that traversal it
//! handles grapheme detection, softwrapping and annotations.
//! It yields `FormattedGrapheme`s and their corresponding visual coordinates.
//!
//! As both virtual text and softwrapping can insert additional lines into the document
//! it is generally not possible to find the start of the previous visual line.
//! Instead the `DocumentFormatter` starts at the last "checkpoint" (usually a linebreak)
//! called a "block" and the caller must advance it as needed.
use std::borrow::Cow;
use std::fmt::Debug;
use std::mem::{replace, take};
#[cfg(test)]
mod test;
use unicode_segmentation::{Graphemes, UnicodeSegmentation};
use crate::graphemes::{Grapheme, GraphemeStr};
use crate::syntax::Highlight;
use crate::text_annotations::TextAnnotations;
use crate::{Position, RopeGraphemes, RopeSlice};
/// TODO make Highlight a u32 to reduce the size of this enum to a single word.
#[derive(Debug, Clone, Copy)]
pub enum GraphemeSource {
Document {
codepoints: u32,
},
/// Inline virtual text can not be highlighted with a `Highlight` iterator
/// because it's not part of the document. Instead the `Highlight`
/// is emitted right by the document formatter
VirtualText {
highlight: Option<Highlight>,
},
}
#[derive(Debug, Clone)]
pub struct FormattedGrapheme<'a> {
pub grapheme: Grapheme<'a>,
pub source: GraphemeSource,
}
impl<'a> FormattedGrapheme<'a> {
pub fn new(
g: GraphemeStr<'a>,
visual_x: usize,
tab_width: u16,
source: GraphemeSource,
) -> FormattedGrapheme<'a> {
FormattedGrapheme {
grapheme: Grapheme::new(g, visual_x, tab_width),
source,
}
}
/// Returns whether this grapheme is virtual inline text
pub fn is_virtual(&self) -> bool {
matches!(self.source, GraphemeSource::VirtualText { .. })
}
pub fn placeholder() -> Self {
FormattedGrapheme {
grapheme: Grapheme::Other { g: " ".into() },
source: GraphemeSource::Document { codepoints: 0 },
}
}
pub fn doc_chars(&self) -> usize {
match self.source {
GraphemeSource::Document { codepoints } => codepoints as usize,
GraphemeSource::VirtualText { .. } => 0,
}
}
pub fn is_whitespace(&self) -> bool {
self.grapheme.is_whitespace()
}
pub fn width(&self) -> usize {
self.grapheme.width()
}
pub fn is_word_boundary(&self) -> bool {
self.grapheme.is_word_boundary()
}
}
#[derive(Debug, Clone)]
pub struct TextFormat {
pub soft_wrap: bool,
pub tab_width: u16,
pub max_wrap: u16,
pub max_indent_retain: u16,
pub wrap_indicator: Box<str>,
pub wrap_indicator_highlight: Option<Highlight>,
pub viewport_width: u16,
}
// this default implementation is basically only used for testing or when softwrap is disabled
impl Default for TextFormat {
fn default() -> Self {
TextFormat {
soft_wrap: false,
tab_width: 4,
max_wrap: 3,
max_indent_retain: 4,
wrap_indicator: Box::from(" "),
viewport_width: 17,
wrap_indicator_highlight: None,
}
}
}
#[derive(Debug)]
pub struct DocumentFormatter<'t> {
text_fmt: &'t TextFormat,
annotations: &'t TextAnnotations,
/// The visual position at the end of the last yielded word boundary
visual_pos: Position,
graphemes: RopeGraphemes<'t>,
/// The character pos of the `graphemes` iter used for inserting annotations
char_pos: usize,
/// The line pos of the `graphemes` iter used for inserting annotations
line_pos: usize,
exhausted: bool,
/// Line breaks to be reserved for virtual text
/// at the next line break
virtual_lines: usize,
inline_anntoation_graphemes: Option<(Graphemes<'t>, Option<Highlight>)>,
// softwrap specific
/// The indentation of the current line
/// Is set to `None` if the indentation level is not yet known
/// because no non-whitespace graphemes have been encountered yet
indent_level: Option<usize>,
/// In case a long word needs to be split, a single grapheme might need to be wrapped
/// to the next line while the rest of the word stays on the current one
peeked_grapheme: Option<(FormattedGrapheme<'t>, usize)>,
/// A first-in first-out (fifo) buffer for the Graphemes of any given word
word_buf: Vec<FormattedGrapheme<'t>>,
/// The index of the next grapheme that will be yielded from the `word_buf`
word_i: usize,
}
impl<'t> DocumentFormatter<'t> {
/// Creates a new formatter at the last block before `char_idx`.
/// A block is a chunk which always ends with a linebreak.
/// This is usually just a normal line break.
/// However very long lines are always wrapped at constant intervals that can be cheaply calculated
/// to avoid pathological behaviour.
pub fn new_at_prev_checkpoint(
text: RopeSlice<'t>,
text_fmt: &'t TextFormat,
annotations: &'t TextAnnotations,
char_idx: usize,
) -> (Self, usize) {
// TODO divide long lines into blocks to avoid bad performance for long lines
let block_line_idx = text.char_to_line(char_idx.min(text.len_chars()));
let block_char_idx = text.line_to_char(block_line_idx);
annotations.reset_pos(block_char_idx);
(
DocumentFormatter {
text_fmt,
annotations,
visual_pos: Position { row: 0, col: 0 },
graphemes: RopeGraphemes::new(text.slice(block_char_idx..)),
char_pos: block_char_idx,
exhausted: false,
virtual_lines: 0,
indent_level: None,
peeked_grapheme: None,
word_buf: Vec::with_capacity(64),
word_i: 0,
line_pos: block_line_idx,
inline_anntoation_graphemes: None,
},
block_char_idx,
)
}
fn next_inline_annotation_grapheme(&mut self) -> Option<(&'t str, Option<Highlight>)> {
loop {
if let Some(&mut (ref mut annotation, highlight)) =
self.inline_anntoation_graphemes.as_mut()
{
if let Some(grapheme) = annotation.next() {
return Some((grapheme, highlight));
}
}
if let Some((annotation, highlight)) =
self.annotations.next_inline_annotation_at(self.char_pos)
{
self.inline_anntoation_graphemes = Some((
UnicodeSegmentation::graphemes(&*annotation.text, true),
highlight,
))
} else {
return None;
}
}
}
fn advance_grapheme(&mut self, col: usize) -> Option<FormattedGrapheme<'t>> {
let (grapheme, source) =
if let Some((grapheme, highlight)) = self.next_inline_annotation_grapheme() {
(grapheme.into(), GraphemeSource::VirtualText { highlight })
} else if let Some(grapheme) = self.graphemes.next() {
self.virtual_lines += self.annotations.annotation_lines_at(self.char_pos);
let codepoints = grapheme.len_chars() as u32;
let overlay = self.annotations.overlay_at(self.char_pos);
let grapheme = match overlay {
Some((overlay, _)) => overlay.grapheme.as_str().into(),
None => Cow::from(grapheme).into(),
};
self.char_pos += codepoints as usize;
(grapheme, GraphemeSource::Document { codepoints })
} else {
if self.exhausted {
return None;
}
self.exhausted = true;
// EOF grapheme is required for rendering
// and correct position computations
return Some(FormattedGrapheme {
grapheme: Grapheme::Other { g: " ".into() },
source: GraphemeSource::Document { codepoints: 0 },
});
};
let grapheme = FormattedGrapheme::new(grapheme, col, self.text_fmt.tab_width, source);
Some(grapheme)
}
/// Move a word to the next visual line
fn wrap_word(&mut self, virtual_lines_before_word: usize) -> usize {
// softwrap this word to the next line
let indent_carry_over = if let Some(indent) = self.indent_level {
if indent as u16 <= self.text_fmt.max_indent_retain {
indent as u16
} else {
0
}
} else {
// ensure the indent stays 0
self.indent_level = Some(0);
0
};
self.visual_pos.col = indent_carry_over as usize;
self.virtual_lines -= virtual_lines_before_word;
self.visual_pos.row += 1 + virtual_lines_before_word;
let mut i = 0;
let mut word_width = 0;
let wrap_indicator = UnicodeSegmentation::graphemes(&*self.text_fmt.wrap_indicator, true)
.map(|g| {
i += 1;
let grapheme = FormattedGrapheme::new(
g.into(),
self.visual_pos.col + word_width,
self.text_fmt.tab_width,
GraphemeSource::VirtualText {
highlight: self.text_fmt.wrap_indicator_highlight,
},
);
word_width += grapheme.width();
grapheme
});
self.word_buf.splice(0..0, wrap_indicator);
for grapheme in &mut self.word_buf[i..] {
let visual_x = self.visual_pos.col + word_width;
grapheme
.grapheme
.change_position(visual_x, self.text_fmt.tab_width);
word_width += grapheme.width();
}
word_width
}
fn advance_to_next_word(&mut self) {
self.word_buf.clear();
let mut word_width = 0;
let virtual_lines_before_word = self.virtual_lines;
let mut virtual_lines_before_grapheme = self.virtual_lines;
loop {
// softwrap word if necessary
if word_width + self.visual_pos.col >= self.text_fmt.viewport_width as usize {
// wrapping this word would move too much text to the next line
// split the word at the line end instead
if word_width > self.text_fmt.max_wrap as usize {
// Usually we stop accumulating graphemes as soon as softwrapping becomes necessary.
// However if the last grapheme is multiple columns wide it might extend beyond the EOL.
// The condition below ensures that this grapheme is not cut off and is instead wrapped to the next line.
if word_width + self.visual_pos.col > self.text_fmt.viewport_width as usize {
self.peeked_grapheme = self.word_buf.pop().map(|grapheme| {
(grapheme, self.virtual_lines - virtual_lines_before_grapheme)
});
self.virtual_lines = virtual_lines_before_grapheme;
}
return;
}
word_width = self.wrap_word(virtual_lines_before_word);
}
virtual_lines_before_grapheme = self.virtual_lines;
let grapheme = if let Some((grapheme, virtual_lines)) = self.peeked_grapheme.take() {
self.virtual_lines += virtual_lines;
grapheme
} else if let Some(grapheme) = self.advance_grapheme(self.visual_pos.col + word_width) {
grapheme
} else {
return;
};
// Track indentation
if !grapheme.is_whitespace() && self.indent_level.is_none() {
self.indent_level = Some(self.visual_pos.col);
} else if grapheme.grapheme == Grapheme::Newline {
self.indent_level = None;
}
let is_word_boundary = grapheme.is_word_boundary();
word_width += grapheme.width();
self.word_buf.push(grapheme);
if is_word_boundary {
return;
}
}
}
/// returns the document line pos of the **next** grapheme that will be yielded
pub fn line_pos(&self) -> usize {
self.line_pos
}
/// returns the visual pos of the **next** grapheme that will be yielded
pub fn visual_pos(&self) -> Position {
self.visual_pos
}
}
impl<'t> Iterator for DocumentFormatter<'t> {
type Item = (FormattedGrapheme<'t>, Position);
fn next(&mut self) -> Option<Self::Item> {
let grapheme = if self.text_fmt.soft_wrap {
if self.word_i >= self.word_buf.len() {
self.advance_to_next_word();
self.word_i = 0;
}
let grapheme = replace(
self.word_buf.get_mut(self.word_i)?,
FormattedGrapheme::placeholder(),
);
self.word_i += 1;
grapheme
} else {
self.advance_grapheme(self.visual_pos.col)?
};
let pos = self.visual_pos;
if grapheme.grapheme == Grapheme::Newline {
self.visual_pos.row += 1;
self.visual_pos.row += take(&mut self.virtual_lines);
self.visual_pos.col = 0;
self.line_pos += 1;
} else {
self.visual_pos.col += grapheme.width();
}
Some((grapheme, pos))
}
}

@ -0,0 +1,182 @@
use std::rc::Rc;
use crate::doc_formatter::{DocumentFormatter, TextFormat};
use crate::text_annotations::{InlineAnnotation, Overlay, TextAnnotations};
impl TextFormat {
fn new_test(softwrap: bool) -> Self {
TextFormat {
soft_wrap: softwrap,
tab_width: 2,
max_wrap: 3,
max_indent_retain: 4,
wrap_indicator: ".".into(),
wrap_indicator_highlight: None,
// use a prime number for the viewport width so it doesn't line up with repeated text too often
viewport_width: 17,
}
}
}
impl<'t> DocumentFormatter<'t> {
fn collect_to_str(&mut self) -> String {
use std::fmt::Write;
let mut res = String::new();
let viewport_width = self.text_fmt.viewport_width;
let mut line = 0;
for (grapheme, pos) in self {
if pos.row != line {
line += 1;
assert_eq!(pos.row, line);
write!(res, "\n{}", ".".repeat(pos.col)).unwrap();
assert!(
pos.col <= viewport_width as usize,
"softwrapped failed {}<={viewport_width}",
pos.col
);
}
write!(res, "{}", grapheme.grapheme).unwrap();
}
res
}
}
fn softwrap_text(text: &str) -> String {
DocumentFormatter::new_at_prev_checkpoint(
text.into(),
&TextFormat::new_test(true),
&TextAnnotations::default(),
0,
)
.0
.collect_to_str()
}
#[test]
fn basic_softwrap() {
assert_eq!(
softwrap_text(&"foo ".repeat(10)),
"foo foo foo foo \n.foo foo foo foo \n.foo foo "
);
assert_eq!(
softwrap_text(&"fooo ".repeat(10)),
"fooo fooo fooo \n.fooo fooo fooo \n.fooo fooo fooo \n.fooo "
);
// check that we don't wrap unnecessarily
assert_eq!(softwrap_text("\t\txxxx1xxxx2xx\n"), " xxxx1xxxx2xx \n ");
}
#[test]
fn softwrap_indentation() {
assert_eq!(
softwrap_text("\t\tfoo1 foo2 foo3 foo4 foo5 foo6\n"),
" foo1 foo2 \n.....foo3 foo4 \n.....foo5 foo6 \n "
);
assert_eq!(
softwrap_text("\t\t\tfoo1 foo2 foo3 foo4 foo5 foo6\n"),
" foo1 foo2 \n.foo3 foo4 foo5 \n.foo6 \n "
);
}
#[test]
fn long_word_softwrap() {
assert_eq!(
softwrap_text("\t\txxxx1xxxx2xxxx3xxxx4xxxx5xxxx6xxxx7xxxx8xxxx9xxx\n"),
" xxxx1xxxx2xxx\n.....x3xxxx4xxxx5\n.....xxxx6xxxx7xx\n.....xx8xxxx9xxx \n "
);
assert_eq!(
softwrap_text("xxxxxxxx1xxxx2xxx\n"),
"xxxxxxxx1xxxx2xxx\n. \n "
);
assert_eq!(
softwrap_text("\t\txxxx1xxxx 2xxxx3xxxx4xxxx5xxxx6xxxx7xxxx8xxxx9xxx\n"),
" xxxx1xxxx \n.....2xxxx3xxxx4x\n.....xxx5xxxx6xxx\n.....x7xxxx8xxxx9\n.....xxx \n "
);
assert_eq!(
softwrap_text("\t\txxxx1xxx 2xxxx3xxxx4xxxx5xxxx6xxxx7xxxx8xxxx9xxx\n"),
" xxxx1xxx 2xxx\n.....x3xxxx4xxxx5\n.....xxxx6xxxx7xx\n.....xx8xxxx9xxx \n "
);
}
fn overlay_text(text: &str, char_pos: usize, softwrap: bool, overlays: &[Overlay]) -> String {
DocumentFormatter::new_at_prev_checkpoint(
text.into(),
&TextFormat::new_test(softwrap),
TextAnnotations::default().add_overlay(overlays.into(), None),
char_pos,
)
.0
.collect_to_str()
}
#[test]
fn overlay() {
assert_eq!(
overlay_text(
"foobar",
0,
false,
&[Overlay::new(0, "X"), Overlay::new(2, "\t")],
),
"Xo bar "
);
assert_eq!(
overlay_text(
&"foo ".repeat(10),
0,
true,
&[
Overlay::new(2, "\t"),
Overlay::new(5, "\t"),
Overlay::new(16, "X"),
]
),
"fo f o foo \n.foo Xoo foo foo \n.foo foo foo "
);
}
fn annotate_text(text: &str, softwrap: bool, annotations: &[InlineAnnotation]) -> String {
DocumentFormatter::new_at_prev_checkpoint(
text.into(),
&TextFormat::new_test(softwrap),
TextAnnotations::default().add_inline_annotations(annotations.into(), None),
0,
)
.0
.collect_to_str()
}
#[test]
fn annotation() {
assert_eq!(
annotate_text("bar", false, &[InlineAnnotation::new(0, "foo")]),
"foobar "
);
assert_eq!(
annotate_text(
&"foo ".repeat(10),
true,
&[InlineAnnotation::new(0, "foo ")]
),
"foo foo foo foo \n.foo foo foo foo \n.foo foo foo "
);
}
#[test]
fn annotation_and_overlay() {
assert_eq!(
DocumentFormatter::new_at_prev_checkpoint(
"bbar".into(),
&TextFormat::new_test(false),
TextAnnotations::default()
.add_inline_annotations(Rc::new([InlineAnnotation::new(0, "fooo")]), None)
.add_overlay(Rc::new([Overlay::new(0, "\t")]), None),
0,
)
.0
.collect_to_str(),
"fooo bar "
);
}

@ -5,7 +5,88 @@ use ropey::{iter::Chunks, str_utils::byte_to_char_idx, RopeSlice};
use unicode_segmentation::{GraphemeCursor, GraphemeIncomplete};
use unicode_width::UnicodeWidthStr;
use std::fmt;
use std::borrow::Cow;
use std::fmt::{self, Debug, Display};
use std::marker::PhantomData;
use std::ops::Deref;
use std::ptr::NonNull;
use std::{slice, str};
use crate::chars::{char_is_whitespace, char_is_word};
use crate::LineEnding;
#[inline]
pub fn tab_width_at(visual_x: usize, tab_width: u16) -> usize {
tab_width as usize - (visual_x % tab_width as usize)
}
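// Illustrative check (hypothetical values, not part of this change): with `tab_width = 4`,
// this yields 4 at visual column 0, 3 at column 1 and 1 at column 3, so a tab always
// advances the cursor to the next multiple of the tab width.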
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Grapheme<'a> {
Newline,
Tab { width: usize },
Other { g: GraphemeStr<'a> },
}
impl<'a> Grapheme<'a> {
pub fn new(g: GraphemeStr<'a>, visual_x: usize, tab_width: u16) -> Grapheme<'a> {
match g {
g if g == "\t" => Grapheme::Tab {
width: tab_width_at(visual_x, tab_width),
},
_ if LineEnding::from_str(&g).is_some() => Grapheme::Newline,
_ => Grapheme::Other { g },
}
}
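// Illustrative example (hypothetical values, not part of this change): with a tab width of 4,
// `Grapheme::new("\t".into(), 2, 4)` yields `Grapheme::Tab { width: 2 }`, while any
// line-ending string such as "\n" is normalized to `Grapheme::Newline`.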
pub fn change_position(&mut self, visual_x: usize, tab_width: u16) {
if let Grapheme::Tab { width } = self {
*width = tab_width_at(visual_x, tab_width)
}
}
/// Returns the visual width of this grapheme.
#[inline]
pub fn width(&self) -> usize {
match *self {
// width is not cached because we are dealing with
// ASCII almost all the time which already has a fastpath
// it's okay to convert to u16 here because no codepoint has a width larger
// than 2 and graphemes are usually at most two visible codepoints wide
Grapheme::Other { ref g } => grapheme_width(g),
Grapheme::Tab { width } => width,
Grapheme::Newline => 1,
}
}
pub fn is_whitespace(&self) -> bool {
!matches!(&self, Grapheme::Other { g } if !g.chars().all(char_is_whitespace))
}
// TODO currently word boundaries are used for softwrapping.
// This works best for programming languages and works well for prose.
// This could however be improved in the future by considering
// unicode character classes.
pub fn is_word_boundary(&self) -> bool {
!matches!(&self, Grapheme::Other { g,.. } if g.chars().all(char_is_word))
}
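// Note (illustrative, not part of this change): under these definitions a letter grapheme
// is neither whitespace nor a word boundary, a space or tab is both, and punctuation such
// as "," is a word boundary but not whitespace.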
}
impl Display for Grapheme<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match *self {
Grapheme::Newline => write!(f, " "),
Grapheme::Tab { width } => {
for _ in 0..width {
write!(f, " ")?;
}
Ok(())
}
Grapheme::Other { ref g } => {
write!(f, "{g}")
}
}
}
}
#[must_use]
pub fn grapheme_width(g: &str) -> usize {
@ -27,6 +108,8 @@ pub fn grapheme_width(g: &str) -> usize {
// We use max(1) here because all grapheme clusters--even ill-formed
// ones--should have at least some width so they can be edited
// properly.
// TODO properly handle unicode width for all codepoints
// example of where unicode width is currently wrong: 🤦🏼‍♂️ (taken from https://hsivonen.fi/string-length/)
UnicodeWidthStr::width(g).max(1)
}
}
@ -341,3 +424,101 @@ impl<'a> Iterator for RopeGraphemes<'a> {
}
}
}
/// A highly compressed Cow<'a, str> that holds
/// at most u31::MAX bytes and is read-only.
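///
/// The most significant bit of `len` records whether the bytes are owned (and must be
/// freed on drop) or merely borrowed for `'a`; the remaining 31 bits store the length.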
pub struct GraphemeStr<'a> {
ptr: NonNull<u8>,
len: u32,
phantom: PhantomData<&'a str>,
}
impl GraphemeStr<'_> {
const MASK_OWNED: u32 = 1 << 31;
fn compute_len(&self) -> usize {
(self.len & !Self::MASK_OWNED) as usize
}
}
impl Deref for GraphemeStr<'_> {
type Target = str;
fn deref(&self) -> &Self::Target {
unsafe {
let bytes = slice::from_raw_parts(self.ptr.as_ptr(), self.compute_len());
str::from_utf8_unchecked(bytes)
}
}
}
impl Drop for GraphemeStr<'_> {
fn drop(&mut self) {
if self.len & Self::MASK_OWNED != 0 {
// free allocation
unsafe {
drop(Box::from_raw(slice::from_raw_parts_mut(
self.ptr.as_ptr(),
self.compute_len(),
)));
}
}
}
}
impl<'a> From<&'a str> for GraphemeStr<'a> {
fn from(g: &'a str) -> Self {
GraphemeStr {
ptr: unsafe { NonNull::new_unchecked(g.as_bytes().as_ptr() as *mut u8) },
len: i32::try_from(g.len()).unwrap() as u32,
phantom: PhantomData,
}
}
}
impl<'a> From<String> for GraphemeStr<'a> {
fn from(g: String) -> Self {
let len = g.len();
let ptr = Box::into_raw(g.into_bytes().into_boxed_slice()) as *mut u8;
GraphemeStr {
ptr: unsafe { NonNull::new_unchecked(ptr) },
len: i32::try_from(len).unwrap() as u32 | Self::MASK_OWNED, // mark as owned so `Drop` frees the allocation
phantom: PhantomData,
}
}
}
impl<'a> From<Cow<'a, str>> for GraphemeStr<'a> {
fn from(g: Cow<'a, str>) -> Self {
match g {
Cow::Borrowed(g) => g.into(),
Cow::Owned(g) => g.into(),
}
}
}
impl<T: Deref<Target = str>> PartialEq<T> for GraphemeStr<'_> {
fn eq(&self, other: &T) -> bool {
self.deref() == other.deref()
}
}
impl PartialEq<str> for GraphemeStr<'_> {
fn eq(&self, other: &str) -> bool {
self.deref() == other
}
}
impl Eq for GraphemeStr<'_> {}
impl Debug for GraphemeStr<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
Debug::fmt(self.deref(), f)
}
}
impl Display for GraphemeStr<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
Display::fmt(self.deref(), f)
}
}
impl Clone for GraphemeStr<'_> {
fn clone(&self) -> Self {
self.deref().to_owned().into()
}
}

@ -1,114 +1,53 @@
use chrono::{Datelike, Duration, NaiveDate, NaiveDateTime, NaiveTime, Timelike};
use chrono::{Duration, NaiveDate, NaiveDateTime, NaiveTime};
use once_cell::sync::Lazy;
use regex::Regex;
use ropey::RopeSlice;
use std::borrow::Cow;
use std::cmp;
use std::fmt::Write;
use super::Increment;
use crate::{Range, Tendril};
/// Increment a Date or DateTime
///
/// If just a Date is selected the day will be incremented.
/// If a DateTime is selected the minute will be incremented.
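///
/// For example (values taken from the tests below): `increment("2020-02-28", 1)` yields
/// `"2020-02-29"` and `increment("2021-11-24 07:12", 1)` yields `"2021-11-24 07:13"`.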
pub fn increment(selected_text: &str, amount: i64) -> Option<String> {
if selected_text.is_empty() {
return None;
}
#[derive(Debug, PartialEq, Eq)]
pub struct DateTimeIncrementor {
date_time: NaiveDateTime,
range: Range,
fmt: &'static str,
field: DateField,
}
FORMATS.iter().find_map(|format| {
let captures = format.regex.captures(selected_text)?;
if captures.len() - 1 != format.fields.len() {
return None;
}
impl DateTimeIncrementor {
pub fn from_range(text: RopeSlice, range: Range) -> Option<DateTimeIncrementor> {
let range = if range.is_empty() {
if range.anchor < text.len_chars() {
// Treat empty range as a cursor range.
range.put_cursor(text, range.anchor + 1, true)
} else {
// The range is empty and at the end of the text.
return None;
let date_time = captures.get(0)?;
let has_date = format.fields.iter().any(|f| f.unit.is_date());
let has_time = format.fields.iter().any(|f| f.unit.is_time());
let date_time = &selected_text[date_time.start()..date_time.end()];
match (has_date, has_time) {
(true, true) => {
let date_time = NaiveDateTime::parse_from_str(date_time, format.fmt).ok()?;
Some(
date_time
.checked_add_signed(Duration::minutes(amount))?
.format(format.fmt)
.to_string(),
)
}
} else {
range
};
FORMATS.iter().find_map(|format| {
let from = range.from().saturating_sub(format.max_len);
let to = (range.from() + format.max_len).min(text.len_chars());
let (from_in_text, to_in_text) = (range.from() - from, range.to() - from);
let text: Cow<str> = text.slice(from..to).into();
let captures = format.regex.captures(&text)?;
if captures.len() - 1 != format.fields.len() {
return None;
(true, false) => {
let date = NaiveDate::parse_from_str(date_time, format.fmt).ok()?;
Some(
date.checked_add_signed(Duration::days(amount))?
.format(format.fmt)
.to_string(),
)
}
let date_time = captures.get(0)?;
let offset = range.from() - from_in_text;
let range = Range::new(date_time.start() + offset, date_time.end() + offset);
let field = captures
.iter()
.skip(1)
.enumerate()
.find_map(|(i, capture)| {
let capture = capture?;
let capture_range = capture.range();
if capture_range.contains(&from_in_text)
&& capture_range.contains(&(to_in_text - 1))
{
Some(format.fields[i])
} else {
None
}
})?;
let has_date = format.fields.iter().any(|f| f.unit.is_date());
let has_time = format.fields.iter().any(|f| f.unit.is_time());
let date_time = &text[date_time.start()..date_time.end()];
let date_time = match (has_date, has_time) {
(true, true) => NaiveDateTime::parse_from_str(date_time, format.fmt).ok()?,
(true, false) => {
let date = NaiveDate::parse_from_str(date_time, format.fmt).ok()?;
date.and_hms_opt(0, 0, 0).unwrap()
}
(false, true) => {
let time = NaiveTime::parse_from_str(date_time, format.fmt).ok()?;
NaiveDate::from_ymd_opt(0, 1, 1).unwrap().and_time(time)
}
(false, false) => return None,
};
Some(DateTimeIncrementor {
date_time,
range,
fmt: format.fmt,
field,
})
})
}
}
impl Increment for DateTimeIncrementor {
fn increment(&self, amount: i64) -> (Range, Tendril) {
let date_time = match self.field.unit {
DateUnit::Years => add_years(self.date_time, amount),
DateUnit::Months => add_months(self.date_time, amount),
DateUnit::Days => add_duration(self.date_time, Duration::days(amount)),
DateUnit::Hours => add_duration(self.date_time, Duration::hours(amount)),
DateUnit::Minutes => add_duration(self.date_time, Duration::minutes(amount)),
DateUnit::Seconds => add_duration(self.date_time, Duration::seconds(amount)),
DateUnit::AmPm => toggle_am_pm(self.date_time),
(false, true) => {
let time = NaiveTime::parse_from_str(date_time, format.fmt).ok()?;
let (adjusted_time, _) = time.overflowing_add_signed(Duration::minutes(amount));
Some(adjusted_time.format(format.fmt).to_string())
}
(false, false) => None,
}
.unwrap_or(self.date_time);
(self.range, date_time.format(self.fmt).to_string().into())
}
})
}
static FORMATS: Lazy<Vec<Format>> = Lazy::new(|| {
@ -144,7 +83,7 @@ impl Format {
fn new(fmt: &'static str) -> Self {
let mut remaining = fmt;
let mut fields = Vec::new();
let mut regex = String::new();
let mut regex = "^".to_string();
let mut max_len = 0;
while let Some(i) = remaining.find('%') {
@ -166,6 +105,7 @@ impl Format {
write!(regex, "({})", field.regex).unwrap();
remaining = &after[spec_len..];
}
regex += "$";
let regex = Regex::new(&regex).unwrap();
@ -305,155 +245,47 @@ impl DateUnit {
}
}
fn ndays_in_month(year: i32, month: u32) -> u32 {
// The first day of the next month...
let (y, m) = if month == 12 {
(year + 1, 1)
} else {
(year, month + 1)
};
let d = NaiveDate::from_ymd_opt(y, m, 1).unwrap();
// ...is preceded by the last day of the original month.
d.pred_opt().unwrap().day()
}
fn add_months(date_time: NaiveDateTime, amount: i64) -> Option<NaiveDateTime> {
let month = (date_time.month0() as i64).checked_add(amount)?;
let year = date_time.year() + i32::try_from(month / 12).ok()?;
let year = if month.is_negative() { year - 1 } else { year };
// Normalize month
let month = month % 12;
let month = if month.is_negative() {
month + 12
} else {
month
} as u32
+ 1;
let day = cmp::min(date_time.day(), ndays_in_month(year, month));
NaiveDate::from_ymd_opt(year, month, day).map(|date| date.and_time(date_time.time()))
}
fn add_years(date_time: NaiveDateTime, amount: i64) -> Option<NaiveDateTime> {
let year = i32::try_from((date_time.year() as i64).checked_add(amount)?).ok()?;
let ndays = ndays_in_month(year, date_time.month());
if date_time.day() > ndays {
NaiveDate::from_ymd_opt(year, date_time.month(), ndays)
.and_then(|date| date.succ_opt().map(|date| date.and_time(date_time.time())))
} else {
date_time.with_year(year)
}
}
fn add_duration(date_time: NaiveDateTime, duration: Duration) -> Option<NaiveDateTime> {
date_time.checked_add_signed(duration)
}
fn toggle_am_pm(date_time: NaiveDateTime) -> Option<NaiveDateTime> {
if date_time.hour() < 12 {
add_duration(date_time, Duration::hours(12))
} else {
add_duration(date_time, Duration::hours(-12))
}
}
#[cfg(test)]
mod test {
use super::*;
use crate::Rope;
#[test]
fn test_increment_date_times() {
let tests = [
// (original, cursor, amount, expected)
("2020-02-28", 0, 1, "2021-02-28"),
("2020-02-29", 0, 1, "2021-03-01"),
("2020-01-31", 5, 1, "2020-02-29"),
("2020-01-20", 5, 1, "2020-02-20"),
("2021-01-01", 5, -1, "2020-12-01"),
("2021-01-31", 5, -2, "2020-11-30"),
("2020-02-28", 8, 1, "2020-02-29"),
("2021-02-28", 8, 1, "2021-03-01"),
("2021-02-28", 0, -1, "2020-02-28"),
("2021-03-01", 0, -1, "2020-03-01"),
("2020-02-29", 5, -1, "2020-01-29"),
("2020-02-20", 5, -1, "2020-01-20"),
("2020-02-29", 8, -1, "2020-02-28"),
("2021-03-01", 8, -1, "2021-02-28"),
("1980/12/21", 8, 100, "1981/03/31"),
("1980/12/21", 8, -100, "1980/09/12"),
("1980/12/21", 8, 1000, "1983/09/17"),
("1980/12/21", 8, -1000, "1978/03/27"),
("2021-11-24 07:12:23", 0, 1, "2022-11-24 07:12:23"),
("2021-11-24 07:12:23", 5, 1, "2021-12-24 07:12:23"),
("2021-11-24 07:12:23", 8, 1, "2021-11-25 07:12:23"),
("2021-11-24 07:12:23", 11, 1, "2021-11-24 08:12:23"),
("2021-11-24 07:12:23", 14, 1, "2021-11-24 07:13:23"),
("2021-11-24 07:12:23", 17, 1, "2021-11-24 07:12:24"),
("2021/11/24 07:12:23", 0, 1, "2022/11/24 07:12:23"),
("2021/11/24 07:12:23", 5, 1, "2021/12/24 07:12:23"),
("2021/11/24 07:12:23", 8, 1, "2021/11/25 07:12:23"),
("2021/11/24 07:12:23", 11, 1, "2021/11/24 08:12:23"),
("2021/11/24 07:12:23", 14, 1, "2021/11/24 07:13:23"),
("2021/11/24 07:12:23", 17, 1, "2021/11/24 07:12:24"),
("2021-11-24 07:12", 0, 1, "2022-11-24 07:12"),
("2021-11-24 07:12", 5, 1, "2021-12-24 07:12"),
("2021-11-24 07:12", 8, 1, "2021-11-25 07:12"),
("2021-11-24 07:12", 11, 1, "2021-11-24 08:12"),
("2021-11-24 07:12", 14, 1, "2021-11-24 07:13"),
("2021/11/24 07:12", 0, 1, "2022/11/24 07:12"),
("2021/11/24 07:12", 5, 1, "2021/12/24 07:12"),
("2021/11/24 07:12", 8, 1, "2021/11/25 07:12"),
("2021/11/24 07:12", 11, 1, "2021/11/24 08:12"),
("2021/11/24 07:12", 14, 1, "2021/11/24 07:13"),
("Wed Nov 24 2021", 0, 1, "Thu Nov 25 2021"),
("Wed Nov 24 2021", 4, 1, "Fri Dec 24 2021"),
("Wed Nov 24 2021", 8, 1, "Thu Nov 25 2021"),
("Wed Nov 24 2021", 11, 1, "Thu Nov 24 2022"),
("24-Nov-2021", 0, 1, "25-Nov-2021"),
("24-Nov-2021", 3, 1, "24-Dec-2021"),
("24-Nov-2021", 7, 1, "24-Nov-2022"),
("2021 Nov 24", 0, 1, "2022 Nov 24"),
("2021 Nov 24", 5, 1, "2021 Dec 24"),
("2021 Nov 24", 9, 1, "2021 Nov 25"),
("Nov 24, 2021", 0, 1, "Dec 24, 2021"),
("Nov 24, 2021", 4, 1, "Nov 25, 2021"),
("Nov 24, 2021", 8, 1, "Nov 24, 2022"),
("7:21:53 am", 0, 1, "8:21:53 am"),
("7:21:53 am", 3, 1, "7:22:53 am"),
("7:21:53 am", 5, 1, "7:21:54 am"),
("7:21:53 am", 8, 1, "7:21:53 pm"),
("7:21:53 AM", 0, 1, "8:21:53 AM"),
("7:21:53 AM", 3, 1, "7:22:53 AM"),
("7:21:53 AM", 5, 1, "7:21:54 AM"),
("7:21:53 AM", 8, 1, "7:21:53 PM"),
("7:21 am", 0, 1, "8:21 am"),
("7:21 am", 3, 1, "7:22 am"),
("7:21 am", 5, 1, "7:21 pm"),
("7:21 AM", 0, 1, "8:21 AM"),
("7:21 AM", 3, 1, "7:22 AM"),
("7:21 AM", 5, 1, "7:21 PM"),
("23:24:23", 1, 1, "00:24:23"),
("23:24:23", 3, 1, "23:25:23"),
("23:24:23", 6, 1, "23:24:24"),
("23:24", 1, 1, "00:24"),
("23:24", 3, 1, "23:25"),
("2020-02-28", 1, "2020-02-29"),
("2020-02-29", 1, "2020-03-01"),
("2020-01-31", 1, "2020-02-01"),
("2020-01-20", 1, "2020-01-21"),
("2021-01-01", -1, "2020-12-31"),
("2021-01-31", -2, "2021-01-29"),
("2020-02-28", 1, "2020-02-29"),
("2021-02-28", 1, "2021-03-01"),
("2021-03-01", -1, "2021-02-28"),
("2020-02-29", -1, "2020-02-28"),
("2020-02-20", -1, "2020-02-19"),
("2021-03-01", -1, "2021-02-28"),
("1980/12/21", 100, "1981/03/31"),
("1980/12/21", -100, "1980/09/12"),
("1980/12/21", 1000, "1983/09/17"),
("1980/12/21", -1000, "1978/03/27"),
("2021-11-24 07:12:23", 1, "2021-11-24 07:13:23"),
("2021-11-24 07:12", 1, "2021-11-24 07:13"),
("Wed Nov 24 2021", 1, "Thu Nov 25 2021"),
("24-Nov-2021", 1, "25-Nov-2021"),
("2021 Nov 24", 1, "2021 Nov 25"),
("Nov 24, 2021", 1, "Nov 25, 2021"),
("7:21:53 am", 1, "7:22:53 am"),
("7:21:53 AM", 1, "7:22:53 AM"),
("7:21 am", 1, "7:22 am"),
("23:24:23", 1, "23:25:23"),
("23:24", 1, "23:25"),
("23:59", 1, "00:00"),
("23:59:59", 1, "00:00:59"),
];
for (original, cursor, amount, expected) in tests {
let rope = Rope::from_str(original);
let range = Range::new(cursor, cursor + 1);
assert_eq!(
DateTimeIncrementor::from_range(rope.slice(..), range)
.unwrap()
.increment(amount)
.1,
Tendril::from(expected)
);
for (original, amount, expected) in tests {
assert_eq!(increment(original, amount).unwrap(), expected);
}
}
@ -482,10 +314,7 @@ mod test {
];
for invalid in tests {
let rope = Rope::from_str(invalid);
let range = Range::new(0, 1);
assert_eq!(DateTimeIncrementor::from_range(rope.slice(..), range), None)
assert_eq!(increment(invalid, 1), None)
}
}
}

@ -0,0 +1,235 @@
const SEPARATOR: char = '_';
/// Increment an integer.
///
/// Supported bases:
/// 2 with prefix 0b
/// 8 with prefix 0o
/// 10 with no prefix
/// 16 with prefix 0x
///
/// An integer can contain `_` as a separator but may not start or end with a separator.
/// Base 10 integers can go negative, but bases 2, 8, and 16 cannot.
/// All addition and subtraction is saturating.
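///
/// For example (values taken from the tests below): `increment("99", 1)` yields `"100"`,
/// `increment("0x0100", -1)` yields `"0x00ff"`, and `increment("999_999", 1)` yields
/// `"1_000_000"`.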
pub fn increment(selected_text: &str, amount: i64) -> Option<String> {
if selected_text.is_empty()
|| selected_text.ends_with(SEPARATOR)
|| selected_text.starts_with(SEPARATOR)
{
return None;
}
let radix = if selected_text.starts_with("0x") {
16
} else if selected_text.starts_with("0o") {
8
} else if selected_text.starts_with("0b") {
2
} else {
10
};
// Get separator indexes from right to left.
let separator_rtl_indexes: Vec<usize> = selected_text
.chars()
.rev()
.enumerate()
.filter_map(|(i, c)| if c == SEPARATOR { Some(i) } else { None })
.collect();
let word: String = selected_text.chars().filter(|&c| c != SEPARATOR).collect();
let mut new_text = if radix == 10 {
let number = &word;
let value = i128::from_str_radix(number, radix).ok()?;
let new_value = value.saturating_add(amount as i128);
let format_length = match (value.is_negative(), new_value.is_negative()) {
(true, false) => number.len() - 1,
(false, true) => number.len() + 1,
_ => number.len(),
} - separator_rtl_indexes.len();
if number.starts_with('0') || number.starts_with("-0") {
format!("{:01$}", new_value, format_length)
} else {
format!("{}", new_value)
}
} else {
let number = &word[2..];
let value = u128::from_str_radix(number, radix).ok()?;
let new_value = (value as i128).saturating_add(amount as i128);
let new_value = if new_value < 0 { 0 } else { new_value };
let format_length = selected_text.len() - 2 - separator_rtl_indexes.len();
match radix {
2 => format!("0b{:01$b}", new_value, format_length),
8 => format!("0o{:01$o}", new_value, format_length),
16 => {
let (lower_count, upper_count): (usize, usize) =
number.chars().fold((0, 0), |(lower, upper), c| {
(
lower + c.is_ascii_lowercase() as usize,
upper + c.is_ascii_uppercase() as usize,
)
});
if upper_count > lower_count {
format!("0x{:01$X}", new_value, format_length)
} else {
format!("0x{:01$x}", new_value, format_length)
}
}
_ => unimplemented!("radix not supported: {}", radix),
}
};
// Add separators from original number.
for &rtl_index in &separator_rtl_indexes {
if rtl_index < new_text.len() {
let new_index = new_text.len().saturating_sub(rtl_index);
if new_index > 0 {
new_text.insert(new_index, SEPARATOR);
}
}
}
// Add in additional separators if necessary.
if new_text.len() > selected_text.len() && !separator_rtl_indexes.is_empty() {
let spacing = match separator_rtl_indexes.as_slice() {
[.., b, a] => a - b - 1,
_ => separator_rtl_indexes[0],
};
let prefix_length = if radix == 10 { 0 } else { 2 };
if let Some(mut index) = new_text.find(SEPARATOR) {
while index - prefix_length > spacing {
index -= spacing;
new_text.insert(index, SEPARATOR);
}
}
}
Some(new_text)
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn test_increment_basic_decimal_numbers() {
let tests = [
("100", 1, "101"),
("100", -1, "99"),
("99", 1, "100"),
("100", 1000, "1100"),
("100", -1000, "-900"),
("-1", 1, "0"),
("-1", 2, "1"),
("1", -1, "0"),
("1", -2, "-1"),
];
for (original, amount, expected) in tests {
assert_eq!(increment(original, amount).unwrap(), expected);
}
}
#[test]
fn test_increment_basic_hexadecimal_numbers() {
let tests = [
("0x0100", 1, "0x0101"),
("0x0100", -1, "0x00ff"),
("0x0001", -1, "0x0000"),
("0x0000", -1, "0x0000"),
("0xffffffffffffffff", 1, "0x10000000000000000"),
("0xffffffffffffffff", 2, "0x10000000000000001"),
("0xffffffffffffffff", -1, "0xfffffffffffffffe"),
("0xABCDEF1234567890", 1, "0xABCDEF1234567891"),
("0xabcdef1234567890", 1, "0xabcdef1234567891"),
];
for (original, amount, expected) in tests {
assert_eq!(increment(original, amount).unwrap(), expected);
}
}
#[test]
fn test_increment_basic_octal_numbers() {
let tests = [
("0o0107", 1, "0o0110"),
("0o0110", -1, "0o0107"),
("0o0001", -1, "0o0000"),
("0o7777", 1, "0o10000"),
("0o1000", -1, "0o0777"),
("0o0107", 10, "0o0121"),
("0o0000", -1, "0o0000"),
("0o1777777777777777777777", 1, "0o2000000000000000000000"),
("0o1777777777777777777777", 2, "0o2000000000000000000001"),
("0o1777777777777777777777", -1, "0o1777777777777777777776"),
];
for (original, amount, expected) in tests {
assert_eq!(increment(original, amount).unwrap(), expected);
}
}
#[test]
fn test_increment_basic_binary_numbers() {
let tests = [
("0b00000100", 1, "0b00000101"),
("0b00000100", -1, "0b00000011"),
("0b00000100", 2, "0b00000110"),
("0b00000100", -2, "0b00000010"),
("0b00000001", -1, "0b00000000"),
("0b00111111", 10, "0b01001001"),
("0b11111111", 1, "0b100000000"),
("0b10000000", -1, "0b01111111"),
("0b0000", -1, "0b0000"),
(
"0b1111111111111111111111111111111111111111111111111111111111111111",
1,
"0b10000000000000000000000000000000000000000000000000000000000000000",
),
(
"0b1111111111111111111111111111111111111111111111111111111111111111",
2,
"0b10000000000000000000000000000000000000000000000000000000000000001",
),
(
"0b1111111111111111111111111111111111111111111111111111111111111111",
-1,
"0b1111111111111111111111111111111111111111111111111111111111111110",
),
];
for (original, amount, expected) in tests {
assert_eq!(increment(original, amount).unwrap(), expected);
}
}
#[test]
fn test_increment_with_separators() {
let tests = [
("999_999", 1, "1_000_000"),
("1_000_000", -1, "999_999"),
("-999_999", -1, "-1_000_000"),
("0x0000_0000_0001", 0x1_ffff_0000, "0x0001_ffff_0001"),
("0x0000_0000", -1, "0x0000_0000"),
("0x0000_0000_0000", -1, "0x0000_0000_0000"),
("0b01111111_11111111", 1, "0b10000000_00000000"),
("0b11111111_11111111", 1, "0b1_00000000_00000000"),
];
for (original, amount, expected) in tests {
assert_eq!(increment(original, amount).unwrap(), expected);
}
}
#[test]
fn test_leading_and_trailing_separators_arent_a_match() {
assert_eq!(increment("9_", 1), None);
assert_eq!(increment("_9", 1), None);
assert_eq!(increment("_9_", 1), None);
}
}

@ -1,8 +1,10 @@
pub mod date_time;
pub mod number;
mod date_time;
mod integer;
use crate::{Range, Tendril};
pub fn integer(selected_text: &str, amount: i64) -> Option<String> {
integer::increment(selected_text, amount)
}
pub trait Increment {
fn increment(&self, amount: i64) -> (Range, Tendril);
pub fn date_time(selected_text: &str, amount: i64) -> Option<String> {
date_time::increment(selected_text, amount)
}
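// Sketch of a hypothetical call site (not part of this change): a caller that wants integer
// increments to take priority could try them first and fall back to dates, e.g.
// `integer(selected_text, amount).or_else(|| date_time(selected_text, amount))`.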

@ -1,507 +0,0 @@
use std::borrow::Cow;
use ropey::RopeSlice;
use super::Increment;
use crate::{
textobject::{textobject_word, TextObject},
Range, Tendril,
};
#[derive(Debug, PartialEq, Eq)]
pub struct NumberIncrementor<'a> {
value: i64,
radix: u32,
range: Range,
text: RopeSlice<'a>,
}
impl<'a> NumberIncrementor<'a> {
/// Return information about the number under the range, if there is one.
pub fn from_range(text: RopeSlice, range: Range) -> Option<NumberIncrementor> {
// If the cursor is on the minus sign of a number we want to get the word textobject to the
// right of it.
let range = if range.to() < text.len_chars()
&& range.to() - range.from() <= 1
&& text.char(range.from()) == '-'
{
Range::new(range.from() + 1, range.to() + 1)
} else {
range
};
let range = textobject_word(text, range, TextObject::Inside, 1, false);
// If there is a minus sign to the left of the word object, we want to include it in the range.
let range = if range.from() > 0 && text.char(range.from() - 1) == '-' {
range.extend(range.from() - 1, range.from())
} else {
range
};
let word: String = text
.slice(range.from()..range.to())
.chars()
.filter(|&c| c != '_')
.collect();
let (radix, prefixed) = if word.starts_with("0x") {
(16, true)
} else if word.starts_with("0o") {
(8, true)
} else if word.starts_with("0b") {
(2, true)
} else {
(10, false)
};
let number = if prefixed { &word[2..] } else { &word };
let value = i128::from_str_radix(number, radix).ok()?;
if (value.is_positive() && value.leading_zeros() < 64)
|| (value.is_negative() && value.leading_ones() < 64)
{
return None;
}
let value = value as i64;
Some(NumberIncrementor {
range,
value,
radix,
text,
})
}
}
impl<'a> Increment for NumberIncrementor<'a> {
fn increment(&self, amount: i64) -> (Range, Tendril) {
let old_text: Cow<str> = self.text.slice(self.range.from()..self.range.to()).into();
let old_length = old_text.len();
let new_value = self.value.wrapping_add(amount);
// Get separator indexes from right to left.
let separator_rtl_indexes: Vec<usize> = old_text
.chars()
.rev()
.enumerate()
.filter_map(|(i, c)| if c == '_' { Some(i) } else { None })
.collect();
let format_length = if self.radix == 10 {
match (self.value.is_negative(), new_value.is_negative()) {
(true, false) => old_length - 1,
(false, true) => old_length + 1,
_ => old_text.len(),
}
} else {
old_text.len() - 2
} - separator_rtl_indexes.len();
let mut new_text = match self.radix {
2 => format!("0b{:01$b}", new_value, format_length),
8 => format!("0o{:01$o}", new_value, format_length),
10 if old_text.starts_with('0') || old_text.starts_with("-0") => {
format!("{:01$}", new_value, format_length)
}
10 => format!("{}", new_value),
16 => {
let (lower_count, upper_count): (usize, usize) =
old_text.chars().skip(2).fold((0, 0), |(lower, upper), c| {
(
lower + usize::from(c.is_ascii_lowercase()),
upper + usize::from(c.is_ascii_uppercase()),
)
});
if upper_count > lower_count {
format!("0x{:01$X}", new_value, format_length)
} else {
format!("0x{:01$x}", new_value, format_length)
}
}
_ => unimplemented!("radix not supported: {}", self.radix),
};
// Add separators from original number.
for &rtl_index in &separator_rtl_indexes {
if rtl_index < new_text.len() {
let new_index = new_text.len() - rtl_index;
new_text.insert(new_index, '_');
}
}
// Add in additional separators if necessary.
if new_text.len() > old_length && !separator_rtl_indexes.is_empty() {
let spacing = match separator_rtl_indexes.as_slice() {
[.., b, a] => a - b - 1,
_ => separator_rtl_indexes[0],
};
let prefix_length = if self.radix == 10 { 0 } else { 2 };
if let Some(mut index) = new_text.find('_') {
while index - prefix_length > spacing {
index -= spacing;
new_text.insert(index, '_');
}
}
}
(self.range, new_text.into())
}
}
#[cfg(test)]
mod test {
use super::*;
use crate::Rope;
#[test]
fn test_decimal_at_point() {
let rope = Rope::from_str("Test text 12345 more text.");
let range = Range::point(12);
assert_eq!(
NumberIncrementor::from_range(rope.slice(..), range),
Some(NumberIncrementor {
range: Range::new(10, 15),
value: 12345,
radix: 10,
text: rope.slice(..),
})
);
}
#[test]
fn test_uppercase_hexadecimal_at_point() {
let rope = Rope::from_str("Test text 0x123ABCDEF more text.");
let range = Range::point(12);
assert_eq!(
NumberIncrementor::from_range(rope.slice(..), range),
Some(NumberIncrementor {
range: Range::new(10, 21),
value: 0x123ABCDEF,
radix: 16,
text: rope.slice(..),
})
);
}
#[test]
fn test_lowercase_hexadecimal_at_point() {
let rope = Rope::from_str("Test text 0xfa3b4e more text.");
let range = Range::point(12);
assert_eq!(
NumberIncrementor::from_range(rope.slice(..), range),
Some(NumberIncrementor {
range: Range::new(10, 18),
value: 0xfa3b4e,
radix: 16,
text: rope.slice(..),
})
);
}
#[test]
fn test_octal_at_point() {
let rope = Rope::from_str("Test text 0o1074312 more text.");
let range = Range::point(12);
assert_eq!(
NumberIncrementor::from_range(rope.slice(..), range),
Some(NumberIncrementor {
range: Range::new(10, 19),
value: 0o1074312,
radix: 8,
text: rope.slice(..),
})
);
}
#[test]
fn test_binary_at_point() {
let rope = Rope::from_str("Test text 0b10111010010101 more text.");
let range = Range::point(12);
assert_eq!(
NumberIncrementor::from_range(rope.slice(..), range),
Some(NumberIncrementor {
range: Range::new(10, 26),
value: 0b10111010010101,
radix: 2,
text: rope.slice(..),
})
);
}
#[test]
fn test_negative_decimal_at_point() {
let rope = Rope::from_str("Test text -54321 more text.");
let range = Range::point(12);
assert_eq!(
NumberIncrementor::from_range(rope.slice(..), range),
Some(NumberIncrementor {
range: Range::new(10, 16),
value: -54321,
radix: 10,
text: rope.slice(..),
})
);
}
#[test]
fn test_decimal_with_leading_zeroes_at_point() {
let rope = Rope::from_str("Test text 000045326 more text.");
let range = Range::point(12);
assert_eq!(
NumberIncrementor::from_range(rope.slice(..), range),
Some(NumberIncrementor {
range: Range::new(10, 19),
value: 45326,
radix: 10,
text: rope.slice(..),
})
);
}
#[test]
fn test_negative_decimal_cursor_on_minus_sign() {
let rope = Rope::from_str("Test text -54321 more text.");
let range = Range::point(10);
assert_eq!(
NumberIncrementor::from_range(rope.slice(..), range),
Some(NumberIncrementor {
range: Range::new(10, 16),
value: -54321,
radix: 10,
text: rope.slice(..),
})
);
}
#[test]
fn test_number_under_range_start_of_rope() {
let rope = Rope::from_str("100");
let range = Range::point(0);
assert_eq!(
NumberIncrementor::from_range(rope.slice(..), range),
Some(NumberIncrementor {
range: Range::new(0, 3),
value: 100,
radix: 10,
text: rope.slice(..),
})
);
}
#[test]
fn test_number_under_range_end_of_rope() {
let rope = Rope::from_str("100");
let range = Range::point(2);
assert_eq!(
NumberIncrementor::from_range(rope.slice(..), range),
Some(NumberIncrementor {
range: Range::new(0, 3),
value: 100,
radix: 10,
text: rope.slice(..),
})
);
}
#[test]
fn test_number_surrounded_by_punctuation() {
let rope = Rope::from_str(",100;");
let range = Range::point(1);
assert_eq!(
NumberIncrementor::from_range(rope.slice(..), range),
Some(NumberIncrementor {
range: Range::new(1, 4),
value: 100,
radix: 10,
text: rope.slice(..),
})
);
}
#[test]
fn test_not_a_number_point() {
let rope = Rope::from_str("Test text 45326 more text.");
let range = Range::point(6);
assert_eq!(NumberIncrementor::from_range(rope.slice(..), range), None);
}
#[test]
fn test_number_too_large_at_point() {
let rope = Rope::from_str("Test text 0xFFFFFFFFFFFFFFFFF more text.");
let range = Range::point(12);
assert_eq!(NumberIncrementor::from_range(rope.slice(..), range), None);
}
#[test]
fn test_number_cursor_one_right_of_number() {
let rope = Rope::from_str("100 ");
let range = Range::point(3);
assert_eq!(NumberIncrementor::from_range(rope.slice(..), range), None);
}
#[test]
fn test_number_cursor_one_left_of_number() {
let rope = Rope::from_str(" 100");
let range = Range::point(0);
assert_eq!(NumberIncrementor::from_range(rope.slice(..), range), None);
}
#[test]
fn test_increment_basic_decimal_numbers() {
let tests = [
("100", 1, "101"),
("100", -1, "99"),
("99", 1, "100"),
("100", 1000, "1100"),
("100", -1000, "-900"),
("-1", 1, "0"),
("-1", 2, "1"),
("1", -1, "0"),
("1", -2, "-1"),
];
for (original, amount, expected) in tests {
let rope = Rope::from_str(original);
let range = Range::point(0);
assert_eq!(
NumberIncrementor::from_range(rope.slice(..), range)
.unwrap()
.increment(amount)
.1,
Tendril::from(expected)
);
}
}
#[test]
fn test_increment_basic_hexadecimal_numbers() {
let tests = [
("0x0100", 1, "0x0101"),
("0x0100", -1, "0x00ff"),
("0x0001", -1, "0x0000"),
("0x0000", -1, "0xffffffffffffffff"),
("0xffffffffffffffff", 1, "0x0000000000000000"),
("0xffffffffffffffff", 2, "0x0000000000000001"),
("0xffffffffffffffff", -1, "0xfffffffffffffffe"),
("0xABCDEF1234567890", 1, "0xABCDEF1234567891"),
("0xabcdef1234567890", 1, "0xabcdef1234567891"),
];
for (original, amount, expected) in tests {
let rope = Rope::from_str(original);
let range = Range::point(0);
assert_eq!(
NumberIncrementor::from_range(rope.slice(..), range)
.unwrap()
.increment(amount)
.1,
Tendril::from(expected)
);
}
}
#[test]
fn test_increment_basic_octal_numbers() {
let tests = [
("0o0107", 1, "0o0110"),
("0o0110", -1, "0o0107"),
("0o0001", -1, "0o0000"),
("0o7777", 1, "0o10000"),
("0o1000", -1, "0o0777"),
("0o0107", 10, "0o0121"),
("0o0000", -1, "0o1777777777777777777777"),
("0o1777777777777777777777", 1, "0o0000000000000000000000"),
("0o1777777777777777777777", 2, "0o0000000000000000000001"),
("0o1777777777777777777777", -1, "0o1777777777777777777776"),
];
for (original, amount, expected) in tests {
let rope = Rope::from_str(original);
let range = Range::point(0);
assert_eq!(
NumberIncrementor::from_range(rope.slice(..), range)
.unwrap()
.increment(amount)
.1,
Tendril::from(expected)
);
}
}
#[test]
fn test_increment_basic_binary_numbers() {
let tests = [
("0b00000100", 1, "0b00000101"),
("0b00000100", -1, "0b00000011"),
("0b00000100", 2, "0b00000110"),
("0b00000100", -2, "0b00000010"),
("0b00000001", -1, "0b00000000"),
("0b00111111", 10, "0b01001001"),
("0b11111111", 1, "0b100000000"),
("0b10000000", -1, "0b01111111"),
(
"0b0000",
-1,
"0b1111111111111111111111111111111111111111111111111111111111111111",
),
(
"0b1111111111111111111111111111111111111111111111111111111111111111",
1,
"0b0000000000000000000000000000000000000000000000000000000000000000",
),
(
"0b1111111111111111111111111111111111111111111111111111111111111111",
2,
"0b0000000000000000000000000000000000000000000000000000000000000001",
),
(
"0b1111111111111111111111111111111111111111111111111111111111111111",
-1,
"0b1111111111111111111111111111111111111111111111111111111111111110",
),
];
for (original, amount, expected) in tests {
let rope = Rope::from_str(original);
let range = Range::point(0);
assert_eq!(
NumberIncrementor::from_range(rope.slice(..), range)
.unwrap()
.increment(amount)
.1,
Tendril::from(expected)
);
}
}
#[test]
fn test_increment_with_separators() {
let tests = [
("999_999", 1, "1_000_000"),
("1_000_000", -1, "999_999"),
("-999_999", -1, "-1_000_000"),
("0x0000_0000_0001", 0x1_ffff_0000, "0x0001_ffff_0001"),
("0x0000_0000_0001", 0x1_ffff_0000, "0x0001_ffff_0001"),
("0x0000_0000_0001", 0x1_ffff_0000, "0x0001_ffff_0001"),
("0x0000_0000", -1, "0xffff_ffff_ffff_ffff"),
("0x0000_0000_0000", -1, "0xffff_ffff_ffff_ffff"),
("0b01111111_11111111", 1, "0b10000000_00000000"),
("0b11111111_11111111", 1, "0b1_00000000_00000000"),
];
for (original, amount, expected) in tests {
let rope = Rope::from_str(original);
let range = Range::point(0);
assert_eq!(
NumberIncrementor::from_range(rope.slice(..), range)
.unwrap()
.increment(amount)
.1,
Tendril::from(expected)
);
}
}
}

@ -4,6 +4,7 @@ use tree_sitter::{Query, QueryCursor, QueryPredicateArg};
use crate::{
chars::{char_is_line_ending, char_is_whitespace},
graphemes::tab_width_at,
syntax::{LanguageConfiguration, RopeProvider, Syntax},
tree_sitter::Node,
Rope, RopeSlice,
@ -56,6 +57,14 @@ impl IndentStyle {
}
}
}
#[inline]
pub fn indent_width(&self, tab_width: usize) -> usize {
match *self {
IndentStyle::Tabs => tab_width,
IndentStyle::Spaces(width) => width as usize,
}
}
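// Illustrative example (hypothetical values, not part of this change):
// `IndentStyle::Spaces(2).indent_width(4)` is 2 while `IndentStyle::Tabs.indent_width(4)` is 4,
// so callers can pass a single `indent_width` to `indent_level_for_line` regardless of the
// configured style.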
}
/// Attempts to detect the indentation style used in a document.
@ -177,17 +186,17 @@ pub fn auto_detect_indent_style(document_text: &Rope) -> Option<IndentStyle> {
/// To determine indentation of a newly inserted line, figure out the indentation at the last col
/// of the previous line.
pub fn indent_level_for_line(line: RopeSlice, tab_width: usize) -> usize {
pub fn indent_level_for_line(line: RopeSlice, tab_width: usize, indent_width: usize) -> usize {
let mut len = 0;
for ch in line.chars() {
match ch {
'\t' => len += tab_width,
'\t' => len += tab_width_at(len, tab_width as u16),
' ' => len += 1,
_ => break,
}
}
len / tab_width
len / indent_width
}
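// Illustrative example (hypothetical values, not part of this change): with `tab_width = 8`
// and `indent_width = 4`, a line starting with a single tab now has indent level 2, whereas
// the previous `len / tab_width` computation reported 1.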
/// Computes for node and all ancestors whether they are the first node on their line.
@ -466,6 +475,7 @@ fn extend_nodes<'a>(
text: RopeSlice,
line: usize,
tab_width: usize,
indent_width: usize,
) {
let mut stop_extend = false;
@ -490,10 +500,12 @@ fn extend_nodes<'a>(
if deepest_preceding.end_position().row == line {
extend_node = true;
} else {
let cursor_indent = indent_level_for_line(text.line(line), tab_width);
let cursor_indent =
indent_level_for_line(text.line(line), tab_width, indent_width);
let node_indent = indent_level_for_line(
text.line(deepest_preceding.start_position().row),
tab_width,
indent_width,
);
if cursor_indent > node_indent {
extend_node = true;
@ -562,6 +574,7 @@ pub fn treesitter_indent_for_pos(
syntax: &Syntax,
indent_style: &IndentStyle,
tab_width: usize,
indent_width: usize,
text: RopeSlice,
line: usize,
pos: usize,
@ -604,7 +617,7 @@ pub fn treesitter_indent_for_pos(
&mut cursor,
text,
query_range,
new_line.then(|| (line, byte_pos)),
new_line.then_some((line, byte_pos)),
);
ts_parser.cursors.push(cursor);
(query_result, deepest_preceding)
@ -622,9 +635,10 @@ pub fn treesitter_indent_for_pos(
text,
line,
tab_width,
indent_width,
);
}
let mut first_in_line = get_first_in_line(node, new_line.then(|| byte_pos));
let mut first_in_line = get_first_in_line(node, new_line.then_some(byte_pos));
let mut result = Indentation::default();
// We always keep track of all the indent changes on one line, in order to only indent once
@ -709,6 +723,7 @@ pub fn indent_for_newline(
line_before_end_pos: usize,
current_line: usize,
) -> String {
let indent_width = indent_style.indent_width(tab_width);
if let (Some(query), Some(syntax)) = (
language_config.and_then(|config| config.indent_query()),
syntax,
@ -718,6 +733,7 @@ pub fn indent_for_newline(
syntax,
indent_style,
tab_width,
indent_width,
text,
line_before,
line_before_end_pos,
@ -726,7 +742,7 @@ pub fn indent_for_newline(
return indent;
};
}
let indent_level = indent_level_for_line(text.line(current_line), tab_width);
let indent_level = indent_level_for_line(text.line(current_line), tab_width, indent_width);
indent_style.as_str().repeat(indent_level)
}
@ -763,12 +779,22 @@ mod test {
#[test]
fn test_indent_level() {
let tab_width = 4;
let indent_width = 4;
let line = Rope::from(" fn new"); // 8 spaces
assert_eq!(indent_level_for_line(line.slice(..), tab_width), 2);
assert_eq!(
indent_level_for_line(line.slice(..), tab_width, indent_width),
2
);
let line = Rope::from("\t\t\tfn new"); // 3 tabs
assert_eq!(indent_level_for_line(line.slice(..), tab_width), 3);
assert_eq!(
indent_level_for_line(line.slice(..), tab_width, indent_width),
3
);
// mixed indentation
let line = Rope::from("\t \tfn new"); // 1 tab, 4 spaces, tab
assert_eq!(indent_level_for_line(line.slice(..), tab_width), 3);
assert_eq!(
indent_level_for_line(line.slice(..), tab_width, indent_width),
3
);
}
}

@ -6,6 +6,7 @@ pub mod comment;
pub mod config;
pub mod diagnostic;
pub mod diff;
pub mod doc_formatter;
pub mod graphemes;
pub mod history;
pub mod increment;
@ -24,6 +25,7 @@ pub mod shellwords;
pub mod surround;
pub mod syntax;
pub mod test;
pub mod text_annotations;
pub mod textobject;
mod transaction;
pub mod wrap;
@ -95,8 +97,12 @@ pub use {regex, tree_sitter};
pub use graphemes::RopeGraphemes;
pub use position::{
coords_at_pos, pos_at_coords, pos_at_visual_coords, visual_coords_at_pos, Position,
char_idx_at_visual_offset, coords_at_pos, pos_at_coords, visual_offset_from_anchor,
visual_offset_from_block, Position,
};
#[allow(deprecated)]
pub use position::{pos_at_visual_coords, visual_coords_at_pos};
pub use selection::{Range, Selection};
pub use smallvec::{smallvec, SmallVec};
pub use syntax::Syntax;

@ -203,6 +203,13 @@ pub fn line_end_char_index(slice: &RopeSlice, line: usize) -> usize {
.unwrap_or(0)
}
pub fn line_end_byte_index(slice: &RopeSlice, line: usize) -> usize {
slice.line_to_byte(line + 1)
- get_line_ending(&slice.line(line))
.map(|le| le.as_str().len())
.unwrap_or(0)
}
/// Fetches line `line_idx` from the passed rope slice, sans any line ending.
pub fn line_without_line_ending<'a>(slice: &'a RopeSlice, line_idx: usize) -> RopeSlice<'a> {
let start = slice.line_to_char(line_idx);

@ -4,16 +4,19 @@ use ropey::iter::Chars;
use tree_sitter::{Node, QueryCursor};
use crate::{
char_idx_at_visual_offset,
chars::{categorize_char, char_is_line_ending, CharCategory},
doc_formatter::TextFormat,
graphemes::{
next_grapheme_boundary, nth_next_grapheme_boundary, nth_prev_grapheme_boundary,
prev_grapheme_boundary,
},
line_ending::rope_is_line_ending,
pos_at_visual_coords,
position::char_idx_at_visual_block_offset,
syntax::LanguageConfiguration,
text_annotations::TextAnnotations,
textobject::TextObject,
visual_coords_at_pos, Position, Range, RopeSlice,
visual_offset_from_block, Range, RopeSlice,
};
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
@ -34,7 +37,8 @@ pub fn move_horizontally(
dir: Direction,
count: usize,
behaviour: Movement,
_: usize,
_: &TextFormat,
_: &mut TextAnnotations,
) -> Range {
let pos = range.cursor(slice);
@ -48,35 +52,116 @@ pub fn move_horizontally(
range.put_cursor(slice, new_pos, behaviour == Movement::Extend)
}
pub fn move_vertically_visual(
slice: RopeSlice,
range: Range,
dir: Direction,
count: usize,
behaviour: Movement,
text_fmt: &TextFormat,
annotations: &mut TextAnnotations,
) -> Range {
if !text_fmt.soft_wrap {
return move_vertically(slice, range, dir, count, behaviour, text_fmt, annotations);
}
annotations.clear_line_annotations();
let pos = range.cursor(slice);
// Compute the current position's 2d coordinates.
let (visual_pos, block_off) = visual_offset_from_block(slice, pos, pos, text_fmt, annotations);
let new_col = range
.old_visual_position
.map_or(visual_pos.col as u32, |(_, col)| col);
// Compute the new position.
let mut row_off = match dir {
Direction::Forward => count as isize,
Direction::Backward => -(count as isize),
};
// TODO how to handle inline annotations that span an entire visual line (very unlikely).
// Compute visual offset relative to block start to avoid traversing the block twice
row_off += visual_pos.row as isize;
let new_pos = char_idx_at_visual_offset(
slice,
block_off,
row_off,
new_col as usize,
text_fmt,
annotations,
)
.0;
// Special-case to avoid moving to the end of the last non-empty line.
if behaviour == Movement::Extend && slice.line(slice.char_to_line(new_pos)).len_chars() == 0 {
return range;
}
let mut new_range = range.put_cursor(slice, new_pos, behaviour == Movement::Extend);
new_range.old_visual_position = Some((0, new_col));
new_range
}
pub fn move_vertically(
slice: RopeSlice,
range: Range,
dir: Direction,
count: usize,
behaviour: Movement,
tab_width: usize,
text_fmt: &TextFormat,
annotations: &mut TextAnnotations,
) -> Range {
annotations.clear_line_annotations();
let pos = range.cursor(slice);
let line_idx = slice.char_to_line(pos);
let line_start = slice.line_to_char(line_idx);
// Compute the current position's 2d coordinates.
let Position { row, col } = visual_coords_at_pos(slice, pos, tab_width);
let horiz = range.horiz.unwrap_or(col as u32);
let visual_pos = visual_offset_from_block(slice, line_start, pos, text_fmt, annotations).0;
let (mut new_row, new_col) = range
.old_visual_position
.map_or((visual_pos.row as u32, visual_pos.col as u32), |pos| pos);
new_row = new_row.max(visual_pos.row as u32);
let line_idx = slice.char_to_line(pos);
// Compute the new position.
let new_row = match dir {
Direction::Forward => (row + count).min(slice.len_lines().saturating_sub(1)),
Direction::Backward => row.saturating_sub(count),
let mut new_line_idx = match dir {
Direction::Forward => line_idx.saturating_add(count),
Direction::Backward => line_idx.saturating_sub(count),
};
let line = if new_line_idx >= slice.len_lines() - 1 {
// there is no line terminator for the last line
// so the logic below is not necessary here
new_line_idx = slice.len_lines() - 1;
slice
} else {
// char_idx_at_visual_block_offset returns a one-past-the-end index
// in case it reaches the end of the slice
// to avoid moving to the next line in that case, the line terminator is removed from the line
let new_line_end = prev_grapheme_boundary(slice, slice.line_to_char(new_line_idx + 1));
slice.slice(..new_line_end)
};
let new_col = col.max(horiz as usize);
let new_pos = pos_at_visual_coords(slice, Position::new(new_row, new_col), tab_width);
let new_line_start = line.line_to_char(new_line_idx);
let (new_pos, _) = char_idx_at_visual_block_offset(
line,
new_line_start,
new_row as usize,
new_col as usize,
text_fmt,
annotations,
);
// Special-case to avoid moving to the end of the last non-empty line.
if behaviour == Movement::Extend && slice.line(new_row).len_chars() == 0 {
if behaviour == Movement::Extend && slice.line(new_line_idx).len_chars() == 0 {
return range;
}
let mut new_range = range.put_cursor(slice, new_pos, behaviour == Movement::Extend);
new_range.horiz = Some(horiz);
new_range.old_visual_position = Some((new_row, new_col));
new_range
}
@ -142,9 +227,15 @@ fn word_move(slice: RopeSlice, range: Range, count: usize, target: WordMotionTar
};
// Do the main work.
(0..count).fold(start_range, |r, _| {
slice.chars_at(r.head).range_to_target(target, r)
})
let mut range = start_range;
for _ in 0..count {
let next_range = slice.chars_at(range.head).range_to_target(target, range);
if range == next_range {
break;
}
range = next_range;
}
range
}
pub fn move_prev_paragraph(
@ -166,6 +257,7 @@ pub fn move_prev_paragraph(
let mut lines = slice.lines_at(line);
lines.reverse();
let mut lines = lines.map(rope_is_line_ending).peekable();
let mut last_line = line;
for _ in 0..count {
while lines.next_if(|&e| e).is_some() {
line -= 1;
@ -173,6 +265,10 @@ pub fn move_prev_paragraph(
while lines.next_if(|&e| !e).is_some() {
line -= 1;
}
if line == last_line {
break;
}
last_line = line;
}
let head = slice.line_to_char(line);
@ -208,6 +304,7 @@ pub fn move_next_paragraph(
line += 1;
}
let mut lines = slice.lines_at(line).map(rope_is_line_ending).peekable();
let mut last_line = line;
for _ in 0..count {
while lines.next_if(|&e| !e).is_some() {
line += 1;
@ -215,6 +312,10 @@ pub fn move_next_paragraph(
while lines.next_if(|&e| e).is_some() {
line += 1;
}
if line == last_line {
break;
}
last_line = line;
}
let head = slice.line_to_char(line);
let anchor = if behavior == Movement::Move {
@ -438,7 +539,14 @@ pub fn goto_treesitter_object(
// head of range should be at beginning
Some(Range::new(start_char, end_char))
};
(0..count).fold(range, |range, _| get_range(range).unwrap_or(range))
let mut last_range = range;
for _ in 0..count {
match get_range(last_range) {
Some(r) if r != last_range => last_range = r,
_ => break,
}
}
last_range
}
#[cfg(test)]
@ -473,7 +581,16 @@ mod test {
assert_eq!(
coords_at_pos(
slice,
move_vertically(slice, range, Direction::Forward, 1, Movement::Move, 4).head
move_vertically_visual(
slice,
range,
Direction::Forward,
1,
Movement::Move,
&TextFormat::default(),
&mut TextAnnotations::default(),
)
.head
),
(1, 3).into()
);
@ -497,7 +614,15 @@ mod test {
];
for ((direction, amount), coordinates) in moves_and_expected_coordinates {
range = move_horizontally(slice, range, direction, amount, Movement::Move, 0);
range = move_horizontally(
slice,
range,
direction,
amount,
Movement::Move,
&TextFormat::default(),
&mut TextAnnotations::default(),
);
assert_eq!(coords_at_pos(slice, range.head), coordinates.into())
}
}
@ -523,7 +648,15 @@ mod test {
];
for ((direction, amount), coordinates) in moves_and_expected_coordinates {
range = move_horizontally(slice, range, direction, amount, Movement::Move, 0);
range = move_horizontally(
slice,
range,
direction,
amount,
Movement::Move,
&TextFormat::default(),
&mut TextAnnotations::default(),
);
assert_eq!(coords_at_pos(slice, range.head), coordinates.into());
assert_eq!(range.head, range.anchor);
}
@ -545,7 +678,15 @@ mod test {
];
for (direction, amount) in moves {
range = move_horizontally(slice, range, direction, amount, Movement::Extend, 0);
range = move_horizontally(
slice,
range,
direction,
amount,
Movement::Extend,
&TextFormat::default(),
&mut TextAnnotations::default(),
);
assert_eq!(range.anchor, original_anchor);
}
}
@ -569,7 +710,15 @@ mod test {
];
for ((direction, amount), coordinates) in moves_and_expected_coordinates {
range = move_vertically(slice, range, direction, amount, Movement::Move, 4);
range = move_vertically_visual(
slice,
range,
direction,
amount,
Movement::Move,
&TextFormat::default(),
&mut TextAnnotations::default(),
);
assert_eq!(coords_at_pos(slice, range.head), coordinates.into());
assert_eq!(range.head, range.anchor);
}
@ -603,8 +752,24 @@ mod test {
for ((axis, direction, amount), coordinates) in moves_and_expected_coordinates {
range = match axis {
Axis::H => move_horizontally(slice, range, direction, amount, Movement::Move, 0),
Axis::V => move_vertically(slice, range, direction, amount, Movement::Move, 4),
Axis::H => move_horizontally(
slice,
range,
direction,
amount,
Movement::Move,
&TextFormat::default(),
&mut TextAnnotations::default(),
),
Axis::V => move_vertically_visual(
slice,
range,
direction,
amount,
Movement::Move,
&TextFormat::default(),
&mut TextAnnotations::default(),
),
};
assert_eq!(coords_at_pos(slice, range.head), coordinates.into());
assert_eq!(range.head, range.anchor);
@ -638,8 +803,24 @@ mod test {
for ((axis, direction, amount), coordinates) in moves_and_expected_coordinates {
range = match axis {
Axis::H => move_horizontally(slice, range, direction, amount, Movement::Move, 0),
Axis::V => move_vertically(slice, range, direction, amount, Movement::Move, 4),
Axis::H => move_horizontally(
slice,
range,
direction,
amount,
Movement::Move,
&TextFormat::default(),
&mut TextAnnotations::default(),
),
Axis::V => move_vertically_visual(
slice,
range,
direction,
amount,
Movement::Move,
&TextFormat::default(),
&mut TextAnnotations::default(),
),
};
assert_eq!(coords_at_pos(slice, range.head), coordinates.into());
assert_eq!(range.head, range.anchor);
@ -1293,7 +1474,7 @@ mod test {
let text = Rope::from(s.as_str());
let selection =
selection.transform(|r| move_prev_paragraph(text.slice(..), r, 1, Movement::Move));
let actual = crate::test::plain(&s, selection);
let actual = crate::test::plain(s.as_ref(), &selection);
assert_eq!(actual, expected, "\nbefore: `{:?}`", before);
}
}
@ -1316,7 +1497,7 @@ mod test {
let text = Rope::from(s.as_str());
let selection =
selection.transform(|r| move_prev_paragraph(text.slice(..), r, 2, Movement::Move));
let actual = crate::test::plain(&s, selection);
let actual = crate::test::plain(s.as_ref(), &selection);
assert_eq!(actual, expected, "\nbefore: `{:?}`", before);
}
}
@ -1339,7 +1520,7 @@ mod test {
let text = Rope::from(s.as_str());
let selection = selection
.transform(|r| move_prev_paragraph(text.slice(..), r, 1, Movement::Extend));
let actual = crate::test::plain(&s, selection);
let actual = crate::test::plain(s.as_ref(), &selection);
assert_eq!(actual, expected, "\nbefore: `{:?}`", before);
}
}
@ -1359,7 +1540,7 @@ mod test {
"a\nb\n\n#[goto\nthird\n\n|]#paragraph",
),
(
"a\nb#[\n|]#\ngoto\nsecond\n\nparagraph",
"a\nb#[\n|]#\n\ngoto\nsecond\n\nparagraph",
"a\nb#[\n\n|]#goto\nsecond\n\nparagraph",
),
(
@ -1381,7 +1562,7 @@ mod test {
let text = Rope::from(s.as_str());
let selection =
selection.transform(|r| move_next_paragraph(text.slice(..), r, 1, Movement::Move));
let actual = crate::test::plain(&s, selection);
let actual = crate::test::plain(s.as_ref(), &selection);
assert_eq!(actual, expected, "\nbefore: `{:?}`", before);
}
}
@ -1404,7 +1585,7 @@ mod test {
let text = Rope::from(s.as_str());
let selection =
selection.transform(|r| move_next_paragraph(text.slice(..), r, 2, Movement::Move));
let actual = crate::test::plain(&s, selection);
let actual = crate::test::plain(s.as_ref(), &selection);
assert_eq!(actual, expected, "\nbefore: `{:?}`", before);
}
}
@ -1427,7 +1608,7 @@ mod test {
let text = Rope::from(s.as_str());
let selection = selection
.transform(|r| move_next_paragraph(text.slice(..), r, 1, Movement::Extend));
let actual = crate::test::plain(&s, selection);
let actual = crate::test::plain(s.as_ref(), &selection);
assert_eq!(actual, expected, "\nbefore: `{:?}`", before);
}
}

@ -1,9 +1,11 @@
use std::borrow::Cow;
use std::{borrow::Cow, cmp::Ordering};
use crate::{
chars::char_is_line_ending,
doc_formatter::{DocumentFormatter, TextFormat},
graphemes::{ensure_grapheme_boundary_prev, grapheme_width, RopeGraphemes},
line_ending::line_end_char_index,
text_annotations::TextAnnotations,
RopeSlice,
};
@ -73,6 +75,13 @@ pub fn coords_at_pos(text: RopeSlice, pos: usize) -> Position {
/// Takes \t, double-width characters (CJK) into account as well as text
/// not in the document in the future.
/// See [`coords_at_pos`] for an "objective" one.
///
/// This function should be used very rarely. Usually `visual_offset_from_anchor`
/// or `visual_offset_from_block` is preferable. However when you want to compute the
/// actual visual row/column in the text (not what is actually shown on screen)
/// then you should use this function. For example aligning text should ignore virtual
/// text and softwrap.
#[deprecated = "Doesn't account for softwrap or decorations, use visual_offset_from_anchor instead"]
pub fn visual_coords_at_pos(text: RopeSlice, pos: usize, tab_width: usize) -> Position {
let line = text.char_to_line(pos);
@ -93,6 +102,82 @@ pub fn visual_coords_at_pos(text: RopeSlice, pos: usize, tab_width: usize) -> Po
Position::new(line, col)
}
/// Returns the visual offset from the start of the first visual line
/// in the block that contains anchor.
/// Text is always wrapped at blocks; these usually correspond to
/// actual line breaks, but for very long lines
/// softwrapping positions are estimated with an O(1) algorithm
/// to ensure consistent performance for large lines (currently unimplemented).
///
/// Usually you want to use `visual_offset_from_anchor` instead, but this function
/// can be useful (and faster) if
/// * You already know the visual position of the block
/// * You only care about the horizontal offset (column) and not the vertical offset (row)
pub fn visual_offset_from_block(
text: RopeSlice,
anchor: usize,
pos: usize,
text_fmt: &TextFormat,
annotations: &TextAnnotations,
) -> (Position, usize) {
let mut last_pos = Position::default();
let (formatter, block_start) =
DocumentFormatter::new_at_prev_checkpoint(text, text_fmt, annotations, anchor);
let mut char_pos = block_start;
for (grapheme, vpos) in formatter {
last_pos = vpos;
char_pos += grapheme.doc_chars();
if char_pos > pos {
return (last_pos, block_start);
}
}
(last_pos, block_start)
}
/// Returns the visual offset from the start of the visual line
/// that contains anchor.
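///
/// A minimal usage sketch (import paths assumed):
///
/// ```
/// use helix_core::doc_formatter::TextFormat;
/// use helix_core::text_annotations::TextAnnotations;
/// use helix_core::{visual_offset_from_anchor, Position, Rope};
///
/// let text = Rope::from("one\ntwo\nthree");
/// let fmt = TextFormat::default();
/// let annotations = TextAnnotations::default();
/// // `pos` (start of "three") is one visual line below `anchor` (start of "two").
/// let offset = visual_offset_from_anchor(text.slice(..), 4, 8, &fmt, &annotations, 10)
///     .map(|(pos, _)| pos);
/// assert_eq!(offset, Some(Position::new(1, 0)));
/// // `None` is returned once `pos` lies more than `max_rows` visual lines below `anchor`.
/// assert!(visual_offset_from_anchor(text.slice(..), 0, 10, &fmt, &annotations, 2).is_none());
/// ```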
pub fn visual_offset_from_anchor(
text: RopeSlice,
anchor: usize,
pos: usize,
text_fmt: &TextFormat,
annotations: &TextAnnotations,
max_rows: usize,
) -> Option<(Position, usize)> {
let (formatter, block_start) =
DocumentFormatter::new_at_prev_checkpoint(text, text_fmt, annotations, anchor);
let mut char_pos = block_start;
let mut anchor_line = None;
let mut last_pos = Position::default();
for (grapheme, vpos) in formatter {
last_pos = vpos;
char_pos += grapheme.doc_chars();
if char_pos > anchor && anchor_line.is_none() {
anchor_line = Some(last_pos.row);
}
if char_pos > pos {
last_pos.row -= anchor_line.unwrap();
return Some((last_pos, block_start));
}
if let Some(anchor_line) = anchor_line {
if vpos.row >= anchor_line + max_rows {
return None;
}
}
}
let anchor_line = anchor_line.unwrap_or(last_pos.row);
last_pos.row -= anchor_line;
Some((last_pos, block_start))
}
/// Convert (line, column) coordinates to a character index.
///
/// If the `line` coordinate is beyond the end of the file, the EOF
@ -140,6 +225,11 @@ pub fn pos_at_coords(text: RopeSlice, coords: Position, limit_before_line_ending
/// If the `column` coordinate is past the end of the given line, the
/// line-end position (in this case, just before the line ending
/// character) will be returned.
/// This function should be used very rarely. Usually `char_idx_at_visual_offset` is preferable.
/// However when you want to compute a char position from the visual row/column in the text
/// (not what is actually shown on screen) then you should use this function.
/// For example aligning text should ignore virtual text and softwrap.
#[deprecated = "Doesn't account for softwrap or decorations, use char_idx_at_visual_offset instead"]
pub fn pos_at_visual_coords(text: RopeSlice, coords: Position, tab_width: usize) -> usize {
let Position { mut row, col } = coords;
row = row.min(text.len_lines() - 1);
@ -169,6 +259,120 @@ pub fn pos_at_visual_coords(text: RopeSlice, coords: Position, tab_width: usize)
line_start + col_char_offset
}
/// Returns the char index on the visual line `row_offset` below the visual line of
/// the provided char index `anchor` that is closest to the supplied visual `column`.
///
/// If the targeted visual line is entirely covered by virtual text the last
/// char position before the virtual text and a virtual offset is returned instead.
///
/// If no (text) grapheme starts exactly at the specified column, the
/// start of the grapheme to the left is returned. If there is no grapheme
/// to the left (for example if the line starts with virtual text) then the position
/// of the next grapheme to the right is returned.
///
/// If the `line` coordinate is beyond the end of the file, the EOF
/// position will be returned.
///
/// If the `column` coordinate is past the end of the given line, the
/// line-end position (in this case, just before the line ending
/// character) will be returned.
///
/// # Returns
///
/// `(real_char_idx, virtual_lines)`
///
/// If the visual line contains any text, the character idx "closest" (see above)
/// to the specified visual offset on that visual line is returned.
/// If the visual line at the specified offset is a virtual line generated by a `LineAnnotation`,
/// the previous char index is returned, together with the remaining vertical offset (`virtual_lines`).
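///
/// A minimal usage sketch (import paths assumed):
///
/// ```
/// use helix_core::doc_formatter::TextFormat;
/// use helix_core::text_annotations::TextAnnotations;
/// use helix_core::{char_idx_at_visual_offset, Rope};
///
/// let text = Rope::from("hello\nworld\nfoo");
/// let fmt = TextFormat::default();
/// let annotations = TextAnnotations::default();
/// // From the first `l` of "hello" (char 2), one visual line down, column 2:
/// // the `r` of "world" (char 8), with no virtual lines in between.
/// let (char_idx, virtual_lines) =
///     char_idx_at_visual_offset(text.slice(..), 2, 1, 2, &fmt, &annotations);
/// assert_eq!((char_idx, virtual_lines), (8, 0));
/// ```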
pub fn char_idx_at_visual_offset<'a>(
text: RopeSlice<'a>,
mut anchor: usize,
mut row_offset: isize,
column: usize,
text_fmt: &TextFormat,
annotations: &TextAnnotations,
) -> (usize, usize) {
// convert row relative to visual line containing anchor to row relative to a block containing anchor (anchor may change)
loop {
let (visual_pos_in_block, block_char_offset) =
visual_offset_from_block(text, anchor, anchor, text_fmt, annotations);
row_offset += visual_pos_in_block.row as isize;
anchor = block_char_offset;
if row_offset >= 0 {
break;
}
if block_char_offset == 0 {
row_offset = 0;
break;
}
// the row_offset is negative so we need to look at the previous block
// set the anchor to the last char before the current block
// this char index is also always a line earlier so increase the row_offset by 1
anchor -= 1;
row_offset += 1;
}
char_idx_at_visual_block_offset(
text,
anchor,
row_offset as usize,
column,
text_fmt,
annotations,
)
}
/// This function behaves the same as `char_idx_at_visual_offset`, except that
/// the vertical offset `row` is always computed relative to the block that contains `anchor`
/// instead of the visual line that contains `anchor`.
/// Usually `char_idx_at_visual_offset` is more useful, but this function can be
/// used as an optimization in situations where `visual_offset_from_block` was already used.
///
/// # Returns
///
/// `(real_char_idx, virtual_lines)`
///
/// See `char_idx_at_visual_offset` for details
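///
/// For example, with the block starting at char 0 of `"hello\nworld"`, row `1` and
/// column `3` resolve to char index `9` (the second `l` of `world`) with no virtual lines.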
pub fn char_idx_at_visual_block_offset(
text: RopeSlice,
anchor: usize,
row: usize,
column: usize,
text_fmt: &TextFormat,
annotations: &TextAnnotations,
) -> (usize, usize) {
let (formatter, mut char_idx) =
DocumentFormatter::new_at_prev_checkpoint(text, text_fmt, annotations, anchor);
let mut last_char_idx = char_idx;
let mut last_char_idx_on_line = None;
let mut last_row = 0;
for (grapheme, grapheme_pos) in formatter {
match grapheme_pos.row.cmp(&row) {
Ordering::Equal => {
if grapheme_pos.col + grapheme.width() > column {
if !grapheme.is_virtual() {
return (char_idx, 0);
} else if let Some(char_idx) = last_char_idx_on_line {
return (char_idx, 0);
}
} else if !grapheme.is_virtual() {
last_char_idx_on_line = Some(char_idx)
}
}
Ordering::Greater => return (last_char_idx, row - last_row),
_ => (),
}
last_char_idx = char_idx;
last_row = grapheme_pos.row;
char_idx += grapheme.doc_chars();
}
(char_idx, 0)
}
#[cfg(test)]
mod test {
use super::*;
@ -228,6 +432,7 @@ mod test {
}
#[test]
#[allow(deprecated)]
fn test_visual_coords_at_pos() {
let text = Rope::from("ḧëḷḷö\nẅöṛḷḋ");
let slice = text.slice(..);
@ -275,6 +480,130 @@ mod test {
assert_eq!(visual_coords_at_pos(slice, 2, 8), (0, 9).into());
}
#[test]
fn test_visual_off_from_block() {
let text = Rope::from("ḧëḷḷö\nẅöṛḷḋ");
let slice = text.slice(..);
let annot = TextAnnotations::default();
let text_fmt = TextFormat::default();
assert_eq!(
visual_offset_from_block(slice, 0, 0, &text_fmt, &annot).0,
(0, 0).into()
);
assert_eq!(
visual_offset_from_block(slice, 0, 5, &text_fmt, &annot).0,
(0, 5).into()
); // position on \n
assert_eq!(
visual_offset_from_block(slice, 0, 6, &text_fmt, &annot).0,
(1, 0).into()
); // position on w
assert_eq!(
visual_offset_from_block(slice, 0, 7, &text_fmt, &annot).0,
(1, 1).into()
); // position on o
assert_eq!(
visual_offset_from_block(slice, 0, 10, &text_fmt, &annot).0,
(1, 4).into()
); // position on d
// Test with wide characters.
let text = Rope::from("今日はいい\n");
let slice = text.slice(..);
assert_eq!(
visual_offset_from_block(slice, 0, 0, &text_fmt, &annot).0,
(0, 0).into()
);
assert_eq!(
visual_offset_from_block(slice, 0, 1, &text_fmt, &annot).0,
(0, 2).into()
);
assert_eq!(
visual_offset_from_block(slice, 0, 2, &text_fmt, &annot).0,
(0, 4).into()
);
assert_eq!(
visual_offset_from_block(slice, 0, 3, &text_fmt, &annot).0,
(0, 6).into()
);
assert_eq!(
visual_offset_from_block(slice, 0, 4, &text_fmt, &annot).0,
(0, 8).into()
);
assert_eq!(
visual_offset_from_block(slice, 0, 5, &text_fmt, &annot).0,
(0, 10).into()
);
assert_eq!(
visual_offset_from_block(slice, 0, 6, &text_fmt, &annot).0,
(1, 0).into()
);
// Test with grapheme clusters.
let text = Rope::from("a̐éö̲\r\n");
let slice = text.slice(..);
assert_eq!(
visual_offset_from_block(slice, 0, 0, &text_fmt, &annot).0,
(0, 0).into()
);
assert_eq!(
visual_offset_from_block(slice, 0, 2, &text_fmt, &annot).0,
(0, 1).into()
);
assert_eq!(
visual_offset_from_block(slice, 0, 4, &text_fmt, &annot).0,
(0, 2).into()
);
assert_eq!(
visual_offset_from_block(slice, 0, 7, &text_fmt, &annot).0,
(0, 3).into()
);
assert_eq!(
visual_offset_from_block(slice, 0, 9, &text_fmt, &annot).0,
(1, 0).into()
);
// Test with wide-character grapheme clusters.
// TODO: account for cluster.
let text = Rope::from("किमपि\n");
let slice = text.slice(..);
assert_eq!(
visual_offset_from_block(slice, 0, 0, &text_fmt, &annot).0,
(0, 0).into()
);
assert_eq!(
visual_offset_from_block(slice, 0, 2, &text_fmt, &annot).0,
(0, 2).into()
);
assert_eq!(
visual_offset_from_block(slice, 0, 3, &text_fmt, &annot).0,
(0, 3).into()
);
assert_eq!(
visual_offset_from_block(slice, 0, 5, &text_fmt, &annot).0,
(0, 5).into()
);
assert_eq!(
visual_offset_from_block(slice, 0, 6, &text_fmt, &annot).0,
(1, 0).into()
);
// Test with tabs.
let text = Rope::from("\tHello\n");
let slice = text.slice(..);
assert_eq!(
visual_offset_from_block(slice, 0, 0, &text_fmt, &annot).0,
(0, 0).into()
);
assert_eq!(
visual_offset_from_block(slice, 0, 1, &text_fmt, &annot).0,
(0, 4).into()
);
assert_eq!(
visual_offset_from_block(slice, 0, 2, &text_fmt, &annot).0,
(0, 5).into()
);
}
#[test]
fn test_pos_at_coords() {
let text = Rope::from("ḧëḷḷö\nẅöṛḷḋ");
@ -341,6 +670,7 @@ mod test {
}
#[test]
#[allow(deprecated)]
fn test_pos_at_visual_coords() {
let text = Rope::from("ḧëḷḷö\nẅöṛḷḋ");
let slice = text.slice(..);
@ -405,4 +735,100 @@ mod test {
assert_eq!(pos_at_visual_coords(slice, (0, 10).into(), 4), 0);
assert_eq!(pos_at_visual_coords(slice, (10, 10).into(), 4), 0);
}
#[test]
fn test_char_idx_at_visual_row_offset() {
let text = Rope::from("ḧëḷḷö\nẅöṛḷḋ\nfoo");
let slice = text.slice(..);
let mut text_fmt = TextFormat::default();
for i in 0isize..3isize {
for j in -2isize..=2isize {
if !(0..3).contains(&(i + j)) {
continue;
}
println!("{i} {j}");
assert_eq!(
char_idx_at_visual_offset(
slice,
slice.line_to_char(i as usize),
j,
3,
&text_fmt,
&TextAnnotations::default(),
)
.0,
slice.line_to_char((i + j) as usize) + 3
);
}
}
text_fmt.soft_wrap = true;
let mut softwrapped_text = "foo ".repeat(10);
softwrapped_text.push('\n');
let last_char = softwrapped_text.len() - 1;
let text = Rope::from(softwrapped_text.repeat(3));
let slice = text.slice(..);
assert_eq!(
char_idx_at_visual_offset(
slice,
last_char,
0,
0,
&text_fmt,
&TextAnnotations::default(),
)
.0,
32
);
assert_eq!(
char_idx_at_visual_offset(
slice,
last_char,
-1,
0,
&text_fmt,
&TextAnnotations::default(),
)
.0,
16
);
assert_eq!(
char_idx_at_visual_offset(
slice,
last_char,
-2,
0,
&text_fmt,
&TextAnnotations::default(),
)
.0,
0
);
assert_eq!(
char_idx_at_visual_offset(
slice,
softwrapped_text.len() + last_char,
-2,
0,
&text_fmt,
&TextAnnotations::default(),
)
.0,
softwrapped_text.len()
);
assert_eq!(
char_idx_at_visual_offset(
slice,
softwrapped_text.len() + last_char,
-5,
0,
&text_fmt,
&TextAnnotations::default(),
)
.0,
0
);
}
}

@ -21,14 +21,14 @@ use std::borrow::Cow;
/// can be in any order, or even share the same position.
///
/// The anchor and head positions use gap indexing, meaning
/// that their indices represent the the gaps *between* `char`s
/// that their indices represent the gaps *between* `char`s
/// rather than the `char`s themselves. For example, 1
/// represents the position between the first and second `char`.
///
/// Below are some example `Range` configurations to better
/// illustrate. The anchor and head indices are show as
/// "(anchor, head)", followed by example text with "[" and "]"
/// inserted to represent the anchor and head positions:
/// Below are some examples of `Range` configurations.
/// The anchor and head indices are shown as "(anchor, head)"
/// tuples, followed by example text with "[" and "]" symbols
/// representing the anchor and head positions:
///
/// - (0, 3): `[Som]e text`.
/// - (3, 0): `]Som[e text`.
@ -53,7 +53,9 @@ pub struct Range {
pub anchor: usize,
/// The head of the range, moved when extending.
pub head: usize,
pub horiz: Option<u32>,
/// The previous visual offset (softwrapped lines and columns) from
/// the start of the line
pub old_visual_position: Option<(u32, u32)>,
}
impl Range {
@ -61,7 +63,7 @@ impl Range {
Self {
anchor,
head,
horiz: None,
old_visual_position: None,
}
}
@ -127,7 +129,7 @@ impl Range {
Self {
anchor: self.head,
head: self.anchor,
horiz: self.horiz,
old_visual_position: self.old_visual_position,
}
}
@ -185,7 +187,7 @@ impl Range {
Self {
anchor,
head,
horiz: None,
old_visual_position: None,
}
}
@ -198,13 +200,13 @@ impl Range {
Self {
anchor: self.anchor.min(from),
head: self.head.max(to),
horiz: None,
old_visual_position: None,
}
} else {
Self {
anchor: self.anchor.max(to),
head: self.head.min(from),
horiz: None,
old_visual_position: None,
}
}
}
@ -219,13 +221,13 @@ impl Range {
Range {
anchor: self.anchor.max(other.anchor),
head: self.head.min(other.head),
horiz: None,
old_visual_position: None,
}
} else {
Range {
anchor: self.from().min(other.from()),
head: self.to().max(other.to()),
horiz: None,
old_visual_position: None,
}
}
}
@ -279,8 +281,8 @@ impl Range {
Range {
anchor: new_anchor,
head: new_head,
horiz: if new_anchor == self.anchor {
self.horiz
old_visual_position: if new_anchor == self.anchor {
self.old_visual_position
} else {
None
},
@ -306,7 +308,7 @@ impl Range {
Range {
anchor: self.anchor,
head: next_grapheme_boundary(slice, self.head),
horiz: self.horiz,
old_visual_position: self.old_visual_position,
}
} else {
*self
@ -378,7 +380,7 @@ impl From<(usize, usize)> for Range {
Self {
anchor,
head,
horiz: None,
old_visual_position: None,
}
}
}
@ -482,7 +484,7 @@ impl Selection {
ranges: smallvec![Range {
anchor,
head,
horiz: None
old_visual_position: None
}],
primary_index: 0,
}
@ -566,9 +568,9 @@ impl Selection {
}
/// Takes a closure and maps each `Range` over the closure.
pub fn transform<F>(mut self, f: F) -> Self
pub fn transform<F>(mut self, mut f: F) -> Self
where
F: Fn(Range) -> Range,
F: FnMut(Range) -> Range,
{
for range in self.ranges.iter_mut() {
*range = f(*range)
@ -576,6 +578,16 @@ impl Selection {
self.normalize()
}
/// Takes a closure and maps each `Range` over the closure to multiple `Range`s.
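///
/// A minimal usage sketch (assuming the usual `Range`/`Selection` re-exports):
///
/// ```
/// use helix_core::{Range, Selection};
///
/// // Split every range into a cursor at each end; the result is re-normalized.
/// let selection = Selection::single(0, 6);
/// let cursors = selection.transform_iter(|range| {
///     [Range::point(range.from()), Range::point(range.to())].into_iter()
/// });
/// assert_eq!(cursors.ranges().len(), 2);
/// ```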
pub fn transform_iter<F, I>(mut self, f: F) -> Self
where
F: FnMut(Range) -> I,
I: Iterator<Item = Range>,
{
self.ranges = self.ranges.into_iter().flat_map(f).collect();
self.normalize()
}
// Ensures the selection adheres to the following invariants:
// 1. All ranges are grapheme aligned.
// 2. All ranges are at least 1 character wide, unless at the
@ -613,11 +625,6 @@ impl Selection {
// returns true if self ⊇ other
pub fn contains(&self, other: &Selection) -> bool {
// can't contain other if it is larger
if other.len() > self.len() {
return false;
}
let (mut iter_self, mut iter_other) = (self.iter(), other.iter());
let (mut ele_self, mut ele_other) = (iter_self.next(), iter_other.next());
@ -654,6 +661,15 @@ impl<'a> IntoIterator for &'a Selection {
}
}
impl IntoIterator for Selection {
type Item = Range;
type IntoIter = smallvec::IntoIter<[Range; 1]>;
fn into_iter(self) -> smallvec::IntoIter<[Range; 1]> {
self.ranges.into_iter()
}
}
// TODO: checkSelection -> check if valid for doc length && sorted
pub fn keep_or_remove_matches(
@ -1228,5 +1244,11 @@ mod test {
vec!((3, 4), (7, 9))
));
assert!(!contains(vec!((1, 1), (5, 6)), vec!((1, 6))));
// multiple ranges of `other` are all contained within ranges of `self`
assert!(contains(
vec!((1, 4), (7, 10)),
vec!((1, 2), (3, 4), (7, 9))
));
}
}

@ -129,8 +129,9 @@ impl<'a> From<&'a str> for Shellwords<'a> {
DquoteEscaped => Dquoted,
};
if i >= input.len() - 1 && end == 0 {
end = i + 1;
let c_len = c.len_utf8();
if i == input.len() - c_len && end == 0 {
end = i + c_len;
}
if end > 0 {
@ -333,4 +334,17 @@ mod test {
assert_eq!(Shellwords::from(":o a").parts(), &[":o", "a"]);
assert_eq!(Shellwords::from(":o a\\ ").parts(), &[":o", "a\\"]);
}
#[test]
fn test_multibyte_at_end() {
assert_eq!(Shellwords::from("𒀀").parts(), &["𒀀"]);
assert_eq!(
Shellwords::from(":sh echo 𒀀").parts(),
&[":sh", "echo", "𒀀"]
);
assert_eq!(
Shellwords::from(":sh echo 𒀀 hello world𒀀").parts(),
&[":sh", "echo", "𒀀", "hello", "world𒀀"]
);
}
}

@ -1,6 +1,6 @@
use std::fmt::Display;
use crate::{search, Range, Selection};
use crate::{movement::Direction, search, Range, Selection};
use ropey::RopeSlice;
pub const PAIRS: &[(char, char)] = &[
@ -55,15 +55,18 @@ pub fn get_pair(ch: char) -> (char, char) {
pub fn find_nth_closest_pairs_pos(
text: RopeSlice,
range: Range,
n: usize,
mut skip: usize,
) -> Result<(usize, usize)> {
let is_open_pair = |ch| PAIRS.iter().any(|(open, _)| *open == ch);
let is_close_pair = |ch| PAIRS.iter().any(|(_, close)| *close == ch);
let mut stack = Vec::with_capacity(2);
let pos = range.cursor(text);
let pos = range.from();
let mut close_pos = pos.saturating_sub(1);
for ch in text.chars_at(pos) {
close_pos += 1;
if is_open_pair(ch) {
// Track open pairs encountered so that we can step over
// the corresponding close pairs that will come up further
// down the loop because the open pair is before the cursor position.
// open pair is before the cursor position.
stack.push(ch);
continue;
} else if is_close_pair(ch) {
let (open, _) = get_pair(ch);
if stack.last() == Some(&open) {
stack.pop();
continue;
} else {
// In the ideal case the stack would be empty here and the
// current character would be the close pair that we are
// looking for. It could also be the case that the pairs
// are unbalanced and we encounter a close pair that doesn't
// close the last seen open pair. In either case use this
// char as the auto-detected closest pair.
return find_nth_pairs_pos(text, ch, range, n);
}
if !is_close_pair(ch) {
// We don't care if this character isn't a brace pair item,
// so short circuit here.
continue;
}
let (open, close) = get_pair(ch);
if stack.last() == Some(&open) {
// If we are encountering the closing pair for an opener
// we just found while traversing, then it's inside the
// selection and should be skipped over.
stack.pop();
continue;
}
match find_nth_open_pair(text, open, close, close_pos, 1) {
// Before we accept this pair, we want to ensure that the
// pair encloses the range rather than just the cursor.
Some(open_pos)
if open_pos <= pos.saturating_add(1)
&& close_pos >= range.to().saturating_sub(1) =>
{
// Since we have special conditions for when to
// accept, we can't just pass the skip parameter on
// through to the find_nth_*_pair methods, so we
// track skips manually here.
if skip > 1 {
skip -= 1;
continue;
}
return match range.direction() {
Direction::Forward => Ok((open_pos, close_pos)),
Direction::Backward => Ok((close_pos, open_pos)),
};
}
_ => continue,
}
}
@ -244,141 +273,140 @@ mod test {
use ropey::Rope;
use smallvec::SmallVec;
#[allow(clippy::type_complexity)]
fn check_find_nth_pair_pos(
text: &str,
cases: Vec<(usize, char, usize, Result<(usize, usize)>)>,
) {
let doc = Rope::from(text);
let slice = doc.slice(..);
for (cursor_pos, ch, n, expected_range) in cases {
let range = find_nth_pairs_pos(slice, ch, (cursor_pos, cursor_pos + 1).into(), n);
assert_eq!(
range, expected_range,
"Expected {:?}, got {:?}",
expected_range, range
#[test]
fn test_get_surround_pos() {
#[rustfmt::skip]
let (doc, selection, expectations) =
rope_with_selections_and_expectations(
"(some) (chars)\n(newline)",
"_ ^ _ _ ^ _\n_ ^ _"
);
}
assert_eq!(
get_surround_pos(doc.slice(..), &selection, Some('('), 1).unwrap(),
expectations
);
}
#[test]
fn test_find_nth_pairs_pos() {
check_find_nth_pair_pos(
"some (text) here",
vec![
// cursor on [t]ext
(6, '(', 1, Ok((5, 10))),
(6, ')', 1, Ok((5, 10))),
// cursor on so[m]e
(2, '(', 1, Err(Error::PairNotFound)),
// cursor on bracket itself
(5, '(', 1, Ok((5, 10))),
(10, '(', 1, Ok((5, 10))),
],
fn test_get_surround_pos_bail_different_surround_chars() {
#[rustfmt::skip]
let (doc, selection, _) =
rope_with_selections_and_expectations(
"[some]\n(chars)xx\n(newline)",
" ^ \n ^ \n "
);
assert_eq!(
get_surround_pos(doc.slice(..), &selection, Some('('), 1),
Err(Error::PairNotFound)
);
}
#[test]
fn test_find_nth_pairs_pos_skip() {
check_find_nth_pair_pos(
"(so (many (good) text) here)",
vec![
// cursor on go[o]d
(13, '(', 1, Ok((10, 15))),
(13, '(', 2, Ok((4, 21))),
(13, '(', 3, Ok((0, 27))),
],
fn test_get_surround_pos_bail_overlapping_surround_chars() {
#[rustfmt::skip]
let (doc, selection, _) =
rope_with_selections_and_expectations(
"[some]\n(chars)xx\n(newline)",
" \n ^ \n ^ "
);
assert_eq!(
get_surround_pos(doc.slice(..), &selection, Some('('), 1),
Err(Error::PairNotFound) // overlapping surround chars
);
}
#[test]
fn test_find_nth_pairs_pos_same() {
check_find_nth_pair_pos(
"'so 'many 'good' text' here'",
vec![
// cursor on go[o]d
(13, '\'', 1, Ok((10, 15))),
(13, '\'', 2, Ok((4, 21))),
(13, '\'', 3, Ok((0, 27))),
// cursor on the quotes
(10, '\'', 1, Err(Error::CursorOnAmbiguousPair)),
],
)
fn test_get_surround_pos_bail_cursor_overlap() {
#[rustfmt::skip]
let (doc, selection, _) =
rope_with_selections_and_expectations(
"[some]\n(chars)xx\n(newline)",
" ^^ \n \n "
);
assert_eq!(
get_surround_pos(doc.slice(..), &selection, Some('['), 1),
Err(Error::CursorOverlap)
);
}
#[test]
fn test_find_nth_pairs_pos_step() {
check_find_nth_pair_pos(
"((so)((many) good (text))(here))",
vec![
// cursor on go[o]d
(15, '(', 1, Ok((5, 24))),
(15, '(', 2, Ok((0, 31))),
],
fn test_find_nth_pairs_pos_quote_success() {
#[rustfmt::skip]
let (doc, selection, expectations) =
rope_with_selections_and_expectations(
"some 'quoted text' on this 'line'\n'and this one'",
" _ ^ _ \n "
);
assert_eq!(2, expectations.len());
assert_eq!(
find_nth_pairs_pos(doc.slice(..), '\'', selection.primary(), 1)
.expect("find should succeed"),
(expectations[0], expectations[1])
)
}
#[test]
fn test_find_nth_pairs_pos_mixed() {
check_find_nth_pair_pos(
"(so [many {good} text] here)",
vec![
// cursor on go[o]d
(13, '{', 1, Ok((10, 15))),
(13, '[', 1, Ok((4, 21))),
(13, '(', 1, Ok((0, 27))),
],
fn test_find_nth_pairs_pos_nested_quote_success() {
#[rustfmt::skip]
let (doc, selection, expectations) =
rope_with_selections_and_expectations(
"some 'nested 'quoted' text' on this 'line'\n'and this one'",
" _ ^ _ \n "
);
assert_eq!(2, expectations.len());
assert_eq!(
find_nth_pairs_pos(doc.slice(..), '\'', selection.primary(), 2)
.expect("find should succeed"),
(expectations[0], expectations[1])
)
}
#[test]
fn test_get_surround_pos() {
let doc = Rope::from("(some) (chars)\n(newline)");
let slice = doc.slice(..);
let selection = Selection::new(
SmallVec::from_slice(&[Range::point(2), Range::point(9), Range::point(20)]),
0,
);
fn test_find_nth_pairs_pos_inside_quote_ambiguous() {
#[rustfmt::skip]
let (doc, selection, _) =
rope_with_selections_and_expectations(
"some 'nested 'quoted' text' on this 'line'\n'and this one'",
" ^ \n "
);
// cursor on s[o]me, c[h]ars, newl[i]ne
assert_eq!(
get_surround_pos(slice, &selection, Some('('), 1)
.unwrap()
.as_slice(),
&[0, 5, 7, 13, 15, 23]
);
find_nth_pairs_pos(doc.slice(..), '\'', selection.primary(), 1),
Err(Error::CursorOnAmbiguousPair)
)
}
#[test]
fn test_get_surround_pos_bail() {
let doc = Rope::from("[some]\n(chars)xx\n(newline)");
let slice = doc.slice(..);
// Create a Rope and a matching Selection using a specification language.
// ^ is a single-point selection.
// _ is an expected index. These are returned as a Vec<usize> for use in assertions.
fn rope_with_selections_and_expectations(
text: &str,
spec: &str,
) -> (Rope, Selection, Vec<usize>) {
if text.len() != spec.len() {
panic!("specification must match text length -- are newlines aligned?");
}
let selection =
Selection::new(SmallVec::from_slice(&[Range::point(2), Range::point(9)]), 0);
// cursor on s[o]me, c[h]ars
assert_eq!(
get_surround_pos(slice, &selection, Some('('), 1),
Err(Error::PairNotFound) // different surround chars
);
let rope = Rope::from(text);
let selection = Selection::new(
SmallVec::from_slice(&[Range::point(14), Range::point(24)]),
0,
);
// cursor on [x]x, newli[n]e
assert_eq!(
get_surround_pos(slice, &selection, Some('('), 1),
Err(Error::PairNotFound) // overlapping surround chars
);
let selections: SmallVec<[Range; 1]> = spec
.match_indices('^')
.into_iter()
.map(|(i, _)| Range::point(i))
.collect();
let selection =
Selection::new(SmallVec::from_slice(&[Range::point(2), Range::point(3)]), 0);
// cursor on s[o][m]e
assert_eq!(
get_surround_pos(slice, &selection, Some('['), 1),
Err(Error::CursorOverlap)
);
let expectations: Vec<usize> = spec
.match_indices('_')
.into_iter()
.map(|(i, _)| i)
.collect();
(rope, Selection::new(selections, 0), expectations)
}
}

@ -82,7 +82,8 @@ pub struct LanguageConfiguration {
pub shebangs: Vec<String>, // interpreter(s) associated with language
pub roots: Vec<String>, // these indicate project roots <.git, Cargo.toml>
pub comment_token: Option<String>,
pub max_line_length: Option<usize>,
pub text_width: Option<usize>,
pub soft_wrap: Option<SoftWrap>,
#[serde(default, skip_serializing, deserialize_with = "deserialize_lsp_config")]
pub config: Option<serde_json::Value>,
@ -427,7 +428,7 @@ impl TextObjectQuery {
let nodes: Vec<_> = mat
.captures
.iter()
.filter_map(|cap| (cap.index == capture_idx).then(|| cap.node))
.filter_map(|cap| (cap.index == capture_idx).then_some(cap.node))
.collect();
if nodes.len() > 1 {
@ -546,6 +547,33 @@ impl LanguageConfiguration {
.ok()
}
}
#[derive(Debug, Clone, Default, PartialEq, Eq, Serialize, Deserialize)]
#[serde(default, rename_all = "kebab-case", deny_unknown_fields)]
pub struct SoftWrap {
/// Soft wrap lines that exceed viewport width. Defaults to off
pub enable: Option<bool>,
/// Maximum space left free at the end of the line.
/// This space is used to wrap text at word boundaries. If that is not possible within this limit
/// the word is simply split at the end of the line.
///
/// This is automatically hard-limited to a quarter of the viewport to ensure correct display on small views.
///
/// Defaults to 20
pub max_wrap: Option<u16>,
/// Maximum amount of indentation that can be carried over from the previous line when softwrapping.
/// If a line is indented further than this limit it is rendered at the start of the viewport instead.
///
/// This is automatically hard-limited to a quarter of the viewport to ensure correct display on small views.
///
/// Defaults to 40
pub max_indent_retain: Option<u16>,
/// Indicator placed at the beginning of softwrapped lines
///
/// Defaults to ↪
pub wrap_indicator: Option<String>,
/// Softwrap at `text_width` instead of viewport width if it is shorter
pub wrap_at_text_width: Option<bool>,
}
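// A minimal sketch, assuming `toml` is available to tests: the serde attributes above
// map these fields to kebab-case configuration keys such as `max-wrap` and `wrap-indicator`.
#[cfg(test)]
mod soft_wrap_example {
    use super::SoftWrap;

    #[test]
    fn deserializes_kebab_case_keys() {
        let soft_wrap: SoftWrap =
            toml::from_str("enable = true\nmax-wrap = 25\nwrap-indicator = \"↪ \"").unwrap();
        assert_eq!(soft_wrap.enable, Some(true));
        assert_eq!(soft_wrap.max_wrap, Some(25));
        assert_eq!(soft_wrap.wrap_indicator.as_deref(), Some("↪ "));
    }
}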
// Expose loader as Lazy<> global since it's always static?
@ -1092,21 +1120,14 @@ impl Syntax {
}],
cursor,
_tree: None,
captures,
captures: RefCell::new(captures),
config: layer.config.as_ref(), // TODO: just reuse `layer`
depth: layer.depth, // TODO: just reuse `layer`
ranges: &layer.ranges, // TODO: temp
})
})
.collect::<Vec<_>>();
// HAXX: arrange layers by byte range, with deeper layers positioned first
layers.sort_by_key(|layer| {
(
layer.ranges.first().cloned(),
std::cmp::Reverse(layer.depth),
)
});
layers.sort_unstable_by_key(|layer| layer.sort_key());
let mut result = HighlightIter {
source,
@ -1136,6 +1157,7 @@ impl Syntax {
bitflags! {
/// Flags that track the status of a layer
/// in the `Syntax::update` function
#[derive(Debug)]
struct LayerUpdateFlags : u32{
const MODIFIED = 0b001;
const MOVED = 0b010;
@ -1424,12 +1446,11 @@ impl<'a> TextProvider<'a> for RopeProvider<'a> {
struct HighlightIterLayer<'a> {
_tree: Option<Tree>,
cursor: QueryCursor,
captures: iter::Peekable<QueryCaptures<'a, 'a, RopeProvider<'a>>>,
captures: RefCell<iter::Peekable<QueryCaptures<'a, 'a, RopeProvider<'a>>>>,
config: &'a HighlightConfiguration,
highlight_end_stack: Vec<usize>,
scope_stack: Vec<LocalScope<'a>>,
depth: u32,
ranges: &'a [Range],
}
impl<'a> fmt::Debug for HighlightIterLayer<'a> {
@ -1610,10 +1631,11 @@ impl<'a> HighlightIterLayer<'a> {
// First, sort scope boundaries by their byte offset in the document. At a
// given position, emit scope endings before scope beginnings. Finally, emit
// scope boundaries from deeper layers first.
fn sort_key(&mut self) -> Option<(usize, bool, isize)> {
fn sort_key(&self) -> Option<(usize, bool, isize)> {
let depth = -(self.depth as isize);
let next_start = self
.captures
.borrow_mut()
.peek()
.map(|(m, i)| m.captures[*i].node.start_byte());
let next_end = self.highlight_end_stack.last().cloned();
@ -1838,7 +1860,8 @@ impl<'a> Iterator for HighlightIter<'a> {
// Get the next capture from whichever layer has the earliest highlight boundary.
let range;
let layer = &mut self.layers[0];
if let Some((next_match, capture_index)) = layer.captures.peek() {
let captures = layer.captures.get_mut();
if let Some((next_match, capture_index)) = captures.peek() {
let next_capture = next_match.captures[*capture_index];
range = next_capture.node.byte_range();
@ -1861,7 +1884,7 @@ impl<'a> Iterator for HighlightIter<'a> {
return self.emit_event(self.source.len_bytes(), None);
};
let (mut match_, capture_index) = layer.captures.next().unwrap();
let (mut match_, capture_index) = captures.next().unwrap();
let mut capture = match_.captures[capture_index];
// Remove from the local scope stack any local scopes that have already ended.
@ -1937,11 +1960,11 @@ impl<'a> Iterator for HighlightIter<'a> {
}
// Continue processing any additional matches for the same node.
if let Some((next_match, next_capture_index)) = layer.captures.peek() {
if let Some((next_match, next_capture_index)) = captures.peek() {
let next_capture = next_match.captures[*next_capture_index];
if next_capture.node == capture.node {
capture = next_capture;
match_ = layer.captures.next().unwrap().0;
match_ = captures.next().unwrap().0;
continue;
}
}
@ -1964,11 +1987,11 @@ impl<'a> Iterator for HighlightIter<'a> {
// highlighting patterns that are disabled for local variables.
if definition_highlight.is_some() || reference_highlight.is_some() {
while layer.config.non_local_variable_patterns[match_.pattern_index] {
if let Some((next_match, next_capture_index)) = layer.captures.peek() {
if let Some((next_match, next_capture_index)) = captures.peek() {
let next_capture = next_match.captures[*next_capture_index];
if next_capture.node == capture.node {
capture = next_capture;
match_ = layer.captures.next().unwrap().0;
match_ = captures.next().unwrap().0;
continue;
}
}
@ -1983,10 +2006,10 @@ impl<'a> Iterator for HighlightIter<'a> {
// for a given node are ordered by pattern index, so these subsequent
// captures are guaranteed to be for highlighting, not injections or
// local variables.
while let Some((next_match, next_capture_index)) = layer.captures.peek() {
while let Some((next_match, next_capture_index)) = captures.peek() {
let next_capture = next_match.captures[*next_capture_index];
if next_capture.node == capture.node {
layer.captures.next();
captures.next();
} else {
break;
}

@ -1,7 +1,9 @@
//! Test helpers.
use crate::{Range, Selection};
use ropey::Rope;
use smallvec::SmallVec;
use std::cmp::Reverse;
use unicode_segmentation::UnicodeSegmentation;
/// Convert annotated test string to test string and selection.
///
@ -10,6 +12,10 @@ use std::cmp::Reverse;
/// `#[` for primary selection with head after anchor followed by `|]#`.
/// `#(` for secondary selection with head after anchor followed by `|)#`.
///
/// If the selection contains any LF or CRLF sequence that is immediately
/// followed by the same grapheme, then the subsequent one is removed. This
/// allows representing a cursor over the end of the line.
///
/// # Examples
///
/// ```
@ -30,23 +36,23 @@ use std::cmp::Reverse;
pub fn print(s: &str) -> (String, Selection) {
let mut primary_idx = None;
let mut ranges = SmallVec::new();
let mut iter = s.chars().peekable();
let mut iter = UnicodeSegmentation::graphemes(s, true).peekable();
let mut left = String::with_capacity(s.len());
'outer: while let Some(c) = iter.next() {
let start = left.chars().count();
if c != '#' {
left.push(c);
if c != "#" {
left.push_str(c);
continue;
}
let (is_primary, close_pair) = match iter.next() {
Some('[') => (true, ']'),
Some('(') => (false, ')'),
Some("[") => (true, "]"),
Some("(") => (false, ")"),
Some(ch) => {
left.push('#');
left.push(ch);
left.push_str(ch);
continue;
}
None => break,
@ -56,24 +62,45 @@ pub fn print(s: &str) -> (String, Selection) {
panic!("primary `#[` already appeared {:?} {:?}", left, s);
}
let head_at_beg = iter.next_if_eq(&'|').is_some();
let head_at_beg = iter.next_if_eq(&"|").is_some();
let last_grapheme = |s: &str| {
UnicodeSegmentation::graphemes(s, true)
.last()
.map(String::from)
};
while let Some(c) = iter.next() {
if !(c == close_pair && iter.peek() == Some(&'#')) {
left.push(c);
let next = iter.peek();
let mut prev = last_grapheme(left.as_str());
if !(c == close_pair && next == Some(&"#")) {
left.push_str(c);
continue;
}
if !head_at_beg {
let prev = left.pop().unwrap();
if prev != '|' {
left.push(prev);
left.push(c);
continue;
match &prev {
Some(p) if p != "|" => {
left.push_str(c);
continue;
}
Some(p) if p == "|" => {
left.pop().unwrap(); // pop the |
prev = last_grapheme(left.as_str());
}
_ => (),
}
}
iter.next(); // skip "#"
let next = iter.peek();
// skip explicit line end inside selection
if (prev == Some(String::from("\r\n")) || prev == Some(String::from("\n")))
&& next.map(|n| String::from(*n)) == prev
{
iter.next();
}
if is_primary {
primary_idx = Some(ranges.len());
@ -118,14 +145,16 @@ pub fn print(s: &str) -> (String, Selection) {
/// use smallvec::smallvec;
///
/// assert_eq!(
/// plain("abc", Selection::new(smallvec![Range::new(0, 1), Range::new(3, 2)], 0)),
/// plain("abc", &Selection::new(smallvec![Range::new(0, 1), Range::new(3, 2)], 0)),
/// "#[a|]#b#(|c)#".to_owned()
/// );
/// ```
pub fn plain(s: &str, selection: Selection) -> String {
pub fn plain<R: Into<Rope>>(s: R, selection: &Selection) -> String {
let s = s.into();
let primary = selection.primary_index();
let mut out = String::with_capacity(s.len() + 5 * selection.len());
out.push_str(s);
let mut out = String::with_capacity(s.len_bytes() + 5 * selection.len());
out.push_str(&s.to_string());
let mut insertion: Vec<_> = selection
.iter()
.enumerate()
@ -138,7 +167,9 @@ pub fn plain(s: &str, selection: Selection) -> String {
(false, false) => [(range.anchor, ")#"), (range.head, "#(|")],
}
})
.map(|(char_idx, marker)| (s.char_to_byte(char_idx), marker))
.collect();
// insert in reverse order
insertion.sort_unstable_by_key(|k| Reverse(k.0));
for (i, s) in insertion {
@ -262,4 +293,94 @@ mod test {
print("hello #[|👨‍👩‍👧‍👦]# goodbye")
);
}
#[test]
fn plain_single() {
assert_eq!("#[|h]#ello", plain("hello", &Selection::single(1, 0)));
assert_eq!("#[h|]#ello", plain("hello", &Selection::single(0, 1)));
assert_eq!("#[|hell]#o", plain("hello", &Selection::single(4, 0)));
assert_eq!("#[hell|]#o", plain("hello", &Selection::single(0, 4)));
assert_eq!("#[|hello]#", plain("hello", &Selection::single(5, 0)));
assert_eq!("#[hello|]#", plain("hello", &Selection::single(0, 5)));
}
#[test]
fn plain_multi() {
assert_eq!(
plain(
"hello",
&Selection::new(
SmallVec::from_slice(&[Range::new(1, 0), Range::new(5, 4)]),
0
)
),
String::from("#[|h]#ell#(|o)#")
);
assert_eq!(
plain(
"hello",
&Selection::new(
SmallVec::from_slice(&[Range::new(0, 1), Range::new(4, 5)]),
0
)
),
String::from("#[h|]#ell#(o|)#")
);
assert_eq!(
plain(
"hello",
&Selection::new(
SmallVec::from_slice(&[Range::new(2, 0), Range::new(5, 3)]),
0
)
),
String::from("#[|he]#l#(|lo)#")
);
assert_eq!(
plain(
"hello\r\nhello\r\nhello\r\n",
&Selection::new(
SmallVec::from_slice(&[
Range::new(7, 5),
Range::new(21, 19),
Range::new(14, 12)
]),
0
)
),
String::from("hello#[|\r\n]#hello#(|\r\n)#hello#(|\r\n)#")
);
}
#[test]
fn plain_multi_byte_code_point() {
assert_eq!(
plain("„“", &Selection::single(1, 0)),
String::from("#[|„]#“")
);
assert_eq!(
plain("„“", &Selection::single(2, 1)),
String::from("„#[|“]#")
);
assert_eq!(
plain("„“", &Selection::single(0, 1)),
String::from("#[„|]#“")
);
assert_eq!(
plain("„“", &Selection::single(1, 2)),
String::from("„#[“|]#")
);
assert_eq!(
plain("they said „hello“", &Selection::single(11, 10)),
String::from("they said #[|„]#hello“")
);
}
#[test]
fn plain_multi_code_point_grapheme() {
assert_eq!(
plain("hello 👨‍👩‍👧‍👦 goodbye", &Selection::single(13, 6)),
String::from("hello #[|👨‍👩‍👧‍👦]# goodbye")
);
}
}

@ -0,0 +1,274 @@
use std::cell::Cell;
use std::convert::identity;
use std::ops::Range;
use std::rc::Rc;
use crate::syntax::Highlight;
use crate::Tendril;
/// An inline annotation is continuous text shown
/// on the screen before the grapheme that starts at
/// `char_idx`
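///
/// # Examples
///
/// A minimal sketch:
///
/// ```
/// use helix_core::text_annotations::InlineAnnotation;
///
/// // Render ": usize" as virtual text before the grapheme at char index 12.
/// let hint = InlineAnnotation::new(12, ": usize");
/// assert_eq!(hint.char_idx, 12);
/// ```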
#[derive(Debug, Clone)]
pub struct InlineAnnotation {
pub text: Tendril,
pub char_idx: usize,
}
impl InlineAnnotation {
pub fn new(char_idx: usize, text: impl Into<Tendril>) -> Self {
Self {
char_idx,
text: text.into(),
}
}
}
/// Represents a **single grapheme** in the document, starting
/// at `char_idx`, that will be replaced with
/// a different `grapheme`.
/// If `grapheme` contains multiple graphemes the text
/// will render incorrectly.
/// If you want to overlay multiple graphemes simply
/// use multiple `Overlays`.
///
/// # Examples
///
/// The following examples are valid overlays for the following text:
///
/// `aX͎̊͢͜͝͡bc`
///
/// ```
/// use helix_core::text_annotations::Overlay;
///
/// // replaces a
/// Overlay::new(0, "X");
///
/// // replaces X͎̊͢͜͝͡
/// Overlay::new(1, "\t");
///
/// // replaces b
/// Overlay::new(6, "X̢̢̟͖̲͌̋̇͑͝");
/// ```
///
/// The following examples are invalid uses
///
/// ```
/// use helix_core::text_annotations::Overlay;
///
/// // overlay is not aligned at grapheme boundary
/// Overlay::new(3, "x");
///
/// // overlay contains multiple graphemes
/// Overlay::new(0, "xy");
/// ```
#[derive(Debug, Clone)]
pub struct Overlay {
pub char_idx: usize,
pub grapheme: Tendril,
}
impl Overlay {
pub fn new(char_idx: usize, grapheme: impl Into<Tendril>) -> Self {
Self {
char_idx,
grapheme: grapheme.into(),
}
}
}
/// Line annotations allow for virtual text between normal
/// text lines. They cause `height` empty lines to be inserted
/// below the document line that contains `anchor_char_idx`.
///
/// These lines can be filled with text in the rendering code
/// as their contents have no effect beyond visual appearance.
///
/// To insert a line after a document line simply set
/// `anchor_char_idx` to `doc.line_to_char(line_idx)`
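///
/// # Examples
///
/// A minimal sketch (re-export of `Rope` assumed):
///
/// ```
/// use helix_core::text_annotations::LineAnnotation;
/// use helix_core::Rope;
///
/// let doc = Rope::from("fn main() {}\nfn helper() {}\n");
/// // Reserve two virtual lines below the first document line.
/// let annotation = LineAnnotation {
///     anchor_char_idx: doc.line_to_char(0),
///     height: 2,
/// };
/// assert_eq!(annotation.anchor_char_idx, 0);
/// ```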
#[derive(Debug, Clone)]
pub struct LineAnnotation {
pub anchor_char_idx: usize,
pub height: usize,
}
#[derive(Debug)]
struct Layer<A, M> {
annotations: Rc<[A]>,
current_index: Cell<usize>,
metadata: M,
}
impl<A, M: Clone> Clone for Layer<A, M> {
fn clone(&self) -> Self {
Layer {
annotations: self.annotations.clone(),
current_index: self.current_index.clone(),
metadata: self.metadata.clone(),
}
}
}
impl<A, M> Layer<A, M> {
pub fn reset_pos(&self, char_idx: usize, get_char_idx: impl Fn(&A) -> usize) {
let new_index = self
.annotations
.binary_search_by_key(&char_idx, get_char_idx)
.unwrap_or_else(identity);
self.current_index.set(new_index);
}
pub fn consume(&self, char_idx: usize, get_char_idx: impl Fn(&A) -> usize) -> Option<&A> {
let annot = self.annotations.get(self.current_index.get())?;
debug_assert!(get_char_idx(annot) >= char_idx);
if get_char_idx(annot) == char_idx {
self.current_index.set(self.current_index.get() + 1);
Some(annot)
} else {
None
}
}
}
impl<A, M> From<(Rc<[A]>, M)> for Layer<A, M> {
fn from((annotations, metadata): (Rc<[A]>, M)) -> Layer<A, M> {
Layer {
annotations,
current_index: Cell::new(0),
metadata,
}
}
}
fn reset_pos<A, M>(layers: &[Layer<A, M>], pos: usize, get_pos: impl Fn(&A) -> usize) {
for layer in layers {
layer.reset_pos(pos, &get_pos)
}
}
/// Annotations that change what is displayed when the document is rendered.
/// Also commonly called virtual text.
#[derive(Default, Debug, Clone)]
pub struct TextAnnotations {
inline_annotations: Vec<Layer<InlineAnnotation, Option<Highlight>>>,
overlays: Vec<Layer<Overlay, Option<Highlight>>>,
line_annotations: Vec<Layer<LineAnnotation, ()>>,
}
impl TextAnnotations {
/// Prepare the TextAnnotations for iteration starting at char_idx
pub fn reset_pos(&self, char_idx: usize) {
reset_pos(&self.inline_annotations, char_idx, |annot| annot.char_idx);
reset_pos(&self.overlays, char_idx, |annot| annot.char_idx);
reset_pos(&self.line_annotations, char_idx, |annot| {
annot.anchor_char_idx
});
}
pub fn collect_overlay_highlights(
&self,
char_range: Range<usize>,
) -> Vec<(usize, Range<usize>)> {
let mut highlights = Vec::new();
self.reset_pos(char_range.start);
for char_idx in char_range {
if let Some((_, Some(highlight))) = self.overlay_at(char_idx) {
// we don't know the number of chars the original grapheme takes
// however it doesn't matter as highlight boundaries are automatically
// aligned to grapheme boundaries in the rendering code
highlights.push((highlight.0, char_idx..char_idx + 1))
}
}
highlights
}
/// Add new inline annotations.
///
/// The annotation's text will be rendered with `highlight`
/// patched on top of `ui.text`.
///
/// The annotations **must be sorted** by their `char_idx`.
/// Multiple annotations with the same `char_idx` are allowed;
/// they will be displayed in the order that they are present in the layer.
///
/// If multiple layers contain annotations at the same position
/// the annotations that belong to the layers added first will be shown first.
pub fn add_inline_annotations(
&mut self,
layer: Rc<[InlineAnnotation]>,
highlight: Option<Highlight>,
) -> &mut Self {
self.inline_annotations.push((layer, highlight).into());
self
}
/// Add new grapheme overlays.
///
/// The overlayed grapheme will be rendered with `highlight`
/// patched on top of `ui.text`.
///
/// The overlays **must be sorted** by their `char_idx`.
/// Multiple overlays with the same `char_idx` **are allowed**.
///
/// If multiple layers contain an overlay at the same position,
/// the overlay from the layer added last will be shown.
pub fn add_overlay(&mut self, layer: Rc<[Overlay]>, highlight: Option<Highlight>) -> &mut Self {
self.overlays.push((layer, highlight).into());
self
}
/// Add new annotation lines.
///
/// The line annotations **must be sorted** by their `anchor_char_idx`.
/// Multiple line annotations with the same `anchor_char_idx` **are not allowed**.
pub fn add_line_annotation(&mut self, layer: Rc<[LineAnnotation]>) -> &mut Self {
self.line_annotations.push((layer, ()).into());
self
}
/// Removes all line annotations, useful for vertical motions
/// so that virtual text lines are automatically skipped.
pub fn clear_line_annotations(&mut self) {
self.line_annotations.clear();
}
pub(crate) fn next_inline_annotation_at(
&self,
char_idx: usize,
) -> Option<(&InlineAnnotation, Option<Highlight>)> {
self.inline_annotations.iter().find_map(|layer| {
let annotation = layer.consume(char_idx, |annot| annot.char_idx)?;
Some((annotation, layer.metadata))
})
}
pub(crate) fn overlay_at(&self, char_idx: usize) -> Option<(&Overlay, Option<Highlight>)> {
let mut overlay = None;
for layer in &self.overlays {
while let Some(new_overlay) = layer.consume(char_idx, |annot| annot.char_idx) {
overlay = Some((new_overlay, layer.metadata));
}
}
overlay
}
pub(crate) fn annotation_lines_at(&self, char_idx: usize) -> usize {
self.line_annotations
.iter()
.map(|layer| {
let mut lines = 0;
while let Some(annot) = layer.annotations.get(layer.current_index.get()) {
if annot.anchor_char_idx == char_idx {
layer.current_index.set(layer.current_index.get() + 1);
lines += annot.height
} else {
break;
}
}
lines
})
.sum()
}
}
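// A minimal usage sketch, assuming `Highlight` is the plain index newtype from `crate::syntax`:
// annotation layers are shared `Rc` slices sorted by `char_idx` and are attached together
// with an optional theme highlight.
#[cfg(test)]
mod example {
    use super::{InlineAnnotation, TextAnnotations};
    use crate::syntax::Highlight;
    use std::rc::Rc;

    #[test]
    fn build_inline_annotation_layer() {
        let inlay_hints: Rc<[InlineAnnotation]> =
            vec![InlineAnnotation::new(12, ": usize")].into();
        let mut annotations = TextAnnotations::default();
        annotations.add_inline_annotations(inlay_hints, Some(Highlight(5)));
        assert!(annotations.next_inline_annotation_at(12).is_some());
    }
}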

@ -231,8 +231,20 @@ fn textobject_pair_surround_impl(
};
pair_pos
.map(|(anchor, head)| match textobject {
TextObject::Inside => Range::new(next_grapheme_boundary(slice, anchor), head),
TextObject::Around => Range::new(anchor, next_grapheme_boundary(slice, head)),
TextObject::Inside => {
if anchor < head {
Range::new(next_grapheme_boundary(slice, anchor), head)
} else {
Range::new(anchor, next_grapheme_boundary(slice, head))
}
}
TextObject::Around => {
if anchor < head {
Range::new(anchor, next_grapheme_boundary(slice, head))
} else {
Range::new(next_grapheme_boundary(slice, anchor), head)
}
}
TextObject::Movement => unreachable!(),
})
.unwrap_or(range)
@ -425,7 +437,7 @@ mod test {
let text = Rope::from(s.as_str());
let selection = selection
.transform(|r| textobject_paragraph(text.slice(..), r, TextObject::Inside, 1));
let actual = crate::test::plain(&s, selection);
let actual = crate::test::plain(s.as_ref(), &selection);
assert_eq!(actual, expected, "\nbefore: `{:?}`", before);
}
}
@ -448,7 +460,7 @@ mod test {
let text = Rope::from(s.as_str());
let selection = selection
.transform(|r| textobject_paragraph(text.slice(..), r, TextObject::Inside, 2));
let actual = crate::test::plain(&s, selection);
let actual = crate::test::plain(s.as_ref(), &selection);
assert_eq!(actual, expected, "\nbefore: `{:?}`", before);
}
}
@ -479,7 +491,7 @@ mod test {
let text = Rope::from(s.as_str());
let selection = selection
.transform(|r| textobject_paragraph(text.slice(..), r, TextObject::Around, 1));
let actual = crate::test::plain(&s, selection);
let actual = crate::test::plain(s.as_ref(), &selection);
assert_eq!(actual, expected, "\nbefore: `{:?}`", before);
}
}

@ -1,3 +1,5 @@
use smallvec::SmallVec;
use crate::{Range, Rope, Selection, Tendril};
use std::borrow::Cow;
@ -466,6 +468,33 @@ impl Transaction {
self
}
/// Generate a transaction from a set of potentially overlapping changes. The `change_ranges`
/// iterator yields the range (of removed text) in the old document for each edit. If any change
/// overlaps with a previous (non-ignored) range then that change is ignored.
///
/// The `process_change` callback is called for each edit that is not ignored (in the order
/// yielded by `change_ranges`) and should return the new text that the associated range will be
/// replaced with.
///
/// To make this function more flexible, the iterator can yield additional data for each change
/// that is passed on to `process_change`.
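///
/// A minimal sketch (re-exports assumed):
///
/// ```
/// use helix_core::{Rope, Transaction};
///
/// let doc = Rope::from("one two three");
/// // The second change overlaps the first one and is therefore dropped.
/// let edits = [(0, 3, "ONE"), (2, 5, "skipped"), (4, 7, "TWO")];
/// let transaction = Transaction::change_ignore_overlapping(
///     &doc,
///     edits.into_iter(),
///     |_from, _to, replacement| Some(replacement.into()),
/// );
/// let mut new_doc = doc.clone();
/// transaction.apply(&mut new_doc);
/// assert_eq!(new_doc, "ONE TWO three");
/// ```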
pub fn change_ignore_overlapping<T>(
doc: &Rope,
change_ranges: impl Iterator<Item = (usize, usize, T)>,
mut process_change: impl FnMut(usize, usize, T) -> Option<Tendril>,
) -> Self {
let mut last = 0;
let changes = change_ranges.filter_map(|(from, to, data)| {
if from < last {
return None;
}
let tendril = process_change(from, to, data);
last = to;
Some((from, to, tendril))
});
Self::change(doc, changes)
}
/// Generate a transaction from a set of changes.
pub fn change<I>(doc: &Rope, changes: I) -> Self
where
@ -481,6 +510,11 @@ impl Transaction {
for (from, to, tendril) in changes {
// Verify ranges are ordered and not overlapping
debug_assert!(last <= from);
// Verify ranges are correct
debug_assert!(
from <= to,
"Edit end must end before it starts (should {from} <= {to})"
);
// Retain from last "to" to current "from"
changeset.retain(from - last);
@ -508,6 +542,44 @@ impl Transaction {
Self::change(doc, selection.iter().map(f))
}
pub fn change_by_selection_ignore_overlapping(
doc: &Rope,
selection: &Selection,
mut change_range: impl FnMut(&Range) -> (usize, usize),
mut create_tendril: impl FnMut(usize, usize) -> Option<Tendril>,
) -> (Transaction, Selection) {
let mut last_selection_idx = None;
let mut new_primary_idx = None;
let mut ranges: SmallVec<[Range; 1]> = SmallVec::new();
let process_change = |change_start, change_end, (idx, range): (usize, &Range)| {
// update the primary idx
if idx == selection.primary_index() {
new_primary_idx = Some(idx);
} else if new_primary_idx.is_none() {
if idx > selection.primary_index() {
new_primary_idx = last_selection_idx;
} else {
last_selection_idx = Some(idx);
}
}
ranges.push(*range);
create_tendril(change_start, change_end)
};
let transaction = Self::change_ignore_overlapping(
doc,
selection.iter().enumerate().map(|range| {
let (change_start, change_end) = change_range(range.1);
(change_start, change_end, range)
}),
process_change,
);
(
transaction,
Selection::new(ranges, new_primary_idx.unwrap_or(0)),
)
}
/// Insert text at each selection head.
pub fn insert(doc: &Rope, selection: &Selection, text: Tendril) -> Self {
Self::change_by_selection(doc, selection, |range| {

@ -2,6 +2,6 @@ use smartstring::{LazyCompact, SmartString};
/// Given a slice of text, return the text re-wrapped to fit it
/// within the given width.
pub fn reflow_hard_wrap(text: &str, max_line_len: usize) -> SmartString<LazyCompact> {
textwrap::refill(text, max_line_len).into()
pub fn reflow_hard_wrap(text: &str, text_width: usize) -> SmartString<LazyCompact> {
textwrap::refill(text, text_width).into()
}
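A small usage sketch for the renamed parameter, assuming the function lives in `helix_core::wrap` as in the surrounding hunk; the exact line breaks depend on textwrap's wrapping algorithm, so only the width bound is asserted:

```rust
use helix_core::wrap::reflow_hard_wrap;

let wrapped = reflow_hard_wrap("one two three four five", 10);
// Every re-wrapped line fits within the requested text width.
assert!(wrapped.lines().all(|line| line.len() <= 10));
```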

@ -1,5 +1,5 @@
use helix_core::{
indent::{treesitter_indent_for_pos, IndentStyle},
indent::{indent_level_for_line, treesitter_indent_for_pos, IndentStyle},
syntax::Loader,
Syntax,
};
@ -17,6 +17,39 @@ fn test_treesitter_indent_rust_2() {
// test_treesitter_indent("commands.rs", "source.rust");
}
#[test]
fn test_indent_level_for_line_with_spaces() {
let tab_width: usize = 4;
let indent_width: usize = 4;
let line = ropey::Rope::from_str(" Indented with 8 spaces");
let indent_level = indent_level_for_line(line.slice(0..), tab_width, indent_width);
assert_eq!(indent_level, 2)
}
#[test]
fn test_indent_level_for_line_with_tabs() {
let tab_width: usize = 4;
let indent_width: usize = 4;
let line = ropey::Rope::from_str("\t\tIndented with 2 tabs");
let indent_level = indent_level_for_line(line.slice(0..), tab_width, indent_width);
assert_eq!(indent_level, 2)
}
#[test]
fn test_indent_level_for_line_with_spaces_and_tabs() {
let tab_width: usize = 4;
let indent_width: usize = 4;
let line = ropey::Rope::from_str(" \t \tIndented with mix of spaces and tabs");
let indent_level = indent_level_for_line(line.slice(0..), tab_width, indent_width);
assert_eq!(indent_level, 2)
}
fn test_treesitter_indent(file_name: &str, lang_scope: &str) {
let mut test_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
test_dir.push("tests/data/indent");
@ -28,8 +61,8 @@ fn test_treesitter_indent(file_name: &str, lang_scope: &str) {
let mut config_file = test_dir;
config_file.push("languages.toml");
let config = std::fs::read(config_file).unwrap();
let config = toml::from_slice(&config).unwrap();
let config = std::fs::read_to_string(config_file).unwrap();
let config = toml::from_str(&config).unwrap();
let loader = Loader::new(config);
// set runtime path so we can find the queries
@ -46,11 +79,13 @@ fn test_treesitter_indent(file_name: &str, lang_scope: &str) {
for i in 0..doc.len_lines() {
let line = text.line(i);
if let Some(pos) = helix_core::find_first_non_whitespace_char(line) {
let tab_and_indent_width: usize = 4;
let suggested_indent = treesitter_indent_for_pos(
indent_query,
&syntax,
&IndentStyle::Spaces(4),
4,
&IndentStyle::Spaces(tab_and_indent_width as u8),
tab_and_indent_width,
tab_and_indent_width,
text,
i,
text.line_to_char(i) + pos,

@ -19,7 +19,7 @@ serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
thiserror = "1.0"
tokio = { version = "1", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot", "net", "sync"] }
which = "4.2"
which = "4.4"
[dev-dependencies]
fern = "0.6"

@ -1,4 +1,5 @@
use crate::{
requests::DisconnectArguments,
transport::{Payload, Request, Response, Transport},
types::*,
Error, Result, ThreadId,
@ -31,6 +32,8 @@ pub struct Client {
_process: Option<Child>,
server_tx: UnboundedSender<Payload>,
request_counter: AtomicU64,
connection_type: Option<ConnectionType>,
starting_request_args: Option<Value>,
pub caps: Option<DebuggerCapabilities>,
// thread_id -> frames
pub stack_frames: HashMap<ThreadId, Vec<StackFrame>>,
@ -41,6 +44,12 @@ pub struct Client {
pub quirks: DebuggerQuirks,
}
#[derive(Clone, Copy, Debug)]
pub enum ConnectionType {
Launch,
Attach,
}
impl Client {
// Spawn a process and communicate with it by either TCP or stdio
pub async fn process(
@ -78,7 +87,8 @@ impl Client {
server_tx,
request_counter: AtomicU64::new(0),
caps: None,
//
connection_type: None,
starting_request_args: None,
stack_frames: HashMap::new(),
thread_states: HashMap::new(),
thread_id: None,
@ -150,6 +160,10 @@ impl Client {
)
}
pub fn starting_request_args(&self) -> &Option<Value> {
&self.starting_request_args
}
pub async fn tcp_process(
cmd: &str,
args: Vec<&str>,
@ -207,6 +221,10 @@ impl Client {
self.id
}
pub fn connection_type(&self) -> Option<ConnectionType> {
self.connection_type
}
fn next_request_id(&self) -> u64 {
self.request_counter.fetch_add(1, Ordering::Relaxed)
}
@ -254,7 +272,7 @@ impl Client {
// TODO: specifiable timeout, delay other calls until initialize success
timeout(Duration::from_secs(20), callback_rx.recv())
.await
.map_err(|_| Error::Timeout)? // return Timeout
.map_err(|_| Error::Timeout(id))? // return Timeout
.ok_or(Error::StreamClosed)?
.map(|response| response.body.unwrap_or_default())
// TODO: check response.success
@ -334,18 +352,35 @@ impl Client {
Ok(())
}
pub fn disconnect(&self) -> impl Future<Output = Result<Value>> {
self.call::<requests::Disconnect>(())
pub fn disconnect(
&mut self,
args: Option<DisconnectArguments>,
) -> impl Future<Output = Result<Value>> {
self.connection_type = None;
self.call::<requests::Disconnect>(args)
}
pub fn launch(&self, args: serde_json::Value) -> impl Future<Output = Result<Value>> {
pub fn launch(&mut self, args: serde_json::Value) -> impl Future<Output = Result<Value>> {
self.connection_type = Some(ConnectionType::Launch);
self.starting_request_args = Some(args.clone());
self.call::<requests::Launch>(args)
}
pub fn attach(&self, args: serde_json::Value) -> impl Future<Output = Result<Value>> {
pub fn attach(&mut self, args: serde_json::Value) -> impl Future<Output = Result<Value>> {
self.connection_type = Some(ConnectionType::Attach);
self.starting_request_args = Some(args.clone());
self.call::<requests::Attach>(args)
}
pub fn restart(&self) -> impl Future<Output = Result<Value>> {
let args = if let Some(args) = &self.starting_request_args {
args.clone()
} else {
Value::Null
};
self.call::<requests::Restart>(args)
}
pub async fn set_breakpoints(
&self,
file: PathBuf,

@ -2,7 +2,7 @@ mod client;
mod transport;
mod types;
pub use client::Client;
pub use client::{Client, ConnectionType};
pub use events::Event;
pub use transport::{Payload, Response, Transport};
pub use types::*;
@ -14,8 +14,8 @@ pub enum Error {
Parse(#[from] serde_json::Error),
#[error("IO Error: {0}")]
IO(#[from] std::io::Error),
#[error("request timed out")]
Timeout,
#[error("request {0} timed out")]
Timeout(u64),
#[error("server closed the stream")]
StreamClosed,
#[error(transparent)]

@ -378,7 +378,7 @@ pub mod requests {
impl Request for Launch {
type Arguments = Value;
type Result = Value;
type Result = ();
const COMMAND: &'static str = "launch";
}
@ -387,15 +387,35 @@ pub mod requests {
impl Request for Attach {
type Arguments = Value;
type Result = Value;
type Result = ();
const COMMAND: &'static str = "attach";
}
#[derive(Debug, Default, PartialEq, Eq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct DisconnectArguments {
#[serde(skip_serializing_if = "Option::is_none")]
pub restart: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub terminate_debuggee: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub suspend_debuggee: Option<bool>,
}
#[derive(Debug)]
pub enum Restart {}
impl Request for Restart {
type Arguments = Value;
type Result = ();
const COMMAND: &'static str = "restart";
}
#[derive(Debug)]
pub enum Disconnect {}
impl Request for Disconnect {
type Arguments = ();
type Arguments = Option<DisconnectArguments>;
type Result = ();
const COMMAND: &'static str = "disconnect";
}
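A hedged sketch of how the new `DisconnectArguments` serializes, assuming `serde_json` is available; unset fields are omitted and field names are camelCased per the serde attributes:

```rust
use helix_dap::requests::DisconnectArguments;

let args = DisconnectArguments {
    restart: Some(false),
    terminate_debuggee: Some(true),
    suspend_debuggee: None,
};
// `suspend_debuggee` is skipped because it is `None`.
assert_eq!(
    serde_json::to_string(&args).unwrap(),
    r#"{"restart":false,"terminateDebuggee":true}"#
);
```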

@ -16,7 +16,7 @@ path = "src/main.rs"
[dependencies]
anyhow = "1"
serde = { version = "1.0", features = ["derive"] }
toml = "0.5"
toml = "0.7"
etcetera = "0.4"
tree-sitter = "0.20"
once_cell = "1.17"

@ -1,6 +1,9 @@
use std::str::from_utf8;
/// Default built-in languages.toml.
pub fn default_lang_config() -> toml::Value {
toml::from_slice(include_bytes!("../../languages.toml"))
let default_config = include_bytes!("../../languages.toml");
toml::from_str(from_utf8(default_config).unwrap())
.expect("Could not parse built-in languages.toml to valid toml")
}
@ -11,8 +14,8 @@ pub fn user_lang_config() -> Result<toml::Value, toml::de::Error> {
.chain([crate::config_dir()].into_iter())
.map(|path| path.join("languages.toml"))
.filter_map(|file| {
std::fs::read(&file)
.map(|config| toml::from_slice(&config))
std::fs::read_to_string(file)
.map(|config| toml::from_str(&config))
.ok()
})
.collect::<Result<Vec<_>, _>>()?

@ -67,8 +67,9 @@ pub fn get_language(name: &str) -> Result<Language> {
#[cfg(not(target_arch = "wasm32"))]
pub fn get_language(name: &str) -> Result<Language> {
use libloading::{Library, Symbol};
let mut library_path = crate::runtime_dir().join("grammars").join(name);
library_path.set_extension(DYLIB_EXTENSION);
let mut rel_library_path = PathBuf::new().join("grammars").join(name);
rel_library_path.set_extension(DYLIB_EXTENSION);
let library_path = crate::runtime_file(&rel_library_path);
let library = unsafe { Library::new(&library_path) }
.with_context(|| format!("Error opening dynamic library {:?}", library_path))?;
@ -252,7 +253,9 @@ fn fetch_grammar(grammar: GrammarConfiguration) -> Result<FetchStatus> {
remote, revision, ..
} = grammar.source
{
let grammar_dir = crate::runtime_dir()
let grammar_dir = crate::runtime_dirs()
.first()
.expect("No runtime directories provided") // guaranteed by post-condition
.join("grammars")
.join("sources")
.join(&grammar.grammar_id);
@ -350,7 +353,9 @@ fn build_grammar(grammar: GrammarConfiguration, target: Option<&str>) -> Result<
let grammar_dir = if let GrammarSource::Local { path } = &grammar.source {
PathBuf::from(&path)
} else {
crate::runtime_dir()
crate::runtime_dirs()
.first()
.expect("No runtime directories provided") // guaranteed by post-condition
.join("grammars")
.join("sources")
.join(&grammar.grammar_id)
@ -401,7 +406,10 @@ fn build_tree_sitter_library(
None
}
};
let parser_lib_path = crate::runtime_dir().join("grammars");
let parser_lib_path = crate::runtime_dirs()
.first()
.expect("No runtime directories provided") // guaranteed by post-condition
.join("grammars");
let mut library_path = parser_lib_path.join(&grammar.grammar_id);
library_path.set_extension(DYLIB_EXTENSION);
@ -511,9 +519,6 @@ fn mtime(path: &Path) -> Result<SystemTime> {
/// Gives the contents of a file from a language's `runtime/queries/<lang>`
/// directory
pub fn load_runtime_file(language: &str, filename: &str) -> Result<String, std::io::Error> {
let path = crate::RUNTIME_DIR
.join("queries")
.join(language)
.join(filename);
std::fs::read_to_string(&path)
let path = crate::runtime_file(&PathBuf::new().join("queries").join(language).join(filename));
std::fs::read_to_string(path)
}

@ -2,11 +2,12 @@ pub mod config;
pub mod grammar;
use etcetera::base_strategy::{choose_base_strategy, BaseStrategy};
use std::path::PathBuf;
use std::path::{Path, PathBuf};
pub const VERSION_AND_GIT_HASH: &str = env!("VERSION_AND_GIT_HASH");
pub static RUNTIME_DIR: once_cell::sync::Lazy<PathBuf> = once_cell::sync::Lazy::new(runtime_dir);
static RUNTIME_DIRS: once_cell::sync::Lazy<Vec<PathBuf>> =
once_cell::sync::Lazy::new(prioritize_runtime_dirs);
static CONFIG_FILE: once_cell::sync::OnceCell<PathBuf> = once_cell::sync::OnceCell::new();
@ -25,31 +26,83 @@ pub fn initialize_config_file(specified_file: Option<PathBuf>) {
CONFIG_FILE.set(config_file).ok();
}
pub fn runtime_dir() -> PathBuf {
if let Ok(dir) = std::env::var("HELIX_RUNTIME") {
return dir.into();
}
/// A list of runtime directories from highest to lowest priority
///
/// The priority is:
///
/// 1. sibling directory to `CARGO_MANIFEST_DIR` (if environment variable is set)
/// 2. subdirectory of user config directory (always included)
/// 3. `HELIX_RUNTIME` (if environment variable is set)
/// 4. subdirectory of path to helix executable (always included)
///
/// Postcondition: returns at least two paths (they might not exist).
fn prioritize_runtime_dirs() -> Vec<PathBuf> {
const RT_DIR: &str = "runtime";
// Adding higher priority first
let mut rt_dirs = Vec::new();
if let Ok(dir) = std::env::var("CARGO_MANIFEST_DIR") {
// this is the directory of the crate being run by cargo, we need the workspace path so we take the parent
let path = std::path::PathBuf::from(dir).parent().unwrap().join(RT_DIR);
log::debug!("runtime dir: {}", path.to_string_lossy());
return path;
rt_dirs.push(path);
}
const RT_DIR: &str = "runtime";
let conf_dir = config_dir().join(RT_DIR);
if conf_dir.exists() {
return conf_dir;
let conf_rt_dir = config_dir().join(RT_DIR);
rt_dirs.push(conf_rt_dir);
if let Ok(dir) = std::env::var("HELIX_RUNTIME") {
rt_dirs.push(dir.into());
}
// fallback to location of the executable being run
// canonicalize the path in case the executable is symlinked
std::env::current_exe()
let exe_rt_dir = std::env::current_exe()
.ok()
.and_then(|path| std::fs::canonicalize(path).ok())
.and_then(|path| path.parent().map(|path| path.to_path_buf().join(RT_DIR)))
.unwrap()
.unwrap();
rt_dirs.push(exe_rt_dir);
rt_dirs
}
/// Runtime directories ordered from highest to lowest priority
///
/// All directories should be checked when looking for files.
///
/// Postcondition: returns at least one path (it might not exist).
pub fn runtime_dirs() -> &'static [PathBuf] {
&RUNTIME_DIRS
}
/// Find file with path relative to runtime directory
///
/// `rel_path` should be the relative path from within the `runtime/` directory.
/// The valid runtime directories are searched in priority order and the first
/// file found to exist is returned, otherwise None.
fn find_runtime_file(rel_path: &Path) -> Option<PathBuf> {
RUNTIME_DIRS.iter().find_map(|rt_dir| {
let path = rt_dir.join(rel_path);
if path.exists() {
Some(path)
} else {
None
}
})
}
/// Find file with path relative to runtime directory
///
/// `rel_path` should be the relative path from within the `runtime/` directory.
/// The valid runtime directories are searched in priority order and the first
/// file found to exist is returned, otherwise the path to the final attempt
/// that failed.
pub fn runtime_file(rel_path: &Path) -> PathBuf {
find_runtime_file(rel_path).unwrap_or_else(|| {
RUNTIME_DIRS
.last()
.map(|dir| dir.join(rel_path))
.unwrap_or_default()
})
}
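An illustrative sketch (the file name is assumed) of resolving a file through the new prioritized runtime directories:

```rust
use std::path::Path;

// The highest-priority runtime directory containing the file wins; if none
// contains it, the path under the lowest-priority directory is returned.
let theme = helix_loader::runtime_file(Path::new("themes/onedark.toml"));
println!("theme path: {}", theme.display());
```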
pub fn config_dir() -> PathBuf {
@ -179,6 +232,8 @@ pub fn merge_toml_values(left: toml::Value, right: toml::Value, merge_depth: usi
#[cfg(test)]
mod merge_toml_tests {
use std::str;
use super::merge_toml_values;
use toml::Value;
@ -191,8 +246,9 @@ mod merge_toml_tests {
indent = { tab-width = 4, unit = " ", test = "aaa" }
"#;
let base: Value = toml::from_slice(include_bytes!("../../languages.toml"))
.expect("Couldn't parse built-in languages config");
let base = include_bytes!("../../languages.toml");
let base = str::from_utf8(base).expect("Couldn't parse built-in languages config");
let base: Value = toml::from_str(base).expect("Couldn't parse built-in languages config");
let user: Value = toml::from_str(USER).unwrap();
let merged = merge_toml_values(base, user, 3);
@ -224,8 +280,9 @@ mod merge_toml_tests {
language-server = { command = "deno", args = ["lsp"] }
"#;
let base: Value = toml::from_slice(include_bytes!("../../languages.toml"))
.expect("Couldn't parse built-in languages config");
let base = include_bytes!("../../languages.toml");
let base = str::from_utf8(base).expect("Couldn't parse built-in languages config");
let base: Value = toml::from_str(base).expect("Couldn't parse built-in languages config");
let user: Value = toml::from_str(USER).unwrap();
let merged = merge_toml_values(base, user, 3);

@ -14,15 +14,16 @@ homepage = "https://helix-editor.com"
[dependencies]
helix-core = { version = "0.6", path = "../helix-core" }
helix-loader = { version = "0.6", path = "../helix-loader" }
helix-parsec = { version = "0.6", path = "../helix-parsec" }
anyhow = "1.0"
futures-executor = "0.3"
futures-util = { version = "0.3", features = ["std", "async-await"], default-features = false }
log = "0.4"
lsp-types = { version = "0.93", features = ["proposed"] }
lsp-types = { version = "0.94" }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
thiserror = "1.0"
tokio = { version = "1.24", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot", "sync"] }
tokio-stream = "0.1.11"
which = "4.2"
tokio = { version = "1.26", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot", "sync"] }
tokio-stream = "0.1.12"
which = "4.4"

@ -6,6 +6,7 @@ use crate::{
use helix_core::{find_root, ChangeSet, Rope};
use helix_loader::{self, VERSION_AND_GIT_HASH};
use lsp::PositionEncodingKind;
use lsp_types as lsp;
use serde::Deserialize;
use serde_json::Value;
@ -32,7 +33,6 @@ pub struct Client {
server_tx: UnboundedSender<Payload>,
request_counter: AtomicU64,
pub(crate) capabilities: OnceCell<lsp::ServerCapabilities>,
offset_encoding: OffsetEncoding,
config: Option<Value>,
root_path: std::path::PathBuf,
root_uri: Option<lsp::Url>,
@ -104,7 +104,6 @@ impl Client {
server_tx,
request_counter: AtomicU64::new(0),
capabilities: OnceCell::new(),
offset_encoding: OffsetEncoding::Utf8,
config,
req_timeout,
@ -147,7 +146,19 @@ impl Client {
}
pub fn offset_encoding(&self) -> OffsetEncoding {
self.offset_encoding
self.capabilities()
.position_encoding
.as_ref()
.and_then(|encoding| match encoding.as_str() {
"utf-8" => Some(OffsetEncoding::Utf8),
"utf-16" => Some(OffsetEncoding::Utf16),
"utf-32" => Some(OffsetEncoding::Utf32),
encoding => {
log::error!("Server provided invalid position encoding {encoding}, defaulting to utf-16");
None
},
})
.unwrap_or_default()
}
pub fn config(&self) -> Option<&Value> {
@ -190,7 +201,7 @@ impl Client {
let request = jsonrpc::MethodCall {
jsonrpc: Some(jsonrpc::Version::V2),
id,
id: id.clone(),
method: R::METHOD.to_string(),
params: Self::value_into_params(params),
};
@ -207,7 +218,7 @@ impl Client {
// TODO: delay other calls until initialize success
timeout(Duration::from_secs(timeout_secs), rx.recv())
.await
.map_err(|_| Error::Timeout)? // return Timeout
.map_err(|_| Error::Timeout(id))? // return Timeout
.ok_or(Error::StreamClosed)?
}
}
@ -304,12 +315,26 @@ impl Client {
execute_command: Some(lsp::DynamicRegistrationClientCapabilities {
dynamic_registration: Some(false),
}),
inlay_hint: Some(lsp::InlayHintWorkspaceClientCapabilities {
refresh_support: Some(false),
}),
workspace_edit: Some(lsp::WorkspaceEditClientCapabilities {
document_changes: Some(true),
resource_operations: Some(vec![
lsp::ResourceOperationKind::Create,
lsp::ResourceOperationKind::Rename,
lsp::ResourceOperationKind::Delete,
]),
failure_handling: Some(lsp::FailureHandlingKind::Abort),
normalizes_line_endings: Some(false),
change_annotation_support: None,
}),
..Default::default()
}),
text_document: Some(lsp::TextDocumentClientCapabilities {
completion: Some(lsp::CompletionClientCapabilities {
completion_item: Some(lsp::CompletionItemCapability {
snippet_support: Some(false),
snippet_support: Some(true),
resolve_support: Some(lsp::CompletionItemCapabilityResolveSupport {
properties: vec![
String::from("documentation"),
@ -318,6 +343,10 @@ impl Client {
],
}),
insert_replace_support: Some(true),
deprecated_support: Some(true),
tag_support: Some(lsp::TagSupport {
value_set: vec![lsp::CompletionItemTag::DEPRECATED],
}),
..Default::default()
}),
completion_item_kind: Some(lsp::CompletionItemKindCapability {
@ -344,7 +373,7 @@ impl Client {
}),
rename: Some(lsp::RenameClientCapabilities {
dynamic_registration: Some(false),
prepare_support: Some(false),
prepare_support: Some(true),
prepare_support_default_behavior: None,
honors_change_annotations: Some(false),
}),
@ -369,14 +398,27 @@ impl Client {
..Default::default()
}),
publish_diagnostics: Some(lsp::PublishDiagnosticsClientCapabilities {
version_support: Some(true),
..Default::default()
}),
inlay_hint: Some(lsp::InlayHintClientCapabilities {
dynamic_registration: Some(false),
resolve_support: None,
}),
..Default::default()
}),
window: Some(lsp::WindowClientCapabilities {
work_done_progress: Some(true),
..Default::default()
}),
general: Some(lsp::GeneralClientCapabilities {
position_encodings: Some(vec![
PositionEncodingKind::UTF32,
PositionEncodingKind::UTF8,
PositionEncodingKind::UTF16,
]),
..Default::default()
}),
..Default::default()
},
trace: None,
@ -577,7 +619,7 @@ impl Client {
}]
}
lsp::TextDocumentSyncKind::INCREMENTAL => {
Self::changeset_to_changes(old_text, new_text, changes, self.offset_encoding)
Self::changeset_to_changes(old_text, new_text, changes, self.offset_encoding())
}
lsp::TextDocumentSyncKind::NONE => return None,
kind => unimplemented!("{:?}", kind),
@ -628,7 +670,7 @@ impl Client {
Some(self.notify::<lsp::notification::DidSaveTextDocument>(
lsp::DidSaveTextDocumentParams {
text_document,
text: include_text.then(|| text.into()),
text: include_text.then_some(text.into()),
},
))
}
@ -703,6 +745,31 @@ impl Client {
Some(self.call::<lsp::request::SignatureHelpRequest>(params))
}
pub fn text_document_range_inlay_hints(
&self,
text_document: lsp::TextDocumentIdentifier,
range: lsp::Range,
work_done_token: Option<lsp::ProgressToken>,
) -> Option<impl Future<Output = Result<Value>>> {
let capabilities = self.capabilities.get().unwrap();
match capabilities.inlay_hint_provider {
Some(
lsp::OneOf::Left(true)
| lsp::OneOf::Right(lsp::InlayHintServerCapabilities::Options(_)),
) => (),
_ => return None,
}
let params = lsp::InlayHintParams {
text_document,
range,
work_done_progress_params: lsp::WorkDoneProgressParams { work_done_token },
};
Some(self.call::<lsp::request::InlayHintRequest>(params))
}
pub fn text_document_hover(
&self,
text_document: lsp::TextDocumentIdentifier,
@ -886,6 +953,31 @@ impl Client {
))
}
pub fn goto_declaration(
&self,
text_document: lsp::TextDocumentIdentifier,
position: lsp::Position,
work_done_token: Option<lsp::ProgressToken>,
) -> Option<impl Future<Output = Result<Value>>> {
let capabilities = self.capabilities.get().unwrap();
// Return early if the server does not support goto-declaration.
match capabilities.declaration_provider {
Some(
lsp::DeclarationCapability::Simple(true)
| lsp::DeclarationCapability::RegistrationOptions(_)
| lsp::DeclarationCapability::Options(_),
) => (),
_ => return None,
}
Some(self.goto_request::<lsp::request::GotoDeclaration>(
text_document,
position,
work_done_token,
))
}
pub fn goto_type_definition(
&self,
text_document: lsp::TextDocumentIdentifier,
@ -986,6 +1078,29 @@ impl Client {
Some(self.call::<lsp::request::DocumentSymbolRequest>(params))
}
pub fn prepare_rename(
&self,
text_document: lsp::TextDocumentIdentifier,
position: lsp::Position,
) -> Option<impl Future<Output = Result<Value>>> {
let capabilities = self.capabilities.get().unwrap();
match capabilities.rename_provider {
Some(lsp::OneOf::Right(lsp::RenameOptions {
prepare_provider: Some(true),
..
})) => (),
_ => return None,
}
let params = lsp::TextDocumentPositionParams {
text_document,
position,
};
Some(self.call::<lsp::request::PrepareRenameRequest>(params))
}
// empty string to get all symbols
pub fn workspace_symbols(&self, query: String) -> Option<impl Future<Output = Result<Value>>> {
let capabilities = self.capabilities.get().unwrap();
@ -1002,7 +1117,7 @@ impl Client {
partial_result_params: lsp::PartialResultParams::default(),
};
Some(self.call::<lsp::request::WorkspaceSymbol>(params))
Some(self.call::<lsp::request::WorkspaceSymbolRequest>(params))
}
pub fn code_actions(
@ -1033,20 +1148,23 @@ impl Client {
Some(self.call::<lsp::request::CodeActionRequest>(params))
}
pub fn supports_rename(&self) -> bool {
let capabilities = self.capabilities.get().unwrap();
matches!(
capabilities.rename_provider,
Some(lsp::OneOf::Left(true) | lsp::OneOf::Right(_))
)
}
pub fn rename_symbol(
&self,
text_document: lsp::TextDocumentIdentifier,
position: lsp::Position,
new_name: String,
) -> Option<impl Future<Output = Result<lsp::WorkspaceEdit>>> {
let capabilities = self.capabilities.get().unwrap();
// Return early if the language server does not support renaming.
match capabilities.rename_provider {
Some(lsp::OneOf::Left(true)) | Some(lsp::OneOf::Right(_)) => (),
// None | Some(false)
_ => return None,
};
if !self.supports_rename() {
return None;
}
let params = lsp::RenameParams {
text_document_position: lsp::TextDocumentPositionParams {

@ -108,6 +108,16 @@ pub enum Id {
Str(String),
}
impl std::fmt::Display for Id {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Id::Null => f.write_str("null"),
Id::Num(num) => write!(f, "{}", num),
Id::Str(string) => f.write_str(string),
}
}
}
/// Protocol Version
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
pub enum Version {

@ -1,5 +1,6 @@
mod client;
pub mod jsonrpc;
pub mod snippet;
mod transport;
pub use client::Client;
@ -20,7 +21,6 @@ use std::{
},
};
use serde::{Deserialize, Serialize};
use thiserror::Error;
use tokio_stream::wrappers::UnboundedReceiverStream;
@ -35,8 +35,8 @@ pub enum Error {
Parse(#[from] serde_json::Error),
#[error("IO Error: {0}")]
IO(#[from] std::io::Error),
#[error("request timed out")]
Timeout,
#[error("request {0} timed out")]
Timeout(jsonrpc::Id),
#[error("server closed the stream")]
StreamClosed,
#[error("Unhandled")]
@ -45,18 +45,21 @@ pub enum Error {
Other(#[from] anyhow::Error),
}
#[derive(Clone, Copy, Debug, Serialize, Deserialize)]
#[derive(Clone, Copy, Debug, Default)]
pub enum OffsetEncoding {
/// UTF-8 code units aka bytes
#[serde(rename = "utf-8")]
Utf8,
/// UTF-32 code units aka chars
Utf32,
/// UTF-16 code units
#[serde(rename = "utf-16")]
#[default]
Utf16,
}
pub mod util {
use super::*;
use helix_core::line_ending::{line_end_byte_index, line_end_char_index};
use helix_core::{chars, RopeSlice, SmallVec};
use helix_core::{diagnostic::NumberOrString, Range, Rope, Selection, Tendril, Transaction};
/// Converts a diagnostic in the document to [`lsp::Diagnostic`].
@ -117,7 +120,7 @@ pub mod util {
/// Converts [`lsp::Position`] to a position in the document.
///
/// Returns `None` if position exceeds document length or an operation overflows.
/// Returns `None` if position.line is out of bounds or an overflow occurs
pub fn lsp_pos_to_pos(
doc: &Rope,
pos: lsp::Position,
@ -128,22 +131,63 @@ pub mod util {
return None;
}
match offset_encoding {
// We need to be careful here to fully comply with the LSP spec.
// Two relevant quotes from the spec:
//
// https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#position
// > If the character value is greater than the line length it defaults back
// > to the line length.
//
// https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocuments
// > To ensure that both client and server split the string into the same
// > line representation the protocol specifies the following end-of-line sequences:
// > \n, \r\n and \r. Positions are line end character agnostic.
// > So you can not specify a position that denotes \r|\n or \n| where | represents the character offset.
//
// This means that while the line must be in bounds, the `character`
// must be capped to the end of the line.
// Note that the end of the line here is **before** the line terminator,
// so we must use `line_end_char_index` instead of `doc.line_to_char(pos_line + 1)`
//
// FIXME: Helix does not fully comply with the LSP spec for line terminators.
// The LSP standard requires that line terminators are ['\n', '\r\n', '\r'].
// With the unicode-linebreak feature disabled, the `\r` terminator is not handled by helix.
// With the unicode-linebreak feature enabled, helix recognizes multiple extra line break chars,
// which means that positions will be decoded/encoded incorrectly in their presence
let line = match offset_encoding {
OffsetEncoding::Utf8 => {
let line = doc.line_to_char(pos_line);
let pos = line.checked_add(pos.character as usize)?;
if pos <= doc.len_chars() {
Some(pos)
} else {
None
}
let line_start = doc.line_to_byte(pos_line);
let line_end = line_end_byte_index(&doc.slice(..), pos_line);
line_start..line_end
}
OffsetEncoding::Utf16 => {
let line = doc.line_to_char(pos_line);
let line_start = doc.char_to_utf16_cu(line);
let pos = line_start.checked_add(pos.character as usize)?;
doc.try_utf16_cu_to_char(pos).ok()
// TODO directly translate line index to char-idx
// ropey can do this just as easily as utf-8 byte translation
// but the functions are just missing.
// Translate to char first and then utf-16 as a workaround
let line_start = doc.line_to_char(pos_line);
let line_end = line_end_char_index(&doc.slice(..), pos_line);
doc.char_to_utf16_cu(line_start)..doc.char_to_utf16_cu(line_end)
}
OffsetEncoding::Utf32 => {
let line_start = doc.line_to_char(pos_line);
let line_end = line_end_char_index(&doc.slice(..), pos_line);
line_start..line_end
}
};
// The LSP spec demands that the offset is capped to the end of the line
let pos = line
.start
.checked_add(pos.character as usize)
.unwrap_or(line.end)
.min(line.end);
match offset_encoding {
OffsetEncoding::Utf8 => doc.try_byte_to_char(pos).ok(),
OffsetEncoding::Utf16 => doc.try_utf16_cu_to_char(pos).ok(),
OffsetEncoding::Utf32 => Some(pos),
}
}
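A minimal sketch (test strings assumed) of the capping behavior described above:

```rust
use helix_core::Rope;
use helix_lsp::{lsp, util::lsp_pos_to_pos, OffsetEncoding};

let doc = Rope::from("ab\ncd");
// A `character` past the end of line 0 is capped to the line end (before "\n"),
// per the spec's "defaults back to the line length" rule.
assert_eq!(
    lsp_pos_to_pos(&doc, lsp::Position::new(0, 99), OffsetEncoding::Utf8),
    Some(2)
);
// An out-of-bounds line still yields `None`.
assert_eq!(
    lsp_pos_to_pos(&doc, lsp::Position::new(5, 0), OffsetEncoding::Utf8),
    None
);
```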
@ -158,8 +202,8 @@ pub mod util {
match offset_encoding {
OffsetEncoding::Utf8 => {
let line = doc.char_to_line(pos);
let line_start = doc.line_to_char(line);
let col = pos - line_start;
let line_start = doc.line_to_byte(line);
let col = doc.char_to_byte(pos) - line_start;
lsp::Position::new(line as u32, col as u32)
}
@ -168,6 +212,13 @@ pub mod util {
let line_start = doc.char_to_utf16_cu(doc.line_to_char(line));
let col = doc.char_to_utf16_cu(pos) - line_start;
lsp::Position::new(line as u32, col as u32)
}
OffsetEncoding::Utf32 => {
let line = doc.char_to_line(pos);
let line_start = doc.line_to_char(line);
let col = pos - line_start;
lsp::Position::new(line as u32, col as u32)
}
}
@ -196,40 +247,227 @@ pub mod util {
Some(Range::new(start, end))
}
/// If the language server did not provide a range for the completion, or the range of the
/// primary cursor cannot be used for a secondary cursor, this function
/// can be used to find the completion range for a cursor
fn find_completion_range(text: RopeSlice, replace_mode: bool, cursor: usize) -> (usize, usize) {
let start = cursor
- text
.chars_at(cursor)
.reversed()
.take_while(|ch| chars::char_is_word(*ch))
.count();
let mut end = cursor;
if replace_mode {
end += text
.chars_at(cursor)
.skip(1)
.take_while(|ch| chars::char_is_word(*ch))
.count();
}
(start, end)
}
fn completion_range(
text: RopeSlice,
edit_offset: Option<(i128, i128)>,
replace_mode: bool,
cursor: usize,
) -> Option<(usize, usize)> {
let res = match edit_offset {
Some((start_offset, end_offset)) => {
let start_offset = cursor as i128 + start_offset;
if start_offset < 0 {
return None;
}
let end_offset = cursor as i128 + end_offset;
if end_offset > text.len_chars() as i128 {
return None;
}
(start_offset as usize, end_offset as usize)
}
None => find_completion_range(text, replace_mode, cursor),
};
Some(res)
}
/// Creates a [Transaction] from the [lsp::TextEdit] in a completion response.
/// The transaction applies the edit to all cursors.
pub fn generate_transaction_from_completion_edit(
doc: &Rope,
selection: &Selection,
edit: lsp::TextEdit,
offset_encoding: OffsetEncoding,
edit_offset: Option<(i128, i128)>,
replace_mode: bool,
new_text: String,
) -> Transaction {
let replacement: Option<Tendril> = if edit.new_text.is_empty() {
let replacement: Option<Tendril> = if new_text.is_empty() {
None
} else {
Some(edit.new_text.into())
Some(new_text.into())
};
let text = doc.slice(..);
let primary_cursor = selection.primary().cursor(text);
let (removed_start, removed_end) = completion_range(
text,
edit_offset,
replace_mode,
selection.primary().cursor(text),
)
.expect("transaction must be valid for primary selection");
let removed_text = text.slice(removed_start..removed_end);
let start_offset = match lsp_pos_to_pos(doc, edit.range.start, offset_encoding) {
Some(start) => start as i128 - primary_cursor as i128,
None => return Transaction::new(doc),
};
let end_offset = match lsp_pos_to_pos(doc, edit.range.end, offset_encoding) {
Some(end) => end as i128 - primary_cursor as i128,
None => return Transaction::new(doc),
};
let (transaction, mut selection) = Transaction::change_by_selection_ignore_overlapping(
doc,
selection,
|range| {
let cursor = range.cursor(text);
completion_range(text, edit_offset, replace_mode, cursor)
.filter(|(start, end)| text.slice(start..end) == removed_text)
.unwrap_or_else(|| find_completion_range(text, replace_mode, cursor))
},
|_, _| replacement.clone(),
);
if transaction.changes().is_empty() {
return transaction;
}
selection = selection.map(transaction.changes());
transaction.with_selection(selection)
}
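A hedged sketch (document and completion text assumed) of the insert-mode fallback: with no server-provided range, the word before each cursor is replaced:

```rust
use helix_core::{Rope, Selection};
use helix_lsp::util::generate_transaction_from_completion_edit;

let mut doc = Rope::from("let pri = 1;");
// Cursor sits on the space right after "pri".
let selection = Selection::point(7);
let transaction =
    generate_transaction_from_completion_edit(&doc, &selection, None, false, "println".into());
assert!(transaction.apply(&mut doc));
assert_eq!(doc, "let println = 1;");
```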
/// Creates a [Transaction] from the [snippet::Snippet] in a completion response.
/// The transaction applies the edit to all cursors.
#[allow(clippy::too_many_arguments)]
pub fn generate_transaction_from_snippet(
doc: &Rope,
selection: &Selection,
edit_offset: Option<(i128, i128)>,
replace_mode: bool,
snippet: snippet::Snippet,
line_ending: &str,
include_placeholder: bool,
tab_width: usize,
indent_width: usize,
) -> Transaction {
let text = doc.slice(..);
let mut off = 0i128;
let mut mapped_doc = doc.clone();
let mut selection_tabstops: SmallVec<[_; 1]> = SmallVec::new();
let (removed_start, removed_end) = completion_range(
text,
edit_offset,
replace_mode,
selection.primary().cursor(text),
)
.expect("transaction must be valid for primary selection");
let removed_text = text.slice(removed_start..removed_end);
let (transaction, selection) = Transaction::change_by_selection_ignore_overlapping(
doc,
selection,
|range| {
let cursor = range.cursor(text);
completion_range(text, edit_offset, replace_mode, cursor)
.filter(|(start, end)| text.slice(start..end) == removed_text)
.unwrap_or_else(|| find_completion_range(text, replace_mode, cursor))
},
|replacement_start, replacement_end| {
let mapped_replacement_start = (replacement_start as i128 + off) as usize;
let mapped_replacement_end = (replacement_end as i128 + off) as usize;
let line_idx = mapped_doc.char_to_line(mapped_replacement_start);
let indent_level = helix_core::indent::indent_level_for_line(
mapped_doc.line(line_idx),
tab_width,
indent_width,
) * indent_width;
let newline_with_offset = format!(
"{line_ending}{blank:indent_level$}",
line_ending = line_ending,
blank = ""
);
let (replacement, tabstops) =
snippet::render(&snippet, &newline_with_offset, include_placeholder);
selection_tabstops.push((mapped_replacement_start, tabstops));
mapped_doc.remove(mapped_replacement_start..mapped_replacement_end);
mapped_doc.insert(mapped_replacement_start, &replacement);
off +=
replacement_start as i128 - replacement_end as i128 + replacement.len() as i128;
Some(replacement)
},
);
let changes = transaction.changes();
if changes.is_empty() {
return transaction;
}
let mut mapped_selection = SmallVec::with_capacity(selection.len());
let mut mapped_primary_idx = 0;
let primary_range = selection.primary();
for (range, (tabstop_anchor, tabstops)) in selection.into_iter().zip(selection_tabstops) {
if range == primary_range {
mapped_primary_idx = mapped_selection.len()
}
Transaction::change_by_selection(doc, selection, |range| {
let cursor = range.cursor(text);
(
(cursor as i128 + start_offset) as usize,
(cursor as i128 + end_offset) as usize,
replacement.clone(),
)
})
let range = range.map(changes);
let tabstops = tabstops.first().filter(|tabstops| !tabstops.is_empty());
let Some(tabstops) = tabstops else{
// no tabstop normal mapping
mapped_selection.push(range);
continue;
};
// expand the selection to cover the tabstop to retain the helix selection semantic
// the tabstop closest to the range simply replaces `head` while anchor remains in place
// the remaining tabstops receive their own single-width cursor
if range.head < range.anchor {
let first_tabstop = tabstop_anchor + tabstops[0].1;
// if selection is forward but was moved to the right it is
// contained entirely in the replacement text, just do a point
// selection (fallback below)
if range.anchor >= first_tabstop {
let range = Range::new(range.anchor, first_tabstop);
mapped_selection.push(range);
let rem_tabstops = tabstops[1..]
.iter()
.map(|tabstop| Range::point(tabstop_anchor + tabstop.1));
mapped_selection.extend(rem_tabstops);
continue;
}
} else {
let last_idx = tabstops.len() - 1;
let last_tabstop = tabstop_anchor + tabstops[last_idx].1;
// if selection is forward but was moved to the right it is
// contained entirely in the replacement text, just do a point
// selection (fallback below)
if range.anchor <= last_tabstop {
// we can't properly compute the next grapheme here because
// the transaction hasn't been applied yet. That is not a problem
// though, because the range gets grapheme-aligned anyway, so just
// adding one will always cause head to be grapheme-aligned
// correctly when applied to the document
let range = Range::new(range.anchor, last_tabstop + 1);
mapped_selection.push(range);
let rem_tabstops = tabstops[..last_idx]
.iter()
.map(|tabstop| Range::point(tabstop_anchor + tabstop.0));
mapped_selection.extend(rem_tabstops);
continue;
}
};
let tabstops = tabstops
.iter()
.map(|tabstop| Range::point(tabstop_anchor + tabstop.0));
mapped_selection.extend(tabstops);
}
transaction.with_selection(Selection::new(mapped_selection, mapped_primary_idx))
}
pub fn generate_transaction_from_edits(
@ -288,6 +526,7 @@ pub enum MethodCall {
ApplyWorkspaceEdit(lsp::ApplyWorkspaceEditParams),
WorkspaceFolders,
WorkspaceConfiguration(lsp::ConfigurationParams),
RegisterCapability(lsp::RegistrationParams),
}
impl MethodCall {
@ -307,6 +546,10 @@ impl MethodCall {
let params: lsp::ConfigurationParams = params.parse()?;
Self::WorkspaceConfiguration(params)
}
lsp::request::RegisterCapability::METHOD => {
let params: lsp::RegistrationParams = params.parse()?;
Self::RegisterCapability(params)
}
_ => {
return Err(Error::Unhandled);
}
@ -427,6 +670,16 @@ impl Registry {
}
}
pub fn stop(&mut self, language_config: &LanguageConfiguration) {
let scope = language_config.scope.clone();
if let Some((_, client)) = self.inner.remove(&scope) {
tokio::spawn(async move {
let _ = client.force_shutdown().await;
});
}
}
pub fn get(
&mut self,
language_config: &LanguageConfiguration,
@ -606,16 +859,55 @@ mod tests {
}
test_case!("", (0, 0) => Some(0));
test_case!("", (0, 1) => None);
test_case!("", (0, 1) => Some(0));
test_case!("", (1, 0) => None);
test_case!("\n\n", (0, 0) => Some(0));
test_case!("\n\n", (1, 0) => Some(1));
test_case!("\n\n", (1, 1) => Some(2));
test_case!("\n\n", (1, 1) => Some(1));
test_case!("\n\n", (2, 0) => Some(2));
test_case!("\n\n", (3, 0) => None);
test_case!("test\n\n\n\ncase", (4, 3) => Some(11));
test_case!("test\n\n\n\ncase", (4, 4) => Some(12));
test_case!("test\n\n\n\ncase", (4, 5) => None);
test_case!("test\n\n\n\ncase", (4, 5) => Some(12));
test_case!("", (u32::MAX, u32::MAX) => None);
}
#[test]
fn emoji_format_gh_4791() {
use lsp_types::{Position, Range, TextEdit};
let edits = vec![
TextEdit {
range: Range {
start: Position {
line: 0,
character: 1,
},
end: Position {
line: 1,
character: 0,
},
},
new_text: "\n ".to_string(),
},
TextEdit {
range: Range {
start: Position {
line: 1,
character: 7,
},
end: Position {
line: 2,
character: 0,
},
},
new_text: "\n ".to_string(),
},
];
let mut source = Rope::from_str("[\n\"🇺🇸\",\n\"🎄\",\n]");
let transaction = generate_transaction_from_edits(&source, edits, OffsetEncoding::Utf8);
assert!(transaction.apply(&mut source));
}
}

File diff suppressed because it is too large

@ -0,0 +1,13 @@
[package]
name = "helix-parsec"
version = "0.6.0"
authors = ["Blaž Hrastnik <blaz@mxxn.io>"]
edition = "2021"
license = "MPL-2.0"
description = "Parser combinators for Helix"
categories = ["editor"]
repository = "https://github.com/helix-editor/helix"
homepage = "https://helix-editor.com"
include = ["src/**/*", "README.md"]
[dependencies]

@ -0,0 +1,574 @@
//! Parser-combinator functions
//!
//! This module provides parsers and parser combinators which can be used
//! together to build parsers by functional composition.
// This module implements parser combinators following https://bodil.lol/parser-combinators/.
// `sym` (trait implementation for `&'static str`), `map`, `pred` (filter), `one_or_more`,
// `zero_or_more`, as well as the `Parser` trait originate mostly from that post.
// The remaining parsers and parser combinators are either based on
// https://github.com/archseer/snippets.nvim/blob/a583da6ef130d2a4888510afd8c4e5ffd62d0dce/lua/snippet/parser.lua#L5-L138
// or are novel.
// When a parser matches the input successfully, it returns `Ok((next_input, some_value))`
// where the type of the returned value depends on the parser. If the parser fails to match,
// it returns `Err(input)`.
type ParseResult<'a, Output> = Result<(&'a str, Output), &'a str>;
/// A parser or parser-combinator.
///
/// Parser-combinators compose multiple parsers together to parse input.
/// For example, two basic parsers (`&'static str`s) may be combined with
/// a parser-combinator like [or] to produce a new parser.
///
/// ```
/// use helix_parsec::{or, Parser};
/// let foo = "foo"; // matches "foo" literally
/// let bar = "bar"; // matches "bar" literally
/// let foo_or_bar = or(foo, bar); // matches either "foo" or "bar"
/// assert_eq!(Ok(("", "foo")), foo_or_bar.parse("foo"));
/// assert_eq!(Ok(("", "bar")), foo_or_bar.parse("bar"));
/// assert_eq!(Err("baz"), foo_or_bar.parse("baz"));
/// ```
pub trait Parser<'a> {
type Output;
fn parse(&self, input: &'a str) -> ParseResult<'a, Self::Output>;
}
// Most parser-combinators are written as higher-order functions which take some
// parser(s) as input and return a new parser: a function that takes input and returns
// a parse result. The underlying implementation of [Parser::parse] for these functions
// is simply application.
#[doc(hidden)]
impl<'a, F, T> Parser<'a> for F
where
F: Fn(&'a str) -> ParseResult<T>,
{
type Output = T;
fn parse(&self, input: &'a str) -> ParseResult<'a, Self::Output> {
self(input)
}
}
/// A parser which matches the string literal exactly.
///
/// This parser succeeds if the next characters in the input are equal to the given
/// string literal.
///
/// Note that [str::parse] interferes with calling [Parser::parse] on string literals
/// directly; this trait implementation works when used within any parser combinator
/// but does not work on its own. To call [Parser::parse] on a parser for a string
/// literal, use the [token] parser.
///
/// # Examples
///
/// ```
/// use helix_parsec::{or, Parser};
/// let parser = or("foo", "bar");
/// assert_eq!(Ok(("", "foo")), parser.parse("foo"));
/// assert_eq!(Ok(("", "bar")), parser.parse("bar"));
/// assert_eq!(Err("baz"), parser.parse("baz"));
/// ```
impl<'a> Parser<'a> for &'static str {
type Output = &'a str;
fn parse(&self, input: &'a str) -> ParseResult<'a, Self::Output> {
match input.get(0..self.len()) {
Some(actual) if actual == *self => Ok((&input[self.len()..], &input[0..self.len()])),
_ => Err(input),
}
}
}
// Parsers
/// A parser which matches the given string literally.
///
/// This function is a convenience for interpreting string literals as parsers
/// and is only necessary to avoid conflict with [str::parse]. See the documentation
/// for the `&'static str` implementation of [Parser].
///
/// # Examples
///
/// ```
/// use helix_parsec::{token, Parser};
/// let parser = token("foo");
/// assert_eq!(Ok(("", "foo")), parser.parse("foo"));
/// assert_eq!(Err("bar"), parser.parse("bar"));
/// ```
pub fn token<'a>(literal: &'static str) -> impl Parser<'a, Output = &'a str> {
literal
}
/// A parser which matches all values until the specified pattern is found.
///
/// If the pattern is not found, this parser does not match. The input up to the
/// first character for which the pattern returns `true` is returned, but not that character itself.
///
/// If the pattern function returns true on the first input character, this
/// parser fails.
///
/// # Examples
///
/// ```
/// use helix_parsec::{take_until, Parser};
/// let parser = take_until(|c| c == '.');
/// assert_eq!(Ok((".bar", "foo")), parser.parse("foo.bar"));
/// assert_eq!(Err(".foo"), parser.parse(".foo"));
/// assert_eq!(Err("foo"), parser.parse("foo"));
/// ```
pub fn take_until<'a, F>(pattern: F) -> impl Parser<'a, Output = &'a str>
where
F: Fn(char) -> bool,
{
move |input: &'a str| match input.find(&pattern) {
Some(index) if index != 0 => Ok((&input[index..], &input[0..index])),
_ => Err(input),
}
}
/// A parser which matches all values until the specified pattern no longer matches.
///
/// This parser fails if no characters match the pattern at the start of the input.
///
/// # Examples
///
/// ```
/// use helix_parsec::{take_while, Parser};
/// let parser = take_while(|c| c == '1');
/// assert_eq!(Ok(("2", "11")), parser.parse("112"));
/// assert_eq!(Err("22"), parser.parse("22"));
/// ```
pub fn take_while<'a, F>(pattern: F) -> impl Parser<'a, Output = &'a str>
where
F: Fn(char) -> bool,
{
move |input: &'a str| match input
.char_indices()
.take_while(|(_p, c)| pattern(*c))
.last()
{
Some((index, c)) => {
let index = index + c.len_utf8();
Ok((&input[index..], &input[0..index]))
}
_ => Err(input),
}
}
// Variadic parser combinators
/// A parser combinator which matches a sequence of parsers in an all-or-nothing fashion.
///
/// The returned value is a tuple containing the outputs of all parsers in order. Each
/// parser in the sequence may be typed differently.
///
/// # Examples
///
/// ```
/// use helix_parsec::{seq, Parser};
/// let parser = seq!("<", "a", ">");
/// assert_eq!(Ok(("", ("<", "a", ">"))), parser.parse("<a>"));
/// assert_eq!(Err("<b>"), parser.parse("<b>"));
/// ```
#[macro_export]
macro_rules! seq {
($($parsers: expr),+ $(,)?) => {
($($parsers),+)
}
}
// Seq is implemented using trait-implementations of Parser for various size tuples.
// This allows sequences to be typed heterogeneously.
macro_rules! seq_impl {
($($parser:ident),+) => {
#[allow(non_snake_case)]
impl<'a, $($parser),+> Parser<'a> for ($($parser),+)
where
$($parser: Parser<'a>),+
{
type Output = ($($parser::Output),+);
fn parse(&self, input: &'a str) -> ParseResult<'a, Self::Output> {
let ($($parser),+) = self;
seq_body_impl!(input, input, $($parser),+ ; )
}
}
}
}
macro_rules! seq_body_impl {
($input:expr, $next_input:expr, $head:ident, $($tail:ident),+ ; $(,)? $($acc:ident),*) => {
match $head.parse($next_input) {
Ok((next_input, $head)) => seq_body_impl!($input, next_input, $($tail),+ ; $($acc),*, $head),
Err(_) => Err($input),
}
};
($input:expr, $next_input:expr, $last:ident ; $(,)? $($acc:ident),*) => {
match $last.parse($next_input) {
Ok((next_input, last)) => Ok((next_input, ($($acc),+, last))),
Err(_) => Err($input),
}
}
}
seq_impl!(A, B);
seq_impl!(A, B, C);
seq_impl!(A, B, C, D);
seq_impl!(A, B, C, D, E);
seq_impl!(A, B, C, D, E, F);
seq_impl!(A, B, C, D, E, F, G);
seq_impl!(A, B, C, D, E, F, G, H);
seq_impl!(A, B, C, D, E, F, G, H, I);
seq_impl!(A, B, C, D, E, F, G, H, I, J);
/// A parser combinator which chooses the first of the input parsers which matches
/// successfully.
///
/// All input parsers must have the same output type. This is a variadic form of [or].
///
/// # Examples
///
/// ```
/// use helix_parsec::{choice, or, Parser};
/// let parser = choice!("foo", "bar", "baz");
/// assert_eq!(Ok(("", "foo")), parser.parse("foo"));
/// assert_eq!(Ok(("", "bar")), parser.parse("bar"));
/// assert_eq!(Err("quiz"), parser.parse("quiz"));
/// ```
#[macro_export]
macro_rules! choice {
($parser: expr $(,)?) => {
$parser
};
($parser: expr, $($rest: expr),+ $(,)?) => {
or($parser, choice!($($rest),+))
}
}
// Ordinary parser combinators
/// A parser combinator which takes a parser as input and maps the output using the
/// given transformation function.
///
/// This corresponds to [Result::map]. The value is only mapped if the input parser
/// matches against input.
///
/// # Examples
///
/// ```
/// use helix_parsec::{map, Parser};
/// let parser = map("123", |s| s.parse::<i32>().unwrap());
/// assert_eq!(Ok(("", 123)), parser.parse("123"));
/// assert_eq!(Err("abc"), parser.parse("abc"));
/// ```
pub fn map<'a, P, F, T>(parser: P, map_fn: F) -> impl Parser<'a, Output = T>
where
P: Parser<'a>,
F: Fn(P::Output) -> T,
{
move |input| {
parser
.parse(input)
.map(|(next_input, result)| (next_input, map_fn(result)))
}
}
/// A parser combinator which succeeds if the given parser matches the input and
/// the given `filter_map_fn` returns `Some`.
///
/// # Examples
///
/// ```
/// use helix_parsec::{filter_map, take_until, Parser};
/// let parser = filter_map(take_until(|c| c == '.'), |s| s.parse::<i32>().ok());
/// assert_eq!(Ok((".456", 123)), parser.parse("123.456"));
/// assert_eq!(Err("abc.def"), parser.parse("abc.def"));
/// ```
pub fn filter_map<'a, P, F, T>(parser: P, filter_map_fn: F) -> impl Parser<'a, Output = T>
where
P: Parser<'a>,
F: Fn(P::Output) -> Option<T>,
{
move |input| match parser.parse(input) {
Ok((next_input, value)) => match filter_map_fn(value) {
Some(value) => Ok((next_input, value)),
None => Err(input),
},
Err(_) => Err(input),
}
}
/// A parser combinator which succeeds if the first given parser matches the input and
/// the second given parser also matches the first parser's output.
///
/// # Examples
///
/// ```
/// use helix_parsec::{reparse_as, take_until, one_or_more, Parser};
/// let parser = reparse_as(take_until(|c| c == '/'), one_or_more("a"));
/// assert_eq!(Ok(("/bb", vec!["a", "a"])), parser.parse("aa/bb"));
/// ```
pub fn reparse_as<'a, P1, P2, T>(parser1: P1, parser2: P2) -> impl Parser<'a, Output = T>
where
P1: Parser<'a, Output = &'a str>,
P2: Parser<'a, Output = T>,
{
filter_map(parser1, move |str| {
parser2.parse(str).map(|(_, value)| value).ok()
})
}
/// A parser combinator which only matches the input when the predicate function
/// returns true.
///
/// # Examples
///
/// ```
/// use helix_parsec::{filter, take_until, Parser};
/// let parser = filter(take_until(|c| c == '.'), |s| s == &"123");
/// assert_eq!(Ok((".456", "123")), parser.parse("123.456"));
/// assert_eq!(Err("456.123"), parser.parse("456.123"));
/// ```
pub fn filter<'a, P, F, T>(parser: P, pred_fn: F) -> impl Parser<'a, Output = T>
where
P: Parser<'a, Output = T>,
F: Fn(&P::Output) -> bool,
{
move |input| {
if let Ok((next_input, value)) = parser.parse(input) {
if pred_fn(&value) {
return Ok((next_input, value));
}
}
Err(input)
}
}
/// A parser combinator which matches either of the input parsers.
///
/// Both parsers must have the same output type. For a variadic form which
/// can take any number of parsers, use `choice!`.
///
/// # Examples
///
/// ```
/// use helix_parsec::{or, Parser};
/// let parser = or("foo", "bar");
/// assert_eq!(Ok(("", "foo")), parser.parse("foo"));
/// assert_eq!(Ok(("", "bar")), parser.parse("bar"));
/// assert_eq!(Err("baz"), parser.parse("baz"));
/// ```
pub fn or<'a, P1, P2, T>(parser1: P1, parser2: P2) -> impl Parser<'a, Output = T>
where
P1: Parser<'a, Output = T>,
P2: Parser<'a, Output = T>,
{
move |input| match parser1.parse(input) {
ok @ Ok(_) => ok,
Err(_) => parser2.parse(input),
}
}
/// A parser combinator which attempts to match the given parser, returning a
/// `None` output value if the parser does not match.
///
/// The parser produced with this combinator always succeeds. If the given parser
/// succeeds, `Some(value)` is returned where `value` is the output of the given
/// parser. Otherwise, `None`.
///
/// # Examples
///
/// ```
/// use helix_parsec::{optional, Parser};
/// let parser = optional("foo");
/// assert_eq!(Ok(("bar", Some("foo"))), parser.parse("foobar"));
/// assert_eq!(Ok(("bar", None)), parser.parse("bar"));
/// ```
pub fn optional<'a, P, T>(parser: P) -> impl Parser<'a, Output = Option<T>>
where
P: Parser<'a, Output = T>,
{
move |input| match parser.parse(input) {
Ok((next_input, value)) => Ok((next_input, Some(value))),
Err(_) => Ok((input, None)),
}
}
/// A parser combinator which runs the given parsers in sequence and returns the
/// value of `left` if both are matched.
///
/// This is useful for two-element sequences in which you only want the output
/// value of the `left` parser.
///
/// # Examples
///
/// ```
/// use helix_parsec::{left, Parser};
/// let parser = left("foo", "bar");
/// assert_eq!(Ok(("", "foo")), parser.parse("foobar"));
/// ```
pub fn left<'a, L, R, T>(left: L, right: R) -> impl Parser<'a, Output = T>
where
L: Parser<'a, Output = T>,
R: Parser<'a>,
{
map(seq!(left, right), |(left_value, _)| left_value)
}
/// A parser combinator which runs the given parsers in sequence and returns the
/// value of `right` if both are matched.
///
/// This is useful for two-element sequences in which you only want the output
/// value of the `right` parser.
///
/// # Examples
///
/// ```
/// use helix_parsec::{right, Parser};
/// let parser = right("foo", "bar");
/// assert_eq!(Ok(("", "bar")), parser.parse("foobar"));
/// ```
pub fn right<'a, L, R, T>(left: L, right: R) -> impl Parser<'a, Output = T>
where
L: Parser<'a>,
R: Parser<'a, Output = T>,
{
map(seq!(left, right), |(_, right_value)| right_value)
}
/// A parser combinator which matches the given parser against the input zero or
/// more times.
///
/// This parser always succeeds and returns the empty Vec when it matched zero
/// times.
///
/// # Examples
///
/// ```
/// use helix_parsec::{zero_or_more, Parser};
/// let parser = zero_or_more("a");
/// assert_eq!(Ok(("", vec![])), parser.parse(""));
/// assert_eq!(Ok(("", vec!["a"])), parser.parse("a"));
/// assert_eq!(Ok(("", vec!["a", "a"])), parser.parse("aa"));
/// assert_eq!(Ok(("bb", vec![])), parser.parse("bb"));
/// ```
pub fn zero_or_more<'a, P, T>(parser: P) -> impl Parser<'a, Output = Vec<T>>
where
P: Parser<'a, Output = T>,
{
let parser = non_empty(parser);
move |mut input| {
let mut values = Vec::new();
while let Ok((next_input, value)) = parser.parse(input) {
input = next_input;
values.push(value);
}
Ok((input, values))
}
}
/// A parser combinator which matches the given parser against the input one or
/// more times.
///
/// This parser combinator acts the same as [zero_or_more] but must match at
/// least once.
///
/// # Examples
///
/// ```
/// use helix_parsec::{one_or_more, Parser};
/// let parser = one_or_more("a");
/// assert_eq!(Err(""), parser.parse(""));
/// assert_eq!(Ok(("", vec!["a"])), parser.parse("a"));
/// assert_eq!(Ok(("", vec!["a", "a"])), parser.parse("aa"));
/// assert_eq!(Err("bb"), parser.parse("bb"));
/// ```
pub fn one_or_more<'a, P, T>(parser: P) -> impl Parser<'a, Output = Vec<T>>
where
P: Parser<'a, Output = T>,
{
let parser = non_empty(parser);
move |mut input| {
let mut values = Vec::new();
match parser.parse(input) {
Ok((next_input, value)) => {
input = next_input;
values.push(value);
}
Err(err) => return Err(err),
}
while let Ok((next_input, value)) = parser.parse(input) {
input = next_input;
values.push(value);
}
Ok((input, values))
}
}
/// A parser combinator which matches one or more instances of the given parser
/// interspersed with the separator parser.
///
/// Output values of the separator parser are discarded.
///
/// This is typically used to parse function arguments or list items.
///
/// # Examples
///
/// ```rust
/// use helix_parsec::{sep, Parser};
/// let parser = sep("a", ",");
/// assert_eq!(Ok(("", vec!["a", "a", "a"])), parser.parse("a,a,a"));
/// ```
pub fn sep<'a, P, S, T>(parser: P, separator: S) -> impl Parser<'a, Output = Vec<T>>
where
P: Parser<'a, Output = T>,
S: Parser<'a>,
{
move |mut input| {
let mut values = Vec::new();
match parser.parse(input) {
Ok((next_input, value)) => {
input = next_input;
values.push(value);
}
Err(err) => return Err(err),
}
loop {
match separator.parse(input) {
Ok((next_input, _)) => input = next_input,
Err(_) => break,
}
match parser.parse(input) {
Ok((next_input, value)) => {
input = next_input;
values.push(value);
}
Err(_) => break,
}
}
Ok((input, values))
}
}
pub fn non_empty<'a, T>(p: impl Parser<'a, Output = T>) -> impl Parser<'a, Output = T> {
move |input| {
let (new_input, res) = p.parse(input)?;
if new_input.len() == input.len() {
Err(input)
} else {
Ok((new_input, res))
}
}
}
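An illustrative composition of these combinators (the placeholder grammar here is an assumption for demonstration, not the grammar used by helix-lsp's snippet parser):

```rust
use helix_parsec::{filter_map, map, seq, take_until, take_while, Parser};

// Tabstop index: one or more ASCII digits parsed as usize.
let index = filter_map(take_while(|c: char| c.is_ascii_digit()), |s: &str| {
    s.parse::<usize>().ok()
});
// Placeholder body: everything up to the closing brace.
let body = take_until(|c| c == '}');
// "${1:foo}" -> (1, "foo")
let placeholder = map(seq!("${", index, ":", body, "}"), |(_, i, _, b, _)| (i, b));
assert_eq!(Ok(("", (1, "foo"))), placeholder.parse("${1:foo}"));
```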

@ -10,11 +10,7 @@ repository = "https://github.com/helix-editor/helix"
homepage = "https://helix-editor.com"
include = ["src/**/*", "README.md"]
default-run = "hx"
rust-version = "1.57"
[package.metadata.nix]
build = true
app = true
rust-version = "1.65"
[features]
default = ["git"]
@ -37,11 +33,11 @@ helix-loader = { version = "0.6", path = "../helix-loader" }
anyhow = "1"
once_cell = "1.17"
which = "4.2"
which = "4.4"
tokio = { version = "1", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot"] }
tui = { path = "../helix-tui", package = "helix-tui", default-features = false, features = ["crossterm"] }
crossterm = { version = "0.25", features = ["event-stream"] }
crossterm = { version = "0.26", features = ["event-stream"] }
signal-hook = "0.3"
tokio-stream = "0.1"
futures-util = { version = "0.3", features = ["std", "async-await"], default-features = false }
@ -61,22 +57,23 @@ pulldown-cmark = { version = "0.9", default-features = false }
content_inspector = "0.2.4"
# config
toml = "0.5"
toml = "0.7"
serde_json = "1.0"
serde = { version = "1.0", features = ["derive"] }
# ripgrep for global search
grep-regex = "0.1.10"
grep-searcher = "0.1.10"
grep-regex = "0.1.11"
grep-searcher = "0.1.11"
[target.'cfg(not(windows))'.dependencies] # https://github.com/vorner/signal-hook/issues/100
signal-hook-tokio = { version = "0.3", features = ["futures-v0_3"] }
libc = "0.2.140"
[build-dependencies]
helix-loader = { version = "0.6", path = "../helix-loader" }
[dev-dependencies]
smallvec = "1.10"
indoc = "1.0.8"
tempfile = "3.3.0"
indoc = "2.0.1"
tempfile = "3.4.0"

@ -30,26 +30,17 @@ use crate::{
use log::{debug, error, warn};
use std::{
io::{stdin, stdout, Write},
io::{stdin, stdout},
path::Path,
sync::Arc,
time::{Duration, Instant},
};
use anyhow::{Context, Error};
use crossterm::{
event::{
DisableBracketedPaste, DisableFocusChange, DisableMouseCapture, EnableBracketedPaste,
EnableFocusChange, EnableMouseCapture, Event as CrosstermEvent,
},
execute, terminal,
tty::IsTty,
};
use crossterm::{event::Event as CrosstermEvent, tty::IsTty};
#[cfg(not(windows))]
use {
signal_hook::{consts::signal, low_level},
signal_hook_tokio::Signals,
};
use {signal_hook::consts::signal, signal_hook_tokio::Signals};
#[cfg(windows)]
type Signals = futures_util::stream::Empty<()>;
@ -62,10 +53,12 @@ use tui::backend::CrosstermBackend;
use tui::backend::TestBackend;
#[cfg(not(feature = "integration"))]
type Terminal = tui::terminal::Terminal<CrosstermBackend<std::io::Stdout>>;
type TerminalBackend = CrosstermBackend<std::io::Stdout>;
#[cfg(feature = "integration")]
type Terminal = tui::terminal::Terminal<TestBackend>;
type TerminalBackend = TestBackend;
type Terminal = tui::terminal::Terminal<TerminalBackend>;
pub struct Application {
compositor: Compositor,
@ -107,23 +100,6 @@ fn setup_integration_logging() {
.apply();
}
fn restore_term() -> Result<(), Error> {
let mut stdout = stdout();
// reset cursor shape
write!(stdout, "\x1B[0 q")?;
// Ignore errors on disabling, this might trigger on windows if we call
// disable without calling enable previously
let _ = execute!(stdout, DisableMouseCapture);
execute!(
stdout,
DisableBracketedPaste,
DisableFocusChange,
terminal::LeaveAlternateScreen
)?;
terminal::disable_raw_mode()?;
Ok(())
}
impl Application {
pub fn new(
args: Args,
@ -135,10 +111,9 @@ impl Application {
use helix_view::editor::Action;
let theme_loader = std::sync::Arc::new(theme::Loader::new(
&helix_loader::config_dir(),
&helix_loader::runtime_dir(),
));
let mut theme_parent_dirs = vec![helix_loader::config_dir()];
theme_parent_dirs.extend(helix_loader::runtime_dirs().iter().cloned());
let theme_loader = std::sync::Arc::new(theme::Loader::new(&theme_parent_dirs));
let true_color = config.editor.true_color || crate::true_color();
let theme = config
@ -159,7 +134,7 @@ impl Application {
let syn_loader = std::sync::Arc::new(syntax::Loader::new(syn_loader_conf));
#[cfg(not(feature = "integration"))]
let backend = CrosstermBackend::new(stdout());
let backend = CrosstermBackend::new(stdout(), &config.editor);
#[cfg(feature = "integration")]
let backend = TestBackend::new(120, 150);
@ -172,7 +147,7 @@ impl Application {
area,
theme_loader.clone(),
syn_loader.clone(),
Box::new(Map::new(Arc::clone(&config), |config: &Config| {
Arc::new(Map::new(Arc::clone(&config), |config: &Config| {
&config.editor
})),
);
@ -184,7 +159,7 @@ impl Application {
compositor.push(editor_view);
if args.load_tutor {
let path = helix_loader::runtime_dir().join("tutor");
let path = helix_loader::runtime_file(Path::new("tutor"));
editor.open(&path, Action::VerticalSplit)?;
// Unset path to prevent accidentally saving to the original tutor file.
doc_mut!(editor).set_path(None)?;
@ -277,10 +252,6 @@ impl Application {
Ok(app)
}
#[cfg(feature = "integration")]
async fn render(&mut self) {}
#[cfg(not(feature = "integration"))]
async fn render(&mut self) {
let mut cx = crate::compositor::Context {
editor: &mut self.editor,
@ -309,8 +280,10 @@ impl Application {
let surface = self.terminal.current_buffer_mut();
self.compositor.render(area, surface, &mut cx);
let (pos, kind) = self.compositor.cursor(area, &self.editor);
// reset cursor cache
self.editor.cursor_cache.set(None);
let pos = pos.map(|pos| (pos.col as u16, pos.row as u16));
self.terminal.draw(pos, kind).unwrap();
}
@ -343,12 +316,12 @@ impl Application {
tokio::select! {
biased;
Some(event) = input_stream.next() => {
self.handle_terminal_events(event).await;
}
Some(signal) = self.signals.next() => {
self.handle_signals(signal).await;
}
Some(event) = input_stream.next() => {
self.handle_terminal_events(event).await;
}
Some(callback) = self.jobs.futures.next() => {
self.jobs.handle_callback(&mut self.editor, &mut self.compositor, callback);
self.render().await;
@ -395,6 +368,13 @@ impl Application {
// Update all the relevant members in the editor after updating
// the configuration.
self.editor.refresh_config();
// reset view position in case softwrap was enabled/disabled
let scrolloff = self.editor.config().scrolloff;
for (view, _) in self.editor.tree.views_mut() {
let doc = &self.editor.documents[&view.doc];
view.ensure_cursor_in_view(doc, scrolloff)
}
}
/// refresh language config after config change
@ -463,14 +443,33 @@ impl Application {
pub async fn handle_signals(&mut self, signal: i32) {
match signal {
signal::SIGTSTP => {
// restore cursor
use helix_view::graphics::CursorKind;
self.terminal
.backend_mut()
.show_cursor(CursorKind::Block)
.ok();
restore_term().unwrap();
low_level::emulate_default_handler(signal::SIGTSTP).unwrap();
self.restore_term().unwrap();
// SAFETY:
//
// - helix must have permissions to send signals to all processes in its signal
// group, either by already having the requisite permission, or by having the
// user's UID / EUID / SUID match that of the receiving process(es).
let res = unsafe {
// A pid of 0 sends the signal to the entire process group, allowing the user to
// regain control of their terminal if the editor was spawned under another process
// (e.g. when running `git commit`).
//
// We have to send SIGSTOP (not SIGTSTP) to the entire process group, because,
// as mentioned above, the terminal will get stuck if `helix` was spawned from
// an external process and that process waits for `helix` to complete. This may
// be an issue with signal-hook-tokio, but the author of signal-hook believes it
// could be a tokio issue instead:
// https://github.com/vorner/signal-hook/issues/132
libc::kill(0, signal::SIGSTOP)
};
if res != 0 {
let err = std::io::Error::last_os_error();
eprintln!("{}", err);
let res = err.raw_os_error().unwrap_or(1);
std::process::exit(res);
}
}
signal::SIGCONT => {
self.claim_term().await.unwrap();
@ -629,6 +628,11 @@ impl Application {
self.compositor
.handle_event(&Event::Resize(width, height), &mut cx)
}
// Ignore keyboard release events.
CrosstermEvent::Key(crossterm::event::KeyEvent {
kind: crossterm::event::KeyEventKind::Release,
..
}) => false,
event => self.compositor.handle_event(&event.into(), &mut cx),
};
@ -698,8 +702,23 @@ impl Application {
}
}
Notification::PublishDiagnostics(mut params) => {
let path = params.uri.to_file_path().unwrap();
let doc = self.editor.document_by_path_mut(&path);
let path = match params.uri.to_file_path() {
Ok(path) => path,
Err(_) => {
log::error!("Unsupported file URI: {}", params.uri);
return;
}
};
let doc = self.editor.document_by_path_mut(&path).filter(|doc| {
if let Some(version) = params.version {
if version != doc.version() {
log::info!("Version ({version}) is out of date for {path:?} (expected {}), dropping PublishDiagnostics notification", doc.version());
return false;
}
}
true
});
if let Some(doc) = doc {
let lang_conf = doc.language_config();
@ -940,24 +959,32 @@ impl Application {
Call::MethodCall(helix_lsp::jsonrpc::MethodCall {
method, params, id, ..
}) => {
let call = match MethodCall::parse(&method, params) {
Ok(call) => call,
let reply = match MethodCall::parse(&method, params) {
Err(helix_lsp::Error::Unhandled) => {
error!("Language Server: Method not found {}", method);
return;
error!(
"Language Server: Method {} not found in request {}",
method, id
);
Err(helix_lsp::jsonrpc::Error {
code: helix_lsp::jsonrpc::ErrorCode::MethodNotFound,
message: format!("Method not found: {}", method),
data: None,
})
}
Err(err) => {
log::error!(
"received malformed method call from Language Server: {}: {}",
"Language Server: Received malformed method call {} in request {}: {}",
method,
id,
err
);
return;
Err(helix_lsp::jsonrpc::Error {
code: helix_lsp::jsonrpc::ErrorCode::ParseError,
message: format!("Malformed method call: {}", method),
data: None,
})
}
};
let reply = match call {
MethodCall::WorkDoneProgressCreate(params) => {
Ok(MethodCall::WorkDoneProgressCreate(params)) => {
self.lsp_progress.create(server_id, params.token);
let editor_view = self
@ -971,26 +998,29 @@ impl Application {
Ok(serde_json::Value::Null)
}
MethodCall::ApplyWorkspaceEdit(params) => {
apply_workspace_edit(
Ok(MethodCall::ApplyWorkspaceEdit(params)) => {
let res = apply_workspace_edit(
&mut self.editor,
helix_lsp::OffsetEncoding::Utf8,
&params.edit,
);
Ok(json!(lsp::ApplyWorkspaceEditResponse {
applied: true,
failure_reason: None,
failed_change: None,
applied: res.is_ok(),
failure_reason: res.as_ref().err().map(|err| err.kind.to_string()),
failed_change: res
.as_ref()
.err()
.map(|err| err.failed_change_idx as u32),
}))
}
MethodCall::WorkspaceFolders => {
Ok(MethodCall::WorkspaceFolders) => {
let language_server =
self.editor.language_servers.get_by_id(server_id).unwrap();
Ok(json!(language_server.workspace_folders()))
}
MethodCall::WorkspaceConfiguration(params) => {
Ok(MethodCall::WorkspaceConfiguration(params)) => {
let result: Vec<_> = params
.items
.iter()
@ -1018,6 +1048,17 @@ impl Application {
.collect();
Ok(json!(result))
}
Ok(MethodCall::RegisterCapability(_params)) => {
log::warn!("Ignoring a client/registerCapability request because dynamic capability registration is not enabled. Please report this upstream to the language server");
// Language Servers based on the `vscode-languageserver-node` library often send
// client/registerCapability even though we do not enable dynamic registration
// for any capabilities. We should send a MethodNotFound JSONRPC error in this
// case but that rejects the registration promise in the server which causes an
// exit. So we work around this by ignoring the request and sending back an OK
// response.
Ok(serde_json::Value::Null)
}
};
let language_server = match self.editor.language_servers.get_by_id(server_id) {
@ -1034,24 +1075,19 @@ impl Application {
}
}
async fn claim_term(&mut self) -> Result<(), Error> {
async fn claim_term(&mut self) -> std::io::Result<()> {
let terminal_config = self.config.load().editor.clone().into();
self.terminal.claim(terminal_config)
}
fn restore_term(&mut self) -> std::io::Result<()> {
let terminal_config = self.config.load().editor.clone().into();
use helix_view::graphics::CursorKind;
terminal::enable_raw_mode()?;
if self.terminal.cursor_kind() == CursorKind::Hidden {
self.terminal.backend_mut().hide_cursor().ok();
}
let mut stdout = stdout();
execute!(
stdout,
terminal::EnterAlternateScreen,
EnableBracketedPaste,
EnableFocusChange
)?;
execute!(stdout, terminal::Clear(terminal::ClearType::All))?;
if self.config.load().editor.mouse {
execute!(stdout, EnableMouseCapture)?;
}
Ok(())
self.terminal
.backend_mut()
.show_cursor(CursorKind::Block)
.ok();
self.terminal.restore(terminal_config)
}
pub async fn run<S>(&mut self, input_stream: &mut S) -> Result<i32, Error>
@ -1066,7 +1102,7 @@ impl Application {
// We can't handle errors properly inside this closure. And it's
// probably not a good idea to `unwrap()` inside a panic handler.
// So we just ignore the `Result`.
let _ = restore_term();
let _ = TerminalBackend::force_restore();
hook(info);
}));
@ -1074,13 +1110,7 @@ impl Application {
let close_errs = self.close().await;
// restore cursor
use helix_view::graphics::CursorKind;
self.terminal
.backend_mut()
.show_cursor(CursorKind::Block)
.ok();
restore_term()?;
self.restore_term()?;
for err in close_errs {
self.editor.exit_code = 1;

File diff suppressed because it is too large

@ -12,7 +12,7 @@ use helix_view::editor::Breakpoint;
use serde_json::{to_value, Value};
use tokio_stream::wrappers::UnboundedReceiverStream;
use tui::text::Spans;
use tui::{text::Spans, widgets::Row};
use std::collections::HashMap;
use std::future::Future;
@ -25,7 +25,7 @@ use helix_view::handlers::dap::{breakpoints_changed, jump_to_stack_frame, select
impl ui::menu::Item for StackFrame {
type Data = ();
fn label(&self, _data: &Self::Data) -> Spans {
fn format(&self, _data: &Self::Data) -> Row {
self.name.as_str().into() // TODO: include thread_states in the label
}
}
@ -33,7 +33,7 @@ impl ui::menu::Item for StackFrame {
impl ui::menu::Item for DebugTemplate {
type Data = ();
fn label(&self, _data: &Self::Data) -> Spans {
fn format(&self, _data: &Self::Data) -> Row {
self.name.as_str().into()
}
}
@ -41,7 +41,7 @@ impl ui::menu::Item for DebugTemplate {
impl ui::menu::Item for Thread {
type Data = ThreadStates;
fn label(&self, thread_states: &Self::Data) -> Spans {
fn format(&self, thread_states: &Self::Data) -> Row {
format!(
"{} ({})",
self.name,
@ -289,6 +289,36 @@ pub fn dap_launch(cx: &mut Context) {
))));
}
pub fn dap_restart(cx: &mut Context) {
let debugger = match &cx.editor.debugger {
Some(debugger) => debugger,
None => {
cx.editor.set_error("Debugger is not running");
return;
}
};
if !debugger
.capabilities()
.supports_restart_request
.unwrap_or(false)
{
cx.editor
.set_error("Debugger does not support session restarts");
return;
}
if debugger.starting_request_args().is_none() {
cx.editor
.set_error("No arguments found with which to restart the session");
return;
}
dap_callback(
cx.jobs,
debugger.restart(),
|editor, _compositor, _resp: ()| editor.set_status("Debugging session restarted"),
);
}
fn debug_parameter_prompt(
completions: Vec<DebugConfigCompletion>,
config_name: String,
@ -475,19 +505,36 @@ pub fn dap_variables(cx: &mut Context) {
if debugger.thread_id.is_none() {
cx.editor
.set_status("Cannot access variables while target is running");
.set_status("Cannot access variables while target is running.");
return;
}
let (frame, thread_id) = match (debugger.active_frame, debugger.thread_id) {
(Some(frame), Some(thread_id)) => (frame, thread_id),
_ => {
cx.editor
.set_status("Cannot find current stack frame to access variables");
.set_status("Cannot find current stack frame to access variables.");
return;
}
};
let thread_frame = match debugger.stack_frames.get(&thread_id) {
Some(thread_frame) => thread_frame,
None => {
cx.editor
.set_error(format!("Failed to get stack frame for thread: {thread_id:?}"));
return;
}
};
let stack_frame = match thread_frame.get(frame) {
Some(stack_frame) => stack_frame,
None => {
cx.editor
.set_error(format!("Failed to get stack frame for thread {thread_id:?} and frame {frame}."));
return;
}
};
let frame_id = debugger.stack_frames[&thread_id][frame].id;
let frame_id = stack_frame.id;
let scopes = match block_on(debugger.scopes(frame_id)) {
Ok(s) => s,
Err(e) => {
@ -539,7 +586,7 @@ pub fn dap_variables(cx: &mut Context) {
pub fn dap_terminate(cx: &mut Context) {
let debugger = debugger!(cx.editor);
let request = debugger.disconnect();
let request = debugger.disconnect(None);
dap_callback(cx.jobs, request, |editor, _compositor, _response: ()| {
// editor.set_error(format!("Failed to disconnect: {}", e));
editor.debugger = None;

@ -1,16 +1,27 @@
use futures_util::FutureExt;
use helix_lsp::{
block_on,
lsp::{self, CodeAction, CodeActionOrCommand, DiagnosticSeverity, NumberOrString},
util::{diagnostic_to_lsp_diagnostic, lsp_pos_to_pos, lsp_range_to_range, range_to_lsp_range},
lsp::{
self, CodeAction, CodeActionOrCommand, CodeActionTriggerKind, DiagnosticSeverity,
NumberOrString,
},
util::{diagnostic_to_lsp_diagnostic, lsp_range_to_range, range_to_lsp_range},
OffsetEncoding,
};
use tui::text::{Span, Spans};
use tui::{
text::{Span, Spans},
widgets::Row,
};
use super::{align_view, push_jump, Align, Context, Editor, Open};
use helix_core::{path, Selection};
use helix_view::{apply_transaction, document::Mode, editor::Action, theme::Style};
use helix_core::{path, text_annotations::InlineAnnotation, Selection};
use helix_view::{
document::{DocumentInlayHints, DocumentInlayHintsId, Mode},
editor::Action,
theme::Style,
Document, View,
};
use crate::{
compositor::{self, Compositor},
@ -21,7 +32,7 @@ use crate::{
};
use std::{
borrow::Cow, cmp::Ordering, collections::BTreeMap, fmt::Write, path::PathBuf, sync::Arc,
cmp::Ordering, collections::BTreeMap, fmt::Write, future::Future, path::PathBuf, sync::Arc,
};
/// Gets the language server that is attached to a document, and
@ -46,7 +57,7 @@ impl ui::menu::Item for lsp::Location {
/// Current working directory.
type Data = PathBuf;
fn label(&self, cwdir: &Self::Data) -> Spans {
fn format(&self, cwdir: &Self::Data) -> Row {
// The preallocation here will overallocate a few characters since it will account for the
// URL's scheme, which is not used most of the time since that scheme will be "file://".
// Those extra chars will be used to avoid allocating when writing the line number (in the
@ -80,7 +91,7 @@ impl ui::menu::Item for lsp::SymbolInformation {
/// Path to currently focussed document
type Data = Option<lsp::Url>;
fn label(&self, current_doc_path: &Self::Data) -> Spans {
fn format(&self, current_doc_path: &Self::Data) -> Row {
if current_doc_path.as_ref() == Some(&self.location.uri) {
self.name.as_str().into()
} else {
@ -110,7 +121,7 @@ struct PickerDiagnostic {
impl ui::menu::Item for PickerDiagnostic {
type Data = (DiagnosticStyles, DiagnosticsFormat);
fn label(&self, (styles, format): &Self::Data) -> Spans {
fn format(&self, (styles, format): &Self::Data) -> Row {
let mut style = self
.diag
.severity
@ -126,20 +137,17 @@ impl ui::menu::Item for PickerDiagnostic {
// remove background as it is distracting in the picker list
style.bg = None;
let code: Cow<'_, str> = self
.diag
.code
.as_ref()
.map(|c| match c {
NumberOrString::Number(n) => n.to_string().into(),
NumberOrString::String(s) => s.as_str().into(),
})
.unwrap_or_default();
let code = match self.diag.code.as_ref() {
Some(NumberOrString::Number(n)) => format!(" ({n})"),
Some(NumberOrString::String(s)) => format!(" ({s})"),
None => String::new(),
};
let path = match format {
DiagnosticsFormat::HideSourcePath => String::new(),
DiagnosticsFormat::ShowSourcePath => {
let path = path::get_truncated_path(self.url.path());
let file_path = self.url.to_file_path().unwrap();
let path = path::get_truncated_path(file_path);
format!("{}: ", path.to_string_lossy())
}
};
@ -149,6 +157,7 @@ impl ui::menu::Item for PickerDiagnostic {
Span::styled(&self.diag.message, style),
Span::styled(code, style),
])
.into()
}
}
@ -188,15 +197,15 @@ fn jump_to_location(
}
}
let (view, doc) = current!(editor);
let definition_pos = location.range.start;
// TODO: convert inside server
let new_pos = if let Some(new_pos) = lsp_pos_to_pos(doc.text(), definition_pos, offset_encoding)
{
new_pos
} else {
return;
};
doc.set_selection(view.id, Selection::point(new_pos));
let new_range =
if let Some(new_range) = lsp_range_to_range(doc.text(), location.range, offset_encoding) {
new_range
} else {
log::warn!("lsp position out of bounds - {:?}", location.range);
return;
};
doc.set_selection(view.id, Selection::single(new_range.anchor, new_range.head));
align_view(doc, view, Align::Center);
}
@ -467,7 +476,7 @@ pub fn workspace_diagnostics_picker(cx: &mut Context) {
impl ui::menu::Item for lsp::CodeActionOrCommand {
type Data = ();
fn label(&self, _data: &Self::Data) -> Spans {
fn format(&self, _data: &Self::Data) -> Row {
match self {
lsp::CodeActionOrCommand::CodeAction(action) => action.title.as_str().into(),
lsp::CodeActionOrCommand::Command(command) => command.title.as_str().into(),
@ -557,6 +566,7 @@ pub fn code_action(cx: &mut Context) {
.map(|diag| diagnostic_to_lsp_diagnostic(doc.text(), diag, offset_encoding))
.collect(),
only: None,
trigger_kind: Some(CodeActionTriggerKind::INVOKED),
},
) {
Some(future) => future,
@ -641,7 +651,7 @@ pub fn code_action(cx: &mut Context) {
log::debug!("code action: {:?}", code_action);
if let Some(ref workspace_edit) = code_action.edit {
log::debug!("edit: {:?}", workspace_edit);
apply_workspace_edit(editor, offset_encoding, workspace_edit);
let _ = apply_workspace_edit(editor, offset_encoding, workspace_edit);
}
// if code action provides both edit and command first the edit
@ -662,7 +672,7 @@ pub fn code_action(cx: &mut Context) {
impl ui::menu::Item for lsp::Command {
type Data = ();
fn label(&self, _data: &Self::Data) -> Spans {
fn format(&self, _data: &Self::Data) -> Row {
self.title.as_str().into()
}
}
@ -747,19 +757,50 @@ pub fn apply_document_resource_op(op: &lsp::ResourceOp) -> std::io::Result<()> {
}
}
#[derive(Debug)]
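/// An error raised while applying a workspace edit, recording which change in
/// the edit failed to apply and why.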
pub struct ApplyEditError {
pub kind: ApplyEditErrorKind,
pub failed_change_idx: usize,
}
#[derive(Debug)]
pub enum ApplyEditErrorKind {
DocumentChanged,
FileNotFound,
UnknownURISchema,
IoError(std::io::Error),
// TODO: check edits before applying and propagate failure
// InvalidEdit,
}
impl ToString for ApplyEditErrorKind {
fn to_string(&self) -> String {
match self {
ApplyEditErrorKind::DocumentChanged => "document has changed".to_string(),
ApplyEditErrorKind::FileNotFound => "file not found".to_string(),
ApplyEditErrorKind::UnknownURISchema => "URI schema not supported".to_string(),
ApplyEditErrorKind::IoError(err) => err.to_string(),
}
}
}
/// TODO: make this transactional (and set failureMode to transactional)
pub fn apply_workspace_edit(
editor: &mut Editor,
offset_encoding: OffsetEncoding,
workspace_edit: &lsp::WorkspaceEdit,
) {
let mut apply_edits = |uri: &helix_lsp::Url, text_edits: Vec<lsp::TextEdit>| {
) -> Result<(), ApplyEditError> {
let mut apply_edits = |uri: &helix_lsp::Url,
version: Option<i32>,
text_edits: Vec<lsp::TextEdit>|
-> Result<(), ApplyEditErrorKind> {
let path = match uri.to_file_path() {
Ok(path) => path,
Err(_) => {
let err = format!("unable to convert URI to filepath: {}", uri);
log::error!("{}", err);
editor.set_error(err);
return;
return Err(ApplyEditErrorKind::UnknownURISchema);
}
};
@ -770,11 +811,19 @@ pub fn apply_workspace_edit(
let err = format!("failed to open document: {}: {}", uri, err);
log::error!("{}", err);
editor.set_error(err);
return;
return Err(ApplyEditErrorKind::FileNotFound);
}
};
let doc = doc_mut!(editor, &doc_id);
if let Some(version) = version {
if version != doc.version() {
let err = format!("outdated workspace edit for {path:?}");
log::error!("{err}, expected {} but got {version}", doc.version());
editor.set_error(err);
return Err(ApplyEditErrorKind::DocumentChanged);
}
}
// Need to determine a view for apply/append_changes_to_history
let selections = doc.selections();
@ -796,33 +845,15 @@ pub fn apply_workspace_edit(
offset_encoding,
);
let view = view_mut!(editor, view_id);
apply_transaction(&transaction, doc, view);
doc.apply(&transaction, view.id);
doc.append_changes_to_history(view);
Ok(())
};
if let Some(ref changes) = workspace_edit.changes {
log::debug!("workspace changes: {:?}", changes);
for (uri, text_edits) in changes {
let text_edits = text_edits.to_vec();
apply_edits(uri, text_edits)
}
return;
// Not sure if it works properly, it'll be safer to just panic here to avoid breaking some parts of code on which code actions will be used
// TODO: find some example that uses workspace changes, and test it
// for (url, edits) in changes.iter() {
// let file_path = url.origin().ascii_serialization();
// let file_path = std::path::PathBuf::from(file_path);
// let file = std::fs::File::open(file_path).unwrap();
// let mut text = Rope::from_reader(file).unwrap();
// let transaction = edits_to_changes(&text, edits);
// transaction.apply(&mut text);
// }
}
if let Some(ref document_changes) = workspace_edit.document_changes {
match document_changes {
lsp::DocumentChanges::Edits(document_edits) => {
for document_edit in document_edits {
for (i, document_edit) in document_edits.iter().enumerate() {
let edits = document_edit
.edits
.iter()
@ -834,15 +865,26 @@ pub fn apply_workspace_edit(
})
.cloned()
.collect();
apply_edits(&document_edit.text_document.uri, edits);
apply_edits(
&document_edit.text_document.uri,
document_edit.text_document.version,
edits,
)
.map_err(|kind| ApplyEditError {
kind,
failed_change_idx: i,
})?;
}
}
lsp::DocumentChanges::Operations(operations) => {
log::debug!("document changes - operations: {:?}", operations);
for operation in operations {
for (i, operation) in operations.iter().enumerate() {
match operation {
lsp::DocumentChangeOperation::Op(op) => {
apply_document_resource_op(op).unwrap();
apply_document_resource_op(op).map_err(|io| ApplyEditError {
kind: ApplyEditErrorKind::IoError(io),
failed_change_idx: i,
})?;
}
lsp::DocumentChangeOperation::Edit(document_edit) => {
@ -857,13 +899,36 @@ pub fn apply_workspace_edit(
})
.cloned()
.collect();
apply_edits(&document_edit.text_document.uri, edits);
apply_edits(
&document_edit.text_document.uri,
document_edit.text_document.version,
edits,
)
.map_err(|kind| ApplyEditError {
kind,
failed_change_idx: i,
})?;
}
}
}
}
}
return Ok(());
}
if let Some(ref changes) = workspace_edit.changes {
log::debug!("workspace changes: {:?}", changes);
for (i, (uri, text_edits)) in changes.iter().enumerate() {
let text_edits = text_edits.to_vec();
apply_edits(uri, None, text_edits).map_err(|kind| ApplyEditError {
kind,
failed_change_idx: i,
})?;
}
}
Ok(())
}
fn goto_impl(
@ -910,6 +975,31 @@ fn to_locations(definitions: Option<lsp::GotoDefinitionResponse>) -> Vec<lsp::Lo
}
}
pub fn goto_declaration(cx: &mut Context) {
let (view, doc) = current!(cx.editor);
let language_server = language_server!(cx.editor, doc);
let offset_encoding = language_server.offset_encoding();
let pos = doc.position(view.id, offset_encoding);
let future = match language_server.goto_declaration(doc.identifier(), pos, None) {
Some(future) => future,
None => {
cx.editor
.set_error("Language server does not support goto-declaration");
return;
}
};
cx.callback(
future,
move |editor, compositor, response: Option<lsp::GotoDefinitionResponse>| {
let items = to_locations(response);
goto_impl(editor, compositor, items, offset_encoding);
},
);
}
pub fn goto_definition(cx: &mut Context) {
let (view, doc) = current!(cx.editor);
let language_server = language_server!(cx.editor, doc);
@ -1198,49 +1288,123 @@ pub fn hover(cx: &mut Context) {
}
pub fn rename_symbol(cx: &mut Context) {
let (view, doc) = current_ref!(cx.editor);
let text = doc.text().slice(..);
let primary_selection = doc.selection(view.id).primary();
let prefill = if primary_selection.len() > 1 {
primary_selection
} else {
use helix_core::textobject::{textobject_word, TextObject};
textobject_word(text, primary_selection, TextObject::Inside, 1, false)
fn get_prefill_from_word_boundary(editor: &Editor) -> String {
let (view, doc) = current_ref!(editor);
let text = doc.text().slice(..);
let primary_selection = doc.selection(view.id).primary();
if primary_selection.len() > 1 {
primary_selection
} else {
use helix_core::textobject::{textobject_word, TextObject};
textobject_word(text, primary_selection, TextObject::Inside, 1, false)
}
.fragment(text)
.into()
}
.fragment(text)
.into();
ui::prompt_with_input(
cx,
"rename-to:".into(),
prefill,
None,
ui::completers::none,
move |cx: &mut compositor::Context, input: &str, event: PromptEvent| {
if event != PromptEvent::Validate {
return;
fn get_prefill_from_lsp_response(
editor: &Editor,
offset_encoding: OffsetEncoding,
response: Option<lsp::PrepareRenameResponse>,
) -> Result<String, &'static str> {
match response {
Some(lsp::PrepareRenameResponse::Range(range)) => {
let text = doc!(editor).text();
Ok(lsp_range_to_range(text, range, offset_encoding)
.ok_or("lsp sent invalid selection range for rename")?
.fragment(text.slice(..))
.into())
}
Some(lsp::PrepareRenameResponse::RangeWithPlaceholder { placeholder, .. }) => {
Ok(placeholder)
}
Some(lsp::PrepareRenameResponse::DefaultBehavior { .. }) => {
Ok(get_prefill_from_word_boundary(editor))
}
None => Err("lsp did not respond to prepare rename request"),
}
}
let (view, doc) = current!(cx.editor);
let language_server = language_server!(cx.editor, doc);
let offset_encoding = language_server.offset_encoding();
fn create_rename_prompt(editor: &Editor, prefill: String) -> Box<ui::Prompt> {
let prompt = ui::Prompt::new(
"rename-to:".into(),
None,
ui::completers::none,
move |cx: &mut compositor::Context, input: &str, event: PromptEvent| {
if event != PromptEvent::Validate {
return;
}
let pos = doc.position(view.id, offset_encoding);
let (view, doc) = current!(cx.editor);
let language_server = language_server!(cx.editor, doc);
let offset_encoding = language_server.offset_encoding();
let pos = doc.position(view.id, offset_encoding);
let future =
match language_server.rename_symbol(doc.identifier(), pos, input.to_string()) {
Some(future) => future,
None => {
cx.editor
.set_error("Language server does not support symbol renaming");
return;
}
};
match block_on(future) {
Ok(edits) => {
let _ = apply_workspace_edit(cx.editor, offset_encoding, &edits);
}
Err(err) => cx.editor.set_error(err.to_string()),
}
},
)
.with_line(prefill, editor);
let future =
match language_server.rename_symbol(doc.identifier(), pos, input.to_string()) {
Some(future) => future,
None => {
cx.editor
.set_error("Language server does not support symbol renaming");
Box::new(prompt)
}
let (view, doc) = current!(cx.editor);
let language_server = language_server!(cx.editor, doc);
let offset_encoding = language_server.offset_encoding();
if !language_server.supports_rename() {
cx.editor
.set_error("Language server does not support symbol renaming");
return;
}
let pos = doc.position(view.id, offset_encoding);
match language_server.prepare_rename(doc.identifier(), pos) {
// Language server supports textDocument/prepareRename, use it.
Some(future) => cx.callback(
future,
move |editor, compositor, response: Option<lsp::PrepareRenameResponse>| {
let prefill = match get_prefill_from_lsp_response(editor, offset_encoding, response)
{
Ok(p) => p,
Err(e) => {
editor.set_error(e);
return;
}
};
match block_on(future) {
Ok(edits) => apply_workspace_edit(cx.editor, offset_encoding, &edits),
Err(err) => cx.editor.set_error(err.to_string()),
}
},
);
let prompt = create_rename_prompt(editor, prefill);
compositor.push(prompt);
},
),
// Language server does not support textDocument/prepareRename, fall back
// to word boundary selection.
None => {
let prefill = get_prefill_from_word_boundary(cx.editor);
let prompt = create_rename_prompt(cx.editor, prefill);
cx.push_layer(prompt);
}
};
}
pub fn select_references_to_symbol_under_cursor(cx: &mut Context) {
@ -1291,3 +1455,174 @@ pub fn select_references_to_symbol_under_cursor(cx: &mut Context) {
},
);
}
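/// Requests inlay hints for every open view, provided inlay hints are enabled
/// in the LSP config (`lsp.display_inlay_hints`).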
pub fn compute_inlay_hints_for_all_views(editor: &mut Editor, jobs: &mut crate::job::Jobs) {
if !editor.config().lsp.display_inlay_hints {
return;
}
for (view, _) in editor.tree.views() {
let doc = match editor.documents.get(&view.doc) {
Some(doc) => doc,
None => continue,
};
if let Some(callback) = compute_inlay_hints_for_view(view, doc) {
jobs.callback(callback);
}
}
}
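/// Requests inlay hints for the area around the given view and returns a job
/// callback that stores them as inline annotations on the document, or `None`
/// if the language server does not support inlay hints or the cached hints are
/// still up to date.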
fn compute_inlay_hints_for_view(
view: &View,
doc: &Document,
) -> Option<std::pin::Pin<Box<impl Future<Output = Result<crate::job::Callback, anyhow::Error>>>>> {
let view_id = view.id;
let doc_id = view.doc;
let language_server = doc.language_server()?;
let capabilities = language_server.capabilities();
let (future, new_doc_inlay_hints_id) = match capabilities.inlay_hint_provider {
Some(
lsp::OneOf::Left(true)
| lsp::OneOf::Right(lsp::InlayHintServerCapabilities::Options(_)),
) => {
let doc_text = doc.text();
let len_lines = doc_text.len_lines();
// Compute ~3 times the current view height of inlay hints, that way some scrolling
// will not show half the view with hints and half without while still being faster
// than computing all the hints for the full file (which could be dozens of times
// longer than the view is).
let view_height = view.inner_height();
let first_visible_line = doc_text.char_to_line(view.offset.anchor);
let first_line = first_visible_line.saturating_sub(view_height);
let last_line = first_visible_line
.saturating_add(view_height.saturating_mul(2))
.min(len_lines);
let new_doc_inlay_hint_id = DocumentInlayHintsId {
first_line,
last_line,
};
// Don't recompute the annotations in case nothing has changed about the view
if !doc.inlay_hints_oudated
&& doc
.inlay_hints(view_id)
.map_or(false, |dih| dih.id == new_doc_inlay_hint_id)
{
return None;
}
let doc_slice = doc_text.slice(..);
let first_char_in_range = doc_slice.line_to_char(first_line);
let last_char_in_range = doc_slice.line_to_char(last_line);
let range = helix_lsp::util::range_to_lsp_range(
doc_text,
helix_core::Range::new(first_char_in_range, last_char_in_range),
language_server.offset_encoding(),
);
(
language_server.text_document_range_inlay_hints(doc.identifier(), range, None),
new_doc_inlay_hint_id,
)
}
_ => return None,
};
let callback = super::make_job_callback(
future?,
move |editor, _compositor, response: Option<Vec<lsp::InlayHint>>| {
// The config was modified or the window was closed while the request was in flight
if !editor.config().lsp.display_inlay_hints || editor.tree.try_get(view_id).is_none() {
return;
}
// Add annotations to the relevant document, not the current one (it may have changed in between)
let doc = match editor.documents.get_mut(&doc_id) {
Some(doc) => doc,
None => return,
};
// If we have neither hints nor an LSP, empty the inlay hints since they're now outdated
let (mut hints, offset_encoding) = match (response, doc.language_server()) {
(Some(h), Some(ls)) if !h.is_empty() => (h, ls.offset_encoding()),
_ => {
doc.set_inlay_hints(
view_id,
DocumentInlayHints::empty_with_id(new_doc_inlay_hints_id),
);
doc.inlay_hints_oudated = false;
return;
}
};
// Most language servers will already send them sorted but ensure this is the case to
// avoid errors on our end.
hints.sort_unstable_by_key(|inlay_hint| inlay_hint.position);
let mut padding_before_inlay_hints = Vec::new();
let mut type_inlay_hints = Vec::new();
let mut parameter_inlay_hints = Vec::new();
let mut other_inlay_hints = Vec::new();
let mut padding_after_inlay_hints = Vec::new();
let doc_text = doc.text();
for hint in hints {
let char_idx =
match helix_lsp::util::lsp_pos_to_pos(doc_text, hint.position, offset_encoding)
{
Some(pos) => pos,
// Skip inlay hints that have no "real" position
None => continue,
};
let label = match hint.label {
lsp::InlayHintLabel::String(s) => s,
lsp::InlayHintLabel::LabelParts(parts) => parts
.into_iter()
.map(|p| p.value)
.collect::<Vec<_>>()
.join(""),
};
let inlay_hints_vec = match hint.kind {
Some(lsp::InlayHintKind::TYPE) => &mut type_inlay_hints,
Some(lsp::InlayHintKind::PARAMETER) => &mut parameter_inlay_hints,
// We can't warn on unknown kind here since LSPs are free to set it or not, for
// example Rust Analyzer does not: every kind will be `None`.
_ => &mut other_inlay_hints,
};
if let Some(true) = hint.padding_left {
padding_before_inlay_hints.push(InlineAnnotation::new(char_idx, " "));
}
inlay_hints_vec.push(InlineAnnotation::new(char_idx, label));
if let Some(true) = hint.padding_right {
padding_after_inlay_hints.push(InlineAnnotation::new(char_idx, " "));
}
}
doc.set_inlay_hints(
view_id,
DocumentInlayHints {
id: new_doc_inlay_hints_id,
type_inlay_hints: type_inlay_hints.into(),
parameter_inlay_hints: parameter_inlay_hints.into(),
other_inlay_hints: other_inlay_hints.into(),
padding_before_inlay_hints: padding_before_inlay_hints.into(),
padding_after_inlay_hints: padding_after_inlay_hints.into(),
},
);
doc.inlay_hints_oudated = false;
},
);
Some(callback)
}

File diff suppressed because it is too large

@ -7,6 +7,7 @@ use helix_view::graphics::{CursorKind, Rect};
use tui::buffer::Buffer as Surface;
pub type Callback = Box<dyn FnOnce(&mut Compositor, &mut Context)>;
pub type SyncCallback = Box<dyn FnOnce(&mut Compositor, &mut Context) + Sync>;
// Cursive-inspired
pub enum EventResult {

@ -52,7 +52,7 @@ pub fn general() -> std::io::Result<()> {
let config_file = helix_loader::config_file();
let lang_file = helix_loader::lang_config_file();
let log_file = helix_loader::log_file();
let rt_dir = helix_loader::runtime_dir();
let rt_dirs = helix_loader::runtime_dirs();
let clipboard_provider = get_clipboard_provider();
if config_file.exists() {
@ -66,17 +66,31 @@ pub fn general() -> std::io::Result<()> {
writeln!(stdout, "Language file: default")?;
}
writeln!(stdout, "Log file: {}", log_file.display())?;
writeln!(stdout, "Runtime directory: {}", rt_dir.display())?;
if let Ok(path) = std::fs::read_link(&rt_dir) {
let msg = format!("Runtime directory is symlinked to {}", path.display());
writeln!(stdout, "{}", msg.yellow())?;
}
if !rt_dir.exists() {
writeln!(stdout, "{}", "Runtime directory does not exist.".red())?;
}
if rt_dir.read_dir().ok().map(|it| it.count()) == Some(0) {
writeln!(stdout, "{}", "Runtime directory is empty.".red())?;
writeln!(
stdout,
"Runtime directories: {}",
rt_dirs
.iter()
.map(|d| d.to_string_lossy())
.collect::<Vec<_>>()
.join(";")
)?;
for rt_dir in rt_dirs.iter() {
if let Ok(path) = std::fs::read_link(rt_dir) {
let msg = format!(
"Runtime directory {} is symlinked to: {}",
rt_dir.display(),
path.display()
);
writeln!(stdout, "{}", msg.yellow())?;
}
if !rt_dir.exists() {
let msg = format!("Runtime directory does not exist: {}", rt_dir.display());
writeln!(stdout, "{}", msg.yellow())?;
} else if rt_dir.read_dir().ok().map(|it| it.count()) == Some(0) {
let msg = format!("Runtime directory is empty: {}", rt_dir.display());
writeln!(stdout, "{}", msg.yellow())?;
}
}
writeln!(stdout, "Clipboard provider: {}", clipboard_provider.name())?;

@ -5,9 +5,12 @@ use crate::compositor::Compositor;
use futures_util::future::{BoxFuture, Future, FutureExt};
use futures_util::stream::{FuturesUnordered, StreamExt};
pub type EditorCompositorCallback = Box<dyn FnOnce(&mut Editor, &mut Compositor) + Send>;
pub type EditorCallback = Box<dyn FnOnce(&mut Editor) + Send>;
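/// A callback that is run on the main loop once a job's future resolves,
/// needing access either to both the editor and the compositor or to the
/// editor alone.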
pub enum Callback {
EditorCompositor(Box<dyn FnOnce(&mut Editor, &mut Compositor) + Send>),
Editor(Box<dyn FnOnce(&mut Editor) + Send>),
EditorCompositor(EditorCompositorCallback),
Editor(EditorCallback),
}
pub type JobFuture = BoxFuture<'static, anyhow::Result<Option<Callback>>>;

@ -184,7 +184,7 @@ impl<'de> serde::de::Visitor<'de> for KeyTrieVisitor {
S: serde::de::SeqAccess<'de>,
{
let mut commands = Vec::new();
while let Some(command) = seq.next_element::<&str>()? {
while let Some(command) = seq.next_element::<String>()? {
commands.push(
command
.parse::<MappableCommand>()
@ -600,4 +600,43 @@ mod tests {
"Mismatch"
)
}
#[test]
fn escaped_keymap() {
use crate::commands::MappableCommand;
use helix_view::input::{KeyCode, KeyEvent, KeyModifiers};
let keys = r#"
"+" = [
"select_all",
":pipe sed -E 's/\\s+$//g'",
]
"#;
let key = KeyEvent {
code: KeyCode::Char('+'),
modifiers: KeyModifiers::NONE,
};
let expectation = Keymap::new(KeyTrie::Node(KeyTrieNode::new(
"",
hashmap! {
key => KeyTrie::Sequence(vec!{
MappableCommand::select_all,
MappableCommand::Typable {
name: "pipe".to_string(),
args: vec!{
"sed".to_string(),
"-E".to_string(),
"'s/\\s+$//g'".to_string()
},
doc: "".to_string(),
},
})
},
vec![key],
)));
assert_eq!(toml::from_str(keys), Ok(expectation));
}
}

@ -7,8 +7,8 @@ use helix_core::hashmap;
pub fn default() -> HashMap<Mode, Keymap> {
let normal = keymap!({ "Normal mode"
"h" | "left" => move_char_left,
"j" | "down" => move_line_down,
"k" | "up" => move_line_up,
"j" | "down" => move_visual_line_down,
"k" | "up" => move_visual_line_up,
"l" | "right" => move_char_right,
"t" => find_till_char,
@ -44,6 +44,7 @@ pub fn default() -> HashMap<Mode, Keymap> {
"l" => goto_line_end,
"s" => goto_first_nonwhitespace,
"d" => goto_definition,
"D" => goto_declaration,
"y" => goto_type_definition,
"r" => goto_reference,
"i" => goto_implementation,
@ -54,6 +55,8 @@ pub fn default() -> HashMap<Mode, Keymap> {
"m" => goto_last_modified_file,
"n" => goto_next_buffer,
"p" => goto_previous_buffer,
"k" => move_line_up,
"j" => move_line_down,
"." => goto_last_modification,
},
":" => command_mode,
@ -220,6 +223,7 @@ pub fn default() -> HashMap<Mode, Keymap> {
"'" => last_picker,
"g" => { "Debug (experimental)" sticky=true
"l" => dap_launch,
"r" => dap_restart,
"b" => dap_toggle_breakpoint,
"c" => dap_continue,
"h" => dap_pause,
@ -320,8 +324,8 @@ pub fn default() -> HashMap<Mode, Keymap> {
let mut select = normal.clone();
select.merge_nodes(keymap!({ "Select mode"
"h" | "left" => extend_char_left,
"j" | "down" => extend_line_down,
"k" | "up" => extend_line_up,
"j" | "down" => extend_visual_line_down,
"k" | "up" => extend_visual_line_up,
"l" | "right" => extend_char_right,
"w" => extend_next_word_start,
@ -344,6 +348,10 @@ pub fn default() -> HashMap<Mode, Keymap> {
"esc" => exit_select_mode,
"v" => normal_mode,
"g" => { "Goto"
"k" => extend_line_up,
"j" => extend_line_down,
},
}));
let insert = keymap!({ "Insert mode"
"esc" => normal_mode,
@ -356,13 +364,13 @@ pub fn default() -> HashMap<Mode, Keymap> {
"A-d" | "A-del" => delete_word_forward,
"C-u" => kill_to_line_start,
"C-k" => kill_to_line_end,
"C-h" | "backspace" => delete_char_backward,
"C-h" | "backspace" | "S-backspace" => delete_char_backward,
"C-d" | "del" => delete_char_forward,
"C-j" | "ret" => insert_newline,
"tab" => insert_tab,
"up" => move_line_up,
"down" => move_line_down,
"up" => move_visual_line_up,
"down" => move_visual_line_down,
"left" => move_char_left,
"right" => move_char_right,
"pageup" => page_up,

@ -10,6 +10,9 @@ pub mod health;
pub mod job;
pub mod keymap;
pub mod ui;
use std::path::Path;
use ignore::DirEntry;
pub use keymap::macros::*;
#[cfg(not(windows))]
@ -22,3 +25,25 @@ fn true_color() -> bool {
fn true_color() -> bool {
true
}
/// Function used for filtering dir entries in the various file pickers.
fn filter_picker_entry(entry: &DirEntry, root: &Path, dedup_symlinks: bool) -> bool {
// We always want to ignore the .git directory, otherwise if
// `ignore` is turned off, we end up with a lot of noise
// in our picker.
if entry.file_name() == ".git" {
return false;
}
// We also ignore symlinks that point inside the current directory
// if `dedup_symlinks` is enabled.
if dedup_symlinks && entry.path_is_symlink() {
return entry
.path()
.canonicalize()
.ok()
.map_or(false, |path| !path.starts_with(root));
}
true
}

@ -1,16 +1,16 @@
use crate::compositor::{Component, Context, Event, EventResult};
use helix_view::{apply_transaction, editor::CompleteAction, ViewId};
use tui::buffer::Buffer as Surface;
use tui::text::Spans;
use helix_view::{
document::SavePoint,
editor::CompleteAction,
theme::{Modifier, Style},
ViewId,
};
use tui::{buffer::Buffer as Surface, text::Span};
use std::borrow::Cow;
use std::{borrow::Cow, sync::Arc};
use helix_core::{Change, Transaction};
use helix_view::{
graphics::Rect,
input::{KeyCode, KeyEvent},
Document, Editor,
};
use helix_view::{graphics::Rect, Document, Editor};
use crate::commands;
use crate::ui::{menu, Markdown, Menu, Popup, PromptEvent};
@ -33,13 +33,20 @@ impl menu::Item for CompletionItem {
.into()
}
fn label(&self, _data: &Self::Data) -> Spans {
self.label.as_str().into()
}
fn row(&self, _data: &Self::Data) -> menu::Row {
fn format(&self, _data: &Self::Data) -> menu::Row {
let deprecated = self.deprecated.unwrap_or_default()
|| self.tags.as_ref().map_or(false, |tags| {
tags.contains(&lsp::CompletionItemTag::DEPRECATED)
});
menu::Row::new(vec![
menu::Cell::from(self.label.as_str()),
menu::Cell::from(Span::styled(
self.label.as_str(),
if deprecated {
Style::default().add_modifier(Modifier::CROSSED_OUT)
} else {
Style::default()
},
)),
menu::Cell::from(match self.kind {
Some(lsp::CompletionItemKind::TEXT) => "text",
Some(lsp::CompletionItemKind::METHOD) => "method",
@ -95,11 +102,13 @@ impl Completion {
pub fn new(
editor: &Editor,
savepoint: Arc<SavePoint>,
mut items: Vec<CompletionItem>,
offset_encoding: helix_lsp::OffsetEncoding,
start_offset: usize,
trigger_offset: usize,
) -> Self {
let replace_mode = editor.config().completion_replace;
// Sort completion items according to their preselect status (given by the LSP server)
items.sort_by_key(|item| !item.preselect.unwrap_or(false));
@ -110,50 +119,89 @@ impl Completion {
view_id: ViewId,
item: &CompletionItem,
offset_encoding: helix_lsp::OffsetEncoding,
start_offset: usize,
trigger_offset: usize,
include_placeholder: bool,
replace_mode: bool,
) -> Transaction {
let transaction = if let Some(edit) = &item.text_edit {
use helix_lsp::snippet;
let selection = doc.selection(view_id);
let text = doc.text().slice(..);
let primary_cursor = selection.primary().cursor(text);
let (edit_offset, new_text) = if let Some(edit) = &item.text_edit {
let edit = match edit {
lsp::CompletionTextEdit::Edit(edit) => edit.clone(),
lsp::CompletionTextEdit::InsertAndReplace(item) => {
// TODO: support using "insert" instead of "replace" via user config
lsp::TextEdit::new(item.replace, item.new_text.clone())
let range = if replace_mode {
item.replace
} else {
item.insert
};
lsp::TextEdit::new(range, item.new_text.clone())
}
};
util::generate_transaction_from_completion_edit(
doc.text(),
doc.selection(view_id),
edit,
offset_encoding, // TODO: should probably transcode in Client
)
let start_offset =
match util::lsp_pos_to_pos(doc.text(), edit.range.start, offset_encoding) {
Some(start) => start as i128 - primary_cursor as i128,
None => return Transaction::new(doc.text()),
};
let end_offset =
match util::lsp_pos_to_pos(doc.text(), edit.range.end, offset_encoding) {
Some(end) => end as i128 - primary_cursor as i128,
None => return Transaction::new(doc.text()),
};
(Some((start_offset, end_offset)), edit.new_text)
} else {
let text = item.insert_text.as_ref().unwrap_or(&item.label);
// Some LSPs just give you an insertText with no offset ¯\_(ツ)_/¯
// in these cases we need to check for a common prefix and remove it
let prefix = Cow::from(doc.text().slice(start_offset..trigger_offset));
let text = text.trim_start_matches::<&str>(&prefix);
// TODO: this needs to be true for the numbers to work out correctly
// in the closure below. It's passed in to a callback as this same
// formula, but can the value change between the LSP request and
// response? If it does, can we recover?
debug_assert!(
doc.selection(view_id)
.primary()
.cursor(doc.text().slice(..))
== trigger_offset
);
Transaction::change_by_selection(doc.text(), doc.selection(view_id), |range| {
let cursor = range.cursor(doc.text().slice(..));
(cursor, cursor, Some(text.into()))
})
let new_text = item
.insert_text
.clone()
.unwrap_or_else(|| item.label.clone());
// check that we are still at the correct savepoint
// we can still generate a transaction regardless but if the
// document changed (and not just the selection) then we will
// likely delete the wrong text (same if we applied an edit sent by the LS)
debug_assert!(primary_cursor == trigger_offset);
(None, new_text)
};
transaction
if matches!(item.kind, Some(lsp::CompletionItemKind::SNIPPET))
|| matches!(
item.insert_text_format,
Some(lsp::InsertTextFormat::SNIPPET)
)
{
match snippet::parse(&new_text) {
Ok(snippet) => util::generate_transaction_from_snippet(
doc.text(),
selection,
edit_offset,
replace_mode,
snippet,
doc.line_ending.as_str(),
include_placeholder,
doc.tab_width(),
doc.indent_width(),
),
Err(err) => {
log::error!(
"Failed to parse snippet: {:?}, remaining output: {}",
&new_text,
err
);
Transaction::new(doc.text())
}
}
} else {
util::generate_transaction_from_completion_edit(
doc.text(),
selection,
edit_offset,
replace_mode,
new_text,
)
}
}
fn completion_changes(transaction: &Transaction, trigger_offset: usize) -> Vec<Change> {
@ -166,11 +214,10 @@ impl Completion {
let (view, doc) = current!(editor);
// if more text was entered, remove it
doc.restore(view);
doc.restore(view, &savepoint);
match event {
PromptEvent::Abort => {
doc.restore(view);
editor.last_completion = None;
}
PromptEvent::Update => {
@ -182,13 +229,13 @@ impl Completion {
view.id,
item,
offset_encoding,
start_offset,
trigger_offset,
true,
replace_mode,
);
// initialize a savepoint
doc.savepoint();
apply_transaction(&transaction, doc, view);
doc.apply(&transaction, view.id);
editor.last_completion = Some(CompleteAction {
trigger_offset,
@ -204,11 +251,12 @@ impl Completion {
view.id,
item,
offset_encoding,
start_offset,
trigger_offset,
false,
replace_mode,
);
apply_transaction(&transaction, doc, view);
doc.apply(&transaction, view.id);
editor.last_completion = Some(CompleteAction {
trigger_offset,
@ -238,13 +286,15 @@ impl Completion {
additional_edits.clone(),
offset_encoding, // TODO: should probably transcode in Client
);
apply_transaction(&transaction, doc, view);
doc.apply(&transaction, view.id);
}
}
}
};
});
let popup = Popup::new(Self::ID, menu).with_scrollbar(false);
let popup = Popup::new(Self::ID, menu)
.with_scrollbar(false)
.ignore_escape_key(true);
let mut completion = Self {
popup,
start_offset,
@ -368,13 +418,6 @@ impl Completion {
impl Component for Completion {
fn handle_event(&mut self, event: &Event, cx: &mut Context) -> EventResult {
// let the Editor handle Esc instead
if let Event::Key(KeyEvent {
code: KeyCode::Esc, ..
}) = event
{
return EventResult::Ignored(None);
}
self.popup.handle_event(event, cx)
}
@ -386,102 +429,102 @@ impl Component for Completion {
self.popup.render(area, surface, cx);
// if we have a selection, render a markdown popup on top/below with info
if let Some(option) = self.popup.contents().selection() {
// need to render:
// option.detail
// ---
// option.documentation
let (view, doc) = current!(cx.editor);
let language = doc.language_name().unwrap_or("");
let text = doc.text().slice(..);
let cursor_pos = doc.selection(view.id).primary().cursor(text);
let coords = helix_core::visual_coords_at_pos(text, cursor_pos, doc.tab_width());
let cursor_pos = (coords.row - view.offset.row) as u16;
let mut markdown_doc = match &option.documentation {
Some(lsp::Documentation::String(contents))
| Some(lsp::Documentation::MarkupContent(lsp::MarkupContent {
kind: lsp::MarkupKind::PlainText,
value: contents,
})) => {
// TODO: convert to wrapped text
Markdown::new(
format!(
"```{}\n{}\n```\n{}",
language,
option.detail.as_deref().unwrap_or_default(),
contents
),
cx.editor.syn_loader.clone(),
)
}
Some(lsp::Documentation::MarkupContent(lsp::MarkupContent {
kind: lsp::MarkupKind::Markdown,
value: contents,
})) => {
// TODO: set language based on doc scope
if let Some(detail) = &option.detail.as_deref() {
Markdown::new(
format!("```{}\n{}\n```\n{}", language, detail, contents),
cx.editor.syn_loader.clone(),
)
} else {
Markdown::new(contents.to_string(), cx.editor.syn_loader.clone())
}
}
None if option.detail.is_some() => {
// TODO: copied from above
// TODO: set language based on doc scope
Markdown::new(
format!(
"```{}\n{}\n```",
language,
option.detail.as_deref().unwrap_or_default(),
),
cx.editor.syn_loader.clone(),
)
}
None => return,
let option = match self.popup.contents().selection() {
Some(option) => option,
None => return,
};
// need to render:
// option.detail
// ---
// option.documentation
let (view, doc) = current!(cx.editor);
let language = doc.language_name().unwrap_or("");
let text = doc.text().slice(..);
let cursor_pos = doc.selection(view.id).primary().cursor(text);
let coords = view
.screen_coords_at_pos(doc, text, cursor_pos)
.expect("cursor must be in view");
let cursor_pos = coords.row as u16;
let markdowned = |lang: &str, detail: Option<&str>, doc: Option<&str>| {
let md = match (detail, doc) {
(Some(detail), Some(doc)) => format!("```{lang}\n{detail}\n```\n{doc}"),
(Some(detail), None) => format!("```{lang}\n{detail}\n```"),
(None, Some(doc)) => doc.to_string(),
(None, None) => String::new(),
};
Markdown::new(md, cx.editor.syn_loader.clone())
};
let mut markdown_doc = match &option.documentation {
Some(lsp::Documentation::String(contents))
| Some(lsp::Documentation::MarkupContent(lsp::MarkupContent {
kind: lsp::MarkupKind::PlainText,
value: contents,
})) => {
// TODO: convert to wrapped text
markdowned(language, option.detail.as_deref(), Some(contents))
}
Some(lsp::Documentation::MarkupContent(lsp::MarkupContent {
kind: lsp::MarkupKind::Markdown,
value: contents,
})) => {
// TODO: set language based on doc scope
markdowned(language, option.detail.as_deref(), Some(contents))
}
None if option.detail.is_some() => {
// TODO: set language based on doc scope
markdowned(language, option.detail.as_deref(), None)
}
None => return,
};
let popup_area = {
let (popup_x, popup_y) = self.popup.get_rel_position(area, cx);
let (popup_width, _popup_height) = self.popup.get_size();
let mut width = area
.width
.saturating_sub(popup_x)
.saturating_sub(popup_width);
let area = if width > 30 {
let mut height = area.height.saturating_sub(popup_y);
let x = popup_x + popup_width;
let y = popup_y;
if let Some((rel_width, rel_height)) = markdown_doc.required_size((width, height)) {
width = rel_width.min(width);
height = rel_height.min(height);
}
Rect::new(x, y, width, height)
} else {
let half = area.height / 2;
let height = 15.min(half);
// we want to make sure the cursor is visible (not hidden behind the documentation)
let y = if cursor_pos + area.y
>= (cx.editor.tree.area().height - height - 2/* statusline + commandline */)
{
0
} else {
// -2 to subtract command line + statusline. a bit of a hack, because of splits.
area.height.saturating_sub(height).saturating_sub(2)
};
let (popup_width, popup_height) = self.popup.get_size();
Rect::new(popup_x, popup_y, popup_width, popup_height)
};
Rect::new(0, y, area.width, height)
let doc_width_available = area.width.saturating_sub(popup_area.right());
let doc_area = if doc_width_available > 30 {
let mut doc_width = doc_width_available;
let mut doc_height = area.height.saturating_sub(popup_area.top());
let x = popup_area.right();
let y = popup_area.top();
if let Some((rel_width, rel_height)) =
markdown_doc.required_size((doc_width, doc_height))
{
doc_width = rel_width.min(doc_width);
doc_height = rel_height.min(doc_height);
}
Rect::new(x, y, doc_width, doc_height)
} else {
// Documentation should not cover the cursor or the completion popup
// Completion popup could be above or below the current line
let avail_height_above = cursor_pos.min(popup_area.top()).saturating_sub(1);
let avail_height_below = area
.height
.saturating_sub(cursor_pos.max(popup_area.bottom()) + 1 /* padding */);
let (y, avail_height) = if avail_height_below >= avail_height_above {
(
area.height.saturating_sub(avail_height_below),
avail_height_below,
)
} else {
(0, avail_height_above)
};
if avail_height <= 1 {
return;
}
// clear area
let background = cx.editor.theme.get("ui.popup");
surface.clear_with(area, background);
markdown_doc.render(area, surface, cx);
}
Rect::new(0, y, area.width, avail_height.min(15))
};
// clear area
let background = cx.editor.theme.get("ui.popup");
surface.clear_with(doc_area, background);
markdown_doc.render(doc_area, surface, cx);
}
}

@ -0,0 +1,485 @@
use std::cmp::min;
use helix_core::doc_formatter::{DocumentFormatter, GraphemeSource, TextFormat};
use helix_core::graphemes::Grapheme;
use helix_core::str_utils::char_to_byte_idx;
use helix_core::syntax::Highlight;
use helix_core::syntax::HighlightEvent;
use helix_core::text_annotations::TextAnnotations;
use helix_core::{visual_offset_from_block, Position, RopeSlice};
use helix_view::editor::{WhitespaceConfig, WhitespaceRenderValue};
use helix_view::graphics::Rect;
use helix_view::theme::Style;
use helix_view::view::ViewPosition;
use helix_view::Document;
use helix_view::Theme;
use tui::buffer::Buffer as Surface;
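/// A decoration rendered for every visual line, allowing additional content to
/// be drawn behind (`render_background`) or on top of (`render_foreground`)
/// the document text.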
pub trait LineDecoration {
fn render_background(&mut self, _renderer: &mut TextRenderer, _pos: LinePos) {}
fn render_foreground(
&mut self,
_renderer: &mut TextRenderer,
_pos: LinePos,
_end_char_idx: usize,
) {
}
}
impl<F: FnMut(&mut TextRenderer, LinePos)> LineDecoration for F {
fn render_background(&mut self, renderer: &mut TextRenderer, pos: LinePos) {
self(renderer, pos)
}
}
/// A wrapper around a HighlightIterator
/// that merges the layered highlights to create the final text style
/// and yields the active text style and the char_idx where the active
/// style will have to be recomputed.
struct StyleIter<'a, H: Iterator<Item = HighlightEvent>> {
text_style: Style,
active_highlights: Vec<Highlight>,
highlight_iter: H,
theme: &'a Theme,
}
impl<H: Iterator<Item = HighlightEvent>> Iterator for StyleIter<'_, H> {
type Item = (Style, usize);
fn next(&mut self) -> Option<(Style, usize)> {
while let Some(event) = self.highlight_iter.next() {
match event {
HighlightEvent::HighlightStart(highlights) => {
self.active_highlights.push(highlights)
}
HighlightEvent::HighlightEnd => {
self.active_highlights.pop();
}
HighlightEvent::Source { start, end } => {
if start == end {
continue;
}
let style = self
.active_highlights
.iter()
.fold(self.text_style, |acc, span| {
acc.patch(self.theme.highlight(span.0))
});
return Some((style, end));
}
}
}
None
}
}
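// Sketch of the behaviour above: given the event stream
//   HighlightStart(a), HighlightStart(b), Source { start: 0, end: 5 },
//   HighlightEnd, Source { start: 5, end: 9 }, HighlightEnd
// the iterator yields (text_style patched with a and b, 5) and then
// (text_style patched with a, 9): the merged style plus the char index up to
// which it applies. Empty `Source` ranges are skipped.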
#[derive(Debug, PartialEq, Eq, Copy, Clone)]
pub struct LinePos {
/// Indicates whether the given visual line
/// is the first visual line of the given document line
pub first_visual_line: bool,
/// The line index of the document line that contains the given visual line
pub doc_line: usize,
/// Vertical offset from the top of the inner view area
pub visual_line: u16,
/// The first char index of this visual line.
/// Note that if the visual line is entirely filled by
/// a very long inline virtual text then this index will point
/// at the next (non-virtual) char after this visual line
pub start_char_idx: usize,
}
pub type TranslatedPosition<'a> = (usize, Box<dyn FnMut(&mut TextRenderer, Position) + 'a>);
#[allow(clippy::too_many_arguments)]
pub fn render_document(
surface: &mut Surface,
viewport: Rect,
doc: &Document,
offset: ViewPosition,
doc_annotations: &TextAnnotations,
highlight_iter: impl Iterator<Item = HighlightEvent>,
theme: &Theme,
line_decoration: &mut [Box<dyn LineDecoration + '_>],
translated_positions: &mut [TranslatedPosition],
) {
let mut renderer = TextRenderer::new(surface, doc, theme, offset.horizontal_offset, viewport);
render_text(
&mut renderer,
doc.text().slice(..),
offset,
&doc.text_format(viewport.width, Some(theme)),
doc_annotations,
highlight_iter,
theme,
line_decoration,
translated_positions,
)
}
fn translate_positions(
char_pos: usize,
first_visible_char_idx: usize,
translated_positions: &mut [TranslatedPosition],
text_fmt: &TextFormat,
renderer: &mut TextRenderer,
pos: Position,
) {
// check if any positions translated on the fly (like the cursor) have been reached
for (char_idx, callback) in &mut *translated_positions {
if *char_idx < char_pos && *char_idx >= first_visible_char_idx {
// by replacing the char_index with usize::MAX we ensure
// that the same position is only translated once
// (the text can never reach usize::MAX since rust memory allocations
// are limited to isize::MAX)
*char_idx = usize::MAX;
if text_fmt.soft_wrap {
callback(renderer, pos)
} else if pos.col >= renderer.col_offset
&& pos.col - renderer.col_offset < renderer.viewport.width as usize
{
callback(
renderer,
Position {
row: pos.row,
col: pos.col - renderer.col_offset,
},
)
}
}
}
}
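// Sketch of a typical `TranslatedPosition`: a char index paired with a callback
// that records where that index lands on screen, e.g. to cache a cursor position:
//
//     let mut translated_positions: Vec<TranslatedPosition> = Vec::new();
//     translated_positions.push((
//         cursor_char_idx,
//         Box::new(move |_renderer: &mut TextRenderer, pos| cursor_cache.set(Some(Some(pos)))),
//     ));
//
// (`cursor_char_idx` and `cursor_cache` are placeholders for caller state.)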
#[allow(clippy::too_many_arguments)]
pub fn render_text<'t>(
renderer: &mut TextRenderer,
text: RopeSlice<'t>,
offset: ViewPosition,
text_fmt: &TextFormat,
text_annotations: &TextAnnotations,
highlight_iter: impl Iterator<Item = HighlightEvent>,
theme: &Theme,
line_decorations: &mut [Box<dyn LineDecoration + '_>],
translated_positions: &mut [TranslatedPosition],
) {
let (
Position {
row: mut row_off, ..
},
mut char_pos,
) = visual_offset_from_block(
text,
offset.anchor,
offset.anchor,
text_fmt,
text_annotations,
);
row_off += offset.vertical_offset;
assert_eq!(0, offset.vertical_offset);
let (mut formatter, mut first_visible_char_idx) =
DocumentFormatter::new_at_prev_checkpoint(text, text_fmt, text_annotations, offset.anchor);
let mut styles = StyleIter {
text_style: renderer.text_style,
active_highlights: Vec::with_capacity(64),
highlight_iter,
theme,
};
let mut last_line_pos = LinePos {
first_visual_line: false,
doc_line: usize::MAX,
visual_line: u16::MAX,
start_char_idx: usize::MAX,
};
let mut is_in_indent_area = true;
let mut last_line_indent_level = 0;
let mut style_span = styles
.next()
.unwrap_or_else(|| (Style::default(), usize::MAX));
loop {
// formatter.line_pos returns the line index of the next grapheme,
// so it must be called before formatter.next
let doc_line = formatter.line_pos();
let Some((grapheme, mut pos)) = formatter.next() else {
let mut last_pos = formatter.visual_pos();
if last_pos.row >= row_off {
last_pos.col -= 1;
last_pos.row -= row_off;
// check if any positions translated on the fly (like cursor) are at the EOF
translate_positions(
char_pos + 1,
first_visible_char_idx,
translated_positions,
text_fmt,
renderer,
last_pos,
);
}
break;
};
// skip any graphemes on visual lines before the block start
if pos.row < row_off {
if char_pos >= style_span.1 {
style_span = if let Some(style_span) = styles.next() {
style_span
} else {
break;
}
}
char_pos += grapheme.doc_chars();
first_visible_char_idx = char_pos + 1;
continue;
}
pos.row -= row_off;
// if the end of the viewport is reached stop rendering
if pos.row as u16 >= renderer.viewport.height {
break;
}
// apply decorations before rendering a new line
if pos.row as u16 != last_line_pos.visual_line {
if pos.row > 0 {
renderer.draw_indent_guides(last_line_indent_level, last_line_pos.visual_line);
is_in_indent_area = true;
for line_decoration in &mut *line_decorations {
line_decoration.render_foreground(renderer, last_line_pos, char_pos);
}
}
last_line_pos = LinePos {
first_visual_line: doc_line != last_line_pos.doc_line,
doc_line,
visual_line: pos.row as u16,
start_char_idx: char_pos,
};
for line_decoration in &mut *line_decorations {
line_decoration.render_background(renderer, last_line_pos);
}
}
// acquire the correct grapheme style
if char_pos >= style_span.1 {
style_span = styles.next().unwrap_or((Style::default(), usize::MAX));
}
char_pos += grapheme.doc_chars();
// check if any positions translated on the fly (like the cursor) have been reached
translate_positions(
char_pos,
first_visible_char_idx,
translated_positions,
text_fmt,
renderer,
pos,
);
let grapheme_style = if let GraphemeSource::VirtualText { highlight } = grapheme.source {
let style = renderer.text_style;
if let Some(highlight) = highlight {
style.patch(theme.highlight(highlight.0))
} else {
style
}
} else {
style_span.0
};
let virt = grapheme.is_virtual();
renderer.draw_grapheme(
grapheme.grapheme,
grapheme_style,
virt,
&mut last_line_indent_level,
&mut is_in_indent_area,
pos,
);
}
renderer.draw_indent_guides(last_line_indent_level, last_line_pos.visual_line);
for line_decoration in &mut *line_decorations {
line_decoration.render_foreground(renderer, last_line_pos, char_pos);
}
}
#[derive(Debug)]
pub struct TextRenderer<'a> {
pub surface: &'a mut Surface,
pub text_style: Style,
pub whitespace_style: Style,
pub indent_guide_char: String,
pub indent_guide_style: Style,
pub newline: String,
pub nbsp: String,
pub space: String,
pub tab: String,
pub virtual_tab: String,
pub indent_width: u16,
pub starting_indent: usize,
pub draw_indent_guides: bool,
pub col_offset: usize,
pub viewport: Rect,
}
impl<'a> TextRenderer<'a> {
pub fn new(
surface: &'a mut Surface,
doc: &Document,
theme: &Theme,
col_offset: usize,
viewport: Rect,
) -> TextRenderer<'a> {
let editor_config = doc.config.load();
let WhitespaceConfig {
render: ws_render,
characters: ws_chars,
} = &editor_config.whitespace;
let tab_width = doc.tab_width();
let tab = if ws_render.tab() == WhitespaceRenderValue::All {
std::iter::once(ws_chars.tab)
.chain(std::iter::repeat(ws_chars.tabpad).take(tab_width - 1))
.collect()
} else {
" ".repeat(tab_width)
};
let virtual_tab = " ".repeat(tab_width);
let newline = if ws_render.newline() == WhitespaceRenderValue::All {
ws_chars.newline.into()
} else {
" ".to_owned()
};
let space = if ws_render.space() == WhitespaceRenderValue::All {
ws_chars.space.into()
} else {
" ".to_owned()
};
let nbsp = if ws_render.nbsp() == WhitespaceRenderValue::All {
ws_chars.nbsp.into()
} else {
" ".to_owned()
};
let text_style = theme.get("ui.text");
let indent_width = doc.indent_style.indent_width(tab_width) as u16;
TextRenderer {
surface,
indent_guide_char: editor_config.indent_guides.character.into(),
newline,
nbsp,
space,
tab,
virtual_tab,
whitespace_style: theme.get("ui.virtual.whitespace"),
indent_width,
starting_indent: col_offset / indent_width as usize
+ (col_offset % indent_width as usize != 0) as usize
+ editor_config.indent_guides.skip_levels as usize,
indent_guide_style: text_style.patch(
theme
.try_get("ui.virtual.indent-guide")
.unwrap_or_else(|| theme.get("ui.virtual.whitespace")),
),
text_style,
draw_indent_guides: editor_config.indent_guides.render,
viewport,
col_offset,
}
}
/// Draws a single `grapheme` at the current render position with a specified `style`.
pub fn draw_grapheme(
&mut self,
grapheme: Grapheme,
mut style: Style,
is_virtual: bool,
last_indent_level: &mut usize,
is_in_indent_area: &mut bool,
position: Position,
) {
let cut_off_start = self.col_offset.saturating_sub(position.col);
let is_whitespace = grapheme.is_whitespace();
// TODO is it correct to apply the whitespace style to all unicode whitespace?
if is_whitespace {
style = style.patch(self.whitespace_style);
}
let width = grapheme.width();
let space = if is_virtual { " " } else { &self.space };
let nbsp = if is_virtual { " " } else { &self.nbsp };
let tab = if is_virtual {
&self.virtual_tab
} else {
&self.tab
};
let grapheme = match grapheme {
Grapheme::Tab { width } => {
let grapheme_tab_width = char_to_byte_idx(tab, width);
&tab[..grapheme_tab_width]
}
// TODO special rendering for other whitespaces?
Grapheme::Other { ref g } if g == " " => space,
Grapheme::Other { ref g } if g == "\u{00A0}" => nbsp,
Grapheme::Other { ref g } => g,
Grapheme::Newline => &self.newline,
};
let in_bounds = self.col_offset <= position.col
&& position.col < self.viewport.width as usize + self.col_offset;
if in_bounds {
self.surface.set_string(
self.viewport.x + (position.col - self.col_offset) as u16,
self.viewport.y + position.row as u16,
grapheme,
style,
);
} else if cut_off_start != 0 && cut_off_start < width {
// partially on screen
let rect = Rect::new(
self.viewport.x,
self.viewport.y + position.row as u16,
(width - cut_off_start) as u16,
1,
);
self.surface.set_style(rect, style);
}
if *is_in_indent_area && !is_whitespace {
*last_indent_level = position.col;
*is_in_indent_area = false;
}
}
/// Overlay indentation guides on top of a rendered line
/// The indentation level is computed in `draw_lines`.
/// Therefore this function must always be called afterwards.
pub fn draw_indent_guides(&mut self, indent_level: usize, row: u16) {
if !self.draw_indent_guides {
return;
}
// Don't draw indent guides outside of view
let end_indent = min(
indent_level,
// Add indent_width - 1 to round up, since the first visible
// indent might start a bit after col_offset
self.col_offset + self.viewport.width as usize + (self.indent_width as usize - 1),
) / self.indent_width as usize;
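// Worked example: with col_offset = 0, viewport.width = 80 and indent_width = 4,
// an indent_level of 12 columns gives end_indent = min(12, 0 + 80 + 3) / 4 = 3,
// so guides are drawn for the levels in starting_indent..3.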
for i in self.starting_indent..end_indent {
let x = (self.viewport.x as usize + (i * self.indent_width as usize) - self.col_offset)
as u16;
let y = self.viewport.y + row;
debug_assert!(self.surface.in_bounds(x, y));
self.surface
.set_string(x, y, &self.indent_guide_char, self.indent_guide_style);
}
}
}

@ -1,42 +1,46 @@
use crate::{
commands,
commands::{self, OnKeyCallback},
compositor::{Component, Context, Event, EventResult},
job::{self, Callback},
key,
keymap::{KeymapResult, Keymaps},
ui::{Completion, ProgressSpinners},
ui::{
document::{render_document, LinePos, TextRenderer, TranslatedPosition},
Completion, ProgressSpinners,
},
};
use helix_core::{
diagnostic::NumberOrString,
graphemes::{
ensure_grapheme_boundary_next_byte, next_grapheme_boundary, prev_grapheme_boundary,
},
movement::Direction,
syntax::{self, HighlightEvent},
text_annotations::TextAnnotations,
unicode::width::UnicodeWidthStr,
visual_coords_at_pos, LineEnding, Position, Range, Selection, Transaction,
visual_offset_from_block, Position, Range, Selection, Transaction,
};
use helix_view::{
apply_transaction,
document::{Mode, SCRATCH_BUFFER_NAME},
document::{Mode, SavePoint, SCRATCH_BUFFER_NAME},
editor::{CompleteAction, CursorShapeConfig},
graphics::{Color, CursorKind, Modifier, Rect, Style},
input::{KeyEvent, MouseButton, MouseEvent, MouseEventKind},
keyboard::{KeyCode, KeyModifiers},
Document, Editor, Theme, View,
};
use std::{borrow::Cow, cmp::min, num::NonZeroUsize, path::PathBuf};
use std::{mem::take, num::NonZeroUsize, path::PathBuf, rc::Rc, sync::Arc};
use tui::buffer::Buffer as Surface;
use tui::{buffer::Buffer as Surface, text::Span};
use super::lsp::SignatureHelp;
use super::statusline;
use super::{document::LineDecoration, lsp::SignatureHelp};
pub struct EditorView {
pub keymaps: Keymaps,
on_next_key: Option<Box<dyn FnOnce(&mut commands::Context, KeyEvent)>>,
on_next_key: Option<OnKeyCallback>,
pseudo_pending: Vec<KeyEvent>,
last_insert: (commands::MappableCommand, Vec<InsertEvent>),
pub(crate) last_insert: (commands::MappableCommand, Vec<InsertEvent>),
pub(crate) completion: Option<Completion>,
spinners: ProgressSpinners,
}
@ -46,6 +50,7 @@ pub enum InsertEvent {
Key(KeyEvent),
CompletionApply(CompleteAction),
TriggerCompletion,
RequestCompletion,
}
impl Default for EditorView {
@ -84,6 +89,10 @@ impl EditorView {
let theme = &editor.theme;
let config = editor.config();
let text_annotations = view.text_annotations(doc, Some(theme));
let mut line_decorations: Vec<Box<dyn LineDecoration>> = Vec::new();
let mut translated_positions: Vec<TranslatedPosition> = Vec::new();
// DAP: Highlight current stack frame position
let stack_frame = editor.debugger.as_ref().and_then(|debugger| {
if let (Some(frame), Some(thread_id)) = (debugger.active_frame, debugger.thread_id) {
@ -104,28 +113,40 @@ impl EditorView {
== doc.path()
{
let line = frame.line - 1; // convert to 0-indexing
if line >= view.offset.row && line < view.offset.row + area.height as usize {
surface.set_style(
Rect::new(
area.x,
area.y + (line - view.offset.row) as u16,
area.width,
1,
),
theme.get("ui.highlight"),
);
}
let style = theme.get("ui.highlight");
let line_decoration = move |renderer: &mut TextRenderer, pos: LinePos| {
if pos.doc_line != line {
return;
}
renderer
.surface
.set_style(Rect::new(area.x, pos.visual_line, area.width, 1), style);
};
line_decorations.push(Box::new(line_decoration));
}
}
if is_focused && config.cursorline {
Self::highlight_cursorline(doc, view, surface, theme);
line_decorations.push(Self::cursorline_decorator(doc, view, theme))
}
if is_focused && config.cursorcolumn {
Self::highlight_cursorcolumn(doc, view, surface, theme);
Self::highlight_cursorcolumn(doc, view, surface, theme, inner, &text_annotations);
}
let mut highlights =
Self::doc_syntax_highlights(doc, view.offset.anchor, inner.height, theme);
let overlay_highlights = Self::overlay_syntax_highlights(
doc,
view.offset.anchor,
inner.height,
&text_annotations,
);
if !overlay_highlights.is_empty() {
highlights = Box::new(syntax::merge(highlights, overlay_highlights));
}
let mut highlights = Self::doc_syntax_highlights(doc, view.offset, inner.height, theme);
for diagnostic in Self::doc_diagnostics_highlights(doc, theme) {
// Most of the `diagnostic` Vecs are empty most of the time. Skipping
// a merge for any empty Vec saves a significant amount of work.
@ -134,8 +155,9 @@ impl EditorView {
}
highlights = Box::new(syntax::merge(highlights, diagnostic));
}
let highlights: Box<dyn Iterator<Item = HighlightEvent>> = if is_focused {
Box::new(syntax::merge(
let highlights = syntax::merge(
highlights,
Self::doc_selection_highlights(
editor.mode(),
@ -144,19 +166,52 @@ impl EditorView {
theme,
&config.cursor_shape,
),
))
);
let focused_view_elements = Self::highlight_focused_view_elements(view, doc, theme);
if focused_view_elements.is_empty() {
Box::new(highlights)
} else {
Box::new(syntax::merge(highlights, focused_view_elements))
}
} else {
Box::new(highlights)
};
Self::render_text_highlights(doc, view.offset, inner, surface, theme, highlights, &config);
Self::render_gutter(editor, doc, view, view.area, surface, theme, is_focused);
Self::render_rulers(editor, doc, view, inner, surface, theme);
Self::render_gutter(
editor,
doc,
view,
view.area,
theme,
is_focused,
&mut line_decorations,
);
if is_focused {
Self::render_focused_view_elements(view, doc, inner, theme, surface);
let cursor = doc
.selection(view.id)
.primary()
.cursor(doc.text().slice(..));
// set the cursor_cache to out of view in case the position is not found
editor.cursor_cache.set(Some(None));
let update_cursor_cache =
|_: &mut TextRenderer, pos| editor.cursor_cache.set(Some(Some(pos)));
translated_positions.push((cursor, Box::new(update_cursor_cache)));
}
render_document(
surface,
inner,
doc,
view.offset,
&text_annotations,
highlights,
theme,
&mut line_decorations,
&mut translated_positions,
);
Self::render_rulers(editor, doc, view, inner, surface, theme);
// if we're not at the edge of the screen, draw a right border
if viewport.right() != view.area.right() {
let x = area.right();
@ -204,31 +259,53 @@ impl EditorView {
.iter()
// View might be horizontally scrolled, convert from absolute distance
// from the 1st column to relative distance from left of viewport
.filter_map(|ruler| ruler.checked_sub(1 + view.offset.col as u16))
.filter_map(|ruler| ruler.checked_sub(1 + view.offset.horizontal_offset as u16))
.filter(|ruler| ruler < &viewport.width)
.map(|ruler| viewport.clip_left(ruler).with_width(1))
.for_each(|area| surface.set_style(area, ruler_theme))
}
pub fn overlay_syntax_highlights(
doc: &Document,
anchor: usize,
height: u16,
text_annotations: &TextAnnotations,
) -> Vec<(usize, std::ops::Range<usize>)> {
let text = doc.text().slice(..);
let row = text.char_to_line(anchor.min(text.len_chars()));
let range = {
// Calculate viewport byte ranges:
// Saturating subs to make it inclusive zero indexing.
let last_line = text.len_lines().saturating_sub(1);
let last_visible_line = (row + height as usize).saturating_sub(1).min(last_line);
let start = text.line_to_byte(row.min(last_line));
let end = text.line_to_byte(last_visible_line + 1);
start..end
};
text_annotations.collect_overlay_highlights(range)
}
/// Get syntax highlights for a document in a view represented by the first line
/// and column (`offset`) and the last line. This is done instead of using a view
/// directly to enable rendering syntax highlighted docs anywhere (eg. picker preview)
pub fn doc_syntax_highlights<'doc>(
doc: &'doc Document,
offset: Position,
anchor: usize,
height: u16,
_theme: &Theme,
) -> Box<dyn Iterator<Item = HighlightEvent> + 'doc> {
let text = doc.text().slice(..);
let row = text.char_to_line(anchor.min(text.len_chars()));
let range = {
// Calculate viewport byte ranges:
// Saturating subs to make it inclusive zero indexing.
let last_line = doc.text().len_lines().saturating_sub(1);
let last_visible_line = (offset.row + height as usize)
.saturating_sub(1)
.min(last_line);
let start = text.line_to_byte(offset.row.min(last_line));
let last_line = text.len_lines().saturating_sub(1);
let last_visible_line = (row + height as usize).saturating_sub(1).min(last_line);
let start = text.line_to_byte(row.min(last_line));
let end = text.line_to_byte(last_visible_line + 1);
start..end
@ -273,11 +350,11 @@ impl EditorView {
use helix_core::diagnostic::Severity;
let get_scope_of = |scope| {
theme
.find_scope_index(scope)
.find_scope_index_exact(scope)
// get one of the themes below as fallback values
.or_else(|| theme.find_scope_index("diagnostic"))
.or_else(|| theme.find_scope_index("ui.cursor"))
.or_else(|| theme.find_scope_index("ui.selection"))
.or_else(|| theme.find_scope_index_exact("diagnostic"))
.or_else(|| theme.find_scope_index_exact("ui.cursor"))
.or_else(|| theme.find_scope_index_exact("ui.selection"))
.expect(
"at least one of the following scopes must be defined in the theme: `diagnostic`, `ui.cursor`, or `ui.selection`",
)
@ -340,25 +417,31 @@ impl EditorView {
let cursor_is_block = cursorkind == CursorKind::Block;
let selection_scope = theme
.find_scope_index("ui.selection")
.find_scope_index_exact("ui.selection")
.expect("could not find `ui.selection` scope in the theme!");
let primary_selection_scope = theme
.find_scope_index_exact("ui.selection.primary")
.unwrap_or(selection_scope);
let base_cursor_scope = theme
.find_scope_index("ui.cursor")
.find_scope_index_exact("ui.cursor")
.unwrap_or(selection_scope);
let base_primary_cursor_scope = theme
.find_scope_index("ui.cursor.primary")
.unwrap_or(base_cursor_scope);
let cursor_scope = match mode {
Mode::Insert => theme.find_scope_index("ui.cursor.insert"),
Mode::Select => theme.find_scope_index("ui.cursor.select"),
Mode::Normal => Some(base_cursor_scope),
Mode::Insert => theme.find_scope_index_exact("ui.cursor.insert"),
Mode::Select => theme.find_scope_index_exact("ui.cursor.select"),
Mode::Normal => theme.find_scope_index_exact("ui.cursor.normal"),
}
.unwrap_or(base_cursor_scope);
let primary_cursor_scope = theme
.find_scope_index("ui.cursor.primary")
.unwrap_or(cursor_scope);
let primary_selection_scope = theme
.find_scope_index("ui.selection.primary")
.unwrap_or(selection_scope);
let primary_cursor_scope = match mode {
Mode::Insert => theme.find_scope_index_exact("ui.cursor.primary.insert"),
Mode::Select => theme.find_scope_index_exact("ui.cursor.primary.select"),
Mode::Normal => theme.find_scope_index_exact("ui.cursor.primary.normal"),
}
.unwrap_or(base_primary_cursor_scope);
let mut spans: Vec<(usize, std::ops::Range<usize>)> = Vec::new();
for (i, range) in selection.iter().enumerate() {
@ -386,7 +469,14 @@ impl EditorView {
if range.head > range.anchor {
// Standard case.
let cursor_start = prev_grapheme_boundary(text, range.head);
spans.push((selection_scope, range.anchor..cursor_start));
// non block cursors look like they exclude the cursor
let selection_end =
if selection_is_primary && !cursor_is_block && mode != Mode::Insert {
range.head
} else {
cursor_start
};
spans.push((selection_scope, range.anchor..selection_end));
if !selection_is_primary || cursor_is_block {
spans.push((cursor_scope, cursor_start..range.head));
}
@ -396,255 +486,42 @@ impl EditorView {
if !selection_is_primary || cursor_is_block {
spans.push((cursor_scope, range.head..cursor_end));
}
spans.push((selection_scope, cursor_end..range.anchor));
// non block cursors look like they exclude the cursor
let selection_start = if selection_is_primary
&& !cursor_is_block
&& !(mode == Mode::Insert && cursor_end == range.anchor)
{
range.head
} else {
cursor_end
};
spans.push((selection_scope, selection_start..range.anchor));
}
}
spans
}
pub fn render_text_highlights<H: Iterator<Item = HighlightEvent>>(
doc: &Document,
offset: Position,
viewport: Rect,
surface: &mut Surface,
theme: &Theme,
highlights: H,
config: &helix_view::editor::Config,
) {
let whitespace = &config.whitespace;
use helix_view::editor::WhitespaceRenderValue;
// It's slightly more efficient to produce a full RopeSlice from the Rope, then slice that a bunch
// of times than it is to always call Rope::slice/get_slice (it will internally always hit RSEnum::Light).
let text = doc.text().slice(..);
let characters = &whitespace.characters;
let mut spans = Vec::new();
let mut visual_x = 0usize;
let mut line = 0u16;
let tab_width = doc.tab_width();
let tab = if whitespace.render.tab() == WhitespaceRenderValue::All {
std::iter::once(characters.tab)
.chain(std::iter::repeat(characters.tabpad).take(tab_width - 1))
.collect()
} else {
" ".repeat(tab_width)
};
let space = characters.space.to_string();
let nbsp = characters.nbsp.to_string();
let newline = if whitespace.render.newline() == WhitespaceRenderValue::All {
characters.newline.to_string()
} else {
" ".to_string()
};
let indent_guide_char = config.indent_guides.character.to_string();
let text_style = theme.get("ui.text");
let whitespace_style = theme.get("ui.virtual.whitespace");
let mut is_in_indent_area = true;
let mut last_line_indent_level = 0;
// use whitespace style as fallback for indent-guide
let indent_guide_style = text_style.patch(
theme
.try_get("ui.virtual.indent-guide")
.unwrap_or_else(|| theme.get("ui.virtual.whitespace")),
);
let draw_indent_guides = |indent_level, line, surface: &mut Surface| {
if !config.indent_guides.render {
return;
}
let starting_indent =
(offset.col / tab_width) + config.indent_guides.skip_levels as usize;
// Don't draw indent guides outside of view
let end_indent = min(
indent_level,
// Add tab_width - 1 to round up, since the first visible
// indent might be a bit after offset.col
offset.col + viewport.width as usize + (tab_width - 1),
) / tab_width;
for i in starting_indent..end_indent {
let x = (viewport.x as usize + (i * tab_width) - offset.col) as u16;
let y = viewport.y + line;
debug_assert!(surface.in_bounds(x, y));
surface.set_string(x, y, &indent_guide_char, indent_guide_style);
}
};
'outer: for event in highlights {
match event {
HighlightEvent::HighlightStart(span) => {
spans.push(span);
}
HighlightEvent::HighlightEnd => {
spans.pop();
}
HighlightEvent::Source { start, end } => {
let is_trailing_cursor = text.len_chars() < end;
// `unwrap_or_else` part is for off-the-end indices of
// the rope, to allow cursor highlighting at the end
// of the rope.
let text = text.get_slice(start..end).unwrap_or_else(|| " ".into());
let style = spans
.iter()
.fold(text_style, |acc, span| acc.patch(theme.highlight(span.0)));
let space = if whitespace.render.space() == WhitespaceRenderValue::All
&& !is_trailing_cursor
{
&space
} else {
" "
};
let nbsp = if whitespace.render.nbsp() == WhitespaceRenderValue::All
&& text.len_chars() < end
{
&nbsp
} else {
" "
};
use helix_core::graphemes::{grapheme_width, RopeGraphemes};
for grapheme in RopeGraphemes::new(text) {
let out_of_bounds = offset.col > visual_x
|| visual_x >= viewport.width as usize + offset.col;
if LineEnding::from_rope_slice(&grapheme).is_some() {
if !out_of_bounds {
// we still want to render an empty cell with the style
surface.set_string(
(viewport.x as usize + visual_x - offset.col) as u16,
viewport.y + line,
&newline,
style.patch(whitespace_style),
);
}
draw_indent_guides(last_line_indent_level, line, surface);
visual_x = 0;
line += 1;
is_in_indent_area = true;
// TODO: with proper iter this shouldn't be necessary
if line >= viewport.height {
break 'outer;
}
} else {
let grapheme = Cow::from(grapheme);
let is_whitespace;
let (display_grapheme, width) = if grapheme == "\t" {
is_whitespace = true;
// make sure we display tab as appropriate amount of spaces
let visual_tab_width = tab_width - (visual_x % tab_width);
let grapheme_tab_width =
helix_core::str_utils::char_to_byte_idx(&tab, visual_tab_width);
(&tab[..grapheme_tab_width], visual_tab_width)
} else if grapheme == " " {
is_whitespace = true;
(space, 1)
} else if grapheme == "\u{00A0}" {
is_whitespace = true;
(nbsp, 1)
} else {
is_whitespace = false;
// Cow will prevent allocations if span contained in a single slice
// which should really be the majority case
let width = grapheme_width(&grapheme);
(grapheme.as_ref(), width)
};
let cut_off_start = offset.col.saturating_sub(visual_x);
if !out_of_bounds {
// if we're offscreen just keep going until we hit a new line
surface.set_string(
(viewport.x as usize + visual_x - offset.col) as u16,
viewport.y + line,
display_grapheme,
if is_whitespace {
style.patch(whitespace_style)
} else {
style
},
);
} else if cut_off_start != 0 && cut_off_start < width {
// partially on screen
let rect = Rect::new(
viewport.x,
viewport.y + line,
(width - cut_off_start) as u16,
1,
);
surface.set_style(
rect,
if is_whitespace {
style.patch(whitespace_style)
} else {
style
},
);
}
if is_in_indent_area && !(grapheme == " " || grapheme == "\t") {
draw_indent_guides(visual_x, line, surface);
is_in_indent_area = false;
last_line_indent_level = visual_x;
}
visual_x = visual_x.saturating_add(width);
}
}
}
}
}
}
/// Render brace match, etc (meant for the focused view only)
pub fn render_focused_view_elements(
pub fn highlight_focused_view_elements(
view: &View,
doc: &Document,
viewport: Rect,
theme: &Theme,
surface: &mut Surface,
) {
) -> Vec<(usize, std::ops::Range<usize>)> {
// Highlight matching braces
if let Some(syntax) = doc.syntax() {
let text = doc.text().slice(..);
use helix_core::match_brackets;
let pos = doc.selection(view.id).primary().cursor(text);
let pos = match_brackets::find_matching_bracket(syntax, doc.text(), pos)
.and_then(|pos| view.screen_coords_at_pos(doc, text, pos));
if let Some(pos) = pos {
if let Some(pos) = match_brackets::find_matching_bracket(syntax, doc.text(), pos) {
// ensure col is on screen
if (pos.col as u16) < viewport.width + view.offset.col as u16
&& pos.col >= view.offset.col
{
let style = theme.try_get("ui.cursor.match").unwrap_or_else(|| {
Style::default()
.add_modifier(Modifier::REVERSED)
.add_modifier(Modifier::DIM)
});
surface[(viewport.x + pos.col as u16, viewport.y + pos.row as u16)]
.set_style(style);
if let Some(highlight) = theme.find_scope_index_exact("ui.cursor.match") {
return vec![(highlight, pos..pos + 1)];
}
}
}
Vec::new()
}
/// Render bufferline at the top
@ -700,22 +577,17 @@ impl EditorView {
}
}
pub fn render_gutter(
editor: &Editor,
doc: &Document,
pub fn render_gutter<'d>(
editor: &'d Editor,
doc: &'d Document,
view: &View,
viewport: Rect,
surface: &mut Surface,
theme: &Theme,
is_focused: bool,
line_decorations: &mut Vec<Box<(dyn LineDecoration + 'd)>>,
) {
let text = doc.text().slice(..);
let last_line = view.last_line(doc);
// it's used inside an iterator so the collect isn't needless:
// https://github.com/rust-lang/rust-clippy/issues/6164
#[allow(clippy::needless_collect)]
let cursors: Vec<_> = doc
let cursors: Rc<[_]> = doc
.selection(view.id)
.iter()
.map(|range| range.cursor_line(text))
@ -725,29 +597,36 @@ impl EditorView {
let gutter_style = theme.get("ui.gutter");
let gutter_selected_style = theme.get("ui.gutter.selected");
// avoid lots of small allocations by reusing a text buffer for each line
let mut text = String::with_capacity(8);
let gutter_style_virtual = theme.get("ui.gutter.virtual");
let gutter_selected_style_virtual = theme.get("ui.gutter.selected.virtual");
for gutter_type in view.gutters() {
let mut gutter = gutter_type.style(editor, doc, view, theme, is_focused);
let width = gutter_type.width(view, doc);
text.reserve(width); // ensure there's enough space for the gutter
for (i, line) in (view.offset.row..(last_line + 1)).enumerate() {
let selected = cursors.contains(&line);
// avoid lots of small allocations by reusing a text buffer for each line
let mut text = String::with_capacity(width);
let cursors = cursors.clone();
let gutter_decoration = move |renderer: &mut TextRenderer, pos: LinePos| {
// TODO handle softwrap in gutters
let selected = cursors.contains(&pos.doc_line);
let x = viewport.x + offset;
let y = viewport.y + i as u16;
let y = viewport.y + pos.visual_line;
let gutter_style = if selected {
gutter_selected_style
} else {
gutter_style
let gutter_style = match (selected, pos.first_visual_line) {
(false, true) => gutter_style,
(true, true) => gutter_selected_style,
(false, false) => gutter_style_virtual,
(true, false) => gutter_selected_style_virtual,
};
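// The (selected, first_visual_line) match above keeps soft-wrapped continuation
// lines distinct: only the first visual line of a document line uses the plain
// `ui.gutter` / `ui.gutter.selected` styles, while wrapped continuations fall back
// to the `ui.gutter.virtual` / `ui.gutter.selected.virtual` variants.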
if let Some(style) = gutter(line, selected, &mut text) {
surface.set_stringn(x, y, &text, width, gutter_style.patch(style));
if let Some(style) =
gutter(pos.doc_line, selected, pos.first_visual_line, &mut text)
{
renderer
.surface
.set_stringn(x, y, &text, width, gutter_style.patch(style));
} else {
surface.set_style(
renderer.surface.set_style(
Rect {
x,
y,
@ -758,7 +637,8 @@ impl EditorView {
);
}
text.clear();
}
};
line_decorations.push(Box::new(gutter_decoration));
offset += width as u16;
}
@ -805,6 +685,14 @@ impl EditorView {
});
let text = Text::styled(&diagnostic.message, style);
lines.extend(text.lines);
let code = diagnostic.code.as_ref().map(|x| match x {
NumberOrString::Number(n) => format!("({n})"),
NumberOrString::String(s) => format!("({s})"),
});
if let Some(code) = code {
let span = Span::styled(code, style);
lines.push(span.into());
}
}
let paragraph = Paragraph::new(lines)
@ -819,10 +707,13 @@ impl EditorView {
}
/// Apply the highlighting on the lines where a cursor is active
pub fn highlight_cursorline(doc: &Document, view: &View, surface: &mut Surface, theme: &Theme) {
pub fn cursorline_decorator(
doc: &Document,
view: &View,
theme: &Theme,
) -> Box<dyn LineDecoration> {
let text = doc.text().slice(..);
let last_line = view.last_line(doc);
// TODO only highlight the visual line that contains the cursor instead of the full visual line
let primary_line = doc.selection(view.id).primary().cursor_line(text);
// The secondary_lines do contain the primary_line, it doesn't matter
@ -839,20 +730,18 @@ impl EditorView {
let primary_style = theme.get("ui.cursorline.primary");
let secondary_style = theme.get("ui.cursorline.secondary");
for line in view.offset.row..(last_line + 1) {
let area = Rect::new(
view.area.x,
view.area.y + (line - view.offset.row) as u16,
view.area.width,
1,
);
if primary_line == line {
surface.set_style(area, primary_style);
} else if secondary_lines.binary_search(&line).is_ok() {
surface.set_style(area, secondary_style);
let viewport = view.area;
let line_decoration = move |renderer: &mut TextRenderer, pos: LinePos| {
let area = Rect::new(viewport.x, viewport.y + pos.visual_line, viewport.width, 1);
if primary_line == pos.doc_line {
renderer.surface.set_style(area, primary_style);
} else if secondary_lines.binary_search(&pos.doc_line).is_ok() {
renderer.surface.set_style(area, secondary_style);
}
}
};
Box::new(line_decoration)
}
/// Apply the highlighting on the columns where a cursor is active
@ -861,6 +750,8 @@ impl EditorView {
view: &View,
surface: &mut Surface,
theme: &Theme,
viewport: Rect,
text_annotations: &TextAnnotations,
) {
let text = doc.text().slice(..);
@ -876,19 +767,23 @@ impl EditorView {
.unwrap_or_else(|| theme.get("ui.cursorline.secondary"));
let inner_area = view.inner_area(doc);
let offset = view.offset.col;
let selection = doc.selection(view.id);
let primary = selection.primary();
let text_format = doc.text_format(viewport.width, None);
for range in selection.iter() {
let is_primary = primary == *range;
let cursor = range.cursor(text);
let Position { col, .. } =
visual_offset_from_block(text, cursor, cursor, &text_format, text_annotations).0;
let Position { row: _, col } =
visual_coords_at_pos(text, range.cursor(text), doc.tab_width());
// if the cursor is horizontally in the view
if col >= offset && inner_area.width > (col - offset) as u16 {
if col >= view.offset.horizontal_offset
&& inner_area.width > (col - view.offset.horizontal_offset) as u16
{
let area = Rect::new(
inner_area.x + (col - offset) as u16,
inner_area.x + (col - view.offset.horizontal_offset) as u16,
view.area.y,
1,
view.area.height,
@ -935,6 +830,7 @@ impl EditorView {
(Mode::Insert, Mode::Normal) => {
// if exiting insert mode, remove completion
self.completion = None;
cxt.editor.completion_request_handle = None;
// TODO: Use an on_mode_change hook to remove signature help
cxt.jobs.callback(async {
@ -1005,6 +901,8 @@ impl EditorView {
for _ in 0..cxt.editor.count.map_or(1, NonZeroUsize::into) {
// first execute whatever put us into insert mode
self.last_insert.0.execute(cxt);
let mut last_savepoint = None;
let mut last_request_savepoint = None;
// then replay the inputs
for key in self.last_insert.1.clone() {
match key {
@ -1012,7 +910,9 @@ impl EditorView {
InsertEvent::CompletionApply(compl) => {
let (view, doc) = current!(cxt.editor);
doc.restore(view);
if let Some(last_savepoint) = last_savepoint.as_deref() {
doc.restore(view, last_savepoint);
}
let text = doc.text().slice(..);
let cursor = doc.selection(view.id).primary().cursor(text);
@ -1026,11 +926,14 @@ impl EditorView {
(shift_position(start), shift_position(end), t)
}),
);
apply_transaction(&tx, doc, view);
doc.apply(&tx, view.id);
}
InsertEvent::TriggerCompletion => {
let (_, doc) = current!(cxt.editor);
doc.savepoint();
last_savepoint = take(&mut last_request_savepoint);
}
InsertEvent::RequestCompletion => {
let (view, doc) = current!(cxt.editor);
last_request_savepoint = Some(doc.savepoint(view));
}
}
}
@ -1055,26 +958,31 @@ impl EditorView {
}
}
#[allow(clippy::too_many_arguments)]
pub fn set_completion(
&mut self,
editor: &mut Editor,
savepoint: Arc<SavePoint>,
items: Vec<helix_lsp::lsp::CompletionItem>,
offset_encoding: helix_lsp::OffsetEncoding,
start_offset: usize,
trigger_offset: usize,
size: Rect,
) {
let mut completion =
Completion::new(editor, items, offset_encoding, start_offset, trigger_offset);
let mut completion = Completion::new(
editor,
savepoint,
items,
offset_encoding,
start_offset,
trigger_offset,
);
if completion.is_empty() {
// skip if we got no completion results
return;
}
// Immediately initialize a savepoint
doc_mut!(editor).savepoint();
editor.last_completion = None;
self.last_insert.1.push(InsertEvent::TriggerCompletion);
@ -1087,12 +995,12 @@ impl EditorView {
self.completion = None;
// Clear any savepoints
let doc = doc_mut!(editor);
doc.savepoint = None;
editor.clear_idle_timer(); // don't retrigger
}
pub fn handle_idle_timeout(&mut self, cx: &mut commands::Context) -> EventResult {
commands::compute_inlay_hints_for_all_views(cx.editor, cx.jobs);
if let Some(completion) = &mut self.completion {
return if completion.ensure_item_resolved(cx) {
EventResult::Consumed(None)
@ -1117,6 +1025,10 @@ impl EditorView {
event: &MouseEvent,
cxt: &mut commands::Context,
) -> EventResult {
if event.kind != MouseEventKind::Moved {
cxt.editor.reset_idle_timer();
}
let config = cxt.editor.config();
let MouseEvent {
kind,
@ -1128,7 +1040,7 @@ impl EditorView {
let pos_and_view = |editor: &Editor, row, column| {
editor.tree.views().find_map(|(view, _focus)| {
view.pos_at_screen_coords(&editor.documents[&view.doc], row, column)
view.pos_at_screen_coords(&editor.documents[&view.doc], row, column, true)
.map(|pos| (pos, view.id))
})
};
@ -1170,8 +1082,10 @@ impl EditorView {
None => return EventResult::Ignored(None),
};
let line = coords.row + view.offset.row;
if line < doc.text().len_lines() {
if let Some(char_idx) =
view.pos_at_visual_coords(doc, coords.row as u16, coords.col as u16, true)
{
let line = doc.text().char_to_line(char_idx);
commands::dap_toggle_breakpoint_impl(cxt, path, line);
return EventResult::Consumed(None);
}
@ -1183,7 +1097,7 @@ impl EditorView {
MouseEventKind::Drag(MouseButton::Left) => {
let (view, doc) = current!(cxt.editor);
let pos = match view.pos_at_screen_coords(doc, row, column) {
let pos = match view.pos_at_screen_coords(doc, row, column, true) {
Some(pos) => pos,
None => return EventResult::Ignored(None),
};
@ -1247,8 +1161,9 @@ impl EditorView {
cxt.editor.focus(view_id);
let (view, doc) = current!(cxt.editor);
let line = coords.row + view.offset.row;
if let Ok(pos) = doc.text().try_line_to_char(line) {
if let Some(pos) =
view.pos_at_visual_coords(doc, coords.row as u16, coords.col as u16, true)
{
doc.set_selection(view_id, Selection::point(pos));
if modifiers == KeyModifiers::ALT {
commands::MappableCommand::dap_edit_log.execute(cxt);

@ -4,41 +4,209 @@ use fuzzy_matcher::FuzzyMatcher;
#[cfg(test)]
mod test;
struct QueryAtom {
kind: QueryAtomKind,
atom: String,
ignore_case: bool,
inverse: bool,
}
impl QueryAtom {
fn new(atom: &str) -> Option<QueryAtom> {
let mut atom = atom.to_string();
let inverse = atom.starts_with('!');
if inverse {
atom.remove(0);
}
let mut kind = match atom.chars().next() {
Some('^') => QueryAtomKind::Prefix,
Some('\'') => QueryAtomKind::Substring,
_ if inverse => QueryAtomKind::Substring,
_ => QueryAtomKind::Fuzzy,
};
if atom.starts_with(['^', '\'']) {
atom.remove(0);
}
if atom.is_empty() {
return None;
}
if atom.ends_with('$') && !atom.ends_with("\\$") {
atom.pop();
kind = if kind == QueryAtomKind::Prefix {
QueryAtomKind::Exact
} else {
QueryAtomKind::Postfix
}
}
Some(QueryAtom {
kind,
atom: atom.replace('\\', ""),
// not ideal but fuzzy_matches only knows ascii uppercase so more consistent
// to behave the same
ignore_case: kind != QueryAtomKind::Fuzzy
&& atom.chars().all(|c| c.is_ascii_lowercase()),
inverse,
})
}
fn indices(&self, matcher: &Matcher, item: &str, indices: &mut Vec<usize>) -> bool {
// for inverse atoms there are no indices to return,
// just return whether we matched
if self.inverse {
return self.matches(matcher, item);
}
let buf;
let item = if self.ignore_case {
buf = item.to_ascii_lowercase();
&buf
} else {
item
};
let off = match self.kind {
QueryAtomKind::Fuzzy => {
if let Some((_, fuzzy_indices)) = matcher.fuzzy_indices(item, &self.atom) {
indices.extend_from_slice(&fuzzy_indices);
return true;
} else {
return false;
}
}
QueryAtomKind::Substring => {
if let Some(off) = item.find(&self.atom) {
off
} else {
return false;
}
}
QueryAtomKind::Prefix if item.starts_with(&self.atom) => 0,
QueryAtomKind::Postfix if item.ends_with(&self.atom) => item.len() - self.atom.len(),
QueryAtomKind::Exact if item == self.atom => 0,
_ => return false,
};
indices.extend(off..(off + self.atom.len()));
true
}
fn matches(&self, matcher: &Matcher, item: &str) -> bool {
let buf;
let item = if self.ignore_case {
buf = item.to_ascii_lowercase();
&buf
} else {
item
};
let mut res = match self.kind {
QueryAtomKind::Fuzzy => matcher.fuzzy_match(item, &self.atom).is_some(),
QueryAtomKind::Substring => item.contains(&self.atom),
QueryAtomKind::Prefix => item.starts_with(&self.atom),
QueryAtomKind::Postfix => item.ends_with(&self.atom),
QueryAtomKind::Exact => item == self.atom,
};
if self.inverse {
res = !res;
}
res
}
}
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
enum QueryAtomKind {
/// Item is a fuzzy match of this atom
///
/// Usage: `foo`
Fuzzy,
/// Item contains the query atom as a continuous substring
///
/// Usage: `'foo`
Substring,
/// Item starts with query atom
///
/// Usage: `^foo`
Prefix,
/// Item ends with query atom
///
/// Usage: `foo$`
Postfix,
/// Item is equal to query atom
///
/// Usage: `^foo$`
Exact,
}
#[derive(Default)]
pub struct FuzzyQuery {
queries: Vec<String>,
first_fuzzy_atom: Option<String>,
query_atoms: Vec<QueryAtom>,
}
fn query_atoms(query: &str) -> impl Iterator<Item = &str> + '_ {
let mut saw_backslash = false;
query.split(move |c| {
saw_backslash = match c {
' ' if !saw_backslash => return true,
'\\' => true,
_ => false,
};
false
})
}
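// Sketch of how a full query string decomposes, following `query_atoms` and
// `QueryAtom::new` above:
//
//     "foo 'bar ^baz qux$ ^quux$ !skip one\ atom"
//
// splits into Fuzzy("foo"), Substring("bar"), Prefix("baz"), Postfix("qux"),
// Exact("quux"), an inverse Substring("skip") and a single Fuzzy("one atom"),
// since the escaped space does not terminate the last atom.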
impl FuzzyQuery {
pub fn refine(&self, query: &str, old_query: &str) -> (FuzzyQuery, bool) {
// TODO: we could be a lot smarter about this
let new_query = Self::new(query);
let mut is_refinement = query.starts_with(old_query);
// if the last atom is an inverse atom, adding more text to it
// will actually increase the number of matches, so we cannot refine
// the matches
if is_refinement && !self.query_atoms.is_empty() {
let last_idx = self.query_atoms.len() - 1;
if self.query_atoms[last_idx].inverse
&& self.query_atoms[last_idx].atom != new_query.query_atoms[last_idx].atom
{
is_refinement = false;
}
}
(new_query, is_refinement)
}
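// Sketch: `FuzzyQuery::new("doc").refine("docu", "doc")` reports a refinement, so the
// picker only needs to re-score the matches it already has, while extending an
// inverse atom (e.g. going from "!lock" to "!locks") is not treated as a refinement
// because that can only grow the set of matching items.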
pub fn new(query: &str) -> FuzzyQuery {
let mut saw_backslash = false;
let queries = query
.split(|c| {
saw_backslash = match c {
' ' if !saw_backslash => return true,
'\\' => true,
_ => false,
};
false
})
.filter_map(|query| {
if query.is_empty() {
let mut first_fuzzy_query = None;
let query_atoms = query_atoms(query)
.filter_map(|atom| {
let atom = QueryAtom::new(atom)?;
if atom.kind == QueryAtomKind::Fuzzy && first_fuzzy_query.is_none() {
first_fuzzy_query = Some(atom.atom);
None
} else {
Some(query.replace("\\ ", " "))
Some(atom)
}
})
.collect();
FuzzyQuery { queries }
FuzzyQuery {
first_fuzzy_atom: first_fuzzy_query,
query_atoms,
}
}
pub fn fuzzy_match(&self, item: &str, matcher: &Matcher) -> Option<i64> {
// use the rank of the first query for the rank, because merging ranks is not really possible
// use the rank of the first fuzzy atom for the rank, because merging ranks is not really possible
// this behaviour matches fzf and skim
let score = matcher.fuzzy_match(item, self.queries.get(0)?)?;
let score = self
.first_fuzzy_atom
.as_ref()
.map_or(Some(0), |atom| matcher.fuzzy_match(item, atom))?;
if self
.queries
.query_atoms
.iter()
.any(|query| matcher.fuzzy_match(item, query).is_none())
.any(|atom| !atom.matches(matcher, item))
{
return None;
}
@ -46,29 +214,26 @@ impl FuzzyQuery {
}
pub fn fuzzy_indicies(&self, item: &str, matcher: &Matcher) -> Option<(i64, Vec<usize>)> {
if self.queries.len() == 1 {
return matcher.fuzzy_indices(item, &self.queries[0]);
}
// use the rank of the first query for the rank, because merging ranks is not really possible
// this behaviour matches fzf and skim
let (score, mut indicies) = matcher.fuzzy_indices(item, self.queries.get(0)?)?;
let (score, mut indices) = self.first_fuzzy_atom.as_ref().map_or_else(
|| Some((0, Vec::new())),
|atom| matcher.fuzzy_indices(item, atom),
)?;
// fast path for the common case of not using a space
// during matching this branch should be free thanks to branch prediction
if self.queries.len() == 1 {
return Some((score, indicies));
// fast path for the common case of just a single atom
if self.query_atoms.is_empty() {
return Some((score, indices));
}
for query in &self.queries[1..] {
let (_, matched_indicies) = matcher.fuzzy_indices(item, query)?;
indicies.extend_from_slice(&matched_indicies);
for atom in &self.query_atoms {
if !atom.indices(matcher, item, &mut indices) {
return None;
}
}
// dedup to remove duplicate match indices
indicies.sort_unstable();
indicies.dedup();
indices.sort_unstable();
indices.dedup();
Some((score, indicies))
Some((score, indices))
}
}

@ -53,7 +53,10 @@ impl Component for SignatureHelp {
let active_param_span = self.active_param_range.map(|(start, end)| {
vec![(
cx.editor.theme.find_scope_index("ui.selection").unwrap(),
cx.editor
.theme
.find_scope_index_exact("ui.selection")
.unwrap(),
start..end,
)]
});

@ -342,13 +342,10 @@ impl Component for Markdown {
fn required_size(&mut self, viewport: (u16, u16)) -> Option<(u16, u16)> {
let padding = 2;
if padding >= viewport.1 || padding >= viewport.0 {
return None;
}
let contents = self.parse(None);
// TODO: account for tab width
let max_text_width = (viewport.0 - padding).min(120);
let max_text_width = (viewport.0.saturating_sub(padding)).min(120);
let (width, height) = crate::ui::text::required_size(&contents, max_text_width);
Some((width + padding, height + padding))

@ -4,7 +4,7 @@ use crate::{
compositor::{Callback, Component, Compositor, Context, Event, EventResult},
ctrl, key, shift,
};
use tui::{buffer::Buffer as Surface, text::Spans, widgets::Table};
use tui::{buffer::Buffer as Surface, widgets::Table};
pub use tui::widgets::{Cell, Row};
@ -18,28 +18,24 @@ pub trait Item {
/// Additional editor state that is used for label calculation.
type Data;
fn label(&self, data: &Self::Data) -> Spans;
fn format(&self, data: &Self::Data) -> Row;
fn sort_text(&self, data: &Self::Data) -> Cow<str> {
let label: String = self.label(data).into();
let label: String = self.format(data).cell_text().collect();
label.into()
}
fn filter_text(&self, data: &Self::Data) -> Cow<str> {
let label: String = self.label(data).into();
let label: String = self.format(data).cell_text().collect();
label.into()
}
fn row(&self, data: &Self::Data) -> Row {
Row::new(vec![Cell::from(self.label(data))])
}
}
impl Item for PathBuf {
/// Root prefix to strip.
type Data = PathBuf;
fn label(&self, root_path: &Self::Data) -> Spans {
fn format(&self, root_path: &Self::Data) -> Row {
self.strip_prefix(root_path)
.unwrap_or(self)
.to_string_lossy()
@ -47,6 +43,8 @@ impl Item for PathBuf {
}
}
pub type MenuCallback<T> = Box<dyn Fn(&mut Editor, Option<&T>, MenuEvent)>;
pub struct Menu<T: Item> {
options: Vec<T>,
editor_data: T::Data,
@ -59,7 +57,7 @@ pub struct Menu<T: Item> {
widths: Vec<Constraint>,
callback_fn: Box<dyn Fn(&mut Editor, Option<&T>, MenuEvent)>,
callback_fn: MenuCallback<T>,
scroll: usize,
size: (u16, u16),
@ -81,7 +79,7 @@ impl<T: Item> Menu<T> {
Self {
options,
editor_data,
matcher: Box::new(Matcher::default()),
matcher: Box::new(Matcher::default().ignore_case()),
matches,
cursor: None,
widths: Vec::new(),
@ -144,10 +142,10 @@ impl<T: Item> Menu<T> {
let n = self
.options
.first()
.map(|option| option.row(&self.editor_data).cells.len())
.map(|option| option.format(&self.editor_data).cells.len())
.unwrap_or_default();
let max_lens = self.options.iter().fold(vec![0; n], |mut acc, option| {
let row = option.row(&self.editor_data);
let row = option.format(&self.editor_data);
// maintain max for each column
for (acc, cell) in acc.iter_mut().zip(row.cells.iter()) {
let width = cell.content.width();
@ -256,12 +254,12 @@ impl<T: Item + 'static> Component for Menu<T> {
return EventResult::Consumed(close_fn);
}
// arrow up/ctrl-p/shift-tab prev completion choice (including updating the doc)
shift!(Tab) | key!(Up) | ctrl!('p') | ctrl!('k') => {
shift!(Tab) | key!(Up) | ctrl!('p') => {
self.move_up();
(self.callback_fn)(cx.editor, self.selection(), MenuEvent::Update);
return EventResult::Consumed(None);
}
key!(Tab) | key!(Down) | ctrl!('n') | ctrl!('j') => {
key!(Tab) | key!(Down) | ctrl!('n') => {
// arrow down/ctrl-n/tab advances completion choice (including updating the doc)
self.move_down();
(self.callback_fn)(cx.editor, self.selection(), MenuEvent::Update);
@ -331,7 +329,9 @@ impl<T: Item + 'static> Component for Menu<T> {
(a + b - 1) / b
}
let rows = options.iter().map(|option| option.row(&self.editor_data));
let rows = options
.iter()
.map(|option| option.format(&self.editor_data));
let table = Table::new(rows)
.style(style)
.highlight_style(selected)

@ -1,4 +1,5 @@
mod completion;
mod document;
pub(crate) mod editor;
mod fuzzy_match;
mod info;
@ -14,6 +15,7 @@ mod statusline;
mod text;
use crate::compositor::{Component, Compositor};
use crate::filter_picker_entry;
use crate::job::{self, Callback};
pub use completion::Completion;
pub use editor::EditorView;
@ -162,6 +164,9 @@ pub fn file_picker(root: PathBuf, config: &helix_view::editor::Config) -> FilePi
let now = Instant::now();
let dedup_symlinks = config.file_picker.deduplicate_links;
let absolute_root = root.canonicalize().unwrap_or_else(|_| root.clone());
let mut walk_builder = WalkBuilder::new(&root);
walk_builder
.hidden(config.file_picker.hidden)
@ -172,10 +177,7 @@ pub fn file_picker(root: PathBuf, config: &helix_view::editor::Config) -> FilePi
.git_global(config.file_picker.git_global)
.git_exclude(config.file_picker.git_exclude)
.max_depth(config.file_picker.max_depth)
// We always want to ignore the .git directory, otherwise if
// `ignore` is turned off above, we end up with a lot of noise
// in our picker.
.filter_entry(|entry| entry.file_name() != ".git");
.filter_entry(move |entry| filter_picker_entry(entry, &absolute_root, dedup_symlinks));
// We want to exclude files that the editor can't handle yet
let mut type_builder = TypesBuilder::new();
@ -194,14 +196,11 @@ pub fn file_picker(root: PathBuf, config: &helix_view::editor::Config) -> FilePi
// We want files along with their modification date for sorting
let files = walk_builder.build().filter_map(|entry| {
let entry = entry.ok()?;
// This is faster than entry.path().is_dir() since it uses cached fs::Metadata fetched by ignore/walkdir
let is_dir = entry.file_type().map_or(false, |ft| ft.is_dir());
if is_dir {
// Will give a false positive if metadata cannot be read (eg. permission error)
None
} else {
if entry.file_type()?.is_file() {
Some(entry.into_path())
} else {
None
}
});
@ -281,10 +280,10 @@ pub mod completers {
}
pub fn theme(_editor: &Editor, input: &str) -> Vec<Completion> {
let mut names = theme::Loader::read_names(&helix_loader::runtime_dir().join("themes"));
names.extend(theme::Loader::read_names(
&helix_loader::config_dir().join("themes"),
));
let mut names = theme::Loader::read_names(&helix_loader::config_dir().join("themes"));
for rt_dir in helix_loader::runtime_dirs() {
names.extend(theme::Loader::read_names(&rt_dir.join("themes")));
}
names.push("default".into());
names.push("base16_default".into());
names.sort();

@ -1,28 +1,38 @@
use crate::{
alt,
compositor::{Component, Compositor, Context, Event, EventResult},
ctrl, key, shift,
ui::{self, fuzzy_match::FuzzyQuery, EditorView},
ui::{
self,
document::{render_document, LineDecoration, LinePos, TextRenderer},
fuzzy_match::FuzzyQuery,
EditorView,
},
};
use futures_util::future::BoxFuture;
use tui::{
buffer::Buffer as Surface,
widgets::{Block, BorderType, Borders},
layout::Constraint,
text::{Span, Spans},
widgets::{Block, BorderType, Borders, Cell, Table},
};
use fuzzy_matcher::skim::SkimMatcherV2 as Matcher;
use tui::widgets::Widget;
use std::{
cmp::{self, Ordering},
time::Instant,
};
use std::cmp::{self, Ordering};
use std::{collections::HashMap, io::Read, path::PathBuf};
use crate::ui::{Prompt, PromptEvent};
use helix_core::{movement::Direction, Position};
use helix_core::{
movement::Direction, text_annotations::TextAnnotations,
unicode::segmentation::UnicodeSegmentation, Position,
};
use helix_view::{
editor::Action,
graphics::{CursorKind, Margin, Modifier, Rect},
theme::Style,
view::ViewPosition,
Document, DocumentId, Editor,
};
@ -60,6 +70,8 @@ impl From<DocumentId> for PathOrId {
}
}
type FileCallback<T> = Box<dyn Fn(&Editor, &T) -> Option<FileLocation>>;
/// File path and range of lines (used to align and highlight lines)
pub type FileLocation = (PathOrId, Option<(usize, usize)>);
@ -70,7 +82,7 @@ pub struct FilePicker<T: Item> {
preview_cache: HashMap<PathBuf, CachedPreview>,
read_buffer: Vec<u8>,
/// Given an item in the picker, return the file path and line number to display.
file_fn: Box<dyn Fn(&Editor, &T) -> Option<FileLocation>>,
file_fn: FileCallback<T>,
}
pub enum CachedPreview {
@ -178,7 +190,7 @@ impl<T: Item> FilePicker<T> {
}
_ => {
// TODO: enable syntax highlighting; blocked by async rendering
Document::open(path, None, None)
Document::open(path, None, None, editor.config.clone())
.map(|doc| CachedPreview::Document(Box::new(doc)))
.unwrap_or(CachedPreview::NotFound)
}
@ -213,6 +225,9 @@ impl<T: Item> FilePicker<T> {
let loader = cx.editor.syn_loader.clone();
doc.detect_language(loader);
}
// QUESTION: do we want to compute inlay hints in pickers too? Probably not for now
// but it could be interesting in the future
}
EventResult::Consumed(None)
@ -282,43 +297,58 @@ impl<T: Item + 'static> Component for FilePicker<T> {
})
.unwrap_or(0);
let offset = Position::new(first_line, 0);
let offset = ViewPosition {
anchor: doc.text().line_to_char(first_line),
horizontal_offset: 0,
vertical_offset: 0,
};
let mut highlights =
EditorView::doc_syntax_highlights(doc, offset, area.height, &cx.editor.theme);
let mut highlights = EditorView::doc_syntax_highlights(
doc,
offset.anchor,
area.height,
&cx.editor.theme,
);
for spans in EditorView::doc_diagnostics_highlights(doc, &cx.editor.theme) {
if spans.is_empty() {
continue;
}
highlights = Box::new(helix_core::syntax::merge(highlights, spans));
}
EditorView::render_text_highlights(
let mut decorations: Vec<Box<dyn LineDecoration>> = Vec::new();
if let Some((start, end)) = range {
let style = cx
.editor
.theme
.try_get("ui.highlight")
.unwrap_or_else(|| cx.editor.theme.get("ui.selection"));
let draw_highlight = move |renderer: &mut TextRenderer, pos: LinePos| {
if (start..=end).contains(&pos.doc_line) {
let area = Rect::new(
renderer.viewport.x,
renderer.viewport.y + pos.visual_line,
renderer.viewport.width,
1,
);
renderer.surface.set_style(area, style)
}
};
decorations.push(Box::new(draw_highlight))
}
render_document(
surface,
inner,
doc,
offset,
inner,
surface,
&cx.editor.theme,
// TODO: compute text annotations asynchronously here (like inlay hints)
&TextAnnotations::default(),
highlights,
&cx.editor.config(),
&cx.editor.theme,
&mut decorations,
&mut [],
);
// highlight the line
if let Some((start, end)) = range {
let offset = start.saturating_sub(first_line) as u16;
surface.set_style(
Rect::new(
inner.x,
inner.y + offset,
inner.width,
(end.saturating_sub(start) as u16 + 1)
.min(inner.height.saturating_sub(offset)),
),
cx.editor
.theme
.try_get("ui.highlight")
.unwrap_or_else(|| cx.editor.theme.get("ui.selection")),
);
}
}
}
@ -370,6 +400,8 @@ impl Ord for PickerMatch {
}
}
type PickerCallback<T> = Box<dyn Fn(&mut Context, &T, Action)>;
pub struct Picker<T: Item> {
options: Vec<T>,
editor_data: T::Data,
@ -383,13 +415,15 @@ pub struct Picker<T: Item> {
cursor: usize,
// pattern: String,
prompt: Prompt,
previous_pattern: String,
previous_pattern: (String, FuzzyQuery),
/// Whether to truncate the start (default true)
pub truncate_start: bool,
/// Whether to show the preview panel (default true)
show_preview: bool,
/// Constraints for tabular formatting
widths: Vec<Constraint>,
callback_fn: Box<dyn Fn(&mut Context, &T, Action)>,
callback_fn: PickerCallback<T>,
}
impl<T: Item> Picker<T> {
@ -408,18 +442,21 @@ impl<T: Item> Picker<T> {
let mut picker = Self {
options,
editor_data,
matcher: Box::new(Matcher::default()),
matcher: Box::default(),
matches: Vec::new(),
cursor: 0,
prompt,
previous_pattern: String::new(),
previous_pattern: (String::new(), FuzzyQuery::default()),
truncate_start: true,
show_preview: true,
callback_fn: Box::new(callback_fn),
completion_height: 0,
widths: Vec::new(),
};
// scoring on empty input:
picker.calculate_column_widths();
// scoring on empty input
// TODO: just reuse score()
picker
.matches
@ -435,15 +472,50 @@ impl<T: Item> Picker<T> {
picker
}
pub fn score(&mut self) {
let now = Instant::now();
pub fn set_options(&mut self, new_options: Vec<T>) {
self.options = new_options;
self.cursor = 0;
self.force_score();
self.calculate_column_widths();
}
/// Calculate the width constraints using the maximum widths of each column
/// for the current options.
fn calculate_column_widths(&mut self) {
let n = self
.options
.first()
.map(|option| option.format(&self.editor_data).cells.len())
.unwrap_or_default();
let max_lens = self.options.iter().fold(vec![0; n], |mut acc, option| {
let row = option.format(&self.editor_data);
// maintain max for each column
for (acc, cell) in acc.iter_mut().zip(row.cells.iter()) {
let width = cell.content.width();
if width > *acc {
*acc = width;
}
}
acc
});
self.widths = max_lens
.into_iter()
.map(|len| Constraint::Length(len as u16))
.collect();
}
pub fn score(&mut self) {
let pattern = self.prompt.line();
if pattern == &self.previous_pattern {
if pattern == &self.previous_pattern.0 {
return;
}
let (query, is_refined) = self
.previous_pattern
.1
.refine(pattern, &self.previous_pattern.0);
if pattern.is_empty() {
// Fast path for no pattern.
self.matches.clear();
@ -456,8 +528,7 @@ impl<T: Item> Picker<T> {
len: text.chars().count(),
}
}));
} else if pattern.starts_with(&self.previous_pattern) {
let query = FuzzyQuery::new(pattern);
} else if is_refined {
// optimization: if the pattern is a more specific version of the previous one
// then we can score the filtered set.
self.matches.retain_mut(|pmatch| {
@ -479,12 +550,11 @@ impl<T: Item> Picker<T> {
self.force_score();
}
log::debug!("picker score {:?}", Instant::now().duration_since(now));
// reset cursor position
self.cursor = 0;
let pattern = self.prompt.line();
self.previous_pattern.clone_from(pattern);
self.previous_pattern.0.clone_from(pattern);
self.previous_pattern.1 = query;
}
pub fn force_score(&mut self) {
@ -619,6 +689,11 @@ impl<T: Item + 'static> Component for Picker<T> {
key!(Esc) | ctrl!('c') => {
return close_fn;
}
alt!(Enter) => {
if let Some(option) = self.selection() {
(self.callback_fn)(cx, option, Action::Load);
}
}
key!(Enter) => {
if let Some(option) = self.selection() {
(self.callback_fn)(cx, option, Action::Replace);
@ -651,7 +726,7 @@ impl<T: Item + 'static> Component for Picker<T> {
fn render(&mut self, area: Rect, surface: &mut Surface, cx: &mut Context) {
let text_style = cx.editor.theme.get("ui.text");
let selected = cx.editor.theme.get("ui.text.focus");
let highlighted = cx.editor.theme.get("special").add_modifier(Modifier::BOLD);
let highlight_style = cx.editor.theme.get("special").add_modifier(Modifier::BOLD);
// -- Render the frame:
// clear area
@ -691,61 +766,126 @@ impl<T: Item + 'static> Component for Picker<T> {
}
// -- Render the contents:
// subtract area of prompt from top and current item marker " > " from left
let inner = inner.clip_top(2).clip_left(3);
// subtract area of prompt from top
let inner = inner.clip_top(2);
let rows = inner.height;
let offset = self.cursor - (self.cursor % std::cmp::max(1, rows as usize));
let cursor = self.cursor.saturating_sub(offset);
let files = self
let options = self
.matches
.iter()
.skip(offset)
.map(|pmatch| (pmatch.index, self.options.get(pmatch.index).unwrap()));
for (i, (_index, option)) in files.take(rows as usize).enumerate() {
let is_active = i == (self.cursor - offset);
if is_active {
surface.set_string(
inner.x.saturating_sub(3),
inner.y + i as u16,
" > ",
selected,
);
surface.set_style(
Rect::new(inner.x, inner.y + i as u16, inner.width, 1),
selected,
);
}
.take(rows as usize)
.map(|pmatch| &self.options[pmatch.index])
.map(|option| option.format(&self.editor_data))
.map(|mut row| {
const TEMP_CELL_SEP: &str = " ";
let line = row.cell_text().fold(String::new(), |mut s, frag| {
s.push_str(&frag);
s.push_str(TEMP_CELL_SEP);
s
});
// Items are filtered by using the text returned by menu::Item::filter_text
// but we do highlighting here using the text in Row and therefore there
// might be inconsistencies. This is the best we can do since only the
// text in Row is displayed to the end user.
let (_score, highlights) = FuzzyQuery::new(self.prompt.line())
.fuzzy_indicies(&line, &self.matcher)
.unwrap_or_default();
let highlight_byte_ranges: Vec<_> = line
.char_indices()
.enumerate()
.filter_map(|(char_idx, (byte_offset, ch))| {
highlights
.contains(&char_idx)
.then(|| byte_offset..byte_offset + ch.len_utf8())
})
.collect();
// The starting byte index of the current (iterating) cell
let mut cell_start_byte_offset = 0;
for cell in row.cells.iter_mut() {
let spans = match cell.content.lines.get(0) {
Some(s) => s,
None => {
cell_start_byte_offset += TEMP_CELL_SEP.len();
continue;
}
};
let mut cell_len = 0;
let spans = option.label(&self.editor_data);
let (_score, highlights) = FuzzyQuery::new(self.prompt.line())
.fuzzy_indicies(&String::from(&spans), &self.matcher)
.unwrap_or_default();
spans.0.into_iter().fold(inner, |pos, span| {
let new_x = surface
.set_string_truncated(
pos.x,
pos.y + i as u16,
&span.content,
pos.width as usize,
|idx| {
if highlights.contains(&idx) {
highlighted.patch(span.style)
} else if is_active {
selected.patch(span.style)
let graphemes_with_style: Vec<_> = spans
.0
.iter()
.flat_map(|span| {
span.content
.grapheme_indices(true)
.zip(std::iter::repeat(span.style))
})
.map(|((grapheme_byte_offset, grapheme), style)| {
cell_len += grapheme.len();
let start = cell_start_byte_offset;
let grapheme_byte_range =
grapheme_byte_offset..grapheme_byte_offset + grapheme.len();
if highlight_byte_ranges.iter().any(|hl_rng| {
hl_rng.start >= start + grapheme_byte_range.start
&& hl_rng.end <= start + grapheme_byte_range.end
}) {
(grapheme, style.patch(highlight_style))
} else {
text_style.patch(span.style)
(grapheme, style)
}
},
true,
self.truncate_start,
)
.0;
pos.clip_left(new_x - pos.x)
})
.collect();
let mut span_list: Vec<(String, Style)> = Vec::new();
for (grapheme, style) in graphemes_with_style {
if span_list.last().map(|(_, sty)| sty) == Some(&style) {
let (string, _) = span_list.last_mut().unwrap();
string.push_str(grapheme);
} else {
span_list.push((String::from(grapheme), style))
}
}
let spans: Vec<Span> = span_list
.into_iter()
.map(|(string, style)| Span::styled(string, style))
.collect();
let spans: Spans = spans.into();
*cell = Cell::from(spans);
cell_start_byte_offset += cell_len + TEMP_CELL_SEP.len();
}
row
});
}
let table = Table::new(options)
.style(text_style)
.highlight_style(selected)
.highlight_symbol(" > ")
.column_spacing(1)
.widths(&self.widths);
use tui::widgets::TableState;
table.render_table(
inner,
surface,
&mut TableState {
offset: 0,
selected: Some(cursor),
},
);
}
fn cursor(&self, area: Rect, editor: &Editor) -> (Option<Position>, CursorKind) {
@ -812,9 +952,7 @@ impl<T: Item + Send + 'static> Component for DynamicPicker<T> {
Some(overlay) => &mut overlay.content.file_picker.picker,
None => return,
};
picker.options = new_options;
picker.cursor = 0;
picker.force_score();
picker.set_options(new_options);
editor.reset_idle_timer();
}));
anyhow::Ok(callback)
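
The `Picker::score` change above caches the compiled query alongside the raw pattern string and asks `FuzzyQuery::refine` whether the new input merely narrows the previous one; if so, only the surviving matches are re-scored instead of rebuilding the whole match list. Below is a minimal standalone sketch of that idea — the `MatchCache` type, the toy `fuzzy_match` helper, and the prefix-based refinement test are simplified stand-ins for illustration, not the actual `FuzzyQuery` API:

```rust
/// Simplified stand-in for the picker's cached pattern state.
#[derive(Default)]
struct MatchCache {
    previous_pattern: String,
    matches: Vec<usize>, // indices of options that matched the previous pattern
}

impl MatchCache {
    /// Re-score `options` against `pattern`, reusing the previous match set
    /// when the new pattern only appends to the old one (a "refinement").
    fn score(&mut self, pattern: &str, options: &[String]) -> Vec<usize> {
        let is_refinement = !self.previous_pattern.is_empty()
            && pattern.starts_with(&self.previous_pattern);

        if is_refinement {
            // A narrower query can only drop matches, never add them,
            // so filtering the previous result set is enough.
            self.matches.retain(|&i| fuzzy_match(&options[i], pattern));
        } else {
            // Anything else (shorter pattern, edited prefix) needs a full rescan.
            self.matches = (0..options.len())
                .filter(|&i| fuzzy_match(&options[i], pattern))
                .collect();
        }

        self.previous_pattern = pattern.to_string();
        self.matches.clone()
    }
}

/// Toy subsequence matcher: every pattern char must occur, in order, in the candidate.
fn fuzzy_match(candidate: &str, pattern: &str) -> bool {
    let mut chars = candidate.chars();
    pattern.chars().all(|p| chars.any(|c| c == p))
}

fn main() {
    let options = vec![
        "cargo.toml".to_string(),
        "cargo.lock".to_string(),
        "readme.md".to_string(),
    ];
    let mut cache = MatchCache::default();

    assert_eq!(cache.score("car", &options), vec![0, 1]);
    // "cargo" refines "car": only the two surviving matches are re-checked.
    assert_eq!(cache.score("cargo", &options), vec![0, 1]);
    // Deleting characters is not a refinement, so the whole list is rescanned.
    assert_eq!(cache.score("read", &options), vec![2]);
}
```

The real picker additionally resets the cursor and recomputes column widths after scoring, as the hunks above show.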

@ -14,8 +14,11 @@ use helix_view::{
Editor,
};
pub type Completion = (RangeFrom<usize>, Cow<'static, str>);
type PromptCharHandler = Box<dyn Fn(&mut Prompt, char, &Context)>;
pub type Completion = (RangeFrom<usize>, Cow<'static, str>);
type CompletionFn = Box<dyn FnMut(&Editor, &str) -> Vec<Completion>>;
type CallbackFn = Box<dyn FnMut(&mut Context, &str, PromptEvent)>;
pub type DocFn = Box<dyn Fn(&str) -> Option<Cow<str>>>;
pub struct Prompt {
prompt: Cow<'static, str>,
@ -25,9 +28,9 @@ pub struct Prompt {
selection: Option<usize>,
history_register: Option<char>,
history_pos: Option<usize>,
completion_fn: Box<dyn FnMut(&Editor, &str) -> Vec<Completion>>,
callback_fn: Box<dyn FnMut(&mut Context, &str, PromptEvent)>,
pub doc_fn: Box<dyn Fn(&str) -> Option<Cow<str>>>,
completion_fn: CompletionFn,
callback_fn: CallbackFn,
pub doc_fn: DocFn,
next_char_handler: Option<PromptCharHandler>,
}
@ -513,7 +516,7 @@ impl Component for Prompt {
alt!('d') | alt!(Delete) | ctrl!(Delete) => self.delete_word_forwards(cx.editor),
ctrl!('k') => self.kill_to_end_of_line(cx.editor),
ctrl!('u') => self.kill_to_start_of_line(cx.editor),
ctrl!('h') | key!(Backspace) => {
ctrl!('h') | key!(Backspace) | shift!(Backspace) => {
self.delete_char_backwards(cx.editor);
(self.callback_fn)(cx, &self.line, PromptEvent::Update);
}

@ -1,5 +1,6 @@
use helix_core::{coords_at_pos, encoding, Position};
use helix_lsp::lsp::DiagnosticSeverity;
use helix_view::document::DEFAULT_LANGUAGE_NAME;
use helix_view::{
document::{Mode, SCRATCH_BUFFER_NAME},
graphics::Rect,
@ -141,6 +142,9 @@ where
helix_view::editor::StatusLineElement::Spinner => render_lsp_spinner,
helix_view::editor::StatusLineElement::FileBaseName => render_file_base_name,
helix_view::editor::StatusLineElement::FileName => render_file_name,
helix_view::editor::StatusLineElement::FileModificationIndicator => {
render_file_modification_indicator
}
helix_view::editor::StatusLineElement::FileEncoding => render_file_encoding,
helix_view::editor::StatusLineElement::FileLineEnding => render_file_line_ending,
helix_view::editor::StatusLineElement::FileType => render_file_type,
@ -155,6 +159,7 @@ where
helix_view::editor::StatusLineElement::TotalLineNumbers => render_total_line_numbers,
helix_view::editor::StatusLineElement::Separator => render_separator,
helix_view::editor::StatusLineElement::Spacer => render_spacer,
helix_view::editor::StatusLineElement::VersionControl => render_version_control,
}
}
@ -402,7 +407,7 @@ fn render_file_type<F>(context: &mut RenderContext, write: F)
where
F: Fn(&mut RenderContext, String, Option<Style>) + Copy,
{
let file_type = context.doc.language_name().unwrap_or("text");
let file_type = context.doc.language_name().unwrap_or(DEFAULT_LANGUAGE_NAME);
write(context, format!(" {} ", file_type), None);
}
@ -417,16 +422,26 @@ where
.as_ref()
.map(|p| p.to_string_lossy())
.unwrap_or_else(|| SCRATCH_BUFFER_NAME.into());
format!(
" {}{} ",
path,
if context.doc.is_modified() { "[+]" } else { "" }
)
format!(" {} ", path)
};
write(context, title, None);
}
fn render_file_modification_indicator<F>(context: &mut RenderContext, write: F)
where
F: Fn(&mut RenderContext, String, Option<Style>) + Copy,
{
let title = (if context.doc.is_modified() {
"[+]"
} else {
" "
})
.to_string();
write(context, title, None);
}
fn render_file_base_name<F>(context: &mut RenderContext, write: F)
where
F: Fn(&mut RenderContext, String, Option<Style>) + Copy,
@ -437,11 +452,7 @@ where
.as_ref()
.and_then(|p| p.as_path().file_name().map(|s| s.to_string_lossy()))
.unwrap_or_else(|| SCRATCH_BUFFER_NAME.into());
format!(
" {}{} ",
path,
if context.doc.is_modified() { "[+]" } else { "" }
)
format!(" {} ", path)
};
write(context, title, None);
@ -466,3 +477,16 @@ where
{
write(context, String::from(" "), None);
}
fn render_version_control<F>(context: &mut RenderContext, write: F)
where
F: Fn(&mut RenderContext, String, Option<Style>) + Copy,
{
let head = context
.doc
.version_control_head()
.unwrap_or_default()
.to_string();
write(context, head, None);
}
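
The statusline additions above follow the file's existing pattern: each `StatusLineElement` variant is mapped to a `render_*` function that receives the render context and a `write` callback, so new elements (here the modification indicator and the version-control head) slot in without touching the layout code. A compact sketch of that dispatch shape, using made-up element and context types rather than the real `RenderContext` API:

```rust
// Hypothetical, trimmed-down version of the element -> renderer dispatch;
// the real code matches on `StatusLineElement` and threads a `RenderContext`.
#[derive(Clone, Copy)]
enum Element {
    FileName,
    FileModificationIndicator,
    VersionControl,
}

struct Context {
    path: String,
    modified: bool,
    vcs_head: Option<String>,
}

// Each renderer produces only its own fragment and hands it to `write`,
// which decides where on the statusline the text ends up.
fn render<F>(element: Element, ctx: &Context, mut write: F)
where
    F: FnMut(String),
{
    match element {
        Element::FileName => write(format!(" {} ", ctx.path)),
        Element::FileModificationIndicator => {
            write(if ctx.modified { "[+]".into() } else { "   ".into() })
        }
        Element::VersionControl => write(ctx.vcs_head.clone().unwrap_or_default()),
    }
}

fn main() {
    let ctx = Context {
        path: "src/main.rs".into(),
        modified: true,
        vcs_head: Some("master".into()),
    };

    // Assemble one side of the statusline from a configured element list.
    let mut line = String::new();
    for element in [
        Element::FileName,
        Element::FileModificationIndicator,
        Element::VersionControl,
    ] {
        render(element, &ctx, |s| line.push_str(&s));
    }
    assert_eq!(line, " src/main.rs [+]master");
}
```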

@ -58,7 +58,7 @@ pub fn required_size(text: &tui::text::Text, max_text_width: u16) -> (u16, u16)
let content_width = content.width() as u16;
if content_width > max_text_width {
text_width = max_text_width;
height += content_width / max_text_width;
height += content_width.checked_div(max_text_width).unwrap_or(0);
} else if content_width > text_width {
text_width = content_width;
}
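
The one-line change above swaps a plain integer division for `checked_div`, so a zero `max_text_width` (a degenerate zero-width render area) no longer panics with a divide-by-zero; the extra height simply falls back to 0. A tiny sketch of the difference, with made-up numbers:

```rust
fn extra_height(content_width: u16, max_text_width: u16) -> u16 {
    // `content_width / max_text_width` panics when max_text_width == 0;
    // checked_div returns None in that case, which we map to 0 extra lines.
    content_width.checked_div(max_text_width).unwrap_or(0)
}

fn main() {
    assert_eq!(extra_height(120, 40), 3); // wide content wraps onto extra lines
    assert_eq!(extra_height(120, 0), 0); // zero-width area: no panic, no extra lines
}
```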

@ -4,8 +4,8 @@ mod test {
use std::path::PathBuf;
use helix_core::{syntax::AutoPairConfig, Position, Selection};
use helix_term::{args::Args, config::Config};
use helix_core::{syntax::AutoPairConfig, Selection};
use helix_term::config::Config;
use indoc::indoc;
@ -23,5 +23,4 @@ mod test {
mod movement;
mod prompt;
mod splits;
mod write;
}

@ -3,22 +3,16 @@ use super::*;
#[tokio::test(flavor = "multi_thread")]
async fn auto_indent_c() -> anyhow::Result<()> {
test_with_config(
Args {
files: vec![(PathBuf::from("foo.c"), Position::default())],
..Default::default()
},
Config::default(),
helpers::test_syntax_conf(None),
AppBuilder::new().with_file("foo.c", None),
// switches to append mode?
(
helpers::platform_line("void foo() {#[|}]#").as_ref(),
helpers::platform_line("void foo() {#[|}]#"),
"i<ret><esc>",
helpers::platform_line(indoc! {"\
void foo() {
#[|\n]#\
}
"})
.as_ref(),
"}),
),
)
.await?;

@ -41,9 +41,7 @@ async fn insert_configured_multi_byte_chars() -> anyhow::Result<()> {
for (open, close) in pairs.iter() {
test_with_config(
Args::default(),
config.clone(),
helpers::test_syntax_conf(None),
AppBuilder::new().with_config(config.clone()),
(
format!("#[{}|]#", LINE_END),
format!("i{}", open),
@ -53,9 +51,7 @@ async fn insert_configured_multi_byte_chars() -> anyhow::Result<()> {
.await?;
test_with_config(
Args::default(),
config.clone(),
helpers::test_syntax_conf(None),
AppBuilder::new().with_config(config.clone()),
(
format!("{}#[{}|]#{}", open, close, LINE_END),
format!("i{}", close),
@ -170,15 +166,13 @@ async fn insert_before_eol() -> anyhow::Result<()> {
async fn insert_auto_pairs_disabled() -> anyhow::Result<()> {
for pair in DEFAULT_PAIRS {
test_with_config(
Args::default(),
Config {
AppBuilder::new().with_config(Config {
editor: helix_view::editor::Config {
auto_pairs: AutoPairConfig::Enable(false),
..Default::default()
},
..Default::default()
},
helpers::test_syntax_conf(None),
}),
(
format!("#[{}|]#", LINE_END),
format!("i{}", pair.0),

@ -1,99 +1,8 @@
use std::ops::RangeInclusive;
use helix_core::diagnostic::Severity;
use helix_term::application::Application;
use super::*;
#[tokio::test(flavor = "multi_thread")]
async fn test_write_quit_fail() -> anyhow::Result<()> {
let file = helpers::new_readonly_tempfile()?;
let mut app = helpers::AppBuilder::new()
.with_file(file.path(), None)
.build()?;
test_key_sequence(
&mut app,
Some("ihello<esc>:wq<ret>"),
Some(&|app| {
let mut docs: Vec<_> = app.editor.documents().collect();
assert_eq!(1, docs.len());
let doc = docs.pop().unwrap();
assert_eq!(Some(file.path()), doc.path().map(PathBuf::as_path));
assert_eq!(&Severity::Error, app.editor.get_status().unwrap().1);
}),
false,
)
.await?;
Ok(())
}
#[tokio::test(flavor = "multi_thread")]
async fn test_buffer_close_concurrent() -> anyhow::Result<()> {
test_key_sequences(
&mut helpers::AppBuilder::new().build()?,
vec![
(
None,
Some(&|app| {
assert_eq!(1, app.editor.documents().count());
assert!(!app.editor.is_err());
}),
),
(
Some("ihello<esc>:new<ret>"),
Some(&|app| {
assert_eq!(2, app.editor.documents().count());
assert!(!app.editor.is_err());
}),
),
(
Some(":buffer<minus>close<ret>"),
Some(&|app| {
assert_eq!(1, app.editor.documents().count());
assert!(!app.editor.is_err());
}),
),
],
false,
)
.await?;
// verify that if writes are queued up, they are finished before closing the buffer
let mut file = tempfile::NamedTempFile::new()?;
let mut command = String::new();
const RANGE: RangeInclusive<i32> = 1..=1000;
for i in RANGE {
let cmd = format!("%c{}<esc>:w<ret>", i);
command.push_str(&cmd);
}
command.push_str(":buffer<minus>close<ret>");
let mut app = helpers::AppBuilder::new()
.with_file(file.path(), None)
.build()?;
test_key_sequence(
&mut app,
Some(&command),
Some(&|app| {
assert!(!app.editor.is_err(), "error: {:?}", app.editor.get_status());
let doc = app.editor.document_by_path(file.path());
assert!(doc.is_none(), "found doc: {:?}", doc);
}),
false,
)
.await?;
helpers::assert_file_has_content(file.as_file_mut(), &RANGE.end().to_string())?;
Ok(())
}
mod write;
#[tokio::test(flavor = "multi_thread")]
async fn test_selection_duplication() -> anyhow::Result<()> {
@ -132,6 +41,70 @@ async fn test_selection_duplication() -> anyhow::Result<()> {
.as_str(),
))
.await?;
// Copy the selection to previous line, skipping the first line in the file
test((
platform_line(indoc! {"\
test
#[testitem|]#
"})
.as_str(),
"<A-C>",
platform_line(indoc! {"\
test
#[testitem|]#
"})
.as_str(),
))
.await?;
// Copy the selection to previous line, including the first line in the file
test((
platform_line(indoc! {"\
test
#[test|]#
"})
.as_str(),
"<A-C>",
platform_line(indoc! {"\
#[test|]#
#(test|)#
"})
.as_str(),
))
.await?;
// Copy the selection to next line, skipping the last line in the file
test((
platform_line(indoc! {"\
#[testitem|]#
test
"})
.as_str(),
"C",
platform_line(indoc! {"\
#[testitem|]#
test
"})
.as_str(),
))
.await?;
// Copy the selection to next line, including the last line in the file
test((
platform_line(indoc! {"\
#[test|]#
test
"})
.as_str(),
"C",
platform_line(indoc! {"\
#(test|)#
#[test|]#
"})
.as_str(),
))
.await?;
Ok(())
}
@ -228,12 +201,12 @@ async fn test_multi_selection_shell_commands() -> anyhow::Result<()> {
.as_str(),
"|echo foo<ret>",
platform_line(indoc! {"\
#[|foo
]#
#(|foo
)#
#(|foo
)#
#[|foo\n]#
#(|foo\n)#
#(|foo\n)#
"})
.as_str(),
))
@ -249,12 +222,12 @@ async fn test_multi_selection_shell_commands() -> anyhow::Result<()> {
.as_str(),
"!echo foo<ret>",
platform_line(indoc! {"\
#[|foo
]#lorem
#(|foo
)#ipsum
#(|foo
)#dolor
#[|foo\n]#
lorem
#(|foo\n)#
ipsum
#(|foo\n)#
dolor
"})
.as_str(),
))
@ -270,12 +243,12 @@ async fn test_multi_selection_shell_commands() -> anyhow::Result<()> {
.as_str(),
"<A-!>echo foo<ret>",
platform_line(indoc! {"\
lorem#[|foo
]#
ipsum#(|foo
)#
dolor#(|foo
)#
lorem#[|foo\n]#
ipsum#(|foo\n)#
dolor#(|foo\n)#
"})
.as_str(),
))
@ -327,8 +300,8 @@ async fn test_extend_line() -> anyhow::Result<()> {
platform_line(indoc! {"\
#[lorem
ipsum
dolor
|]#
dolor\n|]#
"})
.as_str(),
))
@ -345,8 +318,8 @@ async fn test_extend_line() -> anyhow::Result<()> {
"2x",
platform_line(indoc! {"\
#[lorem
ipsum
|]#
ipsum\n|]#
"})
.as_str(),
))
@ -354,3 +327,61 @@ async fn test_extend_line() -> anyhow::Result<()> {
Ok(())
}
#[tokio::test(flavor = "multi_thread")]
async fn test_character_info() -> anyhow::Result<()> {
// UTF-8, single byte
test_key_sequence(
&mut helpers::AppBuilder::new().build()?,
Some("ih<esc>h:char<ret>"),
Some(&|app| {
assert_eq!(
r#""h" (U+0068) Dec 104 Hex 68"#,
app.editor.get_status().unwrap().0
);
}),
false,
)
.await?;
// UTF-8, multi-byte
test_key_sequence(
&mut helpers::AppBuilder::new().build()?,
Some("ië<esc>h:char<ret>"),
Some(&|app| {
assert_eq!(
r#""ë" (U+0065 U+0308) Hex 65 + cc 88"#,
app.editor.get_status().unwrap().0
);
}),
false,
)
.await?;
// Multiple characters displayed as one, escaped characters
test_key_sequence(
&mut helpers::AppBuilder::new().build()?,
Some(":line<minus>ending crlf<ret>:char<ret>"),
Some(&|app| {
assert_eq!(
r#""\r\n" (U+000d U+000a) Hex 0d + 0a"#,
app.editor.get_status().unwrap().0
);
}),
false,
)
.await?;
// Non-UTF-8
test_key_sequence(
&mut helpers::AppBuilder::new().build()?,
Some(":encoding ascii<ret>ih<esc>h:char<ret>"),
Some(&|app| {
assert_eq!(r#""h" Dec 104 Hex 68"#, app.editor.get_status().unwrap().0);
}),
false,
)
.await?;
Ok(())
}

@ -1,5 +1,5 @@
use std::{
io::{Read, Write},
io::{Read, Seek, Write},
ops::RangeInclusive,
};
@ -8,6 +8,96 @@ use helix_view::doc;
use super::*;
#[tokio::test(flavor = "multi_thread")]
async fn test_write_quit_fail() -> anyhow::Result<()> {
let file = helpers::new_readonly_tempfile()?;
let mut app = helpers::AppBuilder::new()
.with_file(file.path(), None)
.build()?;
test_key_sequence(
&mut app,
Some("ihello<esc>:wq<ret>"),
Some(&|app| {
let mut docs: Vec<_> = app.editor.documents().collect();
assert_eq!(1, docs.len());
let doc = docs.pop().unwrap();
assert_eq!(Some(file.path()), doc.path().map(PathBuf::as_path));
assert_eq!(&Severity::Error, app.editor.get_status().unwrap().1);
}),
false,
)
.await?;
Ok(())
}
#[tokio::test(flavor = "multi_thread")]
async fn test_buffer_close_concurrent() -> anyhow::Result<()> {
test_key_sequences(
&mut helpers::AppBuilder::new().build()?,
vec![
(
None,
Some(&|app| {
assert_eq!(1, app.editor.documents().count());
assert!(!app.editor.is_err());
}),
),
(
Some("ihello<esc>:new<ret>"),
Some(&|app| {
assert_eq!(2, app.editor.documents().count());
assert!(!app.editor.is_err());
}),
),
(
Some(":buffer<minus>close<ret>"),
Some(&|app| {
assert_eq!(1, app.editor.documents().count());
assert!(!app.editor.is_err());
}),
),
],
false,
)
.await?;
// verify that if writes are queued up, they are finished before closing the buffer
let mut file = tempfile::NamedTempFile::new()?;
let mut command = String::new();
const RANGE: RangeInclusive<i32> = 1..=1000;
for i in RANGE {
let cmd = format!("%c{}<esc>:w!<ret>", i);
command.push_str(&cmd);
}
command.push_str(":buffer<minus>close<ret>");
let mut app = helpers::AppBuilder::new()
.with_file(file.path(), None)
.build()?;
test_key_sequence(
&mut app,
Some(&command),
Some(&|app| {
assert!(!app.editor.is_err(), "error: {:?}", app.editor.get_status());
let doc = app.editor.document_by_path(file.path());
assert!(doc.is_none(), "found doc: {:?}", doc);
}),
false,
)
.await?;
helpers::assert_file_has_content(file.as_file_mut(), &RANGE.end().to_string())?;
Ok(())
}
#[tokio::test(flavor = "multi_thread")]
async fn test_write() -> anyhow::Result<()> {
let mut file = tempfile::NamedTempFile::new()?;
@ -37,6 +127,38 @@ async fn test_write() -> anyhow::Result<()> {
Ok(())
}
#[tokio::test(flavor = "multi_thread")]
async fn test_overwrite_protection() -> anyhow::Result<()> {
let mut file = tempfile::NamedTempFile::new()?;
let mut app = helpers::AppBuilder::new()
.with_file(file.path(), None)
.build()?;
helpers::run_event_loop_until_idle(&mut app).await;
file.as_file_mut()
.write_all(helpers::platform_line("extremely important content").as_bytes())?;
file.as_file_mut().flush()?;
file.as_file_mut().sync_all()?;
test_key_sequence(&mut app, Some(":x<ret>"), None, false).await?;
file.as_file_mut().flush()?;
file.as_file_mut().sync_all()?;
file.rewind()?;
let mut file_content = String::new();
file.as_file_mut().read_to_string(&mut file_content)?;
assert_eq!(
helpers::platform_line("extremely important content"),
file_content
);
Ok(())
}
#[tokio::test(flavor = "multi_thread")]
async fn test_write_quit() -> anyhow::Result<()> {
let mut file = tempfile::NamedTempFile::new()?;
@ -70,13 +192,13 @@ async fn test_write_quit() -> anyhow::Result<()> {
async fn test_write_concurrent() -> anyhow::Result<()> {
let mut file = tempfile::NamedTempFile::new()?;
let mut command = String::new();
const RANGE: RangeInclusive<i32> = 1..=5000;
const RANGE: RangeInclusive<i32> = 1..=1000;
let mut app = helpers::AppBuilder::new()
.with_file(file.path(), None)
.build()?;
for i in RANGE {
let cmd = format!("%c{}<esc>:w<ret>", i);
let cmd = format!("%c{}<esc>:w!<ret>", i);
command.push_str(&cmd);
}

Some files were not shown because too many files have changed in this diff.
