mirror of https://github.com/helix-editor/helix
Merge branch 'master' into better-bufferline
commit 0656f44ac0
@@ -1,3 +1,17 @@
+# we use tokio_unstable to enable runtime::Handle::id so we can separate
+# globals from multiple parallel tests. If that function ever does get removed
+# it's possible to replace it (with some additional overhead and effort)
+# Annoyingly build.rustflags doesn't work here because it gets overwritten
+# if people have their own global target.<..> config (for example to enable mold)
+# specifying flags this way is more robust as they get merged
+# This still gets overwritten by RUSTFLAGS though; luckily it shouldn't be necessary
+# to set those most of the time. If downstream does overwrite this it's not a huge
+# deal since it will only break tests anyway
+[target."cfg(all())"]
+rustflags = ["--cfg", "tokio_unstable", "-C", "target-feature=-crt-static"]
+
 [alias]
 xtask = "run --package xtask --"
 integration-test = "test --features integration --profile integration --workspace --test integration"
@@ -1,5 +1,6 @@
 watch_file shell.nix
 watch_file flake.lock
+watch_file rust-toolchain.toml
 
 # try to use flakes, if it fails use normal nix (ie. shell.nix)
 use flake || use nix
@@ -1,2 +0,0 @@
-# Things that we don't want ripgrep to search that we do want in git
-# https://github.com/BurntSushi/ripgrep/blob/master/GUIDE.md#automatic-filtering
File diff suppressed because it is too large
@@ -0,0 +1,158 @@

## Building from source

- [Configuring Helix's runtime files](#configuring-helixs-runtime-files)
  - [Linux and macOS](#linux-and-macos)
  - [Windows](#windows)
  - [Multiple runtime directories](#multiple-runtime-directories)
  - [Note to packagers](#note-to-packagers)
- [Validating the installation](#validating-the-installation)
- [Configure the desktop shortcut](#configure-the-desktop-shortcut)

Clone the Helix GitHub repository into a directory of your choice. The
examples in this documentation assume installation into either `~/src/` on
Linux and macOS, or `%userprofile%\src\` on Windows.

Requirements:

- The [Rust toolchain](https://www.rust-lang.org/tools/install)
- The [Git version control system](https://git-scm.com/)
- A C++14 compatible compiler to build the tree-sitter grammars, for example GCC or Clang

If you are using the `musl-libc` standard library instead of `glibc`, the following environment variable must be set during the build to ensure tree-sitter grammars can be loaded correctly:

```sh
RUSTFLAGS="-C target-feature=-crt-static"
```

1. Clone the repository:

```sh
git clone https://github.com/helix-editor/helix
cd helix
```

2. Compile from source:

```sh
cargo install --path helix-term --locked
```

This command will create the `hx` executable and construct the tree-sitter
grammars in the local `runtime` folder.

> 💡 If you do not want to fetch or build grammars, set the environment variable `HELIX_DISABLE_AUTO_GRAMMAR_BUILD`.

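For instance, a sketch of an install that skips the automatic grammar build (assuming a POSIX shell; any non-empty value for the variable should be enough, but treat that detail as an assumption):

```sh
# Hypothetical invocation: skip fetching/building grammars during install
HELIX_DISABLE_AUTO_GRAMMAR_BUILD=1 cargo install --path helix-term --locked
```
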
> 💡 Tree-sitter grammars can be fetched and compiled if not pre-packaged. Fetch
> grammars with `hx --grammar fetch` and compile them with
> `hx --grammar build`. This will install them in
> the `runtime` directory within the user's helix config directory (more
> [details below](#multiple-runtime-directories)).

### Configuring Helix's runtime files

#### Linux and macOS

The **runtime** directory is one below the Helix source, so either export a
`HELIX_RUNTIME` environment variable pointing to that directory and add the
export to your `~/.bashrc` or equivalent:

```sh
export HELIX_RUNTIME=~/src/helix/runtime
```

Or, create a symbolic link:

```sh
ln -Ts $PWD/runtime ~/.config/helix/runtime
```

If the above command fails to create a symbolic link because the file exists, either move `~/.config/helix/runtime` to a new location or delete it, then run the symlink command above again.

#### Windows

Either set the `HELIX_RUNTIME` environment variable to point to the runtime files using the Windows setting (search for
`Edit environment variables for your account`) or use the `setx` command in
Cmd:

```sh
setx HELIX_RUNTIME "%userprofile%\source\repos\helix\runtime"
```

> 💡 `%userprofile%` resolves to your user directory like
> `C:\Users\Your-Name\` for example.

Or, create a symlink in `%appdata%\helix\` that links to the source code directory:

| Method     | Command                                                                                  |
| ---------- | ---------------------------------------------------------------------------------------- |
| PowerShell | `New-Item -ItemType Junction -Target "runtime" -Path "$Env:AppData\helix\runtime"`       |
| Cmd        | `cd %appdata%\helix` <br/> `mklink /D runtime "%userprofile%\src\helix\runtime"`         |

> 💡 On Windows, creating a symbolic link may require running PowerShell or
> Cmd as an administrator.

#### Multiple runtime directories

When Helix finds multiple runtime directories it will search through them for files in the
following order:

1. `runtime/` sibling directory to the `$CARGO_MANIFEST_DIR` directory (this is intended for
   developing and testing helix only).
2. `runtime/` subdirectory of the OS-dependent helix user config directory.
3. `$HELIX_RUNTIME`
4. Distribution-specific fallback directory (set at compile time, not run time,
   with the `HELIX_DEFAULT_RUNTIME` environment variable)
5. `runtime/` subdirectory of the path to the Helix executable.

This order also sets the priority for selecting which file will be used if multiple runtime
directories have files with the same name.

#### Note to packagers

If you are making a package of Helix for end users, to provide a good out of
the box experience, you should set the `HELIX_DEFAULT_RUNTIME` environment
variable at build time (before invoking `cargo build`) to a directory which
will store the final runtime files after installation. For example, say you want
to package the runtime into `/usr/lib/helix/runtime`. The rough steps a build
script could follow are:

1. `export HELIX_DEFAULT_RUNTIME=/usr/lib/helix/runtime`
2. `cargo build --profile opt --locked --package helix-term`
3. `cp -r runtime $BUILD_DIR/usr/lib/helix/`
4. `cp target/opt/hx $BUILD_DIR/usr/bin/hx`

This way the resulting `hx` binary will always look for its runtime directory in
`/usr/lib/helix/runtime` if the user has no custom runtime in `~/.config/helix`
or `HELIX_RUNTIME`.

### Validating the installation

To make sure everything is set up as expected you should run the Helix health
check:

```sh
hx --health
```

For more information on the health check results refer to
[Health check](https://github.com/helix-editor/helix/wiki/Healthcheck).

### Configure the desktop shortcut

If your desktop environment supports the
[XDG desktop menu](https://specifications.freedesktop.org/menu-spec/menu-spec-latest.html)
you can configure Helix to show up in the application menu by copying the
provided `.desktop` and icon files to their correct folders:

```sh
cp contrib/Helix.desktop ~/.local/share/applications
cp contrib/helix.png ~/.icons # or ~/.local/share/icons
```

To use another terminal than the system default, you can modify the `.desktop`
file. For example, to use `kitty`:

```sh
sed -i "s|Exec=hx %F|Exec=kitty hx %F|g" ~/.local/share/applications/Helix.desktop
sed -i "s|Terminal=true|Terminal=false|g" ~/.local/share/applications/Helix.desktop
```

@@ -0,0 +1,395 @@

## Editor

- [`[editor]` Section](#editor-section)
- [`[editor.statusline]` Section](#editorstatusline-section)
- [`[editor.lsp]` Section](#editorlsp-section)
- [`[editor.cursor-shape]` Section](#editorcursor-shape-section)
- [`[editor.file-picker]` Section](#editorfile-picker-section)
- [`[editor.auto-pairs]` Section](#editorauto-pairs-section)
- [`[editor.auto-save]` Section](#editorauto-save-section)
- [`[editor.search]` Section](#editorsearch-section)
- [`[editor.whitespace]` Section](#editorwhitespace-section)
- [`[editor.indent-guides]` Section](#editorindent-guides-section)
- [`[editor.gutters]` Section](#editorgutters-section)
  - [`[editor.gutters.line-numbers]` Section](#editorguttersline-numbers-section)
  - [`[editor.gutters.diagnostics]` Section](#editorguttersdiagnostics-section)
  - [`[editor.gutters.diff]` Section](#editorguttersdiff-section)
  - [`[editor.gutters.spacer]` Section](#editorguttersspacer-section)
- [`[editor.soft-wrap]` Section](#editorsoft-wrap-section)
- [`[editor.smart-tab]` Section](#editorsmart-tab-section)

### `[editor]` Section

| Key | Description | Default |
|--|--|---------|
| `scrolloff` | Number of lines of padding around the edge of the screen when scrolling | `5` |
| `mouse` | Enable mouse mode | `true` |
| `middle-click-paste` | Middle click paste support | `true` |
| `scroll-lines` | Number of lines to scroll per scroll wheel step | `3` |
| `shell` | Shell to use when running external commands | Unix: `["sh", "-c"]`<br/>Windows: `["cmd", "/C"]` |
| `line-number` | Line number display: `absolute` simply shows each line's number, while `relative` shows the distance from the current line. When unfocused or in insert mode, `relative` will still show absolute line numbers | `absolute` |
| `cursorline` | Highlight all lines with a cursor | `false` |
| `cursorcolumn` | Highlight all columns with a cursor | `false` |
| `gutters` | Gutters to display. Available: `diagnostics`, `diff`, `line-numbers`, and `spacer`. Note that `diagnostics` also covers related features such as breakpoints; 1-width padding is inserted if the gutter list is non-empty | `["diagnostics", "spacer", "line-numbers", "spacer", "diff"]` |
| `auto-completion` | Enable automatic pop up of auto-completion | `true` |
| `auto-format` | Enable automatic formatting on save | `true` |
| `idle-timeout` | Time in milliseconds since last keypress before idle timers trigger | `250` |
| `completion-timeout` | Time in milliseconds after typing a word character before completions are shown; set to `5` for near-instant completions | `250` |
| `preview-completion-insert` | Whether to apply the completion item instantly when selected | `true` |
| `completion-trigger-len` | The minimum length of the word under the cursor that triggers autocompletion | `2` |
| `completion-replace` | Set to `true` to make completions always replace the entire word and not just the part before the cursor | `false` |
| `auto-info` | Whether to display info boxes | `true` |
| `true-color` | Set to `true` to override automatic detection of terminal truecolor support in the event of a false negative | `false` |
| `undercurl` | Set to `true` to override automatic detection of terminal undercurl support in the event of a false negative | `false` |
| `rulers` | List of column positions at which to display the rulers. Can be overridden by language-specific `rulers` in the `languages.toml` file | `[]` |
| `bufferline` | Renders a line at the top of the editor displaying open buffers. Can be `always`, `never` or `multiple` (only shown if more than one buffer is in use) | `never` |
| `color-modes` | Whether to color the mode indicator with different colors depending on the mode itself | `false` |
| `text-width` | Maximum line length. Used for the `:reflow` command and soft-wrapping if `soft-wrap.wrap-at-text-width` is set | `80` |
| `workspace-lsp-roots` | Directories relative to the workspace root that are treated as LSP roots. Should only be set in `.helix/config.toml` | `[]` |
| `default-line-ending` | The line ending to use for new documents. Can be `native`, `lf`, `crlf`, `ff`, `cr` or `nel`. `native` uses the platform's native line ending (`crlf` on Windows, otherwise `lf`) | `native` |
| `insert-final-newline` | Whether to automatically insert a trailing line-ending on write if missing | `true` |
| `popup-border` | Draw a border around `popup`, `menu`, `all`, or `none` | `none` |
| `indent-heuristic` | How the indentation for a newly inserted line is computed: `simple` just copies the indentation level from the previous line, `tree-sitter` computes the indentation based on the syntax tree, and `hybrid` combines both approaches. If the chosen heuristic is not available, a different one is used as a fallback (the fallback order being `hybrid` -> `tree-sitter` -> `simple`) | `hybrid` |
| `jump-label-alphabet` | The characters used to generate two-character jump labels. Characters at the start of the alphabet are used first | `"abcdefghijklmnopqrstuvwxyz"` |

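As an illustration, the keys above go under the top-level `[editor]` table in `config.toml`. A small sketch (the particular values here are arbitrary choices, not recommendations):

```toml
[editor]
line-number = "relative"
cursorline = true
bufferline = "multiple"
color-modes = true
rulers = [80, 120]
```
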
### `[editor.statusline]` Section

Allows configuring the statusline at the bottom of the editor.

The configuration distinguishes between three areas of the status line:

`[ ... ... LEFT ... ... | ... ... ... CENTER ... ... ... | ... ... RIGHT ... ... ]`

Statusline elements can be defined as follows:

```toml
[editor.statusline]
left = ["mode", "spinner"]
center = ["file-name"]
right = ["diagnostics", "selections", "position", "file-encoding", "file-line-ending", "file-type"]
separator = "│"
mode.normal = "NORMAL"
mode.insert = "INSERT"
mode.select = "SELECT"
```

The `[editor.statusline]` key takes the following sub-keys:

| Key | Description | Default |
| --- | --- | --- |
| `left` | A list of elements aligned to the left of the statusline | `["mode", "spinner", "file-name", "read-only-indicator", "file-modification-indicator"]` |
| `center` | A list of elements aligned to the middle of the statusline | `[]` |
| `right` | A list of elements aligned to the right of the statusline | `["diagnostics", "selections", "register", "position", "file-encoding"]` |
| `separator` | The character used to separate elements in the statusline | `"│"` |
| `mode.normal` | The text shown in the `mode` element for normal mode | `"NOR"` |
| `mode.insert` | The text shown in the `mode` element for insert mode | `"INS"` |
| `mode.select` | The text shown in the `mode` element for select mode | `"SEL"` |

The following statusline elements can be configured:

| Key | Description |
| ------ | ----------- |
| `mode` | The current editor mode (`mode.normal`/`mode.insert`/`mode.select`) |
| `spinner` | A progress spinner indicating LSP activity |
| `file-name` | The path/name of the opened file |
| `file-absolute-path` | The absolute path/name of the opened file |
| `file-base-name` | The basename of the opened file |
| `file-modification-indicator` | The indicator to show whether the file is modified (a `[+]` appears when there are unsaved changes) |
| `file-encoding` | The encoding of the opened file if it differs from UTF-8 |
| `file-line-ending` | The file line endings (CRLF or LF) |
| `read-only-indicator` | An indicator that shows `[readonly]` when a file cannot be written |
| `total-line-numbers` | The total line count of the opened file |
| `file-type` | The type of the opened file |
| `diagnostics` | The number of warnings and/or errors |
| `workspace-diagnostics` | The number of warnings and/or errors in the workspace |
| `selections` | The number of active selections |
| `primary-selection-length` | The number of characters currently in the primary selection |
| `position` | The cursor position |
| `position-percentage` | The cursor position as a percentage of the total number of lines |
| `separator` | The string defined in `editor.statusline.separator` (defaults to `"│"`) |
| `spacer` | Inserts a space between elements (multiple/contiguous spacers may be specified) |
| `version-control` | The current branch name or detached commit hash of the opened workspace |
| `register` | The currently selected register |

### `[editor.lsp]` Section

| Key | Description | Default |
| --- | ----------- | ------- |
| `enable` | Enables LSP integration. Setting to `false` will completely disable language servers regardless of language settings | `true` |
| `display-messages` | Display LSP progress messages below the statusline[^1] | `false` |
| `auto-signature-help` | Enable automatic popup of signature help (parameter hints) | `true` |
| `display-inlay-hints` | Display inlay hints[^2] | `false` |
| `display-signature-help-docs` | Display docs under the signature help popup | `true` |
| `snippets` | Enables snippet completions. Requires a server restart (`:lsp-restart`) to take effect after `:config-reload`/`:set` | `true` |
| `goto-reference-include-declaration` | Include the declaration in the goto-references popup | `true` |

[^1]: By default, a progress spinner is shown in the statusline beside the file path.

[^2]: You may also have to activate them in the LSP config for them to appear, not just in Helix. Inlay hints in Helix are still being improved on and may be a little bit laggy/janky under some circumstances. Please report any bugs you see so we can fix them!

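For example, a minimal sketch that turns on inlay hints and progress messages using the keys above:

```toml
[editor.lsp]
display-messages = true
display-inlay-hints = true
```
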
### `[editor.cursor-shape]` Section

Defines the shape of the cursor in each mode.
Valid values for these options are `block`, `bar`, `underline`, or `hidden`.

> 💡 Due to limitations of the terminal environment, only the primary cursor can
> change shape.

| Key | Description | Default |
| --- | ----------- | ------- |
| `normal` | Cursor shape in [normal mode][normal mode] | `block` |
| `insert` | Cursor shape in [insert mode][insert mode] | `block` |
| `select` | Cursor shape in [select mode][select mode] | `block` |

[normal mode]: ./keymap.md#normal-mode
[insert mode]: ./keymap.md#insert-mode
[select mode]: ./keymap.md#select--extend-mode

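A common arrangement, as a sketch (any of the listed values can be substituted):

```toml
[editor.cursor-shape]
normal = "block"
insert = "bar"
select = "underline"
```
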
### `[editor.file-picker]` Section

Set options for the file picker and global search. Ignoring a file means it is
not visible in the Helix file picker and global search.

All git-related options are only enabled in a git repository.

| Key | Description | Default |
|--|--|---------|
| `hidden` | Enables ignoring hidden files | `true` |
| `follow-symlinks` | Follow symlinks instead of ignoring them | `true` |
| `deduplicate-links` | Ignore symlinks that point at files already shown in the picker | `true` |
| `parents` | Enables reading ignore files from parent directories | `true` |
| `ignore` | Enables reading `.ignore` files | `true` |
| `git-ignore` | Enables reading `.gitignore` files | `true` |
| `git-global` | Enables reading the global `.gitignore`, whose path is specified by git's `core.excludesfile` config option | `true` |
| `git-exclude` | Enables reading `.git/info/exclude` files | `true` |
| `max-depth` | Set to an integer value for the maximum depth to recurse | Unset by default |

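For instance, a sketch that also shows hidden files and limits recursion depth (values chosen for illustration):

```toml
[editor.file-picker]
hidden = false
max-depth = 4
```
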
Ignore files can be placed locally as `.ignore` or put in your home directory as `~/.ignore`. They support the usual ignore and negative ignore (unignore) rules used in `.gitignore` files.

Additionally, you can use Helix-specific ignore files by creating a local `.helix/ignore` file in the current workspace or a global `ignore` file located in your Helix config directory:

- Linux and Mac: `~/.config/helix/ignore`
- Windows: `%AppData%\helix\ignore`

Example:

```ini
# unignore in file picker and global search
!.github/
!.gitignore
!.gitattributes
```

### `[editor.auto-pairs]` Section

Enables automatic insertion of matching pairs for parentheses, brackets, etc. Can be a
simple boolean value, or a specific mapping of pairs of single characters.

To disable auto-pairs altogether, set `auto-pairs` to `false`:

```toml
[editor]
auto-pairs = false # defaults to `true`
```

The default pairs are <code>(){}[]''""``</code>, but these can be customized by
setting `auto-pairs` to a TOML table:

```toml
[editor.auto-pairs]
'(' = ')'
'{' = '}'
'[' = ']'
'"' = '"'
'`' = '`'
'<' = '>'
```

Additionally, this setting can be used in a language config. Unless
the editor setting is `false`, this will override the editor config in
documents with this language.

Example `languages.toml` that adds `<>` and removes `''`:

```toml
[[language]]
name = "rust"

[language.auto-pairs]
'(' = ')'
'{' = '}'
'[' = ']'
'"' = '"'
'`' = '`'
'<' = '>'
```

### `[editor.auto-save]` Section

Control auto save behavior.

| Key | Description | Default |
|--|--|---------|
| `focus-lost` | Enable automatic saving when focus moves away from Helix. Requires [focus event support](https://github.com/helix-editor/helix/wiki/Terminal-Support) from your terminal | `false` |
| `after-delay.enable` | Enable automatic saving after `auto-save.after-delay.timeout` milliseconds have passed since the last edit | `false` |
| `after-delay.timeout` | Time in milliseconds since the last edit before the auto save timer triggers | `3000` |

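For example, a sketch enabling both auto-save triggers (the timeout value is arbitrary):

```toml
[editor.auto-save]
focus-lost = true
after-delay.enable = true
after-delay.timeout = 1000
```
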
### `[editor.search]` Section

Search specific options.

| Key | Description | Default |
|--|--|---------|
| `smart-case` | Enable smart-case regex searching (case-insensitive unless the pattern contains upper case characters) | `true` |
| `wrap-around` | Whether the search should wrap around after reaching the last match | `true` |

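As a sketch, disabling both behaviors:

```toml
[editor.search]
smart-case = false
wrap-around = false
```
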
### `[editor.whitespace]` Section

Options for rendering whitespace with visible characters. Use `:set whitespace.render all` to temporarily enable visible whitespace.

| Key | Description | Default |
|-----|-------------|---------|
| `render` | Whether to render whitespace. May either be `all` or `none`, or a table with sub-keys `space`, `nbsp`, `nnbsp`, `tab`, and `newline` | `none` |
| `characters` | Literal characters to use when rendering whitespace. Sub-keys may be any of `tab`, `space`, `nbsp`, `nnbsp`, `newline` or `tabpad` | See example below |

Example:

```toml
[editor.whitespace]
render = "all"
# or control each character
[editor.whitespace.render]
space = "all"
tab = "all"
nbsp = "none"
nnbsp = "none"
newline = "none"

[editor.whitespace.characters]
space = "·"
nbsp = "⍽"
nnbsp = "␣"
tab = "→"
newline = "⏎"
tabpad = "·" # Tabs will look like "→···" (depending on tab width)
```

### `[editor.indent-guides]` Section

Options for rendering vertical indent guides.

| Key | Description | Default |
| --- | --- | --- |
| `render` | Whether to render indent guides | `false` |
| `character` | Literal character to use for rendering the indent guide | `│` |
| `skip-levels` | Number of indent levels to skip | `0` |

Example:

```toml
[editor.indent-guides]
render = true
character = "╎" # Some characters that work well: "▏", "┆", "┊", "⸽"
skip-levels = 1
```

### `[editor.gutters]` Section

For simplicity, `editor.gutters` accepts an array of gutter types, which will
use default settings for all gutter components.

```toml
[editor]
gutters = ["diff", "diagnostics", "line-numbers", "spacer"]
```

To customize the behavior of gutters, the `[editor.gutters]` section must
be used. This section contains top level settings, as well as settings for
specific gutter components as subsections.

| Key | Description | Default |
| --- | --- | --- |
| `layout` | A vector of gutters to display | `["diagnostics", "spacer", "line-numbers", "spacer", "diff"]` |

Example:

```toml
[editor.gutters]
layout = ["diff", "diagnostics", "line-numbers", "spacer"]
```

#### `[editor.gutters.line-numbers]` Section

Options for the line number gutter.

| Key | Description | Default |
| --- | --- | --- |
| `min-width` | The minimum number of characters to use | `3` |

Example:

```toml
[editor.gutters.line-numbers]
min-width = 1
```

#### `[editor.gutters.diagnostics]` Section

Currently unused.

#### `[editor.gutters.diff]` Section

The `diff` gutter option displays colored bars indicating whether a `git` diff represents a line that was added, removed or changed.
These colors are controlled by the theme attributes `diff.plus`, `diff.minus` and `diff.delta`.

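For instance, a theme could map these attributes roughly as follows (a sketch for a theme file, not `config.toml`; the color values are placeholders rather than Helix defaults):

```toml
# Hypothetical theme snippet for the diff gutter colors
"diff.plus" = "green"   # added lines
"diff.minus" = "red"    # removed lines
"diff.delta" = "yellow" # changed lines
```
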
Other diff providers will eventually be supported by a future plugin system.

There are currently no options for this section.

#### `[editor.gutters.spacer]` Section

Currently unused.

### `[editor.soft-wrap]` Section

Options for soft wrapping lines that exceed the view width:

| Key | Description | Default |
| --- | --- | --- |
| `enable` | Whether soft wrapping is enabled | `false` |
| `max-wrap` | Maximum free space left at the end of the line | `20` |
| `max-indent-retain` | Maximum indentation to carry over when soft wrapping a line | `40` |
| `wrap-indicator` | Text inserted before soft-wrapped lines, highlighted with `ui.virtual.wrap` | `↪ ` |
| `wrap-at-text-width` | Soft wrap at `text-width` instead of using the full viewport size | `false` |

Example:

```toml
[editor.soft-wrap]
enable = true
max-wrap = 25 # increase value to reduce forced mid-word wrapping
max-indent-retain = 0
wrap-indicator = "" # set wrap-indicator to "" to hide it
```

### `[editor.smart-tab]` Section

Options for navigating and editing using the tab key.

| Key | Description | Default |
|------------|-------------|---------|
| `enable` | If set to `true`, then when the cursor is in a position with non-whitespace to its left, pressing tab will run `move_parent_node_end` instead of inserting a tab. If there is only whitespace to the left, a tab is inserted as normal. With the default bindings, press Shift-tab to explicitly insert a tab character | `true` |
| `supersede-menu` | Normally, when a menu is on screen, such as when auto-complete is triggered, the tab key is bound to cycling through the items. This means that while a menu is on screen, the tab key cannot trigger the `smart-tab` command. If this option is set to `true`, the `smart-tab` command always takes precedence, which means the tab key can no longer cycle through menu items; one of the other bindings must be used instead, such as the arrow keys or `C-n`/`C-p` | `false` |

Due to the lack of support for S-tab in some terminals, the default keybindings don't fully embrace the smart-tab editing experience. If you enjoy smart-tab navigation and use a terminal that supports the [Enhanced Keyboard protocol](https://github.com/helix-editor/helix/wiki/Terminal-Support#enhanced-keyboard-protocol), consider setting extra keybindings:

```toml
[keys.normal]
tab = "move_parent_node_end"
S-tab = "move_parent_node_start"

[keys.insert]
S-tab = "move_parent_node_start"

[keys.select]
tab = "extend_parent_node_end"
S-tab = "extend_parent_node_start"
```

@@ -0,0 +1,150 @@

## Package managers

- [Linux](#linux)
  - [Ubuntu](#ubuntu)
  - [Fedora/RHEL](#fedorarhel)
  - [Arch Linux extra](#arch-linux-extra)
  - [NixOS](#nixos)
  - [Flatpak](#flatpak)
  - [Snap](#snap)
  - [AppImage](#appimage)
- [macOS](#macos)
  - [Homebrew Core](#homebrew-core)
  - [MacPorts](#macports)
- [Windows](#windows)
  - [Winget](#winget)
  - [Scoop](#scoop)
  - [Chocolatey](#chocolatey)
  - [MSYS2](#msys2)

[![Packaging status](https://repology.org/badge/vertical-allrepos/helix.svg)](https://repology.org/project/helix/versions)

## Linux

The following third party repositories are available:

### Ubuntu

Add the PPA for Helix:

```sh
sudo add-apt-repository ppa:maveonair/helix-editor
sudo apt update
sudo apt install helix
```

### Fedora/RHEL

```sh
sudo dnf install helix
```

### Arch Linux extra

Releases are available in the `extra` repository:

```sh
sudo pacman -S helix
```

> 💡 When installed from the `extra` repository, run Helix with `helix` instead of `hx`.
>
> For example, to check the health of your installation:
>
> ```sh
> helix --health
> ```

Additionally, a [helix-git](https://aur.archlinux.org/packages/helix-git/) package is available
in the AUR, which builds the master branch.

### NixOS

Helix is available in [nixpkgs](https://github.com/nixos/nixpkgs) through the `helix` attribute;
the unstable channel usually carries the latest release.

Helix is also available as a [flake](https://wiki.nixos.org/wiki/Flakes) in the project
root. Use `nix develop` to spin up a reproducible development shell. Outputs are
cached for each push to master using [Cachix](https://www.cachix.org/). The
flake is configured to automatically make use of this cache assuming the user
accepts the new settings on first use.

If you are using a version of Nix without flakes enabled,
[install Cachix CLI](https://docs.cachix.org/installation) and use
`cachix use helix` to configure Nix to use cached outputs when possible.

### Flatpak

Helix is available on [Flathub](https://flathub.org/en-GB/apps/com.helix_editor.Helix):

```sh
flatpak install flathub com.helix_editor.Helix
flatpak run com.helix_editor.Helix
```

### Snap

Helix is available on [Snapcraft](https://snapcraft.io/helix) and can be installed with:

```sh
snap install --classic helix
```

This will install Helix as both `/snap/bin/helix` and `/snap/bin/hx`, so make sure `/snap/bin` is in your `PATH`.

### AppImage

Install Helix using the Linux [AppImage](https://appimage.org/) format.
Download the official Helix AppImage from the [latest releases](https://github.com/helix-editor/helix/releases/latest) page.

```sh
chmod +x helix-*.AppImage # change permission for executable mode
./helix-*.AppImage # run helix
```

## macOS

### Homebrew Core

```sh
brew install helix
```

### MacPorts

```sh
port install helix
```

## Windows

Install on Windows using [Winget](https://learn.microsoft.com/en-us/windows/package-manager/winget/), [Scoop](https://scoop.sh/), [Chocolatey](https://chocolatey.org/)
or [MSYS2](https://msys2.org/).

### Winget

The Windows Package Manager `winget` command-line tool is available by default on Windows 11 and modern versions of Windows 10 as part of the App Installer.
You can get [App Installer from the Microsoft Store](https://www.microsoft.com/p/app-installer/9nblggh4nns1#activetab=pivot:overviewtab). If it's already installed, make sure it is updated to the latest version.

```sh
winget install Helix.Helix
```

### Scoop

```sh
scoop install helix
```

### Chocolatey

```sh
choco install helix
```

### MSYS2

For 64-bit Windows 8.1 or above:

```sh
pacman -S mingw-w64-ucrt-x86_64-helix
```

@@ -0,0 +1,54 @@

## Registers

- [User-defined registers](#user-defined-registers)
- [Default registers](#default-registers)
- [Special registers](#special-registers)

In Helix, registers are storage locations for text and other data, such as the
result of a search. Registers can be used to cut, copy, and paste text, similar
to the clipboard in other text editors. Usage is similar to Vim, with `"` being
used to select a register.

### User-defined registers

Helix allows you to create your own named registers for storing text, for
example:

- `"ay` - Yank the current selection to register `a`.
- `"op` - Paste the text in register `o` after the selection.

If a register is selected before invoking a change or delete command, the selection will be stored in the register and the action will be carried out:

- `"hc` - Store the selection in register `h` and then change it (delete and enter insert mode).
- `"md` - Store the selection in register `m` and delete it.

### Default registers

Commands that use registers, like yank (`y`), use a default register if none is specified.
These registers are used as defaults:

| Register character | Contains |
| --- | --- |
| `/` | Last search |
| `:` | Last executed command |
| `"` | Last yanked text |
| `@` | Last recorded macro |

### Special registers

Some registers have special behavior when read from and written to.

| Register character | When read | When written |
| --- | --- | --- |
| `_` | No values are returned | All values are discarded |
| `#` | Selection indices (first selection is `1`, second is `2`, etc.) | This register is not writable |
| `.` | Contents of the current selections | This register is not writable |
| `%` | Name of the current file | This register is not writable |
| `+` | Reads from the system clipboard | Joins and yanks to the system clipboard |
| `*` | Reads from the primary clipboard | Joins and yanks to the primary clipboard |

When yanking multiple selections to the clipboard registers, the selections
are joined with newlines. Pasting from these registers will paste multiple
selections if the clipboard was last yanked to by the Helix session. Otherwise
the clipboard contents are pasted as one selection.

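For example, combining the register selector with the yank and paste keys shown earlier, `"+y` yanks the current selection to the system clipboard and `"+p` pastes from it. This is a usage sketch derived from the table above, not an exhaustive list.
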
@@ -0,0 +1,24 @@

## Surround

Helix includes built-in functionality similar to [vim-surround](https://github.com/tpope/vim-surround).
The keymappings have been inspired by [vim-sandwich](https://github.com/machakann/vim-sandwich):

![Surround demo](https://user-images.githubusercontent.com/23398472/122865801-97073180-d344-11eb-8142-8f43809982c6.gif)

| Key Sequence                      | Action                                   |
| --------------------------------- | ---------------------------------------- |
| `ms<char>` (after selecting text) | Add surround characters to selection     |
| `mr<char_to_replace><new_char>`   | Replace the closest surround characters  |
| `md<char_to_delete>`              | Delete the closest surround characters   |

You can use counts to act on outer pairs.

Surround can also act on multiple selections. For example, to change every occurrence of `(use)` to `[use]`:

1. `%` to select the whole file
2. `s` to split the selections on a search term
3. Input `use` and hit Enter
4. `mr([` to replace the parentheses with square brackets

Multiple characters are currently not supported, but are planned for a future release.

@@ -0,0 +1,66 @@

## Moving the selection with syntax-aware motions

`Alt-p`, `Alt-o`, `Alt-i`, and `Alt-n` (or `Alt` and arrow keys) allow you to move the
selection according to its location in the syntax tree. For example, many languages have the
following syntax for function calls:

```js
func(arg1, arg2, arg3);
```

A function call might be parsed by tree-sitter into a tree like the following.

```tsq
(call
  function: (identifier) ; func
  arguments:
    (arguments ; (arg1, arg2, arg3)
      (identifier) ; arg1
      (identifier) ; arg2
      (identifier))) ; arg3
```

Use `:tree-sitter-subtree` to view the syntax tree of the primary selection. In
a more intuitive tree format, it looks like this:

```
            ┌────┐
            │call│
      ┌─────┴────┴─────┐
      │                │
┌─────▼────┐      ┌────▼────┐
│identifier│      │arguments│
│  "func"  │ ┌────┴───┬─────┴───┐
└──────────┘ │        │         │
             │        │         │
   ┌─────────▼┐  ┌────▼─────┐  ┌▼─────────┐
   │identifier│  │identifier│  │identifier│
   │  "arg1"  │  │  "arg2"  │  │  "arg3"  │
   └──────────┘  └──────────┘  └──────────┘
```

If you have a selection that wraps `arg1` (see the tree above), and you use
`Alt-n`, it will select the next sibling in the syntax tree: `arg2`.

```js
// before
func([arg1], arg2, arg3);
// after
func(arg1, [arg2], arg3);
```

Similarly, `Alt-o` will expand the selection to the parent node, in this case, the
arguments node.

```js
func[(arg1, arg2, arg3)];
```

There is also some nuanced behavior that prevents you from getting stuck on a
node with no sibling. When using `Alt-p` with a selection on `arg1`, the previous
child node will be selected. In the event that `arg1` does not have a previous
sibling, the selection will move up the syntax tree and select the previous
element. As a result, using `Alt-p` with a selection on `arg1` will move the
selection to the "func" `identifier`.

[lang-support]: ./lang-support.md

@@ -0,0 +1,47 @@

## Selecting and manipulating text with textobjects

In Helix, textobjects are a way to select, manipulate and operate on a piece of
text in a structured way. They allow you to refer to blocks of text based on
their structure or purpose, such as a word, sentence, paragraph, or even a
function or block of code.

![Textobject demo](https://user-images.githubusercontent.com/23398472/124231131-81a4bb00-db2d-11eb-9d10-8e577ca7b177.gif)
![Textobject tree-sitter demo](https://user-images.githubusercontent.com/23398472/132537398-2a2e0a54-582b-44ab-a77f-eb818942203d.gif)

- `ma` - Select around the object (`va` in Vim, `<alt-a>` in Kakoune)
- `mi` - Select inside the object (`vi` in Vim, `<alt-i>` in Kakoune)

| Key after `mi` or `ma` | Textobject selected |
| --- | --- |
| `w` | Word |
| `W` | WORD |
| `p` | Paragraph |
| `(`, `[`, `'`, etc. | Specified surround pairs |
| `m` | The closest surround pair |
| `f` | Function |
| `t` | Type (or Class) |
| `a` | Argument/parameter |
| `c` | Comment |
| `T` | Test |
| `g` | Change |

> 💡 `f`, `t`, etc. need a tree-sitter grammar active for the current
> document and a special tree-sitter query file to work properly. [Only
> some grammars](./lang-support.md) currently have the query file implemented.
> Contributions are welcome!

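To give a sense of what such a query file contains, here is a minimal sketch for a C-like grammar. The capture-name convention (`function.inside`, `function.around`, `comment.around`) follows Helix's textobject queries, but the node names shown are illustrative and depend on the grammar:

```tsq
; textobjects.scm (sketch): map syntax nodes to textobject captures
(function_definition
  body: (_) @function.inside) @function.around

(comment) @comment.inside @comment.around
```
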
## Navigating using tree-sitter textobjects

Navigating between functions, classes, parameters, and other elements is
possible using tree-sitter and textobject queries. For
example, to move to the next function use `]f`, to move to the previous
type use `[t`, and so on.

![Tree-sitter-nav-demo](https://user-images.githubusercontent.com/23398472/152332550-7dfff043-36a2-4aec-b8f2-77c13eb56d6f.gif)

For the full reference see the [unimpaired](./keymap.html#unimpaired) section of the keymap
documentation.

> 💡 This feature relies on tree-sitter textobjects
> and requires the corresponding query file to work properly.

Binary file not shown (size: 264 KiB).
@@ -1,10 +1,45 @@
-/// Syntax configuration loader based on built-in languages.toml.
-pub fn default_syntax_loader() -> crate::syntax::Configuration {
+use crate::syntax::{Configuration, Loader, LoaderError};
+
+/// Language configuration based on built-in languages.toml.
+pub fn default_lang_config() -> Configuration {
     helix_loader::config::default_lang_config()
         .try_into()
-        .expect("Could not serialize built-in languages.toml")
+        .expect("Could not deserialize built-in languages.toml")
 }
-/// Syntax configuration loader based on user configured languages.toml.
-pub fn user_syntax_loader() -> Result<crate::syntax::Configuration, toml::de::Error> {
+
+/// Language configuration loader based on built-in languages.toml.
+pub fn default_lang_loader() -> Loader {
+    Loader::new(default_lang_config()).expect("Could not compile loader for default config")
+}
+
+#[derive(Debug)]
+pub enum LanguageLoaderError {
+    DeserializeError(toml::de::Error),
+    LoaderError(LoaderError),
+}
+
+impl std::fmt::Display for LanguageLoaderError {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        match self {
+            Self::DeserializeError(err) => write!(f, "Failed to parse language config: {err}"),
+            Self::LoaderError(err) => write!(f, "Failed to compile language config: {err}"),
+        }
+    }
+}
+
+impl std::error::Error for LanguageLoaderError {}
+
+/// Language configuration based on user configured languages.toml.
+pub fn user_lang_config() -> Result<Configuration, toml::de::Error> {
     helix_loader::config::user_lang_config()?.try_into()
 }
+
+/// Language configuration loader based on user configured languages.toml.
+pub fn user_lang_loader() -> Result<Loader, LanguageLoaderError> {
+    let config: Configuration = helix_loader::config::user_lang_config()
+        .map_err(LanguageLoaderError::DeserializeError)?
+        .try_into()
+        .map_err(LanguageLoaderError::DeserializeError)?;
+
+    Loader::new(config).map_err(LanguageLoaderError::LoaderError)
+}
@@ -1,76 +1,137 @@
-use crate::{Range, RopeSlice, Selection, Syntax};
-use tree_sitter::Node;
+use crate::{movement::Direction, syntax::TreeCursor, Range, RopeSlice, Selection, Syntax};
 
 pub fn expand_selection(syntax: &Syntax, text: RopeSlice, selection: Selection) -> Selection {
-    select_node_impl(syntax, text, selection, |mut node, from, to| {
-        while node.start_byte() == from && node.end_byte() == to {
-            node = node.parent()?;
+    let cursor = &mut syntax.walk();
+
+    selection.transform(|range| {
+        let from = text.char_to_byte(range.from());
+        let to = text.char_to_byte(range.to());
+
+        let byte_range = from..to;
+        cursor.reset_to_byte_range(from, to);
+
+        while cursor.node().byte_range() == byte_range {
+            if !cursor.goto_parent() {
+                break;
+            }
         }
-        Some(node)
+
+        let node = cursor.node();
+        let from = text.byte_to_char(node.start_byte());
+        let to = text.byte_to_char(node.end_byte());
+
+        Range::new(to, from).with_direction(range.direction())
     })
 }
 
 pub fn shrink_selection(syntax: &Syntax, text: RopeSlice, selection: Selection) -> Selection {
-    select_node_impl(syntax, text, selection, |descendant, _from, _to| {
-        descendant.child(0).or(Some(descendant))
-    })
+    select_node_impl(
+        syntax,
+        text,
+        selection,
+        |cursor| {
+            cursor.goto_first_child();
+        },
+        None,
+    )
 }
 
-pub fn select_sibling<F>(
-    syntax: &Syntax,
-    text: RopeSlice,
-    selection: Selection,
-    sibling_fn: &F,
-) -> Selection
-where
-    F: Fn(Node) -> Option<Node>,
-{
-    select_node_impl(syntax, text, selection, |descendant, _from, _to| {
-        find_sibling_recursive(descendant, sibling_fn)
+pub fn select_next_sibling(syntax: &Syntax, text: RopeSlice, selection: Selection) -> Selection {
+    select_node_impl(
+        syntax,
+        text,
+        selection,
+        |cursor| {
+            while !cursor.goto_next_sibling() {
+                if !cursor.goto_parent() {
+                    break;
+                }
+            }
+        },
+        Some(Direction::Forward),
+    )
+}
+
+pub fn select_all_siblings(syntax: &Syntax, text: RopeSlice, selection: Selection) -> Selection {
+    selection.transform_iter(|range| {
+        let mut cursor = syntax.walk();
+        let (from, to) = range.into_byte_range(text);
+        cursor.reset_to_byte_range(from, to);
+
+        if !cursor.goto_parent_with(|parent| parent.child_count() > 1) {
+            return vec![range].into_iter();
+        }
+
+        select_children(&mut cursor, text, range).into_iter()
     })
 }
 
-fn find_sibling_recursive<F>(node: Node, sibling_fn: F) -> Option<Node>
-where
-    F: Fn(Node) -> Option<Node>,
-{
-    sibling_fn(node).or_else(|| {
-        node.parent()
-            .and_then(|node| find_sibling_recursive(node, sibling_fn))
+pub fn select_all_children(syntax: &Syntax, text: RopeSlice, selection: Selection) -> Selection {
+    selection.transform_iter(|range| {
+        let mut cursor = syntax.walk();
+        let (from, to) = range.into_byte_range(text);
+        cursor.reset_to_byte_range(from, to);
+        select_children(&mut cursor, text, range).into_iter()
     })
 }
 
+fn select_children<'n>(
+    cursor: &'n mut TreeCursor<'n>,
+    text: RopeSlice,
+    range: Range,
+) -> Vec<Range> {
+    let children = cursor
+        .named_children()
+        .map(|child| Range::from_node(child, text, range.direction()))
+        .collect::<Vec<_>>();
+
+    if !children.is_empty() {
+        children
+    } else {
+        vec![range]
+    }
+}
+
+pub fn select_prev_sibling(syntax: &Syntax, text: RopeSlice, selection: Selection) -> Selection {
+    select_node_impl(
+        syntax,
+        text,
+        selection,
+        |cursor| {
+            while !cursor.goto_prev_sibling() {
+                if !cursor.goto_parent() {
+                    break;
+                }
+            }
+        },
+        Some(Direction::Backward),
+    )
+}
+
 fn select_node_impl<F>(
     syntax: &Syntax,
     text: RopeSlice,
     selection: Selection,
-    select_fn: F,
+    motion: F,
+    direction: Option<Direction>,
 ) -> Selection
 where
-    F: Fn(Node, usize, usize) -> Option<Node>,
+    F: Fn(&mut TreeCursor),
 {
-    let tree = syntax.tree();
+    let cursor = &mut syntax.walk();
 
     selection.transform(|range| {
         let from = text.char_to_byte(range.from());
         let to = text.char_to_byte(range.to());
 
-        let node = match tree
-            .root_node()
-            .descendant_for_byte_range(from, to)
-            .and_then(|node| select_fn(node, from, to))
-        {
-            Some(node) => node,
-            None => return range,
-        };
+        cursor.reset_to_byte_range(from, to);
+
+        motion(cursor);
 
+        let node = cursor.node();
         let from = text.byte_to_char(node.start_byte());
         let to = text.byte_to_char(node.end_byte());
 
-        if range.head < range.anchor {
-            Range::new(to, from)
-        } else {
-            Range::new(from, to)
-        }
+        Range::new(from, to).with_direction(direction.unwrap_or_else(|| range.direction()))
     })
 }
@@ -1,162 +0,0 @@
use etcetera::home_dir;
use std::path::{Component, Path, PathBuf};

/// Replaces users home directory from `path` with tilde `~` if the directory
/// is available, otherwise returns the path unchanged.
pub fn fold_home_dir(path: &Path) -> PathBuf {
    if let Ok(home) = home_dir() {
        if let Ok(stripped) = path.strip_prefix(&home) {
            return PathBuf::from("~").join(stripped);
        }
    }

    path.to_path_buf()
}

/// Expands tilde `~` into users home directory if available, otherwise returns the path
/// unchanged. The tilde will only be expanded when present as the first component of the path
/// and only slash follows it.
pub fn expand_tilde(path: &Path) -> PathBuf {
    let mut components = path.components().peekable();
    if let Some(Component::Normal(c)) = components.peek() {
        if c == &"~" {
            if let Ok(home) = home_dir() {
                // it's ok to unwrap, the path starts with `~`
                return home.join(path.strip_prefix("~").unwrap());
            }
        }
    }

    path.to_path_buf()
}

/// Normalize a path, removing things like `.` and `..`.
///
/// CAUTION: This does not resolve symlinks (unlike
/// [`std::fs::canonicalize`]). This may cause incorrect or surprising
/// behavior at times. This should be used carefully. Unfortunately,
/// [`std::fs::canonicalize`] can be hard to use correctly, since it can often
/// fail, or on Windows returns annoying device paths. This is a problem Cargo
/// needs to improve on.
/// Copied from cargo: <https://github.com/rust-lang/cargo/blob/070e459c2d8b79c5b2ac5218064e7603329c92ae/crates/cargo-util/src/paths.rs#L81>
pub fn get_normalized_path(path: &Path) -> PathBuf {
    // normalization strategy is to canonicalize first ancestor path that exists (i.e., canonicalize as much as possible),
    // then run handrolled normalization on the non-existent remainder
    let (base, path) = path
        .ancestors()
        .find_map(|base| {
            let canonicalized_base = dunce::canonicalize(base).ok()?;
            let remainder = path.strip_prefix(base).ok()?.into();
            Some((canonicalized_base, remainder))
        })
        .unwrap_or_else(|| (PathBuf::new(), PathBuf::from(path)));

    if path.as_os_str().is_empty() {
        return base;
    }

    let mut components = path.components().peekable();
    let mut ret = if let Some(c @ Component::Prefix(..)) = components.peek().cloned() {
        components.next();
        PathBuf::from(c.as_os_str())
    } else {
        PathBuf::new()
    };

    for component in components {
        match component {
            Component::Prefix(..) => unreachable!(),
            Component::RootDir => {
                ret.push(component.as_os_str());
            }
            Component::CurDir => {}
            Component::ParentDir => {
                ret.pop();
            }
            Component::Normal(c) => {
                ret.push(c);
            }
        }
    }
    base.join(ret)
}

/// Returns the canonical, absolute form of a path with all intermediate components normalized.
///
/// This function is used instead of `std::fs::canonicalize` because we don't want to verify
/// here if the path exists, just normalize its components.
pub fn get_canonicalized_path(path: &Path) -> PathBuf {
    let path = expand_tilde(path);
    let path = if path.is_relative() {
        helix_loader::current_working_dir().join(path)
    } else {
        path
    };

    get_normalized_path(path.as_path())
}

pub fn get_relative_path(path: &Path) -> PathBuf {
    let path = PathBuf::from(path);
    let path = if path.is_absolute() {
        let cwdir = get_normalized_path(&helix_loader::current_working_dir());
        get_normalized_path(&path)
            .strip_prefix(cwdir)
            .map(PathBuf::from)
            .unwrap_or(path)
    } else {
        path
    };
    fold_home_dir(&path)
}

/// Returns a truncated filepath where the basepart of the path is reduced to the first
/// char of the folder and the whole filename appended.
///
/// Also strip the current working directory from the beginning of the path.
/// Note that this function does not check if the truncated path is unambiguous.
///
/// ```
/// use helix_core::path::get_truncated_path;
/// use std::path::Path;
///
/// assert_eq!(
///     get_truncated_path("/home/cnorris/documents/jokes.txt").as_path(),
///     Path::new("/h/c/d/jokes.txt")
/// );
/// assert_eq!(
///     get_truncated_path("jokes.txt").as_path(),
///     Path::new("jokes.txt")
/// );
/// assert_eq!(
///     get_truncated_path("/jokes.txt").as_path(),
///     Path::new("/jokes.txt")
/// );
/// assert_eq!(
///     get_truncated_path("/h/c/d/jokes.txt").as_path(),
///     Path::new("/h/c/d/jokes.txt")
/// );
/// assert_eq!(get_truncated_path("").as_path(), Path::new(""));
/// ```
///
pub fn get_truncated_path<P: AsRef<Path>>(path: P) -> PathBuf {
    let cwd = helix_loader::current_working_dir();
    let path = path
        .as_ref()
        .strip_prefix(cwd)
        .unwrap_or_else(|_| path.as_ref());
    let file = path.file_name().unwrap_or_default();
    let base = path.parent().unwrap_or_else(|| Path::new(""));
    let mut ret = PathBuf::new();
    for d in base {
        ret.push(
            d.to_string_lossy()
                .chars()
                .next()
                .unwrap_or_default()
                .to_string(),
        );
    }
    ret.push(file);
    ret
}
@@ -0,0 +1,264 @@
use std::{cmp::Reverse, ops::Range};

use super::{LanguageLayer, LayerId};

use slotmap::HopSlotMap;
use tree_sitter::Node;

/// The byte range of an injection layer.
///
/// Injection ranges may overlap, but all overlapping parts are subsets of their parent ranges.
/// This allows us to sort the ranges ahead of time in order to efficiently find a range that
/// contains a point with maximum depth.
#[derive(Debug)]
struct InjectionRange {
    start: usize,
    end: usize,
    layer_id: LayerId,
    depth: u32,
}

pub struct TreeCursor<'a> {
    layers: &'a HopSlotMap<LayerId, LanguageLayer>,
    root: LayerId,
    current: LayerId,
    injection_ranges: Vec<InjectionRange>,
    // TODO: Ideally this would be a `tree_sitter::TreeCursor<'a>` but
    // that returns very surprising results in testing.
    cursor: Node<'a>,
}

impl<'a> TreeCursor<'a> {
    pub(super) fn new(layers: &'a HopSlotMap<LayerId, LanguageLayer>, root: LayerId) -> Self {
        let mut injection_ranges = Vec::new();

        for (layer_id, layer) in layers.iter() {
            // Skip the root layer
            if layer.parent.is_none() {
                continue;
            }
            for byte_range in layer.ranges.iter() {
                let range = InjectionRange {
                    start: byte_range.start_byte,
                    end: byte_range.end_byte,
                    layer_id,
                    depth: layer.depth,
                };
                injection_ranges.push(range);
            }
        }

        injection_ranges.sort_unstable_by_key(|range| (range.end, Reverse(range.depth)));

        let cursor = layers[root].tree().root_node();

        Self {
            layers,
            root,
            current: root,
            injection_ranges,
            cursor,
        }
    }

    pub fn node(&self) -> Node<'a> {
        self.cursor
    }

    pub fn goto_parent(&mut self) -> bool {
        if let Some(parent) = self.node().parent() {
            self.cursor = parent;
            return true;
        }

        // If we are already on the root layer, we cannot ascend.
        if self.current == self.root {
            return false;
        }

        // Ascend to the parent layer.
        let range = self.node().byte_range();
        let parent_id = self.layers[self.current]
            .parent
            .expect("non-root layers have a parent");
        self.current = parent_id;
        let root = self.layers[self.current].tree().root_node();
        self.cursor = root
            .descendant_for_byte_range(range.start, range.end)
            .unwrap_or(root);

        true
    }

    pub fn goto_parent_with<P>(&mut self, predicate: P) -> bool
    where
        P: Fn(&Node) -> bool,
    {
        while self.goto_parent() {
            if predicate(&self.node()) {
                return true;
            }
        }

        false
    }

    /// Finds the injection layer that has exactly the same range as the given `range`.
    fn layer_id_of_byte_range(&self, search_range: Range<usize>) -> Option<LayerId> {
        let start_idx = self
            .injection_ranges
            .partition_point(|range| range.end < search_range.end);

        self.injection_ranges[start_idx..]
            .iter()
            .take_while(|range| range.end == search_range.end)
            .find_map(|range| (range.start == search_range.start).then_some(range.layer_id))
    }

    fn goto_first_child_impl(&mut self, named: bool) -> bool {
        // Check if the current node's range is an exact injection layer range.
        if let Some(layer_id) = self
            .layer_id_of_byte_range(self.node().byte_range())
            .filter(|&layer_id| layer_id != self.current)
        {
            // Switch to the child layer.
            self.current = layer_id;
            self.cursor = self.layers[self.current].tree().root_node();
            return true;
        }

        let child = if named {
            self.cursor.named_child(0)
        } else {
            self.cursor.child(0)
        };

        if let Some(child) = child {
            // Otherwise descend in the current tree.
            self.cursor = child;
            true
        } else {
            false
        }
    }

    pub fn goto_first_child(&mut self) -> bool {
        self.goto_first_child_impl(false)
    }

    pub fn goto_first_named_child(&mut self) -> bool {
        self.goto_first_child_impl(true)
    }

    fn goto_next_sibling_impl(&mut self, named: bool) -> bool {
        let sibling = if named {
            self.cursor.next_named_sibling()
        } else {
            self.cursor.next_sibling()
        };

        if let Some(sibling) = sibling {
            self.cursor = sibling;
            true
        } else {
            false
        }
    }

    pub fn goto_next_sibling(&mut self) -> bool {
        self.goto_next_sibling_impl(false)
    }

    pub fn goto_next_named_sibling(&mut self) -> bool {
        self.goto_next_sibling_impl(true)
    }

    fn goto_prev_sibling_impl(&mut self, named: bool) -> bool {
        let sibling = if named {
            self.cursor.prev_named_sibling()
        } else {
            self.cursor.prev_sibling()
        };

        if let Some(sibling) = sibling {
            self.cursor = sibling;
            true
        } else {
            false
        }
    }

    pub fn goto_prev_sibling(&mut self) -> bool {
        self.goto_prev_sibling_impl(false)
    }

    pub fn goto_prev_named_sibling(&mut self) -> bool {
        self.goto_prev_sibling_impl(true)
    }

    /// Finds the injection layer that contains the given start-end range.
    fn layer_id_containing_byte_range(&self, start: usize, end: usize) -> LayerId {
        let start_idx = self
            .injection_ranges
            .partition_point(|range| range.end < end);

        self.injection_ranges[start_idx..]
            .iter()
            .take_while(|range| range.start < end)
            .find_map(|range| (range.start <= start).then_some(range.layer_id))
            .unwrap_or(self.root)
    }

    pub fn reset_to_byte_range(&mut self, start: usize, end: usize) {
        self.current = self.layer_id_containing_byte_range(start, end);
        let root = self.layers[self.current].tree().root_node();
        self.cursor = root.descendant_for_byte_range(start, end).unwrap_or(root);
    }

    /// Returns an iterator over the children of the node the TreeCursor is on
    /// at the time this is called.
    pub fn children(&'a mut self) -> ChildIter {
        let parent = self.node();

        ChildIter {
            cursor: self,
            parent,
            named: false,
        }
    }

    /// Returns an iterator over the named children of the node the TreeCursor is on
    /// at the time this is called.
    pub fn named_children(&'a mut self) -> ChildIter {
        let parent = self.node();

        ChildIter {
            cursor: self,
            parent,
            named: true,
        }
    }
}

pub struct ChildIter<'n> {
    cursor: &'n mut TreeCursor<'n>,
    parent: Node<'n>,
    named: bool,
}

impl<'n> Iterator for ChildIter<'n> {
    type Item = Node<'n>;

    fn next(&mut self) -> Option<Self::Item> {
        // first iteration, just visit the first child
        if self.cursor.node() == self.parent {
            self.cursor
                .goto_first_child_impl(self.named)
                .then(|| self.cursor.node())
        } else {
            self.cursor
                .goto_next_sibling_impl(self.named)
                .then(|| self.cursor.node())
        }
    }
}
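Assuming the `syntax.walk()` call seen in the `select_node_impl` hunk above returns a `TreeCursor` rooted at the top layer, a sketch of typical navigation looks like this (the predicate and node kind are illustrative):

```rust
// Position the cursor on the smallest node covering a byte range, then ascend
// until a node of interest is found, transparently crossing injection layers.
fn enclosing_function_range_sketch(syntax: &Syntax, from: usize, to: usize) -> Option<(usize, usize)> {
    let mut cursor = syntax.walk();
    cursor.reset_to_byte_range(from, to);
    if cursor.goto_parent_with(|node| node.kind() == "function_item") {
        let range = cursor.node().byte_range();
        Some((range.start, range.end))
    } else {
        None
    }
}
```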
@@ -1,7 +1,9 @@
use smartstring::{LazyCompact, SmartString};
+use textwrap::{Options, WordSplitter::NoHyphenation};

/// Given a slice of text, return the text re-wrapped to fit it
/// within the given width.
pub fn reflow_hard_wrap(text: &str, text_width: usize) -> SmartString<LazyCompact> {
-    textwrap::refill(text, text_width).into()
+    let options = Options::new(text_width).word_splitter(NoHyphenation);
+    textwrap::refill(text, options).into()
}
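As a quick illustration of the `NoHyphenation` change (a sketch, not part of the diff): hyphenated words are now kept intact instead of being split at the hyphen.

```rust
#[cfg(test)]
mod reflow_sketch {
    use super::reflow_hard_wrap;

    #[test]
    fn keeps_hyphenated_words_together() {
        let wrapped = reflow_hard_wrap("a tree-sitter based editor", 15);
        // every produced line respects the requested width
        assert!(wrapped.lines().all(|line| line.len() <= 15));
        // "tree-sitter" is treated as a single word and never broken at the hyphen
        assert!(wrapped.contains("tree-sitter"));
    }
}
```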
@@ -1,25 +1,27 @@
[package]
name = "helix-dap"
-version = "0.6.0"
-authors = ["Blaž Hrastnik <blaz@mxxn.io>"]
-edition = "2018"
-license = "MPL-2.0"
description = "DAP client implementation for Helix project"
-categories = ["editor"]
-repository = "https://github.com/helix-editor/helix"
-homepage = "https://helix-editor.com"
+version.workspace = true
+authors.workspace = true
+edition.workspace = true
+license.workspace = true
+rust-version.workspace = true
+categories.workspace = true
+repository.workspace = true
+homepage.workspace = true

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
-helix-core = { version = "0.6", path = "../helix-core" }
+helix-stdx = { path = "../helix-stdx" }
+helix-core = { path = "../helix-core" }

anyhow = "1.0"
log = "0.4"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
-thiserror = "1.0"
tokio = { version = "1", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot", "net", "sync"] }
-which = "4.4"
+thiserror.workspace = true

[dev-dependencies]
fern = "0.6"
@@ -1,15 +1,29 @@
[package]
name = "helix-event"
-version = "0.6.0"
-authors = ["Blaž Hrastnik <blaz@mxxn.io>"]
-edition = "2021"
-license = "MPL-2.0"
-categories = ["editor"]
-repository = "https://github.com/helix-editor/helix"
-homepage = "https://helix-editor.com"
+version.workspace = true
+authors.workspace = true
+edition.workspace = true
+license.workspace = true
+rust-version.workspace = true
+categories.workspace = true
+repository.workspace = true
+homepage.workspace = true

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
-tokio = { version = "1", features = ["rt", "rt-multi-thread", "time", "sync", "parking_lot"] }
-parking_lot = { version = "0.12", features = ["send_guard"] }
+ahash = "0.8.11"
+hashbrown = "0.14.5"
+tokio = { version = "1", features = ["rt", "rt-multi-thread", "time", "sync", "parking_lot", "macros"] }
+# the event registry is essentially read only but must be an rwlock so we can
+# setup new events on initialization, hardware-lock-elision hugely benefits this case
+# as it essentially makes the lock entirely free as long as there is no writes
+parking_lot = { version = "0.12", features = ["hardware-lock-elision"] }
+once_cell = "1.18"
+
+anyhow = "1"
+log = "0.4"
+futures-executor = "0.3.28"
+
+[features]
+integration_test = []
@@ -0,0 +1,19 @@
use std::future::Future;

pub use oneshot::channel as cancelation;
use tokio::sync::oneshot;

pub type CancelTx = oneshot::Sender<()>;
pub type CancelRx = oneshot::Receiver<()>;

pub async fn cancelable_future<T>(future: impl Future<Output = T>, cancel: CancelRx) -> Option<T> {
    tokio::select! {
        biased;
        _ = cancel => {
            None
        }
        res = future => {
            Some(res)
        }
    }
}
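A hedged usage sketch of the cancellation helper (names are illustrative). Note that, with the `oneshot` receiver used above, dropping the `CancelTx` also resolves the cancel branch, so either an explicit send or a drop cancels the wrapped future:

```rust
use std::time::Duration;

use helix_event::{cancelable_future, cancelation};

async fn cancel_sketch() {
    let (cancel_tx, cancel_rx) = cancelation();
    let slow_work = async {
        tokio::time::sleep(Duration::from_secs(10)).await;
        42
    };
    // Cancel before the work can finish; the biased select returns None.
    drop(cancel_tx);
    assert_eq!(cancelable_future(slow_work, cancel_rx).await, None);
}
```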
@@ -0,0 +1,67 @@
//! Utilities for declaring an async (usually debounced) hook

use std::time::Duration;

use futures_executor::block_on;
use tokio::sync::mpsc::{self, error::TrySendError, Sender};
use tokio::time::Instant;

/// Async hooks provide a convenient framework for implementing (debounced)
/// async event handlers. Most synchronous event hooks will likely need to
/// debounce their events, coordinate multiple different hooks and potentially
/// track some state. `AsyncHooks` facilitate these use cases by running as
/// a background tokio task that waits for events (usually an enum) to be
/// sent through a channel.
pub trait AsyncHook: Sync + Send + 'static + Sized {
    type Event: Sync + Send + 'static;
    /// Called immediately whenever an event is received, this function can
    /// consume the event immediately or debounce it. In case of debouncing,
    /// it can either define a new debounce timeout or continue the current one
    fn handle_event(&mut self, event: Self::Event, timeout: Option<Instant>) -> Option<Instant>;

    /// Called whenever the debounce timeline is reached
    fn finish_debounce(&mut self);

    fn spawn(self) -> mpsc::Sender<Self::Event> {
        // the capacity doesn't matter too much here, unless the cpu is totally overwhelmed
        // the cap will never be reached since we always immediately drain the channel
        // so it should only be reached in case of total CPU overload.
        // However, a bounded channel is much more efficient so it's nice to use here
        let (tx, rx) = mpsc::channel(128);
        tokio::spawn(run(self, rx));
        tx
    }
}

async fn run<Hook: AsyncHook>(mut hook: Hook, mut rx: mpsc::Receiver<Hook::Event>) {
    let mut deadline = None;
    loop {
        let event = match deadline {
            Some(deadline_) => {
                let res = tokio::time::timeout_at(deadline_, rx.recv()).await;
                match res {
                    Ok(event) => event,
                    Err(_) => {
                        hook.finish_debounce();
                        deadline = None;
                        continue;
                    }
                }
            }
            None => rx.recv().await,
        };
        let Some(event) = event else {
            break;
        };
        deadline = hook.handle_event(event, deadline);
    }
}

pub fn send_blocking<T>(tx: &Sender<T>, data: T) {
    // block_on has some overhead and in practice the channel should basically
    // never be full anyway so first try sending without blocking
    if let Err(TrySendError::Full(data)) = tx.try_send(data) {
        // set a timeout so that we just drop a message instead of freezing the editor in the worst case
        let _ = block_on(tx.send_timeout(data, Duration::from_millis(10)));
    }
}
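A sketch of implementing the trait for a debounced handler; the type, event enum, and timeout are hypothetical, and only the `AsyncHook` API itself comes from the code above:

```rust
use std::time::Duration;

use helix_event::{send_blocking, AsyncHook};
use tokio::time::Instant;

struct AutoFlush {
    pending: usize,
}

enum FlushEvent {
    Changed,
}

impl AsyncHook for AutoFlush {
    type Event = FlushEvent;

    fn handle_event(&mut self, _event: Self::Event, _timeout: Option<Instant>) -> Option<Instant> {
        // count the change and restart a 300ms debounce window
        self.pending += 1;
        Some(Instant::now() + Duration::from_millis(300))
    }

    fn finish_debounce(&mut self) {
        // the window elapsed with no new events: do the real (expensive) work here
        log::info!("flushing {} pending changes", self.pending);
        self.pending = 0;
    }
}

fn wire_up_sketch() -> tokio::sync::mpsc::Sender<FlushEvent> {
    let tx = AutoFlush { pending: 0 }.spawn();
    // synchronous hooks forward their events with send_blocking, not blocking_send
    send_blocking(&tx, FlushEvent::Changed);
    tx
}
```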
@@ -0,0 +1,91 @@
//! rust dynamic dispatch is extremely limited so we have to build our
//! own vtable implementation. Otherwise implementing the event system would not be possible.
//! A nice bonus of this approach is that we can optimize the vtable a bit more. Normally
//! a dyn Trait fat pointer contains two pointers: A pointer to the data itself and a
//! pointer to a global (static) vtable entry which itself contains multiple other pointers
//! (the various functions of the trait, drop, size and align). That makes dynamic
//! dispatch pretty slow (double pointer indirections). However, we only have a single function
//! in the hook trait and don't need a drop implementation (event system is global anyway
//! and never dropped) so we can just store the entire vtable inline.

use anyhow::Result;
use std::ptr::{self, NonNull};

use crate::Event;

/// Opaque handle type that represents an erased type parameter.
///
/// If extern types were stable, this could be implemented as `extern { pub type Opaque; }` but
/// until then we can use this.
///
/// Care should be taken that we don't use a concrete instance of this. It should only be used
/// through a reference, so we can maintain something else's lifetime.
struct Opaque(());

pub(crate) struct ErasedHook {
    data: NonNull<Opaque>,
    call: unsafe fn(NonNull<Opaque>, NonNull<Opaque>, NonNull<Opaque>),
}

impl ErasedHook {
    pub(crate) fn new_dynamic<H: Fn() -> Result<()> + 'static + Send + Sync>(
        hook: H,
    ) -> ErasedHook {
        unsafe fn call<F: Fn() -> Result<()> + 'static + Send + Sync>(
            hook: NonNull<Opaque>,
            _event: NonNull<Opaque>,
            result: NonNull<Opaque>,
        ) {
            let hook: NonNull<F> = hook.cast();
            let result: NonNull<Result<()>> = result.cast();
            let hook: &F = hook.as_ref();
            let res = hook();
            ptr::write(result.as_ptr(), res)
        }

        unsafe {
            ErasedHook {
                data: NonNull::new_unchecked(Box::into_raw(Box::new(hook)) as *mut Opaque),
                call: call::<H>,
            }
        }
    }

    pub(crate) fn new<E: Event, F: Fn(&mut E) -> Result<()>>(hook: F) -> ErasedHook {
        unsafe fn call<E: Event, F: Fn(&mut E) -> Result<()>>(
            hook: NonNull<Opaque>,
            event: NonNull<Opaque>,
            result: NonNull<Opaque>,
        ) {
            let hook: NonNull<F> = hook.cast();
            let mut event: NonNull<E> = event.cast();
            let result: NonNull<Result<()>> = result.cast();
            let hook: &F = hook.as_ref();
            let res = hook(event.as_mut());
            ptr::write(result.as_ptr(), res)
        }

        unsafe {
            ErasedHook {
                data: NonNull::new_unchecked(Box::into_raw(Box::new(hook)) as *mut Opaque),
                call: call::<E, F>,
            }
        }
    }

    pub(crate) unsafe fn call<E: Event>(&self, event: &mut E) -> Result<()> {
        let mut res = Ok(());

        unsafe {
            (self.call)(
                self.data,
                NonNull::from(event).cast(),
                NonNull::from(&mut res).cast(),
            );
        }
        res
    }
}

unsafe impl Sync for ErasedHook {}
unsafe impl Send for ErasedHook {}
@@ -1,8 +1,203 @@
//! `helix-event` contains systems that allow (often async) communication between
-//! different editor components without strongly coupling them. Currently this
-//! crate only contains some smaller facilities but the intend is to add more
-//! functionality in the future ( like a generic hook system)
+//! different editor components without strongly coupling them. Specifically
+//! it allows defining synchronous hooks that run when certain editor events
+//! occur.
//!
//! The core of the event system are hook callbacks and the [`Event`] trait. A
//! hook is essentially just a closure `Fn(event: &mut impl Event) -> Result<()>`
//! that gets called every time an appropriate event is dispatched. The implementation
//! details of the [`Event`] trait are considered private. The [`events`] macro is
//! provided which automatically declares event types. Similarly the `register_hook`
//! macro should be used to (safely) declare event hooks.
//!
//! Hooks run synchronously which can be advantageous since they can modify the
//! current editor state right away (for example to immediately hide the completion
//! popup). However, they can not contain their own state without locking since
//! they only receive immutable references. For handlers that want to track state, do
//! expensive background computations or debouncing, an [`AsyncHook`] is preferable.
//! Async hooks are based around channels that receive events specific to
//! that `AsyncHook` (usually an enum). These events can be sent by synchronous
//! hooks. Due to some limitations around tokio channels the [`send_blocking`]
//! function exported in this crate should be used instead of the builtin
//! `blocking_send`.
//!
//! In addition to the core event system, this crate contains some message queues
//! that allow transfer of data back to the main event loop from async hooks and
//! hooks that may not have access to all application data (for example in helix-view).
//! This includes the ability to control rendering ([`lock_frame`], [`request_redraw`]) and
//! display status messages ([`status`]).
//!
//! Hooks declared in helix-term can furthermore dispatch synchronous jobs to be run on the
//! main loop (including access to the compositor). Ideally that queue will be moved
//! to helix-view in the future if we manage to detach the compositor from its rendering backend.

use anyhow::Result;
pub use cancel::{cancelable_future, cancelation, CancelRx, CancelTx};
pub use debounce::{send_blocking, AsyncHook};
pub use redraw::{lock_frame, redraw_requested, request_redraw, start_frame, RenderLockGuard};
pub use registry::Event;

mod cancel;
mod debounce;
mod hook;
mod redraw;
mod registry;
#[doc(hidden)]
pub mod runtime;
pub mod status;

#[cfg(test)]
mod test;

pub fn register_event<E: Event + 'static>() {
    registry::with_mut(|registry| registry.register_event::<E>())
}

/// Registers a hook that will be called when an event of type `E` is dispatched.
/// This function should usually not be used directly, use the [`register_hook`]
/// macro instead.
///
///
/// # Safety
///
/// `hook` must be totally generic over all lifetime parameters of `E`. For
/// example if `E` was a known type `Foo<'a, 'b>`, then the correct trait bound
/// would be `F: for<'a, 'b, 'c> Fn(&'a mut Foo<'b, 'c>)`, but there is no way to
/// express that kind of constraint for a generic type with the Rust type system
/// as of this writing.
pub unsafe fn register_hook_raw<E: Event>(
    hook: impl Fn(&mut E) -> Result<()> + 'static + Send + Sync,
) {
    registry::with_mut(|registry| registry.register_hook(hook))
}

/// Register a hook solely by event name
pub fn register_dynamic_hook(
    hook: impl Fn() -> Result<()> + 'static + Send + Sync,
    id: &str,
) -> Result<()> {
    registry::with_mut(|reg| reg.register_dynamic_hook(hook, id))
}

pub fn dispatch(e: impl Event) {
    registry::with(|registry| registry.dispatch(e));
}

/// Macro to declare events
///
/// # Examples
///
/// ``` no-compile
/// events! {
///     FileWrite(&Path)
///     ViewScrolled{ view: View, new_pos: ViewOffset }
///     DocumentChanged<'a> { old_doc: &'a Rope, doc: &'a mut Document, changes: &'a ChangeSet }
/// }
///
/// fn init() {
///     register_event::<FileWrite>();
///     register_event::<ViewScrolled>();
///     register_event::<DocumentChanged>();
/// }
///
/// fn save(path: &Path, content: &str){
///     std::fs::write(path, content);
///     dispatch(FileWrite(path));
/// }
/// ```
#[macro_export]
macro_rules! events {
    ($name: ident<$($lt: lifetime),*> { $($data:ident : $data_ty:ty),* } $($rem:tt)*) => {
        pub struct $name<$($lt),*> { $(pub $data: $data_ty),* }
        unsafe impl<$($lt),*> $crate::Event for $name<$($lt),*> {
            const ID: &'static str = stringify!($name);
            const LIFETIMES: usize = $crate::events!(@sum $(1, $lt),*);
            type Static = $crate::events!(@replace_lt $name, $('static, $lt),*);
        }
        $crate::events!{ $($rem)* }
    };
    ($name: ident { $($data:ident : $data_ty:ty),* } $($rem:tt)*) => {
        pub struct $name { $(pub $data: $data_ty),* }
        unsafe impl $crate::Event for $name {
            const ID: &'static str = stringify!($name);
            const LIFETIMES: usize = 0;
            type Static = Self;
        }
        $crate::events!{ $($rem)* }
    };
    () => {};
    (@replace_lt $name: ident, $($lt1: lifetime, $lt2: lifetime),* ) => {$name<$($lt1),*>};
    (@sum $($val: expr, $lt1: lifetime),* ) => {0 $(+ $val)*};
}

/// Safely register statically typed event hooks
#[macro_export]
macro_rules! register_hook {
    // Safety: this is safe because we fully control the type of the event here and
    // ensure all lifetime arguments are fully generic and the correct number of lifetime arguments
    // is present
    (move |$event:ident: &mut $event_ty: ident<$($lt: lifetime),*>| $body: expr) => {
        let val = move |$event: &mut $event_ty<$($lt),*>| $body;
        unsafe {
            // Lifetimes are a bit of a pain. We want to allow events being
            // non-static. Lifetimes don't actually exist at runtime so its
            // fine to essentially transmute the lifetimes as long as we can
            // prove soundness. The hook must therefore accept any combination
            // of lifetimes. In other words fn(&'_ mut Event<'_, '_>) is ok
            // but examples like fn(&'_ mut Event<'_, 'static>) or fn<'a>(&'a
            // mut Event<'a, 'a>) are not. To make this safe we use a macro to
            // forbid the user from specifying lifetimes manually (all lifetimes
            // specified are always function generics and passed to the event so
            // lifetimes can't be used multiple times and using 'static causes a
            // syntax error).
            //
            // There is one soundness hole though: Type Aliases allow
            // "accidentally" creating these problems. For example:
            //
            // type Event2 = Event<'static>.
            // type Event2<'a> = Event<'a, a>.
            //
            // These cases can be caught by counting the number of lifetimes
            // parameters at the parameter declaration site and then at the hook
            // declaration site. By asserting the number of lifetime parameters
            // are equal we can catch all bad type aliases under one assumption:
            // There are no unused lifetime parameters. Introducing a static
            // would reduce the number of arguments of the alias by one in the
            // above example Event2 has zero lifetime arguments while the original
            // event has one lifetime argument. Similar logic applies to using
            // a lifetime argument multiple times. The ASSERT below performs
            // a compile time assertion to ensure exactly this property.
            //
            // With unused lifetime arguments there is still one way to cause unsound code:
            //
            // type Event2<'a, 'b> = Event<'a, 'a>;
            //
            // However, this case will always emit a compiler warning/cause CI
            // failures so a user would have to introduce #[allow(unused)] which
            // is easily caught in review (and a very theoretical case anyway).
            // If we want to be pedantic we can simply compile helix with
            // forbid(unused). All of this is just a safety net to prevent
            // very theoretical misuse. This won't come up in real code (and is
            // easily caught in review).
            #[allow(unused)]
            const ASSERT: () = {
                if <$event_ty as $crate::Event>::LIFETIMES != 0 + $crate::events!(@sum $(1, $lt),*){
                    panic!("invalid type alias");
                }
            };
            $crate::register_hook_raw::<$crate::events!(@replace_lt $event_ty, $('static, $lt),*)>(val);
        }
    };
    (move |$event:ident: &mut $event_ty: ident| $body: expr) => {
        let val = move |$event: &mut $event_ty| $body;
        unsafe {
            #[allow(unused)]
            const ASSERT: () = {
                if <$event_ty as $crate::Event>::LIFETIMES != 0{
                    panic!("invalid type alias");
                }
            };
            $crate::register_hook_raw::<$event_ty>(val);
        }
    };
}
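A compact round trip through the API above; the event name and field are made up for illustration:

```rust
use helix_event::{dispatch, events, register_event, register_hook};

events! {
    ConfigReloaded { theme: String }
}

fn init_sketch() {
    // declare the event once at startup, then hooks can subscribe to it
    register_event::<ConfigReloaded>();
    register_hook!(move |event: &mut ConfigReloaded| {
        log::info!("theme is now {}", event.theme);
        Ok(())
    });
    // every registered hook runs synchronously during dispatch
    dispatch(ConfigReloaded {
        theme: "gruvbox".to_owned(),
    });
}
```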
@@ -0,0 +1,131 @@
//! A global registry where events are registered and can be
//! subscribed to by registering hooks. The registry identifies event
//! types using their type name so multiple event with the same type name
//! may not be registered (will cause a panic to ensure soundness)

use std::any::TypeId;

use anyhow::{bail, Result};
use hashbrown::hash_map::Entry;
use hashbrown::HashMap;
use parking_lot::RwLock;

use crate::hook::ErasedHook;
use crate::runtime_local;

pub struct Registry {
    events: HashMap<&'static str, TypeId, ahash::RandomState>,
    handlers: HashMap<&'static str, Vec<ErasedHook>, ahash::RandomState>,
}

impl Registry {
    pub fn register_event<E: Event + 'static>(&mut self) {
        let ty = TypeId::of::<E>();
        assert_eq!(ty, TypeId::of::<E::Static>());
        match self.events.entry(E::ID) {
            Entry::Occupied(entry) => {
                if entry.get() == &ty {
                    // don't warn during tests to avoid log spam
                    #[cfg(not(feature = "integration_test"))]
                    panic!("Event {} was registered multiple times", E::ID);
                } else {
                    panic!("Multiple events with ID {} were registered", E::ID);
                }
            }
            Entry::Vacant(ent) => {
                ent.insert(ty);
                self.handlers.insert(E::ID, Vec::new());
            }
        }
    }

    /// # Safety
    ///
    /// `hook` must be totally generic over all lifetime parameters of `E`. For
    /// example if `E` was a known type `Foo<'a, 'b>` then the correct trait bound
    /// would be `F: for<'a, 'b, 'c> Fn(&'a mut Foo<'b, 'c>)` but there is no way to
    /// express that kind of constraint for a generic type with the rust type system
    /// right now.
    pub unsafe fn register_hook<E: Event>(
        &mut self,
        hook: impl Fn(&mut E) -> Result<()> + 'static + Send + Sync,
    ) {
        // ensure event type ids match so we can rely on them always matching
        let id = E::ID;
        let Some(&event_id) = self.events.get(id) else {
            panic!("Tried to register handler for unknown event {id}");
        };
        assert!(
            TypeId::of::<E::Static>() == event_id,
            "Tried to register invalid hook for event {id}"
        );
        let hook = ErasedHook::new(hook);
        self.handlers.get_mut(id).unwrap().push(hook);
    }

    pub fn register_dynamic_hook(
        &mut self,
        hook: impl Fn() -> Result<()> + 'static + Send + Sync,
        id: &str,
    ) -> Result<()> {
        // ensure event type ids match so we can rely on them always matching
        if self.events.get(id).is_none() {
            bail!("Tried to register handler for unknown event {id}");
        };
        let hook = ErasedHook::new_dynamic(hook);
        self.handlers.get_mut(id).unwrap().push(hook);
        Ok(())
    }

    pub fn dispatch<E: Event>(&self, mut event: E) {
        let Some(hooks) = self.handlers.get(E::ID) else {
            log::error!("Dispatched unknown event {}", E::ID);
            return;
        };
        let event_id = self.events[E::ID];

        assert_eq!(
            TypeId::of::<E::Static>(),
            event_id,
            "Tried to dispatch invalid event {}",
            E::ID
        );

        for hook in hooks {
            // safety: event type is the same
            if let Err(err) = unsafe { hook.call(&mut event) } {
                log::error!("{} hook failed: {err:#?}", E::ID);
                crate::status::report_blocking(err);
            }
        }
    }
}

runtime_local! {
    static REGISTRY: RwLock<Registry> = RwLock::new(Registry {
        // hardcoded random number is good enough here we don't care about DOS resistance
        // and avoids the additional complexity of `Option<Registry>`
        events: HashMap::with_hasher(ahash::RandomState::with_seeds(423, 9978, 38322, 3280080)),
        handlers: HashMap::with_hasher(ahash::RandomState::with_seeds(423, 99078, 382322, 3282938)),
    });
}

pub(crate) fn with<T>(f: impl FnOnce(&Registry) -> T) -> T {
    f(&REGISTRY.read())
}

pub(crate) fn with_mut<T>(f: impl FnOnce(&mut Registry) -> T) -> T {
    f(&mut REGISTRY.write())
}

/// # Safety
/// The number of specified lifetimes and the static type *must* be correct.
/// This is ensured automatically by the [`events`](crate::events)
/// macro.
pub unsafe trait Event: Sized {
    /// Globally unique (case sensitive) string that identifies this type.
    /// A good candidate is the events type name
    const ID: &'static str;
    const LIFETIMES: usize;
    type Static: Event + 'static;
}
@@ -0,0 +1,88 @@
//! The event system makes use of globals to decouple different systems.
//! However, this can cause problems for the integration test system because
//! it runs multiple helix applications in parallel. Making the globals
//! thread-local does not work because an application can/does have multiple
//! runtime threads. Instead this crate implements a similar notion to a thread
//! local but instead of being local to a single thread, the statics are local to
//! a single tokio-runtime. The implementation requires locking so it's not exactly efficient.
//!
//! Therefore this functionality is only enabled during integration tests and behaves like
//! a normal static otherwise. I would prefer this module to be fully private and to only
//! export the macro but the macro still needs to construct these internals so it's marked
//! `doc(hidden)` instead

use std::ops::Deref;

#[cfg(not(feature = "integration_test"))]
pub struct RuntimeLocal<T: 'static> {
    /// inner API used in the macro, not part of public API
    #[doc(hidden)]
    pub __data: T,
}

#[cfg(not(feature = "integration_test"))]
impl<T> Deref for RuntimeLocal<T> {
    type Target = T;

    fn deref(&self) -> &Self::Target {
        &self.__data
    }
}

#[cfg(not(feature = "integration_test"))]
#[macro_export]
macro_rules! runtime_local {
    ($($(#[$attr:meta])* $vis: vis static $name:ident: $ty: ty = $init: expr;)*) => {
        $($(#[$attr])* $vis static $name: $crate::runtime::RuntimeLocal<$ty> = $crate::runtime::RuntimeLocal {
            __data: $init
        };)*
    };
}

#[cfg(feature = "integration_test")]
pub struct RuntimeLocal<T: 'static> {
    data:
        parking_lot::RwLock<hashbrown::HashMap<tokio::runtime::Id, &'static T, ahash::RandomState>>,
    init: fn() -> T,
}

#[cfg(feature = "integration_test")]
impl<T> RuntimeLocal<T> {
    /// inner API used in the macro, not part of public API
    #[doc(hidden)]
    pub const fn __new(init: fn() -> T) -> Self {
        Self {
            data: parking_lot::RwLock::new(hashbrown::HashMap::with_hasher(
                ahash::RandomState::with_seeds(423, 9978, 38322, 3280080),
            )),
            init,
        }
    }
}

#[cfg(feature = "integration_test")]
impl<T> Deref for RuntimeLocal<T> {
    type Target = T;
    fn deref(&self) -> &T {
        let id = tokio::runtime::Handle::current().id();
        let guard = self.data.read();
        match guard.get(&id) {
            Some(res) => res,
            None => {
                drop(guard);
                let data = Box::leak(Box::new((self.init)()));
                let mut guard = self.data.write();
                guard.insert(id, data);
                data
            }
        }
    }
}

#[cfg(feature = "integration_test")]
#[macro_export]
macro_rules! runtime_local {
    ($($(#[$attr:meta])* $vis: vis static $name:ident: $ty: ty = $init: expr;)*) => {
        $($(#[$attr])* $vis static $name: $crate::runtime::RuntimeLocal<$ty> = $crate::runtime::RuntimeLocal::__new(|| $init);)*
    };
}
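A sketch of declaring a per-runtime static with the macro above; under a normal build this expands to a plain static, while with `integration_test` each tokio runtime lazily gets its own value:

```rust
use std::sync::atomic::{AtomicUsize, Ordering};

use helix_event::runtime_local;

runtime_local! {
    static DISPATCH_COUNT: AtomicUsize = AtomicUsize::new(0);
}

fn bump_sketch() -> usize {
    // RuntimeLocal<T> derefs to T, so the static is used like any other
    DISPATCH_COUNT.fetch_add(1, Ordering::Relaxed)
}
```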
@@ -0,0 +1,68 @@
//! A queue of async messages/errors that will be shown in the editor

use std::borrow::Cow;
use std::time::Duration;

use crate::{runtime_local, send_blocking};
use once_cell::sync::OnceCell;
use tokio::sync::mpsc::{Receiver, Sender};

/// Describes the severity level of a [`StatusMessage`].
#[derive(Debug, Clone, Copy, Eq, PartialEq, PartialOrd, Ord)]
pub enum Severity {
    Hint,
    Info,
    Warning,
    Error,
}

pub struct StatusMessage {
    pub severity: Severity,
    pub message: Cow<'static, str>,
}

impl From<anyhow::Error> for StatusMessage {
    fn from(err: anyhow::Error) -> Self {
        StatusMessage {
            severity: Severity::Error,
            message: err.to_string().into(),
        }
    }
}

impl From<&'static str> for StatusMessage {
    fn from(msg: &'static str) -> Self {
        StatusMessage {
            severity: Severity::Info,
            message: msg.into(),
        }
    }
}

runtime_local! {
    static MESSAGES: OnceCell<Sender<StatusMessage>> = OnceCell::new();
}

pub async fn report(msg: impl Into<StatusMessage>) {
    // if the error channel overflows just ignore it
    let _ = MESSAGES
        .wait()
        .send_timeout(msg.into(), Duration::from_millis(10))
        .await;
}

pub fn report_blocking(msg: impl Into<StatusMessage>) {
    let messages = MESSAGES.wait();
    send_blocking(messages, msg.into())
}

/// Must be called exactly once during editor startup,
/// before any of the messages in this module can be used
///
/// # Panics
/// If called multiple times
pub fn setup() -> Receiver<StatusMessage> {
    let (tx, rx) = tokio::sync::mpsc::channel(128);
    let _ = MESSAGES.set(tx);
    rx
}
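A sketch of wiring the status queue: `setup()` is called once at startup and the receiver is drained from the main loop (the printing here stands in for actual rendering):

```rust
async fn status_sketch() {
    let mut rx = helix_event::status::setup();

    // &'static str converts into an Info-level StatusMessage
    helix_event::status::report_blocking("opened file");

    if let Some(msg) = rx.recv().await {
        println!("[{:?}] {}", msg.severity, msg.message);
    }
}
```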
@@ -0,0 +1,90 @@
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::Arc;
use std::time::Duration;

use parking_lot::Mutex;

use crate::{dispatch, events, register_dynamic_hook, register_event, register_hook};
#[test]
fn smoke_test() {
    events! {
        Event1 { content: String }
        Event2 { content: usize }
    }
    register_event::<Event1>();
    register_event::<Event2>();

    // setup hooks
    let res1: Arc<Mutex<String>> = Arc::default();
    let acc = Arc::clone(&res1);
    register_hook!(move |event: &mut Event1| {
        acc.lock().push_str(&event.content);
        Ok(())
    });
    let res2: Arc<AtomicUsize> = Arc::default();
    let acc = Arc::clone(&res2);
    register_hook!(move |event: &mut Event2| {
        acc.fetch_add(event.content, Ordering::Relaxed);
        Ok(())
    });

    // triggers events
    let thread = std::thread::spawn(|| {
        for i in 0..1000 {
            dispatch(Event2 { content: i });
        }
    });
    std::thread::sleep(Duration::from_millis(1));
    dispatch(Event1 {
        content: "foo".to_owned(),
    });
    dispatch(Event2 { content: 42 });
    dispatch(Event1 {
        content: "bar".to_owned(),
    });
    dispatch(Event1 {
        content: "hello world".to_owned(),
    });
    thread.join().unwrap();

    // check output
    assert_eq!(&**res1.lock(), "foobarhello world");
    assert_eq!(
        res2.load(Ordering::Relaxed),
        42 + (0..1000usize).sum::<usize>()
    );
}

#[test]
fn dynamic() {
    events! {
        Event3 {}
        Event4 { count: usize }
    };
    register_event::<Event3>();
    register_event::<Event4>();

    let count = Arc::new(AtomicUsize::new(0));
    let count1 = count.clone();
    let count2 = count.clone();
    register_dynamic_hook(
        move || {
            count1.fetch_add(2, Ordering::Relaxed);
            Ok(())
        },
        "Event3",
    )
    .unwrap();
    register_dynamic_hook(
        move || {
            count2.fetch_add(3, Ordering::Relaxed);
            Ok(())
        },
        "Event4",
    )
    .unwrap();
    dispatch(Event3 {});
    dispatch(Event4 { count: 0 });
    dispatch(Event3 {});
    assert_eq!(count.load(Ordering::Relaxed), 7)
}
@@ -1,31 +1,34 @@
[package]
name = "helix-lsp"
-version = "0.6.0"
-authors = ["Blaž Hrastnik <blaz@mxxn.io>"]
-edition = "2021"
-license = "MPL-2.0"
description = "LSP client implementation for Helix project"
-categories = ["editor"]
-repository = "https://github.com/helix-editor/helix"
-homepage = "https://helix-editor.com"
+version.workspace = true
+authors.workspace = true
+edition.workspace = true
+license.workspace = true
+rust-version.workspace = true
+categories.workspace = true
+repository.workspace = true
+homepage.workspace = true

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
-helix-core = { version = "0.6", path = "../helix-core" }
-helix-loader = { version = "0.6", path = "../helix-loader" }
-helix-parsec = { version = "0.6", path = "../helix-parsec" }
+helix-stdx = { path = "../helix-stdx" }
+helix-core = { path = "../helix-core" }
+helix-loader = { path = "../helix-loader" }
+helix-parsec = { path = "../helix-parsec" }

anyhow = "1.0"
futures-executor = "0.3"
futures-util = { version = "0.3", features = ["std", "async-await"], default-features = false }
-globset = "0.4.13"
+globset = "0.4.14"
log = "0.4"
-lsp-types = { version = "0.94" }
+lsp-types = { version = "0.95" }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
-thiserror = "1.0"
-tokio = { version = "1.32", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot", "sync"] }
-tokio-stream = "0.1.14"
-which = "4.4"
-parking_lot = "0.12.1"
+tokio = { version = "1.38", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot", "sync"] }
+tokio-stream = "0.1.15"
+parking_lot = "0.12.3"
+arc-swap = "1"
+slotmap.workspace = true
+thiserror.workspace = true
@@ -0,0 +1,105 @@
use std::path::Path;

use globset::{GlobBuilder, GlobSet};

use crate::lsp;

#[derive(Default, Debug)]
pub(crate) struct FileOperationFilter {
    dir_globs: GlobSet,
    file_globs: GlobSet,
}

impl FileOperationFilter {
    fn new(capability: Option<&lsp::FileOperationRegistrationOptions>) -> FileOperationFilter {
        let Some(cap) = capability else {
            return FileOperationFilter::default();
        };
        let mut dir_globs = GlobSet::builder();
        let mut file_globs = GlobSet::builder();
        for filter in &cap.filters {
            // TODO: support other url schemes
            let is_non_file_schema = filter
                .scheme
                .as_ref()
                .is_some_and(|schema| schema != "file");
            if is_non_file_schema {
                continue;
            }
            let ignore_case = filter
                .pattern
                .options
                .as_ref()
                .and_then(|opts| opts.ignore_case)
                .unwrap_or(false);
            let mut glob_builder = GlobBuilder::new(&filter.pattern.glob);
            glob_builder.case_insensitive(!ignore_case);
            let glob = match glob_builder.build() {
                Ok(glob) => glob,
                Err(err) => {
                    log::error!("invalid glob send by LS: {err}");
                    continue;
                }
            };
            match filter.pattern.matches {
                Some(lsp::FileOperationPatternKind::File) => {
                    file_globs.add(glob);
                }
                Some(lsp::FileOperationPatternKind::Folder) => {
                    dir_globs.add(glob);
                }
                None => {
                    file_globs.add(glob.clone());
                    dir_globs.add(glob);
                }
            };
        }
        let file_globs = file_globs.build().unwrap_or_else(|err| {
            log::error!("invalid globs send by LS: {err}");
            GlobSet::empty()
        });
        let dir_globs = dir_globs.build().unwrap_or_else(|err| {
            log::error!("invalid globs send by LS: {err}");
            GlobSet::empty()
        });
        FileOperationFilter {
            dir_globs,
            file_globs,
        }
    }

    pub(crate) fn has_interest(&self, path: &Path, is_dir: bool) -> bool {
        if is_dir {
            self.dir_globs.is_match(path)
        } else {
            self.file_globs.is_match(path)
        }
    }
}

#[derive(Default, Debug)]
pub(crate) struct FileOperationsInterest {
    // TODO: support other notifications
    // did_create: FileOperationFilter,
    // will_create: FileOperationFilter,
    pub did_rename: FileOperationFilter,
    pub will_rename: FileOperationFilter,
    // did_delete: FileOperationFilter,
    // will_delete: FileOperationFilter,
}

impl FileOperationsInterest {
    pub fn new(capabilities: &lsp::ServerCapabilities) -> FileOperationsInterest {
        let capabilities = capabilities
            .workspace
            .as_ref()
            .and_then(|capabilities| capabilities.file_operations.as_ref());
        let Some(capabilities) = capabilities else {
            return FileOperationsInterest::default();
        };
        FileOperationsInterest {
            did_rename: FileOperationFilter::new(capabilities.did_rename.as_ref()),
            will_rename: FileOperationFilter::new(capabilities.will_rename.as_ref()),
        }
    }
}
@@ -1,13 +1,14 @@
[package]
name = "helix-parsec"
-version = "0.6.0"
-authors = ["Blaž Hrastnik <blaz@mxxn.io>"]
-edition = "2021"
-license = "MPL-2.0"
description = "Parser combinators for Helix"
-categories = ["editor"]
-repository = "https://github.com/helix-editor/helix"
-homepage = "https://helix-editor.com"
include = ["src/**/*", "README.md"]
+version.workspace = true
+authors.workspace = true
+edition.workspace = true
+license.workspace = true
+rust-version.workspace = true
+categories.workspace = true
+repository.workspace = true
+homepage.workspace = true

[dependencies]
@ -0,0 +1,29 @@
[package]
name = "helix-stdx"
description = "Standard library extensions"
include = ["src/**/*", "README.md"]
version.workspace = true
authors.workspace = true
edition.workspace = true
license.workspace = true
rust-version.workspace = true
categories.workspace = true
repository.workspace = true
homepage.workspace = true

[dependencies]
dunce = "1.0"
etcetera = "0.8"
ropey = { version = "1.6.1", default-features = false }
which = "6.0"
regex-cursor = "0.1.4"
bitflags = "2.4"

[target.'cfg(windows)'.dependencies]
windows-sys = { version = "0.52", features = ["Win32_Security", "Win32_Security_Authorization", "Win32_System_Threading"] }

[target.'cfg(unix)'.dependencies]
rustix = { version = "0.38", features = ["fs"] }

[dev-dependencies]
tempfile = "3.10"
@ -0,0 +1,91 @@
use std::{
    ffi::OsStr,
    path::{Path, PathBuf},
    sync::RwLock,
};

static CWD: RwLock<Option<PathBuf>> = RwLock::new(None);

// Get the current working directory.
// This information is managed internally as the call to std::env::current_dir
// might fail if the cwd has been deleted.
pub fn current_working_dir() -> PathBuf {
    if let Some(path) = &*CWD.read().unwrap() {
        return path.clone();
    }

    // implementation of cross-platform `pwd -L`
    // we want `pwd -L` so that symlinked directories are handled correctly
    let mut cwd = std::env::current_dir().expect("Couldn't determine current working directory");

    let pwd = std::env::var_os("PWD");
    #[cfg(windows)]
    let pwd = pwd.or_else(|| std::env::var_os("CD"));

    if let Some(pwd) = pwd.map(PathBuf::from) {
        if pwd.canonicalize().ok().as_ref() == Some(&cwd) {
            cwd = pwd;
        }
    }
    let mut dst = CWD.write().unwrap();
    *dst = Some(cwd.clone());

    cwd
}

pub fn set_current_working_dir(path: impl AsRef<Path>) -> std::io::Result<()> {
    let path = crate::path::canonicalize(path);
    std::env::set_current_dir(&path)?;
    let mut cwd = CWD.write().unwrap();
    *cwd = Some(path);
    Ok(())
}

pub fn env_var_is_set(env_var_name: &str) -> bool {
    std::env::var_os(env_var_name).is_some()
}

pub fn binary_exists<T: AsRef<OsStr>>(binary_name: T) -> bool {
    which::which(binary_name).is_ok()
}

pub fn which<T: AsRef<OsStr>>(
    binary_name: T,
) -> Result<std::path::PathBuf, ExecutableNotFoundError> {
    let binary_name = binary_name.as_ref();
    which::which(binary_name).map_err(|err| ExecutableNotFoundError {
        command: binary_name.to_string_lossy().into_owned(),
        inner: err,
    })
}

#[derive(Debug)]
pub struct ExecutableNotFoundError {
    command: String,
    inner: which::Error,
}

impl std::fmt::Display for ExecutableNotFoundError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "command '{}' not found: {}", self.command, self.inner)
    }
}

impl std::error::Error for ExecutableNotFoundError {}

#[cfg(test)]
mod tests {
    use super::{current_working_dir, set_current_working_dir};

    #[test]
    fn current_dir_is_set() {
        let new_path = dunce::canonicalize(std::env::temp_dir()).unwrap();
        let cwd = current_working_dir();
        assert_ne!(cwd, new_path);

        set_current_working_dir(&new_path).expect("Couldn't set new path");

        let cwd = current_working_dir();
        assert_eq!(cwd, new_path);
    }
}
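A small usage sketch for the helpers above; the binary name and environment variable are arbitrary examples, not values Helix itself relies on.

```rust
// Hypothetical example of the helix_stdx::env helpers.
fn report_tooling() {
    // `binary_exists` is a boolean convenience wrapper around the `which` crate.
    if helix_stdx::env::binary_exists("rustfmt") {
        // `which` converts lookup failures into ExecutableNotFoundError, which
        // implements Display and std::error::Error.
        match helix_stdx::env::which("rustfmt") {
            Ok(path) => println!("rustfmt found at {}", path.display()),
            Err(err) => eprintln!("{err}"),
        }
    }

    // `env_var_is_set` checks only for presence, not for any particular value.
    if helix_stdx::env::env_var_is_set("RUST_LOG") {
        println!("RUST_LOG is set");
    }
}
```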
@ -0,0 +1,459 @@
//! From <https://github.com/Freaky/faccess>

use std::io;
use std::path::Path;

use bitflags::bitflags;

// Licensed under MIT from faccess
bitflags! {
    /// Access mode flags for `access` function to test for.
    pub struct AccessMode: u8 {
        /// Path exists
        const EXISTS = 0b0001;
        /// Path can likely be read
        const READ = 0b0010;
        /// Path can likely be written to
        const WRITE = 0b0100;
        /// Path can likely be executed
        const EXECUTE = 0b1000;
    }
}

#[cfg(unix)]
mod imp {
    use super::*;

    use rustix::fs::Access;
    use std::os::unix::fs::{MetadataExt, PermissionsExt};

    pub fn access(p: &Path, mode: AccessMode) -> io::Result<()> {
        let mut imode = Access::empty();

        if mode.contains(AccessMode::EXISTS) {
            imode |= Access::EXISTS;
        }

        if mode.contains(AccessMode::READ) {
            imode |= Access::READ_OK;
        }

        if mode.contains(AccessMode::WRITE) {
            imode |= Access::WRITE_OK;
        }

        if mode.contains(AccessMode::EXECUTE) {
            imode |= Access::EXEC_OK;
        }

        rustix::fs::access(p, imode)?;
        Ok(())
    }

    fn chown(p: &Path, uid: Option<u32>, gid: Option<u32>) -> io::Result<()> {
        let uid = uid.map(|n| unsafe { rustix::fs::Uid::from_raw(n) });
        let gid = gid.map(|n| unsafe { rustix::fs::Gid::from_raw(n) });
        rustix::fs::chown(p, uid, gid)?;
        Ok(())
    }

    pub fn copy_metadata(from: &Path, to: &Path) -> io::Result<()> {
        let from_meta = std::fs::metadata(from)?;
        let to_meta = std::fs::metadata(to)?;
        let from_gid = from_meta.gid();
        let to_gid = to_meta.gid();

        let mut perms = from_meta.permissions();
        perms.set_mode(perms.mode() & 0o0777);
        if from_gid != to_gid && chown(to, None, Some(from_gid)).is_err() {
            let new_perms = (perms.mode() & 0o0707) | ((perms.mode() & 0o07) << 3);
            perms.set_mode(new_perms);
        }

        std::fs::set_permissions(to, perms)?;

        Ok(())
    }
}

// Licensed under MIT from faccess except for `chown`, `copy_metadata` and `is_acl_inherited`
#[cfg(windows)]
mod imp {

    use windows_sys::Win32::Foundation::{CloseHandle, LocalFree, ERROR_SUCCESS, HANDLE, PSID};
    use windows_sys::Win32::Security::Authorization::{
        GetNamedSecurityInfoW, SetNamedSecurityInfoW, SE_FILE_OBJECT,
    };
    use windows_sys::Win32::Security::{
        AccessCheck, AclSizeInformation, GetAce, GetAclInformation, GetSidIdentifierAuthority,
        ImpersonateSelf, IsValidAcl, IsValidSid, MapGenericMask, RevertToSelf,
        SecurityImpersonation, ACCESS_ALLOWED_CALLBACK_ACE, ACL, ACL_SIZE_INFORMATION,
        DACL_SECURITY_INFORMATION, GENERIC_MAPPING, GROUP_SECURITY_INFORMATION, INHERITED_ACE,
        LABEL_SECURITY_INFORMATION, OBJECT_SECURITY_INFORMATION, OWNER_SECURITY_INFORMATION,
        PRIVILEGE_SET, PROTECTED_DACL_SECURITY_INFORMATION, PSECURITY_DESCRIPTOR,
        SID_IDENTIFIER_AUTHORITY, TOKEN_DUPLICATE, TOKEN_QUERY,
    };
    use windows_sys::Win32::Storage::FileSystem::{
        FILE_ACCESS_RIGHTS, FILE_ALL_ACCESS, FILE_GENERIC_EXECUTE, FILE_GENERIC_READ,
        FILE_GENERIC_WRITE,
    };
    use windows_sys::Win32::System::Threading::{GetCurrentThread, OpenThreadToken};

    use super::*;

    use std::ffi::c_void;

    use std::os::windows::{ffi::OsStrExt, fs::OpenOptionsExt};

    struct SecurityDescriptor {
        sd: PSECURITY_DESCRIPTOR,
        owner: PSID,
        group: PSID,
        dacl: *mut ACL,
    }

    impl Drop for SecurityDescriptor {
        fn drop(&mut self) {
            if !self.sd.is_null() {
                unsafe {
                    LocalFree(self.sd);
                }
            }
        }
    }

    impl SecurityDescriptor {
        fn for_path(p: &Path) -> io::Result<SecurityDescriptor> {
            let path = std::fs::canonicalize(p)?;
            let pathos = path.into_os_string();
            let mut pathw: Vec<u16> = Vec::with_capacity(pathos.len() + 1);
            pathw.extend(pathos.encode_wide());
            pathw.push(0);

            let mut sd = std::ptr::null_mut();
            let mut owner = std::ptr::null_mut();
            let mut group = std::ptr::null_mut();
            let mut dacl = std::ptr::null_mut();

            let err = unsafe {
                GetNamedSecurityInfoW(
                    pathw.as_ptr(),
                    SE_FILE_OBJECT,
                    OWNER_SECURITY_INFORMATION
                        | GROUP_SECURITY_INFORMATION
                        | DACL_SECURITY_INFORMATION
                        | LABEL_SECURITY_INFORMATION,
                    &mut owner,
                    &mut group,
                    &mut dacl,
                    std::ptr::null_mut(),
                    &mut sd,
                )
            };

            if err == ERROR_SUCCESS {
                Ok(SecurityDescriptor {
                    sd,
                    owner,
                    group,
                    dacl,
                })
            } else {
                Err(io::Error::last_os_error())
            }
        }

        fn is_acl_inherited(&self) -> bool {
            let mut acl_info: ACL_SIZE_INFORMATION = unsafe { ::core::mem::zeroed() };
            let acl_info_ptr: *mut c_void = &mut acl_info as *mut _ as *mut c_void;
            let mut ace: ACCESS_ALLOWED_CALLBACK_ACE = unsafe { ::core::mem::zeroed() };

            unsafe {
                GetAclInformation(
                    self.dacl,
                    acl_info_ptr,
                    std::mem::size_of_val(&acl_info) as u32,
                    AclSizeInformation,
                )
            };

            for i in 0..acl_info.AceCount {
                let mut ptr = &mut ace as *mut _ as *mut c_void;
                unsafe { GetAce(self.dacl, i, &mut ptr) };
                if (ace.Header.AceFlags as u32 & INHERITED_ACE) != 0 {
                    return true;
                }
            }

            false
        }

        fn descriptor(&self) -> &PSECURITY_DESCRIPTOR {
            &self.sd
        }

        fn owner(&self) -> &PSID {
            &self.owner
        }
    }

    struct ThreadToken(HANDLE);
    impl Drop for ThreadToken {
        fn drop(&mut self) {
            unsafe {
                CloseHandle(self.0);
            }
        }
    }

    impl ThreadToken {
        fn new() -> io::Result<Self> {
            unsafe {
                if ImpersonateSelf(SecurityImpersonation) == 0 {
                    return Err(io::Error::last_os_error());
                }

                // The token handle must be written through a valid out-pointer;
                // passing (and later dereferencing) a null pointer is undefined behaviour.
                let mut token: HANDLE = 0;
                let err = OpenThreadToken(
                    GetCurrentThread(),
                    TOKEN_DUPLICATE | TOKEN_QUERY,
                    0,
                    &mut token,
                );

                RevertToSelf();

                if err == 0 {
                    return Err(io::Error::last_os_error());
                }

                Ok(Self(token))
            }
        }

        fn as_handle(&self) -> &HANDLE {
            &self.0
        }
    }

    // Based roughly on Tcl's NativeAccess()
    // https://github.com/tcltk/tcl/blob/2ee77587e4dc2150deb06b48f69db948b4ab0584/win/tclWinFile.c
    fn eaccess(p: &Path, mut mode: FILE_ACCESS_RIGHTS) -> io::Result<()> {
        let md = p.metadata()?;

        if !md.is_dir() {
            // Read Only is ignored for directories
            if mode & FILE_GENERIC_WRITE == FILE_GENERIC_WRITE && md.permissions().readonly() {
                return Err(io::Error::new(
                    io::ErrorKind::PermissionDenied,
                    "File is read only",
                ));
            }

            // If it doesn't have the correct extension it isn't executable
            if mode & FILE_GENERIC_EXECUTE == FILE_GENERIC_EXECUTE {
                if let Some(ext) = p.extension().and_then(|s| s.to_str()) {
                    match ext {
                        "exe" | "com" | "bat" | "cmd" => (),
                        _ => {
                            return Err(io::Error::new(
                                io::ErrorKind::InvalidData,
                                "File not executable",
                            ))
                        }
                    }
                }
            }

            return std::fs::OpenOptions::new()
                .access_mode(mode)
                .open(p)
                .map(|_| ());
        }

        let sd = SecurityDescriptor::for_path(p)?;

        // Unmapped Samba users are assigned a top level authority of 22
        // ACL tests are likely to be misleading
        const SAMBA_UNMAPPED: SID_IDENTIFIER_AUTHORITY = SID_IDENTIFIER_AUTHORITY {
            Value: [0, 0, 0, 0, 0, 22],
        };
        unsafe {
            let owner = sd.owner();
            if IsValidSid(*owner) != 0
                && (*GetSidIdentifierAuthority(*owner)).Value == SAMBA_UNMAPPED.Value
            {
                return Ok(());
            }
        }

        let token = ThreadToken::new()?;

        let mut privileges: PRIVILEGE_SET = unsafe { std::mem::zeroed() };
        let mut granted_access: u32 = 0;
        let mut privileges_length = std::mem::size_of::<PRIVILEGE_SET>() as u32;
        let mut result = 0;

        let mut mapping = GENERIC_MAPPING {
            GenericRead: FILE_GENERIC_READ,
            GenericWrite: FILE_GENERIC_WRITE,
            GenericExecute: FILE_GENERIC_EXECUTE,
            GenericAll: FILE_ALL_ACCESS,
        };

        unsafe { MapGenericMask(&mut mode, &mut mapping) };

        if unsafe {
            AccessCheck(
                *sd.descriptor(),
                *token.as_handle(),
                mode,
                &mut mapping,
                &mut privileges,
                &mut privileges_length,
                &mut granted_access,
                &mut result,
            )
        } != 0
        {
            if result == 0 {
                Err(io::Error::new(
                    io::ErrorKind::PermissionDenied,
                    "Permission Denied",
                ))
            } else {
                Ok(())
            }
        } else {
            Err(io::Error::last_os_error())
        }
    }

    pub fn access(p: &Path, mode: AccessMode) -> io::Result<()> {
        let mut imode = 0;

        if mode.contains(AccessMode::READ) {
            imode |= FILE_GENERIC_READ;
        }

        if mode.contains(AccessMode::WRITE) {
            imode |= FILE_GENERIC_WRITE;
        }

        if mode.contains(AccessMode::EXECUTE) {
            imode |= FILE_GENERIC_EXECUTE;
        }

        if imode == 0 {
            if p.exists() {
                Ok(())
            } else {
                Err(io::Error::new(io::ErrorKind::NotFound, "Not Found"))
            }
        } else {
            eaccess(p, imode)
        }
    }

    fn chown(p: &Path, sd: SecurityDescriptor) -> io::Result<()> {
        let path = std::fs::canonicalize(p)?;
        let pathos = path.as_os_str();
        let mut pathw = Vec::with_capacity(pathos.len() + 1);
        pathw.extend(pathos.encode_wide());
        pathw.push(0);

        let mut owner = std::ptr::null_mut();
        let mut group = std::ptr::null_mut();
        let mut dacl = std::ptr::null();

        let mut si = OBJECT_SECURITY_INFORMATION::default();
        if unsafe { IsValidSid(sd.owner) } != 0 {
            si |= OWNER_SECURITY_INFORMATION;
            owner = sd.owner;
        }

        if unsafe { IsValidSid(sd.group) } != 0 {
            si |= GROUP_SECURITY_INFORMATION;
            group = sd.group;
        }

        if unsafe { IsValidAcl(sd.dacl) } != 0 {
            si |= DACL_SECURITY_INFORMATION;
            if !sd.is_acl_inherited() {
                si |= PROTECTED_DACL_SECURITY_INFORMATION;
            }
            dacl = sd.dacl as *const _;
        }

        let err = unsafe {
            SetNamedSecurityInfoW(
                pathw.as_ptr(),
                SE_FILE_OBJECT,
                si,
                owner,
                group,
                dacl,
                std::ptr::null(),
            )
        };

        if err == ERROR_SUCCESS {
            Ok(())
        } else {
            Err(io::Error::last_os_error())
        }
    }

    pub fn copy_metadata(from: &Path, to: &Path) -> io::Result<()> {
        let sd = SecurityDescriptor::for_path(from)?;
        chown(to, sd)?;

        let meta = std::fs::metadata(from)?;
        let perms = meta.permissions();

        std::fs::set_permissions(to, perms)?;

        Ok(())
    }
}

// Licensed under MIT from faccess except for `copy_metadata`
#[cfg(not(any(unix, windows)))]
mod imp {
    use super::*;

    pub fn access(p: &Path, mode: AccessMode) -> io::Result<()> {
        if mode.contains(AccessMode::WRITE) {
            if std::fs::metadata(p)?.permissions().readonly() {
                return Err(io::Error::new(
                    io::ErrorKind::PermissionDenied,
                    "Path is read only",
                ));
            } else {
                return Ok(());
            }
        }

        if p.exists() {
            Ok(())
        } else {
            Err(io::Error::new(io::ErrorKind::NotFound, "Path not found"))
        }
    }

    pub fn copy_metadata(from: &Path, to: &Path) -> io::Result<()> {
        let meta = std::fs::metadata(from)?;
        let perms = meta.permissions();
        std::fs::set_permissions(to, perms)?;

        Ok(())
    }
}

pub fn readonly(p: &Path) -> bool {
    match imp::access(p, AccessMode::WRITE) {
        Ok(_) => false,
        Err(err) if err.kind() == std::io::ErrorKind::NotFound => false,
        Err(_) => true,
    }
}

pub fn copy_metadata(from: &Path, to: &Path) -> io::Result<()> {
    imp::copy_metadata(from, to)
}
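The public surface of this module is just `readonly` and `copy_metadata`. As a minimal sketch of how they might be combined when writing through a temporary file (the paths, the function name, and the write step are assumptions, not code from the diff):

```rust
use std::path::Path;

// Hypothetical save routine using the faccess helpers above.
fn replace_file(target: &Path, tmp: &Path) -> std::io::Result<()> {
    // `readonly` reports missing files as writable, so creating new files still works.
    if helix_stdx::faccess::readonly(target) {
        return Err(std::io::Error::new(
            std::io::ErrorKind::PermissionDenied,
            "target is read only",
        ));
    }

    // ... write the new contents to `tmp` here ...

    // Carry permissions (and ownership/ACLs where the platform supports it)
    // from the old file to the replacement before renaming it into place.
    if target.exists() {
        helix_stdx::faccess::copy_metadata(target, tmp)?;
    }
    std::fs::rename(tmp, target)
}
```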
@ -0,0 +1,4 @@
pub mod env;
pub mod faccess;
pub mod path;
pub mod rope;
@ -0,0 +1,231 @@
pub use etcetera::home_dir;

use std::{
    borrow::Cow,
    ffi::OsString,
    path::{Component, Path, PathBuf, MAIN_SEPARATOR_STR},
};

use crate::env::current_working_dir;

/// Replaces the user's home directory in `path` with a tilde `~` if the home directory
/// is available, otherwise returns the path unchanged.
pub fn fold_home_dir<'a, P>(path: P) -> Cow<'a, Path>
where
    P: Into<Cow<'a, Path>>,
{
    let path = path.into();
    if let Ok(home) = home_dir() {
        if let Ok(stripped) = path.strip_prefix(&home) {
            let mut path = OsString::with_capacity(2 + stripped.as_os_str().len());
            path.push("~");
            path.push(MAIN_SEPARATOR_STR);
            path.push(stripped);
            return Cow::Owned(PathBuf::from(path));
        }
    }

    path
}

/// Expands a tilde `~` into the user's home directory if available, otherwise returns the
/// path unchanged. The tilde is only expanded when it is the first component of the path
/// and is followed only by a separator.
pub fn expand_tilde<'a, P>(path: P) -> Cow<'a, Path>
where
    P: Into<Cow<'a, Path>>,
{
    let path = path.into();
    let mut components = path.components();
    if let Some(Component::Normal(c)) = components.next() {
        if c == "~" {
            if let Ok(mut buf) = home_dir() {
                buf.push(components);
                return Cow::Owned(buf);
            }
        }
    }

    path
}

/// Normalize a path without resolving symlinks.
// Strategy: start from the first component and move up. Canonicalize the previous path,
// join the component, canonicalize the new path, strip the prefix and join to the final result.
pub fn normalize(path: impl AsRef<Path>) -> PathBuf {
    let mut components = path.as_ref().components().peekable();
    let mut ret = if let Some(c @ Component::Prefix(..)) = components.peek().cloned() {
        components.next();
        PathBuf::from(c.as_os_str())
    } else {
        PathBuf::new()
    };

    for component in components {
        match component {
            Component::Prefix(..) => unreachable!(),
            Component::RootDir => {
                ret.push(component.as_os_str());
            }
            Component::CurDir => {}
            #[cfg(not(windows))]
            Component::ParentDir => {
                ret.pop();
            }
            #[cfg(windows)]
            Component::ParentDir => {
                if let Some(head) = ret.components().next_back() {
                    match head {
                        Component::Prefix(_) | Component::RootDir => {}
                        Component::CurDir => unreachable!(),
                        // If we left the previous component as ".." it means we met a symlink before and we can't pop the path.
                        Component::ParentDir => {
                            ret.push("..");
                        }
                        Component::Normal(_) => {
                            if ret.is_symlink() {
                                ret.push("..");
                            } else {
                                ret.pop();
                            }
                        }
                    }
                }
            }
            #[cfg(not(windows))]
            Component::Normal(c) => {
                ret.push(c);
            }
            #[cfg(windows)]
            Component::Normal(c) => 'normal: {
                use std::fs::canonicalize;

                let new_path = ret.join(c);
                if new_path.is_symlink() {
                    ret = new_path;
                    break 'normal;
                }
                let (can_new, can_old) = (canonicalize(&new_path), canonicalize(&ret));
                match (can_new, can_old) {
                    (Ok(can_new), Ok(can_old)) => {
                        let stripped = can_new.strip_prefix(can_old);
                        ret.push(stripped.unwrap_or_else(|_| c.as_ref()));
                    }
                    _ => ret.push(c),
                }
            }
        }
    }
    dunce::simplified(&ret).to_path_buf()
}

/// Returns the canonical, absolute form of a path with all intermediate components normalized.
///
/// This function is used instead of [`std::fs::canonicalize`] because we don't want to verify
/// here if the path exists, just normalize its components.
pub fn canonicalize(path: impl AsRef<Path>) -> PathBuf {
    let path = expand_tilde(path.as_ref());
    let path = if path.is_relative() {
        Cow::Owned(current_working_dir().join(path))
    } else {
        path
    };

    normalize(path)
}

pub fn get_relative_path<'a, P>(path: P) -> Cow<'a, Path>
where
    P: Into<Cow<'a, Path>>,
{
    let path = path.into();
    if path.is_absolute() {
        let cwdir = normalize(current_working_dir());
        if let Ok(stripped) = normalize(&path).strip_prefix(cwdir) {
            return Cow::Owned(PathBuf::from(stripped));
        }

        return fold_home_dir(path);
    }

    path
}

/// Returns a truncated filepath where each component of the base path is reduced to its
/// first character, with the whole file name appended.
///
/// Also strips the current working directory from the beginning of the path.
/// Note that this function does not check if the truncated path is unambiguous.
///
/// ```
/// use helix_stdx::path::get_truncated_path;
/// use std::path::Path;
///
/// assert_eq!(
///     get_truncated_path("/home/cnorris/documents/jokes.txt").as_path(),
///     Path::new("/h/c/d/jokes.txt")
/// );
/// assert_eq!(
///     get_truncated_path("jokes.txt").as_path(),
///     Path::new("jokes.txt")
/// );
/// assert_eq!(
///     get_truncated_path("/jokes.txt").as_path(),
///     Path::new("/jokes.txt")
/// );
/// assert_eq!(
///     get_truncated_path("/h/c/d/jokes.txt").as_path(),
///     Path::new("/h/c/d/jokes.txt")
/// );
/// assert_eq!(get_truncated_path("").as_path(), Path::new(""));
/// ```
///
pub fn get_truncated_path(path: impl AsRef<Path>) -> PathBuf {
    let cwd = current_working_dir();
    let path = path.as_ref();
    let path = path.strip_prefix(cwd).unwrap_or(path);
    let file = path.file_name().unwrap_or_default();
    let base = path.parent().unwrap_or_else(|| Path::new(""));
    let mut ret = PathBuf::with_capacity(file.len());
    // A char can't be directly pushed to a PathBuf
    let mut first_char_buffer = String::new();
    for d in base {
        let Some(first_char) = d.to_string_lossy().chars().next() else {
            break;
        };
        first_char_buffer.push(first_char);
        ret.push(&first_char_buffer);
        first_char_buffer.clear();
    }
    ret.push(file);
    ret
}

#[cfg(test)]
mod tests {
    use std::{
        ffi::OsStr,
        path::{Component, Path},
    };

    use crate::path;

    #[test]
    fn expand_tilde() {
        for path in ["~", "~/foo"] {
            let expanded = path::expand_tilde(Path::new(path));

            let tilde = Component::Normal(OsStr::new("~"));

            let mut component_count = 0;
            for component in expanded.components() {
                // No tilde left.
                assert_ne!(component, tilde);
                component_count += 1;
            }

            // The path was at least expanded to something.
            assert_ne!(component_count, 0);
        }
    }
}
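A short sketch of how the path helpers compose; the literal paths and the function name are illustrative only.

```rust
use std::path::Path;

// Hypothetical demonstration of the helix_stdx::path helpers.
fn demo_paths() {
    // Tilde expansion only touches a leading `~` component.
    let config = helix_stdx::path::expand_tilde(Path::new("~/.config/helix/config.toml"));

    // `normalize` resolves `.`/`..` without requiring the path to exist (with
    // extra symlink handling on Windows); `canonicalize` additionally expands
    // `~` and joins relative paths onto the tracked working directory.
    let normalized = helix_stdx::path::normalize("src/../README.md");
    let absolute = helix_stdx::path::canonicalize("./docs");

    // `get_relative_path` goes the other way: strip the working directory if
    // possible, otherwise fold the home directory back into `~`.
    let shown = helix_stdx::path::get_relative_path(absolute.clone());

    println!("{config:?} {normalized:?} {absolute:?} {shown:?}");
}
```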