Merge branch 'master' into help-command

pull/997/head
Omnikar 2 years ago
commit 1e136c62d8

@@ -1,2 +0,0 @@
[alias]
xtask = "run --package xtask --"

@@ -0,0 +1,3 @@
[alias]
xtask = "run --package xtask --"
integration-test = "test --features integration --workspace --test integration"
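For context, the new `integration-test` alias is just shorthand for the longer test invocation it defines; a minimal usage sketch, assuming it is run from the repository root where this `.cargo/config` applies:

```sh
# Run the integration test suite through the new cargo alias...
cargo integration-test
# ...which cargo expands to the full command defined above:
cargo test --features integration --workspace --test integration
```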

.gitattributes

@@ -0,0 +1,11 @@
# Auto detect text files and perform normalization
* text=auto
*.rs text diff=rust
*.toml text diff=toml
*.scm text diff=scheme
*.md text diff=markdown
book/theme/highlight.js linguist-vendored
Cargo.lock text

@@ -0,0 +1 @@
open_collective: helix-editor

@@ -0,0 +1,13 @@
---
name: Enhancement
about: Suggest an improvement
title: ''
labels: C-enhancement
assignees: ''
---
<!--
Your enhancement may already be reported!
Please search on the issue tracker before creating a new issue.
If this is an idea for a feature, please open an "Idea" Discussion instead.
-->

@@ -1,13 +0,0 @@
---
name: Feature request
about: Suggest a new feature or improvement
title: ''
labels: C-enhancement
assignees: ''
---
<!-- Your feature may already be reported!
Please search on the issue tracker before creating one. -->
#### Describe your feature request

@@ -11,37 +11,24 @@ jobs:
   check:
     name: Check
     runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        rust: [stable, msrv]
     steps:
       - name: Checkout sources
        uses: actions/checkout@v3
+      - name: Use MSRV rust toolchain
+        if: matrix.rust == 'msrv'
+        run: cp .github/workflows/msrv-rust-toolchain.toml rust-toolchain.toml
       - name: Install stable toolchain
-        uses: actions-rs/toolchain@v1
+        uses: helix-editor/rust-toolchain@v1
         with:
           profile: minimal
-          toolchain: stable
           override: true
-      - name: Cache cargo registry
-        uses: actions/cache@v3
-        with:
-          path: ~/.cargo/registry
-          key: ${{ runner.os }}-v2-cargo-registry-${{ hashFiles('**/Cargo.lock') }}
-          restore-keys: ${{ runner.os }}-v2-cargo-registry-
-      - name: Cache cargo index
-        uses: actions/cache@v3
-        with:
-          path: ~/.cargo/git
-          key: ${{ runner.os }}-v2-cargo-index-${{ hashFiles('**/Cargo.lock') }}
-          restore-keys: ${{ runner.os }}-v2-cargo-index-
-      - name: Cache cargo target dir
-        uses: actions/cache@v3
-        with:
-          path: target
-          key: ${{ runner.os }}-v2-cargo-build-target-${{ hashFiles('**/Cargo.lock') }}
-          restore-keys: ${{ runner.os }}-v2-cargo-build-target-
+      - uses: Swatinem/rust-cache@v1
       - name: Run cargo check
         uses: actions-rs/cargo@v1
@@ -51,47 +38,27 @@ jobs:
   test:
     name: Test Suite
     runs-on: ${{ matrix.os }}
+    env:
+      RUST_BACKTRACE: 1
+      HELIX_LOG_LEVEL: info
     steps:
       - name: Checkout sources
        uses: actions/checkout@v3
       - name: Install stable toolchain
-        uses: actions-rs/toolchain@v1
+        uses: helix-editor/rust-toolchain@v1
         with:
           profile: minimal
-          toolchain: ${{ matrix.rust }}
           override: true
-      - name: Cache cargo registry
-        uses: actions/cache@v3
-        with:
-          path: ~/.cargo/registry
-          key: ${{ runner.os }}-v2-cargo-registry-${{ hashFiles('**/Cargo.lock') }}
-          restore-keys: ${{ runner.os }}-v2-cargo-registry-
-      - name: Cache cargo index
-        uses: actions/cache@v3
-        with:
-          path: ~/.cargo/git
-          key: ${{ runner.os }}-v2-cargo-index-${{ hashFiles('**/Cargo.lock') }}
-          restore-keys: ${{ runner.os }}-v2-cargo-index-
-      - name: Cache cargo target dir
-        uses: actions/cache@v3
-        with:
-          path: target
-          key: ${{ runner.os }}-v2-cargo-build-target-${{ hashFiles('**/Cargo.lock') }}
-          restore-keys: ${{ runner.os }}-v2-cargo-build-target-
+      - uses: Swatinem/rust-cache@v1
+      - name: Copy minimal languages config
+        run: cp .github/workflows/languages.toml ./languages.toml
       - name: Cache test tree-sitter grammar
         uses: actions/cache@v3
         with:
           path: runtime/grammars
-          key: ${{ runner.os }}-v2-tree-sitter-grammars-${{ hashFiles('languages.toml') }}
-          restore-keys: ${{ runner.os }}-v2-tree-sitter-grammars-
+          key: ${{ runner.os }}-stable-v${{ env.CACHE_VERSION }}-tree-sitter-grammars-${{ hashFiles('languages.toml') }}
+          restore-keys: ${{ runner.os }}-stable-v${{ env.CACHE_VERSION }}-tree-sitter-grammars-
       - name: Run cargo test
         uses: actions-rs/cargo@v1
@@ -99,10 +66,14 @@ jobs:
           command: test
           args: --workspace
+      - name: Run cargo integration-test
+        uses: actions-rs/cargo@v1
+        with:
+          command: integration-test
     strategy:
       matrix:
         os: [ubuntu-latest, macos-latest, windows-latest]
-        rust: [stable]
   lints:
     name: Lints
@@ -112,33 +83,13 @@
        uses: actions/checkout@v3
       - name: Install stable toolchain
-        uses: actions-rs/toolchain@v1
+        uses: helix-editor/rust-toolchain@v1
         with:
           profile: minimal
-          toolchain: stable
           override: true
           components: rustfmt, clippy
-      - name: Cache cargo registry
-        uses: actions/cache@v3
-        with:
-          path: ~/.cargo/registry
-          key: ${{ runner.os }}-v2-cargo-registry-${{ hashFiles('**/Cargo.lock') }}
-          restore-keys: ${{ runner.os }}-v2-cargo-registry-
-      - name: Cache cargo index
-        uses: actions/cache@v3
-        with:
-          path: ~/.cargo/git
-          key: ${{ runner.os }}-v2-cargo-index-${{ hashFiles('**/Cargo.lock') }}
-          restore-keys: ${{ runner.os }}-v2-cargo-index-
-      - name: Cache cargo target dir
-        uses: actions/cache@v3
-        with:
-          path: target
-          key: ${{ runner.os }}-v2-cargo-build-target-${{ hashFiles('**/Cargo.lock') }}
-          restore-keys: ${{ runner.os }}-v2-cargo-build-target-
+      - uses: Swatinem/rust-cache@v1
       - name: Run cargo fmt
         uses: actions-rs/cargo@v1
@@ -152,6 +103,14 @@
           command: clippy
           args: --all-targets -- -D warnings
+      - name: Run cargo doc
+        uses: actions-rs/cargo@v1
+        with:
+          command: doc
+          args: --no-deps --workspace --document-private-items
+        env:
+          RUSTDOCFLAGS: -D warnings
   docs:
     name: Docs
     runs-on: ubuntu-latest
@@ -160,32 +119,12 @@
        uses: actions/checkout@v3
       - name: Install stable toolchain
-        uses: actions-rs/toolchain@v1
+        uses: helix-editor/rust-toolchain@v1
         with:
           profile: minimal
-          toolchain: stable
           override: true
-      - name: Cache cargo registry
-        uses: actions/cache@v3
-        with:
-          path: ~/.cargo/registry
-          key: ${{ runner.os }}-v2-cargo-registry-${{ hashFiles('**/Cargo.lock') }}
-          restore-keys: ${{ runner.os }}-v2-cargo-registry-
-      - name: Cache cargo index
-        uses: actions/cache@v3
-        with:
-          path: ~/.cargo/git
-          key: ${{ runner.os }}-v2-cargo-index-${{ hashFiles('**/Cargo.lock') }}
-          restore-keys: ${{ runner.os }}-v2-cargo-index-
-      - name: Cache cargo target dir
-        uses: actions/cache@v3
-        with:
-          path: target
-          key: ${{ runner.os }}-v2-cargo-build-target-${{ hashFiles('**/Cargo.lock') }}
-          restore-keys: ${{ runner.os }}-v2-cargo-build-target-
+      - uses: Swatinem/rust-cache@v1
       - name: Generate docs
         uses: actions-rs/cargo@v1
@@ -200,3 +139,23 @@
            || (echo "Run 'cargo xtask docgen', commit the changes and push again" \
                && exit 1)
+  queries:
+    name: Tree-sitter queries
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout sources
+        uses: actions/checkout@v3
+      - name: Install stable toolchain
+        uses: helix-editor/rust-toolchain@v1
+        with:
+          profile: minimal
+          override: true
+      - uses: Swatinem/rust-cache@v1
+      - name: Generate docs
+        uses: actions-rs/cargo@v1
+        with:
+          command: xtask
+          args: query-check
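The two checks added to this workflow above (the `cargo doc` lint step and the new Tree-sitter queries job) can also be run locally before pushing; a rough sketch, assuming a stable toolchain and the `xtask` alias from `.cargo/config`:

```sh
# Mirror the new "Run cargo doc" step: treat rustdoc warnings as errors.
RUSTDOCFLAGS="-D warnings" cargo doc --no-deps --workspace --document-private-items

# Mirror the new "Tree-sitter queries" job.
cargo xtask query-check
```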

@@ -14,7 +14,7 @@
        uses: actions/checkout@v3
       - name: Install nix
-        uses: cachix/install-nix-action@v16
+        uses: cachix/install-nix-action@v17
       - name: Authenticate with Cachix
        uses: cachix/cachix-action@v10
@@ -23,4 +23,4 @@
          authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
       - name: Build nix flake
-        run: nix build
+        run: nix build -L

@@ -0,0 +1,3 @@
[toolchain]
channel = "1.61.0"
components = ["rustfmt", "rust-src"]

@@ -4,6 +4,18 @@ on:
     tags:
       - '[0-9]+.[0-9]+'
       - '[0-9]+.[0-9]+.[0-9]+'
+    branches:
+      - 'patch/ci-release-*'
+  pull_request:
+    paths:
+      - '.github/workflows/release.yml'
+env:
+  # Preview mode: Publishes the build output as a CI artifact instead of creating
+  # a release, allowing for manual inspection of the output. This mode is
+  # activated if the CI run was triggered by events other than pushed tags, or
+  # if the repository is a fork.
+  preview: ${{ !startsWith(github.ref, 'refs/tags/') || github.repository != 'helix-editor/helix' }}
 jobs:
fetch-grammars: fetch-grammars:
@@ -14,40 +26,18 @@
        uses: actions/checkout@v3
       - name: Install stable toolchain
-        uses: actions-rs/toolchain@v1
+        uses: helix-editor/rust-toolchain@v1
         with:
           profile: minimal
-          toolchain: stable
           override: true
-      - name: Cache cargo registry
-        uses: actions/cache@v3
-        with:
-          path: ~/.cargo/registry
-          key: ${{ runner.os }}-v2-cargo-registry-${{ hashFiles('**/Cargo.lock') }}
-          restore-keys: ${{ runner.os }}-v2-cargo-registry-
-      - name: Cache cargo index
-        uses: actions/cache@v3
-        with:
-          path: ~/.cargo/git
-          key: ${{ runner.os }}-v2-cargo-index-${{ hashFiles('**/Cargo.lock') }}
-          restore-keys: ${{ runner.os }}-v2-cargo-index-
-      - name: Cache cargo target dir
-        uses: actions/cache@v3
-        with:
-          path: target
-          key: ${{ runner.os }}-v2-cargo-build-target-${{ hashFiles('**/Cargo.lock') }}
-          restore-keys: ${{ runner.os }}-v2-cargo-build-target-
+      - uses: Swatinem/rust-cache@v1
       - name: Fetch tree-sitter grammars
         uses: actions-rs/cargo@v1
-        env:
-          HELIX_DISABLE_AUTO_GRAMMAR_BUILD: yes
         with:
           command: run
-          args: -- --grammar fetch
+          args: --package=helix-loader --bin=hx-loader
       - name: Bundle grammars
        run: tar cJf grammars.tar.xz -C runtime/grammars/sources .
@@ -71,11 +61,16 @@
             rust: stable
             target: x86_64-unknown-linux-gnu
             cross: false
-          # - build: aarch64-linux
-          #   os: ubuntu-20.04
-          #   rust: stable
-          #   target: aarch64-unknown-linux-gnu
-          #   cross: true
+          - build: aarch64-linux
+            os: ubuntu-20.04
+            rust: stable
+            target: aarch64-unknown-linux-gnu
+            cross: true
+          - build: riscv64-linux
+            os: ubuntu-20.04
+            rust: stable
+            target: riscv64gc-unknown-linux-gnu
+            cross: true
           - build: x86_64-macos
             os: macos-latest
             rust: stable
@@ -86,10 +81,12 @@
             rust: stable
             target: x86_64-pc-windows-msvc
             cross: false
-          # - build: aarch64-macos
-          #   os: macos-latest
-          #   rust: stable
-          #   target: aarch64-apple-darwin
+          - build: aarch64-macos
+            os: macos-latest
+            rust: stable
+            target: aarch64-apple-darwin
+            cross: false
+            skip_tests: true # x86_64 host can't run aarch64 code
           # - build: x86_64-win-gnu
           #   os: windows-2019
           #   rust: stable-x86_64-gnu
@@ -104,7 +101,7 @@
        uses: actions/checkout@v3
       - name: Download grammars
-        uses: actions/download-artifact@v2
+        uses: actions/download-artifact@v3
       - name: Move grammars under runtime
        if: "!startsWith(matrix.os, 'windows')"
@@ -120,13 +117,30 @@
          target: ${{ matrix.target }}
          override: true
+      # Install a pre-release version of Cross
+      # TODO: We need to pre-install Cross because we need cross-rs/cross#591 to
+      #       get a newer C++ compiler toolchain. Remove this step when Cross
+      #       0.3.0, which includes cross-rs/cross#591, is released.
+      - name: Install Cross
+        if: "matrix.cross"
+        run: cargo install cross --git https://github.com/cross-rs/cross.git --rev 47df5c76e7cba682823a0b6aa6d95c17b31ba63a
       - name: Run cargo test
         uses: actions-rs/cargo@v1
+        if: "!matrix.skip_tests"
         with:
           use-cross: ${{ matrix.cross }}
           command: test
           args: --release --locked --target ${{ matrix.target }} --workspace
+      - name: Set profile.release.strip = true
+        shell: bash
+        run: |
+          cat >> .cargo/config.toml <<EOF
+          [profile.release]
+          strip = true
+          EOF
       - name: Build release binary
         uses: actions-rs/cargo@v1
         with:
@@ -134,29 +148,62 @@
           command: build
           args: --release --locked --target ${{ matrix.target }}
-      - name: Strip release binary (linux and macos)
-        if: matrix.build == 'x86_64-linux' || matrix.build == 'x86_64-macos'
-        run: strip "target/${{ matrix.target }}/release/hx"
-      - name: Strip release binary (arm)
-        if: matrix.build == 'aarch64-linux'
-        run: |
-          docker run --rm -v \
-            "$PWD/target:/target:Z" \
-            rustembedded/cross:${{ matrix.target }} \
-            aarch64-linux-gnu-strip \
-            /target/${{ matrix.target }}/release/hx
+      - name: Build AppImage
+        shell: bash
+        if: matrix.build == 'aarch64-linux' || matrix.build == 'x86_64-linux'
+        run: |
+          mkdir dist
+          name=dev
+          if [[ $GITHUB_REF == refs/tags/* ]]; then
+            name=${GITHUB_REF:10}
+          fi
+          build="${{ matrix.build }}"
+          export VERSION="$name"
+          export ARCH=${build%-linux}
+          export APP=helix
+          export OUTPUT="helix-$VERSION-$ARCH.AppImage"
+          export UPDATE_INFORMATION="gh-releases-zsync|$GITHUB_REPOSITORY_OWNER|helix|latest|$APP-*-$ARCH.AppImage.zsync"
+          mkdir -p "$APP.AppDir"/usr/{bin,lib/helix}
+          cp "target/${{ matrix.target }}/release/hx" "$APP.AppDir/usr/bin/hx"
+          rm -rf runtime/grammars/sources
+          cp -r runtime "$APP.AppDir/usr/lib/helix/runtime"
+          cat << 'EOF' > "$APP.AppDir/AppRun"
+          #!/bin/sh
+          APPDIR="$(dirname "$(readlink -f "${0}")")"
+          HELIX_RUNTIME="$APPDIR/usr/lib/helix/runtime" exec "$APPDIR/usr/bin/hx" "$@"
+          EOF
+          chmod 755 "$APP.AppDir/AppRun"
+          curl -Lo linuxdeploy-x86_64.AppImage \
+            https://github.com/linuxdeploy/linuxdeploy/releases/download/continuous/linuxdeploy-x86_64.AppImage
+          chmod +x linuxdeploy-x86_64.AppImage
+          ./linuxdeploy-x86_64.AppImage \
+            --appdir "$APP.AppDir" -d contrib/Helix.desktop \
+            -i contrib/helix.png --output appimage
+          mv "$APP-$VERSION-$ARCH.AppImage" \
+            "$APP-$VERSION-$ARCH.AppImage.zsync" dist
       - name: Build archive
         shell: bash
         run: |
-          mkdir dist
+          mkdir -p dist
           if [ "${{ matrix.os }}" = "windows-2019" ]; then
             cp "target/${{ matrix.target }}/release/hx.exe" "dist/"
           else
             cp "target/${{ matrix.target }}/release/hx" "dist/"
           fi
-          rm -rf runtime/grammars/sources
+          if [ -d runtime/grammars/sources ]; then
+            rm -rf runtime/grammars/sources
+          fi
           cp -r runtime dist
       - uses: actions/upload-artifact@v3
@@ -172,7 +219,7 @@
       - name: Checkout sources
        uses: actions/checkout@v3
-      - uses: actions/download-artifact@v2
+      - uses: actions/download-artifact@v3
       - name: Calculate tag name
        run: |
@@ -206,10 +253,16 @@
          pkgname=helix-$TAG-$platform
          mkdir $pkgname
          cp $source/LICENSE $source/README.md $pkgname
+          mkdir $pkgname/contrib
+          cp -r $source/contrib/completion $pkgname/contrib
          mv bins-$platform/runtime $pkgname/
          mv bins-$platform/hx$exe $pkgname
          chmod +x $pkgname/hx$exe
+          if [[ "$platform" = "aarch64-linux" || "$platform" = "x86_64-linux" ]]; then
+            mv bins-$platform/helix-*.AppImage* dist/
+          fi
          if [ "$exe" = "" ]; then
            tar cJf dist/$pkgname.tar.xz $pkgname
          else
@@ -222,9 +275,17 @@
       - name: Upload binaries to release
         uses: svenstaro/upload-release-action@v2
+        if: env.preview == 'false'
         with:
           repo_token: ${{ secrets.GITHUB_TOKEN }}
           file: dist/*
           file_glob: true
           tag: ${{ steps.tagname.outputs.val }}
           overwrite: true
+      - name: Upload binaries as artifact
+        uses: actions/upload-artifact@v3
+        if: env.preview == 'true'
+        with:
+          name: release
+          path: dist/*
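For reference, the AppImage produced by the new packaging steps is self-contained: the generated AppRun script above points `HELIX_RUNTIME` at the bundled runtime before launching `hx`. A hedged usage sketch (the file name below is illustrative; the real name depends on the release version and architecture):

```sh
# Make the downloaded release asset executable and run it directly.
chmod +x helix-22.08-x86_64.AppImage
./helix-22.08-x86_64.AppImage README.md
```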

.gitignore

@@ -1,6 +1,5 @@
 target
 .direnv
 helix-term/rustfmt.toml
-helix-syntax/languages/
 result
 runtime/grammars

@@ -0,0 +1,5 @@
# Things that we don't want ripgrep to search that we do want in git
# https://github.com/BurntSushi/ripgrep/blob/master/GUIDE.md#automatic-filtering
# Minified JS vendored from mdbook
book/theme/highlight.js

@@ -1,3 +1,457 @@
# 22.08.1 (2022-09-01)
This is a patch release that fixes a panic caused by closing splits or buffers. ([#3633](https://github.com/helix-editor/helix/pull/3633))
# 22.08 (2022-08-31)
A big _thank you_ to our contributors! This release had 87 contributors.
As usual, the following is a summary of each of the changes since the last release.
For the full log, check out the [git log](https://github.com/helix-editor/helix/compare/22.05..22.08).
Breaking changes:
- Special keymap names for `+`, `;` and `%` have been replaced with those literal characters ([#2677](https://github.com/helix-editor/helix/pull/2677), [#3556](https://github.com/helix-editor/helix/pull/3556))
- `A-Left` and `A-Right` have become `C-Left` and `C-Right` for word-wise motion ([#2500](https://github.com/helix-editor/helix/pull/2500))
- The `catppuccin` theme's name has been corrected from `catpuccin` ([#2713](https://github.com/helix-editor/helix/pull/2713))
- `catppuccin` has been replaced by its variants, `catppuccin_frappe`, `catppuccin_latte`, `catppuccin_macchiato`, `catppuccin_mocha` ([#3281](https://github.com/helix-editor/helix/pull/3281))
- `C-n` and `C-p` have been removed from the default insert mode keymap ([#3340](https://github.com/helix-editor/helix/pull/3340))
- The `extend_line` command has been replaced with `extend_line_below` and a new `extend_line` command now exists ([#3046](https://github.com/helix-editor/helix/pull/3046))
Features:
- Add an integration testing harness ([#2359](https://github.com/helix-editor/helix/pull/2359))
- Indent guides ([#1796](https://github.com/helix-editor/helix/pull/1796), [906259c](https://github.com/helix-editor/helix/commit/906259c))
- Cursorline ([#2170](https://github.com/helix-editor/helix/pull/2170), [fde9e03](https://github.com/helix-editor/helix/commit/fde9e03))
- Select all instances of the symbol under the cursor (`<space>h`) ([#2738](https://github.com/helix-editor/helix/pull/2738))
- A picker for document and workspace LSP diagnostics (`<space>g`/`<space>G`) ([#2013](https://github.com/helix-editor/helix/pull/2013), [#2984](https://github.com/helix-editor/helix/pull/2984))
- Allow styling the mode indicator per-mode ([#2676](https://github.com/helix-editor/helix/pull/2676))
- Live preview for the theme picker ([#1798](https://github.com/helix-editor/helix/pull/1798))
- Configurable statusline ([#2434](https://github.com/helix-editor/helix/pull/2434))
- LSP SignatureHelp ([#1755](https://github.com/helix-editor/helix/pull/1755), [a8b123f](https://github.com/helix-editor/helix/commit/a8b123f))
- A picker for the jumplist ([#3033](https://github.com/helix-editor/helix/pull/3033))
- Configurable external formatter binaries ([#2942](https://github.com/helix-editor/helix/pull/2942))
- Bracketed paste support ([#3233](https://github.com/helix-editor/helix/pull/3233), [12ddd03](https://github.com/helix-editor/helix/commit/12ddd03))
Commands:
- `:insert-output` and `:append-output` which insert/append output from a shell command ([#2589](https://github.com/helix-editor/helix/pull/2589))
- The `t` textobject (`]t`/`[t`/`mit`/`mat`) for navigating tests ([#2807](https://github.com/helix-editor/helix/pull/2807))
- `C-Backspace` and `C-Delete` for word-wise deletion in prompts and pickers ([#2500](https://github.com/helix-editor/helix/pull/2500))
- `A-Delete` for forward word-wise deletion in insert mode ([#2500](https://github.com/helix-editor/helix/pull/2500))
- `C-t` for toggling the preview pane in pickers ([#3021](https://github.com/helix-editor/helix/pull/3021))
- `extend_line` now extends in the direction of the cursor ([#3046](https://github.com/helix-editor/helix/pull/3046))
Usability improvements and fixes:
- Fix tree-sitter parser builds on illumos ([#2602](https://github.com/helix-editor/helix/pull/2602))
- Remove empty scratch buffer from jumplists when removing ([5ed6223](https://github.com/helix-editor/helix/commit/5ed6223))
- Fix panic on undo after `shell_append_output` ([#2625](https://github.com/helix-editor/helix/pull/2625))
- Sort LSP edits by start range ([3d91c99](https://github.com/helix-editor/helix/commit/3d91c99))
- Be more defensive about LSP URI conversions ([6de6a3e](https://github.com/helix-editor/helix/commit/6de6a3e), [378f438](https://github.com/helix-editor/helix/commit/378f438))
- Ignore SendErrors when grammar builds fail ([#2641](https://github.com/helix-editor/helix/pull/2641))
- Append `set_line_ending` to document history ([#2649](https://github.com/helix-editor/helix/pull/2649))
- Use last prompt entry when empty ([b14c258](https://github.com/helix-editor/helix/commit/b14c258), [#2870](https://github.com/helix-editor/helix/pull/2870))
- Do not add extra line breaks in markdown lists ([#2689](https://github.com/helix-editor/helix/pull/2689))
- Disable dialyzer by default for ElixirLS ([#2710](https://github.com/helix-editor/helix/pull/2710))
- Refactor textobject node capture ([#2741](https://github.com/helix-editor/helix/pull/2741))
- Prevent re-selecting the same range with `expand_selection` ([#2760](https://github.com/helix-editor/helix/pull/2760))
- Introduce `keyword.storage` highlight scope ([#2731](https://github.com/helix-editor/helix/pull/2731))
- Handle symlinks more consistently ([#2718](https://github.com/helix-editor/helix/pull/2718))
- Improve markdown list rendering ([#2687](https://github.com/helix-editor/helix/pull/2687))
- Update auto-pairs and idle-timeout settings when the config is reloaded ([#2736](https://github.com/helix-editor/helix/pull/2736))
- Fix panic on closing last buffer ([#2658](https://github.com/helix-editor/helix/pull/2658))
- Prevent modifying jumplist until jumping to a reference ([#2670](https://github.com/helix-editor/helix/pull/2670))
- Ensure `:quit` and `:quit!` take no arguments ([#2654](https://github.com/helix-editor/helix/pull/2654))
- Fix crash due to cycles when replaying macros ([#2647](https://github.com/helix-editor/helix/pull/2647))
- Pass LSP FormattingOptions ([#2635](https://github.com/helix-editor/helix/pull/2635))
- Prevent showing colors when the health-check is piped ([#2836](https://github.com/helix-editor/helix/pull/2836))
- Use character indexing for mouse selection ([#2839](https://github.com/helix-editor/helix/pull/2839))
- Display the highest severity diagnostic for a line in the gutter ([#2835](https://github.com/helix-editor/helix/pull/2835))
- Default the ruler color to red background ([#2669](https://github.com/helix-editor/helix/pull/2669))
- Make `move_vertically` aware of tabs and wide characters ([#2620](https://github.com/helix-editor/helix/pull/2620))
- Enable shellwords for Windows ([#2767](https://github.com/helix-editor/helix/pull/2767))
- Add history suggestions to global search ([#2717](https://github.com/helix-editor/helix/pull/2717))
- Fix the scrollbar's length proportional to total menu items ([#2860](https://github.com/helix-editor/helix/pull/2860))
- Reset terminal modifiers for diagnostic text ([#2861](https://github.com/helix-editor/helix/pull/2861), [#2900](https://github.com/helix-editor/helix/pull/2900))
- Redetect indents and line-endings after a Language Server replaces the document ([#2778](https://github.com/helix-editor/helix/pull/2778))
- Check selection's visible width when copying on mouse click ([#2711](https://github.com/helix-editor/helix/pull/2711))
- Fix edge-case in tree-sitter `expand_selection` command ([#2877](https://github.com/helix-editor/helix/pull/2877))
- Add a single-width left margin for the completion popup ([#2728](https://github.com/helix-editor/helix/pull/2728))
- Right-align the scrollbar in the completion popup ([#2754](https://github.com/helix-editor/helix/pull/2754))
- Fix recursive macro crash and empty macro lockout ([#2902](https://github.com/helix-editor/helix/pull/2902))
- Fix backwards character deletion on other whitespaces ([#2855](https://github.com/helix-editor/helix/pull/2855))
- Add search and space/backspace bindings to view modes ([#2803](https://github.com/helix-editor/helix/pull/2803))
- Add `--vsplit` and `--hsplit` CLI arguments for opening in splits ([#2773](https://github.com/helix-editor/helix/pull/2773), [#3073](https://github.com/helix-editor/helix/pull/3073))
- Sort themes, languages and files inputs by score and name ([#2675](https://github.com/helix-editor/helix/pull/2675))
- Highlight entire rows in ([#2939](https://github.com/helix-editor/helix/pull/2939))
- Fix backwards selection duplication widening bug ([#2945](https://github.com/helix-editor/helix/pull/2945), [#3024](https://github.com/helix-editor/helix/pull/3024))
- Skip serializing Option type DAP fields ([44f5963](https://github.com/helix-editor/helix/commit/44f5963))
- Fix required `cwd` field in DAP `RunTerminalArguments` type ([85411be](https://github.com/helix-editor/helix/commit/85411be), [#3240](https://github.com/helix-editor/helix/pull/3240))
- Add LSP `workspace/applyEdit` to client capabilities ([#3012](https://github.com/helix-editor/helix/pull/3012))
- Respect count for repeating motion ([#3057](https://github.com/helix-editor/helix/pull/3057))
- Respect count for selecting next/previous match ([#3056](https://github.com/helix-editor/helix/pull/3056))
- Respect count for tree-sitter motions ([#3058](https://github.com/helix-editor/helix/pull/3058))
- Make gutters padding optional ([#2996](https://github.com/helix-editor/helix/pull/2996))
- Support pre-filling prompts ([#2459](https://github.com/helix-editor/helix/pull/2459), [#3259](https://github.com/helix-editor/helix/pull/3259))
- Add statusline element to display file line-endings ([#3113](https://github.com/helix-editor/helix/pull/3113))
- Keep jump and file history when using `:split` ([#3031](https://github.com/helix-editor/helix/pull/3031), [#3160](https://github.com/helix-editor/helix/pull/3160))
- Make tree-sitter query `; inherits <language>` feature imperative ([#2470](https://github.com/helix-editor/helix/pull/2470))
- Indent with tabs by default ([#3095](https://github.com/helix-editor/helix/pull/3095))
- Fix non-msvc grammar compilation on Windows ([#3190](https://github.com/helix-editor/helix/pull/3190))
- Add spacer element to the statusline ([#3165](https://github.com/helix-editor/helix/pull/3165), [255c173](https://github.com/helix-editor/helix/commit/255c173))
- Make gutters padding automatic ([#3163](https://github.com/helix-editor/helix/pull/3163))
- Add `code` for LSP `Diagnostic` type ([#3096](https://github.com/helix-editor/helix/pull/3096))
- Add position percentage to the statusline ([#3168](https://github.com/helix-editor/helix/pull/3168))
- Add a configurable and themable statusline separator string ([#3175](https://github.com/helix-editor/helix/pull/3175))
- Use OR of all selections when `search_selection` acts on multiple selections ([#3138](https://github.com/helix-editor/helix/pull/3138))
- Add clipboard information to logs and the healthcheck ([#3271](https://github.com/helix-editor/helix/pull/3271))
- Fix align selection behavior on tabs ([#3276](https://github.com/helix-editor/helix/pull/3276))
- Fix terminal cursor shape reset ([#3289](https://github.com/helix-editor/helix/pull/3289))
- Add an `injection.include-unnamed-children` predicate to injections queries ([#3129](https://github.com/helix-editor/helix/pull/3129))
- Add a `-c`/`--config` CLI flag for specifying config file location ([#2666](https://github.com/helix-editor/helix/pull/2666))
- Detect indent-style in `:set-language` command ([#3330](https://github.com/helix-editor/helix/pull/3330))
- Fix non-deterministic highlighting ([#3275](https://github.com/helix-editor/helix/pull/3275))
- Avoid setting the stdin handle when not necessary ([#3248](https://github.com/helix-editor/helix/pull/3248), [#3379](https://github.com/helix-editor/helix/pull/3379))
- Fix indent guide styling ([#3324](https://github.com/helix-editor/helix/pull/3324))
- Fix tab highlight when tab is partially visible ([#3313](https://github.com/helix-editor/helix/pull/3313))
- Add completion for nested settings ([#3183](https://github.com/helix-editor/helix/pull/3183))
- Advertise WorkspaceSymbolClientCapabilities LSP client capability ([#3361](https://github.com/helix-editor/helix/pull/3361))
- Remove duplicate entries from the theme picker ([#3439](https://github.com/helix-editor/helix/pull/3439))
- Shorten output for grammar fetching and building ([#3396](https://github.com/helix-editor/helix/pull/3396))
- Add a `tabpad` option for visible tab padding whitespace characters ([#3458](https://github.com/helix-editor/helix/pull/3458))
- Make DAP external terminal provider configurable ([cb7615e](https://github.com/helix-editor/helix/commit/cb7615e))
- Use health checkmark character with shorter width ([#3505](https://github.com/helix-editor/helix/pull/3505))
- Reset document mode to normal on view focus loss ([e4c9d40](https://github.com/helix-editor/helix/commit/e4c9d40))
- Render indented code-blocks in markdown ([#3503](https://github.com/helix-editor/helix/pull/3503))
- Add WezTerm to DAP terminal provider defaults ([#3588](https://github.com/helix-editor/helix/pull/3588))
- Derive `Document` language name from `languages.toml` `name` key ([#3338](https://github.com/helix-editor/helix/pull/3338))
- Fix process spawning error handling ([#3349](https://github.com/helix-editor/helix/pull/3349))
- Don't resolve links for `:o` completion ([8a4fbf6](https://github.com/helix-editor/helix/commit/8a4fbf6))
- Recalculate completion after pasting into prompt ([e77b7d1](https://github.com/helix-editor/helix/commit/e77b7d1))
- Fix extra selections with regex anchors ([#3598](https://github.com/helix-editor/helix/pull/3598))
- Move mode transition logic to `handle_keymap_event` ([#2634](https://github.com/helix-editor/helix/pull/2634))
- Add documents to view history when using the jumplist ([#3593](https://github.com/helix-editor/helix/pull/3593))
- Prevent panic when loading tree-sitter queries ([fa1dc7e](https://github.com/helix-editor/helix/commit/fa1dc7e))
- Discard LSP publishDiagnostic when LS is not initialized ([#3403](https://github.com/helix-editor/helix/pull/3403))
- Refactor tree-sitter textobject motions as repeatable motions ([#3264](https://github.com/helix-editor/helix/pull/3264))
- Avoid command execution hooks on closed docs ([#3613](https://github.com/helix-editor/helix/pull/3613))
- Share `restore_term` code between panic and normal exits ([#2612](https://github.com/helix-editor/helix/pull/2612))
- Show clipboard info in `--health` output ([#2947](https://github.com/helix-editor/helix/pull/2947))
- Recalculate completion when going through prompt history ([#3193](https://github.com/helix-editor/helix/pull/3193))
Themes:
- Update `tokyonight` and `tokyonight_storm` themes ([#2606](https://github.com/helix-editor/helix/pull/2606))
- Update `solarized_light` themes ([#2626](https://github.com/helix-editor/helix/pull/2626))
- Fix `catpuccin` `ui.popup` theme ([#2644](https://github.com/helix-editor/helix/pull/2644))
- Update selection style of `night_owl` ([#2668](https://github.com/helix-editor/helix/pull/2668))
- Fix spelling of `catppuccin` theme ([#2713](https://github.com/helix-editor/helix/pull/2713))
- Update `base16_default`'s `ui.menu` ([#2794](https://github.com/helix-editor/helix/pull/2794))
- Add `noctis_bordo` ([#2830](https://github.com/helix-editor/helix/pull/2830))
- Add `acme` ([#2876](https://github.com/helix-editor/helix/pull/2876))
- Add `meliora` ([#2884](https://github.com/helix-editor/helix/pull/2884), [#2890](https://github.com/helix-editor/helix/pull/2890))
- Add cursorline scopes to various themes ([33d287a](https://github.com/helix-editor/helix/commit/33d287a), [#2892](https://github.com/helix-editor/helix/pull/2892), [#2915](https://github.com/helix-editor/helix/pull/2915), [#2916](https://github.com/helix-editor/helix/pull/2916), [#2918](https://github.com/helix-editor/helix/pull/2918), [#2927](https://github.com/helix-editor/helix/pull/2927), [#2925](https://github.com/helix-editor/helix/pull/2925), [#2938](https://github.com/helix-editor/helix/pull/2938), [#2962](https://github.com/helix-editor/helix/pull/2962), [#3054](https://github.com/helix-editor/helix/pull/3054))
- Add mode colors to various themes ([#2926](https://github.com/helix-editor/helix/pull/2926), [#2933](https://github.com/helix-editor/helix/pull/2933), [#2929](https://github.com/helix-editor/helix/pull/2929), [#3098](https://github.com/helix-editor/helix/pull/3098), [#3104](https://github.com/helix-editor/helix/pull/3104), [#3128](https://github.com/helix-editor/helix/pull/3128), [#3135](https://github.com/helix-editor/helix/pull/3135), [#3200](https://github.com/helix-editor/helix/pull/3200))
- Add `nord_light` ([#2908](https://github.com/helix-editor/helix/pull/2908))
- Update `night_owl` ([#2929](https://github.com/helix-editor/helix/pull/2929))
- Update `autumn` ([2e70985](https://github.com/helix-editor/helix/commit/2e70985), [936ed3a](https://github.com/helix-editor/helix/commit/936ed3a))
- Update `one_dark` ([#3011](https://github.com/helix-editor/helix/pull/3011))
- Add `noctis` ([#3043](https://github.com/helix-editor/helix/pull/3043), [#3128](https://github.com/helix-editor/helix/pull/3128))
- Update `boo_berry` ([#3191](https://github.com/helix-editor/helix/pull/3191))
- Update `monokai` ([#3131](https://github.com/helix-editor/helix/pull/3131))
- Add `ayu_dark`, `ayu_light`, `ayu_mirage` ([#3184](https://github.com/helix-editor/helix/pull/3184))
- Update `onelight` ([#3226](https://github.com/helix-editor/helix/pull/3226))
- Add `base16_transparent` ([#3216](https://github.com/helix-editor/helix/pull/3216), [b565fff](https://github.com/helix-editor/helix/commit/b565fff))
- Add `flatwhite` ([#3236](https://github.com/helix-editor/helix/pull/3236))
- Update `dark_plus` ([#3302](https://github.com/helix-editor/helix/pull/3302))
- Add `doom_acario_dark` ([#3308](https://github.com/helix-editor/helix/pull/3308), [#3539](https://github.com/helix-editor/helix/pull/3539))
- Add `rose_pine_moon` ([#3229](https://github.com/helix-editor/helix/pull/3229))
- Update `spacebones_light` ([#3342](https://github.com/helix-editor/helix/pull/3342))
- Fix typos in themes ([8deaebd](https://github.com/helix-editor/helix/commit/8deaebd), [#3412](https://github.com/helix-editor/helix/pull/3412))
- Add `emacs` ([#3410](https://github.com/helix-editor/helix/pull/3410))
- Add `papercolor-light` ([#3426](https://github.com/helix-editor/helix/pull/3426), [#3470](https://github.com/helix-editor/helix/pull/3470), [#3585](https://github.com/helix-editor/helix/pull/3585))
- Add `penumbra+` ([#3398](https://github.com/helix-editor/helix/pull/3398))
- Add `fleetish` ([#3591](https://github.com/helix-editor/helix/pull/3591), [#3607](https://github.com/helix-editor/helix/pull/3607))
- Add `sonokai` ([#3595](https://github.com/helix-editor/helix/pull/3595))
- Update all themes for theme lints ([#3587](https://github.com/helix-editor/helix/pull/3587))
LSP:
- V ([#2526](https://github.com/helix-editor/helix/pull/2526))
- Prisma ([#2703](https://github.com/helix-editor/helix/pull/2703))
- Clojure ([#2780](https://github.com/helix-editor/helix/pull/2780))
- WGSL ([#2872](https://github.com/helix-editor/helix/pull/2872))
- Elvish ([#2948](https://github.com/helix-editor/helix/pull/2948))
- Idris ([#2971](https://github.com/helix-editor/helix/pull/2971))
- Fortran ([#3025](https://github.com/helix-editor/helix/pull/3025))
- Gleam ([#3139](https://github.com/helix-editor/helix/pull/3139))
- Odin ([#3214](https://github.com/helix-editor/helix/pull/3214))
New languages:
- V ([#2526](https://github.com/helix-editor/helix/pull/2526))
- EDoc ([#2640](https://github.com/helix-editor/helix/pull/2640))
- JSDoc ([#2650](https://github.com/helix-editor/helix/pull/2650))
- OpenSCAD ([#2680](https://github.com/helix-editor/helix/pull/2680))
- Prisma ([#2703](https://github.com/helix-editor/helix/pull/2703))
- Clojure ([#2780](https://github.com/helix-editor/helix/pull/2780))
- Starlark ([#2903](https://github.com/helix-editor/helix/pull/2903))
- Elvish ([#2948](https://github.com/helix-editor/helix/pull/2948))
- Fortran ([#3025](https://github.com/helix-editor/helix/pull/3025))
- Ungrammar ([#3048](https://github.com/helix-editor/helix/pull/3048))
- SCSS ([#3074](https://github.com/helix-editor/helix/pull/3074))
- Go Template ([#3091](https://github.com/helix-editor/helix/pull/3091))
- Graphviz dot ([#3241](https://github.com/helix-editor/helix/pull/3241))
- Cue ([#3262](https://github.com/helix-editor/helix/pull/3262))
- Slint ([#3355](https://github.com/helix-editor/helix/pull/3355))
- Beancount ([#3297](https://github.com/helix-editor/helix/pull/3297))
- Taskwarrior ([#3468](https://github.com/helix-editor/helix/pull/3468))
- xit ([#3521](https://github.com/helix-editor/helix/pull/3521))
- ESDL ([#3526](https://github.com/helix-editor/helix/pull/3526))
- Awk ([#3528](https://github.com/helix-editor/helix/pull/3528), [#3535](https://github.com/helix-editor/helix/pull/3535))
- Pascal ([#3542](https://github.com/helix-editor/helix/pull/3542))
Updated languages and queries:
- Nix ([#2472](https://github.com/helix-editor/helix/pull/2472))
- Elixir ([#2619](https://github.com/helix-editor/helix/pull/2619))
- CPON ([#2643](https://github.com/helix-editor/helix/pull/2643))
- Textobjects queries for Erlang, Elixir, Gleam ([#2661](https://github.com/helix-editor/helix/pull/2661))
- Capture rust closures as function textobjects ([4a27e2d](https://github.com/helix-editor/helix/commit/4a27e2d))
- Heex ([#2800](https://github.com/helix-editor/helix/pull/2800), [#3170](https://github.com/helix-editor/helix/pull/3170))
- Add `<<=` operator highlighting for Rust ([#2805](https://github.com/helix-editor/helix/pull/2805))
- Fix comment injection in JavaScript/TypeScript ([#2763](https://github.com/helix-editor/helix/pull/2763))
- Nickel ([#2859](https://github.com/helix-editor/helix/pull/2859))
- Add `Rakefile` and `Gemfile` to Ruby file-types ([#2875](https://github.com/helix-editor/helix/pull/2875))
- Erlang ([#2910](https://github.com/helix-editor/helix/pull/2910), [ac669ad](https://github.com/helix-editor/helix/commit/ac669ad))
- Markdown ([#2910](https://github.com/helix-editor/helix/pull/2910), [#3108](https://github.com/helix-editor/helix/pull/3108), [#3400](https://github.com/helix-editor/helix/pull/3400))
- Bash ([#2910](https://github.com/helix-editor/helix/pull/2910))
- Rust ([#2910](https://github.com/helix-editor/helix/pull/2910), [#3397](https://github.com/helix-editor/helix/pull/3397))
- Edoc ([#2910](https://github.com/helix-editor/helix/pull/2910))
- HTML ([#2910](https://github.com/helix-editor/helix/pull/2910))
- Make ([#2910](https://github.com/helix-editor/helix/pull/2910))
- TSQ ([#2910](https://github.com/helix-editor/helix/pull/2910), [#2960](https://github.com/helix-editor/helix/pull/2960))
- git-commit ([#2910](https://github.com/helix-editor/helix/pull/2910))
- Use default fallback for Python indents ([9ae70cc](https://github.com/helix-editor/helix/commit/9ae70cc))
- Add Haskell LSP roots ([#2954](https://github.com/helix-editor/helix/pull/2954))
- Ledger ([#2936](https://github.com/helix-editor/helix/pull/2936), [#2988](https://github.com/helix-editor/helix/pull/2988))
- Nickel ([#2987](https://github.com/helix-editor/helix/pull/2987))
- JavaScript/TypeScript ([#2961](https://github.com/helix-editor/helix/pull/2961), [#3219](https://github.com/helix-editor/helix/pull/3219), [#3213](https://github.com/helix-editor/helix/pull/3213), [#3280](https://github.com/helix-editor/helix/pull/3280), [#3301](https://github.com/helix-editor/helix/pull/3301))
- GLSL ([#3051](https://github.com/helix-editor/helix/pull/3051))
- Fix locals tracking in Rust ([#3027](https://github.com/helix-editor/helix/pull/3027), [#3212](https://github.com/helix-editor/helix/pull/3212), [#3345](https://github.com/helix-editor/helix/pull/3345))
- Verilog ([#3158](https://github.com/helix-editor/helix/pull/3158))
- Ruby ([#3173](https://github.com/helix-editor/helix/pull/3173), [#3527](https://github.com/helix-editor/helix/pull/3527))
- Svelte ([#3147](https://github.com/helix-editor/helix/pull/3147))
- Add Elixir and HEEx comment textobjects ([#3179](https://github.com/helix-editor/helix/pull/3179))
- Python ([#3103](https://github.com/helix-editor/helix/pull/3103), [#3201](https://github.com/helix-editor/helix/pull/3201), [#3284](https://github.com/helix-editor/helix/pull/3284))
- PHP ([#3317](https://github.com/helix-editor/helix/pull/3317))
- Latex ([#3370](https://github.com/helix-editor/helix/pull/3370))
- Clojure ([#3387](https://github.com/helix-editor/helix/pull/3387))
- Swift ([#3461](https://github.com/helix-editor/helix/pull/3461))
- C# ([#3480](https://github.com/helix-editor/helix/pull/3480), [#3494](https://github.com/helix-editor/helix/pull/3494))
- Org ([#3489](https://github.com/helix-editor/helix/pull/3489))
- Elm ([#3497](https://github.com/helix-editor/helix/pull/3497))
- Dart ([#3419](https://github.com/helix-editor/helix/pull/3419))
- Julia ([#3507](https://github.com/helix-editor/helix/pull/3507))
- Fix Rust textobjects ([#3590](https://github.com/helix-editor/helix/pull/3590))
- C ([00d88e5](https://github.com/helix-editor/helix/commit/00d88e5))
- Update Rust ([0ef0ef9](https://github.com/helix-editor/helix/commit/0ef0ef9))
Packaging:
- Add `rust-analyzer` to Nix flake devShell ([#2739](https://github.com/helix-editor/helix/pull/2739))
- Add cachix information to the Nix flake ([#2999](https://github.com/helix-editor/helix/pull/2999))
- Pass makeWrapperArgs to wrapProgram in the Nix flake ([#3003](https://github.com/helix-editor/helix/pull/3003))
- Add a way to override which grammars are built by Nix ([#3141](https://github.com/helix-editor/helix/pull/3141))
- Add a GitHub actions release for `aarch64-macos` ([#3137](https://github.com/helix-editor/helix/pull/3137))
- Add shell auto-completions for Elvish ([#3331](https://github.com/helix-editor/helix/pull/3331))
# 22.05 (2022-05-28)
An even bigger shout out than usual to all the contributors - we had a whopping
110 contributors in this release! That's more than double the number of
contributors as in the last release!
Check out some of the highlights in the [news section](https://helix-editor.com/news/release-22-05-highlights/).
As usual, the following is a summary of each of the changes since the last release.
For the full log, check out the [git log](https://github.com/helix-editor/helix/compare/22.03..22.05).
Breaking Changes:
- Removed `C-j`, `C-k` bindings from file picker ([#1792](https://github.com/helix-editor/helix/pull/1792))
- Replaced `C-f` with `C-d` and `C-b` with `C-u` bindings in file picker ([#1792](https://github.com/helix-editor/helix/pull/1792))
- `A-hjkl` bindings have been moved to `A-pion` ([#2205](https://github.com/helix-editor/helix/pull/2205))
- `A-Left`/`A-Right` have been moved to `C-Left`/`C-Right` ([#2193](https://github.com/helix-editor/helix/pull/2193))
Features:
- The indentation mechanism has been reworked ([#1562](https://github.com/helix-editor/helix/pull/1562), [#1908](https://github.com/helix-editor/helix/pull/1908))
- Configurable gutters ([#1967](https://github.com/helix-editor/helix/pull/1967))
- Support for local language configuration ([#1249](https://github.com/helix-editor/helix/pull/1249))
- Configurable themed rulers ([#2060](https://github.com/helix-editor/helix/pull/2060))
- Render visible whitespace ([e6b865e](https://github.com/helix-editor/helix/commit/e6b865e), [#2322](https://github.com/helix-editor/helix/pull/2322), [#2331](https://github.com/helix-editor/helix/pull/2331))
Commands:
- Paragraph motion and textobject (`]p`, `[p`) ([#1627](https://github.com/helix-editor/helix/pull/1627), [#1956](https://github.com/helix-editor/helix/pull/1956), [#1969](https://github.com/helix-editor/helix/pull/1969), [#1992](https://github.com/helix-editor/helix/pull/1992), [#2226](https://github.com/helix-editor/helix/pull/2226))
- `:buffer-next`, `:buffer-previous` ([#1940](https://github.com/helix-editor/helix/pull/1940))
- `:set-language` to set the buffer's language ([#1866](https://github.com/helix-editor/helix/pull/1866), [#1996](https://github.com/helix-editor/helix/pull/1996))
- Command for picking files from the current working directory (`Space-F`) ([#1600](https://github.com/helix-editor/helix/pull/1600), [#2308](https://github.com/helix-editor/helix/pull/2308))
- `:write!` which creates non-existent subdirectories ([#1839](https://github.com/helix-editor/helix/pull/1839))
- Add `m` textobject that selects closest surrounding pair ([de15d70](https://github.com/helix-editor/helix/commit/de15d70), [76175db](https://github.com/helix-editor/helix/commit/76175db))
- `:pipe` typable command for piping selections ([#1972](https://github.com/helix-editor/helix/pull/1972))
- `extend_line_above` which extends to previous lines ([#2117](https://github.com/helix-editor/helix/pull/2117))
- `set_line_ending` which replaces line endings ([#1871](https://github.com/helix-editor/helix/pull/1871))
- `:get-option` for getting the current value of an option (`:get`) ([#2231](https://github.com/helix-editor/helix/pull/2231))
- `:run-shell-command` which does not interact with selections ([#1682](https://github.com/helix-editor/helix/pull/1682))
- `:reflow` which hard-wraps selected text ([#2128](https://github.com/helix-editor/helix/pull/2128))
- `commit_undo_checkpoint` which adds an undo checkpoint ([#2115](https://github.com/helix-editor/helix/pull/2115))
- `:log-open` which opens the log file ([#2422](https://github.com/helix-editor/helix/pull/2422))
- `transpose_view` which transposes window splits ([#2461](https://github.com/helix-editor/helix/pull/2461))
- View-swapping: `swap_view_right`, `swap_view_left`, `swap_view_up`, `swap_view_down` ([#2445](https://github.com/helix-editor/helix/pull/2445))
- `shrink_to_line_bounds` which shrinks selections to line-bounds ([#2450](https://github.com/helix-editor/helix/pull/2450))
Usability improvements and fixes:
- Handle broken pipes when piping `hx --health` through `head` ([#1876](https://github.com/helix-editor/helix/pull/1876))
- Fix for `copy_selection` on newlines ([ab7885e](https://github.com/helix-editor/helix/commit/ab7885e), [236c6b7](https://github.com/helix-editor/helix/commit/236c6b7))
- Use `win32yank` clipboard provider on WSL2 ([#1912](https://github.com/helix-editor/helix/pull/1912))
- Jump to the next number on the line before incrementing ([#1778](https://github.com/helix-editor/helix/pull/1778))
- Fix start position of next search ([#1904](https://github.com/helix-editor/helix/pull/1904))
- Use check and X marks for health check output ([#1918](https://github.com/helix-editor/helix/pull/1918))
- Clear terminal after switching to alternate screens ([#1944](https://github.com/helix-editor/helix/pull/1944))
- Fix `toggle_comments` command on multiple selections ([#1882](https://github.com/helix-editor/helix/pull/1882))
- Apply `ui.gutter` theming to empty gutter spans ([#2032](https://github.com/helix-editor/helix/pull/2032))
- Use checkboxes in `hx --health` output ([#1947](https://github.com/helix-editor/helix/pull/1947))
- Pass unmapped keys through prompt regardless of modifiers ([764adbd](https://github.com/helix-editor/helix/commit/764adbd))
- LSP: pull formatting options from config ([c18de0e](https://github.com/helix-editor/helix/commit/c18de0e))
- LSP: provide `rootPath` ([84e799f](https://github.com/helix-editor/helix/commit/84e799f))
- LSP: implement `workspace_folders` ([8adf0c1](https://github.com/helix-editor/helix/commit/8adf0c1))
- LSP: fix auto-import ([#2088](https://github.com/helix-editor/helix/pull/2088))
- Send active diagnostic to LSP when requesting code actions ([#2005](https://github.com/helix-editor/helix/pull/2005))
- Prevent panic when parsing malformed LSP `PublishDiagnostic` ([#2160](https://github.com/helix-editor/helix/pull/2160))
- Restore document state on completion cancel ([#2096](https://github.com/helix-editor/helix/pull/2096))
- Only merge top-level array when merging `languages.toml` ([#2145](https://github.com/helix-editor/helix/pull/2145), [#2215](https://github.com/helix-editor/helix/pull/2215))
- Fix open on multiline selection ([#2161](https://github.com/helix-editor/helix/pull/2161))
- Allow re-binding `0` if it is not used in a count ([#2174](https://github.com/helix-editor/helix/pull/2174))
- Fix `ctrl-u` behavior in insert mode ([#1957](https://github.com/helix-editor/helix/pull/1957))
- Check LSP rename capabilities before sending rename action ([#2203](https://github.com/helix-editor/helix/pull/2203))
- Register the `publish_diagnostics` LSP capability ([#2241](https://github.com/helix-editor/helix/pull/2241))
- Fix paste direction for typed paste commands ([#2288](https://github.com/helix-editor/helix/pull/2288))
- Improve handling of buffer-close ([#1397](https://github.com/helix-editor/helix/pull/1397))
- Extend the tutor file ([#2133](https://github.com/helix-editor/helix/pull/2133))
- Treat slashes as word separators in prompts ([#2315](https://github.com/helix-editor/helix/pull/2315))
- Auto-complete directory members ([#1682](https://github.com/helix-editor/helix/pull/1682))
- Allow disabling format-on-save as a global editor setting ([#2321](https://github.com/helix-editor/helix/pull/2321))
- Wrap command palette in overlay ([#2378](https://github.com/helix-editor/helix/pull/2378))
- Prevent selections from collapsing when inserting newlines ([#2414](https://github.com/helix-editor/helix/pull/2414))
- Allow configuration of LSP request timeout ([#2405](https://github.com/helix-editor/helix/pull/2405))
- Use debug console on Windows for DAP terminal ([#2294](https://github.com/helix-editor/helix/pull/2294))
- Exclude cursor when deleting with `C-w` in insert mode ([#2431](https://github.com/helix-editor/helix/pull/2431))
- Prevent panics from LSP parsing errors ([7ae6cad](https://github.com/helix-editor/helix/commit/7ae6cad))
- Prevent panics from LSP responses without requests ([#2475](https://github.com/helix-editor/helix/pull/2475))
- Fix scroll rate for documentation popups ([#2497](https://github.com/helix-editor/helix/pull/2497))
- Support inserting into prompts from registers ([#2458](https://github.com/helix-editor/helix/pull/2458))
- Separate theme scopes for diagnostic types ([#2437](https://github.com/helix-editor/helix/pull/2437))
- Use `ui.menu` instead of `ui.statusline` for command completion menu theming ([82fb217](https://github.com/helix-editor/helix/commit/82fb217))
- Fix panic when reloading a shrunk file ([#2506](https://github.com/helix-editor/helix/pull/2506))
- Add theme key for picker separator ([#2523](https://github.com/helix-editor/helix/pull/2523))
Themes:
- Remove `ui.text` background from dark_plus ([#1950](https://github.com/helix-editor/helix/pull/1950))
- Add `boo_berry` ([#1962](https://github.com/helix-editor/helix/pull/1962))
- Update `dark_plus` markup colors ([#1989](https://github.com/helix-editor/helix/pull/1989))
- Update `dark_plus` `tag` and `ui.menu.selected` colors ([#2014](https://github.com/helix-editor/helix/pull/2014))
- Add `dracula_at_night` ([#2008](https://github.com/helix-editor/helix/pull/2008))
- Improve `dracula` selection theming ([#2077](https://github.com/helix-editor/helix/pull/2077))
- Remove dim attribute on `onedark` line-number gutter ([#2155](https://github.com/helix-editor/helix/pull/2155))
- Add `tokyonight` ([#2162](https://github.com/helix-editor/helix/pull/2162))
- Use border colors from the original `dark_plus` theme ([#2186](https://github.com/helix-editor/helix/pull/2186))
- Add `autumn` ([#2212](https://github.com/helix-editor/helix/pull/2212), [#2270](https://github.com/helix-editor/helix/pull/2270), [#2531](https://github.com/helix-editor/helix/pull/2531))
- Add `tokyonight_storm` ([#2240](https://github.com/helix-editor/helix/pull/2240))
- Add `pop-dark` ([#2189](https://github.com/helix-editor/helix/pull/2189))
- Fix `base16_terminal` theme using incorrect ansi-color ([#2279](https://github.com/helix-editor/helix/pull/2279))
- Add `onelight` ([#2287](https://github.com/helix-editor/helix/pull/2287), [#2323](https://github.com/helix-editor/helix/pull/2323))
- Add `ui.virtual` scopes to `onedark` theme ([3626e38](https://github.com/helix-editor/helix/commit/3626e38))
- Add `night_owl` ([#2330](https://github.com/helix-editor/helix/pull/2330))
- Use yellow foreground and red background for `monokai_pro_spectrum` ([#2433](https://github.com/helix-editor/helix/pull/2433))
- Add `snazzy` ([#2473](https://github.com/helix-editor/helix/pull/2473))
- Update `dark_plus` constructor color ([8e8d4ba](https://github.com/helix-editor/helix/commit/8e8d4ba))
- Add `ui.menu` to the default theme ([e7e13dc](https://github.com/helix-editor/helix/commit/e7e13dc))
- Add `ui.menu` to any themes missing the key ([9be810f](https://github.com/helix-editor/helix/commit/9be810f))
- Add `catppuccin` ([#2546](https://github.com/helix-editor/helix/pull/2546), [7160e74](https://github.com/helix-editor/helix/commit/7160e74))
LSP:
- Use texlab for latex ([#1922](https://github.com/helix-editor/helix/pull/1922))
- HTML ([#2018](https://github.com/helix-editor/helix/pull/2018))
- JSON ([#2024](https://github.com/helix-editor/helix/pull/2024))
- CSS ([#2025](https://github.com/helix-editor/helix/pull/2025))
- PHP ([#2031](https://github.com/helix-editor/helix/pull/2031))
- Swift ([#2033](https://github.com/helix-editor/helix/pull/2033))
- OCaml ([#2035](https://github.com/helix-editor/helix/pull/2035))
- Vue ([#2043](https://github.com/helix-editor/helix/pull/2043))
- Yaml ([#2234](https://github.com/helix-editor/helix/pull/2234))
- Vala ([#2243](https://github.com/helix-editor/helix/pull/2243))
- TOML ([#2302](https://github.com/helix-editor/helix/pull/2302))
- Java ([#2511](https://github.com/helix-editor/helix/pull/2511))
- Lua ([#2560](https://github.com/helix-editor/helix/pull/2560))
- Verilog ([#2552](https://github.com/helix-editor/helix/pull/2552))
New Languages:
- JSX ([#1906](https://github.com/helix-editor/helix/pull/1906), [a24fb17](https://github.com/helix-editor/helix/commit/a24fb17), [855e438](https://github.com/helix-editor/helix/commit/855e438), [#1921](https://github.com/helix-editor/helix/pull/1921))
- Rust Object Notation (RON) ([#1925](https://github.com/helix-editor/helix/pull/1925))
- R and R Markdown ([#1998](https://github.com/helix-editor/helix/pull/1998))
- Swift ([#2033](https://github.com/helix-editor/helix/pull/2033))
- EJS and ERB ([#2055](https://github.com/helix-editor/helix/pull/2055))
- EEx ([9d095e0](https://github.com/helix-editor/helix/commit/9d095e0))
- HEEx ([4836bb3](https://github.com/helix-editor/helix/commit/4836bb3), [#2149](https://github.com/helix-editor/helix/pull/2149))
- SQL ([#2097](https://github.com/helix-editor/helix/pull/2097))
- GDScript ([#1985](https://github.com/helix-editor/helix/pull/1985))
- Nickel ([#2173](https://github.com/helix-editor/helix/pull/2173), [#2320](https://github.com/helix-editor/helix/pull/2320))
- `go.mod` and `go.work` ([#2197](https://github.com/helix-editor/helix/pull/2197))
- Nushell ([#2225](https://github.com/helix-editor/helix/pull/2225))
- Vala ([#2243](https://github.com/helix-editor/helix/pull/2243))
- Hare ([#2289](https://github.com/helix-editor/helix/pull/2289), [#2480](https://github.com/helix-editor/helix/pull/2480))
- DeviceTree ([#2329](https://github.com/helix-editor/helix/pull/2329))
- Cairo ([7387905](https://github.com/helix-editor/helix/commit/7387905))
- CPON ([#2355](https://github.com/helix-editor/helix/pull/2355), [#2424](https://github.com/helix-editor/helix/pull/2424))
- git-ignore ([#2397](https://github.com/helix-editor/helix/pull/2397))
- git-attributes ([#2397](https://github.com/helix-editor/helix/pull/2397))
- Odin ([#2399](https://github.com/helix-editor/helix/pull/2399), [#2464](https://github.com/helix-editor/helix/pull/2464))
- Meson ([#2314](https://github.com/helix-editor/helix/pull/2314))
- SSH Client Config ([#2498](https://github.com/helix-editor/helix/pull/2498))
- Scheme ([d25bae8](https://github.com/helix-editor/helix/commit/d25bae8))
- Verilog ([#2552](https://github.com/helix-editor/helix/pull/2552))
Updated Languages and Queries:
- Erlang ([e2a5071](https://github.com/helix-editor/helix/commit/e2a5071), [#2149](https://github.com/helix-editor/helix/pull/2149), [82da9bd](https://github.com/helix-editor/helix/commit/82da9bd))
- Elixir ([1819478](https://github.com/helix-editor/helix/commit/1819478), [8c3c901](https://github.com/helix-editor/helix/commit/8c3c901), [4ac94a5](https://github.com/helix-editor/helix/commit/4ac94a5))
- Gleam ([7cd6050](https://github.com/helix-editor/helix/commit/7cd6050), [45dd540](https://github.com/helix-editor/helix/commit/45dd540))
- Bash ([#1917](https://github.com/helix-editor/helix/pull/1917))
- JavaScript ([#2140](https://github.com/helix-editor/helix/pull/2140))
- Ruby textobject queries ([#2143](https://github.com/helix-editor/helix/pull/2143))
- Fix Golang textobject queries ([#2153](https://github.com/helix-editor/helix/pull/2153))
- Add more bash and HCL file extensions ([#2201](https://github.com/helix-editor/helix/pull/2201))
- Divide HCL and tfvars into separate languages ([#2244](https://github.com/helix-editor/helix/pull/2244))
- Use JavaScript for `cjs` files ([#2387](https://github.com/helix-editor/helix/pull/2387))
- Use Perl for `t` files ([#2395](https://github.com/helix-editor/helix/pull/2395))
- Use `markup.list` scopes for lists ([#2401](https://github.com/helix-editor/helix/pull/2401))
- Use PHP for `inc` files ([#2440](https://github.com/helix-editor/helix/pull/2440))
- Improve Rust textobjects ([#2494](https://github.com/helix-editor/helix/pull/2494), [10463fe](https://github.com/helix-editor/helix/commit/10463fe))
- Python ([#2451](https://github.com/helix-editor/helix/pull/2451))
Packaging:
- Use `builtins.fromTOML` in Nix Flake on Nix 2.6+ ([#1892](https://github.com/helix-editor/helix/pull/1892))
- Shell auto-completion files are now available ([#2022](https://github.com/helix-editor/helix/pull/2022))
- Create an AppImage on release ([#2089](https://github.com/helix-editor/helix/pull/2089))
# 22.03 (2022-03-28) # 22.03 (2022-03-28)
A big shout out to all the contributors! We had 51 contributors in this release. A big shout out to all the contributors! We had 51 contributors in this release.
@ -230,7 +684,7 @@ Usability improvements and fixes:
- File picker configuration ([#988](https://github.com/helix-editor/helix/pull/988)) - File picker configuration ([#988](https://github.com/helix-editor/helix/pull/988))
- Fix surround cursor position calculation ([#1183](https://github.com/helix-editor/helix/pull/1183)) - Fix surround cursor position calculation ([#1183](https://github.com/helix-editor/helix/pull/1183))
- Accept count for goto_window ([#1033](https://github.com/helix-editor/helix/pull/1033)) - Accept count for goto_window ([#1033](https://github.com/helix-editor/helix/pull/1033))
- Make kill_to_line_end behave like emacs ([#1235](https://github.com/helix-editor/helix/pull/1235)) - Make kill_to_line_end behave like Emacs ([#1235](https://github.com/helix-editor/helix/pull/1235))
- Only use a single documentation popup ([#1241](https://github.com/helix-editor/helix/pull/1241)) - Only use a single documentation popup ([#1241](https://github.com/helix-editor/helix/pull/1241))
- ui: popup: Don't allow scrolling past the end of content ([`3307f44c`](https://github.com/helix-editor/helix/commit/3307f44c)) - ui: popup: Don't allow scrolling past the end of content ([`3307f44c`](https://github.com/helix-editor/helix/commit/3307f44c))
- Open files with spaces in filename, allow opening multiple files ([#1231](https://github.com/helix-editor/helix/pull/1231)) - Open files with spaces in filename, allow opening multiple files ([#1231](https://github.com/helix-editor/helix/pull/1231))
@ -445,7 +899,7 @@ Fixes:
- A bunch of bugs regarding `o`/`O` behavior ([#281](https://github.com/helix-editor/helix/pull/281)) - A bunch of bugs regarding `o`/`O` behavior ([#281](https://github.com/helix-editor/helix/pull/281))
- `~` expansion now works in file completion ([#284](https://github.com/helix-editor/helix/pull/284)) - `~` expansion now works in file completion ([#284](https://github.com/helix-editor/helix/pull/284))
- Several UI related overflow crashes ([#318](https://github.com/helix-editor/helix/pull/318)) - Several UI related overflow crashes ([#318](https://github.com/helix-editor/helix/pull/318))
- Fix a test failure occuring only on `test --release` ([`4f108ab1`](https://github.com/helix-editor/helix/commit/4f108ab1b2197809506bd7305ad903a3525eabfa)) - Fix a test failure occurring only on `test --release` ([`4f108ab1`](https://github.com/helix-editor/helix/commit/4f108ab1b2197809506bd7305ad903a3525eabfa))
- Prompts now support unicode input ([#295](https://github.com/helix-editor/helix/pull/295)) - Prompts now support unicode input ([#295](https://github.com/helix-editor/helix/pull/295))
- Completion documentation no longer overlaps the popup ([#322](https://github.com/helix-editor/helix/pull/322)) - Completion documentation no longer overlaps the popup ([#322](https://github.com/helix-editor/helix/pull/322))
- Fix a crash when trying to select `^` ([`9c534614`](https://github.com/helix-editor/helix/commit/9c53461429a3e72e3b1fb87d7ca490e168d7dee2)) - Fix a crash when trying to select `^` ([`9c534614`](https://github.com/helix-editor/helix/commit/9c53461429a3e72e3b1fb87d7ca490e168d7dee2))
@ -468,7 +922,7 @@ to distinguish it in bug reports..
on cargo run. `~/.config/helix/runtime` can also be used. on cargo run. `~/.config/helix/runtime` can also be used.
- Registers can now be selected via " (for example `"ay`) - Registers can now be selected via " (for example `"ay`)
- Support for Nix files was added - Support for Nix files was added
- Movement is now fully tested and matches kakoune implementation - Movement is now fully tested and matches Kakoune implementation
- A per-file LSP symbol picker was added to space+s - A per-file LSP symbol picker was added to space+s
- Selection can be replaced with yanked text via R - Selection can be replaced with yanked text via R
@ -492,7 +946,7 @@ Keymaps:
- The runtime/ can now optionally be embedded in the binary - The runtime/ can now optionally be embedded in the binary
- Haskell syntax added - Haskell syntax added
- Window mode (ctrl-w) added - Window mode (ctrl-w) added
- Show matching bracket (vim's matchbrackets) - Show matching bracket (Vim's matchbrackets)
- Themes now support style modifiers - Themes now support style modifiers
- First user contributed theme - First user contributed theme
- Create a document if it doesn't exist yet on save - Create a document if it doesn't exist yet on save

Cargo.lock: generated file, 590 changed lines (diff suppressed because it is too large).

@ -1,14 +1,13 @@
# Helix # Helix
[![Build status](https://github.com/helix-editor/helix/actions/workflows/build.yml/badge.svg)](https://github.com/helix-editor/helix/actions) [![Build status](https://github.com/helix-editor/helix/actions/workflows/build.yml/badge.svg)](https://github.com/helix-editor/helix/actions)
![Screenshot](./screenshot.png) ![Screenshot](./screenshot.png)
A kakoune / neovim inspired editor, written in Rust. A Kakoune / Neovim inspired editor, written in Rust.
The editing model is very heavily based on kakoune; during development I found The editing model is very heavily based on Kakoune; during development I found
myself agreeing with most of kakoune's design decisions. myself agreeing with most of Kakoune's design decisions.
For more information, see the [website](https://helix-editor.com) or For more information, see the [website](https://helix-editor.com) or
[documentation](https://docs.helix-editor.com/). [documentation](https://docs.helix-editor.com/).
@ -25,28 +24,36 @@ All shortcuts/keymaps can be found [in the documentation on the website](https:/
- Smart, incremental syntax highlighting and code editing via tree-sitter - Smart, incremental syntax highlighting and code editing via tree-sitter
It's a terminal-based editor first, but I'd like to explore a custom renderer It's a terminal-based editor first, but I'd like to explore a custom renderer
(similar to emacs) in wgpu or skulpin. (similar to Emacs) in wgpu or skulpin.
Note: Only certain languages have indentation definitions at the moment. Check Note: Only certain languages have indentation definitions at the moment. Check
`runtime/queries/<lang>/` for `indents.toml`. `runtime/queries/<lang>/` for `indents.scm`.
# Installation # Installation
We provide packaging for various distributions, but here's a quick method to Packages are available for various distributions (see [Installation docs](https://docs.helix-editor.com/install.html)).
build from source.
``` If you would like to build from source:
```shell
git clone https://github.com/helix-editor/helix git clone https://github.com/helix-editor/helix
cd helix cd helix
cargo install --path helix-term cargo install --path helix-term
hx --grammar fetch
hx --grammar build
``` ```
This will install the `hx` binary to `$HOME/.cargo/bin` and build tree-sitter grammars. This will install the `hx` binary to `$HOME/.cargo/bin` and build tree-sitter grammars.
If you want to customize your `languages.toml` config,
tree-sitter grammars may be manually fetched and built with `hx --grammar fetch` and `hx --grammar build`.
Helix also needs its runtime files so make sure to copy/symlink the `runtime/` directory into the Helix also needs its runtime files so make sure to copy/symlink the `runtime/` directory into the
config directory (for example `~/.config/helix/runtime` on Linux/macOS, or `%AppData%/helix/runtime` on Windows). config directory (for example `~/.config/helix/runtime` on Linux/macOS, or `%AppData%/helix/runtime` on Windows).
| OS | Command |
| -------------------- | -------------------------------------------- |
| Windows (cmd.exe) | `xcopy /e /i runtime %AppData%\helix\runtime` |
| Windows (PowerShell) | `xcopy /e /i runtime $Env:AppData\helix\runtime` |
| Linux/macOS | `ln -s $PWD/runtime ~/.config/helix/runtime` |
This location can be overridden via the `HELIX_RUNTIME` environment variable. This location can be overridden via the `HELIX_RUNTIME` environment variable.
Packages already solve this for you by wrapping the `hx` binary with a wrapper Packages already solve this for you by wrapping the `hx` binary with a wrapper
@ -55,14 +62,17 @@ that sets the variable to the install dir.
> NOTE: running via cargo also doesn't require setting explicit `HELIX_RUNTIME` path, it will automatically > NOTE: running via cargo also doesn't require setting explicit `HELIX_RUNTIME` path, it will automatically
> detect the `runtime` directory in the project root. > detect the `runtime` directory in the project root.
In order to use LSP features like auto-complete, you will need to
[install the appropriate Language Server](https://github.com/helix-editor/helix/wiki/How-to-install-the-default-language-servers)
for a language.
[![Packaging status](https://repology.org/badge/vertical-allrepos/helix.svg)](https://repology.org/project/helix/versions) [![Packaging status](https://repology.org/badge/vertical-allrepos/helix.svg)](https://repology.org/project/helix/versions)
## MacOS ## MacOS
Helix can be installed on MacOS through homebrew via: Helix can be installed on MacOS through homebrew:
``` ```
brew tap helix-editor/helix
brew install helix brew install helix
``` ```

@ -1 +1 @@
22.03 22.08.1

@ -1,6 +1,6 @@
# Author: NNB <nnbnh@protonmail.com> # Author: NNB <nnbnh@protonmail.com>
"ui.menu" = "black" "ui.menu" = { fg = "black", bg = "white" }
"ui.menu.selected" = { modifiers = ["reversed"] } "ui.menu.selected" = { modifiers = ["reversed"] }
"ui.linenr" = { fg = "gray", bg = "black" } "ui.linenr" = { fg = "gray", bg = "black" }
"ui.popup" = { modifiers = ["reversed"] } "ui.popup" = { modifiers = ["reversed"] }
@ -10,7 +10,6 @@
"comment" = { fg = "gray" } "comment" = { fg = "gray" }
"ui.statusline" = { fg = "black", bg = "white" } "ui.statusline" = { fg = "black", bg = "white" }
"ui.statusline.inactive" = { fg = "gray", bg = "white" } "ui.statusline.inactive" = { fg = "gray", bg = "white" }
"ui.help" = { modifiers = ["reversed"] }
"ui.cursor" = { fg = "white", modifiers = ["reversed"] } "ui.cursor" = { fg = "white", modifiers = ["reversed"] }
"variable" = "red" "variable" = "red"
"constant.numeric" = "yellow" "constant.numeric" = "yellow"

@ -11,7 +11,6 @@
- [Configuration](./configuration.md) - [Configuration](./configuration.md)
- [Themes](./themes.md) - [Themes](./themes.md)
- [Key Remapping](./remapping.md) - [Key Remapping](./remapping.md)
- [Hooks](./hooks.md)
- [Languages](./languages.md) - [Languages](./languages.md)
- [Guides](./guides/README.md) - [Guides](./guides/README.md)
- [Adding Languages](./guides/adding_languages.md) - [Adding Languages](./guides/adding_languages.md)

@ -1,5 +1,5 @@
# Commands # Commands
Command mode can be activated by pressing `:`, similar to vim. Built-in commands: Command mode can be activated by pressing `:`, similar to Vim. Built-in commands:
{{#include ./generated/typable-cmd.md}} {{#include ./generated/typable-cmd.md}}

@ -25,6 +25,9 @@ select = "underline"
hidden = false hidden = false
``` ```
You may also specify a file to use for configuration with the `-c` or
`--config` CLI argument: `hx -c path/to/custom-config.toml`.
## Editor ## Editor
### `[editor]` Section ### `[editor]` Section
@ -37,24 +40,68 @@ hidden = false
| `scroll-lines` | Number of lines to scroll per scroll wheel step. | `3` | | `scroll-lines` | Number of lines to scroll per scroll wheel step. | `3` |
| `shell` | Shell to use when running external commands. | Unix: `["sh", "-c"]`<br/>Windows: `["cmd", "/C"]` | | `shell` | Shell to use when running external commands. | Unix: `["sh", "-c"]`<br/>Windows: `["cmd", "/C"]` |
| `line-number` | Line number display: `absolute` simply shows each line's number, while `relative` shows the distance from the current line. When unfocused or in insert mode, `relative` will still show absolute line numbers. | `absolute` | | `line-number` | Line number display: `absolute` simply shows each line's number, while `relative` shows the distance from the current line. When unfocused or in insert mode, `relative` will still show absolute line numbers. | `absolute` |
| `cursorline` | Highlight all lines with a cursor. | `false` |
| `gutters` | Gutters to display. Available options are `diagnostics`, `line-numbers`, and `spacer`; note that `diagnostics` also includes related features such as breakpoints. A 1-width padding is inserted when the gutter list is non-empty. | `["diagnostics", "line-numbers"]` |
| `auto-completion` | Enable automatic pop up of auto-completion. | `true` | | `auto-completion` | Enable automatic pop up of auto-completion. | `true` |
| `auto-format` | Enable automatic formatting on save. | `true` |
| `idle-timeout` | Time in milliseconds since last keypress before idle timers trigger. Used for autocompletion, set to 0 for instant. | `400` | | `idle-timeout` | Time in milliseconds since last keypress before idle timers trigger. Used for autocompletion, set to 0 for instant. | `400` |
| `completion-trigger-len` | The min-length of word under cursor to trigger autocompletion | `2` | | `completion-trigger-len` | The min-length of word under cursor to trigger autocompletion | `2` |
| `auto-info` | Whether to display infoboxes | `true` | | `auto-info` | Whether to display infoboxes | `true` |
| `true-color` | Set to `true` to override automatic detection of terminal truecolor support in the event of a false negative. | `false` | | `true-color` | Set to `true` to override automatic detection of terminal truecolor support in the event of a false negative. | `false` |
| `rulers` | List of column positions at which to display the rulers. Can be overridden by language specific `rulers` in `languages.toml` file. | `[]` |
| `bufferline` | Renders a line at the top of the editor displaying open buffers. Can be `always`, `never` or `multiple` (only shown if more than one buffer is in use) | `never` |
| `color-modes` | Whether to color the mode indicator with different colors depending on the mode itself | `false` |
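
As a sketch only, a few of the options above combined in one `[editor]` block (the values here are illustrative, not the defaults listed in the table):

```toml
[editor]
line-number = "relative" # show distance from the current line
cursorline = true        # highlight the line(s) with a cursor
bufferline = "multiple"  # only render the bufferline when more than one buffer is open
color-modes = true       # color the mode indicator per mode
rulers = [80, 100]       # draw rulers at columns 80 and 100
```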
### `[editor.statusline]` Section
Allows configuring the statusline at the bottom of the editor.
The configuration distinguishes between three areas of the status line:
`[ ... ... LEFT ... ... | ... ... ... ... CENTER ... ... ... ... | ... ... RIGHT ... ... ]`
Statusline elements can be defined as follows:
```toml
[editor.statusline]
left = ["mode", "spinner"]
center = ["file-name"]
right = ["diagnostics", "selections", "position", "file-encoding", "file-line-ending", "file-type"]
separator = "│"
```
The following elements can be configured:
| Key | Description |
| ------ | ----------- |
| `mode` | The current editor mode (`NOR`/`INS`/`SEL`) |
| `spinner` | A progress spinner indicating LSP activity |
| `file-name` | The path/name of the opened file |
| `file-encoding` | The encoding of the opened file if it differs from UTF-8 |
| `file-line-ending` | The file line endings (CRLF or LF) |
| `file-type` | The type of the opened file |
| `diagnostics` | The number of warnings and/or errors |
| `selections` | The number of active selections |
| `position` | The cursor position |
| `position-percentage` | The cursor position as a percentage of the total number of lines |
| `separator` | The string defined in `editor.statusline.separator` (defaults to `"│"`) |
| `spacer` | Inserts a space between elements (multiple/contiguous spacers may be specified) |
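
As a sketch, the elements above can be mixed freely; for example, a right-hand side that adds a percentage indicator and extra spacing (element names are taken from the table, the layout is purely illustrative):

```toml
[editor.statusline]
right = ["diagnostics", "spacer", "position", "position-percentage", "file-type"]
```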
### `[editor.lsp]` Section ### `[editor.lsp]` Section
| Key | Description | Default | | Key | Description | Default |
| --- | ----------- | ------- | | --- | ----------- | ------- |
| `display-messages` | Display LSP progress messages below statusline[^1] | `false` | | `display-messages` | Display LSP progress messages below statusline[^1] | `false` |
| `auto-signature-help` | Enable automatic popup of signature help (parameter hints) | `true` |
| `display-signature-help-docs` | Display docs under signature help popup | `true` |
[^1]: A progress spinner is always shown in the statusline beside the file path. [^1]: By default, a progress spinner is shown in the statusline beside the file path.
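
A short `[editor.lsp]` sketch using the keys documented above (values chosen for illustration; the defaults are listed in the table):

```toml
[editor.lsp]
display-messages = true             # show LSP progress messages below the statusline
auto-signature-help = false         # do not pop up signature help automatically
display-signature-help-docs = true  # keep docs in the signature help popup
```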
### `[editor.cursor-shape]` Section ### `[editor.cursor-shape]` Section
Defines the shape of cursor in each mode. Note that due to limitations Defines the shape of cursor in each mode. Note that due to limitations
of the terminal environment, only the primary cursor can change shape. of the terminal environment, only the primary cursor can change shape.
Valid values for these options are `block`, `bar`, `underline`, or `hidden`.
| Key | Description | Default | | Key | Description | Default |
| --- | ----------- | ------- | | --- | ----------- | ------- |
@ -74,6 +121,8 @@ files and files listed within ignore files are ignored by (not visible in) the
helix file picker and global search. There is also one other key, `max-depth` helix file picker and global search. There is also one other key, `max-depth`
available, which is not defined by default. available, which is not defined by default.
All git-related options are only enabled in a git repository.
| Key | Description | Default | | Key | Description | Default |
|--|--|---------| |--|--|---------|
|`hidden` | Enables ignoring hidden files. | true |`hidden` | Enables ignoring hidden files. | true
@ -86,16 +135,18 @@ available, which is not defined by default.
### `[editor.auto-pairs]` Section ### `[editor.auto-pairs]` Section
Enable automatic insertion of pairs to parentheses, brackets, etc. Can be Enables automatic insertion of pairs to parentheses, brackets, etc. Can be a
a simple boolean value, or a specific mapping of pairs of single characters. simple boolean value, or a specific mapping of pairs of single characters.
| Key | Description | To disable auto-pairs altogether, set `auto-pairs` to `false`:
| --- | ----------- |
| `false` | Completely disable auto pairing, regardless of language-specific settings
| `true` | Use the default pairs: <code>(){}[]''""``</code>
| Mapping of pairs | e.g. `{ "(" = ")", "{" = "}", ... }`
Example ```toml
[editor]
auto-pairs = false # defaults to `true`
```
The default pairs are <code>(){}[]''""``</code>, but these can be customized by
setting `auto-pairs` to a TOML table:
```toml ```toml
[editor.auto-pairs] [editor.auto-pairs]
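# As a sketch, a custom mapping mirroring the default pairs listed above;
# each key is an opening character and each value its closing counterpart.
'(' = ')'
'{' = '}'
'[' = ']'
'"' = '"'
"'" = "'"
'`' = '`'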
@ -134,3 +185,48 @@ Search specific options.
|--|--|---------| |--|--|---------|
| `smart-case` | Enable smart case regex searching (case insensitive unless pattern contains upper case characters) | `true` | | `smart-case` | Enable smart case regex searching (case insensitive unless pattern contains upper case characters) | `true` |
| `wrap-around`| Whether the search should wrap after depleting the matches | `true` | | `wrap-around`| Whether the search should wrap after depleting the matches | `true` |
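
As a hedged example, turning off smart case while also stopping at the last match (keys from the table above):

```toml
[editor.search]
smart-case = false  # do not apply smart-case matching
wrap-around = false # stop searching instead of wrapping past the last match
```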
### `[editor.whitespace]` Section
Options for rendering whitespace with visible characters. Use `:set whitespace.render all` to temporarily enable visible whitespace.
| Key | Description | Default |
|-----|-------------|---------|
| `render` | Whether to render whitespace. May either be `"all"` or `"none"`, or a table with sub-keys `space`, `tab`, and `newline`. | `"none"` |
| `characters` | Literal characters to use when rendering whitespace. Sub-keys may be any of `tab`, `space`, `nbsp`, `newline` or `tabpad` | See example below |
Example
```toml
[editor.whitespace]
render = "all"
# or control each character
[editor.whitespace.render]
space = "all"
tab = "all"
newline = "none"
[editor.whitespace.characters]
space = "·"
nbsp = "⍽"
tab = "→"
newline = "⏎"
tabpad = "·" # Tabs will look like "→···" (depending on tab width)
```
### `[editor.indent-guides]` Section
Options for rendering vertical indent guides.
| Key | Description | Default |
| --- | --- | --- |
| `render` | Whether to render indent guides. | `false` |
| `character` | Literal character to use for rendering the indent guide | `│` |
Example:
```toml
[editor.indent-guides]
render = true
character = "╎"
```

@ -1,12 +1,12 @@
# Migrating from Vim # Migrating from Vim
Helix's editing model is strongly inspired from vim and kakoune, and a notable Helix's editing model is strongly inspired from Vim and Kakoune, and a notable
difference from vim (and the most striking similarity to kakoune) is that Helix difference from Vim (and the most striking similarity to Kakoune) is that Helix
follows the `selection → action` model. This means that whatever you are follows the `selection → action` model. This means that whatever you are
going to act on (a word, a paragraph, a line, etc) is selected first and the going to act on (a word, a paragraph, a line, etc) is selected first and the
action itself (delete, change, yank, etc) comes second. A cursor is simply a action itself (delete, change, yank, etc) comes second. A cursor is simply a
single width selection. single width selection.
See also Kakoune's [Migrating from Vim](https://github.com/mawww/kakoune/wiki/Migrating-from-Vim). See also Kakoune's [Migrating from Vim](https://github.com/mawww/kakoune/wiki/Migrating-from-Vim) and Helix's [Migrating from Vim](https://github.com/helix-editor/helix/wiki/Migrating-from-Vim).
> TODO: Mention texobjects, surround, registers > TODO: Mention textobjects, surround, registers

@ -1,69 +1,125 @@
| Language | Syntax Highlighting | Treesitter Textobjects | Auto Indent | Default LSP | | Language | Syntax Highlighting | Treesitter Textobjects | Auto Indent | Default LSP |
| --- | --- | --- | --- | --- | | --- | --- | --- | --- | --- |
| astro | ✓ | | | |
| awk | ✓ | ✓ | | `awk-language-server` |
| bash | ✓ | | | `bash-language-server` | | bash | ✓ | | | `bash-language-server` |
| bass | ✓ | | | `bass` |
| beancount | ✓ | | | |
| c | ✓ | ✓ | ✓ | `clangd` | | c | ✓ | ✓ | ✓ | `clangd` |
| c-sharp | ✓ | | | `OmniSharp` | | c-sharp | ✓ | ✓ | | `OmniSharp` |
| cairo | ✓ | | | |
| clojure | ✓ | | | `clojure-lsp` |
| cmake | ✓ | ✓ | ✓ | `cmake-language-server` | | cmake | ✓ | ✓ | ✓ | `cmake-language-server` |
| comment | ✓ | | | | | comment | ✓ | | | |
| cpon | ✓ | | ✓ | |
| cpp | ✓ | ✓ | ✓ | `clangd` | | cpp | ✓ | ✓ | ✓ | `clangd` |
| css | ✓ | | | | | css | ✓ | | | `vscode-css-language-server` |
| cue | ✓ | | | `cuelsp` |
| dart | ✓ | | ✓ | `dart` | | dart | ✓ | | ✓ | `dart` |
| devicetree | ✓ | | | |
| diff | ✓ | | | |
| dockerfile | ✓ | | | `docker-langserver` | | dockerfile | ✓ | | | `docker-langserver` |
| elixir | ✓ | | | `elixir-ls` | | dot | ✓ | | | `dot-language-server` |
| edoc | ✓ | | | |
| eex | ✓ | | | |
| ejs | ✓ | | | |
| elixir | ✓ | ✓ | | `elixir-ls` |
| elm | ✓ | | | `elm-language-server` | | elm | ✓ | | | `elm-language-server` |
| erlang | ✓ | | | `erlang_ls` | | elvish | ✓ | | | `elvish` |
| erb | ✓ | | | |
| erlang | ✓ | ✓ | | `erlang_ls` |
| esdl | ✓ | | | |
| fish | ✓ | ✓ | ✓ | | | fish | ✓ | ✓ | ✓ | |
| fortran | ✓ | | ✓ | `fortls` |
| gdscript | ✓ | ✓ | | |
| git-attributes | ✓ | | | |
| git-commit | ✓ | | | | | git-commit | ✓ | | | |
| git-config | ✓ | | | | | git-config | ✓ | | | |
| git-diff | ✓ | | | | | git-ignore | ✓ | | | |
| git-rebase | ✓ | | | | | git-rebase | ✓ | | | |
| gleam | ✓ | | | | | gleam | ✓ | | | `gleam` |
| glsl | ✓ | | ✓ | | | glsl | ✓ | | ✓ | |
| go | ✓ | ✓ | ✓ | `gopls` | | go | ✓ | ✓ | ✓ | `gopls` |
| godot-resource | ✓ | | | |
| gomod | ✓ | | | `gopls` |
| gotmpl | ✓ | | | `gopls` |
| gowork | ✓ | | | `gopls` |
| graphql | ✓ | | | | | graphql | ✓ | | | |
| hare | ✓ | | | |
| haskell | ✓ | | | `haskell-language-server-wrapper` | | haskell | ✓ | | | `haskell-language-server-wrapper` |
| hcl | ✓ | | ✓ | `terraform-ls` | | hcl | ✓ | | ✓ | `terraform-ls` |
| html | ✓ | | | | | heex | ✓ | ✓ | | |
| html | ✓ | | | `vscode-html-language-server` |
| idris | | | | `idris2-lsp` |
| iex | ✓ | | | | | iex | ✓ | | | |
| java | ✓ | | | | | java | ✓ | | | `jdtls` |
| javascript | ✓ | | ✓ | `typescript-language-server` | | javascript | ✓ | ✓ | ✓ | `typescript-language-server` |
| json | ✓ | | ✓ | | | jsdoc | ✓ | | | |
| json | ✓ | | ✓ | `vscode-json-language-server` |
| jsonnet | ✓ | | | `jsonnet-language-server` |
| jsx | ✓ | ✓ | ✓ | `typescript-language-server` |
| julia | ✓ | | | `julia` | | julia | ✓ | | | `julia` |
| kotlin | ✓ | | | `kotlin-language-server` | | kotlin | ✓ | | | `kotlin-language-server` |
| latex | ✓ | | | | | latex | ✓ | | | `texlab` |
| lean | ✓ | | | `lean` | | lean | ✓ | | | `lean` |
| ledger | ✓ | | | | | ledger | ✓ | | | |
| llvm | ✓ | ✓ | ✓ | | | llvm | ✓ | ✓ | ✓ | |
| llvm-mir | ✓ | ✓ | ✓ | | | llvm-mir | ✓ | ✓ | ✓ | |
| llvm-mir-yaml | ✓ | | ✓ | | | llvm-mir-yaml | ✓ | | ✓ | |
| lua | ✓ | | ✓ | | | lua | ✓ | | ✓ | `lua-language-server` |
| make | ✓ | | | | | make | ✓ | | | |
| markdown | ✓ | | | | | markdown | ✓ | | | `marksman` |
| markdown.inline | ✓ | | | |
| meson | ✓ | | ✓ | |
| mint | | | | `mint` | | mint | | | | `mint` |
| nix | ✓ | | ✓ | `rnix-lsp` | | nickel | ✓ | | ✓ | `nls` |
| ocaml | ✓ | | ✓ | | | nix | ✓ | | | `rnix-lsp` |
| ocaml-interface | ✓ | | | | | nu | ✓ | | | |
| ocaml | ✓ | | ✓ | `ocamllsp` |
| ocaml-interface | ✓ | | | `ocamllsp` |
| odin | ✓ | | | `ols` |
| openscad | ✓ | | | `openscad-lsp` |
| org | ✓ | | | | | org | ✓ | | | |
| pascal | ✓ | ✓ | | `pasls` |
| perl | ✓ | ✓ | ✓ | | | perl | ✓ | ✓ | ✓ | |
| php | ✓ | ✓ | ✓ | | | php | ✓ | ✓ | ✓ | `intelephense` |
| prisma | ✓ | | | `prisma-language-server` |
| prolog | | | | `swipl` | | prolog | | | | `swipl` |
| protobuf | ✓ | | ✓ | | | protobuf | ✓ | | ✓ | |
| python | ✓ | ✓ | ✓ | `pylsp` | | python | ✓ | ✓ | | `pylsp` |
| r | ✓ | | | `R` |
| racket | | | | `racket` | | racket | | | | `racket` |
| regex | ✓ | | | | | regex | ✓ | | | |
| rescript | ✓ | ✓ | | `rescript-language-server` | | rescript | ✓ | ✓ | | `rescript-language-server` |
| ruby | ✓ | | ✓ | `solargraph` | | rmarkdown | ✓ | | ✓ | `R` |
| ron | ✓ | | ✓ | |
| ruby | ✓ | ✓ | ✓ | `solargraph` |
| rust | ✓ | ✓ | ✓ | `rust-analyzer` | | rust | ✓ | ✓ | ✓ | `rust-analyzer` |
| scala | ✓ | | ✓ | `metals` | | scala | ✓ | | ✓ | `metals` |
| scheme | ✓ | | | |
| scss | ✓ | | | `vscode-css-language-server` |
| slint | ✓ | | ✓ | `slint-lsp` |
| sml | ✓ | | | |
| solidity | ✓ | | | `solc` | | solidity | ✓ | | | `solc` |
| svelte | ✓ | | ✓ | `svelteserver` | | sql | ✓ | | | |
| sshclientconfig | ✓ | | | |
| starlark | ✓ | ✓ | | |
| svelte | ✓ | | | `svelteserver` |
| swift | ✓ | | | `sourcekit-lsp` |
| tablegen | ✓ | ✓ | ✓ | | | tablegen | ✓ | ✓ | ✓ | |
| toml | ✓ | | | | | task | ✓ | | | |
| tfvars | | | | `terraform-ls` |
| toml | ✓ | | | `taplo` |
| tsq | ✓ | | | | | tsq | ✓ | | | |
| tsx | ✓ | | | `typescript-language-server` | | tsx | ✓ | | | `typescript-language-server` |
| twig | ✓ | | | | | twig | ✓ | | | |
| typescript | ✓ | | ✓ | `typescript-language-server` | | typescript | ✓ | ✓ | ✓ | `typescript-language-server` |
| vue | ✓ | | | | | ungrammar | ✓ | | | |
| wgsl | ✓ | | | | | v | ✓ | | | `vls` |
| yaml | ✓ | | ✓ | | | vala | ✓ | | | `vala-language-server` |
| verilog | ✓ | ✓ | | `svlangserver` |
| vue | ✓ | | | `vls` |
| wgsl | ✓ | | | `wgsl_analyzer` |
| xit | ✓ | | | |
| yaml | ✓ | | ✓ | `yaml-language-server` |
| zig | ✓ | | ✓ | `zls` | | zig | ✓ | | ✓ | `zls` |

@ -1,30 +1,33 @@
| Name | Description | | Name | Description |
| --- | --- | | --- | --- |
| `:quit`, `:q` | Close the current view. | | `:quit`, `:q` | Close the current view. |
| `:quit!`, `:q!` | Close the current view forcefully (ignoring unsaved changes). | | `:quit!`, `:q!` | Force close the current view, ignoring unsaved changes. |
| `:open`, `:o` | Open a file from disk into the current view. | | `:open`, `:o` | Open a file from disk into the current view. |
| `:buffer-close`, `:bc`, `:bclose` | Close the current buffer. | | `:buffer-close`, `:bc`, `:bclose` | Close the current buffer. |
| `:buffer-close!`, `:bc!`, `:bclose!` | Close the current buffer forcefully (ignoring unsaved changes). | | `:buffer-close!`, `:bc!`, `:bclose!` | Close the current buffer forcefully, ignoring unsaved changes. |
| `:buffer-close-others`, `:bco`, `:bcloseother` | Close all buffers but the currently focused one. | | `:buffer-close-others`, `:bco`, `:bcloseother` | Close all buffers but the currently focused one. |
| `:buffer-close-others!`, `:bco!`, `:bcloseother!` | Close all buffers but the currently focused one. | | `:buffer-close-others!`, `:bco!`, `:bcloseother!` | Force close all buffers but the currently focused one. |
| `:buffer-close-all`, `:bca`, `:bcloseall` | Close all buffers, without quiting. | | `:buffer-close-all`, `:bca`, `:bcloseall` | Close all buffers without quitting. |
| `:buffer-close-all!`, `:bca!`, `:bcloseall!` | Close all buffers forcefully (ignoring unsaved changes), without quiting. | | `:buffer-close-all!`, `:bca!`, `:bcloseall!` | Force close all buffers ignoring unsaved changes without quitting. |
| `:buffer-next`, `:bn`, `:bnext` | Goto next buffer. |
| `:buffer-previous`, `:bp`, `:bprev` | Goto previous buffer. |
| `:write`, `:w` | Write changes to disk. Accepts an optional path (:write some/path.txt) | | `:write`, `:w` | Write changes to disk. Accepts an optional path (:write some/path.txt) |
| `:write!`, `:w!` | Force write changes to disk creating necessary subdirectories. Accepts an optional path (:write some/path.txt) |
| `:new`, `:n` | Create a new scratch buffer. | | `:new`, `:n` | Create a new scratch buffer. |
| `:format`, `:fmt` | Format the file using the LSP formatter. | | `:format`, `:fmt` | Format the file using the LSP formatter. |
| `:indent-style` | Set the indentation style for editing. ('t' for tabs or 1-8 for number of spaces.) | | `:indent-style` | Set the indentation style for editing. ('t' for tabs or 1-8 for number of spaces.) |
| `:line-ending` | Set the document's default line ending. Options: crlf, lf, cr, ff, nel. | | `:line-ending` | Set the document's default line ending. Options: crlf, lf. |
| `:earlier`, `:ear` | Jump back to an earlier point in edit history. Accepts a number of steps or a time span. | | `:earlier`, `:ear` | Jump back to an earlier point in edit history. Accepts a number of steps or a time span. |
| `:later`, `:lat` | Jump to a later point in edit history. Accepts a number of steps or a time span. | | `:later`, `:lat` | Jump to a later point in edit history. Accepts a number of steps or a time span. |
| `:write-quit`, `:wq`, `:x` | Write changes to disk and close the current view. Accepts an optional path (:wq some/path.txt) | | `:write-quit`, `:wq`, `:x` | Write changes to disk and close the current view. Accepts an optional path (:wq some/path.txt) |
| `:write-quit!`, `:wq!`, `:x!` | Write changes to disk and close the current view forcefully. Accepts an optional path (:wq! some/path.txt) | | `:write-quit!`, `:wq!`, `:x!` | Write changes to disk and close the current view forcefully. Accepts an optional path (:wq! some/path.txt) |
| `:write-all`, `:wa` | Write changes from all views to disk. | | `:write-all`, `:wa` | Write changes from all buffers to disk. |
| `:write-quit-all`, `:wqa`, `:xa` | Write changes from all views to disk and close all views. | | `:write-quit-all`, `:wqa`, `:xa` | Write changes from all buffers to disk and close all views. |
| `:write-quit-all!`, `:wqa!`, `:xa!` | Write changes from all views to disk and close all views forcefully (ignoring unsaved changes). | | `:write-quit-all!`, `:wqa!`, `:xa!` | Write changes from all buffers to disk and close all views forcefully (ignoring unsaved changes). |
| `:quit-all`, `:qa` | Close all views. | | `:quit-all`, `:qa` | Close all views. |
| `:quit-all!`, `:qa!` | Close all views forcefully (ignoring unsaved changes). | | `:quit-all!`, `:qa!` | Force close all views ignoring unsaved changes. |
| `:cquit`, `:cq` | Quit with exit code (default 1). Accepts an optional integer exit code (:cq 2). | | `:cquit`, `:cq` | Quit with exit code (default 1). Accepts an optional integer exit code (:cq 2). |
| `:cquit!`, `:cq!` | Quit with exit code (default 1) forcefully (ignoring unsaved changes). Accepts an optional integer exit code (:cq! 2). | | `:cquit!`, `:cq!` | Force quit with exit code (default 1) ignoring unsaved changes. Accepts an optional integer exit code (:cq! 2). |
| `:theme` | Change the editor theme. | | `:theme` | Change the editor theme. |
| `:clipboard-yank` | Yank main selection into system clipboard. | | `:clipboard-yank` | Yank main selection into system clipboard. |
| `:clipboard-yank-join` | Yank joined selections into system clipboard. A separator can be provided as first argument. Default value is newline. | | `:clipboard-yank-join` | Yank joined selections into system clipboard. A separator can be provided as first argument. Default value is newline. |
@ -39,8 +42,9 @@
| `:show-clipboard-provider` | Show clipboard provider name in status bar. | | `:show-clipboard-provider` | Show clipboard provider name in status bar. |
| `:change-current-directory`, `:cd` | Change the current working directory. | | `:change-current-directory`, `:cd` | Change the current working directory. |
| `:show-directory`, `:pwd` | Show the current working directory. | | `:show-directory`, `:pwd` | Show the current working directory. |
| `:encoding` | Set encoding based on `https://encoding.spec.whatwg.org` | | `:encoding` | Set encoding. Based on `https://encoding.spec.whatwg.org`. |
| `:reload` | Discard changes and reload from the source file. | | `:reload` | Discard changes and reload from the source file. |
| `:lsp-restart` | Restarts the Language Server in use by the current document. |
| `:tree-sitter-scopes` | Display tree sitter scopes, primarily for theming and development. | | `:tree-sitter-scopes` | Display tree sitter scopes, primarily for theming and development. |
| `:debug-start`, `:dbg` | Start a debug session from a given template with given parameters. | | `:debug-start`, `:dbg` | Start a debug session from a given template with given parameters. |
| `:debug-remote`, `:dbg-tcp` | Connect to a debug adapter by TCP address and start a debugging session from a given template with given parameters. | | `:debug-remote`, `:dbg-tcp` | Connect to a debug adapter by TCP address and start a debugging session from a given template with given parameters. |
@ -50,11 +54,19 @@
| `:hsplit`, `:hs`, `:sp` | Open the file in a horizontal split. | | `:hsplit`, `:hs`, `:sp` | Open the file in a horizontal split. |
| `:hsplit-new`, `:hnew` | Open a scratch buffer in a horizontal split. | | `:hsplit-new`, `:hnew` | Open a scratch buffer in a horizontal split. |
| `:tutor` | Open the tutorial. | | `:tutor` | Open the tutorial. |
| `:goto`, `:g` | Go to line number. | | `:goto`, `:g` | Goto line number. |
| `:set-option`, `:set` | Set a config option at runtime | | `:set-language`, `:lang` | Set the language of current buffer. |
| `:set-option`, `:set` | Set a config option at runtime.<br>For example to disable smart case search, use `:set search.smart-case false`. |
| `:get-option`, `:get` | Get the current value of a config option. |
| `:sort` | Sort ranges in selection. | | `:sort` | Sort ranges in selection. |
| `:rsort` | Sort ranges in selection in reverse order. | | `:rsort` | Sort ranges in selection in reverse order. |
| `:reflow` | Hard-wrap the current selection of lines to a given width. |
| `:tree-sitter-subtree`, `:ts-subtree` | Display tree sitter subtree under cursor, primarily for debugging queries. | | `:tree-sitter-subtree`, `:ts-subtree` | Display tree sitter subtree under cursor, primarily for debugging queries. |
| `:config-reload` | Refreshes helix's config. | | `:config-reload` | Refresh user config. |
| `:config-open` | Open the helix config.toml file. | | `:config-open` | Open the user config.toml file. |
| `:log-open` | Open the helix log file. |
| `:insert-output` | Run shell command, inserting output after each selection. |
| `:append-output` | Run shell command, appending output after each selection. |
| `:pipe` | Pipe each selection to the shell command. |
| `:run-shell-command`, `:sh` | Run a shell command |
| `:help`, `:h` | Open documentation for a command or keybind. | | `:help`, `:h` | Open documentation for a command or keybind. |

@ -1,4 +1,4 @@
# Guides # Guides
This section contains guides for adding new language server configurations, This section contains guides for adding new language server configurations,
tree-sitter grammers, textobject queries, etc. tree-sitter grammars, textobject queries, etc.

@ -2,67 +2,23 @@
## Language configuration ## Language configuration
To add a new language, you need to add a `language` entry to the To add a new language, you need to add a `[[language]]` entry to the
[`languages.toml`][languages.toml] found in the root of the repository; `languages.toml` (see the [language configuration section]).
this `languages.toml` file is included at compilation time, and is
distinct from the `languages.toml` file in the user's [configuration
directory](../configuration.md).
```toml When adding a new language or Language Server configuration for an existing
[[language]] language, run `cargo xtask docgen` to add the new configuration to the
name = "mylang" [Language Support][lang-support] docs before creating a pull request.
scope = "scope.mylang" When adding a Language Server configuration, be sure to update the
injection-regex = "^mylang$" [Language Server Wiki][install-lsp-wiki] with installation notes.
file-types = ["mylang", "myl"]
comment-token = "#"
indent = { tab-width = 2, unit = " " }
```
These are the available keys and descriptions for the file.
| Key | Description |
| ---- | ----------- |
| `name` | The name of the language |
| `scope` | A string like `source.js` that identifies the language. Currently, we strive to match the scope names used by popular TextMate grammars and by the Linguist library. Usually `source.<name>` or `text.<name>` in case of markup languages |
| `injection-regex` | regex pattern that will be tested against a language name in order to determine whether this language should be used for a potential [language injection][treesitter-language-injection] site. |
| `file-types` | The filetypes of the language, for example `["yml", "yaml"]`. Extensions and full file names are supported. |
| `shebangs` | The interpreters from the shebang line, for example `["sh", "bash"]` |
| `roots` | A set of marker files to look for when trying to find the workspace root. For example `Cargo.lock`, `yarn.lock` |
| `auto-format` | Whether to autoformat this language when saving |
| `diagnostic-severity` | Minimal severity of diagnostic for it to be displayed. (Allowed values: `Error`, `Warning`, `Info`, `Hint`) |
| `comment-token` | The token to use as a comment-token |
| `indent` | The indent to use. Has sub keys `tab-width` and `unit` |
| `config` | Language server configuration |
| `grammar` | The tree-sitter grammar to use (defaults to the value of `name`) |
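
Put together, a minimal `[[language]]` entry, reassembled from the example interleaved above (the `mylang` values are placeholders from that example):

```toml
[[language]]
name = "mylang"
scope = "scope.mylang"
injection-regex = "^mylang$"
file-types = ["mylang", "myl"]
comment-token = "#"
indent = { tab-width = 2, unit = "  " }
```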
## Grammar configuration ## Grammar configuration
If a tree-sitter grammar is available for the language, add a new `grammar` If a tree-sitter grammar is available for the language, add a new `[[grammar]]`
entry to `languages.toml`. entry to `languages.toml`.
```toml You may use the `source.path` key rather than `source.git` with an absolute path
[[grammar]] to a locally available grammar for testing, but switch to `source.git` before
name = "mylang" submitting a pull request.
source = { git = "https://github.com/example/mylang", rev = "a250c4582510ff34767ec3b7dcdd3c24e8c8aa68" }
```
Grammar configuration takes these keys:
| Key | Description |
| --- | ----------- |
| `name` | The name of the tree-sitter grammar |
| `source` | The method of fetching the grammar - a table with a schema defined below |
Where `source` is a table with either these keys when using a grammar from a
git repository:
| Key | Description |
| --- | ----------- |
| `git` | A git remote URL from which the grammar should be cloned |
| `rev` | The revision (commit hash or tag) which should be fetched |
| `subpath` | A path within the grammar directory which should be built. Some grammar repositories host multiple grammars (for example `tree-sitter-typescript` and `tree-sitter-ocaml`) in subdirectories. This key is used to point `hx --grammar build` to the correct path for compilation. When omitted, the root of repository is used |
Or a `path` key with an absolute path to a locally available grammar directory.
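
Reassembled from the example interleaved above, a `[[grammar]]` entry with a git source might look like this (the URL and revision are the placeholder values from that example):

```toml
[[grammar]]
name = "mylang"
source = { git = "https://github.com/example/mylang", rev = "a250c4582510ff34767ec3b7dcdd3c24e8c8aa68" }
```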
## Queries ## Queries
@ -73,7 +29,7 @@ language with the path `runtime/queries/<name>/`. The tree-sitter
gives more info on how to write queries. gives more info on how to write queries.
> NOTE: When evaluating queries, the first matching query takes > NOTE: When evaluating queries, the first matching query takes
precedence, which is different from other editors like neovim where precedence, which is different from other editors like Neovim where
the last matching query supersedes the ones before it. See the last matching query supersedes the ones before it. See
[this issue][neovim-query-precedence] for an example. [this issue][neovim-query-precedence] for an example.
@ -83,8 +39,7 @@ the last matching query supersedes the ones before it. See
- If a parser is segfaulting or you want to remove the parser, make sure to remove the compiled parser in `runtime/grammar/<name>.so` - If a parser is segfaulting or you want to remove the parser, make sure to remove the compiled parser in `runtime/grammar/<name>.so`
- The indents query is `indents.toml`, *not* `indents.scm`. See [this](https://github.com/helix-editor/helix/issues/114) issue for more information. [language configuration section]: ../languages.md
[treesitter-language-injection]: https://tree-sitter.github.io/tree-sitter/syntax-highlighting#language-injection
[languages.toml]: https://github.com/helix-editor/helix/blob/master/languages.toml
[neovim-query-precedence]: https://github.com/helix-editor/helix/pull/1170#issuecomment-997294090 [neovim-query-precedence]: https://github.com/helix-editor/helix/pull/1170#issuecomment-997294090
[install-lsp-wiki]: https://github.com/helix-editor/helix/wiki/How-to-install-the-default-language-servers
[lang-support]: ../lang-support.md

@ -39,7 +39,7 @@ changed by using a `#set!` declaration anywhere in the pattern:
## Capture Types ## Capture Types
- `@indent` (default scope `tail`): - `@indent` (default scope `tail`):
Increase the indent level by 1. Multiple occurences in the same line Increase the indent level by 1. Multiple occurrences in the same line
don't stack. If there is at least one `@indent` and one `@outdent` don't stack. If there is at least one `@indent` and one `@outdent`
capture on the same line, the indent level isn't changed at all. capture on the same line, the indent level isn't changed at all.

@ -20,6 +20,8 @@ The following [captures][tree-sitter-captures] are recognized:
| `function.around` | | `function.around` |
| `class.inside` | | `class.inside` |
| `class.around` | | `class.around` |
| `test.inside` |
| `test.around` |
| `parameter.inside` | | `parameter.inside` |
| `comment.inside` | | `comment.inside` |
| `comment.around` | | `comment.around` |

@ -6,10 +6,9 @@ We provide pre-built binaries on the [GitHub Releases page](https://github.com/h
## OSX ## OSX
A Homebrew tap is available: Helix is available in homebrew-core:
``` ```
brew tap helix-editor/helix
brew install helix brew install helix
``` ```
@ -22,8 +21,12 @@ the project root. The flake can also be used to spin up a reproducible developme
shell for working on Helix with `nix develop`. shell for working on Helix with `nix develop`.
Flake outputs are cached for each push to master using Flake outputs are cached for each push to master using
[Cachix](https://www.cachix.org/). With Cachix [Cachix](https://www.cachix.org/). The flake is configured to
[installed](https://docs.cachix.org/installation), `cachix use helix` will automatically make use of this cache assuming the user accepts
the new settings on first use.
If you are using a version of Nix without flakes enabled you can
[install Cachix cli](https://docs.cachix.org/installation); `cachix use helix` will
configure Nix to use cached outputs when possible. configure Nix to use cached outputs when possible.
### Arch Linux ### Arch Linux
@ -41,6 +44,12 @@ sudo dnf copr enable varlad/helix
sudo dnf install helix sudo dnf install helix
``` ```
### Void Linux
```
sudo xbps-install helix
```
## Build from source ## Build from source
``` ```
@ -52,11 +61,31 @@ cargo install --path helix-term
This will install the `hx` binary to `$HOME/.cargo/bin`. This will install the `hx` binary to `$HOME/.cargo/bin`.
Helix also needs its runtime files so make sure to copy/symlink the `runtime/` directory into the Helix also needs its runtime files so make sure to copy/symlink the `runtime/` directory into the
config directory (for example `~/.config/helix/runtime` on Linux/macOS). This location can be overriden config directory (for example `~/.config/helix/runtime` on Linux/macOS). This location can be overridden
via the `HELIX_RUNTIME` environment variable. via the `HELIX_RUNTIME` environment variable.
## Building tree-sitter grammars | OS | command |
|-------------------|-----------|
| Windows (cmd.exe) | `xcopy /e /i runtime %AppData%/helix/runtime` |
| Windows (PowerShell) | `xcopy /e /i runtime $Env:AppData\helix\runtime` |
| Linux/macOS | `ln -s $PWD/runtime ~/.config/helix/runtime` |
## Finishing up the installation
To make sure everything is set up as expected, you should run the Helix healthcheck via
```
hx --health
```
For more information on the healthcheck results, refer to [Healthcheck](https://github.com/helix-editor/helix/wiki/Healthcheck).
### Building tree-sitter grammars
Tree-sitter grammars must be fetched and compiled if not pre-packaged. Tree-sitter grammars must be fetched and compiled if not pre-packaged.
Fetch grammars with `hx --grammar fetch` (requires `git`) and compile them Fetch grammars with `hx --grammar fetch` (requires `git`) and compile them
with `hx --grammar build` (requires a C compiler). with `hx --grammar build` (requires a C++ compiler).
### Installing language servers
Language servers can optionally be installed if you want their features (auto-complete, diagnostics, etc.).
Follow the [instructions on the wiki page](https://github.com/helix-editor/helix/wiki/How-to-install-the-default-language-servers) to add your language servers of choice.

@ -1,13 +1,33 @@
# Keymap # Keymap
- Mappings marked (**LSP**) require an active language server for the file. - [Normal mode](#normal-mode)
- Mappings marked (**TS**) require a tree-sitter grammar for the filetype. - [Movement](#movement)
- [Changes](#changes)
- [Shell](#shell)
- [Selection manipulation](#selection-manipulation)
- [Search](#search)
- [Minor modes](#minor-modes)
- [View mode](#view-mode)
- [Goto mode](#goto-mode)
- [Match mode](#match-mode)
- [Window mode](#window-mode)
- [Space mode](#space-mode)
- [Popup](#popup)
- [Unimpaired](#unimpaired)
- [Insert Mode](#insert-mode)
- [Select / extend mode](#select--extend-mode)
- [Picker](#picker)
- [Prompt](#prompt)
> 💡 Mappings marked (**LSP**) require an active language server for the file.
> 💡 Mappings marked (**TS**) require a tree-sitter grammar for the filetype.
## Normal mode ## Normal mode
### Movement ### Movement
> NOTE: Unlike vim, `f`, `F`, `t` and `T` are not confined to the current line. > NOTE: Unlike Vim, `f`, `F`, `t` and `T` are not confined to the current line.
| Key | Description | Command | | Key | Description | Command |
| ----- | ----------- | ------- | | ----- | ----------- | ------- |
@ -52,7 +72,7 @@
| `A` | Insert at the end of the line | `append_to_line` | | `A` | Insert at the end of the line | `append_to_line` |
| `o` | Open new line below selection | `open_below` | | `o` | Open new line below selection | `open_below` |
| `O` | Open new line above selection | `open_above` | | `O` | Open new line above selection | `open_above` |
| `.` | Repeat last change | N/A | | `.` | Repeat last insert | N/A |
| `u` | Undo change | `undo` | | `u` | Undo change | `undo` |
| `U` | Redo change | `redo` | | `U` | Redo change | `redo` |
| `Alt-u` | Move backward in history | `earlier` | | `Alt-u` | Move backward in history | `earlier` |
@ -105,16 +125,17 @@
| `Alt-(` | Rotate selection contents backward | `rotate_selection_contents_backward` | | `Alt-(` | Rotate selection contents backward | `rotate_selection_contents_backward` |
| `Alt-)` | Rotate selection contents forward | `rotate_selection_contents_forward` | | `Alt-)` | Rotate selection contents forward | `rotate_selection_contents_forward` |
| `%` | Select entire file | `select_all` | | `%` | Select entire file | `select_all` |
| `x` | Select current line, if already selected, extend to next line | `extend_line` | | `x` | Select current line, if already selected, extend to next line | `extend_line_below` |
| `X` | Extend selection to line bounds (line-wise selection) | `extend_to_line_bounds` | | `X` | Extend selection to line bounds (line-wise selection) | `extend_to_line_bounds` |
| `Alt-x` | Shrink selection to line bounds (line-wise selection) | `shrink_to_line_bounds` |
| `J` | Join lines inside selection | `join_selections` | | `J` | Join lines inside selection | `join_selections` |
| `K` | Keep selections matching the regex | `keep_selections` | | `K` | Keep selections matching the regex | `keep_selections` |
| `Alt-K` | Remove selections matching the regex | `remove_selections` | | `Alt-K` | Remove selections matching the regex | `remove_selections` |
| `Ctrl-c` | Comment/uncomment the selections | `toggle_comments` | | `Ctrl-c` | Comment/uncomment the selections | `toggle_comments` |
| `Alt-k`, `Alt-up` | Expand selection to parent syntax node (**TS**) | `expand_selection` | | `Alt-o`, `Alt-up` | Expand selection to parent syntax node (**TS**) | `expand_selection` |
| `Alt-j`, `Alt-down` | Shrink syntax tree object selection (**TS**) | `shrink_selection` | | `Alt-i`, `Alt-down` | Shrink syntax tree object selection (**TS**) | `shrink_selection` |
| `Alt-h`, `Alt-left` | Select previous sibling node in syntax tree (**TS**) | `select_prev_sibling` | | `Alt-p`, `Alt-left` | Select previous sibling node in syntax tree (**TS**) | `select_prev_sibling` |
| `Alt-l`, `Alt-right` | Select next sibling node in syntax tree (**TS**) | `select_next_sibling` | | `Alt-n`, `Alt-right` | Select next sibling node in syntax tree (**TS**) | `select_next_sibling` |
### Search ### Search
@ -191,7 +212,7 @@ Jumps to various locations.
#### Match mode #### Match mode
Enter this mode using `m` from normal mode. See the relavant section Enter this mode using `m` from normal mode. See the relevant section
in [Usage](./usage.md) for an explanation about [surround](./usage.md#surround) in [Usage](./usage.md) for an explanation about [surround](./usage.md#surround)
and [textobject](./usage.md#textobject) usage. and [textobject](./usage.md#textobject) usage.
@ -208,7 +229,7 @@ TODO: Mappings for selecting syntax nodes (a superset of `[`).
#### Window mode #### Window mode
This layer is similar to vim keybindings as kakoune does not support window. This layer is similar to Vim keybindings, as Kakoune does not support windows.
| Key | Description | Command | | Key | Description | Command |
| ----- | ------------- | ------- | | ----- | ------------- | ------- |
@ -223,6 +244,10 @@ This layer is similar to vim keybindings as kakoune does not support window.
| `l`, `Ctrl-l`, `Right` | Move to right split | `jump_view_right` | | `l`, `Ctrl-l`, `Right` | Move to right split | `jump_view_right` |
| `q`, `Ctrl-q` | Close current window | `wclose` | | `q`, `Ctrl-q` | Close current window | `wclose` |
| `o`, `Ctrl-o` | Only keep the current window, closing all the others | `wonly` | | `o`, `Ctrl-o` | Only keep the current window, closing all the others | `wonly` |
| `H` | Swap window to the left | `swap_view_left` |
| `J` | Swap window downwards | `swap_view_down` |
| `K` | Swap window upwards | `swap_view_up` |
| `L` | Swap window to the right | `swap_view_right` |
#### Space mode #### Space mode
@ -232,10 +257,14 @@ This layer is a kludge of mappings, mostly pickers.
| Key | Description | Command | | Key | Description | Command |
| ----- | ----------- | ------- | | ----- | ----------- | ------- |
| `f` | Open file picker | `file_picker` | | `f` | Open file picker | `file_picker` |
| `F` | Open file picker at current working directory | `file_picker_in_current_directory` |
| `b` | Open buffer picker | `buffer_picker` | | `b` | Open buffer picker | `buffer_picker` |
| `j` | Open jumplist picker | `jumplist_picker` |
| `k` | Show documentation for item under cursor in a [popup](#popup) (**LSP**) | `hover` | | `k` | Show documentation for item under cursor in a [popup](#popup) (**LSP**) | `hover` |
| `s` | Open document symbol picker (**LSP**) | `symbol_picker` | | `s` | Open document symbol picker (**LSP**) | `symbol_picker` |
| `S` | Open workspace symbol picker (**LSP**) | `workspace_symbol_picker` | | `S` | Open workspace symbol picker (**LSP**) | `workspace_symbol_picker` |
| `g` | Open document diagnostics picker (**LSP**) | `diagnostics_picker` |
| `G` | Open workspace diagnostics picker (**LSP**) | `workspace_diagnostics_picker` |
| `r` | Rename symbol (**LSP**) | `rename_symbol` | | `r` | Rename symbol (**LSP**) | `rename_symbol` |
| `a` | Apply code action (**LSP**) | `code_action` | | `a` | Apply code action (**LSP**) | `code_action` |
| `'` | Open last fuzzy picker | `last_picker` | | `'` | Open last fuzzy picker | `last_picker` |
@ -277,40 +306,61 @@ Mappings in the style of [vim-unimpaired](https://github.com/tpope/vim-unimpaire
| `[a` | Go to previous argument/parameter (**TS**) | `goto_prev_parameter` | | `[a` | Go to previous argument/parameter (**TS**) | `goto_prev_parameter` |
| `]o` | Go to next comment (**TS**) | `goto_next_comment` | | `]o` | Go to next comment (**TS**) | `goto_next_comment` |
| `[o` | Go to previous comment (**TS**) | `goto_prev_comment` | | `[o` | Go to previous comment (**TS**) | `goto_prev_comment` |
| `]t` | Go to next test (**TS**) | `goto_next_test` |
| `[t` | Go to previous test (**TS**) | `goto_prev_test` |
| `]p` | Go to next paragraph | `goto_next_paragraph` |
| `[p` | Go to previous paragraph | `goto_prev_paragraph` |
| `[space` | Add newline above | `add_newline_above` | | `[space` | Add newline above | `add_newline_above` |
| `]space` | Add newline below | `add_newline_below` | | `]space` | Add newline below | `add_newline_below` |
## Insert Mode ## Insert Mode
We support many readline/emacs style bindings in insert mode for Insert mode bindings are somewhat minimal by default. Helix is designed to
convenience. These can be helpful for making simple modifications be a modal editor, and this is reflected in the user experience and internal
without escaping to normal mode, but beware that you will not have an mechanics. For example, changes to the text are only saved for undos when
undo-able "save point" until you return to normal mode. escaping from insert mode to normal mode. For this reason, new users are
strongly encouraged to learn the modal editing paradigm to get the smoothest
experience.
| Key | Description | Command | | Key | Description | Command |
| ----- | ----------- | ------- | | ----- | ----------- | ------- |
| `Escape` | Switch to normal mode | `normal_mode` | | `Escape` | Switch to normal mode | `normal_mode` |
| `Ctrl-x` | Autocomplete | `completion` | | `Ctrl-x` | Autocomplete | `completion` |
| `Ctrl-r` | Insert a register content | `insert_register` | | `Ctrl-r` | Insert a register content | `insert_register` |
| `Ctrl-w`, `Alt-Backspace` | Delete previous word | `delete_word_backward` | | `Ctrl-w`, `Alt-Backspace`, `Ctrl-Backspace` | Delete previous word | `delete_word_backward` |
| `Alt-d` | Delete next word | `delete_word_forward` | | `Alt-d`, `Alt-Delete`, `Ctrl-Delete` | Delete next word | `delete_word_forward` |
| `Alt-b`, `Alt-Left` | Backward a word | `move_prev_word_end` |
| `Ctrl-b`, `Left` | Backward a char | `move_char_left` |
| `Alt-f`, `Alt-Right` | Forward a word | `move_next_word_start` |
| `Ctrl-f`, `Right` | Forward a char | `move_char_right` |
| `Ctrl-e`, `End` | Move to line end | `goto_line_end_newline` |
| `Ctrl-a`, `Home` | Move to line start | `goto_line_start` |
| `Ctrl-u` | Delete to start of line | `kill_to_line_start` | | `Ctrl-u` | Delete to start of line | `kill_to_line_start` |
| `Ctrl-k` | Delete to end of line | `kill_to_line_end` | | `Ctrl-k` | Delete to end of line | `kill_to_line_end` |
| `Ctrl-j`, `Enter` | Insert new line | `insert_newline` | | `Ctrl-j`, `Enter` | Insert new line | `insert_newline` |
| `Backspace`, `Ctrl-h` | Delete previous char | `delete_char_backward` | | `Backspace`, `Ctrl-h` | Delete previous char | `delete_char_backward` |
| `Delete`, `Ctrl-d` | Delete previous char | `delete_char_forward` | | `Delete`, `Ctrl-d` | Delete next char | `delete_char_forward` |
| `Ctrl-p`, `Up` | Move to previous line | `move_line_up` |
| `Ctrl-n`, `Down` | Move to next line | `move_line_down` | However, if you really want navigation in insert mode, this is supported. An
| `PageUp` | Move one page up | `page_up` | example config that gives the ability to use arrow keys while still in insert
| `PageDown` | Move one page down | `page_down` | mode:
| `Alt->` | Go to end of buffer | `goto_file_end` |
| `Alt-<` | Go to start of buffer | `goto_file_start` | ```toml
[keys.insert]
"up" = "move_line_up"
"down" = "move_line_down"
"left" = "move_char_left"
"right" = "move_char_right"
"C-b" = "move_char_left"
"C-f" = "move_char_right"
"A-b" = "move_prev_word_end"
"C-left" = "move_prev_word_end"
"A-f" = "move_next_word_start"
"C-right" = "move_next_word_start"
"A-<" = "goto_file_start"
"A->" = "goto_file_end"
"pageup" = "page_up"
"pagedown" = "page_down"
"home" = "goto_line_start"
"C-a" = "goto_line_start"
"end" = "goto_line_end_newline"
"C-e" = "goto_line_end_newline"
"A-left" = "goto_line_start"
```
## Select / extend mode ## Select / extend mode
@ -325,39 +375,40 @@ mode before pressing `n` or `N` makes it possible to keep the current
selection. Toggling it on and off during your iterative searching allows selection. Toggling it on and off during your iterative searching allows
you to selectively add search terms to your selections. you to selectively add search terms to your selections.
# Picker ## Picker
Keys to use within picker. Remapping currently not supported. Keys to use within picker. Remapping currently not supported.
| Key | Description | | Key | Description |
| ----- | ------------- | | ----- | ------------- |
| `Up`, `Ctrl-k`, `Ctrl-p` | Previous entry | | `Shift-Tab`, `Up`, `Ctrl-p` | Previous entry |
| `PageUp`, `Ctrl-b` | Page up | | `Tab`, `Down`, `Ctrl-n` | Next entry |
| `Down`, `Ctrl-j`, `Ctrl-n` | Next entry | | `PageUp`, `Ctrl-u` | Page up |
| `PageDown`, `Ctrl-f` | Page down | | `PageDown`, `Ctrl-d` | Page down |
| `Home` | Go to first entry | | `Home` | Go to first entry |
| `End` | Go to last entry | | `End` | Go to last entry |
| `Ctrl-space` | Filter options | | `Ctrl-space` | Filter options |
| `Enter` | Open selected | | `Enter` | Open selected |
| `Ctrl-s` | Open horizontally | | `Ctrl-s` | Open horizontally |
| `Ctrl-v` | Open vertically | | `Ctrl-v` | Open vertically |
| `Ctrl-t` | Toggle preview |
| `Escape`, `Ctrl-c` | Close picker | | `Escape`, `Ctrl-c` | Close picker |
# Prompt ## Prompt
Keys to use within prompt. Remapping currently not supported. Keys to use within prompt. Remapping currently not supported.
| Key | Description | | Key | Description |
| ----- | ------------- | | ----- | ------------- |
| `Escape`, `Ctrl-c` | Close prompt | | `Escape`, `Ctrl-c` | Close prompt |
| `Alt-b`, `Alt-Left` | Backward a word | | `Alt-b`, `Ctrl-Left` | Backward a word |
| `Ctrl-b`, `Left` | Backward a char | | `Ctrl-b`, `Left` | Backward a char |
| `Alt-f`, `Alt-Right` | Forward a word | | `Alt-f`, `Ctrl-Right` | Forward a word |
| `Ctrl-f`, `Right` | Forward a char | | `Ctrl-f`, `Right` | Forward a char |
| `Ctrl-e`, `End` | Move prompt end | | `Ctrl-e`, `End` | Move prompt end |
| `Ctrl-a`, `Home` | Move prompt start | | `Ctrl-a`, `Home` | Move prompt start |
| `Ctrl-w` | Delete previous word | | `Ctrl-w`, `Alt-Backspace`, `Ctrl-Backspace` | Delete previous word |
| `Alt-d` | Delete next word | | `Alt-d`, `Alt-Delete`, `Ctrl-Delete` | Delete next word |
| `Ctrl-u` | Delete to start of line | | `Ctrl-u` | Delete to start of line |
| `Ctrl-k` | Delete to end of line | | `Ctrl-k` | Delete to end of line |
| `backspace`, `Ctrl-h` | Delete previous char | | `backspace`, `Ctrl-h` | Delete previous char |
@ -365,7 +416,7 @@ Keys to use within prompt, Remapping currently not supported.
| `Ctrl-s` | Insert a word under doc cursor, may be changed to Ctrl-r Ctrl-w later | | `Ctrl-s` | Insert a word under doc cursor, may be changed to Ctrl-r Ctrl-w later |
| `Ctrl-p`, `Up` | Select previous history | | `Ctrl-p`, `Up` | Select previous history |
| `Ctrl-n`, `Down` | Select next history | | `Ctrl-n`, `Down` | Select next history |
| `Ctrl-r` | Insert the content of the register selected by following input char |
| `Tab` | Select next completion item | | `Tab` | Select next completion item |
| `BackTab` | Select previous completion item | | `BackTab` | Select previous completion item |
| `Enter` | Open selected | | `Enter` | Open selected |

@ -1,10 +1,16 @@
# Language Support # Language Support
For more information like arguments passed to default LSP server, The following languages and Language Servers are supported. In order to use
extensions assosciated with a filetype, custom LSP settings, filetype Language Server features, you must first [install][lsp-install-wiki] the
specific indent settings, etc see the default appropriate Language Server.
[`languages.toml`][languages.toml] file.
Check the language support in your installed helix version with `hx --health`.
Also see the [Language Configuration][lang-config] docs and the [Adding
Languages][adding-languages] guide for more language configuration information.
{{#include ./generated/lang-support.md}} {{#include ./generated/lang-support.md}}
[languages.toml]: https://github.com/helix-editor/helix/blob/master/languages.toml [lsp-install-wiki]: https://github.com/helix-editor/helix/wiki/How-to-install-the-default-language-servers
[lang-config]: ./languages.md
[adding-languages]: ./guides/adding_languages.md

@ -1,8 +1,17 @@
# Languages # Languages
Language-specific settings and settings for particular language servers can be configured in a `languages.toml` file placed in your [configuration directory](./configuration.md). Helix actually uses two `languages.toml` files, the [first one](https://github.com/helix-editor/helix/blob/master/languages.toml) is in the main helix repository; it contains the default settings for each language and is included in the helix binary at compile time. Users who want to see the available settings and options can either reference the helix repo's `languages.toml` file, or consult the table in the [adding languages](./guides/adding_languages.md) section. Language-specific settings and settings for language servers are configured
in `languages.toml` files.
Changes made to the `languages.toml` file in a user's [configuration directory](./configuration.md) are merged with helix's defaults on start-up, such that a user's settings will take precedence over defaults in the event of a collision. For example, the default `languages.toml` sets rust's `auto-format` to `true`. If a user wants to disable auto-format, they can change the `languages.toml` in their [configuration directory](./configuration.md) to make the rust entry read like the example below; the new key/value pair `auto-format = false` will override the default when the two sets of settings are merged on start-up: ## `languages.toml` files
There are three possible `languages.toml` files. The first is compiled into
Helix and lives in the [Helix repository](https://github.com/helix-editor/helix/blob/master/languages.toml).
This provides the default configurations for languages and language servers.
You may define a `languages.toml` in your [configuration directory](./configuration.md)
which overrides values from the built-in language configuration. For example
to disable auto-LSP-formatting in Rust:
```toml ```toml
# in <config_dir>/helix/languages.toml # in <config_dir>/helix/languages.toml
@ -12,23 +21,103 @@ name = "rust"
auto-format = false auto-format = false
``` ```
## Tree-sitter grammars Language configuration may also be overridden locally for a project by creating
a `languages.toml` file under a `.helix` directory. Its settings will be merged
with the language configuration in the configuration directory and the built-in
configuration.
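For instance, a minimal per-project sketch mirroring the earlier example (the language and setting are illustrative only):

```toml
# in <project-root>/.helix/languages.toml (path assumed from the paragraph above)
[[language]]
name = "rust"
# overrides both the built-in and user-level values for this project only
auto-format = false
```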
Tree-sitter grammars can also be configured in `languages.toml`: ## Language configuration
Each language is configured by adding a `[[language]]` section to a
`languages.toml` file. For example:
```toml ```toml
# in <config_dir>/helix/languages.toml [[language]]
name = "mylang"
scope = "source.mylang"
injection-regex = "^mylang$"
file-types = ["mylang", "myl"]
comment-token = "#"
indent = { tab-width = 2, unit = " " }
language-server = { command = "mylang-lsp", args = ["--stdio"] }
formatter = { command = "mylang-formatter" , args = ["--stdin"] }
```
[[grammar]] These configuration keys are available:
name = "rust"
source = { git = "https://github.com/tree-sitter/tree-sitter-rust", rev = "a250c4582510ff34767ec3b7dcdd3c24e8c8aa68" } | Key | Description |
| ---- | ----------- |
| `name` | The name of the language |
| `scope` | A string like `source.js` that identifies the language. Currently, we strive to match the scope names used by popular TextMate grammars and by the Linguist library. Usually `source.<name>` or `text.<name>` in case of markup languages |
| `injection-regex` | regex pattern that will be tested against a language name in order to determine whether this language should be used for a potential [language injection][treesitter-language-injection] site. |
| `file-types` | The filetypes of the language, for example `["yml", "yaml"]`. Extensions and full file names are supported. |
| `shebangs` | The interpreters from the shebang line, for example `["sh", "bash"]` |
| `roots` | A set of marker files to look for when trying to find the workspace root. For example `Cargo.lock`, `yarn.lock` |
| `auto-format` | Whether to autoformat this language when saving |
| `diagnostic-severity` | Minimum severity a diagnostic must have in order to be displayed. (Allowed values: `Error`, `Warning`, `Info`, `Hint`) |
| `comment-token` | The token to use for comments (for example `//` or `#`) |
| `indent` | The indent to use. Has sub keys `tab-width` and `unit` |
| `language-server` | The Language Server to run. See the Language Server configuration section below. |
| `config` | Language Server configuration |
| `grammar` | The tree-sitter grammar to use (defaults to the value of `name`) |
| `formatter` | The formatter for the language; it takes precedence over the LSP formatter when defined. The formatter must be able to take the original file as input from stdin and write the formatted file to stdout |
| `max-line-length` | Maximum line length. Used for the `:reflow` command |
### Language Server configuration
The `language-server` field takes the following keys:
| Key | Description |
| --- | ----------- |
| `command` | The name of the language server binary to execute. Binaries must be in `$PATH` |
| `args` | A list of arguments to pass to the language server binary |
| `timeout` | The maximum time a request to the language server may take, in seconds. Defaults to `20` |
| `language-id` | The language name to pass to the language server. Some language servers support multiple languages and use this field to determine which one is being served in a buffer |
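A hedged sketch of these keys used together (the binary name, arguments, and values are placeholders, not recommendations):

```toml
[[language]]
name = "mylang"
# every value below is an illustrative placeholder
language-server = { command = "mylang-lsp", args = ["--stdio"], timeout = 30, language-id = "mylang" }
```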
The top-level `config` field is used to configure the LSP initialization options. A `format`
sub-table within `config` can be used to pass extra formatting options to
[Document Formatting Requests](https://github.com/microsoft/language-server-protocol/blob/gh-pages/_specifications/specification-3-16.md#document-formatting-request--leftwards_arrow_with_hook).
For example with typescript:
```toml
[[language]]
name = "typescript"
auto-format = true
# pass format options according to https://github.com/typescript-language-server/typescript-language-server#workspacedidchangeconfiguration omitting the "[language].format." prefix.
config = { format = { "semicolons" = "insert", "insertSpaceBeforeFunctionParenthesis" = true } }
```
## Tree-sitter grammar configuration
The source for a language's tree-sitter grammar is specified in a `[[grammar]]`
section in `languages.toml`. For example:
```toml
[[grammar]] [[grammar]]
name = "c" name = "mylang"
source = { path = "/path/to/tree-sitter-c" } source = { git = "https://github.com/example/mylang", rev = "a250c4582510ff34767ec3b7dcdd3c24e8c8aa68" }
``` ```
You may use a top-level `use-grammars` key to control which grammars are fetched and built. Grammar configuration takes these keys:
| Key | Description |
| --- | ----------- |
| `name` | The name of the tree-sitter grammar |
| `source` | The method of fetching the grammar - a table with a schema defined below |
Where `source` is a table with either these keys when using a grammar from a
git repository:
| Key | Description |
| --- | ----------- |
| `git` | A git remote URL from which the grammar should be cloned |
| `rev` | The revision (commit hash or tag) which should be fetched |
| `subpath` | A path within the grammar directory which should be built. Some grammar repositories host multiple grammars (for example `tree-sitter-typescript` and `tree-sitter-ocaml`) in subdirectories. This key is used to point `hx --grammar build` to the correct path for compilation. When omitted, the root of the repository is used |
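For instance, a grammar hosted in a subdirectory of its repository might be declared as below (the revision is a placeholder, not a pinned value):

```toml
[[grammar]]
name = "typescript"
# `subpath` points `hx --grammar build` at the subdirectory containing this grammar;
# replace <commit-sha> with the revision you actually want to pin
source = { git = "https://github.com/tree-sitter/tree-sitter-typescript", rev = "<commit-sha>", subpath = "typescript" }
```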
### Choosing grammars
You may use a top-level `use-grammars` key to control which grammars are
fetched and built when using `hx --grammar fetch` and `hx --grammar build`.
```toml ```toml
# Note: this key must come **before** the [[language]] and [[grammar]] sections # Note: this key must come **before** the [[language]] and [[grammar]] sections
@ -38,3 +127,5 @@ use-grammars = { except = [ "yaml", "json" ] }
``` ```
When omitted, all grammars are fetched and built. When omitted, all grammars are fetched and built.
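If you would rather list the grammars to include than the ones to exclude, the same key can take an `only` list. This is a sketch; the grammar names are arbitrary and support for `only` is an assumption here, so check your version:

```toml
# assumed alternative form of the use-grammars key; grammar names are arbitrary
use-grammars = { only = [ "rust", "markdown", "toml" ] }
```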
[treesitter-language-injection]: https://tree-sitter.github.io/tree-sitter/syntax-highlighting#language-injection

@ -33,12 +33,7 @@ Control, Shift and Alt modifiers are encoded respectively with the prefixes
| Backspace | `"backspace"` | | Backspace | `"backspace"` |
| Space | `"space"` | | Space | `"space"` |
| Return/Enter | `"ret"` | | Return/Enter | `"ret"` |
| < | `"lt"` |
| \> | `"gt"` |
| \+ | `"plus"` |
| \- | `"minus"` | | \- | `"minus"` |
| ; | `"semicolon"` |
| % | `"percent"` |
| Left | `"left"` | | Left | `"left"` |
| Right | `"right"` | | Right | `"right"` |
| Up | `"up"` | | Up | `"up"` |

@ -37,8 +37,8 @@ configuration values in your theme. To do this, add a table called
`palette` to your theme file: `palette` to your theme file:
```toml ```toml
ui.background = "white" "ui.background" = "white"
ui.text = "black" "ui.text" = "black"
[palette] [palette]
white = "#ffffff" white = "#ffffff"
@ -103,6 +103,8 @@ We use a similar set of scopes as
[SublimeText](https://www.sublimetext.com/docs/scope_naming.html). See also [SublimeText](https://www.sublimetext.com/docs/scope_naming.html). See also
[TextMate](https://macromates.com/manual/en/language_grammars) scopes. [TextMate](https://macromates.com/manual/en/language_grammars) scopes.
- `attribute` - Class attributes, HTML tag attributes
- `type` - Types - `type` - Types
- `builtin` - Primitive types provided by the language (`int`, `usize`) - `builtin` - Primitive types provided by the language (`int`, `usize`)
- `constructor` - `constructor`
@ -133,13 +135,13 @@ We use a similar set of scopes as
- `parameter` - Function parameters - `parameter` - Function parameters
- `other` - `other`
- `member` - Fields of composite data types (e.g. structs, unions) - `member` - Fields of composite data types (e.g. structs, unions)
- `function` (TODO: ?)
- `label` - `label`
- `punctuation` - `punctuation`
- `delimiter` - Commas, colons - `delimiter` - Commas, colons
- `bracket` - Parentheses, angle brackets, etc. - `bracket` - Parentheses, angle brackets, etc.
- `special` - String interpolation brackets.
- `keyword` - `keyword`
- `control` - `control`
@ -151,6 +153,9 @@ We use a similar set of scopes as
- `operator` - `or`, `in` - `operator` - `or`, `in`
- `directive` - Preprocessor directives (`#if` in C) - `directive` - Preprocessor directives (`#if` in C)
- `function` - `fn`, `func` - `function` - `fn`, `func`
- `storage` - Keywords describing how things are stored
- `type` - The type of something, `class`, `function`, `var`, `let`, etc.
- `modifier` - Storage modifiers like `static`, `mut`, `const`, `ref`, etc.
- `operator` - `||`, `+=`, `>` - `operator` - `||`, `+=`, `>`
@ -158,7 +163,7 @@ We use a similar set of scopes as
- `builtin` - `builtin`
- `method` - `method`
- `macro` - `macro`
- `special` (preprocesor in C) - `special` (preprocessor in C)
- `tag` - Tags (e.g. `<body>` in HTML) - `tag` - Tags (e.g. `<body>` in HTML)
@ -208,29 +213,51 @@ These scopes are used for theming the editor interface.
| Key | Notes | | Key | Notes |
| --- | --- | | --- | --- |
| `ui.background` | | | `ui.background` | |
| `ui.background.separator` | Picker separator below input line |
| `ui.cursor` | | | `ui.cursor` | |
| `ui.cursor.insert` | | | `ui.cursor.insert` | |
| `ui.cursor.select` | | | `ui.cursor.select` | |
| `ui.cursor.match` | Matching bracket etc. | | `ui.cursor.match` | Matching bracket etc. |
| `ui.cursor.primary` | Cursor with primary selection | | `ui.cursor.primary` | Cursor with primary selection |
| `ui.linenr` | | | `ui.linenr` | Line numbers |
| `ui.linenr.selected` | | | `ui.linenr.selected` | Line number for the line the cursor is on |
| `ui.statusline` | Statusline | | `ui.statusline` | Statusline |
| `ui.statusline.inactive` | Statusline (unfocused document) | | `ui.statusline.inactive` | Statusline (unfocused document) |
| `ui.popup` | | | `ui.statusline.normal` | Statusline mode during normal mode ([only if `editor.color-modes` is enabled][editor-section]) |
| `ui.popup.info` | | | `ui.statusline.insert` | Statusline mode during insert mode ([only if `editor.color-modes` is enabled][editor-section]) |
| `ui.window` | | | `ui.statusline.select` | Statusline mode during select mode ([only if `editor.color-modes` is enabled][editor-section]) |
| `ui.help` | | | `ui.statusline.separator` | Separator character in statusline |
| `ui.text` | | | `ui.popup` | Documentation popups (e.g. `Space-k`) |
| `ui.popup.info` | Prompt for multiple key options |
| `ui.window` | Border lines separating splits |
| `ui.help` | Description box for commands |
| `ui.text` | Command prompts, popup text, etc. |
| `ui.text.focus` | | | `ui.text.focus` | |
| `ui.text.info` | | | `ui.text.info` | The key: command text in `ui.popup.info` boxes |
| `ui.menu` | | | `ui.virtual.ruler` | Ruler columns (see the [`editor.rulers` config][editor-section]) |
| `ui.menu.selected` | | | `ui.virtual.whitespace` | Visible white-space characters |
| `ui.virtual.indent-guide` | Vertical indent width guides |
| `ui.menu` | Code and command completion menus |
| `ui.menu.selected` | Selected autocomplete item |
| `ui.menu.scroll` | `fg` sets thumb color, `bg` sets track color of scrollbar |
| `ui.selection` | For selections in the editing area | | `ui.selection` | For selections in the editing area |
| `ui.selection.primary` | | | `ui.selection.primary` | |
| `ui.cursorline.primary` | The line of the primary cursor |
| `ui.cursorline.secondary` | The lines of any other cursors |
| `warning` | Diagnostics warning (gutter) | | `warning` | Diagnostics warning (gutter) |
| `error` | Diagnostics error (gutter) | | `error` | Diagnostics error (gutter) |
| `info` | Diagnostics info (gutter) | | `info` | Diagnostics info (gutter) |
| `hint` | Diagnostics hint (gutter) | | `hint` | Diagnostics hint (gutter) |
| `diagnostic` | For text in editing area | | `diagnostic` | Diagnostics fallback style (editing area) |
| `diagnostic.hint` | Diagnostics hint (editing area) |
| `diagnostic.info` | Diagnostics info (editing area) |
| `diagnostic.warning` | Diagnostics warning (editing area) |
| `diagnostic.error` | Diagnostics error (editing area) |
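As a loose sketch, a theme might style some of the newly documented keys like this (all colors are arbitrary):

```toml
# arbitrary colors, purely illustrative
"ui.statusline.normal" = { fg = "black", bg = "#5f87af" }
"ui.statusline.insert" = { fg = "black", bg = "#87af87" }
"ui.cursorline.primary" = { bg = "#2a2a2a" }
"ui.virtual.ruler" = { bg = "#363636" }
"ui.menu.scroll" = { fg = "white", bg = "#3b3b3b" }
```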
You can check a theme's compliance with the spec by running:
```shell
cargo xtask themelint onedark # replace onedark with <name>
```
[editor-section]: ./configuration.md#editor-section

@ -6,6 +6,7 @@ Docs for bleeding edge master can be found at
See the [usage] section for a quick overview of the editor, [keymap] See the [usage] section for a quick overview of the editor, [keymap]
section for all available keybindings and the [configuration] section section for all available keybindings and the [configuration] section
for defining custom keybindings, setting themes, etc. for defining custom keybindings, setting themes, etc.
For everything else (e.g., how to install supported language servers), see the [Helix Wiki].
Refer to the [FAQ] for common questions. Refer to the [FAQ] for common questions.
@ -13,3 +14,4 @@ Refer the [FAQ] for common questions.
[usage]: ./usage.md [usage]: ./usage.md
[keymap]: ./keymap.md [keymap]: ./keymap.md
[configuration]: ./configuration.md [configuration]: ./configuration.md
[Helix Wiki]: https://github.com/helix-editor/helix/wiki

@ -2,7 +2,7 @@
(Currently not fully documented, see the [keymappings](./keymap.md) list for more.) (Currently not fully documented, see the [keymappings](./keymap.md) list for more.)
See [tutor.txt](https://github.com/helix-editor/helix/blob/master/runtime/tutor.txt) (accessible via `hx --tutor` or `:tutor`) for a vimtutor-like introduction. See [tutor](https://github.com/helix-editor/helix/blob/master/runtime/tutor) (accessible via `hx --tutor` or `:tutor`) for a vimtutor-like introduction.
## Registers ## Registers
@ -51,25 +51,98 @@ It can also act on multiple selections (yay!). For example, to change every occu
Multiple characters are currently not supported, but planned. Multiple characters are currently not supported, but planned.
## Textobjects ## Syntax-tree Motions
`A-p`, `A-o`, `A-i`, and `A-n` (or `Alt` and arrow keys) move the primary
selection according to the selection's place in the syntax tree. Let's walk
through an example to get familiar with them. Many languages have a syntax like
so for function calls:
```
func(arg1, arg2, arg3)
```
A function call might be parsed by tree-sitter into a tree like the following.
```tsq
(call
function: (identifier) ; func
arguments:
(arguments ; (arg1, arg2, arg3)
(identifier) ; arg1
(identifier) ; arg2
(identifier))) ; arg3
```
Use `:tree-sitter-subtree` to view the syntax tree of the primary selection. Here is
the same tree in a more intuitive format:
```
┌────┐
│call│
┌─────┴────┴─────┐
│ │
┌─────▼────┐ ┌────▼────┐
│identifier│ │arguments│
│ "func" │ ┌────┴───┬─────┴───┐
└──────────┘ │ │ │
│ │ │
┌─────────▼┐ ┌────▼─────┐ ┌▼─────────┐
│identifier│ │identifier│ │identifier│
│ "arg1" │ │ "arg2" │ │ "arg3" │
└──────────┘ └──────────┘ └──────────┘
```
Say we have a selection that wraps `arg1`. The selection is on the `arg1` leaf
in the tree above.
```
func([arg1], arg2, arg3)
```
Using `A-n` would select the next sibling in the syntax tree: `arg2`.
```
func(arg1, [arg2], arg3)
```
`A-o`, on the other hand, would expand the selection to the parent node. In the tree
above we can see that we would select the `arguments` node.
```
func[(arg1, arg2, arg3)]
```
There is also some nuanced behavior that prevents you from getting stuck on a
node with no sibling. If we have a selection on `arg1`, `A-p` would bring us
to the previous child node. Since `arg1` doesn't have a sibling to its left,
though, we climb the syntax tree and then take the previous selection. So `A-p`
will move the selection over to the "func" `identifier`.
```
[func](arg1, arg2, arg3)
```
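These motions are ordinary commands, so they can be rebound like any other key. A minimal sketch follows; the command names `expand_selection`, `shrink_selection`, `select_prev_sibling`, and `select_next_sibling` are assumed from the default keymap (not shown in this section), and the replacement keys are arbitrary:

```toml
[keys.normal]
# arbitrary alternative bindings for the syntax-tree motions
"A-k" = "expand_selection"    # like A-o
"A-j" = "shrink_selection"    # like A-i
"A-h" = "select_prev_sibling" # like A-p
"A-l" = "select_next_sibling" # like A-n
```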
Currently supported: `word`, `surround`, `function`, `class`, `parameter`. ## Textobjects
![textobject-demo](https://user-images.githubusercontent.com/23398472/124231131-81a4bb00-db2d-11eb-9d10-8e577ca7b177.gif) ![textobject-demo](https://user-images.githubusercontent.com/23398472/124231131-81a4bb00-db2d-11eb-9d10-8e577ca7b177.gif)
![textobject-treesitter-demo](https://user-images.githubusercontent.com/23398472/132537398-2a2e0a54-582b-44ab-a77f-eb818942203d.gif) ![textobject-treesitter-demo](https://user-images.githubusercontent.com/23398472/132537398-2a2e0a54-582b-44ab-a77f-eb818942203d.gif)
- `ma` - Select around the object (`va` in vim, `<alt-a>` in kakoune) - `ma` - Select around the object (`va` in Vim, `<alt-a>` in Kakoune)
- `mi` - Select inside the object (`vi` in vim, `<alt-i>` in kakoune) - `mi` - Select inside the object (`vi` in Vim, `<alt-i>` in Kakoune)
| Key after `mi` or `ma` | Textobject selected | | Key after `mi` or `ma` | Textobject selected |
| --- | --- | | --- | --- |
| `w` | Word | | `w` | Word |
| `W` | WORD | | `W` | WORD |
| `p` | Paragraph |
| `(`, `[`, `'`, etc | Specified surround pairs | | `(`, `[`, `'`, etc | Specified surround pairs |
| `m` | Closest surround pair |
| `f` | Function | | `f` | Function |
| `c` | Class | | `c` | Class |
| `a` | Argument/parameter | | `a` | Argument/parameter |
| `o` | Comment | | `o` | Comment |
| `t` | Test |
> NOTE: `f`, `c`, etc need a tree-sitter grammar active for the current > NOTE: `f`, `c`, etc need a tree-sitter grammar active for the current
document and a special tree-sitter query file to work properly. [Only document and a special tree-sitter query file to work properly. [Only

@ -601,10 +601,10 @@ function playground_text(playground) {
}); });
})(); })();
(function controllMenu() { (function controlMenu() {
var menu = document.getElementById('menu-bar'); var menu = document.getElementById('menu-bar');
(function controllPosition() { (function controlPosition() {
var scrollTop = document.scrollingElement.scrollTop; var scrollTop = document.scrollingElement.scrollTop;
var prevScrollTop = scrollTop; var prevScrollTop = scrollTop;
var minMenuY = -menu.clientHeight - 50; var minMenuY = -menu.clientHeight - 50;
@ -647,7 +647,7 @@ function playground_text(playground) {
prevScrollTop = scrollTop; prevScrollTop = scrollTop;
}, { passive: true }); }, { passive: true });
})(); })();
(function controllBorder() { (function controlBorder() {
menu.classList.remove('bordered'); menu.classList.remove('bordered');
document.addEventListener('scroll', function () { document.addEventListener('scroll', function () {
if (menu.offsetTop === 0) { if (menu.offsetTop === 0) {

@ -390,7 +390,7 @@ ul#searchresults span.teaser em {
.chapter li { .chapter li {
display: flex; display: flex;
color: var(--sidebar-non-existant); color: var(--sidebar-non-existent);
} }
.chapter li a { .chapter li a {
display: block; display: block;

@ -16,7 +16,7 @@
--sidebar-bg: #14191f; --sidebar-bg: #14191f;
--sidebar-fg: #c8c9db; --sidebar-fg: #c8c9db;
--sidebar-non-existant: #5c6773; --sidebar-non-existent: #5c6773;
--sidebar-active: #ffb454; --sidebar-active: #ffb454;
--sidebar-spacer: #2d334f; --sidebar-spacer: #2d334f;
@ -56,7 +56,7 @@
--sidebar-bg: #292c2f; --sidebar-bg: #292c2f;
--sidebar-fg: #a1adb8; --sidebar-fg: #a1adb8;
--sidebar-non-existant: #505254; --sidebar-non-existent: #505254;
--sidebar-active: #3473ad; --sidebar-active: #3473ad;
--sidebar-spacer: #393939; --sidebar-spacer: #393939;
@ -96,7 +96,7 @@
--sidebar-bg: #fafafa; --sidebar-bg: #fafafa;
--sidebar-fg: hsl(0, 0%, 0%); --sidebar-fg: hsl(0, 0%, 0%);
--sidebar-non-existant: #aaaaaa; --sidebar-non-existent: #aaaaaa;
--sidebar-active: #1f1fff; --sidebar-active: #1f1fff;
--sidebar-spacer: #f4f4f4; --sidebar-spacer: #f4f4f4;
@ -136,7 +136,7 @@
--sidebar-bg: #282d3f; --sidebar-bg: #282d3f;
--sidebar-fg: #c8c9db; --sidebar-fg: #c8c9db;
--sidebar-non-existant: #505274; --sidebar-non-existent: #505274;
--sidebar-active: #2b79a2; --sidebar-active: #2b79a2;
--sidebar-spacer: #2d334f; --sidebar-spacer: #2d334f;
@ -176,7 +176,7 @@
--sidebar-bg: #3b2e2a; --sidebar-bg: #3b2e2a;
--sidebar-fg: #c8c9db; --sidebar-fg: #c8c9db;
--sidebar-non-existant: #505254; --sidebar-non-existent: #505254;
--sidebar-active: #e69f67; --sidebar-active: #e69f67;
--sidebar-spacer: #45373a; --sidebar-spacer: #45373a;
@ -217,7 +217,7 @@
--sidebar-bg: #292c2f; --sidebar-bg: #292c2f;
--sidebar-fg: #a1adb8; --sidebar-fg: #a1adb8;
--sidebar-non-existant: #505254; --sidebar-non-existent: #505254;
--sidebar-active: #3473ad; --sidebar-active: #3473ad;
--sidebar-spacer: #393939; --sidebar-spacer: #393939;
@ -259,7 +259,7 @@
--sidebar-bg: #281733; --sidebar-bg: #281733;
--sidebar-fg: #c8c9db; --sidebar-fg: #c8c9db;
--sidebar-non-existant: #505274; --sidebar-non-existent: #505274;
--sidebar-active: #a4a0e8; --sidebar-active: #a4a0e8;
--sidebar-spacer: #2d334f; --sidebar-spacer: #2d334f;
@ -304,7 +304,7 @@
--sidebar-bg: #281733; --sidebar-bg: #281733;
--sidebar-fg: #c8c9db; --sidebar-fg: #c8c9db;
--sidebar-non-existant: #505274; --sidebar-non-existent: #505274;
--sidebar-active: #a4a0e8; --sidebar-active: #a4a0e8;
--sidebar-spacer: #2d334f; --sidebar-spacer: #2d334f;

@ -0,0 +1,89 @@
[Desktop Entry]
Name=Helix
GenericName=Text Editor
GenericName[de]=Texteditor
GenericName[fr]=Éditeur de texte
GenericName[ru]=Текстовый редактор
GenericName[sr]=Едитор текст
GenericName[tr]=Metin Düzenleyici
Comment=Edit text files
Comment[af]=Redigeer tekslêers
Comment[am]=የጽሑፍ ፋይሎች ያስተካክሉ
Comment[ar]=حرّر ملفات نصية
Comment[az]=Mətn fayllarını redaktə edin
Comment[be]=Рэдагаваньне тэкставых файлаў
Comment[bg]=Редактиране на текстови файлове
Comment[bn]=টেক্স্ট ফাইল এডিট করুন
Comment[bs]=Izmijeni tekstualne datoteke
Comment[ca]=Edita fitxers de text
Comment[cs]=Úprava textových souborů
Comment[cy]=Golygu ffeiliau testun
Comment[da]=Redigér tekstfiler
Comment[de]=Textdateien bearbeiten
Comment[el]=Επεξεργασία αρχείων κειμένου
Comment[en_CA]=Edit text files
Comment[en_GB]=Edit text files
Comment[es]=Edita archivos de texto
Comment[et]=Redigeeri tekstifaile
Comment[eu]=Editatu testu-fitxategiak
Comment[fa]=ویرایش پرونده‌های متنی
Comment[fi]=Muokkaa tekstitiedostoja
Comment[fr]=Éditer des fichiers texte
Comment[ga]=Eagar comhad Téacs
Comment[gu]=લખાણ ફાઇલોમાં ફેરફાર કરો
Comment[he]=ערוך קבצי טקסט
Comment[hi]=पाठ फ़ाइलें संपादित करें
Comment[hr]=Uređivanje tekstualne datoteke
Comment[hu]=Szövegfájlok szerkesztése
Comment[id]=Edit file teks
Comment[it]=Modifica file di testo
Comment[ja]=テキストファイルを編集します
Comment[kn]=ಪಠ್ಯ ಕಡತಗಳನ್ನು ಸಂಪಾದಿಸು
Comment[ko]=텍스트 파일을 편집합니다
Comment[lt]=Redaguoti tekstines bylas
Comment[lv]=Rediģēt teksta failus
Comment[mk]=Уреди текстуални фајлови
Comment[ml]=വാചക രചനകള് തിരുത്തുക
Comment[mn]=Текст файл боловсруулах
Comment[mr]=गद्य फाइल संपादित करा
Comment[ms]=Edit fail teks
Comment[nb]=Rediger tekstfiler
Comment[ne]=पाठ फाइललाई संशोधन गर्नुहोस्
Comment[nl]=Tekstbestanden bewerken
Comment[nn]=Rediger tekstfiler
Comment[no]=Rediger tekstfiler
Comment[or]=ପାଠ୍ଯ ଫାଇଲଗୁଡ଼ିକୁ ସମ୍ପାଦନ କରନ୍ତୁ
Comment[pa]=ਪਾਠ ਫਾਇਲਾਂ ਸੰਪਾਦਨ
Comment[pl]=Edytor plików tekstowych
Comment[pt]=Editar ficheiros de texto
Comment[pt_BR]=Edite arquivos de texto
Comment[ro]=Editare fişiere text
Comment[ru]=Редактирование текстовых файлов
Comment[sk]=Úprava textových súborov
Comment[sl]=Urejanje datotek z besedili
Comment[sq]=Përpuno files teksti
Comment[sr]=Уређујте текст фајлове
Comment[sr@Latn]=Izmeni tekstualne datoteke
Comment[sv]=Redigera textfiler
Comment[ta]=உரை கோப்புகளை தொகுக்கவும்
Comment[th]=แก้ไขแฟ้มข้อความ
Comment[tk]=Metin faýllary editle
Comment[tr]=Metin dosyaları düzenleyin
Comment[uk]=Редактор текстових файлів
Comment[vi]=Soạn thảo tập tin văn bản
Comment[wa]=Asspougnî des fitchîs tecses
Comment[zh_CN]=编辑文本文件
Comment[zh_TW]=編輯文字檔
TryExec=hx
Exec=hx %F
Terminal=true
Type=Application
Keywords=Text;editor;
Keywords[fr]=Texte;éditeur;
Keywords[ru]=текст;текстовый редактор;
Keywords[sr]=Текст;едитор;
Keywords[tr]=Metin;düzenleyici;
Icon=helix
Categories=Utility;TextEditor;
StartupNotify=false
MimeType=text/english;text/plain;text/x-makefile;text/x-c++hdr;text/x-c++src;text/x-chdr;text/x-csrc;text/x-java;text/x-moc;text/x-pascal;text/x-tcl;text/x-tex;application/x-shellscript;text/x-c;text/x-c++;

@ -0,0 +1,23 @@
#!/usr/bin/env bash
# Bash completion script for Helix editor
_hx() {
# $1 command name
# $2 word being completed
# $3 word preceding
COMPREPLY=()
case "$3" in
-g | --grammar)
COMPREPLY=($(compgen -W "fetch build" -- $2))
;;
--health)
local languages=$(hx --health |tail -n '+7' |awk '{print $1}' |sed 's/\x1b\[[0-9;]*m//g')
COMPREPLY=($(compgen -W "$languages" -- $2))
;;
*)
COMPREPLY=($(compgen -fd -W "-h --help --tutor -V --version -v -vv -vvv --health -g --grammar --vsplit --hsplit -c --config --log" -- $2))
;;
esac
} && complete -F _hx hx

@ -0,0 +1,55 @@
# You can move it to ~/.config/elvish/lib/hx.elv
# Or add `eval (slurp < ~/$REPOS/helix/contrib/completion/hx.elv)`
# Be sure to replace `$REPOS` with something that makes sense for you!
### Renders a pretty completion candidate
var candidate = { | _stem _desc |
edit:complex-candidate $_stem &display=(styled $_stem bold)(styled " "$_desc dim)
}
### These commands will invalidate further input (i.e. not react to them)
var skips = [ "--tutor" "--help" "--version" "-V" "--health" ]
### Grammar commands
var grammar = [ "--grammar" "-g" ]
### Config commands
var config = [ "--config" "-c" ]
### Set an arg-completer for the `hx` binary
set edit:completion:arg-completer[hx] = {|@args|
var n = (count $args)
if (>= $n 3) {
# Stop completions if the passed arg takes precedence
# and invalidate further input
if (has-value $skips $args[-2]) {
return
}
# If the previous arg == --grammar, then only suggest:
if (has-value $grammar $args[-2]) {
$candidate "fetch" "Fetch the tree-sitter grammars"
$candidate "build" "Build the tree-sitter grammars"
return
}
# When we have --config, we need a file
if (has-values $config $args[-2]) {
edit:complete-filename $args[-1] | each { |v| put $v[stem] }
return
}
# When we have --log, we need a file
if (has-values "log" $args[-2]) {
edit:complete-filename $args[-1] | each { |v| put $v[stem] }
return
}
}
edit:complete-filename $args[-1] | each { |v| put $v[stem]}
$candidate "--help" "(Prints help information)"
$candidate "--version" "(Prints version information)"
$candidate "--tutor" "(Loads the tutorial)"
$candidate "--health" "(Checks for errors in editor setup)"
$candidate "--grammar" "(Fetch or build the tree-sitter grammars)"
$candidate "--vsplit" "(Splits all given files vertically)"
$candidate "--hsplit" "(Splits all given files horizontally)"
$candidate "--config" "(Specifies a file to use for configuration)"
$candidate "--log" "(Specifies a file to write log data into)"
}

@ -0,0 +1,15 @@
#!/usr/bin/env fish
# Fish completion script for Helix editor
set -l langs (hx --health |tail -n '+7' |awk '{print $1}' |sed 's/\x1b\[[0-9;]*m//g')
complete -c hx -s h -l help -d "Prints help information"
complete -c hx -l tutor -d "Loads the tutorial"
complete -c hx -l health -x -a "$langs" -d "Checks for errors in editor setup"
complete -c hx -s g -l grammar -x -a "fetch build" -d "Fetches or builds tree-sitter grammars"
complete -c hx -s v -o vv -o vvv -d "Increases logging verbosity"
complete -c hx -s V -l version -d "Prints version information"
complete -c hx -l vsplit -d "Splits all given files vertically into different windows"
complete -c hx -l hsplit -d "Splits all given files horizontally into different windows"
complete -c hx -s c -l config -r -d "Specifies a file to use for configuration"
complete -c hx -l log -r -d "Specifies a file to write log data into"

@ -0,0 +1,34 @@
#compdef _hx hx
# Zsh completion script for Helix editor
_hx() {
_arguments -C \
"-h[Prints help information]" \
"--help[Prints help information]" \
"-v[Increase logging verbosity]" \
"-vv[Increase logging verbosity]" \
"-vvv[Increase logging verbosity]" \
"-V[Prints version information]" \
"--version[Prints version information]" \
"--tutor[Loads the tutorial]" \
"--health[Checks for errors in editor setup]:language:->health" \
"-g[Fetches or builds tree-sitter grammars]:action:->grammar" \
"--grammar[Fetches or builds tree-sitter grammars]:action:->grammar" \
"--vsplit[Splits all given files vertically into different windows]" \
"--hsplit[Splits all given files horizontally into different windows]" \
"-c[Specifies a file to use for configuration]" \
"--config[Specifies a file to use for configuration]" \
"--log[Specifies a file to write log data into]" \
"*:file:_files"
case "$state" in
health)
local languages=($(hx --health |tail -n '+7' |awk '{print $1}' |sed 's/\x1b\[[0-9;]*m//g'))
_values 'language' $languages
;;
grammar)
_values 'action' fetch build
;;
esac
}

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.8 KiB

@ -0,0 +1,8 @@
# Flake's default package for non-flake-enabled nix instances
let
compat = builtins.fetchTarball {
url = "https://github.com/edolstra/flake-compat/archive/b4a34015c698c7793d592d66adbab377907a2be8.tar.gz";
sha256 = "sha256:1qc703yg0babixi6wshn5wm2kgl5y1drcswgszh4xxzbrwkk9sv7";
};
in
(import compat {src = ./.;}).defaultNix.default

@ -30,8 +30,22 @@ inside the project. We use [xtask][xtask] as an ad-hoc task runner and
thus do not require any dependencies other than `cargo` (You don't have thus do not require any dependencies other than `cargo` (You don't have
to `cargo install` anything either). to `cargo install` anything either).
## Integration tests
Integration tests for helix-term can be run with `cargo integration-test`. Code
contributors are strongly encouraged to write integration tests for their code.
Existing tests can be used as examples. Helpers can be found in
[helpers.rs][helpers.rs].
## Minimum Stable Rust Version (MSRV) Policy
Helix follows the MSRV of Firefox.
The current MSRV and future changes to the MSRV are listed in the [Firefox documentation].
[Firefox documentation]: https://firefox-source-docs.mozilla.org/writing-rust-code/update-policy.html
[good-first-issue]: https://github.com/helix-editor/helix/labels/E-easy [good-first-issue]: https://github.com/helix-editor/helix/labels/E-easy
[log-file]: https://github.com/helix-editor/helix/wiki/FAQ#access-the-log-file [log-file]: https://github.com/helix-editor/helix/wiki/FAQ#access-the-log-file
[architecture.md]: ./architecture.md [architecture.md]: ./architecture.md
[docs]: https://docs.helix-editor.com/ [docs]: https://docs.helix-editor.com/
[xtask]: https://github.com/matklad/cargo-xtask [xtask]: https://github.com/matklad/cargo-xtask
[helpers.rs]: ../helix-term/tests/test/helpers.rs

@ -32,7 +32,7 @@ represented by a `Selection`. Each `Range` in the selection consists of a moving
a selection with a single range, with the head and the anchor in the same a selection with a single range, with the head and the anchor in the same
position. position.
Ropes are modified by constructing an OT-like `Transaction`. It's represents Ropes are modified by constructing an OT-like `Transaction`. It represents
a single coherent change to the document and can be applied to the rope. a single coherent change to the document and can be applied to the rope.
A transaction can be inverted to produce an undo. Selections and marks can be A transaction can be inverted to produce an undo. Selections and marks can be
mapped over a transaction to translate to a position in the new text state after mapped over a transaction to translate to a position in the new text state after
@ -42,7 +42,7 @@ applying the transaction.
> interface used to generate text edits. > interface used to generate text edits.
`Syntax` is the interface used to interact with tree-sitter ASTs for syntax `Syntax` is the interface used to interact with tree-sitter ASTs for syntax
highling and other features. highlighting and other features.
## View ## View

@ -0,0 +1,59 @@
## Checklist
Helix releases are versioned in the Calendar Versioning scheme:
`YY.0M(.MICRO)`, for example `22.05` for May of 2022. In these instructions
we'll use `<tag>` as a placeholder for the tag being published.
* Merge the changelog PR
* Tag and push
* `git tag -s -m "<tag>" -a <tag> && git push`
* Make sure to switch to master and pull first
* Edit the `VERSION` file and change the date to the next planned release
* Releases are planned to happen every two months, so `22.05` would change to `22.07`
* Wait for the Release CI to finish
* It will automatically turn the git tag into a GitHub release when it uploads artifacts
* Edit the new release
* Use `<tag>` as the title
* Link to the changelog and release notes
* Merge the release notes PR
* Download the macOS and Linux binaries and update the `sha256`s in the [homebrew formula]
* Use `sha256sum` on the downloaded `.tar.xz` files to determine the hash
* Link to the release notes in this-week-in-rust
* [Example PR](https://github.com/rust-lang/this-week-in-rust/pull/3300)
* Post to reddit
* [Example post](https://www.reddit.com/r/rust/comments/uzp5ze/helix_editor_2205_released/)
[homebrew formula]: https://github.com/Homebrew/homebrew-core/blob/master/Formula/helix.rb
## Changelog Curation
The changelog is currently created manually by reading through commits in the
log since the last release. GitHub's compare view is a nice way to approach
this. For example when creating the 22.07 release notes, this compare link
may be used
```
https://github.com/helix-editor/helix/compare/22.05...master
```
Either side of the triple-dot may be replaced with an exact revision, so if
you wish to incrementally compile the changelog, you can tackle a week's worth
or so, record the revision where you stopped, and use that as a starting point
next week:
```
https://github.com/helix-editor/helix/compare/7706a4a0d8b67b943c31d0c5f7b00d357b5d838d...master
```
A work-in-progress commit for a changelog might look like
[this example](https://github.com/helix-editor/helix/commit/831adfd4c709ca16b248799bfef19698d5175e55).
Not every PR or commit needs a blurb in the changelog. Each release section
tends to have a blurb that links to a GitHub comparison between release
versions for convenience:
> As usual, the following is a summary of each of the changes since the last
> release. For the full log, check out the git log.
Typically, small changes like dependencies or documentation updates, refactors,
or meta changes like GitHub Actions work are left out.

@ -3,11 +3,11 @@
"crane": { "crane": {
"flake": false, "flake": false,
"locked": { "locked": {
"lastModified": 1644785799, "lastModified": 1661875961,
"narHash": "sha256-VpAJO1L0XeBvtCuNGK4IDKp6ENHIpTrlaZT7yfBCvwo=", "narHash": "sha256-f1h/2c6Teeu1ofAHWzrS8TwBPcnN+EEu+z1sRVmMQTk=",
"owner": "ipetkov", "owner": "ipetkov",
"repo": "crane", "repo": "crane",
"rev": "fc7a94f841347c88f2cb44217b2a3faa93e2a0b2", "rev": "d9f394e4e20e97c2a60c3ad82c2b6ef99be19e24",
"type": "github" "type": "github"
}, },
"original": { "original": {
@ -17,19 +17,13 @@
} }
}, },
"devshell": { "devshell": {
"inputs": { "flake": false,
"flake-utils": "flake-utils",
"nixpkgs": [
"nixCargoIntegration",
"nixpkgs"
]
},
"locked": { "locked": {
"lastModified": 1646667754, "lastModified": 1660811669,
"narHash": "sha256-LahZHvCC3UVzGQ55iWDRZkuDssXl1rYgqgScrPV9S38=", "narHash": "sha256-V6lmsaLNFz41myppL0yxglta92ijkSvpZ+XVygAh+bU=",
"owner": "numtide", "owner": "numtide",
"repo": "devshell", "repo": "devshell",
"rev": "59fbe1dfc0de8c3332957c16998a7d16dff365d8", "rev": "c2feacb46ee69949124c835419861143c4016fb5",
"type": "github" "type": "github"
}, },
"original": { "original": {
@ -41,76 +35,60 @@
"dream2nix": { "dream2nix": {
"inputs": { "inputs": {
"alejandra": [ "alejandra": [
"nixCargoIntegration", "nci",
"nixpkgs" "nixpkgs"
], ],
"crane": "crane", "crane": "crane",
"devshell": [
"nci",
"devshell"
],
"flake-utils-pre-commit": [ "flake-utils-pre-commit": [
"nixCargoIntegration", "nci",
"nixpkgs" "nixpkgs"
], ],
"gomod2nix": [ "gomod2nix": [
"nixCargoIntegration", "nci",
"nixpkgs" "nixpkgs"
], ],
"mach-nix": [ "mach-nix": [
"nixCargoIntegration", "nci",
"nixpkgs" "nixpkgs"
], ],
"nixpkgs": [ "nixpkgs": [
"nixCargoIntegration", "nci",
"nixpkgs"
],
"node2nix": [
"nixCargoIntegration",
"nixpkgs" "nixpkgs"
], ],
"poetry2nix": [ "poetry2nix": [
"nixCargoIntegration", "nci",
"nixpkgs" "nixpkgs"
], ],
"pre-commit-hooks": [ "pre-commit-hooks": [
"nixCargoIntegration", "nci",
"nixpkgs" "nixpkgs"
] ]
}, },
"locked": { "locked": {
"lastModified": 1646710334, "lastModified": 1662176993,
"narHash": "sha256-eLBcDgcbOUfeH4k6SEW5a5v0PTp2KNCn+5ZXIoWGYww=", "narHash": "sha256-Sy7DsGAveDUFBb6YDsUSYZd/AcXfP/MOMIwMt/NgY84=",
"owner": "nix-community", "owner": "nix-community",
"repo": "dream2nix", "repo": "dream2nix",
"rev": "5dcfbfd3b60ce0208b894c1bdea00e2bdf80ca6a", "rev": "809bc5940214744eb29778a9a0b03f161979c1b2",
"type": "github" "type": "github"
}, },
"original": { "original": {
"owner": "nix-community", "owner": "nix-community",
"ref": "main",
"repo": "dream2nix", "repo": "dream2nix",
"type": "github" "type": "github"
} }
}, },
"flake-utils": { "flake-utils": {
"locked": { "locked": {
"lastModified": 1642700792, "lastModified": 1656928814,
"narHash": "sha256-XqHrk7hFb+zBvRg6Ghl+AZDq03ov6OshJLiSWOoX5es=", "narHash": "sha256-RIFfgBuKz6Hp89yRr7+NR5tzIAbn52h8vT6vXkYjZoM=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "846b2ae0fc4cc943637d3d1def4454213e203cba",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
},
"flake-utils_2": {
"locked": {
"lastModified": 1637014545,
"narHash": "sha256-26IZAc5yzlD9FlDT54io1oqG/bBoyka+FJk5guaX4x4=",
"owner": "numtide", "owner": "numtide",
"repo": "flake-utils", "repo": "flake-utils",
"rev": "bba5dcc8e0b20ab664967ad83d24d64cb64ec4f4", "rev": "7e2a3b3dfd9af950a856d66b0a7d01e3c18aa249",
"type": "github" "type": "github"
}, },
"original": { "original": {
@ -119,23 +97,23 @@
"type": "github" "type": "github"
} }
}, },
"nixCargoIntegration": { "nci": {
"inputs": { "inputs": {
"devshell": "devshell", "devshell": "devshell",
"dream2nix": "dream2nix", "dream2nix": "dream2nix",
"nixpkgs": [ "nixpkgs": [
"nixpkgs" "nixpkgs"
], ],
"rustOverlay": [ "rust-overlay": [
"rust-overlay" "rust-overlay"
] ]
}, },
"locked": { "locked": {
"lastModified": 1646766572, "lastModified": 1662177071,
"narHash": "sha256-DV3+zxvAIKsMHsHedJKYFsracvFyLKpFQqurUBR86oY=", "narHash": "sha256-x6XF//RdZlw81tFAYM1TkjY+iQIpyMCWZ46r9o4wVQY=",
"owner": "yusdacra", "owner": "yusdacra",
"repo": "nix-cargo-integration", "repo": "nix-cargo-integration",
"rev": "3a3f47f43ba486b7554164a698c8dfc5a38624ce", "rev": "65270dea87bb82fc02102a15221677eea237680e",
"type": "github" "type": "github"
}, },
"original": { "original": {
@ -146,11 +124,11 @@
}, },
"nixpkgs": { "nixpkgs": {
"locked": { "locked": {
"lastModified": 1646497237, "lastModified": 1662019588,
"narHash": "sha256-Ccpot1h/rV8MgcngDp5OrdmLTMaUTbStZTR5/sI7zW0=", "narHash": "sha256-oPEjHKGGVbBXqwwL+UjsveJzghWiWV0n9ogo1X6l4cw=",
"owner": "nixos", "owner": "nixos",
"repo": "nixpkgs", "repo": "nixpkgs",
"rev": "062a0c5437b68f950b081bbfc8a699d57a4ee026", "rev": "2da64a81275b68fdad38af669afeda43d401e94b",
"type": "github" "type": "github"
}, },
"original": { "original": {
@ -162,24 +140,24 @@
}, },
"root": { "root": {
"inputs": { "inputs": {
"nixCargoIntegration": "nixCargoIntegration", "nci": "nci",
"nixpkgs": "nixpkgs", "nixpkgs": "nixpkgs",
"rust-overlay": "rust-overlay" "rust-overlay": "rust-overlay"
} }
}, },
"rust-overlay": { "rust-overlay": {
"inputs": { "inputs": {
"flake-utils": "flake-utils_2", "flake-utils": "flake-utils",
"nixpkgs": [ "nixpkgs": [
"nixpkgs" "nixpkgs"
] ]
}, },
"locked": { "locked": {
"lastModified": 1646792695, "lastModified": 1662087605,
"narHash": "sha256-2drCXIKIQnJMlTZbcCfuHZAh+iPcdlRkCqtZnA6MHLY=", "narHash": "sha256-Gpf2gp2JenKGf+TylX/YJpttY2bzsnvAMLdLaxoZRyU=",
"owner": "oxalica", "owner": "oxalica",
"repo": "rust-overlay", "repo": "rust-overlay",
"rev": "7f599870402c8d2a5806086c8ee0f2d92b175c54", "rev": "60c2cfaa8b90ed8cebd18b214fac8682dcf222dd",
"type": "github" "type": "github"
}, },
"original": { "original": {

@ -7,17 +7,28 @@
url = "github:oxalica/rust-overlay"; url = "github:oxalica/rust-overlay";
inputs.nixpkgs.follows = "nixpkgs"; inputs.nixpkgs.follows = "nixpkgs";
}; };
nixCargoIntegration = { nci = {
url = "github:yusdacra/nix-cargo-integration"; url = "github:yusdacra/nix-cargo-integration";
inputs.nixpkgs.follows = "nixpkgs"; inputs.nixpkgs.follows = "nixpkgs";
inputs.rustOverlay.follows = "rust-overlay"; inputs.rust-overlay.follows = "rust-overlay";
}; };
}; };
outputs = inputs@{ nixCargoIntegration, ... }: outputs = {
nixCargoIntegration.lib.makeOutputs { self,
nixpkgs,
nci,
...
}: let
lib = nixpkgs.lib;
mkRootPath = rel:
builtins.path {
path = "${toString ./.}/${rel}";
name = rel;
};
outputs = nci.lib.makeOutputs {
root = ./.; root = ./.;
renameOutputs = { "helix-term" = "helix"; }; renameOutputs = {"helix-term" = "helix";};
# Set default app to hx (binary is from helix-term release build) # Set default app to hx (binary is from helix-term release build)
# Set default package to helix-term release build # Set default package to helix-term release build
defaultOutputs = { defaultOutputs = {
@ -25,41 +36,145 @@
package = "helix"; package = "helix";
}; };
overrides = { overrides = {
cCompiler = common:
with common.pkgs;
if stdenv.isLinux
then gcc
else clang;
crateOverrides = common: _: { crateOverrides = common: _: {
helix-term = prev: helix-term = prev: {
let src = builtins.path {
inherit (common) pkgs; name = "helix-source";
grammars = pkgs.callPackage ./grammars.nix { }; path = toString ./.;
runtimeDir = pkgs.runCommand "helix-runtime" { } '' # filter out unneeded stuff that causes rebuilds
mkdir -p $out filter = path: type:
ln -s ${common.root}/runtime/* $out lib.all
rm -r $out/grammars (n: builtins.baseNameOf path != n)
ln -s ${grammars} $out/grammars [
''; ".envrc"
in ".ignore"
{ ".github"
"runtime"
"screenshot.png"
"book"
"contrib"
"docs"
"README.md"
"shell.nix"
"default.nix"
"grammars.nix"
"flake.nix"
"flake.lock"
];
};
# disable fetching and building of tree-sitter grammars in the helix-term build.rs # disable fetching and building of tree-sitter grammars in the helix-term build.rs
HELIX_DISABLE_AUTO_GRAMMAR_BUILD = "1"; HELIX_DISABLE_AUTO_GRAMMAR_BUILD = "1";
# link languages and theme toml files since helix-term expects them (for tests)
preConfigure = "ln -s ${common.root}/{languages.toml,theme.toml,base16_theme.toml} ..";
buildInputs = (prev.buildInputs or [ ]) ++ [ common.cCompiler.cc.lib ];
nativeBuildInputs = [ pkgs.makeWrapper ];
postFixup = '' buildInputs = (prev.buildInputs or []) ++ [common.cCompiler.cc.lib];
if [ -f "$out/bin/hx" ]; then
wrapProgram "$out/bin/hx" --set HELIX_RUNTIME "${runtimeDir}" # link languages and theme toml files since helix-term expects them (for tests)
fi preConfigure = ''
${prev.preConfigure or ""}
${
lib.concatMapStringsSep
"\n"
(path: "ln -sf ${mkRootPath path} ..")
["languages.toml" "theme.toml" "base16_theme.toml"]
}
''; '';
meta.mainProgram = "hx";
}; };
}; };
shell = common: prev: { shell = common: prev: {
packages = prev.packages ++ (with common.pkgs; [ lld_13 lldb cargo-tarpaulin cargo-flamegraph ]); packages =
env = prev.env ++ [ prev.packages
{ name = "HELIX_RUNTIME"; eval = "$PWD/runtime"; } ++ (
{ name = "RUST_BACKTRACE"; value = "1"; } with common.pkgs;
{ name = "RUSTFLAGS"; value = "-C link-arg=-fuse-ld=lld -C target-cpu=native -Clink-arg=-Wl,--no-rosegment"; } [lld_13 cargo-flamegraph rust-analyzer]
++ (lib.optional (stdenv.isx86_64 && stdenv.isLinux) cargo-tarpaulin)
++ (lib.optional stdenv.isLinux lldb)
);
env =
prev.env
++ [
{
name = "HELIX_RUNTIME";
eval = "$PWD/runtime";
}
{
name = "RUST_BACKTRACE";
value = "1";
}
{
name = "RUSTFLAGS";
value =
if common.pkgs.stdenv.isLinux
then "-C link-arg=-fuse-ld=lld -C target-cpu=native -Clink-arg=-Wl,--no-rosegment"
else "";
}
]; ];
}; };
}; };
}; };
makeOverridableHelix = system: old: config: let
pkgs = nixpkgs.legacyPackages.${system};
grammars = pkgs.callPackage ./grammars.nix config;
runtimeDir = pkgs.runCommand "helix-runtime" {} ''
mkdir -p $out
ln -s ${mkRootPath "runtime"}/* $out
rm -r $out/grammars
ln -s ${grammars} $out/grammars
'';
helix-wrapped =
pkgs.runCommand "${old.name}-wrapped"
{
inherit (old) pname version meta;
nativeBuildInputs = [pkgs.makeWrapper];
makeWrapperArgs = config.makeWrapperArgs or [];
}
''
mkdir -p $out
cp -r --no-preserve=mode,ownership ${old}/* $out/
chmod +x $out/bin/*
wrapProgram "$out/bin/hx" ''${makeWrapperArgs[@]} --set HELIX_RUNTIME "${runtimeDir}"
'';
in
helix-wrapped
// {override = makeOverridableHelix system old;};
in
outputs
// {
apps =
lib.mapAttrs
(
system: apps: rec {
default = hx;
hx = {
type = "app";
program = lib.getExe self.${system}.packages.helix;
};
}
)
outputs.apps;
packages =
lib.mapAttrs
(
system: packages: rec {
default = helix;
helix = makeOverridableHelix system helix-unwrapped {};
helix-debug = makeOverridableHelix system helix-unwrapped-debug {};
helix-unwrapped = packages.helix;
helix-unwrapped-debug = packages.helix-debug;
}
)
outputs.packages;
};
nixConfig = {
extra-substituters = ["https://helix.cachix.org"];
extra-trusted-public-keys = ["helix.cachix.org-1:ejp9KQpR1FBI2onstMQ34yogDm4OgU2ru6lIwPvuCVs="];
};
} }

@ -1,17 +1,25 @@
{ stdenv, lib, runCommand, yj }: {
let stdenv,
lib,
runCommandLocal,
runCommand,
yj,
includeGrammarIf ? _: true,
...
}: let
# HACK: nix < 2.6 has a bug in the toml parser, so we convert to JSON # HACK: nix < 2.6 has a bug in the toml parser, so we convert to JSON
# before parsing # before parsing
languages-json = runCommand "languages-toml-to-json" { } '' languages-json = runCommandLocal "languages-toml-to-json" {} ''
${yj}/bin/yj -t < ${./languages.toml} > $out ${yj}/bin/yj -t < ${./languages.toml} > $out
''; '';
languagesConfig = if lib.versionAtLeast builtins.nixVersion "2.6.0" then languagesConfig =
builtins.fromTOML (builtins.readFile ./languages.toml) if lib.versionAtLeast builtins.nixVersion "2.6.0"
else then builtins.fromTOML (builtins.readFile ./languages.toml)
builtins.fromJSON (builtins.readFile (builtins.toPath languages-json)); else builtins.fromJSON (builtins.readFile (builtins.toPath languages-json));
isGitGrammar = (grammar: isGitGrammar = grammar:
builtins.hasAttr "source" grammar && builtins.hasAttr "git" grammar.source builtins.hasAttr "source" grammar
&& builtins.hasAttr "rev" grammar.source); && builtins.hasAttr "git" grammar.source
&& builtins.hasAttr "rev" grammar.source;
isGitHubGrammar = grammar: lib.hasPrefix "https://github.com" grammar.source.git; isGitHubGrammar = grammar: lib.hasPrefix "https://github.com" grammar.source.git;
toGitHubFetcher = url: let toGitHubFetcher = url: let
match = builtins.match "https://github\.com/([^/]*)/([^/]*)/?" url; match = builtins.match "https://github\.com/([^/]*)/([^/]*)/?" url;
@ -20,8 +28,7 @@ let
repo = builtins.elemAt match 1; repo = builtins.elemAt match 1;
}; };
gitGrammars = builtins.filter isGitGrammar languagesConfig.grammar; gitGrammars = builtins.filter isGitGrammar languagesConfig.grammar;
buildGrammar = grammar: buildGrammar = grammar: let
let
gh = toGitHubFetcher grammar.source.git; gh = toGitHubFetcher grammar.source.git;
sourceGit = builtins.fetchTree { sourceGit = builtins.fetchTree {
type = "git"; type = "git";
@ -36,17 +43,21 @@ let
repo = gh.repo; repo = gh.repo;
inherit (grammar.source) rev; inherit (grammar.source) rev;
}; };
source = if isGitHubGrammar grammar then sourceGitHub else sourceGit; source =
in stdenv.mkDerivation rec { if isGitHubGrammar grammar
then sourceGitHub
else sourceGit;
in
stdenv.mkDerivation rec {
# see https://github.com/NixOS/nixpkgs/blob/fbdd1a7c0bc29af5325e0d7dd70e804a972eb465/pkgs/development/tools/parsing/tree-sitter/grammar.nix # see https://github.com/NixOS/nixpkgs/blob/fbdd1a7c0bc29af5325e0d7dd70e804a972eb465/pkgs/development/tools/parsing/tree-sitter/grammar.nix
pname = "helix-tree-sitter-${grammar.name}"; pname = "helix-tree-sitter-${grammar.name}";
version = grammar.source.rev; version = grammar.source.rev;
src = if builtins.hasAttr "subpath" grammar.source then src =
"${source}/${grammar.source.subpath}" if builtins.hasAttr "subpath" grammar.source
else then "${source}/${grammar.source.subpath}"
source; else source;
dontUnpack = true; dontUnpack = true;
dontConfigure = true; dontConfigure = true;
@ -93,14 +104,18 @@ let
runHook postFixup runHook postFixup
''; '';
}; };
builtGrammars = builtins.map (grammar: { grammarsToBuild = builtins.filter includeGrammarIf gitGrammars;
builtGrammars =
builtins.map (grammar: {
inherit (grammar) name; inherit (grammar) name;
artifact = buildGrammar grammar; artifact = buildGrammar grammar;
}) gitGrammars; })
grammarLinks = builtins.map (grammar: grammarsToBuild;
"ln -s ${grammar.artifact}/${grammar.name}.so $out/${grammar.name}.so") grammarLinks =
builtins.map (grammar: "ln -s ${grammar.artifact}/${grammar.name}.so $out/${grammar.name}.so")
builtGrammars; builtGrammars;
in runCommand "consolidated-helix-grammars" { } '' in
runCommand "consolidated-helix-grammars" {} ''
mkdir -p $out mkdir -p $out
${builtins.concatStringsSep "\n" grammarLinks} ${builtins.concatStringsSep "\n" grammarLinks}
'' ''

@ -12,20 +12,21 @@ include = ["src/**/*", "README.md"]
[features] [features]
unicode-lines = ["ropey/unicode_lines"] unicode-lines = ["ropey/unicode_lines"]
integration = []
[dependencies] [dependencies]
helix-loader = { version = "0.6", path = "../helix-loader" } helix-loader = { version = "0.6", path = "../helix-loader" }
ropey = { version = "1.4", default-features = false } ropey = { version = "1.5", default-features = false, features = ["simd"] }
smallvec = "1.8" smallvec = "1.9"
smartstring = "1.0.1" smartstring = "1.0.1"
unicode-segmentation = "1.9" unicode-segmentation = "1.10"
unicode-width = "0.1" unicode-width = "0.1"
unicode-general-category = "0.5" unicode-general-category = "0.6"
# slab = "0.4.2" # slab = "0.4.2"
slotmap = "1.0" slotmap = "1.0"
tree-sitter = "0.20" tree-sitter = "0.20"
once_cell = "1.10" once_cell = "1.15"
arc-swap = "1" arc-swap = "1"
regex = "1" regex = "1"
@ -34,13 +35,14 @@ serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0" serde_json = "1.0"
toml = "0.5" toml = "0.5"
similar = "2.1" similar = "2.2"
encoding_rs = "0.8" encoding_rs = "0.8"
chrono = { version = "0.4", default-features = false, features = ["alloc", "std"] } chrono = { version = "0.4", default-features = false, features = ["alloc", "std"] }
etcetera = "0.3" etcetera = "0.4"
textwrap = "0.15.1"
[dev-dependencies] [dev-dependencies]
quickcheck = { version = "1", default-features = false } quickcheck = { version = "1", default-features = false }

@ -1,12 +1,9 @@
//! When typing the opening character of one of the possible pairs defined below, //! When typing the opening character of one of the possible pairs defined below,
//! this module provides the functionality to insert the paired closing character. //! this module provides the functionality to insert the paired closing character.
use crate::{ use crate::{graphemes, movement::Direction, Range, Rope, Selection, Tendril, Transaction};
graphemes, movement::Direction, Range, Rope, RopeGraphemes, Selection, Tendril, Transaction,
};
use std::collections::HashMap; use std::collections::HashMap;
use log::debug;
use smallvec::SmallVec; use smallvec::SmallVec;
// Heavily based on https://github.com/codemirror/closebrackets/ // Heavily based on https://github.com/codemirror/closebrackets/
@ -125,7 +122,7 @@ impl Default for AutoPairs {
#[must_use] #[must_use]
pub fn hook(doc: &Rope, selection: &Selection, ch: char, pairs: &AutoPairs) -> Option<Transaction> { pub fn hook(doc: &Rope, selection: &Selection, ch: char, pairs: &AutoPairs) -> Option<Transaction> {
debug!("autopairs hook selection: {:#?}", selection); log::trace!("autopairs hook selection: {:#?}", selection);
if let Some(pair) = pairs.get(ch) { if let Some(pair) = pairs.get(ch) {
if pair.same() { if pair.same() {
@ -149,14 +146,6 @@ fn prev_char(doc: &Rope, pos: usize) -> Option<char> {
doc.get_char(pos - 1) doc.get_char(pos - 1)
} }
fn is_single_grapheme(doc: &Rope, range: &Range) -> bool {
let mut graphemes = RopeGraphemes::new(doc.slice(range.from()..range.to()));
let first = graphemes.next();
let second = graphemes.next();
debug!("first: {:#?}, second: {:#?}", first, second);
first.is_some() && second.is_none()
}
/// calculate what the resulting range should be for an auto pair insertion /// calculate what the resulting range should be for an auto pair insertion
fn get_next_range( fn get_next_range(
doc: &Rope, doc: &Rope,
@ -189,8 +178,8 @@ fn get_next_range(
); );
} }
let single_grapheme = is_single_grapheme(doc, start_range);
let doc_slice = doc.slice(..); let doc_slice = doc.slice(..);
let single_grapheme = start_range.is_single_grapheme(doc_slice);
// just skip over graphemes // just skip over graphemes
if len_inserted == 0 { if len_inserted == 0 {
@ -235,9 +224,11 @@ fn get_next_range(
// other end of the grapheme to get to where the new characters // other end of the grapheme to get to where the new characters
// are inserted, then move the head to where it should be // are inserted, then move the head to where it should be
let prev_bound = graphemes::prev_grapheme_boundary(doc_slice, start_range.head); let prev_bound = graphemes::prev_grapheme_boundary(doc_slice, start_range.head);
debug!( log::trace!(
"prev_bound: {}, offset: {}, len_inserted: {}", "prev_bound: {}, offset: {}, len_inserted: {}",
prev_bound, offset, len_inserted prev_bound,
offset,
len_inserted
); );
prev_bound + offset + len_inserted prev_bound + offset + len_inserted
}; };
@ -312,7 +303,7 @@ fn handle_open(doc: &Rope, selection: &Selection, pair: &Pair) -> Transaction {
}); });
let t = transaction.with_selection(Selection::new(end_ranges, selection.primary_index())); let t = transaction.with_selection(Selection::new(end_ranges, selection.primary_index()));
debug!("auto pair transaction: {:#?}", t); log::debug!("auto pair transaction: {:#?}", t);
t t
} }
@ -344,7 +335,7 @@ fn handle_close(doc: &Rope, selection: &Selection, pair: &Pair) -> Transaction {
}); });
let t = transaction.with_selection(Selection::new(end_ranges, selection.primary_index())); let t = transaction.with_selection(Selection::new(end_ranges, selection.primary_index()));
debug!("auto pair transaction: {:#?}", t); log::debug!("auto pair transaction: {:#?}", t);
t t
} }
@ -384,7 +375,7 @@ fn handle_same(doc: &Rope, selection: &Selection, pair: &Pair) -> Transaction {
}); });
let t = transaction.with_selection(Selection::new(end_ranges, selection.primary_index())); let t = transaction.with_selection(Selection::new(end_ranges, selection.primary_index()));
debug!("auto pair transaction: {:#?}", t); log::debug!("auto pair transaction: {:#?}", t);
t t
} }

@ -72,7 +72,7 @@ pub fn toggle_line_comments(doc: &Rope, selection: &Selection, token: Option<&st
let end = (end + 1).min(text.len_lines()); let end = (end + 1).min(text.len_lines());
lines.extend(start..end); lines.extend(start..end);
min_next_line = end + 1; min_next_line = end;
} }
let (commented, to_change, min, margin) = find_line_comment(token, text, lines); let (commented, to_change, min, margin) = find_line_comment(token, text, lines);

@ -1,10 +1,10 @@
/// Syntax configuration loader based on built-in languages.toml. /// Syntax configuration loader based on built-in languages.toml.
pub fn default_syntax_loader() -> crate::syntax::Configuration { pub fn default_syntax_loader() -> crate::syntax::Configuration {
helix_loader::default_lang_config() helix_loader::config::default_lang_config()
.try_into() .try_into()
.expect("Could not serialize built-in languages.toml") .expect("Could not serialize built-in languages.toml")
} }
/// Syntax configuration loader based on user configured languages.toml. /// Syntax configuration loader based on user configured languages.toml.
pub fn user_syntax_loader() -> Result<crate::syntax::Configuration, toml::de::Error> { pub fn user_syntax_loader() -> Result<crate::syntax::Configuration, toml::de::Error> {
helix_loader::user_lang_config()?.try_into() helix_loader::config::user_lang_config()?.try_into()
} }

@ -23,6 +23,18 @@ pub struct Range {
pub end: usize, pub end: usize,
} }
#[derive(Debug, Eq, Hash, PartialEq, Clone, Deserialize, Serialize)]
pub enum NumberOrString {
Number(i32),
String(String),
}
#[derive(Debug, Clone)]
pub enum DiagnosticTag {
Unnecessary,
Deprecated,
}
/// Corresponds to [`lsp_types::Diagnostic`](https://docs.rs/lsp-types/0.91.0/lsp_types/struct.Diagnostic.html) /// Corresponds to [`lsp_types::Diagnostic`](https://docs.rs/lsp-types/0.91.0/lsp_types/struct.Diagnostic.html)
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct Diagnostic { pub struct Diagnostic {
@ -30,4 +42,7 @@ pub struct Diagnostic {
pub line: usize, pub line: usize,
pub message: String, pub message: String,
pub severity: Option<Severity>, pub severity: Option<Severity>,
pub code: Option<NumberOrString>,
pub tags: Vec<DiagnosticTag>,
pub source: Option<String>,
} }
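A minimal sketch of how the new `code` field can be rendered, assuming only the `NumberOrString` variants shown above (module path assumed):

    use helix_core::diagnostic::NumberOrString;

    // Render a diagnostic code for display, whichever variant the server sent.
    fn format_code(code: &NumberOrString) -> String {
        match code {
            NumberOrString::Number(n) => n.to_string(),
            NumberOrString::String(s) => s.clone(),
        }
    }

    fn main() {
        assert_eq!(format_code(&NumberOrString::Number(404)), "404");
        assert_eq!(format_code(&NumberOrString::String("E0308".into())), "E0308");
    }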

@ -14,7 +14,7 @@ pub fn grapheme_width(g: &str) -> usize {
// Point 1: theoretically, ascii control characters should have zero // Point 1: theoretically, ascii control characters should have zero
// width, but in our case we actually want them to have width: if they // width, but in our case we actually want them to have width: if they
// show up in text, we want to treat them as textual elements that can // show up in text, we want to treat them as textual elements that can
// be editied. So we can get away with making all ascii single width // be edited. So we can get away with making all ascii single width
// here. // here.
// Point 2: we're only examining the first codepoint here, which means // Point 2: we're only examining the first codepoint here, which means
// we're ignoring graphemes formed with combining characters. However, // we're ignoring graphemes formed with combining characters. However,

@ -22,10 +22,10 @@ use std::time::{Duration, Instant};
/// ///
/// The current revision is the one currently displayed in the buffer. /// The current revision is the one currently displayed in the buffer.
/// ///
/// Commiting a new revision to the history will update the last child of the /// Committing a new revision to the history will update the last child of the
/// current revision, and push a new revision to the end of the vector. /// current revision, and push a new revision to the end of the vector.
/// ///
/// Revisions are commited with a timestamp. :earlier and :later can be used /// Revisions are committed with a timestamp. :earlier and :later can be used
/// to jump to the closest revision to a moment in time relative to the timestamp /// to jump to the closest revision to a moment in time relative to the timestamp
/// of the current revision plus (:later) or minus (:earlier) the duration /// of the current revision plus (:later) or minus (:earlier) the duration
/// given to the command. If a single integer is given, the editor will instead /// given to the command. If a single integer is given, the editor will instead
@ -33,7 +33,7 @@ use std::time::{Duration, Instant};
/// ///
/// Limitations: /// Limitations:
/// * Changes in selections currently don't commit history changes. The selection /// * Changes in selections currently don't commit history changes. The selection
/// will only be updated to the state after a commited buffer change. /// will only be updated to the state after a committed buffer change.
/// * The vector of history revisions is currently unbounded. This might /// * The vector of history revisions is currently unbounded. This might
/// cause the memory consumption to grow significantly large during long /// cause the memory consumption to grow significantly large during long
/// editing sessions. /// editing sessions.
@ -177,7 +177,7 @@ impl History {
} }
} }
/// List of nodes on the way from `n` to 'a`. Doesn`t include `a`. /// List of nodes on the way from `n` to 'a`. Doesn't include `a`.
/// Includes `n` unless `a == n`. `a` must be an ancestor of `n`. /// Includes `n` unless `a == n`. `a` must be an ancestor of `n`.
fn path_up(&self, mut n: usize, a: usize) -> Vec<usize> { fn path_up(&self, mut n: usize, a: usize) -> Vec<usize> {
let mut path = Vec::new(); let mut path = Vec::new();
@ -288,7 +288,7 @@ pub enum UndoKind {
TimePeriod(std::time::Duration), TimePeriod(std::time::Duration),
} }
/// A subset of sytemd.time time span syntax units. /// A subset of systemd.time time span syntax units.
const TIME_UNITS: &[(&[&str], &str, u64)] = &[ const TIME_UNITS: &[(&[&str], &str, u64)] = &[
(&["seconds", "second", "sec", "s"], "seconds", 1), (&["seconds", "second", "sec", "s"], "seconds", 1),
(&["minutes", "minute", "min", "m"], "minutes", 60), (&["minutes", "minute", "min", "m"], "minutes", 60),
@ -546,8 +546,8 @@ mod test {
// Units are validated. // Units are validated.
assert_eq!( assert_eq!(
"1 millenium".parse::<UndoKind>(), "1 millennium".parse::<UndoKind>(),
Err("incorrect time unit: millenium".to_string()) Err("incorrect time unit: millennium".to_string())
); );
// Units can't be specified twice. // Units can't be specified twice.
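A hedged usage sketch of the time-span parsing described above, assuming the parser accepts `<count> <unit>` spans as the unit table suggests (module path assumed):

    use helix_core::history::UndoKind;
    use std::time::Duration;

    fn main() {
        // ":earlier 30 seconds" style arguments parse into a time period ...
        match "30 seconds".parse::<UndoKind>() {
            Ok(UndoKind::TimePeriod(d)) => assert_eq!(d, Duration::from_secs(30)),
            other => println!("unexpected parse result: {:?}", other),
        }
        // ... while unknown units are rejected, as the test above shows.
        assert!("1 millennium".parse::<UndoKind>().is_err());
    }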

@ -5,6 +5,7 @@ use ropey::RopeSlice;
use std::borrow::Cow; use std::borrow::Cow;
use std::cmp; use std::cmp;
use std::fmt::Write;
use super::Increment; use super::Increment;
use crate::{Range, Tendril}; use crate::{Range, Tendril};
@ -162,7 +163,7 @@ impl Format {
fields.push(field); fields.push(field);
max_len += field.max_len + remaining[..i].len(); max_len += field.max_len + remaining[..i].len();
regex += &remaining[..i]; regex += &remaining[..i];
regex += &format!("({})", field.regex); write!(regex, "({})", field.regex).unwrap();
remaining = &after[spec_len..]; remaining = &after[spec_len..];
} }
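The switch to `write!` is the standard idiom for appending formatted text without an intermediate allocation; in isolation:

    use std::fmt::Write;

    fn main() {
        let mut regex = String::from("^");
        // write! appends straight into the existing String, avoiding the temporary
        // allocation that `regex += &format!(...)` would create.
        write!(regex, "({})", r"\d{4}").unwrap();
        assert_eq!(regex, r"^(\d{4})");
    }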

@ -377,7 +377,7 @@ mod test {
} }
#[test] #[test]
fn test_increment_basic_hexadedimal_numbers() { fn test_increment_basic_hexadecimal_numbers() {
let tests = [ let tests = [
("0x0100", 1, "0x0101"), ("0x0100", 1, "0x0101"),
("0x0100", -1, "0x00ff"), ("0x0100", -1, "0x00ff"),

@ -230,14 +230,14 @@ fn get_first_in_line(mut node: Node, byte_pos: usize, new_line: bool) -> Vec<boo
/// - Successively add indent captures to get the (added) indent from a single line /// - Successively add indent captures to get the (added) indent from a single line
/// - Successively add the indent results for each line /// - Successively add the indent results for each line
#[derive(Default)] #[derive(Default)]
struct Indentation { pub struct Indentation {
/// The total indent (the number of indent levels) is defined as max(0, indent-outdent). /// The total indent (the number of indent levels) is defined as max(0, indent-outdent).
/// The string that this results in depends on the indent style (spaces or tabs, etc.) /// The string that this results in depends on the indent style (spaces or tabs, etc.)
indent: usize, indent: usize,
outdent: usize, outdent: usize,
} }
impl Indentation { impl Indentation {
/// Add some other [IndentResult] to this. /// Add some other [Indentation] to this.
/// The added indent should be the total added indent from one line /// The added indent should be the total added indent from one line
fn add_line(&mut self, added: &Indentation) { fn add_line(&mut self, added: &Indentation) {
if added.indent > 0 && added.outdent == 0 { if added.indent > 0 && added.outdent == 0 {
@ -433,7 +433,7 @@ fn query_indents(
/// after pos were moved to a new line. /// after pos were moved to a new line.
/// ///
/// The indentation is determined by traversing all the tree-sitter nodes containing the position. /// The indentation is determined by traversing all the tree-sitter nodes containing the position.
/// Each of these nodes produces some [AddedIndent] for: /// Each of these nodes produces some [Indentation] for:
/// ///
/// - The line of the (beginning of the) node. This is defined by the scope `all` if this is the first node on its line. /// - The line of the (beginning of the) node. This is defined by the scope `all` if this is the first node on its line.
/// - The line after the node. This is defined by: /// - The line after the node. This is defined by:
@ -441,9 +441,9 @@ fn query_indents(
/// - The scope `all` if this node is not the first node on its line. /// - The scope `all` if this node is not the first node on its line.
/// Intuitively, `all` applies to everything contained in this node while `tail` applies to everything except for the first line of the node. /// Intuitively, `all` applies to everything contained in this node while `tail` applies to everything except for the first line of the node.
/// The indents from different nodes for the same line are then combined. /// The indents from different nodes for the same line are then combined.
/// The [IndentResult] is simply the sum of the [AddedIndent] for all lines. /// The result [Indentation] is simply the sum of the [Indentation] for all lines.
/// ///
/// Specifying which line exactly an [AddedIndent] applies to is important because indents on the same line combine differently than indents on different lines: /// Specifying which line exactly an [Indentation] applies to is important because indents on the same line combine differently than indents on different lines:
/// ```ignore /// ```ignore
/// some_function(|| { /// some_function(|| {
/// // Both the function parameters as well as the contained block should be indented. /// // Both the function parameters as well as the contained block should be indented.
@ -453,7 +453,7 @@ fn query_indents(
/// ///
/// ```ignore /// ```ignore
/// some_function( /// some_function(
/// parm1, /// param1,
/// || { /// || {
/// // Here we get 2 indent levels because the 'parameters' and the 'block' node begin on different lines /// // Here we get 2 indent levels because the 'parameters' and the 'block' node begin on different lines
/// }, /// },
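The same-line vs. different-line rule above can be sketched with a toy accumulator (illustrative only, not the crate's implementation):

    // Toy model of the accumulation described above: indent captures from one line
    // contribute at most one level, captures from different lines are summed.
    #[derive(Default)]
    struct Levels {
        indent: usize,
        outdent: usize,
    }

    impl Levels {
        fn add_line(&mut self, line: &Levels) {
            if line.indent > 0 && line.outdent == 0 {
                self.indent += 1;
            } else if line.outdent > 0 && line.indent == 0 {
                self.outdent += 1;
            }
        }
        fn total(&self) -> usize {
            // max(0, indent - outdent), as in the doc comment above.
            self.indent.saturating_sub(self.outdent)
        }
    }

    fn main() {
        // `some_function(|| {`: two captures, one line => one indent level.
        let mut same_line = Levels::default();
        same_line.add_line(&Levels { indent: 2, outdent: 0 });
        assert_eq!(same_line.total(), 1);

        // Parameters and block starting on different lines => two levels.
        let mut split_lines = Levels::default();
        split_lines.add_line(&Levels { indent: 1, outdent: 0 });
        split_lines.add_line(&Levels { indent: 1, outdent: 0 });
        assert_eq!(split_lines.total(), 2);
    }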

@ -24,8 +24,10 @@ pub mod shellwords;
mod state; mod state;
pub mod surround; pub mod surround;
pub mod syntax; pub mod syntax;
pub mod test;
pub mod textobject; pub mod textobject;
mod transaction; mod transaction;
pub mod wrap;
pub mod unicode { pub mod unicode {
pub use unicode_general_category as category; pub use unicode_general_category as category;
@ -41,10 +43,10 @@ pub fn find_first_non_whitespace_char(line: RopeSlice) -> Option<usize> {
/// ///
/// Order of detection: /// Order of detection:
/// * Top-most folder containing a root marker in current git repository /// * Top-most folder containing a root marker in current git repository
/// * Git repostory root if no marker detected /// * Git repository root if no marker detected
/// * Top-most folder containing a root marker if not git repository detected /// * Top-most folder containing a root marker if not git repository detected
/// * Current working directory as fallback /// * Current working directory as fallback
pub fn find_root(root: Option<&str>, root_markers: &[String]) -> Option<std::path::PathBuf> { pub fn find_root(root: Option<&str>, root_markers: &[String]) -> std::path::PathBuf {
let current_dir = std::env::current_dir().expect("unable to determine current directory"); let current_dir = std::env::current_dir().expect("unable to determine current directory");
let root = match root { let root = match root {
@ -61,28 +63,28 @@ pub fn find_root(root: Option<&str>, root_markers: &[String]) -> Option<std::pat
let mut top_marker = None; let mut top_marker = None;
for ancestor in root.ancestors() { for ancestor in root.ancestors() {
for marker in root_markers { if root_markers
if ancestor.join(marker).exists() { .iter()
.any(|marker| ancestor.join(marker).exists())
{
top_marker = Some(ancestor); top_marker = Some(ancestor);
break;
}
} }
// don't go higher than repo
if ancestor.join(".git").is_dir() { if ancestor.join(".git").is_dir() {
// Use workspace if detected from marker // Top marker is repo root if no root marker was detected yet
return Some(top_marker.unwrap_or(ancestor).to_path_buf()); if top_marker.is_none() {
top_marker = Some(ancestor);
} }
// Don't go higher than repo if we're in one
break;
} }
// In absence of git repo, use workspace if detected
if top_marker.is_some() {
top_marker.map(|a| a.to_path_buf())
} else {
Some(current_dir)
} }
// Return the found top marker or the current_dir as fallback
top_marker.map_or(current_dir, |a| a.to_path_buf())
} }
pub use ropey::{Rope, RopeBuilder, RopeSlice}; pub use ropey::{str_utils, Rope, RopeBuilder, RopeSlice};
// pub use tendril::StrTendril as Tendril; // pub use tendril::StrTendril as Tendril;
pub use smartstring::SmartString; pub use smartstring::SmartString;
@ -93,7 +95,9 @@ pub type Tendril = SmartString<smartstring::LazyCompact>;
pub use {regex, tree_sitter}; pub use {regex, tree_sitter};
pub use graphemes::RopeGraphemes; pub use graphemes::RopeGraphemes;
pub use position::{coords_at_pos, pos_at_coords, visual_coords_at_pos, Position}; pub use position::{
coords_at_pos, pos_at_coords, pos_at_visual_coords, visual_coords_at_pos, Position,
};
pub use selection::{Range, Selection}; pub use selection::{Range, Selection};
pub use smallvec::{smallvec, SmallVec}; pub use smallvec::{smallvec, SmallVec};
pub use syntax::Syntax; pub use syntax::Syntax;
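A short usage sketch of the new `find_root` signature, which now always yields a path (current directory as the final fallback); the marker names below are only examples:

    use helix_core::find_root;
    use std::path::PathBuf;

    fn main() {
        let markers = vec![".helix".to_string(), "Cargo.toml".to_string()];
        let root: PathBuf = find_root(None, &markers);
        println!("workspace root: {}", root.display());
    }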

@ -119,6 +119,11 @@ pub fn str_is_line_ending(s: &str) -> bool {
LineEnding::from_str(s).is_some() LineEnding::from_str(s).is_some()
} }
#[inline]
pub fn rope_is_line_ending(r: RopeSlice) -> bool {
r.chunks().all(str_is_line_ending)
}
/// Attempts to detect what line ending the passed document uses. /// Attempts to detect what line ending the passed document uses.
pub fn auto_detect_line_ending(doc: &Rope) -> Option<LineEnding> { pub fn auto_detect_line_ending(doc: &Rope) -> Option<LineEnding> {
// Return first matched line ending. Not all possible line endings // Return first matched line ending. Not all possible line endings
@ -300,8 +305,17 @@ mod line_ending_tests {
fn line_end_char_index_rope_slice() { fn line_end_char_index_rope_slice() {
let r = Rope::from_str("Hello\rworld\nhow\r\nare you?"); let r = Rope::from_str("Hello\rworld\nhow\r\nare you?");
let s = &r.slice(..); let s = &r.slice(..);
#[cfg(not(feature = "unicode-lines"))]
{
assert_eq!(line_end_char_index(s, 0), 11); assert_eq!(line_end_char_index(s, 0), 11);
assert_eq!(line_end_char_index(s, 1), 15); assert_eq!(line_end_char_index(s, 1), 15);
assert_eq!(line_end_char_index(s, 2), 25); assert_eq!(line_end_char_index(s, 2), 25);
} }
#[cfg(feature = "unicode-lines")]
{
assert_eq!(line_end_char_index(s, 0), 5);
assert_eq!(line_end_char_index(s, 1), 11);
assert_eq!(line_end_char_index(s, 2), 15);
}
}
} }
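A quick sketch of the new helper in isolation; it reports whether a slice is nothing but line-ending text, which is how the paragraph motions below detect blank lines (module path assumed):

    use helix_core::line_ending::rope_is_line_ending;
    use helix_core::Rope;

    fn main() {
        let blank = Rope::from_str("\r\n");
        let text = Rope::from_str("not blank\n");
        assert!(rope_is_line_ending(blank.slice(..)));
        assert!(!rope_is_line_ending(text.slice(..)));
    }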

@ -5,15 +5,15 @@ use tree_sitter::{Node, QueryCursor};
use crate::{ use crate::{
chars::{categorize_char, char_is_line_ending, CharCategory}, chars::{categorize_char, char_is_line_ending, CharCategory},
coords_at_pos,
graphemes::{ graphemes::{
next_grapheme_boundary, nth_next_grapheme_boundary, nth_prev_grapheme_boundary, next_grapheme_boundary, nth_next_grapheme_boundary, nth_prev_grapheme_boundary,
prev_grapheme_boundary, prev_grapheme_boundary,
}, },
pos_at_coords, line_ending::rope_is_line_ending,
pos_at_visual_coords,
syntax::LanguageConfiguration, syntax::LanguageConfiguration,
textobject::TextObject, textobject::TextObject,
Position, Range, RopeSlice, visual_coords_at_pos, Position, Range, RopeSlice,
}; };
#[derive(Debug, Copy, Clone, PartialEq, Eq)] #[derive(Debug, Copy, Clone, PartialEq, Eq)]
@ -34,6 +34,7 @@ pub fn move_horizontally(
dir: Direction, dir: Direction,
count: usize, count: usize,
behaviour: Movement, behaviour: Movement,
_: usize,
) -> Range { ) -> Range {
let pos = range.cursor(slice); let pos = range.cursor(slice);
@ -53,15 +54,12 @@ pub fn move_vertically(
dir: Direction, dir: Direction,
count: usize, count: usize,
behaviour: Movement, behaviour: Movement,
tab_width: usize,
) -> Range { ) -> Range {
let pos = range.cursor(slice); let pos = range.cursor(slice);
// Compute the current position's 2d coordinates. // Compute the current position's 2d coordinates.
// TODO: switch this to use `visual_coords_at_pos` rather than let Position { row, col } = visual_coords_at_pos(slice, pos, tab_width);
// `coords_at_pos` as this will cause a jerky movement when the visual
// position does not match, like moving from a line with tabs/CJK to
// a line without
let Position { row, col } = coords_at_pos(slice, pos);
let horiz = range.horiz.unwrap_or(col as u32); let horiz = range.horiz.unwrap_or(col as u32);
// Compute the new position. // Compute the new position.
@ -70,7 +68,7 @@ pub fn move_vertically(
Direction::Backward => row.saturating_sub(count), Direction::Backward => row.saturating_sub(count),
}; };
let new_col = col.max(horiz as usize); let new_col = col.max(horiz as usize);
let new_pos = pos_at_coords(slice, Position::new(new_row, new_col), true); let new_pos = pos_at_visual_coords(slice, Position::new(new_row, new_col), tab_width);
// Special-case to avoid moving to the end of the last non-empty line. // Special-case to avoid moving to the end of the last non-empty line.
if behaviour == Movement::Extend && slice.line(new_row).len_chars() == 0 { if behaviour == Movement::Extend && slice.line(new_row).len_chars() == 0 {
@ -149,6 +147,88 @@ fn word_move(slice: RopeSlice, range: Range, count: usize, target: WordMotionTar
}) })
} }
pub fn move_prev_paragraph(
slice: RopeSlice,
range: Range,
count: usize,
behavior: Movement,
) -> Range {
let mut line = range.cursor_line(slice);
let first_char = slice.line_to_char(line) == range.cursor(slice);
let prev_line_empty = rope_is_line_ending(slice.line(line.saturating_sub(1)));
let curr_line_empty = rope_is_line_ending(slice.line(line));
let prev_empty_to_line = prev_line_empty && !curr_line_empty;
// skip character before paragraph boundary
if prev_empty_to_line && !first_char {
line += 1;
}
let mut lines = slice.lines_at(line);
lines.reverse();
let mut lines = lines.map(rope_is_line_ending).peekable();
for _ in 0..count {
while lines.next_if(|&e| e).is_some() {
line -= 1;
}
while lines.next_if(|&e| !e).is_some() {
line -= 1;
}
}
let head = slice.line_to_char(line);
let anchor = if behavior == Movement::Move {
// exclude first character after paragraph boundary
if prev_empty_to_line && first_char {
range.cursor(slice)
} else {
range.head
}
} else {
range.put_cursor(slice, head, true).anchor
};
Range::new(anchor, head)
}
pub fn move_next_paragraph(
slice: RopeSlice,
range: Range,
count: usize,
behavior: Movement,
) -> Range {
let mut line = range.cursor_line(slice);
let last_char =
prev_grapheme_boundary(slice, slice.line_to_char(line + 1)) == range.cursor(slice);
let curr_line_empty = rope_is_line_ending(slice.line(line));
let next_line_empty =
rope_is_line_ending(slice.line(slice.len_lines().saturating_sub(1).min(line + 1)));
let curr_empty_to_line = curr_line_empty && !next_line_empty;
// skip character after paragraph boundary
if curr_empty_to_line && last_char {
line += 1;
}
let mut lines = slice.lines_at(line).map(rope_is_line_ending).peekable();
for _ in 0..count {
while lines.next_if(|&e| !e).is_some() {
line += 1;
}
while lines.next_if(|&e| e).is_some() {
line += 1;
}
}
let head = slice.line_to_char(line);
let anchor = if behavior == Movement::Move {
if curr_empty_to_line && last_char {
range.head
} else {
range.cursor(slice)
}
} else {
range.put_cursor(slice, head, true).anchor
};
Range::new(anchor, head)
}
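A paragraph motion is applied per selection range, mirroring the tests further below (`Selection::point` and the module paths are assumed):

    use helix_core::movement::{move_next_paragraph, Movement};
    use helix_core::{Rope, Selection};

    fn main() {
        let text = Rope::from_str("first paragraph\n\nsecond paragraph\n");
        let selection = Selection::point(0).transform(|range| {
            move_next_paragraph(text.slice(..), range, 1, Movement::Move)
        });
        println!("{:?}", selection);
    }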
// ---- util ------------ // ---- util ------------
#[inline] #[inline]
@ -190,7 +270,7 @@ pub enum WordMotionTarget {
NextWordEnd, NextWordEnd,
PrevWordStart, PrevWordStart,
PrevWordEnd, PrevWordEnd,
// A "Long word" (also known as a WORD in vim/kakoune) is strictly // A "Long word" (also known as a WORD in Vim/Kakoune) is strictly
// delimited by whitespace, and can consist of punctuation as well // delimited by whitespace, and can consist of punctuation as well
// as alphanumerics. // as alphanumerics.
NextLongWordStart, NextLongWordStart,
@ -316,9 +396,9 @@ pub fn goto_treesitter_object(
dir: Direction, dir: Direction,
slice_tree: Node, slice_tree: Node,
lang_config: &LanguageConfiguration, lang_config: &LanguageConfiguration,
_count: usize, count: usize,
) -> Range { ) -> Range {
let get_range = move || -> Option<Range> { let get_range = move |range: Range| -> Option<Range> {
let byte_pos = slice.char_to_byte(range.cursor(slice)); let byte_pos = slice.char_to_byte(range.cursor(slice));
let cap_name = |t: TextObject| format!("{}.{}", object_name, t); let cap_name = |t: TextObject| format!("{}.{}", object_name, t);
@ -356,13 +436,15 @@ pub fn goto_treesitter_object(
// head of range should be at beginning // head of range should be at beginning
Some(Range::new(end_char, start_char)) Some(Range::new(end_char, start_char))
}; };
get_range().unwrap_or(range) (0..count).fold(range, |range, _| get_range(range).unwrap_or(range))
} }
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use ropey::Rope; use ropey::Rope;
use crate::{coords_at_pos, pos_at_coords};
use super::*; use super::*;
const SINGLE_LINE_SAMPLE: &str = "This is a simple alphabetic line"; const SINGLE_LINE_SAMPLE: &str = "This is a simple alphabetic line";
@ -389,7 +471,7 @@ mod test {
assert_eq!( assert_eq!(
coords_at_pos( coords_at_pos(
slice, slice,
move_vertically(slice, range, Direction::Forward, 1, Movement::Move).head move_vertically(slice, range, Direction::Forward, 1, Movement::Move, 4).head
), ),
(1, 3).into() (1, 3).into()
); );
@ -413,7 +495,7 @@ mod test {
]; ];
for ((direction, amount), coordinates) in moves_and_expected_coordinates { for ((direction, amount), coordinates) in moves_and_expected_coordinates {
range = move_horizontally(slice, range, direction, amount, Movement::Move); range = move_horizontally(slice, range, direction, amount, Movement::Move, 0);
assert_eq!(coords_at_pos(slice, range.head), coordinates.into()) assert_eq!(coords_at_pos(slice, range.head), coordinates.into())
} }
} }
@ -439,7 +521,7 @@ mod test {
]; ];
for ((direction, amount), coordinates) in moves_and_expected_coordinates { for ((direction, amount), coordinates) in moves_and_expected_coordinates {
range = move_horizontally(slice, range, direction, amount, Movement::Move); range = move_horizontally(slice, range, direction, amount, Movement::Move, 0);
assert_eq!(coords_at_pos(slice, range.head), coordinates.into()); assert_eq!(coords_at_pos(slice, range.head), coordinates.into());
assert_eq!(range.head, range.anchor); assert_eq!(range.head, range.anchor);
} }
@ -461,7 +543,7 @@ mod test {
]; ];
for (direction, amount) in moves { for (direction, amount) in moves {
range = move_horizontally(slice, range, direction, amount, Movement::Extend); range = move_horizontally(slice, range, direction, amount, Movement::Extend, 0);
assert_eq!(range.anchor, original_anchor); assert_eq!(range.anchor, original_anchor);
} }
} }
@ -485,7 +567,7 @@ mod test {
]; ];
for ((direction, amount), coordinates) in moves_and_expected_coordinates { for ((direction, amount), coordinates) in moves_and_expected_coordinates {
range = move_vertically(slice, range, direction, amount, Movement::Move); range = move_vertically(slice, range, direction, amount, Movement::Move, 4);
assert_eq!(coords_at_pos(slice, range.head), coordinates.into()); assert_eq!(coords_at_pos(slice, range.head), coordinates.into());
assert_eq!(range.head, range.anchor); assert_eq!(range.head, range.anchor);
} }
@ -519,8 +601,8 @@ mod test {
for ((axis, direction, amount), coordinates) in moves_and_expected_coordinates { for ((axis, direction, amount), coordinates) in moves_and_expected_coordinates {
range = match axis { range = match axis {
Axis::H => move_horizontally(slice, range, direction, amount, Movement::Move), Axis::H => move_horizontally(slice, range, direction, amount, Movement::Move, 0),
Axis::V => move_vertically(slice, range, direction, amount, Movement::Move), Axis::V => move_vertically(slice, range, direction, amount, Movement::Move, 4),
}; };
assert_eq!(coords_at_pos(slice, range.head), coordinates.into()); assert_eq!(coords_at_pos(slice, range.head), coordinates.into());
assert_eq!(range.head, range.anchor); assert_eq!(range.head, range.anchor);
@ -544,18 +626,18 @@ mod test {
let moves_and_expected_coordinates = [ let moves_and_expected_coordinates = [
// Places cursor at the fourth kana. // Places cursor at the fourth kana.
((Axis::H, Direction::Forward, 4), (0, 4)), ((Axis::H, Direction::Forward, 4), (0, 4)),
// Descent places cursor at the 4th character. // Descent places cursor at the 8th character.
((Axis::V, Direction::Forward, 1usize), (1, 4)), ((Axis::V, Direction::Forward, 1usize), (1, 8)),
// Moving back 1 character. // Moving back 2 characters.
((Axis::H, Direction::Backward, 1usize), (1, 3)), ((Axis::H, Direction::Backward, 2usize), (1, 6)),
// Jumping back up 1 line. // Jumping back up 1 line.
((Axis::V, Direction::Backward, 1usize), (0, 3)), ((Axis::V, Direction::Backward, 1usize), (0, 3)),
]; ];
for ((axis, direction, amount), coordinates) in moves_and_expected_coordinates { for ((axis, direction, amount), coordinates) in moves_and_expected_coordinates {
range = match axis { range = match axis {
Axis::H => move_horizontally(slice, range, direction, amount, Movement::Move), Axis::H => move_horizontally(slice, range, direction, amount, Movement::Move, 0),
Axis::V => move_vertically(slice, range, direction, amount, Movement::Move), Axis::V => move_vertically(slice, range, direction, amount, Movement::Move, 4),
}; };
assert_eq!(coords_at_pos(slice, range.head), coordinates.into()); assert_eq!(coords_at_pos(slice, range.head), coordinates.into());
assert_eq!(range.head, range.anchor); assert_eq!(range.head, range.anchor);
@ -1179,4 +1261,172 @@ mod test {
} }
} }
} }
#[test]
fn test_behaviour_when_moving_to_prev_paragraph_single() {
let tests = [
("#[|]#", "#[|]#"),
("#[s|]#tart at\nfirst char\n", "#[|s]#tart at\nfirst char\n"),
("start at\nlast char#[\n|]#", "#[|start at\nlast char\n]#"),
(
"goto\nfirst\n\n#[p|]#aragraph",
"#[|goto\nfirst\n\n]#paragraph",
),
(
"goto\nfirst\n#[\n|]#paragraph",
"#[|goto\nfirst\n\n]#paragraph",
),
(
"goto\nsecond\n\np#[a|]#ragraph",
"goto\nsecond\n\n#[|pa]#ragraph",
),
(
"here\n\nhave\nmultiple\nparagraph\n\n\n\n\n#[|]#",
"here\n\n#[|have\nmultiple\nparagraph\n\n\n\n\n]#",
),
];
for (before, expected) in tests {
let (s, selection) = crate::test::print(before);
let text = Rope::from(s.as_str());
let selection =
selection.transform(|r| move_prev_paragraph(text.slice(..), r, 1, Movement::Move));
let actual = crate::test::plain(&s, selection);
assert_eq!(actual, expected, "\nbefore: `{:?}`", before);
}
}
#[test]
fn test_behaviour_when_moving_to_prev_paragraph_double() {
let tests = [
(
"on#[e|]#\n\ntwo\n\nthree\n\n",
"#[|one]#\n\ntwo\n\nthree\n\n",
),
(
"one\n\ntwo\n\nth#[r|]#ee\n\n",
"one\n\n#[|two\n\nthr]#ee\n\n",
),
];
for (before, expected) in tests {
let (s, selection) = crate::test::print(before);
let text = Rope::from(s.as_str());
let selection =
selection.transform(|r| move_prev_paragraph(text.slice(..), r, 2, Movement::Move));
let actual = crate::test::plain(&s, selection);
assert_eq!(actual, expected, "\nbefore: `{:?}`", before);
}
}
#[test]
fn test_behaviour_when_moving_to_prev_paragraph_extend() {
let tests = [
(
"one\n\n#[|two\n\n]#three\n\n",
"#[|one\n\ntwo\n\n]#three\n\n",
),
(
"#[|one\n\ntwo\n\n]#three\n\n",
"#[|one\n\ntwo\n\n]#three\n\n",
),
];
for (before, expected) in tests {
let (s, selection) = crate::test::print(before);
let text = Rope::from(s.as_str());
let selection = selection
.transform(|r| move_prev_paragraph(text.slice(..), r, 1, Movement::Extend));
let actual = crate::test::plain(&s, selection);
assert_eq!(actual, expected, "\nbefore: `{:?}`", before);
}
}
#[test]
fn test_behaviour_when_moving_to_next_paragraph_single() {
let tests = [
("#[|]#", "#[|]#"),
("#[s|]#tart at\nfirst char\n", "#[start at\nfirst char\n|]#"),
("start at\nlast char#[\n|]#", "start at\nlast char#[\n|]#"),
(
"a\nb\n\n#[g|]#oto\nthird\n\nparagraph",
"a\nb\n\n#[goto\nthird\n\n|]#paragraph",
),
(
"a\nb\n#[\n|]#goto\nthird\n\nparagraph",
"a\nb\n\n#[goto\nthird\n\n|]#paragraph",
),
(
"a\nb#[\n|]#\ngoto\nsecond\n\nparagraph",
"a\nb#[\n\n|]#goto\nsecond\n\nparagraph",
),
(
"here\n\nhave\n#[m|]#ultiple\nparagraph\n\n\n\n\n",
"here\n\nhave\n#[multiple\nparagraph\n\n\n\n\n|]#",
),
(
"#[t|]#ext\n\n\nafter two blank lines\n\nmore text\n",
"#[text\n\n\n|]#after two blank lines\n\nmore text\n",
),
(
"#[text\n\n\n|]#after two blank lines\n\nmore text\n",
"text\n\n\n#[after two blank lines\n\n|]#more text\n",
),
];
for (before, expected) in tests {
let (s, selection) = crate::test::print(before);
let text = Rope::from(s.as_str());
let selection =
selection.transform(|r| move_next_paragraph(text.slice(..), r, 1, Movement::Move));
let actual = crate::test::plain(&s, selection);
assert_eq!(actual, expected, "\nbefore: `{:?}`", before);
}
}
#[test]
fn test_behaviour_when_moving_to_next_paragraph_double() {
let tests = [
(
"one\n\ntwo\n\nth#[r|]#ee\n\n",
"one\n\ntwo\n\nth#[ree\n\n|]#",
),
(
"on#[e|]#\n\ntwo\n\nthree\n\n",
"on#[e\n\ntwo\n\n|]#three\n\n",
),
];
for (before, expected) in tests {
let (s, selection) = crate::test::print(before);
let text = Rope::from(s.as_str());
let selection =
selection.transform(|r| move_next_paragraph(text.slice(..), r, 2, Movement::Move));
let actual = crate::test::plain(&s, selection);
assert_eq!(actual, expected, "\nbefore: `{:?}`", before);
}
}
#[test]
fn test_behaviour_when_moving_to_next_paragraph_extend() {
let tests = [
(
"one\n\n#[two\n\n|]#three\n\n",
"one\n\n#[two\n\nthree\n\n|]#",
),
(
"one\n\n#[two\n\nthree\n\n|]#",
"one\n\n#[two\n\nthree\n\n|]#",
),
];
for (before, expected) in tests {
let (s, selection) = crate::test::print(before);
let text = Rope::from(s.as_str());
let selection = selection
.transform(|r| move_next_paragraph(text.slice(..), r, 1, Movement::Extend));
let actual = crate::test::plain(&s, selection);
assert_eq!(actual, expected, "\nbefore: `{:?}`", before);
}
}
} }
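The trailing argument threaded through these tests is the tab width: vertical motion now works in visual columns, so a cursor leaving a tab-indented or CJK line keeps its on-screen position. A minimal call, as a sketch:

    use helix_core::movement::{move_vertically, Direction, Movement};
    use helix_core::{Range, Rope};

    fn main() {
        let text = Rope::from_str("\tindented with a tab\nplain line\n");
        let moved = move_vertically(
            text.slice(..),
            Range::point(5),
            Direction::Forward,
            1,
            Movement::Move,
            4, // tab width
        );
        println!("new head: {}", moved.head);
    }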

@ -2,12 +2,11 @@ use crate::{Range, RopeSlice, Selection, Syntax};
use tree_sitter::Node; use tree_sitter::Node;
pub fn expand_selection(syntax: &Syntax, text: RopeSlice, selection: Selection) -> Selection { pub fn expand_selection(syntax: &Syntax, text: RopeSlice, selection: Selection) -> Selection {
select_node_impl(syntax, text, selection, |descendant, from, to| { select_node_impl(syntax, text, selection, |mut node, from, to| {
if descendant.start_byte() == from && descendant.end_byte() == to { while node.start_byte() == from && node.end_byte() == to {
descendant.parent() node = node.parent()?;
} else {
Some(descendant)
} }
Some(node)
}) })
} }

@ -5,16 +5,15 @@ use std::path::{Component, Path, PathBuf};
/// is available, otherwise returns the path unchanged. /// is available, otherwise returns the path unchanged.
pub fn fold_home_dir(path: &Path) -> PathBuf { pub fn fold_home_dir(path: &Path) -> PathBuf {
if let Ok(home) = home_dir() { if let Ok(home) = home_dir() {
if path.starts_with(&home) { if let Ok(stripped) = path.strip_prefix(&home) {
// it's ok to unwrap, the path starts with home dir return PathBuf::from("~").join(stripped);
return PathBuf::from("~").join(path.strip_prefix(&home).unwrap());
} }
} }
path.to_path_buf() path.to_path_buf()
} }
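The same `strip_prefix` pattern in isolation, as a standalone sketch with an explicit `home` argument (the crate looks the home directory up itself):

    use std::path::{Path, PathBuf};

    // Fold a home-directory prefix into `~`, falling back to the original path.
    fn fold_home(path: &Path, home: &Path) -> PathBuf {
        match path.strip_prefix(home) {
            Ok(stripped) => PathBuf::from("~").join(stripped),
            Err(_) => path.to_path_buf(),
        }
    }

    fn main() {
        let home = Path::new("/home/user");
        assert_eq!(
            fold_home(Path::new("/home/user/src/main.rs"), home),
            PathBuf::from("~/src/main.rs")
        );
        assert_eq!(fold_home(Path::new("/etc/hosts"), home), PathBuf::from("/etc/hosts"));
    }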
/// Expands tilde `~` into users home directory if avilable, otherwise returns the path /// Expands tilde `~` into users home directory if available, otherwise returns the path
/// unchanged. The tilde will only be expanded when present as the first component of the path /// unchanged. The tilde will only be expanded when present as the first component of the path
/// and only slash follows it. /// and only slash follows it.
pub fn expand_tilde(path: &Path) -> PathBuf { pub fn expand_tilde(path: &Path) -> PathBuf {
@ -91,3 +90,54 @@ pub fn get_relative_path(path: &Path) -> PathBuf {
}; };
fold_home_dir(path) fold_home_dir(path)
} }
/// Returns a truncated file path where each directory in the base part of the path
/// is reduced to its first character, with the full file name appended.
///
/// Also strips the current working directory from the beginning of the path.
/// Note that this function does not check if the truncated path is unambiguous.
///
/// ```
/// use helix_core::path::get_truncated_path;
/// use std::path::Path;
///
/// assert_eq!(
/// get_truncated_path("/home/cnorris/documents/jokes.txt").as_path(),
/// Path::new("/h/c/d/jokes.txt")
/// );
/// assert_eq!(
/// get_truncated_path("jokes.txt").as_path(),
/// Path::new("jokes.txt")
/// );
/// assert_eq!(
/// get_truncated_path("/jokes.txt").as_path(),
/// Path::new("/jokes.txt")
/// );
/// assert_eq!(
/// get_truncated_path("/h/c/d/jokes.txt").as_path(),
/// Path::new("/h/c/d/jokes.txt")
/// );
/// assert_eq!(get_truncated_path("").as_path(), Path::new(""));
/// ```
///
pub fn get_truncated_path<P: AsRef<Path>>(path: P) -> PathBuf {
let cwd = std::env::current_dir().unwrap_or_default();
let path = path
.as_ref()
.strip_prefix(cwd)
.unwrap_or_else(|_| path.as_ref());
let file = path.file_name().unwrap_or_default();
let base = path.parent().unwrap_or_else(|| Path::new(""));
let mut ret = PathBuf::new();
for d in base {
ret.push(
d.to_string_lossy()
.chars()
.next()
.unwrap_or_default()
.to_string(),
);
}
ret.push(file);
ret
}

@ -109,9 +109,6 @@ pub fn visual_coords_at_pos(text: RopeSlice, pos: usize, tab_width: usize) -> Po
/// with left-side block-cursor positions, as this prevents the block cursor /// with left-side block-cursor positions, as this prevents the block cursor
/// from jumping to the next line. Otherwise you typically want it to be `false`, /// from jumping to the next line. Otherwise you typically want it to be `false`,
/// from jumping to the next line. Otherwise you typically want it to be `false`, /// from jumping to the next line. Otherwise you typically want it to be `false`,
/// such as when dealing with raw anchor/head positions. /// such as when dealing with raw anchor/head positions.
///
/// TODO: this should be changed to work in terms of visual row/column, not
/// graphemes.
pub fn pos_at_coords(text: RopeSlice, coords: Position, limit_before_line_ending: bool) -> usize { pub fn pos_at_coords(text: RopeSlice, coords: Position, limit_before_line_ending: bool) -> usize {
let Position { mut row, col } = coords; let Position { mut row, col } = coords;
if limit_before_line_ending { if limit_before_line_ending {
@ -135,6 +132,43 @@ pub fn pos_at_coords(text: RopeSlice, coords: Position, limit_before_line_ending
line_start + col_char_offset line_start + col_char_offset
} }
/// Convert visual (line, column) coordinates to a character index.
///
/// If the `line` coordinate is beyond the end of the file, the EOF
/// position will be returned.
///
/// If the `column` coordinate is past the end of the given line, the
/// line-end position (in this case, just before the line ending
/// character) will be returned.
pub fn pos_at_visual_coords(text: RopeSlice, coords: Position, tab_width: usize) -> usize {
let Position { mut row, col } = coords;
row = row.min(text.len_lines() - 1);
let line_start = text.line_to_char(row);
let line_end = line_end_char_index(&text, row);
let mut col_char_offset = 0;
let mut cols_remaining = col;
for grapheme in RopeGraphemes::new(text.slice(line_start..line_end)) {
let grapheme_width = if grapheme == "\t" {
tab_width - ((col - cols_remaining) % tab_width)
} else {
let grapheme = Cow::from(grapheme);
grapheme_width(&grapheme)
};
// If pos is in the middle of a wider grapheme (tab for example)
// return the starting offset.
if grapheme_width > cols_remaining {
break;
}
cols_remaining -= grapheme_width;
col_char_offset += grapheme.chars().count();
}
line_start + col_char_offset
}
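Together with `visual_coords_at_pos`, this gives a visual-column round trip; for a tab-indented line with tab width 4:

    use helix_core::{pos_at_visual_coords, visual_coords_at_pos, Position, Rope};

    fn main() {
        let text = Rope::from_str("\tHello\n");
        let slice = text.slice(..);
        // Visual column 4 is the 'H' right after the tab ...
        let pos = pos_at_visual_coords(slice, Position::new(0, 4), 4);
        assert_eq!(pos, 1);
        // ... and mapping back recovers the same visual column.
        assert_eq!(visual_coords_at_pos(slice, pos, 4), Position::new(0, 4));
    }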
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use super::*; use super::*;
@ -305,4 +339,70 @@ mod test {
assert_eq!(pos_at_coords(slice, (0, 10).into(), true), 0); assert_eq!(pos_at_coords(slice, (0, 10).into(), true), 0);
assert_eq!(pos_at_coords(slice, (10, 10).into(), true), 0); assert_eq!(pos_at_coords(slice, (10, 10).into(), true), 0);
} }
#[test]
fn test_pos_at_visual_coords() {
let text = Rope::from("ḧëḷḷö\nẅöṛḷḋ");
let slice = text.slice(..);
assert_eq!(pos_at_visual_coords(slice, (0, 0).into(), 4), 0);
assert_eq!(pos_at_visual_coords(slice, (0, 5).into(), 4), 5); // position on \n
assert_eq!(pos_at_visual_coords(slice, (0, 6).into(), 4), 5); // position after \n
assert_eq!(pos_at_visual_coords(slice, (1, 0).into(), 4), 6); // position on w
assert_eq!(pos_at_visual_coords(slice, (1, 1).into(), 4), 7); // position on o
assert_eq!(pos_at_visual_coords(slice, (1, 4).into(), 4), 10); // position on d
// Test with wide characters.
let text = Rope::from("今日はいい\n");
let slice = text.slice(..);
assert_eq!(pos_at_visual_coords(slice, (0, 0).into(), 4), 0);
assert_eq!(pos_at_visual_coords(slice, (0, 1).into(), 4), 0);
assert_eq!(pos_at_visual_coords(slice, (0, 2).into(), 4), 1);
assert_eq!(pos_at_visual_coords(slice, (0, 3).into(), 4), 1);
assert_eq!(pos_at_visual_coords(slice, (0, 4).into(), 4), 2);
assert_eq!(pos_at_visual_coords(slice, (0, 5).into(), 4), 2);
assert_eq!(pos_at_visual_coords(slice, (0, 6).into(), 4), 3);
assert_eq!(pos_at_visual_coords(slice, (0, 7).into(), 4), 3);
assert_eq!(pos_at_visual_coords(slice, (0, 8).into(), 4), 4);
assert_eq!(pos_at_visual_coords(slice, (0, 9).into(), 4), 4);
// assert_eq!(pos_at_visual_coords(slice, (0, 10).into(), 4, false), 5);
// assert_eq!(pos_at_visual_coords(slice, (0, 10).into(), 4, true), 5);
assert_eq!(pos_at_visual_coords(slice, (1, 0).into(), 4), 6);
// Test with grapheme clusters.
let text = Rope::from("a̐éö̲\r\n");
let slice = text.slice(..);
assert_eq!(pos_at_visual_coords(slice, (0, 0).into(), 4), 0);
assert_eq!(pos_at_visual_coords(slice, (0, 1).into(), 4), 2);
assert_eq!(pos_at_visual_coords(slice, (0, 2).into(), 4), 4);
assert_eq!(pos_at_visual_coords(slice, (0, 3).into(), 4), 7); // \r\n is one char here
assert_eq!(pos_at_visual_coords(slice, (0, 4).into(), 4), 7);
assert_eq!(pos_at_visual_coords(slice, (1, 0).into(), 4), 9);
// Test with wide-character grapheme clusters.
let text = Rope::from("किमपि");
// 2 - 1 - 2 codepoints
// TODO: delete handling as per https://news.ycombinator.com/item?id=20058454
let slice = text.slice(..);
assert_eq!(pos_at_visual_coords(slice, (0, 0).into(), 4), 0);
assert_eq!(pos_at_visual_coords(slice, (0, 1).into(), 4), 0);
assert_eq!(pos_at_visual_coords(slice, (0, 2).into(), 4), 2);
assert_eq!(pos_at_visual_coords(slice, (0, 3).into(), 4), 3);
// Test with tabs.
let text = Rope::from("\tHello\n");
let slice = text.slice(..);
assert_eq!(pos_at_visual_coords(slice, (0, 0).into(), 4), 0);
assert_eq!(pos_at_visual_coords(slice, (0, 1).into(), 4), 0);
assert_eq!(pos_at_visual_coords(slice, (0, 2).into(), 4), 0);
assert_eq!(pos_at_visual_coords(slice, (0, 3).into(), 4), 0);
assert_eq!(pos_at_visual_coords(slice, (0, 4).into(), 4), 1);
assert_eq!(pos_at_visual_coords(slice, (0, 5).into(), 4), 2);
// Test out of bounds.
let text = Rope::new();
let slice = text.slice(..);
assert_eq!(pos_at_visual_coords(slice, (10, 0).into(), 4), 0);
assert_eq!(pos_at_visual_coords(slice, (0, 10).into(), 4), 0);
assert_eq!(pos_at_visual_coords(slice, (10, 10).into(), 4), 0);
}
} }

@ -69,6 +69,14 @@ impl Registers {
self.get(name).map(|reg| reg.read()) self.get(name).map(|reg| reg.read())
} }
pub fn first(&self, name: char) -> Option<&String> {
self.read(name).and_then(|entries| entries.first())
}
pub fn last(&self, name: char) -> Option<&String> {
self.read(name).and_then(|entries| entries.last())
}
pub fn inner(&self) -> &HashMap<char, Register> { pub fn inner(&self) -> &HashMap<char, Register> {
&self.inner &self.inner
} }
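The new helpers make the common "newest/oldest entry" lookups one-liners; a sketch with a `Registers` value taken as given (module path assumed):

    use helix_core::register::Registers;

    fn newest_yank(registers: &Registers) -> Option<&String> {
        registers.last('"')
    }

    fn oldest_yank(registers: &Registers) -> Option<&String> {
        registers.first('"')
    }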

@ -1,6 +1,28 @@
use crate::RopeSlice; use crate::RopeSlice;
pub fn find_nth_next(text: RopeSlice, ch: char, mut pos: usize, n: usize) -> Option<usize> { // TODO: switch to std::str::Pattern when it is stable.
pub trait CharMatcher {
fn char_match(&self, ch: char) -> bool;
}
impl CharMatcher for char {
fn char_match(&self, ch: char) -> bool {
*self == ch
}
}
impl<F: Fn(&char) -> bool> CharMatcher for F {
fn char_match(&self, ch: char) -> bool {
(*self)(&ch)
}
}
pub fn find_nth_next<M: CharMatcher>(
text: RopeSlice,
char_matcher: M,
mut pos: usize,
n: usize,
) -> Option<usize> {
if pos >= text.len_chars() || n == 0 { if pos >= text.len_chars() || n == 0 {
return None; return None;
} }
@ -13,7 +35,7 @@ pub fn find_nth_next(text: RopeSlice, ch: char, mut pos: usize, n: usize) -> Opt
pos += 1; pos += 1;
if c == ch { if char_matcher.char_match(c) {
break; break;
} }
} }
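With `CharMatcher` in place, `find_nth_next` accepts either a literal character or a predicate over `&char`; a usage sketch (module path assumed):

    use helix_core::search::find_nth_next;
    use helix_core::Rope;

    fn main() {
        let text = Rope::from_str("fn main() { body }");
        let slice = text.slice(..);
        let open_paren = find_nth_next(slice, '(', 0, 1);
        let whitespace = find_nth_next(slice, |ch: &char| ch.is_whitespace(), 0, 1);
        println!("{:?} {:?}", open_paren, whitespace);
    }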

@ -8,7 +8,7 @@ use crate::{
prev_grapheme_boundary, prev_grapheme_boundary,
}, },
movement::Direction, movement::Direction,
Assoc, ChangeSet, RopeSlice, Assoc, ChangeSet, RopeGraphemes, RopeSlice,
}; };
use smallvec::{smallvec, SmallVec}; use smallvec::{smallvec, SmallVec};
use std::borrow::Cow; use std::borrow::Cow;
@ -222,9 +222,23 @@ impl Range {
// groupAt // groupAt
/// Returns the text inside this range given the text of the whole buffer.
///
/// The returned `Cow` is a reference if the range of text is inside a single
/// chunk of the rope. Otherwise a copy of the text is returned. Consider
/// using `slice` instead if you do not need a `Cow` or `String` to avoid copying.
#[inline] #[inline]
pub fn fragment<'a, 'b: 'a>(&'a self, text: RopeSlice<'b>) -> Cow<'b, str> { pub fn fragment<'a, 'b: 'a>(&'a self, text: RopeSlice<'b>) -> Cow<'b, str> {
text.slice(self.from()..self.to()).into() self.slice(text).into()
}
/// Returns the text inside this range given the text of the whole buffer.
///
/// The returned value is a reference to the passed slice. This method never
/// copies any contents.
#[inline]
pub fn slice<'a, 'b: 'a>(&'a self, text: RopeSlice<'b>) -> RopeSlice<'b> {
text.slice(self.from()..self.to())
} }
//-------------------------------- //--------------------------------
@ -339,6 +353,14 @@ impl Range {
pub fn cursor_line(&self, text: RopeSlice) -> usize { pub fn cursor_line(&self, text: RopeSlice) -> usize {
text.char_to_line(self.cursor(text)) text.char_to_line(self.cursor(text))
} }
/// Returns true if this Range covers a single grapheme in the given text
pub fn is_single_grapheme(&self, doc: RopeSlice) -> bool {
let mut graphemes = RopeGraphemes::new(doc.slice(self.from()..self.to()));
let first = graphemes.next();
let second = graphemes.next();
first.is_some() && second.is_none()
}
} }
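A small sketch of the new `Range` helpers side by side:

    use helix_core::{Range, Rope};

    fn main() {
        let text = Rope::from_str("hello world");
        let slice = text.slice(..);
        let range = Range::new(0, 5);

        // Borrow the covered text as a RopeSlice without copying ...
        assert_eq!(range.slice(slice), "hello");
        // ... or get a Cow<str> when contiguous text is needed.
        assert_eq!(range.fragment(slice), "hello");
        // Five characters are more than one grapheme.
        assert!(!range.is_single_grapheme(slice));
    }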
impl From<(usize, usize)> for Range { impl From<(usize, usize)> for Range {
@ -540,6 +562,10 @@ impl Selection {
self.ranges.iter().map(move |range| range.fragment(text)) self.ranges.iter().map(move |range| range.fragment(text))
} }
pub fn slices<'a>(&'a self, text: RopeSlice<'a>) -> impl Iterator<Item = RopeSlice> + 'a {
self.ranges.iter().map(move |range| range.slice(text))
}
#[inline(always)] #[inline(always)]
pub fn iter(&self) -> std::slice::Iter<'_, Range> { pub fn iter(&self) -> std::slice::Iter<'_, Range> {
self.ranges.iter() self.ranges.iter()
@ -633,7 +659,13 @@ pub fn select_on_matches(
let start = text.byte_to_char(start_byte + mat.start()); let start = text.byte_to_char(start_byte + mat.start());
let end = text.byte_to_char(start_byte + mat.end()); let end = text.byte_to_char(start_byte + mat.end());
result.push(Range::new(start, end));
let range = Range::new(start, end);
// Make sure the match is not right outside of the selection.
// These invalid matches can come from using RegEx anchors like `^`, `$`
if range != Range::point(sel.to()) {
result.push(range);
}
} }
} }
@ -830,7 +862,7 @@ mod test {
} }
#[test] #[test]
fn test_graphem_aligned() { fn test_grapheme_aligned() {
let r = Rope::from_str("\r\nHi\r\n"); let r = Rope::from_str("\r\nHi\r\n");
let s = r.slice(..); let s = r.slice(..);
@ -903,6 +935,76 @@ mod test {
assert_eq!(Range::new(6, 5).min_width_1(s), Range::new(6, 5)); assert_eq!(Range::new(6, 5).min_width_1(s), Range::new(6, 5));
} }
#[test]
fn test_select_on_matches() {
use crate::regex::{Regex, RegexBuilder};
let r = Rope::from_str("Nobody expects the Spanish inquisition");
let s = r.slice(..);
let selection = Selection::single(0, r.len_chars());
assert_eq!(
select_on_matches(s, &selection, &Regex::new(r"[A-Z][a-z]*").unwrap()),
Some(Selection::new(
smallvec![Range::new(0, 6), Range::new(19, 26)],
0
))
);
let r = Rope::from_str("This\nString\n\ncontains multiple\nlines");
let s = r.slice(..);
let start_of_line = RegexBuilder::new(r"^").multi_line(true).build().unwrap();
let end_of_line = RegexBuilder::new(r"$").multi_line(true).build().unwrap();
// line without ending
assert_eq!(
select_on_matches(s, &Selection::single(0, 4), &start_of_line),
Some(Selection::single(0, 0))
);
assert_eq!(
select_on_matches(s, &Selection::single(0, 4), &end_of_line),
None
);
// line with ending
assert_eq!(
select_on_matches(s, &Selection::single(0, 5), &start_of_line),
Some(Selection::single(0, 0))
);
assert_eq!(
select_on_matches(s, &Selection::single(0, 5), &end_of_line),
Some(Selection::single(4, 4))
);
// line with start of next line
assert_eq!(
select_on_matches(s, &Selection::single(0, 6), &start_of_line),
Some(Selection::new(
smallvec![Range::point(0), Range::point(5)],
0
))
);
assert_eq!(
select_on_matches(s, &Selection::single(0, 6), &end_of_line),
Some(Selection::single(4, 4))
);
// multiple lines
assert_eq!(
select_on_matches(
s,
&Selection::single(0, s.len_chars()),
&RegexBuilder::new(r"^[a-z ]*$")
.multi_line(true)
.build()
.unwrap()
),
Some(Selection::new(
smallvec![Range::point(12), Range::new(13, 30), Range::new(31, 36)],
0
))
);
}
#[test] #[test]
fn test_line_range() { fn test_line_range() {
let r = Rope::from_str("\r\nHi\r\nthere!"); let r = Rope::from_str("\r\nHi\r\nthere!");

@ -24,9 +24,13 @@ pub fn shellwords(input: &str) -> Vec<Cow<'_, str>> {
state = match state { state = match state {
Normal => match c { Normal => match c {
'\\' => { '\\' => {
if cfg!(unix) {
escaped.push_str(&input[start..i]); escaped.push_str(&input[start..i]);
start = i + 1; start = i + 1;
NormalEscaped NormalEscaped
} else {
Normal
}
} }
'"' => { '"' => {
end = i; end = i;
@ -45,9 +49,13 @@ pub fn shellwords(input: &str) -> Vec<Cow<'_, str>> {
NormalEscaped => Normal, NormalEscaped => Normal,
Quoted => match c { Quoted => match c {
'\\' => { '\\' => {
if cfg!(unix) {
escaped.push_str(&input[start..i]); escaped.push_str(&input[start..i]);
start = i + 1; start = i + 1;
QuoteEscaped QuoteEscaped
} else {
Quoted
}
} }
'\'' => { '\'' => {
end = i; end = i;
@ -58,9 +66,13 @@ pub fn shellwords(input: &str) -> Vec<Cow<'_, str>> {
QuoteEscaped => Quoted, QuoteEscaped => Quoted,
Dquoted => match c { Dquoted => match c {
'\\' => { '\\' => {
if cfg!(unix) {
escaped.push_str(&input[start..i]); escaped.push_str(&input[start..i]);
start = i + 1; start = i + 1;
DquoteEscaped DquoteEscaped
} else {
Dquoted
}
} }
'"' => { '"' => {
end = i; end = i;
@ -99,6 +111,25 @@ mod test {
use super::*; use super::*;
#[test] #[test]
#[cfg(windows)]
fn test_normal() {
let input = r#":o single_word twó wörds \three\ \"with\ escaping\\"#;
let result = shellwords(input);
let expected = vec![
Cow::from(":o"),
Cow::from("single_word"),
Cow::from("twó"),
Cow::from("wörds"),
Cow::from("\\three\\"),
Cow::from("\\"),
Cow::from("with\\ escaping\\\\"),
];
// TODO test is_owned and is_borrowed, once they get stabilized.
assert_eq!(expected, result);
}
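The `cfg!(unix)` gating reflects a design choice: on Windows the backslash is the path separator, so it is kept literally instead of acting as an escape. A sketch of the platform difference (module path assumed):

    use helix_core::shellwords::shellwords;

    fn main() {
        let args = shellwords(r#":o C:\Users\helix\config.toml"#);
        // On Windows the backslashes survive verbatim; on Unix each one escapes
        // the character after it and is dropped from the resulting word.
        println!("{:?}", args);
    }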
#[test]
#[cfg(unix)]
fn test_normal() { fn test_normal() {
let input = r#":o single_word twó wörds \three\ \"with\ escaping\\"#; let input = r#":o single_word twó wörds \three\ \"with\ escaping\\"#;
let result = shellwords(input); let result = shellwords(input);
@ -114,6 +145,7 @@ mod test {
} }
#[test] #[test]
#[cfg(unix)]
fn test_quoted() { fn test_quoted() {
let quoted = let quoted =
r#":o 'single_word' 'twó wörds' '' ' ''\three\' \"with\ escaping\\' 'quote incomplete"#; r#":o 'single_word' 'twó wörds' '' ' ''\three\' \"with\ escaping\\' 'quote incomplete"#;
@ -129,6 +161,7 @@ mod test {
} }
#[test] #[test]
#[cfg(unix)]
fn test_dquoted() { fn test_dquoted() {
let dquoted = r#":o "single_word" "twó wörds" "" " ""\three\' \"with\ escaping\\" "dquote incomplete"#; let dquoted = r#":o "single_word" "twó wörds" "" " ""\three\' \"with\ escaping\\" "dquote incomplete"#;
let result = shellwords(dquoted); let result = shellwords(dquoted);
@ -143,6 +176,7 @@ mod test {
} }
#[test] #[test]
#[cfg(unix)]
fn test_mixed() { fn test_mixed() {
let dquoted = r#":o single_word 'twó wörds' "\three\' \"with\ escaping\\""no space before"'and after' $#%^@ "%^&(%^" ')(*&^%''a\\\\\b' '"#; let dquoted = r#":o single_word 'twó wörds' "\three\' \"with\ escaping\\""no space before"'and after' $#%^@ "%^&(%^" ')(*&^%''a\\\\\b' '"#;
let result = shellwords(dquoted); let result = shellwords(dquoted);
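The `cfg!(unix)` guards added above make backslash escaping a Unix-only behaviour, so Windows paths are left untouched by the command-line parser. A minimal standalone sketch of that pattern (not the Helix shellwords parser itself):

```rust
// cfg!(unix) expands to a compile-time boolean, so on non-Unix targets the
// escape branches above are never taken and a backslash stays a literal
// character (e.g. in Windows paths like C:\foo\bar).
fn backslash_is_escape() -> bool {
    cfg!(unix)
}

fn main() {
    if backslash_is_escape() {
        println!(r"\ starts an escape sequence");
    } else {
        println!(r"\ is kept as a literal character");
    }
}
```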

@ -52,6 +52,45 @@ pub fn get_pair(ch: char) -> (char, char) {
.unwrap_or((ch, ch)) .unwrap_or((ch, ch))
} }
pub fn find_nth_closest_pairs_pos(
text: RopeSlice,
range: Range,
n: usize,
) -> Result<(usize, usize)> {
let is_open_pair = |ch| PAIRS.iter().any(|(open, _)| *open == ch);
let is_close_pair = |ch| PAIRS.iter().any(|(_, close)| *close == ch);
let mut stack = Vec::with_capacity(2);
let pos = range.cursor(text);
for ch in text.chars_at(pos) {
if is_open_pair(ch) {
// Track open pairs encountered so that we can step over
// the corresponding close pairs that will come up further
// down the loop. We want to find a lone close pair whose
// open pair is before the cursor position.
stack.push(ch);
continue;
} else if is_close_pair(ch) {
let (open, _) = get_pair(ch);
if stack.last() == Some(&open) {
stack.pop();
continue;
} else {
// In the ideal case the stack would be empty here and the
// current character would be the close pair that we are
// looking for. It could also be the case that the pairs
// are unbalanced and we encounter a close pair that doesn't
// close the last seen open pair. In either case use this
// char as the auto-detected closest pair.
return find_nth_pairs_pos(text, ch, range, n);
}
}
}
Err(Error::PairNotFound)
}
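A standalone sketch of the scan described in the comments above, operating on a plain `&str` instead of a `RopeSlice` (names here are illustrative, not the Helix API): open pairs seen after the cursor are pushed onto a stack and popped when their close appears, so the first close character that does not match the top of the stack belongs to a pair opened before the cursor.

```rust
// Sketch of the closest-pair auto-detection: step over pairs that both open
// and close after the cursor, and stop at the first lone close character.
fn closest_close_pair(text: &str, cursor: usize) -> Option<char> {
    const PAIRS: &[(char, char)] = &[('(', ')'), ('[', ']'), ('{', '}')];
    let is_open = |ch: char| PAIRS.iter().any(|&(open, _)| open == ch);
    let open_for = |ch: char| {
        PAIRS
            .iter()
            .find(|&&(_, close)| close == ch)
            .map(|&(open, _)| open)
    };

    let mut stack = Vec::new();
    for ch in text.chars().skip(cursor) {
        if is_open(ch) {
            stack.push(ch);
        } else if let Some(open) = open_for(ch) {
            if stack.last() == Some(&open) {
                stack.pop(); // balanced pair after the cursor: step over it
            } else {
                return Some(ch); // lone close: its open pair is before the cursor
            }
        }
    }
    None
}

fn main() {
    let text = "[some (words) here]";
    // Cursor on "s": the inner "(words)" is stepped over, the outer ']' is found.
    assert_eq!(closest_close_pair(text, 1), Some(']'));
    // Cursor inside the parentheses: ')' is the closest close pair.
    assert_eq!(closest_close_pair(text, 8), Some(')'));
}
```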
/// Find the position of surround pairs of `ch` which can be either a closing /// Find the position of surround pairs of `ch` which can be either a closing
/// or opening pair. `n` will skip n - 1 pairs (eg. n=2 will discard (only) /// or opening pair. `n` will skip n - 1 pairs (eg. n=2 will discard (only)
/// the first pair found and keep looking) /// the first pair found and keep looking)
@ -173,17 +212,22 @@ fn find_nth_close_pair(
/// Find position of surround characters around every cursor. Returns None /// Find position of surround characters around every cursor. Returns None
/// if any positions overlap. Note that the positions are in a flat Vec. /// if any positions overlap. Note that the positions are in a flat Vec.
/// Use get_surround_pos().chunks(2) to get matching pairs of surround positions. /// Use get_surround_pos().chunks(2) to get matching pairs of surround positions.
/// `ch` can be either closing or opening pair. /// `ch` can be either closing or opening pair. If `ch` is None, surround pairs
/// are automatically detected around each cursor (note that this may result
/// in them selecting different surround characters for each selection).
pub fn get_surround_pos( pub fn get_surround_pos(
text: RopeSlice, text: RopeSlice,
selection: &Selection, selection: &Selection,
ch: char, ch: Option<char>,
skip: usize, skip: usize,
) -> Result<Vec<usize>> { ) -> Result<Vec<usize>> {
let mut change_pos = Vec::new(); let mut change_pos = Vec::new();
for &range in selection { for &range in selection {
let (open_pos, close_pos) = find_nth_pairs_pos(text, ch, range, skip)?; let (open_pos, close_pos) = match ch {
Some(ch) => find_nth_pairs_pos(text, ch, range, skip)?,
None => find_nth_closest_pairs_pos(text, range, skip)?,
};
if change_pos.contains(&open_pos) || change_pos.contains(&close_pos) { if change_pos.contains(&open_pos) || change_pos.contains(&close_pos) {
return Err(Error::CursorOverlap); return Err(Error::CursorOverlap);
} }
@ -299,7 +343,7 @@ mod test {
// cursor on s[o]me, c[h]ars, newl[i]ne // cursor on s[o]me, c[h]ars, newl[i]ne
assert_eq!( assert_eq!(
get_surround_pos(slice, &selection, '(', 1) get_surround_pos(slice, &selection, Some('('), 1)
.unwrap() .unwrap()
.as_slice(), .as_slice(),
&[0, 5, 7, 13, 15, 23] &[0, 5, 7, 13, 15, 23]
@ -315,7 +359,7 @@ mod test {
Selection::new(SmallVec::from_slice(&[Range::point(2), Range::point(9)]), 0); Selection::new(SmallVec::from_slice(&[Range::point(2), Range::point(9)]), 0);
// cursor on s[o]me, c[h]ars // cursor on s[o]me, c[h]ars
assert_eq!( assert_eq!(
get_surround_pos(slice, &selection, '(', 1), get_surround_pos(slice, &selection, Some('('), 1),
Err(Error::PairNotFound) // different surround chars Err(Error::PairNotFound) // different surround chars
); );
@ -325,7 +369,7 @@ mod test {
); );
// cursor on [x]x, newli[n]e // cursor on [x]x, newli[n]e
assert_eq!( assert_eq!(
get_surround_pos(slice, &selection, '(', 1), get_surround_pos(slice, &selection, Some('('), 1),
Err(Error::PairNotFound) // overlapping surround chars Err(Error::PairNotFound) // overlapping surround chars
); );
@ -333,7 +377,7 @@ mod test {
Selection::new(SmallVec::from_slice(&[Range::point(2), Range::point(3)]), 0); Selection::new(SmallVec::from_slice(&[Range::point(2), Range::point(3)]), 0);
// cursor on s[o][m]e // cursor on s[o][m]e
assert_eq!( assert_eq!(
get_surround_pos(slice, &selection, '[', 1), get_surround_pos(slice, &selection, Some('['), 1),
Err(Error::CursorOverlap) Err(Error::CursorOverlap)
); );
} }

@ -50,6 +50,10 @@ where
Ok(Option::<AutoPairConfig>::deserialize(deserializer)?.and_then(AutoPairConfig::into)) Ok(Option::<AutoPairConfig>::deserialize(deserializer)?.and_then(AutoPairConfig::into))
} }
fn default_timeout() -> u64 {
20
}
#[derive(Debug, Serialize, Deserialize)] #[derive(Debug, Serialize, Deserialize)]
pub struct Configuration { pub struct Configuration {
pub language: Vec<LanguageConfiguration>, pub language: Vec<LanguageConfiguration>,
@ -67,12 +71,17 @@ pub struct LanguageConfiguration {
pub shebangs: Vec<String>, // interpreter(s) associated with language pub shebangs: Vec<String>, // interpreter(s) associated with language
pub roots: Vec<String>, // these indicate project roots <.git, Cargo.toml> pub roots: Vec<String>, // these indicate project roots <.git, Cargo.toml>
pub comment_token: Option<String>, pub comment_token: Option<String>,
pub max_line_length: Option<usize>,
#[serde(default, skip_serializing, deserialize_with = "deserialize_lsp_config")] #[serde(default, skip_serializing, deserialize_with = "deserialize_lsp_config")]
pub config: Option<serde_json::Value>, pub config: Option<serde_json::Value>,
#[serde(default)] #[serde(default)]
pub auto_format: bool, pub auto_format: bool,
#[serde(skip_serializing_if = "Option::is_none")]
pub formatter: Option<FormatterConfiguration>,
#[serde(default)] #[serde(default)]
pub diagnostic_severity: Severity, pub diagnostic_severity: Severity,
@ -104,6 +113,8 @@ pub struct LanguageConfiguration {
/// global setting. /// global setting.
#[serde(default, skip_serializing, deserialize_with = "deserialize_auto_pairs")] #[serde(default, skip_serializing, deserialize_with = "deserialize_auto_pairs")]
pub auto_pairs: Option<AutoPairs>, pub auto_pairs: Option<AutoPairs>,
pub rulers: Option<Vec<u16>>, // if set, override editor's rulers
} }
#[derive(Debug, Serialize, Deserialize)] #[derive(Debug, Serialize, Deserialize)]
@ -113,9 +124,20 @@ pub struct LanguageServerConfiguration {
#[serde(default)] #[serde(default)]
#[serde(skip_serializing_if = "Vec::is_empty")] #[serde(skip_serializing_if = "Vec::is_empty")]
pub args: Vec<String>, pub args: Vec<String>,
#[serde(default = "default_timeout")]
pub timeout: u64,
pub language_id: Option<String>, pub language_id: Option<String>,
} }
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct FormatterConfiguration {
pub command: String,
#[serde(default)]
#[serde(skip_serializing_if = "Vec::is_empty")]
pub args: Vec<String>,
}
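The new `default_timeout` function and the `#[serde(default = "default_timeout")]` attribute mean a language server entry that omits `timeout` deserializes to 20 seconds. A minimal sketch of that pattern with a simplified struct, assuming the serde and toml crates (field names here are illustrative):

```rust
use serde::Deserialize;

fn default_timeout() -> u64 {
    20
}

// Simplified stand-in for LanguageServerConfiguration: a missing `timeout`
// key falls back to the value returned by `default_timeout`.
#[derive(Debug, Deserialize)]
struct LanguageServer {
    command: String,
    #[serde(default = "default_timeout")]
    timeout: u64,
}

fn main() {
    let explicit: LanguageServer =
        toml::from_str("command = \"rust-analyzer\"\ntimeout = 5").unwrap();
    let defaulted: LanguageServer =
        toml::from_str("command = \"rust-analyzer\"").unwrap();
    assert_eq!(explicit.timeout, 5);
    assert_eq!(defaulted.timeout, 20);
    println!("{} / {}", explicit.command, defaulted.command);
}
```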
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "kebab-case")] #[serde(rename_all = "kebab-case")]
pub struct AdvancedCompletion { pub struct AdvancedCompletion {
@ -225,9 +247,10 @@ pub struct TextObjectQuery {
pub query: Query, pub query: Query,
} }
#[derive(Debug)]
pub enum CapturedNode<'a> { pub enum CapturedNode<'a> {
Single(Node<'a>), Single(Node<'a>),
/// Guarenteed to be not empty /// Guaranteed to be not empty
Grouped(Vec<Node<'a>>), Grouped(Vec<Node<'a>>),
} }
@ -259,12 +282,12 @@ impl TextObjectQuery {
/// and support for this is partial and could use improvement. /// and support for this is partial and could use improvement.
/// ///
/// ```query /// ```query
/// ;; supported:
/// (comment)+ @capture /// (comment)+ @capture
/// ///
/// ;; unsupported: /// ; OR
/// ( /// (
/// (comment)+ /// (comment)*
/// .
/// (function) /// (function)
/// ) @capture /// ) @capture
/// ``` /// ```
@ -290,61 +313,42 @@ impl TextObjectQuery {
let capture_idx = capture_names let capture_idx = capture_names
.iter() .iter()
.find_map(|cap| self.query.capture_index_for_name(cap))?; .find_map(|cap| self.query.capture_index_for_name(cap))?;
let captures = cursor.matches(&self.query, node, RopeProvider(slice));
let nodes = cursor
let nodes = captures.flat_map(move |mat| { .captures(&self.query, node, RopeProvider(slice))
let captures = mat.captures.iter().filter(move |c| c.index == capture_idx); .filter_map(move |(mat, _)| {
let nodes = captures.map(|c| c.node); let nodes: Vec<_> = mat
let pattern_idx = mat.pattern_index; .captures
let quantifier = self.query.capture_quantifiers(pattern_idx)[capture_idx as usize]; .iter()
.filter_map(|cap| (cap.index == capture_idx).then(|| cap.node))
let iter: Box<dyn Iterator<Item = CapturedNode>> = match quantifier { .collect();
CaptureQuantifier::OneOrMore | CaptureQuantifier::ZeroOrMore => {
let nodes: Vec<Node> = nodes.collect(); if nodes.len() > 1 {
if nodes.is_empty() { Some(CapturedNode::Grouped(nodes))
Box::new(std::iter::empty())
} else { } else {
Box::new(std::iter::once(CapturedNode::Grouped(nodes))) nodes.into_iter().map(CapturedNode::Single).next()
} }
}
_ => Box::new(nodes.map(CapturedNode::Single)),
};
iter
}); });
Some(nodes) Some(nodes)
} }
} }
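The rewritten body above collapses each query match into at most one textobject: several captured nodes become `Grouped`, a single capture stays `Single`, and matches with no captured nodes are dropped. A self-contained sketch of just that grouping rule:

```rust
// Simplified grouping rule from the capture-processing code above.
#[derive(Debug, PartialEq)]
enum Captured<T> {
    Single(T),
    Grouped(Vec<T>),
}

fn group<T>(nodes: Vec<T>) -> Option<Captured<T>> {
    if nodes.len() > 1 {
        Some(Captured::Grouped(nodes))
    } else {
        nodes.into_iter().map(Captured::Single).next()
    }
}

fn main() {
    assert_eq!(group::<u32>(vec![]), None);
    assert_eq!(group(vec![1]), Some(Captured::Single(1)));
    assert_eq!(group(vec![1, 2]), Some(Captured::Grouped(vec![1, 2])));
}
```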
fn read_query(language: &str, filename: &str) -> String { pub fn read_query(language: &str, filename: &str) -> String {
static INHERITS_REGEX: Lazy<Regex> = static INHERITS_REGEX: Lazy<Regex> =
Lazy::new(|| Regex::new(r";+\s*inherits\s*:?\s*([a-z_,()]+)\s*").unwrap()); Lazy::new(|| Regex::new(r";+\s*inherits\s*:?\s*([a-z_,()-]+)\s*").unwrap());
let query = load_runtime_file(language, filename).unwrap_or_default(); let query = load_runtime_file(language, filename).unwrap_or_default();
// TODO: the collect() is not ideal // replaces all "; inherits <language>(,<language>)*" with the queries of the given language(s)
let inherits = INHERITS_REGEX INHERITS_REGEX
.captures_iter(&query) .replace_all(&query, |captures: &regex::Captures| {
.flat_map(|captures| {
captures[1] captures[1]
.split(',') .split(',')
.map(str::to_owned) .map(|language| format!("\n{}\n", read_query(language, filename)))
.collect::<Vec<_>>() .collect::<String>()
}) })
.collect::<Vec<_>>(); .to_string()
if inherits.is_empty() {
return query;
}
let mut queries = inherits
.iter()
.map(|language| read_query(language, filename))
.collect::<Vec<_>>();
queries.push(query);
queries.concat()
} }
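The new `read_query` replaces each `; inherits: a,b` marker in place with the recursively expanded queries of the listed languages, instead of collecting them into a vector and concatenating. A standalone sketch of the same expansion over plain strings, assuming the regex crate (the `lookup` closure stands in for `load_runtime_file`):

```rust
use regex::Regex;

// Replace every "; inherits" marker with the recursively expanded queries of
// the languages it names.
fn expand_inherits(query: &str, lookup: &dyn Fn(&str) -> String) -> String {
    let inherits = Regex::new(r";+\s*inherits\s*:?\s*([a-z_,()-]+)\s*").unwrap();
    inherits
        .replace_all(query, |caps: &regex::Captures| {
            caps[1]
                .split(',')
                .map(|language| format!("\n{}\n", expand_inherits(&lookup(language), lookup)))
                .collect::<String>()
        })
        .to_string()
}

fn main() {
    let lookup = |language: &str| match language {
        "ecma" => "(string) @string".to_string(),
        _ => String::new(),
    };
    let expanded = expand_inherits("; inherits: ecma\n(comment) @comment", &lookup);
    assert!(expanded.contains("(string) @string"));
    assert!(expanded.contains("(comment) @comment"));
}
```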
impl LanguageConfiguration { impl LanguageConfiguration {
@ -370,7 +374,8 @@ impl LanguageConfiguration {
&injections_query, &injections_query,
&locals_query, &locals_query,
) )
.unwrap_or_else(|query_error| panic!("Could not parse queries for language {:?}. Are your grammars out of sync? Try running 'hx --grammar fetch' and 'hx --grammar build'. This query could not be parsed: {:?}", self.language_id, query_error)); .map_err(|err| log::error!("Could not parse queries for language {:?}. Are your grammars out of sync? Try running 'hx --grammar fetch' and 'hx --grammar build'. This query could not be parsed: {:?}", self.language_id, err))
.ok()?;
config.configure(scopes); config.configure(scopes);
Some(Arc::new(config)) Some(Arc::new(config))
@ -395,23 +400,15 @@ impl LanguageConfiguration {
pub fn indent_query(&self) -> Option<&Query> { pub fn indent_query(&self) -> Option<&Query> {
self.indent_query self.indent_query
.get_or_init(|| { .get_or_init(|| self.load_query("indents.scm"))
let lang_name = self.language_id.to_ascii_lowercase();
let query_text = read_query(&lang_name, "indents.scm");
let lang = self.highlight_config.get()?.as_ref()?.language;
Query::new(lang, &query_text).ok()
})
.as_ref() .as_ref()
} }
pub fn textobject_query(&self) -> Option<&TextObjectQuery> { pub fn textobject_query(&self) -> Option<&TextObjectQuery> {
self.textobject_query self.textobject_query
.get_or_init(|| -> Option<TextObjectQuery> { .get_or_init(|| {
let lang_name = self.language_id.to_ascii_lowercase(); self.load_query("textobjects.scm")
let query_text = read_query(&lang_name, "textobjects.scm"); .map(|query| TextObjectQuery { query })
let lang = self.highlight_config.get()?.as_ref()?.language;
let query = Query::new(lang, &query_text).ok()?;
Some(TextObjectQuery { query })
}) })
.as_ref() .as_ref()
} }
@ -419,6 +416,18 @@ impl LanguageConfiguration {
pub fn scope(&self) -> &str { pub fn scope(&self) -> &str {
&self.scope &self.scope
} }
fn load_query(&self, kind: &str) -> Option<Query> {
let lang_name = self.language_id.to_ascii_lowercase();
let query_text = read_query(&lang_name, kind);
if query_text.is_empty() {
return None;
}
let lang = self.highlight_config.get()?.as_ref()?.language;
Query::new(lang, &query_text)
.map_err(|e| log::error!("Failed to parse {} queries for {}: {}", kind, lang_name, e))
.ok()
}
} }
// Expose loader as Lazy<> global since it's always static? // Expose loader as Lazy<> global since it's always static?
@ -501,6 +510,13 @@ impl Loader {
.cloned() .cloned()
} }
pub fn language_config_for_language_id(&self, id: &str) -> Option<Arc<LanguageConfiguration>> {
self.language_configs
.iter()
.find(|config| config.language_id == id)
.cloned()
}
pub fn language_configuration_for_injection_string( pub fn language_configuration_for_injection_string(
&self, &self,
string: &str, string: &str,
@ -526,6 +542,10 @@ impl Loader {
None None
} }
pub fn language_configs(&self) -> impl Iterator<Item = &Arc<LanguageConfiguration>> {
self.language_configs.iter()
}
pub fn set_scopes(&self, scopes: Vec<String>) { pub fn set_scopes(&self, scopes: Vec<String>) {
self.scopes.store(Arc::new(scopes)); self.scopes.store(Arc::new(scopes));
@ -744,7 +764,7 @@ impl Syntax {
); );
let mut injections = Vec::new(); let mut injections = Vec::new();
for mat in matches { for mat in matches {
let (language_name, content_node, include_children) = injection_for_match( let (language_name, content_node, included_children) = injection_for_match(
&layer.config, &layer.config,
&layer.config.injections_query, &layer.config.injections_query,
&mat, &mat,
@ -761,7 +781,7 @@ impl Syntax {
{ {
if let Some(config) = (injection_callback)(&language_name) { if let Some(config) = (injection_callback)(&language_name) {
let ranges = let ranges =
intersect_ranges(&layer.ranges, &[content_node], include_children); intersect_ranges(&layer.ranges, &[content_node], included_children);
if !ranges.is_empty() { if !ranges.is_empty() {
injections.push((config, ranges)); injections.push((config, ranges));
@ -773,7 +793,10 @@ impl Syntax {
// Process combined injections. // Process combined injections.
if let Some(combined_injections_query) = &layer.config.combined_injections_query { if let Some(combined_injections_query) = &layer.config.combined_injections_query {
let mut injections_by_pattern_index = let mut injections_by_pattern_index =
vec![(None, Vec::new(), false); combined_injections_query.pattern_count()]; vec![
(None, Vec::new(), IncludedChildren::default());
combined_injections_query.pattern_count()
];
let matches = cursor.matches( let matches = cursor.matches(
combined_injections_query, combined_injections_query,
layer.tree().root_node(), layer.tree().root_node(),
@ -781,7 +804,7 @@ impl Syntax {
); );
for mat in matches { for mat in matches {
let entry = &mut injections_by_pattern_index[mat.pattern_index]; let entry = &mut injections_by_pattern_index[mat.pattern_index];
let (language_name, content_node, include_children) = injection_for_match( let (language_name, content_node, included_children) = injection_for_match(
&layer.config, &layer.config,
combined_injections_query, combined_injections_query,
&mat, &mat,
@ -793,16 +816,16 @@ impl Syntax {
if let Some(content_node) = content_node { if let Some(content_node) = content_node {
entry.1.push(content_node); entry.1.push(content_node);
} }
entry.2 = include_children; entry.2 = included_children;
} }
for (lang_name, content_nodes, includes_children) in injections_by_pattern_index for (lang_name, content_nodes, included_children) in injections_by_pattern_index
{ {
if let (Some(lang_name), false) = (lang_name, content_nodes.is_empty()) { if let (Some(lang_name), false) = (lang_name, content_nodes.is_empty()) {
if let Some(config) = (injection_callback)(&lang_name) { if let Some(config) = (injection_callback)(&lang_name) {
let ranges = intersect_ranges( let ranges = intersect_ranges(
&layer.ranges, &layer.ranges,
&content_nodes, &content_nodes,
includes_children, included_children,
); );
if !ranges.is_empty() { if !ranges.is_empty() {
injections.push((config, ranges)); injections.push((config, ranges));
@ -962,7 +985,9 @@ impl LanguageLayer {
} }
fn parse(&mut self, parser: &mut Parser, source: &Rope) -> Result<(), Error> { fn parse(&mut self, parser: &mut Parser, source: &Rope) -> Result<(), Error> {
parser.set_included_ranges(&self.ranges).unwrap(); parser
.set_included_ranges(&self.ranges)
.map_err(|_| Error::InvalidRanges)?;
parser parser
.set_language(self.config.language) .set_language(self.config.language)
@ -1097,8 +1122,8 @@ pub(crate) fn generate_edits(
use std::sync::atomic::{AtomicUsize, Ordering}; use std::sync::atomic::{AtomicUsize, Ordering};
use std::{iter, mem, ops, str, usize}; use std::{iter, mem, ops, str, usize};
use tree_sitter::{ use tree_sitter::{
CaptureQuantifier, Language as Grammar, Node, Parser, Point, Query, QueryCaptures, QueryCursor, Language as Grammar, Node, Parser, Point, Query, QueryCaptures, QueryCursor, QueryError,
QueryError, QueryMatch, Range, TextProvider, Tree, QueryMatch, Range, TextProvider, Tree,
}; };
const CANCELLATION_CHECK_INTERVAL: usize = 100; const CANCELLATION_CHECK_INTERVAL: usize = 100;
@ -1112,6 +1137,7 @@ pub struct Highlight(pub usize);
pub enum Error { pub enum Error {
Cancelled, Cancelled,
InvalidLanguage, InvalidLanguage,
InvalidRanges,
Unknown, Unknown,
} }
@ -1123,7 +1149,7 @@ pub enum HighlightEvent {
HighlightEnd, HighlightEnd,
} }
/// Contains the data neeeded to higlight code written in a particular language. /// Contains the data needed to highlight code written in a particular language.
/// ///
/// This struct is immutable and can be shared between threads. /// This struct is immutable and can be shared between threads.
#[derive(Debug)] #[derive(Debug)]
@ -1333,8 +1359,8 @@ impl HighlightConfiguration {
/// Tree-sitter syntax-highlighting queries specify highlights in the form of dot-separated /// Tree-sitter syntax-highlighting queries specify highlights in the form of dot-separated
/// highlight names like `punctuation.bracket` and `function.method.builtin`. Consumers of /// highlight names like `punctuation.bracket` and `function.method.builtin`. Consumers of
/// these queries can choose to recognize highlights with different levels of specificity. /// these queries can choose to recognize highlights with different levels of specificity.
/// For example, the string `function.builtin` will match against `function.method.builtin` /// For example, the string `function.builtin` will match against `function.builtin.constructor`
/// and `function.builtin.constructor`, but will not match `function.method`. /// but will not match `function.method.builtin` and `function.method`.
/// ///
/// When highlighting, results are returned as `Highlight` values, which contain the index /// When highlighting, results are returned as `Highlight` values, which contain the index
/// of the matched highlight this list of highlight names. /// of the matched highlight this list of highlight names.
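The matching loop changed in the next hunk now compares dot-separated parts positionally, so a recognized theme scope only matches a capture it prefixes exactly, as the updated doc example states. A small sketch of that rule (illustrative names, not the Helix API):

```rust
// Positional prefix match on dot-separated scope names: every part of the
// recognized scope must equal the capture part at the same index, so
// "function.builtin" matches "function.builtin.constructor" but not
// "function.method.builtin".
fn theme_scope_matches(recognized: &str, capture: &str) -> bool {
    let capture_parts: Vec<&str> = capture.split('.').collect();
    recognized
        .split('.')
        .enumerate()
        .all(|(i, part)| capture_parts.get(i) == Some(&part))
}

fn main() {
    assert!(theme_scope_matches("function.builtin", "function.builtin.constructor"));
    assert!(!theme_scope_matches("function.builtin", "function.method.builtin"));
    assert!(!theme_scope_matches("function.builtin", "function.method"));
}
```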
@ -1354,13 +1380,15 @@ impl HighlightConfiguration {
let recognized_name = recognized_name; let recognized_name = recognized_name;
let mut len = 0; let mut len = 0;
let mut matches = true; let mut matches = true;
for part in recognized_name.split('.') { for (i, part) in recognized_name.split('.').enumerate() {
len += 1; match capture_parts.get(i) {
if !capture_parts.contains(&part) { Some(capture_part) if *capture_part == part => len += 1,
_ => {
matches = false; matches = false;
break; break;
} }
} }
}
if matches && len > best_match_len { if matches && len > best_match_len {
best_index = Some(i); best_index = Some(i);
best_match_len = len; best_match_len = len;
@ -1400,6 +1428,19 @@ impl<'a> HighlightIterLayer<'a> {
} }
} }
#[derive(Clone)]
enum IncludedChildren {
None,
All,
Unnamed,
}
impl Default for IncludedChildren {
fn default() -> Self {
Self::None
}
}
// Compute the ranges that should be included when parsing an injection. // Compute the ranges that should be included when parsing an injection.
// This takes into account three things: // This takes into account three things:
// * `parent_ranges` - The ranges must all fall within the *current* layer's ranges. // * `parent_ranges` - The ranges must all fall within the *current* layer's ranges.
@ -1412,7 +1453,7 @@ impl<'a> HighlightIterLayer<'a> {
fn intersect_ranges( fn intersect_ranges(
parent_ranges: &[Range], parent_ranges: &[Range],
nodes: &[Node], nodes: &[Node],
includes_children: bool, included_children: IncludedChildren,
) -> Vec<Range> { ) -> Vec<Range> {
let mut cursor = nodes[0].walk(); let mut cursor = nodes[0].walk();
let mut result = Vec::new(); let mut result = Vec::new();
@ -1436,11 +1477,15 @@ fn intersect_ranges(
for excluded_range in node for excluded_range in node
.children(&mut cursor) .children(&mut cursor)
.filter_map(|child| { .filter_map(|child| match included_children {
if includes_children { IncludedChildren::None => Some(child.range()),
None IncludedChildren::All => None,
} else { IncludedChildren::Unnamed => {
if child.is_named() {
Some(child.range()) Some(child.range())
} else {
None
}
} }
}) })
.chain([following_range].iter().cloned()) .chain([following_range].iter().cloned())
@ -1769,7 +1814,7 @@ fn injection_for_match<'a>(
query: &'a Query, query: &'a Query,
query_match: &QueryMatch<'a, 'a>, query_match: &QueryMatch<'a, 'a>,
source: RopeSlice<'a>, source: RopeSlice<'a>,
) -> (Option<Cow<'a, str>>, Option<Node<'a>>, bool) { ) -> (Option<Cow<'a, str>>, Option<Node<'a>>, IncludedChildren) {
let content_capture_index = config.injection_content_capture_index; let content_capture_index = config.injection_content_capture_index;
let language_capture_index = config.injection_language_capture_index; let language_capture_index = config.injection_language_capture_index;
@ -1785,7 +1830,7 @@ fn injection_for_match<'a>(
} }
} }
let mut include_children = false; let mut included_children = IncludedChildren::default();
for prop in query.property_settings(query_match.pattern_index) { for prop in query.property_settings(query_match.pattern_index) {
match prop.key.as_ref() { match prop.key.as_ref() {
// In addition to specifying the language name via the text of a // In addition to specifying the language name via the text of a
@ -1801,12 +1846,17 @@ fn injection_for_match<'a>(
// `injection.content` node - only the ranges that belong to the // `injection.content` node - only the ranges that belong to the
// node itself. This can be changed using a `#set!` predicate that // node itself. This can be changed using a `#set!` predicate that
// sets the `injection.include-children` key. // sets the `injection.include-children` key.
"injection.include-children" => include_children = true, "injection.include-children" => included_children = IncludedChildren::All,
// Some queries might only exclude named children but include unnamed
// children in their `injection.content` node. This can be enabled using
// a `#set!` predicate that sets the `injection.include-unnamed-children` key.
"injection.include-unnamed-children" => included_children = IncludedChildren::Unnamed,
_ => {} _ => {}
} }
} }
(language_name, content_node, include_children) (language_name, content_node, included_children)
} }
pub struct Merge<I> { pub struct Merge<I> {
@ -1958,7 +2008,7 @@ mod test {
let source = Rope::from_str( let source = Rope::from_str(
r#" r#"
/// a comment on /// a comment on
/// mutiple lines /// multiple lines
"#, "#,
); );
@ -1982,14 +2032,16 @@ mod test {
assert_eq!( assert_eq!(
matches[0].byte_range(), matches[0].byte_range(),
range, range,
"@{capture} expected {range:?}" "@{} expected {:?}",
capture,
range
) )
}; };
test("quantified_nodes", 1..35); test("quantified_nodes", 1..36);
// NOTE: Enable after implementing proper node group capturing // NOTE: Enable after implementing proper node group capturing
// test("quantified_nodes_grouped", 1..35); // test("quantified_nodes_grouped", 1..36);
// test("multiple_nodes_grouped", 1..35); // test("multiple_nodes_grouped", 1..36);
} }
#[test] #[test]

@ -0,0 +1,143 @@
//! Test helpers.
use crate::{Range, Selection};
use smallvec::SmallVec;
use std::cmp::Reverse;
/// Convert annotated test string to test string and selection.
///
/// `#[|` for primary selection with head before anchor followed by `]#`.
/// `#(|` for secondary selection with head before anchor followed by `)#`.
/// `#[` for primary selection with head after anchor followed by `|]#`.
/// `#(` for secondary selection with head after anchor followed by `|)#`.
///
/// # Examples
///
/// ```
/// use helix_core::{Range, Selection, test::print};
/// use smallvec::smallvec;
///
/// assert_eq!(
/// print("#[a|]#b#(|c)#"),
/// ("abc".to_owned(), Selection::new(smallvec![Range::new(0, 1), Range::new(3, 2)], 0))
/// );
/// ```
///
/// # Panics
///
/// Panics when the primary selection is missing or appears more than once.
/// Panics when a head or anchor marker is missing.
/// Panics when a head comes after another head, or an anchor after another anchor.
pub fn print(s: &str) -> (String, Selection) {
let mut primary_idx = None;
let mut ranges = SmallVec::new();
let mut iter = s.chars().peekable();
let mut left = String::with_capacity(s.len());
'outer: while let Some(c) = iter.next() {
let start = left.len();
if c != '#' {
left.push(c);
continue;
}
let (is_primary, close_pair) = match iter.next() {
Some('[') => (true, ']'),
Some('(') => (false, ')'),
Some(ch) => {
left.push('#');
left.push(ch);
continue;
}
None => break,
};
if is_primary && primary_idx.is_some() {
panic!("primary `#[` already appeared {:?} {:?}", left, s);
}
let head_at_beg = iter.next_if_eq(&'|').is_some();
while let Some(c) = iter.next() {
if !(c == close_pair && iter.peek() == Some(&'#')) {
left.push(c);
continue;
}
if !head_at_beg {
let prev = left.pop().unwrap();
if prev != '|' {
left.push(prev);
left.push(c);
continue;
}
}
iter.next(); // skip "#"
if is_primary {
primary_idx = Some(ranges.len());
}
let (anchor, head) = match head_at_beg {
true => (left.len(), start),
false => (start, left.len()),
};
ranges.push(Range::new(anchor, head));
continue 'outer;
}
if head_at_beg {
panic!("missing end `{}#` {:?} {:?}", close_pair, left, s);
} else {
panic!("missing end `|{}#` {:?} {:?}", close_pair, left, s);
}
}
let primary = match primary_idx {
Some(i) => i,
None => panic!("missing primary `#[|]#` {:?}", s),
};
let selection = Selection::new(ranges, primary);
(left, selection)
}
/// Convert test string and selection to annotated test string.
///
/// `#[|` for primary selection with head before anchor followed by `]#`.
/// `#(|` for secondary selection with head before anchor followed by `)#`.
/// `#[` for primary selection with head after anchor followed by `|]#`.
/// `#(` for secondary selection with head after anchor followed by `|)#`.
///
/// # Examples
///
/// ```
/// use helix_core::{Range, Selection, test::plain};
/// use smallvec::smallvec;
///
/// assert_eq!(
/// plain("abc", Selection::new(smallvec![Range::new(0, 1), Range::new(3, 2)], 0)),
/// "#[a|]#b#(|c)#".to_owned()
/// );
/// ```
pub fn plain(s: &str, selection: Selection) -> String {
let primary = selection.primary_index();
let mut out = String::with_capacity(s.len() + 5 * selection.len());
out.push_str(s);
let mut insertion: Vec<_> = selection
.iter()
.enumerate()
.flat_map(|(i, range)| {
// sort like this before reversed so anchor < head later
match (range.anchor < range.head, i == primary) {
(true, true) => [(range.anchor, "#["), (range.head, "|]#")],
(true, false) => [(range.anchor, "#("), (range.head, "|)#")],
(false, true) => [(range.anchor, "]#"), (range.head, "#[|")],
(false, false) => [(range.anchor, ")#"), (range.head, "#(|")],
}
})
.collect();
// insert in reverse order
insertion.sort_unstable_by_key(|k| Reverse(k.0));
for (i, s) in insertion {
out.insert_str(i, s);
}
out
}
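A round-trip sketch of the two helpers above, assuming the `helix_core::test` paths used in their doc comments: parsing an annotated string and rendering the result back should reproduce the annotation.

```rust
use helix_core::test::{plain, print};

fn main() {
    let annotated = "#[a|]#b#(|c)#";
    // print() strips the markers and builds the Selection they describe...
    let (text, selection) = print(annotated);
    assert_eq!(text, "abc");
    // ...and plain() re-inserts the markers from that Selection.
    assert_eq!(plain(&text, selection), annotated);
}
```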

@ -4,7 +4,8 @@ use ropey::RopeSlice;
use tree_sitter::{Node, QueryCursor}; use tree_sitter::{Node, QueryCursor};
use crate::chars::{categorize_char, char_is_whitespace, CharCategory}; use crate::chars::{categorize_char, char_is_whitespace, CharCategory};
use crate::graphemes::next_grapheme_boundary; use crate::graphemes::{next_grapheme_boundary, prev_grapheme_boundary};
use crate::line_ending::rope_is_line_ending;
use crate::movement::Direction; use crate::movement::Direction;
use crate::surround; use crate::surround;
use crate::syntax::LanguageConfiguration; use crate::syntax::LanguageConfiguration;
@ -111,14 +112,124 @@ pub fn textobject_word(
} }
} }
pub fn textobject_surround( pub fn textobject_paragraph(
slice: RopeSlice,
range: Range,
textobject: TextObject,
count: usize,
) -> Range {
let mut line = range.cursor_line(slice);
let prev_line_empty = rope_is_line_ending(slice.line(line.saturating_sub(1)));
let curr_line_empty = rope_is_line_ending(slice.line(line));
let next_line_empty = rope_is_line_ending(slice.line(line.saturating_sub(1)));
let last_char =
prev_grapheme_boundary(slice, slice.line_to_char(line + 1)) == range.cursor(slice);
let prev_empty_to_line = prev_line_empty && !curr_line_empty;
let curr_empty_to_line = curr_line_empty && !next_line_empty;
// skip character before paragraph boundary
    let mut line_back = line; // line index scanned backwards toward the paragraph start
if prev_empty_to_line || curr_empty_to_line {
line_back += 1;
}
// do not include current paragraph on paragraph end (include next)
if !(curr_empty_to_line && last_char) {
let mut lines = slice.lines_at(line_back);
lines.reverse();
let mut lines = lines.map(rope_is_line_ending).peekable();
while lines.next_if(|&e| e).is_some() {
line_back -= 1;
}
while lines.next_if(|&e| !e).is_some() {
line_back -= 1;
}
}
// skip character after paragraph boundary
if curr_empty_to_line && last_char {
line += 1;
}
let mut lines = slice.lines_at(line).map(rope_is_line_ending).peekable();
let mut count_done = 0; // count how many non-whitespace paragraphs done
for _ in 0..count {
let mut done = false;
while lines.next_if(|&e| !e).is_some() {
line += 1;
done = true;
}
while lines.next_if(|&e| e).is_some() {
line += 1;
}
count_done += done as usize;
}
    // if this is the last paragraph, search one paragraph backwards;
    // this makes `map` (match around paragraph) useful at the end of a paragraph with trailing newlines
let last_paragraph = count_done != count && lines.peek().is_none();
if last_paragraph {
let mut lines = slice.lines_at(line_back);
lines.reverse();
let mut lines = lines.map(rope_is_line_ending).peekable();
while lines.next_if(|&e| e).is_some() {
line_back -= 1;
}
while lines.next_if(|&e| !e).is_some() {
line_back -= 1;
}
}
// handle last whitespaces part separately depending on textobject
match textobject {
TextObject::Around => {}
TextObject::Inside => {
// remove last whitespace paragraph
let mut lines = slice.lines_at(line);
lines.reverse();
let mut lines = lines.map(rope_is_line_ending).peekable();
while lines.next_if(|&e| e).is_some() {
line -= 1;
}
}
TextObject::Movement => unreachable!(),
}
let anchor = slice.line_to_char(line_back);
let head = slice.line_to_char(line);
Range::new(anchor, head)
}
pub fn textobject_pair_surround(
slice: RopeSlice, slice: RopeSlice,
range: Range, range: Range,
textobject: TextObject, textobject: TextObject,
ch: char, ch: char,
count: usize, count: usize,
) -> Range { ) -> Range {
surround::find_nth_pairs_pos(slice, ch, range, count) textobject_pair_surround_impl(slice, range, textobject, Some(ch), count)
}
pub fn textobject_pair_surround_closest(
slice: RopeSlice,
range: Range,
textobject: TextObject,
count: usize,
) -> Range {
textobject_pair_surround_impl(slice, range, textobject, None, count)
}
fn textobject_pair_surround_impl(
slice: RopeSlice,
range: Range,
textobject: TextObject,
ch: Option<char>,
count: usize,
) -> Range {
let pair_pos = match ch {
Some(ch) => surround::find_nth_pairs_pos(slice, ch, range, count),
// Automatically find the closest surround pairs
None => surround::find_nth_closest_pairs_pos(slice, range, count),
};
pair_pos
.map(|(anchor, head)| match textobject { .map(|(anchor, head)| match textobject {
TextObject::Inside => Range::new(next_grapheme_boundary(slice, anchor), head), TextObject::Inside => Range::new(next_grapheme_boundary(slice, anchor), head),
TextObject::Around => Range::new(anchor, next_grapheme_boundary(slice, head)), TextObject::Around => Range::new(anchor, next_grapheme_boundary(slice, head)),
@ -288,6 +399,91 @@ mod test {
} }
} }
#[test]
fn test_textobject_paragraph_inside_single() {
let tests = [
("#[|]#", "#[|]#"),
("firs#[t|]#\n\nparagraph\n\n", "#[first\n|]#\nparagraph\n\n"),
(
"second\n\npa#[r|]#agraph\n\n",
"second\n\n#[paragraph\n|]#\n",
),
("#[f|]#irst char\n\n", "#[first char\n|]#\n"),
("last char\n#[\n|]#", "#[last char\n|]#\n"),
(
"empty to line\n#[\n|]#paragraph boundary\n\n",
"empty to line\n\n#[paragraph boundary\n|]#\n",
),
(
"line to empty\n\n#[p|]#aragraph boundary\n\n",
"line to empty\n\n#[paragraph boundary\n|]#\n",
),
];
for (before, expected) in tests {
let (s, selection) = crate::test::print(before);
let text = Rope::from(s.as_str());
let selection = selection
.transform(|r| textobject_paragraph(text.slice(..), r, TextObject::Inside, 1));
let actual = crate::test::plain(&s, selection);
assert_eq!(actual, expected, "\nbefore: `{:?}`", before);
}
}
#[test]
fn test_textobject_paragraph_inside_double() {
let tests = [
(
"last two\n\n#[p|]#aragraph\n\nwithout whitespaces\n\n",
"last two\n\n#[paragraph\n\nwithout whitespaces\n|]#\n",
),
(
"last two\n#[\n|]#paragraph\n\nwithout whitespaces\n\n",
"last two\n\n#[paragraph\n\nwithout whitespaces\n|]#\n",
),
];
for (before, expected) in tests {
let (s, selection) = crate::test::print(before);
let text = Rope::from(s.as_str());
let selection = selection
.transform(|r| textobject_paragraph(text.slice(..), r, TextObject::Inside, 2));
let actual = crate::test::plain(&s, selection);
assert_eq!(actual, expected, "\nbefore: `{:?}`", before);
}
}
#[test]
fn test_textobject_paragraph_around_single() {
let tests = [
("#[|]#", "#[|]#"),
("firs#[t|]#\n\nparagraph\n\n", "#[first\n\n|]#paragraph\n\n"),
(
"second\n\npa#[r|]#agraph\n\n",
"second\n\n#[paragraph\n\n|]#",
),
("#[f|]#irst char\n\n", "#[first char\n\n|]#"),
("last char\n#[\n|]#", "#[last char\n\n|]#"),
(
"empty to line\n#[\n|]#paragraph boundary\n\n",
"empty to line\n\n#[paragraph boundary\n\n|]#",
),
(
"line to empty\n\n#[p|]#aragraph boundary\n\n",
"line to empty\n\n#[paragraph boundary\n\n|]#",
),
];
for (before, expected) in tests {
let (s, selection) = crate::test::print(before);
let text = Rope::from(s.as_str());
let selection = selection
.transform(|r| textobject_paragraph(text.slice(..), r, TextObject::Around, 1));
let actual = crate::test::plain(&s, selection);
assert_eq!(actual, expected, "\nbefore: `{:?}`", before);
}
}
#[test] #[test]
fn test_textobject_surround() { fn test_textobject_surround() {
// (text, [(cursor position, textobject, final range, surround char, count), ...]) // (text, [(cursor position, textobject, final range, surround char, count), ...])
@ -366,7 +562,7 @@ mod test {
let slice = doc.slice(..); let slice = doc.slice(..);
for &case in scenario { for &case in scenario {
let (pos, objtype, expected_range, ch, count) = case; let (pos, objtype, expected_range, ch, count) = case;
let result = textobject_surround(slice, Range::point(pos), objtype, ch, count); let result = textobject_pair_surround(slice, Range::point(pos), objtype, ch, count);
assert_eq!( assert_eq!(
result, result,
expected_range.into(), expected_range.into(),

@ -0,0 +1,7 @@
use smartstring::{LazyCompact, SmartString};
/// Given a slice of text, return the text re-wrapped to fit it
/// within the given width.
pub fn reflow_hard_wrap(text: &str, max_line_len: usize) -> SmartString<LazyCompact> {
textwrap::refill(text, max_line_len).into()
}
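A usage sketch for the helper above, assuming `textwrap::refill`'s greedy re-wrapping: the text is unfilled and refilled so that no line exceeds the requested width unless a single word already does.

```rust
use smartstring::{LazyCompact, SmartString};

pub fn reflow_hard_wrap(text: &str, max_line_len: usize) -> SmartString<LazyCompact> {
    textwrap::refill(text, max_line_len).into()
}

fn main() {
    let wrapped = reflow_hard_wrap("one two three four five six seven eight nine ten", 20);
    // Greedy word wrapping: no line exceeds the limit unless a single word does.
    assert!(wrapped.lines().all(|line| line.len() <= 20));
    println!("{}", wrapped);
}
```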

@ -34,7 +34,7 @@ pub struct Client {
pub caps: Option<DebuggerCapabilities>, pub caps: Option<DebuggerCapabilities>,
// thread_id -> frames // thread_id -> frames
pub stack_frames: HashMap<ThreadId, Vec<StackFrame>>, pub stack_frames: HashMap<ThreadId, Vec<StackFrame>>,
pub thread_states: HashMap<ThreadId, String>, pub thread_states: ThreadStates,
pub thread_id: Option<ThreadId>, pub thread_id: Option<ThreadId>,
/// Currently active frame for the current thread. /// Currently active frame for the current thread.
pub active_frame: Option<usize>, pub active_frame: Option<usize>,

@ -14,6 +14,8 @@ impl std::fmt::Display for ThreadId {
} }
} }
pub type ThreadStates = HashMap<ThreadId, String>;
pub trait Request { pub trait Request {
type Arguments: serde::de::DeserializeOwned + serde::Serialize; type Arguments: serde::de::DeserializeOwned + serde::Serialize;
type Result: serde::de::DeserializeOwned + serde::Serialize; type Result: serde::de::DeserializeOwned + serde::Serialize;
@ -25,9 +27,11 @@ pub trait Request {
pub struct ColumnDescriptor { pub struct ColumnDescriptor {
pub attribute_name: String, pub attribute_name: String,
pub label: String, pub label: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub format: Option<String>, pub format: Option<String>,
#[serde(rename = "type")] #[serde(rename = "type", skip_serializing_if = "Option::is_none")]
pub ty: Option<String>, pub ty: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub width: Option<usize>, pub width: Option<usize>,
} }
@ -36,52 +40,94 @@ pub struct ColumnDescriptor {
pub struct ExceptionBreakpointsFilter { pub struct ExceptionBreakpointsFilter {
pub filter: String, pub filter: String,
pub label: String, pub label: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub description: Option<String>, pub description: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub default: Option<bool>, pub default: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub supports_condition: Option<bool>, pub supports_condition: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub condition_description: Option<String>, pub condition_description: Option<String>,
} }
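The attribute being added throughout this file changes what goes over the wire: with `skip_serializing_if = "Option::is_none"`, unset optional fields are omitted from the outgoing DAP message instead of being serialized as `null`. A minimal sketch with a simplified struct, assuming serde and serde_json:

```rust
use serde::Serialize;

// Simplified version of the structs in this file: the unset description is
// dropped from the JSON payload entirely.
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct Filter {
    filter: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    description: Option<String>,
}

fn main() {
    let filter = Filter {
        filter: "panic".into(),
        description: None,
    };
    // Without skip_serializing_if this would be {"filter":"panic","description":null}.
    assert_eq!(
        serde_json::to_string(&filter).unwrap(),
        r#"{"filter":"panic"}"#
    );
}
```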
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct DebuggerCapabilities { pub struct DebuggerCapabilities {
#[serde(skip_serializing_if = "Option::is_none")]
pub supports_configuration_done_request: Option<bool>, pub supports_configuration_done_request: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub supports_function_breakpoints: Option<bool>, pub supports_function_breakpoints: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub supports_conditional_breakpoints: Option<bool>, pub supports_conditional_breakpoints: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub supports_hit_conditional_breakpoints: Option<bool>, pub supports_hit_conditional_breakpoints: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub supports_evaluate_for_hovers: Option<bool>, pub supports_evaluate_for_hovers: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub supports_step_back: Option<bool>, pub supports_step_back: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub supports_set_variable: Option<bool>, pub supports_set_variable: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub supports_restart_frame: Option<bool>, pub supports_restart_frame: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub supports_goto_targets_request: Option<bool>, pub supports_goto_targets_request: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub supports_step_in_targets_request: Option<bool>, pub supports_step_in_targets_request: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub supports_completions_request: Option<bool>, pub supports_completions_request: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub supports_modules_request: Option<bool>, pub supports_modules_request: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub supports_restart_request: Option<bool>, pub supports_restart_request: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub supports_exception_options: Option<bool>, pub supports_exception_options: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub supports_value_formatting_options: Option<bool>, pub supports_value_formatting_options: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub supports_exception_info_request: Option<bool>, pub supports_exception_info_request: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub support_terminate_debuggee: Option<bool>, pub support_terminate_debuggee: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub support_suspend_debuggee: Option<bool>, pub support_suspend_debuggee: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub supports_delayed_stack_trace_loading: Option<bool>, pub supports_delayed_stack_trace_loading: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub supports_loaded_sources_request: Option<bool>, pub supports_loaded_sources_request: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub supports_log_points: Option<bool>, pub supports_log_points: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub supports_terminate_threads_request: Option<bool>, pub supports_terminate_threads_request: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub supports_set_expression: Option<bool>, pub supports_set_expression: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub supports_terminate_request: Option<bool>, pub supports_terminate_request: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub supports_data_breakpoints: Option<bool>, pub supports_data_breakpoints: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub supports_read_memory_request: Option<bool>, pub supports_read_memory_request: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub supports_write_memory_request: Option<bool>, pub supports_write_memory_request: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub supports_disassemble_request: Option<bool>, pub supports_disassemble_request: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub supports_cancel_request: Option<bool>, pub supports_cancel_request: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub supports_breakpoint_locations_request: Option<bool>, pub supports_breakpoint_locations_request: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub supports_clipboard_context: Option<bool>, pub supports_clipboard_context: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub supports_stepping_granularity: Option<bool>, pub supports_stepping_granularity: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub supports_instruction_breakpoints: Option<bool>, pub supports_instruction_breakpoints: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub supports_exception_filter_options: Option<bool>, pub supports_exception_filter_options: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub exception_breakpoint_filters: Option<Vec<ExceptionBreakpointsFilter>>, pub exception_breakpoint_filters: Option<Vec<ExceptionBreakpointsFilter>>,
#[serde(skip_serializing_if = "Option::is_none")]
pub completion_trigger_characters: Option<Vec<String>>, pub completion_trigger_characters: Option<Vec<String>>,
#[serde(skip_serializing_if = "Option::is_none")]
pub additional_module_columns: Option<Vec<ColumnDescriptor>>, pub additional_module_columns: Option<Vec<ColumnDescriptor>>,
#[serde(skip_serializing_if = "Option::is_none")]
pub supported_checksum_algorithms: Option<Vec<String>>, pub supported_checksum_algorithms: Option<Vec<String>>,
} }
@ -95,13 +141,21 @@ pub struct Checksum {
#[derive(Debug, Default, PartialEq, Clone, Deserialize, Serialize)] #[derive(Debug, Default, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct Source { pub struct Source {
#[serde(skip_serializing_if = "Option::is_none")]
pub name: Option<String>, pub name: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub path: Option<PathBuf>, pub path: Option<PathBuf>,
#[serde(skip_serializing_if = "Option::is_none")]
pub source_reference: Option<usize>, pub source_reference: Option<usize>,
#[serde(skip_serializing_if = "Option::is_none")]
pub presentation_hint: Option<String>, pub presentation_hint: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub origin: Option<String>, pub origin: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub sources: Option<Vec<Source>>, pub sources: Option<Vec<Source>>,
#[serde(skip_serializing_if = "Option::is_none")]
pub adapter_data: Option<Value>, pub adapter_data: Option<Value>,
#[serde(skip_serializing_if = "Option::is_none")]
pub checksums: Option<Vec<Checksum>>, pub checksums: Option<Vec<Checksum>>,
} }
@ -109,36 +163,56 @@ pub struct Source {
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct SourceBreakpoint { pub struct SourceBreakpoint {
pub line: usize, pub line: usize,
#[serde(skip_serializing_if = "Option::is_none")]
pub column: Option<usize>, pub column: Option<usize>,
#[serde(skip_serializing_if = "Option::is_none")]
pub condition: Option<String>, pub condition: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub hit_condition: Option<String>, pub hit_condition: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub log_message: Option<String>, pub log_message: Option<String>,
} }
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct Breakpoint { pub struct Breakpoint {
#[serde(skip_serializing_if = "Option::is_none")]
pub id: Option<usize>, pub id: Option<usize>,
pub verified: bool, pub verified: bool,
#[serde(skip_serializing_if = "Option::is_none")]
pub message: Option<String>, pub message: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub source: Option<Source>, pub source: Option<Source>,
#[serde(skip_serializing_if = "Option::is_none")]
pub line: Option<usize>, pub line: Option<usize>,
#[serde(skip_serializing_if = "Option::is_none")]
pub column: Option<usize>, pub column: Option<usize>,
#[serde(skip_serializing_if = "Option::is_none")]
pub end_line: Option<usize>, pub end_line: Option<usize>,
#[serde(skip_serializing_if = "Option::is_none")]
pub end_column: Option<usize>, pub end_column: Option<usize>,
#[serde(skip_serializing_if = "Option::is_none")]
pub instruction_reference: Option<String>, pub instruction_reference: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub offset: Option<usize>, pub offset: Option<usize>,
} }
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct StackFrameFormat { pub struct StackFrameFormat {
#[serde(skip_serializing_if = "Option::is_none")]
pub parameters: Option<bool>, pub parameters: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub parameter_types: Option<bool>, pub parameter_types: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub parameter_names: Option<bool>, pub parameter_names: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub parameter_values: Option<bool>, pub parameter_values: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub line: Option<bool>, pub line: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub module: Option<bool>, pub module: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub include_all: Option<bool>, pub include_all: Option<bool>,
} }
@ -147,14 +221,21 @@ pub struct StackFrameFormat {
pub struct StackFrame { pub struct StackFrame {
pub id: usize, pub id: usize,
pub name: String, pub name: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub source: Option<Source>, pub source: Option<Source>,
pub line: usize, pub line: usize,
pub column: usize, pub column: usize,
#[serde(skip_serializing_if = "Option::is_none")]
pub end_line: Option<usize>, pub end_line: Option<usize>,
#[serde(skip_serializing_if = "Option::is_none")]
pub end_column: Option<usize>, pub end_column: Option<usize>,
#[serde(skip_serializing_if = "Option::is_none")]
pub can_restart: Option<bool>, pub can_restart: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub instruction_pointer_reference: Option<String>, pub instruction_pointer_reference: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub module_id: Option<Value>, pub module_id: Option<Value>,
#[serde(skip_serializing_if = "Option::is_none")]
pub presentation_hint: Option<String>, pub presentation_hint: Option<String>,
} }
@ -169,29 +250,41 @@ pub struct Thread {
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct Scope { pub struct Scope {
pub name: String, pub name: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub presentation_hint: Option<String>, pub presentation_hint: Option<String>,
pub variables_reference: usize, pub variables_reference: usize,
#[serde(skip_serializing_if = "Option::is_none")]
pub named_variables: Option<usize>, pub named_variables: Option<usize>,
#[serde(skip_serializing_if = "Option::is_none")]
pub indexed_variables: Option<usize>, pub indexed_variables: Option<usize>,
pub expensive: bool, pub expensive: bool,
#[serde(skip_serializing_if = "Option::is_none")]
pub source: Option<Source>, pub source: Option<Source>,
#[serde(skip_serializing_if = "Option::is_none")]
pub line: Option<usize>, pub line: Option<usize>,
#[serde(skip_serializing_if = "Option::is_none")]
pub column: Option<usize>, pub column: Option<usize>,
#[serde(skip_serializing_if = "Option::is_none")]
pub end_line: Option<usize>, pub end_line: Option<usize>,
#[serde(skip_serializing_if = "Option::is_none")]
pub end_column: Option<usize>, pub end_column: Option<usize>,
} }
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct ValueFormat { pub struct ValueFormat {
#[serde(skip_serializing_if = "Option::is_none")]
pub hex: Option<bool>, pub hex: Option<bool>,
} }
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct VariablePresentationHint { pub struct VariablePresentationHint {
#[serde(skip_serializing_if = "Option::is_none")]
pub kind: Option<String>, pub kind: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub attributes: Option<Vec<String>>, pub attributes: Option<Vec<String>>,
#[serde(skip_serializing_if = "Option::is_none")]
pub visibility: Option<String>, pub visibility: Option<String>,
} }
@ -200,13 +293,18 @@ pub struct VariablePresentationHint {
pub struct Variable { pub struct Variable {
pub name: String, pub name: String,
pub value: String, pub value: String,
#[serde(rename = "type")] #[serde(rename = "type", skip_serializing_if = "Option::is_none")]
pub ty: Option<String>, pub ty: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub presentation_hint: Option<VariablePresentationHint>, pub presentation_hint: Option<VariablePresentationHint>,
#[serde(skip_serializing_if = "Option::is_none")]
pub evaluate_name: Option<String>, pub evaluate_name: Option<String>,
pub variables_reference: usize, pub variables_reference: usize,
#[serde(skip_serializing_if = "Option::is_none")]
pub named_variables: Option<usize>, pub named_variables: Option<usize>,
#[serde(skip_serializing_if = "Option::is_none")]
pub indexed_variables: Option<usize>, pub indexed_variables: Option<usize>,
#[serde(skip_serializing_if = "Option::is_none")]
pub memory_reference: Option<String>, pub memory_reference: Option<String>,
} }
@ -215,13 +313,21 @@ pub struct Variable {
pub struct Module { pub struct Module {
pub id: String, // TODO: || number pub id: String, // TODO: || number
pub name: String, pub name: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub path: Option<PathBuf>, pub path: Option<PathBuf>,
#[serde(skip_serializing_if = "Option::is_none")]
pub is_optimized: Option<bool>, pub is_optimized: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub is_user_code: Option<bool>, pub is_user_code: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub version: Option<String>, pub version: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub symbol_status: Option<String>, pub symbol_status: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub symbol_file_path: Option<String>, pub symbol_file_path: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub date_time_stamp: Option<String>, pub date_time_stamp: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub address_range: Option<String>, pub address_range: Option<String>,
} }
@ -230,22 +336,31 @@ pub mod requests {
#[derive(Debug, Default, PartialEq, Clone, Deserialize, Serialize)] #[derive(Debug, Default, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct InitializeArguments { pub struct InitializeArguments {
#[serde(rename = "clientID")] #[serde(rename = "clientID", skip_serializing_if = "Option::is_none")]
pub client_id: Option<String>, pub client_id: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub client_name: Option<String>, pub client_name: Option<String>,
#[serde(rename = "adapterID")] #[serde(rename = "adapterID")]
pub adapter_id: String, pub adapter_id: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub locale: Option<String>, pub locale: Option<String>,
#[serde(rename = "linesStartAt1")] #[serde(rename = "linesStartAt1", skip_serializing_if = "Option::is_none")]
pub lines_start_at_one: Option<bool>, pub lines_start_at_one: Option<bool>,
#[serde(rename = "columnsStartAt1")] #[serde(rename = "columnsStartAt1", skip_serializing_if = "Option::is_none")]
pub columns_start_at_one: Option<bool>, pub columns_start_at_one: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub path_format: Option<String>, pub path_format: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub supports_variable_type: Option<bool>, pub supports_variable_type: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub supports_variable_paging: Option<bool>, pub supports_variable_paging: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub supports_run_in_terminal_request: Option<bool>, pub supports_run_in_terminal_request: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub supports_memory_references: Option<bool>, pub supports_memory_references: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub supports_progress_reporting: Option<bool>, pub supports_progress_reporting: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub supports_invalidated_event: Option<bool>, pub supports_invalidated_event: Option<bool>,
} }
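
A minimal standalone sketch of what the `skip_serializing_if = "Option::is_none"` attributes added throughout these DAP types buy us: unset optional fields are omitted from the JSON sent to the adapter instead of being serialized as `null`. The struct below is hypothetical and assumes `serde` (with `derive`) and `serde_json` are available, as in this workspace.

```rust
use serde::Serialize;

// Hypothetical stand-in for one of the request types above.
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct Sketch {
    #[serde(rename = "adapterID")]
    adapter_id: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    locale: Option<String>,
}

fn main() {
    let args = Sketch {
        adapter_id: "lldb".to_string(),
        locale: None,
    };
    // Without `skip_serializing_if`, the output would contain `"locale":null`;
    // with it, the unset field disappears entirely.
    assert_eq!(
        serde_json::to_string(&args).unwrap(),
        r#"{"adapterID":"lldb"}"#
    );
}
```
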
@ -298,14 +413,17 @@ pub mod requests {
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct SetBreakpointsArguments { pub struct SetBreakpointsArguments {
pub source: Source, pub source: Source,
#[serde(skip_serializing_if = "Option::is_none")]
pub breakpoints: Option<Vec<SourceBreakpoint>>, pub breakpoints: Option<Vec<SourceBreakpoint>>,
// lines is deprecated // lines is deprecated
#[serde(skip_serializing_if = "Option::is_none")]
pub source_modified: Option<bool>, pub source_modified: Option<bool>,
} }
#[derive(Debug, Default, PartialEq, Clone, Deserialize, Serialize)] #[derive(Debug, Default, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct SetBreakpointsResponse { pub struct SetBreakpointsResponse {
#[serde(skip_serializing_if = "Option::is_none")]
pub breakpoints: Option<Vec<Breakpoint>>, pub breakpoints: Option<Vec<Breakpoint>>,
} }
@ -327,6 +445,7 @@ pub mod requests {
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct ContinueResponse { pub struct ContinueResponse {
#[serde(skip_serializing_if = "Option::is_none")]
pub all_threads_continued: Option<bool>, pub all_threads_continued: Option<bool>,
} }
@ -343,14 +462,18 @@ pub mod requests {
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct StackTraceArguments { pub struct StackTraceArguments {
pub thread_id: ThreadId, pub thread_id: ThreadId,
#[serde(skip_serializing_if = "Option::is_none")]
pub start_frame: Option<usize>, pub start_frame: Option<usize>,
#[serde(skip_serializing_if = "Option::is_none")]
pub levels: Option<usize>, pub levels: Option<usize>,
#[serde(skip_serializing_if = "Option::is_none")]
pub format: Option<StackFrameFormat>, pub format: Option<StackFrameFormat>,
} }
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct StackTraceResponse { pub struct StackTraceResponse {
#[serde(skip_serializing_if = "Option::is_none")]
pub total_frames: Option<usize>, pub total_frames: Option<usize>,
pub stack_frames: Vec<StackFrame>, pub stack_frames: Vec<StackFrame>,
} }
@ -404,9 +527,13 @@ pub mod requests {
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct VariablesArguments { pub struct VariablesArguments {
pub variables_reference: usize, pub variables_reference: usize,
#[serde(skip_serializing_if = "Option::is_none")]
pub filter: Option<String>, pub filter: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub start: Option<usize>, pub start: Option<usize>,
#[serde(skip_serializing_if = "Option::is_none")]
pub count: Option<usize>, pub count: Option<usize>,
#[serde(skip_serializing_if = "Option::is_none")]
pub format: Option<ValueFormat>, pub format: Option<ValueFormat>,
} }
@ -429,7 +556,9 @@ pub mod requests {
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct StepInArguments { pub struct StepInArguments {
pub thread_id: ThreadId, pub thread_id: ThreadId,
#[serde(skip_serializing_if = "Option::is_none")]
pub target_id: Option<usize>, pub target_id: Option<usize>,
#[serde(skip_serializing_if = "Option::is_none")]
pub granularity: Option<String>, pub granularity: Option<String>,
} }
@ -446,6 +575,7 @@ pub mod requests {
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct StepOutArguments { pub struct StepOutArguments {
pub thread_id: ThreadId, pub thread_id: ThreadId,
#[serde(skip_serializing_if = "Option::is_none")]
pub granularity: Option<String>, pub granularity: Option<String>,
} }
@ -462,6 +592,7 @@ pub mod requests {
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct NextArguments { pub struct NextArguments {
pub thread_id: ThreadId, pub thread_id: ThreadId,
#[serde(skip_serializing_if = "Option::is_none")]
pub granularity: Option<String>, pub granularity: Option<String>,
} }
@ -493,8 +624,11 @@ pub mod requests {
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct EvaluateArguments { pub struct EvaluateArguments {
pub expression: String, pub expression: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub frame_id: Option<usize>, pub frame_id: Option<usize>,
#[serde(skip_serializing_if = "Option::is_none")]
pub context: Option<String>, pub context: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub format: Option<ValueFormat>, pub format: Option<ValueFormat>,
} }
@ -502,12 +636,16 @@ pub mod requests {
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct EvaluateResponse { pub struct EvaluateResponse {
pub result: String, pub result: String,
#[serde(rename = "type")] #[serde(rename = "type", skip_serializing_if = "Option::is_none")]
pub ty: Option<String>, pub ty: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub presentation_hint: Option<VariablePresentationHint>, pub presentation_hint: Option<VariablePresentationHint>,
pub variables_reference: usize, pub variables_reference: usize,
#[serde(skip_serializing_if = "Option::is_none")]
pub named_variables: Option<usize>, pub named_variables: Option<usize>,
#[serde(skip_serializing_if = "Option::is_none")]
pub indexed_variables: Option<usize>, pub indexed_variables: Option<usize>,
#[serde(skip_serializing_if = "Option::is_none")]
pub memory_reference: Option<String>, pub memory_reference: Option<String>,
} }
@ -531,6 +669,7 @@ pub mod requests {
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct SetExceptionBreakpointsResponse { pub struct SetExceptionBreakpointsResponse {
#[serde(skip_serializing_if = "Option::is_none")]
pub breakpoints: Option<Vec<Breakpoint>>, pub breakpoints: Option<Vec<Breakpoint>>,
} }
@ -548,17 +687,22 @@ pub mod requests {
#[derive(Debug, Default, PartialEq, Clone, Deserialize, Serialize)] #[derive(Debug, Default, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct RunInTerminalResponse { pub struct RunInTerminalResponse {
#[serde(skip_serializing_if = "Option::is_none")]
pub process_id: Option<u32>, pub process_id: Option<u32>,
#[serde(skip_serializing_if = "Option::is_none")]
pub shell_process_id: Option<u32>, pub shell_process_id: Option<u32>,
} }
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct RunInTerminalArguments { pub struct RunInTerminalArguments {
#[serde(skip_serializing_if = "Option::is_none")]
pub kind: Option<String>, pub kind: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub title: Option<String>, pub title: Option<String>,
pub cwd: Option<String>, pub cwd: String,
pub args: Vec<String>, pub args: Vec<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub env: Option<HashMap<String, Option<String>>>, pub env: Option<HashMap<String, Option<String>>>,
} }
@ -605,11 +749,17 @@ pub mod events {
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct Stopped { pub struct Stopped {
pub reason: String, pub reason: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub description: Option<String>, pub description: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub thread_id: Option<ThreadId>, pub thread_id: Option<ThreadId>,
#[serde(skip_serializing_if = "Option::is_none")]
pub preserve_focus_hint: Option<bool>, pub preserve_focus_hint: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub text: Option<String>, pub text: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub all_threads_stopped: Option<bool>, pub all_threads_stopped: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub hit_breakpoint_ids: Option<Vec<usize>>, pub hit_breakpoint_ids: Option<Vec<usize>>,
} }
@ -617,6 +767,7 @@ pub mod events {
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct Continued { pub struct Continued {
pub thread_id: ThreadId, pub thread_id: ThreadId,
#[serde(skip_serializing_if = "Option::is_none")]
pub all_threads_continued: Option<bool>, pub all_threads_continued: Option<bool>,
} }
@ -629,6 +780,7 @@ pub mod events {
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct Terminated { pub struct Terminated {
#[serde(skip_serializing_if = "Option::is_none")]
pub restart: Option<Value>, pub restart: Option<Value>,
} }
@ -643,12 +795,19 @@ pub mod events {
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct Output { pub struct Output {
pub output: String, pub output: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub category: Option<String>, pub category: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub group: Option<String>, pub group: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub line: Option<usize>, pub line: Option<usize>,
#[serde(skip_serializing_if = "Option::is_none")]
pub column: Option<usize>, pub column: Option<usize>,
#[serde(skip_serializing_if = "Option::is_none")]
pub variables_reference: Option<usize>, pub variables_reference: Option<usize>,
#[serde(skip_serializing_if = "Option::is_none")]
pub source: Option<Source>, pub source: Option<Source>,
#[serde(skip_serializing_if = "Option::is_none")]
pub data: Option<Value>, pub data: Option<Value>,
} }
@ -677,9 +836,13 @@ pub mod events {
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct Process { pub struct Process {
pub name: String, pub name: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub system_process_id: Option<usize>, pub system_process_id: Option<usize>,
#[serde(skip_serializing_if = "Option::is_none")]
pub is_local_process: Option<bool>, pub is_local_process: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub start_method: Option<String>, // TODO: use enum pub start_method: Option<String>, // TODO: use enum
#[serde(skip_serializing_if = "Option::is_none")]
pub pointer_size: Option<usize>, pub pointer_size: Option<usize>,
} }

@ -9,15 +9,24 @@ categories = ["editor"]
repository = "https://github.com/helix-editor/helix" repository = "https://github.com/helix-editor/helix"
homepage = "https://helix-editor.com" homepage = "https://helix-editor.com"
[[bin]]
name = "hx-loader"
path = "src/main.rs"
[dependencies] [dependencies]
anyhow = "1" anyhow = "1"
serde = { version = "1.0", features = ["derive"] } serde = { version = "1.0", features = ["derive"] }
toml = "0.5" toml = "0.5"
etcetera = "0.3" etcetera = "0.4"
tree-sitter = "0.20" tree-sitter = "0.20"
libloading = "0.7" once_cell = "1.15"
once_cell = "1.9" log = "0.4"
# TODO: these two should be on !wasm32 only
# cloning/compiling tree-sitter grammars # cloning/compiling tree-sitter grammars
cc = { version = "1" } cc = { version = "1" }
threadpool = { version = "1.0" } threadpool = { version = "1.0" }
[target.'cfg(not(target_arch = "wasm32"))'.dependencies]
libloading = "0.7"

@ -0,0 +1,42 @@
/// Default built-in languages.toml.
pub fn default_lang_config() -> toml::Value {
toml::from_slice(include_bytes!("../../languages.toml"))
.expect("Could not parse built-in languages.toml to valid toml")
}
/// User configured languages.toml file, merged with the default config.
pub fn user_lang_config() -> Result<toml::Value, toml::de::Error> {
let config = crate::local_config_dirs()
.into_iter()
.chain([crate::config_dir()].into_iter())
.map(|path| path.join("languages.toml"))
.filter_map(|file| {
std::fs::read(&file)
.map(|config| toml::from_slice(&config))
.ok()
})
.collect::<Result<Vec<_>, _>>()?
.into_iter()
.chain([default_lang_config()].into_iter())
.fold(toml::Value::Table(toml::value::Table::default()), |a, b| {
// combines for example
// b:
// [[language]]
// name = "toml"
// language-server = { command = "taplo", args = ["lsp", "stdio"] }
//
// a:
// [[language]]
// language-server = { command = "/usr/bin/taplo" }
//
// into:
// [[language]]
// name = "toml"
// language-server = { command = "/usr/bin/taplo" }
//
// thus it overrides the third depth-level of b with values of a if they exist, but otherwise merges their values
crate::merge_toml_values(b, a, 3)
});
Ok(config)
}
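
A sketch of the precedence this fold produces for two hypothetical `languages.toml` documents, using the three-level merge defined later in this crate (the `helix_loader::` path is an assumption of this sketch; `toml` 0.5 as in the manifest above):

```rust
use helix_loader::merge_toml_values; // path assumed for this sketch

fn main() {
    // Hypothetical built-in entry: the default side of the fold.
    let default: toml::Value = toml::from_str(
        r#"
        [[language]]
        name = "toml"
        comment-token = "#"
        language-server = { command = "taplo", args = ["lsp", "stdio"] }
        "#,
    )
    .unwrap();
    // Hypothetical user override: the more local side of the fold.
    let user: toml::Value = toml::from_str(
        r#"
        [[language]]
        name = "toml"
        language-server = { command = "/usr/bin/taplo" }
        "#,
    )
    .unwrap();

    let merged = merge_toml_values(default, user, 3);
    let language = &merged["language"][0];
    // Keys the user did not touch are kept from the default...
    assert_eq!(language["comment-token"].as_str(), Some("#"));
    // ...while tables below the merge depth are replaced wholesale, so the
    // default `args` vanish together with the old `command`.
    assert_eq!(
        language["language-server"]["command"].as_str(),
        Some("/usr/bin/taplo")
    );
    assert!(language["language-server"]
        .as_table()
        .and_then(|table| table.get("args"))
        .is_none());
}
```
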

@ -1,5 +1,4 @@
use anyhow::{anyhow, Context, Result}; use anyhow::{anyhow, Context, Result};
use libloading::{Library, Symbol};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::fs; use std::fs;
use std::time::SystemTime; use std::time::SystemTime;
@ -17,6 +16,9 @@ const DYLIB_EXTENSION: &str = "so";
#[cfg(windows)] #[cfg(windows)]
const DYLIB_EXTENSION: &str = "dll"; const DYLIB_EXTENSION: &str = "dll";
#[cfg(target_arch = "wasm32")]
const DYLIB_EXTENSION: &str = "wasm";
#[derive(Debug, Serialize, Deserialize)] #[derive(Debug, Serialize, Deserialize)]
struct Configuration { struct Configuration {
#[serde(rename = "use-grammars")] #[serde(rename = "use-grammars")]
@ -57,7 +59,14 @@ pub enum GrammarSource {
const BUILD_TARGET: &str = env!("BUILD_TARGET"); const BUILD_TARGET: &str = env!("BUILD_TARGET");
const REMOTE_NAME: &str = "origin"; const REMOTE_NAME: &str = "origin";
#[cfg(target_arch = "wasm32")]
pub fn get_language(name: &str) -> Result<Language> {
unimplemented!()
}
#[cfg(not(target_arch = "wasm32"))]
pub fn get_language(name: &str) -> Result<Language> { pub fn get_language(name: &str) -> Result<Language> {
use libloading::{Library, Symbol};
let name = name.to_ascii_lowercase(); let name = name.to_ascii_lowercase();
let mut library_path = crate::runtime_dir().join("grammars").join(&name); let mut library_path = crate::runtime_dir().join("grammars").join(&name);
library_path.set_extension(DYLIB_EXTENSION); library_path.set_extension(DYLIB_EXTENSION);
@ -80,11 +89,102 @@ pub fn fetch_grammars() -> Result<()> {
let mut grammars = get_grammar_configs()?; let mut grammars = get_grammar_configs()?;
grammars.retain(|grammar| !matches!(grammar.source, GrammarSource::Local { .. })); grammars.retain(|grammar| !matches!(grammar.source, GrammarSource::Local { .. }));
run_parallel(grammars, fetch_grammar, "fetch") println!("Fetching {} grammars", grammars.len());
let results = run_parallel(grammars, fetch_grammar);
let mut errors = Vec::new();
let mut git_updated = Vec::new();
let mut git_up_to_date = 0;
let mut non_git = Vec::new();
for res in results {
match res {
Ok(FetchStatus::GitUpToDate) => git_up_to_date += 1,
Ok(FetchStatus::GitUpdated {
grammar_id,
revision,
}) => git_updated.push((grammar_id, revision)),
Ok(FetchStatus::NonGit { grammar_id }) => non_git.push(grammar_id),
Err(e) => errors.push(e),
}
}
non_git.sort_unstable();
git_updated.sort_unstable_by(|a, b| a.0.cmp(&b.0));
if git_up_to_date != 0 {
println!("{} up to date git grammars", git_up_to_date);
}
if !non_git.is_empty() {
println!("{} non git grammars", non_git.len());
println!("\t{:?}", non_git);
}
if !git_updated.is_empty() {
println!("{} updated grammars", git_updated.len());
// We checked that the vec is not empty, so unwrapping will not panic
let longest_id = git_updated.iter().map(|x| x.0.len()).max().unwrap();
for (id, rev) in git_updated {
println!(
"\t{id:width$} now on {rev}",
id = id,
width = longest_id,
rev = rev
);
}
}
if !errors.is_empty() {
let len = errors.len();
println!("{} grammars failed to fetch", len);
for (i, error) in errors.into_iter().enumerate() {
println!("\tFailure {}/{}: {}", i + 1, len, error);
}
}
Ok(())
} }
pub fn build_grammars() -> Result<()> { pub fn build_grammars(target: Option<String>) -> Result<()> {
run_parallel(get_grammar_configs()?, build_grammar, "build") let grammars = get_grammar_configs()?;
println!("Building {} grammars", grammars.len());
let results = run_parallel(grammars, move |grammar| {
build_grammar(grammar, target.as_deref())
});
let mut errors = Vec::new();
let mut already_built = 0;
let mut built = Vec::new();
for res in results {
match res {
Ok(BuildStatus::AlreadyBuilt) => already_built += 1,
Ok(BuildStatus::Built { grammar_id }) => built.push(grammar_id),
Err(e) => errors.push(e),
}
}
built.sort_unstable();
if already_built != 0 {
println!("{} grammars already built", already_built);
}
if !built.is_empty() {
println!("{} grammars built now", built.len());
println!("\t{:?}", built);
}
if !errors.is_empty() {
let len = errors.len();
println!("{} grammars failed to build", len);
for (i, error) in errors.into_iter().enumerate() {
println!("\tFailure {}/{}: {}", i, len, error);
}
}
Ok(())
} }
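
A hedged sketch of how the two reworked entry points can be driven together from a helper binary; the module path is assumed here, and it mirrors the small fetch-only binary added further below.

```rust
use anyhow::Result;
use helix_loader::grammar::{build_grammars, fetch_grammars}; // path assumed

fn main() -> Result<()> {
    // Clones or updates every git-sourced grammar and prints the summary
    // (up to date / updated / non-git / failures) produced above.
    fetch_grammars()?;
    // `None` keeps the host BUILD_TARGET; pass `Some(triple)` to cross-compile.
    build_grammars(None)?;
    Ok(())
}
```
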
// Returns the set of grammar configurations the user requests. // Returns the set of grammar configurations the user requests.
@ -92,7 +192,7 @@ pub fn build_grammars() -> Result<()> {
// merged. The `grammar_selection` key of the config is then used to filter // merged. The `grammar_selection` key of the config is then used to filter
// down all grammars into a subset of the user's choosing. // down all grammars into a subset of the user's choosing.
fn get_grammar_configs() -> Result<Vec<GrammarConfiguration>> { fn get_grammar_configs() -> Result<Vec<GrammarConfiguration>> {
let config: Configuration = crate::user_lang_config() let config: Configuration = crate::config::user_lang_config()
.context("Could not parse languages.toml")? .context("Could not parse languages.toml")?
.try_into()?; .try_into()?;
@ -113,37 +213,49 @@ fn get_grammar_configs() -> Result<Vec<GrammarConfiguration>> {
Ok(grammars) Ok(grammars)
} }
fn run_parallel<F>(grammars: Vec<GrammarConfiguration>, job: F, action: &'static str) -> Result<()> fn run_parallel<F, Res>(grammars: Vec<GrammarConfiguration>, job: F) -> Vec<Result<Res>>
where where
F: Fn(GrammarConfiguration) -> Result<()> + std::marker::Send + 'static + Copy, F: Fn(GrammarConfiguration) -> Result<Res> + Send + 'static + Clone,
Res: Send + 'static,
{ {
let pool = threadpool::Builder::new().build(); let pool = threadpool::Builder::new().build();
let (tx, rx) = channel(); let (tx, rx) = channel();
for grammar in grammars { for grammar in grammars {
let tx = tx.clone(); let tx = tx.clone();
let job = job.clone();
pool.execute(move || { pool.execute(move || {
tx.send(job(grammar)).unwrap(); // Ignore any SendErrors, if any job in another thread has encountered an
// error the Receiver will be closed causing this send to fail.
let _ = tx.send(job(grammar));
}); });
} }
drop(tx); drop(tx);
// TODO: print all failures instead of the first one found. rx.iter().collect()
rx.iter()
.find(|result| result.is_err())
.map(|err| err.with_context(|| format!("Failed to {} some grammar(s)", action)))
.unwrap_or(Ok(()))
} }
fn fetch_grammar(grammar: GrammarConfiguration) -> Result<()> { enum FetchStatus {
GitUpToDate,
GitUpdated {
grammar_id: String,
revision: String,
},
NonGit {
grammar_id: String,
},
}
fn fetch_grammar(grammar: GrammarConfiguration) -> Result<FetchStatus> {
if let GrammarSource::Git { if let GrammarSource::Git {
remote, revision, .. remote, revision, ..
} = grammar.source } = grammar.source
{ {
let grammar_dir = crate::runtime_dir() let grammar_dir = crate::runtime_dir()
.join("grammars/sources") .join("grammars")
.join("sources")
.join(&grammar.grammar_id); .join(&grammar.grammar_id);
fs::create_dir_all(&grammar_dir).context(format!( fs::create_dir_all(&grammar_dir).context(format!(
@ -172,16 +284,18 @@ fn fetch_grammar(grammar: GrammarConfiguration) -> Result<()> {
)?; )?;
git(&grammar_dir, ["checkout", &revision])?; git(&grammar_dir, ["checkout", &revision])?;
println!( Ok(FetchStatus::GitUpdated {
"Grammar '{}' checked out at '{}'.", grammar_id: grammar.grammar_id,
grammar.grammar_id, revision revision,
); })
} else { } else {
println!("Grammar '{}' is already up to date.", grammar.grammar_id); Ok(FetchStatus::GitUpToDate)
} }
} else {
Ok(FetchStatus::NonGit {
grammar_id: grammar.grammar_id,
})
} }
Ok(())
} }
// Sets the remote for a repository to the given URL, creating the remote if // Sets the remote for a repository to the given URL, creating the remote if
@ -228,12 +342,18 @@ where
} }
} }
fn build_grammar(grammar: GrammarConfiguration) -> Result<()> { enum BuildStatus {
AlreadyBuilt,
Built { grammar_id: String },
}
fn build_grammar(grammar: GrammarConfiguration, target: Option<&str>) -> Result<BuildStatus> {
let grammar_dir = if let GrammarSource::Local { path } = &grammar.source { let grammar_dir = if let GrammarSource::Local { path } = &grammar.source {
PathBuf::from(&path) PathBuf::from(&path)
} else { } else {
crate::runtime_dir() crate::runtime_dir()
.join("grammars/sources") .join("grammars")
.join("sources")
.join(&grammar.grammar_id) .join(&grammar.grammar_id)
}; };
@ -260,10 +380,14 @@ fn build_grammar(grammar: GrammarConfiguration) -> Result<()> {
} }
.join("src"); .join("src");
build_tree_sitter_library(&path, grammar) build_tree_sitter_library(&path, grammar, target)
} }
fn build_tree_sitter_library(src_path: &Path, grammar: GrammarConfiguration) -> Result<()> { fn build_tree_sitter_library(
src_path: &Path,
grammar: GrammarConfiguration,
target: Option<&str>,
) -> Result<BuildStatus> {
let header_path = src_path; let header_path = src_path;
let parser_path = src_path.join("parser.c"); let parser_path = src_path.join("parser.c");
let mut scanner_path = src_path.join("scanner.c"); let mut scanner_path = src_path.join("scanner.c");
@ -286,27 +410,25 @@ fn build_tree_sitter_library(src_path: &Path, grammar: GrammarConfiguration) ->
.context("Failed to compare source and binary timestamps")?; .context("Failed to compare source and binary timestamps")?;
if !recompile { if !recompile {
println!("Grammar '{}' is already built.", grammar.grammar_id); return Ok(BuildStatus::AlreadyBuilt);
return Ok(());
} }
println!("Building grammar '{}'", grammar.grammar_id);
let mut config = cc::Build::new(); let mut config = cc::Build::new();
config config
.cpp(true) .cpp(true)
.opt_level(3) .opt_level(3)
.cargo_metadata(false) .cargo_metadata(false)
.host(BUILD_TARGET) .host(BUILD_TARGET)
.target(BUILD_TARGET); .target(target.unwrap_or(BUILD_TARGET));
let compiler = config.get_compiler(); let compiler = config.get_compiler();
let mut command = Command::new(compiler.path()); let mut command = Command::new(compiler.path());
command.current_dir(src_path); command.current_dir(src_path);
for (key, value) in compiler.env() { for (key, value) in compiler.env() {
command.env(key, value); command.env(key, value);
} }
command.args(compiler.args());
if cfg!(windows) { if cfg!(all(windows, target_env = "msvc")) {
command command
.args(&["/nologo", "/LD", "/I"]) .args(&["/nologo", "/LD", "/I"])
.arg(header_path) .arg(header_path)
@ -339,12 +461,17 @@ fn build_tree_sitter_library(src_path: &Path, grammar: GrammarConfiguration) ->
} }
} }
command.arg("-xc").arg(parser_path); command.arg("-xc").arg(parser_path);
if cfg!(all(unix, not(target_os = "macos"))) { if cfg!(all(
unix,
not(any(target_os = "macos", target_os = "illumos"))
)) {
command.arg("-Wl,-z,relro,-z,now"); command.arg("-Wl,-z,relro,-z,now");
} }
} }
let output = command.output().context("Failed to execute C compiler")?; let output = command
.output()
.context("Failed to execute C/C++ compiler")?;
if !output.status.success() { if !output.status.success() {
return Err(anyhow!( return Err(anyhow!(
"Parser compilation failed.\nStdout: {}\nStderr: {}", "Parser compilation failed.\nStdout: {}\nStderr: {}",
@ -353,7 +480,9 @@ fn build_tree_sitter_library(src_path: &Path, grammar: GrammarConfiguration) ->
)); ));
} }
Ok(()) Ok(BuildStatus::Built {
grammar_id: grammar.grammar_id,
})
} }
fn needs_recompile( fn needs_recompile(

@ -1,34 +1,56 @@
pub mod config;
pub mod grammar; pub mod grammar;
use etcetera::base_strategy::{choose_base_strategy, BaseStrategy}; use etcetera::base_strategy::{choose_base_strategy, BaseStrategy};
use std::path::PathBuf;
pub static RUNTIME_DIR: once_cell::sync::Lazy<std::path::PathBuf> = pub static RUNTIME_DIR: once_cell::sync::Lazy<PathBuf> = once_cell::sync::Lazy::new(runtime_dir);
once_cell::sync::Lazy::new(runtime_dir);
pub fn runtime_dir() -> std::path::PathBuf { static CONFIG_FILE: once_cell::sync::OnceCell<PathBuf> = once_cell::sync::OnceCell::new();
pub fn initialize_config_file(specified_file: Option<PathBuf>) {
let config_file = specified_file.unwrap_or_else(|| {
let config_dir = config_dir();
if !config_dir.exists() {
std::fs::create_dir_all(&config_dir).ok();
}
config_dir.join("config.toml")
});
// We should only initialize this value once.
CONFIG_FILE.set(config_file).ok();
}
pub fn runtime_dir() -> PathBuf {
if let Ok(dir) = std::env::var("HELIX_RUNTIME") { if let Ok(dir) = std::env::var("HELIX_RUNTIME") {
return dir.into(); return dir.into();
} }
if let Ok(dir) = std::env::var("CARGO_MANIFEST_DIR") {
// this is the directory of the crate being run by cargo, we need the workspace path so we take the parent
let path = std::path::PathBuf::from(dir).parent().unwrap().join(RT_DIR);
log::debug!("runtime dir: {}", path.to_string_lossy());
return path;
}
const RT_DIR: &str = "runtime"; const RT_DIR: &str = "runtime";
let conf_dir = config_dir().join(RT_DIR); let conf_dir = config_dir().join(RT_DIR);
if conf_dir.exists() { if conf_dir.exists() {
return conf_dir; return conf_dir;
} }
if let Ok(dir) = std::env::var("CARGO_MANIFEST_DIR") {
// this is the directory of the crate being run by cargo, we need the workspace path so we take the parent
return std::path::PathBuf::from(dir).parent().unwrap().join(RT_DIR);
}
// fallback to location of the executable being run // fallback to location of the executable being run
// canonicalize the path in case the executable is symlinked
std::env::current_exe() std::env::current_exe()
.ok() .ok()
.and_then(|path| std::fs::canonicalize(path).ok())
.and_then(|path| path.parent().map(|path| path.to_path_buf().join(RT_DIR))) .and_then(|path| path.parent().map(|path| path.to_path_buf().join(RT_DIR)))
.unwrap() .unwrap()
} }
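
A small sketch of the first branch above, the `HELIX_RUNTIME` override; the crate path and the directory value are illustrative only.

```rust
fn main() {
    // With the environment variable set, `runtime_dir` returns it verbatim,
    // skipping the CARGO_MANIFEST_DIR, config-dir and executable fallbacks.
    std::env::set_var("HELIX_RUNTIME", "/opt/helix/runtime");
    assert_eq!(
        helix_loader::runtime_dir(), // path assumed for this sketch
        std::path::PathBuf::from("/opt/helix/runtime")
    );
}
```
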
pub fn config_dir() -> std::path::PathBuf { pub fn config_dir() -> PathBuf {
// TODO: allow env var override // TODO: allow env var override
let strategy = choose_base_strategy().expect("Unable to find the config directory!"); let strategy = choose_base_strategy().expect("Unable to find the config directory!");
let mut path = strategy.config_dir(); let mut path = strategy.config_dir();
@ -36,7 +58,16 @@ pub fn config_dir() -> std::path::PathBuf {
path path
} }
pub fn cache_dir() -> std::path::PathBuf { pub fn local_config_dirs() -> Vec<PathBuf> {
let directories = find_local_config_dirs()
.into_iter()
.map(|path| path.join(".helix"))
.collect();
log::debug!("Located configuration folders: {:?}", directories);
directories
}
pub fn cache_dir() -> PathBuf {
// TODO: allow env var override // TODO: allow env var override
let strategy = choose_base_strategy().expect("Unable to find the config directory!"); let strategy = choose_base_strategy().expect("Unable to find the config directory!");
let mut path = strategy.cache_dir(); let mut path = strategy.cache_dir();
@ -44,41 +75,51 @@ pub fn cache_dir() -> std::path::PathBuf {
path path
} }
pub fn config_file() -> std::path::PathBuf { pub fn config_file() -> PathBuf {
config_dir().join("config.toml") CONFIG_FILE
.get()
.map(|path| path.to_path_buf())
.unwrap_or_else(|| config_dir().join("config.toml"))
} }
pub fn lang_config_file() -> std::path::PathBuf { pub fn lang_config_file() -> PathBuf {
config_dir().join("languages.toml") config_dir().join("languages.toml")
} }
pub fn log_file() -> std::path::PathBuf { pub fn log_file() -> PathBuf {
cache_dir().join("helix.log") cache_dir().join("helix.log")
} }
/// Default bultin-in languages.toml. pub fn find_local_config_dirs() -> Vec<PathBuf> {
pub fn default_lang_config() -> toml::Value { let current_dir = std::env::current_dir().expect("unable to determine current directory");
toml::from_slice(include_bytes!("../../languages.toml")) let mut directories = Vec::new();
.expect("Could not parse bultin-in languages.toml to valid toml")
} for ancestor in current_dir.ancestors() {
if ancestor.join(".git").is_dir() {
/// User configured languages.toml file, merged with the default config. directories.push(ancestor.to_path_buf());
pub fn user_lang_config() -> Result<toml::Value, toml::de::Error> { // Don't go higher than repo if we're in one
let def_lang_conf = default_lang_config(); break;
let data = std::fs::read(crate::config_dir().join("languages.toml")); } else if ancestor.join(".helix").is_dir() {
let user_lang_conf = match data { directories.push(ancestor.to_path_buf());
Ok(raw) => {
let value = toml::from_slice(&raw)?;
merge_toml_values(def_lang_conf, value)
} }
Err(_) => def_lang_conf, }
}; directories
Ok(user_lang_conf)
} }
// right overrides left /// Merge two TOML documents, merging values from `right` onto `left`
pub fn merge_toml_values(left: toml::Value, right: toml::Value) -> toml::Value { ///
/// When an array exists in both `left` and `right`, `right`'s array is
/// used. When a table exists in both `left` and `right`, the merged table
/// consists of all keys in `left`'s table unioned with all keys in `right`
/// with the values of `right` being merged recursively onto values of
/// `left`.
///
/// `merge_depth` controls how deep the merge recurses: levels within that
/// depth (such as the top-level `[[language]]` array) are merged, while
/// anything nested deeper is overridden by `right`. This is useful for TOML
/// documents like `languages.toml`, where one usually wants to override or
/// add to the top-level array instead of replacing it altogether.
pub fn merge_toml_values(left: toml::Value, right: toml::Value, merge_depth: usize) -> toml::Value {
use toml::Value; use toml::Value;
fn get_name(v: &Value) -> Option<&str> { fn get_name(v: &Value) -> Option<&str> {
@ -87,24 +128,36 @@ pub fn merge_toml_values(left: toml::Value, right: toml::Value) -> toml::Value {
match (left, right) { match (left, right) {
(Value::Array(mut left_items), Value::Array(right_items)) => { (Value::Array(mut left_items), Value::Array(right_items)) => {
// The top-level arrays should be merged but nested arrays should
// act as overrides. For the `languages.toml` config, this means
// that you can specify a sub-set of languages in an overriding
// `languages.toml` but that nested arrays like Language Server
// arguments are replaced instead of merged.
if merge_depth > 0 {
left_items.reserve(right_items.len()); left_items.reserve(right_items.len());
for rvalue in right_items { for rvalue in right_items {
let lvalue = get_name(&rvalue) let lvalue = get_name(&rvalue)
.and_then(|rname| left_items.iter().position(|v| get_name(v) == Some(rname))) .and_then(|rname| {
left_items.iter().position(|v| get_name(v) == Some(rname))
})
.map(|lpos| left_items.remove(lpos)); .map(|lpos| left_items.remove(lpos));
let mvalue = match lvalue { let mvalue = match lvalue {
Some(lvalue) => merge_toml_values(lvalue, rvalue), Some(lvalue) => merge_toml_values(lvalue, rvalue, merge_depth - 1),
None => rvalue, None => rvalue,
}; };
left_items.push(mvalue); left_items.push(mvalue);
} }
Value::Array(left_items) Value::Array(left_items)
} else {
Value::Array(right_items)
}
} }
(Value::Table(mut left_map), Value::Table(right_map)) => { (Value::Table(mut left_map), Value::Table(right_map)) => {
if merge_depth > 0 {
for (rname, rvalue) in right_map { for (rname, rvalue) in right_map {
match left_map.remove(&rname) { match left_map.remove(&rname) {
Some(lvalue) => { Some(lvalue) => {
let merged_value = merge_toml_values(lvalue, rvalue); let merged_value = merge_toml_values(lvalue, rvalue, merge_depth - 1);
left_map.insert(rname, merged_value); left_map.insert(rname, merged_value);
} }
None => { None => {
@ -113,6 +166,9 @@ pub fn merge_toml_values(left: toml::Value, right: toml::Value) -> toml::Value {
} }
} }
Value::Table(left_map) Value::Table(left_map)
} else {
Value::Table(right_map)
}
} }
// Catch everything else we didn't handle, and use the right value // Catch everything else we didn't handle, and use the right value
(_, value) => value, (_, value) => value,
@ -122,23 +178,22 @@ pub fn merge_toml_values(left: toml::Value, right: toml::Value) -> toml::Value {
#[cfg(test)] #[cfg(test)]
mod merge_toml_tests { mod merge_toml_tests {
use super::merge_toml_values; use super::merge_toml_values;
#[test]
fn language_tomls() {
use toml::Value; use toml::Value;
const USER: &str = " #[test]
fn language_toml_map_merges() {
const USER: &str = r#"
[[language]] [[language]]
name = \"nix\" name = "nix"
test = \"bbb\" test = "bbb"
indent = { tab-width = 4, unit = \" \", test = \"aaa\" } indent = { tab-width = 4, unit = " ", test = "aaa" }
"; "#;
let base: Value = toml::from_slice(include_bytes!("../../languages.toml")) let base: Value = toml::from_slice(include_bytes!("../../languages.toml"))
.expect("Couldn't parse built-in languages config"); .expect("Couldn't parse built-in languages config");
let user: Value = toml::from_str(USER).unwrap(); let user: Value = toml::from_str(USER).unwrap();
let merged = merge_toml_values(base, user); let merged = merge_toml_values(base, user, 3);
let languages = merged.get("language").unwrap().as_array().unwrap(); let languages = merged.get("language").unwrap().as_array().unwrap();
let nix = languages let nix = languages
.iter() .iter()
@ -158,4 +213,33 @@ mod merge_toml_tests {
// We didn't change comment-token so it should be same // We didn't change comment-token so it should be same
assert_eq!(nix.get("comment-token").unwrap().as_str().unwrap(), "#"); assert_eq!(nix.get("comment-token").unwrap().as_str().unwrap(), "#");
} }
#[test]
fn language_toml_nested_array_merges() {
const USER: &str = r#"
[[language]]
name = "typescript"
language-server = { command = "deno", args = ["lsp"] }
"#;
let base: Value = toml::from_slice(include_bytes!("../../languages.toml"))
.expect("Couldn't parse built-in languages config");
let user: Value = toml::from_str(USER).unwrap();
let merged = merge_toml_values(base, user, 3);
let languages = merged.get("language").unwrap().as_array().unwrap();
let ts = languages
.iter()
.find(|v| v.get("name").unwrap().as_str().unwrap() == "typescript")
.unwrap();
assert_eq!(
ts.get("language-server")
.unwrap()
.get("args")
.unwrap()
.as_array()
.unwrap(),
&vec![Value::String("lsp".into())]
)
}
} }

@ -0,0 +1,9 @@
use anyhow::Result;
use helix_loader::grammar::fetch_grammars;
// This binary is used in the Release CI as an optimization to cut down on
// compilation time. This is not meant to be run manually.
fn main() -> Result<()> {
fetch_grammars()
}

@ -17,12 +17,11 @@ helix-core = { version = "0.6", path = "../helix-core" }
anyhow = "1.0" anyhow = "1.0"
futures-executor = "0.3" futures-executor = "0.3"
futures-util = { version = "0.3", features = ["std", "async-await"], default-features = false } futures-util = { version = "0.3", features = ["std", "async-await"], default-features = false }
jsonrpc-core = { version = "18.0", default-features = false } # don't pull in all of futures
log = "0.4" log = "0.4"
lsp-types = { version = "0.92", features = ["proposed"] } lsp-types = { version = "0.93", features = ["proposed"] }
serde = { version = "1.0", features = ["derive"] } serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0" serde_json = "1.0"
thiserror = "1.0" thiserror = "1.0"
tokio = { version = "1.17", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot", "sync"] } tokio = { version = "1.21", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot", "sync"] }
tokio-stream = "0.1.8" tokio-stream = "0.1.10"
which = "4.2" which = "4.2"

@ -1,12 +1,15 @@
use crate::{ use crate::{
jsonrpc,
transport::{Payload, Transport}, transport::{Payload, Transport},
Call, Error, OffsetEncoding, Result, Call, Error, OffsetEncoding, Result,
}; };
use anyhow::anyhow;
use helix_core::{find_root, ChangeSet, Rope}; use helix_core::{find_root, ChangeSet, Rope};
use jsonrpc_core as jsonrpc;
use lsp_types as lsp; use lsp_types as lsp;
use serde::Deserialize;
use serde_json::Value; use serde_json::Value;
use std::collections::HashMap;
use std::future::Future; use std::future::Future;
use std::process::Stdio; use std::process::Stdio;
use std::sync::{ use std::sync::{
@ -31,7 +34,10 @@ pub struct Client {
pub(crate) capabilities: OnceCell<lsp::ServerCapabilities>, pub(crate) capabilities: OnceCell<lsp::ServerCapabilities>,
offset_encoding: OffsetEncoding, offset_encoding: OffsetEncoding,
config: Option<Value>, config: Option<Value>,
root_markers: Vec<String>, root_path: std::path::PathBuf,
root_uri: Option<lsp::Url>,
workspace_folders: Vec<lsp::WorkspaceFolder>,
req_timeout: u64,
} }
impl Client { impl Client {
@ -40,8 +46,9 @@ impl Client {
cmd: &str, cmd: &str,
args: &[String], args: &[String],
config: Option<Value>, config: Option<Value>,
root_markers: Vec<String>, root_markers: &[String],
id: usize, id: usize,
req_timeout: u64,
) -> Result<(Self, UnboundedReceiver<(usize, Call)>, Arc<Notify>)> { ) -> Result<(Self, UnboundedReceiver<(usize, Call)>, Arc<Notify>)> {
// Resolve path to the binary // Resolve path to the binary
let cmd = which::which(cmd).map_err(|err| anyhow::anyhow!(err))?; let cmd = which::which(cmd).map_err(|err| anyhow::anyhow!(err))?;
@ -65,6 +72,25 @@ impl Client {
let (server_rx, server_tx, initialize_notify) = let (server_rx, server_tx, initialize_notify) =
Transport::start(reader, writer, stderr, id); Transport::start(reader, writer, stderr, id);
let root_path = find_root(None, root_markers);
let root_uri = lsp::Url::from_file_path(root_path.clone()).ok();
// TODO: support multiple workspace folders
let workspace_folders = root_uri
.clone()
.map(|root| {
vec![lsp::WorkspaceFolder {
name: root
.path_segments()
.and_then(|segments| segments.last())
.map(|basename| basename.to_string())
.unwrap_or_default(),
uri: root,
}]
})
.unwrap_or_default();
let client = Self { let client = Self {
id, id,
_process: process, _process: process,
@ -73,7 +99,11 @@ impl Client {
capabilities: OnceCell::new(), capabilities: OnceCell::new(),
offset_encoding: OffsetEncoding::Utf8, offset_encoding: OffsetEncoding::Utf8,
config, config,
root_markers, req_timeout,
root_path,
root_uri,
workspace_folders,
}; };
Ok((client, server_rx, initialize_notify)) Ok((client, server_rx, initialize_notify))
@ -117,6 +147,10 @@ impl Client {
self.config.as_ref() self.config.as_ref()
} }
pub fn workspace_folders(&self) -> &[lsp::WorkspaceFolder] {
&self.workspace_folders
}
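
For reference, a standalone sketch of the workspace folder that `Client::new` now derives from the project root, for a hypothetical Unix path; it uses `lsp_types` directly (re-exported as `lsp` in this crate).

```rust
use lsp_types as lsp;

fn main() {
    // Hypothetical project root; `from_file_path` requires an absolute path.
    let root_path = std::path::Path::new("/home/user/projects/helix");
    let root_uri = lsp::Url::from_file_path(root_path).unwrap();
    let folder = lsp::WorkspaceFolder {
        // The folder name is the last path segment of the root URI.
        name: root_uri
            .path_segments()
            .and_then(|segments| segments.last())
            .map(|basename| basename.to_string())
            .unwrap_or_default(),
        uri: root_uri,
    };
    assert_eq!(folder.name, "helix");
    assert_eq!(folder.uri.as_str(), "file:///home/user/projects/helix");
}
```
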
/// Execute a RPC request on the language server. /// Execute a RPC request on the language server.
async fn request<R: lsp::request::Request>(&self, params: R::Params) -> Result<R::Result> async fn request<R: lsp::request::Request>(&self, params: R::Params) -> Result<R::Result>
where where
@ -139,6 +173,7 @@ impl Client {
{ {
let server_tx = self.server_tx.clone(); let server_tx = self.server_tx.clone();
let id = self.next_request_id(); let id = self.next_request_id();
let timeout_secs = self.req_timeout;
async move { async move {
use std::time::Duration; use std::time::Duration;
@ -162,8 +197,8 @@ impl Client {
}) })
.map_err(|e| Error::Other(e.into()))?; .map_err(|e| Error::Other(e.into()))?;
// TODO: specifiable timeout, delay other calls until initialize success // TODO: delay other calls until initialize success
timeout(Duration::from_secs(20), rx.recv()) timeout(Duration::from_secs(timeout_secs), rx.recv())
.await .await
.map_err(|_| Error::Timeout)? // return Timeout .map_err(|_| Error::Timeout)? // return Timeout
.ok_or(Error::StreamClosed)? .ok_or(Error::StreamClosed)?
@ -234,20 +269,18 @@ impl Client {
// ------------------------------------------------------------------------------------------- // -------------------------------------------------------------------------------------------
pub(crate) async fn initialize(&self) -> Result<lsp::InitializeResult> { pub(crate) async fn initialize(&self) -> Result<lsp::InitializeResult> {
// TODO: delay any requests that are triggered prior to initialize if let Some(config) = &self.config {
let root = find_root(None, &self.root_markers) log::info!("Using custom LSP config: {}", config);
.and_then(|root| lsp::Url::from_file_path(root).ok());
if self.config.is_some() {
log::info!("Using custom LSP config: {}", self.config.as_ref().unwrap());
} }
#[allow(deprecated)] #[allow(deprecated)]
let params = lsp::InitializeParams { let params = lsp::InitializeParams {
process_id: Some(std::process::id()), process_id: Some(std::process::id()),
// root_path is obsolete, use root_uri workspace_folders: Some(self.workspace_folders.clone()),
root_path: None, // root_path is obsolete, but some clients like pyright still use it so we specify both.
root_uri: root, // clients will prefer _uri if possible
root_path: self.root_path.to_str().map(|path| path.to_owned()),
root_uri: self.root_uri.clone(),
initialization_options: self.config.clone(), initialization_options: self.config.clone(),
capabilities: lsp::ClientCapabilities { capabilities: lsp::ClientCapabilities {
workspace: Some(lsp::WorkspaceClientCapabilities { workspace: Some(lsp::WorkspaceClientCapabilities {
@ -255,12 +288,25 @@ impl Client {
did_change_configuration: Some(lsp::DynamicRegistrationClientCapabilities { did_change_configuration: Some(lsp::DynamicRegistrationClientCapabilities {
dynamic_registration: Some(false), dynamic_registration: Some(false),
}), }),
workspace_folders: Some(true),
apply_edit: Some(true),
symbol: Some(lsp::WorkspaceSymbolClientCapabilities {
dynamic_registration: Some(false),
..Default::default()
}),
..Default::default() ..Default::default()
}), }),
text_document: Some(lsp::TextDocumentClientCapabilities { text_document: Some(lsp::TextDocumentClientCapabilities {
completion: Some(lsp::CompletionClientCapabilities { completion: Some(lsp::CompletionClientCapabilities {
completion_item: Some(lsp::CompletionItemCapability { completion_item: Some(lsp::CompletionItemCapability {
snippet_support: Some(false), snippet_support: Some(false),
resolve_support: Some(lsp::CompletionItemCapabilityResolveSupport {
properties: vec![
String::from("documentation"),
String::from("detail"),
String::from("additionalTextEdits"),
],
}),
..Default::default() ..Default::default()
}), }),
completion_item_kind: Some(lsp::CompletionItemKindCapability { completion_item_kind: Some(lsp::CompletionItemKindCapability {
@ -275,6 +321,16 @@ impl Client {
content_format: Some(vec![lsp::MarkupKind::Markdown]), content_format: Some(vec![lsp::MarkupKind::Markdown]),
..Default::default() ..Default::default()
}), }),
signature_help: Some(lsp::SignatureHelpClientCapabilities {
signature_information: Some(lsp::SignatureInformationSettings {
documentation_format: Some(vec![lsp::MarkupKind::Markdown]),
parameter_information: Some(lsp::ParameterInformationSettings {
label_offset_support: Some(true),
}),
active_parameter_support: Some(true),
}),
..Default::default()
}),
rename: Some(lsp::RenameClientCapabilities { rename: Some(lsp::RenameClientCapabilities {
dynamic_registration: Some(false), dynamic_registration: Some(false),
prepare_support: Some(false), prepare_support: Some(false),
@ -301,6 +357,9 @@ impl Client {
}), }),
..Default::default() ..Default::default()
}), }),
publish_diagnostics: Some(lsp::PublishDiagnosticsClientCapabilities {
..Default::default()
}),
..Default::default() ..Default::default()
}), }),
window: Some(lsp::WindowClientCapabilities { window: Some(lsp::WindowClientCapabilities {
@ -310,7 +369,6 @@ impl Client {
..Default::default() ..Default::default()
}, },
trace: None, trace: None,
workspace_folders: None,
client_info: None, client_info: None,
locale: None, // TODO locale: None, // TODO
}; };
@ -597,7 +655,12 @@ impl Client {
text_document: lsp::TextDocumentIdentifier, text_document: lsp::TextDocumentIdentifier,
position: lsp::Position, position: lsp::Position,
work_done_token: Option<lsp::ProgressToken>, work_done_token: Option<lsp::ProgressToken>,
) -> impl Future<Output = Result<Value>> { ) -> Option<impl Future<Output = Result<Value>>> {
let capabilities = self.capabilities.get().unwrap();
// Return early if signature help is not supported
capabilities.signature_help_provider.as_ref()?;
let params = lsp::SignatureHelpParams { let params = lsp::SignatureHelpParams {
text_document_position_params: lsp::TextDocumentPositionParams { text_document_position_params: lsp::TextDocumentPositionParams {
text_document, text_document,
@ -608,7 +671,7 @@ impl Client {
// lsp::SignatureHelpContext // lsp::SignatureHelpContext
}; };
self.call::<lsp::request::SignatureHelpRequest>(params) Some(self.call::<lsp::request::SignatureHelpRequest>(params))
} }
pub fn text_document_hover( pub fn text_document_hover(
@ -647,6 +710,24 @@ impl Client {
}; };
// TODO: return err::unavailable so we can fall back to tree sitter formatting // TODO: return err::unavailable so we can fall back to tree sitter formatting
// merge FormattingOptions with 'config.format'
let config_format = self
.config
.as_ref()
.and_then(|cfg| cfg.get("format"))
.and_then(|fmt| HashMap::<String, lsp::FormattingProperty>::deserialize(fmt).ok());
let options = if let Some(mut properties) = config_format {
// passed in options take precedence over 'config.format'
properties.extend(options.properties);
lsp::FormattingOptions {
properties,
..options
}
} else {
options
};
let params = lsp::DocumentFormattingParams { let params = lsp::DocumentFormattingParams {
text_document, text_document,
options, options,
@ -693,6 +774,26 @@ impl Client {
Ok(response.unwrap_or_default()) Ok(response.unwrap_or_default())
} }
pub fn text_document_document_highlight(
&self,
text_document: lsp::TextDocumentIdentifier,
position: lsp::Position,
work_done_token: Option<lsp::ProgressToken>,
) -> impl Future<Output = Result<Value>> {
let params = lsp::DocumentHighlightParams {
text_document_position_params: lsp::TextDocumentPositionParams {
text_document,
position,
},
work_done_progress_params: lsp::WorkDoneProgressParams { work_done_token },
partial_result_params: lsp::PartialResultParams {
partial_result_token: None,
},
};
self.call::<lsp::request::DocumentHighlightRequest>(params)
}
fn goto_request< fn goto_request<
T: lsp::request::Request< T: lsp::request::Request<
Params = lsp::GotoDefinitionParams, Params = lsp::GotoDefinitionParams,
@ -804,11 +905,12 @@ impl Client {
&self, &self,
text_document: lsp::TextDocumentIdentifier, text_document: lsp::TextDocumentIdentifier,
range: lsp::Range, range: lsp::Range,
context: lsp::CodeActionContext,
) -> impl Future<Output = Result<Value>> { ) -> impl Future<Output = Result<Value>> {
let params = lsp::CodeActionParams { let params = lsp::CodeActionParams {
text_document, text_document,
range, range,
context: lsp::CodeActionContext::default(), context,
work_done_progress_params: lsp::WorkDoneProgressParams::default(), work_done_progress_params: lsp::WorkDoneProgressParams::default(),
partial_result_params: lsp::PartialResultParams::default(), partial_result_params: lsp::PartialResultParams::default(),
}; };
@ -822,6 +924,19 @@ impl Client {
position: lsp::Position, position: lsp::Position,
new_name: String, new_name: String,
) -> anyhow::Result<lsp::WorkspaceEdit> { ) -> anyhow::Result<lsp::WorkspaceEdit> {
let capabilities = self.capabilities.get().unwrap();
// check if we're able to rename
match capabilities.rename_provider {
Some(lsp::OneOf::Left(true)) | Some(lsp::OneOf::Right(_)) => (),
// None | Some(false)
_ => {
log::warn!("rename_symbol failed: The server does not support rename");
let err = "The server does not support rename";
return Err(anyhow!(err));
}
};
let params = lsp::RenameParams { let params = lsp::RenameParams {
text_document_position: lsp::TextDocumentPositionParams { text_document_position: lsp::TextDocumentPositionParams {
text_document, text_document,

@ -0,0 +1,370 @@
//! An implementation of the JSONRPC 2.0 spec types
// Upstream implementation: https://github.com/paritytech/jsonrpc/tree/38af3c9439aa75481805edf6c05c6622a5ab1e70/core/src/types
// Changes from upstream:
// * unused functions (almost all non-trait-implementation functions) have been removed
// * `#[serde(deny_unknown_fields)]` annotations have been removed on response types
// for compatibility with non-strict language server implementations like Ruby Sorbet
// (see https://github.com/helix-editor/helix/issues/2786)
// * some variable names have been lengthened for readability
use serde::de::{self, DeserializeOwned, Visitor};
use serde::{Deserialize, Serialize};
use serde_json::Value;
// https://www.jsonrpc.org/specification#error_object
#[derive(Debug, PartialEq, Clone)]
pub enum ErrorCode {
ParseError,
InvalidRequest,
MethodNotFound,
InvalidParams,
InternalError,
ServerError(i64),
}
impl ErrorCode {
pub fn code(&self) -> i64 {
match *self {
ErrorCode::ParseError => -32700,
ErrorCode::InvalidRequest => -32600,
ErrorCode::MethodNotFound => -32601,
ErrorCode::InvalidParams => -32602,
ErrorCode::InternalError => -32603,
ErrorCode::ServerError(code) => code,
}
}
}
impl From<i64> for ErrorCode {
fn from(code: i64) -> Self {
match code {
-32700 => ErrorCode::ParseError,
-32600 => ErrorCode::InvalidRequest,
-32601 => ErrorCode::MethodNotFound,
-32602 => ErrorCode::InvalidParams,
-32603 => ErrorCode::InternalError,
code => ErrorCode::ServerError(code),
}
}
}
impl<'de> Deserialize<'de> for ErrorCode {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: serde::Deserializer<'de>,
{
let code: i64 = Deserialize::deserialize(deserializer)?;
Ok(ErrorCode::from(code))
}
}
impl Serialize for ErrorCode {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
serializer.serialize_i64(self.code())
}
}
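
A test-style sketch of the mapping above; the concrete codes are illustrative, taken from the reserved range and the server-defined range respectively.

```rust
#[test]
fn error_code_round_trip() {
    // Reserved codes map to their named variants...
    assert_eq!(ErrorCode::from(-32601), ErrorCode::MethodNotFound);
    // ...anything else is kept as a server-defined code and
    // serializes back to the same integer.
    let code = ErrorCode::from(-32099);
    assert_eq!(code, ErrorCode::ServerError(-32099));
    assert_eq!(serde_json::to_string(&code).unwrap(), "-32099");
}
```
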
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
pub struct Error {
pub code: ErrorCode,
pub message: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub data: Option<Value>,
}
impl Error {
pub fn invalid_params<M>(message: M) -> Self
where
M: Into<String>,
{
Error {
code: ErrorCode::InvalidParams,
message: message.into(),
data: None,
}
}
}
impl std::fmt::Display for Error {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{:?}: {}", self.code, self.message)
}
}
impl std::error::Error for Error {}
// https://www.jsonrpc.org/specification#request_object
/// Request ID
#[derive(Debug, PartialEq, Clone, Hash, Eq, Deserialize, Serialize)]
#[serde(untagged)]
pub enum Id {
Null,
Num(u64),
Str(String),
}
/// Protocol Version
#[derive(Debug, PartialEq, Clone, Copy, Hash, Eq)]
pub enum Version {
V2,
}
impl Serialize for Version {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
match *self {
Version::V2 => serializer.serialize_str("2.0"),
}
}
}
struct VersionVisitor;
impl<'v> Visitor<'v> for VersionVisitor {
type Value = Version;
fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
formatter.write_str("a string")
}
fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>
where
E: de::Error,
{
match value {
"2.0" => Ok(Version::V2),
_ => Err(de::Error::custom("invalid version")),
}
}
}
impl<'de> Deserialize<'de> for Version {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: serde::Deserializer<'de>,
{
deserializer.deserialize_identifier(VersionVisitor)
}
}
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum Params {
None,
Array(Vec<Value>),
Map(serde_json::Map<String, Value>),
}
impl Params {
pub fn parse<D>(self) -> Result<D, Error>
where
D: DeserializeOwned,
{
let value: Value = self.into();
serde_json::from_value(value)
.map_err(|err| Error::invalid_params(format!("Invalid params: {}.", err)))
}
}
impl From<Params> for Value {
fn from(params: Params) -> Value {
match params {
Params::Array(vec) => Value::Array(vec),
Params::Map(map) => Value::Object(map),
Params::None => Value::Null,
}
}
}
#[derive(Clone, Debug, PartialEq, Deserialize, Serialize)]
#[serde(deny_unknown_fields)]
pub struct MethodCall {
pub jsonrpc: Option<Version>,
pub method: String,
#[serde(default = "default_params")]
pub params: Params,
pub id: Id,
}
#[derive(Clone, Debug, PartialEq, Deserialize, Serialize)]
#[serde(deny_unknown_fields)]
pub struct Notification {
pub jsonrpc: Option<Version>,
pub method: String,
#[serde(default = "default_params")]
pub params: Params,
}
#[derive(Clone, Debug, PartialEq, Deserialize, Serialize)]
#[serde(deny_unknown_fields)]
#[serde(untagged)]
pub enum Call {
MethodCall(MethodCall),
Notification(Notification),
Invalid {
// We can attempt to salvage the id out of the invalid request
// for better debugging
#[serde(default = "default_id")]
id: Id,
},
}
fn default_params() -> Params {
Params::None
}
fn default_id() -> Id {
Id::Null
}
impl From<MethodCall> for Call {
fn from(method_call: MethodCall) -> Self {
Call::MethodCall(method_call)
}
}
impl From<Notification> for Call {
fn from(notification: Notification) -> Self {
Call::Notification(notification)
}
}
#[derive(Clone, Debug, PartialEq, Deserialize, Serialize)]
#[serde(deny_unknown_fields)]
#[serde(untagged)]
pub enum Request {
Single(Call),
Batch(Vec<Call>),
}
// https://www.jsonrpc.org/specification#response_object
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
pub struct Success {
#[serde(skip_serializing_if = "Option::is_none")]
pub jsonrpc: Option<Version>,
pub result: Value,
pub id: Id,
}
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
pub struct Failure {
#[serde(skip_serializing_if = "Option::is_none")]
pub jsonrpc: Option<Version>,
pub error: Error,
pub id: Id,
}
// Note that failure comes first because we're not using
// #[serde(deny_unknown_fields)]: we want a response that contains
// both `result` and `error` to be a `Failure`.
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
#[serde(untagged)]
pub enum Output {
Failure(Failure),
Success(Success),
}
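
A test-style sketch of the ordering note above: a reply that carries both `result` and `error` (hypothetical, but the kind of payload lenient servers can produce) deserializes as a `Failure` because that variant is tried first and unknown fields are not denied.

```rust
#[test]
fn ambiguous_output_deserializes_as_failure() {
    let raw = r#"{"jsonrpc":"2.0","result":1,"error":{"code":-32601,"message":"Method not found"},"id":1}"#;
    let output: Output = serde_json::from_str(raw).unwrap();
    assert!(matches!(output, Output::Failure(_)));
}
```
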
impl From<Output> for Result<Value, Error> {
fn from(output: Output) -> Self {
match output {
Output::Success(success) => Ok(success.result),
Output::Failure(failure) => Err(failure.error),
}
}
}
#[derive(Clone, Debug, PartialEq, Deserialize, Serialize)]
#[serde(untagged)]
pub enum Response {
Single(Output),
Batch(Vec<Output>),
}
impl From<Failure> for Response {
fn from(failure: Failure) -> Self {
Response::Single(Output::Failure(failure))
}
}
impl From<Success> for Response {
fn from(success: Success) -> Self {
Response::Single(Output::Success(success))
}
}
#[test]
fn method_call_serialize() {
use serde_json;
let m = MethodCall {
jsonrpc: Some(Version::V2),
method: "update".to_owned(),
params: Params::Array(vec![Value::from(1), Value::from(2)]),
id: Id::Num(1),
};
let serialized = serde_json::to_string(&m).unwrap();
assert_eq!(
serialized,
r#"{"jsonrpc":"2.0","method":"update","params":[1,2],"id":1}"#
);
}
#[test]
fn notification_serialize() {
use serde_json;
let n = Notification {
jsonrpc: Some(Version::V2),
method: "update".to_owned(),
params: Params::Array(vec![Value::from(1), Value::from(2)]),
};
let serialized = serde_json::to_string(&n).unwrap();
assert_eq!(
serialized,
r#"{"jsonrpc":"2.0","method":"update","params":[1,2]}"#
);
}
#[test]
fn success_output_deserialize() {
use serde_json;
let dso = r#"{"jsonrpc":"2.0","result":1,"id":1}"#;
let deserialized: Output = serde_json::from_str(dso).unwrap();
assert_eq!(
deserialized,
Output::Success(Success {
jsonrpc: Some(Version::V2),
result: Value::from(1),
id: Id::Num(1)
})
);
}
#[test]
fn success_output_deserialize_with_extra_fields() {
use serde_json;
// https://github.com/helix-editor/helix/issues/2786
let dso = r#"{"jsonrpc":"2.0","result":1,"id":1,"requestMethod":"initialize"}"#;
let deserialized: Output = serde_json::from_str(dso).unwrap();
assert_eq!(
deserialized,
Output::Success(Success {
jsonrpc: Some(Version::V2),
result: Value::from(1),
id: Id::Num(1)
})
);
}

@ -1,15 +1,16 @@
mod client; mod client;
pub mod jsonrpc;
mod transport; mod transport;
pub use client::Client; pub use client::Client;
pub use futures_executor::block_on; pub use futures_executor::block_on;
pub use jsonrpc::Call; pub use jsonrpc::Call;
pub use jsonrpc_core as jsonrpc;
pub use lsp::{Position, Url}; pub use lsp::{Position, Url};
pub use lsp_types as lsp; pub use lsp_types as lsp;
use futures_util::stream::select_all::SelectAll; use futures_util::stream::select_all::SelectAll;
use helix_core::syntax::LanguageConfiguration; use helix_core::syntax::{LanguageConfiguration, LanguageServerConfiguration};
use tokio::sync::mpsc::UnboundedReceiver;
use std::{ use std::{
collections::{hash_map::Entry, HashMap}, collections::{hash_map::Entry, HashMap},
@ -38,8 +39,8 @@ pub enum Error {
Timeout, Timeout,
#[error("server closed the stream")] #[error("server closed the stream")]
StreamClosed, StreamClosed,
#[error("LSP not defined")] #[error("Unhandled")]
LspNotDefined, Unhandled,
#[error(transparent)] #[error(transparent)]
Other(#[from] anyhow::Error), Other(#[from] anyhow::Error),
} }
@ -56,7 +57,62 @@ pub enum OffsetEncoding {
pub mod util { pub mod util {
use super::*; use super::*;
use helix_core::{Range, Rope, Transaction}; use helix_core::{diagnostic::NumberOrString, Range, Rope, Transaction};
/// Converts a diagnostic in the document to [`lsp::Diagnostic`].
///
/// Panics when [`pos_to_lsp_pos`] would panic for an invalid range on the diagnostic.
pub fn diagnostic_to_lsp_diagnostic(
doc: &Rope,
diag: &helix_core::diagnostic::Diagnostic,
offset_encoding: OffsetEncoding,
) -> lsp::Diagnostic {
use helix_core::diagnostic::Severity::*;
let range = Range::new(diag.range.start, diag.range.end);
let severity = diag.severity.map(|s| match s {
Hint => lsp::DiagnosticSeverity::HINT,
Info => lsp::DiagnosticSeverity::INFORMATION,
Warning => lsp::DiagnosticSeverity::WARNING,
Error => lsp::DiagnosticSeverity::ERROR,
});
let code = match diag.code.clone() {
Some(x) => match x {
NumberOrString::Number(x) => Some(lsp::NumberOrString::Number(x)),
NumberOrString::String(x) => Some(lsp::NumberOrString::String(x)),
},
None => None,
};
let new_tags: Vec<_> = diag
.tags
.iter()
.map(|tag| match tag {
helix_core::diagnostic::DiagnosticTag::Unnecessary => {
lsp::DiagnosticTag::UNNECESSARY
}
helix_core::diagnostic::DiagnosticTag::Deprecated => lsp::DiagnosticTag::DEPRECATED,
})
.collect();
let tags = if !new_tags.is_empty() {
Some(new_tags)
} else {
None
};
// TODO: add support for Diagnostic.data
lsp::Diagnostic::new(
range_to_lsp_range(doc, range, offset_encoding),
severity,
code,
diag.source.clone(),
diag.message.to_owned(),
None,
tags,
)
}
/// Converts [`lsp::Position`] to a position in the document. /// Converts [`lsp::Position`] to a position in the document.
/// ///
@ -141,9 +197,13 @@ pub mod util {
pub fn generate_transaction_from_edits( pub fn generate_transaction_from_edits(
doc: &Rope, doc: &Rope,
edits: Vec<lsp::TextEdit>, mut edits: Vec<lsp::TextEdit>,
offset_encoding: OffsetEncoding, offset_encoding: OffsetEncoding,
) -> Transaction { ) -> Transaction {
// Sort edits by start range, since some LSPs (Omnisharp) send them
// in reverse order.
edits.sort_unstable_by_key(|edit| edit.range.start);
Transaction::change( Transaction::change(
doc, doc,
edits.into_iter().map(|edit| { edits.into_iter().map(|edit| {
@ -169,59 +229,38 @@ pub mod util {
}), }),
) )
} }
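// A small sketch of the sort above: edits arriving in reverse document order
// (as Omnisharp sends them) end up ordered by their start position. The
// positions and replacement texts are arbitrary illustrative values.
#[test]
fn text_edits_sort_by_start_position() {
    let later = lsp::TextEdit::new(
        lsp::Range::new(lsp::Position::new(5, 0), lsp::Position::new(5, 1)),
        "b".to_string(),
    );
    let earlier = lsp::TextEdit::new(
        lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(1, 1)),
        "a".to_string(),
    );
    let mut edits = vec![later.clone(), earlier.clone()];
    edits.sort_unstable_by_key(|edit| edit.range.start);
    assert_eq!(edits, vec![earlier, later]);
}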
/// The result of asking the language server to format the document. This can be turned into a
/// `Transaction`, but the advantage of not doing that straight away is that this one is
/// `Send` and `Sync`.
#[derive(Clone, Debug)]
pub struct LspFormatting {
pub doc: Rope,
pub edits: Vec<lsp::TextEdit>,
pub offset_encoding: OffsetEncoding,
}
impl From<LspFormatting> for Transaction {
fn from(fmt: LspFormatting) -> Transaction {
generate_transaction_from_edits(&fmt.doc, fmt.edits, fmt.offset_encoding)
}
}
} }
#[derive(Debug, PartialEq, Clone)] #[derive(Debug, PartialEq, Clone)]
pub enum MethodCall { pub enum MethodCall {
WorkDoneProgressCreate(lsp::WorkDoneProgressCreateParams), WorkDoneProgressCreate(lsp::WorkDoneProgressCreateParams),
ApplyWorkspaceEdit(lsp::ApplyWorkspaceEditParams), ApplyWorkspaceEdit(lsp::ApplyWorkspaceEditParams),
WorkspaceFolders,
WorkspaceConfiguration(lsp::ConfigurationParams), WorkspaceConfiguration(lsp::ConfigurationParams),
} }
impl MethodCall { impl MethodCall {
pub fn parse(method: &str, params: jsonrpc::Params) -> Option<MethodCall> { pub fn parse(method: &str, params: jsonrpc::Params) -> Result<MethodCall> {
use lsp::request::Request; use lsp::request::Request;
let request = match method { let request = match method {
lsp::request::WorkDoneProgressCreate::METHOD => { lsp::request::WorkDoneProgressCreate::METHOD => {
let params: lsp::WorkDoneProgressCreateParams = params let params: lsp::WorkDoneProgressCreateParams = params.parse()?;
.parse()
.expect("Failed to parse WorkDoneCreate params");
Self::WorkDoneProgressCreate(params) Self::WorkDoneProgressCreate(params)
} }
lsp::request::ApplyWorkspaceEdit::METHOD => { lsp::request::ApplyWorkspaceEdit::METHOD => {
let params: lsp::ApplyWorkspaceEditParams = params let params: lsp::ApplyWorkspaceEditParams = params.parse()?;
.parse()
.expect("Failed to parse ApplyWorkspaceEdit params");
Self::ApplyWorkspaceEdit(params) Self::ApplyWorkspaceEdit(params)
} }
lsp::request::WorkspaceFoldersRequest::METHOD => Self::WorkspaceFolders,
lsp::request::WorkspaceConfiguration::METHOD => { lsp::request::WorkspaceConfiguration::METHOD => {
let params: lsp::ConfigurationParams = params let params: lsp::ConfigurationParams = params.parse()?;
.parse()
.expect("Failed to parse WorkspaceConfiguration params");
Self::WorkspaceConfiguration(params) Self::WorkspaceConfiguration(params)
} }
_ => { _ => {
log::warn!("unhandled lsp request: {}", method); return Err(Error::Unhandled);
return None;
} }
}; };
Some(request) Ok(request)
} }
} }
@ -236,42 +275,34 @@ pub enum Notification {
} }
impl Notification { impl Notification {
pub fn parse(method: &str, params: jsonrpc::Params) -> Option<Notification> { pub fn parse(method: &str, params: jsonrpc::Params) -> Result<Notification> {
use lsp::notification::Notification as _; use lsp::notification::Notification as _;
let notification = match method { let notification = match method {
lsp::notification::Initialized::METHOD => Self::Initialized, lsp::notification::Initialized::METHOD => Self::Initialized,
lsp::notification::PublishDiagnostics::METHOD => { lsp::notification::PublishDiagnostics::METHOD => {
let params: lsp::PublishDiagnosticsParams = params let params: lsp::PublishDiagnosticsParams = params.parse()?;
.parse()
.expect("Failed to parse PublishDiagnostics params");
// TODO: need to loop over diagnostics and distinguish them by URI
Self::PublishDiagnostics(params) Self::PublishDiagnostics(params)
} }
lsp::notification::ShowMessage::METHOD => { lsp::notification::ShowMessage::METHOD => {
let params: lsp::ShowMessageParams = params.parse().ok()?; let params: lsp::ShowMessageParams = params.parse()?;
Self::ShowMessage(params) Self::ShowMessage(params)
} }
lsp::notification::LogMessage::METHOD => { lsp::notification::LogMessage::METHOD => {
let params: lsp::LogMessageParams = params.parse().ok()?; let params: lsp::LogMessageParams = params.parse()?;
Self::LogMessage(params) Self::LogMessage(params)
} }
lsp::notification::Progress::METHOD => { lsp::notification::Progress::METHOD => {
let params: lsp::ProgressParams = params.parse().ok()?; let params: lsp::ProgressParams = params.parse()?;
Self::ProgressMessage(params) Self::ProgressMessage(params)
} }
_ => { _ => {
log::error!("unhandled LSP notification: {}", method); return Err(Error::Unhandled);
return None;
} }
}; };
Some(notification) Ok(notification)
} }
} }
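// Sketch of the new failure mode: a notification method we don't handle now
// yields `Error::Unhandled` instead of being silently dropped (the method name
// below is made up).
#[test]
fn unknown_notification_method_is_unhandled() {
    let result = Notification::parse("window/doesNotExist", jsonrpc::Params::None);
    assert!(matches!(result, Err(Error::Unhandled)));
}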
@ -305,56 +336,50 @@ impl Registry {
.map(|(_, client)| client.as_ref()) .map(|(_, client)| client.as_ref())
} }
pub fn get(&mut self, language_config: &LanguageConfiguration) -> Result<Arc<Client>> { pub fn restart(
&mut self,
language_config: &LanguageConfiguration,
) -> Result<Option<Arc<Client>>> {
let config = match &language_config.language_server { let config = match &language_config.language_server {
Some(config) => config, Some(config) => config,
None => return Err(Error::LspNotDefined), None => return Ok(None),
}; };
match self.inner.entry(language_config.scope.clone()) { let scope = language_config.scope.clone();
Entry::Occupied(entry) => Ok(entry.get().1.clone()),
Entry::Vacant(entry) => { match self.inner.entry(scope) {
Entry::Vacant(_) => Ok(None),
Entry::Occupied(mut entry) => {
// initialize a new client // initialize a new client
let id = self.counter.fetch_add(1, Ordering::Relaxed); let id = self.counter.fetch_add(1, Ordering::Relaxed);
let (client, incoming, initialize_notify) = Client::start(
&config.command, let NewClientResult(client, incoming) = start_client(id, language_config, config)?;
&config.args,
language_config.config.clone(),
language_config.roots.clone(),
id,
)?;
self.incoming.push(UnboundedReceiverStream::new(incoming)); self.incoming.push(UnboundedReceiverStream::new(incoming));
let client = Arc::new(client);
// Initialize the client asynchronously entry.insert((id, client.clone()));
let _client = client.clone();
tokio::spawn(async move {
use futures_util::TryFutureExt;
let value = _client
.capabilities
.get_or_try_init(|| {
_client
.initialize()
.map_ok(|response| response.capabilities)
})
.await;
if let Err(e) = value { Ok(Some(client))
log::error!("failed to initialize language server: {}", e); }
return; }
} }
// next up, notify<initialized> pub fn get(&mut self, language_config: &LanguageConfiguration) -> Result<Option<Arc<Client>>> {
_client let config = match &language_config.language_server {
.notify::<lsp::notification::Initialized>(lsp::InitializedParams {}) Some(config) => config,
.await None => return Ok(None),
.unwrap(); };
initialize_notify.notify_one(); match self.inner.entry(language_config.scope.clone()) {
}); Entry::Occupied(entry) => Ok(Some(entry.get().1.clone())),
Entry::Vacant(entry) => {
// initialize a new client
let id = self.counter.fetch_add(1, Ordering::Relaxed);
let NewClientResult(client, incoming) = start_client(id, language_config, config)?;
self.incoming.push(UnboundedReceiverStream::new(incoming));
entry.insert((id, client.clone())); entry.insert((id, client.clone()));
Ok(client) Ok(Some(client))
} }
} }
} }
@ -390,7 +415,7 @@ impl LspProgressMap {
Self::default() Self::default()
} }
/// Returns a map of all tokens coresponding to the lanaguage server with `id`. /// Returns a map of all tokens corresponding to the language server with `id`.
pub fn progress_map(&self, id: usize) -> Option<&HashMap<lsp::ProgressToken, ProgressStatus>> { pub fn progress_map(&self, id: usize) -> Option<&HashMap<lsp::ProgressToken, ProgressStatus>> {
self.0.get(&id) self.0.get(&id)
} }
@ -428,7 +453,7 @@ impl LspProgressMap {
self.0.get_mut(&id).and_then(|vals| vals.remove(token)) self.0.get_mut(&id).and_then(|vals| vals.remove(token))
} }
/// Updates the progess of `token` for server with `id` to `status`, returns the value replaced or `None`. /// Updates the progress of `token` for server with `id` to `status`, returns the value replaced or `None`.
pub fn update( pub fn update(
&mut self, &mut self,
id: usize, id: usize,
@ -442,6 +467,56 @@ impl LspProgressMap {
} }
} }
struct NewClientResult(Arc<Client>, UnboundedReceiver<(usize, Call)>);
/// start_client takes both a LanguageConfiguration and a LanguageServerConfiguration to ensure that
/// it is only called when a language server is actually configured for the language.
fn start_client(
id: usize,
config: &LanguageConfiguration,
ls_config: &LanguageServerConfiguration,
) -> Result<NewClientResult> {
let (client, incoming, initialize_notify) = Client::start(
&ls_config.command,
&ls_config.args,
config.config.clone(),
&config.roots,
id,
ls_config.timeout,
)?;
let client = Arc::new(client);
// Initialize the client asynchronously
let _client = client.clone();
tokio::spawn(async move {
use futures_util::TryFutureExt;
let value = _client
.capabilities
.get_or_try_init(|| {
_client
.initialize()
.map_ok(|response| response.capabilities)
})
.await;
if let Err(e) = value {
log::error!("failed to initialize language server: {}", e);
return;
}
// next up, notify<initialized>
_client
.notify::<lsp::notification::Initialized>(lsp::InitializedParams {})
.await
.unwrap();
initialize_notify.notify_one();
});
Ok(NewClientResult(client, incoming))
}
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::{lsp, util::*, OffsetEncoding}; use super::{lsp, util::*, OffsetEncoding};

@ -1,6 +1,5 @@
use crate::{Error, Result}; use crate::{jsonrpc, Error, Result};
use anyhow::Context; use anyhow::Context;
use jsonrpc_core as jsonrpc;
use log::{error, info}; use log::{error, info};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use serde_json::Value; use serde_json::Value;
@ -215,13 +214,7 @@ impl Transport {
} }
}; };
let tx = self if let Some(tx) = self.pending_requests.lock().await.remove(&id) {
.pending_requests
.lock()
.await
.remove(&id)
.expect("pending_request with id not found!");
match tx.send(result).await { match tx.send(result).await {
Ok(_) => (), Ok(_) => (),
Err(_) => error!( Err(_) => error!(
@ -229,6 +222,13 @@ impl Transport {
id id
), ),
}; };
} else {
log::error!(
"Discarding Language Server response without a request (id={:?}) {:?}",
id,
result
);
}
Ok(()) Ok(())
} }

@ -10,6 +10,7 @@ repository = "https://github.com/helix-editor/helix"
homepage = "https://helix-editor.com" homepage = "https://helix-editor.com"
include = ["src/**/*", "README.md"] include = ["src/**/*", "README.md"]
default-run = "hx" default-run = "hx"
rust-version = "1.57"
[package.metadata.nix] [package.metadata.nix]
build = true build = true
@ -17,6 +18,7 @@ app = true
[features] [features]
unicode-lines = ["helix-core/unicode-lines"] unicode-lines = ["helix-core/unicode-lines"]
integration = []
[[bin]] [[bin]]
name = "hx" name = "hx"
@ -30,18 +32,17 @@ helix-dap = { version = "0.6", path = "../helix-dap" }
helix-loader = { version = "0.6", path = "../helix-loader" } helix-loader = { version = "0.6", path = "../helix-loader" }
anyhow = "1" anyhow = "1"
once_cell = "1.10" once_cell = "1.15"
which = "4.2" which = "4.2"
tokio = { version = "1", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot"] } tokio = { version = "1", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot"] }
num_cpus = "1"
tui = { path = "../helix-tui", package = "helix-tui", default-features = false, features = ["crossterm"] } tui = { path = "../helix-tui", package = "helix-tui", default-features = false, features = ["crossterm"] }
crossterm = { version = "0.23", features = ["event-stream"] } crossterm = { version = "0.25", features = ["event-stream"] }
signal-hook = "0.3" signal-hook = "0.3"
tokio-stream = "0.1" tokio-stream = "0.1"
futures-util = { version = "0.3", features = ["std", "async-await"], default-features = false } futures-util = { version = "0.3", features = ["std", "async-await"], default-features = false }
arc-swap = { version = "1.5.0" } arc-swap = { version = "1.5.1" }
# Logging # Logging
fern = "0.6" fern = "0.6"
@ -63,14 +64,16 @@ serde_json = "1.0"
serde = { version = "1.0", features = ["derive"] } serde = { version = "1.0", features = ["derive"] }
# ripgrep for global search # ripgrep for global search
grep-regex = "0.1.9" grep-regex = "0.1.10"
grep-searcher = "0.1.8" grep-searcher = "0.1.10"
# Remove once retain_mut lands in stable rust
retain_mut = "0.1.7"
[target.'cfg(not(windows))'.dependencies] # https://github.com/vorner/signal-hook/issues/100 [target.'cfg(not(windows))'.dependencies] # https://github.com/vorner/signal-hook/issues/100
signal-hook-tokio = { version = "0.3", features = ["futures-v0_3"] } signal-hook-tokio = { version = "0.3", features = ["futures-v0_3"] }
[build-dependencies] [build-dependencies]
helix-loader = { version = "0.6", path = "../helix-loader" } helix-loader = { version = "0.6", path = "../helix-loader" }
[dev-dependencies]
smallvec = "1.9"
indoc = "1.0.6"
tempfile = "3.3.0"

@ -19,7 +19,8 @@ fn main() {
if std::env::var("HELIX_DISABLE_AUTO_GRAMMAR_BUILD").is_err() { if std::env::var("HELIX_DISABLE_AUTO_GRAMMAR_BUILD").is_err() {
fetch_grammars().expect("Failed to fetch tree-sitter grammars"); fetch_grammars().expect("Failed to fetch tree-sitter grammars");
build_grammars().expect("Failed to compile tree-sitter grammars"); build_grammars(Some(std::env::var("TARGET").unwrap()))
.expect("Failed to compile tree-sitter grammars");
} }
println!("cargo:rerun-if-changed=../runtime/grammars/"); println!("cargo:rerun-if-changed=../runtime/grammars/");

@ -1,16 +1,18 @@
use arc_swap::{access::Map, ArcSwap}; use arc_swap::{access::Map, ArcSwap};
use futures_util::Stream;
use helix_core::{ use helix_core::{
config::{default_syntax_loader, user_syntax_loader}, config::{default_syntax_loader, user_syntax_loader},
diagnostic::{DiagnosticTag, NumberOrString},
pos_at_coords, syntax, Selection, pos_at_coords, syntax, Selection,
}; };
use helix_lsp::{lsp, util::lsp_pos_to_pos, LspProgressMap}; use helix_lsp::{lsp, util::lsp_pos_to_pos, LspProgressMap};
use helix_view::{align_view, editor::ConfigEvent, theme, Align, Editor}; use helix_view::{align_view, editor::ConfigEvent, theme, tree::Layout, Align, Editor};
use serde_json::json; use serde_json::json;
use crate::{ use crate::{
args::Args, args::Args,
commands::apply_workspace_edit, commands::apply_workspace_edit,
compositor::Compositor, compositor::{Compositor, Event},
config::Config, config::Config,
job::Jobs, job::Jobs,
keymap::Keymaps, keymap::Keymaps,
@ -24,10 +26,13 @@ use std::{
time::{Duration, Instant}, time::{Duration, Instant},
}; };
use anyhow::Error; use anyhow::{Context, Error};
use crossterm::{ use crossterm::{
event::{DisableMouseCapture, EnableMouseCapture, Event, EventStream}, event::{
DisableBracketedPaste, DisableMouseCapture, EnableBracketedPaste, EnableMouseCapture,
Event as CrosstermEvent,
},
execute, terminal, execute, terminal,
tty::IsTty, tty::IsTty,
}; };
@ -39,9 +44,11 @@ use {
#[cfg(windows)] #[cfg(windows)]
type Signals = futures_util::stream::Empty<()>; type Signals = futures_util::stream::Empty<()>;
const LSP_DEADLINE: Duration = Duration::from_millis(16);
pub struct Application { pub struct Application {
compositor: Compositor, compositor: Compositor,
editor: Editor, pub editor: Editor,
config: Arc<ArcSwap<Config>>, config: Arc<ArcSwap<Config>>,
@ -53,18 +60,58 @@ pub struct Application {
signals: Signals, signals: Signals,
jobs: Jobs, jobs: Jobs,
lsp_progress: LspProgressMap, lsp_progress: LspProgressMap,
last_render: Instant,
}
#[cfg(feature = "integration")]
fn setup_integration_logging() {
let level = std::env::var("HELIX_LOG_LEVEL")
.map(|lvl| lvl.parse().unwrap())
.unwrap_or(log::LevelFilter::Info);
// Separate file config so we can include year, month and day in file logs
let _ = fern::Dispatch::new()
.format(|out, message, record| {
out.finish(format_args!(
"{} {} [{}] {}",
chrono::Local::now().format("%Y-%m-%dT%H:%M:%S%.3f"),
record.target(),
record.level(),
message
))
})
.level(level)
.chain(std::io::stdout())
.apply();
}
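// Note: in integration runs the log level defaults to Info and can be raised via
// the HELIX_LOG_LEVEL environment variable (e.g. "debug"); an unparsable value
// panics on the `parse().unwrap()` above.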
fn restore_term() -> Result<(), Error> {
let mut stdout = stdout();
// reset cursor shape
write!(stdout, "\x1B[0 q")?;
// Ignore errors on disabling, this might trigger on windows if we call
// disable without calling enable previously
let _ = execute!(stdout, DisableMouseCapture);
execute!(
stdout,
DisableBracketedPaste,
terminal::LeaveAlternateScreen
)?;
terminal::disable_raw_mode()?;
Ok(())
} }
impl Application { impl Application {
pub fn new(args: Args, config: Config) -> Result<Self, Error> { pub fn new(args: Args, config: Config) -> Result<Self, Error> {
use helix_view::editor::Action; #[cfg(feature = "integration")]
let mut compositor = Compositor::new()?; setup_integration_logging();
let size = compositor.size();
let conf_dir = helix_loader::config_dir(); use helix_view::editor::Action;
let theme_loader = let theme_loader = std::sync::Arc::new(theme::Loader::new(
std::sync::Arc::new(theme::Loader::new(&conf_dir, &helix_loader::runtime_dir())); &helix_loader::config_dir(),
&helix_loader::runtime_dir(),
));
let true_color = config.editor.true_color || crate::true_color(); let true_color = config.editor.true_color || crate::true_color();
let theme = config let theme = config
@ -80,13 +127,7 @@ impl Application {
.ok() .ok()
.filter(|theme| (true_color || theme.is_16_color())) .filter(|theme| (true_color || theme.is_16_color()))
}) })
.unwrap_or_else(|| { .unwrap_or_else(|| theme_loader.default_theme(true_color));
if true_color {
theme_loader.default()
} else {
theme_loader.base16_default()
}
});
let syn_loader_conf = user_syntax_loader().unwrap_or_else(|err| { let syn_loader_conf = user_syntax_loader().unwrap_or_else(|err| {
eprintln!("Bad language config: {}", err); eprintln!("Bad language config: {}", err);
@ -98,9 +139,10 @@ impl Application {
}); });
let syn_loader = std::sync::Arc::new(syntax::Loader::new(syn_loader_conf)); let syn_loader = std::sync::Arc::new(syntax::Loader::new(syn_loader_conf));
let mut compositor = Compositor::new().context("build compositor")?;
let config = Arc::new(ArcSwap::from_pointee(config)); let config = Arc::new(ArcSwap::from_pointee(config));
let mut editor = Editor::new( let mut editor = Editor::new(
size, compositor.size(),
theme_loader.clone(), theme_loader.clone(),
syn_loader.clone(), syn_loader.clone(),
Box::new(Map::new(Arc::clone(&config), |config: &Config| { Box::new(Map::new(Arc::clone(&config), |config: &Config| {
@ -115,30 +157,45 @@ impl Application {
compositor.push(editor_view); compositor.push(editor_view);
if args.load_tutor { if args.load_tutor {
let path = helix_loader::runtime_dir().join("tutor.txt"); let path = helix_loader::runtime_dir().join("tutor");
editor.open(path, Action::VerticalSplit)?; editor.open(&path, Action::VerticalSplit)?;
// Unset path to prevent accidentally saving to the original tutor file. // Unset path to prevent accidentally saving to the original tutor file.
doc_mut!(editor).set_path(None)?; doc_mut!(editor).set_path(None)?;
} else if !args.files.is_empty() { } else if !args.files.is_empty() {
let first = &args.files[0].0; // we know it's not empty let first = &args.files[0].0; // we know it's not empty
if first.is_dir() { if first.is_dir() {
std::env::set_current_dir(&first)?; std::env::set_current_dir(&first).context("set current dir")?;
editor.new_file(Action::VerticalSplit); editor.new_file(Action::VerticalSplit);
let picker = ui::file_picker(".".into(), &config.load().editor); let picker = ui::file_picker(".".into(), &config.load().editor);
compositor.push(Box::new(overlayed(picker))); compositor.push(Box::new(overlayed(picker)));
} else { } else {
let nr_of_files = args.files.len(); let nr_of_files = args.files.len();
editor.open(first.to_path_buf(), Action::VerticalSplit)?; for (i, (file, pos)) in args.files.into_iter().enumerate() {
for (file, pos) in args.files {
if file.is_dir() { if file.is_dir() {
return Err(anyhow::anyhow!( return Err(anyhow::anyhow!(
"expected a path to file, found a directory. (to open a directory pass it as first argument)" "expected a path to file, found a directory. (to open a directory pass it as first argument)"
)); ));
} else { } else {
let doc_id = editor.open(file, Action::Load)?; // If the user passes in either `--vsplit` or
// `--hsplit` as a command line argument, all the given
// files will be opened according to the selected
// option. If neither of those two arguments are passed
// in, just load the files normally.
let action = match args.split {
_ if i == 0 => Action::VerticalSplit,
Some(Layout::Vertical) => Action::VerticalSplit,
Some(Layout::Horizontal) => Action::HorizontalSplit,
None => Action::Load,
};
let doc_id = editor
.open(&file, action)
.context(format!("open '{}'", file.to_string_lossy()))?;
// with Action::Load all documents have the same view // with Action::Load all documents have the same view
// NOTE: this isn't necessarily true anymore. If
// `--vsplit` or `--hsplit` are used, the file which is
// opened last is focused on.
let view_id = editor.tree.focus; let view_id = editor.tree.focus;
let doc = editor.document_mut(doc_id).unwrap(); let doc = doc_mut!(editor, &doc_id);
let pos = Selection::point(pos_at_coords(doc.text().slice(..), pos, true)); let pos = Selection::point(pos_at_coords(doc.text().slice(..), pos, true));
doc.set_selection(view_id, pos); doc.set_selection(view_id, pos);
} }
@ -149,7 +206,7 @@ impl Application {
let (view, doc) = current!(editor); let (view, doc) = current!(editor);
align_view(doc, view, Align::Center); align_view(doc, view, Align::Center);
} }
} else if stdin().is_tty() { } else if stdin().is_tty() || cfg!(feature = "integration") {
editor.new_file(Action::VerticalSplit); editor.new_file(Action::VerticalSplit);
} else if cfg!(target_os = "macos") { } else if cfg!(target_os = "macos") {
// On Linux and Windows, we allow the output of a command to be piped into the new buffer. // On Linux and Windows, we allow the output of a command to be piped into the new buffer.
@ -167,7 +224,8 @@ impl Application {
#[cfg(windows)] #[cfg(windows)]
let signals = futures_util::stream::empty(); let signals = futures_util::stream::empty();
#[cfg(not(windows))] #[cfg(not(windows))]
let signals = Signals::new(&[signal::SIGTSTP, signal::SIGCONT])?; let signals =
Signals::new(&[signal::SIGTSTP, signal::SIGCONT]).context("build signal handler")?;
let app = Self { let app = Self {
compositor, compositor,
@ -181,40 +239,57 @@ impl Application {
signals, signals,
jobs: Jobs::new(), jobs: Jobs::new(),
lsp_progress: LspProgressMap::new(), lsp_progress: LspProgressMap::new(),
last_render: Instant::now(),
}; };
Ok(app) Ok(app)
} }
fn render(&mut self) { fn render(&mut self) {
let compositor = &mut self.compositor;
let mut cx = crate::compositor::Context { let mut cx = crate::compositor::Context {
editor: &mut self.editor, editor: &mut self.editor,
jobs: &mut self.jobs, jobs: &mut self.jobs,
scroll: None, scroll: None,
}; };
self.compositor.render(&mut cx); compositor.render(&mut cx);
} }
pub async fn event_loop(&mut self) { pub async fn event_loop<S>(&mut self, input_stream: &mut S)
let mut reader = EventStream::new(); where
let mut last_render = Instant::now(); S: Stream<Item = crossterm::Result<crossterm::event::Event>> + Unpin,
let deadline = Duration::from_secs(1) / 60; {
self.render(); self.render();
self.last_render = Instant::now();
loop { loop {
if self.editor.should_close() { if !self.event_loop_until_idle(input_stream).await {
break; break;
} }
}
}
pub async fn event_loop_until_idle<S>(&mut self, input_stream: &mut S) -> bool
where
S: Stream<Item = crossterm::Result<crossterm::event::Event>> + Unpin,
{
#[cfg(feature = "integration")]
let mut idle_handled = false;
loop {
if self.editor.should_close() {
return false;
}
use futures_util::StreamExt; use futures_util::StreamExt;
tokio::select! { tokio::select! {
biased; biased;
event = reader.next() => { Some(event) = input_stream.next() => {
self.handle_terminal_events(event) self.handle_terminal_events(event);
} }
Some(signal) = self.signals.next() => { Some(signal) = self.signals.next() => {
self.handle_signals(signal).await; self.handle_signals(signal).await;
@ -223,9 +298,10 @@ impl Application {
self.handle_language_server_message(call, id).await; self.handle_language_server_message(call, id).await;
// limit render calls for fast language server messages // limit render calls for fast language server messages
let last = self.editor.language_servers.incoming.is_empty(); let last = self.editor.language_servers.incoming.is_empty();
if last || last_render.elapsed() > deadline {
if last || self.last_render.elapsed() > LSP_DEADLINE {
self.render(); self.render();
last_render = Instant::now(); self.last_render = Instant::now();
} }
} }
Some(payload) = self.editor.debugger_events.next() => { Some(payload) = self.editor.debugger_events.next() => {
@ -250,9 +326,25 @@ impl Application {
// idle timeout // idle timeout
self.editor.clear_idle_timer(); self.editor.clear_idle_timer();
self.handle_idle_timeout(); self.handle_idle_timeout();
#[cfg(feature = "integration")]
{
idle_handled = true;
} }
} }
} }
// for integration tests only, reset the idle timer after every
// event to make a signal when test events are done processing
#[cfg(feature = "integration")]
{
if idle_handled {
return true;
}
self.editor.reset_idle_timer();
}
}
} }
pub fn handle_config_events(&mut self, config_event: ConfigEvent) { pub fn handle_config_events(&mut self, config_event: ConfigEvent) {
@ -264,41 +356,50 @@ impl Application {
// the Application can apply it. // the Application can apply it.
ConfigEvent::Update(editor_config) => { ConfigEvent::Update(editor_config) => {
let mut app_config = (*self.config.load().clone()).clone(); let mut app_config = (*self.config.load().clone()).clone();
app_config.editor = editor_config; app_config.editor = *editor_config;
self.config.store(Arc::new(app_config)); self.config.store(Arc::new(app_config));
} }
} }
}
fn refresh_config(&mut self) { // Update all the relevant members in the editor after updating
let config = Config::load(helix_loader::config_file()).unwrap_or_else(|err| { // the configuration.
self.editor.set_error(err.to_string()); self.editor.refresh_config();
Config::default() }
});
// Refresh theme /// Refresh theme after config change
fn refresh_theme(&mut self, config: &Config) {
if let Some(theme) = config.theme.clone() { if let Some(theme) = config.theme.clone() {
let true_color = self.true_color(); let true_color = self.true_color();
self.editor.set_theme( match self.theme_loader.load(&theme) {
self.theme_loader Ok(theme) => {
.load(&theme) if true_color || theme.is_16_color() {
.map_err(|e| { self.editor.set_theme(theme);
log::warn!("failed to load theme `{}` - {}", theme, e);
e
})
.ok()
.filter(|theme| (true_color || theme.is_16_color()))
.unwrap_or_else(|| {
if true_color {
self.theme_loader.default()
} else { } else {
self.theme_loader.base16_default() self.editor
.set_error("theme requires truecolor support, which is not available");
}
}
Err(err) => {
let err_string = format!("failed to load theme `{}` - {}", theme, err);
self.editor.set_error(err_string);
} }
}),
);
} }
}
}
fn refresh_config(&mut self) {
match Config::load_default() {
Ok(config) => {
self.refresh_theme(&config);
// Store new config
self.config.store(Arc::new(config)); self.config.store(Arc::new(config));
} }
Err(err) => {
self.editor.set_error(err.to_string());
}
}
}
fn true_color(&self) -> bool { fn true_color(&self) -> bool {
self.config.load().editor.true_color || crate::true_color() self.config.load().editor.true_color || crate::true_color()
@ -314,7 +415,7 @@ impl Application {
match signal { match signal {
signal::SIGTSTP => { signal::SIGTSTP => {
self.compositor.save_cursor(); self.compositor.save_cursor();
self.restore_term().unwrap(); restore_term().unwrap();
low_level::emulate_default_handler(signal::SIGTSTP).unwrap(); low_level::emulate_default_handler(signal::SIGTSTP).unwrap();
} }
signal::SIGCONT => { signal::SIGCONT => {
@ -346,23 +447,20 @@ impl Application {
} }
} }
pub fn handle_terminal_events(&mut self, event: Option<Result<Event, crossterm::ErrorKind>>) { pub fn handle_terminal_events(&mut self, event: Result<CrosstermEvent, crossterm::ErrorKind>) {
let mut cx = crate::compositor::Context { let mut cx = crate::compositor::Context {
editor: &mut self.editor, editor: &mut self.editor,
jobs: &mut self.jobs, jobs: &mut self.jobs,
scroll: None, scroll: None,
}; };
// Handle key events // Handle key events
let should_redraw = match event { let should_redraw = match event.unwrap() {
Some(Ok(Event::Resize(width, height))) => { CrosstermEvent::Resize(width, height) => {
self.compositor.resize(width, height); self.compositor.resize(width, height);
self.compositor self.compositor
.handle_event(Event::Resize(width, height), &mut cx) .handle_event(&Event::Resize(width, height), &mut cx)
} }
Some(Ok(event)) => self.compositor.handle_event(event, &mut cx), event => self.compositor.handle_event(&event.into(), &mut cx),
Some(Err(x)) => panic!("{}", x),
None => panic!(),
}; };
if should_redraw && !self.editor.should_close() { if should_redraw && !self.editor.should_close() {
@ -380,8 +478,14 @@ impl Application {
match call { match call {
Call::Notification(helix_lsp::jsonrpc::Notification { method, params, .. }) => { Call::Notification(helix_lsp::jsonrpc::Notification { method, params, .. }) => {
let notification = match Notification::parse(&method, params) { let notification = match Notification::parse(&method, params) {
Some(notification) => notification, Ok(notification) => notification,
None => return, Err(err) => {
log::error!(
"received malformed notification from Language Server: {}",
err
);
return;
}
}; };
match notification { match notification {
@ -411,15 +515,20 @@ impl Application {
let language_id = let language_id =
doc.language_id().map(ToOwned::to_owned).unwrap_or_default(); doc.language_id().map(ToOwned::to_owned).unwrap_or_default();
let url = match doc.url() {
Some(url) => url,
None => continue, // skip documents with no path
};
tokio::spawn(language_server.text_document_did_open( tokio::spawn(language_server.text_document_did_open(
doc.url().unwrap(), url,
doc.version(), doc.version(),
doc.text(), doc.text(),
language_id, language_id,
)); ));
} }
} }
Notification::PublishDiagnostics(params) => { Notification::PublishDiagnostics(mut params) => {
let path = params.uri.to_file_path().unwrap(); let path = params.uri.to_file_path().unwrap();
let doc = self.editor.document_by_path_mut(&path); let doc = self.editor.document_by_path_mut(&path);
@ -429,15 +538,17 @@ impl Application {
let diagnostics = params let diagnostics = params
.diagnostics .diagnostics
.into_iter() .iter()
.filter_map(|diagnostic| { .filter_map(|diagnostic| {
use helix_core::{ use helix_core::diagnostic::{Diagnostic, Range, Severity::*};
diagnostic::{Range, Severity::*},
Diagnostic,
};
use lsp::DiagnosticSeverity; use lsp::DiagnosticSeverity;
let language_server = doc.language_server().unwrap(); let language_server = if let Some(language_server) = doc.language_server() {
language_server
} else {
log::warn!("Discarding diagnostic because language server is not initialized: {:?}", diagnostic);
return None;
};
// TODO: convert inside server // TODO: convert inside server
let start = if let Some(start) = lsp_pos_to_pos( let start = if let Some(start) = lsp_pos_to_pos(
@ -482,19 +593,59 @@ impl Application {
} }
}; };
let code = match diagnostic.code.clone() {
Some(x) => match x {
lsp::NumberOrString::Number(x) => {
Some(NumberOrString::Number(x))
}
lsp::NumberOrString::String(x) => {
Some(NumberOrString::String(x))
}
},
None => None,
};
let tags = if let Some(ref tags) = diagnostic.tags {
let new_tags = tags.iter().filter_map(|tag| {
match *tag {
lsp::DiagnosticTag::DEPRECATED => Some(DiagnosticTag::Deprecated),
lsp::DiagnosticTag::UNNECESSARY => Some(DiagnosticTag::Unnecessary),
_ => None
}
}).collect();
new_tags
} else {
Vec::new()
};
Some(Diagnostic { Some(Diagnostic {
range: Range { start, end }, range: Range { start, end },
line: diagnostic.range.start.line as usize, line: diagnostic.range.start.line as usize,
message: diagnostic.message, message: diagnostic.message.clone(),
severity, severity,
// code code,
// source tags,
source: diagnostic.source.clone()
}) })
}) })
.collect(); .collect();
doc.set_diagnostics(diagnostics); doc.set_diagnostics(diagnostics);
} }
// Sort diagnostics first by severity and then by line numbers.
// Note: The `lsp::DiagnosticSeverity` enum is already defined in order of decreasing severity, so this sort puts the most severe diagnostics first.
params
.diagnostics
.sort_unstable_by_key(|d| (d.severity, d.range.start));
// Insert the original lsp::Diagnostics here because we may have no open document
// for the diagnostic message and so we can't calculate the exact position.
// When using them later in the diagnostics picker, we calculate them on-demand.
self.editor
.diagnostics
.insert(params.uri, params.diagnostics);
} }
Notification::ShowMessage(params) => { Notification::ShowMessage(params) => {
log::warn!("unhandled window/showMessage: {:?}", params); log::warn!("unhandled window/showMessage: {:?}", params);
@ -594,14 +745,22 @@ impl Application {
method, params, id, .. method, params, id, ..
}) => { }) => {
let call = match MethodCall::parse(&method, params) { let call = match MethodCall::parse(&method, params) {
Some(call) => call, Ok(call) => call,
None => { Err(helix_lsp::Error::Unhandled) => {
error!("Method not found {}", method); error!("Language Server: Method not found {}", method);
return;
}
Err(err) => {
log::error!(
"received malformed method call from Language Server: {}: {}",
method,
err
);
return; return;
} }
}; };
match call { let reply = match call {
MethodCall::WorkDoneProgressCreate(params) => { MethodCall::WorkDoneProgressCreate(params) => {
self.lsp_progress.create(server_id, params.token); self.lsp_progress.create(server_id, params.token);
@ -613,16 +772,8 @@ impl Application {
if spinner.is_stopped() { if spinner.is_stopped() {
spinner.start(); spinner.start();
} }
let language_server =
match self.editor.language_servers.get_by_id(server_id) {
Some(language_server) => language_server,
None => {
warn!("can't find language server with id `{}`", server_id);
return;
}
};
tokio::spawn(language_server.reply(id, Ok(serde_json::Value::Null))); Ok(serde_json::Value::Null)
} }
MethodCall::ApplyWorkspaceEdit(params) => { MethodCall::ApplyWorkspaceEdit(params) => {
apply_workspace_edit( apply_workspace_edit(
@ -631,33 +782,19 @@ impl Application {
&params.edit, &params.edit,
); );
let language_server =
match self.editor.language_servers.get_by_id(server_id) {
Some(language_server) => language_server,
None => {
warn!("can't find language server with id `{}`", server_id);
return;
}
};
tokio::spawn(language_server.reply(
id,
Ok(json!(lsp::ApplyWorkspaceEditResponse { Ok(json!(lsp::ApplyWorkspaceEditResponse {
applied: true, applied: true,
failure_reason: None, failure_reason: None,
failed_change: None, failed_change: None,
})), }))
));
} }
MethodCall::WorkspaceConfiguration(params) => { MethodCall::WorkspaceFolders => {
let language_server = let language_server =
match self.editor.language_servers.get_by_id(server_id) { self.editor.language_servers.get_by_id(server_id).unwrap();
Some(language_server) => language_server,
None => { Ok(json!(language_server.workspace_folders()))
warn!("can't find language server with id `{}`", server_id);
return;
} }
}; MethodCall::WorkspaceConfiguration(params) => {
let result: Vec<_> = params let result: Vec<_> = params
.items .items
.iter() .iter()
@ -668,7 +805,12 @@ impl Application {
let doc = self.editor.document_by_path(path)?; let doc = self.editor.document_by_path(path)?;
doc.language_config()?.config.as_ref()? doc.language_config()?.config.as_ref()?
} }
None => language_server.config()?, None => self
.editor
.language_servers
.get_by_id(server_id)
.unwrap()
.config()?,
}; };
if let Some(section) = item.section.as_ref() { if let Some(section) = item.section.as_ref() {
for part in section.split('.') { for part in section.split('.') {
@ -678,9 +820,19 @@ impl Application {
Some(config) Some(config)
}) })
.collect(); .collect();
tokio::spawn(language_server.reply(id, Ok(json!(result)))); Ok(json!(result))
} }
};
let language_server = match self.editor.language_servers.get_by_id(server_id) {
Some(language_server) => language_server,
None => {
warn!("can't find language server with id `{}`", server_id);
return;
} }
};
tokio::spawn(language_server.reply(id, reply));
} }
Call::Invalid { id } => log::error!("LSP invalid method call id={:?}", id), Call::Invalid { id } => log::error!("LSP invalid method call id={:?}", id),
} }
@ -689,26 +841,18 @@ impl Application {
async fn claim_term(&mut self) -> Result<(), Error> { async fn claim_term(&mut self) -> Result<(), Error> {
terminal::enable_raw_mode()?; terminal::enable_raw_mode()?;
let mut stdout = stdout(); let mut stdout = stdout();
execute!(stdout, terminal::EnterAlternateScreen)?; execute!(stdout, terminal::EnterAlternateScreen, EnableBracketedPaste)?;
execute!(stdout, terminal::Clear(terminal::ClearType::All))?;
if self.config.load().editor.mouse { if self.config.load().editor.mouse {
execute!(stdout, EnableMouseCapture)?; execute!(stdout, EnableMouseCapture)?;
} }
Ok(()) Ok(())
} }
fn restore_term(&mut self) -> Result<(), Error> { pub async fn run<S>(&mut self, input_stream: &mut S) -> Result<i32, Error>
let mut stdout = stdout(); where
// reset cursor shape S: Stream<Item = crossterm::Result<crossterm::event::Event>> + Unpin,
write!(stdout, "\x1B[2 q")?; {
// Ignore errors on disabling, this might trigger on windows if we call
// disable without calling enable previously
let _ = execute!(stdout, DisableMouseCapture);
execute!(stdout, terminal::LeaveAlternateScreen)?;
terminal::disable_raw_mode()?;
Ok(())
}
pub async fn run(&mut self) -> Result<i32, Error> {
self.claim_term().await?; self.claim_term().await?;
// Exit the alternate screen and disable raw mode before panicking // Exit the alternate screen and disable raw mode before panicking
@ -716,23 +860,25 @@ impl Application {
std::panic::set_hook(Box::new(move |info| { std::panic::set_hook(Box::new(move |info| {
// We can't handle errors properly inside this closure. And it's // We can't handle errors properly inside this closure. And it's
// probably not a good idea to `unwrap()` inside a panic handler. // probably not a good idea to `unwrap()` inside a panic handler.
// So we just ignore the `Result`s. // So we just ignore the `Result`.
let _ = execute!(std::io::stdout(), DisableMouseCapture); let _ = restore_term();
let _ = execute!(std::io::stdout(), terminal::LeaveAlternateScreen);
let _ = terminal::disable_raw_mode();
hook(info); hook(info);
})); }));
self.event_loop().await; self.event_loop(input_stream).await;
self.close().await?;
restore_term()?;
self.jobs.finish().await; Ok(self.editor.exit_code)
}
pub async fn close(&mut self) -> anyhow::Result<()> {
self.jobs.finish().await?;
if self.editor.close_language_servers(None).await.is_err() { if self.editor.close_language_servers(None).await.is_err() {
log::error!("Timed out waiting for language servers to shutdown"); log::error!("Timed out waiting for language servers to shutdown");
}; };
self.restore_term()?; Ok(())
Ok(self.editor.exit_code)
} }
} }

@ -1,5 +1,6 @@
use anyhow::Result; use anyhow::Result;
use helix_core::Position; use helix_core::Position;
use helix_view::tree::Layout;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
#[derive(Default)] #[derive(Default)]
@ -11,7 +12,10 @@ pub struct Args {
pub load_tutor: bool, pub load_tutor: bool,
pub fetch_grammars: bool, pub fetch_grammars: bool,
pub build_grammars: bool, pub build_grammars: bool,
pub split: Option<Layout>,
pub verbosity: u64, pub verbosity: u64,
pub log_file: Option<PathBuf>,
pub config_file: Option<PathBuf>,
pub files: Vec<(PathBuf, Position)>, pub files: Vec<(PathBuf, Position)>,
} }
@ -28,6 +32,8 @@ impl Args {
"--version" => args.display_version = true, "--version" => args.display_version = true,
"--help" => args.display_help = true, "--help" => args.display_help = true,
"--tutor" => args.load_tutor = true, "--tutor" => args.load_tutor = true,
"--vsplit" => args.split = Some(Layout::Vertical),
"--hsplit" => args.split = Some(Layout::Horizontal),
"--health" => { "--health" => {
args.health = true; args.health = true;
args.health_arg = argv.next_if(|opt| !opt.starts_with('-')); args.health_arg = argv.next_if(|opt| !opt.starts_with('-'));
@ -39,6 +45,14 @@ impl Args {
anyhow::bail!("--grammar must be followed by either 'fetch' or 'build'") anyhow::bail!("--grammar must be followed by either 'fetch' or 'build'")
} }
}, },
"-c" | "--config" => match argv.next().as_deref() {
Some(path) => args.config_file = Some(path.into()),
None => anyhow::bail!("--config must specify a path to read"),
},
"--log" => match argv.next().as_deref() {
Some(path) => args.log_file = Some(path.into()),
None => anyhow::bail!("--log must specify a path to write"),
},
arg if arg.starts_with("--") => { arg if arg.starts_with("--") => {
anyhow::bail!("unexpected double dash argument: {}", arg) anyhow::bail!("unexpected double dash argument: {}", arg)
} }

File diff suppressed because it is too large

@ -4,13 +4,15 @@ use crate::{
job::{Callback, Jobs}, job::{Callback, Jobs},
ui::{self, overlay::overlayed, FilePicker, Picker, Popup, Prompt, PromptEvent, Text}, ui::{self, overlay::overlayed, FilePicker, Picker, Popup, Prompt, PromptEvent, Text},
}; };
use helix_core::syntax::{DebugArgumentValue, DebugConfigCompletion}; use dap::{StackFrame, Thread, ThreadStates};
use helix_core::syntax::{DebugArgumentValue, DebugConfigCompletion, DebugTemplate};
use helix_dap::{self as dap, Client}; use helix_dap::{self as dap, Client};
use helix_lsp::block_on; use helix_lsp::block_on;
use helix_view::editor::Breakpoint; use helix_view::editor::Breakpoint;
use serde_json::{to_value, Value}; use serde_json::{to_value, Value};
use tokio_stream::wrappers::UnboundedReceiverStream; use tokio_stream::wrappers::UnboundedReceiverStream;
use tui::text::Spans;
use std::collections::HashMap; use std::collections::HashMap;
use std::future::Future; use std::future::Future;
@ -20,6 +22,38 @@ use anyhow::{anyhow, bail};
use helix_view::handlers::dap::{breakpoints_changed, jump_to_stack_frame, select_thread_id}; use helix_view::handlers::dap::{breakpoints_changed, jump_to_stack_frame, select_thread_id};
impl ui::menu::Item for StackFrame {
type Data = ();
fn label(&self, _data: &Self::Data) -> Spans {
self.name.as_str().into() // TODO: include thread_states in the label
}
}
impl ui::menu::Item for DebugTemplate {
type Data = ();
fn label(&self, _data: &Self::Data) -> Spans {
self.name.as_str().into()
}
}
impl ui::menu::Item for Thread {
type Data = ThreadStates;
fn label(&self, thread_states: &Self::Data) -> Spans {
format!(
"{} ({})",
self.name,
thread_states
.get(&self.id)
.map(|state| state.as_str())
.unwrap_or("unknown")
)
.into()
}
}
fn thread_picker( fn thread_picker(
cx: &mut Context, cx: &mut Context,
callback_fn: impl Fn(&mut Editor, &dap::Thread) + Send + 'static, callback_fn: impl Fn(&mut Editor, &dap::Thread) + Send + 'static,
@ -41,17 +75,7 @@ fn thread_picker(
let thread_states = debugger.thread_states.clone(); let thread_states = debugger.thread_states.clone();
let picker = FilePicker::new( let picker = FilePicker::new(
threads, threads,
move |thread| { thread_states,
format!(
"{} ({})",
thread.name,
thread_states
.get(&thread.id)
.map(|state| state.as_str())
.unwrap_or("unknown")
)
.into()
},
move |cx, thread, _action| callback_fn(cx.editor, thread), move |cx, thread, _action| callback_fn(cx.editor, thread),
move |editor, thread| { move |editor, thread| {
let frames = editor.debugger.as_ref()?.stack_frames.get(&thread.id)?; let frames = editor.debugger.as_ref()?.stack_frames.get(&thread.id)?;
@ -192,6 +216,8 @@ pub fn dap_start_impl(
} }
} }
args.insert("cwd", to_value(std::env::current_dir().unwrap())?);
let args = to_value(args).unwrap(); let args = to_value(args).unwrap();
let callback = |_editor: &mut Editor, _compositor: &mut Compositor, _response: Value| { let callback = |_editor: &mut Editor, _compositor: &mut Compositor, _response: Value| {
@ -243,7 +269,7 @@ pub fn dap_launch(cx: &mut Context) {
cx.push_layer(Box::new(overlayed(Picker::new( cx.push_layer(Box::new(overlayed(Picker::new(
templates, templates,
|template| template.name.as_str().into(), (),
|cx, template, _action| { |cx, template, _action| {
let completions = template.completion.clone(); let completions = template.completion.clone();
let name = template.name.clone(); let name = template.name.clone();
@ -475,7 +501,7 @@ pub fn dap_variables(cx: &mut Context) {
for scope in scopes.iter() { for scope in scopes.iter() {
// use helix_view::graphics::Style; // use helix_view::graphics::Style;
use tui::text::{Span, Spans}; use tui::text::Span;
let response = block_on(debugger.variables(scope.variables_reference)); let response = block_on(debugger.variables(scope.variables_reference));
variables.push(Spans::from(Span::styled( variables.push(Spans::from(Span::styled(
@ -556,7 +582,7 @@ pub fn dap_edit_condition(cx: &mut Context) {
None => return, None => return,
}; };
let callback = Box::pin(async move { let callback = Box::pin(async move {
let call: Callback = Box::new(move |_editor, compositor| { let call: Callback = Box::new(move |editor, compositor| {
let mut prompt = Prompt::new( let mut prompt = Prompt::new(
"condition:".into(), "condition:".into(),
None, None,
@ -581,7 +607,7 @@ pub fn dap_edit_condition(cx: &mut Context) {
}, },
); );
if let Some(condition) = breakpoint.condition { if let Some(condition) = breakpoint.condition {
prompt.insert_str(&condition) prompt.insert_str(&condition, editor)
} }
compositor.push(Box::new(prompt)); compositor.push(Box::new(prompt));
}); });
@ -598,7 +624,7 @@ pub fn dap_edit_log(cx: &mut Context) {
None => return, None => return,
}; };
let callback = Box::pin(async move { let callback = Box::pin(async move {
let call: Callback = Box::new(move |_editor, compositor| { let call: Callback = Box::new(move |editor, compositor| {
let mut prompt = Prompt::new( let mut prompt = Prompt::new(
"log-message:".into(), "log-message:".into(),
None, None,
@ -622,7 +648,7 @@ pub fn dap_edit_log(cx: &mut Context) {
}, },
); );
if let Some(log_message) = breakpoint.log_message { if let Some(log_message) = breakpoint.log_message {
prompt.insert_str(&log_message); prompt.insert_str(&log_message, editor);
} }
compositor.push(Box::new(prompt)); compositor.push(Box::new(prompt));
}); });
@ -652,7 +678,7 @@ pub fn dap_switch_stack_frame(cx: &mut Context) {
let picker = FilePicker::new( let picker = FilePicker::new(
frames, frames,
|frame| frame.name.as_str().into(), // TODO: include thread_states in the label (),
move |cx, frame, _action| { move |cx, frame, _action| {
let debugger = debugger!(cx.editor); let debugger = debugger!(cx.editor);
// TODO: this should be simpler to find // TODO: this should be simpler to find

@ -1,21 +1,30 @@
use helix_lsp::{ use helix_lsp::{
block_on, lsp, block_on,
util::{lsp_pos_to_pos, lsp_range_to_range, range_to_lsp_range}, lsp::{self, DiagnosticSeverity, NumberOrString},
util::{diagnostic_to_lsp_diagnostic, lsp_pos_to_pos, lsp_range_to_range, range_to_lsp_range},
OffsetEncoding, OffsetEncoding,
}; };
use tui::text::{Span, Spans};
use super::{align_view, push_jump, Align, Context, Editor}; use super::{align_view, push_jump, Align, Context, Editor, Open};
use helix_core::Selection; use helix_core::{path, Selection};
use helix_view::editor::Action; use helix_view::{editor::Action, theme::Style};
use crate::{ use crate::{
compositor::{self, Compositor}, compositor::{self, Compositor},
ui::{self, overlay::overlayed, FileLocation, FilePicker, Popup, PromptEvent}, ui::{
self, lsp::SignatureHelp, overlay::overlayed, FileLocation, FilePicker, Popup, PromptEvent,
},
}; };
use std::borrow::Cow; use std::{borrow::Cow, collections::BTreeMap, path::PathBuf, sync::Arc};
/// Gets the language server that is attached to a document and,
/// if it is not active, displays a status message. Using this macro
/// in a context where the editor automatically queries the LSP
/// (instead of when the user explicitly does so via a keybind like
/// `gd`) will spam the "LSP inactive" status message confusingly.
#[macro_export] #[macro_export]
macro_rules! language_server { macro_rules! language_server {
($editor:expr, $doc:expr) => { ($editor:expr, $doc:expr) => {
@ -29,6 +38,112 @@ macro_rules! language_server {
}; };
} }
impl ui::menu::Item for lsp::Location {
/// Current working directory.
type Data = PathBuf;
fn label(&self, cwdir: &Self::Data) -> Spans {
let file: Cow<'_, str> = (self.uri.scheme() == "file")
.then(|| {
self.uri
.to_file_path()
.map(|path| {
// strip root prefix
path.strip_prefix(&cwdir)
.map(|path| path.to_path_buf())
.unwrap_or(path)
})
.map(|path| Cow::from(path.to_string_lossy().into_owned()))
.ok()
})
.flatten()
.unwrap_or_else(|| self.uri.as_str().into());
let line = self.range.start.line;
format!("{}:{}", file, line).into()
}
}
impl ui::menu::Item for lsp::SymbolInformation {
/// Path to the currently focused document
type Data = Option<lsp::Url>;
fn label(&self, current_doc_path: &Self::Data) -> Spans {
if current_doc_path.as_ref() == Some(&self.location.uri) {
self.name.as_str().into()
} else {
match self.location.uri.to_file_path() {
Ok(path) => {
let relative_path = helix_core::path::get_relative_path(path.as_path())
.to_string_lossy()
.into_owned();
format!("{} ({})", &self.name, relative_path).into()
}
Err(_) => format!("{} ({})", &self.name, &self.location.uri).into(),
}
}
}
}
struct DiagnosticStyles {
hint: Style,
info: Style,
warning: Style,
error: Style,
}
struct PickerDiagnostic {
url: lsp::Url,
diag: lsp::Diagnostic,
}
impl ui::menu::Item for PickerDiagnostic {
type Data = (DiagnosticStyles, DiagnosticsFormat);
fn label(&self, (styles, format): &Self::Data) -> Spans {
let mut style = self
.diag
.severity
.map(|s| match s {
DiagnosticSeverity::HINT => styles.hint,
DiagnosticSeverity::INFORMATION => styles.info,
DiagnosticSeverity::WARNING => styles.warning,
DiagnosticSeverity::ERROR => styles.error,
_ => Style::default(),
})
.unwrap_or_default();
// remove background as it is distracting in the picker list
style.bg = None;
let code = self
.diag
.code
.as_ref()
.map(|c| match c {
NumberOrString::Number(n) => n.to_string(),
NumberOrString::String(s) => s.to_string(),
})
.map(|code| format!(" ({})", code))
.unwrap_or_default();
let path = match format {
DiagnosticsFormat::HideSourcePath => String::new(),
DiagnosticsFormat::ShowSourcePath => {
let path = path::get_truncated_path(self.url.path())
.to_string_lossy()
.into_owned();
format!("{}: ", path)
}
};
Spans::from(vec![
Span::raw(path),
Span::styled(&self.diag.message, style),
Span::styled(code, style),
])
}
}
fn location_to_file_location(location: &lsp::Location) -> FileLocation { fn location_to_file_location(location: &lsp::Location) -> FileLocation {
let path = location.uri.to_file_path().unwrap(); let path = location.uri.to_file_path().unwrap();
let line = Some(( let line = Some((
@ -39,18 +154,31 @@ fn location_to_file_location(location: &lsp::Location) -> FileLocation {
} }
// TODO: share with symbol picker(symbol.location) // TODO: share with symbol picker(symbol.location)
// TODO: need to use push_jump() before?
fn jump_to_location( fn jump_to_location(
editor: &mut Editor, editor: &mut Editor,
location: &lsp::Location, location: &lsp::Location,
offset_encoding: OffsetEncoding, offset_encoding: OffsetEncoding,
action: Action, action: Action,
) { ) {
let path = location let (view, doc) = current!(editor);
.uri push_jump(view, doc);
.to_file_path()
.expect("unable to convert URI to filepath"); let path = match location.uri.to_file_path() {
let _id = editor.open(path, action).expect("editor.open failed"); Ok(path) => path,
Err(_) => {
let err = format!("unable to convert URI to filepath: {}", location.uri);
editor.set_error(err);
return;
}
};
match editor.open(&path, action) {
Ok(_) => (),
Err(err) => {
let err = format!("failed to open path: {:?}: {:?}", location.uri, err);
editor.set_error(err);
return;
}
}
let (view, doc) = current!(editor); let (view, doc) = current!(editor);
let definition_pos = location.range.start; let definition_pos = location.range.start;
// TODO: convert inside server // TODO: convert inside server
@ -70,26 +198,30 @@ fn sym_picker(
offset_encoding: OffsetEncoding, offset_encoding: OffsetEncoding,
) -> FilePicker<lsp::SymbolInformation> { ) -> FilePicker<lsp::SymbolInformation> {
// TODO: drop current_path comparison and instead use workspace: bool flag? // TODO: drop current_path comparison and instead use workspace: bool flag?
let current_path2 = current_path.clone();
FilePicker::new( FilePicker::new(
symbols, symbols,
move |symbol| { current_path.clone(),
if current_path.as_ref() == Some(&symbol.location.uri) {
symbol.name.as_str().into()
} else {
let path = symbol.location.uri.to_file_path().unwrap();
let relative_path = helix_core::path::get_relative_path(path.as_path())
.to_string_lossy()
.into_owned();
format!("{} ({})", &symbol.name, relative_path).into()
}
},
move |cx, symbol, action| { move |cx, symbol, action| {
if current_path2.as_ref() == Some(&symbol.location.uri) { let (view, doc) = current!(cx.editor);
push_jump(cx.editor); push_jump(view, doc);
} else {
let path = symbol.location.uri.to_file_path().unwrap(); if current_path.as_ref() != Some(&symbol.location.uri) {
cx.editor.open(path, action).expect("editor.open failed"); let uri = &symbol.location.uri;
let path = match uri.to_file_path() {
Ok(path) => path,
Err(_) => {
let err = format!("unable to convert URI to filepath: {}", uri);
log::error!("{}", err);
cx.editor.set_error(err);
return;
}
};
if let Err(err) = cx.editor.open(&path, action) {
let err = format!("failed to open document: {}: {}", uri, err);
log::error!("{}", err);
cx.editor.set_error(err);
return;
}
} }
let (view, doc) = current!(cx.editor); let (view, doc) = current!(cx.editor);
@ -108,6 +240,69 @@ fn sym_picker(
.truncate_start(false) .truncate_start(false)
} }
#[derive(Copy, Clone, PartialEq)]
enum DiagnosticsFormat {
ShowSourcePath,
HideSourcePath,
}
fn diag_picker(
cx: &Context,
diagnostics: BTreeMap<lsp::Url, Vec<lsp::Diagnostic>>,
current_path: Option<lsp::Url>,
format: DiagnosticsFormat,
offset_encoding: OffsetEncoding,
) -> FilePicker<PickerDiagnostic> {
// TODO: drop current_path comparison and instead use workspace: bool flag?
// flatten the map to a vec of (url, diag) pairs
let mut flat_diag = Vec::new();
for (url, diags) in diagnostics {
flat_diag.reserve(diags.len());
for diag in diags {
flat_diag.push(PickerDiagnostic {
url: url.clone(),
diag,
});
}
}
let styles = DiagnosticStyles {
hint: cx.editor.theme.get("hint"),
info: cx.editor.theme.get("info"),
warning: cx.editor.theme.get("warning"),
error: cx.editor.theme.get("error"),
};
FilePicker::new(
flat_diag,
(styles, format),
move |cx, PickerDiagnostic { url, diag }, action| {
if current_path.as_ref() == Some(url) {
let (view, doc) = current!(cx.editor);
push_jump(view, doc);
} else {
let path = url.to_file_path().unwrap();
cx.editor.open(&path, action).expect("editor.open failed");
}
let (view, doc) = current!(cx.editor);
if let Some(range) = lsp_range_to_range(doc.text(), diag.range, offset_encoding) {
// we flip the range so that the cursor sits on the start of the symbol
// (for example start of the function).
doc.set_selection(view.id, Selection::single(range.head, range.anchor));
align_view(doc, view, Align::Center);
}
},
move |_editor, PickerDiagnostic { url, diag }| {
let location = lsp::Location::new(url.clone(), diag.range);
Some(location_to_file_location(&location))
},
)
.truncate_start(false)
}
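
The diag_picker above first flattens the per-file diagnostics map into one list of (url, diagnostic) entries so the picker can show them as individual rows. A minimal standalone sketch of just that flattening step, with plain Strings standing in for lsp::Url and lsp::Diagnostic (not the actual helix types):

    use std::collections::BTreeMap;

    // Plain Strings stand in for lsp::Url and lsp::Diagnostic here.
    fn flatten(diagnostics: BTreeMap<String, Vec<String>>) -> Vec<(String, String)> {
        let mut flat = Vec::new();
        for (url, diags) in diagnostics {
            flat.reserve(diags.len());
            for diag in diags {
                flat.push((url.clone(), diag));
            }
        }
        flat
    }

    fn main() {
        let mut map = BTreeMap::new();
        map.insert("file:///a.rs".to_string(), vec!["unused import".to_string()]);
        map.insert(
            "file:///b.rs".to_string(),
            vec!["missing semicolon".to_string(), "unknown type".to_string()],
        );
        assert_eq!(flatten(map).len(), 3);
    }
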
pub fn symbol_picker(cx: &mut Context) { pub fn symbol_picker(cx: &mut Context) {
fn nested_to_flat( fn nested_to_flat(
list: &mut Vec<lsp::SymbolInformation>, list: &mut Vec<lsp::SymbolInformation>,
@ -178,11 +373,50 @@ pub fn workspace_symbol_picker(cx: &mut Context) {
) )
} }
pub fn diagnostics_picker(cx: &mut Context) {
let doc = doc!(cx.editor);
let language_server = language_server!(cx.editor, doc);
if let Some(current_url) = doc.url() {
let offset_encoding = language_server.offset_encoding();
let diagnostics = cx
.editor
.diagnostics
.get(&current_url)
.cloned()
.unwrap_or_default();
let picker = diag_picker(
cx,
[(current_url.clone(), diagnostics)].into(),
Some(current_url),
DiagnosticsFormat::HideSourcePath,
offset_encoding,
);
cx.push_layer(Box::new(overlayed(picker)));
}
}
pub fn workspace_diagnostics_picker(cx: &mut Context) {
let doc = doc!(cx.editor);
let language_server = language_server!(cx.editor, doc);
let current_url = doc.url();
let offset_encoding = language_server.offset_encoding();
let diagnostics = cx.editor.diagnostics.clone();
let picker = diag_picker(
cx,
diagnostics,
current_url,
DiagnosticsFormat::ShowSourcePath,
offset_encoding,
);
cx.push_layer(Box::new(overlayed(picker)));
}
impl ui::menu::Item for lsp::CodeActionOrCommand { impl ui::menu::Item for lsp::CodeActionOrCommand {
fn label(&self) -> &str { type Data = ();
fn label(&self, _data: &Self::Data) -> Spans {
match self { match self {
lsp::CodeActionOrCommand::CodeAction(action) => action.title.as_str(), lsp::CodeActionOrCommand::CodeAction(action) => action.title.as_str().into(),
lsp::CodeActionOrCommand::Command(command) => command.title.as_str(), lsp::CodeActionOrCommand::Command(command) => command.title.as_str().into(),
} }
} }
} }
@ -192,15 +426,29 @@ pub fn code_action(cx: &mut Context) {
let language_server = language_server!(cx.editor, doc); let language_server = language_server!(cx.editor, doc);
let range = range_to_lsp_range( let selection_range = doc.selection(view.id).primary();
doc.text(),
doc.selection(view.id).primary(),
language_server.offset_encoding(),
);
let future = language_server.code_actions(doc.identifier(), range);
let offset_encoding = language_server.offset_encoding(); let offset_encoding = language_server.offset_encoding();
let range = range_to_lsp_range(doc.text(), selection_range, offset_encoding);
let future = language_server.code_actions(
doc.identifier(),
range,
// Filter and convert overlapping diagnostics
lsp::CodeActionContext {
diagnostics: doc
.diagnostics()
.iter()
.filter(|&diag| {
selection_range
.overlaps(&helix_core::Range::new(diag.range.start, diag.range.end))
})
.map(|diag| diagnostic_to_lsp_diagnostic(doc.text(), diag, offset_encoding))
.collect(),
only: None,
},
);
cx.callback( cx.callback(
future, future,
move |editor, compositor, response: Option<lsp::CodeActionResponse>| { move |editor, compositor, response: Option<lsp::CodeActionResponse>| {
@ -213,7 +461,7 @@ pub fn code_action(cx: &mut Context) {
return; return;
} }
let mut picker = ui::Menu::new(actions, move |editor, code_action, event| { let mut picker = ui::Menu::new(actions, (), move |editor, code_action, event| {
if event != PromptEvent::Validate { if event != PromptEvent::Validate {
return; return;
} }
@ -243,10 +491,7 @@ pub fn code_action(cx: &mut Context) {
}); });
picker.move_down(); // pre-select the first item picker.move_down(); // pre-select the first item
let popup = Popup::new("code-action", picker).margin(helix_view::graphics::Margin { let popup = Popup::new("code-action", picker);
vertical: 1,
horizontal: 1,
});
compositor.replace_or_push("code-action", popup); compositor.replace_or_push("code-action", popup);
}, },
) )
@ -330,12 +575,27 @@ pub fn apply_workspace_edit(
workspace_edit: &lsp::WorkspaceEdit, workspace_edit: &lsp::WorkspaceEdit,
) { ) {
let mut apply_edits = |uri: &helix_lsp::Url, text_edits: Vec<lsp::TextEdit>| { let mut apply_edits = |uri: &helix_lsp::Url, text_edits: Vec<lsp::TextEdit>| {
let path = uri let path = match uri.to_file_path() {
.to_file_path() Ok(path) => path,
.expect("unable to convert URI to filepath"); Err(_) => {
let err = format!("unable to convert URI to filepath: {}", uri);
log::error!("{}", err);
editor.set_error(err);
return;
}
};
let current_view_id = view!(editor).id; let current_view_id = view!(editor).id;
let doc_id = editor.open(path, Action::Load).unwrap(); let doc_id = match editor.open(&path, Action::Load) {
Ok(doc_id) => doc_id,
Err(err) => {
let err = format!("failed to open document: {}: {}", uri, err);
log::error!("{}", err);
editor.set_error(err);
return;
}
};
let doc = editor let doc = editor
.document_mut(doc_id) .document_mut(doc_id)
.expect("Document for document_changes not found"); .expect("Document for document_changes not found");
@ -367,7 +627,7 @@ pub fn apply_workspace_edit(
log::debug!("workspace changes: {:?}", changes); log::debug!("workspace changes: {:?}", changes);
for (uri, text_edits) in changes { for (uri, text_edits) in changes {
let text_edits = text_edits.to_vec(); let text_edits = text_edits.to_vec();
apply_edits(uri, text_edits); apply_edits(uri, text_edits)
} }
return; return;
// Not sure if it works properly, it'll be safer to just panic here to avoid breaking some parts of code on which code actions will be used // Not sure if it works properly, it'll be safer to just panic here to avoid breaking some parts of code on which code actions will be used
@ -402,8 +662,8 @@ pub fn apply_workspace_edit(
} }
lsp::DocumentChanges::Operations(operations) => { lsp::DocumentChanges::Operations(operations) => {
log::debug!("document changes - operations: {:?}", operations); log::debug!("document changes - operations: {:?}", operations);
for operateion in operations { for operation in operations {
match operateion { match operation {
lsp::DocumentChangeOperation::Op(op) => { lsp::DocumentChangeOperation::Op(op) => {
apply_document_resource_op(op).unwrap(); apply_document_resource_op(op).unwrap();
} }
@ -428,15 +688,14 @@ pub fn apply_workspace_edit(
} }
} }
} }
fn goto_impl( fn goto_impl(
editor: &mut Editor, editor: &mut Editor,
compositor: &mut Compositor, compositor: &mut Compositor,
locations: Vec<lsp::Location>, locations: Vec<lsp::Location>,
offset_encoding: OffsetEncoding, offset_encoding: OffsetEncoding,
) { ) {
push_jump(editor); let cwdir = std::env::current_dir().unwrap_or_default();
let cwdir = std::env::current_dir().expect("couldn't determine current directory");
match locations.as_slice() { match locations.as_slice() {
[location] => { [location] => {
@ -448,26 +707,7 @@ fn goto_impl(
_locations => { _locations => {
let picker = FilePicker::new( let picker = FilePicker::new(
locations, locations,
move |location| { cwdir,
let file: Cow<'_, str> = (location.uri.scheme() == "file")
.then(|| {
location
.uri
.to_file_path()
.map(|path| {
// strip root prefix
path.strip_prefix(&cwdir)
.map(|path| path.to_path_buf())
.unwrap_or(path)
})
.map(|path| Cow::from(path.to_string_lossy().into_owned()))
.ok()
})
.flatten()
.unwrap_or_else(|| location.uri.as_str().into());
let line = location.range.start.line;
format!("{}:{}", file, line).into()
},
move |cx, location, action| { move |cx, location, action| {
jump_to_location(cx.editor, location, offset_encoding, action) jump_to_location(cx.editor, location, offset_encoding, action)
}, },
@ -565,34 +805,122 @@ pub fn goto_reference(cx: &mut Context) {
); );
} }
#[derive(PartialEq)]
pub enum SignatureHelpInvoked {
Manual,
Automatic,
}
pub fn signature_help(cx: &mut Context) { pub fn signature_help(cx: &mut Context) {
signature_help_impl(cx, SignatureHelpInvoked::Manual)
}
pub fn signature_help_impl(cx: &mut Context, invoked: SignatureHelpInvoked) {
let (view, doc) = current!(cx.editor); let (view, doc) = current!(cx.editor);
let language_server = language_server!(cx.editor, doc); let was_manually_invoked = invoked == SignatureHelpInvoked::Manual;
let language_server = match doc.language_server() {
Some(language_server) => language_server,
None => {
// Do not show the message if signature help was invoked
// automatically on backspace, trigger characters, etc.
if was_manually_invoked {
cx.editor
.set_status("Language server not active for current buffer");
}
return;
}
};
let offset_encoding = language_server.offset_encoding(); let offset_encoding = language_server.offset_encoding();
let pos = doc.position(view.id, offset_encoding); let pos = doc.position(view.id, offset_encoding);
let future = language_server.text_document_signature_help(doc.identifier(), pos, None); let future = match language_server.text_document_signature_help(doc.identifier(), pos, None) {
Some(f) => f,
None => return,
};
cx.callback( cx.callback(
future, future,
move |_editor, _compositor, response: Option<lsp::SignatureHelp>| { move |editor, compositor, response: Option<lsp::SignatureHelp>| {
if let Some(signature_help) = response { let config = &editor.config();
log::info!("{:?}", signature_help);
// signatures
// active_signature
// active_parameter
// render as:
// signature if !(config.lsp.auto_signature_help
// ---------- || SignatureHelp::visible_popup(compositor).is_some()
// doc || was_manually_invoked)
{
return;
}
// with active param highlighted let response = match response {
// According to the spec the response should be None if there
// are no signatures, but some servers don't follow this.
Some(s) if !s.signatures.is_empty() => s,
_ => {
compositor.remove(SignatureHelp::ID);
return;
} }
};
let doc = doc!(editor);
let language = doc.language_name().unwrap_or("");
let signature = match response
.signatures
.get(response.active_signature.unwrap_or(0) as usize)
{
Some(s) => s,
None => return,
};
let mut contents = SignatureHelp::new(
signature.label.clone(),
language.to_string(),
Arc::clone(&editor.syn_loader),
);
let signature_doc = if config.lsp.display_signature_help_docs {
signature.documentation.as_ref().map(|doc| match doc {
lsp::Documentation::String(s) => s.clone(),
lsp::Documentation::MarkupContent(markup) => markup.value.clone(),
})
} else {
None
};
contents.set_signature_doc(signature_doc);
let active_param_range = || -> Option<(usize, usize)> {
let param_idx = signature
.active_parameter
.or(response.active_parameter)
.unwrap_or(0) as usize;
let param = signature.parameters.as_ref()?.get(param_idx)?;
match &param.label {
lsp::ParameterLabel::Simple(string) => {
let start = signature.label.find(string.as_str())?;
Some((start, start + string.len()))
}
lsp::ParameterLabel::LabelOffsets([start, end]) => {
// The LS sends offsets based on a UTF-16 string representation,
// but highlighting in helix is done using byte offsets.
use helix_core::str_utils::char_to_byte_idx;
let from = char_to_byte_idx(&signature.label, *start as usize);
let to = char_to_byte_idx(&signature.label, *end as usize);
Some((from, to))
}
}
};
contents.set_active_param_range(active_param_range());
let old_popup = compositor.find_id::<Popup<SignatureHelp>>(SignatureHelp::ID);
let popup = Popup::new(SignatureHelp::ID, contents)
.position(old_popup.and_then(|p| p.get_position()))
.position_bias(Open::Above)
.ignore_escape_key(true);
compositor.replace_or_push(SignatureHelp::ID, popup);
}, },
); );
} }
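
The LabelOffsets branch above notes that servers report parameter offsets in UTF-16 code units while helix highlights by byte offset (the diff maps them with helix_core::str_utils::char_to_byte_idx). A std-only sketch of that kind of conversion, independent of helix_core, with a made-up signature label:

    // Map a UTF-16 code-unit offset into a byte offset of `s` (std only).
    fn utf16_to_byte_idx(s: &str, utf16_idx: usize) -> usize {
        let mut seen = 0;
        for (byte_idx, ch) in s.char_indices() {
            if seen >= utf16_idx {
                return byte_idx;
            }
            seen += ch.len_utf16();
        }
        s.len()
    }

    fn main() {
        let label = "fn greet(name: &str, émoji: char)";
        // Offsets as a server might report them, counted in UTF-16 code units.
        let (start, end) = (21, 32);
        let (from, to) = (utf16_to_byte_idx(label, start), utf16_to_byte_idx(label, end));
        assert_eq!(&label[from..to], "émoji: char");
    }
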
pub fn hover(cx: &mut Context) { pub fn hover(cx: &mut Context) {
let (view, doc) = current!(cx.editor); let (view, doc) = current!(cx.editor);
let language_server = language_server!(cx.editor, doc); let language_server = language_server!(cx.editor, doc);
@ -642,10 +970,23 @@ pub fn hover(cx: &mut Context) {
}, },
); );
} }
pub fn rename_symbol(cx: &mut Context) { pub fn rename_symbol(cx: &mut Context) {
ui::prompt( let (view, doc) = current_ref!(cx.editor);
let text = doc.text().slice(..);
let primary_selection = doc.selection(view.id).primary();
let prefill = if primary_selection.len() > 1 {
primary_selection
} else {
use helix_core::textobject::{textobject_word, TextObject};
textobject_word(text, primary_selection, TextObject::Inside, 1, false)
}
.fragment(text)
.into();
ui::prompt_with_input(
cx, cx,
"rename-to:".into(), "rename-to:".into(),
prefill,
None, None,
ui::completers::none, ui::completers::none,
move |cx: &mut compositor::Context, input: &str, event: PromptEvent| { move |cx: &mut compositor::Context, input: &str, event: PromptEvent| {
@ -660,8 +1001,51 @@ pub fn rename_symbol(cx: &mut Context) {
let pos = doc.position(view.id, offset_encoding); let pos = doc.position(view.id, offset_encoding);
let task = language_server.rename_symbol(doc.identifier(), pos, input.to_string()); let task = language_server.rename_symbol(doc.identifier(), pos, input.to_string());
let edits = block_on(task).unwrap_or_default(); match block_on(task) {
apply_workspace_edit(cx.editor, offset_encoding, &edits); Ok(edits) => apply_workspace_edit(cx.editor, offset_encoding, &edits),
Err(err) => cx.editor.set_error(err.to_string()),
}
},
);
}
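
rename_symbol now prefills the prompt: it reuses the primary selection when it spans more than one character and otherwise falls back to the word under the cursor (via textobject_word). A rough standalone sketch of that rule; word_at is a hypothetical helper, not helix's textobject machinery:

    // `word_at` is a hypothetical helper; helix uses textobject_word instead.
    fn word_at(text: &str, pos: usize) -> &str {
        let is_word = |c: char| c.is_alphanumeric() || c == '_';
        let start = text[..pos].rfind(|c: char| !is_word(c)).map_or(0, |i| i + 1);
        let end = text[pos..].find(|c: char| !is_word(c)).map_or(text.len(), |i| pos + i);
        &text[start..end]
    }

    fn rename_prefill<'a>(text: &'a str, sel_start: usize, sel_end: usize) -> &'a str {
        if sel_end - sel_start > 1 {
            &text[sel_start..sel_end] // multi-char selection: use it as-is
        } else {
            word_at(text, sel_start) // single-char selection: word under cursor
        }
    }

    fn main() {
        let text = "fn jump_to_location(editor: &mut Editor)";
        assert_eq!(rename_prefill(text, 3, 4), "jump_to_location");
        assert_eq!(rename_prefill(text, 3, 19), "jump_to_location");
    }
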
pub fn select_references_to_symbol_under_cursor(cx: &mut Context) {
let (view, doc) = current!(cx.editor);
let language_server = language_server!(cx.editor, doc);
let offset_encoding = language_server.offset_encoding();
let pos = doc.position(view.id, offset_encoding);
let future = language_server.text_document_document_highlight(doc.identifier(), pos, None);
cx.callback(
future,
move |editor, _compositor, response: Option<Vec<lsp::DocumentHighlight>>| {
let document_highlights = match response {
Some(highlights) if !highlights.is_empty() => highlights,
_ => return,
};
let (view, doc) = current!(editor);
let language_server = language_server!(editor, doc);
let offset_encoding = language_server.offset_encoding();
let text = doc.text();
let pos = doc.selection(view.id).primary().head;
// We must find the range that contains our primary cursor to prevent the primary cursor from moving
let mut primary_index = 0;
let ranges = document_highlights
.iter()
.filter_map(|highlight| lsp_range_to_range(text, highlight.range, offset_encoding))
.enumerate()
.map(|(i, range)| {
if range.contains(pos) {
primary_index = i;
}
range
})
.collect();
let selection = Selection::new(ranges, primary_index);
doc.set_selection(view.id, selection);
}, },
); );
} }
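
The loop above tracks which highlight range contains the primary cursor so that building the new selection does not move it. A standalone sketch of that bookkeeping, using a hypothetical Range type (helix's Range has different semantics):

    // Hypothetical Range type for illustration only.
    #[derive(Debug, Clone, Copy)]
    struct Range {
        anchor: usize,
        head: usize,
    }

    impl Range {
        fn contains(&self, pos: usize) -> bool {
            let (from, to) = (self.anchor.min(self.head), self.anchor.max(self.head));
            (from..=to).contains(&pos)
        }
    }

    // Index of the range holding the cursor; falls back to 0 like the diff does.
    fn primary_index(ranges: &[Range], cursor: usize) -> usize {
        ranges.iter().position(|r| r.contains(cursor)).unwrap_or(0)
    }

    fn main() {
        let ranges = [
            Range { anchor: 0, head: 4 },
            Range { anchor: 10, head: 14 },
            Range { anchor: 30, head: 34 },
        ];
        assert_eq!(primary_index(&ranges, 12), 1);
    }
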

File diff suppressed because it is too large

@ -4,7 +4,8 @@
use helix_core::Position; use helix_core::Position;
use helix_view::graphics::{CursorKind, Rect}; use helix_view::graphics::{CursorKind, Rect};
use crossterm::event::Event; #[cfg(feature = "integration")]
use tui::backend::TestBackend;
use tui::buffer::Buffer as Surface; use tui::buffer::Buffer as Surface;
pub type Callback = Box<dyn FnOnce(&mut Compositor, &mut Context)>; pub type Callback = Box<dyn FnOnce(&mut Compositor, &mut Context)>;
@ -15,9 +16,10 @@ pub enum EventResult {
Consumed(Option<Callback>), Consumed(Option<Callback>),
} }
use crate::job::Jobs;
use helix_view::Editor; use helix_view::Editor;
use crate::job::Jobs; pub use helix_view::input::Event;
pub struct Context<'a> { pub struct Context<'a> {
pub editor: &'a mut Editor, pub editor: &'a mut Editor,
@ -27,7 +29,7 @@ pub struct Context<'a> {
pub trait Component: Any + AnyComponent { pub trait Component: Any + AnyComponent {
/// Process input events, return true if handled. /// Process input events, return true if handled.
fn handle_event(&mut self, _event: Event, _ctx: &mut Context) -> EventResult { fn handle_event(&mut self, _event: &Event, _ctx: &mut Context) -> EventResult {
EventResult::Ignored(None) EventResult::Ignored(None)
} }
// , args: () // , args: ()
@ -63,11 +65,21 @@ pub trait Component: Any + AnyComponent {
} }
} }
use anyhow::Error; use anyhow::Context as AnyhowContext;
use tui::backend::Backend;
#[cfg(not(feature = "integration"))]
use tui::backend::CrosstermBackend;
#[cfg(not(feature = "integration"))]
use std::io::stdout; use std::io::stdout;
use tui::backend::{Backend, CrosstermBackend};
#[cfg(not(feature = "integration"))]
type Terminal = tui::terminal::Terminal<CrosstermBackend<std::io::Stdout>>; type Terminal = tui::terminal::Terminal<CrosstermBackend<std::io::Stdout>>;
#[cfg(feature = "integration")]
type Terminal = tui::terminal::Terminal<TestBackend>;
pub struct Compositor { pub struct Compositor {
layers: Vec<Box<dyn Component>>, layers: Vec<Box<dyn Component>>,
terminal: Terminal, terminal: Terminal,
@ -76,9 +88,14 @@ pub struct Compositor {
} }
impl Compositor { impl Compositor {
pub fn new() -> Result<Self, Error> { pub fn new() -> anyhow::Result<Self> {
#[cfg(not(feature = "integration"))]
let backend = CrosstermBackend::new(stdout()); let backend = CrosstermBackend::new(stdout());
let terminal = Terminal::new(backend)?;
#[cfg(feature = "integration")]
let backend = TestBackend::new(120, 150);
let terminal = Terminal::new(backend).context("build terminal")?;
Ok(Self { Ok(Self {
layers: Vec::new(), layers: Vec::new(),
terminal, terminal,
@ -132,10 +149,18 @@ impl Compositor {
self.layers.pop() self.layers.pop()
} }
pub fn handle_event(&mut self, event: Event, cx: &mut Context) -> bool { pub fn remove(&mut self, id: &'static str) -> Option<Box<dyn Component>> {
let idx = self
.layers
.iter()
.position(|layer| layer.id() == Some(id))?;
Some(self.layers.remove(idx))
}
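
Compositor::remove above is just "find the first layer whose id matches, then take it out of the stack". A tiny standalone sketch of that pattern, with id strings standing in for boxed components:

    // Id strings stand in for Box<dyn Component> here.
    fn remove_by_id(layers: &mut Vec<String>, id: &str) -> Option<String> {
        let idx = layers.iter().position(|layer| layer.as_str() == id)?;
        Some(layers.remove(idx))
    }

    fn main() {
        let mut layers = vec![
            "editor".to_string(),
            "signature-help".to_string(),
            "prompt".to_string(),
        ];
        let removed = remove_by_id(&mut layers, "signature-help");
        println!("removed {:?}, remaining {:?}", removed, layers);
    }
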
pub fn handle_event(&mut self, event: &Event, cx: &mut Context) -> bool {
// If it is a key event and a macro is being recorded, push the key event to the recording. // If it is a key event and a macro is being recorded, push the key event to the recording.
if let (Event::Key(key), Some((_, keys))) = (event, &mut cx.editor.macro_recording) { if let (Event::Key(key), Some((_, keys))) = (event, &mut cx.editor.macro_recording) {
keys.push(key.into()); keys.push(*key);
} }
let mut callbacks = Vec::new(); let mut callbacks = Vec::new();

@ -1,6 +1,10 @@
use crossterm::style::{Color, Print, Stylize}; use crossterm::{
style::{Color, Print, Stylize},
tty::IsTty,
};
use helix_core::config::{default_syntax_loader, user_syntax_loader}; use helix_core::config::{default_syntax_loader, user_syntax_loader};
use helix_loader::grammar::load_runtime_file; use helix_loader::grammar::load_runtime_file;
use helix_view::clipboard::get_clipboard_provider;
use std::io::Write; use std::io::Write;
#[derive(Copy, Clone)] #[derive(Copy, Clone)]
@ -49,6 +53,7 @@ pub fn general() -> std::io::Result<()> {
let lang_file = helix_loader::lang_config_file(); let lang_file = helix_loader::lang_config_file();
let log_file = helix_loader::log_file(); let log_file = helix_loader::log_file();
let rt_dir = helix_loader::runtime_dir(); let rt_dir = helix_loader::runtime_dir();
let clipboard_provider = get_clipboard_provider();
if config_file.exists() { if config_file.exists() {
writeln!(stdout, "Config file: {}", config_file.display())?; writeln!(stdout, "Config file: {}", config_file.display())?;
@ -73,6 +78,34 @@ pub fn general() -> std::io::Result<()> {
if rt_dir.read_dir().ok().map(|it| it.count()) == Some(0) { if rt_dir.read_dir().ok().map(|it| it.count()) == Some(0) {
writeln!(stdout, "{}", "Runtime directory is empty.".red())?; writeln!(stdout, "{}", "Runtime directory is empty.".red())?;
} }
writeln!(stdout, "Clipboard provider: {}", clipboard_provider.name())?;
Ok(())
}
pub fn clipboard() -> std::io::Result<()> {
let stdout = std::io::stdout();
let mut stdout = stdout.lock();
let board = get_clipboard_provider();
match board.name().as_ref() {
"none" => {
writeln!(
stdout,
"{}",
"System clipboard provider: Not installed".red()
)?;
writeln!(
stdout,
" {}",
"For troubleshooting system clipboard issues, refer".red()
)?;
writeln!(stdout, " {}",
"https://github.com/helix-editor/helix/wiki/Troubleshooting#copypaste-fromto-system-clipboard-not-working"
.red().underlined())?;
}
name => writeln!(stdout, "System clipboard provider: {}", name)?,
}
Ok(()) Ok(())
} }
@ -106,17 +139,19 @@ pub fn languages_all() -> std::io::Result<()> {
let terminal_cols = crossterm::terminal::size().map(|(c, _)| c).unwrap_or(80); let terminal_cols = crossterm::terminal::size().map(|(c, _)| c).unwrap_or(80);
let column_width = terminal_cols as usize / headings.len(); let column_width = terminal_cols as usize / headings.len();
let is_terminal = std::io::stdout().is_tty();
let column = |item: &str, color: Color| { let column = |item: &str, color: Color| {
let data = format!( let mut data = format!(
"{:width$}", "{:width$}",
item.get(..column_width - 2) item.get(..column_width - 2)
.map(|s| format!("{}…", s)) .map(|s| format!("{}…", s))
.unwrap_or_else(|| item.to_string()), .unwrap_or_else(|| item.to_string()),
width = column_width, width = column_width,
) );
.stylize() if is_terminal {
.with(color); data = data.stylize().with(color).to_string();
}
// We can't directly use println!() because of // We can't directly use println!() because of
// https://github.com/crossterm-rs/crossterm/issues/589 // https://github.com/crossterm-rs/crossterm/issues/589
@ -134,8 +169,8 @@ pub fn languages_all() -> std::io::Result<()> {
let check_binary = |cmd: Option<String>| match cmd { let check_binary = |cmd: Option<String>| match cmd {
Some(cmd) => match which::which(&cmd) { Some(cmd) => match which::which(&cmd) {
Ok(_) => column(&cmd, Color::Green), Ok(_) => column(&format!("✓ {}", cmd), Color::Green),
Err(_) => column(&cmd, Color::Red), Err(_) => column(&format!("✘ {}", cmd), Color::Red),
}, },
None => column("None", Color::Yellow), None => column("None", Color::Yellow),
}; };
@ -154,8 +189,8 @@ pub fn languages_all() -> std::io::Result<()> {
for ts_feat in TsFeature::all() { for ts_feat in TsFeature::all() {
match load_runtime_file(&lang.language_id, ts_feat.runtime_filename()).is_ok() { match load_runtime_file(&lang.language_id, ts_feat.runtime_filename()).is_ok() {
true => column("Found", Color::Green), true => column("", Color::Green),
false => column("Not Found", Color::Red), false => column("", Color::Red),
} }
} }
@ -263,8 +298,8 @@ fn probe_treesitter_feature(lang: &str, feature: TsFeature) -> std::io::Result<(
let mut stdout = stdout.lock(); let mut stdout = stdout.lock();
let found = match load_runtime_file(lang, feature.runtime_filename()).is_ok() { let found = match load_runtime_file(lang, feature.runtime_filename()).is_ok() {
true => "Found".green(), true => "".green(),
false => "Not found".red(), false => "".red(),
}; };
writeln!(stdout, "{} queries: {}", feature.short_title(), found)?; writeln!(stdout, "{} queries: {}", feature.short_title(), found)?;
@ -273,13 +308,15 @@ fn probe_treesitter_feature(lang: &str, feature: TsFeature) -> std::io::Result<(
pub fn print_health(health_arg: Option<String>) -> std::io::Result<()> { pub fn print_health(health_arg: Option<String>) -> std::io::Result<()> {
match health_arg.as_deref() { match health_arg.as_deref() {
Some("all") => languages_all()?, Some("languages") => languages_all()?,
Some(lang) => language(lang.to_string())?, Some("clipboard") => clipboard()?,
None => { None | Some("all") => {
general()?; general()?;
clipboard()?;
writeln!(std::io::stdout().lock())?; writeln!(std::io::stdout().lock())?;
languages_all()?; languages_all()?;
} }
Some(lang) => language(lang.to_string())?,
} }
Ok(()) Ok(())
} }

@ -2,7 +2,7 @@ use helix_view::Editor;
use crate::compositor::Compositor; use crate::compositor::Compositor;
use futures_util::future::{self, BoxFuture, Future, FutureExt}; use futures_util::future::{BoxFuture, Future, FutureExt};
use futures_util::stream::{FuturesUnordered, StreamExt}; use futures_util::stream::{FuturesUnordered, StreamExt};
pub type Callback = Box<dyn FnOnce(&mut Editor, &mut Compositor) + Send>; pub type Callback = Box<dyn FnOnce(&mut Editor, &mut Compositor) + Send>;
@ -93,8 +93,21 @@ impl Jobs {
} }
/// Blocks until all the jobs that need to be waited on are done. /// Blocks until all the jobs that need to be waited on are done.
pub async fn finish(&mut self) { pub async fn finish(&mut self) -> anyhow::Result<()> {
let wait_futures = std::mem::take(&mut self.wait_futures); log::debug!("waiting on jobs...");
wait_futures.for_each(|_| future::ready(())).await let mut wait_futures = std::mem::take(&mut self.wait_futures);
while let (Some(job), tail) = wait_futures.into_future().await {
match job {
Ok(_) => {
wait_futures = tail;
}
Err(e) => {
self.wait_futures = tail;
return Err(e);
}
}
}
Ok(())
} }
} }
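
The reworked Jobs::finish drains the FuturesUnordered one completed job at a time and stops at the first error instead of discarding results. A standalone sketch of that draining pattern (assumes the futures crate and uses String as the error type, unlike the anyhow-based original):

    use futures::executor::block_on;
    use futures::future::BoxFuture;
    use futures::stream::{FuturesUnordered, StreamExt};

    type Job = BoxFuture<'static, Result<(), String>>;

    fn job(res: Result<(), String>) -> Job {
        Box::pin(async move { res })
    }

    // Await jobs as they complete; bail out on the first failure.
    async fn finish(mut jobs: FuturesUnordered<Job>) -> Result<(), String> {
        while let (Some(result), tail) = jobs.into_future().await {
            result?;
            jobs = tail;
        }
        Ok(())
    }

    fn main() {
        let jobs: FuturesUnordered<Job> = FuturesUnordered::new();
        jobs.push(job(Ok(())));
        jobs.push(job(Err("job failed".to_string())));
        println!("{:?}", block_on(finish(jobs)));
    }
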

Some files were not shown because too many files have changed in this diff
