Merge branch 'master' into help-command

pull/997/head
Omnikar 3 years ago
commit d7fb82f22b

@@ -3,3 +3,4 @@ watch_file flake.lock
 # try to use flakes, if it fails use normal nix (ie. shell.nix)
 use flake || use nix
+eval "$shellHook"

@@ -1,29 +0,0 @@
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: C-bug
assignees: ''
---

<!-- Your issue may already be reported!
Please search on the issue tracker before creating one. -->

### Reproduction steps

<!-- Ideally provide a key sequence and/or asciinema.org recording. -->

### Environment

- Platform: <!-- macOS / Windows / Linux -->
- Terminal emulator:
- Helix version: <!-- 'hx -V' if using a release, 'git describe' if building from master -->

<details><summary>~/.cache/helix/helix.log</summary>

```
please provide a copy of `~/.cache/helix/helix.log` here if possible, you may need to redact some of the lines
```

</details>

@@ -0,0 +1,67 @@
name: Bug Report
description: Create a report to help us improve
labels: C-bug
body:
  - type: markdown
    attributes:
      value: Thank you for filing a bug report! 🐛
  - type: textarea
    id: problem
    attributes:
      label: Summary
      description: >
        Please provide a short summary of the bug, along with any information
        you feel relevant to replicate the bug.
    validations:
      required: true
  - type: textarea
    id: reproduction-steps
    attributes:
      label: Reproduction Steps
      value: |
        <!-- Ideally provide a key sequence and/or asciinema.org recording. -->

        I tried this:

        1. `hx`

        I expected this to happen:

        Instead, this happened:
  - type: textarea
    id: helix-log
    attributes:
      label: Helix log
      description: See `hx -h` for log file path
      value: |
        <details><summary>~/.cache/helix/helix.log</summary>

        ```
        please provide a copy of `~/.cache/helix/helix.log` here if possible, you may need to redact some of the lines
        ```

        </details>
  - type: input
    id: platform
    attributes:
      label: Platform
      placeholder: Linux / macOS / Windows
    validations:
      required: true
  - type: input
    id: terminal-emulator
    attributes:
      label: Terminal Emulator
      placeholder: wezterm 20220101-133340-7edc5b5a
    validations:
      required: true
  - type: input
    id: helix-version
    attributes:
      label: Helix Version
      description: >
        Helix version (`hx -V` if using a release, `git describe` if building
        from master)
      placeholder: "helix 0.6.0 (c0dbd6dc)"
    validations:
      required: true

@@ -13,9 +13,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout sources
-        uses: actions/checkout@v2
-        with:
-          submodules: true
+        uses: actions/checkout@v3

       - name: Install stable toolchain
         uses: actions-rs/toolchain@v1
@@ -25,22 +23,25 @@ jobs:
           override: true

       - name: Cache cargo registry
-        uses: actions/cache@v2.1.7
+        uses: actions/cache@v3
         with:
           path: ~/.cargo/registry
           key: ${{ runner.os }}-v2-cargo-registry-${{ hashFiles('**/Cargo.lock') }}
+          restore-keys: ${{ runner.os }}-v2-cargo-registry-

       - name: Cache cargo index
-        uses: actions/cache@v2.1.7
+        uses: actions/cache@v3
         with:
           path: ~/.cargo/git
           key: ${{ runner.os }}-v2-cargo-index-${{ hashFiles('**/Cargo.lock') }}
+          restore-keys: ${{ runner.os }}-v2-cargo-index-

       - name: Cache cargo target dir
-        uses: actions/cache@v2.1.7
+        uses: actions/cache@v3
         with:
           path: target
           key: ${{ runner.os }}-v2-cargo-build-target-${{ hashFiles('**/Cargo.lock') }}
+          restore-keys: ${{ runner.os }}-v2-cargo-build-target-

       - name: Run cargo check
         uses: actions-rs/cargo@v1
@@ -52,9 +53,7 @@ jobs:
     runs-on: ${{ matrix.os }}
     steps:
       - name: Checkout sources
-        uses: actions/checkout@v2
-        with:
-          submodules: true
+        uses: actions/checkout@v3

       - name: Install stable toolchain
         uses: actions-rs/toolchain@v1
@@ -64,27 +63,41 @@ jobs:
           override: true

       - name: Cache cargo registry
-        uses: actions/cache@v2.1.7
+        uses: actions/cache@v3
         with:
           path: ~/.cargo/registry
           key: ${{ runner.os }}-v2-cargo-registry-${{ hashFiles('**/Cargo.lock') }}
+          restore-keys: ${{ runner.os }}-v2-cargo-registry-

       - name: Cache cargo index
-        uses: actions/cache@v2.1.7
+        uses: actions/cache@v3
         with:
           path: ~/.cargo/git
           key: ${{ runner.os }}-v2-cargo-index-${{ hashFiles('**/Cargo.lock') }}
+          restore-keys: ${{ runner.os }}-v2-cargo-index-

       - name: Cache cargo target dir
-        uses: actions/cache@v2.1.7
+        uses: actions/cache@v3
         with:
           path: target
           key: ${{ runner.os }}-v2-cargo-build-target-${{ hashFiles('**/Cargo.lock') }}
+          restore-keys: ${{ runner.os }}-v2-cargo-build-target-
+
+      - name: Copy minimal languages config
+        run: cp .github/workflows/languages.toml ./languages.toml
+
+      - name: Cache test tree-sitter grammar
+        uses: actions/cache@v3
+        with:
+          path: runtime/grammars
+          key: ${{ runner.os }}-v2-tree-sitter-grammars-${{ hashFiles('languages.toml') }}
+          restore-keys: ${{ runner.os }}-v2-tree-sitter-grammars-

       - name: Run cargo test
         uses: actions-rs/cargo@v1
         with:
           command: test
+          args: --workspace

     strategy:
       matrix:
@@ -96,9 +109,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout sources
-        uses: actions/checkout@v2
-        with:
-          submodules: true
+        uses: actions/checkout@v3

       - name: Install stable toolchain
         uses: actions-rs/toolchain@v1
@@ -109,22 +120,25 @@ jobs:
           components: rustfmt, clippy

       - name: Cache cargo registry
-        uses: actions/cache@v2.1.7
+        uses: actions/cache@v3
         with:
           path: ~/.cargo/registry
           key: ${{ runner.os }}-v2-cargo-registry-${{ hashFiles('**/Cargo.lock') }}
+          restore-keys: ${{ runner.os }}-v2-cargo-registry-

       - name: Cache cargo index
-        uses: actions/cache@v2.1.7
+        uses: actions/cache@v3
         with:
           path: ~/.cargo/git
           key: ${{ runner.os }}-v2-cargo-index-${{ hashFiles('**/Cargo.lock') }}
+          restore-keys: ${{ runner.os }}-v2-cargo-index-

       - name: Cache cargo target dir
-        uses: actions/cache@v2.1.7
+        uses: actions/cache@v3
         with:
           path: target
           key: ${{ runner.os }}-v2-cargo-build-target-${{ hashFiles('**/Cargo.lock') }}
+          restore-keys: ${{ runner.os }}-v2-cargo-build-target-

       - name: Run cargo fmt
         uses: actions-rs/cargo@v1
@@ -143,9 +157,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout sources
-        uses: actions/checkout@v2
-        with:
-          submodules: true
+        uses: actions/checkout@v3

       - name: Install stable toolchain
         uses: actions-rs/toolchain@v1
@@ -155,22 +167,25 @@ jobs:
           override: true

       - name: Cache cargo registry
-        uses: actions/cache@v2.1.6
+        uses: actions/cache@v3
         with:
           path: ~/.cargo/registry
           key: ${{ runner.os }}-v2-cargo-registry-${{ hashFiles('**/Cargo.lock') }}
+          restore-keys: ${{ runner.os }}-v2-cargo-registry-

       - name: Cache cargo index
-        uses: actions/cache@v2.1.6
+        uses: actions/cache@v3
         with:
           path: ~/.cargo/git
           key: ${{ runner.os }}-v2-cargo-index-${{ hashFiles('**/Cargo.lock') }}
+          restore-keys: ${{ runner.os }}-v2-cargo-index-

       - name: Cache cargo target dir
-        uses: actions/cache@v2.1.6
+        uses: actions/cache@v3
         with:
           path: target
           key: ${{ runner.os }}-v2-cargo-build-target-${{ hashFiles('**/Cargo.lock') }}
+          restore-keys: ${{ runner.os }}-v2-cargo-build-target-

       - name: Generate docs
         uses: actions-rs/cargo@v1

@@ -0,0 +1,26 @@
# Publish the Nix flake outputs to Cachix
name: Cachix
on:
  push:
    branches:
      - master

jobs:
  publish:
    name: Publish Flake
    runs-on: ubuntu-latest
    steps:
      - name: Checkout sources
        uses: actions/checkout@v3

      - name: Install nix
        uses: cachix/install-nix-action@v16

      - name: Authenticate with Cachix
        uses: cachix/cachix-action@v10
        with:
          name: helix
          authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}

      - name: Build nix flake
        run: nix build

@@ -4,12 +4,14 @@ on:
   push:
     branches:
       - master
+    tags:
+      - '*'

 jobs:
   deploy:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3

       - name: Setup mdBook
         uses: peaceiris/actions-mdbook@v1
@@ -19,9 +21,21 @@ jobs:
       - run: mdbook build book

+      - name: Set output directory
+        run: |
+          OUTDIR=$(basename ${{ github.ref }})
+          echo "OUTDIR=$OUTDIR" >> $GITHUB_ENV
+
       - name: Deploy
         uses: peaceiris/actions-gh-pages@v3
-        if: github.ref == 'refs/heads/master'
         with:
           github_token: ${{ secrets.GITHUB_TOKEN }}
           publish_dir: ./book/book
+          destination_dir: ./${{ env.OUTDIR }}
+
+      - name: Deploy stable
+        uses: peaceiris/actions-gh-pages@v3
+        if: startswith(github.ref, 'refs/tags/')
+        with:
+          github_token: ${{ secrets.GITHUB_TOKEN }}
+          publish_dir: ./book/book

@@ -0,0 +1,26 @@
# This languages.toml is used for testing in CI.

[[language]]
name = "rust"
scope = "source.rust"
injection-regex = "rust"
file-types = ["rs"]
comment-token = "//"
roots = ["Cargo.toml", "Cargo.lock"]
indent = { tab-width = 4, unit = "    " }

[[grammar]]
name = "rust"
source = { git = "https://github.com/tree-sitter/tree-sitter-rust", rev = "a360da0a29a19c281d08295a35ecd0544d2da211" }

[[language]]
name = "nix"
scope = "source.nix"
injection-regex = "nix"
file-types = ["nix"]
shebangs = []
roots = []
comment-token = "#"

# A grammar entry is not necessary for this language - it is only used for
# testing TOML merging behavior.

@@ -1,32 +1,81 @@
 name: Release
 on:
-  # schedule:
-  # - cron: '0 0 * * *' # midnight UTC
   push:
     tags:
-      - 'v[0-9]+.[0-9]+.[0-9]+'
-    ## - release
+      - '[0-9]+.[0-9]+'
+      - '[0-9]+.[0-9]+.[0-9]+'

 jobs:
+  fetch-grammars:
+    name: Fetch Grammars
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout sources
+        uses: actions/checkout@v3
+
+      - name: Install stable toolchain
+        uses: actions-rs/toolchain@v1
+        with:
+          profile: minimal
+          toolchain: stable
+          override: true
+
+      - name: Cache cargo registry
+        uses: actions/cache@v3
+        with:
+          path: ~/.cargo/registry
+          key: ${{ runner.os }}-v2-cargo-registry-${{ hashFiles('**/Cargo.lock') }}
+          restore-keys: ${{ runner.os }}-v2-cargo-registry-
+
+      - name: Cache cargo index
+        uses: actions/cache@v3
+        with:
+          path: ~/.cargo/git
+          key: ${{ runner.os }}-v2-cargo-index-${{ hashFiles('**/Cargo.lock') }}
+          restore-keys: ${{ runner.os }}-v2-cargo-index-
+
+      - name: Cache cargo target dir
+        uses: actions/cache@v3
+        with:
+          path: target
+          key: ${{ runner.os }}-v2-cargo-build-target-${{ hashFiles('**/Cargo.lock') }}
+          restore-keys: ${{ runner.os }}-v2-cargo-build-target-
+
+      - name: Fetch tree-sitter grammars
+        uses: actions-rs/cargo@v1
+        env:
+          HELIX_DISABLE_AUTO_GRAMMAR_BUILD: yes
+        with:
+          command: run
+          args: -- --grammar fetch
+
+      - name: Bundle grammars
+        run: tar cJf grammars.tar.xz -C runtime/grammars/sources .
+
+      - uses: actions/upload-artifact@v3
+        with:
+          name: grammars
+          path: grammars.tar.xz
+
   dist:
     name: Dist
+    needs: [fetch-grammars]
     runs-on: ${{ matrix.os }}
     strategy:
       fail-fast: false # don't fail other jobs if one fails
       matrix:
-        build: [x86_64-linux, aarch64-linux, x86_64-macos, x86_64-windows] #, x86_64-win-gnu, win32-msvc
+        build: [x86_64-linux, x86_64-macos, x86_64-windows] #, x86_64-win-gnu, win32-msvc
         include:
           - build: x86_64-linux
             os: ubuntu-20.04
             rust: stable
             target: x86_64-unknown-linux-gnu
             cross: false
-          - build: aarch64-linux
-            os: ubuntu-20.04
-            rust: stable
-            target: aarch64-unknown-linux-gnu
-            cross: true
+          # - build: aarch64-linux
+          #   os: ubuntu-20.04
+          #   rust: stable
+          #   target: aarch64-unknown-linux-gnu
+          #   cross: true
           - build: x86_64-macos
             os: macos-latest
             rust: stable
@@ -52,9 +101,16 @@ jobs:
     steps:
       - name: Checkout sources
-        uses: actions/checkout@v2
-        with:
-          submodules: true
+        uses: actions/checkout@v3
+
+      - name: Download grammars
+        uses: actions/download-artifact@v2
+
+      - name: Move grammars under runtime
+        if: "!startsWith(matrix.os, 'windows')"
+        run: |
+          mkdir -p runtime/grammars/sources
+          tar xJf grammars/grammars.tar.xz -C runtime/grammars/sources

       - name: Install ${{ matrix.rust }} toolchain
         uses: actions-rs/toolchain@v1
@@ -69,7 +125,7 @@ jobs:
         with:
           use-cross: ${{ matrix.cross }}
           command: test
-          args: --release --locked --target ${{ matrix.target }}
+          args: --release --locked --target ${{ matrix.target }} --workspace

       - name: Build release binary
         uses: actions-rs/cargo@v1
@@ -100,9 +156,10 @@ jobs:
           else
             cp "target/${{ matrix.target }}/release/hx" "dist/"
           fi
+          rm -rf runtime/grammars/sources
           cp -r runtime dist

-      - uses: actions/upload-artifact@v2.3.1
+      - uses: actions/upload-artifact@v3
         with:
           name: bins-${{ matrix.build }}
           path: dist
@@ -113,20 +170,14 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout sources
-        uses: actions/checkout@v2
-        with:
-          submodules: false
+        uses: actions/checkout@v3

       - uses: actions/download-artifact@v2
-      # with:
-      #   path: dist
-      # - run: ls -al ./dist
-      - run: ls -al bins-*

       - name: Calculate tag name
         run: |
           name=dev
-          if [[ $GITHUB_REF == refs/tags/v* ]]; then
+          if [[ $GITHUB_REF == refs/tags/* ]]; then
             name=${GITHUB_REF:10}
           fi
           echo ::set-output name=val::$name
@@ -138,8 +189,13 @@ jobs:
         run: |
           set -ex

-          rm -rf tmp
-          mkdir tmp
+          source="$(pwd)"
+          mkdir -p runtime/grammars/sources
+          tar xJf grammars/grammars.tar.xz -C runtime/grammars/sources
+          rm -rf grammars
+          cd "$(mktemp -d)"
+          mv $source/bins-* .

           mkdir dist

           for dir in bins-* ; do
@@ -148,19 +204,22 @@ jobs:
               exe=".exe"
             fi
             pkgname=helix-$TAG-$platform
-            mkdir tmp/$pkgname
-            cp LICENSE README.md tmp/$pkgname
-            mv bins-$platform/runtime tmp/$pkgname/
-            mv bins-$platform/hx$exe tmp/$pkgname
-            chmod +x tmp/$pkgname/hx$exe
+            mkdir $pkgname
+            cp $source/LICENSE $source/README.md $pkgname
+            mv bins-$platform/runtime $pkgname/
+            mv bins-$platform/hx$exe $pkgname
+            chmod +x $pkgname/hx$exe
             if [ "$exe" = "" ]; then
-              tar cJf dist/$pkgname.tar.xz -C tmp $pkgname
+              tar cJf dist/$pkgname.tar.xz $pkgname
             else
-              (cd tmp && 7z a -r ../dist/$pkgname.zip $pkgname)
+              7z a -r dist/$pkgname.zip $pkgname
             fi
           done
+
+          tar cJf dist/helix-$TAG-source.tar.xz -C $source .
+          mv dist $source/

       - name: Upload binaries to release
         uses: svenstaro/upload-release-action@v2
         with:

.gitmodules (deleted, 231 lines)

@@ -1,231 +0,0 @@
[submodule "helix-syntax/languages/tree-sitter-cpp"]
path = helix-syntax/languages/tree-sitter-cpp
url = https://github.com/tree-sitter/tree-sitter-cpp
shallow = true
[submodule "helix-syntax/languages/tree-sitter-javascript"]
path = helix-syntax/languages/tree-sitter-javascript
url = https://github.com/tree-sitter/tree-sitter-javascript
shallow = true
[submodule "helix-syntax/languages/tree-sitter-julia"]
path = helix-syntax/languages/tree-sitter-julia
url = https://github.com/tree-sitter/tree-sitter-julia
shallow = true
[submodule "helix-syntax/languages/tree-sitter-python"]
path = helix-syntax/languages/tree-sitter-python
url = https://github.com/tree-sitter/tree-sitter-python
shallow = true
[submodule "helix-syntax/languages/tree-sitter-typescript"]
path = helix-syntax/languages/tree-sitter-typescript
url = https://github.com/tree-sitter/tree-sitter-typescript
shallow = true
[submodule "helix-syntax/languages/tree-sitter-agda"]
path = helix-syntax/languages/tree-sitter-agda
url = https://github.com/tree-sitter/tree-sitter-agda
shallow = true
[submodule "helix-syntax/languages/tree-sitter-go"]
path = helix-syntax/languages/tree-sitter-go
url = https://github.com/tree-sitter/tree-sitter-go
shallow = true
[submodule "helix-syntax/languages/tree-sitter-ruby"]
path = helix-syntax/languages/tree-sitter-ruby
url = https://github.com/tree-sitter/tree-sitter-ruby
shallow = true
[submodule "helix-syntax/languages/tree-sitter-java"]
path = helix-syntax/languages/tree-sitter-java
url = https://github.com/tree-sitter/tree-sitter-java
shallow = true
[submodule "helix-syntax/languages/tree-sitter-php"]
path = helix-syntax/languages/tree-sitter-php
url = https://github.com/tree-sitter/tree-sitter-php
shallow = true
[submodule "helix-syntax/languages/tree-sitter-html"]
path = helix-syntax/languages/tree-sitter-html
url = https://github.com/tree-sitter/tree-sitter-html
shallow = true
[submodule "helix-syntax/languages/tree-sitter-scala"]
path = helix-syntax/languages/tree-sitter-scala
url = https://github.com/tree-sitter/tree-sitter-scala
shallow = true
[submodule "helix-syntax/languages/tree-sitter-bash"]
path = helix-syntax/languages/tree-sitter-bash
url = https://github.com/tree-sitter/tree-sitter-bash
shallow = true
[submodule "helix-syntax/languages/tree-sitter-rust"]
path = helix-syntax/languages/tree-sitter-rust
url = https://github.com/tree-sitter/tree-sitter-rust
shallow = true
[submodule "helix-syntax/languages/tree-sitter-json"]
path = helix-syntax/languages/tree-sitter-json
url = https://github.com/tree-sitter/tree-sitter-json
shallow = true
[submodule "helix-syntax/languages/tree-sitter-css"]
path = helix-syntax/languages/tree-sitter-css
url = https://github.com/tree-sitter/tree-sitter-css
shallow = true
[submodule "helix-syntax/languages/tree-sitter-c-sharp"]
path = helix-syntax/languages/tree-sitter-c-sharp
url = https://github.com/tree-sitter/tree-sitter-c-sharp
shallow = true
[submodule "helix-syntax/languages/tree-sitter-c"]
path = helix-syntax/languages/tree-sitter-c
url = https://github.com/tree-sitter/tree-sitter-c
shallow = true
[submodule "helix-syntax/languages/tree-sitter-haskell"]
path = helix-syntax/languages/tree-sitter-haskell
url = https://github.com/tree-sitter/tree-sitter-haskell
shallow = true
[submodule "helix-syntax/languages/tree-sitter-swift"]
path = helix-syntax/languages/tree-sitter-swift
url = https://github.com/tree-sitter/tree-sitter-swift
shallow = true
[submodule "helix-syntax/languages/tree-sitter-toml"]
path = helix-syntax/languages/tree-sitter-toml
url = https://github.com/ikatyang/tree-sitter-toml
shallow = true
[submodule "helix-syntax/languages/tree-sitter-elixir"]
path = helix-syntax/languages/tree-sitter-elixir
url = https://github.com/elixir-lang/tree-sitter-elixir
shallow = true
[submodule "helix-syntax/languages/tree-sitter-nix"]
path = helix-syntax/languages/tree-sitter-nix
url = https://github.com/cstrahan/tree-sitter-nix
shallow = true
[submodule "helix-syntax/languages/tree-sitter-latex"]
path = helix-syntax/languages/tree-sitter-latex
url = https://github.com/latex-lsp/tree-sitter-latex
shallow = true
[submodule "helix-syntax/languages/tree-sitter-ledger"]
path = helix-syntax/languages/tree-sitter-ledger
url = https://github.com/cbarrete/tree-sitter-ledger
shallow = true
[submodule "helix-syntax/languages/tree-sitter-protobuf"]
path = helix-syntax/languages/tree-sitter-protobuf
url = https://github.com/yusdacra/tree-sitter-protobuf.git
shallow = true
[submodule "helix-syntax/languages/tree-sitter-ocaml"]
path = helix-syntax/languages/tree-sitter-ocaml
url = https://github.com/tree-sitter/tree-sitter-ocaml
shallow = true
[submodule "helix-syntax/languages/tree-sitter-lua"]
path = helix-syntax/languages/tree-sitter-lua
url = https://github.com/nvim-treesitter/tree-sitter-lua
shallow = true
[submodule "helix-syntax/languages/tree-sitter-yaml"]
path = helix-syntax/languages/tree-sitter-yaml
url = https://github.com/ikatyang/tree-sitter-yaml
shallow = true
[submodule "helix-syntax/languages/tree-sitter-zig"]
path = helix-syntax/languages/tree-sitter-zig
url = https://github.com/maxxnino/tree-sitter-zig
shallow = true
[submodule "helix-syntax/languages/tree-sitter-svelte"]
path = helix-syntax/languages/tree-sitter-svelte
url = https://github.com/Himujjal/tree-sitter-svelte
shallow = true
[submodule "helix-syntax/languages/tree-sitter-vue"]
path = helix-syntax/languages/tree-sitter-vue
url = https://github.com/ikatyang/tree-sitter-vue
shallow = true
[submodule "helix-syntax/languages/tree-sitter-tsq"]
path = helix-syntax/languages/tree-sitter-tsq
url = https://github.com/tree-sitter/tree-sitter-tsq
shallow = true
[submodule "helix-syntax/languages/tree-sitter-cmake"]
path = helix-syntax/languages/tree-sitter-cmake
url = https://github.com/uyha/tree-sitter-cmake
shallow = true
[submodule "helix-syntax/languages/tree-sitter-glsl"]
path = helix-syntax/languages/tree-sitter-glsl
url = https://github.com/theHamsta/tree-sitter-glsl.git
shallow = true
[submodule "helix-syntax/languages/tree-sitter-perl"]
path = helix-syntax/languages/tree-sitter-perl
url = https://github.com/ganezdragon/tree-sitter-perl
shallow = true
[submodule "helix-syntax/languages/tree-sitter-comment"]
path = helix-syntax/languages/tree-sitter-comment
url = https://github.com/stsewd/tree-sitter-comment
shallow = true
[submodule "helix-syntax/languages/tree-sitter-wgsl"]
path = helix-syntax/languages/tree-sitter-wgsl
url = https://github.com/szebniok/tree-sitter-wgsl
shallow = true
[submodule "helix-syntax/languages/tree-sitter-llvm"]
path = helix-syntax/languages/tree-sitter-llvm
url = https://github.com/benwilliamgraham/tree-sitter-llvm
shallow = true
[submodule "helix-syntax/languages/tree-sitter-markdown"]
path = helix-syntax/languages/tree-sitter-markdown
url = https://github.com/MDeiml/tree-sitter-markdown
shallow = true
[submodule "helix-syntax/languages/tree-sitter-dart"]
path = helix-syntax/languages/tree-sitter-dart
url = https://github.com/UserNobody14/tree-sitter-dart.git
shallow = true
[submodule "helix-syntax/languages/tree-sitter-dockerfile"]
path = helix-syntax/languages/tree-sitter-dockerfile
url = https://github.com/camdencheek/tree-sitter-dockerfile.git
shallow = true
[submodule "helix-syntax/languages/tree-sitter-fish"]
path = helix-syntax/languages/tree-sitter-fish
url = https://github.com/ram02z/tree-sitter-fish
shallow = true
[submodule "helix-syntax/languages/tree-sitter-git-commit"]
path = helix-syntax/languages/tree-sitter-git-commit
url = https://github.com/the-mikedavis/tree-sitter-git-commit.git
shallow = true
[submodule "helix-syntax/languages/tree-sitter-llvm-mir"]
path = helix-syntax/languages/tree-sitter-llvm-mir
url = https://github.com/Flakebi/tree-sitter-llvm-mir.git
shallow = true
[submodule "helix-syntax/languages/tree-sitter-git-diff"]
path = helix-syntax/languages/tree-sitter-git-diff
url = https://github.com/the-mikedavis/tree-sitter-git-diff.git
shallow = true
[submodule "helix-syntax/languages/tree-sitter-tablegen"]
path = helix-syntax/languages/tree-sitter-tablegen
url = https://github.com/Flakebi/tree-sitter-tablegen
shallow = true
[submodule "helix-syntax/languages/tree-sitter-git-rebase"]
path = helix-syntax/languages/tree-sitter-git-rebase
url = https://github.com/the-mikedavis/tree-sitter-git-rebase.git
shallow = true
[submodule "helix-syntax/languages/tree-sitter-lean"]
path = helix-syntax/languages/tree-sitter-lean
url = https://github.com/Julian/tree-sitter-lean
shallow = true
[submodule "helix-syntax/languages/tree-sitter-regex"]
path = helix-syntax/languages/tree-sitter-regex
url = https://github.com/tree-sitter/tree-sitter-regex.git
shallow = true
[submodule "helix-syntax/languages/tree-sitter-make"]
path = helix-syntax/languages/tree-sitter-make
url = https://github.com/alemuller/tree-sitter-make
shallow = true
[submodule "helix-syntax/languages/tree-sitter-git-config"]
path = helix-syntax/languages/tree-sitter-git-config
url = https://github.com/the-mikedavis/tree-sitter-git-config.git
shallow = true
[submodule "helix-syntax/languages/tree-sitter-graphql"]
path = helix-syntax/languages/tree-sitter-graphql
url = https://github.com/bkegley/tree-sitter-graphql
shallow = true
[submodule "helix-syntax/languages/tree-sitter-elm"]
path = helix-syntax/languages/tree-sitter-elm
url = https://github.com/elm-tooling/tree-sitter-elm
shallow = true
[submodule "helix-syntax/languages/tree-sitter-iex"]
path = helix-syntax/languages/tree-sitter-iex
url = https://github.com/elixir-lang/tree-sitter-iex
shallow = true
[submodule "helix-syntax/languages/tree-sitter-twig"]
path = helix-syntax/languages/tree-sitter-twig
url = https://github.com/eirabben/tree-sitter-twig.git
shallow = true
[submodule "helix-syntax/languages/tree-sitter-rescript"]
path = helix-syntax/languages/tree-sitter-rescript
url = https://github.com/jaredramirez/tree-sitter-rescript
shallow = true
[submodule "helix-syntax/languages/tree-sitter-erlang"]
path = helix-syntax/languages/tree-sitter-erlang
url = https://github.com/the-mikedavis/tree-sitter-erlang

@@ -1,3 +1,165 @@
# 22.03 (2022-03-28)

A big shout out to all the contributors! We had 51 contributors in this release.

This release is particularly large and featureful. Check out some of the
highlights in the [news section](https://helix-editor.com/news/release-22-03-highlights/).

As usual, the following is a summary of each of the changes since the last release.
For the full log, check out the [git log](https://github.com/helix-editor/helix/compare/v0.6.0..22.03).

Breaking changes:

- LSP config now lives under `editor.lsp` ([#1868](https://github.com/helix-editor/helix/pull/1868))
- Expand-selection was moved from `]o` to `Alt-h` ([#1495](https://github.com/helix-editor/helix/pull/1495))
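
As a minimal sketch of the `editor.lsp` move in `config.toml` (the `display-messages` key is only an assumed example option, not taken from the linked PR):

```toml
# LSP-related settings now live in their own sub-table under [editor].
[editor.lsp]
display-messages = true  # assumed example option, for illustration only
```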

Features:

- Experimental Debug Adapter Protocol (DAP) support ([#574](https://github.com/helix-editor/helix/pull/574))
- Primary cursor shape may now be customized per mode ([#1154](https://github.com/helix-editor/helix/pull/1154))
- Overhaul incremental highlights and enable combined injections ([`6728344..4080341`](https://github.com/helix-editor/helix/compare/6728344..4080341))
- Allow specifying file start position ([#445](https://github.com/helix-editor/helix/pull/445), [#1676](https://github.com/helix-editor/helix/pull/1676))
- Dynamic line numbers ([#1522](https://github.com/helix-editor/helix/pull/1522))
- Show an info box with the contents of registers ([#980](https://github.com/helix-editor/helix/pull/980))
- Wrap-around behavior during search is now configurable ([#1516](https://github.com/helix-editor/helix/pull/1516))
- Tree-sitter textobjects motions for classes, functions, and parameters ([#1619](https://github.com/helix-editor/helix/pull/1619), [#1708](https://github.com/helix-editor/helix/pull/1708), [#1805](https://github.com/helix-editor/helix/pull/1805))
- Command palette: a picker for available commands ([#1400](https://github.com/helix-editor/helix/pull/1400))
- LSP `workspace/configuration` and `workspace/didChangeConfiguration` support ([#1684](https://github.com/helix-editor/helix/pull/1684))
- `hx --health [LANG]` command ([#1669](https://github.com/helix-editor/helix/pull/1669))
- Refactor of the tree-sitter grammar system ([#1659](https://github.com/helix-editor/helix/pull/1659))
  - All submodules have been removed
  - New `hx --grammar {fetch|build}` flags for fetching and building tree-sitter grammars
  - A custom grammar selection may now be declared with the `use-grammars` key in `languages.toml`
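
A rough sketch of what this looks like in a user's `languages.toml` (the shape of the `only` filter is an assumption; the `[[grammar]]` entry mirrors the minimal CI config added in this change). Grammars declared this way are then downloaded with `hx --grammar fetch` and compiled with `hx --grammar build`:

```toml
# Restrict the grammars Helix manages and pin where one of them comes from.
# The `only` filter below is assumed here for illustration.
use-grammars = { only = ["rust", "nix"] }

[[grammar]]
name = "rust"
source = { git = "https://github.com/tree-sitter/tree-sitter-rust", rev = "a360da0a29a19c281d08295a35ecd0544d2da211" }
```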

Commands:

- `:cquit!` - quit forcefully with a non-zero exit-code ([#1414](https://github.com/helix-editor/helix/pull/1414))
- `shrink_selection` - shrink the selection to a child tree-sitter node (`Alt-j`, [#1340](https://github.com/helix-editor/helix/pull/1340))
- `:tree-sitter-subtree` - show the tree-sitter subtree under the primary selection ([#1453](https://github.com/helix-editor/helix/pull/1453), [#1524](https://github.com/helix-editor/helix/pull/1524))
- Add `Alt-Backspace`, `Alt-<`, `Alt->`, and `Ctrl-j` to insert mode ([#1441](https://github.com/helix-editor/helix/pull/1441))
- `select_next_sibling`, `select_prev_sibling` - select next and previous tree-sitter nodes (`Alt-l` and `Alt-h`, [#1495](https://github.com/helix-editor/helix/pull/1495))
- `:buffer-close-all`, `:buffer-close-all!`, `:buffer-close-others`, and `:buffer-close-others!` ([#1677](https://github.com/helix-editor/helix/pull/1677))
- `:vsplit-new` and `:hsplit-new` - open vertical and horizontal splits with new scratch buffers ([#1763](https://github.com/helix-editor/helix/pull/1763))
- `:open-config` to open the config file and `:refresh-config` to refresh config after changes ([#1771](https://github.com/helix-editor/helix/pull/1771), [#1803](https://github.com/helix-editor/helix/pull/1803))

Usability improvements and fixes:

- Prevent `:cquit` from ignoring unsaved changes ([#1414](https://github.com/helix-editor/helix/pull/1414))
- Scrolling view keeps selections ([#1420](https://github.com/helix-editor/helix/pull/1420))
- Only use shellwords parsing on unix platforms ([`7767703`](https://github.com/helix-editor/helix/commit/7767703))
- Fix slash in search selector status message ([#1449](https://github.com/helix-editor/helix/pull/1449))
- Use `std::path::MAIN_SEPARATOR` to determine completion ([`3e4f815`](https://github.com/helix-editor/helix/commit/3e4f815))
- Expand to current node with `expand_selection` when the node has no children ([#1454](https://github.com/helix-editor/helix/pull/1454))
- Add vertical and horizontal splits to the buffer picker ([#1502](https://github.com/helix-editor/helix/pull/1502))
- Use the correct language ID for JavaScript & TypeScript LSP ([#1466](https://github.com/helix-editor/helix/pull/1466))
- Run format command for all buffers being written ([#1444](https://github.com/helix-editor/helix/pull/1444))
- Fix panics during resizing ([#1408](https://github.com/helix-editor/helix/pull/1408))
- Fix auto-pairs with CRLF ([#1470](https://github.com/helix-editor/helix/pull/1470))
- Fix picker scrolling when the bottom is reached ([#1567](https://github.com/helix-editor/helix/pull/1567))
- Use markup themes for the markdown component ([#1363](https://github.com/helix-editor/helix/pull/1363))
- Automatically commit changes to history if not in insert mode ([`2a7ae96`](https://github.com/helix-editor/helix/commit/2a7ae96))
- Render code-actions as a menu and add padding to popup ([`094a0aa`](https://github.com/helix-editor/helix/commit/094a0aa))
- Only render menu scrollbar if the menu doesn't fit ([`f10a06f`](https://github.com/helix-editor/helix/commit/f10a06f), [`36b975c`](https://github.com/helix-editor/helix/commit/36b975c))
- Parse git revision instead of tag for version ([`d3221b0`](https://github.com/helix-editor/helix/commit/d3221b0), [#1674](https://github.com/helix-editor/helix/pull/1674))
- Fix incorrect last modified buffer ([#1621](https://github.com/helix-editor/helix/pull/1621))
- Add `PageUp`, `PageDown`, `Ctrl-u`, `Ctrl-d`, `Home`, `End` bindings to the file picker ([#1612](https://github.com/helix-editor/helix/pull/1612))
- Display buffer IDs in the buffer picker ([#1134](https://github.com/helix-editor/helix/pull/1134))
- Allow multi-line prompt documentation ([`2af0432`](https://github.com/helix-editor/helix/commit/2af0432))
- Ignore the `.git` directory from the file picker ([#1604](https://github.com/helix-editor/helix/pull/1604))
- Allow separate styling for markup heading levels ([#1618](https://github.com/helix-editor/helix/pull/1618))
- Automatically close popups ([#1285](https://github.com/helix-editor/helix/pull/1285))
- Allow auto-pairs tokens to be configured ([#1624](https://github.com/helix-editor/helix/pull/1624))
- Don't indent empty lines in `indent` command ([#1653](https://github.com/helix-editor/helix/pull/1653))
- Ignore `Enter` keypress when a menu has no selection ([#1704](https://github.com/helix-editor/helix/pull/1704))
- Show errors when surround deletions and replacements fail ([#1709](https://github.com/helix-editor/helix/pull/1709))
- Show infobox hints for `mi` and `ma` ([#1686](https://github.com/helix-editor/helix/pull/1686))
- Highlight matching text in file picker suggestions ([#1635](https://github.com/helix-editor/helix/pull/1635))
- Allow capturing multiple nodes in textobject queries ([#1611](https://github.com/helix-editor/helix/pull/1611))
- Make repeat operator work with completion edits ([#1640](https://github.com/helix-editor/helix/pull/1640))
- Save to the jumplist when searching ([#1718](https://github.com/helix-editor/helix/pull/1718))
- Fix bug with auto-replacement of components in compositor ([#1711](https://github.com/helix-editor/helix/pull/1711))
- Use Kakoune logic for `align_selection` ([#1675](https://github.com/helix-editor/helix/pull/1675))
- Fix `follows` for `nixpkgs` in `flake.nix` ([#1729](https://github.com/helix-editor/helix/pull/1729))
- Performance improvements for the picker ([`78fba86`](https://github.com/helix-editor/helix/commit/78fba86))
- Rename infobox theme scopes ([#1741](https://github.com/helix-editor/helix/pull/1741))
- Fallback to broader scopes if a theme scope is not found ([#1714](https://github.com/helix-editor/helix/pull/1714))
- Add arrow-keys bindings for tree-sitter sibling selection commands ([#1724](https://github.com/helix-editor/helix/pull/1724))
- Fix a bug in LSP when creating a file in a folder that does not exist ([#1775](https://github.com/helix-editor/helix/pull/1775))
- Use `^` and `$` regex location assertions for search ([#1793](https://github.com/helix-editor/helix/pull/1793))
- Fix register names in `insert_register` command ([#1751](https://github.com/helix-editor/helix/pull/1751))
- Perform extend line for all selections ([#1804](https://github.com/helix-editor/helix/pull/1804))
- Prevent panic when moving in an empty picker ([#1786](https://github.com/helix-editor/helix/pull/1786))
- Fix line number calculations for non CR/CRLF line breaks ([`b4a282f`](https://github.com/helix-editor/helix/commit/b4a282f), [`0b96201`](https://github.com/helix-editor/helix/commit/0b96201))
- Deploy documentation for `master` builds separately from release docs ([#1783](https://github.com/helix-editor/helix/pull/1783))

Themes:

- Add everforest_light ([#1412](https://github.com/helix-editor/helix/pull/1412))
- Add gruvbox_light ([#1509](https://github.com/helix-editor/helix/pull/1509))
- Add modified background to dracula popup ([#1434](https://github.com/helix-editor/helix/pull/1434))
- Markup support for monokai pro themes ([#1553](https://github.com/helix-editor/helix/pull/1553))
- Markup support for dracula theme ([#1554](https://github.com/helix-editor/helix/pull/1554))
- Add `tag` to gruvbox theme ([#1555](https://github.com/helix-editor/helix/pull/1555))
- Markup support for remaining themes ([#1525](https://github.com/helix-editor/helix/pull/1525))
- Serika light and dark ([#1566](https://github.com/helix-editor/helix/pull/1566))
- Fix rose_pine and rose_pine_dawn popup background color ([#1606](https://github.com/helix-editor/helix/pull/1606))
- Fix hover menu item text color in base16 themes ([#1668](https://github.com/helix-editor/helix/pull/1668))
- Update markup heading styles for everforest ([#1687](https://github.com/helix-editor/helix/pull/1687))
- Update markup heading styles for rose_pine themes ([#1706](https://github.com/helix-editor/helix/pull/1706))
- Style bogster cursors ([`6a6a9ab`](https://github.com/helix-editor/helix/commit/6a6a9ab))
- Fix `ui.selection` in rose_pine themes ([#1716](https://github.com/helix-editor/helix/pull/1716))
- Use distinct colors for cursor and matched pair in gruvbox ([#1791](https://github.com/helix-editor/helix/pull/1791))
- Improve colors for `ui.cursor.match` capture in some themes ([#1862](https://github.com/helix-editor/helix/pull/1862))

LSP:

- Add default language server for JavaScript ([#1457](https://github.com/helix-editor/helix/pull/1457))
- Add `pom.xml` as maven root directory marker ([#1496](https://github.com/helix-editor/helix/pull/1496))
- Haskell LSP ([#1556](https://github.com/helix-editor/helix/pull/1556))
- C-sharp LSP support ([#1788](https://github.com/helix-editor/helix/pull/1788))
- Clean up Julia LSP config ([#1811](https://github.com/helix-editor/helix/pull/1811))

New Languages:

- llvm-mir ([#1398](https://github.com/helix-editor/helix/pull/1398))
- regex ([#1362](https://github.com/helix-editor/helix/pull/1362))
- Make ([#1433](https://github.com/helix-editor/helix/pull/1433), [#1661](https://github.com/helix-editor/helix/pull/1661))
- git-config ([#1426](https://github.com/helix-editor/helix/pull/1426))
- Lean ([#1422](https://github.com/helix-editor/helix/pull/1422))
- Elm ([#1514](https://github.com/helix-editor/helix/pull/1514))
- GraphQL ([#1515](https://github.com/helix-editor/helix/pull/1515))
- Twig ([#1602](https://github.com/helix-editor/helix/pull/1602))
- Rescript ([#1616](https://github.com/helix-editor/helix/pull/1616), [#1863](https://github.com/helix-editor/helix/pull/1863))
- Erlang ([#1657](https://github.com/helix-editor/helix/pull/1657))
- Kotlin ([#1689](https://github.com/helix-editor/helix/pull/1689))
- HCL ([#1705](https://github.com/helix-editor/helix/pull/1705), [#1726](https://github.com/helix-editor/helix/pull/1726))
- Org ([#1845](https://github.com/helix-editor/helix/pull/1845))
- Solidity ([#1848](https://github.com/helix-editor/helix/pull/1848), [#1854](https://github.com/helix-editor/helix/pull/1854))

Updated Languages and Queries:

- Textobject and indent queries for c and cpp ([#1293](https://github.com/helix-editor/helix/pull/1293))
- Fix null and boolean constant highlights for nix ([#1428](https://github.com/helix-editor/helix/pull/1428))
- Capture markdown link text as `markup.link.text` ([#1456](https://github.com/helix-editor/helix/pull/1456))
- Update and re-enable Haskell ([#1417](https://github.com/helix-editor/helix/pull/1417), [#1520](https://github.com/helix-editor/helix/pull/1520))
- Update Go with generics support ([`ddbf036`](https://github.com/helix-editor/helix/commit/ddbf036))
- Use `tree-sitter-css` for SCSS files ([#1507](https://github.com/helix-editor/helix/pull/1507))
- Update Zig ([#1501](https://github.com/helix-editor/helix/pull/1501))
- Update PHP ([#1521](https://github.com/helix-editor/helix/pull/1521))
- Expand language support for comment injections ([#1527](https://github.com/helix-editor/helix/pull/1527))
- Use tree-sitter-bash for `.zshrc` and `.bashrc` ([`7d51042`](https://github.com/helix-editor/helix/commit/7d51042))
- Use tree-sitter-bash for `.bash_profile` ([#1571](https://github.com/helix-editor/helix/pull/1571))
- Use tree-sitter-bash for `.zshenv` and ZSH files ([#1574](https://github.com/helix-editor/helix/pull/1574))
- IEx ([#1576](https://github.com/helix-editor/helix/pull/1576))
- Textobject queries for PHP ([#1601](https://github.com/helix-editor/helix/pull/1601))
- C-sharp highlight query improvements ([#1795](https://github.com/helix-editor/helix/pull/1795))
- Git commit performance has been improved on large verbose commits ([#1838](https://github.com/helix-editor/helix/pull/1838))

Packaging:

- The submodules system has been replaced with command-line flags for fetching and building tree-sitter grammars ([#1659](https://github.com/helix-editor/helix/pull/1659))
- Flake outputs are pushed to Cachix on each push to `master` ([#1721](https://github.com/helix-editor/helix/pull/1721))
- Update flake's `nix-cargo-integration` to depend on `dream2nix` ([#1758](https://github.com/helix-editor/helix/pull/1758))

# 0.6.0 (2022-01-04)
@@ -19,19 +181,20 @@ Features:
 - Make it possible to keybind TypableCommands ([#1169](https://github.com/helix-editor/helix/pull/1169))
 - Detect workspace root using language markers ([#1370](https://github.com/helix-editor/helix/pull/1370))
 - Add WORD textobject ([#991](https://github.com/helix-editor/helix/pull/991))
-- Add LSP rename_symbol (space-r) ([#1011](https://github.com/helix-editor/helix/pull/1011))
+- Add LSP rename_symbol (`space-r`) ([#1011](https://github.com/helix-editor/helix/pull/1011))
 - Added workspace_symbol_picker ([#1041](https://github.com/helix-editor/helix/pull/1041))
 - Detect filetype from shebang line ([#1001](https://github.com/helix-editor/helix/pull/1001))
 - Allow piping from stdin into a buffer on startup ([#996](https://github.com/helix-editor/helix/pull/996))
 - Add auto pairs for same-char pairs ([#1219](https://github.com/helix-editor/helix/pull/1219))
 - Update settings at runtime ([#798](https://github.com/helix-editor/helix/pull/798))
-- Enable thin LTO (cccc194)
+- Enable thin LTO ([`cccc194`](https://github.com/helix-editor/helix/commit/cccc194))

 Commands:
-- :wonly -- window only ([#1057](https://github.com/helix-editor/helix/pull/1057))
-- buffer-close (:bc, :bclose) ([#1035](https://github.com/helix-editor/helix/pull/1035))
-- Add :<line> and :goto <line> commands ([#1128](https://github.com/helix-editor/helix/pull/1128))
-- :sort command ([#1288](https://github.com/helix-editor/helix/pull/1288))
+
+- `:wonly` -- window only ([#1057](https://github.com/helix-editor/helix/pull/1057))
+- buffer-close (`:bc`, `:bclose`) ([#1035](https://github.com/helix-editor/helix/pull/1035))
+- Add `:<line>` and `:goto <line>` commands ([#1128](https://github.com/helix-editor/helix/pull/1128))
+- `:sort` command ([#1288](https://github.com/helix-editor/helix/pull/1288))
 - Add m textobject for pair under cursor ([#961](https://github.com/helix-editor/helix/pull/961))
 - Implement "Goto next buffer / Goto previous buffer" commands ([#950](https://github.com/helix-editor/helix/pull/950))
 - Implement "Goto last modification" command ([#1067](https://github.com/helix-editor/helix/pull/1067))
@@ -39,17 +202,17 @@ Commands:
 - Add movement shortcut for history ([#1088](https://github.com/helix-editor/helix/pull/1088))
 - Add command to inc/dec number under cursor ([#1027](https://github.com/helix-editor/helix/pull/1027))
 - Add support for dates for increment/decrement
-- Align selections (&) ([#1101](https://github.com/helix-editor/helix/pull/1101))
+- Align selections (`&`) ([#1101](https://github.com/helix-editor/helix/pull/1101))
 - Implement no-yank delete/change ([#1099](https://github.com/helix-editor/helix/pull/1099))
 - Implement black hole register ([#1165](https://github.com/helix-editor/helix/pull/1165))
-- gf as goto_file (gf) ([#1102](https://github.com/helix-editor/helix/pull/1102))
-- Add last modified file (gm) ([#1093](https://github.com/helix-editor/helix/pull/1093))
+- `gf` as goto_file (`gf`) ([#1102](https://github.com/helix-editor/helix/pull/1102))
+- Add last modified file (`gm`) ([#1093](https://github.com/helix-editor/helix/pull/1093))
 - ensure_selections_forward ([#1393](https://github.com/helix-editor/helix/pull/1393))
 - Readline style insert mode ([#1039](https://github.com/helix-editor/helix/pull/1039))

 Usability improvements and fixes:
-- Detect filetype on :write ([#1141](https://github.com/helix-editor/helix/pull/1141))
+- Detect filetype on `:write` ([#1141](https://github.com/helix-editor/helix/pull/1141))
 - Add single and double quotes to matching pairs ([#995](https://github.com/helix-editor/helix/pull/995))
 - Launch with defaults upon invalid config/theme (rather than panicking) ([#982](https://github.com/helix-editor/helix/pull/982))
 - If switching away from an empty scratch buffer, remove it ([#935](https://github.com/helix-editor/helix/pull/935))
@@ -58,7 +221,7 @@ Usability improvements and fixes:
 - Prevent picker from previewing binaries or large file ([#939](https://github.com/helix-editor/helix/pull/939))
 - Inform when reaching undo/redo bounds ([#981](https://github.com/helix-editor/helix/pull/981))
 - search_impl will only align cursor center when it isn't in view ([#959](https://github.com/helix-editor/helix/pull/959))
-- Add <C-h>, <C-u>, <C-d>, Delete in prompt mode ([#1034](https://github.com/helix-editor/helix/pull/1034))
+- Add `<C-h>`, `<C-u>`, `<C-d>`, Delete in prompt mode ([#1034](https://github.com/helix-editor/helix/pull/1034))
 - Restore screen position when aborting search ([#1047](https://github.com/helix-editor/helix/pull/1047))
 - Buffer picker: show is_modifier flag ([#1020](https://github.com/helix-editor/helix/pull/1020))
 - Add commit hash to version info, if present ([#957](https://github.com/helix-editor/helix/pull/957))
@@ -69,20 +232,20 @@ Usability improvements and fixes:
 - Accept count for goto_window ([#1033](https://github.com/helix-editor/helix/pull/1033))
 - Make kill_to_line_end behave like emacs ([#1235](https://github.com/helix-editor/helix/pull/1235))
 - Only use a single documentation popup ([#1241](https://github.com/helix-editor/helix/pull/1241))
-- ui: popup: Don't allow scrolling past the end of content (3307f44c)
+- ui: popup: Don't allow scrolling past the end of content ([`3307f44c`](https://github.com/helix-editor/helix/commit/3307f44c))
 - Open files with spaces in filename, allow opening multiple files ([#1231](https://github.com/helix-editor/helix/pull/1231))
 - Allow paste commands to take a count ([#1261](https://github.com/helix-editor/helix/pull/1261))
 - Auto pairs selection ([#1254](https://github.com/helix-editor/helix/pull/1254))
 - Use a fuzzy matcher for commands ([#1386](https://github.com/helix-editor/helix/pull/1386))
-- Add c-s to pick word under doc cursor to prompt line & search completion ([#831](https://github.com/helix-editor/helix/pull/831))
-- Fix :earlier/:later missing changeset update ([#1069](https://github.com/helix-editor/helix/pull/1069))
+- Add `<C-s>` to pick word under doc cursor to prompt line & search completion ([#831](https://github.com/helix-editor/helix/pull/831))
+- Fix `:earlier`/`:later` missing changeset update ([#1069](https://github.com/helix-editor/helix/pull/1069))
 - Support extend for multiple goto ([#909](https://github.com/helix-editor/helix/pull/909))
 - Add arrow-key bindings for window switching ([#933](https://github.com/helix-editor/helix/pull/933))
 - Implement key ordering for info box ([#952](https://github.com/helix-editor/helix/pull/952))

 LSP:
-- Implement MarkedString rendering (e128a8702)
-- Don't panic if init fails (d31bef7)
+- Implement MarkedString rendering ([`e128a8702`](https://github.com/helix-editor/helix/commit/e128a8702))
+- Don't panic if init fails ([`d31bef7`](https://github.com/helix-editor/helix/commit/d31bef7))
 - Configurable diagnostic severity ([#1325](https://github.com/helix-editor/helix/pull/1325))
 - Resolve completion item ([#1315](https://github.com/helix-editor/helix/pull/1315))
 - Code action command support ([#1304](https://github.com/helix-editor/helix/pull/1304))
@@ -95,7 +258,7 @@ Grammars:
 - Racket ([#1143](https://github.com/helix-editor/helix/pull/1143))
 - WGSL ([#1166](https://github.com/helix-editor/helix/pull/1166))
 - LLVM ([#1167](https://github.com/helix-editor/helix/pull/1167)) ([#1388](https://github.com/helix-editor/helix/pull/1388)) ([#1409](https://github.com/helix-editor/helix/pull/1409)) ([#1398](https://github.com/helix-editor/helix/pull/1398))
-- Markdown (49e06787)
+- Markdown ([`49e06787`](https://github.com/helix-editor/helix/commit/49e06787))
 - Scala ([#1278](https://github.com/helix-editor/helix/pull/1278))
 - Dart ([#1250](https://github.com/helix-editor/helix/pull/1250))
 - Fish ([#1308](https://github.com/helix-editor/helix/pull/1308))
@@ -145,16 +308,16 @@ Features:
 - Treesitter textobjects (select a function via `mf`, class via `mc`) ([#728](https://github.com/helix-editor/helix/pull/728))
 - Global search across entire workspace `space+/` ([#651](https://github.com/helix-editor/helix/pull/651))
 - Relative line number support ([#485](https://github.com/helix-editor/helix/pull/485))
-- Prompts now store a history (72cf86e)
+- Prompts now store a history ([`72cf86e`](https://github.com/helix-editor/helix/commit/72cf86e))
 - `:vsplit` and `:hsplit` commands ([#639](https://github.com/helix-editor/helix/pull/639))
 - `C-w h/j/k/l` can now be used to navigate between splits ([#860](https://github.com/helix-editor/helix/pull/860))
 - `C-j` and `C-k` are now alternative keybindings to `C-n` and `C-p` in the UI ([#876](https://github.com/helix-editor/helix/pull/876))
 - Shell commands (shell-pipe, pipe-to, shell-insert-output, shell-append-output, keep-pipe) ([#547](https://github.com/helix-editor/helix/pull/547))
 - Searching now defaults to smart case search (case insensitive unless uppercase is used) ([#761](https://github.com/helix-editor/helix/pull/761))
 - The preview pane was improved to highlight and center line ranges
-- The user `languages.toml` is now merged into defaults, no longer need to copy the entire file (dc57f8dc)
+- The user `languages.toml` is now merged into defaults, no longer need to copy the entire file ([`dc57f8dc`](https://github.com/helix-editor/helix/commit/dc57f8dc))
 - Show hidden files in completions ([#648](https://github.com/helix-editor/helix/pull/648))
-- Grammar injections are now properly handled (dd0b15e)
+- Grammar injections are now properly handled ([`dd0b15e`](https://github.com/helix-editor/helix/commit/dd0b15e))
 - `v` in select mode now switches back to normal mode ([#660](https://github.com/helix-editor/helix/pull/660))
 - View mode can now be triggered as a "sticky" mode ([#719](https://github.com/helix-editor/helix/pull/719))
 - `f`/`t` and object selection motions can now be repeated via `Alt-.` ([#891](https://github.com/helix-editor/helix/pull/891))
@ -172,7 +335,7 @@ New grammars:
- Vue ([#787](https://github.com/helix-editor/helix/pull/787)) - Vue ([#787](https://github.com/helix-editor/helix/pull/787))
- Tree-sitter queries ([#845](https://github.com/helix-editor/helix/pull/845)) - Tree-sitter queries ([#845](https://github.com/helix-editor/helix/pull/845))
- CMake ([#888](https://github.com/helix-editor/helix/pull/888)) - CMake ([#888](https://github.com/helix-editor/helix/pull/888))
- Elixir (we switched over to the official grammar) (6c0786e) - Elixir (we switched over to the official grammar) ([`6c0786e`](https://github.com/helix-editor/helix/commit/6c0786e))
- Language server definitions for Nix and Elixir ([#725](https://github.com/helix-editor/helix/pull/725)) - Language server definitions for Nix and Elixir ([#725](https://github.com/helix-editor/helix/pull/725))
- Python now uses `pylsp` instead of `pyls` - Python now uses `pylsp` instead of `pyls`
- Python now supports indentation - Python now supports indentation
@ -189,19 +352,20 @@ Fixes:
- Fix crash on empty rust file ([#592](https://github.com/helix-editor/helix/pull/592)) - Fix crash on empty rust file ([#592](https://github.com/helix-editor/helix/pull/592))
- Exit select mode after toggle comment ([#598](https://github.com/helix-editor/helix/pull/598)) - Exit select mode after toggle comment ([#598](https://github.com/helix-editor/helix/pull/598))
- Pin popups with no positioning to the initial position (12ea3888) - Pin popups with no positioning to the initial position ([`12ea3888`](https://github.com/helix-editor/helix/commit/12ea3888))
- xsel copy should not freeze the editor (6dd7dc4) - xsel copy should not freeze the editor ([`6dd7dc4`](https://github.com/helix-editor/helix/commit/6dd7dc4))
- `*` now only sets the search register and doesn't jump to the next occurrence (3426285) - `*` now only sets the search register and doesn't jump to the next occurrence ([`3426285`](https://github.com/helix-editor/helix/commit/3426285))
- Goto line start/end commands extend when in select mode ([#739](https://github.com/helix-editor/helix/pull/739)) - Goto line start/end commands extend when in select mode ([#739](https://github.com/helix-editor/helix/pull/739))
- Fix documentation popups sometimes not getting fully highlighted (066367c) - Fix documentation popups sometimes not getting fully highlighted ([`066367c`](https://github.com/helix-editor/helix/commit/066367c))
- Refactor apply_workspace_edit to remove assert (b02d872) - Refactor apply_workspace_edit to remove assert ([`b02d872`](https://github.com/helix-editor/helix/commit/b02d872))
- Wrap around the top of the picker menu when scrolling (c7d6e44) - Wrap around the top of the picker menu when scrolling ([`c7d6e44`](https://github.com/helix-editor/helix/commit/c7d6e44))
- Don't allow closing the last split if there's unsaved changes (3ff5b00) - Don't allow closing the last split if there's unsaved changes ([`3ff5b00`](https://github.com/helix-editor/helix/commit/3ff5b00))
- Indentation used different default on hx vs hx new_file.txt (c913bad) - Indentation used different default on hx vs hx new_file.txt ([`c913bad`](https://github.com/helix-editor/helix/commit/c913bad))
# 0.4.1 (2021-08-14) # 0.4.1 (2021-08-14)
A minor release that includes: A minor release that includes:
- A fix for rendering glitches that would occur after editing with multiple selections. - A fix for rendering glitches that would occur after editing with multiple selections.
- CI fix for grammars not being cross-compiled for aarch64 - CI fix for grammars not being cross-compiled for aarch64
@ -223,10 +387,10 @@ selections in the future as well as resolves many bugs and edge cases.
- Autoinfo: `whichkey`-like popups which show available sub-mode shortcuts ([#316](https://github.com/helix-editor/helix/pull/316)) - Autoinfo: `whichkey`-like popups which show available sub-mode shortcuts ([#316](https://github.com/helix-editor/helix/pull/316))
- Added WORD movements (W/B/E) ([#390](https://github.com/helix-editor/helix/pull/390)) - Added WORD movements (W/B/E) ([#390](https://github.com/helix-editor/helix/pull/390))
- Vertical selections (repeat selection above/below) ([#462](https://github.com/helix-editor/helix/pull/462)) - Vertical selections (repeat selection above/below) ([#462](https://github.com/helix-editor/helix/pull/462))
- Selection rotation via `(` and `)` ([66a90130](https://github.com/helix-editor/helix/commit/66a90130a5f99d769e9f6034025297f78ecaa3ec)) - Selection rotation via `(` and `)` ([`66a90130`](https://github.com/helix-editor/helix/commit/66a90130a5f99d769e9f6034025297f78ecaa3ec))
- Selection contents rotation via `Alt-(` and `Alt-)` ([02cba2a](https://github.com/helix-editor/helix/commit/02cba2a7f403f48eccb18100fb751f7b42373dba)) - Selection contents rotation via `Alt-(` and `Alt-)` ([`02cba2a`](https://github.com/helix-editor/helix/commit/02cba2a7f403f48eccb18100fb751f7b42373dba))
- Completion behavior improvements ([f917b5a4](https://github.com/helix-editor/helix/commit/f917b5a441ff3ae582358b6939ffbf889f4aa530), [627b899](https://github.com/helix-editor/helix/commit/627b89931576f7af86166ae8d5cbc55537877473)) - Completion behavior improvements ([`f917b5a4`](https://github.com/helix-editor/helix/commit/f917b5a441ff3ae582358b6939ffbf889f4aa530), [`627b899`](https://github.com/helix-editor/helix/commit/627b89931576f7af86166ae8d5cbc55537877473))
- Fixed a language server crash ([385a6b5a](https://github.com/helix-editor/helix/commit/385a6b5a1adddfc26e917982641530e1a7c7aa81)) - Fixed a language server crash ([`385a6b5a`](https://github.com/helix-editor/helix/commit/385a6b5a1adddfc26e917982641530e1a7c7aa81))
- Case change commands (`` ` ``, `~`, ``<a-`>``) ([#441](https://github.com/helix-editor/helix/pull/441)) - Case change commands (`` ` ``, `~`, ``<a-`>``) ([#441](https://github.com/helix-editor/helix/pull/441))
- File pickers (including goto) now provide a preview! ([#534](https://github.com/helix-editor/helix/pull/534)) - File pickers (including goto) now provide a preview! ([#534](https://github.com/helix-editor/helix/pull/534))
- Injection query support. Rust macro calls and embedded languages are now properly highlighted ([#430](https://github.com/helix-editor/helix/pull/430)) - Injection query support. Rust macro calls and embedded languages are now properly highlighted ([#430](https://github.com/helix-editor/helix/pull/430))
@ -242,7 +406,7 @@ selections in the future as well as resolves many bugs and edge cases.
- Comment toggling now uses a language specific comment token ([#463](https://github.com/helix-editor/helix/pull/463)) - Comment toggling now uses a language specific comment token ([#463](https://github.com/helix-editor/helix/pull/463))
- Julia support ([#413](https://github.com/helix-editor/helix/pull/413)) - Julia support ([#413](https://github.com/helix-editor/helix/pull/413))
- Java support ([#448](https://github.com/helix-editor/helix/pull/448)) - Java support ([#448](https://github.com/helix-editor/helix/pull/448))
- Prompts have an (in-memory) history ([63e54e30](https://github.com/helix-editor/helix/commit/63e54e30a74bb0d1d782877ddbbcf95f2817d061)) - Prompts have an (in-memory) history ([`63e54e30`](https://github.com/helix-editor/helix/commit/63e54e30a74bb0d1d782877ddbbcf95f2817d061))
# 0.3.0 (2021-06-27) # 0.3.0 (2021-06-27)
@ -256,7 +420,7 @@ Highlights:
- Support for other line endings (CRLF). Significantly improved Windows support. ([#224](https://github.com/helix-editor/helix/pull/224)) - Support for other line endings (CRLF). Significantly improved Windows support. ([#224](https://github.com/helix-editor/helix/pull/224))
- Encodings other than UTF-8 are now supported! ([#228](https://github.com/helix-editor/helix/pull/228)) - Encodings other than UTF-8 are now supported! ([#228](https://github.com/helix-editor/helix/pull/228))
- Key bindings can now be configured via a `config.toml` file ([#268](https://github.com/helix-editor/helix/pull/268)) - Key bindings can now be configured via a `config.toml` file ([#268](https://github.com/helix-editor/helix/pull/268))
- Theme can now be configured and changed at runtime ([please feel free to contribute more themes!](https://github.com/helix-editor/helix/tree/master/runtime/themes)) ([#267](https://github.com/helix-editor/helix/pull/267)) - Theme can now be configured and changed at runtime. ([Please feel free to contribute more themes!](https://github.com/helix-editor/helix/tree/master/runtime/themes)) ([#267](https://github.com/helix-editor/helix/pull/267))
- System clipboard yank/paste is now supported! ([#310](https://github.com/helix-editor/helix/pull/310)) - System clipboard yank/paste is now supported! ([#310](https://github.com/helix-editor/helix/pull/310))
- Surround commands were implemented ([#320](https://github.com/helix-editor/helix/pull/320)) - Surround commands were implemented ([#320](https://github.com/helix-editor/helix/pull/320))
@ -273,7 +437,7 @@ Features:
- Code is being migrated from helix-term to helix-view (prerequisite for - Code is being migrated from helix-term to helix-view (prerequisite for
alternative frontends) ([#366](https://github.com/helix-editor/helix/pull/366)) alternative frontends) ([#366](https://github.com/helix-editor/helix/pull/366))
- `x` and `X` merged - `x` and `X` merged
([f41688d9](https://github.com/helix-editor/helix/commit/f41688d960ef89c29c4a51c872b8406fb8f81a85)) ([`f41688d9`](https://github.com/helix-editor/helix/commit/f41688d960ef89c29c4a51c872b8406fb8f81a85))
Fixes: Fixes:
@ -281,12 +445,12 @@ Fixes:
- A bunch of bugs regarding `o`/`O` behavior ([#281](https://github.com/helix-editor/helix/pull/281)) - A bunch of bugs regarding `o`/`O` behavior ([#281](https://github.com/helix-editor/helix/pull/281))
- `~` expansion now works in file completion ([#284](https://github.com/helix-editor/helix/pull/284)) - `~` expansion now works in file completion ([#284](https://github.com/helix-editor/helix/pull/284))
- Several UI related overflow crashes ([#318](https://github.com/helix-editor/helix/pull/318)) - Several UI related overflow crashes ([#318](https://github.com/helix-editor/helix/pull/318))
- Fix a test failure occurring only on `test --release` ([4f108ab1](https://github.com/helix-editor/helix/commit/4f108ab1b2197809506bd7305ad903a3525eabfa)) - Fix a test failure occurring only on `test --release` ([`4f108ab1`](https://github.com/helix-editor/helix/commit/4f108ab1b2197809506bd7305ad903a3525eabfa))
- Prompts now support unicode input ([#295](https://github.com/helix-editor/helix/pull/295)) - Prompts now support unicode input ([#295](https://github.com/helix-editor/helix/pull/295))
- Completion documentation no longer overlaps the popup ([#322](https://github.com/helix-editor/helix/pull/322)) - Completion documentation no longer overlaps the popup ([#322](https://github.com/helix-editor/helix/pull/322))
- Fix a crash when trying to select `^` ([9c534614](https://github.com/helix-editor/helix/commit/9c53461429a3e72e3b1fb87d7ca490e168d7dee2)) - Fix a crash when trying to select `^` ([`9c534614`](https://github.com/helix-editor/helix/commit/9c53461429a3e72e3b1fb87d7ca490e168d7dee2))
- Prompt completions are now paginated ([39dc09e6](https://github.com/helix-editor/helix/commit/39dc09e6c4172299bc79de4c1c52288d3f624bd7)) - Prompt completions are now paginated ([`39dc09e6`](https://github.com/helix-editor/helix/commit/39dc09e6c4172299bc79de4c1c52288d3f624bd7))
- Goto did not work on Windows ([503ca112](https://github.com/helix-editor/helix/commit/503ca112ae57ebdf3ea323baf8940346204b46d2)) - Goto did not work on Windows ([`503ca112`](https://github.com/helix-editor/helix/commit/503ca112ae57ebdf3ea323baf8940346204b46d2))
# 0.2.1 # 0.2.1
Cargo.lock generated
@ -13,9 +13,9 @@ dependencies = [
[[package]] [[package]]
name = "anyhow" name = "anyhow"
version = "1.0.53" version = "1.0.56"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "94a45b455c14666b85fc40a019e8ab9eb75e3a124e05494f5397122bc9eb06e0" checksum = "4361135be9122e0870de935d7c439aef945b9f9ddd4199a553b5270b49c82a27"
[[package]] [[package]]
name = "arc-swap" name = "arc-swap"
@ -25,9 +25,9 @@ checksum = "c5d78ce20460b82d3fa150275ed9d55e21064fc7951177baacf86a145c4a4b1f"
[[package]] [[package]]
name = "autocfg" name = "autocfg"
version = "1.0.1" version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a" checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
[[package]] [[package]]
name = "bitflags" name = "bitflags"
@ -66,9 +66,9 @@ checksum = "df8670b8c7b9dae1793364eafadf7239c40d669904660c5960d74cfd80b46a53"
[[package]] [[package]]
name = "cc" name = "cc"
version = "1.0.72" version = "1.0.73"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "22a9137b95ea06864e018375b72adfb7db6e6f68cfc8df5a04d00288050485ee" checksum = "2fff2a6927b3bb87f9595d67196a70493f627687a71d87a0d692242c33f58c11"
[[package]] [[package]]
name = "cfg-if" name = "cfg-if"
@ -121,9 +121,9 @@ dependencies = [
[[package]] [[package]]
name = "crossbeam-utils" name = "crossbeam-utils"
version = "0.8.7" version = "0.8.8"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b5e5bed1f1c269533fa816a0a5492b3545209a205ca1a54842be180eb63a16a6" checksum = "0bf124c720b7686e3c2663cf54062ab0f68a88af2fb6a030e87e30bf721fcb38"
dependencies = [ dependencies = [
"cfg-if", "cfg-if",
"lazy_static", "lazy_static",
@ -131,16 +131,16 @@ dependencies = [
[[package]] [[package]]
name = "crossterm" name = "crossterm"
version = "0.23.0" version = "0.23.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "77b75a27dc8d220f1f8521ea69cd55a34d720a200ebb3a624d9aa19193d3b432" checksum = "f1fd7173631a4e9e2ca8b32ae2fad58aab9843ea5aaf56642661937d87e28a3e"
dependencies = [ dependencies = [
"bitflags", "bitflags",
"crossterm_winapi", "crossterm_winapi",
"futures-core", "futures-core",
"libc", "libc",
"mio", "mio 0.7.14",
"parking_lot 0.12.0", "parking_lot",
"signal-hook", "signal-hook",
"signal-hook-mio", "signal-hook-mio",
"winapi", "winapi",
@ -293,13 +293,13 @@ dependencies = [
[[package]] [[package]]
name = "getrandom" name = "getrandom"
version = "0.2.4" version = "0.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "418d37c8b1d42553c93648be529cb70f920d3baf8ef469b74b9638df426e0b4c" checksum = "d39cd93900197114fa1fcb7ae84ca742095eed9442088988ae74fa744e930e77"
dependencies = [ dependencies = [
"cfg-if", "cfg-if",
"libc", "libc",
"wasi", "wasi 0.10.2+wasi-snapshot-preview1",
] ]
[[package]] [[package]]
@ -362,7 +362,7 @@ dependencies = [
"chrono", "chrono",
"encoding_rs", "encoding_rs",
"etcetera", "etcetera",
"helix-syntax", "helix-loader",
"log", "log",
"once_cell", "once_cell",
"quickcheck", "quickcheck",
@ -393,6 +393,22 @@ dependencies = [
"serde_json", "serde_json",
"thiserror", "thiserror",
"tokio", "tokio",
"which",
]
[[package]]
name = "helix-loader"
version = "0.6.0"
dependencies = [
"anyhow",
"cc",
"etcetera",
"libloading",
"once_cell",
"serde",
"threadpool",
"toml",
"tree-sitter",
] ]
[[package]] [[package]]
@ -411,17 +427,7 @@ dependencies = [
"thiserror", "thiserror",
"tokio", "tokio",
"tokio-stream", "tokio-stream",
] "which",
[[package]]
name = "helix-syntax"
version = "0.6.0"
dependencies = [
"anyhow",
"cc",
"libloading",
"threadpool",
"tree-sitter",
] ]
[[package]] [[package]]
@ -429,6 +435,7 @@ name = "helix-term"
version = "0.6.0" version = "0.6.0"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"arc-swap",
"chrono", "chrono",
"content_inspector", "content_inspector",
"crossterm", "crossterm",
@ -439,6 +446,7 @@ dependencies = [
"grep-searcher", "grep-searcher",
"helix-core", "helix-core",
"helix-dap", "helix-dap",
"helix-loader",
"helix-lsp", "helix-lsp",
"helix-tui", "helix-tui",
"helix-view", "helix-view",
@ -447,6 +455,7 @@ dependencies = [
"num_cpus", "num_cpus",
"once_cell", "once_cell",
"pulldown-cmark", "pulldown-cmark",
"retain_mut",
"serde", "serde",
"serde_json", "serde_json",
"signal-hook", "signal-hook",
@ -454,6 +463,7 @@ dependencies = [
"tokio", "tokio",
"tokio-stream", "tokio-stream",
"toml", "toml",
"which",
] ]
[[package]] [[package]]
@ -474,6 +484,7 @@ name = "helix-view"
version = "0.6.0" version = "0.6.0"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"arc-swap",
"bitflags", "bitflags",
"chardetng", "chardetng",
"clipboard-win", "clipboard-win",
@ -486,6 +497,7 @@ dependencies = [
"log", "log",
"once_cell", "once_cell",
"serde", "serde",
"serde_json",
"slotmap", "slotmap",
"tokio", "tokio",
"tokio-stream", "tokio-stream",
@ -532,15 +544,6 @@ dependencies = [
"winapi-util", "winapi-util",
] ]
[[package]]
name = "instant"
version = "0.1.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c"
dependencies = [
"cfg-if",
]
[[package]] [[package]]
name = "itoa" name = "itoa"
version = "1.0.1" version = "1.0.1"
@ -568,9 +571,9 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]] [[package]]
name = "libc" name = "libc"
version = "0.2.117" version = "0.2.121"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e74d72e0f9b65b5b4ca49a346af3976df0f9c61d550727f349ecd559f251a26c" checksum = "efaa7b300f3b5fe8eb6bf21ce3895e1751d9665086af2d64b42f19701015ff4f"
[[package]] [[package]]
name = "libloading" name = "libloading"
@ -593,18 +596,18 @@ dependencies = [
[[package]] [[package]]
name = "log" name = "log"
version = "0.4.14" version = "0.4.16"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "51b9bbe6c47d51fc3e1a9b945965946b4c44142ab8792c50835a980d362c2710" checksum = "6389c490849ff5bc16be905ae24bc913a9c8892e19b2341dbc175e14c341c2b8"
dependencies = [ dependencies = [
"cfg-if", "cfg-if",
] ]
[[package]] [[package]]
name = "lsp-types" name = "lsp-types"
version = "0.92.0" version = "0.92.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e8a69d4142d51b208c9fc3cea68b1a7fcef30354e7aa6ccad07250fd8430fc76" checksum = "c79d4897790e8fd2550afa6d6125821edb5716e60e0e285046e070f0f6a06e0e"
dependencies = [ dependencies = [
"bitflags", "bitflags",
"serde", "serde",
@ -647,6 +650,20 @@ dependencies = [
"winapi", "winapi",
] ]
[[package]]
name = "mio"
version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7ba42135c6a5917b9db9cd7b293e5409e1c6b041e6f9825e92e55a894c63b6f8"
dependencies = [
"libc",
"log",
"miow",
"ntapi",
"wasi 0.11.0+wasi-snapshot-preview1",
"winapi",
]
[[package]] [[package]]
name = "miow" name = "miow"
version = "0.3.7" version = "0.3.7"
@ -658,9 +675,9 @@ dependencies = [
[[package]] [[package]]
name = "ntapi" name = "ntapi"
version = "0.3.6" version = "0.3.7"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f6bb902e437b6d86e03cce10a7e2af662292c5dfef23b65899ea3ac9354ad44" checksum = "c28774a7fd2fbb4f0babd8237ce554b73af68021b5f695a3cebd6c59bac0980f"
dependencies = [ dependencies = [
"winapi", "winapi",
] ]
@ -696,20 +713,9 @@ dependencies = [
[[package]] [[package]]
name = "once_cell" name = "once_cell"
version = "1.9.0" version = "1.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da32515d9f6e6e489d7bc9d84c71b060db7247dc035bbe44eac88cf87486d8d5"
[[package]]
name = "parking_lot"
version = "0.11.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99" checksum = "87f3e037eac156d1775da914196f0f37741a274155e34a0b7e427c35d2a2ecb9"
dependencies = [
"instant",
"lock_api",
"parking_lot_core 0.8.5",
]
[[package]] [[package]]
name = "parking_lot" name = "parking_lot"
@ -718,21 +724,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "87f5ec2493a61ac0506c0f4199f99070cbe83857b0337006a30f3e6719b8ef58" checksum = "87f5ec2493a61ac0506c0f4199f99070cbe83857b0337006a30f3e6719b8ef58"
dependencies = [ dependencies = [
"lock_api", "lock_api",
"parking_lot_core 0.9.1", "parking_lot_core",
]
[[package]]
name = "parking_lot_core"
version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d76e8e1493bcac0d2766c42737f34458f1c8c50c0d23bcb24ea953affb273216"
dependencies = [
"cfg-if",
"instant",
"libc",
"redox_syscall",
"smallvec",
"winapi",
] ]
[[package]] [[package]]
@ -806,9 +798,9 @@ dependencies = [
[[package]] [[package]]
name = "rand" name = "rand"
version = "0.8.4" version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2e7573632e6454cf6b99d7aac4ccca54be06da05aca2ef7423d22d27d4d4bcd8" checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
dependencies = [ dependencies = [
"rand_core", "rand_core",
] ]
@ -824,9 +816,9 @@ dependencies = [
[[package]] [[package]]
name = "redox_syscall" name = "redox_syscall"
version = "0.2.10" version = "0.2.11"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8383f39639269cde97d255a32bdb68c047337295414940c68bdd30c2e13203ff" checksum = "8380fe0152551244f0747b1bf41737e0f8a74f97a14ccefd1148187271634f3c"
dependencies = [ dependencies = [
"bitflags", "bitflags",
] ]
@ -843,9 +835,9 @@ dependencies = [
[[package]] [[package]]
name = "regex" name = "regex"
version = "1.5.4" version = "1.5.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d07a8629359eb56f1e2fb1652bb04212c072a87ba68546a04065d525673ac461" checksum = "1a11647b6b25ff05a515cb92c365cec08801e83423a235b51e231e1808747286"
dependencies = [ dependencies = [
"aho-corasick", "aho-corasick",
"memchr", "memchr",
@ -864,13 +856,20 @@ version = "0.6.25"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b" checksum = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b"
[[package]]
name = "retain_mut"
version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8c31b5c4033f8fdde8700e4657be2c497e7288f01515be52168c631e2e4d4086"
[[package]] [[package]]
name = "ropey" name = "ropey"
version = "1.3.2" version = "1.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e6b9aa65bcd9f308d37c7158b4a1afaaa32b8450213e20c9b98e7d5b3cc2fec3" checksum = "fa0dd9b26e2a102b33d400b7b7d196c81a4014eb96eda90b1c5b48d7215d9633"
dependencies = [ dependencies = [
"smallvec", "smallvec",
"str_indices",
] ]
[[package]] [[package]]
@ -953,7 +952,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "29fd5867f1c4f2c5be079aee7a2adf1152ebb04a4bc4d341f504b7dece607ed4" checksum = "29fd5867f1c4f2c5be079aee7a2adf1152ebb04a4bc4d341f504b7dece607ed4"
dependencies = [ dependencies = [
"libc", "libc",
"mio", "mio 0.7.14",
"signal-hook", "signal-hook",
] ]
@ -1007,11 +1006,23 @@ checksum = "f2dd574626839106c320a323308629dcb1acfc96e32a8cba364ddc61ac23ee83"
[[package]] [[package]]
name = "smartstring" name = "smartstring"
version = "0.2.9" version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "31aa6a31c0c2b21327ce875f7e8952322acfcfd0c27569a6e18a647281352c9b" checksum = "3fb72c633efbaa2dd666986505016c32c3044395ceaf881518399d2f4127ee29"
dependencies = [ dependencies = [
"autocfg",
"static_assertions", "static_assertions",
"version_check",
]
[[package]]
name = "socket2"
version = "0.4.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "66d72b759436ae32898a2af0a14218dbf55efde3feeb170eb623637db85ee1e0"
dependencies = [
"libc",
"winapi",
] ]
[[package]] [[package]]
@ -1026,11 +1037,17 @@ version = "1.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d44a3643b4ff9caf57abcee9c2c621d6c03d9135e0d8b589bd9afb5992cb176a" checksum = "d44a3643b4ff9caf57abcee9c2c621d6c03d9135e0d8b589bd9afb5992cb176a"
[[package]]
name = "str_indices"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "283baa48c486e4c5e27b4d92c435db9eaceac236a74dab5e3293570e2c3fa4aa"
[[package]] [[package]]
name = "syn" name = "syn"
version = "1.0.86" version = "1.0.89"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8a65b3f4ffa0092e9887669db0eae07941f023991ab58ea44da8fe8e2d511c6b" checksum = "ea297be220d52398dcc07ce15a209fce436d361735ac1db700cab3b6cdfb9f54"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
@ -1092,19 +1109,20 @@ checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c"
[[package]] [[package]]
name = "tokio" name = "tokio"
version = "1.16.1" version = "1.17.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0c27a64b625de6d309e8c57716ba93021dccf1b3b5c97edd6d3dd2d2135afc0a" checksum = "2af73ac49756f3f7c01172e34a23e5d0216f6c32333757c2c61feb2bbff5a5ee"
dependencies = [ dependencies = [
"bytes", "bytes",
"libc", "libc",
"memchr", "memchr",
"mio", "mio 0.8.1",
"num_cpus", "num_cpus",
"once_cell", "once_cell",
"parking_lot 0.11.2", "parking_lot",
"pin-project-lite", "pin-project-lite",
"signal-hook-registry", "signal-hook-registry",
"socket2",
"tokio-macros", "tokio-macros",
"winapi", "winapi",
] ]
@ -1142,9 +1160,9 @@ dependencies = [
[[package]] [[package]]
name = "tree-sitter" name = "tree-sitter"
version = "0.20.4" version = "0.20.6"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4e34327f8eac545e3f037382471b2b19367725a242bba7bc45edb9efb49fe39a" checksum = "09b3b781640108d29892e8b9684642d2cda5ea05951fd58f0fea1db9edeb9b71"
dependencies = [ dependencies = [
"cc", "cc",
"regex", "regex",
@ -1234,11 +1252,17 @@ version = "0.10.2+wasi-snapshot-preview1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6" checksum = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6"
[[package]]
name = "wasi"
version = "0.11.0+wasi-snapshot-preview1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
[[package]] [[package]]
name = "which" name = "which"
version = "4.2.4" version = "4.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2a5a7e487e921cf220206864a94a89b6c6905bfc19f1057fa26a4cb360e5c1d2" checksum = "5c4fb54e6113b6a8772ee41c3404fb0301ac79604489467e0a9ce1f3e97c24ae"
dependencies = [ dependencies = [
"either", "either",
"lazy_static", "lazy_static",
@ -4,15 +4,15 @@ members = [
"helix-view", "helix-view",
"helix-term", "helix-term",
"helix-tui", "helix-tui",
"helix-syntax",
"helix-lsp", "helix-lsp",
"helix-dap", "helix-dap",
"helix-loader",
"xtask", "xtask",
] ]
# Build helix-syntax in release mode to make the code path faster in development. default-members = [
# [profile.dev.package."helix-syntax"] "helix-term"
# opt-level = 3 ]
[profile.dev] [profile.dev]
split-debuginfo = "unpacked" split-debuginfo = "unpacked"
@ -20,3 +20,10 @@ split-debuginfo = "unpacked"
[profile.release] [profile.release]
lto = "thin" lto = "thin"
# debug = true # debug = true
[profile.opt]
inherits = "release"
lto = "fat"
codegen-units = 1
# strip = "debuginfo" # TODO: or strip = true
opt-level = 3
@ -36,16 +36,18 @@ We provide packaging for various distributions, but here's a quick method to
build from source. build from source.
``` ```
git clone --recurse-submodules --shallow-submodules -j8 https://github.com/helix-editor/helix git clone https://github.com/helix-editor/helix
cd helix cd helix
cargo install --path helix-term cargo install --path helix-term
hx --grammar fetch
hx --grammar build
``` ```
This will install the `hx` binary to `$HOME/.cargo/bin`. This will install the `hx` binary to `$HOME/.cargo/bin` and build tree-sitter grammars.
Helix also needs its runtime files so make sure to copy/symlink the `runtime/` directory into the Helix also needs its runtime files so make sure to copy/symlink the `runtime/` directory into the
config directory (for example `~/.config/helix/runtime` on Linux/macOS, or `%AppData%/helix/runtime` on Windows). config directory (for example `~/.config/helix/runtime` on Linux/macOS, or `%AppData%/helix/runtime` on Windows).
This location can be overriden via the `HELIX_RUNTIME` environment variable. This location can be overridden via the `HELIX_RUNTIME` environment variable.
Packages already solve this for you by wrapping the `hx` binary with a wrapper Packages already solve this for you by wrapping the `hx` binary with a wrapper
that sets the variable to the install dir. that sets the variable to the install dir.
@ -56,6 +58,7 @@ that sets the variable to the install dir.
[![Packaging status](https://repology.org/badge/vertical-allrepos/helix.svg)](https://repology.org/project/helix/versions) [![Packaging status](https://repology.org/badge/vertical-allrepos/helix.svg)](https://repology.org/project/helix/versions)
## MacOS ## MacOS
Helix can be installed on MacOS through homebrew via: Helix can be installed on MacOS through homebrew via:
``` ```
@ -1,19 +0,0 @@
- [ ] completion isIncomplete support
- [ ] respect view fullscreen flag
- [ ] Implement marks (superset of Selection/Range)
- [ ] = for auto indent line/selection
- [ ] lsp: signature help
2
- [ ] store some state between restarts: file positions, prompt history
- [ ] highlight matched characters in picker
3
- [ ] diff mode with highlighting?
- [ ] snippet support (tab to jump between marks)
- [ ] gamelisp/wasm scripting
X
- [ ] rendering via skulpin/skia or raw wgpu
@ -0,0 +1 @@
22.03
@ -1,5 +1,7 @@
# Summary # Summary
[Helix](./title-page.md)
- [Installation](./install.md) - [Installation](./install.md)
- [Usage](./usage.md) - [Usage](./usage.md)
- [Keymap](./keymap.md) - [Keymap](./keymap.md)
@ -14,3 +16,4 @@
- [Guides](./guides/README.md) - [Guides](./guides/README.md)
- [Adding Languages](./guides/adding_languages.md) - [Adding Languages](./guides/adding_languages.md)
- [Adding Textobject Queries](./guides/textobject.md) - [Adding Textobject Queries](./guides/textobject.md)
- [Adding Indent Queries](./guides/indent.md)
@ -5,6 +5,8 @@ To override global configuration parameters, create a `config.toml` file located
* Linux and Mac: `~/.config/helix/config.toml` * Linux and Mac: `~/.config/helix/config.toml`
* Windows: `%AppData%\helix\config.toml` * Windows: `%AppData%\helix\config.toml`
> Hint: You can easily open the config file by typing `:config-open` within Helix normal mode.
Example config: Example config:
```toml ```toml
@ -35,14 +37,20 @@ hidden = false
| `scroll-lines` | Number of lines to scroll per scroll wheel step. | `3` | | `scroll-lines` | Number of lines to scroll per scroll wheel step. | `3` |
| `shell` | Shell to use when running external commands. | Unix: `["sh", "-c"]`<br/>Windows: `["cmd", "/C"]` | | `shell` | Shell to use when running external commands. | Unix: `["sh", "-c"]`<br/>Windows: `["cmd", "/C"]` |
| `line-number` | Line number display: `absolute` simply shows each line's number, while `relative` shows the distance from the current line. When unfocused or in insert mode, `relative` will still show absolute line numbers. | `absolute` | | `line-number` | Line number display: `absolute` simply shows each line's number, while `relative` shows the distance from the current line. When unfocused or in insert mode, `relative` will still show absolute line numbers. | `absolute` |
| `smart-case` | Enable smart case regex searching (case insensitive unless pattern contains upper case characters) | `true` |
| `auto-pairs` | Enable automatic insertion of pairs to parentheses, brackets, etc. | `true` |
| `auto-completion` | Enable automatic pop up of auto-completion. | `true` | | `auto-completion` | Enable automatic pop up of auto-completion. | `true` |
| `idle-timeout` | Time in milliseconds since last keypress before idle timers trigger. Used for autocompletion, set to 0 for instant. | `400` | | `idle-timeout` | Time in milliseconds since last keypress before idle timers trigger. Used for autocompletion, set to 0 for instant. | `400` |
| `completion-trigger-len` | The min-length of word under cursor to trigger autocompletion | `2` | | `completion-trigger-len` | The min-length of word under cursor to trigger autocompletion | `2` |
| `auto-info` | Whether to display infoboxes | `true` | | `auto-info` | Whether to display infoboxes | `true` |
| `true-color` | Set to `true` to override automatic detection of terminal truecolor support in the event of a false negative. | `false` | | `true-color` | Set to `true` to override automatic detection of terminal truecolor support in the event of a false negative. | `false` |
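As an illustration, here is a minimal, hedged `config.toml` sketch combining a few of the keys above (the values are arbitrary examples, assuming these keys live under the `[editor]` table as in the example config earlier on this page):

```toml
[editor]
line-number = "relative"   # show distance from the current line instead of absolute numbers
scroll-lines = 3           # lines scrolled per mouse wheel step
idle-timeout = 400         # milliseconds of inactivity before idle timers (e.g. autocompletion) fire
auto-info = true           # keep the whichkey-style infoboxes enabled
```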
### `[editor.lsp]` Section
| Key | Description | Default |
| --- | ----------- | ------- |
| `display-messages` | Display LSP progress messages below statusline[^1] | `false` |
[^1]: A progress spinner is always shown in the statusline beside the file path.
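For example, a minimal sketch that enables these progress messages; only `display-messages` is taken from the table above, everything else is left at its default:

```toml
[editor.lsp]
# show LSP progress messages below the statusline instead of only the spinner
display-messages = true
```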
### `[editor.cursor-shape]` Section ### `[editor.cursor-shape]` Section
Defines the shape of cursor in each mode. Note that due to limitations Defines the shape of cursor in each mode. Note that due to limitations
@ -76,10 +84,53 @@ available, which is not defined by default.
|`git-exclude` | Enables reading `.git/info/exclude` files. | true |`git-exclude` | Enables reading `.git/info/exclude` files. | true
|`max-depth` | Set with an integer value for maximum depth to recurse. | Defaults to `None`. |`max-depth` | Set with an integer value for maximum depth to recurse. | Defaults to `None`.
## LSP ### `[editor.auto-pairs]` Section
Enable automatic insertion of pairs to parentheses, brackets, etc. Can be
a simple boolean value, or a specific mapping of pairs of single characters.
| Key | Description |
| --- | ----------- |
| `false` | Completely disable auto pairing, regardless of language-specific settings
| `true` | Use the default pairs: <code>(){}[]''""``</code>
| Mapping of pairs | e.g. `{ "(" = ")", "{" = "}", ... }`
Example
To display all language server messages in the status line add the following to your `config.toml`:
```toml ```toml
[lsp] [editor.auto-pairs]
display-messages = true '(' = ')'
'{' = '}'
'[' = ']'
'"' = '"'
'`' = '`'
'<' = '>'
``` ```
Additionally, this setting can be used in a language config. Unless
the editor setting is `false`, this will override the editor config in
documents with this language.
Example `languages.toml` that adds <> and removes ''
```toml
[[language]]
name = "rust"
[language.auto-pairs]
'(' = ')'
'{' = '}'
'[' = ']'
'"' = '"'
'`' = '`'
'<' = '>'
```
### `[editor.search]` Section
Search specific options.
| Key | Description | Default |
| --- | ----------- | ------- |
| `smart-case` | Enable smart case regex searching (case insensitive unless pattern contains upper case characters) | `true` |
| `wrap-around` | Whether the search should wrap after depleting the matches | `true` |
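As a small illustrative sketch, the snippet below keeps smart case but disables wrapping, using only the two keys documented above:

```toml
[editor.search]
smart-case = true    # case insensitive unless the pattern contains upper case characters
wrap-around = false  # stop at the last match instead of wrapping back to the first
```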
@ -2,7 +2,7 @@
| --- | --- | --- | --- | --- | | --- | --- | --- | --- | --- |
| bash | ✓ | | | `bash-language-server` | | bash | ✓ | | | `bash-language-server` |
| c | ✓ | ✓ | ✓ | `clangd` | | c | ✓ | ✓ | ✓ | `clangd` |
| c-sharp | ✓ | | | | | c-sharp | ✓ | | | `OmniSharp` |
| cmake | ✓ | ✓ | ✓ | `cmake-language-server` | | cmake | ✓ | ✓ | ✓ | `cmake-language-server` |
| comment | ✓ | | | | | comment | ✓ | | | |
| cpp | ✓ | ✓ | ✓ | `clangd` | | cpp | ✓ | ✓ | ✓ | `clangd` |
@ -11,22 +11,25 @@
| dockerfile | ✓ | | | `docker-langserver` | | dockerfile | ✓ | | | `docker-langserver` |
| elixir | ✓ | | | `elixir-ls` | | elixir | ✓ | | | `elixir-ls` |
| elm | ✓ | | | `elm-language-server` | | elm | ✓ | | | `elm-language-server` |
| erlang | ✓ | | | | | erlang | ✓ | | | `erlang_ls` |
| fish | ✓ | ✓ | ✓ | | | fish | ✓ | ✓ | ✓ | |
| git-commit | ✓ | | | | | git-commit | ✓ | | | |
| git-config | ✓ | | | | | git-config | ✓ | | | |
| git-diff | ✓ | | | | | git-diff | ✓ | | | |
| git-rebase | ✓ | | | | | git-rebase | ✓ | | | |
| gleam | ✓ | | | |
| glsl | ✓ | | ✓ | | | glsl | ✓ | | ✓ | |
| go | ✓ | ✓ | ✓ | `gopls` | | go | ✓ | ✓ | ✓ | `gopls` |
| graphql | ✓ | | | | | graphql | ✓ | | | |
| haskell | ✓ | | | `haskell-language-server-wrapper` | | haskell | ✓ | | | `haskell-language-server-wrapper` |
| hcl | ✓ | | ✓ | `terraform-ls` |
| html | ✓ | | | | | html | ✓ | | | |
| iex | ✓ | | | | | iex | ✓ | | | |
| java | ✓ | | | | | java | ✓ | | | |
| javascript | ✓ | | ✓ | `typescript-language-server` | | javascript | ✓ | | ✓ | `typescript-language-server` |
| json | ✓ | | ✓ | | | json | ✓ | | ✓ | |
| julia | ✓ | | | `julia` | | julia | ✓ | | | `julia` |
| kotlin | ✓ | | | `kotlin-language-server` |
| latex | ✓ | | | | | latex | ✓ | | | |
| lean | ✓ | | | `lean` | | lean | ✓ | | | `lean` |
| ledger | ✓ | | | | | ledger | ✓ | | | |
@ -40,6 +43,7 @@
| nix | ✓ | | ✓ | `rnix-lsp` | | nix | ✓ | | ✓ | `rnix-lsp` |
| ocaml | ✓ | | ✓ | | | ocaml | ✓ | | ✓ | |
| ocaml-interface | ✓ | | | | | ocaml-interface | ✓ | | | |
| org | ✓ | | | |
| perl | ✓ | ✓ | ✓ | | | perl | ✓ | ✓ | ✓ | |
| php | ✓ | ✓ | ✓ | | | php | ✓ | ✓ | ✓ | |
| prolog | | | | `swipl` | | prolog | | | | `swipl` |
@ -51,6 +55,7 @@
| ruby | ✓ | | ✓ | `solargraph` | | ruby | ✓ | | ✓ | `solargraph` |
| rust | ✓ | ✓ | ✓ | `rust-analyzer` | | rust | ✓ | ✓ | ✓ | `rust-analyzer` |
| scala | ✓ | | ✓ | `metals` | | scala | ✓ | | ✓ | `metals` |
| solidity | ✓ | | | `solc` |
| svelte | ✓ | | ✓ | `svelteserver` | | svelte | ✓ | | ✓ | `svelteserver` |
| tablegen | ✓ | ✓ | ✓ | | | tablegen | ✓ | ✓ | ✓ | |
| toml | ✓ | | | | | toml | ✓ | | | |
@ -5,6 +5,10 @@
| `:open`, `:o` | Open a file from disk into the current view. | | `:open`, `:o` | Open a file from disk into the current view. |
| `:buffer-close`, `:bc`, `:bclose` | Close the current buffer. | | `:buffer-close`, `:bc`, `:bclose` | Close the current buffer. |
| `:buffer-close!`, `:bc!`, `:bclose!` | Close the current buffer forcefully (ignoring unsaved changes). | | `:buffer-close!`, `:bc!`, `:bclose!` | Close the current buffer forcefully (ignoring unsaved changes). |
| `:buffer-close-others`, `:bco`, `:bcloseother` | Close all buffers but the currently focused one. |
| `:buffer-close-others!`, `:bco!`, `:bcloseother!` | Close all buffers but the currently focused one forcefully (ignoring unsaved changes). |
| `:buffer-close-all`, `:bca`, `:bcloseall` | Close all buffers, without quitting. |
| `:buffer-close-all!`, `:bca!`, `:bcloseall!` | Close all buffers forcefully (ignoring unsaved changes), without quitting. |
| `:write`, `:w` | Write changes to disk. Accepts an optional path (:write some/path.txt) | | `:write`, `:w` | Write changes to disk. Accepts an optional path (:write some/path.txt) |
| `:new`, `:n` | Create a new scratch buffer. | | `:new`, `:n` | Create a new scratch buffer. |
| `:format`, `:fmt` | Format the file using the LSP formatter. | | `:format`, `:fmt` | Format the file using the LSP formatter. |
@ -42,11 +46,15 @@
| `:debug-remote`, `:dbg-tcp` | Connect to a debug adapter by TCP address and start a debugging session from a given template with given parameters. | | `:debug-remote`, `:dbg-tcp` | Connect to a debug adapter by TCP address and start a debugging session from a given template with given parameters. |
| `:debug-eval` | Evaluate expression in current debug context. | | `:debug-eval` | Evaluate expression in current debug context. |
| `:vsplit`, `:vs` | Open the file in a vertical split. | | `:vsplit`, `:vs` | Open the file in a vertical split. |
| `:vsplit-new`, `:vnew` | Open a scratch buffer in a vertical split. |
| `:hsplit`, `:hs`, `:sp` | Open the file in a horizontal split. | | `:hsplit`, `:hs`, `:sp` | Open the file in a horizontal split. |
| `:hsplit-new`, `:hnew` | Open a scratch buffer in a horizontal split. |
| `:tutor` | Open the tutorial. | | `:tutor` | Open the tutorial. |
| `:goto`, `:g` | Go to line number. | | `:goto`, `:g` | Go to line number. |
| `:set-option`, `:set` | Set a config option at runtime | | `:set-option`, `:set` | Set a config option at runtime |
| `:sort` | Sort ranges in selection. | | `:sort` | Sort ranges in selection. |
| `:rsort` | Sort ranges in selection in reverse order. | | `:rsort` | Sort ranges in selection in reverse order. |
| `:tree-sitter-subtree`, `:ts-subtree` | Display tree sitter subtree under cursor, primarily for debugging queries. | | `:tree-sitter-subtree`, `:ts-subtree` | Display tree sitter subtree under cursor, primarily for debugging queries. |
| `:config-reload` | Refreshes helix's config. |
| `:config-open` | Open the helix config.toml file. |
| `:help`, `:h` | Open documentation for a command or keybind. | | `:help`, `:h` | Open documentation for a command or keybind. |
@ -1,45 +1,68 @@
# Adding languages # Adding languages
## Submodules ## Language configuration
To add a new language, you should first add a tree-sitter submodule. To do this, To add a new language, you need to add a `language` entry to the
you can run the command [`languages.toml`][languages.toml] found in the root of the repository;
```sh this `languages.toml` file is included at compilation time, and is
git submodule add -f <repository> helix-syntax/languages/tree-sitter-<name> distinct from the `languages.toml` file in the user's [configuration
``` directory](../configuration.md).
For example, to add tree-sitter-ocaml you would run
```sh ```toml
git submodule add -f https://github.com/tree-sitter/tree-sitter-ocaml helix-syntax/languages/tree-sitter-ocaml [[language]]
``` name = "mylang"
Make sure the submodule is shallow by doing scope = "scope.mylang"
```sh injection-regex = "^mylang$"
git config -f .gitmodules submodule.helix-syntax/languages/tree-sitter-<name>.shallow true file-types = ["mylang", "myl"]
comment-token = "#"
indent = { tab-width = 2, unit = " " }
``` ```
or you can manually add `shallow = true` to `.gitmodules`. These are the available keys and descriptions for the file.
## languages.toml | Key | Description |
| ---- | ----------- |
| `name` | The name of the language |
| `scope` | A string like `source.js` that identifies the language. Currently, we strive to match the scope names used by popular TextMate grammars and by the Linguist library. Usually `source.<name>` or `text.<name>` in case of markup languages |
| `injection-regex` | regex pattern that will be tested against a language name in order to determine whether this language should be used for a potential [language injection][treesitter-language-injection] site. |
| `file-types` | The filetypes of the language, for example `["yml", "yaml"]`. Extensions and full file names are supported. |
| `shebangs` | The interpreters from the shebang line, for example `["sh", "bash"]` |
| `roots` | A set of marker files to look for when trying to find the workspace root. For example `Cargo.lock`, `yarn.lock` |
| `auto-format` | Whether to autoformat this language when saving |
| `diagnostic-severity` | Minimal severity of diagnostic for it to be displayed. (Allowed values: `Error`, `Warning`, `Info`, `Hint`) |
| `comment-token` | The token to use as a comment-token |
| `indent` | The indent to use. Has sub keys `tab-width` and `unit` |
| `config` | Language server configuration |
| `grammar` | The tree-sitter grammar to use (defaults to the value of `name`) |
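For reference, a slightly fuller (still hypothetical) `languages.toml` entry exercising more of the keys above might look like the following; every value is illustrative only:

```toml
[[language]]
name = "mylang"
scope = "scope.mylang"
injection-regex = "^mylang$"
file-types = ["mylang", "myl"]
shebangs = ["mylang"]            # interpreters recognized from the shebang line
roots = ["mylang.toml"]          # marker file used to find the workspace root
auto-format = false
comment-token = "#"
indent = { tab-width = 2, unit = "  " }
```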
Next, you need to add the language to the [`languages.toml`][languages.toml] found in the root of ## Grammar configuration
the repository; this `languages.toml` file is included at compilation time, and
is distinct from the `language.toml` file in the user's [configuration
directory](../configuration.md).
These are the available keys and descriptions for the file. If a tree-sitter grammar is available for the language, add a new `grammar`
entry to `languages.toml`.
```toml
[[grammar]]
name = "mylang"
source = { git = "https://github.com/example/mylang", rev = "a250c4582510ff34767ec3b7dcdd3c24e8c8aa68" }
```
Grammar configuration takes these keys:
| Key | Description | | Key | Description |
| ---- | ----------- | | --- | ----------- |
| name | The name of the language | | `name` | The name of the tree-sitter grammar |
| scope | A string like `source.js` that identifies the language. Currently, we strive to match the scope names used by popular TextMate grammars and by the Linguist library. Usually `source.<name>` or `text.<name>` in case of markup languages | | `source` | The method of fetching the grammar - a table with a schema defined below |
| injection-regex | regex pattern that will be tested against a language name in order to determine whether this language should be used for a potential [language injection][treesitter-language-injection] site. |
| file-types | The filetypes of the language, for example `["yml", "yaml"]` | Where `source` is a table with either these keys when using a grammar from a
| shebangs | The interpreters from the shebang line, for example `["sh", "bash"]` | git repository:
| roots | A set of marker files to look for when trying to find the workspace root. For example `Cargo.lock`, `yarn.lock` |
| auto-format | Whether to autoformat this language when saving | | Key | Description |
| diagnostic-severity | Minimal severity of diagnostic for it to be displayed. (Allowed values: `Error`, `Warning`, `Info`, `Hint`) | | --- | ----------- |
| comment-token | The token to use as a comment-token | | `git` | A git remote URL from which the grammar should be cloned |
| indent | The indent to use. Has sub keys `tab-width` and `unit` | | `rev` | The revision (commit hash or tag) which should be fetched |
| config | Language server configuration | | `subpath` | A path within the grammar directory which should be built. Some grammar repositories host multiple grammars (for example `tree-sitter-typescript` and `tree-sitter-ocaml`) in subdirectories. This key is used to point `hx --grammar build` to the correct path for compilation. When omitted, the root of the repository is used |
Or a `path` key with an absolute path to a locally available grammar directory.
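For instance, a hedged sketch of a grammar entry built from a local checkout (the path below is hypothetical):

```toml
[[grammar]]
name = "mylang"
# absolute path to a locally available grammar directory instead of a git source
source = { path = "/home/user/src/tree-sitter-mylang" }
```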
## Queries ## Queries
@ -51,18 +74,14 @@ gives more info on how to write queries.
> NOTE: When evaluating queries, the first matching query takes > NOTE: When evaluating queries, the first matching query takes
precedence, which is different from other editors like neovim where precedence, which is different from other editors like neovim where
the last matching query supercedes the ones before it. See the last matching query supersedes the ones before it. See
[this issue][neovim-query-precedence] for an example. [this issue][neovim-query-precedence] for an example.
## Common Issues ## Common Issues
- If you get errors when building after switching branches, you may have to remove or update tree-sitter submodules. You can update submodules by running - If you get errors when running after switching branches, you may have to update the tree-sitter grammars. Run `hx --grammar fetch` to fetch the grammars and `hx --grammar build` to build any out-of-date grammars.
```sh
git submodule sync; git submodule update --init
```
- Make sure to not use the `--remote` flag. To remove submodules look inside the `.gitmodules` and remove directories that are not present inside of it.
- If a parser is segfaulting or you want to remove the parser, make sure to remove the submodule *and* the compiled parser in `runtime/grammar/<name>.so` - If a parser is segfaulting or you want to remove the parser, make sure to remove the compiled parser in `runtime/grammar/<name>.so`
- The indents query is `indents.toml`, *not* `indents.scm`. See [this](https://github.com/helix-editor/helix/issues/114) issue for more information. - The indents query is `indents.toml`, *not* `indents.scm`. See [this](https://github.com/helix-editor/helix/issues/114) issue for more information.
@ -0,0 +1,79 @@
# Adding Indent Queries
Helix uses tree-sitter to correctly indent new lines. This requires
a tree-sitter grammar and an `indents.scm` query file placed in
`runtime/queries/{language}/indents.scm`. The indentation for a line
is calculated by traversing the syntax tree from the lowest node at the
beginning of the new line. Each of these nodes contributes to the total
indent when it is captured by the query (in what way depends on the name
of the capture).
Note that it matters where these added indents begin. For example,
multiple indent level increases that start on the same line only increase
the total indent level by 1.
## Scopes
Added indents don't always apply to the whole node. For example, in most
cases when a node should be indented, we actually only want everything
except for its first line to be indented. For this, there are several
scopes (more scopes may be added in the future if required):
- `all`:
This scope applies to the whole captured node. This is only different from
`tail` when the captured node is the first node on its line.
- `tail`:
This scope applies to everything except for the first line of the
captured node.
Every capture type has a default scope which should do the right thing
in most situations. When a different scope is required, this can be
changed by using a `#set!` declaration anywhere in the pattern:
```scm
(assignment_expression
right: (_) @indent
(#set! "scope" "all"))
```
## Capture Types
- `@indent` (default scope `tail`):
Increase the indent level by 1. Multiple occurrences in the same line
don't stack. If there is at least one `@indent` and one `@outdent`
capture on the same line, the indent level isn't changed at all.
- `@outdent` (default scope `all`):
Decrease the indent level by 1. The same rules as for `@indent` apply.
## Predicates
In some cases, an S-expression cannot express exactly what pattern should be matched.
For that, tree-sitter allows for predicates to appear anywhere within a pattern,
similar to how `#set!` declarations work:
```scm
(some_kind
(child_kind) @indent
(#predicate? arg1 arg2 ...)
)
```
The number of arguments depends on the predicate that's used.
Each argument is either a capture (`@name`) or a string (`"some string"`).
The following predicates are supported by tree-sitter:
- `#eq?`/`#not-eq?`:
The first argument (a capture) must/must not be equal to the second argument
(a capture or a string).
- `#match?`/`#not-match?`:
The first argument (a capture) must/must not match the regex given in the
second argument (a string).
Additionally, we support some custom predicates for indent queries:
- `#not-kind-eq?`:
The kind of the first argument (a capture) must not be equal to the second
argument (a string).
- `#same-line?`/`#not-same-line?`:
The captures given by the 2 arguments must/must not start on the same line.
@ -21,6 +21,8 @@ The following [captures][tree-sitter-captures] are recognized:
| `class.inside` | | `class.inside` |
| `class.around` | | `class.around` |
| `parameter.inside` | | `parameter.inside` |
| `comment.inside` |
| `comment.around` |
[Example query files][textobject-examples] can be found in the helix GitHub repository. [Example query files][textobject-examples] can be found in the helix GitHub repository.
@ -19,7 +19,12 @@ brew install helix
A [flake](https://nixos.wiki/wiki/Flakes) containing the package is available in A [flake](https://nixos.wiki/wiki/Flakes) containing the package is available in
the project root. The flake can also be used to spin up a reproducible development the project root. The flake can also be used to spin up a reproducible development
shell for working on Helix. shell for working on Helix with `nix develop`.
Flake outputs are cached for each push to master using
[Cachix](https://www.cachix.org/). With Cachix
[installed](https://docs.cachix.org/installation), `cachix use helix` will
configure Nix to use cached outputs when possible.
### Arch Linux ### Arch Linux
@ -39,7 +44,7 @@ sudo dnf install helix
## Build from source ## Build from source
``` ```
git clone --recurse-submodules --shallow-submodules -j8 https://github.com/helix-editor/helix git clone https://github.com/helix-editor/helix
cd helix cd helix
cargo install --path helix-term cargo install --path helix-term
``` ```
@ -49,3 +54,9 @@ This will install the `hx` binary to `$HOME/.cargo/bin`.
Helix also needs its runtime files so make sure to copy/symlink the `runtime/` directory into the Helix also needs its runtime files so make sure to copy/symlink the `runtime/` directory into the
config directory (for example `~/.config/helix/runtime` on Linux/macOS). This location can be overridden config directory (for example `~/.config/helix/runtime` on Linux/macOS). This location can be overridden
via the `HELIX_RUNTIME` environment variable. via the `HELIX_RUNTIME` environment variable.
## Building tree-sitter grammars
Tree-sitter grammars must be fetched and compiled if not pre-packaged.
Fetch grammars with `hx --grammar fetch` (requires `git`) and compile them
with `hx --grammar build` (requires a C compiler).
@ -11,10 +11,10 @@
| Key | Description | Command | | Key | Description | Command |
| ----- | ----------- | ------- | | ----- | ----------- | ------- |
| `h`/`Left` | Move left | `move_char_left` | | `h`, `Left` | Move left | `move_char_left` |
| `j`/`Down` | Move down | `move_line_down` | | `j`, `Down` | Move down | `move_line_down` |
| `k`/`Up` | Move up | `move_line_up` | | `k`, `Up` | Move up | `move_line_up` |
| `l`/`Right` | Move right | `move_char_right` | | `l`, `Right` | Move right | `move_char_right` |
| `w` | Move next word start | `move_next_word_start` | | `w` | Move next word start | `move_next_word_start` |
| `b` | Move previous word start | `move_prev_word_start` | | `b` | Move previous word start | `move_prev_word_start` |
| `e` | Move next word end | `move_next_word_end` | | `e` | Move next word end | `move_next_word_end` |
@ -27,24 +27,15 @@
| `F` | Find previous char | `find_prev_char` | | `F` | Find previous char | `find_prev_char` |
| `G` | Go to line number `<n>` | `goto_line` | | `G` | Go to line number `<n>` | `goto_line` |
| `Alt-.` | Repeat last motion (`f`, `t` or `m`) | `repeat_last_motion` | | `Alt-.` | Repeat last motion (`f`, `t` or `m`) | `repeat_last_motion` |
| `Alt-:` | Ensures the selection is in forward direction | `ensure_selections_forward` |
| `Home` | Move to the start of the line | `goto_line_start` | | `Home` | Move to the start of the line | `goto_line_start` |
| `End` | Move to the end of the line | `goto_line_end` | | `End` | Move to the end of the line | `goto_line_end` |
| `PageUp` | Move page up | `page_up` | | `Ctrl-b`, `PageUp` | Move page up | `page_up` |
| `PageDown` | Move page down | `page_down` | | `Ctrl-f`, `PageDown` | Move page down | `page_down` |
| `Ctrl-u` | Move half page up | `half_page_up` | | `Ctrl-u` | Move half page up | `half_page_up` |
| `Ctrl-d` | Move half page down | `half_page_down` | | `Ctrl-d` | Move half page down | `half_page_down` |
| `Ctrl-i` | Jump forward on the jumplist | `jump_forward` | | `Ctrl-i` | Jump forward on the jumplist | `jump_forward` |
| `Ctrl-o` | Jump backward on the jumplist | `jump_backward` | | `Ctrl-o` | Jump backward on the jumplist | `jump_backward` |
| `Ctrl-s` | Save the current selection to the jumplist | `save_selection` | | `Ctrl-s` | Save the current selection to the jumplist | `save_selection` |
| `v` | Enter [select (extend) mode](#select--extend-mode) | `select_mode` |
| `g` | Enter [goto mode](#goto-mode) | N/A |
| `m` | Enter [match mode](#match-mode) | N/A |
| `:` | Enter command mode | `command_mode` |
| `z` | Enter [view mode](#view-mode) | N/A |
| `Z` | Enter sticky [view mode](#view-mode) | N/A |
| `Ctrl-w` | Enter [window mode](#window-mode) | N/A |
| `Space` | Enter [space mode](#space-mode) | N/A |
### Changes ### Changes
@ -104,6 +95,7 @@
| `_` | Trim whitespace from the selection | `trim_selections` | | `_` | Trim whitespace from the selection | `trim_selections` |
| `;` | Collapse selection onto a single cursor | `collapse_selection` | | `;` | Collapse selection onto a single cursor | `collapse_selection` |
| `Alt-;` | Flip selection cursor and anchor | `flip_selections` | | `Alt-;` | Flip selection cursor and anchor | `flip_selections` |
| `Alt-:` | Ensures the selection is in forward direction | `ensure_selections_forward` |
| `,` | Keep only the primary selection | `keep_primary_selection` | | `,` | Keep only the primary selection | `keep_primary_selection` |
| `Alt-,` | Remove the primary selection | `remove_primary_selection` | | `Alt-,` | Remove the primary selection | `remove_primary_selection` |
| `C` | Copy selection onto the next line (Add cursor below) | `copy_selection_on_next_line` | | `C` | Copy selection onto the next line (Add cursor below) | `copy_selection_on_next_line` |
@ -119,13 +111,14 @@
| `K` | Keep selections matching the regex | `keep_selections` | | `K` | Keep selections matching the regex | `keep_selections` |
| `Alt-K` | Remove selections matching the regex | `remove_selections` | | `Alt-K` | Remove selections matching the regex | `remove_selections` |
| `Ctrl-c` | Comment/uncomment the selections | `toggle_comments` | | `Ctrl-c` | Comment/uncomment the selections | `toggle_comments` |
| `Alt-k` | Expand selection to parent syntax node (**TS**) | `expand_selection` | | `Alt-k`, `Alt-up` | Expand selection to parent syntax node (**TS**) | `expand_selection` |
| `Alt-j` | Shrink syntax tree object selection (**TS**) | `shrink_selection` | | `Alt-j`, `Alt-down` | Shrink syntax tree object selection (**TS**) | `shrink_selection` |
| `Alt-h` | Select previous sibling node in syntax tree (**TS**) | `select_prev_sibling` | | `Alt-h`, `Alt-left` | Select previous sibling node in syntax tree (**TS**) | `select_prev_sibling` |
| `Alt-l` | Select next sibling node in syntax tree (**TS**) | `select_next_sibling` | | `Alt-l`, `Alt-right` | Select next sibling node in syntax tree (**TS**) | `select_next_sibling` |
### Search ### Search
Search commands all operate on the `/` register by default. Use `"<char>` to operate on a different one.
| Key | Description | Command | | Key | Description | Command |
| ----- | ----------- | ------- | | ----- | ----------- | ------- |
@ -139,6 +132,17 @@
These sub-modes are accessible from normal mode and typically switch back to normal mode after a command. These sub-modes are accessible from normal mode and typically switch back to normal mode after a command.
| Key | Description | Command |
| ----- | ----------- | ------- |
| `v` | Enter [select (extend) mode](#select--extend-mode) | `select_mode` |
| `g` | Enter [goto mode](#goto-mode) | N/A |
| `m` | Enter [match mode](#match-mode) | N/A |
| `:` | Enter command mode | `command_mode` |
| `z` | Enter [view mode](#view-mode) | N/A |
| `Z` | Enter sticky [view mode](#view-mode) | N/A |
| `Ctrl-w` | Enter [window mode](#window-mode) | N/A |
| `Space` | Enter [space mode](#space-mode) | N/A |
#### View mode #### View mode
View mode is intended for scrolling and manipulating the view without changing View mode is intended for scrolling and manipulating the view without changing
@ -155,8 +159,8 @@ over text and not actively editing it).
| `m` | Align the line to the middle of the screen (horizontally) | `align_view_middle` | | `m` | Align the line to the middle of the screen (horizontally) | `align_view_middle` |
| `j`, `down` | Scroll the view downwards | `scroll_down` | | `j`, `down` | Scroll the view downwards | `scroll_down` |
| `k`, `up` | Scroll the view upwards | `scroll_up` | | `k`, `up` | Scroll the view upwards | `scroll_up` |
| `Ctrl-f` | Move page down | `page_down` | | `Ctrl-f`, `PageDown` | Move page down | `page_down` |
| `Ctrl-b` | Move page up | `page_up` | | `Ctrl-b`, `PageUp` | Move page up | `page_up` |
| `Ctrl-d` | Move half page down | `half_page_down` | | `Ctrl-d` | Move half page down | `half_page_down` |
| `Ctrl-u` | Move half page up | `half_page_up` | | `Ctrl-u` | Move half page up | `half_page_up` |
@ -211,12 +215,12 @@ This layer is similar to vim keybindings as kakoune does not support window.
| `w`, `Ctrl-w` | Switch to next window | `rotate_view` | | `w`, `Ctrl-w` | Switch to next window | `rotate_view` |
| `v`, `Ctrl-v` | Vertical right split | `vsplit` | | `v`, `Ctrl-v` | Vertical right split | `vsplit` |
| `s`, `Ctrl-s` | Horizontal bottom split | `hsplit` | | `s`, `Ctrl-s` | Horizontal bottom split | `hsplit` |
| `h`, `Ctrl-h`, `left` | Move to left split | `jump_view_left` |
| `f` | Go to files in the selection in horizontal splits | `goto_file` | | `f` | Go to files in the selection in horizontal splits | `goto_file` |
| `F` | Go to files in the selection in vertical splits | `goto_file` | | `F` | Go to files in the selection in vertical splits | `goto_file` |
| `j`, `Ctrl-j`, `down` | Move to split below | `jump_view_down` | | `h`, `Ctrl-h`, `Left` | Move to left split | `jump_view_left` |
| `k`, `Ctrl-k`, `up` | Move to split above | `jump_view_up` | | `j`, `Ctrl-j`, `Down` | Move to split below | `jump_view_down` |
| `l`, `Ctrl-l`, `right` | Move to right split | `jump_view_right` | | `k`, `Ctrl-k`, `Up` | Move to split above | `jump_view_up` |
| `l`, `Ctrl-l`, `Right` | Move to right split | `jump_view_right` |
| `q`, `Ctrl-q` | Close current window | `wclose` | | `q`, `Ctrl-q` | Close current window | `wclose` |
| `o`, `Ctrl-o` | Only keep the current window, closing all the others | `wonly` | | `o`, `Ctrl-o` | Only keep the current window, closing all the others | `wonly` |
@ -242,6 +246,7 @@ This layer is a kludge of mappings, mostly pickers.
| `Y` | Yank main selection to clipboard | `yank_main_selection_to_clipboard` | | `Y` | Yank main selection to clipboard | `yank_main_selection_to_clipboard` |
| `R` | Replace selections by clipboard contents | `replace_selections_with_clipboard` | | `R` | Replace selections by clipboard contents | `replace_selections_with_clipboard` |
| `/` | Global search in workspace folder | `global_search` | | `/` | Global search in workspace folder | `global_search` |
| `?` | Open command palette | `command_palette` |
> TIP: Global search displays results in a fuzzy picker, use `space + '` to bring it back up after opening a file. > TIP: Global search displays results in a fuzzy picker, use `space + '` to bring it back up after opening a file.
@ -268,40 +273,57 @@ Mappings in the style of [vim-unimpaired](https://github.com/tpope/vim-unimpaire
| `[f` | Go to previous function (**TS**) | `goto_prev_function` | | `[f` | Go to previous function (**TS**) | `goto_prev_function` |
| `]c` | Go to next class (**TS**) | `goto_next_class` | | `]c` | Go to next class (**TS**) | `goto_next_class` |
| `[c` | Go to previous class (**TS**) | `goto_prev_class` | | `[c` | Go to previous class (**TS**) | `goto_prev_class` |
| `]p` | Go to next parameter (**TS**) | `goto_next_parameter` | | `]a` | Go to next argument/parameter (**TS**) | `goto_next_parameter` |
| `[p` | Go to previous parameter (**TS**) | `goto_prev_parameter` | | `[a` | Go to previous argument/parameter (**TS**) | `goto_prev_parameter` |
| `]o` | Go to next comment (**TS**) | `goto_next_comment` |
| `[o` | Go to previous comment (**TS**) | `goto_prev_comment` |
| `[space` | Add newline above | `add_newline_above` | | `[space` | Add newline above | `add_newline_above` |
| `]space` | Add newline below | `add_newline_below` | | `]space` | Add newline below | `add_newline_below` |
## Insert Mode ## Insert Mode
We support many readline/emacs style bindings in insert mode for
convenience. These can be helpful for making simple modifications
without escaping to normal mode, but beware that you will not have an
undo-able "save point" until you return to normal mode.
| Key | Description | Command | | Key | Description | Command |
| ----- | ----------- | ------- | | ----- | ----------- | ------- |
| `Escape` | Switch to normal mode | `normal_mode` | | `Escape` | Switch to normal mode | `normal_mode` |
| `Ctrl-x` | Autocomplete | `completion` | | `Ctrl-x` | Autocomplete | `completion` |
| `Ctrl-r` | Insert a register content | `insert_register` | | `Ctrl-r` | Insert a register content | `insert_register` |
| `Ctrl-w` | Delete previous word | `delete_word_backward` | | `Ctrl-w`, `Alt-Backspace` | Delete previous word | `delete_word_backward` |
| `Alt-d` | Delete next word | `delete_word_forward` | | `Alt-d` | Delete next word | `delete_word_forward` |
| `Alt-b`, `Alt-Left` | Backward a word | `move_prev_word_end` | | `Alt-b`, `Alt-Left` | Backward a word | `move_prev_word_end` |
| `Ctrl-b`, `Left` | Backward a char | `move_char_left` | | `Ctrl-b`, `Left` | Backward a char | `move_char_left` |
| `Alt-f`, `Alt-Right` | Forward a word | `move_next_word_start` | | `Alt-f`, `Alt-Right` | Forward a word | `move_next_word_start` |
| `Ctrl-f`, `Right` | Forward a char | `move_char_right` | | `Ctrl-f`, `Right` | Forward a char | `move_char_right` |
| `Ctrl-e`, `End` | move to line end | `goto_line_end_newline` | | `Ctrl-e`, `End` | Move to line end | `goto_line_end_newline` |
| `Ctrl-a`, `Home` | move to line start | `goto_line_start` | | `Ctrl-a`, `Home` | Move to line start | `goto_line_start` |
| `Ctrl-u` | delete to start of line | `kill_to_line_start` | | `Ctrl-u` | Delete to start of line | `kill_to_line_start` |
| `Ctrl-k` | delete to end of line | `kill_to_line_end` | | `Ctrl-k` | Delete to end of line | `kill_to_line_end` |
| `backspace`, `Ctrl-h` | delete previous char | `delete_char_backward` | | `Ctrl-j`, `Enter` | Insert new line | `insert_newline` |
| `delete`, `Ctrl-d` | delete next char | `delete_char_forward` | | `Backspace`, `Ctrl-h` | Delete previous char | `delete_char_backward` |
| `Ctrl-p`, `Up` | move to previous line | `move_line_up` | | `Delete`, `Ctrl-d` | Delete next char | `delete_char_forward` |
| `Ctrl-n`, `Down` | move to next line | `move_line_down` | | `Ctrl-p`, `Up` | Move to previous line | `move_line_up` |
| `Ctrl-n`, `Down` | Move to next line | `move_line_down` |
| `PageUp` | Move one page up | `page_up` |
| `PageDown` | Move one page down | `page_down` |
| `Alt->` | Go to end of buffer | `goto_file_end` |
| `Alt-<` | Go to start of buffer | `goto_file_start` |
## Select / extend mode ## Select / extend mode
I'm still pondering whether to keep this mode or not. It changes movement This mode echoes Normal mode, but changes any movements to extend
commands (including goto) to extend the existing selection instead of replacing it. selections rather than replace them. Goto motions are also changed to
extend, so that `vgl` for example extends the selection to the end of
the line.
> NOTE: It's a bit confusing at the moment because extend hasn't been Search is also affected. By default, `n` and `N` will remove the current
> implemented for all movement commands yet. selection and select the next instance of the search term. Toggling this
mode before pressing `n` or `N` makes it possible to keep the current
selection. Toggling it on and off during your iterative searching allows
you to selectively add search terms to your selections.
# Picker # Picker

@ -4,10 +4,37 @@ Language-specific settings and settings for particular language servers can be c
Changes made to the `languages.toml` file in a user's [configuration directory](./configuration.md) are merged with helix's defaults on start-up, such that a user's settings will take precedence over defaults in the event of a collision. For example, the default `languages.toml` sets rust's `auto-format` to `true`. If a user wants to disable auto-format, they can change the `languages.toml` in their [configuration directory](./configuration.md) to make the rust entry read like the example below; the new key/value pair `auto-format = false` will override the default when the two sets of settings are merged on start-up: Changes made to the `languages.toml` file in a user's [configuration directory](./configuration.md) are merged with helix's defaults on start-up, such that a user's settings will take precedence over defaults in the event of a collision. For example, the default `languages.toml` sets rust's `auto-format` to `true`. If a user wants to disable auto-format, they can change the `languages.toml` in their [configuration directory](./configuration.md) to make the rust entry read like the example below; the new key/value pair `auto-format = false` will override the default when the two sets of settings are merged on start-up:
``` ```toml
# in <config_dir>/helix/languages.toml # in <config_dir>/helix/languages.toml
[[language]] [[language]]
name = "rust" name = "rust"
auto-format = false auto-format = false
``` ```
## Tree-sitter grammars
Tree-sitter grammars can also be configured in `languages.toml`:
```toml
# in <config_dir>/helix/languages.toml
[[grammar]]
name = "rust"
source = { git = "https://github.com/tree-sitter/tree-sitter-rust", rev = "a250c4582510ff34767ec3b7dcdd3c24e8c8aa68" }
[[grammar]]
name = "c"
source = { path = "/path/to/tree-sitter-c" }
```
You may use a top-level `use-grammars` key to control which grammars are fetched and built.
```toml
# Note: this key must come **before** the [[language]] and [[grammar]] sections
use-grammars = { only = [ "rust", "c", "cpp" ] }
# or
use-grammars = { except = [ "yaml", "json" ] }
```
When omitted, all grammars are fetched and built.

@ -166,6 +166,8 @@ We use a similar set of scopes as
- `markup` - `markup`
- `heading` - `heading`
- `marker`
- `1`, `2`, `3`, `4`, `5`, `6` - heading text for h1 through h6
- `list` - `list`
- `unnumbered` - `unnumbered`
- `numbered` - `numbered`
@ -216,12 +218,12 @@ These scopes are used for theming the editor interface.
| `ui.statusline` | Statusline | | `ui.statusline` | Statusline |
| `ui.statusline.inactive` | Statusline (unfocused document) | | `ui.statusline.inactive` | Statusline (unfocused document) |
| `ui.popup` | | | `ui.popup` | |
| `ui.popup.info` | |
| `ui.window` | | | `ui.window` | |
| `ui.help` | | | `ui.help` | |
| `ui.text` | | | `ui.text` | |
| `ui.text.focus` | | | `ui.text.focus` | |
| `ui.info` | | | `ui.text.info` | |
| `ui.info.text` | |
| `ui.menu` | | | `ui.menu` | |
| `ui.menu.selected` | | | `ui.menu.selected` | |
| `ui.selection` | For selections in the editing area | | `ui.selection` | For selections in the editing area |

@ -0,0 +1,15 @@
# Helix
Docs for bleeding edge master can be found at
[https://docs.helix-editor.com/master](https://docs.helix-editor.com/master).
See the [usage] section for a quick overview of the editor, the [keymap]
section for all available keybindings, and the [configuration] section
for defining custom keybindings, setting themes, etc.
Refer to the [FAQ] for common questions.
[FAQ]: https://github.com/helix-editor/helix/wiki/FAQ
[usage]: ./usage.md
[keymap]: ./keymap.md
[configuration]: ./configuration.md

@ -68,7 +68,8 @@ Currently supported: `word`, `surround`, `function`, `class`, `parameter`.
| `(`, `[`, `'`, etc | Specified surround pairs | | `(`, `[`, `'`, etc | Specified surround pairs |
| `f` | Function | | `f` | Function |
| `c` | Class | | `c` | Class |
| `p` | Parameter | | `a` | Argument/parameter |
| `o` | Comment |
> NOTE: `f`, `c`, etc need a tree-sitter grammar active for the current > NOTE: `f`, `c`, etc need a tree-sitter grammar active for the current
document and a special tree-sitter query file to work properly. [Only document and a special tree-sitter query file to work properly. [Only

@ -2,8 +2,9 @@
| Crate | Description | | Crate | Description |
| ----------- | ----------- | | ----------- | ----------- |
| helix-core | Core editing primitives, functional. | | helix-core | Core editing primitives, functional. |
| helix-syntax | Tree-sitter grammars |
| helix-lsp | Language server client | | helix-lsp | Language server client |
| helix-dap | Debug Adapter Protocol (DAP) client |
| helix-loader | Functions for building, fetching, and loading external resources |
| helix-view | UI abstractions for use in backends, imperative shell. | | helix-view | UI abstractions for use in backends, imperative shell. |
| helix-term | Terminal UI | | helix-term | Terminal UI |
| helix-tui | TUI primitives, forked from tui-rs, inspired by Cursive | | helix-tui | TUI primitives, forked from tui-rs, inspired by Cursive |
@ -54,15 +55,40 @@ A `Document` ties together the `Rope`, `Selection`(s), `Syntax`, document
file. file.
A `View` represents an open split in the UI. It holds the currently open A `View` represents an open split in the UI. It holds the currently open
document ID and other related state. document ID and other related state. Views encapsulate the gutter, status line,
diagnostics, and the inner area where the code is displayed.
> NOTE: Multiple views are able to display the same document, so the document > NOTE: Multiple views are able to display the same document, so the document
> contains selections for each view. To retrieve, `document.selection()` takes > contains selections for each view. To retrieve, `document.selection()` takes
> a `ViewId`. > a `ViewId`.
`Info` is the autoinfo box that shows hints when awaiting another key with bindings
like `g` and `m`. It is attached to the viewport as a whole.
`Surface` is like a buffer to which widgets draw themselves, and the
surface is then rendered on the screen on each cycle.
`Rect`s are areas (simply an x and y coordinate with the origin at the
screen top left and then a height and width) which are part of a
`Surface`. They can be used to limit the area to which a `Component` can
render. For example, if we wrap a `Markdown` component in a `Popup`
(think the documentation popup with space+k), Markdown's render method
will get a Rect that is the exact size of the popup.
Widgets are called `Component`s internally, and you can see most of them
in `helix-term/src/ui`. Some components like `Popup` and `Overlay` can take
other components as children.
`Layer`s are how multiple components are displayed; a layer is simply a
`Vec<Component>`. Layers are managed by the `Compositor`. On each top
level render call, the compositor renders each component in the order
they were pushed into the stack. This makes multiple components "layer"
on top of one another. Hence we get a file picker displayed over the
editor, etc.
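To make these relationships concrete, here is a minimal, self-contained sketch of the compositing model described above. The names mirror the concepts (`Rect`, `Surface`, `Component`, `Compositor`) but are deliberately simplified and hypothetical; they are not the actual helix-term API.
```rust
// Illustrative sketch only: simplified, hypothetical types that mirror the
// concepts above, not the real helix-term definitions.

#[derive(Clone, Copy)]
struct Rect {
    x: u16,
    y: u16,
    width: u16,
    height: u16,
}

/// A cell buffer that widgets draw into; it would be flushed to the
/// terminal once per render cycle.
struct Surface {
    width: u16,
    cells: Vec<char>,
}

impl Surface {
    fn new(width: u16, height: u16) -> Self {
        Self {
            width,
            cells: vec![' '; width as usize * height as usize],
        }
    }

    fn set(&mut self, x: u16, y: u16, ch: char) {
        let idx = y as usize * self.width as usize + x as usize;
        if let Some(cell) = self.cells.get_mut(idx) {
            *cell = ch;
        }
    }
}

/// Widgets implement this; `area` limits where they may draw.
trait Component {
    fn render(&mut self, area: Rect, surface: &mut Surface);
}

struct EditorView;
struct FilePicker;

impl Component for EditorView {
    fn render(&mut self, area: Rect, surface: &mut Surface) {
        surface.set(area.x, area.y, 'E'); // stand-in for drawing the document
    }
}

impl Component for FilePicker {
    fn render(&mut self, area: Rect, surface: &mut Surface) {
        surface.set(area.x, area.y, 'P'); // rendered later, so it overlays
    }
}

/// Renders its layers in push order, so later components "layer" on top.
struct Compositor {
    layers: Vec<Box<dyn Component>>,
}

impl Compositor {
    fn render(&mut self, area: Rect, surface: &mut Surface) {
        for layer in self.layers.iter_mut() {
            layer.render(area, surface);
        }
    }
}

fn main() {
    let area = Rect { x: 0, y: 0, width: 8, height: 2 };
    let mut surface = Surface::new(area.width, area.height);

    let mut compositor = Compositor { layers: Vec::new() };
    compositor.layers.push(Box::new(EditorView));
    compositor.layers.push(Box::new(FilePicker)); // pushed last => drawn on top

    compositor.render(area, &mut surface);

    for row in surface.cells.chunks(surface.width as usize) {
        println!("{}", row.iter().collect::<String>());
    }
}
```
Because layers render in push order, pushing the picker after the editor is exactly what makes it appear over the editor.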
The `Editor` holds the global state: all the open documents, a tree The `Editor` holds the global state: all the open documents, a tree
representation of all the view splits, and a registry of language servers. To representation of all the view splits, the configuration, and a registry of
open or close files, interact with the editor. language servers. To open or close files, interact with the editor.
## LSP ## LSP

@ -1,12 +1,35 @@
{ {
"nodes": { "nodes": {
"crane": {
"flake": false,
"locked": {
"lastModified": 1644785799,
"narHash": "sha256-VpAJO1L0XeBvtCuNGK4IDKp6ENHIpTrlaZT7yfBCvwo=",
"owner": "ipetkov",
"repo": "crane",
"rev": "fc7a94f841347c88f2cb44217b2a3faa93e2a0b2",
"type": "github"
},
"original": {
"owner": "ipetkov",
"repo": "crane",
"type": "github"
}
},
"devshell": { "devshell": {
"inputs": {
"flake-utils": "flake-utils",
"nixpkgs": [
"nixCargoIntegration",
"nixpkgs"
]
},
"locked": { "locked": {
"lastModified": 1641980203, "lastModified": 1646667754,
"narHash": "sha256-RiWJ3+6V267Ji+P54K1Xrj1Nsah9BfG/aLfIhqgVyBY=", "narHash": "sha256-LahZHvCC3UVzGQ55iWDRZkuDssXl1rYgqgScrPV9S38=",
"owner": "numtide", "owner": "numtide",
"repo": "devshell", "repo": "devshell",
"rev": "d897c1ddb4eab66cc2b783c7868d78555b9880ad", "rev": "59fbe1dfc0de8c3332957c16998a7d16dff365d8",
"type": "github" "type": "github"
}, },
"original": { "original": {
@ -15,7 +38,73 @@
"type": "github" "type": "github"
} }
}, },
"dream2nix": {
"inputs": {
"alejandra": [
"nixCargoIntegration",
"nixpkgs"
],
"crane": "crane",
"flake-utils-pre-commit": [
"nixCargoIntegration",
"nixpkgs"
],
"gomod2nix": [
"nixCargoIntegration",
"nixpkgs"
],
"mach-nix": [
"nixCargoIntegration",
"nixpkgs"
],
"nixpkgs": [
"nixCargoIntegration",
"nixpkgs"
],
"node2nix": [
"nixCargoIntegration",
"nixpkgs"
],
"poetry2nix": [
"nixCargoIntegration",
"nixpkgs"
],
"pre-commit-hooks": [
"nixCargoIntegration",
"nixpkgs"
]
},
"locked": {
"lastModified": 1646710334,
"narHash": "sha256-eLBcDgcbOUfeH4k6SEW5a5v0PTp2KNCn+5ZXIoWGYww=",
"owner": "nix-community",
"repo": "dream2nix",
"rev": "5dcfbfd3b60ce0208b894c1bdea00e2bdf80ca6a",
"type": "github"
},
"original": {
"owner": "nix-community",
"ref": "main",
"repo": "dream2nix",
"type": "github"
}
},
"flake-utils": { "flake-utils": {
"locked": {
"lastModified": 1642700792,
"narHash": "sha256-XqHrk7hFb+zBvRg6Ghl+AZDq03ov6OshJLiSWOoX5es=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "846b2ae0fc4cc943637d3d1def4454213e203cba",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
},
"flake-utils_2": {
"locked": { "locked": {
"lastModified": 1637014545, "lastModified": 1637014545,
"narHash": "sha256-26IZAc5yzlD9FlDT54io1oqG/bBoyka+FJk5guaX4x4=", "narHash": "sha256-26IZAc5yzlD9FlDT54io1oqG/bBoyka+FJk5guaX4x4=",
@ -33,6 +122,7 @@
"nixCargoIntegration": { "nixCargoIntegration": {
"inputs": { "inputs": {
"devshell": "devshell", "devshell": "devshell",
"dream2nix": "dream2nix",
"nixpkgs": [ "nixpkgs": [
"nixpkgs" "nixpkgs"
], ],
@ -41,11 +131,11 @@
] ]
}, },
"locked": { "locked": {
"lastModified": 1642054253, "lastModified": 1646766572,
"narHash": "sha256-kHh9VmaB7gbS6pheheC4x0uT84LEmhfbsbWEQJgU2E4=", "narHash": "sha256-DV3+zxvAIKsMHsHedJKYFsracvFyLKpFQqurUBR86oY=",
"owner": "yusdacra", "owner": "yusdacra",
"repo": "nix-cargo-integration", "repo": "nix-cargo-integration",
"rev": "f8fa9af990195a3f63fe2dde84aa187e193da793", "rev": "3a3f47f43ba486b7554164a698c8dfc5a38624ce",
"type": "github" "type": "github"
}, },
"original": { "original": {
@ -56,11 +146,11 @@
}, },
"nixpkgs": { "nixpkgs": {
"locked": { "locked": {
"lastModified": 1641887635, "lastModified": 1646497237,
"narHash": "sha256-kDGpufwzVaiGe5e1sBUBPo9f1YN+nYHJlYqCaVpZTQQ=", "narHash": "sha256-Ccpot1h/rV8MgcngDp5OrdmLTMaUTbStZTR5/sI7zW0=",
"owner": "nixos", "owner": "nixos",
"repo": "nixpkgs", "repo": "nixpkgs",
"rev": "b2737d4980a17cc2b7d600d7d0b32fd7333aca88", "rev": "062a0c5437b68f950b081bbfc8a699d57a4ee026",
"type": "github" "type": "github"
}, },
"original": { "original": {
@ -70,22 +160,6 @@
"type": "github" "type": "github"
} }
}, },
"nixpkgs_2": {
"locked": {
"lastModified": 1637453606,
"narHash": "sha256-Gy6cwUswft9xqsjWxFYEnx/63/qzaFUwatcbV5GF/GQ=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "8afc4e543663ca0a6a4f496262cd05233737e732",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixpkgs-unstable",
"repo": "nixpkgs",
"type": "github"
}
},
"root": { "root": {
"inputs": { "inputs": {
"nixCargoIntegration": "nixCargoIntegration", "nixCargoIntegration": "nixCargoIntegration",
@ -95,15 +169,17 @@
}, },
"rust-overlay": { "rust-overlay": {
"inputs": { "inputs": {
"flake-utils": "flake-utils", "flake-utils": "flake-utils_2",
"nixpkgs": "nixpkgs_2" "nixpkgs": [
"nixpkgs"
]
}, },
"locked": { "locked": {
"lastModified": 1642128126, "lastModified": 1646792695,
"narHash": "sha256-av8JUACdrTfQYl/ftZJvKpZEmZfa0avCq7tt5Usdoq0=", "narHash": "sha256-2drCXIKIQnJMlTZbcCfuHZAh+iPcdlRkCqtZnA6MHLY=",
"owner": "oxalica", "owner": "oxalica",
"repo": "rust-overlay", "repo": "rust-overlay",
"rev": "ce4ef6f2d74f2b68f7547df1de22d1b0037ce4ad", "rev": "7f599870402c8d2a5806086c8ee0f2d92b175c54",
"type": "github" "type": "github"
}, },
"original": { "original": {

@ -3,7 +3,10 @@
inputs = { inputs = {
nixpkgs.url = "github:nixos/nixpkgs/nixos-unstable"; nixpkgs.url = "github:nixos/nixpkgs/nixos-unstable";
rust-overlay.url = "github:oxalica/rust-overlay"; rust-overlay = {
url = "github:oxalica/rust-overlay";
inputs.nixpkgs.follows = "nixpkgs";
};
nixCargoIntegration = { nixCargoIntegration = {
url = "github:yusdacra/nix-cargo-integration"; url = "github:yusdacra/nix-cargo-integration";
inputs.nixpkgs.follows = "nixpkgs"; inputs.nixpkgs.follows = "nixpkgs";
@ -11,60 +14,37 @@
}; };
}; };
outputs = inputs@{ self, nixCargoIntegration, ... }: outputs = inputs@{ nixCargoIntegration, ... }:
nixCargoIntegration.lib.makeOutputs { nixCargoIntegration.lib.makeOutputs {
root = ./.; root = ./.;
buildPlatform = "crate2nix";
renameOutputs = { "helix-term" = "helix"; }; renameOutputs = { "helix-term" = "helix"; };
# Set default app to hx (binary is from helix-term release build) # Set default app to hx (binary is from helix-term release build)
# Set default package to helix-term release build # Set default package to helix-term release build
defaultOutputs = { app = "hx"; package = "helix"; }; defaultOutputs = {
overrides = { app = "hx";
crateOverrides = common: _: rec { package = "helix";
# link languages and theme toml files since helix-core/helix-view expects them
helix-core = _: { preConfigure = "ln -s ${common.root}/{languages.toml,theme.toml,base16_theme.toml} .."; };
helix-view = _: { preConfigure = "ln -s ${common.root}/{languages.toml,theme.toml,base16_theme.toml} .."; };
helix-syntax = prev: {
src =
let
pkgs = common.pkgs;
helix = pkgs.fetchgit {
url = "https://github.com/helix-editor/helix.git";
rev = "a8fd33ac012a79069ef1409503a2edcf3a585153";
fetchSubmodules = true;
sha256 = "sha256-5AtOC55ttWT+7RYMboaFxpGZML51ix93wAkYJTt+8JI=";
};
in
pkgs.runCommand prev.src.name { } ''
mkdir -p $out
ln -s ${prev.src}/* $out
ln -sf ${helix}/helix-syntax/languages $out
'';
preConfigure = "mkdir -p ../runtime/grammars";
postInstall = "cp -r ../runtime $out/runtime";
}; };
overrides = {
crateOverrides = common: _: {
helix-term = prev: helix-term = prev:
let let
inherit (common) pkgs lib; inherit (common) pkgs;
helixSyntax = lib.buildCrate { grammars = pkgs.callPackage ./grammars.nix { };
root = self;
memberName = "helix-syntax";
defaultCrateOverrides = {
helix-syntax = helix-syntax;
};
release = false;
};
runtimeDir = pkgs.runCommand "helix-runtime" { } '' runtimeDir = pkgs.runCommand "helix-runtime" { } ''
mkdir -p $out mkdir -p $out
ln -s ${common.root}/runtime/* $out ln -s ${common.root}/runtime/* $out
ln -sf ${helixSyntax}/runtime/grammars $out rm -r $out/grammars
ln -s ${grammars} $out/grammars
''; '';
in in
{ {
# disable fetching and building of tree-sitter grammars in the helix-term build.rs
HELIX_DISABLE_AUTO_GRAMMAR_BUILD = "1";
# link languages and theme toml files since helix-term expects them (for tests) # link languages and theme toml files since helix-term expects them (for tests)
preConfigure = "ln -s ${common.root}/{languages.toml,theme.toml,base16_theme.toml} .."; preConfigure = "ln -s ${common.root}/{languages.toml,theme.toml,base16_theme.toml} ..";
buildInputs = (prev.buildInputs or [ ]) ++ [ common.cCompiler.cc.lib ]; buildInputs = (prev.buildInputs or [ ]) ++ [ common.cCompiler.cc.lib ];
nativeBuildInputs = [ pkgs.makeWrapper ]; nativeBuildInputs = [ pkgs.makeWrapper ];
postFixup = '' postFixup = ''
if [ -f "$out/bin/hx" ]; then if [ -f "$out/bin/hx" ]; then
wrapProgram "$out/bin/hx" --set HELIX_RUNTIME "${runtimeDir}" wrapProgram "$out/bin/hx" --set HELIX_RUNTIME "${runtimeDir}"

@ -0,0 +1,106 @@
{ stdenv, lib, runCommand, yj }:
let
# HACK: nix < 2.6 has a bug in the toml parser, so we convert to JSON
# before parsing
languages-json = runCommand "languages-toml-to-json" { } ''
${yj}/bin/yj -t < ${./languages.toml} > $out
'';
languagesConfig = if lib.versionAtLeast builtins.nixVersion "2.6.0" then
builtins.fromTOML (builtins.readFile ./languages.toml)
else
builtins.fromJSON (builtins.readFile (builtins.toPath languages-json));
isGitGrammar = (grammar:
builtins.hasAttr "source" grammar && builtins.hasAttr "git" grammar.source
&& builtins.hasAttr "rev" grammar.source);
isGitHubGrammar = grammar: lib.hasPrefix "https://github.com" grammar.source.git;
toGitHubFetcher = url: let
match = builtins.match "https://github\.com/([^/]*)/([^/]*)/?" url;
in {
owner = builtins.elemAt match 0;
repo = builtins.elemAt match 1;
};
gitGrammars = builtins.filter isGitGrammar languagesConfig.grammar;
buildGrammar = grammar:
let
gh = toGitHubFetcher grammar.source.git;
sourceGit = builtins.fetchTree {
type = "git";
url = grammar.source.git;
rev = grammar.source.rev;
ref = grammar.source.ref or "HEAD";
shallow = true;
};
sourceGitHub = builtins.fetchTree {
type = "github";
owner = gh.owner;
repo = gh.repo;
inherit (grammar.source) rev;
};
source = if isGitHubGrammar grammar then sourceGitHub else sourceGit;
in stdenv.mkDerivation rec {
# see https://github.com/NixOS/nixpkgs/blob/fbdd1a7c0bc29af5325e0d7dd70e804a972eb465/pkgs/development/tools/parsing/tree-sitter/grammar.nix
pname = "helix-tree-sitter-${grammar.name}";
version = grammar.source.rev;
src = if builtins.hasAttr "subpath" grammar.source then
"${source}/${grammar.source.subpath}"
else
source;
dontUnpack = true;
dontConfigure = true;
FLAGS = [
"-I${src}/src"
"-g"
"-O3"
"-fPIC"
"-fno-exceptions"
"-Wl,-z,relro,-z,now"
];
NAME = grammar.name;
buildPhase = ''
runHook preBuild
if [[ -e "$src/src/scanner.cc" ]]; then
$CXX -c "$src/src/scanner.cc" -o scanner.o $FLAGS
elif [[ -e "$src/src/scanner.c" ]]; then
$CC -c "$src/src/scanner.c" -o scanner.o $FLAGS
fi
$CC -c "$src/src/parser.c" -o parser.o $FLAGS
$CXX -shared -o $NAME.so *.o
ls -al
runHook postBuild
'';
installPhase = ''
runHook preInstall
mkdir $out
mv $NAME.so $out/
runHook postInstall
'';
# Strip failed on darwin: strip: error: symbols referenced by indirect symbol table entries that can't be stripped
fixupPhase = lib.optionalString stdenv.isLinux ''
runHook preFixup
$STRIP $out/$NAME.so
runHook postFixup
'';
};
builtGrammars = builtins.map (grammar: {
inherit (grammar) name;
artifact = buildGrammar grammar;
}) gitGrammars;
grammarLinks = builtins.map (grammar:
"ln -s ${grammar.artifact}/${grammar.name}.so $out/${grammar.name}.so")
builtGrammars;
in runCommand "consolidated-helix-grammars" { } ''
mkdir -p $out
${builtins.concatStringsSep "\n" grammarLinks}
''

@ -11,20 +11,21 @@ homepage = "https://helix-editor.com"
include = ["src/**/*", "README.md"] include = ["src/**/*", "README.md"]
[features] [features]
unicode-lines = ["ropey/unicode_lines"]
[dependencies] [dependencies]
helix-syntax = { version = "0.6", path = "../helix-syntax" } helix-loader = { version = "0.6", path = "../helix-loader" }
ropey = "1.3" ropey = { version = "1.4", default-features = false }
smallvec = "1.8" smallvec = "1.8"
smartstring = "0.2.9" smartstring = "1.0.1"
unicode-segmentation = "1.9" unicode-segmentation = "1.9"
unicode-width = "0.1" unicode-width = "0.1"
unicode-general-category = "0.5" unicode-general-category = "0.5"
# slab = "0.4.2" # slab = "0.4.2"
slotmap = "1.0" slotmap = "1.0"
tree-sitter = "0.20" tree-sitter = "0.20"
once_cell = "1.9" once_cell = "1.10"
arc-swap = "1" arc-swap = "1"
regex = "1" regex = "1"
@ -35,10 +36,11 @@ toml = "0.5"
similar = "2.1" similar = "2.1"
etcetera = "0.3"
encoding_rs = "0.8" encoding_rs = "0.8"
chrono = { version = "0.4", default-features = false, features = ["alloc", "std"] } chrono = { version = "0.4", default-features = false, features = ["alloc", "std"] }
etcetera = "0.3"
[dev-dependencies] [dev-dependencies]
quickcheck = { version = "1", default-features = false } quickcheck = { version = "1", default-features = false }

@ -4,12 +4,14 @@
use crate::{ use crate::{
graphemes, movement::Direction, Range, Rope, RopeGraphemes, Selection, Tendril, Transaction, graphemes, movement::Direction, Range, Rope, RopeGraphemes, Selection, Tendril, Transaction,
}; };
use std::collections::HashMap;
use log::debug; use log::debug;
use smallvec::SmallVec; use smallvec::SmallVec;
// Heavily based on https://github.com/codemirror/closebrackets/ // Heavily based on https://github.com/codemirror/closebrackets/
pub const PAIRS: &[(char, char)] = &[ pub const DEFAULT_PAIRS: &[(char, char)] = &[
('(', ')'), ('(', ')'),
('{', '}'), ('{', '}'),
('[', ']'), ('[', ']'),
@ -18,9 +20,95 @@ pub const PAIRS: &[(char, char)] = &[
('`', '`'), ('`', '`'),
]; ];
// [TODO] build this dynamically in language config. see #992 /// The type that represents the collection of auto pairs,
const OPEN_BEFORE: &str = "([{'\":;,> \n\r\u{000B}\u{000C}\u{0085}\u{2028}\u{2029}"; /// keyed by the opener.
const CLOSE_BEFORE: &str = ")]}'\":;,> \n\r\u{000B}\u{000C}\u{0085}\u{2028}\u{2029}"; // includes space and newlines #[derive(Debug, Clone)]
pub struct AutoPairs(HashMap<char, Pair>);
/// Represents the config for a particular pairing.
#[derive(Debug, Clone, Copy)]
pub struct Pair {
pub open: char,
pub close: char,
}
impl Pair {
/// true if open == close
pub fn same(&self) -> bool {
self.open == self.close
}
/// true if all of the pair's conditions hold for the given document and range
pub fn should_close(&self, doc: &Rope, range: &Range) -> bool {
let mut should_close = Self::next_is_not_alpha(doc, range);
if self.same() {
should_close &= Self::prev_is_not_alpha(doc, range);
}
should_close
}
pub fn next_is_not_alpha(doc: &Rope, range: &Range) -> bool {
let cursor = range.cursor(doc.slice(..));
let next_char = doc.get_char(cursor);
next_char.map(|c| !c.is_alphanumeric()).unwrap_or(true)
}
pub fn prev_is_not_alpha(doc: &Rope, range: &Range) -> bool {
let cursor = range.cursor(doc.slice(..));
let prev_char = prev_char(doc, cursor);
prev_char.map(|c| !c.is_alphanumeric()).unwrap_or(true)
}
}
impl From<&(char, char)> for Pair {
fn from(&(open, close): &(char, char)) -> Self {
Self { open, close }
}
}
impl From<(&char, &char)> for Pair {
fn from((open, close): (&char, &char)) -> Self {
Self {
open: *open,
close: *close,
}
}
}
impl AutoPairs {
/// Make a new AutoPairs set with the given pairs and default conditions.
pub fn new<'a, V: 'a, A>(pairs: V) -> Self
where
V: IntoIterator<Item = A>,
A: Into<Pair>,
{
let mut auto_pairs = HashMap::new();
for pair in pairs.into_iter() {
let auto_pair = pair.into();
auto_pairs.insert(auto_pair.open, auto_pair);
if auto_pair.open != auto_pair.close {
auto_pairs.insert(auto_pair.close, auto_pair);
}
}
Self(auto_pairs)
}
pub fn get(&self, ch: char) -> Option<&Pair> {
self.0.get(&ch)
}
}
impl Default for AutoPairs {
fn default() -> Self {
AutoPairs::new(DEFAULT_PAIRS.iter())
}
}
// insert hook: // insert hook:
// Fn(doc, selection, char) => Option<Transaction> // Fn(doc, selection, char) => Option<Transaction>
@ -36,21 +124,17 @@ const CLOSE_BEFORE: &str = ")]}'\":;,> \n\r\u{000B}\u{000C}\u{0085}\u{2028}\u{20
// middle of triple quotes, and more exotic pairs like Jinja's {% %} // middle of triple quotes, and more exotic pairs like Jinja's {% %}
#[must_use] #[must_use]
pub fn hook(doc: &Rope, selection: &Selection, ch: char) -> Option<Transaction> { pub fn hook(doc: &Rope, selection: &Selection, ch: char, pairs: &AutoPairs) -> Option<Transaction> {
debug!("autopairs hook selection: {:#?}", selection); debug!("autopairs hook selection: {:#?}", selection);
for &(open, close) in PAIRS { if let Some(pair) = pairs.get(ch) {
if open == ch { if pair.same() {
if open == close { return Some(handle_same(doc, selection, pair));
return Some(handle_same(doc, selection, open, CLOSE_BEFORE, OPEN_BEFORE)); } else if pair.open == ch {
} else { return Some(handle_open(doc, selection, pair));
return Some(handle_open(doc, selection, open, close, CLOSE_BEFORE)); } else if pair.close == ch {
}
}
if close == ch {
// && char_at pos == close // && char_at pos == close
return Some(handle_close(doc, selection, open, close)); return Some(handle_close(doc, selection, pair));
} }
} }
@ -196,13 +280,7 @@ fn get_next_range(
Range::new(end_anchor, end_head) Range::new(end_anchor, end_head)
} }
fn handle_open( fn handle_open(doc: &Rope, selection: &Selection, pair: &Pair) -> Transaction {
doc: &Rope,
selection: &Selection,
open: char,
close: char,
close_before: &str,
) -> Transaction {
let mut end_ranges = SmallVec::with_capacity(selection.len()); let mut end_ranges = SmallVec::with_capacity(selection.len());
let mut offs = 0; let mut offs = 0;
@ -212,22 +290,21 @@ fn handle_open(
let len_inserted; let len_inserted;
let change = match next_char { let change = match next_char {
Some(ch) if !close_before.contains(ch) => { Some(_) if !pair.should_close(doc, start_range) => {
len_inserted = open.len_utf8(); len_inserted = pair.open.len_utf8();
let mut tendril = Tendril::new(); let mut tendril = Tendril::new();
tendril.push(open); tendril.push(pair.open);
(cursor, cursor, Some(tendril)) (cursor, cursor, Some(tendril))
} }
// None | Some(ch) if close_before.contains(ch) => {}
_ => { _ => {
// insert open & close // insert open & close
let pair = Tendril::from_iter([open, close]); let pair_str = Tendril::from_iter([pair.open, pair.close]);
len_inserted = open.len_utf8() + close.len_utf8(); len_inserted = pair.open.len_utf8() + pair.close.len_utf8();
(cursor, cursor, Some(pair)) (cursor, cursor, Some(pair_str))
} }
}; };
let next_range = get_next_range(doc, start_range, offs, open, len_inserted); let next_range = get_next_range(doc, start_range, offs, pair.open, len_inserted);
end_ranges.push(next_range); end_ranges.push(next_range);
offs += len_inserted; offs += len_inserted;
@ -239,7 +316,7 @@ fn handle_open(
t t
} }
fn handle_close(doc: &Rope, selection: &Selection, _open: char, close: char) -> Transaction { fn handle_close(doc: &Rope, selection: &Selection, pair: &Pair) -> Transaction {
let mut end_ranges = SmallVec::with_capacity(selection.len()); let mut end_ranges = SmallVec::with_capacity(selection.len());
let mut offs = 0; let mut offs = 0;
@ -249,17 +326,17 @@ fn handle_close(doc: &Rope, selection: &Selection, _open: char, close: char) ->
let next_char = doc.get_char(cursor); let next_char = doc.get_char(cursor);
let mut len_inserted = 0; let mut len_inserted = 0;
let change = if next_char == Some(close) { let change = if next_char == Some(pair.close) {
// return transaction that moves past close // return transaction that moves past close
(cursor, cursor, None) // no-op (cursor, cursor, None) // no-op
} else { } else {
len_inserted += close.len_utf8(); len_inserted += pair.close.len_utf8();
let mut tendril = Tendril::new(); let mut tendril = Tendril::new();
tendril.push(close); tendril.push(pair.close);
(cursor, cursor, Some(tendril)) (cursor, cursor, Some(tendril))
}; };
let next_range = get_next_range(doc, start_range, offs, close, len_inserted); let next_range = get_next_range(doc, start_range, offs, pair.close, len_inserted);
end_ranges.push(next_range); end_ranges.push(next_range);
offs += len_inserted; offs += len_inserted;
@ -272,13 +349,7 @@ fn handle_close(doc: &Rope, selection: &Selection, _open: char, close: char) ->
} }
/// handle cases where open and close is the same, or in triples ("""docstring""") /// handle cases where open and close is the same, or in triples ("""docstring""")
fn handle_same( fn handle_same(doc: &Rope, selection: &Selection, pair: &Pair) -> Transaction {
doc: &Rope,
selection: &Selection,
token: char,
close_before: &str,
open_before: &str,
) -> Transaction {
let mut end_ranges = SmallVec::with_capacity(selection.len()); let mut end_ranges = SmallVec::with_capacity(selection.len());
let mut offs = 0; let mut offs = 0;
@ -286,30 +357,26 @@ fn handle_same(
let transaction = Transaction::change_by_selection(doc, selection, |start_range| { let transaction = Transaction::change_by_selection(doc, selection, |start_range| {
let cursor = start_range.cursor(doc.slice(..)); let cursor = start_range.cursor(doc.slice(..));
let mut len_inserted = 0; let mut len_inserted = 0;
let next_char = doc.get_char(cursor); let next_char = doc.get_char(cursor);
let prev_char = prev_char(doc, cursor);
let change = if next_char == Some(token) { let change = if next_char == Some(pair.open) {
// return transaction that moves past close // return transaction that moves past close
(cursor, cursor, None) // no-op (cursor, cursor, None) // no-op
} else { } else {
let mut pair = Tendril::new(); let mut pair_str = Tendril::new();
pair.push(token); pair_str.push(pair.open);
// for equal pairs, don't insert both open and close if either // for equal pairs, don't insert both open and close if either
// side has a non-pair char // side has a non-pair char
if (next_char.is_none() || close_before.contains(next_char.unwrap())) if pair.should_close(doc, start_range) {
&& (prev_char.is_none() || open_before.contains(prev_char.unwrap())) pair_str.push(pair.close);
{
pair.push(token);
} }
len_inserted += pair.len(); len_inserted += pair_str.len();
(cursor, cursor, Some(pair)) (cursor, cursor, Some(pair_str))
}; };
let next_range = get_next_range(doc, start_range, offs, token, len_inserted); let next_range = get_next_range(doc, start_range, offs, pair.open, len_inserted);
end_ranges.push(next_range); end_ranges.push(next_range);
offs += len_inserted; offs += len_inserted;
@ -329,21 +396,23 @@ mod test {
const LINE_END: &str = crate::DEFAULT_LINE_ENDING.as_str(); const LINE_END: &str = crate::DEFAULT_LINE_ENDING.as_str();
fn differing_pairs() -> impl Iterator<Item = &'static (char, char)> { fn differing_pairs() -> impl Iterator<Item = &'static (char, char)> {
PAIRS.iter().filter(|(open, close)| open != close) DEFAULT_PAIRS.iter().filter(|(open, close)| open != close)
} }
fn matching_pairs() -> impl Iterator<Item = &'static (char, char)> { fn matching_pairs() -> impl Iterator<Item = &'static (char, char)> {
PAIRS.iter().filter(|(open, close)| open == close) DEFAULT_PAIRS.iter().filter(|(open, close)| open == close)
} }
fn test_hooks( fn test_hooks(
in_doc: &Rope, in_doc: &Rope,
in_sel: &Selection, in_sel: &Selection,
ch: char, ch: char,
pairs: &[(char, char)],
expected_doc: &Rope, expected_doc: &Rope,
expected_sel: &Selection, expected_sel: &Selection,
) { ) {
let trans = hook(in_doc, in_sel, ch).unwrap(); let pairs = AutoPairs::new(pairs.iter());
let trans = hook(in_doc, in_sel, ch, &pairs).unwrap();
let mut actual_doc = in_doc.clone(); let mut actual_doc = in_doc.clone();
assert!(trans.apply(&mut actual_doc)); assert!(trans.apply(&mut actual_doc));
assert_eq!(expected_doc, &actual_doc); assert_eq!(expected_doc, &actual_doc);
@ -353,7 +422,8 @@ mod test {
fn test_hooks_with_pairs<I, F, R>( fn test_hooks_with_pairs<I, F, R>(
in_doc: &Rope, in_doc: &Rope,
in_sel: &Selection, in_sel: &Selection,
pairs: I, test_pairs: I,
pairs: &[(char, char)],
get_expected_doc: F, get_expected_doc: F,
actual_sel: &Selection, actual_sel: &Selection,
) where ) where
@ -362,11 +432,12 @@ mod test {
R: Into<Rope>, R: Into<Rope>,
Rope: From<R>, Rope: From<R>,
{ {
pairs.into_iter().for_each(|(open, close)| { test_pairs.into_iter().for_each(|(open, close)| {
test_hooks( test_hooks(
in_doc, in_doc,
in_sel, in_sel,
*open, *open,
pairs,
&Rope::from(get_expected_doc(*open, *close)), &Rope::from(get_expected_doc(*open, *close)),
actual_sel, actual_sel,
) )
@ -381,7 +452,8 @@ mod test {
test_hooks_with_pairs( test_hooks_with_pairs(
&Rope::from(LINE_END), &Rope::from(LINE_END),
&Selection::single(1, 0), &Selection::single(1, 0),
PAIRS, DEFAULT_PAIRS,
DEFAULT_PAIRS,
|open, close| format!("{}{}{}", open, close, LINE_END), |open, close| format!("{}{}{}", open, close, LINE_END),
&Selection::single(2, 1), &Selection::single(2, 1),
); );
@ -391,7 +463,8 @@ mod test {
test_hooks_with_pairs( test_hooks_with_pairs(
&empty_doc, &empty_doc,
&Selection::single(empty_doc.len_chars(), LINE_END.len()), &Selection::single(empty_doc.len_chars(), LINE_END.len()),
PAIRS, DEFAULT_PAIRS,
DEFAULT_PAIRS,
|open, close| { |open, close| {
format!( format!(
"{line_end}{open}{close}{line_end}", "{line_end}{open}{close}{line_end}",
@ -406,21 +479,25 @@ mod test {
#[test] #[test]
fn test_insert_before_multi_code_point_graphemes() { fn test_insert_before_multi_code_point_graphemes() {
test_hooks_with_pairs( for (_, close) in differing_pairs() {
test_hooks(
&Rope::from(format!("hello 👨‍👩‍👧‍👦 goodbye{}", LINE_END)), &Rope::from(format!("hello 👨‍👩‍👧‍👦 goodbye{}", LINE_END)),
&Selection::single(13, 6), &Selection::single(13, 6),
PAIRS, *close,
|open, _| format!("hello {}👨‍👩‍👧‍👦 goodbye{}", open, LINE_END), DEFAULT_PAIRS,
&Rope::from(format!("hello {}👨‍👩‍👧‍👦 goodbye{}", close, LINE_END)),
&Selection::single(14, 7), &Selection::single(14, 7),
); );
} }
}
#[test] #[test]
fn test_insert_at_end_of_document() { fn test_insert_at_end_of_document() {
test_hooks_with_pairs( test_hooks_with_pairs(
&Rope::from(LINE_END), &Rope::from(LINE_END),
&Selection::single(LINE_END.len(), LINE_END.len()), &Selection::single(LINE_END.len(), LINE_END.len()),
PAIRS, DEFAULT_PAIRS,
DEFAULT_PAIRS,
|open, close| format!("{}{}{}", LINE_END, open, close), |open, close| format!("{}{}{}", LINE_END, open, close),
&Selection::single(LINE_END.len() + 1, LINE_END.len() + 1), &Selection::single(LINE_END.len() + 1, LINE_END.len() + 1),
); );
@ -428,7 +505,8 @@ mod test {
test_hooks_with_pairs( test_hooks_with_pairs(
&Rope::from(format!("foo{}", LINE_END)), &Rope::from(format!("foo{}", LINE_END)),
&Selection::single(3 + LINE_END.len(), 3 + LINE_END.len()), &Selection::single(3 + LINE_END.len(), 3 + LINE_END.len()),
PAIRS, DEFAULT_PAIRS,
DEFAULT_PAIRS,
|open, close| format!("foo{}{}{}", LINE_END, open, close), |open, close| format!("foo{}{}{}", LINE_END, open, close),
&Selection::single(LINE_END.len() + 4, LINE_END.len() + 4), &Selection::single(LINE_END.len() + 4, LINE_END.len() + 4),
); );
@ -442,7 +520,8 @@ mod test {
&Rope::from(format!("{line_end}{line_end}", line_end = LINE_END)), &Rope::from(format!("{line_end}{line_end}", line_end = LINE_END)),
// before inserting the pair, the cursor covers all of both empty lines // before inserting the pair, the cursor covers all of both empty lines
&Selection::single(0, LINE_END.len() * 2), &Selection::single(0, LINE_END.len() * 2),
PAIRS, DEFAULT_PAIRS,
DEFAULT_PAIRS,
|open, close| { |open, close| {
format!( format!(
"{line_end}{open}{close}{line_end}", "{line_end}{open}{close}{line_end}",
@ -467,7 +546,8 @@ mod test {
smallvec!(Range::new(1, 0), Range::new(2, 1), Range::new(3, 2),), smallvec!(Range::new(1, 0), Range::new(2, 1), Range::new(3, 2),),
0, 0,
), ),
PAIRS, DEFAULT_PAIRS,
DEFAULT_PAIRS,
|open, close| { |open, close| {
format!( format!(
"{open}{close}\n{open}{close}\n{open}{close}\n", "{open}{close}\n{open}{close}\n{open}{close}\n",
@ -489,6 +569,7 @@ mod test {
&Rope::from("foo\n"), &Rope::from("foo\n"),
&Selection::single(2, 4), &Selection::single(2, 4),
differing_pairs(), differing_pairs(),
DEFAULT_PAIRS,
|open, close| format!("foo{}{}\n", open, close), |open, close| format!("foo{}{}\n", open, close),
&Selection::single(2, 5), &Selection::single(2, 5),
); );
@ -501,6 +582,7 @@ mod test {
&Rope::from(format!("foo{}", LINE_END)), &Rope::from(format!("foo{}", LINE_END)),
&Selection::single(3, 3 + LINE_END.len()), &Selection::single(3, 3 + LINE_END.len()),
differing_pairs(), differing_pairs(),
DEFAULT_PAIRS,
|open, close| format!("foo{}{}{}", open, close, LINE_END), |open, close| format!("foo{}{}{}", open, close, LINE_END),
&Selection::single(4, 5), &Selection::single(4, 5),
); );
@ -518,6 +600,7 @@ mod test {
0, 0,
), ),
differing_pairs(), differing_pairs(),
DEFAULT_PAIRS,
|open, close| { |open, close| {
format!( format!(
"foo{open}{close}\nfoo{open}{close}\nfoo{open}{close}\n", "foo{open}{close}\nfoo{open}{close}\nfoo{open}{close}\n",
@ -535,13 +618,14 @@ mod test {
/// ([)] -> insert ) -> ()[] /// ([)] -> insert ) -> ()[]
#[test] #[test]
fn test_insert_close_inside_pair() { fn test_insert_close_inside_pair() {
for (open, close) in PAIRS { for (open, close) in DEFAULT_PAIRS {
let doc = Rope::from(format!("{}{}{}", open, close, LINE_END)); let doc = Rope::from(format!("{}{}{}", open, close, LINE_END));
test_hooks( test_hooks(
&doc, &doc,
&Selection::single(2, 1), &Selection::single(2, 1),
*close, *close,
DEFAULT_PAIRS,
&doc, &doc,
&Selection::single(2 + LINE_END.len(), 2), &Selection::single(2 + LINE_END.len(), 2),
); );
@ -551,13 +635,14 @@ mod test {
/// [(]) -> append ) -> [()] /// [(]) -> append ) -> [()]
#[test] #[test]
fn test_append_close_inside_pair() { fn test_append_close_inside_pair() {
for (open, close) in PAIRS { for (open, close) in DEFAULT_PAIRS {
let doc = Rope::from(format!("{}{}{}", open, close, LINE_END)); let doc = Rope::from(format!("{}{}{}", open, close, LINE_END));
test_hooks( test_hooks(
&doc, &doc,
&Selection::single(0, 2), &Selection::single(0, 2),
*close, *close,
DEFAULT_PAIRS,
&doc, &doc,
&Selection::single(0, 2 + LINE_END.len()), &Selection::single(0, 2 + LINE_END.len()),
); );
@ -579,14 +664,14 @@ mod test {
0, 0,
); );
for (open, close) in PAIRS { for (open, close) in DEFAULT_PAIRS {
let doc = Rope::from(format!( let doc = Rope::from(format!(
"{open}{close}\n{open}{close}\n{open}{close}\n", "{open}{close}\n{open}{close}\n{open}{close}\n",
open = open, open = open,
close = close close = close
)); ));
test_hooks(&doc, &sel, *close, &doc, &expected_sel); test_hooks(&doc, &sel, *close, DEFAULT_PAIRS, &doc, &expected_sel);
} }
} }
@ -605,14 +690,14 @@ mod test {
0, 0,
); );
for (open, close) in PAIRS { for (open, close) in DEFAULT_PAIRS {
let doc = Rope::from(format!( let doc = Rope::from(format!(
"{open}{close}\n{open}{close}\n{open}{close}\n", "{open}{close}\n{open}{close}\n{open}{close}\n",
open = open, open = open,
close = close close = close
)); ));
test_hooks(&doc, &sel, *close, &doc, &expected_sel); test_hooks(&doc, &sel, *close, DEFAULT_PAIRS, &doc, &expected_sel);
} }
} }
@ -630,7 +715,14 @@ mod test {
close = close close = close
)); ));
test_hooks(&doc, &sel, *open, &expected_doc, &expected_sel); test_hooks(
&doc,
&sel,
*open,
DEFAULT_PAIRS,
&expected_doc,
&expected_sel,
);
} }
} }
@ -648,7 +740,14 @@ mod test {
close = close close = close
)); ));
test_hooks(&doc, &sel, *open, &expected_doc, &expected_sel); test_hooks(
&doc,
&sel,
*open,
DEFAULT_PAIRS,
&expected_doc,
&expected_sel,
);
} }
} }
@ -667,7 +766,14 @@ mod test {
outer_open, inner_open, inner_close, outer_close outer_open, inner_open, inner_close, outer_close
)); ));
test_hooks(&doc, &sel, *inner_open, &expected_doc, &expected_sel); test_hooks(
&doc,
&sel,
*inner_open,
DEFAULT_PAIRS,
&expected_doc,
&expected_sel,
);
} }
} }
} }
@ -687,7 +793,14 @@ mod test {
outer_open, inner_open, inner_close, outer_close outer_open, inner_open, inner_close, outer_close
)); ));
test_hooks(&doc, &sel, *inner_open, &expected_doc, &expected_sel); test_hooks(
&doc,
&sel,
*inner_open,
DEFAULT_PAIRS,
&expected_doc,
&expected_sel,
);
} }
} }
} }
@ -698,7 +811,8 @@ mod test {
test_hooks_with_pairs( test_hooks_with_pairs(
&Rope::from("word"), &Rope::from("word"),
&Selection::single(1, 0), &Selection::single(1, 0),
PAIRS, DEFAULT_PAIRS,
DEFAULT_PAIRS,
|open, _| format!("{}word", open), |open, _| format!("{}word", open),
&Selection::single(2, 1), &Selection::single(2, 1),
) )
@ -710,7 +824,8 @@ mod test {
test_hooks_with_pairs( test_hooks_with_pairs(
&Rope::from("word"), &Rope::from("word"),
&Selection::single(3, 0), &Selection::single(3, 0),
PAIRS, DEFAULT_PAIRS,
DEFAULT_PAIRS,
|open, _| format!("{}word", open), |open, _| format!("{}word", open),
&Selection::single(4, 1), &Selection::single(4, 1),
) )
@ -722,10 +837,17 @@ mod test {
let sel = Selection::single(0, 4); let sel = Selection::single(0, 4);
let expected_sel = Selection::single(0, 5); let expected_sel = Selection::single(0, 5);
for (_, close) in PAIRS { for (_, close) in DEFAULT_PAIRS {
let doc = Rope::from("word"); let doc = Rope::from("word");
let expected_doc = Rope::from(format!("wor{}d", close)); let expected_doc = Rope::from(format!("wor{}d", close));
test_hooks(&doc, &sel, *close, &expected_doc, &expected_sel); test_hooks(
&doc,
&sel,
*close,
DEFAULT_PAIRS,
&expected_doc,
&expected_sel,
);
} }
} }
@ -736,6 +858,7 @@ mod test {
&Rope::from("foo word"), &Rope::from("foo word"),
&Selection::single(7, 3), &Selection::single(7, 3),
differing_pairs(), differing_pairs(),
DEFAULT_PAIRS,
|open, close| format!("foo{}{} word", open, close), |open, close| format!("foo{}{} word", open, close),
&Selection::single(9, 4), &Selection::single(9, 4),
) )
@ -749,6 +872,7 @@ mod test {
&Rope::from(format!("foo{}{} word{}", open, close, LINE_END)), &Rope::from(format!("foo{}{} word{}", open, close, LINE_END)),
&Selection::single(9, 4), &Selection::single(9, 4),
*close, *close,
DEFAULT_PAIRS,
&Rope::from(format!("foo{}{} word{}", open, close, LINE_END)), &Rope::from(format!("foo{}{} word{}", open, close, LINE_END)),
&Selection::single(9, 5), &Selection::single(9, 5),
) )
@ -771,6 +895,7 @@ mod test {
&doc, &doc,
&sel, &sel,
differing_pairs(), differing_pairs(),
DEFAULT_PAIRS,
|open, close| format!("word{}{}{}", open, close, LINE_END), |open, close| format!("word{}{}{}", open, close, LINE_END),
&expected_sel, &expected_sel,
); );
@ -779,8 +904,34 @@ mod test {
&doc, &doc,
&sel, &sel,
matching_pairs(), matching_pairs(),
DEFAULT_PAIRS,
|open, _| format!("word{}{}", open, LINE_END), |open, _| format!("word{}{}", open, LINE_END),
&expected_sel, &expected_sel,
); );
} }
#[test]
fn test_configured_pairs() {
let test_pairs = &[('`', ':'), ('+', '-')];
test_hooks_with_pairs(
&Rope::from(LINE_END),
&Selection::single(1, 0),
test_pairs,
test_pairs,
|open, close| format!("{}{}{}", open, close, LINE_END),
&Selection::single(2, 1),
);
let doc = Rope::from(format!("foo`: word{}", LINE_END));
test_hooks(
&doc,
&Selection::single(9, 4),
':',
test_pairs,
&doc,
&Selection::single(9, 5),
)
}
} }

@ -91,7 +91,10 @@ mod test {
#[test] #[test]
fn test_categorize() { fn test_categorize() {
const EOL_TEST_CASE: &str = "\n\r\u{000B}\u{000C}\u{0085}\u{2028}\u{2029}"; #[cfg(not(feature = "unicode-lines"))]
const EOL_TEST_CASE: &str = "\n";
#[cfg(feature = "unicode-lines")]
const EOL_TEST_CASE: &str = "\n\u{000B}\u{000C}\u{0085}\u{2028}\u{2029}";
const WORD_TEST_CASE: &str = "_hello_world_あいうえおー1234567890"; const WORD_TEST_CASE: &str = "_hello_world_あいうえおー1234567890";
const PUNCTUATION_TEST_CASE: &str = const PUNCTUATION_TEST_CASE: &str =
"!\"#$%&\'()*+,-./:;<=>?@[\\]^`{|}~!”#$%&’()*+、。:;<=>?@「」^`{|}~"; "!\"#$%&\'()*+,-./:;<=>?@[\\]^`{|}~!”#$%&’()*+、。:;<=>?@「」^`{|}~";

@ -1,33 +1,10 @@
use crate::merge_toml_values;
/// Default built-in languages.toml.
pub fn default_lang_config() -> toml::Value {
toml::from_slice(include_bytes!("../../languages.toml"))
.expect("Could not parse bultin-in languages.toml to valid toml")
}
/// User configured languages.toml file, merged with the default config.
pub fn user_lang_config() -> Result<toml::Value, toml::de::Error> {
let def_lang_conf = default_lang_config();
let data = std::fs::read(crate::config_dir().join("languages.toml"));
let user_lang_conf = match data {
Ok(raw) => {
let value = toml::from_slice(&raw)?;
merge_toml_values(def_lang_conf, value)
}
Err(_) => def_lang_conf,
};
Ok(user_lang_conf)
}
/// Syntax configuration loader based on built-in languages.toml. /// Syntax configuration loader based on built-in languages.toml.
pub fn default_syntax_loader() -> crate::syntax::Configuration { pub fn default_syntax_loader() -> crate::syntax::Configuration {
default_lang_config() helix_loader::default_lang_config()
.try_into() .try_into()
.expect("Could not serialize built-in language.toml") .expect("Could not serialize built-in languages.toml")
} }
/// Syntax configuration loader based on user configured languages.toml. /// Syntax configuration loader based on user configured languages.toml.
pub fn user_syntax_loader() -> Result<crate::syntax::Configuration, toml::de::Error> { pub fn user_syntax_loader() -> Result<crate::syntax::Configuration, toml::de::Error> {
user_lang_config()?.try_into() helix_loader::user_lang_config()?.try_into()
} }

@ -333,10 +333,7 @@ impl<'a> Iterator for RopeGraphemes<'a> {
} }
if a < self.cur_chunk_start { if a < self.cur_chunk_start {
let a_char = self.text.byte_to_char(a); Some(self.text.byte_slice(a..b))
let b_char = self.text.byte_to_char(b);
Some(self.text.slice(a_char..b_char))
} else { } else {
let a2 = a - self.cur_chunk_start; let a2 = a - self.cur_chunk_start;
let b2 = b - self.cur_chunk_start; let b2 = b - self.cur_chunk_start;

@ -1,6 +1,10 @@
use std::collections::HashMap;
use tree_sitter::{Query, QueryCursor, QueryPredicateArg};
use crate::{ use crate::{
chars::{char_is_line_ending, char_is_whitespace}, chars::{char_is_line_ending, char_is_whitespace},
syntax::{IndentQuery, LanguageConfiguration, Syntax}, syntax::{LanguageConfiguration, RopeProvider, Syntax},
tree_sitter::Node, tree_sitter::Node,
Rope, RopeSlice, Rope, RopeSlice,
}; };
@ -186,103 +190,405 @@ pub fn indent_level_for_line(line: RopeSlice, tab_width: usize) -> usize {
len / tab_width len / tab_width
} }
/// Find the highest syntax node at position. /// Computes for node and all ancestors whether they are the first node on their line.
/// This is to identify the column where this node (e.g., an HTML closing tag) ends. /// The first entry in the return value represents the root node, the last one the node itself
fn get_highest_syntax_node_at_bytepos(syntax: &Syntax, pos: usize) -> Option<Node> { fn get_first_in_line(mut node: Node, byte_pos: usize, new_line: bool) -> Vec<bool> {
let tree = syntax.tree(); let mut first_in_line = Vec::new();
loop {
// named_descendant if let Some(prev) = node.prev_sibling() {
let mut node = tree.root_node().descendant_for_byte_range(pos, pos)?; // If we insert a new line, the first node at/after the cursor is considered to be the first in its line
let first = prev.end_position().row != node.start_position().row
while let Some(parent) = node.parent() { || (new_line && node.start_byte() >= byte_pos && prev.start_byte() < byte_pos);
if parent.start_byte() == node.start_byte() { first_in_line.push(Some(first));
node = parent } else {
// Nodes that have no previous siblings are first in their line if and only if their parent is
// (which we don't know yet)
first_in_line.push(None);
}
if let Some(parent) = node.parent() {
node = parent;
} else { } else {
break; break;
} }
} }
Some(node) let mut result = Vec::with_capacity(first_in_line.len());
let mut parent_is_first = true; // The root node is by definition the first node in its line
for first in first_in_line.into_iter().rev() {
if let Some(first) = first {
result.push(first);
parent_is_first = first;
} else {
result.push(parent_is_first);
}
}
result
} }
/// Calculate the indentation at a given treesitter node. /// The total indent for some line of code.
/// If newline is false, then any "indent" nodes on the line are ignored ("outdent" still applies). /// This is usually constructed in one of 2 ways:
/// This is because the indentation is only increased starting at the second line of the node. /// - Successively add indent captures to get the (added) indent from a single line
fn calculate_indentation( /// - Successively add the indent results for each line
query: &IndentQuery, #[derive(Default)]
node: Option<Node>, struct Indentation {
line: usize, /// The total indent (the number of indent levels) is defined as max(0, indent-outdent).
newline: bool, /// The string that this results in depends on the indent style (spaces or tabs, etc.)
) -> usize { indent: usize,
let mut increment: isize = 0; outdent: usize,
}
impl Indentation {
/// Add some other [Indentation] to this.
/// The added indent should be the total added indent from one line
fn add_line(&mut self, added: &Indentation) {
if added.indent > 0 && added.outdent == 0 {
self.indent += 1;
} else if added.outdent > 0 && added.indent == 0 {
self.outdent += 1;
}
}
/// Add an indent capture to this indent.
/// All the captures that are added in this way should be on the same line.
fn add_capture(&mut self, added: IndentCaptureType) {
match added {
IndentCaptureType::Indent => {
self.indent = 1;
}
IndentCaptureType::Outdent => {
self.outdent = 1;
}
}
}
fn as_string(&self, indent_style: &IndentStyle) -> String {
let indent_level = if self.indent >= self.outdent {
self.indent - self.outdent
} else {
log::warn!("Encountered more outdent than indent nodes while calculating indentation: {} outdent, {} indent", self.outdent, self.indent);
0
};
indent_style.as_str().repeat(indent_level)
}
}
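To make the combination rule concrete, here is a small self-contained sketch with plain stand-in types (not the real `Indentation` API): captures on one line saturate at a single level, while separate lines add up.

```rust
// Stand-in types only; this is not the helix-core API.
struct LineIndent {
    indent: usize,
    outdent: usize,
}

#[derive(Default)]
struct TotalIndent {
    indent: usize,
    outdent: usize,
}

impl TotalIndent {
    // Mirrors `Indentation::add_line`: a line contributes at most one level.
    fn add_line(&mut self, line: &LineIndent) {
        if line.indent > 0 && line.outdent == 0 {
            self.indent += 1;
        } else if line.outdent > 0 && line.indent == 0 {
            self.outdent += 1;
        }
    }

    fn level(&self) -> usize {
        self.indent.saturating_sub(self.outdent)
    }
}

fn main() {
    // `some_function(|| {` puts two indent captures on one line; like
    // `add_capture`, the per-line value saturates, so only one level results.
    let mut total = TotalIndent::default();
    total.add_line(&LineIndent { indent: 1, outdent: 0 });
    assert_eq!(total.level(), 1);

    // The multi-line form spreads the captures over two lines: two levels.
    let mut total = TotalIndent::default();
    total.add_line(&LineIndent { indent: 1, outdent: 0 });
    total.add_line(&LineIndent { indent: 1, outdent: 0 });
    assert_eq!(total.level(), 2);
}
```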
let mut node = match node { /// An indent definition which corresponds to a capture from the indent query
Some(node) => node, struct IndentCapture {
None => return 0, capture_type: IndentCaptureType,
scope: IndentScope,
}
#[derive(Clone, Copy)]
enum IndentCaptureType {
Indent,
Outdent,
}
impl IndentCaptureType {
fn default_scope(&self) -> IndentScope {
match self {
IndentCaptureType::Indent => IndentScope::Tail,
IndentCaptureType::Outdent => IndentScope::All,
}
}
}
/// This defines which part of a node an [IndentCapture] applies to.
/// Each [IndentCaptureType] has a default scope, but the scope can be changed
/// with `#set!` property declarations.
#[derive(Clone, Copy)]
enum IndentScope {
/// The indent applies to the whole node
All,
/// The indent applies to everything except for the first line of the node
Tail,
}
/// Execute the indent query.
/// Returns for each node (identified by its id) a list of indent captures for that node.
fn query_indents(
query: &Query,
syntax: &Syntax,
cursor: &mut QueryCursor,
text: RopeSlice,
range: std::ops::Range<usize>,
// Position of the (optional) newly inserted line break.
// Given as (line, byte_pos)
new_line_break: Option<(usize, usize)>,
) -> HashMap<usize, Vec<IndentCapture>> {
let mut indent_captures: HashMap<usize, Vec<IndentCapture>> = HashMap::new();
cursor.set_byte_range(range);
// Iterate over all captures from the query
for m in cursor.matches(query, syntax.tree().root_node(), RopeProvider(text)) {
// Skip matches where not all custom predicates are fulfilled
if !query.general_predicates(m.pattern_index).iter().all(|pred| {
match pred.operator.as_ref() {
"not-kind-eq?" => match (pred.args.get(0), pred.args.get(1)) {
(
Some(QueryPredicateArg::Capture(capture_idx)),
Some(QueryPredicateArg::String(kind)),
) => {
let node = m.nodes_for_capture_index(*capture_idx).next();
match node {
Some(node) => node.kind() != kind.as_ref(),
_ => true,
}
}
_ => {
panic!("Invalid indent query: Arguments to \"not-kind-eq?\" must be a capture and a string");
}
},
"same-line?" | "not-same-line?" => {
match (pred.args.get(0), pred.args.get(1)) {
(
Some(QueryPredicateArg::Capture(capt1)),
Some(QueryPredicateArg::Capture(capt2))
) => {
let get_line_num = |node: Node| {
let mut node_line = node.start_position().row;
// Adjust for the new line that will be inserted
if let Some((line, byte)) = new_line_break {
if node_line == line && node.start_byte() >= byte {
node_line += 1;
}
}
node_line
}; };
let n1 = m.nodes_for_capture_index(*capt1).next();
let n2 = m.nodes_for_capture_index(*capt2).next();
match (n1, n2) {
(Some(n1), Some(n2)) => {
let same_line = get_line_num(n1) == get_line_num(n2);
same_line == (pred.operator.as_ref() == "same-line?")
}
_ => true,
}
}
_ => {
panic!("Invalid indent query: Arguments to \"{}\" must be 2 captures", pred.operator);
}
}
}
_ => {
panic!(
"Invalid indent query: Unknown predicate (\"{}\")",
pred.operator
);
}
}
}) {
continue;
}
for capture in m.captures {
let capture_type = query.capture_names()[capture.index as usize].as_str();
let capture_type = match capture_type {
"indent" => IndentCaptureType::Indent,
"outdent" => IndentCaptureType::Outdent,
_ => {
// Ignore any unknown captures (these may be needed for predicates such as #match?)
continue;
}
};
let scope = capture_type.default_scope();
let mut indent_capture = IndentCapture {
capture_type,
scope,
};
// Apply additional settings for this capture
for property in query.property_settings(m.pattern_index) {
match property.key.as_ref() {
"scope" => {
indent_capture.scope = match property.value.as_deref() {
Some("all") => IndentScope::All,
Some("tail") => IndentScope::Tail,
Some(s) => {
panic!("Invalid indent query: Unknown value for \"scope\" property (\"{}\")", s);
}
None => {
panic!(
"Invalid indent query: Missing value for \"scope\" property"
);
}
}
}
_ => {
panic!(
"Invalid indent query: Unknown property \"{}\"",
property.key
);
}
}
}
indent_captures
.entry(capture.node.id())
// Most entries only need to contain a single IndentCapture
.or_insert_with(|| Vec::with_capacity(1))
.push(indent_capture);
}
}
indent_captures
}
let mut current_line = line; /// Use the syntax tree to determine the indentation for a given position.
let mut consider_indent = newline; /// This can be used in 2 ways:
let mut increment_from_line: isize = 0; ///
/// - To get the correct indentation for an existing line (new_line=false), not necessarily equal to the current indentation.
/// - In this case, pos should be inside the first tree-sitter node on that line.
/// In most cases, this can just be the first non-whitespace on that line.
/// - To get the indentation for a new line (new_line=true). This behaves like the first use case if the part of the current line
/// after pos were moved to a new line.
///
/// The indentation is determined by traversing all the tree-sitter nodes containing the position.
/// Each of these nodes produces some [Indentation] for:
///
/// - The line of the (beginning of the) node. This is defined by the scope `all` if this is the first node on its line.
/// - The line after the node. This is defined by:
/// - The scope `tail`.
/// - The scope `all` if this node is not the first node on its line.
/// Intuitively, `all` applies to everything contained in this node while `tail` applies to everything except for the first line of the node.
/// The indents from different nodes for the same line are then combined.
/// The total indent is simply the sum of the [Indentation] for all lines.
///
/// Specifying which line exactly an [Indentation] applies to is important because indents on the same line combine differently than indents on different lines:
/// ```ignore
/// some_function(|| {
/// // Both the function parameters as well as the contained block should be indented.
/// // Because they are on the same line, this only yields one indent level
/// });
/// ```
///
/// ```ignore
/// some_function(
/// param1,
/// || {
/// // Here we get 2 indent levels because the 'parameters' and the 'block' node begin on different lines
/// },
/// );
/// ```
pub fn treesitter_indent_for_pos(
query: &Query,
syntax: &Syntax,
indent_style: &IndentStyle,
text: RopeSlice,
line: usize,
pos: usize,
new_line: bool,
) -> Option<String> {
let byte_pos = text.char_to_byte(pos);
let mut node = syntax
.tree()
.root_node()
.descendant_for_byte_range(byte_pos, byte_pos)?;
let mut first_in_line = get_first_in_line(node, byte_pos, new_line);
let new_line_break = if new_line {
Some((line, byte_pos))
} else {
None
};
let query_result = crate::syntax::PARSER.with(|ts_parser| {
let mut ts_parser = ts_parser.borrow_mut();
let mut cursor = ts_parser.cursors.pop().unwrap_or_else(QueryCursor::new);
let query_result = query_indents(
query,
syntax,
&mut cursor,
text,
byte_pos..byte_pos + 1,
new_line_break,
);
ts_parser.cursors.push(cursor);
query_result
});
let mut result = Indentation::default();
// We always keep track of all the indent changes on one line, in order to only indent once
// even if there are multiple "indent" nodes on the same line
let mut indent_for_line = Indentation::default();
let mut indent_for_line_below = Indentation::default();
loop { loop {
let node_kind = node.kind(); // This can safely be unwrapped because `first_in_line` contains
let start = node.start_position().row; // one entry for each ancestor of the node (which is what we iterate over)
if current_line != start { let is_first = *first_in_line.last().unwrap();
// Indent/dedent by at most one per line: // Apply all indent definitions for this node
// .map(|a| { <-- ({ is two scopes if let Some(definitions) = query_result.get(&node.id()) {
// let len = 1; <-- indents one level for definition in definitions {
// }) <-- }) is two scopes match definition.scope {
if consider_indent || increment_from_line < 0 { IndentScope::All => {
increment += increment_from_line.signum(); if is_first {
indent_for_line.add_capture(definition.capture_type);
} else {
indent_for_line_below.add_capture(definition.capture_type);
}
}
IndentScope::Tail => {
indent_for_line_below.add_capture(definition.capture_type);
} }
increment_from_line = 0;
current_line = start;
consider_indent = true;
} }
if query.outdent.contains(node_kind) {
increment_from_line -= 1;
} }
if query.indent.contains(node_kind) {
increment_from_line += 1;
} }
if let Some(parent) = node.parent() { if let Some(parent) = node.parent() {
let mut node_line = node.start_position().row;
let mut parent_line = parent.start_position().row;
if node_line == line && new_line {
// Also consider the line that will be inserted
if node.start_byte() >= byte_pos {
node_line += 1;
}
if parent.start_byte() >= byte_pos {
parent_line += 1;
}
};
if node_line != parent_line {
if node_line < line + (new_line as usize) {
// Don't add indent for the line below the line of the query
result.add_line(&indent_for_line_below);
}
if node_line == parent_line + 1 {
indent_for_line_below = indent_for_line;
} else {
result.add_line(&indent_for_line);
indent_for_line_below = Indentation::default();
}
indent_for_line = Indentation::default();
}
node = parent; node = parent;
first_in_line.pop();
} else { } else {
result.add_line(&indent_for_line_below);
result.add_line(&indent_for_line);
break; break;
} }
} }
if consider_indent || increment_from_line < 0 { Some(result.as_string(indent_style))
increment += increment_from_line.signum();
}
increment.max(0) as usize
} }
// TODO: two usecases: if we are triggering this for a new, blank line: /// Returns the indentation for a new line.
// - it should return 0 when mass indenting stuff /// This is done either using treesitter, or if that's not available by copying the indentation from the current line
// - it should look up the wrapper node and count it too when we press o/O #[allow(clippy::too_many_arguments)]
pub fn suggested_indent_for_pos( pub fn indent_for_newline(
language_config: Option<&LanguageConfiguration>, language_config: Option<&LanguageConfiguration>,
syntax: Option<&Syntax>, syntax: Option<&Syntax>,
indent_style: &IndentStyle,
tab_width: usize,
text: RopeSlice, text: RopeSlice,
pos: usize, line_before: usize,
line: usize, line_before_end_pos: usize,
new_line: bool, current_line: usize,
) -> Option<usize> { ) -> String {
if let (Some(query), Some(syntax)) = ( if let (Some(query), Some(syntax)) = (
language_config.and_then(|config| config.indent_query()), language_config.and_then(|config| config.indent_query()),
syntax, syntax,
) { ) {
let byte_start = text.char_to_byte(pos); if let Some(indent) = treesitter_indent_for_pos(
let node = get_highest_syntax_node_at_bytepos(syntax, byte_start); query,
// TODO: special case for comments syntax,
// TODO: if preserve_leading_whitespace indent_style,
Some(calculate_indentation(query, node, line, new_line)) text,
} else { line_before,
None line_before_end_pos,
true,
) {
return indent;
};
} }
let indent_level = indent_level_for_line(text.line(current_line), tab_width);
indent_style.as_str().repeat(indent_level)
} }
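When there is no indent query or syntax tree, `indent_for_newline` falls back to copying the indent level of the current line. A standalone sketch of that fallback, mirroring `indent_level_for_line` and assuming a 4-space indent unit:

```rust
// Fallback path: count the leading whitespace of the current line in units
// of tab_width, then emit that many indent units for the new line.
fn indent_level_for_line(line: &str, tab_width: usize) -> usize {
    let mut len = 0;
    for ch in line.chars() {
        match ch {
            '\t' => len += tab_width,
            ' ' => len += 1,
            _ => break,
        }
    }
    len / tab_width
}

fn main() {
    let tab_width = 4;
    let unit = "    "; // stand-in for an IndentStyle of 4 spaces
    let current_line = "\t    \tfn new("; // 1 tab + 4 spaces + 1 tab = 3 levels
    let new_line_indent = unit.repeat(indent_level_for_line(current_line, tab_width));
    assert_eq!(new_line_indent.len(), 12); // 3 levels of 4 spaces
}
```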
pub fn get_scopes(syntax: Option<&Syntax>, text: RopeSlice, pos: usize) -> Vec<&'static str> { pub fn get_scopes(syntax: Option<&Syntax>, text: RopeSlice, pos: usize) -> Vec<&'static str> {
@ -326,155 +632,4 @@ mod test {
let line = Rope::from("\t \tfn new"); // 1 tab, 4 spaces, tab let line = Rope::from("\t \tfn new"); // 1 tab, 4 spaces, tab
assert_eq!(indent_level_for_line(line.slice(..), tab_width), 3); assert_eq!(indent_level_for_line(line.slice(..), tab_width), 3);
} }
#[test]
fn test_suggested_indent_for_line() {
let doc = Rope::from(
"
use std::{
io::{self, stdout, Stdout, Write},
path::PathBuf,
sync::Arc,
time::Duration,
}
mod test {
fn hello_world() {
1 + 1;
let does_indentation_work = 1;
let test_function = function_with_param(this_param,
that_param
);
let test_function = function_with_param(
this_param,
that_param
);
let test_function = function_with_proper_indent(param1,
param2,
);
let selection = Selection::new(
changes
.clone()
.map(|(start, end, text): (usize, usize, Option<Tendril>)| {
let len = text.map(|text| text.len()).unwrap() - 1; // minus newline
let pos = start + len;
Range::new(pos, pos)
})
.collect(),
0,
);
return;
}
}
impl<A, D> MyTrait<A, D> for YourType
where
A: TraitB + TraitC,
D: TraitE + TraitF,
{
}
#[test]
//
match test {
Some(a) => 1,
None => {
unimplemented!()
}
}
std::panic::set_hook(Box::new(move |info| {
hook(info);
}));
{ { {
1
}}}
pub fn change<I>(document: &Document, changes: I) -> Self
where
I: IntoIterator<Item = Change> + ExactSizeIterator,
{
[
1,
2,
3,
];
(
1,
2
);
true
}
",
);
let doc = doc;
use crate::diagnostic::Severity;
use crate::syntax::{
Configuration, IndentationConfiguration, LanguageConfiguration, Loader,
};
use once_cell::sync::OnceCell;
let loader = Loader::new(Configuration {
language: vec![LanguageConfiguration {
scope: "source.rust".to_string(),
file_types: vec!["rs".to_string()],
shebangs: vec![],
language_id: "Rust".to_string(),
highlight_config: OnceCell::new(),
config: None,
//
injection_regex: None,
roots: vec![],
comment_token: None,
auto_format: false,
diagnostic_severity: Severity::Warning,
tree_sitter_library: None,
language_server: None,
indent: Some(IndentationConfiguration {
tab_width: 4,
unit: String::from(" "),
}),
indent_query: OnceCell::new(),
textobject_query: OnceCell::new(),
debugger: None,
}],
});
// set runtime path so we can find the queries
let mut runtime = std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR"));
runtime.push("../runtime");
std::env::set_var("HELIX_RUNTIME", runtime.to_str().unwrap());
let language_config = loader.language_config_for_scope("source.rust").unwrap();
let highlight_config = language_config.highlight_config(&[]).unwrap();
let syntax = Syntax::new(&doc, highlight_config, std::sync::Arc::new(loader));
let text = doc.slice(..);
let tab_width = 4;
for i in 0..doc.len_lines() {
let line = text.line(i);
if let Some(pos) = crate::find_first_non_whitespace_char(line) {
let indent = indent_level_for_line(line, tab_width);
assert_eq!(
suggested_indent_for_pos(
Some(&language_config),
Some(&syntax),
text,
text.line_to_char(i) + pos,
i,
false
),
Some(indent),
"line {}: \"{}\"",
i,
line
);
}
}
}
} }

@ -33,9 +33,6 @@ pub mod unicode {
pub use unicode_width as width; pub use unicode_width as width;
} }
static RUNTIME_DIR: once_cell::sync::Lazy<std::path::PathBuf> =
once_cell::sync::Lazy::new(runtime_dir);
pub fn find_first_non_whitespace_char(line: RopeSlice) -> Option<usize> { pub fn find_first_non_whitespace_char(line: RopeSlice) -> Option<usize> {
line.chars().position(|ch| !ch.is_whitespace()) line.chars().position(|ch| !ch.is_whitespace())
} }
@ -85,132 +82,6 @@ pub fn find_root(root: Option<&str>, root_markers: &[String]) -> Option<std::pat
} }
} }
pub fn runtime_dir() -> std::path::PathBuf {
if let Ok(dir) = std::env::var("HELIX_RUNTIME") {
return dir.into();
}
const RT_DIR: &str = "runtime";
let conf_dir = config_dir().join(RT_DIR);
if conf_dir.exists() {
return conf_dir;
}
if let Ok(dir) = std::env::var("CARGO_MANIFEST_DIR") {
// this is the directory of the crate being run by cargo, we need the workspace path so we take the parent
return std::path::PathBuf::from(dir).parent().unwrap().join(RT_DIR);
}
// fallback to location of the executable being run
std::env::current_exe()
.ok()
.and_then(|path| path.parent().map(|path| path.to_path_buf().join(RT_DIR)))
.unwrap()
}
pub fn config_dir() -> std::path::PathBuf {
// TODO: allow env var override
let strategy = choose_base_strategy().expect("Unable to find the config directory!");
let mut path = strategy.config_dir();
path.push("helix");
path
}
pub fn cache_dir() -> std::path::PathBuf {
// TODO: allow env var override
let strategy = choose_base_strategy().expect("Unable to find the config directory!");
let mut path = strategy.cache_dir();
path.push("helix");
path
}
// right overrides left
pub fn merge_toml_values(left: toml::Value, right: toml::Value) -> toml::Value {
use toml::Value;
fn get_name(v: &Value) -> Option<&str> {
v.get("name").and_then(Value::as_str)
}
match (left, right) {
(Value::Array(mut left_items), Value::Array(right_items)) => {
left_items.reserve(right_items.len());
for rvalue in right_items {
let lvalue = get_name(&rvalue)
.and_then(|rname| left_items.iter().position(|v| get_name(v) == Some(rname)))
.map(|lpos| left_items.remove(lpos));
let mvalue = match lvalue {
Some(lvalue) => merge_toml_values(lvalue, rvalue),
None => rvalue,
};
left_items.push(mvalue);
}
Value::Array(left_items)
}
(Value::Table(mut left_map), Value::Table(right_map)) => {
for (rname, rvalue) in right_map {
match left_map.remove(&rname) {
Some(lvalue) => {
let merged_value = merge_toml_values(lvalue, rvalue);
left_map.insert(rname, merged_value);
}
None => {
left_map.insert(rname, rvalue);
}
}
}
Value::Table(left_map)
}
// Catch everything else we didn't handle, and use the right value
(_, value) => value,
}
}
#[cfg(test)]
mod merge_toml_tests {
use super::merge_toml_values;
#[test]
fn language_tomls() {
use toml::Value;
const USER: &str = "
[[language]]
name = \"nix\"
test = \"bbb\"
indent = { tab-width = 4, unit = \" \", test = \"aaa\" }
";
let base: Value = toml::from_slice(include_bytes!("../../languages.toml"))
.expect("Couldn't parse built-in languages config");
let user: Value = toml::from_str(USER).unwrap();
let merged = merge_toml_values(base, user);
let languages = merged.get("language").unwrap().as_array().unwrap();
let nix = languages
.iter()
.find(|v| v.get("name").unwrap().as_str().unwrap() == "nix")
.unwrap();
let nix_indent = nix.get("indent").unwrap();
// We changed tab-width and unit in indent, so check that they have the new values
assert_eq!(
nix_indent.get("tab-width").unwrap().as_integer().unwrap(),
4
);
assert_eq!(nix_indent.get("unit").unwrap().as_str().unwrap(), " ");
// We added new keys, so check them
assert_eq!(nix.get("test").unwrap().as_str().unwrap(), "bbb");
assert_eq!(nix_indent.get("test").unwrap().as_str().unwrap(), "aaa");
// We didn't change comment-token, so it should stay the same
assert_eq!(nix.get("comment-token").unwrap().as_str().unwrap(), "#");
}
}
pub use etcetera::home_dir;
use etcetera::base_strategy::{choose_base_strategy, BaseStrategy};
pub use ropey::{Rope, RopeBuilder, RopeSlice}; pub use ropey::{Rope, RopeBuilder, RopeSlice};
// pub use tendril::StrTendril as Tendril; // pub use tendril::StrTendril as Tendril;

@ -10,11 +10,17 @@ pub const DEFAULT_LINE_ENDING: LineEnding = LineEnding::LF;
pub enum LineEnding { pub enum LineEnding {
Crlf, // CarriageReturn followed by LineFeed Crlf, // CarriageReturn followed by LineFeed
LF, // U+000A -- LineFeed LF, // U+000A -- LineFeed
#[cfg(feature = "unicode-lines")]
VT, // U+000B -- VerticalTab VT, // U+000B -- VerticalTab
#[cfg(feature = "unicode-lines")]
FF, // U+000C -- FormFeed FF, // U+000C -- FormFeed
#[cfg(feature = "unicode-lines")]
CR, // U+000D -- CarriageReturn CR, // U+000D -- CarriageReturn
#[cfg(feature = "unicode-lines")]
Nel, // U+0085 -- NextLine Nel, // U+0085 -- NextLine
#[cfg(feature = "unicode-lines")]
LS, // U+2028 -- Line Separator LS, // U+2028 -- Line Separator
#[cfg(feature = "unicode-lines")]
PS, // U+2029 -- ParagraphSeparator PS, // U+2029 -- ParagraphSeparator
} }
@ -32,11 +38,17 @@ impl LineEnding {
match self { match self {
Self::Crlf => "\u{000D}\u{000A}", Self::Crlf => "\u{000D}\u{000A}",
Self::LF => "\u{000A}", Self::LF => "\u{000A}",
#[cfg(feature = "unicode-lines")]
Self::VT => "\u{000B}", Self::VT => "\u{000B}",
#[cfg(feature = "unicode-lines")]
Self::FF => "\u{000C}", Self::FF => "\u{000C}",
#[cfg(feature = "unicode-lines")]
Self::CR => "\u{000D}", Self::CR => "\u{000D}",
#[cfg(feature = "unicode-lines")]
Self::Nel => "\u{0085}", Self::Nel => "\u{0085}",
#[cfg(feature = "unicode-lines")]
Self::LS => "\u{2028}", Self::LS => "\u{2028}",
#[cfg(feature = "unicode-lines")]
Self::PS => "\u{2029}", Self::PS => "\u{2029}",
} }
} }
@ -45,11 +57,17 @@ impl LineEnding {
pub const fn from_char(ch: char) -> Option<LineEnding> { pub const fn from_char(ch: char) -> Option<LineEnding> {
match ch { match ch {
'\u{000A}' => Some(LineEnding::LF), '\u{000A}' => Some(LineEnding::LF),
#[cfg(feature = "unicode-lines")]
'\u{000B}' => Some(LineEnding::VT), '\u{000B}' => Some(LineEnding::VT),
#[cfg(feature = "unicode-lines")]
'\u{000C}' => Some(LineEnding::FF), '\u{000C}' => Some(LineEnding::FF),
#[cfg(feature = "unicode-lines")]
'\u{000D}' => Some(LineEnding::CR), '\u{000D}' => Some(LineEnding::CR),
#[cfg(feature = "unicode-lines")]
'\u{0085}' => Some(LineEnding::Nel), '\u{0085}' => Some(LineEnding::Nel),
#[cfg(feature = "unicode-lines")]
'\u{2028}' => Some(LineEnding::LS), '\u{2028}' => Some(LineEnding::LS),
#[cfg(feature = "unicode-lines")]
'\u{2029}' => Some(LineEnding::PS), '\u{2029}' => Some(LineEnding::PS),
// Not a line ending // Not a line ending
_ => None, _ => None,
@ -65,11 +83,17 @@ impl LineEnding {
match g { match g {
"\u{000D}\u{000A}" => Some(LineEnding::Crlf), "\u{000D}\u{000A}" => Some(LineEnding::Crlf),
"\u{000A}" => Some(LineEnding::LF), "\u{000A}" => Some(LineEnding::LF),
#[cfg(feature = "unicode-lines")]
"\u{000B}" => Some(LineEnding::VT), "\u{000B}" => Some(LineEnding::VT),
#[cfg(feature = "unicode-lines")]
"\u{000C}" => Some(LineEnding::FF), "\u{000C}" => Some(LineEnding::FF),
#[cfg(feature = "unicode-lines")]
"\u{000D}" => Some(LineEnding::CR), "\u{000D}" => Some(LineEnding::CR),
#[cfg(feature = "unicode-lines")]
"\u{0085}" => Some(LineEnding::Nel), "\u{0085}" => Some(LineEnding::Nel),
#[cfg(feature = "unicode-lines")]
"\u{2028}" => Some(LineEnding::LS), "\u{2028}" => Some(LineEnding::LS),
#[cfg(feature = "unicode-lines")]
"\u{2029}" => Some(LineEnding::PS), "\u{2029}" => Some(LineEnding::PS),
// Not a line ending // Not a line ending
_ => None, _ => None,
@ -101,7 +125,9 @@ pub fn auto_detect_line_ending(doc: &Rope) -> Option<LineEnding> {
// are being matched, as they might be special-use only // are being matched, as they might be special-use only
for line in doc.lines().take(100) { for line in doc.lines().take(100) {
match get_line_ending(&line) { match get_line_ending(&line) {
None | Some(LineEnding::VT) | Some(LineEnding::FF) | Some(LineEnding::PS) => {} None => {}
#[cfg(feature = "unicode-lines")]
Some(LineEnding::VT) | Some(LineEnding::FF) | Some(LineEnding::PS) => {}
ending => return ending, ending => return ending,
} }
} }
@ -128,6 +154,19 @@ pub fn get_line_ending(line: &RopeSlice) -> Option<LineEnding> {
LineEnding::from_str(g2).or_else(|| LineEnding::from_str(g1)) LineEnding::from_str(g2).or_else(|| LineEnding::from_str(g1))
} }
#[cfg(not(feature = "unicode-lines"))]
/// Returns the passed line's line ending, if any.
pub fn get_line_ending_of_str(line: &str) -> Option<LineEnding> {
if line.ends_with("\u{000D}\u{000A}") {
Some(LineEnding::Crlf)
} else if line.ends_with('\u{000A}') {
Some(LineEnding::LF)
} else {
None
}
}
#[cfg(feature = "unicode-lines")]
/// Returns the passed line's line ending, if any. /// Returns the passed line's line ending, if any.
pub fn get_line_ending_of_str(line: &str) -> Option<LineEnding> { pub fn get_line_ending_of_str(line: &str) -> Option<LineEnding> {
if line.ends_with("\u{000D}\u{000A}") { if line.ends_with("\u{000D}\u{000A}") {
@ -211,6 +250,7 @@ mod line_ending_tests {
#[test] #[test]
fn str_to_line_ending() { fn str_to_line_ending() {
#[cfg(feature = "unicode-lines")]
assert_eq!(LineEnding::from_str("\r"), Some(LineEnding::CR)); assert_eq!(LineEnding::from_str("\r"), Some(LineEnding::CR));
assert_eq!(LineEnding::from_str("\n"), Some(LineEnding::LF)); assert_eq!(LineEnding::from_str("\n"), Some(LineEnding::LF));
assert_eq!(LineEnding::from_str("\r\n"), Some(LineEnding::Crlf)); assert_eq!(LineEnding::from_str("\r\n"), Some(LineEnding::Crlf));
@ -220,6 +260,7 @@ mod line_ending_tests {
#[test] #[test]
fn rope_slice_to_line_ending() { fn rope_slice_to_line_ending() {
let r = Rope::from_str("hello\r\n"); let r = Rope::from_str("hello\r\n");
#[cfg(feature = "unicode-lines")]
assert_eq!( assert_eq!(
LineEnding::from_rope_slice(&r.slice(5..6)), LineEnding::from_rope_slice(&r.slice(5..6)),
Some(LineEnding::CR) Some(LineEnding::CR)
@ -238,6 +279,7 @@ mod line_ending_tests {
#[test] #[test]
fn get_line_ending_rope_slice() { fn get_line_ending_rope_slice() {
let r = Rope::from_str("Hello\rworld\nhow\r\nare you?"); let r = Rope::from_str("Hello\rworld\nhow\r\nare you?");
#[cfg(feature = "unicode-lines")]
assert_eq!(get_line_ending(&r.slice(..6)), Some(LineEnding::CR)); assert_eq!(get_line_ending(&r.slice(..6)), Some(LineEnding::CR));
assert_eq!(get_line_ending(&r.slice(..12)), Some(LineEnding::LF)); assert_eq!(get_line_ending(&r.slice(..12)), Some(LineEnding::LF));
assert_eq!(get_line_ending(&r.slice(..17)), Some(LineEnding::Crlf)); assert_eq!(get_line_ending(&r.slice(..17)), Some(LineEnding::Crlf));
@ -247,6 +289,7 @@ mod line_ending_tests {
#[test] #[test]
fn get_line_ending_str() { fn get_line_ending_str() {
let text = "Hello\rworld\nhow\r\nare you?"; let text = "Hello\rworld\nhow\r\nare you?";
#[cfg(feature = "unicode-lines")]
assert_eq!(get_line_ending_of_str(&text[..6]), Some(LineEnding::CR)); assert_eq!(get_line_ending_of_str(&text[..6]), Some(LineEnding::CR));
assert_eq!(get_line_ending_of_str(&text[..12]), Some(LineEnding::LF)); assert_eq!(get_line_ending_of_str(&text[..12]), Some(LineEnding::LF));
assert_eq!(get_line_ending_of_str(&text[..17]), Some(LineEnding::Crlf)); assert_eq!(get_line_ending_of_str(&text[..17]), Some(LineEnding::Crlf));
@ -257,9 +300,8 @@ mod line_ending_tests {
fn line_end_char_index_rope_slice() { fn line_end_char_index_rope_slice() {
let r = Rope::from_str("Hello\rworld\nhow\r\nare you?"); let r = Rope::from_str("Hello\rworld\nhow\r\nare you?");
let s = &r.slice(..); let s = &r.slice(..);
assert_eq!(line_end_char_index(s, 0), 5); assert_eq!(line_end_char_index(s, 0), 11);
assert_eq!(line_end_char_index(s, 1), 11); assert_eq!(line_end_char_index(s, 1), 15);
assert_eq!(line_end_char_index(s, 2), 15); assert_eq!(line_end_char_index(s, 2), 25);
assert_eq!(line_end_char_index(s, 3), 25);
} }
} }

@ -1,9 +1,10 @@
use etcetera::home_dir;
use std::path::{Component, Path, PathBuf}; use std::path::{Component, Path, PathBuf};
/// Replaces users home directory from `path` with tilde `~` if the directory /// Replaces users home directory from `path` with tilde `~` if the directory
/// is available, otherwise returns the path unchanged. /// is available, otherwise returns the path unchanged.
pub fn fold_home_dir(path: &Path) -> PathBuf { pub fn fold_home_dir(path: &Path) -> PathBuf {
if let Ok(home) = super::home_dir() { if let Ok(home) = home_dir() {
if path.starts_with(&home) { if path.starts_with(&home) {
// it's ok to unwrap, the path starts with home dir // it's ok to unwrap, the path starts with home dir
return PathBuf::from("~").join(path.strip_prefix(&home).unwrap()); return PathBuf::from("~").join(path.strip_prefix(&home).unwrap());
@ -20,7 +21,7 @@ pub fn expand_tilde(path: &Path) -> PathBuf {
let mut components = path.components().peekable(); let mut components = path.components().peekable();
if let Some(Component::Normal(c)) = components.peek() { if let Some(Component::Normal(c)) = components.peek() {
if c == &"~" { if c == &"~" {
if let Ok(home) = super::home_dir() { if let Ok(home) = home_dir() {
// it's ok to unwrap, the path starts with `~` // it's ok to unwrap, the path starts with `~`
return home.join(path.strip_prefix("~").unwrap()); return home.join(path.strip_prefix("~").unwrap());
} }

@ -1,8 +1,9 @@
use std::borrow::Cow;
use crate::{ use crate::{
chars::char_is_line_ending, chars::char_is_line_ending,
graphemes::{ensure_grapheme_boundary_prev, RopeGraphemes}, graphemes::{ensure_grapheme_boundary_prev, grapheme_width, RopeGraphemes},
line_ending::line_end_char_index, line_ending::line_end_char_index,
unicode::width::UnicodeWidthChar,
RopeSlice, RopeSlice,
}; };
@ -77,14 +78,17 @@ pub fn visual_coords_at_pos(text: RopeSlice, pos: usize, tab_width: usize) -> Po
let line_start = text.line_to_char(line); let line_start = text.line_to_char(line);
let pos = ensure_grapheme_boundary_prev(text, pos); let pos = ensure_grapheme_boundary_prev(text, pos);
let col = text
.slice(line_start..pos) let mut col = 0;
.chars()
.flat_map(|c| match c { for grapheme in RopeGraphemes::new(text.slice(line_start..pos)) {
'\t' => Some(tab_width), if grapheme == "\t" {
c => UnicodeWidthChar::width(c), col += tab_width - (col % tab_width);
}) } else {
.sum(); let grapheme = Cow::from(grapheme);
col += grapheme_width(&grapheme);
}
}
Position::new(line, col) Position::new(line, col)
} }
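The rewrite above iterates graphemes and advances a tab to the next tab stop instead of summing fixed character widths. A minimal per-character sketch of that column rule (real grapheme handling via `RopeGraphemes`/`grapheme_width` is omitted here):

```rust
// Tab-stop rule: '\t' jumps to the next multiple of tab_width, so its visual
// width depends on the current column rather than being a constant.
fn visual_col(line: &str, tab_width: usize) -> usize {
    let mut col = 0;
    for ch in line.chars() {
        if ch == '\t' {
            col += tab_width - (col % tab_width);
        } else {
            col += 1; // this sketch assumes width-1 characters
        }
    }
    col
}

fn main() {
    // "ab\t" ends at column 4 (the tab fills 2 cells), not at 2 + 4 = 6.
    assert_eq!(visual_col("ab\t", 4), 4);
    // A tab that starts exactly on a tab stop still advances a full stop.
    assert_eq!(visual_col("\t\t", 4), 8);
}
```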

@ -1,3 +1,5 @@
use std::fmt::Display;
use crate::{search, Range, Selection}; use crate::{search, Range, Selection};
use ropey::RopeSlice; use ropey::RopeSlice;
@ -11,6 +13,27 @@ pub const PAIRS: &[(char, char)] = &[
('', ''), ('', ''),
]; ];
#[derive(Debug, PartialEq)]
pub enum Error {
PairNotFound,
CursorOverlap,
RangeExceedsText,
CursorOnAmbiguousPair,
}
impl Display for Error {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.write_str(match *self {
Error::PairNotFound => "Surround pair not found around all cursors",
Error::CursorOverlap => "Cursors overlap for a single surround pair range",
Error::RangeExceedsText => "Cursor range exceeds text length",
Error::CursorOnAmbiguousPair => "Cursor on ambiguous surround pair",
})
}
}
type Result<T> = std::result::Result<T, Error>;
/// Given any char in [PAIRS], return the open and closing chars. If not found in /// Given any char in [PAIRS], return the open and closing chars. If not found in
/// [PAIRS] return (ch, ch). /// [PAIRS] return (ch, ch).
/// ///
@ -37,31 +60,36 @@ pub fn find_nth_pairs_pos(
ch: char, ch: char,
range: Range, range: Range,
n: usize, n: usize,
) -> Option<(usize, usize)> { ) -> Result<(usize, usize)> {
if text.len_chars() < 2 || range.to() >= text.len_chars() { if text.len_chars() < 2 {
return None; return Err(Error::PairNotFound);
}
if range.to() >= text.len_chars() {
return Err(Error::RangeExceedsText);
} }
let (open, close) = get_pair(ch); let (open, close) = get_pair(ch);
let pos = range.cursor(text); let pos = range.cursor(text);
if open == close { let (open, close) = if open == close {
if Some(open) == text.get_char(pos) { if Some(open) == text.get_char(pos) {
// Cursor is directly on match char. We return no match // Cursor is directly on match char. We return no match
// because there's no way to know which side of the char // because there's no way to know which side of the char
// we should be searching on. // we should be searching on.
return None; return Err(Error::CursorOnAmbiguousPair);
} }
Some(( (
search::find_nth_prev(text, open, pos, n)?, search::find_nth_prev(text, open, pos, n),
search::find_nth_next(text, close, pos, n)?, search::find_nth_next(text, close, pos, n),
)) )
} else { } else {
Some(( (
find_nth_open_pair(text, open, close, pos, n)?, find_nth_open_pair(text, open, close, pos, n),
find_nth_close_pair(text, open, close, pos, n)?, find_nth_close_pair(text, open, close, pos, n),
)) )
} };
Option::zip(open, close).ok_or(Error::PairNotFound)
} }
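The refactor keeps both searches as `Option`s and converts the combined result into an error only once, at the end. A tiny self-contained illustration of that `Option::zip(..).ok_or(..)` step, with the error type reduced to the one variant it exercises:

```rust
#[derive(Debug, PartialEq)]
enum Error {
    PairNotFound,
}

// Both positions must be found; a single missing side collapses to one error.
fn pair(open: Option<usize>, close: Option<usize>) -> Result<(usize, usize), Error> {
    open.zip(close).ok_or(Error::PairNotFound)
}

fn main() {
    assert_eq!(pair(Some(5), Some(10)), Ok((5, 10)));
    assert_eq!(pair(None, Some(10)), Err(Error::PairNotFound));
    assert_eq!(pair(Some(5), None), Err(Error::PairNotFound));
}
```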
fn find_nth_open_pair( fn find_nth_open_pair(
@ -151,17 +179,17 @@ pub fn get_surround_pos(
selection: &Selection, selection: &Selection,
ch: char, ch: char,
skip: usize, skip: usize,
) -> Option<Vec<usize>> { ) -> Result<Vec<usize>> {
let mut change_pos = Vec::new(); let mut change_pos = Vec::new();
for &range in selection { for &range in selection {
let (open_pos, close_pos) = find_nth_pairs_pos(text, ch, range, skip)?; let (open_pos, close_pos) = find_nth_pairs_pos(text, ch, range, skip)?;
if change_pos.contains(&open_pos) || change_pos.contains(&close_pos) { if change_pos.contains(&open_pos) || change_pos.contains(&close_pos) {
return None; return Err(Error::CursorOverlap);
} }
change_pos.extend_from_slice(&[open_pos, close_pos]); change_pos.extend_from_slice(&[open_pos, close_pos]);
} }
Some(change_pos) Ok(change_pos)
} }
#[cfg(test)] #[cfg(test)]
@ -175,7 +203,7 @@ mod test {
#[allow(clippy::type_complexity)] #[allow(clippy::type_complexity)]
fn check_find_nth_pair_pos( fn check_find_nth_pair_pos(
text: &str, text: &str,
cases: Vec<(usize, char, usize, Option<(usize, usize)>)>, cases: Vec<(usize, char, usize, Result<(usize, usize)>)>,
) { ) {
let doc = Rope::from(text); let doc = Rope::from(text);
let slice = doc.slice(..); let slice = doc.slice(..);
@ -196,13 +224,13 @@ mod test {
"some (text) here", "some (text) here",
vec![ vec![
// cursor on [t]ext // cursor on [t]ext
(6, '(', 1, Some((5, 10))), (6, '(', 1, Ok((5, 10))),
(6, ')', 1, Some((5, 10))), (6, ')', 1, Ok((5, 10))),
// cursor on so[m]e // cursor on so[m]e
(2, '(', 1, None), (2, '(', 1, Err(Error::PairNotFound)),
// cursor on bracket itself // cursor on bracket itself
(5, '(', 1, Some((5, 10))), (5, '(', 1, Ok((5, 10))),
(10, '(', 1, Some((5, 10))), (10, '(', 1, Ok((5, 10))),
], ],
); );
} }
@ -213,9 +241,9 @@ mod test {
"(so (many (good) text) here)", "(so (many (good) text) here)",
vec![ vec![
// cursor on go[o]d // cursor on go[o]d
(13, '(', 1, Some((10, 15))), (13, '(', 1, Ok((10, 15))),
(13, '(', 2, Some((4, 21))), (13, '(', 2, Ok((4, 21))),
(13, '(', 3, Some((0, 27))), (13, '(', 3, Ok((0, 27))),
], ],
); );
} }
@ -226,11 +254,11 @@ mod test {
"'so 'many 'good' text' here'", "'so 'many 'good' text' here'",
vec![ vec![
// cursor on go[o]d // cursor on go[o]d
(13, '\'', 1, Some((10, 15))), (13, '\'', 1, Ok((10, 15))),
(13, '\'', 2, Some((4, 21))), (13, '\'', 2, Ok((4, 21))),
(13, '\'', 3, Some((0, 27))), (13, '\'', 3, Ok((0, 27))),
// cursor on the quotes // cursor on the quotes
(10, '\'', 1, None), (10, '\'', 1, Err(Error::CursorOnAmbiguousPair)),
], ],
) )
} }
@ -241,8 +269,8 @@ mod test {
"((so)((many) good (text))(here))", "((so)((many) good (text))(here))",
vec![ vec![
// cursor on go[o]d // cursor on go[o]d
(15, '(', 1, Some((5, 24))), (15, '(', 1, Ok((5, 24))),
(15, '(', 2, Some((0, 31))), (15, '(', 2, Ok((0, 31))),
], ],
) )
} }
@ -253,9 +281,9 @@ mod test {
"(so [many {good} text] here)", "(so [many {good} text] here)",
vec![ vec![
// cursor on go[o]d // cursor on go[o]d
(13, '{', 1, Some((10, 15))), (13, '{', 1, Ok((10, 15))),
(13, '[', 1, Some((4, 21))), (13, '[', 1, Ok((4, 21))),
(13, '(', 1, Some((0, 27))), (13, '(', 1, Ok((0, 27))),
], ],
) )
} }
@ -285,11 +313,10 @@ mod test {
let selection = let selection =
Selection::new(SmallVec::from_slice(&[Range::point(2), Range::point(9)]), 0); Selection::new(SmallVec::from_slice(&[Range::point(2), Range::point(9)]), 0);
// cursor on s[o]me, c[h]ars // cursor on s[o]me, c[h]ars
assert_eq!( assert_eq!(
get_surround_pos(slice, &selection, '(', 1), get_surround_pos(slice, &selection, '(', 1),
None // different surround chars Err(Error::PairNotFound) // different surround chars
); );
let selection = Selection::new( let selection = Selection::new(
@ -299,7 +326,15 @@ mod test {
// cursor on [x]x, newli[n]e // cursor on [x]x, newli[n]e
assert_eq!( assert_eq!(
get_surround_pos(slice, &selection, '(', 1), get_surround_pos(slice, &selection, '(', 1),
None // overlapping surround chars Err(Error::PairNotFound) // overlapping surround chars
);
let selection =
Selection::new(SmallVec::from_slice(&[Range::point(2), Range::point(3)]), 0);
// cursor on s[o][m]e
assert_eq!(
get_surround_pos(slice, &selection, '[', 1),
Err(Error::CursorOverlap)
); );
} }
} }

@ -1,4 +1,5 @@
use crate::{ use crate::{
auto_pairs::AutoPairs,
chars::char_is_line_ending, chars::char_is_line_ending,
diagnostic::Severity, diagnostic::Severity,
regex::Regex, regex::Regex,
@ -6,8 +7,6 @@ use crate::{
Rope, RopeSlice, Tendril, Rope, RopeSlice, Tendril,
}; };
pub use helix_syntax::get_language;
use arc_swap::{ArcSwap, Guard}; use arc_swap::{ArcSwap, Guard};
use slotmap::{DefaultKey as LayerId, HopSlotMap}; use slotmap::{DefaultKey as LayerId, HopSlotMap};
@ -17,12 +16,15 @@ use std::{
collections::{HashMap, HashSet, VecDeque}, collections::{HashMap, HashSet, VecDeque},
fmt, fmt,
path::Path, path::Path,
str::FromStr,
sync::Arc, sync::Arc,
}; };
use once_cell::sync::{Lazy, OnceCell}; use once_cell::sync::{Lazy, OnceCell};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use helix_loader::grammar::{get_language, load_runtime_file};
fn deserialize_regex<'de, D>(deserializer: D) -> Result<Option<Regex>, D::Error> fn deserialize_regex<'de, D>(deserializer: D) -> Result<Option<Regex>, D::Error>
where where
D: serde::Deserializer<'de>, D: serde::Deserializer<'de>,
@ -41,8 +43,14 @@ where
.transpose() .transpose()
} }
pub fn deserialize_auto_pairs<'de, D>(deserializer: D) -> Result<Option<AutoPairs>, D::Error>
where
D: serde::Deserializer<'de>,
{
Ok(Option::<AutoPairConfig>::deserialize(deserializer)?.and_then(AutoPairConfig::into))
}
#[derive(Debug, Serialize, Deserialize)] #[derive(Debug, Serialize, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct Configuration { pub struct Configuration {
pub language: Vec<LanguageConfiguration>, pub language: Vec<LanguageConfiguration>,
} }
@ -68,7 +76,7 @@ pub struct LanguageConfiguration {
#[serde(default)] #[serde(default)]
pub diagnostic_severity: Severity, pub diagnostic_severity: Severity,
pub tree_sitter_library: Option<String>, // tree-sitter library name, defaults to language_id pub grammar: Option<String>, // tree-sitter grammar name, defaults to language_id
// content_regex // content_regex
#[serde(default, skip_serializing, deserialize_with = "deserialize_regex")] #[serde(default, skip_serializing, deserialize_with = "deserialize_regex")]
@ -84,11 +92,18 @@ pub struct LanguageConfiguration {
pub indent: Option<IndentationConfiguration>, pub indent: Option<IndentationConfiguration>,
#[serde(skip)] #[serde(skip)]
pub(crate) indent_query: OnceCell<Option<IndentQuery>>, pub(crate) indent_query: OnceCell<Option<Query>>,
#[serde(skip)] #[serde(skip)]
pub(crate) textobject_query: OnceCell<Option<TextObjectQuery>>, pub(crate) textobject_query: OnceCell<Option<TextObjectQuery>>,
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]
pub debugger: Option<DebugAdapterConfig>, pub debugger: Option<DebugAdapterConfig>,
/// Automatic insertion of matching pairs for parentheses, brackets,
/// etc. Defaults to true. Optionally, this can be a mapping of
/// opening to closing characters to pair, which overrides the
/// global setting.
#[serde(default, skip_serializing, deserialize_with = "deserialize_auto_pairs")]
pub auto_pairs: Option<AutoPairs>,
} }
#[derive(Debug, Serialize, Deserialize)] #[derive(Debug, Serialize, Deserialize)]
@ -162,15 +177,47 @@ pub struct IndentationConfiguration {
pub unit: String, pub unit: String,
} }
#[derive(Debug, Serialize, Deserialize)] /// Configuration for auto pairs
#[serde(rename_all = "kebab-case")] #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct IndentQuery { #[serde(rename_all = "kebab-case", deny_unknown_fields, untagged)]
#[serde(default)] pub enum AutoPairConfig {
#[serde(skip_serializing_if = "HashSet::is_empty")] /// Enables or disables auto pairing. False means disabled. True means to use the default pairs.
pub indent: HashSet<String>, Enable(bool),
#[serde(default)]
#[serde(skip_serializing_if = "HashSet::is_empty")] /// The mappings of pairs.
pub outdent: HashSet<String>, Pairs(HashMap<char, char>),
}
impl Default for AutoPairConfig {
fn default() -> Self {
AutoPairConfig::Enable(true)
}
}
impl From<&AutoPairConfig> for Option<AutoPairs> {
fn from(auto_pair_config: &AutoPairConfig) -> Self {
match auto_pair_config {
AutoPairConfig::Enable(false) => None,
AutoPairConfig::Enable(true) => Some(AutoPairs::default()),
AutoPairConfig::Pairs(pairs) => Some(AutoPairs::new(pairs.iter())),
}
}
}
impl From<AutoPairConfig> for Option<AutoPairs> {
fn from(auto_pairs_config: AutoPairConfig) -> Self {
(&auto_pairs_config).into()
}
}
impl FromStr for AutoPairConfig {
type Err = std::str::ParseBoolError;
// only do bool parsing for runtime setting
fn from_str(s: &str) -> Result<Self, Self::Err> {
let enable: bool = s.parse()?;
Ok(AutoPairConfig::Enable(enable))
}
} }
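A self-contained sketch of how the untagged `AutoPairConfig` accepts either form from TOML; it assumes `serde` (with the `derive` feature) and `toml` as dependencies, and the wrapper struct exists only for this example:

```rust
use std::collections::HashMap;

use serde::Deserialize;

// Stand-alone re-creation of the untagged enum above; only its shape matters
// here, it is not wired into the real editor configuration.
#[derive(Debug, Deserialize, PartialEq)]
#[serde(untagged)]
enum AutoPairConfig {
    Enable(bool),
    Pairs(HashMap<char, char>),
}

#[derive(Debug, Deserialize)]
struct LanguageEntry {
    #[serde(rename = "auto-pairs")]
    auto_pairs: AutoPairConfig,
}

fn main() {
    // A bare boolean toggles the default pairs on or off…
    let disabled: LanguageEntry = toml::from_str("auto-pairs = false").unwrap();
    assert_eq!(disabled.auto_pairs, AutoPairConfig::Enable(false));

    // …while a table spells out exactly which characters get paired.
    let custom: LanguageEntry =
        toml::from_str("auto-pairs = { '(' = ')', '{' = '}' }").unwrap();
    match custom.auto_pairs {
        AutoPairConfig::Pairs(pairs) => assert_eq!(pairs.get(&'('), Some(&')')),
        other => panic!("expected explicit pairs, got {:?}", other),
    }
}
```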
#[derive(Debug)] #[derive(Debug)]
@ -178,16 +225,56 @@ pub struct TextObjectQuery {
pub query: Query, pub query: Query,
} }
pub enum CapturedNode<'a> {
Single(Node<'a>),
/// Guaranteed to be non-empty
Grouped(Vec<Node<'a>>),
}
impl<'a> CapturedNode<'a> {
pub fn start_byte(&self) -> usize {
match self {
Self::Single(n) => n.start_byte(),
Self::Grouped(ns) => ns[0].start_byte(),
}
}
pub fn end_byte(&self) -> usize {
match self {
Self::Single(n) => n.end_byte(),
Self::Grouped(ns) => ns.last().unwrap().end_byte(),
}
}
pub fn byte_range(&self) -> std::ops::Range<usize> {
self.start_byte()..self.end_byte()
}
}
impl TextObjectQuery { impl TextObjectQuery {
/// Run the query on the given node and return sub nodes which match given /// Run the query on the given node and return sub nodes which match given
/// capture ("function.inside", "class.around", etc). /// capture ("function.inside", "class.around", etc).
///
/// Captures may contain multiple nodes by using quantifiers (+, *, etc),
/// and support for this is partial and could use improvement.
///
/// ```query
/// ;; supported:
/// (comment)+ @capture
///
/// ;; unsupported:
/// (
/// (comment)+
/// (function)
/// ) @capture
/// ```
pub fn capture_nodes<'a>( pub fn capture_nodes<'a>(
&'a self, &'a self,
capture_name: &str, capture_name: &str,
node: Node<'a>, node: Node<'a>,
slice: RopeSlice<'a>, slice: RopeSlice<'a>,
cursor: &'a mut QueryCursor, cursor: &'a mut QueryCursor,
) -> Option<impl Iterator<Item = Node<'a>>> { ) -> Option<impl Iterator<Item = CapturedNode<'a>>> {
self.capture_nodes_any(&[capture_name], node, slice, cursor) self.capture_nodes_any(&[capture_name], node, slice, cursor)
} }
@ -199,26 +286,34 @@ impl TextObjectQuery {
node: Node<'a>, node: Node<'a>,
slice: RopeSlice<'a>, slice: RopeSlice<'a>,
cursor: &'a mut QueryCursor, cursor: &'a mut QueryCursor,
) -> Option<impl Iterator<Item = Node<'a>>> { ) -> Option<impl Iterator<Item = CapturedNode<'a>>> {
let capture_idx = capture_names let capture_idx = capture_names
.iter() .iter()
.find_map(|cap| self.query.capture_index_for_name(cap))?; .find_map(|cap| self.query.capture_index_for_name(cap))?;
let captures = cursor.captures(&self.query, node, RopeProvider(slice)); let captures = cursor.matches(&self.query, node, RopeProvider(slice));
captures let nodes = captures.flat_map(move |mat| {
.filter_map(move |(mat, idx)| { let captures = mat.captures.iter().filter(move |c| c.index == capture_idx);
(mat.captures[idx].index == capture_idx).then(|| mat.captures[idx].node) let nodes = captures.map(|c| c.node);
}) let pattern_idx = mat.pattern_index;
.into() let quantifier = self.query.capture_quantifiers(pattern_idx)[capture_idx as usize];
let iter: Box<dyn Iterator<Item = CapturedNode>> = match quantifier {
CaptureQuantifier::OneOrMore | CaptureQuantifier::ZeroOrMore => {
let nodes: Vec<Node> = nodes.collect();
if nodes.is_empty() {
Box::new(std::iter::empty())
} else {
Box::new(std::iter::once(CapturedNode::Grouped(nodes)))
} }
} }
_ => Box::new(nodes.map(CapturedNode::Single)),
};
fn load_runtime_file(language: &str, filename: &str) -> Result<String, std::io::Error> { iter
let path = crate::RUNTIME_DIR });
.join("queries") Some(nodes)
.join(language) }
.join(filename);
std::fs::read_to_string(&path)
} }
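The quantifier handling above reports every node of a `+`/`*` capture as one `CapturedNode::Grouped` value and all other captures as `Single` items. A standalone sketch of that grouping decision, with plain integers standing in for tree-sitter nodes:

```rust
#[derive(Debug, PartialEq)]
enum Captured {
    Single(u32),
    Grouped(Vec<u32>),
}

// Mirrors the branch on CaptureQuantifier: quantified captures collapse into
// one grouped item (or nothing), plain captures stay individual.
fn group(nodes: Vec<u32>, quantified: bool) -> Vec<Captured> {
    if quantified {
        if nodes.is_empty() {
            Vec::new()
        } else {
            vec![Captured::Grouped(nodes)]
        }
    } else {
        nodes.into_iter().map(Captured::Single).collect()
    }
}

fn main() {
    // (comment)+ @capture -> one grouped range spanning all matched comments
    assert_eq!(group(vec![1, 2, 3], true), vec![Captured::Grouped(vec![1, 2, 3])]);
    // (comment) @capture -> each node reported on its own
    assert_eq!(
        group(vec![1, 2], false),
        vec![Captured::Single(1), Captured::Single(2)]
    );
}
```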
fn read_query(language: &str, filename: &str) -> String { fn read_query(language: &str, filename: &str) -> String {
@ -266,12 +361,7 @@ impl LanguageConfiguration {
if highlights_query.is_empty() { if highlights_query.is_empty() {
None None
} else { } else {
let language = get_language( let language = get_language(self.grammar.as_deref().unwrap_or(&self.language_id))
&crate::RUNTIME_DIR,
self.tree_sitter_library
.as_deref()
.unwrap_or(&self.language_id),
)
.map_err(|e| log::info!("{}", e)) .map_err(|e| log::info!("{}", e))
.ok()?; .ok()?;
let config = HighlightConfiguration::new( let config = HighlightConfiguration::new(
@ -280,7 +370,7 @@ impl LanguageConfiguration {
&injections_query, &injections_query,
&locals_query, &locals_query,
) )
.unwrap(); // TODO: avoid panic .unwrap_or_else(|query_error| panic!("Could not parse queries for language {:?}. Are your grammars out of sync? Try running 'hx --grammar fetch' and 'hx --grammar build'. This query could not be parsed: {:?}", self.language_id, query_error));
config.configure(scopes); config.configure(scopes);
Some(Arc::new(config)) Some(Arc::new(config))
@ -303,13 +393,13 @@ impl LanguageConfiguration {
self.highlight_config.get().is_some() self.highlight_config.get().is_some()
} }
pub fn indent_query(&self) -> Option<&IndentQuery> { pub fn indent_query(&self) -> Option<&Query> {
self.indent_query self.indent_query
.get_or_init(|| { .get_or_init(|| {
let language = self.language_id.to_ascii_lowercase(); let lang_name = self.language_id.to_ascii_lowercase();
let query_text = read_query(&lang_name, "indents.scm");
let toml = load_runtime_file(&language, "indents.toml").ok()?; let lang = self.highlight_config.get()?.as_ref()?.language;
toml::from_slice(toml.as_bytes()).ok() Query::new(lang, &query_text).ok()
}) })
.as_ref() .as_ref()
} }
@ -456,7 +546,7 @@ impl Loader {
pub struct TsParser { pub struct TsParser {
parser: tree_sitter::Parser, parser: tree_sitter::Parser,
cursors: Vec<QueryCursor>, pub cursors: Vec<QueryCursor>,
} }
// could also just use a pool, or a single instance? // could also just use a pool, or a single instance?
@ -475,9 +565,7 @@ pub struct Syntax {
} }
fn byte_range_to_str(range: std::ops::Range<usize>, source: RopeSlice) -> Cow<str> { fn byte_range_to_str(range: std::ops::Range<usize>, source: RopeSlice) -> Cow<str> {
let start_char = source.byte_to_char(range.start); Cow::from(source.byte_slice(range))
let end_char = source.byte_to_char(range.end);
Cow::from(source.slice(start_char..end_char))
} }
impl Syntax { impl Syntax {
@ -1009,8 +1097,8 @@ pub(crate) fn generate_edits(
use std::sync::atomic::{AtomicUsize, Ordering}; use std::sync::atomic::{AtomicUsize, Ordering};
use std::{iter, mem, ops, str, usize}; use std::{iter, mem, ops, str, usize};
use tree_sitter::{ use tree_sitter::{
Language as Grammar, Node, Parser, Point, Query, QueryCaptures, QueryCursor, QueryError, CaptureQuantifier, Language as Grammar, Node, Parser, Point, Query, QueryCaptures, QueryCursor,
QueryMatch, Range, TextProvider, Tree, QueryError, QueryMatch, Range, TextProvider, Tree,
}; };
const CANCELLATION_CHECK_INTERVAL: usize = 100; const CANCELLATION_CHECK_INTERVAL: usize = 100;
@ -1081,7 +1169,7 @@ struct HighlightIter<'a> {
} }
// Adapter to convert rope chunks to bytes // Adapter to convert rope chunks to bytes
struct ChunksBytes<'a> { pub struct ChunksBytes<'a> {
chunks: ropey::iter::Chunks<'a>, chunks: ropey::iter::Chunks<'a>,
} }
impl<'a> Iterator for ChunksBytes<'a> { impl<'a> Iterator for ChunksBytes<'a> {
@ -1091,14 +1179,12 @@ impl<'a> Iterator for ChunksBytes<'a> {
} }
} }
struct RopeProvider<'a>(RopeSlice<'a>); pub struct RopeProvider<'a>(pub RopeSlice<'a>);
impl<'a> TextProvider<'a> for RopeProvider<'a> { impl<'a> TextProvider<'a> for RopeProvider<'a> {
type I = ChunksBytes<'a>; type I = ChunksBytes<'a>;
fn text(&mut self, node: Node) -> Self::I { fn text(&mut self, node: Node) -> Self::I {
let start_char = self.0.byte_to_char(node.start_byte()); let fragment = self.0.byte_slice(node.start_byte()..node.end_byte());
let end_char = self.0.byte_to_char(node.end_byte());
let fragment = self.0.slice(start_char..end_char);
ChunksBytes { ChunksBytes {
chunks: fragment.chunks(), chunks: fragment.chunks(),
} }
@ -1862,6 +1948,50 @@ mod test {
use super::*; use super::*;
use crate::{Rope, Transaction}; use crate::{Rope, Transaction};
#[test]
fn test_textobject_queries() {
let query_str = r#"
(line_comment)+ @quantified_nodes
((line_comment)+) @quantified_nodes_grouped
((line_comment) (line_comment)) @multiple_nodes_grouped
"#;
let source = Rope::from_str(
r#"
/// a comment on
/// mutiple lines
"#,
);
let loader = Loader::new(Configuration { language: vec![] });
let language = get_language("Rust").unwrap();
let query = Query::new(language, query_str).unwrap();
let textobject = TextObjectQuery { query };
let mut cursor = QueryCursor::new();
let config = HighlightConfiguration::new(language, "", "", "").unwrap();
let syntax = Syntax::new(&source, Arc::new(config), Arc::new(loader));
let root = syntax.tree().root_node();
let mut test = |capture, range| {
let matches: Vec<_> = textobject
.capture_nodes(capture, root, source.slice(..), &mut cursor)
.unwrap()
.collect();
assert_eq!(
matches[0].byte_range(),
range,
"@{capture} expected {range:?}"
)
};
test("quantified_nodes", 1..35);
// NOTE: Enable after implementing proper node group capturing
// test("quantified_nodes_grouped", 1..35);
// test("multiple_nodes_grouped", 1..35);
}
#[test] #[test]
fn test_parser() { fn test_parser() {
let highlight_names: Vec<String> = [ let highlight_names: Vec<String> = [
@ -1891,16 +2021,12 @@ mod test {
let loader = Loader::new(Configuration { language: vec![] }); let loader = Loader::new(Configuration { language: vec![] });
let language = get_language(&crate::RUNTIME_DIR, "Rust").unwrap(); let language = get_language("Rust").unwrap();
let config = HighlightConfiguration::new( let config = HighlightConfiguration::new(
language, language,
&std::fs::read_to_string( &std::fs::read_to_string("../runtime/grammars/sources/rust/queries/highlights.scm")
"../helix-syntax/languages/tree-sitter-rust/queries/highlights.scm",
)
.unwrap(), .unwrap(),
&std::fs::read_to_string( &std::fs::read_to_string("../runtime/grammars/sources/rust/queries/injections.scm")
"../helix-syntax/languages/tree-sitter-rust/queries/injections.scm",
)
.unwrap(), .unwrap(),
"", // locals.scm "", // locals.scm
) )
@ -1989,7 +2115,7 @@ mod test {
#[test] #[test]
fn test_load_runtime_file() { fn test_load_runtime_file() {
// Test to make sure we can load some data from the runtime directory. // Test to make sure we can load some data from the runtime directory.
let contents = load_runtime_file("rust", "indents.toml").unwrap(); let contents = load_runtime_file("rust", "indents.scm").unwrap();
assert!(!contents.is_empty()); assert!(!contents.is_empty());
let results = load_runtime_file("rust", "does-not-exist"); let results = load_runtime_file("rust", "does-not-exist");

@ -21,7 +21,6 @@ pub enum Assoc {
After, After,
} }
// ChangeSpec = Change | ChangeSet | Vec<Change>
#[derive(Debug, Default, Clone, PartialEq, Eq)] #[derive(Debug, Default, Clone, PartialEq, Eq)]
pub struct ChangeSet { pub struct ChangeSet {
pub(crate) changes: Vec<Operation>, pub(crate) changes: Vec<Operation>,
@ -50,7 +49,6 @@ impl ChangeSet {
} }
// TODO: from iter // TODO: from iter
//
#[doc(hidden)] // used by lsp to convert to LSP changes #[doc(hidden)] // used by lsp to convert to LSP changes
pub fn changes(&self) -> &[Operation] { pub fn changes(&self) -> &[Operation] {
@ -415,8 +413,6 @@ impl ChangeSet {
pub struct Transaction { pub struct Transaction {
changes: ChangeSet, changes: ChangeSet,
selection: Option<Selection>, selection: Option<Selection>,
// effects, annotations
// scroll_into_view
} }
impl Transaction { impl Transaction {
@ -440,14 +436,12 @@ impl Transaction {
/// Returns true if applied successfully. /// Returns true if applied successfully.
pub fn apply(&self, doc: &mut Rope) -> bool { pub fn apply(&self, doc: &mut Rope) -> bool {
if !self.changes.is_empty() { if self.changes.is_empty() {
// apply changes to the document return true;
if !self.changes.apply(doc) {
return false;
}
} }
true // apply changes to the document
self.changes.apply(doc)
} }
/// Generate a transaction that reverts this one. /// Generate a transaction that reverts this one.
@ -475,7 +469,7 @@ impl Transaction {
/// Generate a transaction from a set of changes. /// Generate a transaction from a set of changes.
pub fn change<I>(doc: &Rope, changes: I) -> Self pub fn change<I>(doc: &Rope, changes: I) -> Self
where where
I: IntoIterator<Item = Change> + Iterator, I: Iterator<Item = Change>,
{ {
let len = doc.len_chars(); let len = doc.len_chars();
@ -483,12 +477,11 @@ impl Transaction {
let size = upper.unwrap_or(lower); let size = upper.unwrap_or(lower);
let mut changeset = ChangeSet::with_capacity(2 * size + 1); // rough estimate let mut changeset = ChangeSet::with_capacity(2 * size + 1); // rough estimate
// TODO: verify ranges are ordered and not overlapping or change will panic.
// TODO: test for (pos, pos, None) to factor out as nothing
let mut last = 0; let mut last = 0;
for (from, to, tendril) in changes { for (from, to, tendril) in changes {
// Verify ranges are ordered and not overlapping
debug_assert!(last <= from);
// Retain from last "to" to current "from" // Retain from last "to" to current "from"
changeset.retain(from - last); changeset.retain(from - last);
let span = to - from; let span = to - from;
@ -694,7 +687,7 @@ mod test {
        let mut doc = Rope::from("hello world!\ntest 123");
        let transaction = Transaction::change(
            &doc,
-            // (1, 1, None) is a useless 0-width delete
+            // (1, 1, None) is a useless 0-width delete that gets factored out
            vec![(1, 1, None), (6, 11, Some("void".into())), (12, 17, None)].into_iter(),
        );
        transaction.apply(&mut doc);

@ -0,0 +1 @@
../../../src/indent.rs

@ -0,0 +1,13 @@
# This languages.toml should contain definitions for all languages for which we have indent tests
[[language]]
name = "rust"
scope = "source.rust"
injection-regex = "rust"
file-types = ["rs"]
comment-token = "//"
roots = ["Cargo.toml", "Cargo.lock"]
indent = { tab-width = 4, unit = " " }
[[grammar]]
name = "rust"
source = { git = "https://github.com/tree-sitter/tree-sitter-rust", rev = "a360da0a29a19c281d08295a35ecd0544d2da211" }

@ -0,0 +1,105 @@
use std::{
io::{self, stdout, Stdout, Write},
path::PathBuf,
sync::Arc,
time::Duration,
};
mod test {
fn hello_world() {
1 + 1;
let does_indentation_work = 1;
let mut really_long_variable_name_using_up_the_line =
really_long_fn_that_should_definitely_go_on_the_next_line();
really_long_variable_name_using_up_the_line =
really_long_fn_that_should_definitely_go_on_the_next_line();
really_long_variable_name_using_up_the_line |=
really_long_fn_that_should_definitely_go_on_the_next_line();
let (
a_long_variable_name_in_this_tuple,
b_long_variable_name_in_this_tuple,
c_long_variable_name_in_this_tuple,
d_long_variable_name_in_this_tuple,
e_long_variable_name_in_this_tuple,
): (usize, usize, usize, usize, usize) =
if really_long_fn_that_should_definitely_go_on_the_next_line() {
(
03294239434,
1213412342314,
21231234134,
834534234549898789,
9879234234543853457,
)
} else {
(0, 1, 2, 3, 4)
};
let test_function = function_with_param(this_param,
that_param
);
let test_function = function_with_param(
this_param,
that_param
);
let test_function = function_with_proper_indent(param1,
param2,
);
let selection = Selection::new(
changes
.clone()
.map(|(start, end, text): (usize, usize, Option<Tendril>)| {
let len = text.map(|text| text.len()).unwrap() - 1; // minus newline
let pos = start + len;
Range::new(pos, pos)
})
.collect(),
0,
);
return;
}
}
impl<A, D> MyTrait<A, D> for YourType
where
A: TraitB + TraitC,
D: TraitE + TraitF,
{
}
#[test]
//
match test {
Some(a) => 1,
None => {
unimplemented!()
}
}
std::panic::set_hook(Box::new(move |info| {
hook(info);
}));
{ { {
1
}}}
pub fn change<I>(document: &Document, changes: I) -> Self
where
I: IntoIterator<Item = Change> + ExactSizeIterator,
{
[
1,
2,
3,
];
(
1,
2
);
true
}

@ -0,0 +1,68 @@
use helix_core::{
indent::{treesitter_indent_for_pos, IndentStyle},
syntax::Loader,
Syntax,
};
use std::path::PathBuf;
#[test]
fn test_treesitter_indent_rust() {
test_treesitter_indent("rust.rs", "source.rust");
}
#[test]
fn test_treesitter_indent_rust_2() {
test_treesitter_indent("indent.rs", "source.rust");
// TODO Use commands.rs as indentation test.
// Currently this fails because we can't align the parameters of a closure yet
// test_treesitter_indent("commands.rs", "source.rust");
}
fn test_treesitter_indent(file_name: &str, lang_scope: &str) {
let mut test_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
test_dir.push("tests/data/indent");
let mut test_file = test_dir.clone();
test_file.push(file_name);
let test_file = std::fs::File::open(test_file).unwrap();
let doc = ropey::Rope::from_reader(test_file).unwrap();
let mut config_file = test_dir;
config_file.push("languages.toml");
let config = std::fs::read(config_file).unwrap();
let config = toml::from_slice(&config).unwrap();
let loader = Loader::new(config);
// set runtime path so we can find the queries
let mut runtime = std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR"));
runtime.push("../runtime");
std::env::set_var("HELIX_RUNTIME", runtime.to_str().unwrap());
let language_config = loader.language_config_for_scope(lang_scope).unwrap();
let highlight_config = language_config.highlight_config(&[]).unwrap();
let syntax = Syntax::new(&doc, highlight_config, std::sync::Arc::new(loader));
let indent_query = language_config.indent_query().unwrap();
let text = doc.slice(..);
for i in 0..doc.len_lines() {
let line = text.line(i);
if let Some(pos) = helix_core::find_first_non_whitespace_char(line) {
let suggested_indent = treesitter_indent_for_pos(
indent_query,
&syntax,
&IndentStyle::Spaces(4),
text,
i,
text.line_to_char(i) + pos,
false,
)
.unwrap();
assert!(
line.get_slice(..pos).map_or(false, |s| s == suggested_indent),
"Wrong indentation on line {}:\n\"{}\" (original line)\n\"{}\" (suggested indentation)\n",
i+1,
line.slice(..line.len_chars()-1),
suggested_indent,
);
}
}
}

@ -19,6 +19,7 @@ serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0" serde_json = "1.0"
thiserror = "1.0" thiserror = "1.0"
tokio = { version = "1", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot", "net", "sync"] } tokio = { version = "1", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot", "net", "sync"] }
which = "4.2"
[dev-dependencies] [dev-dependencies]
fern = "0.6" fern = "0.6"

@ -105,6 +105,9 @@ impl Client {
        args: Vec<&str>,
        id: usize,
    ) -> Result<(Self, UnboundedReceiver<Payload>)> {
+        // Resolve path to the binary
+        let cmd = which::which(cmd).map_err(|err| anyhow::anyhow!(err))?;
        let process = Command::new(cmd)
            .args(args)
            .stdin(Stdio::piped())

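The hunk above (and its twin in the LSP client further down) resolves a bare command name against $PATH with the `which` crate before spawning, so a missing debug adapter or language server fails with a clear lookup error instead of an opaque spawn error. A minimal standalone sketch of that lookup, not part of this diff (the adapter name is a hypothetical example):

fn resolve_binary(cmd: &str) -> anyhow::Result<std::path::PathBuf> {
    // Search $PATH (and PATHEXT on Windows) for the executable,
    // mirroring the `which::which(cmd)` call added above.
    which::which(cmd).map_err(|err| anyhow::anyhow!(err))
}

// e.g. resolve_binary("lldb-vscode") would yield the adapter's full path when it is installed.
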
@ -1,21 +1,23 @@
[package]
-name = "helix-syntax"
+name = "helix-loader"
version = "0.6.0"
+description = "A post-modern text editor."
authors = ["Blaž Hrastnik <blaz@mxxn.io>"]
edition = "2021"
license = "MPL-2.0"
-description = "Tree-sitter grammars support"
categories = ["editor"]
repository = "https://github.com/helix-editor/helix"
homepage = "https://helix-editor.com"
-include = ["src/**/*", "languages/**/*", "build.rs", "!**/docs/**/*", "!**/test/**/*", "!**/examples/**/*", "!**/build/**/*"]
[dependencies]
+anyhow = "1"
+serde = { version = "1.0", features = ["derive"] }
+toml = "0.5"
+etcetera = "0.3"
tree-sitter = "0.20"
libloading = "0.7"
-anyhow = "1"
+once_cell = "1.9"
-[build-dependencies]
+# cloning/compiling tree-sitter grammars
cc = { version = "1" }
threadpool = { version = "1.0" }
-anyhow = "1"

@ -0,0 +1,6 @@
fn main() {
println!(
"cargo:rustc-env=BUILD_TARGET={}",
std::env::var("TARGET").unwrap()
);
}

@ -0,0 +1,391 @@
use anyhow::{anyhow, Context, Result};
use libloading::{Library, Symbol};
use serde::{Deserialize, Serialize};
use std::fs;
use std::time::SystemTime;
use std::{
collections::HashSet,
path::{Path, PathBuf},
process::Command,
sync::mpsc::channel,
};
use tree_sitter::Language;
#[cfg(unix)]
const DYLIB_EXTENSION: &str = "so";
#[cfg(windows)]
const DYLIB_EXTENSION: &str = "dll";
#[derive(Debug, Serialize, Deserialize)]
struct Configuration {
#[serde(rename = "use-grammars")]
pub grammar_selection: Option<GrammarSelection>,
pub grammar: Vec<GrammarConfiguration>,
}
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "lowercase", untagged)]
pub enum GrammarSelection {
Only { only: HashSet<String> },
Except { except: HashSet<String> },
}
#[derive(Debug, Serialize, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct GrammarConfiguration {
#[serde(rename = "name")]
pub grammar_id: String,
pub source: GrammarSource,
}
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "lowercase", untagged)]
pub enum GrammarSource {
Local {
path: String,
},
Git {
#[serde(rename = "git")]
remote: String,
#[serde(rename = "rev")]
revision: String,
subpath: Option<String>,
},
}
const BUILD_TARGET: &str = env!("BUILD_TARGET");
const REMOTE_NAME: &str = "origin";
pub fn get_language(name: &str) -> Result<Language> {
let name = name.to_ascii_lowercase();
let mut library_path = crate::runtime_dir().join("grammars").join(&name);
library_path.set_extension(DYLIB_EXTENSION);
let library = unsafe { Library::new(&library_path) }
.with_context(|| format!("Error opening dynamic library {:?}", library_path))?;
let language_fn_name = format!("tree_sitter_{}", name.replace('-', "_"));
let language = unsafe {
let language_fn: Symbol<unsafe extern "C" fn() -> Language> = library
.get(language_fn_name.as_bytes())
.with_context(|| format!("Failed to load symbol {}", language_fn_name))?;
language_fn()
};
std::mem::forget(library);
Ok(language)
}
pub fn fetch_grammars() -> Result<()> {
// We do not need to fetch local grammars.
let mut grammars = get_grammar_configs()?;
grammars.retain(|grammar| !matches!(grammar.source, GrammarSource::Local { .. }));
run_parallel(grammars, fetch_grammar, "fetch")
}
pub fn build_grammars() -> Result<()> {
run_parallel(get_grammar_configs()?, build_grammar, "build")
}
// Returns the set of grammar configurations the user requests.
// Grammars are configured in the default and user `languages.toml` and are
// merged. The `grammar_selection` key of the config is then used to filter
// down all grammars into a subset of the user's choosing.
fn get_grammar_configs() -> Result<Vec<GrammarConfiguration>> {
let config: Configuration = crate::user_lang_config()
.context("Could not parse languages.toml")?
.try_into()?;
let grammars = match config.grammar_selection {
Some(GrammarSelection::Only { only: selections }) => config
.grammar
.into_iter()
.filter(|grammar| selections.contains(&grammar.grammar_id))
.collect(),
Some(GrammarSelection::Except { except: rejections }) => config
.grammar
.into_iter()
.filter(|grammar| !rejections.contains(&grammar.grammar_id))
.collect(),
None => config.grammar,
};
Ok(grammars)
}
fn run_parallel<F>(grammars: Vec<GrammarConfiguration>, job: F, action: &'static str) -> Result<()>
where
F: Fn(GrammarConfiguration) -> Result<()> + std::marker::Send + 'static + Copy,
{
let pool = threadpool::Builder::new().build();
let (tx, rx) = channel();
for grammar in grammars {
let tx = tx.clone();
pool.execute(move || {
tx.send(job(grammar)).unwrap();
});
}
drop(tx);
// TODO: print all failures instead of the first one found.
rx.iter()
.find(|result| result.is_err())
.map(|err| err.with_context(|| format!("Failed to {} some grammar(s)", action)))
.unwrap_or(Ok(()))
}
fn fetch_grammar(grammar: GrammarConfiguration) -> Result<()> {
if let GrammarSource::Git {
remote, revision, ..
} = grammar.source
{
let grammar_dir = crate::runtime_dir()
.join("grammars/sources")
.join(&grammar.grammar_id);
fs::create_dir_all(&grammar_dir).context(format!(
"Could not create grammar directory {:?}",
grammar_dir
))?;
        // ensure the grammar dir contains a git directory
if !grammar_dir.join(".git").is_dir() {
git(&grammar_dir, ["init"])?;
}
// ensure the remote matches the configured remote
if get_remote_url(&grammar_dir).map_or(true, |s| s != remote) {
set_remote(&grammar_dir, &remote)?;
}
// ensure the revision matches the configured revision
if get_revision(&grammar_dir).map_or(true, |s| s != revision) {
// Fetch the exact revision from the remote.
// Supported by server-side git since v2.5.0 (July 2015),
// enabled by default on major git hosts.
git(
&grammar_dir,
["fetch", "--depth", "1", REMOTE_NAME, &revision],
)?;
git(&grammar_dir, ["checkout", &revision])?;
println!(
"Grammar '{}' checked out at '{}'.",
grammar.grammar_id, revision
);
} else {
println!("Grammar '{}' is already up to date.", grammar.grammar_id);
}
}
Ok(())
}
// Sets the remote for a repository to the given URL, creating the remote if
// it does not yet exist.
fn set_remote(repository_dir: &Path, remote_url: &str) -> Result<String> {
git(
repository_dir,
["remote", "set-url", REMOTE_NAME, remote_url],
)
.or_else(|_| git(repository_dir, ["remote", "add", REMOTE_NAME, remote_url]))
}
fn get_remote_url(repository_dir: &Path) -> Option<String> {
git(repository_dir, ["remote", "get-url", REMOTE_NAME]).ok()
}
fn get_revision(repository_dir: &Path) -> Option<String> {
git(repository_dir, ["rev-parse", "HEAD"]).ok()
}
// A wrapper around 'git' commands which returns stdout on success and a
// helpful error message showing stdout and stderr on error.
fn git<I, S>(repository_dir: &Path, args: I) -> Result<String>
where
I: IntoIterator<Item = S>,
S: AsRef<std::ffi::OsStr>,
{
let output = Command::new("git")
.args(args)
.current_dir(repository_dir)
.output()?;
if output.status.success() {
Ok(String::from_utf8_lossy(&output.stdout)
.trim_end()
.to_owned())
} else {
// TODO: figure out how to display the git command using `args`
Err(anyhow!(
"Git command failed.\nStdout: {}\nStderr: {}",
String::from_utf8_lossy(&output.stdout),
String::from_utf8_lossy(&output.stderr),
))
}
}
fn build_grammar(grammar: GrammarConfiguration) -> Result<()> {
let grammar_dir = if let GrammarSource::Local { path } = &grammar.source {
PathBuf::from(&path)
} else {
crate::runtime_dir()
.join("grammars/sources")
.join(&grammar.grammar_id)
};
let grammar_dir_entries = grammar_dir.read_dir().with_context(|| {
format!(
"Failed to read directory {:?}. Did you use 'hx --grammar fetch'?",
grammar_dir
)
})?;
if grammar_dir_entries.count() == 0 {
return Err(anyhow!(
"Directory {:?} is empty. Did you use 'hx --grammar fetch'?",
grammar_dir
));
};
let path = match &grammar.source {
GrammarSource::Git {
subpath: Some(subpath),
..
} => grammar_dir.join(subpath),
_ => grammar_dir,
}
.join("src");
build_tree_sitter_library(&path, grammar)
}
fn build_tree_sitter_library(src_path: &Path, grammar: GrammarConfiguration) -> Result<()> {
let header_path = src_path;
let parser_path = src_path.join("parser.c");
let mut scanner_path = src_path.join("scanner.c");
let scanner_path = if scanner_path.exists() {
Some(scanner_path)
} else {
scanner_path.set_extension("cc");
if scanner_path.exists() {
Some(scanner_path)
} else {
None
}
};
let parser_lib_path = crate::runtime_dir().join("grammars");
let mut library_path = parser_lib_path.join(&grammar.grammar_id);
library_path.set_extension(DYLIB_EXTENSION);
let recompile = needs_recompile(&library_path, &parser_path, &scanner_path)
.context("Failed to compare source and binary timestamps")?;
if !recompile {
println!("Grammar '{}' is already built.", grammar.grammar_id);
return Ok(());
}
println!("Building grammar '{}'", grammar.grammar_id);
let mut config = cc::Build::new();
config
.cpp(true)
.opt_level(3)
.cargo_metadata(false)
.host(BUILD_TARGET)
.target(BUILD_TARGET);
let compiler = config.get_compiler();
let mut command = Command::new(compiler.path());
command.current_dir(src_path);
for (key, value) in compiler.env() {
command.env(key, value);
}
if cfg!(windows) {
command
.args(&["/nologo", "/LD", "/I"])
.arg(header_path)
.arg("/Od")
.arg("/utf-8");
if let Some(scanner_path) = scanner_path.as_ref() {
command.arg(scanner_path);
}
command
.arg(parser_path)
.arg("/link")
.arg(format!("/out:{}", library_path.to_str().unwrap()));
} else {
command
.arg("-shared")
.arg("-fPIC")
.arg("-fno-exceptions")
.arg("-g")
.arg("-I")
.arg(header_path)
.arg("-o")
.arg(&library_path)
.arg("-O3");
if let Some(scanner_path) = scanner_path.as_ref() {
if scanner_path.extension() == Some("c".as_ref()) {
command.arg("-xc").arg("-std=c99").arg(scanner_path);
} else {
command.arg(scanner_path);
}
}
command.arg("-xc").arg(parser_path);
if cfg!(all(unix, not(target_os = "macos"))) {
command.arg("-Wl,-z,relro,-z,now");
}
}
let output = command.output().context("Failed to execute C compiler")?;
if !output.status.success() {
return Err(anyhow!(
"Parser compilation failed.\nStdout: {}\nStderr: {}",
String::from_utf8_lossy(&output.stdout),
String::from_utf8_lossy(&output.stderr)
));
}
Ok(())
}
fn needs_recompile(
lib_path: &Path,
parser_c_path: &Path,
scanner_path: &Option<PathBuf>,
) -> Result<bool> {
if !lib_path.exists() {
return Ok(true);
}
let lib_mtime = mtime(lib_path)?;
if mtime(parser_c_path)? > lib_mtime {
return Ok(true);
}
if let Some(scanner_path) = scanner_path {
if mtime(scanner_path)? > lib_mtime {
return Ok(true);
}
}
Ok(false)
}
fn mtime(path: &Path) -> Result<SystemTime> {
Ok(fs::metadata(path)?.modified()?)
}
/// Gives the contents of a file from a language's `runtime/queries/<lang>`
/// directory
pub fn load_runtime_file(language: &str, filename: &str) -> Result<String, std::io::Error> {
let path = crate::RUNTIME_DIR
.join("queries")
.join(language)
.join(filename);
std::fs::read_to_string(&path)
}

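These helpers back the grammar fetching and building flow that the error messages above point at (`hx --grammar fetch`). A minimal sketch of how a caller might drive them, not part of this diff and using only the public functions defined in this file:

use anyhow::Result;

fn sync_grammars() -> Result<()> {
    // Clone or update every configured grammar repository at its pinned revision
    // (fetch_grammars itself skips grammars with a local source)...
    helix_loader::grammar::fetch_grammars()?;
    // ...then compile each one into runtime/grammars/<name> with the platform dylib extension.
    helix_loader::grammar::build_grammars()?;
    // A compiled grammar can then be loaded by name.
    let _rust = helix_loader::grammar::get_language("rust")?;
    Ok(())
}

The set of grammars these functions operate on can be narrowed with the `use-grammars` key that deserializes into `grammar_selection`, e.g. `use-grammars = { only = ["rust"] }` or `use-grammars = { except = ["ocaml"] }` in languages.toml.
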
@ -0,0 +1,161 @@
pub mod grammar;
use etcetera::base_strategy::{choose_base_strategy, BaseStrategy};
pub static RUNTIME_DIR: once_cell::sync::Lazy<std::path::PathBuf> =
once_cell::sync::Lazy::new(runtime_dir);
pub fn runtime_dir() -> std::path::PathBuf {
if let Ok(dir) = std::env::var("HELIX_RUNTIME") {
return dir.into();
}
const RT_DIR: &str = "runtime";
let conf_dir = config_dir().join(RT_DIR);
if conf_dir.exists() {
return conf_dir;
}
if let Ok(dir) = std::env::var("CARGO_MANIFEST_DIR") {
        // This is the directory of the crate being run by cargo; we need the workspace path, so take the parent.
return std::path::PathBuf::from(dir).parent().unwrap().join(RT_DIR);
}
// fallback to location of the executable being run
std::env::current_exe()
.ok()
.and_then(|path| path.parent().map(|path| path.to_path_buf().join(RT_DIR)))
.unwrap()
}
pub fn config_dir() -> std::path::PathBuf {
// TODO: allow env var override
let strategy = choose_base_strategy().expect("Unable to find the config directory!");
let mut path = strategy.config_dir();
path.push("helix");
path
}
pub fn cache_dir() -> std::path::PathBuf {
// TODO: allow env var override
let strategy = choose_base_strategy().expect("Unable to find the config directory!");
let mut path = strategy.cache_dir();
path.push("helix");
path
}
pub fn config_file() -> std::path::PathBuf {
config_dir().join("config.toml")
}
pub fn lang_config_file() -> std::path::PathBuf {
config_dir().join("languages.toml")
}
pub fn log_file() -> std::path::PathBuf {
cache_dir().join("helix.log")
}
/// Default built-in languages.toml.
pub fn default_lang_config() -> toml::Value {
toml::from_slice(include_bytes!("../../languages.toml"))
.expect("Could not parse bultin-in languages.toml to valid toml")
}
/// User configured languages.toml file, merged with the default config.
pub fn user_lang_config() -> Result<toml::Value, toml::de::Error> {
let def_lang_conf = default_lang_config();
let data = std::fs::read(crate::config_dir().join("languages.toml"));
let user_lang_conf = match data {
Ok(raw) => {
let value = toml::from_slice(&raw)?;
merge_toml_values(def_lang_conf, value)
}
Err(_) => def_lang_conf,
};
Ok(user_lang_conf)
}
// right overrides left
pub fn merge_toml_values(left: toml::Value, right: toml::Value) -> toml::Value {
use toml::Value;
fn get_name(v: &Value) -> Option<&str> {
v.get("name").and_then(Value::as_str)
}
match (left, right) {
(Value::Array(mut left_items), Value::Array(right_items)) => {
left_items.reserve(right_items.len());
for rvalue in right_items {
let lvalue = get_name(&rvalue)
.and_then(|rname| left_items.iter().position(|v| get_name(v) == Some(rname)))
.map(|lpos| left_items.remove(lpos));
let mvalue = match lvalue {
Some(lvalue) => merge_toml_values(lvalue, rvalue),
None => rvalue,
};
left_items.push(mvalue);
}
Value::Array(left_items)
}
(Value::Table(mut left_map), Value::Table(right_map)) => {
for (rname, rvalue) in right_map {
match left_map.remove(&rname) {
Some(lvalue) => {
let merged_value = merge_toml_values(lvalue, rvalue);
left_map.insert(rname, merged_value);
}
None => {
left_map.insert(rname, rvalue);
}
}
}
Value::Table(left_map)
}
// Catch everything else we didn't handle, and use the right value
(_, value) => value,
}
}
#[cfg(test)]
mod merge_toml_tests {
use super::merge_toml_values;
#[test]
fn language_tomls() {
use toml::Value;
const USER: &str = "
[[language]]
name = \"nix\"
test = \"bbb\"
indent = { tab-width = 4, unit = \" \", test = \"aaa\" }
";
let base: Value = toml::from_slice(include_bytes!("../../languages.toml"))
.expect("Couldn't parse built-in languages config");
let user: Value = toml::from_str(USER).unwrap();
let merged = merge_toml_values(base, user);
let languages = merged.get("language").unwrap().as_array().unwrap();
let nix = languages
.iter()
.find(|v| v.get("name").unwrap().as_str().unwrap() == "nix")
.unwrap();
let nix_indent = nix.get("indent").unwrap();
        // We changed tab-width and unit in indent, so check that they have the new values
assert_eq!(
nix_indent.get("tab-width").unwrap().as_integer().unwrap(),
4
);
assert_eq!(nix_indent.get("unit").unwrap().as_str().unwrap(), " ");
        // We added new keys, so check them
assert_eq!(nix.get("test").unwrap().as_str().unwrap(), "bbb");
assert_eq!(nix_indent.get("test").unwrap().as_str().unwrap(), "aaa");
        // We didn't change comment-token, so it should be the same
assert_eq!(nix.get("comment-token").unwrap().as_str().unwrap(), "#");
}
}

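Beyond the built-in test above, the merge semantics are easy to exercise on any two TOML documents. A small self-contained sketch, not part of this diff, showing that `[[...]]` arrays merge by `name` and that the right-hand value wins on conflicts:

fn merge_demo() {
    let base: toml::Value =
        toml::from_str("[[language]]\nname = \"nix\"\ncomment-token = \"#\"\n").unwrap();
    let user: toml::Value =
        toml::from_str("[[language]]\nname = \"nix\"\ncomment-token = \"//\"\n").unwrap();
    let merged = helix_loader::merge_toml_values(base, user);
    // A single "nix" entry survives, and the user's value takes precedence.
    assert_eq!(merged["language"][0]["comment-token"].as_str(), Some("//"));
}
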
@ -23,5 +23,6 @@ lsp-types = { version = "0.92", features = ["proposed"] }
serde = { version = "1.0", features = ["derive"] } serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0" serde_json = "1.0"
thiserror = "1.0" thiserror = "1.0"
tokio = { version = "1.16", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot", "sync"] } tokio = { version = "1.17", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot", "sync"] }
tokio-stream = "0.1.8" tokio-stream = "0.1.8"
which = "4.2"

@ -43,6 +43,9 @@ impl Client {
        root_markers: Vec<String>,
        id: usize,
    ) -> Result<(Self, UnboundedReceiver<(usize, Call)>, Arc<Notify>)> {
+        // Resolve path to the binary
+        let cmd = which::which(cmd).map_err(|err| anyhow::anyhow!(err))?;
        let process = Command::new(cmd)
            .args(args)
            .stdin(Stdio::piped())
@ -110,6 +113,10 @@ impl Client {
        self.offset_encoding
    }
+    pub fn config(&self) -> Option<&Value> {
+        self.config.as_ref()
+    }
    /// Execute a RPC request on the language server.
    async fn request<R: lsp::request::Request>(&self, params: R::Params) -> Result<R::Result>
    where
@ -243,6 +250,13 @@ impl Client {
                root_uri: root,
                initialization_options: self.config.clone(),
                capabilities: lsp::ClientCapabilities {
+                    workspace: Some(lsp::WorkspaceClientCapabilities {
+                        configuration: Some(true),
+                        did_change_configuration: Some(lsp::DynamicRegistrationClientCapabilities {
+                            dynamic_registration: Some(false),
+                        }),
+                        ..Default::default()
+                    }),
                    text_document: Some(lsp::TextDocumentClientCapabilities {
                        completion: Some(lsp::CompletionClientCapabilities {
                            completion_item: Some(lsp::CompletionItemCapability {
@ -327,6 +341,16 @@ impl Client {
        self.exit().await
    }
+    // -------------------------------------------------------------------------------------------
+    // Workspace
+    // -------------------------------------------------------------------------------------------
+    pub fn did_change_configuration(&self, settings: Value) -> impl Future<Output = Result<()>> {
+        self.notify::<lsp::notification::DidChangeConfiguration>(
+            lsp::DidChangeConfigurationParams { settings },
+        )
+    }
    // -------------------------------------------------------------------------------------------
    // Text document
    // -------------------------------------------------------------------------------------------

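With the `workspace` capabilities and the `did_change_configuration` helper added above, the editor can push settings to a running server. A rough sketch of a call site, not part of this diff (it assumes the `Client` re-export at the helix-lsp crate root, and the settings payload is a made-up example):

async fn push_settings(client: &helix_lsp::Client) -> anyhow::Result<()> {
    // Hypothetical per-server settings blob; real values would come from the
    // editor's language configuration.
    let settings = serde_json::json!({ "exampleServer": { "someOption": true } });
    client.did_change_configuration(settings).await?;
    Ok(())
}
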
@ -191,6 +191,7 @@ pub mod util {
pub enum MethodCall {
    WorkDoneProgressCreate(lsp::WorkDoneProgressCreateParams),
    ApplyWorkspaceEdit(lsp::ApplyWorkspaceEditParams),
+    WorkspaceConfiguration(lsp::ConfigurationParams),
}
impl MethodCall {
@ -209,6 +210,12 @@ impl MethodCall {
.expect("Failed to parse ApplyWorkspaceEdit params"); .expect("Failed to parse ApplyWorkspaceEdit params");
Self::ApplyWorkspaceEdit(params) Self::ApplyWorkspaceEdit(params)
} }
lsp::request::WorkspaceConfiguration::METHOD => {
let params: lsp::ConfigurationParams = params
.parse()
.expect("Failed to parse WorkspaceConfiguration params");
Self::WorkspaceConfiguration(params)
}
_ => { _ => {
log::warn!("unhandled lsp request: {}", method); log::warn!("unhandled lsp request: {}", method);
return None; return None;

@ -1,13 +0,0 @@
helix-syntax
============
Syntax highlighting for helix; the (shallow) submodules reside here.
Differences from nvim-treesitter
--------------------------------
The syntaxes are commonly ported from
<https://github.com/nvim-treesitter/nvim-treesitter>.
Note that we do not support the custom `#any-of` predicate that Neovim
supports, so it needs to be changed to `#match` with a regex.

@ -1,206 +0,0 @@
use anyhow::{anyhow, Context, Result};
use std::fs;
use std::time::SystemTime;
use std::{
path::{Path, PathBuf},
process::Command,
};
use std::sync::mpsc::channel;
fn collect_tree_sitter_dirs(ignore: &[String]) -> Result<Vec<String>> {
let mut dirs = Vec::new();
let path = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("languages");
for entry in fs::read_dir(path)? {
let entry = entry?;
let path = entry.path();
if !entry.file_type()?.is_dir() {
continue;
}
let dir = path.file_name().unwrap().to_str().unwrap().to_string();
// filter ignores
if ignore.contains(&dir) {
continue;
}
dirs.push(dir)
}
Ok(dirs)
}
#[cfg(unix)]
const DYLIB_EXTENSION: &str = "so";
#[cfg(windows)]
const DYLIB_EXTENSION: &str = "dll";
fn build_library(src_path: &Path, language: &str) -> Result<()> {
let header_path = src_path;
// let grammar_path = src_path.join("grammar.json");
let parser_path = src_path.join("parser.c");
let mut scanner_path = src_path.join("scanner.c");
let scanner_path = if scanner_path.exists() {
Some(scanner_path)
} else {
scanner_path.set_extension("cc");
if scanner_path.exists() {
Some(scanner_path)
} else {
None
}
};
let parser_lib_path = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("../runtime/grammars");
let mut library_path = parser_lib_path.join(language);
library_path.set_extension(DYLIB_EXTENSION);
let recompile = needs_recompile(&library_path, &parser_path, &scanner_path)
.with_context(|| "Failed to compare source and binary timestamps")?;
if !recompile {
return Ok(());
}
let mut config = cc::Build::new();
config.cpp(true).opt_level(2).cargo_metadata(false);
let compiler = config.get_compiler();
let mut command = Command::new(compiler.path());
command.current_dir(src_path);
for (key, value) in compiler.env() {
command.env(key, value);
}
if cfg!(windows) {
command
.args(&["/nologo", "/LD", "/I"])
.arg(header_path)
.arg("/Od")
.arg("/utf-8");
if let Some(scanner_path) = scanner_path.as_ref() {
command.arg(scanner_path);
}
command
.arg(parser_path)
.arg("/link")
.arg(format!("/out:{}", library_path.to_str().unwrap()));
} else {
command
.arg("-shared")
.arg("-fPIC")
.arg("-fno-exceptions")
.arg("-g")
.arg("-I")
.arg(header_path)
.arg("-o")
.arg(&library_path)
.arg("-O2");
if let Some(scanner_path) = scanner_path.as_ref() {
if scanner_path.extension() == Some("c".as_ref()) {
command.arg("-xc").arg("-std=c99").arg(scanner_path);
} else {
command.arg(scanner_path);
}
}
command.arg("-xc").arg(parser_path);
if cfg!(all(unix, not(target_os = "macos"))) {
command.arg("-Wl,-z,relro,-z,now");
}
}
let output = command
.output()
.with_context(|| "Failed to execute C compiler")?;
if !output.status.success() {
return Err(anyhow!(
"Parser compilation failed.\nStdout: {}\nStderr: {}",
String::from_utf8_lossy(&output.stdout),
String::from_utf8_lossy(&output.stderr)
));
}
Ok(())
}
fn needs_recompile(
lib_path: &Path,
parser_c_path: &Path,
scanner_path: &Option<PathBuf>,
) -> Result<bool> {
if !lib_path.exists() {
return Ok(true);
}
let lib_mtime = mtime(lib_path)?;
if mtime(parser_c_path)? > lib_mtime {
return Ok(true);
}
if let Some(scanner_path) = scanner_path {
if mtime(scanner_path)? > lib_mtime {
return Ok(true);
}
}
Ok(false)
}
fn mtime(path: &Path) -> Result<SystemTime> {
Ok(fs::metadata(path)?.modified()?)
}
fn build_dir(dir: &str, language: &str) {
println!("Build language {}", language);
if PathBuf::from("languages")
.join(dir)
.read_dir()
.unwrap()
.next()
.is_none()
{
eprintln!(
"The directory {} is empty, you probably need to use 'git submodule update --init --recursive'?",
dir
);
std::process::exit(1);
}
let path = PathBuf::from(env!("CARGO_MANIFEST_DIR"))
.join("languages")
.join(dir)
.join("src");
build_library(&path, language).unwrap();
}
fn main() {
let ignore = vec![
"tree-sitter-typescript".to_string(),
"tree-sitter-ocaml".to_string(),
];
let dirs = collect_tree_sitter_dirs(&ignore).unwrap();
let mut n_jobs = 0;
let pool = threadpool::Builder::new().build(); // by going through the builder, it'll use num_cpus
let (tx, rx) = channel();
for dir in dirs {
let tx = tx.clone();
n_jobs += 1;
pool.execute(move || {
let language = &dir.strip_prefix("tree-sitter-").unwrap();
build_dir(&dir, language);
// report progress
tx.send(1).unwrap();
});
}
pool.join();
// drop(tx);
assert_eq!(rx.try_iter().sum::<usize>(), n_jobs);
build_dir("tree-sitter-typescript/tsx", "tsx");
build_dir("tree-sitter-typescript/typescript", "typescript");
build_dir("tree-sitter-ocaml/ocaml", "ocaml");
build_dir("tree-sitter-ocaml/interface", "ocaml-interface")
}

@ -1 +0,0 @@
Subproject commit ca69cdf485e9ce2b2ef0991a720aa88d87d30231

@ -1 +0,0 @@
Subproject commit a8eb5cb57c66f74c63ab950de081207cccf52017

@ -1 +0,0 @@
Subproject commit f05e279aedde06a25801c3f2b2cc8ac17fac52ae

@ -1 +0,0 @@
Subproject commit 53a65a908167d6556e1fcdb67f1ee62aac101dda

@ -1 +0,0 @@
Subproject commit f6616f1e417ee8b62daf251aa1daa5d73781c596

@ -1 +0,0 @@
Subproject commit 5dd3c62f1bbe378b220fe16b317b85247898639e

@ -1 +0,0 @@
Subproject commit e8dcc9d2b404c542fd236ea5f7208f90be8a6e89

@ -1 +0,0 @@
Subproject commit 94e10230939e702b4fa3fa2cb5c3bc7173b95d07

@ -1 +0,0 @@
Subproject commit 6a25376685d1d47968c2cef06d4db8d84a70025e

@ -1 +0,0 @@
Subproject commit 7af32bc04a66ab196f5b9f92ac471f29372ae2ce

@ -1 +0,0 @@
Subproject commit f5d7bda543da788bd507b05bd722627dde66c9ec

@ -1 +0,0 @@
Subproject commit bd50ccf66b42c55252ac8efc1086af4ac6bab8cd

@ -1 +0,0 @@
Subproject commit 86985bde399c5f40b00bc75f7ab70a6c69a5f9c3

@ -1 +0,0 @@
Subproject commit 04e54ab6585dfd4fee6ddfe5849af56f101b6d4f

@ -1 +0,0 @@
Subproject commit 066e395e1107df17183cf3ae4230f1a1406cc972

@ -1 +0,0 @@
Subproject commit 0e4f0baf90b57e5aeb62dcdbf03062c6315d43ea

@ -1 +0,0 @@
Subproject commit c12e6ecb54485f764250556ffd7ccb18f8e2942b

@ -1 +0,0 @@
Subproject commit 332dc528f27044bc4427024dbb33e6941fc131f2

@ -1 +0,0 @@
Subproject commit 88408ffc5e27abcffced7010fc77396ae3636d7e

@ -1 +0,0 @@
Subproject commit 0fa917a7022d1cd2e9b779a6a8fc5dc7fad69c75

@ -1 +0,0 @@
Subproject commit 5e66e961eee421786bdda8495ed1db045e06b5fe

@ -1 +0,0 @@
Subproject commit b6ec26f181dd059eedd506fa5fbeae1b8e5556c8

@ -1 +0,0 @@
Subproject commit d93af487cc75120c89257195e6be46c999c6ba18

@ -1 +0,0 @@
Subproject commit 3ec55082cf0be015d03148be8edfdfa8c56e77f9

@ -1 +0,0 @@
Subproject commit bd6186c24d5eb13b4623efac9d944dcc095c0dad

@ -1 +0,0 @@
Subproject commit 4a95461c4761c624f2263725aca79eeaefd36cad

@ -1 +0,0 @@
Subproject commit 65bceef69c3b0f24c0b19ce67d79f57c96e90fcb

@ -1 +0,0 @@
Subproject commit 12ea597262125fc22fd2e91aa953ac69b19c26ca

@ -1 +0,0 @@
Subproject commit 7f720661de5316c0f8fee956526d4002fa1086d8

@ -1 +0,0 @@
Subproject commit d98426109258b266e1e92358c5f11716d2e8f638

@ -1 +0,0 @@
Subproject commit 0cdeb0e51411a3ba5493662952c3039de08939ca

@ -1 +0,0 @@
Subproject commit 3b213925b9c4f42c1acfe2e10bfbb438d9c6834d

@ -1 +0,0 @@
Subproject commit 06fabca19454b2dc00c1b211a7cb7ad0bc2585f1

@ -1 +0,0 @@
Subproject commit 6f5d40190ec8a0aa8c8410699353d820f4f7d7a6

@ -1 +0,0 @@
Subproject commit a4b9187417d6be349ee5fd4b6e77b4172c6827dd

@ -1 +0,0 @@
Subproject commit ad8c32917a16dfbb387d1da567bf0c3fb6fffde2

@ -1 +0,0 @@
Subproject commit 50f38ceab667f9d482640edfee803d74f4edeba5

@ -1 +0,0 @@
Subproject commit 23d419ba45789c5a47d31448061557716b02750a

@ -1 +0,0 @@
Subproject commit 0ac2c6da562c7a2c26ed7e8691d4a590f7e8b90a

@ -1 +0,0 @@
Subproject commit 57f855461aeeca73bd4218754fb26b5ac143f98f

Some files were not shown because too many files have changed in this diff.
