diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
new file mode 100644
index 00000000..526c7b3c
--- /dev/null
+++ b/.github/workflows/build.yml
@@ -0,0 +1,139 @@
+name: Build
+on:
+ pull_request:
+ push:
+ branches:
+ - master
+ schedule:
+ - cron: '00 01 * * *'
+
+jobs:
+ check:
+ name: Check
+ runs-on: ubuntu-latest
+ strategy:
+ matrix:
+ rust: [stable, msrv]
+ steps:
+ - name: Checkout sources
+ uses: actions/checkout@v3
+
+ - name: Use MSRV rust toolchain
+ if: matrix.rust == 'msrv'
+ run: cp .github/workflows/msrv-rust-toolchain.toml rust-toolchain.toml
+
+ - name: Install stable toolchain
+ uses: helix-editor/rust-toolchain@v1
+ with:
+ profile: minimal
+ override: true
+
+ - uses: Swatinem/rust-cache@v2
+
+ - name: Run cargo check
+ run: cargo check
+
+ test:
+ name: Test Suite
+ runs-on: ${{ matrix.os }}
+ env:
+ RUST_BACKTRACE: 1
+ HELIX_LOG_LEVEL: info
+ steps:
+ - name: Checkout sources
+ uses: actions/checkout@v3
+
+ - name: Install stable toolchain
+ uses: helix-editor/rust-toolchain@v1
+ with:
+ profile: minimal
+ override: true
+
+ - uses: Swatinem/rust-cache@v2
+
+ - name: Cache test tree-sitter grammar
+ uses: actions/cache@v3
+ with:
+ path: runtime/grammars
+ key: ${{ runner.os }}-stable-v${{ env.CACHE_VERSION }}-tree-sitter-grammars-${{ hashFiles('languages.toml') }}
+ restore-keys: ${{ runner.os }}-stable-v${{ env.CACHE_VERSION }}-tree-sitter-grammars-
+
+ - name: Run cargo test
+ run: cargo test --workspace
+
+ - name: Run cargo integration-test
+ run: cargo integration-test
+
+ strategy:
+ matrix:
+ os: [ubuntu-latest, macos-latest, windows-latest]
+
+ lints:
+ name: Lints
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout sources
+ uses: actions/checkout@v3
+
+ - name: Install stable toolchain
+ uses: helix-editor/rust-toolchain@v1
+ with:
+ profile: minimal
+ override: true
+ components: rustfmt, clippy
+
+ - uses: Swatinem/rust-cache@v2
+
+ - name: Run cargo fmt
+ run: cargo fmt --all -- --check
+
+ - name: Run cargo clippy
+ run: cargo clippy --workspace --all-targets -- -D warnings
+
+ - name: Run cargo doc
+ run: cargo doc --no-deps --workspace --document-private-items
+ env:
+ RUSTDOCFLAGS: -D warnings
+
+ docs:
+ name: Docs
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout sources
+ uses: actions/checkout@v3
+
+ - name: Install stable toolchain
+ uses: helix-editor/rust-toolchain@v1
+ with:
+ profile: minimal
+ override: true
+
+ - uses: Swatinem/rust-cache@v2
+
+ - name: Generate docs
+ run: cargo xtask docgen
+
+ - name: Check uncommitted documentation changes
+ run: |
+ git diff
+ git diff-files --quiet \
+ || (echo "Run 'cargo xtask docgen', commit the changes and push again" \
+ && exit 1)
+
+ queries:
+ name: Tree-sitter queries
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout sources
+ uses: actions/checkout@v3
+
+ - name: Install stable toolchain
+ uses: helix-editor/rust-toolchain@v1
+ with:
+ profile: minimal
+ override: true
+
+ - uses: Swatinem/rust-cache@v2
+
+      - name: Run query checks
+ run: cargo xtask query-check
diff --git a/.github/workflows/build.yml.orig b/.github/workflows/build.yml.orig
new file mode 100644
index 00000000..526c7b3c
--- /dev/null
+++ b/.github/workflows/build.yml.orig
@@ -0,0 +1,139 @@
+name: Build
+on:
+ pull_request:
+ push:
+ branches:
+ - master
+ schedule:
+ - cron: '00 01 * * *'
+
+jobs:
+ check:
+ name: Check
+ runs-on: ubuntu-latest
+ strategy:
+ matrix:
+ rust: [stable, msrv]
+ steps:
+ - name: Checkout sources
+ uses: actions/checkout@v3
+
+ - name: Use MSRV rust toolchain
+ if: matrix.rust == 'msrv'
+ run: cp .github/workflows/msrv-rust-toolchain.toml rust-toolchain.toml
+
+ - name: Install stable toolchain
+ uses: helix-editor/rust-toolchain@v1
+ with:
+ profile: minimal
+ override: true
+
+ - uses: Swatinem/rust-cache@v2
+
+ - name: Run cargo check
+ run: cargo check
+
+ test:
+ name: Test Suite
+ runs-on: ${{ matrix.os }}
+ env:
+ RUST_BACKTRACE: 1
+ HELIX_LOG_LEVEL: info
+ steps:
+ - name: Checkout sources
+ uses: actions/checkout@v3
+
+ - name: Install stable toolchain
+ uses: helix-editor/rust-toolchain@v1
+ with:
+ profile: minimal
+ override: true
+
+ - uses: Swatinem/rust-cache@v2
+
+ - name: Cache test tree-sitter grammar
+ uses: actions/cache@v3
+ with:
+ path: runtime/grammars
+ key: ${{ runner.os }}-stable-v${{ env.CACHE_VERSION }}-tree-sitter-grammars-${{ hashFiles('languages.toml') }}
+ restore-keys: ${{ runner.os }}-stable-v${{ env.CACHE_VERSION }}-tree-sitter-grammars-
+
+ - name: Run cargo test
+ run: cargo test --workspace
+
+ - name: Run cargo integration-test
+ run: cargo integration-test
+
+ strategy:
+ matrix:
+ os: [ubuntu-latest, macos-latest, windows-latest]
+
+ lints:
+ name: Lints
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout sources
+ uses: actions/checkout@v3
+
+ - name: Install stable toolchain
+ uses: helix-editor/rust-toolchain@v1
+ with:
+ profile: minimal
+ override: true
+ components: rustfmt, clippy
+
+ - uses: Swatinem/rust-cache@v2
+
+ - name: Run cargo fmt
+ run: cargo fmt --all -- --check
+
+ - name: Run cargo clippy
+ run: cargo clippy --workspace --all-targets -- -D warnings
+
+ - name: Run cargo doc
+ run: cargo doc --no-deps --workspace --document-private-items
+ env:
+ RUSTDOCFLAGS: -D warnings
+
+ docs:
+ name: Docs
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout sources
+ uses: actions/checkout@v3
+
+ - name: Install stable toolchain
+ uses: helix-editor/rust-toolchain@v1
+ with:
+ profile: minimal
+ override: true
+
+ - uses: Swatinem/rust-cache@v2
+
+ - name: Generate docs
+ run: cargo xtask docgen
+
+ - name: Check uncommitted documentation changes
+ run: |
+ git diff
+ git diff-files --quiet \
+ || (echo "Run 'cargo xtask docgen', commit the changes and push again" \
+ && exit 1)
+
+ queries:
+ name: Tree-sitter queries
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout sources
+ uses: actions/checkout@v3
+
+ - name: Install stable toolchain
+ uses: helix-editor/rust-toolchain@v1
+ with:
+ profile: minimal
+ override: true
+
+ - uses: Swatinem/rust-cache@v2
+
+      - name: Run query checks
+ run: cargo xtask query-check
diff --git a/.github/workflows/languages.toml b/.github/workflows/languages.toml
index 18cf71cf..b883ba1a 100644
--- a/.github/workflows/languages.toml
+++ b/.github/workflows/languages.toml
@@ -11,7 +11,7 @@ indent = { tab-width = 4, unit = " " }
[[grammar]]
name = "rust"
-source = { git = "https://github.com/tree-sitter/tree-sitter-rust", rev = "a360da0a29a19c281d08295a35ecd0544d2da211" }
+source = { git = "https://github.com/tree-sitter/tree-sitter-rust", rev = "0431a2c60828731f27491ee9fdefe25e250ce9c9" }
[[language]]
name = "nix"
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index f2058232..1672933c 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -24,13 +24,10 @@ jobs:
profile: minimal
override: true
- - uses: Swatinem/rust-cache@v1
+ - uses: Swatinem/rust-cache@v2
- name: Fetch tree-sitter grammars
- uses: actions-rs/cargo@v1
- with:
- command: run
- args: --package=helix-loader --bin=hx-loader
+ run: cargo run --package=helix-loader --bin=hx-loader
- name: Bundle grammars
run: tar cJf grammars.tar.xz -C runtime/grammars/sources .
@@ -198,16 +195,6 @@ jobs:
- uses: actions/download-artifact@v3
- - name: Calculate tag name
- run: |
- name=dev
- if [[ $GITHUB_REF == refs/tags/* ]]; then
- name=${GITHUB_REF:10}
- fi
- echo ::set-output name=val::$name
- echo TAG=$name >> $GITHUB_ENV
- id: tagname
-
- name: Build archive
shell: bash
run: |
@@ -227,7 +214,7 @@ jobs:
if [[ $platform =~ "windows" ]]; then
exe=".exe"
fi
- pkgname=helix-$TAG-$platform
+ pkgname=helix-$GITHUB_REF_NAME-$platform
mkdir $pkgname
cp $source/LICENSE $source/README.md $pkgname
mkdir $pkgname/contrib
@@ -247,7 +234,7 @@ jobs:
fi
done
- tar cJf dist/helix-$TAG-source.tar.xz -C $source .
+ tar cJf dist/helix-$GITHUB_REF_NAME-source.tar.xz -C $source .
mv dist $source/
- name: Upload binaries to release
@@ -257,7 +244,7 @@ jobs:
repo_token: ${{ secrets.GITHUB_TOKEN }}
file: dist/*
file_glob: true
- tag: ${{ steps.tagname.outputs.val }}
+ tag: ${{ github.ref_name }}
overwrite: true
- name: Upload binaries as artifact
diff --git a/.idea/.gitignore b/.idea/.gitignore
new file mode 100644
index 00000000..13566b81
--- /dev/null
+++ b/.idea/.gitignore
@@ -0,0 +1,8 @@
+# Default ignored files
+/shelf/
+/workspace.xml
+# Editor-based HTTP Client requests
+/httpRequests/
+# Datasource local storage ignored files
+/dataSources/
+/dataSources.local.xml
diff --git a/.idea/helix.iml b/.idea/helix.iml
new file mode 100644
index 00000000..bc2cd874
--- /dev/null
+++ b/.idea/helix.iml
@@ -0,0 +1,8 @@
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.idea/inspectionProfiles/Project_Default.xml b/.idea/inspectionProfiles/Project_Default.xml
new file mode 100644
index 00000000..03d9549e
--- /dev/null
+++ b/.idea/inspectionProfiles/Project_Default.xml
@@ -0,0 +1,6 @@
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.idea/modules.xml b/.idea/modules.xml
new file mode 100644
index 00000000..6c65ec58
--- /dev/null
+++ b/.idea/modules.xml
@@ -0,0 +1,8 @@
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.idea/vcs.xml b/.idea/vcs.xml
new file mode 100644
index 00000000..94a25f7f
--- /dev/null
+++ b/.idea/vcs.xml
@@ -0,0 +1,6 @@
+
+
+
+
+
+
\ No newline at end of file
diff --git a/Cargo.lock b/Cargo.lock
index 93459aa0..616c5317 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -13,6 +13,18 @@ dependencies = [
"version_check",
]
+[[package]]
+name = "ahash"
+version = "0.8.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bf6ccdb167abbf410dcb915cabd428929d7f6a04980b54a11f26a39f1c7f7107"
+dependencies = [
+ "cfg-if",
+ "getrandom",
+ "once_cell",
+ "version_check",
+]
+
[[package]]
name = "aho-corasick"
version = "0.7.18"
@@ -92,9 +104,9 @@ checksum = "df8670b8c7b9dae1793364eafadf7239c40d669904660c5960d74cfd80b46a53"
[[package]]
name = "cc"
-version = "1.0.74"
+version = "1.0.77"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "581f5dba903aac52ea3feb5ec4810848460ee833876f1f9b0fdeab1f19091574"
+checksum = "e9f73505338f7d905b19d18738976aae232eb46b8efc15554ffc56deb5d9ebe4"
[[package]]
name = "cfg-if"
@@ -115,9 +127,9 @@ dependencies = [
[[package]]
name = "chrono"
-version = "0.4.22"
+version = "0.4.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bfd4d1b31faaa3a89d7934dbded3111da0d2ef28e3ebccdb4f0179f5929d1ef1"
+checksum = "16b0a3d9ed01224b22057780a37bb8c5dbfe1be8ba48678e7bf57ec4b385411f"
dependencies = [
"iana-time-zone",
"num-integer",
@@ -400,18 +412,29 @@ version = "0.12.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
dependencies = [
- "ahash",
+ "ahash 0.7.6",
+]
+
+[[package]]
+name = "hashbrown"
+version = "0.13.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "33ff8ae62cd3a9102e5637afc8452c55acf3844001bd5374e0b0bd7b6616c038"
+dependencies = [
+ "ahash 0.8.2",
]
[[package]]
name = "helix-core"
version = "0.6.0"
dependencies = [
+ "ahash 0.8.2",
"arc-swap",
"bitflags",
"chrono",
"encoding_rs",
"etcetera",
+ "hashbrown 0.13.1",
"helix-loader",
"log",
"once_cell",
@@ -655,9 +678,9 @@ checksum = "8371e4e5341c3a96db127eb2465ac681ced4c433e01dd0e938adbef26ba93ba5"
[[package]]
name = "libloading"
-version = "0.7.3"
+version = "0.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "efbc0f03f9a775e9f6aed295c6a1ba2253c5757a9e03d55c6caa46a681abcddd"
+checksum = "b67380fd3b2fbe7527a606e18729d21c6f3951633d0500574c4dc22d2d638b9f"
dependencies = [
"cfg-if",
"winapi",
@@ -876,9 +899,9 @@ dependencies = [
[[package]]
name = "regex"
-version = "1.6.0"
+version = "1.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4c4eb3267174b8c6c2f654116623910a0fef09c4753f8dd83db29c48a0df988b"
+checksum = "e076559ef8e241f2ae3479e36f97bd5741c0330689e217ad51ce2c76808b868a"
dependencies = [
"aho-corasick",
"memchr",
@@ -959,9 +982,9 @@ dependencies = [
[[package]]
name = "serde_json"
-version = "1.0.87"
+version = "1.0.88"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6ce777b7b150d76b9cf60d28b55f5847135a003f7d7350c6be7a773508ce7d45"
+checksum = "8e8b3801309262e8184d9687fb697586833e939767aea0dda89f5a8e650e8bd7"
dependencies = [
"itoa",
"ryu",
@@ -1023,9 +1046,9 @@ dependencies = [
[[package]]
name = "similar"
-version = "2.2.0"
+version = "2.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "62ac7f900db32bf3fd12e0117dd3dc4da74bc52ebaac97f39668446d89694803"
+checksum = "420acb44afdae038210c99e69aae24109f32f15500aa708e81d46c9f29d55fcf"
[[package]]
name = "slab"
@@ -1196,9 +1219,9 @@ checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c"
[[package]]
name = "tokio"
-version = "1.21.2"
+version = "1.22.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a9e03c497dc955702ba729190dc4aac6f2a0ce97f913e5b1b5912fc5039d9099"
+checksum = "d76ce4a75fb488c605c54bf610f221cea8b0dafb53333c1a67e8ee199dcd2ae3"
dependencies = [
"autocfg",
"bytes",
@@ -1288,7 +1311,7 @@ version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c5faade31a542b8b35855fff6e8def199853b2da8da256da52f52f1316ee3137"
dependencies = [
- "hashbrown",
+ "hashbrown 0.12.3",
"regex",
]
diff --git a/README.md b/README.md
index 0ba1f696..ed2ec438 100644
--- a/README.md
+++ b/README.md
@@ -76,10 +76,10 @@ config directory (for example `~/.config/helix/runtime` on Linux/macOS, or `%App
| -------------------- | ------------------------------------------------ |
| Windows (Cmd) | `xcopy /e /i runtime %AppData%\helix\runtime` |
| Windows (PowerShell) | `xcopy /e /i runtime $Env:AppData\helix\runtime` |
-| Linux / MacOS | `ln -s $PWD/runtime ~/.config/helix/runtime` |
+| Linux / macOS | `ln -s $PWD/runtime ~/.config/helix/runtime` |
Starting with Windows Vista you can also create symbolic links on Windows. Note that this requires
-elevated priviliges - i.e. PowerShell or Cmd must be run as administrator.
+elevated privileges - i.e. PowerShell or Cmd must be run as administrator.
**PowerShell:**
@@ -135,9 +135,9 @@ sed -i "s|Terminal=true|Terminal=false|g" ~/.local/share/applications/Helix.desk
Please note: there is no icon for Helix yet, so the system default will be used.
-## MacOS
+## macOS
-Helix can be installed on MacOS through homebrew:
+Helix can be installed on macOS through homebrew:
```
brew install helix
diff --git a/book/src/configuration.md b/book/src/configuration.md
index 41563f4f..ea8fd76f 100644
--- a/book/src/configuration.md
+++ b/book/src/configuration.md
@@ -46,7 +46,7 @@ on unix operating systems.
| `line-number` | Line number display: `absolute` simply shows each line's number, while `relative` shows the distance from the current line. When unfocused or in insert mode, `relative` will still show absolute line numbers. | `absolute` |
| `cursorline` | Highlight all lines with a cursor. | `false` |
| `cursorcolumn` | Highlight all columns with a cursor. | `false` |
-| `gutters` | Gutters to display: Available are `diagnostics` and `line-numbers` and `spacer`, note that `diagnostics` also includes other features like breakpoints, 1-width padding will be inserted if gutters is non-empty | `["diagnostics", "line-numbers"]` |
+| `gutters` | Gutters to display: Available are `diagnostics` and `line-numbers` and `spacer`, note that `diagnostics` also includes other features like breakpoints, 1-width padding will be inserted if gutters is non-empty | `["diagnostics", "spacer", "line-numbers"]` |
| `auto-completion` | Enable automatic pop up of auto-completion. | `true` |
| `auto-format` | Enable automatic formatting on save. | `true` |
| `auto-save` | Enable automatic saving on focus moving away from Helix. Requires [focus event support](https://github.com/helix-editor/helix/wiki/Terminal-Support) from your terminal. | `false` |
@@ -103,7 +103,9 @@ The following statusline elements can be configured:
| `total-line-numbers` | The total line numbers of the opened file |
| `file-type` | The type of the opened file |
| `diagnostics` | The number of warnings and/or errors |
+| `workspace-diagnostics` | The number of warnings and/or errors on workspace |
| `selections` | The number of active selections |
+| `primary-selection-length` | The number of characters currently in primary selection |
| `position` | The cursor position |
| `position-percentage` | The cursor position as a percentage of the total number of lines |
| `separator` | The string defined in `editor.statusline.separator` (defaults to `"│"`) |
diff --git a/book/src/generated/lang-support.md b/book/src/generated/lang-support.md
index 411e67b8..487057e6 100644
--- a/book/src/generated/lang-support.md
+++ b/book/src/generated/lang-support.md
@@ -5,6 +5,7 @@
| bash | ✓ | | | `bash-language-server` |
| bass | ✓ | | | `bass` |
| beancount | ✓ | | | |
+| bicep | ✓ | | | `bicep-langserver` |
| c | ✓ | ✓ | ✓ | `clangd` |
| c-sharp | ✓ | ✓ | | `OmniSharp` |
| cairo | ✓ | | | |
@@ -24,7 +25,7 @@
| edoc | ✓ | | | |
| eex | ✓ | | | |
| ejs | ✓ | | | |
-| elixir | ✓ | ✓ | | `elixir-ls` |
+| elixir | ✓ | ✓ | ✓ | `elixir-ls` |
| elm | ✓ | | | `elm-language-server` |
| elvish | ✓ | | | `elvish` |
| env | ✓ | | | |
@@ -50,12 +51,12 @@
| hare | ✓ | | | |
| haskell | ✓ | | | `haskell-language-server-wrapper` |
| hcl | ✓ | | ✓ | `terraform-ls` |
-| heex | ✓ | ✓ | | |
+| heex | ✓ | ✓ | | `elixir-ls` |
| html | ✓ | | | `vscode-html-language-server` |
| idris | | | | `idris2-lsp` |
| iex | ✓ | | | |
| ini | ✓ | | | |
-| java | ✓ | | | `jdtls` |
+| java | ✓ | ✓ | | `jdtls` |
| javascript | ✓ | ✓ | ✓ | `typescript-language-server` |
| jsdoc | ✓ | | | |
| json | ✓ | | ✓ | `vscode-json-language-server` |
@@ -77,7 +78,7 @@
| meson | ✓ | | ✓ | |
| mint | | | | `mint` |
| nickel | ✓ | | ✓ | `nls` |
-| nix | ✓ | | | `rnix-lsp` |
+| nix | ✓ | | | `nil` |
| nu | ✓ | | | |
| ocaml | ✓ | | ✓ | `ocamllsp` |
| ocaml-interface | ✓ | | | `ocamllsp` |
@@ -92,6 +93,7 @@
| protobuf | ✓ | | ✓ | |
| purescript | ✓ | | | `purescript-language-server` |
| python | ✓ | ✓ | ✓ | `pylsp` |
+| qml | ✓ | | ✓ | `qmlls` |
| r | ✓ | | | `R` |
| racket | | | | `racket` |
| regex | ✓ | | | |
diff --git a/book/src/generated/typable-cmd.md b/book/src/generated/typable-cmd.md
index adf1b4c6..9f01d2f5 100644
--- a/book/src/generated/typable-cmd.md
+++ b/book/src/generated/typable-cmd.md
@@ -44,7 +44,9 @@
| `:show-directory`, `:pwd` | Show the current working directory. |
| `:encoding` | Set encoding. Based on `https://encoding.spec.whatwg.org`. |
| `:reload` | Discard changes and reload from the source file. |
+| `:reload-all` | Discard changes and reload all documents from the source files. |
| `:update` | Write changes only if the file has been modified. |
+| `:lsp-workspace-command` | Open workspace command picker |
| `:lsp-restart` | Restarts the Language Server that is in use by the current doc |
| `:tree-sitter-scopes` | Display tree sitter scopes, primarily for theming and development. |
| `:debug-start`, `:dbg` | Start a debug session from a given template with given parameters. |
diff --git a/book/src/install.md b/book/src/install.md
index a041d651..44f13584 100644
--- a/book/src/install.md
+++ b/book/src/install.md
@@ -103,10 +103,10 @@ via the `HELIX_RUNTIME` environment variable.
| -------------------- | ------------------------------------------------ |
| Windows (Cmd) | `xcopy /e /i runtime %AppData%\helix\runtime` |
| Windows (PowerShell) | `xcopy /e /i runtime $Env:AppData\helix\runtime` |
-| Linux / MacOS | `ln -s $PWD/runtime ~/.config/helix/runtime` |
+| Linux / macOS | `ln -s $PWD/runtime ~/.config/helix/runtime` |
Starting with Windows Vista you can also create symbolic links on Windows. Note that this requires
-elevated priviliges - i.e. PowerShell or Cmd must be run as administrator.
+elevated privileges - i.e. PowerShell or Cmd must be run as administrator.
**PowerShell:**
diff --git a/contrib/completion/hx.bash b/contrib/completion/hx.bash
index 89f3283c..01b42deb 100644
--- a/contrib/completion/hx.bash
+++ b/contrib/completion/hx.bash
@@ -19,5 +19,5 @@ _hx() {
COMPREPLY=($(compgen -fd -W "-h --help --tutor -V --version -v -vv -vvv --health -g --grammar --vsplit --hsplit -c --config --log" -- $2))
;;
esac
-} && complete -F _hx hx
+} && complete -o filenames -F _hx hx
diff --git a/flake.lock b/flake.lock
index 74206d2b..f097519e 100644
--- a/flake.lock
+++ b/flake.lock
@@ -1,22 +1,5 @@
{
"nodes": {
- "all-cabal-json": {
- "flake": false,
- "locked": {
- "lastModified": 1665552503,
- "narHash": "sha256-r14RmRSwzv5c+bWKUDaze6pXM7nOsiz1H8nvFHJvufc=",
- "owner": "nix-community",
- "repo": "all-cabal-json",
- "rev": "d7c0434eebffb305071404edcf9d5cd99703878e",
- "type": "github"
- },
- "original": {
- "owner": "nix-community",
- "ref": "hackage",
- "repo": "all-cabal-json",
- "type": "github"
- }
- },
"crane": {
"flake": false,
"locked": {
@@ -52,47 +35,45 @@
"dream2nix": {
"inputs": {
"alejandra": [
- "nci",
- "nixpkgs"
+ "nci"
+ ],
+ "all-cabal-json": [
+ "nci"
],
- "all-cabal-json": "all-cabal-json",
"crane": "crane",
"devshell": [
"nci",
"devshell"
],
"flake-utils-pre-commit": [
- "nci",
- "nixpkgs"
+ "nci"
+ ],
+ "ghc-utils": [
+ "nci"
],
- "ghc-utils": "ghc-utils",
"gomod2nix": [
- "nci",
- "nixpkgs"
+ "nci"
],
"mach-nix": [
- "nci",
- "nixpkgs"
+ "nci"
],
"nixpkgs": [
"nci",
"nixpkgs"
],
"poetry2nix": [
- "nci",
- "nixpkgs"
+ "nci"
],
"pre-commit-hooks": [
- "nci",
- "nixpkgs"
+ "nci"
]
},
"locked": {
- "lastModified": 1667429039,
- "narHash": "sha256-Lu6da25JioHzerkLHAHSO9suCQFzJ/XBjkcGCIbasLM=",
+ "lastModified": 1668851003,
+ "narHash": "sha256-X7RCQQynbxStZR2m7HW38r/msMQwVl3afD6UXOCtvx4=",
"owner": "nix-community",
"repo": "dream2nix",
- "rev": "5252794e58eedb02d607fa3187ffead7becc81b0",
+ "rev": "c77e8379d8fe01213ba072e40946cbfb7b58e628",
"type": "github"
},
"original": {
@@ -116,22 +97,6 @@
"type": "github"
}
},
- "ghc-utils": {
- "flake": false,
- "locked": {
- "lastModified": 1662774800,
- "narHash": "sha256-1Rd2eohGUw/s1tfvkepeYpg8kCEXiIot0RijapUjAkE=",
- "ref": "refs/heads/master",
- "rev": "bb3a2d3dc52ff0253fb9c2812bd7aa2da03e0fea",
- "revCount": 1072,
- "type": "git",
- "url": "https://gitlab.haskell.org/bgamari/ghc-utils"
- },
- "original": {
- "type": "git",
- "url": "https://gitlab.haskell.org/bgamari/ghc-utils"
- }
- },
"nci": {
"inputs": {
"devshell": "devshell",
@@ -144,11 +109,11 @@
]
},
"locked": {
- "lastModified": 1667542401,
- "narHash": "sha256-mdWjP5tjSf8n6FAtpSgL23kX4+eWBwLrSYo9iY3mA8Q=",
+ "lastModified": 1669011203,
+ "narHash": "sha256-Lymj4HktNEFmVXtwI0Os7srDXHZbZW0Nzw3/+5Hf8ko=",
"owner": "yusdacra",
"repo": "nix-cargo-integration",
- "rev": "cd5e5cbd81c80dc219455dd3b1e0ddb55fae51ec",
+ "rev": "c5133b91fc1d549087c91228bd213f2518728a4b",
"type": "github"
},
"original": {
@@ -159,11 +124,11 @@
},
"nixpkgs": {
"locked": {
- "lastModified": 1667482890,
- "narHash": "sha256-pua0jp87iwN7NBY5/ypx0s9L9CG49Ju/NI4wGwurHc4=",
+ "lastModified": 1668905981,
+ "narHash": "sha256-RBQa/+9Uk1eFTqIOXBSBezlEbA3v5OkgP+qptQs1OxY=",
"owner": "nixos",
"repo": "nixpkgs",
- "rev": "a2a777538d971c6b01c6e54af89ddd6567c055e8",
+ "rev": "690ffff026b4e635b46f69002c0f4e81c65dfc2e",
"type": "github"
},
"original": {
@@ -188,11 +153,11 @@
]
},
"locked": {
- "lastModified": 1667487142,
- "narHash": "sha256-bVuzLs1ZVggJAbJmEDVO9G6p8BH3HRaolK70KXvnWnU=",
+ "lastModified": 1668998422,
+ "narHash": "sha256-G/BklIplCHZEeDIabaaxqgITdIXtMolRGlwxn9jG2/Q=",
"owner": "oxalica",
"repo": "rust-overlay",
- "rev": "cf668f737ac986c0a89e83b6b2e3c5ddbd8cf33b",
+ "rev": "68ab029c93f8f8eed4cf3ce9a89a9fd4504b2d6e",
"type": "github"
},
"original": {
diff --git a/flake.nix b/flake.nix
index b1d3f01e..fe1c6b44 100644
--- a/flake.nix
+++ b/flake.nix
@@ -150,6 +150,7 @@
["languages.toml" "theme.toml" "base16_theme.toml"]
}
'';
+ checkPhase = ":";
meta.mainProgram = "hx";
};
@@ -166,7 +167,7 @@
packages
// {
helix-unwrapped = packages.helix.passthru.unwrapped;
- helix-unwrapped-debug = packages.helix-debug.passthru.unwrapped;
+ helix-unwrapped-dev = packages.helix-dev.passthru.unwrapped;
}
)
outputs.packages;
diff --git a/helix-core/Cargo.toml b/helix-core/Cargo.toml
index 45272f98..eb886c90 100644
--- a/helix-core/Cargo.toml
+++ b/helix-core/Cargo.toml
@@ -30,6 +30,8 @@ once_cell = "1.16"
arc-swap = "1"
regex = "1"
bitflags = "1.3"
+ahash = "0.8.2"
+hashbrown = { version = "0.13.1", features = ["raw"] }
log = "0.4"
serde = { version = "1.0", features = ["derive"] }
diff --git a/helix-core/src/comment.rs b/helix-core/src/comment.rs
index 44f6cdfe..ec5d7a45 100644
--- a/helix-core/src/comment.rs
+++ b/helix-core/src/comment.rs
@@ -100,43 +100,41 @@ mod test {
#[test]
fn test_find_line_comment() {
- use crate::State;
-
// four lines, two space indented, except for line 1 which is blank.
- let doc = Rope::from(" 1\n\n 2\n 3");
-
- let mut state = State::new(doc);
+ let mut doc = Rope::from(" 1\n\n 2\n 3");
// select whole document
- state.selection = Selection::single(0, state.doc.len_chars() - 1);
+ let mut selection = Selection::single(0, doc.len_chars() - 1);
- let text = state.doc.slice(..);
+ let text = doc.slice(..);
let res = find_line_comment("//", text, 0..3);
// (commented = true, to_change = [line 0, line 2], min = col 2, margin = 1)
assert_eq!(res, (false, vec![0, 2], 2, 1));
// comment
- let transaction = toggle_line_comments(&state.doc, &state.selection, None);
- transaction.apply(&mut state.doc);
- state.selection = state.selection.map(transaction.changes());
+ let transaction = toggle_line_comments(&doc, &selection, None);
+ transaction.apply(&mut doc);
+ selection = selection.map(transaction.changes());
- assert_eq!(state.doc, " // 1\n\n // 2\n // 3");
+ assert_eq!(doc, " // 1\n\n // 2\n // 3");
// uncomment
- let transaction = toggle_line_comments(&state.doc, &state.selection, None);
- transaction.apply(&mut state.doc);
- state.selection = state.selection.map(transaction.changes());
- assert_eq!(state.doc, " 1\n\n 2\n 3");
+ let transaction = toggle_line_comments(&doc, &selection, None);
+ transaction.apply(&mut doc);
+ selection = selection.map(transaction.changes());
+ assert_eq!(doc, " 1\n\n 2\n 3");
+ assert!(selection.len() == 1); // to ignore the selection unused warning
// 0 margin comments
- state.doc = Rope::from(" //1\n\n //2\n //3");
+ doc = Rope::from(" //1\n\n //2\n //3");
// reset the selection.
- state.selection = Selection::single(0, state.doc.len_chars() - 1);
+ selection = Selection::single(0, doc.len_chars() - 1);
- let transaction = toggle_line_comments(&state.doc, &state.selection, None);
- transaction.apply(&mut state.doc);
- state.selection = state.selection.map(transaction.changes());
- assert_eq!(state.doc, " 1\n\n 2\n 3");
+ let transaction = toggle_line_comments(&doc, &selection, None);
+ transaction.apply(&mut doc);
+ selection = selection.map(transaction.changes());
+ assert_eq!(doc, " 1\n\n 2\n 3");
+ assert!(selection.len() == 1); // to ignore the selection unused warning
// TODO: account for uncommenting with uneven comment indentation
}
diff --git a/helix-core/src/history.rs b/helix-core/src/history.rs
index 5cd72b07..82509242 100644
--- a/helix-core/src/history.rs
+++ b/helix-core/src/history.rs
@@ -1,9 +1,15 @@
-use crate::{Assoc, ChangeSet, Range, Rope, State, Transaction};
+use crate::{Assoc, ChangeSet, Range, Rope, Selection, Transaction};
use once_cell::sync::Lazy;
use regex::Regex;
use std::num::NonZeroUsize;
use std::time::{Duration, Instant};
+#[derive(Debug, Clone)]
+pub struct State {
+ pub doc: Rope,
+ pub selection: Selection,
+}
+
/// Stores the history of changes to a buffer.
///
/// Currently the history is represented as a vector of revisions. The vector
@@ -48,7 +54,7 @@ pub struct History {
}
/// A single point in history. See [History] for more information.
-#[derive(Debug)]
+#[derive(Debug, Clone)]
struct Revision {
parent: usize,
last_child: Option,
@@ -113,6 +119,37 @@ impl History {
self.current == 0
}
+ /// Returns the changes since the given revision composed into a transaction.
+ /// Returns None if there are no changes between the current and given revisions.
+ pub fn changes_since(&self, revision: usize) -> Option {
+ use std::cmp::Ordering::*;
+
+ match revision.cmp(&self.current) {
+ Equal => None,
+ Less => {
+ let mut child = self.revisions[revision].last_child?.get();
+ let mut transaction = self.revisions[child].transaction.clone();
+ while child != self.current {
+ child = self.revisions[child].last_child?.get();
+ transaction = transaction.compose(self.revisions[child].transaction.clone());
+ }
+ Some(transaction)
+ }
+ Greater => {
+ let mut inversion = self.revisions[revision].inversion.clone();
+ let mut parent = self.revisions[revision].parent;
+ while parent != self.current {
+ parent = self.revisions[parent].parent;
+ if parent == 0 {
+ return None;
+ }
+ inversion = inversion.compose(self.revisions[parent].inversion.clone());
+ }
+ Some(inversion)
+ }
+ }
+ }
+
/// Undo the last edit.
pub fn undo(&mut self) -> Option<&Transaction> {
if self.at_root() {
@@ -366,12 +403,16 @@ impl std::str::FromStr for UndoKind {
#[cfg(test)]
mod test {
use super::*;
+ use crate::Selection;
#[test]
fn test_undo_redo() {
let mut history = History::default();
let doc = Rope::from("hello");
- let mut state = State::new(doc);
+ let mut state = State {
+ doc,
+ selection: Selection::point(0),
+ };
let transaction1 =
Transaction::change(&state.doc, vec![(5, 5, Some(" world!".into()))].into_iter());
@@ -420,7 +461,10 @@ mod test {
fn test_earlier_later() {
let mut history = History::default();
let doc = Rope::from("a\n");
- let mut state = State::new(doc);
+ let mut state = State {
+ doc,
+ selection: Selection::point(0),
+ };
fn undo(history: &mut History, state: &mut State) {
if let Some(transaction) = history.undo() {
diff --git a/helix-core/src/increment/date_time.rs b/helix-core/src/increment/date_time.rs
index 1574bf4d..265242ce 100644
--- a/helix-core/src/increment/date_time.rs
+++ b/helix-core/src/increment/date_time.rs
@@ -74,12 +74,12 @@ impl DateTimeIncrementor {
(true, false) => {
let date = NaiveDate::parse_from_str(date_time, format.fmt).ok()?;
- date.and_hms(0, 0, 0)
+ date.and_hms_opt(0, 0, 0).unwrap()
}
(false, true) => {
let time = NaiveTime::parse_from_str(date_time, format.fmt).ok()?;
- NaiveDate::from_ymd(0, 1, 1).and_time(time)
+ NaiveDate::from_ymd_opt(0, 1, 1).unwrap().and_time(time)
}
(false, false) => return None,
};
@@ -312,10 +312,10 @@ fn ndays_in_month(year: i32, month: u32) -> u32 {
} else {
(year, month + 1)
};
- let d = NaiveDate::from_ymd(y, m, 1);
+ let d = NaiveDate::from_ymd_opt(y, m, 1).unwrap();
// ...is preceded by the last day of the original month.
- d.pred().day()
+ d.pred_opt().unwrap().day()
}
fn add_months(date_time: NaiveDateTime, amount: i64) -> Option {
@@ -334,7 +334,7 @@ fn add_months(date_time: NaiveDateTime, amount: i64) -> Option {
let day = cmp::min(date_time.day(), ndays_in_month(year, month));
- Some(NaiveDate::from_ymd(year, month, day).and_time(date_time.time()))
+ NaiveDate::from_ymd_opt(year, month, day).map(|date| date.and_time(date_time.time()))
}
fn add_years(date_time: NaiveDateTime, amount: i64) -> Option {
@@ -342,8 +342,8 @@ fn add_years(date_time: NaiveDateTime, amount: i64) -> Option {
let ndays = ndays_in_month(year, date_time.month());
if date_time.day() > ndays {
- let d = NaiveDate::from_ymd(year, date_time.month(), ndays);
- Some(d.succ().and_time(date_time.time()))
+ NaiveDate::from_ymd_opt(year, date_time.month(), ndays)
+ .and_then(|date| date.succ_opt().map(|date| date.and_time(date_time.time())))
} else {
date_time.with_year(year)
}
diff --git a/helix-core/src/indent.rs b/helix-core/src/indent.rs
index 9526fc8a..d6aa5edb 100644
--- a/helix-core/src/indent.rs
+++ b/helix-core/src/indent.rs
@@ -461,59 +461,61 @@ fn query_indents(
/// so that the indent computation starts with the correct syntax node.
fn extend_nodes<'a>(
node: &mut Node<'a>,
- deepest_preceding: Option>,
+ mut deepest_preceding: Node<'a>,
extend_captures: &HashMap>,
text: RopeSlice,
line: usize,
tab_width: usize,
) {
- if let Some(mut deepest_preceding) = deepest_preceding {
- let mut stop_extend = false;
- while deepest_preceding != *node {
- let mut extend_node = false;
- // This will be set to true if this node is captured, regardless of whether
- // it actually will be extended (e.g. because the cursor isn't indented
- // more than the node).
- let mut node_captured = false;
- if let Some(captures) = extend_captures.get(&deepest_preceding.id()) {
- for capture in captures {
- match capture {
- ExtendCapture::PreventOnce => {
- stop_extend = true;
- }
- ExtendCapture::Extend => {
- node_captured = true;
- // We extend the node if
- // - the cursor is on the same line as the end of the node OR
- // - the line that the cursor is on is more indented than the
- // first line of the node
- if deepest_preceding.end_position().row == line {
+ let mut stop_extend = false;
+
+ while deepest_preceding != *node {
+ let mut extend_node = false;
+ // This will be set to true if this node is captured, regardless of whether
+ // it actually will be extended (e.g. because the cursor isn't indented
+ // more than the node).
+ let mut node_captured = false;
+ if let Some(captures) = extend_captures.get(&deepest_preceding.id()) {
+ for capture in captures {
+ match capture {
+ ExtendCapture::PreventOnce => {
+ stop_extend = true;
+ }
+ ExtendCapture::Extend => {
+ node_captured = true;
+ // We extend the node if
+ // - the cursor is on the same line as the end of the node OR
+ // - the line that the cursor is on is more indented than the
+ // first line of the node
+ if deepest_preceding.end_position().row == line {
+ extend_node = true;
+ } else {
+ let cursor_indent = indent_level_for_line(text.line(line), tab_width);
+ let node_indent = indent_level_for_line(
+ text.line(deepest_preceding.start_position().row),
+ tab_width,
+ );
+ if cursor_indent > node_indent {
extend_node = true;
- } else {
- let cursor_indent =
- indent_level_for_line(text.line(line), tab_width);
- let node_indent = indent_level_for_line(
- text.line(deepest_preceding.start_position().row),
- tab_width,
- );
- if cursor_indent > node_indent {
- extend_node = true;
- }
}
}
}
}
}
- // If we encountered some `StopExtend` capture before, we don't
- // extend the node even if we otherwise would
- if node_captured && stop_extend {
- stop_extend = false;
- } else if extend_node && !stop_extend {
- *node = deepest_preceding;
- break;
- }
- // This parent always exists since node is an ancestor of deepest_preceding
- deepest_preceding = deepest_preceding.parent().unwrap();
+ }
+ // If we encountered some `StopExtend` capture before, we don't
+ // extend the node even if we otherwise would
+ if node_captured && stop_extend {
+ stop_extend = false;
+ } else if extend_node && !stop_extend {
+ *node = deepest_preceding;
+ break;
+ }
+ // If the tree contains a syntax error, `deepest_preceding` may not
+ // have a parent despite being a descendant of `node`.
+ deepest_preceding = match deepest_preceding.parent() {
+ Some(parent) => parent,
+ None => return,
}
}
}
@@ -612,14 +614,16 @@ pub fn treesitter_indent_for_pos(
let extend_captures = query_result.extend_captures;
// Check for extend captures, potentially changing the node that the indent calculation starts with
- extend_nodes(
- &mut node,
- deepest_preceding,
- &extend_captures,
- text,
- line,
- tab_width,
- );
+ if let Some(deepest_preceding) = deepest_preceding {
+ extend_nodes(
+ &mut node,
+ deepest_preceding,
+ &extend_captures,
+ text,
+ line,
+ tab_width,
+ );
+ }
let mut first_in_line = get_first_in_line(node, new_line.then(|| byte_pos));
let mut result = Indentation::default();
diff --git a/helix-core/src/lib.rs b/helix-core/src/lib.rs
index 8f869e35..5f60c048 100644
--- a/helix-core/src/lib.rs
+++ b/helix-core/src/lib.rs
@@ -21,7 +21,6 @@ pub mod register;
pub mod search;
pub mod selection;
pub mod shellwords;
-mod state;
pub mod surround;
pub mod syntax;
pub mod test;
@@ -103,7 +102,6 @@ pub use smallvec::{smallvec, SmallVec};
pub use syntax::Syntax;
pub use diagnostic::Diagnostic;
-pub use state::State;
pub use line_ending::{LineEnding, DEFAULT_LINE_ENDING};
pub use transaction::{Assoc, Change, ChangeSet, Operation, Transaction};
diff --git a/helix-core/src/register.rs b/helix-core/src/register.rs
index 1cff77ba..52eb6e3e 100644
--- a/helix-core/src/register.rs
+++ b/helix-core/src/register.rs
@@ -15,11 +15,7 @@ impl Register {
}
pub fn new_with_values(name: char, values: Vec) -> Self {
- if name == '_' {
- Self::new(name)
- } else {
- Self { name, values }
- }
+ Self { name, values }
}
pub const fn name(&self) -> char {
@@ -31,15 +27,11 @@ impl Register {
}
pub fn write(&mut self, values: Vec) {
- if self.name != '_' {
- self.values = values;
- }
+ self.values = values;
}
pub fn push(&mut self, value: String) {
- if self.name != '_' {
- self.values.push(value);
- }
+ self.values.push(value);
}
}
@@ -54,19 +46,25 @@ impl Registers {
self.inner.get(&name)
}
- pub fn get_mut(&mut self, name: char) -> &mut Register {
- self.inner
- .entry(name)
- .or_insert_with(|| Register::new(name))
+ pub fn read(&self, name: char) -> Option<&[String]> {
+ self.get(name).map(|reg| reg.read())
}
pub fn write(&mut self, name: char, values: Vec) {
- self.inner
- .insert(name, Register::new_with_values(name, values));
+ if name != '_' {
+ self.inner
+ .insert(name, Register::new_with_values(name, values));
+ }
}
- pub fn read(&self, name: char) -> Option<&[String]> {
- self.get(name).map(|reg| reg.read())
+ pub fn push(&mut self, name: char, value: String) {
+ if name != '_' {
+ if let Some(r) = self.inner.get_mut(&name) {
+ r.push(value);
+ } else {
+ self.write(name, vec![value]);
+ }
+ }
}
pub fn first(&self, name: char) -> Option<&String> {
diff --git a/helix-core/src/shellwords.rs b/helix-core/src/shellwords.rs
index e8c5945b..9475f5e5 100644
--- a/helix-core/src/shellwords.rs
+++ b/helix-core/src/shellwords.rs
@@ -1,9 +1,9 @@
use std::borrow::Cow;
/// Auto escape for shellwords usage.
-pub fn escape(input: &str) -> Cow<'_, str> {
+pub fn escape(input: Cow) -> Cow {
if !input.chars().any(|x| x.is_ascii_whitespace()) {
- Cow::Borrowed(input)
+ input
} else if cfg!(unix) {
Cow::Owned(input.chars().fold(String::new(), |mut buf, c| {
if c.is_ascii_whitespace() {
@@ -17,127 +17,182 @@ pub fn escape(input: &str) -> Cow<'_, str> {
}
}
-/// Get the vec of escaped / quoted / doublequoted filenames from the input str
-pub fn shellwords(input: &str) -> Vec> {
- enum State {
- OnWhitespace,
- Unquoted,
- UnquotedEscaped,
- Quoted,
- QuoteEscaped,
- Dquoted,
- DquoteEscaped,
- }
+enum State {
+ OnWhitespace,
+ Unquoted,
+ UnquotedEscaped,
+ Quoted,
+ QuoteEscaped,
+ Dquoted,
+ DquoteEscaped,
+}
- use State::*;
+pub struct Shellwords<'a> {
+ state: State,
+ /// Shellwords where whitespace and escapes has been resolved.
+ words: Vec>,
+ /// The parts of the input that are divided into shellwords. This can be
+ /// used to retrieve the original text for a given word by looking up the
+ /// same index in the Vec as the word in `words`.
+ parts: Vec<&'a str>,
+}
- let mut state = Unquoted;
- let mut args: Vec> = Vec::new();
- let mut escaped = String::with_capacity(input.len());
+impl<'a> From<&'a str> for Shellwords<'a> {
+ fn from(input: &'a str) -> Self {
+ use State::*;
- let mut start = 0;
- let mut end = 0;
+ let mut state = Unquoted;
+ let mut words = Vec::new();
+ let mut parts = Vec::new();
+ let mut escaped = String::with_capacity(input.len());
- for (i, c) in input.char_indices() {
- state = match state {
- OnWhitespace => match c {
- '"' => {
- end = i;
- Dquoted
- }
- '\'' => {
- end = i;
- Quoted
- }
- '\\' => {
- if cfg!(unix) {
- escaped.push_str(&input[start..i]);
- start = i + 1;
- UnquotedEscaped
- } else {
+ let mut part_start = 0;
+ let mut unescaped_start = 0;
+ let mut end = 0;
+
+ for (i, c) in input.char_indices() {
+ state = match state {
+ OnWhitespace => match c {
+ '"' => {
+ end = i;
+ Dquoted
+ }
+ '\'' => {
+ end = i;
+ Quoted
+ }
+ '\\' => {
+ if cfg!(unix) {
+ escaped.push_str(&input[unescaped_start..i]);
+ unescaped_start = i + 1;
+ UnquotedEscaped
+ } else {
+ OnWhitespace
+ }
+ }
+ c if c.is_ascii_whitespace() => {
+ end = i;
OnWhitespace
}
- }
- c if c.is_ascii_whitespace() => {
- end = i;
- OnWhitespace
- }
- _ => Unquoted,
- },
- Unquoted => match c {
- '\\' => {
- if cfg!(unix) {
- escaped.push_str(&input[start..i]);
- start = i + 1;
- UnquotedEscaped
- } else {
- Unquoted
+ _ => Unquoted,
+ },
+ Unquoted => match c {
+ '\\' => {
+ if cfg!(unix) {
+ escaped.push_str(&input[unescaped_start..i]);
+ unescaped_start = i + 1;
+ UnquotedEscaped
+ } else {
+ Unquoted
+ }
}
- }
- c if c.is_ascii_whitespace() => {
- end = i;
- OnWhitespace
- }
- _ => Unquoted,
- },
- UnquotedEscaped => Unquoted,
- Quoted => match c {
- '\\' => {
- if cfg!(unix) {
- escaped.push_str(&input[start..i]);
- start = i + 1;
- QuoteEscaped
- } else {
- Quoted
+ c if c.is_ascii_whitespace() => {
+ end = i;
+ OnWhitespace
}
- }
- '\'' => {
- end = i;
- OnWhitespace
- }
- _ => Quoted,
- },
- QuoteEscaped => Quoted,
- Dquoted => match c {
- '\\' => {
- if cfg!(unix) {
- escaped.push_str(&input[start..i]);
- start = i + 1;
- DquoteEscaped
- } else {
- Dquoted
+ _ => Unquoted,
+ },
+ UnquotedEscaped => Unquoted,
+ Quoted => match c {
+ '\\' => {
+ if cfg!(unix) {
+ escaped.push_str(&input[unescaped_start..i]);
+ unescaped_start = i + 1;
+ QuoteEscaped
+ } else {
+ Quoted
+ }
}
- }
- '"' => {
- end = i;
- OnWhitespace
- }
- _ => Dquoted,
- },
- DquoteEscaped => Dquoted,
- };
+ '\'' => {
+ end = i;
+ OnWhitespace
+ }
+ _ => Quoted,
+ },
+ QuoteEscaped => Quoted,
+ Dquoted => match c {
+ '\\' => {
+ if cfg!(unix) {
+ escaped.push_str(&input[unescaped_start..i]);
+ unescaped_start = i + 1;
+ DquoteEscaped
+ } else {
+ Dquoted
+ }
+ }
+ '"' => {
+ end = i;
+ OnWhitespace
+ }
+ _ => Dquoted,
+ },
+ DquoteEscaped => Dquoted,
+ };
- if i >= input.len() - 1 && end == 0 {
- end = i + 1;
- }
+ if i >= input.len() - 1 && end == 0 {
+ end = i + 1;
+ }
- if end > 0 {
- let esc_trim = escaped.trim();
- let inp = &input[start..end];
+ if end > 0 {
+ let esc_trim = escaped.trim();
+ let inp = &input[unescaped_start..end];
- if !(esc_trim.is_empty() && inp.trim().is_empty()) {
- if esc_trim.is_empty() {
- args.push(inp.into());
- } else {
- args.push([escaped, inp.into()].concat().into());
- escaped = "".to_string();
+ if !(esc_trim.is_empty() && inp.trim().is_empty()) {
+ if esc_trim.is_empty() {
+ words.push(inp.into());
+ parts.push(inp);
+ } else {
+ words.push([escaped, inp.into()].concat().into());
+ parts.push(&input[part_start..end]);
+ escaped = "".to_string();
+ }
}
+ unescaped_start = i + 1;
+ part_start = i + 1;
+ end = 0;
}
- start = i + 1;
- end = 0;
}
+
+ debug_assert!(words.len() == parts.len());
+
+ Self {
+ state,
+ words,
+ parts,
+ }
+ }
+}
+
+impl<'a> Shellwords<'a> {
+ /// Checks that the input ends with a whitespace character which is not escaped.
+ ///
+ /// # Examples
+ ///
+ /// ```rust
+ /// use helix_core::shellwords::Shellwords;
+ /// assert_eq!(Shellwords::from(" ").ends_with_whitespace(), true);
+ /// assert_eq!(Shellwords::from(":open ").ends_with_whitespace(), true);
+ /// assert_eq!(Shellwords::from(":open foo.txt ").ends_with_whitespace(), true);
+ /// assert_eq!(Shellwords::from(":open").ends_with_whitespace(), false);
+ /// #[cfg(unix)]
+ /// assert_eq!(Shellwords::from(":open a\\ ").ends_with_whitespace(), false);
+ /// #[cfg(unix)]
+ /// assert_eq!(Shellwords::from(":open a\\ b.txt").ends_with_whitespace(), false);
+ /// ```
+ pub fn ends_with_whitespace(&self) -> bool {
+ matches!(self.state, State::OnWhitespace)
+ }
+
+ /// Returns the list of shellwords calculated from the input string.
+ pub fn words(&self) -> &[Cow<'a, str>] {
+ &self.words
+ }
+
+ /// Returns a list of strings which correspond to [`Self::words`] but represent the original
+ /// text in the input string - including escape characters - without separating whitespace.
+ pub fn parts(&self) -> &[&'a str] {
+ &self.parts
}
- args
}
#[cfg(test)]
@@ -148,7 +203,8 @@ mod test {
#[cfg(windows)]
fn test_normal() {
let input = r#":o single_word twó wörds \three\ \"with\ escaping\\"#;
- let result = shellwords(input);
+ let shellwords = Shellwords::from(input);
+ let result = shellwords.words().to_vec();
let expected = vec![
Cow::from(":o"),
Cow::from("single_word"),
@@ -166,7 +222,8 @@ mod test {
#[cfg(unix)]
fn test_normal() {
let input = r#":o single_word twó wörds \three\ \"with\ escaping\\"#;
- let result = shellwords(input);
+ let shellwords = Shellwords::from(input);
+ let result = shellwords.words().to_vec();
let expected = vec![
Cow::from(":o"),
Cow::from("single_word"),
@@ -183,7 +240,8 @@ mod test {
fn test_quoted() {
let quoted =
r#":o 'single_word' 'twó wörds' '' ' ''\three\' \"with\ escaping\\' 'quote incomplete"#;
- let result = shellwords(quoted);
+ let shellwords = Shellwords::from(quoted);
+ let result = shellwords.words().to_vec();
let expected = vec![
Cow::from(":o"),
Cow::from("single_word"),
@@ -198,7 +256,8 @@ mod test {
#[cfg(unix)]
fn test_dquoted() {
let dquoted = r#":o "single_word" "twó wörds" "" " ""\three\' \"with\ escaping\\" "dquote incomplete"#;
- let result = shellwords(dquoted);
+ let shellwords = Shellwords::from(dquoted);
+ let result = shellwords.words().to_vec();
let expected = vec![
Cow::from(":o"),
Cow::from("single_word"),
@@ -213,7 +272,8 @@ mod test {
#[cfg(unix)]
fn test_mixed() {
let dquoted = r#":o single_word 'twó wörds' "\three\' \"with\ escaping\\""no space before"'and after' $#%^@ "%^&(%^" ')(*&^%''a\\\\\b' '"#;
- let result = shellwords(dquoted);
+ let shellwords = Shellwords::from(dquoted);
+ let result = shellwords.words().to_vec();
let expected = vec![
Cow::from(":o"),
Cow::from("single_word"),
@@ -234,7 +294,8 @@ mod test {
fn test_lists() {
let input =
r#":set statusline.center ["file-type","file-encoding"] '["list", "in", "qoutes"]'"#;
- let result = shellwords(input);
+ let shellwords = Shellwords::from(input);
+ let result = shellwords.words().to_vec();
let expected = vec![
Cow::from(":set"),
Cow::from("statusline.center"),
@@ -247,15 +308,29 @@ mod test {
#[test]
#[cfg(unix)]
fn test_escaping_unix() {
- assert_eq!(escape("foobar"), Cow::Borrowed("foobar"));
- assert_eq!(escape("foo bar"), Cow::Borrowed("foo\\ bar"));
- assert_eq!(escape("foo\tbar"), Cow::Borrowed("foo\\\tbar"));
+ assert_eq!(escape("foobar".into()), Cow::Borrowed("foobar"));
+ assert_eq!(escape("foo bar".into()), Cow::Borrowed("foo\\ bar"));
+ assert_eq!(escape("foo\tbar".into()), Cow::Borrowed("foo\\\tbar"));
}
#[test]
#[cfg(windows)]
fn test_escaping_windows() {
- assert_eq!(escape("foobar"), Cow::Borrowed("foobar"));
- assert_eq!(escape("foo bar"), Cow::Borrowed("\"foo bar\""));
+ assert_eq!(escape("foobar".into()), Cow::Borrowed("foobar"));
+ assert_eq!(escape("foo bar".into()), Cow::Borrowed("\"foo bar\""));
+ }
+
+ #[test]
+ #[cfg(unix)]
+ fn test_parts() {
+ assert_eq!(Shellwords::from(":o a").parts(), &[":o", "a"]);
+ assert_eq!(Shellwords::from(":o a\\ ").parts(), &[":o", "a\\ "]);
+ }
+
+ #[test]
+ #[cfg(windows)]
+ fn test_parts() {
+ assert_eq!(Shellwords::from(":o a").parts(), &[":o", "a"]);
+ assert_eq!(Shellwords::from(":o a\\ ").parts(), &[":o", "a\\"]);
}
}
diff --git a/helix-core/src/state.rs b/helix-core/src/state.rs
deleted file mode 100644
index dcc4b11b..00000000
--- a/helix-core/src/state.rs
+++ /dev/null
@@ -1,17 +0,0 @@
-use crate::{Rope, Selection};
-
-#[derive(Debug, Clone)]
-pub struct State {
- pub doc: Rope,
- pub selection: Selection,
-}
-
-impl State {
- #[must_use]
- pub fn new(doc: Rope) -> Self {
- Self {
- doc,
- selection: Selection::point(0),
- }
- }
-}
diff --git a/helix-core/src/syntax.rs b/helix-core/src/syntax.rs
index 0f62577f..8dc34a3e 100644
--- a/helix-core/src/syntax.rs
+++ b/helix-core/src/syntax.rs
@@ -7,8 +7,10 @@ use crate::{
Rope, RopeSlice, Tendril,
};
+use ahash::RandomState;
use arc_swap::{ArcSwap, Guard};
use bitflags::bitflags;
+use hashbrown::raw::RawTable;
use slotmap::{DefaultKey as LayerId, HopSlotMap};
use std::{
@@ -16,7 +18,8 @@ use std::{
cell::RefCell,
collections::{HashMap, VecDeque},
fmt,
- mem::replace,
+ hash::{Hash, Hasher},
+ mem::{replace, transmute},
path::Path,
str::FromStr,
sync::Arc,
@@ -354,6 +357,26 @@ impl<'a> CapturedNode<'a> {
}
}
+/// The maximum number of in-progress matches a TS cursor can consider at once.
+/// This is set to a constant in order to avoid performance problems for medium to large files. Set with `set_match_limit`.
+/// Using such a limit means that we lose valid captures, so there is fundamentally a tradeoff here.
+///
+///
+/// Old tree sitter versions used a limit of 32 by default until this limit was removed in version `0.19.5` (must now be set manually).
+/// However, this causes performance issues for medium to large files.
+/// In helix, this problem caused treesitter motions to take multiple seconds to complete in medium-sized rust files (3k loc).
+///
+///
+/// Neovim also encountered this problem and reintroduced this limit after it was removed upstream
+/// (see and ).
+/// The number used here is fundamentally a tradeoff between breaking some obscure edge cases and performance.
+///
+///
+/// Neovim chose 64 for this value somewhat arbitrarily ().
+/// 64 is too low for some languages though. In particular, it breaks some highlighting for record fields in Erlang record definitions.
+/// This number can be increased if new syntax highlight breakages are found, as long as the performance penalty is not too high.
+const TREE_SITTER_MATCH_LIMIT: u32 = 256;
+
impl TextObjectQuery {
/// Run the query on the given node and return sub nodes which match given
/// capture ("function.inside", "class.around", etc).
@@ -394,6 +417,8 @@ impl TextObjectQuery {
.iter()
.find_map(|cap| self.query.capture_index_for_name(cap))?;
+ cursor.set_match_limit(TREE_SITTER_MATCH_LIMIT);
+
let nodes = cursor
.captures(&self.query, node, RopeProvider(slice))
.filter_map(move |(mat, _)| {
@@ -748,30 +773,38 @@ impl Syntax {
// Convert the changeset into tree sitter edits.
let edits = generate_edits(old_source, changeset);
+ // This table allows inverse indexing of `layers`.
+ // That is by hashing a `Layer` you can find
+ // the `LayerId` of an existing equivalent `Layer` in `layers`.
+ //
+ // It is used to determine if a new layer exists for an injection
+ // or if an existing layer needs to be updated.
+ let mut layers_table = RawTable::with_capacity(self.layers.len());
+ let layers_hasher = RandomState::new();
// Use the edits to update all layers markers
- if !edits.is_empty() {
- fn point_add(a: Point, b: Point) -> Point {
- if b.row > 0 {
- Point::new(a.row.saturating_add(b.row), b.column)
- } else {
- Point::new(0, a.column.saturating_add(b.column))
- }
+ fn point_add(a: Point, b: Point) -> Point {
+ if b.row > 0 {
+ Point::new(a.row.saturating_add(b.row), b.column)
+ } else {
+ Point::new(0, a.column.saturating_add(b.column))
}
- fn point_sub(a: Point, b: Point) -> Point {
- if a.row > b.row {
- Point::new(a.row.saturating_sub(b.row), a.column)
- } else {
- Point::new(0, a.column.saturating_sub(b.column))
- }
+ }
+ fn point_sub(a: Point, b: Point) -> Point {
+ if a.row > b.row {
+ Point::new(a.row.saturating_sub(b.row), a.column)
+ } else {
+ Point::new(0, a.column.saturating_sub(b.column))
}
+ }
- for layer in self.layers.values_mut() {
- // The root layer always covers the whole range (0..usize::MAX)
- if layer.depth == 0 {
- layer.flags = LayerUpdateFlags::MODIFIED;
- continue;
- }
+ for (layer_id, layer) in self.layers.iter_mut() {
+ // The root layer always covers the whole range (0..usize::MAX)
+ if layer.depth == 0 {
+ layer.flags = LayerUpdateFlags::MODIFIED;
+ continue;
+ }
+ if !edits.is_empty() {
for range in &mut layer.ranges {
// Roughly based on https://github.com/tree-sitter/tree-sitter/blob/ddeaa0c7f534268b35b4f6cb39b52df082754413/lib/src/subtree.c#L691-L720
for edit in edits.iter().rev() {
@@ -836,6 +869,12 @@ impl Syntax {
}
}
}
+
+ let hash = layers_hasher.hash_one(layer);
+ // Safety: insert_no_grow is unsafe because it assumes that the table
+ // has enough capacity to hold additional elements.
+ // This is always the case as we reserved enough capacity above.
+ unsafe { layers_table.insert_no_grow(hash, layer_id) };
}
PARSER.with(|ts_parser| {
@@ -843,6 +882,7 @@ impl Syntax {
let mut cursor = ts_parser.cursors.pop().unwrap_or_else(QueryCursor::new);
// TODO: might need to set cursor range
cursor.set_byte_range(0..usize::MAX);
+ cursor.set_match_limit(TREE_SITTER_MATCH_LIMIT);
let source_slice = source.slice(..);
@@ -959,27 +999,23 @@ impl Syntax {
let depth = layer.depth + 1;
// TODO: can't inline this since matches borrows self.layers
for (config, ranges) in injections {
- // Find an existing layer
- let layer = self
- .layers
- .iter_mut()
- .find(|(_, layer)| {
- layer.depth == depth && // TODO: track parent id instead
- layer.config.language == config.language && layer.ranges == ranges
+ let new_layer = LanguageLayer {
+ tree: None,
+ config,
+ depth,
+ ranges,
+ flags: LayerUpdateFlags::empty(),
+ };
+
+ // Find an identical existing layer
+ let layer = layers_table
+ .get(layers_hasher.hash_one(&new_layer), |&it| {
+ self.layers[it] == new_layer
})
- .map(|(id, _layer)| id);
+ .copied();
// ...or insert a new one.
- let layer_id = layer.unwrap_or_else(|| {
- self.layers.insert(LanguageLayer {
- tree: None,
- config,
- depth,
- ranges,
- // set the modified flag to ensure the layer is parsed
- flags: LayerUpdateFlags::empty(),
- })
- });
+ let layer_id = layer.unwrap_or_else(|| self.layers.insert(new_layer));
queue.push_back(layer_id);
}
@@ -1032,6 +1068,7 @@ impl Syntax {
// if reusing cursors & no range this resets to whole range
cursor_ref.set_byte_range(range.clone().unwrap_or(0..usize::MAX));
+ cursor_ref.set_match_limit(TREE_SITTER_MATCH_LIMIT);
let mut captures = cursor_ref
.captures(
@@ -1115,6 +1152,34 @@ pub struct LanguageLayer {
flags: LayerUpdateFlags,
}
+/// This PartialEq implementation only checks if that
+/// two layers are theoretically identical (meaning they highlight the same text range with the same language).
+/// It does not check whether the layers have the same internal treesitter
+/// state.
+impl PartialEq for LanguageLayer {
+ fn eq(&self, other: &Self) -> bool {
+ self.depth == other.depth
+ && self.config.language == other.config.language
+ && self.ranges == other.ranges
+ }
+}
+
+/// Hash implementation belongs to PartialEq implementation above.
+/// See its documentation for details.
+impl Hash for LanguageLayer {
+ fn hash(&self, state: &mut H) {
+ self.depth.hash(state);
+ // The transmute is necessary here because tree_sitter::Language does not derive Hash at the moment.
+ // However it does use #[repr] transparent so the transmute here is safe
+ // as `Language` (which `Grammar` is an alias for) is just a newtype wrapper around a (thin) pointer.
+ // This is also compatible with the PartialEq implementation of language
+ // as that is just a pointer comparison.
+ let language: *const () = unsafe { transmute(self.config.language) };
+ language.hash(state);
+ self.ranges.hash(state);
+ }
+}
+
impl LanguageLayer {
pub fn tree(&self) -> &Tree {
// TODO: no unwrap
@@ -1260,7 +1325,7 @@ use std::sync::atomic::{AtomicUsize, Ordering};
use std::{iter, mem, ops, str, usize};
use tree_sitter::{
Language as Grammar, Node, Parser, Point, Query, QueryCaptures, QueryCursor, QueryError,
- QueryMatch, Range, TextProvider, Tree,
+ QueryMatch, Range, TextProvider, Tree, TreeCursor,
};
const CANCELLATION_CHECK_INTERVAL: usize = 100;
@@ -2130,57 +2195,68 @@ impl> Iterator for Merge {
}
}
+fn node_is_visible(node: &Node) -> bool {
+ node.is_missing() || (node.is_named() && node.language().node_kind_is_visible(node.kind_id()))
+}
+
pub fn pretty_print_tree(fmt: &mut W, node: Node) -> fmt::Result {
- pretty_print_tree_impl(fmt, node, true, None, 0)
+ if node.child_count() == 0 {
+ if node_is_visible(&node) {
+ write!(fmt, "({})", node.kind())
+ } else {
+ write!(fmt, "\"{}\"", node.kind())
+ }
+ } else {
+ pretty_print_tree_impl(fmt, &mut node.walk(), 0)
+ }
}
fn pretty_print_tree_impl(
fmt: &mut W,
- node: Node,
- is_root: bool,
- field_name: Option<&str>,
+ cursor: &mut TreeCursor,
depth: usize,
) -> fmt::Result {
- fn is_visible(node: Node) -> bool {
- node.is_missing()
- || (node.is_named() && node.language().node_kind_is_visible(node.kind_id()))
- }
+ let node = cursor.node();
+ let visible = node_is_visible(&node);
- if is_visible(node) {
+ if visible {
let indentation_columns = depth * 2;
write!(fmt, "{:indentation_columns$}", "")?;
- if let Some(field_name) = field_name {
+ if let Some(field_name) = cursor.field_name() {
write!(fmt, "{}: ", field_name)?;
}
write!(fmt, "({}", node.kind())?;
- } else if is_root {
- write!(fmt, "(\"{}\")", node.kind())?;
}
- for child_idx in 0..node.child_count() {
- if let Some(child) = node.child(child_idx) {
- if is_visible(child) {
+ // Handle children.
+ if cursor.goto_first_child() {
+ loop {
+ if node_is_visible(&cursor.node()) {
fmt.write_char('\n')?;
}
- pretty_print_tree_impl(
- fmt,
- child,
- false,
- node.field_name_for_child(child_idx as u32),
- depth + 1,
- )?;
+ pretty_print_tree_impl(fmt, cursor, depth + 1)?;
+
+ if !cursor.goto_next_sibling() {
+ break;
+ }
}
+
+ let moved = cursor.goto_parent();
+ // The parent of the first child must exist, and must be `node`.
+ debug_assert!(moved);
+ debug_assert!(cursor.node() == node);
}
- if is_visible(node) {
- write!(fmt, ")")?;
+ if visible {
+ fmt.write_char(')')?;
}
Ok(())
}
+
#[cfg(test)]
mod test {
use super::*;
@@ -2353,11 +2429,17 @@ mod test {
}
#[track_caller]
- fn assert_pretty_print(source: &str, expected: &str, start: usize, end: usize) {
+ fn assert_pretty_print(
+ language_name: &str,
+ source: &str,
+ expected: &str,
+ start: usize,
+ end: usize,
+ ) {
let source = Rope::from_str(source);
let loader = Loader::new(Configuration { language: vec![] });
- let language = get_language("rust").unwrap();
+ let language = get_language(language_name).unwrap();
let config = HighlightConfiguration::new(language, "", "", "").unwrap();
let syntax = Syntax::new(&source, Arc::new(config), Arc::new(loader));
@@ -2377,13 +2459,14 @@ mod test {
#[test]
fn test_pretty_print() {
let source = r#"/// Hello"#;
- assert_pretty_print(source, "(line_comment)", 0, source.len());
+ assert_pretty_print("rust", source, "(line_comment)", 0, source.len());
// A large tree should be indented with fields:
let source = r#"fn main() {
println!("Hello, World!");
}"#;
assert_pretty_print(
+ "rust",
source,
concat!(
"(function_item\n",
@@ -2402,11 +2485,34 @@ mod test {
// Selecting a token should print just that token:
let source = r#"fn main() {}"#;
- assert_pretty_print(source, r#"("fn")"#, 0, 1);
+ assert_pretty_print("rust", source, r#""fn""#, 0, 1);
// Error nodes are printed as errors:
let source = r#"}{"#;
- assert_pretty_print(source, "(ERROR)", 0, source.len());
+ assert_pretty_print("rust", source, "(ERROR)", 0, source.len());
+
+ // Fields broken under unnamed nodes are determined correctly.
+ // In the following source, `object` belongs to the `singleton_method`
+ // rule but `name` and `body` belong to an unnamed helper `_method_rest`.
+ // This can cause a bug with a pretty-printing implementation that
+ // uses `Node::field_name_for_child` to determine field names but is
+ // fixed when using `TreeCursor::field_name`.
+ let source = "def self.method_name
+ true
+ end";
+ assert_pretty_print(
+ "ruby",
+ source,
+ concat!(
+ "(singleton_method\n",
+ " object: (self)\n",
+ " name: (identifier)\n",
+ " body: (body_statement\n",
+ " (true)))"
+ ),
+ 0,
+ source.len(),
+ );
}
#[test]
diff --git a/helix-core/src/test.rs b/helix-core/src/test.rs
index 3e54d2c2..17523ed7 100644
--- a/helix-core/src/test.rs
+++ b/helix-core/src/test.rs
@@ -148,6 +148,7 @@ pub fn plain(s: &str, selection: Selection) -> String {
}
#[cfg(test)]
+#[allow(clippy::module_inception)]
mod test {
use super::*;
diff --git a/helix-core/src/transaction.rs b/helix-core/src/transaction.rs
index 559e4f66..ae4d95ec 100644
--- a/helix-core/src/transaction.rs
+++ b/helix-core/src/transaction.rs
@@ -579,7 +579,7 @@ impl<'a> Iterator for ChangeIterator<'a> {
#[cfg(test)]
mod test {
use super::*;
- use crate::State;
+ use crate::history::State;
#[test]
fn composition() {
@@ -706,7 +706,10 @@ mod test {
#[test]
fn optimized_composition() {
- let mut state = State::new("".into());
+ let mut state = State {
+ doc: "".into(),
+ selection: Selection::point(0),
+ };
let t1 = Transaction::insert(&state.doc, &state.selection, Tendril::from("h"));
t1.apply(&mut state.doc);
state.selection = state.selection.clone().map(t1.changes());
diff --git a/helix-core/tests/data/indent/languages.toml b/helix-core/tests/data/indent/languages.toml
index f9cef494..3206f124 100644
--- a/helix-core/tests/data/indent/languages.toml
+++ b/helix-core/tests/data/indent/languages.toml
@@ -10,4 +10,4 @@ indent = { tab-width = 4, unit = " " }
[[grammar]]
name = "rust"
-source = { git = "https://github.com/tree-sitter/tree-sitter-rust", rev = "a360da0a29a19c281d08295a35ecd0544d2da211" }
+source = { git = "https://github.com/tree-sitter/tree-sitter-rust", rev = "0431a2c60828731f27491ee9fdefe25e250ce9c9" }
diff --git a/helix-lsp/Cargo.toml b/helix-lsp/Cargo.toml
index ad432d96..41884e73 100644
--- a/helix-lsp/Cargo.toml
+++ b/helix-lsp/Cargo.toml
@@ -22,6 +22,6 @@ lsp-types = { version = "0.93", features = ["proposed"] }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
thiserror = "1.0"
-tokio = { version = "1.21", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot", "sync"] }
+tokio = { version = "1.22", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot", "sync"] }
tokio-stream = "0.1.11"
which = "4.2"
diff --git a/helix-lsp/src/client.rs b/helix-lsp/src/client.rs
index af3c4b57..2c2c7c88 100644
--- a/helix-lsp/src/client.rs
+++ b/helix-lsp/src/client.rs
@@ -4,7 +4,6 @@ use crate::{
Call, Error, OffsetEncoding, Result,
};
-use anyhow::anyhow;
use helix_core::{find_root, ChangeSet, Rope};
use lsp_types as lsp;
use serde::Deserialize;
@@ -314,6 +313,7 @@ impl Client {
String::from("additionalTextEdits"),
],
}),
+ insert_replace_support: Some(true),
..Default::default()
}),
completion_item_kind: Some(lsp::CompletionItemKindCapability {
@@ -545,16 +545,17 @@ impl Client {
new_text: &Rope,
changes: &ChangeSet,
) -> Option>> {
- // figure out what kind of sync the server supports
-
let capabilities = self.capabilities.get().unwrap();
+ // Return early if the server does not support document sync.
let sync_capabilities = match capabilities.text_document_sync {
- Some(lsp::TextDocumentSyncCapability::Kind(kind))
- | Some(lsp::TextDocumentSyncCapability::Options(lsp::TextDocumentSyncOptions {
- change: Some(kind),
- ..
- })) => kind,
+ Some(
+ lsp::TextDocumentSyncCapability::Kind(kind)
+ | lsp::TextDocumentSyncCapability::Options(lsp::TextDocumentSyncOptions {
+ change: Some(kind),
+ ..
+ }),
+ ) => kind,
// None | SyncOptions { changes: None }
_ => return None,
};
@@ -630,8 +631,12 @@ impl Client {
text_document: lsp::TextDocumentIdentifier,
position: lsp::Position,
work_done_token: Option,
- ) -> impl Future