diff --git a/.cargo/config.toml b/.cargo/config.toml
index 5d615566..b016eca3 100644
--- a/.cargo/config.toml
+++ b/.cargo/config.toml
@@ -1,3 +1,3 @@
 [alias]
 xtask = "run --package xtask --"
-integration-test = "test --features integration --workspace --test integration"
+integration-test = "test --features integration --profile integration --workspace --test integration"
diff --git a/.github/ISSUE_TEMPLATE/bug_report.yaml b/.github/ISSUE_TEMPLATE/bug_report.yaml
index c67deb69..47fd3fe8 100644
--- a/.github/ISSUE_TEMPLATE/bug_report.yaml
+++ b/.github/ISSUE_TEMPLATE/bug_report.yaml
@@ -32,7 +32,7 @@ body:
     id: helix-log
     attributes:
       label: Helix log
-      description: See `hx -h` for log file path
+      description: See `hx -h` for log file path. If you can reproduce the issue run `RUST_BACKTRACE=1 hx -vv` to generate a more detailed log file.
       value: |
~/.cache/helix/helix.log @@ -61,7 +61,8 @@ body: label: Helix Version description: > Helix version (`hx -V` if using a release, `git describe` if building - from master) - placeholder: "helix 0.6.0 (c0dbd6dc)" + from master). + **Make sure that you are using the [latest helix release](https://github.com/helix-editor/helix/releases) or a newer master build** + placeholder: "helix 22.12 (5eaa6d97)" validations: required: true diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 0d6fcb3e..d7d7d47e 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -4,24 +4,17 @@ on: push: branches: - master + merge_group: schedule: - cron: '00 01 * * *' jobs: check: - name: Check + name: Check (msrv) runs-on: ubuntu-latest - strategy: - matrix: - rust: [stable, msrv] steps: - name: Checkout sources uses: actions/checkout@v3 - - - name: Use MSRV rust toolchain - if: matrix.rust == 'msrv' - run: cp .github/workflows/msrv-rust-toolchain.toml rust-toolchain.toml - - name: Install stable toolchain uses: helix-editor/rust-toolchain@v1 with: @@ -44,7 +37,7 @@ jobs: uses: actions/checkout@v3 - name: Install stable toolchain - uses: dtolnay/rust-toolchain@1.61 + uses: dtolnay/rust-toolchain@1.65 - uses: Swatinem/rust-cache@v2 @@ -73,7 +66,7 @@ jobs: uses: actions/checkout@v3 - name: Install stable toolchain - uses: dtolnay/rust-toolchain@1.61 + uses: dtolnay/rust-toolchain@1.65 with: components: rustfmt, clippy @@ -98,7 +91,7 @@ jobs: uses: actions/checkout@v3 - name: Install stable toolchain - uses: dtolnay/rust-toolchain@1.61 + uses: dtolnay/rust-toolchain@1.65 - uses: Swatinem/rust-cache@v2 diff --git a/.github/workflows/cachix.yml b/.github/workflows/cachix.yml index 20035678..7d2f734a 100644 --- a/.github/workflows/cachix.yml +++ b/.github/workflows/cachix.yml @@ -14,7 +14,7 @@ jobs: uses: actions/checkout@v3 - name: Install nix - uses: cachix/install-nix-action@v18 + uses: cachix/install-nix-action@v20 - name: Authenticate with Cachix uses: cachix/cachix-action@v12 diff --git a/.github/workflows/msrv-rust-toolchain.toml b/.github/workflows/msrv-rust-toolchain.toml deleted file mode 100644 index b169d31e..00000000 --- a/.github/workflows/msrv-rust-toolchain.toml +++ /dev/null @@ -1,3 +0,0 @@ -[toolchain] -channel = "1.61.0" -components = ["rustfmt", "rust-src"] diff --git a/Cargo.lock b/Cargo.lock index 69ba8444..c86b5aac 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -21,9 +21,9 @@ dependencies = [ [[package]] name = "ahash" -version = "0.8.2" +version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf6ccdb167abbf410dcb915cabd428929d7f6a04980b54a11f26a39f1c7f7107" +checksum = "2c99f64d1e06488f620f932677e24bc6e2897582980441ae90a671415bd7ec2f" dependencies = [ "cfg-if", "getrandom", @@ -51,9 +51,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.68" +version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2cb2f989d18dd141ab8ae82f64d1a8cdd37e0840f73a406896cf5e99502fab61" +checksum = "224afbd727c3d6e4b90103ece64b8d1b67fbb1973b1046c2281eed3f3803f800" [[package]] name = "arc-swap" @@ -61,15 +61,6 @@ version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bddcadddf5e9015d310179a59bb28c4d4b9920ad0f11e8e14dbadf654890c9a6" -[[package]] -name = "atoi" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f28d99ec8bfea296261ca1af174f24225171fea9664ba9003cbebee704810528" -dependencies = [ - 
"num-traits", -] - [[package]] name = "autocfg" version = "1.1.0" @@ -83,21 +74,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] -name = "bstr" -version = "0.2.17" +name = "bitflags" +version = "2.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba3569f383e8f1598449f1a423e72e99569137b47740b1da11ef19af3d5c3223" -dependencies = [ - "lazy_static", - "memchr", - "regex-automata", -] +checksum = "487f1e0fcbe47deb8b0574e646def1c903389d95241dd1bbcc6ce4a715dfc0c1" [[package]] name = "bstr" -version = "1.0.1" +version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fca0852af221f458706eb0725c03e4ed6c46af9ac98e6a689d5e634215d594dd" +checksum = "5ffdb39cb703212f3c11973452c2861b972f757b021158f3516ba10f2fa8b2c1" dependencies = [ "memchr", "once_cell", @@ -107,9 +93,9 @@ dependencies = [ [[package]] name = "btoi" -version = "0.4.2" +version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97c0869a9faa81f8bbf8102371105d6d0a7b79167a04c340b04ab16892246a11" +checksum = "9dd6407f73a9b8b6162d8a2ef999fe6afd7cc15902ebf42c5cd296addf17e0ad" dependencies = [ "num-traits", ] @@ -132,32 +118,17 @@ version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dfb24e866b15a1af2a1b663f10c6b6b8f397a84aadb828f12e5b289ec23a3a3c" -[[package]] -name = "bytesize" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c58ec36aac5066d5ca17df51b3e70279f5670a72102f5752cb7e7c856adfc70" - [[package]] name = "cassowary" version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "df8670b8c7b9dae1793364eafadf7239c40d669904660c5960d74cfd80b46a53" -[[package]] -name = "castaway" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a17ed5635fc8536268e5d4de1e22e81ac34419e5f052d4d51f4e01dcc263fcc" -dependencies = [ - "rustversion", -] - [[package]] name = "cc" -version = "1.0.78" +version = "1.0.79" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a20104e2335ce8a659d6dd92a51a767a0c062599c73b343fd152cb401e828c3d" +checksum = "50d30906286121d95be3d479533b458f87493b30a4b5f79a607db8f5d11aa91f" [[package]] name = "cfg-if" @@ -215,17 +186,6 @@ dependencies = [ "unicode-width", ] -[[package]] -name = "compact_str" -version = "0.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5138945395949e7dfba09646dc9e766b548ff48e23deb5246890e6b64ae9e1b9" -dependencies = [ - "castaway", - "itoa", - "ryu", -] - [[package]] name = "content_inspector" version = "0.2.4" @@ -250,27 +210,18 @@ dependencies = [ "cfg-if", ] -[[package]] -name = "crossbeam-utils" -version = "0.8.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fb766fa798726286dbbb842f174001dab8abc7b627a1dd86e0b7222a95d929f" -dependencies = [ - "cfg-if", -] - [[package]] name = "crossterm" -version = "0.25.0" +version = "0.26.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e64e6c0fbe2c17357405f7c758c1ef960fce08bdfb2c03d88d2a18d7e09c4b67" +checksum = "a84cda67535339806297f1b331d6dd6320470d2a0fe65381e79ee9e156dd3d13" dependencies = [ - "bitflags", + "bitflags 1.3.2", "crossterm_winapi", "futures-core", "libc", "mio", - "parking_lot 0.12.1", + "parking_lot", "signal-hook", "signal-hook-mio", "winapi", 
@@ -309,7 +260,7 @@ dependencies = [ "proc-macro2", "quote", "scratch", - "syn", + "syn 1.0.104", ] [[package]] @@ -326,20 +277,7 @@ checksum = "a08a6e2fcc370a089ad3b4aaf54db3b1b4cee38ddabce5896b33eb693275f470" dependencies = [ "proc-macro2", "quote", - "syn", -] - -[[package]] -name = "dashmap" -version = "5.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "907076dfda823b0b36d2a1bb5f90c96660a5bbcd7729e10727f07858f22c4edc" -dependencies = [ - "cfg-if", - "hashbrown 0.12.3", - "lock_api", - "once_cell", - "parking_lot_core 0.9.4", + "syn 1.0.104", ] [[package]] @@ -383,6 +321,12 @@ dependencies = [ "winapi", ] +[[package]] +name = "dunce" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0bd4b30a6560bbd9b4620f4de34c3f14f60848e58a9b7216801afcb4c7b31c3c" + [[package]] name = "either" version = "1.8.0" @@ -391,9 +335,9 @@ checksum = "90e5c1c8368803113bf0c9584fc495a58b86dc8a29edbf8fe877d21d9507e797" [[package]] name = "encoding_rs" -version = "0.8.31" +version = "0.8.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9852635589dc9f9ea1b6fe9f05b50ef208c85c834a562f0c6abb1c475736ec2b" +checksum = "071a31f4ee85403370b58aca746f01041ede6f0da2730960ad001edc2b71b394" dependencies = [ "cfg-if", ] @@ -407,6 +351,27 @@ dependencies = [ "encoding_rs", ] +[[package]] +name = "errno" +version = "0.2.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f639046355ee4f37944e44f60642c6f3a7efa3cf6b78c78a0d989a8ce6c396a1" +dependencies = [ + "errno-dragonfly", + "libc", + "winapi", +] + +[[package]] +name = "errno-dragonfly" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf" +dependencies = [ + "cc", + "libc", +] + [[package]] name = "error-code" version = "2.3.1" @@ -455,7 +420,7 @@ dependencies = [ "cfg-if", "libc", "redox_syscall", - "windows-sys", + "windows-sys 0.42.0", ] [[package]] @@ -485,15 +450,15 @@ dependencies = [ [[package]] name = "futures-core" -version = "0.3.25" +version = "0.3.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04909a7a7e4633ae6c4a9ab280aeb86da1236243a77b694a49eacd659a4bd3ac" +checksum = "86d7a0c1aa76363dac491de0ee99faf6941128376f1cf96f07db7603b7de69dd" [[package]] name = "futures-executor" -version = "0.3.25" +version = "0.3.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7acc85df6714c176ab5edf386123fafe217be88c0840ec11f199441134a074e2" +checksum = "1997dd9df74cdac935c76252744c1ed5794fac083242ea4fe77ef3ed60ba0f83" dependencies = [ "futures-core", "futures-task", @@ -502,15 +467,15 @@ dependencies = [ [[package]] name = "futures-task" -version = "0.3.25" +version = "0.3.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2ffb393ac5d9a6eaa9d3fdf37ae2776656b706e200c8e16b1bdb227f5198e6ea" +checksum = "fd65540d33b37b16542a0438c12e6aeead10d4ac5d05bd3f805b8f35ab592879" [[package]] name = "futures-util" -version = "0.3.25" +version = "0.3.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "197676987abd2f9cadff84926f410af1c183608d36641465df73ae8211dc65d6" +checksum = "3ef6b17e481503ec85211fed8f39d1970f128935ca1f814cd32ac4a6842e84ab" dependencies = [ "futures-core", "futures-task", @@ -540,75 +505,116 @@ dependencies = [ ] [[package]] -name = "git-actor" -version = "0.17.0" +name = "gix" +version = "0.41.0" +source 
= "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1853c840375a04e02315cb225b6d802291abfabbf08e500727c98a8670f1bf1" +dependencies = [ + "gix-actor", + "gix-attributes", + "gix-config", + "gix-credentials", + "gix-date", + "gix-diff", + "gix-discover", + "gix-features", + "gix-glob", + "gix-hash", + "gix-hashtable", + "gix-index", + "gix-lock", + "gix-mailmap", + "gix-object", + "gix-odb", + "gix-pack", + "gix-path", + "gix-prompt", + "gix-ref", + "gix-refspec", + "gix-revision", + "gix-sec", + "gix-tempfile", + "gix-traverse", + "gix-url", + "gix-validate", + "gix-worktree", + "log", + "once_cell", + "signal-hook", + "smallvec", + "thiserror", + "unicode-normalization", +] + +[[package]] +name = "gix-actor" +version = "0.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9e5fd7bc63ad527d64584f8d01f99b89c051f5fbb8144b58ae5f812775065cf" +checksum = "dc22b0cdc52237667c301dd7cdc6ead8f8f73c9f824e9942c8ebd6b764f6c0bf" dependencies = [ - "bstr 1.0.1", + "bstr", "btoi", - "git-date", + "gix-date", "itoa", "nom", - "quick-error", + "thiserror", ] [[package]] -name = "git-attributes" -version = "0.8.0" +name = "gix-attributes" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8013dfce47c1e29236d732308933e2c77af5355ec5105755d26faf7764d3f7b" +checksum = "2231a25934a240d0a4b6f4478401c73ee81d8be52de0293eedbc172334abf3e1" dependencies = [ - "bstr 1.0.1", - "compact_str", - "git-features", - "git-glob", - "git-path", - "git-quote", + "bstr", + "gix-features", + "gix-glob", + "gix-path", + "gix-quote", "thiserror", "unicode-bom", ] [[package]] -name = "git-bitmap" -version = "0.2.0" +name = "gix-bitmap" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44304093ac66a0ada1b243c15c3a503a165a1d0f50bec748f4e5a9b84a0d0722" +checksum = "024bca0c7187517bda5ea24ab148c9ca8208dd0c3e2bea88cdb2008f91791a6d" dependencies = [ - "quick-error", + "thiserror", ] [[package]] -name = "git-chunk" -version = "0.4.0" +name = "gix-chunk" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3090baa2f4a3fe488a9b3e31090b83259aaf930bf0634af34c18117274f8f1a8" +checksum = "b0d39583cab06464b8bf73b3f1707458270f0e7383cb24c3c9c1a16e6f792978" dependencies = [ "thiserror", ] [[package]] -name = "git-command" -version = "0.2.1" +name = "gix-command" +version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "215145cc1686a45bc6f9872b153a0d3f3c40a1b94173a928325e1b53dfa5e2af" +checksum = "b2c6f75c1e0f924de39e750880a6e21307194bb1ab773efe3c7d2d787277f8ab" dependencies = [ - "bstr 1.0.1", + "bstr", ] [[package]] -name = "git-config" -version = "0.15.0" +name = "gix-config" +version = "0.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c9da662fd64ac69772158dcf04777da6266f0f36bc9a310b3eb2d805bb696315" +checksum = "6aa7d7dd60256b7a0c0506a1d708ec92767c2662ee57b3301b538eaa3e064f8a" dependencies = [ - "bstr 1.0.1", - "git-config-value", - "git-features", - "git-glob", - "git-path", - "git-ref", - "git-sec", + "bstr", + "gix-config-value", + "gix-features", + "gix-glob", + "gix-path", + "gix-ref", + "gix-sec", "memchr", "nom", "once_cell", @@ -618,135 +624,137 @@ dependencies = [ ] [[package]] -name = "git-config-value" -version = "0.10.0" +name = "gix-config-value" +version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"989a90c1c630513a153c685b4249b96fdf938afc75bf7ef2ae1ccbd3d799f5db" +checksum = "693d4a4ba0531e46fe558459557a5b29fb86c3e4b2666c1c0861d93c7c678331" dependencies = [ - "bitflags", - "bstr 1.0.1", - "git-path", + "bitflags 1.3.2", + "bstr", + "gix-path", "libc", "thiserror", ] [[package]] -name = "git-credentials" -version = "0.9.0" +name = "gix-credentials" +version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97cd6bbe001afd6356b35ef13f2a6b0f0abc0133d1b2ecaec1033bdd769616d6" +checksum = "750b684197374518ea057e0a0594713e07683faa0a3f43c0f93d97f64130ad8d" dependencies = [ - "bstr 1.0.1", - "git-command", - "git-config-value", - "git-path", - "git-prompt", - "git-sec", - "git-url", + "bstr", + "gix-command", + "gix-config-value", + "gix-path", + "gix-prompt", + "gix-sec", + "gix-url", "thiserror", ] [[package]] -name = "git-date" -version = "0.4.0" +name = "gix-date" +version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "412c9b89026505bd24d5f8acafa578de6eea3b271ece307a73b8e646e671302a" +checksum = "b96271912ce39822501616f177dea7218784e6c63be90d5f36322ff3a722aae2" dependencies = [ - "bstr 1.0.1", + "bstr", "itoa", "thiserror", "time", ] [[package]] -name = "git-diff" -version = "0.26.0" +name = "gix-diff" +version = "0.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca87474422d26d606d04cec6bedfabcd92a0a74102cd7936785358ced6a4a25a" +checksum = "585b0834d4b6791a848637c4e109545fda9b0f29b591ba55edb33ceda6e7856b" dependencies = [ - "git-hash", - "git-object", + "gix-hash", + "gix-object", "imara-diff", "thiserror", ] [[package]] -name = "git-discover" -version = "0.12.0" +name = "gix-discover" +version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e9e26e0bc434643228cd418185bd28ca5c7cf831bde1da434807391c27ac40e" +checksum = "1b58931ab475a977deff03417e041a66e4bcb76c4e5797e7ec2fcb272ebce01c" dependencies = [ - "bstr 1.0.1", - "git-hash", - "git-path", - "git-ref", - "git-sec", + "bstr", + "dunce", + "gix-hash", + "gix-path", + "gix-ref", + "gix-sec", "thiserror", ] [[package]] -name = "git-features" -version = "0.26.0" +name = "gix-features" +version = "0.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ff74064fa007c5beefa89a64bb72834f32b3c497750a56c79c6802bbdb311f9" +checksum = "5e6a9dfa7b3c1a99315203e8b97f8f99f3bd95731590607abeaa5ca31bc41fe3" dependencies = [ "crc32fast", "flate2", - "git-hash", + "gix-hash", "libc", "once_cell", "prodash", - "quick-error", "sha1_smol", + "thiserror", "walkdir", ] [[package]] -name = "git-glob" -version = "0.5.1" +name = "gix-glob" +version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3908404c9b76ac7b3f636a104142378d3eaa78623cbc6eb7c7f0651979d48e8a" +checksum = "93e43efd776bc543f46f0fd0ca3d920c37af71a764a16f2aebd89765e9ff2993" dependencies = [ - "bitflags", - "bstr 1.0.1", + "bitflags 1.3.2", + "bstr", ] [[package]] -name = "git-hash" -version = "0.10.1" +name = "gix-hash" +version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1532d82bf830532f8d545c5b7b568e311e3593f16cf7ee9dd0ce03c74b12b99d" +checksum = "0c0c5a9f4d621d4f4ea046bb331df5c746ca735b8cae5b234cc2be70ee4dbef0" dependencies = [ "hex", "thiserror", ] [[package]] -name = "git-hashtable" -version = "0.1.0" +name = "gix-hashtable" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"c52b625ad8cc360a0b7f426266f21fb07bd49b8f4ccf1b3ca7bc89424db1dec4" +checksum = "9609c1b8f36f12968e6a6098f7cdb52004f7d42d570f47a2d6d7c16612f19acb" dependencies = [ - "git-hash", - "hashbrown 0.13.1", + "gix-hash", + "hashbrown 0.13.2", + "parking_lot", ] [[package]] -name = "git-index" -version = "0.12.1" +name = "gix-index" +version = "0.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "485da97dd4f69c7d9a8dc238cd6f4a726387ffc34573489e8e0d2bee266e3454" +checksum = "546ee7855d5d8731288f05a63c07ab41b59cb406660a825ed3fe89d7223823df" dependencies = [ - "atoi", - "bitflags", - "bstr 1.0.1", + "bitflags 1.3.2", + "bstr", + "btoi", "filetime", - "git-bitmap", - "git-features", - "git-hash", - "git-lock", - "git-object", - "git-traverse", + "gix-bitmap", + "gix-features", + "gix-hash", + "gix-lock", + "gix-object", + "gix-traverse", "itoa", "memmap2", "smallvec", @@ -754,39 +762,39 @@ dependencies = [ ] [[package]] -name = "git-lock" -version = "3.0.0" +name = "gix-lock" +version = "5.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89e4f05b8a68c3a5dd83a6651c76be384e910fe283072184fdab9d77f87ccec2" +checksum = "41b80172055c5d8017a48ddac5cc7a95421c00211047db0165c97853c4f05194" dependencies = [ "fastrand", - "git-tempfile", - "quick-error", + "gix-tempfile", + "thiserror", ] [[package]] -name = "git-mailmap" -version = "0.9.0" +name = "gix-mailmap" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0316b4346f3e162ade368209efb8a609b587793c74aa3b8de0ec01a4f3580120" +checksum = "2b66aea5e52875cd4915f4957a6f4b75831a36981e2ec3f5fad9e370e444fe1a" dependencies = [ - "bstr 1.0.1", - "git-actor", - "quick-error", + "bstr", + "gix-actor", + "thiserror", ] [[package]] -name = "git-object" -version = "0.26.0" +name = "gix-object" +version = "0.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f8563e2d6f524d7053f3106714f99ecdc3adbba2cb7108c09d71a02579f2e19" +checksum = "8df068db9180ee935fbb70504848369e270bdcb576b05c0faa8b9fd3b86fc017" dependencies = [ - "bstr 1.0.1", + "bstr", "btoi", - "git-actor", - "git-features", - "git-hash", - "git-validate", + "gix-actor", + "gix-features", + "gix-hash", + "gix-validate", "hex", "itoa", "nom", @@ -795,260 +803,215 @@ dependencies = [ ] [[package]] -name = "git-odb" -version = "0.40.0" +name = "gix-odb" +version = "0.43.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "616115a0e3daff6e08842758d24547b37a6eb6d0e2eedd95a740c3aaa2750333" +checksum = "aa63fce01e5bce663bb24ad01fa2b77266e91b1d1982aab3f67cb0aed8af8169" dependencies = [ "arc-swap", - "git-features", - "git-hash", - "git-object", - "git-pack", - "git-path", - "git-quote", - "parking_lot 0.12.1", + "gix-features", + "gix-hash", + "gix-object", + "gix-pack", + "gix-path", + "gix-quote", + "parking_lot", "tempfile", "thiserror", ] [[package]] -name = "git-pack" -version = "0.30.0" +name = "gix-pack" +version = "0.33.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7cd16b88f4b66041f41ca510c28bd81c4ee7363c5a544b3d62b4170432965871" +checksum = "66bc9d22f0d0620d013003ab05ead5c60124b6e1101bc245be9be4fd7e2330cb" dependencies = [ - "bytesize", "clru", - "dashmap", - "git-chunk", - "git-diff", - "git-features", - "git-hash", - "git-hashtable", - "git-object", - "git-path", - "git-tempfile", - "git-traverse", + "gix-chunk", + "gix-diff", + "gix-features", + "gix-hash", + "gix-hashtable", + "gix-object", + "gix-path", + 
"gix-tempfile", + "gix-traverse", "memmap2", - "parking_lot 0.12.1", + "parking_lot", "smallvec", "thiserror", ] [[package]] -name = "git-path" -version = "0.7.0" +name = "gix-path" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e40e68481a06da243d3f4dfd86a4be39c24eefb535017a862e845140dcdb878a" +checksum = "f6c104a66dec149cb8f7aaafc6ab797654cf82d67f050fd0cb7e7294e328354b" dependencies = [ - "bstr 1.0.1", + "bstr", "thiserror", ] [[package]] -name = "git-prompt" -version = "0.3.0" +name = "gix-prompt" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3612a486e507dd431ef0f7108eeaafc8fd1ed7bd0f205a88554f6f91fe5dccbf" +checksum = "a20cebf73229debaa82574c4fd20dcaf00fa8d4bfce823a862c4e990d7a0b5b4" dependencies = [ - "git-command", - "git-config-value", + "gix-command", + "gix-config-value", "nix", - "parking_lot 0.12.1", + "parking_lot", "thiserror", ] [[package]] -name = "git-quote" -version = "0.4.0" +name = "gix-quote" +version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1dd11f4e7f251ab297545faa4c5a4517f4985a43b9c16bf96fa49107f58e837f" +checksum = "a282f5a8d9ee0b09ec47390ac727350c48f2f5c76d803cd8da6b3e7ad56e0bcb" dependencies = [ - "bstr 1.0.1", + "bstr", "btoi", - "quick-error", + "thiserror", ] [[package]] -name = "git-ref" -version = "0.23.0" +name = "gix-ref" +version = "0.27.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e6767925a6fc4af5c5a81e348d1d851c1b3ab2b512bd7f562ac11be37c14468" +checksum = "3de7cd1050fa82be4240994defc1f1f2fd9def5d8815dcd005f5ddc5e3dc7511" dependencies = [ - "git-actor", - "git-features", - "git-hash", - "git-lock", - "git-object", - "git-path", - "git-tempfile", - "git-validate", + "gix-actor", + "gix-features", + "gix-hash", + "gix-lock", + "gix-object", + "gix-path", + "gix-tempfile", + "gix-validate", "memmap2", "nom", "thiserror", ] [[package]] -name = "git-refspec" -version = "0.7.0" +name = "gix-refspec" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ddf310ed5f2829ac0af96e7d4aebd4ae4b89f0718a7ae3666d09b02b2c5a1dfd" +checksum = "aba332462bda2e8efeae4302b39a6ed01ad56ef772fd5b7ef197cf2798294d65" dependencies = [ - "bstr 1.0.1", - "git-hash", - "git-revision", - "git-validate", - "smallvec", - "thiserror", -] - -[[package]] -name = "git-repository" -version = "0.32.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "993277960cb7e2d3991a11c1ec6951c1d142de052c26a18d2db64304e52d3741" -dependencies = [ - "git-actor", - "git-attributes", - "git-config", - "git-credentials", - "git-date", - "git-diff", - "git-discover", - "git-features", - "git-glob", - "git-hash", - "git-hashtable", - "git-index", - "git-lock", - "git-mailmap", - "git-object", - "git-odb", - "git-pack", - "git-path", - "git-prompt", - "git-ref", - "git-refspec", - "git-revision", - "git-sec", - "git-tempfile", - "git-traverse", - "git-url", - "git-validate", - "git-worktree", - "log", - "once_cell", - "prodash", - "signal-hook", + "bstr", + "gix-hash", + "gix-revision", + "gix-validate", "smallvec", "thiserror", - "unicode-normalization", ] [[package]] -name = "git-revision" -version = "0.10.0" +name = "gix-revision" +version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f9a6bd28c9d1676bb96f428cd09614ae18a0087d7cea1cebfd177e25f99b2af" +checksum = 
"ed98e4a0254953c64bc913bd23146a1de662067d5cf974cbdde396958b39e5b0" dependencies = [ - "bstr 1.0.1", - "git-date", - "git-hash", - "git-hashtable", - "git-object", + "bstr", + "gix-date", + "gix-hash", + "gix-hashtable", + "gix-object", "thiserror", ] [[package]] -name = "git-sec" -version = "0.6.0" +name = "gix-sec" +version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e1802e8252fa223b0ad89a393aed461132174ced1e6842a41f56dc92a3fc14f" +checksum = "e8ffa5bf0772f9b01de501c035b6b084cf9b8bb07dec41e3afc6a17336a65f47" dependencies = [ - "bitflags", + "bitflags 1.3.2", "dirs", - "git-path", + "gix-path", "libc", "windows", ] [[package]] -name = "git-tempfile" -version = "3.0.0" +name = "gix-tempfile" +version = "5.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a6bb4dee86c8cae5a078cfaac3b004ef99c31548ed86218f23a7ff9b4b74f3be" +checksum = "aed73ef9642f779d609fd19acc332ac1597b978ee87ec11743a68eefaed65bfa" dependencies = [ - "dashmap", "libc", "once_cell", + "parking_lot", "signal-hook", "signal-hook-registry", "tempfile", ] [[package]] -name = "git-traverse" -version = "0.22.0" +name = "gix-traverse" +version = "0.24.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd356da21ec00f69b9d4f105df4cb85543c746b18f4b7fc81529ce77713cdb29" +checksum = "dd9a4a07bb22168dc79c60e1a6a41919d198187ca83d8a5940ad8d7122a45df3" dependencies = [ - "git-hash", - "git-hashtable", - "git-object", + "gix-hash", + "gix-hashtable", + "gix-object", "thiserror", ] [[package]] -name = "git-url" -version = "0.13.0" +name = "gix-url" +version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c85af407ed0dbb8d8da2a7241827d2fd5681186d9dab3570fc8dd8d6152ec48f" +checksum = "b6a22b4b32ad14d68f7b7fb6458fa58d44b01797d94c1b8f4db2d9c7b3c366b5" dependencies = [ - "bstr 1.0.1", - "git-features", - "git-path", + "bstr", + "gix-features", + "gix-path", "home", "thiserror", "url", ] [[package]] -name = "git-validate" -version = "0.7.1" +name = "gix-validate" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0431cf9352c596dc7c8ec9066ee551ce54e63c86c3c767e5baf763f6019ff3c2" +checksum = "b69ddb780ea1465255e66818d75b7098371c58dbc9560da4488a44b9f5c7e443" dependencies = [ - "bstr 1.0.1", + "bstr", "thiserror", ] [[package]] -name = "git-worktree" -version = "0.12.0" +name = "gix-worktree" +version = "0.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca3bc63878f134e08ed52dba5d82422798c01a3f2e48c38ae9a2f7ff9194f362" +checksum = "992b8fdade33e079dc61c29f2388ab8e049965ebf7be40efa7f8b80e3c4543fe" dependencies = [ - "bstr 1.0.1", - "git-attributes", - "git-features", - "git-glob", - "git-hash", - "git-index", - "git-object", - "git-path", + "bstr", + "gix-attributes", + "gix-features", + "gix-glob", + "gix-hash", + "gix-index", + "gix-object", + "gix-path", "io-close", "thiserror", ] [[package]] name = "globset" -version = "0.4.9" +version = "0.4.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a1e17342619edbc21a964c2afbeb6c820c6a2560032872f397bb97ea127bd0a" +checksum = "029d74589adefde59de1a0c4f4732695c32805624aec7b68d91503d4dba79afc" dependencies = [ "aho-corasick", - "bstr 0.2.17", + "bstr", "fnv", "log", "regex", @@ -1056,21 +1019,21 @@ dependencies = [ [[package]] name = "grep-matcher" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "6d27563c33062cd33003b166ade2bb4fd82db1fd6a86db764dfdad132d46c1cc" +checksum = "3902ca28f26945fe35cad349d776f163981d777fee382ccd6ef451126f51b319" dependencies = [ "memchr", ] [[package]] name = "grep-regex" -version = "0.1.10" +version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1345f8d33c89f2d5b081f2f2a41175adef9fd0bed2fea6a26c96c2deb027e58e" +checksum = "997598b41d53a37a2e3fc5300d5c11d825368c054420a9c65125b8fe1078463f" dependencies = [ "aho-corasick", - "bstr 0.2.17", + "bstr", "grep-matcher", "log", "regex", @@ -1080,11 +1043,11 @@ dependencies = [ [[package]] name = "grep-searcher" -version = "0.1.10" +version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48852bd08f9b4eb3040ecb6d2f4ade224afe880a9a0909c5563cc59fa67932cc" +checksum = "5601c4b9f480f0c9ebb40b1f6cbf447b8a50c5369223937a6c5214368c58779f" dependencies = [ - "bstr 0.2.17", + "bstr", "bytecount", "encoding_rs", "encoding_rs_io", @@ -1104,26 +1067,27 @@ dependencies = [ [[package]] name = "hashbrown" -version = "0.13.1" +version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33ff8ae62cd3a9102e5637afc8452c55acf3844001bd5374e0b0bd7b6616c038" +checksum = "43a3c133739dddd0d2990f9a4bdf8eb4b21ef50e4851ca85ab661199821d510e" dependencies = [ - "ahash 0.8.2", + "ahash 0.8.3", ] [[package]] name = "helix-core" version = "0.6.0" dependencies = [ - "ahash 0.8.2", + "ahash 0.8.3", "arc-swap", - "bitflags", + "bitflags 2.0.2", "chrono", "encoding_rs", "etcetera", - "hashbrown 0.13.1", + "hashbrown 0.13.2", "helix-loader", "imara-diff", + "indoc", "log", "once_cell", "quickcheck", @@ -1182,6 +1146,7 @@ dependencies = [ "futures-util", "helix-core", "helix-loader", + "helix-parsec", "log", "lsp-types", "serde", @@ -1192,6 +1157,10 @@ dependencies = [ "which", ] +[[package]] +name = "helix-parsec" +version = "0.6.0" + [[package]] name = "helix-term" version = "0.6.0" @@ -1215,6 +1184,7 @@ dependencies = [ "helix-view", "ignore", "indoc", + "libc", "log", "once_cell", "pulldown-cmark", @@ -1234,11 +1204,13 @@ dependencies = [ name = "helix-tui" version = "0.6.0" dependencies = [ - "bitflags", + "bitflags 2.0.2", "cassowary", "crossterm", "helix-core", "helix-view", + "log", + "once_cell", "serde", "termini", "unicode-segmentation", @@ -1248,11 +1220,12 @@ dependencies = [ name = "helix-vcs" version = "0.6.0" dependencies = [ - "git-repository", + "arc-swap", + "gix", "helix-core", "imara-diff", "log", - "parking_lot 0.12.1", + "parking_lot", "tempfile", "tokio", ] @@ -1263,7 +1236,7 @@ version = "0.6.0" dependencies = [ "anyhow", "arc-swap", - "bitflags", + "bitflags 2.0.2", "chardetng", "clipboard-win", "crossterm", @@ -1274,8 +1247,10 @@ dependencies = [ "helix-lsp", "helix-tui", "helix-vcs", + "libc", "log", "once_cell", + "parking_lot", "serde", "serde_json", "slotmap", @@ -1310,12 +1285,6 @@ dependencies = [ "winapi", ] -[[package]] -name = "human_format" -version = "1.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86cce260d758a9aa3d7c4b99d55c815a540f8a37514ba6046ab6be402a157cb0" - [[package]] name = "iana-time-zone" version = "0.1.53" @@ -1352,11 +1321,10 @@ dependencies = [ [[package]] name = "ignore" -version = "0.4.18" +version = "0.4.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "713f1b139373f96a2e0ce3ac931cd01ee973c3c5dd7c40c0c2efe96ad2b6751d" +checksum = "dbe7873dab538a9a44ad79ede1faf5f30d49f9a5c883ddbab48bce81b64b7492" 
dependencies = [ - "crossbeam-utils", "globset", "lazy_static", "log", @@ -1374,15 +1342,25 @@ version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e98c1d0ad70fc91b8b9654b1f33db55e59579d3b3de2bffdced0fdb810570cb8" dependencies = [ - "ahash 0.8.2", + "ahash 0.8.3", + "hashbrown 0.12.3", +] + +[[package]] +name = "indexmap" +version = "1.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1885e79c1fc4b10f0e172c475f458b7f7b93061064d98c3293e98c5ba0c8b399" +dependencies = [ + "autocfg", "hashbrown 0.12.3", ] [[package]] name = "indoc" -version = "1.0.8" +version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da2d6f23ffea9d7e76c53eee25dfb67bcd8fde7f1198b0855350698c9f07c780" +checksum = "9f2cb48b81b1dc9f39676bf99f5499babfec7cd8fe14307f7b3d747208fb5690" [[package]] name = "instant" @@ -1403,6 +1381,16 @@ dependencies = [ "winapi", ] +[[package]] +name = "io-lifetimes" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7d6c6f8c91b4b9ed43484ad1a938e393caf35960fce7f82a040497207bd8e9e" +dependencies = [ + "libc", + "windows-sys 0.42.0", +] + [[package]] name = "itoa" version = "1.0.4" @@ -1426,9 +1414,9 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" [[package]] name = "libc" -version = "0.2.137" +version = "0.2.140" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc7fcc620a3bff7cdd7a365be3376c97191aeaccc2a603e600951e452615bf89" +checksum = "99227334921fae1a979cf0bfdfcc6b3e5ce376ef57e16fb6fb3ea2ed6095f80c" [[package]] name = "libloading" @@ -1449,6 +1437,12 @@ dependencies = [ "cc", ] +[[package]] +name = "linux-raw-sys" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f051f77a7c8e6957c0696eac88f26b0117e54f52d3fc682ab19397a8812846a4" + [[package]] name = "lock_api" version = "0.4.9" @@ -1470,11 +1464,11 @@ dependencies = [ [[package]] name = "lsp-types" -version = "0.93.2" +version = "0.94.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9be6e9c7e2d18f651974370d7aff703f9513e0df6e464fd795660edc77e6ca51" +checksum = "0b63735a13a1f9cd4f4835223d828ed9c2e35c8c5e61837774399f558b6a1237" dependencies = [ - "bitflags", + "bitflags 1.3.2", "serde", "serde_json", "serde_repr", @@ -1520,7 +1514,7 @@ dependencies = [ "libc", "log", "wasi", - "windows-sys", + "windows-sys 0.42.0", ] [[package]] @@ -1529,7 +1523,7 @@ version = "0.26.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "46a58d1d356c6597d08cde02c2f09d785b09e28711837b1ed667dc652c08a694" dependencies = [ - "bitflags", + "bitflags 1.3.2", "cfg-if", "libc", "static_assertions", @@ -1545,6 +1539,15 @@ dependencies = [ "minimal-lexical", ] +[[package]] +name = "nom8" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae01545c9c7fc4486ab7debaf2aad7003ac19431791868fb2e8066df97fad2f8" +dependencies = [ + "memchr", +] + [[package]] name = "num-integer" version = "0.1.45" @@ -1585,20 +1588,9 @@ dependencies = [ [[package]] name = "once_cell" -version = "1.17.0" +version = "1.17.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f61fba1741ea2b3d6a1e3178721804bb716a68a6aeba1149b5d52e3d464ea66" - -[[package]] -name = "parking_lot" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99" -dependencies = [ - "instant", - "lock_api", - "parking_lot_core 0.8.6", -] +checksum = "b7e5500299e16ebb147ae15a00a942af264cf3688f47923b8fc2cd5858f23ad3" [[package]] name = "parking_lot" @@ -1607,21 +1599,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" dependencies = [ "lock_api", - "parking_lot_core 0.9.4", -] - -[[package]] -name = "parking_lot_core" -version = "0.8.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60a2cfe6f0ad2bfc16aefa463b497d5c7a5ecd44a23efa72aa342d90177356dc" -dependencies = [ - "cfg-if", - "instant", - "libc", - "redox_syscall", - "smallvec", - "winapi", + "parking_lot_core", ] [[package]] @@ -1634,7 +1612,7 @@ dependencies = [ "libc", "redox_syscall", "smallvec", - "windows-sys", + "windows-sys 0.42.0", ] [[package]] @@ -1657,24 +1635,18 @@ checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" [[package]] name = "proc-macro2" -version = "1.0.47" +version = "1.0.52" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ea3d908b0e36316caf9e9e2c4625cdde190a7e6f440d794667ed17a1855e725" +checksum = "1d0e1ae9e836cc3beddd63db0df682593d7e2d3d891ae8c9083d2113e1744224" dependencies = [ "unicode-ident", ] [[package]] name = "prodash" -version = "23.0.0" +version = "23.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d8c414345b4a98cbcd0e8d8829c8f54b47a7ed4fb771c45b7c5c6c0ae23dc4c" -dependencies = [ - "bytesize", - "dashmap", - "human_format", - "parking_lot 0.11.2", -] +checksum = "d73c6b64cb5b99eb63ca97d378685712617ec0172ff5c04cd47a489d3e2c51f8" [[package]] name = "pulldown-cmark" @@ -1682,17 +1654,11 @@ version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2d9cc634bc78768157b5cbfe988ffcd1dcba95cd2b2f03a88316c08c6d00ed63" dependencies = [ - "bitflags", + "bitflags 1.3.2", "memchr", "unicase", ] -[[package]] -name = "quick-error" -version = "2.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a993555f31e5a609f617c12db6250dedcac1b0a85076912c436e6fc9b2c8e6a3" - [[package]] name = "quickcheck" version = "1.0.3" @@ -1704,9 +1670,9 @@ dependencies = [ [[package]] name = "quote" -version = "1.0.21" +version = "1.0.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbe448f377a7d6961e30f5955f9b8d106c3f5e449d493ee1b125c1d43c2b5179" +checksum = "4424af4bf778aae2051a77b60283332f386554255d722233d09fbfc7e30da2fc" dependencies = [ "proc-macro2", ] @@ -1735,7 +1701,7 @@ version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a" dependencies = [ - "bitflags", + "bitflags 1.3.2", ] [[package]] @@ -1772,30 +1738,29 @@ version = "0.6.28" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "456c603be3e8d448b072f410900c09faf164fbce2d480456f50eea6e25f9c848" -[[package]] -name = "remove_dir_all" -version = "0.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7" -dependencies = [ - "winapi", -] - [[package]] name = "ropey" -version = "1.5.1" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"a4f832915525613e83f275694cb8c184f5df13ca26a9ef0ea6ce736921964c8e" +checksum = "53ce7a2c43a32e50d666e33c5a80251b31147bb4b49024bcab11fb6f20c671ed" dependencies = [ "smallvec", "str_indices", ] [[package]] -name = "rustversion" -version = "1.0.9" +name = "rustix" +version = "0.36.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97477e48b4cf8603ad5f7aaf897467cf42ab4218a38ef76fb14c2d6773a6d6a8" +checksum = "d4fdebc4b395b7fbb9ab11e462e20ed9051e7b16e42d24042c776eca0ac81b03" +dependencies = [ + "bitflags 1.3.2", + "errno", + "io-lifetimes", + "libc", + "linux-raw-sys", + "windows-sys 0.42.0", +] [[package]] name = "ryu" @@ -1826,29 +1791,29 @@ checksum = "9c8132065adcfd6e02db789d9285a0deb2f3fcb04002865ab67d5fb103533898" [[package]] name = "serde" -version = "1.0.152" +version = "1.0.158" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb7d1f0d3021d347a83e556fc4683dea2ea09d87bccdf88ff5c12545d89d5efb" +checksum = "771d4d9c4163ee138805e12c710dd365e4f44be8be0503cb1bb9eb989425d9c9" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.152" +version = "1.0.158" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af487d118eecd09402d70a5d72551860e788df87b464af30e5ea6a38c75c541e" +checksum = "e801c1712f48475582b7696ac71e0ca34ebb30e09338425384269d9717c62cad" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.4", ] [[package]] name = "serde_json" -version = "1.0.91" +version = "1.0.94" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "877c235533714907a8c2464236f5c4b2a17262ef1bd71f38f35ea592c8da6883" +checksum = "1c533a59c9d8a93a09c6ab31f0fd5e5f4dd1b8fc9434804029839884765d04ea" dependencies = [ "itoa", "ryu", @@ -1863,7 +1828,16 @@ checksum = "1fe39d9fbb0ebf5eb2c7cb7e2a47e4f462fad1379f1166b8ae49ad9eae89a7ca" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 1.0.104", +] + +[[package]] +name = "serde_spanned" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0efd8caf556a6cebd3b285caf480045fcc1ac04f6bd786b09a6f11af30c4fcf4" +dependencies = [ + "serde", ] [[package]] @@ -1874,9 +1848,9 @@ checksum = "ae1a47186c03a32177042e55dbc5fd5aee900b8e0069a8d70fba96a9375cd012" [[package]] name = "signal-hook" -version = "0.3.14" +version = "0.3.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a253b5e89e2698464fc26b545c9edceb338e18a89effeeecfea192c3025be29d" +checksum = "732768f1176d21d09e076c23a93123d40bba92d50c4058da34d45c8de8e682b9" dependencies = [ "libc", "signal-hook-registry", @@ -1994,18 +1968,28 @@ dependencies = [ "unicode-ident", ] +[[package]] +name = "syn" +version = "2.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c622ae390c9302e214c31013517c2061ecb2699935882c60a9b37f82f8625ae" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + [[package]] name = "tempfile" -version = "3.3.0" +version = "3.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5cdb1ef4eaeeaddc8fbd371e5017057064af0911902ef36b39801f67cc6d79e4" +checksum = "af18f7ae1acd354b992402e9ec5864359d693cd8a79dcbef59f76891701c1e95" dependencies = [ "cfg-if", "fastrand", - "libc", "redox_syscall", - "remove_dir_all", - "winapi", + "rustix", + "windows-sys 0.42.0", ] [[package]] @@ -2039,22 +2023,22 @@ dependencies = [ [[package]] name = "thiserror" -version = "1.0.38" +version = "1.0.39" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a9cd18aa97d5c45c6603caea1da6628790b37f7a34b6ca89522331c5180fed0" +checksum = "a5ab016db510546d856297882807df8da66a16fb8c4101cb8b30054b0d5b2d9c" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.38" +version = "1.0.39" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fb327af4685e4d03fa8cbcf1716380da910eeb2bb8be417e7f9fd3fb164f36f" +checksum = "5420d42e90af0c38c3290abcca25b9b3bdf379fc9f55c528f53a269d9c9a267e" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 1.0.104", ] [[package]] @@ -2121,9 +2105,9 @@ checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c" [[package]] name = "tokio" -version = "1.24.1" +version = "1.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d9f76183f91ecfb55e1d7d5602bd1d979e38a3a522fe900241cf195624d67ae" +checksum = "03201d01c3c27a29c8a5cee5b55a93ddae1ccf6f08f65365c2c918f8c1b76f64" dependencies = [ "autocfg", "bytes", @@ -2131,12 +2115,12 @@ dependencies = [ "memchr", "mio", "num_cpus", - "parking_lot 0.12.1", + "parking_lot", "pin-project-lite", "signal-hook-registry", "socket2", "tokio-macros", - "windows-sys", + "windows-sys 0.45.0", ] [[package]] @@ -2147,14 +2131,14 @@ checksum = "9724f9a975fb987ef7a3cd9be0350edcbe130698af5b8f7a631e23d42d052484" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 1.0.104", ] [[package]] name = "tokio-stream" -version = "0.1.11" +version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d660770404473ccd7bc9f8b28494a811bc18542b915c0855c51e8f419d5223ce" +checksum = "8fb52b74f05dbf495a8fba459fdc331812b96aa086d9eb78101fa0d4569c3313" dependencies = [ "futures-core", "pin-project-lite", @@ -2163,18 +2147,42 @@ dependencies = [ [[package]] name = "toml" -version = "0.5.10" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1333c76748e868a4d9d1017b5ab53171dfd095f70c712fdb4653a406547f598f" +checksum = "f7afcae9e3f0fe2c370fd4657108972cbb2fa9db1b9f84849cefd80741b01cb6" dependencies = [ "serde", + "serde_spanned", + "toml_datetime", + "toml_edit", +] + +[[package]] +name = "toml_datetime" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3ab8ed2edee10b50132aed5f331333428b011c99402b5a534154ed15746f9622" +dependencies = [ + "serde", +] + +[[package]] +name = "toml_edit" +version = "0.19.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e6a7712b49e1775fb9a7b998de6635b299237f48b404dde71704f2e0e7f37e5" +dependencies = [ + "indexmap", + "nom8", + "serde", + "serde_spanned", + "toml_datetime", ] [[package]] name = "tree-sitter" version = "0.20.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4423c784fe11398ca91e505cdc71356b07b1a924fc8735cfab5333afe3e18bc" +source = "git+https://github.com/tree-sitter/tree-sitter?rev=c51896d32dcc11a38e41f36e3deb1a6a9c4f4b14#c51896d32dcc11a38e41f36e3deb1a6a9c4f4b14" dependencies = [ "cc", "regex", @@ -2234,9 +2242,9 @@ dependencies = [ [[package]] name = "unicode-segmentation" -version = "1.10.0" +version = "1.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fdbf052a0783de01e944a6ce7a8cb939e295b1e7be835a1112c3b9a7f047a5a" +checksum = "1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36" [[package]] name = "unicode-width" @@ -2300,7 +2308,7 @@ dependencies = [ 
"once_cell", "proc-macro2", "quote", - "syn", + "syn 1.0.104", "wasm-bindgen-shared", ] @@ -2322,7 +2330,7 @@ checksum = "07bc0c051dc5f23e307b13285f9d75df86bfdf816c5721e573dec1f9b8aa193c" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 1.0.104", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -2335,9 +2343,9 @@ checksum = "1c38c045535d93ec4f0b4defec448e4291638ee608530863b1e2ba115d4fff7f" [[package]] name = "which" -version = "4.3.0" +version = "4.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c831fbbee9e129a8cf93e7747a82da9d95ba8e16621cae60ec2cdc849bacb7b" +checksum = "2441c784c52b289a054b7201fc93253e288f094e2f4be9058343127c4226a269" dependencies = [ "either", "libc", @@ -2377,17 +2385,17 @@ checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" [[package]] name = "windows" -version = "0.40.0" +version = "0.43.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e30acc718a52fb130fec72b1cb5f55ffeeec9253e1b785e94db222178a6acaa1" +checksum = "04662ed0e3e5630dfa9b26e4cb823b817f1a9addda855d973a9458c236556244" dependencies = [ - "windows_aarch64_gnullvm 0.40.0", - "windows_aarch64_msvc 0.40.0", - "windows_i686_gnu 0.40.0", - "windows_i686_msvc 0.40.0", - "windows_x86_64_gnu 0.40.0", - "windows_x86_64_gnullvm 0.40.0", - "windows_x86_64_msvc 0.40.0", + "windows_aarch64_gnullvm", + "windows_aarch64_msvc", + "windows_i686_gnu", + "windows_i686_msvc", + "windows_x86_64_gnu", + "windows_x86_64_gnullvm", + "windows_x86_64_msvc", ] [[package]] @@ -2396,98 +2404,80 @@ version = "0.42.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5a3e1820f08b8513f676f7ab6c1f99ff312fb97b553d30ff4dd86f9f15728aa7" dependencies = [ - "windows_aarch64_gnullvm 0.42.0", - "windows_aarch64_msvc 0.42.0", - "windows_i686_gnu 0.42.0", - "windows_i686_msvc 0.42.0", - "windows_x86_64_gnu 0.42.0", - "windows_x86_64_gnullvm 0.42.0", - "windows_x86_64_msvc 0.42.0", + "windows_aarch64_gnullvm", + "windows_aarch64_msvc", + "windows_i686_gnu", + "windows_i686_msvc", + "windows_x86_64_gnu", + "windows_x86_64_gnullvm", + "windows_x86_64_msvc", ] [[package]] -name = "windows_aarch64_gnullvm" -version = "0.40.0" +name = "windows-sys" +version = "0.45.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3caa4a1a16561b714323ca6b0817403738583033a6a92e04c5d10d4ba37ca10" +checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" +dependencies = [ + "windows-targets", +] [[package]] -name = "windows_aarch64_gnullvm" -version = "0.42.0" +name = "windows-targets" +version = "0.42.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41d2aa71f6f0cbe00ae5167d90ef3cfe66527d6f613ca78ac8024c3ccab9a19e" +checksum = "8e2522491fbfcd58cc84d47aeb2958948c4b8982e9a2d8a2a35bbaed431390e7" +dependencies = [ + "windows_aarch64_gnullvm", + "windows_aarch64_msvc", + "windows_i686_gnu", + "windows_i686_msvc", + "windows_x86_64_gnu", + "windows_x86_64_gnullvm", + "windows_x86_64_msvc", +] [[package]] -name = "windows_aarch64_msvc" -version = "0.40.0" +name = "windows_aarch64_gnullvm" +version = "0.42.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "328973c62dfcc50fb1aaa8e7100676e0b642fe56bac6bafff3327902db843ab4" +checksum = "8c9864e83243fdec7fc9c5444389dcbbfd258f745e7853198f365e3c4968a608" [[package]] name = "windows_aarch64_msvc" -version = "0.42.0" +version = "0.42.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd0f252f5a35cac83d6311b2e795981f5ee6e67eb1f9a7f64eb4500fbc4dcdb4" +checksum = "4c8b1b673ffc16c47a9ff48570a9d85e25d265735c503681332589af6253c6c7" [[package]] name = "windows_i686_gnu" -version = "0.40.0" +version = "0.42.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa5b09fad70f0df85dea2ac2a525537e415e2bf63ee31cf9b8e263645ee9f3c1" - -[[package]] -name = "windows_i686_gnu" -version = "0.42.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbeae19f6716841636c28d695375df17562ca208b2b7d0dc47635a50ae6c5de7" +checksum = "de3887528ad530ba7bdbb1faa8275ec7a1155a45ffa57c37993960277145d640" [[package]] name = "windows_i686_msvc" -version = "0.40.0" +version = "0.42.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a1ad4031c1a98491fa195d8d43d7489cb749f135f2e5c4eed58da094bd0d876" - -[[package]] -name = "windows_i686_msvc" -version = "0.42.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "84c12f65daa39dd2babe6e442988fc329d6243fdce47d7d2d155b8d874862246" +checksum = "bf4d1122317eddd6ff351aa852118a2418ad4214e6613a50e0191f7004372605" [[package]] name = "windows_x86_64_gnu" -version = "0.40.0" +version = "0.42.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "520ff37edd72da8064b49d2281182898e17f0688ae9f4070bca27e4b5c162ac7" - -[[package]] -name = "windows_x86_64_gnu" -version = "0.42.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf7b1b21b5362cbc318f686150e5bcea75ecedc74dd157d874d754a2ca44b0ed" +checksum = "c1040f221285e17ebccbc2591ffdc2d44ee1f9186324dd3e84e99ac68d699c45" [[package]] name = "windows_x86_64_gnullvm" -version = "0.40.0" +version = "0.42.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "046e5b82215102c44fd75f488f1b9158973d02aa34d06ed85c23d6f5520a2853" - -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.42.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09d525d2ba30eeb3297665bd434a54297e4170c7f1a44cad4ef58095b4cd2028" - -[[package]] -name = "windows_x86_64_msvc" -version = "0.40.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a0c9c6df55dd1bfa76e131cef44bdd8ec9c819ef3611f04dfe453fd5bfeda28" +checksum = "628bfdf232daa22b0d64fdb62b09fcc36bb01f05a3939e20ab73aaf9470d0463" [[package]] name = "windows_x86_64_msvc" -version = "0.42.0" +version = "0.42.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f40009d85759725a34da6d89a94e63d7bdc50a862acf0dbc7c8e488f1edcb6f5" +checksum = "447660ad36a13288b1db4d4248e857b510e8c3a225c822ba4fb748c0aafecffd" [[package]] name = "xtask" diff --git a/Cargo.toml b/Cargo.toml index ecf6848e..aaa21659 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -8,6 +8,7 @@ members = [ "helix-dap", "helix-loader", "helix-vcs", + "helix-parsec", "xtask", ] @@ -25,3 +26,12 @@ lto = "fat" codegen-units = 1 # strip = "debuginfo" # TODO: or strip = true opt-level = 3 + +[profile.integration] +inherits = "test" +package.helix-core.opt-level = 2 +package.helix-tui.opt-level = 2 +package.helix-term.opt-level = 2 + +[patch.crates-io] +tree-sitter = { git = "https://github.com/tree-sitter/tree-sitter", rev = "c51896d32dcc11a38e41f36e3deb1a6a9c4f4b14" } diff --git a/README.md b/README.md index 60125e11..cba52a7a 100644 --- a/README.md +++ b/README.md @@ -45,91 +45,10 @@ Note: Only certain languages have 
indentation definitions at the moment. Check
 # Installation

-Packages are available for various distributions (see [Installation docs](https://docs.helix-editor.com/install.html)).
-
-If you would like to build from source:
-
-```shell
-git clone https://github.com/helix-editor/helix
-cd helix
-cargo install --locked --path helix-term
-```
-
-This will install the `hx` binary to `$HOME/.cargo/bin` and build tree-sitter grammars in `./runtime/grammars`.
-
-Helix needs its runtime files so make sure to copy/symlink the `runtime/` directory into the
-config directory (for example `~/.config/helix/runtime` on Linux/macOS, or `%AppData%/helix/runtime` on Windows).
-
-| OS | Command |
-| -------------------- | ------------------------------------------------ |
-| Windows (Cmd) | `xcopy /e /i runtime %AppData%\helix\runtime` |
-| Windows (PowerShell) | `xcopy /e /i runtime $Env:AppData\helix\runtime` |
-| Linux / macOS | `ln -s $PWD/runtime ~/.config/helix/runtime` |
-
-Starting with Windows Vista you can also create symbolic links on Windows. Note that this requires
-elevated privileges - i.e. PowerShell or Cmd must be run as administrator.
-
-**PowerShell:**
-
-```powershell
-New-Item -ItemType SymbolicLink -Target "runtime" -Path "$Env:AppData\helix\runtime"
-```
-
-**Cmd:**
-
-```cmd
-cd %appdata%\helix
-mklink /D runtime "\runtime"
-```
-
-The runtime location can be overridden via the `HELIX_RUNTIME` environment variable.
-
-> NOTE: if `HELIX_RUNTIME` is set prior to calling `cargo install --locked --path helix-term`,
-> tree-sitter grammars will be built in `$HELIX_RUNTIME/grammars`.
-
-If you plan on keeping the repo locally, an alternative to copying/symlinking
-runtime files is to set `HELIX_RUNTIME=/path/to/helix/runtime`
-(`HELIX_RUNTIME=$PWD/runtime` if you're in the helix repo directory).
-
-Packages already solve this for you by wrapping the `hx` binary with a wrapper
-that sets the variable to the install dir.
-
-> NOTE: running via cargo also doesn't require setting explicit `HELIX_RUNTIME` path, it will automatically
-> detect the `runtime` directory in the project root.
-
-If you want to customize your `languages.toml` config,
-tree-sitter grammars may be manually fetched and built with `hx --grammar fetch` and `hx --grammar build`.
-
-In order to use LSP features like auto-complete, you will need to
-[install the appropriate Language Server](https://github.com/helix-editor/helix/wiki/How-to-install-the-default-language-servers)
-for a language.
+[Installation documentation](https://docs.helix-editor.com/install.html).

 [![Packaging status](https://repology.org/badge/vertical-allrepos/helix.svg)](https://repology.org/project/helix/versions)

-## Adding Helix to your desktop environment
-
-If installing from source, to use Helix in desktop environments that supports [XDG desktop menu](https://specifications.freedesktop.org/menu-spec/menu-spec-latest.html), including Gnome and KDE, copy the provided `.desktop` file to the correct folder:
-
-```bash
-cp contrib/Helix.desktop ~/.local/share/applications
-cp contrib/helix.png ~/.local/share/icons
-```
-
-To use another terminal than the default, you will need to modify the `.desktop` file. For example, to use `kitty`:
-
-```bash
-sed -i "s|Exec=hx %F|Exec=kitty hx %F|g" ~/.local/share/applications/Helix.desktop
-sed -i "s|Terminal=true|Terminal=false|g" ~/.local/share/applications/Helix.desktop
-```
-
-## macOS
-
-Helix can be installed on macOS through homebrew:
-
-```
-brew install helix
-```
-
 # Contributing

 Contributing guidelines can be found [here](./docs/CONTRIBUTING.md).
diff --git a/book/src/SUMMARY.md b/book/src/SUMMARY.md
index eaf0c4f4..6e780b87 100644
--- a/book/src/SUMMARY.md
+++ b/book/src/SUMMARY.md
@@ -6,13 +6,13 @@
 - [Usage](./usage.md)
   - [Keymap](./keymap.md)
   - [Commands](./commands.md)
-  - [Language Support](./lang-support.md)
+  - [Language support](./lang-support.md)
 - [Migrating from Vim](./from-vim.md)
 - [Configuration](./configuration.md)
   - [Themes](./themes.md)
-  - [Key Remapping](./remapping.md)
+  - [Key remapping](./remapping.md)
   - [Languages](./languages.md)
 - [Guides](./guides/README.md)
-  - [Adding Languages](./guides/adding_languages.md)
-  - [Adding Textobject Queries](./guides/textobject.md)
-  - [Adding Indent Queries](./guides/indent.md)
+  - [Adding languages](./guides/adding_languages.md)
+  - [Adding textobject queries](./guides/textobject.md)
+  - [Adding indent queries](./guides/indent.md)
diff --git a/book/src/commands.md b/book/src/commands.md
index d9a11386..047a30a9 100644
--- a/book/src/commands.md
+++ b/book/src/commands.md
@@ -1,5 +1,5 @@
 # Commands

-Command mode can be activated by pressing `:`, similar to Vim. Built-in commands:
+Command mode can be activated by pressing `:`. The built-in commands are:

 {{#include ./generated/typable-cmd.md}}
diff --git a/book/src/configuration.md b/book/src/configuration.md
index a35482e6..bf314993 100644
--- a/book/src/configuration.md
+++ b/book/src/configuration.md
@@ -2,10 +2,10 @@

 To override global configuration parameters, create a `config.toml` file located in your config directory:

-* Linux and Mac: `~/.config/helix/config.toml`
-* Windows: `%AppData%\helix\config.toml`
+- Linux and Mac: `~/.config/helix/config.toml`
+- Windows: `%AppData%\helix\config.toml`

-> Hint: You can easily open the config file by typing `:config-open` within Helix normal mode.
+> 💡 You can easily open the config file by typing `:config-open` within Helix normal mode.

 Example config:

@@ -25,12 +25,10 @@ select = "underline"
 hidden = false
 ```

-You may also specify a file to use for configuration with the `-c` or
-`--config` CLI argument: `hx -c path/to/custom-config.toml`.
-
-It is also possible to trigger configuration file reloading by sending the `USR1`
-signal to the helix process, e.g. via `pkill -USR1 hx`. This is only supported
-on unix operating systems.
+You can use a custom configuration file by specifying it with the `-c` or
+`--config` command line argument, for example `hx -c path/to/custom-config.toml`.
+Additionally, you can reload the configuration file by sending the USR1
+signal to the Helix process on Unix operating systems, such as by using the command `pkill -USR1 hx`.

 ## Editor

@@ -38,25 +36,28 @@ on unix operating systems.

 | Key | Description | Default |
 |--|--|---------|
-| `scrolloff` | Number of lines of padding around the edge of the screen when scrolling. | `5` |
-| `mouse` | Enable mouse mode. | `true` |
-| `middle-click-paste` | Middle click paste support. | `true` |
-| `scroll-lines` | Number of lines to scroll per scroll wheel step. | `3` |
-| `shell` | Shell to use when running external commands. | Unix: `["sh", "-c"]`<br/>Windows: `["cmd", "/C"]` |
-| `line-number` | Line number display: `absolute` simply shows each line's number, while `relative` shows the distance from the current line. When unfocused or in insert mode, `relative` will still show absolute line numbers. | `absolute` |
-| `cursorline` | Highlight all lines with a cursor. | `false` |
-| `cursorcolumn` | Highlight all columns with a cursor. | `false` |
+| `scrolloff` | Number of lines of padding around the edge of the screen when scrolling | `5` |
+| `mouse` | Enable mouse mode | `true` |
+| `middle-click-paste` | Middle click paste support | `true` |
+| `scroll-lines` | Number of lines to scroll per scroll wheel step | `3` |
+| `shell` | Shell to use when running external commands | Unix: `["sh", "-c"]`<br/>
Windows: `["cmd", "/C"]` | +| `line-number` | Line number display: `absolute` simply shows each line's number, while `relative` shows the distance from the current line. When unfocused or in insert mode, `relative` will still show absolute line numbers | `absolute` | +| `cursorline` | Highlight all lines with a cursor | `false` | +| `cursorcolumn` | Highlight all columns with a cursor | `false` | | `gutters` | Gutters to display: Available are `diagnostics` and `diff` and `line-numbers` and `spacer`, note that `diagnostics` also includes other features like breakpoints, 1-width padding will be inserted if gutters is non-empty | `["diagnostics", "spacer", "line-numbers", "spacer", "diff"]` | -| `auto-completion` | Enable automatic pop up of auto-completion. | `true` | -| `auto-format` | Enable automatic formatting on save. | `true` | -| `auto-save` | Enable automatic saving on focus moving away from Helix. Requires [focus event support](https://github.com/helix-editor/helix/wiki/Terminal-Support) from your terminal. | `false` | -| `idle-timeout` | Time in milliseconds since last keypress before idle timers trigger. Used for autocompletion, set to 0 for instant. | `400` | +| `auto-completion` | Enable automatic pop up of auto-completion | `true` | +| `auto-format` | Enable automatic formatting on save | `true` | +| `auto-save` | Enable automatic saving on the focus moving away from Helix. Requires [focus event support](https://github.com/helix-editor/helix/wiki/Terminal-Support) from your terminal | `false` | +| `idle-timeout` | Time in milliseconds since last keypress before idle timers trigger. Used for autocompletion, set to 0 for instant | `400` | | `completion-trigger-len` | The min-length of word under cursor to trigger autocompletion | `2` | -| `auto-info` | Whether to display infoboxes | `true` | -| `true-color` | Set to `true` to override automatic detection of terminal truecolor support in the event of a false negative. | `false` | -| `rulers` | List of column positions at which to display the rulers. Can be overridden by language specific `rulers` in `languages.toml` file. | `[]` | +| `completion-replace` | Set to `true` to make completions always replace the entire word and not just the part before the cursor | `false` | +| `auto-info` | Whether to display info boxes | `true` | +| `true-color` | Set to `true` to override automatic detection of terminal truecolor support in the event of a false negative | `false` | +| `undercurl` | Set to `true` to override automatic detection of terminal undercurl support in the event of a false negative | `false` | +| `rulers` | List of column positions at which to display the rulers. Can be overridden by language specific `rulers` in `languages.toml` file | `[]` | | `bufferline` | Renders a line at the top of the editor displaying open buffers. Can be `always`, `never` or `multiple` (only shown if more than one buffer is in use) | `never` | | `color-modes` | Whether to color the mode indicator with different colors depending on the mode itself | `false` | +| `text-width` | Maximum line length. 
Used for the `:reflow` command and soft-wrapping if `soft-wrap.wrap_at_text_width` is set | `80` | ### `[editor.statusline]` Section @@ -98,6 +99,7 @@ The following statusline elements can be configured: | `spinner` | A progress spinner indicating LSP activity | | `file-name` | The path/name of the opened file | | `file-base-name` | The basename of the opened file | +| `file-modification-indicator` | The indicator to show whether the file is modified (a `[+]` appears when there are unsaved changes) | | `file-encoding` | The encoding of the opened file if it differs from UTF-8 | | `file-line-ending` | The file line endings (CRLF or LF) | | `total-line-numbers` | The total line numbers of the opened file | @@ -110,23 +112,30 @@ The following statusline elements can be configured: | `position-percentage` | The cursor position as a percentage of the total number of lines | | `separator` | The string defined in `editor.statusline.separator` (defaults to `"│"`) | | `spacer` | Inserts a space between elements (multiple/contiguous spacers may be specified) | +| `version-control` | The current branch name or detached commit hash of the opened workspace | ### `[editor.lsp]` Section | Key | Description | Default | | --- | ----------- | ------- | +| `enable` | Enables LSP integration. Setting to false will completely disable language servers regardless of language settings.| `true` | | `display-messages` | Display LSP progress messages below statusline[^1] | `false` | | `auto-signature-help` | Enable automatic popup of signature help (parameter hints) | `true` | +| `display-inlay-hints` | Display inlay hints[^2] | `false` | | `display-signature-help-docs` | Display docs under signature help popup | `true` | [^1]: By default, a progress spinner is shown in the statusline beside the file path. +[^2]: You may also have to activate them in the LSP config for them to appear, not just in Helix. + Inlay hints in Helix are still being improved on and may be a little bit laggy/janky under some circumstances, please report any bugs you see so we can fix them! ### `[editor.cursor-shape]` Section -Defines the shape of cursor in each mode. Note that due to limitations -of the terminal environment, only the primary cursor can change shape. +Defines the shape of cursor in each mode. Valid values for these options are `block`, `bar`, `underline`, or `hidden`. +> 💡 Due to limitations of the terminal environment, only the primary cursor can +> change shape. + | Key | Description | Default | | --- | ----------- | ------- | | `normal` | Cursor shape in [normal mode][normal mode] | `block` | @@ -139,23 +148,22 @@ Valid values for these options are `block`, `bar`, `underline`, or `hidden`. ### `[editor.file-picker]` Section -Sets options for file picker and global search. All but the last key listed in -the default file-picker configuration below are IgnoreOptions: whether hidden -files and files listed within ignore files are ignored by (not visible in) the -helix file picker and global search. There is also one other key, `max-depth` -available, which is not defined by default. +Set options for file picker and global search. Ignoring a file means it is +not visible in the Helix file picker and global search. All git related options are only enabled in a git repository. | Key | Description | Default | |--|--|---------| -|`hidden` | Enables ignoring hidden files. | true -|`parents` | Enables reading ignore files from parent directories. | true -|`ignore` | Enables reading `.ignore` files. 
| true -|`git-ignore` | Enables reading `.gitignore` files. | true -|`git-global` | Enables reading global .gitignore, whose path is specified in git's config: `core.excludefile` option. | true -|`git-exclude` | Enables reading `.git/info/exclude` files. | true -|`max-depth` | Set with an integer value for maximum depth to recurse. | Defaults to `None`. +|`hidden` | Enables ignoring hidden files | true +|`follow-links` | Follow symlinks instead of ignoring them | true +|`deduplicate-links` | Ignore symlinks that point at files already shown in the picker | true +|`parents` | Enables reading ignore files from parent directories | true +|`ignore` | Enables reading `.ignore` files | true +|`git-ignore` | Enables reading `.gitignore` files | true +|`git-global` | Enables reading global `.gitignore`, whose path is specified in git's config: `core.excludefile` option | true +|`git-exclude` | Enables reading `.git/info/exclude` files | true +|`max-depth` | Set with an integer value for maximum depth to recurse | Defaults to `None`. ### `[editor.auto-pairs]` Section @@ -207,7 +215,7 @@ Search specific options. | Key | Description | Default | |--|--|---------| -| `smart-case` | Enable smart case regex searching (case insensitive unless pattern contains upper case characters) | `true` | +| `smart-case` | Enable smart case regex searching (case-insensitive unless pattern contains upper case characters) | `true` | | `wrap-around`| Whether the search should wrap after depleting the matches | `true` | ### `[editor.whitespace]` Section @@ -216,7 +224,7 @@ Options for rendering whitespace with visible characters. Use `:set whitespace.r | Key | Description | Default | |-----|-------------|---------| -| `render` | Whether to render whitespace. May either be `"all"` or `"none"`, or a table with sub-keys `space`, `tab`, and `newline`. | `"none"` | +| `render` | Whether to render whitespace. May either be `"all"` or `"none"`, or a table with sub-keys `space`, `nbsp`, `tab`, and `newline` | `"none"` | | `characters` | Literal characters to use when rendering whitespace. Sub-keys may be any of `tab`, `space`, `nbsp`, `newline` or `tabpad` | See example below | Example @@ -244,7 +252,7 @@ Options for rendering vertical indent guides. | Key | Description | Default | | --- | --- | --- | -| `render` | Whether to render indent guides. | `false` | +| `render` | Whether to render indent guides | `false` | | `character` | Literal character to use for rendering the indent guide | `│` | | `skip-levels` | Number of indent levels to skip | `0` | @@ -256,3 +264,77 @@ render = true character = "╎" # Some characters that work well: "▏", "┆", "┊", "⸽" skip-levels = 1 ``` + +### `[editor.gutters]` Section + +For simplicity, `editor.gutters` accepts an array of gutter types, which will +use default settings for all gutter components. + +```toml +[editor] +gutters = ["diff", "diagnostics", "line-numbers", "spacer"] +``` + +To customize the behavior of gutters, the `[editor.gutters]` section must +be used. This section contains top level settings, as well as settings for +specific gutter components as subsections. 
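For example, the two levels can be combined in a single configuration. The snippet below is only illustrative; the individual keys are described in the tables and examples that follow.

```toml
[editor.gutters]
# Top-level setting: which gutters to show and in which order
layout = ["diff", "diagnostics", "line-numbers", "spacer"]

# Component subsection: shrink the line-number gutter to a single column
[editor.gutters.line-numbers]
min-width = 1
```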
+ +| Key | Description | Default | +| --- | --- | --- | +| `layout` | A vector of gutters to display | `["diagnostics", "spacer", "line-numbers", "spacer", "diff"]` | + +Example: + +```toml +[editor.gutters] +layout = ["diff", "diagnostics", "line-numbers", "spacer"] +``` + +#### `[editor.gutters.line-numbers]` Section + +Options for the line number gutter + +| Key | Description | Default | +| --- | --- | --- | +| `min-width` | The minimum number of characters to use | `3` | + +Example: + +```toml +[editor.gutters.line-numbers] +min-width = 1 +``` + +#### `[editor.gutters.diagnotics]` Section + +Currently unused + +#### `[editor.gutters.diff]` Section + +Currently unused + +#### `[editor.gutters.spacer]` Section + +Currently unused + +### `[editor.soft-wrap]` Section + +Options for soft wrapping lines that exceed the view width: + +| Key | Description | Default | +| --- | --- | --- | +| `enable` | Whether soft wrapping is enabled. | `false` | +| `max-wrap` | Maximum free space left at the end of the line. | `20` | +| `max-indent-retain` | Maximum indentation to carry over when soft wrapping a line. | `40` | +| `wrap-indicator` | Text inserted before soft wrapped lines, highlighted with `ui.virtual.wrap` | `↪ ` | +| `wrap-at-text-width` | Soft wrap at `text-width` instead of using the full viewport size. | `false` | + +Example: + +```toml +[editor.soft-wrap] +enable = true +max-wrap = 25 # increase value to reduce forced mid-word wrapping +max-indent-retain = 0 +wrap-indicator = "" # set wrap-indicator to "" to hide it +``` diff --git a/book/src/generated/lang-support.md b/book/src/generated/lang-support.md index 00e6a91e..3c18956a 100644 --- a/book/src/generated/lang-support.md +++ b/book/src/generated/lang-support.md @@ -10,6 +10,7 @@ | c | ✓ | ✓ | ✓ | `clangd` | | c-sharp | ✓ | ✓ | | `OmniSharp` | | cairo | ✓ | | | | +| capnp | ✓ | | ✓ | | | clojure | ✓ | | | `clojure-lsp` | | cmake | ✓ | ✓ | ✓ | `cmake-language-server` | | comment | ✓ | | | | @@ -30,7 +31,7 @@ | eex | ✓ | | | | | ejs | ✓ | | | | | elixir | ✓ | ✓ | ✓ | `elixir-ls` | -| elm | ✓ | | | `elm-language-server` | +| elm | ✓ | ✓ | | `elm-language-server` | | elvish | ✓ | | | `elvish` | | env | ✓ | | | | | erb | ✓ | | | | @@ -38,7 +39,7 @@ | esdl | ✓ | | | | | fish | ✓ | ✓ | ✓ | | | fortran | ✓ | | ✓ | `fortls` | -| gdscript | ✓ | ✓ | | | +| gdscript | ✓ | ✓ | ✓ | | | git-attributes | ✓ | | | | | git-commit | ✓ | | | | | git-config | ✓ | | | | @@ -56,6 +57,7 @@ | haskell | ✓ | ✓ | | `haskell-language-server-wrapper` | | hcl | ✓ | | ✓ | `terraform-ls` | | heex | ✓ | ✓ | | `elixir-ls` | +| hosts | ✓ | | | | | html | ✓ | | | `vscode-html-language-server` | | idris | | | | `idris2-lsp` | | iex | ✓ | | | | @@ -83,6 +85,8 @@ | mermaid | ✓ | | | | | meson | ✓ | | ✓ | | | mint | | | | `mint` | +| msbuild | ✓ | | ✓ | | +| nasm | ✓ | ✓ | | | | nickel | ✓ | | ✓ | `nls` | | nix | ✓ | | | `nil` | | nu | ✓ | | | | @@ -92,12 +96,16 @@ | openscad | ✓ | | | `openscad-lsp` | | org | ✓ | | | | | pascal | ✓ | ✓ | | `pasls` | +| passwd | ✓ | | | | +| pem | ✓ | | | | | perl | ✓ | ✓ | ✓ | | | php | ✓ | ✓ | ✓ | `intelephense` | +| po | ✓ | ✓ | | | | ponylang | ✓ | ✓ | ✓ | | | prisma | ✓ | | | `prisma-language-server` | | prolog | | | | `swipl` | | protobuf | ✓ | | ✓ | | +| prql | ✓ | | | | | purescript | ✓ | | | `purescript-language-server` | | python | ✓ | ✓ | ✓ | `pylsp` | | qml | ✓ | | ✓ | `qmlls` | @@ -107,18 +115,22 @@ | rescript | ✓ | ✓ | | `rescript-language-server` | | rmarkdown | ✓ | | ✓ | `R` | | ron | ✓ | | ✓ | | +| rst | ✓ | | | | | ruby | ✓ 
| ✓ | ✓ | `solargraph` | | rust | ✓ | ✓ | ✓ | `rust-analyzer` | +| sage | ✓ | ✓ | | | | scala | ✓ | | ✓ | `metals` | | scheme | ✓ | | | | | scss | ✓ | | | `vscode-css-language-server` | | slint | ✓ | | ✓ | `slint-lsp` | +| smithy | ✓ | | | `cs` | | sml | ✓ | | | | | solidity | ✓ | | | `solc` | | sql | ✓ | | | | | sshclientconfig | ✓ | | | | | starlark | ✓ | ✓ | | | | svelte | ✓ | | | `svelteserver` | +| sway | ✓ | ✓ | ✓ | `forc` | | swift | ✓ | | | `sourcekit-lsp` | | tablegen | ✓ | ✓ | ✓ | | | task | ✓ | | | | @@ -129,7 +141,8 @@ | twig | ✓ | | | | | typescript | ✓ | ✓ | ✓ | `typescript-language-server` | | ungrammar | ✓ | | | | -| v | ✓ | | | `vls` | +| uxntal | ✓ | | | | +| v | ✓ | ✓ | ✓ | `v` | | vala | ✓ | | | `vala-language-server` | | verilog | ✓ | ✓ | | `svlangserver` | | vhs | ✓ | | | | @@ -141,4 +154,5 @@ | xit | ✓ | | | | | xml | ✓ | | ✓ | | | yaml | ✓ | | ✓ | `yaml-language-server` | +| yuck | ✓ | | | | | zig | ✓ | ✓ | ✓ | `zls` | diff --git a/book/src/generated/typable-cmd.md b/book/src/generated/typable-cmd.md index 66e6ac03..8b367aad 100644 --- a/book/src/generated/typable-cmd.md +++ b/book/src/generated/typable-cmd.md @@ -43,11 +43,13 @@ | `:change-current-directory`, `:cd` | Change the current working directory. | | `:show-directory`, `:pwd` | Show the current working directory. | | `:encoding` | Set encoding. Based on `https://encoding.spec.whatwg.org`. | +| `:character-info`, `:char` | Get info about the character under the primary cursor. | | `:reload` | Discard changes and reload from the source file. | | `:reload-all` | Discard changes and reload all documents from the source files. | | `:update` | Write changes only if the file has been modified. | | `:lsp-workspace-command` | Open workspace command picker | | `:lsp-restart` | Restarts the Language Server that is in use by the current doc | +| `:lsp-stop` | Stops the Language Server that is in use by the current doc | | `:tree-sitter-scopes` | Display tree sitter scopes, primarily for theming and development. | | `:debug-start`, `:dbg` | Start a debug session from a given template with given parameters. | | `:debug-remote`, `:dbg-tcp` | Connect to a debug adapter by TCP address and start a debugging session from a given template with given parameters. | @@ -58,8 +60,9 @@ | `:hsplit-new`, `:hnew` | Open a scratch buffer in a horizontal split. | | `:tutor` | Open the tutorial. | | `:goto`, `:g` | Goto line number. | -| `:set-language`, `:lang` | Set the language of current buffer. | +| `:set-language`, `:lang` | Set the language of current buffer (show current language if no value specified). | | `:set-option`, `:set` | Set a config option at runtime.
For example to disable smart case search, use `:set search.smart-case false`. | +| `:toggle-option`, `:toggle` | Toggle a boolean config option at runtime.
For example to toggle smart case search, use `:toggle search.smart-case`. | | `:get-option`, `:get` | Get the current value of a config option. | | `:sort` | Sort ranges in selection. | | `:rsort` | Sort ranges in selection in reverse order. | @@ -73,3 +76,4 @@ | `:pipe` | Pipe each selection to the shell command. | | `:pipe-to` | Pipe each selection to the shell command, ignoring output. | | `:run-shell-command`, `:sh` | Run a shell command | +| `:reset-diff-change`, `:diffget`, `:diffg` | Reset the diff change at the cursor position. | diff --git a/book/src/guides/README.md b/book/src/guides/README.md index e0c44ce7..c25768e6 100644 --- a/book/src/guides/README.md +++ b/book/src/guides/README.md @@ -1,4 +1,4 @@ # Guides This section contains guides for adding new language server configurations, -tree-sitter grammars, textobject queries, etc. +tree-sitter grammars, textobject queries, and other similar items. diff --git a/book/src/guides/adding_languages.md b/book/src/guides/adding_languages.md index 6598b9bf..b92af402 100644 --- a/book/src/guides/adding_languages.md +++ b/book/src/guides/adding_languages.md @@ -1,45 +1,52 @@ -# Adding languages +# Adding new languages to Helix + +In order to add a new language to Helix, you will need to follow the steps +below. ## Language configuration -To add a new language, you need to add a `[[language]]` entry to the -`languages.toml` (see the [language configuration section]). +1. Add a new `[[language]]` entry in the `languages.toml` file and provide the + necessary configuration for the new language. For more information on + language configuration, refer to the + [language configuration section](../languages.md) of the documentation. +2. If you are adding a new language or updating an existing language server + configuration, run the command `cargo xtask docgen` to update the + [Language Support](../lang-support.md) documentation. -When adding a new language or Language Server configuration for an existing -language, run `cargo xtask docgen` to add the new configuration to the -[Language Support][lang-support] docs before creating a pull request. -When adding a Language Server configuration, be sure to update the -[Language Server Wiki][install-lsp-wiki] with installation notes. +> 💡 If you are adding a new Language Server configuration, make sure to update +> the +> [Language Server Wiki](https://github.com/helix-editor/helix/wiki/How-to-install-the-default-language-servers) +> with the installation instructions. ## Grammar configuration -If a tree-sitter grammar is available for the language, add a new `[[grammar]]` -entry to `languages.toml`. - -You may use the `source.path` key rather than `source.git` with an absolute path -to a locally available grammar for testing, but switch to `source.git` before -submitting a pull request. +1. If a tree-sitter grammar is available for the new language, add a new + `[[grammar]]` entry to the `languages.toml` file. +2. If you are testing the grammar locally, you can use the `source.path` key + with an absolute path to the grammar. However, before submitting a pull + request, make sure to switch to using `source.git`. ## Queries -For a language to have syntax-highlighting and indentation among -other things, you have to add queries. Add a directory for your -language with the path `runtime/queries//`. The tree-sitter -[website](https://tree-sitter.github.io/tree-sitter/syntax-highlighting#queries) -gives more info on how to write queries. 
- -> NOTE: When evaluating queries, the first matching query takes -precedence, which is different from other editors like Neovim where -the last matching query supersedes the ones before it. See -[this issue][neovim-query-precedence] for an example. - -## Common Issues - -- If you get errors when running after switching branches, you may have to update the tree-sitter grammars. Run `hx --grammar fetch` to fetch the grammars and `hx --grammar build` to build any out-of-date grammars. - -- If a parser is segfaulting or you want to remove the parser, make sure to remove the compiled parser in `runtime/grammar/.so` - -[language configuration section]: ../languages.md -[neovim-query-precedence]: https://github.com/helix-editor/helix/pull/1170#issuecomment-997294090 -[install-lsp-wiki]: https://github.com/helix-editor/helix/wiki/How-to-install-the-default-language-servers -[lang-support]: ../lang-support.md +1. In order to provide syntax highlighting and indentation for the new language, + you will need to add queries. +2. Create a new directory for the language with the path + `runtime/queries//`. +3. Refer to the + [tree-sitter website](https://tree-sitter.github.io/tree-sitter/syntax-highlighting#queries) + for more information on writing queries. + +> 💡 In Helix, the first matching query takes precedence when evaluating +> queries, which is different from other editors such as Neovim where the last +> matching query supersedes the ones before it. See +> [this issue](https://github.com/helix-editor/helix/pull/1170#issuecomment-997294090) +> for an example. + +## Common issues + +- If you encounter errors when running Helix after switching branches, you may + need to update the tree-sitter grammars. Run the command `hx --grammar fetch` + to fetch the grammars and `hx --grammar build` to build any out-of-date + grammars. +- If a parser is causing a segfault, or you want to remove it, make sure to + remove the compiled parser located at `runtime/grammars/.so`. diff --git a/book/src/guides/indent.md b/book/src/guides/indent.md index 0e259289..b660d785 100644 --- a/book/src/guides/indent.md +++ b/book/src/guides/indent.md @@ -1,4 +1,4 @@ -# Adding Indent Queries +# Adding indent queries Helix uses tree-sitter to correctly indent new lines. This requires a tree-sitter grammar and an `indent.scm` query file placed in @@ -36,7 +36,7 @@ changed by using a `#set!` declaration anywhere in the pattern: (#set! "scope" "all")) ``` -## Capture Types +## Capture types - `@indent` (default scope `tail`): Increase the indent level by 1. Multiple occurrences in the same line diff --git a/book/src/guides/textobject.md b/book/src/guides/textobject.md index 8a217354..405f11c1 100644 --- a/book/src/guides/textobject.md +++ b/book/src/guides/textobject.md @@ -1,14 +1,14 @@ -# Adding Textobject Queries +# Adding textobject queries -Textobjects that are language specific ([like functions, classes, etc][textobjects]) -require an accompanying tree-sitter grammar and a `textobjects.scm` query file +Helix supports textobjects that are language specific, such as functions, classes, etc. +These textobjects require an accompanying tree-sitter grammar and a `textobjects.scm` query file to work properly. Tree-sitter allows us to query the source code syntax tree and capture specific parts of it. The queries are written in a lisp dialect. More information on how to write queries can be found in the [official tree-sitter documentation][tree-sitter-queries]. 
Query files should be placed in `runtime/queries/{language}/textobjects.scm` -when contributing. Note that to test the query files locally you should put +when contributing to Helix. Note that to test the query files locally you should put them under your local runtime directory (`~/.config/helix/runtime` on Linux for example). @@ -28,9 +28,9 @@ The following [captures][tree-sitter-captures] are recognized: [Example query files][textobject-examples] can be found in the helix GitHub repository. -## Queries for Textobject Based Navigation +## Queries for textobject based navigation -[Tree-sitter based navigation][textobjects-nav] is done using captures in the +Tree-sitter based navigation in Helix is done using captures in the following order: - `object.movement` @@ -38,12 +38,10 @@ following order: - `object.inside` For example if a `function.around` capture has been already defined for a language -in it's `textobjects.scm` file, function navigation should also work automatically. +in its `textobjects.scm` file, function navigation should also work automatically. `function.movement` should be defined only if the node captured by `function.around` doesn't make sense in a navigation context. -[textobjects]: ../usage.md#textobjects -[textobjects-nav]: ../usage.md#tree-sitter-textobject-based-navigation [tree-sitter-queries]: https://tree-sitter.github.io/tree-sitter/using-parsers#query-syntax [tree-sitter-captures]: https://tree-sitter.github.io/tree-sitter/using-parsers#capturing-nodes [textobject-examples]: https://github.com/search?q=repo%3Ahelix-editor%2Fhelix+filename%3Atextobjects.scm&type=Code&ref=advsearch&l=&l= diff --git a/book/src/install.md b/book/src/install.md index bf0ff079..a0e24de7 100644 --- a/book/src/install.md +++ b/book/src/install.md @@ -1,171 +1,250 @@ -# Installation - -We provide pre-built binaries on the [GitHub Releases page](https://github.com/helix-editor/helix/releases). +# Installing Helix + + +- [Pre-built binaries](#pre-built-binaries) +- [Linux, macOS, Windows and OpenBSD packaging status](#linux-macos-windows-and-openbsd-packaging-status) +- [Linux](#linux) + - [Ubuntu](#ubuntu) + - [Fedora/RHEL](#fedorarhel) + - [Arch Linux community](#arch-linux-community) + - [NixOS](#nixos) + - [AppImage](#appimage) +- [macOS](#macos) + - [Homebrew Core](#homebrew-core) +- [Windows](#windows) + - [Scoop](#scoop) + - [Chocolatey](#chocolatey) + - [MSYS2](#msys2) +- [Building from source](#building-from-source) + - [Configuring Helix's runtime files](#configuring-helixs-runtime-files) + - [Validating the installation](#validating-the-installation) + - [Configure the desktop shortcut](#configure-the-desktop-shortcut) + + +To install Helix, follow the instructions specific to your operating system. +Note that: + +- To get the latest nightly version of Helix, you need to + [build from source](#building-from-source). + +- To take full advantage of Helix, install the language servers for your + preferred programming languages. See the + [wiki](https://github.com/helix-editor/helix/wiki/How-to-install-the-default-language-servers) + for instructions. + +## Pre-built binaries + +Download pre-built binaries from the +[GitHub Releases page](https://github.com/helix-editor/helix/releases). Add the binary to your system's `$PATH` to use it from the command +line. + +## Linux, macOS, Windows and OpenBSD packaging status + +Helix is available for Linux, macOS and Windows via the official repositories listed below. 
[![Packaging status](https://repology.org/badge/vertical-allrepos/helix.svg)](https://repology.org/project/helix/versions) -## OSX +## Linux -Helix is available in homebrew-core: +The following third party repositories are available: -``` -brew install helix +### Ubuntu + +Helix is available via [Maveonair's PPA](https://launchpad.net/~maveonair/+archive/ubuntu/helix-editor): + +```sh +sudo add-apt-repository ppa:maveonair/helix-editor +sudo apt update +sudo apt install helix ``` -## Linux +### Fedora/RHEL -### NixOS +Helix is available via `copr`: + +```sh +sudo dnf copr enable varlad/helix +sudo dnf install helix +``` -A [flake](https://nixos.wiki/wiki/Flakes) containing the package is available in -the project root. The flake can also be used to spin up a reproducible development -shell for working on Helix with `nix develop`. +### Arch Linux community -Flake outputs are cached for each push to master using -[Cachix](https://www.cachix.org/). The flake is configured to -automatically make use of this cache assuming the user accepts -the new settings on first use. +Releases are available in the `community` repository: -If you are using a version of Nix without flakes enabled you can -[install Cachix cli](https://docs.cachix.org/installation); `cachix use helix` will -configure Nix to use cached outputs when possible. +```sh +sudo pacman -S helix +``` +Additionally, a [helix-git](https://aur.archlinux.org/packages/helix-git/) package is available +in the AUR, which builds the master branch. -### Arch Linux +### NixOS -Releases are available in the `community` repository. +Helix is available as a [flake](https://nixos.wiki/wiki/Flakes) in the project +root. Use `nix develop` to spin up a reproducible development shell. Outputs are +cached for each push to master using [Cachix](https://www.cachix.org/). The +flake is configured to automatically make use of this cache assuming the user +accepts the new settings on first use. -A [helix-git](https://aur.archlinux.org/packages/helix-git/) package is also available on the AUR, which builds the master branch. +If you are using a version of Nix without flakes enabled, +[install Cachix CLI](https://docs.cachix.org/installation) and use +`cachix use helix` to configure Nix to use cached outputs when possible. -### Fedora Linux +### AppImage -You can install the COPR package for Helix via +Install Helix using [AppImage](https://appimage.org/). +Download Helix AppImage from the [latest releases](https://github.com/helix-editor/helix/releases/latest) page. +```sh +chmod +x helix-*.AppImage # change permission for executable mode +./helix-*.AppImage # run helix ``` -sudo dnf copr enable varlad/helix -sudo dnf install helix -``` + +## macOS -### Void Linux +### Homebrew Core -``` -sudo xbps-install helix +```sh +brew install helix ``` ## Windows -Helix can be installed using [Scoop](https://scoop.sh/), [Chocolatey](https://chocolatey.org/) +Install on Windows using [Scoop](https://scoop.sh/), [Chocolatey](https://chocolatey.org/) or [MSYS2](https://msys2.org/). 
-**Scoop:** +### Scoop -``` +```sh scoop install helix ``` -**Chocolatey:** +### Chocolatey -``` +```sh choco install helix ``` -**MSYS2:** - -Choose the proper command for your system from below: +### MSYS2 - - For 32 bit Windows 7 or above: +For 64-bit Windows 8.1 or above: -``` -pacman -S mingw-w64-i686-helix +```sh +pacman -S mingw-w64-ucrt-x86_64-helix ``` - - For 64 bit Windows 7 or above: +## Building from source -``` -pacman -S mingw-w64-x86_64-helix -``` +Clone the repository: - - For 64 bit Windows 8.1 or above: - -``` -pacman -S mingw-w64-ucrt-x86_64-helix +```sh +git clone https://github.com/helix-editor/helix +cd helix ``` -## Build from source +Compile from source: -``` -git clone https://github.com/helix-editor/helix -cd helix +```sh cargo install --path helix-term --locked ``` -This will install the `hx` binary to `$HOME/.cargo/bin` and build tree-sitter grammars in `./runtime/grammars`. +This command will create the `hx` executable and construct the tree-sitter +grammars in the local `runtime` folder. To build the tree-sitter grammars requires +a c++ compiler to be installed, for example `gcc-c++`. + +> 💡 If you are using the musl-libc instead of glibc the following environment variable must be set during the build +> to ensure tree-sitter grammars can be loaded correctly: +> +> ```sh +> RUSTFLAGS="-C target-feature=-crt-static" +> ``` -Helix also needs its runtime files so make sure to copy/symlink the `runtime/` directory into the -config directory (for example `~/.config/helix/runtime` on Linux/macOS). This location can be overridden -via the `HELIX_RUNTIME` environment variable. +> 💡 Tree-sitter grammars can be fetched and compiled if not pre-packaged. Fetch +> grammars with `hx --grammar fetch` (requires `git`) and compile them with +> `hx --grammar build` (requires a C++ compiler). This will install them in +> the `runtime` directory within the user's helix config directory (more +> [details below](#multiple-runtime-directories)). -| OS | Command | -| -------------------- | ------------------------------------------------ | -| Windows (Cmd) | `xcopy /e /i runtime %AppData%\helix\runtime` | -| Windows (PowerShell) | `xcopy /e /i runtime $Env:AppData\helix\runtime` | -| Linux / macOS | `ln -s $PWD/runtime ~/.config/helix/runtime` | +### Configuring Helix's runtime files -Starting with Windows Vista you can also create symbolic links on Windows. Note that this requires -elevated privileges - i.e. PowerShell or Cmd must be run as administrator. +#### Linux and macOS -**PowerShell:** +Either set the `HELIX_RUNTIME` environment variable to point to the runtime files and add it to your `~/.bashrc` or equivalent: -```powershell -New-Item -ItemType SymbolicLink -Target "runtime" -Path "$Env:AppData\helix\runtime" +```sh +HELIX_RUNTIME=/home/user-name/src/helix/runtime ``` -**Cmd:** +Or, create a symlink in `~/.config/helix` that links to the source code directory: -```cmd -cd %appdata%\helix -mklink /D runtime "\runtime" +```sh +ln -s $PWD/runtime ~/.config/helix/runtime ``` -The runtime location can be overridden via the `HELIX_RUNTIME` environment variable. +#### Windows -> NOTE: if `HELIX_RUNTIME` is set prior to calling `cargo install --path helix-term --locked`, -> tree-sitter grammars will be built in `$HELIX_RUNTIME/grammars`. 
+Either set the `HELIX_RUNTIME` environment variable to point to the runtime files using the Windows setting (search for +`Edit environment variables for your account`) or use the `setx` command in +Cmd: -If you plan on keeping the repo locally, an alternative to copying/symlinking -runtime files is to set `HELIX_RUNTIME=/path/to/helix/runtime` -(`HELIX_RUNTIME=$PWD/runtime` if you're in the helix repo directory). +```sh +setx HELIX_RUNTIME "%userprofile%\source\repos\helix\runtime" +``` -To use Helix in desktop environments that supports [XDG desktop menu](https://specifications.freedesktop.org/menu-spec/menu-spec-latest.html), including Gnome and KDE, copy the provided `.desktop` file to the correct folder: +> 💡 `%userprofile%` resolves to your user directory like +> `C:\Users\Your-Name\` for example. -```bash -cp contrib/Helix.desktop ~/.local/share/applications -``` +Or, create a symlink in `%appdata%\helix\` that links to the source code directory: -To use another terminal than the default, you will need to modify the `.desktop` file. For example, to use `kitty`: +| Method | Command | +| ---------- | -------------------------------------------------------------------------------------- | +| PowerShell | `New-Item -ItemType Junction -Target "runtime" -Path "$Env:AppData\helix\runtime"` | +| Cmd | `cd %appdata%\helix`
`mklink /D runtime "%userprofile%\src\helix\runtime"` | -```bash -sed -i "s|Exec=hx %F|Exec=kitty hx %F|g" ~/.local/share/applications/Helix.desktop -sed -i "s|Terminal=true|Terminal=false|g" ~/.local/share/applications/Helix.desktop -``` +> 💡 On Windows, creating a symbolic link may require running PowerShell or +> Cmd as an administrator. -Please note: there is no icon for Helix yet, so the system default will be used. +#### Multiple runtime directories -## Finishing up the installation +When Helix finds multiple runtime directories it will search through them for files in the +following order: -To make sure everything is set up as expected you should finally run the helix healthcheck via +1. `runtime/` sibling directory to `$CARGO_MANIFEST_DIR` directory (this is intended for + developing and testing helix only). +2. `runtime/` subdirectory of OS-dependent helix user config directory. +3. `$HELIX_RUNTIME`. +4. `runtime/` subdirectory of path to Helix executable. -``` +This order also sets the priority for selecting which file will be used if multiple runtime +directories have files with the same name. + +### Validating the installation + +To make sure everything is set up as expected you should run the Helix health +check: + +```sh hx --health ``` -For more information on the information displayed in the health check results refer to [Healthcheck](https://github.com/helix-editor/helix/wiki/Healthcheck). +For more information on the health check results refer to +[Health check](https://github.com/helix-editor/helix/wiki/Healthcheck). -### Building tree-sitter grammars +### Configure the desktop shortcut -Tree-sitter grammars must be fetched and compiled if not pre-packaged. -Fetch grammars with `hx --grammar fetch` (requires `git`) and compile them -with `hx --grammar build` (requires a C++ compiler). +If your desktop environment supports the +[XDG desktop menu](https://specifications.freedesktop.org/menu-spec/menu-spec-latest.html) +you can configure Helix to show up in the application menu by copying the +provided `.desktop` and icon files to their correct folders: -### Installing language servers +```sh +cp contrib/Helix.desktop ~/.local/share/applications +cp contrib/helix.png ~/.icons # or ~/.local/share/icons +``` + +To use another terminal than the system default, you can modify the `.desktop` +file. For example, to use `kitty`: -Language servers can optionally be installed if you want their features (auto-complete, diagnostics etc.). -Follow the [instructions on the wiki page](https://github.com/helix-editor/helix/wiki/How-to-install-the-default-language-servers) to add your language servers of choice. +```sh +sed -i "s|Exec=hx %F|Exec=kitty hx %F|g" ~/.local/share/applications/Helix.desktop +sed -i "s|Terminal=true|Terminal=false|g" ~/.local/share/applications/Helix.desktop +``` diff --git a/book/src/keymap.md b/book/src/keymap.md index b38a1f44..173728f2 100644 --- a/book/src/keymap.md +++ b/book/src/keymap.md @@ -14,14 +14,14 @@ - [Space mode](#space-mode) - [Popup](#popup) - [Unimpaired](#unimpaired) -- [Insert Mode](#insert-mode) -- [Select / extend mode](#select--extend-mode) +- [Insert mode](#insert-mode) +- [Select / extend mode](#select-extend-mode) - [Picker](#picker) - [Prompt](#prompt) > 💡 Mappings marked (**LSP**) require an active language server for the file. -> 💡 Mappings marked (**TS**) require a tree-sitter grammar for the filetype. +> 💡 Mappings marked (**TS**) require a tree-sitter grammar for the file type. 
## Normal mode @@ -109,7 +109,7 @@ | Key | Description | Command | | ----- | ----------- | ------- | | `s` | Select all regex matches inside selections | `select_regex` | -| `S` | Split selection into subselections on regex matches | `split_selection` | +| `S` | Split selection into sub selections on regex matches | `split_selection` | | `Alt-s` | Split selection on newlines | `split_selection_on_newline` | | `Alt-_ ` | Merge consecutive selections | `merge_consecutive_selections` | | `&` | Align selection in columns | `align_selections` | @@ -130,7 +130,7 @@ | `X` | Extend selection to line bounds (line-wise selection) | `extend_to_line_bounds` | | `Alt-x` | Shrink selection to line bounds (line-wise selection) | `shrink_to_line_bounds` | | `J` | Join lines inside selection | `join_selections` | -| `Alt-J` | Join lines inside selection and select space | `join_selections_space` | +| `Alt-J` | Join lines inside selection and select the inserted space | `join_selections_space` | | `K` | Keep selections matching the regex | `keep_selections` | | `Alt-K` | Remove selections matching the regex | `remove_selections` | | `Ctrl-c` | Comment/uncomment the selections | `toggle_comments` | @@ -141,7 +141,7 @@ ### Search -Search commands all operate on the `/` register by default. Use `"` to operate on a different one. +Search commands all operate on the `/` register by default. To use a different register, use `"`. | Key | Description | Command | | ----- | ----------- | ------- | @@ -166,15 +166,17 @@ These sub-modes are accessible from normal mode and typically switch back to nor | `Ctrl-w` | Enter [window mode](#window-mode) | N/A | | `Space` | Enter [space mode](#space-mode) | N/A | +These modes (except command mode) can be configured by +[remapping keys](https://docs.helix-editor.com/remapping.html#minor-modes). + #### View mode Accessed by typing `z` in [normal mode](#normal-mode). View mode is intended for scrolling and manipulating the view without changing the selection. The "sticky" variant of this mode (accessed by typing `Z` in -normal mode) is persistent; use the Escape key to return to normal mode after -usage (useful when you're simply looking over text and not actively editing -it). +normal mode) is persistent and can be exited using the escape key. This is +useful when you're simply looking over text and not actively editing it. | Key | Description | Command | @@ -222,7 +224,7 @@ Jumps to various locations. Accessed by typing `m` in [normal mode](#normal-mode). See the relevant section in [Usage](./usage.md) for an explanation about -[surround](./usage.md#surround) and [textobject](./usage.md#textobjects) usage. +[surround](./usage.md#surround) and [textobject](./usage.md#navigating-using-tree-sitter-textobjects) usage. | Key | Description | Command | | ----- | ----------- | ------- | @@ -239,7 +241,7 @@ TODO: Mappings for selecting syntax nodes (a superset of `[`). Accessed by typing `Ctrl-w` in [normal mode](#normal-mode). -This layer is similar to Vim keybindings as Kakoune does not support window. +This layer is similar to Vim keybindings as Kakoune does not support windows. | Key | Description | Command | | ----- | ------------- | ------- | @@ -265,30 +267,32 @@ Accessed by typing `Space` in [normal mode](#normal-mode). This layer is a kludge of mappings, mostly pickers. 
-| Key | Description | Command | -| ----- | ----------- | ------- | -| `f` | Open file picker | `file_picker` | -| `F` | Open file picker at current working directory | `file_picker_in_current_directory` | -| `b` | Open buffer picker | `buffer_picker` | -| `j` | Open jumplist picker | `jumplist_picker` | -| `k` | Show documentation for item under cursor in a [popup](#popup) (**LSP**) | `hover` | -| `s` | Open document symbol picker (**LSP**) | `symbol_picker` | -| `S` | Open workspace symbol picker (**LSP**) | `workspace_symbol_picker` | -| `d` | Open document diagnostics picker (**LSP**) | `diagnostics_picker` | -| `D` | Open workspace diagnostics picker (**LSP**) | `workspace_diagnostics_picker` | -| `r` | Rename symbol (**LSP**) | `rename_symbol` | -| `a` | Apply code action (**LSP**) | `code_action` | -| `'` | Open last fuzzy picker | `last_picker` | -| `w` | Enter [window mode](#window-mode) | N/A | -| `p` | Paste system clipboard after selections | `paste_clipboard_after` | -| `P` | Paste system clipboard before selections | `paste_clipboard_before` | -| `y` | Join and yank selections to clipboard | `yank_joined_to_clipboard` | -| `Y` | Yank main selection to clipboard | `yank_main_selection_to_clipboard` | -| `R` | Replace selections by clipboard contents | `replace_selections_with_clipboard` | -| `/` | Global search in workspace folder | `global_search` | -| `?` | Open command palette | `command_palette` | - -> TIP: Global search displays results in a fuzzy picker, use `Space + '` to bring it back up after opening a file. +| Key | Description | Command | +| ----- | ----------- | ------- | +| `f` | Open file picker | `file_picker` | +| `F` | Open file picker at current working directory | `file_picker_in_current_directory` | +| `b` | Open buffer picker | `buffer_picker` | +| `j` | Open jumplist picker | `jumplist_picker` | +| `g` | Debug (experimental) | N/A | +| `k` | Show documentation for item under cursor in a [popup](#popup) (**LSP**) | `hover` | +| `s` | Open document symbol picker (**LSP**) | `symbol_picker` | +| `S` | Open workspace symbol picker (**LSP**) | `workspace_symbol_picker` | +| `d` | Open document diagnostics picker (**LSP**) | `diagnostics_picker` | +| `D` | Open workspace diagnostics picker (**LSP**) | `workspace_diagnostics_picker` | +| `r` | Rename symbol (**LSP**) | `rename_symbol` | +| `a` | Apply code action (**LSP**) | `code_action` | +| `h` | Select symbol references (**LSP**) | `select_references_to_symbol_under_cursor` | +| `'` | Open last fuzzy picker | `last_picker` | +| `w` | Enter [window mode](#window-mode) | N/A | +| `p` | Paste system clipboard after selections | `paste_clipboard_after` | +| `P` | Paste system clipboard before selections | `paste_clipboard_before` | +| `y` | Join and yank selections to clipboard | `yank_joined_to_clipboard` | +| `Y` | Yank main selection to clipboard | `yank_main_selection_to_clipboard` | +| `R` | Replace selections by clipboard contents | `replace_selections_with_clipboard` | +| `/` | Global search in workspace folder | `global_search` | +| `?` | Open command palette | `command_palette` | + +> 💡 Global search displays results in a fuzzy picker, use `Space + '` to bring it back up after opening a file. ##### Popup @@ -301,14 +305,14 @@ Displays documentation for item under cursor. #### Unimpaired -Mappings in the style of [vim-unimpaired](https://github.com/tpope/vim-unimpaired). +These mappings are in the style of [vim-unimpaired](https://github.com/tpope/vim-unimpaired). 
| Key | Description | Command | | ----- | ----------- | ------- | -| `[d` | Go to previous diagnostic (**LSP**) | `goto_prev_diag` | | `]d` | Go to next diagnostic (**LSP**) | `goto_next_diag` | -| `[D` | Go to first diagnostic in document (**LSP**) | `goto_first_diag` | +| `[d` | Go to previous diagnostic (**LSP**) | `goto_prev_diag` | | `]D` | Go to last diagnostic in document (**LSP**) | `goto_last_diag` | +| `[D` | Go to first diagnostic in document (**LSP**) | `goto_first_diag` | | `]f` | Go to next function (**TS**) | `goto_next_function` | | `[f` | Go to previous function (**TS**) | `goto_prev_function` | | `]t` | Go to next type definition (**TS**) | `goto_next_class` | @@ -323,19 +327,20 @@ Mappings in the style of [vim-unimpaired](https://github.com/tpope/vim-unimpaire | `[p` | Go to previous paragraph | `goto_prev_paragraph` | | `]g` | Go to next change | `goto_next_change` | | `[g` | Go to previous change | `goto_prev_change` | -| `[G` | Go to first change | `goto_first_change` | | `]G` | Go to last change | `goto_last_change` | -| `[Space` | Add newline above | `add_newline_above` | +| `[G` | Go to first change | `goto_first_change` | | `]Space` | Add newline below | `add_newline_below` | +| `[Space` | Add newline above | `add_newline_above` | ## Insert mode -Insert mode bindings are somewhat minimal by default. Helix is designed to +Insert mode bindings are minimal by default. Helix is designed to be a modal editor, and this is reflected in the user experience and internal -mechanics. For example, changes to the text are only saved for undos when -escaping from insert mode to normal mode. For this reason, new users are -strongly encouraged to learn the modal editing paradigm to get the smoothest -experience. +mechanics. Changes to the text are only saved for undos when +escaping from insert mode to normal mode. + +> 💡 New users are strongly encouraged to learn the modal editing paradigm +> to get the smoothest experience. | Key | Description | Command | | ----- | ----------- | ------- | @@ -347,7 +352,7 @@ experience. | `Alt-d`, `Alt-Delete` | Delete next word | `delete_word_forward` | | `Ctrl-u` | Delete to start of line | `kill_to_line_start` | | `Ctrl-k` | Delete to end of line | `kill_to_line_end` | -| `Ctrl-h`, `Backspace` | Delete previous char | `delete_char_backward` | +| `Ctrl-h`, `Backspace`, `Shift-Backspace` | Delete previous char | `delete_char_backward` | | `Ctrl-d`, `Delete` | Delete next char | `delete_char_forward` | | `Ctrl-j`, `Enter` | Insert new line | `insert_newline` | @@ -365,8 +370,8 @@ with modal editors. | `Home` | Move to line start | `goto_line_start` | | `End` | Move to line end | `goto_line_end_newline` | -If you want to disable them in insert mode as you become more comfortable with modal editing, you can use -the following in your `config.toml`: +As you become more comfortable with modal editing, you may want to disable some +insert mode bindings. You can do this by editing your `config.toml` file. ```toml [keys.insert] @@ -382,7 +387,7 @@ end = "no_op" ## Select / extend mode -This mode echoes Normal mode, but changes any movements to extend +Select mode echoes Normal mode, but changes any movements to extend selections rather than replace them. Goto motions are also changed to extend, so that `vgl` for example extends the selection to the end of the line. @@ -428,7 +433,7 @@ Keys to use within prompt, Remapping currently not supported. 
| `Alt-d`, `Alt-Delete`, `Ctrl-Delete` | Delete next word | | `Ctrl-u` | Delete to start of line | | `Ctrl-k` | Delete to end of line | -| `Backspace`, `Ctrl-h` | Delete previous char | +| `Backspace`, `Ctrl-h`, `Shift-Backspace` | Delete previous char | | `Delete`, `Ctrl-d` | Delete next char | | `Ctrl-s` | Insert a word under doc cursor, may be changed to Ctrl-r Ctrl-w later | | `Ctrl-p`, `Up` | Select previous history | diff --git a/book/src/lang-support.md b/book/src/lang-support.md index 6a08cd69..3f96673b 100644 --- a/book/src/lang-support.md +++ b/book/src/lang-support.md @@ -1,10 +1,10 @@ # Language Support -The following languages and Language Servers are supported. In order to use +The following languages and Language Servers are supported. To use Language Server features, you must first [install][lsp-install-wiki] the appropriate Language Server. -Check the language support in your installed helix version with `hx --health`. +You can check the language support in your installed helix version with `hx --health`. Also see the [Language Configuration][lang-config] docs and the [Adding Languages][adding-languages] guide for more language configuration information. diff --git a/book/src/languages.md b/book/src/languages.md index ff06dc00..5ed69505 100644 --- a/book/src/languages.md +++ b/book/src/languages.md @@ -5,13 +5,15 @@ in `languages.toml` files. ## `languages.toml` files -There are three possible `languages.toml` files. The first is compiled into -Helix and lives in the [Helix repository](https://github.com/helix-editor/helix/blob/master/languages.toml). -This provides the default configurations for languages and language servers. +There are three possible locations for a `languages.toml` file: -You may define a `languages.toml` in your [configuration directory](./configuration.md) -which overrides values from the built-in language configuration. For example -to disable auto-LSP-formatting in Rust: +1. In the Helix source code, this lives in the + [Helix repository](https://github.com/helix-editor/helix/blob/master/languages.toml). + It provides the default configurations for languages and language servers. + +2. In your [configuration directory](./configuration.md). This overrides values + from the built-in language configuration. For example to disable + auto-LSP-formatting in Rust: ```toml # in /helix/languages.toml @@ -21,10 +23,10 @@ name = "rust" auto-format = false ``` -Language configuration may also be overridden local to a project by creating -a `languages.toml` file under a `.helix` directory. Its settings will be merged -with the language configuration in the configuration directory and the built-in -configuration. +3. In a `.helix` folder in your project. Language configuration may also be + overridden local to a project by creating a `languages.toml` file in a + `.helix` folder. Its settings will be merged with the language configuration + in the configuration directory and the built-in configuration. ## Language configuration @@ -56,16 +58,16 @@ These configuration keys are available: | `auto-format` | Whether to autoformat this language when saving | | `diagnostic-severity` | Minimal severity of diagnostic for it to be displayed. (Allowed values: `Error`, `Warning`, `Info`, `Hint`) | | `comment-token` | The token to use as a comment-token | -| `indent` | The indent to use. Has sub keys `tab-width` and `unit` | +| `indent` | The indent to use. 
Has sub keys `unit` (the text inserted into the document when indenting; usually set to N spaces or `"\t"` for tabs) and `tab-width` (the number of spaces rendered for a tab) | | `language-server` | The Language Server to run. See the Language Server configuration section below. | | `config` | Language Server configuration | | `grammar` | The tree-sitter grammar to use (defaults to the value of `name`) | | `formatter` | The formatter for the language, it will take precedence over the lsp when defined. The formatter must be able to take the original file as input from stdin and write the formatted file to stdout | -| `max-line-length` | Maximum line length. Used for the `:reflow` command | +| `text-width` | Maximum line length. Used for the `:reflow` command and soft-wrapping if `soft-wrap.wrap_at_text_width` is set, defaults to `editor.text-width` | ### File-type detection and the `file-types` key -Helix determines which language configuration to use with the `file-types` key +Helix determines which language configuration to use based on the `file-types` key from the above section. `file-types` is a list of strings or tables, for example: diff --git a/book/src/remapping.md b/book/src/remapping.md index e89c6611..d762c6ad 100644 --- a/book/src/remapping.md +++ b/book/src/remapping.md @@ -1,18 +1,18 @@ -# Key Remapping +# Key remapping -One-way key remapping is temporarily supported via a simple TOML configuration +Helix currently supports one-way key remapping through a simple TOML configuration file. (More powerful solutions such as rebinding via commands will be available in the future). -To remap keys, write a `config.toml` file in your `helix` configuration -directory (default `~/.config/helix` in Linux systems) with a structure like +To remap keys, create a `config.toml` file in your `helix` configuration +directory (default `~/.config/helix` on Linux systems) with a structure like this: ```toml # At most one section each of 'keys.normal', 'keys.insert' and 'keys.select' [keys.normal] -C-s = ":w" # Maps the Ctrl-s to the typable command :w which is an alias for :write (save file) -C-o = ":open ~/.config/helix/config.toml" # Maps the Ctrl-o to opening of the helix config file +C-s = ":w" # Maps Ctrl-s to the typable command :w which is an alias for :write (save file) +C-o = ":open ~/.config/helix/config.toml" # Maps Ctrl-o to opening of the helix config file a = "move_char_left" # Maps the 'a' key to the move_char_left command w = "move_line_up" # Maps the 'w' key move_line_up "C-S-esc" = "extend_line" # Maps Ctrl-Shift-Escape to extend_line @@ -20,10 +20,35 @@ g = { a = "code_action" } # Maps `ga` to show possible code actions "ret" = ["open_below", "normal_mode"] # Maps the enter key to open_below then re-enter normal mode [keys.insert] -"A-x" = "normal_mode" # Maps Alt-X to enter normal mode +"A-x" = "normal_mode" # Maps Alt-X to enter normal mode j = { k = "normal_mode" } # Maps `jk` to exit insert mode ``` -> NOTE: Typable commands can also be remapped, remember to keep the `:` prefix to indicate it's a typable command. + +## Minor modes + +Minor modes are accessed by pressing a key (usually from normal mode), giving access to dedicated bindings. Bindings +can be modified or added by nesting definitions. 
+ +```toml +[keys.insert.j] +k = "normal_mode" # Maps `jk` to exit insert mode + +[keys.normal.g] +a = "code_action" # Maps `ga` to show possible code actions + +# invert `j` and `k` in view mode +[keys.normal.z] +j = "scroll_up" +k = "scroll_down" + +# create a new minor mode bound to `+` +[keys.normal."+"] +m = ":run-shell-command make" +c = ":run-shell-command cargo build" +t = ":run-shell-command cargo test" +``` + +## Special keys and modifiers Ctrl, Shift and Alt modifiers are encoded respectively with the prefixes `C-`, `S-` and `A-`. Special keys are encoded as follows: @@ -50,5 +75,5 @@ Ctrl, Shift and Alt modifiers are encoded respectively with the prefixes Keys can be disabled by binding them to the `no_op` command. -Commands can be found at [Keymap](https://docs.helix-editor.com/keymap.html) Commands. -> Commands can also be found in the source code at [`helix-term/src/commands.rs`](https://github.com/helix-editor/helix/blob/master/helix-term/src/commands.rs) at the invocation of `static_commands!` macro and the `TypableCommandList`. +A list of commands is available in the [Keymap](https://docs.helix-editor.com/keymap.html) documentation + and in the source code at [`helix-term/src/commands.rs`](https://github.com/helix-editor/helix/blob/master/helix-term/src/commands.rs) at the invocation of `static_commands!` macro and the `TypableCommandList`. diff --git a/book/src/themes.md b/book/src/themes.md index 015ec59b..994542c5 100644 --- a/book/src/themes.md +++ b/book/src/themes.md @@ -1,14 +1,15 @@ # Themes -To use a theme add `theme = ""` to your [`config.toml`](./configuration.md) at the very top of the file before the first section or select it during runtime using `:theme `. +To use a theme add `theme = ""` to the top of your [`config.toml`](./configuration.md) file, or select it during runtime using `:theme `. ## Creating a theme -Create a file with the name of your theme as file name (i.e `mytheme.toml`) and place it in your `themes` directory (i.e `~/.config/helix/themes`). The directory might have to be created beforehand. +Create a file with the name of your theme as the file name (i.e `mytheme.toml`) and place it in your `themes` directory (i.e `~/.config/helix/themes` or `%AppData%\helix\themes` on Windows). The directory might have to be created beforehand. -The names "default" and "base16_default" are reserved for the builtin themes and cannot be overridden by user defined themes. +> 💡 The names "default" and "base16_default" are reserved for built-in themes +> and cannot be overridden by user-defined themes. -The default theme.toml can be found [here](https://github.com/helix-editor/helix/blob/master/theme.toml), and user submitted themes [here](https://github.com/helix-editor/helix/blob/master/runtime/themes). +### Overview Each line in the theme file is specified as below: @@ -16,7 +17,7 @@ Each line in the theme file is specified as below: key = { fg = "#ffffff", bg = "#000000", underline = { color = "#ff0000", style = "curl"}, modifiers = ["bold", "italic"] } ``` -where `key` represents what you want to style, `fg` specifies the foreground color, `bg` the background color, `underline` the underline `style`/`color`, and `modifiers` is a list of style modifiers. `bg`, `underline` and `modifiers` can be omitted to defer to the defaults. +Where `key` represents what you want to style, `fg` specifies the foreground color, `bg` the background color, `underline` the underline `style`/`color`, and `modifiers` is a list of style modifiers. 
`bg`, `underline` and `modifiers` can be omitted to defer to the defaults. To specify only the foreground color: @@ -24,15 +25,30 @@ To specify only the foreground color: key = "#ffffff" ``` -if the key contains a dot `'.'`, it must be quoted to prevent it being parsed as a [dotted key](https://toml.io/en/v1.0.0#keys). +If the key contains a dot `'.'`, it must be quoted to prevent it being parsed as a [dotted key](https://toml.io/en/v1.0.0#keys). ```toml "key.key" = "#ffffff" ``` +For inspiration, you can find the default `theme.toml` +[here](https://github.com/helix-editor/helix/blob/master/theme.toml) and +user-submitted themes +[here](https://github.com/helix-editor/helix/blob/master/runtime/themes). + +### Using the linter + +Use the supplied linting tool to check for errors and missing scopes: + +```sh +cargo xtask themelint onedark # replace onedark with +``` + +## The details of theme creation + ### Color palettes -It's recommended define a palette of named colors, and refer to them from the +It's recommended to define a palette of named colors, and refer to them in the configuration values in your theme. To do this, add a table called `palette` to your theme file: @@ -45,8 +61,8 @@ white = "#ffffff" black = "#000000" ``` -Remember that the `[palette]` table includes all keys after its header, -so you should define the palette after normal theme options. +Keep in mind that the `[palette]` table includes all keys after its header, +so it should be defined after the normal theme options. The default palette uses the terminal's default 16 colors, and the colors names are listed below. The `[palette]` section in the config file takes precedence @@ -73,9 +89,8 @@ over it and is merged into the default palette. ### Modifiers -The following values may be used as modifiers. - -Less common modifiers might not be supported by your terminal emulator. +The following values may be used as modifier, provided they are supported by +your terminal emulator. | Modifier | | --- | @@ -89,14 +104,13 @@ Less common modifiers might not be supported by your terminal emulator. | `hidden` | | `crossed_out` | -> Note: The `underlined` modifier is deprecated and only available for backwards compatibility. +> 💡 The `underlined` modifier is deprecated and only available for backwards compatibility. > Its behavior is equivalent to setting `underline.style="line"`. -### Underline Style - -One of the following values may be used as a value for `underline.style`. +### Underline style -Some styles might not be supported by your terminal emulator. +One of the following values may be used as a value for `underline.style`, providing it is +supported by your terminal emulator. | Modifier | | --- | @@ -109,7 +123,7 @@ Some styles might not be supported by your terminal emulator. ### Inheritance -Extend upon other themes by setting the `inherits` property to an existing theme. +Extend other themes by setting the `inherits` property to an existing theme. ```toml inherits = "boo_berry" @@ -124,19 +138,19 @@ berry = "#2A2A4D" ### Scopes -The following is a list of scopes available to use for styling. +The following is a list of scopes available to use for styling: #### Syntax highlighting These keys match [tree-sitter scopes](https://tree-sitter.github.io/tree-sitter/syntax-highlighting#theme). -For a given highlight produced, styling will be determined based on the longest matching theme key. For example, the highlight `function.builtin.static` would match the key `function.builtin` rather than `function`. 
+When determining styling for a highlight, the longest matching theme key will be used. For example, if the highlight is `function.builtin.static`, the key `function.builtin` will be used instead of `function`. We use a similar set of scopes as -[SublimeText](https://www.sublimetext.com/docs/scope_naming.html). See also +[Sublime Text](https://www.sublimetext.com/docs/scope_naming.html). See also [TextMate](https://macromates.com/manual/en/language_grammars) scopes. -- `attribute` - Class attributes, html tag attributes +- `attribute` - Class attributes, HTML tag attributes - `type` - Types - `builtin` - Primitive types provided by the language (`int`, `usize`) @@ -144,7 +158,7 @@ We use a similar set of scopes as - `variant` - `constructor` -- `constant` (TODO: constant.other.placeholder for %v) +- `constant` (TODO: constant.other.placeholder for `%v`) - `builtin` Special constants provided by the language (`true`, `false`, `nil` etc) - `boolean` - `character` @@ -162,11 +176,11 @@ We use a similar set of scopes as - `comment` - Code comments - `line` - Single line comments (`//`) - - `block` - Block comments (e.g. (`/* */`) + - `block` - Block comments (e.g. (`/* */`) - `documentation` - Documentation comments (e.g. `///` in Rust) - `variable` - Variables - - `builtin` - Reserved language variables (`self`, `this`, `super`, etc) + - `builtin` - Reserved language variables (`self`, `this`, `super`, etc.) - `parameter` - Function parameters - `other` - `member` - Fields of composite data types (e.g. structs, unions) @@ -186,10 +200,10 @@ We use a similar set of scopes as - `return` - `exception` - `operator` - `or`, `in` - - `directive` - Preprocessor directives (`#if` in C) + - `directive` - Preprocessor directives (`#if` in C) - `function` - `fn`, `func` - `storage` - Keywords describing how things are stored - - `type` - The type of something, `class`, `function`, `var`, `let`, etc. + - `type` - The type of something, `class`, `function`, `var`, `let`, etc. - `modifier` - Storage modifiers like `static`, `mut`, `const`, `ref`, etc. - `operator` - `||`, `+=`, `>` @@ -201,6 +215,7 @@ We use a similar set of scopes as - `special` (preprocessor in C) - `tag` - Tags (e.g. `` in HTML) + - `builtin` - `namespace` @@ -215,10 +230,11 @@ We use a similar set of scopes as - `numbered` - `bold` - `italic` + - `strikethrough` - `link` - - `url` - urls pointed to by links - - `label` - non-url link references - - `text` - url and image descriptions in links + - `url` - URLs pointed to by links + - `label` - non-URL link references + - `text` - URL and image descriptions in links - `quote` - `raw` - `inline` @@ -232,74 +248,77 @@ We use a similar set of scopes as #### Interface -These scopes are used for theming the editor interface. +These scopes are used for theming the editor interface: - `markup` - `normal` - - `completion` - for completion doc popup ui - - `hover` - for hover popup ui + - `completion` - for completion doc popup UI + - `hover` - for hover popup UI - `heading` - - `completion` - for completion doc popup ui - - `hover` - for hover popup ui + - `completion` - for completion doc popup UI + - `hover` - for hover popup UI - `raw` - `inline` - - `completion` - for completion doc popup ui - - `hover` - for hover popup ui - - -| Key | Notes | -| --- | --- | -| `ui.background` | | -| `ui.background.separator` | Picker separator below input line | -| `ui.cursor` | | -| `ui.cursor.insert` | | -| `ui.cursor.select` | | -| `ui.cursor.match` | Matching bracket etc. 
| -| `ui.cursor.primary` | Cursor with primary selection | -| `ui.gutter` | Gutter | -| `ui.gutter.selected` | Gutter for the line the cursor is on | -| `ui.linenr` | Line numbers | -| `ui.linenr.selected` | Line number for the line the cursor is on | -| `ui.statusline` | Statusline | -| `ui.statusline.inactive` | Statusline (unfocused document) | -| `ui.statusline.normal` | Statusline mode during normal mode ([only if `editor.color-modes` is enabled][editor-section]) | -| `ui.statusline.insert` | Statusline mode during insert mode ([only if `editor.color-modes` is enabled][editor-section]) | -| `ui.statusline.select` | Statusline mode during select mode ([only if `editor.color-modes` is enabled][editor-section]) | -| `ui.statusline.separator` | Separator character in statusline | -| `ui.popup` | Documentation popups (e.g Space + k) | -| `ui.popup.info` | Prompt for multiple key options | -| `ui.window` | Border lines separating splits | -| `ui.help` | Description box for commands | -| `ui.text` | Command prompts, popup text, etc. | -| `ui.text.focus` | | -| `ui.text.inactive` | Same as `ui.text` but when the text is inactive (e.g. suggestions) | -| `ui.text.info` | The key: command text in `ui.popup.info` boxes | -| `ui.virtual.ruler` | Ruler columns (see the [`editor.rulers` config][editor-section]) | -| `ui.virtual.whitespace` | Visible whitespace characters | -| `ui.virtual.indent-guide` | Vertical indent width guides | -| `ui.menu` | Code and command completion menus | -| `ui.menu.selected` | Selected autocomplete item | -| `ui.menu.scroll` | `fg` sets thumb color, `bg` sets track color of scrollbar | -| `ui.selection` | For selections in the editing area | -| `ui.selection.primary` | | -| `ui.cursorline.primary` | The line of the primary cursor ([if cursorline is enabled][editor-section]) | -| `ui.cursorline.secondary` | The lines of any other cursors ([if cursorline is enabled][editor-section]) | -| `ui.cursorcolumn.primary` | The column of the primary cursor ([if cursorcolumn is enabled][editor-section]) | -| `ui.cursorcolumn.secondary` | The columns of any other cursors ([if cursorcolumn is enabled][editor-section]) | -| `warning` | Diagnostics warning (gutter) | -| `error` | Diagnostics error (gutter) | -| `info` | Diagnostics info (gutter) | -| `hint` | Diagnostics hint (gutter) | -| `diagnostic` | Diagnostics fallback style (editing area) | -| `diagnostic.hint` | Diagnostics hint (editing area) | -| `diagnostic.info` | Diagnostics info (editing area) | -| `diagnostic.warning` | Diagnostics warning (editing area) | -| `diagnostic.error` | Diagnostics error (editing area) | - -You can check compliance to spec with - -```shell -cargo xtask themelint onedark # replace onedark with -``` + - `completion` - for completion doc popup UI + - `hover` - for hover popup UI + + +| Key | Notes | +| --- | --- | +| `ui.background` | | +| `ui.background.separator` | Picker separator below input line | +| `ui.cursor` | | +| `ui.cursor.normal` | | +| `ui.cursor.insert` | | +| `ui.cursor.select` | | +| `ui.cursor.match` | Matching bracket etc. 
| +| `ui.cursor.primary` | Cursor with primary selection | +| `ui.cursor.primary.normal` | | +| `ui.cursor.primary.insert` | | +| `ui.cursor.primary.select` | | +| `ui.gutter` | Gutter | +| `ui.gutter.selected` | Gutter for the line the cursor is on | +| `ui.linenr` | Line numbers | +| `ui.linenr.selected` | Line number for the line the cursor is on | +| `ui.statusline` | Statusline | +| `ui.statusline.inactive` | Statusline (unfocused document) | +| `ui.statusline.normal` | Statusline mode during normal mode ([only if `editor.color-modes` is enabled][editor-section]) | +| `ui.statusline.insert` | Statusline mode during insert mode ([only if `editor.color-modes` is enabled][editor-section]) | +| `ui.statusline.select` | Statusline mode during select mode ([only if `editor.color-modes` is enabled][editor-section]) | +| `ui.statusline.separator` | Separator character in statusline | +| `ui.popup` | Documentation popups (e.g. Space + k) | +| `ui.popup.info` | Prompt for multiple key options | +| `ui.window` | Borderlines separating splits | +| `ui.help` | Description box for commands | +| `ui.text` | Command prompts, popup text, etc. | +| `ui.text.focus` | | +| `ui.text.inactive` | Same as `ui.text` but when the text is inactive (e.g. suggestions) | +| `ui.text.info` | The key: command text in `ui.popup.info` boxes | +| `ui.virtual.ruler` | Ruler columns (see the [`editor.rulers` config][editor-section]) | +| `ui.virtual.whitespace` | Visible whitespace characters | +| `ui.virtual.indent-guide` | Vertical indent width guides | +| `ui.virtual.inlay-hint` | Default style for inlay hints of all kinds | +| `ui.virtual.inlay-hint.parameter` | Style for inlay hints of kind `parameter` (LSPs are not required to set a kind) | +| `ui.virtual.inlay-hint.type` | Style for inlay hints of kind `type` (LSPs are not required to set a kind) | +| `ui.virtual.wrap` | Soft-wrap indicator (see the [`editor.soft-wrap` config][editor-section]) | +| `ui.menu` | Code and command completion menus | +| `ui.menu.selected` | Selected autocomplete item | +| `ui.menu.scroll` | `fg` sets thumb color, `bg` sets track color of scrollbar | +| `ui.selection` | For selections in the editing area | +| `ui.selection.primary` | | +| `ui.highlight` | Highlighted lines in the picker preview | +| `ui.cursorline.primary` | The line of the primary cursor ([if cursorline is enabled][editor-section]) | +| `ui.cursorline.secondary` | The lines of any other cursors ([if cursorline is enabled][editor-section]) | +| `ui.cursorcolumn.primary` | The column of the primary cursor ([if cursorcolumn is enabled][editor-section]) | +| `ui.cursorcolumn.secondary` | The columns of any other cursors ([if cursorcolumn is enabled][editor-section]) | +| `warning` | Diagnostics warning (gutter) | +| `error` | Diagnostics error (gutter) | +| `info` | Diagnostics info (gutter) | +| `hint` | Diagnostics hint (gutter) | +| `diagnostic` | Diagnostics fallback style (editing area) | +| `diagnostic.hint` | Diagnostics hint (editing area) | +| `diagnostic.info` | Diagnostics info (editing area) | +| `diagnostic.warning` | Diagnostics warning (editing area) | +| `diagnostic.error` | Diagnostics error (editing area) | [editor-section]: ./configuration.md#editor-section diff --git a/book/src/usage.md b/book/src/usage.md index a6eb9ec1..81cf8372 100644 --- a/book/src/usage.md +++ b/book/src/usage.md @@ -1,22 +1,43 @@ -# Usage +# Using Helix -(Currently not fully documented, see the [keymappings](./keymap.md) list for more.) 
+ +- [Registers](#registers) + - [User-defined registers](#user-defined-registers) + - [Special registers](#special-registers) +- [Surround](#surround) +- [Selecting and manipulating text with textobjects](#selecting-and-manipulating-text-with-textobjects) +- [Navigating using tree-sitter textobjects](#navigating-using-tree-sitter-textobjects) +- [Moving the selection with syntax-aware motions](#moving-the-selection-with-syntax-aware-motions) + -See [tutor](https://github.com/helix-editor/helix/blob/master/runtime/tutor) (accessible via `hx --tutor` or `:tutor`) for a vimtutor-like introduction. +For a full interactive introduction to Helix, refer to the +[tutor](https://github.com/helix-editor/helix/blob/master/runtime/tutor) which +can be accessed via the command `hx --tutor` or `:tutor`. + +> 💡 Currently, not all functionality is fully documented, please refer to the +> [key mappings](./keymap.md) list. ## Registers -Vim-like registers can be used to yank and store text to be pasted later. Usage is similar, with `"` being used to select a register: +In Helix, registers are storage locations for text and other data, such as the +result of a search. Registers can be used to cut, copy, and paste text, similar +to the clipboard in other text editors. Usage is similar to Vim, with `"` being +used to select a register. + +### User-defined registers + +Helix allows you to create your own named registers for storing text, for +example: - `"ay` - Yank the current selection to register `a`. - `"op` - Paste the text in register `o` after the selection. -If there is a selected register before invoking a change or delete command, the selection will be stored in the register and the action will be carried out: +If a register is selected before invoking a change or delete command, the selection will be stored in the register and the action will be carried out: - `"hc` - Store the selection in register `h` and then change it (delete and enter insert mode). - `"md` - Store the selection in register `m` and delete it. -### Special Registers +### Special registers | Register character | Contains | | --- | --- | @@ -25,41 +46,90 @@ If there is a selected register before invoking a change or delete command, the | `"` | Last yanked text | | `_` | Black hole | -> There is no special register for copying to system clipboard, instead special commands and keybindings are provided. See the [keymap](keymap.md#space-mode) for the specifics. -> The black hole register works as a no-op register, meaning no data will be written to / read from it. +The system clipboard is not directly supported by a special register. Instead, special commands and keybindings are provided. Refer to the +[key map](keymap.md#space-mode) for more details. + +The black hole register is a no-op register, meaning that no data will be read or written to it. ## Surround -Functionality similar to [vim-surround](https://github.com/tpope/vim-surround) is built into -helix. The keymappings have been inspired from [vim-sandwich](https://github.com/machakann/vim-sandwich): +Helix includes built-in functionality similar to [vim-surround](https://github.com/tpope/vim-surround). 
+The keymappings have been inspired from [vim-sandwich](https://github.com/machakann/vim-sandwich): -![surround demo](https://user-images.githubusercontent.com/23398472/122865801-97073180-d344-11eb-8142-8f43809982c6.gif) +![Surround demo](https://user-images.githubusercontent.com/23398472/122865801-97073180-d344-11eb-8142-8f43809982c6.gif) -- `ms` - Add surround characters -- `mr` - Replace surround characters -- `md` - Delete surround characters +| Key Sequence | Action | +| --------------------------------- | --------------------------------------- | +| `ms` (after selecting text) | Add surround characters to selection | +| `mr` | Replace the closest surround characters | +| `md` | Delete the closest surround characters | -`ms` acts on a selection, so select the text first and use `ms`. `mr` and `md` work -on the closest pairs found and selections are not required; use counts to act in outer pairs. +You can use counts to act on outer pairs. -It can also act on multiple selections (yay!). For example, to change every occurrence of `(use)` to `[use]`: +Surround can also act on multiple selections. For example, to change every occurrence of `(use)` to `[use]`: -- `%` to select the whole file -- `s` to split the selections on a search term -- Input `use` and hit Enter -- `mr([` to replace the parens with square brackets +1. `%` to select the whole file +2. `s` to split the selections on a search term +3. Input `use` and hit Enter +4. `mr([` to replace the parentheses with square brackets -Multiple characters are currently not supported, but planned. +Multiple characters are currently not supported, but planned for future release. -## Syntax-tree Motions +## Selecting and manipulating text with textobjects -`Alt-p`, `Alt-o`, `Alt-i`, and `Alt-n` (or `Alt` and arrow keys) move the primary -selection according to the selection's place in the syntax tree. Let's walk -through an example to get familiar with them. Many languages have a syntax like -so for function calls: +In Helix, textobjects are a way to select, manipulate and operate on a piece of +text in a structured way. They allow you to refer to blocks of text based on +their structure or purpose, such as a word, sentence, paragraph, or even a +function or block of code. -``` -func(arg1, arg2, arg3) +![Textobject demo](https://user-images.githubusercontent.com/23398472/124231131-81a4bb00-db2d-11eb-9d10-8e577ca7b177.gif) +![Textobject tree-sitter demo](https://user-images.githubusercontent.com/23398472/132537398-2a2e0a54-582b-44ab-a77f-eb818942203d.gif) + +- `ma` - Select around the object (`va` in Vim, `` in Kakoune) +- `mi` - Select inside the object (`vi` in Vim, `` in Kakoune) + +| Key after `mi` or `ma` | Textobject selected | +| --- | --- | +| `w` | Word | +| `W` | WORD | +| `p` | Paragraph | +| `(`, `[`, `'`, etc. | Specified surround pairs | +| `m` | The closest surround pair | +| `f` | Function | +| `c` | Class | +| `a` | Argument/parameter | +| `o` | Comment | +| `t` | Test | +| `g` | Change | + +> 💡 `f`, `c`, etc. need a tree-sitter grammar active for the current +document and a special tree-sitter query file to work properly. [Only +some grammars][lang-support] currently have the query file implemented. +Contributions are welcome! + +## Navigating using tree-sitter textobjects + +Navigating between functions, classes, parameters, and other elements is +possible using tree-sitter and textobject queries. For +example to move to the next function use `]f`, to move to previous +class use `[c`, and so on. 
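
These motions are driven by per-language textobject query files. As a rough illustration only (a minimal sketch using node names from the Rust tree-sitter grammar; the query files actually shipped under Helix's `runtime/queries/` may differ), such a query pairs a syntax node with an `.inside` or `.around` capture:

```scheme
; Hypothetical textobjects.scm sketch -- not the exact queries shipped with Helix.
; `function.around` covers the whole item, `function.inside` only its body.
(function_item
  body: (block) @function.inside) @function.around

; Treat struct definitions as the "class" textobject.
(struct_item) @class.around

; Each parameter in a parameter list.
(parameters
  (parameter) @parameter.inside)
```

A language only gains `]f`, `[c` and the related motions once a query file with such captures exists for its grammar, which is why only some languages currently support them.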
+ +![Tree-sitter-nav-demo][tree-sitter-nav-demo] + +For the full reference see the [unimpaired][unimpaired-keybinds] section of the key bind +documentation. + +> 💡 This feature relies on tree-sitter textobjects +> and requires the corresponding query file to work properly. + +## Moving the selection with syntax-aware motions + +`Alt-p`, `Alt-o`, `Alt-i`, and `Alt-n` (or `Alt` and arrow keys) allow you to move the +selection according to its location in the syntax tree. For example, many languages have the +following syntax for function calls: + +```js +func(arg1, arg2, arg3); ``` A function call might be parsed by tree-sitter into a tree like the following. @@ -93,77 +163,29 @@ a more intuitive tree format: └──────────┘ └──────────┘ └──────────┘ ``` -Say we have a selection that wraps `arg1`. The selection is on the `arg1` leaf -in the tree above. +If you have a selection that wraps `arg1` (see the tree above), and you use +`Alt-n`, it will select the next sibling in the syntax tree: `arg2`. -``` +```js +// before func([arg1], arg2, arg3) +// after +func(arg1, [arg2], arg3); ``` -Using `Alt-n` would select the next sibling in the syntax tree: `arg2`. +Similarly, `Alt-o` will expand the selection to the parent node, in this case, the +arguments node. -``` -func(arg1, [arg2], arg3) -``` - -While `Alt-o` would expand the selection to the parent node. In the tree above we -can see that we would select the `arguments` node. - -``` -func[(arg1, arg2, arg3)] +```js +func[(arg1, arg2, arg3)]; ``` There is also some nuanced behavior that prevents you from getting stuck on a -node with no sibling. If we have a selection on `arg1`, `Alt-p` would bring us -to the previous child node. Since `arg1` doesn't have a sibling to its left, -though, we climb the syntax tree and then take the previous selection. So -`Alt-p` will move the selection over to the "func" `identifier`. - -``` -[func](arg1, arg2, arg3) -``` - -## Textobjects - -![textobject-demo](https://user-images.githubusercontent.com/23398472/124231131-81a4bb00-db2d-11eb-9d10-8e577ca7b177.gif) -![textobject-treesitter-demo](https://user-images.githubusercontent.com/23398472/132537398-2a2e0a54-582b-44ab-a77f-eb818942203d.gif) - -- `ma` - Select around the object (`va` in Vim, `` in Kakoune) -- `mi` - Select inside the object (`vi` in Vim, `` in Kakoune) - -| Key after `mi` or `ma` | Textobject selected | -| --- | --- | -| `w` | Word | -| `W` | WORD | -| `p` | Paragraph | -| `(`, `[`, `'`, etc | Specified surround pairs | -| `m` | Closest surround pair | -| `f` | Function | -| `c` | Class | -| `a` | Argument/parameter | -| `o` | Comment | -| `t` | Test | -| `g` | Change | - -> NOTE: `f`, `c`, etc need a tree-sitter grammar active for the current -document and a special tree-sitter query file to work properly. [Only -some grammars][lang-support] currently have the query file implemented. -Contributions are welcome! - -## Tree-sitter Textobject Based Navigation - -Navigating between functions, classes, parameters, etc is made -possible by leveraging tree-sitter and textobjects queries. For -example to move to the next function use `]f`, to move to previous -class use `[c`, and so on. - -![tree-sitter-nav-demo][tree-sitter-nav-demo] - -See the [unimpaired][unimpaired-keybinds] section of the keybind -documentation for the full reference. - -> NOTE: This feature is dependent on tree-sitter based textobjects -and therefore requires the corresponding query file to work properly. +node with no sibling. 
When using `Alt-p` with a selection on `arg1`, the previous +child node will be selected. In the event that `arg1` does not have a previous +sibling, the selection will move up the syntax tree and select the previous +element. As a result, using `Alt-p` with a selection on `arg1` will move the +selection to the "func" `identifier`. [lang-support]: ./lang-support.md [unimpaired-keybinds]: ./keymap.md#unimpaired diff --git a/book/theme/css/variables.css b/book/theme/css/variables.css index 1bf91b19..5d0978cc 100644 --- a/book/theme/css/variables.css +++ b/book/theme/css/variables.css @@ -48,6 +48,18 @@ --searchresults-border-color: #888; --searchresults-li-bg: #252932; --search-mark-bg: #e3b171; + --hljs-background: #191f26; + --hljs-color: #e6e1cf; + --hljs-quote: #5c6773; + --hljs-variable: #ff7733; + --hljs-type: #ffee99; + --hljs-title: #b8cc52; + --hljs-symbol: #ffb454; + --hljs-selector-tag: #ff7733; + --hljs-selector-tag: #36a3d9; + --hljs-selector-tag: #00568d; + --hljs-selector-tag: #91b362; + --hljs-selector-tag: #d96c75; } .coal { @@ -88,6 +100,18 @@ --searchresults-border-color: #98a3ad; --searchresults-li-bg: #2b2b2f; --search-mark-bg: #355c7d; + --hljs-background: #969896; + --hljs-color: #cc6666; + --hljs-quote: #de935f; + --hljs-variable: #f0c674; + --hljs-type: #b5bd68; + --hljs-title: #8abeb7; + --hljs-symbol: #81a2be; + --hljs-selector-tag: #b294bb; + --hljs-selector-tag: #1d1f21; + --hljs-selector-tag: #c5c8c6; + --hljs-selector-tag: #718c00; + --hljs-selector-tag: #c82829; } .light { @@ -128,6 +152,14 @@ --searchresults-border-color: #888; --searchresults-li-bg: #e4f2fe; --search-mark-bg: #a2cff5; + --hljs-background: #f6f7f6; + --hljs-color: #000; + --hljs-quote: #575757; + --hljs-variable: #d70025; + --hljs-type: #b21e00; + --hljs-title: #0030f2; + --hljs-symbol: #008200; + --hljs-selector-tag: #9d00ec; } .navy { @@ -168,6 +200,19 @@ --searchresults-border-color: #5c5c68; --searchresults-li-bg: #242430; --search-mark-bg: #a2cff5; + + --hljs-background: #969896; + --hljs-color: #cc6666; + --hljs-quote: #de935f; + --hljs-variable: #f0c674; + --hljs-type: #b5bd68; + --hljs-title: #8abeb7; + --hljs-symbol: #81a2be; + --hljs-selector-tag: #b294bb; + --hljs-selector-tag: #1d1f21; + --hljs-selector-tag: #c5c8c6; + --hljs-selector-tag: #718c00; + --hljs-selector-tag: #c82829; } .rust { @@ -208,6 +253,14 @@ --searchresults-border-color: #888; --searchresults-li-bg: #dec2a2; --search-mark-bg: #e69f67; + --hljs-background: #f6f7f6; + --hljs-color: #000; + --hljs-quote: #575757; + --hljs-variable: #d70025; + --hljs-type: #b21e00; + --hljs-title: #0030f2; + --hljs-symbol: #008200; + --hljs-selector-tag: #9d00ec; } @media (prefers-color-scheme: dark) { @@ -292,7 +345,15 @@ --searchresults-header-fg: #5f5f71; --searchresults-border-color: #5c5c68; --searchresults-li-bg: #242430; - --search-mark-bg: #a2cff5; + --search-mark-bg: #acff5; + --hljs-background: #2f1e2e; + --hljs-color: #a39e9b; + --hljs-quote: #8d8687; + --hljs-variable: #ef6155; + --hljs-type: #f99b15; + --hljs-title: #fec418; + --hljs-symbol: #48b685; + --hljs-selector-tag: #815ba4; } .colibri { @@ -338,5 +399,13 @@ --searchresults-border-color: #5c5c68; --searchresults-li-bg: #242430; --search-mark-bg: #a2cff5; + --hljs-background: #TODO; + --hljs-color: #TODO; + --hljs-quote: #TODO; + --hljs-variable: #TODO; + --hljs-type: #TODO; + --hljs-title: #TODO; + --hljs-symbol: #TODO; + --hljs-selector-tag: #TODO; */ } diff --git a/book/theme/highlight.css b/book/theme/highlight.css index 8dce7d65..a2db0500 100644 --- 
a/book/theme/highlight.css +++ b/book/theme/highlight.css @@ -7,12 +7,12 @@ code.hljs { padding:3px 5px } .hljs { - background:#2f1e2e; - color:#a39e9b + background: var(--hljs-background); + color: var(--hljs-color); } .hljs-comment, .hljs-quote { - color:#8d8687 + color: var(--hljs-quote) } .hljs-link, .hljs-meta, @@ -23,7 +23,7 @@ code.hljs { .hljs-tag, .hljs-template-variable, .hljs-variable { - color:#ef6155 + color: var(--hljs-variable) } .hljs-built_in, .hljs-deletion, @@ -31,22 +31,22 @@ code.hljs { .hljs-number, .hljs-params, .hljs-type { - color:#f99b15 + color: var(--hljs-type) } .hljs-attribute, .hljs-section, .hljs-title { - color:#fec418 + color: var(--hljs-title) } .hljs-addition, .hljs-bullet, .hljs-string, .hljs-symbol { - color:#48b685 + color: var(--hljs-symbol) } .hljs-keyword, .hljs-selector-tag { - color:#815ba4 + color: var(--hljs-selector-tag) } .hljs-emphasis { font-style:italic diff --git a/contrib/Helix.appdata.xml b/contrib/Helix.appdata.xml new file mode 100644 index 00000000..a2428497 --- /dev/null +++ b/contrib/Helix.appdata.xml @@ -0,0 +1,87 @@ + + + com.helix_editor.Helix + CC0-1.0 + MPL-2.0 + Helix + A post-modern text editor + + +

+  <description>
+    <p>
+      Helix is a terminal-based text editor inspired by Kakoune / Neovim and written in Rust.
+    </p>
+    <ul>
+      <li>Vim-like modal editing</li>
+      <li>Multiple selections</li>
+      <li>Built-in language server support</li>
+      <li>Smart, incremental syntax highlighting and code editing via tree-sitter</li>
+    </ul>
+  </description>
+ + Helix.desktop + + + + Helix with default theme + https://github.com/helix-editor/helix/raw/d4565b4404cabc522bd60822abd374755581d751/screenshot.png + + + + https://helix-editor.com/ + https://opencollective.com/helix-editor + https://docs.helix-editor.com/ + https://github.com/helix-editor/helix + https://github.com/helix-editor/helix/issues + + + + + + https://helix-editor.com/news/release-22-12-highlights/ + + + https://helix-editor.com/news/release-22-08-highlights/ + + + https://helix-editor.com/news/release-22-05-highlights/ + + + https://helix-editor.com/news/release-22-03-highlights/ + + + + + keyboard + + + + Utility + TextEditor + + + + text + editor + development + programming + + + + hx + text/english + text/plain + text/x-makefile + text/x-c++hdr + text/x-c++src + text/x-chdr + text/x-csrc + text/x-java + text/x-moc + text/x-pascal + text/x-tcl + text/x-tex + application/x-shellscript + text/x-c + text/x-c++ + +
diff --git a/docs/CONTRIBUTING.md b/docs/CONTRIBUTING.md index 491cd424..982b2237 100644 --- a/docs/CONTRIBUTING.md +++ b/docs/CONTRIBUTING.md @@ -8,7 +8,10 @@ Some suggestions to get started: - Help with packaging on various distributions needed! - To use print debugging to the [Helix log file][log-file], you must: * Print using `log::info!`, `warn!`, or `error!`. (`log::info!("helix!")`) - * Pass the appropriate verbosity level option for the desired log level. (`hx -v ` for info, more `v`s for higher severity inclusive) + * Pass the appropriate verbosity level option for the desired log level. (`hx -v ` for info, more `v`s for higher verbosity) + * Want to display the logs in a separate file instead of using the `:log-open` command in your compiled Helix editor? Start your debug version with `cargo run -- --log foo.log` and in a new terminal use `tail -f foo.log` +- Instead of running a release version of Helix, while developing you may want to run in debug mode with `cargo run` which is way faster to compile +- Looking for even faster compile times? Give a try to [mold](https://github.com/rui314/mold) - If your preferred language is missing, integrating a tree-sitter grammar for it and defining syntax highlight queries for it is straight forward and doesn't require much knowledge of the internals. @@ -30,7 +33,13 @@ inside the project. We use [xtask][xtask] as an ad-hoc task runner and thus do not require any dependencies other than `cargo` (You don't have to `cargo install` anything either). -# Integration tests +# Testing + +## Unit tests/Documentation tests + +Run `cargo test --workspace` to run unit tests and documentation tests in all packages. + +## Integration tests Integration tests for helix-term can be run with `cargo integration-test`. Code contributors are strongly encouraged to write integration tests for their code. diff --git a/docs/releases.md b/docs/releases.md index ec0b7270..6e7c37c6 100644 --- a/docs/releases.md +++ b/docs/releases.md @@ -5,6 +5,7 @@ Helix releases are versioned in the Calendar Versioning scheme: we'll use `` as a placeholder for the tag being published. 
* Merge the changelog PR +* Add new `` entry in `contrib/Helix.appdata.xml` with release information according to the [AppStream spec](https://www.freedesktop.org/software/appstream/docs/sect-Metadata-Releases.html) * Tag and push * `git tag -s -m "" -a && git push` * Make sure to switch to master and pull first diff --git a/flake.lock b/flake.lock index 4cf1018c..fa292273 100644 --- a/flake.lock +++ b/flake.lock @@ -16,22 +16,6 @@ "type": "github" } }, - "devshell": { - "flake": false, - "locked": { - "lastModified": 1667210711, - "narHash": "sha256-IoErjXZAkzYWHEpQqwu/DeRNJGFdR7X2OGbkhMqMrpw=", - "owner": "numtide", - "repo": "devshell", - "rev": "96a9dd12b8a447840cc246e17a47b81a4268bba7", - "type": "github" - }, - "original": { - "owner": "numtide", - "repo": "devshell", - "type": "github" - } - }, "dream2nix": { "inputs": { "alejandra": [ @@ -42,10 +26,12 @@ ], "crane": "crane", "devshell": [ + "nci" + ], + "flake-parts": [ "nci", - "devshell" + "parts" ], - "flake-parts": "flake-parts", "flake-utils-pre-commit": [ "nci" ], @@ -70,14 +56,17 @@ ], "pre-commit-hooks": [ "nci" + ], + "pruned-racket-catalog": [ + "nci" ] }, "locked": { - "lastModified": 1671323629, - "narHash": "sha256-9KHTPjIDjfnzZ4NjpE3gGIVHVHopy6weRDYO/7Y3hF8=", + "lastModified": 1677289985, + "narHash": "sha256-lUp06cTTlWubeBGMZqPl9jODM99LpWMcwxRiscFAUJg=", "owner": "nix-community", "repo": "dream2nix", - "rev": "2d7d68505c8619410df2c6b6463985f97cbcba6e", + "rev": "28b973a8d4c30cc1cbb3377ea2023a76bc3fb889", "type": "github" }, "original": { @@ -86,24 +75,6 @@ "type": "github" } }, - "flake-parts": { - "inputs": { - "nixpkgs-lib": "nixpkgs-lib" - }, - "locked": { - "lastModified": 1668450977, - "narHash": "sha256-cfLhMhnvXn6x1vPm+Jow3RiFAUSCw/l1utktCw5rVA4=", - "owner": "hercules-ci", - "repo": "flake-parts", - "rev": "d591857e9d7dd9ddbfba0ea02b43b927c3c0f1fa", - "type": "github" - }, - "original": { - "owner": "hercules-ci", - "repo": "flake-parts", - "type": "github" - } - }, "flake-utils": { "locked": { "lastModified": 1659877975, @@ -119,23 +90,40 @@ "type": "github" } }, + "mk-naked-shell": { + "flake": false, + "locked": { + "lastModified": 1676572903, + "narHash": "sha256-oQoDHHUTxNVSURfkFcYLuAK+btjs30T4rbEUtCUyKy8=", + "owner": "yusdacra", + "repo": "mk-naked-shell", + "rev": "aeca9f8aa592f5e8f71f407d081cb26fd30c5a57", + "type": "github" + }, + "original": { + "owner": "yusdacra", + "repo": "mk-naked-shell", + "type": "github" + } + }, "nci": { "inputs": { - "devshell": "devshell", "dream2nix": "dream2nix", + "mk-naked-shell": "mk-naked-shell", "nixpkgs": [ "nixpkgs" ], + "parts": "parts", "rust-overlay": [ "rust-overlay" ] }, "locked": { - "lastModified": 1671430291, - "narHash": "sha256-UIc7H8F3N8rK72J/Vj5YJdV72tvDvYjH+UPsOFvlcsE=", + "lastModified": 1677297103, + "narHash": "sha256-ArlJIbp9NGV9yvhZdV0SOUFfRlI/kHeKoCk30NbSiLc=", "owner": "yusdacra", "repo": "nix-cargo-integration", - "rev": "b1b0d38b8c3b0d0e6a38638d5bbe10b0bc67522c", + "rev": "a79272a2cb0942392bb3a5bf9a3ec6bc568795b2", "type": "github" }, "original": { @@ -146,11 +134,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1671359686, - "narHash": "sha256-3MpC6yZo+Xn9cPordGz2/ii6IJpP2n8LE8e/ebUXLrs=", + "lastModified": 1677063315, + "narHash": "sha256-qiB4ajTeAOVnVSAwCNEEkoybrAlA+cpeiBxLobHndE8=", "owner": "nixos", "repo": "nixpkgs", - "rev": "04f574a1c0fde90b51bf68198e2297ca4e7cccf4", + "rev": "988cc958c57ce4350ec248d2d53087777f9e1949", "type": "github" }, "original": { @@ -163,11 +151,11 @@ "nixpkgs-lib": { "locked": { "dir": "lib", - 
"lastModified": 1665349835, - "narHash": "sha256-UK4urM3iN80UXQ7EaOappDzcisYIuEURFRoGQ/yPkug=", + "lastModified": 1675183161, + "narHash": "sha256-Zq8sNgAxDckpn7tJo7V1afRSk2eoVbu3OjI1QklGLNg=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "34c5293a71ffdb2fe054eb5288adc1882c1eb0b1", + "rev": "e1e1b192c1a5aab2960bf0a0bd53a2e8124fa18e", "type": "github" }, "original": { @@ -178,10 +166,50 @@ "type": "github" } }, + "parts": { + "inputs": { + "nixpkgs-lib": [ + "nci", + "nixpkgs" + ] + }, + "locked": { + "lastModified": 1675933616, + "narHash": "sha256-/rczJkJHtx16IFxMmAWu5nNYcSXNg1YYXTHoGjLrLUA=", + "owner": "hercules-ci", + "repo": "flake-parts", + "rev": "47478a4a003e745402acf63be7f9a092d51b83d7", + "type": "github" + }, + "original": { + "owner": "hercules-ci", + "repo": "flake-parts", + "type": "github" + } + }, + "parts_2": { + "inputs": { + "nixpkgs-lib": "nixpkgs-lib" + }, + "locked": { + "lastModified": 1675933616, + "narHash": "sha256-/rczJkJHtx16IFxMmAWu5nNYcSXNg1YYXTHoGjLrLUA=", + "owner": "hercules-ci", + "repo": "flake-parts", + "rev": "47478a4a003e745402acf63be7f9a092d51b83d7", + "type": "github" + }, + "original": { + "owner": "hercules-ci", + "repo": "flake-parts", + "type": "github" + } + }, "root": { "inputs": { "nci": "nci", "nixpkgs": "nixpkgs", + "parts": "parts_2", "rust-overlay": "rust-overlay" } }, @@ -193,11 +221,11 @@ ] }, "locked": { - "lastModified": 1671416426, - "narHash": "sha256-kpSH1Jrxfk2qd0pRPJn1eQdIOseGv5JuE+YaOrqU9s4=", + "lastModified": 1677292251, + "narHash": "sha256-D+6q5Z2MQn3UFJtqsM5/AvVHi3NXKZTIMZt1JGq/spA=", "owner": "oxalica", "repo": "rust-overlay", - "rev": "fbaaff24f375ac25ec64268b0a0d63f91e474b7d", + "rev": "34cdbf6ad480ce13a6a526f57d8b9e609f3d65dc", "type": "github" }, "original": { diff --git a/flake.nix b/flake.nix index 673f3cf6..2ac76488 100644 --- a/flake.nix +++ b/flake.nix @@ -12,16 +12,10 @@ inputs.nixpkgs.follows = "nixpkgs"; inputs.rust-overlay.follows = "rust-overlay"; }; + parts.url = "github:hercules-ci/flake-parts"; }; - outputs = { - self, - nixpkgs, - nci, - ... 
- }: let - lib = nixpkgs.lib; - ncl = nci.lib.nci-lib; + outputs = inp: let mkRootPath = rel: builtins.path { path = "${toString ./.}/${rel}"; @@ -32,6 +26,12 @@ ".envrc" ".ignore" ".github" + ".gitignore" + "logo.svg" + "logo_dark.svg" + "logo_light.svg" + "rust-toolchain.toml" + "rustfmt.toml" "runtime" "screenshot.png" "book" @@ -46,6 +46,7 @@ "flake.lock" ]; ignorePaths = path: type: let + inherit (inp.nixpkgs) lib; # split the nix store path into its components components = lib.splitString "/" path; # drop off the `/nix/hash-source` section from the path @@ -61,118 +62,107 @@ # filter out unnecessary paths filter = ignorePaths; }; - outputs = nci.lib.makeOutputs { - root = ./.; - config = common: { - outputs = { - # rename helix-term to helix since it's our main package - rename = {"helix-term" = "helix";}; - # Set default app to hx (binary is from helix-term release build) - # Set default package to helix-term release build - defaults = { - app = "hx"; - package = "helix"; - }; - }; - cCompiler.package = with common.pkgs; - if stdenv.isLinux - then gcc - else clang; - shell = { - packages = with common.pkgs; - [lld_13 cargo-flamegraph rust-analyzer] - ++ (lib.optional (stdenv.isx86_64 && stdenv.isLinux) cargo-tarpaulin) - ++ (lib.optional stdenv.isLinux lldb) - ++ (lib.optional stdenv.isDarwin darwin.apple_sdk.frameworks.CoreFoundation); - env = [ - { - name = "HELIX_RUNTIME"; - eval = "$PWD/runtime"; - } - { - name = "RUST_BACKTRACE"; - value = "1"; - } + in + inp.parts.lib.mkFlake {inputs = inp;} { + imports = [inp.nci.flakeModule]; + systems = [ + "x86_64-linux" + "x86_64-darwin" + "aarch64-linux" + "aarch64-darwin" + "i686-linux" + ]; + perSystem = { + config, + pkgs, + lib, + ... + }: let + makeOverridableHelix = old: config: let + grammars = pkgs.callPackage ./grammars.nix config; + runtimeDir = pkgs.runCommand "helix-runtime" {} '' + mkdir -p $out + ln -s ${mkRootPath "runtime"}/* $out + rm -r $out/grammars + ln -s ${grammars} $out/grammars + ''; + helix-wrapped = + pkgs.runCommand + old.name { - name = "RUSTFLAGS"; - eval = - if common.pkgs.stdenv.isLinux - then "$RUSTFLAGS\" -C link-arg=-fuse-ld=lld -C target-cpu=native -Clink-arg=-Wl,--no-rosegment\"" - else "$RUSTFLAGS"; + inherit (old) pname version; + meta = old.meta or {}; + passthru = + (old.passthru or {}) + // { + unwrapped = old; + }; + nativeBuildInputs = [pkgs.makeWrapper]; + makeWrapperArgs = config.makeWrapperArgs or []; } - ]; - }; - }; - pkgConfig = common: { - helix-term = { - # Wrap helix with runtime - wrapper = _: old: let - inherit (common) pkgs; - makeOverridableHelix = old: config: let - grammars = pkgs.callPackage ./grammars.nix config; - runtimeDir = pkgs.runCommand "helix-runtime" {} '' - mkdir -p $out - ln -s ${mkRootPath "runtime"}/* $out - rm -r $out/grammars - ln -s ${grammars} $out/grammars - ''; - helix-wrapped = - common.internal.pkgsSet.utils.wrapDerivation old - { - nativeBuildInputs = [pkgs.makeWrapper]; - makeWrapperArgs = config.makeWrapperArgs or []; - } - '' - rm -rf $out/bin - mkdir -p $out/bin - ln -sf ${old}/bin/* $out/bin/ - wrapProgram "$out/bin/hx" ''${makeWrapperArgs[@]} --set HELIX_RUNTIME "${runtimeDir}" - ''; - in - helix-wrapped - // {override = makeOverridableHelix old;}; - in - makeOverridableHelix old {}; - overrides.fix-build.overrideAttrs = prev: { - src = filteredSource; - - # disable fetching and building of tree-sitter grammars in the helix-term build.rs - HELIX_DISABLE_AUTO_GRAMMAR_BUILD = "1"; - - buildInputs = ncl.addBuildInputs prev 
[common.config.cCompiler.package.cc.lib]; - - # link languages and theme toml files since helix-term expects them (for tests) - preConfigure = '' - ${prev.preConfigure or ""} - ${ - lib.concatMapStringsSep - "\n" - (path: "ln -sf ${mkRootPath path} ..") - ["languages.toml" "theme.toml" "base16_theme.toml"] - } + '' + cp -rs --no-preserve=mode,ownership ${old} $out + wrapProgram "$out/bin/hx" ''${makeWrapperArgs[@]} --set HELIX_RUNTIME "${runtimeDir}" ''; - checkPhase = ":"; - - meta.mainProgram = "hx"; + in + helix-wrapped + // { + override = makeOverridableHelix old; + passthru = + helix-wrapped.passthru + // { + wrapper = old: makeOverridableHelix old config; + }; + }; + stdenv = + if pkgs.stdenv.isLinux + then pkgs.stdenv + else pkgs.clangStdenv; + rustFlagsEnv = + if stdenv.isLinux + then ''$RUSTFLAGS -C link-arg=-fuse-ld=lld -C target-cpu=native -Clink-arg=-Wl,--no-rosegment'' + else "$RUSTFLAGS"; + in { + # by default NCI adds rust-analyzer component, but helix toolchain doesn't have rust-analyzer + nci.toolchains.shell.components = ["rust-src" "rustfmt" "clippy"]; + nci.projects."helix-project".relPath = ""; + nci.crates."helix-term" = { + overrides = { + add-meta.override = _: {meta.mainProgram = "hx";}; + add-inputs.overrideAttrs = prev: { + buildInputs = (prev.buildInputs or []) ++ [stdenv.cc.cc.lib]; + }; + disable-grammar-builds = { + # disable fetching and building of tree-sitter grammars in the helix-term build.rs + HELIX_DISABLE_AUTO_GRAMMAR_BUILD = "1"; + }; + disable-tests = {checkPhase = ":";}; + set-stdenv.override = _: {inherit stdenv;}; + set-filtered-src.override = _: {src = filteredSource;}; }; }; + + packages.helix-unwrapped = config.nci.outputs."helix-term".packages.release; + packages.helix-unwrapped-dev = config.nci.outputs."helix-term".packages.dev; + packages.helix = makeOverridableHelix config.packages.helix-unwrapped {}; + packages.helix-dev = makeOverridableHelix config.packages.helix-unwrapped-dev {}; + packages.default = config.packages.helix; + + devShells.default = config.nci.outputs."helix-project".devShell.overrideAttrs (old: { + nativeBuildInputs = + (old.nativeBuildInputs or []) + ++ (with pkgs; [lld_13 cargo-flamegraph rust-analyzer]) + ++ (lib.optional (stdenv.isx86_64 && stdenv.isLinux) pkgs.cargo-tarpaulin) + ++ (lib.optional stdenv.isLinux pkgs.lldb) + ++ (lib.optional stdenv.isDarwin pkgs.darwin.apple_sdk.frameworks.CoreFoundation); + shellHook = '' + export HELIX_RUNTIME="$PWD/runtime" + export RUST_BACKTRACE="1" + export RUSTFLAGS="${rustFlagsEnv}" + ''; + }); }; }; - in - outputs - // { - packages = - lib.mapAttrs - ( - system: packages: - packages - // { - helix-unwrapped = packages.helix.passthru.unwrapped; - helix-unwrapped-dev = packages.helix-dev.passthru.unwrapped; - } - ) - outputs.packages; - }; nixConfig = { extra-substituters = ["https://helix.cachix.org"]; diff --git a/helix-core/Cargo.toml b/helix-core/Cargo.toml index ca6cd51e..9dfef9ae 100644 --- a/helix-core/Cargo.toml +++ b/helix-core/Cargo.toml @@ -17,7 +17,7 @@ integration = [] [dependencies] helix-loader = { version = "0.6", path = "../helix-loader" } -ropey = { version = "1.5.1", default-features = false, features = ["simd"] } +ropey = { version = "1.6.0", default-features = false, features = ["simd"] } smallvec = "1.10" smartstring = "1.0.1" unicode-segmentation = "1.10" @@ -29,14 +29,14 @@ tree-sitter = "0.20" once_cell = "1.17" arc-swap = "1" regex = "1" -bitflags = "1.3" -ahash = "0.8.2" -hashbrown = { version = "0.13.1", features = ["raw"] } +bitflags = "2.0" +ahash 
= "0.8.3" +hashbrown = { version = "0.13.2", features = ["raw"] } log = "0.4" serde = { version = "1.0", features = ["derive"] } serde_json = "1.0" -toml = "0.5" +toml = "0.7" imara-diff = "0.1.0" @@ -49,3 +49,4 @@ textwrap = "0.16.0" [dev-dependencies] quickcheck = { version = "1", default-features = false } +indoc = "2.0.1" diff --git a/helix-core/src/comment.rs b/helix-core/src/comment.rs index ec5d7a45..9c7e50f3 100644 --- a/helix-core/src/comment.rs +++ b/helix-core/src/comment.rs @@ -45,7 +45,7 @@ fn find_line_comment( // determine margin of 0 or 1 for uncommenting; if any comment token is not followed by a space, // a margin of 0 is used for all lines. - if matches!(line_slice.get_char(pos + token_len), Some(c) if c != ' ') { + if !matches!(line_slice.get_char(pos + token_len), Some(c) if c == ' ') { margin = 0; } @@ -68,7 +68,7 @@ pub fn toggle_line_comments(doc: &Rope, selection: &Selection, token: Option<&st let mut min_next_line = 0; for selection in selection { let (start, end) = selection.line_range(text); - let start = start.max(min_next_line).min(text.len_lines()); + let start = start.clamp(min_next_line, text.len_lines()); let end = (end + 1).min(text.len_lines()); lines.extend(start..end); @@ -108,8 +108,8 @@ mod test { let text = doc.slice(..); let res = find_line_comment("//", text, 0..3); - // (commented = true, to_change = [line 0, line 2], min = col 2, margin = 1) - assert_eq!(res, (false, vec![0, 2], 2, 1)); + // (commented = true, to_change = [line 0, line 2], min = col 2, margin = 0) + assert_eq!(res, (false, vec![0, 2], 2, 0)); // comment let transaction = toggle_line_comments(&doc, &selection, None); @@ -136,6 +136,17 @@ mod test { assert_eq!(doc, " 1\n\n 2\n 3"); assert!(selection.len() == 1); // to ignore the selection unused warning + // 0 margin comments, with no space + doc = Rope::from("//"); + // reset the selection. + selection = Selection::single(0, doc.len_chars() - 1); + + let transaction = toggle_line_comments(&doc, &selection, None); + transaction.apply(&mut doc); + selection = selection.map(transaction.changes()); + assert_eq!(doc, ""); + assert!(selection.len() == 1); // to ignore the selection unused warning + // TODO: account for uncommenting with uneven comment indentation } } diff --git a/helix-core/src/diagnostic.rs b/helix-core/src/diagnostic.rs index 6b5da17e..58ddb038 100644 --- a/helix-core/src/diagnostic.rs +++ b/helix-core/src/diagnostic.rs @@ -35,7 +35,7 @@ pub enum DiagnosticTag { Deprecated, } -/// Corresponds to [`lsp_types::Diagnostic`](https://docs.rs/lsp-types/0.91.0/lsp_types/struct.Diagnostic.html) +/// Corresponds to [`lsp_types::Diagnostic`](https://docs.rs/lsp-types/0.94.0/lsp_types/struct.Diagnostic.html) #[derive(Debug, Clone)] pub struct Diagnostic { pub range: Range, diff --git a/helix-core/src/doc_formatter.rs b/helix-core/src/doc_formatter.rs new file mode 100644 index 00000000..c7dc9081 --- /dev/null +++ b/helix-core/src/doc_formatter.rs @@ -0,0 +1,384 @@ +//! The `DocumentFormatter` forms the bridge between the raw document text +//! and onscreen positioning. It yields the text graphemes as an iterator +//! and traverses (part) of the document text. During that traversal it +//! handles grapheme detection, softwrapping and annotations. +//! It yields `FormattedGrapheme`s and their corresponding visual coordinates. +//! +//! As both virtual text and softwrapping can insert additional lines into the document +//! it is generally not possible to find the start of the previous visual line. +//! 
Instead the `DocumentFormatter` starts at the last "checkpoint" (usually a linebreak) +//! called a "block" and the caller must advance it as needed. + +use std::borrow::Cow; +use std::fmt::Debug; +use std::mem::{replace, take}; + +#[cfg(test)] +mod test; + +use unicode_segmentation::{Graphemes, UnicodeSegmentation}; + +use crate::graphemes::{Grapheme, GraphemeStr}; +use crate::syntax::Highlight; +use crate::text_annotations::TextAnnotations; +use crate::{Position, RopeGraphemes, RopeSlice}; + +/// TODO make Highlight a u32 to reduce the size of this enum to a single word. +#[derive(Debug, Clone, Copy)] +pub enum GraphemeSource { + Document { + codepoints: u32, + }, + /// Inline virtual text can not be highlighted with a `Highlight` iterator + /// because it's not part of the document. Instead the `Highlight` + /// is emitted right by the document formatter + VirtualText { + highlight: Option, + }, +} + +#[derive(Debug, Clone)] +pub struct FormattedGrapheme<'a> { + pub grapheme: Grapheme<'a>, + pub source: GraphemeSource, +} + +impl<'a> FormattedGrapheme<'a> { + pub fn new( + g: GraphemeStr<'a>, + visual_x: usize, + tab_width: u16, + source: GraphemeSource, + ) -> FormattedGrapheme<'a> { + FormattedGrapheme { + grapheme: Grapheme::new(g, visual_x, tab_width), + source, + } + } + /// Returns whether this grapheme is virtual inline text + pub fn is_virtual(&self) -> bool { + matches!(self.source, GraphemeSource::VirtualText { .. }) + } + + pub fn placeholder() -> Self { + FormattedGrapheme { + grapheme: Grapheme::Other { g: " ".into() }, + source: GraphemeSource::Document { codepoints: 0 }, + } + } + + pub fn doc_chars(&self) -> usize { + match self.source { + GraphemeSource::Document { codepoints } => codepoints as usize, + GraphemeSource::VirtualText { .. 
} => 0, + } + } + + pub fn is_whitespace(&self) -> bool { + self.grapheme.is_whitespace() + } + + pub fn width(&self) -> usize { + self.grapheme.width() + } + + pub fn is_word_boundary(&self) -> bool { + self.grapheme.is_word_boundary() + } +} + +#[derive(Debug, Clone)] +pub struct TextFormat { + pub soft_wrap: bool, + pub tab_width: u16, + pub max_wrap: u16, + pub max_indent_retain: u16, + pub wrap_indicator: Box, + pub wrap_indicator_highlight: Option, + pub viewport_width: u16, +} + +// test implementation is basically only used for testing or when softwrap is always disabled +impl Default for TextFormat { + fn default() -> Self { + TextFormat { + soft_wrap: false, + tab_width: 4, + max_wrap: 3, + max_indent_retain: 4, + wrap_indicator: Box::from(" "), + viewport_width: 17, + wrap_indicator_highlight: None, + } + } +} + +#[derive(Debug)] +pub struct DocumentFormatter<'t> { + text_fmt: &'t TextFormat, + annotations: &'t TextAnnotations, + + /// The visual position at the end of the last yielded word boundary + visual_pos: Position, + graphemes: RopeGraphemes<'t>, + /// The character pos of the `graphemes` iter used for inserting annotations + char_pos: usize, + /// The line pos of the `graphemes` iter used for inserting annotations + line_pos: usize, + exhausted: bool, + + /// Line breaks to be reserved for virtual text + /// at the next line break + virtual_lines: usize, + inline_anntoation_graphemes: Option<(Graphemes<'t>, Option)>, + + // softwrap specific + /// The indentation of the current line + /// Is set to `None` if the indentation level is not yet known + /// because no non-whitespace graphemes have been encountered yet + indent_level: Option, + /// In case a long word needs to be split a single grapheme might need to be wrapped + /// while the rest of the word stays on the same line + peeked_grapheme: Option<(FormattedGrapheme<'t>, usize)>, + /// A first-in first-out (fifo) buffer for the Graphemes of any given word + word_buf: Vec>, + /// The index of the next grapheme that will be yielded from the `word_buf` + word_i: usize, +} + +impl<'t> DocumentFormatter<'t> { + /// Creates a new formatter at the last block before `char_idx`. + /// A block is a chunk which always ends with a linebreak. + /// This is usually just a normal line break. + /// However very long lines are always wrapped at constant intervals that can be cheaply calculated + /// to avoid pathological behaviour. 
+ pub fn new_at_prev_checkpoint( + text: RopeSlice<'t>, + text_fmt: &'t TextFormat, + annotations: &'t TextAnnotations, + char_idx: usize, + ) -> (Self, usize) { + // TODO divide long lines into blocks to avoid bad performance for long lines + let block_line_idx = text.char_to_line(char_idx.min(text.len_chars())); + let block_char_idx = text.line_to_char(block_line_idx); + annotations.reset_pos(block_char_idx); + ( + DocumentFormatter { + text_fmt, + annotations, + visual_pos: Position { row: 0, col: 0 }, + graphemes: RopeGraphemes::new(text.slice(block_char_idx..)), + char_pos: block_char_idx, + exhausted: false, + virtual_lines: 0, + indent_level: None, + peeked_grapheme: None, + word_buf: Vec::with_capacity(64), + word_i: 0, + line_pos: block_line_idx, + inline_anntoation_graphemes: None, + }, + block_char_idx, + ) + } + + fn next_inline_annotation_grapheme(&mut self) -> Option<(&'t str, Option)> { + loop { + if let Some(&mut (ref mut annotation, highlight)) = + self.inline_anntoation_graphemes.as_mut() + { + if let Some(grapheme) = annotation.next() { + return Some((grapheme, highlight)); + } + } + + if let Some((annotation, highlight)) = + self.annotations.next_inline_annotation_at(self.char_pos) + { + self.inline_anntoation_graphemes = Some(( + UnicodeSegmentation::graphemes(&*annotation.text, true), + highlight, + )) + } else { + return None; + } + } + } + + fn advance_grapheme(&mut self, col: usize) -> Option> { + let (grapheme, source) = + if let Some((grapheme, highlight)) = self.next_inline_annotation_grapheme() { + (grapheme.into(), GraphemeSource::VirtualText { highlight }) + } else if let Some(grapheme) = self.graphemes.next() { + self.virtual_lines += self.annotations.annotation_lines_at(self.char_pos); + let codepoints = grapheme.len_chars() as u32; + + let overlay = self.annotations.overlay_at(self.char_pos); + let grapheme = match overlay { + Some((overlay, _)) => overlay.grapheme.as_str().into(), + None => Cow::from(grapheme).into(), + }; + + self.char_pos += codepoints as usize; + (grapheme, GraphemeSource::Document { codepoints }) + } else { + if self.exhausted { + return None; + } + self.exhausted = true; + // EOF grapheme is required for rendering + // and correct position computations + return Some(FormattedGrapheme { + grapheme: Grapheme::Other { g: " ".into() }, + source: GraphemeSource::Document { codepoints: 0 }, + }); + }; + + let grapheme = FormattedGrapheme::new(grapheme, col, self.text_fmt.tab_width, source); + + Some(grapheme) + } + + /// Move a word to the next visual line + fn wrap_word(&mut self, virtual_lines_before_word: usize) -> usize { + // softwrap this word to the next line + let indent_carry_over = if let Some(indent) = self.indent_level { + if indent as u16 <= self.text_fmt.max_indent_retain { + indent as u16 + } else { + 0 + } + } else { + // ensure the indent stays 0 + self.indent_level = Some(0); + 0 + }; + + self.visual_pos.col = indent_carry_over as usize; + self.virtual_lines -= virtual_lines_before_word; + self.visual_pos.row += 1 + virtual_lines_before_word; + let mut i = 0; + let mut word_width = 0; + let wrap_indicator = UnicodeSegmentation::graphemes(&*self.text_fmt.wrap_indicator, true) + .map(|g| { + i += 1; + let grapheme = FormattedGrapheme::new( + g.into(), + self.visual_pos.col + word_width, + self.text_fmt.tab_width, + GraphemeSource::VirtualText { + highlight: self.text_fmt.wrap_indicator_highlight, + }, + ); + word_width += grapheme.width(); + grapheme + }); + self.word_buf.splice(0..0, wrap_indicator); + + for grapheme in 
&mut self.word_buf[i..] { + let visual_x = self.visual_pos.col + word_width; + grapheme + .grapheme + .change_position(visual_x, self.text_fmt.tab_width); + word_width += grapheme.width(); + } + word_width + } + + fn advance_to_next_word(&mut self) { + self.word_buf.clear(); + let mut word_width = 0; + let virtual_lines_before_word = self.virtual_lines; + let mut virtual_lines_before_grapheme = self.virtual_lines; + + loop { + // softwrap word if necessary + if word_width + self.visual_pos.col >= self.text_fmt.viewport_width as usize { + // wrapping this word would move too much text to the next line + // split the word at the line end instead + if word_width > self.text_fmt.max_wrap as usize { + // Usually we stop accomulating graphemes as soon as softwrapping becomes necessary. + // However if the last grapheme is multiple columns wide it might extend beyond the EOL. + // The condition below ensures that this grapheme is not cutoff and instead wrapped to the next line + if word_width + self.visual_pos.col > self.text_fmt.viewport_width as usize { + self.peeked_grapheme = self.word_buf.pop().map(|grapheme| { + (grapheme, self.virtual_lines - virtual_lines_before_grapheme) + }); + self.virtual_lines = virtual_lines_before_grapheme; + } + return; + } + + word_width = self.wrap_word(virtual_lines_before_word); + } + + virtual_lines_before_grapheme = self.virtual_lines; + + let grapheme = if let Some((grapheme, virtual_lines)) = self.peeked_grapheme.take() { + self.virtual_lines += virtual_lines; + grapheme + } else if let Some(grapheme) = self.advance_grapheme(self.visual_pos.col + word_width) { + grapheme + } else { + return; + }; + + // Track indentation + if !grapheme.is_whitespace() && self.indent_level.is_none() { + self.indent_level = Some(self.visual_pos.col); + } else if grapheme.grapheme == Grapheme::Newline { + self.indent_level = None; + } + + let is_word_boundary = grapheme.is_word_boundary(); + word_width += grapheme.width(); + self.word_buf.push(grapheme); + + if is_word_boundary { + return; + } + } + } + + /// returns the document line pos of the **next** grapheme that will be yielded + pub fn line_pos(&self) -> usize { + self.line_pos + } + + /// returns the visual pos of the **next** grapheme that will be yielded + pub fn visual_pos(&self) -> Position { + self.visual_pos + } +} + +impl<'t> Iterator for DocumentFormatter<'t> { + type Item = (FormattedGrapheme<'t>, Position); + + fn next(&mut self) -> Option { + let grapheme = if self.text_fmt.soft_wrap { + if self.word_i >= self.word_buf.len() { + self.advance_to_next_word(); + self.word_i = 0; + } + let grapheme = replace( + self.word_buf.get_mut(self.word_i)?, + FormattedGrapheme::placeholder(), + ); + self.word_i += 1; + grapheme + } else { + self.advance_grapheme(self.visual_pos.col)? 
+ }; + + let pos = self.visual_pos; + if grapheme.grapheme == Grapheme::Newline { + self.visual_pos.row += 1; + self.visual_pos.row += take(&mut self.virtual_lines); + self.visual_pos.col = 0; + self.line_pos += 1; + } else { + self.visual_pos.col += grapheme.width(); + } + Some((grapheme, pos)) + } +} diff --git a/helix-core/src/doc_formatter/test.rs b/helix-core/src/doc_formatter/test.rs new file mode 100644 index 00000000..ac8918bb --- /dev/null +++ b/helix-core/src/doc_formatter/test.rs @@ -0,0 +1,182 @@ +use std::rc::Rc; + +use crate::doc_formatter::{DocumentFormatter, TextFormat}; +use crate::text_annotations::{InlineAnnotation, Overlay, TextAnnotations}; + +impl TextFormat { + fn new_test(softwrap: bool) -> Self { + TextFormat { + soft_wrap: softwrap, + tab_width: 2, + max_wrap: 3, + max_indent_retain: 4, + wrap_indicator: ".".into(), + wrap_indicator_highlight: None, + // use a prime number to allow lining up too often with repeat + viewport_width: 17, + } + } +} + +impl<'t> DocumentFormatter<'t> { + fn collect_to_str(&mut self) -> String { + use std::fmt::Write; + let mut res = String::new(); + let viewport_width = self.text_fmt.viewport_width; + let mut line = 0; + + for (grapheme, pos) in self { + if pos.row != line { + line += 1; + assert_eq!(pos.row, line); + write!(res, "\n{}", ".".repeat(pos.col)).unwrap(); + assert!( + pos.col <= viewport_width as usize, + "softwrapped failed {}<={viewport_width}", + pos.col + ); + } + write!(res, "{}", grapheme.grapheme).unwrap(); + } + + res + } +} + +fn softwrap_text(text: &str) -> String { + DocumentFormatter::new_at_prev_checkpoint( + text.into(), + &TextFormat::new_test(true), + &TextAnnotations::default(), + 0, + ) + .0 + .collect_to_str() +} + +#[test] +fn basic_softwrap() { + assert_eq!( + softwrap_text(&"foo ".repeat(10)), + "foo foo foo foo \n.foo foo foo foo \n.foo foo " + ); + assert_eq!( + softwrap_text(&"fooo ".repeat(10)), + "fooo fooo fooo \n.fooo fooo fooo \n.fooo fooo fooo \n.fooo " + ); + + // check that we don't wrap unnecessarily + assert_eq!(softwrap_text("\t\txxxx1xxxx2xx\n"), " xxxx1xxxx2xx \n "); +} + +#[test] +fn softwrap_indentation() { + assert_eq!( + softwrap_text("\t\tfoo1 foo2 foo3 foo4 foo5 foo6\n"), + " foo1 foo2 \n.....foo3 foo4 \n.....foo5 foo6 \n " + ); + assert_eq!( + softwrap_text("\t\t\tfoo1 foo2 foo3 foo4 foo5 foo6\n"), + " foo1 foo2 \n.foo3 foo4 foo5 \n.foo6 \n " + ); +} + +#[test] +fn long_word_softwrap() { + assert_eq!( + softwrap_text("\t\txxxx1xxxx2xxxx3xxxx4xxxx5xxxx6xxxx7xxxx8xxxx9xxx\n"), + " xxxx1xxxx2xxx\n.....x3xxxx4xxxx5\n.....xxxx6xxxx7xx\n.....xx8xxxx9xxx \n " + ); + assert_eq!( + softwrap_text("xxxxxxxx1xxxx2xxx\n"), + "xxxxxxxx1xxxx2xxx\n. 
\n " + ); + assert_eq!( + softwrap_text("\t\txxxx1xxxx 2xxxx3xxxx4xxxx5xxxx6xxxx7xxxx8xxxx9xxx\n"), + " xxxx1xxxx \n.....2xxxx3xxxx4x\n.....xxx5xxxx6xxx\n.....x7xxxx8xxxx9\n.....xxx \n " + ); + assert_eq!( + softwrap_text("\t\txxxx1xxx 2xxxx3xxxx4xxxx5xxxx6xxxx7xxxx8xxxx9xxx\n"), + " xxxx1xxx 2xxx\n.....x3xxxx4xxxx5\n.....xxxx6xxxx7xx\n.....xx8xxxx9xxx \n " + ); +} + +fn overlay_text(text: &str, char_pos: usize, softwrap: bool, overlays: &[Overlay]) -> String { + DocumentFormatter::new_at_prev_checkpoint( + text.into(), + &TextFormat::new_test(softwrap), + TextAnnotations::default().add_overlay(overlays.into(), None), + char_pos, + ) + .0 + .collect_to_str() +} + +#[test] +fn overlay() { + assert_eq!( + overlay_text( + "foobar", + 0, + false, + &[Overlay::new(0, "X"), Overlay::new(2, "\t")], + ), + "Xo bar " + ); + assert_eq!( + overlay_text( + &"foo ".repeat(10), + 0, + true, + &[ + Overlay::new(2, "\t"), + Overlay::new(5, "\t"), + Overlay::new(16, "X"), + ] + ), + "fo f o foo \n.foo Xoo foo foo \n.foo foo foo " + ); +} + +fn annotate_text(text: &str, softwrap: bool, annotations: &[InlineAnnotation]) -> String { + DocumentFormatter::new_at_prev_checkpoint( + text.into(), + &TextFormat::new_test(softwrap), + TextAnnotations::default().add_inline_annotations(annotations.into(), None), + 0, + ) + .0 + .collect_to_str() +} + +#[test] +fn annotation() { + assert_eq!( + annotate_text("bar", false, &[InlineAnnotation::new(0, "foo")]), + "foobar " + ); + assert_eq!( + annotate_text( + &"foo ".repeat(10), + true, + &[InlineAnnotation::new(0, "foo ")] + ), + "foo foo foo foo \n.foo foo foo foo \n.foo foo foo " + ); +} +#[test] +fn annotation_and_overlay() { + assert_eq!( + DocumentFormatter::new_at_prev_checkpoint( + "bbar".into(), + &TextFormat::new_test(false), + TextAnnotations::default() + .add_inline_annotations(Rc::new([InlineAnnotation::new(0, "fooo")]), None) + .add_overlay(Rc::new([Overlay::new(0, "\t")]), None), + 0, + ) + .0 + .collect_to_str(), + "fooo bar " + ); +} diff --git a/helix-core/src/graphemes.rs b/helix-core/src/graphemes.rs index 675f5750..15ef3eb0 100644 --- a/helix-core/src/graphemes.rs +++ b/helix-core/src/graphemes.rs @@ -5,7 +5,88 @@ use ropey::{iter::Chunks, str_utils::byte_to_char_idx, RopeSlice}; use unicode_segmentation::{GraphemeCursor, GraphemeIncomplete}; use unicode_width::UnicodeWidthStr; -use std::fmt; +use std::borrow::Cow; +use std::fmt::{self, Debug, Display}; +use std::marker::PhantomData; +use std::ops::Deref; +use std::ptr::NonNull; +use std::{slice, str}; + +use crate::chars::{char_is_whitespace, char_is_word}; +use crate::LineEnding; + +#[inline] +pub fn tab_width_at(visual_x: usize, tab_width: u16) -> usize { + tab_width as usize - (visual_x % tab_width as usize) +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum Grapheme<'a> { + Newline, + Tab { width: usize }, + Other { g: GraphemeStr<'a> }, +} + +impl<'a> Grapheme<'a> { + pub fn new(g: GraphemeStr<'a>, visual_x: usize, tab_width: u16) -> Grapheme<'a> { + match g { + g if g == "\t" => Grapheme::Tab { + width: tab_width_at(visual_x, tab_width), + }, + _ if LineEnding::from_str(&g).is_some() => Grapheme::Newline, + _ => Grapheme::Other { g }, + } + } + + pub fn change_position(&mut self, visual_x: usize, tab_width: u16) { + if let Grapheme::Tab { width } = self { + *width = tab_width_at(visual_x, tab_width) + } + } + + /// Returns the a visual width of this grapheme, + #[inline] + pub fn width(&self) -> usize { + match *self { + // width is not cached because we are dealing with + // ASCII almost 
all the time which already has a fastpath + // it's okay to convert to u16 here because no codepoint has a width larger + // than 2 and graphemes are usually atmost two visible codepoints wide + Grapheme::Other { ref g } => grapheme_width(g), + Grapheme::Tab { width } => width, + Grapheme::Newline => 1, + } + } + + pub fn is_whitespace(&self) -> bool { + !matches!(&self, Grapheme::Other { g } if !g.chars().all(char_is_whitespace)) + } + + // TODO currently word boundaries are used for softwrapping. + // This works best for programming languages and well for prose. + // This could however be improved in the future by considering unicode + // character classes but + pub fn is_word_boundary(&self) -> bool { + !matches!(&self, Grapheme::Other { g,.. } if g.chars().all(char_is_word)) + } +} + +impl Display for Grapheme<'_> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match *self { + Grapheme::Newline => write!(f, " "), + Grapheme::Tab { width } => { + for _ in 0..width { + write!(f, " ")?; + } + Ok(()) + } + Grapheme::Other { ref g } => { + write!(f, "{g}") + } + } + } +} #[must_use] pub fn grapheme_width(g: &str) -> usize { @@ -27,6 +108,8 @@ pub fn grapheme_width(g: &str) -> usize { // We use max(1) here because all grapeheme clusters--even illformed // ones--should have at least some width so they can be edited // properly. + // TODO properly handle unicode width for all codepoints + // example of where unicode width is currently wrong: 🤦🏼‍♂️ (taken from https://hsivonen.fi/string-length/) UnicodeWidthStr::width(g).max(1) } } @@ -341,3 +424,101 @@ impl<'a> Iterator for RopeGraphemes<'a> { } } } + +/// A highly compressed Cow<'a, str> that holds +/// atmost u31::MAX bytes and is readonly +pub struct GraphemeStr<'a> { + ptr: NonNull, + len: u32, + phantom: PhantomData<&'a str>, +} + +impl GraphemeStr<'_> { + const MASK_OWNED: u32 = 1 << 31; + + fn compute_len(&self) -> usize { + (self.len & !Self::MASK_OWNED) as usize + } +} + +impl Deref for GraphemeStr<'_> { + type Target = str; + fn deref(&self) -> &Self::Target { + unsafe { + let bytes = slice::from_raw_parts(self.ptr.as_ptr(), self.compute_len()); + str::from_utf8_unchecked(bytes) + } + } +} + +impl Drop for GraphemeStr<'_> { + fn drop(&mut self) { + if self.len & Self::MASK_OWNED != 0 { + // free allocation + unsafe { + drop(Box::from_raw(slice::from_raw_parts_mut( + self.ptr.as_ptr(), + self.compute_len(), + ))); + } + } + } +} + +impl<'a> From<&'a str> for GraphemeStr<'a> { + fn from(g: &'a str) -> Self { + GraphemeStr { + ptr: unsafe { NonNull::new_unchecked(g.as_bytes().as_ptr() as *mut u8) }, + len: i32::try_from(g.len()).unwrap() as u32, + phantom: PhantomData, + } + } +} + +impl<'a> From for GraphemeStr<'a> { + fn from(g: String) -> Self { + let len = g.len(); + let ptr = Box::into_raw(g.into_bytes().into_boxed_slice()) as *mut u8; + GraphemeStr { + ptr: unsafe { NonNull::new_unchecked(ptr) }, + len: i32::try_from(len).unwrap() as u32, + phantom: PhantomData, + } + } +} + +impl<'a> From> for GraphemeStr<'a> { + fn from(g: Cow<'a, str>) -> Self { + match g { + Cow::Borrowed(g) => g.into(), + Cow::Owned(g) => g.into(), + } + } +} + +impl> PartialEq for GraphemeStr<'_> { + fn eq(&self, other: &T) -> bool { + self.deref() == other.deref() + } +} +impl PartialEq for GraphemeStr<'_> { + fn eq(&self, other: &str) -> bool { + self.deref() == other + } +} +impl Eq for GraphemeStr<'_> {} +impl Debug for GraphemeStr<'_> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + Debug::fmt(self.deref(), f) + } +} 
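The unsafe plumbing above packs the ownership flag into the high bit of the 32-bit length, which is what compute_len and Drop rely on. A minimal standalone model of just that packing (illustrative only, not the actual GraphemeStr type):

// The high bit of the u32 length doubles as the owned/borrowed flag.
const MASK_OWNED: u32 = 1 << 31;

fn pack(len: u32, owned: bool) -> u32 {
    debug_assert!(len & MASK_OWNED == 0, "length must fit in 31 bits");
    if owned {
        len | MASK_OWNED
    } else {
        len
    }
}

fn unpack(packed: u32) -> (usize, bool) {
    // mirrors `compute_len` and the ownership check in `Drop`
    ((packed & !MASK_OWNED) as usize, packed & MASK_OWNED != 0)
}

fn main() {
    assert_eq!(unpack(pack(3, true)), (3, true));
    assert_eq!(unpack(pack(3, false)), (3, false));
}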
+impl Display for GraphemeStr<'_> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + Display::fmt(self.deref(), f) + } +} +impl Clone for GraphemeStr<'_> { + fn clone(&self) -> Self { + self.deref().to_owned().into() + } +} diff --git a/helix-core/src/increment/date_time.rs b/helix-core/src/increment/date_time.rs index 265242ce..2980bb58 100644 --- a/helix-core/src/increment/date_time.rs +++ b/helix-core/src/increment/date_time.rs @@ -1,114 +1,53 @@ -use chrono::{Datelike, Duration, NaiveDate, NaiveDateTime, NaiveTime, Timelike}; +use chrono::{Duration, NaiveDate, NaiveDateTime, NaiveTime}; use once_cell::sync::Lazy; use regex::Regex; -use ropey::RopeSlice; - -use std::borrow::Cow; -use std::cmp; use std::fmt::Write; -use super::Increment; -use crate::{Range, Tendril}; +/// Increment a Date or DateTime +/// +/// If just a Date is selected the day will be incremented. +/// If a DateTime is selected the second will be incremented. +pub fn increment(selected_text: &str, amount: i64) -> Option { + if selected_text.is_empty() { + return None; + } -#[derive(Debug, PartialEq, Eq)] -pub struct DateTimeIncrementor { - date_time: NaiveDateTime, - range: Range, - fmt: &'static str, - field: DateField, -} + FORMATS.iter().find_map(|format| { + let captures = format.regex.captures(selected_text)?; + if captures.len() - 1 != format.fields.len() { + return None; + } -impl DateTimeIncrementor { - pub fn from_range(text: RopeSlice, range: Range) -> Option { - let range = if range.is_empty() { - if range.anchor < text.len_chars() { - // Treat empty range as a cursor range. - range.put_cursor(text, range.anchor + 1, true) - } else { - // The range is empty and at the end of the text. - return None; + let date_time = captures.get(0)?; + let has_date = format.fields.iter().any(|f| f.unit.is_date()); + let has_time = format.fields.iter().any(|f| f.unit.is_time()); + let date_time = &selected_text[date_time.start()..date_time.end()]; + match (has_date, has_time) { + (true, true) => { + let date_time = NaiveDateTime::parse_from_str(date_time, format.fmt).ok()?; + Some( + date_time + .checked_add_signed(Duration::minutes(amount))? + .format(format.fmt) + .to_string(), + ) } - } else { - range - }; - - FORMATS.iter().find_map(|format| { - let from = range.from().saturating_sub(format.max_len); - let to = (range.from() + format.max_len).min(text.len_chars()); - - let (from_in_text, to_in_text) = (range.from() - from, range.to() - from); - let text: Cow = text.slice(from..to).into(); - - let captures = format.regex.captures(&text)?; - if captures.len() - 1 != format.fields.len() { - return None; + (true, false) => { + let date = NaiveDate::parse_from_str(date_time, format.fmt).ok()?; + Some( + date.checked_add_signed(Duration::days(amount))? 
+ .format(format.fmt) + .to_string(), + ) } - - let date_time = captures.get(0)?; - let offset = range.from() - from_in_text; - let range = Range::new(date_time.start() + offset, date_time.end() + offset); - - let field = captures - .iter() - .skip(1) - .enumerate() - .find_map(|(i, capture)| { - let capture = capture?; - let capture_range = capture.range(); - - if capture_range.contains(&from_in_text) - && capture_range.contains(&(to_in_text - 1)) - { - Some(format.fields[i]) - } else { - None - } - })?; - - let has_date = format.fields.iter().any(|f| f.unit.is_date()); - let has_time = format.fields.iter().any(|f| f.unit.is_time()); - - let date_time = &text[date_time.start()..date_time.end()]; - let date_time = match (has_date, has_time) { - (true, true) => NaiveDateTime::parse_from_str(date_time, format.fmt).ok()?, - (true, false) => { - let date = NaiveDate::parse_from_str(date_time, format.fmt).ok()?; - - date.and_hms_opt(0, 0, 0).unwrap() - } - (false, true) => { - let time = NaiveTime::parse_from_str(date_time, format.fmt).ok()?; - - NaiveDate::from_ymd_opt(0, 1, 1).unwrap().and_time(time) - } - (false, false) => return None, - }; - - Some(DateTimeIncrementor { - date_time, - range, - fmt: format.fmt, - field, - }) - }) - } -} - -impl Increment for DateTimeIncrementor { - fn increment(&self, amount: i64) -> (Range, Tendril) { - let date_time = match self.field.unit { - DateUnit::Years => add_years(self.date_time, amount), - DateUnit::Months => add_months(self.date_time, amount), - DateUnit::Days => add_duration(self.date_time, Duration::days(amount)), - DateUnit::Hours => add_duration(self.date_time, Duration::hours(amount)), - DateUnit::Minutes => add_duration(self.date_time, Duration::minutes(amount)), - DateUnit::Seconds => add_duration(self.date_time, Duration::seconds(amount)), - DateUnit::AmPm => toggle_am_pm(self.date_time), + (false, true) => { + let time = NaiveTime::parse_from_str(date_time, format.fmt).ok()?; + let (adjusted_time, _) = time.overflowing_add_signed(Duration::minutes(amount)); + Some(adjusted_time.format(format.fmt).to_string()) + } + (false, false) => None, } - .unwrap_or(self.date_time); - - (self.range, date_time.format(self.fmt).to_string().into()) - } + }) } static FORMATS: Lazy> = Lazy::new(|| { @@ -144,7 +83,7 @@ impl Format { fn new(fmt: &'static str) -> Self { let mut remaining = fmt; let mut fields = Vec::new(); - let mut regex = String::new(); + let mut regex = "^".to_string(); let mut max_len = 0; while let Some(i) = remaining.find('%') { @@ -166,6 +105,7 @@ impl Format { write!(regex, "({})", field.regex).unwrap(); remaining = &after[spec_len..]; } + regex += "$"; let regex = Regex::new(®ex).unwrap(); @@ -305,155 +245,47 @@ impl DateUnit { } } -fn ndays_in_month(year: i32, month: u32) -> u32 { - // The first day of the next month... - let (y, m) = if month == 12 { - (year + 1, 1) - } else { - (year, month + 1) - }; - let d = NaiveDate::from_ymd_opt(y, m, 1).unwrap(); - - // ...is preceded by the last day of the original month. 
- d.pred_opt().unwrap().day() -} - -fn add_months(date_time: NaiveDateTime, amount: i64) -> Option { - let month = (date_time.month0() as i64).checked_add(amount)?; - let year = date_time.year() + i32::try_from(month / 12).ok()?; - let year = if month.is_negative() { year - 1 } else { year }; - - // Normalize month - let month = month % 12; - let month = if month.is_negative() { - month + 12 - } else { - month - } as u32 - + 1; - - let day = cmp::min(date_time.day(), ndays_in_month(year, month)); - - NaiveDate::from_ymd_opt(year, month, day).map(|date| date.and_time(date_time.time())) -} - -fn add_years(date_time: NaiveDateTime, amount: i64) -> Option { - let year = i32::try_from((date_time.year() as i64).checked_add(amount)?).ok()?; - let ndays = ndays_in_month(year, date_time.month()); - - if date_time.day() > ndays { - NaiveDate::from_ymd_opt(year, date_time.month(), ndays) - .and_then(|date| date.succ_opt().map(|date| date.and_time(date_time.time()))) - } else { - date_time.with_year(year) - } -} - -fn add_duration(date_time: NaiveDateTime, duration: Duration) -> Option { - date_time.checked_add_signed(duration) -} - -fn toggle_am_pm(date_time: NaiveDateTime) -> Option { - if date_time.hour() < 12 { - add_duration(date_time, Duration::hours(12)) - } else { - add_duration(date_time, Duration::hours(-12)) - } -} - #[cfg(test)] mod test { use super::*; - use crate::Rope; #[test] fn test_increment_date_times() { let tests = [ // (original, cursor, amount, expected) - ("2020-02-28", 0, 1, "2021-02-28"), - ("2020-02-29", 0, 1, "2021-03-01"), - ("2020-01-31", 5, 1, "2020-02-29"), - ("2020-01-20", 5, 1, "2020-02-20"), - ("2021-01-01", 5, -1, "2020-12-01"), - ("2021-01-31", 5, -2, "2020-11-30"), - ("2020-02-28", 8, 1, "2020-02-29"), - ("2021-02-28", 8, 1, "2021-03-01"), - ("2021-02-28", 0, -1, "2020-02-28"), - ("2021-03-01", 0, -1, "2020-03-01"), - ("2020-02-29", 5, -1, "2020-01-29"), - ("2020-02-20", 5, -1, "2020-01-20"), - ("2020-02-29", 8, -1, "2020-02-28"), - ("2021-03-01", 8, -1, "2021-02-28"), - ("1980/12/21", 8, 100, "1981/03/31"), - ("1980/12/21", 8, -100, "1980/09/12"), - ("1980/12/21", 8, 1000, "1983/09/17"), - ("1980/12/21", 8, -1000, "1978/03/27"), - ("2021-11-24 07:12:23", 0, 1, "2022-11-24 07:12:23"), - ("2021-11-24 07:12:23", 5, 1, "2021-12-24 07:12:23"), - ("2021-11-24 07:12:23", 8, 1, "2021-11-25 07:12:23"), - ("2021-11-24 07:12:23", 11, 1, "2021-11-24 08:12:23"), - ("2021-11-24 07:12:23", 14, 1, "2021-11-24 07:13:23"), - ("2021-11-24 07:12:23", 17, 1, "2021-11-24 07:12:24"), - ("2021/11/24 07:12:23", 0, 1, "2022/11/24 07:12:23"), - ("2021/11/24 07:12:23", 5, 1, "2021/12/24 07:12:23"), - ("2021/11/24 07:12:23", 8, 1, "2021/11/25 07:12:23"), - ("2021/11/24 07:12:23", 11, 1, "2021/11/24 08:12:23"), - ("2021/11/24 07:12:23", 14, 1, "2021/11/24 07:13:23"), - ("2021/11/24 07:12:23", 17, 1, "2021/11/24 07:12:24"), - ("2021-11-24 07:12", 0, 1, "2022-11-24 07:12"), - ("2021-11-24 07:12", 5, 1, "2021-12-24 07:12"), - ("2021-11-24 07:12", 8, 1, "2021-11-25 07:12"), - ("2021-11-24 07:12", 11, 1, "2021-11-24 08:12"), - ("2021-11-24 07:12", 14, 1, "2021-11-24 07:13"), - ("2021/11/24 07:12", 0, 1, "2022/11/24 07:12"), - ("2021/11/24 07:12", 5, 1, "2021/12/24 07:12"), - ("2021/11/24 07:12", 8, 1, "2021/11/25 07:12"), - ("2021/11/24 07:12", 11, 1, "2021/11/24 08:12"), - ("2021/11/24 07:12", 14, 1, "2021/11/24 07:13"), - ("Wed Nov 24 2021", 0, 1, "Thu Nov 25 2021"), - ("Wed Nov 24 2021", 4, 1, "Fri Dec 24 2021"), - ("Wed Nov 24 2021", 8, 1, "Thu Nov 25 2021"), - ("Wed Nov 24 2021", 11, 1, "Thu 
Nov 24 2022"), - ("24-Nov-2021", 0, 1, "25-Nov-2021"), - ("24-Nov-2021", 3, 1, "24-Dec-2021"), - ("24-Nov-2021", 7, 1, "24-Nov-2022"), - ("2021 Nov 24", 0, 1, "2022 Nov 24"), - ("2021 Nov 24", 5, 1, "2021 Dec 24"), - ("2021 Nov 24", 9, 1, "2021 Nov 25"), - ("Nov 24, 2021", 0, 1, "Dec 24, 2021"), - ("Nov 24, 2021", 4, 1, "Nov 25, 2021"), - ("Nov 24, 2021", 8, 1, "Nov 24, 2022"), - ("7:21:53 am", 0, 1, "8:21:53 am"), - ("7:21:53 am", 3, 1, "7:22:53 am"), - ("7:21:53 am", 5, 1, "7:21:54 am"), - ("7:21:53 am", 8, 1, "7:21:53 pm"), - ("7:21:53 AM", 0, 1, "8:21:53 AM"), - ("7:21:53 AM", 3, 1, "7:22:53 AM"), - ("7:21:53 AM", 5, 1, "7:21:54 AM"), - ("7:21:53 AM", 8, 1, "7:21:53 PM"), - ("7:21 am", 0, 1, "8:21 am"), - ("7:21 am", 3, 1, "7:22 am"), - ("7:21 am", 5, 1, "7:21 pm"), - ("7:21 AM", 0, 1, "8:21 AM"), - ("7:21 AM", 3, 1, "7:22 AM"), - ("7:21 AM", 5, 1, "7:21 PM"), - ("23:24:23", 1, 1, "00:24:23"), - ("23:24:23", 3, 1, "23:25:23"), - ("23:24:23", 6, 1, "23:24:24"), - ("23:24", 1, 1, "00:24"), - ("23:24", 3, 1, "23:25"), + ("2020-02-28", 1, "2020-02-29"), + ("2020-02-29", 1, "2020-03-01"), + ("2020-01-31", 1, "2020-02-01"), + ("2020-01-20", 1, "2020-01-21"), + ("2021-01-01", -1, "2020-12-31"), + ("2021-01-31", -2, "2021-01-29"), + ("2020-02-28", 1, "2020-02-29"), + ("2021-02-28", 1, "2021-03-01"), + ("2021-03-01", -1, "2021-02-28"), + ("2020-02-29", -1, "2020-02-28"), + ("2020-02-20", -1, "2020-02-19"), + ("2021-03-01", -1, "2021-02-28"), + ("1980/12/21", 100, "1981/03/31"), + ("1980/12/21", -100, "1980/09/12"), + ("1980/12/21", 1000, "1983/09/17"), + ("1980/12/21", -1000, "1978/03/27"), + ("2021-11-24 07:12:23", 1, "2021-11-24 07:13:23"), + ("2021-11-24 07:12", 1, "2021-11-24 07:13"), + ("Wed Nov 24 2021", 1, "Thu Nov 25 2021"), + ("24-Nov-2021", 1, "25-Nov-2021"), + ("2021 Nov 24", 1, "2021 Nov 25"), + ("Nov 24, 2021", 1, "Nov 25, 2021"), + ("7:21:53 am", 1, "7:22:53 am"), + ("7:21:53 AM", 1, "7:22:53 AM"), + ("7:21 am", 1, "7:22 am"), + ("23:24:23", 1, "23:25:23"), + ("23:24", 1, "23:25"), + ("23:59", 1, "00:00"), + ("23:59:59", 1, "00:00:59"), ]; - for (original, cursor, amount, expected) in tests { - let rope = Rope::from_str(original); - let range = Range::new(cursor, cursor + 1); - assert_eq!( - DateTimeIncrementor::from_range(rope.slice(..), range) - .unwrap() - .increment(amount) - .1, - Tendril::from(expected) - ); + for (original, amount, expected) in tests { + assert_eq!(increment(original, amount).unwrap(), expected); } } @@ -482,10 +314,7 @@ mod test { ]; for invalid in tests { - let rope = Rope::from_str(invalid); - let range = Range::new(0, 1); - - assert_eq!(DateTimeIncrementor::from_range(rope.slice(..), range), None) + assert_eq!(increment(invalid, 1), None) } } } diff --git a/helix-core/src/increment/integer.rs b/helix-core/src/increment/integer.rs new file mode 100644 index 00000000..0dfabc0d --- /dev/null +++ b/helix-core/src/increment/integer.rs @@ -0,0 +1,235 @@ +const SEPARATOR: char = '_'; + +/// Increment an integer. +/// +/// Supported bases: +/// 2 with prefix 0b +/// 8 with prefix 0o +/// 10 with no prefix +/// 16 with prefix 0x +/// +/// An integer can contain `_` as a separator but may not start or end with a separator. +/// Base 10 integers can go negative, but bases 2, 8, and 16 cannot. +/// All addition and subtraction is saturating. 
+pub fn increment(selected_text: &str, amount: i64) -> Option { + if selected_text.is_empty() + || selected_text.ends_with(SEPARATOR) + || selected_text.starts_with(SEPARATOR) + { + return None; + } + + let radix = if selected_text.starts_with("0x") { + 16 + } else if selected_text.starts_with("0o") { + 8 + } else if selected_text.starts_with("0b") { + 2 + } else { + 10 + }; + + // Get separator indexes from right to left. + let separator_rtl_indexes: Vec = selected_text + .chars() + .rev() + .enumerate() + .filter_map(|(i, c)| if c == SEPARATOR { Some(i) } else { None }) + .collect(); + + let word: String = selected_text.chars().filter(|&c| c != SEPARATOR).collect(); + + let mut new_text = if radix == 10 { + let number = &word; + let value = i128::from_str_radix(number, radix).ok()?; + let new_value = value.saturating_add(amount as i128); + + let format_length = match (value.is_negative(), new_value.is_negative()) { + (true, false) => number.len() - 1, + (false, true) => number.len() + 1, + _ => number.len(), + } - separator_rtl_indexes.len(); + + if number.starts_with('0') || number.starts_with("-0") { + format!("{:01$}", new_value, format_length) + } else { + format!("{}", new_value) + } + } else { + let number = &word[2..]; + let value = u128::from_str_radix(number, radix).ok()?; + let new_value = (value as i128).saturating_add(amount as i128); + let new_value = if new_value < 0 { 0 } else { new_value }; + let format_length = selected_text.len() - 2 - separator_rtl_indexes.len(); + + match radix { + 2 => format!("0b{:01$b}", new_value, format_length), + 8 => format!("0o{:01$o}", new_value, format_length), + 16 => { + let (lower_count, upper_count): (usize, usize) = + number.chars().fold((0, 0), |(lower, upper), c| { + ( + lower + c.is_ascii_lowercase() as usize, + upper + c.is_ascii_uppercase() as usize, + ) + }); + if upper_count > lower_count { + format!("0x{:01$X}", new_value, format_length) + } else { + format!("0x{:01$x}", new_value, format_length) + } + } + _ => unimplemented!("radix not supported: {}", radix), + } + }; + + // Add separators from original number. + for &rtl_index in &separator_rtl_indexes { + if rtl_index < new_text.len() { + let new_index = new_text.len().saturating_sub(rtl_index); + if new_index > 0 { + new_text.insert(new_index, SEPARATOR); + } + } + } + + // Add in additional separators if necessary. 
+ if new_text.len() > selected_text.len() && !separator_rtl_indexes.is_empty() { + let spacing = match separator_rtl_indexes.as_slice() { + [.., b, a] => a - b - 1, + _ => separator_rtl_indexes[0], + }; + + let prefix_length = if radix == 10 { 0 } else { 2 }; + if let Some(mut index) = new_text.find(SEPARATOR) { + while index - prefix_length > spacing { + index -= spacing; + new_text.insert(index, SEPARATOR); + } + } + } + + Some(new_text) +} + +#[cfg(test)] +mod test { + use super::*; + + #[test] + fn test_increment_basic_decimal_numbers() { + let tests = [ + ("100", 1, "101"), + ("100", -1, "99"), + ("99", 1, "100"), + ("100", 1000, "1100"), + ("100", -1000, "-900"), + ("-1", 1, "0"), + ("-1", 2, "1"), + ("1", -1, "0"), + ("1", -2, "-1"), + ]; + + for (original, amount, expected) in tests { + assert_eq!(increment(original, amount).unwrap(), expected); + } + } + + #[test] + fn test_increment_basic_hexadecimal_numbers() { + let tests = [ + ("0x0100", 1, "0x0101"), + ("0x0100", -1, "0x00ff"), + ("0x0001", -1, "0x0000"), + ("0x0000", -1, "0x0000"), + ("0xffffffffffffffff", 1, "0x10000000000000000"), + ("0xffffffffffffffff", 2, "0x10000000000000001"), + ("0xffffffffffffffff", -1, "0xfffffffffffffffe"), + ("0xABCDEF1234567890", 1, "0xABCDEF1234567891"), + ("0xabcdef1234567890", 1, "0xabcdef1234567891"), + ]; + + for (original, amount, expected) in tests { + assert_eq!(increment(original, amount).unwrap(), expected); + } + } + + #[test] + fn test_increment_basic_octal_numbers() { + let tests = [ + ("0o0107", 1, "0o0110"), + ("0o0110", -1, "0o0107"), + ("0o0001", -1, "0o0000"), + ("0o7777", 1, "0o10000"), + ("0o1000", -1, "0o0777"), + ("0o0107", 10, "0o0121"), + ("0o0000", -1, "0o0000"), + ("0o1777777777777777777777", 1, "0o2000000000000000000000"), + ("0o1777777777777777777777", 2, "0o2000000000000000000001"), + ("0o1777777777777777777777", -1, "0o1777777777777777777776"), + ]; + + for (original, amount, expected) in tests { + assert_eq!(increment(original, amount).unwrap(), expected); + } + } + + #[test] + fn test_increment_basic_binary_numbers() { + let tests = [ + ("0b00000100", 1, "0b00000101"), + ("0b00000100", -1, "0b00000011"), + ("0b00000100", 2, "0b00000110"), + ("0b00000100", -2, "0b00000010"), + ("0b00000001", -1, "0b00000000"), + ("0b00111111", 10, "0b01001001"), + ("0b11111111", 1, "0b100000000"), + ("0b10000000", -1, "0b01111111"), + ("0b0000", -1, "0b0000"), + ( + "0b1111111111111111111111111111111111111111111111111111111111111111", + 1, + "0b10000000000000000000000000000000000000000000000000000000000000000", + ), + ( + "0b1111111111111111111111111111111111111111111111111111111111111111", + 2, + "0b10000000000000000000000000000000000000000000000000000000000000001", + ), + ( + "0b1111111111111111111111111111111111111111111111111111111111111111", + -1, + "0b1111111111111111111111111111111111111111111111111111111111111110", + ), + ]; + + for (original, amount, expected) in tests { + assert_eq!(increment(original, amount).unwrap(), expected); + } + } + + #[test] + fn test_increment_with_separators() { + let tests = [ + ("999_999", 1, "1_000_000"), + ("1_000_000", -1, "999_999"), + ("-999_999", -1, "-1_000_000"), + ("0x0000_0000_0001", 0x1_ffff_0000, "0x0001_ffff_0001"), + ("0x0000_0000", -1, "0x0000_0000"), + ("0x0000_0000_0000", -1, "0x0000_0000_0000"), + ("0b01111111_11111111", 1, "0b10000000_00000000"), + ("0b11111111_11111111", 1, "0b1_00000000_00000000"), + ]; + + for (original, amount, expected) in tests { + assert_eq!(increment(original, amount).unwrap(), expected); + } + } + 
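One detail of increment worth spelling out: digit width is preserved for prefixed bases and for decimals that start with a zero, while plain decimals simply grow. An extra illustrative test consistent with that logic (a sketch, not part of the patch):

    #[test]
    fn test_zero_padding_is_preserved_sketch() {
        // prefixed bases keep their digit count, so carries stay zero padded
        assert_eq!(increment("0x00ff", 1).unwrap(), "0x0100");
        // decimals keep their padding only when they start with a zero
        assert_eq!(increment("007", 1).unwrap(), "008");
        assert_eq!(increment("99", 1).unwrap(), "100");
    }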
+ #[test] + fn test_leading_and_trailing_separators_arent_a_match() { + assert_eq!(increment("9_", 1), None); + assert_eq!(increment("_9", 1), None); + assert_eq!(increment("_9_", 1), None); + } +} diff --git a/helix-core/src/increment/mod.rs b/helix-core/src/increment/mod.rs index f5945774..f1978bde 100644 --- a/helix-core/src/increment/mod.rs +++ b/helix-core/src/increment/mod.rs @@ -1,8 +1,10 @@ -pub mod date_time; -pub mod number; +mod date_time; +mod integer; -use crate::{Range, Tendril}; +pub fn integer(selected_text: &str, amount: i64) -> Option { + integer::increment(selected_text, amount) +} -pub trait Increment { - fn increment(&self, amount: i64) -> (Range, Tendril); +pub fn date_time(selected_text: &str, amount: i64) -> Option { + date_time::increment(selected_text, amount) } diff --git a/helix-core/src/increment/number.rs b/helix-core/src/increment/number.rs deleted file mode 100644 index 91268729..00000000 --- a/helix-core/src/increment/number.rs +++ /dev/null @@ -1,507 +0,0 @@ -use std::borrow::Cow; - -use ropey::RopeSlice; - -use super::Increment; - -use crate::{ - textobject::{textobject_word, TextObject}, - Range, Tendril, -}; - -#[derive(Debug, PartialEq, Eq)] -pub struct NumberIncrementor<'a> { - value: i64, - radix: u32, - range: Range, - - text: RopeSlice<'a>, -} - -impl<'a> NumberIncrementor<'a> { - /// Return information about number under rang if there is one. - pub fn from_range(text: RopeSlice, range: Range) -> Option { - // If the cursor is on the minus sign of a number we want to get the word textobject to the - // right of it. - let range = if range.to() < text.len_chars() - && range.to() - range.from() <= 1 - && text.char(range.from()) == '-' - { - Range::new(range.from() + 1, range.to() + 1) - } else { - range - }; - - let range = textobject_word(text, range, TextObject::Inside, 1, false); - - // If there is a minus sign to the left of the word object, we want to include it in the range. - let range = if range.from() > 0 && text.char(range.from() - 1) == '-' { - range.extend(range.from() - 1, range.from()) - } else { - range - }; - - let word: String = text - .slice(range.from()..range.to()) - .chars() - .filter(|&c| c != '_') - .collect(); - let (radix, prefixed) = if word.starts_with("0x") { - (16, true) - } else if word.starts_with("0o") { - (8, true) - } else if word.starts_with("0b") { - (2, true) - } else { - (10, false) - }; - - let number = if prefixed { &word[2..] } else { &word }; - - let value = i128::from_str_radix(number, radix).ok()?; - if (value.is_positive() && value.leading_zeros() < 64) - || (value.is_negative() && value.leading_ones() < 64) - { - return None; - } - - let value = value as i64; - Some(NumberIncrementor { - range, - value, - radix, - text, - }) - } -} - -impl<'a> Increment for NumberIncrementor<'a> { - fn increment(&self, amount: i64) -> (Range, Tendril) { - let old_text: Cow = self.text.slice(self.range.from()..self.range.to()).into(); - let old_length = old_text.len(); - let new_value = self.value.wrapping_add(amount); - - // Get separator indexes from right to left. 
- let separator_rtl_indexes: Vec = old_text - .chars() - .rev() - .enumerate() - .filter_map(|(i, c)| if c == '_' { Some(i) } else { None }) - .collect(); - - let format_length = if self.radix == 10 { - match (self.value.is_negative(), new_value.is_negative()) { - (true, false) => old_length - 1, - (false, true) => old_length + 1, - _ => old_text.len(), - } - } else { - old_text.len() - 2 - } - separator_rtl_indexes.len(); - - let mut new_text = match self.radix { - 2 => format!("0b{:01$b}", new_value, format_length), - 8 => format!("0o{:01$o}", new_value, format_length), - 10 if old_text.starts_with('0') || old_text.starts_with("-0") => { - format!("{:01$}", new_value, format_length) - } - 10 => format!("{}", new_value), - 16 => { - let (lower_count, upper_count): (usize, usize) = - old_text.chars().skip(2).fold((0, 0), |(lower, upper), c| { - ( - lower + usize::from(c.is_ascii_lowercase()), - upper + usize::from(c.is_ascii_uppercase()), - ) - }); - if upper_count > lower_count { - format!("0x{:01$X}", new_value, format_length) - } else { - format!("0x{:01$x}", new_value, format_length) - } - } - _ => unimplemented!("radix not supported: {}", self.radix), - }; - - // Add separators from original number. - for &rtl_index in &separator_rtl_indexes { - if rtl_index < new_text.len() { - let new_index = new_text.len() - rtl_index; - new_text.insert(new_index, '_'); - } - } - - // Add in additional separators if necessary. - if new_text.len() > old_length && !separator_rtl_indexes.is_empty() { - let spacing = match separator_rtl_indexes.as_slice() { - [.., b, a] => a - b - 1, - _ => separator_rtl_indexes[0], - }; - - let prefix_length = if self.radix == 10 { 0 } else { 2 }; - if let Some(mut index) = new_text.find('_') { - while index - prefix_length > spacing { - index -= spacing; - new_text.insert(index, '_'); - } - } - } - - (self.range, new_text.into()) - } -} - -#[cfg(test)] -mod test { - use super::*; - use crate::Rope; - - #[test] - fn test_decimal_at_point() { - let rope = Rope::from_str("Test text 12345 more text."); - let range = Range::point(12); - assert_eq!( - NumberIncrementor::from_range(rope.slice(..), range), - Some(NumberIncrementor { - range: Range::new(10, 15), - value: 12345, - radix: 10, - text: rope.slice(..), - }) - ); - } - - #[test] - fn test_uppercase_hexadecimal_at_point() { - let rope = Rope::from_str("Test text 0x123ABCDEF more text."); - let range = Range::point(12); - assert_eq!( - NumberIncrementor::from_range(rope.slice(..), range), - Some(NumberIncrementor { - range: Range::new(10, 21), - value: 0x123ABCDEF, - radix: 16, - text: rope.slice(..), - }) - ); - } - - #[test] - fn test_lowercase_hexadecimal_at_point() { - let rope = Rope::from_str("Test text 0xfa3b4e more text."); - let range = Range::point(12); - assert_eq!( - NumberIncrementor::from_range(rope.slice(..), range), - Some(NumberIncrementor { - range: Range::new(10, 18), - value: 0xfa3b4e, - radix: 16, - text: rope.slice(..), - }) - ); - } - - #[test] - fn test_octal_at_point() { - let rope = Rope::from_str("Test text 0o1074312 more text."); - let range = Range::point(12); - assert_eq!( - NumberIncrementor::from_range(rope.slice(..), range), - Some(NumberIncrementor { - range: Range::new(10, 19), - value: 0o1074312, - radix: 8, - text: rope.slice(..), - }) - ); - } - - #[test] - fn test_binary_at_point() { - let rope = Rope::from_str("Test text 0b10111010010101 more text."); - let range = Range::point(12); - assert_eq!( - NumberIncrementor::from_range(rope.slice(..), range), - Some(NumberIncrementor { 
- range: Range::new(10, 26), - value: 0b10111010010101, - radix: 2, - text: rope.slice(..), - }) - ); - } - - #[test] - fn test_negative_decimal_at_point() { - let rope = Rope::from_str("Test text -54321 more text."); - let range = Range::point(12); - assert_eq!( - NumberIncrementor::from_range(rope.slice(..), range), - Some(NumberIncrementor { - range: Range::new(10, 16), - value: -54321, - radix: 10, - text: rope.slice(..), - }) - ); - } - - #[test] - fn test_decimal_with_leading_zeroes_at_point() { - let rope = Rope::from_str("Test text 000045326 more text."); - let range = Range::point(12); - assert_eq!( - NumberIncrementor::from_range(rope.slice(..), range), - Some(NumberIncrementor { - range: Range::new(10, 19), - value: 45326, - radix: 10, - text: rope.slice(..), - }) - ); - } - - #[test] - fn test_negative_decimal_cursor_on_minus_sign() { - let rope = Rope::from_str("Test text -54321 more text."); - let range = Range::point(10); - assert_eq!( - NumberIncrementor::from_range(rope.slice(..), range), - Some(NumberIncrementor { - range: Range::new(10, 16), - value: -54321, - radix: 10, - text: rope.slice(..), - }) - ); - } - - #[test] - fn test_number_under_range_start_of_rope() { - let rope = Rope::from_str("100"); - let range = Range::point(0); - assert_eq!( - NumberIncrementor::from_range(rope.slice(..), range), - Some(NumberIncrementor { - range: Range::new(0, 3), - value: 100, - radix: 10, - text: rope.slice(..), - }) - ); - } - - #[test] - fn test_number_under_range_end_of_rope() { - let rope = Rope::from_str("100"); - let range = Range::point(2); - assert_eq!( - NumberIncrementor::from_range(rope.slice(..), range), - Some(NumberIncrementor { - range: Range::new(0, 3), - value: 100, - radix: 10, - text: rope.slice(..), - }) - ); - } - - #[test] - fn test_number_surrounded_by_punctuation() { - let rope = Rope::from_str(",100;"); - let range = Range::point(1); - assert_eq!( - NumberIncrementor::from_range(rope.slice(..), range), - Some(NumberIncrementor { - range: Range::new(1, 4), - value: 100, - radix: 10, - text: rope.slice(..), - }) - ); - } - - #[test] - fn test_not_a_number_point() { - let rope = Rope::from_str("Test text 45326 more text."); - let range = Range::point(6); - assert_eq!(NumberIncrementor::from_range(rope.slice(..), range), None); - } - - #[test] - fn test_number_too_large_at_point() { - let rope = Rope::from_str("Test text 0xFFFFFFFFFFFFFFFFF more text."); - let range = Range::point(12); - assert_eq!(NumberIncrementor::from_range(rope.slice(..), range), None); - } - - #[test] - fn test_number_cursor_one_right_of_number() { - let rope = Rope::from_str("100 "); - let range = Range::point(3); - assert_eq!(NumberIncrementor::from_range(rope.slice(..), range), None); - } - - #[test] - fn test_number_cursor_one_left_of_number() { - let rope = Rope::from_str(" 100"); - let range = Range::point(0); - assert_eq!(NumberIncrementor::from_range(rope.slice(..), range), None); - } - - #[test] - fn test_increment_basic_decimal_numbers() { - let tests = [ - ("100", 1, "101"), - ("100", -1, "99"), - ("99", 1, "100"), - ("100", 1000, "1100"), - ("100", -1000, "-900"), - ("-1", 1, "0"), - ("-1", 2, "1"), - ("1", -1, "0"), - ("1", -2, "-1"), - ]; - - for (original, amount, expected) in tests { - let rope = Rope::from_str(original); - let range = Range::point(0); - assert_eq!( - NumberIncrementor::from_range(rope.slice(..), range) - .unwrap() - .increment(amount) - .1, - Tendril::from(expected) - ); - } - } - - #[test] - fn test_increment_basic_hexadecimal_numbers() { - let tests = 
[ - ("0x0100", 1, "0x0101"), - ("0x0100", -1, "0x00ff"), - ("0x0001", -1, "0x0000"), - ("0x0000", -1, "0xffffffffffffffff"), - ("0xffffffffffffffff", 1, "0x0000000000000000"), - ("0xffffffffffffffff", 2, "0x0000000000000001"), - ("0xffffffffffffffff", -1, "0xfffffffffffffffe"), - ("0xABCDEF1234567890", 1, "0xABCDEF1234567891"), - ("0xabcdef1234567890", 1, "0xabcdef1234567891"), - ]; - - for (original, amount, expected) in tests { - let rope = Rope::from_str(original); - let range = Range::point(0); - assert_eq!( - NumberIncrementor::from_range(rope.slice(..), range) - .unwrap() - .increment(amount) - .1, - Tendril::from(expected) - ); - } - } - - #[test] - fn test_increment_basic_octal_numbers() { - let tests = [ - ("0o0107", 1, "0o0110"), - ("0o0110", -1, "0o0107"), - ("0o0001", -1, "0o0000"), - ("0o7777", 1, "0o10000"), - ("0o1000", -1, "0o0777"), - ("0o0107", 10, "0o0121"), - ("0o0000", -1, "0o1777777777777777777777"), - ("0o1777777777777777777777", 1, "0o0000000000000000000000"), - ("0o1777777777777777777777", 2, "0o0000000000000000000001"), - ("0o1777777777777777777777", -1, "0o1777777777777777777776"), - ]; - - for (original, amount, expected) in tests { - let rope = Rope::from_str(original); - let range = Range::point(0); - assert_eq!( - NumberIncrementor::from_range(rope.slice(..), range) - .unwrap() - .increment(amount) - .1, - Tendril::from(expected) - ); - } - } - - #[test] - fn test_increment_basic_binary_numbers() { - let tests = [ - ("0b00000100", 1, "0b00000101"), - ("0b00000100", -1, "0b00000011"), - ("0b00000100", 2, "0b00000110"), - ("0b00000100", -2, "0b00000010"), - ("0b00000001", -1, "0b00000000"), - ("0b00111111", 10, "0b01001001"), - ("0b11111111", 1, "0b100000000"), - ("0b10000000", -1, "0b01111111"), - ( - "0b0000", - -1, - "0b1111111111111111111111111111111111111111111111111111111111111111", - ), - ( - "0b1111111111111111111111111111111111111111111111111111111111111111", - 1, - "0b0000000000000000000000000000000000000000000000000000000000000000", - ), - ( - "0b1111111111111111111111111111111111111111111111111111111111111111", - 2, - "0b0000000000000000000000000000000000000000000000000000000000000001", - ), - ( - "0b1111111111111111111111111111111111111111111111111111111111111111", - -1, - "0b1111111111111111111111111111111111111111111111111111111111111110", - ), - ]; - - for (original, amount, expected) in tests { - let rope = Rope::from_str(original); - let range = Range::point(0); - assert_eq!( - NumberIncrementor::from_range(rope.slice(..), range) - .unwrap() - .increment(amount) - .1, - Tendril::from(expected) - ); - } - } - - #[test] - fn test_increment_with_separators() { - let tests = [ - ("999_999", 1, "1_000_000"), - ("1_000_000", -1, "999_999"), - ("-999_999", -1, "-1_000_000"), - ("0x0000_0000_0001", 0x1_ffff_0000, "0x0001_ffff_0001"), - ("0x0000_0000_0001", 0x1_ffff_0000, "0x0001_ffff_0001"), - ("0x0000_0000_0001", 0x1_ffff_0000, "0x0001_ffff_0001"), - ("0x0000_0000", -1, "0xffff_ffff_ffff_ffff"), - ("0x0000_0000_0000", -1, "0xffff_ffff_ffff_ffff"), - ("0b01111111_11111111", 1, "0b10000000_00000000"), - ("0b11111111_11111111", 1, "0b1_00000000_00000000"), - ]; - - for (original, amount, expected) in tests { - let rope = Rope::from_str(original); - let range = Range::point(0); - assert_eq!( - NumberIncrementor::from_range(rope.slice(..), range) - .unwrap() - .increment(amount) - .1, - Tendril::from(expected) - ); - } - } -} diff --git a/helix-core/src/indent.rs b/helix-core/src/indent.rs index d6aa5edb..db780ed9 100644 --- a/helix-core/src/indent.rs +++ 
b/helix-core/src/indent.rs @@ -4,6 +4,7 @@ use tree_sitter::{Query, QueryCursor, QueryPredicateArg}; use crate::{ chars::{char_is_line_ending, char_is_whitespace}, + graphemes::tab_width_at, syntax::{LanguageConfiguration, RopeProvider, Syntax}, tree_sitter::Node, Rope, RopeSlice, @@ -56,6 +57,14 @@ impl IndentStyle { } } } + + #[inline] + pub fn indent_width(&self, tab_width: usize) -> usize { + match *self { + IndentStyle::Tabs => tab_width, + IndentStyle::Spaces(width) => width as usize, + } + } } /// Attempts to detect the indentation style used in a document. @@ -177,17 +186,17 @@ pub fn auto_detect_indent_style(document_text: &Rope) -> Option { /// To determine indentation of a newly inserted line, figure out the indentation at the last col /// of the previous line. -pub fn indent_level_for_line(line: RopeSlice, tab_width: usize) -> usize { +pub fn indent_level_for_line(line: RopeSlice, tab_width: usize, indent_width: usize) -> usize { let mut len = 0; for ch in line.chars() { match ch { - '\t' => len += tab_width, + '\t' => len += tab_width_at(len, tab_width as u16), ' ' => len += 1, _ => break, } } - len / tab_width + len / indent_width } /// Computes for node and all ancestors whether they are the first node on their line. @@ -466,6 +475,7 @@ fn extend_nodes<'a>( text: RopeSlice, line: usize, tab_width: usize, + indent_width: usize, ) { let mut stop_extend = false; @@ -490,10 +500,12 @@ fn extend_nodes<'a>( if deepest_preceding.end_position().row == line { extend_node = true; } else { - let cursor_indent = indent_level_for_line(text.line(line), tab_width); + let cursor_indent = + indent_level_for_line(text.line(line), tab_width, indent_width); let node_indent = indent_level_for_line( text.line(deepest_preceding.start_position().row), tab_width, + indent_width, ); if cursor_indent > node_indent { extend_node = true; @@ -562,6 +574,7 @@ pub fn treesitter_indent_for_pos( syntax: &Syntax, indent_style: &IndentStyle, tab_width: usize, + indent_width: usize, text: RopeSlice, line: usize, pos: usize, @@ -604,7 +617,7 @@ pub fn treesitter_indent_for_pos( &mut cursor, text, query_range, - new_line.then(|| (line, byte_pos)), + new_line.then_some((line, byte_pos)), ); ts_parser.cursors.push(cursor); (query_result, deepest_preceding) @@ -622,9 +635,10 @@ pub fn treesitter_indent_for_pos( text, line, tab_width, + indent_width, ); } - let mut first_in_line = get_first_in_line(node, new_line.then(|| byte_pos)); + let mut first_in_line = get_first_in_line(node, new_line.then_some(byte_pos)); let mut result = Indentation::default(); // We always keep track of all the indent changes on one line, in order to only indent once @@ -709,6 +723,7 @@ pub fn indent_for_newline( line_before_end_pos: usize, current_line: usize, ) -> String { + let indent_width = indent_style.indent_width(tab_width); if let (Some(query), Some(syntax)) = ( language_config.and_then(|config| config.indent_query()), syntax, @@ -718,6 +733,7 @@ pub fn indent_for_newline( syntax, indent_style, tab_width, + indent_width, text, line_before, line_before_end_pos, @@ -726,7 +742,7 @@ pub fn indent_for_newline( return indent; }; } - let indent_level = indent_level_for_line(text.line(current_line), tab_width); + let indent_level = indent_level_for_line(text.line(current_line), tab_width, indent_width); indent_style.as_str().repeat(indent_level) } @@ -763,12 +779,22 @@ mod test { #[test] fn test_indent_level() { let tab_width = 4; + let indent_width = 4; let line = Rope::from(" fn new"); // 8 spaces - 
assert_eq!(indent_level_for_line(line.slice(..), tab_width), 2); + assert_eq!( + indent_level_for_line(line.slice(..), tab_width, indent_width), + 2 + ); let line = Rope::from("\t\t\tfn new"); // 3 tabs - assert_eq!(indent_level_for_line(line.slice(..), tab_width), 3); + assert_eq!( + indent_level_for_line(line.slice(..), tab_width, indent_width), + 3 + ); // mixed indentation let line = Rope::from("\t \tfn new"); // 1 tab, 4 spaces, tab - assert_eq!(indent_level_for_line(line.slice(..), tab_width), 3); + assert_eq!( + indent_level_for_line(line.slice(..), tab_width, indent_width), + 3 + ); } } diff --git a/helix-core/src/lib.rs b/helix-core/src/lib.rs index ee174e69..e3f862a6 100644 --- a/helix-core/src/lib.rs +++ b/helix-core/src/lib.rs @@ -6,6 +6,7 @@ pub mod comment; pub mod config; pub mod diagnostic; pub mod diff; +pub mod doc_formatter; pub mod graphemes; pub mod history; pub mod increment; @@ -24,6 +25,7 @@ pub mod shellwords; pub mod surround; pub mod syntax; pub mod test; +pub mod text_annotations; pub mod textobject; mod transaction; pub mod wrap; @@ -95,8 +97,12 @@ pub use {regex, tree_sitter}; pub use graphemes::RopeGraphemes; pub use position::{ - coords_at_pos, pos_at_coords, pos_at_visual_coords, visual_coords_at_pos, Position, + char_idx_at_visual_offset, coords_at_pos, pos_at_coords, visual_offset_from_anchor, + visual_offset_from_block, Position, }; +#[allow(deprecated)] +pub use position::{pos_at_visual_coords, visual_coords_at_pos}; + pub use selection::{Range, Selection}; pub use smallvec::{smallvec, SmallVec}; pub use syntax::Syntax; diff --git a/helix-core/src/line_ending.rs b/helix-core/src/line_ending.rs index 09e92523..953d567d 100644 --- a/helix-core/src/line_ending.rs +++ b/helix-core/src/line_ending.rs @@ -203,6 +203,13 @@ pub fn line_end_char_index(slice: &RopeSlice, line: usize) -> usize { .unwrap_or(0) } +pub fn line_end_byte_index(slice: &RopeSlice, line: usize) -> usize { + slice.line_to_byte(line + 1) + - get_line_ending(&slice.line(line)) + .map(|le| le.as_str().len()) + .unwrap_or(0) +} + /// Fetches line `line_idx` from the passed rope slice, sans any line ending. 
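The new line_end_byte_index mirrors line_end_char_index but in byte units, which diverge as soon as multi-byte characters appear. A small check of that relationship (assuming a helix_core built with this patch; illustrative only):

use helix_core::line_ending::{line_end_byte_index, line_end_char_index};
use helix_core::Rope;

fn main() {
    let text = Rope::from_str("köln\nbonn\n");
    let slice = text.slice(..);
    // "köln" is 4 chars but 5 bytes, so the two indices differ by one
    assert_eq!(line_end_char_index(&slice, 0), 4);
    assert_eq!(line_end_byte_index(&slice, 0), 5);
}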
pub fn line_without_line_ending<'a>(slice: &'a RopeSlice, line_idx: usize) -> RopeSlice<'a> { let start = slice.line_to_char(line_idx); diff --git a/helix-core/src/movement.rs b/helix-core/src/movement.rs index 278375e8..60af47e5 100644 --- a/helix-core/src/movement.rs +++ b/helix-core/src/movement.rs @@ -4,16 +4,19 @@ use ropey::iter::Chars; use tree_sitter::{Node, QueryCursor}; use crate::{ + char_idx_at_visual_offset, chars::{categorize_char, char_is_line_ending, CharCategory}, + doc_formatter::TextFormat, graphemes::{ next_grapheme_boundary, nth_next_grapheme_boundary, nth_prev_grapheme_boundary, prev_grapheme_boundary, }, line_ending::rope_is_line_ending, - pos_at_visual_coords, + position::char_idx_at_visual_block_offset, syntax::LanguageConfiguration, + text_annotations::TextAnnotations, textobject::TextObject, - visual_coords_at_pos, Position, Range, RopeSlice, + visual_offset_from_block, Range, RopeSlice, }; #[derive(Debug, Copy, Clone, PartialEq, Eq)] @@ -34,7 +37,8 @@ pub fn move_horizontally( dir: Direction, count: usize, behaviour: Movement, - _: usize, + _: &TextFormat, + _: &mut TextAnnotations, ) -> Range { let pos = range.cursor(slice); @@ -48,35 +52,116 @@ pub fn move_horizontally( range.put_cursor(slice, new_pos, behaviour == Movement::Extend) } +pub fn move_vertically_visual( + slice: RopeSlice, + range: Range, + dir: Direction, + count: usize, + behaviour: Movement, + text_fmt: &TextFormat, + annotations: &mut TextAnnotations, +) -> Range { + if !text_fmt.soft_wrap { + move_vertically(slice, range, dir, count, behaviour, text_fmt, annotations); + } + annotations.clear_line_annotations(); + let pos = range.cursor(slice); + + // Compute the current position's 2d coordinates. + let (visual_pos, block_off) = visual_offset_from_block(slice, pos, pos, text_fmt, annotations); + let new_col = range + .old_visual_position + .map_or(visual_pos.col as u32, |(_, col)| col); + + // Compute the new position. + let mut row_off = match dir { + Direction::Forward => count as isize, + Direction::Backward => -(count as isize), + }; + + // TODO how to handle inline annotations that span an entire visual line (very unlikely). + + // Compute visual offset relative to block start to avoid trasversing the block twice + row_off += visual_pos.row as isize; + let new_pos = char_idx_at_visual_offset( + slice, + block_off, + row_off, + new_col as usize, + text_fmt, + annotations, + ) + .0; + + // Special-case to avoid moving to the end of the last non-empty line. + if behaviour == Movement::Extend && slice.line(slice.char_to_line(new_pos)).len_chars() == 0 { + return range; + } + + let mut new_range = range.put_cursor(slice, new_pos, behaviour == Movement::Extend); + new_range.old_visual_position = Some((0, new_col)); + new_range +} + pub fn move_vertically( slice: RopeSlice, range: Range, dir: Direction, count: usize, behaviour: Movement, - tab_width: usize, + text_fmt: &TextFormat, + annotations: &mut TextAnnotations, ) -> Range { + annotations.clear_line_annotations(); let pos = range.cursor(slice); + let line_idx = slice.char_to_line(pos); + let line_start = slice.line_to_char(line_idx); // Compute the current position's 2d coordinates. 
- let Position { row, col } = visual_coords_at_pos(slice, pos, tab_width); - let horiz = range.horiz.unwrap_or(col as u32); + let visual_pos = visual_offset_from_block(slice, line_start, pos, text_fmt, annotations).0; + let (mut new_row, new_col) = range + .old_visual_position + .map_or((visual_pos.row as u32, visual_pos.col as u32), |pos| pos); + new_row = new_row.max(visual_pos.row as u32); + let line_idx = slice.char_to_line(pos); // Compute the new position. - let new_row = match dir { - Direction::Forward => (row + count).min(slice.len_lines().saturating_sub(1)), - Direction::Backward => row.saturating_sub(count), + let mut new_line_idx = match dir { + Direction::Forward => line_idx.saturating_add(count), + Direction::Backward => line_idx.saturating_sub(count), + }; + + let line = if new_line_idx >= slice.len_lines() - 1 { + // there is no line terminator for the last line + // so the logic below is not necessary here + new_line_idx = slice.len_lines() - 1; + slice + } else { + // char_idx_at_visual_block_offset returns a one-past-the-end index + // in case it reaches the end of the slice + // to avoid moving to the nextline in that case the line terminator is removed from the line + let new_line_end = prev_grapheme_boundary(slice, slice.line_to_char(new_line_idx + 1)); + slice.slice(..new_line_end) }; - let new_col = col.max(horiz as usize); - let new_pos = pos_at_visual_coords(slice, Position::new(new_row, new_col), tab_width); + + let new_line_start = line.line_to_char(new_line_idx); + + let (new_pos, _) = char_idx_at_visual_block_offset( + line, + new_line_start, + new_row as usize, + new_col as usize, + text_fmt, + annotations, + ); // Special-case to avoid moving to the end of the last non-empty line. - if behaviour == Movement::Extend && slice.line(new_row).len_chars() == 0 { + if behaviour == Movement::Extend && slice.line(new_line_idx).len_chars() == 0 { return range; } let mut new_range = range.put_cursor(slice, new_pos, behaviour == Movement::Extend); - new_range.horiz = Some(horiz); + new_range.old_visual_position = Some((new_row, new_col)); new_range } @@ -142,9 +227,15 @@ fn word_move(slice: RopeSlice, range: Range, count: usize, target: WordMotionTar }; // Do the main work. 
- (0..count).fold(start_range, |r, _| { - slice.chars_at(r.head).range_to_target(target, r) - }) + let mut range = start_range; + for _ in 0..count { + let next_range = slice.chars_at(range.head).range_to_target(target, range); + if range == next_range { + break; + } + range = next_range; + } + range } pub fn move_prev_paragraph( @@ -166,6 +257,7 @@ pub fn move_prev_paragraph( let mut lines = slice.lines_at(line); lines.reverse(); let mut lines = lines.map(rope_is_line_ending).peekable(); + let mut last_line = line; for _ in 0..count { while lines.next_if(|&e| e).is_some() { line -= 1; @@ -173,6 +265,10 @@ pub fn move_prev_paragraph( while lines.next_if(|&e| !e).is_some() { line -= 1; } + if line == last_line { + break; + } + last_line = line; } let head = slice.line_to_char(line); @@ -208,6 +304,7 @@ pub fn move_next_paragraph( line += 1; } let mut lines = slice.lines_at(line).map(rope_is_line_ending).peekable(); + let mut last_line = line; for _ in 0..count { while lines.next_if(|&e| !e).is_some() { line += 1; @@ -215,6 +312,10 @@ pub fn move_next_paragraph( while lines.next_if(|&e| e).is_some() { line += 1; } + if line == last_line { + break; + } + last_line = line; } let head = slice.line_to_char(line); let anchor = if behavior == Movement::Move { @@ -438,7 +539,14 @@ pub fn goto_treesitter_object( // head of range should be at beginning Some(Range::new(start_char, end_char)) }; - (0..count).fold(range, |range, _| get_range(range).unwrap_or(range)) + let mut last_range = range; + for _ in 0..count { + match get_range(last_range) { + Some(r) if r != last_range => last_range = r, + _ => break, + } + } + last_range } #[cfg(test)] @@ -473,7 +581,16 @@ mod test { assert_eq!( coords_at_pos( slice, - move_vertically(slice, range, Direction::Forward, 1, Movement::Move, 4).head + move_vertically_visual( + slice, + range, + Direction::Forward, + 1, + Movement::Move, + &TextFormat::default(), + &mut TextAnnotations::default(), + ) + .head ), (1, 3).into() ); @@ -497,7 +614,15 @@ mod test { ]; for ((direction, amount), coordinates) in moves_and_expected_coordinates { - range = move_horizontally(slice, range, direction, amount, Movement::Move, 0); + range = move_horizontally( + slice, + range, + direction, + amount, + Movement::Move, + &TextFormat::default(), + &mut TextAnnotations::default(), + ); assert_eq!(coords_at_pos(slice, range.head), coordinates.into()) } } @@ -523,7 +648,15 @@ mod test { ]; for ((direction, amount), coordinates) in moves_and_expected_coordinates { - range = move_horizontally(slice, range, direction, amount, Movement::Move, 0); + range = move_horizontally( + slice, + range, + direction, + amount, + Movement::Move, + &TextFormat::default(), + &mut TextAnnotations::default(), + ); assert_eq!(coords_at_pos(slice, range.head), coordinates.into()); assert_eq!(range.head, range.anchor); } @@ -545,7 +678,15 @@ mod test { ]; for (direction, amount) in moves { - range = move_horizontally(slice, range, direction, amount, Movement::Extend, 0); + range = move_horizontally( + slice, + range, + direction, + amount, + Movement::Extend, + &TextFormat::default(), + &mut TextAnnotations::default(), + ); assert_eq!(range.anchor, original_anchor); } } @@ -569,7 +710,15 @@ mod test { ]; for ((direction, amount), coordinates) in moves_and_expected_coordinates { - range = move_vertically(slice, range, direction, amount, Movement::Move, 4); + range = move_vertically_visual( + slice, + range, + direction, + amount, + Movement::Move, + &TextFormat::default(), + &mut TextAnnotations::default(), + ); 
assert_eq!(coords_at_pos(slice, range.head), coordinates.into()); assert_eq!(range.head, range.anchor); } @@ -603,8 +752,24 @@ mod test { for ((axis, direction, amount), coordinates) in moves_and_expected_coordinates { range = match axis { - Axis::H => move_horizontally(slice, range, direction, amount, Movement::Move, 0), - Axis::V => move_vertically(slice, range, direction, amount, Movement::Move, 4), + Axis::H => move_horizontally( + slice, + range, + direction, + amount, + Movement::Move, + &TextFormat::default(), + &mut TextAnnotations::default(), + ), + Axis::V => move_vertically_visual( + slice, + range, + direction, + amount, + Movement::Move, + &TextFormat::default(), + &mut TextAnnotations::default(), + ), }; assert_eq!(coords_at_pos(slice, range.head), coordinates.into()); assert_eq!(range.head, range.anchor); @@ -638,8 +803,24 @@ mod test { for ((axis, direction, amount), coordinates) in moves_and_expected_coordinates { range = match axis { - Axis::H => move_horizontally(slice, range, direction, amount, Movement::Move, 0), - Axis::V => move_vertically(slice, range, direction, amount, Movement::Move, 4), + Axis::H => move_horizontally( + slice, + range, + direction, + amount, + Movement::Move, + &TextFormat::default(), + &mut TextAnnotations::default(), + ), + Axis::V => move_vertically_visual( + slice, + range, + direction, + amount, + Movement::Move, + &TextFormat::default(), + &mut TextAnnotations::default(), + ), }; assert_eq!(coords_at_pos(slice, range.head), coordinates.into()); assert_eq!(range.head, range.anchor); @@ -1293,7 +1474,7 @@ mod test { let text = Rope::from(s.as_str()); let selection = selection.transform(|r| move_prev_paragraph(text.slice(..), r, 1, Movement::Move)); - let actual = crate::test::plain(&s, selection); + let actual = crate::test::plain(s.as_ref(), &selection); assert_eq!(actual, expected, "\nbefore: `{:?}`", before); } } @@ -1316,7 +1497,7 @@ mod test { let text = Rope::from(s.as_str()); let selection = selection.transform(|r| move_prev_paragraph(text.slice(..), r, 2, Movement::Move)); - let actual = crate::test::plain(&s, selection); + let actual = crate::test::plain(s.as_ref(), &selection); assert_eq!(actual, expected, "\nbefore: `{:?}`", before); } } @@ -1339,7 +1520,7 @@ mod test { let text = Rope::from(s.as_str()); let selection = selection .transform(|r| move_prev_paragraph(text.slice(..), r, 1, Movement::Extend)); - let actual = crate::test::plain(&s, selection); + let actual = crate::test::plain(s.as_ref(), &selection); assert_eq!(actual, expected, "\nbefore: `{:?}`", before); } } @@ -1359,7 +1540,7 @@ mod test { "a\nb\n\n#[goto\nthird\n\n|]#paragraph", ), ( - "a\nb#[\n|]#\ngoto\nsecond\n\nparagraph", + "a\nb#[\n|]#\n\ngoto\nsecond\n\nparagraph", "a\nb#[\n\n|]#goto\nsecond\n\nparagraph", ), ( @@ -1381,7 +1562,7 @@ mod test { let text = Rope::from(s.as_str()); let selection = selection.transform(|r| move_next_paragraph(text.slice(..), r, 1, Movement::Move)); - let actual = crate::test::plain(&s, selection); + let actual = crate::test::plain(s.as_ref(), &selection); assert_eq!(actual, expected, "\nbefore: `{:?}`", before); } } @@ -1404,7 +1585,7 @@ mod test { let text = Rope::from(s.as_str()); let selection = selection.transform(|r| move_next_paragraph(text.slice(..), r, 2, Movement::Move)); - let actual = crate::test::plain(&s, selection); + let actual = crate::test::plain(s.as_ref(), &selection); assert_eq!(actual, expected, "\nbefore: `{:?}`", before); } } @@ -1427,7 +1608,7 @@ mod test { let text = Rope::from(s.as_str()); let selection 
= selection .transform(|r| move_next_paragraph(text.slice(..), r, 1, Movement::Extend)); - let actual = crate::test::plain(&s, selection); + let actual = crate::test::plain(s.as_ref(), &selection); assert_eq!(actual, expected, "\nbefore: `{:?}`", before); } } diff --git a/helix-core/src/position.rs b/helix-core/src/position.rs index f456eb98..7b8dc326 100644 --- a/helix-core/src/position.rs +++ b/helix-core/src/position.rs @@ -1,9 +1,11 @@ -use std::borrow::Cow; +use std::{borrow::Cow, cmp::Ordering}; use crate::{ chars::char_is_line_ending, + doc_formatter::{DocumentFormatter, TextFormat}, graphemes::{ensure_grapheme_boundary_prev, grapheme_width, RopeGraphemes}, line_ending::line_end_char_index, + text_annotations::TextAnnotations, RopeSlice, }; @@ -73,6 +75,13 @@ pub fn coords_at_pos(text: RopeSlice, pos: usize) -> Position { /// Takes \t, double-width characters (CJK) into account as well as text /// not in the document in the future. /// See [`coords_at_pos`] for an "objective" one. +/// +/// This function should be used very rarely. Usually `visual_offset_from_anchor` +/// or `visual_offset_from_block` is preferable. However when you want to compute the +/// actual visual row/column in the text (not what is actually shown on screen) +/// then you should use this function. For example aligning text should ignore virtual +/// text and softwrap. +#[deprecated = "Doesn't account for softwrap or decorations, use visual_offset_from_anchor instead"] pub fn visual_coords_at_pos(text: RopeSlice, pos: usize, tab_width: usize) -> Position { let line = text.char_to_line(pos); @@ -93,6 +102,82 @@ pub fn visual_coords_at_pos(text: RopeSlice, pos: usize, tab_width: usize) -> Po Position::new(line, col) } +/// Returns the visual offset from the start of the first visual line +/// in the block that contains anchor. +/// Text is always wrapped at blocks, they usually correspond to +/// actual line breaks but for very long lines +/// softwrapping positions are estimated with an O(1) algorithm +/// to ensure consistent performance for large lines (currently unimplemented) +/// +/// Usualy you want to use `visual_offset_from_anchor` instead but this function +/// can be useful (and faster) if +/// * You already know the visual position of the block +/// * You only care about the horizontal offset (column) and not the vertical offset (row) +pub fn visual_offset_from_block( + text: RopeSlice, + anchor: usize, + pos: usize, + text_fmt: &TextFormat, + annotations: &TextAnnotations, +) -> (Position, usize) { + let mut last_pos = Position::default(); + let (formatter, block_start) = + DocumentFormatter::new_at_prev_checkpoint(text, text_fmt, annotations, anchor); + let mut char_pos = block_start; + + for (grapheme, vpos) in formatter { + last_pos = vpos; + char_pos += grapheme.doc_chars(); + + if char_pos > pos { + return (last_pos, block_start); + } + } + + (last_pos, block_start) +} + +/// Returns the visual offset from the start of the visual line +/// that contains anchor. 
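// A hedged usage sketch for the `visual_offset_from_block` API added above. It assumes
// `helix_core` is available as a dependency, that the `position`, `doc_formatter` and
// `text_annotations` modules are public, and that default formatting/annotations are
// acceptable (as in the tests further below); none of this is spelled out in the hunk itself.
use helix_core::doc_formatter::TextFormat;
use helix_core::position::visual_offset_from_block;
use helix_core::text_annotations::TextAnnotations;
use helix_core::Rope;

// Visual (row, col) of `pos`, relative to the block that contains `anchor`.
fn visual_pos_in_block(doc: &Rope, anchor: usize, pos: usize) -> (usize, usize) {
    let (position, _block_start) = visual_offset_from_block(
        doc.slice(..),
        anchor,
        pos,
        &TextFormat::default(),
        &TextAnnotations::default(),
    );
    (position.row, position.col)
}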
+pub fn visual_offset_from_anchor( + text: RopeSlice, + anchor: usize, + pos: usize, + text_fmt: &TextFormat, + annotations: &TextAnnotations, + max_rows: usize, +) -> Option<(Position, usize)> { + let (formatter, block_start) = + DocumentFormatter::new_at_prev_checkpoint(text, text_fmt, annotations, anchor); + let mut char_pos = block_start; + let mut anchor_line = None; + let mut last_pos = Position::default(); + + for (grapheme, vpos) in formatter { + last_pos = vpos; + char_pos += grapheme.doc_chars(); + + if char_pos > anchor && anchor_line.is_none() { + anchor_line = Some(last_pos.row); + } + if char_pos > pos { + last_pos.row -= anchor_line.unwrap(); + return Some((last_pos, block_start)); + } + + if let Some(anchor_line) = anchor_line { + if vpos.row >= anchor_line + max_rows { + return None; + } + } + } + + let anchor_line = anchor_line.unwrap_or(last_pos.row); + last_pos.row -= anchor_line; + + Some((last_pos, block_start)) +} + /// Convert (line, column) coordinates to a character index. /// /// If the `line` coordinate is beyond the end of the file, the EOF @@ -140,6 +225,11 @@ pub fn pos_at_coords(text: RopeSlice, coords: Position, limit_before_line_ending /// If the `column` coordinate is past the end of the given line, the /// line-end position (in this case, just before the line ending /// character) will be returned. +/// This function should be used very rarely. Usually `char_idx_at_visual_offset` is preferable. +/// However when you want to compute a char position from the visual row/column in the text +/// (not what is actually shown on screen) then you should use this function. +/// For example aligning text should ignore virtual text and softwrap. +#[deprecated = "Doesn't account for softwrap or decorations, use char_idx_at_visual_offset instead"] pub fn pos_at_visual_coords(text: RopeSlice, coords: Position, tab_width: usize) -> usize { let Position { mut row, col } = coords; row = row.min(text.len_lines() - 1); @@ -169,6 +259,120 @@ pub fn pos_at_visual_coords(text: RopeSlice, coords: Position, tab_width: usize) line_start + col_char_offset } +/// Returns the char index on the visual line `row_offset` below the visual line of +/// the provided char index `anchor` that is closest to the supplied visual `column`. +/// +/// If the targeted visual line is entirely covered by virtual text the last +/// char position before the virtual text and a virtual offset is returned instead. +/// +/// If no (text) grapheme starts at exactly at the specified column the +/// start of the grapheme to the left is returned. If there is no grapheme +/// to the left (for example if the line starts with virtual text) then the positiong +/// of the next grapheme to the right is returned. +/// +/// If the `line` coordinate is beyond the end of the file, the EOF +/// position will be returned. +/// +/// If the `column` coordinate is past the end of the given line, the +/// line-end position (in this case, just before the line ending +/// character) will be returned. 
+/// +/// # Returns +/// +/// `(real_char_idx, virtual_lines)` +/// +/// The nearest character idx "closest" (see above) to the specified visual offset +/// on the visual line is returned if the visual line contains any text: +/// If the visual line at the specified offset is a virtual line generated by a `LineAnnotation` +/// the previous char_index is returned, together with the remaining vertical offset (`virtual_lines`) +pub fn char_idx_at_visual_offset<'a>( + text: RopeSlice<'a>, + mut anchor: usize, + mut row_offset: isize, + column: usize, + text_fmt: &TextFormat, + annotations: &TextAnnotations, +) -> (usize, usize) { + // convert row relative to visual line containing anchor to row relative to a block containing anchor (anchor may change) + loop { + let (visual_pos_in_block, block_char_offset) = + visual_offset_from_block(text, anchor, anchor, text_fmt, annotations); + row_offset += visual_pos_in_block.row as isize; + anchor = block_char_offset; + if row_offset >= 0 { + break; + } + + if block_char_offset == 0 { + row_offset = 0; + break; + } + // the row_offset is negative so we need to look at the previous block + // set the anchor to the last char before the current block + // this char index is also always a line earlier so increase the row_offset by 1 + anchor -= 1; + row_offset += 1; + } + + char_idx_at_visual_block_offset( + text, + anchor, + row_offset as usize, + column, + text_fmt, + annotations, + ) +} + +/// This function behaves the same as `char_idx_at_visual_offset`, except that +/// the vertical offset `row` is always computed relative to the block that contains `anchor` +/// instead of the visual line that contains `anchor`. +/// Usually `char_idx_at_visual_offset` is more useful but this function can be +/// used in some situations as an optimization when `visual_offset_from_block` was used +/// +/// # Returns +/// +/// `(real_char_idx, virtual_lines)` +/// +/// See `char_idx_at_visual_offset` for details +pub fn char_idx_at_visual_block_offset( + text: RopeSlice, + anchor: usize, + row: usize, + column: usize, + text_fmt: &TextFormat, + annotations: &TextAnnotations, +) -> (usize, usize) { + let (formatter, mut char_idx) = + DocumentFormatter::new_at_prev_checkpoint(text, text_fmt, annotations, anchor); + let mut last_char_idx = char_idx; + let mut last_char_idx_on_line = None; + let mut last_row = 0; + for (grapheme, grapheme_pos) in formatter { + match grapheme_pos.row.cmp(&row) { + Ordering::Equal => { + if grapheme_pos.col + grapheme.width() > column { + if !grapheme.is_virtual() { + return (char_idx, 0); + } else if let Some(char_idx) = last_char_idx_on_line { + return (char_idx, 0); + } + } else if !grapheme.is_virtual() { + last_char_idx_on_line = Some(char_idx) + } + } + Ordering::Greater => return (last_char_idx, row - last_row), + _ => (), + } + + last_char_idx = char_idx; + last_row = grapheme_pos.row; + char_idx += grapheme.doc_chars(); + } + + (char_idx, 0) +} + #[cfg(test)] mod test { use super::*; @@ -228,6 +432,7 @@ mod test { } #[test] + #[allow(deprecated)] fn test_visual_coords_at_pos() { let text = Rope::from("ḧëḷḷö\nẅöṛḷḋ"); let slice = text.slice(..); @@ -275,6 +480,130 @@ mod test { assert_eq!(visual_coords_at_pos(slice, 2, 8), (0, 9).into()); } + #[test] + fn test_visual_off_from_block() { + let text = Rope::from("ḧëḷḷö\nẅöṛḷḋ"); + let slice = text.slice(..); + let annot = TextAnnotations::default(); + let text_fmt = TextFormat::default(); + assert_eq!( + visual_offset_from_block(slice, 0, 0, &text_fmt, &annot).0, + (0, 0).into() + ); + 
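// A hedged sketch of softwrap-aware vertical movement built on `char_idx_at_visual_offset`
// (defined above). Default `TextFormat`/`TextAnnotations` stand in for the view state the
// editor would normally supply, so treat the helper name and defaults as illustrative only.
use helix_core::doc_formatter::TextFormat;
use helix_core::position::char_idx_at_visual_offset;
use helix_core::text_annotations::TextAnnotations;
use helix_core::RopeSlice;

// Char index one visual row below `cursor`, staying as close as possible to visual `column`.
fn one_visual_line_down(text: RopeSlice, cursor: usize, column: usize) -> usize {
    let (char_idx, _virtual_lines) = char_idx_at_visual_offset(
        text,
        cursor,
        1, // row_offset: one visual row down; negative values move up
        column,
        &TextFormat::default(),
        &TextAnnotations::default(),
    );
    char_idx
}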
assert_eq!( + visual_offset_from_block(slice, 0, 5, &text_fmt, &annot).0, + (0, 5).into() + ); // position on \n + assert_eq!( + visual_offset_from_block(slice, 0, 6, &text_fmt, &annot).0, + (1, 0).into() + ); // position on w + assert_eq!( + visual_offset_from_block(slice, 0, 7, &text_fmt, &annot).0, + (1, 1).into() + ); // position on o + assert_eq!( + visual_offset_from_block(slice, 0, 10, &text_fmt, &annot).0, + (1, 4).into() + ); // position on d + + // Test with wide characters. + let text = Rope::from("今日はいい\n"); + let slice = text.slice(..); + assert_eq!( + visual_offset_from_block(slice, 0, 0, &text_fmt, &annot).0, + (0, 0).into() + ); + assert_eq!( + visual_offset_from_block(slice, 0, 1, &text_fmt, &annot).0, + (0, 2).into() + ); + assert_eq!( + visual_offset_from_block(slice, 0, 2, &text_fmt, &annot).0, + (0, 4).into() + ); + assert_eq!( + visual_offset_from_block(slice, 0, 3, &text_fmt, &annot).0, + (0, 6).into() + ); + assert_eq!( + visual_offset_from_block(slice, 0, 4, &text_fmt, &annot).0, + (0, 8).into() + ); + assert_eq!( + visual_offset_from_block(slice, 0, 5, &text_fmt, &annot).0, + (0, 10).into() + ); + assert_eq!( + visual_offset_from_block(slice, 0, 6, &text_fmt, &annot).0, + (1, 0).into() + ); + + // Test with grapheme clusters. + let text = Rope::from("a̐éö̲\r\n"); + let slice = text.slice(..); + assert_eq!( + visual_offset_from_block(slice, 0, 0, &text_fmt, &annot).0, + (0, 0).into() + ); + assert_eq!( + visual_offset_from_block(slice, 0, 2, &text_fmt, &annot).0, + (0, 1).into() + ); + assert_eq!( + visual_offset_from_block(slice, 0, 4, &text_fmt, &annot).0, + (0, 2).into() + ); + assert_eq!( + visual_offset_from_block(slice, 0, 7, &text_fmt, &annot).0, + (0, 3).into() + ); + assert_eq!( + visual_offset_from_block(slice, 0, 9, &text_fmt, &annot).0, + (1, 0).into() + ); + + // Test with wide-character grapheme clusters. + // TODO: account for cluster. + let text = Rope::from("किमपि\n"); + let slice = text.slice(..); + assert_eq!( + visual_offset_from_block(slice, 0, 0, &text_fmt, &annot).0, + (0, 0).into() + ); + assert_eq!( + visual_offset_from_block(slice, 0, 2, &text_fmt, &annot).0, + (0, 2).into() + ); + assert_eq!( + visual_offset_from_block(slice, 0, 3, &text_fmt, &annot).0, + (0, 3).into() + ); + assert_eq!( + visual_offset_from_block(slice, 0, 5, &text_fmt, &annot).0, + (0, 5).into() + ); + assert_eq!( + visual_offset_from_block(slice, 0, 6, &text_fmt, &annot).0, + (1, 0).into() + ); + + // Test with tabs. 
+ let text = Rope::from("\tHello\n"); + let slice = text.slice(..); + assert_eq!( + visual_offset_from_block(slice, 0, 0, &text_fmt, &annot).0, + (0, 0).into() + ); + assert_eq!( + visual_offset_from_block(slice, 0, 1, &text_fmt, &annot).0, + (0, 4).into() + ); + assert_eq!( + visual_offset_from_block(slice, 0, 2, &text_fmt, &annot).0, + (0, 5).into() + ); + } #[test] fn test_pos_at_coords() { let text = Rope::from("ḧëḷḷö\nẅöṛḷḋ"); @@ -341,6 +670,7 @@ mod test { } #[test] + #[allow(deprecated)] fn test_pos_at_visual_coords() { let text = Rope::from("ḧëḷḷö\nẅöṛḷḋ"); let slice = text.slice(..); @@ -405,4 +735,100 @@ mod test { assert_eq!(pos_at_visual_coords(slice, (0, 10).into(), 4), 0); assert_eq!(pos_at_visual_coords(slice, (10, 10).into(), 4), 0); } + + #[test] + fn test_char_idx_at_visual_row_offset() { + let text = Rope::from("ḧëḷḷö\nẅöṛḷḋ\nfoo"); + let slice = text.slice(..); + let mut text_fmt = TextFormat::default(); + for i in 0isize..3isize { + for j in -2isize..=2isize { + if !(0..3).contains(&(i + j)) { + continue; + } + println!("{i} {j}"); + assert_eq!( + char_idx_at_visual_offset( + slice, + slice.line_to_char(i as usize), + j, + 3, + &text_fmt, + &TextAnnotations::default(), + ) + .0, + slice.line_to_char((i + j) as usize) + 3 + ); + } + } + + text_fmt.soft_wrap = true; + let mut softwrapped_text = "foo ".repeat(10); + softwrapped_text.push('\n'); + let last_char = softwrapped_text.len() - 1; + + let text = Rope::from(softwrapped_text.repeat(3)); + let slice = text.slice(..); + assert_eq!( + char_idx_at_visual_offset( + slice, + last_char, + 0, + 0, + &text_fmt, + &TextAnnotations::default(), + ) + .0, + 32 + ); + assert_eq!( + char_idx_at_visual_offset( + slice, + last_char, + -1, + 0, + &text_fmt, + &TextAnnotations::default(), + ) + .0, + 16 + ); + assert_eq!( + char_idx_at_visual_offset( + slice, + last_char, + -2, + 0, + &text_fmt, + &TextAnnotations::default(), + ) + .0, + 0 + ); + assert_eq!( + char_idx_at_visual_offset( + slice, + softwrapped_text.len() + last_char, + -2, + 0, + &text_fmt, + &TextAnnotations::default(), + ) + .0, + softwrapped_text.len() + ); + + assert_eq!( + char_idx_at_visual_offset( + slice, + softwrapped_text.len() + last_char, + -5, + 0, + &text_fmt, + &TextAnnotations::default(), + ) + .0, + 0 + ); + } } diff --git a/helix-core/src/selection.rs b/helix-core/src/selection.rs index ffba46ab..8e93c633 100644 --- a/helix-core/src/selection.rs +++ b/helix-core/src/selection.rs @@ -21,14 +21,14 @@ use std::borrow::Cow; /// can be in any order, or even share the same position. /// /// The anchor and head positions use gap indexing, meaning -/// that their indices represent the the gaps *between* `char`s +/// that their indices represent the gaps *between* `char`s /// rather than the `char`s themselves. For example, 1 /// represents the position between the first and second `char`. /// -/// Below are some example `Range` configurations to better -/// illustrate. The anchor and head indices are show as -/// "(anchor, head)", followed by example text with "[" and "]" -/// inserted to represent the anchor and head positions: +/// Below are some examples of `Range` configurations. +/// The anchor and head indices are shown as "(anchor, head)" +/// tuples, followed by example text with "[" and "]" symbols +/// representing the anchor and head positions: /// /// - (0, 3): `[Som]e text`. /// - (3, 0): `]Som[e text`. @@ -53,7 +53,9 @@ pub struct Range { pub anchor: usize, /// The head of the range, moved when extending. 
pub head: usize, - pub horiz: Option, + /// The previous visual offset (softwrapped lines and columns) from + /// the start of the line + pub old_visual_position: Option<(u32, u32)>, } impl Range { @@ -61,7 +63,7 @@ impl Range { Self { anchor, head, - horiz: None, + old_visual_position: None, } } @@ -127,7 +129,7 @@ impl Range { Self { anchor: self.head, head: self.anchor, - horiz: self.horiz, + old_visual_position: self.old_visual_position, } } @@ -185,7 +187,7 @@ impl Range { Self { anchor, head, - horiz: None, + old_visual_position: None, } } @@ -198,13 +200,13 @@ impl Range { Self { anchor: self.anchor.min(from), head: self.head.max(to), - horiz: None, + old_visual_position: None, } } else { Self { anchor: self.anchor.max(to), head: self.head.min(from), - horiz: None, + old_visual_position: None, } } } @@ -219,13 +221,13 @@ impl Range { Range { anchor: self.anchor.max(other.anchor), head: self.head.min(other.head), - horiz: None, + old_visual_position: None, } } else { Range { anchor: self.from().min(other.from()), head: self.to().max(other.to()), - horiz: None, + old_visual_position: None, } } } @@ -279,8 +281,8 @@ impl Range { Range { anchor: new_anchor, head: new_head, - horiz: if new_anchor == self.anchor { - self.horiz + old_visual_position: if new_anchor == self.anchor { + self.old_visual_position } else { None }, @@ -306,7 +308,7 @@ impl Range { Range { anchor: self.anchor, head: next_grapheme_boundary(slice, self.head), - horiz: self.horiz, + old_visual_position: self.old_visual_position, } } else { *self @@ -378,7 +380,7 @@ impl From<(usize, usize)> for Range { Self { anchor, head, - horiz: None, + old_visual_position: None, } } } @@ -482,7 +484,7 @@ impl Selection { ranges: smallvec![Range { anchor, head, - horiz: None + old_visual_position: None }], primary_index: 0, } @@ -566,9 +568,9 @@ impl Selection { } /// Takes a closure and maps each `Range` over the closure. - pub fn transform(mut self, f: F) -> Self + pub fn transform(mut self, mut f: F) -> Self where - F: Fn(Range) -> Range, + F: FnMut(Range) -> Range, { for range in self.ranges.iter_mut() { *range = f(*range) @@ -576,6 +578,16 @@ impl Selection { self.normalize() } + /// Takes a closure and maps each `Range` over the closure to multiple `Range`s. + pub fn transform_iter(mut self, f: F) -> Self + where + F: FnMut(Range) -> I, + I: Iterator, + { + self.ranges = self.ranges.into_iter().flat_map(f).collect(); + self.normalize() + } + // Ensures the selection adheres to the following invariants: // 1. All ranges are grapheme aligned. // 2. 
All ranges are at least 1 character wide, unless at the @@ -613,11 +625,6 @@ impl Selection { // returns true if self ⊇ other pub fn contains(&self, other: &Selection) -> bool { - // can't contain other if it is larger - if other.len() > self.len() { - return false; - } - let (mut iter_self, mut iter_other) = (self.iter(), other.iter()); let (mut ele_self, mut ele_other) = (iter_self.next(), iter_other.next()); @@ -654,6 +661,15 @@ impl<'a> IntoIterator for &'a Selection { } } +impl IntoIterator for Selection { + type Item = Range; + type IntoIter = smallvec::IntoIter<[Range; 1]>; + + fn into_iter(self) -> smallvec::IntoIter<[Range; 1]> { + self.ranges.into_iter() + } +} + // TODO: checkSelection -> check if valid for doc length && sorted pub fn keep_or_remove_matches( @@ -1228,5 +1244,11 @@ mod test { vec!((3, 4), (7, 9)) )); assert!(!contains(vec!((1, 1), (5, 6)), vec!((1, 6)))); + + // multiple ranges of other are all contained in some ranges of self, + assert!(contains( + vec!((1, 4), (7, 10)), + vec!((1, 2), (3, 4), (7, 9)) + )); } } diff --git a/helix-core/src/shellwords.rs b/helix-core/src/shellwords.rs index 9475f5e5..0883eb91 100644 --- a/helix-core/src/shellwords.rs +++ b/helix-core/src/shellwords.rs @@ -129,8 +129,9 @@ impl<'a> From<&'a str> for Shellwords<'a> { DquoteEscaped => Dquoted, }; - if i >= input.len() - 1 && end == 0 { - end = i + 1; + let c_len = c.len_utf8(); + if i == input.len() - c_len && end == 0 { + end = i + c_len; } if end > 0 { @@ -333,4 +334,17 @@ mod test { assert_eq!(Shellwords::from(":o a").parts(), &[":o", "a"]); assert_eq!(Shellwords::from(":o a\\ ").parts(), &[":o", "a\\"]); } + + #[test] + fn test_multibyte_at_end() { + assert_eq!(Shellwords::from("𒀀").parts(), &["𒀀"]); + assert_eq!( + Shellwords::from(":sh echo 𒀀").parts(), + &[":sh", "echo", "𒀀"] + ); + assert_eq!( + Shellwords::from(":sh echo 𒀀 hello world𒀀").parts(), + &[":sh", "echo", "𒀀", "hello", "world𒀀"] + ); + } } diff --git a/helix-core/src/surround.rs b/helix-core/src/surround.rs index a3de3cd1..f430aee8 100644 --- a/helix-core/src/surround.rs +++ b/helix-core/src/surround.rs @@ -1,6 +1,6 @@ use std::fmt::Display; -use crate::{search, Range, Selection}; +use crate::{movement::Direction, search, Range, Selection}; use ropey::RopeSlice; pub const PAIRS: &[(char, char)] = &[ @@ -55,15 +55,18 @@ pub fn get_pair(ch: char) -> (char, char) { pub fn find_nth_closest_pairs_pos( text: RopeSlice, range: Range, - n: usize, + mut skip: usize, ) -> Result<(usize, usize)> { let is_open_pair = |ch| PAIRS.iter().any(|(open, _)| *open == ch); let is_close_pair = |ch| PAIRS.iter().any(|(_, close)| *close == ch); let mut stack = Vec::with_capacity(2); - let pos = range.cursor(text); + let pos = range.from(); + let mut close_pos = pos.saturating_sub(1); for ch in text.chars_at(pos) { + close_pos += 1; + if is_open_pair(ch) { // Track open pairs encountered so that we can step over // the corresponding close pairs that will come up further @@ -71,20 +74,46 @@ pub fn find_nth_closest_pairs_pos( // open pair is before the cursor position. stack.push(ch); continue; - } else if is_close_pair(ch) { - let (open, _) = get_pair(ch); - if stack.last() == Some(&open) { - stack.pop(); - continue; - } else { - // In the ideal case the stack would be empty here and the - // current character would be the close pair that we are - // looking for. It could also be the case that the pairs - // are unbalanced and we encounter a close pair that doesn't - // close the last seen open pair. 
In either case use this - // char as the auto-detected closest pair. - return find_nth_pairs_pos(text, ch, range, n); + } + + if !is_close_pair(ch) { + // We don't care if this character isn't a brace pair item, + // so short circuit here. + continue; + } + + let (open, close) = get_pair(ch); + + if stack.last() == Some(&open) { + // If we are encountering the closing pair for an opener + // we just found while traversing, then its inside the + // selection and should be skipped over. + stack.pop(); + continue; + } + + match find_nth_open_pair(text, open, close, close_pos, 1) { + // Before we accept this pair, we want to ensure that the + // pair encloses the range rather than just the cursor. + Some(open_pos) + if open_pos <= pos.saturating_add(1) + && close_pos >= range.to().saturating_sub(1) => + { + // Since we have special conditions for when to + // accept, we can't just pass the skip parameter on + // through to the find_nth_*_pair methods, so we + // track skips manually here. + if skip > 1 { + skip -= 1; + continue; + } + + return match range.direction() { + Direction::Forward => Ok((open_pos, close_pos)), + Direction::Backward => Ok((close_pos, open_pos)), + }; } + _ => continue, } } @@ -244,141 +273,140 @@ mod test { use ropey::Rope; use smallvec::SmallVec; - #[allow(clippy::type_complexity)] - fn check_find_nth_pair_pos( - text: &str, - cases: Vec<(usize, char, usize, Result<(usize, usize)>)>, - ) { - let doc = Rope::from(text); - let slice = doc.slice(..); - - for (cursor_pos, ch, n, expected_range) in cases { - let range = find_nth_pairs_pos(slice, ch, (cursor_pos, cursor_pos + 1).into(), n); - assert_eq!( - range, expected_range, - "Expected {:?}, got {:?}", - expected_range, range + #[test] + fn test_get_surround_pos() { + #[rustfmt::skip] + let (doc, selection, expectations) = + rope_with_selections_and_expectations( + "(some) (chars)\n(newline)", + "_ ^ _ _ ^ _\n_ ^ _" ); - } + + assert_eq!( + get_surround_pos(doc.slice(..), &selection, Some('('), 1).unwrap(), + expectations + ); } #[test] - fn test_find_nth_pairs_pos() { - check_find_nth_pair_pos( - "some (text) here", - vec![ - // cursor on [t]ext - (6, '(', 1, Ok((5, 10))), - (6, ')', 1, Ok((5, 10))), - // cursor on so[m]e - (2, '(', 1, Err(Error::PairNotFound)), - // cursor on bracket itself - (5, '(', 1, Ok((5, 10))), - (10, '(', 1, Ok((5, 10))), - ], + fn test_get_surround_pos_bail_different_surround_chars() { + #[rustfmt::skip] + let (doc, selection, _) = + rope_with_selections_and_expectations( + "[some]\n(chars)xx\n(newline)", + " ^ \n ^ \n " + ); + + assert_eq!( + get_surround_pos(doc.slice(..), &selection, Some('('), 1), + Err(Error::PairNotFound) ); } #[test] - fn test_find_nth_pairs_pos_skip() { - check_find_nth_pair_pos( - "(so (many (good) text) here)", - vec![ - // cursor on go[o]d - (13, '(', 1, Ok((10, 15))), - (13, '(', 2, Ok((4, 21))), - (13, '(', 3, Ok((0, 27))), - ], + fn test_get_surround_pos_bail_overlapping_surround_chars() { + #[rustfmt::skip] + let (doc, selection, _) = + rope_with_selections_and_expectations( + "[some]\n(chars)xx\n(newline)", + " \n ^ \n ^ " + ); + + assert_eq!( + get_surround_pos(doc.slice(..), &selection, Some('('), 1), + Err(Error::PairNotFound) // overlapping surround chars ); } #[test] - fn test_find_nth_pairs_pos_same() { - check_find_nth_pair_pos( - "'so 'many 'good' text' here'", - vec![ - // cursor on go[o]d - (13, '\'', 1, Ok((10, 15))), - (13, '\'', 2, Ok((4, 21))), - (13, '\'', 3, Ok((0, 27))), - // cursor on the quotes - (10, '\'', 1, 
Err(Error::CursorOnAmbiguousPair)), - ], - ) + fn test_get_surround_pos_bail_cursor_overlap() { + #[rustfmt::skip] + let (doc, selection, _) = + rope_with_selections_and_expectations( + "[some]\n(chars)xx\n(newline)", + " ^^ \n \n " + ); + + assert_eq!( + get_surround_pos(doc.slice(..), &selection, Some('['), 1), + Err(Error::CursorOverlap) + ); } #[test] - fn test_find_nth_pairs_pos_step() { - check_find_nth_pair_pos( - "((so)((many) good (text))(here))", - vec![ - // cursor on go[o]d - (15, '(', 1, Ok((5, 24))), - (15, '(', 2, Ok((0, 31))), - ], + fn test_find_nth_pairs_pos_quote_success() { + #[rustfmt::skip] + let (doc, selection, expectations) = + rope_with_selections_and_expectations( + "some 'quoted text' on this 'line'\n'and this one'", + " _ ^ _ \n " + ); + + assert_eq!(2, expectations.len()); + assert_eq!( + find_nth_pairs_pos(doc.slice(..), '\'', selection.primary(), 1) + .expect("find should succeed"), + (expectations[0], expectations[1]) ) } #[test] - fn test_find_nth_pairs_pos_mixed() { - check_find_nth_pair_pos( - "(so [many {good} text] here)", - vec![ - // cursor on go[o]d - (13, '{', 1, Ok((10, 15))), - (13, '[', 1, Ok((4, 21))), - (13, '(', 1, Ok((0, 27))), - ], + fn test_find_nth_pairs_pos_nested_quote_success() { + #[rustfmt::skip] + let (doc, selection, expectations) = + rope_with_selections_and_expectations( + "some 'nested 'quoted' text' on this 'line'\n'and this one'", + " _ ^ _ \n " + ); + + assert_eq!(2, expectations.len()); + assert_eq!( + find_nth_pairs_pos(doc.slice(..), '\'', selection.primary(), 2) + .expect("find should succeed"), + (expectations[0], expectations[1]) ) } #[test] - fn test_get_surround_pos() { - let doc = Rope::from("(some) (chars)\n(newline)"); - let slice = doc.slice(..); - let selection = Selection::new( - SmallVec::from_slice(&[Range::point(2), Range::point(9), Range::point(20)]), - 0, - ); + fn test_find_nth_pairs_pos_inside_quote_ambiguous() { + #[rustfmt::skip] + let (doc, selection, _) = + rope_with_selections_and_expectations( + "some 'nested 'quoted' text' on this 'line'\n'and this one'", + " ^ \n " + ); - // cursor on s[o]me, c[h]ars, newl[i]ne assert_eq!( - get_surround_pos(slice, &selection, Some('('), 1) - .unwrap() - .as_slice(), - &[0, 5, 7, 13, 15, 23] - ); + find_nth_pairs_pos(doc.slice(..), '\'', selection.primary(), 1), + Err(Error::CursorOnAmbiguousPair) + ) } - #[test] - fn test_get_surround_pos_bail() { - let doc = Rope::from("[some]\n(chars)xx\n(newline)"); - let slice = doc.slice(..); + // Create a Rope and a matching Selection using a specification language. + // ^ is a single-point selection. + // _ is an expected index. These are returned as a Vec for use in assertions. 
+ fn rope_with_selections_and_expectations( + text: &str, + spec: &str, + ) -> (Rope, Selection, Vec) { + if text.len() != spec.len() { + panic!("specification must match text length -- are newlines aligned?"); + } - let selection = - Selection::new(SmallVec::from_slice(&[Range::point(2), Range::point(9)]), 0); - // cursor on s[o]me, c[h]ars - assert_eq!( - get_surround_pos(slice, &selection, Some('('), 1), - Err(Error::PairNotFound) // different surround chars - ); + let rope = Rope::from(text); - let selection = Selection::new( - SmallVec::from_slice(&[Range::point(14), Range::point(24)]), - 0, - ); - // cursor on [x]x, newli[n]e - assert_eq!( - get_surround_pos(slice, &selection, Some('('), 1), - Err(Error::PairNotFound) // overlapping surround chars - ); + let selections: SmallVec<[Range; 1]> = spec + .match_indices('^') + .into_iter() + .map(|(i, _)| Range::point(i)) + .collect(); - let selection = - Selection::new(SmallVec::from_slice(&[Range::point(2), Range::point(3)]), 0); - // cursor on s[o][m]e - assert_eq!( - get_surround_pos(slice, &selection, Some('['), 1), - Err(Error::CursorOverlap) - ); + let expectations: Vec = spec + .match_indices('_') + .into_iter() + .map(|(i, _)| i) + .collect(); + + (rope, Selection::new(selections, 0), expectations) } } diff --git a/helix-core/src/syntax.rs b/helix-core/src/syntax.rs index 41ab23e1..e494ee9b 100644 --- a/helix-core/src/syntax.rs +++ b/helix-core/src/syntax.rs @@ -82,7 +82,8 @@ pub struct LanguageConfiguration { pub shebangs: Vec, // interpreter(s) associated with language pub roots: Vec, // these indicate project roots <.git, Cargo.toml> pub comment_token: Option, - pub max_line_length: Option, + pub text_width: Option, + pub soft_wrap: Option, #[serde(default, skip_serializing, deserialize_with = "deserialize_lsp_config")] pub config: Option, @@ -427,7 +428,7 @@ impl TextObjectQuery { let nodes: Vec<_> = mat .captures .iter() - .filter_map(|cap| (cap.index == capture_idx).then(|| cap.node)) + .filter_map(|cap| (cap.index == capture_idx).then_some(cap.node)) .collect(); if nodes.len() > 1 { @@ -546,6 +547,33 @@ impl LanguageConfiguration { .ok() } } +#[derive(Debug, Clone, Default, PartialEq, Eq, Serialize, Deserialize)] +#[serde(default, rename_all = "kebab-case", deny_unknown_fields)] +pub struct SoftWrap { + /// Soft wrap lines that exceed viewport width. Default to off + pub enable: Option, + /// Maximum space left free at the end of the line. + /// This space is used to wrap text at word boundaries. If that is not possible within this limit + /// the word is simply split at the end of the line. + /// + /// This is automatically hard-limited to a quarter of the viewport to ensure correct display on small views. + /// + /// Default to 20 + pub max_wrap: Option, + /// Maximum number of indentation that can be carried over from the previous line when softwrapping. + /// If a line is indented further then this limit it is rendered at the start of the viewport instead. + /// + /// This is automatically hard-limited to a quarter of the viewport to ensure correct display on small views. + /// + /// Default to 40 + pub max_indent_retain: Option, + /// Indicator placed at the beginning of softwrapped lines + /// + /// Defaults to ↪ + pub wrap_indicator: Option, + /// Softwrap at `text_width` instead of viewport width if it is shorter + pub wrap_at_text_width: Option, +} // Expose loader as Lazy<> global since it's always static? 
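// A hedged sketch of how the per-language soft-wrap settings above can be constructed in
// Rust. The numeric field types are not visible in this hunk, so the literal is left to
// inference and all other fields fall back to `Default`; the values shown are illustrative.
use helix_core::syntax::SoftWrap;

fn example_soft_wrap() -> SoftWrap {
    SoftWrap {
        enable: Some(true),
        // keep up to 20 cells free at the end of each line so wrapping can happen at word boundaries
        max_wrap: Some(20),
        // rendered at the start of every softwrapped visual line
        wrap_indicator: Some(String::from("↪ ")),
        ..SoftWrap::default()
    }
}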
@@ -1092,21 +1120,14 @@ impl Syntax { }], cursor, _tree: None, - captures, + captures: RefCell::new(captures), config: layer.config.as_ref(), // TODO: just reuse `layer` depth: layer.depth, // TODO: just reuse `layer` - ranges: &layer.ranges, // TODO: temp }) }) .collect::>(); - // HAXX: arrange layers by byte range, with deeper layers positioned first - layers.sort_by_key(|layer| { - ( - layer.ranges.first().cloned(), - std::cmp::Reverse(layer.depth), - ) - }); + layers.sort_unstable_by_key(|layer| layer.sort_key()); let mut result = HighlightIter { source, @@ -1136,6 +1157,7 @@ impl Syntax { bitflags! { /// Flags that track the status of a layer /// in the `Sytaxn::update` function + #[derive(Debug)] struct LayerUpdateFlags : u32{ const MODIFIED = 0b001; const MOVED = 0b010; @@ -1424,12 +1446,11 @@ impl<'a> TextProvider<'a> for RopeProvider<'a> { struct HighlightIterLayer<'a> { _tree: Option, cursor: QueryCursor, - captures: iter::Peekable>>, + captures: RefCell>>>, config: &'a HighlightConfiguration, highlight_end_stack: Vec, scope_stack: Vec>, depth: u32, - ranges: &'a [Range], } impl<'a> fmt::Debug for HighlightIterLayer<'a> { @@ -1610,10 +1631,11 @@ impl<'a> HighlightIterLayer<'a> { // First, sort scope boundaries by their byte offset in the document. At a // given position, emit scope endings before scope beginnings. Finally, emit // scope boundaries from deeper layers first. - fn sort_key(&mut self) -> Option<(usize, bool, isize)> { + fn sort_key(&self) -> Option<(usize, bool, isize)> { let depth = -(self.depth as isize); let next_start = self .captures + .borrow_mut() .peek() .map(|(m, i)| m.captures[*i].node.start_byte()); let next_end = self.highlight_end_stack.last().cloned(); @@ -1838,7 +1860,8 @@ impl<'a> Iterator for HighlightIter<'a> { // Get the next capture from whichever layer has the earliest highlight boundary. let range; let layer = &mut self.layers[0]; - if let Some((next_match, capture_index)) = layer.captures.peek() { + let captures = layer.captures.get_mut(); + if let Some((next_match, capture_index)) = captures.peek() { let next_capture = next_match.captures[*capture_index]; range = next_capture.node.byte_range(); @@ -1861,7 +1884,7 @@ impl<'a> Iterator for HighlightIter<'a> { return self.emit_event(self.source.len_bytes(), None); }; - let (mut match_, capture_index) = layer.captures.next().unwrap(); + let (mut match_, capture_index) = captures.next().unwrap(); let mut capture = match_.captures[capture_index]; // Remove from the local scope stack any local scopes that have already ended. @@ -1937,11 +1960,11 @@ impl<'a> Iterator for HighlightIter<'a> { } // Continue processing any additional matches for the same node. - if let Some((next_match, next_capture_index)) = layer.captures.peek() { + if let Some((next_match, next_capture_index)) = captures.peek() { let next_capture = next_match.captures[*next_capture_index]; if next_capture.node == capture.node { capture = next_capture; - match_ = layer.captures.next().unwrap().0; + match_ = captures.next().unwrap().0; continue; } } @@ -1964,11 +1987,11 @@ impl<'a> Iterator for HighlightIter<'a> { // highlighting patterns that are disabled for local variables. 
if definition_highlight.is_some() || reference_highlight.is_some() { while layer.config.non_local_variable_patterns[match_.pattern_index] { - if let Some((next_match, next_capture_index)) = layer.captures.peek() { + if let Some((next_match, next_capture_index)) = captures.peek() { let next_capture = next_match.captures[*next_capture_index]; if next_capture.node == capture.node { capture = next_capture; - match_ = layer.captures.next().unwrap().0; + match_ = captures.next().unwrap().0; continue; } } @@ -1983,10 +2006,10 @@ impl<'a> Iterator for HighlightIter<'a> { // for a given node are ordered by pattern index, so these subsequent // captures are guaranteed to be for highlighting, not injections or // local variables. - while let Some((next_match, next_capture_index)) = layer.captures.peek() { + while let Some((next_match, next_capture_index)) = captures.peek() { let next_capture = next_match.captures[*next_capture_index]; if next_capture.node == capture.node { - layer.captures.next(); + captures.next(); } else { break; } diff --git a/helix-core/src/test.rs b/helix-core/src/test.rs index 17523ed7..7183302c 100644 --- a/helix-core/src/test.rs +++ b/helix-core/src/test.rs @@ -1,7 +1,9 @@ //! Test helpers. use crate::{Range, Selection}; +use ropey::Rope; use smallvec::SmallVec; use std::cmp::Reverse; +use unicode_segmentation::UnicodeSegmentation; /// Convert annotated test string to test string and selection. /// @@ -10,6 +12,10 @@ use std::cmp::Reverse; /// `#[` for primary selection with head after anchor followed by `|]#`. /// `#(` for secondary selection with head after anchor followed by `|)#`. /// +/// If the selection contains any LF or CRLF sequences, which are immediately +/// followed by the same grapheme, then the subsequent one is removed. This is +/// to allow representing having the cursor over the end of the line. 
+/// /// # Examples /// /// ``` @@ -30,23 +36,23 @@ use std::cmp::Reverse; pub fn print(s: &str) -> (String, Selection) { let mut primary_idx = None; let mut ranges = SmallVec::new(); - let mut iter = s.chars().peekable(); + let mut iter = UnicodeSegmentation::graphemes(s, true).peekable(); let mut left = String::with_capacity(s.len()); 'outer: while let Some(c) = iter.next() { let start = left.chars().count(); - if c != '#' { - left.push(c); + if c != "#" { + left.push_str(c); continue; } let (is_primary, close_pair) = match iter.next() { - Some('[') => (true, ']'), - Some('(') => (false, ')'), + Some("[") => (true, "]"), + Some("(") => (false, ")"), Some(ch) => { left.push('#'); - left.push(ch); + left.push_str(ch); continue; } None => break, @@ -56,24 +62,45 @@ pub fn print(s: &str) -> (String, Selection) { panic!("primary `#[` already appeared {:?} {:?}", left, s); } - let head_at_beg = iter.next_if_eq(&'|').is_some(); + let head_at_beg = iter.next_if_eq(&"|").is_some(); + let last_grapheme = |s: &str| { + UnicodeSegmentation::graphemes(s, true) + .last() + .map(String::from) + }; while let Some(c) = iter.next() { - if !(c == close_pair && iter.peek() == Some(&'#')) { - left.push(c); + let next = iter.peek(); + let mut prev = last_grapheme(left.as_str()); + + if !(c == close_pair && next == Some(&"#")) { + left.push_str(c); continue; } if !head_at_beg { - let prev = left.pop().unwrap(); - if prev != '|' { - left.push(prev); - left.push(c); - continue; + match &prev { + Some(p) if p != "|" => { + left.push_str(c); + continue; + } + Some(p) if p == "|" => { + left.pop().unwrap(); // pop the | + prev = last_grapheme(left.as_str()); + } + _ => (), } } iter.next(); // skip "#" + let next = iter.peek(); + + // skip explicit line end inside selection + if (prev == Some(String::from("\r\n")) || prev == Some(String::from("\n"))) + && next.map(|n| String::from(*n)) == prev + { + iter.next(); + } if is_primary { primary_idx = Some(ranges.len()); @@ -118,14 +145,16 @@ pub fn print(s: &str) -> (String, Selection) { /// use smallvec::smallvec; /// /// assert_eq!( -/// plain("abc", Selection::new(smallvec![Range::new(0, 1), Range::new(3, 2)], 0)), +/// plain("abc", &Selection::new(smallvec![Range::new(0, 1), Range::new(3, 2)], 0)), /// "#[a|]#b#(|c)#".to_owned() /// ); /// ``` -pub fn plain(s: &str, selection: Selection) -> String { +pub fn plain>(s: R, selection: &Selection) -> String { + let s = s.into(); let primary = selection.primary_index(); - let mut out = String::with_capacity(s.len() + 5 * selection.len()); - out.push_str(s); + let mut out = String::with_capacity(s.len_bytes() + 5 * selection.len()); + out.push_str(&s.to_string()); + let mut insertion: Vec<_> = selection .iter() .enumerate() @@ -138,7 +167,9 @@ pub fn plain(s: &str, selection: Selection) -> String { (false, false) => [(range.anchor, ")#"), (range.head, "#(|")], } }) + .map(|(char_idx, marker)| (s.char_to_byte(char_idx), marker)) .collect(); + // insert in reverse order insertion.sort_unstable_by_key(|k| Reverse(k.0)); for (i, s) in insertion { @@ -262,4 +293,94 @@ mod test { print("hello #[|👨‍👩‍👧‍👦]# goodbye") ); } + + #[test] + fn plain_single() { + assert_eq!("#[|h]#ello", plain("hello", &Selection::single(1, 0))); + assert_eq!("#[h|]#ello", plain("hello", &Selection::single(0, 1))); + assert_eq!("#[|hell]#o", plain("hello", &Selection::single(4, 0))); + assert_eq!("#[hell|]#o", plain("hello", &Selection::single(0, 4))); + assert_eq!("#[|hello]#", plain("hello", &Selection::single(5, 0))); + assert_eq!("#[hello|]#", 
plain("hello", &Selection::single(0, 5))); + } + + #[test] + fn plain_multi() { + assert_eq!( + plain( + "hello", + &Selection::new( + SmallVec::from_slice(&[Range::new(1, 0), Range::new(5, 4)]), + 0 + ) + ), + String::from("#[|h]#ell#(|o)#") + ); + assert_eq!( + plain( + "hello", + &Selection::new( + SmallVec::from_slice(&[Range::new(0, 1), Range::new(4, 5)]), + 0 + ) + ), + String::from("#[h|]#ell#(o|)#") + ); + assert_eq!( + plain( + "hello", + &Selection::new( + SmallVec::from_slice(&[Range::new(2, 0), Range::new(5, 3)]), + 0 + ) + ), + String::from("#[|he]#l#(|lo)#") + ); + assert_eq!( + plain( + "hello\r\nhello\r\nhello\r\n", + &Selection::new( + SmallVec::from_slice(&[ + Range::new(7, 5), + Range::new(21, 19), + Range::new(14, 12) + ]), + 0 + ) + ), + String::from("hello#[|\r\n]#hello#(|\r\n)#hello#(|\r\n)#") + ); + } + + #[test] + fn plain_multi_byte_code_point() { + assert_eq!( + plain("„“", &Selection::single(1, 0)), + String::from("#[|„]#“") + ); + assert_eq!( + plain("„“", &Selection::single(2, 1)), + String::from("„#[|“]#") + ); + assert_eq!( + plain("„“", &Selection::single(0, 1)), + String::from("#[„|]#“") + ); + assert_eq!( + plain("„“", &Selection::single(1, 2)), + String::from("„#[“|]#") + ); + assert_eq!( + plain("they said „hello“", &Selection::single(11, 10)), + String::from("they said #[|„]#hello“") + ); + } + + #[test] + fn plain_multi_code_point_grapheme() { + assert_eq!( + plain("hello 👨‍👩‍👧‍👦 goodbye", &Selection::single(13, 6)), + String::from("hello #[|👨‍👩‍👧‍👦]# goodbye") + ); + } } diff --git a/helix-core/src/text_annotations.rs b/helix-core/src/text_annotations.rs new file mode 100644 index 00000000..3e48de4d --- /dev/null +++ b/helix-core/src/text_annotations.rs @@ -0,0 +1,274 @@ +use std::cell::Cell; +use std::convert::identity; +use std::ops::Range; +use std::rc::Rc; + +use crate::syntax::Highlight; +use crate::Tendril; + +/// An inline annotation is continuous text shown +/// on the screen before the grapheme that starts at +/// `char_idx` +#[derive(Debug, Clone)] +pub struct InlineAnnotation { + pub text: Tendril, + pub char_idx: usize, +} + +impl InlineAnnotation { + pub fn new(char_idx: usize, text: impl Into) -> Self { + Self { + char_idx, + text: text.into(), + } + } +} + +/// Represents a **single Grapheme** that is part of the document +/// that start at `char_idx` that will be replaced with +/// a different `grapheme`. +/// If `grapheme` contains multiple graphemes the text +/// will render incorrectly. +/// If you want to overlay multiple graphemes simply +/// use multiple `Overlays`. +/// +/// # Examples +/// +/// The following examples are valid overlays for the following text: +/// +/// `aX͎̊͢͜͝͡bc` +/// +/// ``` +/// use helix_core::text_annotations::Overlay; +/// +/// // replaces a +/// Overlay::new(0, "X"); +/// +/// // replaces X͎̊͢͜͝͡ +/// Overlay::new(1, "\t"); +/// +/// // replaces b +/// Overlay::new(6, "X̢̢̟͖̲͌̋̇͑͝"); +/// ``` +/// +/// The following examples are invalid uses +/// +/// ``` +/// use helix_core::text_annotations::Overlay; +/// +/// // overlay is not aligned at grapheme boundary +/// Overlay::new(3, "x"); +/// +/// // overlay contains multiple graphemes +/// Overlay::new(0, "xy"); +/// ``` +#[derive(Debug, Clone)] +pub struct Overlay { + pub char_idx: usize, + pub grapheme: Tendril, +} + +impl Overlay { + pub fn new(char_idx: usize, grapheme: impl Into) -> Self { + Self { + char_idx, + grapheme: grapheme.into(), + } + } +} + +/// Line annotations allow for virtual text between normal +/// text lines. 
They cause `height` empty lines to be inserted +/// below the document line that contains `anchor_char_idx`. +/// +/// These lines can be filled with text in the rendering code +/// as their contents have no effect beyond visual appearance. +/// +/// To insert a line after a document line simply set +/// `anchor_char_idx` to `doc.line_to_char(line_idx)` +#[derive(Debug, Clone)] +pub struct LineAnnotation { + pub anchor_char_idx: usize, + pub height: usize, +} + +#[derive(Debug)] +struct Layer { + annotations: Rc<[A]>, + current_index: Cell, + metadata: M, +} + +impl Clone for Layer { + fn clone(&self) -> Self { + Layer { + annotations: self.annotations.clone(), + current_index: self.current_index.clone(), + metadata: self.metadata.clone(), + } + } +} + +impl Layer { + pub fn reset_pos(&self, char_idx: usize, get_char_idx: impl Fn(&A) -> usize) { + let new_index = self + .annotations + .binary_search_by_key(&char_idx, get_char_idx) + .unwrap_or_else(identity); + + self.current_index.set(new_index); + } + + pub fn consume(&self, char_idx: usize, get_char_idx: impl Fn(&A) -> usize) -> Option<&A> { + let annot = self.annotations.get(self.current_index.get())?; + debug_assert!(get_char_idx(annot) >= char_idx); + if get_char_idx(annot) == char_idx { + self.current_index.set(self.current_index.get() + 1); + Some(annot) + } else { + None + } + } +} + +impl From<(Rc<[A]>, M)> for Layer { + fn from((annotations, metadata): (Rc<[A]>, M)) -> Layer { + Layer { + annotations, + current_index: Cell::new(0), + metadata, + } + } +} + +fn reset_pos(layers: &[Layer], pos: usize, get_pos: impl Fn(&A) -> usize) { + for layer in layers { + layer.reset_pos(pos, &get_pos) + } +} + +/// Annotations that change that is displayed when the document is render. +/// Also commonly called virtual text. +#[derive(Default, Debug, Clone)] +pub struct TextAnnotations { + inline_annotations: Vec>>, + overlays: Vec>>, + line_annotations: Vec>, +} + +impl TextAnnotations { + /// Prepare the TextAnnotations for iteration starting at char_idx + pub fn reset_pos(&self, char_idx: usize) { + reset_pos(&self.inline_annotations, char_idx, |annot| annot.char_idx); + reset_pos(&self.overlays, char_idx, |annot| annot.char_idx); + reset_pos(&self.line_annotations, char_idx, |annot| { + annot.anchor_char_idx + }); + } + + pub fn collect_overlay_highlights( + &self, + char_range: Range, + ) -> Vec<(usize, Range)> { + let mut highlights = Vec::new(); + self.reset_pos(char_range.start); + for char_idx in char_range { + if let Some((_, Some(highlight))) = self.overlay_at(char_idx) { + // we don't know the number of chars the original grapheme takes + // however it doesn't matter as highlight bounderies are automatically + // aligned to grapheme boundaries in the rendering code + highlights.push((highlight.0, char_idx..char_idx + 1)) + } + } + + highlights + } + + /// Add new inline annotations. + /// + /// The annotations grapheme will be rendered with `highlight` + /// patched on top of `ui.text`. + /// + /// The annotations **must be sorted** by their `char_idx`. + /// Multiple annotations with the same `char_idx` are allowed, + /// they will be display in the order that they are present in the layer. + /// + /// If multiple layers contain annotations at the same position + /// the annotations that belong to the layers added first will be shown first. 
+ pub fn add_inline_annotations( + &mut self, + layer: Rc<[InlineAnnotation]>, + highlight: Option, + ) -> &mut Self { + self.inline_annotations.push((layer, highlight).into()); + self + } + + /// Add new grapheme overlays. + /// + /// The overlayed grapheme will be rendered with `highlight` + /// patched on top of `ui.text`. + /// + /// The overlays **must be sorted** by their `char_idx`. + /// Multiple overlays with the same `char_idx` **are allowed**. + /// + /// If multiple layers contain overlay at the same position + /// the overlay from the layer added last will be show. + pub fn add_overlay(&mut self, layer: Rc<[Overlay]>, highlight: Option) -> &mut Self { + self.overlays.push((layer, highlight).into()); + self + } + + /// Add new annotation lines. + /// + /// The line annotations **must be sorted** by their `char_idx`. + /// Multiple line annotations with the same `char_idx` **are not allowed**. + pub fn add_line_annotation(&mut self, layer: Rc<[LineAnnotation]>) -> &mut Self { + self.line_annotations.push((layer, ()).into()); + self + } + + /// Removes all line annotations, useful for vertical motions + /// so that virtual text lines are automatically skipped. + pub fn clear_line_annotations(&mut self) { + self.line_annotations.clear(); + } + + pub(crate) fn next_inline_annotation_at( + &self, + char_idx: usize, + ) -> Option<(&InlineAnnotation, Option)> { + self.inline_annotations.iter().find_map(|layer| { + let annotation = layer.consume(char_idx, |annot| annot.char_idx)?; + Some((annotation, layer.metadata)) + }) + } + + pub(crate) fn overlay_at(&self, char_idx: usize) -> Option<(&Overlay, Option)> { + let mut overlay = None; + for layer in &self.overlays { + while let Some(new_overlay) = layer.consume(char_idx, |annot| annot.char_idx) { + overlay = Some((new_overlay, layer.metadata)); + } + } + overlay + } + + pub(crate) fn annotation_lines_at(&self, char_idx: usize) -> usize { + self.line_annotations + .iter() + .map(|layer| { + let mut lines = 0; + while let Some(annot) = layer.annotations.get(layer.current_index.get()) { + if annot.anchor_char_idx == char_idx { + layer.current_index.set(layer.current_index.get() + 1); + lines += annot.height + } else { + break; + } + } + lines + }) + .sum() + } +} diff --git a/helix-core/src/textobject.rs b/helix-core/src/textobject.rs index 76c6d103..bf00a458 100644 --- a/helix-core/src/textobject.rs +++ b/helix-core/src/textobject.rs @@ -231,8 +231,20 @@ fn textobject_pair_surround_impl( }; pair_pos .map(|(anchor, head)| match textobject { - TextObject::Inside => Range::new(next_grapheme_boundary(slice, anchor), head), - TextObject::Around => Range::new(anchor, next_grapheme_boundary(slice, head)), + TextObject::Inside => { + if anchor < head { + Range::new(next_grapheme_boundary(slice, anchor), head) + } else { + Range::new(anchor, next_grapheme_boundary(slice, head)) + } + } + TextObject::Around => { + if anchor < head { + Range::new(anchor, next_grapheme_boundary(slice, head)) + } else { + Range::new(next_grapheme_boundary(slice, anchor), head) + } + } TextObject::Movement => unreachable!(), }) .unwrap_or(range) @@ -425,7 +437,7 @@ mod test { let text = Rope::from(s.as_str()); let selection = selection .transform(|r| textobject_paragraph(text.slice(..), r, TextObject::Inside, 1)); - let actual = crate::test::plain(&s, selection); + let actual = crate::test::plain(s.as_ref(), &selection); assert_eq!(actual, expected, "\nbefore: `{:?}`", before); } } @@ -448,7 +460,7 @@ mod test { let text = Rope::from(s.as_str()); let selection = 
selection
                .transform(|r| textobject_paragraph(text.slice(..), r, TextObject::Inside, 2));
-            let actual = crate::test::plain(&s, selection);
+            let actual = crate::test::plain(s.as_ref(), &selection);
             assert_eq!(actual, expected, "\nbefore: `{:?}`", before);
         }
     }
@@ -479,7 +491,7 @@ mod test {
             let text = Rope::from(s.as_str());
             let selection = selection
                 .transform(|r| textobject_paragraph(text.slice(..), r, TextObject::Around, 1));
-            let actual = crate::test::plain(&s, selection);
+            let actual = crate::test::plain(s.as_ref(), &selection);
             assert_eq!(actual, expected, "\nbefore: `{:?}`", before);
         }
     }
diff --git a/helix-core/src/transaction.rs b/helix-core/src/transaction.rs
index 482fd6d9..d8e581aa 100644
--- a/helix-core/src/transaction.rs
+++ b/helix-core/src/transaction.rs
@@ -1,3 +1,5 @@
+use smallvec::SmallVec;
+
 use crate::{Range, Rope, Selection, Tendril};
 
 use std::borrow::Cow;
@@ -466,6 +468,33 @@ impl Transaction {
         self
     }
 
+    /// Generate a transaction from a set of potentially overlapping changes. The `change_ranges`
+    /// iterator yields the range (of removed text) in the old document for each edit. If a change
+    /// overlaps with a previously yielded range, that change is ignored.
+    ///
+    /// The `process_change` callback is called for each edit that is not ignored (in the order
+    /// yielded by `changes`) and should return the new text that the associated range will be
+    /// replaced with.
+    ///
+    /// To make this function more flexible the iterator can yield additional data for each change
+    /// that is passed to `process_change`.
+    pub fn change_ignore_overlapping<T>(
+        doc: &Rope,
+        change_ranges: impl Iterator<Item = (usize, usize, T)>,
+        mut process_change: impl FnMut(usize, usize, T) -> Option<Tendril>,
+    ) -> Self {
+        let mut last = 0;
+        let changes = change_ranges.filter_map(|(from, to, data)| {
+            if from < last {
+                return None;
+            }
+            let tendril = process_change(from, to, data);
+            last = to;
+            Some((from, to, tendril))
+        });
+        Self::change(doc, changes)
+    }
+
     /// Generate a transaction from a set of changes.
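// A hedged usage sketch for `change_ignore_overlapping` as defined above: wrap every selected
// range in parentheses, silently skipping ranges that overlap an earlier edit. The helper name
// and the parenthesis-wrapping behaviour are illustrative, not part of this diff.
use helix_core::{Rope, Selection, Transaction};

fn surround_with_parens(doc: &Rope, selection: &Selection) -> Transaction {
    Transaction::change_ignore_overlapping(
        doc,
        selection.iter().map(|range| (range.from(), range.to(), ())),
        |from, to, _| {
            let inner = doc.slice(from..to).to_string();
            Some(format!("({inner})").into())
        },
    )
}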
pub fn change(doc: &Rope, changes: I) -> Self where @@ -481,6 +510,11 @@ impl Transaction { for (from, to, tendril) in changes { // Verify ranges are ordered and not overlapping debug_assert!(last <= from); + // Verify ranges are correct + debug_assert!( + from <= to, + "Edit end must end before it starts (should {from} <= {to})" + ); // Retain from last "to" to current "from" changeset.retain(from - last); @@ -508,6 +542,44 @@ impl Transaction { Self::change(doc, selection.iter().map(f)) } + pub fn change_by_selection_ignore_overlapping( + doc: &Rope, + selection: &Selection, + mut change_range: impl FnMut(&Range) -> (usize, usize), + mut create_tendril: impl FnMut(usize, usize) -> Option, + ) -> (Transaction, Selection) { + let mut last_selection_idx = None; + let mut new_primary_idx = None; + let mut ranges: SmallVec<[Range; 1]> = SmallVec::new(); + let process_change = |change_start, change_end, (idx, range): (usize, &Range)| { + // update the primary idx + if idx == selection.primary_index() { + new_primary_idx = Some(idx); + } else if new_primary_idx.is_none() { + if idx > selection.primary_index() { + new_primary_idx = last_selection_idx; + } else { + last_selection_idx = Some(idx); + } + } + ranges.push(*range); + create_tendril(change_start, change_end) + }; + let transaction = Self::change_ignore_overlapping( + doc, + selection.iter().enumerate().map(|range| { + let (change_start, change_end) = change_range(range.1); + (change_start, change_end, range) + }), + process_change, + ); + + ( + transaction, + Selection::new(ranges, new_primary_idx.unwrap_or(0)), + ) + } + /// Insert text at each selection head. pub fn insert(doc: &Rope, selection: &Selection, text: Tendril) -> Self { Self::change_by_selection(doc, selection, |range| { diff --git a/helix-core/src/wrap.rs b/helix-core/src/wrap.rs index eabc47d4..2ba8d173 100644 --- a/helix-core/src/wrap.rs +++ b/helix-core/src/wrap.rs @@ -2,6 +2,6 @@ use smartstring::{LazyCompact, SmartString}; /// Given a slice of text, return the text re-wrapped to fit it /// within the given width. 
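// A hedged usage sketch for the renamed helper below: re-wrap a block of text to the
// configured text width. The `helix_core::wrap` path is assumed to be public here.
use helix_core::wrap::reflow_hard_wrap;

fn rewrap(text: &str, text_width: usize) -> String {
    // reflow_hard_wrap returns a SmartString; convert it for plain use
    reflow_hard_wrap(text, text_width).to_string()
}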
-pub fn reflow_hard_wrap(text: &str, max_line_len: usize) -> SmartString { - textwrap::refill(text, max_line_len).into() +pub fn reflow_hard_wrap(text: &str, text_width: usize) -> SmartString { + textwrap::refill(text, text_width).into() } diff --git a/helix-core/tests/indent.rs b/helix-core/tests/indent.rs index e1114f4a..f558f86f 100644 --- a/helix-core/tests/indent.rs +++ b/helix-core/tests/indent.rs @@ -1,5 +1,5 @@ use helix_core::{ - indent::{treesitter_indent_for_pos, IndentStyle}, + indent::{indent_level_for_line, treesitter_indent_for_pos, IndentStyle}, syntax::Loader, Syntax, }; @@ -17,6 +17,39 @@ fn test_treesitter_indent_rust_2() { // test_treesitter_indent("commands.rs", "source.rust"); } +#[test] +fn test_indent_level_for_line_with_spaces() { + let tab_width: usize = 4; + let indent_width: usize = 4; + + let line = ropey::Rope::from_str(" Indented with 8 spaces"); + + let indent_level = indent_level_for_line(line.slice(0..), tab_width, indent_width); + assert_eq!(indent_level, 2) +} + +#[test] +fn test_indent_level_for_line_with_tabs() { + let tab_width: usize = 4; + let indent_width: usize = 4; + + let line = ropey::Rope::from_str("\t\tIndented with 2 tabs"); + + let indent_level = indent_level_for_line(line.slice(0..), tab_width, indent_width); + assert_eq!(indent_level, 2) +} + +#[test] +fn test_indent_level_for_line_with_spaces_and_tabs() { + let tab_width: usize = 4; + let indent_width: usize = 4; + + let line = ropey::Rope::from_str(" \t \tIndented with mix of spaces and tabs"); + + let indent_level = indent_level_for_line(line.slice(0..), tab_width, indent_width); + assert_eq!(indent_level, 2) +} + fn test_treesitter_indent(file_name: &str, lang_scope: &str) { let mut test_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")); test_dir.push("tests/data/indent"); @@ -28,8 +61,8 @@ fn test_treesitter_indent(file_name: &str, lang_scope: &str) { let mut config_file = test_dir; config_file.push("languages.toml"); - let config = std::fs::read(config_file).unwrap(); - let config = toml::from_slice(&config).unwrap(); + let config = std::fs::read_to_string(config_file).unwrap(); + let config = toml::from_str(&config).unwrap(); let loader = Loader::new(config); // set runtime path so we can find the queries @@ -46,11 +79,13 @@ fn test_treesitter_indent(file_name: &str, lang_scope: &str) { for i in 0..doc.len_lines() { let line = text.line(i); if let Some(pos) = helix_core::find_first_non_whitespace_char(line) { + let tab_and_indent_width: usize = 4; let suggested_indent = treesitter_indent_for_pos( indent_query, &syntax, - &IndentStyle::Spaces(4), - 4, + &IndentStyle::Spaces(tab_and_indent_width as u8), + tab_and_indent_width, + tab_and_indent_width, text, i, text.line_to_char(i) + pos, diff --git a/helix-dap/Cargo.toml b/helix-dap/Cargo.toml index 95a05905..d42ce23f 100644 --- a/helix-dap/Cargo.toml +++ b/helix-dap/Cargo.toml @@ -19,7 +19,7 @@ serde = { version = "1.0", features = ["derive"] } serde_json = "1.0" thiserror = "1.0" tokio = { version = "1", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot", "net", "sync"] } -which = "4.2" +which = "4.4" [dev-dependencies] fern = "0.6" diff --git a/helix-dap/src/client.rs b/helix-dap/src/client.rs index e72d290e..ff727d00 100644 --- a/helix-dap/src/client.rs +++ b/helix-dap/src/client.rs @@ -1,4 +1,5 @@ use crate::{ + requests::DisconnectArguments, transport::{Payload, Request, Response, Transport}, types::*, Error, Result, ThreadId, @@ -31,6 +32,8 @@ pub struct Client { _process: 
Option, server_tx: UnboundedSender, request_counter: AtomicU64, + connection_type: Option, + starting_request_args: Option, pub caps: Option, // thread_id -> frames pub stack_frames: HashMap>, @@ -41,6 +44,12 @@ pub struct Client { pub quirks: DebuggerQuirks, } +#[derive(Clone, Copy, Debug)] +pub enum ConnectionType { + Launch, + Attach, +} + impl Client { // Spawn a process and communicate with it by either TCP or stdio pub async fn process( @@ -78,7 +87,8 @@ impl Client { server_tx, request_counter: AtomicU64::new(0), caps: None, - // + connection_type: None, + starting_request_args: None, stack_frames: HashMap::new(), thread_states: HashMap::new(), thread_id: None, @@ -150,6 +160,10 @@ impl Client { ) } + pub fn starting_request_args(&self) -> &Option { + &self.starting_request_args + } + pub async fn tcp_process( cmd: &str, args: Vec<&str>, @@ -207,6 +221,10 @@ impl Client { self.id } + pub fn connection_type(&self) -> Option { + self.connection_type + } + fn next_request_id(&self) -> u64 { self.request_counter.fetch_add(1, Ordering::Relaxed) } @@ -254,7 +272,7 @@ impl Client { // TODO: specifiable timeout, delay other calls until initialize success timeout(Duration::from_secs(20), callback_rx.recv()) .await - .map_err(|_| Error::Timeout)? // return Timeout + .map_err(|_| Error::Timeout(id))? // return Timeout .ok_or(Error::StreamClosed)? .map(|response| response.body.unwrap_or_default()) // TODO: check response.success @@ -334,18 +352,35 @@ impl Client { Ok(()) } - pub fn disconnect(&self) -> impl Future> { - self.call::(()) + pub fn disconnect( + &mut self, + args: Option, + ) -> impl Future> { + self.connection_type = None; + self.call::(args) } - pub fn launch(&self, args: serde_json::Value) -> impl Future> { + pub fn launch(&mut self, args: serde_json::Value) -> impl Future> { + self.connection_type = Some(ConnectionType::Launch); + self.starting_request_args = Some(args.clone()); self.call::(args) } - pub fn attach(&self, args: serde_json::Value) -> impl Future> { + pub fn attach(&mut self, args: serde_json::Value) -> impl Future> { + self.connection_type = Some(ConnectionType::Attach); + self.starting_request_args = Some(args.clone()); self.call::(args) } + pub fn restart(&self) -> impl Future> { + let args = if let Some(args) = &self.starting_request_args { + args.clone() + } else { + Value::Null + }; + self.call::(args) + } + pub async fn set_breakpoints( &self, file: PathBuf, diff --git a/helix-dap/src/lib.rs b/helix-dap/src/lib.rs index f60b102c..21162cb8 100644 --- a/helix-dap/src/lib.rs +++ b/helix-dap/src/lib.rs @@ -2,7 +2,7 @@ mod client; mod transport; mod types; -pub use client::Client; +pub use client::{Client, ConnectionType}; pub use events::Event; pub use transport::{Payload, Response, Transport}; pub use types::*; @@ -14,8 +14,8 @@ pub enum Error { Parse(#[from] serde_json::Error), #[error("IO Error: {0}")] IO(#[from] std::io::Error), - #[error("request timed out")] - Timeout, + #[error("request {0} timed out")] + Timeout(u64), #[error("server closed the stream")] StreamClosed, #[error(transparent)] diff --git a/helix-dap/src/types.rs b/helix-dap/src/types.rs index 0a9ebe5e..bbaf53a6 100644 --- a/helix-dap/src/types.rs +++ b/helix-dap/src/types.rs @@ -378,7 +378,7 @@ pub mod requests { impl Request for Launch { type Arguments = Value; - type Result = Value; + type Result = (); const COMMAND: &'static str = "launch"; } @@ -387,15 +387,35 @@ pub mod requests { impl Request for Attach { type Arguments = Value; - type Result = Value; + type Result = (); const COMMAND: 
&'static str = "attach"; } + #[derive(Debug, Default, PartialEq, Eq, Clone, Deserialize, Serialize)] + #[serde(rename_all = "camelCase")] + pub struct DisconnectArguments { + #[serde(skip_serializing_if = "Option::is_none")] + pub restart: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub terminate_debuggee: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub suspend_debuggee: Option, + } + + #[derive(Debug)] + pub enum Restart {} + + impl Request for Restart { + type Arguments = Value; + type Result = (); + const COMMAND: &'static str = "restart"; + } + #[derive(Debug)] pub enum Disconnect {} impl Request for Disconnect { - type Arguments = (); + type Arguments = Option; type Result = (); const COMMAND: &'static str = "disconnect"; } diff --git a/helix-loader/Cargo.toml b/helix-loader/Cargo.toml index a3d14584..9225ad1a 100644 --- a/helix-loader/Cargo.toml +++ b/helix-loader/Cargo.toml @@ -16,7 +16,7 @@ path = "src/main.rs" [dependencies] anyhow = "1" serde = { version = "1.0", features = ["derive"] } -toml = "0.5" +toml = "0.7" etcetera = "0.4" tree-sitter = "0.20" once_cell = "1.17" diff --git a/helix-loader/src/config.rs b/helix-loader/src/config.rs index 259b1318..0f329d21 100644 --- a/helix-loader/src/config.rs +++ b/helix-loader/src/config.rs @@ -1,6 +1,9 @@ +use std::str::from_utf8; + /// Default built-in languages.toml. pub fn default_lang_config() -> toml::Value { - toml::from_slice(include_bytes!("../../languages.toml")) + let default_config = include_bytes!("../../languages.toml"); + toml::from_str(from_utf8(default_config).unwrap()) .expect("Could not parse built-in languages.toml to valid toml") } @@ -11,8 +14,8 @@ pub fn user_lang_config() -> Result { .chain([crate::config_dir()].into_iter()) .map(|path| path.join("languages.toml")) .filter_map(|file| { - std::fs::read(&file) - .map(|config| toml::from_slice(&config)) + std::fs::read_to_string(file) + .map(|config| toml::from_str(&config)) .ok() }) .collect::, _>>()? diff --git a/helix-loader/src/grammar.rs b/helix-loader/src/grammar.rs index 2aa92475..a85cb274 100644 --- a/helix-loader/src/grammar.rs +++ b/helix-loader/src/grammar.rs @@ -67,8 +67,9 @@ pub fn get_language(name: &str) -> Result { #[cfg(not(target_arch = "wasm32"))] pub fn get_language(name: &str) -> Result { use libloading::{Library, Symbol}; - let mut library_path = crate::runtime_dir().join("grammars").join(name); - library_path.set_extension(DYLIB_EXTENSION); + let mut rel_library_path = PathBuf::new().join("grammars").join(name); + rel_library_path.set_extension(DYLIB_EXTENSION); + let library_path = crate::runtime_file(&rel_library_path); let library = unsafe { Library::new(&library_path) } .with_context(|| format!("Error opening dynamic library {:?}", library_path))?; @@ -252,7 +253,9 @@ fn fetch_grammar(grammar: GrammarConfiguration) -> Result { remote, revision, .. 
} = grammar.source { - let grammar_dir = crate::runtime_dir() + let grammar_dir = crate::runtime_dirs() + .first() + .expect("No runtime directories provided") // guaranteed by post-condition .join("grammars") .join("sources") .join(&grammar.grammar_id); @@ -350,7 +353,9 @@ fn build_grammar(grammar: GrammarConfiguration, target: Option<&str>) -> Result< let grammar_dir = if let GrammarSource::Local { path } = &grammar.source { PathBuf::from(&path) } else { - crate::runtime_dir() + crate::runtime_dirs() + .first() + .expect("No runtime directories provided") // guaranteed by post-condition .join("grammars") .join("sources") .join(&grammar.grammar_id) @@ -401,7 +406,10 @@ fn build_tree_sitter_library( None } }; - let parser_lib_path = crate::runtime_dir().join("grammars"); + let parser_lib_path = crate::runtime_dirs() + .first() + .expect("No runtime directories provided") // guaranteed by post-condition + .join("grammars"); let mut library_path = parser_lib_path.join(&grammar.grammar_id); library_path.set_extension(DYLIB_EXTENSION); @@ -511,9 +519,6 @@ fn mtime(path: &Path) -> Result { /// Gives the contents of a file from a language's `runtime/queries/` /// directory pub fn load_runtime_file(language: &str, filename: &str) -> Result { - let path = crate::RUNTIME_DIR - .join("queries") - .join(language) - .join(filename); - std::fs::read_to_string(&path) + let path = crate::runtime_file(&PathBuf::new().join("queries").join(language).join(filename)); + std::fs::read_to_string(path) } diff --git a/helix-loader/src/lib.rs b/helix-loader/src/lib.rs index 80d44a82..04b44b5a 100644 --- a/helix-loader/src/lib.rs +++ b/helix-loader/src/lib.rs @@ -2,11 +2,12 @@ pub mod config; pub mod grammar; use etcetera::base_strategy::{choose_base_strategy, BaseStrategy}; -use std::path::PathBuf; +use std::path::{Path, PathBuf}; pub const VERSION_AND_GIT_HASH: &str = env!("VERSION_AND_GIT_HASH"); -pub static RUNTIME_DIR: once_cell::sync::Lazy = once_cell::sync::Lazy::new(runtime_dir); +static RUNTIME_DIRS: once_cell::sync::Lazy> = + once_cell::sync::Lazy::new(prioritize_runtime_dirs); static CONFIG_FILE: once_cell::sync::OnceCell = once_cell::sync::OnceCell::new(); @@ -25,31 +26,83 @@ pub fn initialize_config_file(specified_file: Option) { CONFIG_FILE.set(config_file).ok(); } -pub fn runtime_dir() -> PathBuf { - if let Ok(dir) = std::env::var("HELIX_RUNTIME") { - return dir.into(); - } - +/// A list of runtime directories from highest to lowest priority +/// +/// The priority is: +/// +/// 1. sibling directory to `CARGO_MANIFEST_DIR` (if environment variable is set) +/// 2. subdirectory of user config directory (always included) +/// 3. `HELIX_RUNTIME` (if environment variable is set) +/// 4. subdirectory of path to helix executable (always included) +/// +/// Postcondition: returns at least two paths (they might not exist). 
+fn prioritize_runtime_dirs() -> Vec { + const RT_DIR: &str = "runtime"; + // Adding higher priority first + let mut rt_dirs = Vec::new(); if let Ok(dir) = std::env::var("CARGO_MANIFEST_DIR") { // this is the directory of the crate being run by cargo, we need the workspace path so we take the parent let path = std::path::PathBuf::from(dir).parent().unwrap().join(RT_DIR); log::debug!("runtime dir: {}", path.to_string_lossy()); - return path; + rt_dirs.push(path); } - const RT_DIR: &str = "runtime"; - let conf_dir = config_dir().join(RT_DIR); - if conf_dir.exists() { - return conf_dir; + let conf_rt_dir = config_dir().join(RT_DIR); + rt_dirs.push(conf_rt_dir); + + if let Ok(dir) = std::env::var("HELIX_RUNTIME") { + rt_dirs.push(dir.into()); } // fallback to location of the executable being run // canonicalize the path in case the executable is symlinked - std::env::current_exe() + let exe_rt_dir = std::env::current_exe() .ok() .and_then(|path| std::fs::canonicalize(path).ok()) .and_then(|path| path.parent().map(|path| path.to_path_buf().join(RT_DIR))) - .unwrap() + .unwrap(); + rt_dirs.push(exe_rt_dir); + rt_dirs +} + +/// Runtime directories ordered from highest to lowest priority +/// +/// All directories should be checked when looking for files. +/// +/// Postcondition: returns at least one path (it might not exist). +pub fn runtime_dirs() -> &'static [PathBuf] { + &RUNTIME_DIRS +} + +/// Find file with path relative to runtime directory +/// +/// `rel_path` should be the relative path from within the `runtime/` directory. +/// The valid runtime directories are searched in priority order and the first +/// file found to exist is returned, otherwise None. +fn find_runtime_file(rel_path: &Path) -> Option { + RUNTIME_DIRS.iter().find_map(|rt_dir| { + let path = rt_dir.join(rel_path); + if path.exists() { + Some(path) + } else { + None + } + }) +} + +/// Find file with path relative to runtime directory +/// +/// `rel_path` should be the relative path from within the `runtime/` directory. +/// The valid runtime directories are searched in priority order and the first +/// file found to exist is returned, otherwise the path to the final attempt +/// that failed. 
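Editorial aside, not part of the patch: a small sketch of how a caller would use the new lookup API once the prioritized directory list is in place. The query path below is only an example value.

```rust
use std::path::PathBuf;

fn runtime_lookup_example() {
    // Relative path inside `runtime/`; the highest-priority directory that
    // actually contains the file wins. If none does, the last candidate is
    // returned so the caller still gets a sensible path for error messages.
    let rel_path = PathBuf::new()
        .join("queries")
        .join("rust")
        .join("highlights.scm");
    let path = helix_loader::runtime_file(&rel_path);
    println!("loading queries from {}", path.display());

    // All candidate directories, highest priority first.
    for dir in helix_loader::runtime_dirs() {
        println!("runtime dir candidate: {}", dir.display());
    }
}
```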
+pub fn runtime_file(rel_path: &Path) -> PathBuf { + find_runtime_file(rel_path).unwrap_or_else(|| { + RUNTIME_DIRS + .last() + .map(|dir| dir.join(rel_path)) + .unwrap_or_default() + }) } pub fn config_dir() -> PathBuf { @@ -179,6 +232,8 @@ pub fn merge_toml_values(left: toml::Value, right: toml::Value, merge_depth: usi #[cfg(test)] mod merge_toml_tests { + use std::str; + use super::merge_toml_values; use toml::Value; @@ -191,8 +246,9 @@ mod merge_toml_tests { indent = { tab-width = 4, unit = " ", test = "aaa" } "#; - let base: Value = toml::from_slice(include_bytes!("../../languages.toml")) - .expect("Couldn't parse built-in languages config"); + let base = include_bytes!("../../languages.toml"); + let base = str::from_utf8(base).expect("Couldn't parse built-in languages config"); + let base: Value = toml::from_str(base).expect("Couldn't parse built-in languages config"); let user: Value = toml::from_str(USER).unwrap(); let merged = merge_toml_values(base, user, 3); @@ -224,8 +280,9 @@ mod merge_toml_tests { language-server = { command = "deno", args = ["lsp"] } "#; - let base: Value = toml::from_slice(include_bytes!("../../languages.toml")) - .expect("Couldn't parse built-in languages config"); + let base = include_bytes!("../../languages.toml"); + let base = str::from_utf8(base).expect("Couldn't parse built-in languages config"); + let base: Value = toml::from_str(base).expect("Couldn't parse built-in languages config"); let user: Value = toml::from_str(USER).unwrap(); let merged = merge_toml_values(base, user, 3); diff --git a/helix-lsp/Cargo.toml b/helix-lsp/Cargo.toml index d31731d2..9d76822d 100644 --- a/helix-lsp/Cargo.toml +++ b/helix-lsp/Cargo.toml @@ -14,15 +14,16 @@ homepage = "https://helix-editor.com" [dependencies] helix-core = { version = "0.6", path = "../helix-core" } helix-loader = { version = "0.6", path = "../helix-loader" } +helix-parsec = { version = "0.6", path = "../helix-parsec" } anyhow = "1.0" futures-executor = "0.3" futures-util = { version = "0.3", features = ["std", "async-await"], default-features = false } log = "0.4" -lsp-types = { version = "0.93", features = ["proposed"] } +lsp-types = { version = "0.94" } serde = { version = "1.0", features = ["derive"] } serde_json = "1.0" thiserror = "1.0" -tokio = { version = "1.24", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot", "sync"] } -tokio-stream = "0.1.11" -which = "4.2" +tokio = { version = "1.26", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot", "sync"] } +tokio-stream = "0.1.12" +which = "4.4" diff --git a/helix-lsp/src/client.rs b/helix-lsp/src/client.rs index dd2581c6..f93e5826 100644 --- a/helix-lsp/src/client.rs +++ b/helix-lsp/src/client.rs @@ -6,6 +6,7 @@ use crate::{ use helix_core::{find_root, ChangeSet, Rope}; use helix_loader::{self, VERSION_AND_GIT_HASH}; +use lsp::PositionEncodingKind; use lsp_types as lsp; use serde::Deserialize; use serde_json::Value; @@ -32,7 +33,6 @@ pub struct Client { server_tx: UnboundedSender, request_counter: AtomicU64, pub(crate) capabilities: OnceCell, - offset_encoding: OffsetEncoding, config: Option, root_path: std::path::PathBuf, root_uri: Option, @@ -104,7 +104,6 @@ impl Client { server_tx, request_counter: AtomicU64::new(0), capabilities: OnceCell::new(), - offset_encoding: OffsetEncoding::Utf8, config, req_timeout, @@ -147,7 +146,19 @@ impl Client { } pub fn offset_encoding(&self) -> OffsetEncoding { - self.offset_encoding + 
self.capabilities() + .position_encoding + .as_ref() + .and_then(|encoding| match encoding.as_str() { + "utf-8" => Some(OffsetEncoding::Utf8), + "utf-16" => Some(OffsetEncoding::Utf16), + "utf-32" => Some(OffsetEncoding::Utf32), + encoding => { + log::error!("Server provided invalid position encording {encoding}, defaulting to utf-16"); + None + }, + }) + .unwrap_or_default() } pub fn config(&self) -> Option<&Value> { @@ -190,7 +201,7 @@ impl Client { let request = jsonrpc::MethodCall { jsonrpc: Some(jsonrpc::Version::V2), - id, + id: id.clone(), method: R::METHOD.to_string(), params: Self::value_into_params(params), }; @@ -207,7 +218,7 @@ impl Client { // TODO: delay other calls until initialize success timeout(Duration::from_secs(timeout_secs), rx.recv()) .await - .map_err(|_| Error::Timeout)? // return Timeout + .map_err(|_| Error::Timeout(id))? // return Timeout .ok_or(Error::StreamClosed)? } } @@ -304,12 +315,26 @@ impl Client { execute_command: Some(lsp::DynamicRegistrationClientCapabilities { dynamic_registration: Some(false), }), + inlay_hint: Some(lsp::InlayHintWorkspaceClientCapabilities { + refresh_support: Some(false), + }), + workspace_edit: Some(lsp::WorkspaceEditClientCapabilities { + document_changes: Some(true), + resource_operations: Some(vec![ + lsp::ResourceOperationKind::Create, + lsp::ResourceOperationKind::Rename, + lsp::ResourceOperationKind::Delete, + ]), + failure_handling: Some(lsp::FailureHandlingKind::Abort), + normalizes_line_endings: Some(false), + change_annotation_support: None, + }), ..Default::default() }), text_document: Some(lsp::TextDocumentClientCapabilities { completion: Some(lsp::CompletionClientCapabilities { completion_item: Some(lsp::CompletionItemCapability { - snippet_support: Some(false), + snippet_support: Some(true), resolve_support: Some(lsp::CompletionItemCapabilityResolveSupport { properties: vec![ String::from("documentation"), @@ -318,6 +343,10 @@ impl Client { ], }), insert_replace_support: Some(true), + deprecated_support: Some(true), + tag_support: Some(lsp::TagSupport { + value_set: vec![lsp::CompletionItemTag::DEPRECATED], + }), ..Default::default() }), completion_item_kind: Some(lsp::CompletionItemKindCapability { @@ -344,7 +373,7 @@ impl Client { }), rename: Some(lsp::RenameClientCapabilities { dynamic_registration: Some(false), - prepare_support: Some(false), + prepare_support: Some(true), prepare_support_default_behavior: None, honors_change_annotations: Some(false), }), @@ -369,14 +398,27 @@ impl Client { ..Default::default() }), publish_diagnostics: Some(lsp::PublishDiagnosticsClientCapabilities { + version_support: Some(true), ..Default::default() }), + inlay_hint: Some(lsp::InlayHintClientCapabilities { + dynamic_registration: Some(false), + resolve_support: None, + }), ..Default::default() }), window: Some(lsp::WindowClientCapabilities { work_done_progress: Some(true), ..Default::default() }), + general: Some(lsp::GeneralClientCapabilities { + position_encodings: Some(vec![ + PositionEncodingKind::UTF32, + PositionEncodingKind::UTF8, + PositionEncodingKind::UTF16, + ]), + ..Default::default() + }), ..Default::default() }, trace: None, @@ -577,7 +619,7 @@ impl Client { }] } lsp::TextDocumentSyncKind::INCREMENTAL => { - Self::changeset_to_changes(old_text, new_text, changes, self.offset_encoding) + Self::changeset_to_changes(old_text, new_text, changes, self.offset_encoding()) } lsp::TextDocumentSyncKind::NONE => return None, kind => unimplemented!("{:?}", kind), @@ -628,7 +670,7 @@ impl Client { Some(self.notify::( 
lsp::DidSaveTextDocumentParams { text_document, - text: include_text.then(|| text.into()), + text: include_text.then_some(text.into()), }, )) } @@ -703,6 +745,31 @@ impl Client { Some(self.call::(params)) } + pub fn text_document_range_inlay_hints( + &self, + text_document: lsp::TextDocumentIdentifier, + range: lsp::Range, + work_done_token: Option, + ) -> Option>> { + let capabilities = self.capabilities.get().unwrap(); + + match capabilities.inlay_hint_provider { + Some( + lsp::OneOf::Left(true) + | lsp::OneOf::Right(lsp::InlayHintServerCapabilities::Options(_)), + ) => (), + _ => return None, + } + + let params = lsp::InlayHintParams { + text_document, + range, + work_done_progress_params: lsp::WorkDoneProgressParams { work_done_token }, + }; + + Some(self.call::(params)) + } + pub fn text_document_hover( &self, text_document: lsp::TextDocumentIdentifier, @@ -886,6 +953,31 @@ impl Client { )) } + pub fn goto_declaration( + &self, + text_document: lsp::TextDocumentIdentifier, + position: lsp::Position, + work_done_token: Option, + ) -> Option>> { + let capabilities = self.capabilities.get().unwrap(); + + // Return early if the server does not support goto-declaration. + match capabilities.declaration_provider { + Some( + lsp::DeclarationCapability::Simple(true) + | lsp::DeclarationCapability::RegistrationOptions(_) + | lsp::DeclarationCapability::Options(_), + ) => (), + _ => return None, + } + + Some(self.goto_request::( + text_document, + position, + work_done_token, + )) + } + pub fn goto_type_definition( &self, text_document: lsp::TextDocumentIdentifier, @@ -986,6 +1078,29 @@ impl Client { Some(self.call::(params)) } + pub fn prepare_rename( + &self, + text_document: lsp::TextDocumentIdentifier, + position: lsp::Position, + ) -> Option>> { + let capabilities = self.capabilities.get().unwrap(); + + match capabilities.rename_provider { + Some(lsp::OneOf::Right(lsp::RenameOptions { + prepare_provider: Some(true), + .. + })) => (), + _ => return None, + } + + let params = lsp::TextDocumentPositionParams { + text_document, + position, + }; + + Some(self.call::(params)) + } + // empty string to get all symbols pub fn workspace_symbols(&self, query: String) -> Option>> { let capabilities = self.capabilities.get().unwrap(); @@ -1002,7 +1117,7 @@ impl Client { partial_result_params: lsp::PartialResultParams::default(), }; - Some(self.call::(params)) + Some(self.call::(params)) } pub fn code_actions( @@ -1033,20 +1148,23 @@ impl Client { Some(self.call::(params)) } + pub fn supports_rename(&self) -> bool { + let capabilities = self.capabilities.get().unwrap(); + matches!( + capabilities.rename_provider, + Some(lsp::OneOf::Left(true) | lsp::OneOf::Right(_)) + ) + } + pub fn rename_symbol( &self, text_document: lsp::TextDocumentIdentifier, position: lsp::Position, new_name: String, ) -> Option>> { - let capabilities = self.capabilities.get().unwrap(); - - // Return early if the language server does not support renaming. 
- match capabilities.rename_provider { - Some(lsp::OneOf::Left(true)) | Some(lsp::OneOf::Right(_)) => (), - // None | Some(false) - _ => return None, - }; + if !self.supports_rename() { + return None; + } let params = lsp::RenameParams { text_document_position: lsp::TextDocumentPositionParams { diff --git a/helix-lsp/src/jsonrpc.rs b/helix-lsp/src/jsonrpc.rs index 69d02707..f415dde0 100644 --- a/helix-lsp/src/jsonrpc.rs +++ b/helix-lsp/src/jsonrpc.rs @@ -108,6 +108,16 @@ pub enum Id { Str(String), } +impl std::fmt::Display for Id { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Id::Null => f.write_str("null"), + Id::Num(num) => write!(f, "{}", num), + Id::Str(string) => f.write_str(string), + } + } +} + /// Protocol Version #[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)] pub enum Version { diff --git a/helix-lsp/src/lib.rs b/helix-lsp/src/lib.rs index 8418896c..5609a624 100644 --- a/helix-lsp/src/lib.rs +++ b/helix-lsp/src/lib.rs @@ -1,5 +1,6 @@ mod client; pub mod jsonrpc; +pub mod snippet; mod transport; pub use client::Client; @@ -20,7 +21,6 @@ use std::{ }, }; -use serde::{Deserialize, Serialize}; use thiserror::Error; use tokio_stream::wrappers::UnboundedReceiverStream; @@ -35,8 +35,8 @@ pub enum Error { Parse(#[from] serde_json::Error), #[error("IO Error: {0}")] IO(#[from] std::io::Error), - #[error("request timed out")] - Timeout, + #[error("request {0} timed out")] + Timeout(jsonrpc::Id), #[error("server closed the stream")] StreamClosed, #[error("Unhandled")] @@ -45,18 +45,21 @@ pub enum Error { Other(#[from] anyhow::Error), } -#[derive(Clone, Copy, Debug, Serialize, Deserialize)] +#[derive(Clone, Copy, Debug, Default)] pub enum OffsetEncoding { /// UTF-8 code units aka bytes - #[serde(rename = "utf-8")] Utf8, + /// UTF-32 code units aka chars + Utf32, /// UTF-16 code units - #[serde(rename = "utf-16")] + #[default] Utf16, } pub mod util { use super::*; + use helix_core::line_ending::{line_end_byte_index, line_end_char_index}; + use helix_core::{chars, RopeSlice, SmallVec}; use helix_core::{diagnostic::NumberOrString, Range, Rope, Selection, Tendril, Transaction}; /// Converts a diagnostic in the document to [`lsp::Diagnostic`]. @@ -117,7 +120,7 @@ pub mod util { /// Converts [`lsp::Position`] to a position in the document. /// - /// Returns `None` if position exceeds document length or an operation overflows. + /// Returns `None` if position.line is out of bounds or an overflow occurs pub fn lsp_pos_to_pos( doc: &Rope, pos: lsp::Position, @@ -128,22 +131,63 @@ pub mod util { return None; } - match offset_encoding { + // We need to be careful here to fully comply with the LSP spec. + // Two relevant quotes from the spec: + // + // https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#position + // > If the character value is greater than the line length it defaults back + // > to the line length. + // + // https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocuments + // > To ensure that both client and server split the string into the same + // > line representation the protocol specifies the following end-of-line sequences: + // > ‘\n’, ‘\r\n’ and ‘\r’. Positions are line end character agnostic. + // > So you can not specify a position that denotes \r|\n or \n| where | represents the character offset. + // + // This means that while the line must be in bounds the `character` + // must be capped to the end of the line.
+ // Note that the end of the line here is **before** the line terminator + // so we must use `line_end_char_index` istead of `doc.line_to_char(pos_line + 1)` + // + // FIXME: Helix does not fully comply with the LSP spec for line terminators. + // The LSP standard requires that line terminators are ['\n', '\r\n', '\r']. + // Without the unicode-linebreak feature disabled, the `\r` terminator is not handled by helix. + // With the unicode-linebreak feature, helix recognizes multiple extra line break chars + // which means that positions will be decoded/encoded incorrectly in their presence + + let line = match offset_encoding { OffsetEncoding::Utf8 => { - let line = doc.line_to_char(pos_line); - let pos = line.checked_add(pos.character as usize)?; - if pos <= doc.len_chars() { - Some(pos) - } else { - None - } + let line_start = doc.line_to_byte(pos_line); + let line_end = line_end_byte_index(&doc.slice(..), pos_line); + line_start..line_end } OffsetEncoding::Utf16 => { - let line = doc.line_to_char(pos_line); - let line_start = doc.char_to_utf16_cu(line); - let pos = line_start.checked_add(pos.character as usize)?; - doc.try_utf16_cu_to_char(pos).ok() + // TODO directly translate line index to char-idx + // ropey can do this just as easily as utf-8 byte translation + // but the functions are just missing. + // Translate to char first and then utf-16 as a workaround + let line_start = doc.line_to_char(pos_line); + let line_end = line_end_char_index(&doc.slice(..), pos_line); + doc.char_to_utf16_cu(line_start)..doc.char_to_utf16_cu(line_end) + } + OffsetEncoding::Utf32 => { + let line_start = doc.line_to_char(pos_line); + let line_end = line_end_char_index(&doc.slice(..), pos_line); + line_start..line_end } + }; + + // The LSP spec demands that the offset is capped to the end of the line + let pos = line + .start + .checked_add(pos.character as usize) + .unwrap_or(line.end) + .min(line.end); + + match offset_encoding { + OffsetEncoding::Utf8 => doc.try_byte_to_char(pos).ok(), + OffsetEncoding::Utf16 => doc.try_utf16_cu_to_char(pos).ok(), + OffsetEncoding::Utf32 => Some(pos), } } @@ -158,8 +202,8 @@ pub mod util { match offset_encoding { OffsetEncoding::Utf8 => { let line = doc.char_to_line(pos); - let line_start = doc.line_to_char(line); - let col = pos - line_start; + let line_start = doc.line_to_byte(line); + let col = doc.char_to_byte(pos) - line_start; lsp::Position::new(line as u32, col as u32) } @@ -168,6 +212,13 @@ pub mod util { let line_start = doc.char_to_utf16_cu(doc.line_to_char(line)); let col = doc.char_to_utf16_cu(pos) - line_start; + lsp::Position::new(line as u32, col as u32) + } + OffsetEncoding::Utf32 => { + let line = doc.char_to_line(pos); + let line_start = doc.line_to_char(line); + let col = pos - line_start; + lsp::Position::new(line as u32, col as u32) } } @@ -196,40 +247,227 @@ pub mod util { Some(Range::new(start, end)) } + /// If the LS did not provide a range for the completion or the range of the + /// primary cursor can not be used for the secondary cursor, this function + /// can be used to find the completion range for a cursor + fn find_completion_range(text: RopeSlice, replace_mode: bool, cursor: usize) -> (usize, usize) { + let start = cursor + - text + .chars_at(cursor) + .reversed() + .take_while(|ch| chars::char_is_word(*ch)) + .count(); + let mut end = cursor; + if replace_mode { + end += text + .chars_at(cursor) + .skip(1) + .take_while(|ch| chars::char_is_word(*ch)) + .count(); + } + (start, end) + } + fn completion_range( + text: RopeSlice, + 
edit_offset: Option<(i128, i128)>, + replace_mode: bool, + cursor: usize, + ) -> Option<(usize, usize)> { + let res = match edit_offset { + Some((start_offset, end_offset)) => { + let start_offset = cursor as i128 + start_offset; + if start_offset < 0 { + return None; + } + let end_offset = cursor as i128 + end_offset; + if end_offset > text.len_chars() as i128 { + return None; + } + (start_offset as usize, end_offset as usize) + } + None => find_completion_range(text, replace_mode, cursor), + }; + Some(res) + } + /// Creates a [Transaction] from the [lsp::TextEdit] in a completion response. /// The transaction applies the edit to all cursors. pub fn generate_transaction_from_completion_edit( doc: &Rope, selection: &Selection, - edit: lsp::TextEdit, - offset_encoding: OffsetEncoding, + edit_offset: Option<(i128, i128)>, + replace_mode: bool, + new_text: String, ) -> Transaction { - let replacement: Option = if edit.new_text.is_empty() { + let replacement: Option = if new_text.is_empty() { None } else { - Some(edit.new_text.into()) + Some(new_text.into()) }; let text = doc.slice(..); - let primary_cursor = selection.primary().cursor(text); + let (removed_start, removed_end) = completion_range( + text, + edit_offset, + replace_mode, + selection.primary().cursor(text), + ) + .expect("transaction must be valid for primary selection"); + let removed_text = text.slice(removed_start..removed_end); - let start_offset = match lsp_pos_to_pos(doc, edit.range.start, offset_encoding) { - Some(start) => start as i128 - primary_cursor as i128, - None => return Transaction::new(doc), - }; - let end_offset = match lsp_pos_to_pos(doc, edit.range.end, offset_encoding) { - Some(end) => end as i128 - primary_cursor as i128, - None => return Transaction::new(doc), - }; + let (transaction, mut selection) = Transaction::change_by_selection_ignore_overlapping( + doc, + selection, + |range| { + let cursor = range.cursor(text); + completion_range(text, edit_offset, replace_mode, cursor) + .filter(|(start, end)| text.slice(start..end) == removed_text) + .unwrap_or_else(|| find_completion_range(text, replace_mode, cursor)) + }, + |_, _| replacement.clone(), + ); + if transaction.changes().is_empty() { + return transaction; + } + selection = selection.map(transaction.changes()); + transaction.with_selection(selection) + } + + /// Creates a [Transaction] from the [snippet::Snippet] in a completion response. + /// The transaction applies the edit to all cursors. 
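Editorial aside, not part of the patch: a sketch of how `generate_transaction_from_completion_edit` is meant to be driven. The edit range is expressed as offsets relative to the primary cursor so the same completion can be replayed at every cursor; the document, cursor positions, and offsets below are made up.

```rust
use helix_core::{smallvec, Range, Rope, Selection};
use helix_lsp::util::generate_transaction_from_completion_edit;

fn completion_example() {
    let doc = Rope::from("let pri = 1;\nlet pri = 2;\n");
    // Two cursors, each sitting right after a partially typed `pri`.
    let selection = Selection::new(smallvec![Range::point(7), Range::point(20)], 0);
    // The server's edit covered the three characters before the primary cursor.
    let edit_offset = Some((-3i128, 0i128));
    let transaction = generate_transaction_from_completion_edit(
        &doc,
        &selection,
        edit_offset,
        false, // replace_mode: insert, don't consume the word after the cursor
        "println".to_string(),
    );
    let mut doc = doc;
    assert!(transaction.apply(&mut doc));
    assert_eq!(doc, "let println = 1;\nlet println = 2;\n");
}
```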
+ #[allow(clippy::too_many_arguments)] + pub fn generate_transaction_from_snippet( + doc: &Rope, + selection: &Selection, + edit_offset: Option<(i128, i128)>, + replace_mode: bool, + snippet: snippet::Snippet, + line_ending: &str, + include_placeholder: bool, + tab_width: usize, + indent_width: usize, + ) -> Transaction { + let text = doc.slice(..); + + let mut off = 0i128; + let mut mapped_doc = doc.clone(); + let mut selection_tabstops: SmallVec<[_; 1]> = SmallVec::new(); + let (removed_start, removed_end) = completion_range( + text, + edit_offset, + replace_mode, + selection.primary().cursor(text), + ) + .expect("transaction must be valid for primary selection"); + let removed_text = text.slice(removed_start..removed_end); + + let (transaction, selection) = Transaction::change_by_selection_ignore_overlapping( + doc, + selection, + |range| { + let cursor = range.cursor(text); + completion_range(text, edit_offset, replace_mode, cursor) + .filter(|(start, end)| text.slice(start..end) == removed_text) + .unwrap_or_else(|| find_completion_range(text, replace_mode, cursor)) + }, + |replacement_start, replacement_end| { + let mapped_replacement_start = (replacement_start as i128 + off) as usize; + let mapped_replacement_end = (replacement_end as i128 + off) as usize; + + let line_idx = mapped_doc.char_to_line(mapped_replacement_start); + let indent_level = helix_core::indent::indent_level_for_line( + mapped_doc.line(line_idx), + tab_width, + indent_width, + ) * indent_width; + + let newline_with_offset = format!( + "{line_ending}{blank:indent_level$}", + line_ending = line_ending, + blank = "" + ); + + let (replacement, tabstops) = + snippet::render(&snippet, &newline_with_offset, include_placeholder); + selection_tabstops.push((mapped_replacement_start, tabstops)); + mapped_doc.remove(mapped_replacement_start..mapped_replacement_end); + mapped_doc.insert(mapped_replacement_start, &replacement); + off += + replacement_start as i128 - replacement_end as i128 + replacement.len() as i128; + + Some(replacement) + }, + ); + + let changes = transaction.changes(); + if changes.is_empty() { + return transaction; + } + + let mut mapped_selection = SmallVec::with_capacity(selection.len()); + let mut mapped_primary_idx = 0; + let primary_range = selection.primary(); + for (range, (tabstop_anchor, tabstops)) in selection.into_iter().zip(selection_tabstops) { + if range == primary_range { + mapped_primary_idx = mapped_selection.len() + } - Transaction::change_by_selection(doc, selection, |range| { - let cursor = range.cursor(text); - ( - (cursor as i128 + start_offset) as usize, - (cursor as i128 + end_offset) as usize, - replacement.clone(), - ) - }) + let range = range.map(changes); + let tabstops = tabstops.first().filter(|tabstops| !tabstops.is_empty()); + let Some(tabstops) = tabstops else{ + // no tabstop normal mapping + mapped_selection.push(range); + continue; + }; + + // expand the selection to cover the tabstop to retain the helix selection semantic + // the tabstop closest to the range simply replaces `head` while anchor remains in place + // the remaining tabstops receive their own single-width cursor + if range.head < range.anchor { + let first_tabstop = tabstop_anchor + tabstops[0].1; + + // if selection is forward but was moved to the right it is + // contained entirely in the replacement text, just do a point + // selection (fallback below) + if range.anchor >= first_tabstop { + let range = Range::new(range.anchor, first_tabstop); + mapped_selection.push(range); + let rem_tabstops = 
tabstops[1..] + .iter() + .map(|tabstop| Range::point(tabstop_anchor + tabstop.1)); + mapped_selection.extend(rem_tabstops); + continue; + } + } else { + let last_idx = tabstops.len() - 1; + let last_tabstop = tabstop_anchor + tabstops[last_idx].1; + + // if selection is forward but was moved to the right it is + // contained entirely in the replacement text, just do a point + // selection (fallback below) + if range.anchor <= last_tabstop { + // we can't properly compute the the next grapheme + // here because the transaction hasn't been applied yet + // that is not a problem because the range gets grapheme aligned anyway + // tough so just adding one will always cause head to be grapheme + // aligned correctly when applied to the document + let range = Range::new(range.anchor, last_tabstop + 1); + mapped_selection.push(range); + let rem_tabstops = tabstops[..last_idx] + .iter() + .map(|tabstop| Range::point(tabstop_anchor + tabstop.0)); + mapped_selection.extend(rem_tabstops); + continue; + } + }; + + let tabstops = tabstops + .iter() + .map(|tabstop| Range::point(tabstop_anchor + tabstop.0)); + mapped_selection.extend(tabstops); + } + + transaction.with_selection(Selection::new(mapped_selection, mapped_primary_idx)) } pub fn generate_transaction_from_edits( @@ -288,6 +526,7 @@ pub enum MethodCall { ApplyWorkspaceEdit(lsp::ApplyWorkspaceEditParams), WorkspaceFolders, WorkspaceConfiguration(lsp::ConfigurationParams), + RegisterCapability(lsp::RegistrationParams), } impl MethodCall { @@ -307,6 +546,10 @@ impl MethodCall { let params: lsp::ConfigurationParams = params.parse()?; Self::WorkspaceConfiguration(params) } + lsp::request::RegisterCapability::METHOD => { + let params: lsp::RegistrationParams = params.parse()?; + Self::RegisterCapability(params) + } _ => { return Err(Error::Unhandled); } @@ -427,6 +670,16 @@ impl Registry { } } + pub fn stop(&mut self, language_config: &LanguageConfiguration) { + let scope = language_config.scope.clone(); + + if let Some((_, client)) = self.inner.remove(&scope) { + tokio::spawn(async move { + let _ = client.force_shutdown().await; + }); + } + } + pub fn get( &mut self, language_config: &LanguageConfiguration, @@ -606,16 +859,55 @@ mod tests { } test_case!("", (0, 0) => Some(0)); - test_case!("", (0, 1) => None); + test_case!("", (0, 1) => Some(0)); test_case!("", (1, 0) => None); test_case!("\n\n", (0, 0) => Some(0)); test_case!("\n\n", (1, 0) => Some(1)); - test_case!("\n\n", (1, 1) => Some(2)); + test_case!("\n\n", (1, 1) => Some(1)); test_case!("\n\n", (2, 0) => Some(2)); test_case!("\n\n", (3, 0) => None); test_case!("test\n\n\n\ncase", (4, 3) => Some(11)); test_case!("test\n\n\n\ncase", (4, 4) => Some(12)); - test_case!("test\n\n\n\ncase", (4, 5) => None); + test_case!("test\n\n\n\ncase", (4, 5) => Some(12)); test_case!("", (u32::MAX, u32::MAX) => None); } + + #[test] + fn emoji_format_gh_4791() { + use lsp_types::{Position, Range, TextEdit}; + + let edits = vec![ + TextEdit { + range: Range { + start: Position { + line: 0, + character: 1, + }, + end: Position { + line: 1, + character: 0, + }, + }, + new_text: "\n ".to_string(), + }, + TextEdit { + range: Range { + start: Position { + line: 1, + character: 7, + }, + end: Position { + line: 2, + character: 0, + }, + }, + new_text: "\n ".to_string(), + }, + ]; + + let mut source = Rope::from_str("[\n\"🇺🇸\",\n\"🎄\",\n]"); + + let transaction = generate_transaction_from_edits(&source, edits, OffsetEncoding::Utf8); + assert!(transaction.apply(&mut source)); + } } diff --git 
a/helix-lsp/src/snippet.rs b/helix-lsp/src/snippet.rs new file mode 100644 index 00000000..a4f049e8 --- /dev/null +++ b/helix-lsp/src/snippet.rs @@ -0,0 +1,1010 @@ +use std::borrow::Cow; + +use anyhow::{anyhow, Result}; +use helix_core::{smallvec, SmallVec, Tendril}; + +#[derive(Debug, PartialEq, Eq)] +pub enum CaseChange { + Upcase, + Downcase, + Capitalize, +} + +#[derive(Debug, PartialEq, Eq)] +pub enum FormatItem { + Text(Tendril), + Capture(usize), + CaseChange(usize, CaseChange), + Conditional(usize, Option, Option), +} + +#[derive(Debug, PartialEq, Eq)] +pub struct Regex { + value: Tendril, + replacement: Vec, + options: Tendril, +} + +#[derive(Debug, PartialEq, Eq)] +pub enum SnippetElement<'a> { + Tabstop { + tabstop: usize, + }, + Placeholder { + tabstop: usize, + value: Vec>, + }, + Choice { + tabstop: usize, + choices: Vec, + }, + Variable { + name: &'a str, + default: Option>>, + regex: Option, + }, + Text(Tendril), +} + +#[derive(Debug, PartialEq, Eq)] +pub struct Snippet<'a> { + elements: Vec>, +} + +pub fn parse(s: &str) -> Result> { + parser::parse(s).map_err(|rest| anyhow!("Failed to parse snippet. Remaining input: {}", rest)) +} + +fn render_elements( + snippet_elements: &[SnippetElement<'_>], + insert: &mut Tendril, + offset: &mut usize, + tabstops: &mut Vec<(usize, (usize, usize))>, + newline_with_offset: &str, + include_placeholer: bool, +) { + use SnippetElement::*; + + for element in snippet_elements { + match element { + Text(text) => { + // small optimization to avoid calling replace when it's unnecessary + let text = if text.contains('\n') { + Cow::Owned(text.replace('\n', newline_with_offset)) + } else { + Cow::Borrowed(text.as_str()) + }; + *offset += text.chars().count(); + insert.push_str(&text); + } + Variable { + name: _, + regex: _, + r#default, + } => { + // TODO: variables. For now, fall back to the default, which defaults to "". 
+ render_elements( + r#default.as_deref().unwrap_or_default(), + insert, + offset, + tabstops, + newline_with_offset, + include_placeholer, + ); + } + &Tabstop { tabstop } => { + tabstops.push((tabstop, (*offset, *offset))); + } + Placeholder { + tabstop, + value: inner_snippet_elements, + } => { + let start_offset = *offset; + if include_placeholer { + render_elements( + inner_snippet_elements, + insert, + offset, + tabstops, + newline_with_offset, + include_placeholer, + ); + } + tabstops.push((*tabstop, (start_offset, *offset))); + } + &Choice { + tabstop, + choices: _, + } => { + // TODO: choices + tabstops.push((tabstop, (*offset, *offset))); + } + } + } +} + +#[allow(clippy::type_complexity)] // only used one time +pub fn render( + snippet: &Snippet<'_>, + newline_with_offset: &str, + include_placeholer: bool, +) -> (Tendril, Vec>) { + let mut insert = Tendril::new(); + let mut tabstops = Vec::new(); + let mut offset = 0; + + render_elements( + &snippet.elements, + &mut insert, + &mut offset, + &mut tabstops, + newline_with_offset, + include_placeholer, + ); + + // sort in ascending order (except for 0, which should always be the last one (per lsp doc)) + tabstops.sort_unstable_by_key(|(n, _)| if *n == 0 { usize::MAX } else { *n }); + + // merge tabstops with the same index (we take advantage of the fact that we just sorted them + // above to simply look backwards) + let mut ntabstops = Vec::>::new(); + { + let mut prev = None; + for (tabstop, r) in tabstops { + if prev == Some(tabstop) { + let len_1 = ntabstops.len() - 1; + ntabstops[len_1].push(r); + } else { + prev = Some(tabstop); + ntabstops.push(smallvec![r]); + } + } + } + + (insert, ntabstops) +} + +mod parser { + use helix_core::Tendril; + use helix_parsec::*; + + use super::{CaseChange, FormatItem, Regex, Snippet, SnippetElement}; + + /* + https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#snippet_syntax + + any ::= tabstop | placeholder | choice | variable | text + tabstop ::= '$' int | '${' int '}' + placeholder ::= '${' int ':' any '}' + choice ::= '${' int '|' text (',' text)* '|}' + variable ::= '$' var | '${' var }' + | '${' var ':' any '}' + | '${' var '/' regex '/' (format | text)+ '/' options '}' + format ::= '$' int | '${' int '}' + | '${' int ':' '/upcase' | '/downcase' | '/capitalize' '}' + | '${' int ':+' if '}' + | '${' int ':?' 
if ':' else '}' + | '${' int ':-' else '}' | '${' int ':' else '}' + regex ::= Regular Expression value (ctor-string) + options ::= Regular Expression option (ctor-options) + var ::= [_a-zA-Z] [_a-zA-Z0-9]* + int ::= [0-9]+ + text ::= .* + if ::= text + else ::= text + */ + + fn var<'a>() -> impl Parser<'a, Output = &'a str> { + // var = [_a-zA-Z][_a-zA-Z0-9]* + move |input: &'a str| { + input + .char_indices() + .take_while(|(p, c)| { + *c == '_' + || if *p == 0 { + c.is_ascii_alphabetic() + } else { + c.is_ascii_alphanumeric() + } + }) + .last() + .map(|(index, c)| { + let index = index + c.len_utf8(); + (&input[index..], &input[0..index]) + }) + .ok_or(input) + } + } + + const TEXT_ESCAPE_CHARS: &[char] = &['\\', '}', '$']; + const CHOICE_TEXT_ESCAPE_CHARS: &[char] = &['\\', '|', ',']; + + fn text<'a>( + escape_chars: &'static [char], + term_chars: &'static [char], + ) -> impl Parser<'a, Output = Tendril> { + move |input: &'a str| { + let mut chars = input.char_indices().peekable(); + let mut res = Tendril::new(); + while let Some((i, c)) = chars.next() { + match c { + '\\' => { + if let Some(&(_, c)) = chars.peek() { + if escape_chars.contains(&c) { + chars.next(); + res.push(c); + continue; + } + } + res.push('\\'); + } + c if term_chars.contains(&c) => return Ok((&input[i..], res)), + c => res.push(c), + } + } + + Ok(("", res)) + } + } + + fn digit<'a>() -> impl Parser<'a, Output = usize> { + filter_map(take_while(|c| c.is_ascii_digit()), |s| s.parse().ok()) + } + + fn case_change<'a>() -> impl Parser<'a, Output = CaseChange> { + use CaseChange::*; + + choice!( + map("upcase", |_| Upcase), + map("downcase", |_| Downcase), + map("capitalize", |_| Capitalize), + ) + } + + fn format<'a>() -> impl Parser<'a, Output = FormatItem> { + use FormatItem::*; + + choice!( + // '$' int + map(right("$", digit()), Capture), + // '${' int '}' + map(seq!("${", digit(), "}"), |seq| Capture(seq.1)), + // '${' int ':' '/upcase' | '/downcase' | '/capitalize' '}' + map(seq!("${", digit(), ":/", case_change(), "}"), |seq| { + CaseChange(seq.1, seq.3) + }), + // '${' int ':+' if '}' + map( + seq!("${", digit(), ":+", text(TEXT_ESCAPE_CHARS, &['}']), "}"), + |seq| { Conditional(seq.1, Some(seq.3), None) } + ), + // '${' int ':?' 
if ':' else '}' + map( + seq!( + "${", + digit(), + ":?", + text(TEXT_ESCAPE_CHARS, &[':']), + ":", + text(TEXT_ESCAPE_CHARS, &['}']), + "}" + ), + |seq| { Conditional(seq.1, Some(seq.3), Some(seq.5)) } + ), + // '${' int ':-' else '}' | '${' int ':' else '}' + map( + seq!( + "${", + digit(), + ":", + optional("-"), + text(TEXT_ESCAPE_CHARS, &['}']), + "}" + ), + |seq| { Conditional(seq.1, None, Some(seq.4)) } + ), + ) + } + + fn regex<'a>() -> impl Parser<'a, Output = Regex> { + map( + seq!( + "/", + // TODO parse as ECMAScript and convert to rust regex + text(&['/'], &['/']), + "/", + zero_or_more(choice!( + format(), + // text doesn't parse $, if format fails we just accept the $ as text + map("$", |_| FormatItem::Text("$".into())), + map(text(&['\\', '/'], &['/', '$']), FormatItem::Text), + )), + "/", + // vscode really doesn't allow escaping } here + // so it's impossible to write a regex escape containing a } + // we can consider deviating here and allowing the escape + text(&[], &['}']), + ), + |(_, value, _, replacement, _, options)| Regex { + value, + replacement, + options, + }, + ) + } + + fn tabstop<'a>() -> impl Parser<'a, Output = SnippetElement<'a>> { + map( + or( + right("$", digit()), + map(seq!("${", digit(), "}"), |values| values.1), + ), + |digit| SnippetElement::Tabstop { tabstop: digit }, + ) + } + + fn placeholder<'a>() -> impl Parser<'a, Output = SnippetElement<'a>> { + map( + seq!( + "${", + digit(), + ":", + // according to the grammar there is just a single anything here. + // However in the prose it is explained that placeholders can be nested. + // The example there contains both a placeholder text and a nested placeholder + // which indicates a list. Looking at the VSCode sourcecode, the placeholder + // is indeed parsed as zero_or_more so the grammar is simply incorrect here + zero_or_more(anything(TEXT_ESCAPE_CHARS, true)), + "}" + ), + |seq| SnippetElement::Placeholder { + tabstop: seq.1, + value: seq.3, + }, + ) + } + + fn choice<'a>() -> impl Parser<'a, Output = SnippetElement<'a>> { + map( + seq!( + "${", + digit(), + "|", + sep(text(CHOICE_TEXT_ESCAPE_CHARS, &['|', ',']), ","), + "|}", + ), + |seq| SnippetElement::Choice { + tabstop: seq.1, + choices: seq.3, + }, + ) + } + + fn variable<'a>() -> impl Parser<'a, Output = SnippetElement<'a>> { + choice!( + // $var + map(right("$", var()), |name| SnippetElement::Variable { + name, + default: None, + regex: None, + }), + // ${var} + map(seq!("${", var(), "}",), |values| SnippetElement::Variable { + name: values.1, + default: None, + regex: None, + }), + // ${var:default} + map( + seq!( + "${", + var(), + ":", + zero_or_more(anything(TEXT_ESCAPE_CHARS, true)), + "}", + ), + |values| SnippetElement::Variable { + name: values.1, + default: Some(values.3), + regex: None, + } + ), + // ${var/value/format/options} + map(seq!("${", var(), regex(), "}"), |values| { + SnippetElement::Variable { + name: values.1, + default: None, + regex: Some(values.2), + } + }), + ) + } + + fn anything<'a>( + escape_chars: &'static [char], + end_at_brace: bool, + ) -> impl Parser<'a, Output = SnippetElement<'a>> { + let term_chars: &[_] = if end_at_brace { &['$', '}'] } else { &['$'] }; + move |input: &'a str| { + let parser = choice!( + tabstop(), + placeholder(), + choice(), + variable(), + map("$", |_| SnippetElement::Text("$".into())), + map(text(escape_chars, term_chars), SnippetElement::Text), + ); + parser.parse(input) + } + } + + fn snippet<'a>() -> impl Parser<'a, Output = Snippet<'a>> { + 
map(one_or_more(anything(TEXT_ESCAPE_CHARS, false)), |parts| { + Snippet { elements: parts } + }) + } + + pub fn parse(s: &str) -> Result { + snippet().parse(s).and_then(|(remainder, snippet)| { + if remainder.is_empty() { + Ok(snippet) + } else { + Err(remainder) + } + }) + } + + #[cfg(test)] + mod test { + use super::SnippetElement::*; + use super::*; + + #[test] + fn empty_string_is_error() { + assert_eq!(Err(""), parse("")); + } + + #[test] + fn parse_placeholders_in_function_call() { + assert_eq!( + Ok(Snippet { + elements: vec![ + Text("match(".into()), + Placeholder { + tabstop: 1, + value: vec!(Text("Arg1".into())), + }, + Text(")".into()) + ] + }), + parse("match(${1:Arg1})") + ) + } + + #[test] + fn unterminated_placeholder() { + assert_eq!( + Ok(Snippet { + elements: vec![Text("match(".into()), Text("$".into()), Text("{1:)".into())] + }), + parse("match(${1:)") + ) + } + + #[test] + fn parse_empty_placeholder() { + assert_eq!( + Ok(Snippet { + elements: vec![ + Text("match(".into()), + Placeholder { + tabstop: 1, + value: vec![], + }, + Text(")".into()) + ] + }), + parse("match(${1:})") + ) + } + + #[test] + fn parse_placeholders_in_statement() { + assert_eq!( + Ok(Snippet { + elements: vec![ + Text("local ".into()), + Placeholder { + tabstop: 1, + value: vec!(Text("var".into())), + }, + Text(" = ".into()), + Placeholder { + tabstop: 1, + value: vec!(Text("value".into())), + }, + ] + }), + parse("local ${1:var} = ${1:value}") + ) + } + + #[test] + fn parse_tabstop_nested_in_placeholder() { + assert_eq!( + Ok(Snippet { + elements: vec![Placeholder { + tabstop: 1, + value: vec!(Text("var, ".into()), Tabstop { tabstop: 2 },), + },] + }), + parse("${1:var, $2}") + ) + } + + #[test] + fn parse_placeholder_nested_in_placeholder() { + assert_eq!( + Ok(Snippet { + elements: vec![Placeholder { + tabstop: 1, + value: vec!( + Text("foo ".into()), + Placeholder { + tabstop: 2, + value: vec!(Text("bar".into())), + }, + ), + },] + }), + parse("${1:foo ${2:bar}}") + ) + } + + #[test] + fn parse_all() { + assert_eq!( + Ok(Snippet { + elements: vec![ + Text("hello ".into()), + Tabstop { tabstop: 1 }, + Tabstop { tabstop: 2 }, + Text(" ".into()), + Choice { + tabstop: 1, + choices: vec!["one".into(), "two".into(), "three".into()] + }, + Text(" ".into()), + Variable { + name: "name", + default: Some(vec![Text("foo".into())]), + regex: None + }, + Text(" ".into()), + Variable { + name: "var", + default: None, + regex: None + }, + Text(" ".into()), + Variable { + name: "TM", + default: None, + regex: None + }, + ] + }), + parse("hello $1${2} ${1|one,two,three|} ${name:foo} $var $TM") + ); + } + + #[test] + fn regex_capture_replace() { + assert_eq!( + Ok(Snippet { + elements: vec![Variable { + name: "TM_FILENAME", + default: None, + regex: Some(Regex { + value: "(.*).+$".into(), + replacement: vec![FormatItem::Capture(1), FormatItem::Text("$".into())], + options: Tendril::new(), + }), + }] + }), + parse("${TM_FILENAME/(.*).+$/$1$/}") + ); + } + + #[test] + fn rust_macro() { + assert_eq!( + Ok(Snippet { + elements: vec![ + Text("macro_rules! ".into()), + Tabstop { tabstop: 1 }, + Text(" {\n (".into()), + Tabstop { tabstop: 2 }, + Text(") => {\n ".into()), + Tabstop { tabstop: 0 }, + Text("\n };\n}".into()) + ] + }), + parse("macro_rules! 
$1 {\n ($2) => {\n $0\n };\n}") + ); + } + + fn assert_text(snippet: &str, parsed_text: &str) { + let res = parse(snippet).unwrap(); + let text = crate::snippet::render(&res, "\n", true).0; + assert_eq!(text, parsed_text) + } + + #[test] + fn robust_parsing() { + assert_text("$", "$"); + assert_text("\\\\$", "\\$"); + assert_text("{", "{"); + assert_text("\\}", "}"); + assert_text("\\abc", "\\abc"); + assert_text("foo${f:\\}}bar", "foo}bar"); + assert_text("\\{", "\\{"); + assert_text("I need \\\\\\$", "I need \\$"); + assert_text("\\", "\\"); + assert_text("\\{{", "\\{{"); + assert_text("{{", "{{"); + assert_text("{{dd", "{{dd"); + assert_text("}}", "}}"); + assert_text("ff}}", "ff}}"); + assert_text("farboo", "farboo"); + assert_text("far{{}}boo", "far{{}}boo"); + assert_text("far{{123}}boo", "far{{123}}boo"); + assert_text("far\\{{123}}boo", "far\\{{123}}boo"); + assert_text("far{{id:bern}}boo", "far{{id:bern}}boo"); + assert_text("far{{id:bern {{basel}}}}boo", "far{{id:bern {{basel}}}}boo"); + assert_text( + "far{{id:bern {{id:basel}}}}boo", + "far{{id:bern {{id:basel}}}}boo", + ); + assert_text( + "far{{id:bern {{id2:basel}}}}boo", + "far{{id:bern {{id2:basel}}}}boo", + ); + assert_text("${}$\\a\\$\\}\\\\", "${}$\\a$}\\"); + assert_text("farboo", "farboo"); + assert_text("far{{}}boo", "far{{}}boo"); + assert_text("far{{123}}boo", "far{{123}}boo"); + assert_text("far\\{{123}}boo", "far\\{{123}}boo"); + assert_text("far`123`boo", "far`123`boo"); + assert_text("far\\`123\\`boo", "far\\`123\\`boo"); + assert_text("\\$far-boo", "$far-boo"); + } + + fn assert_snippet(snippet: &str, expect: &[SnippetElement]) { + let parsed_snippet = parse(snippet).unwrap(); + assert_eq!(parsed_snippet.elements, expect.to_owned()) + } + + #[test] + fn parse_variable() { + use SnippetElement::*; + assert_snippet( + "$far-boo", + &[ + Variable { + name: "far", + default: None, + regex: None, + }, + Text("-boo".into()), + ], + ); + assert_snippet( + "far$farboo", + &[ + Text("far".into()), + Variable { + name: "farboo", + regex: None, + default: None, + }, + ], + ); + assert_snippet( + "far${farboo}", + &[ + Text("far".into()), + Variable { + name: "farboo", + regex: None, + default: None, + }, + ], + ); + assert_snippet("$123", &[Tabstop { tabstop: 123 }]); + assert_snippet( + "$farboo", + &[Variable { + name: "farboo", + regex: None, + default: None, + }], + ); + assert_snippet( + "$far12boo", + &[Variable { + name: "far12boo", + regex: None, + default: None, + }], + ); + assert_snippet( + "000_${far}_000", + &[ + Text("000_".into()), + Variable { + name: "far", + regex: None, + default: None, + }, + Text("_000".into()), + ], + ); + } + + #[test] + fn parse_variable_transform() { + assert_snippet( + "${foo///}", + &[Variable { + name: "foo", + regex: Some(Regex { + value: Tendril::new(), + replacement: Vec::new(), + options: Tendril::new(), + }), + default: None, + }], + ); + assert_snippet( + "${foo/regex/format/gmi}", + &[Variable { + name: "foo", + regex: Some(Regex { + value: "regex".into(), + replacement: vec![FormatItem::Text("format".into())], + options: "gmi".into(), + }), + default: None, + }], + ); + assert_snippet( + "${foo/([A-Z][a-z])/format/}", + &[Variable { + name: "foo", + regex: Some(Regex { + value: "([A-Z][a-z])".into(), + replacement: vec![FormatItem::Text("format".into())], + options: Tendril::new(), + }), + default: None, + }], + ); + + // invalid regex TODO: reneable tests once we actually parse this regex flavour + // assert_text( + // "${foo/([A-Z][a-z])/format/GMI}", + // 
"${foo/([A-Z][a-z])/format/GMI}", + // ); + // assert_text( + // "${foo/([A-Z][a-z])/format/funky}", + // "${foo/([A-Z][a-z])/format/funky}", + // ); + // assert_text("${foo/([A-Z][a-z]/format/}", "${foo/([A-Z][a-z]/format/}"); + assert_text( + "${foo/regex\\/format/options}", + "${foo/regex\\/format/options}", + ); + + // tricky regex + assert_snippet( + "${foo/m\\/atch/$1/i}", + &[Variable { + name: "foo", + regex: Some(Regex { + value: "m/atch".into(), + replacement: vec![FormatItem::Capture(1)], + options: "i".into(), + }), + default: None, + }], + ); + + // incomplete + assert_text("${foo///", "${foo///"); + assert_text("${foo/regex/format/options", "${foo/regex/format/options"); + + // format string + assert_snippet( + "${foo/.*/${0:fooo}/i}", + &[Variable { + name: "foo", + regex: Some(Regex { + value: ".*".into(), + replacement: vec![FormatItem::Conditional(0, None, Some("fooo".into()))], + options: "i".into(), + }), + default: None, + }], + ); + assert_snippet( + "${foo/.*/${1}/i}", + &[Variable { + name: "foo", + regex: Some(Regex { + value: ".*".into(), + replacement: vec![FormatItem::Capture(1)], + options: "i".into(), + }), + default: None, + }], + ); + assert_snippet( + "${foo/.*/$1/i}", + &[Variable { + name: "foo", + regex: Some(Regex { + value: ".*".into(), + replacement: vec![FormatItem::Capture(1)], + options: "i".into(), + }), + default: None, + }], + ); + assert_snippet( + "${foo/.*/This-$1-encloses/i}", + &[Variable { + name: "foo", + regex: Some(Regex { + value: ".*".into(), + replacement: vec![ + FormatItem::Text("This-".into()), + FormatItem::Capture(1), + FormatItem::Text("-encloses".into()), + ], + options: "i".into(), + }), + default: None, + }], + ); + assert_snippet( + "${foo/.*/complex${1:else}/i}", + &[Variable { + name: "foo", + regex: Some(Regex { + value: ".*".into(), + replacement: vec![ + FormatItem::Text("complex".into()), + FormatItem::Conditional(1, None, Some("else".into())), + ], + options: "i".into(), + }), + default: None, + }], + ); + assert_snippet( + "${foo/.*/complex${1:-else}/i}", + &[Variable { + name: "foo", + regex: Some(Regex { + value: ".*".into(), + replacement: vec![ + FormatItem::Text("complex".into()), + FormatItem::Conditional(1, None, Some("else".into())), + ], + options: "i".into(), + }), + default: None, + }], + ); + assert_snippet( + "${foo/.*/complex${1:+if}/i}", + &[Variable { + name: "foo", + regex: Some(Regex { + value: ".*".into(), + replacement: vec![ + FormatItem::Text("complex".into()), + FormatItem::Conditional(1, Some("if".into()), None), + ], + options: "i".into(), + }), + default: None, + }], + ); + assert_snippet( + "${foo/.*/complex${1:?if:else}/i}", + &[Variable { + name: "foo", + regex: Some(Regex { + value: ".*".into(), + replacement: vec![ + FormatItem::Text("complex".into()), + FormatItem::Conditional(1, Some("if".into()), Some("else".into())), + ], + options: "i".into(), + }), + default: None, + }], + ); + assert_snippet( + "${foo/.*/complex${1:/upcase}/i}", + &[Variable { + name: "foo", + regex: Some(Regex { + value: ".*".into(), + replacement: vec![ + FormatItem::Text("complex".into()), + FormatItem::CaseChange(1, CaseChange::Upcase), + ], + options: "i".into(), + }), + default: None, + }], + ); + assert_snippet( + "${TM_DIRECTORY/src\\//$1/}", + &[Variable { + name: "TM_DIRECTORY", + regex: Some(Regex { + value: "src/".into(), + replacement: vec![FormatItem::Capture(1)], + options: Tendril::new(), + }), + default: None, + }], + ); + assert_snippet( + "${TM_SELECTED_TEXT/a/\\/$1/g}", + &[Variable { + name: 
"TM_SELECTED_TEXT", + regex: Some(Regex { + value: "a".into(), + replacement: vec![FormatItem::Text("/".into()), FormatItem::Capture(1)], + options: "g".into(), + }), + default: None, + }], + ); + assert_snippet( + "${TM_SELECTED_TEXT/a/in\\/$1ner/g}", + &[Variable { + name: "TM_SELECTED_TEXT", + regex: Some(Regex { + value: "a".into(), + replacement: vec![ + FormatItem::Text("in/".into()), + FormatItem::Capture(1), + FormatItem::Text("ner".into()), + ], + options: "g".into(), + }), + default: None, + }], + ); + assert_snippet( + "${TM_SELECTED_TEXT/a/end\\//g}", + &[Variable { + name: "TM_SELECTED_TEXT", + regex: Some(Regex { + value: "a".into(), + replacement: vec![FormatItem::Text("end/".into())], + options: "g".into(), + }), + default: None, + }], + ); + } + // TODO port more tests from https://github.com/microsoft/vscode/blob/dce493cb6e36346ef2714e82c42ce14fc461b15c/src/vs/editor/contrib/snippet/test/browser/snippetParser.test.ts + } +} diff --git a/helix-parsec/Cargo.toml b/helix-parsec/Cargo.toml new file mode 100644 index 00000000..505a4247 --- /dev/null +++ b/helix-parsec/Cargo.toml @@ -0,0 +1,13 @@ +[package] +name = "helix-parsec" +version = "0.6.0" +authors = ["Blaž Hrastnik "] +edition = "2021" +license = "MPL-2.0" +description = "Parser combinators for Helix" +categories = ["editor"] +repository = "https://github.com/helix-editor/helix" +homepage = "https://helix-editor.com" +include = ["src/**/*", "README.md"] + +[dependencies] diff --git a/helix-parsec/src/lib.rs b/helix-parsec/src/lib.rs new file mode 100644 index 00000000..846d02d6 --- /dev/null +++ b/helix-parsec/src/lib.rs @@ -0,0 +1,574 @@ +//! Parser-combinator functions +//! +//! This module provides parsers and parser combinators which can be used +//! together to build parsers by functional composition. + +// This module implements parser combinators following https://bodil.lol/parser-combinators/. +// `sym` (trait implementation for `&'static str`), `map`, `pred` (filter), `one_or_more`, +// `zero_or_more`, as well as the `Parser` trait originate mostly from that post. +// The remaining parsers and parser combinators are either based on +// https://github.com/archseer/snippets.nvim/blob/a583da6ef130d2a4888510afd8c4e5ffd62d0dce/lua/snippet/parser.lua#L5-L138 +// or are novel. + +// When a parser matches the input successfully, it returns `Ok((next_input, some_value))` +// where the type of the returned value depends on the parser. If the parser fails to match, +// it returns `Err(input)`. +type ParseResult<'a, Output> = Result<(&'a str, Output), &'a str>; + +/// A parser or parser-combinator. +/// +/// Parser-combinators compose multiple parsers together to parse input. +/// For example, two basic parsers (`&'static str`s) may be combined with +/// a parser-combinator like [or] to produce a new parser. +/// +/// ``` +/// use helix_parsec::{or, Parser}; +/// let foo = "foo"; // matches "foo" literally +/// let bar = "bar"; // matches "bar" literally +/// let foo_or_bar = or(foo, bar); // matches either "foo" or "bar" +/// assert_eq!(Ok(("", "foo")), foo_or_bar.parse("foo")); +/// assert_eq!(Ok(("", "bar")), foo_or_bar.parse("bar")); +/// assert_eq!(Err("baz"), foo_or_bar.parse("baz")); +/// ``` +pub trait Parser<'a> { + type Output; + + fn parse(&self, input: &'a str) -> ParseResult<'a, Self::Output>; +} + +// Most parser-combinators are written as higher-order functions which take some +// parser(s) as input and return a new parser: a function that takes input and returns +// a parse result. 
The underlying implementation of [Parser::parse] for these functions +// is simply application. +#[doc(hidden)] +impl<'a, F, T> Parser<'a> for F +where + F: Fn(&'a str) -> ParseResult, +{ + type Output = T; + + fn parse(&self, input: &'a str) -> ParseResult<'a, Self::Output> { + self(input) + } +} + +/// A parser which matches the string literal exactly. +/// +/// This parser succeeds if the next characters in the input are equal to the given +/// string literal. +/// +/// Note that [str::parse] interferes with calling [Parser::parse] on string literals +/// directly; this trait implementation works when used within any parser combinator +/// but does not work on its own. To call [Parser::parse] on a parser for a string +/// literal, use the [token] parser. +/// +/// # Examples +/// +/// ``` +/// use helix_parsec::{or, Parser}; +/// let parser = or("foo", "bar"); +/// assert_eq!(Ok(("", "foo")), parser.parse("foo")); +/// assert_eq!(Ok(("", "bar")), parser.parse("bar")); +/// assert_eq!(Err("baz"), parser.parse("baz")); +/// ``` +impl<'a> Parser<'a> for &'static str { + type Output = &'a str; + + fn parse(&self, input: &'a str) -> ParseResult<'a, Self::Output> { + match input.get(0..self.len()) { + Some(actual) if actual == *self => Ok((&input[self.len()..], &input[0..self.len()])), + _ => Err(input), + } + } +} + +// Parsers + +/// A parser which matches the given string literally. +/// +/// This function is a convenience for interpreting string literals as parsers +/// and is only necessary to avoid conflict with [str::parse]. See the documentation +/// for the `&'static str` implementation of [Parser]. +/// +/// # Examples +/// +/// ``` +/// use helix_parsec::{token, Parser}; +/// let parser = token("foo"); +/// assert_eq!(Ok(("", "foo")), parser.parse("foo")); +/// assert_eq!(Err("bar"), parser.parse("bar")); +/// ``` +pub fn token<'a>(literal: &'static str) -> impl Parser<'a, Output = &'a str> { + literal +} + +/// A parser which matches all values until the specified pattern is found. +/// +/// If the pattern is not found, this parser does not match. The input up to the +/// character which returns `true` is returned but not that character itself. +/// +/// If the pattern function returns true on the first input character, this +/// parser fails. +/// +/// # Examples +/// +/// ``` +/// use helix_parsec::{take_until, Parser}; +/// let parser = take_until(|c| c == '.'); +/// assert_eq!(Ok((".bar", "foo")), parser.parse("foo.bar")); +/// assert_eq!(Err(".foo"), parser.parse(".foo")); +/// assert_eq!(Err("foo"), parser.parse("foo")); +/// ``` +pub fn take_until<'a, F>(pattern: F) -> impl Parser<'a, Output = &'a str> +where + F: Fn(char) -> bool, +{ + move |input: &'a str| match input.find(&pattern) { + Some(index) if index != 0 => Ok((&input[index..], &input[0..index])), + _ => Err(input), + } +} + +/// A parser which matches all values until the specified pattern no longer match. +/// +/// This parser only ever fails if the input has a length of zero. 
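// --- Editorial sketch; not part of this patch --------------------------------
// The difference between `take_until` and `take_while` is easy to miss:
// `take_until` stops *before* the first character for which the predicate
// returns true, while `take_while` consumes characters for as long as the
// predicate holds. Neither produces an empty match, but they fail on
// different inputs. Only the `helix_parsec` API defined in this file is
// assumed; the bindings below are illustrative.
use helix_parsec::{take_until, take_while, Parser};

fn main() {
    let until_colon = take_until(|c| c == ':');
    let while_digit = take_while(|c| c.is_ascii_digit());

    // Both accept "12:30" and stop at the colon.
    assert_eq!(Ok((":30", "12")), until_colon.parse("12:30"));
    assert_eq!(Ok((":30", "12")), while_digit.parse("12:30"));

    // They refuse an empty match for different reasons.
    assert_eq!(Err(":30"), until_colon.parse(":30")); // predicate true at index 0
    assert_eq!(Err("ab"), until_colon.parse("ab")); // predicate never true
    assert_eq!(Err("ab"), while_digit.parse("ab")); // no leading digit
}
// ------------------------------------------------------------------------------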
+/// +/// # Examples +/// +/// ``` +/// use helix_parsec::{take_while, Parser}; +/// let parser = take_while(|c| c == '1'); +/// assert_eq!(Ok(("2", "11")), parser.parse("112")); +/// assert_eq!(Err("22"), parser.parse("22")); +/// ``` +pub fn take_while<'a, F>(pattern: F) -> impl Parser<'a, Output = &'a str> +where + F: Fn(char) -> bool, +{ + move |input: &'a str| match input + .char_indices() + .take_while(|(_p, c)| pattern(*c)) + .last() + { + Some((index, c)) => { + let index = index + c.len_utf8(); + Ok((&input[index..], &input[0..index])) + } + _ => Err(input), + } +} + +// Variadic parser combinators + +/// A parser combinator which matches a sequence of parsers in an all-or-nothing fashion. +/// +/// The returned value is a tuple containing the outputs of all parsers in order. Each +/// parser in the sequence may be typed differently. +/// +/// # Examples +/// +/// ``` +/// use helix_parsec::{seq, Parser}; +/// let parser = seq!("<", "a", ">"); +/// assert_eq!(Ok(("", ("<", "a", ">"))), parser.parse("")); +/// assert_eq!(Err(""), parser.parse("")); +/// ``` +#[macro_export] +macro_rules! seq { + ($($parsers: expr),+ $(,)?) => { + ($($parsers),+) + } +} + +// Seq is implemented using trait-implementations of Parser for various size tuples. +// This allows sequences to be typed heterogeneously. +macro_rules! seq_impl { + ($($parser:ident),+) => { + #[allow(non_snake_case)] + impl<'a, $($parser),+> Parser<'a> for ($($parser),+) + where + $($parser: Parser<'a>),+ + { + type Output = ($($parser::Output),+); + + fn parse(&self, input: &'a str) -> ParseResult<'a, Self::Output> { + let ($($parser),+) = self; + seq_body_impl!(input, input, $($parser),+ ; ) + } + } + } +} + +macro_rules! seq_body_impl { + ($input:expr, $next_input:expr, $head:ident, $($tail:ident),+ ; $(,)? $($acc:ident),*) => { + match $head.parse($next_input) { + Ok((next_input, $head)) => seq_body_impl!($input, next_input, $($tail),+ ; $($acc),*, $head), + Err(_) => Err($input), + } + }; + ($input:expr, $next_input:expr, $last:ident ; $(,)? $($acc:ident),*) => { + match $last.parse($next_input) { + Ok((next_input, last)) => Ok((next_input, ($($acc),+, last))), + Err(_) => Err($input), + } + } +} + +seq_impl!(A, B); +seq_impl!(A, B, C); +seq_impl!(A, B, C, D); +seq_impl!(A, B, C, D, E); +seq_impl!(A, B, C, D, E, F); +seq_impl!(A, B, C, D, E, F, G); +seq_impl!(A, B, C, D, E, F, G, H); +seq_impl!(A, B, C, D, E, F, G, H, I); +seq_impl!(A, B, C, D, E, F, G, H, I, J); + +/// A parser combinator which chooses the first of the input parsers which matches +/// successfully. +/// +/// All input parsers must have the same output type. This is a variadic form for [or]. +/// +/// # Examples +/// +/// ``` +/// use helix_parsec::{choice, or, Parser}; +/// let parser = choice!("foo", "bar", "baz"); +/// assert_eq!(Ok(("", "foo")), parser.parse("foo")); +/// assert_eq!(Ok(("", "bar")), parser.parse("bar")); +/// assert_eq!(Err("quiz"), parser.parse("quiz")); +/// ``` +#[macro_export] +macro_rules! choice { + ($parser: expr $(,)?) => { + $parser + }; + ($parser: expr, $($rest: expr),+ $(,)?) => { + or($parser, choice!($($rest),+)) + } +} + +// Ordinary parser combinators + +/// A parser combinator which takes a parser as input and maps the output using the +/// given transformation function. +/// +/// This corresponds to [Result::map]. The value is only mapped if the input parser +/// matches against input. 
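// --- Editorial sketch; not part of this patch --------------------------------
// `choice!` requires every branch to share a single output type; pairing each
// string literal with `map` (defined just below) is the usual way to satisfy
// that, for example when turning keywords into an enum. `CaseChange` here is a
// stand-in local enum, not the type used by the snippet parser earlier in this
// diff. Note that `or` must be imported because `choice!` expands to nested
// `or` calls, mirroring the doctest above.
use helix_parsec::{choice, map, or, Parser};

#[derive(Debug, PartialEq)]
enum CaseChange {
    Upcase,
    Downcase,
    Capitalize,
}

fn case_change<'a>() -> impl Parser<'a, Output = CaseChange> {
    choice!(
        map("upcase", |_| CaseChange::Upcase),
        map("downcase", |_| CaseChange::Downcase),
        map("capitalize", |_| CaseChange::Capitalize),
    )
}

fn main() {
    assert_eq!(Ok(("", CaseChange::Upcase)), case_change().parse("upcase"));
    assert_eq!(Err("pascalcase"), case_change().parse("pascalcase"));
}
// ------------------------------------------------------------------------------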
+/// +/// # Examples +/// +/// ``` +/// use helix_parsec::{map, Parser}; +/// let parser = map("123", |s| s.parse::().unwrap()); +/// assert_eq!(Ok(("", 123)), parser.parse("123")); +/// assert_eq!(Err("abc"), parser.parse("abc")); +/// ``` +pub fn map<'a, P, F, T>(parser: P, map_fn: F) -> impl Parser<'a, Output = T> +where + P: Parser<'a>, + F: Fn(P::Output) -> T, +{ + move |input| { + parser + .parse(input) + .map(|(next_input, result)| (next_input, map_fn(result))) + } +} + +/// A parser combinator which succeeds if the given parser matches the input and +/// the given `filter_map_fn` returns `Some`. +/// +/// # Examples +/// +/// ``` +/// use helix_parsec::{filter_map, take_until, Parser}; +/// let parser = filter_map(take_until(|c| c == '.'), |s| s.parse::().ok()); +/// assert_eq!(Ok((".456", 123)), parser.parse("123.456")); +/// assert_eq!(Err("abc.def"), parser.parse("abc.def")); +/// ``` +pub fn filter_map<'a, P, F, T>(parser: P, filter_map_fn: F) -> impl Parser<'a, Output = T> +where + P: Parser<'a>, + F: Fn(P::Output) -> Option, +{ + move |input| match parser.parse(input) { + Ok((next_input, value)) => match filter_map_fn(value) { + Some(value) => Ok((next_input, value)), + None => Err(input), + }, + Err(_) => Err(input), + } +} + +/// A parser combinator which succeeds if the first given parser matches the input and +/// the second given parse also matches. +/// +/// # Examples +/// +/// ``` +/// use helix_parsec::{reparse_as, take_until, one_or_more, Parser}; +/// let parser = reparse_as(take_until(|c| c == '/'), one_or_more("a")); +/// assert_eq!(Ok(("/bb", vec!["a", "a"])), parser.parse("aa/bb")); +/// ``` +pub fn reparse_as<'a, P1, P2, T>(parser1: P1, parser2: P2) -> impl Parser<'a, Output = T> +where + P1: Parser<'a, Output = &'a str>, + P2: Parser<'a, Output = T>, +{ + filter_map(parser1, move |str| { + parser2.parse(str).map(|(_, value)| value).ok() + }) +} + +/// A parser combinator which only matches the input when the predicate function +/// returns true. +/// +/// # Examples +/// +/// ``` +/// use helix_parsec::{filter, take_until, Parser}; +/// let parser = filter(take_until(|c| c == '.'), |s| s == &"123"); +/// assert_eq!(Ok((".456", "123")), parser.parse("123.456")); +/// assert_eq!(Err("456.123"), parser.parse("456.123")); +/// ``` +pub fn filter<'a, P, F, T>(parser: P, pred_fn: F) -> impl Parser<'a, Output = T> +where + P: Parser<'a, Output = T>, + F: Fn(&P::Output) -> bool, +{ + move |input| { + if let Ok((next_input, value)) = parser.parse(input) { + if pred_fn(&value) { + return Ok((next_input, value)); + } + } + Err(input) + } +} + +/// A parser combinator which matches either of the input parsers. +/// +/// Both parsers must have the same output type. For a variadic form which +/// can take any number of parsers, use `choice!`. +/// +/// # Examples +/// +/// ``` +/// use helix_parsec::{or, Parser}; +/// let parser = or("foo", "bar"); +/// assert_eq!(Ok(("", "foo")), parser.parse("foo")); +/// assert_eq!(Ok(("", "bar")), parser.parse("bar")); +/// assert_eq!(Err("baz"), parser.parse("baz")); +/// ``` +pub fn or<'a, P1, P2, T>(parser1: P1, parser2: P2) -> impl Parser<'a, Output = T> +where + P1: Parser<'a, Output = T>, + P2: Parser<'a, Output = T>, +{ + move |input| match parser1.parse(input) { + ok @ Ok(_) => ok, + Err(_) => parser2.parse(input), + } +} + +/// A parser combinator which attempts to match the given parser, returning a +/// `None` output value if the parser does not match. +/// +/// The parser produced with this combinator always succeeds. 
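// --- Editorial sketch; not part of this patch --------------------------------
// Because a failing parser hands back the input it was originally given (see
// the `ParseResult` comment at the top of this file), `or` can simply retry
// its second branch from the same position; no separate backtracking state is
// needed. `angle_or_square` is an illustrative name, not an API in this crate.
use helix_parsec::{map, or, seq, Parser};

fn angle_or_square<'a>() -> impl Parser<'a, Output = &'a str> {
    or(
        map(seq!("<", "x", ">"), |(_, x, _)| x),
        map(seq!("[", "x", "]"), |(_, x, _)| x),
    )
}

fn main() {
    assert_eq!(Ok(("", "x")), angle_or_square().parse("<x>"));
    assert_eq!(Ok(("", "x")), angle_or_square().parse("[x]"));
    // The first branch consumes "<x" before failing, but its error carries the
    // original input, so the second branch still starts from "<x]".
    assert_eq!(Err("<x]"), angle_or_square().parse("<x]"));
}
// ------------------------------------------------------------------------------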
If the given parser +/// succeeds, `Some(value)` is returned where `value` is the output of the given +/// parser. Otherwise, `None`. +/// +/// # Examples +/// +/// ``` +/// use helix_parsec::{optional, Parser}; +/// let parser = optional("foo"); +/// assert_eq!(Ok(("bar", Some("foo"))), parser.parse("foobar")); +/// assert_eq!(Ok(("bar", None)), parser.parse("bar")); +/// ``` +pub fn optional<'a, P, T>(parser: P) -> impl Parser<'a, Output = Option> +where + P: Parser<'a, Output = T>, +{ + move |input| match parser.parse(input) { + Ok((next_input, value)) => Ok((next_input, Some(value))), + Err(_) => Ok((input, None)), + } +} + +/// A parser combinator which runs the given parsers in sequence and returns the +/// value of `left` if both are matched. +/// +/// This is useful for two-element sequences in which you only want the output +/// value of the `left` parser. +/// +/// # Examples +/// +/// ``` +/// use helix_parsec::{left, Parser}; +/// let parser = left("foo", "bar"); +/// assert_eq!(Ok(("", "foo")), parser.parse("foobar")); +/// ``` +pub fn left<'a, L, R, T>(left: L, right: R) -> impl Parser<'a, Output = T> +where + L: Parser<'a, Output = T>, + R: Parser<'a>, +{ + map(seq!(left, right), |(left_value, _)| left_value) +} + +/// A parser combinator which runs the given parsers in sequence and returns the +/// value of `right` if both are matched. +/// +/// This is useful for two-element sequences in which you only want the output +/// value of the `right` parser. +/// +/// # Examples +/// +/// ``` +/// use helix_parsec::{right, Parser}; +/// let parser = right("foo", "bar"); +/// assert_eq!(Ok(("", "bar")), parser.parse("foobar")); +/// ``` +pub fn right<'a, L, R, T>(left: L, right: R) -> impl Parser<'a, Output = T> +where + L: Parser<'a>, + R: Parser<'a, Output = T>, +{ + map(seq!(left, right), |(_, right_value)| right_value) +} + +/// A parser combinator which matches the given parser against the input zero or +/// more times. +/// +/// This parser always succeeds and returns the empty Vec when it matched zero +/// times. +/// +/// # Examples +/// +/// ``` +/// use helix_parsec::{zero_or_more, Parser}; +/// let parser = zero_or_more("a"); +/// assert_eq!(Ok(("", vec![])), parser.parse("")); +/// assert_eq!(Ok(("", vec!["a"])), parser.parse("a")); +/// assert_eq!(Ok(("", vec!["a", "a"])), parser.parse("aa")); +/// assert_eq!(Ok(("bb", vec![])), parser.parse("bb")); +/// ``` +pub fn zero_or_more<'a, P, T>(parser: P) -> impl Parser<'a, Output = Vec> +where + P: Parser<'a, Output = T>, +{ + let parser = non_empty(parser); + move |mut input| { + let mut values = Vec::new(); + + while let Ok((next_input, value)) = parser.parse(input) { + input = next_input; + values.push(value); + } + + Ok((input, values)) + } +} + +/// A parser combinator which matches the given parser against the input one or +/// more times. +/// +/// This parser combinator acts the same as [zero_or_more] but must match at +/// least once. 
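// --- Editorial sketch; not part of this patch --------------------------------
// Putting the combinators introduced so far together: a simplified parser for
// a "${1:foo}"-style placeholder like the ones exercised by the snippet tests
// earlier in this diff. This is not the actual snippet grammar, just a sketch
// of how `seq!`, `right`/`left`, `take_while`/`take_until` and `filter_map`
// compose; `placeholder` is an illustrative name.
use helix_parsec::{filter_map, left, map, right, seq, take_until, take_while, Parser};

fn placeholder<'a>() -> impl Parser<'a, Output = (usize, &'a str)> {
    map(
        seq!(
            "${",
            // tabstop index
            filter_map(take_while(|c| c.is_ascii_digit()), |s| s.parse::<usize>().ok()),
            // ":" default "}" -- keep only the default text
            right(":", left(take_until(|c| c == '}'), "}")),
        ),
        |(_, tabstop, default)| (tabstop, default),
    )
}

fn main() {
    assert_eq!(Ok(("", (1, "foo"))), placeholder().parse("${1:foo}"));
    // An unterminated placeholder fails, returning the whole original input.
    assert_eq!(Err("${1:foo"), placeholder().parse("${1:foo"));
}
// ------------------------------------------------------------------------------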
+/// +/// # Examples +/// +/// ``` +/// use helix_parsec::{one_or_more, Parser}; +/// let parser = one_or_more("a"); +/// assert_eq!(Err(""), parser.parse("")); +/// assert_eq!(Ok(("", vec!["a"])), parser.parse("a")); +/// assert_eq!(Ok(("", vec!["a", "a"])), parser.parse("aa")); +/// assert_eq!(Err("bb"), parser.parse("bb")); +/// ``` +pub fn one_or_more<'a, P, T>(parser: P) -> impl Parser<'a, Output = Vec> +where + P: Parser<'a, Output = T>, +{ + let parser = non_empty(parser); + move |mut input| { + let mut values = Vec::new(); + + match parser.parse(input) { + Ok((next_input, value)) => { + input = next_input; + values.push(value); + } + Err(err) => return Err(err), + } + + while let Ok((next_input, value)) = parser.parse(input) { + input = next_input; + values.push(value); + } + + Ok((input, values)) + } +} + +/// A parser combinator which matches one or more instances of the given parser +/// interspersed with the separator parser. +/// +/// Output values of the separator parser are discarded. +/// +/// This is typically used to parse function arguments or list items. +/// +/// # Examples +/// +/// ```rust +/// use helix_parsec::{sep, Parser}; +/// let parser = sep("a", ","); +/// assert_eq!(Ok(("", vec!["a", "a", "a"])), parser.parse("a,a,a")); +/// ``` +pub fn sep<'a, P, S, T>(parser: P, separator: S) -> impl Parser<'a, Output = Vec> +where + P: Parser<'a, Output = T>, + S: Parser<'a>, +{ + move |mut input| { + let mut values = Vec::new(); + + match parser.parse(input) { + Ok((next_input, value)) => { + input = next_input; + values.push(value); + } + Err(err) => return Err(err), + } + + loop { + match separator.parse(input) { + Ok((next_input, _)) => input = next_input, + Err(_) => break, + } + + match parser.parse(input) { + Ok((next_input, value)) => { + input = next_input; + values.push(value); + } + Err(_) => break, + } + } + + Ok((input, values)) + } +} + +pub fn non_empty<'a, T>(p: impl Parser<'a, Output = T>) -> impl Parser<'a, Output = T> { + move |input| { + let (new_input, res) = p.parse(input)?; + if new_input.len() == input.len() { + Err(input) + } else { + Ok((new_input, res)) + } + } +} diff --git a/helix-term/Cargo.toml b/helix-term/Cargo.toml index 895a0882..5222ddaa 100644 --- a/helix-term/Cargo.toml +++ b/helix-term/Cargo.toml @@ -10,11 +10,7 @@ repository = "https://github.com/helix-editor/helix" homepage = "https://helix-editor.com" include = ["src/**/*", "README.md"] default-run = "hx" -rust-version = "1.57" - -[package.metadata.nix] -build = true -app = true +rust-version = "1.65" [features] default = ["git"] @@ -37,11 +33,11 @@ helix-loader = { version = "0.6", path = "../helix-loader" } anyhow = "1" once_cell = "1.17" -which = "4.2" +which = "4.4" tokio = { version = "1", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot"] } tui = { path = "../helix-tui", package = "helix-tui", default-features = false, features = ["crossterm"] } -crossterm = { version = "0.25", features = ["event-stream"] } +crossterm = { version = "0.26", features = ["event-stream"] } signal-hook = "0.3" tokio-stream = "0.1" futures-util = { version = "0.3", features = ["std", "async-await"], default-features = false } @@ -61,22 +57,23 @@ pulldown-cmark = { version = "0.9", default-features = false } content_inspector = "0.2.4" # config -toml = "0.5" +toml = "0.7" serde_json = "1.0" serde = { version = "1.0", features = ["derive"] } # ripgrep for global search -grep-regex = "0.1.10" -grep-searcher = "0.1.10" +grep-regex = "0.1.11" 
+grep-searcher = "0.1.11" [target.'cfg(not(windows))'.dependencies] # https://github.com/vorner/signal-hook/issues/100 signal-hook-tokio = { version = "0.3", features = ["futures-v0_3"] } +libc = "0.2.140" [build-dependencies] helix-loader = { version = "0.6", path = "../helix-loader" } [dev-dependencies] smallvec = "1.10" -indoc = "1.0.8" -tempfile = "3.3.0" +indoc = "2.0.1" +tempfile = "3.4.0" diff --git a/helix-term/src/application.rs b/helix-term/src/application.rs index c0cbc245..95faa01b 100644 --- a/helix-term/src/application.rs +++ b/helix-term/src/application.rs @@ -30,26 +30,17 @@ use crate::{ use log::{debug, error, warn}; use std::{ - io::{stdin, stdout, Write}, + io::{stdin, stdout}, + path::Path, sync::Arc, time::{Duration, Instant}, }; use anyhow::{Context, Error}; -use crossterm::{ - event::{ - DisableBracketedPaste, DisableFocusChange, DisableMouseCapture, EnableBracketedPaste, - EnableFocusChange, EnableMouseCapture, Event as CrosstermEvent, - }, - execute, terminal, - tty::IsTty, -}; +use crossterm::{event::Event as CrosstermEvent, tty::IsTty}; #[cfg(not(windows))] -use { - signal_hook::{consts::signal, low_level}, - signal_hook_tokio::Signals, -}; +use {signal_hook::consts::signal, signal_hook_tokio::Signals}; #[cfg(windows)] type Signals = futures_util::stream::Empty<()>; @@ -62,10 +53,12 @@ use tui::backend::CrosstermBackend; use tui::backend::TestBackend; #[cfg(not(feature = "integration"))] -type Terminal = tui::terminal::Terminal>; +type TerminalBackend = CrosstermBackend; #[cfg(feature = "integration")] -type Terminal = tui::terminal::Terminal; +type TerminalBackend = TestBackend; + +type Terminal = tui::terminal::Terminal; pub struct Application { compositor: Compositor, @@ -107,23 +100,6 @@ fn setup_integration_logging() { .apply(); } -fn restore_term() -> Result<(), Error> { - let mut stdout = stdout(); - // reset cursor shape - write!(stdout, "\x1B[0 q")?; - // Ignore errors on disabling, this might trigger on windows if we call - // disable without calling enable previously - let _ = execute!(stdout, DisableMouseCapture); - execute!( - stdout, - DisableBracketedPaste, - DisableFocusChange, - terminal::LeaveAlternateScreen - )?; - terminal::disable_raw_mode()?; - Ok(()) -} - impl Application { pub fn new( args: Args, @@ -135,10 +111,9 @@ impl Application { use helix_view::editor::Action; - let theme_loader = std::sync::Arc::new(theme::Loader::new( - &helix_loader::config_dir(), - &helix_loader::runtime_dir(), - )); + let mut theme_parent_dirs = vec![helix_loader::config_dir()]; + theme_parent_dirs.extend(helix_loader::runtime_dirs().iter().cloned()); + let theme_loader = std::sync::Arc::new(theme::Loader::new(&theme_parent_dirs)); let true_color = config.editor.true_color || crate::true_color(); let theme = config @@ -159,7 +134,7 @@ impl Application { let syn_loader = std::sync::Arc::new(syntax::Loader::new(syn_loader_conf)); #[cfg(not(feature = "integration"))] - let backend = CrosstermBackend::new(stdout()); + let backend = CrosstermBackend::new(stdout(), &config.editor); #[cfg(feature = "integration")] let backend = TestBackend::new(120, 150); @@ -172,7 +147,7 @@ impl Application { area, theme_loader.clone(), syn_loader.clone(), - Box::new(Map::new(Arc::clone(&config), |config: &Config| { + Arc::new(Map::new(Arc::clone(&config), |config: &Config| { &config.editor })), ); @@ -184,7 +159,7 @@ impl Application { compositor.push(editor_view); if args.load_tutor { - let path = helix_loader::runtime_dir().join("tutor"); + let path = 
helix_loader::runtime_file(Path::new("tutor")); editor.open(&path, Action::VerticalSplit)?; // Unset path to prevent accidentally saving to the original tutor file. doc_mut!(editor).set_path(None)?; @@ -277,10 +252,6 @@ impl Application { Ok(app) } - #[cfg(feature = "integration")] - async fn render(&mut self) {} - - #[cfg(not(feature = "integration"))] async fn render(&mut self) { let mut cx = crate::compositor::Context { editor: &mut self.editor, @@ -309,8 +280,10 @@ impl Application { let surface = self.terminal.current_buffer_mut(); self.compositor.render(area, surface, &mut cx); - let (pos, kind) = self.compositor.cursor(area, &self.editor); + // reset cursor cache + self.editor.cursor_cache.set(None); + let pos = pos.map(|pos| (pos.col as u16, pos.row as u16)); self.terminal.draw(pos, kind).unwrap(); } @@ -343,12 +316,12 @@ impl Application { tokio::select! { biased; - Some(event) = input_stream.next() => { - self.handle_terminal_events(event).await; - } Some(signal) = self.signals.next() => { self.handle_signals(signal).await; } + Some(event) = input_stream.next() => { + self.handle_terminal_events(event).await; + } Some(callback) = self.jobs.futures.next() => { self.jobs.handle_callback(&mut self.editor, &mut self.compositor, callback); self.render().await; @@ -395,6 +368,13 @@ impl Application { // Update all the relevant members in the editor after updating // the configuration. self.editor.refresh_config(); + + // reset view position in case softwrap was enabled/disabled + let scrolloff = self.editor.config().scrolloff; + for (view, _) in self.editor.tree.views_mut() { + let doc = &self.editor.documents[&view.doc]; + view.ensure_cursor_in_view(doc, scrolloff) + } } /// refresh language config after config change @@ -463,14 +443,33 @@ impl Application { pub async fn handle_signals(&mut self, signal: i32) { match signal { signal::SIGTSTP => { - // restore cursor - use helix_view::graphics::CursorKind; - self.terminal - .backend_mut() - .show_cursor(CursorKind::Block) - .ok(); - restore_term().unwrap(); - low_level::emulate_default_handler(signal::SIGTSTP).unwrap(); + self.restore_term().unwrap(); + + // SAFETY: + // + // - helix must have permissions to send signals to all processes in its signal + // group, either by already having the requisite permission, or by having the + // user's UID / EUID / SUID match that of the receiving process(es). + let res = unsafe { + // A pid of 0 sends the signal to the entire process group, allowing the user to + // regain control of their terminal if the editor was spawned under another process + // (e.g. when running `git commit`). + // + // We have to send SIGSTOP (not SIGTSTP) to the entire process group, because, + // as mentioned above, the terminal will get stuck if `helix` was spawned from + // an external process and that process waits for `helix` to complete. This may + // be an issue with signal-hook-tokio, but the author of signal-hook believes it + // could be a tokio issue instead: + // https://github.com/vorner/signal-hook/issues/132 + libc::kill(0, signal::SIGSTOP) + }; + + if res != 0 { + let err = std::io::Error::last_os_error(); + eprintln!("{}", err); + let res = err.raw_os_error().unwrap_or(1); + std::process::exit(res); + } } signal::SIGCONT => { self.claim_term().await.unwrap(); @@ -629,6 +628,11 @@ impl Application { self.compositor .handle_event(&Event::Resize(width, height), &mut cx) } + // Ignore keyboard release events. 
+ CrosstermEvent::Key(crossterm::event::KeyEvent { + kind: crossterm::event::KeyEventKind::Release, + .. + }) => false, event => self.compositor.handle_event(&event.into(), &mut cx), }; @@ -698,8 +702,23 @@ impl Application { } } Notification::PublishDiagnostics(mut params) => { - let path = params.uri.to_file_path().unwrap(); - let doc = self.editor.document_by_path_mut(&path); + let path = match params.uri.to_file_path() { + Ok(path) => path, + Err(_) => { + log::error!("Unsupported file URI: {}", params.uri); + return; + } + }; + let doc = self.editor.document_by_path_mut(&path).filter(|doc| { + if let Some(version) = params.version { + if version != doc.version() { + log::info!("Version ({version}) is out of date for {path:?} (expected ({}), dropping PublishDiagnostic notification", doc.version()); + return false; + } + } + + true + }); if let Some(doc) = doc { let lang_conf = doc.language_config(); @@ -940,24 +959,32 @@ impl Application { Call::MethodCall(helix_lsp::jsonrpc::MethodCall { method, params, id, .. }) => { - let call = match MethodCall::parse(&method, params) { - Ok(call) => call, + let reply = match MethodCall::parse(&method, params) { Err(helix_lsp::Error::Unhandled) => { - error!("Language Server: Method not found {}", method); - return; + error!( + "Language Server: Method {} not found in request {}", + method, id + ); + Err(helix_lsp::jsonrpc::Error { + code: helix_lsp::jsonrpc::ErrorCode::MethodNotFound, + message: format!("Method not found: {}", method), + data: None, + }) } Err(err) => { log::error!( - "received malformed method call from Language Server: {}: {}", + "Language Server: Received malformed method call {} in request {}: {}", method, + id, err ); - return; + Err(helix_lsp::jsonrpc::Error { + code: helix_lsp::jsonrpc::ErrorCode::ParseError, + message: format!("Malformed method call: {}", method), + data: None, + }) } - }; - - let reply = match call { - MethodCall::WorkDoneProgressCreate(params) => { + Ok(MethodCall::WorkDoneProgressCreate(params)) => { self.lsp_progress.create(server_id, params.token); let editor_view = self @@ -971,26 +998,29 @@ impl Application { Ok(serde_json::Value::Null) } - MethodCall::ApplyWorkspaceEdit(params) => { - apply_workspace_edit( + Ok(MethodCall::ApplyWorkspaceEdit(params)) => { + let res = apply_workspace_edit( &mut self.editor, helix_lsp::OffsetEncoding::Utf8, ¶ms.edit, ); Ok(json!(lsp::ApplyWorkspaceEditResponse { - applied: true, - failure_reason: None, - failed_change: None, + applied: res.is_ok(), + failure_reason: res.as_ref().err().map(|err| err.kind.to_string()), + failed_change: res + .as_ref() + .err() + .map(|err| err.failed_change_idx as u32), })) } - MethodCall::WorkspaceFolders => { + Ok(MethodCall::WorkspaceFolders) => { let language_server = self.editor.language_servers.get_by_id(server_id).unwrap(); Ok(json!(language_server.workspace_folders())) } - MethodCall::WorkspaceConfiguration(params) => { + Ok(MethodCall::WorkspaceConfiguration(params)) => { let result: Vec<_> = params .items .iter() @@ -1018,6 +1048,17 @@ impl Application { .collect(); Ok(json!(result)) } + Ok(MethodCall::RegisterCapability(_params)) => { + log::warn!("Ignoring a client/registerCapability request because dynamic capability registration is not enabled. Please report this upstream to the language server"); + // Language Servers based on the `vscode-languageserver-node` library often send + // client/registerCapability even though we do not enable dynamic registration + // for any capabilities. 
We should send a MethodNotFound JSONRPC error in this + // case but that rejects the registration promise in the server which causes an + // exit. So we work around this by ignoring the request and sending back an OK + // response. + + Ok(serde_json::Value::Null) + } }; let language_server = match self.editor.language_servers.get_by_id(server_id) { @@ -1034,24 +1075,19 @@ impl Application { } } - async fn claim_term(&mut self) -> Result<(), Error> { + async fn claim_term(&mut self) -> std::io::Result<()> { + let terminal_config = self.config.load().editor.clone().into(); + self.terminal.claim(terminal_config) + } + + fn restore_term(&mut self) -> std::io::Result<()> { + let terminal_config = self.config.load().editor.clone().into(); use helix_view::graphics::CursorKind; - terminal::enable_raw_mode()?; - if self.terminal.cursor_kind() == CursorKind::Hidden { - self.terminal.backend_mut().hide_cursor().ok(); - } - let mut stdout = stdout(); - execute!( - stdout, - terminal::EnterAlternateScreen, - EnableBracketedPaste, - EnableFocusChange - )?; - execute!(stdout, terminal::Clear(terminal::ClearType::All))?; - if self.config.load().editor.mouse { - execute!(stdout, EnableMouseCapture)?; - } - Ok(()) + self.terminal + .backend_mut() + .show_cursor(CursorKind::Block) + .ok(); + self.terminal.restore(terminal_config) } pub async fn run(&mut self, input_stream: &mut S) -> Result @@ -1066,7 +1102,7 @@ impl Application { // We can't handle errors properly inside this closure. And it's // probably not a good idea to `unwrap()` inside a panic handler. // So we just ignore the `Result`. - let _ = restore_term(); + let _ = TerminalBackend::force_restore(); hook(info); })); @@ -1074,13 +1110,7 @@ impl Application { let close_errs = self.close().await; - // restore cursor - use helix_view::graphics::CursorKind; - self.terminal - .backend_mut() - .show_cursor(CursorKind::Block) - .ok(); - restore_term()?; + self.restore_term()?; for err in close_errs { self.editor.exit_code = 1; diff --git a/helix-term/src/commands.rs b/helix-term/src/commands.rs index 09c2e5df..1c1edece 100644 --- a/helix-term/src/commands.rs +++ b/helix-term/src/commands.rs @@ -5,30 +5,32 @@ pub(crate) mod typed; pub use dap::*; use helix_vcs::Hunk; pub use lsp::*; -use tui::text::Spans; +use tokio::sync::oneshot; +use tui::widgets::Row; pub use typed::*; use helix_core::{ - comment, coords_at_pos, encoding, find_first_non_whitespace_char, find_root, graphemes, + char_idx_at_visual_offset, comment, + doc_formatter::TextFormat, + encoding, find_first_non_whitespace_char, find_root, graphemes, history::UndoKind, - increment::date_time::DateTimeIncrementor, - increment::{number::NumberIncrementor, Increment}, - indent, + increment, indent, indent::IndentStyle, line_ending::{get_line_ending_of_str, line_end_char_index, str_is_line_ending}, match_brackets, - movement::{self, Direction}, - object, pos_at_coords, pos_at_visual_coords, + movement::{self, move_vertically_visual, Direction}, + object, pos_at_coords, regex::{self, Regex, RegexBuilder}, search::{self, CharMatcher}, - selection, shellwords, surround, textobject, + selection, shellwords, surround, + text_annotations::TextAnnotations, + textobject, tree_sitter::Node, unicode::width::UnicodeWidthChar, - visual_coords_at_pos, LineEnding, Position, Range, Rope, RopeGraphemes, RopeSlice, Selection, - SmallVec, Tendril, Transaction, + visual_offset_from_block, LineEnding, Position, Range, Rope, RopeGraphemes, RopeSlice, + Selection, SmallVec, Tendril, Transaction, }; use helix_view::{ - 
apply_transaction, clipboard::ClipboardType, document::{FormatterError, Mode, SCRATCH_BUFFER_NAME}, editor::{Action, Motion}, @@ -48,9 +50,13 @@ use movement::Movement; use crate::{ args, compositor::{self, Component, Compositor}, + filter_picker_entry, job::Callback, keymap::ReverseKeymap, - ui::{self, overlay::overlayed, FilePicker, Picker, Popup, Prompt, PromptEvent}, + ui::{ + self, editor::InsertEvent, overlay::overlayed, FilePicker, Picker, Popup, Prompt, + PromptEvent, + }, }; use crate::job::{self, Jobs}; @@ -71,13 +77,15 @@ use grep_searcher::{sinks, BinaryDetection, SearcherBuilder}; use ignore::{DirEntry, WalkBuilder, WalkState}; use tokio_stream::wrappers::UnboundedReceiverStream; +pub type OnKeyCallback = Box; + pub struct Context<'a> { pub register: Option, pub count: Option, pub editor: &'a mut Editor, pub callback: Option, - pub on_next_key_callback: Option>, + pub on_next_key_callback: Option, pub jobs: &'a mut Jobs, } @@ -106,17 +114,7 @@ impl<'a> Context<'a> { T: for<'de> serde::Deserialize<'de> + Send + 'static, F: FnOnce(&mut Editor, &mut Compositor, T) + Send + 'static, { - let callback = Box::pin(async move { - let json = call.await?; - let response = serde_json::from_value(json)?; - let call: job::Callback = Callback::EditorCompositor(Box::new( - move |editor: &mut Editor, compositor: &mut Compositor| { - callback(editor, compositor, response) - }, - )); - Ok(call) - }); - self.jobs.callback(callback); + self.jobs.callback(make_job_callback(call, callback)); } /// Returns 1 if no explicit count was provided @@ -126,6 +124,27 @@ impl<'a> Context<'a> { } } +#[inline] +fn make_job_callback( + call: impl Future> + 'static + Send, + callback: F, +) -> std::pin::Pin>>> +where + T: for<'de> serde::Deserialize<'de> + Send + 'static, + F: FnOnce(&mut Editor, &mut Compositor, T) + Send + 'static, +{ + Box::pin(async move { + let json = call.await?; + let response = serde_json::from_value(json)?; + let call: job::Callback = Callback::EditorCompositor(Box::new( + move |editor: &mut Editor, compositor: &mut Compositor| { + callback(editor, compositor, response) + }, + )); + Ok(call) + }) +} + use helix_view::{align_view, Align}; /// A MappableCommand is either a static command like "jump_view_up" or a Typable command like @@ -203,10 +222,14 @@ impl MappableCommand { move_char_right, "Move right", move_line_up, "Move up", move_line_down, "Move down", + move_visual_line_up, "Move up", + move_visual_line_down, "Move down", extend_char_left, "Extend left", extend_char_right, "Extend right", extend_line_up, "Extend up", extend_line_down, "Extend down", + extend_visual_line_up, "Extend up", + extend_visual_line_down, "Extend down", copy_selection_on_next_line, "Copy selection on next line", copy_selection_on_prev_line, "Copy selection on previous line", move_next_word_start, "Move to start of next word", @@ -270,6 +293,7 @@ impl MappableCommand { append_mode, "Append after selection", command_mode, "Enter command mode", file_picker, "Open file picker", + file_picker_in_current_buffer_directory, "Open file picker at current buffers's directory", file_picker_in_current_directory, "Open file picker at current working directory", code_action, "Perform code action", buffer_picker, "Open buffer picker", @@ -288,6 +312,7 @@ impl MappableCommand { select_mode, "Enter selection extend mode", exit_select_mode, "Exit selection mode", goto_definition, "Goto definition", + goto_declaration, "Goto declaration", add_newline_above, "Add newline above", add_newline_below, "Add newline below", 
goto_type_definition, "Goto type definition", @@ -386,6 +411,7 @@ impl MappableCommand { swap_view_down, "Swap with split below", transpose_view, "Transpose splits", rotate_view, "Goto next window", + rotate_view_reverse, "Goto previous window", hsplit, "Horizontal bottom split", hsplit_new, "Horizontal bottom split scratch buffer", vsplit, "Vertical right split", @@ -419,6 +445,7 @@ impl MappableCommand { goto_next_paragraph, "Goto next paragraph", goto_prev_paragraph, "Goto previous paragraph", dap_launch, "Launch debug target", + dap_restart, "Restart debugging session", dap_toggle_breakpoint, "Toggle breakpoint", dap_continue, "Continue program execution", dap_pause, "Pause program execution", @@ -450,9 +477,16 @@ impl MappableCommand { impl fmt::Debug for MappableCommand { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - f.debug_tuple("MappableCommand") - .field(&self.name()) - .finish() + match self { + MappableCommand::Static { name, .. } => { + f.debug_tuple("MappableCommand").field(name).finish() + } + MappableCommand::Typable { name, args, .. } => f + .debug_tuple("MappableCommand") + .field(name) + .field(args) + .finish(), + } } } @@ -507,12 +541,16 @@ impl PartialEq for MappableCommand { match (self, other) { ( MappableCommand::Typable { - name: first_name, .. + name: first_name, + args: first_args, + .. }, MappableCommand::Typable { - name: second_name, .. + name: second_name, + args: second_args, + .. }, - ) => first_name == second_name, + ) => first_name == second_name && first_args == second_args, ( MappableCommand::Static { name: first_name, .. @@ -528,18 +566,27 @@ impl PartialEq for MappableCommand { fn no_op(_cx: &mut Context) {} -fn move_impl(cx: &mut Context, move_fn: F, dir: Direction, behaviour: Movement) -where - F: Fn(RopeSlice, Range, Direction, usize, Movement, usize) -> Range, -{ +type MoveFn = + fn(RopeSlice, Range, Direction, usize, Movement, &TextFormat, &mut TextAnnotations) -> Range; + +fn move_impl(cx: &mut Context, move_fn: MoveFn, dir: Direction, behaviour: Movement) { let count = cx.count(); let (view, doc) = current!(cx.editor); let text = doc.text().slice(..); + let text_fmt = doc.text_format(view.inner_area(doc).width, None); + let mut annotations = view.text_annotations(doc, None); - let selection = doc - .selection(view.id) - .clone() - .transform(|range| move_fn(text, range, dir, count, behaviour, doc.tab_width())); + let selection = doc.selection(view.id).clone().transform(|range| { + move_fn( + text, + range, + dir, + count, + behaviour, + &text_fmt, + &mut annotations, + ) + }); doc.set_selection(view.id, selection); } @@ -561,6 +608,24 @@ fn move_line_down(cx: &mut Context) { move_impl(cx, move_vertically, Direction::Forward, Movement::Move) } +fn move_visual_line_up(cx: &mut Context) { + move_impl( + cx, + move_vertically_visual, + Direction::Backward, + Movement::Move, + ) +} + +fn move_visual_line_down(cx: &mut Context) { + move_impl( + cx, + move_vertically_visual, + Direction::Forward, + Movement::Move, + ) +} + fn extend_char_left(cx: &mut Context) { move_impl(cx, move_horizontally, Direction::Backward, Movement::Extend) } @@ -577,6 +642,24 @@ fn extend_line_down(cx: &mut Context) { move_impl(cx, move_vertically, Direction::Forward, Movement::Extend) } +fn extend_visual_line_up(cx: &mut Context) { + move_impl( + cx, + move_vertically_visual, + Direction::Backward, + Movement::Extend, + ) +} + +fn extend_visual_line_down(cx: &mut Context) { + move_impl( + cx, + move_vertically_visual, + Direction::Forward, + 
Movement::Extend, + ) +} + fn goto_line_end_impl(view: &mut View, doc: &mut Document, movement: Movement) { let text = doc.text().slice(..); @@ -804,7 +887,10 @@ fn trim_selections(cx: &mut Context) { } // align text in selection +#[allow(deprecated)] fn align_selections(cx: &mut Context) { + use helix_core::visual_coords_at_pos; + let (view, doc) = current!(cx.editor); let text = doc.text().slice(..); let selection = doc.selection(view.id); @@ -865,7 +951,7 @@ fn align_selections(cx: &mut Context) { changes.sort_unstable_by_key(|(from, _, _)| *from); let transaction = Transaction::change(doc.text(), changes.into_iter()); - apply_transaction(&transaction, doc, view); + doc.apply(&transaction, view.id); } fn goto_window(cx: &mut Context, align: Align) { @@ -881,17 +967,23 @@ fn goto_window(cx: &mut Context, align: Align) { // as we type let scrolloff = config.scrolloff.min(height.saturating_sub(1) / 2); - let last_line = view.last_line(doc); + let last_visual_line = view.last_visual_line(doc); - let line = match align { - Align::Top => view.offset.row + scrolloff + count, - Align::Center => view.offset.row + ((last_line - view.offset.row) / 2), - Align::Bottom => last_line.saturating_sub(scrolloff + count), - } - .max(view.offset.row + scrolloff) - .min(last_line.saturating_sub(scrolloff)); + let visual_line = match align { + Align::Top => view.offset.vertical_offset + scrolloff + count, + Align::Center => view.offset.vertical_offset + (last_visual_line / 2), + Align::Bottom => { + view.offset.vertical_offset + last_visual_line.saturating_sub(scrolloff + count) + } + }; + let visual_line = visual_line + .max(view.offset.vertical_offset + scrolloff) + .min(view.offset.vertical_offset + last_visual_line.saturating_sub(scrolloff)); + + let pos = view + .pos_at_visual_coords(doc, visual_line as u16, 0, false) + .expect("visual_line was constrained to the view area"); - let pos = doc.text().line_to_char(line); let text = doc.text().slice(..); let selection = doc .selection(view.id) @@ -1317,7 +1409,7 @@ fn replace(cx: &mut Context) { } }); - apply_transaction(&transaction, doc, view); + doc.apply(&transaction, view.id); exit_select_mode(cx); } }) @@ -1335,7 +1427,7 @@ where (range.from(), range.to(), Some(text)) }); - apply_transaction(&transaction, doc, view); + doc.apply(&transaction, view.id); } fn switch_case(cx: &mut Context) { @@ -1375,53 +1467,72 @@ pub fn scroll(cx: &mut Context, offset: usize, direction: Direction) { let range = doc.selection(view.id).primary(); let text = doc.text().slice(..); - let cursor = visual_coords_at_pos(text, range.cursor(text), doc.tab_width()); - let doc_last_line = doc.text().len_lines().saturating_sub(1); - - let last_line = view.last_line(doc); - - if direction == Backward && view.offset.row == 0 - || direction == Forward && last_line == doc_last_line - { - return; - } - + let cursor = range.cursor(text); let height = view.inner_height(); let scrolloff = config.scrolloff.min(height / 2); + let offset = match direction { + Forward => offset as isize, + Backward => -(offset as isize), + }; - view.offset.row = match direction { - Forward => view.offset.row + offset, - Backward => view.offset.row.saturating_sub(offset), - } - .min(doc_last_line); - - // recalculate last line - let last_line = view.last_line(doc); - - // clamp into viewport - let line = cursor - .row - .max(view.offset.row + scrolloff) - .min(last_line.saturating_sub(scrolloff)); + let doc_text = doc.text().slice(..); + let viewport = view.inner_area(doc); + let text_fmt = 
doc.text_format(viewport.width, None); + let annotations = view.text_annotations(doc, None); + (view.offset.anchor, view.offset.vertical_offset) = char_idx_at_visual_offset( + doc_text, + view.offset.anchor, + view.offset.vertical_offset as isize + offset, + 0, + &text_fmt, + &annotations, + ); - // If cursor needs moving, replace primary selection - if line != cursor.row { - let head = pos_at_visual_coords(text, Position::new(line, cursor.col), doc.tab_width()); // this func will properly truncate to line end + let head; + match direction { + Forward => { + head = char_idx_at_visual_offset( + doc_text, + view.offset.anchor, + (view.offset.vertical_offset + scrolloff) as isize, + 0, + &text_fmt, + &annotations, + ) + .0; + if head <= cursor { + return; + } + } + Backward => { + head = char_idx_at_visual_offset( + doc_text, + view.offset.anchor, + (view.offset.vertical_offset + height - scrolloff) as isize, + 0, + &text_fmt, + &annotations, + ) + .0; + if head >= cursor { + return; + } + } + } - let anchor = if cx.editor.mode == Mode::Select { - range.anchor - } else { - head - }; + let anchor = if cx.editor.mode == Mode::Select { + range.anchor + } else { + head + }; - // replace primary selection with an empty selection at cursor pos - let prim_sel = Range::new(anchor, head); - let mut sel = doc.selection(view.id).clone(); - let idx = sel.primary_index(); - sel = sel.replace(idx, prim_sel); - doc.set_selection(view.id, sel); - } + // replace primary selection with an empty selection at cursor pos + let prim_sel = Range::new(anchor, head); + let mut sel = doc.selection(view.id).clone(); + let idx = sel.primary_index(); + sel = sel.replace(idx, prim_sel); + doc.set_selection(view.id, sel); } fn page_up(cx: &mut Context) { @@ -1448,7 +1559,15 @@ fn half_page_down(cx: &mut Context) { scroll(cx, offset, Direction::Forward); } +#[allow(deprecated)] +// currently uses the deprected `visual_coords_at_pos`/`pos_at_visual_coords` functions +// as this function ignores softwrapping (and virtual text) and instead only cares +// about "text visual position" +// +// TODO: implement a variant of that uses visual lines and respects virtual text fn copy_selection_on_line(cx: &mut Context, direction: Direction) { + use helix_core::{pos_at_visual_coords, visual_coords_at_pos}; + let count = cx.count(); let (view, doc) = current!(cx.editor); let text = doc.text().slice(..); @@ -1515,6 +1634,10 @@ fn copy_selection_on_line(cx: &mut Context, direction: Direction) { sels += 1; } + if anchor_row == 0 && head_row == 0 { + break; + } + i += 1; } } @@ -1865,7 +1988,7 @@ fn global_search(cx: &mut Context) { impl ui::menu::Item for FileResult { type Data = Option; - fn label(&self, current_path: &Self::Data) -> Spans { + fn format(&self, current_path: &Self::Data) -> Row { let relative_path = helix_core::path::get_relative_path(&self.path) .to_string_lossy() .into_owned(); @@ -1915,6 +2038,11 @@ fn global_search(cx: &mut Context) { let search_root = std::env::current_dir() .expect("Global search error: Failed to get current dir"); + let dedup_symlinks = file_picker_config.deduplicate_links; + let absolute_root = search_root + .canonicalize() + .unwrap_or_else(|_| search_root.clone()); + WalkBuilder::new(search_root) .hidden(file_picker_config.hidden) .parents(file_picker_config.parents) @@ -1924,10 +2052,9 @@ fn global_search(cx: &mut Context) { .git_global(file_picker_config.git_global) .git_exclude(file_picker_config.git_exclude) .max_depth(file_picker_config.max_depth) - // We always want to ignore the .git 
directory, otherwise if - // `ignore` is turned off above, we end up with a lot of noise - // in our picker. - .filter_entry(|entry| entry.file_name() != ".git") + .filter_entry(move |entry| { + filter_picker_entry(entry, &absolute_root, dedup_symlinks) + }) .build_parallel() .run(|| { let mut searcher = searcher.clone(); @@ -2005,6 +2132,10 @@ fn global_search(cx: &mut Context) { let line_num = *line_num; let (view, doc) = current!(cx.editor); let text = doc.text(); + if line_num >= text.len_lines() { + cx.editor.set_error("The line you jumped to does not exist anymore because the file has changed."); + return; + } let start = text.line_to_char(line_num); let end = text.line_to_char((line_num + 1).min(text.len_lines())); @@ -2160,7 +2291,7 @@ fn delete_selection_impl(cx: &mut Context, op: Operation) { let transaction = Transaction::change_by_selection(doc.text(), selection, |range| { (range.from(), range.to(), None) }); - apply_transaction(&transaction, doc, view); + doc.apply(&transaction, view.id); match op { Operation::Delete => { @@ -2178,7 +2309,7 @@ fn delete_selection_insert_mode(doc: &mut Document, view: &mut View, selection: let transaction = Transaction::change_by_selection(doc.text(), selection, |range| { (range.from(), range.to(), None) }); - apply_transaction(&transaction, doc, view); + doc.apply(&transaction, view.id); } fn delete_selection(cx: &mut Context) { @@ -2274,7 +2405,7 @@ fn append_mode(cx: &mut Context) { doc.text(), [(end, end, Some(doc.line_ending.as_str().into()))].into_iter(), ); - apply_transaction(&transaction, doc, view); + doc.apply(&transaction, view.id); } let selection = doc.selection(view.id).clone().transform(|range| { @@ -2294,6 +2425,22 @@ fn file_picker(cx: &mut Context) { cx.push_layer(Box::new(overlayed(picker))); } +fn file_picker_in_current_buffer_directory(cx: &mut Context) { + let doc_dir = doc!(cx.editor) + .path() + .and_then(|path| path.parent().map(|path| path.to_path_buf())); + + let path = match doc_dir { + Some(path) => path, + None => { + cx.editor.set_error("current buffer has no path or parent"); + return; + } + }; + + let picker = ui::file_picker(path, &cx.editor.config()); + cx.push_layer(Box::new(overlayed(picker))); +} fn file_picker_in_current_directory(cx: &mut Context) { let cwd = std::env::current_dir().unwrap_or_else(|_| PathBuf::from("./")); let picker = ui::file_picker(cwd, &cx.editor.config()); @@ -2313,7 +2460,7 @@ fn buffer_picker(cx: &mut Context) { impl ui::menu::Item for BufferMeta { type Data = (); - fn label(&self, _data: &Self::Data) -> Spans { + fn format(&self, _data: &Self::Data) -> Row { let path = self .path .as_deref() @@ -2323,20 +2470,15 @@ fn buffer_picker(cx: &mut Context) { None => SCRATCH_BUFFER_NAME, }; - let mut flags = Vec::new(); + let mut flags = String::new(); if self.is_modified { - flags.push("+"); + flags.push('+'); } if self.is_current { - flags.push("*"); + flags.push('*'); } - let flag = if flags.is_empty() { - "".into() - } else { - format!(" ({})", flags.join("")) - }; - format!("{} {}{}", self.id, path, flag).into() + Row::new([self.id.to_string(), flags, path.to_string()]) } } @@ -2382,7 +2524,7 @@ fn jumplist_picker(cx: &mut Context) { impl ui::menu::Item for JumpMeta { type Data = (); - fn label(&self, _data: &Self::Data) -> Spans { + fn format(&self, _data: &Self::Data) -> Row { let path = self .path .as_deref() @@ -2455,7 +2597,7 @@ fn jumplist_picker(cx: &mut Context) { impl ui::menu::Item for MappableCommand { type Data = ReverseKeymap; - fn label(&self, keymap: &Self::Data) -> 
Spans { + fn format(&self, keymap: &Self::Data) -> Row { let fmt_binding = |bindings: &Vec>| -> String { bindings.iter().fold(String::new(), |mut acc, bind| { if !acc.is_empty() { @@ -2506,7 +2648,22 @@ pub fn command_palette(cx: &mut Context) { on_next_key_callback: None, jobs: cx.jobs, }; + let focus = view!(ctx.editor).id; + command.execute(&mut ctx); + + if ctx.editor.tree.contains(focus) { + let config = ctx.editor.config(); + let mode = ctx.editor.mode(); + let view = view_mut!(ctx.editor, focus); + let doc = doc_mut!(ctx.editor, &view.doc); + + view.ensure_cursor_in_view(doc, config.scrolloff); + + if mode != Mode::Insert { + doc.append_changes_to_history(view); + } + } }); compositor.push(Box::new(overlayed(picker))); }, @@ -2569,7 +2726,7 @@ async fn make_format_callback( if let Ok(format) = format { if doc.version() == doc_version { - apply_transaction(&format, doc, view); + doc.apply(&format, view.id); doc.append_changes_to_history(view); doc.detect_indent_and_line_ending(); view.ensure_cursor_in_view(doc, scrolloff); @@ -2662,7 +2819,7 @@ fn open(cx: &mut Context, open: Open) { transaction = transaction.with_selection(Selection::new(ranges, selection.primary_index())); - apply_transaction(&transaction, doc, view); + doc.apply(&transaction, view.id); } // o inserts a new line after each line with a selection @@ -2686,10 +2843,15 @@ fn push_jump(view: &mut View, doc: &Document) { } fn goto_line(cx: &mut Context) { - goto_line_impl(cx.editor, cx.count) + if cx.count.is_some() { + let (view, doc) = current!(cx.editor); + push_jump(view, doc); + + goto_line_without_jumplist(cx.editor, cx.count); + } } -fn goto_line_impl(editor: &mut Editor, count: Option) { +fn goto_line_without_jumplist(editor: &mut Editor, count: Option) { if let Some(count) = count { let (view, doc) = current!(editor); let text = doc.text().slice(..); @@ -2706,7 +2868,6 @@ fn goto_line_impl(editor: &mut Editor, count: Option) { .clone() .transform(|range| range.put_cursor(text, pos, editor.mode == Mode::Select)); - push_jump(view, doc); doc.set_selection(view.id, selection); } } @@ -2793,14 +2954,6 @@ fn exit_select_mode(cx: &mut Context) { } } -fn goto_pos(editor: &mut Editor, pos: usize) { - let (view, doc) = current!(editor); - - push_jump(view, doc); - doc.set_selection(view.id, Selection::point(pos)); - align_view(doc, view, Align::Center); -} - fn goto_first_diag(cx: &mut Context) { let (view, doc) = current!(cx.editor); let selection = match doc.diagnostics().first() { @@ -2808,7 +2961,6 @@ fn goto_first_diag(cx: &mut Context) { None => return, }; doc.set_selection(view.id, selection); - align_view(doc, view, Align::Center); } fn goto_last_diag(cx: &mut Context) { @@ -2818,7 +2970,6 @@ fn goto_last_diag(cx: &mut Context) { None => return, }; doc.set_selection(view.id, selection); - align_view(doc, view, Align::Center); } fn goto_next_diag(cx: &mut Context) { @@ -2840,7 +2991,6 @@ fn goto_next_diag(cx: &mut Context) { None => return, }; doc.set_selection(view.id, selection); - align_view(doc, view, Align::Center); } fn goto_prev_diag(cx: &mut Context) { @@ -2865,7 +3015,6 @@ fn goto_prev_diag(cx: &mut Context) { None => return, }; doc.set_selection(view.id, selection); - align_view(doc, view, Align::Center); } fn goto_first_change(cx: &mut Context) { @@ -2878,20 +3027,20 @@ fn goto_last_change(cx: &mut Context) { fn goto_first_change_impl(cx: &mut Context, reverse: bool) { let editor = &mut cx.editor; - let (_, doc) = current!(editor); + let (view, doc) = current!(editor); if let Some(handle) = 
doc.diff_handle() { let hunk = { - let hunks = handle.hunks(); + let diff = handle.load(); let idx = if reverse { - hunks.len().saturating_sub(1) + diff.len().saturating_sub(1) } else { 0 }; - hunks.nth_hunk(idx) + diff.nth_hunk(idx) }; if hunk != Hunk::NONE { - let pos = doc.text().line_to_char(hunk.after.start as usize); - goto_pos(editor, pos) + let range = hunk_range(hunk, doc.text().slice(..)); + doc.set_selection(view.id, Selection::single(range.anchor, range.head)); } } } @@ -2919,30 +3068,20 @@ fn goto_next_change_impl(cx: &mut Context, direction: Direction) { let selection = doc.selection(view.id).clone().transform(|range| { let cursor_line = range.cursor_line(doc_text) as u32; - let hunks = diff_handle.hunks(); + let diff = diff_handle.load(); let hunk_idx = match direction { - Direction::Forward => hunks + Direction::Forward => diff .next_hunk(cursor_line) - .map(|idx| (idx + count).min(hunks.len() - 1)), - Direction::Backward => hunks + .map(|idx| (idx + count).min(diff.len() - 1)), + Direction::Backward => diff .prev_hunk(cursor_line) .map(|idx| idx.saturating_sub(count)), }; - // TODO refactor with let..else once MSRV reaches 1.65 - let hunk_idx = if let Some(hunk_idx) = hunk_idx { - hunk_idx - } else { + let Some(hunk_idx) = hunk_idx else { return range; }; - let hunk = hunks.nth_hunk(hunk_idx); - - let hunk_start = doc_text.line_to_char(hunk.after.start as usize); - let hunk_end = if hunk.after.is_empty() { - hunk_start + 1 - } else { - doc_text.line_to_char(hunk.after.end as usize) - }; - let new_range = Range::new(hunk_start, hunk_end); + let hunk = diff.nth_hunk(hunk_idx); + let new_range = hunk_range(hunk, doc_text); if editor.mode == Mode::Select { let head = if new_range.head < range.anchor { new_range.anchor @@ -2962,6 +3101,20 @@ fn goto_next_change_impl(cx: &mut Context, direction: Direction) { cx.editor.last_motion = Some(Motion(Box::new(motion))); } +/// Returns the [Range] for a [Hunk] in the given text. +/// Additions and modifications cover the added and modified ranges. +/// Deletions are represented as the point at the start of the deletion hunk. 
+fn hunk_range(hunk: Hunk, text: RopeSlice) -> Range { + let anchor = text.line_to_char(hunk.after.start as usize); + let head = if hunk.after.is_empty() { + anchor + 1 + } else { + text.line_to_char(hunk.after.end as usize) + }; + + Range::new(anchor, head) +} + pub mod insert { use super::*; pub type Hook = fn(&Rope, &Selection, char) -> Option; @@ -3090,7 +3243,7 @@ pub mod insert { let (view, doc) = current!(cx.editor); if let Some(t) = transaction { - apply_transaction(&t, doc, view); + doc.apply(&t, view.id); } // TODO: need a post insert hook too for certain triggers (autocomplete, signature help, etc) @@ -3112,7 +3265,7 @@ pub mod insert { &doc.selection(view.id).clone().cursors(doc.text().slice(..)), indent, ); - apply_transaction(&transaction, doc, view); + doc.apply(&transaction, view.id); } pub fn insert_newline(cx: &mut Context) { @@ -3217,15 +3370,15 @@ pub mod insert { transaction = transaction.with_selection(Selection::new(ranges, selection.primary_index())); let (view, doc) = current!(cx.editor); - apply_transaction(&transaction, doc, view); + doc.apply(&transaction, view.id); } pub fn delete_char_backward(cx: &mut Context) { let count = cx.count(); let (view, doc) = current_ref!(cx.editor); let text = doc.text().slice(..); - let indent_unit = doc.indent_style.as_str(); - let tab_size = doc.tab_width(); + let tab_width = doc.tab_width(); + let indent_width = doc.indent_width(); let auto_pairs = doc.auto_pairs(cx.editor); let transaction = @@ -3246,18 +3399,11 @@ pub mod insert { None, ) } else { - let unit_len = indent_unit.chars().count(); - // NOTE: indent_unit always contains 'only spaces' or 'only tab' according to `IndentStyle` definition. - let unit_size = if indent_unit.starts_with('\t') { - tab_size * unit_len - } else { - unit_len - }; let width: usize = fragment .chars() .map(|ch| { if ch == '\t' { - tab_size + tab_width } else { // it can be none if it still meet control characters other than '\t' // here just set the width to 1 (or some value better?). 
@@ -3265,9 +3411,9 @@ pub mod insert { } }) .sum(); - let mut drop = width % unit_size; // round down to nearest unit + let mut drop = width % indent_width; // round down to nearest unit if drop == 0 { - drop = unit_size + drop = indent_width }; // if it's already at a unit, consume a whole unit let mut chars = fragment.chars().rev(); let mut start = pos; @@ -3312,7 +3458,7 @@ pub mod insert { } }); let (view, doc) = current!(cx.editor); - apply_transaction(&transaction, doc, view); + doc.apply(&transaction, view.id); lsp::signature_help_impl(cx, SignatureHelpInvoked::Automatic); } @@ -3330,7 +3476,7 @@ pub mod insert { None, ) }); - apply_transaction(&transaction, doc, view); + doc.apply(&transaction, view.id); lsp::signature_help_impl(cx, SignatureHelpInvoked::Automatic); } @@ -3611,7 +3757,8 @@ fn paste_impl( transaction = transaction.with_selection(Selection::new(ranges, selection.primary_index())); } - apply_transaction(&transaction, doc, view); + doc.apply(&transaction, view.id); + doc.append_changes_to_history(view); } pub(crate) fn paste_bracketed_value(cx: &mut Context, contents: String) { @@ -3703,7 +3850,7 @@ fn replace_with_yanked(cx: &mut Context) { } }); - apply_transaction(&transaction, doc, view); + doc.apply(&transaction, view.id); exit_select_mode(cx); } } @@ -3727,7 +3874,7 @@ fn replace_selections_with_clipboard_impl( ) }); - apply_transaction(&transaction, doc, view); + doc.apply(&transaction, view.id); doc.append_changes_to_history(view); } Err(e) => return Err(e.context("Couldn't get system clipboard contents")), @@ -3799,7 +3946,7 @@ fn indent(cx: &mut Context) { Some((pos, pos, Some(indent.clone()))) }), ); - apply_transaction(&transaction, doc, view); + doc.apply(&transaction, view.id); } fn unindent(cx: &mut Context) { @@ -3808,7 +3955,7 @@ fn unindent(cx: &mut Context) { let lines = get_lines(doc, view.id); let mut changes = Vec::with_capacity(lines.len()); let tab_width = doc.tab_width(); - let indent_width = count * tab_width; + let indent_width = count * doc.indent_width(); for line_idx in lines { let line = doc.text().line(line_idx); @@ -3838,7 +3985,7 @@ fn unindent(cx: &mut Context) { let transaction = Transaction::change(doc.text(), changes.into_iter()); - apply_transaction(&transaction, doc, view); + doc.apply(&transaction, view.id); } fn format_selections(cx: &mut Context) { @@ -3893,7 +4040,7 @@ fn format_selections(cx: &mut Context) { language_server.offset_encoding(), ); - apply_transaction(&transaction, doc, view); + doc.apply(&transaction, view.id); } fn join_selections_impl(cx: &mut Context, select_space: bool) { @@ -3925,6 +4072,11 @@ fn join_selections_impl(cx: &mut Context, select_space: bool) { } } + // nothing to do, bail out early to avoid crashes later + if changes.is_empty() { + return; + } + changes.sort_unstable_by_key(|(from, _to, _text)| *from); changes.dedup(); @@ -3947,7 +4099,7 @@ fn join_selections_impl(cx: &mut Context, select_space: bool) { Transaction::change(doc.text(), changes.into_iter()) }; - apply_transaction(&transaction, doc, view); + doc.apply(&transaction, view.id); } fn keep_or_remove_selections_impl(cx: &mut Context, remove: bool) { @@ -4034,6 +4186,24 @@ pub fn completion(cx: &mut Context) { None => return, }; + // set up a channel that allows the request to be canceled + // set completion_request so that this request can be canceled + // by setting completion_request, the old channel stored there is dropped + // and the associated request is automatically dropped + 
cx.editor.completion_request_handle = Some(tx); + let future = async move { + tokio::select! { + biased; + _ = rx => { + Ok(serde_json::Value::Null) + } + res = future => { + res + } + } + }; + let trigger_offset = cursor; + // TODO: trigger_offset should be the cursor offset but we also need a starting offset from where we want to apply @@ -4044,12 +4214,35 @@ pub fn completion(cx: &mut Context) { iter.reverse(); let offset = iter.take_while(|ch| chars::char_is_word(*ch)).count(); let start_offset = cursor.saturating_sub(offset); + let savepoint = doc.savepoint(view); + + let trigger_doc = doc.id(); + let trigger_view = view.id; + + // FIXME: The commands Context can only have a single callback + // which means it gets overwritten when executing keybindings + // with multiple commands or macros. This would mean that completion + // might be incorrectly applied when repeating the insert mode action + // + // TODO: to solve this either make cx.callback a Vec of callbacks or + // alternatively move `last_insert` to `helix_view::Editor` + cx.callback = Some(Box::new( + move |compositor: &mut Compositor, _cx: &mut compositor::Context| { + let ui = compositor.find::().unwrap(); + ui.last_insert.1.push(InsertEvent::RequestCompletion); + }, + )); cx.callback( future, move |editor, compositor, response: Option| { - if editor.mode != Mode::Insert { - // we're not in insert mode anymore + let (view, doc) = current_ref!(editor); + // check if the completion request is stale. + // + // Completions are computed asynchronously and therefore the user could + // switch document/view or leave insert mode. In all of those cases the + // completion should be discarded + if editor.mode != Mode::Insert || view.id != trigger_view || doc.id() != trigger_doc { return; } @@ -4071,6 +4264,7 @@ pub fn completion(cx: &mut Context) { let ui = compositor.find::().unwrap(); ui.set_completion( editor, + savepoint, items, offset_encoding, start_offset, @@ -4090,7 +4284,7 @@ fn toggle_comments(cx: &mut Context) { .map(|tc| tc.as_ref()); let transaction = comment::toggle_line_comments(doc.text(), doc.selection(view.id), token); - apply_transaction(&transaction, doc, view); + doc.apply(&transaction, view.id); exit_select_mode(cx); } @@ -4146,7 +4340,7 @@ fn rotate_selection_contents(cx: &mut Context, direction: Direction) { .map(|(range, fragment)| (range.from(), range.to(), Some(fragment))), ); - apply_transaction(&transaction, doc, view); + doc.apply(&transaction, view.id); } fn rotate_selection_contents_forward(cx: &mut Context) { @@ -4188,7 +4382,6 @@ fn shrink_selection(cx: &mut Context) { // try to restore previous selection if let Some(prev_selection) = view.object_selections.pop() { if current_selection.contains(&prev_selection) { - // allow shrinking the selection only if current selection contains the previous object selection doc.set_selection(view.id, prev_selection); return; } else { @@ -4304,6 +4497,10 @@ fn rotate_view(cx: &mut Context) { cx.editor.focus_next() } +fn rotate_view_reverse(cx: &mut Context) { + cx.editor.focus_prev() +} + fn jump_view_right(cx: &mut Context) { cx.editor.focus_direction(tree::Direction::Right) } @@ -4437,11 +4634,19 @@ fn align_view_bottom(cx: &mut Context) { fn align_view_middle(cx: &mut Context) { let (view, doc) = current!(cx.editor); - let text = doc.text().slice(..); - let pos = doc.selection(view.id).primary().cursor(text); - let pos = coords_at_pos(text, pos); + let inner_width = view.inner_width(doc); + let text_fmt = doc.text_format(inner_width, None); + // there is no 
horizontal position when softwrap is enabled + if text_fmt.soft_wrap { + return; + } + let doc_text = doc.text().slice(..); + let annotations = view.text_annotations(doc, None); + let pos = doc.selection(view.id).primary().cursor(doc_text); + let pos = + visual_offset_from_block(doc_text, view.offset.anchor, pos, &text_fmt, &annotations).0; - view.offset.col = pos + view.offset.horizontal_offset = pos .col .saturating_sub((view.inner_area(doc).width as usize) / 2); } @@ -4576,14 +4781,14 @@ fn select_textobject(cx: &mut Context, objtype: textobject::TextObject) { let textobject_change = |range: Range| -> Range { let diff_handle = doc.diff_handle().unwrap(); - let hunks = diff_handle.hunks(); + let diff = diff_handle.load(); let line = range.cursor_line(text); - let hunk_idx = if let Some(hunk_idx) = hunks.hunk_at(line as u32, false) { + let hunk_idx = if let Some(hunk_idx) = diff.hunk_at(line as u32, false) { hunk_idx } else { return range; }; - let hunk = hunks.nth_hunk(hunk_idx).after; + let hunk = diff.nth_hunk(hunk_idx).after; let start = text.line_to_char(hunk.start as usize); let end = text.line_to_char(hunk.end as usize); @@ -4632,7 +4837,7 @@ fn select_textobject(cx: &mut Context, objtype: textobject::TextObject) { ("a", "Argument/parameter (tree-sitter)"), ("c", "Comment (tree-sitter)"), ("T", "Test (tree-sitter)"), - ("m", "Closest surrounding pair to cursor"), + ("m", "Closest surrounding pair"), (" ", "... or any character acting as a pair"), ]; @@ -4641,41 +4846,45 @@ fn select_textobject(cx: &mut Context, objtype: textobject::TextObject) { fn surround_add(cx: &mut Context) { cx.on_next_key(move |cx, event| { - let ch = match event.char() { - Some(ch) => ch, + let (view, doc) = current!(cx.editor); + // surround_len is the number of new characters being added. 
+ let (open, close, surround_len) = match event.char() { + Some(ch) => { + let (o, c) = surround::get_pair(ch); + let mut open = Tendril::new(); + open.push(o); + let mut close = Tendril::new(); + close.push(c); + (open, close, 2) + } + None if event.code == KeyCode::Enter => ( + doc.line_ending.as_str().into(), + doc.line_ending.as_str().into(), + 2 * doc.line_ending.len_chars(), + ), None => return, }; - let (view, doc) = current!(cx.editor); - let selection = doc.selection(view.id); - let (open, close) = surround::get_pair(ch); - // The number of chars in get_pair - let surround_len = 2; + let selection = doc.selection(view.id); let mut changes = Vec::with_capacity(selection.len() * 2); let mut ranges = SmallVec::with_capacity(selection.len()); let mut offs = 0; for range in selection.iter() { - let mut o = Tendril::new(); - o.push(open); - let mut c = Tendril::new(); - c.push(close); - changes.push((range.from(), range.from(), Some(o))); - changes.push((range.to(), range.to(), Some(c))); - - // Add 2 characters to the range to select them + changes.push((range.from(), range.from(), Some(open.clone()))); + changes.push((range.to(), range.to(), Some(close.clone()))); + ranges.push( Range::new(offs + range.from(), offs + range.to() + surround_len) .with_direction(range.direction()), ); - // Add 2 characters to the offset for the next ranges offs += surround_len; } let transaction = Transaction::change(doc.text(), changes.into_iter()) .with_selection(Selection::new(ranges, selection.primary_index())); - apply_transaction(&transaction, doc, view); + doc.apply(&transaction, view.id); exit_select_mode(cx); }) } @@ -4715,7 +4924,7 @@ fn surround_replace(cx: &mut Context) { (pos, pos + 1, Some(t)) }), ); - apply_transaction(&transaction, doc, view); + doc.apply(&transaction, view.id); exit_select_mode(cx); }); }) @@ -4743,7 +4952,7 @@ fn surround_delete(cx: &mut Context) { let transaction = Transaction::change(doc.text(), change_pos.into_iter().map(|p| (p, p + 1, None))); - apply_transaction(&transaction, doc, view); + doc.apply(&transaction, view.id); exit_select_mode(cx); }) } @@ -4880,7 +5089,10 @@ async fn shell_impl_async( log::error!("Shell error: {}", err); bail!("Shell error: {}", err); } - bail!("Shell command failed"); + match output.status.code() { + Some(exit_code) => bail!("Shell command failed: status {}", exit_code), + None => bail!("Shell command failed"), + } } else if !output.stderr.is_empty() { log::debug!( "Command printed to stderr: {}", @@ -4958,7 +5170,7 @@ fn shell(cx: &mut compositor::Context, cmd: &str, behavior: &ShellBehavior) { if behavior != &ShellBehavior::Ignore { let transaction = Transaction::change(doc.text(), changes.into_iter()) .with_selection(Selection::new(ranges, selection.primary_index())); - apply_transaction(&transaction, doc, view); + doc.apply(&transaction, view.id); doc.append_changes_to_history(view); } @@ -5021,64 +5233,32 @@ fn add_newline_impl(cx: &mut Context, open: Open) { }); let transaction = Transaction::change(text, changes); - apply_transaction(&transaction, doc, view); + doc.apply(&transaction, view.id); } enum IncrementDirection { Increase, Decrease, } -/// Increment object under cursor by count. + +/// Increment objects within selections by count. fn increment(cx: &mut Context) { increment_impl(cx, IncrementDirection::Increase); } -/// Decrement object under cursor by count. +/// Decrement objects within selections by count. 
fn decrement(cx: &mut Context) { increment_impl(cx, IncrementDirection::Decrease); } -/// This function differs from find_next_char_impl in that it stops searching at the newline, but also -/// starts searching at the current character, instead of the next. -/// It does not want to start at the next character because this function is used for incrementing -/// number and we don't want to move forward if we're already on a digit. -fn find_next_char_until_newline( - text: RopeSlice, - char_matcher: M, - pos: usize, - _count: usize, - _inclusive: bool, -) -> Option { - // Since we send the current line to find_nth_next instead of the whole text, we need to adjust - // the position we send to this function so that it's relative to that line and its returned - // position since it's expected this function returns a global position. - let line_index = text.char_to_line(pos); - let pos_delta = text.line_to_char(line_index); - let pos = pos - pos_delta; - search::find_nth_next(text.line(line_index), char_matcher, pos, 1).map(|pos| pos + pos_delta) -} - -/// Decrement object under cursor by `amount`. +/// Increment objects within selections by `amount`. +/// A negative `amount` will decrement objects within selections. fn increment_impl(cx: &mut Context, increment_direction: IncrementDirection) { - // TODO: when incrementing or decrementing a number that gets a new digit or lose one, the - // selection is updated improperly. - find_char_impl( - cx.editor, - &find_next_char_until_newline, - true, - true, - char::is_ascii_digit, - 1, - ); - - // Increase by 1 if `IncrementDirection` is `Increase` - // Decrease by 1 if `IncrementDirection` is `Decrease` let sign = match increment_direction { IncrementDirection::Increase => 1, IncrementDirection::Decrease => -1, }; let mut amount = sign * cx.count() as i64; - // If the register is `#` then increase or decrease the `amount` by 1 per element let increase_by = if cx.register == Some('#') { sign } else { 0 }; @@ -5086,56 +5266,41 @@ fn increment_impl(cx: &mut Context, increment_direction: IncrementDirection) { let selection = doc.selection(view.id); let text = doc.text().slice(..); - let changes: Vec<_> = selection - .ranges() - .iter() - .filter_map(|range| { - let incrementor: Box = - if let Some(incrementor) = DateTimeIncrementor::from_range(text, *range) { - Box::new(incrementor) - } else if let Some(incrementor) = NumberIncrementor::from_range(text, *range) { - Box::new(incrementor) - } else { - return None; - }; + let mut new_selection_ranges = SmallVec::new(); + let mut cumulative_length_diff: i128 = 0; + let mut changes = vec![]; - let (range, new_text) = incrementor.increment(amount); + for range in selection { + let selected_text: Cow = range.fragment(text); + let new_from = ((range.from() as i128) + cumulative_length_diff) as usize; + let incremented = [increment::integer, increment::date_time] + .iter() + .find_map(|incrementor| incrementor(selected_text.as_ref(), amount)); - amount += increase_by; + amount += increase_by; - Some((range.from(), range.to(), Some(new_text))) - }) - .collect(); - - // Overlapping changes in a transaction will panic, so we need to find and remove them. - // For example, if there are cursors on each of the year, month, and day of `2021-11-29`, - // incrementing will give overlapping changes, with each change incrementing a different part of - // the date. Since these conflict with each other we remove these changes from the transaction - // so nothing happens. 
- let mut overlapping_indexes = HashSet::new(); - for (i, changes) in changes.windows(2).enumerate() { - if changes[0].1 > changes[1].0 { - overlapping_indexes.insert(i); - overlapping_indexes.insert(i + 1); + match incremented { + None => { + let new_range = Range::new( + new_from, + (range.to() as i128 + cumulative_length_diff) as usize, + ); + new_selection_ranges.push(new_range); + } + Some(new_text) => { + let new_range = Range::new(new_from, new_from + new_text.len()); + cumulative_length_diff += new_text.len() as i128 - selected_text.len() as i128; + new_selection_ranges.push(new_range); + changes.push((range.from(), range.to(), Some(new_text.into()))); + } } } - let changes: Vec<_> = changes - .into_iter() - .enumerate() - .filter_map(|(i, change)| { - if overlapping_indexes.contains(&i) { - None - } else { - Some(change) - } - }) - .collect(); if !changes.is_empty() { + let new_selection = Selection::new(new_selection_ranges, selection.primary_index()); let transaction = Transaction::change(doc.text(), changes.into_iter()); - let transaction = transaction.with_selection(selection.clone()); - - apply_transaction(&transaction, doc, view); + let transaction = transaction.with_selection(new_selection); + doc.apply(&transaction, view.id); } } diff --git a/helix-term/src/commands/dap.rs b/helix-term/src/commands/dap.rs index b182f28c..dac1e9d5 100644 --- a/helix-term/src/commands/dap.rs +++ b/helix-term/src/commands/dap.rs @@ -12,7 +12,7 @@ use helix_view::editor::Breakpoint; use serde_json::{to_value, Value}; use tokio_stream::wrappers::UnboundedReceiverStream; -use tui::text::Spans; +use tui::{text::Spans, widgets::Row}; use std::collections::HashMap; use std::future::Future; @@ -25,7 +25,7 @@ use helix_view::handlers::dap::{breakpoints_changed, jump_to_stack_frame, select impl ui::menu::Item for StackFrame { type Data = (); - fn label(&self, _data: &Self::Data) -> Spans { + fn format(&self, _data: &Self::Data) -> Row { self.name.as_str().into() // TODO: include thread_states in the label } } @@ -33,7 +33,7 @@ impl ui::menu::Item for StackFrame { impl ui::menu::Item for DebugTemplate { type Data = (); - fn label(&self, _data: &Self::Data) -> Spans { + fn format(&self, _data: &Self::Data) -> Row { self.name.as_str().into() } } @@ -41,7 +41,7 @@ impl ui::menu::Item for DebugTemplate { impl ui::menu::Item for Thread { type Data = ThreadStates; - fn label(&self, thread_states: &Self::Data) -> Spans { + fn format(&self, thread_states: &Self::Data) -> Row { format!( "{} ({})", self.name, @@ -289,6 +289,36 @@ pub fn dap_launch(cx: &mut Context) { )))); } +pub fn dap_restart(cx: &mut Context) { + let debugger = match &cx.editor.debugger { + Some(debugger) => debugger, + None => { + cx.editor.set_error("Debugger is not running"); + return; + } + }; + if !debugger + .capabilities() + .supports_restart_request + .unwrap_or(false) + { + cx.editor + .set_error("Debugger does not support session restarts"); + return; + } + if debugger.starting_request_args().is_none() { + cx.editor + .set_error("No arguments found with which to restart the sessions"); + return; + } + + dap_callback( + cx.jobs, + debugger.restart(), + |editor, _compositor, _resp: ()| editor.set_status("Debugging session restarted"), + ); +} + fn debug_parameter_prompt( completions: Vec, config_name: String, @@ -475,19 +505,36 @@ pub fn dap_variables(cx: &mut Context) { if debugger.thread_id.is_none() { cx.editor - .set_status("Cannot access variables while target is running"); + .set_status("Cannot access variables while target is 
running."); return; } let (frame, thread_id) = match (debugger.active_frame, debugger.thread_id) { (Some(frame), Some(thread_id)) => (frame, thread_id), _ => { cx.editor - .set_status("Cannot find current stack frame to access variables"); + .set_status("Cannot find current stack frame to access variables."); + return; + } + }; + + let thread_frame = match debugger.stack_frames.get(&thread_id) { + Some(thread_frame) => thread_frame, + None => { + cx.editor + .set_error(format!("Failed to get stack frame for thread: {thread_id}")); + return; + } + }; + let stack_frame = match thread_frame.get(frame) { + Some(stack_frame) => stack_frame, + None => { + cx.editor + .set_error(format!("Failed to get stack frame for thread {thread_id} and frame {frame}.")); + return; + } + }; - let frame_id = debugger.stack_frames[&thread_id][frame].id; + let frame_id = stack_frame.id; let scopes = match block_on(debugger.scopes(frame_id)) { Ok(s) => s, Err(e) => { @@ -539,7 +586,7 @@ pub fn dap_variables(cx: &mut Context) { pub fn dap_terminate(cx: &mut Context) { let debugger = debugger!(cx.editor); - let request = debugger.disconnect(); + let request = debugger.disconnect(None); dap_callback(cx.jobs, request, |editor, _compositor, _response: ()| { // editor.set_error(format!("Failed to disconnect: {}", e)); editor.debugger = None; diff --git a/helix-term/src/commands/lsp.rs b/helix-term/src/commands/lsp.rs index 86b0c5fa..0b0d1db4 100644 --- a/helix-term/src/commands/lsp.rs +++ b/helix-term/src/commands/lsp.rs @@ -1,16 +1,27 @@ use futures_util::FutureExt; use helix_lsp::{ block_on, - lsp::{self, CodeAction, CodeActionOrCommand, DiagnosticSeverity, NumberOrString}, - util::{diagnostic_to_lsp_diagnostic, lsp_pos_to_pos, lsp_range_to_range, range_to_lsp_range}, + lsp::{ + self, CodeAction, CodeActionOrCommand, CodeActionTriggerKind, DiagnosticSeverity, + NumberOrString, + }, + util::{diagnostic_to_lsp_diagnostic, lsp_range_to_range, range_to_lsp_range}, OffsetEncoding, }; -use tui::text::{Span, Spans}; +use tui::{ + text::{Span, Spans}, + widgets::Row, +}; use super::{align_view, push_jump, Align, Context, Editor, Open}; -use helix_core::{path, Selection}; -use helix_view::{apply_transaction, document::Mode, editor::Action, theme::Style}; +use helix_core::{path, text_annotations::InlineAnnotation, Selection}; +use helix_view::{ + document::{DocumentInlayHints, DocumentInlayHintsId, Mode}, + editor::Action, + theme::Style, + Document, View, +}; use crate::{ compositor::{self, Compositor}, @@ -21,7 +32,7 @@ use crate::{ }; use std::{ - borrow::Cow, cmp::Ordering, collections::BTreeMap, fmt::Write, path::PathBuf, sync::Arc, + cmp::Ordering, collections::BTreeMap, fmt::Write, future::Future, path::PathBuf, sync::Arc, }; /// Gets the language server that is attached to a document, and @@ -46,7 +57,7 @@ impl ui::menu::Item for lsp::Location { /// Current working directory. type Data = PathBuf; - fn label(&self, cwdir: &Self::Data) -> Spans { + fn format(&self, cwdir: &Self::Data) -> Row { // The preallocation here will overallocate a few characters since it will account for the // URL's scheme, which is not used most of the time since that scheme will be "file://". 
// Those extra chars will be used to avoid allocating when writing the line number (in the @@ -80,7 +91,7 @@ impl ui::menu::Item for lsp::SymbolInformation { /// Path to currently focussed document type Data = Option; - fn label(&self, current_doc_path: &Self::Data) -> Spans { + fn format(&self, current_doc_path: &Self::Data) -> Row { if current_doc_path.as_ref() == Some(&self.location.uri) { self.name.as_str().into() } else { @@ -110,7 +121,7 @@ struct PickerDiagnostic { impl ui::menu::Item for PickerDiagnostic { type Data = (DiagnosticStyles, DiagnosticsFormat); - fn label(&self, (styles, format): &Self::Data) -> Spans { + fn format(&self, (styles, format): &Self::Data) -> Row { let mut style = self .diag .severity @@ -126,20 +137,17 @@ impl ui::menu::Item for PickerDiagnostic { // remove background as it is distracting in the picker list style.bg = None; - let code: Cow<'_, str> = self - .diag - .code - .as_ref() - .map(|c| match c { - NumberOrString::Number(n) => n.to_string().into(), - NumberOrString::String(s) => s.as_str().into(), - }) - .unwrap_or_default(); + let code = match self.diag.code.as_ref() { + Some(NumberOrString::Number(n)) => format!(" ({n})"), + Some(NumberOrString::String(s)) => format!(" ({s})"), + None => String::new(), + }; let path = match format { DiagnosticsFormat::HideSourcePath => String::new(), DiagnosticsFormat::ShowSourcePath => { - let path = path::get_truncated_path(self.url.path()); + let file_path = self.url.to_file_path().unwrap(); + let path = path::get_truncated_path(file_path); format!("{}: ", path.to_string_lossy()) } }; @@ -149,6 +157,7 @@ impl ui::menu::Item for PickerDiagnostic { Span::styled(&self.diag.message, style), Span::styled(code, style), ]) + .into() } } @@ -188,15 +197,15 @@ fn jump_to_location( } } let (view, doc) = current!(editor); - let definition_pos = location.range.start; // TODO: convert inside server - let new_pos = if let Some(new_pos) = lsp_pos_to_pos(doc.text(), definition_pos, offset_encoding) - { - new_pos - } else { - return; - }; - doc.set_selection(view.id, Selection::point(new_pos)); + let new_range = + if let Some(new_range) = lsp_range_to_range(doc.text(), location.range, offset_encoding) { + new_range + } else { + log::warn!("lsp position out of bounds - {:?}", location.range); + return; + }; + doc.set_selection(view.id, Selection::single(new_range.anchor, new_range.head)); align_view(doc, view, Align::Center); } @@ -467,7 +476,7 @@ pub fn workspace_diagnostics_picker(cx: &mut Context) { impl ui::menu::Item for lsp::CodeActionOrCommand { type Data = (); - fn label(&self, _data: &Self::Data) -> Spans { + fn format(&self, _data: &Self::Data) -> Row { match self { lsp::CodeActionOrCommand::CodeAction(action) => action.title.as_str().into(), lsp::CodeActionOrCommand::Command(command) => command.title.as_str().into(), @@ -557,6 +566,7 @@ pub fn code_action(cx: &mut Context) { .map(|diag| diagnostic_to_lsp_diagnostic(doc.text(), diag, offset_encoding)) .collect(), only: None, + trigger_kind: Some(CodeActionTriggerKind::INVOKED), }, ) { Some(future) => future, @@ -641,7 +651,7 @@ pub fn code_action(cx: &mut Context) { log::debug!("code action: {:?}", code_action); if let Some(ref workspace_edit) = code_action.edit { log::debug!("edit: {:?}", workspace_edit); - apply_workspace_edit(editor, offset_encoding, workspace_edit); + let _ = apply_workspace_edit(editor, offset_encoding, workspace_edit); } // if code action provides both edit and command first the edit @@ -662,7 +672,7 @@ pub fn code_action(cx: &mut Context) { impl 
ui::menu::Item for lsp::Command { type Data = (); - fn label(&self, _data: &Self::Data) -> Spans { + fn format(&self, _data: &Self::Data) -> Row { self.title.as_str().into() } } @@ -747,19 +757,50 @@ pub fn apply_document_resource_op(op: &lsp::ResourceOp) -> std::io::Result<()> { } } +#[derive(Debug)] +pub struct ApplyEditError { + pub kind: ApplyEditErrorKind, + pub failed_change_idx: usize, +} + +#[derive(Debug)] +pub enum ApplyEditErrorKind { + DocumentChanged, + FileNotFound, + UnknownURISchema, + IoError(std::io::Error), + // TODO: check edits before applying and propagate failure + // InvalidEdit, +} + +impl ToString for ApplyEditErrorKind { + fn to_string(&self) -> String { + match self { + ApplyEditErrorKind::DocumentChanged => "document has changed".to_string(), + ApplyEditErrorKind::FileNotFound => "file not found".to_string(), + ApplyEditErrorKind::UnknownURISchema => "URI schema not supported".to_string(), + ApplyEditErrorKind::IoError(err) => err.to_string(), + } + } +} + +///TODO make this transactional (and set failureMode to transactional) pub fn apply_workspace_edit( editor: &mut Editor, offset_encoding: OffsetEncoding, workspace_edit: &lsp::WorkspaceEdit, -) { - let mut apply_edits = |uri: &helix_lsp::Url, text_edits: Vec| { +) -> Result<(), ApplyEditError> { + let mut apply_edits = |uri: &helix_lsp::Url, + version: Option, + text_edits: Vec| + -> Result<(), ApplyEditErrorKind> { let path = match uri.to_file_path() { Ok(path) => path, Err(_) => { let err = format!("unable to convert URI to filepath: {}", uri); log::error!("{}", err); editor.set_error(err); - return; + return Err(ApplyEditErrorKind::UnknownURISchema); } }; @@ -770,11 +811,19 @@ pub fn apply_workspace_edit( let err = format!("failed to open document: {}: {}", uri, err); log::error!("{}", err); editor.set_error(err); - return; + return Err(ApplyEditErrorKind::FileNotFound); } }; let doc = doc_mut!(editor, &doc_id); + if let Some(version) = version { + if version != doc.version() { + let err = format!("outdated workspace edit for {path:?}"); + log::error!("{err}, expected {} but got {version}", doc.version()); + editor.set_error(err); + return Err(ApplyEditErrorKind::DocumentChanged); + } + } // Need to determine a view for apply/append_changes_to_history let selections = doc.selections(); @@ -796,33 +845,15 @@ pub fn apply_workspace_edit( offset_encoding, ); let view = view_mut!(editor, view_id); - apply_transaction(&transaction, doc, view); + doc.apply(&transaction, view.id); doc.append_changes_to_history(view); + Ok(()) }; - if let Some(ref changes) = workspace_edit.changes { - log::debug!("workspace changes: {:?}", changes); - for (uri, text_edits) in changes { - let text_edits = text_edits.to_vec(); - apply_edits(uri, text_edits) - } - return; - // Not sure if it works properly, it'll be safer to just panic here to avoid breaking some parts of code on which code actions will be used - // TODO: find some example that uses workspace changes, and test it - // for (url, edits) in changes.iter() { - // let file_path = url.origin().ascii_serialization(); - // let file_path = std::path::PathBuf::from(file_path); - // let file = std::fs::File::open(file_path).unwrap(); - // let mut text = Rope::from_reader(file).unwrap(); - // let transaction = edits_to_changes(&text, edits); - // transaction.apply(&mut text); - // } - } - if let Some(ref document_changes) = workspace_edit.document_changes { match document_changes { lsp::DocumentChanges::Edits(document_edits) => { - for document_edit in document_edits { + for 
(i, document_edit) in document_edits.iter().enumerate() { let edits = document_edit .edits .iter() @@ -834,15 +865,26 @@ pub fn apply_workspace_edit( }) .cloned() .collect(); - apply_edits(&document_edit.text_document.uri, edits); + apply_edits( + &document_edit.text_document.uri, + document_edit.text_document.version, + edits, + ) + .map_err(|kind| ApplyEditError { + kind, + failed_change_idx: i, + })?; } } lsp::DocumentChanges::Operations(operations) => { log::debug!("document changes - operations: {:?}", operations); - for operation in operations { + for (i, operation) in operations.iter().enumerate() { match operation { lsp::DocumentChangeOperation::Op(op) => { - apply_document_resource_op(op).unwrap(); + apply_document_resource_op(op).map_err(|io| ApplyEditError { + kind: ApplyEditErrorKind::IoError(io), + failed_change_idx: i, + })?; } lsp::DocumentChangeOperation::Edit(document_edit) => { @@ -857,13 +899,36 @@ pub fn apply_workspace_edit( }) .cloned() .collect(); - apply_edits(&document_edit.text_document.uri, edits); + apply_edits( + &document_edit.text_document.uri, + document_edit.text_document.version, + edits, + ) + .map_err(|kind| ApplyEditError { + kind, + failed_change_idx: i, + })?; } } } } } + + return Ok(()); } + + if let Some(ref changes) = workspace_edit.changes { + log::debug!("workspace changes: {:?}", changes); + for (i, (uri, text_edits)) in changes.iter().enumerate() { + let text_edits = text_edits.to_vec(); + apply_edits(uri, None, text_edits).map_err(|kind| ApplyEditError { + kind, + failed_change_idx: i, + })?; + } + } + + Ok(()) } fn goto_impl( @@ -910,6 +975,31 @@ fn to_locations(definitions: Option) -> Vec future, + None => { + cx.editor + .set_error("Language server does not support goto-declaration"); + return; + } + }; + + cx.callback( + future, + move |editor, compositor, response: Option| { + let items = to_locations(response); + goto_impl(editor, compositor, items, offset_encoding); + }, + ); +} + pub fn goto_definition(cx: &mut Context) { let (view, doc) = current!(cx.editor); let language_server = language_server!(cx.editor, doc); @@ -1198,49 +1288,123 @@ pub fn hover(cx: &mut Context) { } pub fn rename_symbol(cx: &mut Context) { - let (view, doc) = current_ref!(cx.editor); - let text = doc.text().slice(..); - let primary_selection = doc.selection(view.id).primary(); - let prefill = if primary_selection.len() > 1 { - primary_selection - } else { - use helix_core::textobject::{textobject_word, TextObject}; - textobject_word(text, primary_selection, TextObject::Inside, 1, false) + fn get_prefill_from_word_boundary(editor: &Editor) -> String { + let (view, doc) = current_ref!(editor); + let text = doc.text().slice(..); + let primary_selection = doc.selection(view.id).primary(); + if primary_selection.len() > 1 { + primary_selection + } else { + use helix_core::textobject::{textobject_word, TextObject}; + textobject_word(text, primary_selection, TextObject::Inside, 1, false) + } + .fragment(text) + .into() } - .fragment(text) - .into(); - ui::prompt_with_input( - cx, - "rename-to:".into(), - prefill, - None, - ui::completers::none, - move |cx: &mut compositor::Context, input: &str, event: PromptEvent| { - if event != PromptEvent::Validate { - return; + + fn get_prefill_from_lsp_response( + editor: &Editor, + offset_encoding: OffsetEncoding, + response: Option, + ) -> Result { + match response { + Some(lsp::PrepareRenameResponse::Range(range)) => { + let text = doc!(editor).text(); + + Ok(lsp_range_to_range(text, range, offset_encoding) + .ok_or("lsp sent 
invalid selection range for rename")? + .fragment(text.slice(..)) + .into()) + } + Some(lsp::PrepareRenameResponse::RangeWithPlaceholder { placeholder, .. }) => { + Ok(placeholder) } + Some(lsp::PrepareRenameResponse::DefaultBehavior { .. }) => { + Ok(get_prefill_from_word_boundary(editor)) + } + None => Err("lsp did not respond to prepare rename request"), + } + } - let (view, doc) = current!(cx.editor); - let language_server = language_server!(cx.editor, doc); - let offset_encoding = language_server.offset_encoding(); + fn create_rename_prompt(editor: &Editor, prefill: String) -> Box { + let prompt = ui::Prompt::new( + "rename-to:".into(), + None, + ui::completers::none, + move |cx: &mut compositor::Context, input: &str, event: PromptEvent| { + if event != PromptEvent::Validate { + return; + } - let pos = doc.position(view.id, offset_encoding); + let (view, doc) = current!(cx.editor); + let language_server = language_server!(cx.editor, doc); + let offset_encoding = language_server.offset_encoding(); + + let pos = doc.position(view.id, offset_encoding); + + let future = + match language_server.rename_symbol(doc.identifier(), pos, input.to_string()) { + Some(future) => future, + None => { + cx.editor + .set_error("Language server does not support symbol renaming"); + return; + } + }; + match block_on(future) { + Ok(edits) => { + let _ = apply_workspace_edit(cx.editor, offset_encoding, &edits); + } + Err(err) => cx.editor.set_error(err.to_string()), + } + }, + ) + .with_line(prefill, editor); - let future = - match language_server.rename_symbol(doc.identifier(), pos, input.to_string()) { - Some(future) => future, - None => { - cx.editor - .set_error("Language server does not support symbol renaming"); + Box::new(prompt) + } + + let (view, doc) = current!(cx.editor); + let language_server = language_server!(cx.editor, doc); + let offset_encoding = language_server.offset_encoding(); + + if !language_server.supports_rename() { + cx.editor + .set_error("Language server does not support symbol renaming"); + return; + } + + let pos = doc.position(view.id, offset_encoding); + + match language_server.prepare_rename(doc.identifier(), pos) { + // Language server supports textDocument/prepareRename, use it. + Some(future) => cx.callback( + future, + move |editor, compositor, response: Option| { + let prefill = match get_prefill_from_lsp_response(editor, offset_encoding, response) + { + Ok(p) => p, + Err(e) => { + editor.set_error(e); return; } }; - match block_on(future) { - Ok(edits) => apply_workspace_edit(cx.editor, offset_encoding, &edits), - Err(err) => cx.editor.set_error(err.to_string()), - } - }, - ); + + let prompt = create_rename_prompt(editor, prefill); + + compositor.push(prompt); + }, + ), + // Language server does not support textDocument/prepareRename, fall back + // to word boundary selection. 
+ None => { + let prefill = get_prefill_from_word_boundary(cx.editor); + + let prompt = create_rename_prompt(cx.editor, prefill); + + cx.push_layer(prompt); + } + }; } pub fn select_references_to_symbol_under_cursor(cx: &mut Context) { @@ -1291,3 +1455,174 @@ pub fn select_references_to_symbol_under_cursor(cx: &mut Context) { }, ); } + +pub fn compute_inlay_hints_for_all_views(editor: &mut Editor, jobs: &mut crate::job::Jobs) { + if !editor.config().lsp.display_inlay_hints { + return; + } + + for (view, _) in editor.tree.views() { + let doc = match editor.documents.get(&view.doc) { + Some(doc) => doc, + None => continue, + }; + if let Some(callback) = compute_inlay_hints_for_view(view, doc) { + jobs.callback(callback); + } + } +} + +fn compute_inlay_hints_for_view( + view: &View, + doc: &Document, +) -> Option>>>> { + let view_id = view.id; + let doc_id = view.doc; + + let language_server = doc.language_server()?; + + let capabilities = language_server.capabilities(); + + let (future, new_doc_inlay_hints_id) = match capabilities.inlay_hint_provider { + Some( + lsp::OneOf::Left(true) + | lsp::OneOf::Right(lsp::InlayHintServerCapabilities::Options(_)), + ) => { + let doc_text = doc.text(); + let len_lines = doc_text.len_lines(); + + // Compute ~3 times the current view height of inlay hints, that way some scrolling + // will not show half the view with hints and half without while still being faster + // than computing all the hints for the full file (which could be dozens of time + // longer than the view is). + let view_height = view.inner_height(); + let first_visible_line = doc_text.char_to_line(view.offset.anchor); + let first_line = first_visible_line.saturating_sub(view_height); + let last_line = first_visible_line + .saturating_add(view_height.saturating_mul(2)) + .min(len_lines); + + let new_doc_inlay_hint_id = DocumentInlayHintsId { + first_line, + last_line, + }; + // Don't recompute the annotations in case nothing has changed about the view + if !doc.inlay_hints_oudated + && doc + .inlay_hints(view_id) + .map_or(false, |dih| dih.id == new_doc_inlay_hint_id) + { + return None; + } + + let doc_slice = doc_text.slice(..); + let first_char_in_range = doc_slice.line_to_char(first_line); + let last_char_in_range = doc_slice.line_to_char(last_line); + + let range = helix_lsp::util::range_to_lsp_range( + doc_text, + helix_core::Range::new(first_char_in_range, last_char_in_range), + language_server.offset_encoding(), + ); + + ( + language_server.text_document_range_inlay_hints(doc.identifier(), range, None), + new_doc_inlay_hint_id, + ) + } + _ => return None, + }; + + let callback = super::make_job_callback( + future?, + move |editor, _compositor, response: Option>| { + // The config was modified or the window was closed while the request was in flight + if !editor.config().lsp.display_inlay_hints || editor.tree.try_get(view_id).is_none() { + return; + } + + // Add annotations to relevant document, not the current one (it may have changed in between) + let doc = match editor.documents.get_mut(&doc_id) { + Some(doc) => doc, + None => return, + }; + + // If we have neither hints nor an LSP, empty the inlay hints since they're now oudated + let (mut hints, offset_encoding) = match (response, doc.language_server()) { + (Some(h), Some(ls)) if !h.is_empty() => (h, ls.offset_encoding()), + _ => { + doc.set_inlay_hints( + view_id, + DocumentInlayHints::empty_with_id(new_doc_inlay_hints_id), + ); + doc.inlay_hints_oudated = false; + return; + } + }; + + // Most language servers will already send 
them sorted but ensure this is the case to + // avoid errors on our end. + hints.sort_unstable_by_key(|inlay_hint| inlay_hint.position); + + let mut padding_before_inlay_hints = Vec::new(); + let mut type_inlay_hints = Vec::new(); + let mut parameter_inlay_hints = Vec::new(); + let mut other_inlay_hints = Vec::new(); + let mut padding_after_inlay_hints = Vec::new(); + + let doc_text = doc.text(); + + for hint in hints { + let char_idx = + match helix_lsp::util::lsp_pos_to_pos(doc_text, hint.position, offset_encoding) + { + Some(pos) => pos, + // Skip inlay hints that have no "real" position + None => continue, + }; + + let label = match hint.label { + lsp::InlayHintLabel::String(s) => s, + lsp::InlayHintLabel::LabelParts(parts) => parts + .into_iter() + .map(|p| p.value) + .collect::>() + .join(""), + }; + + let inlay_hints_vec = match hint.kind { + Some(lsp::InlayHintKind::TYPE) => &mut type_inlay_hints, + Some(lsp::InlayHintKind::PARAMETER) => &mut parameter_inlay_hints, + // We can't warn on unknown kind here since LSPs are free to set it or not, for + // example Rust Analyzer does not: every kind will be `None`. + _ => &mut other_inlay_hints, + }; + + if let Some(true) = hint.padding_left { + padding_before_inlay_hints.push(InlineAnnotation::new(char_idx, " ")); + } + + inlay_hints_vec.push(InlineAnnotation::new(char_idx, label)); + + if let Some(true) = hint.padding_right { + padding_after_inlay_hints.push(InlineAnnotation::new(char_idx, " ")); + } + } + + doc.set_inlay_hints( + view_id, + DocumentInlayHints { + id: new_doc_inlay_hints_id, + type_inlay_hints: type_inlay_hints.into(), + parameter_inlay_hints: parameter_inlay_hints.into(), + other_inlay_hints: other_inlay_hints.into(), + padding_before_inlay_hints: padding_before_inlay_hints.into(), + padding_after_inlay_hints: padding_after_inlay_hints.into(), + }, + ); + doc.inlay_hints_oudated = false; + }, + ); + + Some(callback) +} diff --git a/helix-term/src/commands/typed.rs b/helix-term/src/commands/typed.rs index de24c4fb..2c72686d 100644 --- a/helix-term/src/commands/typed.rs +++ b/helix-term/src/commands/typed.rs @@ -1,13 +1,14 @@ +use std::fmt::Write; use std::ops::Deref; use crate::job::Job; use super::*; -use helix_view::{ - apply_transaction, - editor::{Action, CloseError, ConfigEvent}, -}; +use helix_core::{encoding, shellwords::Shellwords}; +use helix_view::document::DEFAULT_LANGUAGE_NAME; +use helix_view::editor::{Action, CloseError, ConfigEvent}; +use serde_json::Value; use ui::completers::{self, Completer}; #[derive(Clone)] @@ -17,7 +18,49 @@ pub struct TypableCommand { pub doc: &'static str, // params, flags, helper, completer pub fun: fn(&mut compositor::Context, &[Cow], PromptEvent) -> anyhow::Result<()>, - pub completer: Option, + /// What completion methods, if any, does this command have? + pub signature: CommandSignature, +} + +impl TypableCommand { + fn completer_for_argument_number(&self, n: usize) -> &Completer { + match self.signature.positional_args.get(n) { + Some(completer) => completer, + _ => &self.signature.var_args, + } + } +} + +#[derive(Clone)] +pub struct CommandSignature { + // Arguments with specific completion methods based on their position. + positional_args: &'static [Completer], + + // All remaining arguments will use this completion method, if set. 
+ var_args: Completer, +} + +impl CommandSignature { + const fn none() -> Self { + Self { + positional_args: &[], + var_args: completers::none, + } + } + + const fn positional(completers: &'static [Completer]) -> Self { + Self { + positional_args: completers, + var_args: completers::none, + } + } + + const fn all(completer: Completer) -> Self { + Self { + positional_args: &[], + var_args: completer, + } + } } fn quit(cx: &mut compositor::Context, args: &[Cow], event: PromptEvent) -> anyhow::Result<()> { @@ -480,7 +523,7 @@ fn set_line_ending( } }), ); - apply_transaction(&transaction, doc, view); + doc.apply(&transaction, view.id); doc.append_changes_to_history(view); Ok(()) @@ -916,6 +959,7 @@ fn replace_selections_with_clipboard_impl( cx: &mut compositor::Context, clipboard_type: ClipboardType, ) -> anyhow::Result<()> { + let scrolloff = cx.editor.config().scrolloff; let (view, doc) = current!(cx.editor); match cx.editor.clipboard_provider.get_contents(clipboard_type) { @@ -925,8 +969,9 @@ fn replace_selections_with_clipboard_impl( (range.from(), range.to(), Some(contents.as_str().into())) }); - apply_transaction(&transaction, doc, view); + doc.apply(&transaction, view.id); doc.append_changes_to_history(view); + view.ensure_cursor_in_view(doc, scrolloff); Ok(()) } Err(e) => Err(e.context("Couldn't get system clipboard contents")), @@ -1034,6 +1079,131 @@ fn set_encoding( } } +/// Shows info about the character under the primary cursor. +fn get_character_info( + cx: &mut compositor::Context, + _args: &[Cow], + event: PromptEvent, +) -> anyhow::Result<()> { + if event != PromptEvent::Validate { + return Ok(()); + } + + let (view, doc) = current_ref!(cx.editor); + let text = doc.text().slice(..); + + let grapheme_start = doc.selection(view.id).primary().cursor(text); + let grapheme_end = graphemes::next_grapheme_boundary(text, grapheme_start); + + if grapheme_start == grapheme_end { + return Ok(()); + } + + let grapheme = text.slice(grapheme_start..grapheme_end).to_string(); + let encoding = doc.encoding(); + + let printable = grapheme.chars().fold(String::new(), |mut s, c| { + match c { + '\0' => s.push_str("\\0"), + '\t' => s.push_str("\\t"), + '\n' => s.push_str("\\n"), + '\r' => s.push_str("\\r"), + _ => s.push(c), + } + + s + }); + + // Convert to Unicode codepoints if in UTF-8 + let unicode = if encoding == encoding::UTF_8 { + let mut unicode = " (".to_owned(); + + for (i, char) in grapheme.chars().enumerate() { + if i != 0 { + unicode.push(' '); + } + + unicode.push_str("U+"); + + let codepoint: u32 = if char.is_ascii() { + char.into() + } else { + // Not ascii means it will be multi-byte, so strip out the extra + // bits that encode the length & mark continuation bytes + + let s = String::from(char); + let bytes = s.as_bytes(); + + // First byte starts with 2-4 ones then a zero, so strip those off + let first = bytes[0]; + let codepoint = first & (0xFF >> (first.leading_ones() + 1)); + let mut codepoint = u32::from(codepoint); + + // Following bytes start with 10 + for byte in bytes.iter().skip(1) { + codepoint <<= 6; + codepoint += u32::from(*byte) & 0x3F; + } + + codepoint + }; + + write!(unicode, "{codepoint:0>4x}").unwrap(); + } + + unicode.push(')'); + unicode + } else { + String::new() + }; + + // Give the decimal value for ascii characters + let dec = if encoding.is_ascii_compatible() && grapheme.len() == 1 { + format!(" Dec {}", grapheme.as_bytes()[0]) + } else { + String::new() + }; + + let hex = { + let mut encoder = encoding.new_encoder(); + let max_encoded_len = encoder 
+ .max_buffer_length_from_utf8_without_replacement(grapheme.len()) + .unwrap(); + let mut bytes = Vec::with_capacity(max_encoded_len); + let mut current_byte = 0; + let mut hex = String::new(); + + for (i, char) in grapheme.chars().enumerate() { + if i != 0 { + hex.push_str(" +"); + } + + let (result, _input_bytes_read) = encoder.encode_from_utf8_to_vec_without_replacement( + &char.to_string(), + &mut bytes, + true, + ); + + if let encoding::EncoderResult::Unmappable(char) = result { + bail!("{char:?} cannot be mapped to {}", encoding.name()); + } + + for byte in &bytes[current_byte..] { + write!(hex, " {byte:0>2x}").unwrap(); + } + + current_byte = bytes.len(); + } + + hex + }; + + cx.editor + .set_status(format!("\"{printable}\"{unicode}{dec} Hex{hex}")); + + Ok(()) +} + /// Reload the [`Document`] from its source file. fn reload( cx: &mut compositor::Context, @@ -1226,6 +1396,37 @@ fn lsp_restart( Ok(()) } +fn lsp_stop( + cx: &mut compositor::Context, + _args: &[Cow], + event: PromptEvent, +) -> anyhow::Result<()> { + if event != PromptEvent::Validate { + return Ok(()); + } + + let doc = doc!(cx.editor); + + let ls_id = doc + .language_server() + .map(|ls| ls.id()) + .context("LSP not running for the current document")?; + + let config = doc + .language_config() + .context("LSP not defined for the current document")?; + cx.editor.language_servers.stop(config); + + for doc in cx.editor.documents_mut() { + if doc.language_server().map_or(false, |ls| ls.id() == ls_id) { + doc.set_language_server(None); + doc.set_diagnostics(Default::default()); + } + } + + Ok(()) +} + fn tree_sitter_scopes( cx: &mut compositor::Context, _args: &[Cow], @@ -1406,54 +1607,74 @@ fn tutor( return Ok(()); } - let path = helix_loader::runtime_dir().join("tutor"); + let path = helix_loader::runtime_file(Path::new("tutor")); cx.editor.open(&path, Action::Replace)?; // Unset path to prevent accidentally saving to the original tutor file. 
doc_mut!(cx.editor).set_path(None)?; Ok(()) } +fn abort_goto_line_number_preview(cx: &mut compositor::Context) { + if let Some(last_selection) = cx.editor.last_selection.take() { + let scrolloff = cx.editor.config().scrolloff; + + let (view, doc) = current!(cx.editor); + doc.set_selection(view.id, last_selection); + view.ensure_cursor_in_view(doc, scrolloff); + } +} + +fn update_goto_line_number_preview( + cx: &mut compositor::Context, + args: &[Cow], +) -> anyhow::Result<()> { + cx.editor.last_selection.get_or_insert_with(|| { + let (view, doc) = current!(cx.editor); + doc.selection(view.id).clone() + }); + + let scrolloff = cx.editor.config().scrolloff; + let line = args[0].parse::()?; + goto_line_without_jumplist(cx.editor, NonZeroUsize::new(line)); + + let (view, doc) = current!(cx.editor); + view.ensure_cursor_in_view(doc, scrolloff); + + Ok(()) +} + pub(super) fn goto_line_number( cx: &mut compositor::Context, args: &[Cow], event: PromptEvent, ) -> anyhow::Result<()> { match event { - PromptEvent::Abort => { - if let Some(line_number) = cx.editor.last_line_number { - goto_line_impl(cx.editor, NonZeroUsize::new(line_number)); - let (view, doc) = current!(cx.editor); - view.ensure_cursor_in_view(doc, line_number); - cx.editor.last_line_number = None; - } - return Ok(()); - } + PromptEvent::Abort => abort_goto_line_number_preview(cx), PromptEvent::Validate => { ensure!(!args.is_empty(), "Line number required"); - cx.editor.last_line_number = None; - } - PromptEvent::Update => { - if args.is_empty() { - if let Some(line_number) = cx.editor.last_line_number { - // When a user hits backspace and there are no numbers left, - // we can bring them back to their original line - goto_line_impl(cx.editor, NonZeroUsize::new(line_number)); - let (view, doc) = current!(cx.editor); - view.ensure_cursor_in_view(doc, line_number); - cx.editor.last_line_number = None; - } - return Ok(()); - } + + // If we are invoked directly via a keybinding, Validate is + // sent without any prior Update events. Ensure the cursor + // is moved to the appropriate location. + update_goto_line_number_preview(cx, args)?; + + let last_selection = cx + .editor + .last_selection + .take() + .expect("update_goto_line_number_preview should always set last_selection"); + let (view, doc) = current!(cx.editor); - let text = doc.text().slice(..); - let line = doc.selection(view.id).primary().cursor_line(text); - cx.editor.last_line_number.get_or_insert(line + 1); + view.jumps.push((doc.id(), last_selection)); } + + // When a user hits backspace and there are no numbers left, + // we can bring them back to their original selection. If they + // begin typing numbers again, we'll start a new preview session. + PromptEvent::Update if args.is_empty() => abort_goto_line_number_preview(cx), + PromptEvent::Update => update_goto_line_number_preview(cx, args)?, } - let line = args[0].parse::()?; - goto_line_impl(cx.editor, NonZeroUsize::new(line)); - let (view, doc) = current!(cx.editor); - view.ensure_cursor_in_view(doc, line); + Ok(()) } @@ -1520,6 +1741,46 @@ fn set_option( Ok(()) } +/// Toggle boolean config option at runtime. Access nested values by dot +/// syntax, for example to toggle smart case search, use `:toggle search.smart- +/// case`. +fn toggle_option( + cx: &mut compositor::Context, + args: &[Cow], + event: PromptEvent, +) -> anyhow::Result<()> { + if event != PromptEvent::Validate { + return Ok(()); + } + + if args.len() != 1 { + anyhow::bail!("Bad arguments. 
Usage: `:toggle key`"); + } + let key = &args[0].to_lowercase(); + + let key_error = || anyhow::anyhow!("Unknown key `{}`", key); + + let mut config = serde_json::json!(&cx.editor.config().deref()); + let pointer = format!("/{}", key.replace('.', "/")); + let value = config.pointer_mut(&pointer).ok_or_else(key_error)?; + + if let Value::Bool(b) = *value { + *value = Value::Bool(!b); + } else { + anyhow::bail!("Key `{}` is not toggle-able", key) + } + + // This unwrap should never fail because we only replace one boolean value + // with another, maintaining a valid json config + let config = serde_json::from_value(config).unwrap(); + + cx.editor + .config_events + .0 + .send(ConfigEvent::Update(config))?; + Ok(()) +} + /// Change the language of the current buffer at runtime. fn language( cx: &mut compositor::Context, @@ -1530,13 +1791,20 @@ fn language( return Ok(()); } + if args.is_empty() { + let doc = doc!(cx.editor); + let language = &doc.language_name().unwrap_or(DEFAULT_LANGUAGE_NAME); + cx.editor.set_status(language.to_string()); + return Ok(()); + } + if args.len() != 1 { anyhow::bail!("Bad arguments. Usage: `:set-language language`"); } let doc = doc_mut!(cx.editor); - if args[0] == "text" { + if args[0] == DEFAULT_LANGUAGE_NAME { doc.set_language(None, None) } else { doc.set_language_by_language_id(&args[0], cx.editor.syn_loader.clone())?; @@ -1573,6 +1841,7 @@ fn sort_impl( _args: &[Cow], reverse: bool, ) -> anyhow::Result<()> { + let scrolloff = cx.editor.config().scrolloff; let (view, doc) = current!(cx.editor); let text = doc.text().slice(..); @@ -1596,8 +1865,9 @@ fn sort_impl( .map(|(s, fragment)| (s.from(), s.to(), Some(fragment))), ); - apply_transaction(&transaction, doc, view); + doc.apply(&transaction, view.id); doc.append_changes_to_history(view); + view.ensure_cursor_in_view(doc, scrolloff); Ok(()) } @@ -1612,35 +1882,31 @@ fn reflow( } let scrolloff = cx.editor.config().scrolloff; + let cfg_text_width: usize = cx.editor.config().text_width; let (view, doc) = current!(cx.editor); - const DEFAULT_MAX_LEN: usize = 79; - - // Find the max line length by checking the following sources in order: + // Find the text_width by checking the following sources in order: // - The passed argument in `args` - // - The configured max_line_len for this language in languages.toml - // - The const default we set above - let max_line_len: usize = args + // - The configured text-width for this language in languages.toml + // - The configured text-width in the config.toml + let text_width: usize = args .get(0) .map(|num| num.parse::()) .transpose()? 
- .or_else(|| { - doc.language_config() - .and_then(|config| config.max_line_length) - }) - .unwrap_or(DEFAULT_MAX_LEN); + .or_else(|| doc.language_config().and_then(|config| config.text_width)) + .unwrap_or(cfg_text_width); let rope = doc.text(); let selection = doc.selection(view.id); let transaction = Transaction::change_by_selection(rope, selection, |range| { let fragment = range.fragment(rope.slice(..)); - let reflowed_text = helix_core::wrap::reflow_hard_wrap(&fragment, max_line_len); + let reflowed_text = helix_core::wrap::reflow_hard_wrap(&fragment, text_width); (range.from(), range.to(), Some(reflowed_text)) }); - apply_transaction(&transaction, doc, view); + doc.apply(&transaction, view.id); doc.append_changes_to_history(view); view.ensure_cursor_in_view(doc, scrolloff); @@ -1794,18 +2060,14 @@ fn run_shell_command( return Ok(()); } - let shell = &cx.editor.config().shell; - let (output, success) = shell_impl(shell, &args.join(" "), None)?; - if success { - cx.editor.set_status("Command succeeded"); - } else { - cx.editor.set_error("Command failed"); - } + let shell = cx.editor.config().shell.clone(); + let args = args.join(" "); - if !output.is_empty() { - let callback = async move { - let call: job::Callback = Callback::EditorCompositor(Box::new( - move |editor: &mut Editor, compositor: &mut Compositor| { + let callback = async move { + let (output, success) = shell_impl_async(&shell, &args, None).await?; + let call: job::Callback = Callback::EditorCompositor(Box::new( + move |editor: &mut Editor, compositor: &mut Compositor| { + if !output.is_empty() { let contents = ui::Markdown::new( format!("```sh\n{}\n```", output), editor.syn_loader.clone(), @@ -1814,14 +2076,76 @@ fn run_shell_command( helix_core::Position::new(editor.cursor().0.unwrap_or_default().row, 2), )); compositor.replace_or_push("shell", popup); - }, - )); - Ok(call) - }; + } + if success { + editor.set_status("Command succeeded"); + } else { + editor.set_error("Command failed"); + } + }, + )); + Ok(call) + }; + cx.jobs.callback(callback); - cx.jobs.callback(callback); + Ok(()) +} + +fn reset_diff_change( + cx: &mut compositor::Context, + args: &[Cow], + event: PromptEvent, +) -> anyhow::Result<()> { + if event != PromptEvent::Validate { + return Ok(()); } + ensure!(args.is_empty(), ":reset-diff-change takes no arguments"); + + let editor = &mut cx.editor; + let scrolloff = editor.config().scrolloff; + + let (view, doc) = current!(editor); + // TODO refactor to use let..else once MSRV is raised to 1.65 + let handle = match doc.diff_handle() { + Some(handle) => handle, + None => bail!("Diff is not available in the current buffer"), + }; + let diff = handle.load(); + let doc_text = doc.text().slice(..); + let line = doc.selection(view.id).primary().cursor_line(doc_text); + + // TODO refactor to use let..else once MSRV is raised to 1.65 + let hunk_idx = match diff.hunk_at(line as u32, true) { + Some(hunk_idx) => hunk_idx, + None => bail!("There is no change at the cursor"), + }; + let hunk = diff.nth_hunk(hunk_idx); + let diff_base = diff.diff_base(); + let before_start = diff_base.line_to_char(hunk.before.start as usize); + let before_end = diff_base.line_to_char(hunk.before.end as usize); + let text: Tendril = diff + .diff_base() + .slice(before_start..before_end) + .chunks() + .collect(); + let anchor = doc_text.line_to_char(hunk.after.start as usize); + let transaction = Transaction::change( + doc.text(), + [( + anchor, + doc_text.line_to_char(hunk.after.end as usize), + (!text.is_empty()).then_some(text), + )] 
+ .into_iter(), + ); + drop(diff); // make borrow check happy + doc.apply(&transaction, view.id); + // select inserted text + let text_len = before_end - before_start; + doc.set_selection(view.id, Selection::single(anchor, anchor + text_len)); + doc.append_changes_to_history(view); + view.ensure_cursor_in_view(doc, scrolloff); Ok(()) } @@ -1831,112 +2155,114 @@ pub const TYPABLE_COMMAND_LIST: &[TypableCommand] = &[ aliases: &["q"], doc: "Close the current view.", fun: quit, - completer: None, + signature: CommandSignature::none(), }, TypableCommand { name: "quit!", aliases: &["q!"], doc: "Force close the current view, ignoring unsaved changes.", fun: force_quit, - completer: None, + signature: CommandSignature::none(), }, TypableCommand { name: "open", aliases: &["o"], doc: "Open a file from disk into the current view.", fun: open, - completer: Some(completers::filename), + signature: CommandSignature::all(completers::filename), }, TypableCommand { name: "buffer-close", aliases: &["bc", "bclose"], doc: "Close the current buffer.", fun: buffer_close, - completer: Some(completers::buffer), + signature: CommandSignature::all(completers::buffer), }, TypableCommand { name: "buffer-close!", aliases: &["bc!", "bclose!"], doc: "Close the current buffer forcefully, ignoring unsaved changes.", fun: force_buffer_close, - completer: Some(completers::buffer), + signature: CommandSignature::all(completers::buffer) }, TypableCommand { name: "buffer-close-others", aliases: &["bco", "bcloseother"], doc: "Close all buffers but the currently focused one.", fun: buffer_close_others, - completer: None, + signature: CommandSignature::none(), }, TypableCommand { name: "buffer-close-others!", aliases: &["bco!", "bcloseother!"], doc: "Force close all buffers but the currently focused one.", fun: force_buffer_close_others, - completer: None, + signature: CommandSignature::none(), }, TypableCommand { name: "buffer-close-all", aliases: &["bca", "bcloseall"], doc: "Close all buffers without quitting.", fun: buffer_close_all, - completer: None, + signature: CommandSignature::none(), }, TypableCommand { name: "buffer-close-all!", aliases: &["bca!", "bcloseall!"], doc: "Force close all buffers ignoring unsaved changes without quitting.", fun: force_buffer_close_all, - completer: None, + signature: CommandSignature::none(), }, TypableCommand { name: "buffer-next", aliases: &["bn", "bnext"], doc: "Goto next buffer.", fun: buffer_next, - completer: None, + signature: CommandSignature::none(), }, TypableCommand { name: "buffer-previous", aliases: &["bp", "bprev"], doc: "Goto previous buffer.", fun: buffer_previous, - completer: None, + signature: CommandSignature::none(), }, TypableCommand { name: "write", aliases: &["w"], doc: "Write changes to disk. Accepts an optional path (:write some/path.txt)", fun: write, - completer: Some(completers::filename), + signature: CommandSignature::positional(&[completers::filename]), }, TypableCommand { name: "write!", aliases: &["w!"], doc: "Force write changes to disk creating necessary subdirectories. Accepts an optional path (:write some/path.txt)", fun: force_write, - completer: Some(completers::filename), + signature: CommandSignature::positional(&[completers::filename]), }, TypableCommand { name: "new", aliases: &["n"], doc: "Create a new scratch buffer.", fun: new_file, - completer: Some(completers::filename), + // TODO: This seems to complete with a filename, but doesn't use that filename to + // set the path of the newly created buffer. 
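`:reset-diff-change` above turns a diff hunk's line ranges into character ranges and swaps the changed lines back to the text from the diff base. A rough standalone sketch of that range arithmetic using `ropey` directly, with the hunk line numbers hard-coded in place of what `diff.nth_hunk(hunk_idx)` returns:

```rust
use ropey::Rope;

fn main() {
    // Pretend the diff engine reported a hunk covering line 1 (zero-based)
    // in both the diff base and the current document.
    let diff_base = Rope::from_str("fn main() {\n    println!(\"old\");\n}\n");
    let mut doc = Rope::from_str("fn main() {\n    println!(\"new\");\n}\n");
    let (before_start_line, before_end_line) = (1, 2); // stand-in for hunk.before
    let (after_start_line, after_end_line) = (1, 2); // stand-in for hunk.after

    // Convert the hunk's line ranges into char ranges, as the command does.
    let before_start = diff_base.line_to_char(before_start_line);
    let before_end = diff_base.line_to_char(before_end_line);
    let anchor = doc.line_to_char(after_start_line);
    let after_end = doc.line_to_char(after_end_line);

    // Replace the changed lines with the original text from the diff base.
    let original: String = diff_base.slice(before_start..before_end).to_string();
    doc.remove(anchor..after_end);
    doc.insert(anchor, &original);

    assert_eq!(doc.to_string(), diff_base.to_string());
}
```

The real command performs the replacement through a `Transaction` so it lands in undo history, and afterwards selects the restored text so the cursor ends up on the reverted hunk.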
+ signature: CommandSignature::positional(&[completers::filename]), }, TypableCommand { name: "format", aliases: &["fmt"], doc: "Format the file using the LSP formatter.", fun: format, - completer: None, + signature: CommandSignature::none(), }, TypableCommand { name: "indent-style", aliases: &[], doc: "Set the indentation style for editing. ('t' for tabs or 1-8 for number of spaces.)", fun: set_indent_style, - completer: None, + signature: CommandSignature::none(), }, TypableCommand { name: "line-ending", @@ -1946,399 +2272,428 @@ pub const TYPABLE_COMMAND_LIST: &[TypableCommand] = &[ #[cfg(feature = "unicode-lines")] doc: "Set the document's default line ending. Options: crlf, lf, cr, ff, nel.", fun: set_line_ending, - completer: None, + signature: CommandSignature::none(), }, TypableCommand { name: "earlier", aliases: &["ear"], doc: "Jump back to an earlier point in edit history. Accepts a number of steps or a time span.", fun: earlier, - completer: None, + signature: CommandSignature::none(), }, TypableCommand { name: "later", aliases: &["lat"], doc: "Jump to a later point in edit history. Accepts a number of steps or a time span.", fun: later, - completer: None, + signature: CommandSignature::none(), }, TypableCommand { name: "write-quit", aliases: &["wq", "x"], doc: "Write changes to disk and close the current view. Accepts an optional path (:wq some/path.txt)", fun: write_quit, - completer: Some(completers::filename), + signature: CommandSignature::positional(&[completers::filename]), }, TypableCommand { name: "write-quit!", aliases: &["wq!", "x!"], doc: "Write changes to disk and close the current view forcefully. Accepts an optional path (:wq! some/path.txt)", fun: force_write_quit, - completer: Some(completers::filename), + signature: CommandSignature::positional(&[completers::filename]), }, TypableCommand { name: "write-all", aliases: &["wa"], doc: "Write changes from all buffers to disk.", fun: write_all, - completer: None, + signature: CommandSignature::none(), }, TypableCommand { name: "write-quit-all", aliases: &["wqa", "xa"], doc: "Write changes from all buffers to disk and close all views.", fun: write_all_quit, - completer: None, + signature: CommandSignature::none(), }, TypableCommand { name: "write-quit-all!", aliases: &["wqa!", "xa!"], doc: "Write changes from all buffers to disk and close all views forcefully (ignoring unsaved changes).", fun: force_write_all_quit, - completer: None, + signature: CommandSignature::none(), }, TypableCommand { name: "quit-all", aliases: &["qa"], doc: "Close all views.", fun: quit_all, - completer: None, + signature: CommandSignature::none(), }, TypableCommand { name: "quit-all!", aliases: &["qa!"], doc: "Force close all views ignoring unsaved changes.", fun: force_quit_all, - completer: None, + signature: CommandSignature::none(), }, TypableCommand { name: "cquit", aliases: &["cq"], doc: "Quit with exit code (default 1). Accepts an optional integer exit code (:cq 2).", fun: cquit, - completer: None, + signature: CommandSignature::none(), }, TypableCommand { name: "cquit!", aliases: &["cq!"], doc: "Force quit with exit code (default 1) ignoring unsaved changes. Accepts an optional integer exit code (:cq! 
2).", fun: force_cquit, - completer: None, + signature: CommandSignature::none(), }, TypableCommand { name: "theme", aliases: &[], doc: "Change the editor theme (show current theme if no name specified).", fun: theme, - completer: Some(completers::theme), + signature: CommandSignature::positional(&[completers::theme]), }, TypableCommand { name: "clipboard-yank", aliases: &[], doc: "Yank main selection into system clipboard.", fun: yank_main_selection_to_clipboard, - completer: None, + signature: CommandSignature::none(), }, TypableCommand { name: "clipboard-yank-join", aliases: &[], doc: "Yank joined selections into system clipboard. A separator can be provided as first argument. Default value is newline.", // FIXME: current UI can't display long doc. fun: yank_joined_to_clipboard, - completer: None, + signature: CommandSignature::none(), }, TypableCommand { name: "primary-clipboard-yank", aliases: &[], doc: "Yank main selection into system primary clipboard.", fun: yank_main_selection_to_primary_clipboard, - completer: None, + signature: CommandSignature::none(), }, TypableCommand { name: "primary-clipboard-yank-join", aliases: &[], doc: "Yank joined selections into system primary clipboard. A separator can be provided as first argument. Default value is newline.", // FIXME: current UI can't display long doc. fun: yank_joined_to_primary_clipboard, - completer: None, + signature: CommandSignature::none(), }, TypableCommand { name: "clipboard-paste-after", aliases: &[], doc: "Paste system clipboard after selections.", fun: paste_clipboard_after, - completer: None, + signature: CommandSignature::none(), }, TypableCommand { name: "clipboard-paste-before", aliases: &[], doc: "Paste system clipboard before selections.", fun: paste_clipboard_before, - completer: None, + signature: CommandSignature::none(), }, TypableCommand { name: "clipboard-paste-replace", aliases: &[], doc: "Replace selections with content of system clipboard.", fun: replace_selections_with_clipboard, - completer: None, + signature: CommandSignature::none(), }, TypableCommand { name: "primary-clipboard-paste-after", aliases: &[], doc: "Paste primary clipboard after selections.", fun: paste_primary_clipboard_after, - completer: None, + signature: CommandSignature::none(), }, TypableCommand { name: "primary-clipboard-paste-before", aliases: &[], doc: "Paste primary clipboard before selections.", fun: paste_primary_clipboard_before, - completer: None, + signature: CommandSignature::none(), }, TypableCommand { name: "primary-clipboard-paste-replace", aliases: &[], doc: "Replace selections with content of system primary clipboard.", fun: replace_selections_with_primary_clipboard, - completer: None, + signature: CommandSignature::none(), }, TypableCommand { name: "show-clipboard-provider", aliases: &[], doc: "Show clipboard provider name in status bar.", fun: show_clipboard_provider, - completer: None, + signature: CommandSignature::none(), }, TypableCommand { name: "change-current-directory", aliases: &["cd"], doc: "Change the current working directory.", fun: change_current_directory, - completer: Some(completers::directory), + signature: CommandSignature::positional(&[completers::directory]), }, TypableCommand { name: "show-directory", aliases: &["pwd"], doc: "Show the current working directory.", fun: show_current_directory, - completer: None, + signature: CommandSignature::none(), }, TypableCommand { name: "encoding", aliases: &[], doc: "Set encoding. 
Based on `https://encoding.spec.whatwg.org`.", fun: set_encoding, - completer: None, + signature: CommandSignature::none(), + }, + TypableCommand { + name: "character-info", + aliases: &["char"], + doc: "Get info about the character under the primary cursor.", + fun: get_character_info, + signature: CommandSignature::none(), }, TypableCommand { name: "reload", aliases: &[], doc: "Discard changes and reload from the source file.", fun: reload, - completer: None, + signature: CommandSignature::none(), }, TypableCommand { name: "reload-all", aliases: &[], doc: "Discard changes and reload all documents from the source files.", fun: reload_all, - completer: None, + signature: CommandSignature::none(), }, TypableCommand { name: "update", aliases: &[], doc: "Write changes only if the file has been modified.", fun: update, - completer: None, + signature: CommandSignature::none(), }, TypableCommand { name: "lsp-workspace-command", aliases: &[], doc: "Open workspace command picker", fun: lsp_workspace_command, - completer: Some(completers::lsp_workspace_command), + signature: CommandSignature::positional(&[completers::lsp_workspace_command]), }, TypableCommand { name: "lsp-restart", aliases: &[], doc: "Restarts the Language Server that is in use by the current doc", fun: lsp_restart, - completer: None, + signature: CommandSignature::none(), + }, + TypableCommand { + name: "lsp-stop", + aliases: &[], + doc: "Stops the Language Server that is in use by the current doc", + fun: lsp_stop, + signature: CommandSignature::none(), }, TypableCommand { name: "tree-sitter-scopes", aliases: &[], doc: "Display tree sitter scopes, primarily for theming and development.", fun: tree_sitter_scopes, - completer: None, - }, + signature: CommandSignature::none(), + }, TypableCommand { name: "debug-start", aliases: &["dbg"], doc: "Start a debug session from a given template with given parameters.", fun: debug_start, - completer: None, + signature: CommandSignature::none(), }, TypableCommand { name: "debug-remote", aliases: &["dbg-tcp"], doc: "Connect to a debug adapter by TCP address and start a debugging session from a given template with given parameters.", fun: debug_remote, - completer: None, + signature: CommandSignature::none(), }, TypableCommand { name: "debug-eval", aliases: &[], doc: "Evaluate expression in current debug context.", fun: debug_eval, - completer: None, + signature: CommandSignature::none(), }, TypableCommand { name: "vsplit", aliases: &["vs"], doc: "Open the file in a vertical split.", fun: vsplit, - completer: Some(completers::filename), + signature: CommandSignature::all(completers::filename) }, TypableCommand { name: "vsplit-new", aliases: &["vnew"], doc: "Open a scratch buffer in a vertical split.", fun: vsplit_new, - completer: None, + signature: CommandSignature::none(), }, TypableCommand { name: "hsplit", aliases: &["hs", "sp"], doc: "Open the file in a horizontal split.", fun: hsplit, - completer: Some(completers::filename), + signature: CommandSignature::all(completers::filename) }, TypableCommand { name: "hsplit-new", aliases: &["hnew"], doc: "Open a scratch buffer in a horizontal split.", fun: hsplit_new, - completer: None, + signature: CommandSignature::none(), }, TypableCommand { name: "tutor", aliases: &[], doc: "Open the tutorial.", fun: tutor, - completer: None, + signature: CommandSignature::none(), }, TypableCommand { name: "goto", aliases: &["g"], doc: "Goto line number.", fun: goto_line_number, - completer: None, + signature: CommandSignature::none(), }, TypableCommand { name: 
"set-language", aliases: &["lang"], - doc: "Set the language of current buffer.", + doc: "Set the language of current buffer (show current language if no value specified).", fun: language, - completer: Some(completers::language), + signature: CommandSignature::positional(&[completers::language]), }, TypableCommand { name: "set-option", aliases: &["set"], doc: "Set a config option at runtime.\nFor example to disable smart case search, use `:set search.smart-case false`.", fun: set_option, - completer: Some(completers::setting), + // TODO: Add support for completion of the options value(s), when appropriate. + signature: CommandSignature::positional(&[completers::setting]), + }, + TypableCommand { + name: "toggle-option", + aliases: &["toggle"], + doc: "Toggle a boolean config option at runtime.\nFor example to toggle smart case search, use `:toggle search.smart-case`.", + fun: toggle_option, + signature: CommandSignature::positional(&[completers::setting]), }, TypableCommand { name: "get-option", aliases: &["get"], doc: "Get the current value of a config option.", fun: get_option, - completer: Some(completers::setting), + signature: CommandSignature::positional(&[completers::setting]), }, TypableCommand { name: "sort", aliases: &[], doc: "Sort ranges in selection.", fun: sort, - completer: None, + signature: CommandSignature::none(), }, TypableCommand { name: "rsort", aliases: &[], doc: "Sort ranges in selection in reverse order.", fun: sort_reverse, - completer: None, + signature: CommandSignature::none(), }, TypableCommand { name: "reflow", aliases: &[], doc: "Hard-wrap the current selection of lines to a given width.", fun: reflow, - completer: None, + signature: CommandSignature::none(), }, TypableCommand { name: "tree-sitter-subtree", aliases: &["ts-subtree"], doc: "Display tree sitter subtree under cursor, primarily for debugging queries.", fun: tree_sitter_subtree, - completer: None, + signature: CommandSignature::none(), }, TypableCommand { name: "config-reload", aliases: &[], doc: "Refresh user config.", fun: refresh_config, - completer: None, + signature: CommandSignature::none(), }, TypableCommand { name: "config-open", aliases: &[], doc: "Open the user config.toml file.", fun: open_config, - completer: None, + signature: CommandSignature::none(), }, TypableCommand { name: "log-open", aliases: &[], doc: "Open the helix log file.", fun: open_log, - completer: None, + signature: CommandSignature::none(), }, TypableCommand { name: "insert-output", aliases: &[], doc: "Run shell command, inserting output before each selection.", fun: insert_output, - completer: None, + signature: CommandSignature::none(), }, TypableCommand { name: "append-output", aliases: &[], doc: "Run shell command, appending output after each selection.", fun: append_output, - completer: None, + signature: CommandSignature::none(), }, TypableCommand { name: "pipe", aliases: &[], doc: "Pipe each selection to the shell command.", fun: pipe, - completer: None, + signature: CommandSignature::none(), }, TypableCommand { name: "pipe-to", aliases: &[], doc: "Pipe each selection to the shell command, ignoring output.", fun: pipe_to, - completer: None, + signature: CommandSignature::none(), }, TypableCommand { name: "run-shell-command", aliases: &["sh"], doc: "Run a shell command", fun: run_shell_command, - completer: Some(completers::directory), + signature: CommandSignature::all(completers::filename) + }, + TypableCommand { + name: "reset-diff-change", + aliases: &["diffget", "diffg"], + doc: "Reset the diff change at 
the cursor position.", + fun: reset_diff_change, + signature: CommandSignature::none(), }, ]; @@ -2355,8 +2710,6 @@ pub static TYPABLE_COMMAND_MAP: Lazy usize { + if shellwords.ends_with_whitespace() { + shellwords.words().len().saturating_sub(1) + } else { + shellwords.words().len().saturating_sub(2) + } +} + +#[test] +fn test_argument_number_of() { + let cases = vec![ + ("set-option", 0), + ("set-option ", 0), + ("set-option a", 0), + ("set-option asdf", 0), + ("set-option asdf ", 1), + ("set-option asdf xyz", 1), + ("set-option asdf xyz abc", 2), + ("set-option asdf xyz abc ", 3), + ]; + + for case in cases { + assert_eq!(case.1, argument_number_of(&Shellwords::from(case.0))); + } +} diff --git a/helix-term/src/compositor.rs b/helix-term/src/compositor.rs index 2e4a2e20..bcb3e449 100644 --- a/helix-term/src/compositor.rs +++ b/helix-term/src/compositor.rs @@ -7,6 +7,7 @@ use helix_view::graphics::{CursorKind, Rect}; use tui::buffer::Buffer as Surface; pub type Callback = Box; +pub type SyncCallback = Box; // Cursive-inspired pub enum EventResult { diff --git a/helix-term/src/health.rs b/helix-term/src/health.rs index 6558fe19..480c2c67 100644 --- a/helix-term/src/health.rs +++ b/helix-term/src/health.rs @@ -52,7 +52,7 @@ pub fn general() -> std::io::Result<()> { let config_file = helix_loader::config_file(); let lang_file = helix_loader::lang_config_file(); let log_file = helix_loader::log_file(); - let rt_dir = helix_loader::runtime_dir(); + let rt_dirs = helix_loader::runtime_dirs(); let clipboard_provider = get_clipboard_provider(); if config_file.exists() { @@ -66,17 +66,31 @@ pub fn general() -> std::io::Result<()> { writeln!(stdout, "Language file: default")?; } writeln!(stdout, "Log file: {}", log_file.display())?; - writeln!(stdout, "Runtime directory: {}", rt_dir.display())?; - - if let Ok(path) = std::fs::read_link(&rt_dir) { - let msg = format!("Runtime directory is symlinked to {}", path.display()); - writeln!(stdout, "{}", msg.yellow())?; - } - if !rt_dir.exists() { - writeln!(stdout, "{}", "Runtime directory does not exist.".red())?; - } - if rt_dir.read_dir().ok().map(|it| it.count()) == Some(0) { - writeln!(stdout, "{}", "Runtime directory is empty.".red())?; + writeln!( + stdout, + "Runtime directories: {}", + rt_dirs + .iter() + .map(|d| d.to_string_lossy()) + .collect::>() + .join(";") + )?; + for rt_dir in rt_dirs.iter() { + if let Ok(path) = std::fs::read_link(rt_dir) { + let msg = format!( + "Runtime directory {} is symlinked to: {}", + rt_dir.display(), + path.display() + ); + writeln!(stdout, "{}", msg.yellow())?; + } + if !rt_dir.exists() { + let msg = format!("Runtime directory does not exist: {}", rt_dir.display()); + writeln!(stdout, "{}", msg.yellow())?; + } else if rt_dir.read_dir().ok().map(|it| it.count()) == Some(0) { + let msg = format!("Runtime directory is empty: {}", rt_dir.display()); + writeln!(stdout, "{}", msg.yellow())?; + } } writeln!(stdout, "Clipboard provider: {}", clipboard_provider.name())?; diff --git a/helix-term/src/job.rs b/helix-term/src/job.rs index 2888b6eb..19f2521a 100644 --- a/helix-term/src/job.rs +++ b/helix-term/src/job.rs @@ -5,9 +5,12 @@ use crate::compositor::Compositor; use futures_util::future::{BoxFuture, Future, FutureExt}; use futures_util::stream::{FuturesUnordered, StreamExt}; +pub type EditorCompositorCallback = Box; +pub type EditorCallback = Box; + pub enum Callback { - EditorCompositor(Box), - Editor(Box), + EditorCompositor(EditorCompositorCallback), + Editor(EditorCallback), } pub type JobFuture = 
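The `argument_number_of` helper above decides which positional argument is being completed by counting shell words and checking whether the prompt ends in whitespace. The same rule over a plain `&str`, ignoring the quoting and escaping that `Shellwords` additionally handles (the `arg_index` name is illustrative):

```rust
/// Which positional argument is currently being completed?
/// "set-option a"     -> still typing argument 0
/// "set-option asdf " -> about to start argument 1
fn arg_index(input: &str) -> usize {
    let words = input.split_whitespace().count();
    if input.ends_with(char::is_whitespace) {
        // The last word is finished, so the next argument is being started.
        words.saturating_sub(1)
    } else {
        // The last word is still being typed; it is the current argument.
        words.saturating_sub(2)
    }
}

fn main() {
    assert_eq!(arg_index("set-option a"), 0);
    assert_eq!(arg_index("set-option asdf "), 1);
    assert_eq!(arg_index("set-option asdf xyz abc "), 3);
}
```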
BoxFuture<'static, anyhow::Result>>; diff --git a/helix-term/src/keymap.rs b/helix-term/src/keymap.rs index 4a131f0a..e94a5f66 100644 --- a/helix-term/src/keymap.rs +++ b/helix-term/src/keymap.rs @@ -184,7 +184,7 @@ impl<'de> serde::de::Visitor<'de> for KeyTrieVisitor { S: serde::de::SeqAccess<'de>, { let mut commands = Vec::new(); - while let Some(command) = seq.next_element::<&str>()? { + while let Some(command) = seq.next_element::()? { commands.push( command .parse::() @@ -600,4 +600,43 @@ mod tests { "Mismatch" ) } + + #[test] + fn escaped_keymap() { + use crate::commands::MappableCommand; + use helix_view::input::{KeyCode, KeyEvent, KeyModifiers}; + + let keys = r#" +"+" = [ + "select_all", + ":pipe sed -E 's/\\s+$//g'", +] + "#; + + let key = KeyEvent { + code: KeyCode::Char('+'), + modifiers: KeyModifiers::NONE, + }; + + let expectation = Keymap::new(KeyTrie::Node(KeyTrieNode::new( + "", + hashmap! { + key => KeyTrie::Sequence(vec!{ + MappableCommand::select_all, + MappableCommand::Typable { + name: "pipe".to_string(), + args: vec!{ + "sed".to_string(), + "-E".to_string(), + "'s/\\s+$//g'".to_string() + }, + doc: "".to_string(), + }, + }) + }, + vec![key], + ))); + + assert_eq!(toml::from_str(keys), Ok(expectation)); + } } diff --git a/helix-term/src/keymap/default.rs b/helix-term/src/keymap/default.rs index ef93dee0..9bd00280 100644 --- a/helix-term/src/keymap/default.rs +++ b/helix-term/src/keymap/default.rs @@ -7,8 +7,8 @@ use helix_core::hashmap; pub fn default() -> HashMap { let normal = keymap!({ "Normal mode" "h" | "left" => move_char_left, - "j" | "down" => move_line_down, - "k" | "up" => move_line_up, + "j" | "down" => move_visual_line_down, + "k" | "up" => move_visual_line_up, "l" | "right" => move_char_right, "t" => find_till_char, @@ -44,6 +44,7 @@ pub fn default() -> HashMap { "l" => goto_line_end, "s" => goto_first_nonwhitespace, "d" => goto_definition, + "D" => goto_declaration, "y" => goto_type_definition, "r" => goto_reference, "i" => goto_implementation, @@ -54,6 +55,8 @@ pub fn default() -> HashMap { "m" => goto_last_modified_file, "n" => goto_next_buffer, "p" => goto_previous_buffer, + "k" => move_line_up, + "j" => move_line_down, "." 
=> goto_last_modification, }, ":" => command_mode, @@ -220,6 +223,7 @@ pub fn default() -> HashMap { "'" => last_picker, "g" => { "Debug (experimental)" sticky=true "l" => dap_launch, + "r" => dap_restart, "b" => dap_toggle_breakpoint, "c" => dap_continue, "h" => dap_pause, @@ -320,8 +324,8 @@ pub fn default() -> HashMap { let mut select = normal.clone(); select.merge_nodes(keymap!({ "Select mode" "h" | "left" => extend_char_left, - "j" | "down" => extend_line_down, - "k" | "up" => extend_line_up, + "j" | "down" => extend_visual_line_down, + "k" | "up" => extend_visual_line_up, "l" | "right" => extend_char_right, "w" => extend_next_word_start, @@ -344,6 +348,10 @@ pub fn default() -> HashMap { "esc" => exit_select_mode, "v" => normal_mode, + "g" => { "Goto" + "k" => extend_line_up, + "j" => extend_line_down, + }, })); let insert = keymap!({ "Insert mode" "esc" => normal_mode, @@ -356,13 +364,13 @@ pub fn default() -> HashMap { "A-d" | "A-del" => delete_word_forward, "C-u" => kill_to_line_start, "C-k" => kill_to_line_end, - "C-h" | "backspace" => delete_char_backward, + "C-h" | "backspace" | "S-backspace" => delete_char_backward, "C-d" | "del" => delete_char_forward, "C-j" | "ret" => insert_newline, "tab" => insert_tab, - "up" => move_line_up, - "down" => move_line_down, + "up" => move_visual_line_up, + "down" => move_visual_line_down, "left" => move_char_left, "right" => move_char_right, "pageup" => page_up, diff --git a/helix-term/src/lib.rs b/helix-term/src/lib.rs index a945b20d..2f6ec12b 100644 --- a/helix-term/src/lib.rs +++ b/helix-term/src/lib.rs @@ -10,6 +10,9 @@ pub mod health; pub mod job; pub mod keymap; pub mod ui; +use std::path::Path; + +use ignore::DirEntry; pub use keymap::macros::*; #[cfg(not(windows))] @@ -22,3 +25,25 @@ fn true_color() -> bool { fn true_color() -> bool { true } + +/// Function used for filtering dir entries in the various file pickers. +fn filter_picker_entry(entry: &DirEntry, root: &Path, dedup_symlinks: bool) -> bool { + // We always want to ignore the .git directory, otherwise if + // `ignore` is turned off, we end up with a lot of noise + // in our picker. + if entry.file_name() == ".git" { + return false; + } + + // We also ignore symlinks that point inside the current directory + // if `dedup_links` is enabled. 
+ if dedup_symlinks && entry.path_is_symlink() { + return entry + .path() + .canonicalize() + .ok() + .map_or(false, |path| !path.starts_with(root)); + } + + true +} diff --git a/helix-term/src/ui/completion.rs b/helix-term/src/ui/completion.rs index 11d7886a..da6b5ddc 100644 --- a/helix-term/src/ui/completion.rs +++ b/helix-term/src/ui/completion.rs @@ -1,16 +1,16 @@ use crate::compositor::{Component, Context, Event, EventResult}; -use helix_view::{apply_transaction, editor::CompleteAction, ViewId}; -use tui::buffer::Buffer as Surface; -use tui::text::Spans; +use helix_view::{ + document::SavePoint, + editor::CompleteAction, + theme::{Modifier, Style}, + ViewId, +}; +use tui::{buffer::Buffer as Surface, text::Span}; -use std::borrow::Cow; +use std::{borrow::Cow, sync::Arc}; use helix_core::{Change, Transaction}; -use helix_view::{ - graphics::Rect, - input::{KeyCode, KeyEvent}, - Document, Editor, -}; +use helix_view::{graphics::Rect, Document, Editor}; use crate::commands; use crate::ui::{menu, Markdown, Menu, Popup, PromptEvent}; @@ -33,13 +33,20 @@ impl menu::Item for CompletionItem { .into() } - fn label(&self, _data: &Self::Data) -> Spans { - self.label.as_str().into() - } - - fn row(&self, _data: &Self::Data) -> menu::Row { + fn format(&self, _data: &Self::Data) -> menu::Row { + let deprecated = self.deprecated.unwrap_or_default() + || self.tags.as_ref().map_or(false, |tags| { + tags.contains(&lsp::CompletionItemTag::DEPRECATED) + }); menu::Row::new(vec![ - menu::Cell::from(self.label.as_str()), + menu::Cell::from(Span::styled( + self.label.as_str(), + if deprecated { + Style::default().add_modifier(Modifier::CROSSED_OUT) + } else { + Style::default() + }, + )), menu::Cell::from(match self.kind { Some(lsp::CompletionItemKind::TEXT) => "text", Some(lsp::CompletionItemKind::METHOD) => "method", @@ -95,11 +102,13 @@ impl Completion { pub fn new( editor: &Editor, + savepoint: Arc, mut items: Vec, offset_encoding: helix_lsp::OffsetEncoding, start_offset: usize, trigger_offset: usize, ) -> Self { + let replace_mode = editor.config().completion_replace; // Sort completion items according to their preselect status (given by the LSP server) items.sort_by_key(|item| !item.preselect.unwrap_or(false)); @@ -110,50 +119,89 @@ impl Completion { view_id: ViewId, item: &CompletionItem, offset_encoding: helix_lsp::OffsetEncoding, - start_offset: usize, trigger_offset: usize, + include_placeholder: bool, + replace_mode: bool, ) -> Transaction { - let transaction = if let Some(edit) = &item.text_edit { + use helix_lsp::snippet; + let selection = doc.selection(view_id); + let text = doc.text().slice(..); + let primary_cursor = selection.primary().cursor(text); + + let (edit_offset, new_text) = if let Some(edit) = &item.text_edit { let edit = match edit { lsp::CompletionTextEdit::Edit(edit) => edit.clone(), lsp::CompletionTextEdit::InsertAndReplace(item) => { - // TODO: support using "insert" instead of "replace" via user config - lsp::TextEdit::new(item.replace, item.new_text.clone()) + let range = if replace_mode { + item.replace + } else { + item.insert + }; + lsp::TextEdit::new(range, item.new_text.clone()) } }; - util::generate_transaction_from_completion_edit( - doc.text(), - doc.selection(view_id), - edit, - offset_encoding, // TODO: should probably transcode in Client - ) + let start_offset = + match util::lsp_pos_to_pos(doc.text(), edit.range.start, offset_encoding) { + Some(start) => start as i128 - primary_cursor as i128, + None => return Transaction::new(doc.text()), + }; + let end_offset = + 
match util::lsp_pos_to_pos(doc.text(), edit.range.end, offset_encoding) { + Some(end) => end as i128 - primary_cursor as i128, + None => return Transaction::new(doc.text()), + }; + + (Some((start_offset, end_offset)), edit.new_text) } else { - let text = item.insert_text.as_ref().unwrap_or(&item.label); - // Some LSPs just give you an insertText with no offset ¯\_(ツ)_/¯ - // in these cases we need to check for a common prefix and remove it - let prefix = Cow::from(doc.text().slice(start_offset..trigger_offset)); - let text = text.trim_start_matches::<&str>(&prefix); - - // TODO: this needs to be true for the numbers to work out correctly - // in the closure below. It's passed in to a callback as this same - // formula, but can the value change between the LSP request and - // response? If it does, can we recover? - debug_assert!( - doc.selection(view_id) - .primary() - .cursor(doc.text().slice(..)) - == trigger_offset - ); - - Transaction::change_by_selection(doc.text(), doc.selection(view_id), |range| { - let cursor = range.cursor(doc.text().slice(..)); - - (cursor, cursor, Some(text.into())) - }) + let new_text = item + .insert_text + .clone() + .unwrap_or_else(|| item.label.clone()); + // check that we are still at the correct savepoint + // we can still generate a transaction regardless but if the + // document changed (and not just the selection) then we will + // likely delete the wrong text (same if we applied an edit sent by the LS) + debug_assert!(primary_cursor == trigger_offset); + (None, new_text) }; - transaction + if matches!(item.kind, Some(lsp::CompletionItemKind::SNIPPET)) + || matches!( + item.insert_text_format, + Some(lsp::InsertTextFormat::SNIPPET) + ) + { + match snippet::parse(&new_text) { + Ok(snippet) => util::generate_transaction_from_snippet( + doc.text(), + selection, + edit_offset, + replace_mode, + snippet, + doc.line_ending.as_str(), + include_placeholder, + doc.tab_width(), + doc.indent_width(), + ), + Err(err) => { + log::error!( + "Failed to parse snippet: {:?}, remaining output: {}", + &new_text, + err + ); + Transaction::new(doc.text()) + } + } + } else { + util::generate_transaction_from_completion_edit( + doc.text(), + selection, + edit_offset, + replace_mode, + new_text, + ) + } } fn completion_changes(transaction: &Transaction, trigger_offset: usize) -> Vec { @@ -166,11 +214,10 @@ impl Completion { let (view, doc) = current!(editor); // if more text was entered, remove it - doc.restore(view); + doc.restore(view, &savepoint); match event { PromptEvent::Abort => { - doc.restore(view); editor.last_completion = None; } PromptEvent::Update => { @@ -182,13 +229,13 @@ impl Completion { view.id, item, offset_encoding, - start_offset, trigger_offset, + true, + replace_mode, ); // initialize a savepoint - doc.savepoint(); - apply_transaction(&transaction, doc, view); + doc.apply(&transaction, view.id); editor.last_completion = Some(CompleteAction { trigger_offset, @@ -204,11 +251,12 @@ impl Completion { view.id, item, offset_encoding, - start_offset, trigger_offset, + false, + replace_mode, ); - apply_transaction(&transaction, doc, view); + doc.apply(&transaction, view.id); editor.last_completion = Some(CompleteAction { trigger_offset, @@ -238,13 +286,15 @@ impl Completion { additional_edits.clone(), offset_encoding, // TODO: should probably transcode in Client ); - apply_transaction(&transaction, doc, view); + doc.apply(&transaction, view.id); } } } }; }); - let popup = Popup::new(Self::ID, menu).with_scrollbar(false); + let popup = Popup::new(Self::ID, menu) 
+ .with_scrollbar(false) + .ignore_escape_key(true); let mut completion = Self { popup, start_offset, @@ -368,13 +418,6 @@ impl Completion { impl Component for Completion { fn handle_event(&mut self, event: &Event, cx: &mut Context) -> EventResult { - // let the Editor handle Esc instead - if let Event::Key(KeyEvent { - code: KeyCode::Esc, .. - }) = event - { - return EventResult::Ignored(None); - } self.popup.handle_event(event, cx) } @@ -386,102 +429,102 @@ impl Component for Completion { self.popup.render(area, surface, cx); // if we have a selection, render a markdown popup on top/below with info - if let Some(option) = self.popup.contents().selection() { - // need to render: - // option.detail - // --- - // option.documentation - - let (view, doc) = current!(cx.editor); - let language = doc.language_name().unwrap_or(""); - let text = doc.text().slice(..); - let cursor_pos = doc.selection(view.id).primary().cursor(text); - let coords = helix_core::visual_coords_at_pos(text, cursor_pos, doc.tab_width()); - let cursor_pos = (coords.row - view.offset.row) as u16; - - let mut markdown_doc = match &option.documentation { - Some(lsp::Documentation::String(contents)) - | Some(lsp::Documentation::MarkupContent(lsp::MarkupContent { - kind: lsp::MarkupKind::PlainText, - value: contents, - })) => { - // TODO: convert to wrapped text - Markdown::new( - format!( - "```{}\n{}\n```\n{}", - language, - option.detail.as_deref().unwrap_or_default(), - contents - ), - cx.editor.syn_loader.clone(), - ) - } - Some(lsp::Documentation::MarkupContent(lsp::MarkupContent { - kind: lsp::MarkupKind::Markdown, - value: contents, - })) => { - // TODO: set language based on doc scope - if let Some(detail) = &option.detail.as_deref() { - Markdown::new( - format!("```{}\n{}\n```\n{}", language, detail, contents), - cx.editor.syn_loader.clone(), - ) - } else { - Markdown::new(contents.to_string(), cx.editor.syn_loader.clone()) - } - } - None if option.detail.is_some() => { - // TODO: copied from above - - // TODO: set language based on doc scope - Markdown::new( - format!( - "```{}\n{}\n```", - language, - option.detail.as_deref().unwrap_or_default(), - ), - cx.editor.syn_loader.clone(), - ) - } - None => return, + let option = match self.popup.contents().selection() { + Some(option) => option, + None => return, + }; + // need to render: + // option.detail + // --- + // option.documentation + + let (view, doc) = current!(cx.editor); + let language = doc.language_name().unwrap_or(""); + let text = doc.text().slice(..); + let cursor_pos = doc.selection(view.id).primary().cursor(text); + let coords = view + .screen_coords_at_pos(doc, text, cursor_pos) + .expect("cursor must be in view"); + let cursor_pos = coords.row as u16; + + let markdowned = |lang: &str, detail: Option<&str>, doc: Option<&str>| { + let md = match (detail, doc) { + (Some(detail), Some(doc)) => format!("```{lang}\n{detail}\n```\n{doc}"), + (Some(detail), None) => format!("```{lang}\n{detail}\n```"), + (None, Some(doc)) => doc.to_string(), + (None, None) => String::new(), }; + Markdown::new(md, cx.editor.syn_loader.clone()) + }; + + let mut markdown_doc = match &option.documentation { + Some(lsp::Documentation::String(contents)) + | Some(lsp::Documentation::MarkupContent(lsp::MarkupContent { + kind: lsp::MarkupKind::PlainText, + value: contents, + })) => { + // TODO: convert to wrapped text + markdowned(language, option.detail.as_deref(), Some(contents)) + } + Some(lsp::Documentation::MarkupContent(lsp::MarkupContent { + kind: lsp::MarkupKind::Markdown, 
+ value: contents, + })) => { + // TODO: set language based on doc scope + markdowned(language, option.detail.as_deref(), Some(contents)) + } + None if option.detail.is_some() => { + // TODO: set language based on doc scope + markdowned(language, option.detail.as_deref(), None) + } + None => return, + }; + let popup_area = { let (popup_x, popup_y) = self.popup.get_rel_position(area, cx); - let (popup_width, _popup_height) = self.popup.get_size(); - let mut width = area - .width - .saturating_sub(popup_x) - .saturating_sub(popup_width); - let area = if width > 30 { - let mut height = area.height.saturating_sub(popup_y); - let x = popup_x + popup_width; - let y = popup_y; - - if let Some((rel_width, rel_height)) = markdown_doc.required_size((width, height)) { - width = rel_width.min(width); - height = rel_height.min(height); - } - Rect::new(x, y, width, height) - } else { - let half = area.height / 2; - let height = 15.min(half); - // we want to make sure the cursor is visible (not hidden behind the documentation) - let y = if cursor_pos + area.y - >= (cx.editor.tree.area().height - height - 2/* statusline + commandline */) - { - 0 - } else { - // -2 to subtract command line + statusline. a bit of a hack, because of splits. - area.height.saturating_sub(height).saturating_sub(2) - }; + let (popup_width, popup_height) = self.popup.get_size(); + Rect::new(popup_x, popup_y, popup_width, popup_height) + }; - Rect::new(0, y, area.width, height) + let doc_width_available = area.width.saturating_sub(popup_area.right()); + let doc_area = if doc_width_available > 30 { + let mut doc_width = doc_width_available; + let mut doc_height = area.height.saturating_sub(popup_area.top()); + let x = popup_area.right(); + let y = popup_area.top(); + + if let Some((rel_width, rel_height)) = + markdown_doc.required_size((doc_width, doc_height)) + { + doc_width = rel_width.min(doc_width); + doc_height = rel_height.min(doc_height); + } + Rect::new(x, y, doc_width, doc_height) + } else { + // Documentation should not cover the cursor or the completion popup + // Completion popup could be above or below the current line + let avail_height_above = cursor_pos.min(popup_area.top()).saturating_sub(1); + let avail_height_below = area + .height + .saturating_sub(cursor_pos.max(popup_area.bottom()) + 1 /* padding */); + let (y, avail_height) = if avail_height_below >= avail_height_above { + ( + area.height.saturating_sub(avail_height_below), + avail_height_below, + ) + } else { + (0, avail_height_above) }; + if avail_height <= 1 { + return; + } - // clear area - let background = cx.editor.theme.get("ui.popup"); - surface.clear_with(area, background); - markdown_doc.render(area, surface, cx); - } + Rect::new(0, y, area.width, avail_height.min(15)) + }; + + // clear area + let background = cx.editor.theme.get("ui.popup"); + surface.clear_with(doc_area, background); + markdown_doc.render(doc_area, surface, cx); } } diff --git a/helix-term/src/ui/document.rs b/helix-term/src/ui/document.rs new file mode 100644 index 00000000..d4176264 --- /dev/null +++ b/helix-term/src/ui/document.rs @@ -0,0 +1,485 @@ +use std::cmp::min; + +use helix_core::doc_formatter::{DocumentFormatter, GraphemeSource, TextFormat}; +use helix_core::graphemes::Grapheme; +use helix_core::str_utils::char_to_byte_idx; +use helix_core::syntax::Highlight; +use helix_core::syntax::HighlightEvent; +use helix_core::text_annotations::TextAnnotations; +use helix_core::{visual_offset_from_block, Position, RopeSlice}; +use helix_view::editor::{WhitespaceConfig, 
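The completion documentation placement above boils down to picking whichever side of the cursor and completion menu has more free rows, and giving up when fewer than two rows remain. A standalone sketch of that decision, with an illustrative `doc_popup_rows` helper and made-up dimensions:

```rust
/// Decide where a docs popup goes so it covers neither the cursor row nor
/// the completion popup, preferring whichever side has more room.
/// Returns `(first_row, height)`, or `None` if there is no usable space.
fn doc_popup_rows(
    area_height: u16,
    cursor_row: u16,
    popup_top: u16,
    popup_bottom: u16,
) -> Option<(u16, u16)> {
    let avail_above = cursor_row.min(popup_top).saturating_sub(1);
    let avail_below = area_height.saturating_sub(cursor_row.max(popup_bottom) + 1);
    let (y, avail) = if avail_below >= avail_above {
        (area_height.saturating_sub(avail_below), avail_below)
    } else {
        (0, avail_above)
    };
    if avail <= 1 {
        return None;
    }
    Some((y, avail.min(15)))
}

fn main() {
    // 40-row area, cursor on row 10, completion menu on rows 11..=20:
    // there is more room below, so the docs start at row 21, capped at 15 rows.
    assert_eq!(doc_popup_rows(40, 10, 11, 20), Some((21, 15)));
}
```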
WhitespaceRenderValue}; +use helix_view::graphics::Rect; +use helix_view::theme::Style; +use helix_view::view::ViewPosition; +use helix_view::Document; +use helix_view::Theme; +use tui::buffer::Buffer as Surface; + +pub trait LineDecoration { + fn render_background(&mut self, _renderer: &mut TextRenderer, _pos: LinePos) {} + fn render_foreground( + &mut self, + _renderer: &mut TextRenderer, + _pos: LinePos, + _end_char_idx: usize, + ) { + } +} + +impl LineDecoration for F { + fn render_background(&mut self, renderer: &mut TextRenderer, pos: LinePos) { + self(renderer, pos) + } +} + +/// A wrapper around a HighlightIterator +/// that merges the layered highlights to create the final text style +/// and yields the active text style and the char_idx where the active +/// style will have to be recomputed. +struct StyleIter<'a, H: Iterator> { + text_style: Style, + active_highlights: Vec, + highlight_iter: H, + theme: &'a Theme, +} + +impl> Iterator for StyleIter<'_, H> { + type Item = (Style, usize); + fn next(&mut self) -> Option<(Style, usize)> { + while let Some(event) = self.highlight_iter.next() { + match event { + HighlightEvent::HighlightStart(highlights) => { + self.active_highlights.push(highlights) + } + HighlightEvent::HighlightEnd => { + self.active_highlights.pop(); + } + HighlightEvent::Source { start, end } => { + if start == end { + continue; + } + let style = self + .active_highlights + .iter() + .fold(self.text_style, |acc, span| { + acc.patch(self.theme.highlight(span.0)) + }); + return Some((style, end)); + } + } + } + None + } +} + +#[derive(Debug, PartialEq, Eq, Copy, Clone)] +pub struct LinePos { + /// Indicates whether the given visual line + /// is the first visual line of the given document line + pub first_visual_line: bool, + /// The line index of the document line that contains the given visual line + pub doc_line: usize, + /// Vertical offset from the top of the inner view area + pub visual_line: u16, + /// The first char index of this visual line. 
+ /// Note that if the visual line is entirely filled by + /// a very long inline virtual text then this index will point + /// at the next (non-virtual) char after this visual line + pub start_char_idx: usize, +} + +pub type TranslatedPosition<'a> = (usize, Box); + +#[allow(clippy::too_many_arguments)] +pub fn render_document( + surface: &mut Surface, + viewport: Rect, + doc: &Document, + offset: ViewPosition, + doc_annotations: &TextAnnotations, + highlight_iter: impl Iterator, + theme: &Theme, + line_decoration: &mut [Box], + translated_positions: &mut [TranslatedPosition], +) { + let mut renderer = TextRenderer::new(surface, doc, theme, offset.horizontal_offset, viewport); + render_text( + &mut renderer, + doc.text().slice(..), + offset, + &doc.text_format(viewport.width, Some(theme)), + doc_annotations, + highlight_iter, + theme, + line_decoration, + translated_positions, + ) +} + +fn translate_positions( + char_pos: usize, + first_visisble_char_idx: usize, + translated_positions: &mut [TranslatedPosition], + text_fmt: &TextFormat, + renderer: &mut TextRenderer, + pos: Position, +) { + // check if any positions translated on the fly (like cursor) has been reached + for (char_idx, callback) in &mut *translated_positions { + if *char_idx < char_pos && *char_idx >= first_visisble_char_idx { + // by replacing the char_index with usize::MAX large number we ensure + // that the same position is only translated once + // text will never reach usize::MAX as rust memory allocations are limited + // to isize::MAX + *char_idx = usize::MAX; + + if text_fmt.soft_wrap { + callback(renderer, pos) + } else if pos.col >= renderer.col_offset + && pos.col - renderer.col_offset < renderer.viewport.width as usize + { + callback( + renderer, + Position { + row: pos.row, + col: pos.col - renderer.col_offset, + }, + ) + } + } + } +} + +#[allow(clippy::too_many_arguments)] +pub fn render_text<'t>( + renderer: &mut TextRenderer, + text: RopeSlice<'t>, + offset: ViewPosition, + text_fmt: &TextFormat, + text_annotations: &TextAnnotations, + highlight_iter: impl Iterator, + theme: &Theme, + line_decorations: &mut [Box], + translated_positions: &mut [TranslatedPosition], +) { + let ( + Position { + row: mut row_off, .. 
+ }, + mut char_pos, + ) = visual_offset_from_block( + text, + offset.anchor, + offset.anchor, + text_fmt, + text_annotations, + ); + row_off += offset.vertical_offset; + assert_eq!(0, offset.vertical_offset); + + let (mut formatter, mut first_visible_char_idx) = + DocumentFormatter::new_at_prev_checkpoint(text, text_fmt, text_annotations, offset.anchor); + let mut styles = StyleIter { + text_style: renderer.text_style, + active_highlights: Vec::with_capacity(64), + highlight_iter, + theme, + }; + + let mut last_line_pos = LinePos { + first_visual_line: false, + doc_line: usize::MAX, + visual_line: u16::MAX, + start_char_idx: usize::MAX, + }; + let mut is_in_indent_area = true; + let mut last_line_indent_level = 0; + let mut style_span = styles + .next() + .unwrap_or_else(|| (Style::default(), usize::MAX)); + + loop { + // formattter.line_pos returns to line index of the next grapheme + // so it must be called before formatter.next + let doc_line = formatter.line_pos(); + let Some((grapheme, mut pos)) = formatter.next() else { + let mut last_pos = formatter.visual_pos(); + if last_pos.row >= row_off { + last_pos.col -= 1; + last_pos.row -= row_off; + // check if any positions translated on the fly (like cursor) are at the EOF + translate_positions( + char_pos + 1, + first_visible_char_idx, + translated_positions, + text_fmt, + renderer, + last_pos, + ); + } + break; + }; + + // skip any graphemes on visual lines before the block start + if pos.row < row_off { + if char_pos >= style_span.1 { + style_span = if let Some(style_span) = styles.next() { + style_span + } else { + break; + } + } + char_pos += grapheme.doc_chars(); + first_visible_char_idx = char_pos + 1; + continue; + } + pos.row -= row_off; + + // if the end of the viewport is reached stop rendering + if pos.row as u16 >= renderer.viewport.height { + break; + } + + // apply decorations before rendering a new line + if pos.row as u16 != last_line_pos.visual_line { + if pos.row > 0 { + renderer.draw_indent_guides(last_line_indent_level, last_line_pos.visual_line); + is_in_indent_area = true; + for line_decoration in &mut *line_decorations { + line_decoration.render_foreground(renderer, last_line_pos, char_pos); + } + } + last_line_pos = LinePos { + first_visual_line: doc_line != last_line_pos.doc_line, + doc_line, + visual_line: pos.row as u16, + start_char_idx: char_pos, + }; + for line_decoration in &mut *line_decorations { + line_decoration.render_background(renderer, last_line_pos); + } + } + + // aquire the correct grapheme style + if char_pos >= style_span.1 { + style_span = styles.next().unwrap_or((Style::default(), usize::MAX)); + } + char_pos += grapheme.doc_chars(); + + // check if any positions translated on the fly (like cursor) has been reached + translate_positions( + char_pos, + first_visible_char_idx, + translated_positions, + text_fmt, + renderer, + pos, + ); + + let grapheme_style = if let GraphemeSource::VirtualText { highlight } = grapheme.source { + let style = renderer.text_style; + if let Some(highlight) = highlight { + style.patch(theme.highlight(highlight.0)) + } else { + style + } + } else { + style_span.0 + }; + + let virt = grapheme.is_virtual(); + renderer.draw_grapheme( + grapheme.grapheme, + grapheme_style, + virt, + &mut last_line_indent_level, + &mut is_in_indent_area, + pos, + ); + } + + renderer.draw_indent_guides(last_line_indent_level, last_line_pos.visual_line); + for line_decoration in &mut *line_decorations { + line_decoration.render_foreground(renderer, last_line_pos, char_pos); + } +} + 
+#[derive(Debug)] +pub struct TextRenderer<'a> { + pub surface: &'a mut Surface, + pub text_style: Style, + pub whitespace_style: Style, + pub indent_guide_char: String, + pub indent_guide_style: Style, + pub newline: String, + pub nbsp: String, + pub space: String, + pub tab: String, + pub virtual_tab: String, + pub indent_width: u16, + pub starting_indent: usize, + pub draw_indent_guides: bool, + pub col_offset: usize, + pub viewport: Rect, +} + +impl<'a> TextRenderer<'a> { + pub fn new( + surface: &'a mut Surface, + doc: &Document, + theme: &Theme, + col_offset: usize, + viewport: Rect, + ) -> TextRenderer<'a> { + let editor_config = doc.config.load(); + let WhitespaceConfig { + render: ws_render, + characters: ws_chars, + } = &editor_config.whitespace; + + let tab_width = doc.tab_width(); + let tab = if ws_render.tab() == WhitespaceRenderValue::All { + std::iter::once(ws_chars.tab) + .chain(std::iter::repeat(ws_chars.tabpad).take(tab_width - 1)) + .collect() + } else { + " ".repeat(tab_width) + }; + let virtual_tab = " ".repeat(tab_width); + let newline = if ws_render.newline() == WhitespaceRenderValue::All { + ws_chars.newline.into() + } else { + " ".to_owned() + }; + + let space = if ws_render.space() == WhitespaceRenderValue::All { + ws_chars.space.into() + } else { + " ".to_owned() + }; + let nbsp = if ws_render.nbsp() == WhitespaceRenderValue::All { + ws_chars.nbsp.into() + } else { + " ".to_owned() + }; + + let text_style = theme.get("ui.text"); + + let indent_width = doc.indent_style.indent_width(tab_width) as u16; + + TextRenderer { + surface, + indent_guide_char: editor_config.indent_guides.character.into(), + newline, + nbsp, + space, + tab, + virtual_tab, + whitespace_style: theme.get("ui.virtual.whitespace"), + indent_width, + starting_indent: col_offset / indent_width as usize + + (col_offset % indent_width as usize != 0) as usize + + editor_config.indent_guides.skip_levels as usize, + indent_guide_style: text_style.patch( + theme + .try_get("ui.virtual.indent-guide") + .unwrap_or_else(|| theme.get("ui.virtual.whitespace")), + ), + text_style, + draw_indent_guides: editor_config.indent_guides.render, + viewport, + col_offset, + } + } + + /// Draws a single `grapheme` at the current render position with a specified `style`. + pub fn draw_grapheme( + &mut self, + grapheme: Grapheme, + mut style: Style, + is_virtual: bool, + last_indent_level: &mut usize, + is_in_indent_area: &mut bool, + position: Position, + ) { + let cut_off_start = self.col_offset.saturating_sub(position.col); + let is_whitespace = grapheme.is_whitespace(); + + // TODO is it correct to apply the whitspace style to all unicode white spaces? + if is_whitespace { + style = style.patch(self.whitespace_style); + } + + let width = grapheme.width(); + let space = if is_virtual { " " } else { &self.space }; + let nbsp = if is_virtual { " " } else { &self.nbsp }; + let tab = if is_virtual { + &self.virtual_tab + } else { + &self.tab + }; + let grapheme = match grapheme { + Grapheme::Tab { width } => { + let grapheme_tab_width = char_to_byte_idx(tab, width); + &tab[..grapheme_tab_width] + } + // TODO special rendering for other whitespaces? 
+ Grapheme::Other { ref g } if g == " " => space, + Grapheme::Other { ref g } if g == "\u{00A0}" => nbsp, + Grapheme::Other { ref g } => g, + Grapheme::Newline => &self.newline, + }; + + let in_bounds = self.col_offset <= position.col + && position.col < self.viewport.width as usize + self.col_offset; + + if in_bounds { + self.surface.set_string( + self.viewport.x + (position.col - self.col_offset) as u16, + self.viewport.y + position.row as u16, + grapheme, + style, + ); + } else if cut_off_start != 0 && cut_off_start < width { + // partially on screen + let rect = Rect::new( + self.viewport.x, + self.viewport.y + position.row as u16, + (width - cut_off_start) as u16, + 1, + ); + self.surface.set_style(rect, style); + } + + if *is_in_indent_area && !is_whitespace { + *last_indent_level = position.col; + *is_in_indent_area = false; + } + } + + /// Overlay indentation guides ontop of a rendered line + /// The indentation level is computed in `draw_lines`. + /// Therefore this function must always be called afterwards. + pub fn draw_indent_guides(&mut self, indent_level: usize, row: u16) { + if !self.draw_indent_guides { + return; + } + + // Don't draw indent guides outside of view + let end_indent = min( + indent_level, + // Add indent_width - 1 to round up, since the first visible + // indent might be a bit after offset.col + self.col_offset + self.viewport.width as usize + (self.indent_width as usize - 1), + ) / self.indent_width as usize; + + for i in self.starting_indent..end_indent { + let x = (self.viewport.x as usize + (i * self.indent_width as usize) - self.col_offset) + as u16; + let y = self.viewport.y + row; + debug_assert!(self.surface.in_bounds(x, y)); + self.surface + .set_string(x, y, &self.indent_guide_char, self.indent_guide_style); + } + } +} diff --git a/helix-term/src/ui/editor.rs b/helix-term/src/ui/editor.rs index 35cf77ab..0b6ab046 100644 --- a/helix-term/src/ui/editor.rs +++ b/helix-term/src/ui/editor.rs @@ -1,42 +1,46 @@ use crate::{ - commands, + commands::{self, OnKeyCallback}, compositor::{Component, Context, Event, EventResult}, job::{self, Callback}, key, keymap::{KeymapResult, Keymaps}, - ui::{Completion, ProgressSpinners}, + ui::{ + document::{render_document, LinePos, TextRenderer, TranslatedPosition}, + Completion, ProgressSpinners, + }, }; use helix_core::{ + diagnostic::NumberOrString, graphemes::{ ensure_grapheme_boundary_next_byte, next_grapheme_boundary, prev_grapheme_boundary, }, movement::Direction, syntax::{self, HighlightEvent}, + text_annotations::TextAnnotations, unicode::width::UnicodeWidthStr, - visual_coords_at_pos, LineEnding, Position, Range, Selection, Transaction, + visual_offset_from_block, Position, Range, Selection, Transaction, }; use helix_view::{ - apply_transaction, - document::{Mode, SCRATCH_BUFFER_NAME}, + document::{Mode, SavePoint, SCRATCH_BUFFER_NAME}, editor::{CompleteAction, CursorShapeConfig}, graphics::{Color, CursorKind, Modifier, Rect, Style}, input::{KeyEvent, MouseButton, MouseEvent, MouseEventKind}, keyboard::{KeyCode, KeyModifiers}, Document, Editor, Theme, View, }; -use std::{borrow::Cow, cmp::min, num::NonZeroUsize, path::PathBuf}; +use std::{mem::take, num::NonZeroUsize, path::PathBuf, rc::Rc, sync::Arc}; -use tui::buffer::Buffer as Surface; +use tui::{buffer::Buffer as Surface, text::Span}; -use super::lsp::SignatureHelp; use super::statusline; +use super::{document::LineDecoration, lsp::SignatureHelp}; pub struct EditorView { pub keymaps: Keymaps, - on_next_key: Option>, + on_next_key: Option, pseudo_pending: 
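`draw_indent_guides` above reduces to a little column arithmetic: clamp the indent level to what can still be visible, divide by the indent width, and offset each guide by the horizontal scroll. Pulled out as a standalone helper (the name and numbers are illustrative; as in the code above, `starting_indent` is what keeps the subtraction from underflowing when the view is scrolled):

```rust
/// Viewport-relative columns at which indent guides are drawn for a line
/// indented to `indent_level`, with the view scrolled right by `col_offset`.
fn indent_guide_columns(
    indent_level: usize,
    indent_width: usize,
    col_offset: usize,
    viewport_width: usize,
    starting_indent: usize,
) -> Vec<usize> {
    // Add indent_width - 1 so a guide that is only partially scrolled into
    // view is still drawn (round up instead of down).
    let end_indent =
        indent_level.min(col_offset + viewport_width + (indent_width - 1)) / indent_width;
    (starting_indent..end_indent)
        .map(|i| i * indent_width - col_offset)
        .collect()
}

fn main() {
    // Twelve columns of indentation, four-wide indents, no horizontal scroll:
    // guides land on columns 0, 4 and 8.
    assert_eq!(indent_guide_columns(12, 4, 0, 80, 0), vec![0, 4, 8]);
}
```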
Vec, - last_insert: (commands::MappableCommand, Vec), + pub(crate) last_insert: (commands::MappableCommand, Vec), pub(crate) completion: Option, spinners: ProgressSpinners, } @@ -46,6 +50,7 @@ pub enum InsertEvent { Key(KeyEvent), CompletionApply(CompleteAction), TriggerCompletion, + RequestCompletion, } impl Default for EditorView { @@ -84,6 +89,10 @@ impl EditorView { let theme = &editor.theme; let config = editor.config(); + let text_annotations = view.text_annotations(doc, Some(theme)); + let mut line_decorations: Vec> = Vec::new(); + let mut translated_positions: Vec = Vec::new(); + // DAP: Highlight current stack frame position let stack_frame = editor.debugger.as_ref().and_then(|debugger| { if let (Some(frame), Some(thread_id)) = (debugger.active_frame, debugger.thread_id) { @@ -104,28 +113,40 @@ impl EditorView { == doc.path() { let line = frame.line - 1; // convert to 0-indexing - if line >= view.offset.row && line < view.offset.row + area.height as usize { - surface.set_style( - Rect::new( - area.x, - area.y + (line - view.offset.row) as u16, - area.width, - 1, - ), - theme.get("ui.highlight"), - ); - } + let style = theme.get("ui.highlight"); + let line_decoration = move |renderer: &mut TextRenderer, pos: LinePos| { + if pos.doc_line != line { + return; + } + renderer + .surface + .set_style(Rect::new(area.x, pos.visual_line, area.width, 1), style); + }; + + line_decorations.push(Box::new(line_decoration)); } } if is_focused && config.cursorline { - Self::highlight_cursorline(doc, view, surface, theme); + line_decorations.push(Self::cursorline_decorator(doc, view, theme)) } + if is_focused && config.cursorcolumn { - Self::highlight_cursorcolumn(doc, view, surface, theme); + Self::highlight_cursorcolumn(doc, view, surface, theme, inner, &text_annotations); + } + + let mut highlights = + Self::doc_syntax_highlights(doc, view.offset.anchor, inner.height, theme); + let overlay_highlights = Self::overlay_syntax_highlights( + doc, + view.offset.anchor, + inner.height, + &text_annotations, + ); + if !overlay_highlights.is_empty() { + highlights = Box::new(syntax::merge(highlights, overlay_highlights)); } - let mut highlights = Self::doc_syntax_highlights(doc, view.offset, inner.height, theme); for diagnostic in Self::doc_diagnostics_highlights(doc, theme) { // Most of the `diagnostic` Vecs are empty most of the time. Skipping // a merge for any empty Vec saves a significant amount of work. 
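(Illustrative sketch, not part of the patch.) The hunk above replaces direct drawing onto the surface with boxed line decorations that are collected in `line_decorations` and later invoked by `render_document` once per rendered line. Following the same closure pattern as the DAP stack-frame and cursorline decorations shown above, and assuming the `TextRenderer`/`LinePos` shapes they use (`doc_line`, `visual_line`, `first_visual_line`), a hypothetical extra decoration that dims every tenth document line could be registered roughly like this:

    // Hypothetical example only; reuses an existing theme scope for the style.
    let dim_style = theme.get("ui.virtual.whitespace");
    let every_tenth_line = move |renderer: &mut TextRenderer, pos: LinePos| {
        // Only style the first visual line of a (possibly soft-wrapped) document line.
        if pos.first_visual_line && pos.doc_line % 10 == 0 {
            let area = Rect::new(inner.x, inner.y + pos.visual_line, inner.width, 1);
            renderer.surface.set_style(area, dim_style);
        }
    };
    line_decorations.push(Box::new(every_tenth_line));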
@@ -134,8 +155,9 @@ impl EditorView { } highlights = Box::new(syntax::merge(highlights, diagnostic)); } + let highlights: Box> = if is_focused { - Box::new(syntax::merge( + let highlights = syntax::merge( highlights, Self::doc_selection_highlights( editor.mode(), @@ -144,19 +166,52 @@ impl EditorView { theme, &config.cursor_shape, ), - )) + ); + let focused_view_elements = Self::highlight_focused_view_elements(view, doc, theme); + if focused_view_elements.is_empty() { + Box::new(highlights) + } else { + Box::new(syntax::merge(highlights, focused_view_elements)) + } } else { Box::new(highlights) }; - Self::render_text_highlights(doc, view.offset, inner, surface, theme, highlights, &config); - Self::render_gutter(editor, doc, view, view.area, surface, theme, is_focused); - Self::render_rulers(editor, doc, view, inner, surface, theme); + Self::render_gutter( + editor, + doc, + view, + view.area, + theme, + is_focused, + &mut line_decorations, + ); if is_focused { - Self::render_focused_view_elements(view, doc, inner, theme, surface); + let cursor = doc + .selection(view.id) + .primary() + .cursor(doc.text().slice(..)); + // set the cursor_cache to out of view in case the position is not found + editor.cursor_cache.set(Some(None)); + let update_cursor_cache = + |_: &mut TextRenderer, pos| editor.cursor_cache.set(Some(Some(pos))); + translated_positions.push((cursor, Box::new(update_cursor_cache))); } + render_document( + surface, + inner, + doc, + view.offset, + &text_annotations, + highlights, + theme, + &mut line_decorations, + &mut translated_positions, + ); + Self::render_rulers(editor, doc, view, inner, surface, theme); + // if we're not at the edge of the screen, draw a right border if viewport.right() != view.area.right() { let x = area.right(); @@ -204,31 +259,53 @@ impl EditorView { .iter() // View might be horizontally scrolled, convert from absolute distance // from the 1st column to relative distance from left of viewport - .filter_map(|ruler| ruler.checked_sub(1 + view.offset.col as u16)) + .filter_map(|ruler| ruler.checked_sub(1 + view.offset.horizontal_offset as u16)) .filter(|ruler| ruler < &viewport.width) .map(|ruler| viewport.clip_left(ruler).with_width(1)) .for_each(|area| surface.set_style(area, ruler_theme)) } + pub fn overlay_syntax_highlights( + doc: &Document, + anchor: usize, + height: u16, + text_annotations: &TextAnnotations, + ) -> Vec<(usize, std::ops::Range)> { + let text = doc.text().slice(..); + let row = text.char_to_line(anchor.min(text.len_chars())); + + let range = { + // Calculate viewport byte ranges: + // Saturating subs to make it inclusive zero indexing. + let last_line = text.len_lines().saturating_sub(1); + let last_visible_line = (row + height as usize).saturating_sub(1).min(last_line); + let start = text.line_to_byte(row.min(last_line)); + let end = text.line_to_byte(last_visible_line + 1); + + start..end + }; + + text_annotations.collect_overlay_highlights(range) + } + /// Get syntax highlights for a document in a view represented by the first line /// and column (`offset`) and the last line. This is done instead of using a view /// directly to enable rendering syntax highlighted docs anywhere (eg. picker preview) pub fn doc_syntax_highlights<'doc>( doc: &'doc Document, - offset: Position, + anchor: usize, height: u16, _theme: &Theme, ) -> Box + 'doc> { let text = doc.text().slice(..); + let row = text.char_to_line(anchor.min(text.len_chars())); let range = { // Calculate viewport byte ranges: // Saturating subs to make it inclusive zero indexing. 
- let last_line = doc.text().len_lines().saturating_sub(1); - let last_visible_line = (offset.row + height as usize) - .saturating_sub(1) - .min(last_line); - let start = text.line_to_byte(offset.row.min(last_line)); + let last_line = text.len_lines().saturating_sub(1); + let last_visible_line = (row + height as usize).saturating_sub(1).min(last_line); + let start = text.line_to_byte(row.min(last_line)); let end = text.line_to_byte(last_visible_line + 1); start..end @@ -273,11 +350,11 @@ impl EditorView { use helix_core::diagnostic::Severity; let get_scope_of = |scope| { theme - .find_scope_index(scope) + .find_scope_index_exact(scope) // get one of the themes below as fallback values - .or_else(|| theme.find_scope_index("diagnostic")) - .or_else(|| theme.find_scope_index("ui.cursor")) - .or_else(|| theme.find_scope_index("ui.selection")) + .or_else(|| theme.find_scope_index_exact("diagnostic")) + .or_else(|| theme.find_scope_index_exact("ui.cursor")) + .or_else(|| theme.find_scope_index_exact("ui.selection")) .expect( "at least one of the following scopes must be defined in the theme: `diagnostic`, `ui.cursor`, or `ui.selection`", ) @@ -340,25 +417,31 @@ impl EditorView { let cursor_is_block = cursorkind == CursorKind::Block; let selection_scope = theme - .find_scope_index("ui.selection") + .find_scope_index_exact("ui.selection") .expect("could not find `ui.selection` scope in the theme!"); + let primary_selection_scope = theme + .find_scope_index_exact("ui.selection.primary") + .unwrap_or(selection_scope); let base_cursor_scope = theme - .find_scope_index("ui.cursor") + .find_scope_index_exact("ui.cursor") .unwrap_or(selection_scope); + let base_primary_cursor_scope = theme + .find_scope_index("ui.cursor.primary") + .unwrap_or(base_cursor_scope); let cursor_scope = match mode { - Mode::Insert => theme.find_scope_index("ui.cursor.insert"), - Mode::Select => theme.find_scope_index("ui.cursor.select"), - Mode::Normal => Some(base_cursor_scope), + Mode::Insert => theme.find_scope_index_exact("ui.cursor.insert"), + Mode::Select => theme.find_scope_index_exact("ui.cursor.select"), + Mode::Normal => theme.find_scope_index_exact("ui.cursor.normal"), } .unwrap_or(base_cursor_scope); - let primary_cursor_scope = theme - .find_scope_index("ui.cursor.primary") - .unwrap_or(cursor_scope); - let primary_selection_scope = theme - .find_scope_index("ui.selection.primary") - .unwrap_or(selection_scope); + let primary_cursor_scope = match mode { + Mode::Insert => theme.find_scope_index_exact("ui.cursor.primary.insert"), + Mode::Select => theme.find_scope_index_exact("ui.cursor.primary.select"), + Mode::Normal => theme.find_scope_index_exact("ui.cursor.primary.normal"), + } + .unwrap_or(base_primary_cursor_scope); let mut spans: Vec<(usize, std::ops::Range)> = Vec::new(); for (i, range) in selection.iter().enumerate() { @@ -386,7 +469,14 @@ impl EditorView { if range.head > range.anchor { // Standard case. 
let cursor_start = prev_grapheme_boundary(text, range.head); - spans.push((selection_scope, range.anchor..cursor_start)); + // non block cursors look like they exclude the cursor + let selection_end = + if selection_is_primary && !cursor_is_block && mode != Mode::Insert { + range.head + } else { + cursor_start + }; + spans.push((selection_scope, range.anchor..selection_end)); if !selection_is_primary || cursor_is_block { spans.push((cursor_scope, cursor_start..range.head)); } @@ -396,255 +486,42 @@ impl EditorView { if !selection_is_primary || cursor_is_block { spans.push((cursor_scope, range.head..cursor_end)); } - spans.push((selection_scope, cursor_end..range.anchor)); + // non block cursors look like they exclude the cursor + let selection_start = if selection_is_primary + && !cursor_is_block + && !(mode == Mode::Insert && cursor_end == range.anchor) + { + range.head + } else { + cursor_end + }; + spans.push((selection_scope, selection_start..range.anchor)); } } spans } - pub fn render_text_highlights>( - doc: &Document, - offset: Position, - viewport: Rect, - surface: &mut Surface, - theme: &Theme, - highlights: H, - config: &helix_view::editor::Config, - ) { - let whitespace = &config.whitespace; - use helix_view::editor::WhitespaceRenderValue; - - // It's slightly more efficient to produce a full RopeSlice from the Rope, then slice that a bunch - // of times than it is to always call Rope::slice/get_slice (it will internally always hit RSEnum::Light). - let text = doc.text().slice(..); - - let characters = &whitespace.characters; - - let mut spans = Vec::new(); - let mut visual_x = 0usize; - let mut line = 0u16; - let tab_width = doc.tab_width(); - let tab = if whitespace.render.tab() == WhitespaceRenderValue::All { - std::iter::once(characters.tab) - .chain(std::iter::repeat(characters.tabpad).take(tab_width - 1)) - .collect() - } else { - " ".repeat(tab_width) - }; - let space = characters.space.to_string(); - let nbsp = characters.nbsp.to_string(); - let newline = if whitespace.render.newline() == WhitespaceRenderValue::All { - characters.newline.to_string() - } else { - " ".to_string() - }; - let indent_guide_char = config.indent_guides.character.to_string(); - - let text_style = theme.get("ui.text"); - let whitespace_style = theme.get("ui.virtual.whitespace"); - - let mut is_in_indent_area = true; - let mut last_line_indent_level = 0; - - // use whitespace style as fallback for indent-guide - let indent_guide_style = text_style.patch( - theme - .try_get("ui.virtual.indent-guide") - .unwrap_or_else(|| theme.get("ui.virtual.whitespace")), - ); - - let draw_indent_guides = |indent_level, line, surface: &mut Surface| { - if !config.indent_guides.render { - return; - } - - let starting_indent = - (offset.col / tab_width) + config.indent_guides.skip_levels as usize; - - // Don't draw indent guides outside of view - let end_indent = min( - indent_level, - // Add tab_width - 1 to round up, since the first visible - // indent might be a bit after offset.col - offset.col + viewport.width as usize + (tab_width - 1), - ) / tab_width; - - for i in starting_indent..end_indent { - let x = (viewport.x as usize + (i * tab_width) - offset.col) as u16; - let y = viewport.y + line; - debug_assert!(surface.in_bounds(x, y)); - surface.set_string(x, y, &indent_guide_char, indent_guide_style); - } - }; - - 'outer: for event in highlights { - match event { - HighlightEvent::HighlightStart(span) => { - spans.push(span); - } - HighlightEvent::HighlightEnd => { - spans.pop(); - } - HighlightEvent::Source 
{ start, end } => { - let is_trailing_cursor = text.len_chars() < end; - - // `unwrap_or_else` part is for off-the-end indices of - // the rope, to allow cursor highlighting at the end - // of the rope. - let text = text.get_slice(start..end).unwrap_or_else(|| " ".into()); - let style = spans - .iter() - .fold(text_style, |acc, span| acc.patch(theme.highlight(span.0))); - - let space = if whitespace.render.space() == WhitespaceRenderValue::All - && !is_trailing_cursor - { - &space - } else { - " " - }; - - let nbsp = if whitespace.render.nbsp() == WhitespaceRenderValue::All - && text.len_chars() < end - { -   - } else { - " " - }; - - use helix_core::graphemes::{grapheme_width, RopeGraphemes}; - - for grapheme in RopeGraphemes::new(text) { - let out_of_bounds = offset.col > visual_x - || visual_x >= viewport.width as usize + offset.col; - - if LineEnding::from_rope_slice(&grapheme).is_some() { - if !out_of_bounds { - // we still want to render an empty cell with the style - surface.set_string( - (viewport.x as usize + visual_x - offset.col) as u16, - viewport.y + line, - &newline, - style.patch(whitespace_style), - ); - } - - draw_indent_guides(last_line_indent_level, line, surface); - - visual_x = 0; - line += 1; - is_in_indent_area = true; - - // TODO: with proper iter this shouldn't be necessary - if line >= viewport.height { - break 'outer; - } - } else { - let grapheme = Cow::from(grapheme); - let is_whitespace; - - let (display_grapheme, width) = if grapheme == "\t" { - is_whitespace = true; - // make sure we display tab as appropriate amount of spaces - let visual_tab_width = tab_width - (visual_x % tab_width); - let grapheme_tab_width = - helix_core::str_utils::char_to_byte_idx(&tab, visual_tab_width); - - (&tab[..grapheme_tab_width], visual_tab_width) - } else if grapheme == " " { - is_whitespace = true; - (space, 1) - } else if grapheme == "\u{00A0}" { - is_whitespace = true; - (nbsp, 1) - } else { - is_whitespace = false; - // Cow will prevent allocations if span contained in a single slice - // which should really be the majority case - let width = grapheme_width(&grapheme); - (grapheme.as_ref(), width) - }; - - let cut_off_start = offset.col.saturating_sub(visual_x); - - if !out_of_bounds { - // if we're offscreen just keep going until we hit a new line - surface.set_string( - (viewport.x as usize + visual_x - offset.col) as u16, - viewport.y + line, - display_grapheme, - if is_whitespace { - style.patch(whitespace_style) - } else { - style - }, - ); - } else if cut_off_start != 0 && cut_off_start < width { - // partially on screen - let rect = Rect::new( - viewport.x, - viewport.y + line, - (width - cut_off_start) as u16, - 1, - ); - surface.set_style( - rect, - if is_whitespace { - style.patch(whitespace_style) - } else { - style - }, - ); - } - - if is_in_indent_area && !(grapheme == " " || grapheme == "\t") { - draw_indent_guides(visual_x, line, surface); - is_in_indent_area = false; - last_line_indent_level = visual_x; - } - - visual_x = visual_x.saturating_add(width); - } - } - } - } - } - } - /// Render brace match, etc (meant for the focused view only) - pub fn render_focused_view_elements( + pub fn highlight_focused_view_elements( view: &View, doc: &Document, - viewport: Rect, theme: &Theme, - surface: &mut Surface, - ) { + ) -> Vec<(usize, std::ops::Range)> { // Highlight matching braces if let Some(syntax) = doc.syntax() { let text = doc.text().slice(..); use helix_core::match_brackets; let pos = doc.selection(view.id).primary().cursor(text); - let pos = 
match_brackets::find_matching_bracket(syntax, doc.text(), pos) - .and_then(|pos| view.screen_coords_at_pos(doc, text, pos)); - - if let Some(pos) = pos { + if let Some(pos) = match_brackets::find_matching_bracket(syntax, doc.text(), pos) { // ensure col is on screen - if (pos.col as u16) < viewport.width + view.offset.col as u16 - && pos.col >= view.offset.col - { - let style = theme.try_get("ui.cursor.match").unwrap_or_else(|| { - Style::default() - .add_modifier(Modifier::REVERSED) - .add_modifier(Modifier::DIM) - }); - - surface[(viewport.x + pos.col as u16, viewport.y + pos.row as u16)] - .set_style(style); + if let Some(highlight) = theme.find_scope_index_exact("ui.cursor.match") { + return vec![(highlight, pos..pos + 1)]; } } } + Vec::new() } /// Render bufferline at the top @@ -700,22 +577,17 @@ impl EditorView { } } - pub fn render_gutter( - editor: &Editor, - doc: &Document, + pub fn render_gutter<'d>( + editor: &'d Editor, + doc: &'d Document, view: &View, viewport: Rect, - surface: &mut Surface, theme: &Theme, is_focused: bool, + line_decorations: &mut Vec>, ) { let text = doc.text().slice(..); - let last_line = view.last_line(doc); - - // it's used inside an iterator so the collect isn't needless: - // https://github.com/rust-lang/rust-clippy/issues/6164 - #[allow(clippy::needless_collect)] - let cursors: Vec<_> = doc + let cursors: Rc<[_]> = doc .selection(view.id) .iter() .map(|range| range.cursor_line(text)) @@ -725,29 +597,36 @@ impl EditorView { let gutter_style = theme.get("ui.gutter"); let gutter_selected_style = theme.get("ui.gutter.selected"); - - // avoid lots of small allocations by reusing a text buffer for each line - let mut text = String::with_capacity(8); + let gutter_style_virtual = theme.get("ui.gutter.virtual"); + let gutter_selected_style_virtual = theme.get("ui.gutter.selected.virtual"); for gutter_type in view.gutters() { let mut gutter = gutter_type.style(editor, doc, view, theme, is_focused); let width = gutter_type.width(view, doc); - text.reserve(width); // ensure there's enough space for the gutter - for (i, line) in (view.offset.row..(last_line + 1)).enumerate() { - let selected = cursors.contains(&line); + // avoid lots of small allocations by reusing a text buffer for each line + let mut text = String::with_capacity(width); + let cursors = cursors.clone(); + let gutter_decoration = move |renderer: &mut TextRenderer, pos: LinePos| { + // TODO handle softwrap in gutters + let selected = cursors.contains(&pos.doc_line); let x = viewport.x + offset; - let y = viewport.y + i as u16; + let y = viewport.y + pos.visual_line; - let gutter_style = if selected { - gutter_selected_style - } else { - gutter_style + let gutter_style = match (selected, pos.first_visual_line) { + (false, true) => gutter_style, + (true, true) => gutter_selected_style, + (false, false) => gutter_style_virtual, + (true, false) => gutter_selected_style_virtual, }; - if let Some(style) = gutter(line, selected, &mut text) { - surface.set_stringn(x, y, &text, width, gutter_style.patch(style)); + if let Some(style) = + gutter(pos.doc_line, selected, pos.first_visual_line, &mut text) + { + renderer + .surface + .set_stringn(x, y, &text, width, gutter_style.patch(style)); } else { - surface.set_style( + renderer.surface.set_style( Rect { x, y, @@ -758,7 +637,8 @@ impl EditorView { ); } text.clear(); - } + }; + line_decorations.push(Box::new(gutter_decoration)); offset += width as u16; } @@ -805,6 +685,14 @@ impl EditorView { }); let text = Text::styled(&diagnostic.message, style); 
lines.extend(text.lines); + let code = diagnostic.code.as_ref().map(|x| match x { + NumberOrString::Number(n) => format!("({n})"), + NumberOrString::String(s) => format!("({s})"), + }); + if let Some(code) = code { + let span = Span::styled(code, style); + lines.push(span.into()); + } } let paragraph = Paragraph::new(lines) @@ -819,10 +707,13 @@ impl EditorView { } /// Apply the highlighting on the lines where a cursor is active - pub fn highlight_cursorline(doc: &Document, view: &View, surface: &mut Surface, theme: &Theme) { + pub fn cursorline_decorator( + doc: &Document, + view: &View, + theme: &Theme, + ) -> Box { let text = doc.text().slice(..); - let last_line = view.last_line(doc); - + // TODO only highlight the visual line that contains the cursor instead of the full visual line let primary_line = doc.selection(view.id).primary().cursor_line(text); // The secondary_lines do contain the primary_line, it doesn't matter @@ -839,20 +730,18 @@ impl EditorView { let primary_style = theme.get("ui.cursorline.primary"); let secondary_style = theme.get("ui.cursorline.secondary"); - - for line in view.offset.row..(last_line + 1) { - let area = Rect::new( - view.area.x, - view.area.y + (line - view.offset.row) as u16, - view.area.width, - 1, - ); - if primary_line == line { - surface.set_style(area, primary_style); - } else if secondary_lines.binary_search(&line).is_ok() { - surface.set_style(area, secondary_style); + let viewport = view.area; + + let line_decoration = move |renderer: &mut TextRenderer, pos: LinePos| { + let area = Rect::new(viewport.x, viewport.y + pos.visual_line, viewport.width, 1); + if primary_line == pos.doc_line { + renderer.surface.set_style(area, primary_style); + } else if secondary_lines.binary_search(&pos.doc_line).is_ok() { + renderer.surface.set_style(area, secondary_style); } - } + }; + + Box::new(line_decoration) } /// Apply the highlighting on the columns where a cursor is active @@ -861,6 +750,8 @@ impl EditorView { view: &View, surface: &mut Surface, theme: &Theme, + viewport: Rect, + text_annotations: &TextAnnotations, ) { let text = doc.text().slice(..); @@ -876,19 +767,23 @@ impl EditorView { .unwrap_or_else(|| theme.get("ui.cursorline.secondary")); let inner_area = view.inner_area(doc); - let offset = view.offset.col; let selection = doc.selection(view.id); let primary = selection.primary(); + let text_format = doc.text_format(viewport.width, None); for range in selection.iter() { let is_primary = primary == *range; + let cursor = range.cursor(text); + + let Position { col, .. 
} = + visual_offset_from_block(text, cursor, cursor, &text_format, text_annotations).0; - let Position { row: _, col } = - visual_coords_at_pos(text, range.cursor(text), doc.tab_width()); // if the cursor is horizontally in the view - if col >= offset && inner_area.width > (col - offset) as u16 { + if col >= view.offset.horizontal_offset + && inner_area.width > (col - view.offset.horizontal_offset) as u16 + { let area = Rect::new( - inner_area.x + (col - offset) as u16, + inner_area.x + (col - view.offset.horizontal_offset) as u16, view.area.y, 1, view.area.height, @@ -935,6 +830,7 @@ impl EditorView { (Mode::Insert, Mode::Normal) => { // if exiting insert mode, remove completion self.completion = None; + cxt.editor.completion_request_handle = None; // TODO: Use an on_mode_change hook to remove signature help cxt.jobs.callback(async { @@ -1005,6 +901,8 @@ impl EditorView { for _ in 0..cxt.editor.count.map_or(1, NonZeroUsize::into) { // first execute whatever put us into insert mode self.last_insert.0.execute(cxt); + let mut last_savepoint = None; + let mut last_request_savepoint = None; // then replay the inputs for key in self.last_insert.1.clone() { match key { @@ -1012,7 +910,9 @@ impl EditorView { InsertEvent::CompletionApply(compl) => { let (view, doc) = current!(cxt.editor); - doc.restore(view); + if let Some(last_savepoint) = last_savepoint.as_deref() { + doc.restore(view, last_savepoint); + } let text = doc.text().slice(..); let cursor = doc.selection(view.id).primary().cursor(text); @@ -1026,11 +926,14 @@ impl EditorView { (shift_position(start), shift_position(end), t) }), ); - apply_transaction(&tx, doc, view); + doc.apply(&tx, view.id); } InsertEvent::TriggerCompletion => { - let (_, doc) = current!(cxt.editor); - doc.savepoint(); + last_savepoint = take(&mut last_request_savepoint); + } + InsertEvent::RequestCompletion => { + let (view, doc) = current!(cxt.editor); + last_request_savepoint = Some(doc.savepoint(view)); } } } @@ -1055,26 +958,31 @@ impl EditorView { } } + #[allow(clippy::too_many_arguments)] pub fn set_completion( &mut self, editor: &mut Editor, + savepoint: Arc, items: Vec, offset_encoding: helix_lsp::OffsetEncoding, start_offset: usize, trigger_offset: usize, size: Rect, ) { - let mut completion = - Completion::new(editor, items, offset_encoding, start_offset, trigger_offset); + let mut completion = Completion::new( + editor, + savepoint, + items, + offset_encoding, + start_offset, + trigger_offset, + ); if completion.is_empty() { // skip if we got no completion results return; } - // Immediately initialize a savepoint - doc_mut!(editor).savepoint(); - editor.last_completion = None; self.last_insert.1.push(InsertEvent::TriggerCompletion); @@ -1087,12 +995,12 @@ impl EditorView { self.completion = None; // Clear any savepoints - let doc = doc_mut!(editor); - doc.savepoint = None; editor.clear_idle_timer(); // don't retrigger } pub fn handle_idle_timeout(&mut self, cx: &mut commands::Context) -> EventResult { + commands::compute_inlay_hints_for_all_views(cx.editor, cx.jobs); + if let Some(completion) = &mut self.completion { return if completion.ensure_item_resolved(cx) { EventResult::Consumed(None) @@ -1117,6 +1025,10 @@ impl EditorView { event: &MouseEvent, cxt: &mut commands::Context, ) -> EventResult { + if event.kind != MouseEventKind::Moved { + cxt.editor.reset_idle_timer(); + } + let config = cxt.editor.config(); let MouseEvent { kind, @@ -1128,7 +1040,7 @@ impl EditorView { let pos_and_view = |editor: &Editor, row, column| { 
editor.tree.views().find_map(|(view, _focus)| { - view.pos_at_screen_coords(&editor.documents[&view.doc], row, column) + view.pos_at_screen_coords(&editor.documents[&view.doc], row, column, true) .map(|pos| (pos, view.id)) }) }; @@ -1170,8 +1082,10 @@ impl EditorView { None => return EventResult::Ignored(None), }; - let line = coords.row + view.offset.row; - if line < doc.text().len_lines() { + if let Some(char_idx) = + view.pos_at_visual_coords(doc, coords.row as u16, coords.col as u16, true) + { + let line = doc.text().char_to_line(char_idx); commands::dap_toggle_breakpoint_impl(cxt, path, line); return EventResult::Consumed(None); } @@ -1183,7 +1097,7 @@ impl EditorView { MouseEventKind::Drag(MouseButton::Left) => { let (view, doc) = current!(cxt.editor); - let pos = match view.pos_at_screen_coords(doc, row, column) { + let pos = match view.pos_at_screen_coords(doc, row, column, true) { Some(pos) => pos, None => return EventResult::Ignored(None), }; @@ -1247,8 +1161,9 @@ impl EditorView { cxt.editor.focus(view_id); let (view, doc) = current!(cxt.editor); - let line = coords.row + view.offset.row; - if let Ok(pos) = doc.text().try_line_to_char(line) { + if let Some(pos) = + view.pos_at_visual_coords(doc, coords.row as u16, coords.col as u16, true) + { doc.set_selection(view_id, Selection::point(pos)); if modifiers == KeyModifiers::ALT { commands::MappableCommand::dap_edit_log.execute(cxt); diff --git a/helix-term/src/ui/fuzzy_match.rs b/helix-term/src/ui/fuzzy_match.rs index e25d7328..b406702f 100644 --- a/helix-term/src/ui/fuzzy_match.rs +++ b/helix-term/src/ui/fuzzy_match.rs @@ -4,41 +4,209 @@ use fuzzy_matcher::FuzzyMatcher; #[cfg(test)] mod test; +struct QueryAtom { + kind: QueryAtomKind, + atom: String, + ignore_case: bool, + inverse: bool, +} +impl QueryAtom { + fn new(atom: &str) -> Option { + let mut atom = atom.to_string(); + let inverse = atom.starts_with('!'); + if inverse { + atom.remove(0); + } + + let mut kind = match atom.chars().next() { + Some('^') => QueryAtomKind::Prefix, + Some('\'') => QueryAtomKind::Substring, + _ if inverse => QueryAtomKind::Substring, + _ => QueryAtomKind::Fuzzy, + }; + + if atom.starts_with(['^', '\'']) { + atom.remove(0); + } + + if atom.is_empty() { + return None; + } + + if atom.ends_with('$') && !atom.ends_with("\\$") { + atom.pop(); + kind = if kind == QueryAtomKind::Prefix { + QueryAtomKind::Exact + } else { + QueryAtomKind::Postfix + } + } + + Some(QueryAtom { + kind, + atom: atom.replace('\\', ""), + // not ideal but fuzzy_matches only knows ascii uppercase so more consistent + // to behave the same + ignore_case: kind != QueryAtomKind::Fuzzy + && atom.chars().all(|c| c.is_ascii_lowercase()), + inverse, + }) + } + + fn indices(&self, matcher: &Matcher, item: &str, indices: &mut Vec) -> bool { + // for inverse there are no indicies to return + // just return whether we matched + if self.inverse { + return self.matches(matcher, item); + } + let buf; + let item = if self.ignore_case { + buf = item.to_ascii_lowercase(); + &buf + } else { + item + }; + let off = match self.kind { + QueryAtomKind::Fuzzy => { + if let Some((_, fuzzy_indices)) = matcher.fuzzy_indices(item, &self.atom) { + indices.extend_from_slice(&fuzzy_indices); + return true; + } else { + return false; + } + } + QueryAtomKind::Substring => { + if let Some(off) = item.find(&self.atom) { + off + } else { + return false; + } + } + QueryAtomKind::Prefix if item.starts_with(&self.atom) => 0, + QueryAtomKind::Postfix if item.ends_with(&self.atom) => item.len() - self.atom.len(), + 
QueryAtomKind::Exact if item == self.atom => 0, + _ => return false, + }; + + indices.extend(off..(off + self.atom.len())); + true + } + + fn matches(&self, matcher: &Matcher, item: &str) -> bool { + let buf; + let item = if self.ignore_case { + buf = item.to_ascii_lowercase(); + &buf + } else { + item + }; + let mut res = match self.kind { + QueryAtomKind::Fuzzy => matcher.fuzzy_match(item, &self.atom).is_some(), + QueryAtomKind::Substring => item.contains(&self.atom), + QueryAtomKind::Prefix => item.starts_with(&self.atom), + QueryAtomKind::Postfix => item.ends_with(&self.atom), + QueryAtomKind::Exact => item == self.atom, + }; + if self.inverse { + res = !res; + } + res + } +} + +#[derive(Debug, PartialEq, Eq, Clone, Copy)] +enum QueryAtomKind { + /// Item is a fuzzy match of this behaviour + /// + /// Usage: `foo` + Fuzzy, + /// Item contains query atom as a continous substring + /// + /// Usage `'foo` + Substring, + /// Item starts with query atom + /// + /// Usage: `^foo` + Prefix, + /// Item ends with query atom + /// + /// Usage: `foo$` + Postfix, + /// Item is equal to query atom + /// + /// Usage `^foo$` + Exact, +} + +#[derive(Default)] pub struct FuzzyQuery { - queries: Vec, + first_fuzzy_atom: Option, + query_atoms: Vec, +} + +fn query_atoms(query: &str) -> impl Iterator + '_ { + let mut saw_backslash = false; + query.split(move |c| { + saw_backslash = match c { + ' ' if !saw_backslash => return true, + '\\' => true, + _ => false, + }; + false + }) } impl FuzzyQuery { + pub fn refine(&self, query: &str, old_query: &str) -> (FuzzyQuery, bool) { + // TODO: we could be a lot smarter about this + let new_query = Self::new(query); + let mut is_refinement = query.starts_with(old_query); + + // if the last atom is an inverse atom adding more text to it + // will actually increase the number of matches and we can not refine + // the matches. 
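+ // (for example, extending `!foo` to `!foob` can only add matches, never remove them)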
+ if is_refinement && !self.query_atoms.is_empty() { + let last_idx = self.query_atoms.len() - 1; + if self.query_atoms[last_idx].inverse + && self.query_atoms[last_idx].atom != new_query.query_atoms[last_idx].atom + { + is_refinement = false; + } + } + + (new_query, is_refinement) + } + pub fn new(query: &str) -> FuzzyQuery { - let mut saw_backslash = false; - let queries = query - .split(|c| { - saw_backslash = match c { - ' ' if !saw_backslash => return true, - '\\' => true, - _ => false, - }; - false - }) - .filter_map(|query| { - if query.is_empty() { + let mut first_fuzzy_query = None; + let query_atoms = query_atoms(query) + .filter_map(|atom| { + let atom = QueryAtom::new(atom)?; + if atom.kind == QueryAtomKind::Fuzzy && first_fuzzy_query.is_none() { + first_fuzzy_query = Some(atom.atom); None } else { - Some(query.replace("\\ ", " ")) + Some(atom) } }) .collect(); - FuzzyQuery { queries } + FuzzyQuery { + first_fuzzy_atom: first_fuzzy_query, + query_atoms, + } } pub fn fuzzy_match(&self, item: &str, matcher: &Matcher) -> Option { - // use the rank of the first query for the rank, because merging ranks is not really possible + // use the rank of the first fuzzzy query for the rank, because merging ranks is not really possible // this behaviour matches fzf and skim - let score = matcher.fuzzy_match(item, self.queries.get(0)?)?; + let score = self + .first_fuzzy_atom + .as_ref() + .map_or(Some(0), |atom| matcher.fuzzy_match(item, atom))?; if self - .queries + .query_atoms .iter() - .any(|query| matcher.fuzzy_match(item, query).is_none()) + .any(|atom| !atom.matches(matcher, item)) { return None; } @@ -46,29 +214,26 @@ impl FuzzyQuery { } pub fn fuzzy_indicies(&self, item: &str, matcher: &Matcher) -> Option<(i64, Vec)> { - if self.queries.len() == 1 { - return matcher.fuzzy_indices(item, &self.queries[0]); - } - - // use the rank of the first query for the rank, because merging ranks is not really possible - // this behaviour matches fzf and skim - let (score, mut indicies) = matcher.fuzzy_indices(item, self.queries.get(0)?)?; + let (score, mut indices) = self.first_fuzzy_atom.as_ref().map_or_else( + || Some((0, Vec::new())), + |atom| matcher.fuzzy_indices(item, atom), + )?; - // fast path for the common case of not using a space - // during matching this branch should be free thanks to branch prediction - if self.queries.len() == 1 { - return Some((score, indicies)); + // fast path for the common case of just a single atom + if self.query_atoms.is_empty() { + return Some((score, indices)); } - for query in &self.queries[1..] 
{ - let (_, matched_indicies) = matcher.fuzzy_indices(item, query)?; - indicies.extend_from_slice(&matched_indicies); + for atom in &self.query_atoms { + if !atom.indices(matcher, item, &mut indices) { + return None; + } } // deadup and remove duplicate matches - indicies.sort_unstable(); - indicies.dedup(); + indices.sort_unstable(); + indices.dedup(); - Some((score, indicies)) + Some((score, indices)) } } diff --git a/helix-term/src/ui/lsp.rs b/helix-term/src/ui/lsp.rs index 393d24c4..44050aa1 100644 --- a/helix-term/src/ui/lsp.rs +++ b/helix-term/src/ui/lsp.rs @@ -53,7 +53,10 @@ impl Component for SignatureHelp { let active_param_span = self.active_param_range.map(|(start, end)| { vec![( - cx.editor.theme.find_scope_index("ui.selection").unwrap(), + cx.editor + .theme + .find_scope_index_exact("ui.selection") + .unwrap(), start..end, )] }); diff --git a/helix-term/src/ui/markdown.rs b/helix-term/src/ui/markdown.rs index 923dd73a..87136992 100644 --- a/helix-term/src/ui/markdown.rs +++ b/helix-term/src/ui/markdown.rs @@ -342,13 +342,10 @@ impl Component for Markdown { fn required_size(&mut self, viewport: (u16, u16)) -> Option<(u16, u16)> { let padding = 2; - if padding >= viewport.1 || padding >= viewport.0 { - return None; - } let contents = self.parse(None); // TODO: account for tab width - let max_text_width = (viewport.0 - padding).min(120); + let max_text_width = (viewport.0.saturating_sub(padding)).min(120); let (width, height) = crate::ui::text::required_size(&contents, max_text_width); Some((width + padding, height + padding)) diff --git a/helix-term/src/ui/menu.rs b/helix-term/src/ui/menu.rs index b9c1f9de..30625ace 100644 --- a/helix-term/src/ui/menu.rs +++ b/helix-term/src/ui/menu.rs @@ -4,7 +4,7 @@ use crate::{ compositor::{Callback, Component, Compositor, Context, Event, EventResult}, ctrl, key, shift, }; -use tui::{buffer::Buffer as Surface, text::Spans, widgets::Table}; +use tui::{buffer::Buffer as Surface, widgets::Table}; pub use tui::widgets::{Cell, Row}; @@ -18,28 +18,24 @@ pub trait Item { /// Additional editor state that is used for label calculation. type Data; - fn label(&self, data: &Self::Data) -> Spans; + fn format(&self, data: &Self::Data) -> Row; fn sort_text(&self, data: &Self::Data) -> Cow { - let label: String = self.label(data).into(); + let label: String = self.format(data).cell_text().collect(); label.into() } fn filter_text(&self, data: &Self::Data) -> Cow { - let label: String = self.label(data).into(); + let label: String = self.format(data).cell_text().collect(); label.into() } - - fn row(&self, data: &Self::Data) -> Row { - Row::new(vec![Cell::from(self.label(data))]) - } } impl Item for PathBuf { /// Root prefix to strip. 
type Data = PathBuf; - fn label(&self, root_path: &Self::Data) -> Spans { + fn format(&self, root_path: &Self::Data) -> Row { self.strip_prefix(root_path) .unwrap_or(self) .to_string_lossy() @@ -47,6 +43,8 @@ impl Item for PathBuf { } } +pub type MenuCallback = Box, MenuEvent)>; + pub struct Menu { options: Vec, editor_data: T::Data, @@ -59,7 +57,7 @@ pub struct Menu { widths: Vec, - callback_fn: Box, MenuEvent)>, + callback_fn: MenuCallback, scroll: usize, size: (u16, u16), @@ -81,7 +79,7 @@ impl Menu { Self { options, editor_data, - matcher: Box::new(Matcher::default()), + matcher: Box::new(Matcher::default().ignore_case()), matches, cursor: None, widths: Vec::new(), @@ -144,10 +142,10 @@ impl Menu { let n = self .options .first() - .map(|option| option.row(&self.editor_data).cells.len()) + .map(|option| option.format(&self.editor_data).cells.len()) .unwrap_or_default(); let max_lens = self.options.iter().fold(vec![0; n], |mut acc, option| { - let row = option.row(&self.editor_data); + let row = option.format(&self.editor_data); // maintain max for each column for (acc, cell) in acc.iter_mut().zip(row.cells.iter()) { let width = cell.content.width(); @@ -256,12 +254,12 @@ impl Component for Menu { return EventResult::Consumed(close_fn); } // arrow up/ctrl-p/shift-tab prev completion choice (including updating the doc) - shift!(Tab) | key!(Up) | ctrl!('p') | ctrl!('k') => { + shift!(Tab) | key!(Up) | ctrl!('p') => { self.move_up(); (self.callback_fn)(cx.editor, self.selection(), MenuEvent::Update); return EventResult::Consumed(None); } - key!(Tab) | key!(Down) | ctrl!('n') | ctrl!('j') => { + key!(Tab) | key!(Down) | ctrl!('n') => { // arrow down/ctrl-n/tab advances completion choice (including updating the doc) self.move_down(); (self.callback_fn)(cx.editor, self.selection(), MenuEvent::Update); @@ -331,7 +329,9 @@ impl Component for Menu { (a + b - 1) / b } - let rows = options.iter().map(|option| option.row(&self.editor_data)); + let rows = options + .iter() + .map(|option| option.format(&self.editor_data)); let table = Table::new(rows) .style(style) .highlight_style(selected) diff --git a/helix-term/src/ui/mod.rs b/helix-term/src/ui/mod.rs index eb480758..3e9a14b0 100644 --- a/helix-term/src/ui/mod.rs +++ b/helix-term/src/ui/mod.rs @@ -1,4 +1,5 @@ mod completion; +mod document; pub(crate) mod editor; mod fuzzy_match; mod info; @@ -14,6 +15,7 @@ mod statusline; mod text; use crate::compositor::{Component, Compositor}; +use crate::filter_picker_entry; use crate::job::{self, Callback}; pub use completion::Completion; pub use editor::EditorView; @@ -162,6 +164,9 @@ pub fn file_picker(root: PathBuf, config: &helix_view::editor::Config) -> FilePi let now = Instant::now(); + let dedup_symlinks = config.file_picker.deduplicate_links; + let absolute_root = root.canonicalize().unwrap_or_else(|_| root.clone()); + let mut walk_builder = WalkBuilder::new(&root); walk_builder .hidden(config.file_picker.hidden) @@ -172,10 +177,7 @@ pub fn file_picker(root: PathBuf, config: &helix_view::editor::Config) -> FilePi .git_global(config.file_picker.git_global) .git_exclude(config.file_picker.git_exclude) .max_depth(config.file_picker.max_depth) - // We always want to ignore the .git directory, otherwise if - // `ignore` is turned off above, we end up with a lot of noise - // in our picker. 
- .filter_entry(|entry| entry.file_name() != ".git"); + .filter_entry(move |entry| filter_picker_entry(entry, &absolute_root, dedup_symlinks)); // We want to exclude files that the editor can't handle yet let mut type_builder = TypesBuilder::new(); @@ -194,14 +196,11 @@ pub fn file_picker(root: PathBuf, config: &helix_view::editor::Config) -> FilePi // We want files along with their modification date for sorting let files = walk_builder.build().filter_map(|entry| { let entry = entry.ok()?; - // This is faster than entry.path().is_dir() since it uses cached fs::Metadata fetched by ignore/walkdir - let is_dir = entry.file_type().map_or(false, |ft| ft.is_dir()); - if is_dir { - // Will give a false positive if metadata cannot be read (eg. permission error) - None - } else { + if entry.file_type()?.is_file() { Some(entry.into_path()) + } else { + None } }); @@ -281,10 +280,10 @@ pub mod completers { } pub fn theme(_editor: &Editor, input: &str) -> Vec { - let mut names = theme::Loader::read_names(&helix_loader::runtime_dir().join("themes")); - names.extend(theme::Loader::read_names( - &helix_loader::config_dir().join("themes"), - )); + let mut names = theme::Loader::read_names(&helix_loader::config_dir().join("themes")); + for rt_dir in helix_loader::runtime_dirs() { + names.extend(theme::Loader::read_names(&rt_dir.join("themes"))); + } names.push("default".into()); names.push("base16_default".into()); names.sort(); diff --git a/helix-term/src/ui/picker.rs b/helix-term/src/ui/picker.rs index aad3f81c..3294a2a1 100644 --- a/helix-term/src/ui/picker.rs +++ b/helix-term/src/ui/picker.rs @@ -1,28 +1,38 @@ use crate::{ + alt, compositor::{Component, Compositor, Context, Event, EventResult}, ctrl, key, shift, - ui::{self, fuzzy_match::FuzzyQuery, EditorView}, + ui::{ + self, + document::{render_document, LineDecoration, LinePos, TextRenderer}, + fuzzy_match::FuzzyQuery, + EditorView, + }, }; use futures_util::future::BoxFuture; use tui::{ buffer::Buffer as Surface, - widgets::{Block, BorderType, Borders}, + layout::Constraint, + text::{Span, Spans}, + widgets::{Block, BorderType, Borders, Cell, Table}, }; use fuzzy_matcher::skim::SkimMatcherV2 as Matcher; use tui::widgets::Widget; -use std::{ - cmp::{self, Ordering}, - time::Instant, -}; +use std::cmp::{self, Ordering}; use std::{collections::HashMap, io::Read, path::PathBuf}; use crate::ui::{Prompt, PromptEvent}; -use helix_core::{movement::Direction, Position}; +use helix_core::{ + movement::Direction, text_annotations::TextAnnotations, + unicode::segmentation::UnicodeSegmentation, Position, +}; use helix_view::{ editor::Action, graphics::{CursorKind, Margin, Modifier, Rect}, + theme::Style, + view::ViewPosition, Document, DocumentId, Editor, }; @@ -60,6 +70,8 @@ impl From for PathOrId { } } +type FileCallback = Box Option>; + /// File path and range of lines (used to align and highlight lines) pub type FileLocation = (PathOrId, Option<(usize, usize)>); @@ -70,7 +82,7 @@ pub struct FilePicker { preview_cache: HashMap, read_buffer: Vec, /// Given an item in the picker, return the file path and line number to display. 
- file_fn: Box Option>, + file_fn: FileCallback, } pub enum CachedPreview { @@ -178,7 +190,7 @@ impl FilePicker { } _ => { // TODO: enable syntax highlighting; blocked by async rendering - Document::open(path, None, None) + Document::open(path, None, None, editor.config.clone()) .map(|doc| CachedPreview::Document(Box::new(doc))) .unwrap_or(CachedPreview::NotFound) } @@ -213,6 +225,9 @@ impl FilePicker { let loader = cx.editor.syn_loader.clone(); doc.detect_language(loader); } + + // QUESTION: do we want to compute inlay hints in pickers too ? Probably not for now + // but it could be interesting in the future } EventResult::Consumed(None) @@ -282,43 +297,58 @@ impl Component for FilePicker { }) .unwrap_or(0); - let offset = Position::new(first_line, 0); + let offset = ViewPosition { + anchor: doc.text().line_to_char(first_line), + horizontal_offset: 0, + vertical_offset: 0, + }; - let mut highlights = - EditorView::doc_syntax_highlights(doc, offset, area.height, &cx.editor.theme); + let mut highlights = EditorView::doc_syntax_highlights( + doc, + offset.anchor, + area.height, + &cx.editor.theme, + ); for spans in EditorView::doc_diagnostics_highlights(doc, &cx.editor.theme) { if spans.is_empty() { continue; } highlights = Box::new(helix_core::syntax::merge(highlights, spans)); } - EditorView::render_text_highlights( + let mut decorations: Vec> = Vec::new(); + + if let Some((start, end)) = range { + let style = cx + .editor + .theme + .try_get("ui.highlight") + .unwrap_or_else(|| cx.editor.theme.get("ui.selection")); + let draw_highlight = move |renderer: &mut TextRenderer, pos: LinePos| { + if (start..=end).contains(&pos.doc_line) { + let area = Rect::new( + renderer.viewport.x, + renderer.viewport.y + pos.visual_line, + renderer.viewport.width, + 1, + ); + renderer.surface.set_style(area, style) + } + }; + decorations.push(Box::new(draw_highlight)) + } + + render_document( + surface, + inner, doc, offset, - inner, - surface, - &cx.editor.theme, + // TODO: compute text annotations asynchronously here (like inlay hints) + &TextAnnotations::default(), highlights, - &cx.editor.config(), + &cx.editor.theme, + &mut decorations, + &mut [], ); - - // highlight the line - if let Some((start, end)) = range { - let offset = start.saturating_sub(first_line) as u16; - surface.set_style( - Rect::new( - inner.x, - inner.y + offset, - inner.width, - (end.saturating_sub(start) as u16 + 1) - .min(inner.height.saturating_sub(offset)), - ), - cx.editor - .theme - .try_get("ui.highlight") - .unwrap_or_else(|| cx.editor.theme.get("ui.selection")), - ); - } } } @@ -370,6 +400,8 @@ impl Ord for PickerMatch { } } +type PickerCallback = Box; + pub struct Picker { options: Vec, editor_data: T::Data, @@ -383,13 +415,15 @@ pub struct Picker { cursor: usize, // pattern: String, prompt: Prompt, - previous_pattern: String, + previous_pattern: (String, FuzzyQuery), /// Whether to truncate the start (default true) pub truncate_start: bool, /// Whether to show the preview panel (default true) show_preview: bool, + /// Constraints for tabular formatting + widths: Vec, - callback_fn: Box, + callback_fn: PickerCallback, } impl Picker { @@ -408,18 +442,21 @@ impl Picker { let mut picker = Self { options, editor_data, - matcher: Box::new(Matcher::default()), + matcher: Box::default(), matches: Vec::new(), cursor: 0, prompt, - previous_pattern: String::new(), + previous_pattern: (String::new(), FuzzyQuery::default()), truncate_start: true, show_preview: true, callback_fn: Box::new(callback_fn), completion_height: 0, + widths: 
Vec::new(), }; - // scoring on empty input: + picker.calculate_column_widths(); + + // scoring on empty input // TODO: just reuse score() picker .matches @@ -435,15 +472,50 @@ impl Picker { picker } - pub fn score(&mut self) { - let now = Instant::now(); + pub fn set_options(&mut self, new_options: Vec) { + self.options = new_options; + self.cursor = 0; + self.force_score(); + self.calculate_column_widths(); + } + + /// Calculate the width constraints using the maximum widths of each column + /// for the current options. + fn calculate_column_widths(&mut self) { + let n = self + .options + .first() + .map(|option| option.format(&self.editor_data).cells.len()) + .unwrap_or_default(); + let max_lens = self.options.iter().fold(vec![0; n], |mut acc, option| { + let row = option.format(&self.editor_data); + // maintain max for each column + for (acc, cell) in acc.iter_mut().zip(row.cells.iter()) { + let width = cell.content.width(); + if width > *acc { + *acc = width; + } + } + acc + }); + self.widths = max_lens + .into_iter() + .map(|len| Constraint::Length(len as u16)) + .collect(); + } + pub fn score(&mut self) { let pattern = self.prompt.line(); - if pattern == &self.previous_pattern { + if pattern == &self.previous_pattern.0 { return; } + let (query, is_refined) = self + .previous_pattern + .1 + .refine(pattern, &self.previous_pattern.0); + if pattern.is_empty() { // Fast path for no pattern. self.matches.clear(); @@ -456,8 +528,7 @@ impl Picker { len: text.chars().count(), } })); - } else if pattern.starts_with(&self.previous_pattern) { - let query = FuzzyQuery::new(pattern); + } else if is_refined { // optimization: if the pattern is a more specific version of the previous one // then we can score the filtered set. self.matches.retain_mut(|pmatch| { @@ -479,12 +550,11 @@ impl Picker { self.force_score(); } - log::debug!("picker score {:?}", Instant::now().duration_since(now)); - // reset cursor position self.cursor = 0; let pattern = self.prompt.line(); - self.previous_pattern.clone_from(pattern); + self.previous_pattern.0.clone_from(pattern); + self.previous_pattern.1 = query; } pub fn force_score(&mut self) { @@ -619,6 +689,11 @@ impl Component for Picker { key!(Esc) | ctrl!('c') => { return close_fn; } + alt!(Enter) => { + if let Some(option) = self.selection() { + (self.callback_fn)(cx, option, Action::Load); + } + } key!(Enter) => { if let Some(option) = self.selection() { (self.callback_fn)(cx, option, Action::Replace); @@ -651,7 +726,7 @@ impl Component for Picker { fn render(&mut self, area: Rect, surface: &mut Surface, cx: &mut Context) { let text_style = cx.editor.theme.get("ui.text"); let selected = cx.editor.theme.get("ui.text.focus"); - let highlighted = cx.editor.theme.get("special").add_modifier(Modifier::BOLD); + let highlight_style = cx.editor.theme.get("special").add_modifier(Modifier::BOLD); // -- Render the frame: // clear area @@ -691,61 +766,126 @@ impl Component for Picker { } // -- Render the contents: - // subtract area of prompt from top and current item marker " > " from left - let inner = inner.clip_top(2).clip_left(3); + // subtract area of prompt from top + let inner = inner.clip_top(2); let rows = inner.height; let offset = self.cursor - (self.cursor % std::cmp::max(1, rows as usize)); + let cursor = self.cursor.saturating_sub(offset); - let files = self + let options = self .matches .iter() .skip(offset) - .map(|pmatch| (pmatch.index, self.options.get(pmatch.index).unwrap())); - - for (i, (_index, option)) in files.take(rows as usize).enumerate() { - let 
is_active = i == (self.cursor - offset); - if is_active { - surface.set_string( - inner.x.saturating_sub(3), - inner.y + i as u16, - " > ", - selected, - ); - surface.set_style( - Rect::new(inner.x, inner.y + i as u16, inner.width, 1), - selected, - ); - } + .take(rows as usize) + .map(|pmatch| &self.options[pmatch.index]) + .map(|option| option.format(&self.editor_data)) + .map(|mut row| { + const TEMP_CELL_SEP: &str = " "; + + let line = row.cell_text().fold(String::new(), |mut s, frag| { + s.push_str(&frag); + s.push_str(TEMP_CELL_SEP); + s + }); + + // Items are filtered by using the text returned by menu::Item::filter_text + // but we do highlighting here using the text in Row and therefore there + // might be inconsistencies. This is the best we can do since only the + // text in Row is displayed to the end user. + let (_score, highlights) = FuzzyQuery::new(self.prompt.line()) + .fuzzy_indicies(&line, &self.matcher) + .unwrap_or_default(); + + let highlight_byte_ranges: Vec<_> = line + .char_indices() + .enumerate() + .filter_map(|(char_idx, (byte_offset, ch))| { + highlights + .contains(&char_idx) + .then(|| byte_offset..byte_offset + ch.len_utf8()) + }) + .collect(); + + // The starting byte index of the current (iterating) cell + let mut cell_start_byte_offset = 0; + for cell in row.cells.iter_mut() { + let spans = match cell.content.lines.get(0) { + Some(s) => s, + None => { + cell_start_byte_offset += TEMP_CELL_SEP.len(); + continue; + } + }; + + let mut cell_len = 0; - let spans = option.label(&self.editor_data); - let (_score, highlights) = FuzzyQuery::new(self.prompt.line()) - .fuzzy_indicies(&String::from(&spans), &self.matcher) - .unwrap_or_default(); - - spans.0.into_iter().fold(inner, |pos, span| { - let new_x = surface - .set_string_truncated( - pos.x, - pos.y + i as u16, - &span.content, - pos.width as usize, - |idx| { - if highlights.contains(&idx) { - highlighted.patch(span.style) - } else if is_active { - selected.patch(span.style) + let graphemes_with_style: Vec<_> = spans + .0 + .iter() + .flat_map(|span| { + span.content + .grapheme_indices(true) + .zip(std::iter::repeat(span.style)) + }) + .map(|((grapheme_byte_offset, grapheme), style)| { + cell_len += grapheme.len(); + let start = cell_start_byte_offset; + + let grapheme_byte_range = + grapheme_byte_offset..grapheme_byte_offset + grapheme.len(); + + if highlight_byte_ranges.iter().any(|hl_rng| { + hl_rng.start >= start + grapheme_byte_range.start + && hl_rng.end <= start + grapheme_byte_range.end + }) { + (grapheme, style.patch(highlight_style)) } else { - text_style.patch(span.style) + (grapheme, style) } - }, - true, - self.truncate_start, - ) - .0; - pos.clip_left(new_x - pos.x) + }) + .collect(); + + let mut span_list: Vec<(String, Style)> = Vec::new(); + for (grapheme, style) in graphemes_with_style { + if span_list.last().map(|(_, sty)| sty) == Some(&style) { + let (string, _) = span_list.last_mut().unwrap(); + string.push_str(grapheme); + } else { + span_list.push((String::from(grapheme), style)) + } + } + + let spans: Vec = span_list + .into_iter() + .map(|(string, style)| Span::styled(string, style)) + .collect(); + let spans: Spans = spans.into(); + *cell = Cell::from(spans); + + cell_start_byte_offset += cell_len + TEMP_CELL_SEP.len(); + } + + row }); - } + + let table = Table::new(options) + .style(text_style) + .highlight_style(selected) + .highlight_symbol(" > ") + .column_spacing(1) + .widths(&self.widths); + + use tui::widgets::TableState; + + table.render_table( + inner, + surface, + &mut 
TableState { + offset: 0, + selected: Some(cursor), + }, + ); } fn cursor(&self, area: Rect, editor: &Editor) -> (Option, CursorKind) { @@ -812,9 +952,7 @@ impl Component for DynamicPicker { Some(overlay) => &mut overlay.content.file_picker.picker, None => return, }; - picker.options = new_options; - picker.cursor = 0; - picker.force_score(); + picker.set_options(new_options); editor.reset_idle_timer(); })); anyhow::Ok(callback) diff --git a/helix-term/src/ui/prompt.rs b/helix-term/src/ui/prompt.rs index 5fb6745a..35ae8c2a 100644 --- a/helix-term/src/ui/prompt.rs +++ b/helix-term/src/ui/prompt.rs @@ -14,8 +14,11 @@ use helix_view::{ Editor, }; -pub type Completion = (RangeFrom, Cow<'static, str>); type PromptCharHandler = Box; +pub type Completion = (RangeFrom, Cow<'static, str>); +type CompletionFn = Box Vec>; +type CallbackFn = Box; +pub type DocFn = Box Option>>; pub struct Prompt { prompt: Cow<'static, str>, @@ -25,9 +28,9 @@ pub struct Prompt { selection: Option, history_register: Option, history_pos: Option, - completion_fn: Box Vec>, - callback_fn: Box, - pub doc_fn: Box Option>>, + completion_fn: CompletionFn, + callback_fn: CallbackFn, + pub doc_fn: DocFn, next_char_handler: Option, } @@ -513,7 +516,7 @@ impl Component for Prompt { alt!('d') | alt!(Delete) | ctrl!(Delete) => self.delete_word_forwards(cx.editor), ctrl!('k') => self.kill_to_end_of_line(cx.editor), ctrl!('u') => self.kill_to_start_of_line(cx.editor), - ctrl!('h') | key!(Backspace) => { + ctrl!('h') | key!(Backspace) | shift!(Backspace) => { self.delete_char_backwards(cx.editor); (self.callback_fn)(cx, &self.line, PromptEvent::Update); } diff --git a/helix-term/src/ui/statusline.rs b/helix-term/src/ui/statusline.rs index a25b4540..88786351 100644 --- a/helix-term/src/ui/statusline.rs +++ b/helix-term/src/ui/statusline.rs @@ -1,5 +1,6 @@ use helix_core::{coords_at_pos, encoding, Position}; use helix_lsp::lsp::DiagnosticSeverity; +use helix_view::document::DEFAULT_LANGUAGE_NAME; use helix_view::{ document::{Mode, SCRATCH_BUFFER_NAME}, graphics::Rect, @@ -141,6 +142,9 @@ where helix_view::editor::StatusLineElement::Spinner => render_lsp_spinner, helix_view::editor::StatusLineElement::FileBaseName => render_file_base_name, helix_view::editor::StatusLineElement::FileName => render_file_name, + helix_view::editor::StatusLineElement::FileModificationIndicator => { + render_file_modification_indicator + } helix_view::editor::StatusLineElement::FileEncoding => render_file_encoding, helix_view::editor::StatusLineElement::FileLineEnding => render_file_line_ending, helix_view::editor::StatusLineElement::FileType => render_file_type, @@ -155,6 +159,7 @@ where helix_view::editor::StatusLineElement::TotalLineNumbers => render_total_line_numbers, helix_view::editor::StatusLineElement::Separator => render_separator, helix_view::editor::StatusLineElement::Spacer => render_spacer, + helix_view::editor::StatusLineElement::VersionControl => render_version_control, } } @@ -402,7 +407,7 @@ fn render_file_type(context: &mut RenderContext, write: F) where F: Fn(&mut RenderContext, String, Option