diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 529543781..286441b66 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -7,6 +7,14 @@ updates: directory: "/" schedule: interval: "weekly" + groups: + tree-sitter: + patterns: + - "tree-sitter*" + rust-dependencies: + update-types: + - "minor" + - "patch" - package-ecosystem: "github-actions" directory: "/" diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 3d47c2088..7ba46ce56 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -12,6 +12,7 @@ jobs: check: name: Check (msrv) runs-on: ubuntu-latest + if: github.repository == 'helix-editor/helix' || github.event_name != 'schedule' steps: - name: Checkout sources uses: actions/checkout@v4 @@ -31,6 +32,7 @@ jobs: test: name: Test Suite runs-on: ${{ matrix.os }} + if: github.repository == 'helix-editor/helix' || github.event_name != 'schedule' env: RUST_BACKTRACE: 1 HELIX_LOG_LEVEL: info @@ -65,6 +67,7 @@ jobs: lints: name: Lints runs-on: ubuntu-latest + if: github.repository == 'helix-editor/helix' || github.event_name != 'schedule' steps: - name: Checkout sources uses: actions/checkout@v4 @@ -92,6 +95,7 @@ jobs: docs: name: Docs runs-on: ubuntu-latest + if: github.repository == 'helix-editor/helix' || github.event_name != 'schedule' steps: - name: Checkout sources uses: actions/checkout@v4 diff --git a/.github/workflows/cachix.yml b/.github/workflows/cachix.yml index 57f0a0db4..9638137b8 100644 --- a/.github/workflows/cachix.yml +++ b/.github/workflows/cachix.yml @@ -14,7 +14,7 @@ jobs: uses: actions/checkout@v4 - name: Install nix - uses: cachix/install-nix-action@v25 + uses: cachix/install-nix-action@v26 - name: Authenticate with Cachix uses: cachix/cachix-action@v14 diff --git a/Cargo.lock b/Cargo.lock index f7d14270a..632cd8a3b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -19,9 +19,9 @@ checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" [[package]] name = "ahash" -version = "0.8.6" +version = "0.8.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91429305e9f0a25f6205c5b8e0d2db09e0708a7a6df0f42212bb56c32c8ac97a" +checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" dependencies = [ "cfg-if", "getrandom", @@ -62,15 +62,15 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.80" +version = "1.0.81" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ad32ce52e4161730f7098c077cd2ed6229b5804ccf99e5366be1ab72a98b4e1" +checksum = "0952808a6c2afd1aa8947271f3a60f1a6763c7b912d210184c5149b5cf147247" [[package]] name = "arc-swap" -version = "1.6.0" +version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bddcadddf5e9015d310179a59bb28c4d4b9920ad0f11e8e14dbadf654890c9a6" +checksum = "7b3d0060af21e8d11a926981cc00c6c1541aa91dd64b9f881985c3da1094425f" [[package]] name = "autocfg" @@ -101,9 +101,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.4.2" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed570934406eb16438a4e976b1b4500774099c13b8cb96eec99f620f05090ddf" +checksum = "cf4b9d6a944f767f8e5e0db018570623c85f3d925ac718db4e06d0187adb21c1" [[package]] name = "bstr" @@ -116,15 +116,6 @@ dependencies = [ "serde", ] -[[package]] -name = "btoi" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"9dd6407f73a9b8b6162d8a2ef999fe6afd7cc15902ebf42c5cd296addf17e0ad" -dependencies = [ - "num-traits", -] - [[package]] name = "bumpalo" version = "3.12.0" @@ -145,9 +136,9 @@ checksum = "df8670b8c7b9dae1793364eafadf7239c40d669904660c5960d74cfd80b46a53" [[package]] name = "cc" -version = "1.0.85" +version = "1.0.90" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b918671670962b48bc23753aef0c51d072dca6f52f01f800854ada6ddb7f7d3" +checksum = "8cd6604a82acf3039f1144f54b8eb34e91ffba622051189e71b781822d5ee1f5" [[package]] name = "cfg-if" @@ -168,9 +159,9 @@ dependencies = [ [[package]] name = "chrono" -version = "0.4.34" +version = "0.4.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5bc015644b92d5890fab7489e49d21f879d5c990186827d42ec511919404f38b" +checksum = "8eaf5903dcbc0a39312feb77df2ff4c76387d591b9fc7b04a238dcf8bb62639a" dependencies = [ "android-tzdata", "iana-time-zone", @@ -180,9 +171,9 @@ dependencies = [ [[package]] name = "clipboard-win" -version = "5.1.0" +version = "5.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ec832972fefb8cf9313b45a0d1945e29c9c251f1d4c6eafc5fe2124c02d2e81" +checksum = "d517d4b86184dbb111d3556a10f1c8a04da7428d2987bf1081602bf11c3aa9ee" dependencies = [ "error-code", ] @@ -282,7 +273,7 @@ version = "0.27.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f476fe445d41c9e991fd07515a6f463074b782242ccf4a5b7b1d1012e70824df" dependencies = [ - "bitflags 2.4.2", + "bitflags 2.5.0", "crossterm_winapi", "filedescriptor", "futures-core", @@ -415,9 +406,6 @@ name = "faster-hex" version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a2a2b11eda1d40935b26cf18f6833c526845ae8c41e58d09af6adeb6f0269183" -dependencies = [ - "serde", -] [[package]] name = "fastrand" @@ -467,6 +455,12 @@ dependencies = [ "miniz_oxide", ] +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + [[package]] name = "form_urlencoded" version = "1.2.1" @@ -531,9 +525,9 @@ checksum = "b6c80984affa11d98d1b88b66ac8853f143217b399d3c74116778ff8fdb4ed2e" [[package]] name = "gix" -version = "0.58.0" +version = "0.61.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31887c304d9a935f3e5494fb5d6a0106c34e965168ec0db9b457424eedd0c741" +checksum = "e4e0e59a44bf00de058ee98d6ecf3c9ed8f8842c1da642258ae4120d41ded8f7" dependencies = [ "gix-actor", "gix-attributes", @@ -579,13 +573,13 @@ dependencies = [ [[package]] name = "gix-actor" -version = "0.30.0" +version = "0.31.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a7bb9fad6125c81372987c06469601d37e1a2d421511adb69971b9083517a8a" +checksum = "45c3a3bde455ad2ee8ba8a195745241ce0b770a8a26faae59fcf409d01b28c46" dependencies = [ "bstr", - "btoi", "gix-date", + "gix-utils", "itoa", "thiserror", "winnow", @@ -593,9 +587,9 @@ dependencies = [ [[package]] name = "gix-attributes" -version = "0.22.0" +version = "0.22.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "214ee3792e504ee1ce206b36dcafa4f328ca313d1e2ac0b41433d68ef4e14260" +checksum = "eefb48f42eac136a4a0023f49a54ec31be1c7a9589ed762c45dcb9b953f7ecc8" dependencies = [ "bstr", "gix-glob", @@ -610,27 +604,27 @@ dependencies = [ [[package]] name = "gix-bitmap" -version = "0.2.10" +version = "0.2.11" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "78b6cd0f246180034ddafac9b00a112f19178135b21eb031b3f79355891f7325" +checksum = "a371db66cbd4e13f0ed9dc4c0fea712d7276805fccc877f77e96374d317e87ae" dependencies = [ "thiserror", ] [[package]] name = "gix-chunk" -version = "0.4.7" +version = "0.4.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "003ec6deacf68076a0c157271a127e0bb2c031c1a41f7168cbe5d248d9b85c78" +checksum = "45c8751169961ba7640b513c3b24af61aa962c967aaf04116734975cd5af0c52" dependencies = [ "thiserror", ] [[package]] name = "gix-command" -version = "0.3.3" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce1ffc7db3fb50b7dae6ecd937a3527cb725f444614df2ad8988d81806f13f09" +checksum = "f90009020dc4b3de47beed28e1334706e0a330ddd17f5cfeb097df3b15a54b77" dependencies = [ "bstr", "gix-path", @@ -640,9 +634,9 @@ dependencies = [ [[package]] name = "gix-commitgraph" -version = "0.24.0" +version = "0.24.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "82dbd7fb959862e3df2583331f0ad032ac93533e8a52f1b0694bc517f5d292bc" +checksum = "f7b102311085da4af18823413b5176d7c500fb2272eaf391cfa8635d8bcb12c4" dependencies = [ "bstr", "gix-chunk", @@ -654,9 +648,9 @@ dependencies = [ [[package]] name = "gix-config" -version = "0.34.0" +version = "0.36.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e62bf2073b6ce3921ffa6d8326f645f30eec5fc4a8e8a4bc0fcb721a2f3f69dc" +checksum = "62129c75e4b6229fe15fb9838cdc00c655e87105b651e4edd7c183fc5288b5d1" dependencies = [ "bstr", "gix-config-value", @@ -675,11 +669,11 @@ dependencies = [ [[package]] name = "gix-config-value" -version = "0.14.4" +version = "0.14.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b8a1e7bfb37a46ed0b8468db37a6d8a0a61d56bdbe4603ae492cb322e5f3958" +checksum = "fbd06203b1a9b33a78c88252a625031b094d9e1b647260070c25b09910c0a804" dependencies = [ - "bitflags 2.4.2", + "bitflags 2.5.0", "bstr", "gix-path", "libc", @@ -688,9 +682,9 @@ dependencies = [ [[package]] name = "gix-date" -version = "0.8.3" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb7f3dfb72bebe3449b5e642be64e3c6ccbe9821c8b8f19f487cf5bfbbf4067e" +checksum = "180b130a4a41870edfbd36ce4169c7090bca70e195da783dea088dd973daa59c" dependencies = [ "bstr", "itoa", @@ -700,9 +694,9 @@ dependencies = [ [[package]] name = "gix-diff" -version = "0.40.0" +version = "0.42.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cbdcb5e49c4b9729dd1c361040ae5c3cd7c497b2260b18c954f62db3a63e98cf" +checksum = "78e605593c2ef74980a534ade0909c7dc57cca72baa30cbb67d2dda621f99ac4" dependencies = [ "bstr", "gix-hash", @@ -712,9 +706,9 @@ dependencies = [ [[package]] name = "gix-discover" -version = "0.29.0" +version = "0.31.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4669218f3ec0cbbf8f16857b32200890f8ca585f36f5817242e4115fe4551af" +checksum = "64bab49087ed3710caf77e473dc0efc54ca33d8ccc6441359725f121211482b1" dependencies = [ "bstr", "dunce", @@ -728,9 +722,9 @@ dependencies = [ [[package]] name = "gix-features" -version = "0.38.0" +version = "0.38.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "184f7f7d4e45db0e2a362aeaf12c06c5e84817d0ef91d08e8e90170dad9f0b07" +checksum = "db4254037d20a247a0367aa79333750146a369719f0c6617fec4f5752cc62b37" dependencies = [ "crc32fast", "flate2", @@ 
-747,9 +741,9 @@ dependencies = [ [[package]] name = "gix-filter" -version = "0.9.0" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9240862840fb740d209422937195e129e4ed3da49af212383260134bea8f6c1a" +checksum = "bd71bf3e64d8fb5d5635d4166ca5a36fe56b292ffff06eab1d93ea47fd5beb89" dependencies = [ "bstr", "encoding_rs", @@ -768,9 +762,9 @@ dependencies = [ [[package]] name = "gix-fs" -version = "0.10.0" +version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4436e883d5769f9fb18677b8712b49228357815f9e4104174a6fc2d8461a437b" +checksum = "634b8a743b0aae03c1a74ee0ea24e8c5136895efac64ce52b3ea106e1c6f0613" dependencies = [ "gix-features", "gix-utils", @@ -778,11 +772,11 @@ dependencies = [ [[package]] name = "gix-glob" -version = "0.16.0" +version = "0.16.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4965a1d06d0ab84a29d4a67697a97352ab14ae1da821084e5afb1fd6d8191ca0" +checksum = "682bdc43cb3c00dbedfcc366de2a849b582efd8d886215dbad2ea662ec156bb5" dependencies = [ - "bitflags 2.4.2", + "bitflags 2.5.0", "bstr", "gix-features", "gix-path", @@ -790,9 +784,9 @@ dependencies = [ [[package]] name = "gix-hash" -version = "0.14.1" +version = "0.14.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0ed89cdc1dce26685c80271c4287077901de3c3dd90234d5fa47c22b2268653" +checksum = "f93d7df7366121b5018f947a04d37f034717e113dcf9ccd85c34b58e57a74d5e" dependencies = [ "faster-hex", "thiserror", @@ -800,9 +794,9 @@ dependencies = [ [[package]] name = "gix-hashtable" -version = "0.5.1" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ebe47d8c0887f82355e2e9e16b6cecaa4d5e5346a7a474ca78ff94de1db35a5b" +checksum = "7ddf80e16f3c19ac06ce415a38b8591993d3f73aede049cb561becb5b3a8e242" dependencies = [ "gix-hash", "hashbrown 0.14.3", @@ -811,9 +805,9 @@ dependencies = [ [[package]] name = "gix-ignore" -version = "0.11.0" +version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f7069aaca4a05784c4cb44e392f0eaf627c6e57e05d3100c0e2386a37a682f0" +checksum = "640dbeb4f5829f9fc14d31f654a34a0350e43a24e32d551ad130d99bf01f63f1" dependencies = [ "bstr", "gix-glob", @@ -824,14 +818,14 @@ dependencies = [ [[package]] name = "gix-index" -version = "0.29.0" +version = "0.31.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d7152181ba8f0a3addc5075dd612cea31fc3e252b29c8be8c45f4892bf87426" +checksum = "549621f13d9ccf325a7de45506a3266af0d08f915181c5687abb5e8669bfd2e6" dependencies = [ - "bitflags 2.4.2", + "bitflags 2.5.0", "bstr", - "btoi", "filetime", + "fnv", "gix-bitmap", "gix-features", "gix-fs", @@ -839,6 +833,8 @@ dependencies = [ "gix-lock", "gix-object", "gix-traverse", + "gix-utils", + "hashbrown 0.14.3", "itoa", "libc", "memmap2", @@ -860,9 +856,9 @@ dependencies = [ [[package]] name = "gix-macros" -version = "0.1.3" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d75e7ab728059f595f6ddc1ad8771b8d6a231971ae493d9d5948ecad366ee8bb" +checksum = "1dff438f14e67e7713ab9332f5fd18c8f20eb7eb249494f6c2bf170522224032" dependencies = [ "proc-macro2", "quote", @@ -871,16 +867,16 @@ dependencies = [ [[package]] name = "gix-object" -version = "0.41.0" +version = "0.42.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "693ce9d30741506cb082ef2d8b797415b48e032cce0ab23eff894c19a7e4777b" +checksum = 
"3d4f8efae72030df1c4a81d02dbe2348e748d9b9a11e108ed6efbd846326e051" dependencies = [ "bstr", - "btoi", "gix-actor", "gix-date", "gix-features", "gix-hash", + "gix-utils", "gix-validate", "itoa", "smallvec", @@ -890,9 +886,9 @@ dependencies = [ [[package]] name = "gix-odb" -version = "0.57.0" +version = "0.59.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ba2fa9e81f2461b78b4d81a807867667326c84cdab48e0aed7b73a593aa1be4" +checksum = "81b55378c719693380f66d9dd21ce46721eed2981d8789fc698ec1ada6fa176e" dependencies = [ "arc-swap", "gix-date", @@ -910,9 +906,9 @@ dependencies = [ [[package]] name = "gix-pack" -version = "0.47.0" +version = "0.49.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8da5f3e78c96b76c4e6fe5e8e06b76221e4a0ee9a255aa935ed1fdf68988dfd8" +checksum = "6391aeaa030ad64aba346a9f5c69bb1c4e5c6fb4411705b03b40b49d8614ec30" dependencies = [ "clru", "gix-chunk", @@ -942,9 +938,9 @@ dependencies = [ [[package]] name = "gix-path" -version = "0.10.4" +version = "0.10.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14a6282621aed1becc3f83d64099a564b3b9063f22783d9a87ea502a3e9f2e40" +checksum = "23623cf0f475691a6d943f898c4d0b89f5c1a2a64d0f92bce0e0322ee6528783" dependencies = [ "bstr", "gix-trace", @@ -955,11 +951,11 @@ dependencies = [ [[package]] name = "gix-pathspec" -version = "0.6.0" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9cbd49750edb26b0a691e5246fc635fa554d344da825cd20fa9ee0da9c1b761f" +checksum = "1a96ed0e71ce9084a471fddfa74e842576a7cbf02fe8bd50388017ac461aed97" dependencies = [ - "bitflags 2.4.2", + "bitflags 2.5.0", "bstr", "gix-attributes", "gix-config-value", @@ -970,20 +966,20 @@ dependencies = [ [[package]] name = "gix-quote" -version = "0.4.10" +version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f7dc10303d73a960d10fb82f81188b036ac3e6b11b5795b20b1a60b51d1321f" +checksum = "cbff4f9b9ea3fa7a25a70ee62f545143abef624ac6aa5884344e70c8b0a1d9ff" dependencies = [ "bstr", - "btoi", + "gix-utils", "thiserror", ] [[package]] name = "gix-ref" -version = "0.41.0" +version = "0.43.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5818958994ad7879fa566f5441ebcc48f0926aa027b28948e6fbf6578894dc31" +checksum = "fd4aba68b925101cb45d6df328979af0681364579db889098a0de75b36c77b65" dependencies = [ "gix-actor", "gix-date", @@ -1003,9 +999,9 @@ dependencies = [ [[package]] name = "gix-refspec" -version = "0.22.0" +version = "0.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "613aa4d93034c5791d13bdc635e530f4ddab1412ddfb4a8215f76213177b61c7" +checksum = "dde848865834a54fe4d9b4573f15d0e9a68eaf3d061b42d3ed52b4b8acf880b2" dependencies = [ "bstr", "gix-hash", @@ -1017,9 +1013,9 @@ dependencies = [ [[package]] name = "gix-revision" -version = "0.26.0" +version = "0.27.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "288f6549d7666db74dc3f169a9a333694fc28ecd2f5aa7b2c979c89eb556751a" +checksum = "9e34196e1969bd5d36e2fbc4467d893999132219d503e23474a8ad2b221cb1e8" dependencies = [ "bstr", "gix-date", @@ -1033,9 +1029,9 @@ dependencies = [ [[package]] name = "gix-revwalk" -version = "0.12.0" +version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b9b4d91dfc5c14fee61a28c65113ded720403b65a0f46169c0460f731a5d03c" +checksum = 
"e0a7d393ae814eeaae41a333c0ff684b243121cc61ccdc5bbe9897094588047d" dependencies = [ "gix-commitgraph", "gix-date", @@ -1048,11 +1044,11 @@ dependencies = [ [[package]] name = "gix-sec" -version = "0.10.4" +version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8d9bf462feaf05f2121cba7399dbc6c34d88a9cad58fc1e95027791d6a3c6d2" +checksum = "fddc27984a643b20dd03e97790555804f98cf07404e0e552c0ad8133266a79a1" dependencies = [ - "bitflags 2.4.2", + "bitflags 2.5.0", "gix-path", "libc", "windows-sys 0.52.0", @@ -1060,9 +1056,9 @@ dependencies = [ [[package]] name = "gix-submodule" -version = "0.8.0" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73182f6c1f5ed1ed94ba16581ac62593d5e29cd1c028b2af618f836283b8f8d4" +checksum = "4fb7ea05666362472fecd44c1fc35fe48a5b9b841b431cc4f85b95e6f20c23ec" dependencies = [ "bstr", "gix-config", @@ -1088,15 +1084,15 @@ dependencies = [ [[package]] name = "gix-trace" -version = "0.1.7" +version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02b202d766a7fefc596e2cc6a89cda8ad8ad733aed82da635ac120691112a9b1" +checksum = "9b838b2db8f62c9447d483a4c28d251b67fee32741a82cb4d35e9eb4e9fdc5ab" [[package]] name = "gix-traverse" -version = "0.37.0" +version = "0.38.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfc30c5b5e4e838683b59e1b0574ce6bc1c35916df9709aaab32bb7751daf08b" +checksum = "95aef84bc777025403a09788b1e4815c06a19332e9e5d87a955e1ed7da9bf0cf" dependencies = [ "gix-commitgraph", "gix-date", @@ -1110,9 +1106,9 @@ dependencies = [ [[package]] name = "gix-url" -version = "0.27.0" +version = "0.27.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26f1981ecc700f4fd73ae62b9ca2da7c8816c8fd267f0185e3f8c21e967984ac" +checksum = "8f0b24f3ecc79a5a53539de9c2e99425d0ef23feacdcf3faac983aa9a2f26849" dependencies = [ "bstr", "gix-features", @@ -1124,9 +1120,9 @@ dependencies = [ [[package]] name = "gix-utils" -version = "0.1.9" +version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56e839f3d0798b296411263da6bee780a176ef8008a5dfc31287f7eda9266ab8" +checksum = "0066432d4c277f9877f091279a597ea5331f68ca410efc874f0bdfb1cd348f92" dependencies = [ "fastrand", "unicode-normalization", @@ -1134,9 +1130,9 @@ dependencies = [ [[package]] name = "gix-validate" -version = "0.8.3" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac7cc36f496bd5d96cdca0f9289bb684480725d40db60f48194aa7723b883854" +checksum = "e39fc6e06044985eac19dd34d474909e517307582e462b2eb4c8fa51b6241545" dependencies = [ "bstr", "thiserror", @@ -1144,9 +1140,9 @@ dependencies = [ [[package]] name = "gix-worktree" -version = "0.30.0" +version = "0.32.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca36bb3dc54038c66507dc75c4d8edbee2d6d5cc45227b4eb508ad13dd60a006" +checksum = "fe78e03af9eec168eb187e05463a981c57f0a915f64b1788685a776bd2ef969c" dependencies = [ "bstr", "gix-attributes", @@ -1232,7 +1228,7 @@ version = "23.10.0" dependencies = [ "ahash", "arc-swap", - "bitflags 2.4.2", + "bitflags 2.5.0", "chrono", "dunce", "encoding_rs", @@ -1344,6 +1340,7 @@ version = "23.10.0" dependencies = [ "dunce", "etcetera", + "regex-cursor", "ropey", "tempfile", "which", @@ -1397,7 +1394,7 @@ dependencies = [ name = "helix-tui" version = "23.10.0" dependencies = [ - "bitflags 2.4.2", + "bitflags 2.5.0", "cassowary", "crossterm", 
"helix-core", @@ -1431,7 +1428,7 @@ version = "23.10.0" dependencies = [ "anyhow", "arc-swap", - "bitflags 2.4.2", + "bitflags 2.5.0", "chardetng", "clipboard-win", "crossterm", @@ -1604,12 +1601,12 @@ checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd" [[package]] name = "libloading" -version = "0.8.1" +version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c571b676ddfc9a8c12f1f3d3085a7b163966a8fd8098a90640953ce5f6170161" +checksum = "0c2a198fb6b0eada2a8df47933734e6d35d350665a33a3593d7164fa52c75c19" dependencies = [ "cfg-if", - "windows-sys 0.48.0", + "windows-targets 0.52.0", ] [[package]] @@ -1639,15 +1636,15 @@ dependencies = [ [[package]] name = "log" -version = "0.4.20" +version = "0.4.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f" +checksum = "90ed8c1e510134f979dbc4f070f87d4313098b704861a105fe34231c70a3901c" [[package]] name = "lsp-types" -version = "0.95.0" +version = "0.95.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "158c1911354ef73e8fe42da6b10c0484cb65c7f1007f28022e847706c1ab6984" +checksum = "8e34d33a8e9b006cd3fc4fe69a921affa097bae4bb65f76271f4644f9a334365" dependencies = [ "bitflags 1.3.2", "serde", @@ -1691,9 +1688,9 @@ dependencies = [ [[package]] name = "mio" -version = "0.8.9" +version = "0.8.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3dce281c5e46beae905d4de1870d8b1509a9142b62eedf18b443b011ca8343d0" +checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c" dependencies = [ "libc", "log", @@ -1768,9 +1765,9 @@ checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" [[package]] name = "open" -version = "5.0.1" +version = "5.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90878fb664448b54c4e592455ad02831e23a3f7e157374a8b95654731aac7349" +checksum = "449f0ff855d85ddbf1edd5b646d65249ead3f5e422aaa86b7d2d0b049b103e32" dependencies = [ "is-wsl", "libc", @@ -1845,7 +1842,7 @@ version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dce76ce678ffc8e5675b22aa1405de0b7037e2fdf8913fea40d1926c6fe1e6e7" dependencies = [ - "bitflags 2.4.2", + "bitflags 2.5.0", "memchr", "unicase", ] @@ -1940,15 +1937,28 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.4.4" +version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b7fa1134405e2ec9353fd416b17f8dacd46c473d7d3fd1cf202706a14eb792a" +checksum = "5bb987efffd3c6d0d8f5f89510bb458559eab11e4f869acb20bf845e016259cd" dependencies = [ "aho-corasick", "memchr", "regex-syntax", ] +[[package]] +name = "regex-cursor" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae4327b5fde3ae6fda0152128d3d59b95a5aad7be91c405869300091720f7169" +dependencies = [ + "log", + "memchr", + "regex-automata", + "regex-syntax", + "ropey", +] + [[package]] name = "regex-syntax" version = "0.8.2" @@ -1977,7 +1987,7 @@ version = "0.38.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6ea3e1a662af26cd7a3ba09c0297a31af215563ecf42817c98df621387f4e949" dependencies = [ - "bitflags 2.4.2", + "bitflags 2.5.0", "errno", "libc", "linux-raw-sys", @@ -2013,18 +2023,18 @@ checksum = "1792db035ce95be60c3f8853017b3999209281c24e2ba5bc8e59bf97a0c590c1" [[package]] name = "serde" -version = "1.0.196" 
+version = "1.0.197" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "870026e60fa08c69f064aa766c10f10b1d62db9ccd4d0abb206472bee0ce3b32" +checksum = "3fb1c873e1b9b056a4dc4c0c198b24c3ffa059243875552b2bd0933b1aee4ce2" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.196" +version = "1.0.197" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33c85360c95e7d137454dc81d9a4ed2b8efd8fbe19cee57357b32b9771fccb67" +checksum = "7eb0b34b42edc17f6b7cac84a52a1c5f0e1bb2227e997ca9011ea3dd34e8610b" dependencies = [ "proc-macro2", "quote", @@ -2033,9 +2043,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.113" +version = "1.0.114" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69801b70b1c3dac963ecb03a364ba0ceda9cf60c71cfe475e99864759c8b8a79" +checksum = "c5f09b1bd632ef549eaa9f60a1f8de742bdbc698e6cee2095fc84dde5f549ae0" dependencies = [ "itoa", "ryu", @@ -2203,9 +2213,9 @@ dependencies = [ [[package]] name = "tempfile" -version = "3.10.0" +version = "3.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a365e8cd18e44762ef95d87f284f4b5cd04107fec2ff3052bd6a3e6069669e67" +checksum = "85b77fafb263dd9d05cbeac119526425676db3784113aa9295c88498cbf8bff1" dependencies = [ "cfg-if", "fastrand", @@ -2244,18 +2254,18 @@ dependencies = [ [[package]] name = "thiserror" -version = "1.0.57" +version = "1.0.58" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e45bcbe8ed29775f228095caf2cd67af7a4ccf756ebff23a306bf3e8b47b24b" +checksum = "03468839009160513471e86a034bb2c5c0e4baae3b43f79ffc55c4a5427b3297" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.57" +version = "1.0.58" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a953cb265bef375dae3de6663da4d3804eee9682ea80d8e2542529b73c531c81" +checksum = "c61f3ba182994efc43764a46c018c347bc492c79f024e705f46567b418f6d4f7" dependencies = [ "proc-macro2", "quote", @@ -2347,9 +2357,9 @@ dependencies = [ [[package]] name = "tokio-stream" -version = "0.1.14" +version = "0.1.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "397c988d37662c7dda6d2208364a706264bf3d6138b11d436cbac0ad38832842" +checksum = "267ac89e0bec6e691e5813911606935d77c476ff49024f98abcea3e7b15e37af" dependencies = [ "futures-core", "pin-project-lite", @@ -2358,9 +2368,9 @@ dependencies = [ [[package]] name = "toml" -version = "0.8.10" +version = "0.8.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a9aad4a3066010876e8dcf5a8a06e70a558751117a145c6ce2b82c2e2054290" +checksum = "e9dd1545e8208b4a5af1aa9bbd0b4cf7e9ea08fabc5d0a5c67fcaafa17433aa3" dependencies = [ "serde", "serde_spanned", @@ -2379,9 +2389,9 @@ dependencies = [ [[package]] name = "toml_edit" -version = "0.22.4" +version = "0.22.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c9ffdf896f8daaabf9b66ba8e77ea1ed5ed0f72821b398aba62352e95062951" +checksum = "c12219811e0c1ba077867254e5ad62ee2c9c190b0d957110750ac0cda1ae96cd" dependencies = [ "indexmap", "serde", @@ -2392,8 +2402,9 @@ dependencies = [ [[package]] name = "tree-sitter" -version = "0.20.10" -source = "git+https://github.com/helix-editor/tree-sitter?rev=660481dbf71413eba5a928b0b0ab8da50c1109e0#660481dbf71413eba5a928b0b0ab8da50c1109e0" +version = "0.22.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"bdb9c9f15eae91dcd00ee0d86a281d16e6263786991b662b34fa9632c21a046b" dependencies = [ "cc", "regex", @@ -2800,9 +2811,9 @@ checksum = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04" [[package]] name = "winnow" -version = "0.5.28" +version = "0.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c830786f7720c2fd27a1a0e27a709dbd3c4d009b56d098fc742d4f4eab91fe2" +checksum = "dffa400e67ed5a4dd237983829e66475f0a4a26938c4b04c21baede6262215b8" dependencies = [ "memchr", ] diff --git a/Cargo.toml b/Cargo.toml index 64e33d731..2dda61fee 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -37,7 +37,7 @@ package.helix-tui.opt-level = 2 package.helix-term.opt-level = 2 [workspace.dependencies] -tree-sitter = { version = "0.20", git = "https://github.com/helix-editor/tree-sitter", rev = "660481dbf71413eba5a928b0b0ab8da50c1109e0" } +tree-sitter = { version = "0.22" } nucleo = "0.2.0" ignore = "0.4" globset = "0.4.14" diff --git a/book/src/configuration.md b/book/src/configuration.md index de33c1ade..c55426c01 100644 --- a/book/src/configuration.md +++ b/book/src/configuration.md @@ -68,6 +68,7 @@ Its settings will be merged with the configuration directory `config.toml` and t | `insert-final-newline` | Whether to automatically insert a trailing line-ending on write if missing | `true` | | `popup-border` | Draw border around `popup`, `menu`, `all`, or `none` | `none` | | `indent-heuristic` | How the indentation for a newly inserted line is computed: `simple` just copies the indentation level from the previous line, `tree-sitter` computes the indentation based on the syntax tree and `hybrid` combines both approaches. If the chosen heuristic is not available, a different one will be used as a fallback (the fallback order being `hybrid` -> `tree-sitter` -> `simple`). | `hybrid` +| `jump-label-alphabet` | The characters that are used to generate two character jump labels. Characters at the start of the alphabet are used first. | "abcdefghijklmnopqrstuvwxyz" ### `[editor.statusline]` Section @@ -108,6 +109,7 @@ The following statusline elements can be configured: | `mode` | The current editor mode (`mode.normal`/`mode.insert`/`mode.select`) | | `spinner` | A progress spinner indicating LSP activity | | `file-name` | The path/name of the opened file | +| `file-absolute-path` | The absolute path/name of the opened file | | `file-base-name` | The basename of the opened file | | `file-modification-indicator` | The indicator to show whether the file is modified (a `[+]` appears when there are unsaved changes) | | `file-encoding` | The encoding of the opened file if it differs from UTF-8 | @@ -375,8 +377,25 @@ wrap-indicator = "" # set wrap-indicator to "" to hide it ### `[editor.smart-tab]` Section +Options for navigating and editing using tab key. | Key | Description | Default | |------------|-------------|---------| | `enable` | If set to true, then when the cursor is in a position with non-whitespace to its left, instead of inserting a tab, it will run `move_parent_node_end`. If there is only whitespace to the left, then it inserts a tab as normal. With the default bindings, to explicitly insert a tab character, press Shift-tab. | `true` | | `supersede-menu` | Normally, when a menu is on screen, such as when auto complete is triggered, the tab key is bound to cycling through the items. This means when menus are on screen, one cannot use the tab key to trigger the `smart-tab` command. 
If this option is set to true, the `smart-tab` command always takes precedence, which means one cannot use the tab key to cycle through menu items. One of the other bindings must be used instead, such as arrow keys or `C-n`/`C-p`. | `false` | + + +Due to lack of support for S-tab in some terminals, the default keybindings don't fully embrace smart-tab editing experience. If you enjoy smart-tab navigation and a terminal that supports the [Enhanced Keyboard protocol](https://github.com/helix-editor/helix/wiki/Terminal-Support#enhanced-keyboard-protocol), consider setting extra keybindings: + +``` +[keys.normal] +tab = "move_parent_node_end" +S-tab = "move_parent_node_start" + +[keys.insert] +S-tab = "move_parent_node_start" + +[keys.select] +tab = "extend_parent_node_end" +S-tab = "extend_parent_node_start" +``` diff --git a/book/src/generated/lang-support.md b/book/src/generated/lang-support.md index 7aec37778..b74a8cdb7 100644 --- a/book/src/generated/lang-support.md +++ b/book/src/generated/lang-support.md @@ -1,5 +1,6 @@ | Language | Syntax Highlighting | Treesitter Textobjects | Auto Indent | Default LSP | | --- | --- | --- | --- | --- | +| ada | ✓ | ✓ | | `ada_language_server`, `ada_language_server` | | agda | ✓ | | | | | astro | ✓ | | | | | awk | ✓ | ✓ | | `awk-language-server` | @@ -8,6 +9,7 @@ | beancount | ✓ | | | | | bibtex | ✓ | | | `texlab` | | bicep | ✓ | | | `bicep-langserver` | +| blade | ✓ | | | | | blueprint | ✓ | | | `blueprint-compiler` | | c | ✓ | ✓ | ✓ | `clangd` | | c-sharp | ✓ | ✓ | | `OmniSharp` | @@ -30,7 +32,7 @@ | devicetree | ✓ | | | | | dhall | ✓ | ✓ | | `dhall-lsp-server` | | diff | ✓ | | | | -| docker-compose | ✓ | | ✓ | `docker-compose-langserver` | +| docker-compose | ✓ | | ✓ | `docker-compose-langserver`, `yaml-language-server` | | dockerfile | ✓ | | | `docker-langserver` | | dot | ✓ | | | `dot-language-server` | | dtd | ✓ | | | | @@ -44,6 +46,7 @@ | erb | ✓ | | | | | erlang | ✓ | ✓ | | `erlang_ls` | | esdl | ✓ | | | | +| fidl | ✓ | | | | | fish | ✓ | ✓ | ✓ | | | forth | ✓ | | | `forth-lsp` | | fortran | ✓ | | ✓ | `fortls` | @@ -57,6 +60,7 @@ | git-ignore | ✓ | | | | | git-rebase | ✓ | | | | | gleam | ✓ | ✓ | | `gleam` | +| glimmer | ✓ | | | `ember-language-server` | | glsl | ✓ | ✓ | ✓ | | | gn | ✓ | | | | | go | ✓ | ✓ | ✓ | `gopls`, `golangci-lint-langserver` | @@ -71,11 +75,13 @@ | haskell-persistent | ✓ | | | | | hcl | ✓ | ✓ | ✓ | `terraform-ls` | | heex | ✓ | ✓ | | `elixir-ls` | +| helm | ✓ | | | `helm_ls` | | hocon | ✓ | | ✓ | | | hoon | ✓ | | | | | hosts | ✓ | | | | | html | ✓ | | | `vscode-html-language-server` | | hurl | ✓ | | ✓ | | +| hyprlang | ✓ | | ✓ | | | idris | | | | `idris2-lsp` | | iex | ✓ | | | | | ini | ✓ | | | | @@ -86,13 +92,16 @@ | jsdoc | ✓ | | | | | json | ✓ | | ✓ | `vscode-json-language-server` | | json5 | ✓ | | | | +| jsonc | ✓ | | ✓ | `vscode-json-language-server` | | jsonnet | ✓ | | | `jsonnet-language-server` | | jsx | ✓ | ✓ | ✓ | `typescript-language-server` | | julia | ✓ | ✓ | ✓ | `julia` | | just | ✓ | ✓ | ✓ | | | kdl | ✓ | ✓ | ✓ | | +| koka | ✓ | | ✓ | | | kotlin | ✓ | | | `kotlin-language-server` | | latex | ✓ | ✓ | | `texlab` | +| ld | ✓ | | ✓ | | | lean | ✓ | | | `lean` | | ledger | ✓ | | | | | llvm | ✓ | ✓ | ✓ | | @@ -103,7 +112,7 @@ | lua | ✓ | ✓ | ✓ | `lua-language-server` | | make | ✓ | | ✓ | | | markdoc | ✓ | | | `markdoc-ls` | -| markdown | ✓ | | | `marksman` | +| markdown | ✓ | | | `marksman`, `markdown-oxide` | | markdown.inline | ✓ | | | | | matlab | ✓ | ✓ | ✓ | | | mermaid | ✓ | | | | @@ -127,10 +136,13 @@ | pem 
| ✓ | | | | | perl | ✓ | ✓ | ✓ | `perlnavigator` | | php | ✓ | ✓ | ✓ | `intelephense` | +| php-only | ✓ | | | | +| pkgbuild | ✓ | ✓ | ✓ | `pkgbuild-language-server`, `bash-language-server` | | pkl | ✓ | | ✓ | | | po | ✓ | ✓ | | | | pod | ✓ | | | | | ponylang | ✓ | ✓ | ✓ | | +| powershell | ✓ | | | | | prisma | ✓ | | | `prisma-language-server` | | prolog | | | | `swipl` | | protobuf | ✓ | ✓ | ✓ | `bufls`, `pb` | @@ -163,6 +175,7 @@ | sshclientconfig | ✓ | | | | | starlark | ✓ | ✓ | | | | strace | ✓ | | | | +| supercollider | ✓ | | | | | svelte | ✓ | | ✓ | `svelteserver` | | sway | ✓ | ✓ | ✓ | `forc` | | swift | ✓ | | | `sourcekit-lsp` | @@ -183,7 +196,7 @@ | unison | ✓ | | ✓ | | | uxntal | ✓ | | | | | v | ✓ | ✓ | ✓ | `v-analyzer` | -| vala | ✓ | | | `vala-language-server` | +| vala | ✓ | ✓ | | `vala-language-server` | | verilog | ✓ | ✓ | | `svlangserver` | | vhdl | ✓ | | | `vhdl_ls` | | vhs | ✓ | | | | diff --git a/book/src/generated/typable-cmd.md b/book/src/generated/typable-cmd.md index f4fcb6f62..dbb8b5f38 100644 --- a/book/src/generated/typable-cmd.md +++ b/book/src/generated/typable-cmd.md @@ -86,3 +86,4 @@ | `:clear-register` | Clear given register. If no argument is provided, clear all registers. | | `:redraw` | Clear and re-render the whole UI | | `:move` | Move the current buffer and its corresponding file to a different path | +| `:yank-diagnostic` | Yank diagnostic(s) under primary cursor to register, or clipboard by default | diff --git a/book/src/guides/textobject.md b/book/src/guides/textobject.md index 405f11c1b..b83c067e4 100644 --- a/book/src/guides/textobject.md +++ b/book/src/guides/textobject.md @@ -44,4 +44,4 @@ doesn't make sense in a navigation context. [tree-sitter-queries]: https://tree-sitter.github.io/tree-sitter/using-parsers#query-syntax [tree-sitter-captures]: https://tree-sitter.github.io/tree-sitter/using-parsers#capturing-nodes -[textobject-examples]: https://github.com/search?q=repo%3Ahelix-editor%2Fhelix+filename%3Atextobjects.scm&type=Code&ref=advsearch&l=&l= +[textobject-examples]: https://github.com/search?q=repo%3Ahelix-editor%2Fhelix+path%3A%2A%2A/textobjects.scm&type=Code&ref=advsearch&l=&l= diff --git a/book/src/keymap.md b/book/src/keymap.md index ac84147cd..3a5ccca53 100644 --- a/book/src/keymap.md +++ b/book/src/keymap.md @@ -12,6 +12,7 @@ - [Match mode](#match-mode) - [Window mode](#window-mode) - [Space mode](#space-mode) + - [Comment mode](#comment-mode) - [Popup](#popup) - [Unimpaired](#unimpaired) - [Insert mode](#insert-mode) @@ -48,7 +49,7 @@ Normal mode is the default mode when you launch helix. You can return to it from | `T` | Find 'till previous char | `till_prev_char` | | `F` | Find previous char | `find_prev_char` | | `G` | Go to line number `` | `goto_line` | -| `Alt-.` | Repeat last motion (`f`, `t` or `m`) | `repeat_last_motion` | +| `Alt-.` | Repeat last motion (`f`, `t`, `m`, `[` or `]`) | `repeat_last_motion` | | `Home` | Move to the start of the line | `goto_line_start` | | `End` | Move to the end of the line | `goto_line_end` | | `Ctrl-b`, `PageUp` | Move page up | `page_up` | @@ -223,6 +224,7 @@ Jumps to various locations. 
 | `.` | Go to last modification in current file | `goto_last_modification` |
 | `j` | Move down textual (instead of visual) line | `move_line_down` |
 | `k` | Move up textual (instead of visual) line | `move_line_up` |
+| `w` | Show labels at each word and select the word that belongs to the entered labels | `goto_word` |
 
 #### Match mode
 
@@ -289,6 +291,9 @@ This layer is a kludge of mappings, mostly pickers.
 | `h` | Select symbol references (**LSP**) | `select_references_to_symbol_under_cursor` |
 | `'` | Open last fuzzy picker | `last_picker` |
 | `w` | Enter [window mode](#window-mode) | N/A |
+| `c` | Comment/uncomment selections | `toggle_comments` |
+| `C` | Block comment/uncomment selections | `toggle_block_comments` |
+| `Alt-c` | Line comment/uncomment selections | `toggle_line_comments` |
 | `p` | Paste system clipboard after selections | `paste_clipboard_after` |
 | `P` | Paste system clipboard before selections | `paste_clipboard_before` |
 | `y` | Yank selections to clipboard | `yank_to_clipboard` |
diff --git a/book/src/languages.md b/book/src/languages.md
index e3900dca9..dd93fec53 100644
--- a/book/src/languages.md
+++ b/book/src/languages.md
@@ -42,7 +42,7 @@ name = "mylang"
 scope = "source.mylang"
 injection-regex = "mylang"
 file-types = ["mylang", "myl"]
-comment-token = "#"
+comment-tokens = "#"
 indent = { tab-width = 2, unit = " " }
 formatter = { command = "mylang-formatter" , args = ["--stdin"] }
 language-servers = [ "mylang-lsp" ]
@@ -61,7 +61,8 @@ These configuration keys are available:
 | `roots` | A set of marker files to look for when trying to find the workspace root. For example `Cargo.lock`, `yarn.lock` |
 | `auto-format` | Whether to autoformat this language when saving |
 | `diagnostic-severity` | Minimal severity of diagnostic for it to be displayed. (Allowed values: `Error`, `Warning`, `Info`, `Hint`) |
-| `comment-token` | The token to use as a comment-token |
+| `comment-tokens` | The tokens to use as comment tokens, either a single token `"//"` or an array `["//", "///", "//!"]` (the first token will be used for commenting). Also configurable as `comment-token` for backwards compatibility |
+| `block-comment-tokens` | The start and end tokens for a multiline comment, either an array or a single table of `{ start = "/*", end = "*/" }`. The first set of tokens will be used for commenting; any pair in the array can be uncommented |
 | `indent` | The indent to use. Has sub keys `unit` (the text inserted into the document when indenting; usually set to N spaces or `"\t"` for tabs) and `tab-width` (the number of spaces rendered for a tab) |
 | `language-servers` | The Language Servers used for this language. 
See below for more information in the section [Configuring Language Servers for a language](#configuring-language-servers-for-a-language) | | `grammar` | The tree-sitter grammar to use (defaults to the value of `name`) | diff --git a/book/src/themes.md b/book/src/themes.md index f040dfb19..29a8c4ba8 100644 --- a/book/src/themes.md +++ b/book/src/themes.md @@ -314,6 +314,7 @@ These scopes are used for theming the editor interface: | `ui.virtual.inlay-hint.parameter` | Style for inlay hints of kind `parameter` (LSPs are not required to set a kind) | | `ui.virtual.inlay-hint.type` | Style for inlay hints of kind `type` (LSPs are not required to set a kind) | | `ui.virtual.wrap` | Soft-wrap indicator (see the [`editor.soft-wrap` config][editor-section]) | +| `ui.virtual.jump-label` | Style for virtual jump labels | | `ui.menu` | Code and command completion menus | | `ui.menu.selected` | Selected autocomplete item | | `ui.menu.scroll` | `fg` sets thumb color, `bg` sets track color of scrollbar | @@ -333,5 +334,7 @@ These scopes are used for theming the editor interface: | `diagnostic.info` | Diagnostics info (editing area) | | `diagnostic.warning` | Diagnostics warning (editing area) | | `diagnostic.error` | Diagnostics error (editing area) | +| `diagnostic.unnecessary` | Diagnostics with unnecessary tag (editing area) | +| `diagnostic.deprecated` | Diagnostics with deprecated tag (editing area) | [editor-section]: ./configuration.md#editor-section diff --git a/flake.lock b/flake.lock index 9bb5dece1..48fb4a59f 100644 --- a/flake.lock +++ b/flake.lock @@ -7,11 +7,11 @@ ] }, "locked": { - "lastModified": 1701025348, - "narHash": "sha256-42GHmYH+GF7VjwGSt+fVT1CQuNpGanJbNgVHTAZppUM=", + "lastModified": 1709610799, + "narHash": "sha256-5jfLQx0U9hXbi2skYMGodDJkIgffrjIOgMRjZqms2QE=", "owner": "ipetkov", "repo": "crane", - "rev": "42afaeb1a0325194a7cdb526332d2cb92fddd07b", + "rev": "81c393c776d5379c030607866afef6406ca1be57", "type": "github" }, "original": { @@ -25,11 +25,11 @@ "systems": "systems" }, "locked": { - "lastModified": 1694529238, - "narHash": "sha256-zsNZZGTGnMOf9YpHKJqMSsa0dXbfmxeoJ7xHlrt+xmY=", + "lastModified": 1709126324, + "narHash": "sha256-q6EQdSeUZOG26WelxqkmR7kArjgWCdw5sfJVHPH/7j8=", "owner": "numtide", "repo": "flake-utils", - "rev": "ff7b65b44d01cf9ba6a71320833626af21126384", + "rev": "d465f4819400de7c8d874d50b982301f28a84605", "type": "github" }, "original": { @@ -40,11 +40,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1700794826, - "narHash": "sha256-RyJTnTNKhO0yqRpDISk03I/4A67/dp96YRxc86YOPgU=", + "lastModified": 1709479366, + "narHash": "sha256-n6F0n8UV6lnTZbYPl1A9q1BS0p4hduAv1mGAP17CVd0=", "owner": "nixos", "repo": "nixpkgs", - "rev": "5a09cb4b393d58f9ed0d9ca1555016a8543c2ac8", + "rev": "b8697e57f10292a6165a20f03d2f42920dfaf973", "type": "github" }, "original": { @@ -72,11 +72,11 @@ ] }, "locked": { - "lastModified": 1701137803, - "narHash": "sha256-0LcPAdql5IhQSUXJx3Zna0dYTgdIoYO7zUrsKgiBd04=", + "lastModified": 1709604635, + "narHash": "sha256-le4fwmWmjGRYWwkho0Gr7mnnZndOOe4XGbLw68OvF40=", "owner": "oxalica", "repo": "rust-overlay", - "rev": "9dd940c967502f844eacea52a61e9596268d4f70", + "rev": "e86c0fb5d3a22a5f30d7f64ecad88643fe26449d", "type": "github" }, "original": { diff --git a/helix-core/Cargo.toml b/helix-core/Cargo.toml index 6e29e8c48..d03d93653 100644 --- a/helix-core/Cargo.toml +++ b/helix-core/Cargo.toml @@ -31,8 +31,8 @@ tree-sitter.workspace = true once_cell = "1.19" arc-swap = "1" regex = "1" -bitflags = "2.4" -ahash = "0.8.6" +bitflags = "2.5" 
+ahash = "0.8.11" hashbrown = { version = "0.14.3", features = ["raw"] } dunce = "1.0" diff --git a/helix-core/src/comment.rs b/helix-core/src/comment.rs index 9c7e50f33..536b710ab 100644 --- a/helix-core/src/comment.rs +++ b/helix-core/src/comment.rs @@ -1,9 +1,12 @@ //! This module contains the functionality toggle comments on lines over the selection //! using the comment character defined in the user's `languages.toml` +use smallvec::SmallVec; + use crate::{ - find_first_non_whitespace_char, Change, Rope, RopeSlice, Selection, Tendril, Transaction, + syntax::BlockCommentToken, Change, Range, Rope, RopeSlice, Selection, Tendril, Transaction, }; +use helix_stdx::rope::RopeSliceExt; use std::borrow::Cow; /// Given text, a comment token, and a set of line indices, returns the following: @@ -22,12 +25,12 @@ fn find_line_comment( ) -> (bool, Vec, usize, usize) { let mut commented = true; let mut to_change = Vec::new(); - let mut min = usize::MAX; // minimum col for find_first_non_whitespace_char + let mut min = usize::MAX; // minimum col for first_non_whitespace_char let mut margin = 1; let token_len = token.chars().count(); for line in lines { let line_slice = text.line(line); - if let Some(pos) = find_first_non_whitespace_char(line_slice) { + if let Some(pos) = line_slice.first_non_whitespace_char() { let len = line_slice.len_chars(); if pos < min { @@ -94,6 +97,222 @@ pub fn toggle_line_comments(doc: &Rope, selection: &Selection, token: Option<&st Transaction::change(doc, changes.into_iter()) } +#[derive(Debug, PartialEq, Eq)] +pub enum CommentChange { + Commented { + range: Range, + start_pos: usize, + end_pos: usize, + start_margin: bool, + end_margin: bool, + start_token: String, + end_token: String, + }, + Uncommented { + range: Range, + start_pos: usize, + end_pos: usize, + start_token: String, + end_token: String, + }, + Whitespace { + range: Range, + }, +} + +pub fn find_block_comments( + tokens: &[BlockCommentToken], + text: RopeSlice, + selection: &Selection, +) -> (bool, Vec) { + let mut commented = true; + let mut only_whitespace = true; + let mut comment_changes = Vec::with_capacity(selection.len()); + let default_tokens = tokens.first().cloned().unwrap_or_default(); + // TODO: check if this can be removed on MSRV bump + #[allow(clippy::redundant_clone)] + let mut start_token = default_tokens.start.clone(); + #[allow(clippy::redundant_clone)] + let mut end_token = default_tokens.end.clone(); + + let mut tokens = tokens.to_vec(); + // sort the tokens by length, so longer tokens will match first + tokens.sort_by(|a, b| { + if a.start.len() == b.start.len() { + b.end.len().cmp(&a.end.len()) + } else { + b.start.len().cmp(&a.start.len()) + } + }); + for range in selection { + let selection_slice = range.slice(text); + if let (Some(start_pos), Some(end_pos)) = ( + selection_slice.first_non_whitespace_char(), + selection_slice.last_non_whitespace_char(), + ) { + let mut line_commented = false; + let mut after_start = 0; + let mut before_end = 0; + let len = (end_pos + 1) - start_pos; + + for BlockCommentToken { start, end } in &tokens { + let start_len = start.chars().count(); + let end_len = end.chars().count(); + after_start = start_pos + start_len; + before_end = end_pos.saturating_sub(end_len); + + if len >= start_len + end_len { + let start_fragment = selection_slice.slice(start_pos..after_start); + let end_fragment = selection_slice.slice(before_end + 1..end_pos + 1); + + // block commented with these tokens + if start_fragment == start.as_str() && end_fragment == end.as_str() 
{ + start_token = start.to_string(); + end_token = end.to_string(); + line_commented = true; + break; + } + } + } + + if !line_commented { + comment_changes.push(CommentChange::Uncommented { + range: *range, + start_pos, + end_pos, + start_token: default_tokens.start.clone(), + end_token: default_tokens.end.clone(), + }); + commented = false; + } else { + comment_changes.push(CommentChange::Commented { + range: *range, + start_pos, + end_pos, + start_margin: selection_slice + .get_char(after_start) + .map_or(false, |c| c == ' '), + end_margin: after_start != before_end + && selection_slice + .get_char(before_end) + .map_or(false, |c| c == ' '), + start_token: start_token.to_string(), + end_token: end_token.to_string(), + }); + } + only_whitespace = false; + } else { + comment_changes.push(CommentChange::Whitespace { range: *range }); + } + } + if only_whitespace { + commented = false; + } + (commented, comment_changes) +} + +#[must_use] +pub fn create_block_comment_transaction( + doc: &Rope, + selection: &Selection, + commented: bool, + comment_changes: Vec, +) -> (Transaction, SmallVec<[Range; 1]>) { + let mut changes: Vec = Vec::with_capacity(selection.len() * 2); + let mut ranges: SmallVec<[Range; 1]> = SmallVec::with_capacity(selection.len()); + let mut offs = 0; + for change in comment_changes { + if commented { + if let CommentChange::Commented { + range, + start_pos, + end_pos, + start_token, + end_token, + start_margin, + end_margin, + } = change + { + let from = range.from(); + changes.push(( + from + start_pos, + from + start_pos + start_token.len() + start_margin as usize, + None, + )); + changes.push(( + from + end_pos - end_token.len() - end_margin as usize + 1, + from + end_pos + 1, + None, + )); + } + } else { + // uncommented so manually map ranges through changes + match change { + CommentChange::Uncommented { + range, + start_pos, + end_pos, + start_token, + end_token, + } => { + let from = range.from(); + changes.push(( + from + start_pos, + from + start_pos, + Some(Tendril::from(format!("{} ", start_token))), + )); + changes.push(( + from + end_pos + 1, + from + end_pos + 1, + Some(Tendril::from(format!(" {}", end_token))), + )); + + let offset = start_token.chars().count() + end_token.chars().count() + 2; + ranges.push( + Range::new(from + offs, from + offs + end_pos + 1 + offset) + .with_direction(range.direction()), + ); + offs += offset; + } + CommentChange::Commented { range, .. 
} | CommentChange::Whitespace { range } => { + ranges.push(Range::new(range.from() + offs, range.to() + offs)); + } + } + } + } + (Transaction::change(doc, changes.into_iter()), ranges) +} + +#[must_use] +pub fn toggle_block_comments( + doc: &Rope, + selection: &Selection, + tokens: &[BlockCommentToken], +) -> Transaction { + let text = doc.slice(..); + let (commented, comment_changes) = find_block_comments(tokens, text, selection); + let (mut transaction, ranges) = + create_block_comment_transaction(doc, selection, commented, comment_changes); + if !commented { + transaction = transaction.with_selection(Selection::new(ranges, selection.primary_index())); + } + transaction +} + +pub fn split_lines_of_selection(text: RopeSlice, selection: &Selection) -> Selection { + let mut ranges = SmallVec::new(); + for range in selection.ranges() { + let (line_start, line_end) = range.line_range(text.slice(..)); + let mut pos = text.line_to_char(line_start); + for line in text.slice(pos..text.line_to_char(line_end + 1)).lines() { + let start = pos; + pos += line.len_chars(); + ranges.push(Range::new(start, pos)); + } + } + Selection::new(ranges, 0) +} + #[cfg(test)] mod test { use super::*; @@ -149,4 +368,49 @@ mod test { // TODO: account for uncommenting with uneven comment indentation } + + #[test] + fn test_find_block_comments() { + // three lines 5 characters. + let mut doc = Rope::from("1\n2\n3"); + // select whole document + let selection = Selection::single(0, doc.len_chars()); + + let text = doc.slice(..); + + let res = find_block_comments(&[BlockCommentToken::default()], text, &selection); + + assert_eq!( + res, + ( + false, + vec![CommentChange::Uncommented { + range: Range::new(0, 5), + start_pos: 0, + end_pos: 4, + start_token: "/*".to_string(), + end_token: "*/".to_string(), + }] + ) + ); + + // comment + let transaction = toggle_block_comments(&doc, &selection, &[BlockCommentToken::default()]); + transaction.apply(&mut doc); + + assert_eq!(doc, "/* 1\n2\n3 */"); + + // uncomment + let selection = Selection::single(0, doc.len_chars()); + let transaction = toggle_block_comments(&doc, &selection, &[BlockCommentToken::default()]); + transaction.apply(&mut doc); + assert_eq!(doc, "1\n2\n3"); + + // don't panic when there is just a space in comment + doc = Rope::from("/* */"); + let selection = Selection::single(0, doc.len_chars()); + let transaction = toggle_block_comments(&doc, &selection, &[BlockCommentToken::default()]); + transaction.apply(&mut doc); + assert_eq!(doc, ""); + } } diff --git a/helix-core/src/doc_formatter.rs b/helix-core/src/doc_formatter.rs index c7dc9081f..cbe2da3b6 100644 --- a/helix-core/src/doc_formatter.rs +++ b/helix-core/src/doc_formatter.rs @@ -116,7 +116,7 @@ impl Default for TextFormat { #[derive(Debug)] pub struct DocumentFormatter<'t> { text_fmt: &'t TextFormat, - annotations: &'t TextAnnotations, + annotations: &'t TextAnnotations<'t>, /// The visual position at the end of the last yielded word boundary visual_pos: Position, diff --git a/helix-core/src/doc_formatter/test.rs b/helix-core/src/doc_formatter/test.rs index ac8918bb7..d2b6ddc74 100644 --- a/helix-core/src/doc_formatter/test.rs +++ b/helix-core/src/doc_formatter/test.rs @@ -1,5 +1,3 @@ -use std::rc::Rc; - use crate::doc_formatter::{DocumentFormatter, TextFormat}; use crate::text_annotations::{InlineAnnotation, Overlay, TextAnnotations}; @@ -105,7 +103,7 @@ fn overlay_text(text: &str, char_pos: usize, softwrap: bool, overlays: &[Overlay DocumentFormatter::new_at_prev_checkpoint( text.into(), 
&TextFormat::new_test(softwrap), - TextAnnotations::default().add_overlay(overlays.into(), None), + TextAnnotations::default().add_overlay(overlays, None), char_pos, ) .0 @@ -142,7 +140,7 @@ fn annotate_text(text: &str, softwrap: bool, annotations: &[InlineAnnotation]) - DocumentFormatter::new_at_prev_checkpoint( text.into(), &TextFormat::new_test(softwrap), - TextAnnotations::default().add_inline_annotations(annotations.into(), None), + TextAnnotations::default().add_inline_annotations(annotations, None), 0, ) .0 @@ -164,15 +162,24 @@ fn annotation() { "foo foo foo foo \n.foo foo foo foo \n.foo foo foo " ); } + #[test] fn annotation_and_overlay() { + let annotations = [InlineAnnotation { + char_idx: 0, + text: "fooo".into(), + }]; + let overlay = [Overlay { + char_idx: 0, + grapheme: "\t".into(), + }]; assert_eq!( DocumentFormatter::new_at_prev_checkpoint( "bbar".into(), &TextFormat::new_test(false), TextAnnotations::default() - .add_inline_annotations(Rc::new([InlineAnnotation::new(0, "fooo")]), None) - .add_overlay(Rc::new([Overlay::new(0, "\t")]), None), + .add_inline_annotations(annotations.as_slice(), None) + .add_overlay(overlay.as_slice(), None), 0, ) .0 diff --git a/helix-core/src/graphemes.rs b/helix-core/src/graphemes.rs index d9e5e0224..7cb5cd062 100644 --- a/helix-core/src/graphemes.rs +++ b/helix-core/src/graphemes.rs @@ -425,6 +425,85 @@ impl<'a> Iterator for RopeGraphemes<'a> { } } +/// An iterator over the graphemes of a `RopeSlice` in reverse. +#[derive(Clone)] +pub struct RevRopeGraphemes<'a> { + text: RopeSlice<'a>, + chunks: Chunks<'a>, + cur_chunk: &'a str, + cur_chunk_start: usize, + cursor: GraphemeCursor, +} + +impl<'a> fmt::Debug for RevRopeGraphemes<'a> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("RevRopeGraphemes") + .field("text", &self.text) + .field("chunks", &self.chunks) + .field("cur_chunk", &self.cur_chunk) + .field("cur_chunk_start", &self.cur_chunk_start) + // .field("cursor", &self.cursor) + .finish() + } +} + +impl<'a> RevRopeGraphemes<'a> { + #[must_use] + pub fn new(slice: RopeSlice) -> RevRopeGraphemes { + let (mut chunks, mut cur_chunk_start, _, _) = slice.chunks_at_byte(slice.len_bytes()); + chunks.reverse(); + let first_chunk = chunks.next().unwrap_or(""); + cur_chunk_start -= first_chunk.len(); + RevRopeGraphemes { + text: slice, + chunks, + cur_chunk: first_chunk, + cur_chunk_start, + cursor: GraphemeCursor::new(slice.len_bytes(), slice.len_bytes(), true), + } + } +} + +impl<'a> Iterator for RevRopeGraphemes<'a> { + type Item = RopeSlice<'a>; + + fn next(&mut self) -> Option> { + let a = self.cursor.cur_cursor(); + let b; + loop { + match self + .cursor + .prev_boundary(self.cur_chunk, self.cur_chunk_start) + { + Ok(None) => { + return None; + } + Ok(Some(n)) => { + b = n; + break; + } + Err(GraphemeIncomplete::PrevChunk) => { + self.cur_chunk = self.chunks.next().unwrap_or(""); + self.cur_chunk_start -= self.cur_chunk.len(); + } + Err(GraphemeIncomplete::PreContext(idx)) => { + let (chunk, byte_idx, _, _) = self.text.chunk_at_byte(idx.saturating_sub(1)); + self.cursor.provide_context(chunk, byte_idx); + } + _ => unreachable!(), + } + } + + if a >= self.cur_chunk_start + self.cur_chunk.len() { + Some(self.text.byte_slice(b..a)) + } else { + let a2 = a - self.cur_chunk_start; + let b2 = b - self.cur_chunk_start; + Some((&self.cur_chunk[b2..a2]).into()) + } + } +} + /// A highly compressed Cow<'a, str> that holds /// atmost u31::MAX bytes and is readonly pub struct GraphemeStr<'a> { diff --git 
a/helix-core/src/increment/date_time.rs b/helix-core/src/increment/date_time.rs index 2980bb58b..04cff6b47 100644 --- a/helix-core/src/increment/date_time.rs +++ b/helix-core/src/increment/date_time.rs @@ -27,7 +27,7 @@ pub fn increment(selected_text: &str, amount: i64) -> Option { let date_time = NaiveDateTime::parse_from_str(date_time, format.fmt).ok()?; Some( date_time - .checked_add_signed(Duration::minutes(amount))? + .checked_add_signed(Duration::try_minutes(amount)?)? .format(format.fmt) .to_string(), ) @@ -35,14 +35,15 @@ pub fn increment(selected_text: &str, amount: i64) -> Option { (true, false) => { let date = NaiveDate::parse_from_str(date_time, format.fmt).ok()?; Some( - date.checked_add_signed(Duration::days(amount))? + date.checked_add_signed(Duration::try_days(amount)?)? .format(format.fmt) .to_string(), ) } (false, true) => { let time = NaiveTime::parse_from_str(date_time, format.fmt).ok()?; - let (adjusted_time, _) = time.overflowing_add_signed(Duration::minutes(amount)); + let (adjusted_time, _) = + time.overflowing_add_signed(Duration::try_minutes(amount)?); Some(adjusted_time.format(format.fmt).to_string()) } (false, false) => None, diff --git a/helix-core/src/indent.rs b/helix-core/src/indent.rs index c29bb3a0b..2a0a3876c 100644 --- a/helix-core/src/indent.rs +++ b/helix-core/src/indent.rs @@ -1,10 +1,10 @@ use std::{borrow::Cow, collections::HashMap}; +use helix_stdx::rope::RopeSliceExt; use tree_sitter::{Query, QueryCursor, QueryPredicateArg}; use crate::{ chars::{char_is_line_ending, char_is_whitespace}, - find_first_non_whitespace_char, graphemes::{grapheme_width, tab_width_at}, syntax::{IndentationHeuristic, LanguageConfiguration, RopeProvider, Syntax}, tree_sitter::Node, @@ -970,7 +970,7 @@ pub fn indent_for_newline( let mut num_attempts = 0; for line_idx in (0..=line_before).rev() { let line = text.line(line_idx); - let first_non_whitespace_char = match find_first_non_whitespace_char(line) { + let first_non_whitespace_char = match line.first_non_whitespace_char() { Some(i) => i, None => { continue; diff --git a/helix-core/src/lib.rs b/helix-core/src/lib.rs index 94802eba9..1abd90d10 100644 --- a/helix-core/src/lib.rs +++ b/helix-core/src/lib.rs @@ -37,9 +37,6 @@ pub mod unicode { pub use helix_loader::find_workspace; -pub fn find_first_non_whitespace_char(line: RopeSlice) -> Option { - line.chars().position(|ch| !ch.is_whitespace()) -} mod rope_reader; pub use rope_reader::RopeReader; diff --git a/helix-core/src/object.rs b/helix-core/src/object.rs index d2d4fe70a..0df105f1a 100644 --- a/helix-core/src/object.rs +++ b/helix-core/src/object.rs @@ -1,42 +1,52 @@ -use crate::{Range, RopeSlice, Selection, Syntax}; -use tree_sitter::Node; +use crate::{syntax::TreeCursor, Range, RopeSlice, Selection, Syntax}; pub fn expand_selection(syntax: &Syntax, text: RopeSlice, selection: Selection) -> Selection { - select_node_impl(syntax, text, selection, |mut node, from, to| { - while node.start_byte() == from && node.end_byte() == to { - node = node.parent()?; + let cursor = &mut syntax.walk(); + + selection.transform(|range| { + let from = text.char_to_byte(range.from()); + let to = text.char_to_byte(range.to()); + + let byte_range = from..to; + cursor.reset_to_byte_range(from, to); + + while cursor.node().byte_range() == byte_range { + if !cursor.goto_parent() { + break; + } } - Some(node) + + let node = cursor.node(); + let from = text.byte_to_char(node.start_byte()); + let to = text.byte_to_char(node.end_byte()); + + Range::new(to, from).with_direction(range.direction()) 
}) } pub fn shrink_selection(syntax: &Syntax, text: RopeSlice, selection: Selection) -> Selection { - select_node_impl(syntax, text, selection, |descendant, _from, _to| { - descendant.child(0).or(Some(descendant)) + select_node_impl(syntax, text, selection, |cursor| { + cursor.goto_first_child(); }) } -pub fn select_sibling( - syntax: &Syntax, - text: RopeSlice, - selection: Selection, - sibling_fn: &F, -) -> Selection -where - F: Fn(Node) -> Option, -{ - select_node_impl(syntax, text, selection, |descendant, _from, _to| { - find_sibling_recursive(descendant, sibling_fn) +pub fn select_next_sibling(syntax: &Syntax, text: RopeSlice, selection: Selection) -> Selection { + select_node_impl(syntax, text, selection, |cursor| { + while !cursor.goto_next_sibling() { + if !cursor.goto_parent() { + break; + } + } }) } -fn find_sibling_recursive(node: Node, sibling_fn: F) -> Option -where - F: Fn(Node) -> Option, -{ - sibling_fn(node).or_else(|| { - node.parent() - .and_then(|node| find_sibling_recursive(node, sibling_fn)) +pub fn select_prev_sibling(syntax: &Syntax, text: RopeSlice, selection: Selection) -> Selection { + select_node_impl(syntax, text, selection, |cursor| { + while !cursor.goto_prev_sibling() { + if !cursor.goto_parent() { + break; + } + } }) } @@ -44,33 +54,25 @@ fn select_node_impl( syntax: &Syntax, text: RopeSlice, selection: Selection, - select_fn: F, + motion: F, ) -> Selection where - F: Fn(Node, usize, usize) -> Option, + F: Fn(&mut TreeCursor), { - let tree = syntax.tree(); + let cursor = &mut syntax.walk(); selection.transform(|range| { let from = text.char_to_byte(range.from()); let to = text.char_to_byte(range.to()); - let node = match tree - .root_node() - .descendant_for_byte_range(from, to) - .and_then(|node| select_fn(node, from, to)) - { - Some(node) => node, - None => return range, - }; + cursor.reset_to_byte_range(from, to); + motion(cursor); + + let node = cursor.node(); let from = text.byte_to_char(node.start_byte()); let to = text.byte_to_char(node.end_byte()); - if range.head < range.anchor { - Range::new(to, from) - } else { - Range::new(from, to) - } + Range::new(from, to).with_direction(range.direction()) }) } diff --git a/helix-core/src/selection.rs b/helix-core/src/selection.rs index c44685eea..579499de5 100644 --- a/helix-core/src/selection.rs +++ b/helix-core/src/selection.rs @@ -7,9 +7,11 @@ use crate::{ ensure_grapheme_boundary_next, ensure_grapheme_boundary_prev, next_grapheme_boundary, prev_grapheme_boundary, }, + line_ending::get_line_ending, movement::Direction, Assoc, ChangeSet, RopeGraphemes, RopeSlice, }; +use helix_stdx::rope::{self, RopeSliceExt}; use smallvec::{smallvec, SmallVec}; use std::borrow::Cow; @@ -703,17 +705,26 @@ impl IntoIterator for Selection { } } +impl From for Selection { + fn from(range: Range) -> Self { + Self { + ranges: smallvec![range], + primary_index: 0, + } + } +} + // TODO: checkSelection -> check if valid for doc length && sorted pub fn keep_or_remove_matches( text: RopeSlice, selection: &Selection, - regex: &crate::regex::Regex, + regex: &rope::Regex, remove: bool, ) -> Option { let result: SmallVec<_> = selection .iter() - .filter(|range| regex.is_match(&range.fragment(text)) ^ remove) + .filter(|range| regex.is_match(text.regex_input_at(range.from()..range.to())) ^ remove) .copied() .collect(); @@ -724,25 +735,20 @@ pub fn keep_or_remove_matches( None } +// TODO: support to split on capture #N instead of whole match pub fn select_on_matches( text: RopeSlice, selection: &Selection, - regex: &crate::regex::Regex, + 
regex: &rope::Regex, ) -> Option { let mut result = SmallVec::with_capacity(selection.len()); for sel in selection { - // TODO: can't avoid occasional allocations since Regex can't operate on chunks yet - let fragment = sel.fragment(text); - - let sel_start = sel.from(); - let start_byte = text.char_to_byte(sel_start); - - for mat in regex.find_iter(&fragment) { + for mat in regex.find_iter(text.regex_input_at(sel.from()..sel.to())) { // TODO: retain range direction - let start = text.byte_to_char(start_byte + mat.start()); - let end = text.byte_to_char(start_byte + mat.end()); + let start = text.byte_to_char(mat.start()); + let end = text.byte_to_char(mat.end()); let range = Range::new(start, end); // Make sure the match is not right outside of the selection. @@ -761,12 +767,7 @@ pub fn select_on_matches( None } -// TODO: support to split on capture #N instead of whole match -pub fn split_on_matches( - text: RopeSlice, - selection: &Selection, - regex: &crate::regex::Regex, -) -> Selection { +pub fn split_on_newline(text: RopeSlice, selection: &Selection) -> Selection { let mut result = SmallVec::with_capacity(selection.len()); for sel in selection { @@ -776,21 +777,47 @@ pub fn split_on_matches( continue; } - // TODO: can't avoid occasional allocations since Regex can't operate on chunks yet - let fragment = sel.fragment(text); - let sel_start = sel.from(); let sel_end = sel.to(); - let start_byte = text.char_to_byte(sel_start); + let mut start = sel_start; + for line in sel.slice(text).lines() { + let Some(line_ending) = get_line_ending(&line) else { break }; + let line_end = start + line.len_chars(); + // TODO: retain range direction + result.push(Range::new(start, line_end - line_ending.len_chars())); + start = line_end; + } + + if start < sel_end { + result.push(Range::new(start, sel_end)); + } + } + + // TODO: figure out a new primary index + Selection::new(result, 0) +} + +pub fn split_on_matches(text: RopeSlice, selection: &Selection, regex: &rope::Regex) -> Selection { + let mut result = SmallVec::with_capacity(selection.len()); + + for sel in selection { + // Special case: zero-width selection. 
+ if sel.from() == sel.to() { + result.push(*sel); + continue; + } + + let sel_start = sel.from(); + let sel_end = sel.to(); let mut start = sel_start; - for mat in regex.find_iter(&fragment) { + for mat in regex.find_iter(text.regex_input_at(sel_start..sel_end)) { // TODO: retain range direction - let end = text.byte_to_char(start_byte + mat.start()); + let end = text.byte_to_char(mat.start()); result.push(Range::new(start, end)); - start = text.byte_to_char(start_byte + mat.end()); + start = text.byte_to_char(mat.end()); } if start < sel_end { @@ -1021,14 +1048,12 @@ mod test { #[test] fn test_select_on_matches() { - use crate::regex::{Regex, RegexBuilder}; - let r = Rope::from_str("Nobody expects the Spanish inquisition"); let s = r.slice(..); let selection = Selection::single(0, r.len_chars()); assert_eq!( - select_on_matches(s, &selection, &Regex::new(r"[A-Z][a-z]*").unwrap()), + select_on_matches(s, &selection, &rope::Regex::new(r"[A-Z][a-z]*").unwrap()), Some(Selection::new( smallvec![Range::new(0, 6), Range::new(19, 26)], 0 @@ -1038,8 +1063,14 @@ mod test { let r = Rope::from_str("This\nString\n\ncontains multiple\nlines"); let s = r.slice(..); - let start_of_line = RegexBuilder::new(r"^").multi_line(true).build().unwrap(); - let end_of_line = RegexBuilder::new(r"$").multi_line(true).build().unwrap(); + let start_of_line = rope::RegexBuilder::new() + .syntax(rope::Config::new().multi_line(true)) + .build(r"^") + .unwrap(); + let end_of_line = rope::RegexBuilder::new() + .syntax(rope::Config::new().multi_line(true)) + .build(r"$") + .unwrap(); // line without ending assert_eq!( @@ -1077,9 +1108,9 @@ mod test { select_on_matches( s, &Selection::single(0, s.len_chars()), - &RegexBuilder::new(r"^[a-z ]*$") - .multi_line(true) - .build() + &rope::RegexBuilder::new() + .syntax(rope::Config::new().multi_line(true)) + .build(r"^[a-z ]*$") .unwrap() ), Some(Selection::new( @@ -1171,13 +1202,15 @@ mod test { #[test] fn test_split_on_matches() { - use crate::regex::Regex; - let text = Rope::from(" abcd efg wrs xyz 123 456"); let selection = Selection::new(smallvec![Range::new(0, 9), Range::new(11, 20),], 0); - let result = split_on_matches(text.slice(..), &selection, &Regex::new(r"\s+").unwrap()); + let result = split_on_matches( + text.slice(..), + &selection, + &rope::Regex::new(r"\s+").unwrap(), + ); assert_eq!( result.ranges(), diff --git a/helix-core/src/surround.rs b/helix-core/src/surround.rs index b96cce5a0..ed9764883 100644 --- a/helix-core/src/surround.rs +++ b/helix-core/src/surround.rs @@ -167,6 +167,10 @@ fn find_nth_open_pair( mut pos: usize, n: usize, ) -> Option { + if pos >= text.len_chars() { + return None; + } + let mut chars = text.chars_at(pos + 1); // Adjusts pos for the first iteration, and handles the case of the @@ -260,7 +264,8 @@ pub fn get_surround_pos( if change_pos.contains(&open_pos) || change_pos.contains(&close_pos) { return Err(Error::CursorOverlap); } - change_pos.extend_from_slice(&[open_pos, close_pos]); + // ensure the positions are always paired in the forward direction + change_pos.extend_from_slice(&[open_pos.min(close_pos), close_pos.max(open_pos)]); } Ok(change_pos) } @@ -382,6 +387,21 @@ mod test { ) } + #[test] + fn test_find_nth_closest_pairs_pos_index_range_panic() { + #[rustfmt::skip] + let (doc, selection, _) = + rope_with_selections_and_expectations( + "(a)c)", + "^^^^^" + ); + + assert_eq!( + find_nth_closest_pairs_pos(doc.slice(..), selection.primary(), 1), + Err(Error::PairNotFound) + ) + } + // Create a Rope and a matching Selection using 
a specification language. // ^ is a single-point selection. // _ is an expected index. These are returned as a Vec for use in assertions. diff --git a/helix-core/src/syntax.rs b/helix-core/src/syntax.rs index a9344448f..78abc0b0a 100644 --- a/helix-core/src/syntax.rs +++ b/helix-core/src/syntax.rs @@ -1,3 +1,5 @@ +mod tree_cursor; + use crate::{ auto_pairs::AutoPairs, chars::char_is_line_ending, @@ -12,6 +14,7 @@ use arc_swap::{ArcSwap, Guard}; use bitflags::bitflags; use globset::GlobSet; use hashbrown::raw::RawTable; +use helix_stdx::rope::{self, RopeSliceExt}; use slotmap::{DefaultKey as LayerId, HopSlotMap}; use std::{ @@ -20,7 +23,7 @@ use std::{ collections::{HashMap, HashSet, VecDeque}, fmt::{self, Display}, hash::{Hash, Hasher}, - mem::{replace, transmute}, + mem::replace, path::{Path, PathBuf}, str::FromStr, sync::Arc, @@ -31,6 +34,8 @@ use serde::{ser::SerializeSeq, Deserialize, Serialize}; use helix_loader::grammar::{get_language, load_runtime_file}; +pub use tree_cursor::TreeCursor; + fn deserialize_regex<'de, D>(deserializer: D) -> Result, D::Error> where D: serde::Deserializer<'de>, @@ -98,7 +103,19 @@ pub struct LanguageConfiguration { pub shebangs: Vec, // interpreter(s) associated with language #[serde(default)] pub roots: Vec, // these indicate project roots <.git, Cargo.toml> - pub comment_token: Option, + #[serde( + default, + skip_serializing, + deserialize_with = "from_comment_tokens", + alias = "comment-token" + )] + pub comment_tokens: Option>, + #[serde( + default, + skip_serializing, + deserialize_with = "from_block_comment_tokens" + )] + pub block_comment_tokens: Option>, pub text_width: Option, pub soft_wrap: Option, @@ -239,6 +256,59 @@ impl<'de> Deserialize<'de> for FileType { } } +fn from_comment_tokens<'de, D>(deserializer: D) -> Result>, D::Error> +where + D: serde::Deserializer<'de>, +{ + #[derive(Deserialize)] + #[serde(untagged)] + enum CommentTokens { + Multiple(Vec), + Single(String), + } + Ok( + Option::::deserialize(deserializer)?.map(|tokens| match tokens { + CommentTokens::Single(val) => vec![val], + CommentTokens::Multiple(vals) => vals, + }), + ) +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct BlockCommentToken { + pub start: String, + pub end: String, +} + +impl Default for BlockCommentToken { + fn default() -> Self { + BlockCommentToken { + start: "/*".to_string(), + end: "*/".to_string(), + } + } +} + +fn from_block_comment_tokens<'de, D>( + deserializer: D, +) -> Result>, D::Error> +where + D: serde::Deserializer<'de>, +{ + #[derive(Deserialize)] + #[serde(untagged)] + enum BlockCommentTokens { + Multiple(Vec), + Single(BlockCommentToken), + } + Ok( + Option::::deserialize(deserializer)?.map(|tokens| match tokens { + BlockCommentTokens::Single(val) => vec![val], + BlockCommentTokens::Multiple(vals) => vals, + }), + ) +} + #[derive(Clone, Copy, Debug, Serialize, Deserialize, PartialEq, Eq, Hash)] #[serde(rename_all = "kebab-case")] pub enum LanguageServerFeature { @@ -739,7 +809,7 @@ impl LanguageConfiguration { if query_text.is_empty() { return None; } - let lang = self.highlight_config.get()?.as_ref()?.language; + let lang = &self.highlight_config.get()?.as_ref()?.language; Query::new(lang, &query_text) .map_err(|e| { log::error!( @@ -1024,6 +1094,7 @@ impl Syntax { start_point: Point::new(0, 0), end_point: Point::new(usize::MAX, usize::MAX), }], + parent: None, }; // track scope_descriptor: a Vec of scopes for item in tree @@ -1294,6 +1365,7 @@ impl Syntax { depth, ranges, flags: LayerUpdateFlags::empty(), + parent: 
Some(layer_id), }; // Find an identical existing layer @@ -1427,6 +1499,12 @@ impl Syntax { .descendant_for_byte_range(start, end) } + pub fn walk(&self) -> TreeCursor<'_> { + // data structure to find the smallest range that contains a point + // when some of the ranges in the structure can overlap. + TreeCursor::new(&self.layers, self.root) + } + // Commenting // comment_strings_for_pos // is_commented @@ -1459,6 +1537,7 @@ pub struct LanguageLayer { pub ranges: Vec, pub depth: u32, flags: LayerUpdateFlags, + parent: Option, } /// This PartialEq implementation only checks if that @@ -1478,13 +1557,7 @@ impl PartialEq for LanguageLayer { impl Hash for LanguageLayer { fn hash(&self, state: &mut H) { self.depth.hash(state); - // The transmute is necessary here because tree_sitter::Language does not derive Hash at the moment. - // However it does use #[repr] transparent so the transmute here is safe - // as `Language` (which `Grammar` is an alias for) is just a newtype wrapper around a (thin) pointer. - // This is also compatible with the PartialEq implementation of language - // as that is just a pointer comparison. - let language: *const () = unsafe { transmute(self.config.language) }; - language.hash(state); + self.config.language.hash(state); self.ranges.hash(state); } } @@ -1501,7 +1574,7 @@ impl LanguageLayer { .map_err(|_| Error::InvalidRanges)?; parser - .set_language(self.config.language) + .set_language(&self.config.language) .map_err(|_| Error::InvalidLanguage)?; // unsafe { syntax.parser.set_cancellation_flag(cancellation_flag) }; @@ -1660,7 +1733,7 @@ use std::sync::atomic::{AtomicUsize, Ordering}; use std::{iter, mem, ops, str, usize}; use tree_sitter::{ Language as Grammar, Node, Parser, Point, Query, QueryCaptures, QueryCursor, QueryError, - QueryMatch, Range, TextProvider, Tree, TreeCursor, + QueryMatch, Range, TextProvider, Tree, }; const CANCELLATION_CHECK_INTERVAL: usize = 100; @@ -1801,7 +1874,7 @@ impl HighlightConfiguration { // Construct a single query by concatenating the three query strings, but record the // range of pattern indices that belong to each individual string. 
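// Illustrative sketch of the bookkeeping described above: a pattern can be
// attributed to one of the concatenated query sources by the byte offset at
// which it starts in the combined string. `sources` and
// `source_index_of_pattern` are hypothetical names used only for this
// example; the real code relies on `Query::start_byte_for_pattern` as shown
// below.
fn source_index_of_pattern(sources: &[&str], pattern_offset: usize) -> usize {
    let mut end = 0;
    for (i, src) in sources.iter().enumerate() {
        end += src.len();
        if pattern_offset < end {
            return i;
        }
    }
    // Offsets at or past the end belong to the last source.
    sources.len().saturating_sub(1)
}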
- let query = Query::new(language, &query_source)?; + let query = Query::new(&language, &query_source)?; let mut highlights_pattern_index = 0; for i in 0..(query.pattern_count()) { let pattern_offset = query.start_byte_for_pattern(i); @@ -1810,7 +1883,7 @@ impl HighlightConfiguration { } } - let injections_query = Query::new(language, injection_query)?; + let injections_query = Query::new(&language, injection_query)?; let combined_injections_patterns = (0..injections_query.pattern_count()) .filter(|&i| { injections_query @@ -1961,11 +2034,16 @@ impl HighlightConfiguration { node_slice }; - static SHEBANG_REGEX: Lazy = Lazy::new(|| Regex::new(SHEBANG).unwrap()); + static SHEBANG_REGEX: Lazy = + Lazy::new(|| rope::Regex::new(SHEBANG).unwrap()); injection_capture = SHEBANG_REGEX - .captures(&Cow::from(lines)) - .map(|cap| InjectionLanguageMarker::Shebang(cap[1].to_owned())) + .captures_iter(lines.regex_input()) + .map(|cap| { + let cap = lines.byte_slice(cap.get_group(1).unwrap().range()); + InjectionLanguageMarker::Shebang(cap.into()) + }) + .next() } else if index == self.injection_content_capture_index { content_node = Some(capture.node); } @@ -2589,7 +2667,7 @@ pub fn pretty_print_tree(fmt: &mut W, node: Node) -> fmt::Result fn pretty_print_tree_impl( fmt: &mut W, - cursor: &mut TreeCursor, + cursor: &mut tree_sitter::TreeCursor, depth: usize, ) -> fmt::Result { let node = cursor.node(); @@ -2659,7 +2737,7 @@ mod test { .unwrap(); let language = get_language("rust").unwrap(); - let query = Query::new(language, query_str).unwrap(); + let query = Query::new(&language, query_str).unwrap(); let textobject = TextObjectQuery { query }; let mut cursor = QueryCursor::new(); @@ -2899,7 +2977,7 @@ mod test { // rule but `name` and `body` belong to an unnamed helper `_method_rest`. // This can cause a bug with a pretty-printing implementation that // uses `Node::field_name_for_child` to determine field names but is - // fixed when using `TreeCursor::field_name`. + // fixed when using `tree_sitter::TreeCursor::field_name`. let source = "def self.method_name true end"; diff --git a/helix-core/src/syntax/tree_cursor.rs b/helix-core/src/syntax/tree_cursor.rs new file mode 100644 index 000000000..d9d140c9f --- /dev/null +++ b/helix-core/src/syntax/tree_cursor.rs @@ -0,0 +1,160 @@ +use std::{cmp::Reverse, ops::Range}; + +use super::{LanguageLayer, LayerId}; + +use slotmap::HopSlotMap; +use tree_sitter::Node; + +/// The byte range of an injection layer. +/// +/// Injection ranges may overlap, but all overlapping parts are subsets of their parent ranges. +/// This allows us to sort the ranges ahead of time in order to efficiently find a range that +/// contains a point with maximum depth. +#[derive(Debug)] +struct InjectionRange { + start: usize, + end: usize, + layer_id: LayerId, + depth: u32, +} + +pub struct TreeCursor<'a> { + layers: &'a HopSlotMap, + root: LayerId, + current: LayerId, + injection_ranges: Vec, + // TODO: Ideally this would be a `tree_sitter::TreeCursor<'a>` but + // that returns very surprising results in testing. 
+ cursor: Node<'a>, +} + +impl<'a> TreeCursor<'a> { + pub(super) fn new(layers: &'a HopSlotMap, root: LayerId) -> Self { + let mut injection_ranges = Vec::new(); + + for (layer_id, layer) in layers.iter() { + // Skip the root layer + if layer.parent.is_none() { + continue; + } + for byte_range in layer.ranges.iter() { + let range = InjectionRange { + start: byte_range.start_byte, + end: byte_range.end_byte, + layer_id, + depth: layer.depth, + }; + injection_ranges.push(range); + } + } + + injection_ranges.sort_unstable_by_key(|range| (range.end, Reverse(range.depth))); + + let cursor = layers[root].tree().root_node(); + + Self { + layers, + root, + current: root, + injection_ranges, + cursor, + } + } + + pub fn node(&self) -> Node<'a> { + self.cursor + } + + pub fn goto_parent(&mut self) -> bool { + if let Some(parent) = self.node().parent() { + self.cursor = parent; + return true; + } + + // If we are already on the root layer, we cannot ascend. + if self.current == self.root { + return false; + } + + // Ascend to the parent layer. + let range = self.node().byte_range(); + let parent_id = self.layers[self.current] + .parent + .expect("non-root layers have a parent"); + self.current = parent_id; + let root = self.layers[self.current].tree().root_node(); + self.cursor = root + .descendant_for_byte_range(range.start, range.end) + .unwrap_or(root); + + true + } + + /// Finds the injection layer that has exactly the same range as the given `range`. + fn layer_id_of_byte_range(&self, search_range: Range) -> Option { + let start_idx = self + .injection_ranges + .partition_point(|range| range.end < search_range.end); + + self.injection_ranges[start_idx..] + .iter() + .take_while(|range| range.end == search_range.end) + .find_map(|range| (range.start == search_range.start).then_some(range.layer_id)) + } + + pub fn goto_first_child(&mut self) -> bool { + // Check if the current node's range is an exact injection layer range. + if let Some(layer_id) = self + .layer_id_of_byte_range(self.node().byte_range()) + .filter(|&layer_id| layer_id != self.current) + { + // Switch to the child layer. + self.current = layer_id; + self.cursor = self.layers[self.current].tree().root_node(); + true + } else if let Some(child) = self.cursor.child(0) { + // Otherwise descend in the current tree. + self.cursor = child; + true + } else { + false + } + } + + pub fn goto_next_sibling(&mut self) -> bool { + if let Some(sibling) = self.cursor.next_sibling() { + self.cursor = sibling; + true + } else { + false + } + } + + pub fn goto_prev_sibling(&mut self) -> bool { + if let Some(sibling) = self.cursor.prev_sibling() { + self.cursor = sibling; + true + } else { + false + } + } + + /// Finds the injection layer that contains the given start-end range. + fn layer_id_containing_byte_range(&self, start: usize, end: usize) -> LayerId { + let start_idx = self + .injection_ranges + .partition_point(|range| range.end < end); + + self.injection_ranges[start_idx..] 
+ .iter() + .take_while(|range| range.start < end) + .find_map(|range| (range.start <= start).then_some(range.layer_id)) + .unwrap_or(self.root) + } + + pub fn reset_to_byte_range(&mut self, start: usize, end: usize) { + self.current = self.layer_id_containing_byte_range(start, end); + let root = self.layers[self.current].tree().root_node(); + self.cursor = root.descendant_for_byte_range(start, end).unwrap_or(root); + } +} diff --git a/helix-core/src/text_annotations.rs b/helix-core/src/text_annotations.rs index 11d19d485..1576914e3 100644 --- a/helix-core/src/text_annotations.rs +++ b/helix-core/src/text_annotations.rs @@ -1,6 +1,5 @@ use std::cell::Cell; use std::ops::Range; -use std::rc::Rc; use crate::syntax::Highlight; use crate::Tendril; @@ -92,23 +91,23 @@ pub struct LineAnnotation { } #[derive(Debug)] -struct Layer { - annotations: Rc<[A]>, +struct Layer<'a, A, M> { + annotations: &'a [A], current_index: Cell, metadata: M, } -impl Clone for Layer { +impl Clone for Layer<'_, A, M> { fn clone(&self) -> Self { Layer { - annotations: self.annotations.clone(), + annotations: self.annotations, current_index: self.current_index.clone(), metadata: self.metadata.clone(), } } } -impl Layer { +impl Layer<'_, A, M> { pub fn reset_pos(&self, char_idx: usize, get_char_idx: impl Fn(&A) -> usize) { let new_index = self .annotations @@ -128,8 +127,8 @@ impl Layer { } } -impl From<(Rc<[A]>, M)> for Layer { - fn from((annotations, metadata): (Rc<[A]>, M)) -> Layer { +impl<'a, A, M> From<(&'a [A], M)> for Layer<'a, A, M> { + fn from((annotations, metadata): (&'a [A], M)) -> Layer { Layer { annotations, current_index: Cell::new(0), @@ -147,13 +146,13 @@ fn reset_pos(layers: &[Layer], pos: usize, get_pos: impl Fn(&A) -> u /// Annotations that change that is displayed when the document is render. /// Also commonly called virtual text. #[derive(Default, Debug, Clone)] -pub struct TextAnnotations { - inline_annotations: Vec>>, - overlays: Vec>>, - line_annotations: Vec>, +pub struct TextAnnotations<'a> { + inline_annotations: Vec>>, + overlays: Vec>>, + line_annotations: Vec>, } -impl TextAnnotations { +impl<'a> TextAnnotations<'a> { /// Prepare the TextAnnotations for iteration starting at char_idx pub fn reset_pos(&self, char_idx: usize) { reset_pos(&self.inline_annotations, char_idx, |annot| annot.char_idx); @@ -194,7 +193,7 @@ impl TextAnnotations { /// the annotations that belong to the layers added first will be shown first. pub fn add_inline_annotations( &mut self, - layer: Rc<[InlineAnnotation]>, + layer: &'a [InlineAnnotation], highlight: Option, ) -> &mut Self { self.inline_annotations.push((layer, highlight).into()); @@ -211,7 +210,7 @@ impl TextAnnotations { /// /// If multiple layers contain overlay at the same position /// the overlay from the layer added last will be show. - pub fn add_overlay(&mut self, layer: Rc<[Overlay]>, highlight: Option) -> &mut Self { + pub fn add_overlay(&mut self, layer: &'a [Overlay], highlight: Option) -> &mut Self { self.overlays.push((layer, highlight).into()); self } @@ -220,7 +219,7 @@ impl TextAnnotations { /// /// The line annotations **must be sorted** by their `char_idx`. /// Multiple line annotations with the same `char_idx` **are not allowed**. 
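// Minimal usage sketch for the slice-based layers: annotations can now live in
// a plain local array and be passed by reference, as the doc_formatter tests
// above already do. The literal values and the function name below are made up
// for illustration.
fn sketch_slice_based_layers() {
    let annotations = [InlineAnnotation {
        char_idx: 0,
        text: "hint".into(),
    }];
    let overlays = [Overlay {
        char_idx: 0,
        grapheme: "~".into(),
    }];
    let mut text_annotations = TextAnnotations::default();
    text_annotations
        .add_inline_annotations(annotations.as_slice(), None)
        .add_overlay(overlays.as_slice(), None);
}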
- pub fn add_line_annotation(&mut self, layer: Rc<[LineAnnotation]>) -> &mut Self { + pub fn add_line_annotation(&mut self, layer: &'a [LineAnnotation]) -> &mut Self { self.line_annotations.push((layer, ()).into()); self } diff --git a/helix-core/tests/indent.rs b/helix-core/tests/indent.rs index 53265e0b1..31946c56e 100644 --- a/helix-core/tests/indent.rs +++ b/helix-core/tests/indent.rs @@ -4,6 +4,7 @@ use helix_core::{ syntax::{Configuration, Loader}, Syntax, }; +use helix_stdx::rope::RopeSliceExt; use ropey::Rope; use std::{ops::Range, path::PathBuf, process::Command, sync::Arc}; @@ -211,7 +212,7 @@ fn test_treesitter_indent( if ignored_lines.iter().any(|range| range.contains(&(i + 1))) { continue; } - if let Some(pos) = helix_core::find_first_non_whitespace_char(line) { + if let Some(pos) = line.first_non_whitespace_char() { let tab_width: usize = 4; let suggested_indent = treesitter_indent_for_pos( indent_query, diff --git a/helix-event/Cargo.toml b/helix-event/Cargo.toml index a5c88e93d..616c323dc 100644 --- a/helix-event/Cargo.toml +++ b/helix-event/Cargo.toml @@ -12,7 +12,7 @@ homepage.workspace = true # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] -ahash = "0.8.3" +ahash = "0.8.11" hashbrown = "0.14.0" tokio = { version = "1", features = ["rt", "rt-multi-thread", "time", "sync", "parking_lot", "macros"] } # the event registry is essentially read only but must be an rwlock so we can diff --git a/helix-loader/Cargo.toml b/helix-loader/Cargo.toml index 25b559696..d15d87f95 100644 --- a/helix-loader/Cargo.toml +++ b/helix-loader/Cargo.toml @@ -30,7 +30,7 @@ log = "0.4" # cloning/compiling tree-sitter grammars cc = { version = "1" } threadpool = { version = "1.0" } -tempfile = "3.10.0" +tempfile = "3.10.1" dunce = "1.0.4" [target.'cfg(not(target_arch = "wasm32"))'.dependencies] diff --git a/helix-lsp/Cargo.toml b/helix-lsp/Cargo.toml index 5995cab59..1ef6ea5d9 100644 --- a/helix-lsp/Cargo.toml +++ b/helix-lsp/Cargo.toml @@ -28,6 +28,6 @@ serde = { version = "1.0", features = ["derive"] } serde_json = "1.0" thiserror = "1.0" tokio = { version = "1.36", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot", "sync"] } -tokio-stream = "0.1.14" +tokio-stream = "0.1.15" parking_lot = "0.12.1" arc-swap = "1" diff --git a/helix-lsp/src/client.rs b/helix-lsp/src/client.rs index 8d03d7992..a7b3989dd 100644 --- a/helix-lsp/src/client.rs +++ b/helix-lsp/src/client.rs @@ -631,6 +631,12 @@ impl Client { }), publish_diagnostics: Some(lsp::PublishDiagnosticsClientCapabilities { version_support: Some(true), + tag_support: Some(lsp::TagSupport { + value_set: vec![ + lsp::DiagnosticTag::UNNECESSARY, + lsp::DiagnosticTag::DEPRECATED, + ], + }), ..Default::default() }), inlay_hint: Some(lsp::InlayHintClientCapabilities { diff --git a/helix-stdx/Cargo.toml b/helix-stdx/Cargo.toml index 540a1b99a..ed23f4e4f 100644 --- a/helix-stdx/Cargo.toml +++ b/helix-stdx/Cargo.toml @@ -16,6 +16,7 @@ dunce = "1.0" etcetera = "0.8" ropey = { version = "1.6.1", default-features = false } which = "6.0" +regex-cursor = "0.1.4" [dev-dependencies] tempfile = "3.10" diff --git a/helix-stdx/src/env.rs b/helix-stdx/src/env.rs index 90a0aee87..59aba0adc 100644 --- a/helix-stdx/src/env.rs +++ b/helix-stdx/src/env.rs @@ -42,8 +42,9 @@ pub fn binary_exists>(binary_name: T) -> bool { pub fn which>( binary_name: T, ) -> Result { - which::which(binary_name.as_ref()).map_err(|err| ExecutableNotFoundError { - command: 
binary_name.as_ref().to_string_lossy().into_owned(), + let binary_name = binary_name.as_ref(); + which::which(binary_name).map_err(|err| ExecutableNotFoundError { + command: binary_name.to_string_lossy().into_owned(), inner: err, }) } diff --git a/helix-stdx/src/path.rs b/helix-stdx/src/path.rs index 1dc4d0b24..ff2bffae5 100644 --- a/helix-stdx/src/path.rs +++ b/helix-stdx/src/path.rs @@ -2,6 +2,7 @@ pub use etcetera::home_dir; use std::{ borrow::Cow, + ffi::OsString, path::{Component, Path, PathBuf}, }; @@ -9,14 +10,21 @@ use crate::env::current_working_dir; /// Replaces users home directory from `path` with tilde `~` if the directory /// is available, otherwise returns the path unchanged. -pub fn fold_home_dir(path: &Path) -> PathBuf { +pub fn fold_home_dir<'a, P>(path: P) -> Cow<'a, Path> +where + P: Into>, +{ + let path = path.into(); if let Ok(home) = home_dir() { if let Ok(stripped) = path.strip_prefix(&home) { - return PathBuf::from("~").join(stripped); + let mut path = OsString::with_capacity(2 + stripped.as_os_str().len()); + path.push("~/"); + path.push(stripped); + return Cow::Owned(PathBuf::from(path)); } } - path.to_path_buf() + path } /// Expands tilde `~` into users home directory if available, otherwise returns the path @@ -125,18 +133,21 @@ pub fn canonicalize(path: impl AsRef) -> PathBuf { normalize(path) } -pub fn get_relative_path(path: impl AsRef) -> PathBuf { - let path = PathBuf::from(path.as_ref()); - let path = if path.is_absolute() { +pub fn get_relative_path<'a, P>(path: P) -> Cow<'a, Path> +where + P: Into>, +{ + let path = path.into(); + if path.is_absolute() { let cwdir = normalize(current_working_dir()); - normalize(&path) - .strip_prefix(cwdir) - .map(PathBuf::from) - .unwrap_or(path) - } else { - path - }; - fold_home_dir(&path) + if let Ok(stripped) = normalize(&path).strip_prefix(cwdir) { + return Cow::Owned(PathBuf::from(stripped)); + } + + return fold_home_dir(path); + } + + path } /// Returns a truncated filepath where the basepart of the path is reduced to the first @@ -170,21 +181,20 @@ pub fn get_relative_path(path: impl AsRef) -> PathBuf { /// pub fn get_truncated_path(path: impl AsRef) -> PathBuf { let cwd = current_working_dir(); - let path = path - .as_ref() - .strip_prefix(cwd) - .unwrap_or_else(|_| path.as_ref()); + let path = path.as_ref(); + let path = path.strip_prefix(cwd).unwrap_or(path); let file = path.file_name().unwrap_or_default(); let base = path.parent().unwrap_or_else(|| Path::new("")); - let mut ret = PathBuf::new(); + let mut ret = PathBuf::with_capacity(file.len()); + // A char can't be directly pushed to a PathBuf + let mut first_char_buffer = String::new(); for d in base { - ret.push( - d.to_string_lossy() - .chars() - .next() - .unwrap_or_default() - .to_string(), - ); + let Some(first_char) = d.to_string_lossy().chars().next() else { + break; + }; + first_char_buffer.push(first_char); + ret.push(&first_char_buffer); + first_char_buffer.clear(); } ret.push(file); ret diff --git a/helix-stdx/src/rope.rs b/helix-stdx/src/rope.rs index 4ee39d4a8..7e2549f5a 100644 --- a/helix-stdx/src/rope.rs +++ b/helix-stdx/src/rope.rs @@ -1,11 +1,24 @@ +use std::ops::{Bound, RangeBounds}; + +pub use regex_cursor::engines::meta::{Builder as RegexBuilder, Regex}; +pub use regex_cursor::regex_automata::util::syntax::Config; +use regex_cursor::{Input as RegexInput, RopeyCursor}; use ropey::RopeSlice; -pub trait RopeSliceExt: Sized { +pub trait RopeSliceExt<'a>: Sized { fn ends_with(self, text: &str) -> bool; fn starts_with(self, text: &str) -> 
bool; + fn regex_input(self) -> RegexInput>; + fn regex_input_at_bytes>( + self, + byte_range: R, + ) -> RegexInput>; + fn regex_input_at>(self, char_range: R) -> RegexInput>; + fn first_non_whitespace_char(self) -> Option; + fn last_non_whitespace_char(self) -> Option; } -impl RopeSliceExt for RopeSlice<'_> { +impl<'a> RopeSliceExt<'a> for RopeSlice<'a> { fn ends_with(self, text: &str) -> bool { let len = self.len_bytes(); if len < text.len() { @@ -23,4 +36,43 @@ impl RopeSliceExt for RopeSlice<'_> { self.get_byte_slice(..len - text.len()) .map_or(false, |start| start == text) } + + fn regex_input(self) -> RegexInput> { + RegexInput::new(self) + } + + fn regex_input_at>(self, char_range: R) -> RegexInput> { + let start_bound = match char_range.start_bound() { + Bound::Included(&val) => Bound::Included(self.char_to_byte(val)), + Bound::Excluded(&val) => Bound::Excluded(self.char_to_byte(val)), + Bound::Unbounded => Bound::Unbounded, + }; + let end_bound = match char_range.end_bound() { + Bound::Included(&val) => Bound::Included(self.char_to_byte(val)), + Bound::Excluded(&val) => Bound::Excluded(self.char_to_byte(val)), + Bound::Unbounded => Bound::Unbounded, + }; + self.regex_input_at_bytes((start_bound, end_bound)) + } + fn regex_input_at_bytes>( + self, + byte_range: R, + ) -> RegexInput> { + let input = match byte_range.start_bound() { + Bound::Included(&pos) | Bound::Excluded(&pos) => { + RegexInput::new(RopeyCursor::at(self, pos)) + } + Bound::Unbounded => RegexInput::new(self), + }; + input.range(byte_range) + } + fn first_non_whitespace_char(self) -> Option { + self.chars().position(|ch| !ch.is_whitespace()) + } + fn last_non_whitespace_char(self) -> Option { + self.chars_at(self.len_chars()) + .reversed() + .position(|ch| !ch.is_whitespace()) + .map(|pos| self.len_chars() - pos - 1) + } } diff --git a/helix-term/Cargo.toml b/helix-term/Cargo.toml index ad1366a59..092da9b69 100644 --- a/helix-term/Cargo.toml +++ b/helix-term/Cargo.toml @@ -41,7 +41,7 @@ crossterm = { version = "0.27", features = ["event-stream"] } signal-hook = "0.3" tokio-stream = "0.1" futures-util = { version = "0.3", features = ["std", "async-await"], default-features = false } -arc-swap = { version = "1.6.0" } +arc-swap = { version = "1.7.0" } termini = "1" # Logging @@ -58,7 +58,7 @@ pulldown-cmark = { version = "0.10", default-features = false } content_inspector = "0.2.4" # opening URLs -open = "5.0.1" +open = "5.1.2" url = "2.5.0" # config @@ -86,4 +86,4 @@ helix-loader = { path = "../helix-loader" } [dev-dependencies] smallvec = "1.13" indoc = "2.0.4" -tempfile = "3.10.0" +tempfile = "3.10.1" diff --git a/helix-term/src/application.rs b/helix-term/src/application.rs index 30df3981c..809393c7f 100644 --- a/helix-term/src/application.rs +++ b/helix-term/src/application.rs @@ -724,7 +724,7 @@ impl Application { } Notification::PublishDiagnostics(mut params) => { let path = match params.uri.to_file_path() { - Ok(path) => path, + Ok(path) => helix_stdx::path::normalize(&path), Err(_) => { log::error!("Unsupported file URI: {}", params.uri); return; @@ -753,9 +753,7 @@ impl Application { let lang_conf = doc.language.clone(); if let Some(lang_conf) = &lang_conf { - if let Some(old_diagnostics) = - self.editor.diagnostics.get(¶ms.uri) - { + if let Some(old_diagnostics) = self.editor.diagnostics.get(&path) { if !lang_conf.persistent_diagnostic_sources.is_empty() { // Sort diagnostics first by severity and then by line numbers. 
// Note: The `lsp::DiagnosticSeverity` enum is already defined in decreasing order @@ -788,7 +786,7 @@ impl Application { // Insert the original lsp::Diagnostics here because we may have no open document // for diagnosic message and so we can't calculate the exact position. // When using them later in the diagnostics picker, we calculate them on-demand. - let diagnostics = match self.editor.diagnostics.entry(params.uri) { + let diagnostics = match self.editor.diagnostics.entry(path) { Entry::Occupied(o) => { let current_diagnostics = o.into_mut(); // there may entries of other language servers, which is why we can't overwrite the whole entry diff --git a/helix-term/src/commands.rs b/helix-term/src/commands.rs index fab735945..30ce25207 100644 --- a/helix-term/src/commands.rs +++ b/helix-term/src/commands.rs @@ -3,15 +3,19 @@ pub(crate) mod lsp; pub(crate) mod typed; pub use dap::*; +use helix_stdx::rope::{self, RopeSliceExt}; use helix_vcs::Hunk; pub use lsp::*; use tui::widgets::Row; pub use typed::*; use helix_core::{ - char_idx_at_visual_offset, comment, + char_idx_at_visual_offset, + chars::char_is_word, + comment, doc_formatter::TextFormat, - encoding, find_first_non_whitespace_char, find_workspace, graphemes, + encoding, find_workspace, + graphemes::{self, next_grapheme_boundary, RevRopeGraphemes}, history::UndoKind, increment, indent, indent::IndentStyle, @@ -19,13 +23,12 @@ use helix_core::{ match_brackets, movement::{self, move_vertically_visual, Direction}, object, pos_at_coords, - regex::{self, Regex, RegexBuilder}, + regex::{self, Regex}, search::{self, CharMatcher}, selection, shellwords, surround, - syntax::LanguageServerFeature, - text_annotations::TextAnnotations, + syntax::{BlockCommentToken, LanguageServerFeature}, + text_annotations::{Overlay, TextAnnotations}, textobject, - tree_sitter::Node, unicode::width::UnicodeWidthChar, visual_offset_from_block, Deletion, LineEnding, Position, Range, Rope, RopeGraphemes, RopeReader, RopeSlice, Selection, SmallVec, Tendril, Transaction, @@ -56,6 +59,7 @@ use crate::{ use crate::job::{self, Jobs}; use std::{ + cmp::Ordering, collections::{HashMap, HashSet}, fmt, future::Future, @@ -299,6 +303,8 @@ impl MappableCommand { extend_line, "Select current line, if already selected, extend to another line based on the anchor", extend_line_below, "Select current line, if already selected, extend to next line", extend_line_above, "Select current line, if already selected, extend to previous line", + select_line_above, "Select current line, if already selected, extend or shrink line above based on the anchor", + select_line_below, "Select current line, if already selected, extend or shrink line below based on the anchor", extend_to_line_bounds, "Extend selection to line bounds", shrink_to_line_bounds, "Shrink selection to line bounds", delete_selection, "Delete selection", @@ -414,6 +420,8 @@ impl MappableCommand { completion, "Invoke completion popup", hover, "Show docs for item under cursor", toggle_comments, "Comment/uncomment selections", + toggle_line_comments, "Line comment/uncomment selections", + toggle_block_comments, "Block comment/uncomment selections", rotate_selections_forward, "Rotate selections forward", rotate_selections_backward, "Rotate selections backward", rotate_selection_contents_forward, "Rotate selection contents forward", @@ -497,6 +505,8 @@ impl MappableCommand { record_macro, "Record macro", replay_macro, "Replay macro", command_palette, "Open command palette", + goto_word, "Jump to a two-character label", + 
extend_to_word, "Extend to a two-character label", ); } @@ -612,6 +622,7 @@ fn move_impl(cx: &mut Context, move_fn: MoveFn, dir: Direction, behaviour: Movem &mut annotations, ) }); + drop(annotations); doc.set_selection(view.id, selection); } @@ -821,7 +832,7 @@ fn kill_to_line_start(cx: &mut Context) { let head = if anchor == first_char && line != 0 { // select until previous line line_end_char_index(&text, line - 1) - } else if let Some(pos) = find_first_non_whitespace_char(text.line(line)) { + } else if let Some(pos) = text.line(line).first_non_whitespace_char() { if first_char + pos < anchor { // select until first non-blank in line if cursor is after it first_char + pos @@ -883,7 +894,7 @@ fn goto_first_nonwhitespace_impl(view: &mut View, doc: &mut Document, movement: let selection = doc.selection(view.id).clone().transform(|range| { let line = range.cursor_line(text); - if let Some(pos) = find_first_non_whitespace_char(text.line(line)) { + if let Some(pos) = text.line(line).first_non_whitespace_char() { let pos = pos + text.line_to_char(line); range.put_cursor(text, pos, movement == Movement::Extend) } else { @@ -1632,7 +1643,7 @@ pub fn scroll(cx: &mut Context, offset: usize, direction: Direction, sync_cursor let doc_text = doc.text().slice(..); let viewport = view.inner_area(doc); let text_fmt = doc.text_format(viewport.width, None); - let mut annotations = view.text_annotations(doc, None); + let mut annotations = view.text_annotations(&*doc, None); (view.offset.anchor, view.offset.vertical_offset) = char_idx_at_visual_offset( doc_text, view.offset.anchor, @@ -1710,6 +1721,7 @@ pub fn scroll(cx: &mut Context, offset: usize, direction: Direction, sync_cursor let mut sel = doc.selection(view.id).clone(); let idx = sel.primary_index(); sel = sel.replace(idx, prim_sel); + drop(annotations); doc.set_selection(view.id, sel); } @@ -1907,11 +1919,7 @@ fn split_selection(cx: &mut Context) { fn split_selection_on_newline(cx: &mut Context) { let (view, doc) = current!(cx.editor); let text = doc.text().slice(..); - // only compile the regex once - #[allow(clippy::trivial_regex)] - static REGEX: Lazy = - Lazy::new(|| Regex::new(r"\r\n|[\n\r\u{000B}\u{000C}\u{0085}\u{2028}\u{2029}]").unwrap()); - let selection = selection::split_on_matches(text, doc.selection(view.id), ®EX); + let selection = selection::split_on_newline(text, doc.selection(view.id)); doc.set_selection(view.id, selection); } @@ -1930,8 +1938,7 @@ fn merge_consecutive_selections(cx: &mut Context) { #[allow(clippy::too_many_arguments)] fn search_impl( editor: &mut Editor, - contents: &str, - regex: &Regex, + regex: &rope::Regex, movement: Movement, direction: Direction, scrolloff: usize, @@ -1959,23 +1966,20 @@ fn search_impl( // do a reverse search and wraparound to the end, we don't need to search // the text before the current cursor position for matches, but by slicing // it out, we need to add it back to the position of the selection. - let mut offset = 0; + let doc = doc!(editor).text().slice(..); // use find_at to find the next match after the cursor, loop around the end // Careful, `Regex` uses `bytes` as offsets, not character indices! 
let mut mat = match direction { - Direction::Forward => regex.find_at(contents, start), - Direction::Backward => regex.find_iter(&contents[..start]).last(), + Direction::Forward => regex.find(doc.regex_input_at_bytes(start..)), + Direction::Backward => regex.find_iter(doc.regex_input_at_bytes(..start)).last(), }; if mat.is_none() { if wrap_around { mat = match direction { - Direction::Forward => regex.find(contents), - Direction::Backward => { - offset = start; - regex.find_iter(&contents[start..]).last() - } + Direction::Forward => regex.find(doc.regex_input()), + Direction::Backward => regex.find_iter(doc.regex_input_at_bytes(start..)).last(), }; } if show_warnings { @@ -1992,8 +1996,8 @@ fn search_impl( let selection = doc.selection(view.id); if let Some(mat) = mat { - let start = text.byte_to_char(mat.start() + offset); - let end = text.byte_to_char(mat.end() + offset); + let start = text.byte_to_char(mat.start()); + let end = text.byte_to_char(mat.end()); if end == 0 { // skip empty matches that don't make sense @@ -2037,13 +2041,7 @@ fn searcher(cx: &mut Context, direction: Direction) { let scrolloff = config.scrolloff; let wrap_around = config.search.wrap_around; - let doc = doc!(cx.editor); - // TODO: could probably share with select_on_matches? - - // HAXX: sadly we can't avoid allocating a single string for the whole buffer since we can't - // feed chunks into the regex yet - let contents = doc.text().slice(..).to_string(); let completions = search_completions(cx, Some(reg)); ui::regex_prompt( @@ -2065,7 +2063,6 @@ fn searcher(cx: &mut Context, direction: Direction) { } search_impl( cx.editor, - &contents, ®ex, Movement::Move, direction, @@ -2085,8 +2082,6 @@ fn search_next_or_prev_impl(cx: &mut Context, movement: Movement, direction: Dir let config = cx.editor.config(); let scrolloff = config.scrolloff; if let Some(query) = cx.editor.registers.first(register, cx.editor) { - let doc = doc!(cx.editor); - let contents = doc.text().slice(..).to_string(); let search_config = &config.search; let case_insensitive = if search_config.smart_case { !query.chars().any(char::is_uppercase) @@ -2094,15 +2089,17 @@ fn search_next_or_prev_impl(cx: &mut Context, movement: Movement, direction: Dir false }; let wrap_around = search_config.wrap_around; - if let Ok(regex) = RegexBuilder::new(&query) - .case_insensitive(case_insensitive) - .multi_line(true) - .build() + if let Ok(regex) = rope::RegexBuilder::new() + .syntax( + rope::Config::new() + .case_insensitive(case_insensitive) + .multi_line(true), + ) + .build(&query) { for _ in 0..count { search_impl( cx.editor, - &contents, ®ex, movement, direction, @@ -2239,7 +2236,7 @@ fn global_search(cx: &mut Context) { let reg = cx.register.unwrap_or('/'); let completions = search_completions(cx, Some(reg)); - ui::regex_prompt( + ui::raw_regex_prompt( cx, "global-search:".into(), Some(reg), @@ -2250,7 +2247,7 @@ fn global_search(cx: &mut Context) { .map(|comp| (0.., std::borrow::Cow::Owned(comp.clone()))) .collect() }, - move |cx, regex, event| { + move |cx, _, input, event| { if event != PromptEvent::Validate { return; } @@ -2265,7 +2262,7 @@ fn global_search(cx: &mut Context) { if let Ok(matcher) = RegexMatcherBuilder::new() .case_smart(smart_case) - .build(regex.as_str()) + .build(input) { let search_root = helix_stdx::env::current_working_dir(); if !search_root.exists() { @@ -2437,7 +2434,6 @@ fn extend_line_below(cx: &mut Context) { fn extend_line_above(cx: &mut Context) { extend_line_impl(cx, Extend::Above); } - fn extend_line_impl(cx: &mut 
Context, extend: Extend) { let count = cx.count(); let (view, doc) = current!(cx.editor); @@ -2476,6 +2472,59 @@ fn extend_line_impl(cx: &mut Context, extend: Extend) { doc.set_selection(view.id, selection); } +fn select_line_below(cx: &mut Context) { + select_line_impl(cx, Extend::Below); +} +fn select_line_above(cx: &mut Context) { + select_line_impl(cx, Extend::Above); +} +fn select_line_impl(cx: &mut Context, extend: Extend) { + let mut count = cx.count(); + let (view, doc) = current!(cx.editor); + let text = doc.text(); + let saturating_add = |a: usize, b: usize| (a + b).min(text.len_lines()); + let selection = doc.selection(view.id).clone().transform(|range| { + let (start_line, end_line) = range.line_range(text.slice(..)); + let start = text.line_to_char(start_line); + let end = text.line_to_char(saturating_add(end_line, 1)); + let direction = range.direction(); + + // Extending to line bounds is counted as one step + if range.from() != start || range.to() != end { + count = count.saturating_sub(1) + } + let (anchor_line, head_line) = match (&extend, direction) { + (Extend::Above, Direction::Forward) => (start_line, end_line.saturating_sub(count)), + (Extend::Above, Direction::Backward) => (end_line, start_line.saturating_sub(count)), + (Extend::Below, Direction::Forward) => (start_line, saturating_add(end_line, count)), + (Extend::Below, Direction::Backward) => (end_line, saturating_add(start_line, count)), + }; + let (anchor, head) = match anchor_line.cmp(&head_line) { + Ordering::Less => ( + text.line_to_char(anchor_line), + text.line_to_char(saturating_add(head_line, 1)), + ), + Ordering::Equal => match extend { + Extend::Above => ( + text.line_to_char(saturating_add(anchor_line, 1)), + text.line_to_char(head_line), + ), + Extend::Below => ( + text.line_to_char(head_line), + text.line_to_char(saturating_add(anchor_line, 1)), + ), + }, + + Ordering::Greater => ( + text.line_to_char(saturating_add(anchor_line, 1)), + text.line_to_char(head_line), + ), + }; + Range::new(anchor, head) + }); + + doc.set_selection(view.id, selection); +} fn extend_to_line_bounds(cx: &mut Context) { let (view, doc) = current!(cx.editor); @@ -3091,11 +3140,11 @@ fn insert_with_indent(cx: &mut Context, cursor_fallback: IndentFallbackPos) { } else { // move cursor to the fallback position let pos = match cursor_fallback { - IndentFallbackPos::LineStart => { - find_first_non_whitespace_char(text.line(cursor_line)) - .map(|ws_offset| ws_offset + cursor_line_start) - .unwrap_or(cursor_line_start) - } + IndentFallbackPos::LineStart => text + .line(cursor_line) + .first_non_whitespace_char() + .map(|ws_offset| ws_offset + cursor_line_start) + .unwrap_or(cursor_line_start), IndentFallbackPos::LineEnd => line_end_char_index(&text, cursor_line), }; @@ -3386,48 +3435,55 @@ fn goto_last_diag(cx: &mut Context) { } fn goto_next_diag(cx: &mut Context) { - let (view, doc) = current!(cx.editor); + let motion = move |editor: &mut Editor| { + let (view, doc) = current!(editor); - let cursor_pos = doc - .selection(view.id) - .primary() - .cursor(doc.text().slice(..)); + let cursor_pos = doc + .selection(view.id) + .primary() + .cursor(doc.text().slice(..)); - let diag = doc - .diagnostics() - .iter() - .find(|diag| diag.range.start > cursor_pos) - .or_else(|| doc.diagnostics().first()); + let diag = doc + .diagnostics() + .iter() + .find(|diag| diag.range.start > cursor_pos) + .or_else(|| doc.diagnostics().first()); - let selection = match diag { - Some(diag) => Selection::single(diag.range.start, diag.range.end), - None 
=> return, + let selection = match diag { + Some(diag) => Selection::single(diag.range.start, diag.range.end), + None => return, + }; + doc.set_selection(view.id, selection); }; - doc.set_selection(view.id, selection); + + cx.editor.apply_motion(motion); } fn goto_prev_diag(cx: &mut Context) { - let (view, doc) = current!(cx.editor); + let motion = move |editor: &mut Editor| { + let (view, doc) = current!(editor); - let cursor_pos = doc - .selection(view.id) - .primary() - .cursor(doc.text().slice(..)); + let cursor_pos = doc + .selection(view.id) + .primary() + .cursor(doc.text().slice(..)); - let diag = doc - .diagnostics() - .iter() - .rev() - .find(|diag| diag.range.start < cursor_pos) - .or_else(|| doc.diagnostics().last()); - - let selection = match diag { - // NOTE: the selection is reversed because we're jumping to the - // previous diagnostic. - Some(diag) => Selection::single(diag.range.end, diag.range.start), - None => return, + let diag = doc + .diagnostics() + .iter() + .rev() + .find(|diag| diag.range.start < cursor_pos) + .or_else(|| doc.diagnostics().last()); + + let selection = match diag { + // NOTE: the selection is reversed because we're jumping to the + // previous diagnostic. + Some(diag) => Selection::single(diag.range.end, diag.range.start), + None => return, + }; + doc.set_selection(view.id, selection); }; - doc.set_selection(view.id, selection); + cx.editor.apply_motion(motion) } fn goto_first_change(cx: &mut Context) { @@ -4374,16 +4430,27 @@ fn join_selections_impl(cx: &mut Context, select_space: bool) { // select inserted spaces let transaction = if select_space { + let mut offset: usize = 0; let ranges: SmallVec<_> = changes .iter() - .scan(0, |offset, change| { - let range = Range::point(change.0 - *offset); - *offset += change.1 - change.0 - 1; // -1 because cursor is 0-sized - Some(range) + .filter_map(|change| { + if change.2.is_some() { + let range = Range::point(change.0 - offset); + offset += change.1 - change.0 - 1; // -1 adjusts for the replacement of the range by a space + Some(range) + } else { + offset += change.1 - change.0; + None + } }) .collect(); - let selection = Selection::new(ranges, 0); - Transaction::change(text, changes.into_iter()).with_selection(selection) + let t = Transaction::change(text, changes.into_iter()); + if ranges.is_empty() { + t + } else { + let selection = Selection::new(ranges, 0); + t.with_selection(selection) + } } else { Transaction::change(text, changes.into_iter()) }; @@ -4466,18 +4533,124 @@ pub fn completion(cx: &mut Context) { } // comments -fn toggle_comments(cx: &mut Context) { +type CommentTransactionFn = fn( + line_token: Option<&str>, + block_tokens: Option<&[BlockCommentToken]>, + doc: &Rope, + selection: &Selection, +) -> Transaction; + +fn toggle_comments_impl(cx: &mut Context, comment_transaction: CommentTransactionFn) { let (view, doc) = current!(cx.editor); - let token = doc + let line_token: Option<&str> = doc + .language_config() + .and_then(|lc| lc.comment_tokens.as_ref()) + .and_then(|tc| tc.first()) + .map(|tc| tc.as_str()); + let block_tokens: Option<&[BlockCommentToken]> = doc .language_config() - .and_then(|lc| lc.comment_token.as_ref()) - .map(|tc| tc.as_ref()); - let transaction = comment::toggle_line_comments(doc.text(), doc.selection(view.id), token); + .and_then(|lc| lc.block_comment_tokens.as_ref()) + .map(|tc| &tc[..]); + + let transaction = + comment_transaction(line_token, block_tokens, doc.text(), doc.selection(view.id)); doc.apply(&transaction, view.id); exit_select_mode(cx); } +/// 
commenting behavior: +/// 1. only line comment tokens -> line comment +/// 2. each line block commented -> uncomment all lines +/// 3. whole selection block commented -> uncomment selection +/// 4. all lines not commented and block tokens -> comment uncommented lines +/// 5. no comment tokens and not block commented -> line comment +fn toggle_comments(cx: &mut Context) { + toggle_comments_impl(cx, |line_token, block_tokens, doc, selection| { + let text = doc.slice(..); + + // only have line comment tokens + if line_token.is_some() && block_tokens.is_none() { + return comment::toggle_line_comments(doc, selection, line_token); + } + + let split_lines = comment::split_lines_of_selection(text, selection); + + let default_block_tokens = &[BlockCommentToken::default()]; + let block_comment_tokens = block_tokens.unwrap_or(default_block_tokens); + + let (line_commented, line_comment_changes) = + comment::find_block_comments(block_comment_tokens, text, &split_lines); + + // block commented by line would also be block commented so check this first + if line_commented { + return comment::create_block_comment_transaction( + doc, + &split_lines, + line_commented, + line_comment_changes, + ) + .0; + } + + let (block_commented, comment_changes) = + comment::find_block_comments(block_comment_tokens, text, selection); + + // check if selection has block comments + if block_commented { + return comment::create_block_comment_transaction( + doc, + selection, + block_commented, + comment_changes, + ) + .0; + } + + // not commented and only have block comment tokens + if line_token.is_none() && block_tokens.is_some() { + return comment::create_block_comment_transaction( + doc, + &split_lines, + line_commented, + line_comment_changes, + ) + .0; + } + + // not block commented at all and don't have any tokens + comment::toggle_line_comments(doc, selection, line_token) + }) +} + +fn toggle_line_comments(cx: &mut Context) { + toggle_comments_impl(cx, |line_token, block_tokens, doc, selection| { + if line_token.is_none() && block_tokens.is_some() { + let default_block_tokens = &[BlockCommentToken::default()]; + let block_comment_tokens = block_tokens.unwrap_or(default_block_tokens); + comment::toggle_block_comments( + doc, + &comment::split_lines_of_selection(doc.slice(..), selection), + block_comment_tokens, + ) + } else { + comment::toggle_line_comments(doc, selection, line_token) + } + }); +} + +fn toggle_block_comments(cx: &mut Context) { + toggle_comments_impl(cx, |line_token, block_tokens, doc, selection| { + if line_token.is_some() && block_tokens.is_none() { + comment::toggle_line_comments(doc, selection, line_token) + } else { + let default_block_tokens = &[BlockCommentToken::default()]; + let block_comment_tokens = block_tokens.unwrap_or(default_block_tokens); + comment::toggle_block_comments(doc, selection, block_comment_tokens) + } + }); +} + fn rotate_selections(cx: &mut Context, direction: Direction) { let count = cx.count(); let (view, doc) = current!(cx.editor); @@ -4598,18 +4771,17 @@ fn shrink_selection(cx: &mut Context) { cx.editor.apply_motion(motion); } -fn select_sibling_impl(cx: &mut Context, sibling_fn: &'static F) +fn select_sibling_impl(cx: &mut Context, sibling_fn: F) where - F: Fn(Node) -> Option, + F: Fn(&helix_core::Syntax, RopeSlice, Selection) -> Selection + 'static, { - let motion = |editor: &mut Editor| { + let motion = move |editor: &mut Editor| { let (view, doc) = current!(editor); if let Some(syntax) = doc.syntax() { let text = doc.text().slice(..); let current_selection = 
doc.selection(view.id); - let selection = - object::select_sibling(syntax, text, current_selection.clone(), sibling_fn); + let selection = sibling_fn(syntax, text, current_selection.clone()); doc.set_selection(view.id, selection); } }; @@ -4617,11 +4789,11 @@ where } fn select_next_sibling(cx: &mut Context) { - select_sibling_impl(cx, &|node| Node::next_sibling(&node)) + select_sibling_impl(cx, object::select_next_sibling) } fn select_prev_sibling(cx: &mut Context) { - select_sibling_impl(cx, &|node| Node::prev_sibling(&node)) + select_sibling_impl(cx, object::select_prev_sibling) } fn move_node_bound_impl(cx: &mut Context, dir: Direction, movement: Movement) { @@ -5167,12 +5339,21 @@ fn surround_replace(cx: &mut Context) { None => return doc.set_selection(view.id, selection), }; let (open, close) = surround::get_pair(to); + + // the changeset has to be sorted to allow nested surrounds + let mut sorted_pos: Vec<(usize, char)> = Vec::new(); + for p in change_pos.chunks(2) { + sorted_pos.push((p[0], open)); + sorted_pos.push((p[1], close)); + } + sorted_pos.sort_unstable(); + let transaction = Transaction::change( doc.text(), - change_pos.iter().enumerate().map(|(i, &pos)| { + sorted_pos.iter().map(|&pos| { let mut t = Tendril::new(); - t.push(if i % 2 == 0 { open } else { close }); - (pos, pos + 1, Some(t)) + t.push(pos.1); + (pos.0, pos.0 + 1, Some(t)) }), ); doc.set_selection(view.id, selection); @@ -5194,14 +5375,14 @@ fn surround_delete(cx: &mut Context) { let text = doc.text().slice(..); let selection = doc.selection(view.id); - let change_pos = match surround::get_surround_pos(text, selection, surround_ch, count) { + let mut change_pos = match surround::get_surround_pos(text, selection, surround_ch, count) { Ok(c) => c, Err(err) => { cx.editor.set_error(err.to_string()); return; } }; - + change_pos.sort_unstable(); // the changeset has to be sorted to allow nested surrounds let transaction = Transaction::change(doc.text(), change_pos.into_iter().map(|p| (p, p + 1, None))); doc.apply(&transaction, view.id); @@ -5257,16 +5438,9 @@ fn shell_keep_pipe(cx: &mut Context) { for (i, range) in selection.ranges().iter().enumerate() { let fragment = range.slice(text); - let (_output, success) = match shell_impl(shell, input, Some(fragment.into())) { - Ok(result) => result, - Err(err) => { - cx.editor.set_error(err.to_string()); - return; - } - }; - - // if the process exits successfully, keep the selection - if success { + if let Err(err) = shell_impl(shell, input, Some(fragment.into())) { + log::debug!("Shell command failed: {}", err); + } else { ranges.push(*range); if i >= old_index && index.is_none() { index = Some(ranges.len() - 1); @@ -5285,7 +5459,7 @@ fn shell_keep_pipe(cx: &mut Context) { ); } -fn shell_impl(shell: &[String], cmd: &str, input: Option) -> anyhow::Result<(Tendril, bool)> { +fn shell_impl(shell: &[String], cmd: &str, input: Option) -> anyhow::Result { tokio::task::block_in_place(|| helix_lsp::block_on(shell_impl_async(shell, cmd, input))) } @@ -5293,7 +5467,7 @@ async fn shell_impl_async( shell: &[String], cmd: &str, input: Option, -) -> anyhow::Result<(Tendril, bool)> { +) -> anyhow::Result { use std::process::Stdio; use tokio::process::Command; ensure!(!shell.is_empty(), "No shell set"); @@ -5356,7 +5530,7 @@ async fn shell_impl_async( let str = std::str::from_utf8(&output.stdout) .map_err(|_| anyhow!("Process did not output valid UTF-8"))?; let tendril = Tendril::from(str); - Ok((tendril, output.status.success())) + Ok(tendril) } fn shell(cx: &mut compositor::Context, 
cmd: &str, behavior: &ShellBehavior) { @@ -5377,14 +5551,14 @@ fn shell(cx: &mut compositor::Context, cmd: &str, behavior: &ShellBehavior) { let mut shell_output: Option = None; let mut offset = 0isize; for range in selection.ranges() { - let (output, success) = if let Some(output) = shell_output.as_ref() { - (output.clone(), true) + let output = if let Some(output) = shell_output.as_ref() { + output.clone() } else { let fragment = range.slice(text); match shell_impl(shell, cmd, pipe.then(|| fragment.into())) { Ok(result) => { if !pipe { - shell_output = Some(result.0.clone()); + shell_output = Some(result.clone()); } result } @@ -5395,11 +5569,6 @@ fn shell(cx: &mut compositor::Context, cmd: &str, behavior: &ShellBehavior) { } }; - if !success { - cx.editor.set_error("Command failed"); - return; - } - let output_len = output.chars().count(); let (from, to, deleted_len) = match behavior { @@ -5640,3 +5809,182 @@ fn replay_macro(cx: &mut Context) { cx.editor.macro_replaying.pop(); })); } + +fn goto_word(cx: &mut Context) { + jump_to_word(cx, Movement::Move) +} + +fn extend_to_word(cx: &mut Context) { + jump_to_word(cx, Movement::Extend) +} + +fn jump_to_label(cx: &mut Context, labels: Vec, behaviour: Movement) { + let doc = doc!(cx.editor); + let alphabet = &cx.editor.config().jump_label_alphabet; + if labels.is_empty() { + return; + } + let alphabet_char = |i| { + let mut res = Tendril::new(); + res.push(alphabet[i]); + res + }; + + // Add label for each jump candidate to the View as virtual text. + let text = doc.text().slice(..); + let mut overlays: Vec<_> = labels + .iter() + .enumerate() + .flat_map(|(i, range)| { + [ + Overlay::new(range.from(), alphabet_char(i / alphabet.len())), + Overlay::new( + graphemes::next_grapheme_boundary(text, range.from()), + alphabet_char(i % alphabet.len()), + ), + ] + }) + .collect(); + overlays.sort_unstable_by_key(|overlay| overlay.char_idx); + let (view, doc) = current!(cx.editor); + doc.set_jump_labels(view.id, overlays); + + // Accept two characters matching a visible label. Jump to the candidate + // for that label if it exists. + let primary_selection = doc.selection(view.id).primary(); + let view = view.id; + let doc = doc.id(); + cx.on_next_key(move |cx, event| { + let alphabet = &cx.editor.config().jump_label_alphabet; + let Some(i ) = event.char().and_then(|ch| alphabet.iter().position(|&it| it == ch)) else { + doc_mut!(cx.editor, &doc).remove_jump_labels(view); + return; + }; + let outer = i * alphabet.len(); + // Bail if the given character cannot be a jump label. 
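// Illustrative sketch, not part of the patch: the two-character label arithmetic used
// by jump_to_label above. `label_for` and `index_for` are hypothetical helpers; the
// real code inlines the same math in the overlay construction and the two
// `on_next_key` callbacks.
fn label_for(i: usize, alphabet: &[char]) -> (char, char) {
    let n = alphabet.len();
    // Candidate i is displayed as alphabet[i / n] followed by alphabet[i % n].
    (alphabet[i / n], alphabet[i % n])
}

fn index_for(first: char, second: char, alphabet: &[char]) -> Option<usize> {
    let n = alphabet.len();
    let outer = alphabet.iter().position(|&c| c == first)?;
    let inner = alphabet.iter().position(|&c| c == second)?;
    // With the default 26-letter alphabet, the label "bc" decodes to 1 * 26 + 2 = 28.
    Some(outer * n + inner)
}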
+        if outer > labels.len() {
+            doc_mut!(cx.editor, &doc).remove_jump_labels(view);
+            return;
+        }
+        cx.on_next_key(move |cx, event| {
+            doc_mut!(cx.editor, &doc).remove_jump_labels(view);
+            let alphabet = &cx.editor.config().jump_label_alphabet;
+            let Some(inner) = event.char().and_then(|ch| alphabet.iter().position(|&it| it == ch)) else {
+                return;
+            };
+            if let Some(mut range) = labels.get(outer + inner).copied() {
+                range = if behaviour == Movement::Extend {
+                    let anchor = if range.anchor < range.head {
+                        let from = primary_selection.from();
+                        if range.anchor < from {
+                            range.anchor
+                        } else {
+                            from
+                        }
+                    } else {
+                        let to = primary_selection.to();
+                        if range.anchor > to {
+                            range.anchor
+                        } else {
+                            to
+                        }
+                    };
+                    Range::new(anchor, range.head)
+                } else {
+                    range.with_direction(Direction::Forward)
+                };
+                doc_mut!(cx.editor, &doc).set_selection(view, range.into());
+            }
+        });
+    });
+}
+
+fn jump_to_word(cx: &mut Context, behaviour: Movement) {
+    // Calculate the jump candidates: ranges for any visible words with two or
+    // more characters.
+    let alphabet = &cx.editor.config().jump_label_alphabet;
+    let jump_label_limit = alphabet.len() * alphabet.len();
+    let mut words = Vec::with_capacity(jump_label_limit);
+    let (view, doc) = current_ref!(cx.editor);
+    let text = doc.text().slice(..);
+
+    // This is not necessarily exact if there is virtual text like soft wrap.
+    // It's ok though because the extra jump labels will not be rendered.
+    let start = text.line_to_char(text.char_to_line(view.offset.anchor));
+    let end = text.line_to_char(view.estimate_last_doc_line(doc) + 1);
+
+    let primary_selection = doc.selection(view.id).primary();
+    let cursor = primary_selection.cursor(text);
+    let mut cursor_fwd = Range::point(cursor);
+    let mut cursor_rev = Range::point(cursor);
+    if text.get_char(cursor).is_some_and(|c| !c.is_whitespace()) {
+        let cursor_word_end = movement::move_next_word_end(text, cursor_fwd, 1);
+        // single grapheme words need a special case
+        if cursor_word_end.anchor == cursor {
+            cursor_fwd = cursor_word_end;
+        }
+        let cursor_word_start = movement::move_prev_word_start(text, cursor_rev, 1);
+        if cursor_word_start.anchor == next_grapheme_boundary(text, cursor) {
+            cursor_rev = cursor_word_start;
+        }
+    }
+    'outer: loop {
+        let mut changed = false;
+        while cursor_fwd.head < end {
+            cursor_fwd = movement::move_next_word_end(text, cursor_fwd, 1);
+            // The cursor is on a word that is at least two graphemes long and
+            // made up of word characters. The latter condition is needed because
+            // move_next_word_end simply treats a sequence of characters from
+            // the same char class as a word so `=<` would also count as a word.
+            let add_label = RevRopeGraphemes::new(text.slice(..cursor_fwd.head))
+                .take(2)
+                .take_while(|g| g.chars().all(char_is_word))
+                .count()
+                == 2;
+            if !add_label {
+                continue;
+            }
+            changed = true;
+            // skip any leading whitespace
+            cursor_fwd.anchor += text
+                .chars_at(cursor_fwd.anchor)
+                .take_while(|&c| !char_is_word(c))
+                .count();
+            words.push(cursor_fwd);
+            if words.len() == jump_label_limit {
+                break 'outer;
+            }
+            break;
+        }
+        while cursor_rev.head > start {
+            cursor_rev = movement::move_prev_word_start(text, cursor_rev, 1);
+            // The cursor is on a word that is at least two graphemes long and
+            // made up of word characters. The latter condition is needed because
+            // move_prev_word_start simply treats a sequence of characters from
+            // the same char class as a word so `=<` would also count as a word.
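// Illustrative sketch, not part of the patch: the qualification test described in the
// comments above, reduced to plain chars. A position only receives a label when the
// two graphemes next to it are both word characters, so "fo" qualifies while "a " or
// "=<" do not. `is_word_candidate` is a hypothetical helper approximating
// `char_is_word` with standard-library predicates.
fn is_word_candidate(adjacent: [char; 2]) -> bool {
    adjacent
        .into_iter()
        .all(|c| c.is_alphanumeric() || c == '_')
}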
+ let add_label = RopeGraphemes::new(text.slice(cursor_rev.head..)) + .take(2) + .take_while(|g| g.chars().all(char_is_word)) + .count() + == 2; + if !add_label { + continue; + } + changed = true; + cursor_rev.anchor -= text + .chars_at(cursor_rev.anchor) + .reversed() + .take_while(|&c| !char_is_word(c)) + .count(); + words.push(cursor_rev); + if words.len() == jump_label_limit { + break 'outer; + } + break; + } + if !changed { + break; + } + } + jump_to_label(cx, words, behaviour) +} diff --git a/helix-term/src/commands/lsp.rs b/helix-term/src/commands/lsp.rs index a1f7bf17d..63d1608f9 100644 --- a/helix-term/src/commands/lsp.rs +++ b/helix-term/src/commands/lsp.rs @@ -38,7 +38,7 @@ use std::{ collections::{BTreeMap, HashSet}, fmt::Write, future::Future, - path::PathBuf, + path::{Path, PathBuf}, }; /// Gets the first language server that is attached to a document which supports a specific feature. @@ -134,7 +134,7 @@ struct DiagnosticStyles { } struct PickerDiagnostic { - url: lsp::Url, + path: PathBuf, diag: lsp::Diagnostic, offset_encoding: OffsetEncoding, } @@ -167,8 +167,7 @@ impl ui::menu::Item for PickerDiagnostic { let path = match format { DiagnosticsFormat::HideSourcePath => String::new(), DiagnosticsFormat::ShowSourcePath => { - let file_path = self.url.to_file_path().unwrap(); - let path = path::get_truncated_path(file_path); + let path = path::get_truncated_path(&self.path); format!("{}: ", path.to_string_lossy()) } }; @@ -208,24 +207,33 @@ fn jump_to_location( return; } }; + jump_to_position(editor, &path, location.range, offset_encoding, action); +} - let doc = match editor.open(&path, action) { +fn jump_to_position( + editor: &mut Editor, + path: &Path, + range: lsp::Range, + offset_encoding: OffsetEncoding, + action: Action, +) { + let doc = match editor.open(path, action) { Ok(id) => doc_mut!(editor, &id), Err(err) => { - let err = format!("failed to open path: {:?}: {:?}", location.uri, err); + let err = format!("failed to open path: {:?}: {:?}", path, err); editor.set_error(err); return; } }; let view = view_mut!(editor); // TODO: convert inside server - let new_range = - if let Some(new_range) = lsp_range_to_range(doc.text(), location.range, offset_encoding) { - new_range - } else { - log::warn!("lsp position out of bounds - {:?}", location.range); - return; - }; + let new_range = if let Some(new_range) = lsp_range_to_range(doc.text(), range, offset_encoding) + { + new_range + } else { + log::warn!("lsp position out of bounds - {:?}", range); + return; + }; // we flip the range so that the cursor sits on the start of the symbol // (for example start of the function). doc.set_selection(view.id, Selection::single(new_range.head, new_range.anchor)); @@ -258,21 +266,20 @@ enum DiagnosticsFormat { fn diag_picker( cx: &Context, - diagnostics: BTreeMap>, - _current_path: Option, + diagnostics: BTreeMap>, format: DiagnosticsFormat, ) -> Picker { // TODO: drop current_path comparison and instead use workspace: bool flag? 
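// Illustrative sketch, not part of the patch: the Url -> PathBuf conversion that keying
// diagnostics by path avoids repeating (and unwrapping) at every use site. The helper
// name is hypothetical and assumes the `lsp` re-export this file already imports.
fn diagnostic_path(uri: &lsp::Url) -> Option<std::path::PathBuf> {
    // to_file_path() fails for non-file URIs, so callers choose how to handle that
    // case instead of calling .unwrap() as the old picker code did.
    uri.to_file_path().ok()
}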
// flatten the map to a vec of (url, diag) pairs let mut flat_diag = Vec::new(); - for (url, diags) in diagnostics { + for (path, diags) in diagnostics { flat_diag.reserve(diags.len()); for (diag, ls) in diags { if let Some(ls) = cx.editor.language_server_by_id(ls) { flat_diag.push(PickerDiagnostic { - url: url.clone(), + path: path.clone(), diag, offset_encoding: ls.offset_encoding(), }); @@ -292,22 +299,17 @@ fn diag_picker( (styles, format), move |cx, PickerDiagnostic { - url, + path, diag, offset_encoding, }, action| { - jump_to_location( - cx.editor, - &lsp::Location::new(url.clone(), diag.range), - *offset_encoding, - action, - ) + jump_to_position(cx.editor, path, diag.range, *offset_encoding, action) }, ) - .with_preview(move |_editor, PickerDiagnostic { url, diag, .. }| { - let location = lsp::Location::new(url.clone(), diag.range); - Some(location_to_file_location(&location)) + .with_preview(move |_editor, PickerDiagnostic { path, diag, .. }| { + let line = Some((diag.range.start.line as usize, diag.range.end.line as usize)); + Some((path.clone().into(), line)) }) .truncate_start(false) } @@ -470,17 +472,16 @@ pub fn workspace_symbol_picker(cx: &mut Context) { pub fn diagnostics_picker(cx: &mut Context) { let doc = doc!(cx.editor); - if let Some(current_url) = doc.url() { + if let Some(current_path) = doc.path() { let diagnostics = cx .editor .diagnostics - .get(¤t_url) + .get(current_path) .cloned() .unwrap_or_default(); let picker = diag_picker( cx, - [(current_url.clone(), diagnostics)].into(), - Some(current_url), + [(current_path.clone(), diagnostics)].into(), DiagnosticsFormat::HideSourcePath, ); cx.push_layer(Box::new(overlaid(picker))); @@ -488,16 +489,9 @@ pub fn diagnostics_picker(cx: &mut Context) { } pub fn workspace_diagnostics_picker(cx: &mut Context) { - let doc = doc!(cx.editor); - let current_url = doc.url(); // TODO not yet filtered by LanguageServerFeature, need to do something similar as Document::shown_diagnostics here for all open documents let diagnostics = cx.editor.diagnostics.clone(); - let picker = diag_picker( - cx, - diagnostics, - current_url, - DiagnosticsFormat::ShowSourcePath, - ); + let picker = diag_picker(cx, diagnostics, DiagnosticsFormat::ShowSourcePath); cx.push_layer(Box::new(overlaid(picker))); } @@ -1321,11 +1315,11 @@ fn compute_inlay_hints_for_view( view_id, DocumentInlayHints { id: new_doc_inlay_hints_id, - type_inlay_hints: type_inlay_hints.into(), - parameter_inlay_hints: parameter_inlay_hints.into(), - other_inlay_hints: other_inlay_hints.into(), - padding_before_inlay_hints: padding_before_inlay_hints.into(), - padding_after_inlay_hints: padding_after_inlay_hints.into(), + type_inlay_hints, + parameter_inlay_hints, + other_inlay_hints, + padding_before_inlay_hints, + padding_after_inlay_hints, }, ); doc.inlay_hints_oudated = false; diff --git a/helix-term/src/commands/typed.rs b/helix-term/src/commands/typed.rs index 1ac058e78..41cacde44 100644 --- a/helix-term/src/commands/typed.rs +++ b/helix-term/src/commands/typed.rs @@ -1401,7 +1401,11 @@ fn reload_all( // Ensure that the view is synced with the document's history. 
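// Illustrative sketch, not part of the patch: the "report and continue" shape that
// reload_all switches to just below, with hypothetical `paths` and `reload` stand-ins.
// A single buffer that fails to reload no longer aborts reloading of the rest.
fn reload_all_sketch(paths: &[&str]) {
    for path in paths {
        if let Err(error) = reload(path) {
            eprintln!("failed to reload {path}: {error}");
            continue;
        }
    }
}

fn reload(path: &str) -> Result<(), std::io::Error> {
    std::fs::metadata(path).map(|_| ())
}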
view.sync_changes(doc); - doc.reload(view, &cx.editor.diff_providers)?; + if let Err(error) = doc.reload(view, &cx.editor.diff_providers) { + cx.editor.set_error(format!("{}", error)); + continue; + } + if let Some(path) = doc.path() { cx.editor .language_servers @@ -2340,7 +2344,7 @@ fn run_shell_command( let args = args.join(" "); let callback = async move { - let (output, success) = shell_impl_async(&shell, &args, None).await?; + let output = shell_impl_async(&shell, &args, None).await?; let call: job::Callback = Callback::EditorCompositor(Box::new( move |editor: &mut Editor, compositor: &mut Compositor| { if !output.is_empty() { @@ -2353,11 +2357,7 @@ fn run_shell_command( )); compositor.replace_or_push("shell", popup); } - if success { - editor.set_status("Command succeeded"); - } else { - editor.set_error("Command failed"); - } + editor.set_status("Command succeeded"); }, )); Ok(call) @@ -2497,6 +2497,46 @@ fn move_buffer( Ok(()) } +fn yank_diagnostic( + cx: &mut compositor::Context, + args: &[Cow], + event: PromptEvent, +) -> anyhow::Result<()> { + if event != PromptEvent::Validate { + return Ok(()); + } + + let reg = match args.first() { + Some(s) => { + ensure!(s.chars().count() == 1, format!("Invalid register {s}")); + s.chars().next().unwrap() + } + None => '+', + }; + + let (view, doc) = current_ref!(cx.editor); + let primary = doc.selection(view.id).primary(); + + // Look only for diagnostics that intersect with the primary selection + let diag: Vec<_> = doc + .diagnostics() + .iter() + .filter(|d| primary.overlaps(&helix_core::Range::new(d.range.start, d.range.end))) + .map(|d| d.message.clone()) + .collect(); + let n = diag.len(); + if n == 0 { + bail!("No diagnostics under primary selection"); + } + + cx.editor.registers.write(reg, diag)?; + cx.editor.set_status(format!( + "Yanked {n} diagnostic{} to register {reg}", + if n == 1 { "" } else { "s" } + )); + Ok(()) +} + pub const TYPABLE_COMMAND_LIST: &[TypableCommand] = &[ TypableCommand { name: "quit", @@ -3088,7 +3128,7 @@ pub const TYPABLE_COMMAND_LIST: &[TypableCommand] = &[ aliases: &[], doc: "Clear given register. If no argument is provided, clear all registers.", fun: clear_register, - signature: CommandSignature::none(), + signature: CommandSignature::all(completers::register), }, TypableCommand { name: "redraw", @@ -3104,6 +3144,13 @@ pub const TYPABLE_COMMAND_LIST: &[TypableCommand] = &[ fun: move_buffer, signature: CommandSignature::positional(&[completers::filename]), }, + TypableCommand { + name: "yank-diagnostic", + aliases: &[], + doc: "Yank diagnostic(s) under primary cursor to register, or clipboard by default", + fun: yank_diagnostic, + signature: CommandSignature::all(completers::register), + }, ]; pub static TYPABLE_COMMAND_MAP: Lazy> = diff --git a/helix-term/src/keymap/default.rs b/helix-term/src/keymap/default.rs index 92d6b5906..ca5a21d26 100644 --- a/helix-term/src/keymap/default.rs +++ b/helix-term/src/keymap/default.rs @@ -58,6 +58,7 @@ pub fn default() -> HashMap { "k" => move_line_up, "j" => move_line_down, "." => goto_last_modification, + "w" => goto_word, }, ":" => command_mode, @@ -276,6 +277,9 @@ pub fn default() -> HashMap { "k" => hover, "r" => rename_symbol, "h" => select_references_to_symbol_under_cursor, + "c" => toggle_comments, + "C" => toggle_block_comments, + "A-c" => toggle_line_comments, "?" 
=> command_palette, }, "z" => { "View" @@ -357,6 +361,7 @@ pub fn default() -> HashMap { "g" => { "Goto" "k" => extend_line_up, "j" => extend_line_down, + "w" => extend_to_word, }, })); let insert = keymap!({ "Insert mode" diff --git a/helix-term/src/lib.rs b/helix-term/src/lib.rs index cdde86ec5..8b7919951 100644 --- a/helix-term/src/lib.rs +++ b/helix-term/src/lib.rs @@ -48,10 +48,13 @@ fn true_color() -> bool { /// Function used for filtering dir entries in the various file pickers. fn filter_picker_entry(entry: &DirEntry, root: &Path, dedup_symlinks: bool) -> bool { - // We always want to ignore the .git directory, otherwise if + // We always want to ignore popular VCS directories, otherwise if // `ignore` is turned off, we end up with a lot of noise // in our picker. - if entry.file_name() == ".git" { + if matches!( + entry.file_name().to_str(), + Some(".git" | ".pijul" | ".jj" | ".hg") + ) { return false; } diff --git a/helix-term/src/ui/editor.rs b/helix-term/src/ui/editor.rs index 15a7262a8..ad7aa5c5a 100644 --- a/helix-term/src/ui/editor.rs +++ b/helix-term/src/ui/editor.rs @@ -360,7 +360,7 @@ impl EditorView { doc: &Document, theme: &Theme, ) -> [Vec<(usize, std::ops::Range)>; 5] { - use helix_core::diagnostic::Severity; + use helix_core::diagnostic::{DiagnosticTag, Severity}; let get_scope_of = |scope| { theme .find_scope_index_exact(scope) @@ -380,6 +380,10 @@ impl EditorView { let error = get_scope_of("diagnostic.error"); let r#default = get_scope_of("diagnostic"); // this is a bit redundant but should be fine + // Diagnostic tags + let unnecessary = theme.find_scope_index_exact("diagnostic.unnecessary"); + let deprecated = theme.find_scope_index_exact("diagnostic.deprecated"); + let mut default_vec: Vec<(usize, std::ops::Range)> = Vec::new(); let mut info_vec = Vec::new(); let mut hint_vec = Vec::new(); @@ -396,6 +400,15 @@ impl EditorView { _ => (&mut default_vec, r#default), }; + let scope = diagnostic + .tags + .first() + .and_then(|tag| match tag { + DiagnosticTag::Unnecessary => unnecessary, + DiagnosticTag::Deprecated => deprecated, + }) + .unwrap_or(scope); + // If any diagnostic overlaps ranges with the prior diagnostic, // merge the two together. Otherwise push a new span. match vec.last_mut() { @@ -716,7 +729,8 @@ impl EditorView { } } - let paragraph = Paragraph::new(lines) + let text = Text::from(lines); + let paragraph = Paragraph::new(&text) .alignment(Alignment::Right) .wrap(Wrap { trim: true }); let width = 100.min(viewport.width); @@ -902,13 +916,15 @@ impl EditorView { fn command_mode(&mut self, mode: Mode, cxt: &mut commands::Context, event: KeyEvent) { match (event, cxt.editor.count) { - // count handling - (key!(i @ '0'), Some(_)) | (key!(i @ '1'..='9'), _) - if !self.keymaps.contains_key(mode, event) => - { + // If the count is already started and the input is a number, always continue the count. + (key!(i @ '0'..='9'), Some(count)) => { + let i = i.to_digit(10).unwrap() as usize; + cxt.editor.count = NonZeroUsize::new(count.get() * 10 + i); + } + // A non-zero digit will start the count if that number isn't used by a keymap. 
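// Illustrative sketch, not part of the patch: how the two match arms above and below
// combine typed digits into a count. `push_digit` is a hypothetical helper mirroring
// that logic; '0' and digits that are bound in the keymap only extend an existing
// count, they never start one.
fn push_digit(count: Option<usize>, digit: usize, digit_is_mapped: bool) -> Option<usize> {
    match count {
        // An existing count is always extended: Some(1) then '2' becomes Some(12).
        Some(current) => Some(current * 10 + digit),
        // Only an unmapped, non-zero digit starts a new count.
        None if digit != 0 && !digit_is_mapped => Some(digit),
        None => None,
    }
}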
+ (key!(i @ '1'..='9'), None) if !self.keymaps.contains_key(mode, event) => { let i = i.to_digit(10).unwrap() as usize; - cxt.editor.count = - std::num::NonZeroUsize::new(cxt.editor.count.map_or(i, |c| c.get() * 10 + i)); + cxt.editor.count = NonZeroUsize::new(i); } // special handling for repeat operator (key!('.'), _) if self.keymaps.pending().is_empty() => { @@ -1032,13 +1048,33 @@ impl EditorView { } impl EditorView { + /// must be called whenever the editor processed input that + /// is not a `KeyEvent`. In these cases any pending keys/on next + /// key callbacks must be canceled. + fn handle_non_key_input(&mut self, cxt: &mut commands::Context) { + cxt.editor.status_msg = None; + cxt.editor.reset_idle_timer(); + // HACKS: create a fake key event that will never trigger any actual map + // and therefore simply acts as "dismiss" + let null_key_event = KeyEvent { + code: KeyCode::Null, + modifiers: KeyModifiers::empty(), + }; + // dismiss any pending keys + if let Some(on_next_key) = self.on_next_key.take() { + on_next_key(cxt, null_key_event); + } + self.handle_keymap_event(cxt.editor.mode, cxt, null_key_event); + self.pseudo_pending.clear(); + } + fn handle_mouse_event( &mut self, event: &MouseEvent, cxt: &mut commands::Context, ) -> EventResult { if event.kind != MouseEventKind::Moved { - cxt.editor.reset_idle_timer(); + self.handle_non_key_input(cxt) } let config = cxt.editor.config(); @@ -1263,6 +1299,7 @@ impl Component for EditorView { match event { Event::Paste(contents) => { + self.handle_non_key_input(&mut cx); cx.count = cx.editor.count; commands::paste_bracketed_value(&mut cx, contents.clone()); cx.editor.count = None; diff --git a/helix-term/src/ui/info.rs b/helix-term/src/ui/info.rs index cc6b7483f..651e5ca93 100644 --- a/helix-term/src/ui/info.rs +++ b/helix-term/src/ui/info.rs @@ -2,6 +2,7 @@ use crate::compositor::{Component, Context}; use helix_view::graphics::{Margin, Rect}; use helix_view::info::Info; use tui::buffer::Buffer as Surface; +use tui::text::Text; use tui::widgets::{Block, Borders, Paragraph, Widget}; impl Component for Info { @@ -31,7 +32,7 @@ impl Component for Info { let inner = block.inner(area).inner(&margin); block.render(area, surface); - Paragraph::new(self.text.as_str()) + Paragraph::new(&Text::from(self.text.as_str())) .style(text_style) .render(inner, surface); } diff --git a/helix-term/src/ui/lsp.rs b/helix-term/src/ui/lsp.rs index 879f963e7..a3698e38d 100644 --- a/helix-term/src/ui/lsp.rs +++ b/helix-term/src/ui/lsp.rs @@ -77,7 +77,7 @@ impl Component for SignatureHelp { let (_, sig_text_height) = crate::ui::text::required_size(&sig_text, area.width); let sig_text_area = area.clip_top(1).with_height(sig_text_height); let sig_text_area = sig_text_area.inner(&margin).intersection(surface.area); - let sig_text_para = Paragraph::new(sig_text).wrap(Wrap { trim: false }); + let sig_text_para = Paragraph::new(&sig_text).wrap(Wrap { trim: false }); sig_text_para.render(sig_text_area, surface); if self.signature_doc.is_none() { @@ -100,7 +100,7 @@ impl Component for SignatureHelp { let sig_doc_area = area .clip_top(sig_text_area.height + 2) .clip_bottom(u16::from(cx.editor.popup_border())); - let sig_doc_para = Paragraph::new(sig_doc) + let sig_doc_para = Paragraph::new(&sig_doc) .wrap(Wrap { trim: false }) .scroll((cx.scroll.unwrap_or_default() as u16, 0)); sig_doc_para.render(sig_doc_area.inner(&margin), surface); diff --git a/helix-term/src/ui/markdown.rs b/helix-term/src/ui/markdown.rs index 749d58508..81499d039 100644 --- 
a/helix-term/src/ui/markdown.rs +++ b/helix-term/src/ui/markdown.rs @@ -346,7 +346,7 @@ impl Component for Markdown { let text = self.parse(Some(&cx.editor.theme)); - let par = Paragraph::new(text) + let par = Paragraph::new(&text) .wrap(Wrap { trim: false }) .scroll((cx.scroll.unwrap_or_default() as u16, 0)); diff --git a/helix-term/src/ui/mod.rs b/helix-term/src/ui/mod.rs index 7fd9149e2..07e96c228 100644 --- a/helix-term/src/ui/mod.rs +++ b/helix-term/src/ui/mod.rs @@ -18,6 +18,7 @@ use crate::filter_picker_entry; use crate::job::{self, Callback}; pub use completion::{Completion, CompletionItem}; pub use editor::EditorView; +use helix_stdx::rope; pub use markdown::Markdown; pub use menu::Menu; pub use picker::{DynamicPicker, FileLocation, Picker}; @@ -26,8 +27,6 @@ pub use prompt::{Prompt, PromptEvent}; pub use spinner::{ProgressSpinners, Spinner}; pub use text::Text; -use helix_core::regex::Regex; -use helix_core::regex::RegexBuilder; use helix_view::Editor; use std::path::PathBuf; @@ -63,7 +62,22 @@ pub fn regex_prompt( prompt: std::borrow::Cow<'static, str>, history_register: Option, completion_fn: impl FnMut(&Editor, &str) -> Vec + 'static, - fun: impl Fn(&mut crate::compositor::Context, Regex, PromptEvent) + 'static, + fun: impl Fn(&mut crate::compositor::Context, rope::Regex, PromptEvent) + 'static, +) { + raw_regex_prompt( + cx, + prompt, + history_register, + completion_fn, + move |cx, regex, _, event| fun(cx, regex, event), + ); +} +pub fn raw_regex_prompt( + cx: &mut crate::commands::Context, + prompt: std::borrow::Cow<'static, str>, + history_register: Option, + completion_fn: impl FnMut(&Editor, &str) -> Vec + 'static, + fun: impl Fn(&mut crate::compositor::Context, rope::Regex, &str, PromptEvent) + 'static, ) { let (view, doc) = current!(cx.editor); let doc_id = view.doc; @@ -94,10 +108,13 @@ pub fn regex_prompt( false }; - match RegexBuilder::new(input) - .case_insensitive(case_insensitive) - .multi_line(true) - .build() + match rope::RegexBuilder::new() + .syntax( + rope::Config::new() + .case_insensitive(case_insensitive) + .multi_line(true), + ) + .build(input) { Ok(regex) => { let (view, doc) = current!(cx.editor); @@ -110,7 +127,7 @@ pub fn regex_prompt( view.jumps.push((doc_id, snapshot.clone())); } - fun(cx, regex, event); + fun(cx, regex, input, event); let (view, doc) = current!(cx.editor); view.ensure_cursor_in_view(doc, config.scrolloff); @@ -484,4 +501,18 @@ pub mod completers { files } } + + pub fn register(editor: &Editor, input: &str) -> Vec { + let iter = editor + .registers + .iter_preview() + // Exclude special registers that shouldn't be written to + .filter(|(ch, _)| !matches!(ch, '%' | '#' | '.')) + .map(|(ch, _)| ch.to_string()); + + fuzzy_match(input, iter, false) + .into_iter() + .map(|(name, _)| ((0..), name.into())) + .collect() + } } diff --git a/helix-term/src/ui/prompt.rs b/helix-term/src/ui/prompt.rs index a6ee7f05d..d46c13138 100644 --- a/helix-term/src/ui/prompt.rs +++ b/helix-term/src/ui/prompt.rs @@ -544,6 +544,10 @@ impl Component for Prompt { (self.callback_fn)(cx, &self.line, PromptEvent::Update); } ctrl!('h') | key!(Backspace) | shift!(Backspace) => { + if self.line.is_empty() { + (self.callback_fn)(cx, &self.line, PromptEvent::Abort); + return close_fn; + } self.delete_char_backwards(cx.editor); (self.callback_fn)(cx, &self.line, PromptEvent::Update); } diff --git a/helix-term/src/ui/statusline.rs b/helix-term/src/ui/statusline.rs index 9871828ee..2939a2573 100644 --- a/helix-term/src/ui/statusline.rs +++ 
b/helix-term/src/ui/statusline.rs @@ -4,7 +4,6 @@ use helix_view::document::DEFAULT_LANGUAGE_NAME; use helix_view::{ document::{Mode, SCRATCH_BUFFER_NAME}, graphics::Rect, - theme::Style, Document, Editor, View, }; @@ -20,7 +19,6 @@ pub struct RenderContext<'a> { pub view: &'a View, pub focused: bool, pub spinners: &'a ProgressSpinners, - pub parts: RenderBuffer<'a>, } impl<'a> RenderContext<'a> { @@ -37,18 +35,10 @@ impl<'a> RenderContext<'a> { view, focused, spinners, - parts: RenderBuffer::default(), } } } -#[derive(Default)] -pub struct RenderBuffer<'a> { - pub left: Spans<'a>, - pub center: Spans<'a>, - pub right: Spans<'a>, -} - pub fn render(context: &mut RenderContext, viewport: Rect, surface: &mut Surface) { let base_style = if context.focused { context.editor.theme.get("ui.statusline") @@ -58,90 +48,93 @@ pub fn render(context: &mut RenderContext, viewport: Rect, surface: &mut Surface surface.set_style(viewport.with_height(1), base_style); - let write_left = |context: &mut RenderContext, text, style| { - append(&mut context.parts.left, text, &base_style, style) - }; - let write_center = |context: &mut RenderContext, text, style| { - append(&mut context.parts.center, text, &base_style, style) - }; - let write_right = |context: &mut RenderContext, text, style| { - append(&mut context.parts.right, text, &base_style, style) - }; - - // Left side of the status line. - - let config = context.editor.config(); - - let element_ids = &config.statusline.left; - element_ids - .iter() - .map(|element_id| get_render_function(*element_id)) - .for_each(|render| render(context, write_left)); + let statusline = render_statusline(context, viewport.width as usize); surface.set_spans( viewport.x, viewport.y, - &context.parts.left, - context.parts.left.width() as u16, + &statusline, + statusline.width() as u16, ); +} - // Right side of the status line. +pub fn render_statusline<'a>(context: &mut RenderContext, width: usize) -> Spans<'a> { + let config = context.editor.config(); - let element_ids = &config.statusline.right; - element_ids + let element_ids = &config.statusline.left; + let mut left = element_ids .iter() .map(|element_id| get_render_function(*element_id)) - .for_each(|render| render(context, write_right)); - - surface.set_spans( - viewport.x - + viewport - .width - .saturating_sub(context.parts.right.width() as u16), - viewport.y, - &context.parts.right, - context.parts.right.width() as u16, - ); - - // Center of the status line. + .flat_map(|render| render(context).0) + .collect::>(); let element_ids = &config.statusline.center; - element_ids + let mut center = element_ids .iter() .map(|element_id| get_render_function(*element_id)) - .for_each(|render| render(context, write_center)); + .flat_map(|render| render(context).0) + .collect::>(); - // Width of the empty space between the left and center area and between the center and right area. 
- let spacing = 1u16; - - let edge_width = context.parts.left.width().max(context.parts.right.width()) as u16; - let center_max_width = viewport.width.saturating_sub(2 * edge_width + 2 * spacing); - let center_width = center_max_width.min(context.parts.center.width() as u16); - - surface.set_spans( - viewport.x + viewport.width / 2 - center_width / 2, - viewport.y, - &context.parts.center, - center_width, - ); -} + let element_ids = &config.statusline.right; + let mut right = element_ids + .iter() + .map(|element_id| get_render_function(*element_id)) + .flat_map(|render| render(context).0) + .collect::>(); + + let left_area_width: usize = left.iter().map(|s| s.width()).sum(); + let center_area_width: usize = center.iter().map(|s| s.width()).sum(); + let right_area_width: usize = right.iter().map(|s| s.width()).sum(); + + let min_spacing_between_areas = 1usize; + let sides_space_required = left_area_width + right_area_width + min_spacing_between_areas; + let total_space_required = sides_space_required + center_area_width + min_spacing_between_areas; + + let mut statusline: Vec = vec![]; + + if center_area_width > 0 && total_space_required <= width { + // SAFETY: this subtraction cannot underflow because `left_area_width + center_area_width + right_area_width` + // is smaller than `total_space_required`, which is smaller than `width` in this branch. + let total_spacers = width - (left_area_width + center_area_width + right_area_width); + // This is how much padding space it would take on either side to align the center area to the middle. + let center_margin = (width - center_area_width) / 2; + let left_spacers = if left_area_width < center_margin && right_area_width < center_margin { + // Align the center area to the middle if there is enough space on both sides. + center_margin - left_area_width + } else { + // Otherwise split the available space evenly and use it as margin. + // The center element won't be aligned to the middle but it will be evenly + // spaced between the left and right areas. + total_spacers / 2 + }; + let right_spacers = total_spacers - left_spacers; + + statusline.append(&mut left); + statusline.push(" ".repeat(left_spacers).into()); + statusline.append(&mut center); + statusline.push(" ".repeat(right_spacers).into()); + statusline.append(&mut right); + } else if right_area_width > 0 && sides_space_required <= width { + let side_areas_width = left_area_width + right_area_width; + statusline.append(&mut left); + statusline.push(" ".repeat(width - side_areas_width).into()); + statusline.append(&mut right); + } else if left_area_width <= width { + statusline.append(&mut left); + } -fn append(buffer: &mut Spans, text: String, base_style: &Style, style: Option
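// Illustrative sketch, not part of the patch: the spacer arithmetic in render_statusline
// above, evaluated for one concrete case. With width = 80 and left/center/right areas of
// 10, 8 and 12 columns, everything fits, so the center block is aligned to the middle.
fn spacer_example() {
    let (width, left, center, right) = (80usize, 10usize, 8usize, 12usize);
    let total_spacers = width - (left + center + right); // 50
    let center_margin = (width - center) / 2; // 36
    // Both side areas are narrower than the margin, so the center is centred exactly;
    // otherwise the leftover space would simply be split in half.
    let left_spacers = if left < center_margin && right < center_margin {
        center_margin - left // 26
    } else {
        total_spacers / 2
    };
    let right_spacers = total_spacers - left_spacers; // 24
    assert_eq!(left + left_spacers + center + right_spacers + right, width);
}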