merge in master

pull/8675/merge^2
mattwparas 1 year ago
commit 42c9997487

@ -160,7 +160,7 @@ jobs:
- name: Build AppImage
shell: bash
-if: matrix.build == 'aarch64-linux' || matrix.build == 'x86_64-linux'
if: matrix.build == 'x86_64-linux'
run: |
# Required as of 22.x https://github.com/AppImage/AppImageKit/wiki/FUSE
sudo add-apt-repository universe
@ -263,7 +263,7 @@ jobs:
mv bins-$platform/hx$exe $pkgname
chmod +x $pkgname/hx$exe
-if [[ "$platform" = "aarch64-linux" || "$platform" = "x86_64-linux" ]]; then
if [[ "$platform" = "x86_64-linux" ]]; then
mv bins-$platform/helix-*.AppImage* dist/
fi

@ -1,5 +1,2 @@
# Things that we don't want ripgrep to search that we do want in git
# https://github.com/BurntSushi/ripgrep/blob/master/GUIDE.md#automatic-filtering
-# Minified JS vendored from mdbook
-book/theme/highlight.js

Cargo.lock (generated, 692 changed lines): file diff suppressed because it is too large.

@ -18,6 +18,7 @@ default-members = [
[workspace.dependencies]
steel-core = { path = "../../steel/crates/steel-core", version = "0.5.0", features = ["modules", "anyhow", "dylibs", "colors"] }
tree-sitter = { version = "0.20", git = "https://github.com/tree-sitter/tree-sitter", rev = "ab09ae20d640711174b8da8a654f6b3dec93da1a" }
[profile.release]
lto = "thin"

@ -61,4 +61,4 @@ Discuss the project on the community [Matrix Space](https://matrix.to/#/#helix-c
# Credits
-Thanks to [@JakeHL](https://github.com/JakeHL) for designing the logo!
Thanks to [@jakenvac](https://github.com/jakenvac) for designing the logo!

@ -10,3 +10,7 @@ default-theme = "colibri"
preferred-dark-theme = "colibri"
git-repository-url = "https://github.com/helix-editor/helix"
edit-url-template = "https://github.com/helix-editor/helix/edit/master/book/{path}"
additional-css = ["custom.css"]
[output.html.search]
use-boolean-and = true

@ -0,0 +1,231 @@
html {
font-family: "Inter", sans-serif;
}
.sidebar .sidebar-scrollbox {
padding: 0;
}
.chapter {
margin: 0.25rem 0;
}
.chapter li.chapter-item {
line-height: initial;
margin: 0;
padding: 1rem 1.5rem;
}
.chapter .section li.chapter-item {
line-height: inherit;
padding: .5rem .5rem 0 .5rem;
}
.content {
overflow-y: auto;
padding: 0 15px;
padding-bottom: 50px;
}
/* 2 1.75 1.5 1.25 1 .875 */
.content h1 { font-size: 2em }
.content h2 { font-size: 1.75em }
.content h3 { font-size: 1.5em }
.content h4 { font-size: 1.25em }
.content h5 { font-size: 1em }
.content h6 { font-size: .875em }
.content h1,
.content h2,
.content h3,
.content h4 {
font-weight: 500;
margin-top: 1.275em;
margin-bottom: .875em;
}
.content p,
.content ol,
.content ul,
.content table {
margin-top: 0;
margin-bottom: .875em;
}
.content ul li {
margin-bottom: .25rem;
}
.content ul {
list-style-type: square;
}
.content ul ul,
.content ol ul {
margin-bottom: .5rem;
}
.content li p {
margin-bottom: .5em;
}
blockquote {
margin: 1.5rem 0;
padding: 1rem 1.5rem;
color: var(--fg);
opacity: .9;
background-color: var(--quote-bg);
border-left: 4px solid var(--quote-border);
border-top: none;
border-bottom: none;
}
blockquote *:last-child {
margin-bottom: 0;
}
table {
width: 100%;
}
table thead th {
padding: .75rem;
text-align: left;
font-weight: 500;
line-height: 1.5;
width: auto;
}
table td {
padding: .75rem;
border: none;
}
table thead tr {
border: none;
border-bottom: 2px var(--table-border-color) solid;
}
table tbody tr {
border-bottom: 1px var(--table-border-line) solid;
}
table tbody tr:nth-child(2n) {
background: unset;
}
pre code.hljs {
display: block;
overflow-x: auto;
padding: 1em;
}
code.hljs {
padding: 3px 5px;
}
.colibri {
--bg: #3b224c;
--fg: #bcbdd0;
--heading-fg: #fff;
--sidebar-bg: #281733;
--sidebar-fg: #c8c9db;
--sidebar-non-existent: #505274;
--sidebar-active: #a4a0e8;
--sidebar-spacer: #2d334f;
--scrollbar: var(--sidebar-fg);
--icons: #737480;
--icons-hover: #b7b9cc;
/* --links: #a4a0e8; */
--links: #ECCDBA;
--inline-code-color: hsl(48.7, 7.8%, 70%);
--theme-popup-bg: #161923;
--theme-popup-border: #737480;
--theme-hover: rgba(0, 0, 0, .2);
--quote-bg: #281733;
--quote-border: hsl(226, 15%, 22%);
--table-border-color: hsl(226, 23%, 76%);
--table-header-bg: hsla(226, 23%, 31%, 0);
--table-alternate-bg: hsl(226, 23%, 14%);
--table-border-line: hsla(201deg, 20%, 92%, 0.2);
--searchbar-border-color: #aaa;
--searchbar-bg: #aeaec6;
--searchbar-fg: #000;
--searchbar-shadow-color: #aaa;
--searchresults-header-fg: #5f5f71;
--searchresults-border-color: #5c5c68;
--searchresults-li-bg: #242430;
--search-mark-bg: #acff5;
}
.colibri .content .header {
color: #fff;
}
/* highlight.js theme, :where() is used to avoid increasing specificity */
:where(.colibri) .hljs {
background: #2f1e2e;
color: #a39e9b;
}
:where(.colibri) .hljs-comment,
:where(.colibri) .hljs-quote {
color: #8d8687;
}
:where(.colibri) .hljs-link,
:where(.colibri) .hljs-meta,
:where(.colibri) .hljs-name,
:where(.colibri) .hljs-regexp,
:where(.colibri) .hljs-selector-class,
:where(.colibri) .hljs-selector-id,
:where(.colibri) .hljs-tag,
:where(.colibri) .hljs-template-variable,
:where(.colibri) .hljs-variable {
color: #ef6155;
}
:where(.colibri) .hljs-built_in,
:where(.colibri) .hljs-deletion,
:where(.colibri) .hljs-literal,
:where(.colibri) .hljs-number,
:where(.colibri) .hljs-params,
:where(.colibri) .hljs-type {
color: #f99b15;
}
:where(.colibri) .hljs-attribute,
:where(.colibri) .hljs-section,
:where(.colibri) .hljs-title {
color: #fec418;
}
:where(.colibri) .hljs-addition,
:where(.colibri) .hljs-bullet,
:where(.colibri) .hljs-string,
:where(.colibri) .hljs-symbol {
color: #48b685;
}
:where(.colibri) .hljs-keyword,
:where(.colibri) .hljs-selector-tag {
color: #815ba4;
}
:where(.colibri) .hljs-emphasis {
font-style: italic;
}
:where(.colibri) .hljs-strong {
font-weight: 700;
}

@ -89,9 +89,9 @@ The `[editor.statusline]` key takes the following sub-keys:
| Key | Description | Default |
| --- | --- | --- |
-| `left` | A list of elements aligned to the left of the statusline | `["mode", "spinner", "file-name"]` |
| `left` | A list of elements aligned to the left of the statusline | `["mode", "spinner", "file-name", "read-only-indicator", "file-modification-indicator"]` |
| `center` | A list of elements aligned to the middle of the statusline | `[]` |
-| `right` | A list of elements aligned to the right of the statusline | `["diagnostics", "selections", "position", "file-encoding"]` |
| `right` | A list of elements aligned to the right of the statusline | `["diagnostics", "selections", "register", "position", "file-encoding"]` |
| `separator` | The character used to separate elements in the statusline | `"│"` |
| `mode.normal` | The text shown in the `mode` element for normal mode | `"NOR"` |
| `mode.insert` | The text shown in the `mode` element for insert mode | `"INS"` |
@ -108,6 +108,7 @@ The following statusline elements can be configured:
| `file-modification-indicator` | The indicator to show whether the file is modified (a `[+]` appears when there are unsaved changes) |
| `file-encoding` | The encoding of the opened file if it differs from UTF-8 |
| `file-line-ending` | The file line endings (CRLF or LF) |
| `read-only-indicator` | An indicator that shows `[readonly]` when a file cannot be written |
| `total-line-numbers` | The total line numbers of the opened file |
| `file-type` | The type of the opened file |
| `diagnostics` | The number of warnings and/or errors |
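For orientation, the keys and elements documented above correspond to a `config.toml` block like the following sketch; the values shown simply restate the defaults listed in the tables, so adjust them rather than treating this as a recommended layout:
```toml
[editor.statusline]
left = ["mode", "spinner", "file-name", "read-only-indicator", "file-modification-indicator"]
center = []
right = ["diagnostics", "selections", "register", "position", "file-encoding"]
separator = "│"
mode.normal = "NOR"
mode.insert = "INS"
```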
@ -347,3 +348,11 @@ max-wrap = 25 # increase value to reduce forced mid-word wrapping
max-indent-retain = 0
wrap-indicator = "" # set wrap-indicator to "" to hide it
```
### `[editor.smart-tab]` Section
| Key | Description | Default |
|------------|-------------|---------|
| `enable` | If set to true, then when the cursor is in a position with non-whitespace to its left, instead of inserting a tab, it will run `move_parent_node_end`. If there is only whitespace to the left, then it inserts a tab as normal. With the default bindings, to explicitly insert a tab character, press Shift-tab. | `true` |
| `supersede-menu` | Normally, when a menu is on screen, such as when auto complete is triggered, the tab key is bound to cycling through the items. This means when menus are on screen, one cannot use the tab key to trigger the `smart-tab` command. If this option is set to true, the `smart-tab` command always takes precedence, which means one cannot use the tab key to cycle through menu items. One of the other bindings must be used instead, such as arrow keys or `C-n`/`C-p`. | `false` |
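Expressed as configuration, the two options above form a `config.toml` section like this minimal sketch (both values are the documented defaults; flip them to change the behavior described in the table):
```toml
[editor.smart-tab]
# With non-whitespace to the left of the cursor, tab runs move_parent_node_end
# instead of inserting a tab character; Shift-tab still inserts a literal tab.
enable = true
# Set to true to let smart-tab take precedence over completion/menu cycling on tab.
supersede-menu = false
```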

@ -2,7 +2,7 @@
| --- | --- | --- | --- | --- |
| astro | ✓ | | | |
| awk | ✓ | ✓ | | `awk-language-server` |
| bash | ✓ | | ✓ | `bash-language-server` |
| bass | ✓ | | | `bass` |
| beancount | ✓ | | | |
| bibtex | ✓ | | | `texlab` |
@ -43,6 +43,7 @@
| fish | ✓ | ✓ | ✓ | |
| forth | ✓ | | | `forth-lsp` |
| fortran | ✓ | | ✓ | `fortls` |
| fsharp | ✓ | | | `fsautocomplete` |
| gdscript | ✓ | ✓ | ✓ | |
| git-attributes | ✓ | | | |
| git-commit | ✓ | ✓ | | |
@ -59,6 +60,7 @@
| graphql | ✓ | | | |
| hare | ✓ | | | |
| haskell | ✓ | ✓ | | `haskell-language-server-wrapper` |
| haskell-persistent | ✓ | | | |
| hcl | ✓ | | ✓ | `terraform-ls` |
| heex | ✓ | ✓ | | `elixir-ls` |
| hosts | ✓ | | | |
@ -67,8 +69,9 @@
| idris | | | | `idris2-lsp` |
| iex | ✓ | | | |
| ini | ✓ | | | |
| java | ✓ | ✓ | | `jdtls` |
| javascript | ✓ | ✓ | ✓ | `typescript-language-server` |
| jinja | ✓ | | | |
| jsdoc | ✓ | | | |
| json | ✓ | | ✓ | `vscode-json-language-server` |
| jsonnet | ✓ | | | `jsonnet-language-server` |
@ -98,6 +101,7 @@
| nim | ✓ | ✓ | ✓ | `nimlangserver` |
| nix | ✓ | | | `nil` |
| nu | ✓ | | | |
| nunjucks | ✓ | | | |
| ocaml | ✓ | | ✓ | `ocamllsp` |
| ocaml-interface | ✓ | | | `ocamllsp` |
| odin | ✓ | | ✓ | `ols` |
@ -107,13 +111,14 @@
| pascal | ✓ | ✓ | | `pasls` |
| passwd | ✓ | | | |
| pem | ✓ | | | |
| perl | ✓ | | | `perlnavigator` |
| php | ✓ | ✓ | ✓ | `intelephense` |
| po | ✓ | ✓ | | |
| pod | ✓ | | | |
| ponylang | ✓ | ✓ | ✓ | |
| prisma | ✓ | | | `prisma-language-server` |
| prolog | | | | `swipl` |
-| protobuf | ✓ | | ✓ | |
| protobuf | ✓ | | ✓ | `bufls`, `pb` |
| prql | ✓ | | | |
| purescript | ✓ | | | `purescript-language-server` |
| python | ✓ | ✓ | ✓ | `pylsp` |
@ -140,21 +145,24 @@
| sql | ✓ | | | |
| sshclientconfig | ✓ | | | |
| starlark | ✓ | ✓ | | |
-| svelte | ✓ | | | `svelteserver` |
| strace | ✓ | | | |
| svelte | ✓ | | ✓ | `svelteserver` |
| sway | ✓ | ✓ | ✓ | `forc` |
| swift | ✓ | | | `sourcekit-lsp` |
| t32 | ✓ | | | |
| tablegen | ✓ | ✓ | ✓ | |
| task | ✓ | | | |
| tfvars | ✓ | | ✓ | `terraform-ls` |
| todotxt | ✓ | | | |
| toml | ✓ | | | `taplo` |
| tsq | ✓ | | | |
| tsx | ✓ | ✓ | ✓ | `typescript-language-server` |
| twig | ✓ | | | |
| typescript | ✓ | ✓ | ✓ | `typescript-language-server` |
| ungrammar | ✓ | | | |
| unison | ✓ | | | |
| uxntal | ✓ | | | |
-| v | ✓ | ✓ | ✓ | `v` |
| v | ✓ | ✓ | ✓ | `v-analyzer` |
| vala | ✓ | | | `vala-language-server` |
| verilog | ✓ | ✓ | | `svlangserver` |
| vhdl | ✓ | | | `vhdl_ls` |
@ -165,6 +173,7 @@
| webc | ✓ | | | |
| wgsl | ✓ | | | `wgsl_analyzer` |
| wit | ✓ | | ✓ | |
| wren | ✓ | ✓ | ✓ | |
| xit | ✓ | | | |
| xml | ✓ | | ✓ | |
| yaml | ✓ | | ✓ | `yaml-language-server` |

@ -24,6 +24,7 @@
| `:write-quit`, `:wq`, `:x` | Write changes to disk and close the current view. Accepts an optional path (:wq some/path.txt) |
| `:write-quit!`, `:wq!`, `:x!` | Write changes to disk and close the current view forcefully. Accepts an optional path (:wq! some/path.txt) |
| `:write-all`, `:wa` | Write changes from all buffers to disk. |
| `:write-all!`, `:wa!` | Forcefully write changes from all buffers to disk creating necessary subdirectories. |
| `:write-quit-all`, `:wqa`, `:xa` | Write changes from all buffers to disk and close all views. |
| `:write-quit-all!`, `:wqa!`, `:xa!` | Write changes from all buffers to disk and close all views forcefully (ignoring unsaved changes). |
| `:quit-all`, `:qa` | Close all views. |

@ -1,76 +1,299 @@
# Adding indent queries
-Helix uses tree-sitter to correctly indent new lines. This requires
-a tree-sitter grammar and an `indent.scm` query file placed in
-`runtime/queries/{language}/indents.scm`. The indentation for a line
-is calculated by traversing the syntax tree from the lowest node at the
-beginning of the new line. Each of these nodes contributes to the total
-indent when it is captured by the query (in what way depends on the name
-of the capture).
Helix uses tree-sitter to correctly indent new lines. This requires a tree-
sitter grammar and an `indent.scm` query file placed in `runtime/queries/
{language}/indents.scm`. The indentation for a line is calculated by traversing
the syntax tree from the lowest node at the beginning of the new line (see
[Indent queries](#indent-queries)). Each of these nodes contributes to the total
indent when it is captured by the query (in what way depends on the name of
the capture).
Note that it matters where these added indents begin. For example,
multiple indent level increases that start on the same line only increase
-the total indent level by 1.
-## Scopes
-Added indents don't always apply to the whole node. For example, in most
-cases when a node should be indented, we actually only want everything
-except for its first line to be indented. For this, there are several
-scopes (more scopes may be added in the future if required):
-- `all`:
-This scope applies to the whole captured node. This is only different from
-`tail` when the captured node is the first node on its line.
-- `tail`:
-This scope applies to everything except for the first line of the
-captured node.
-Every capture type has a default scope which should do the right thing
-in most situations. When a different scope is required, this can be
-changed by using a `#set!` declaration anywhere in the pattern:
-```scm
-(assignment_expression
-right: (_) @indent
-(#set! "scope" "all"))
-```
-## Capture types
-- `@indent` (default scope `tail`):
-Increase the indent level by 1. Multiple occurrences in the same line
-don't stack. If there is at least one `@indent` and one `@outdent`
-capture on the same line, the indent level isn't changed at all.
the total indent level by 1. See [Capture types](#capture-types).
## Indent queries
When Helix is inserting a new line through `o`, `O`, or `<ret>`, to determine
the indent level for the new line, the query in `indents.scm` is run on the
document. The starting position of the query is the end of the line above where
a new line will be inserted.
For `o`, the inserted line is the line below the cursor, so that starting
position of the query is the end of the current line.
```rust
fn need_hero(some_hero: Hero, life: Life) -> {
matches!(some_hero, Hero { // ←─────────────────╮
strong: true,//←╮ ↑ ↑ │
fast: true, // │ │ ╰── query start │
sure: true, // │ ╰───── cursor ├─ traversal
soon: true, // ╰──────── new line inserted │ start node
}) && // │
// ↑ │
// ╰───────────────────────────────────────────────╯
some_hero > life
}
```
For `O`, the newly inserted line is the *current* line, so the starting position
of the query is the end of the line above the cursor.
```rust
fn need_hero(some_hero: Hero, life: Life) -> { // ←─╮
matches!(some_hero, Hero { // ←╮ ↑ │
strong: true,// ↑ ╭───╯ │ │
fast: true, // │ │ query start ─╯ │
sure: true, // ╰───┼ cursor ├─ traversal
soon: true, // ╰ new line inserted │ start node
}) && // │
some_hero > life // │
} // ←──────────────────────────────────────────────╯
```
From this starting node, the syntax tree is traversed up until the root node.
Each indent capture is collected along the way, and then combined according to
their [capture types](#capture-types) and [scopes](#scopes) to a final indent
level for the line.
### Capture types
- `@indent` (default scope `tail`):
Increase the indent level by 1. Multiple occurrences in the same line *do not*
stack. If there is at least one `@indent` and one `@outdent` capture on the
same line, the indent level isn't changed at all.
- `@outdent` (default scope `all`):
Decrease the indent level by 1. The same rules as for `@indent` apply.
- `@indent.always` (default scope `tail`):
Increase the indent level by 1. Multiple occurrences on the same line *do*
stack. The final indent level is `@indent.always` minus `@outdent.always`. If
an `@indent` and an `@indent.always` are on the same line, the `@indent` is
ignored.
- `@outdent.always` (default scope `all`):
Decrease the indent level by 1. The same rules as for `@indent.always` apply.
- `@align` (default scope `all`):
Align everything inside this node to some anchor. The anchor is given
by the start of the node captured by `@anchor` in the same pattern.
Every pattern with an `@align` should contain exactly one `@anchor`.
Indent (and outdent) for nodes below (in terms of their starting line)
the `@align` node is added to the indentation required for alignment.
- `@extend`:
-Extend the range of this node to the end of the line and to lines that
-are indented more than the line that this node starts on. This is useful
-for languages like Python, where for the purpose of indentation some nodes
-(like functions or classes) should also contain indented lines that follow them.
Extend the range of this node to the end of the line and to lines that are
indented more than the line that this node starts on. This is useful for
languages like Python, where for the purpose of indentation some nodes (like
functions or classes) should also contain indented lines that follow them.
- `@extend.prevent-once`:
Prevents the first extension of an ancestor of this node. For example, in Python
-a return expression always ends the block that it is in. Note that this only stops the
-extension of the next `@extend` capture. If multiple ancestors are captured,
-only the extension of the innermost one is prevented. All other ancestors are unaffected
-(regardless of whether the innermost ancestor would actually have been extended).
a return expression always ends the block that it is in. Note that this only
stops the extension of the next `@extend` capture. If multiple ancestors are
captured, only the extension of the innermost one is prevented. All other
ancestors are unaffected (regardless of whether the innermost ancestor would
actually have been extended).
#### `@indent` / `@outdent`
Consider this example:
```rust
fn shout(things: Vec<Thing>) {
// ↑
// ├───────────────────────╮ indent level
// @indent ├┄┄┄┄┄┄┄┄┄┄┄┄┄┄
// │
let it_all = |out| { things.filter(|thing| { // │ 1
// ↑ ↑ │
// ├───────────────────────┼─────┼┄┄┄┄┄┄┄┄┄┄┄┄┄┄
// @indent @indent
// │ 2
thing.can_do_with(out) // │
})}; // ├┄┄┄┄┄┄┄┄┄┄┄┄┄┄
//↑↑↑ │ 1
} //╰┼┴──────────────────────────────────────────────┴┄┄┄┄┄┄┄┄┄┄┄┄┄┄
// 3x @outdent
```
```scm
((block) @indent)
["}" ")"] @outdent
```
Note how on the second line, we have two blocks begin on the same line. In this
case, since both captures occur on the same line, they are combined and only
result in a net increase of 1. Also note that the closing `}`s are part of the
`@indent` captures, but the 3 `@outdent`s also combine into 1 and result in that
line losing one indent level.
#### `@extend` / `@extend.prevent-once`
For an example of where `@extend` can be useful, consider Python, which is
whitespace-sensitive.
```scm
[
(parenthesized_expression)
(function_definition)
(class_definition)
] @indent
```
```python
class Hero:
def __init__(self, strong, fast, sure, soon):# ←─╮
self.is_strong = strong # │
self.is_fast = fast # ╭─── query start │
self.is_sure = sure # │ ╭─ cursor │
self.is_soon = soon # │ │ │
# ↑ ↑ │ │ │
# │ ╰──────╯ │ │
# ╰─────────────────────╯ │
# ├─ traversal
def need_hero(self, life): # │ start node
return ( # │
self.is_strong # │
and self.is_fast # │
and self.is_sure # │
and self.is_soon # │
and self > life # │
) # ←─────────────────────────────────────────╯
```
Without braces to catch the scope of the function, the smallest descendant of
the cursor on a line feed ends up being the entire inside of the class. Because
of this, it will miss the entire function node and its indent capture, leading
to an indent level one too small.
To address this case, `@extend` tells helix to "extend" the captured node's span
to the line feed and every consecutive line that has a greater indent level than
the line of the node.
```scm
(parenthesized_expression) @indent
[
(function_definition)
(class_definition)
] @indent @extend
```
```python
class Hero:
def __init__(self, strong, fast, sure, soon):# ←─╮
self.is_strong = strong # │
self.is_fast = fast # ╭─── query start ├─ traversal
self.is_sure = sure # │ ╭─ cursor │ start node
self.is_soon = soon # │ │ ←───────────────╯
# ↑ ↑ │ │
# │ ╰──────╯ │
# ╰─────────────────────╯
def need_hero(self, life):
return (
self.is_strong
and self.is_fast
and self.is_sure
and self.is_soon
and self > life
)
```
Furthermore, there are some cases where extending to everything with a greater
indent level may not be desirable. Consider the `need_hero` function above, with
the cursor on the last line of the returned expression:
```python
class Hero:
def __init__(self, strong, fast, sure, soon):
self.is_strong = strong
self.is_fast = fast
self.is_sure = sure
self.is_soon = soon
def need_hero(self, life):
return (
self.is_strong
and self.is_fast
and self.is_sure
and self.is_soon
and self > life
) # ←─── cursor
#←────────── where cursor should go on new line
```
In Python, there are a few tokens that will always end a scope, such as a return
statement. Since the scope ends, so should the indent level. But because the
function span is extended to every line with a greater indent level, a new line
would just continue on the same level. And an `@outdent` would not help us here
either, since it would cause everything in the parentheses to become outdented
as well.
To help, we need to signal an end to the extension. We can do this with
`@extend.prevent-once`.
```scm
(parenthesized_expression) @indent
[
(function_definition)
(class_definition)
] @indent @extend
(return_statement) @extend.prevent-once
```
#### `@indent.always` / `@outdent.always`
As mentioned before, normally if there is more than one `@indent` or `@outdent`
capture on the same line, they are combined.
Sometimes, there are cases when you may want to ensure that every indent capture
is additive, regardless of how many occur on the same line. Consider this
example in YAML.
```yaml
- foo: bar
# ↑ ↑
# │ ╰─────────────── start of map
# ╰───────────────── start of list element
baz: quux # ←─── cursor
# ←───────────── where the cursor should go on a new line
garply: waldo
- quux:
bar: baz
xyzzy: thud
fred: plugh
```
In YAML, you often have lists of maps. In these cases, the syntax is such that
the list element and the map both start on the same line. But we really do want
to start an indentation for each of these so that subsequent keys in the map
hang over the list and align properly. This is where `@indent.always` helps.
```scm
((block_sequence_item) @item @indent.always @extend
(#not-one-line? @item))
((block_mapping_pair
key: (_) @key
value: (_) @val
(#not-same-line? @key @val)
) @indent.always @extend
)
```
## Predicates
In some cases, an S-expression cannot express exactly what pattern should be matched.
For that, tree-sitter allows for predicates to appear anywhere within a pattern,
similar to how `#set!` declarations work:
```scm
(some_kind
(child_kind) @indent
(#predicate? arg1 arg2 ...)
)
```
The number of arguments depends on the predicate that's used.
Each argument is either a capture (`@name`) or a string (`"some string"`).
The following predicates are supported by tree-sitter:
@ -91,3 +314,47 @@ argument (a string).
- `#same-line?`/`#not-same-line?`:
The captures given by the 2 arguments must/must not start on the same line.
- `#one-line?`/`#not-one-line?`:
The captures given by the first argument must/must not span a total of one line.
### Scopes
Added indents don't always apply to the whole node. For example, in most
cases when a node should be indented, we actually only want everything
except for its first line to be indented. For this, there are several
scopes (more scopes may be added in the future if required):
- `tail`:
This scope applies to everything except for the first line of the
captured node.
- `all`:
This scope applies to the whole captured node. This is only different from
`tail` when the captured node is the first node on its line.
For example, imagine we have the following function
```rust
fn aha() { // ←─────────────────────────────────────╮
let take = "on me"; // ←──────────────╮ scope: │
let take = "me on"; // ├─ "tail" ├─ (block) @indent
let ill = be_gone_days(1 || 2); // │ │
} // ←───────────────────────────────────┴──────────┴─ "}" @outdent
// scope: "all"
```
We can write the following query with the `#set!` declaration:
```scm
((block) @indent
(#set! "scope" "tail"))
("}" @outdent
(#set! "scope" "all"))
```
As we can see, the "tail" scope covers the node, except for the first line.
Everything up to and including the closing brace gets an indent level of 1.
Then, on the closing brace, we encounter an outdent with a scope of "all", which
means the first line is included, and the indent level is cancelled out on this
line. (Note these scopes are the defaults for `@indent` and `@outdent`—they are
written explicitly for demonstration.)

@ -6,9 +6,10 @@
- [Linux](#linux)
- [Ubuntu](#ubuntu)
- [Fedora/RHEL](#fedorarhel)
-- [Arch Linux community](#arch-linux-community)
- [Arch Linux extra](#arch-linux-extra)
- [NixOS](#nixos)
- [Flatpak](#flatpak)
- [Snap](#snap)
- [AppImage](#appimage)
- [macOS](#macos)
- [Homebrew Core](#homebrew-core)
@ -70,9 +71,9 @@ sudo dnf copr enable varlad/helix
sudo dnf install helix
```
-### Arch Linux community
### Arch Linux extra
-Releases are available in the `community` repository:
Releases are available in the `extra` repository:
```sh
sudo pacman -S helix
@ -104,6 +105,16 @@ flatpak install flathub com.helix_editor.Helix
flatpak run com.helix_editor.Helix
```
### Snap
Helix is available on [Snapcraft](https://snapcraft.io/helix) and can be installed with:
```sh
snap install --classic helix
```
This will install Helix as both `/snap/bin/helix` and `/snap/bin/hx`, so make sure `/snap/bin` is in your `PATH`.
### AppImage
Install Helix using the Linux [AppImage](https://appimage.org/) format.

@ -291,7 +291,7 @@ This layer is a kludge of mappings, mostly pickers.
| `w` | Enter [window mode](#window-mode) | N/A |
| `p` | Paste system clipboard after selections | `paste_clipboard_after` |
| `P` | Paste system clipboard before selections | `paste_clipboard_before` |
-| `y` | Join and yank selections to clipboard | `yank_joined_to_clipboard` |
| `y` | Yank selections to clipboard | `yank_to_clipboard` |
| `Y` | Yank main selection to clipboard | `yank_main_selection_to_clipboard` |
| `R` | Replace selections by clipboard contents | `replace_selections_with_clipboard` |
| `/` | Global search in workspace folder | `global_search` |
@ -420,6 +420,7 @@ Keys to use within picker. Remapping currently not supported.
| `Home` | Go to first entry |
| `End` | Go to last entry |
| `Enter` | Open selected |
| `Alt-Enter` | Open selected in the background without closing the picker |
| `Ctrl-s` | Open horizontally |
| `Ctrl-v` | Open vertically |
| `Ctrl-t` | Toggle preview |

@ -37,19 +37,35 @@ If a register is selected before invoking a change or delete command, the select
- `"hc` - Store the selection in register `h` and then change it (delete and enter insert mode).
- `"md` - Store the selection in register `m` and delete it.
-### Special registers
### Default registers
Commands that use registers, like yank (`y`), use a default register if none is specified.
These registers are used as defaults:
| Register character | Contains |
| --- | --- |
| `/` | Last search |
| `:` | Last executed command |
| `"` | Last yanked text |
-| `_` | Black hole |
| `@` | Last recorded macro |
-The system clipboard is not directly supported by a special register. Instead, special commands and keybindings are provided. Refer to the
-[key map](keymap.md#space-mode) for more details.
-The black hole register is a no-op register, meaning that no data will be read or written to it.
### Special registers
Some registers have special behavior when read from and written to.
| Register character | When read | When written |
| --- | --- | --- |
| `_` | No values are returned | All values are discarded |
| `#` | Selection indices (first selection is `1`, second is `2`, etc.) | This register is not writable |
| `.` | Contents of the current selections | This register is not writable |
| `%` | Name of the current file | This register is not writable |
| `*` | Reads from the system clipboard | Joins and yanks to the system clipboard |
| `+` | Reads from the primary clipboard | Joins and yanks to the primary clipboard |
When yanking multiple selections to the clipboard registers, the selections
are joined with newlines. Pasting from these registers will paste multiple
selections if the clipboard was last yanked to by the Helix session. Otherwise
the clipboard contents are pasted as one selection.
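Tying this back to the register-selection syntax at the top of this section, two illustrative key sequences (examples of the behavior described above, not new bindings):
- `"*y` - Yank the current selections into the system clipboard register, joining them with newlines.
- `"*p` - Paste from the system clipboard register after each selection; within the same Helix session this restores the multiple selections, otherwise the clipboard contents are pasted as one selection.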
## Surround ## Surround

@ -1,660 +0,0 @@
"use strict";
// Fix back button cache problem
window.onunload = function () { };
// Global variable, shared between modules
function playground_text(playground) {
let code_block = playground.querySelector("code");
if (window.ace && code_block.classList.contains("editable")) {
let editor = window.ace.edit(code_block);
return editor.getValue();
} else {
return code_block.textContent;
}
}
(function codeSnippets() {
function fetch_with_timeout(url, options, timeout = 6000) {
return Promise.race([
fetch(url, options),
new Promise((_, reject) => setTimeout(() => reject(new Error('timeout')), timeout))
]);
}
var playgrounds = Array.from(document.querySelectorAll(".playground"));
if (playgrounds.length > 0) {
fetch_with_timeout("https://play.rust-lang.org/meta/crates", {
headers: {
'Content-Type': "application/json",
},
method: 'POST',
mode: 'cors',
})
.then(response => response.json())
.then(response => {
// get list of crates available in the rust playground
let playground_crates = response.crates.map(item => item["id"]);
playgrounds.forEach(block => handle_crate_list_update(block, playground_crates));
});
}
function handle_crate_list_update(playground_block, playground_crates) {
// update the play buttons after receiving the response
update_play_button(playground_block, playground_crates);
// and install on change listener to dynamically update ACE editors
if (window.ace) {
let code_block = playground_block.querySelector("code");
if (code_block.classList.contains("editable")) {
let editor = window.ace.edit(code_block);
editor.addEventListener("change", function (e) {
update_play_button(playground_block, playground_crates);
});
// add Ctrl-Enter command to execute rust code
editor.commands.addCommand({
name: "run",
bindKey: {
win: "Ctrl-Enter",
mac: "Ctrl-Enter"
},
exec: _editor => run_rust_code(playground_block)
});
}
}
}
// updates the visibility of play button based on `no_run` class and
// used crates vs ones available on http://play.rust-lang.org
function update_play_button(pre_block, playground_crates) {
var play_button = pre_block.querySelector(".play-button");
// skip if code is `no_run`
if (pre_block.querySelector('code').classList.contains("no_run")) {
play_button.classList.add("hidden");
return;
}
// get list of `extern crate`'s from snippet
var txt = playground_text(pre_block);
var re = /extern\s+crate\s+([a-zA-Z_0-9]+)\s*;/g;
var snippet_crates = [];
var item;
while (item = re.exec(txt)) {
snippet_crates.push(item[1]);
}
// check if all used crates are available on play.rust-lang.org
var all_available = snippet_crates.every(function (elem) {
return playground_crates.indexOf(elem) > -1;
});
if (all_available) {
play_button.classList.remove("hidden");
} else {
play_button.classList.add("hidden");
}
}
function run_rust_code(code_block) {
var result_block = code_block.querySelector(".result");
if (!result_block) {
result_block = document.createElement('code');
result_block.className = 'result hljs language-bash';
code_block.append(result_block);
}
let text = playground_text(code_block);
let classes = code_block.querySelector('code').classList;
let has_2018 = classes.contains("edition2018");
let edition = has_2018 ? "2018" : "2015";
var params = {
version: "stable",
optimize: "0",
code: text,
edition: edition
};
if (text.indexOf("#![feature") !== -1) {
params.version = "nightly";
}
result_block.innerText = "Running...";
fetch_with_timeout("https://play.rust-lang.org/evaluate.json", {
headers: {
'Content-Type': "application/json",
},
method: 'POST',
mode: 'cors',
body: JSON.stringify(params)
})
.then(response => response.json())
.then(response => result_block.innerText = response.result)
.catch(error => result_block.innerText = "Playground Communication: " + error.message);
}
// Syntax highlighting Configuration
hljs.configure({
tabReplace: ' ', // 4 spaces
languages: [], // Languages used for auto-detection
});
let code_nodes = Array
.from(document.querySelectorAll('code'))
// Don't highlight `inline code` blocks in headers.
.filter(function (node) {return !node.parentElement.classList.contains("header"); });
if (window.ace) {
// language-rust class needs to be removed for editable
// blocks or highlightjs will capture events
Array
.from(document.querySelectorAll('code.editable'))
.forEach(function (block) { block.classList.remove('language-rust'); });
Array
.from(document.querySelectorAll('code:not(.editable)'))
.forEach(function (block) { hljs.highlightBlock(block); });
} else {
code_nodes.forEach(function (block) { hljs.highlightBlock(block); });
}
// Adding the hljs class gives code blocks the color css
// even if highlighting doesn't apply
code_nodes.forEach(function (block) { block.classList.add('hljs'); });
Array.from(document.querySelectorAll("code.language-rust")).forEach(function (block) {
var lines = Array.from(block.querySelectorAll('.boring'));
// If no lines were hidden, return
if (!lines.length) { return; }
block.classList.add("hide-boring");
var buttons = document.createElement('div');
buttons.className = 'buttons';
buttons.innerHTML = "<button class=\"fa fa-eye\" title=\"Show hidden lines\" aria-label=\"Show hidden lines\"></button>";
// add expand button
var pre_block = block.parentNode;
pre_block.insertBefore(buttons, pre_block.firstChild);
pre_block.querySelector('.buttons').addEventListener('click', function (e) {
if (e.target.classList.contains('fa-eye')) {
e.target.classList.remove('fa-eye');
e.target.classList.add('fa-eye-slash');
e.target.title = 'Hide lines';
e.target.setAttribute('aria-label', e.target.title);
block.classList.remove('hide-boring');
} else if (e.target.classList.contains('fa-eye-slash')) {
e.target.classList.remove('fa-eye-slash');
e.target.classList.add('fa-eye');
e.target.title = 'Show hidden lines';
e.target.setAttribute('aria-label', e.target.title);
block.classList.add('hide-boring');
}
});
});
if (window.playground_copyable) {
Array.from(document.querySelectorAll('pre code')).forEach(function (block) {
var pre_block = block.parentNode;
if (!pre_block.classList.contains('playground')) {
var buttons = pre_block.querySelector(".buttons");
if (!buttons) {
buttons = document.createElement('div');
buttons.className = 'buttons';
pre_block.insertBefore(buttons, pre_block.firstChild);
}
var clipButton = document.createElement('button');
clipButton.className = 'fa fa-copy clip-button';
clipButton.title = 'Copy to clipboard';
clipButton.setAttribute('aria-label', clipButton.title);
clipButton.innerHTML = '<i class=\"tooltiptext\"></i>';
buttons.insertBefore(clipButton, buttons.firstChild);
}
});
}
// Process playground code blocks
Array.from(document.querySelectorAll(".playground")).forEach(function (pre_block) {
// Add play button
var buttons = pre_block.querySelector(".buttons");
if (!buttons) {
buttons = document.createElement('div');
buttons.className = 'buttons';
pre_block.insertBefore(buttons, pre_block.firstChild);
}
var runCodeButton = document.createElement('button');
runCodeButton.className = 'fa fa-play play-button';
runCodeButton.hidden = true;
runCodeButton.title = 'Run this code';
runCodeButton.setAttribute('aria-label', runCodeButton.title);
buttons.insertBefore(runCodeButton, buttons.firstChild);
runCodeButton.addEventListener('click', function (e) {
run_rust_code(pre_block);
});
if (window.playground_copyable) {
var copyCodeClipboardButton = document.createElement('button');
copyCodeClipboardButton.className = 'fa fa-copy clip-button';
copyCodeClipboardButton.innerHTML = '<i class="tooltiptext"></i>';
copyCodeClipboardButton.title = 'Copy to clipboard';
copyCodeClipboardButton.setAttribute('aria-label', copyCodeClipboardButton.title);
buttons.insertBefore(copyCodeClipboardButton, buttons.firstChild);
}
let code_block = pre_block.querySelector("code");
if (window.ace && code_block.classList.contains("editable")) {
var undoChangesButton = document.createElement('button');
undoChangesButton.className = 'fa fa-history reset-button';
undoChangesButton.title = 'Undo changes';
undoChangesButton.setAttribute('aria-label', undoChangesButton.title);
buttons.insertBefore(undoChangesButton, buttons.firstChild);
undoChangesButton.addEventListener('click', function () {
let editor = window.ace.edit(code_block);
editor.setValue(editor.originalCode);
editor.clearSelection();
});
}
});
})();
(function themes() {
var html = document.querySelector('html');
var themeToggleButton = document.getElementById('theme-toggle');
var themePopup = document.getElementById('theme-list');
var themeColorMetaTag = document.querySelector('meta[name="theme-color"]');
var stylesheets = {
ayuHighlight: document.querySelector("[href$='ayu-highlight.css']"),
tomorrowNight: document.querySelector("[href$='tomorrow-night.css']"),
highlight: document.querySelector("[href$='highlight.css']"),
};
function showThemes() {
themePopup.style.display = 'block';
themeToggleButton.setAttribute('aria-expanded', true);
themePopup.querySelector("button#" + get_theme()).focus();
}
function hideThemes() {
themePopup.style.display = 'none';
themeToggleButton.setAttribute('aria-expanded', false);
themeToggleButton.focus();
}
function get_theme() {
var theme;
try { theme = localStorage.getItem('mdbook-theme'); } catch (e) { }
if (theme === null || theme === undefined) {
return default_theme;
} else {
return theme;
}
}
function set_theme(theme, store = true) {
let ace_theme;
if (theme == 'coal' || theme == 'navy') {
stylesheets.ayuHighlight.disabled = true;
stylesheets.tomorrowNight.disabled = false;
stylesheets.highlight.disabled = true;
ace_theme = "ace/theme/tomorrow_night";
} else if (theme == 'ayu') {
stylesheets.ayuHighlight.disabled = false;
stylesheets.tomorrowNight.disabled = true;
stylesheets.highlight.disabled = true;
ace_theme = "ace/theme/tomorrow_night";
} else {
stylesheets.ayuHighlight.disabled = true;
stylesheets.tomorrowNight.disabled = true;
stylesheets.highlight.disabled = false;
ace_theme = "ace/theme/dawn";
}
setTimeout(function () {
themeColorMetaTag.content = getComputedStyle(document.body).backgroundColor;
}, 1);
if (window.ace && window.editors) {
window.editors.forEach(function (editor) {
editor.setTheme(ace_theme);
});
}
var previousTheme = get_theme();
if (store) {
try { localStorage.setItem('mdbook-theme', theme); } catch (e) { }
}
html.classList.remove(previousTheme);
html.classList.add(theme);
}
// Set theme
var theme = get_theme();
set_theme(theme, false);
themeToggleButton.addEventListener('click', function () {
if (themePopup.style.display === 'block') {
hideThemes();
} else {
showThemes();
}
});
themePopup.addEventListener('click', function (e) {
var theme = e.target.id || e.target.parentElement.id;
set_theme(theme);
});
themePopup.addEventListener('focusout', function(e) {
// e.relatedTarget is null in Safari and Firefox on macOS (see workaround below)
if (!!e.relatedTarget && !themeToggleButton.contains(e.relatedTarget) && !themePopup.contains(e.relatedTarget)) {
hideThemes();
}
});
// Should not be needed, but it works around an issue on macOS & iOS: https://github.com/rust-lang/mdBook/issues/628
document.addEventListener('click', function(e) {
if (themePopup.style.display === 'block' && !themeToggleButton.contains(e.target) && !themePopup.contains(e.target)) {
hideThemes();
}
});
document.addEventListener('keydown', function (e) {
if (e.altKey || e.ctrlKey || e.metaKey || e.shiftKey) { return; }
if (!themePopup.contains(e.target)) { return; }
switch (e.key) {
case 'Escape':
e.preventDefault();
hideThemes();
break;
case 'ArrowUp':
e.preventDefault();
var li = document.activeElement.parentElement;
if (li && li.previousElementSibling) {
li.previousElementSibling.querySelector('button').focus();
}
break;
case 'ArrowDown':
e.preventDefault();
var li = document.activeElement.parentElement;
if (li && li.nextElementSibling) {
li.nextElementSibling.querySelector('button').focus();
}
break;
case 'Home':
e.preventDefault();
themePopup.querySelector('li:first-child button').focus();
break;
case 'End':
e.preventDefault();
themePopup.querySelector('li:last-child button').focus();
break;
}
});
})();
(function sidebar() {
var html = document.querySelector("html");
var sidebar = document.getElementById("sidebar");
var sidebarLinks = document.querySelectorAll('#sidebar a');
var sidebarToggleButton = document.getElementById("sidebar-toggle");
var sidebarResizeHandle = document.getElementById("sidebar-resize-handle");
var firstContact = null;
function showSidebar() {
html.classList.remove('sidebar-hidden')
html.classList.add('sidebar-visible');
Array.from(sidebarLinks).forEach(function (link) {
link.setAttribute('tabIndex', 0);
});
sidebarToggleButton.setAttribute('aria-expanded', true);
sidebar.setAttribute('aria-hidden', false);
try { localStorage.setItem('mdbook-sidebar', 'visible'); } catch (e) { }
}
var sidebarAnchorToggles = document.querySelectorAll('#sidebar a.toggle');
function toggleSection(ev) {
ev.currentTarget.parentElement.classList.toggle('expanded');
}
Array.from(sidebarAnchorToggles).forEach(function (el) {
el.addEventListener('click', toggleSection);
});
function hideSidebar() {
html.classList.remove('sidebar-visible')
html.classList.add('sidebar-hidden');
Array.from(sidebarLinks).forEach(function (link) {
link.setAttribute('tabIndex', -1);
});
sidebarToggleButton.setAttribute('aria-expanded', false);
sidebar.setAttribute('aria-hidden', true);
try { localStorage.setItem('mdbook-sidebar', 'hidden'); } catch (e) { }
}
// Toggle sidebar
sidebarToggleButton.addEventListener('click', function sidebarToggle() {
if (html.classList.contains("sidebar-hidden")) {
var current_width = parseInt(
document.documentElement.style.getPropertyValue('--sidebar-width'), 10);
if (current_width < 150) {
document.documentElement.style.setProperty('--sidebar-width', '150px');
}
showSidebar();
} else if (html.classList.contains("sidebar-visible")) {
hideSidebar();
} else {
if (getComputedStyle(sidebar)['transform'] === 'none') {
hideSidebar();
} else {
showSidebar();
}
}
});
sidebarResizeHandle.addEventListener('mousedown', initResize, false);
function initResize(e) {
window.addEventListener('mousemove', resize, false);
window.addEventListener('mouseup', stopResize, false);
html.classList.add('sidebar-resizing');
}
function resize(e) {
var pos = (e.clientX - sidebar.offsetLeft);
if (pos < 20) {
hideSidebar();
} else {
if (html.classList.contains("sidebar-hidden")) {
showSidebar();
}
pos = Math.min(pos, window.innerWidth - 100);
document.documentElement.style.setProperty('--sidebar-width', pos + 'px');
}
}
//on mouseup remove windows functions mousemove & mouseup
function stopResize(e) {
html.classList.remove('sidebar-resizing');
window.removeEventListener('mousemove', resize, false);
window.removeEventListener('mouseup', stopResize, false);
}
document.addEventListener('touchstart', function (e) {
firstContact = {
x: e.touches[0].clientX,
time: Date.now()
};
}, { passive: true });
document.addEventListener('touchmove', function (e) {
if (!firstContact)
return;
var curX = e.touches[0].clientX;
var xDiff = curX - firstContact.x,
tDiff = Date.now() - firstContact.time;
if (tDiff < 250 && Math.abs(xDiff) >= 150) {
if (xDiff >= 0 && firstContact.x < Math.min(document.body.clientWidth * 0.25, 300))
showSidebar();
else if (xDiff < 0 && curX < 300)
hideSidebar();
firstContact = null;
}
}, { passive: true });
// Scroll sidebar to current active section
var activeSection = document.getElementById("sidebar").querySelector(".active");
if (activeSection) {
// https://developer.mozilla.org/en-US/docs/Web/API/Element/scrollIntoView
activeSection.scrollIntoView({ block: 'center' });
}
})();
(function chapterNavigation() {
document.addEventListener('keydown', function (e) {
if (e.altKey || e.ctrlKey || e.metaKey || e.shiftKey) { return; }
if (window.search && window.search.hasFocus()) { return; }
switch (e.key) {
case 'ArrowRight':
e.preventDefault();
var nextButton = document.querySelector('.nav-chapters.next');
if (nextButton) {
window.location.href = nextButton.href;
}
break;
case 'ArrowLeft':
e.preventDefault();
var previousButton = document.querySelector('.nav-chapters.previous');
if (previousButton) {
window.location.href = previousButton.href;
}
break;
}
});
})();
(function clipboard() {
var clipButtons = document.querySelectorAll('.clip-button');
function hideTooltip(elem) {
elem.firstChild.innerText = "";
elem.className = 'fa fa-copy clip-button';
}
function showTooltip(elem, msg) {
elem.firstChild.innerText = msg;
elem.className = 'fa fa-copy tooltipped';
}
var clipboardSnippets = new ClipboardJS('.clip-button', {
text: function (trigger) {
hideTooltip(trigger);
let playground = trigger.closest("pre");
return playground_text(playground);
}
});
Array.from(clipButtons).forEach(function (clipButton) {
clipButton.addEventListener('mouseout', function (e) {
hideTooltip(e.currentTarget);
});
});
clipboardSnippets.on('success', function (e) {
e.clearSelection();
showTooltip(e.trigger, "Copied!");
});
clipboardSnippets.on('error', function (e) {
showTooltip(e.trigger, "Clipboard error!");
});
})();
(function scrollToTop () {
var menuTitle = document.querySelector('.menu-title');
menuTitle.addEventListener('click', function () {
document.scrollingElement.scrollTo({ top: 0, behavior: 'smooth' });
});
})();
(function controlMenu() {
var menu = document.getElementById('menu-bar');
(function controlPosition() {
var scrollTop = document.scrollingElement.scrollTop;
var prevScrollTop = scrollTop;
var minMenuY = -menu.clientHeight - 50;
// When the script loads, the page can be at any scroll (e.g. if you reforesh it).
menu.style.top = scrollTop + 'px';
// Same as parseInt(menu.style.top.slice(0, -2), but faster
var topCache = menu.style.top.slice(0, -2);
menu.classList.remove('sticky');
var stickyCache = false; // Same as menu.classList.contains('sticky'), but faster
document.addEventListener('scroll', function () {
scrollTop = Math.max(document.scrollingElement.scrollTop, 0);
// `null` means that it doesn't need to be updated
var nextSticky = null;
var nextTop = null;
var scrollDown = scrollTop > prevScrollTop;
var menuPosAbsoluteY = topCache - scrollTop;
if (scrollDown) {
nextSticky = false;
if (menuPosAbsoluteY > 0) {
nextTop = prevScrollTop;
}
} else {
if (menuPosAbsoluteY > 0) {
nextSticky = true;
} else if (menuPosAbsoluteY < minMenuY) {
nextTop = prevScrollTop + minMenuY;
}
}
if (nextSticky === true && stickyCache === false) {
menu.classList.add('sticky');
stickyCache = true;
} else if (nextSticky === false && stickyCache === true) {
menu.classList.remove('sticky');
stickyCache = false;
}
if (nextTop !== null) {
menu.style.top = nextTop + 'px';
topCache = nextTop;
}
prevScrollTop = scrollTop;
}, { passive: true });
})();
(function controlBorder() {
menu.classList.remove('bordered');
document.addEventListener('scroll', function () {
if (menu.offsetTop === 0) {
menu.classList.remove('bordered');
} else {
menu.classList.add('bordered');
}
}, { passive: true });
})();
})();

@ -1,499 +0,0 @@
/* CSS for UI elements (a.k.a. chrome) */
@import 'variables.css';
::-webkit-scrollbar {
background: var(--bg);
}
::-webkit-scrollbar-thumb {
background: var(--scrollbar);
}
html {
scrollbar-color: var(--scrollbar) var(--bg);
}
#searchresults a,
.content a:link,
a:visited,
a > .hljs {
color: var(--links);
}
.content a:hover {
text-decoration: underline;
}
/* Menu Bar */
#menu-bar,
#menu-bar-hover-placeholder {
z-index: 101;
margin: auto calc(0px - var(--page-padding));
}
#menu-bar {
position: relative;
display: flex;
flex-wrap: wrap;
background-color: var(--bg);
border-bottom-color: var(--bg);
border-bottom-width: 1px;
border-bottom-style: solid;
}
#menu-bar.sticky,
.js #menu-bar-hover-placeholder:hover + #menu-bar,
.js #menu-bar:hover,
.js.sidebar-visible #menu-bar {
position: -webkit-sticky;
position: sticky;
top: 0 !important;
}
#menu-bar-hover-placeholder {
position: sticky;
position: -webkit-sticky;
top: 0;
height: var(--menu-bar-height);
}
#menu-bar.bordered {
border-bottom-color: var(--table-border-color);
}
#menu-bar i, #menu-bar .icon-button {
position: relative;
padding: 0 8px;
z-index: 10;
line-height: var(--menu-bar-height);
cursor: pointer;
transition: color 0.5s;
}
@media only screen and (max-width: 420px) {
#menu-bar i, #menu-bar .icon-button {
padding: 0 5px;
}
}
.icon-button {
border: none;
background: none;
padding: 0;
color: inherit;
}
.icon-button i {
margin: 0;
}
.right-buttons {
margin: 0 15px;
}
.right-buttons a {
text-decoration: none;
}
.left-buttons {
display: flex;
margin: 0 5px;
}
.no-js .left-buttons {
display: none;
}
.menu-title {
display: inline-block;
font-weight: 200;
font-size: 2.4rem;
line-height: var(--menu-bar-height);
text-align: center;
margin: 0;
flex: 1;
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
}
.js .menu-title {
cursor: pointer;
}
.menu-bar,
.menu-bar:visited,
.nav-chapters,
.nav-chapters:visited,
.mobile-nav-chapters,
.mobile-nav-chapters:visited,
.menu-bar .icon-button,
.menu-bar a i {
color: var(--icons);
}
.menu-bar i:hover,
.menu-bar .icon-button:hover,
.nav-chapters:hover,
.mobile-nav-chapters i:hover {
color: var(--icons-hover);
}
/* Nav Icons */
.nav-chapters {
font-size: 2.5em;
text-align: center;
text-decoration: none;
position: fixed;
top: 0;
bottom: 0;
margin: 0;
max-width: 150px;
min-width: 90px;
display: flex;
justify-content: center;
align-content: center;
flex-direction: column;
transition: color 0.5s, background-color 0.5s;
}
.nav-chapters:hover {
text-decoration: none;
background-color: var(--theme-hover);
transition: background-color 0.15s, color 0.15s;
}
.nav-wrapper {
margin-top: 50px;
display: none;
}
.mobile-nav-chapters {
font-size: 2.5em;
text-align: center;
text-decoration: none;
width: 90px;
border-radius: 5px;
background-color: var(--sidebar-bg);
}
.previous {
float: left;
}
.next {
float: right;
right: var(--page-padding);
}
@media only screen and (max-width: 1080px) {
.nav-wide-wrapper { display: none; }
.nav-wrapper { display: block; }
}
@media only screen and (max-width: 1380px) {
.sidebar-visible .nav-wide-wrapper { display: none; }
.sidebar-visible .nav-wrapper { display: block; }
}
/* Inline code */
:not(pre) > .hljs {
display: inline;
padding: 0.1em 0.3em;
border-radius: 3px;
}
:not(pre):not(a):not(td):not(p) > .hljs {
color: var(--inline-code-color);
overflow-x: initial;
}
a:hover > .hljs {
text-decoration: underline;
}
pre {
position: relative;
}
pre > .buttons {
position: absolute;
z-index: 100;
right: 5px;
top: 5px;
color: var(--sidebar-fg);
cursor: pointer;
}
pre > .buttons :hover {
color: var(--sidebar-active);
}
pre > .buttons i {
margin-left: 8px;
}
pre > .buttons button {
color: inherit;
background: transparent;
border: none;
cursor: inherit;
}
pre > .result {
margin-top: 10px;
}
/* Search */
#searchresults a {
text-decoration: none;
}
mark {
border-radius: 2px;
padding: 0 3px 1px 3px;
margin: 0 -3px -1px -3px;
background-color: var(--search-mark-bg);
transition: background-color 300ms linear;
cursor: pointer;
}
mark.fade-out {
background-color: rgba(0,0,0,0) !important;
cursor: auto;
}
.searchbar-outer {
margin-left: auto;
margin-right: auto;
max-width: var(--content-max-width);
}
#searchbar {
width: 100%;
margin: 5px auto 0px auto;
padding: 10px 16px;
transition: box-shadow 300ms ease-in-out;
border: 1px solid var(--searchbar-border-color);
border-radius: 3px;
background-color: var(--searchbar-bg);
color: var(--searchbar-fg);
}
#searchbar:focus,
#searchbar.active {
box-shadow: 0 0 3px var(--searchbar-shadow-color);
}
.searchresults-header {
font-weight: bold;
font-size: 1em;
padding: 18px 0 0 5px;
color: var(--searchresults-header-fg);
}
.searchresults-outer {
margin-left: auto;
margin-right: auto;
max-width: var(--content-max-width);
border-bottom: 1px dashed var(--searchresults-border-color);
}
ul#searchresults {
list-style: none;
padding-left: 20px;
}
ul#searchresults li {
margin: 10px 0px;
padding: 2px;
border-radius: 2px;
}
ul#searchresults li.focus {
background-color: var(--searchresults-li-bg);
}
ul#searchresults span.teaser {
display: block;
clear: both;
margin: 5px 0 0 20px;
font-size: 0.8em;
}
ul#searchresults span.teaser em {
font-weight: bold;
font-style: normal;
}
/* Sidebar */
.sidebar {
position: fixed;
left: 0;
top: 0;
bottom: 0;
width: var(--sidebar-width);
font-size: 0.875em;
box-sizing: border-box;
-webkit-overflow-scrolling: touch;
overscroll-behavior-y: contain;
background-color: var(--sidebar-bg);
color: var(--sidebar-fg);
}
.sidebar-resizing {
-moz-user-select: none;
-webkit-user-select: none;
-ms-user-select: none;
user-select: none;
}
.js:not(.sidebar-resizing) .sidebar {
transition: transform 0.3s; /* Animation: slide away */
}
.sidebar code {
line-height: 2em;
}
.sidebar .sidebar-scrollbox {
overflow-y: auto;
position: absolute;
top: 0;
bottom: 0;
left: 0;
right: 0;
}
.sidebar .sidebar-resize-handle {
position: absolute;
cursor: col-resize;
width: 0;
right: 0;
top: 0;
bottom: 0;
}
.js .sidebar .sidebar-resize-handle {
cursor: col-resize;
width: 5px;
}
.sidebar-hidden .sidebar {
transform: translateX(calc(0px - var(--sidebar-width)));
}
.sidebar::-webkit-scrollbar {
background: var(--sidebar-bg);
}
.sidebar::-webkit-scrollbar-thumb {
background: var(--scrollbar);
}
.sidebar-visible .page-wrapper {
transform: translateX(var(--sidebar-width));
}
@media only screen and (min-width: 620px) {
.sidebar-visible .page-wrapper {
transform: none;
margin-left: var(--sidebar-width);
}
}
.chapter {
list-style: none outside none;
padding-left: 0;
margin: .25rem 0;
}
.chapter ol {
width: 100%;
}
.chapter li {
display: flex;
color: var(--sidebar-non-existent);
}
.chapter li a {
display: block;
text-decoration: none;
color: var(--sidebar-fg);
}
.chapter li a:hover {
color: var(--sidebar-active);
}
.chapter li a.active {
color: var(--sidebar-active);
}
.chapter li > a.toggle {
cursor: pointer;
display: block;
margin-left: auto;
padding: 0 10px;
user-select: none;
opacity: 0.68;
}
.chapter li > a.toggle div {
transition: transform 0.5s;
}
/* collapse the section */
.chapter li:not(.expanded) + li > ol {
display: none;
}
.chapter li.chapter-item {
padding: 1rem 1.5rem;
}
.chapter .section li.chapter-item {
padding: .5rem .5rem 0 .5rem;
}
.chapter li.expanded > a.toggle div {
transform: rotate(90deg);
}
.spacer {
width: 100%;
height: 3px;
margin: 5px 0px;
}
.chapter .spacer {
background-color: var(--sidebar-spacer);
}
@media (-moz-touch-enabled: 1), (pointer: coarse) {
.chapter li a { padding: 5px 0; }
.spacer { margin: 10px 0; }
}
.section {
list-style: none outside none;
padding-left: 2rem;
line-height: 1.9em;
}
/* Theme Menu Popup */
.theme-popup {
position: absolute;
left: 10px;
top: var(--menu-bar-height);
z-index: 1000;
border-radius: 4px;
font-size: 0.7em;
color: var(--fg);
background: var(--theme-popup-bg);
border: 1px solid var(--theme-popup-border);
margin: 0;
padding: 0;
list-style: none;
display: none;
}
.theme-popup .default {
color: var(--icons);
}
.theme-popup .theme {
width: 100%;
border: 0;
margin: 0;
padding: 2px 10px;
line-height: 25px;
white-space: nowrap;
text-align: left;
cursor: pointer;
color: inherit;
background: inherit;
font-size: inherit;
}
.theme-popup .theme:hover {
background-color: var(--theme-hover);
}
.theme-popup .theme:hover:first-child,
.theme-popup .theme:hover:last-child {
border-top-left-radius: inherit;
border-top-right-radius: inherit;
}

@ -1,233 +0,0 @@
/* Base styles and content styles */
@import 'variables.css';
:root {
/* Browser default font-size is 16px, this way 1 rem = 10px */
font-size: 62.5%;
}
/* TODO: replace with self hosted fonts */
html {
font-family: "Inter", sans-serif;
color: var(--fg);
background-color: var(--bg);
text-size-adjust: none;
}
/* @supports (font-variation-settings: normal) { */
/* html { font-family: 'Inter var', sans-serif; } */
/* } */
body {
margin: 0;
font-size: 1.6rem;
overflow-x: hidden;
}
code {
font-family: "Source Code Pro", Consolas, "Ubuntu Mono", Menlo, "DejaVu Sans Mono", monospace, monospace !important;
font-size: 0.875em; /* please adjust the ace font size accordingly in editor.js */
}
/* Don't change font size in headers. */
h1 code, h2 code, h3 code, h4 code, h5 code, h6 code {
font-size: unset;
}
.left { float: left; }
.right { float: right; }
.boring { opacity: 0.6; }
.hide-boring .boring { display: none; }
.hidden { display: none !important; }
h2, h3 { margin-top: 2.5em; }
h4, h5 { margin-top: 2em; }
.header + .header h3,
.header + .header h4,
.header + .header h5 {
margin-top: 1em;
}
h1:target::before,
h2:target::before,
h3:target::before,
h4:target::before,
h5:target::before,
h6:target::before {
display: inline-block;
content: "»";
margin-left: -30px;
width: 30px;
}
/* This is broken on Safari as of version 14, but is fixed
in Safari Technology Preview 117 which I think will be Safari 14.2.
https://bugs.webkit.org/show_bug.cgi?id=218076
*/
:target {
scroll-margin-top: calc(var(--menu-bar-height) + 0.5em);
}
.page {
outline: 0;
padding: 0 var(--page-padding);
margin-top: calc(0px - var(--menu-bar-height)); /* Compensate for the #menu-bar-hover-placeholder */
}
.page-wrapper {
box-sizing: border-box;
}
.js:not(.sidebar-resizing) .page-wrapper {
transition: margin-left 0.3s ease, transform 0.3s ease; /* Animation: slide away */
}
.content {
overflow-y: auto;
padding: 0 15px;
padding-bottom: 50px;
}
.content main {
margin-left: auto;
margin-right: auto;
max-width: var(--content-max-width);
}
/* 2 1.75 1.5 1.25 1 .875 */
.content h1 { font-size: 2em }
.content h2 { font-size: 1.75em }
.content h3 { font-size: 1.5em }
.content h4 { font-size: 1.25em }
.content h5 { font-size: 1em }
.content h6 { font-size: .875em }
.content h1, .content h2, .content h3, .content h4 {
font-weight: 500;
margin-top: 1.275em;
margin-bottom: .875em;
}
.content p, .content ol, .content ul, .content table {
margin-top: 0;
margin-bottom: .875em;
}
.content ul li {
margin-bottom: .25rem;
}
.content ul {
list-style-type: square;
}
.content ul ul, .content ol ul {
margin-bottom: .5rem;
}
.content li p {
margin-bottom: .5em;
}
.content p { line-height: 1.45em; }
.content ol { line-height: 1.45em; }
.content ul { line-height: 1.45em; }
.content a { text-decoration: none; }
.content a:hover { text-decoration: underline; }
.content img { max-width: 100%; }
.content .header:link,
.content .header:visited {
color: var(--fg);
color: var(--heading-fg);
}
.content .header:link,
.content .header:visited:hover {
text-decoration: none;
}
table {
margin: 0 auto;
border-collapse: collapse;
width: 100%;
}
table td {
padding: .75rem;
width: auto;
}
table thead {
background: var(--table-header-bg);
}
table thead td {
font-weight: 700;
border: none;
}
table thead th {
padding: .75rem;
text-align: left;
font-weight: 500;
line-height: 1.5;
width: auto;
}
table thead tr {
border-bottom: 2px var(--table-border-color) solid;
}
table tbody tr {
border-bottom: 1px var(--table-border-line) solid;
}
/* Alternate background colors for rows */
table tbody tr:nth-child(2n) {
/* background: var(--table-alternate-bg); */
}
blockquote {
margin: 1.5rem 0;
padding: 1rem 1.5rem;
color: var(--fg);
opacity: .9;
background-color: var(--quote-bg);
border-left: 4px solid var(--quote-border);
}
blockquote *:last-child {
margin-bottom: 0;
}
:not(.footnote-definition) + .footnote-definition,
.footnote-definition + :not(.footnote-definition) {
margin-top: 2em;
}
.footnote-definition {
font-size: 0.9em;
margin: 0.5em 0;
}
.footnote-definition p {
display: inline;
}
.tooltiptext {
position: absolute;
visibility: hidden;
color: #fff;
background-color: #333;
transform: translateX(-50%); /* Center by moving tooltip 50% of its width left */
left: -8px; /* Half of the width of the icon */
top: -35px;
font-size: 0.8em;
text-align: center;
border-radius: 6px;
padding: 5px 8px;
margin: 5px;
z-index: 1000;
}
.tooltipped .tooltiptext {
visibility: visible;
}
.chapter li.part-title {
color: var(--sidebar-fg);
margin: 5px 0px;
font-weight: bold;
}
.result-no-output {
font-style: italic;
}

@ -1,54 +0,0 @@
#sidebar,
#menu-bar,
.nav-chapters,
.mobile-nav-chapters {
display: none;
}
#page-wrapper.page-wrapper {
transform: none;
margin-left: 0px;
overflow-y: initial;
}
#content {
max-width: none;
margin: 0;
padding: 0;
}
.page {
overflow-y: initial;
}
code {
background-color: #666666;
border-radius: 5px;
/* Force background to be printed in Chrome */
-webkit-print-color-adjust: exact;
}
pre > .buttons {
z-index: 2;
}
a, a:visited, a:active, a:hover {
color: #4183c4;
text-decoration: none;
}
h1, h2, h3, h4, h5, h6 {
page-break-inside: avoid;
page-break-after: avoid;
}
pre, code {
page-break-inside: avoid;
white-space: pre-wrap;
}
.fa {
display: none !important;
}

@ -1,411 +0,0 @@
/* Globals */
:root {
--sidebar-width: 300px;
--page-padding: 15px;
--content-max-width: 750px;
--menu-bar-height: 50px;
}
/* Themes */
.ayu {
--bg: hsl(210, 25%, 8%);
--fg: #c5c5c5;
--sidebar-bg: #14191f;
--sidebar-fg: #c8c9db;
--sidebar-non-existent: #5c6773;
--sidebar-active: #ffb454;
--sidebar-spacer: #2d334f;
--scrollbar: var(--sidebar-fg);
--icons: #737480;
--icons-hover: #b7b9cc;
--links: #0096cf;
--inline-code-color: #ffb454;
--theme-popup-bg: #14191f;
--theme-popup-border: #5c6773;
--theme-hover: #191f26;
--quote-bg: hsl(226, 15%, 17%);
--quote-border: hsl(226, 15%, 22%);
--table-border-color: hsl(210, 25%, 13%);
--table-header-bg: hsl(210, 25%, 28%);
--table-alternate-bg: hsl(210, 25%, 11%);
--searchbar-border-color: #848484;
--searchbar-bg: #424242;
--searchbar-fg: #fff;
--searchbar-shadow-color: #d4c89f;
--searchresults-header-fg: #666;
--searchresults-border-color: #888;
--searchresults-li-bg: #252932;
--search-mark-bg: #e3b171;
--hljs-background: #191f26;
--hljs-color: #e6e1cf;
--hljs-quote: #5c6773;
--hljs-variable: #ff7733;
--hljs-type: #ffee99;
--hljs-title: #b8cc52;
--hljs-symbol: #ffb454;
--hljs-selector-tag: #ff7733;
--hljs-selector-tag: #36a3d9;
--hljs-selector-tag: #00568d;
--hljs-selector-tag: #91b362;
--hljs-selector-tag: #d96c75;
}
.coal {
--bg: hsl(200, 7%, 8%);
--fg: #98a3ad;
--sidebar-bg: #292c2f;
--sidebar-fg: #a1adb8;
--sidebar-non-existent: #505254;
--sidebar-active: #3473ad;
--sidebar-spacer: #393939;
--scrollbar: var(--sidebar-fg);
--icons: #43484d;
--icons-hover: #b3c0cc;
--links: #2b79a2;
--inline-code-color: #c5c8c6;
--theme-popup-bg: #141617;
--theme-popup-border: #43484d;
--theme-hover: #1f2124;
--quote-bg: hsl(234, 21%, 18%);
--quote-border: hsl(234, 21%, 23%);
--table-border-color: hsl(200, 7%, 13%);
--table-header-bg: hsl(200, 7%, 28%);
--table-alternate-bg: hsl(200, 7%, 11%);
--searchbar-border-color: #aaa;
--searchbar-bg: #b7b7b7;
--searchbar-fg: #000;
--searchbar-shadow-color: #aaa;
--searchresults-header-fg: #666;
--searchresults-border-color: #98a3ad;
--searchresults-li-bg: #2b2b2f;
--search-mark-bg: #355c7d;
--hljs-background: #969896;
--hljs-color: #cc6666;
--hljs-quote: #de935f;
--hljs-variable: #f0c674;
--hljs-type: #b5bd68;
--hljs-title: #8abeb7;
--hljs-symbol: #81a2be;
--hljs-selector-tag: #b294bb;
--hljs-selector-tag: #1d1f21;
--hljs-selector-tag: #c5c8c6;
--hljs-selector-tag: #718c00;
--hljs-selector-tag: #c82829;
}
.light {
--bg: hsl(0, 0%, 100%);
--fg: hsl(0, 0%, 0%);
--sidebar-bg: #fafafa;
--sidebar-fg: hsl(0, 0%, 0%);
--sidebar-non-existent: #aaaaaa;
--sidebar-active: #1f1fff;
--sidebar-spacer: #f4f4f4;
--scrollbar: #8F8F8F;
--icons: #747474;
--icons-hover: #000000;
--links: #20609f;
--inline-code-color: #301900;
--theme-popup-bg: #fafafa;
--theme-popup-border: #cccccc;
--theme-hover: #e6e6e6;
--quote-bg: hsl(197, 37%, 96%);
--quote-border: hsl(197, 37%, 91%);
--table-border-color: hsl(0, 0%, 95%);
--table-header-bg: hsl(0, 0%, 80%);
--table-alternate-bg: hsl(0, 0%, 97%);
--searchbar-border-color: #aaa;
--searchbar-bg: #fafafa;
--searchbar-fg: #000;
--searchbar-shadow-color: #aaa;
--searchresults-header-fg: #666;
--searchresults-border-color: #888;
--searchresults-li-bg: #e4f2fe;
--search-mark-bg: #a2cff5;
--hljs-background: #f6f7f6;
--hljs-color: #000;
--hljs-quote: #575757;
--hljs-variable: #d70025;
--hljs-type: #b21e00;
--hljs-title: #0030f2;
--hljs-symbol: #008200;
--hljs-selector-tag: #9d00ec;
}
.navy {
--bg: hsl(226, 23%, 11%);
--fg: #bcbdd0;
--sidebar-bg: #282d3f;
--sidebar-fg: #c8c9db;
--sidebar-non-existent: #505274;
--sidebar-active: #2b79a2;
--sidebar-spacer: #2d334f;
--scrollbar: var(--sidebar-fg);
--icons: #737480;
--icons-hover: #b7b9cc;
--links: #2b79a2;
--inline-code-color: #c5c8c6;
--theme-popup-bg: #161923;
--theme-popup-border: #737480;
--theme-hover: #282e40;
--quote-bg: hsl(226, 15%, 17%);
--quote-border: hsl(226, 15%, 22%);
--table-border-color: hsl(226, 23%, 16%);
--table-header-bg: hsl(226, 23%, 31%);
--table-alternate-bg: hsl(226, 23%, 14%);
--searchbar-border-color: #aaa;
--searchbar-bg: #aeaec6;
--searchbar-fg: #000;
--searchbar-shadow-color: #aaa;
--searchresults-header-fg: #5f5f71;
--searchresults-border-color: #5c5c68;
--searchresults-li-bg: #242430;
--search-mark-bg: #a2cff5;
--hljs-background: #969896;
--hljs-color: #cc6666;
--hljs-quote: #de935f;
--hljs-variable: #f0c674;
--hljs-type: #b5bd68;
--hljs-title: #8abeb7;
--hljs-symbol: #81a2be;
--hljs-selector-tag: #b294bb;
--hljs-selector-tag: #1d1f21;
--hljs-selector-tag: #c5c8c6;
--hljs-selector-tag: #718c00;
--hljs-selector-tag: #c82829;
}
.rust {
--bg: hsl(60, 9%, 87%);
--fg: #262625;
--sidebar-bg: #3b2e2a;
--sidebar-fg: #c8c9db;
--sidebar-non-existent: #505254;
--sidebar-active: #e69f67;
--sidebar-spacer: #45373a;
--scrollbar: var(--sidebar-fg);
--icons: #737480;
--icons-hover: #262625;
--links: #2b79a2;
--inline-code-color: #6e6b5e;
--theme-popup-bg: #e1e1db;
--theme-popup-border: #b38f6b;
--theme-hover: #99908a;
--quote-bg: hsl(60, 5%, 75%);
--quote-border: hsl(60, 5%, 70%);
--table-border-color: hsl(60, 9%, 82%);
--table-header-bg: #b3a497;
--table-alternate-bg: hsl(60, 9%, 84%);
--searchbar-border-color: #aaa;
--searchbar-bg: #fafafa;
--searchbar-fg: #000;
--searchbar-shadow-color: #aaa;
--searchresults-header-fg: #666;
--searchresults-border-color: #888;
--searchresults-li-bg: #dec2a2;
--search-mark-bg: #e69f67;
--hljs-background: #f6f7f6;
--hljs-color: #000;
--hljs-quote: #575757;
--hljs-variable: #d70025;
--hljs-type: #b21e00;
--hljs-title: #0030f2;
--hljs-symbol: #008200;
--hljs-selector-tag: #9d00ec;
}
@media (prefers-color-scheme: dark) {
.light.no-js {
--bg: hsl(200, 7%, 8%);
--fg: #98a3ad;
--sidebar-bg: #292c2f;
--sidebar-fg: #a1adb8;
--sidebar-non-existent: #505254;
--sidebar-active: #3473ad;
--sidebar-spacer: #393939;
--scrollbar: var(--sidebar-fg);
--icons: #43484d;
--icons-hover: #b3c0cc;
--links: #2b79a2;
--inline-code-color: #c5c8c6;
--theme-popup-bg: #141617;
--theme-popup-border: #43484d;
--theme-hover: #1f2124;
--quote-bg: hsl(234, 21%, 18%);
--quote-border: hsl(234, 21%, 23%);
--table-border-color: hsl(200, 7%, 13%);
--table-header-bg: hsl(200, 7%, 28%);
--table-alternate-bg: hsl(200, 7%, 11%);
--searchbar-border-color: #aaa;
--searchbar-bg: #b7b7b7;
--searchbar-fg: #000;
--searchbar-shadow-color: #aaa;
--searchresults-header-fg: #666;
--searchresults-border-color: #98a3ad;
--searchresults-li-bg: #2b2b2f;
--search-mark-bg: #355c7d;
}
}
.colibri {
--bg: #3b224c;
--fg: #bcbdd0;
--heading-fg: #fff;
--sidebar-bg: #281733;
--sidebar-fg: #c8c9db;
--sidebar-non-existent: #505274;
--sidebar-active: #a4a0e8;
--sidebar-spacer: #2d334f;
--scrollbar: var(--sidebar-fg);
--icons: #737480;
--icons-hover: #b7b9cc;
/* --links: #a4a0e8; */
--links: #ECCDBA;
--inline-code-color: hsl(48.7, 7.8%, 70%);
--theme-popup-bg: #161923;
--theme-popup-border: #737480;
--theme-hover: rgba(0,0,0, .2);
--quote-bg: #281733;
--quote-border: hsl(226, 15%, 22%);
--table-border-color: hsl(226, 23%, 76%);
--table-header-bg: hsla(226, 23%, 31%, 0);
--table-alternate-bg: hsl(226, 23%, 14%);
--table-border-line: hsla(201deg, 20%, 92%, 0.2);
--searchbar-border-color: #aaa;
--searchbar-bg: #aeaec6;
--searchbar-fg: #000;
--searchbar-shadow-color: #aaa;
--searchresults-header-fg: #5f5f71;
--searchresults-border-color: #5c5c68;
--searchresults-li-bg: #242430;
--search-mark-bg: #acff5;
--hljs-background: #2f1e2e;
--hljs-color: #a39e9b;
--hljs-quote: #8d8687;
--hljs-variable: #ef6155;
--hljs-type: #f99b15;
--hljs-title: #fec418;
--hljs-symbol: #48b685;
--hljs-selector-tag: #815ba4;
}
.colibri {
/*
--bg: #ffffff;
--fg: #452859;
--fg: #5a5977;
--heading-fg: #281733;
--sidebar-bg: #281733;
--sidebar-fg: #c8c9db;
--sidebar-non-existent: #505274;
--sidebar-active: #a4a0e8;
--sidebar-spacer: #2d334f;
--scrollbar: var(--sidebar-fg);
--icons: #737480;
--icons-hover: #b7b9cc;
--links: #6F44F0;
--inline-code-color: #a39e9b;
--theme-popup-bg: #161923;
--theme-popup-border: #737480;
--theme-hover: rgba(0,0,0, .2);
--quote-bg: rgba(0, 0, 0, 0);
--quote-border: hsl(226, 15%, 75%);
--table-border-color: #5a5977;
--table-border-color: hsl(201deg 10% 67%);
--table-header-bg: hsl(0, 0%, 100%);
--table-alternate-bg: hsl(0, 0%, 97%);
--table-border-line: hsl(201deg, 20%, 92%);
--searchbar-border-color: #aaa;
--searchbar-bg: #aeaec6;
--searchbar-fg: #000;
--searchbar-shadow-color: #aaa;
--searchresults-header-fg: #5f5f71;
--searchresults-border-color: #5c5c68;
--searchresults-li-bg: #242430;
--search-mark-bg: #a2cff5;
--hljs-background: #TODO;
--hljs-color: #TODO;
--hljs-quote: #TODO;
--hljs-variable: #TODO;
--hljs-type: #TODO;
--hljs-title: #TODO;
--hljs-symbol: #TODO;
--hljs-selector-tag: #TODO;
*/
}

@ -1,56 +0,0 @@
pre code.hljs {
display:block;
overflow-x:auto;
padding:1em
}
code.hljs {
padding:3px 5px
}
.hljs {
background: var(--hljs-background);
color: var(--hljs-color);
}
.hljs-comment,
.hljs-quote {
color: var(--hljs-quote)
}
.hljs-link,
.hljs-meta,
.hljs-name,
.hljs-regexp,
.hljs-selector-class,
.hljs-selector-id,
.hljs-tag,
.hljs-template-variable,
.hljs-variable {
color: var(--hljs-variable)
}
.hljs-built_in,
.hljs-deletion,
.hljs-literal,
.hljs-number,
.hljs-params,
.hljs-type {
color: var(--hljs-type)
}
.hljs-attribute,
.hljs-section,
.hljs-title {
color: var(--hljs-title)
}
.hljs-addition,
.hljs-bullet,
.hljs-string,
.hljs-symbol {
color: var(--hljs-symbol)
}
.hljs-keyword,
.hljs-selector-tag {
color: var(--hljs-selector-tag)
}
.hljs-emphasis {
font-style:italic
}
.hljs-strong {
font-weight:700
}

File diff suppressed because one or more lines are too long

@ -15,7 +15,6 @@
<!-- Custom HTML head --> <!-- Custom HTML head -->
{{> head}} {{> head}}
<meta content="text/html; charset=utf-8" http-equiv="Content-Type">
<meta name="description" content="{{ description }}"> <meta name="description" content="{{ description }}">
<meta name="viewport" content="width=device-width, initial-scale=1"> <meta name="viewport" content="width=device-width, initial-scale=1">
<meta name="theme-color" content="#ffffff" /> <meta name="theme-color" content="#ffffff" />
@ -53,18 +52,19 @@
{{#if mathjax_support}} {{#if mathjax_support}}
<!-- MathJax --> <!-- MathJax -->
<script async type="text/javascript" src="https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.1/MathJax.js?config=TeX-AMS-MML_HTMLorMML"></script> <script async src="https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.1/MathJax.js?config=TeX-AMS-MML_HTMLorMML"></script>
{{/if}} {{/if}}
</head> </head>
<body> <body>
<div id="body-container">
<!-- Provide site root to javascript --> <!-- Provide site root to javascript -->
<script type="text/javascript"> <script>
var path_to_root = "{{ path_to_root }}"; var path_to_root = "{{ path_to_root }}";
var default_theme = window.matchMedia("(prefers-color-scheme: dark)").matches ? "{{ preferred_dark_theme }}" : "{{ default_theme }}"; var default_theme = window.matchMedia("(prefers-color-scheme: dark)").matches ? "{{ preferred_dark_theme }}" : "{{ default_theme }}";
</script> </script>
<!-- Work around some values being stored in localStorage wrapped in quotes --> <!-- Work around some values being stored in localStorage wrapped in quotes -->
<script type="text/javascript"> <script>
try { try {
var theme = localStorage.getItem('mdbook-theme'); var theme = localStorage.getItem('mdbook-theme');
var sidebar = localStorage.getItem('mdbook-sidebar'); var sidebar = localStorage.getItem('mdbook-sidebar');
@ -80,7 +80,7 @@
</script> </script>
<!-- Set the theme before any content is loaded, prevents flash --> <!-- Set the theme before any content is loaded, prevents flash -->
<script type="text/javascript"> <script>
var theme; var theme;
try { theme = localStorage.getItem('mdbook-theme'); } catch(e) { } try { theme = localStorage.getItem('mdbook-theme'); } catch(e) { }
if (theme === null || theme === undefined) { theme = default_theme; } if (theme === null || theme === undefined) { theme = default_theme; }
@ -92,12 +92,14 @@
</script> </script>
<!-- Hide / unhide sidebar before it is displayed --> <!-- Hide / unhide sidebar before it is displayed -->
<script type="text/javascript"> <script>
var html = document.querySelector('html'); var html = document.querySelector('html');
var sidebar = 'hidden'; var sidebar = null;
if (document.body.clientWidth >= 1080) { if (document.body.clientWidth >= 1080) {
try { sidebar = localStorage.getItem('mdbook-sidebar'); } catch(e) { } try { sidebar = localStorage.getItem('mdbook-sidebar'); } catch(e) { }
sidebar = sidebar || 'visible'; sidebar = sidebar || 'visible';
} else {
sidebar = 'hidden';
} }
html.classList.remove('sidebar-visible'); html.classList.remove('sidebar-visible');
html.classList.add("sidebar-" + sidebar); html.classList.add("sidebar-" + sidebar);
@ -110,12 +112,34 @@
<div id="sidebar-resize-handle" class="sidebar-resize-handle"></div> <div id="sidebar-resize-handle" class="sidebar-resize-handle"></div>
</nav> </nav>
<!-- Track and set sidebar scroll position -->
<script>
var sidebarScrollbox = document.querySelector('#sidebar .sidebar-scrollbox');
sidebarScrollbox.addEventListener('click', function(e) {
if (e.target.tagName === 'A') {
sessionStorage.setItem('sidebar-scroll', sidebarScrollbox.scrollTop);
}
}, { passive: true });
var sidebarScrollTop = sessionStorage.getItem('sidebar-scroll');
sessionStorage.removeItem('sidebar-scroll');
if (sidebarScrollTop) {
// preserve sidebar scroll position when navigating via links within sidebar
sidebarScrollbox.scrollTop = sidebarScrollTop;
} else {
// scroll sidebar to current active section when navigating via "next/previous chapter" buttons
var activeSection = document.querySelector('#sidebar .active');
if (activeSection) {
activeSection.scrollIntoView({ block: 'center' });
}
}
</script>
<div id="page-wrapper" class="page-wrapper"> <div id="page-wrapper" class="page-wrapper">
<div class="page"> <div class="page">
{{> header}} {{> header}}
<div id="menu-bar-hover-placeholder"></div> <div id="menu-bar-hover-placeholder"></div>
<div id="menu-bar" class="menu-bar sticky bordered"> <div id="menu-bar" class="menu-bar sticky">
<div class="left-buttons"> <div class="left-buttons">
<button id="sidebar-toggle" class="icon-button" type="button" title="Toggle Table of Contents" aria-label="Toggle Table of Contents" aria-controls="sidebar"> <button id="sidebar-toggle" class="icon-button" type="button" title="Toggle Table of Contents" aria-label="Toggle Table of Contents" aria-controls="sidebar">
<i class="fa fa-bars"></i> <i class="fa fa-bars"></i>
@ -124,12 +148,12 @@
<i class="fa fa-paint-brush"></i> <i class="fa fa-paint-brush"></i>
</button> </button>
<ul id="theme-list" class="theme-popup" aria-label="Themes" role="menu"> <ul id="theme-list" class="theme-popup" aria-label="Themes" role="menu">
<li role="none"><button role="menuitem" class="theme" id="light">{{ theme_option "Light" }}</button></li> <li role="none"><button role="menuitem" class="theme" id="light">Light</button></li>
<li role="none"><button role="menuitem" class="theme" id="rust">{{ theme_option "Rust" }}</button></li> <li role="none"><button role="menuitem" class="theme" id="rust">Rust</button></li>
<li role="none"><button role="menuitem" class="theme" id="coal">{{ theme_option "Coal" }}</button></li> <li role="none"><button role="menuitem" class="theme" id="coal">Coal</button></li>
<li role="none"><button role="menuitem" class="theme" id="navy">{{ theme_option "Navy" }}</button></li> <li role="none"><button role="menuitem" class="theme" id="navy">Navy</button></li>
<li role="none"><button role="menuitem" class="theme" id="ayu">{{ theme_option "Ayu" }}</button></li> <li role="none"><button role="menuitem" class="theme" id="ayu">Ayu</button></li>
<li role="none"><button role="menuitem" class="theme" id="colibri">{{ theme_option "Colibri" }}</button></li> <li role="none"><button role="menuitem" class="theme" id="colibri">Colibri</button></li>
</ul> </ul>
{{#if search_enabled}} {{#if search_enabled}}
<button id="search-toggle" class="icon-button" type="button" title="Search. (Shortkey: s)" aria-label="Toggle Searchbar" aria-expanded="false" aria-keyshortcuts="S" aria-controls="searchbar"> <button id="search-toggle" class="icon-button" type="button" title="Search. (Shortkey: s)" aria-label="Toggle Searchbar" aria-expanded="false" aria-keyshortcuts="S" aria-controls="searchbar">
@ -151,13 +175,19 @@
<i id="git-repository-button" class="fa {{git_repository_icon}}"></i> <i id="git-repository-button" class="fa {{git_repository_icon}}"></i>
</a> </a>
{{/if}} {{/if}}
{{#if git_repository_edit_url}}
<a href="{{git_repository_edit_url}}" title="Suggest an edit" aria-label="Suggest an edit">
<i id="git-edit-button" class="fa fa-edit"></i>
</a>
{{/if}}
</div> </div>
</div> </div>
{{#if search_enabled}} {{#if search_enabled}}
<div id="search-wrapper" class="hidden"> <div id="search-wrapper" class="hidden">
<form id="searchbar-outer" class="searchbar-outer"> <form id="searchbar-outer" class="searchbar-outer">
<input type="search" name="search" id="searchbar" name="searchbar" placeholder="Search this book ..." aria-controls="searchresults-outer" aria-describedby="searchresults-header"> <input type="search" id="searchbar" name="searchbar" placeholder="Search this book ..." aria-controls="searchresults-outer" aria-describedby="searchresults-header">
</form> </form>
<div id="searchresults-outer" class="searchresults-outer hidden"> <div id="searchresults-outer" class="searchresults-outer hidden">
<div id="searchresults-header" class="searchresults-header"></div> <div id="searchresults-header" class="searchresults-header"></div>
@ -168,7 +198,7 @@
{{/if}} {{/if}}
<!-- Apply ARIA attributes after the sidebar and the sidebar toggle button are added to the DOM --> <!-- Apply ARIA attributes after the sidebar and the sidebar toggle button are added to the DOM -->
<script type="text/javascript"> <script>
document.getElementById('sidebar-toggle').setAttribute('aria-expanded', sidebar === 'visible'); document.getElementById('sidebar-toggle').setAttribute('aria-expanded', sidebar === 'visible');
document.getElementById('sidebar').setAttribute('aria-hidden', sidebar !== 'visible'); document.getElementById('sidebar').setAttribute('aria-hidden', sidebar !== 'visible');
Array.from(document.querySelectorAll('#sidebar a')).forEach(function(link) { Array.from(document.querySelectorAll('#sidebar a')).forEach(function(link) {
@ -216,10 +246,12 @@
</div> </div>
{{#if livereload}} {{#if live_reload_endpoint}}
<!-- Livereload script (if served using the cli tool) --> <!-- Livereload script (if served using the cli tool) -->
<script type="text/javascript"> <script>
var socket = new WebSocket("{{{livereload}}}"); const wsProtocol = location.protocol === 'https:' ? 'wss:' : 'ws:';
const wsAddress = wsProtocol + "//" + location.host + "/" + "{{{live_reload_endpoint}}}";
const socket = new WebSocket(wsAddress);
socket.onmessage = function (event) { socket.onmessage = function (event) {
if (event.data === "reload") { if (event.data === "reload") {
socket.close(); socket.close();
@ -235,7 +267,7 @@
{{#if google_analytics}} {{#if google_analytics}}
<!-- Google Analytics Tag --> <!-- Google Analytics Tag -->
<script type="text/javascript"> <script>
var localAddrs = ["localhost", "127.0.0.1", ""]; var localAddrs = ["localhost", "127.0.0.1", ""];
// make sure we don't activate google analytics if the developer is // make sure we don't activate google analytics if the developer is
@ -253,43 +285,43 @@
{{/if}} {{/if}}
{{#if playground_line_numbers}} {{#if playground_line_numbers}}
<script type="text/javascript"> <script>
window.playground_line_numbers = true; window.playground_line_numbers = true;
</script> </script>
{{/if}} {{/if}}
{{#if playground_copyable}} {{#if playground_copyable}}
<script type="text/javascript"> <script>
window.playground_copyable = true; window.playground_copyable = true;
</script> </script>
{{/if}} {{/if}}
{{#if playground_js}} {{#if playground_js}}
<script src="{{ path_to_root }}ace.js" type="text/javascript" charset="utf-8"></script> <script src="{{ path_to_root }}ace.js"></script>
<script src="{{ path_to_root }}editor.js" type="text/javascript" charset="utf-8"></script> <script src="{{ path_to_root }}editor.js"></script>
<script src="{{ path_to_root }}mode-rust.js" type="text/javascript" charset="utf-8"></script> <script src="{{ path_to_root }}mode-rust.js"></script>
<script src="{{ path_to_root }}theme-dawn.js" type="text/javascript" charset="utf-8"></script> <script src="{{ path_to_root }}theme-dawn.js"></script>
<script src="{{ path_to_root }}theme-tomorrow_night.js" type="text/javascript" charset="utf-8"></script> <script src="{{ path_to_root }}theme-tomorrow_night.js"></script>
{{/if}} {{/if}}
{{#if search_js}} {{#if search_js}}
<script src="{{ path_to_root }}elasticlunr.min.js" type="text/javascript" charset="utf-8"></script> <script src="{{ path_to_root }}elasticlunr.min.js"></script>
<script src="{{ path_to_root }}mark.min.js" type="text/javascript" charset="utf-8"></script> <script src="{{ path_to_root }}mark.min.js"></script>
<script src="{{ path_to_root }}searcher.js" type="text/javascript" charset="utf-8"></script> <script src="{{ path_to_root }}searcher.js"></script>
{{/if}} {{/if}}
<script src="{{ path_to_root }}clipboard.min.js" type="text/javascript" charset="utf-8"></script> <script src="{{ path_to_root }}clipboard.min.js"></script>
<script src="{{ path_to_root }}highlight.js" type="text/javascript" charset="utf-8"></script> <script src="{{ path_to_root }}highlight.js"></script>
<script src="{{ path_to_root }}book.js" type="text/javascript" charset="utf-8"></script> <script src="{{ path_to_root }}book.js"></script>
<!-- Custom JS scripts --> <!-- Custom JS scripts -->
{{#each additional_js}} {{#each additional_js}}
<script type="text/javascript" src="{{ ../path_to_root }}{{this}}"></script> <script src="{{ ../path_to_root }}{{this}}"></script>
{{/each}} {{/each}}
{{#if is_print}} {{#if is_print}}
{{#if mathjax_support}} {{#if mathjax_support}}
<script type="text/javascript"> <script>
window.addEventListener('load', function() { window.addEventListener('load', function() {
MathJax.Hub.Register.StartupHook('End', function() { MathJax.Hub.Register.StartupHook('End', function() {
window.setTimeout(window.print, 100); window.setTimeout(window.print, 100);
@ -297,7 +329,7 @@
}); });
</script> </script>
{{else}} {{else}}
<script type="text/javascript"> <script>
window.addEventListener('load', function() { window.addEventListener('load', function() {
window.setTimeout(window.print, 100); window.setTimeout(window.print, 100);
}); });
@ -305,5 +337,6 @@
{{/if}} {{/if}}
{{/if}} {{/if}}
</div>
</body> </body>
</html> </html>

@ -1,110 +1,29 @@
{ {
"nodes": { "nodes": {
"crane": { "crane": {
"flake": false,
"locked": {
"lastModified": 1681175776,
"narHash": "sha256-7SsUy9114fryHAZ8p1L6G6YSu7jjz55FddEwa2U8XZc=",
"owner": "ipetkov",
"repo": "crane",
"rev": "445a3d222947632b5593112bb817850e8a9cf737",
"type": "github"
},
"original": {
"owner": "ipetkov",
"ref": "v0.12.1",
"repo": "crane",
"type": "github"
}
},
"dream2nix": {
"inputs": { "inputs": {
"all-cabal-json": [
"nci"
],
"crane": "crane",
"devshell": [
"nci"
],
"drv-parts": "drv-parts",
"flake-compat": "flake-compat", "flake-compat": "flake-compat",
"flake-parts": [ "flake-utils": [
"nci", "flake-utils"
"parts"
],
"flake-utils-pre-commit": [
"nci"
],
"ghc-utils": [
"nci"
],
"gomod2nix": [
"nci"
],
"mach-nix": [
"nci"
],
"nix-pypi-fetcher": [
"nci"
], ],
"nixpkgs": [ "nixpkgs": [
"nci",
"nixpkgs" "nixpkgs"
], ],
"nixpkgsV1": "nixpkgsV1", "rust-overlay": [
"poetry2nix": [ "rust-overlay"
"nci"
],
"pre-commit-hooks": [
"nci"
],
"pruned-racket-catalog": [
"nci"
]
},
"locked": {
"lastModified": 1683212002,
"narHash": "sha256-EObtqyQsv9v+inieRY5cvyCMCUI5zuU5qu+1axlJCPM=",
"owner": "nix-community",
"repo": "dream2nix",
"rev": "fbfb09d2ab5ff761d822dd40b4a1def81651d096",
"type": "github"
},
"original": {
"owner": "nix-community",
"repo": "dream2nix",
"type": "github"
}
},
"drv-parts": {
"inputs": {
"flake-compat": [
"nci",
"dream2nix",
"flake-compat"
],
"flake-parts": [
"nci",
"dream2nix",
"flake-parts"
],
"nixpkgs": [
"nci",
"dream2nix",
"nixpkgs"
] ]
}, },
"locked": { "locked": {
"lastModified": 1680698112, "lastModified": 1688772518,
"narHash": "sha256-FgnobN/DvCjEsc0UAZEAdPLkL4IZi2ZMnu2K2bUaElc=", "narHash": "sha256-ol7gZxwvgLnxNSZwFTDJJ49xVY5teaSvF7lzlo3YQfM=",
"owner": "davhau", "owner": "ipetkov",
"repo": "drv-parts", "repo": "crane",
"rev": "e8c2ec1157dc1edb002989669a0dbd935f430201", "rev": "8b08e96c9af8c6e3a2b69af5a7fa168750fcf88e",
"type": "github" "type": "github"
}, },
"original": { "original": {
"owner": "davhau", "owner": "ipetkov",
"repo": "drv-parts", "repo": "crane",
"type": "github" "type": "github"
} }
}, },
@ -129,11 +48,11 @@
"systems": "systems" "systems": "systems"
}, },
"locked": { "locked": {
"lastModified": 1681202837, "lastModified": 1689068808,
"narHash": "sha256-H+Rh19JDwRtpVPAWp64F+rlEtxUWBAQW28eAi3SRSzg=", "narHash": "sha256-6ixXo3wt24N/melDWjq70UuHQLxGV8jZvooRanIHXw0=",
"owner": "numtide", "owner": "numtide",
"repo": "flake-utils", "repo": "flake-utils",
"rev": "cfacdce06f30d2b68473a46042957675eebb3401", "rev": "919d646de7be200f3bf08cb76ae1f09402b6f9b4",
"type": "github" "type": "github"
}, },
"original": { "original": {
@ -142,55 +61,13 @@
"type": "github" "type": "github"
} }
}, },
"mk-naked-shell": {
"flake": false,
"locked": {
"lastModified": 1681286841,
"narHash": "sha256-3XlJrwlR0nBiREnuogoa5i1b4+w/XPe0z8bbrJASw0g=",
"owner": "yusdacra",
"repo": "mk-naked-shell",
"rev": "7612f828dd6f22b7fb332cc69440e839d7ffe6bd",
"type": "github"
},
"original": {
"owner": "yusdacra",
"repo": "mk-naked-shell",
"type": "github"
}
},
"nci": {
"inputs": {
"dream2nix": "dream2nix",
"mk-naked-shell": "mk-naked-shell",
"nixpkgs": [
"nixpkgs"
],
"parts": "parts",
"rust-overlay": [
"rust-overlay"
]
},
"locked": {
"lastModified": 1683699050,
"narHash": "sha256-UWKQpzVcSshB+sU2O8CCHjOSTQrNS7Kk9V3+UeBsJpg=",
"owner": "yusdacra",
"repo": "nix-cargo-integration",
"rev": "ed27173cd1b223f598343ea3c15aacb1d140feac",
"type": "github"
},
"original": {
"owner": "yusdacra",
"repo": "nix-cargo-integration",
"type": "github"
}
},
"nixpkgs": { "nixpkgs": {
"locked": { "locked": {
"lastModified": 1683408522, "lastModified": 1690272529,
"narHash": "sha256-9kcPh6Uxo17a3kK3XCHhcWiV1Yu1kYj22RHiymUhMkU=", "narHash": "sha256-MakzcKXEdv/I4qJUtq/k/eG+rVmyOZLnYNC2w1mB59Y=",
"owner": "nixos", "owner": "nixos",
"repo": "nixpkgs", "repo": "nixpkgs",
"rev": "897876e4c484f1e8f92009fd11b7d988a121a4e7", "rev": "ef99fa5c5ed624460217c31ac4271cfb5cb2502c",
"type": "github" "type": "github"
}, },
"original": { "original": {
@ -200,99 +77,29 @@
"type": "github" "type": "github"
} }
}, },
"nixpkgs-lib": {
"locked": {
"dir": "lib",
"lastModified": 1682879489,
"narHash": "sha256-sASwo8gBt7JDnOOstnps90K1wxmVfyhsTPPNTGBPjjg=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "da45bf6ec7bbcc5d1e14d3795c025199f28e0de0",
"type": "github"
},
"original": {
"dir": "lib",
"owner": "NixOS",
"ref": "nixos-unstable",
"repo": "nixpkgs",
"type": "github"
}
},
"nixpkgsV1": {
"locked": {
"lastModified": 1678500271,
"narHash": "sha256-tRBLElf6f02HJGG0ZR7znMNFv/Uf7b2fFInpTHiHaSE=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "5eb98948b66de29f899c7fe27ae112a47964baf8",
"type": "github"
},
"original": {
"id": "nixpkgs",
"ref": "nixos-22.11",
"type": "indirect"
}
},
"parts": {
"inputs": {
"nixpkgs-lib": [
"nci",
"nixpkgs"
]
},
"locked": {
"lastModified": 1683560683,
"narHash": "sha256-XAygPMN5Xnk/W2c1aW0jyEa6lfMDZWlQgiNtmHXytPc=",
"owner": "hercules-ci",
"repo": "flake-parts",
"rev": "006c75898cf814ef9497252b022e91c946ba8e17",
"type": "github"
},
"original": {
"owner": "hercules-ci",
"repo": "flake-parts",
"type": "github"
}
},
"parts_2": {
"inputs": {
"nixpkgs-lib": "nixpkgs-lib"
},
"locked": {
"lastModified": 1683560683,
"narHash": "sha256-XAygPMN5Xnk/W2c1aW0jyEa6lfMDZWlQgiNtmHXytPc=",
"owner": "hercules-ci",
"repo": "flake-parts",
"rev": "006c75898cf814ef9497252b022e91c946ba8e17",
"type": "github"
},
"original": {
"owner": "hercules-ci",
"repo": "flake-parts",
"type": "github"
}
},
"root": { "root": {
"inputs": { "inputs": {
"nci": "nci", "crane": "crane",
"flake-utils": "flake-utils",
"nixpkgs": "nixpkgs", "nixpkgs": "nixpkgs",
"parts": "parts_2",
"rust-overlay": "rust-overlay" "rust-overlay": "rust-overlay"
} }
}, },
"rust-overlay": { "rust-overlay": {
"inputs": { "inputs": {
"flake-utils": "flake-utils", "flake-utils": [
"flake-utils"
],
"nixpkgs": [ "nixpkgs": [
"nixpkgs" "nixpkgs"
] ]
}, },
"locked": { "locked": {
"lastModified": 1683771545, "lastModified": 1690424156,
"narHash": "sha256-we0GYcKTo2jRQGmUGrzQ9VH0OYAUsJMCsK8UkF+vZUA=", "narHash": "sha256-Bpml+L280tHTQpwpC5/BJbU4HSvEzMvW8IZ4gAXimhE=",
"owner": "oxalica", "owner": "oxalica",
"repo": "rust-overlay", "repo": "rust-overlay",
"rev": "c57e210faf68e5d5386f18f1b17ad8365d25e4ed", "rev": "f335a0213504c7e6481c359dc1009be9cf34432c",
"type": "github" "type": "github"
}, },
"original": { "original": {

@ -3,19 +3,35 @@
inputs = { inputs = {
nixpkgs.url = "github:nixos/nixpkgs/nixos-unstable"; nixpkgs.url = "github:nixos/nixpkgs/nixos-unstable";
flake-utils.url = "github:numtide/flake-utils";
rust-overlay = { rust-overlay = {
url = "github:oxalica/rust-overlay"; url = "github:oxalica/rust-overlay";
inputs.nixpkgs.follows = "nixpkgs"; inputs = {
nixpkgs.follows = "nixpkgs";
flake-utils.follows = "flake-utils";
}; };
nci = { };
url = "github:yusdacra/nix-cargo-integration"; crane = {
inputs.nixpkgs.follows = "nixpkgs"; url = "github:ipetkov/crane";
inputs.rust-overlay.follows = "rust-overlay"; inputs.rust-overlay.follows = "rust-overlay";
inputs.flake-utils.follows = "flake-utils";
inputs.nixpkgs.follows = "nixpkgs";
}; };
parts.url = "github:hercules-ci/flake-parts";
}; };
outputs = inp: let outputs = {
self,
nixpkgs,
crane,
flake-utils,
rust-overlay,
...
}:
flake-utils.lib.eachDefaultSystem (system: let
pkgs = import nixpkgs {
inherit system;
overlays = [(import rust-overlay)];
};
mkRootPath = rel: mkRootPath = rel:
builtins.path { builtins.path {
path = "${toString ./.}/${rel}"; path = "${toString ./.}/${rel}";
@ -46,7 +62,7 @@
"flake.lock" "flake.lock"
]; ];
ignorePaths = path: type: let ignorePaths = path: type: let
inherit (inp.nixpkgs) lib; inherit (nixpkgs) lib;
# split the nix store path into its components # split the nix store path into its components
components = lib.splitString "/" path; components = lib.splitString "/" path;
# drop off the `/nix/hash-source` section from the path # drop off the `/nix/hash-source` section from the path
@ -62,22 +78,6 @@
# filter out unnecessary paths # filter out unnecessary paths
filter = ignorePaths; filter = ignorePaths;
}; };
in
inp.parts.lib.mkFlake {inputs = inp;} {
imports = [inp.nci.flakeModule inp.parts.flakeModules.easyOverlay];
systems = [
"x86_64-linux"
"x86_64-darwin"
"aarch64-linux"
"aarch64-darwin"
"i686-linux"
];
perSystem = {
config,
pkgs,
lib,
...
}: let
makeOverridableHelix = old: config: let makeOverridableHelix = old: config: let
grammars = pkgs.callPackage ./grammars.nix config; grammars = pkgs.callPackage ./grammars.nix config;
runtimeDir = pkgs.runCommand "helix-runtime" {} '' runtimeDir = pkgs.runCommand "helix-runtime" {} ''
@ -122,38 +122,58 @@
if stdenv.isLinux if stdenv.isLinux
then ''$RUSTFLAGS -C link-arg=-fuse-ld=lld -C target-cpu=native -Clink-arg=-Wl,--no-rosegment'' then ''$RUSTFLAGS -C link-arg=-fuse-ld=lld -C target-cpu=native -Clink-arg=-Wl,--no-rosegment''
else "$RUSTFLAGS"; else "$RUSTFLAGS";
in { rustToolchain = pkgs.pkgsBuildHost.rust-bin.fromRustupToolchainFile ./rust-toolchain.toml;
nci.projects."helix-project".relPath = ""; craneLib = (crane.mkLib pkgs).overrideToolchain rustToolchain;
nci.crates."helix-term" = { commonArgs =
overrides = { {
add-meta.override = _: {meta.mainProgram = "hx";}; inherit stdenv;
add-inputs.overrideAttrs = prev: { src = filteredSource;
buildInputs = (prev.buildInputs or []) ++ [stdenv.cc.cc.lib];
};
disable-grammar-builds = {
# disable fetching and building of tree-sitter grammars in the helix-term build.rs # disable fetching and building of tree-sitter grammars in the helix-term build.rs
HELIX_DISABLE_AUTO_GRAMMAR_BUILD = "1"; HELIX_DISABLE_AUTO_GRAMMAR_BUILD = "1";
buildInputs = [stdenv.cc.cc.lib];
# disable tests
doCheck = false;
meta.mainProgram = "hx";
}
// craneLib.crateNameFromCargoToml {cargoToml = ./helix-term/Cargo.toml;};
cargoArtifacts = craneLib.buildDepsOnly commonArgs;
in {
packages = {
helix-unwrapped = craneLib.buildPackage (commonArgs
// {
inherit cargoArtifacts;
});
helix = makeOverridableHelix self.packages.${system}.helix-unwrapped {};
default = self.packages.${system}.helix;
}; };
disable-tests = {checkPhase = ":";};
set-stdenv.override = _: {inherit stdenv;};
set-filtered-src.override = _: {src = filteredSource;};
};
};
packages.helix-unwrapped = config.nci.outputs."helix-term".packages.release; checks = {
packages.helix-unwrapped-dev = config.nci.outputs."helix-term".packages.dev; # Build the crate itself
packages.helix = makeOverridableHelix config.packages.helix-unwrapped {}; inherit (self.packages.${system}) helix;
packages.helix-dev = makeOverridableHelix config.packages.helix-unwrapped-dev {};
packages.default = config.packages.helix; clippy = craneLib.cargoClippy (commonArgs
// {
inherit cargoArtifacts;
cargoClippyExtraArgs = "--all-targets -- --deny warnings";
});
overlayAttrs = { fmt = craneLib.cargoFmt commonArgs;
inherit (config.packages) helix;
doc = craneLib.cargoDoc (commonArgs
// {
inherit cargoArtifacts;
});
test = craneLib.cargoTest (commonArgs
// {
inherit cargoArtifacts;
});
}; };
devShells.default = config.nci.outputs."helix-project".devShell.overrideAttrs (old: { devShells.default = pkgs.mkShell {
nativeBuildInputs = inputsFrom = builtins.attrValues self.checks.${system};
(old.nativeBuildInputs or []) nativeBuildInputs = with pkgs;
++ (with pkgs; [lld_13 cargo-flamegraph rust-analyzer]) [lld_13 cargo-flamegraph rust-analyzer]
++ (lib.optional (stdenv.isx86_64 && stdenv.isLinux) pkgs.cargo-tarpaulin) ++ (lib.optional (stdenv.isx86_64 && stdenv.isLinux) pkgs.cargo-tarpaulin)
++ (lib.optional stdenv.isLinux pkgs.lldb) ++ (lib.optional stdenv.isLinux pkgs.lldb)
++ (lib.optional stdenv.isDarwin pkgs.darwin.apple_sdk.frameworks.CoreFoundation); ++ (lib.optional stdenv.isDarwin pkgs.darwin.apple_sdk.frameworks.CoreFoundation);
@ -162,7 +182,11 @@
export RUST_BACKTRACE="1" export RUST_BACKTRACE="1"
export RUSTFLAGS="${rustFlagsEnv}" export RUSTFLAGS="${rustFlagsEnv}"
''; '';
}); };
})
// {
overlays.default = final: prev: {
inherit (self.packages.${final.system}) helix;
}; };
}; };

@ -18,18 +18,18 @@ integration = []
helix-loader = { version = "0.6", path = "../helix-loader" } helix-loader = { version = "0.6", path = "../helix-loader" }
ropey = { version = "1.6.0", default-features = false, features = ["simd"] } ropey = { version = "1.6.0", default-features = false, features = ["simd"] }
smallvec = "1.10" smallvec = "1.11"
smartstring = "1.0.1" smartstring = "1.0.1"
unicode-segmentation = "1.10" unicode-segmentation = "1.10"
unicode-width = "0.1" unicode-width = "0.1"
unicode-general-category = "0.6" unicode-general-category = "0.6"
# slab = "0.4.2" # slab = "0.4.2"
slotmap = "1.0" slotmap = "1.0"
tree-sitter = "0.20" tree-sitter.workspace = true
once_cell = "1.18" once_cell = "1.18"
arc-swap = "1" arc-swap = "1"
regex = "1" regex = "1"
bitflags = "2.3" bitflags = "2.4"
ahash = "0.8.3" ahash = "0.8.3"
hashbrown = { version = "0.14.0", features = ["raw"] } hashbrown = { version = "0.14.0", features = ["raw"] }
dunce = "1.0" dunce = "1.0"
@ -52,4 +52,4 @@ steel-core = { workspace = true }
[dev-dependencies] [dev-dependencies]
quickcheck = { version = "1", default-features = false } quickcheck = { version = "1", default-features = false }
indoc = "2.0.1" indoc = "2.0.3"

@ -481,7 +481,7 @@ impl<'a> From<String> for GraphemeStr<'a> {
let ptr = Box::into_raw(g.into_bytes().into_boxed_slice()) as *mut u8; let ptr = Box::into_raw(g.into_bytes().into_boxed_slice()) as *mut u8;
GraphemeStr { GraphemeStr {
ptr: unsafe { NonNull::new_unchecked(ptr) }, ptr: unsafe { NonNull::new_unchecked(ptr) },
len: i32::try_from(len).unwrap() as u32, len: (i32::try_from(len).unwrap() as u32) | Self::MASK_OWNED,
phantom: PhantomData, phantom: PhantomData,
} }
} }

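The one-line change above ORs `Self::MASK_OWNED` into the stored length when a `GraphemeStr` is built from a `String`, tagging the allocation as owned (presumably so it is released correctly later). A minimal, self-contained sketch of that tag-bit pattern follows; the value of `MASK_OWNED` (high bit of a 32-bit length) is an assumption for illustration, since the diff only shows the flag being OR-ed in.

// Simplified illustration of the tag-bit pattern used above: store an
// "owned" flag in the otherwise unused high bit of a 32-bit length.
// MASK_OWNED's value here is an assumption for the sketch.
const MASK_OWNED: u32 = 1 << 31;

fn pack_len(len: usize, owned: bool) -> u32 {
    // Lengths must fit in 31 bits so the tag bit stays free,
    // mirroring the i32::try_from bound in the code above.
    let len = u32::try_from(len).expect("length too large");
    assert!(len & MASK_OWNED == 0, "length overflows into the tag bit");
    if owned {
        len | MASK_OWNED
    } else {
        len
    }
}

fn unpack_len(packed: u32) -> (usize, bool) {
    ((packed & !MASK_OWNED) as usize, packed & MASK_OWNED != 0)
}

fn main() {
    let packed = pack_len(5, true);
    assert_eq!(unpack_len(packed), (5, true));
    assert_eq!(unpack_len(pack_len(5, false)), (5, false));
}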
@ -72,8 +72,8 @@ impl Default for History {
revisions: vec![Revision { revisions: vec![Revision {
parent: 0, parent: 0,
last_child: None, last_child: None,
transaction: Transaction::from(ChangeSet::new(&Rope::new())), transaction: Transaction::from(ChangeSet::new("".into())),
inversion: Transaction::from(ChangeSet::new(&Rope::new())), inversion: Transaction::from(ChangeSet::new("".into())),
timestamp: Instant::now(), timestamp: Instant::now(),
}], }],
current: 0, current: 0,

@ -1,14 +1,14 @@
use std::collections::HashMap; use std::{borrow::Cow, collections::HashMap};
use once_cell::sync::Lazy; use once_cell::sync::Lazy;
use tree_sitter::{Query, QueryCursor, QueryPredicateArg}; use tree_sitter::{Query, QueryCursor, QueryPredicateArg};
use crate::{ use crate::{
chars::{char_is_line_ending, char_is_whitespace}, chars::{char_is_line_ending, char_is_whitespace},
graphemes::tab_width_at, graphemes::{grapheme_width, tab_width_at},
syntax::{LanguageConfiguration, RopeProvider, Syntax}, syntax::{LanguageConfiguration, RopeProvider, Syntax},
tree_sitter::Node, tree_sitter::Node,
Rope, RopeSlice, Rope, RopeGraphemes, RopeSlice,
}; };
/// Enum representing indentation style. /// Enum representing indentation style.
@ -238,68 +238,117 @@ fn get_first_in_line(mut node: Node, new_line_byte_pos: Option<usize>) -> Vec<bo
/// This is usually constructed in one of 2 ways: /// This is usually constructed in one of 2 ways:
/// - Successively add indent captures to get the (added) indent from a single line /// - Successively add indent captures to get the (added) indent from a single line
/// - Successively add the indent results for each line /// - Successively add the indent results for each line
#[derive(Default)] /// The string that this indentation defines starts with the string contained in the align field (unless it is None), followed by:
/// - max(0, indent - outdent) tabs, if tabs are used for indentation
/// - max(0, indent - outdent)*indent_width spaces, if spaces are used for indentation
#[derive(Default, Debug, PartialEq, Eq, Clone)]
pub struct Indentation { pub struct Indentation {
/// The total indent (the number of indent levels) is defined as max(0, indent-outdent).
/// The string that this results in depends on the indent style (spaces or tabs, etc.)
indent: usize, indent: usize,
indent_always: usize,
outdent: usize, outdent: usize,
outdent_always: usize,
/// The alignment, as a string containing only tabs & spaces. Storing this as a string instead of e.g.
/// the (visual) width ensures that the alignment is preserved even if the tab width changes.
align: Option<String>,
} }
impl Indentation { impl Indentation {
/// Add some other [Indentation] to this. /// Add some other [Indentation] to this.
/// The added indent should be the total added indent from one line /// The added indent should be the total added indent from one line.
fn add_line(&mut self, added: &Indentation) { /// Indent should always be added starting from the bottom (or equivalently, the innermost tree-sitter node).
if added.indent > 0 && added.outdent == 0 { fn add_line(&mut self, added: Indentation) {
self.indent += 1; // Align overrides the indent from outer scopes.
} else if added.outdent > 0 && added.indent == 0 { if self.align.is_some() {
self.outdent += 1; return;
}
if added.align.is_some() {
self.align = added.align;
return;
} }
self.indent += added.indent;
self.indent_always += added.indent_always;
self.outdent += added.outdent;
self.outdent_always += added.outdent_always;
} }
/// Add an indent capture to this indent. /// Add an indent capture to this indent.
/// All the captures that are added in this way should be on the same line. /// All the captures that are added in this way should be on the same line.
fn add_capture(&mut self, added: IndentCaptureType) { fn add_capture(&mut self, added: IndentCaptureType) {
match added { match added {
IndentCaptureType::Indent => { IndentCaptureType::Indent => {
if self.indent_always == 0 {
self.indent = 1; self.indent = 1;
} }
}
IndentCaptureType::IndentAlways => {
// any time we encounter an `indent.always` on the same line, we
// want to cancel out all regular indents
self.indent_always += 1;
self.indent = 0;
}
IndentCaptureType::Outdent => { IndentCaptureType::Outdent => {
if self.outdent_always == 0 {
self.outdent = 1; self.outdent = 1;
} }
} }
IndentCaptureType::OutdentAlways => {
self.outdent_always += 1;
self.outdent = 0;
}
IndentCaptureType::Align(align) => {
self.align = Some(align);
}
}
} }
fn as_string(&self, indent_style: &IndentStyle) -> String { fn into_string(self, indent_style: &IndentStyle) -> String {
let indent_level = if self.indent >= self.outdent { let indent = self.indent_always + self.indent;
self.indent - self.outdent let outdent = self.outdent_always + self.outdent;
let indent_level = if indent >= outdent {
indent - outdent
} else { } else {
log::warn!("Encountered more outdent than indent nodes while calculating indentation: {} outdent, {} indent", self.outdent, self.indent); log::warn!("Encountered more outdent than indent nodes while calculating indentation: {} outdent, {} indent", self.outdent, self.indent);
0 0
}; };
indent_style.as_str().repeat(indent_level) let mut indent_string = if let Some(align) = self.align {
align
} else {
String::new()
};
indent_string.push_str(&indent_style.as_str().repeat(indent_level));
indent_string
} }
} }
/// An indent definition which corresponds to a capture from the indent query /// An indent definition which corresponds to a capture from the indent query
#[derive(Debug)]
struct IndentCapture { struct IndentCapture {
capture_type: IndentCaptureType, capture_type: IndentCaptureType,
scope: IndentScope, scope: IndentScope,
} }
#[derive(Clone, Copy)] #[derive(Debug, Clone, PartialEq)]
enum IndentCaptureType { enum IndentCaptureType {
Indent, Indent,
IndentAlways,
Outdent, Outdent,
OutdentAlways,
/// Alignment given as a string of whitespace
Align(String),
} }
impl IndentCaptureType { impl IndentCaptureType {
fn default_scope(&self) -> IndentScope { fn default_scope(&self) -> IndentScope {
match self { match self {
IndentCaptureType::Indent => IndentScope::Tail, IndentCaptureType::Indent | IndentCaptureType::IndentAlways => IndentScope::Tail,
IndentCaptureType::Outdent => IndentScope::All, IndentCaptureType::Outdent | IndentCaptureType::OutdentAlways => IndentScope::All,
IndentCaptureType::Align(_) => IndentScope::All,
} }
} }
} }
/// This defines which part of a node an [IndentCapture] applies to. /// This defines which part of a node an [IndentCapture] applies to.
/// Each [IndentCaptureType] has a default scope, but the scope can be changed /// Each [IndentCaptureType] has a default scope, but the scope can be changed
/// with `#set!` property declarations. /// with `#set!` property declarations.
#[derive(Clone, Copy)] #[derive(Debug, Clone, Copy)]
enum IndentScope { enum IndentScope {
/// The indent applies to the whole node /// The indent applies to the whole node
All, All,
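The rewritten `Indentation` above is combined bottom-up: an inner `align` overrides anything contributed by outer scopes, the `.always` captures cancel the regular ones on the same line, and the resulting prefix is the optional align string followed by max(0, indent - outdent) copies of the indent unit. A stripped-down model of just the `add_line`/`into_string` arithmetic is sketched below, using hypothetical standalone types with no tree-sitter involvement and with the per-line `add_capture` handling omitted.

// Stripped-down model of the Indentation arithmetic described above.
// Hypothetical standalone types; the per-line `add_capture` handling
// (e.g. indent.always cancelling regular indents) is omitted.
#[derive(Default)]
struct Indentation {
    indent: usize,
    indent_always: usize,
    outdent: usize,
    outdent_always: usize,
    align: Option<String>,
}

impl Indentation {
    // Lines are combined from the innermost node outward; once an align is
    // set, contributions from outer scopes are ignored.
    fn add_line(&mut self, added: Indentation) {
        if self.align.is_some() {
            return;
        }
        if added.align.is_some() {
            self.align = added.align;
            return;
        }
        self.indent += added.indent;
        self.indent_always += added.indent_always;
        self.outdent += added.outdent;
        self.outdent_always += added.outdent_always;
    }

    // Optional align prefix, then max(0, indent - outdent) copies of the indent unit.
    fn into_string(self, indent_unit: &str) -> String {
        let indent = self.indent + self.indent_always;
        let outdent = self.outdent + self.outdent_always;
        let level = indent.saturating_sub(outdent);
        let mut out = self.align.unwrap_or_default();
        out.push_str(&indent_unit.repeat(level));
        out
    }
}

fn main() {
    // Innermost line adds one indent level...
    let mut total = Indentation { indent: 1, ..Default::default() };
    // ...an enclosing scope both indents and outdents (net zero)...
    total.add_line(Indentation { indent: 1, outdent: 1, ..Default::default() });
    // ...and an outer scope adds one more level.
    total.add_line(Indentation { indent: 1, ..Default::default() });
    assert_eq!(total.into_string("    "), "        "); // 2 levels * 4 spaces
}

The real `into_string` additionally logs a warning when outdent exceeds indent, and the `.always` counters are accumulated per line in `add_capture`.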
@ -309,6 +358,7 @@ enum IndentScope {
/// A capture from the indent query which does not define an indent but extends /// A capture from the indent query which does not define an indent but extends
/// the range of a node. This is used before the indent is calculated. /// the range of a node. This is used before the indent is calculated.
#[derive(Debug)]
enum ExtendCapture { enum ExtendCapture {
Extend, Extend,
PreventOnce, PreventOnce,
@ -317,24 +367,41 @@ enum ExtendCapture {
/// The result of running a tree-sitter indent query. This stores for /// The result of running a tree-sitter indent query. This stores for
/// each node (identified by its ID) the relevant captures (already filtered /// each node (identified by its ID) the relevant captures (already filtered
/// by predicates). /// by predicates).
#[derive(Debug)]
struct IndentQueryResult { struct IndentQueryResult {
indent_captures: HashMap<usize, Vec<IndentCapture>>, indent_captures: HashMap<usize, Vec<IndentCapture>>,
extend_captures: HashMap<usize, Vec<ExtendCapture>>, extend_captures: HashMap<usize, Vec<ExtendCapture>>,
} }
fn get_node_start_line(node: Node, new_line_byte_pos: Option<usize>) -> usize {
let mut node_line = node.start_position().row;
// Adjust for the new line that will be inserted
if new_line_byte_pos.map_or(false, |pos| node.start_byte() >= pos) {
node_line += 1;
}
node_line
}
fn get_node_end_line(node: Node, new_line_byte_pos: Option<usize>) -> usize {
let mut node_line = node.end_position().row;
// Adjust for the new line that will be inserted (with a strict inequality since end_byte is exclusive)
if new_line_byte_pos.map_or(false, |pos| node.end_byte() > pos) {
node_line += 1;
}
node_line
}
fn query_indents( fn query_indents(
query: &Query, query: &Query,
syntax: &Syntax, syntax: &Syntax,
cursor: &mut QueryCursor, cursor: &mut QueryCursor,
text: RopeSlice, text: RopeSlice,
range: std::ops::Range<usize>, range: std::ops::Range<usize>,
// Position of the (optional) newly inserted line break. new_line_byte_pos: Option<usize>,
// Given as (line, byte_pos)
new_line_break: Option<(usize, usize)>,
) -> IndentQueryResult { ) -> IndentQueryResult {
let mut indent_captures: HashMap<usize, Vec<IndentCapture>> = HashMap::new(); let mut indent_captures: HashMap<usize, Vec<IndentCapture>> = HashMap::new();
let mut extend_captures: HashMap<usize, Vec<ExtendCapture>> = HashMap::new(); let mut extend_captures: HashMap<usize, Vec<ExtendCapture>> = HashMap::new();
cursor.set_byte_range(range); cursor.set_byte_range(range);
// Iterate over all captures from the query // Iterate over all captures from the query
for m in cursor.matches(query, syntax.tree().root_node(), RopeProvider(text)) { for m in cursor.matches(query, syntax.tree().root_node(), RopeProvider(text)) {
// Skip matches where not all custom predicates are fulfilled // Skip matches where not all custom predicates are fulfilled
@ -361,21 +428,13 @@ fn query_indents(
Some(QueryPredicateArg::Capture(capt1)), Some(QueryPredicateArg::Capture(capt1)),
Some(QueryPredicateArg::Capture(capt2)) Some(QueryPredicateArg::Capture(capt2))
) => { ) => {
let get_line_num = |node: Node| {
let mut node_line = node.start_position().row;
// Adjust for the new line that will be inserted
if let Some((line, byte)) = new_line_break {
if node_line==line && node.start_byte()>=byte {
node_line += 1;
}
}
node_line
};
let n1 = m.nodes_for_capture_index(*capt1).next(); let n1 = m.nodes_for_capture_index(*capt1).next();
let n2 = m.nodes_for_capture_index(*capt2).next(); let n2 = m.nodes_for_capture_index(*capt2).next();
match (n1, n2) { match (n1, n2) {
(Some(n1), Some(n2)) => { (Some(n1), Some(n2)) => {
let same_line = get_line_num(n1)==get_line_num(n2); let n1_line = get_node_start_line(n1, new_line_byte_pos);
let n2_line = get_node_start_line(n2, new_line_byte_pos);
let same_line = n1_line == n2_line;
same_line==(pred.operator.as_ref()=="same-line?") same_line==(pred.operator.as_ref()=="same-line?")
} }
_ => true, _ => true,
@ -386,6 +445,23 @@ fn query_indents(
} }
} }
} }
"one-line?" | "not-one-line?" => match pred.args.get(0) {
Some(QueryPredicateArg::Capture(capture_idx)) => {
let node = m.nodes_for_capture_index(*capture_idx).next();
match node {
Some(node) => {
let (start_line, end_line) = (get_node_start_line(node,new_line_byte_pos), get_node_end_line(node, new_line_byte_pos));
let one_line = end_line == start_line;
one_line != (pred.operator.as_ref() == "not-one-line?")
},
_ => true,
}
}
_ => {
panic!("Invalid indent query: Arguments to \"not-kind-eq?\" must be a capture and a string");
}
},
_ => { _ => {
panic!( panic!(
"Invalid indent query: Unknown predicate (\"{}\")", "Invalid indent query: Unknown predicate (\"{}\")",
@ -396,11 +472,28 @@ fn query_indents(
}) { }) {
continue; continue;
} }
// A list of pairs (node_id, indent_capture) that are added by this match.
// They cannot be added to indent_captures immediately since they may depend on other captures (such as an @anchor).
let mut added_indent_captures: Vec<(usize, IndentCapture)> = Vec::new();
// The node captured by the optional @anchor in this match; its position is used to compute @align.
let mut anchor: Option<tree_sitter::Node> = None;
for capture in m.captures { for capture in m.captures {
let capture_name = query.capture_names()[capture.index as usize].as_str(); let capture_name = query.capture_names()[capture.index as usize].as_str();
let capture_type = match capture_name { let capture_type = match capture_name {
"indent" => IndentCaptureType::Indent, "indent" => IndentCaptureType::Indent,
"indent.always" => IndentCaptureType::IndentAlways,
"outdent" => IndentCaptureType::Outdent, "outdent" => IndentCaptureType::Outdent,
"outdent.always" => IndentCaptureType::OutdentAlways,
// The alignment will be updated to the correct value at the end, when the anchor is known.
"align" => IndentCaptureType::Align(String::from("")),
"anchor" => {
if anchor.is_some() {
log::error!("Invalid indent query: Encountered more than one @anchor in the same match.")
} else {
anchor = Some(capture.node);
}
continue;
}
"extend" => { "extend" => {
extend_captures extend_captures
.entry(capture.node.id()) .entry(capture.node.id())
@ -450,17 +543,52 @@ fn query_indents(
} }
} }
} }
added_indent_captures.push((capture.node.id(), indent_capture))
}
for (node_id, mut capture) in added_indent_captures {
// Set the anchor for all align queries.
if let IndentCaptureType::Align(_) = capture.capture_type {
let anchor = match anchor {
None => {
log::error!(
"Invalid indent query: @align requires an accompanying @anchor."
);
continue;
}
Some(anchor) => anchor,
};
// Create a string of tabs & spaces that should have the same width
// as the string that precedes the anchor (independent of the tab width).
let mut align = String::new();
for grapheme in RopeGraphemes::new(
text.line(anchor.start_position().row)
.byte_slice(0..anchor.start_position().column),
) {
if grapheme == "\t" {
align.push('\t');
} else {
align.extend(
std::iter::repeat(' ').take(grapheme_width(&Cow::from(grapheme))),
);
}
}
capture.capture_type = IndentCaptureType::Align(align);
}
indent_captures indent_captures
.entry(capture.node.id()) .entry(node_id)
// Most entries only need to contain a single IndentCapture
.or_insert_with(|| Vec::with_capacity(1)) .or_insert_with(|| Vec::with_capacity(1))
.push(indent_capture); .push(capture);
} }
} }
IndentQueryResult {
let result = IndentQueryResult {
indent_captures, indent_captures,
extend_captures, extend_captures,
} };
log::trace!("indent result = {:?}", result);
result
} }
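For reference, the capture names and predicates handled by `query_indents` would appear in an `indents.scm` roughly as in the sketch below; the node names and the `"scope"` property key are illustrative assumptions, not taken from this patch.

// Illustrative indent query, embedded as a Rust raw string.
// Node names (block, macro_body, if_statement, parameter_list, parameter) are placeholders.
const EXAMPLE_INDENTS_QUERY: &str = r#"
; Indent the block body, but only when the block spans more than one line.
((block) @indent
  (#not-one-line? @indent))

; Indent even for the line the capture itself starts on.
(macro_body) @indent.always

; Closing brackets outdent again.
["}" ")"] @outdent

; Override the default scope of @indent (assumed "scope" property key).
((if_statement) @indent
  (#set! "scope" "all"))

; Align later parameters with the first one; @anchor supplies the reference column.
(parameter_list
  . (parameter) @anchor
  (parameter) @align)
"#;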
/// Handle extend queries. deepest_preceding is the deepest descendant of node that directly precedes the cursor position. /// Handle extend queries. deepest_preceding is the deepest descendant of node that directly precedes the cursor position.
@ -579,12 +707,14 @@ pub fn treesitter_indent_for_pos(
new_line: bool, new_line: bool,
) -> Option<String> { ) -> Option<String> {
let byte_pos = text.char_to_byte(pos); let byte_pos = text.char_to_byte(pos);
let new_line_byte_pos = new_line.then_some(byte_pos);
// The innermost tree-sitter node which is considered for the indent // The innermost tree-sitter node which is considered for the indent
// computation. It may change if some preceding node is extended // computation. It may change if some preceding node is extended
let mut node = syntax let mut node = syntax
.tree() .tree()
.root_node() .root_node()
.descendant_for_byte_range(byte_pos, byte_pos)?; .descendant_for_byte_range(byte_pos, byte_pos)?;
let (query_result, deepest_preceding) = { let (query_result, deepest_preceding) = {
// The query range should intersect with all nodes directly preceding // The query range should intersect with all nodes directly preceding
// the position of the indent query in case one of them is extended. // the position of the indent query in case one of them is extended.
@ -615,13 +745,13 @@ pub fn treesitter_indent_for_pos(
&mut cursor, &mut cursor,
text, text,
query_range, query_range,
new_line.then_some((line, byte_pos)), new_line_byte_pos,
); );
ts_parser.cursors.push(cursor); ts_parser.cursors.push(cursor);
(query_result, deepest_preceding) (query_result, deepest_preceding)
}) })
}; };
let indent_captures = query_result.indent_captures; let mut indent_captures = query_result.indent_captures;
let extend_captures = query_result.extend_captures; let extend_captures = query_result.extend_captures;
// Check for extend captures, potentially changing the node that the indent calculation starts with // Check for extend captures, potentially changing the node that the indent calculation starts with
@ -643,12 +773,16 @@ pub fn treesitter_indent_for_pos(
// even if there are multiple "indent" nodes on the same line // even if there are multiple "indent" nodes on the same line
let mut indent_for_line = Indentation::default(); let mut indent_for_line = Indentation::default();
let mut indent_for_line_below = Indentation::default(); let mut indent_for_line_below = Indentation::default();
loop { loop {
// This can safely be unwrapped because `first_in_line` contains // This can safely be unwrapped because `first_in_line` contains
// one entry for each ancestor of the node (which is what we iterate over) // one entry for each ancestor of the node (which is what we iterate over)
let is_first = *first_in_line.last().unwrap(); let is_first = *first_in_line.last().unwrap();
// Apply all indent definitions for this node
if let Some(definitions) = indent_captures.get(&node.id()) { // Apply all indent definitions for this node.
// Since we only iterate over each node once, we can remove the
// corresponding captures from the HashMap to avoid cloning them.
if let Some(definitions) = indent_captures.remove(&node.id()) {
for definition in definitions { for definition in definitions {
match definition.scope { match definition.scope {
IndentScope::All => { IndentScope::All => {
@ -666,28 +800,22 @@ pub fn treesitter_indent_for_pos(
} }
if let Some(parent) = node.parent() { if let Some(parent) = node.parent() {
let mut node_line = node.start_position().row; let node_line = get_node_start_line(node, new_line_byte_pos);
let mut parent_line = parent.start_position().row; let parent_line = get_node_start_line(parent, new_line_byte_pos);
if node_line == line && new_line {
// Also consider the line that will be inserted
if node.start_byte() >= byte_pos {
node_line += 1;
}
if parent.start_byte() >= byte_pos {
parent_line += 1;
}
};
if node_line != parent_line { if node_line != parent_line {
if node_line < line + (new_line as usize) {
// Don't add indent for the line below the line of the query // Don't add indent for the line below the line of the query
result.add_line(&indent_for_line_below); if node_line < line + (new_line as usize) {
result.add_line(indent_for_line_below);
} }
if node_line == parent_line + 1 { if node_line == parent_line + 1 {
indent_for_line_below = indent_for_line; indent_for_line_below = indent_for_line;
} else { } else {
result.add_line(&indent_for_line); result.add_line(indent_for_line);
indent_for_line_below = Indentation::default(); indent_for_line_below = Indentation::default();
} }
indent_for_line = Indentation::default(); indent_for_line = Indentation::default();
} }
@ -699,13 +827,13 @@ pub fn treesitter_indent_for_pos(
if (node.start_position().row < line) if (node.start_position().row < line)
|| (new_line && node.start_position().row == line && node.start_byte() < byte_pos) || (new_line && node.start_position().row == line && node.start_byte() < byte_pos)
{ {
result.add_line(&indent_for_line_below); result.add_line(indent_for_line_below);
} }
result.add_line(&indent_for_line); result.add_line(indent_for_line);
break; break;
} }
} }
Some(result.as_string(indent_style)) Some(result.into_string(indent_style))
} }
// TODO: Make this be customizable, similar to how it works for vim // TODO: Make this be customizable, similar to how it works for vim
@ -1018,4 +1146,122 @@ mod test {
2 2
); );
} }
#[test]
fn add_capture() {
let indent = || Indentation {
indent: 1,
..Default::default()
};
let indent_always = || Indentation {
indent_always: 1,
..Default::default()
};
let outdent = || Indentation {
outdent: 1,
..Default::default()
};
let outdent_always = || Indentation {
outdent_always: 1,
..Default::default()
};
let add_capture = |mut indent: Indentation, capture| {
indent.add_capture(capture);
indent
};
// adding an indent to no indent makes an indent
assert_eq!(
indent(),
add_capture(Indentation::default(), IndentCaptureType::Indent)
);
assert_eq!(
indent_always(),
add_capture(Indentation::default(), IndentCaptureType::IndentAlways)
);
assert_eq!(
outdent(),
add_capture(Indentation::default(), IndentCaptureType::Outdent)
);
assert_eq!(
outdent_always(),
add_capture(Indentation::default(), IndentCaptureType::OutdentAlways)
);
// adding an indent to an already indented has no effect
assert_eq!(indent(), add_capture(indent(), IndentCaptureType::Indent));
assert_eq!(
outdent(),
add_capture(outdent(), IndentCaptureType::Outdent)
);
// adding an always to a regular makes it always
assert_eq!(
indent_always(),
add_capture(indent(), IndentCaptureType::IndentAlways)
);
assert_eq!(
outdent_always(),
add_capture(outdent(), IndentCaptureType::OutdentAlways)
);
// adding an always to an always is additive
assert_eq!(
Indentation {
indent_always: 2,
..Default::default()
},
add_capture(indent_always(), IndentCaptureType::IndentAlways)
);
assert_eq!(
Indentation {
outdent_always: 2,
..Default::default()
},
add_capture(outdent_always(), IndentCaptureType::OutdentAlways)
);
// adding regular to always should be associative
assert_eq!(
Indentation {
indent_always: 1,
..Default::default()
},
add_capture(
add_capture(indent(), IndentCaptureType::Indent),
IndentCaptureType::IndentAlways
)
);
assert_eq!(
Indentation {
indent_always: 1,
..Default::default()
},
add_capture(
add_capture(indent(), IndentCaptureType::IndentAlways),
IndentCaptureType::Indent
)
);
assert_eq!(
Indentation {
outdent_always: 1,
..Default::default()
},
add_capture(
add_capture(outdent(), IndentCaptureType::Outdent),
IndentCaptureType::OutdentAlways
)
);
assert_eq!(
Indentation {
outdent_always: 1,
..Default::default()
},
add_capture(
add_capture(outdent(), IndentCaptureType::OutdentAlways),
IndentCaptureType::Outdent
)
);
}
} }

@ -18,7 +18,6 @@ pub mod movement;
pub mod object; pub mod object;
pub mod path; pub mod path;
mod position; mod position;
pub mod register;
pub mod search; pub mod search;
pub mod selection; pub mod selection;
pub mod shellwords; pub mod shellwords;
@ -43,7 +42,9 @@ pub use helix_loader::find_workspace;
pub fn find_first_non_whitespace_char(line: RopeSlice) -> Option<usize> { pub fn find_first_non_whitespace_char(line: RopeSlice) -> Option<usize> {
line.chars().position(|ch| !ch.is_whitespace()) line.chars().position(|ch| !ch.is_whitespace())
} }
mod rope_reader;
pub use rope_reader::RopeReader;
pub use ropey::{self, str_utils, Rope, RopeBuilder, RopeSlice}; pub use ropey::{self, str_utils, Rope, RopeBuilder, RopeSlice};
// pub use tendril::StrTendril as Tendril; // pub use tendril::StrTendril as Tendril;

@ -106,12 +106,16 @@ fn find_pair(
for close in for close in
iter::successors(node.next_sibling(), |node| node.next_sibling()).take(MATCH_LIMIT) iter::successors(node.next_sibling(), |node| node.next_sibling()).take(MATCH_LIMIT)
{ {
let Some(open) = as_close_pair(doc, &close) else { continue; }; let Some(open) = as_close_pair(doc, &close) else {
continue;
};
if find_pair_end(doc, Some(node), open, Backward).is_some() { if find_pair_end(doc, Some(node), open, Backward).is_some() {
return doc.try_byte_to_char(close.start_byte()).ok(); return doc.try_byte_to_char(close.start_byte()).ok();
} }
} }
let Some(parent) = node.parent() else { break; }; let Some(parent) = node.parent() else {
break;
};
node = parent; node = parent;
} }
let node = tree.root_node().named_descendant_for_byte_range(pos, pos)?; let node = tree.root_node().named_descendant_for_byte_range(pos, pos)?;

@ -16,7 +16,7 @@ use crate::{
syntax::LanguageConfiguration, syntax::LanguageConfiguration,
text_annotations::TextAnnotations, text_annotations::TextAnnotations,
textobject::TextObject, textobject::TextObject,
visual_offset_from_block, Range, RopeSlice, visual_offset_from_block, Range, RopeSlice, Selection, Syntax,
}; };
#[derive(Debug, Copy, Clone, PartialEq, Eq)] #[derive(Debug, Copy, Clone, PartialEq, Eq)]
@ -556,6 +556,85 @@ pub fn goto_treesitter_object(
last_range last_range
} }
fn find_parent_start(mut node: Node) -> Option<Node> {
let start = node.start_byte();
while node.start_byte() >= start || !node.is_named() {
node = node.parent()?;
}
Some(node)
}
pub fn move_parent_node_end(
syntax: &Syntax,
text: RopeSlice,
selection: Selection,
dir: Direction,
movement: Movement,
) -> Selection {
let tree = syntax.tree();
selection.transform(|range| {
let start_from = text.char_to_byte(range.from());
let start_to = text.char_to_byte(range.to());
let mut node = match tree
.root_node()
.named_descendant_for_byte_range(start_from, start_to)
{
Some(node) => node,
None => {
log::debug!(
"no descendant found for byte range: {} - {}",
start_from,
start_to
);
return range;
}
};
let mut end_head = match dir {
// moving forward, we always want to move one past the end of the
// current node, so use the end byte of the current node, which is an exclusive
// end of the range
Direction::Forward => text.byte_to_char(node.end_byte()),
// moving backward, we want the cursor to land on the start char of
// the current node, or if it is already at the start of a node, to traverse up to
// the parent
Direction::Backward => {
let end_head = text.byte_to_char(node.start_byte());
// if we're already on the beginning, look up to the parent
if end_head == range.cursor(text) {
node = find_parent_start(node).unwrap_or(node);
text.byte_to_char(node.start_byte())
} else {
end_head
}
}
};
if movement == Movement::Move {
// preserve direction of original range
if range.direction() == Direction::Forward {
Range::new(end_head, end_head + 1)
} else {
Range::new(end_head + 1, end_head)
}
} else {
// if we end up with a forward range, then adjust it to be one past
// where we want
if end_head >= range.anchor {
end_head += 1;
}
Range::new(range.anchor, end_head)
}
})
}
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use ropey::Rope; use ropey::Rope;

@ -85,23 +85,21 @@ pub fn get_normalized_path(path: &Path) -> PathBuf {
/// ///
/// This function is used instead of `std::fs::canonicalize` because we don't want to verify /// This function is used instead of `std::fs::canonicalize` because we don't want to verify
/// here if the path exists, just normalize its components. /// here if the path exists, just normalize its components.
pub fn get_canonicalized_path(path: &Path) -> std::io::Result<PathBuf> { pub fn get_canonicalized_path(path: &Path) -> PathBuf {
let path = expand_tilde(path); let path = expand_tilde(path);
let path = if path.is_relative() { let path = if path.is_relative() {
std::env::current_dir().map(|current_dir| current_dir.join(path))? helix_loader::current_working_dir().join(path)
} else { } else {
path path
}; };
Ok(get_normalized_path(path.as_path())) get_normalized_path(path.as_path())
} }
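The normalization described above is purely lexical; a minimal standalone sketch of that idea with `std::path::Component` (hypothetical `normalize_lexically`, not helix's `get_normalized_path`):

use std::path::{Component, Path, PathBuf};

// Collapse `.` and `..` components without touching the filesystem.
// Simplified: it does not special-case a leading run of `..` components.
fn normalize_lexically(path: &Path) -> PathBuf {
    let mut out = PathBuf::new();
    for component in path.components() {
        match component {
            Component::CurDir => {}
            Component::ParentDir => {
                // Drop the previously pushed component if there is one.
                if !out.pop() {
                    out.push(Component::ParentDir);
                }
            }
            other => out.push(other),
        }
    }
    out
}

fn main() {
    assert_eq!(
        normalize_lexically(Path::new("/a/b/../c/./d")),
        PathBuf::from("/a/c/d")
    );
}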
pub fn get_relative_path(path: &Path) -> PathBuf { pub fn get_relative_path(path: &Path) -> PathBuf {
let path = PathBuf::from(path); let path = PathBuf::from(path);
let path = if path.is_absolute() { let path = if path.is_absolute() {
let cwdir = std::env::current_dir() let cwdir = get_normalized_path(&helix_loader::current_working_dir());
.map(|path| get_normalized_path(&path))
.expect("couldn't determine current directory");
get_normalized_path(&path) get_normalized_path(&path)
.strip_prefix(cwdir) .strip_prefix(cwdir)
.map(PathBuf::from) .map(PathBuf::from)
@ -142,7 +140,7 @@ pub fn get_relative_path(path: &Path) -> PathBuf {
/// ``` /// ```
/// ///
pub fn get_truncated_path<P: AsRef<Path>>(path: P) -> PathBuf { pub fn get_truncated_path<P: AsRef<Path>>(path: P) -> PathBuf {
let cwd = std::env::current_dir().unwrap_or_default(); let cwd = helix_loader::current_working_dir();
let path = path let path = path
.as_ref() .as_ref()
.strip_prefix(cwd) .strip_prefix(cwd)

@ -1,89 +0,0 @@
use std::collections::HashMap;
#[derive(Debug)]
pub struct Register {
name: char,
values: Vec<String>,
}
impl Register {
pub const fn new(name: char) -> Self {
Self {
name,
values: Vec::new(),
}
}
pub fn new_with_values(name: char, values: Vec<String>) -> Self {
Self { name, values }
}
pub const fn name(&self) -> char {
self.name
}
pub fn read(&self) -> &[String] {
&self.values
}
pub fn write(&mut self, values: Vec<String>) {
self.values = values;
}
pub fn push(&mut self, value: String) {
self.values.push(value);
}
}
/// Currently just wraps a `HashMap` of `Register`s
#[derive(Debug, Default)]
pub struct Registers {
inner: HashMap<char, Register>,
}
impl Registers {
pub fn get(&self, name: char) -> Option<&Register> {
self.inner.get(&name)
}
pub fn read(&self, name: char) -> Option<&[String]> {
self.get(name).map(|reg| reg.read())
}
pub fn write(&mut self, name: char, values: Vec<String>) {
if name != '_' {
self.inner
.insert(name, Register::new_with_values(name, values));
}
}
pub fn push(&mut self, name: char, value: String) {
if name != '_' {
if let Some(r) = self.inner.get_mut(&name) {
r.push(value);
} else {
self.write(name, vec![value]);
}
}
}
pub fn first(&self, name: char) -> Option<&String> {
self.read(name).and_then(|entries| entries.first())
}
pub fn last(&self, name: char) -> Option<&String> {
self.read(name).and_then(|entries| entries.last())
}
pub fn inner(&self) -> &HashMap<char, Register> {
&self.inner
}
pub fn clear(&mut self) {
self.inner.clear();
}
pub fn remove(&mut self, name: char) -> Option<Register> {
self.inner.remove(&name)
}
}

@ -0,0 +1,37 @@
use std::io;
use ropey::iter::Chunks;
use ropey::RopeSlice;
pub struct RopeReader<'a> {
current_chunk: &'a [u8],
chunks: Chunks<'a>,
}
impl<'a> RopeReader<'a> {
pub fn new(rope: RopeSlice<'a>) -> RopeReader<'a> {
RopeReader {
current_chunk: &[],
chunks: rope.chunks(),
}
}
}
impl io::Read for RopeReader<'_> {
fn read(&mut self, mut buf: &mut [u8]) -> io::Result<usize> {
let buf_len = buf.len();
loop {
let read_bytes = self.current_chunk.read(buf)?;
buf = &mut buf[read_bytes..];
if buf.is_empty() {
return Ok(buf_len);
}
if let Some(next_chunk) = self.chunks.next() {
self.current_chunk = next_chunk.as_bytes();
} else {
return Ok(buf_len - buf.len());
}
}
}
}
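One use of this adapter is handing a rope to any API that expects `std::io::Read` without first copying it into a contiguous buffer; a small sketch, assuming `helix-core` as a dependency and the re-exports added in this patch:

use std::io::Read;

use helix_core::{Rope, RopeReader};

fn main() -> std::io::Result<()> {
    let doc = Rope::from("fn main() {}\n");
    // Stream the rope chunk by chunk through the io::Read implementation.
    let mut reader = RopeReader::new(doc.slice(..));
    let mut bytes = Vec::new();
    reader.read_to_end(&mut bytes)?;
    assert_eq!(bytes, b"fn main() {}\n");
    Ok(())
}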

@ -630,11 +630,19 @@ impl Selection {
self.transform(|range| Range::point(range.cursor(text))) self.transform(|range| Range::point(range.cursor(text)))
} }
pub fn fragments<'a>(&'a self, text: RopeSlice<'a>) -> impl Iterator<Item = Cow<str>> + 'a { pub fn fragments<'a>(
&'a self,
text: RopeSlice<'a>,
) -> impl DoubleEndedIterator<Item = Cow<'a, str>> + ExactSizeIterator<Item = Cow<str>> + 'a
{
self.ranges.iter().map(move |range| range.fragment(text)) self.ranges.iter().map(move |range| range.fragment(text))
} }
pub fn slices<'a>(&'a self, text: RopeSlice<'a>) -> impl Iterator<Item = RopeSlice> + 'a { pub fn slices<'a>(
&'a self,
text: RopeSlice<'a>,
) -> impl DoubleEndedIterator<Item = RopeSlice<'a>> + ExactSizeIterator<Item = RopeSlice<'a>> + 'a
{
self.ranges.iter().map(move |range| range.slice(text)) self.ranges.iter().map(move |range| range.slice(text))
} }
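Widening the return types to `DoubleEndedIterator + ExactSizeIterator` lets callers walk the fragments from either end and ask for their count without collecting; a small sketch, assuming `helix_core`'s `Selection`/`Range` API:

use helix_core::{Range, Rope, Selection};

fn main() {
    let doc = Rope::from("one two three");
    let selection = Selection::single(0, 3).push(Range::new(4, 7));
    // ExactSizeIterator: the number of fragments is known up front.
    assert_eq!(selection.fragments(doc.slice(..)).len(), 2);
    // DoubleEndedIterator: fragments can be visited back to front.
    let last = selection.fragments(doc.slice(..)).next_back().unwrap();
    assert_eq!(last, "two");
}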

@ -4,7 +4,7 @@ use crate::{
diagnostic::Severity, diagnostic::Severity,
regex::Regex, regex::Regex,
transaction::{ChangeSet, Operation}, transaction::{ChangeSet, Operation},
Rope, RopeSlice, Tendril, RopeSlice, Tendril,
}; };
use ahash::RandomState; use ahash::RandomState;
@ -829,7 +829,10 @@ impl Loader {
// TODO: content_regex handling conflict resolution // TODO: content_regex handling conflict resolution
} }
pub fn language_config_for_shebang(&self, source: &Rope) -> Option<Arc<LanguageConfiguration>> { pub fn language_config_for_shebang(
&self,
source: RopeSlice,
) -> Option<Arc<LanguageConfiguration>> {
let line = Cow::from(source.line(0)); let line = Cow::from(source.line(0));
static SHEBANG_REGEX: Lazy<Regex> = static SHEBANG_REGEX: Lazy<Regex> =
Lazy::new(|| Regex::new(&["^", SHEBANG].concat()).unwrap()); Lazy::new(|| Regex::new(&["^", SHEBANG].concat()).unwrap());
@ -939,7 +942,7 @@ fn byte_range_to_str(range: std::ops::Range<usize>, source: RopeSlice) -> Cow<st
impl Syntax { impl Syntax {
pub fn new( pub fn new(
source: &Rope, source: RopeSlice,
config: Arc<HighlightConfiguration>, config: Arc<HighlightConfiguration>,
loader: Arc<Loader>, loader: Arc<Loader>,
) -> Option<Self> { ) -> Option<Self> {
@ -978,8 +981,8 @@ impl Syntax {
pub fn update( pub fn update(
&mut self, &mut self,
old_source: &Rope, old_source: RopeSlice,
source: &Rope, source: RopeSlice,
changeset: &ChangeSet, changeset: &ChangeSet,
) -> Result<(), Error> { ) -> Result<(), Error> {
let mut queue = VecDeque::new(); let mut queue = VecDeque::new();
@ -1146,12 +1149,38 @@ impl Syntax {
layer.tree().root_node(), layer.tree().root_node(),
RopeProvider(source_slice), RopeProvider(source_slice),
); );
let mut combined_injections = vec![
(None, Vec::new(), IncludedChildren::default());
layer.config.combined_injections_patterns.len()
];
let mut injections = Vec::new(); let mut injections = Vec::new();
let mut last_injection_end = 0;
for mat in matches { for mat in matches {
let (injection_capture, content_node, included_children) = layer let (injection_capture, content_node, included_children) = layer
.config .config
.injection_for_match(&layer.config.injections_query, &mat, source_slice); .injection_for_match(&layer.config.injections_query, &mat, source_slice);
// in case this is a combined injection save it for more processing later
if let Some(combined_injection_idx) = layer
.config
.combined_injections_patterns
.iter()
.position(|&pattern| pattern == mat.pattern_index)
{
let entry = &mut combined_injections[combined_injection_idx];
if injection_capture.is_some() {
entry.0 = injection_capture;
}
if let Some(content_node) = content_node {
if content_node.start_byte() >= last_injection_end {
entry.1.push(content_node);
last_injection_end = content_node.end_byte();
}
}
entry.2 = included_children;
continue;
}
// Explicitly remove this match so that none of its other captures will remain // Explicitly remove this match so that none of its other captures will remain
// in the stream of captures. // in the stream of captures.
mat.remove(); mat.remove();
@ -1166,53 +1195,27 @@ impl Syntax {
intersect_ranges(&layer.ranges, &[content_node], included_children); intersect_ranges(&layer.ranges, &[content_node], included_children);
if !ranges.is_empty() { if !ranges.is_empty() {
if content_node.start_byte() < last_injection_end {
continue;
}
last_injection_end = content_node.end_byte();
injections.push((config, ranges)); injections.push((config, ranges));
} }
} }
} }
} }
// Process combined injections. for (lang_name, content_nodes, included_children) in combined_injections {
if let Some(combined_injections_query) = &layer.config.combined_injections_query {
let mut injections_by_pattern_index =
vec![
(None, Vec::new(), IncludedChildren::default());
combined_injections_query.pattern_count()
];
let matches = cursor.matches(
combined_injections_query,
layer.tree().root_node(),
RopeProvider(source_slice),
);
for mat in matches {
let entry = &mut injections_by_pattern_index[mat.pattern_index];
let (injection_capture, content_node, included_children) = layer
.config
.injection_for_match(combined_injections_query, &mat, source_slice);
if injection_capture.is_some() {
entry.0 = injection_capture;
}
if let Some(content_node) = content_node {
entry.1.push(content_node);
}
entry.2 = included_children;
}
for (lang_name, content_nodes, included_children) in injections_by_pattern_index
{
if let (Some(lang_name), false) = (lang_name, content_nodes.is_empty()) { if let (Some(lang_name), false) = (lang_name, content_nodes.is_empty()) {
if let Some(config) = (injection_callback)(&lang_name) { if let Some(config) = (injection_callback)(&lang_name) {
let ranges = intersect_ranges( let ranges =
&layer.ranges, intersect_ranges(&layer.ranges, &content_nodes, included_children);
&content_nodes,
included_children,
);
if !ranges.is_empty() { if !ranges.is_empty() {
injections.push((config, ranges)); injections.push((config, ranges));
} }
} }
} }
} }
}
let depth = layer.depth + 1; let depth = layer.depth + 1;
// TODO: can't inline this since matches borrows self.layers // TODO: can't inline this since matches borrows self.layers
@ -1398,7 +1401,7 @@ impl LanguageLayer {
self.tree.as_ref().unwrap() self.tree.as_ref().unwrap()
} }
fn parse(&mut self, parser: &mut Parser, source: &Rope) -> Result<(), Error> { fn parse(&mut self, parser: &mut Parser, source: RopeSlice) -> Result<(), Error> {
parser parser
.set_included_ranges(&self.ranges) .set_included_ranges(&self.ranges)
.map_err(|_| Error::InvalidRanges)?; .map_err(|_| Error::InvalidRanges)?;
@ -1429,7 +1432,7 @@ impl LanguageLayer {
} }
pub(crate) fn generate_edits( pub(crate) fn generate_edits(
old_text: &Rope, old_text: RopeSlice,
changeset: &ChangeSet, changeset: &ChangeSet,
) -> Vec<tree_sitter::InputEdit> { ) -> Vec<tree_sitter::InputEdit> {
use Operation::*; use Operation::*;
@ -1445,7 +1448,7 @@ pub(crate) fn generate_edits(
// TODO: this is a lot easier with Change instead of Operation. // TODO: this is a lot easier with Change instead of Operation.
fn point_at_pos(text: &Rope, pos: usize) -> (usize, Point) { fn point_at_pos(text: RopeSlice, pos: usize) -> (usize, Point) {
let byte = text.char_to_byte(pos); // <- attempted to index past end let byte = text.char_to_byte(pos); // <- attempted to index past end
let line = text.char_to_line(pos); let line = text.char_to_line(pos);
let line_start_byte = text.line_to_byte(line); let line_start_byte = text.line_to_byte(line);
@ -1571,7 +1574,7 @@ pub struct HighlightConfiguration {
pub language: Grammar, pub language: Grammar,
pub query: Query, pub query: Query,
injections_query: Query, injections_query: Query,
combined_injections_query: Option<Query>, combined_injections_patterns: Vec<usize>,
highlights_pattern_index: usize, highlights_pattern_index: usize,
highlight_indices: ArcSwap<Vec<Option<Highlight>>>, highlight_indices: ArcSwap<Vec<Option<Highlight>>>,
non_local_variable_patterns: Vec<bool>, non_local_variable_patterns: Vec<bool>,
@ -1622,7 +1625,7 @@ impl<'a> Iterator for ChunksBytes<'a> {
} }
pub struct RopeProvider<'a>(pub RopeSlice<'a>); pub struct RopeProvider<'a>(pub RopeSlice<'a>);
impl<'a> TextProvider<'a> for RopeProvider<'a> { impl<'a> TextProvider<&'a [u8]> for RopeProvider<'a> {
type I = ChunksBytes<'a>; type I = ChunksBytes<'a>;
fn text(&mut self, node: Node) -> Self::I { fn text(&mut self, node: Node) -> Self::I {
@ -1636,7 +1639,7 @@ impl<'a> TextProvider<'a> for RopeProvider<'a> {
struct HighlightIterLayer<'a> { struct HighlightIterLayer<'a> {
_tree: Option<Tree>, _tree: Option<Tree>,
cursor: QueryCursor, cursor: QueryCursor,
captures: RefCell<iter::Peekable<QueryCaptures<'a, 'a, RopeProvider<'a>>>>, captures: RefCell<iter::Peekable<QueryCaptures<'a, 'a, RopeProvider<'a>, &'a [u8]>>>,
config: &'a HighlightConfiguration, config: &'a HighlightConfiguration,
highlight_end_stack: Vec<usize>, highlight_end_stack: Vec<usize>,
scope_stack: Vec<LocalScope<'a>>, scope_stack: Vec<LocalScope<'a>>,
@ -1687,26 +1690,15 @@ impl HighlightConfiguration {
} }
} }
let mut injections_query = Query::new(language, injection_query)?; let injections_query = Query::new(language, injection_query)?;
let combined_injections_patterns = (0..injections_query.pattern_count())
// Construct a separate query just for dealing with the 'combined injections'. .filter(|&i| {
// Disable the combined injection patterns in the main query. injections_query
let mut combined_injections_query = Query::new(language, injection_query)?; .property_settings(i)
let mut has_combined_queries = false; .iter()
for pattern_index in 0..injections_query.pattern_count() { .any(|s| &*s.key == "injection.combined")
let settings = injections_query.property_settings(pattern_index); })
if settings.iter().any(|s| &*s.key == "injection.combined") { .collect();
has_combined_queries = true;
injections_query.disable_pattern(pattern_index);
} else {
combined_injections_query.disable_pattern(pattern_index);
}
}
let combined_injections_query = if has_combined_queries {
Some(combined_injections_query)
} else {
None
};
// Find all of the highlighting patterns that are disabled for nodes that // Find all of the highlighting patterns that are disabled for nodes that
// have been identified as local variables. // have been identified as local variables.
@ -1755,7 +1747,7 @@ impl HighlightConfiguration {
language, language,
query, query,
injections_query, injections_query,
combined_injections_query, combined_injections_patterns,
highlights_pattern_index, highlights_pattern_index,
highlight_indices, highlight_indices,
non_local_variable_patterns, non_local_variable_patterns,
@ -2551,7 +2543,7 @@ mod test {
let mut cursor = QueryCursor::new(); let mut cursor = QueryCursor::new();
let config = HighlightConfiguration::new(language, "", "", "").unwrap(); let config = HighlightConfiguration::new(language, "", "", "").unwrap();
let syntax = Syntax::new(&source, Arc::new(config), Arc::new(loader)).unwrap(); let syntax = Syntax::new(source.slice(..), Arc::new(config), Arc::new(loader)).unwrap();
let root = syntax.tree().root_node(); let root = syntax.tree().root_node();
let mut test = |capture, range| { let mut test = |capture, range| {
@ -2625,7 +2617,7 @@ mod test {
fn main() {} fn main() {}
", ",
); );
let syntax = Syntax::new(&source, Arc::new(config), Arc::new(loader)).unwrap(); let syntax = Syntax::new(source.slice(..), Arc::new(config), Arc::new(loader)).unwrap();
let tree = syntax.tree(); let tree = syntax.tree();
let root = tree.root_node(); let root = tree.root_node();
assert_eq!(root.kind(), "source_file"); assert_eq!(root.kind(), "source_file");
@ -2652,7 +2644,7 @@ mod test {
&doc, &doc,
vec![(6, 11, Some("test".into())), (12, 17, None)].into_iter(), vec![(6, 11, Some("test".into())), (12, 17, None)].into_iter(),
); );
let edits = generate_edits(&doc, transaction.changes()); let edits = generate_edits(doc.slice(..), transaction.changes());
// transaction.apply(&mut state); // transaction.apply(&mut state);
assert_eq!( assert_eq!(
@ -2681,7 +2673,7 @@ mod test {
let mut doc = Rope::from("fn test() {}"); let mut doc = Rope::from("fn test() {}");
let transaction = let transaction =
Transaction::change(&doc, vec![(8, 8, Some("a: u32".into()))].into_iter()); Transaction::change(&doc, vec![(8, 8, Some("a: u32".into()))].into_iter());
let edits = generate_edits(&doc, transaction.changes()); let edits = generate_edits(doc.slice(..), transaction.changes());
transaction.apply(&mut doc); transaction.apply(&mut doc);
assert_eq!(doc, "fn test(a: u32) {}"); assert_eq!(doc, "fn test(a: u32) {}");
@ -2715,7 +2707,7 @@ mod test {
let language = get_language(language_name).unwrap(); let language = get_language(language_name).unwrap();
let config = HighlightConfiguration::new(language, "", "", "").unwrap(); let config = HighlightConfiguration::new(language, "", "", "").unwrap();
let syntax = Syntax::new(&source, Arc::new(config), Arc::new(loader)).unwrap(); let syntax = Syntax::new(source.slice(..), Arc::new(config), Arc::new(loader)).unwrap();
let root = syntax let root = syntax
.tree() .tree()

@ -1,3 +1,4 @@
use ropey::RopeSlice;
use smallvec::SmallVec; use smallvec::SmallVec;
use crate::{Range, Rope, Selection, Tendril}; use crate::{Range, Rope, Selection, Tendril};
@ -42,7 +43,7 @@ impl ChangeSet {
} }
#[must_use] #[must_use]
pub fn new(doc: &Rope) -> Self { pub fn new(doc: RopeSlice) -> Self {
let len = doc.len_chars(); let len = doc.len_chars();
Self { Self {
changes: Vec::new(), changes: Vec::new(),
@ -388,7 +389,10 @@ impl ChangeSet {
} }
let Some((i, change)) = iter.next() else { let Some((i, change)) = iter.next() else {
map!(|pos, _| (old_pos == pos).then_some(new_pos), self.changes.len()); map!(
|pos, _| (old_pos == pos).then_some(new_pos),
self.changes.len()
);
break; break;
}; };
@ -485,7 +489,7 @@ impl Transaction {
/// Create a new, empty transaction. /// Create a new, empty transaction.
pub fn new(doc: &Rope) -> Self { pub fn new(doc: &Rope) -> Self {
Self { Self {
changes: ChangeSet::new(doc), changes: ChangeSet::new(doc.slice(..)),
selection: None, selection: None,
} }
} }
@ -946,9 +950,9 @@ mod test {
#[test] #[test]
fn combine_with_empty() { fn combine_with_empty() {
let empty = Rope::from(""); let empty = Rope::from("");
let a = ChangeSet::new(&empty); let a = ChangeSet::new(empty.slice(..));
let mut b = ChangeSet::new(&empty); let mut b = ChangeSet::new(empty.slice(..));
b.insert("a".into()); b.insert("a".into());
let changes = a.compose(b); let changes = a.compose(b);
@ -962,9 +966,9 @@ mod test {
const TEST_CASE: &str = "Hello, これはヘリックスエディターです!"; const TEST_CASE: &str = "Hello, これはヘリックスエディターです!";
let empty = Rope::from(""); let empty = Rope::from("");
let a = ChangeSet::new(&empty); let a = ChangeSet::new(empty.slice(..));
let mut b = ChangeSet::new(&empty); let mut b = ChangeSet::new(empty.slice(..));
b.insert(TEST_CASE.into()); b.insert(TEST_CASE.into());
let changes = a.compose(b); let changes = a.compose(b);

@ -0,0 +1,48 @@
std::vector<std::string>
fn_with_many_parameters(int parm1, long parm2, float parm3, double parm4,
char* parm5, bool parm6);
std::vector<std::string>
fn_with_many_parameters(int parm1, long parm2, float parm3, double parm4,
char* parm5, bool parm6) {
auto lambda = []() {
return 0;
};
auto lambda_with_a_really_long_name_that_uses_a_whole_line
= [](int some_more_aligned_parameters,
std::string parm2) {
do_smth();
};
if (brace_on_same_line) {
do_smth();
} else if (brace_on_next_line)
{
do_smth();
} else if (another_condition) {
do_smth();
}
else {
do_smth();
}
if (inline_if_statement)
do_smth();
if (another_inline_if_statement)
return [](int parm1, char* parm2) {
this_is_a_really_pointless_lambda();
};
switch (var) {
case true:
return -1;
case false:
return 42;
}
}
class MyClass : public MyBaseClass {
public:
MyClass();
void public_fn();
private:
super_secret_private_fn();
}

@ -1 +0,0 @@
../../../src/indent.rs

@ -11,3 +11,16 @@ indent = { tab-width = 4, unit = " " }
[[grammar]] [[grammar]]
name = "rust" name = "rust"
source = { git = "https://github.com/tree-sitter/tree-sitter-rust", rev = "0431a2c60828731f27491ee9fdefe25e250ce9c9" } source = { git = "https://github.com/tree-sitter/tree-sitter-rust", rev = "0431a2c60828731f27491ee9fdefe25e250ce9c9" }
[[language]]
name = "cpp"
scope = "source.cpp"
injection-regex = "cpp"
file-types = ["cc", "hh", "c++", "cpp", "hpp", "h", "ipp", "tpp", "cxx", "hxx", "ixx", "txx", "ino", "C", "H"]
roots = []
comment-token = "//"
indent = { tab-width = 2, unit = " " }
[[grammar]]
name = "cpp"
source = { git = "https://github.com/tree-sitter/tree-sitter-cpp", rev = "2d2c4aee8672af4c7c8edff68e7dd4c07e88d2b1" }

@ -1,20 +1,122 @@
use helix_core::{ use helix_core::{
indent::{indent_level_for_line, treesitter_indent_for_pos, IndentStyle}, indent::{indent_level_for_line, treesitter_indent_for_pos, IndentStyle},
syntax::Loader, syntax::{Configuration, Loader},
Syntax, Syntax,
}; };
use std::path::PathBuf; use ropey::Rope;
use std::{ops::Range, path::PathBuf, process::Command};
#[test] #[test]
fn test_treesitter_indent_rust() { fn test_treesitter_indent_rust() {
test_treesitter_indent("rust.rs", "source.rust"); standard_treesitter_test("rust.rs", "source.rust");
} }
#[test]
fn test_treesitter_indent_cpp() {
standard_treesitter_test("cpp.cpp", "source.cpp");
}
#[test] #[test]
fn test_treesitter_indent_rust_2() { fn test_treesitter_indent_rust_helix() {
test_treesitter_indent("indent.rs", "source.rust"); // We pin a specific git revision to prevent unrelated changes from causing the indent tests to fail.
// TODO Use commands.rs as indentation test. // Ideally, someone updates this once in a while and fixes any errors that occur.
// Currently this fails because we can't align the parameters of a closure yet let rev = "af382768cdaf89ff547dbd8f644a1bddd90e7c8f";
// test_treesitter_indent("commands.rs", "source.rust"); let files = Command::new("git")
.args([
"ls-tree",
"-r",
"--name-only",
"--full-tree",
rev,
"helix-term/src",
])
.output()
.unwrap();
let files = String::from_utf8(files.stdout).unwrap();
let ignored_files = vec![
// Contains many macros that tree-sitter does not parse in a meaningful way and is otherwise not very interesting
"helix-term/src/health.rs",
];
for file in files.split_whitespace() {
if ignored_files.contains(&file) {
continue;
}
let ignored_lines: Vec<Range<usize>> = match file {
"helix-term/src/application.rs" => vec![
// We can't handle complicated indent rules inside macros (`json!` in this case) since
// the tree-sitter grammar only parses them as `token_tree` and `identifier` nodes.
1045..1051,
],
"helix-term/src/commands.rs" => vec![
// This is broken because of the current handling of `call_expression`
// (i.e. having an indent query for it but outdenting again in specific cases).
// The indent query is needed to correctly handle multi-line arguments in function calls
// inside indented `field_expression` nodes (which occurs fairly often).
//
// Once we have the `@indent.always` capture type, it might be possible to just have an indent
// capture for the `arguments` field of a call expression. That could enable us to correctly
// handle this.
2226..2230,
],
"helix-term/src/commands/dap.rs" => vec![
// Complex `format!` macro
46..52,
],
"helix-term/src/commands/lsp.rs" => vec![
// Macro
624..627,
// Return type declaration of a closure. `cargo fmt` adds an additional space here,
// which we cannot (yet) model with our indent queries.
878..879,
// Same as in `helix-term/src/commands.rs`
1335..1343,
],
"helix-term/src/config.rs" => vec![
// Multiline string
146..152,
],
"helix-term/src/keymap.rs" => vec![
// Complex macro (see above)
456..470,
// Multiline string without indent
563..567,
],
"helix-term/src/main.rs" => vec![
// Multiline string
44..70,
],
"helix-term/src/ui/completion.rs" => vec![
// Macro
218..232,
],
"helix-term/src/ui/editor.rs" => vec![
// The chained function calls here are not indented, probably because of the comment
// in between. Since `cargo fmt` doesn't even attempt to format it, there's probably
// no point in trying to indent this correctly.
342..350,
],
"helix-term/src/ui/lsp.rs" => vec![
// Macro
56..61,
],
"helix-term/src/ui/statusline.rs" => vec![
// Same as in `helix-term/src/commands.rs`
436..442,
450..456,
],
_ => Vec::new(),
};
let git_object = rev.to_string() + ":" + file;
let content = Command::new("git")
.args(["cat-file", "blob", &git_object])
.output()
.unwrap();
let doc = Rope::from_reader(&mut content.stdout.as_slice()).unwrap();
test_treesitter_indent(file, doc, "source.rust", ignored_lines);
}
} }
#[test] #[test]
@ -50,20 +152,41 @@ fn test_indent_level_for_line_with_spaces_and_tabs() {
assert_eq!(indent_level, 2) assert_eq!(indent_level, 2)
} }
fn test_treesitter_indent(file_name: &str, lang_scope: &str) { fn indent_tests_dir() -> PathBuf {
let mut test_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")); let mut test_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
test_dir.push("tests/data/indent"); test_dir.push("tests/data/indent");
test_dir
}
fn indent_test_path(name: &str) -> PathBuf {
let mut path = indent_tests_dir();
path.push(name);
path
}
fn indent_tests_config() -> Configuration {
let mut config_path = indent_tests_dir();
config_path.push("languages.toml");
let config = std::fs::read_to_string(config_path).unwrap();
toml::from_str(&config).unwrap()
}
let mut test_file = test_dir.clone(); fn standard_treesitter_test(file_name: &str, lang_scope: &str) {
test_file.push(file_name); let test_path = indent_test_path(file_name);
let test_file = std::fs::File::open(test_file).unwrap(); let test_file = std::fs::File::open(test_path).unwrap();
let doc = ropey::Rope::from_reader(test_file).unwrap(); let doc = ropey::Rope::from_reader(test_file).unwrap();
test_treesitter_indent(file_name, doc, lang_scope, Vec::new())
}
let mut config_file = test_dir; /// Test that all the lines in the given file are indented as expected.
config_file.push("languages.toml"); /// ignored_lines is a list of (1-indexed) line ranges that are excluded from this test.
let config = std::fs::read_to_string(config_file).unwrap(); fn test_treesitter_indent(
let config = toml::from_str(&config).unwrap(); test_name: &str,
let loader = Loader::new(config); doc: Rope,
lang_scope: &str,
ignored_lines: Vec<std::ops::Range<usize>>,
) {
let loader = Loader::new(indent_tests_config());
// set runtime path so we can find the queries // set runtime path so we can find the queries
let mut runtime = std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR")); let mut runtime = std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR"));
@ -71,21 +194,25 @@ fn test_treesitter_indent(file_name: &str, lang_scope: &str) {
std::env::set_var("HELIX_RUNTIME", runtime.to_str().unwrap()); std::env::set_var("HELIX_RUNTIME", runtime.to_str().unwrap());
let language_config = loader.language_config_for_scope(lang_scope).unwrap(); let language_config = loader.language_config_for_scope(lang_scope).unwrap();
let indent_style = IndentStyle::from_str(&language_config.indent.as_ref().unwrap().unit);
let highlight_config = language_config.highlight_config(&[]).unwrap(); let highlight_config = language_config.highlight_config(&[]).unwrap();
let syntax = Syntax::new(&doc, highlight_config, std::sync::Arc::new(loader)).unwrap();
let indent_query = language_config.indent_query().unwrap();
let text = doc.slice(..); let text = doc.slice(..);
let syntax = Syntax::new(text, highlight_config, std::sync::Arc::new(loader)).unwrap();
let indent_query = language_config.indent_query().unwrap();
for i in 0..doc.len_lines() { for i in 0..doc.len_lines() {
let line = text.line(i); let line = text.line(i);
if ignored_lines.iter().any(|range| range.contains(&(i + 1))) {
continue;
}
if let Some(pos) = helix_core::find_first_non_whitespace_char(line) { if let Some(pos) = helix_core::find_first_non_whitespace_char(line) {
let tab_and_indent_width: usize = 4; let tab_width: usize = 4;
let suggested_indent = treesitter_indent_for_pos( let suggested_indent = treesitter_indent_for_pos(
indent_query, indent_query,
&syntax, &syntax,
&IndentStyle::Spaces(tab_and_indent_width as u8), &indent_style,
tab_and_indent_width, tab_width,
tab_and_indent_width, indent_style.indent_width(tab_width),
text, text,
i, i,
text.line_to_char(i) + pos, text.line_to_char(i) + pos,
@ -94,7 +221,8 @@ fn test_treesitter_indent(file_name: &str, lang_scope: &str) {
.unwrap(); .unwrap();
assert!( assert!(
line.get_slice(..pos).map_or(false, |s| s == suggested_indent), line.get_slice(..pos).map_or(false, |s| s == suggested_indent),
"Wrong indentation on line {}:\n\"{}\" (original line)\n\"{}\" (suggested indentation)\n", "Wrong indentation for file {:?} on line {}:\n\"{}\" (original line)\n\"{}\" (suggested indentation)\n",
test_name,
i+1, i+1,
line.slice(..line.len_chars()-1), line.slice(..line.len_chars()-1),
suggested_indent, suggested_indent,

@ -18,7 +18,7 @@ anyhow = "1"
serde = { version = "1.0", features = ["derive"] } serde = { version = "1.0", features = ["derive"] }
toml = "0.7" toml = "0.7"
etcetera = "0.8" etcetera = "0.8"
tree-sitter = "0.20" tree-sitter.workspace = true
once_cell = "1.18" once_cell = "1.18"
log = "0.4" log = "0.4"
which = "4.4" which = "4.4"
@ -28,7 +28,8 @@ which = "4.4"
# cloning/compiling tree-sitter grammars # cloning/compiling tree-sitter grammars
cc = { version = "1" } cc = { version = "1" }
threadpool = { version = "1.0" } threadpool = { version = "1.0" }
tempfile = "3.6.0" tempfile = "3.8.0"
dunce = "1.0.4"
[target.'cfg(not(target_arch = "wasm32"))'.dependencies] [target.'cfg(not(target_arch = "wasm32"))'.dependencies]
libloading = "0.8" libloading = "0.8"

@ -40,7 +40,9 @@ fn main() {
.ok() .ok()
.filter(|output| output.status.success()) .filter(|output| output.status.success())
.and_then(|x| String::from_utf8(x.stdout).ok()) .and_then(|x| String::from_utf8(x.stdout).ok())
else{ return; }; else {
return;
};
// If heads starts pointing at something else (different branch) // If heads starts pointing at something else (different branch)
// we need to return // we need to return
let head = Path::new(&git_dir).join("HEAD"); let head = Path::new(&git_dir).join("HEAD");
@ -55,7 +57,9 @@ fn main() {
.ok() .ok()
.filter(|output| output.status.success()) .filter(|output| output.status.success())
.and_then(|x| String::from_utf8(x.stdout).ok()) .and_then(|x| String::from_utf8(x.stdout).ok())
else{ return; }; else {
return;
};
let head_ref = Path::new(&git_dir).join(head_ref); let head_ref = Path::new(&git_dir).join(head_ref);
if head_ref.exists() { if head_ref.exists() {
println!("cargo:rerun-if-changed={}", head_ref.display()); println!("cargo:rerun-if-changed={}", head_ref.display());

@ -3,29 +3,56 @@ pub mod grammar;
use etcetera::base_strategy::{choose_base_strategy, BaseStrategy}; use etcetera::base_strategy::{choose_base_strategy, BaseStrategy};
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use std::sync::RwLock;
pub const VERSION_AND_GIT_HASH: &str = env!("VERSION_AND_GIT_HASH"); pub const VERSION_AND_GIT_HASH: &str = env!("VERSION_AND_GIT_HASH");
static CWD: RwLock<Option<PathBuf>> = RwLock::new(None);
static RUNTIME_DIRS: once_cell::sync::Lazy<Vec<PathBuf>> = static RUNTIME_DIRS: once_cell::sync::Lazy<Vec<PathBuf>> =
once_cell::sync::Lazy::new(prioritize_runtime_dirs); once_cell::sync::Lazy::new(prioritize_runtime_dirs);
static CONFIG_FILE: once_cell::sync::OnceCell<PathBuf> = once_cell::sync::OnceCell::new(); static CONFIG_FILE: once_cell::sync::OnceCell<PathBuf> = once_cell::sync::OnceCell::new();
pub fn initialize_config_file(specified_file: Option<PathBuf>) { static LOG_FILE: once_cell::sync::OnceCell<PathBuf> = once_cell::sync::OnceCell::new();
let config_file = specified_file.unwrap_or_else(|| {
let config_dir = config_dir();
if !config_dir.exists() { // Get the current working directory.
std::fs::create_dir_all(&config_dir).ok(); // This information is managed internally as the call to std::env::current_dir
// might fail if the cwd has been deleted.
pub fn current_working_dir() -> PathBuf {
if let Some(path) = &*CWD.read().unwrap() {
return path.clone();
} }
config_dir.join("config.toml") let path = std::env::current_dir()
}); .and_then(dunce::canonicalize)
.expect("Couldn't determine current working directory");
let mut cwd = CWD.write().unwrap();
*cwd = Some(path.clone());
path
}
pub fn set_current_working_dir(path: PathBuf) -> std::io::Result<()> {
let path = dunce::canonicalize(path)?;
std::env::set_current_dir(path.clone())?;
let mut cwd = CWD.write().unwrap();
*cwd = Some(path);
Ok(())
}
// We should only initialize this value once. pub fn initialize_config_file(specified_file: Option<PathBuf>) {
let config_file = specified_file.unwrap_or_else(default_config_file);
ensure_parent_dir(&config_file);
CONFIG_FILE.set(config_file).ok(); CONFIG_FILE.set(config_file).ok();
} }
pub fn initialize_log_file(specified_file: Option<PathBuf>) {
let log_file = specified_file.unwrap_or_else(default_log_file);
ensure_parent_dir(&log_file);
LOG_FILE.set(log_file).ok();
}
/// A list of runtime directories from highest to lowest priority /// A list of runtime directories from highest to lowest priority
/// ///
/// The priority is: /// The priority is:
@ -122,10 +149,11 @@ pub fn cache_dir() -> PathBuf {
} }
pub fn config_file() -> PathBuf { pub fn config_file() -> PathBuf {
CONFIG_FILE CONFIG_FILE.get().map(|path| path.to_path_buf()).unwrap()
.get() }
.map(|path| path.to_path_buf())
.unwrap_or_else(|| config_dir().join("config.toml")) pub fn log_file() -> PathBuf {
LOG_FILE.get().map(|path| path.to_path_buf()).unwrap()
} }
pub fn helix_module_file() -> PathBuf { pub fn helix_module_file() -> PathBuf {
@ -144,7 +172,7 @@ pub fn lang_config_file() -> PathBuf {
config_dir().join("languages.toml") config_dir().join("languages.toml")
} }
pub fn log_file() -> PathBuf { pub fn default_log_file() -> PathBuf {
cache_dir().join("helix.log") cache_dir().join("helix.log")
} }
@ -225,7 +253,7 @@ pub fn merge_toml_values(left: toml::Value, right: toml::Value, merge_depth: usi
/// If no workspace was found returns (CWD, true). /// If no workspace was found returns (CWD, true).
/// Otherwise (workspace, false) is returned /// Otherwise (workspace, false) is returned
pub fn find_workspace() -> (PathBuf, bool) { pub fn find_workspace() -> (PathBuf, bool) {
let current_dir = std::env::current_dir().expect("unable to determine current directory"); let current_dir = current_working_dir();
for ancestor in current_dir.ancestors() { for ancestor in current_dir.ancestors() {
if ancestor.join(".git").exists() || ancestor.join(".helix").exists() { if ancestor.join(".git").exists() || ancestor.join(".helix").exists() {
return (ancestor.to_owned(), false); return (ancestor.to_owned(), false);
@ -235,13 +263,37 @@ pub fn find_workspace() -> (PathBuf, bool) {
(current_dir, true) (current_dir, true)
} }
fn default_config_file() -> PathBuf {
config_dir().join("config.toml")
}
fn ensure_parent_dir(path: &Path) {
if let Some(parent) = path.parent() {
if !parent.exists() {
std::fs::create_dir_all(parent).ok();
}
}
}
#[cfg(test)] #[cfg(test)]
mod merge_toml_tests { mod merge_toml_tests {
use std::str; use std::str;
use super::merge_toml_values; use super::{current_working_dir, merge_toml_values, set_current_working_dir};
use toml::Value; use toml::Value;
#[test]
fn current_dir_is_set() {
let new_path = dunce::canonicalize(std::env::temp_dir()).unwrap();
let cwd = current_working_dir();
assert_ne!(cwd, new_path);
set_current_working_dir(new_path.clone()).expect("Couldn't set new path");
let cwd = current_working_dir();
assert_eq!(cwd, new_path);
}
#[test] #[test]
fn language_toml_map_merges() { fn language_toml_map_merges() {
const USER: &str = r#" const USER: &str = r#"

@ -19,12 +19,13 @@ helix-parsec = { version = "0.6", path = "../helix-parsec" }
anyhow = "1.0" anyhow = "1.0"
futures-executor = "0.3" futures-executor = "0.3"
futures-util = { version = "0.3", features = ["std", "async-await"], default-features = false } futures-util = { version = "0.3", features = ["std", "async-await"], default-features = false }
globset = "0.4.13"
log = "0.4" log = "0.4"
lsp-types = { version = "0.94" } lsp-types = { version = "0.94" }
serde = { version = "1.0", features = ["derive"] } serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0" serde_json = "1.0"
thiserror = "1.0" thiserror = "1.0"
tokio = { version = "1.28", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot", "sync"] } tokio = { version = "1.31", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot", "sync"] }
tokio-stream = "0.1.14" tokio-stream = "0.1.14"
which = "4.4" which = "4.4"
parking_lot = "0.12.1" parking_lot = "0.12.1"

@ -7,8 +7,9 @@ use crate::{
use helix_core::{find_workspace, path, syntax::LanguageServerFeature, ChangeSet, Rope}; use helix_core::{find_workspace, path, syntax::LanguageServerFeature, ChangeSet, Rope};
use helix_loader::{self, VERSION_AND_GIT_HASH}; use helix_loader::{self, VERSION_AND_GIT_HASH};
use lsp::{ use lsp::{
notification::DidChangeWorkspaceFolders, DidChangeWorkspaceFoldersParams, OneOf, notification::DidChangeWorkspaceFolders, CodeActionCapabilityResolveSupport,
PositionEncodingKind, WorkspaceFolder, WorkspaceFoldersChangeEvent, DidChangeWorkspaceFoldersParams, OneOf, PositionEncodingKind, WorkspaceFolder,
WorkspaceFoldersChangeEvent,
}; };
use lsp_types as lsp; use lsp_types as lsp;
use parking_lot::Mutex; use parking_lot::Mutex;
@ -543,6 +544,10 @@ impl Client {
normalizes_line_endings: Some(false), normalizes_line_endings: Some(false),
change_annotation_support: None, change_annotation_support: None,
}), }),
did_change_watched_files: Some(lsp::DidChangeWatchedFilesClientCapabilities {
dynamic_registration: Some(true),
relative_pattern_support: Some(false),
}),
..Default::default() ..Default::default()
}), }),
text_document: Some(lsp::TextDocumentClientCapabilities { text_document: Some(lsp::TextDocumentClientCapabilities {
@ -609,6 +614,12 @@ impl Client {
.collect(), .collect(),
}, },
}), }),
is_preferred_support: Some(true),
disabled_support: Some(true),
data_support: Some(true),
resolve_support: Some(CodeActionCapabilityResolveSupport {
properties: vec!["edit".to_owned(), "command".to_owned()],
}),
..Default::default() ..Default::default()
}), }),
publish_diagnostics: Some(lsp::PublishDiagnosticsClientCapabilities { publish_diagnostics: Some(lsp::PublishDiagnosticsClientCapabilities {
@ -954,6 +965,24 @@ impl Client {
Some(self.call::<lsp::request::ResolveCompletionItem>(completion_item)) Some(self.call::<lsp::request::ResolveCompletionItem>(completion_item))
} }
pub fn resolve_code_action(
&self,
code_action: lsp::CodeAction,
) -> Option<impl Future<Output = Result<Value>>> {
let capabilities = self.capabilities.get().unwrap();
// Return early if the server does not support resolving code action.
match capabilities.completion_provider {
Some(lsp::CompletionOptions {
resolve_provider: Some(true),
..
}) => (),
_ => return None,
}
Some(self.call::<lsp::request::CodeActionResolveRequest>(code_action))
}
pub fn text_document_signature_help( pub fn text_document_signature_help(
&self, &self,
text_document: lsp::TextDocumentIdentifier, text_document: lsp::TextDocumentIdentifier,
@ -1428,4 +1457,13 @@ impl Client {
Some(self.call::<lsp::request::ExecuteCommand>(params)) Some(self.call::<lsp::request::ExecuteCommand>(params))
} }
pub fn did_change_watched_files(
&self,
changes: Vec<lsp::FileEvent>,
) -> impl Future<Output = std::result::Result<(), Error>> {
self.notify::<lsp::notification::DidChangeWatchedFiles>(lsp::DidChangeWatchedFilesParams {
changes,
})
}
} }

@ -0,0 +1,193 @@
use std::{collections::HashMap, path::PathBuf, sync::Weak};
use globset::{GlobBuilder, GlobSetBuilder};
use tokio::sync::mpsc;
use crate::{lsp, Client};
enum Event {
FileChanged {
path: PathBuf,
},
Register {
client_id: usize,
client: Weak<Client>,
registration_id: String,
options: lsp::DidChangeWatchedFilesRegistrationOptions,
},
Unregister {
client_id: usize,
registration_id: String,
},
RemoveClient {
client_id: usize,
},
}
#[derive(Default)]
struct ClientState {
client: Weak<Client>,
registered: HashMap<String, globset::GlobSet>,
}
/// The Handler uses a dedicated tokio task to respond to file change events by
/// forwarding changes to LSPs that have registered for notifications with a
/// matching glob.
///
/// When an LSP registers for the DidChangeWatchedFiles notification, the
/// Handler is notified by sending the registration details in addition to a
/// weak reference to the LSP client. This is done so that the Handler can have
/// access to the client without preventing the client from being dropped if it
/// is closed and the Handler isn't properly notified.
#[derive(Clone, Debug)]
pub struct Handler {
tx: mpsc::UnboundedSender<Event>,
}
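// A rough usage sketch (the `client`, `registration_id` and `options` values
// below are placeholders; in practice they come from the LSP registry when a
// server sends `client/registerCapability`):
//
//     let handler = Handler::new();
//     handler.register(client.id(), Arc::downgrade(&client), registration_id, options);
//     handler.file_changed(PathBuf::from("Cargo.toml"));
//     // ... later, when the server shuts down:
//     handler.remove_client(client.id());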
impl Default for Handler {
fn default() -> Self {
Self::new()
}
}
impl Handler {
pub fn new() -> Self {
let (tx, rx) = mpsc::unbounded_channel();
tokio::spawn(Self::run(rx));
Self { tx }
}
pub fn register(
&self,
client_id: usize,
client: Weak<Client>,
registration_id: String,
options: lsp::DidChangeWatchedFilesRegistrationOptions,
) {
let _ = self.tx.send(Event::Register {
client_id,
client,
registration_id,
options,
});
}
pub fn unregister(&self, client_id: usize, registration_id: String) {
let _ = self.tx.send(Event::Unregister {
client_id,
registration_id,
});
}
pub fn file_changed(&self, path: PathBuf) {
let _ = self.tx.send(Event::FileChanged { path });
}
pub fn remove_client(&self, client_id: usize) {
let _ = self.tx.send(Event::RemoveClient { client_id });
}
async fn run(mut rx: mpsc::UnboundedReceiver<Event>) {
let mut state: HashMap<usize, ClientState> = HashMap::new();
while let Some(event) = rx.recv().await {
match event {
Event::FileChanged { path } => {
log::debug!("Received file event for {:?}", &path);
state.retain(|id, client_state| {
if !client_state
.registered
.values()
.any(|glob| glob.is_match(&path))
{
return true;
}
let Some(client) = client_state.client.upgrade() else {
log::warn!("LSP client was dropped: {id}");
return false;
};
let Ok(uri) = lsp::Url::from_file_path(&path) else {
return true;
};
log::debug!(
"Sending didChangeWatchedFiles notification to client '{}'",
client.name()
);
if let Err(err) = crate::block_on(client
.did_change_watched_files(vec![lsp::FileEvent {
uri,
// We currently always send the CHANGED state
// since we don't actually have more context at
// the moment.
typ: lsp::FileChangeType::CHANGED,
}]))
{
log::warn!("Failed to send didChangeWatchedFiles notification to client: {err}");
}
true
});
}
Event::Register {
client_id,
client,
registration_id,
options: ops,
} => {
log::debug!(
"Registering didChangeWatchedFiles for client '{}' with id '{}'",
client_id,
registration_id
);
let entry = state.entry(client_id).or_insert_with(ClientState::default);
entry.client = client;
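// Only plain string globs are honored here; `lsp::GlobPattern::Relative`
// watchers and patterns that fail to compile are silently skipped. A server
// would typically register patterns such as "**/*.rs" or "**/Cargo.toml"
// (examples only).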
let mut builder = GlobSetBuilder::new();
for watcher in ops.watchers {
if let lsp::GlobPattern::String(pattern) = watcher.glob_pattern {
if let Ok(glob) = GlobBuilder::new(&pattern).build() {
builder.add(glob);
}
}
}
match builder.build() {
Ok(globset) => {
entry.registered.insert(registration_id, globset);
}
Err(err) => {
// Remove any old state for that registration id and
// remove the entire client if it's now empty.
entry.registered.remove(&registration_id);
if entry.registered.is_empty() {
state.remove(&client_id);
}
log::warn!(
"Unable to build globset for LSP didChangeWatchedFiles {err}"
)
}
}
}
Event::Unregister {
client_id,
registration_id,
} => {
log::debug!(
"Unregistering didChangeWatchedFiles with id '{}' for client '{}'",
registration_id,
client_id
);
if let Some(client_state) = state.get_mut(&client_id) {
client_state.registered.remove(&registration_id);
if client_state.registered.is_empty() {
state.remove(&client_id);
}
}
}
Event::RemoveClient { client_id } => {
log::debug!("Removing LSP client: {client_id}");
state.remove(&client_id);
}
}
}
}
}

@ -1,4 +1,5 @@
mod client; mod client;
pub mod file_event;
pub mod jsonrpc; pub mod jsonrpc;
pub mod snippet; pub mod snippet;
mod transport; mod transport;
@ -547,6 +548,7 @@ pub enum MethodCall {
WorkspaceFolders, WorkspaceFolders,
WorkspaceConfiguration(lsp::ConfigurationParams), WorkspaceConfiguration(lsp::ConfigurationParams),
RegisterCapability(lsp::RegistrationParams), RegisterCapability(lsp::RegistrationParams),
UnregisterCapability(lsp::UnregistrationParams),
} }
impl MethodCall { impl MethodCall {
@ -570,6 +572,10 @@ impl MethodCall {
let params: lsp::RegistrationParams = params.parse()?; let params: lsp::RegistrationParams = params.parse()?;
Self::RegisterCapability(params) Self::RegisterCapability(params)
} }
lsp::request::UnregisterCapability::METHOD => {
let params: lsp::UnregistrationParams = params.parse()?;
Self::UnregisterCapability(params)
}
_ => { _ => {
return Err(Error::Unhandled); return Err(Error::Unhandled);
} }
@ -629,6 +635,7 @@ pub struct Registry {
syn_loader: Arc<helix_core::syntax::Loader>, syn_loader: Arc<helix_core::syntax::Loader>,
counter: usize, counter: usize,
pub incoming: SelectAll<UnboundedReceiverStream<(usize, Call)>>, pub incoming: SelectAll<UnboundedReceiverStream<(usize, Call)>>,
pub file_event_handler: file_event::Handler,
} }
impl Registry { impl Registry {
@ -638,6 +645,7 @@ impl Registry {
syn_loader, syn_loader,
counter: 0, counter: 0,
incoming: SelectAll::new(), incoming: SelectAll::new(),
file_event_handler: file_event::Handler::new(),
} }
} }
@ -650,6 +658,7 @@ impl Registry {
} }
pub fn remove_by_id(&mut self, id: usize) { pub fn remove_by_id(&mut self, id: usize) {
self.file_event_handler.remove_client(id);
self.inner.retain(|_, language_servers| { self.inner.retain(|_, language_servers| {
language_servers.retain(|ls| id != ls.id()); language_servers.retain(|ls| id != ls.id());
!language_servers.is_empty() !language_servers.is_empty()
@ -715,6 +724,7 @@ impl Registry {
.unwrap(); .unwrap();
for old_client in old_clients { for old_client in old_clients {
self.file_event_handler.remove_client(old_client.id());
tokio::spawn(async move { tokio::spawn(async move {
let _ = old_client.force_shutdown().await; let _ = old_client.force_shutdown().await;
}); });
@ -731,6 +741,7 @@ impl Registry {
pub fn stop(&mut self, name: &str) { pub fn stop(&mut self, name: &str) {
if let Some(clients) = self.inner.remove(name) { if let Some(clients) = self.inner.remove(name) {
for client in clients { for client in clients {
self.file_event_handler.remove_client(client.id());
tokio::spawn(async move { tokio::spawn(async move {
let _ = client.force_shutdown().await; let _ = client.force_shutdown().await;
}); });
@ -931,7 +942,7 @@ pub fn find_lsp_workspace(
let mut file = if file.is_absolute() { let mut file = if file.is_absolute() {
file.to_path_buf() file.to_path_buf()
} else { } else {
let current_dir = std::env::current_dir().expect("unable to determine current directory"); let current_dir = helix_loader::current_working_dir();
current_dir.join(file) current_dir.join(file)
}; };
file = path::get_normalized_path(&file); file = path::get_normalized_path(&file);

@ -37,7 +37,7 @@ which = "4.4"
tokio = { version = "1", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot"] } tokio = { version = "1", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot"] }
tui = { path = "../helix-tui", package = "helix-tui", default-features = false, features = ["crossterm"] } tui = { path = "../helix-tui", package = "helix-tui", default-features = false, features = ["crossterm"] }
crossterm = { version = "0.26", features = ["event-stream"] } crossterm = { version = "0.27", features = ["event-stream"] }
signal-hook = "0.3" signal-hook = "0.3"
tokio-stream = "0.1" tokio-stream = "0.1"
futures-util = { version = "0.3", features = ["std", "async-await"], default-features = false } futures-util = { version = "0.3", features = ["std", "async-await"], default-features = false }
@ -75,10 +75,13 @@ dlopen_derive = "0.1.4"
signal-hook-tokio = { version = "0.3", features = ["futures-v0_3"] } signal-hook-tokio = { version = "0.3", features = ["futures-v0_3"] }
libc = "0.2.147" libc = "0.2.147"
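# On macOS, crossterm's "use-dev-tty" feature reads terminal events from
# /dev/tty instead of stdin, which is what makes piping into helix-term
# possible there (see the removal of the macOS-specific bail in application.rs).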
[target.'cfg(target_os = "macos")'.dependencies]
crossterm = { version = "0.27", features = ["event-stream", "use-dev-tty"] }
[build-dependencies] [build-dependencies]
helix-loader = { version = "0.6", path = "../helix-loader" } helix-loader = { version = "0.6", path = "../helix-loader" }
[dev-dependencies] [dev-dependencies]
smallvec = "1.10" smallvec = "1.11"
indoc = "2.0.1" indoc = "2.0.3"
tempfile = "3.6.0" tempfile = "3.8.0"

@ -5,7 +5,11 @@ use helix_core::{
path::get_relative_path, path::get_relative_path,
pos_at_coords, syntax, Selection, pos_at_coords, syntax, Selection,
}; };
use helix_lsp::{lsp, util::lsp_pos_to_pos, LspProgressMap}; use helix_lsp::{
lsp::{self, notification::Notification},
util::lsp_pos_to_pos,
LspProgressMap,
};
use helix_view::{ use helix_view::{
align_view, align_view,
document::DocumentSavedEventResult, document::DocumentSavedEventResult,
@ -29,13 +33,9 @@ use crate::{
}; };
use log::{debug, error, warn}; use log::{debug, error, warn};
use std::{ #[cfg(not(feature = "integration"))]
collections::btree_map::Entry, use std::io::stdout;
io::{stdin, stdout}, use std::{collections::btree_map::Entry, io::stdin, path::Path, sync::Arc};
path::Path,
sync::Arc,
time::{Duration, Instant},
};
use anyhow::{Context, Error}; use anyhow::{Context, Error};
@ -45,8 +45,6 @@ use {signal_hook::consts::signal, signal_hook_tokio::Signals};
#[cfg(windows)] #[cfg(windows)]
type Signals = futures_util::stream::Empty<()>; type Signals = futures_util::stream::Empty<()>;
const LSP_DEADLINE: Duration = Duration::from_millis(16);
#[cfg(not(feature = "integration"))] #[cfg(not(feature = "integration"))]
use tui::backend::CrosstermBackend; use tui::backend::CrosstermBackend;
@ -76,7 +74,6 @@ pub struct Application {
signals: Signals, signals: Signals,
jobs: Jobs, jobs: Jobs,
lsp_progress: LspProgressMap, lsp_progress: LspProgressMap,
last_render: Instant,
} }
#[cfg(feature = "integration")] #[cfg(feature = "integration")]
@ -163,11 +160,11 @@ impl Application {
let path = helix_loader::runtime_file(Path::new("tutor")); let path = helix_loader::runtime_file(Path::new("tutor"));
editor.open(&path, Action::VerticalSplit)?; editor.open(&path, Action::VerticalSplit)?;
// Unset path to prevent accidentally saving to the original tutor file. // Unset path to prevent accidentally saving to the original tutor file.
doc_mut!(editor).set_path(None)?; doc_mut!(editor).set_path(None);
} else if !args.files.is_empty() { } else if !args.files.is_empty() {
let first = &args.files[0].0; // we know it's not empty let first = &args.files[0].0; // we know it's not empty
if first.is_dir() { if first.is_dir() {
std::env::set_current_dir(first).context("set current dir")?; helix_loader::set_current_working_dir(first.clone())?;
editor.new_file(Action::VerticalSplit); editor.new_file(Action::VerticalSplit);
let picker = ui::file_picker(".".into(), &config.load().editor); let picker = ui::file_picker(".".into(), &config.load().editor);
compositor.push(Box::new(overlaid(picker))); compositor.push(Box::new(overlaid(picker)));
@ -215,11 +212,6 @@ impl Application {
} }
} else if stdin().is_tty() || cfg!(feature = "integration") { } else if stdin().is_tty() || cfg!(feature = "integration") {
editor.new_file(Action::VerticalSplit); editor.new_file(Action::VerticalSplit);
} else if cfg!(target_os = "macos") {
// On Linux and Windows, we allow the output of a command to be piped into the new buffer.
// This doesn't currently work on macOS because of the following issue:
// https://github.com/crossterm-rs/crossterm/issues/500
anyhow::bail!("Piping into helix-term is currently not supported on macOS");
} else { } else {
editor editor
.new_file_from_stdin(Action::VerticalSplit) .new_file_from_stdin(Action::VerticalSplit)
@ -253,7 +245,6 @@ impl Application {
signals, signals,
jobs: Jobs::new(), jobs: Jobs::new(),
lsp_progress: LspProgressMap::new(), lsp_progress: LspProgressMap::new(),
last_render: Instant::now(),
}; };
{ {
@ -310,10 +301,9 @@ impl Application {
pub async fn event_loop<S>(&mut self, input_stream: &mut S) pub async fn event_loop<S>(&mut self, input_stream: &mut S)
where where
S: Stream<Item = crossterm::Result<crossterm::event::Event>> + Unpin, S: Stream<Item = std::io::Result<crossterm::event::Event>> + Unpin,
{ {
self.render().await; self.render().await;
self.last_render = Instant::now();
loop { loop {
if !self.event_loop_until_idle(input_stream).await { if !self.event_loop_until_idle(input_stream).await {
@ -324,7 +314,7 @@ impl Application {
pub async fn event_loop_until_idle<S>(&mut self, input_stream: &mut S) -> bool pub async fn event_loop_until_idle<S>(&mut self, input_stream: &mut S) -> bool
where where
S: Stream<Item = crossterm::Result<crossterm::event::Event>> + Unpin, S: Stream<Item = std::io::Result<crossterm::event::Event>> + Unpin,
{ {
loop { loop {
if self.editor.should_close() { if self.editor.should_close() {
@ -582,16 +572,7 @@ impl Application {
let bytes = doc_save_event.text.len_bytes(); let bytes = doc_save_event.text.len_bytes();
if doc.path() != Some(&doc_save_event.path) { if doc.path() != Some(&doc_save_event.path) {
if let Err(err) = doc.set_path(Some(&doc_save_event.path)) { doc.set_path(Some(&doc_save_event.path));
log::error!(
"error setting path for doc '{:?}': {}",
doc.path(),
err.to_string(),
);
self.editor.set_error(err.to_string());
return;
}
let loader = self.editor.syn_loader.clone(); let loader = self.editor.syn_loader.clone();
@ -627,12 +608,7 @@ impl Application {
EditorEvent::LanguageServerMessage((id, call)) => { EditorEvent::LanguageServerMessage((id, call)) => {
self.handle_language_server_message(call, id).await; self.handle_language_server_message(call, id).await;
// limit render calls for fast language server messages // limit render calls for fast language server messages
let last = self.editor.language_servers.incoming.is_empty(); self.editor.redraw_handle.0.notify_one();
if last || self.last_render.elapsed() > LSP_DEADLINE {
self.render().await;
self.last_render = Instant::now();
}
} }
EditorEvent::DebuggerEvent(payload) => { EditorEvent::DebuggerEvent(payload) => {
let needs_render = self.editor.handle_debugger_message(payload).await; let needs_render = self.editor.handle_debugger_message(payload).await;
@ -640,6 +616,9 @@ impl Application {
self.render().await; self.render().await;
} }
} }
EditorEvent::Redraw => {
self.render().await;
}
EditorEvent::IdleTimer => { EditorEvent::IdleTimer => {
self.editor.clear_idle_timer(); self.editor.clear_idle_timer();
self.handle_idle_timeout().await; self.handle_idle_timeout().await;
@ -654,10 +633,7 @@ impl Application {
false false
} }
pub async fn handle_terminal_events( pub async fn handle_terminal_events(&mut self, event: std::io::Result<CrosstermEvent>) {
&mut self,
event: Result<CrosstermEvent, crossterm::ErrorKind>,
) {
let mut cx = crate::compositor::Context { let mut cx = crate::compositor::Context {
editor: &mut self.editor, editor: &mut self.editor,
jobs: &mut self.jobs, jobs: &mut self.jobs,
@ -1114,17 +1090,65 @@ impl Application {
.collect(); .collect();
Ok(json!(result)) Ok(json!(result))
} }
Ok(MethodCall::RegisterCapability(_params)) => { Ok(MethodCall::RegisterCapability(params)) => {
log::warn!("Ignoring a client/registerCapability request because dynamic capability registration is not enabled. Please report this upstream to the language server"); if let Some(client) = self
.editor
.language_servers
.iter_clients()
.find(|client| client.id() == server_id)
{
for reg in params.registrations {
match reg.method.as_str() {
lsp::notification::DidChangeWatchedFiles::METHOD => {
let Some(options) = reg.register_options else {
continue;
};
let ops: lsp::DidChangeWatchedFilesRegistrationOptions =
match serde_json::from_value(options) {
Ok(ops) => ops,
Err(err) => {
log::warn!("Failed to deserialize DidChangeWatchedFilesRegistrationOptions: {err}");
continue;
}
};
self.editor.language_servers.file_event_handler.register(
client.id(),
Arc::downgrade(client),
reg.id,
ops,
)
}
_ => {
// Language Servers based on the `vscode-languageserver-node` library often send // Language Servers based on the `vscode-languageserver-node` library often send
// client/registerCapability even though we do not enable dynamic registration // client/registerCapability even though we do not enable dynamic registration
// for any capabilities. We should send a MethodNotFound JSONRPC error in this // for most capabilities. We should send a MethodNotFound JSONRPC error in this
// case but that rejects the registration promise in the server which causes an // case but that rejects the registration promise in the server which causes an
// exit. So we work around this by ignoring the request and sending back an OK // exit. So we work around this by ignoring the request and sending back an OK
// response. // response.
log::warn!("Ignoring a client/registerCapability request because dynamic capability registration is not enabled. Please report this upstream to the language server");
}
}
}
}
Ok(serde_json::Value::Null) Ok(serde_json::Value::Null)
} }
Ok(MethodCall::UnregisterCapability(params)) => {
for unreg in params.unregisterations {
match unreg.method.as_str() {
lsp::notification::DidChangeWatchedFiles::METHOD => {
self.editor
.language_servers
.file_event_handler
.unregister(server_id, unreg.id);
}
_ => {
log::warn!("Received unregistration request for unsupported method: {}", unreg.method);
}
}
}
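// Unsupported unregistrations are only logged; we still reply with an OK
// result below so the server's unregistration promise resolves.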
Ok(serde_json::Value::Null)
}
}; };
tokio::spawn(language_server!().reply(id, reply)); tokio::spawn(language_server!().reply(id, reply));
@ -1150,7 +1174,7 @@ impl Application {
pub async fn run<S>(&mut self, input_stream: &mut S) -> Result<i32, Error> pub async fn run<S>(&mut self, input_stream: &mut S) -> Result<i32, Error>
where where
S: Stream<Item = crossterm::Result<crossterm::event::Event>> + Unpin, S: Stream<Item = std::io::Result<crossterm::event::Event>> + Unpin,
{ {
self.claim_term().await?; self.claim_term().await?;

File diff suppressed because it is too large Load Diff

@ -217,7 +217,7 @@ pub fn dap_start_impl(
} }
} }
args.insert("cwd", to_value(std::env::current_dir().unwrap())?); args.insert("cwd", to_value(helix_loader::current_working_dir())?);
let args = to_value(args).unwrap(); let args = to_value(args).unwrap();
@ -339,8 +339,12 @@ fn debug_parameter_prompt(
.to_owned(); .to_owned();
let completer = match field_type { let completer = match field_type {
"filename" => ui::completers::filename, "filename" => |editor: &Editor, input: &str| {
"directory" => ui::completers::directory, ui::completers::filename_with_git_ignore(editor, input, false)
},
"directory" => |editor: &Editor, input: &str| {
ui::completers::directory_with_git_ignore(editor, input, false)
},
_ => ui::completers::none, _ => ui::completers::none,
}; };

@ -1712,7 +1712,7 @@ fn await_value(cx: &mut Context, value: SteelVal, callback_fn: SteelVal) {
} }
// Check that we successfully created a directory? // Check that we successfully created a directory?
fn create_directory(path: String) { fn create_directory(path: String) {
let path = helix_core::path::get_canonicalized_path(&PathBuf::from(path)).unwrap(); let path = helix_core::path::get_canonicalized_path(&PathBuf::from(path));
if path.exists() { if path.exists() {
return; return;

@ -195,7 +195,6 @@ fn location_to_file_location(location: &lsp::Location) -> FileLocation {
(path.into(), line) (path.into(), line)
} }
// TODO: share with symbol picker(symbol.location)
fn jump_to_location( fn jump_to_location(
editor: &mut Editor, editor: &mut Editor,
location: &lsp::Location, location: &lsp::Location,
@ -213,15 +212,16 @@ fn jump_to_location(
return; return;
} }
}; };
match editor.open(&path, action) {
Ok(_) => (), let doc = match editor.open(&path, action) {
Ok(id) => doc_mut!(editor, &id),
Err(err) => { Err(err) => {
let err = format!("failed to open path: {:?}: {:?}", location.uri, err); let err = format!("failed to open path: {:?}: {:?}", location.uri, err);
editor.set_error(err); editor.set_error(err);
return; return;
} }
} };
let (view, doc) = current!(editor); let view = view_mut!(editor);
// TODO: convert inside server // TODO: convert inside server
let new_range = let new_range =
if let Some(new_range) = lsp_range_to_range(doc.text(), location.range, offset_encoding) { if let Some(new_range) = lsp_range_to_range(doc.text(), location.range, offset_encoding) {
@ -233,45 +233,22 @@ fn jump_to_location(
// we flip the range so that the cursor sits on the start of the symbol // we flip the range so that the cursor sits on the start of the symbol
// (for example start of the function). // (for example start of the function).
doc.set_selection(view.id, Selection::single(new_range.head, new_range.anchor)); doc.set_selection(view.id, Selection::single(new_range.head, new_range.anchor));
if action.align_view(view, doc.id()) {
align_view(doc, view, Align::Center); align_view(doc, view, Align::Center);
} }
}
type SymbolPicker = Picker<SymbolInformationItem>; type SymbolPicker = Picker<SymbolInformationItem>;
fn sym_picker(symbols: Vec<SymbolInformationItem>, current_path: Option<lsp::Url>) -> SymbolPicker { fn sym_picker(symbols: Vec<SymbolInformationItem>, current_path: Option<lsp::Url>) -> SymbolPicker {
// TODO: drop current_path comparison and instead use workspace: bool flag? // TODO: drop current_path comparison and instead use workspace: bool flag?
Picker::new(symbols, current_path.clone(), move |cx, item, action| { Picker::new(symbols, current_path, move |cx, item, action| {
let (view, doc) = current!(cx.editor); jump_to_location(
push_jump(view, doc); cx.editor,
&item.symbol.location,
if current_path.as_ref() != Some(&item.symbol.location.uri) { item.offset_encoding,
let uri = &item.symbol.location.uri; action,
let path = match uri.to_file_path() { );
Ok(path) => path,
Err(_) => {
let err = format!("unable to convert URI to filepath: {}", uri);
cx.editor.set_error(err);
return;
}
};
if let Err(err) = cx.editor.open(&path, action) {
let err = format!("failed to open document: {}: {}", uri, err);
log::error!("{}", err);
cx.editor.set_error(err);
return;
}
}
let (view, doc) = current!(cx.editor);
if let Some(range) =
lsp_range_to_range(doc.text(), item.symbol.location.range, item.offset_encoding)
{
// we flip the range so that the cursor sits on the start of the symbol
// (for example start of the function).
doc.set_selection(view.id, Selection::single(range.head, range.anchor));
align_view(doc, view, Align::Center);
}
}) })
.with_preview(move |_editor, item| Some(location_to_file_location(&item.symbol.location))) .with_preview(move |_editor, item| Some(location_to_file_location(&item.symbol.location)))
.truncate_start(false) .truncate_start(false)
@ -286,7 +263,7 @@ enum DiagnosticsFormat {
fn diag_picker( fn diag_picker(
cx: &Context, cx: &Context,
diagnostics: BTreeMap<lsp::Url, Vec<(lsp::Diagnostic, usize)>>, diagnostics: BTreeMap<lsp::Url, Vec<(lsp::Diagnostic, usize)>>,
current_path: Option<lsp::Url>, _current_path: Option<lsp::Url>,
format: DiagnosticsFormat, format: DiagnosticsFormat,
) -> Picker<PickerDiagnostic> { ) -> Picker<PickerDiagnostic> {
// TODO: drop current_path comparison and instead use workspace: bool flag? // TODO: drop current_path comparison and instead use workspace: bool flag?
@ -324,22 +301,12 @@ fn diag_picker(
offset_encoding, offset_encoding,
}, },
action| { action| {
if current_path.as_ref() == Some(url) { jump_to_location(
let (view, doc) = current!(cx.editor); cx.editor,
push_jump(view, doc); &lsp::Location::new(url.clone(), diag.range),
} else { *offset_encoding,
let path = url.to_file_path().unwrap(); action,
cx.editor.open(&path, action).expect("editor.open failed"); )
}
let (view, doc) = current!(cx.editor);
if let Some(range) = lsp_range_to_range(doc.text(), diag.range, *offset_encoding) {
// we flip the range so that the cursor sits on the start of the symbol
// (for example start of the function).
doc.set_selection(view.id, Selection::single(range.head, range.anchor));
align_view(doc, view, Align::Center);
}
}, },
) )
.with_preview(move |_editor, PickerDiagnostic { url, diag, .. }| { .with_preview(move |_editor, PickerDiagnostic { url, diag, .. }| {
@ -730,7 +697,8 @@ pub fn code_action(cx: &mut Context) {
// always present here // always present here
let action = action.unwrap(); let action = action.unwrap();
let Some(language_server) = editor.language_server_by_id(action.language_server_id) else { let Some(language_server) = editor.language_server_by_id(action.language_server_id)
else {
editor.set_error("Language Server disappeared"); editor.set_error("Language Server disappeared");
return; return;
}; };
@ -743,7 +711,25 @@ pub fn code_action(cx: &mut Context) {
} }
lsp::CodeActionOrCommand::CodeAction(code_action) => { lsp::CodeActionOrCommand::CodeAction(code_action) => {
log::debug!("code action: {:?}", code_action); log::debug!("code action: {:?}", code_action);
if let Some(ref workspace_edit) = code_action.edit { // We support the LSP "codeAction/resolve" request to lazily fill in
// the `edit` and `command` fields when the server leaves them unset.
let mut resolved_code_action = None;
if code_action.edit.is_none() || code_action.command.is_none() {
if let Some(future) =
language_server.resolve_code_action(code_action.clone())
{
if let Ok(response) = helix_lsp::block_on(future) {
if let Ok(code_action) =
serde_json::from_value::<CodeAction>(response)
{
resolved_code_action = Some(code_action);
}
}
}
}
let resolved_code_action =
resolved_code_action.as_ref().unwrap_or(code_action);
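// If the resolve request fails or the server does not support it, fall back
// to the original, unresolved code action.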
if let Some(ref workspace_edit) = resolved_code_action.edit {
log::debug!("edit: {:?}", workspace_edit); log::debug!("edit: {:?}", workspace_edit);
let _ = apply_workspace_edit(editor, offset_encoding, workspace_edit); let _ = apply_workspace_edit(editor, offset_encoding, workspace_edit);
} }
@ -1033,7 +1019,7 @@ fn goto_impl(
locations: Vec<lsp::Location>, locations: Vec<lsp::Location>,
offset_encoding: OffsetEncoding, offset_encoding: OffsetEncoding,
) { ) {
let cwdir = std::env::current_dir().unwrap_or_default(); let cwdir = helix_loader::current_working_dir();
match locations.as_slice() { match locations.as_slice() {
[location] => { [location] => {
@ -1173,7 +1159,8 @@ pub fn signature_help_impl(cx: &mut Context, invoked: SignatureHelpInvoked) {
// Do not show the message if signature help was invoked // Do not show the message if signature help was invoked
// automatically on backspace, trigger characters, etc. // automatically on backspace, trigger characters, etc.
if invoked == SignatureHelpInvoked::Manual { if invoked == SignatureHelpInvoked::Manual {
cx.editor.set_error("No configured language server supports signature-help"); cx.editor
.set_error("No configured language server supports signature-help");
} }
return; return;
}; };
@ -1398,7 +1385,8 @@ pub fn rename_symbol(cx: &mut Context) {
.language_servers_with_feature(LanguageServerFeature::RenameSymbol) .language_servers_with_feature(LanguageServerFeature::RenameSymbol)
.find(|ls| language_server_id.map_or(true, |id| id == ls.id())) .find(|ls| language_server_id.map_or(true, |id| id == ls.id()))
else { else {
cx.editor.set_error("No configured language server supports symbol renaming"); cx.editor
.set_error("No configured language server supports symbol renaming");
return; return;
}; };

@ -683,7 +683,7 @@ pub fn write_all_impl(
if doc.path().is_none() && doc.name.is_none() { if doc.path().is_none() && doc.name.is_none() {
if write_scratch { if write_scratch {
errors.push("cannot write a buffer without a filename\n"); errors.push("cannot write a buffer without a filename");
} }
return None; return None;
} }
@ -750,6 +750,18 @@ fn write_all(
write_all_impl(cx, false, true) write_all_impl(cx, false, true)
} }
fn force_write_all(
cx: &mut compositor::Context,
_args: &[Cow<str>],
event: PromptEvent,
) -> anyhow::Result<()> {
if event != PromptEvent::Validate {
return Ok(());
}
write_all_impl(cx, true, true)
}
fn write_all_quit( fn write_all_quit(
cx: &mut compositor::Context, cx: &mut compositor::Context,
_args: &[Cow<str>], _args: &[Cow<str>],
@ -903,7 +915,8 @@ fn yank_main_selection_to_clipboard(
return Ok(()); return Ok(());
} }
yank_main_selection_to_clipboard_impl(cx.editor, ClipboardType::Clipboard) yank_primary_selection_impl(cx.editor, '*');
Ok(())
} }
fn yank_joined( fn yank_joined(
@ -937,7 +950,8 @@ fn yank_joined_to_clipboard(
let doc = doc!(cx.editor); let doc = doc!(cx.editor);
let default_sep = Cow::Borrowed(doc.line_ending.as_str()); let default_sep = Cow::Borrowed(doc.line_ending.as_str());
let separator = args.first().unwrap_or(&default_sep); let separator = args.first().unwrap_or(&default_sep);
yank_joined_to_clipboard_impl(cx.editor, separator, ClipboardType::Clipboard) yank_joined_impl(cx.editor, separator, '*');
Ok(())
} }
fn yank_main_selection_to_primary_clipboard( fn yank_main_selection_to_primary_clipboard(
@ -949,7 +963,8 @@ fn yank_main_selection_to_primary_clipboard(
return Ok(()); return Ok(());
} }
yank_main_selection_to_clipboard_impl(cx.editor, ClipboardType::Selection) yank_primary_selection_impl(cx.editor, '+');
Ok(())
} }
fn yank_joined_to_primary_clipboard( fn yank_joined_to_primary_clipboard(
@ -964,7 +979,8 @@ fn yank_joined_to_primary_clipboard(
let doc = doc!(cx.editor); let doc = doc!(cx.editor);
let default_sep = Cow::Borrowed(doc.line_ending.as_str()); let default_sep = Cow::Borrowed(doc.line_ending.as_str());
let separator = args.first().unwrap_or(&default_sep); let separator = args.first().unwrap_or(&default_sep);
yank_joined_to_clipboard_impl(cx.editor, separator, ClipboardType::Selection) yank_joined_impl(cx.editor, separator, '+');
Ok(())
} }
fn paste_clipboard_after( fn paste_clipboard_after(
@ -976,7 +992,8 @@ fn paste_clipboard_after(
return Ok(()); return Ok(());
} }
paste_clipboard_impl(cx.editor, Paste::After, ClipboardType::Clipboard, 1) paste(cx.editor, '*', Paste::After, 1);
Ok(())
} }
fn paste_clipboard_before( fn paste_clipboard_before(
@ -988,7 +1005,8 @@ fn paste_clipboard_before(
return Ok(()); return Ok(());
} }
paste_clipboard_impl(cx.editor, Paste::Before, ClipboardType::Clipboard, 1) paste(cx.editor, '*', Paste::Before, 1);
Ok(())
} }
fn paste_primary_clipboard_after( fn paste_primary_clipboard_after(
@ -1000,7 +1018,8 @@ fn paste_primary_clipboard_after(
return Ok(()); return Ok(());
} }
paste_clipboard_impl(cx.editor, Paste::After, ClipboardType::Selection, 1) paste(cx.editor, '+', Paste::After, 1);
Ok(())
} }
fn paste_primary_clipboard_before( fn paste_primary_clipboard_before(
@ -1012,31 +1031,9 @@ fn paste_primary_clipboard_before(
return Ok(()); return Ok(());
} }
paste_clipboard_impl(cx.editor, Paste::Before, ClipboardType::Selection, 1) paste(cx.editor, '+', Paste::Before, 1);
}
fn replace_selections_with_clipboard_impl(
cx: &mut compositor::Context,
clipboard_type: ClipboardType,
) -> anyhow::Result<()> {
let scrolloff = cx.editor.config().scrolloff;
let (view, doc) = current!(cx.editor);
match cx.editor.clipboard_provider.get_contents(clipboard_type) {
Ok(contents) => {
let selection = doc.selection(view.id);
let transaction = Transaction::change_by_selection(doc.text(), selection, |range| {
(range.from(), range.to(), Some(contents.as_str().into()))
});
doc.apply(&transaction, view.id);
doc.append_changes_to_history(view);
view.ensure_cursor_in_view(doc, scrolloff);
Ok(()) Ok(())
} }
Err(e) => Err(e.context("Couldn't get system clipboard contents")),
}
}
fn replace_selections_with_clipboard( fn replace_selections_with_clipboard(
cx: &mut compositor::Context, cx: &mut compositor::Context,
@ -1047,7 +1044,8 @@ fn replace_selections_with_clipboard(
return Ok(()); return Ok(());
} }
replace_selections_with_clipboard_impl(cx, ClipboardType::Clipboard) replace_with_yanked_impl(cx.editor, '*', 1);
Ok(())
} }
fn replace_selections_with_primary_clipboard( fn replace_selections_with_primary_clipboard(
@ -1059,7 +1057,8 @@ fn replace_selections_with_primary_clipboard(
return Ok(()); return Ok(());
} }
replace_selections_with_clipboard_impl(cx, ClipboardType::Selection) replace_with_yanked_impl(cx.editor, '+', 1);
Ok(())
} }
fn show_clipboard_provider( fn show_clipboard_provider(
@ -1072,7 +1071,7 @@ fn show_clipboard_provider(
} }
cx.editor cx.editor
.set_status(cx.editor.clipboard_provider.name().to_string()); .set_status(cx.editor.registers.clipboard_provider_name().to_string());
Ok(()) Ok(())
} }
@ -1092,14 +1091,11 @@ fn change_current_directory(
.as_ref(), .as_ref(),
); );
if let Err(e) = std::env::set_current_dir(dir) { helix_loader::set_current_working_dir(dir)?;
bail!("Couldn't change the current working directory: {}", e);
}
let cwd = std::env::current_dir().context("Couldn't get the new working directory")?;
cx.editor.set_status(format!( cx.editor.set_status(format!(
"Current working directory is now {}", "Current working directory is now {}",
cwd.display() helix_loader::current_working_dir().display()
)); ));
Ok(()) Ok(())
} }
@ -1113,9 +1109,14 @@ fn show_current_directory(
return Ok(()); return Ok(());
} }
let cwd = std::env::current_dir().context("Couldn't get the new working directory")?; let cwd = helix_loader::current_working_dir();
cx.editor let message = format!("Current working directory is {}", cwd.display());
.set_status(format!("Current working directory is {}", cwd.display()));
if cwd.exists() {
cx.editor.set_status(message);
} else {
cx.editor.set_error(format!("{} (deleted)", message));
}
Ok(()) Ok(())
} }
@ -1280,7 +1281,14 @@ fn reload(
doc.reload(view, &cx.editor.diff_providers, redraw_handle) doc.reload(view, &cx.editor.diff_providers, redraw_handle)
.map(|_| { .map(|_| {
view.ensure_cursor_in_view(doc, scrolloff); view.ensure_cursor_in_view(doc, scrolloff);
}) })?;
if let Some(path) = doc.path() {
cx.editor
.language_servers
.file_event_handler
.file_changed(path.clone());
}
Ok(())
} }
fn reload_all( fn reload_all(
@ -1321,6 +1329,12 @@ fn reload_all(
let redraw_handle = cx.editor.redraw_handle.clone(); let redraw_handle = cx.editor.redraw_handle.clone();
doc.reload(view, &cx.editor.diff_providers, redraw_handle)?; doc.reload(view, &cx.editor.diff_providers, redraw_handle)?;
if let Some(path) = doc.path() {
cx.editor
.language_servers
.file_event_handler
.file_changed(path.clone());
}
for view_id in view_ids { for view_id in view_ids {
let view = view_mut!(cx.editor, view_id); let view = view_mut!(cx.editor, view_id);
@ -1369,9 +1383,8 @@ fn lsp_workspace_command(
.map(|options| (ls.id(), options)) .map(|options| (ls.id(), options))
}) })
else { else {
cx.editor.set_status( cx.editor
"No active language servers for this document support workspace commands", .set_status("No active language servers for this document support workspace commands");
);
return Ok(()); return Ok(());
}; };
@ -1677,7 +1690,7 @@ fn tutor(
let path = helix_loader::runtime_file(Path::new("tutor")); let path = helix_loader::runtime_file(Path::new("tutor"));
cx.editor.open(&path, Action::Replace)?; cx.editor.open(&path, Action::Replace)?;
// Unset path to prevent accidentally saving to the original tutor file. // Unset path to prevent accidentally saving to the original tutor file.
doc_mut!(cx.editor).set_path(None)?; doc_mut!(cx.editor).set_path(None);
Ok(()) Ok(())
} }
@ -1854,14 +1867,29 @@ fn toggle_option(
.to_string(), .to_string(),
) )
} }
Value::Null | Value::Object(_) | Value::Array(_) | Value::Number(_) => { Value::Number(ref value) => {
ensure!(
args.len() > 2,
"Bad arguments. For number configurations use: `:toggle key val1 val2 ...`",
);
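// Cycle to the value that follows the current one in the provided list,
// wrapping back to the first value at the end. For example (hypothetical
// invocation), `:toggle text-width 80 100 120` steps 80 -> 100 -> 120 -> 80.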
Value::Number(
args[1..]
.iter()
.skip_while(|&e| value.to_string() != *e.to_string())
.nth(1)
.unwrap_or_else(|| &args[1])
.parse()?,
)
}
Value::Null | Value::Object(_) | Value::Array(_) => {
anyhow::bail!("Configuration {key} does not support toggle yet") anyhow::bail!("Configuration {key} does not support toggle yet")
} }
}; };
let status = format!("'{key}' is now set to {value}"); let status = format!("'{key}' is now set to {value}");
let config = serde_json::from_value(config) let config = serde_json::from_value(config)
.map_err(|_| anyhow::anyhow!("Could not parse field: `{:?}`", &args))?; .map_err(|err| anyhow::anyhow!("Cannot parse `{:?}`, {}", &args, err))?;
cx.editor cx.editor
.config_events .config_events
@ -2307,13 +2335,12 @@ fn clear_register(
format!("Invalid register {}", args[0]) format!("Invalid register {}", args[0])
); );
let register = args[0].chars().next().unwrap_or_default(); let register = args[0].chars().next().unwrap_or_default();
match cx.editor.registers.remove(register) { if cx.editor.registers.remove(register) {
Some(_) => cx cx.editor
.editor .set_status(format!("Register {} cleared", register));
.set_status(format!("Register {} cleared", register)), } else {
None => cx cx.editor
.editor .set_error(format!("Register {} not found", register));
.set_error(format!("Register {} not found", register)),
} }
Ok(()) Ok(())
} }

@ -120,6 +120,7 @@ impl Config {
)?, )?,
} }
} }
// These are just two IO errors; return the one for the global config. // These are just two IO errors; return the one for the global config.
(Err(err), Err(_)) => return Err(err), (Err(err), Err(_)) => return Err(err),
}; };

@ -88,6 +88,8 @@ pub fn default() -> HashMap<Mode, KeyTrie> {
"A-i" | "A-down" => shrink_selection, "A-i" | "A-down" => shrink_selection,
"A-p" | "A-left" => select_prev_sibling, "A-p" | "A-left" => select_prev_sibling,
"A-n" | "A-right" => select_next_sibling, "A-n" | "A-right" => select_next_sibling,
"A-e" => move_parent_node_end,
"A-b" => move_parent_node_start,
"%" => select_all, "%" => select_all,
"x" => extend_line_below, "x" => extend_line_below,
@ -265,7 +267,7 @@ pub fn default() -> HashMap<Mode, KeyTrie> {
"C-v" | "v" => vsplit_new, "C-v" | "v" => vsplit_new,
}, },
}, },
"y" => yank_joined_to_clipboard, "y" => yank_to_clipboard,
"Y" => yank_main_selection_to_clipboard, "Y" => yank_main_selection_to_clipboard,
"p" => paste_clipboard_after, "p" => paste_clipboard_after,
"P" => paste_clipboard_before, "P" => paste_clipboard_before,
@ -336,6 +338,9 @@ pub fn default() -> HashMap<Mode, KeyTrie> {
"B" => extend_prev_long_word_start, "B" => extend_prev_long_word_start,
"E" => extend_next_long_word_end, "E" => extend_next_long_word_end,
"A-e" => extend_parent_node_end,
"A-b" => extend_parent_node_start,
"n" => extend_search_next, "n" => extend_search_next,
"N" => extend_search_prev, "N" => extend_search_prev,
@ -368,7 +373,8 @@ pub fn default() -> HashMap<Mode, KeyTrie> {
"C-h" | "backspace" | "S-backspace" => delete_char_backward, "C-h" | "backspace" | "S-backspace" => delete_char_backward,
"C-d" | "del" => delete_char_forward, "C-d" | "del" => delete_char_forward,
"C-j" | "ret" => insert_newline, "C-j" | "ret" => insert_newline,
"tab" => insert_tab, "tab" => smart_tab,
"S-tab" => insert_tab,
"up" => move_visual_line_up, "up" => move_visual_line_up,
"down" => move_visual_line_down, "down" => move_visual_line_down,

@ -4,9 +4,8 @@ use helix_loader::VERSION_AND_GIT_HASH;
use helix_term::application::Application; use helix_term::application::Application;
use helix_term::args::Args; use helix_term::args::Args;
use helix_term::config::{Config, ConfigLoadError}; use helix_term::config::{Config, ConfigLoadError};
use std::path::PathBuf;
fn setup_logging(logpath: PathBuf, verbosity: u64) -> Result<()> { fn setup_logging(verbosity: u64) -> Result<()> {
let mut base_config = fern::Dispatch::new(); let mut base_config = fern::Dispatch::new();
base_config = match verbosity { base_config = match verbosity {
@ -27,7 +26,7 @@ fn setup_logging(logpath: PathBuf, verbosity: u64) -> Result<()> {
message message
)) ))
}) })
.chain(fern::log_file(logpath)?); .chain(fern::log_file(helix_loader::log_file())?);
base_config.chain(file_config).apply()?; base_config.chain(file_config).apply()?;
@ -41,12 +40,6 @@ fn main() -> Result<()> {
#[tokio::main] #[tokio::main]
async fn main_impl() -> Result<i32> { async fn main_impl() -> Result<i32> {
let logpath = helix_loader::log_file();
let parent = logpath.parent().unwrap();
if !parent.exists() {
std::fs::create_dir_all(parent).ok();
}
let help = format!( let help = format!(
"\ "\
{} {} {} {}
@ -78,11 +71,14 @@ FLAGS:
VERSION_AND_GIT_HASH, VERSION_AND_GIT_HASH,
env!("CARGO_PKG_AUTHORS"), env!("CARGO_PKG_AUTHORS"),
env!("CARGO_PKG_DESCRIPTION"), env!("CARGO_PKG_DESCRIPTION"),
logpath.display(), helix_loader::default_log_file().display(),
); );
let args = Args::parse_args().context("could not parse arguments")?; let args = Args::parse_args().context("could not parse arguments")?;
helix_loader::initialize_config_file(args.config_file.clone());
helix_loader::initialize_log_file(args.log_file.clone());
// Help has a higher priority and should be handled separately. // Help has a higher priority and should be handled separately.
if args.display_help { if args.display_help {
print!("{}", help); print!("{}", help);
@ -116,15 +112,7 @@ FLAGS:
return Ok(0); return Ok(0);
} }
let logpath = args.log_file.as_ref().cloned().unwrap_or(logpath); setup_logging(args.verbosity).context("failed to initialize logging")?;
setup_logging(logpath, args.verbosity).context("failed to initialize logging")?;
let config_dir = helix_loader::config_dir();
if !config_dir.exists() {
std::fs::create_dir_all(&config_dir).ok();
}
helix_loader::initialize_config_file(args.config_file.clone());
// Initialize the engine before we boot up! // Initialize the engine before we boot up!
helix_term::commands::ScriptingEngine::initialize(); helix_term::commands::ScriptingEngine::initialize();

@ -144,7 +144,9 @@ impl Completion {
} }
}; };
let Some(range) = util::lsp_range_to_range(doc.text(), edit.range, offset_encoding) else{ let Some(range) =
util::lsp_range_to_range(doc.text(), edit.range, offset_encoding)
else {
return Transaction::new(doc.text()); return Transaction::new(doc.text());
}; };
@ -292,6 +294,8 @@ impl Completion {
}; };
// if more text was entered, remove it // if more text was entered, remove it
doc.restore(view, &savepoint, true); doc.restore(view, &savepoint, true);
// save an undo checkpoint before the completion
doc.append_changes_to_history(view);
let transaction = item_to_transaction( let transaction = item_to_transaction(
doc, doc,
view.id, view.id,
@ -411,10 +415,18 @@ impl Completion {
_ => return false, _ => return false,
}; };
let Some(language_server) = cx.editor.language_server_by_id(current_item.language_server_id) else { return false; }; let Some(language_server) = cx
.editor
.language_server_by_id(current_item.language_server_id)
else {
return false;
};
// This method should not block the compositor so we handle the response asynchronously. // This method should not block the compositor so we handle the response asynchronously.
let Some(future) = language_server.resolve_completion_item(current_item.item.clone()) else { return false; }; let Some(future) = language_server.resolve_completion_item(current_item.item.clone())
else {
return false;
};
cx.callback( cx.callback(
future, future,

@ -163,6 +163,8 @@ impl EditorView {
Box::new(highlights) Box::new(highlights)
}; };
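// A gutter offset of zero means no gutter components are configured for this
// view, so skip rendering the gutter entirely.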
let gutter_overflow = view.gutter_offset(doc) == 0;
if !gutter_overflow {
Self::render_gutter( Self::render_gutter(
editor, editor,
doc, doc,
@ -172,6 +174,7 @@ impl EditorView {
is_focused, is_focused,
&mut line_decorations, &mut line_decorations,
); );
}
if is_focused { if is_focused {
let cursor = doc let cursor = doc

@ -62,7 +62,7 @@ impl Component for SignatureHelp {
}); });
let sig_text = crate::ui::markdown::highlighted_code_block( let sig_text = crate::ui::markdown::highlighted_code_block(
self.signature.clone(), &self.signature,
&self.language, &self.language,
Some(&cx.editor.theme), Some(&cx.editor.theme),
Arc::clone(&self.config_loader), Arc::clone(&self.config_loader),
@ -109,7 +109,7 @@ impl Component for SignatureHelp {
let max_text_width = (viewport.0 - PADDING).min(120); let max_text_width = (viewport.0 - PADDING).min(120);
let signature_text = crate::ui::markdown::highlighted_code_block( let signature_text = crate::ui::markdown::highlighted_code_block(
self.signature.clone(), &self.signature,
&self.language, &self.language,
None, None,
Arc::clone(&self.config_loader), Arc::clone(&self.config_loader),

@ -10,14 +10,14 @@ use pulldown_cmark::{CodeBlockKind, Event, HeadingLevel, Options, Parser, Tag};
use helix_core::{ use helix_core::{
syntax::{self, HighlightEvent, InjectionLanguageMarker, Syntax}, syntax::{self, HighlightEvent, InjectionLanguageMarker, Syntax},
Rope, RopeSlice,
}; };
use helix_view::{ use helix_view::{
graphics::{Margin, Rect, Style}, graphics::{Margin, Rect, Style},
Theme, Theme,
}; };
fn styled_multiline_text<'a>(text: String, style: Style) -> Text<'a> { fn styled_multiline_text<'a>(text: &str, style: Style) -> Text<'a> {
let spans: Vec<_> = text let spans: Vec<_> = text
.lines() .lines()
.map(|line| Span::styled(line.to_string(), style)) .map(|line| Span::styled(line.to_string(), style))
@ -27,7 +27,7 @@ fn styled_multiline_text<'a>(text: String, style: Style) -> Text<'a> {
} }
pub fn highlighted_code_block<'a>( pub fn highlighted_code_block<'a>(
text: String, text: &str,
language: &str, language: &str,
theme: Option<&Theme>, theme: Option<&Theme>,
config_loader: Arc<syntax::Loader>, config_loader: Arc<syntax::Loader>,
@ -45,13 +45,13 @@ pub fn highlighted_code_block<'a>(
None => return styled_multiline_text(text, code_style), None => return styled_multiline_text(text, code_style),
}; };
let rope = Rope::from(text.as_ref()); let ropeslice = RopeSlice::from(text);
let syntax = config_loader let syntax = config_loader
.language_configuration_for_injection_string(&InjectionLanguageMarker::Name( .language_configuration_for_injection_string(&InjectionLanguageMarker::Name(
language.into(), language.into(),
)) ))
.and_then(|config| config.highlight_config(theme.scopes())) .and_then(|config| config.highlight_config(theme.scopes()))
.and_then(|config| Syntax::new(&rope, config, Arc::clone(&config_loader))); .and_then(|config| Syntax::new(ropeslice, config, Arc::clone(&config_loader)));
let syntax = match syntax { let syntax = match syntax {
Some(s) => s, Some(s) => s,
@ -59,7 +59,7 @@ pub fn highlighted_code_block<'a>(
}; };
let highlight_iter = syntax let highlight_iter = syntax
.highlight_iter(rope.slice(..), None, None) .highlight_iter(ropeslice, None, None)
.map(|e| e.unwrap()); .map(|e| e.unwrap());
let highlight_iter: Box<dyn Iterator<Item = HighlightEvent>> = let highlight_iter: Box<dyn Iterator<Item = HighlightEvent>> =
if let Some(spans) = additional_highlight_spans { if let Some(spans) = additional_highlight_spans {
@ -267,7 +267,7 @@ impl Markdown {
CodeBlockKind::Indented => "", CodeBlockKind::Indented => "",
}; };
let tui_text = highlighted_code_block( let tui_text = highlighted_code_block(
text.to_string(), &text,
language, language,
theme, theme,
Arc::clone(&self.config_loader), Arc::clone(&self.config_loader),

@ -11,7 +11,7 @@ pub use tui::widgets::{Cell, Row};
use fuzzy_matcher::skim::SkimMatcherV2 as Matcher; use fuzzy_matcher::skim::SkimMatcherV2 as Matcher;
use fuzzy_matcher::FuzzyMatcher; use fuzzy_matcher::FuzzyMatcher;
use helix_view::{graphics::Rect, Editor}; use helix_view::{editor::SmartTabConfig, graphics::Rect, Editor};
use tui::layout::Constraint; use tui::layout::Constraint;
pub trait Item { pub trait Item {
@ -247,6 +247,21 @@ impl<T: Item + 'static> Component for Menu<T> {
compositor.pop(); compositor.pop();
})); }));
// Ignore the tab key while smart tab is enabled so the completion menu
// does not interfere with it. (Is there a better way to do this?)
if (event == key!(Tab) || event == shift!(Tab))
&& cx.editor.config().auto_completion
&& matches!(
cx.editor.config().smart_tab,
Some(SmartTabConfig {
enable: true,
supersede_menu: true,
})
)
{
return EventResult::Ignored(None);
}
match event { match event {
// esc or ctrl-c aborts the completion and closes the menu // esc or ctrl-c aborts the completion and closes the menu
key!(Esc) | ctrl!('c') => { key!(Esc) | ctrl!('c') => {

@ -142,16 +142,14 @@ pub fn regex_prompt(
}; };
cx.jobs.callback(callback); cx.jobs.callback(callback);
} else {
// Update
// TODO: mark command line as error
} }
} }
} }
} }
} }
}, },
); )
.with_language("regex", std::sync::Arc::clone(&cx.editor.syn_loader));
// Calculate initial completion // Calculate initial completion
prompt.recalculate_completion(cx.editor); prompt.recalculate_completion(cx.editor);
// prompt // prompt
@ -347,7 +345,15 @@ pub mod completers {
} }
pub fn filename(editor: &Editor, input: &str) -> Vec<Completion> { pub fn filename(editor: &Editor, input: &str) -> Vec<Completion> {
filename_impl(editor, input, |entry| { filename_with_git_ignore(editor, input, true)
}
pub fn filename_with_git_ignore(
editor: &Editor,
input: &str,
git_ignore: bool,
) -> Vec<Completion> {
filename_impl(editor, input, git_ignore, |entry| {
let is_dir = entry.file_type().map_or(false, |entry| entry.is_dir()); let is_dir = entry.file_type().map_or(false, |entry| entry.is_dir());
if is_dir { if is_dir {
@ -418,7 +424,15 @@ pub mod completers {
} }
pub fn directory(editor: &Editor, input: &str) -> Vec<Completion> { pub fn directory(editor: &Editor, input: &str) -> Vec<Completion> {
filename_impl(editor, input, |entry| { directory_with_git_ignore(editor, input, true)
}
pub fn directory_with_git_ignore(
editor: &Editor,
input: &str,
git_ignore: bool,
) -> Vec<Completion> {
filename_impl(editor, input, git_ignore, |entry| {
let is_dir = entry.file_type().map_or(false, |entry| entry.is_dir()); let is_dir = entry.file_type().map_or(false, |entry| entry.is_dir());
if is_dir { if is_dir {
@ -441,7 +455,12 @@ pub mod completers {
} }
// TODO: we could return an iter/lazy thing so it can fetch as many as it needs. // TODO: we could return an iter/lazy thing so it can fetch as many as it needs.
fn filename_impl<F>(_editor: &Editor, input: &str, filter_fn: F) -> Vec<Completion> fn filename_impl<F>(
_editor: &Editor,
input: &str,
git_ignore: bool,
filter_fn: F,
) -> Vec<Completion>
where where
F: Fn(&ignore::DirEntry) -> FileMatch, F: Fn(&ignore::DirEntry) -> FileMatch,
{ {
@ -472,7 +491,7 @@ pub mod completers {
match path.parent() { match path.parent() {
Some(path) if !path.as_os_str().is_empty() => path.to_path_buf(), Some(path) if !path.as_os_str().is_empty() => path.to_path_buf(),
// Path::new("h")'s parent is Some("")... // Path::new("h")'s parent is Some("")...
_ => std::env::current_dir().expect("couldn't determine current directory"), _ => helix_loader::current_working_dir(),
} }
}; };
@ -484,6 +503,7 @@ pub mod completers {
let mut files: Vec<_> = WalkBuilder::new(&dir) let mut files: Vec<_> = WalkBuilder::new(&dir)
.hidden(false) .hidden(false)
.follow_links(false) // We're scanning over depth 1 .follow_links(false) // We're scanning over depth 1
.git_ignore(git_ignore)
.max_depth(Some(1)) .max_depth(Some(1))
.build() .build()
.filter_map(|file| { .filter_map(|file| {

@ -27,7 +27,7 @@ use std::{collections::HashMap, io::Read, path::PathBuf};
use crate::ui::{Prompt, PromptEvent}; use crate::ui::{Prompt, PromptEvent};
use helix_core::{ use helix_core::{
movement::Direction, text_annotations::TextAnnotations, char_idx_at_visual_offset, movement::Direction, text_annotations::TextAnnotations,
unicode::segmentation::UnicodeSegmentation, Position, Syntax, unicode::segmentation::UnicodeSegmentation, Position, Syntax,
}; };
use helix_view::{ use helix_view::{
@ -51,12 +51,12 @@ pub enum PathOrId {
} }
impl PathOrId { impl PathOrId {
fn get_canonicalized(self) -> std::io::Result<Self> { fn get_canonicalized(self) -> Self {
use PathOrId::*; use PathOrId::*;
Ok(match self { match self {
Path(path) => Path(helix_core::path::get_canonicalized_path(&path)?), Path(path) => Path(helix_core::path::get_canonicalized_path(&path)),
Id(id) => Id(id), Id(id) => Id(id),
}) }
} }
} }
@ -375,7 +375,7 @@ impl<T: Item + 'static> Picker<T> {
fn current_file(&self, editor: &Editor) -> Option<FileLocation> { fn current_file(&self, editor: &Editor) -> Option<FileLocation> {
self.selection() self.selection()
.and_then(|current| (self.file_fn.as_ref()?)(editor, current)) .and_then(|current| (self.file_fn.as_ref()?)(editor, current))
.and_then(|(path_or_id, line)| path_or_id.get_canonicalized().ok().zip(Some(line))) .map(|(path_or_id, line)| (path_or_id.get_canonicalized(), line))
} }
/// Get (cached) preview for a given path. If a document corresponding /// Get (cached) preview for a given path. If a document corresponding
@ -432,7 +432,7 @@ impl<T: Item + 'static> Picker<T> {
fn handle_idle_timeout(&mut self, cx: &mut Context) -> EventResult { fn handle_idle_timeout(&mut self, cx: &mut Context) -> EventResult {
let Some((current_file, _)) = self.current_file(cx.editor) else { let Some((current_file, _)) = self.current_file(cx.editor) else {
return EventResult::Consumed(None) return EventResult::Consumed(None);
}; };
// Try to find a document in the cache // Try to find a document in the cache
@ -453,17 +453,20 @@ impl<T: Item + 'static> Picker<T> {
let text = doc.text().clone(); let text = doc.text().clone();
let loader = cx.editor.syn_loader.clone(); let loader = cx.editor.syn_loader.clone();
let job = tokio::task::spawn_blocking(move || { let job = tokio::task::spawn_blocking(move || {
let syntax = language_config let syntax = language_config.highlight_config(&loader.scopes()).and_then(
.highlight_config(&loader.scopes()) |highlight_config| Syntax::new(text.slice(..), highlight_config, loader),
.and_then(|highlight_config| Syntax::new(&text, highlight_config, loader)); );
let callback = move |editor: &mut Editor, compositor: &mut Compositor| { let callback = move |editor: &mut Editor, compositor: &mut Compositor| {
let Some(syntax) = syntax else { let Some(syntax) = syntax else {
log::info!("highlighting picker item failed"); log::info!("highlighting picker item failed");
return return;
}; };
let Some(Overlay { content: picker, .. }) = compositor.find::<Overlay<Self>>() else { let Some(Overlay {
content: picker, ..
}) = compositor.find::<Overlay<Self>>()
else {
log::info!("picker closed before syntax highlighting finished"); log::info!("picker closed before syntax highlighting finished");
return return;
}; };
// Try to find a document in the cache // Try to find a document in the cache
let doc = match current_file { let doc = match current_file {
@ -687,20 +690,20 @@ impl<T: Item + 'static> Picker<T> {
} }
}; };
let mut offset = ViewPosition::default();
if let Some(range) = range {
let text_fmt = doc.text_format(inner.width, None);
let annotations = TextAnnotations::default();
(offset.anchor, offset.vertical_offset) = char_idx_at_visual_offset(
doc.text().slice(..),
doc.text().line_to_char(range.0),
// align to middle // align to middle
let first_line = range -(inner.height as isize / 2),
.map(|(start, end)| { 0,
let height = end.saturating_sub(start) + 1; &text_fmt,
let middle = start + (height.saturating_sub(1) / 2); &annotations,
middle.saturating_sub(inner.height as usize / 2).min(start) );
}) }
.unwrap_or(0);
let offset = ViewPosition {
anchor: doc.text().line_to_char(first_line),
horizontal_offset: 0,
vertical_offset: 0,
};
let mut highlights = EditorView::doc_syntax_highlights( let mut highlights = EditorView::doc_syntax_highlights(
doc, doc,

@@ -1,7 +1,9 @@
 use crate::compositor::{Component, Compositor, Context, Event, EventResult};
 use crate::{alt, ctrl, key, shift, ui};
+use helix_core::syntax;
 use helix_view::input::KeyEvent;
 use helix_view::keyboard::KeyCode;
+use std::sync::Arc;
 use std::{borrow::Cow, ops::RangeFrom};
 use tui::buffer::Buffer as Surface;
 use tui::widgets::{Block, Borders, Widget};
@@ -32,6 +34,7 @@ pub struct Prompt {
     callback_fn: CallbackFn,
     pub doc_fn: DocFn,
     next_char_handler: Option<PromptCharHandler>,
+    language: Option<(&'static str, Arc<syntax::Loader>)>,
 }
 #[derive(Clone, Copy, PartialEq, Eq)]
@@ -84,6 +87,7 @@ impl Prompt {
             callback_fn: Box::new(callback_fn),
             doc_fn: Box::new(|_| None),
             next_char_handler: None,
+            language: None,
         }
     }
@@ -95,6 +99,11 @@ impl Prompt {
         self
     }
+    pub fn with_language(mut self, language: &'static str, loader: Arc<syntax::Loader>) -> Self {
+        self.language = Some((language, loader));
+        self
+    }
     pub fn line(&self) -> &String {
         &self.line
     }
@@ -298,8 +307,8 @@ impl Prompt {
         direction: CompletionDirection,
     ) {
         (self.callback_fn)(cx, &self.line, PromptEvent::Abort);
-        let values = match cx.editor.registers.read(register) {
-            Some(values) if !values.is_empty() => values,
+        let mut values = match cx.editor.registers.read(register, cx.editor) {
+            Some(values) if values.len() > 0 => values.rev(),
             _ => return,
         };
@@ -307,13 +316,16 @@ impl Prompt {
         let index = match direction {
             CompletionDirection::Forward => self.history_pos.map_or(0, |i| i + 1),
-            CompletionDirection::Backward => {
-                self.history_pos.unwrap_or(values.len()).saturating_sub(1)
-            }
+            CompletionDirection::Backward => self
+                .history_pos
+                .unwrap_or_else(|| values.len())
+                .saturating_sub(1),
         }
         .min(end);
-        self.line = values[index].clone();
+        self.line = values.nth(index).unwrap().to_string();
+        // Appease the borrow checker.
+        drop(values);
         self.history_pos = Some(index);
@@ -357,6 +369,7 @@ impl Prompt {
         let completion_color = theme.get("ui.menu");
         let selected_color = theme.get("ui.menu.selected");
         let suggestion_color = theme.get("ui.text.inactive");
+        let background = theme.get("ui.background");
         // completion
         let max_len = self
@@ -452,33 +465,32 @@ impl Prompt {
         }
         let line = area.height - 1;
+        surface.clear_with(area.clip_top(line), background);
         // render buffer text
         surface.set_string(area.x, area.y + line, &self.prompt, prompt_color);
-        let (input, is_suggestion): (Cow<str>, bool) = if self.line.is_empty() {
-            // latest value in the register list
-            match self
-                .history_register
-                .and_then(|reg| cx.editor.registers.last(reg))
-                .map(|entry| entry.into())
-            {
-                Some(value) => (value, true),
-                None => (Cow::from(""), false),
-            }
-        } else {
-            (self.line.as_str().into(), false)
-        };
-        surface.set_string(
-            area.x + self.prompt.len() as u16,
-            area.y + line,
-            &input,
-            if is_suggestion {
-                suggestion_color
-            } else {
-                prompt_color
-            },
-        );
+        let line_area = area.clip_left(self.prompt.len() as u16).clip_top(line);
+        if self.line.is_empty() {
+            // Show the most recently entered value as a suggestion.
+            if let Some(suggestion) = self
+                .history_register
+                .and_then(|reg| cx.editor.registers.first(reg, cx.editor))
+            {
+                surface.set_string(line_area.x, line_area.y, suggestion, suggestion_color);
+            }
+        } else if let Some((language, loader)) = self.language.as_ref() {
+            let mut text: ui::text::Text = crate::ui::markdown::highlighted_code_block(
+                &self.line,
+                language,
+                Some(&cx.editor.theme),
+                loader.clone(),
+                None,
+            )
+            .into();
+            text.render(line_area, surface, cx);
+        } else {
+            surface.set_string(line_area.x, line_area.y, self.line.clone(), prompt_color);
+        }
     }
 }
@@ -559,25 +571,29 @@ impl Component for Prompt {
                 } else {
                     let last_item = self
                         .history_register
-                        .and_then(|reg| cx.editor.registers.last(reg).cloned())
-                        .map(|entry| entry.into())
-                        .unwrap_or_else(|| Cow::from(""));
+                        .and_then(|reg| cx.editor.registers.first(reg, cx.editor))
+                        .map(|entry| entry.to_string())
+                        .unwrap_or_else(|| String::from(""));
                     // handle executing with last command in history if nothing entered
-                    let input: Cow<str> = if self.line.is_empty() {
-                        last_item
+                    let input = if self.line.is_empty() {
+                        &last_item
                     } else {
                         if last_item != self.line {
                             // store in history
                             if let Some(register) = self.history_register {
-                                cx.editor.registers.push(register, self.line.clone());
+                                if let Err(err) =
+                                    cx.editor.registers.push(register, self.line.clone())
+                                {
+                                    cx.editor.set_error(err.to_string());
+                                }
                             };
                         }
-                        self.line.as_str().into()
+                        &self.line
                    };
-                    (self.callback_fn)(cx, &input, PromptEvent::Validate);
+                    (self.callback_fn)(cx, input, PromptEvent::Validate);
                    return close_fn;
                }
@@ -609,25 +625,16 @@ impl Component for Prompt {
                self.completion = cx
                    .editor
                    .registers
-                    .inner()
-                    .iter()
-                    .map(|(ch, reg)| {
-                        let content = reg
-                            .read()
-                            .get(0)
-                            .and_then(|s| s.lines().next().to_owned())
-                            .unwrap_or_default();
-                        (0.., format!("{} {}", ch, &content).into())
-                    })
+                    .iter_preview()
+                    .map(|(ch, preview)| (0.., format!("{} {}", ch, &preview).into()))
                    .collect();
                self.next_char_handler = Some(Box::new(|prompt, c, context| {
                    prompt.insert_str(
-                        context
+                        &context
                            .editor
                            .registers
-                            .read(c)
-                            .and_then(|r| r.first())
-                            .map_or("", |r| r.as_str()),
+                            .first(c, context.editor)
+                            .unwrap_or_default(),
                        context.editor,
                    );
                }));

@@ -148,6 +148,7 @@ where
         helix_view::editor::StatusLineElement::FileModificationIndicator => {
             render_file_modification_indicator
         }
+        helix_view::editor::StatusLineElement::ReadOnlyIndicator => render_read_only_indicator,
         helix_view::editor::StatusLineElement::FileEncoding => render_file_encoding,
         helix_view::editor::StatusLineElement::FileLineEnding => render_file_line_ending,
         helix_view::editor::StatusLineElement::FileType => render_file_type,
@@ -447,6 +448,19 @@ where
     write(context, title, None);
 }
+fn render_read_only_indicator<F>(context: &mut RenderContext, write: F)
+where
+    F: Fn(&mut RenderContext, String, Option<Style>) + Copy,
+{
+    let title = if context.doc.readonly {
+        " [readonly] "
+    } else {
+        ""
+    }
+    .to_string();
+    write(context, title, None);
+}
 fn render_file_base_name<F>(context: &mut RenderContext, write: F)
 where
     F: Fn(&mut RenderContext, String, Option<Style>) + Copy,

@@ -18,7 +18,9 @@ mod test {
     mod auto_indent;
     mod auto_pairs;
     mod commands;
+    mod languages;
     mod movement;
+    mod picker;
     mod prompt;
     mod splits;
 }

@@ -2,6 +2,7 @@ use helix_term::application::Application;
 use super::*;
+mod movement;
 mod write;
 #[tokio::test(flavor = "multi_thread")]

@ -0,0 +1,452 @@
use super::*;
#[tokio::test(flavor = "multi_thread")]
async fn test_move_parent_node_end() -> anyhow::Result<()> {
let tests = vec![
// single cursor stays single cursor, first goes to end of current
// node, then parent
(
helpers::platform_line(indoc! {r##"
fn foo() {
let result = if true {
"yes"
} else {
"no#["|]#
}
}
"##}),
"<A-e>",
helpers::platform_line(indoc! {"\
fn foo() {
let result = if true {
\"yes\"
} else {
\"no\"#[\n|]#
}
}
"}),
),
(
helpers::platform_line(indoc! {"\
fn foo() {
let result = if true {
\"yes\"
} else {
\"no\"#[\n|]#
}
}
"}),
"<A-e>",
helpers::platform_line(indoc! {"\
fn foo() {
let result = if true {
\"yes\"
} else {
\"no\"
}#[\n|]#
}
"}),
),
// select mode extends
(
helpers::platform_line(indoc! {r##"
fn foo() {
let result = if true {
"yes"
} else {
#["no"|]#
}
}
"##}),
"v<A-e><A-e>",
helpers::platform_line(indoc! {"\
fn foo() {
let result = if true {
\"yes\"
} else {
#[\"no\"
}\n|]#
}
"}),
),
];
for test in tests {
test_with_config(AppBuilder::new().with_file("foo.rs", None), test).await?;
}
Ok(())
}
#[tokio::test(flavor = "multi_thread")]
async fn test_move_parent_node_start() -> anyhow::Result<()> {
let tests = vec![
// single cursor stays single cursor, first goes to end of current
// node, then parent
(
helpers::platform_line(indoc! {r##"
fn foo() {
let result = if true {
"yes"
} else {
"no#["|]#
}
}
"##}),
"<A-b>",
helpers::platform_line(indoc! {"\
fn foo() {
let result = if true {
\"yes\"
} else {
#[\"|]#no\"
}
}
"}),
),
(
helpers::platform_line(indoc! {"\
fn foo() {
let result = if true {
\"yes\"
} else {
\"no\"#[\n|]#
}
}
"}),
"<A-b>",
helpers::platform_line(indoc! {"\
fn foo() {
let result = if true {
\"yes\"
} else #[{|]#
\"no\"
}
}
"}),
),
(
helpers::platform_line(indoc! {"\
fn foo() {
let result = if true {
\"yes\"
} else #[{|]#
\"no\"
}
}
"}),
"<A-b>",
helpers::platform_line(indoc! {"\
fn foo() {
let result = if true {
\"yes\"
} #[e|]#lse {
\"no\"
}
}
"}),
),
// select mode extends
(
helpers::platform_line(indoc! {r##"
fn foo() {
let result = if true {
"yes"
} else {
#["no"|]#
}
}
"##}),
"v<A-b><A-b>",
helpers::platform_line(indoc! {"\
fn foo() {
let result = if true {
\"yes\"
} else #[|{
]#\"no\"
}
}
"}),
),
(
helpers::platform_line(indoc! {r##"
fn foo() {
let result = if true {
"yes"
} else {
#["no"|]#
}
}
"##}),
"v<A-b><A-b><A-b>",
helpers::platform_line(indoc! {"\
fn foo() {
let result = if true {
\"yes\"
} #[|else {
]#\"no\"
}
}
"}),
),
];
for test in tests {
test_with_config(AppBuilder::new().with_file("foo.rs", None), test).await?;
}
Ok(())
}
#[tokio::test(flavor = "multi_thread")]
async fn test_smart_tab_move_parent_node_end() -> anyhow::Result<()> {
let tests = vec![
// single cursor stays single cursor, first goes to end of current
// node, then parent
(
helpers::platform_line(indoc! {r##"
fn foo() {
let result = if true {
"yes"
} else {
"no#["|]#
}
}
"##}),
"i<tab>",
helpers::platform_line(indoc! {"\
fn foo() {
let result = if true {
\"yes\"
} else {
\"no\"#[|\n]#
}
}
"}),
),
(
helpers::platform_line(indoc! {"\
fn foo() {
let result = if true {
\"yes\"
} else {
\"no\"#[\n|]#
}
}
"}),
"i<tab>",
helpers::platform_line(indoc! {"\
fn foo() {
let result = if true {
\"yes\"
} else {
\"no\"
}#[|\n]#
}
"}),
),
// appending to the end of a line should still look at the current
// line, not the next one
(
helpers::platform_line(indoc! {"\
fn foo() {
let result = if true {
\"yes\"
} else {
\"no#[\"|]#
}
}
"}),
"a<tab>",
helpers::platform_line(indoc! {"\
fn foo() {
let result = if true {
\"yes\"
} else {
\"no\"
}#[\n|]#
}
"}),
),
// before cursor is all whitespace, so insert tab
(
helpers::platform_line(indoc! {"\
fn foo() {
let result = if true {
\"yes\"
} else {
#[\"no\"|]#
}
}
"}),
"i<tab>",
helpers::platform_line(indoc! {"\
fn foo() {
let result = if true {
\"yes\"
} else {
#[|\"no\"]#
}
}
"}),
),
// if selection spans multiple lines, it should still only look at the
// line on which the head is
(
helpers::platform_line(indoc! {"\
fn foo() {
let result = if true {
#[\"yes\"
} else {
\"no\"|]#
}
}
"}),
"a<tab>",
helpers::platform_line(indoc! {"\
fn foo() {
let result = if true {
\"yes\"
} else {
\"no\"
}#[\n|]#
}
"}),
),
(
helpers::platform_line(indoc! {"\
fn foo() {
let result = if true {
#[\"yes\"
} else {
\"no\"|]#
}
}
"}),
"i<tab>",
helpers::platform_line(indoc! {"\
fn foo() {
let result = if true {
#[|\"yes\"
} else {
\"no\"]#
}
}
"}),
),
(
helpers::platform_line(indoc! {"\
fn foo() {
#[l|]#et result = if true {
#(\"yes\"
} else {
\"no\"|)#
}
}
"}),
"i<tab>",
helpers::platform_line(indoc! {"\
fn foo() {
#[|l]#et result = if true {
#(|\"yes\"
} else {
\"no\")#
}
}
"}),
),
(
helpers::platform_line(indoc! {"\
fn foo() {
let result = if true {
\"yes\"#[\n|]#
} else {
\"no\"#(\n|)#
}
}
"}),
"i<tab>",
helpers::platform_line(indoc! {"\
fn foo() {
let result = if true {
\"yes\"
}#[| ]#else {
\"no\"
}#(|\n)#
}
"}),
),
(
helpers::platform_line(indoc! {"\
fn foo() {
let result = if true {
#[\"yes\"|]#
} else {
#(\"no\"|)#
}
}
"}),
"i<tab>",
helpers::platform_line(indoc! {"\
fn foo() {
let result = if true {
#[|\"yes\"]#
} else {
#(|\"no\")#
}
}
"}),
),
// if any cursors are not preceded by all whitespace, then do the
// smart_tab action
(
helpers::platform_line(indoc! {"\
fn foo() {
let result = if true {
#[\"yes\"\n|]#
} else {
\"no#(\"\n|)#
}
}
"}),
"i<tab>",
helpers::platform_line(indoc! {"\
fn foo() {
let result = if true {
\"yes\"
}#[| ]#else {
\"no\"
}#(|\n)#
}
"}),
),
// Ctrl-tab always inserts a tab
(
helpers::platform_line(indoc! {"\
fn foo() {
let result = if true {
#[\"yes\"\n|]#
} else {
\"no#(\"\n|)#
}
}
"}),
"i<S-tab>",
helpers::platform_line(indoc! {"\
fn foo() {
let result = if true {
#[|\"yes\"\n]#
} else {
\"no #(|\"\n)#
}
}
"}),
),
];
for test in tests {
test_with_config(AppBuilder::new().with_file("foo.rs", None), test).await?;
}
Ok(())
}

@ -0,0 +1,41 @@
use super::*;
#[tokio::test(flavor = "multi_thread")]
async fn auto_indent() -> anyhow::Result<()> {
let app = || AppBuilder::new().with_file("foo.go", None);
let enter_tests = [
(
helpers::platform_line(indoc! {r##"
type Test struct {#[}|]#
"##}),
"i<ret>",
helpers::platform_line(indoc! {"\
type Test struct {
\t#[|\n]#
}
"}),
),
(
helpers::platform_line(indoc! {"\
func main() {
\tswitch nil {#[}|]#
}
"}),
"i<ret>",
helpers::platform_line(indoc! {"\
func main() {
\tswitch nil {
\t\t#[|\n]#
\t}
}
"}),
),
];
for test in enter_tests {
test_with_config(app(), test).await?;
}
Ok(())
}

@ -0,0 +1,4 @@
use super::*;
mod go;
mod yaml;

@ -0,0 +1,819 @@
use super::*;
#[tokio::test(flavor = "multi_thread")]
async fn auto_indent() -> anyhow::Result<()> {
let app = || AppBuilder::new().with_file("foo.yaml", None);
let below_tests = [
(
helpers::platform_line(indoc! {r##"
#[t|]#op:
baz: foo
bazi:
more: yes
why: because
quux:
- 1
- 2
bax: foox
fook:
"##}),
"o",
helpers::platform_line(indoc! {"\
top:
#[\n|]#
baz: foo
bazi:
more: yes
why: because
quux:
- 1
- 2
bax: foox
fook:
"}),
),
(
helpers::platform_line(indoc! {r##"
top:
b#[a|]#z: foo
bazi:
more: yes
why: because
quux:
- 1
- 2
bax: foox
fook:
"##}),
"o",
helpers::platform_line(indoc! {"\
top:
baz: foo
#[\n|]#
bazi:
more: yes
why: because
quux:
- 1
- 2
bax: foox
fook:
"}),
),
(
helpers::platform_line(indoc! {r##"
top:
baz: foo
bazi#[:|]#
more: yes
why: because
quux:
- 1
- 2
bax: foox
fook:
"##}),
"o",
helpers::platform_line(indoc! {"\
top:
baz: foo
bazi:
#[\n|]#
more: yes
why: because
quux:
- 1
- 2
bax: foox
fook:
"}),
),
(
helpers::platform_line(indoc! {r##"
top:
baz: foo
bazi:
more: #[yes|]#
why: because
quux:
- 1
- 2
bax: foox
fook:
"##}),
"o",
helpers::platform_line(indoc! {"\
top:
baz: foo
bazi:
more: yes
#[\n|]#
why: because
quux:
- 1
- 2
bax: foox
fook:
"}),
),
(
helpers::platform_line(indoc! {r##"
top:
baz: foo
bazi:
more: yes
why: becaus#[e|]#
quux:
- 1
- 2
bax: foox
fook:
"##}),
"o",
helpers::platform_line(indoc! {"\
top:
baz: foo
bazi:
more: yes
why: because
#[\n|]#
quux:
- 1
- 2
bax: foox
fook:
"}),
),
(
helpers::platform_line(indoc! {"\
top:
baz: foo
bazi:
more: yes
why: because
quux:#[\n|]#
- 1
- 2
bax: foox
fook:
"}),
"o",
helpers::platform_line(indoc! {"\
top:
baz: foo
bazi:
more: yes
why: because
quux:
#[\n|]#
- 1
- 2
bax: foox
fook:
"}),
),
(
helpers::platform_line(indoc! {"\
top:
baz: foo
bazi:
more: yes
why: because
quux:
- 1#[\n|]#
- 2
bax: foox
fook:
"}),
"o",
helpers::platform_line(indoc! {"\
top:
baz: foo
bazi:
more: yes
why: because
quux:
- 1
#[\n|]#
- 2
bax: foox
fook:
"}),
),
(
helpers::platform_line(indoc! {"\
top:
baz: foo
bazi:
more: yes
why: because
quux:
- 1
- 2
bax: foox
fook:#[\n|]#
"}),
"o",
helpers::platform_line(indoc! {"\
top:
baz: foo
bazi:
more: yes
why: because
quux:
- 1
- 2
bax: foox
fook:
#[\n|]#
"}),
),
(
helpers::platform_line(indoc! {"\
top:
baz: foo
bax: |
some
multi
line
string#[\n|]#
fook:
"}),
"o",
helpers::platform_line(indoc! {"\
top:
baz: foo
bax: |
some
multi
line
string
#[\n|]#
fook:
"}),
),
(
helpers::platform_line(indoc! {"\
top:
baz: foo
bax: >
some
multi
line#[\n|]#
string
fook:
"}),
"o",
helpers::platform_line(indoc! {"\
top:
baz: foo
bax: >
some
multi
line
#[\n|]#
string
fook:
"}),
),
(
helpers::platform_line(indoc! {"\
top:
baz: foo
bax: >#[\n|]#
fook:
"}),
"o",
helpers::platform_line(indoc! {"\
top:
baz: foo
bax: >
#[\n|]#
fook:
"}),
),
(
helpers::platform_line(indoc! {"\
- top:#[\n|]#
baz: foo
bax: foox
fook:
"}),
"o",
helpers::platform_line(indoc! {"\
- top:
#[\n|]#
baz: foo
bax: foox
fook:
"}),
),
(
helpers::platform_line(indoc! {"\
- top:
baz: foo#[\n|]#
bax: foox
fook:
"}),
"o",
helpers::platform_line(indoc! {"\
- top:
baz: foo
#[\n|]#
bax: foox
fook:
"}),
),
(
helpers::platform_line(indoc! {"\
- top:
baz: foo
bax: foox#[\n|]#
fook:
"}),
"o",
helpers::platform_line(indoc! {"\
- top:
baz: foo
bax: foox
#[\n|]#
fook:
"}),
),
(
helpers::platform_line(indoc! {"\
top:
baz:
- one: two#[\n|]#
three: four
- top:
baz: foo
bax: foox
"}),
"o",
helpers::platform_line(indoc! {"\
top:
baz:
- one: two
#[\n|]#
three: four
- top:
baz: foo
bax: foox
"}),
),
// yaml map without a key
(
helpers::platform_line(indoc! {"\
top:#[\n|]#
"}),
"o",
helpers::platform_line(indoc! {"\
top:
#[\n|]#
"}),
),
(
helpers::platform_line(indoc! {"\
top#[:|]#
bottom: withvalue
"}),
"o",
helpers::platform_line(indoc! {"\
top:
#[\n|]#
bottom: withvalue
"}),
),
(
helpers::platform_line(indoc! {"\
bottom: withvalue
top#[:|]#
"}),
"o",
helpers::platform_line(indoc! {"\
bottom: withvalue
top:
#[\n|]#
"}),
),
];
for test in below_tests {
test_with_config(app(), test).await?;
}
let above_tests = [
(
helpers::platform_line(indoc! {r##"
#[t|]#op:
baz: foo
bazi:
more: yes
why: because
quux:
- 1
- 2
bax: foox
fook:
"##}),
"O",
helpers::platform_line(indoc! {"\
#[\n|]#
top:
baz: foo
bazi:
more: yes
why: because
quux:
- 1
- 2
bax: foox
fook:
"}),
),
(
helpers::platform_line(indoc! {r##"
top:
b#[a|]#z: foo
bazi:
more: yes
why: because
quux:
- 1
- 2
bax: foox
fook:
"##}),
"O",
helpers::platform_line(indoc! {"\
top:
#[\n|]#
baz: foo
bazi:
more: yes
why: because
quux:
- 1
- 2
bax: foox
fook:
"}),
),
(
helpers::platform_line(indoc! {r##"
top:
baz: foo
bazi#[:|]#
more: yes
why: because
quux:
- 1
- 2
bax: foox
fook:
"##}),
"O",
helpers::platform_line(indoc! {"\
top:
baz: foo
#[\n|]#
bazi:
more: yes
why: because
quux:
- 1
- 2
bax: foox
fook:
"}),
),
(
helpers::platform_line(indoc! {r##"
top:
baz: foo
bazi:
more: #[yes|]#
why: because
quux:
- 1
- 2
bax: foox
fook:
"##}),
"O",
helpers::platform_line(indoc! {"\
top:
baz: foo
bazi:
#[\n|]#
more: yes
why: because
quux:
- 1
- 2
bax: foox
fook:
"}),
),
(
helpers::platform_line(indoc! {r##"
top:
baz: foo
bazi:
more: yes
why: becaus#[e|]#
quux:
- 1
- 2
bax: foox
fook:
"##}),
"O",
helpers::platform_line(indoc! {"\
top:
baz: foo
bazi:
more: yes
#[\n|]#
why: because
quux:
- 1
- 2
bax: foox
fook:
"}),
),
(
helpers::platform_line(indoc! {"\
top:
baz: foo
bazi:
more: yes
why: because
quux:#[\n|]#
- 1
- 2
bax: foox
fook:
"}),
"O",
helpers::platform_line(indoc! {"\
top:
baz: foo
bazi:
more: yes
why: because
#[\n|]#
quux:
- 1
- 2
bax: foox
fook:
"}),
),
(
helpers::platform_line(indoc! {"\
top:
baz: foo
bazi:
more: yes
why: because
quux:
- 1#[\n|]#
- 2
bax: foox
fook:
"}),
"O",
helpers::platform_line(indoc! {"\
top:
baz: foo
bazi:
more: yes
why: because
quux:
#[\n|]#
- 1
- 2
bax: foox
fook:
"}),
),
(
helpers::platform_line(indoc! {"\
top:
baz: foo
bazi:
more: yes
why: because
quux:
- 1
- 2
bax: foox
fook:#[\n|]#
"}),
"O",
helpers::platform_line(indoc! {"\
top:
baz: foo
bazi:
more: yes
why: because
quux:
- 1
- 2
bax: foox
#[\n|]#
fook:
"}),
),
(
helpers::platform_line(indoc! {"\
top:
baz: foo
bax: |
some
multi
line
string#[\n|]#
fook:
"}),
"O",
helpers::platform_line(indoc! {"\
top:
baz: foo
bax: |
some
multi
line
#[\n|]#
string
fook:
"}),
),
(
helpers::platform_line(indoc! {"\
top:
baz: foo
bax: >
some#[\n|]#
multi
line
string
fook:
"}),
"O",
helpers::platform_line(indoc! {"\
top:
baz: foo
bax: >
#[\n|]#
some
multi
line
string
fook:
"}),
),
(
helpers::platform_line(indoc! {"\
top:
baz: foo
bax: >
fook:#[\n|]#
"}),
"O",
helpers::platform_line(indoc! {"\
top:
baz: foo
bax: >
#[\n|]#
fook:
"}),
),
(
helpers::platform_line(indoc! {"\
- top:
baz: foo#[\n|]#
bax: foox
fook:
"}),
"O",
helpers::platform_line(indoc! {"\
- top:
#[\n|]#
baz: foo
bax: foox
fook:
"}),
),
(
helpers::platform_line(indoc! {"\
- top:
baz: foo
bax: foox
fook:#[\n|]#
"}),
"O",
helpers::platform_line(indoc! {"\
- top:
baz: foo
bax: foox
#[\n|]#
fook:
"}),
),
(
helpers::platform_line(indoc! {"\
top:
baz:
- one: two#[\n|]#
three: four
- top:
baz: foo
bax: foox
"}),
"O",
helpers::platform_line(indoc! {"\
top:
baz:
#[\n|]#
- one: two
three: four
- top:
baz: foo
bax: foox
"}),
),
// yaml map without a key
(
helpers::platform_line(indoc! {"\
top:#[\n|]#
"}),
"O",
helpers::platform_line(indoc! {"\
#[\n|]#
top:
"}),
),
(
helpers::platform_line(indoc! {"\
bottom: withvalue
top#[:|]#
"}),
"O",
helpers::platform_line(indoc! {"\
bottom: withvalue
#[\n|]#
top:
"}),
),
(
helpers::platform_line(indoc! {"\
top:
bottom:#[ |]#withvalue
"}),
"O",
helpers::platform_line(indoc! {"\
top:
#[\n|]#
bottom: withvalue
"}),
),
];
for test in above_tests {
test_with_config(app(), test).await?;
}
let enter_tests = [
(
helpers::platform_line(indoc! {r##"
foo: #[b|]#ar
"##}),
"i<ret>",
helpers::platform_line(indoc! {"\
foo:
#[|b]#ar
"}),
),
(
helpers::platform_line(indoc! {"\
foo:#[\n|]#
"}),
"i<ret>",
helpers::platform_line(indoc! {"\
foo:
#[|\n]#
"}),
),
];
for test in enter_tests {
test_with_config(app(), test).await?;
}
Ok(())
}

@ -0,0 +1,80 @@
use std::fs;
use helix_core::{path::get_canonicalized_path, Range};
use helix_loader::{current_working_dir, set_current_working_dir};
use helix_view::{current_ref, editor::Action};
use tempfile::{Builder, TempDir};
use super::*;
#[tokio::test(flavor = "multi_thread")]
async fn test_picker_alt_ret() -> anyhow::Result<()> {
// Create two files, open the first and run a global search for a word
// from the second file. Press <alt-ret> to have helix open the second file in the
// new buffer, but not change focus. Then check whether the word is highlighted
// correctly and the view of the first file has not changed.
let tmp_dir = TempDir::new()?;
set_current_working_dir(tmp_dir.path().into())?;
let mut app = AppBuilder::new().build()?;
log::debug!(
"set current working directory to {:?}",
current_working_dir()
);
// Add prefix so helix doesn't hide these files in a picker
let files = [
Builder::new().prefix("1").tempfile_in(&tmp_dir)?,
Builder::new().prefix("2").tempfile_in(&tmp_dir)?,
];
let paths = files
.iter()
.map(|f| get_canonicalized_path(f.path()))
.collect::<Vec<_>>();
fs::write(&paths[0], "1\n2\n3\n4")?;
fs::write(&paths[1], "first\nsecond")?;
log::debug!(
"created and wrote two temporary files: {:?} & {:?}",
paths[0],
paths[1]
);
// Manually open to save the offset, otherwise we won't be able to change the state in the Fn trait
app.editor.open(files[0].path(), Action::Replace)?;
let view_offset = current_ref!(app.editor).0.offset;
test_key_sequences(
&mut app,
vec![
(Some("<space>/"), None),
(Some("second<ret>"), None),
(
Some("<A-ret><esc>"),
Some(&|app| {
let (view, doc) = current_ref!(app.editor);
assert_eq!(doc.path().unwrap(), &paths[0]);
let select_ranges = doc.selection(view.id).ranges();
assert_eq!(select_ranges[0], Range::new(0, 1));
assert_eq!(view.offset, view_offset);
}),
),
(
Some(":buffer<minus>next<ret>"),
Some(&|app| {
let (view, doc) = current_ref!(app.editor);
assert_eq!(doc.path().unwrap(), &paths[1]);
let select_ranges = doc.selection(view.id).ranges();
assert_eq!(select_ranges.len(), 1);
assert_eq!(select_ranges[0], Range::new(6, 12));
}),
),
],
false,
)
.await?;
Ok(())
}

@@ -16,10 +16,10 @@ include = ["src/**/*", "README.md"]
 default = ["crossterm"]
 [dependencies]
-bitflags = "2.3"
+bitflags = "2.4"
 cassowary = "0.3"
 unicode-segmentation = "1.10"
-crossterm = { version = "0.26", optional = true }
+crossterm = { version = "0.27", optional = true }
 termini = "1.0"
 serde = { version = "1", "optional" = true, features = ["derive"]}
 once_cell = "1.18"

@@ -201,7 +201,7 @@ where
         for (x, y, cell) in content {
             // Move the cursor if the previous location was not (x - 1, y)
             if !matches!(last_pos, Some(p) if x == p.0 + 1 && y == p.1) {
-                map_error(queue!(self.buffer, MoveTo(x, y)))?;
+                queue!(self.buffer, MoveTo(x, y))?;
             }
             last_pos = Some((x, y));
             if cell.modifier != modifier {
@@ -214,12 +214,12 @@ where
             }
             if cell.fg != fg {
                 let color = CColor::from(cell.fg);
-                map_error(queue!(self.buffer, SetForegroundColor(color)))?;
+                queue!(self.buffer, SetForegroundColor(color))?;
                 fg = cell.fg;
             }
             if cell.bg != bg {
                 let color = CColor::from(cell.bg);
-                map_error(queue!(self.buffer, SetBackgroundColor(color)))?;
+                queue!(self.buffer, SetBackgroundColor(color))?;
                 bg = cell.bg;
             }
@@ -227,7 +227,7 @@ where
             if self.capabilities.has_extended_underlines {
                 if cell.underline_color != underline_color {
                     let color = CColor::from(cell.underline_color);
-                    map_error(queue!(self.buffer, SetUnderlineColor(color)))?;
+                    queue!(self.buffer, SetUnderlineColor(color))?;
                     underline_color = cell.underline_color;
                 }
             } else {
@@ -239,24 +239,24 @@ where
             if new_underline_style != underline_style {
                 let attr = CAttribute::from(new_underline_style);
-                map_error(queue!(self.buffer, SetAttribute(attr)))?;
+                queue!(self.buffer, SetAttribute(attr))?;
                 underline_style = new_underline_style;
             }
-            map_error(queue!(self.buffer, Print(&cell.symbol)))?;
+            queue!(self.buffer, Print(&cell.symbol))?;
         }
-        map_error(queue!(
+        queue!(
             self.buffer,
             SetUnderlineColor(CColor::Reset),
             SetForegroundColor(CColor::Reset),
             SetBackgroundColor(CColor::Reset),
             SetAttribute(CAttribute::Reset)
-        ))
+        )
     }
     fn hide_cursor(&mut self) -> io::Result<()> {
-        map_error(execute!(self.buffer, Hide))
+        execute!(self.buffer, Hide)
     }
     fn show_cursor(&mut self, kind: CursorKind) -> io::Result<()> {
@@ -266,7 +266,7 @@ where
             CursorKind::Underline => SetCursorStyle::SteadyUnderScore,
             CursorKind::Hidden => unreachable!(),
         };
-        map_error(execute!(self.buffer, Show, shape))
+        execute!(self.buffer, Show, shape)
     }
     fn get_cursor(&mut self) -> io::Result<(u16, u16)> {
@@ -275,11 +275,11 @@ where
     }
     fn set_cursor(&mut self, x: u16, y: u16) -> io::Result<()> {
-        map_error(execute!(self.buffer, MoveTo(x, y)))
+        execute!(self.buffer, MoveTo(x, y))
     }
     fn clear(&mut self) -> io::Result<()> {
-        map_error(execute!(self.buffer, Clear(ClearType::All)))
+        execute!(self.buffer, Clear(ClearType::All))
     }
     fn size(&self) -> io::Result<Rect> {
@@ -294,10 +294,6 @@ where
     }
 }
-fn map_error(error: crossterm::Result<()>) -> io::Result<()> {
-    error.map_err(|e| io::Error::new(io::ErrorKind::Other, e.to_string()))
-}
 #[derive(Debug)]
 struct ModifierDiff {
     pub from: Modifier,
@@ -312,48 +308,48 @@ impl ModifierDiff {
         //use crossterm::Attribute;
         let removed = self.from - self.to;
         if removed.contains(Modifier::REVERSED) {
-            map_error(queue!(w, SetAttribute(CAttribute::NoReverse)))?;
+            queue!(w, SetAttribute(CAttribute::NoReverse))?;
         }
         if removed.contains(Modifier::BOLD) {
-            map_error(queue!(w, SetAttribute(CAttribute::NormalIntensity)))?;
+            queue!(w, SetAttribute(CAttribute::NormalIntensity))?;
             if self.to.contains(Modifier::DIM) {
-                map_error(queue!(w, SetAttribute(CAttribute::Dim)))?;
+                queue!(w, SetAttribute(CAttribute::Dim))?;
             }
         }
         if removed.contains(Modifier::ITALIC) {
-            map_error(queue!(w, SetAttribute(CAttribute::NoItalic)))?;
+            queue!(w, SetAttribute(CAttribute::NoItalic))?;
         }
         if removed.contains(Modifier::DIM) {
-            map_error(queue!(w, SetAttribute(CAttribute::NormalIntensity)))?;
+            queue!(w, SetAttribute(CAttribute::NormalIntensity))?;
         }
         if removed.contains(Modifier::CROSSED_OUT) {
-            map_error(queue!(w, SetAttribute(CAttribute::NotCrossedOut)))?;
+            queue!(w, SetAttribute(CAttribute::NotCrossedOut))?;
         }
         if removed.contains(Modifier::SLOW_BLINK) || removed.contains(Modifier::RAPID_BLINK) {
-            map_error(queue!(w, SetAttribute(CAttribute::NoBlink)))?;
+            queue!(w, SetAttribute(CAttribute::NoBlink))?;
         }
         let added = self.to - self.from;
         if added.contains(Modifier::REVERSED) {
-            map_error(queue!(w, SetAttribute(CAttribute::Reverse)))?;
+            queue!(w, SetAttribute(CAttribute::Reverse))?;
        }
        if added.contains(Modifier::BOLD) {
-            map_error(queue!(w, SetAttribute(CAttribute::Bold)))?;
+            queue!(w, SetAttribute(CAttribute::Bold))?;
        }
        if added.contains(Modifier::ITALIC) {
-            map_error(queue!(w, SetAttribute(CAttribute::Italic)))?;
+            queue!(w, SetAttribute(CAttribute::Italic))?;
        }
        if added.contains(Modifier::DIM) {
-            map_error(queue!(w, SetAttribute(CAttribute::Dim)))?;
+            queue!(w, SetAttribute(CAttribute::Dim))?;
        }
        if added.contains(Modifier::CROSSED_OUT) {
-            map_error(queue!(w, SetAttribute(CAttribute::CrossedOut)))?;
+            queue!(w, SetAttribute(CAttribute::CrossedOut))?;
        }
        if added.contains(Modifier::SLOW_BLINK) {
-            map_error(queue!(w, SetAttribute(CAttribute::SlowBlink)))?;
+            queue!(w, SetAttribute(CAttribute::SlowBlink))?;
        }
        if added.contains(Modifier::RAPID_BLINK) {
-            map_error(queue!(w, SetAttribute(CAttribute::RapidBlink)))?;
+            queue!(w, SetAttribute(CAttribute::RapidBlink))?;
        }
        Ok(())
@@ -407,7 +403,7 @@ impl Command for SetUnderlineColor {
     }
     #[cfg(windows)]
-    fn execute_winapi(&self) -> crossterm::Result<()> {
+    fn execute_winapi(&self) -> io::Result<()> {
         Err(std::io::Error::new(
             std::io::ErrorKind::Other,
             "SetUnderlineColor not supported by winapi.",

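The helix-tui backend hunks above drop the map_error wrapper because, with the crossterm 0.27 bump shown in the Cargo.toml hunk earlier, queue! and execute! return std::io::Result directly. A minimal, self-contained sketch of that pattern, assuming crossterm 0.27 as a dependency; the writer and commands here are illustrative and not taken from the patch:

use std::io::{self, Write};

use crossterm::{cursor::MoveTo, execute, queue, style::Print};

// With crossterm 0.27 the macros return io::Result, so they compose with `?`
// in io::Result functions without a map_error-style conversion.
fn draw(out: &mut impl Write) -> io::Result<()> {
    queue!(out, MoveTo(0, 0), Print("hello"))?;
    out.flush()
}

fn main() -> io::Result<()> {
    let mut stdout = io::stdout();
    draw(&mut stdout)?;
    execute!(stdout, MoveTo(0, 1))
}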
@@ -65,20 +65,6 @@ where
     viewport: Viewport,
 }
-impl<B> Drop for Terminal<B>
-where
-    B: Backend,
-{
-    fn drop(&mut self) {
-        // Attempt to restore the cursor state
-        if self.cursor_kind == CursorKind::Hidden {
-            if let Err(err) = self.show_cursor(CursorKind::Block) {
-                eprintln!("Failed to show the cursor: {}", err);
-            }
-        }
-    }
-}
 impl<B> Terminal<B>
 where
     B: Backend,

@@ -17,7 +17,7 @@ tokio = { version = "1", features = ["rt", "rt-multi-thread", "time", "sync", "p
 parking_lot = "0.12"
 arc-swap = { version = "1.6.0" }
-gix = { version = "0.47.0", default-features = false , optional = true }
+gix = { version = "0.48.0", default-features = false , optional = true }
 imara-diff = "0.1.5"
 anyhow = "1"
@@ -27,4 +27,4 @@ log = "0.4"
 git = ["gix"]
 [dev-dependencies]
-tempfile = "3.6"
+tempfile = "3.8"

@@ -14,13 +14,13 @@ default = []
 term = ["crossterm"]
 [dependencies]
-bitflags = "2.3"
+bitflags = "2.4"
 anyhow = "1"
 helix-core = { version = "0.6", path = "../helix-core" }
 helix-loader = { version = "0.6", path = "../helix-loader" }
 helix-lsp = { version = "0.6", path = "../helix-lsp" }
 helix-dap = { version = "0.6", path = "../helix-dap" }
-crossterm = { version = "0.26", optional = true }
+crossterm = { version = "0.27", optional = true }
 helix-vcs = { version = "0.6", path = "../helix-vcs" }
 # Conversion traits
@@ -54,6 +54,7 @@ clipboard-win = { version = "4.5", features = ["std"] }
 [target.'cfg(unix)'.dependencies]
 libc = "0.2"
+rustix = { version = "0.38", features = ["fs"] }
 [dev-dependencies]
 helix-tui = { path = "../helix-tui" }

@@ -193,6 +193,7 @@ pub struct Document {
     // A name separate from the file name
     pub name: Option<String>,
+    pub readonly: bool,
 }
 /// Inlay hints for a single `(Document, View)` combo.
@@ -669,7 +670,7 @@ impl Document {
     ) -> Self {
         let (encoding, has_bom) = encoding_with_bom_info.unwrap_or((encoding::UTF_8, false));
         let line_ending = config.load().default_line_ending.into();
-        let changes = ChangeSet::new(&text);
+        let changes = ChangeSet::new(text.slice(..));
         let old_state = None;
         Self {
@@ -705,6 +706,7 @@ impl Document {
                 crate::graphics::Style::default().fg(crate::graphics::Color::Green),
             )],
             name: None,
+            readonly: false,
         }
     }
@@ -737,7 +739,7 @@ impl Document {
         let mut doc = Self::from(rope, Some((encoding, has_bom)), config);
         // set the path and try detecting the language
-        doc.set_path(Some(path))?;
+        doc.set_path(Some(path));
         if let Some(loader) = config_loader {
             doc.detect_language(loader);
         }
@@ -882,7 +884,7 @@ impl Document {
         let text = self.text().clone();
         let path = match path {
-            Some(path) => helix_core::path::get_canonicalized_path(&path)?,
+            Some(path) => helix_core::path::get_canonicalized_path(&path),
             None => {
                 if self.path.is_none() {
                     bail!("Can't save with no path set!");
@@ -970,7 +972,7 @@ impl Document {
     ) -> Option<Arc<helix_core::syntax::LanguageConfiguration>> {
         config_loader
             .language_config_for_file_name(self.path.as_ref()?)
-            .or_else(|| config_loader.language_config_for_shebang(self.text()))
+            .or_else(|| config_loader.language_config_for_shebang(self.text().slice(..)))
     }
     /// Detect the indentation used in the file, or otherwise defaults to the language indentation
@@ -987,6 +989,38 @@ impl Document {
         }
     }
+    #[cfg(unix)]
+    // Detect if the file is readonly and change the readonly field if necessary (unix only)
+    pub fn detect_readonly(&mut self) {
+        use rustix::fs::{access, Access};
+        // Allows setting the flag for files the user cannot modify, like root files
+        self.readonly = match &self.path {
+            None => false,
+            Some(p) => match access(p, Access::WRITE_OK) {
+                Ok(_) => false,
+                Err(err) if err.kind() == std::io::ErrorKind::NotFound => false,
+                Err(_) => true,
+            },
+        };
+    }
+    #[cfg(not(unix))]
+    // Detect if the file is readonly and change the readonly field if necessary (non-unix os)
+    pub fn detect_readonly(&mut self) {
+        // TODO Use the Windows' function `CreateFileW` to check if a file is readonly
+        // Discussion: https://github.com/helix-editor/helix/pull/7740#issuecomment-1656806459
+        // Vim implementation: https://github.com/vim/vim/blob/4c0089d696b8d1d5dc40568f25ea5738fa5bbffb/src/os_win32.c#L7665
+        // Windows binding: https://microsoft.github.io/windows-docs-rs/doc/windows/Win32/Storage/FileSystem/fn.CreateFileW.html
+        self.readonly = match &self.path {
+            None => false,
+            Some(p) => match std::fs::metadata(p) {
+                Err(err) if err.kind() == std::io::ErrorKind::NotFound => false,
+                Err(_) => false,
+                Ok(metadata) => metadata.permissions().readonly(),
+            },
+        };
+    }
     /// Reload the document from its path.
     pub fn reload(
         &mut self,
@@ -1001,6 +1035,9 @@ impl Document {
             .ok_or_else(|| anyhow!("can't find file to reload from {:?}", self.display_name()))?
             .to_owned();
+        // Once we have a valid path we check if its readonly status has changed
+        self.detect_readonly();
         let mut file = std::fs::File::open(&path)?;
         let (rope, ..) = from_reader(&mut file, Some(encoding))?;
@@ -1041,16 +1078,14 @@ impl Document {
         self.encoding
     }
-    pub fn set_path(&mut self, path: Option<&Path>) -> Result<(), std::io::Error> {
-        let path = path
-            .map(helix_core::path::get_canonicalized_path)
-            .transpose()?;
+    pub fn set_path(&mut self, path: Option<&Path>) {
+        let path = path.map(helix_core::path::get_canonicalized_path);
         // if parent doesn't exist we still want to open the document
         // and error out when document is saved
         self.path = path;
-        Ok(())
+        self.detect_readonly();
     }
     /// Set the programming language for the file and load associated data (e.g. highlighting)
@@ -1062,7 +1097,7 @@ impl Document {
     ) {
         if let (Some(language_config), Some(loader)) = (language_config, loader) {
             if let Some(highlight_config) = language_config.highlight_config(&loader.scopes()) {
-                self.syntax = Syntax::new(&self.text, highlight_config, loader);
+                self.syntax = Syntax::new(self.text.slice(..), highlight_config, loader);
             }
             self.language = Some(language_config);
@@ -1197,7 +1232,11 @@ impl Document {
         // update tree-sitter syntax tree
         if let Some(syntax) = &mut self.syntax {
             // TODO: no unwrap
-            let res = syntax.update(&old_doc, &self.text, transaction.changes());
+            let res = syntax.update(
+                old_doc.slice(..),
+                self.text.slice(..),
+                transaction.changes(),
+            );
             if res.is_err() {
                 log::error!("TS parser failed, disabeling TS for the current buffer: {res:?}");
                 self.syntax = None;
@@ -1320,7 +1359,7 @@ impl Document {
         if success {
             // reset changeset to fix len
-            self.changes = ChangeSet::new(self.text());
+            self.changes = ChangeSet::new(self.text().slice(..));
             // Sync with changes with the jumplist selections.
             view.sync_changes(self);
         }
@@ -1403,7 +1442,7 @@ impl Document {
         }
         if success {
             // reset changeset to fix len
-            self.changes = ChangeSet::new(self.text());
+            self.changes = ChangeSet::new(self.text().slice(..));
             // Sync with changes with the jumplist selections.
             view.sync_changes(self);
         }
@@ -1426,7 +1465,7 @@ impl Document {
             return;
         }
-        let new_changeset = ChangeSet::new(self.text());
+        let new_changeset = ChangeSet::new(self.text().slice(..));
         let changes = std::mem::replace(&mut self.changes, new_changeset);
         // Instead of doing this messy merge we could always commit, and based on transaction
         // annotations either add a new layer or compose into the previous one.

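For reference, a self-contained sketch of the read-only probe that Document::detect_readonly performs above on unix, assuming rustix 0.38 with the "fs" feature as added to helix-view's Cargo.toml; the helper name here is illustrative and not part of the patch. A file that does not exist yet is treated as writable so a new buffer can still be saved later:

use std::path::Path;

use rustix::fs::{access, Access};

// Returns true when the current user cannot write to `path`,
// mirroring the unix branch of `detect_readonly`.
fn is_readonly(path: &Path) -> bool {
    match access(path, Access::WRITE_OK) {
        Ok(()) => false,
        // Missing files are not considered read-only.
        Err(err) if err.kind() == std::io::ErrorKind::NotFound => false,
        Err(_) => true,
    }
}

fn main() {
    println!("readonly: {}", is_readonly(Path::new("/etc/hosts")));
}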
@@ -1,10 +1,10 @@
 use crate::{
     align_view,
-    clipboard::{get_clipboard_provider, ClipboardProvider},
     document::{DocumentSavedEventFuture, DocumentSavedEventResult, Mode, SavePoint},
     graphics::{CursorKind, Rect},
     info::Info,
     input::KeyEvent,
+    register::Registers,
     theme::{self, Theme},
     tree::{self, Tree},
     view::ViewPosition,
@@ -40,7 +40,6 @@ use tokio::{
 use anyhow::{anyhow, bail, Error};
 pub use helix_core::diagnostic::Severity;
-pub use helix_core::register::Registers;
 use helix_core::{
     auto_pairs::AutoPairs,
     syntax::{self, AutoPairConfig, SoftWrap},
@@ -288,6 +287,24 @@ pub struct Config {
     pub workspace_lsp_roots: Vec<PathBuf>,
     /// Which line ending to choose for new documents. Defaults to `native`. i.e. `crlf` on Windows, otherwise `lf`.
     pub default_line_ending: LineEndingConfig,
+    /// Enables smart tab
+    pub smart_tab: Option<SmartTabConfig>,
+}
+
+#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, Eq, PartialOrd, Ord)]
+#[serde(rename_all = "kebab-case", default)]
+pub struct SmartTabConfig {
+    pub enable: bool,
+    pub supersede_menu: bool,
+}
+
+impl Default for SmartTabConfig {
+    fn default() -> Self {
+        SmartTabConfig {
+            enable: true,
+            supersede_menu: false,
+        }
+    }
 }
 #[derive(Debug, Default, Clone, PartialEq, Eq, Serialize, Deserialize)]
@@ -404,6 +421,7 @@ impl Default for StatusLineConfig {
                 E::Mode,
                 E::Spinner,
                 E::FileName,
+                E::ReadOnlyIndicator,
                 E::FileModificationIndicator,
             ],
             center: vec![],
@@ -447,15 +465,18 @@ pub enum StatusLineElement {
     /// The LSP activity spinner
     Spinner,
-    /// The base file name, including a dirty flag if it's unsaved
+    /// The file basename (the leaf of the open file's path)
     FileBaseName,
-    /// The relative file path, including a dirty flag if it's unsaved
+    /// The relative file path
     FileName,
     // The file modification indicator
     FileModificationIndicator,
+    /// An indicator that shows `"[readonly]"` when a file cannot be written
+    ReadOnlyIndicator,
     /// The file encoding
     FileEncoding,
@@ -614,7 +635,9 @@ impl std::str::FromStr for GutterType {
             "spacer" => Ok(Self::Spacer),
             "line-numbers" => Ok(Self::LineNumbers),
             "diff" => Ok(Self::Diff),
-            _ => anyhow::bail!("Gutter type can only be `diagnostics` or `line-numbers`."),
+            _ => anyhow::bail!(
+                "Gutter type can only be `diagnostics`, `spacer`, `line-numbers` or `diff`."
+            ),
         }
     }
 }
@@ -822,6 +845,7 @@ impl Default for Config {
             completion_replace: false,
             workspace_lsp_roots: Vec::new(),
             default_line_ending: LineEndingConfig::default(),
+            smart_tab: Some(SmartTabConfig::default()),
         }
     }
 }
@@ -835,18 +859,6 @@ impl Default for SearchConfig {
     }
 }
-pub struct Motion(pub Box<dyn Fn(&mut Editor)>);
-impl Motion {
-    pub fn run(&self, e: &mut Editor) {
-        (self.0)(e)
-    }
-}
-impl std::fmt::Debug for Motion {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        f.write_str("motion")
-    }
-}
 #[derive(Debug, Clone, Default)]
 pub struct Breakpoint {
     pub id: Option<usize>,
@@ -894,8 +906,6 @@ pub struct Editor {
     pub debugger_events: SelectAll<UnboundedReceiverStream<dap::Payload>>,
     pub breakpoints: HashMap<PathBuf, Vec<Breakpoint>>,
-    pub clipboard_provider: Box<dyn ClipboardProvider>,
     pub syn_loader: Arc<syntax::Loader>,
     pub theme_loader: Arc<theme::Loader>,
     /// last_theme is used for theme previews. We store the current theme here,
@@ -917,8 +927,8 @@ pub struct Editor {
     pub auto_pairs: Option<AutoPairs>,
     pub idle_timer: Pin<Box<Sleep>>,
-    pub last_motion: Option<Motion>,
+    redraw_timer: Pin<Box<Sleep>>,
+    last_motion: Option<Motion>,
     pub last_completion: Option<CompleteAction>,
     pub exit_code: i32,
@@ -952,6 +962,8 @@ pub struct Editor {
     pub completion_request_handle: Option<oneshot::Sender<()>>,
 }
+pub type Motion = Box<dyn Fn(&mut Editor)>;
 pub type RedrawHandle = (Arc<Notify>, Arc<RwLock<()>>);
 #[derive(Debug)]
@@ -961,6 +973,7 @@ pub enum EditorEvent {
     LanguageServerMessage((usize, Call)),
     DebuggerEvent(dap::Payload),
     IdleTimer,
+    Redraw,
 }
 #[derive(Debug, Clone)]
@@ -993,6 +1006,13 @@ pub enum Action {
     VerticalSplit,
 }
+impl Action {
+    /// Whether to align the view to the cursor after executing this action
+    pub fn align_view(&self, view: &View, new_doc: DocumentId) -> bool {
+        !matches!((self, view.doc == new_doc), (Action::Load, false))
+    }
+}
 /// Error thrown on failed document closed
 pub enum CloseError {
     /// Document doesn't exist
@@ -1041,10 +1061,10 @@ impl Editor {
             last_theme: None,
             last_selection: None,
             registers: Registers::default(),
-            clipboard_provider: get_clipboard_provider(),
             status_msg: None,
             autoinfo: None,
             idle_timer: Box::pin(sleep(conf.idle_timeout)),
+            redraw_timer: Box::pin(sleep(Duration::MAX)),
             last_motion: None,
             last_completion: None,
             config,
@@ -1058,6 +1078,19 @@ impl Editor {
         }
     }
+    pub fn apply_motion<F: Fn(&mut Self) + 'static>(&mut self, motion: F) {
+        motion(self);
+        self.last_motion = Some(Box::new(motion));
+    }
+
+    pub fn repeat_last_motion(&mut self, count: usize) {
+        if let Some(motion) = self.last_motion.take() {
+            for _ in 0..count {
+                motion(self);
+            }
+            self.last_motion = Some(motion);
+        }
+    }
     /// Current editing mode for the [`Editor`].
     pub fn mode(&self) -> Mode {
         self.mode
@@ -1417,7 +1450,7 @@ impl Editor {
     // ??? possible use for integration tests
     pub fn open(&mut self, path: &Path, action: Action) -> Result<DocumentId, Error> {
-        let path = helix_core::path::get_canonicalized_path(path)?;
+        let path = helix_core::path::get_canonicalized_path(path);
         let id = self.document_by_path(&path).map(|doc| doc.id);
         let id = if let Some(id) = id {
@@ -1542,7 +1575,18 @@ impl Editor {
         let path = path.map(|path| path.into());
         let doc = doc_mut!(self, &doc_id);
-        let future = doc.save(path, force)?;
+        let doc_save_future = doc.save(path, force)?;
+
+        // When a file is written to, notify the file event handler.
+        // Note: This can be removed once proper file watching is implemented.
+        let handler = self.language_servers.file_event_handler.clone();
+        let future = async move {
+            let res = doc_save_future.await;
+            if let Ok(event) = &res {
+                handler.file_changed(event.path.clone());
+            }
+            res
+        };
         use futures_util::stream;
@@ -1682,6 +1726,14 @@ impl Editor {
         &self,
         timeout: Option<u64>,
     ) -> Result<(), tokio::time::error::Elapsed> {
+        // Remove all language servers from the file event handler.
+        // Note: this is non-blocking.
+        for client in self.language_servers.iter_clients() {
+            self.language_servers
+                .file_event_handler
+                .remove_client(client.id());
+        }
         tokio::time::timeout(
             Duration::from_millis(timeout.unwrap_or(3000)),
             future::join_all(
@@ -1718,13 +1770,17 @@ impl Editor {
             _ = self.redraw_handle.0.notified() => {
                 if !self.needs_redraw{
                     self.needs_redraw = true;
-                    let timeout = Instant::now() + Duration::from_millis(96);
-                    if timeout < self.idle_timer.deadline(){
-                        self.idle_timer.as_mut().reset(timeout)
+                    let timeout = Instant::now() + Duration::from_millis(33);
+                    if timeout < self.idle_timer.deadline() && timeout < self.redraw_timer.deadline(){
+                        self.redraw_timer.as_mut().reset(timeout)
                     }
                 }
            }
+            _ = &mut self.redraw_timer => {
+                self.redraw_timer.as_mut().reset(Instant::now() + Duration::from_secs(86400 * 365 * 30));
+                return EditorEvent::Redraw
+            }
            _ = &mut self.idle_timer => {
                return EditorEvent::IdleTimer
            }

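The apply_motion/repeat_last_motion pair added above stores the motion as a boxed closure (pub type Motion = Box<dyn Fn(&mut Editor)>) and replays it on demand. A simplified, self-contained sketch of the same pattern; the Editor struct here is a stand-in with a single cursor field, not helix's type:

// Stand-in editor: only what the pattern needs.
struct Editor {
    cursor: usize,
    last_motion: Option<Box<dyn Fn(&mut Editor)>>,
}

impl Editor {
    // Run the motion once and remember it for later repeats.
    fn apply_motion<F: Fn(&mut Editor) + 'static>(&mut self, motion: F) {
        motion(self);
        self.last_motion = Some(Box::new(motion));
    }

    // Temporarily take the closure out so it can borrow `self` mutably.
    fn repeat_last_motion(&mut self, count: usize) {
        if let Some(motion) = self.last_motion.take() {
            for _ in 0..count {
                motion(self);
            }
            self.last_motion = Some(motion);
        }
    }
}

fn main() {
    let mut ed = Editor { cursor: 0, last_motion: None };
    ed.apply_motion(|e| e.cursor += 1);
    ed.repeat_last_motion(3);
    assert_eq!(ed.cursor, 4);
}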
@@ -1,4 +1,5 @@
-use helix_core::{register::Registers, unicode::width::UnicodeWidthStr};
+use crate::register::Registers;
+use helix_core::unicode::width::UnicodeWidthStr;
 use std::fmt::Write;
 #[derive(Debug)]
@@ -69,16 +70,8 @@ impl Info {
     pub fn from_registers(registers: &Registers) -> Self {
         let body: Vec<_> = registers
-            .inner()
-            .iter()
-            .map(|(ch, reg)| {
-                let content = reg
-                    .read()
-                    .get(0)
-                    .and_then(|s| s.lines().next())
-                    .unwrap_or_default();
-                (ch.to_string(), content)
-            })
+            .iter_preview()
+            .map(|(ch, preview)| (ch.to_string(), preview))
             .collect();
         let mut infobox = Self::new("Registers", &body);

@@ -45,6 +45,10 @@ pub enum MouseEventKind {
     ScrollDown,
     /// Scrolled mouse wheel upwards (away from the user).
     ScrollUp,
+    /// Scrolled mouse wheel leftwards.
+    ScrollLeft,
+    /// Scrolled mouse wheel rightwards.
+    ScrollRight,
 }
 /// Represents a mouse button.
@@ -381,7 +385,7 @@ impl std::str::FromStr for KeyEvent {
             function if function.len() > 1 && function.starts_with('F') => {
                 let function: String = function.chars().skip(1).collect();
                 let function = str::parse::<u8>(&function)?;
-                (function > 0 && function < 13)
+                (function > 0 && function < 25)
                     .then_some(KeyCode::F(function))
                     .ok_or_else(|| anyhow!("Invalid function key '{}'", function))?
             }
@@ -460,6 +464,8 @@ impl From<crossterm::event::MouseEventKind> for MouseEventKind {
             crossterm::event::MouseEventKind::Moved => Self::Moved,
             crossterm::event::MouseEventKind::ScrollDown => Self::ScrollDown,
             crossterm::event::MouseEventKind::ScrollUp => Self::ScrollUp,
+            crossterm::event::MouseEventKind::ScrollLeft => Self::ScrollLeft,
+            crossterm::event::MouseEventKind::ScrollRight => Self::ScrollRight,
         }
     }
 }
@@ -684,7 +690,7 @@ mod test {
     #[test]
     fn parsing_nonsensical_keys_fails() {
-        assert!(str::parse::<KeyEvent>("F13").is_err());
+        assert!(str::parse::<KeyEvent>("F25").is_err());
         assert!(str::parse::<KeyEvent>("F0").is_err());
         assert!(str::parse::<KeyEvent>("aaa").is_err());
         assert!(str::parse::<KeyEvent>("S-S-a").is_err());

@@ -15,6 +15,7 @@ pub mod base64;
 pub mod info;
 pub mod input;
 pub mod keyboard;
+pub mod register;
 pub mod theme;
 pub mod tree;
 pub mod view;

@ -0,0 +1,333 @@
use std::{borrow::Cow, collections::HashMap, iter};
use anyhow::Result;
use helix_core::NATIVE_LINE_ENDING;
use crate::{
clipboard::{get_clipboard_provider, ClipboardProvider, ClipboardType},
document::SCRATCH_BUFFER_NAME,
Editor,
};
/// A key-value store for saving sets of values.
///
/// Each register corresponds to a `char`. Most chars can be used to store any set of
/// values but a few chars are "special registers". Special registers have unique
/// behaviors when read or written to:
///
/// * Black hole (`_`): all values read and written are discarded
/// * Selection indices (`#`): index number of each selection starting at 1
/// * Selection contents (`.`)
/// * Document path (`%`): filename of the current buffer
/// * System clipboard (`*`)
/// * Primary clipboard (`+`)
#[derive(Debug)]
pub struct Registers {
/// The mapping of register to values.
/// Values are stored in reverse order when inserted with `Registers::write`.
/// The order is reversed again in `Registers::read`. This allows us to
/// efficiently prepend new values in `Registers::push`.
inner: HashMap<char, Vec<String>>,
clipboard_provider: Box<dyn ClipboardProvider>,
pub last_search_register: char,
}
impl Default for Registers {
fn default() -> Self {
Self {
inner: Default::default(),
clipboard_provider: get_clipboard_provider(),
last_search_register: '/',
}
}
}
impl Registers {
pub fn read<'a>(&'a self, name: char, editor: &'a Editor) -> Option<RegisterValues<'a>> {
match name {
'_' => Some(RegisterValues::new(iter::empty())),
'#' => {
let (view, doc) = current_ref!(editor);
let selections = doc.selection(view.id).len();
// ExactSizeIterator is implemented for Range<usize> but
// not RangeInclusive<usize>.
Some(RegisterValues::new(
(0..selections).map(|i| (i + 1).to_string().into()),
))
}
'.' => {
let (view, doc) = current_ref!(editor);
let text = doc.text().slice(..);
Some(RegisterValues::new(doc.selection(view.id).fragments(text)))
}
'%' => {
let doc = doc!(editor);
let path = doc
.path()
.as_ref()
.map(|p| p.to_string_lossy())
.unwrap_or_else(|| SCRATCH_BUFFER_NAME.into());
Some(RegisterValues::new(iter::once(path)))
}
'*' | '+' => Some(read_from_clipboard(
self.clipboard_provider.as_ref(),
self.inner.get(&name),
match name {
'*' => ClipboardType::Clipboard,
'+' => ClipboardType::Selection,
_ => unreachable!(),
},
)),
_ => self
.inner
.get(&name)
.map(|values| RegisterValues::new(values.iter().map(Cow::from).rev())),
}
}
pub fn write(&mut self, name: char, mut values: Vec<String>) -> Result<()> {
match name {
'_' => Ok(()),
'#' | '.' | '%' => Err(anyhow::anyhow!("Register {name} does not support writing")),
'*' | '+' => {
self.clipboard_provider.set_contents(
values.join(NATIVE_LINE_ENDING.as_str()),
match name {
'*' => ClipboardType::Clipboard,
'+' => ClipboardType::Selection,
_ => unreachable!(),
},
)?;
values.reverse();
self.inner.insert(name, values);
Ok(())
}
_ => {
values.reverse();
self.inner.insert(name, values);
Ok(())
}
}
}
pub fn push(&mut self, name: char, mut value: String) -> Result<()> {
match name {
'_' => Ok(()),
'#' | '.' | '%' => Err(anyhow::anyhow!("Register {name} does not support pushing")),
'*' | '+' => {
let clipboard_type = match name {
'*' => ClipboardType::Clipboard,
'+' => ClipboardType::Selection,
_ => unreachable!(),
};
let contents = self.clipboard_provider.get_contents(clipboard_type)?;
let saved_values = self.inner.entry(name).or_insert_with(Vec::new);
if !contents_are_saved(saved_values, &contents) {
anyhow::bail!("Failed to push to register {name}: clipboard does not match register contents");
}
saved_values.push(value.clone());
if !contents.is_empty() {
value.push_str(NATIVE_LINE_ENDING.as_str());
}
value.push_str(&contents);
self.clipboard_provider
.set_contents(value, clipboard_type)?;
Ok(())
}
_ => {
self.inner.entry(name).or_insert_with(Vec::new).push(value);
Ok(())
}
}
}
pub fn first<'a>(&'a self, name: char, editor: &'a Editor) -> Option<Cow<'a, str>> {
self.read(name, editor).and_then(|mut values| values.next())
}
pub fn last<'a>(&'a self, name: char, editor: &'a Editor) -> Option<Cow<'a, str>> {
self.read(name, editor).and_then(|values| values.last())
}
pub fn iter_preview(&self) -> impl Iterator<Item = (char, &str)> {
self.inner
.iter()
.filter(|(name, _)| !matches!(name, '*' | '+'))
.map(|(name, values)| {
let preview = values
.last()
.and_then(|s| s.lines().next())
.unwrap_or("<empty>");
(*name, preview)
})
.chain(
[
('_', "<empty>"),
('#', "<selection indices>"),
('.', "<selection contents>"),
('%', "<document path>"),
('*', "<system clipboard>"),
('+', "<primary clipboard>"),
]
.iter()
.copied(),
)
}
pub fn clear(&mut self) {
self.clear_clipboard(ClipboardType::Clipboard);
self.clear_clipboard(ClipboardType::Selection);
self.inner.clear()
}
pub fn remove(&mut self, name: char) -> bool {
match name {
'*' | '+' => {
self.clear_clipboard(match name {
'*' => ClipboardType::Clipboard,
'+' => ClipboardType::Selection,
_ => unreachable!(),
});
self.inner.remove(&name);
true
}
'_' | '#' | '.' | '%' => false,
_ => self.inner.remove(&name).is_some(),
}
}
fn clear_clipboard(&mut self, clipboard_type: ClipboardType) {
if let Err(err) = self
.clipboard_provider
.set_contents("".into(), clipboard_type)
{
log::error!(
"Failed to clear {} clipboard: {err}",
match clipboard_type {
ClipboardType::Clipboard => "system",
ClipboardType::Selection => "primary",
}
)
}
}
pub fn clipboard_provider_name(&self) -> Cow<str> {
self.clipboard_provider.name()
}
}
fn read_from_clipboard<'a>(
provider: &dyn ClipboardProvider,
saved_values: Option<&'a Vec<String>>,
clipboard_type: ClipboardType,
) -> RegisterValues<'a> {
match provider.get_contents(clipboard_type) {
Ok(contents) => {
// If we're pasting the same values that we just yanked, re-use
// the saved values. This allows pasting multiple selections
// even when yanked to a clipboard.
let Some(values) = saved_values else { return RegisterValues::new(iter::once(contents.into())) };
if contents_are_saved(values, &contents) {
RegisterValues::new(values.iter().map(Cow::from).rev())
} else {
RegisterValues::new(iter::once(contents.into()))
}
}
Err(err) => {
log::error!(
"Failed to read {} clipboard: {err}",
match clipboard_type {
ClipboardType::Clipboard => "system",
ClipboardType::Selection => "primary",
}
);
RegisterValues::new(iter::empty())
}
}
}
fn contents_are_saved(saved_values: &[String], mut contents: &str) -> bool {
let line_ending = NATIVE_LINE_ENDING.as_str();
let mut values = saved_values.iter().rev();
match values.next() {
Some(first) if contents.starts_with(first) => {
contents = &contents[first.len()..];
}
None if contents.is_empty() => return true,
_ => return false,
}
for value in values {
if contents.starts_with(line_ending) && contents[line_ending.len()..].starts_with(value) {
contents = &contents[line_ending.len() + value.len()..];
} else {
return false;
}
}
true
}
// This is a wrapper of an iterator that is both double ended and exact size,
// and can return either owned or borrowed values. Regular registers can
// return borrowed values while some special registers need to return owned
// values.
pub struct RegisterValues<'a> {
iter: Box<dyn DoubleEndedExactSizeIterator<Item = Cow<'a, str>> + 'a>,
}
impl<'a> RegisterValues<'a> {
fn new(
iter: impl DoubleEndedIterator<Item = Cow<'a, str>>
+ ExactSizeIterator<Item = Cow<'a, str>>
+ 'a,
) -> Self {
Self {
iter: Box::new(iter),
}
}
}
impl<'a> Iterator for RegisterValues<'a> {
type Item = Cow<'a, str>;
fn next(&mut self) -> Option<Self::Item> {
self.iter.next()
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
}
impl<'a> DoubleEndedIterator for RegisterValues<'a> {
fn next_back(&mut self) -> Option<Self::Item> {
self.iter.next_back()
}
}
impl<'a> ExactSizeIterator for RegisterValues<'a> {
fn len(&self) -> usize {
self.iter.len()
}
}
// Each RegisterValues iterator is both double ended and exact size. We can't
// type RegisterValues as `Box<dyn DoubleEndedIterator + ExactSizeIterator>`
// because only one non-auto trait is allowed in trait objects. So we need to
// create a new trait that covers both. `RegisterValues` wraps that type so the
// trait only needs to live in this module instead of being imported at every
// register call site.
trait DoubleEndedExactSizeIterator: DoubleEndedIterator + ExactSizeIterator {}
impl<I: DoubleEndedIterator + ExactSizeIterator> DoubleEndedExactSizeIterator for I {}
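The comments in the new register module above describe why values are stored reversed: a logical prepend in `Registers::push` becomes a plain `Vec::push`, and `Registers::read` reverses them back on the way out. A minimal, standalone Rust sketch of that storage scheme (the `SimpleRegisters` type and its methods are illustrative only, not the Helix API):

use std::collections::HashMap;

/// Toy register store mirroring the reversed-storage trick described above.
#[derive(Default)]
struct SimpleRegisters {
    inner: HashMap<char, Vec<String>>,
}

impl SimpleRegisters {
    /// Overwrite a register, storing the values in reverse order.
    fn write(&mut self, name: char, mut values: Vec<String>) {
        values.reverse();
        self.inner.insert(name, values);
    }

    /// Logically prepend a value: because storage is reversed, this is a cheap append.
    fn push(&mut self, name: char, value: String) {
        self.inner.entry(name).or_default().push(value);
    }

    /// Read values back in their logical order by reversing again.
    fn read(&self, name: char) -> impl Iterator<Item = &str> + '_ {
        self.inner
            .get(&name)
            .into_iter()
            .flat_map(|values| values.iter().rev().map(String::as_str))
    }
}

fn main() {
    let mut regs = SimpleRegisters::default();
    regs.write('a', vec!["one".into(), "two".into()]);
    regs.push('a', "zero".into());
    // The pushed value comes first, matching the prepend semantics described above.
    let values: Vec<&str> = regs.read('a').collect();
    assert_eq!(values, ["zero", "one", "two"]);
    println!("{values:?}");
}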

@ -187,11 +187,17 @@ impl View {
} }
pub fn gutter_offset(&self, doc: &Document) -> u16 { pub fn gutter_offset(&self, doc: &Document) -> u16 {
self.gutters let total_width = self
.gutters
.layout .layout
.iter() .iter()
.map(|gutter| gutter.width(self, doc) as u16) .map(|gutter| gutter.width(self, doc) as u16)
.sum() .sum();
if total_width < self.area.width {
total_width
} else {
0
}
} }
// //
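The gutter_offset change above adds an overflow guard: if the gutters together would be at least as wide as the view, the offset collapses to zero so the document still gets space. A rough standalone illustration of that clamp (the free function and parameter names are hypothetical, not Helix's types):

// If the combined gutter width leaves no room for the document, draw no gutters.
fn gutter_offset(gutter_widths: &[u16], view_width: u16) -> u16 {
    let total: u16 = gutter_widths.iter().sum();
    if total < view_width {
        total
    } else {
        0
    }
}

fn main() {
    assert_eq!(gutter_offset(&[3, 1, 2], 80), 6); // gutters fit: keep their width
    assert_eq!(gutter_offset(&[3, 1, 2], 5), 0); // gutters would fill the view: drop them
}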

@ -7,6 +7,7 @@ awk-language-server = { command = "awk-language-server" }
bash-language-server = { command = "bash-language-server", args = ["start"] } bash-language-server = { command = "bash-language-server", args = ["start"] }
bass = { command = "bass", args = ["--lsp"] } bass = { command = "bass", args = ["--lsp"] }
bicep-langserver = { command = "bicep-langserver" } bicep-langserver = { command = "bicep-langserver" }
bufls = { command = "bufls", args = ["serve"] }
cairo-language-server = { command = "cairo-language-server", args = [] } cairo-language-server = { command = "cairo-language-server", args = [] }
cl-lsp = { command = "cl-lsp", args = [ "stdio" ] } cl-lsp = { command = "cl-lsp", args = [ "stdio" ] }
clangd = { command = "clangd" } clangd = { command = "clangd" }
@ -14,6 +15,7 @@ clojure-lsp = { command = "clojure-lsp" }
cmake-language-server = { command = "cmake-language-server" } cmake-language-server = { command = "cmake-language-server" }
crystalline = { command = "crystalline", args = ["--stdio"] } crystalline = { command = "crystalline", args = ["--stdio"] }
cs = { command = "cs", args = ["launch", "--contrib", "smithy-language-server", "--", "0"] } cs = { command = "cs", args = ["launch", "--contrib", "smithy-language-server", "--", "0"] }
csharp-ls = { command = "csharp-ls" }
cuelsp = { command = "cuelsp" } cuelsp = { command = "cuelsp" }
dart = { command = "dart", args = ["language-server", "--client-id=helix"] } dart = { command = "dart", args = ["language-server", "--client-id=helix"] }
dhall-lsp-server = { command = "dhall-lsp-server" } dhall-lsp-server = { command = "dhall-lsp-server" }
@ -26,6 +28,7 @@ erlang-ls = { command = "erlang_ls" }
forc = { command = "forc", args = ["lsp"] } forc = { command = "forc", args = ["lsp"] }
forth-lsp = { command = "forth-lsp" } forth-lsp = { command = "forth-lsp" }
fortls = { command = "fortls", args = ["--lowercase_intrinsics"] } fortls = { command = "fortls", args = ["--lowercase_intrinsics"] }
fsharp-ls = { command = "fsautocomplete", config = { AutomaticWorkspaceInit = true } }
gleam = { command = "gleam", args = ["lsp"] } gleam = { command = "gleam", args = ["lsp"] }
haskell-language-server = { command = "haskell-language-server-wrapper", args = ["--lsp"] } haskell-language-server = { command = "haskell-language-server-wrapper", args = ["--lsp"] }
idris2-lsp = { command = "idris2-lsp" } idris2-lsp = { command = "idris2-lsp" }
@ -47,10 +50,13 @@ ols = { command = "ols", args = [] }
omnisharp = { command = "OmniSharp", args = [ "--languageserver" ] } omnisharp = { command = "OmniSharp", args = [ "--languageserver" ] }
openscad-lsp = { command = "openscad-lsp", args = ["--stdio"] } openscad-lsp = { command = "openscad-lsp", args = ["--stdio"] }
pasls = { command = "pasls", args = [] } pasls = { command = "pasls", args = [] }
pbkit = { command = "pb", args = [ "lsp" ] }
perlnavigator = { command = "perlnavigator", args= ["--stdio"] } perlnavigator = { command = "perlnavigator", args= ["--stdio"] }
prisma-language-server = { command = "prisma-language-server", args = ["--stdio"] } prisma-language-server = { command = "prisma-language-server", args = ["--stdio"] }
purescript-language-server = { command = "purescript-language-server", args = ["--stdio"] } purescript-language-server = { command = "purescript-language-server", args = ["--stdio"] }
pylsp = { command = "pylsp" } pylsp = { command = "pylsp" }
pyright = { command = "pyright-langserver", args = ["--stdio"], config = {} }
pylyzer = { command = "pylyzer", args = ["--server"] }
qmlls = { command = "qmlls" } qmlls = { command = "qmlls" }
r = { command = "R", args = ["--no-echo", "-e", "languageserver::run()"] } r = { command = "R", args = ["--no-echo", "-e", "languageserver::run()"] }
racket = { command = "racket", args = ["-l", "racket-langserver"] } racket = { command = "racket", args = ["-l", "racket-langserver"] }
@ -62,7 +68,6 @@ slint-lsp = { command = "slint-lsp", args = [] }
solargraph = { command = "solargraph", args = ["stdio"] } solargraph = { command = "solargraph", args = ["stdio"] }
solc = { command = "solc", args = ["--lsp"] } solc = { command = "solc", args = ["--lsp"] }
sourcekit-lsp = { command = "sourcekit-lsp" } sourcekit-lsp = { command = "sourcekit-lsp" }
svelteserver = { command = "svelteserver", args = ["--stdio"] }
svlangserver = { command = "svlangserver", args = [] } svlangserver = { command = "svlangserver", args = [] }
swipl = { command = "swipl", args = [ "-g", "use_module(library(lsp_server))", "-g", "lsp_server:main", "-t", "halt", "--", "stdio" ] } swipl = { command = "swipl", args = [ "-g", "use_module(library(lsp_server))", "-g", "lsp_server:main", "-t", "halt", "--", "stdio" ] }
taplo = { command = "taplo", args = ["lsp", "stdio"] } taplo = { command = "taplo", args = ["lsp", "stdio"] }
@ -70,7 +75,7 @@ terraform-ls = { command = "terraform-ls", args = ["serve"] }
texlab = { command = "texlab" } texlab = { command = "texlab" }
vala-language-server = { command = "vala-language-server" } vala-language-server = { command = "vala-language-server" }
vhdl_ls = { command = "vhdl_ls", args = [] } vhdl_ls = { command = "vhdl_ls", args = [] }
vlang-language-server = { command = "v", args = ["ls"] } vlang-language-server = { command = "v-analyzer" }
vscode-css-language-server = { command = "vscode-css-language-server", args = ["--stdio"], config = { "provideFormatter" = true }} vscode-css-language-server = { command = "vscode-css-language-server", args = ["--stdio"], config = { "provideFormatter" = true }}
vscode-html-language-server = { command = "vscode-html-language-server", args = ["--stdio"], config = { provideFormatter = true } } vscode-html-language-server = { command = "vscode-html-language-server", args = ["--stdio"], config = { provideFormatter = true } }
vscode-json-language-server = { command = "vscode-json-language-server", args = ["--stdio"], config = { provideFormatter = true } } vscode-json-language-server = { command = "vscode-json-language-server", args = ["--stdio"], config = { provideFormatter = true } }
@ -140,6 +145,25 @@ includeInlayParameterNameHintsWhenArgumentMatchesName = true
includeInlayPropertyDeclarationTypeHints = true includeInlayPropertyDeclarationTypeHints = true
includeInlayVariableTypeHints = true includeInlayVariableTypeHints = true
[language-server.svelteserver]
command = "svelteserver"
args = ["--stdio"]
[language-server.svelteserver.config.configuration.typescript]
inlayHints.parameterTypes.enabled = true
inlayHints.variableTypes.enabled = true
inlayHints.propertyDeclarationTypes.enabled = true
inlayHints.functionLikeReturnTypes.enabled = true
inlayHints.enumMemberValues.enabled = true
inlayHints.parameterNames.enabled = "all"
[language-server.svelteserver.config.configuration.javascript]
inlayHints.parameterTypes.enabled = true
inlayHints.variableTypes.enabled = true
inlayHints.propertyDeclarationTypes.enabled = true
inlayHints.functionLikeReturnTypes.enabled = true
inlayHints.enumMemberValues.enabled = true
inlayHints.parameterNames.enabled = "all"
[[language]] [[language]]
name = "rust" name = "rust"
@ -239,6 +263,7 @@ name = "protobuf"
scope = "source.proto" scope = "source.proto"
injection-regex = "protobuf" injection-regex = "protobuf"
file-types = ["proto"] file-types = ["proto"]
language-servers = [ "bufls", "pbkit" ]
roots = [] roots = []
comment-token = "//" comment-token = "//"
indent = { tab-width = 2, unit = " " } indent = { tab-width = 2, unit = " " }
@ -291,7 +316,7 @@ indent = { tab-width = 2, unit = " " }
name = "json" name = "json"
scope = "source.json" scope = "source.json"
injection-regex = "json" injection-regex = "json"
file-types = ["json", "jsonc", "arb", "ipynb", "geojson"] file-types = ["json", "jsonc", "arb", "ipynb", "geojson", "gltf"]
roots = [] roots = []
language-servers = [ "vscode-json-language-server" ] language-servers = [ "vscode-json-language-server" ]
auto-format = true auto-format = true
@ -524,7 +549,6 @@ file-types = ["js", "mjs", "cjs"]
shebangs = ["node"] shebangs = ["node"]
roots = [] roots = []
comment-token = "//" comment-token = "//"
# TODO: highlights-params
language-servers = [ "typescript-language-server" ] language-servers = [ "typescript-language-server" ]
indent = { tab-width = 2, unit = " " } indent = { tab-width = 2, unit = " " }
@ -542,7 +566,7 @@ args = { program = "{0}" }
[[grammar]] [[grammar]]
name = "javascript" name = "javascript"
source = { git = "https://github.com/tree-sitter/tree-sitter-javascript", rev = "4a95461c4761c624f2263725aca79eeaefd36cad" } source = { git = "https://github.com/tree-sitter/tree-sitter-javascript", rev = "f772967f7b7bc7c28f845be2420a38472b16a8ee" }
[[language]] [[language]]
name = "jsx" name = "jsx"
@ -564,13 +588,12 @@ file-types = ["ts", "mts", "cts"]
language-id = "typescript" language-id = "typescript"
shebangs = [] shebangs = []
roots = [] roots = []
# TODO: highlights-params
language-servers = [ "typescript-language-server" ] language-servers = [ "typescript-language-server" ]
indent = { tab-width = 2, unit = " " } indent = { tab-width = 2, unit = " " }
[[grammar]] [[grammar]]
name = "typescript" name = "typescript"
source = { git = "https://github.com/tree-sitter/tree-sitter-typescript", rev = "6aac031ad88dd6317f02ac0bb27d099a553a7d8c", subpath = "typescript" } source = { git = "https://github.com/tree-sitter/tree-sitter-typescript", rev = "b1bf4825d9eaa0f3bdeb1e52f099533328acfbdf", subpath = "typescript" }
[[language]] [[language]]
name = "tsx" name = "tsx"
@ -579,13 +602,12 @@ injection-regex = "(tsx)" # |typescript
language-id = "typescriptreact" language-id = "typescriptreact"
file-types = ["tsx"] file-types = ["tsx"]
roots = [] roots = []
# TODO: highlights-params
language-servers = [ "typescript-language-server" ] language-servers = [ "typescript-language-server" ]
indent = { tab-width = 2, unit = " " } indent = { tab-width = 2, unit = " " }
[[grammar]] [[grammar]]
name = "tsx" name = "tsx"
source = { git = "https://github.com/tree-sitter/tree-sitter-typescript", rev = "6aac031ad88dd6317f02ac0bb27d099a553a7d8c", subpath = "tsx" } source = { git = "https://github.com/tree-sitter/tree-sitter-typescript", rev = "b1bf4825d9eaa0f3bdeb1e52f099533328acfbdf", subpath = "tsx" }
[[language]] [[language]]
name = "css" name = "css"
@ -635,7 +657,7 @@ scope = "source.python"
injection-regex = "python" injection-regex = "python"
file-types = ["py","pyi","py3","pyw","ptl",".pythonstartup",".pythonrc","SConstruct"] file-types = ["py","pyi","py3","pyw","ptl",".pythonstartup",".pythonrc","SConstruct"]
shebangs = ["python"] shebangs = ["python"]
roots = [] roots = ["pyproject.toml", "setup.py", "poetry.lock", "pyrightconfig.json"]
comment-token = "#" comment-token = "#"
language-servers = [ "pylsp" ] language-servers = [ "pylsp" ]
# TODO: pyls needs utf-8 offsets # TODO: pyls needs utf-8 offsets
@ -664,7 +686,7 @@ indent = { tab-width = 2, unit = " " }
[[grammar]] [[grammar]]
name = "nickel" name = "nickel"
source = { git = "https://github.com/nickel-lang/tree-sitter-nickel", rev = "3a794388773f2424a97b2186828aa3fac4c66ce6" } source = { git = "https://github.com/nickel-lang/tree-sitter-nickel", rev = "e1d9337864d209898a08c26b8cd4c2dd14c15148" }
[[language]] [[language]]
name = "nix" name = "nix"
@ -685,7 +707,7 @@ source = { git = "https://github.com/nix-community/tree-sitter-nix", rev = "1b69
name = "ruby" name = "ruby"
scope = "source.ruby" scope = "source.ruby"
injection-regex = "ruby" injection-regex = "ruby"
file-types = ["rb", "rake", "rakefile", "irb", "gemfile", "gemspec", "Rakefile", "Gemfile", "rabl", "jbuilder", "jb", "Podfile", "podspec", "Vagrantfile"] file-types = ["rb", "rake", "rakefile", "irb", "gemfile", "gemspec", "Rakefile", "Gemfile", "rabl", "jbuilder", "jb", "Podfile", "podspec", "Vagrantfile", "Brewfile"]
shebangs = ["ruby"] shebangs = ["ruby"]
roots = [] roots = []
comment-token = "#" comment-token = "#"
@ -700,7 +722,7 @@ source = { git = "https://github.com/tree-sitter/tree-sitter-ruby", rev = "206c7
name = "bash" name = "bash"
scope = "source.bash" scope = "source.bash"
injection-regex = "(shell|bash|zsh|sh)" injection-regex = "(shell|bash|zsh|sh)"
file-types = ["sh", "bash", "zsh", ".bash_login", ".bash_logout", ".bash_profile", ".bashrc", ".profile", ".zshenv", ".zlogin", ".zlogout", ".zprofile", ".zshrc", ".zimrc", "APKBUILD", "PKGBUILD", "eclass", "ebuild", "bazelrc", ".bash_aliases"] file-types = ["sh", "bash", "zsh", ".bash_login", ".bash_logout", ".bash_profile", ".bashrc", ".profile", ".zshenv", "zshenv", ".zlogin", "zlogin", ".zlogout", "zlogout", ".zprofile", "zprofile", ".zshrc", "zshrc", ".zimrc", "APKBUILD", "PKGBUILD", "eclass", "ebuild", "bazelrc", ".bash_aliases", "Renviron", ".Renviron"]
shebangs = ["sh", "bash", "dash", "zsh"] shebangs = ["sh", "bash", "dash", "zsh"]
roots = [] roots = []
comment-token = "#" comment-token = "#"
@ -741,7 +763,7 @@ source = { git = "https://github.com/gbprod/tree-sitter-twig", rev = "807b293fec
name = "latex" name = "latex"
scope = "source.tex" scope = "source.tex"
injection-regex = "tex" injection-regex = "tex"
file-types = ["tex", "sty", "cls"] file-types = ["tex", "sty", "cls", "Rd"]
roots = [] roots = []
comment-token = "%" comment-token = "%"
language-servers = [ "texlab" ] language-servers = [ "texlab" ]
@ -814,7 +836,7 @@ injection-regex = "java"
file-types = ["java"] file-types = ["java"]
roots = ["pom.xml", "build.gradle", "build.gradle.kts"] roots = ["pom.xml", "build.gradle", "build.gradle.kts"]
language-servers = [ "jdtls" ] language-servers = [ "jdtls" ]
indent = { tab-width = 4, unit = " " } indent = { tab-width = 2, unit = " " }
[[grammar]] [[grammar]]
name = "java" name = "java"
@ -915,7 +937,7 @@ language-servers = [ "svelteserver" ]
[[grammar]] [[grammar]]
name = "svelte" name = "svelte"
source = { git = "https://github.com/Himujjal/tree-sitter-svelte", rev = "349a5984513b4a4a9e143a6e746120c6ff6cf6ed" } source = { git = "https://github.com/Himujjal/tree-sitter-svelte", rev = "be7f2e7db1fc19f0852265ec60923fc058380739" }
[[language]] [[language]]
name = "vue" name = "vue"
@ -958,6 +980,18 @@ indent = { tab-width = 2, unit = " " }
name = "haskell" name = "haskell"
source = { git = "https://github.com/tree-sitter/tree-sitter-haskell", rev = "98fc7f59049aeb713ab9b72a8ff25dcaaef81087" } source = { git = "https://github.com/tree-sitter/tree-sitter-haskell", rev = "98fc7f59049aeb713ab9b72a8ff25dcaaef81087" }
[[language]]
name = "haskell-persistent"
scope = "source.persistentmodels"
file-types = ["persistentmodels"]
roots = []
comment-token = "--"
indent = { tab-width = 2, unit = " " }
[[grammar]]
name = "haskell-persistent"
source = { git = "https://github.com/MercuryTechnologies/tree-sitter-haskell-persistent", rev = "58a6ccfd56d9f1de8fb9f77e6c42151f8f0d0f3d" }
[[language]] [[language]]
name = "purescript" name = "purescript"
scope = "source.purescript" scope = "source.purescript"
@ -1008,7 +1042,7 @@ args = { console = "internalConsole", attachCommands = [ "platform select remote
[[grammar]] [[grammar]]
name = "zig" name = "zig"
source = { git = "https://github.com/maxxnino/tree-sitter-zig", rev = "8d3224c3bd0890fe08358886ebf54fca2ed448a6" } source = { git = "https://github.com/maxxnino/tree-sitter-zig", rev = "0d08703e4c3f426ec61695d7617415fff97029bd" }
[[language]] [[language]]
name = "prolog" name = "prolog"
@ -1022,7 +1056,7 @@ language-servers = [ "swipl" ]
[[language]] [[language]]
name = "tsq" name = "tsq"
scope = "source.tsq" scope = "source.tsq"
file-types = ["scm"] file-types = ["tsq"]
roots = [] roots = []
comment-token = ";" comment-token = ";"
injection-regex = "tsq" injection-regex = "tsq"
@ -1084,7 +1118,18 @@ indent = { tab-width = 2, unit = " " }
[[grammar]] [[grammar]]
name = "perl" name = "perl"
source = { git = "https://github.com/ganezdragon/tree-sitter-perl", rev = "0ac2c6da562c7a2c26ed7e8691d4a590f7e8b90a" } source = { git = "https://github.com/tree-sitter-perl/tree-sitter-perl", rev = "ed21ecbcc128a6688770ebafd3ef68a1c9bc1ea9" }
[[language]]
name = "pod"
scope = "source.pod"
injection-regex = "pod"
roots = []
file-types = ["pod"]
[[grammar]]
name = "pod"
source = { git = "https://github.com/tree-sitter-perl/tree-sitter-pod", rev = "d466b84009a63986834498073ec05d58d727d55f" }
[[language]] [[language]]
name = "racket" name = "racket"
@ -1224,7 +1269,7 @@ indent = { tab-width = 2, unit = " " }
[[grammar]] [[grammar]]
name = "dart" name = "dart"
source = { git = "https://github.com/UserNobody14/tree-sitter-dart", rev = "2d7f66651c9319c1a0e4dda226cc2628fbb66528" } source = { git = "https://github.com/UserNobody14/tree-sitter-dart", rev = "e398400a0b785af3cf571f5a57eccab242f0cdf9" }
[[language]] [[language]]
name = "scala" name = "scala"
@ -1265,7 +1310,7 @@ text-width = 72
[[grammar]] [[grammar]]
name = "git-commit" name = "git-commit"
source = { git = "https://github.com/the-mikedavis/tree-sitter-git-commit", rev = "db88cffa3952dd2328b741af5d0fc69bdb76704f" } source = { git = "https://github.com/the-mikedavis/tree-sitter-git-commit", rev = "6f193a66e9aa872760823dff020960c6cedc37b3" }
[[language]] [[language]]
name = "diff" name = "diff"
@ -1297,7 +1342,7 @@ source = { git = "https://github.com/the-mikedavis/tree-sitter-git-rebase", rev
name = "regex" name = "regex"
scope = "source.regex" scope = "source.regex"
injection-regex = "regex" injection-regex = "regex"
file-types = ["regex"] file-types = ["regex", ".Rbuildignore"]
roots = [] roots = []
[[grammar]] [[grammar]]
@ -1522,13 +1567,13 @@ language-servers = [ "robotframework_ls" ]
[[grammar]] [[grammar]]
name = "robot" name = "robot"
source = { git = "https://github.com/Hubro/tree-sitter-robot", rev = "f1142bfaa6acfce95e25d2c6d18d218f4f533927" } source = { git = "https://github.com/Hubro/tree-sitter-robot", rev = "322e4cc65754d2b3fdef4f2f8a71e0762e3d13af" }
[[language]] [[language]]
name = "r" name = "r"
scope = "source.r" scope = "source.r"
injection-regex = "(r|R)" injection-regex = "(r|R)"
file-types = ["r", "R"] file-types = ["r", "R", ".Rprofile", "Rprofile.site"]
shebangs = ["r", "R"] shebangs = ["r", "R"]
roots = [] roots = []
comment-token = "#" comment-token = "#"
@ -1659,13 +1704,14 @@ name = "nu"
scope = "source.nu" scope = "source.nu"
injection-regex = "nu" injection-regex = "nu"
file-types = ["nu"] file-types = ["nu"]
shebangs = ["nu"]
roots = [] roots = []
comment-token = "#" comment-token = "#"
indent = { tab-width = 2, unit = " " } indent = { tab-width = 2, unit = " " }
[[grammar]] [[grammar]]
name = "nu" name = "nu"
source = { git = "https://github.com/LhKipp/tree-sitter-nu", rev = "eb95bdac3abd73ef47e53f19c63e74a31405ebd2" } source = { git = "https://github.com/nushell/tree-sitter-nu", rev = "98c11c491e3405c75affa1cf004097692da3dda2" }
[[language]] [[language]]
name = "vala" name = "vala"
@ -1692,7 +1738,7 @@ indent = { tab-width = 8, unit = "\t" }
[[grammar]] [[grammar]]
name = "hare" name = "hare"
source = { git = "https://git.sr.ht/~ecmma/tree-sitter-hare", rev = "bc26a6a949f2e0d98b7bfc437d459b250900a165" } source = { git = "https://git.sr.ht/~ecmma/tree-sitter-hare", rev = "2495958aaf3f93581c87ec020164255e80655331" }
[[language]] [[language]]
name = "devicetree" name = "devicetree"
@ -1775,14 +1821,14 @@ source = { git = "https://github.com/metio/tree-sitter-ssh-client-config", rev =
name = "scheme" name = "scheme"
scope = "source.scheme" scope = "source.scheme"
injection-regex = "scheme" injection-regex = "scheme"
file-types = ["ss", "scm"] # "scm", file-types = ["ss", "scm"]
roots = [] roots = []
comment-token = ";" comment-token = ";"
indent = { tab-width = 2, unit = " " } indent = { tab-width = 2, unit = " " }
[[grammar]] [[grammar]]
name = "scheme" name = "scheme"
source = { git = "https://github.com/6cdh/tree-sitter-scheme", rev = "c0741320bfca6b7b5b7a13b5171275951e96a842" } source = { git = "https://github.com/6cdh/tree-sitter-scheme", rev = "af3af6c9356b936f8a515a1e449c32e804c2b1a8" }
[[language]] [[language]]
name = "v" name = "v"
@ -1797,7 +1843,7 @@ indent = { tab-width = 4, unit = "\t" }
[[grammar]] [[grammar]]
name = "v" name = "v"
source = { git = "https://github.com/vlang/vls", subpath = "tree_sitter_v", rev = "66cf9d3086fb5ecc827cb32c64c5d812ab17d2c6" } source = {git = "https://github.com/v-analyzer/v-analyzer", subpath = "tree_sitter_v", rev = "e14fdf6e661b10edccc744102e4ccf0b187aa8ad"}
[[language]] [[language]]
name = "verilog" name = "verilog"
@ -1883,7 +1929,7 @@ source = { git = "https://github.com/sogaiu/tree-sitter-clojure", rev = "e57c569
name = "starlark" name = "starlark"
scope = "source.starlark" scope = "source.starlark"
injection-regex = "(starlark|bzl|bazel)" injection-regex = "(starlark|bzl|bazel)"
file-types = ["bzl", "bazel", "BUILD"] file-types = ["bzl", "bazel", "BUILD", "star"]
roots = [] roots = []
comment-token = "#" comment-token = "#"
indent = { tab-width = 4, unit = " " } indent = { tab-width = 4, unit = " " }
@ -1983,7 +2029,7 @@ language-servers = [ "slint-lsp" ]
[[grammar]] [[grammar]]
name = "slint" name = "slint"
source = { git = "https://github.com/jrmoulton/tree-sitter-slint", rev = "0d4dda94f96623302dfc234e06be62a5717f47da" } source = { git = "https://github.com/jrmoulton/tree-sitter-slint", rev = "00c8a2d3645766f68c0d0460086c0a994e5b0d85" }
[[language]] [[language]]
name = "task" name = "task"
@ -2224,7 +2270,26 @@ grammar = "bash"
[[language]] [[language]]
name = "ini" name = "ini"
scope = "source.ini" scope = "source.ini"
file-types = ["ini"] file-types = [
"ini",
# Systemd unit files
"service",
"automount",
"device",
"mount",
"path",
"service",
"slice",
"socket",
"swap",
"target",
"timer",
# Podman quadlets
"container",
"volume",
"kube",
"network"
]
injection-regex = "ini" injection-regex = "ini"
comment-token = "#" comment-token = "#"
indent = { tab-width = 4, unit = "\t" } indent = { tab-width = 4, unit = "\t" }
@ -2285,7 +2350,7 @@ indent = { tab-width = 2, unit = " " }
[[grammar]] [[grammar]]
name = "matlab" name = "matlab"
source = { git = "https://github.com/acristoffers/tree-sitter-matlab", rev = "d7b24aaaf3e4814d073517d072727319d2b5ffc3" } source = { git = "https://github.com/acristoffers/tree-sitter-matlab", rev = "676117eafa64afedc8380a921a77cd9f2244bc6b" }
[[language]] [[language]]
name = "ponylang" name = "ponylang"
@ -2412,7 +2477,7 @@ indent = { tab-width = 4, unit = " " }
[[grammar]] [[grammar]]
name = "prql" name = "prql"
source = { git = "https://github.com/PRQL/tree-sitter-prql", rev = "3f27cac466f030ee7d985d91eba5470e01dd21ea" } source = { git = "https://github.com/PRQL/tree-sitter-prql", rev = "09e158cd3650581c0af4c49c2e5b10c4834c8646" }
[[language]] [[language]]
name = "po" name = "po"
@ -2614,6 +2679,21 @@ indent = { tab-width = 3, unit = " " }
name = "forth" name = "forth"
source = { git = "https://github.com/alexanderbrevig/tree-sitter-forth", rev = "304ed77beb113e37af38b20ff14e3c37bf350d10" } source = { git = "https://github.com/alexanderbrevig/tree-sitter-forth", rev = "304ed77beb113e37af38b20ff14e3c37bf350d10" }
[[language]]
name = "fsharp"
scope = "source.fs"
roots = ["sln", "fsproj"]
injection-regex = "fsharp"
file-types = ["fs", "fsx"]
comment-token = "//"
indent = { tab-width = 4, unit = " " }
auto-format = true
language-servers = ["fsharp-ls"]
[[grammar]]
name = "fsharp"
source = { git = "https://github.com/kaashyapan/tree-sitter-fsharp", rev = "18da392fd9bd5e79f357abcce13f61f3a15e3951" }
[[language]] [[language]]
name = "t32" name = "t32"
scope = "source.t32" scope = "source.t32"
@ -2625,7 +2705,7 @@ indent = { tab-width = 2, unit = " " }
[[grammar]] [[grammar]]
name = "t32" name = "t32"
source = { git = "https://codeberg.org/xasc/tree-sitter-t32", rev = "1dd98248b01e4a3933c1b85b58bab0875e2ba437" } source = { git = "https://gitlab.com/xasc/tree-sitter-t32", rev = "6da5e3cbabd376b566d04282005e52ffe67ef74a" }
[[language]] [[language]]
name = "webc" name = "webc"
@ -2635,3 +2715,81 @@ file-types = ["webc"]
roots = [] roots = []
indent = { tab-width = 2, unit = " " } indent = { tab-width = 2, unit = " " }
grammar = "html" grammar = "html"
[[language]]
name = "nunjucks"
scope = "text.html.nunjucks"
injection-regex = "nunjucks"
file-types = ["njk"]
roots = []
indent = { tab-width = 2, unit = " " }
grammar = "jinja2"
[[language]]
name = "jinja"
scope = "text.html.jinja"
injection-regex = "jinja"
file-types = ["jinja", "jinja2", "j2"]
roots = []
indent = { tab-width = 2, unit = " " }
grammar = "jinja2"
[[grammar]]
name = "jinja2"
source = { git = "https://github.com/varpeti/tree-sitter-jinja2", rev = "a533cd3c33aea6acb0f9bf9a56f35dcfe6a8eb53" }
[[grammar]]
name = "wren"
source = { git = "https://git.sr.ht/~jummit/tree-sitter-wren", rev = "793d58266924e6efcc40e411663393e9d72bec87"}
[[language]]
name = "wren"
scope = "source.wren"
injection-regex = "wren"
file-types = ["wren"]
roots = []
indent = { tab-width = 2, unit = " "}
[[language]]
name = "unison"
scope = "source.unison"
injection-regex = "unison"
file-types = ["u"]
shebangs = []
roots = []
auto-format = false
comment-token = "--"
indent = { tab-width = 4, unit = " " }
[language.auto-pairs]
'(' = ')'
'{' = '}'
'[' = ']'
'"' = '"'
'`' = '`'
[[grammar]]
name = "unison"
source = { git = "https://github.com/kylegoetz/tree-sitter-unison", rev = "98c4e8bc5c9f5989814a720457cf36963cf4043d" }
[[language]]
name = "todotxt"
scope = "text.todotxt"
file-types = [{ suffix = ".todo.txt" }, "todotxt"]
roots = []
formatter = { command = "sort" }
auto-format = true
[[grammar]]
name = "todotxt"
source = { git = "https://github.com/arnarg/tree-sitter-todotxt", rev = "0207f6a4ab6aeafc4b091914d31d8235049a2578" }
[[language]]
name = "strace"
scope = "source.strace"
file-types = ["strace"]
roots = []
[[grammar]]
name = "strace"
source = { git = "https://github.com/sigmaSd/tree-sitter-strace", rev = "a0f6c50ae4087a9299f055d0f30fe94fd98189a4" }

@ -0,0 +1,36 @@
; Function and method parameters
;-------------------------------
; Javascript and Typescript Treesitter grammars deviate when defining the
; tree structure for parameters, so we need to address them in each specific
; language instead of ecma.
; (p)
(formal_parameters
(identifier) @variable.parameter)
; (...p)
(formal_parameters
(rest_pattern
(identifier) @variable.parameter))
; ({ p })
(formal_parameters
(object_pattern
(shorthand_property_identifier_pattern) @variable.parameter))
; ({ a: p })
(formal_parameters
(object_pattern
(pair_pattern
value: (identifier) @variable.parameter)))
; ([ p ])
(formal_parameters
(array_pattern
(identifier) @variable.parameter))
; (p = 1)
(formal_parameters
(assignment_pattern
left: (identifier) @variable.parameter))

@ -0,0 +1,14 @@
; Definitions
;------------
; Javascript and Typescript Treesitter grammars deviate when defining the
; tree structure for parameters, so we need to address them in each specific
; language instead of ecma.
; (i)
(formal_parameters
(identifier) @local.definition)
; (i = 1)
(formal_parameters
(assignment_pattern
left: (identifier) @local.definition))

@ -0,0 +1,88 @@
(
(comment)* @doc
.
(method_definition
name: (property_identifier) @name) @definition.method
(#not-eq? @name "constructor")
(#strip! @doc "^[\\s\\*/]+|^[\\s\\*/]$")
(#select-adjacent! @doc @definition.method)
)
(
(comment)* @doc
.
[
(class
name: (_) @name)
(class_declaration
name: (_) @name)
] @definition.class
(#strip! @doc "^[\\s\\*/]+|^[\\s\\*/]$")
(#select-adjacent! @doc @definition.class)
)
(
(comment)* @doc
.
[
(function
name: (identifier) @name)
(function_declaration
name: (identifier) @name)
(generator_function
name: (identifier) @name)
(generator_function_declaration
name: (identifier) @name)
] @definition.function
(#strip! @doc "^[\\s\\*/]+|^[\\s\\*/]$")
(#select-adjacent! @doc @definition.function)
)
(
(comment)* @doc
.
(lexical_declaration
(variable_declarator
name: (identifier) @name
value: [(arrow_function) (function)]) @definition.function)
(#strip! @doc "^[\\s\\*/]+|^[\\s\\*/]$")
(#select-adjacent! @doc @definition.function)
)
(
(comment)* @doc
.
(variable_declaration
(variable_declarator
name: (identifier) @name
value: [(arrow_function) (function)]) @definition.function)
(#strip! @doc "^[\\s\\*/]+|^[\\s\\*/]$")
(#select-adjacent! @doc @definition.function)
)
(assignment_expression
left: [
(identifier) @name
(member_expression
property: (property_identifier) @name)
]
right: [(arrow_function) (function)]
) @definition.function
(pair
key: (property_identifier) @name
value: [(arrow_function) (function)]) @definition.function
(
(call_expression
function: (identifier) @name) @reference.call
(#not-match? @name "^(require)$")
)
(call_expression
function: (member_expression
property: (property_identifier) @name)
arguments: (_) @reference.call)
(new_expression
constructor: (_) @name) @reference.class

@ -0,0 +1,43 @@
; Opening elements
; ----------------
(jsx_opening_element ((identifier) @constructor
(#match? @constructor "^[A-Z]")))
(jsx_opening_element (identifier) @tag)
; Closing elements
; ----------------
(jsx_closing_element ((identifier) @constructor
(#match? @constructor "^[A-Z]")))
(jsx_closing_element (identifier) @tag)
; Self-closing elements
; ---------------------
(jsx_self_closing_element ((identifier) @constructor
(#match? @constructor "^[A-Z]")))
(jsx_self_closing_element (identifier) @tag)
; Attributes
; ----------
(jsx_attribute (property_identifier) @variable.other.member)
; Punctuation
; -----------
; Handle attribute delimiter (<Component color="red"/>)
(jsx_attribute "=" @punctuation.delimiter)
; <Component>
(jsx_opening_element ["<" ">"] @punctuation.bracket)
; </Component>
(jsx_closing_element ["</" ">"] @punctuation.bracket)
; <Component />
(jsx_self_closing_element ["<" "/>"] @punctuation.braket)

@ -0,0 +1,6 @@
[
(jsx_element)
(jsx_self_closing_element)
] @indent
(parenthesized_expression) @indent

@ -0,0 +1,140 @@
; Namespaces
; ----------
(internal_module
[((identifier) @namespace) ((nested_identifier (identifier) @namespace))])
(ambient_declaration "global" @namespace)
; Parameters
; ----------
; Javascript and Typescript Treesitter grammars deviate when defining the
; tree structure for parameters, so we need to address them in each specific
; language instead of ecma.
; (p: t)
; (p: t = 1)
(required_parameter
(identifier) @variable.parameter)
; (...p: t)
(required_parameter
(rest_pattern
(identifier) @variable.parameter))
; ({ p }: { p: t })
(required_parameter
(object_pattern
(shorthand_property_identifier_pattern) @variable.parameter))
; ({ a: p }: { a: t })
(required_parameter
(object_pattern
(pair_pattern
value: (identifier) @variable.parameter)))
; ([ p ]: t[])
(required_parameter
(array_pattern
(identifier) @variable.parameter))
; (p?: t)
; (p?: t = 1) // Invalid but still possible to highlight.
(optional_parameter
(identifier) @variable.parameter)
; (...p?: t) // Invalid but still possible to highlight.
(optional_parameter
(rest_pattern
(identifier) @variable.parameter))
; ({ p }: { p?: t})
(optional_parameter
(object_pattern
(shorthand_property_identifier_pattern) @variable.parameter))
; ({ a: p }: { a?: t })
(optional_parameter
(object_pattern
(pair_pattern
value: (identifier) @variable.parameter)))
; ([ p ]?: t[]) // Invalid but still possible to highlight.
(optional_parameter
(array_pattern
(identifier) @variable.parameter))
; Punctuation
; -----------
[
":"
] @punctuation.delimiter
(optional_parameter "?" @punctuation.special)
(property_signature "?" @punctuation.special)
(conditional_type ["?" ":"] @operator)
; Keywords
; --------
[
"abstract"
"declare"
"export"
"infer"
"implements"
"keyof"
"namespace"
"override"
"satisfies"
] @keyword
[
"type"
"interface"
"enum"
] @keyword.storage.type
[
"public"
"private"
"protected"
"readonly"
] @keyword.storage.modifier
; Types
; -----
(type_identifier) @type
(predefined_type) @type.builtin
; Type arguments and parameters
; -----------------------------
(type_arguments
[
"<"
">"
] @punctuation.bracket)
(type_parameters
[
"<"
">"
] @punctuation.bracket)
; Literals
; --------
[
(template_literal_type)
] @string
; Tokens
; ------
(template_type
"${" @punctuation.special
"}" @punctuation.special) @embedded

@ -0,0 +1,5 @@
[
(enum_declaration)
(interface_declaration)
(object_type)
] @indent

@ -0,0 +1,16 @@
; Definitions
;------------
; Javascript and Typescript Treesitter grammars deviate when defining the
; tree structure for parameters, so we need to address them in each specific
; language instead of ecma.
; (i: t)
; (i: t = 1)
(required_parameter
(identifier) @local.definition)
; (i?: t)
; (i?: t = 1) // Invalid but still possible to highlight.
(optional_parameter
(identifier) @local.definition)

@ -0,0 +1,23 @@
(function_signature
name: (identifier) @name) @definition.function
(method_signature
name: (property_identifier) @name) @definition.method
(abstract_method_signature
name: (property_identifier) @name) @definition.method
(abstract_class_declaration
name: (type_identifier) @name) @definition.class
(module
name: (identifier) @name) @definition.module
(interface_declaration
name: (type_identifier) @name) @definition.interface
(type_annotation
(type_identifier) @name) @reference.type
(new_expression
constructor: (identifier) @name) @reference.class

@ -0,0 +1,6 @@
[
(interface_declaration
body:(_) @class.inside)
(type_alias_declaration
value: (_) @class.inside)
] @class.around

@ -0,0 +1,9 @@
(function_definition
body: (_) @function.inside) @function.around
(command
argument: (_) @parameter.inside)
(comment) @comment.inside
(comment)+ @comment.around

@ -5,7 +5,6 @@
(enumerator_list) (enumerator_list)
(parameter_list) (parameter_list)
(init_declarator) (init_declarator)
(case_statement)
(expression_statement) (expression_statement)
] @indent ] @indent
@ -13,6 +12,7 @@
"case" "case"
"}" "}"
"]" "]"
")"
] @outdent ] @outdent
(if_statement (if_statement
@ -32,3 +32,7 @@
(_) @indent (_) @indent
(#not-kind-eq? @indent "compound_statement") (#not-kind-eq? @indent "compound_statement")
(#set! "scope" "all")) (#set! "scope" "all"))
(parameter_list
. (parameter_declaration) @anchor
(#set! "scope" "tail")) @align

@ -28,10 +28,10 @@
; Error level tags ; Error level tags
((tag (name) @error) ((tag (name) @error)
(match? @error "^(BUG|FIXME|ISSUE|XXX)$")) (#match? @error "^(BUG|FIXME|ISSUE|XXX)$"))
("text" @error ("text" @error
(match? @error "^(BUG|FIXME|ISSUE|XXX)$")) (#match? @error "^(BUG|FIXME|ISSUE|XXX)$"))
(tag (tag
(name) @ui.text (name) @ui.text
