diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 93fcb9816..c9f198d0c 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -16,6 +16,7 @@ jobs: steps: - name: Checkout sources uses: actions/checkout@v4 + - name: Install stable toolchain uses: dtolnay/rust-toolchain@1.70 @@ -107,6 +108,9 @@ jobs: - name: Validate queries run: cargo xtask query-check + - name: Validate themes + run: cargo xtask theme-check + - name: Generate docs run: cargo xtask docgen diff --git a/.github/workflows/cachix.yml b/.github/workflows/cachix.yml index 3685a7c61..5052622c5 100644 --- a/.github/workflows/cachix.yml +++ b/.github/workflows/cachix.yml @@ -14,7 +14,7 @@ jobs: uses: actions/checkout@v4 - name: Install nix - uses: cachix/install-nix-action@V28 + uses: cachix/install-nix-action@v29 - name: Authenticate with Cachix uses: cachix/cachix-action@v15 diff --git a/Cargo.lock b/Cargo.lock index 7156fc27e..49a417890 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -136,9 +136,9 @@ checksum = "df8670b8c7b9dae1793364eafadf7239c40d669904660c5960d74cfd80b46a53" [[package]] name = "cc" -version = "1.1.19" +version = "1.1.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d74707dde2ba56f86ae90effb3b43ddd369504387e718014de010cec7959800" +checksum = "3bbb537bb4a30b90362caddba8f360c0a56bc13d3a5570028e7197204cb54a17" dependencies = [ "shlex", ] @@ -1609,9 +1609,9 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.158" +version = "0.2.159" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8adc4bb1803a324070e64a98ae98f38934d91957a99cfb3a43dcbc01bc56439" +checksum = "561d97a539a36e26a9a5fad1ea11a3039a67714694aaa379433e580854bc3dc5" [[package]] name = "libloading" @@ -1753,9 +1753,12 @@ dependencies = [ [[package]] name = "once_cell" -version = "1.19.0" +version = "1.20.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" +checksum = "82881c4be219ab5faaf2ad5e5e5ecdff8c66bd7402ca3160975c93b24961afd1" +dependencies = [ + "portable-atomic", +] [[package]] name = "open" @@ -1914,9 +1917,9 @@ dependencies = [ [[package]] name = "regex" -version = "1.10.6" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4219d74c6b67a3654a9fbebc4b419e22126d13d2f3c4a07ee0cb61ff79a79619" +checksum = "38200e5ee88914975b69f657f0801b6f6dccafd44fd9326302a4aaeecfacb1d8" dependencies = [ "aho-corasick", "memchr", @@ -1926,9 +1929,9 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.4.7" +version = "0.4.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38caf58cc5ef2fed281f89292ef23f6365465ed9a41b7a7754eb4e26496c92df" +checksum = "368758f23274712b504848e9d5a6f010445cc8b87a7cdb4d7cbee666c1288da3" dependencies = [ "aho-corasick", "memchr", @@ -1950,9 +1953,9 @@ dependencies = [ [[package]] name = "regex-syntax" -version = "0.8.4" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a66a03ae7c801facd77a29370b4faec201768915ac14a721ba36f20bc9c209b" +checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" [[package]] name = "ropey" @@ -2192,9 +2195,9 @@ dependencies = [ [[package]] name = "tempfile" -version = "3.12.0" +version = "3.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04cbcdd0c794ebb0d4cf35e88edd2f7d2c4c3e9a5a6dab322839b321c6a87a64" +checksum = 
"f0f2c9fc62d0beef6951ccffd757e241266a2c833136efbe35af6cd2567dca5b" dependencies = [ "cfg-if", "fastrand", @@ -2225,18 +2228,18 @@ dependencies = [ [[package]] name = "thiserror" -version = "1.0.63" +version = "1.0.64" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0342370b38b6a11b6cc11d6a805569958d54cfa061a29969c3b5ce2ea405724" +checksum = "d50af8abc119fb8bb6dbabcfa89656f46f84aa0ac7688088608076ad2b459a84" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.63" +version = "1.0.64" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4558b58466b9ad7ca0f102865eccc95938dca1a74a856f2b57b6629050da261" +checksum = "08904e7672f5eb876eaaf87e0ce17857500934f4981c4a0ab2b4aa98baac7fc3" dependencies = [ "proc-macro2", "quote", diff --git a/README.md b/README.md index 3b639214d..90ebc9d16 100644 --- a/README.md +++ b/README.md @@ -37,8 +37,8 @@ All shortcuts/keymaps can be found [in the documentation on the website](https:/ - Built-in language server support - Smart, incremental syntax highlighting and code editing via tree-sitter -It's a terminal-based editor first, but I'd like to explore a custom renderer -(similar to Emacs) in wgpu or skulpin. +Although it's primarily a terminal-based editor, I am interested in exploring +a custom renderer (similar to Emacs) using wgpu or skulpin. Note: Only certain languages have indentation definitions at the moment. Check `runtime/queries//` for `indents.scm`. diff --git a/book/src/generated/lang-support.md b/book/src/generated/lang-support.md index f223c8b22..79f3a6964 100644 --- a/book/src/generated/lang-support.md +++ b/book/src/generated/lang-support.md @@ -19,6 +19,7 @@ | cairo | ✓ | ✓ | ✓ | `cairo-language-server` | | capnp | ✓ | | ✓ | | | cel | ✓ | | | | +| circom | ✓ | | | `circom-lsp` | | clojure | ✓ | | | `clojure-lsp` | | cmake | ✓ | ✓ | ✓ | `cmake-language-server` | | comment | ✓ | | | | @@ -48,7 +49,7 @@ | elvish | ✓ | | | `elvish` | | env | ✓ | ✓ | | | | erb | ✓ | | | | -| erlang | ✓ | ✓ | | `erlang_ls` | +| erlang | ✓ | ✓ | | `erlang_ls`, `elp` | | esdl | ✓ | | | | | fidl | ✓ | | | | | fish | ✓ | ✓ | ✓ | | @@ -86,7 +87,7 @@ | hocon | ✓ | ✓ | ✓ | | | hoon | ✓ | | | | | hosts | ✓ | | | | -| html | ✓ | | | `vscode-html-language-server` | +| html | ✓ | | | `vscode-html-language-server`, `superhtml` | | hurl | ✓ | ✓ | ✓ | | | hyprlang | ✓ | | ✓ | | | idris | | | | `idris2-lsp` | diff --git a/book/src/generated/typable-cmd.md b/book/src/generated/typable-cmd.md index f48e1490a..7d3622256 100644 --- a/book/src/generated/typable-cmd.md +++ b/book/src/generated/typable-cmd.md @@ -72,7 +72,7 @@ | `:sort` | Sort ranges in selection. | | `:rsort` | Sort ranges in selection in reverse order. | | `:reflow` | Hard-wrap the current selection of lines to a given width. | -| `:tree-sitter-subtree`, `:ts-subtree` | Display tree sitter subtree under cursor, primarily for debugging queries. | +| `:tree-sitter-subtree`, `:ts-subtree` | Display the smallest tree-sitter subtree that spans the primary selection, primarily for debugging queries. | | `:config-reload` | Refresh user config. | | `:config-open` | Open the user config.toml file. | | `:config-open-workspace` | Open the workspace config.toml file. | diff --git a/book/src/keymap.md b/book/src/keymap.md index e7ae6ae47..71ae5e31f 100644 --- a/book/src/keymap.md +++ b/book/src/keymap.md @@ -145,6 +145,9 @@ Normal mode is the default mode when you launch helix. 
You can return to it from
 | `Alt-i`, `Alt-down` | Shrink syntax tree object selection (**TS**) | `shrink_selection` |
 | `Alt-p`, `Alt-left` | Select previous sibling node in syntax tree (**TS**) | `select_prev_sibling` |
 | `Alt-n`, `Alt-right` | Select next sibling node in syntax tree (**TS**) | `select_next_sibling` |
+| `Alt-a` | Select all sibling nodes in syntax tree (**TS**) | `select_all_siblings` |
+| `Alt-e` | Move to end of parent node in syntax tree (**TS**) | `move_parent_node_end` |
+| `Alt-b` | Move to start of parent node in syntax tree (**TS**) | `move_parent_node_start` |
 
 ### Search
 
diff --git a/book/src/usage.md b/book/src/usage.md
index 859cb6709..a22a18492 100644
--- a/book/src/usage.md
+++ b/book/src/usage.md
@@ -7,3 +7,27 @@ can be accessed via the command `hx --tutor` or `:tutor`.
 > 💡 Currently, not all functionality is fully documented, please refer to the
 > [key mappings](./keymap.md) list.
 
+## Modes
+
+Helix is a modal editor, meaning it has different modes for different tasks. The main modes are:
+
+* [Normal mode](./keymap.md#normal-mode): For navigation and editing commands. This is the default mode.
+* [Insert mode](./keymap.md#insert-mode): For typing text directly into the document. Access by typing `i` in normal mode.
+* [Select/extend mode](./keymap.md#select--extend-mode): For making selections and performing operations on them. Access by typing `v` in normal mode.
+
+## Buffers
+
+Buffers are in-memory representations of files. You can have multiple buffers open at once. Use [pickers](./pickers.md) or commands like `:buffer-next` and `:buffer-previous` to open buffers or switch between them.
+
+## Selection-first editing
+
+Inspired by [Kakoune](http://kakoune.org/), Helix follows the `selection → action` model. This means that whatever you are going to act on (a word, a paragraph, a line, etc.) is selected first and the action itself (delete, change, yank, etc.) comes second. A cursor is simply a single width selection.
+
+## Multiple selections
+
+Also inspired by Kakoune, multiple selections are a core mode of interaction in Helix. For example, the standard way of replacing multiple instances of a word is to first select all instances (so there is one selection per instance) and then use the change action (`c`) to edit them all at the same time.
+
+## Motions
+
+Motions are commands that move the cursor or modify selections. They're used for navigation and text manipulation. Examples include `w` to move to the next word, or `f` to find a character. See the [Movement](./keymap.md#movement) section of the keymap for more motions.
+
diff --git a/helix-core/Cargo.toml b/helix-core/Cargo.toml
index bc890e007..4cd516268 100644
--- a/helix-core/Cargo.toml
+++ b/helix-core/Cargo.toml
@@ -32,7 +32,7 @@ unicode-width = "=0.1.12"
 unicode-general-category = "0.6"
 slotmap.workspace = true
 tree-sitter.workspace = true
-once_cell = "1.19"
+once_cell = "1.20"
 arc-swap = "1"
 regex = "1"
 bitflags = "2.6"
diff --git a/helix-core/src/syntax.rs b/helix-core/src/syntax.rs
index 7be512f52..7de6ddf44 100644
--- a/helix-core/src/syntax.rs
+++ b/helix-core/src/syntax.rs
@@ -2692,6 +2692,8 @@ fn pretty_print_tree_impl(
         }
 
         write!(fmt, "({}", node.kind())?;
+    } else {
+        write!(fmt, " \"{}\"", node.kind())?;
     }
 
     // Handle children.
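Reviewer note on the hunk above: unnamed (anonymous) nodes are now appended as quoted kinds instead of being skipped. A minimal, self-contained sketch of just that formatting rule, as a hypothetical stand-in rather than the real cursor walk that `pretty_print_tree_impl` performs:

```rust
/// Hypothetical stand-in mirroring the branch above: named nodes open a
/// parenthesized group, unnamed nodes are appended as a quoted kind.
fn render_node(out: &mut String, kind: &str, is_named: bool) {
    if is_named {
        out.push_str(&format!("({kind}"));
    } else {
        out.push_str(&format!(" \"{kind}\""));
    }
}

#[test]
fn quotes_unnamed_nodes() {
    let mut out = String::new();
    render_node(&mut out, "line_comment", true);
    render_node(&mut out, "//", false);
    out.push(')');
    // Matches the updated expectation in the tests below.
    assert_eq!(out, r#"(line_comment "//")"#);
}
```

The updated `test_pretty_print` expectations in the next hunks follow directly from this rule.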
@@ -2950,7 +2952,7 @@ mod test { #[test] fn test_pretty_print() { let source = r#"// Hello"#; - assert_pretty_print("rust", source, "(line_comment)", 0, source.len()); + assert_pretty_print("rust", source, "(line_comment \"//\")", 0, source.len()); // A large tree should be indented with fields: let source = r#"fn main() { @@ -2960,16 +2962,16 @@ mod test { "rust", source, concat!( - "(function_item\n", + "(function_item \"fn\"\n", " name: (identifier)\n", - " parameters: (parameters)\n", - " body: (block\n", + " parameters: (parameters \"(\" \")\")\n", + " body: (block \"{\"\n", " (expression_statement\n", " (macro_invocation\n", - " macro: (identifier)\n", - " (token_tree\n", - " (string_literal\n", - " (string_content)))))))", + " macro: (identifier) \"!\"\n", + " (token_tree \"(\"\n", + " (string_literal \"\"\"\n", + " (string_content) \"\"\") \")\")) \";\") \"}\"))", ), 0, source.len(), @@ -2981,7 +2983,7 @@ mod test { // Error nodes are printed as errors: let source = r#"}{"#; - assert_pretty_print("rust", source, "(ERROR)", 0, source.len()); + assert_pretty_print("rust", source, "(ERROR \"}\" \"{\")", 0, source.len()); // Fields broken under unnamed nodes are determined correctly. // In the following source, `object` belongs to the `singleton_method` @@ -2996,11 +2998,11 @@ mod test { "ruby", source, concat!( - "(singleton_method\n", - " object: (self)\n", + "(singleton_method \"def\"\n", + " object: (self) \".\"\n", " name: (identifier)\n", " body: (body_statement\n", - " (true)))" + " (true)) \"end\")" ), 0, source.len(), diff --git a/helix-core/src/uri.rs b/helix-core/src/uri.rs index 4e03c58b1..cbe0fadda 100644 --- a/helix-core/src/uri.rs +++ b/helix-core/src/uri.rs @@ -1,12 +1,18 @@ -use std::path::{Path, PathBuf}; +use std::{ + fmt, + path::{Path, PathBuf}, + sync::Arc, +}; /// A generic pointer to a file location. /// /// Currently this type only supports paths to local files. +/// +/// Cloning this type is cheap: the internal representation uses an Arc. 
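Quick usage sketch of the new representation before the definition below. This is a hypothetical standalone snippet, assuming `helix-core` is available as a dependency so `helix_core::Uri` is in scope:

```rust
use std::path::PathBuf;

use helix_core::Uri;

fn main() {
    let uri = Uri::from(PathBuf::from("/tmp/demo.rs"));
    // Cloning now only bumps the Arc's reference count.
    let cheap_copy = uri.clone();
    // The new `Display` impl renders the plain path.
    assert_eq!(cheap_copy.to_string(), "/tmp/demo.rs");
    // `as_path` lends out a `&Path` without consuming the Uri, which is why
    // the owned `as_path_buf`/`TryFrom<Uri> for PathBuf` below can go.
    assert!(uri.as_path().is_some());
}
```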
 #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
 #[non_exhaustive]
 pub enum Uri {
-    File(PathBuf),
+    File(Arc<Path>),
 }
 
 impl Uri {
@@ -23,26 +29,18 @@
             Self::File(path) => Some(path),
         }
     }
-
-    pub fn as_path_buf(self) -> Option<PathBuf> {
-        match self {
-            Self::File(path) => Some(path),
-        }
-    }
 }
 
 impl From<PathBuf> for Uri {
     fn from(path: PathBuf) -> Self {
-        Self::File(path)
+        Self::File(path.into())
     }
 }
 
-impl TryFrom<Uri> for PathBuf {
-    type Error = ();
-
-    fn try_from(uri: Uri) -> Result<Self, Self::Error> {
-        match uri {
-            Uri::File(path) => Ok(path),
+impl fmt::Display for Uri {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match self {
+            Self::File(path) => write!(f, "{}", path.display()),
         }
     }
 }
@@ -59,11 +57,16 @@ pub enum UrlConversionErrorKind {
     UnableToConvert,
 }
 
-impl std::fmt::Display for UrlConversionError {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+impl fmt::Display for UrlConversionError {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match self.kind {
             UrlConversionErrorKind::UnsupportedScheme => {
-                write!(f, "unsupported scheme in URL: {}", self.source.scheme())
+                write!(
+                    f,
+                    "unsupported scheme '{}' in URL {}",
+                    self.source.scheme(),
+                    self.source
+                )
             }
             UrlConversionErrorKind::UnableToConvert => {
                 write!(f, "unable to convert URL to file path: {}", self.source)
@@ -77,7 +80,7 @@ impl std::error::Error for UrlConversionError {}
 fn convert_url_to_uri(url: &url::Url) -> Result<Uri, UrlConversionErrorKind> {
     if url.scheme() == "file" {
         url.to_file_path()
-            .map(|path| Uri::File(helix_stdx::path::normalize(path)))
+            .map(|path| Uri::File(helix_stdx::path::normalize(path).into()))
             .map_err(|_| UrlConversionErrorKind::UnableToConvert)
     } else {
         Err(UrlConversionErrorKind::UnsupportedScheme)
diff --git a/helix-event/Cargo.toml b/helix-event/Cargo.toml
index e7c877355..87e5019bd 100644
--- a/helix-event/Cargo.toml
+++ b/helix-event/Cargo.toml
@@ -19,7 +19,7 @@ tokio = { version = "1", features = ["rt", "rt-multi-thread", "time", "sync", "p
 # setup new events on initialization, hardware-lock-elision hugely benefits this case
 # as it essentially makes the lock entirely free as long as there is no writes
 parking_lot = { version = "0.12", features = ["hardware-lock-elision"] }
-once_cell = "1.18"
+once_cell = "1.20"
 anyhow = "1"
 log = "0.4"
 
diff --git a/helix-loader/Cargo.toml b/helix-loader/Cargo.toml
index f74829f30..26ab3f264 100644
--- a/helix-loader/Cargo.toml
+++ b/helix-loader/Cargo.toml
@@ -22,7 +22,7 @@ serde = { version = "1.0", features = ["derive"] }
 toml = "0.8"
 etcetera = "0.8"
 tree-sitter.workspace = true
-once_cell = "1.19"
+once_cell = "1.20"
 log = "0.4"
 
 # TODO: these two should be on !wasm32 only
@@ -30,7 +30,7 @@ log = "0.4"
 # cloning/compiling tree-sitter grammars
 cc = { version = "1" }
 threadpool = { version = "1.0" }
-tempfile = "3.12.0"
+tempfile = "3.13.0"
 dunce = "1.0.5"
 
 [target.'cfg(not(target_arch = "wasm32"))'.dependencies]
diff --git a/helix-loader/src/lib.rs b/helix-loader/src/lib.rs
index f36c76c4f..0e7c134d0 100644
--- a/helix-loader/src/lib.rs
+++ b/helix-loader/src/lib.rs
@@ -225,7 +225,7 @@ pub fn merge_toml_values(left: toml::Value, right: toml::Value, merge_depth: usi
 /// Used as a ceiling dir for LSP root resolution, the filepicker and potentially as a future filewatching root
 ///
 /// This function starts searching the FS upward from the CWD
-/// and returns the first directory that contains either `.git`, `.svn` or `.helix`.
+/// and returns the first directory that contains either `.git`, `.svn`, `.jj` or `.helix`.
 /// If no workspace was found returns (CWD, true).
 /// Otherwise (workspace, false) is returned
 pub fn find_workspace() -> (PathBuf, bool) {
@@ -233,6 +233,7 @@ pub fn find_workspace() -> (PathBuf, bool) {
     for ancestor in current_dir.ancestors() {
         if ancestor.join(".git").exists()
             || ancestor.join(".svn").exists()
+            || ancestor.join(".jj").exists()
             || ancestor.join(".helix").exists()
         {
             return (ancestor.to_owned(), false);
diff --git a/helix-stdx/Cargo.toml b/helix-stdx/Cargo.toml
index 1c0d06ab1..25c0a164d 100644
--- a/helix-stdx/Cargo.toml
+++ b/helix-stdx/Cargo.toml
@@ -26,4 +26,4 @@ windows-sys = { version = "0.59", features = ["Win32_Foundation", "Win32_Securit
 rustix = { version = "0.38", features = ["fs"] }
 
 [dev-dependencies]
-tempfile = "3.12"
+tempfile = "3.13"
diff --git a/helix-stdx/src/rope.rs b/helix-stdx/src/rope.rs
index 2695555e3..f7e31924a 100644
--- a/helix-stdx/src/rope.rs
+++ b/helix-stdx/src/rope.rs
@@ -51,7 +51,7 @@ impl<'a> RopeSliceExt<'a> for RopeSlice<'a> {
         if len < text.len() {
             return false;
         }
-        self.get_byte_slice(..len - text.len())
+        self.get_byte_slice(..text.len())
             .map_or(false, |start| start == text)
     }
 
@@ -137,4 +137,14 @@ mod tests {
             }
         }
     }
+
+    #[test]
+    fn starts_with() {
+        assert!(RopeSlice::from("asdf").starts_with("a"));
+    }
+
+    #[test]
+    fn ends_with() {
+        assert!(RopeSlice::from("asdf").ends_with("f"));
+    }
 }
diff --git a/helix-term/Cargo.toml b/helix-term/Cargo.toml
index c66aa0621..b14d3d3c5 100644
--- a/helix-term/Cargo.toml
+++ b/helix-term/Cargo.toml
@@ -33,7 +33,7 @@ helix-vcs = { path = "../helix-vcs" }
 helix-loader = { path = "../helix-loader" }
 
 anyhow = "1"
-once_cell = "1.19"
+once_cell = "1.20"
 tokio = { version = "1", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot"] }
 tui = { path = "../helix-tui", package = "helix-tui", default-features = false, features = ["crossterm"] }
 
@@ -74,7 +74,7 @@ grep-searcher = "0.1.14"
 [target.'cfg(not(windows))'.dependencies]
 # https://github.com/vorner/signal-hook/issues/100
 signal-hook-tokio = { version = "0.3", features = ["futures-v0_3"] }
-libc = "0.2.158"
+libc = "0.2.159"
 
 [target.'cfg(target_os = "macos")'.dependencies]
 crossterm = { version = "0.28", features = ["event-stream", "use-dev-tty", "libc"] }
@@ -85,5 +85,5 @@ helix-loader = { path = "../helix-loader" }
 [dev-dependencies]
 smallvec = "1.13"
 indoc = "2.0.5"
-tempfile = "3.12.0"
+tempfile = "3.13.0"
 same-file = "1.0.1"
diff --git a/helix-term/src/commands.rs b/helix-term/src/commands.rs
index 6e037a471..b1c29378d 100644
--- a/helix-term/src/commands.rs
+++ b/helix-term/src/commands.rs
@@ -4626,6 +4626,14 @@ fn join_selections_impl(cx: &mut Context, select_space: bool) {
     let text = doc.text();
     let slice = text.slice(..);
 
+    let comment_tokens = doc
+        .language_config()
+        .and_then(|config| config.comment_tokens.as_deref())
+        .unwrap_or(&[]);
+    // Sort by length to handle Rust's /// vs //
+    let mut comment_tokens: Vec<&str> = comment_tokens.iter().map(|x| x.as_str()).collect();
+    comment_tokens.sort_unstable_by_key(|x| std::cmp::Reverse(x.len()));
+
     let mut changes = Vec::new();
 
     for selection in doc.selection(view.id) {
@@ -4637,10 +4645,31 @@ fn join_selections_impl(cx: &mut Context, select_space: bool) {
 
         changes.reserve(lines.len());
 
+        let first_line_idx = slice.line_to_char(start);
+        let first_line_idx = skip_while(slice, first_line_idx, |ch| matches!(ch, ' ' | '\t'))
+            .unwrap_or(first_line_idx);
+        let first_line = slice.slice(first_line_idx..);
+        let mut current_comment_token =
comment_tokens + .iter() + .find(|token| first_line.starts_with(token)); + for line in lines { let start = line_end_char_index(&slice, line); let mut end = text.line_to_char(line + 1); end = skip_while(slice, end, |ch| matches!(ch, ' ' | '\t')).unwrap_or(end); + let slice_from_end = slice.slice(end..); + if let Some(token) = comment_tokens + .iter() + .find(|token| slice_from_end.starts_with(token)) + { + if Some(token) == current_comment_token { + end += token.chars().count(); + end = skip_while(slice, end, |ch| matches!(ch, ' ' | '\t')).unwrap_or(end); + } else { + // update current token, but don't delete this one. + current_comment_token = Some(token); + } + } let separator = if end == line_end_char_index(&slice, line + 1) { // the joining line contains only space-characters => don't include a whitespace when joining diff --git a/helix-term/src/commands/lsp.rs b/helix-term/src/commands/lsp.rs index 93ac2a849..fcc0333e8 100644 --- a/helix-term/src/commands/lsp.rs +++ b/helix-term/src/commands/lsp.rs @@ -34,7 +34,7 @@ use crate::{ use std::{ cmp::Ordering, collections::{BTreeMap, HashSet}, - fmt::{Display, Write}, + fmt::Display, future::Future, path::Path, }; @@ -61,10 +61,31 @@ macro_rules! language_server_with_feature { }}; } +/// A wrapper around `lsp::Location` that swaps out the LSP URI for `helix_core::Uri`. +#[derive(Debug, Clone, PartialEq, Eq)] +struct Location { + uri: Uri, + range: lsp::Range, +} + +fn lsp_location_to_location(location: lsp::Location) -> Option { + let uri = match location.uri.try_into() { + Ok(uri) => uri, + Err(err) => { + log::warn!("discarding invalid or unsupported URI: {err}"); + return None; + } + }; + Some(Location { + uri, + range: location.range, + }) +} + struct SymbolInformationItem { + location: Location, symbol: lsp::SymbolInformation, offset_encoding: OffsetEncoding, - uri: Uri, } struct DiagnosticStyles { @@ -75,35 +96,35 @@ struct DiagnosticStyles { } struct PickerDiagnostic { - uri: Uri, + location: Location, diag: lsp::Diagnostic, offset_encoding: OffsetEncoding, } -fn uri_to_file_location<'a>(uri: &'a Uri, range: &lsp::Range) -> Option> { - let path = uri.as_path()?; - let line = Some((range.start.line as usize, range.end.line as usize)); +fn location_to_file_location(location: &Location) -> Option { + let path = location.uri.as_path()?; + let line = Some(( + location.range.start.line as usize, + location.range.end.line as usize, + )); Some((path.into(), line)) } fn jump_to_location( editor: &mut Editor, - location: &lsp::Location, + location: &Location, offset_encoding: OffsetEncoding, action: Action, ) { let (view, doc) = current!(editor); push_jump(view, doc); - let path = match location.uri.to_file_path() { - Ok(path) => path, - Err(_) => { - let err = format!("unable to convert URI to filepath: {}", location.uri); - editor.set_error(err); - return; - } + let Some(path) = location.uri.as_path() else { + let err = format!("unable to convert URI to filepath: {:?}", location.uri); + editor.set_error(err); + return; }; - jump_to_position(editor, &path, location.range, offset_encoding, action); + jump_to_position(editor, path, location.range, offset_encoding, action); } fn jump_to_position( @@ -196,7 +217,10 @@ fn diag_picker( for (diag, ls) in diags { if let Some(ls) = cx.editor.language_server_by_id(ls) { flat_diag.push(PickerDiagnostic { - uri: uri.clone(), + location: Location { + uri: uri.clone(), + range: diag.range, + }, diag, offset_encoding: ls.offset_encoding(), }); @@ -243,7 +267,7 @@ fn diag_picker( // between message code and 
message 2, ui::PickerColumn::new("path", |item: &PickerDiagnostic, _| { - if let Some(path) = item.uri.as_path() { + if let Some(path) = item.location.uri.as_path() { path::get_truncated_path(path) .to_string_lossy() .to_string() @@ -261,26 +285,14 @@ fn diag_picker( primary_column, flat_diag, styles, - move |cx, - PickerDiagnostic { - uri, - diag, - offset_encoding, - }, - action| { - let Some(path) = uri.as_path() else { - return; - }; - jump_to_position(cx.editor, path, diag.range, *offset_encoding, action); + move |cx, diag, action| { + jump_to_location(cx.editor, &diag.location, diag.offset_encoding, action); let (view, doc) = current!(cx.editor); view.diagnostics_handler .immediately_show_diagnostic(doc, view.id); }, ) - .with_preview(move |_editor, PickerDiagnostic { uri, diag, .. }| { - let line = Some((diag.range.start.line as usize, diag.range.end.line as usize)); - Some((uri.as_path()?.into(), line)) - }) + .with_preview(move |_editor, diag| location_to_file_location(&diag.location)) .truncate_start(false) } @@ -303,7 +315,10 @@ pub fn symbol_picker(cx: &mut Context) { container_name: None, }, offset_encoding, - uri: uri.clone(), + location: Location { + uri: uri.clone(), + range: symbol.selection_range, + }, }); for child in symbol.children.into_iter().flatten() { nested_to_flat(list, file, uri, child, offset_encoding); @@ -337,7 +352,10 @@ pub fn symbol_picker(cx: &mut Context) { lsp::DocumentSymbolResponse::Flat(symbols) => symbols .into_iter() .map(|symbol| SymbolInformationItem { - uri: doc_uri.clone(), + location: Location { + uri: doc_uri.clone(), + range: symbol.location.range, + }, symbol, offset_encoding, }) @@ -392,17 +410,10 @@ pub fn symbol_picker(cx: &mut Context) { symbols, (), move |cx, item, action| { - jump_to_location( - cx.editor, - &item.symbol.location, - item.offset_encoding, - action, - ); + jump_to_location(cx.editor, &item.location, item.offset_encoding, action); }, ) - .with_preview(move |_editor, item| { - uri_to_file_location(&item.uri, &item.symbol.location.range) - }) + .with_preview(move |_editor, item| location_to_file_location(&item.location)) .truncate_start(false); compositor.push(Box::new(overlaid(picker))) @@ -453,8 +464,11 @@ pub fn workspace_symbol_picker(cx: &mut Context) { } }; Some(SymbolInformationItem { + location: Location { + uri, + range: symbol.location.range, + }, symbol, - uri, offset_encoding, }) }) @@ -490,7 +504,7 @@ pub fn workspace_symbol_picker(cx: &mut Context) { }) .without_filtering(), ui::PickerColumn::new("path", |item: &SymbolInformationItem, _| { - if let Some(path) = item.uri.as_path() { + if let Some(path) = item.location.uri.as_path() { path::get_relative_path(path) .to_string_lossy() .to_string() @@ -507,15 +521,10 @@ pub fn workspace_symbol_picker(cx: &mut Context) { [], (), move |cx, item, action| { - jump_to_location( - cx.editor, - &item.symbol.location, - item.offset_encoding, - action, - ); + jump_to_location(cx.editor, &item.location, item.offset_encoding, action); }, ) - .with_preview(|_editor, item| uri_to_file_location(&item.uri, &item.symbol.location.range)) + .with_preview(|_editor, item| location_to_file_location(&item.location)) .with_dynamic_query(get_symbols, None) .truncate_start(false); @@ -847,7 +856,7 @@ impl Display for ApplyEditErrorKind { fn goto_impl( editor: &mut Editor, compositor: &mut Compositor, - locations: Vec, + locations: Vec, offset_encoding: OffsetEncoding, ) { let cwdir = helix_stdx::env::current_working_dir(); @@ -860,80 +869,41 @@ fn goto_impl( _locations => { let columns = 
[ui::PickerColumn::new( "location", - |item: &lsp::Location, cwdir: &std::path::PathBuf| { - // The preallocation here will overallocate a few characters since it will account for the - // URL's scheme, which is not used most of the time since that scheme will be "file://". - // Those extra chars will be used to avoid allocating when writing the line number (in the - // common case where it has 5 digits or less, which should be enough for a cast majority - // of usages). - let mut res = String::with_capacity(item.uri.as_str().len()); - - if item.uri.scheme() == "file" { - // With the preallocation above and UTF-8 paths already, this closure will do one (1) - // allocation, for `to_file_path`, else there will be two (2), with `to_string_lossy`. - if let Ok(path) = item.uri.to_file_path() { - // We don't convert to a `helix_core::Uri` here because we've already checked the scheme. - // This path won't be normalized but it's only used for display. - res.push_str( - &path.strip_prefix(cwdir).unwrap_or(&path).to_string_lossy(), - ); - } + |item: &Location, cwdir: &std::path::PathBuf| { + let path = if let Some(path) = item.uri.as_path() { + path.strip_prefix(cwdir).unwrap_or(path).to_string_lossy() } else { - // Never allocates since we declared the string with this capacity already. - res.push_str(item.uri.as_str()); - } + item.uri.to_string().into() + }; - // Most commonly, this will not allocate, especially on Unix systems where the root prefix - // is a simple `/` and not `C:\` (with whatever drive letter) - write!(&mut res, ":{}", item.range.start.line + 1) - .expect("Will only failed if allocating fail"); - res.into() + format!("{path}:{}", item.range.start.line + 1).into() }, )]; let picker = Picker::new(columns, 0, locations, cwdir, move |cx, location, action| { jump_to_location(cx.editor, location, offset_encoding, action) }) - .with_preview(move |_editor, location| { - use crate::ui::picker::PathOrId; - - let lines = Some(( - location.range.start.line as usize, - location.range.end.line as usize, - )); - - // TODO: we should avoid allocating by doing the Uri conversion ahead of time. - // - // To do this, introduce a `Location` type in `helix-core` that reuses the core - // `Uri` type instead of the LSP `Url` type and replaces the LSP `Range` type. - // Refactor the callers of `goto_impl` to pass iterators that translate the - // LSP location type to the custom one in core, or have them collect and pass - // `Vec`s. Replace the `uri_to_file_location` function with - // `location_to_file_location` that takes only `&helix_core::Location` as - // parameters. - // - // By doing this we can also eliminate the duplicated URI info in the - // `SymbolInformationItem` type and introduce a custom Symbol type in `helix-core` - // which will be reused in the future for tree-sitter based symbol pickers. 
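Side note: the removed TODO above is exactly what this change implements, so the URI conversion now happens once, up front. A minimal sketch of that step (hypothetical helper; it assumes the private `Location` struct and `lsp_location_to_location` introduced earlier in this file are in scope):

```rust
// Hypothetical helper mirroring what `to_locations` and `goto_reference` now do:
// convert eagerly and keep only usable file locations.
fn collect_locations(raw: Vec<lsp::Location>) -> Vec<Location> {
    raw.into_iter()
        // Invalid or non-file URIs are logged inside `lsp_location_to_location`
        // and simply skipped here.
        .flat_map(lsp_location_to_location)
        .collect()
}
```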
- let path = Uri::try_from(&location.uri).ok()?.as_path_buf()?; - #[allow(deprecated)] - Some((PathOrId::from_path_buf(path), lines)) - }); + .with_preview(move |_editor, location| location_to_file_location(location)); compositor.push(Box::new(overlaid(picker))); } } } -fn to_locations(definitions: Option) -> Vec { +fn to_locations(definitions: Option) -> Vec { match definitions { - Some(lsp::GotoDefinitionResponse::Scalar(location)) => vec![location], - Some(lsp::GotoDefinitionResponse::Array(locations)) => locations, + Some(lsp::GotoDefinitionResponse::Scalar(location)) => { + lsp_location_to_location(location).into_iter().collect() + } + Some(lsp::GotoDefinitionResponse::Array(locations)) => locations + .into_iter() + .flat_map(lsp_location_to_location) + .collect(), Some(lsp::GotoDefinitionResponse::Link(locations)) => locations .into_iter() - .map(|location_link| lsp::Location { - uri: location_link.target_uri, - range: location_link.target_range, + .map(|location_link| { + lsp::Location::new(location_link.target_uri, location_link.target_range) }) + .flat_map(lsp_location_to_location) .collect(), None => Vec::new(), } @@ -1018,7 +988,11 @@ pub fn goto_reference(cx: &mut Context) { cx.callback( future, move |editor, compositor, response: Option>| { - let items = response.unwrap_or_default(); + let items: Vec = response + .into_iter() + .flatten() + .flat_map(lsp_location_to_location) + .collect(); if items.is_empty() { editor.set_error("No references found."); } else { diff --git a/helix-term/src/commands/typed.rs b/helix-term/src/commands/typed.rs index 7ad0369fc..68ba9bab5 100644 --- a/helix-term/src/commands/typed.rs +++ b/helix-term/src/commands/typed.rs @@ -3032,7 +3032,7 @@ pub const TYPABLE_COMMAND_LIST: &[TypableCommand] = &[ TypableCommand { name: "tree-sitter-subtree", aliases: &["ts-subtree"], - doc: "Display tree sitter subtree under cursor, primarily for debugging queries.", + doc: "Display the smallest tree-sitter subtree that spans the primary selection, primarily for debugging queries.", fun: tree_sitter_subtree, signature: CommandSignature::none(), }, diff --git a/helix-term/src/ui/document.rs b/helix-term/src/ui/document.rs index 79145ba04..ae00ea149 100644 --- a/helix-term/src/ui/document.rs +++ b/helix-term/src/ui/document.rs @@ -433,7 +433,7 @@ impl<'a> TextRenderer<'a> { Grapheme::Newline => &self.newline, }; - let in_bounds = self.column_in_bounds(position.col + width - 1); + let in_bounds = self.column_in_bounds(position.col, width); if in_bounds { self.surface.set_string( @@ -452,7 +452,6 @@ impl<'a> TextRenderer<'a> { ); self.surface.set_style(rect, style); } - if *is_in_indent_area && !is_whitespace { *last_indent_level = position.col; *is_in_indent_area = false; @@ -461,8 +460,8 @@ impl<'a> TextRenderer<'a> { width } - pub fn column_in_bounds(&self, colum: usize) -> bool { - self.offset.col <= colum && colum < self.viewport.width as usize + self.offset.col + pub fn column_in_bounds(&self, colum: usize, width: usize) -> bool { + self.offset.col <= colum && colum + width <= self.offset.col + self.viewport.width as usize } /// Overlay indentation guides ontop of a rendered line diff --git a/helix-term/src/ui/picker.rs b/helix-term/src/ui/picker.rs index b5cba13bd..93fe652c7 100644 --- a/helix-term/src/ui/picker.rs +++ b/helix-term/src/ui/picker.rs @@ -32,7 +32,7 @@ use std::{ borrow::Cow, collections::HashMap, io::Read, - path::{Path, PathBuf}, + path::Path, sync::{ atomic::{self, AtomicUsize}, Arc, @@ -63,26 +63,12 @@ pub const MAX_FILE_SIZE_FOR_PREVIEW: u64 
= 10 * 1024 * 1024; #[derive(PartialEq, Eq, Hash)] pub enum PathOrId<'a> { Id(DocumentId), - // See [PathOrId::from_path_buf]: this will eventually become `Path(&Path)`. - Path(Cow<'a, Path>), -} - -impl<'a> PathOrId<'a> { - /// Creates a [PathOrId] from a PathBuf - /// - /// # Deprecated - /// The owned version of PathOrId will be removed in a future refactor - /// and replaced with `&'a Path`. See the caller of this function for - /// more details on its removal. - #[deprecated] - pub fn from_path_buf(path_buf: PathBuf) -> Self { - Self::Path(Cow::Owned(path_buf)) - } + Path(&'a Path), } impl<'a> From<&'a Path> for PathOrId<'a> { fn from(path: &'a Path) -> Self { - Self::Path(Cow::Borrowed(path)) + Self::Path(path) } } @@ -626,7 +612,6 @@ impl Picker { match path_or_id { PathOrId::Path(path) => { - let path = path.as_ref(); if let Some(doc) = editor.document_by_path(path) { return Some((Preview::EditorDocument(doc), range)); } diff --git a/helix-term/src/ui/text_decorations.rs b/helix-term/src/ui/text_decorations.rs index 630af5817..931ea4311 100644 --- a/helix-term/src/ui/text_decorations.rs +++ b/helix-term/src/ui/text_decorations.rs @@ -164,7 +164,7 @@ impl Decoration for Cursor<'_> { renderer: &mut TextRenderer, grapheme: &FormattedGrapheme, ) -> usize { - if renderer.column_in_bounds(grapheme.visual_pos.col) + if renderer.column_in_bounds(grapheme.visual_pos.col, grapheme.width()) && renderer.offset.row < grapheme.visual_pos.row { let position = grapheme.visual_pos - renderer.offset; diff --git a/helix-term/src/ui/text_decorations/diagnostics.rs b/helix-term/src/ui/text_decorations/diagnostics.rs index 2d9e83700..0bb0026f7 100644 --- a/helix-term/src/ui/text_decorations/diagnostics.rs +++ b/helix-term/src/ui/text_decorations/diagnostics.rs @@ -98,7 +98,7 @@ impl Renderer<'_, '_> { fn draw_eol_diagnostic(&mut self, diag: &Diagnostic, row: u16, col: usize) -> u16 { let style = self.styles.severity_style(diag.severity()); let width = self.renderer.viewport.width; - if !self.renderer.column_in_bounds(col + 1) { + if !self.renderer.column_in_bounds(col + 1, 1) { return 0; } let col = (col - self.renderer.offset.col) as u16; diff --git a/helix-term/tests/test/commands.rs b/helix-term/tests/test/commands.rs index 9f196827f..f71ae308d 100644 --- a/helix-term/tests/test/commands.rs +++ b/helix-term/tests/test/commands.rs @@ -632,6 +632,41 @@ async fn test_join_selections_space() -> anyhow::Result<()> { Ok(()) } +#[tokio::test(flavor = "multi_thread")] +async fn test_join_selections_comment() -> anyhow::Result<()> { + test(( + indoc! {"\ + /// #[a|]#bc + /// def + "}, + ":lang rustJ", + indoc! {"\ + /// #[a|]#bc def + "}, + )) + .await?; + + // Only join if the comment token matches the previous line. + test(( + indoc! {"\ + #[| // a + // b + /// c + /// d + e + /// f + // g]# + "}, + ":lang rustJ", + indoc! 
{"\ + #[| // a b /// c d e f // g]# + "}, + )) + .await?; + + Ok(()) +} + #[tokio::test(flavor = "multi_thread")] async fn test_read_file() -> anyhow::Result<()> { let mut file = tempfile::NamedTempFile::new()?; diff --git a/helix-tui/Cargo.toml b/helix-tui/Cargo.toml index a349623b1..96f008a01 100644 --- a/helix-tui/Cargo.toml +++ b/helix-tui/Cargo.toml @@ -24,5 +24,5 @@ unicode-segmentation = "1.12" crossterm = { version = "0.28", optional = true } termini = "1.0" serde = { version = "1", "optional" = true, features = ["derive"]} -once_cell = "1.19" +once_cell = "1.20" log = "~0.4" diff --git a/helix-vcs/Cargo.toml b/helix-vcs/Cargo.toml index 245fdb8dc..43d5d619b 100644 --- a/helix-vcs/Cargo.toml +++ b/helix-vcs/Cargo.toml @@ -29,4 +29,4 @@ log = "0.4" git = ["gix"] [dev-dependencies] -tempfile = "3.12" +tempfile = "3.13" diff --git a/helix-vcs/src/git.rs b/helix-vcs/src/git.rs index 78e582436..48220f4df 100644 --- a/helix-vcs/src/git.rs +++ b/helix-vcs/src/git.rs @@ -22,18 +22,24 @@ use crate::FileChange; #[cfg(test)] mod test; +#[inline] +fn get_repo_dir(file: &Path) -> Result<&Path> { + file.parent().context("file has no parent directory") +} + pub fn get_diff_base(file: &Path) -> Result> { debug_assert!(!file.exists() || file.is_file()); debug_assert!(file.is_absolute()); + let file = gix::path::realpath(file).context("resolve symlinks")?; // TODO cache repository lookup - let repo_dir = file.parent().context("file has no parent directory")?; + let repo_dir = get_repo_dir(&file)?; let repo = open_repo(repo_dir) .context("failed to open git repo")? .to_thread_local(); let head = repo.head_commit()?; - let file_oid = find_file_in_commit(&repo, &head, file)?; + let file_oid = find_file_in_commit(&repo, &head, &file)?; let file_object = repo.find_object(file_oid)?; let data = file_object.detach().data; @@ -56,7 +62,9 @@ pub fn get_diff_base(file: &Path) -> Result> { pub fn get_current_head_name(file: &Path) -> Result>>> { debug_assert!(!file.exists() || file.is_file()); debug_assert!(file.is_absolute()); - let repo_dir = file.parent().context("file has no parent directory")?; + let file = gix::path::realpath(file).context("resolve symlinks")?; + + let repo_dir = get_repo_dir(&file)?; let repo = open_repo(repo_dir) .context("failed to open git repo")? .to_thread_local(); diff --git a/helix-vcs/src/git/test.rs b/helix-vcs/src/git/test.rs index 95ff10b23..164040f50 100644 --- a/helix-vcs/src/git/test.rs +++ b/helix-vcs/src/git/test.rs @@ -98,9 +98,13 @@ fn directory() { assert!(git::get_diff_base(&dir).is_err()); } -/// Test that `get_file_head` does not return content for a symlink. -/// This is important to correctly cover cases where a symlink is removed and replaced by a file. -/// If the contents of the symlink object were returned a diff between a path and the actual file would be produced (bad ui). +/// Test that `get_diff_base` resolves symlinks so that the same diff base is +/// used as the target file. +/// +/// This is important to correctly cover cases where a symlink is removed and +/// replaced by a file. If the contents of the symlink object were returned +/// a diff between a literal file path and the actual file content would be +/// produced (bad ui). 
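Before the tests: the change above makes `get_diff_base` and `get_current_head_name` resolve symlinks before locating the repository. A rough sketch of that lookup order, as a hypothetical helper that uses `std::fs::canonicalize` in place of `gix::path::realpath`:

```rust
use std::path::{Path, PathBuf};

use anyhow::{Context, Result};

// Hypothetical sketch: canonicalize first so a symlinked file maps to its
// target's repository, then hand the parent directory to repo discovery.
fn repo_lookup_dir(file: &Path) -> Result<PathBuf> {
    let resolved = std::fs::canonicalize(file).context("resolve symlinks")?;
    let dir = resolved.parent().context("file has no parent directory")?;
    Ok(dir.to_path_buf())
}
```

The `symlink` and `symlink_to_git_repo` tests below pin down exactly this behaviour.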
#[cfg(any(unix, windows))] #[test] fn symlink() { @@ -108,14 +112,41 @@ fn symlink() { use std::os::unix::fs::symlink; #[cfg(not(unix))] use std::os::windows::fs::symlink_file as symlink; + let temp_git = empty_git_repo(); let file = temp_git.path().join("file.txt"); - let contents = b"foo".as_slice(); - File::create(&file).unwrap().write_all(contents).unwrap(); + let contents = Vec::from(b"foo"); + File::create(&file).unwrap().write_all(&contents).unwrap(); let file_link = temp_git.path().join("file_link.txt"); + symlink("file.txt", &file_link).unwrap(); + create_commit(temp_git.path(), true); + + assert_eq!(git::get_diff_base(&file_link).unwrap(), contents); + assert_eq!(git::get_diff_base(&file).unwrap(), contents); +} + +/// Test that `get_diff_base` returns content when the file is a symlink to +/// another file that is in a git repo, but the symlink itself is not. +#[cfg(any(unix, windows))] +#[test] +fn symlink_to_git_repo() { + #[cfg(unix)] + use std::os::unix::fs::symlink; + #[cfg(not(unix))] + use std::os::windows::fs::symlink_file as symlink; + + let temp_dir = tempfile::tempdir().expect("create temp dir"); + let temp_git = empty_git_repo(); + let file = temp_git.path().join("file.txt"); + let contents = Vec::from(b"foo"); + File::create(&file).unwrap().write_all(&contents).unwrap(); create_commit(temp_git.path(), true); - assert!(git::get_diff_base(&file_link).is_err()); - assert_eq!(git::get_diff_base(&file).unwrap(), Vec::from(contents)); + + let file_link = temp_dir.path().join("file_link.txt"); + symlink(&file, &file_link).unwrap(); + + assert_eq!(git::get_diff_base(&file_link).unwrap(), contents); + assert_eq!(git::get_diff_base(&file).unwrap(), contents); } diff --git a/helix-view/Cargo.toml b/helix-view/Cargo.toml index ddfa9f7e4..725a77547 100644 --- a/helix-view/Cargo.toml +++ b/helix-view/Cargo.toml @@ -28,10 +28,10 @@ bitflags = "2.6" anyhow = "1" crossterm = { version = "0.28", optional = true } -tempfile = "3.12" +tempfile = "3.13" # Conversion traits -once_cell = "1.19" +once_cell = "1.20" url = "2.5.2" arc-swap = { version = "1.7.1" } diff --git a/helix-view/src/handlers/lsp.rs b/helix-view/src/handlers/lsp.rs index 6aff2e50c..1fd2289db 100644 --- a/helix-view/src/handlers/lsp.rs +++ b/helix-view/src/handlers/lsp.rs @@ -243,7 +243,7 @@ impl Editor { match op { ResourceOp::Create(op) => { let uri = Uri::try_from(&op.uri)?; - let path = uri.as_path_buf().expect("URIs are valid paths"); + let path = uri.as_path().expect("URIs are valid paths"); let ignore_if_exists = op.options.as_ref().map_or(false, |options| { !options.overwrite.unwrap_or(false) && options.ignore_if_exists.unwrap_or(false) }); @@ -255,13 +255,15 @@ impl Editor { } } - fs::write(&path, [])?; - self.language_servers.file_event_handler.file_changed(path); + fs::write(path, [])?; + self.language_servers + .file_event_handler + .file_changed(path.to_path_buf()); } } ResourceOp::Delete(op) => { let uri = Uri::try_from(&op.uri)?; - let path = uri.as_path_buf().expect("URIs are valid paths"); + let path = uri.as_path().expect("URIs are valid paths"); if path.is_dir() { let recursive = op .options @@ -270,11 +272,13 @@ impl Editor { .unwrap_or(false); if recursive { - fs::remove_dir_all(&path)? + fs::remove_dir_all(path)? } else { - fs::remove_dir(&path)? + fs::remove_dir(path)? 
} - self.language_servers.file_event_handler.file_changed(path); + self.language_servers + .file_event_handler + .file_changed(path.to_path_buf()); } else if path.is_file() { fs::remove_file(path)?; } diff --git a/helix-view/src/theme.rs b/helix-view/src/theme.rs index 4acc56648..9dc326444 100644 --- a/helix-view/src/theme.rs +++ b/helix-view/src/theme.rs @@ -53,20 +53,34 @@ impl Loader { /// Loads a theme searching directories in priority order. pub fn load(&self, name: &str) -> Result { + let (theme, warnings) = self.load_with_warnings(name)?; + + for warning in warnings { + warn!("Theme '{}': {}", name, warning); + } + + Ok(theme) + } + + /// Loads a theme searching directories in priority order, returning any warnings + pub fn load_with_warnings(&self, name: &str) -> Result<(Theme, Vec)> { if name == "default" { - return Ok(self.default()); + return Ok((self.default(), Vec::new())); } if name == "base16_default" { - return Ok(self.base16_default()); + return Ok((self.base16_default(), Vec::new())); } let mut visited_paths = HashSet::new(); - let theme = self.load_theme(name, &mut visited_paths).map(Theme::from)?; + let (theme, warnings) = self + .load_theme(name, &mut visited_paths) + .map(Theme::from_toml)?; - Ok(Theme { + let theme = Theme { name: name.into(), ..theme - }) + }; + Ok((theme, warnings)) } /// Recursively load a theme, merging with any inherited parent themes. @@ -87,10 +101,7 @@ impl Loader { let theme_toml = if let Some(parent_theme_name) = inherits { let parent_theme_name = parent_theme_name.as_str().ok_or_else(|| { - anyhow!( - "Theme: expected 'inherits' to be a string: {}", - parent_theme_name - ) + anyhow!("Expected 'inherits' to be a string: {}", parent_theme_name) })?; let parent_theme_toml = match parent_theme_name { @@ -181,9 +192,9 @@ impl Loader { }) .ok_or_else(|| { if cycle_found { - anyhow!("Theme: cycle found in inheriting: {}", name) + anyhow!("Cycle found in inheriting: {}", name) } else { - anyhow!("Theme: file not found for: {}", name) + anyhow!("File not found for: {}", name) } }) } @@ -220,19 +231,11 @@ pub struct Theme { impl From for Theme { fn from(value: Value) -> Self { - if let Value::Table(table) = value { - let (styles, scopes, highlights) = build_theme_values(table); - - Self { - styles, - scopes, - highlights, - ..Default::default() - } - } else { - warn!("Expected theme TOML value to be a table, found {:?}", value); - Default::default() + let (theme, warnings) = Theme::from_toml(value); + for warning in warnings { + warn!("{}", warning); } + theme } } @@ -242,31 +245,29 @@ impl<'de> Deserialize<'de> for Theme { D: Deserializer<'de>, { let values = Map::::deserialize(deserializer)?; - - let (styles, scopes, highlights) = build_theme_values(values); - - Ok(Self { - styles, - scopes, - highlights, - ..Default::default() - }) + let (theme, warnings) = Theme::from_keys(values); + for warning in warnings { + warn!("{}", warning); + } + Ok(theme) } } fn build_theme_values( mut values: Map, -) -> (HashMap, Vec, Vec