Merge branch 'master' of https://github.com/helix-editor/helix into tree_explore

wongjiahau 2 years ago
commit d043ea4db4

Cargo.lock (generated)

@ -1206,6 +1206,7 @@ dependencies = [
"futures-util",
"helix-core",
"helix-loader",
"helix-parsec",
"log",
"lsp-types",
"serde",
@ -1216,6 +1217,10 @@ dependencies = [
"which",
]
[[package]]
name = "helix-parsec"
version = "0.6.0"
[[package]]
name = "helix-term"
version = "0.6.0"
@ -1265,6 +1270,8 @@ dependencies = [
"crossterm",
"helix-core",
"helix-view",
"log",
"once_cell",
"serde",
"termini",
"unicode-segmentation",

@ -8,6 +8,7 @@ members = [
"helix-dap",
"helix-loader",
"helix-vcs",
"helix-parsec",
"xtask",
]

@ -55,6 +55,7 @@ signal to the Helix process on Unix operating systems, such as by using the comm
| `rulers` | List of column positions at which to display the rulers. Can be overridden by language-specific `rulers` in the `languages.toml` file | `[]` |
| `bufferline` | Renders a line at the top of the editor displaying open buffers. Can be `always`, `never` or `multiple` (only shown if more than one buffer is in use) | `never` |
| `color-modes` | Whether to color the mode indicator with different colors depending on the mode itself | `false` |
| `text-width` | Maximum line length. Used for the `:reflow` command and soft-wrapping if `soft-wrap.wrap-at-text-width` is set | `80` |
### `[editor.statusline]` Section
@ -316,12 +317,13 @@ Currently unused
Options for soft wrapping lines that exceed the view width:
| Key | Description | Default |
| --- | --- | --- |
| `enable` | Whether soft wrapping is enabled | `false` |
| `max-wrap` | Maximum free space left at the end of the line | `20` |
| `max-indent-retain` | Maximum indentation to carry over when soft wrapping a line | `40` |
| `wrap-indicator` | Text inserted before soft wrapped lines, highlighted with `ui.virtual.wrap` | `↪ ` |
| Key | Description | Default |
| --- | --- | --- |
| `enable` | Whether soft wrapping is enabled. | `false` |
| `max-wrap` | Maximum free space left at the end of the line. | `20` |
| `max-indent-retain` | Maximum indentation to carry over when soft wrapping a line. | `40` |
| `wrap-indicator` | Text inserted before soft wrapped lines, highlighted with `ui.virtual.wrap` | `↪ ` |
| `wrap-at-text-width` | Soft wrap at `text-width` instead of using the full viewport size. | `false` |
Example:

@ -49,6 +49,7 @@
| `:update` | Write changes only if the file has been modified. |
| `:lsp-workspace-command` | Open workspace command picker |
| `:lsp-restart` | Restarts the Language Server that is in use by the current doc |
| `:lsp-stop` | Stops the Language Server that is in use by the current doc |
| `:tree-sitter-scopes` | Display tree sitter scopes, primarily for theming and development. |
| `:debug-start`, `:dbg` | Start a debug session from a given template with given parameters. |
| `:debug-remote`, `:dbg-tcp` | Connect to a debug adapter by TCP address and start a debugging session from a given template with given parameters. |
@ -75,3 +76,4 @@
| `:pipe` | Pipe each selection to the shell command. |
| `:pipe-to` | Pipe each selection to the shell command, ignoring output. |
| `:run-shell-command`, `:sh` | Run a shell command |
| `:reset-diff-change`, `:diffget`, `:diffg` | Reset the diff change at the cursor position. |

@ -63,7 +63,7 @@ These configuration keys are available:
| `config` | Language Server configuration |
| `grammar` | The tree-sitter grammar to use (defaults to the value of `name`) |
| `formatter` | The formatter for the language; it takes precedence over the LSP when defined. The formatter must be able to take the original file as input from stdin and write the formatted file to stdout |
| `max-line-length` | Maximum line length. Used for the `:reflow` command and soft-wrapping |
| `text-width` | Maximum line length. Used for the `:reflow` command and soft-wrapping if `soft-wrap.wrap-at-text-width` is set, defaults to `editor.text-width` |
### File-type detection and the `file-types` key

@ -578,6 +578,16 @@ impl Selection {
self.normalize()
}
/// Maps each `Range` through the closure, which may yield multiple `Range`s, and flattens the results into a new selection.
pub fn transform_iter<F, I>(mut self, f: F) -> Self
where
F: FnMut(Range) -> I,
I: Iterator<Item = Range>,
{
self.ranges = self.ranges.into_iter().flat_map(f).collect();
self.normalize()
}
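A minimal usage sketch of the new `transform_iter` helper (illustrative only, not part of this diff), splitting every selected range in half:

```rust
use helix_core::{Range, Selection};

// Split each range into two halves; `transform_iter` flattens the returned
// iterators and re-normalizes the resulting selection.
let selection = Selection::single(0, 10);
let halved = selection.transform_iter(|range| {
    let mid = (range.from() + range.to()) / 2;
    [Range::new(range.from(), mid), Range::new(mid, range.to())].into_iter()
});
assert_eq!(halved.ranges().len(), 2);
```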
// Ensures the selection adheres to the following invariants:
// 1. All ranges are grapheme aligned.
// 2. All ranges are at least 1 character wide, unless at the

@ -275,51 +275,138 @@ mod test {
#[test]
fn test_get_surround_pos() {
let doc = Rope::from("(some) (chars)\n(newline)");
let slice = doc.slice(..);
let selection = Selection::new(
SmallVec::from_slice(&[Range::point(2), Range::point(9), Range::point(20)]),
0,
);
#[rustfmt::skip]
let (doc, selection, expectations) =
rope_with_selections_and_expectations(
"(some) (chars)\n(newline)",
"_ ^ _ _ ^ _\n_ ^ _"
);
// cursor on s[o]me, c[h]ars, newl[i]ne
assert_eq!(
get_surround_pos(slice, &selection, Some('('), 1)
.unwrap()
.as_slice(),
&[0, 5, 7, 13, 15, 23]
get_surround_pos(doc.slice(..), &selection, Some('('), 1).unwrap(),
expectations
);
}
#[test]
fn test_get_surround_pos_bail() {
let doc = Rope::from("[some]\n(chars)xx\n(newline)");
let slice = doc.slice(..);
fn test_get_surround_pos_bail_different_surround_chars() {
#[rustfmt::skip]
let (doc, selection, _) =
rope_with_selections_and_expectations(
"[some]\n(chars)xx\n(newline)",
" ^ \n ^ \n "
);
let selection =
Selection::new(SmallVec::from_slice(&[Range::point(2), Range::point(9)]), 0);
// cursor on s[o]me, c[h]ars
assert_eq!(
get_surround_pos(slice, &selection, Some('('), 1),
Err(Error::PairNotFound) // different surround chars
get_surround_pos(doc.slice(..), &selection, Some('('), 1),
Err(Error::PairNotFound)
);
}
#[test]
fn test_get_surround_pos_bail_overlapping_surround_chars() {
#[rustfmt::skip]
let (doc, selection, _) =
rope_with_selections_and_expectations(
"[some]\n(chars)xx\n(newline)",
" \n ^ \n ^ "
);
let selection = Selection::new(
SmallVec::from_slice(&[Range::point(14), Range::point(24)]),
0,
);
// cursor on [x]x, newli[n]e
assert_eq!(
get_surround_pos(slice, &selection, Some('('), 1),
get_surround_pos(doc.slice(..), &selection, Some('('), 1),
Err(Error::PairNotFound) // overlapping surround chars
);
}
#[test]
fn test_get_surround_pos_bail_cursor_overlap() {
#[rustfmt::skip]
let (doc, selection, _) =
rope_with_selections_and_expectations(
"[some]\n(chars)xx\n(newline)",
" ^^ \n \n "
);
let selection =
Selection::new(SmallVec::from_slice(&[Range::point(2), Range::point(3)]), 0);
// cursor on s[o][m]e
assert_eq!(
get_surround_pos(slice, &selection, Some('['), 1),
get_surround_pos(doc.slice(..), &selection, Some('['), 1),
Err(Error::CursorOverlap)
);
}
#[test]
fn test_find_nth_pairs_pos_quote_success() {
#[rustfmt::skip]
let (doc, selection, expectations) =
rope_with_selections_and_expectations(
"some 'quoted text' on this 'line'\n'and this one'",
" _ ^ _ \n "
);
assert_eq!(2, expectations.len());
assert_eq!(
find_nth_pairs_pos(doc.slice(..), '\'', selection.primary(), 1)
.expect("find should succeed"),
(expectations[0], expectations[1])
)
}
#[test]
fn test_find_nth_pairs_pos_nested_quote_success() {
#[rustfmt::skip]
let (doc, selection, expectations) =
rope_with_selections_and_expectations(
"some 'nested 'quoted' text' on this 'line'\n'and this one'",
" _ ^ _ \n "
);
assert_eq!(2, expectations.len());
assert_eq!(
find_nth_pairs_pos(doc.slice(..), '\'', selection.primary(), 2)
.expect("find should succeed"),
(expectations[0], expectations[1])
)
}
#[test]
fn test_find_nth_pairs_pos_inside_quote_ambiguous() {
#[rustfmt::skip]
let (doc, selection, _) =
rope_with_selections_and_expectations(
"some 'nested 'quoted' text' on this 'line'\n'and this one'",
" ^ \n "
);
assert_eq!(
find_nth_pairs_pos(doc.slice(..), '\'', selection.primary(), 1),
Err(Error::CursorOnAmbiguousPair)
)
}
// Create a Rope and a matching Selection using a specification language.
// ^ is a single-point selection.
// _ is an expected index. These are returned as a Vec<usize> for use in assertions.
fn rope_with_selections_and_expectations(
text: &str,
spec: &str,
) -> (Rope, Selection, Vec<usize>) {
if text.len() != spec.len() {
panic!("specification must match text length -- are newlines aligned?");
}
let rope = Rope::from(text);
let selections: SmallVec<[Range; 1]> = spec
.match_indices('^')
.into_iter()
.map(|(i, _)| Range::point(i))
.collect();
let expectations: Vec<usize> = spec
.match_indices('_')
.into_iter()
.map(|(i, _)| i)
.collect();
(rope, Selection::new(selections, 0), expectations)
}
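To illustrate the specification language with a hypothetical value (not part of the diff): `^` marks a cursor and `_` marks an index the test expects back.

```rust
// "(a)" with the cursor on 'a'; both parentheses are expected positions.
let (_doc, selection, expectations) =
    rope_with_selections_and_expectations("(a)", "_^_");
assert_eq!(selection.primary().head, 1);
assert_eq!(expectations, vec![0, 2]);
```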
}

@ -82,7 +82,8 @@ pub struct LanguageConfiguration {
pub shebangs: Vec<String>, // interpreter(s) associated with language
pub roots: Vec<String>, // these indicate project roots <.git, Cargo.toml>
pub comment_token: Option<String>,
pub max_line_length: Option<usize>,
pub text_width: Option<usize>,
pub soft_wrap: Option<SoftWrap>,
#[serde(default, skip_serializing, deserialize_with = "deserialize_lsp_config")]
pub config: Option<serde_json::Value>,
@ -546,6 +547,33 @@ impl LanguageConfiguration {
.ok()
}
}
#[derive(Debug, Clone, Default, PartialEq, Eq, Serialize, Deserialize)]
#[serde(default, rename_all = "kebab-case", deny_unknown_fields)]
pub struct SoftWrap {
/// Soft wrap lines that exceed the viewport width. Defaults to off.
pub enable: Option<bool>,
/// Maximum space left free at the end of the line.
/// This space is used to wrap text at word boundaries. If that is not possible within this limit
/// the word is simply split at the end of the line.
///
/// This is automatically hard-limited to a quarter of the viewport to ensure correct display on small views.
///
/// Defaults to 20
pub max_wrap: Option<u16>,
/// Maximum amount of indentation that can be carried over from the previous line when soft-wrapping.
/// If a line is indented further than this limit, it is rendered at the start of the viewport instead.
///
/// This is automatically hard-limited to a quarter of the viewport to ensure correct display on small views.
///
/// Defaults to 40
pub max_indent_retain: Option<u16>,
/// Indicator placed at the beginning of softwrapped lines
///
/// Defaults to ↪
pub wrap_indicator: Option<String>,
/// Softwrap at `text_width` instead of viewport width if it is shorter
pub wrap_at_text_width: Option<bool>,
}
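A hedged sketch (assumption, not from this commit) of how these serde attributes behave: with kebab-case renaming and `#[serde(default)]`, a partial `[editor.soft-wrap]` table deserializes with the missing fields left as `None`.

```rust
// Hypothetical round-trip through the `toml` crate; only `enable` and
// `max-wrap` are given, so the remaining options fall back to `None`.
let soft_wrap: SoftWrap = toml::from_str("enable = true\nmax-wrap = 25\n").unwrap();
assert_eq!(soft_wrap.enable, Some(true));
assert_eq!(soft_wrap.max_wrap, Some(25));
assert_eq!(soft_wrap.wrap_indicator, None);
```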
// Expose loader as Lazy<> global since it's always static?

@ -2,6 +2,6 @@ use smartstring::{LazyCompact, SmartString};
/// Given a slice of text, return the text re-wrapped to fit it
/// within the given width.
pub fn reflow_hard_wrap(text: &str, max_line_len: usize) -> SmartString<LazyCompact> {
textwrap::refill(text, max_line_len).into()
pub fn reflow_hard_wrap(text: &str, text_width: usize) -> SmartString<LazyCompact> {
textwrap::refill(text, text_width).into()
}
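A small usage sketch for the renamed parameter (the input text is illustrative, not from the diff):

```rust
use helix_core::wrap::reflow_hard_wrap;

// Re-wrap a one-line paragraph so that no line exceeds 20 columns.
let wrapped = reflow_hard_wrap("lorem ipsum dolor sit amet, consectetur adipiscing", 20);
assert!(wrapped.lines().all(|line| line.chars().count() <= 20));
```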

@ -14,6 +14,7 @@ homepage = "https://helix-editor.com"
[dependencies]
helix-core = { version = "0.6", path = "../helix-core" }
helix-loader = { version = "0.6", path = "../helix-loader" }
helix-parsec = { version = "0.6", path = "../helix-parsec" }
anyhow = "1.0"
futures-executor = "0.3"

@ -320,7 +320,7 @@ impl Client {
text_document: Some(lsp::TextDocumentClientCapabilities {
completion: Some(lsp::CompletionClientCapabilities {
completion_item: Some(lsp::CompletionItemCapability {
snippet_support: Some(false),
snippet_support: Some(true),
resolve_support: Some(lsp::CompletionItemCapabilityResolveSupport {
properties: vec![
String::from("documentation"),
@ -359,7 +359,7 @@ impl Client {
}),
rename: Some(lsp::RenameClientCapabilities {
dynamic_registration: Some(false),
prepare_support: Some(false),
prepare_support: Some(true),
prepare_support_default_behavior: None,
honors_change_annotations: Some(false),
}),
@ -1034,6 +1034,29 @@ impl Client {
Some(self.call::<lsp::request::DocumentSymbolRequest>(params))
}
pub fn prepare_rename(
&self,
text_document: lsp::TextDocumentIdentifier,
position: lsp::Position,
) -> Option<impl Future<Output = Result<Value>>> {
let capabilities = self.capabilities.get().unwrap();
match capabilities.rename_provider {
Some(lsp::OneOf::Right(lsp::RenameOptions {
prepare_provider: Some(true),
..
})) => (),
_ => return None,
}
let params = lsp::TextDocumentPositionParams {
text_document,
position,
};
Some(self.call::<lsp::request::PrepareRenameRequest>(params))
}
// empty string to get all symbols
pub fn workspace_symbols(&self, query: String) -> Option<impl Future<Output = Result<Value>>> {
let capabilities = self.capabilities.get().unwrap();

@ -1,5 +1,6 @@
mod client;
pub mod jsonrpc;
pub mod snippet;
mod transport;
pub use client::Client;
@ -59,6 +60,7 @@ pub mod util {
use super::*;
use helix_core::line_ending::{line_end_byte_index, line_end_char_index};
use helix_core::{diagnostic::NumberOrString, Range, Rope, Selection, Tendril, Transaction};
use helix_core::{smallvec, SmallVec};
/// Converts a diagnostic in the document to [`lsp::Diagnostic`].
///
@ -250,26 +252,17 @@ pub mod util {
pub fn generate_transaction_from_completion_edit(
doc: &Rope,
selection: &Selection,
edit: lsp::TextEdit,
offset_encoding: OffsetEncoding,
start_offset: i128,
end_offset: i128,
new_text: String,
) -> Transaction {
let replacement: Option<Tendril> = if edit.new_text.is_empty() {
let replacement: Option<Tendril> = if new_text.is_empty() {
None
} else {
Some(edit.new_text.into())
Some(new_text.into())
};
let text = doc.slice(..);
let primary_cursor = selection.primary().cursor(text);
let start_offset = match lsp_pos_to_pos(doc, edit.range.start, offset_encoding) {
Some(start) => start as i128 - primary_cursor as i128,
None => return Transaction::new(doc),
};
let end_offset = match lsp_pos_to_pos(doc, edit.range.end, offset_encoding) {
Some(end) => end as i128 - primary_cursor as i128,
None => return Transaction::new(doc),
};
Transaction::change_by_selection(doc, selection, |range| {
let cursor = range.cursor(text);
@ -281,6 +274,74 @@ pub mod util {
})
}
/// Creates a [Transaction] from the [snippet::Snippet] in a completion response.
/// The transaction applies the edit to all cursors.
pub fn generate_transaction_from_snippet(
doc: &Rope,
selection: &Selection,
start_offset: i128,
end_offset: i128,
snippet: snippet::Snippet,
line_ending: &str,
include_placeholder: bool,
) -> Transaction {
let text = doc.slice(..);
// For each cursor store offsets for the first tabstop
let mut cursor_tabstop_offsets = Vec::<SmallVec<[(i128, i128); 1]>>::new();
let transaction = Transaction::change_by_selection(doc, selection, |range| {
let cursor = range.cursor(text);
let replacement_start = (cursor as i128 + start_offset) as usize;
let replacement_end = (cursor as i128 + end_offset) as usize;
let newline_with_offset = format!(
"{line_ending}{blank:width$}",
line_ending = line_ending,
width = replacement_start - doc.line_to_char(doc.char_to_line(replacement_start)),
blank = ""
);
let (replacement, tabstops) =
snippet::render(&snippet, newline_with_offset, include_placeholder);
let replacement_len = replacement.chars().count();
cursor_tabstop_offsets.push(
tabstops
.first()
.unwrap_or(&smallvec![(replacement_len, replacement_len)])
.iter()
.map(|(from, to)| -> (i128, i128) {
(
*from as i128 - replacement_len as i128,
*to as i128 - replacement_len as i128,
)
})
.collect(),
);
(replacement_start, replacement_end, Some(replacement.into()))
});
// Create new selection based on the cursor tabstop from above
let mut cursor_tabstop_offsets_iter = cursor_tabstop_offsets.iter();
let selection = selection
.clone()
.map(transaction.changes())
.transform_iter(|range| {
cursor_tabstop_offsets_iter
.next()
.unwrap()
.iter()
.map(move |(from, to)| {
Range::new(
(range.anchor as i128 + *from) as usize,
(range.anchor as i128 + *to) as usize,
)
})
});
transaction.with_selection(selection)
}
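A hedged sketch of how a caller might wire these pieces together; the snippet text, offsets, and document below are purely illustrative and not taken from the diff:

```rust
use helix_core::{Rope, Selection};
use helix_lsp::{snippet, util};

// Insert a snippet at a single cursor: both offsets are 0 because the
// completion replaces nothing, and placeholder text is rendered in place.
let doc = Rope::from("fn main() {}\n");
let selection = Selection::point(11); // cursor just inside the braces
let parsed = snippet::parse(r#"todo!("$1")"#).unwrap();
let transaction =
    util::generate_transaction_from_snippet(&doc, &selection, 0, 0, parsed, "\n", true);
// The transaction inserts the rendered text and, via `transform_iter` above,
// moves the selection to the first tabstop.
```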
pub fn generate_transaction_from_edits(
doc: &Rope,
mut edits: Vec<lsp::TextEdit>,
@ -476,6 +537,16 @@ impl Registry {
}
}
pub fn stop(&mut self, language_config: &LanguageConfiguration) {
let scope = language_config.scope.clone();
if let Some((_, client)) = self.inner.remove(&scope) {
tokio::spawn(async move {
let _ = client.force_shutdown().await;
});
}
}
pub fn get(
&mut self,
language_config: &LanguageConfiguration,

@ -0,0 +1,525 @@
use std::borrow::Cow;
use anyhow::{anyhow, Result};
use helix_core::{smallvec, SmallVec};
#[derive(Debug, PartialEq, Eq)]
pub enum CaseChange {
Upcase,
Downcase,
Capitalize,
}
#[derive(Debug, PartialEq, Eq)]
pub enum FormatItem<'a> {
Text(&'a str),
Capture(usize),
CaseChange(usize, CaseChange),
Conditional(usize, Option<&'a str>, Option<&'a str>),
}
#[derive(Debug, PartialEq, Eq)]
pub struct Regex<'a> {
value: &'a str,
replacement: Vec<FormatItem<'a>>,
options: Option<&'a str>,
}
#[derive(Debug, PartialEq, Eq)]
pub enum SnippetElement<'a> {
Tabstop {
tabstop: usize,
},
Placeholder {
tabstop: usize,
value: Vec<SnippetElement<'a>>,
},
Choice {
tabstop: usize,
choices: Vec<&'a str>,
},
Variable {
name: &'a str,
default: Option<&'a str>,
regex: Option<Regex<'a>>,
},
Text(&'a str),
}
#[derive(Debug, PartialEq, Eq)]
pub struct Snippet<'a> {
elements: Vec<SnippetElement<'a>>,
}
pub fn parse(s: &str) -> Result<Snippet<'_>> {
parser::parse(s).map_err(|rest| anyhow!("Failed to parse snippet. Remaining input: {}", rest))
}
fn render_elements(
snippet_elements: &[SnippetElement<'_>],
insert: &mut String,
offset: &mut usize,
tabstops: &mut Vec<(usize, (usize, usize))>,
newline_with_offset: &String,
include_placeholder: bool,
) {
use SnippetElement::*;
for element in snippet_elements {
match element {
&Text(text) => {
// small optimization to avoid calling replace when it's unnecessary
let text = if text.contains('\n') {
Cow::Owned(text.replace('\n', newline_with_offset))
} else {
Cow::Borrowed(text)
};
*offset += text.chars().count();
insert.push_str(&text);
}
&Variable {
name: _,
regex: _,
r#default,
} => {
// TODO: variables. For now, fall back to the default, which defaults to "".
let text = r#default.unwrap_or_default();
*offset += text.chars().count();
insert.push_str(text);
}
&Tabstop { tabstop } => {
tabstops.push((tabstop, (*offset, *offset)));
}
Placeholder {
tabstop,
value: inner_snippet_elements,
} => {
let start_offset = *offset;
if include_placeholder {
render_elements(
inner_snippet_elements,
insert,
offset,
tabstops,
newline_with_offset,
include_placeholder,
);
}
tabstops.push((*tabstop, (start_offset, *offset)));
}
&Choice {
tabstop,
choices: _,
} => {
// TODO: choices
tabstops.push((tabstop, (*offset, *offset)));
}
}
}
}
#[allow(clippy::type_complexity)] // only used one time
pub fn render(
snippet: &Snippet<'_>,
newline_with_offset: String,
include_placeholder: bool,
) -> (String, Vec<SmallVec<[(usize, usize); 1]>>) {
let mut insert = String::new();
let mut tabstops = Vec::new();
let mut offset = 0;
render_elements(
&snippet.elements,
&mut insert,
&mut offset,
&mut tabstops,
&newline_with_offset,
include_placeholder,
);
// sort in ascending order (except for 0, which should always be the last one (per lsp doc))
tabstops.sort_unstable_by_key(|(n, _)| if *n == 0 { usize::MAX } else { *n });
// merge tabstops with the same index (we take advantage of the fact that we just sorted them
// above to simply look backwards)
let mut ntabstops = Vec::<SmallVec<[(usize, usize); 1]>>::new();
{
let mut prev = None;
for (tabstop, r) in tabstops {
if prev == Some(tabstop) {
let len_1 = ntabstops.len() - 1;
ntabstops[len_1].push(r);
} else {
prev = Some(tabstop);
ntabstops.push(smallvec![r]);
}
}
}
(insert, ntabstops)
}
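For orientation, a small example (assumed values, not in the diff) of what `parse` plus `render` produce for a simple placeholder snippet:

```rust
// "match(${1:Arg1})" renders its placeholder text and reports where tabstop 1
// ended up, so the caller can place cursors there.
let snippet = parse("match(${1:Arg1})").unwrap();
let (text, tabstops) = render(&snippet, String::from("\n"), true);
assert_eq!(text, "match(Arg1)");
assert_eq!(tabstops.len(), 1);
assert_eq!(tabstops[0].as_slice(), &[(6, 10)]); // "Arg1" spans chars 6..10
```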
mod parser {
use helix_parsec::*;
use super::{CaseChange, FormatItem, Regex, Snippet, SnippetElement};
/*
https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#snippet_syntax
any ::= tabstop | placeholder | choice | variable | text
tabstop ::= '$' int | '${' int '}'
placeholder ::= '${' int ':' any '}'
choice ::= '${' int '|' text (',' text)* '|}'
variable ::= '$' var | '${' var '}'
| '${' var ':' any '}'
| '${' var '/' regex '/' (format | text)+ '/' options '}'
format ::= '$' int | '${' int '}'
| '${' int ':' '/upcase' | '/downcase' | '/capitalize' '}'
| '${' int ':+' if '}'
| '${' int ':?' if ':' else '}'
| '${' int ':-' else '}' | '${' int ':' else '}'
regex ::= Regular Expression value (ctor-string)
options ::= Regular Expression option (ctor-options)
var ::= [_a-zA-Z] [_a-zA-Z0-9]*
int ::= [0-9]+
text ::= .*
if ::= text
else ::= text
*/
fn var<'a>() -> impl Parser<'a, Output = &'a str> {
// var = [_a-zA-Z][_a-zA-Z0-9]*
move |input: &'a str| match input
.char_indices()
.take_while(|(p, c)| {
*c == '_'
|| if *p == 0 {
c.is_ascii_alphabetic()
} else {
c.is_ascii_alphanumeric()
}
})
.last()
{
Some((index, c)) if index >= 1 => {
let index = index + c.len_utf8();
Ok((&input[index..], &input[0..index]))
}
_ => Err(input),
}
}
fn text<'a, const SIZE: usize>(cs: [char; SIZE]) -> impl Parser<'a, Output = &'a str> {
take_while(move |c| cs.into_iter().all(|c1| c != c1))
}
fn digit<'a>() -> impl Parser<'a, Output = usize> {
filter_map(take_while(|c| c.is_ascii_digit()), |s| s.parse().ok())
}
fn case_change<'a>() -> impl Parser<'a, Output = CaseChange> {
use CaseChange::*;
choice!(
map("upcase", |_| Upcase),
map("downcase", |_| Downcase),
map("capitalize", |_| Capitalize),
)
}
fn format<'a>() -> impl Parser<'a, Output = FormatItem<'a>> {
use FormatItem::*;
choice!(
// '$' int
map(right("$", digit()), Capture),
// '${' int '}'
map(seq!("${", digit(), "}"), |seq| Capture(seq.1)),
// '${' int ':' '/upcase' | '/downcase' | '/capitalize' '}'
map(seq!("${", digit(), ":/", case_change(), "}"), |seq| {
CaseChange(seq.1, seq.3)
}),
// '${' int ':+' if '}'
map(
seq!("${", digit(), ":+", take_until(|c| c == '}'), "}"),
|seq| { Conditional(seq.1, Some(seq.3), None) }
),
// '${' int ':?' if ':' else '}'
map(
seq!(
"${",
digit(),
":?",
take_until(|c| c == ':'),
":",
take_until(|c| c == '}'),
"}"
),
|seq| { Conditional(seq.1, Some(seq.3), Some(seq.5)) }
),
// '${' int ':-' else '}' | '${' int ':' else '}'
map(
seq!(
"${",
digit(),
":",
optional("-"),
take_until(|c| c == '}'),
"}"
),
|seq| { Conditional(seq.1, None, Some(seq.4)) }
),
)
}
fn regex<'a>() -> impl Parser<'a, Output = Regex<'a>> {
let text = map(text(['$', '/']), FormatItem::Text);
let replacement = reparse_as(
take_until(|c| c == '/'),
one_or_more(choice!(format(), text)),
);
map(
seq!(
"/",
take_until(|c| c == '/'),
"/",
replacement,
"/",
optional(take_until(|c| c == '}')),
),
|(_, value, _, replacement, _, options)| Regex {
value,
replacement,
options,
},
)
}
fn tabstop<'a>() -> impl Parser<'a, Output = SnippetElement<'a>> {
map(
or(
right("$", digit()),
map(seq!("${", digit(), "}"), |values| values.1),
),
|digit| SnippetElement::Tabstop { tabstop: digit },
)
}
fn placeholder<'a>() -> impl Parser<'a, Output = SnippetElement<'a>> {
let text = map(text(['$', '}']), SnippetElement::Text);
map(
seq!(
"${",
digit(),
":",
one_or_more(choice!(anything(), text)),
"}"
),
|seq| SnippetElement::Placeholder {
tabstop: seq.1,
value: seq.3,
},
)
}
fn choice<'a>() -> impl Parser<'a, Output = SnippetElement<'a>> {
map(
seq!(
"${",
digit(),
"|",
sep(take_until(|c| c == ',' || c == '|'), ","),
"|}",
),
|seq| SnippetElement::Choice {
tabstop: seq.1,
choices: seq.3,
},
)
}
fn variable<'a>() -> impl Parser<'a, Output = SnippetElement<'a>> {
choice!(
// $var
map(right("$", var()), |name| SnippetElement::Variable {
name,
default: None,
regex: None,
}),
// ${var:default}
map(
seq!("${", var(), ":", take_until(|c| c == '}'), "}",),
|values| SnippetElement::Variable {
name: values.1,
default: Some(values.3),
regex: None,
}
),
// ${var/value/format/options}
map(seq!("${", var(), regex(), "}"), |values| {
SnippetElement::Variable {
name: values.1,
default: None,
regex: Some(values.2),
}
}),
)
}
fn anything<'a>() -> impl Parser<'a, Output = SnippetElement<'a>> {
// The parser has to be constructed lazily to avoid infinite opaque type recursion
|input: &'a str| {
let parser = choice!(tabstop(), placeholder(), choice(), variable());
parser.parse(input)
}
}
fn snippet<'a>() -> impl Parser<'a, Output = Snippet<'a>> {
let text = map(text(['$']), SnippetElement::Text);
map(one_or_more(choice!(anything(), text)), |parts| Snippet {
elements: parts,
})
}
pub fn parse(s: &str) -> Result<Snippet, &str> {
snippet().parse(s).map(|(_input, elements)| elements)
}
#[cfg(test)]
mod test {
use super::SnippetElement::*;
use super::*;
#[test]
fn empty_string_is_error() {
assert_eq!(Err(""), parse(""));
}
#[test]
fn parse_placeholders_in_function_call() {
assert_eq!(
Ok(Snippet {
elements: vec![
Text("match("),
Placeholder {
tabstop: 1,
value: vec!(Text("Arg1")),
},
Text(")")
]
}),
parse("match(${1:Arg1})")
)
}
#[test]
fn parse_placeholders_in_statement() {
assert_eq!(
Ok(Snippet {
elements: vec![
Text("local "),
Placeholder {
tabstop: 1,
value: vec!(Text("var")),
},
Text(" = "),
Placeholder {
tabstop: 1,
value: vec!(Text("value")),
},
]
}),
parse("local ${1:var} = ${1:value}")
)
}
#[test]
fn parse_tabstop_nested_in_placeholder() {
assert_eq!(
Ok(Snippet {
elements: vec![Placeholder {
tabstop: 1,
value: vec!(Text("var, "), Tabstop { tabstop: 2 },),
},]
}),
parse("${1:var, $2}")
)
}
#[test]
fn parse_placeholder_nested_in_placeholder() {
assert_eq!(
Ok(Snippet {
elements: vec![Placeholder {
tabstop: 1,
value: vec!(
Text("foo "),
Placeholder {
tabstop: 2,
value: vec!(Text("bar")),
},
),
},]
}),
parse("${1:foo ${2:bar}}")
)
}
#[test]
fn parse_all() {
assert_eq!(
Ok(Snippet {
elements: vec![
Text("hello "),
Tabstop { tabstop: 1 },
Tabstop { tabstop: 2 },
Text(" "),
Choice {
tabstop: 1,
choices: vec!["one", "two", "three"]
},
Text(" "),
Variable {
name: "name",
default: Some("foo"),
regex: None
},
Text(" "),
Variable {
name: "var",
default: None,
regex: None
},
Text(" "),
Variable {
name: "TM",
default: None,
regex: None
},
]
}),
parse("hello $1${2} ${1|one,two,three|} ${name:foo} $var $TM")
);
}
#[test]
fn regex_capture_replace() {
assert_eq!(
Ok(Snippet {
elements: vec![Variable {
name: "TM_FILENAME",
default: None,
regex: Some(Regex {
value: "(.*).+$",
replacement: vec![FormatItem::Capture(1)],
options: None,
}),
}]
}),
parse("${TM_FILENAME/(.*).+$/$1/}")
);
}
}
}

@ -0,0 +1,13 @@
[package]
name = "helix-parsec"
version = "0.6.0"
authors = ["Blaž Hrastnik <blaz@mxxn.io>"]
edition = "2021"
license = "MPL-2.0"
description = "Parser combinators for Helix"
categories = ["editor"]
repository = "https://github.com/helix-editor/helix"
homepage = "https://helix-editor.com"
include = ["src/**/*", "README.md"]
[dependencies]

@ -0,0 +1,561 @@
//! Parser-combinator functions
//!
//! This module provides parsers and parser combinators which can be used
//! together to build parsers by functional composition.
// This module implements parser combinators following https://bodil.lol/parser-combinators/.
// `sym` (trait implementation for `&'static str`), `map`, `pred` (filter), `one_or_more`,
// `zero_or_more`, as well as the `Parser` trait originate mostly from that post.
// The remaining parsers and parser combinators are either based on
// https://github.com/archseer/snippets.nvim/blob/a583da6ef130d2a4888510afd8c4e5ffd62d0dce/lua/snippet/parser.lua#L5-L138
// or are novel.
// When a parser matches the input successfully, it returns `Ok((next_input, some_value))`
// where the type of the returned value depends on the parser. If the parser fails to match,
// it returns `Err(input)`.
type ParseResult<'a, Output> = Result<(&'a str, Output), &'a str>;
/// A parser or parser-combinator.
///
/// Parser-combinators compose multiple parsers together to parse input.
/// For example, two basic parsers (`&'static str`s) may be combined with
/// a parser-combinator like [or] to produce a new parser.
///
/// ```
/// use helix_parsec::{or, Parser};
/// let foo = "foo"; // matches "foo" literally
/// let bar = "bar"; // matches "bar" literally
/// let foo_or_bar = or(foo, bar); // matches either "foo" or "bar"
/// assert_eq!(Ok(("", "foo")), foo_or_bar.parse("foo"));
/// assert_eq!(Ok(("", "bar")), foo_or_bar.parse("bar"));
/// assert_eq!(Err("baz"), foo_or_bar.parse("baz"));
/// ```
pub trait Parser<'a> {
type Output;
fn parse(&self, input: &'a str) -> ParseResult<'a, Self::Output>;
}
// Most parser-combinators are written as higher-order functions which take some
// parser(s) as input and return a new parser: a function that takes input and returns
// a parse result. The underlying implementation of [Parser::parse] for these functions
// is simply application.
#[doc(hidden)]
impl<'a, F, T> Parser<'a> for F
where
F: Fn(&'a str) -> ParseResult<T>,
{
type Output = T;
fn parse(&self, input: &'a str) -> ParseResult<'a, Self::Output> {
self(input)
}
}
/// A parser which matches the string literal exactly.
///
/// This parser succeeds if the next characters in the input are equal to the given
/// string literal.
///
/// Note that [str::parse] interferes with calling [Parser::parse] on string literals
/// directly; this trait implementation works when used within any parser combinator
/// but does not work on its own. To call [Parser::parse] on a parser for a string
/// literal, use the [token] parser.
///
/// # Examples
///
/// ```
/// use helix_parsec::{or, Parser};
/// let parser = or("foo", "bar");
/// assert_eq!(Ok(("", "foo")), parser.parse("foo"));
/// assert_eq!(Ok(("", "bar")), parser.parse("bar"));
/// assert_eq!(Err("baz"), parser.parse("baz"));
/// ```
impl<'a> Parser<'a> for &'static str {
type Output = &'a str;
fn parse(&self, input: &'a str) -> ParseResult<'a, Self::Output> {
match input.get(0..self.len()) {
Some(actual) if actual == *self => Ok((&input[self.len()..], &input[0..self.len()])),
_ => Err(input),
}
}
}
// Parsers
/// A parser which matches the given string literally.
///
/// This function is a convenience for interpreting string literals as parsers
/// and is only necessary to avoid conflict with [str::parse]. See the documentation
/// for the `&'static str` implementation of [Parser].
///
/// # Examples
///
/// ```
/// use helix_parsec::{token, Parser};
/// let parser = token("foo");
/// assert_eq!(Ok(("", "foo")), parser.parse("foo"));
/// assert_eq!(Err("bar"), parser.parse("bar"));
/// ```
pub fn token<'a>(literal: &'static str) -> impl Parser<'a, Output = &'a str> {
literal
}
/// A parser which matches all values until the specified pattern is found.
///
/// If the pattern is not found, this parser does not match. The input up to the
/// first character for which the pattern returns `true` is returned; that character itself is not consumed.
///
/// If the pattern function returns true on the first input character, this
/// parser fails.
///
/// # Examples
///
/// ```
/// use helix_parsec::{take_until, Parser};
/// let parser = take_until(|c| c == '.');
/// assert_eq!(Ok((".bar", "foo")), parser.parse("foo.bar"));
/// assert_eq!(Err(".foo"), parser.parse(".foo"));
/// assert_eq!(Err("foo"), parser.parse("foo"));
/// ```
pub fn take_until<'a, F>(pattern: F) -> impl Parser<'a, Output = &'a str>
where
F: Fn(char) -> bool,
{
move |input: &'a str| match input.find(&pattern) {
Some(index) if index != 0 => Ok((&input[index..], &input[0..index])),
_ => Err(input),
}
}
/// A parser which matches input characters until the specified pattern no longer matches.
///
/// This parser only ever fails if the input has a length of zero.
///
/// # Examples
///
/// ```
/// use helix_parsec::{take_while, Parser};
/// let parser = take_while(|c| c == '1');
/// assert_eq!(Ok(("2", "11")), parser.parse("112"));
/// assert_eq!(Err("22"), parser.parse("22"));
/// ```
pub fn take_while<'a, F>(pattern: F) -> impl Parser<'a, Output = &'a str>
where
F: Fn(char) -> bool,
{
move |input: &'a str| match input
.char_indices()
.take_while(|(_p, c)| pattern(*c))
.last()
{
Some((index, c)) => {
let index = index + c.len_utf8();
Ok((&input[index..], &input[0..index]))
}
_ => Err(input),
}
}
// Variadic parser combinators
/// A parser combinator which matches a sequence of parsers in an all-or-nothing fashion.
///
/// The returned value is a tuple containing the outputs of all parsers in order. Each
/// parser in the sequence may be typed differently.
///
/// # Examples
///
/// ```
/// use helix_parsec::{seq, Parser};
/// let parser = seq!("<", "a", ">");
/// assert_eq!(Ok(("", ("<", "a", ">"))), parser.parse("<a>"));
/// assert_eq!(Err("<b>"), parser.parse("<b>"));
/// ```
#[macro_export]
macro_rules! seq {
($($parsers: expr),+ $(,)?) => {
($($parsers),+)
}
}
// Seq is implemented using trait-implementations of Parser for various size tuples.
// This allows sequences to be typed heterogeneously.
macro_rules! seq_impl {
($($parser:ident),+) => {
#[allow(non_snake_case)]
impl<'a, $($parser),+> Parser<'a> for ($($parser),+)
where
$($parser: Parser<'a>),+
{
type Output = ($($parser::Output),+);
fn parse(&self, input: &'a str) -> ParseResult<'a, Self::Output> {
let ($($parser),+) = self;
seq_body_impl!(input, input, $($parser),+ ; )
}
}
}
}
macro_rules! seq_body_impl {
($input:expr, $next_input:expr, $head:ident, $($tail:ident),+ ; $(,)? $($acc:ident),*) => {
match $head.parse($next_input) {
Ok((next_input, $head)) => seq_body_impl!($input, next_input, $($tail),+ ; $($acc),*, $head),
Err(_) => Err($input),
}
};
($input:expr, $next_input:expr, $last:ident ; $(,)? $($acc:ident),*) => {
match $last.parse($next_input) {
Ok((next_input, last)) => Ok((next_input, ($($acc),+, last))),
Err(_) => Err($input),
}
}
}
seq_impl!(A, B);
seq_impl!(A, B, C);
seq_impl!(A, B, C, D);
seq_impl!(A, B, C, D, E);
seq_impl!(A, B, C, D, E, F);
seq_impl!(A, B, C, D, E, F, G);
seq_impl!(A, B, C, D, E, F, G, H);
seq_impl!(A, B, C, D, E, F, G, H, I);
seq_impl!(A, B, C, D, E, F, G, H, I, J);
/// A parser combinator which chooses the first of the input parsers which matches
/// successfully.
///
/// All input parsers must have the same output type. This is a variadic form for [or].
///
/// # Examples
///
/// ```
/// use helix_parsec::{choice, or, Parser};
/// let parser = choice!("foo", "bar", "baz");
/// assert_eq!(Ok(("", "foo")), parser.parse("foo"));
/// assert_eq!(Ok(("", "bar")), parser.parse("bar"));
/// assert_eq!(Err("quiz"), parser.parse("quiz"));
/// ```
#[macro_export]
macro_rules! choice {
($parser: expr $(,)?) => {
$parser
};
($parser: expr, $($rest: expr),+ $(,)?) => {
or($parser, choice!($($rest),+))
}
}
// Ordinary parser combinators
/// A parser combinator which takes a parser as input and maps the output using the
/// given transformation function.
///
/// This corresponds to [Result::map]. The value is only mapped if the input parser
/// matches against input.
///
/// # Examples
///
/// ```
/// use helix_parsec::{map, Parser};
/// let parser = map("123", |s| s.parse::<i32>().unwrap());
/// assert_eq!(Ok(("", 123)), parser.parse("123"));
/// assert_eq!(Err("abc"), parser.parse("abc"));
/// ```
pub fn map<'a, P, F, T>(parser: P, map_fn: F) -> impl Parser<'a, Output = T>
where
P: Parser<'a>,
F: Fn(P::Output) -> T,
{
move |input| {
parser
.parse(input)
.map(|(next_input, result)| (next_input, map_fn(result)))
}
}
/// A parser combinator which succeeds if the given parser matches the input and
/// the given `filter_map_fn` returns `Some`.
///
/// # Examples
///
/// ```
/// use helix_parsec::{filter_map, take_until, Parser};
/// let parser = filter_map(take_until(|c| c == '.'), |s| s.parse::<i32>().ok());
/// assert_eq!(Ok((".456", 123)), parser.parse("123.456"));
/// assert_eq!(Err("abc.def"), parser.parse("abc.def"));
/// ```
pub fn filter_map<'a, P, F, T>(parser: P, filter_map_fn: F) -> impl Parser<'a, Output = T>
where
P: Parser<'a>,
F: Fn(P::Output) -> Option<T>,
{
move |input| match parser.parse(input) {
Ok((next_input, value)) => match filter_map_fn(value) {
Some(value) => Ok((next_input, value)),
None => Err(input),
},
Err(_) => Err(input),
}
}
/// A parser combinator which succeeds if the first given parser matches the input and
/// the second given parser also matches the output of the first.
///
/// # Examples
///
/// ```
/// use helix_parsec::{reparse_as, take_until, one_or_more, Parser};
/// let parser = reparse_as(take_until(|c| c == '/'), one_or_more("a"));
/// assert_eq!(Ok(("/bb", vec!["a", "a"])), parser.parse("aa/bb"));
/// ```
pub fn reparse_as<'a, P1, P2, T>(parser1: P1, parser2: P2) -> impl Parser<'a, Output = T>
where
P1: Parser<'a, Output = &'a str>,
P2: Parser<'a, Output = T>,
{
filter_map(parser1, move |str| {
parser2.parse(str).map(|(_, value)| value).ok()
})
}
/// A parser combinator which only matches the input when the predicate function
/// returns true.
///
/// # Examples
///
/// ```
/// use helix_parsec::{filter, take_until, Parser};
/// let parser = filter(take_until(|c| c == '.'), |s| s == &"123");
/// assert_eq!(Ok((".456", "123")), parser.parse("123.456"));
/// assert_eq!(Err("456.123"), parser.parse("456.123"));
/// ```
pub fn filter<'a, P, F, T>(parser: P, pred_fn: F) -> impl Parser<'a, Output = T>
where
P: Parser<'a, Output = T>,
F: Fn(&P::Output) -> bool,
{
move |input| {
if let Ok((next_input, value)) = parser.parse(input) {
if pred_fn(&value) {
return Ok((next_input, value));
}
}
Err(input)
}
}
/// A parser combinator which matches either of the input parsers.
///
/// Both parsers must have the same output type. For a variadic form which
/// can take any number of parsers, use `choice!`.
///
/// # Examples
///
/// ```
/// use helix_parsec::{or, Parser};
/// let parser = or("foo", "bar");
/// assert_eq!(Ok(("", "foo")), parser.parse("foo"));
/// assert_eq!(Ok(("", "bar")), parser.parse("bar"));
/// assert_eq!(Err("baz"), parser.parse("baz"));
/// ```
pub fn or<'a, P1, P2, T>(parser1: P1, parser2: P2) -> impl Parser<'a, Output = T>
where
P1: Parser<'a, Output = T>,
P2: Parser<'a, Output = T>,
{
move |input| match parser1.parse(input) {
ok @ Ok(_) => ok,
Err(_) => parser2.parse(input),
}
}
/// A parser combinator which attempts to match the given parser, returning a
/// `None` output value if the parser does not match.
///
/// The parser produced with this combinator always succeeds. If the given parser
/// succeeds, `Some(value)` is returned where `value` is the output of the given
/// parser. Otherwise, `None`.
///
/// # Examples
///
/// ```
/// use helix_parsec::{optional, Parser};
/// let parser = optional("foo");
/// assert_eq!(Ok(("bar", Some("foo"))), parser.parse("foobar"));
/// assert_eq!(Ok(("bar", None)), parser.parse("bar"));
/// ```
pub fn optional<'a, P, T>(parser: P) -> impl Parser<'a, Output = Option<T>>
where
P: Parser<'a, Output = T>,
{
move |input| match parser.parse(input) {
Ok((next_input, value)) => Ok((next_input, Some(value))),
Err(_) => Ok((input, None)),
}
}
/// A parser combinator which runs the given parsers in sequence and returns the
/// value of `left` if both are matched.
///
/// This is useful for two-element sequences in which you only want the output
/// value of the `left` parser.
///
/// # Examples
///
/// ```
/// use helix_parsec::{left, Parser};
/// let parser = left("foo", "bar");
/// assert_eq!(Ok(("", "foo")), parser.parse("foobar"));
/// ```
pub fn left<'a, L, R, T>(left: L, right: R) -> impl Parser<'a, Output = T>
where
L: Parser<'a, Output = T>,
R: Parser<'a>,
{
map(seq!(left, right), |(left_value, _)| left_value)
}
/// A parser combinator which runs the given parsers in sequence and returns the
/// value of `right` if both are matched.
///
/// This is useful for two-element sequences in which you only want the output
/// value of the `right` parser.
///
/// # Examples
///
/// ```
/// use helix_parsec::{right, Parser};
/// let parser = right("foo", "bar");
/// assert_eq!(Ok(("", "bar")), parser.parse("foobar"));
/// ```
pub fn right<'a, L, R, T>(left: L, right: R) -> impl Parser<'a, Output = T>
where
L: Parser<'a>,
R: Parser<'a, Output = T>,
{
map(seq!(left, right), |(_, right_value)| right_value)
}
/// A parser combinator which matches the given parser against the input zero or
/// more times.
///
/// This parser always succeeds and returns an empty Vec when it matches zero
/// times.
///
/// # Examples
///
/// ```
/// use helix_parsec::{zero_or_more, Parser};
/// let parser = zero_or_more("a");
/// assert_eq!(Ok(("", vec![])), parser.parse(""));
/// assert_eq!(Ok(("", vec!["a"])), parser.parse("a"));
/// assert_eq!(Ok(("", vec!["a", "a"])), parser.parse("aa"));
/// assert_eq!(Ok(("bb", vec![])), parser.parse("bb"));
/// ```
pub fn zero_or_more<'a, P, T>(parser: P) -> impl Parser<'a, Output = Vec<T>>
where
P: Parser<'a, Output = T>,
{
move |mut input| {
let mut values = Vec::new();
while let Ok((next_input, value)) = parser.parse(input) {
input = next_input;
values.push(value);
}
Ok((input, values))
}
}
/// A parser combinator which matches the given parser against the input one or
/// more times.
///
/// This parser combinator acts the same as [zero_or_more] but must match at
/// least once.
///
/// # Examples
///
/// ```
/// use helix_parsec::{one_or_more, Parser};
/// let parser = one_or_more("a");
/// assert_eq!(Err(""), parser.parse(""));
/// assert_eq!(Ok(("", vec!["a"])), parser.parse("a"));
/// assert_eq!(Ok(("", vec!["a", "a"])), parser.parse("aa"));
/// assert_eq!(Err("bb"), parser.parse("bb"));
/// ```
pub fn one_or_more<'a, P, T>(parser: P) -> impl Parser<'a, Output = Vec<T>>
where
P: Parser<'a, Output = T>,
{
move |mut input| {
let mut values = Vec::new();
match parser.parse(input) {
Ok((next_input, value)) => {
input = next_input;
values.push(value);
}
Err(err) => return Err(err),
}
while let Ok((next_input, value)) = parser.parse(input) {
input = next_input;
values.push(value);
}
Ok((input, values))
}
}
/// A parser combinator which matches one or more instances of the given parser
/// interspersed with the separator parser.
///
/// Output values of the separator parser are discarded.
///
/// This is typically used to parse function arguments or list items.
///
/// # Examples
///
/// ```rust
/// use helix_parsec::{sep, Parser};
/// let parser = sep("a", ",");
/// assert_eq!(Ok(("", vec!["a", "a", "a"])), parser.parse("a,a,a"));
/// ```
pub fn sep<'a, P, S, T>(parser: P, separator: S) -> impl Parser<'a, Output = Vec<T>>
where
P: Parser<'a, Output = T>,
S: Parser<'a>,
{
move |mut input| {
let mut values = Vec::new();
match parser.parse(input) {
Ok((next_input, value)) => {
input = next_input;
values.push(value);
}
Err(err) => return Err(err),
}
loop {
match separator.parse(input) {
Ok((next_input, _)) => input = next_input,
Err(_) => break,
}
match parser.parse(input) {
Ok((next_input, value)) => {
input = next_input;
values.push(value);
}
Err(_) => break,
}
}
Ok((input, values))
}
}

@ -30,22 +30,14 @@ use crate::{
use log::{debug, error, warn};
use std::{
io::{stdin, stdout, Write},
io::{stdin, stdout},
sync::Arc,
time::{Duration, Instant},
};
use anyhow::{Context, Error};
use crossterm::{
event::{
DisableBracketedPaste, DisableFocusChange, DisableMouseCapture, EnableBracketedPaste,
EnableFocusChange, EnableMouseCapture, Event as CrosstermEvent, KeyboardEnhancementFlags,
PopKeyboardEnhancementFlags, PushKeyboardEnhancementFlags,
},
execute, terminal,
tty::IsTty,
};
use crossterm::{event::Event as CrosstermEvent, tty::IsTty};
#[cfg(not(windows))]
use {
signal_hook::{consts::signal, low_level},
@ -63,10 +55,12 @@ use tui::backend::CrosstermBackend;
use tui::backend::TestBackend;
#[cfg(not(feature = "integration"))]
type Terminal = tui::terminal::Terminal<CrosstermBackend<std::io::Stdout>>;
type TerminalBackend = CrosstermBackend<std::io::Stdout>;
#[cfg(feature = "integration")]
type Terminal = tui::terminal::Terminal<TestBackend>;
type TerminalBackend = TestBackend;
type Terminal = tui::terminal::Terminal<TerminalBackend>;
pub struct Application {
compositor: Compositor,
@ -108,26 +102,6 @@ fn setup_integration_logging() {
.apply();
}
fn restore_term() -> Result<(), Error> {
let mut stdout = stdout();
// reset cursor shape
write!(stdout, "\x1B[0 q")?;
if matches!(terminal::supports_keyboard_enhancement(), Ok(true)) {
execute!(stdout, PopKeyboardEnhancementFlags)?;
}
// Ignore errors on disabling, this might trigger on windows if we call
// disable without calling enable previously
let _ = execute!(stdout, DisableMouseCapture);
execute!(
stdout,
DisableBracketedPaste,
DisableFocusChange,
terminal::LeaveAlternateScreen
)?;
terminal::disable_raw_mode()?;
Ok(())
}
impl Application {
pub fn new(
args: Args,
@ -472,13 +446,7 @@ impl Application {
pub async fn handle_signals(&mut self, signal: i32) {
match signal {
signal::SIGTSTP => {
// restore cursor
use helix_view::graphics::CursorKind;
self.terminal
.backend_mut()
.show_cursor(CursorKind::Block)
.ok();
restore_term().unwrap();
self.restore_term().unwrap();
low_level::emulate_default_handler(signal::SIGTSTP).unwrap();
}
signal::SIGCONT => {
@ -960,24 +928,32 @@ impl Application {
Call::MethodCall(helix_lsp::jsonrpc::MethodCall {
method, params, id, ..
}) => {
let call = match MethodCall::parse(&method, params) {
Ok(call) => call,
let reply = match MethodCall::parse(&method, params) {
Err(helix_lsp::Error::Unhandled) => {
error!("Language Server: Method not found {}", method);
return;
error!(
"Language Server: Method {} not found in request {}",
method, id
);
Err(helix_lsp::jsonrpc::Error {
code: helix_lsp::jsonrpc::ErrorCode::MethodNotFound,
message: format!("Method not found: {}", method),
data: None,
})
}
Err(err) => {
log::error!(
"received malformed method call from Language Server: {}: {}",
"Language Server: Received malformed method call {} in request {}: {}",
method,
id,
err
);
return;
Err(helix_lsp::jsonrpc::Error {
code: helix_lsp::jsonrpc::ErrorCode::ParseError,
message: format!("Malformed method call: {}", method),
data: None,
})
}
};
let reply = match call {
MethodCall::WorkDoneProgressCreate(params) => {
Ok(MethodCall::WorkDoneProgressCreate(params)) => {
self.lsp_progress.create(server_id, params.token);
let editor_view = self
@ -991,7 +967,7 @@ impl Application {
Ok(serde_json::Value::Null)
}
MethodCall::ApplyWorkspaceEdit(params) => {
Ok(MethodCall::ApplyWorkspaceEdit(params)) => {
apply_workspace_edit(
&mut self.editor,
helix_lsp::OffsetEncoding::Utf8,
@ -1004,13 +980,13 @@ impl Application {
failed_change: None,
}))
}
MethodCall::WorkspaceFolders => {
Ok(MethodCall::WorkspaceFolders) => {
let language_server =
self.editor.language_servers.get_by_id(server_id).unwrap();
Ok(json!(language_server.workspace_folders()))
}
MethodCall::WorkspaceConfiguration(params) => {
Ok(MethodCall::WorkspaceConfiguration(params)) => {
let result: Vec<_> = params
.items
.iter()
@ -1054,37 +1030,19 @@ impl Application {
}
}
async fn claim_term(&mut self) -> Result<(), Error> {
use helix_view::graphics::CursorKind;
terminal::enable_raw_mode()?;
if self.terminal.cursor_kind() == CursorKind::Hidden {
self.terminal.backend_mut().hide_cursor().ok();
}
let mut stdout = stdout();
execute!(
stdout,
terminal::EnterAlternateScreen,
EnableBracketedPaste,
EnableFocusChange
)?;
execute!(stdout, terminal::Clear(terminal::ClearType::All))?;
if self.config.load().editor.mouse {
execute!(stdout, EnableMouseCapture)?;
}
if matches!(terminal::supports_keyboard_enhancement(), Ok(true)) {
log::debug!("The enhanced keyboard protocol is supported on this terminal");
execute!(
stdout,
PushKeyboardEnhancementFlags(
KeyboardEnhancementFlags::DISAMBIGUATE_ESCAPE_CODES
| KeyboardEnhancementFlags::REPORT_ALTERNATE_KEYS
)
)?;
} else {
log::debug!("The enhanced keyboard protocol is not supported on this terminal");
}
async fn claim_term(&mut self) -> std::io::Result<()> {
let terminal_config = self.config.load().editor.clone().into();
self.terminal.claim(terminal_config)
}
Ok(())
fn restore_term(&mut self) -> std::io::Result<()> {
let terminal_config = self.config.load().editor.clone().into();
use helix_view::graphics::CursorKind;
self.terminal
.backend_mut()
.show_cursor(CursorKind::Block)
.ok();
self.terminal.restore(terminal_config)
}
pub async fn run<S>(&mut self, input_stream: &mut S) -> Result<i32, Error>
@ -1099,7 +1057,7 @@ impl Application {
// We can't handle errors properly inside this closure. And it's
// probably not a good idea to `unwrap()` inside a panic handler.
// So we just ignore the `Result`.
let _ = restore_term();
let _ = TerminalBackend::force_restore();
hook(info);
}));
@ -1107,13 +1065,7 @@ impl Application {
let close_errs = self.close().await;
// restore cursor
use helix_view::graphics::CursorKind;
self.terminal
.backend_mut()
.show_cursor(CursorKind::Block)
.ok();
restore_term()?;
self.restore_term()?;
for err in close_errs {
self.editor.exit_code = 1;

@ -2883,10 +2883,15 @@ fn push_jump(view: &mut View, doc: &Document) {
}
fn goto_line(cx: &mut Context) {
goto_line_impl(cx.editor, cx.count)
if cx.count.is_some() {
let (view, doc) = current!(cx.editor);
push_jump(view, doc);
goto_line_without_jumplist(cx.editor, cx.count);
}
}
fn goto_line_impl(editor: &mut Editor, count: Option<NonZeroUsize>) {
fn goto_line_without_jumplist(editor: &mut Editor, count: Option<NonZeroUsize>) {
if let Some(count) = count {
let (view, doc) = current!(editor);
let text = doc.text().slice(..);
@ -2903,7 +2908,6 @@ fn goto_line_impl(editor: &mut Editor, count: Option<NonZeroUsize>) {
.clone()
.transform(|range| range.put_cursor(text, pos, editor.mode == Mode::Select));
push_jump(view, doc);
doc.set_selection(view.id, selection);
}
}
@ -3066,13 +3070,13 @@ fn goto_first_change_impl(cx: &mut Context, reverse: bool) {
let (view, doc) = current!(editor);
if let Some(handle) = doc.diff_handle() {
let hunk = {
let hunks = handle.hunks();
let diff = handle.load();
let idx = if reverse {
hunks.len().saturating_sub(1)
diff.len().saturating_sub(1)
} else {
0
};
hunks.nth_hunk(idx)
diff.nth_hunk(idx)
};
if hunk != Hunk::NONE {
let range = hunk_range(hunk, doc.text().slice(..));
@ -3104,19 +3108,19 @@ fn goto_next_change_impl(cx: &mut Context, direction: Direction) {
let selection = doc.selection(view.id).clone().transform(|range| {
let cursor_line = range.cursor_line(doc_text) as u32;
let hunks = diff_handle.hunks();
let diff = diff_handle.load();
let hunk_idx = match direction {
Direction::Forward => hunks
Direction::Forward => diff
.next_hunk(cursor_line)
.map(|idx| (idx + count).min(hunks.len() - 1)),
Direction::Backward => hunks
.map(|idx| (idx + count).min(diff.len() - 1)),
Direction::Backward => diff
.prev_hunk(cursor_line)
.map(|idx| idx.saturating_sub(count)),
};
let Some(hunk_idx) = hunk_idx else {
return range;
};
let hunk = hunks.nth_hunk(hunk_idx);
let hunk = diff.nth_hunk(hunk_idx);
let new_range = hunk_range(hunk, doc_text);
if editor.mode == Mode::Select {
let head = if new_range.head < range.anchor {
@ -4776,14 +4780,14 @@ fn select_textobject(cx: &mut Context, objtype: textobject::TextObject) {
let textobject_change = |range: Range| -> Range {
let diff_handle = doc.diff_handle().unwrap();
let hunks = diff_handle.hunks();
let diff = diff_handle.load();
let line = range.cursor_line(text);
let hunk_idx = if let Some(hunk_idx) = hunks.hunk_at(line as u32, false) {
let hunk_idx = if let Some(hunk_idx) = diff.hunk_at(line as u32, false) {
hunk_idx
} else {
return range;
};
let hunk = hunks.nth_hunk(hunk_idx).after;
let hunk = diff.nth_hunk(hunk_idx).after;
let start = text.line_to_char(hunk.start as usize);
let end = text.line_to_char(hunk.end as usize);

@ -5,7 +5,7 @@ use helix_lsp::{
self, CodeAction, CodeActionOrCommand, CodeActionTriggerKind, DiagnosticSeverity,
NumberOrString,
},
util::{diagnostic_to_lsp_diagnostic, lsp_pos_to_pos, lsp_range_to_range, range_to_lsp_range},
util::{diagnostic_to_lsp_diagnostic, lsp_range_to_range, range_to_lsp_range},
OffsetEncoding,
};
use tui::{
@ -196,15 +196,15 @@ fn jump_to_location(
}
}
let (view, doc) = current!(editor);
let definition_pos = location.range.start;
// TODO: convert inside server
let new_pos = if let Some(new_pos) = lsp_pos_to_pos(doc.text(), definition_pos, offset_encoding)
{
new_pos
} else {
return;
};
doc.set_selection(view.id, Selection::point(new_pos));
let new_range =
if let Some(new_range) = lsp_range_to_range(doc.text(), location.range, offset_encoding) {
new_range
} else {
log::warn!("lsp position out of bounds - {:?}", location.range);
return;
};
doc.set_selection(view.id, Selection::single(new_range.anchor, new_range.head));
align_view(doc, view, Align::Center);
}
@ -1232,49 +1232,115 @@ pub fn hover(cx: &mut Context) {
}
pub fn rename_symbol(cx: &mut Context) {
let (view, doc) = current_ref!(cx.editor);
let text = doc.text().slice(..);
let primary_selection = doc.selection(view.id).primary();
let prefill = if primary_selection.len() > 1 {
primary_selection
} else {
use helix_core::textobject::{textobject_word, TextObject};
textobject_word(text, primary_selection, TextObject::Inside, 1, false)
fn get_prefill_from_word_boundary(editor: &Editor) -> String {
let (view, doc) = current_ref!(editor);
let text = doc.text().slice(..);
let primary_selection = doc.selection(view.id).primary();
if primary_selection.len() > 1 {
primary_selection
} else {
use helix_core::textobject::{textobject_word, TextObject};
textobject_word(text, primary_selection, TextObject::Inside, 1, false)
}
.fragment(text)
.into()
}
.fragment(text)
.into();
ui::prompt_with_input(
cx,
"rename-to:".into(),
prefill,
None,
ui::completers::none,
move |cx: &mut compositor::Context, input: &str, event: PromptEvent| {
if event != PromptEvent::Validate {
return;
fn get_prefill_from_lsp_response(
editor: &Editor,
offset_encoding: OffsetEncoding,
response: Option<lsp::PrepareRenameResponse>,
) -> Result<String, &'static str> {
match response {
Some(lsp::PrepareRenameResponse::Range(range)) => {
let text = doc!(editor).text();
Ok(lsp_range_to_range(text, range, offset_encoding)
.ok_or("lsp sent invalid selection range for rename")?
.fragment(text.slice(..))
.into())
}
Some(lsp::PrepareRenameResponse::RangeWithPlaceholder { placeholder, .. }) => {
Ok(placeholder)
}
Some(lsp::PrepareRenameResponse::DefaultBehavior { .. }) => {
Ok(get_prefill_from_word_boundary(editor))
}
None => Err("lsp did not respond to prepare rename request"),
}
}
let (view, doc) = current!(cx.editor);
let language_server = language_server!(cx.editor, doc);
let offset_encoding = language_server.offset_encoding();
fn create_rename_prompt(editor: &Editor, prefill: String) -> Box<ui::Prompt> {
let prompt = ui::Prompt::new(
"rename-to:".into(),
None,
ui::completers::none,
move |cx: &mut compositor::Context, input: &str, event: PromptEvent| {
if event != PromptEvent::Validate {
return;
}
let (view, doc) = current!(cx.editor);
let language_server = language_server!(cx.editor, doc);
let offset_encoding = language_server.offset_encoding();
let pos = doc.position(view.id, offset_encoding);
let future =
match language_server.rename_symbol(doc.identifier(), pos, input.to_string()) {
Some(future) => future,
None => {
cx.editor
.set_error("Language server does not support symbol renaming");
return;
}
};
match block_on(future) {
Ok(edits) => apply_workspace_edit(cx.editor, offset_encoding, &edits),
Err(err) => cx.editor.set_error(err.to_string()),
}
},
)
.with_line(prefill, editor);
let pos = doc.position(view.id, offset_encoding);
Box::new(prompt)
}
let future =
match language_server.rename_symbol(doc.identifier(), pos, input.to_string()) {
Some(future) => future,
None => {
cx.editor
.set_error("Language server does not support symbol renaming");
let (view, doc) = current!(cx.editor);
let language_server = language_server!(cx.editor, doc);
let offset_encoding = language_server.offset_encoding();
let pos = doc.position(view.id, offset_encoding);
match language_server.prepare_rename(doc.identifier(), pos) {
// Language server supports textDocument/prepareRename, use it.
Some(future) => cx.callback(
future,
move |editor, compositor, response: Option<lsp::PrepareRenameResponse>| {
let prefill = match get_prefill_from_lsp_response(editor, offset_encoding, response)
{
Ok(p) => p,
Err(e) => {
editor.set_error(e);
return;
}
};
match block_on(future) {
Ok(edits) => apply_workspace_edit(cx.editor, offset_encoding, &edits),
Err(err) => cx.editor.set_error(err.to_string()),
}
},
);
let prompt = create_rename_prompt(editor, prefill);
compositor.push(prompt);
},
),
// Language server does not support textDocument/prepareRename, fall back
// to word boundary selection.
None => {
let prefill = get_prefill_from_word_boundary(cx.editor);
let prompt = create_rename_prompt(cx.editor, prefill);
cx.push_layer(prompt);
}
};
}
pub fn select_references_to_symbol_under_cursor(cx: &mut Context) {

@ -1354,6 +1354,37 @@ fn lsp_restart(
Ok(())
}
fn lsp_stop(
cx: &mut compositor::Context,
_args: &[Cow<str>],
event: PromptEvent,
) -> anyhow::Result<()> {
if event != PromptEvent::Validate {
return Ok(());
}
let doc = doc!(cx.editor);
let ls_id = doc
.language_server()
.map(|ls| ls.id())
.context("LSP not running for the current document")?;
let config = doc
.language_config()
.context("LSP not defined for the current document")?;
cx.editor.language_servers.stop(config);
for doc in cx.editor.documents_mut() {
if doc.language_server().map_or(false, |ls| ls.id() == ls_id) {
doc.set_language_server(None);
doc.set_diagnostics(Default::default());
}
}
Ok(())
}
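`:lsp-stop` stops the server attached to the current document and then detaches that same server instance from every other open document, clearing their diagnostics so no stale results linger. A simplified sketch of the detach pass, with a hypothetical `Doc` type standing in for `helix_view::Document`:

```rust
// Hypothetical stand-in for a document that may be attached to a language server.
struct Doc {
    language_server_id: Option<usize>,
    diagnostics: Vec<String>,
}

/// Detach every document that was using the stopped server and clear its diagnostics.
fn detach_stopped_server(docs: &mut [Doc], stopped_id: usize) {
    for doc in docs.iter_mut() {
        if doc.language_server_id == Some(stopped_id) {
            doc.language_server_id = None;
            doc.diagnostics.clear();
        }
    }
}

fn main() {
    let mut docs = vec![
        Doc { language_server_id: Some(7), diagnostics: vec!["unused variable".into()] },
        Doc { language_server_id: Some(3), diagnostics: vec![] },
    ];
    detach_stopped_server(&mut docs, 7);
    assert_eq!(docs[0].language_server_id, None);
    assert!(docs[0].diagnostics.is_empty());
    assert_eq!(docs[1].language_server_id, Some(3)); // other servers are untouched
}
```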
fn tree_sitter_scopes(
cx: &mut compositor::Context,
_args: &[Cow<str>],
@ -1541,47 +1572,67 @@ fn tutor(
Ok(())
}
fn abort_goto_line_number_preview(cx: &mut compositor::Context) {
if let Some(last_selection) = cx.editor.last_selection.take() {
let scrolloff = cx.editor.config().scrolloff;
let (view, doc) = current!(cx.editor);
doc.set_selection(view.id, last_selection);
view.ensure_cursor_in_view(doc, scrolloff);
}
}
fn update_goto_line_number_preview(
cx: &mut compositor::Context,
args: &[Cow<str>],
) -> anyhow::Result<()> {
cx.editor.last_selection.get_or_insert_with(|| {
let (view, doc) = current!(cx.editor);
doc.selection(view.id).clone()
});
let scrolloff = cx.editor.config().scrolloff;
let line = args[0].parse::<usize>()?;
goto_line_without_jumplist(cx.editor, NonZeroUsize::new(line));
let (view, doc) = current!(cx.editor);
view.ensure_cursor_in_view(doc, scrolloff);
Ok(())
}
pub(super) fn goto_line_number(
cx: &mut compositor::Context,
args: &[Cow<str>],
event: PromptEvent,
) -> anyhow::Result<()> {
match event {
PromptEvent::Abort => {
if let Some(line_number) = cx.editor.last_line_number {
goto_line_impl(cx.editor, NonZeroUsize::new(line_number));
let (view, doc) = current!(cx.editor);
view.ensure_cursor_in_view(doc, line_number);
cx.editor.last_line_number = None;
}
return Ok(());
}
PromptEvent::Abort => abort_goto_line_number_preview(cx),
PromptEvent::Validate => {
ensure!(!args.is_empty(), "Line number required");
cx.editor.last_line_number = None;
}
PromptEvent::Update => {
if args.is_empty() {
if let Some(line_number) = cx.editor.last_line_number {
// When a user hits backspace and there are no numbers left,
// we can bring them back to their original line
goto_line_impl(cx.editor, NonZeroUsize::new(line_number));
let (view, doc) = current!(cx.editor);
view.ensure_cursor_in_view(doc, line_number);
cx.editor.last_line_number = None;
}
return Ok(());
}
// If we are invoked directly via a keybinding, Validate is
// sent without any prior Update events. Ensure the cursor
// is moved to the appropriate location.
update_goto_line_number_preview(cx, args)?;
let last_selection = cx
.editor
.last_selection
.take()
.expect("update_goto_line_number_preview should always set last_selection");
let (view, doc) = current!(cx.editor);
let text = doc.text().slice(..);
let line = doc.selection(view.id).primary().cursor_line(text);
cx.editor.last_line_number.get_or_insert(line + 1);
view.jumps.push((doc.id(), last_selection));
}
// When a user hits backspace and there are no numbers left,
// we can bring them back to their original selection. If they
// begin typing numbers again, we'll start a new preview session.
PromptEvent::Update if args.is_empty() => abort_goto_line_number_preview(cx),
PromptEvent::Update => update_goto_line_number_preview(cx, args)?,
}
let line = args[0].parse::<usize>()?;
goto_line_impl(cx.editor, NonZeroUsize::new(line));
let (view, doc) = current!(cx.editor);
view.ensure_cursor_in_view(doc, line);
Ok(())
}
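The refactor above replaces the `last_line_number` bookkeeping with a saved `last_selection`: the first `Update` event stashes the original selection, later updates preview the target line, `Abort` (or deleting all input) restores the stash, and `Validate` pushes it onto the jumplist instead. A compact sketch of that state machine over a plain cursor position, with hypothetical names standing in for `Selection` and the jumplist:

```rust
/// Mirrors the three PromptEvent variants handled above; `Update(None)` models empty input.
enum Event { Update(Option<usize>), Abort, Validate(usize) }

#[derive(Default)]
struct Preview {
    /// Position saved when the preview started (stand-in for the saved Selection).
    saved: Option<usize>,
    /// Positions "pushed to the jumplist" on confirmation.
    jumplist: Vec<usize>,
}

impl Preview {
    fn handle(&mut self, cursor: &mut usize, event: Event) {
        match event {
            // Empty input behaves like an abort: restore the original position.
            Event::Abort | Event::Update(None) => {
                if let Some(saved) = self.saved.take() {
                    *cursor = saved;
                }
            }
            Event::Update(Some(line)) => {
                // Remember where we started the first time a number is typed.
                self.saved.get_or_insert(*cursor);
                *cursor = line;
            }
            Event::Validate(line) => {
                // Validate may arrive without prior updates (direct keybinding),
                // so make sure a starting point exists before recording the jump.
                self.saved.get_or_insert(*cursor);
                if let Some(saved) = self.saved.take() {
                    self.jumplist.push(saved);
                }
                *cursor = line;
            }
        }
    }
}

fn main() {
    let (mut cursor, mut preview) = (10, Preview::default());
    preview.handle(&mut cursor, Event::Update(Some(42)));
    preview.handle(&mut cursor, Event::Abort);
    assert_eq!(cursor, 10); // aborting restores the original position
    preview.handle(&mut cursor, Event::Update(Some(42)));
    preview.handle(&mut cursor, Event::Validate(42));
    assert_eq!((cursor, preview.jumplist), (42, vec![10]));
}
```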
@ -1789,30 +1840,26 @@ fn reflow(
}
let scrolloff = cx.editor.config().scrolloff;
let cfg_text_width: usize = cx.editor.config().text_width;
let (view, doc) = current!(cx.editor);
const DEFAULT_MAX_LEN: usize = 79;
// Find the max line length by checking the following sources in order:
// Find the text_width by checking the following sources in order:
// - The passed argument in `args`
// - The configured max_line_len for this language in languages.toml
// - The const default we set above
let max_line_len: usize = args
// - The configured text-width for this language in languages.toml
// - The configured text-width in the config.toml
let text_width: usize = args
.get(0)
.map(|num| num.parse::<usize>())
.transpose()?
.or_else(|| {
doc.language_config()
.and_then(|config| config.max_line_length)
})
.unwrap_or(DEFAULT_MAX_LEN);
.or_else(|| doc.language_config().and_then(|config| config.text_width))
.unwrap_or(cfg_text_width);
let rope = doc.text();
let selection = doc.selection(view.id);
let transaction = Transaction::change_by_selection(rope, selection, |range| {
let fragment = range.fragment(rope.slice(..));
let reflowed_text = helix_core::wrap::reflow_hard_wrap(&fragment, max_line_len);
let reflowed_text = helix_core::wrap::reflow_hard_wrap(&fragment, text_width);
(range.from(), range.to(), Some(reflowed_text))
});
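`:reflow` now resolves its wrap width from three sources in order: an explicit argument, the language-specific `text-width` from `languages.toml`, and finally the global `editor.text-width`. The same precedence chain written out over plain `Option`s (the argument parsing mirrors `args.get(0)`; function and parameter names are illustrative):

```rust
/// Resolve the reflow width: explicit argument > language `text-width` > editor `text-width`.
fn resolve_text_width(
    arg: Option<&str>,
    language_text_width: Option<usize>,
    editor_text_width: usize,
) -> Result<usize, std::num::ParseIntError> {
    Ok(arg
        .map(str::parse::<usize>)
        .transpose()? // an unparsable argument is an error, not a silent fallback
        .or(language_text_width)
        .unwrap_or(editor_text_width))
}

fn main() {
    // git-commit sets `text-width = 72` in languages.toml; the editor default is 80.
    assert_eq!(resolve_text_width(None, Some(72), 80), Ok(72));
    assert_eq!(resolve_text_width(Some("100"), Some(72), 80), Ok(100));
    assert_eq!(resolve_text_width(None, None, 80), Ok(80));
    assert!(resolve_text_width(Some("not-a-number"), None, 80).is_err());
}
```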
@ -2002,6 +2049,64 @@ fn run_shell_command(
Ok(())
}
fn reset_diff_change(
cx: &mut compositor::Context,
args: &[Cow<str>],
event: PromptEvent,
) -> anyhow::Result<()> {
if event != PromptEvent::Validate {
return Ok(());
}
ensure!(args.is_empty(), ":reset-diff-change takes no arguments");
let editor = &mut cx.editor;
let scrolloff = editor.config().scrolloff;
let (view, doc) = current!(editor);
// TODO refactor to use let..else once MSRV is raised to 1.65
let handle = match doc.diff_handle() {
Some(handle) => handle,
None => bail!("Diff is not available in the current buffer"),
};
let diff = handle.load();
let doc_text = doc.text().slice(..);
let line = doc.selection(view.id).primary().cursor_line(doc_text);
// TODO refactor to use let..else once MSRV is raised to 1.65
let hunk_idx = match diff.hunk_at(line as u32, true) {
Some(hunk_idx) => hunk_idx,
None => bail!("There is no change at the cursor"),
};
let hunk = diff.nth_hunk(hunk_idx);
let diff_base = diff.diff_base();
let before_start = diff_base.line_to_char(hunk.before.start as usize);
let before_end = diff_base.line_to_char(hunk.before.end as usize);
let text: Tendril = diff
.diff_base()
.slice(before_start..before_end)
.chunks()
.collect();
let anchor = doc_text.line_to_char(hunk.after.start as usize);
let transaction = Transaction::change(
doc.text(),
[(
anchor,
doc_text.line_to_char(hunk.after.end as usize),
(!text.is_empty()).then_some(text),
)]
.into_iter(),
);
drop(diff); // make borrow check happy
doc.apply(&transaction, view.id);
// select inserted text
let text_len = before_end - before_start;
doc.set_selection(view.id, Selection::single(anchor, anchor + text_len));
doc.append_changes_to_history(view);
view.ensure_cursor_in_view(doc, scrolloff);
Ok(())
}
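`:reset-diff-change` locates the hunk under the cursor and replaces the hunk's lines in the document (`hunk.after`) with the corresponding lines from the diff base (`hunk.before`), then selects the restored text. A line-based sketch of that replacement, using `String`s instead of ropes and a hypothetical `Hunk` with the same `before`/`after` line ranges (trailing-newline handling is simplified):

```rust
use std::ops::Range;

/// Hypothetical hunk: `before` indexes lines in the diff base,
/// `after` indexes lines in the current document.
struct Hunk {
    before: Range<usize>,
    after: Range<usize>,
}

/// Replace the hunk's lines in `doc` with the original lines from `diff_base`.
fn reset_hunk(doc: &str, diff_base: &str, hunk: &Hunk) -> String {
    let doc_lines: Vec<&str> = doc.lines().collect();
    let base_lines: Vec<&str> = diff_base.lines().collect();

    let mut out: Vec<&str> = Vec::new();
    out.extend(&doc_lines[..hunk.after.start]);
    out.extend(&base_lines[hunk.before.clone()]); // restore the original lines
    out.extend(&doc_lines[hunk.after.end..]);
    out.join("\n")
}

fn main() {
    let diff_base = "a\nb\nc";
    let doc = "a\nB changed\nc";
    // The change touches line 1 in both versions.
    let hunk = Hunk { before: 1..2, after: 1..2 };
    assert_eq!(reset_hunk(doc, diff_base, &hunk), "a\nb\nc");
}
```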
pub const TYPABLE_COMMAND_LIST: &[TypableCommand] = &[
TypableCommand {
name: "quit",
@ -2349,6 +2454,13 @@ pub const TYPABLE_COMMAND_LIST: &[TypableCommand] = &[
fun: lsp_restart,
completer: None,
},
TypableCommand {
name: "lsp-stop",
aliases: &[],
doc: "Stops the Language Server that is in use by the current doc",
fun: lsp_stop,
completer: None,
},
TypableCommand {
name: "tree-sitter-scopes",
aliases: &[],
@ -2531,6 +2643,13 @@ pub const TYPABLE_COMMAND_LIST: &[TypableCommand] = &[
fun: run_shell_command,
completer: Some(completers::filename),
},
TypableCommand {
name: "reset-diff-change",
aliases: &["diffget", "diffg"],
doc: "Reset the diff change at the cursor position.",
fun: reset_diff_change,
completer: None,
},
];
pub static TYPABLE_COMMAND_MAP: Lazy<HashMap<&'static str, &'static TypableCommand>> =

@ -118,8 +118,12 @@ impl Completion {
offset_encoding: helix_lsp::OffsetEncoding,
start_offset: usize,
trigger_offset: usize,
include_placeholder: bool,
) -> Transaction {
let transaction = if let Some(edit) = &item.text_edit {
use helix_lsp::snippet;
let selection = doc.selection(view_id);
let (start_offset, end_offset, new_text) = if let Some(edit) = &item.text_edit {
let edit = match edit {
lsp::CompletionTextEdit::Edit(edit) => edit.clone(),
lsp::CompletionTextEdit::InsertAndReplace(item) => {
@ -127,19 +131,27 @@ impl Completion {
lsp::TextEdit::new(item.replace, item.new_text.clone())
}
};
util::generate_transaction_from_completion_edit(
doc.text(),
doc.selection(view_id),
edit,
offset_encoding, // TODO: should probably transcode in Client
)
let text = doc.text().slice(..);
let primary_cursor = selection.primary().cursor(text);
let start_offset =
match util::lsp_pos_to_pos(doc.text(), edit.range.start, offset_encoding) {
Some(start) => start as i128 - primary_cursor as i128,
None => return Transaction::new(doc.text()),
};
let end_offset =
match util::lsp_pos_to_pos(doc.text(), edit.range.end, offset_encoding) {
Some(end) => end as i128 - primary_cursor as i128,
None => return Transaction::new(doc.text()),
};
(start_offset, end_offset, edit.new_text)
} else {
let text = item.insert_text.as_ref().unwrap_or(&item.label);
let new_text = item.insert_text.as_ref().unwrap_or(&item.label);
// Some LSPs just give you an insertText with no offset ¯\_(ツ)_/¯
// in these cases we need to check for a common prefix and remove it
let prefix = Cow::from(doc.text().slice(start_offset..trigger_offset));
let text = text.trim_start_matches::<&str>(&prefix);
let new_text = new_text.trim_start_matches::<&str>(&prefix);
// TODO: this needs to be true for the numbers to work out correctly
// in the closure below. It's passed in to a callback as this same
@ -152,14 +164,43 @@ impl Completion {
== trigger_offset
);
Transaction::change_by_selection(doc.text(), doc.selection(view_id), |range| {
let cursor = range.cursor(doc.text().slice(..));
(cursor, cursor, Some(text.into()))
})
(0, 0, new_text.into())
};
transaction
if matches!(item.kind, Some(lsp::CompletionItemKind::SNIPPET))
|| matches!(
item.insert_text_format,
Some(lsp::InsertTextFormat::SNIPPET)
)
{
match snippet::parse(&new_text) {
Ok(snippet) => util::generate_transaction_from_snippet(
doc.text(),
selection,
start_offset,
end_offset,
snippet,
doc.line_ending.as_str(),
include_placeholder,
),
Err(err) => {
log::error!(
"Failed to parse snippet: {:?}, remaining output: {}",
&new_text,
err
);
Transaction::new(doc.text())
}
}
} else {
util::generate_transaction_from_completion_edit(
doc.text(),
selection,
start_offset,
end_offset,
new_text,
)
}
}
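The rewritten completion application first converts the server's edit range into offsets relative to the primary cursor (as `i128`, since the range may start before the cursor), so the same relative edit can be replayed at every cursor of a multi-cursor selection, either as a parsed snippet or as a plain text edit. The replay helpers themselves are not shown here; a self-contained sketch of the relative-offset idea, with character indices standing in for the rope/LSP position conversion:

```rust
/// Apply the same completion edit, expressed relative to the primary cursor,
/// at every cursor position. Returns the changes as (from, to, replacement).
fn replay_completion_edit(
    cursors: &[usize],
    primary_cursor: usize,
    edit_start: usize, // absolute range reported by the server for the primary cursor
    edit_end: usize,
    new_text: &str,
) -> Vec<(usize, usize, String)> {
    // Offsets may be negative when the edit range starts before the cursor.
    let start_offset = edit_start as i128 - primary_cursor as i128;
    let end_offset = edit_end as i128 - primary_cursor as i128;

    cursors
        .iter()
        .map(|&cursor| {
            let from = (cursor as i128 + start_offset) as usize;
            let to = (cursor as i128 + end_offset) as usize;
            (from, to, new_text.to_owned())
        })
        .collect()
}

fn main() {
    // Two cursors, each sitting right after a 3-character prefix to be replaced.
    let changes = replay_completion_edit(&[3, 13], 3, 0, 3, "println!");
    assert_eq!(
        changes,
        vec![(0, 3, "println!".to_owned()), (10, 13, "println!".to_owned())]
    );
}
```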
fn completion_changes(transaction: &Transaction, trigger_offset: usize) -> Vec<Change> {
@ -190,6 +231,7 @@ impl Completion {
offset_encoding,
start_offset,
trigger_offset,
true,
);
// initialize a savepoint
@ -212,6 +254,7 @@ impl Completion {
offset_encoding,
start_offset,
trigger_offset,
false,
);
doc.apply(&transaction, view.id);

@ -435,26 +435,6 @@ impl<T: Item> Picker<T> {
|_editor: &mut Context, _pattern: &str, _event: PromptEvent| {},
);
let n = options
.first()
.map(|option| option.format(&editor_data).cells.len())
.unwrap_or_default();
let max_lens = options.iter().fold(vec![0; n], |mut acc, option| {
let row = option.format(&editor_data);
// maintain max for each column
for (acc, cell) in acc.iter_mut().zip(row.cells.iter()) {
let width = cell.content.width();
if width > *acc {
*acc = width;
}
}
acc
});
let widths = max_lens
.into_iter()
.map(|len| Constraint::Length(len as u16))
.collect();
let mut picker = Self {
options,
editor_data,
@ -467,10 +447,12 @@ impl<T: Item> Picker<T> {
show_preview: true,
callback_fn: Box::new(callback_fn),
completion_height: 0,
widths,
widths: Vec::new(),
};
// scoring on empty input:
picker.calculate_column_widths();
// scoring on empty input
// TODO: just reuse score()
picker
.matches
@ -486,6 +468,38 @@ impl<T: Item> Picker<T> {
picker
}
pub fn set_options(&mut self, new_options: Vec<T>) {
self.options = new_options;
self.cursor = 0;
self.force_score();
self.calculate_column_widths();
}
/// Calculate the width constraints using the maximum widths of each column
/// for the current options.
fn calculate_column_widths(&mut self) {
let n = self
.options
.first()
.map(|option| option.format(&self.editor_data).cells.len())
.unwrap_or_default();
let max_lens = self.options.iter().fold(vec![0; n], |mut acc, option| {
let row = option.format(&self.editor_data);
// maintain max for each column
for (acc, cell) in acc.iter_mut().zip(row.cells.iter()) {
let width = cell.content.width();
if width > *acc {
*acc = width;
}
}
acc
});
self.widths = max_lens
.into_iter()
.map(|len| Constraint::Length(len as u16))
.collect();
}
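`calculate_column_widths` folds over all options, keeping the maximum display width seen for each column, and turns those maxima into layout constraints; it is now re-run whenever `set_options` replaces the picker contents. The fold itself, over plain string rows (with `chars().count()` as a rough stand-in for `UnicodeWidthStr::width`, and illustrative names):

```rust
/// Maximum width of each column across all rows.
/// The number of columns is taken from the first row, matching the picker above.
fn column_widths(rows: &[Vec<&str>]) -> Vec<usize> {
    let n = rows.first().map(|row| row.len()).unwrap_or_default();
    rows.iter().fold(vec![0; n], |mut acc, row| {
        // maintain the max for each column
        for (acc, cell) in acc.iter_mut().zip(row.iter()) {
            let width = cell.chars().count();
            if width > *acc {
                *acc = width;
            }
        }
        acc
    })
}

fn main() {
    let rows = vec![
        vec!["main.rs", "modified"],
        vec!["Cargo.toml", "clean"],
    ];
    assert_eq!(column_widths(&rows), vec![10, 8]);
}
```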
pub fn score(&mut self) {
let pattern = self.prompt.line();
@ -931,9 +945,7 @@ impl<T: Item + Send + 'static> Component for DynamicPicker<T> {
Some(overlay) => &mut overlay.content.file_picker.picker,
None => return,
};
picker.options = new_options;
picker.cursor = 0;
picker.force_score();
picker.set_options(new_options);
editor.reset_idle_timer();
}));
anyhow::Ok(callback)

@ -22,5 +22,7 @@ unicode-segmentation = "1.10"
crossterm = { version = "0.26", optional = true }
termini = "0.1"
serde = { version = "1", "optional" = true, features = ["derive"]}
once_cell = "1.17"
log = "~0.4"
helix-view = { version = "0.6", path = "../helix-view", features = ["term"] }
helix-core = { version = "0.6", path = "../helix-core" }

@ -1,6 +1,11 @@
use crate::{backend::Backend, buffer::Cell};
use crate::{backend::Backend, buffer::Cell, terminal::Config};
use crossterm::{
cursor::{Hide, MoveTo, SetCursorStyle, Show},
event::{
DisableBracketedPaste, DisableFocusChange, DisableMouseCapture, EnableBracketedPaste,
EnableFocusChange, EnableMouseCapture, KeyboardEnhancementFlags,
PopKeyboardEnhancementFlags, PushKeyboardEnhancementFlags,
},
execute, queue,
style::{
Attribute as CAttribute, Color as CColor, Print, SetAttribute, SetBackgroundColor,
@ -10,6 +15,7 @@ use crossterm::{
Command,
};
use helix_view::graphics::{Color, CursorKind, Modifier, Rect, UnderlineStyle};
use once_cell::sync::OnceCell;
use std::{
fmt,
io::{self, Write},
@ -52,6 +58,7 @@ impl Capabilities {
pub struct CrosstermBackend<W: Write> {
buffer: W,
capabilities: Capabilities,
supports_keyboard_enhancement_protocol: OnceCell<bool>,
}
impl<W> CrosstermBackend<W>
@ -62,8 +69,27 @@ where
CrosstermBackend {
buffer,
capabilities: Capabilities::from_env_or_default(),
supports_keyboard_enhancement_protocol: OnceCell::new(),
}
}
#[inline]
fn supports_keyboard_enhancement_protocol(&self) -> io::Result<bool> {
self.supports_keyboard_enhancement_protocol
.get_or_try_init(|| {
use std::time::Instant;
let now = Instant::now();
let support = terminal::supports_keyboard_enhancement();
log::debug!(
"The keyboard enhancement protocol is {}supported in this terminal (checked in {:?})",
if matches!(support, Ok(true)) { "" } else { "not " },
Instant::now().duration_since(now)
);
support
})
.copied()
}
}
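Probing for the keyboard enhancement protocol happens once per backend and is memoized in a `once_cell::sync::OnceCell`, so the potentially slow terminal query only runs on first use; `get_or_try_init` also lets an I/O error propagate without caching it, so the probe is retried next time. The same memoization pattern in isolation (assuming the `once_cell` crate added to `Cargo.toml` above; the probe function is a stand-in for `terminal::supports_keyboard_enhancement`):

```rust
use std::io;

use once_cell::sync::OnceCell;

struct Backend {
    supports_enhancement: OnceCell<bool>,
}

impl Backend {
    fn new() -> Self {
        Backend { supports_enhancement: OnceCell::new() }
    }

    /// Run the (pretend) expensive probe once and cache the answer.
    /// A failed probe is returned as an error and retried on the next call.
    fn supports_enhancement(&self) -> io::Result<bool> {
        self.supports_enhancement
            .get_or_try_init(|| {
                // Stand-in for terminal::supports_keyboard_enhancement().
                probe_terminal()
            })
            .copied()
    }
}

fn probe_terminal() -> io::Result<bool> {
    // Pretend the terminal answered "yes"; a real probe would talk to the tty.
    Ok(true)
}

fn main() -> io::Result<()> {
    let backend = Backend::new();
    assert!(backend.supports_enhancement()?); // probes the terminal
    assert!(backend.supports_enhancement()?); // served from the cache
    Ok(())
}
```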
impl<W> Write for CrosstermBackend<W>
@ -83,6 +109,66 @@ impl<W> Backend for CrosstermBackend<W>
where
W: Write,
{
fn claim(&mut self, config: Config) -> io::Result<()> {
terminal::enable_raw_mode()?;
execute!(
self.buffer,
terminal::EnterAlternateScreen,
EnableBracketedPaste,
EnableFocusChange
)?;
execute!(self.buffer, terminal::Clear(terminal::ClearType::All))?;
if config.enable_mouse_capture {
execute!(self.buffer, EnableMouseCapture)?;
}
if self.supports_keyboard_enhancement_protocol()? {
execute!(
self.buffer,
PushKeyboardEnhancementFlags(
KeyboardEnhancementFlags::DISAMBIGUATE_ESCAPE_CODES
| KeyboardEnhancementFlags::REPORT_ALTERNATE_KEYS
)
)?;
}
Ok(())
}
fn restore(&mut self, config: Config) -> io::Result<()> {
// reset cursor shape
write!(self.buffer, "\x1B[0 q")?;
if config.enable_mouse_capture {
execute!(self.buffer, DisableMouseCapture)?;
}
if self.supports_keyboard_enhancement_protocol()? {
execute!(self.buffer, PopKeyboardEnhancementFlags)?;
}
execute!(
self.buffer,
DisableBracketedPaste,
DisableFocusChange,
terminal::LeaveAlternateScreen
)?;
terminal::disable_raw_mode()
}
fn force_restore() -> io::Result<()> {
let mut stdout = io::stdout();
// reset cursor shape
write!(stdout, "\x1B[0 q")?;
// Ignore errors on disabling; this can fail on Windows if we call
// disable without having called enable previously.
let _ = execute!(stdout, DisableMouseCapture);
let _ = execute!(stdout, PopKeyboardEnhancementFlags);
execute!(
stdout,
DisableBracketedPaste,
DisableFocusChange,
terminal::LeaveAlternateScreen
)?;
terminal::disable_raw_mode()
}
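Terminal setup is now split into `claim`/`restore` on the backend, with `force_restore` as a best-effort cleanup that ignores errors such as disabling mouse capture that was never enabled. A minimal sketch of the same enter/leave pairing as an RAII guard, using only crossterm calls that appear above (crossterm 0.26 as pinned in `Cargo.toml`; the guard type and its behavior are illustrative, not the backend's actual API):

```rust
use std::io::{self, stdout, Write};

use crossterm::{
    execute,
    terminal::{self, EnterAlternateScreen, LeaveAlternateScreen},
};

/// Claims the terminal on construction and restores it on drop,
/// mirroring the claim/restore split of the backend above.
struct TerminalGuard;

impl TerminalGuard {
    fn claim() -> io::Result<Self> {
        terminal::enable_raw_mode()?;
        execute!(stdout(), EnterAlternateScreen)?;
        Ok(TerminalGuard)
    }
}

impl Drop for TerminalGuard {
    fn drop(&mut self) {
        // Best-effort restore: ignore errors, like force_restore above.
        let _ = execute!(stdout(), LeaveAlternateScreen);
        let _ = terminal::disable_raw_mode();
    }
}

fn main() -> io::Result<()> {
    let _guard = TerminalGuard::claim()?;
    write!(stdout(), "inside the alternate screen\r\n")?;
    Ok(()) // the guard restores the terminal here, even on an early return
}
```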
fn draw<'a, I>(&mut self, content: I) -> io::Result<()>
where
I: Iterator<Item = (u16, u16, &'a Cell)>,

@ -1,6 +1,6 @@
use std::io;
use crate::buffer::Cell;
use crate::{buffer::Cell, terminal::Config};
use helix_view::graphics::{CursorKind, Rect};
@ -13,6 +13,9 @@ mod test;
pub use self::test::TestBackend;
pub trait Backend {
fn claim(&mut self, config: Config) -> Result<(), io::Error>;
fn restore(&mut self, config: Config) -> Result<(), io::Error>;
fn force_restore() -> Result<(), io::Error>;
fn draw<'a, I>(&mut self, content: I) -> Result<(), io::Error>
where
I: Iterator<Item = (u16, u16, &'a Cell)>;

@ -1,6 +1,7 @@
use crate::{
backend::Backend,
buffer::{Buffer, Cell},
terminal::Config,
};
use helix_core::unicode::width::UnicodeWidthStr;
use helix_view::graphics::{CursorKind, Rect};
@ -106,6 +107,18 @@ impl TestBackend {
}
impl Backend for TestBackend {
fn claim(&mut self, _config: Config) -> Result<(), io::Error> {
Ok(())
}
fn restore(&mut self, _config: Config) -> Result<(), io::Error> {
Ok(())
}
fn force_restore() -> Result<(), io::Error> {
Ok(())
}
fn draw<'a, I>(&mut self, content: I) -> Result<(), io::Error>
where
I: Iterator<Item = (u16, u16, &'a Cell)>,

@ -1,4 +1,5 @@
use crate::{backend::Backend, buffer::Buffer};
use helix_view::editor::Config as EditorConfig;
use helix_view::graphics::{CursorKind, Rect};
use std::io;
@ -16,6 +17,19 @@ pub struct Viewport {
resize_behavior: ResizeBehavior,
}
#[derive(Debug)]
pub struct Config {
pub enable_mouse_capture: bool,
}
impl From<EditorConfig> for Config {
fn from(config: EditorConfig) -> Self {
Self {
enable_mouse_capture: config.mouse,
}
}
}
impl Viewport {
/// UNSTABLE
pub fn fixed(area: Rect) -> Viewport {
@ -98,6 +112,14 @@ where
})
}
pub fn claim(&mut self, config: Config) -> io::Result<()> {
self.backend.claim(config)
}
pub fn restore(&mut self, config: Config) -> io::Result<()> {
self.backend.restore(config)
}
// /// Get a Frame object which provides a consistent view into the terminal state for rendering.
// pub fn get_frame(&mut self) -> Frame<B> {
// Frame {

@ -28,11 +28,18 @@ struct Event {
render_lock: Option<RenderLock>,
}
#[derive(Clone, Debug, Default)]
struct DiffInner {
diff_base: Rope,
doc: Rope,
hunks: Vec<Hunk>,
}
#[derive(Clone, Debug)]
pub struct DiffHandle {
channel: UnboundedSender<Event>,
render_lock: Arc<RwLock<()>>,
hunks: Arc<Mutex<Vec<Hunk>>>,
diff: Arc<Mutex<DiffInner>>,
inverted: bool,
}
@ -47,10 +54,10 @@ impl DiffHandle {
redraw_handle: RedrawHandle,
) -> (DiffHandle, JoinHandle<()>) {
let (sender, receiver) = unbounded_channel();
let hunks: Arc<Mutex<Vec<Hunk>>> = Arc::default();
let diff: Arc<Mutex<DiffInner>> = Arc::default();
let worker = DiffWorker {
channel: receiver,
hunks: hunks.clone(),
diff: diff.clone(),
new_hunks: Vec::default(),
redraw_notify: redraw_handle.0,
diff_finished_notify: Arc::default(),
@ -58,7 +65,7 @@ impl DiffHandle {
let handle = tokio::spawn(worker.run(diff_base, doc));
let differ = DiffHandle {
channel: sender,
hunks,
diff,
inverted: false,
render_lock: redraw_handle.1,
};
@ -69,9 +76,9 @@ impl DiffHandle {
self.inverted = !self.inverted;
}
pub fn hunks(&self) -> FileHunks {
FileHunks {
hunks: self.hunks.lock(),
pub fn load(&self) -> Diff {
Diff {
diff: self.diff.lock(),
inverted: self.inverted,
}
}
@ -168,12 +175,28 @@ impl Hunk {
/// A list of changes in a file sorted in ascending
/// non-overlapping order
#[derive(Debug)]
pub struct FileHunks<'a> {
hunks: MutexGuard<'a, Vec<Hunk>>,
pub struct Diff<'a> {
diff: MutexGuard<'a, DiffInner>,
inverted: bool,
}
impl FileHunks<'_> {
impl Diff<'_> {
pub fn diff_base(&self) -> &Rope {
if self.inverted {
&self.diff.doc
} else {
&self.diff.diff_base
}
}
pub fn doc(&self) -> &Rope {
if self.inverted {
&self.diff.diff_base
} else {
&self.diff.doc
}
}
pub fn is_inverted(&self) -> bool {
self.inverted
}
@ -181,7 +204,7 @@ impl FileHunks<'_> {
/// Returns the `Hunk` for the `n`th change in this file.
/// if there is no `n`th change `Hunk::NONE` is returned instead.
pub fn nth_hunk(&self, n: u32) -> Hunk {
match self.hunks.get(n as usize) {
match self.diff.hunks.get(n as usize) {
Some(hunk) if self.inverted => hunk.invert(),
Some(hunk) => hunk.clone(),
None => Hunk::NONE,
@ -189,7 +212,7 @@ impl FileHunks<'_> {
}
pub fn len(&self) -> u32 {
self.hunks.len() as u32
self.diff.hunks.len() as u32
}
pub fn is_empty(&self) -> bool {
@ -204,19 +227,20 @@ impl FileHunks<'_> {
};
let res = self
.diff
.hunks
.binary_search_by_key(&line, |hunk| hunk_range(hunk).start);
match res {
// Search found a hunk that starts exactly at this line, return the next hunk if it exists.
Ok(pos) if pos + 1 == self.hunks.len() => None,
Ok(pos) if pos + 1 == self.diff.hunks.len() => None,
Ok(pos) => Some(pos as u32 + 1),
// No hunk starts exactly at this line, so the search returns
// the position where a hunk starting at this line should be inserted.
// That position is exactly the position of the next hunk or the end
// of the list if no such hunk exists
Err(pos) if pos == self.hunks.len() => None,
Err(pos) if pos == self.diff.hunks.len() => None,
Err(pos) => Some(pos as u32),
}
}
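These lookups rely on the hunks being sorted by line and non-overlapping, so a `binary_search_by_key` on the start (or end) line finds the right position in O(log n): `Ok` means a hunk begins exactly on the queried line, while `Err` gives the insertion point, whose predecessor is the only hunk that could still contain the line. A self-contained sketch of the simplest such query, the "which hunk contains this line" lookup that `:reset-diff-change` uses via `hunk_at`, with plain line ranges standing in for `Hunk` (empty removal hunks are ignored here for brevity):

```rust
use std::ops::Range;

/// Find the index of the hunk whose range contains `line`,
/// assuming `hunks` is sorted by start and non-overlapping.
fn hunk_at(hunks: &[Range<u32>], line: u32) -> Option<usize> {
    match hunks.binary_search_by_key(&line, |hunk| hunk.start) {
        // A hunk starts exactly at this line.
        Ok(pos) => Some(pos),
        // Otherwise only the previous hunk can contain the line.
        Err(0) => None,
        Err(pos) if hunks[pos - 1].end > line => Some(pos - 1),
        Err(_) => None,
    }
}

fn main() {
    let hunks = vec![2..4, 10..11, 20..25];
    assert_eq!(hunk_at(&hunks, 3), Some(0));  // inside the first hunk
    assert_eq!(hunk_at(&hunks, 10), Some(1)); // exactly at a hunk start
    assert_eq!(hunk_at(&hunks, 5), None);     // between hunks
    assert_eq!(hunk_at(&hunks, 24), Some(2));
}
```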
@ -228,6 +252,7 @@ impl FileHunks<'_> {
|hunk: &Hunk| hunk.after.clone()
};
let res = self
.diff
.hunks
.binary_search_by_key(&line, |hunk| hunk_range(hunk).end);
@ -237,7 +262,7 @@ impl FileHunks<'_> {
// which represents a pure removal.
// Removals are technically empty but are still shown as single line hunks
// and as such we must jump to the previous hunk (if it exists) if we are already inside the removal
Ok(pos) if !hunk_range(&self.hunks[pos]).is_empty() => Some(pos as u32),
Ok(pos) if !hunk_range(&self.diff.hunks[pos]).is_empty() => Some(pos as u32),
// No hunk ends exactly at this line, so the search returns
// the position where a hunk ending at this line should be inserted.
@ -255,6 +280,7 @@ impl FileHunks<'_> {
};
let res = self
.diff
.hunks
.binary_search_by_key(&line, |hunk| hunk_range(hunk).start);
@ -267,7 +293,7 @@ impl FileHunks<'_> {
// The previous hunk contains this hunk if it exists and doesn't end before this line
Err(0) => None,
Err(pos) => {
let hunk = hunk_range(&self.hunks[pos - 1]);
let hunk = hunk_range(&self.diff.hunks[pos - 1]);
if hunk.end > line || include_removal && hunk.start == line && hunk.is_empty() {
Some(pos as u32 - 1)
} else {

@ -43,6 +43,14 @@ impl InternedRopeLines {
res
}
pub fn doc(&self) -> Rope {
self.doc.clone()
}
pub fn diff_base(&self) -> Rope {
self.diff_base.clone()
}
/// Updates the `diff_base` and optionally the document if `doc` is not None
pub fn update_diff_base(&mut self, diff_base: Rope, doc: Option<Rope>) {
self.interned.clear();

@ -10,7 +10,7 @@ use tokio::sync::Notify;
use tokio::time::{timeout, timeout_at, Duration};
use crate::diff::{
Event, RenderLock, ALGORITHM, DIFF_DEBOUNCE_TIME_ASYNC, DIFF_DEBOUNCE_TIME_SYNC,
DiffInner, Event, RenderLock, ALGORITHM, DIFF_DEBOUNCE_TIME_ASYNC, DIFF_DEBOUNCE_TIME_SYNC,
};
use super::line_cache::InternedRopeLines;
@ -21,7 +21,7 @@ mod test;
pub(super) struct DiffWorker {
pub channel: UnboundedReceiver<Event>,
pub hunks: Arc<Mutex<Vec<Hunk>>>,
pub diff: Arc<Mutex<DiffInner>>,
pub new_hunks: Vec<Hunk>,
pub redraw_notify: Arc<Notify>,
pub diff_finished_notify: Arc<Notify>,
@ -46,7 +46,7 @@ impl DiffWorker {
if let Some(lines) = interner.interned_lines() {
self.perform_diff(lines);
}
self.apply_hunks();
self.apply_hunks(interner.diff_base(), interner.doc());
while let Some(event) = self.channel.recv().await {
let (doc, diff_base) = self.accumulate_events(event).await;
@ -70,15 +70,18 @@ impl DiffWorker {
#[cfg(not(test))]
tokio::task::block_in_place(process_accumulated_events);
self.apply_hunks();
self.apply_hunks(interner.diff_base(), interner.doc());
}
}
/// update the hunks (used by the gutter) by replacing it with `self.new_hunks`.
/// `self.new_hunks` is always empty after this function runs.
/// To improve performance this function tries to reuse the allocation of the old diff previously stored in `self.line_diffs`
fn apply_hunks(&mut self) {
swap(&mut *self.hunks.lock(), &mut self.new_hunks);
fn apply_hunks(&mut self, diff_base: Rope, doc: Rope) {
let mut diff = self.diff.lock();
diff.diff_base = diff_base;
diff.doc = doc;
swap(&mut diff.hunks, &mut self.new_hunks);
self.diff_finished_notify.notify_waiters();
self.new_hunks.clear();
}

@ -12,12 +12,12 @@ impl DiffHandle {
)
}
async fn into_diff(self, handle: JoinHandle<()>) -> Vec<Hunk> {
let hunks = self.hunks;
let diff = self.diff;
// dropping the channel terminates the task
drop(self.channel);
handle.await.unwrap();
let hunks = hunks.lock();
Vec::clone(&*hunks)
let diff = diff.lock();
Vec::clone(&diff.hunks)
}
}

@ -1238,24 +1238,61 @@ impl Document {
}
pub fn text_format(&self, mut viewport_width: u16, theme: Option<&Theme>) -> TextFormat {
if let Some(max_line_len) = self
let config = self.config.load();
let text_width = self
.language_config()
.and_then(|config| config.max_line_length)
{
viewport_width = viewport_width.min(max_line_len as u16)
.and_then(|config| config.text_width)
.unwrap_or(config.text_width);
let soft_wrap_at_text_width = self
.language_config()
.and_then(|config| {
config
.soft_wrap
.as_ref()
.and_then(|soft_wrap| soft_wrap.wrap_at_text_width)
})
.or(config.soft_wrap.wrap_at_text_width)
.unwrap_or(false);
if soft_wrap_at_text_width {
// We increase text_width by 1 because softwrap considers the newline character
// as part of the line length, while the "typical" expectation is that it is not.
// In particular, other commands like :reflow do not count the line terminator.
// This is technically inconsistent for the last line, as that line never has a line
// terminator, but having the last visual line exceed the width by 1 seems like a rare edge case.
viewport_width = viewport_width.min(text_width as u16 + 1)
}
let config = self.config.load();
let soft_wrap = &config.soft_wrap;
let editor_soft_wrap = &config.soft_wrap;
let language_soft_wrap = self
.language
.as_ref()
.and_then(|config| config.soft_wrap.as_ref());
let enable_soft_wrap = language_soft_wrap
.and_then(|soft_wrap| soft_wrap.enable)
.or(editor_soft_wrap.enable)
.unwrap_or(false);
let max_wrap = language_soft_wrap
.and_then(|soft_wrap| soft_wrap.max_wrap)
.or(config.soft_wrap.max_wrap)
.unwrap_or(20);
let max_indent_retain = language_soft_wrap
.and_then(|soft_wrap| soft_wrap.max_indent_retain)
.or(editor_soft_wrap.max_indent_retain)
.unwrap_or(40);
let wrap_indicator = language_soft_wrap
.and_then(|soft_wrap| soft_wrap.wrap_indicator.clone())
.or_else(|| config.soft_wrap.wrap_indicator.clone())
.unwrap_or_else(|| "↪ ".into());
let tab_width = self.tab_width() as u16;
TextFormat {
soft_wrap: soft_wrap.enable && viewport_width > 10,
soft_wrap: enable_soft_wrap && viewport_width > 10,
tab_width,
max_wrap: soft_wrap.max_wrap.min(viewport_width / 4),
max_indent_retain: soft_wrap.max_indent_retain.min(viewport_width * 2 / 5),
max_wrap: max_wrap.min(viewport_width / 4),
max_indent_retain: max_indent_retain.min(viewport_width * 2 / 5),
// avoid spinning forever when the window manager
// sets the size to something tiny
viewport_width,
wrap_indicator: soft_wrap.wrap_indicator.clone().into_boxed_str(),
wrap_indicator: wrap_indicator.into_boxed_str(),
wrap_indicator_highlight: theme
.and_then(|theme| theme.find_scope_index("ui.virtual.wrap"))
.map(Highlight),

@ -40,12 +40,12 @@ use anyhow::{anyhow, bail, Error};
pub use helix_core::diagnostic::Severity;
pub use helix_core::register::Registers;
use helix_core::Position;
use helix_core::{
auto_pairs::AutoPairs,
syntax::{self, AutoPairConfig},
syntax::{self, AutoPairConfig, SoftWrap},
Change,
};
use helix_core::{Position, Selection};
use helix_dap as dap;
use helix_lsp::lsp;
@ -285,6 +285,8 @@ pub struct Config {
pub auto_format: bool,
/// Automatic save on focus lost. Defaults to false.
pub auto_save: bool,
/// Global text width, used by commands such as `:reflow` and by soft wrapping
/// when `soft-wrap.wrap-at-text-width` is enabled.
pub text_width: usize,
/// Time in milliseconds since last keypress before idle timers trigger.
/// Used for autocompletion, set to 0 for instant. Defaults to 400ms.
#[serde(
@ -322,43 +324,6 @@ pub struct Config {
pub soft_wrap: SoftWrap,
}
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(default, rename_all = "kebab-case", deny_unknown_fields)]
pub struct SoftWrap {
/// Soft wrap lines that exceed viewport width. Default to off
pub enable: bool,
/// Maximum space left free at the end of the line.
/// This space is used to wrap text at word boundaries. If that is not possible within this limit
/// the word is simply split at the end of the line.
///
/// This is automatically hard-limited to a quarter of the viewport to ensure correct display on small views.
///
/// Default to 20
pub max_wrap: u16,
/// Maximum number of indentation that can be carried over from the previous line when softwrapping.
/// If a line is indented further then this limit it is rendered at the start of the viewport instead.
///
/// This is automatically hard-limited to a quarter of the viewport to ensure correct display on small views.
///
/// Default to 40
pub max_indent_retain: u16,
/// Indicator placed at the beginning of softwrapped lines
///
/// Defaults to ↪
pub wrap_indicator: String,
}
impl Default for SoftWrap {
fn default() -> Self {
SoftWrap {
enable: false,
max_wrap: 20,
max_indent_retain: 40,
wrap_indicator: "↪ ".into(),
}
}
}
#[derive(Debug, Default, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(default, rename_all = "kebab-case", deny_unknown_fields)]
pub struct TerminalConfig {
@ -819,6 +784,7 @@ impl Default for Config {
color_modes: false,
explorer: ExplorerConfig::default(),
soft_wrap: SoftWrap::default(),
text_width: 80,
}
}
}
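`Document::text_format` above resolves every soft-wrap option through the same three-level chain: the language's `[soft-wrap]` table, then the editor-wide `[editor.soft-wrap]` section, then a hard-coded default, while `text-width` falls back to the new global default of 80 shown here. The layering is just `Option::or` plus `unwrap_or`; a tiny sketch with illustrative values:

```rust
/// Resolve a per-language override against the editor-wide setting and a built-in default.
fn layered<T>(language: Option<T>, editor: Option<T>, default: T) -> T {
    language.or(editor).unwrap_or(default)
}

fn main() {
    // max-wrap: language unset, editor unset -> built-in default of 20.
    assert_eq!(layered(None, None, 20u16), 20);
    // enable: the editor turns soft-wrap on and the language does not override it.
    assert!(layered(None, Some(true), false));
    // text-width: the language value (e.g. git-commit's 72) wins over the global 80.
    assert_eq!(layered(Some(72usize), Some(80), 80), 72);
}
```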
@ -895,7 +861,12 @@ pub struct Editor {
/// The currently applied editor theme. While previewing a theme, the previewed theme
/// is set here.
pub theme: Theme,
pub last_line_number: Option<usize>,
/// The primary Selection prior to starting a goto_line_number preview. This is
/// restored when the preview is aborted, or added to the jumplist when it is
/// confirmed.
pub last_selection: Option<Selection>,
pub status_msg: Option<(Cow<'static, str>, Severity)>,
pub autoinfo: Option<Info>,
@ -1023,7 +994,7 @@ impl Editor {
syn_loader,
theme_loader,
last_theme: None,
last_line_number: None,
last_selection: None,
registers: Registers::default(),
clipboard_provider: get_clipboard_provider(),
status_msg: None,

@ -100,7 +100,7 @@ pub fn diff<'doc>(
let deleted = theme.get("diff.minus");
let modified = theme.get("diff.delta");
if let Some(diff_handle) = doc.diff_handle() {
let hunks = diff_handle.hunks();
let hunks = diff_handle.load();
let mut hunk_i = 0;
let mut hunk = hunks.nth_hunk(hunk_i);
Box::new(

@ -324,7 +324,7 @@ args = { mode = "local", processId = "{0}" }
[[grammar]]
name = "go"
source = { git = "https://github.com/tree-sitter/tree-sitter-go", rev = "05900faa3cdb5d2d8c8bd5e77ee698487e0a8611" }
source = { git = "https://github.com/tree-sitter/tree-sitter-go", rev = "64457ea6b73ef5422ed1687178d4545c3e91334a" }
[[language]]
name = "gomod"
@ -571,7 +571,7 @@ indent = { tab-width = 4, unit = " " }
[[grammar]]
name = "php"
source = { git = "https://github.com/tree-sitter/tree-sitter-php", rev = "57f855461aeeca73bd4218754fb26b5ac143f98f" }
source = { git = "https://github.com/tree-sitter/tree-sitter-php", rev = "f860e598194f4a71747f91789bf536b393ad4a56" }
[[language]]
name = "twig"
@ -1104,7 +1104,7 @@ file-types = ["COMMIT_EDITMSG"]
comment-token = "#"
indent = { tab-width = 2, unit = " " }
rulers = [50, 72]
max-line-length = 72
text-width = 72
[[grammar]]
name = "git-commit"

@ -19,6 +19,9 @@
(method_declaration
name: (field_identifier) @function.method)
(method_spec
name: (field_identifier) @function.method)
; Identifiers
((identifier) @constant (match? @constant "^[A-Z][A-Z\\d_]+$"))
@ -32,10 +35,19 @@
(match? @type.builtin "^(any|bool|byte|comparable|complex128|complex64|error|float32|float64|int|int16|int32|int64|int8|rune|string|uint|uint16|uint32|uint64|uint8|uintptr)$"))
(type_identifier) @type
(type_spec
name: (type_identifier) @constructor)
(field_identifier) @variable.other.member
(identifier) @variable
(package_identifier) @variable
(package_identifier) @namespace
(parameter_declaration (identifier) @variable.parameter)
(variadic_parameter_declaration (identifier) @variable.parameter)
(label_name) @label
(const_spec
name: (identifier) @constant)
; Operators
@ -82,36 +94,57 @@
; Keywords
[
"break"
"case"
"chan"
"const"
"continue"
"default"
"defer"
"type"
] @keyword
[
"if"
"else"
"fallthrough"
"switch"
"select"
"case"
] @keyword.control.conditional
[
"for"
"func"
"go"
"goto"
"if"
"interface"
"map"
"range"
"return"
"select"
"struct"
"switch"
"type"
"var"
] @keyword
] @keyword.control.repeat
[
"import"
"package"
] @keyword.control.import
[
"return"
"continue"
"break"
"fallthrough"
] @keyword.control.return
[
"func"
] @keyword.function
[
"var"
"chan"
"interface"
"map"
"struct"
] @keyword.storage.type
[
"const"
] @keyword.storage.modifier
[
"defer"
"goto"
"go"
] @function.macro
; Delimiters
[

@ -2,15 +2,63 @@
"?>" @tag
; Types
[
(primitive_type)
(cast_type)
] @type.builtin
(named_type
[ (name) @type
(qualified_name (name) @type)])
(base_clause
[ (name) @type
(qualified_name (name) @type)])
(enum_declaration
name: (name) @type.enum)
(primitive_type) @type.builtin
(cast_type) @type.builtin
(named_type (name) @type) @type
(named_type (qualified_name) @type) @type
(interface_declaration
name: (name) @constructor)
(class_declaration
name: (name) @constructor)
(trait_declaration
name:(name) @constructor)
(namespace_definition
name: (namespace_name (name) @namespace))
(namespace_name_as_prefix
(namespace_name (name) @namespace))
(namespace_use_clause
[ (name) @namespace
(qualified_name (name) @type) ])
(namespace_aliasing_clause (name) @namespace)
(class_interface_clause
[(name) @type
(qualified_name (name) @type)])
(scoped_call_expression
scope: [(name) @type
(qualified_name (name) @type)])
(class_constant_access_expression
. [(name) @constructor
(qualified_name (name) @constructor)]
(name) @constant)
(use_declaration (name) @type)
(binary_expression
operator: "instanceof"
right: [(name) @type
(qualified_name (name) @type)])
; Superglobals
(subscript_expression
(variable_name(name) @constant.builtin
@ -36,6 +84,21 @@
(function_definition
name: (name) @function)
(nullsafe_member_call_expression
name: (name) @function.method)
(object_creation_expression
[(name) @constructor
(qualified_name (name) @constructor)])
; Parameters
[
(simple_parameter)
(variadic_parameter)
] @variable.parameter
(argument
(name) @variable.parameter)
; Member
@ -62,68 +125,192 @@
(variable_name) @variable
; Attributes
(attribute_list) @attribute
; Basic tokens
(string) @string
(heredoc) @string
[
(string)
(encapsed_string)
(heredoc_body)
(nowdoc_body)
(shell_command_expression)
] @string
(escape_sequence) @constant.character.escape
(boolean) @constant.builtin.boolean
(null) @constant.builtin
(integer) @constant.numeric.integer
(float) @constant.numeric.float
(comment) @comment
"$" @operator
(goto_statement (name) @label)
(named_label_statement (name) @label)
; Keywords
[
"abstract"
"as"
"break"
"case"
"catch"
"class"
"const"
"continue"
"declare"
"default"
"do"
"echo"
"else"
"elseif"
"enddeclare"
"endforeach"
"endif"
"endswitch"
"endwhile"
"enum"
"extends"
"final"
"finally"
"foreach"
"fn"
"function"
"goto"
"global"
"if"
"implements"
"include_once"
"include"
"insteadof"
"interface"
"match"
"namespace"
"new"
"private"
"protected"
"public"
] @keyword
[
"if"
"else"
"elseif"
"endif"
"switch"
"endswitch"
"case"
"match"
"declare"
"enddeclare"
"??"
] @keyword.control.conditional
[
"for"
"endfor"
"foreach"
"endforeach"
"while"
"endwhile"
"do"
] @keyword.control.repeat
[
"include_once"
"include"
"require_once"
"require"
"use"
] @keyword.control.import
[
"return"
"static"
"switch"
"break"
"continue"
"yield"
] @keyword.control.return
[
"throw"
"trait"
"try"
"use"
"while"
] @keyword
"catch"
"finally"
] @keyword.control.exception
[
"as"
"or"
"xor"
"and"
"instanceof"
] @keyword.operator
[
"fn"
"function"
] @keyword.function
[
"namespace"
"class"
"interface"
"trait"
"abstract"
] @keyword.storage.type
[
"static"
"const"
] @keyword.storage.modifier
[
","
";"
":"
"\\"
] @punctuation.delimiter
[
(php_tag)
"?>"
"("
")"
"["
"]"
"{"
"}"
"#["
] @punctuation.bracket
[
"="
"."
"-"
"*"
"/"
"+"
"%"
"**"
"~"
"|"
"^"
"&"
"<<"
">>"
"->"
"?->"
"=>"
"<"
"<="
">="
">"
"<>"
"=="
"!="
"==="
"!=="
"!"
"&&"
"||"
".="
"-="
"+="
"*="
"/="
"%="
"**="
"&="
"|="
"^="
"<<="
">>="
"??="
"--"
"++"
"@"
"::"
] @operator
